Ready to test

This commit is contained in:
Mike Phares 2022-02-18 16:43:30 -07:00
parent baf8f8b1a5
commit cef49ab67b
182 changed files with 14333 additions and 15482 deletions

38
Adaptation/.vscode/format-report.json vendored Normal file
View File

@ -0,0 +1,38 @@
[
{
"DocumentId": {
"ProjectId": {
"Id": "5de73c6e-a3a2-4ec4-ae79-d15993268b3c"
},
"Id": "89553829-ed12-4613-93e4-505108d314f9"
},
"FileName": "Detail.cs",
"FilePath": "T:\\MET08DDUPSFS6420\\06_SourceCode\\MET08DDUPSFS6420\\Adaptation\\FileHandlers\\pcl\\Detail.cs",
"FileChanges": [
{
"LineNumber": 43,
"CharNumber": 16,
"DiagnosticId": "IDE1006",
"FormatDescription": "warning IDE1006: Naming rule violation: These words must begin with upper case characters: i"
}
]
},
{
"DocumentId": {
"ProjectId": {
"Id": "5de73c6e-a3a2-4ec4-ae79-d15993268b3c"
},
"Id": "edd1edc5-c335-46e6-9a59-d0ae024c8e63"
},
"FileName": "WSRequest.cs",
"FilePath": "T:\\MET08DDUPSFS6420\\06_SourceCode\\MET08DDUPSFS6420\\Adaptation\\FileHandlers\\MET08DDUPSFS6420\\WSRequest.cs",
"FileChanges": [
{
"LineNumber": 64,
"CharNumber": 16,
"DiagnosticId": "IDE1006",
"FormatDescription": "warning IDE1006: Naming rule violation: These words must begin with upper case characters: i"
}
]
}
]

40
Adaptation/.vscode/settings.json vendored Normal file
View File

@ -0,0 +1,40 @@
{
"cSpell.words": [
"DDUPSFS",
"EQPT",
"gpcl",
"IKVM",
"ISMTP",
"Linc",
"LPDCM",
"messa",
"NOPAUSE",
"PDFC",
"pdmodel",
"pdsf",
"PPTST",
"RESIMAPTENCOR",
"TENCOR",
"TIBCO"
],
"workbench.colorCustomizations": {
"activityBar.activeBackground": "#7933a3",
"activityBar.activeBorder": "#31240f",
"activityBar.background": "#7933a3",
"activityBar.foreground": "#e7e7e7",
"activityBar.inactiveForeground": "#e7e7e799",
"activityBarBadge.background": "#31240f",
"activityBarBadge.foreground": "#e7e7e7",
"sash.hoverBorder": "#7933a3",
"statusBar.background": "#5c277c",
"statusBar.foreground": "#e7e7e7",
"statusBarItem.hoverBackground": "#7933a3",
"statusBarItem.remoteBackground": "#5c277c",
"statusBarItem.remoteForeground": "#e7e7e7",
"titleBar.activeBackground": "#5c277c",
"titleBar.activeForeground": "#e7e7e7",
"titleBar.inactiveBackground": "#5c277c99",
"titleBar.inactiveForeground": "#e7e7e799"
},
"peacock.color": "#5c277c"
}

View File

@ -1,6 +1,5 @@
namespace Adaptation.Eaf.Core
namespace Adaptation.Eaf.Core;
public class BackboneComponent
{
public class BackboneComponent
{
}
}

View File

@ -1,6 +1,5 @@
namespace Adaptation.Eaf.Core
namespace Adaptation.Eaf.Core;
public class BackboneStatusCache
{
public class BackboneStatusCache
{
}
}

View File

@ -1,6 +1,5 @@
namespace Adaptation.Eaf.Core
namespace Adaptation.Eaf.Core;
public interface ILoggingSetupManager
{
public interface ILoggingSetupManager
{
}
}

View File

@ -1,6 +1,5 @@
namespace Adaptation.Eaf.Core
namespace Adaptation.Eaf.Core;
public class StatusItem
{
public class StatusItem
{
}
}

View File

@ -2,47 +2,52 @@
using System;
using System.Collections.Generic;
namespace Adaptation.Eaf.Core
namespace Adaptation.Eaf.Core;
public class Backbone
{
public class Backbone
{
public const string STATE_ERROR = "Error";
public const string STATE_OFFLINE = "Offline";
public const string STATE_RUNNING = "Running";
public const string STATE_SHUTDOWN = "Shutting Down";
public const string STATE_STARTING = "Starting";
protected Backbone() { }
#pragma warning disable CA1822
#pragma warning disable CA2254
#pragma warning disable IDE0060
[NotNull]
public static Backbone Instance { get; }
[NotNull]
public ILoggingSetupManager LoggingConfigurationManager { get; set; }
public BackboneStatusCache Status { get; }
public bool IsAutomatedRestartActive { get; }
public bool IsReadyForRestart { get; }
public string StartTime { get; }
public string State { get; }
public string Name { get; }
public string ConfigurationServiceAddress { get; }
public string CellName { get; }
protected bool IsInitialized { get; set; }
protected Dictionary<string, BackboneComponent> BackboneComponents { get; }
public const string STATE_ERROR = "Error";
public const string STATE_OFFLINE = "Offline";
public const string STATE_RUNNING = "Running";
public const string STATE_SHUTDOWN = "Shutting Down";
public const string STATE_STARTING = "Starting";
protected Backbone() { }
[NotNull]
public static Backbone Instance { get; }
[NotNull]
public ILoggingSetupManager LoggingConfigurationManager { get; set; }
public BackboneStatusCache Status { get; }
public bool IsAutomatedRestartActive { get; }
public bool IsReadyForRestart { get; }
public string StartTime { get; }
public string State { get; }
public string Name { get; }
public string ConfigurationServiceAddress { get; }
public string CellName { get; }
protected bool IsInitialized { get; set; }
protected Dictionary<string, BackboneComponent> BackboneComponents { get; }
public void AddBackboneComponent(BackboneComponent backboneComponent) { }
public bool ContainsBackboneComponent(string id) => throw new NotImplementedException();
[Obsolete("Use the capabilities exposed via the Status property -> GetAll. Will be removed with next major release.")]
public List<StatusItem> GetAllStatuses() => throw new NotImplementedException();
public BackboneComponent GetBackboneComponentById(string id) => throw new NotImplementedException();
public List<T> GetBackboneComponentsOfType<T>() => throw new NotImplementedException();
public List<BackboneComponent> GetBackboneComponentsOfType(Type type) => throw new NotImplementedException();
public void RegisterSubprocess(int pid) { }
[Obsolete("Use the capabilities exposed via the Status property -> SetValue. Will be removed with next major release.")]
public void SetStatus(string statusName, string statusValue) { }
[Obsolete("Use the capabilities exposed via the Status property -> SetValue. Will be removed with next major release.")]
public void SetStatus(BackboneComponent source, string statusName, string statusValue) { }
protected void CloseConnectionOfComponents(List<BackboneComponent> components) { }
protected virtual void StopAllComponents() { }
protected void StopComponents(List<BackboneComponent> components) { }
public void AddBackboneComponent(BackboneComponent backboneComponent) { }
public bool ContainsBackboneComponent(string id) { throw new NotImplementedException(); }
[Obsolete("Use the capabilities exposed via the Status property -> GetAll. Will be removed with next major release.")]
public List<StatusItem> GetAllStatuses() { throw new NotImplementedException(); }
public BackboneComponent GetBackboneComponentById(string id) { throw new NotImplementedException(); }
public List<T> GetBackboneComponentsOfType<T>() { throw new NotImplementedException(); }
public List<BackboneComponent> GetBackboneComponentsOfType(Type type) { throw new NotImplementedException(); }
public void RegisterSubprocess(int pid) { }
[Obsolete("Use the capabilities exposed via the Status property -> SetValue. Will be removed with next major release.")]
public void SetStatus(string statusName, string statusValue) { }
[Obsolete("Use the capabilities exposed via the Status property -> SetValue. Will be removed with next major release.")]
public void SetStatus(BackboneComponent source, string statusName, string statusValue) { }
protected void CloseConnectionOfComponents(List<BackboneComponent> components) { }
protected virtual void StopAllComponents() { }
protected void StopComponents(List<BackboneComponent> components) { }
}
}

View File

@ -1,24 +1,25 @@
using System;
namespace Adaptation.Eaf.Core.Smtp
namespace Adaptation.Eaf.Core.Smtp;
public class EmailMessage
{
public class EmailMessage
{
public EmailMessage() { }
public EmailMessage(string subject, string body, MailPriority priority = MailPriority.Normal) { }
#pragma warning disable CA2254
#pragma warning disable IDE0060
public string Body { get; }
public MailPriority Priority { get; }
public string Subject { get; }
public EmailMessage() { }
public EmailMessage(string subject, string body, MailPriority priority = MailPriority.Normal) { }
public EmailMessage PriorityHigh() { throw new NotImplementedException(); }
public EmailMessage PriorityLow() { throw new NotImplementedException(); }
public EmailMessage PriorityNormal() { throw new NotImplementedException(); }
public EmailMessage SetBody(string body) { throw new NotImplementedException(); }
public EmailMessage SetPriority(MailPriority priority) { throw new NotImplementedException(); }
public EmailMessage SetSubject(string subject) { throw new NotImplementedException(); }
public string Body { get; }
public MailPriority Priority { get; }
public string Subject { get; }
}
public EmailMessage PriorityHigh() => throw new NotImplementedException();
public EmailMessage PriorityLow() => throw new NotImplementedException();
public EmailMessage PriorityNormal() => throw new NotImplementedException();
public EmailMessage SetBody(string body) => throw new NotImplementedException();
public EmailMessage SetPriority(MailPriority priority) => throw new NotImplementedException();
public EmailMessage SetSubject(string subject) => throw new NotImplementedException();
}

View File

@ -1,9 +1,6 @@
namespace Adaptation.Eaf.Core.Smtp
namespace Adaptation.Eaf.Core.Smtp;
public interface ISmtp
{
public interface ISmtp
{
void Send(EmailMessage message);
}
void Send(EmailMessage message);
}

View File

@ -1,11 +1,8 @@
namespace Adaptation.Eaf.Core.Smtp
namespace Adaptation.Eaf.Core.Smtp;
public enum MailPriority
{
public enum MailPriority
{
Low = 0,
Normal = 1,
High = 2
}
Low = 0,
Normal = 1,
High = 2
}

View File

@ -1,6 +1,5 @@
namespace Adaptation.Eaf.EquipmentCore.Control
namespace Adaptation.Eaf.EquipmentCore.Control;
public class ChangeDataCollectionHandler
{
public class ChangeDataCollectionHandler
{
}
}

View File

@ -1,6 +1,5 @@
namespace Adaptation.Eaf.EquipmentCore.Control
namespace Adaptation.Eaf.EquipmentCore.Control;
public class DataCollectionRequest
{
public class DataCollectionRequest
{
}
}

View File

@ -1,6 +1,5 @@
namespace Adaptation.Eaf.EquipmentCore.Control
namespace Adaptation.Eaf.EquipmentCore.Control;
public class EquipmentEvent
{
public class EquipmentEvent
{
}
}

View File

@ -1,6 +1,5 @@
namespace Adaptation.Eaf.EquipmentCore.Control
namespace Adaptation.Eaf.EquipmentCore.Control;
public class EquipmentException
{
public class EquipmentException
{
}
}

View File

@ -1,6 +1,5 @@
namespace Adaptation.Eaf.EquipmentCore.Control
namespace Adaptation.Eaf.EquipmentCore.Control;
public class EquipmentSelfDescription
{
public class EquipmentSelfDescription
{
}
}

View File

@ -1,6 +1,5 @@
namespace Adaptation.Eaf.EquipmentCore.Control
namespace Adaptation.Eaf.EquipmentCore.Control;
public class GetParameterValuesHandler
{
public class GetParameterValuesHandler
{
}
}

View File

@ -1,6 +1,5 @@
namespace Adaptation.Eaf.EquipmentCore.Control
namespace Adaptation.Eaf.EquipmentCore.Control;
public interface IConnectionControl
{
public interface IConnectionControl
{
}
}

View File

@ -1,6 +1,5 @@
namespace Adaptation.Eaf.EquipmentCore.Control
namespace Adaptation.Eaf.EquipmentCore.Control;
public interface IDataTracingHandler
{
public interface IDataTracingHandler
{
}
}

View File

@ -1,6 +1,5 @@
namespace Adaptation.Eaf.EquipmentCore.Control
namespace Adaptation.Eaf.EquipmentCore.Control;
public interface IEquipmentCommandService
{
public interface IEquipmentCommandService
{
}
}

View File

@ -1,16 +1,15 @@
using Adaptation.PeerGroup.GCL.Annotations;
namespace Adaptation.Eaf.EquipmentCore.Control
namespace Adaptation.Eaf.EquipmentCore.Control;
public interface IEquipmentControl : IPackageSource
{
public interface IEquipmentControl : IPackageSource
{
[NotNull]
IEquipmentSelfDescriptionBuilder SelfDescriptionBuilder { get; }
[NotNull]
IEquipmentDataCollection DataCollection { get; }
[NotNull]
IEquipmentCommandService Commands { get; }
[NotNull]
IConnectionControl Connection { get; }
}
[NotNull]
IEquipmentSelfDescriptionBuilder SelfDescriptionBuilder { get; }
[NotNull]
IEquipmentDataCollection DataCollection { get; }
[NotNull]
IEquipmentCommandService Commands { get; }
[NotNull]
IConnectionControl Connection { get; }
}

View File

@ -1,6 +1,5 @@
namespace Adaptation.Eaf.EquipmentCore.Control
namespace Adaptation.Eaf.EquipmentCore.Control;
public interface IEquipmentSelfDescriptionBuilder
{
public interface IEquipmentSelfDescriptionBuilder
{
}
}

View File

@ -1,6 +1,5 @@
namespace Adaptation.Eaf.EquipmentCore.Control
namespace Adaptation.Eaf.EquipmentCore.Control;
public interface IPackage
{
public interface IPackage
{
}
}

View File

@ -1,6 +1,5 @@
namespace Adaptation.Eaf.EquipmentCore.Control
namespace Adaptation.Eaf.EquipmentCore.Control;
public interface ISelfDescriptionLookup
{
public interface ISelfDescriptionLookup
{
}
}

View File

@ -1,6 +1,5 @@
namespace Adaptation.Eaf.EquipmentCore.Control
namespace Adaptation.Eaf.EquipmentCore.Control;
public interface IVirtualParameterValuesHandler
{
public interface IVirtualParameterValuesHandler
{
}
}

View File

@ -1,6 +1,5 @@
namespace Adaptation.Eaf.EquipmentCore.Control
namespace Adaptation.Eaf.EquipmentCore.Control;
public class SetParameterValuesHandler
{
public class SetParameterValuesHandler
{
}
}

View File

@ -1,6 +1,5 @@
namespace Adaptation.Eaf.EquipmentCore.Control
namespace Adaptation.Eaf.EquipmentCore.Control;
public class TraceRequest
{
public class TraceRequest
{
}
}

View File

@ -3,37 +3,36 @@ using Adaptation.Eaf.EquipmentCore.SelfDescription.ElementDescription;
using System;
using System.Collections.Generic;
namespace Adaptation.Eaf.EquipmentCore.Control
{
public interface IEquipmentDataCollection
{
IVirtualParameterValuesHandler VirtualParameterValuesHandler { get; }
ISelfDescriptionLookup SelfDescriptionLookup { get; }
EquipmentSelfDescription SelfDescription { get; }
IEnumerable<DataCollectionRequest> ActiveRequests { get; }
IDataTracingHandler DataTracingHandler { get; }
namespace Adaptation.Eaf.EquipmentCore.Control;
ParameterValue CreateParameterValue(EquipmentParameter parameter, object value);
void NotifyDataTracingAvailable(bool isAvailable);
void RegisterChangeDataCollectionHandler(ChangeDataCollectionHandler handler);
void RegisterDataTracingHandler(IDataTracingHandler handler);
void RegisterGetParameterValuesHandler(GetParameterValuesHandler handler);
void RegisterSetParameterValuesHandler(SetParameterValuesHandler handler);
void TriggerDeactivate(DataCollectionRequest deactivateRequest);
void TriggerEvent(EquipmentEvent equipmentEvent, IEnumerable<ParameterValue> parameters);
void TriggerEvent(EquipmentEvent equipmentEvent, IEnumerable<ParameterValue> parameters, IPackage sourcePackage);
void TriggerExceptionClear(EquipmentException equipmentException, IEnumerable<ParameterValue> parameters);
void TriggerExceptionClear(EquipmentException equipmentException, IEnumerable<ParameterValue> parameters, IPackage sourcePackage);
void TriggerExceptionClear(EquipmentException equipmentException, IEnumerable<ParameterValue> parameters, string severityOverride, string descriptionOverride);
void TriggerExceptionClear(EquipmentException equipmentException, IEnumerable<ParameterValue> parameters, string severityOverride, string descriptionOverride, IPackage sourcePackage);
void TriggerExceptionSet(EquipmentException equipmentException, IEnumerable<ParameterValue> parameters, string severityOverride, string descriptionOverride, IPackage sourcePackage);
void TriggerExceptionSet(EquipmentException equipmentException, IEnumerable<ParameterValue> parameters, string severityOverride, string descriptionOverride);
void TriggerExceptionSet(EquipmentException equipmentException, IEnumerable<ParameterValue> parameters, IPackage sourcePackage);
void TriggerExceptionSet(EquipmentException equipmentException, IEnumerable<ParameterValue> parameters);
void TriggerPerformanceRestored();
void TriggerPerformanceWarning();
void TriggerTraceSample(TraceRequest traceRequest, long sampleId, IEnumerable<ParameterValue> parameters);
void TriggerTraceSample(TraceRequest traceRequest, long sampleId, IEnumerable<ParameterValue> parameters, IPackage sourcePackage);
void TriggerTraceSample(TraceRequest traceRequest, long sampleId, IEnumerable<ParameterValue> parameters, DateTime equipmentTimeStamp);
}
public interface IEquipmentDataCollection
{
IVirtualParameterValuesHandler VirtualParameterValuesHandler { get; }
ISelfDescriptionLookup SelfDescriptionLookup { get; }
EquipmentSelfDescription SelfDescription { get; }
IEnumerable<DataCollectionRequest> ActiveRequests { get; }
IDataTracingHandler DataTracingHandler { get; }
ParameterValue CreateParameterValue(EquipmentParameter parameter, object value);
void NotifyDataTracingAvailable(bool isAvailable);
void RegisterChangeDataCollectionHandler(ChangeDataCollectionHandler handler);
void RegisterDataTracingHandler(IDataTracingHandler handler);
void RegisterGetParameterValuesHandler(GetParameterValuesHandler handler);
void RegisterSetParameterValuesHandler(SetParameterValuesHandler handler);
void TriggerDeactivate(DataCollectionRequest deactivateRequest);
void TriggerEvent(EquipmentEvent equipmentEvent, IEnumerable<ParameterValue> parameters);
void TriggerEvent(EquipmentEvent equipmentEvent, IEnumerable<ParameterValue> parameters, IPackage sourcePackage);
void TriggerExceptionClear(EquipmentException equipmentException, IEnumerable<ParameterValue> parameters);
void TriggerExceptionClear(EquipmentException equipmentException, IEnumerable<ParameterValue> parameters, IPackage sourcePackage);
void TriggerExceptionClear(EquipmentException equipmentException, IEnumerable<ParameterValue> parameters, string severityOverride, string descriptionOverride);
void TriggerExceptionClear(EquipmentException equipmentException, IEnumerable<ParameterValue> parameters, string severityOverride, string descriptionOverride, IPackage sourcePackage);
void TriggerExceptionSet(EquipmentException equipmentException, IEnumerable<ParameterValue> parameters, string severityOverride, string descriptionOverride, IPackage sourcePackage);
void TriggerExceptionSet(EquipmentException equipmentException, IEnumerable<ParameterValue> parameters, string severityOverride, string descriptionOverride);
void TriggerExceptionSet(EquipmentException equipmentException, IEnumerable<ParameterValue> parameters, IPackage sourcePackage);
void TriggerExceptionSet(EquipmentException equipmentException, IEnumerable<ParameterValue> parameters);
void TriggerPerformanceRestored();
void TriggerPerformanceWarning();
void TriggerTraceSample(TraceRequest traceRequest, long sampleId, IEnumerable<ParameterValue> parameters);
void TriggerTraceSample(TraceRequest traceRequest, long sampleId, IEnumerable<ParameterValue> parameters, IPackage sourcePackage);
void TriggerTraceSample(TraceRequest traceRequest, long sampleId, IEnumerable<ParameterValue> parameters, DateTime equipmentTimeStamp);
}

View File

@ -1,6 +1,5 @@
namespace Adaptation.Eaf.EquipmentCore.Control
namespace Adaptation.Eaf.EquipmentCore.Control;
public interface IPackageSource
{
public interface IPackageSource
{
}
}

View File

@ -2,19 +2,24 @@
using Adaptation.PeerGroup.GCL.Annotations;
using System;
namespace Adaptation.Eaf.EquipmentCore.DataCollection.Reporting
namespace Adaptation.Eaf.EquipmentCore.DataCollection.Reporting;
public class ParameterValue
{
public class ParameterValue
{
public ParameterValue(EquipmentParameter definition, object value) { }
public ParameterValue(EquipmentParameter definition, object value, DateTime timestamp) { }
public virtual object Value { get; protected internal set; }
[NotNull]
public EquipmentParameter Definition { get; }
public DateTime Timestamp { get; protected set; }
#pragma warning disable CA1822
#pragma warning disable CA2254
#pragma warning disable IDE0060
public ParameterValue(EquipmentParameter definition, object value) { }
public ParameterValue(EquipmentParameter definition, object value, DateTime timestamp) { }
public virtual object Value { get; protected internal set; }
[NotNull]
public EquipmentParameter Definition { get; }
public DateTime Timestamp { get; protected set; }
public virtual ParameterValue Clone(EquipmentParameter newDefinition) => throw new NotImplementedException();
public override string ToString() => base.ToString();
public virtual ParameterValue Clone(EquipmentParameter newDefinition) { throw new NotImplementedException(); }
public override string ToString() { return base.ToString(); }
}
}

View File

@ -1,24 +1,27 @@
using Adaptation.Eaf.EquipmentCore.SelfDescription.ParameterTypes;
namespace Adaptation.Eaf.EquipmentCore.SelfDescription.ElementDescription
namespace Adaptation.Eaf.EquipmentCore.SelfDescription.ElementDescription;
public class EquipmentParameter
{
public class EquipmentParameter
{
public EquipmentParameter(EquipmentParameter source, ParameterTypeDefinition typeDefinition) { }
public EquipmentParameter(string name, ParameterTypeDefinition typeDefinition, string description, bool isTransient = false, bool isReadOnly = true) { }
public EquipmentParameter(string id, string name, ParameterTypeDefinition typeDefinition, string description, bool isTransient = false, bool isReadOnly = true) { }
public string Name { get; }
public string Id { get; }
public string Description { get; }
public string SourcePath { get; }
public string SourceEquipment { get; }
public ParameterTypeDefinition TypeDefinition { get; }
public bool IsTransient { get; }
public bool IsReadOnly { get; }
#pragma warning disable CA2254
#pragma warning disable IDE0060
public override string ToString() { return base.ToString(); }
public string ToStringWithDetails() { return base.ToString(); }
}
public EquipmentParameter(EquipmentParameter source, ParameterTypeDefinition typeDefinition) { }
public EquipmentParameter(string name, ParameterTypeDefinition typeDefinition, string description, bool isTransient = false, bool isReadOnly = true) { }
public EquipmentParameter(string id, string name, ParameterTypeDefinition typeDefinition, string description, bool isTransient = false, bool isReadOnly = true) { }
public string Name { get; }
public string Id { get; }
public string Description { get; }
public string SourcePath { get; }
public string SourceEquipment { get; }
public ParameterTypeDefinition TypeDefinition { get; }
public bool IsTransient { get; }
public bool IsReadOnly { get; }
public override string ToString() => base.ToString();
public string ToStringWithDetails() => base.ToString();
}

View File

@ -1,12 +1,16 @@
namespace Adaptation.Eaf.EquipmentCore.SelfDescription.ParameterTypes
{
public class Field
{
public Field(string name, string description, bool canBeNull, ParameterTypeDefinition typeDefinition) { }
namespace Adaptation.Eaf.EquipmentCore.SelfDescription.ParameterTypes;
public class Field
{
#pragma warning disable CA2254
#pragma warning disable IDE0060
public Field(string name, string description, bool canBeNull, ParameterTypeDefinition typeDefinition) { }
public string Name { get; }
public string Description { get; }
public ParameterTypeDefinition TypeDefinition { get; }
public bool CanBeNull { get; }
public string Name { get; }
public string Description { get; }
public ParameterTypeDefinition TypeDefinition { get; }
public bool CanBeNull { get; }
}
}

View File

@ -1,12 +1,16 @@
namespace Adaptation.Eaf.EquipmentCore.SelfDescription.ParameterTypes
namespace Adaptation.Eaf.EquipmentCore.SelfDescription.ParameterTypes;
public abstract class ParameterTypeDefinition
{
public abstract class ParameterTypeDefinition
{
public ParameterTypeDefinition(string name, string description) { }
public string Name { get; }
public string Description { get; }
#pragma warning disable CA2254
#pragma warning disable IDE0060
public ParameterTypeDefinition(string name, string description) { }
public string Name { get; }
public string Description { get; }
public override string ToString() => base.ToString();
public override string ToString() { return base.ToString(); }
}
}

View File

@ -1,12 +1,16 @@
using System.Collections.Generic;
namespace Adaptation.Eaf.EquipmentCore.SelfDescription.ParameterTypes
namespace Adaptation.Eaf.EquipmentCore.SelfDescription.ParameterTypes;
public class StructuredType : ParameterTypeDefinition
{
public class StructuredType : ParameterTypeDefinition
{
public StructuredType(string name, string description, IList<Field> fields) : base(name, description) { }
#pragma warning disable CA1822
#pragma warning disable CA2254
#pragma warning disable IDE0060
public StructuredType(string name, string description, IList<Field> fields) : base(name, description) { }
public IList<Field> Fields { get; }
public IList<Field> Fields { get; }
}
}

View File

@ -1,6 +1,5 @@
namespace Adaptation.Eaf.Management.ConfigurationData.CellAutomation
namespace Adaptation.Eaf.Management.ConfigurationData.CellAutomation;
public interface IConfigurationObject
{
public interface IConfigurationObject
{
}
}

View File

@ -1,26 +1,30 @@
using System;
namespace Adaptation.Eaf.Management.ConfigurationData.CellAutomation
namespace Adaptation.Eaf.Management.ConfigurationData.CellAutomation;
[System.Runtime.Serialization.DataContractAttribute(IsReference = true)]
public class ModelObjectParameterDefinition : IConfigurationObject
{
[System.Runtime.Serialization.DataContractAttribute(IsReference = true)]
public class ModelObjectParameterDefinition : IConfigurationObject
{
public ModelObjectParameterDefinition() { }
public ModelObjectParameterDefinition(string name, ModelObjectParameterType valueType, object defaultValue) { }
public ModelObjectParameterDefinition(string name, Type enumType, object defaultValue) { }
[System.Runtime.Serialization.DataMemberAttribute]
public virtual long Id { get; set; }
[System.Runtime.Serialization.DataMemberAttribute]
public virtual string Name { get; set; }
[System.Runtime.Serialization.DataMemberAttribute]
public virtual string Value { get; set; }
[System.Runtime.Serialization.DataMemberAttribute]
public virtual ModelObjectParameterType ValueType { get; set; }
[System.Runtime.Serialization.DataMemberAttribute]
public virtual string EnumType { get; set; }
#pragma warning disable CA2254
#pragma warning disable IDE0060
public ModelObjectParameterDefinition() { }
public ModelObjectParameterDefinition(string name, ModelObjectParameterType valueType, object defaultValue) { }
public ModelObjectParameterDefinition(string name, Type enumType, object defaultValue) { }
[System.Runtime.Serialization.DataMemberAttribute]
public virtual long Id { get; set; }
[System.Runtime.Serialization.DataMemberAttribute]
public virtual string Name { get; set; }
[System.Runtime.Serialization.DataMemberAttribute]
public virtual string Value { get; set; }
[System.Runtime.Serialization.DataMemberAttribute]
public virtual ModelObjectParameterType ValueType { get; set; }
[System.Runtime.Serialization.DataMemberAttribute]
public virtual string EnumType { get; set; }
public virtual ModelObjectParameterDefinition Clone() => null;
public virtual bool IsValidValue(string value) => false;
public virtual ModelObjectParameterDefinition Clone() { return null; }
public virtual bool IsValidValue(string value) { return false; }
}
}

View File

@ -1,17 +1,16 @@
namespace Adaptation.Eaf.Management.ConfigurationData.CellAutomation
namespace Adaptation.Eaf.Management.ConfigurationData.CellAutomation;
public enum ModelObjectParameterType
{
public enum ModelObjectParameterType
{
String = 0,
Bool = 1,
Byte = 2,
SignedByte = 3,
Integer = 4,
UnsignedInteger = 5,
LongInteger = 6,
UnsignedLongInteger = 7,
Double = 8,
Float = 9,
Enum = 10
}
String = 0,
Bool = 1,
Byte = 2,
SignedByte = 3,
Integer = 4,
UnsignedInteger = 5,
LongInteger = 6,
UnsignedLongInteger = 7,
Double = 8,
Float = 9,
Enum = 10
}

View File

@ -1,44 +1,43 @@
using Adaptation.PeerGroup.GCL.SecsDriver;
using System;
namespace Adaptation.Eaf.Management.ConfigurationData.Semiconductor.CellInstances
{
[System.Runtime.Serialization.DataContractAttribute]
public class SecsConnectionConfiguration
{
public SecsConnectionConfiguration() { }
namespace Adaptation.Eaf.Management.ConfigurationData.Semiconductor.CellInstances;
[System.Runtime.Serialization.DataMemberAttribute]
public virtual TimeSpan T6HsmsControlMessage { get; set; }
[System.Runtime.Serialization.DataMemberAttribute]
public virtual TimeSpan T5ConnectionSeperation { get; set; }
[System.Runtime.Serialization.DataMemberAttribute]
public virtual TimeSpan T4InterBlock { get; set; }
[System.Runtime.Serialization.DataMemberAttribute]
public virtual TimeSpan T3MessageReply { get; set; }
[System.Runtime.Serialization.DataMemberAttribute]
public virtual TimeSpan T2Protocol { get; set; }
[System.Runtime.Serialization.DataMemberAttribute]
public virtual TimeSpan T1InterCharacter { get; set; }
[System.Runtime.Serialization.DataMemberAttribute]
public virtual SerialBaudRate? BaudRate { get; set; }
[System.Runtime.Serialization.DataMemberAttribute]
public virtual SecsTransportType? PortType { get; set; }
[System.Runtime.Serialization.DataMemberAttribute]
public virtual long? Port { get; set; }
[System.Runtime.Serialization.DataMemberAttribute]
public virtual TimeSpan LinkTestTimer { get; set; }
[System.Runtime.Serialization.DataMemberAttribute]
public virtual string Host { get; set; }
[System.Runtime.Serialization.DataMemberAttribute]
public virtual long? DeviceId { get; set; }
[System.Runtime.Serialization.DataMemberAttribute]
public virtual HsmsSessionMode? SessionMode { get; set; }
[System.Runtime.Serialization.DataMemberAttribute]
public virtual HsmsConnectionMode? ConnectionMode { get; set; }
[System.Runtime.Serialization.DataMemberAttribute]
public virtual TimeSpan T7ConnectionIdle { get; set; }
[System.Runtime.Serialization.DataMemberAttribute]
public virtual TimeSpan T8NetworkIntercharacter { get; set; }
}
[System.Runtime.Serialization.DataContractAttribute]
public class SecsConnectionConfiguration
{
public SecsConnectionConfiguration() { }
[System.Runtime.Serialization.DataMemberAttribute]
public virtual TimeSpan T6HsmsControlMessage { get; set; }
[System.Runtime.Serialization.DataMemberAttribute]
public virtual TimeSpan T5ConnectionSeperation { get; set; }
[System.Runtime.Serialization.DataMemberAttribute]
public virtual TimeSpan T4InterBlock { get; set; }
[System.Runtime.Serialization.DataMemberAttribute]
public virtual TimeSpan T3MessageReply { get; set; }
[System.Runtime.Serialization.DataMemberAttribute]
public virtual TimeSpan T2Protocol { get; set; }
[System.Runtime.Serialization.DataMemberAttribute]
public virtual TimeSpan T1InterCharacter { get; set; }
[System.Runtime.Serialization.DataMemberAttribute]
public virtual SerialBaudRate? BaudRate { get; set; }
[System.Runtime.Serialization.DataMemberAttribute]
public virtual SecsTransportType? PortType { get; set; }
[System.Runtime.Serialization.DataMemberAttribute]
public virtual long? Port { get; set; }
[System.Runtime.Serialization.DataMemberAttribute]
public virtual TimeSpan LinkTestTimer { get; set; }
[System.Runtime.Serialization.DataMemberAttribute]
public virtual string Host { get; set; }
[System.Runtime.Serialization.DataMemberAttribute]
public virtual long? DeviceId { get; set; }
[System.Runtime.Serialization.DataMemberAttribute]
public virtual HsmsSessionMode? SessionMode { get; set; }
[System.Runtime.Serialization.DataMemberAttribute]
public virtual HsmsConnectionMode? ConnectionMode { get; set; }
[System.Runtime.Serialization.DataMemberAttribute]
public virtual TimeSpan T7ConnectionIdle { get; set; }
[System.Runtime.Serialization.DataMemberAttribute]
public virtual TimeSpan T8NetworkIntercharacter { get; set; }
}

View File

@ -0,0 +1,141 @@
using Adaptation.Eaf.Management.ConfigurationData.CellAutomation;
using Adaptation.Ifx.Eaf.EquipmentConnector.File.Configuration;
using Adaptation.Shared;
using Adaptation.Shared.Duplicator;
using Adaptation.Shared.Methods;
using System;
using System.Collections.Generic;
using System.Globalization;
using System.IO;
using System.Linq;
using System.Text.Json;
namespace Adaptation.FileHandlers.Archive;
/// <summary>
/// Duplicator handler for the Archive connection: after a PDSF report is
/// parsed, the originating run directory is moved into a week-partitioned
/// archive tree next to the configured target file location.
/// </summary>
public class FileRead : Shared.FileRead, IFileRead
{

    /// <summary>
    /// Wires the shared duplicator base and validates that this instance is
    /// actually configured as a duplicator; throws (with the connection name
    /// for diagnosis) when required state is missing.
    /// </summary>
    public FileRead(ISMTP smtp, Dictionary<string, string> fileParameter, string cellInstanceName, string cellInstanceConnectionName, FileConnectorConfiguration fileConnectorConfiguration, string equipmentTypeName, string parameterizedModelObjectDefinitionType, IList<ModelObjectParameterDefinition> modelObjectParameters, string equipmentDictionaryName, Dictionary<string, List<long>> dummyRuns, bool useCyclicalForDescription, bool isEAFHosted) :
        base(new Description(), false, smtp, fileParameter, cellInstanceName, cellInstanceConnectionName, fileConnectorConfiguration, equipmentTypeName, parameterizedModelObjectDefinitionType, modelObjectParameters, equipmentDictionaryName, dummyRuns, useCyclicalForDescription, isEAFHosted)
    {
        _MinFileLength = 10;
        _NullData = string.Empty;
        _Logistics = new Logistics(this);
        if (_FileParameter is null)
            throw new Exception(cellInstanceConnectionName);
        if (_ModelObjectParameterDefinitions is null)
            throw new Exception(cellInstanceConnectionName);
        if (!_IsDuplicator)
            throw new Exception(cellInstanceConnectionName);
    }

    // IFileRead pass-throughs that delegate to the shared base / description.
    void IFileRead.Move(Tuple<string, Test[], JsonElement[], List<FileInfo>> extractResults, Exception exception) => Move(extractResults, exception);

    void IFileRead.WaitForThread() => WaitForThread(thread: null, threadExceptions: null);

    string IFileRead.GetEventDescription()
    {
        string result = _Description.GetEventDescription();
        return result;
    }

    List<string> IFileRead.GetHeaderNames()
    {
        List<string> results = _Description.GetHeaderNames();
        return results;
    }

    string[] IFileRead.Move(Tuple<string, Test[], JsonElement[], List<FileInfo>> extractResults, string to, string from, string resolvedFileLocation, Exception exception)
    {
        string[] results = Move(extractResults, to, from, resolvedFileLocation, exception);
        return results;
    }

    JsonProperty[] IFileRead.GetDefault()
    {
        JsonProperty[] results = _Description.GetDefault(this, _Logistics);
        return results;
    }

    Dictionary<string, string> IFileRead.GetDisplayNamesJsonElement()
    {
        Dictionary<string, string> results = _Description.GetDisplayNamesJsonElement(this);
        return results;
    }

    List<IDescription> IFileRead.GetDescriptions(IFileRead fileRead, List<Test> tests, IProcessData processData)
    {
        List<IDescription> results = _Description.GetDescriptions(fileRead, _Logistics, tests, processData);
        return results;
    }

    /// <summary>
    /// Extracts the report, normalizes a null payload to an empty JSON array,
    /// writes PDSF output when hosted in EAF, and records the run duration.
    /// </summary>
    Tuple<string, Test[], JsonElement[], List<FileInfo>> IFileRead.GetExtractResult(string reportFullPath, string eventName)
    {
        Tuple<string, Test[], JsonElement[], List<FileInfo>> results;
        if (string.IsNullOrEmpty(eventName))
            throw new Exception();
        _ReportFullPath = reportFullPath;
        DateTime dateTime = DateTime.Now;
        results = GetExtractResult(reportFullPath, dateTime);
        if (results.Item3 is null)
            results = new Tuple<string, Test[], JsonElement[], List<FileInfo>>(results.Item1, Array.Empty<Test>(), JsonSerializer.Deserialize<JsonElement[]>("[]"), results.Item4);
        if (results.Item3.Length > 0 && _IsEAFHosted)
            WritePDSF(this, results.Item3);
        UpdateLastTicksDuration(DateTime.Now.Ticks - dateTime.Ticks);
        return results;
    }

    Tuple<string, Test[], JsonElement[], List<FileInfo>> IFileRead.ReExtract()
    {
        Tuple<string, Test[], JsonElement[], List<FileInfo>> results;
        List<string> headerNames = _Description.GetHeaderNames();
        Dictionary<string, string> keyValuePairs = _Description.GetDisplayNamesJsonElement(this);
        results = ReExtract(this, headerNames, keyValuePairs);
        return results;
    }

    void IFileRead.CheckTests(Test[] tests, bool extra)
    {
        if (_Description is not Description)
            throw new Exception();
    }

    // Duplicators are timer/extract driven; a direct callback is a misuse.
    void IFileRead.Callback(object state) => throw new Exception(string.Concat("Not ", nameof(_IsDuplicator)));

    /// <summary>
    /// Moves the run directory matched by the current logistics sequence into
    /// &lt;target-parent&gt;\Archive\&lt;JobID&gt;\&lt;yyyy&gt;_Week_&lt;ww&gt;\&lt;yyyy-MM-dd&gt;\&lt;dir-name&gt;.
    /// </summary>
    private void MoveArchive(DateTime dateTime)
    {
        // dateTime is accepted for signature parity with sibling handlers; the
        // archive move itself does not use it.
        if (dateTime == DateTime.MinValue)
        { }
        string logisticsSequence = _Logistics.Sequence.ToString();
        string weekOfYear = _Calendar.GetWeekOfYear(_Logistics.DateTimeFromSequence, CalendarWeekRule.FirstDay, DayOfWeek.Sunday).ToString("00");
        string weekDirectory = string.Concat(_Logistics.DateTimeFromSequence.ToString("yyyy"), "_Week_", weekOfYear, @"\", _Logistics.DateTimeFromSequence.ToString("yyyy-MM-dd"));
        string jobIdDirectory = string.Concat(_FileConnectorConfiguration.TargetFileLocation, @"\", _Logistics.JobID);
        if (!Directory.Exists(jobIdDirectory))
            _ = Directory.CreateDirectory(jobIdDirectory);
        string destinationArchiveDirectory = string.Concat(Path.GetDirectoryName(_FileConnectorConfiguration.TargetFileLocation), @"\Archive\", _Logistics.JobID, @"\", weekDirectory);
        if (!Directory.Exists(destinationArchiveDirectory))
            _ = Directory.CreateDirectory(destinationArchiveDirectory);
        // Bug fix: the original wrapped FirstOrDefault() in a one-element array
        // and null-checked the array itself (which was never null), so a missing
        // match skipped the intended diagnostic and surfaced later as an
        // ArgumentNullException from Path.GetDirectoryName(null). Check the
        // matched element directly instead.
        string matchDirectory = GetDirectoriesRecursively(jobIdDirectory, logisticsSequence).FirstOrDefault();
        if (matchDirectory is null)
            throw new Exception("Didn't find directory by logistics sequence");
        string sourceDirectory = Path.GetDirectoryName(matchDirectory);
        destinationArchiveDirectory = string.Concat(destinationArchiveDirectory, @"\", Path.GetFileName(sourceDirectory));
        Directory.Move(sourceDirectory, destinationArchiveDirectory);
    }

    /// <summary>
    /// Parses the PDSF report, refreshes logistics and the lot-ID file
    /// parameter, archives the matched run directory, and returns the
    /// duplicator extract tuple.
    /// </summary>
    private Tuple<string, Test[], JsonElement[], List<FileInfo>> GetExtractResult(string reportFullPath, DateTime dateTime)
    {
        Tuple<string, Test[], JsonElement[], List<FileInfo>> results;
        Tuple<string, string[], string[]> pdsf = ProcessDataStandardFormat.GetLogisticsColumnsAndBody(reportFullPath);
        _Logistics = new Logistics(reportFullPath, pdsf.Item1);
        SetFileParameterLotIDToLogisticsMID();
        JsonElement[] jsonElements = ProcessDataStandardFormat.GetArray(pdsf);
        List<Shared.Properties.IDescription> descriptions = GetDuplicatorDescriptions(jsonElements);
        Tuple<Test[], Dictionary<Test, List<Shared.Properties.IDescription>>> tuple = GetTuple(this, descriptions, extra: false);
        MoveArchive(dateTime);
        results = new Tuple<string, Test[], JsonElement[], List<FileInfo>>(pdsf.Item1, tuple.Item1, jsonElements, new List<FileInfo>());
        return results;
    }
}

View File

@ -0,0 +1,39 @@
using Adaptation.Eaf.Management.ConfigurationData.CellAutomation;
using Adaptation.Ifx.Eaf.EquipmentConnector.File.Configuration;
using Adaptation.Shared.Methods;
using System;
using System.Collections.Generic;
namespace Adaptation.FileHandlers;
/// <summary>
/// Factory that selects the concrete IFileRead implementation for a cell
/// instance connection. Duplicator connections (those whose name starts with
/// the cell instance name) are dispatched by the number of hyphens in the
/// connection name; everything else is matched by exact connection name.
/// </summary>
public class CellInstanceConnectionName
{

    internal static IFileRead Get(ISMTP smtp, Dictionary<string, string> fileParameter, string cellInstanceName, string cellInstanceConnectionName, FileConnectorConfiguration fileConnectorConfiguration, string equipmentTypeName, string parameterizedModelObjectDefinitionType, IList<ModelObjectParameterDefinition> modelObjectParameters, string equipmentDictionaryName, Dictionary<string, List<long>> dummyRuns, bool useCyclicalForDescription, bool isEAFHosted)
    {
        IFileRead result;
        if (!cellInstanceConnectionName.StartsWith(cellInstanceName))
        {
            // Non-duplicator connections are matched by exact name.
            if (cellInstanceConnectionName == nameof(pcl))
                result = new pcl.FileRead(smtp, fileParameter, cellInstanceName, cellInstanceConnectionName, fileConnectorConfiguration, equipmentTypeName, parameterizedModelObjectDefinitionType, modelObjectParameters, equipmentDictionaryName, dummyRuns, useCyclicalForDescription, isEAFHosted);
            else
                throw new Exception();
        }
        else
        {
            // The hyphen count in the connection name encodes the duplicator
            // role (see MET08DDUPSFS6420.Hyphen).
            int hyphenCount = 0;
            foreach (char character in cellInstanceConnectionName)
            {
                if (character == '-')
                    hyphenCount++;
            }
            if (hyphenCount == (int)MET08DDUPSFS6420.Hyphen.IsArchive)
                result = new Archive.FileRead(smtp, fileParameter, cellInstanceName, cellInstanceConnectionName, fileConnectorConfiguration, equipmentTypeName, parameterizedModelObjectDefinitionType, modelObjectParameters, equipmentDictionaryName, dummyRuns, useCyclicalForDescription, isEAFHosted);
            else if (hyphenCount == (int)MET08DDUPSFS6420.Hyphen.IsDummy)
                result = new Dummy.FileRead(smtp, fileParameter, cellInstanceName, cellInstanceConnectionName, fileConnectorConfiguration, equipmentTypeName, parameterizedModelObjectDefinitionType, modelObjectParameters, equipmentDictionaryName, dummyRuns, useCyclicalForDescription, isEAFHosted);
            else if (hyphenCount == (int)MET08DDUPSFS6420.Hyphen.IsXToArchive)
                result = new ToArchive.FileRead(smtp, fileParameter, cellInstanceName, cellInstanceConnectionName, fileConnectorConfiguration, equipmentTypeName, parameterizedModelObjectDefinitionType, modelObjectParameters, equipmentDictionaryName, dummyRuns, useCyclicalForDescription, isEAFHosted);
            else
                result = new MET08DDUPSFS6420.FileRead(smtp, fileParameter, cellInstanceName, cellInstanceConnectionName, fileConnectorConfiguration, equipmentTypeName, parameterizedModelObjectDefinitionType, modelObjectParameters, equipmentDictionaryName, dummyRuns, useCyclicalForDescription, isEAFHosted);
        }
        return result;
    }
}

View File

@ -0,0 +1,308 @@
using Adaptation.Eaf.Management.ConfigurationData.CellAutomation;
using Adaptation.Ifx.Eaf.EquipmentConnector.File.Configuration;
using Adaptation.Shared;
using Adaptation.Shared.Duplicator;
using Adaptation.Shared.Methods;
using Infineon.Monitoring.MonA;
using System;
using System.Collections.Generic;
using System.Diagnostics;
using System.Globalization;
using System.IO;
using System.IO.Compression;
using System.Linq;
using System.Text.Json;
using System.Threading;
namespace Adaptation.FileHandlers.Dummy;
// Duplicator "Dummy" handler: instead of watching for new files it periodically
// replays archived runs (zip files whose names encode a file-time tick
// sequence) back into a cell's target directory, reporting Warning/Ok/Critical
// states to MonA monitoring along the way.
public class FileRead : Shared.FileRead, IFileRead
{
// Drives Callback() on the configured FileScanningIntervalInSeconds cadence.
private readonly Timer _Timer;
// Index into SourceFileFilters of the last replayed archive; -1 before the first run.
private int _LastDummyRunIndex;
// Cell names parsed from "CellInstance.<name>.Alias" model object parameters.
private readonly string[] _CellNames;
// Validates duplicator configuration, collects cell names, and either runs
// immediately (debugger attached or Process pre-processing mode) or arms the
// timer for one scanning interval from now.
public FileRead(ISMTP smtp, Dictionary<string, string> fileParameter, string cellInstanceName, string cellInstanceConnectionName, FileConnectorConfiguration fileConnectorConfiguration, string equipmentTypeName, string parameterizedModelObjectDefinitionType, IList<ModelObjectParameterDefinition> modelObjectParameters, string equipmentDictionaryName, Dictionary<string, List<long>> dummyRuns, bool useCyclicalForDescription, bool isEAFHosted) :
base(new Description(), false, smtp, fileParameter, cellInstanceName, cellInstanceConnectionName, fileConnectorConfiguration, equipmentTypeName, parameterizedModelObjectDefinitionType, modelObjectParameters, equipmentDictionaryName, dummyRuns, useCyclicalForDescription, isEAFHosted)
{
_MinFileLength = 10;
_NullData = string.Empty;
_Logistics = new Logistics(this);
if (_FileParameter is null)
throw new Exception(cellInstanceConnectionName);
if (_ModelObjectParameterDefinitions is null)
throw new Exception(cellInstanceConnectionName);
if (!_IsDuplicator)
throw new Exception(cellInstanceConnectionName);
_LastDummyRunIndex = -1;
List<string> cellNames = new();
_Timer = new Timer(Callback, null, Timeout.Infinite, Timeout.Infinite);
ModelObjectParameterDefinition[] cellInstanceCollection = GetProperties(cellInstanceConnectionName, modelObjectParameters, "CellInstance.", ".Alias");
foreach (ModelObjectParameterDefinition modelObjectParameterDefinition in cellInstanceCollection)
cellNames.Add(modelObjectParameterDefinition.Name.Split('.')[1]);
_CellNames = cellNames.ToArray();
if (Debugger.IsAttached || fileConnectorConfiguration.PreProcessingMode == FileConnectorConfiguration.PreProcessingModeEnum.Process)
Callback(null);
else
{
TimeSpan timeSpan = new(DateTime.Now.AddSeconds(_FileConnectorConfiguration.FileScanningIntervalInSeconds.Value).Ticks - DateTime.Now.Ticks);
_ = _Timer.Change((long)timeSpan.TotalMilliseconds, Timeout.Infinite);
}
}
// IFileRead pass-throughs delegating to the shared base / description.
void IFileRead.Move(Tuple<string, Test[], JsonElement[], List<FileInfo>> extractResults, Exception exception) => Move(extractResults, exception);
void IFileRead.WaitForThread() => WaitForThread(thread: null, threadExceptions: null);
string IFileRead.GetEventDescription()
{
string result = _Description.GetEventDescription();
return result;
}
List<string> IFileRead.GetHeaderNames()
{
List<string> results = _Description.GetHeaderNames();
return results;
}
string[] IFileRead.Move(Tuple<string, Test[], JsonElement[], List<FileInfo>> extractResults, string to, string from, string resolvedFileLocation, Exception exception)
{
string[] results = Move(extractResults, to, from, resolvedFileLocation, exception);
return results;
}
JsonProperty[] IFileRead.GetDefault()
{
JsonProperty[] results = _Description.GetDefault(this, _Logistics);
return results;
}
Dictionary<string, string> IFileRead.GetDisplayNamesJsonElement()
{
Dictionary<string, string> results = _Description.GetDisplayNamesJsonElement(this);
return results;
}
List<IDescription> IFileRead.GetDescriptions(IFileRead fileRead, List<Test> tests, IProcessData processData)
{
List<IDescription> results = _Description.GetDescriptions(fileRead, _Logistics, tests, processData);
return results;
}
// Extraction is driven by the timer callback, not by the standard extract path.
Tuple<string, Test[], JsonElement[], List<FileInfo>> IFileRead.GetExtractResult(string reportFullPath, string eventName) => throw new Exception(string.Concat("See ", nameof(CallbackFileExists)));
Tuple<string, Test[], JsonElement[], List<FileInfo>> IFileRead.ReExtract() => throw new Exception(string.Concat("See ", nameof(CallbackFileExists)));
void IFileRead.CheckTests(Test[] tests, bool extra)
{
if (_Description is not Description)
throw new Exception();
}
void IFileRead.Callback(object state) => Callback(state);
// Unzips the archive into the in-process directory, stamps each extracted file
// with the run's sequence ticks, moves the files to the target location, and
// reports the outcome to MonA. A prior "warning" (matching target file already
// present) is reported first, followed by a ~5.5 s pause (11 x 500 ms).
private void CallbackInProcessCleared(string sourceArchiveFile, string traceDummyFile, string targetFileLocation, string monARessource, string inProcessDirectory, long sequence, bool warning)
{
const string site = "sjc";
string stateName = string.Concat("Dummy_", _EventName);
const string monInURL = "http://moninhttp.sjc.infineon.com/input/text";
MonIn monIn = MonIn.GetInstance(monInURL);
try
{
if (warning)
{
File.AppendAllLines(traceDummyFile, new string[] { site, monARessource, stateName, State.Warning.ToString() });
_ = monIn.SendStatus(site, monARessource, stateName, State.Warning);
for (int i = 1; i < 12; i++)
Thread.Sleep(500);
}
ZipFile.ExtractToDirectory(sourceArchiveFile, inProcessDirectory);
string[] files = Directory.GetFiles(inProcessDirectory, "*", SearchOption.TopDirectoryOnly);
// Guard against runaway archives flooding the target.
if (files.Length > 250)
throw new Exception("Safety net!");
foreach (string file in files)
File.SetLastWriteTime(file, new DateTime(sequence));
if (!_FileConnectorConfiguration.IncludeSubDirectories.Value)
{
foreach (string file in files)
File.Move(file, Path.Combine(targetFileLocation, Path.GetFileName(file)));
}
else
{
// Recreate the extracted directory structure under the target before moving.
string[] directories = Directory.GetDirectories(inProcessDirectory, "*", SearchOption.AllDirectories);
foreach (string directory in directories)
_ = Directory.CreateDirectory(string.Concat(targetFileLocation, directory.Substring(inProcessDirectory.Length)));
foreach (string file in files)
File.Move(file, string.Concat(targetFileLocation, file.Substring(inProcessDirectory.Length)));
}
File.AppendAllLines(traceDummyFile, new string[] { site, monARessource, stateName, State.Ok.ToString() });
_ = monIn.SendStatus(site, monARessource, stateName, State.Ok);
}
catch (Exception exception)
{
// Best-effort email, then record Critical with the failure details.
string subject = string.Concat("Exception:", _CellInstanceConnectionName);
string body = string.Concat(exception.Message, Environment.NewLine, Environment.NewLine, exception.StackTrace);
try
{ _SMTP.SendHighPriorityEmailMessage(subject, body); }
catch (Exception) { }
File.AppendAllLines(traceDummyFile, new string[] { site, monARessource, stateName, State.Critical.ToString(), exception.Message, exception.StackTrace });
_ = monIn.SendStatus(site, monARessource, stateName, State.Critical);
}
}
// Records the dummy run, clears any stale in-process files from a previous
// attempt, detects whether a target file already carries this sequence's
// timestamp (warning), then hands off to CallbackInProcessCleared.
private void CallbackFileExists(string sourceArchiveFile, string traceDummyFile, string targetFileLocation, string monARessource, long sequence)
{
string[] files;
bool warning = false;
if (!_DummyRuns.ContainsKey(monARessource))
_DummyRuns.Add(monARessource, new List<long>());
if (!_DummyRuns[monARessource].Contains(sequence))
_DummyRuns[monARessource].Add(sequence);
File.AppendAllLines(traceDummyFile, new string[] { sourceArchiveFile });
string inProcessDirectory = Path.Combine(_ProgressPath, "Dummy In-Process", sequence.ToString());
if (!Directory.Exists(inProcessDirectory))
_ = Directory.CreateDirectory(inProcessDirectory);
files = Directory.GetFiles(inProcessDirectory, "*", SearchOption.AllDirectories);
if (files.Any())
{
if (files.Length > 250)
throw new Exception("Safety net!");
// Best-effort cleanup of leftovers from an earlier interrupted run.
try
{
foreach (string file in files)
File.Delete(file);
}
catch (Exception) { }
}
if (_FileConnectorConfiguration.IncludeSubDirectories.Value)
files = Directory.GetFiles(targetFileLocation, "*", SearchOption.AllDirectories);
else
files = Directory.GetFiles(targetFileLocation, "*", SearchOption.TopDirectoryOnly);
// A target file stamped with this sequence means the run was replayed before.
foreach (string file in files)
{
if (new FileInfo(file).LastWriteTime.Ticks == sequence)
{
warning = true;
break;
}
}
CallbackInProcessCleared(sourceArchiveFile, traceDummyFile, targetFileLocation, monARessource, inProcessDirectory, sequence, warning);
}
// Maps a path segment to a known cell name: first by case-insensitive
// substring match, then (fallback) by scoring each cell name on how many of
// its characters occur in the segment, preferring shorter names and higher
// scores.
private string GetCellName(string pathSegment)
{
string result = string.Empty;
foreach (string cellName in _CellNames)
{
if (pathSegment.ToLower().Contains(cellName.ToLower()))
{
result = cellName;
break;
}
}
if (string.IsNullOrEmpty(result))
{
int count;
List<(string CellName, int Count)> cellNames = new();
foreach (string cellName in _CellNames)
{
count = 0;
// Score = total occurrences in pathSegment of each character of the cell name.
foreach (char @char in cellName.ToLower())
count += pathSegment.Length - pathSegment.ToLower().Replace(@char.ToString(), string.Empty).Length;
cellNames.Add(new(cellName, count));
}
result = (from l in cellNames orderby l.CellName.Length, l.Count descending select l.CellName).First();
}
return result;
}
// Timer body: on weekdays between 08:00 and 17:59 (or when not EAF-hosted),
// picks the next source archive in round-robin order, resolves the matching
// target directory by walking up from the archive location, derives the
// sequence from the file name, and replays it. Always re-arms the timer.
private void Callback(object state)
{
try
{
string pathSegment;
string monARessource;
DateTime dateTime = DateTime.Now;
if (!_FileConnectorConfiguration.TargetFileLocation.Contains(_FileConnectorConfiguration.SourceFileLocation))
throw new Exception("Target must start with source");
bool check = dateTime.Hour > 7 && dateTime.Hour < 18 && dateTime.DayOfWeek != DayOfWeek.Sunday && dateTime.DayOfWeek != DayOfWeek.Saturday;
if (!_IsEAFHosted || check)
{
string checkSegment;
string checkDirectory;
string sourceFileFilter;
string sourceArchiveFile;
string sourceFileLocation;
string weekOfYear = _Calendar.GetWeekOfYear(dateTime, CalendarWeekRule.FirstDay, DayOfWeek.Sunday).ToString("00");
string traceDummyDirectory = Path.Combine(Path.GetPathRoot(_TracePath), "TracesDummy", _CellInstanceName, "Source", $"{dateTime:yyyy}___Week_{weekOfYear}");
if (!Directory.Exists(traceDummyDirectory))
_ = Directory.CreateDirectory(traceDummyDirectory);
string traceDummyFile = Path.Combine(traceDummyDirectory, $"{dateTime.Ticks} - {_CellInstanceName}.txt");
File.AppendAllText(traceDummyFile, string.Empty);
if (_FileConnectorConfiguration.SourceFileLocation.EndsWith("\\"))
sourceFileLocation = _FileConnectorConfiguration.SourceFileLocation;
else
sourceFileLocation = string.Concat(_FileConnectorConfiguration.SourceFileLocation, '\\');
// Round-robin over the configured filters; process at most one archive per tick.
for (int i = 0; i < _FileConnectorConfiguration.SourceFileFilters.Count; i++)
{
_LastDummyRunIndex += 1;
if (_LastDummyRunIndex >= _FileConnectorConfiguration.SourceFileFilters.Count)
_LastDummyRunIndex = 0;
sourceFileFilter = _FileConnectorConfiguration.SourceFileFilters[_LastDummyRunIndex];
sourceArchiveFile = Path.GetFullPath(string.Concat(sourceFileLocation, sourceFileFilter));
if (File.Exists(sourceArchiveFile))
{
// Walk up from the archive file until a directory containing the
// target's relative segment is found.
checkSegment = _FileConnectorConfiguration.TargetFileLocation.Substring(sourceFileLocation.Length);
checkDirectory = Path.GetDirectoryName(sourceArchiveFile);
for (int z = 0; z < int.MaxValue; z++)
{
if (checkDirectory.Length < sourceFileLocation.Length || !checkDirectory.StartsWith(sourceFileLocation))
break;
checkDirectory = Path.GetDirectoryName(checkDirectory);
if (Directory.Exists(Path.Combine(checkDirectory, checkSegment)))
{
checkDirectory = Path.Combine(checkDirectory, checkSegment);
break;
}
}
if (!checkDirectory.EndsWith(checkSegment))
throw new Exception("Could not determine dummy target directory for extract!");
// File name (minus "x" markers) must be the numeric sequence (ticks).
if (!long.TryParse(Path.GetFileNameWithoutExtension(sourceArchiveFile).Replace("x", string.Empty), out long sequence))
throw new Exception("Invalid file name for source archive file!");
pathSegment = checkDirectory.Substring(sourceFileLocation.Length);
monARessource = GetCellName(pathSegment);
if (string.IsNullOrEmpty(monARessource))
throw new Exception("Could not determine which cell archive file is associated with!");
if (_IsEAFHosted)
CallbackFileExists(sourceArchiveFile, traceDummyFile, checkDirectory, monARessource, sequence);
break;
}
}
}
}
catch (Exception exception)
{
// Best-effort failure email; the timer is still re-armed below.
string subject = string.Concat("Exception:", _CellInstanceConnectionName);
string body = string.Concat(exception.Message, Environment.NewLine, Environment.NewLine, exception.StackTrace);
try
{ _SMTP.SendHighPriorityEmailMessage(subject, body); }
catch (Exception) { }
}
try
{
// Re-arm the one-shot timer for the next scanning interval.
TimeSpan timeSpan = new(DateTime.Now.AddSeconds(_FileConnectorConfiguration.FileScanningIntervalInSeconds.Value).Ticks - DateTime.Now.Ticks);
_ = _Timer.Change((long)timeSpan.TotalMilliseconds, Timeout.Infinite);
}
catch (Exception exception)
{
string subject = string.Concat("Exception:", _CellInstanceConnectionName);
string body = string.Concat(exception.Message, Environment.NewLine, Environment.NewLine, exception.StackTrace);
try
{ _SMTP.SendHighPriorityEmailMessage(subject, body); }
catch (Exception) { }
}
}
}

View File

@ -1,300 +0,0 @@
using Adaptation.Helpers;
using Adaptation.Shared;
using Adaptation.Shared.Metrology;
using log4net;
using System;
using System.Collections.Generic;
using System.Globalization;
using System.IO;
using System.Linq;
using System.Text.Json;
using System.Text.RegularExpressions;
namespace Adaptation.FileHandlers
{
public partial class FileRead : ILogic
{
private ConfigData _ConfigData;
// Sets up the handler's logger and an empty logistics container.
public FileRead()
{
    _Log = LogManager.GetLogger(typeof(FileRead));
    Logistics = new Logistics();
}
// Returns a member-wise shallow copy of this handler as an ILogic.
public ILogic ShallowCopy() => (ILogic)MemberwiseClone();
// Convenience overload: wait with no specific thread or exception list.
public void WaitForThread() => WaitForThread(thread: null, threadExceptions: null);
// Pairs the OpenInsight Si viewer endpoint with the active configuration.
public Tuple<string, ConfigDataBase> GetOpenInsightTuple()
{
    Tuple<string, ConfigDataBase> result = new Tuple<string, ConfigDataBase>(_ConfigData.OpenInsightSiViewer, _ConfigData);
    return result;
}
// Routes a report to the correct extractor based on the configured duplicator
// level, normalizes a null payload to an empty JSON array, optionally writes
// PDSF output when EAF-hosted, and records the extraction duration.
// NOTE(review): eventName is not used by this dispatch -- confirm callers.
public Tuple<string, JsonElement?, List<FileInfo>> GetExtractResult(string reportFullPath, string eventName)
{
Tuple<string, JsonElement?, List<FileInfo>> results;
_FileParameter.Clear();
DateTime dateTime = DateTime.Now;
// Event extraction (no duplicator configured).
if (_ConfigData.IsEvent && _ConfigData.Duplicator is null)
results = GetExtractResult(reportFullPath);
// Any duplicator level except manual OpenInsight entry.
else if (_ConfigData.Duplicator.HasValue && _ConfigData.Duplicator.Value != ConfigData.Level.IsManualOIEntry)
results = GetDuplicatorExtractResult(reportFullPath, dateTime);
else if (_ConfigData.Duplicator.HasValue && _ConfigData.Duplicator.Value == ConfigData.Level.IsManualOIEntry)
results = _ConfigData.IsManualOIEntry(reportFullPath);
else
throw new Exception();
if (results.Item2 is null)
results = new Tuple<string, JsonElement?, List<FileInfo>>(results.Item1, JsonSerializer.Deserialize<JsonElement>("[]"), results.Item3);
int count = results.Item2.Value.GetArrayLength();
if (count > 0 && _ConfigData.EafHosted)
WritePDSF(results.Item2.Value);
UpdateLastTicksDuration(DateTime.Now.Ticks - dateTime.Ticks);
return results;
}
// Parses a single raw report: builds logistics from the file, derives the MID
// as "<Reactor>-<RDS>-<PSN>" (sanitized of path-hostile characters), and
// delegates to ProcessData for the final result. Files shorter than
// MinFileLength are only recorded, not parsed.
private Tuple<string, JsonElement?, List<FileInfo>> GetExtractResult(string reportFullPath)
{
Tuple<string, JsonElement?, List<FileInfo>> results = new Tuple<string, JsonElement?, List<FileInfo>>(string.Empty, null, new List<FileInfo>());
FileInfo fileInfo = new FileInfo(reportFullPath);
Logistics = new Logistics(ConfigData.NullData, _ConfigData.CellNames, _ConfigData.MesEntities, fileInfo, useSplitForMID: false, fileInfoLength: 50000);
SetFileParameterLotID(Logistics.MID);
if (new FileInfo(reportFullPath).Length < ConfigData.MinFileLength)
results.Item3.Add(fileInfo);
else
{
ProcessData processData = new ProcessData(this, _ConfigData, results.Item3);
if (!(processData.Header is null))
{
// Strip characters invalid in file names and anything after a line break.
string mid = string.Concat(processData.Header.Reactor, "-", processData.Header.RDS, "-", processData.Header.PSN);
mid = Regex.Replace(mid, @"[\\,\/,\:,\*,\?,\"",\<,\>,\|]", "_").Split('\r')[0].Split('\n')[0];
Logistics.MID = mid;
SetFileParameterLotID(mid);
Logistics.ProcessJobID = processData.Header.Reactor;
}
// A header with no detail rows (or no header at all) is a hard failure.
if (processData.Header is null || !processData.Details.Any())
throw new Exception();
results = processData.GetResults(this, _ConfigData, results.Item3);
}
return results;
}
// Duplicator extraction for a PDSF report. Depending on the configured
// duplicator level this copies/duplicates the file, posts to the OpenInsight
// Metrology Viewer web service, builds IQS/OpenInsight line exports, and/or
// verifies attachments against a historical JSON snapshot. The branching is
// intentionally order-sensitive -- treat changes here with care.
private Tuple<string, JsonElement?, List<FileInfo>> GetDuplicatorExtractResult(string reportFullPath, DateTime dateTime)
{
Tuple<string, JsonElement?, List<FileInfo>> results;
Tuple<string, string[], string[]> pdsf = ProcessDataStandardFormat.GetLogisticsColumnsAndBody(reportFullPath);
Logistics = new Logistics(reportFullPath, pdsf.Item1);
SetFileParameterLotIDToLogisticsMID();
JsonElement pdsdBodyValues = ProcessDataStandardFormat.GetArray(pdsf);
results = new Tuple<string, JsonElement?, List<FileInfo>>(pdsf.Item1, pdsdBodyValues, new List<FileInfo>());
List<Duplicator.Description> processDataDescriptions = _ConfigData.GetProcessDataDescriptions(pdsdBodyValues);
Dictionary<Test, List<Duplicator.Description>> keyValuePairs = ProcessData.GetKeyValuePairs(_ConfigData, pdsdBodyValues, processDataDescriptions, extra: false);
// Attachments level with no scanning interval means the viewer-attachment
// path is configured but not actively used.
bool isNotUsedInsightMetrologyViewerAttachments = (!(_Configuration.FileScanningIntervalInSeconds > 0) && _ConfigData.Duplicator.Value == ConfigData.Level.IsXToOpenInsightMetrologyViewerAttachments);
// A dummy run is one whose job/sequence was registered by the Dummy handler.
bool isDummyRun = (ConfigData.DummyRuns.Any() && ConfigData.DummyRuns.ContainsKey(Logistics.JobID) && ConfigData.DummyRuns[Logistics.JobID].Any() && (from l in ConfigData.DummyRuns[Logistics.JobID] where l == Logistics.Sequence select 1).Any());
if (isDummyRun)
{
// Best-effort touch so the replayed file carries the current timestamp.
try
{ File.SetLastWriteTime(reportFullPath, dateTime); }
catch (Exception) { }
}
// Choose the duplicate directory by duplicator level; "-<segment[2]>" is
// appended when the file name carries a third underscore-separated segment.
string duplicateDirectory;
string[] segments = Path.GetFileNameWithoutExtension(reportFullPath).Split('_');
if (_ConfigData.Duplicator.Value == ConfigData.Level.IsXToIQSSi)
duplicateDirectory = string.Concat(_Configuration.TargetFileLocation, @"\ALL");
else if (_ConfigData.Duplicator.Value != ConfigData.Level.IsXToOpenInsight)
duplicateDirectory = string.Concat(_Configuration.TargetFileLocation, @"\", segments[0]);
else
duplicateDirectory = string.Concat(Path.GetDirectoryName(Path.GetDirectoryName(_Configuration.TargetFileLocation)), @"\Data");
if (segments.Length > 2)
duplicateDirectory = string.Concat(duplicateDirectory, @"-", segments[2]);
if (!Directory.Exists(duplicateDirectory))
Directory.CreateDirectory(duplicateDirectory);
if ((isDummyRun || isNotUsedInsightMetrologyViewerAttachments || _Configuration.FileScanningIntervalInSeconds > 0) && _ConfigData.Duplicator.Value != ConfigData.Level.IsXToArchive && _ConfigData.Duplicator.Value != ConfigData.Level.IsArchive)
{
bool ganPPTST = false;
string successDirectory;
if (_ConfigData.Duplicator.Value != ConfigData.Level.IsXToAPC)
successDirectory = string.Empty;
else
{
successDirectory = string.Concat(Path.GetDirectoryName(_Configuration.TargetFileLocation), @"\ViewerPath");
if (!Directory.Exists(successDirectory))
Directory.CreateDirectory(successDirectory);
}
CultureInfo cultureInfo = new CultureInfo("en-US");
Calendar calendar = cultureInfo.Calendar;
List<Tuple<IScopeInfo, string>> tuples = new List<Tuple<IScopeInfo, string>>();
string duplicateFile = string.Concat(duplicateDirectory, @"\", Path.GetFileName(reportFullPath));
string weekOfYear = calendar.GetWeekOfYear(Logistics.DateTimeFromSequence, CalendarWeekRule.FirstDay, DayOfWeek.Sunday).ToString("00");
string weekDirectory = string.Concat(Logistics.DateTimeFromSequence.ToString("yyyy"), "_Week_", weekOfYear, @"\", Logistics.DateTimeFromSequence.ToString("yyyy-MM-dd"));
string logisticsSequenceMemoryDirectory = string.Concat(_ConfigData.MemoryPath, @"\", _ConfigData.GetEquipmentType(), @"\Source\", weekDirectory, @"\", Logistics.Sequence);
if (!Directory.Exists(logisticsSequenceMemoryDirectory))
Directory.CreateDirectory(logisticsSequenceMemoryDirectory);
if (_ConfigData.Duplicator.Value == ConfigData.Level.IsXToAPC)
{
if (!isDummyRun && _ConfigData.EafHosted)
File.Copy(reportFullPath, duplicateFile, overwrite: true);
}
else
{
if (_ConfigData.Duplicator.Value == ConfigData.Level.IsXToOpenInsightMetrologyViewer)
{
// Push the run to the Metrology Viewer web service and persist the response.
List<ProcessData.FileRead.Description> fileReadDescriptions = ProcessData.GetProcessDataFileReadDescriptions(_ConfigData, pdsdBodyValues);
ProcessData.WSRequest wsRequest = new ProcessData.WSRequest(this, fileReadDescriptions);
if (!isDummyRun && _ConfigData.EafHosted)
{
Tuple<string, WS.Results> wsResults = WS.SendData(_ConfigData.OpenInsightMetrogyViewerAPI, wsRequest);
if (!wsResults.Item2.Success)
throw new Exception(wsResults.ToString());
_Log.Debug(wsResults.Item2.HeaderID);
File.WriteAllText(string.Concat(logisticsSequenceMemoryDirectory, @"\", nameof(WS.Results), ".json"), wsResults.Item1);
}
}
else
{
// Build one (scope, lines) export per test for IQS/OpenInsight targets.
Test test;
string lines;
IScopeInfo scopeInfo;
foreach (KeyValuePair<Test, List<Duplicator.Description>> keyValuePair in keyValuePairs)
{
test = keyValuePair.Key;
//scopeInfo = new ScopeInfo(this, _ConfigData, test);
if (_ConfigData.Duplicator.Value != ConfigData.Level.IsXToOpenInsight)
scopeInfo = new ScopeInfo(this, _ConfigData, test, _ConfigData.IqsFile, _ConfigData.IqsQueryFilter);
else
scopeInfo = new ScopeInfo(this, _ConfigData, test, _ConfigData.OpenInsightFilePattern, _ConfigData.IqsQueryFilter);
//lines = ProcessDataStandardFormat.GetLines(Logistics, scopeInfo, names, values, dateFormat: "M/d/yyyy hh:mm:ss tt", timeFormat: string.Empty, pairedColumns: ExtractResultPairedColumns);
List<ProcessData.FileRead.Description> fileReadDescriptions = ProcessData.GetProcessDataFileReadDescriptions(_ConfigData, pdsdBodyValues);
// GAN_PPTST recipes are routed to the GaN IQS target below.
ganPPTST = fileReadDescriptions[0].Recipe.Contains("GAN_PPTST");
lines = ProcessData.GetLines(this, fileReadDescriptions, ganPPTST);
tuples.Add(new Tuple<IScopeInfo, string>(scopeInfo, lines));
}
}
if (_ConfigData.Duplicator.Value == ConfigData.Level.IsXToOpenInsightMetrologyViewerAttachments)
{
string[] matchDirectories = Shared1567(reportFullPath, tuples);
if (!isDummyRun && _ConfigData.EafHosted && !isNotUsedInsightMetrologyViewerAttachments)
{
List<ProcessData.FileRead.Description> fileReadDescriptions = ProcessData.GetProcessDataFileReadDescriptions(_ConfigData, pdsdBodyValues);
ProcessData.PostOpenInsightMetrologyViewerAttachments(_Log, _ConfigData, Logistics, dateTime, logisticsSequenceMemoryDirectory, fileReadDescriptions, matchDirectories[0]);
}
}
}
if (_ConfigData.Duplicator.Value != ConfigData.Level.IsXToOpenInsightMetrologyViewer && _ConfigData.Duplicator.Value != ConfigData.Level.IsXToOpenInsightMetrologyViewerAttachments)
{
// Write export files only when the recipe routing matches the target:
// Si target excludes GAN_PPTST runs, GaN target requires them.
bool check = false;
if (_ConfigData.Duplicator.Value != ConfigData.Level.IsXToIQSSi && _ConfigData.Duplicator.Value != ConfigData.Level.IsXToIQSGaN)
check = true;
else if (_ConfigData.Duplicator.Value == ConfigData.Level.IsXToIQSSi && !ganPPTST)
check = true;
else if (_ConfigData.Duplicator.Value == ConfigData.Level.IsXToIQSGaN && ganPPTST)
check = true;
//else
// Don't write file(s) //throw new Exception();
if (check)
Shared0413(dateTime, isDummyRun, successDirectory, duplicateDirectory, tuples, duplicateFile);
}
}
if (_ConfigData.Duplicator.Value == ConfigData.Level.IsXToOpenInsightMetrologyViewerAttachments)
{
// Resolve the run directory by logistics sequence, then either replay
// (dummy run) or verify/post the attachment JSON.
string destinationDirectory;
//string destinationDirectory = WriteScopeInfo(_ConfigData.ProgressPath, Logistics, dateTime, duplicateDirectory, tuples);
FileInfo fileInfo = new FileInfo(reportFullPath);
string logisticsSequence = Logistics.Sequence.ToString();
if (fileInfo.Exists && fileInfo.LastWriteTime < fileInfo.CreationTime)
File.SetLastWriteTime(reportFullPath, fileInfo.CreationTime);
string jobIdDirectory = string.Concat(Path.GetDirectoryName(Path.GetDirectoryName(_Configuration.TargetFileLocation)), @"\", Logistics.JobID);
if (!Directory.Exists(jobIdDirectory))
Directory.CreateDirectory(jobIdDirectory);
string[] matchDirectories;
if (!_ConfigData.EafHosted)
matchDirectories = new string[] { Path.GetDirectoryName(Path.GetDirectoryName(reportFullPath)) };
else
matchDirectories = Directory.GetDirectories(jobIdDirectory, string.Concat(Logistics.MID, '*', logisticsSequence, '*'), SearchOption.TopDirectoryOnly);
if ((matchDirectories is null) || matchDirectories.Length != 1)
throw new Exception("Didn't find directory by logistics sequence");
destinationDirectory = matchDirectories[0];
if (isDummyRun)
Shared0607(reportFullPath, duplicateDirectory, logisticsSequence, destinationDirectory);
else
{
List<ProcessData.FileRead.Description> fileReadDescriptions = ProcessData.GetProcessDataFileReadDescriptions(_ConfigData, pdsdBodyValues);
ProcessData.WSRequest wsRequest = new ProcessData.WSRequest(this, fileReadDescriptions);
JsonSerializerOptions jsonSerializerOptions = new JsonSerializerOptions { WriteIndented = true };
string json = JsonSerializer.Serialize(wsRequest, wsRequest.GetType(), jsonSerializerOptions);
if (_ConfigData.EafHosted)
Shared1277(reportFullPath, destinationDirectory, logisticsSequence, jobIdDirectory, json);
else
{
// Outside EAF, compare against the historical snapshot instead of posting.
string jsonFileName = Path.ChangeExtension(reportFullPath, ".json");
string historicalText = File.ReadAllText(jsonFileName);
if (json != historicalText)
throw new Exception("File doesn't match historical!");
}
}
}
return results;
}
private void MoveArchive()
{
    // Moves the processed run directory (located under the job-id folder by the
    // logistics sequence) into a sibling Archive tree partitioned as
    // yyyy_Week_ww\yyyy-MM-dd. Throws when no matching directory is found.
    CultureInfo cultureInfo = new CultureInfo("en-US");
    Calendar calendar = cultureInfo.Calendar;
    string logisticsSequence = Logistics.Sequence.ToString();
    // Two-digit week number keeps archive folders sorted lexicographically.
    string weekOfYear = calendar.GetWeekOfYear(Logistics.DateTimeFromSequence, CalendarWeekRule.FirstDay, DayOfWeek.Sunday).ToString("00");
    string weekDirectory = string.Concat(Logistics.DateTimeFromSequence.ToString("yyyy"), "_Week_", weekOfYear, @"\", Logistics.DateTimeFromSequence.ToString("yyyy-MM-dd"));
    string jobIdDirectory = string.Concat(_Configuration.TargetFileLocation, @"\", Logistics.JobID);
    if (!Directory.Exists(jobIdDirectory))
        Directory.CreateDirectory(jobIdDirectory);
    string destinationArchiveDirectory = string.Concat(Path.GetDirectoryName(_Configuration.TargetFileLocation), @"\Archive\", Logistics.JobID, @"\", weekDirectory);
    if (!Directory.Exists(destinationArchiveDirectory))
        Directory.CreateDirectory(destinationArchiveDirectory);
    // BUGFIX: previously the FirstOrDefault() result was wrapped in a one-element
    // array, so the (is null || Length != 1) guard could never fire; a missing match
    // surfaced later as an ArgumentNullException (from Path.GetDirectoryName /
    // Directory.Move) instead of the intended message. Check the element directly.
    string matchDirectory = GetDirectoriesRecursively(jobIdDirectory, logisticsSequence).FirstOrDefault();
    if (matchDirectory is null)
        throw new Exception("Didn't find directory by logistics sequence");
    string sourceDirectory = Path.GetDirectoryName(matchDirectory);
    destinationArchiveDirectory = string.Concat(destinationArchiveDirectory, @"\", Path.GetFileName(sourceDirectory));
    Directory.Move(sourceDirectory, destinationArchiveDirectory);
}
public void Move(string reportFullPath, Tuple<string, JsonElement?, List<FileInfo>> extractResults, Exception exception = null)
{
    // Always record the move attempt (and any exception) first.
    Shared1872(reportFullPath, exception);
    bool hasException = exception is object;
    if (!hasException)
    {
        if (_ConfigData.Duplicator.HasValue)
        {
            // Duplicator configured: archive levels move the source run away;
            // every EAF-hosted level also records progress.
            if (_ConfigData.Duplicator.Value == ConfigData.Level.IsXToArchive)
                Shared0192(reportFullPath);
            else if (_ConfigData.EafHosted && _ConfigData.Duplicator.Value == ConfigData.Level.IsArchive)
                MoveArchive();
            if (_ConfigData.EafHosted && !string.IsNullOrEmpty(_ConfigData.ProgressPath))
                CreateProgressDirectory(_ConfigData.ProgressPath, Logistics, (int?)_ConfigData.Duplicator, exceptionLines: null);
        }
        else
        {
            // No duplicator configured: just write the IO record.
            WriteIO(reportFullPath);
        }
    }
    if (_ConfigData.EafHosted)
        return;
    // Outside EAF hosting, resolve the destination path ourselves; templated
    // paths (containing '%') cannot be expanded without the EAF runtime.
    object filePathGeneratorInfo = GetFilePathGeneratorInfo(reportFullPath, isErrorFile: false);
    if (filePathGeneratorInfo is string to)
    {
        if (!to.Contains("%"))
            Shared1124(reportFullPath, extractResults, to, _Configuration.SourceFileLocation, resolvedFileLocation: string.Empty, exception: null);
        else
            _Log.Debug("Can't debug without EAF Hosting");
    }
}
}
}

File diff suppressed because it is too large Load Diff

View File

@ -0,0 +1,519 @@
using Adaptation.Eaf.Management.ConfigurationData.CellAutomation;
using Adaptation.Ifx.Eaf.EquipmentConnector.File.Configuration;
using Adaptation.Shared;
using Adaptation.Shared.Duplicator;
using Adaptation.Shared.Methods;
using Adaptation.Shared.Metrology;
using Infineon.Monitoring.MonA;
using System;
using System.Collections.Generic;
using System.Diagnostics;
using System.Globalization;
using System.IO;
using System.IO.Compression;
using System.Linq;
using System.Text.Json;
using System.Text.Json.Serialization;
using System.Threading;
namespace Adaptation.FileHandlers.MET08DDUPSFS6420;
// Duplicator file handler for the MET08DDUPSFS6420 cell: routes extracted PDSF
// results to the destination selected by the configured hyphen level (IQS Si/GaN,
// OpenInsight, Metrology Viewer, APC, archive, ...) and, for the dummy level,
// replays previously archived runs on a timer.
public class FileRead : Shared.FileRead, IFileRead
{

    private readonly Timer _Timer;                 // drives Callback() when _IsDummy
    private int _LastDummyRunIndex;                // round-robin index over dummy-run candidates
    private readonly bool _IsDummy;
    private readonly bool _IsNaEDA;
    private readonly bool _IsXToAPC;
    private readonly string _IqsFile;
    private readonly bool _IsXToIQSSi;
    private readonly bool _IsXToSPaCe;
    private readonly bool _IsXToIQSGaN;
    private readonly string _MemoryPath;
    private readonly bool _IsXToOpenInsight;
    private readonly string _OpenInsightFilePattern;
    private readonly bool _IsXToOpenInsightMetrologyViewer;
    private readonly Dictionary<string, string> _CellNames;   // cell name -> path fragment (from CellInstance.*.Path)
    private readonly string _OpenInsightMetrologyViewerAPI;
    private readonly bool _IsXToOpenInsightMetrologyViewerAttachments;

    // Validates inherited duplicator state, resolves model-object parameters, and
    // for the dummy level starts the replay timer (immediately when debugging or in
    // Process pre-processing mode, otherwise after half the scan interval).
    public FileRead(ISMTP smtp, Dictionary<string, string> fileParameter, string cellInstanceName, string cellInstanceConnectionName, FileConnectorConfiguration fileConnectorConfiguration, string equipmentTypeName, string parameterizedModelObjectDefinitionType, IList<ModelObjectParameterDefinition> modelObjectParameters, string equipmentDictionaryName, Dictionary<string, List<long>> dummyRuns, bool useCyclicalForDescription, bool isEAFHosted) :
        base(new Description(), false, smtp, fileParameter, cellInstanceName, cellInstanceConnectionName, fileConnectorConfiguration, equipmentTypeName, parameterizedModelObjectDefinitionType, modelObjectParameters, equipmentDictionaryName, dummyRuns, useCyclicalForDescription, isEAFHosted)
    {
        _MinFileLength = 10;
        _NullData = string.Empty;
        _Logistics = new Logistics(this);
        if (_FileParameter is null)
            throw new Exception(cellInstanceConnectionName);
        if (_ModelObjectParameterDefinitions is null)
            throw new Exception(cellInstanceConnectionName);
        if (!_IsDuplicator)
            throw new Exception(cellInstanceConnectionName);
        _LastDummyRunIndex = -1;
        // Each flag mirrors one Hyphen enum value; _Hyphens selects exactly one level.
        _IsDummy = _Hyphens == (int)Hyphen.IsDummy;
        _IsNaEDA = _Hyphens == (int)Hyphen.IsNaEDA;
        _IsXToAPC = _Hyphens == (int)Hyphen.IsXToAPC;
        _CellNames = new Dictionary<string, string>();
        _IsXToIQSSi = _Hyphens == (int)Hyphen.IsXToIQSSi;
        _IsXToSPaCe = _Hyphens == (int)Hyphen.IsXToSPaCe;
        _IsXToIQSGaN = _Hyphens == (int)Hyphen.IsXToIQSGaN;
        _IsXToOpenInsight = _Hyphens == (int)Hyphen.IsXToOpenInsight;
        _IsXToOpenInsightMetrologyViewer = _Hyphens == (int)Hyphen.IsXToOpenInsightMetrologyViewer;
        _IqsFile = GetPropertyValue(cellInstanceConnectionName, modelObjectParameters, "IQS.File");
        _MemoryPath = GetPropertyValue(cellInstanceConnectionName, modelObjectParameters, "Path.Memory");
        _IsXToOpenInsightMetrologyViewerAttachments = _Hyphens == (int)Hyphen.IsXToOpenInsightMetrologyViewerAttachments;
        _OpenInsightFilePattern = GetPropertyValue(cellInstanceConnectionName, modelObjectParameters, "OpenInsight.FilePattern");
        _OpenInsightMetrologyViewerAPI = GetPropertyValue(cellInstanceConnectionName, modelObjectParameters, "OpenInsight.MetrologyViewerAPI");
        ModelObjectParameterDefinition[] cellInstanceCollection = GetProperties(cellInstanceConnectionName, modelObjectParameters, "CellInstance.", ".Path");
        foreach (ModelObjectParameterDefinition modelObjectParameterDefinition in cellInstanceCollection)
            _CellNames.Add(modelObjectParameterDefinition.Name.Split('.')[1], modelObjectParameterDefinition.Value);
        if (_IsDummy)
        {
            if (Debugger.IsAttached || fileConnectorConfiguration.PreProcessingMode == FileConnectorConfiguration.PreProcessingModeEnum.Process)
            {
                _Timer = new Timer(Callback, null, Timeout.Infinite, Timeout.Infinite);
                Callback(null);
            }
            else
            {
                int milliSeconds;
                milliSeconds = (int)(fileConnectorConfiguration.FileScanningIntervalInSeconds * 1000 / 2);
                _Timer = new Timer(Callback, null, milliSeconds, Timeout.Infinite);
                // NOTE(review): this increment has no effect — milliSeconds is not read again.
                milliSeconds += 2000;
            }
        }
    }

    // IFileRead pass-throughs delegating to Shared.FileRead / _Description helpers.
    void IFileRead.Move(Tuple<string, Test[], JsonElement[], List<FileInfo>> extractResults, Exception exception) => Move(extractResults, exception);

    void IFileRead.WaitForThread() => WaitForThread(thread: null, threadExceptions: null);

    string IFileRead.GetEventDescription()
    {
        string result = _Description.GetEventDescription();
        return result;
    }

    List<string> IFileRead.GetHeaderNames()
    {
        List<string> results = _Description.GetHeaderNames();
        return results;
    }

    string[] IFileRead.Move(Tuple<string, Test[], JsonElement[], List<FileInfo>> extractResults, string to, string from, string resolvedFileLocation, Exception exception)
    {
        string[] results = Move(extractResults, to, from, resolvedFileLocation, exception);
        return results;
    }

    JsonProperty[] IFileRead.GetDefault()
    {
        JsonProperty[] results = _Description.GetDefault(this, _Logistics);
        return results;
    }

    Dictionary<string, string> IFileRead.GetDisplayNamesJsonElement()
    {
        Dictionary<string, string> results = _Description.GetDisplayNamesJsonElement(this);
        return results;
    }

    List<IDescription> IFileRead.GetDescriptions(IFileRead fileRead, List<Test> tests, IProcessData processData)
    {
        List<IDescription> results = _Description.GetDescriptions(fileRead, _Logistics, tests, processData);
        return results;
    }

    // Entry point for extraction: delegates to the private overload, normalizes a
    // null JSON payload to an empty array, persists PDSF when hosted, and records
    // the elapsed ticks.
    Tuple<string, Test[], JsonElement[], List<FileInfo>> IFileRead.GetExtractResult(string reportFullPath, string eventName)
    {
        Tuple<string, Test[], JsonElement[], List<FileInfo>> results;
        if (string.IsNullOrEmpty(eventName))
            throw new Exception();
        _ReportFullPath = reportFullPath;
        DateTime dateTime = DateTime.Now;
        results = GetExtractResult(reportFullPath, dateTime);
        if (results.Item3 is null)
            results = new Tuple<string, Test[], JsonElement[], List<FileInfo>>(results.Item1, Array.Empty<Test>(), JsonSerializer.Deserialize<JsonElement[]>("[]"), results.Item4);
        if (results.Item3.Length > 0 && _IsEAFHosted)
            WritePDSF(this, results.Item3);
        UpdateLastTicksDuration(DateTime.Now.Ticks - dateTime.Ticks);
        return results;
    }

    Tuple<string, Test[], JsonElement[], List<FileInfo>> IFileRead.ReExtract()
    {
        Tuple<string, Test[], JsonElement[], List<FileInfo>> results;
        List<string> headerNames = _Description.GetHeaderNames();
        Dictionary<string, string> keyValuePairs = _Description.GetDisplayNamesJsonElement(this);
        results = ReExtract(this, headerNames, keyValuePairs);
        return results;
    }

    void IFileRead.CheckTests(Test[] tests, bool extra)
    {
        // Only the description type is validated here; individual tests are not inspected.
        if (_Description is not Description)
            throw new Exception();
    }

    void IFileRead.Callback(object state) => Callback(state);

    // Deserializes each PDSF-derived JSON object into a pcl.Description; every
    // element must be a JSON object.
    protected static List<pcl.Description> GetDescriptions(JsonElement[] jsonElements)
    {
        List<pcl.Description> results = new();
        pcl.Description description;
        JsonSerializerOptions jsonSerializerOptions = new() { NumberHandling = JsonNumberHandling.AllowReadingFromString | JsonNumberHandling.WriteAsString };
        foreach (JsonElement jsonElement in jsonElements)
        {
            if (jsonElement.ValueKind != JsonValueKind.Object)
                throw new Exception();
            description = JsonSerializer.Deserialize<pcl.Description>(jsonElement.ToString(), jsonSerializerOptions);
            results.Add(description);
        }
        return results;
    }

    // Core extraction/routing: parses the PDSF report, then depending on the hyphen
    // level copies/rewrites files, posts to the Metrology Viewer web service, or
    // verifies attachments against a historical JSON snapshot.
    private Tuple<string, Test[], JsonElement[], List<FileInfo>> GetExtractResult(string reportFullPath, DateTime dateTime)
    {
        Tuple<string, Test[], JsonElement[], List<FileInfo>> results;
        string duplicateDirectory;
        Tuple<string, string[], string[]> pdsf = ProcessDataStandardFormat.GetLogisticsColumnsAndBody(reportFullPath);
        _Logistics = new Logistics(reportFullPath, pdsf.Item1);
        SetFileParameterLotIDToLogisticsMID();
        JsonElement[] jsonElements = ProcessDataStandardFormat.GetArray(pdsf);
        List<pcl.Description> descriptions = GetDescriptions(jsonElements);
        Tuple<Test[], Dictionary<Test, List<Shared.Properties.IDescription>>> tuple = GetTuple(this, from l in descriptions select (Shared.Properties.IDescription)l, extra: false);
        results = new Tuple<string, Test[], JsonElement[], List<FileInfo>>(pdsf.Item1, tuple.Item1, jsonElements, new List<FileInfo>());
        bool isNotUsedInsightMetrologyViewerAttachments = !(_FileConnectorConfiguration.FileScanningIntervalInSeconds > 0) && _IsXToOpenInsightMetrologyViewerAttachments;
        // A dummy run is one whose sequence was registered for this job by the dummy replay timer.
        bool isDummyRun = _DummyRuns.Any() && _DummyRuns.ContainsKey(_Logistics.JobID) && _DummyRuns[_Logistics.JobID].Any() && (from l in _DummyRuns[_Logistics.JobID] where l == _Logistics.Sequence select 1).Any();
        if (isDummyRun)
        {
            // Best effort: touch the file so its timestamp matches this run.
            try
            { File.SetLastWriteTime(reportFullPath, dateTime); }
            catch (Exception) { }
        }
        string[] segments = Path.GetFileNameWithoutExtension(reportFullPath).Split('_');
        // Destination for the duplicated report depends on the configured level.
        if (_IsXToIQSSi)
            duplicateDirectory = string.Concat(_FileConnectorConfiguration.TargetFileLocation, @"\All");
        else if (!_IsXToOpenInsight)
            duplicateDirectory = string.Concat(_FileConnectorConfiguration.TargetFileLocation, @"\", segments[0]);
        else
            duplicateDirectory = string.Concat(Path.GetDirectoryName(Path.GetDirectoryName(_FileConnectorConfiguration.TargetFileLocation)), @"\Data");
        if (segments.Length > 2)
            duplicateDirectory = string.Concat(duplicateDirectory, @"-", segments[2]);
        if (!Directory.Exists(duplicateDirectory))
            _ = Directory.CreateDirectory(duplicateDirectory);
        if (isDummyRun || isNotUsedInsightMetrologyViewerAttachments || _FileConnectorConfiguration.FileScanningIntervalInSeconds > 0)
        {
            bool ganPPTST = false;
            if (!Directory.Exists(duplicateDirectory))
                _ = Directory.CreateDirectory(duplicateDirectory);
            string successDirectory;
            if (!_IsXToAPC)
                successDirectory = string.Empty;
            else
            {
                successDirectory = string.Concat(Path.GetDirectoryName(_FileConnectorConfiguration.TargetFileLocation), @"\ViewerPath");
                if (!Directory.Exists(successDirectory))
                    _ = Directory.CreateDirectory(successDirectory);
            }
            List<Tuple<Shared.Properties.IScopeInfo, string>> tuples = new();
            string duplicateFile = string.Concat(duplicateDirectory, @"\", Path.GetFileName(reportFullPath));
            string weekOfYear = _Calendar.GetWeekOfYear(_Logistics.DateTimeFromSequence, CalendarWeekRule.FirstDay, DayOfWeek.Sunday).ToString("00");
            string weekDirectory = string.Concat(_Logistics.DateTimeFromSequence.ToString("yyyy"), "_Week_", weekOfYear, @"\", _Logistics.DateTimeFromSequence.ToString("yyyy-MM-dd"));
            string logisticsSequenceMemoryDirectory = string.Concat(_MemoryPath, @"\", _EquipmentType, @"\Source\", weekDirectory, @"\", _Logistics.Sequence);
            if (!Directory.Exists(logisticsSequenceMemoryDirectory))
                _ = Directory.CreateDirectory(logisticsSequenceMemoryDirectory);
            if (_IsXToAPC)
            {
                // APC level only needs the raw report copied alongside.
                if (!isDummyRun && _IsEAFHosted)
                    File.Copy(reportFullPath, duplicateFile, overwrite: true);
            }
            else
            {
                if (_IsXToOpenInsightMetrologyViewer)
                {
                    // Post header/detail data to the Metrology Viewer web service and
                    // memorize the response JSON for later attachment handling.
                    WSRequest wsRequest = new(this, _Logistics, descriptions);
                    if (!isDummyRun && _IsEAFHosted)
                    {
                        Tuple<string, WS.Results> wsResults = WS.SendData(_OpenInsightMetrologyViewerAPI, wsRequest);
                        if (!wsResults.Item2.Success)
                            throw new Exception(wsResults.ToString());
                        _Log.Debug(wsResults.Item2.HeaderID);
                        File.WriteAllText(string.Concat(logisticsSequenceMemoryDirectory, @"\", nameof(WS.Results), ".json"), wsResults.Item1);
                    }
                }
                else
                {
                    // Build one line-set per test for the IQS / OpenInsight file writers.
                    Test test;
                    string lines;
                    Shared.Properties.IScopeInfo scopeInfo;
                    foreach (KeyValuePair<Test, List<Shared.Properties.IDescription>> keyValuePair in tuple.Item2)
                    {
                        test = keyValuePair.Key;
                        //scopeInfo = new ScopeInfo(test);
                        if (!_IsXToOpenInsight)
                            scopeInfo = new ScopeInfo(test, _IqsFile);
                        else
                            scopeInfo = new ScopeInfo(test, _OpenInsightFilePattern);
                        //lines = ProcessDataStandardFormat.GetLines(this, scopeInfo, names, values, dateFormat: "M/d/yyyy hh:mm:ss tt", timeFormat: string.Empty, pairedColumns: ExtractResultPairedColumns);
                        // GAN_PPTST recipes get the particle-adder format (see ProcessData.GetLines).
                        ganPPTST = descriptions[0].Recipe.Contains("GAN_PPTST");
                        lines = ProcessData.GetLines(this, _Logistics, descriptions, ganPPTST);
                        tuples.Add(new Tuple<Shared.Properties.IScopeInfo, string>(scopeInfo, lines));
                    }
                }
                if (_IsXToOpenInsightMetrologyViewerAttachments)
                {
                    string[] matchDirectories = Shared1567(reportFullPath, tuples);
                    if (!isDummyRun && _IsEAFHosted && !isNotUsedInsightMetrologyViewerAttachments)
                        ProcessData.PostOpenInsightMetrologyViewerAttachments(this, _Logistics, _OpenInsightMetrologyViewerAPI, dateTime, logisticsSequenceMemoryDirectory, descriptions, matchDirectories[0]);
                }
            }
            if (!_IsXToOpenInsightMetrologyViewer && !_IsXToOpenInsightMetrologyViewerAttachments)
            {
                // Si level skips GAN_PPTST runs and GaN level only takes them;
                // every other level always writes.
                bool check = false;
                if (!_IsXToIQSSi && !_IsXToIQSGaN)
                    check = true;
                else if (_IsXToIQSSi && !ganPPTST)
                    check = true;
                else if (_IsXToIQSGaN && ganPPTST)
                    check = true;
                //else
                // Don't write file(s) //throw new Exception();
                if (check)
                    Shared0413(dateTime, isDummyRun, successDirectory, duplicateDirectory, tuples, duplicateFile);
            }
        }
        if (_IsXToOpenInsightMetrologyViewerAttachments)
        {
            // Locate the single run directory for this logistics sequence and either
            // replay (dummy) or serialize/verify the WSRequest JSON.
            string destinationDirectory;
            //string destinationDirectory = WriteScopeInfo(_ProgressPath, _Logistics, dateTime, duplicateDirectory, tuples);
            FileInfo fileInfo = new(reportFullPath);
            string logisticsSequence = _Logistics.Sequence.ToString();
            if (fileInfo.Exists && fileInfo.LastWriteTime < fileInfo.CreationTime)
                File.SetLastWriteTime(reportFullPath, fileInfo.CreationTime);
            string jobIdDirectory = string.Concat(Path.GetDirectoryName(Path.GetDirectoryName(_FileConnectorConfiguration.TargetFileLocation)), @"\", _Logistics.JobID);
            if (!Directory.Exists(jobIdDirectory))
                _ = Directory.CreateDirectory(jobIdDirectory);
            string[] matchDirectories;
            if (!_IsEAFHosted)
                matchDirectories = new string[] { Path.GetDirectoryName(Path.GetDirectoryName(reportFullPath)) };
            else
                matchDirectories = Directory.GetDirectories(jobIdDirectory, string.Concat(_Logistics.MID, '*', logisticsSequence, '*'), SearchOption.TopDirectoryOnly);
            if ((matchDirectories is null) || matchDirectories.Length != 1)
                throw new Exception("Didn't find directory by logistics sequence");
            destinationDirectory = matchDirectories[0];
            if (isDummyRun)
                Shared0607(reportFullPath, duplicateDirectory, logisticsSequence, destinationDirectory);
            else
            {
                WSRequest wsRequest = new(this, _Logistics, descriptions);
                JsonSerializerOptions jsonSerializerOptions = new() { WriteIndented = true };
                string json = JsonSerializer.Serialize(wsRequest, wsRequest.GetType(), jsonSerializerOptions);
                if (_IsEAFHosted)
                    Shared1277(reportFullPath, destinationDirectory, logisticsSequence, jobIdDirectory, json);
                else
                {
                    // Non-hosted runs are regression checks against a stored snapshot.
                    string jsonFileName = Path.ChangeExtension(reportFullPath, ".json");
                    string historicalText = File.ReadAllText(jsonFileName);
                    if (json != historicalText)
                        throw new Exception("File doesn't match historical!");
                }
            }
        }
        return results;
    }

    // Executes one batch of dummy replays: extracts the chosen archive into the
    // in-process directory, back-dates the files to the archived sequence, moves
    // them into the watch directory, and reports state to MonA (sjc site).
    private void CallbackIsDummy(string traceDummyFile, List<Tuple<string, string, string, string, int>> tuples, bool fileConnectorConfigurationIncludeSubDirectories, bool includeSubDirectoriesExtra)
    {
        int fileCount;
        string[] files;
        string monARessource;
        string checkDirectory;
        string sourceArchiveFile;
        string inProcessDirectory;
        const string site = "sjc";
        string stateName = string.Concat("Dummy_", _EventName);
        const string monInURL = "http://moninhttp.sjc.infineon.com/input/text";
        MonIn monIn = MonIn.GetInstance(monInURL);
        foreach (Tuple<string, string, string, string, int> item in tuples)
        {
            monARessource = item.Item1;
            sourceArchiveFile = item.Item2;
            inProcessDirectory = item.Item3;
            checkDirectory = item.Item4;
            fileCount = item.Item5;
            try
            {
                if (fileCount > 0 || string.IsNullOrEmpty(checkDirectory))
                {
                    // Leftover files (or missing check directory): warn and wait before retrying.
                    File.AppendAllLines(traceDummyFile, new string[] { site, monARessource, stateName, State.Warning.ToString() });
                    _ = monIn.SendStatus(site, monARessource, stateName, State.Warning);
                    for (int i = 1; i < 12; i++)
                        Thread.Sleep(500);
                }
                else if (inProcessDirectory == checkDirectory)
                    continue;
                if (!_IsEAFHosted)
                    continue;
                if (!File.Exists(sourceArchiveFile))
                    continue;
                // Archive file names encode the original sequence (ticks), e.g. "123x.zip".
                if (!long.TryParse(Path.GetFileNameWithoutExtension(sourceArchiveFile).Replace("x", string.Empty), out long sequence))
                    continue;
                ZipFile.ExtractToDirectory(sourceArchiveFile, inProcessDirectory);
                if (fileConnectorConfigurationIncludeSubDirectories && includeSubDirectoriesExtra)
                    checkDirectory = string.Concat(checkDirectory, @"\", sequence);
                if (fileConnectorConfigurationIncludeSubDirectories)
                    files = Directory.GetFiles(inProcessDirectory, "*", SearchOption.AllDirectories);
                else
                    files = Directory.GetFiles(inProcessDirectory, "*", SearchOption.TopDirectoryOnly);
                if (files.Length > 250)
                    throw new Exception("Safety net!");
                // Back-date so the extracted files carry the archived sequence timestamp.
                foreach (string file in files)
                    File.SetLastWriteTime(file, new DateTime(sequence));
                if (!fileConnectorConfigurationIncludeSubDirectories)
                {
                    foreach (string file in files)
                        File.Move(file, string.Concat(checkDirectory, @"\", Path.GetFileName(file)));
                }
                else
                {
                    // Recreate the directory tree before moving the files across.
                    string[] directories = Directory.GetDirectories(inProcessDirectory, "*", SearchOption.AllDirectories);
                    foreach (string directory in directories)
                        _ = Directory.CreateDirectory(string.Concat(checkDirectory, directory.Substring(inProcessDirectory.Length)));
                    foreach (string file in files)
                        File.Move(file, string.Concat(checkDirectory, file.Substring(inProcessDirectory.Length)));
                }
                File.AppendAllLines(traceDummyFile, new string[] { site, monARessource, stateName, State.Ok.ToString() });
                _ = monIn.SendStatus(site, monARessource, stateName, State.Ok);
            }
            catch (Exception exception)
            {
                // Notify by email (best effort), trace, and report Critical to MonA.
                string subject = string.Concat("Exception:", _CellInstanceConnectionName);
                string body = string.Concat(exception.Message, Environment.NewLine, Environment.NewLine, exception.StackTrace);
                try
                { _SMTP.SendHighPriorityEmailMessage(subject, body); }
                catch (Exception) { }
                File.AppendAllLines(traceDummyFile, new string[] { site, monARessource, stateName, State.Critical.ToString(), exception.Message, exception.StackTrace });
                _ = monIn.SendStatus(site, monARessource, stateName, State.Critical);
            }
        }
    }

    // Timer callback for the dummy level: during working hours, picks the next cell
    // (round-robin) that has an archived run available, prepares its in-process
    // directory, and hands the candidates to CallbackIsDummy; always re-arms the timer.
    private void Callback(object state)
    {
        if (!_IsDummy)
            throw new Exception();
        try
        {
            DateTime dateTime = DateTime.Now;
            // Only replay on weekdays between 08:00 and 17:59.
            bool check = dateTime.Hour > 7 && dateTime.Hour < 18 && dateTime.DayOfWeek != DayOfWeek.Sunday && dateTime.DayOfWeek != DayOfWeek.Saturday;
            if (check)
            {
                int fileCount;
                string[] files;
                string monARessource;
                string checkDirectory;
                string sourceArchiveFile;
                string sourceFileLocation;
                string inProcessDirectory;
                string weekOfYear = _Calendar.GetWeekOfYear(dateTime, CalendarWeekRule.FirstDay, DayOfWeek.Sunday).ToString("00");
                string traceDummyDirectory = string.Concat(Path.GetPathRoot(_TracePath), @"\TracesDummy\", _CellInstanceName, @"\Source\", dateTime.ToString("yyyy"), "___Week_", weekOfYear);
                if (!Directory.Exists(traceDummyDirectory))
                    _ = Directory.CreateDirectory(traceDummyDirectory);
                string traceDummyFile = string.Concat(traceDummyDirectory, @"\", dateTime.Ticks, " - ", _CellInstanceName, ".txt");
                File.AppendAllText(traceDummyFile, string.Empty);
                List<Tuple<string, string, string, string, int>> tuples = new();
                string progressDirectory = Path.GetFullPath(string.Concat(_FileConnectorConfiguration.SourceFileLocation, @"\_ Progress"));
                if (progressDirectory != _ProgressPath || !Directory.Exists(progressDirectory))
                    throw new Exception("Invalid progress path");
                // Collect one candidate per (cell, source file filter) that has an archive present.
                foreach (KeyValuePair<string, string> keyValuePair in _CellNames)
                {
                    monARessource = keyValuePair.Key;
                    if (!keyValuePair.Value.Contains('\\'))
                        continue;
                    foreach (string sourceFileFilter in _FileConnectorConfiguration.SourceFileFilter.Split('|'))
                    {
                        if (sourceFileFilter.ToLower().StartsWith(keyValuePair.Value.Replace(@"\", string.Empty)))
                            sourceFileLocation = Path.GetFullPath(_FileConnectorConfiguration.SourceFileLocation);
                        else if (_FileConnectorConfiguration.SourceFileLocation.ToLower().EndsWith(keyValuePair.Value))
                            sourceFileLocation = Path.GetFullPath(_FileConnectorConfiguration.SourceFileLocation);
                        else
                            sourceFileLocation = Path.GetFullPath(string.Concat(_FileConnectorConfiguration.SourceFileLocation, @"\", keyValuePair.Value));
                        sourceArchiveFile = Path.GetFullPath(string.Concat(sourceFileLocation, @"\", sourceFileFilter));
                        if (!File.Exists(sourceArchiveFile))
                            continue;
                        if (!_DummyRuns.ContainsKey(monARessource))
                            _DummyRuns.Add(monARessource, new List<long>());
                        tuples.Add(new Tuple<string, string, string, string, int>(monARessource, sourceFileFilter, sourceFileLocation, sourceArchiveFile, 0));
                    }
                }
                File.AppendAllLines(traceDummyFile, from l in tuples select l.Item4);
                if (tuples.Any())
                {
                    // Round-robin over the candidates; one archive is replayed per tick.
                    _LastDummyRunIndex += 1;
                    if (_LastDummyRunIndex >= tuples.Count)
                        _LastDummyRunIndex = 0;
                    monARessource = tuples[_LastDummyRunIndex].Item1;
                    string sourceFileFilter = tuples[_LastDummyRunIndex].Item2;
                    sourceFileLocation = tuples[_LastDummyRunIndex].Item3;
                    sourceArchiveFile = tuples[_LastDummyRunIndex].Item4;
                    //fileCount = tuples[_LastDummyRunIndex].Item5;
                    tuples.Clear();
                    if (long.TryParse(Path.GetFileNameWithoutExtension(sourceArchiveFile).Replace("x", string.Empty), out long sequence))
                    {
                        if (!_DummyRuns[monARessource].Contains(sequence))
                            _DummyRuns[monARessource].Add(sequence);
                        inProcessDirectory = string.Concat(progressDirectory, @"\Dummy_in process\", sequence);
                        checkDirectory = inProcessDirectory;
                        if (!Directory.Exists(checkDirectory))
                            _ = Directory.CreateDirectory(checkDirectory);
                        files = Directory.GetFiles(checkDirectory, "*", SearchOption.AllDirectories);
                        fileCount = files.Length;
                        if (files.Any())
                        {
                            // Best effort cleanup of a previous incomplete replay.
                            if (files.Length > 250)
                                throw new Exception("Safety net!");
                            try
                            {
                                foreach (string file in files)
                                    File.Delete(file);
                            }
                            catch (Exception) { }
                        }
                        tuples.Add(new Tuple<string, string, string, string, int>(monARessource, sourceArchiveFile, inProcessDirectory, checkDirectory, fileCount));
                        checkDirectory = sourceFileLocation;
                        files = Directory.GetFiles(checkDirectory, string.Concat("*", sequence, "*"), SearchOption.TopDirectoryOnly);
                        fileCount = files.Length;
                        tuples.Add(new Tuple<string, string, string, string, int>(monARessource, sourceArchiveFile, inProcessDirectory, checkDirectory, fileCount));
                    }
                }
                if (tuples.Any())
                    //CallbackIsDummy(traceDummyFile, tuples, FileConnectorConfiguration.IncludeSubDirectories.Value, includeSubDirectoriesExtra: false);
                    CallbackIsDummy(traceDummyFile, tuples, fileConnectorConfigurationIncludeSubDirectories: true, includeSubDirectoriesExtra: true);
            }
        }
        catch (Exception exception)
        {
            string subject = string.Concat("Exception:", _CellInstanceConnectionName);
            string body = string.Concat(exception.Message, Environment.NewLine, Environment.NewLine, exception.StackTrace);
            try
            { _SMTP.SendHighPriorityEmailMessage(subject, body); }
            catch (Exception) { }
        }
        // Re-arm the one-shot timer for the next scan interval.
        try
        {
            TimeSpan timeSpan = new(DateTime.Now.AddSeconds(_FileConnectorConfiguration.FileScanningIntervalInSeconds.Value).Ticks - DateTime.Now.Ticks);
            _ = _Timer.Change((long)timeSpan.TotalMilliseconds, Timeout.Infinite);
        }
        catch (Exception exception)
        {
            string subject = string.Concat("Exception:", _CellInstanceConnectionName);
            string body = string.Concat(exception.Message, Environment.NewLine, Environment.NewLine, exception.StackTrace);
            try
            { _SMTP.SendHighPriorityEmailMessage(subject, body); }
            catch (Exception) { }
        }
    }

}

View File

@ -0,0 +1,17 @@
namespace Adaptation.FileHandlers.MET08DDUPSFS6420;
// Duplicator destination selector for MET08DDUPSFS6420. The underlying integer
// value of each member is compared against the configured _Hyphens value via
// (int) casts in FileRead, so the member order is load-bearing — do not reorder.
public enum Hyphen
{
    IsXToOpenInsightMetrologyViewer, //MetrologyWS.SendData(logic, string.Concat("http://", serverName, "/api/inbound/Tencor"), headerAttachments, detailAttachments);
    IsXToIQSSi, //bool WriteFileSPC(Dictionary
    IsXToIQSGaN, //GAN_PPTST
    IsXToOpenInsight, //bool WriteFileOpenInsight(Dictionary
    IsXToOpenInsightMetrologyViewerAttachments, //Site-Two
    IsXToAPC,
    IsXToSPaCe,
    IsXToArchive,
    IsArchive,
    IsDummy,        // replay archived runs on a timer (FileRead.Callback)
    IsManualOIEntry,
    IsNaEDA
}

View File

@ -0,0 +1,278 @@
using Adaptation.Shared;
using Adaptation.Shared.Metrology;
using Adaptation.Shared.Properties;
using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Text;
using System.Text.Json;
namespace Adaptation.FileHandlers.MET08DDUPSFS6420;
public class ProcessData
{
// One entry per duplicator hyphen level: a numeric value (appears to be a
// retention/interval figure — TODO confirm against callers) and the share path
// template ('~' is a placeholder) for that level's source or poll directory.
internal static List<Tuple<int, Enum, string>> HyphenTuples
{
    get
    {
        List<Tuple<int, Enum, string>> results = new()
        {
            new(0, Hyphen.IsNaEDA, @"\EC_EDA\Staging\Traces\~\Source"),
            new(15, Hyphen.IsXToOpenInsightMetrologyViewer, @"\EC_EAFLog\TracesMES\~\Source"),
            new(36, Hyphen.IsXToIQSSi, @"\EC_SPC_Si\Traces\~\PollPath"),
            new(36, Hyphen.IsXToIQSGaN, @"\EC_SPC_GaN\Traces\~\PollPath"),
            new(36, Hyphen.IsXToOpenInsight, @"\\messa01ec.ec.local\APPS\Metrology\~\Source"),
            new(36, Hyphen.IsXToOpenInsightMetrologyViewerAttachments, @"\EC_Characterization_Si\In Process\~\Source"),
            new(360, Hyphen.IsXToAPC, @"\EC_APC\Staging\Traces\~\PollPath"),
            new(-36, Hyphen.IsXToSPaCe, @"\EC_SPC_Si\Traces\~\Source"),
            new(180, Hyphen.IsXToArchive, @"\EC_EAFLog\TracesArchive\~\Source"),
            new(36, Hyphen.IsArchive, @"\EC_Characterization_Si\Processed"),
            //new Tuple<int, Enum, string>("IsDummy"
        };
        return results;
    }
}
internal static string GetLines(IFileRead fileRead, Logistics logistics, List<pcl.Description> descriptions, bool ganPPTST)
{
if (fileRead is null)
{ }
StringBuilder result = new();
pcl.Description x = descriptions[0];
if (ganPPTST)
{
string slot;
string reactor;
const int eight = 8;
DateTime dateTime = DateTime.Parse(x.Date);
string lot = x.Lot.ToLower().Replace("69-", string.Empty).Replace("71-", string.Empty).Replace("-", string.Empty);
if (string.IsNullOrEmpty(x.Lot) || x.Lot.Length < 2)
reactor = "R";
else
reactor = string.Concat("R", x.Lot.Substring(0, 2));
_ = result.Append(nameof(x.Date)).Append(';').
Append("Part").Append(';').
Append(nameof(x.Reactor)).Append(';').
Append("Lot").Append(';').
Append(nameof(pcl.Detail.Slot)).Append(';').
Append(nameof(pcl.Detail.Bin1)).Append(';').
Append(nameof(pcl.Detail.Bin2)).Append(';').
Append(nameof(pcl.Detail.Bin3)).Append(';').
Append(nameof(pcl.Detail.Bin4)).Append(';').
Append(nameof(pcl.Detail.Bin5)).Append(';').
Append(nameof(pcl.Detail.Bin6)).Append(';').
Append("Bin9").
AppendLine();
foreach (pcl.Description description in descriptions)
{
slot = description.Slot.Replace("*", string.Empty);
_ = result.Append('!').Append(dateTime.ToString("MM/dd/yyyy HH:mm:ss")).Append(';').
Append("Particle Adder;").
Append(reactor).Append(';').
Append(lot).Append(';').
Append(slot).Append(';').
Append(description.Bin1).Append(';').
Append(description.Bin2).Append(';').
Append(description.Bin3).Append(';').
Append(description.Bin4).Append(';').
Append(description.Bin5).Append(';').
Append(description.Bin6).Append(';').
Append(description.AreaCount).
AppendLine();
}
if (descriptions.Count != eight)
{
string negativeTenThousand = "-10000";
for (int i = descriptions.Count; i < eight; i++)
{
_ = result.Append('!').Append(dateTime.ToString("MM/dd/yyyy HH:mm:ss")).Append(';').
Append("Particle Adder;").
Append(reactor).Append(';').
Append(lot).Append(';').
Append(negativeTenThousand).Append(';').
Append(negativeTenThousand).Append(';').
Append(negativeTenThousand).Append(';').
Append(negativeTenThousand).Append(';').
Append(negativeTenThousand).Append(';').
Append(negativeTenThousand).Append(';').
Append(negativeTenThousand).Append(';').
Append(negativeTenThousand).
AppendLine();
}
}
if (result.ToString().Split('\n').Length != (eight + 2))
throw new Exception(string.Concat("Must have ", eight, " samples"));
}
else
{
char del = '\t';
_ = result.Append(x.AreaCountAvg).Append(del). // 001 - AreaCountAvg
Append(x.AreaCountMax).Append(del). // 002 - AreaCountMax
Append(x.AreaCountMin).Append(del). // 003 - AreaCountMin
Append(x.AreaCountStdDev).Append(del). // 004 - AreaCountStdDev
Append(x.AreaTotalAvg).Append(del). // 005 - AreaTotalAvg
Append(x.AreaTotalMax).Append(del). // 006 - AreaTotalMax
Append(x.AreaTotalMin).Append(del). // 007 - AreaTotalMin
Append(x.AreaTotalStdDev).Append(del). // 008 - AreaTotalStdDev
Append(x.Date).Append(del). // 009 -
Append(x.HazeAverageAvg).Append(del). // 010 - Haze Average
Append(x.HazeAverageMax).Append(del). // 011 -
Append(x.HazeAverageMin).Append(del). // 012 -
Append(x.HazeAverageStdDev).Append(del). // 013 -
Append(x.HazeRegionAvg).Append(del). // 014 -
Append(x.HazeRegionMax).Append(del). // 015 -
Append(x.HazeRegionMin).Append(del). // 016 -
Append(x.HazeRegionStdDev).Append(del). // 017 -
Append(x.Lot).Append(del). // 018 -
Append(x.LPDCM2Avg).Append(del). // 019 -
Append(x.LPDCM2Max).Append(del). // 020 -
Append(x.LPDCM2Min).Append(del). // 021 -
Append(x.LPDCM2StdDev).Append(del). // 022 -
Append(x.LPDCountAvg).Append(del). // 023 -
Append(x.LPDCountMax).Append(del). // 024 -
Append(x.LPDCM2Min).Append(del). // 025 -
Append(x.LPDCountStdDev).Append(del). // 026 -
Append(x.Employee).Append(del). // 027 -
Append(x.RDS).Append(del). // 028 - Lot
Append(x.Reactor).Append(del). // 029 - Process
Append(x.Recipe.Replace(";", string.Empty)).Append(del). // 030 - Part
Append(x.ScratchCountAvg).Append(del). // 031 - Scratch Count
Append(x.ScratchCountMax).Append(del). // 032 -
Append(x.ScratchCountMin).Append(del). // 033 -
Append(x.ScratchTotalStdDev).Append(del). // 034 -
Append(x.ScratchTotalAvg).Append(del). // 035 - Scratch Length
Append(x.ScratchTotalMax).Append(del). // 036 -
Append(x.ScratchTotalMin).Append(del). // 037 -
Append(x.ScratchTotalStdDev).Append(del). // 038 -
Append(x.SumOfDefectsAvg).Append(del). // 039 - Average Sum of Defects
Append(x.SumOfDefectsMax).Append(del). // 040 - Max Sum of Defects
Append(x.SumOfDefectsMin).Append(del). // 041 - Min Sum of Defects
Append(x.SumOfDefectsStdDev).Append(del). // 042 - SumOfDefectsStdDev
Append(logistics.MesEntity).Append(del). // 043 -
AppendLine();
}
return result.ToString();
}
/// <summary>
/// Rewrites the PDF at <paramref name="checkFileName"/> in place: scans every page's
/// content-stream tokens for COS string arrays and, when a string token equal to "]"
/// is found, replaces the entry before it with a marker built from the first
/// description's Comments value (reactor/load-lock). Saves only if something changed.
/// </summary>
/// <param name="descriptions">Parsed descriptions; only descriptions[0].Comments is read.</param>
/// <param name="checkFileName">Path of the PDF to patch and save.</param>
private static void UpdateDataPDF(List<pcl.Description> descriptions, string checkFileName)
{
    string value;
    object possiblePage;
    object possibleString;
    object possibleCOSArray;
    java.util.List tokenList;
    java.util.List arrayList;
    java.io.OutputStream outputStream;
    List<string> updateValues = new();
    StringBuilder stringBuilder = new();
    java.util.ListIterator tokenIterator;
    java.util.ListIterator arrayIterator;
    java.io.File file = new(checkFileName);
    string reactorLoadLock = descriptions[0].Comments;
    org.apache.pdfbox.pdmodel.common.PDStream pdStream;
    org.apache.pdfbox.pdmodel.common.PDStream updatedStream;
    org.apache.pdfbox.pdfparser.PDFStreamParser pdfStreamParser;
    org.apache.pdfbox.pdfwriter.ContentStreamWriter contentStreamWriter;
    org.apache.pdfbox.pdmodel.PDDocument pdDocument = org.apache.pdfbox.pdmodel.PDDocument.load(file);
    org.apache.pdfbox.pdmodel.PDDocumentCatalog pdDocumentCatalog = pdDocument.getDocumentCatalog();
    java.util.List pagesList = pdDocumentCatalog.getAllPages();
    java.util.ListIterator pageIterator = pagesList.listIterator();
    // The short.MaxValue counters are runaway guards; normal exit is the hasNext() break.
    for (short i = 1; i < short.MaxValue; i++)
    {
        if (!pageIterator.hasNext())
            break;
        possiblePage = pageIterator.next();
        if (possiblePage is not org.apache.pdfbox.pdmodel.PDPage page)
            continue;
        pdStream = page.getContents();
        pdfStreamParser = new org.apache.pdfbox.pdfparser.PDFStreamParser(pdStream);
        pdfStreamParser.parse();
        tokenList = pdfStreamParser.getTokens();
        tokenIterator = tokenList.listIterator();
        // Fixed: guard previously tested `i` (copy-paste), so this loop was unbounded.
        for (short t = 1; t < short.MaxValue; t++)
        {
            if (!tokenIterator.hasNext())
                break;
            possibleCOSArray = tokenIterator.next();
            if (possibleCOSArray is not org.apache.pdfbox.cos.COSArray cossArray)
                continue;
            _ = stringBuilder.Clear();
            arrayList = cossArray.toList();
            arrayIterator = arrayList.listIterator();
            // Fixed: guard previously tested `i` (copy-paste), so this loop was unbounded.
            for (short a = 1; a < short.MaxValue; a++)
            {
                if (!arrayIterator.hasNext())
                    break;
                possibleString = arrayIterator.next();
                if (possibleString is not org.apache.pdfbox.cos.COSString cossString)
                    continue;
                value = cossString.getString();
                _ = stringBuilder.Append(value);
                if (value != "]")
                    continue;
                updateValues.Add(value);
                value = stringBuilder.ToString();
                // "[]" in the accumulated text means the array had no prior payload;
                // choose the marker format accordingly (a - 1 targets the entry before "]").
                if (value.Contains("[]"))
                    cossArray.setString(a - 1, string.Concat("*", reactorLoadLock, "]"));
                else
                    cossArray.setString(a - 1, string.Concat(" {*", reactorLoadLock, "}]"));
            }
        }
        if (updateValues.Any())
        {
            updatedStream = new org.apache.pdfbox.pdmodel.common.PDStream(pdDocument);
            outputStream = updatedStream.createOutputStream();
            contentStreamWriter = new org.apache.pdfbox.pdfwriter.ContentStreamWriter(outputStream);
            contentStreamWriter.writeTokens(tokenList);
            outputStream.close();
            page.setContents(updatedStream);
        }
    }
    if (updateValues.Any())
        pdDocument.save(checkFileName);
    pdDocument.close();
}
/// <summary>
/// Locates the run's PDF attachments next to the single .pcl source file, patches the
/// header data PDF via UpdateDataPDF, and posts header plus per-slot attachments to the
/// OpenInsight Metrology Viewer using the HeaderID recorded in the WS.Results memory file.
/// </summary>
internal static void PostOpenInsightMetrologyViewerAttachments(IFileRead fileRead, Logistics logistics, string openInsightMetrologyViewerAPI, DateTime dateTime, string logisticsSequenceMemoryDirectory, List<pcl.Description> descriptions, string matchDirectory)
{
    // Unused-parameter guards, kept to match the convention used throughout this file.
    if (fileRead is null)
    { }
    if (dateTime == DateTime.MinValue)
    { }
    if (logisticsSequenceMemoryDirectory is null)
    { }
    if (descriptions is null)
    { }
    if (matchDirectory is null)
    { }
    string[] pclFiles = Directory.GetFiles(matchDirectory, "*.pcl", SearchOption.TopDirectoryOnly);
    if (pclFiles.Length != 1)
        throw new Exception("Invalid source file count!");
    string sourceFileNameNoExt = Path.GetFileNameWithoutExtension(pclFiles[0]);
    string wsResultsMemoryFile = $@"{logisticsSequenceMemoryDirectory}\{nameof(WS.Results)}.json";
    if (!File.Exists(wsResultsMemoryFile))
        throw new Exception($"Memory file <{wsResultsMemoryFile}> doesn't exist!");
    WS.Results wsResults = JsonSerializer.Deserialize<WS.Results>(File.ReadAllText(wsResultsMemoryFile));
    long wsResultsHeaderID = wsResults.HeaderID;
    List<WS.Attachment> dataAttachments = new();
    List<WS.Attachment> headerAttachments = new();
    string headerPdfFileName = $@"{matchDirectory}\{sourceFileNameNoExt}_data.pdf";
    if (!File.Exists(headerPdfFileName))
        throw new Exception("Header file doesn't exist!");
    UpdateDataPDF(descriptions, headerPdfFileName);
    headerAttachments.Add(new WS.Attachment(descriptions[0].HeaderUniqueId, "Data.pdf", headerPdfFileName));
    foreach (pcl.Description description in descriptions)
    {
        // '*' is not a legal file-name character; it is stored as 's' on disk.
        string slotToken = description.Slot.Replace('*', 's');
        string imagePdfFileName = $@"{matchDirectory}\{sourceFileNameNoExt}_{slotToken}_image.pdf";
        if (File.Exists(imagePdfFileName))
            dataAttachments.Add(new WS.Attachment(description.UniqueId, "Image.pdf", imagePdfFileName));
        string dataPdfFileName = $@"{matchDirectory}\{sourceFileNameNoExt}_{slotToken}_data.pdf";
        if (File.Exists(dataPdfFileName))
            dataAttachments.Add(new WS.Attachment(description.UniqueId, "Data.pdf", dataPdfFileName));
    }
    // Exactly one data attachment per description is required.
    if (dataAttachments.Count == 0 || dataAttachments.Count != descriptions.Count)
        throw new Exception("Invalid attachment count!");
    WS.AttachFiles(openInsightMetrologyViewerAPI, wsResultsHeaderID, headerAttachments, dataAttachments);
}
}

View File

@ -0,0 +1,199 @@
using Adaptation.Shared;
using Adaptation.Shared.Properties;
using System;
using System.Collections.Generic;
using System.Linq;
namespace Adaptation.FileHandlers.MET08DDUPSFS6420;
/// <summary>
/// Payload sent to the Metrology WS for a MET08DDUPSFS6420 run: header-level statistics
/// copied from the first <see cref="pcl.Description"/>, plus one <see cref="pcl.Detail"/>
/// per description. Property names form the serialized contract — do not rename.
/// </summary>
public class WSRequest
{
    public long Id { get; set; }
    public string AreaCountAvg { get; set; }
    public string AreaCountMax { get; set; }
    public string AreaCountMin { get; set; }
    public string AreaCountStdDev { get; set; }
    public string AreaTotalAvg { get; set; }
    public string AreaTotalMax { get; set; }
    public string AreaTotalMin { get; set; }
    public string AreaTotalStdDev { get; set; }
    public string Date { get; set; }
    public string HazeAverageAvg { get; set; }
    public string HazeAverageMax { get; set; }
    public string HazeAverageMin { get; set; }
    public string HazeAverageStdDev { get; set; }
    public string HazeRegionAvg { get; set; }
    public string HazeRegionMax { get; set; }
    public string HazeRegionMin { get; set; }
    public string HazeRegionStdDev { get; set; }
    public string Layer { get; set; }
    public string LotID { get; set; }
    public string LPDCM2Avg { get; set; }
    public string LPDCM2Max { get; set; }
    public string LPDCM2Min { get; set; }
    public string LPDCM2StdDev { get; set; }
    public string LPDCountAvg { get; set; }
    public string LPDCountMax { get; set; }
    public string LPDCountMin { get; set; }
    public string LPDCountStdDev { get; set; }
    // NOTE(review): never assigned by the constructor below, although x.Employee is
    // available on pcl.Description — confirm whether this is intentional.
    public string Operator { get; set; }
    public string ParseErrorText { get; set; }
    public string PSN { get; set; }
    public string RDS { get; set; }
    public string Reactor { get; set; }
    public string Recipe { get; set; }
    public string ScratchCountAvg { get; set; }
    public string ScratchCountMax { get; set; }
    public string ScratchCountMin { get; set; }
    public string ScratchCountStdDev { get; set; }
    public string ScratchTotalAvg { get; set; }
    public string ScratchTotalMax { get; set; }
    public string ScratchTotalMin { get; set; }
    public string ScratchTotalStdDev { get; set; }
    public string SumOfDefectsAvg { get; set; }
    public string SumOfDefectsMax { get; set; }
    public string SumOfDefectsMin { get; set; }
    public string SumOfDefectsStdDev { get; set; }
    public string Title { get; set; }
    public string UniqueId { get; set; }
    public string Zone { get; set; }
    public string CellName { get; set; }
    public string Data { get; set; }
    // NOTE(review): lower-case name violates IDE1006 (see format-report.json), but
    // renaming would change the serialized JSON contract, so it is left as-is.
    public int i { get; set; }
    public List<pcl.Detail> Details { get; protected set; }
    [Obsolete("For json")] public WSRequest() { }
    internal WSRequest(IFileRead fileRead, Logistics logistics, List<pcl.Description> descriptions)
    {
        // Unused-parameter guard, matching the convention used throughout this project.
        if (fileRead is null)
        { }
        i = -1;
        Id = 0;
        Zone = null;
        Layer = null;
        Title = null;
        Data = "*Data*";
        Details = new List<pcl.Detail>();
        CellName = logistics.MesEntity;
        // Header statistics come from the first description only.
        pcl.Description x = descriptions[0];
        //Header
        {
            AreaCountAvg = x.AreaCountAvg;
            AreaCountMax = x.AreaCountMax;
            AreaCountMin = x.AreaCountMin;
            AreaCountStdDev = x.AreaCountStdDev;
            AreaTotalAvg = x.AreaTotalAvg;
            AreaTotalMax = x.AreaTotalMax;
            AreaTotalMin = x.AreaTotalMin;
            AreaTotalStdDev = x.AreaTotalStdDev;
            Date = x.Date;
            HazeAverageAvg = x.HazeAverageAvg;
            HazeAverageMax = x.HazeAverageMax;
            HazeAverageMin = x.HazeAverageMin;
            HazeAverageStdDev = x.HazeAverageStdDev;
            HazeRegionAvg = x.HazeRegionAvg;
            HazeRegionMax = x.HazeRegionMax;
            HazeRegionMin = x.HazeRegionMin;
            HazeRegionStdDev = x.HazeRegionStdDev;
            LotID = x.Lot;
            LPDCM2Avg = x.LPDCM2Avg;
            LPDCM2Max = x.LPDCM2Max;
            LPDCM2Min = x.LPDCM2Min;
            LPDCM2StdDev = x.LPDCM2StdDev;
            LPDCountAvg = x.LPDCountAvg;
            LPDCountMax = x.LPDCountMax;
            LPDCountMin = x.LPDCountMin;
            LPDCountStdDev = x.LPDCountStdDev;
            ParseErrorText = x.ParseErrorText;
            PSN = x.PSN;
            RDS = x.RDS;
            Reactor = x.Reactor;
            Recipe = x.Recipe;
            ScratchCountAvg = x.ScratchCountAvg;
            ScratchCountMax = x.ScratchCountMax;
            ScratchCountMin = x.ScratchCountMin;
            ScratchCountStdDev = x.ScratchCountStdDev;
            ScratchTotalAvg = x.ScratchTotalAvg;
            ScratchTotalMax = x.ScratchTotalMax;
            ScratchTotalMin = x.ScratchTotalMin;
            ScratchTotalStdDev = x.ScratchTotalStdDev;
            SumOfDefectsAvg = x.SumOfDefectsAvg;
            SumOfDefectsMax = x.SumOfDefectsMax;
            SumOfDefectsMin = x.SumOfDefectsMin;
            SumOfDefectsStdDev = x.SumOfDefectsStdDev;
            UniqueId = x.UniqueId;
        }
        // One Detail per description (per-slot/wafer values).
        pcl.Detail detail;
        foreach (pcl.Description description in descriptions)
        {
            detail = new pcl.Detail
            {
                Data = "*Data*",
                i = -1,
                Id = 0, //item.Id,
                AreaCount = description.AreaCount,
                AreaTotal = description.AreaTotal,
                Bin1 = description.Bin1,
                Bin2 = description.Bin2,
                Bin3 = description.Bin3,
                Bin4 = description.Bin4,
                Bin5 = description.Bin5,
                Bin6 = description.Bin6,
                Bin7 = description.Bin7,
                Bin8 = description.Bin8,
                Comments = description.Comments,
                Date = description.Date,
                Diameter = description.Diameter,
                Exclusion = description.Exclusion,
                Gain = description.Gain,
                HazeAverage = description.HazeAverage,
                HazePeak = description.HazePeak,
                HazeRegion = description.HazeRegion,
                HazeRng = description.HazeRng,
                HeaderUniqueId = description.HeaderUniqueId,
                LPDCM2 = description.LPDCM2,
                LPDCount = description.LPDCount,
                Laser = description.Laser,
                Mean = description.Mean,
                Recipe = description.Recipe,
                ScratchCount = description.ScratchCount,
                ScratchTotal = description.ScratchTotal,
                Slot = description.Slot,
                Sort = description.Sort,
                StdDev = description.StdDev,
                SumOfDefects = description.SumOfDefects,
                Thresh = description.Thresh,
                Thruput = description.Thruput,
                Title = null,
                UniqueId = description.UniqueId
            };
            Details.Add(detail);
        }
        // Header Date is overwritten with the logistics sequence timestamp on purpose.
        Date = logistics.DateTimeFromSequence.ToString();
        if (UniqueId is null && Details.Any())
            UniqueId = Details[0].HeaderUniqueId;
        // Normalize empty Bin strings to null so they are omitted/ignored downstream.
        for (int i = 0; i < Details.Count; i++)
        {
            if (string.IsNullOrEmpty(Details[i].Bin1))
                Details[i].Bin1 = null;
            if (string.IsNullOrEmpty(Details[i].Bin2))
                Details[i].Bin2 = null;
            if (string.IsNullOrEmpty(Details[i].Bin3))
                Details[i].Bin3 = null;
            if (string.IsNullOrEmpty(Details[i].Bin4))
                Details[i].Bin4 = null;
            if (string.IsNullOrEmpty(Details[i].Bin5))
                Details[i].Bin5 = null;
            if (string.IsNullOrEmpty(Details[i].Bin6))
                Details[i].Bin6 = null;
            if (string.IsNullOrEmpty(Details[i].Bin7))
                Details[i].Bin7 = null;
            if (string.IsNullOrEmpty(Details[i].Bin8))
                Details[i].Bin8 = null;
        }
    }
}

View File

@ -0,0 +1,145 @@
using Adaptation.Eaf.Management.ConfigurationData.CellAutomation;
using Adaptation.Ifx.Eaf.EquipmentConnector.File.Configuration;
using Adaptation.Shared;
using Adaptation.Shared.Duplicator;
using Adaptation.Shared.Methods;
using Infineon.Monitoring.MonA;
using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Text.Json;
namespace Adaptation.FileHandlers.IsManualOIEntry;
/// <summary>
/// Duplicator-mode file handler for manual OpenInsight entries: reads a small JSON file
/// describing the entry and, when EAF-hosted, reports a MANUAL_OI_ENTRY warning to MonA.
/// </summary>
public class FileRead : Shared.FileRead, IFileRead
{
    public FileRead(ISMTP smtp, Dictionary<string, string> fileParameter, string cellInstanceName, string cellInstanceConnectionName, FileConnectorConfiguration fileConnectorConfiguration, string equipmentTypeName, string parameterizedModelObjectDefinitionType, IList<ModelObjectParameterDefinition> modelObjectParameters, string equipmentDictionaryName, Dictionary<string, List<long>> dummyRuns, bool useCyclicalForDescription, bool isEAFHosted) :
        base(new Description(), false, smtp, fileParameter, cellInstanceName, cellInstanceConnectionName, fileConnectorConfiguration, equipmentTypeName, parameterizedModelObjectDefinitionType, modelObjectParameters, equipmentDictionaryName, dummyRuns, useCyclicalForDescription, isEAFHosted)
    {
        _MinFileLength = 10;
        _NullData = string.Empty;
        _Logistics = new Logistics(this);
        // Fail fast when duplicator prerequisites are missing; the connection name
        // is used as the exception message to identify the offending cell instance.
        if (_FileParameter is null)
            throw new Exception(cellInstanceConnectionName);
        if (_ModelObjectParameterDefinitions is null)
            throw new Exception(cellInstanceConnectionName);
        if (!_IsDuplicator)
            throw new Exception(cellInstanceConnectionName);
    }
    // On a successful move, restore LastWriteTime from CreationTime if the file's
    // timestamps went backwards, then delegate to the shared Move implementation.
    void IFileRead.Move(Tuple<string, Test[], JsonElement[], List<FileInfo>> extractResults, Exception exception)
    {
        bool isErrorFile = exception is not null;
        if (!isErrorFile && !string.IsNullOrEmpty(_Logistics.ReportFullPath))
        {
            FileInfo fileInfo = new(_Logistics.ReportFullPath);
            if (fileInfo.Exists && fileInfo.LastWriteTime < fileInfo.CreationTime)
                File.SetLastWriteTime(_Logistics.ReportFullPath, fileInfo.CreationTime);
        }
        Move(extractResults, exception);
    }
    void IFileRead.WaitForThread() => WaitForThread(thread: null, threadExceptions: null);
    // The following members are thin pass-throughs to the shared _Description/_Logistics state.
    string IFileRead.GetEventDescription()
    {
        string result = _Description.GetEventDescription();
        return result;
    }
    List<string> IFileRead.GetHeaderNames()
    {
        List<string> results = _Description.GetHeaderNames();
        return results;
    }
    string[] IFileRead.Move(Tuple<string, Test[], JsonElement[], List<FileInfo>> extractResults, string to, string from, string resolvedFileLocation, Exception exception)
    {
        string[] results = Move(extractResults, to, from, resolvedFileLocation, exception);
        return results;
    }
    JsonProperty[] IFileRead.GetDefault()
    {
        JsonProperty[] results = _Description.GetDefault(this, _Logistics);
        return results;
    }
    Dictionary<string, string> IFileRead.GetDisplayNamesJsonElement()
    {
        Dictionary<string, string> results = _Description.GetDisplayNamesJsonElement(this);
        return results;
    }
    List<IDescription> IFileRead.GetDescriptions(IFileRead fileRead, List<Test> tests, IProcessData processData)
    {
        List<IDescription> results = _Description.GetDescriptions(fileRead, _Logistics, tests, processData);
        return results;
    }
    // Standard extract pipeline: run the private GetExtractResult, normalize a null
    // JsonElement[] to empty, persist PDSF when hosted, and record the duration.
    Tuple<string, Test[], JsonElement[], List<FileInfo>> IFileRead.GetExtractResult(string reportFullPath, string eventName)
    {
        Tuple<string, Test[], JsonElement[], List<FileInfo>> results;
        if (string.IsNullOrEmpty(eventName))
            throw new Exception();
        _ReportFullPath = reportFullPath;
        DateTime dateTime = DateTime.Now;
        results = GetExtractResult(reportFullPath, dateTime);
        if (results.Item3 is null)
            results = new Tuple<string, Test[], JsonElement[], List<FileInfo>>(results.Item1, Array.Empty<Test>(), JsonSerializer.Deserialize<JsonElement[]>("[]"), results.Item4);
        if (results.Item3.Length > 0 && _IsEAFHosted)
            WritePDSF(this, results.Item3);
        UpdateLastTicksDuration(DateTime.Now.Ticks - dateTime.Ticks);
        return results;
    }
    Tuple<string, Test[], JsonElement[], List<FileInfo>> IFileRead.ReExtract()
    {
        Tuple<string, Test[], JsonElement[], List<FileInfo>> results;
        List<string> headerNames = _Description.GetHeaderNames();
        Dictionary<string, string> keyValuePairs = _Description.GetDisplayNamesJsonElement(this);
        results = ReExtract(this, headerNames, keyValuePairs);
        return results;
    }
    void IFileRead.CheckTests(Test[] tests, bool extra)
    {
        if (_Description is not Description)
            throw new Exception();
    }
    // Duplicator handlers never receive timer callbacks.
    void IFileRead.Callback(object state) => throw new Exception(string.Concat("Not ", nameof(_IsDuplicator)));
    // Reads the report JSON, pulls "Equipment" and "Description", and sends a MonA
    // warning for state MANUAL_OI_ENTRY (only when EAF-hosted). Returns empty results.
    private Tuple<string, Test[], JsonElement[], List<FileInfo>> GetExtractResult(string reportFullPath, DateTime dateTime)
    {
        // Unused-parameter guard, matching the convention used throughout this project.
        if (dateTime == DateTime.MinValue)
        { }
        Tuple<string, Test[], JsonElement[], List<FileInfo>> results = new(string.Empty, null, null, new List<FileInfo>());
        string monARessource; // NOTE(review): local name misspells "resource"; kept as-is.
        const string site = "sjc";
        string equipment = string.Empty;
        string description = string.Empty;
        string stateName = "MANUAL_OI_ENTRY";
        string json = File.ReadAllText(reportFullPath);
        JsonElement jsonElement = JsonSerializer.Deserialize<JsonElement>(json);
        foreach (JsonProperty jsonProperty in jsonElement.EnumerateObject())
        {
            if (jsonProperty.Name == "Equipment")
                equipment = jsonProperty.Value.ToString();
            else if (jsonProperty.Name == "Description")
                description = jsonProperty.Value.ToString();
        }
        // Fall back to the cell instance name when the JSON names no equipment.
        if (string.IsNullOrEmpty(equipment))
            monARessource = _CellInstanceName;
        else
            monARessource = equipment;
        const string monInURL = "http://moninhttp.sjc.infineon.com/input/text";
        MonIn monIn = MonIn.GetInstance(monInURL);
        if (_IsEAFHosted)
            _ = monIn.SendStatus(site, monARessource, stateName, State.Warning, description);
        return results;
    }
}

View File

@ -0,0 +1,140 @@
using Adaptation.Eaf.Management.ConfigurationData.CellAutomation;
using Adaptation.Ifx.Eaf.EquipmentConnector.File.Configuration;
using Adaptation.Shared;
using Adaptation.Shared.Duplicator;
using Adaptation.Shared.Methods;
using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Text.Json;
namespace Adaptation.FileHandlers.ToArchive;
/// <summary>
/// Duplicator-mode file handler that archives reports: derives a duplicate directory
/// from the file-name segments, writes scope info, and handles dummy runs.
/// </summary>
public class FileRead : Shared.FileRead, IFileRead
{
    public FileRead(ISMTP smtp, Dictionary<string, string> fileParameter, string cellInstanceName, string cellInstanceConnectionName, FileConnectorConfiguration fileConnectorConfiguration, string equipmentTypeName, string parameterizedModelObjectDefinitionType, IList<ModelObjectParameterDefinition> modelObjectParameters, string equipmentDictionaryName, Dictionary<string, List<long>> dummyRuns, bool useCyclicalForDescription, bool isEAFHosted) :
        base(new Description(), false, smtp, fileParameter, cellInstanceName, cellInstanceConnectionName, fileConnectorConfiguration, equipmentTypeName, parameterizedModelObjectDefinitionType, modelObjectParameters, equipmentDictionaryName, dummyRuns, useCyclicalForDescription, isEAFHosted)
    {
        _MinFileLength = 10;
        _NullData = string.Empty;
        _Logistics = new Logistics(this);
        // Fail fast when duplicator prerequisites are missing; the connection name
        // is used as the exception message to identify the offending cell instance.
        if (_FileParameter is null)
            throw new Exception(cellInstanceConnectionName);
        if (_ModelObjectParameterDefinitions is null)
            throw new Exception(cellInstanceConnectionName);
        if (!_IsDuplicator)
            throw new Exception(cellInstanceConnectionName);
    }
    // On a successful move, restore LastWriteTime from CreationTime if the file's
    // timestamps went backwards, then delegate to the shared Move implementation.
    void IFileRead.Move(Tuple<string, Test[], JsonElement[], List<FileInfo>> extractResults, Exception exception)
    {
        bool isErrorFile = exception is not null;
        if (!isErrorFile && !string.IsNullOrEmpty(_Logistics.ReportFullPath))
        {
            FileInfo fileInfo = new(_Logistics.ReportFullPath);
            if (fileInfo.Exists && fileInfo.LastWriteTime < fileInfo.CreationTime)
                File.SetLastWriteTime(_Logistics.ReportFullPath, fileInfo.CreationTime);
        }
        Move(extractResults, exception);
    }
    void IFileRead.WaitForThread() => WaitForThread(thread: null, threadExceptions: null);
    // The following members are thin pass-throughs to the shared _Description/_Logistics state.
    string IFileRead.GetEventDescription()
    {
        string result = _Description.GetEventDescription();
        return result;
    }
    List<string> IFileRead.GetHeaderNames()
    {
        List<string> results = _Description.GetHeaderNames();
        return results;
    }
    string[] IFileRead.Move(Tuple<string, Test[], JsonElement[], List<FileInfo>> extractResults, string to, string from, string resolvedFileLocation, Exception exception)
    {
        string[] results = Move(extractResults, to, from, resolvedFileLocation, exception);
        return results;
    }
    JsonProperty[] IFileRead.GetDefault()
    {
        JsonProperty[] results = _Description.GetDefault(this, _Logistics);
        return results;
    }
    Dictionary<string, string> IFileRead.GetDisplayNamesJsonElement()
    {
        Dictionary<string, string> results = _Description.GetDisplayNamesJsonElement(this);
        return results;
    }
    List<IDescription> IFileRead.GetDescriptions(IFileRead fileRead, List<Test> tests, IProcessData processData)
    {
        List<IDescription> results = _Description.GetDescriptions(fileRead, _Logistics, tests, processData);
        return results;
    }
    // Standard extract pipeline: run the private GetExtractResult, normalize a null
    // JsonElement[] to empty, persist PDSF when hosted, and record the duration.
    Tuple<string, Test[], JsonElement[], List<FileInfo>> IFileRead.GetExtractResult(string reportFullPath, string eventName)
    {
        Tuple<string, Test[], JsonElement[], List<FileInfo>> results;
        if (string.IsNullOrEmpty(eventName))
            throw new Exception();
        _ReportFullPath = reportFullPath;
        DateTime dateTime = DateTime.Now;
        results = GetExtractResult(reportFullPath, dateTime);
        if (results.Item3 is null)
            results = new Tuple<string, Test[], JsonElement[], List<FileInfo>>(results.Item1, Array.Empty<Test>(), JsonSerializer.Deserialize<JsonElement[]>("[]"), results.Item4);
        if (results.Item3.Length > 0 && _IsEAFHosted)
            WritePDSF(this, results.Item3);
        UpdateLastTicksDuration(DateTime.Now.Ticks - dateTime.Ticks);
        return results;
    }
    Tuple<string, Test[], JsonElement[], List<FileInfo>> IFileRead.ReExtract()
    {
        Tuple<string, Test[], JsonElement[], List<FileInfo>> results;
        List<string> headerNames = _Description.GetHeaderNames();
        Dictionary<string, string> keyValuePairs = _Description.GetDisplayNamesJsonElement(this);
        results = ReExtract(this, headerNames, keyValuePairs);
        return results;
    }
    void IFileRead.CheckTests(Test[] tests, bool extra)
    {
        if (_Description is not Description)
            throw new Exception();
    }
    // Duplicator handlers never receive timer callbacks.
    void IFileRead.Callback(object state) => throw new Exception(string.Concat("Not ", nameof(_IsDuplicator)));
    // Builds "<target>\<segment0>[-<segment2>]" from the report file name, writes scope
    // info there, and runs Shared0607 for dummy runs. Returns empty results.
    private Tuple<string, Test[], JsonElement[], List<FileInfo>> GetExtractResult(string reportFullPath, DateTime dateTime)
    {
        // Unused-parameter guard, matching the convention used throughout this project.
        if (dateTime == DateTime.MinValue)
        { }
        Tuple<string, Test[], JsonElement[], List<FileInfo>> results = new(string.Empty, null, null, new List<FileInfo>());
        _Logistics = new Logistics(this, reportFullPath, useSplitForMID: true);
        SetFileParameterLotIDToLogisticsMID();
        string[] segments = Path.GetFileNameWithoutExtension(reportFullPath).Split('_');
        string duplicateDirectory = string.Concat(_FileConnectorConfiguration.TargetFileLocation, @"\", segments[0]);
        if (segments.Length > 2)
            duplicateDirectory = string.Concat(duplicateDirectory, @"-", segments[2]);
        if (!Directory.Exists(duplicateDirectory))
            _ = Directory.CreateDirectory(duplicateDirectory);
        string logisticsSequence = _Logistics.Sequence.ToString();
        // A dummy run is one whose sequence was pre-registered for this JobID.
        bool isDummyRun = _DummyRuns.Any() && _DummyRuns.ContainsKey(_Logistics.JobID) && _DummyRuns[_Logistics.JobID].Any() && (from l in _DummyRuns[_Logistics.JobID] where l == _Logistics.Sequence select 1).Any();
        List<Tuple<Shared.Properties.IScopeInfo, string>> tuples = new();
        string destinationDirectory = WriteScopeInfo(_ProgressPath, _Logistics, dateTime, duplicateDirectory, tuples);
        if (isDummyRun)
            Shared0607(reportFullPath, duplicateDirectory, logisticsSequence, destinationDirectory);
        return results;
    }
}

View File

@ -0,0 +1,499 @@
using Adaptation.Shared;
using Adaptation.Shared.Methods;
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text.Json;
namespace Adaptation.FileHandlers.pcl;
public class Description : IDescription, Shared.Properties.IDescription
{
    // Test bookkeeping (index/count within the extracted test set).
    public int Test { get; set; }
    public int Count { get; set; }
    public int Index { get; set; }
    // Logistics / run-identification columns.
    public string EventName { get; set; }
    public string NullData { get; set; }
    public string JobID { get; set; }
    public string Sequence { get; set; }
    public string MesEntity { get; set; }
    public string ReportFullPath { get; set; }
    public string ProcessJobID { get; set; }
    public string MID { get; set; }
    // Header columns (see IDescription.GetHeaderNames below).
    public string Date { get; set; }
    public string Employee { get; set; }
    public string Lot { get; set; }
    public string PSN { get; set; }
    public string Reactor { get; set; }
    public string Recipe { get; set; }
    // Detail columns (see IDescription.GetDetailNames below).
    public string Comments { get; set; }
    public string Diameter { get; set; }
    public string Exclusion { get; set; }
    public string Gain { get; set; }
    public string HeaderUniqueId { get; set; }
    public string Laser { get; set; }
    public string ParseErrorText { get; set; }
    public string RDS { get; set; }
    public string Slot { get; set; }
    public string UniqueId { get; set; }
    // Parameter/statistics columns (see IDescription.GetParameterNames below).
    public string AreaCount { get; set; }
    public string AreaCountAvg { get; set; }
    public string AreaCountMax { get; set; }
    public string AreaCountMin { get; set; }
    public string AreaCountStdDev { get; set; }
    public string AreaTotal { get; set; }
    public string AreaTotalAvg { get; set; }
    public string AreaTotalMax { get; set; }
    public string AreaTotalMin { get; set; }
    public string AreaTotalStdDev { get; set; }
    public string Bin1 { get; set; }
    public string Bin2 { get; set; }
    public string Bin3 { get; set; }
    public string Bin4 { get; set; }
    public string Bin5 { get; set; }
    public string Bin6 { get; set; }
    public string Bin7 { get; set; }
    public string Bin8 { get; set; }
    public string HazeAverage { get; set; }
    public string HazeAverageAvg { get; set; }
    public string HazeAverageMax { get; set; }
    public string HazeAverageMin { get; set; }
    public string HazeAverageStdDev { get; set; }
    public string HazePeak { get; set; }
    public string HazeRegion { get; set; }
    public string HazeRegionAvg { get; set; }
    public string HazeRegionMax { get; set; }
    public string HazeRegionMin { get; set; }
    public string HazeRegionStdDev { get; set; }
    public string HazeRng { get; set; }
    public string LPDCM2 { get; set; }
    public string LPDCM2Avg { get; set; }
    public string LPDCM2Max { get; set; }
    public string LPDCM2Min { get; set; }
    public string LPDCM2StdDev { get; set; }
    public string LPDCount { get; set; }
    public string LPDCountAvg { get; set; }
    public string LPDCountMax { get; set; }
    public string LPDCountMin { get; set; }
    public string LPDCountStdDev { get; set; }
    public string Mean { get; set; }
    public string ScratchCount { get; set; }
    public string ScratchCountAvg { get; set; }
    public string ScratchCountMax { get; set; }
    public string ScratchCountMin { get; set; }
    public string ScratchCountStdDev { get; set; }
    public string ScratchTotal { get; set; }
    public string ScratchTotalAvg { get; set; }
    public string ScratchTotalMax { get; set; }
    public string ScratchTotalMin { get; set; }
    public string ScratchTotalStdDev { get; set; }
    public string Sort { get; set; }
    public string StdDev { get; set; }
    public string SumOfDefects { get; set; }
    public string SumOfDefectsAvg { get; set; }
    public string SumOfDefectsMax { get; set; }
    public string SumOfDefectsMin { get; set; }
    public string SumOfDefectsStdDev { get; set; }
    public string Thresh { get; set; }
    public string Thruput { get; set; }
    // Opaque payload slots.
    public object Data { get; set; }
    public object Parameters { get; set; }
string IDescription.GetEventDescription() => "File Has been read and parsed";
List<string> IDescription.GetNames(IFileRead fileRead, Logistics logistics)
{
List<string> results = new();
IDescription description = GetDefault(fileRead, logistics);
string json = JsonSerializer.Serialize(description, description.GetType());
object @object = JsonSerializer.Deserialize<object>(json);
if (@object is not JsonElement jsonElement)
throw new Exception();
foreach (JsonProperty jsonProperty in jsonElement.EnumerateObject())
results.Add(jsonProperty.Name);
return results;
}
List<string> IDescription.GetDetailNames()
{
List<string> results = new()
{
nameof(Comments),
nameof(Diameter),
nameof(Exclusion),
nameof(Gain),
nameof(HeaderUniqueId),
nameof(Laser),
nameof(ParseErrorText),
nameof(RDS),
nameof(Slot),
nameof(UniqueId)
};
return results;
}
List<string> IDescription.GetHeaderNames()
{
List<string> results = new()
{
nameof(Date),
nameof(Employee),
nameof(Lot),
nameof(PSN),
nameof(Reactor),
nameof(Recipe)
};
return results;
}
IDescription IDescription.GetDisplayNames()
{
Description result = GetDisplayNames();
return result;
}
List<string> IDescription.GetParameterNames()
{
List<string> results = new()
{
nameof(AreaCount),
nameof(AreaCountAvg),
nameof(AreaCountMax),
nameof(AreaCountMin),
nameof(AreaCountStdDev),
nameof(AreaTotal),
nameof(AreaTotalAvg),
nameof(AreaTotalMax),
nameof(AreaTotalMin),
nameof(AreaTotalStdDev),
nameof(Bin1),
nameof(Bin2),
nameof(Bin3),
nameof(Bin4),
nameof(Bin5),
nameof(Bin6),
nameof(Bin7),
nameof(Bin8),
nameof(HazeAverage),
nameof(HazeAverageAvg),
nameof(HazeAverageMax),
nameof(HazeAverageMin),
nameof(HazeAverageStdDev),
nameof(HazePeak),
nameof(HazeRegion),
nameof(HazeRegionAvg),
nameof(HazeRegionMax),
nameof(HazeRegionMin),
nameof(HazeRegionStdDev),
nameof(HazeRng),
nameof(LPDCM2),
nameof(LPDCM2Avg),
nameof(LPDCM2Max),
nameof(LPDCM2Min),
nameof(LPDCM2StdDev),
nameof(LPDCount),
nameof(LPDCountAvg),
nameof(LPDCountMax),
nameof(LPDCountMin),
nameof(LPDCountStdDev),
nameof(Mean),
nameof(ScratchCount),
nameof(ScratchCountAvg),
nameof(ScratchCountMax),
nameof(ScratchCountMin),
nameof(ScratchCountStdDev),
nameof(ScratchTotal),
nameof(ScratchTotalAvg),
nameof(ScratchTotalMax),
nameof(ScratchTotalMin),
nameof(ScratchTotalStdDev),
nameof(Sort),
nameof(StdDev),
nameof(SumOfDefects),
nameof(SumOfDefectsAvg),
nameof(SumOfDefectsMax),
nameof(SumOfDefectsMin),
nameof(SumOfDefectsStdDev),
nameof(Thresh),
nameof(Thruput)
};
return results;
}
JsonProperty[] IDescription.GetDefault(IFileRead fileRead, Logistics logistics)
{
JsonProperty[] results;
IDescription description = GetDefault(fileRead, logistics);
string json = JsonSerializer.Serialize(description, description.GetType());
object @object = JsonSerializer.Deserialize<object>(json);
results = ((JsonElement)@object).EnumerateObject().ToArray();
return results;
}
List<string> IDescription.GetPairedParameterNames()
{
List<string> results = new();
return results;
}
List<string> IDescription.GetIgnoreParameterNames(Test test)
{
List<string> results = new();
return results;
}
IDescription IDescription.GetDefaultDescription(IFileRead fileRead, Logistics logistics)
{
Description result = GetDefault(fileRead, logistics);
return result;
}
Dictionary<string, string> IDescription.GetDisplayNamesJsonElement(IFileRead fileRead)
{
Dictionary<string, string> results = new();
IDescription description = GetDisplayNames();
string json = JsonSerializer.Serialize(description, description.GetType());
JsonElement jsonElement = JsonSerializer.Deserialize<JsonElement>(json);
foreach (JsonProperty jsonProperty in jsonElement.EnumerateObject())
{
if (!results.ContainsKey(jsonProperty.Name))
results.Add(jsonProperty.Name, string.Empty);
if (jsonProperty.Value is JsonElement jsonPropertyValue)
results[jsonProperty.Name] = jsonPropertyValue.ToString();
}
return results;
}
// Builds one serializable Description per parsed wafer Detail, pairing each detail
// with its Test entry by index; falls back to a single default Description when
// there is no usable ProcessData (null, wrong type, or no details).
// Assumes tests and iProcessData.Details are index-aligned — TODO confirm with caller.
List<IDescription> IDescription.GetDescriptions(IFileRead fileRead, Logistics logistics, List<Test> tests, IProcessData iProcessData)
{
List<IDescription> results = new();
if (iProcessData is null || !iProcessData.Details.Any() || iProcessData is not ProcessData processData)
results.Add(GetDefault(fileRead, logistics));
else
{
string nullData;
Description description;
// NullData is an object on IFileRead; normalize it to a string once for all rows.
object configDataNullData = fileRead.NullData;
if (configDataNullData is null)
nullData = string.Empty;
else
nullData = configDataNullData.ToString();
for (int i = 0; i < iProcessData.Details.Count; i++)
{
// Skip any detail that is not the pcl Detail type this handler produced.
if (iProcessData.Details[i] is not Detail detail)
continue;
description = new Description
{
Test = (int)tests[i],
Count = tests.Count,
Index = i,
// Logistics / event identity fields.
EventName = fileRead.EventName,
NullData = nullData,
JobID = fileRead.CellInstanceName,
Sequence = logistics.Sequence.ToString(),
MesEntity = logistics.MesEntity,
ReportFullPath = logistics.ReportFullPath,
ProcessJobID = logistics.ProcessJobID,
MID = logistics.MID,
// Lot-level fields shared by every row of this report.
Date = processData.Date,
// NOTE(review): Employee is populated from processData.PSN while PSN below gets the
// same value — looks like a copy/paste slip; confirm against the report's fields.
Employee = processData.PSN,
Lot = processData.Lot,
PSN = processData.PSN,
Reactor = processData.Reactor,
Recipe = processData.Recipe,
// Per-wafer fields from this Detail.
Comments = detail.Comments,
Diameter = detail.Diameter,
Exclusion = detail.Exclusion,
Gain = detail.Gain,
HeaderUniqueId = detail.UniqueId,
Laser = detail.Laser,
ParseErrorText = processData.ParseErrorText,
RDS = processData.RDS,
Slot = detail.Slot,
UniqueId = detail.UniqueId,
// Measurements: per-wafer values come from detail, lot statistics from processData.
AreaCount = detail.AreaCount,
AreaCountAvg = processData.AreaCountAvg,
AreaCountMax = processData.AreaCountMax,
AreaCountMin = processData.AreaCountMin,
AreaCountStdDev = processData.AreaCountStdDev,
AreaTotal = detail.AreaTotal,
AreaTotalAvg = processData.AreaTotalAvg,
AreaTotalMax = processData.AreaTotalMax,
AreaTotalMin = processData.AreaTotalMin,
AreaTotalStdDev = processData.AreaTotalStdDev,
Bin1 = detail.Bin1,
Bin2 = detail.Bin2,
Bin3 = detail.Bin3,
Bin4 = detail.Bin4,
Bin5 = detail.Bin5,
Bin6 = detail.Bin6,
Bin7 = detail.Bin7,
Bin8 = detail.Bin8,
HazeAverage = detail.HazeAverage,
HazeAverageAvg = processData.HazeAverageAvg,
HazeAverageMax = processData.HazeAverageMax,
HazeAverageMin = processData.HazeAverageMin,
HazeAverageStdDev = processData.HazeAverageStdDev,
HazePeak = detail.HazePeak,
HazeRegion = detail.HazeRegion,
HazeRegionAvg = processData.HazeRegionAvg,
HazeRegionMax = processData.HazeRegionMax,
HazeRegionMin = processData.HazeRegionMin,
HazeRegionStdDev = processData.HazeRegionStdDev,
HazeRng = detail.HazeRng,
LPDCM2 = detail.LPDCM2,
LPDCM2Avg = processData.LPDCM2Avg,
LPDCM2Max = processData.LPDCM2Max,
LPDCM2Min = processData.LPDCM2Min,
LPDCM2StdDev = processData.LPDCM2StdDev,
LPDCount = detail.LPDCount,
LPDCountAvg = processData.LPDCountAvg,
LPDCountMax = processData.LPDCountMax,
LPDCountMin = processData.LPDCountMin,
LPDCountStdDev = processData.LPDCountStdDev,
Mean = detail.Mean,
ScratchCount = detail.ScratchCount,
ScratchCountAvg = processData.ScratchCountAvg,
ScratchCountMax = processData.ScratchCountMax,
ScratchCountMin = processData.ScratchCountMin,
ScratchCountStdDev = processData.ScratchCountStdDev,
ScratchTotal = detail.ScratchTotal,
ScratchTotalAvg = processData.ScratchTotalAvg,
ScratchTotalMax = processData.ScratchTotalMax,
ScratchTotalMin = processData.ScratchTotalMin,
ScratchTotalStdDev = processData.ScratchTotalStdDev,
Sort = detail.Sort,
StdDev = detail.StdDev,
SumOfDefects = detail.SumOfDefects,
SumOfDefectsAvg = processData.SumOfDefectsAvg,
SumOfDefectsMax = processData.SumOfDefectsMax,
SumOfDefectsMin = processData.SumOfDefectsMin,
SumOfDefectsStdDev = processData.SumOfDefectsStdDev,
Thresh = detail.Thresh,
Thruput = detail.Thruput
};
results.Add(description);
}
}
return results;
}
// Template instance whose serialized property names supply the display-name keys.
private static Description GetDisplayNames() => new();
// Builds the default/self-describing Description: identity fields come from the live
// fileRead/logistics objects, while every data field is set to its own property name
// via nameof, so the instance doubles as a header/template record.
private Description GetDefault(IFileRead fileRead, Logistics logistics)
{
Description result = new()
{
// Sentinel values marking "no test / no row".
Test = -1,
Count = 0,
Index = -1,
// Logistics / event identity fields.
EventName = fileRead.EventName,
NullData = fileRead.NullData,
JobID = fileRead.CellInstanceName,
Sequence = logistics.Sequence.ToString(),
MesEntity = fileRead.MesEntity,
ReportFullPath = logistics.ReportFullPath,
ProcessJobID = logistics.ProcessJobID,
MID = logistics.MID,
// Everything below is the property's own name (column-header placeholders).
Date = nameof(Date),
Employee = nameof(Employee),
Lot = nameof(Lot),
PSN = nameof(PSN),
Reactor = nameof(Reactor),
Recipe = nameof(Recipe),
//
Comments = nameof(Comments),
Diameter = nameof(Diameter),
Exclusion = nameof(Exclusion),
Gain = nameof(Gain),
HeaderUniqueId = nameof(HeaderUniqueId),
Laser = nameof(Laser),
ParseErrorText = nameof(ParseErrorText),
RDS = nameof(RDS),
Slot = nameof(Slot),
UniqueId = nameof(UniqueId),
//
AreaCount = nameof(AreaCount),
AreaCountAvg = nameof(AreaCountAvg),
AreaCountMax = nameof(AreaCountMax),
AreaCountMin = nameof(AreaCountMin),
AreaCountStdDev = nameof(AreaCountStdDev),
AreaTotal = nameof(AreaTotal),
AreaTotalAvg = nameof(AreaTotalAvg),
AreaTotalMax = nameof(AreaTotalMax),
AreaTotalMin = nameof(AreaTotalMin),
AreaTotalStdDev = nameof(AreaTotalStdDev),
Bin1 = nameof(Bin1),
Bin2 = nameof(Bin2),
Bin3 = nameof(Bin3),
Bin4 = nameof(Bin4),
Bin5 = nameof(Bin5),
Bin6 = nameof(Bin6),
Bin7 = nameof(Bin7),
Bin8 = nameof(Bin8),
HazeAverage = nameof(HazeAverage),
HazeAverageAvg = nameof(HazeAverageAvg),
HazeAverageMax = nameof(HazeAverageMax),
HazeAverageMin = nameof(HazeAverageMin),
HazeAverageStdDev = nameof(HazeAverageStdDev),
HazePeak = nameof(HazePeak),
HazeRegion = nameof(HazeRegion),
HazeRegionAvg = nameof(HazeRegionAvg),
HazeRegionMax = nameof(HazeRegionMax),
HazeRegionMin = nameof(HazeRegionMin),
HazeRegionStdDev = nameof(HazeRegionStdDev),
HazeRng = nameof(HazeRng),
LPDCM2 = nameof(LPDCM2),
LPDCM2Avg = nameof(LPDCM2Avg),
LPDCM2Max = nameof(LPDCM2Max),
LPDCM2Min = nameof(LPDCM2Min),
LPDCM2StdDev = nameof(LPDCM2StdDev),
LPDCount = nameof(LPDCount),
LPDCountAvg = nameof(LPDCountAvg),
LPDCountMax = nameof(LPDCountMax),
LPDCountMin = nameof(LPDCountMin),
LPDCountStdDev = nameof(LPDCountStdDev),
Mean = nameof(Mean),
ScratchCount = nameof(ScratchCount),
ScratchCountAvg = nameof(ScratchCountAvg),
ScratchCountMax = nameof(ScratchCountMax),
ScratchCountMin = nameof(ScratchCountMin),
ScratchCountStdDev = nameof(ScratchCountStdDev),
ScratchTotal = nameof(ScratchTotal),
ScratchTotalAvg = nameof(ScratchTotalAvg),
ScratchTotalMax = nameof(ScratchTotalMax),
ScratchTotalMin = nameof(ScratchTotalMin),
ScratchTotalStdDev = nameof(ScratchTotalStdDev),
Sort = nameof(Sort),
StdDev = nameof(StdDev),
SumOfDefects = nameof(SumOfDefects),
SumOfDefectsAvg = nameof(SumOfDefectsAvg),
SumOfDefectsMax = nameof(SumOfDefectsMax),
SumOfDefectsMin = nameof(SumOfDefectsMin),
SumOfDefectsStdDev = nameof(SumOfDefectsStdDev),
Thresh = nameof(Thresh),
Thruput = nameof(Thruput),
//
Data = nameof(Data),
Parameters = nameof(Parameters)
};
return result;
}
}

View File

@ -0,0 +1,45 @@
namespace Adaptation.FileHandlers.pcl;
/// <summary>
/// One wafer-summary row parsed from a TENCOR PCL report page
/// (populated by ProcessData.ParseWaferSummary). All measurement fields are
/// kept as the raw strings read from the report, untouched.
/// </summary>
public class Detail
{
public long Id { get; set; }
public string AreaCount { get; set; }
public string AreaTotal { get; set; }
public string Bin1 { get; set; }
public string Bin2 { get; set; }
public string Bin3 { get; set; }
public string Bin4 { get; set; }
public string Bin5 { get; set; }
public string Bin6 { get; set; }
public string Bin7 { get; set; }
public string Bin8 { get; set; }
public string Comments { get; set; }
public string Date { get; set; }
public string Diameter { get; set; }
public string Exclusion { get; set; }
public string Gain { get; set; }
public string HazeAverage { get; set; }
public string HazePeak { get; set; }
public string HazeRegion { get; set; }
public string HazeRng { get; set; }
// UniqueId of the lot-summary header this wafer row belongs to.
public string HeaderUniqueId { get; set; }
public string LPDCM2 { get; set; }
public string LPDCount { get; set; }
public string Laser { get; set; }
public string Mean { get; set; }
public string Recipe { get; set; }
public string ScratchCount { get; set; }
public string ScratchTotal { get; set; }
// Slot label as printed in the report, e.g. "*01" (asterisk prefix kept).
public string Slot { get; set; }
public string Sort { get; set; }
public string StdDev { get; set; }
public string SumOfDefects { get; set; }
public string Thresh { get; set; }
public string Thruput { get; set; }
public string Title { get; set; }
public string UniqueId { get; set; }
public string Data { get; set; }
// NOTE(review): lower-case name violates IDE1006 (see .vscode/format-report.json),
// but it is part of the public surface set by ProcessData and possibly serialized —
// confirm no external consumers before renaming to "I".
public int i { get; set; }
}

View File

@ -0,0 +1,131 @@
using Adaptation.Eaf.Management.ConfigurationData.CellAutomation;
using Adaptation.Ifx.Eaf.EquipmentConnector.File.Configuration;
using Adaptation.Shared;
using Adaptation.Shared.Methods;
using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Reflection;
using System.Text.Json;
using System.Text.RegularExpressions;
namespace Adaptation.FileHandlers.pcl;
/// <summary>
/// File handler for TENCOR PCL reports: converts the raw PCL to PDF via GhostPCL,
/// parses per-wafer and lot-summary pages (see ProcessData), and exposes the results
/// through the IFileRead contract. Not a duplicator cell.
/// </summary>
public class FileRead : Shared.FileRead, IFileRead
{
// Full path to the GhostPCL executable (gpcl6win64.exe) next to the entry assembly.
private readonly string _GhostPCLFileName;
public FileRead(ISMTP smtp, Dictionary<string, string> fileParameter, string cellInstanceName, string cellInstanceConnectionName, FileConnectorConfiguration fileConnectorConfiguration, string equipmentTypeName, string parameterizedModelObjectDefinitionType, IList<ModelObjectParameterDefinition> modelObjectParameters, string equipmentDictionaryName, Dictionary<string, List<long>> dummyRuns, bool useCyclicalForDescription, bool isEAFHosted) :
base(new Description(), true, smtp, fileParameter, cellInstanceName, cellInstanceConnectionName, fileConnectorConfiguration, equipmentTypeName, parameterizedModelObjectDefinitionType, modelObjectParameters, equipmentDictionaryName, dummyRuns, useCyclicalForDescription, isEAFHosted)
{
_MinFileLength = 10;
_NullData = string.Empty;
_Logistics = new Logistics(this);
// Fail fast on misconfiguration; the connection name identifies the bad cell instance.
if (_FileParameter is null)
throw new Exception(cellInstanceConnectionName);
if (_ModelObjectParameterDefinitions is null)
throw new Exception(cellInstanceConnectionName);
if (_IsDuplicator)
throw new Exception(cellInstanceConnectionName);
_GhostPCLFileName = string.Concat(Path.GetDirectoryName(Assembly.GetEntryAssembly().Location), @"\gpcl6win64.exe");
// GhostPCL is only required when actually hosted in EAF (conversion runs there).
if (_IsEAFHosted && !File.Exists(_GhostPCLFileName))
throw new Exception("Ghost PCL FileName doesn't Exist!");
}
void IFileRead.Move(Tuple<string, Test[], JsonElement[], List<FileInfo>> extractResults, Exception exception) => Move(extractResults, exception);
void IFileRead.WaitForThread() => WaitForThread(thread: null, threadExceptions: null);
// The following members delegate to the shared Description/base implementations.
string IFileRead.GetEventDescription()
{
string result = _Description.GetEventDescription();
return result;
}
List<string> IFileRead.GetHeaderNames()
{
List<string> results = _Description.GetHeaderNames();
return results;
}
string[] IFileRead.Move(Tuple<string, Test[], JsonElement[], List<FileInfo>> extractResults, string to, string from, string resolvedFileLocation, Exception exception)
{
string[] results = Move(extractResults, to, from, resolvedFileLocation, exception);
return results;
}
JsonProperty[] IFileRead.GetDefault()
{
JsonProperty[] results = _Description.GetDefault(this, _Logistics);
return results;
}
Dictionary<string, string> IFileRead.GetDisplayNamesJsonElement()
{
Dictionary<string, string> results = _Description.GetDisplayNamesJsonElement(this);
return results;
}
List<IDescription> IFileRead.GetDescriptions(IFileRead fileRead, List<Test> tests, IProcessData processData)
{
List<IDescription> results = _Description.GetDescriptions(fileRead, _Logistics, tests, processData);
return results;
}
// Entry point for each new report file; times the extraction and persists PDSF when hosted.
Tuple<string, Test[], JsonElement[], List<FileInfo>> IFileRead.GetExtractResult(string reportFullPath, string eventName)
{
Tuple<string, Test[], JsonElement[], List<FileInfo>> results;
if (string.IsNullOrEmpty(eventName))
throw new Exception();
_ReportFullPath = reportFullPath;
DateTime dateTime = DateTime.Now;
results = GetExtractResult(reportFullPath, dateTime);
// Normalize a null JsonElement[] to empty so downstream code never sees null.
if (results.Item3 is null)
results = new Tuple<string, Test[], JsonElement[], List<FileInfo>>(results.Item1, Array.Empty<Test>(), JsonSerializer.Deserialize<JsonElement[]>("[]"), results.Item4);
if (results.Item3.Length > 0 && _IsEAFHosted)
WritePDSF(this, results.Item3);
UpdateLastTicksDuration(DateTime.Now.Ticks - dateTime.Ticks);
return results;
}
Tuple<string, Test[], JsonElement[], List<FileInfo>> IFileRead.ReExtract()
{
Tuple<string, Test[], JsonElement[], List<FileInfo>> results;
List<string> headerNames = _Description.GetHeaderNames();
Dictionary<string, string> keyValuePairs = _Description.GetDisplayNamesJsonElement(this);
results = ReExtract(this, headerNames, keyValuePairs);
return results;
}
// These two are duplicator-only operations; this handler is explicitly not a duplicator.
void IFileRead.CheckTests(Test[] tests, bool extra) => throw new Exception(string.Concat("Not ", nameof(_IsDuplicator)));
void IFileRead.Callback(object state) => throw new Exception(string.Concat("Not ", nameof(_IsDuplicator)));
// Core extraction: builds logistics from the file, derives the MID from
// Reactor-RDS-PSN, and delegates parsing to ProcessData.
private Tuple<string, Test[], JsonElement[], List<FileInfo>> GetExtractResult(string reportFullPath, DateTime dateTime)
{
Tuple<string, Test[], JsonElement[], List<FileInfo>> results = new(string.Empty, null, null, new List<FileInfo>());
_Logistics = new Logistics(this, reportFullPath, useSplitForMID: true);
SetFileParameterLotIDToLogisticsMID();
// Files below the minimum length are passed through untouched (no parse attempt).
if (reportFullPath.Length < _MinFileLength)
results.Item4.Add(new FileInfo(reportFullPath));
else
{
IProcessData iProcessData = new ProcessData(this, _Logistics, results.Item4, _GhostPCLFileName);
if (iProcessData is ProcessData processData)
{
string mid = string.Concat(processData.Reactor, "-", processData.RDS, "-", processData.PSN);
// Strip filesystem-illegal characters and anything after the first line break.
mid = Regex.Replace(mid, @"[\\,\/,\:,\*,\?,\"",\<,\>,\|]", "_").Split('\r')[0].Split('\n')[0];
_Logistics.MID = mid;
SetFileParameterLotID(mid);
_Logistics.ProcessJobID = processData.Reactor;
}
if (!iProcessData.Details.Any())
throw new Exception(string.Concat("No Data - ", dateTime.Ticks));
results = iProcessData.GetResults(this, _Logistics, results.Item4);
}
return results;
}
}

View File

@ -0,0 +1,672 @@
using Adaptation.Shared;
using Adaptation.Shared.Methods;
using log4net;
using System;
using System.Collections.Generic;
using System.Data;
using System.Diagnostics;
using System.Globalization;
using System.IO;
using System.Linq;
using System.Text;
using System.Text.Json;
using System.Text.RegularExpressions;
namespace Adaptation.FileHandlers.pcl;
public class ProcessData : IProcessData
{
private int _I; // parse cursor: current index into _Data
private string _Data; // full text of the PDF page currently being parsed
private readonly ILog _Log;
private readonly List<object> _Details; // per-wafer Detail rows, exposed via IProcessData.Details
public string JobID { get; set; }
public string MesEntity { get; set; }
// Lot-summary statistics parsed from the report's last page (ParseLotSummary);
// all values are kept as raw report strings.
public string AreaCountAvg { get; set; }
public string AreaCountMax { get; set; }
public string AreaCountMin { get; set; }
public string AreaCountStdDev { get; set; }
public string AreaTotalAvg { get; set; }
public string AreaTotalMax { get; set; }
public string AreaTotalMin { get; set; }
public string AreaTotalStdDev { get; set; }
public string Date { get; set; }
public string HazeAverageAvg { get; set; }
public string HazeAverageMax { get; set; }
public string HazeAverageMin { get; set; }
public string HazeAverageStdDev { get; set; }
public string HazeRegionAvg { get; set; }
public string HazeRegionMax { get; set; }
public string HazeRegionMin { get; set; }
public string HazeRegionStdDev { get; set; }
public string LPDCM2Avg { get; set; }
public string LPDCM2Max { get; set; }
public string LPDCM2Min { get; set; }
public string LPDCM2StdDev { get; set; }
public string LPDCountAvg { get; set; }
public string LPDCountMax { get; set; }
public string LPDCountMin { get; set; }
public string LPDCountStdDev { get; set; }
public string Lot { get; set; }
public string ParseErrorText { get; set; }
// Reactor / RDS / PSN are split out of the Lot identifier ("Reactor-RDS-PSN").
public string PSN { get; set; }
public string RDS { get; set; }
public string Reactor { get; set; }
public string Recipe { get; set; }
public string ScratchCountAvg { get; set; }
public string ScratchCountMax { get; set; }
public string ScratchCountMin { get; set; }
public string ScratchCountStdDev { get; set; }
public string ScratchTotalAvg { get; set; }
public string ScratchTotalMax { get; set; }
public string ScratchTotalMin { get; set; }
public string ScratchTotalStdDev { get; set; }
public string SumOfDefectsAvg { get; set; }
public string SumOfDefectsMax { get; set; }
public string SumOfDefectsMin { get; set; }
public string SumOfDefectsStdDev { get; set; }
public string UniqueId { get; set; }
List<object> Shared.Properties.IProcessData.Details => _Details;
// Parses the PCL report identified by logistics immediately on construction;
// parsed per-wafer rows land in _Details and generated/source files are
// appended to fileInfoCollection (which is cleared first, then repopulated).
public ProcessData(IFileRead fileRead, Logistics logistics, List<FileInfo> fileInfoCollection, string ghostPCLFileName)
{
fileInfoCollection.Clear();
_Details = new List<object>();
_I = 0;
_Data = string.Empty;
JobID = logistics.JobID;
MesEntity = logistics.MesEntity;
_Log = LogManager.GetLogger(typeof(ProcessData));
Parse(fileRead, logistics, fileInfoCollection, ghostPCLFileName);
}
string IProcessData.GetCurrentReactor(IFileRead fileRead, Logistics logistics, Dictionary<string, string> reactors) => throw new Exception(string.Concat("See ", nameof(Parse)));
// Materializes the parsed details into the extract-result tuple: one Tencor Test per
// detail, Descriptions built by the fileRead, then round-tripped through JSON to
// JsonElement[] for the EAF pipeline. Throws if tests/descriptions fall out of sync.
// Fix: the original foreach declared an unused loop variable (the detail itself was
// never read) — replaced with a count-based loop; the query-syntax cast is replaced
// with the idiomatic Enumerable.Cast.
Tuple<string, Test[], JsonElement[], List<FileInfo>> IProcessData.GetResults(IFileRead fileRead, Logistics logistics, List<FileInfo> fileInfoCollection)
{
    Tuple<string, Test[], JsonElement[], List<FileInfo>> results;
    List<Test> tests = new();
    // Every parsed detail corresponds to one Tencor test.
    for (int i = 0; i < _Details.Count; i++)
        tests.Add(Test.Tencor);
    List<IDescription> descriptions = fileRead.GetDescriptions(fileRead, tests, this);
    if (tests.Count != descriptions.Count)
        throw new Exception();
    // Sanity check: each description must be our Description type and agree on its Test.
    for (int i = 0; i < tests.Count; i++)
    {
        if (descriptions[i] is not Description description)
            throw new Exception();
        if (description.Test != (int)tests[i])
            throw new Exception();
    }
    List<Description> fileReadDescriptions = descriptions.Cast<Description>().ToList();
    string json = JsonSerializer.Serialize(fileReadDescriptions, fileReadDescriptions.GetType());
    JsonElement[] jsonElements = JsonSerializer.Deserialize<JsonElement[]>(json);
    results = new Tuple<string, Test[], JsonElement[], List<FileInfo>>(logistics.Logistics1[0], tests.ToArray(), jsonElements, fileInfoCollection);
    return results;
}
/// <summary>
/// Test and fix a data line from the Lot Summary page if there are two values that are merged.
/// When the line split yields fewer than the expected 9 columns, values that exceed their
/// fixed column width are assumed to be two merged numbers and are split back apart,
/// working from the rightmost column leftward.
/// </summary>
/// <param name="toEol">data line from Lot Summary; replaced in place with the repaired array</param>
private void FixToEolArray(ref string[] toEol)
{
const int MAX_COLUMNS = 9;
// Fixed character widths of the 9 statistics columns in the report layout.
int[] mColumnWidths = new int[MAX_COLUMNS] { 8, 6, 6, 6, 6, 7, 7, 5, 7 };
// is it short at least one data point
if (toEol.Length < MAX_COLUMNS)
{
_Log.Debug($"****FixToEolArray - Starting array:");
_Log.Debug(toEol);
_Log.Debug($"****FixToEolArray - Column widths:");
_Log.Debug(mColumnWidths);
string leftVal, rightVal;
// size up and assign a working list
List<string> toEolList = new(toEol);
if (string.IsNullOrEmpty(toEolList[toEolList.Count - 1]))
toEolList.RemoveAt(toEolList.Count - 1); // removes a null element at end
_Log.Debug($"****FixToEolArray - New toEolList:");
_Log.Debug(toEolList);
// Pad at the front so indexes line up with the rightmost columns.
for (int i = toEolList.Count; i < MAX_COLUMNS; i++)
toEolList.Insert(0, ""); // insert to top of list
_Log.Debug(toEolList);
// start at the end
for (int i = MAX_COLUMNS - 1; i >= 0; i--)
{
// test for a bad value - does it have too many characters
_Log.Debug($"****FixToEolArray - toEolList[i].Length: {toEolList[i].Length}, mColumnWidths[i]: {mColumnWidths[i]}");
if (toEolList[i].Length > mColumnWidths[i])
{
// split it up into its two parts: the overflow prefix belongs to the column to the left
leftVal = toEolList[i].Substring(0, toEolList[i].Length - mColumnWidths[i]);
rightVal = toEolList[i].Substring(leftVal.Length);
_Log.Debug($"****FixToEolArray - Split leftVal: {leftVal}");
_Log.Debug($"****FixToEolArray - Split rightVal: {rightVal}");
// insert new value
toEolList[i] = rightVal;
toEolList.Insert(i, leftVal);
if (string.IsNullOrEmpty(toEolList[0]))
toEolList.RemoveAt(0); // removes a null element at end
_Log.Debug($"****FixToEolArray - Fixed toEolList:");
_Log.Debug(toEolList);
}
}
toEol = toEolList.ToArray();
_Log.Debug($"****FixToEolArray - Ending array:");
_Log.Debug(toEol);
}
}
// Advances the cursor to just past the next occurrence of text;
// if text is not found, parks the cursor at the end of the data.
private void ScanPast(string text)
{
    int index = _Data.IndexOf(text, _I);
    _I = index > -1 ? index + text.Length : _Data.Length;
}
// Returns the trimmed text between the cursor and the next occurrence of text,
// advancing the cursor past the delimiter; returns the trimmed remainder when
// the delimiter is absent.
private string GetBefore(string text)
{
    int index = _Data.IndexOf(text, _I);
    if (index < 0)
    {
        string remainder = _Data.Substring(_I);
        _I = _Data.Length;
        return remainder.Trim();
    }
    string segment = _Data.Substring(_I, index - _I);
    _I = index + text.Length;
    return segment.Trim();
}
// Same as GetBefore(text) but, when trim is false, preserves surrounding whitespace.
private string GetBefore(string text, bool trim)
{
    if (trim)
        return GetBefore(text);
    int index = _Data.IndexOf(text, _I);
    if (index < 0)
    {
        string remainder = _Data.Substring(_I);
        _I = _Data.Length;
        return remainder;
    }
    string segment = _Data.Substring(_I, index - _I);
    _I = index + text.Length;
    return segment;
}
// True when every character is whitespace (vacuously true for the empty string).
// Unlike string.IsNullOrWhiteSpace, a null argument is not tolerated here.
private static bool IsNullOrWhiteSpace(string text)
{
    foreach (char c in text)
    {
        if (!char.IsWhiteSpace(c))
            return false;
    }
    return true;
}
// True when the rest of the current line (cursor to next newline, or to the end
// of the data) contains only whitespace; does not move the cursor.
private bool IsBlankLine()
{
    int newline = _Data.IndexOf("\n", _I);
    string line = newline > -1 ? _Data.Substring(_I, newline - _I) : _Data.Substring(_I);
    return IsNullOrWhiteSpace(line);
}
private string GetToEOL() => GetBefore("\n");
// Consumes the rest of the current line; trims only when requested.
private string GetToEOL(bool trim) => trim ? GetToEOL() : GetBefore("\n", false);
private string GetToText(string text) => _Data.Substring(_I, _Data.IndexOf(text, _I) - _I).Trim();
// Consumes and returns the next whitespace-delimited token, advancing the cursor
// past it (empty string at end of data).
// Fix: the original tested whitespace by allocating a one-character Substring per
// character and passing it through IsNullOrWhiteSpace; char.IsWhiteSpace on the
// indexed character is semantically identical (a length-1 string is "all whitespace"
// iff its single char is) and allocation-free.
private string GetToken()
{
    // Skip leading whitespace.
    while (_I < _Data.Length && char.IsWhiteSpace(_Data[_I]))
        ++_I;
    // Scan to the end of the token.
    int end = _I;
    while (end < _Data.Length && !char.IsWhiteSpace(_Data[end]))
        ++end;
    string token = _Data.Substring(_I, end - _I);
    _I = end;
    return token.Trim();
}
// Reads the rest of the current line without consuming it: the cursor is saved,
// the line is taken via GetToEOL, and the cursor is restored.
private string PeekNextLine()
{
    int saved = _I;
    string line = GetToEOL();
    _I = saved;
    return line;
}
// Parses the Lot Summary page (the last PDF page of the report): recipe, lot id,
// the slot map (one "*NN" key per wafer), and the Min/Max/Average/StdDev statistics
// rows. The parse is strictly sequential over the page text via ScanPast/GetBefore,
// so statement order here is load-bearing.
private void ParseLotSummary(IFileRead fileRead, ILogistics logistics, string headerFileName, Dictionary<string, string> pages, Dictionary<string, List<Detail>> slots)
{
// fileRead is unused here; this no-op guard only silences the unused-parameter warning.
if (fileRead is null)
{ }
_I = 0;
//string headerText;
//string altHeaderFileName = Path.ChangeExtension(headerFileName, ".txt");
//if (File.Exists(altHeaderFileName))
// headerText = File.ReadAllText(altHeaderFileName);
//else
//{
// //Pdfbox, IKVM.AWT.WinForms
// org.apache.pdfbox.pdmodel.PDDocument pdfDocument = org.apache.pdfbox.pdmodel.PDDocument.load(headerFileName);
// org.apache.pdfbox.util.PDFTextStripper stripper = new org.apache.pdfbox.util.PDFTextStripper();
// headerText = stripper.getText(pdfDocument);
// pdfDocument.close();
// File.AppendAllText(altHeaderFileName, headerText);
//}
//result.Id = h;
//result.Title = h;
//result.Zone = h;
//result.PSN = h;
//result.Layer = h;
ParseErrorText = string.Empty;
if (!pages.ContainsKey(headerFileName))
throw new Exception();
_I = 0;
_Data = pages[headerFileName];
ScanPast("Date:");
Date = GetToEOL();
ScanPast("Recipe ID:");
Recipe = GetBefore("LotID:");
Recipe = Recipe.Replace(";", "");
// The lot id is terminated by a bracketed suffix whose exact form varies.
if (_Data.Contains("[]"))
Lot = GetBefore("[]");
else if (_Data.Contains("[7]"))
Lot = GetBefore("[7]");
else
Lot = GetBefore("[");
// Remove illegal characters \/:*?"<>| found in the Lot.
Lot = Regex.Replace(Lot, @"[\\,\/,\:,\*,\?,\"",\<,\>,\|]", "_").Split('\r')[0].Split('\n')[0];
// determine number of wafers and their slot numbers
_Log.Debug(_Data.Substring(_I));
string slot;
string toEOL;
// Each wafer line on the summary is introduced by a '*'; the slot key keeps that prefix.
int slotCount = _Data.Substring(_I).Split('*').Length - 1;
_Log.Debug($"****HeaderFile - Slot Count: {slotCount}.");
for (int i = 0; i < slotCount; i++)
{
ScanPast("*");
toEOL = GetToEOL(false);
slot = string.Concat("*", toEOL.Substring(0, 2));
if (!slots.ContainsKey(slot))
slots.Add(slot, new List<Detail>());
}
_Log.Debug($"****HeaderFile - Slots:");
_Log.Debug(slots);
// The four statistics rows share the same 9-column layout; FixToEolArray repairs
// columns that ran together in the PDF text extraction.
ScanPast("Min:");
string[] toEol1 = GetToEOL(false).Trim().Split(' ');
_Log.Debug($"****HeaderFile - toEol1 Count: {toEol1.Length}.");
FixToEolArray(ref toEol1);
LPDCountMin = toEol1[0].Trim();
LPDCM2Min = toEol1[1].Trim();
AreaCountMin = toEol1[2].Trim();
AreaTotalMin = toEol1[3].Trim();
ScratchCountMin = toEol1[4].Trim();
ScratchTotalMin = toEol1[5].Trim();
SumOfDefectsMin = toEol1[6].Trim();
HazeRegionMin = toEol1[7].Trim();
HazeAverageMin = toEol1[8].Trim();
ScanPast("Max:");
string[] toEol2 = GetToEOL(false).Trim().Split(' ');
_Log.Debug($"****HeaderFile - toEol2 Count: {toEol2.Length}.");
FixToEolArray(ref toEol2);
LPDCountMax = toEol2[0].Trim();
LPDCM2Max = toEol2[1].Trim();
AreaCountMax = toEol2[2].Trim();
AreaTotalMax = toEol2[3].Trim();
ScratchCountMax = toEol2[4].Trim();
ScratchTotalMax = toEol2[5].Trim();
SumOfDefectsMax = toEol2[6].Trim();
HazeRegionMax = toEol2[7].Trim();
HazeAverageMax = toEol2[8].Trim();
ScanPast("Average:");
string[] toEol3 = GetToEOL(false).Trim().Split(' ');
_Log.Debug($"****HeaderFile - toEol3 Count: {toEol3.Length}.");
FixToEolArray(ref toEol3);
LPDCountAvg = toEol3[0].Trim();
LPDCM2Avg = toEol3[1].Trim();
AreaCountAvg = toEol3[2].Trim();
AreaTotalAvg = toEol3[3].Trim();
ScratchCountAvg = toEol3[4].Trim();
ScratchTotalAvg = toEol3[5].Trim();
SumOfDefectsAvg = toEol3[6].Trim();
HazeRegionAvg = toEol3[7].Trim();
HazeAverageAvg = toEol3[8].Trim();
ScanPast("Std Dev:");
string[] toEol4 = GetToEOL(false).Trim().Split(' ');
_Log.Debug($"****HeaderFile - toEol4 Count: {toEol4.Length}.");
FixToEolArray(ref toEol4);
LPDCountStdDev = toEol4[0].Trim();
LPDCM2StdDev = toEol4[1].Trim();
AreaCountStdDev = toEol4[2].Trim();
AreaTotalStdDev = toEol4[3].Trim();
ScratchCountStdDev = toEol4[4].Trim();
ScratchTotalStdDev = toEol4[5].Trim();
SumOfDefectsStdDev = toEol4[6].Trim();
HazeRegionStdDev = toEol4[7].Trim();
HazeAverageStdDev = toEol4[8].Trim();
// Lot is formatted Reactor-RDS-PSN; missing segments simply stay unset.
string[] segments = Lot.Split('-');
if (segments.Length > 0)
Reactor = segments[0];
if (segments.Length > 1)
RDS = segments[1];
if (segments.Length > 2)
PSN = segments[2];
// Example of header.UniqueId is TENCOR1_33-289217-4693_201901300556533336
UniqueId = string.Format("{0}_{1}_{2}", logistics.JobID, Lot, Path.GetFileNameWithoutExtension(logistics.ReportFullPath));
}
// Parses one wafer-summary page into a Detail. Like ParseLotSummary, this is a
// strictly sequential label-by-label scan of the page text, so statement order is
// load-bearing. Must run after ParseLotSummary (it reads the lot's UniqueId).
private Detail ParseWaferSummary(string waferFileName, Dictionary<string, string> pages)
{
Detail result = new() { Data = "*Data*", i = -1, };
_I = 0;
//string waferText;
//string altWaferFileName = Path.ChangeExtension(waferFileName, ".txt");
//if (File.Exists(altWaferFileName))
// waferText = File.ReadAllText(altWaferFileName);
//else
//{
// //Pdfbox, IKVM.AWT.WinForms
// org.apache.pdfbox.pdmodel.PDDocument pdfDocument = org.apache.pdfbox.pdmodel.PDDocument.load(waferFileName);
// org.apache.pdfbox.util.PDFTextStripper dataStripper = new org.apache.pdfbox.util.PDFTextStripper();
// waferText = dataStripper.getText(pdfDocument);
// pdfDocument.close();
// File.AppendAllText(altWaferFileName, waferText);
//}
List<string> stringList = new();
result.HeaderUniqueId = UniqueId;
result.Id = 0;
result.Title = null;
if (!pages.ContainsKey(waferFileName))
throw new Exception();
_I = 0;
_Data = pages[waferFileName];
ScanPast("Date:");
result.Date = GetToEOL();
ScanPast("ID#");
result.Slot = GetToEOL();
// Overlong slot labels are truncated to a recognizable prefix.
if (result.Slot.Length > 5)
result.Slot = string.Concat(result.Slot.Substring(0, 5), "... - ***");
//result.Slot = result.Slot.Replace("*", "");
ScanPast("Comments:");
result.Comments = GetToEOL();
ScanPast("Sort:");
result.Sort = GetToEOL();
ScanPast("LPD Count:");
result.LPDCount = GetToEOL();
ScanPast("LPD / cm2:");
result.LPDCM2 = GetToEOL();
// Collect the variable-length "Bin N:" lines; the loop stops on the first
// non-Bin label, which is "Mean:" — its value is therefore read right after.
while (GetBefore(":").Contains("Bin"))
stringList.Add(GetToEOL());
if (stringList.Count >= 1)
result.Bin1 = stringList[0];
if (stringList.Count >= 2)
result.Bin2 = stringList[1];
if (stringList.Count >= 3)
result.Bin3 = stringList[2];
if (stringList.Count >= 4)
result.Bin4 = stringList[3];
if (stringList.Count >= 5)
result.Bin5 = stringList[4];
if (stringList.Count >= 6)
result.Bin6 = stringList[5];
if (stringList.Count >= 7)
result.Bin7 = stringList[6];
if (stringList.Count >= 8)
result.Bin8 = stringList[7];
// The "Mean:" label was already consumed by the loop's final GetBefore(":").
result.Mean = GetToEOL();
ScanPast("Std Dev:");
result.StdDev = GetToEOL();
ScanPast("Area Count:");
result.AreaCount = GetToEOL();
ScanPast("Area Total:");
result.AreaTotal = GetToEOL();
ScanPast("Scratch Count:");
result.ScratchCount = GetToEOL();
ScanPast("Scratch Total:");
result.ScratchTotal = GetToEOL();
ScanPast("Sum of All Defects:");
result.SumOfDefects = GetToEOL();
ScanPast("Haze Region:");
result.HazeRegion = GetToEOL();
ScanPast("Haze Average:");
result.HazeAverage = GetToEOL();
ScanPast("Haze Peak:");
result.HazePeak = GetToEOL();
ScanPast("Laser:");
result.Laser = GetBefore("Gain:");
result.Gain = GetBefore("Diameter:");
result.Diameter = GetToEOL();
ScanPast("Thresh:")
;
result.Thresh = GetBefore("Exclusion:");
result.Exclusion = GetToEOL();
ScanPast("Haze Rng:");
result.HazeRng = GetBefore("Thruput:");
result.Thruput = GetToEOL();
ScanPast("Recipe ID:");
result.Recipe = GetToEOL();
// Per-wafer id = lot UniqueId + slot number (asterisks and leading zeros stripped).
result.UniqueId = string.Format("{0}_{1}", UniqueId, result.Slot.Replace("*", string.Empty).TrimStart('0'));
return result;
}
/// <summary>
/// Convert the raw data file to parsable file format - in this case from PCL to PDF,
/// by shelling out to GhostPCL. Skips the conversion when the PDF already exists.
/// </summary>
/// <param name="ghostPCLFileName">full path to the GhostPCL executable (gpcl6win64.exe)</param>
/// <param name="logistics">logistics record whose ReportFullPath is the PCL source file</param>
/// <returns>full path of the PDF produced next to the source file</returns>
/// <exception cref="Exception">when the process fails to start or the PDF is not created</exception>
private static string ConvertSourceFileToPdf(string ghostPCLFileName, Logistics logistics)
{
    string result = Path.ChangeExtension(logistics.ReportFullPath, ".pdf");
    if (!File.Exists(result))
    {
        //string arguments = string.Concat("-i \"", sourceFile, "\" -o \"", result, "\"");
        string arguments = string.Concat("-dSAFER -dBATCH -dNOPAUSE -sOutputFile=\"", result, "\" -sDEVICE=pdfwrite \"", logistics.ReportFullPath, "\"");
        //Process process = Process.Start(configData.LincPDFCFileName, arguments);
        // Fix: dispose the Process handle (the original leaked it) and fail clearly
        // when the process cannot be started instead of a NullReferenceException.
        using (Process process = Process.Start(ghostPCLFileName, arguments))
        {
            if (process is null)
                throw new Exception("GhostPCL process failed to start!");
            // As before, a timeout is not fatal by itself; the File.Exists check below decides.
            _ = process.WaitForExit(30000);
        }
        if (!File.Exists(result))
            throw new Exception("PDF file wasn't created");
    }
    return result;
}
private void Parse(IFileRead fileRead, Logistics logistics, List<FileInfo> fileInfoCollection, string ghostPCLFileName)
{
object item;
string pageText;
string pagePDFFile;
string pageTextFile;
List<string> sourceFiles = new();
List<string> missingSlots = new();
List<Detail> dataFiles = new();
Dictionary<string, string> pages = new();
string sourcePath = Path.GetDirectoryName(logistics.ReportFullPath);
Dictionary<string, List<Detail>> slots = new();
string sourceFileNamePdf = ConvertSourceFileToPdf(ghostPCLFileName, logistics);
sourceFiles.Add(sourceFileNamePdf);
string sourceFileNameNoExt = Path.GetFileNameWithoutExtension(logistics.ReportFullPath);
////PdfSharp open pdf
//using (PdfSharp.Pdf.PdfDocument sourceDocument = PdfSharp.Pdf.IO.PdfReader.Open(sourceFileNamePdf, PdfSharp.Pdf.IO.PdfDocumentOpenMode.Import))
//{
// for (int idxPage = 0; idxPage < sourceDocument.PageCount; idxPage++)
// {
// // split the pdf into separate pages. Odd pages are wafer image, even are wafer summary. Last page is Lot Summary.
// _Log.Debug($"****ParseData - Splitting page: {idxPage}, sourceDocument: {sourceDocument.FullPath}, sourcePathFileNoExt: {sourcePathFileNoExt}");
// //SplitPage(sourceDocument, sourcePathFileNoExt, idxPage);
// pageNum = idxPage + 1;
// pageFile = string.Format("{0}_{1}.pdf", sourcePathFileNoExt, pageNum);
// _Log.Debug($"****SplitPage - Page {pageNum} Source file: {sourceDocument.FullPath}");
// _Log.Debug($"****SplitPage - Page {pageNum} Output file: {pageFile}");
// //PdfSharp Create new document
// PdfSharp.Pdf.PdfDocument outputDocument = new PdfSharp.Pdf.PdfDocument { Version = sourceDocument.Version };
// outputDocument.Info.Title = string.Format("Page {0} of {1}", pageNum, sourceDocument.Info.Title);
// outputDocument.Info.Creator = sourceDocument.Info.Creator;
// outputDocument.AddPage(sourceDocument.Pages[idxPage]);
// outputDocument.Pages[0].CropBox = new PdfSharp.Pdf.PdfRectangle(new PdfSharp.Drawing.XRect(0, 100, 700, 700));
// outputDocument.Save(pageFile);
// }
// sourceDocumentPageCount = sourceDocument.PageCount;
// sourceDocument.Close();
//}
java.io.File file = new(sourceFileNamePdf);
org.apache.pdfbox.util.Splitter splitter = new();
org.apache.pdfbox.pdmodel.PDDocument pdDocument = org.apache.pdfbox.pdmodel.PDDocument.load(file);
java.util.List list = splitter.split(pdDocument);
java.util.ListIterator iterator = list.listIterator();
org.apache.pdfbox.util.PDFTextStripper dataStripper = new();
for (short i = 1; i < short.MaxValue; i++)
{
if (!iterator.hasNext())
break;
item = iterator.next();
pagePDFFile = string.Concat(sourcePath, @"\", sourceFileNameNoExt, "_", i, ".pdf");
pageTextFile = Path.ChangeExtension(pagePDFFile, ".txt");
if (File.Exists(pageTextFile))
{
pageText = File.ReadAllText(pageTextFile);
sourceFiles.Add(pageTextFile);
if (item is not org.apache.pdfbox.pdmodel.PDDocument pd)
continue;
pd.close();
}
else if (File.Exists(pagePDFFile))
{
org.apache.pdfbox.pdmodel.PDDocument document = org.apache.pdfbox.pdmodel.PDDocument.load(pagePDFFile);
pageText = dataStripper.getText(document);
document.close();
sourceFiles.Add(pagePDFFile);
if (item is not org.apache.pdfbox.pdmodel.PDDocument pd)
continue;
pd.close();
}
else
{
if (item is not org.apache.pdfbox.pdmodel.PDDocument pd)
continue;
pageText = dataStripper.getText(pd);
pd.save(pagePDFFile);
sourceFiles.Add(pagePDFFile);
pd.close();
File.WriteAllText(pageTextFile, pageText);
sourceFiles.Add(pageTextFile);
}
pages.Add(pagePDFFile, pageText);
}
pdDocument.close();
// parse lot summary
_Log.Debug($"****ParseData - Parsing lot summary");
List<Tuple<string, string>> pageMapping = new();
string headerFileName = string.Concat(sourcePath, @"\", sourceFileNameNoExt, "_", pages.Count, ".pdf");
ParseLotSummary(fileRead, logistics, headerFileName, pages, slots);
foreach (KeyValuePair<string, string> keyValuePair in pages)
{
if (keyValuePair.Key == headerFileName)
continue;
if (string.IsNullOrEmpty(keyValuePair.Value.Trim()))
{
pageMapping.Add(new Tuple<string, string>(keyValuePair.Key, string.Empty));
continue;
}
if (!pages.ContainsKey(keyValuePair.Key))
throw new Exception();
Detail dataFile = ParseWaferSummary(keyValuePair.Key, pages);
if (string.IsNullOrEmpty(dataFile.Recipe) || dataFile.Recipe != Recipe)
{
missingSlots.Add(keyValuePair.Key);
pageMapping.Add(new Tuple<string, string>(keyValuePair.Key, string.Empty));
continue;
}
if (!slots.ContainsKey(dataFile.Slot))
{
missingSlots.Add(keyValuePair.Key);
pageMapping.Add(new Tuple<string, string>(keyValuePair.Key, string.Empty));
continue;
}
pageMapping.Add(new Tuple<string, string>(keyValuePair.Key, string.Concat(sourcePath, @"\", sourceFileNameNoExt, "_", dataFile.Slot.Replace('*', 's'), "_data.pdf")));
slots[dataFile.Slot].Add(dataFile);
}
string checkFileName = string.Concat(sourcePath, @"\", sourceFileNameNoExt, "_data.pdf");
if (!File.Exists(checkFileName))
{
File.Move(headerFileName, checkFileName);
_ = sourceFiles.Remove(headerFileName);
sourceFiles.Add(checkFileName);
}
checkFileName = string.Empty;
for (int i = pageMapping.Count - 1; i > -1; i--)
{
if (!string.IsNullOrEmpty(pageMapping[i].Item2))
{
checkFileName = pageMapping[i].Item2;
if (!File.Exists(checkFileName))
{
File.Move(pageMapping[i].Item1, checkFileName);
_ = sourceFiles.Remove(pageMapping[i].Item1);
sourceFiles.Add(checkFileName);
}
}
else if (!string.IsNullOrEmpty(checkFileName))
{
//if (i == 0 || !string.IsNullOrEmpty(pageMapping[i - 1].Item2))
//{
checkFileName = checkFileName.Replace("_data.pdf", "_image.pdf");
if (!File.Exists(checkFileName))
{
File.Move(pageMapping[i].Item1, checkFileName);
_ = sourceFiles.Remove(pageMapping[i].Item1);
sourceFiles.Add(checkFileName);
}
//}
checkFileName = string.Empty;
}
}
foreach (KeyValuePair<string, List<Detail>> keyValuePair in slots)
{
if (!keyValuePair.Value.Any() || keyValuePair.Value[0] is null)
missingSlots.Add(string.Concat("Slot ", keyValuePair.Key, ") is missing."));
else
{
foreach (Detail data in keyValuePair.Value)
dataFiles.Add(data);
}
}
if (missingSlots.Any())
{
string missingSlotsFile = string.Concat(sourcePath, @"\", sourceFileNameNoExt, "_MissingSlots.txt");
File.WriteAllLines(missingSlotsFile, missingSlots);
sourceFiles.Add(missingSlotsFile);
}
Date = DateTime.Parse(Date).ToString();
//Equipment data is wrong!!!
Date = DateTime.Now.ToString();
//Equipment data is wrong!!!
//for (int i = 0; i < dataFiles.Count; i++)
// dataFiles[i].Date = DateTime.Parse(dataFiles[i].Date).ToString();
foreach (string sourceFile in sourceFiles)
fileInfoCollection.Add(new FileInfo(sourceFile));
fileInfoCollection.Add(new FileInfo(logistics.ReportFullPath));
}
}

View File

@ -1,49 +0,0 @@
using System;
using System.Collections.Generic;

namespace Adaptation.Helpers
{

    public partial class ConfigData
    {

        /// <summary>
        /// Duplicator level for a cell-instance connection. The constructor casts the
        /// dash count of the connection name directly to this enum, so member order
        /// and position are significant — do not reorder.
        /// </summary>
        public enum Level
        {
            IsXToOpenInsightMetrologyViewer, // MetrologyWS.SendData to http://<server>/api/inbound/Tencor
            IsXToIQSSi, // WriteFileSPC
            IsXToIQSGaN, // GAN_PPTST
            IsXToOpenInsight, // WriteFileOpenInsight
            IsXToOpenInsightMetrologyViewerAttachments, // Site-Two
            IsXToAPC,
            IsXToSPaCe,
            IsXToArchive,
            IsArchive,
            IsDummy,
            IsManualOIEntry,
            IsTIBCO,
            IsNaEDA
        }

        /// <summary>
        /// Maps each duplicator <see cref="Level"/> to an integer (presumably a
        /// retention/age threshold — TODO confirm against callers) and the share or
        /// poll-path template it serves. A fresh list is built on every access, so
        /// callers may mutate the result without affecting other callers.
        /// </summary>
        public static List<Tuple<int, Enum, string>> LevelTuples =>
            new List<Tuple<int, Enum, string>>
            {
                new Tuple<int, Enum, string>(0, Level.IsNaEDA, @"\EC_EDA\Staging\Traces\~\Source"),
                new Tuple<int, Enum, string>(15, Level.IsXToOpenInsightMetrologyViewer, @"\EC_EAFLog\TracesMES\~\Source"),
                new Tuple<int, Enum, string>(36, Level.IsXToIQSSi, @"\EC_SPC_Si\Traces\~\PollPath"),
                new Tuple<int, Enum, string>(36, Level.IsXToIQSGaN, @"\EC_SPC_GaN\Traces\~\PollPath"),
                new Tuple<int, Enum, string>(36, Level.IsXToOpenInsight, @"\\messa01ec.ec.local\APPS\Metrology\~\Source"),
                new Tuple<int, Enum, string>(36, Level.IsXToOpenInsightMetrologyViewerAttachments, @"\EC_Characterization_Si\In Process\~\Source"),
                new Tuple<int, Enum, string>(360, Level.IsXToAPC, @"\EC_APC\Staging\Traces\~\PollPath"),
                new Tuple<int, Enum, string>(-36, Level.IsXToSPaCe, @"\EC_SPC_Si\Traces\~\Source"),
                new Tuple<int, Enum, string>(180, Level.IsXToArchive, @"\EC_EAFLog\TracesArchive\~\Source"),
                new Tuple<int, Enum, string>(36, Level.IsArchive, @"\EC_Characterization_Si\Processed")
                // NOTE: IsDummy intentionally has no entry (matches the original commented-out stub).
            };
    }
}

View File

@ -1,614 +0,0 @@
using Adaptation.Eaf.Management.ConfigurationData.CellAutomation;
using Adaptation.Ifx.Eaf.EquipmentConnector.File.Configuration;
using Adaptation.Shared;
using Adaptation.Shared.Metrology;
using Infineon.Monitoring.MonA;
using System;
using System.Collections.Generic;
using System.Diagnostics;
using System.Globalization;
using System.IO;
using System.IO.Compression;
using System.Linq;
using System.Text.Json;
using System.Threading;
namespace Adaptation.Helpers
{
public partial class ConfigData : ConfigDataBase
{
// Sentinel passed where the framework expects a data object but none applies.
internal const object NullData = null;
// Files shorter than this byte count are treated as incomplete/invalid input.
internal const int MinFileLength = 100;
// The properties below are populated in the constructor from the equipment-type
// model object parameters (IQS / PATH / COMMON / CONNECTION STRINGS / OpenInsight / TIBCO sections).
public string IqsFile { get; private set; }
public string TracePath { get; private set; }
// Duplicator level derived from the dash count in the connection name;
// null when this instance is not a duplicator connection.
public Level? Duplicator { get; private set; }
public string IfxChannel { get; private set; }
public string IfxSubject { get; private set; }
public string MemoryPath { get; private set; }
public string NoWaferMap { get; private set; }
public string VillachPath { get; private set; }
public string ProgressPath { get; private set; }
public string IqsQueryFilter { get; private set; }
public string IfxSubjectPrefix { get; private set; }
// Full path to the Ghost PCL executable (gpcl6win64.exe) next to the entry assembly.
public string GhostPCLFileName { get; private set; }
public string OpenInsightSiViewer { get; private set; }
public string OpenInsightFilePattern { get; private set; }
public string ConnectionStringLSL2SQL { get; private set; }
public string OIContextDataSearchPath { get; private set; }
public string OIContextDataResultsPath { get; private set; }
public string OIContextDataPendingPath { get; private set; }
public string IfxConfigurationLocation { get; private set; }
// NOTE(review): "Metrogy" looks like a typo for "Metrology", but the name is part
// of the public surface — renaming would break callers, so it is left as-is.
public string OpenInsightMetrogyViewerAPI { get; private set; }
public List<string> TransportSetupMessages { get; private set; }
public string IfxConfigurationLocationLocalCopy { get; private set; }
// Shared across all instances: archive sequences already replayed per monA resource.
public static Dictionary<string, List<long>> DummyRuns { get; private set; }
// Timer that drives Callback() in source-timer / database-export / dummy modes.
private Timer _Timer;
// Index of the last tuple replayed by CallbackIsDummy(); round-robins over candidates.
private int _LastDummyRunIndex;
// en-US calendar, used to compute the week-of-year for dummy trace folders.
private readonly Calendar _Calendar;
// Touch-file path for source-timer modes; only assigned when a timer mode is active.
private readonly string _ReportFullPath;
/// <summary>
/// Builds the cell configuration from the equipment-type model object parameters.
/// Determines duplicator mode from the connection name, validates the required
/// IQS / PATH / COMMON / CONNECTION STRINGS / OpenInsight / TIBCO sections
/// (throwing on any missing or empty required key), creates missing directories,
/// and starts the polling timer for source-timer / database-export / dummy modes.
/// </summary>
public ConfigData(ILogic logic, string cellName, string cellInstanceConnectionName, FileConnectorConfiguration fileConnectorConfiguration, string equipmentTypeName, IList<ModelObjectParameterDefinition> modelObjectParameterDefinitions, string parameterizedModelObjectDefinitionType, bool isEAFHosted) :
base(cellName, cellInstanceConnectionName, fileConnectorConfiguration, equipmentTypeName, parameterizedModelObjectDefinitionType, isEAFHosted)
{
_LastDummyRunIndex = -1;
UseCyclicalForDescription = false;
CultureInfo cultureInfo = new CultureInfo("en-US");
_Calendar = cultureInfo.Calendar;
string firstSourceFileFilter = fileConnectorConfiguration.SourceFileFilter.Split('|')[0];
if (DummyRuns is null)
DummyRuns = new Dictionary<string, List<long>>();
// A connection is a duplicator when its name starts with the cell name; the
// number of dashes in the connection name selects the duplicator Level.
bool isDuplicator = cellInstanceConnectionName.StartsWith(cellName);
int level = (cellInstanceConnectionName.Length - cellInstanceConnectionName.Replace("-", string.Empty).Length);
if (!isDuplicator)
Duplicator = null;
else
{
CellNames.Add(cellName, cellName);
MesEntities.Add(cellName, cellName);
Duplicator = (Level)level;
}
if (isDuplicator)
ProcessDataDescription = new Duplicator.Description();
else
ProcessDataDescription = new ProcessData.FileRead.Description();
if (!isDuplicator)
{
//Verify(fileConnectorConfiguration, cellInstanceConnectionName);
if (!EafHosted)
VerifyProcessDataDescription(logic);
else
WriteExportAliases(logic, cellName, cellInstanceConnectionName);
}
// Ghost PCL converter is expected next to the entry assembly; only mandatory
// for the OpenInsight MetrologyViewer attachments duplicator when EAF-hosted.
GhostPCLFileName = string.Concat(Path.GetDirectoryName(System.Reflection.Assembly.GetEntryAssembly().Location), @"\gpcl6win64.exe");
if (EafHosted && Duplicator.HasValue && Duplicator.Value == Level.IsXToOpenInsightMetrologyViewerAttachments && !File.Exists(GhostPCLFileName))
throw new Exception("Ghost PCL FileName doesn't Exist!");
//LincPDFCFileName = string.Concat(Path.GetDirectoryName(System.Reflection.Assembly.GetEntryAssembly().Location), @"\LincPDFC.exe");
if (!modelObjectParameterDefinitions.Any())
{
// No configuration supplied (e.g. design-time): register placeholder entries.
CellNames.Add(cellName, "****");
MesEntities.Add(cellName, "****");
}
else
{
int index;
string key;
string variable = string.Empty;
// Bucket each "SECTION.Key" parameter into its section dictionary.
Dictionary<string, string> iqsSection = new Dictionary<string, string>();
Dictionary<string, string> pathSection = new Dictionary<string, string>();
Dictionary<string, string> tibcoSection = new Dictionary<string, string>();
Dictionary<string, string> commonSection = new Dictionary<string, string>();
Dictionary<string, string> openInsightSection = new Dictionary<string, string>();
Dictionary<string, string> connectionStringsSection = new Dictionary<string, string>();
foreach (ModelObjectParameterDefinition modelObjectParameterDefinition in modelObjectParameterDefinitions)
{
if (!modelObjectParameterDefinition.Name.Contains('.'))
continue;
else if (modelObjectParameterDefinition.Name.StartsWith("Description.") && (modelObjectParameterDefinition.Name.EndsWith(".EventName") || modelObjectParameterDefinition.Name.EndsWith(".EquipmentType")))
continue;
index = modelObjectParameterDefinition.Name.IndexOf(".");
if (index <= -1)
continue;
key = modelObjectParameterDefinition.Name.Substring(0, index);
variable = modelObjectParameterDefinition.Name.Substring(index + 1);
if (key == "COMMON")
commonSection.Add(variable, modelObjectParameterDefinition.Value);
else if (key == "CONNECTION STRINGS")
connectionStringsSection.Add(variable, modelObjectParameterDefinition.Value);
else if (key == "IQS")
iqsSection.Add(variable, modelObjectParameterDefinition.Value);
else if (key == "OpenInsight")
openInsightSection.Add(variable, modelObjectParameterDefinition.Value);
else if (key == "PATH")
pathSection.Add(variable, modelObjectParameterDefinition.Value);
//else if (key == "REACTOR")
// reactorTuples.Add(new Tuple<string, string>(variable, modelObjectParameterDefinition.Value));
else if (key == "TIBCO")
tibcoSection.Add(variable, modelObjectParameterDefinition.Value);
else
throw new Exception();
}
// --- IQS section: FILE and QUERY are both required and must be non-empty ---
if (!iqsSection.Any())
throw new Exception("IQS section is missing from configuration");
else
{
key = "FILE";
if (iqsSection.ContainsKey(key))
IqsFile = iqsSection[key];
else
throw new Exception(string.Concat("Missing IQS Configuration entry for ", key));
if (string.IsNullOrEmpty(IqsFile))
throw new Exception(string.Format("IQS key {0} is empty", key));
key = "QUERY";
if (iqsSection.ContainsKey(key))
IqsQueryFilter = iqsSection[key];
else
throw new Exception(string.Concat("Missing IQS Configuration entry for ", key));
if (string.IsNullOrEmpty(IqsQueryFilter))
throw new Exception(string.Format("IQS key {0} is empty", key));
}
// --- PATH section: each path is optional, only honored when it is a UNC path
// (starts with \\); directories are created when missing ---
if (!pathSection.Any())
throw new Exception("Path section is missing from configuration");
else
{
key = "TRACE";
if (pathSection.ContainsKey(key) && pathSection[key].StartsWith(@"\\"))
TracePath = pathSection[key];
if (!string.IsNullOrEmpty(TracePath) && !Directory.Exists(TracePath))
Directory.CreateDirectory(TracePath);
key = "VILLACH";
if (pathSection.ContainsKey(key) && pathSection[key].StartsWith(@"\\"))
VillachPath = pathSection[key];
if (!string.IsNullOrEmpty(VillachPath) && !Directory.Exists(VillachPath))
Directory.CreateDirectory(VillachPath);
key = "Progress";
if (pathSection.ContainsKey(key) && pathSection[key].StartsWith(@"\\"))
ProgressPath = pathSection[key];
// Progress additionally requires its root share to be reachable before creating.
if (!string.IsNullOrEmpty(ProgressPath) && Directory.Exists(Path.GetPathRoot(ProgressPath)) && !Directory.Exists(ProgressPath))
Directory.CreateDirectory(ProgressPath);
key = "Memory";
if (pathSection.ContainsKey(key) && pathSection[key].StartsWith(@"\\"))
MemoryPath = pathSection[key];
if (!string.IsNullOrEmpty(MemoryPath) && !Directory.Exists(MemoryPath))
Directory.CreateDirectory(MemoryPath);
key = "OIContextDataSearch";
if (pathSection.ContainsKey(key) && pathSection[key].StartsWith(@"\\"))
OIContextDataSearchPath = pathSection[key];
if (!string.IsNullOrEmpty(OIContextDataSearchPath) && !Directory.Exists(OIContextDataSearchPath))
Directory.CreateDirectory(OIContextDataSearchPath);
key = "OIContextDataPending";
if (pathSection.ContainsKey(key) && pathSection[key].StartsWith(@"\\"))
OIContextDataPendingPath = pathSection[key];
if (!string.IsNullOrEmpty(OIContextDataPendingPath) && !Directory.Exists(OIContextDataPendingPath))
Directory.CreateDirectory(OIContextDataPendingPath);
key = "OIContextDataResults";
if (pathSection.ContainsKey(key) && pathSection[key].StartsWith(@"\\"))
OIContextDataResultsPath = pathSection[key];
if (!string.IsNullOrEmpty(OIContextDataResultsPath) && !Directory.Exists(OIContextDataResultsPath))
Directory.CreateDirectory(OIContextDataResultsPath);
}
// --- COMMON section: CELL_NAMES and MES_ENTITIES are "name:value;name:value"
// lists; the current cell must appear in both. NoWaferMap must exist on disk
// for the MetrologyViewer-attachments duplicator ---
if (!commonSection.Any())
throw new Exception("Common section is missing from configuration");
else
{
key = "CELL_NAMES";
if (!commonSection.ContainsKey(key) || !commonSection[key].Contains(';') || !commonSection[key].Contains(':'))
throw new Exception();
else
{
string[] segments;
string[] cellNames = commonSection[key].Split(new string[] { ";" }, StringSplitOptions.RemoveEmptyEntries);
foreach (string item in cellNames)
{
segments = item.Split(':');
CellNames.Add(segments[0].Trim(), segments[1].Trim());
}
}
if (!string.IsNullOrEmpty(cellName) && !CellNames.ContainsKey(cellName))
throw new Exception();
key = "MES_ENTITIES";
if (!commonSection.ContainsKey(key) || !commonSection[key].Contains(';') || !commonSection[key].Contains(':'))
throw new Exception();
else
{
string[] segments;
string[] mesEntity = commonSection[key].Split(new string[] { ";" }, StringSplitOptions.RemoveEmptyEntries);
foreach (string item in mesEntity)
{
segments = item.Split(':');
MesEntities.Add(segments[0].Trim(), segments[1].Trim());
}
}
if (!string.IsNullOrEmpty(cellName) && !MesEntities.ContainsKey(cellName))
throw new Exception();
key = "NoWaferMap";
if (commonSection.ContainsKey(key))
NoWaferMap = commonSection[key];
if (string.IsNullOrEmpty(NoWaferMap))
throw new Exception(string.Format("Common key {0} is empty", key));
if (Duplicator.HasValue && Duplicator.Value == Level.IsXToOpenInsightMetrologyViewerAttachments && !File.Exists(NoWaferMap))
throw new Exception(string.Format("Common key {0} is doesn't exist!", key));
}
// --- CONNECTION STRINGS section: LSL2SQL is required ---
if (!connectionStringsSection.Any())
throw new Exception("Connection Strings section is missing from configuration");
else
{
key = "LSL2SQL";
if (connectionStringsSection.ContainsKey(key))
ConnectionStringLSL2SQL = connectionStringsSection[key];
else
throw new Exception(string.Format("{0} is missing from Equipment Type Configuration", key));
if (string.IsNullOrEmpty(ConnectionStringLSL2SQL))
throw new Exception(string.Format("Connection String key {0} is empty", key));
}
// --- OpenInsight section: viewer URLs must look like host:port addresses ---
if (!openInsightSection.Any())
throw new Exception("OpenInsight section is missing from configuration");
else
{
key = "MetrologyViewerAPI";
if (openInsightSection.ContainsKey(key))
OpenInsightMetrogyViewerAPI = openInsightSection[key];
if (string.IsNullOrEmpty(OpenInsightMetrogyViewerAPI))
throw new Exception(string.Format("OpenInsight key {0} is empty", key));
if (!OpenInsightMetrogyViewerAPI.Contains(":") || !OpenInsightMetrogyViewerAPI.Contains("."))
throw new Exception(string.Format("OpenInsight key {0} is invalid", key));
key = "SiViewer";
if (openInsightSection.ContainsKey(key))
OpenInsightSiViewer = openInsightSection[key];
if (string.IsNullOrEmpty(OpenInsightSiViewer))
throw new Exception(string.Format("OpenInsight key {0} is empty", key));
if (!OpenInsightSiViewer.Contains(":") || !OpenInsightSiViewer.Contains("."))
throw new Exception(string.Format("OpenInsight key {0} is invalid", key));
key = "FilePattern";
if (openInsightSection.ContainsKey(key))
OpenInsightFilePattern = openInsightSection[key];
else
throw new Exception(string.Concat("Missing OpenInsight Configuration entry for ", key));
if (string.IsNullOrEmpty(OpenInsightFilePattern))
throw new Exception(string.Format("OpenInsight key {0} is empty", key));
}
if (!MesEntities.Any())
throw new Exception();
// --- TIBCO section: all IFX_* keys are required; the IsTIBCO duplicator also
// initializes the Si transport here ---
if (!tibcoSection.Any())
throw new Exception("TIBCO section is missing from configuration");
else
{
key = "IFX_CHANNEL";
if (tibcoSection.ContainsKey(key))
IfxChannel = tibcoSection[key];
else
throw new Exception("IFX_CHANNEL is missing from Equipment Type Configuration");
if (string.IsNullOrEmpty(IfxChannel))
throw new Exception(string.Format("TIBCO section key {0} is empty", key));
key = "IFX_SUBJECT_PREFIX";
if (tibcoSection.ContainsKey(key))
IfxSubjectPrefix = tibcoSection[key];
else
throw new Exception("IFX_SUBJECT_PREFIX is missing from Equipment Type Configuration");
if (string.IsNullOrEmpty(IfxSubjectPrefix))
throw new Exception(string.Format("TIBCO section key {0} is empty", key));
key = "IFX_CONFIGURATION_LOCATION";
if (tibcoSection.ContainsKey(key))
IfxConfigurationLocation = tibcoSection[key];
else
throw new Exception("IFX_CONFIGURATION_LOCATION is missing from Equipment Type Configuration");
if (string.IsNullOrEmpty(IfxConfigurationLocation))
throw new Exception(string.Format("TIBCO section key {0} is empty", key));
key = "IFX_CONFIGURATION_LOCATION_LOCAL_COPY";
if (tibcoSection.ContainsKey(key))
IfxConfigurationLocationLocalCopy = tibcoSection[key];
else
throw new Exception("IFX_CONFIGURATION_LOCATION_LOCAL_COPY is missing from Equipment Type Configuration");
if (string.IsNullOrEmpty(IfxConfigurationLocationLocalCopy))
throw new Exception(string.Format("TIBCO section key {0} is empty", key));
key = "IFX_SUBJECT";
if (tibcoSection.ContainsKey(key))
IfxSubject = tibcoSection[key];
else
throw new Exception("IFX_SUBJECT KEY is missing from Equipment Type Configuration");
if (string.IsNullOrEmpty(IfxSubject))
throw new Exception(string.Format("TIBCO section key {0} is empty", key));
if (Duplicator.HasValue && Duplicator.Value == Level.IsTIBCO)
{
Si.Transport.Initialize(this);
if (!string.IsNullOrEmpty(fileConnectorConfiguration.SourceFileLocation))
TransportSetupMessages = Si.Transport.Setup(useSleep: true, setIfxTransport: true);
else
TransportSetupMessages = Si.Transport.Setup(useSleep: false, setIfxTransport: false);
}
}
// --- Timer modes: run Callback immediately when debugging / Process mode,
// otherwise schedule it at half the configured scan interval ---
if (IsSourceTimer || IsDatabaseExportToIPDSF || (Duplicator.HasValue && Duplicator.Value == Level.IsDummy))
{
if (!Directory.Exists(fileConnectorConfiguration.SourceFileLocation))
Directory.CreateDirectory(fileConnectorConfiguration.SourceFileLocation);
_ReportFullPath = string.Concat(fileConnectorConfiguration.SourceFileLocation, firstSourceFileFilter.Replace("*", @"\"));
if (Debugger.IsAttached || fileConnectorConfiguration.PreProcessingMode == FileConnectorConfiguration.PreProcessingModeEnum.Process)
Callback(null);
else
{
int milliSeconds;
milliSeconds = (int)((fileConnectorConfiguration.FileScanningIntervalInSeconds * 1000) / 2);
_Timer = new Timer(Callback, null, milliSeconds, Timeout.Infinite);
// NOTE(review): this increment is dead code — milliSeconds is never read again.
milliSeconds += 2000;
}
}
}
/// <summary>
/// Validates that every test in the result set is a Tencor test and ensures the
/// active process-data description is the Tencor FileRead description.
/// Throws for any other test type. The <paramref name="extra"/> flag is unused.
/// </summary>
public void CheckProcessDataDescription(Dictionary<Test, List<Duplicator.Description>> results, bool extra)
{
    foreach (Test test in results.Keys)
    {
        // Anything other than Tencor is unexpected for this cell.
        if (test != Test.Tencor)
            throw new Exception();
        // Swap in the Tencor-specific description unless one is already active.
        if (ProcessDataDescription is not ProcessData.FileRead.Description)
            ProcessDataDescription = new ProcessData.FileRead.Description();
    }
}
/// <summary>
/// Replays archived runs for dummy duplication. For each tuple
/// (monA resource, source archive, in-process dir, check dir, file count):
/// reports Warning to MonIn when leftover files exist, extracts the archive,
/// restamps file times from the sequence number, moves the files into the check
/// directory, then reports Ok; any failure reports Critical and emails the details.
/// </summary>
private void CallbackIsDummy(string traceDummyFile, List<Tuple<string, string, string, string, int>> tuples, bool fileConnectorConfigurationIncludeSubDirectories, bool includeSubDirectoriesExtra)
{
int fileCount;
string[] files;
string monARessource;
string checkDirectory;
string sourceArchiveFile;
string inProcessDirectory;
const string site = "sjc";
const string monInURL = "http://moninhttp.sjc.infineon.com/input/text";
MonIn monIn = MonIn.GetInstance(monInURL);
string stateName = string.Concat("Dummy_", _EventName);
foreach (Tuple<string, string, string, string, int> item in tuples)
{
monARessource = item.Item1;
sourceArchiveFile = item.Item2;
inProcessDirectory = item.Item3;
checkDirectory = item.Item4;
fileCount = item.Item5;
try
{
if (fileCount > 0 || string.IsNullOrEmpty(checkDirectory))
{
// Leftover files (or no check directory) - warn and pause ~5.5s before retrying.
File.AppendAllLines(traceDummyFile, new string[] { site, monARessource, stateName, State.Warning.ToString() });
monIn.SendStatus(site, monARessource, stateName, State.Warning);
for (int i = 1; i < 12; i++)
Thread.Sleep(500);
}
else if (inProcessDirectory == checkDirectory)
continue;
if (!EafHosted)
continue;
if (!File.Exists(sourceArchiveFile))
continue;
// Archive file name (minus "x" markers) is the tick-based sequence number.
if (!long.TryParse(Path.GetFileNameWithoutExtension(sourceArchiveFile).Replace("x", string.Empty), out long sequence))
continue;
ZipFile.ExtractToDirectory(sourceArchiveFile, inProcessDirectory);
if (fileConnectorConfigurationIncludeSubDirectories && includeSubDirectoriesExtra)
{
// Daily reads nest an extra \Source level under the check directory.
if (_EventName == EventName.FileRead)
checkDirectory = string.Concat(checkDirectory, @"\", sequence);
else if (_EventName == EventName.FileReadDaily)
checkDirectory = string.Concat(checkDirectory, @"\Source\", sequence);
else
throw new Exception();
}
if (fileConnectorConfigurationIncludeSubDirectories)
files = Directory.GetFiles(inProcessDirectory, "*", SearchOption.AllDirectories);
else
files = Directory.GetFiles(inProcessDirectory, "*", SearchOption.TopDirectoryOnly);
// Guard against runaway extraction flooding the target share.
if (files.Length > 250)
throw new Exception("Safety net!");
// Restamp so downstream pickup sees the original run's timestamp.
foreach (string file in files)
File.SetLastWriteTime(file, new DateTime(sequence));
if (!fileConnectorConfigurationIncludeSubDirectories)
{
foreach (string file in files)
File.Move(file, string.Concat(checkDirectory, @"\", Path.GetFileName(file)));
}
else
{
// Recreate the directory tree before moving files into it.
string[] directories = Directory.GetDirectories(inProcessDirectory, "*", SearchOption.AllDirectories);
foreach (string directory in directories)
Directory.CreateDirectory(string.Concat(checkDirectory, directory.Substring(inProcessDirectory.Length)));
foreach (string file in files)
File.Move(file, string.Concat(checkDirectory, file.Substring(inProcessDirectory.Length)));
}
File.AppendAllLines(traceDummyFile, new string[] { site, monARessource, stateName, State.Ok.ToString() });
monIn.SendStatus(site, monARessource, stateName, State.Ok);
}
catch (Exception exception)
{
// Report Critical to MonIn, then best-effort email; mail failures are swallowed.
File.AppendAllLines(traceDummyFile, new string[] { site, monARessource, stateName, State.Critical.ToString(), exception.Message, exception.StackTrace });
monIn.SendStatus(site, monARessource, stateName, State.Critical);
try
{
Eaf.Core.Smtp.ISmtp smtp = Eaf.Core.Backbone.Instance.GetBackboneComponentsOfType<Eaf.Core.Smtp.ISmtp>().SingleOrDefault();
Eaf.Core.Smtp.EmailMessage emailMessage = new Eaf.Core.Smtp.EmailMessage(string.Concat("Exception:", EquipmentElementName), string.Concat(exception.Message, Environment.NewLine, Environment.NewLine, exception.StackTrace), Eaf.Core.Smtp.MailPriority.High);
smtp.Send(emailMessage);
}
catch (Exception) { }
}
}
}
/// <summary>
/// Dummy-mode timer body. During working hours (08:00-17:59, Mon-Sat... actually
/// Sunday and Saturday excluded - see check below), scans each configured cell
/// for an archive to replay, round-robins over the candidates via
/// _LastDummyRunIndex, prepares the in-process directory, and delegates the
/// actual extraction/move to the tuple overload.
/// </summary>
private void CallbackIsDummy()
{
DateTime dateTime = DateTime.Now;
// Only run 08:00-17:59 on weekdays (both weekend days excluded).
bool check = (dateTime.Hour > 7 && dateTime.Hour < 18 && dateTime.DayOfWeek != DayOfWeek.Sunday && dateTime.DayOfWeek != DayOfWeek.Saturday);
if (check)
{
int fileCount;
string[] files;
string monARessource;
string checkDirectory;
string sourceArchiveFile;
string sourceFileLocation;
string targetFileLocation;
string inProcessDirectory;
// Trace file lives under <root>\TracesDummy\<cell>\Source\<year>___Week_<ww>.
string weekOfYear = _Calendar.GetWeekOfYear(dateTime, CalendarWeekRule.FirstDay, DayOfWeek.Sunday).ToString("00");
string traceDummyDirectory = string.Concat(Path.GetPathRoot(TracePath), @"\TracesDummy\", CellName, @"\Source\", dateTime.ToString("yyyy"), "___Week_", weekOfYear);
if (!Directory.Exists(traceDummyDirectory))
Directory.CreateDirectory(traceDummyDirectory);
string traceDummyFile = string.Concat(traceDummyDirectory, @"\", dateTime.Ticks, " - ", CellName, ".txt");
File.AppendAllText(traceDummyFile, string.Empty);
List<Tuple<string, string, string, string, int>> tuples = new List<Tuple<string, string, string, string, int>>();
// Collect one candidate per (cell, source-file-filter) that has an archive present.
foreach (var keyValuePair in CellNames)
{
monARessource = keyValuePair.Key;
if (!keyValuePair.Value.Contains(@"\"))
continue;
foreach (string sourceFileFilter in FileConnectorConfiguration.SourceFileFilter.Split('|'))
{
if (sourceFileFilter.ToLower().StartsWith(keyValuePair.Value.Replace(@"\", string.Empty)))
sourceFileLocation = Path.GetFullPath(FileConnectorConfiguration.SourceFileLocation);
else if (FileConnectorConfiguration.SourceFileLocation.ToLower().EndsWith(keyValuePair.Value))
sourceFileLocation = Path.GetFullPath(FileConnectorConfiguration.SourceFileLocation);
else
sourceFileLocation = Path.GetFullPath(string.Concat(FileConnectorConfiguration.SourceFileLocation, @"\", keyValuePair.Value));
sourceArchiveFile = Path.GetFullPath(string.Concat(sourceFileLocation, @"\", sourceFileFilter));
targetFileLocation = Path.GetFullPath(string.Concat(FileConnectorConfiguration.TargetFileLocation, @"\", keyValuePair.Value));
if (!File.Exists(sourceArchiveFile))
continue;
if (!DummyRuns.ContainsKey(monARessource))
DummyRuns.Add(monARessource, new List<long>());
tuples.Add(new Tuple<string, string, string, string, int>(monARessource, sourceFileLocation, targetFileLocation, sourceArchiveFile, 0));
}
}
File.AppendAllLines(traceDummyFile, from l in tuples select l.Item4);
if (tuples.Any())
{
// Round-robin: replay exactly one candidate per timer tick.
_LastDummyRunIndex += 1;
if (_LastDummyRunIndex >= tuples.Count)
_LastDummyRunIndex = 0;
monARessource = tuples[_LastDummyRunIndex].Item1;
sourceFileLocation = tuples[_LastDummyRunIndex].Item2;
targetFileLocation = tuples[_LastDummyRunIndex].Item3;
sourceArchiveFile = tuples[_LastDummyRunIndex].Item4;
//fileCount = tuples[_LastDummyRunIndex].Item5;
tuples.Clear();
if (long.TryParse(Path.GetFileNameWithoutExtension(sourceArchiveFile).Replace("x", string.Empty), out long sequence))
{
if (!DummyRuns[monARessource].Contains(sequence))
DummyRuns[monARessource].Add(sequence);
inProcessDirectory = string.Concat(ProgressPath, @"\", monARessource, @"\Dummy_in process\", sequence);
checkDirectory = inProcessDirectory;
if (!Directory.Exists(checkDirectory))
Directory.CreateDirectory(checkDirectory);
// Clear any stale files from a previous run (best-effort delete).
files = Directory.GetFiles(checkDirectory, "*", SearchOption.AllDirectories);
fileCount = files.Length;
if (files.Any())
{
if (files.Length > 250)
throw new Exception("Safety net!");
try
{
foreach (string file in files)
File.Delete(file);
}
catch (Exception) { }
}
// Two tuples: one for the in-process dir, one for the target dir file count.
tuples.Add(new Tuple<string, string, string, string, int>(monARessource, sourceArchiveFile, inProcessDirectory, checkDirectory, fileCount));
checkDirectory = targetFileLocation;
files = Directory.GetFiles(checkDirectory, string.Concat("*", sequence, "*"), SearchOption.TopDirectoryOnly);
fileCount = files.Length;
tuples.Add(new Tuple<string, string, string, string, int>(monARessource, sourceArchiveFile, inProcessDirectory, checkDirectory, fileCount));
}
}
if (tuples.Any())
CallbackIsDummy(traceDummyFile, tuples, FileConnectorConfiguration.IncludeSubDirectories.Value, includeSubDirectoriesExtra: false);
}
}
/// <summary>
/// Best-effort exception notification via the EAF backbone SMTP component.
/// Never throws - mail failures must not take down the timer loop.
/// (Extracted from the three duplicated catch bodies in this class.)
/// </summary>
private void SendExceptionEmail(Exception exception)
{
    try
    {
        Eaf.Core.Smtp.ISmtp smtp = Eaf.Core.Backbone.Instance.GetBackboneComponentsOfType<Eaf.Core.Smtp.ISmtp>().SingleOrDefault();
        Eaf.Core.Smtp.EmailMessage emailMessage = new Eaf.Core.Smtp.EmailMessage(string.Concat("Exception:", EquipmentElementName), string.Concat(exception.Message, Environment.NewLine, Environment.NewLine, exception.StackTrace), Eaf.Core.Smtp.MailPriority.High);
        smtp.Send(emailMessage);
    }
    catch (Exception) { }
}

/// <summary>
/// Timer body. Non-duplicator mode: recreate the empty touch file at
/// _ReportFullPath to trigger a scan; dummy duplicator: replay an archive;
/// any other duplicator level is unexpected. Afterwards the timer is always
/// rescheduled with a mode-dependent interval. Errors in either phase are
/// emailed and swallowed so the loop keeps running.
/// </summary>
private void Callback(object state)
{
    try
    {
        if (Duplicator is null)
        {
            if (File.Exists(_ReportFullPath))
                File.Delete(_ReportFullPath);
            File.WriteAllText(_ReportFullPath, string.Empty);
        }
        else if (Duplicator.Value == Level.IsDummy)
            CallbackIsDummy();
        else
            throw new Exception();
    }
    catch (Exception exception)
    {
        SendExceptionEmail(exception);
    }
    try
    {
        // Fix: the original computed each interval as
        // new TimeSpan(DateTime.Now.AddX(n).Ticks - DateTime.Now.Ticks), sampling
        // the clock twice and drifting by the ticks elapsed between the two calls.
        // TimeSpan.FromX gives the intended constant interval deterministically.
        TimeSpan timeSpan;
        if (IsDatabaseExportToIPDSF)
            timeSpan = TimeSpan.FromMinutes(1);
        else if (IsSourceTimer)
            timeSpan = TimeSpan.FromMinutes(15);
        else if (Duplicator.HasValue && Duplicator.Value == Level.IsDummy)
            timeSpan = TimeSpan.FromSeconds(FileConnectorConfiguration.FileScanningIntervalInSeconds.Value);
        else if (Duplicator.HasValue)
            timeSpan = TimeSpan.FromSeconds(30);
        else
            timeSpan = TimeSpan.FromDays(.5);
        if (_Timer is null)
            _Timer = new Timer(Callback, null, (long)timeSpan.TotalMilliseconds, Timeout.Infinite);
        else
            _Timer.Change((long)timeSpan.TotalMilliseconds, Timeout.Infinite);
    }
    catch (Exception exception)
    {
        SendExceptionEmail(exception);
    }
}
/// <summary>
/// Reads a manual OpenInsight entry JSON report, pulls its "Equipment" and
/// "Description" properties, and (when EAF-hosted) sends a MANUAL_OI_ENTRY
/// Warning status to MonIn for the equipment (falling back to the cell name).
/// Returns an empty-string/JSON/empty-file-list tuple for the caller.
/// </summary>
public Tuple<string, JsonElement?, List<FileInfo>> IsManualOIEntry(string reportFullPath)
{
    const string site = "sjc";
    const string monInURL = "http://moninhttp.sjc.infineon.com/input/text";
    string stateName = "MANUAL_OI_ENTRY";
    string equipment = string.Empty;
    string description = string.Empty;
    string json = File.ReadAllText(reportFullPath);
    JsonElement jsonElement = JsonSerializer.Deserialize<JsonElement>(json);
    Tuple<string, JsonElement?, List<FileInfo>> results = new(string.Empty, jsonElement, new List<FileInfo>());
    foreach (JsonProperty jsonProperty in jsonElement.EnumerateObject())
    {
        switch (jsonProperty.Name)
        {
            case "Equipment":
                equipment = jsonProperty.Value.ToString();
                break;
            case "Description":
                description = jsonProperty.Value.ToString();
                break;
        }
    }
    // Report against the equipment named in the file; fall back to this cell.
    string monARessource = string.IsNullOrEmpty(equipment) ? CellName : equipment;
    MonIn monIn = MonIn.GetInstance(monInURL);
    if (EafHosted)
        monIn.SendStatus(site, monARessource, stateName, State.Warning, description);
    return results;
}
}
}

View File

@ -1,53 +0,0 @@
namespace Adaptation.Helpers
{
public partial class ProcessData
{
// Flat record for one wafer/slot page parsed from a Tencor report.
// All measurement fields are kept as raw strings exactly as parsed
// (presumably so formatting survives round-trips - TODO confirm).
public class DataFile
{
public long Id { get; set; }
public string AreaCount { get; set; }
public string AreaTotal { get; set; }
// Bin1-Bin8: per-bin defect counts as reported by the tool.
public string Bin1 { get; set; }
public string Bin2 { get; set; }
public string Bin3 { get; set; }
public string Bin4 { get; set; }
public string Bin5 { get; set; }
public string Bin6 { get; set; }
public string Bin7 { get; set; }
public string Bin8 { get; set; }
public string Comments { get; set; }
public string Date { get; set; }
public string Diameter { get; set; }
public string Exclusion { get; set; }
public string Gain { get; set; }
public string HazeAverage { get; set; }
public string HazePeak { get; set; }
public string HazeRegion { get; set; }
public string HazeRng { get; set; }
// Links this detail row back to its header record.
public string HeaderUniqueId { get; set; }
public string LPDCM2 { get; set; }
public string LPDCount { get; set; }
public string Laser { get; set; }
public string Mean { get; set; }
public string Recipe { get; set; }
public string ScratchCount { get; set; }
public string ScratchTotal { get; set; }
public string Slot { get; set; }
public string Sort { get; set; }
public string StdDev { get; set; }
public string SumOfDefects { get; set; }
public string Thresh { get; set; }
public string Thruput { get; set; }
public string Title { get; set; }
public string UniqueId { get; set; }
public string Data { get; set; }
// NOTE(review): flagged by IDE1006 (public members should be PascalCase);
// renaming a public property is a breaking change for callers, so left as-is.
public int i { get; set; }
}
}
}

View File

@ -1,473 +0,0 @@
using Adaptation.Shared;
using Adaptation.Shared.Metrology;
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text.Json;
namespace Adaptation.Helpers
{
    public partial class ProcessData
    {
        public class FileRead
        {
            /// <summary>
            /// Process-data description for the FileRead event: a flattened view that
            /// merges logistics fields, header statistics and one detail record.
            /// Property names double as display/column names via nameof (see GetDefault),
            /// so property order and names are part of the serialized contract.
            /// </summary>
            public class Description : IProcessDataDescription
            {
                public int Test { get; set; }
                public int Count { get; set; }
                public int Index { get; set; }
                // Logistics / event envelope fields.
                public string EventName { get; set; }
                public string NullData { get; set; }
                public string JobID { get; set; }
                public string Sequence { get; set; }
                public string MesEntity { get; set; }
                public string ReportFullPath { get; set; }
                public string ProcessJobID { get; set; }
                public string MID { get; set; }
                // Header-level fields (see GetHeaderNames).
                public string Date { get; set; }
                public string Employee { get; set; }
                public string Lot { get; set; }
                public string PSN { get; set; }
                public string Reactor { get; set; }
                public string Recipe { get; set; }
                // Detail-level fields (see GetDetailNames).
                public string Comments { get; set; }
                public string Diameter { get; set; }
                public string Exclusion { get; set; }
                public string Gain { get; set; }
                public string HeaderUniqueId { get; set; }
                public string Laser { get; set; }
                public string ParseErrorText { get; set; }
                public string RDS { get; set; }
                public string Slot { get; set; }
                public string UniqueId { get; set; }
                // Parameter fields (see GetParameterNames): per-wafer values plus
                // header Avg/Max/Min/StdDev aggregates.
                public string AreaCount { get; set; }
                public string AreaCountAvg { get; set; }
                public string AreaCountMax { get; set; }
                public string AreaCountMin { get; set; }
                public string AreaCountStdDev { get; set; }
                public string AreaTotal { get; set; }
                public string AreaTotalAvg { get; set; }
                public string AreaTotalMax { get; set; }
                public string AreaTotalMin { get; set; }
                public string AreaTotalStdDev { get; set; }
                public string Bin1 { get; set; }
                public string Bin2 { get; set; }
                public string Bin3 { get; set; }
                public string Bin4 { get; set; }
                public string Bin5 { get; set; }
                public string Bin6 { get; set; }
                public string Bin7 { get; set; }
                public string Bin8 { get; set; }
                public string HazeAverage { get; set; }
                public string HazeAverageAvg { get; set; }
                public string HazeAverageMax { get; set; }
                public string HazeAverageMin { get; set; }
                public string HazeAverageStdDev { get; set; }
                public string HazePeak { get; set; }
                public string HazeRegion { get; set; }
                public string HazeRegionAvg { get; set; }
                public string HazeRegionMax { get; set; }
                public string HazeRegionMin { get; set; }
                public string HazeRegionStdDev { get; set; }
                public string HazeRng { get; set; }
                public string LPDCM2 { get; set; }
                public string LPDCM2Avg { get; set; }
                public string LPDCM2Max { get; set; }
                public string LPDCM2Min { get; set; }
                public string LPDCM2StdDev { get; set; }
                public string LPDCount { get; set; }
                public string LPDCountAvg { get; set; }
                public string LPDCountMax { get; set; }
                public string LPDCountMin { get; set; }
                public string LPDCountStdDev { get; set; }
                public string Mean { get; set; }
                public string ScratchCount { get; set; }
                public string ScratchCountAvg { get; set; }
                public string ScratchCountMax { get; set; }
                public string ScratchCountMin { get; set; }
                public string ScratchCountStdDev { get; set; }
                public string ScratchTotal { get; set; }
                public string ScratchTotalAvg { get; set; }
                public string ScratchTotalMax { get; set; }
                public string ScratchTotalMin { get; set; }
                public string ScratchTotalStdDev { get; set; }
                public string Sort { get; set; }
                public string StdDev { get; set; }
                public string SumOfDefects { get; set; }
                public string SumOfDefectsAvg { get; set; }
                public string SumOfDefectsMax { get; set; }
                public string SumOfDefectsMin { get; set; }
                public string SumOfDefectsStdDev { get; set; }
                public string Thresh { get; set; }
                public string Thruput { get; set; }
                //
                public object Data { get; set; }
                public object Parameters { get; set; }
                // Event identity for this description type.
                public string GetEventName() { return nameof(FileRead); }
                public string GetEventDescription() { return "File Has been read and parsed"; }
                // Names of the header-level columns (parameters unused here;
                // presumably required by the IProcessDataDescription contract).
                public List<string> GetHeaderNames(ILogic logic, ConfigDataBase configDataBase)
                {
                    List<string> results = new List<string>
                    {
                        nameof(Date),
                        nameof(Employee),
                        nameof(Lot),
                        nameof(PSN),
                        nameof(Reactor),
                        nameof(Recipe)
                    };
                    return results;
                }
                // Names of the per-detail columns.
                public List<string> GetDetailNames(ILogic logic, ConfigDataBase configDataBase)
                {
                    List<string> results = new List<string>
                    {
                        nameof(Comments),
                        nameof(Diameter),
                        nameof(Exclusion),
                        nameof(Gain),
                        nameof(HeaderUniqueId),
                        nameof(Laser),
                        nameof(ParseErrorText),
                        nameof(RDS),
                        nameof(Slot),
                        nameof(UniqueId)
                    };
                    return results;
                }
                // Names of the numeric parameter columns (values + aggregates).
                public List<string> GetParameterNames(ILogic logic, ConfigDataBase configDataBase)
                {
                    List<string> results = new List<string>
                    {
                        nameof(AreaCount),
                        nameof(AreaCountAvg),
                        nameof(AreaCountMax),
                        nameof(AreaCountMin),
                        nameof(AreaCountStdDev),
                        nameof(AreaTotal),
                        nameof(AreaTotalAvg),
                        nameof(AreaTotalMax),
                        nameof(AreaTotalMin),
                        nameof(AreaTotalStdDev),
                        nameof(Bin1),
                        nameof(Bin2),
                        nameof(Bin3),
                        nameof(Bin4),
                        nameof(Bin5),
                        nameof(Bin6),
                        nameof(Bin7),
                        nameof(Bin8),
                        nameof(HazeAverage),
                        nameof(HazeAverageAvg),
                        nameof(HazeAverageMax),
                        nameof(HazeAverageMin),
                        nameof(HazeAverageStdDev),
                        nameof(HazePeak),
                        nameof(HazeRegion),
                        nameof(HazeRegionAvg),
                        nameof(HazeRegionMax),
                        nameof(HazeRegionMin),
                        nameof(HazeRegionStdDev),
                        nameof(HazeRng),
                        nameof(LPDCM2),
                        nameof(LPDCM2Avg),
                        nameof(LPDCM2Max),
                        nameof(LPDCM2Min),
                        nameof(LPDCM2StdDev),
                        nameof(LPDCount),
                        nameof(LPDCountAvg),
                        nameof(LPDCountMax),
                        nameof(LPDCountMin),
                        nameof(LPDCountStdDev),
                        nameof(Mean),
                        nameof(ScratchCount),
                        nameof(ScratchCountAvg),
                        nameof(ScratchCountMax),
                        nameof(ScratchCountMin),
                        nameof(ScratchCountStdDev),
                        nameof(ScratchTotal),
                        nameof(ScratchTotalAvg),
                        nameof(ScratchTotalMax),
                        nameof(ScratchTotalMin),
                        nameof(ScratchTotalStdDev),
                        nameof(Sort),
                        nameof(StdDev),
                        nameof(SumOfDefects),
                        nameof(SumOfDefectsAvg),
                        nameof(SumOfDefectsMax),
                        nameof(SumOfDefectsMin),
                        nameof(SumOfDefectsStdDev),
                        nameof(Thresh),
                        nameof(Thruput)
                    };
                    return results;
                }
                // No paired parameters for this event type.
                public List<string> GetPairedParameterNames(ILogic logic, ConfigDataBase configDataBase)
                {
                    List<string> results = new List<string>();
                    return results;
                }
                // No parameters are ignored for this event type.
                public List<string> GetIgnoreParameterNames(ILogic logic, ConfigDataBase configDataBase, Test test)
                {
                    List<string> results = new List<string>();
                    return results;
                }
                // All property names, discovered by serializing a default instance
                // and enumerating the resulting JSON object's keys.
                public List<string> GetNames(ILogic logic, ConfigDataBase configDataBase)
                {
                    List<string> results = new List<string>();
                    IProcessDataDescription processDataDescription = GetDefault(logic, configDataBase);
                    string json = JsonSerializer.Serialize(processDataDescription, processDataDescription.GetType());
                    object @object = JsonSerializer.Deserialize<object>(json);
                    if (!(@object is JsonElement jsonElement))
                        throw new Exception();
                    foreach (JsonProperty jsonProperty in jsonElement.EnumerateObject())
                        results.Add(jsonProperty.Name);
                    return results;
                }
                // Returns a bare instance; presumably display names are taken from
                // the property names themselves by the caller.
                public IProcessDataDescription GetDisplayNames(ILogic logic, ConfigDataBase configDataBase)
                {
                    Description result = new Description();
                    return result;
                }
                // Default instance: logistics fields come from the live run; every
                // data field is filled with its own name via nameof (placeholder values).
                public IProcessDataDescription GetDefault(ILogic logic, ConfigDataBase configDataBase)
                {
                    Description result = new Description
                    {
                        Test = -1,
                        Count = 0,
                        Index = -1,
                        //
                        EventName = GetEventName(),
                        NullData = string.Empty,
                        JobID = logic.Logistics.JobID,
                        Sequence = logic.Logistics.Sequence.ToString(),
                        MesEntity = logic.Logistics.MesEntity,
                        ReportFullPath = logic.Logistics.ReportFullPath,
                        ProcessJobID = logic.Logistics.ProcessJobID,
                        MID = logic.Logistics.MID,
                        //
                        Date = nameof(Date),
                        Employee = nameof(Employee),
                        Lot = nameof(Lot),
                        PSN = nameof(PSN),
                        Reactor = nameof(Reactor),
                        Recipe = nameof(Recipe),
                        //
                        Comments = nameof(Comments),
                        Diameter = nameof(Diameter),
                        Exclusion = nameof(Exclusion),
                        Gain = nameof(Gain),
                        HeaderUniqueId = nameof(HeaderUniqueId),
                        Laser = nameof(Laser),
                        ParseErrorText = nameof(ParseErrorText),
                        RDS = nameof(RDS),
                        Slot = nameof(Slot),
                        UniqueId = nameof(UniqueId),
                        //
                        AreaCount = nameof(AreaCount),
                        AreaCountAvg = nameof(AreaCountAvg),
                        AreaCountMax = nameof(AreaCountMax),
                        AreaCountMin = nameof(AreaCountMin),
                        AreaCountStdDev = nameof(AreaCountStdDev),
                        AreaTotal = nameof(AreaTotal),
                        AreaTotalAvg = nameof(AreaTotalAvg),
                        AreaTotalMax = nameof(AreaTotalMax),
                        AreaTotalMin = nameof(AreaTotalMin),
                        AreaTotalStdDev = nameof(AreaTotalStdDev),
                        Bin1 = nameof(Bin1),
                        Bin2 = nameof(Bin2),
                        Bin3 = nameof(Bin3),
                        Bin4 = nameof(Bin4),
                        Bin5 = nameof(Bin5),
                        Bin6 = nameof(Bin6),
                        Bin7 = nameof(Bin7),
                        Bin8 = nameof(Bin8),
                        HazeAverage = nameof(HazeAverage),
                        HazeAverageAvg = nameof(HazeAverageAvg),
                        HazeAverageMax = nameof(HazeAverageMax),
                        HazeAverageMin = nameof(HazeAverageMin),
                        HazeAverageStdDev = nameof(HazeAverageStdDev),
                        HazePeak = nameof(HazePeak),
                        HazeRegion = nameof(HazeRegion),
                        HazeRegionAvg = nameof(HazeRegionAvg),
                        HazeRegionMax = nameof(HazeRegionMax),
                        HazeRegionMin = nameof(HazeRegionMin),
                        HazeRegionStdDev = nameof(HazeRegionStdDev),
                        HazeRng = nameof(HazeRng),
                        LPDCM2 = nameof(LPDCM2),
                        LPDCM2Avg = nameof(LPDCM2Avg),
                        LPDCM2Max = nameof(LPDCM2Max),
                        LPDCM2Min = nameof(LPDCM2Min),
                        LPDCM2StdDev = nameof(LPDCM2StdDev),
                        LPDCount = nameof(LPDCount),
                        LPDCountAvg = nameof(LPDCountAvg),
                        LPDCountMax = nameof(LPDCountMax),
                        LPDCountMin = nameof(LPDCountMin),
                        LPDCountStdDev = nameof(LPDCountStdDev),
                        Mean = nameof(Mean),
                        ScratchCount = nameof(ScratchCount),
                        ScratchCountAvg = nameof(ScratchCountAvg),
                        ScratchCountMax = nameof(ScratchCountMax),
                        ScratchCountMin = nameof(ScratchCountMin),
                        ScratchCountStdDev = nameof(ScratchCountStdDev),
                        ScratchTotal = nameof(ScratchTotal),
                        ScratchTotalAvg = nameof(ScratchTotalAvg),
                        ScratchTotalMax = nameof(ScratchTotalMax),
                        ScratchTotalMin = nameof(ScratchTotalMin),
                        ScratchTotalStdDev = nameof(ScratchTotalStdDev),
                        Sort = nameof(Sort),
                        StdDev = nameof(StdDev),
                        SumOfDefects = nameof(SumOfDefects),
                        SumOfDefectsAvg = nameof(SumOfDefectsAvg),
                        SumOfDefectsMax = nameof(SumOfDefectsMax),
                        SumOfDefectsMin = nameof(SumOfDefectsMin),
                        SumOfDefectsStdDev = nameof(SumOfDefectsStdDev),
                        Thresh = nameof(Thresh),
                        Thruput = nameof(Thruput),
                        //
                        Data = nameof(Data),
                        Parameters = nameof(Parameters)
                    };
                    return result;
                }
                // Builds one Description per parsed detail row, merging header-level
                // aggregates with the row's values; falls back to GetDefault when the
                // parse produced no usable header/details.
                public List<IProcessDataDescription> GetDescription(ILogic logic, ConfigDataBase configDataBase, List<Test> tests, IProcessData iProcessData)
                {
                    List<IProcessDataDescription> results = new List<IProcessDataDescription>();
                    if (iProcessData is null || !(iProcessData is ProcessData processData) || processData.Header is null || !processData.Details.Any())
                        results.Add(GetDefault(logic, configDataBase));
                    else
                    {
                        string nullData;
                        Description description;
                        object configDataNullData = ConfigData.NullData;
                        if (configDataNullData is null)
                            nullData = string.Empty;
                        else
                            nullData = configDataNullData.ToString();
                        for (int i = 0; i < processData.Details.Count; i++)
                        {
                            description = new Description
                            {
                                Test = (int)tests[i],
                                Count = tests.Count,
                                Index = i,
                                //
                                EventName = GetEventName(),
                                NullData = nullData,
                                JobID = logic.Logistics.JobID,
                                Sequence = logic.Logistics.Sequence.ToString(),
                                MesEntity = logic.Logistics.MesEntity,
                                ReportFullPath = logic.Logistics.ReportFullPath,
                                ProcessJobID = logic.Logistics.ProcessJobID,
                                MID = logic.Logistics.MID,
                                //
                                Date = processData.Header.Date,
                                // NOTE(review): Employee is filled from Header.PSN
                                // (HeaderFile has no Employee field) - confirm intended.
                                Employee = processData.Header.PSN,
                                Lot = processData.Header.Lot,
                                PSN = processData.Header.PSN,
                                Reactor = processData.Header.Reactor,
                                Recipe = processData.Header.Recipe,
                                //
                                Comments = processData.Details[i].Comments,
                                Diameter = processData.Details[i].Diameter,
                                Exclusion = processData.Details[i].Exclusion,
                                Gain = processData.Details[i].Gain,
                                HeaderUniqueId = processData.Details[i].HeaderUniqueId,
                                Laser = processData.Details[i].Laser,
                                ParseErrorText = processData.Header.ParseErrorText,
                                RDS = processData.Header.RDS,
                                Slot = processData.Details[i].Slot,
                                UniqueId = processData.Details[i].UniqueId,
                                //
                                AreaCount = processData.Details[i].AreaCount,
                                AreaCountAvg = processData.Header.AreaCountAvg,
                                AreaCountMax = processData.Header.AreaCountMax,
                                AreaCountMin = processData.Header.AreaCountMin,
                                AreaCountStdDev = processData.Header.AreaCountStdDev,
                                AreaTotal = processData.Details[i].AreaTotal,
                                AreaTotalAvg = processData.Header.AreaTotalAvg,
                                AreaTotalMax = processData.Header.AreaTotalMax,
                                AreaTotalMin = processData.Header.AreaTotalMin,
                                AreaTotalStdDev = processData.Header.AreaTotalStdDev,
                                Bin1 = processData.Details[i].Bin1,
                                Bin2 = processData.Details[i].Bin2,
                                Bin3 = processData.Details[i].Bin3,
                                Bin4 = processData.Details[i].Bin4,
                                Bin5 = processData.Details[i].Bin5,
                                Bin6 = processData.Details[i].Bin6,
                                Bin7 = processData.Details[i].Bin7,
                                Bin8 = processData.Details[i].Bin8,
                                HazeAverage = processData.Details[i].HazeAverage,
                                HazeAverageAvg = processData.Header.HazeAverageAvg,
                                HazeAverageMax = processData.Header.HazeAverageMax,
                                HazeAverageMin = processData.Header.HazeAverageMin,
                                HazeAverageStdDev = processData.Header.HazeAverageStdDev,
                                HazePeak = processData.Details[i].HazePeak,
                                HazeRegion = processData.Details[i].HazeRegion,
                                HazeRegionAvg = processData.Header.HazeRegionAvg,
                                HazeRegionMax = processData.Header.HazeRegionMax,
                                HazeRegionMin = processData.Header.HazeRegionMin,
                                HazeRegionStdDev = processData.Header.HazeRegionStdDev,
                                HazeRng = processData.Details[i].HazeRng,
                                LPDCM2 = processData.Details[i].LPDCM2,
                                LPDCM2Avg = processData.Header.LPDCM2Avg,
                                LPDCM2Max = processData.Header.LPDCM2Max,
                                LPDCM2Min = processData.Header.LPDCM2Min,
                                LPDCM2StdDev = processData.Header.LPDCM2StdDev,
                                LPDCount = processData.Details[i].LPDCount,
                                LPDCountAvg = processData.Header.LPDCountAvg,
                                LPDCountMax = processData.Header.LPDCountMax,
                                LPDCountMin = processData.Header.LPDCountMin,
                                LPDCountStdDev = processData.Header.LPDCountStdDev,
                                Mean = processData.Details[i].Mean,
                                ScratchCount = processData.Details[i].ScratchCount,
                                ScratchCountAvg = processData.Header.ScratchCountAvg,
                                ScratchCountMax = processData.Header.ScratchCountMax,
                                ScratchCountMin = processData.Header.ScratchCountMin,
                                ScratchCountStdDev = processData.Header.ScratchCountStdDev,
                                ScratchTotal = processData.Details[i].ScratchTotal,
                                ScratchTotalAvg = processData.Header.ScratchTotalAvg,
                                ScratchTotalMax = processData.Header.ScratchTotalMax,
                                ScratchTotalMin = processData.Header.ScratchTotalMin,
                                ScratchTotalStdDev = processData.Header.ScratchTotalStdDev,
                                Sort = processData.Details[i].Sort,
                                StdDev = processData.Details[i].StdDev,
                                SumOfDefects = processData.Details[i].SumOfDefects,
                                SumOfDefectsAvg = processData.Header.SumOfDefectsAvg,
                                SumOfDefectsMax = processData.Header.SumOfDefectsMax,
                                SumOfDefectsMin = processData.Header.SumOfDefectsMin,
                                SumOfDefectsStdDev = processData.Header.SumOfDefectsStdDev,
                                Thresh = processData.Details[i].Thresh,
                                Thruput = processData.Details[i].Thruput
                            };
                            results.Add(description);
                        }
                    }
                    return results;
                }
            }
        }
    }
}

View File

@ -1,61 +0,0 @@
namespace Adaptation.Helpers
{
    public partial class ProcessData
    {
        /// <summary>
        /// Flat data container for the run-level header record: lot/reactor
        /// identification plus Avg/Max/Min/StdDev aggregates across the wafers.
        /// All values are carried as strings exactly as parsed.
        /// </summary>
        public class HeaderFile
        {
            public string JobID { get; set; }
            public string MesEntity { get; set; }
            // Aggregate statistics over the per-wafer detail records.
            public string AreaCountAvg { get; set; }
            public string AreaCountMax { get; set; }
            public string AreaCountMin { get; set; }
            public string AreaCountStdDev { get; set; }
            public string AreaTotalAvg { get; set; }
            public string AreaTotalMax { get; set; }
            public string AreaTotalMin { get; set; }
            public string AreaTotalStdDev { get; set; }
            public string Date { get; set; }
            public string HazeAverageAvg { get; set; }
            public string HazeAverageMax { get; set; }
            public string HazeAverageMin { get; set; }
            public string HazeAverageStdDev { get; set; }
            public string HazeRegionAvg { get; set; }
            public string HazeRegionMax { get; set; }
            public string HazeRegionMin { get; set; }
            public string HazeRegionStdDev { get; set; }
            public string LPDCM2Avg { get; set; }
            public string LPDCM2Max { get; set; }
            public string LPDCM2Min { get; set; }
            public string LPDCM2StdDev { get; set; }
            public string LPDCountAvg { get; set; }
            public string LPDCountMax { get; set; }
            public string LPDCountMin { get; set; }
            public string LPDCountStdDev { get; set; }
            // Run identification.
            public string Lot { get; set; }
            // Populated when the source report could not be parsed cleanly.
            public string ParseErrorText { get; set; }
            public string PSN { get; set; }
            public string RDS { get; set; }
            public string Reactor { get; set; }
            public string Recipe { get; set; }
            public string ScratchCountAvg { get; set; }
            public string ScratchCountMax { get; set; }
            public string ScratchCountMin { get; set; }
            public string ScratchCountStdDev { get; set; }
            public string ScratchTotalAvg { get; set; }
            public string ScratchTotalMax { get; set; }
            public string ScratchTotalMin { get; set; }
            public string ScratchTotalStdDev { get; set; }
            public string SumOfDefectsAvg { get; set; }
            public string SumOfDefectsMax { get; set; }
            public string SumOfDefectsMin { get; set; }
            public string SumOfDefectsStdDev { get; set; }
            public string UniqueId { get; set; }
        }
    }
}

View File

@ -1,203 +0,0 @@
using Adaptation.Shared.Metrology;
using System;
using System.Collections.Generic;
using System.Linq;
namespace Adaptation.Helpers
{
    public partial class ProcessData
    {
        /// <summary>
        /// Request payload assembled from FileRead descriptions: header-level
        /// aggregates on the request itself plus one DataFile entry per wafer in
        /// <see cref="Details"/>. Presumably serialized to JSON for a metrology
        /// web service - property names/shape are part of that contract.
        /// </summary>
        public class WSRequest
        {
            public long Id { get; set; }
            public string AreaCountAvg { get; set; }
            public string AreaCountMax { get; set; }
            public string AreaCountMin { get; set; }
            public string AreaCountStdDev { get; set; }
            public string AreaTotalAvg { get; set; }
            public string AreaTotalMax { get; set; }
            public string AreaTotalMin { get; set; }
            public string AreaTotalStdDev { get; set; }
            public string Date { get; set; }
            public string HazeAverageAvg { get; set; }
            public string HazeAverageMax { get; set; }
            public string HazeAverageMin { get; set; }
            public string HazeAverageStdDev { get; set; }
            public string HazeRegionAvg { get; set; }
            public string HazeRegionMax { get; set; }
            public string HazeRegionMin { get; set; }
            public string HazeRegionStdDev { get; set; }
            public string Layer { get; set; }
            public string LotID { get; set; }
            public string LPDCM2Avg { get; set; }
            public string LPDCM2Max { get; set; }
            public string LPDCM2Min { get; set; }
            public string LPDCM2StdDev { get; set; }
            public string LPDCountAvg { get; set; }
            public string LPDCountMax { get; set; }
            public string LPDCountMin { get; set; }
            public string LPDCountStdDev { get; set; }
            // NOTE(review): never assigned by the constructor below - confirm
            // whether the service expects it populated.
            public string Operator { get; set; }
            public string ParseErrorText { get; set; }
            public string PSN { get; set; }
            public string RDS { get; set; }
            public string Reactor { get; set; }
            public string Recipe { get; set; }
            public string ScratchCountAvg { get; set; }
            public string ScratchCountMax { get; set; }
            public string ScratchCountMin { get; set; }
            public string ScratchCountStdDev { get; set; }
            public string ScratchTotalAvg { get; set; }
            public string ScratchTotalMax { get; set; }
            public string ScratchTotalMin { get; set; }
            public string ScratchTotalStdDev { get; set; }
            public string SumOfDefectsAvg { get; set; }
            public string SumOfDefectsMax { get; set; }
            public string SumOfDefectsMin { get; set; }
            public string SumOfDefectsStdDev { get; set; }
            public string Title { get; set; }
            public string UniqueId { get; set; }
            public string Zone { get; set; }
            public string CellName { get; set; }
            public string Data { get; set; }
            // NOTE(review): lower-case name triggers IDE1006 (see the repo's
            // .vscode/format-report.json); renaming may change the serialized
            // payload shape - confirm consumers before fixing.
            public int i { get; set; }
            // One entry per wafer; populated by the internal constructor.
            public List<DataFile> Details { get; protected set; }
            [Obsolete("For json")] public WSRequest() { }
            /// <summary>
            /// Builds the request from parsed descriptions: header aggregates are
            /// copied from descriptions[0], then one DataFile is added per description.
            /// </summary>
            internal WSRequest(ILogic logic, List<FileRead.Description> descriptions)
            {
                i = -1;
                Id = 0;
                Zone = null;
                Layer = null;
                Title = null;
                Data = "*Data*";
                Details = new List<DataFile>();
                CellName = logic.Logistics.MesEntity;
                FileRead.Description x = descriptions[0];
                //Header
                {
                    AreaCountAvg = x.AreaCountAvg;
                    AreaCountMax = x.AreaCountMax;
                    AreaCountMin = x.AreaCountMin;
                    AreaCountStdDev = x.AreaCountStdDev;
                    AreaTotalAvg = x.AreaTotalAvg;
                    AreaTotalMax = x.AreaTotalMax;
                    AreaTotalMin = x.AreaTotalMin;
                    AreaTotalStdDev = x.AreaTotalStdDev;
                    Date = x.Date;
                    HazeAverageAvg = x.HazeAverageAvg;
                    HazeAverageMax = x.HazeAverageMax;
                    HazeAverageMin = x.HazeAverageMin;
                    HazeAverageStdDev = x.HazeAverageStdDev;
                    HazeRegionAvg = x.HazeRegionAvg;
                    HazeRegionMax = x.HazeRegionMax;
                    HazeRegionMin = x.HazeRegionMin;
                    HazeRegionStdDev = x.HazeRegionStdDev;
                    LotID = x.Lot;
                    LPDCM2Avg = x.LPDCM2Avg;
                    LPDCM2Max = x.LPDCM2Max;
                    LPDCM2Min = x.LPDCM2Min;
                    LPDCM2StdDev = x.LPDCM2StdDev;
                    LPDCountAvg = x.LPDCountAvg;
                    LPDCountMax = x.LPDCountMax;
                    LPDCountMin = x.LPDCountMin;
                    LPDCountStdDev = x.LPDCountStdDev;
                    ParseErrorText = x.ParseErrorText;
                    PSN = x.PSN;
                    RDS = x.RDS;
                    Reactor = x.Reactor;
                    Recipe = x.Recipe;
                    ScratchCountAvg = x.ScratchCountAvg;
                    ScratchCountMax = x.ScratchCountMax;
                    ScratchCountMin = x.ScratchCountMin;
                    ScratchCountStdDev = x.ScratchCountStdDev;
                    ScratchTotalAvg = x.ScratchTotalAvg;
                    ScratchTotalMax = x.ScratchTotalMax;
                    ScratchTotalMin = x.ScratchTotalMin;
                    ScratchTotalStdDev = x.ScratchTotalStdDev;
                    SumOfDefectsAvg = x.SumOfDefectsAvg;
                    SumOfDefectsMax = x.SumOfDefectsMax;
                    SumOfDefectsMin = x.SumOfDefectsMin;
                    SumOfDefectsStdDev = x.SumOfDefectsStdDev;
                    UniqueId = x.UniqueId;
                }
                DataFile dataFile;
                // One detail entry per description (per wafer/slot).
                foreach (FileRead.Description description in descriptions)
                {
                    dataFile = new DataFile
                    {
                        Data = "*Data*",
                        i = -1,
                        Id = 0, //item.Id,
                        AreaCount = description.AreaCount,
                        AreaTotal = description.AreaTotal,
                        Bin1 = description.Bin1,
                        Bin2 = description.Bin2,
                        Bin3 = description.Bin3,
                        Bin4 = description.Bin4,
                        Bin5 = description.Bin5,
                        Bin6 = description.Bin6,
                        Bin7 = description.Bin7,
                        Bin8 = description.Bin8,
                        Comments = description.Comments,
                        Date = description.Date,
                        Diameter = description.Diameter,
                        Exclusion = description.Exclusion,
                        Gain = description.Gain,
                        HazeAverage = description.HazeAverage,
                        HazePeak = description.HazePeak,
                        HazeRegion = description.HazeRegion,
                        HazeRng = description.HazeRng,
                        HeaderUniqueId = description.HeaderUniqueId,
                        LPDCM2 = description.LPDCM2,
                        LPDCount = description.LPDCount,
                        Laser = description.Laser,
                        Mean = description.Mean,
                        Recipe = description.Recipe,
                        ScratchCount = description.ScratchCount,
                        ScratchTotal = description.ScratchTotal,
                        Slot = description.Slot,
                        Sort = description.Sort,
                        StdDev = description.StdDev,
                        SumOfDefects = description.SumOfDefects,
                        Thresh = description.Thresh,
                        Thruput = description.Thruput,
                        Title = null,
                        UniqueId = description.UniqueId
                    };
                    Details.Add(dataFile);
                }
                // Overwrites the header Date copied above with the sequence-derived
                // timestamp from logistics.
                Date = logic.Logistics.DateTimeFromSequence.ToString();
                if (UniqueId is null && Details.Any())
                    UniqueId = Details[0].HeaderUniqueId;
                // Normalize empty bin strings to null (presumably so they are
                // emitted as JSON null rather than empty strings - confirm).
                for (int i = 0; i < Details.Count; i++)
                {
                    if (string.IsNullOrEmpty(Details[i].Bin1))
                        Details[i].Bin1 = null;
                    if (string.IsNullOrEmpty(Details[i].Bin2))
                        Details[i].Bin2 = null;
                    if (string.IsNullOrEmpty(Details[i].Bin3))
                        Details[i].Bin3 = null;
                    if (string.IsNullOrEmpty(Details[i].Bin4))
                        Details[i].Bin4 = null;
                    if (string.IsNullOrEmpty(Details[i].Bin5))
                        Details[i].Bin5 = null;
                    if (string.IsNullOrEmpty(Details[i].Bin6))
                        Details[i].Bin6 = null;
                    if (string.IsNullOrEmpty(Details[i].Bin7))
                        Details[i].Bin7 = null;
                    if (string.IsNullOrEmpty(Details[i].Bin8))
                        Details[i].Bin8 = null;
                }
            }
        }
    }
}

View File

@ -1,912 +0,0 @@
using Adaptation.Shared;
using Adaptation.Shared.Metrology;
using log4net;
using System;
using System.Collections.Generic;
using System.Diagnostics;
using System.IO;
using System.Linq;
using System.Text;
using System.Text.Json;
using System.Text.RegularExpressions;
namespace Adaptation.Helpers
{
public partial class ProcessData : IProcessData
{
public HeaderFile Header { get; private set; }
public List<DataFile> Details { get; private set; }
// Parser state - used by Parse/other members outside this view.
private int _I;
private readonly ILog _Log;
private string _Data;
/// <summary>
/// Parses the source report immediately on construction, populating
/// <see cref="Header"/> and <see cref="Details"/> and (via Parse) the
/// caller's fileInfoCollection, which is cleared first.
/// </summary>
public ProcessData(ILogic logic, ConfigData configData, List<FileInfo> fileInfoCollection)
{
    Header = null;
    // Caller's list is reset here; Parse presumably repopulates it.
    fileInfoCollection.Clear();
    _I = 0;
    _Data = string.Empty;
    Details = new List<DataFile>();
    _Log = LogManager.GetLogger(typeof(ProcessData));
    Tuple<HeaderFile, List<DataFile>> tuple = Parse(logic, configData, fileInfoCollection);
    Details.AddRange(tuple.Item2);
    Header = tuple.Item1;
}
/// <summary>
/// Builds the event result: one Tencor test per parsed detail, the matching
/// descriptions serialized to a JsonElement, and the accumulated files.
/// Throws on any event/description mismatch.
/// </summary>
public Tuple<string, JsonElement?, List<FileInfo>> GetResults(ILogic logic, ConfigDataBase configDataBase, List<FileInfo> fileInfoCollection)
{
    Tuple<string, JsonElement?, List<FileInfo>> results;
    if (!(configDataBase is ConfigData configData))
        throw new Exception();
    List<Test> tests = new List<Test>();
    List<IProcessDataDescription> descriptions;
    EventName eventName = configData.GetEventNameValue();
    // Only the FileRead event with at least one detail is supported.
    if (eventName == EventName.FileRead && Details.Any())
    {
        foreach (DataFile item in Details)
            tests.Add(Test.Tencor);
        descriptions = configData.GetDescription(logic, tests, this);
    }
    else
        throw new Exception();
    if (!configData.EafHosted)
    {
        // NOTE(review): return value discarded; presumably run for its side
        // effects when not EAF-hosted - confirm.
        new FileRead.Description().GetDescription(logic, configData, tests, this);
    }
    // Sanity checks: descriptions must line up one-to-one with tests.
    if (tests.Count != descriptions.Count)
        throw new Exception();
    for (int i = 0; i < tests.Count; i++)
    {
        if (descriptions[i].Test != (int)tests[i])
            throw new Exception();
    }
    string json;
    // Serialize with the concrete list type so derived-type members are included.
    if (descriptions[0] is Duplicator.Description)
    {
        List<Duplicator.Description> duplicatorDescriptions = (from l in descriptions select (Duplicator.Description)l).ToList();
        json = JsonSerializer.Serialize(duplicatorDescriptions, duplicatorDescriptions.GetType());
    }
    else if (descriptions[0] is FileRead.Description)
    {
        List<FileRead.Description> fileReadDescriptions = (from l in descriptions select (FileRead.Description)l).ToList();
        json = JsonSerializer.Serialize(fileReadDescriptions, fileReadDescriptions.GetType());
    }
    else
        throw new Exception();
    // Round-trip to a JsonElement for the result tuple.
    object @object = JsonSerializer.Deserialize<object>(json);
    if (!(@object is JsonElement jsonElement))
        throw new Exception();
    results = new Tuple<string, JsonElement?, List<FileInfo>>(logic.Logistics.Logistics1[0], jsonElement, fileInfoCollection);
    return results;
}
/// <summary>
/// Groups descriptions by Test via ConfigData and validates the grouping.
/// NOTE(review): the <paramref name="jsonElement"/> parameter is unused here;
/// kept for signature compatibility with callers - confirm before removing.
/// </summary>
public static Dictionary<Test, List<Duplicator.Description>> GetKeyValuePairs(ConfigData configData, JsonElement jsonElement, List<Duplicator.Description> processDataDescriptions, bool extra = false)
{
    Dictionary<Test, List<Duplicator.Description>> results = configData.GetKeyValuePairs(processDataDescriptions);
    configData.CheckProcessDataDescription(results, extra);
    return results;
}
/// <summary>
/// Extracts only the FileRead-flavored descriptions from the deserialized
/// payload; any other IProcessDataDescription implementations are dropped.
/// </summary>
public static List<FileRead.Description> GetProcessDataFileReadDescriptions(ConfigData configData, JsonElement jsonElement)
{
    List<FileRead.Description> results = new List<FileRead.Description>();
    List<IProcessDataDescription> candidates = configData.GetIProcessDataDescriptions(jsonElement);
    foreach (IProcessDataDescription candidate in candidates)
    {
        // Positive type-pattern: keep only FileRead.Description instances.
        if (candidate is FileRead.Description description)
            results.Add(description);
    }
    return results;
}
/// <summary>
/// Renders descriptions as an export line(-set): either the semicolon-delimited
/// GaN PPTST "Particle Adder" layout (padded to exactly eight sample rows), or
/// a single tab-delimited header-statistics line (columns 001-043).
/// </summary>
public static string GetLines(ILogic logic, List<FileRead.Description> descriptions, bool ganPPTST)
{
    StringBuilder result = new StringBuilder();
    FileRead.Description x = descriptions[0];
    if (ganPPTST)
    {
        string slot;
        string reactor;
        const int eight = 8;
        DateTime dateTime = DateTime.Parse(x.Date);
        // Lot id with prefix codes and dashes stripped.
        string lot = x.Lot.ToLower().Replace("69-", string.Empty).Replace("71-", string.Empty).Replace("-", string.Empty);
        // Reactor code is "R" + the first two characters of the lot.
        if (string.IsNullOrEmpty(x.Lot) || x.Lot.Length < 2)
            reactor = "R";
        else
            reactor = string.Concat("R", x.Lot.Substring(0, 2));
        result.Append(nameof(x.Date)).Append(";").
            Append("Part").Append(";").
            Append(nameof(x.Reactor)).Append(";").
            Append("Lot").Append(";").
            Append(nameof(DataFile.Slot)).Append(";").
            Append(nameof(DataFile.Bin1)).Append(";").
            Append(nameof(DataFile.Bin2)).Append(";").
            Append(nameof(DataFile.Bin3)).Append(";").
            Append(nameof(DataFile.Bin4)).Append(";").
            Append(nameof(DataFile.Bin5)).Append(";").
            Append(nameof(DataFile.Bin6)).Append(";").
            Append("Bin9").
            AppendLine();
        foreach (FileRead.Description description in descriptions)
        {
            slot = description.Slot.Replace("*", string.Empty);
            result.Append("!").Append(dateTime.ToString("MM/dd/yyyy HH:mm:ss")).Append(";").
                Append("Particle Adder;").
                Append(reactor).Append(";").
                Append(lot).Append(";").
                Append(slot).Append(";").
                Append(description.Bin1).Append(";").
                Append(description.Bin2).Append(";").
                Append(description.Bin3).Append(";").
                Append(description.Bin4).Append(";").
                Append(description.Bin5).Append(";").
                Append(description.Bin6).Append(";").
                Append(description.AreaCount).
                AppendLine();
        }
        if (descriptions.Count != eight)
        {
            // Pad missing samples with -10000 sentinel rows up to eight.
            string negativeTenThousand = "-10000";
            for (int i = descriptions.Count; i < eight; i++)
            {
                result.Append("!").Append(dateTime.ToString("MM/dd/yyyy HH:mm:ss")).Append(";").
                    Append("Particle Adder;").
                    Append(reactor).Append(";").
                    Append(lot).Append(";").
                    Append(negativeTenThousand).Append(";").
                    Append(negativeTenThousand).Append(";").
                    Append(negativeTenThousand).Append(";").
                    Append(negativeTenThousand).Append(";").
                    Append(negativeTenThousand).Append(";").
                    Append(negativeTenThousand).Append(";").
                    Append(negativeTenThousand).Append(";").
                    Append(negativeTenThousand).
                    AppendLine();
            }
        }
        if (result.ToString().Split('\n').Length != (eight + 2))
            throw new Exception(string.Concat("Must have ", eight, " samples"));
    }
    else
    {
        char del = '\t';
        result.Append(x.AreaCountAvg).Append(del). // 001 - AreaCountAvg
            Append(x.AreaCountMax).Append(del). // 002 - AreaCountMax
            Append(x.AreaCountMin).Append(del). // 003 - AreaCountMin
            Append(x.AreaCountStdDev).Append(del). // 004 - AreaCountStdDev
            Append(x.AreaTotalAvg).Append(del). // 005 - AreaTotalAvg
            Append(x.AreaTotalMax).Append(del). // 006 - AreaTotalMax
            Append(x.AreaTotalMin).Append(del). // 007 - AreaTotalMin
            Append(x.AreaTotalStdDev).Append(del). // 008 - AreaTotalStdDev
            Append(x.Date).Append(del). // 009 -
            Append(x.HazeAverageAvg).Append(del). // 010 - Haze Average
            Append(x.HazeAverageMax).Append(del). // 011 -
            Append(x.HazeAverageMin).Append(del). // 012 -
            Append(x.HazeAverageStdDev).Append(del). // 013 -
            Append(x.HazeRegionAvg).Append(del). // 014 -
            Append(x.HazeRegionMax).Append(del). // 015 -
            Append(x.HazeRegionMin).Append(del). // 016 -
            Append(x.HazeRegionStdDev).Append(del). // 017 -
            Append(x.Lot).Append(del). // 018 -
            Append(x.LPDCM2Avg).Append(del). // 019 -
            Append(x.LPDCM2Max).Append(del). // 020 -
            Append(x.LPDCM2Min).Append(del). // 021 -
            Append(x.LPDCM2StdDev).Append(del). // 022 -
            Append(x.LPDCountAvg).Append(del). // 023 -
            Append(x.LPDCountMax).Append(del). // 024 -
            Append(x.LPDCountMin).Append(del). // 025 - fixed: was LPDCM2Min (copy/paste; LPDCM2Min already emitted at 021)
            Append(x.LPDCountStdDev).Append(del). // 026 -
            Append(x.Employee).Append(del). // 027 -
            Append(x.RDS).Append(del). // 028 - Lot
            Append(x.Reactor).Append(del). // 029 - Process
            Append(x.Recipe.Replace(";", string.Empty)).Append(del). // 030 - Part
            Append(x.ScratchCountAvg).Append(del). // 031 - Scratch Count
            Append(x.ScratchCountMax).Append(del). // 032 -
            Append(x.ScratchCountMin).Append(del). // 033 -
            Append(x.ScratchCountStdDev).Append(del). // 034 - fixed: was ScratchTotalStdDev (copy/paste; ScratchTotalStdDev already emitted at 038)
            Append(x.ScratchTotalAvg).Append(del). // 035 - Scratch Length
            Append(x.ScratchTotalMax).Append(del). // 036 -
            Append(x.ScratchTotalMin).Append(del). // 037 -
            Append(x.ScratchTotalStdDev).Append(del). // 038 -
            Append(x.SumOfDefectsAvg).Append(del). // 039 - Average Sum of Defects
            Append(x.SumOfDefectsMax).Append(del). // 040 - Max Sum of Defects
            Append(x.SumOfDefectsMin).Append(del). // 041 - Min Sum of Defects
            Append(x.SumOfDefectsStdDev).Append(del). // 042 - SumOfDefectsStdDev
            Append(logic.Logistics.MesEntity).Append(del). // 043 -
            AppendLine();
    }
    return result.ToString();
}
/// <summary>
/// Rewrites the lot-summary "_data.pdf" in place: every COSArray text run that ends with a
/// "]" string gets the reactor load-lock text (taken from descriptions[0].Comments) spliced
/// into its last element. The document is only saved when at least one token was updated.
/// </summary>
/// <param name="descriptions">Parsed wafer descriptions; only [0].Comments is read here.</param>
/// <param name="checkFileName">Full path of the PDF to update in place.</param>
private static void UpdateDataPDF(List<FileRead.Description> descriptions, string checkFileName)
{
    string value;
    object possiblePage;
    object possibleString;
    object possibleCOSArray;
    java.util.List tokenList;
    java.util.List arrayList;
    java.io.OutputStream outputStream;
    java.util.ListIterator tokenIterator;
    java.util.ListIterator arrayIterator;
    List<string> updateValues = new List<string>();
    string reactorLoadLock = descriptions[0].Comments;
    StringBuilder stringBuilder = new StringBuilder();
    java.io.File file = new java.io.File(checkFileName);
    org.apache.pdfbox.pdmodel.common.PDStream pdStream;
    org.apache.pdfbox.pdmodel.common.PDStream updatedStream;
    org.apache.pdfbox.pdfparser.PDFStreamParser pdfStreamParser;
    org.apache.pdfbox.pdfwriter.ContentStreamWriter contentStreamWriter;
    org.apache.pdfbox.pdmodel.PDDocument pdDocument = org.apache.pdfbox.pdmodel.PDDocument.load(file);
    org.apache.pdfbox.pdmodel.PDDocumentCatalog pdDocumentCatalog = pdDocument.getDocumentCatalog();
    java.util.List pagesList = pdDocumentCatalog.getAllPages();
    java.util.ListIterator pageIterator = pagesList.listIterator();
    // Bounded counters guard against a malformed iterator never reporting exhaustion;
    // the normal exit for every loop is the hasNext() break.
    for (short i = 1; i < short.MaxValue; i++)
    {
        if (!pageIterator.hasNext())
            break;
        possiblePage = pageIterator.next();
        if (!(possiblePage is org.apache.pdfbox.pdmodel.PDPage page))
            continue;
        pdStream = page.getContents();
        pdfStreamParser = new org.apache.pdfbox.pdfparser.PDFStreamParser(pdStream);
        pdfStreamParser.parse();
        tokenList = pdfStreamParser.getTokens();
        tokenIterator = tokenList.listIterator();
        // BUGFIX: the inner loop conditions previously tested the outer counter `i`
        // instead of `t` / `a` (copy-paste typo); they now test their own counters.
        for (short t = 1; t < short.MaxValue; t++)
        {
            if (!tokenIterator.hasNext())
                break;
            possibleCOSArray = tokenIterator.next();
            if (!(possibleCOSArray is org.apache.pdfbox.cos.COSArray cossArray))
                continue;
            stringBuilder.Clear();
            arrayList = cossArray.toList();
            arrayIterator = arrayList.listIterator();
            for (short a = 1; a < short.MaxValue; a++)
            {
                if (!arrayIterator.hasNext())
                    break;
                possibleString = arrayIterator.next();
                if (!(possibleString is org.apache.pdfbox.cos.COSString cossString))
                    continue;
                value = cossString.getString();
                stringBuilder.Append(value);
                // Only the element that closes a bracketed run ("]") is rewritten.
                if (value != "]")
                    continue;
                updateValues.Add(value);
                value = stringBuilder.ToString();
                // `a` is 1-based relative to the iterator, so a - 1 is the current element index.
                if (value.Contains("[]"))
                    cossArray.setString(a - 1, string.Concat("*", reactorLoadLock, "]"));
                else
                    cossArray.setString(a - 1, string.Concat(" {*", reactorLoadLock, "}]"));
            }
        }
        // NOTE(review): updateValues accumulates across pages, so once any page has been
        // updated every later page's stream is rewritten too — presumably intentional
        // since the tokens are re-serialized unchanged; confirm if pages ever diverge.
        if (updateValues.Any())
        {
            updatedStream = new org.apache.pdfbox.pdmodel.common.PDStream(pdDocument);
            outputStream = updatedStream.createOutputStream();
            contentStreamWriter = new org.apache.pdfbox.pdfwriter.ContentStreamWriter(outputStream);
            contentStreamWriter.writeTokens(tokenList);
            outputStream.close();
            page.setContents(updatedStream);
        }
    }
    if (updateValues.Any())
        pdDocument.save(checkFileName);
    pdDocument.close();
}
/// <summary>
/// Uploads the header PDF and per-wafer image/data PDFs for a previously posted
/// Metrology Viewer header. The header ID is recovered from the WS.Results JSON
/// memory file written when the header was posted.
/// </summary>
/// <exception cref="Exception">Thrown when the match directory does not contain exactly one
/// .pcl file, or when the WS.Results memory file is missing.</exception>
internal static void PostOpenInsightMetrologyViewerAttachments(ILog log, ConfigData configData, Logistics logistics, DateTime dateTime, string logisticsSequenceMemoryDirectory, List<FileRead.Description> descriptions, string matchDirectory)
{
    string[] pclFiles = Directory.GetFiles(matchDirectory, "*.pcl", SearchOption.TopDirectoryOnly);
    if (pclFiles.Length != 1)
        throw new Exception("Invalid source file count!");
    string sourceFileNameNoExt = Path.GetFileNameWithoutExtension(pclFiles[0]);
    string wsResultsMemoryFile = string.Concat(logisticsSequenceMemoryDirectory, @"\", nameof(WS.Results), ".json");
    if (!File.Exists(wsResultsMemoryFile))
        throw new Exception(string.Concat("Memory file <", wsResultsMemoryFile, "> doesn't exist!"));
    string json = File.ReadAllText(wsResultsMemoryFile);
    WS.Results metrologyWSRequest = JsonSerializer.Deserialize<WS.Results>(json);
    long wsResultsHeaderID = metrologyWSRequest.HeaderID;
    List<WS.Attachment> headerAttachments = new List<WS.Attachment>();
    List<WS.Attachment> dataAttachments = new List<WS.Attachment>();
    // Header attachment: the lot-summary PDF, updated with the reactor load-lock first.
    string headerPdfFile = string.Concat(matchDirectory, @"\", sourceFileNameNoExt, "_data.pdf");
    if (!File.Exists(headerPdfFile))
        log.Debug("Header file doesn't exist!");
    else
    {
        UpdateDataPDF(descriptions, headerPdfFile);
        headerAttachments.Add(new WS.Attachment(descriptions[0].HeaderUniqueId, "Data.pdf", headerPdfFile));
    }
    // Per-wafer attachments: an image PDF and a data PDF per slot, when present.
    foreach (FileRead.Description description in descriptions)
    {
        string slotToken = description.Slot.Replace('*', 's');
        string imagePdfFile = string.Concat(matchDirectory, @"\", sourceFileNameNoExt, "_", slotToken, "_image.pdf");
        if (File.Exists(imagePdfFile))
            dataAttachments.Add(new WS.Attachment(description.UniqueId, "Image.pdf", imagePdfFile));
        string dataPdfFile = string.Concat(matchDirectory, @"\", sourceFileNameNoExt, "_", slotToken, "_data.pdf");
        if (File.Exists(dataPdfFile))
            dataAttachments.Add(new WS.Attachment(description.UniqueId, "Data.pdf", dataPdfFile));
    }
    // Mismatch is logged but not fatal; the upload is still attempted.
    if (dataAttachments.Count == 0 || dataAttachments.Count != descriptions.Count)
        log.Debug("Invalid attachment count!");
    WS.AttachFiles(configData.OpenInsightMetrogyViewerAPI, wsResultsHeaderID, headerAttachments, dataAttachments);
}
/// <summary>
/// Convert the raw data file to parsable file format - in this case from PCL to PDF,
/// by shelling out to GhostPCL. The conversion is skipped when the PDF already exists.
/// </summary>
/// <param name="configData">Supplies the GhostPCL executable path.</param>
/// <param name="sourceFile">source file to be converted to PDF</param>
/// <returns>Full path of the resulting .pdf file.</returns>
/// <exception cref="Exception">Thrown when GhostPCL did not produce the PDF
/// (including when it was still running after the 30s wait).</exception>
private static string ConvertSourceFileToPdf(ConfigData configData, string sourceFile)
{
    string result = Path.ChangeExtension(sourceFile, ".pdf");
    if (!File.Exists(result))
    {
        //string arguments = string.Concat("-i \"", sourceFile, "\" -o \"", result, "\"");
        string arguments = string.Concat("-dSAFER -dBATCH -dNOPAUSE -sOutputFile=\"", result, "\" -sDEVICE=pdfwrite \"", sourceFile, "\"");
        //Process process = Process.Start(configData.LincPDFCFileName, arguments);
        // BUGFIX: Process is IDisposable and was never disposed; wrap in using.
        using (Process process = Process.Start(configData.GhostPCLFileName, arguments))
            process.WaitForExit(30000);
        if (!File.Exists(result))
            throw new Exception("PDF file wasn't created");
    }
    return result;
}
/// <summary>
/// Test and fix a data line from the Lot Summary page if there are two values that are merged.
/// When the line has fewer than the expected 9 columns, an over-wide token is split into two
/// using the known fixed column widths, working from the rightmost column backwards.
/// </summary>
/// <param name="toEol">data line from Lot Summary; replaced in place with the fixed array</param>
private void FixToEolArray(ref string[] toEol)
{
const int MAX_COLUMNS = 9;
// Fixed character width of each of the 9 summary columns, left to right.
int[] mColumnWidths = new int[MAX_COLUMNS] { 8, 6, 6, 6, 6, 7, 7, 5, 7 };
// is it short at least one data point
if (toEol.Length < MAX_COLUMNS)
{
_Log.Debug($"****FixToEolArray - Starting array:");
_Log.Debug(toEol);
_Log.Debug($"****FixToEolArray - Column widths:");
_Log.Debug(mColumnWidths);
string leftVal, rightVal;
// size up and assign a working list
List<string> toEolList = new List<string>(toEol);
if (string.IsNullOrEmpty(toEolList[toEolList.Count - 1]))
toEolList.RemoveAt(toEolList.Count - 1); // removes a null element at end
_Log.Debug($"****FixToEolArray - New toEolList:");
_Log.Debug(toEolList);
// pad to 9 entries so indices line up with mColumnWidths
for (int i = toEolList.Count; i < MAX_COLUMNS; i++)
toEolList.Insert(0, ""); // insert to top of list
_Log.Debug(toEolList);
// start at the end
for (int i = MAX_COLUMNS - 1; i >= 0; i--)
{
// test for a bad value - does it have too many characters
_Log.Debug($"****FixToEolArray - toEolList[i].Length: {toEolList[i].Length}, mColumnWidths[i]: {mColumnWidths[i]}");
if (toEolList[i].Length > mColumnWidths[i])
{
// split it up into its two parts: the excess on the left belongs to the previous column
leftVal = toEolList[i].Substring(0, toEolList[i].Length - mColumnWidths[i]);
rightVal = toEolList[i].Substring(leftVal.Length);
_Log.Debug($"****FixToEolArray - Split leftVal: {leftVal}");
_Log.Debug($"****FixToEolArray - Split rightVal: {rightVal}");
// insert new value
toEolList[i] = rightVal;
toEolList.Insert(i, leftVal);
if (string.IsNullOrEmpty(toEolList[0]))
toEolList.RemoveAt(0); // removes the empty padding element at the top, keeping the count at 9
_Log.Debug($"****FixToEolArray - Fixed toEolList:");
_Log.Debug(toEolList);
}
}
toEol = toEolList.ToArray();
_Log.Debug($"****FixToEolArray - Ending array:");
_Log.Debug(toEol);
}
}
/// <summary>
/// Advances the parse cursor (_I) to just past the next occurrence of <paramref name="text"/>
/// in _Data, or to the end of _Data when the text is not found.
/// </summary>
private void ScanPast(string text)
{
    int found = _Data.IndexOf(text, _I);
    _I = found > -1 ? found + text.Length : _Data.Length;
}
/// <summary>
/// Returns the trimmed text between the cursor (_I) and the next occurrence of
/// <paramref name="text"/>, advancing the cursor past the marker. When the marker
/// is absent, returns the trimmed remainder and moves the cursor to the end.
/// </summary>
private string GetBefore(string text)
{
    int found = _Data.IndexOf(text, _I);
    if (found < 0)
    {
        string remainder = _Data.Substring(_I);
        _I = _Data.Length;
        return remainder.Trim();
    }
    string captured = _Data.Substring(_I, found - _I);
    _I = found + text.Length;
    return captured.Trim();
}
/// <summary>
/// Same as <see cref="GetBefore(string)"/>, but leaves the captured text untrimmed
/// when <paramref name="trim"/> is false.
/// </summary>
private string GetBefore(string text, bool trim)
{
    if (trim)
        return GetBefore(text);
    int found = _Data.IndexOf(text, _I);
    if (found < 0)
    {
        string remainder = _Data.Substring(_I);
        _I = _Data.Length;
        return remainder;
    }
    string captured = _Data.Substring(_I, found - _I);
    _I = found + text.Length;
    return captured;
}
/// <summary>
/// True when every character of <paramref name="text"/> is whitespace
/// (an empty string counts as whitespace-only).
/// </summary>
private bool IsNullOrWhiteSpace(string text)
{
    foreach (char c in text)
    {
        if (!char.IsWhiteSpace(c))
            return false;
    }
    return true;
}
/// <summary>
/// True when the text from the cursor up to the next newline (or end of data)
/// contains only whitespace. Does not move the cursor.
/// </summary>
private bool IsBlankLine()
{
    int eol = _Data.IndexOf("\n", _I);
    string line = eol > -1 ? _Data.Substring(_I, eol - _I) : _Data.Substring(_I);
    return IsNullOrWhiteSpace(line);
}
/// <summary>Returns the trimmed remainder of the current line and advances past the newline.</summary>
private string GetToEOL() => GetBefore("\n");
/// <summary>Returns the remainder of the current line, optionally untrimmed.</summary>
private string GetToEOL(bool trim) => trim ? GetToEOL() : GetBefore("\n", false);
/// <summary>
/// Returns the trimmed text between the cursor and the next occurrence of
/// <paramref name="text"/> WITHOUT moving the cursor (unlike GetBefore).
/// </summary>
private string GetToText(string text)
{
    int found = _Data.IndexOf(text, _I);
    // BUGFIX: when the marker is absent IndexOf returns -1 and Substring threw
    // ArgumentOutOfRangeException on the negative length; now the remainder is
    // returned instead, consistent with GetBefore's not-found behavior.
    if (found < 0)
        return _Data.Substring(_I).Trim();
    return _Data.Substring(_I, found - _I).Trim();
}
/// <summary>
/// Skips leading whitespace, then returns the next run of non-whitespace characters,
/// leaving the cursor just after the token.
/// </summary>
private string GetToken()
{
    // Perf: test characters directly instead of allocating a one-char Substring per
    // position (IsNullOrWhiteSpace on a single-char string is char.IsWhiteSpace).
    while (_I < _Data.Length && char.IsWhiteSpace(_Data[_I]))
        ++_I;
    int j = _I;
    while (j < _Data.Length && !char.IsWhiteSpace(_Data[j]))
        ++j;
    string token = _Data.Substring(_I, j - _I);
    _I = j;
    // Trim is a no-op for a non-whitespace run but kept to match the original contract.
    return token.Trim();
}
/// <summary>
/// Returns the next line (trimmed) without consuming it: the cursor is saved,
/// the line is read, and the cursor is restored.
/// </summary>
private string PeekNextLine()
{
    int savedCursor = _I;
    string line = GetToEOL();
    _I = savedCursor;
    return line;
}
/// <summary>
/// Parses the Lot Summary page text into a <see cref="HeaderFile"/> and seeds
/// <paramref name="slots"/> with one empty entry per wafer slot label found ("*NN").
/// Parsing is strictly sequential: each ScanPast/GetBefore consumes the page text in order,
/// so the statement order here must match the page layout.
/// </summary>
/// <param name="logic">Supplies logistics values (JobID, MesEntity, ReportFullPath).</param>
/// <param name="headerFileName">Key into <paramref name="pages"/> for the lot-summary page.</param>
/// <param name="pages">Map of page PDF file name to extracted page text.</param>
/// <param name="slots">Output map of slot label to the wafer data files parsed later.</param>
/// <exception cref="Exception">Thrown when the header page is not present in <paramref name="pages"/>.</exception>
private HeaderFile ParseLotSummary(ILogic logic, string headerFileName, Dictionary<string, string> pages, Dictionary<string, List<DataFile>> slots)
{
HeaderFile result = new HeaderFile { JobID = logic.Logistics.JobID, MesEntity = logic.Logistics.MesEntity, Date = DateTime.Now.ToString() };
_I = 0;
//string headerText;
//string altHeaderFileName = Path.ChangeExtension(headerFileName, ".txt");
//if (File.Exists(altHeaderFileName))
// headerText = File.ReadAllText(altHeaderFileName);
//else
//{
// //Pdfbox, IKVM.AWT.WinForms
// org.apache.pdfbox.pdmodel.PDDocument pdfDocument = org.apache.pdfbox.pdmodel.PDDocument.load(headerFileName);
// org.apache.pdfbox.util.PDFTextStripper stripper = new org.apache.pdfbox.util.PDFTextStripper();
// headerText = stripper.getText(pdfDocument);
// pdfDocument.close();
// File.AppendAllText(altHeaderFileName, headerText);
//}
//result.Id = h;
//result.Title = h;
//result.Zone = h;
//result.PSN = h;
//result.Layer = h;
result.ParseErrorText = string.Empty;
if (!pages.ContainsKey(headerFileName))
throw new Exception();
_I = 0;
_Data = pages[headerFileName];
// Header fields: date, recipe, then the lot id which precedes a "[" marker.
ScanPast("Date:");
result.Date = GetToEOL();
ScanPast("Recipe ID:");
result.Recipe = GetBefore("LotID:");
result.Recipe = result.Recipe.Replace(";", "");
if (_Data.Contains("[]"))
result.Lot = GetBefore("[]");
else if (_Data.Contains("[7]"))
result.Lot = GetBefore("[7]");
else
result.Lot = GetBefore("[");
// Remove illegal characters \/:*?"<>| found in the Lot.
result.Lot = Regex.Replace(result.Lot, @"[\\,\/,\:,\*,\?,\"",\<,\>,\|]", "_").Split('\r')[0].Split('\n')[0];
// determine number of wafers and their slot numbers
_Log.Debug(_Data.Substring(_I));
string slot;
string toEOL;
// Each wafer row starts with "*"; the two characters after it are the slot number.
int slotCount = _Data.Substring(_I).Split('*').Length - 1;
_Log.Debug($"****HeaderFile - Slot Count: {slotCount}.");
for (int i = 0; i < slotCount; i++)
{
ScanPast("*");
toEOL = GetToEOL(false);
slot = string.Concat("*", toEOL.Substring(0, 2));
if (!slots.ContainsKey(slot))
slots.Add(slot, new List<DataFile>());
}
_Log.Debug($"****HeaderFile - Slots:");
_Log.Debug(slots);
// Four statistics rows (Min/Max/Average/Std Dev), each with 9 columns in a fixed order;
// FixToEolArray repairs rows where adjacent columns ran together.
ScanPast("Min:");
string[] toEol1 = GetToEOL(false).Trim().Split(' ');
_Log.Debug($"****HeaderFile - toEol1 Count: {toEol1.Length}.");
FixToEolArray(ref toEol1);
result.LPDCountMin = toEol1[0].Trim();
result.LPDCM2Min = toEol1[1].Trim();
result.AreaCountMin = toEol1[2].Trim();
result.AreaTotalMin = toEol1[3].Trim();
result.ScratchCountMin = toEol1[4].Trim();
result.ScratchTotalMin = toEol1[5].Trim();
result.SumOfDefectsMin = toEol1[6].Trim();
result.HazeRegionMin = toEol1[7].Trim();
result.HazeAverageMin = toEol1[8].Trim();
ScanPast("Max:");
string[] toEol2 = GetToEOL(false).Trim().Split(' ');
_Log.Debug($"****HeaderFile - toEol2 Count: {toEol2.Length}.");
FixToEolArray(ref toEol2);
result.LPDCountMax = toEol2[0].Trim();
result.LPDCM2Max = toEol2[1].Trim();
result.AreaCountMax = toEol2[2].Trim();
result.AreaTotalMax = toEol2[3].Trim();
result.ScratchCountMax = toEol2[4].Trim();
result.ScratchTotalMax = toEol2[5].Trim();
result.SumOfDefectsMax = toEol2[6].Trim();
result.HazeRegionMax = toEol2[7].Trim();
result.HazeAverageMax = toEol2[8].Trim();
ScanPast("Average:");
string[] toEol3 = GetToEOL(false).Trim().Split(' ');
_Log.Debug($"****HeaderFile - toEol3 Count: {toEol3.Length}.");
FixToEolArray(ref toEol3);
result.LPDCountAvg = toEol3[0].Trim();
result.LPDCM2Avg = toEol3[1].Trim();
result.AreaCountAvg = toEol3[2].Trim();
result.AreaTotalAvg = toEol3[3].Trim();
result.ScratchCountAvg = toEol3[4].Trim();
result.ScratchTotalAvg = toEol3[5].Trim();
result.SumOfDefectsAvg = toEol3[6].Trim();
result.HazeRegionAvg = toEol3[7].Trim();
result.HazeAverageAvg = toEol3[8].Trim();
ScanPast("Std Dev:");
string[] toEol4 = GetToEOL(false).Trim().Split(' ');
_Log.Debug($"****HeaderFile - toEol4 Count: {toEol4.Length}.");
FixToEolArray(ref toEol4);
result.LPDCountStdDev = toEol4[0].Trim();
result.LPDCM2StdDev = toEol4[1].Trim();
result.AreaCountStdDev = toEol4[2].Trim();
result.AreaTotalStdDev = toEol4[3].Trim();
result.ScratchCountStdDev = toEol4[4].Trim();
result.ScratchTotalStdDev = toEol4[5].Trim();
result.SumOfDefectsStdDev = toEol4[6].Trim();
result.HazeRegionStdDev = toEol4[7].Trim();
result.HazeAverageStdDev = toEol4[8].Trim();
// Lot ids look like Reactor-RDS-PSN (e.g. 33-289217-4693); missing segments stay null.
string[] segments = result.Lot.Split('-');
if (segments.Length > 0)
result.Reactor = segments[0];
if (segments.Length > 1)
result.RDS = segments[1];
if (segments.Length > 2)
result.PSN = segments[2];
// Example of header.UniqueId is TENCOR1_33-289217-4693_201901300556533336
result.UniqueId = string.Format("{0}_{1}_{2}", logic.Logistics.JobID, result.Lot, Path.GetFileNameWithoutExtension(logic.Logistics.ReportFullPath));
return result;
}
/// <summary>
/// Parses a single wafer-summary page into a <see cref="DataFile"/>. Like
/// <see cref="ParseLotSummary"/>, parsing is strictly sequential over the page text,
/// so the ScanPast order below must match the page layout.
/// </summary>
/// <param name="headerFile">The already-parsed lot summary; supplies UniqueId linkage.</param>
/// <param name="waferFileName">Key into <paramref name="pages"/> for this wafer's page.</param>
/// <param name="pages">Map of page PDF file name to extracted page text.</param>
/// <exception cref="Exception">Thrown when the wafer page is not present in <paramref name="pages"/>.</exception>
private DataFile ParseWaferSummary(HeaderFile headerFile, string waferFileName, Dictionary<string, string> pages)
{
DataFile result = new DataFile { Data = "*Data*", i = -1, };
_I = 0;
//string waferText;
//string altWaferFileName = Path.ChangeExtension(waferFileName, ".txt");
//if (File.Exists(altWaferFileName))
// waferText = File.ReadAllText(altWaferFileName);
//else
//{
// //Pdfbox, IKVM.AWT.WinForms
// org.apache.pdfbox.pdmodel.PDDocument pdfDocument = org.apache.pdfbox.pdmodel.PDDocument.load(waferFileName);
// org.apache.pdfbox.util.PDFTextStripper dataStripper = new org.apache.pdfbox.util.PDFTextStripper();
// waferText = dataStripper.getText(pdfDocument);
// pdfDocument.close();
// File.AppendAllText(altWaferFileName, waferText);
//}
List<string> stringList = new List<string>();
result.HeaderUniqueId = headerFile.UniqueId;
result.Id = 0;
result.Title = null;
if (!pages.ContainsKey(waferFileName))
throw new Exception();
_I = 0;
_Data = pages[waferFileName];
ScanPast("Date:");
result.Date = GetToEOL();
ScanPast("ID#");
result.Slot = GetToEOL();
// Guard against over-long slot text (merged fields); keep a recognizable prefix.
if (result.Slot.Length > 5)
result.Slot = string.Concat(result.Slot.Substring(0, 5), "... - ***");
//result.Slot = result.Slot.Replace("*", "");
ScanPast("Comments:");
result.Comments = GetToEOL();
ScanPast("Sort:");
result.Sort = GetToEOL();
ScanPast("LPD Count:");
result.LPDCount = GetToEOL();
ScanPast("LPD / cm2:");
result.LPDCM2 = GetToEOL();
// Consume "Bin N:" rows until a non-Bin label is hit; that label's value is Mean.
while (GetBefore(":").Contains("Bin"))
stringList.Add(GetToEOL());
if (stringList.Count >= 1)
result.Bin1 = stringList[0];
if (stringList.Count >= 2)
result.Bin2 = stringList[1];
if (stringList.Count >= 3)
result.Bin3 = stringList[2];
if (stringList.Count >= 4)
result.Bin4 = stringList[3];
if (stringList.Count >= 5)
result.Bin5 = stringList[4];
if (stringList.Count >= 6)
result.Bin6 = stringList[5];
if (stringList.Count >= 7)
result.Bin7 = stringList[6];
if (stringList.Count >= 8)
result.Bin8 = stringList[7];
// The while-loop above already consumed the "Mean:" label, so read its value directly.
result.Mean = GetToEOL();
ScanPast("Std Dev:");
result.StdDev = GetToEOL();
ScanPast("Area Count:");
result.AreaCount = GetToEOL();
ScanPast("Area Total:");
result.AreaTotal = GetToEOL();
ScanPast("Scratch Count:");
result.ScratchCount = GetToEOL();
ScanPast("Scratch Total:");
result.ScratchTotal = GetToEOL();
ScanPast("Sum of All Defects:");
result.SumOfDefects = GetToEOL();
ScanPast("Haze Region:");
result.HazeRegion = GetToEOL();
ScanPast("Haze Average:");
result.HazeAverage = GetToEOL();
ScanPast("Haze Peak:");
result.HazePeak = GetToEOL();
ScanPast("Laser:");
result.Laser = GetBefore("Gain:");
result.Gain = GetBefore("Diameter:");
result.Diameter = GetToEOL();
ScanPast("Thresh:");
result.Thresh = GetBefore("Exclusion:");
result.Exclusion = GetToEOL();
ScanPast("Haze Rng:");
result.HazeRng = GetBefore("Thruput:");
result.Thruput = GetToEOL();
ScanPast("Recipe ID:");
result.Recipe = GetToEOL();
// UniqueId = header UniqueId + slot number (leading zeros and '*' stripped).
result.UniqueId = string.Format("{0}_{1}", headerFile.UniqueId, result.Slot.Replace("*", string.Empty).TrimStart('0'));
return result;
}
/// <summary>
/// Full parse pipeline for one PCL report:
/// 1) convert the PCL to PDF, 2) split the PDF into per-page PDFs and extract each page's
/// text (caching it in a sibling .txt), 3) parse the last page as the lot summary and the
/// remaining pages as wafer summaries, 4) rename the page PDFs to slot-based
/// "_data.pdf"/"_image.pdf" names, 5) record missing slots and collect every produced file
/// into <paramref name="fileInfoCollection"/>.
/// </summary>
/// <param name="logic">Supplies logistics (report path, JobID).</param>
/// <param name="configData">Supplies the GhostPCL path for the PCL→PDF conversion.</param>
/// <param name="fileInfoCollection">Output list of every file produced or consumed.</param>
private Tuple<HeaderFile, List<DataFile>> Parse(ILogic logic, ConfigData configData, List<FileInfo> fileInfoCollection)
{
Tuple<HeaderFile, List<DataFile>> result;
object item;
string pageText;
string pagePDFFile;
string pageTextFile;
List<string> sourceFiles = new List<string>();
List<string> missingSlots = new List<string>();
List<DataFile> dataFiles = new List<DataFile>();
Dictionary<string, string> pages = new Dictionary<string, string>();
string sourcePath = Path.GetDirectoryName(logic.Logistics.ReportFullPath);
Dictionary<string, List<DataFile>> slots = new Dictionary<string, List<DataFile>>();
string sourceFileNamePdf = ConvertSourceFileToPdf(configData, logic.Logistics.ReportFullPath);
sourceFiles.Add(sourceFileNamePdf);
string sourceFileNameNoExt = Path.GetFileNameWithoutExtension(logic.Logistics.ReportFullPath);
////PdfSharp open pdf
//using (PdfSharp.Pdf.PdfDocument sourceDocument = PdfSharp.Pdf.IO.PdfReader.Open(sourceFileNamePdf, PdfSharp.Pdf.IO.PdfDocumentOpenMode.Import))
//{
// for (int idxPage = 0; idxPage < sourceDocument.PageCount; idxPage++)
// {
// // split the pdf into seperate pages. Odd pages are wafer image, even are wafer summary. Last page is Lot Summary.
// _Log.Debug($"****ParseData - Splitting page: {idxPage}, sourceDocument: {sourceDocument.FullPath}, sourcePathFileNoExt: {sourcePathFileNoExt}");
// //SplitPage(sourceDocument, sourcePathFileNoExt, idxPage);
// pageNum = idxPage + 1;
// pageFile = string.Format("{0}_{1}.pdf", sourcePathFileNoExt, pageNum);
// _Log.Debug($"****SplitPage - Page {pageNum} Source file: {sourceDocument.FullPath}");
// _Log.Debug($"****SplitPage - Page {pageNum} Output file: {pageFile}");
// //PdfSharp Create new document
// PdfSharp.Pdf.PdfDocument outputDocument = new PdfSharp.Pdf.PdfDocument { Version = sourceDocument.Version };
// outputDocument.Info.Title = string.Format("Page {0} of {1}", pageNum, sourceDocument.Info.Title);
// outputDocument.Info.Creator = sourceDocument.Info.Creator;
// outputDocument.AddPage(sourceDocument.Pages[idxPage]);
// outputDocument.Pages[0].CropBox = new PdfSharp.Pdf.PdfRectangle(new PdfSharp.Drawing.XRect(0, 100, 700, 700));
// outputDocument.Save(pageFile);
// }
// sourceDocumentPageCount = sourceDocument.PageCount;
// sourceDocument.Close();
//}
// Split the merged PDF into one document per page (pdfbox via IKVM java interop).
java.io.File file = new java.io.File(sourceFileNamePdf);
org.apache.pdfbox.util.Splitter splitter = new org.apache.pdfbox.util.Splitter();
org.apache.pdfbox.pdmodel.PDDocument pdDocument = org.apache.pdfbox.pdmodel.PDDocument.load(file);
java.util.List list = splitter.split(pdDocument);
java.util.ListIterator iterator = list.listIterator();
org.apache.pdfbox.util.PDFTextStripper dataStripper = new org.apache.pdfbox.util.PDFTextStripper();
// Bounded counter guards against a malformed iterator; normal exit is the hasNext() break.
for (short i = 1; i < short.MaxValue; i++)
{
if (!iterator.hasNext())
break;
item = iterator.next();
pagePDFFile = string.Concat(sourcePath, @"\", sourceFileNameNoExt, "_", i, ".pdf");
pageTextFile = Path.ChangeExtension(pagePDFFile, ".txt");
// Prefer the cached .txt, then an existing per-page PDF, else extract and cache both.
if (File.Exists(pageTextFile))
{
pageText = File.ReadAllText(pageTextFile);
sourceFiles.Add(pageTextFile);
if (!(item is org.apache.pdfbox.pdmodel.PDDocument pd))
continue;
pd.close();
}
else if (File.Exists(pagePDFFile))
{
org.apache.pdfbox.pdmodel.PDDocument document = org.apache.pdfbox.pdmodel.PDDocument.load(pagePDFFile);
pageText = dataStripper.getText(document);
document.close();
sourceFiles.Add(pagePDFFile);
if (!(item is org.apache.pdfbox.pdmodel.PDDocument pd))
continue;
pd.close();
}
else
{
if (!(item is org.apache.pdfbox.pdmodel.PDDocument pd))
continue;
pageText = dataStripper.getText(pd);
pd.save(pagePDFFile);
sourceFiles.Add(pagePDFFile);
pd.close();
File.WriteAllText(pageTextFile, pageText);
sourceFiles.Add(pageTextFile);
}
pages.Add(pagePDFFile, pageText);
}
pdDocument.close();
// parse lot summary (always the last page of the split)
_Log.Debug($"****ParseData - Parsing lot summary");
List<Tuple<string, string>> pageMapping = new List<Tuple<string, string>>();
string headerFileName = string.Concat(sourcePath, @"\", sourceFileNameNoExt, "_", pages.Count, ".pdf");
HeaderFile headerFile = ParseLotSummary(logic, headerFileName, pages, slots);
// Parse every non-header page; pages that fail recipe/slot validation are recorded
// in missingSlots and get an empty mapping entry.
foreach (KeyValuePair<string, string> keyValuePair in pages)
{
if (keyValuePair.Key == headerFileName)
continue;
if (string.IsNullOrEmpty(keyValuePair.Value.Trim()))
{
pageMapping.Add(new Tuple<string, string>(keyValuePair.Key, string.Empty));
continue;
}
if (!pages.ContainsKey(keyValuePair.Key))
throw new Exception();
DataFile dataFile = ParseWaferSummary(headerFile, keyValuePair.Key, pages);
if (string.IsNullOrEmpty(dataFile.Recipe) || dataFile.Recipe != headerFile.Recipe)
{
missingSlots.Add(keyValuePair.Key);
pageMapping.Add(new Tuple<string, string>(keyValuePair.Key, string.Empty));
continue;
}
if (!slots.ContainsKey(dataFile.Slot))
{
missingSlots.Add(keyValuePair.Key);
pageMapping.Add(new Tuple<string, string>(keyValuePair.Key, string.Empty));
continue;
}
pageMapping.Add(new Tuple<string, string>(keyValuePair.Key, string.Concat(sourcePath, @"\", sourceFileNameNoExt, "_", dataFile.Slot.Replace('*', 's'), "_data.pdf")));
slots[dataFile.Slot].Add(dataFile);
}
// Rename the header page to the canonical "_data.pdf" name.
string checkFileName = string.Concat(sourcePath, @"\", sourceFileNameNoExt, "_data.pdf");
if (!File.Exists(checkFileName))
{
File.Move(headerFileName, checkFileName);
sourceFiles.Remove(headerFileName);
sourceFiles.Add(checkFileName);
}
checkFileName = string.Empty;
// Walk the page mapping backwards: a mapped page is the wafer's data PDF, and the
// preceding unmapped page (if any) is that wafer's image PDF.
for (int i = pageMapping.Count - 1; i > -1; i--)
{
if (!string.IsNullOrEmpty(pageMapping[i].Item2))
{
checkFileName = pageMapping[i].Item2;
if (!File.Exists(checkFileName))
{
File.Move(pageMapping[i].Item1, checkFileName);
sourceFiles.Remove(pageMapping[i].Item1);
sourceFiles.Add(checkFileName);
}
}
else if (!string.IsNullOrEmpty(checkFileName))
{
//if (i == 0 || !string.IsNullOrEmpty(pageMapping[i - 1].Item2))
//{
checkFileName = checkFileName.Replace("_data.pdf", "_image.pdf");
if (!File.Exists(checkFileName))
{
File.Move(pageMapping[i].Item1, checkFileName);
sourceFiles.Remove(pageMapping[i].Item1);
sourceFiles.Add(checkFileName);
}
//}
checkFileName = string.Empty;
}
}
// Flatten the slot map into dataFiles, recording slots that produced no wafer data.
foreach (KeyValuePair<string, List<DataFile>> keyValuePair in slots)
{
if (!keyValuePair.Value.Any() || keyValuePair.Value[0] is null)
missingSlots.Add(string.Concat("Slot ", keyValuePair.Key, ") is missing."));
else
{
foreach (DataFile data in keyValuePair.Value)
dataFiles.Add(data);
}
}
if (missingSlots.Any())
{
string missingSlotsFile = string.Concat(sourcePath, @"\", sourceFileNameNoExt, "_MissingSlots.txt");
File.WriteAllLines(missingSlotsFile, missingSlots);
sourceFiles.Add(missingSlotsFile);
}
headerFile.Date = DateTime.Parse(headerFile.Date).ToString();
//Equipment data is wrong!!!
headerFile.Date = DateTime.Now.ToString();
//Equipment data is wrong!!!
//for (int i = 0; i < dataFiles.Count; i++)
// dataFiles[i].Date = DateTime.Parse(dataFiles[i].Date).ToString();
foreach (string sourceFile in sourceFiles)
fileInfoCollection.Add(new FileInfo(sourceFile));
fileInfoCollection.Add(new FileInfo(logic.Logistics.ReportFullPath));
result = new Tuple<HeaderFile, List<DataFile>>(headerFile, dataFiles);
return result;
}
}
}

View File

@ -1,19 +0,0 @@
namespace Adaptation.Si
{
internal partial class Transport
{
/// <summary>
/// Shape of the JSON payload carried in a MID when the lot belongs to area "Si".
/// Deserialized with System.Text.Json, so property names must match the JSON keys exactly.
/// </summary>
public class Input
{
// Equipment sequence number (tick count as a string — see Job's TryParse usage).
public string Sequence { get; set; }
// Area name; "Si" marks the payload as relevant to this transport.
public string Area { get; set; }
public string EquipmentType { get; set; }
// Equipment name used in MES.
public string MesEntity { get; set; }
// Lot identifier, expected as Reactor-RDS-Product (e.g. "33-289217-4693").
public string MID { get; set; }
public string Recipe { get; set; }
}
}
}

View File

@ -1,18 +0,0 @@
namespace Adaptation.Si
{
internal partial class Transport
{
/// <summary>
/// One carrier item of a transport job. The trailing comments map each property
/// to its alternate names in the upstream systems.
/// </summary>
public class Item
{
public string Name { get; set; } //WaferLot //UniqueID
public string Type { get; set; } //SatelliteGroup //Sort
public string Number { get; set; } //PocketNumber //Slot
public string Qty { get; set; } //1
public string CarrierName { get; set; } //PROCESS_GROUP
}
}
}

View File

@ -1,198 +0,0 @@
using Adaptation.Helpers;
using System;
using System.Collections.Generic;
using System.Data.SqlClient;
using System.Globalization;
using System.IO;
using System.Text;
using System.Text.Json;
namespace Adaptation.Si
{
internal partial class Transport
{
/// <summary>
/// A transport job built from a MID payload. When the MID is a JSON object for area "Si",
/// the fields are populated from the deserialized <see cref="Input"/>; otherwise
/// <see cref="IsAreaSi"/> stays false and the job is left empty.
/// </summary>
public class Job
{
    public string AutomationMode { get; }
    public string BasicType { get; }
    public string Equipment { get; }
    public string JobName { get; }
    public string LotName { get; }
    public string PackageName { get; }
    public string ProcessSpecName { get; }
    public string ProcessType { get; }
    public string ProductName { get; }
    public string Qty { get; }
    public string RecipeName { get; }
    public string StateModel { get; }
    //
    public bool IsAreaSi { get; }
    public DateTime DateTime { get; }
    public List<Item> Items { get; }

    /// <summary>
    /// Parses <paramref name="mid"/>; only a JSON object mentioning "Si" produces a populated job.
    /// </summary>
    /// <param name="configData">Used for the load-lock lookup and old-file housekeeping.</param>
    /// <param name="mid">Raw MID string; may or may not be the JSON payload.</param>
    public Job(ConfigData configData, string mid)
    {
        Items = new List<Item>();
        // Guard: a null/empty MID previously threw on mid[0]; treat it as "not area Si".
        if (string.IsNullOrEmpty(mid) || mid[0] != '{' || mid[mid.Length - 1] != '}' || !mid.Contains("\"Si\""))
            IsAreaSi = false;
        else
        {
            string[] segments;
            const string hypen = "-";
            Input input = JsonSerializer.Deserialize<Input>(mid);
            IsAreaSi = input.Area == "Si";
            // The sequence is a tick count; fall back to "now" when it doesn't parse.
            if (!long.TryParse(input.Sequence, out long sequence))
                DateTime = DateTime.Now;
            else
                DateTime = new DateTime(sequence);
            // MID is expected as "RR-RDSRDS-PPP" (hyphens at fixed positions 2 and 9).
            if (!string.IsNullOrEmpty(input.MID) && input.MID.Length > 9 && input.MID[2] == hypen[0] && input.MID[9] == hypen[0])
                segments = input.MID.Split(hypen[0]);
            else
                segments = new string[] { hypen, hypen, hypen };
            //
            AutomationMode = string.Concat(DateTime.Ticks, ".", input.MesEntity);
            if (segments[1] == hypen)
                BasicType = hypen;
            else
                BasicType = GetBasicType(configData, hypen, segments[1]);
            Equipment = input.MesEntity;
            JobName = DateTime.Ticks.ToString();
            if (segments[0] == hypen)
                LotName = input.MID;
            else
                LotName = segments[1];
            PackageName = hypen; //WAFER_ID WaferLot
            ProcessSpecName = hypen; //WAFER_POS PocketNumber
            ProcessType = segments[0];
            ProductName = segments[2].Split('.')[0];
            Qty = "1";
            RecipeName = input.Recipe;
            StateModel = input.EquipmentType;
            Items.Add(new Item { Name = "0", Type = "NA", Number = (0 + 1).ToString(), Qty = "1", CarrierName = hypen });
            MoveOldFiles(configData);
        }
    }

    /// <summary>
    /// Looks up the reactor load-lock description ("Left - …"/"Right - …" + reactor type)
    /// for an RDS number in the LSL2SQL database. Returns <paramref name="hypen"/> when
    /// the query fails or returns no row.
    /// </summary>
    public string GetBasicType(ConfigData configData, string hypen, string rds)
    {
        string result;
        object scalar = null;
        StringBuilder sql = new StringBuilder();
        sql.Append(" SELECT ").
            Append(" CASE ").
            Append(" WHEN LOAD_LOCK_SIDE = 'L' THEN 'Left - ' ").
            Append(" WHEN LOAD_LOCK_SIDE = 'R' THEN 'Right - ' ").
            Append(" ELSE LOAD_LOCK_SIDE ").
            Append(" END + REACTOR_TYPE AS LOAD_LOCK ").
            Append(" FROM [LSL2SQL].[dbo].[REACT_RUN] ").
            // SECURITY FIX: RDS_NO was interpolated directly into the SQL text
            // ($" WHERE RDS_NO = '{rds}' "), allowing SQL injection via the MID;
            // it is now bound as a parameter.
            Append(" WHERE RDS_NO = @RdsNo ");
        //Append(" AND LOAD_SIG != '' ");
        try
        {
            using (SqlConnection sqlConnection = new SqlConnection(configData.ConnectionStringLSL2SQL))
            {
                sqlConnection.Open();
                using (SqlCommand sqlCommand = new SqlCommand(sql.ToString(), sqlConnection))
                {
                    sqlCommand.Parameters.AddWithValue("@RdsNo", rds);
                    scalar = sqlCommand.ExecuteScalar();
                }
                sqlConnection.Close();
            }
        }
        catch (Exception)
        {
            // Best effort: any database failure falls through to the hyphen default.
        }
        if (scalar is null)
            result = hypen;
        else
            result = scalar.ToString();
        return result;
    }

    /// <summary>
    /// Housekeeping: files older than two days in the OI context-data search/pending/results
    /// directories are moved into "yyyy___Week_ww" subdirectories. All failures are
    /// swallowed deliberately — this must never break job construction.
    /// </summary>
    private void MoveOldFiles(ConfigData configData)
    {
        string yearWeek;
        string[] oldFiles;
        FileInfo fileInfo;
        string weekOfYear;
        string moveDirectory;
        DateTime daysOld = DateTime.Now.AddDays(-2);
        CultureInfo cultureInfo = new CultureInfo("en-US");
        Calendar calendar = cultureInfo.Calendar;
        string[] directories = new string[] { configData.OIContextDataSearchPath, configData.OIContextDataPendingPath, configData.OIContextDataResultsPath };
        foreach (string directory in directories)
        {
            try
            {
                oldFiles = Directory.GetFiles(directory, "*", SearchOption.TopDirectoryOnly);
                foreach (string oldFile in oldFiles)
                {
                    fileInfo = new FileInfo(oldFile);
                    if (!fileInfo.Exists || fileInfo.LastWriteTime > daysOld)
                        continue;
                    weekOfYear = calendar.GetWeekOfYear(fileInfo.LastWriteTime, CalendarWeekRule.FirstDay, DayOfWeek.Sunday).ToString("00");
                    yearWeek = string.Concat(fileInfo.LastWriteTime.ToString("yyyy"), "___Week_", weekOfYear);
                    moveDirectory = Path.Combine(fileInfo.DirectoryName, yearWeek);
                    if (!Directory.Exists(moveDirectory))
                        Directory.CreateDirectory(moveDirectory);
                    try
                    { File.Move(oldFile, Path.Combine(moveDirectory, fileInfo.Name)); }
                    catch (Exception) { /* file in use or already moved — skip */ }
                }
            }
            catch (Exception)
            {
                // Directory missing or inaccessible — skip it.
            }
        }
    }
}
}
}

View File

@ -1,77 +0,0 @@
namespace Adaptation.Si
{
internal partial class Transport
{
/// <summary>
/// EDA-Configurator.pdf
/// CDS Namespace Reply Variables
/// </summary>
private class Logistics
{
/// <summary>
/// Basic Type
/// </summary>
public string BASIC_TYPE { get; set; }
/// <summary>
/// Text for additional information
/// </summary>
public string INFO { get; set; }
/// <summary>
/// Unique assignment of lot processing to the basic cell (Dresden)
/// </summary>
public string JOBID { get; set; }
/// <summary>
/// Equipment name used in MES
/// </summary>
public string MES_ENTITY { get; set; }
/// <summary>
/// Lot number, name for a lot
/// </summary>
public string MID { get; set; }
/// <summary>
/// Recipe (Process Program ID)
/// </summary>
public string PPID { get; set; }
/// <summary>
/// Process group (e.g. C5PR)
/// </summary>
public string PROCESS_GROUP { get; set; }
/// <summary>
/// Product name
/// </summary>
public string PRODUCT { get; set; }
/// <summary>
/// Total number of wafers in lot
/// </summary>
public string TOTAL_NUMBER_OF_WAFERS { get; set; }
/// <summary>
/// Equipment sequence number
/// </summary>
public string SEQUENCE { get; set; }
/// <summary>
/// Unique wafer number (barcode, OCR)
/// </summary>
public string WAFER_ID { get; set; }
/// <summary>
/// Wafer position in a tube (Furnace)
/// </summary>
public string WAFER_POS { get; set; }
}
}
}

View File

@ -1,218 +0,0 @@
using Adaptation.Helpers;
using Infineon.Yoda;
using System;
using System.Collections.Generic;
using System.Globalization;
using System.IO;
using System.Linq;
using System.Threading;
namespace Adaptation.Si
{
/// <summary>
/// TIBCO/Ifx transport endpoint: creates the reliable-message transport from
/// configuration, subscribes to the configured subjects and answers GETJOBS
/// requests with job/lot data. NOTE(review): timing (Thread.Sleep warm-ups)
/// and statement order here look deliberate — do not reorder without testing
/// against a live daemon.
/// </summary>
internal partial class Transport
{
// Held as object and cast on use; null until Setup(setIfxTransport: true) creates it.
private static object _IfxTransport;
private static ConfigData _ConfigData;
/// <summary>
/// Stores the configuration and resets the transport reference.
/// Must be called before <see cref="Setup"/>.
/// </summary>
internal static void Initialize(ConfigData configData)
{
_IfxTransport = null;
_ConfigData = configData;
}
/// <summary>
/// Creates (optionally) the Ifx transport from configuration, subscribes to each
/// '|'-separated subject under the configured prefix and wires the reliable-message
/// handler. Returns diagnostic strings describing the transport state.
/// </summary>
/// <param name="useSleep">When true, inserts warm-up sleeps around transport creation.</param>
/// <param name="setIfxTransport">When true, builds and creates a new IfxTransport; when false, an existing transport is required.</param>
/// <exception cref="Exception">Thrown when no transport exists after the optional creation step.</exception>
internal static List<string> Setup(bool useSleep, bool setIfxTransport)
{
List<string> results = new List<string>();
if (useSleep)
{
// ~1.5 s warm-up before touching the transport layer.
for (int i = 1; i < 4; i++)
Thread.Sleep(500);
}
if (setIfxTransport)
{
results.Add(string.Concat("IfxTransport Subject: ", _ConfigData.IfxSubject));
IfxDoc ifxDoc = new IfxDoc();
ifxDoc.Add(IfxConst.SUBJECT_PREFIX, _ConfigData.IfxSubjectPrefix);
ifxDoc.Add(IfxConst.IFX_CHANNEL, _ConfigData.IfxChannel);
ifxDoc.Add(IfxConst.IFX_CONFIGURATION_LOCATION, _ConfigData.IfxConfigurationLocation);
ifxDoc.Add(IfxConst.IFX_CONFIGURATION_LOCATION_LOCAL_COPY, _ConfigData.IfxConfigurationLocationLocalCopy);
results.Add(string.Concat("IfxTransport Config: ", ifxDoc));
_IfxTransport = new IfxTransport();
IfxTransport ifxTransport = (IfxTransport)_IfxTransport;
ifxTransport.Create(ifxDoc);
if (useSleep)
{
// ~4.5 s pause to let the daemon connection come up before querying state.
for (int i = 1; i < 10; i++)
Thread.Sleep(500);
}
results.Add(string.Concat("IfxTransport Current Daemon: ", ifxTransport.CurrentDaemon));
results.Add(string.Concat("IfxTransport Current Network: ", ifxTransport.CurrentNetwork));
results.Add(string.Concat("IfxTransport Current Service: ", ifxTransport.CurrentService));
results.Add(string.Concat("IfxTransport Current PoolName: ", ifxTransport.CurrentPoolName));
}
for (int i = 1; i < 3; i++)
Thread.Sleep(500);
if (_IfxTransport is null)
throw new Exception();
else
{
IfxTransport ifxTransport = (IfxTransport)_IfxTransport;
// IfxSubject is a '|'-separated list; each entry is subscribed under the prefix.
string[] subjects = _ConfigData.IfxSubject.Split('|');
foreach (string subject in subjects)
ifxTransport.Subscribe(string.Concat(_ConfigData.IfxSubjectPrefix, ".", subject));
ifxTransport.ReliableMessage += MainTransport_ReliableMessage;
for (int i = 1; i < 3; i++)
Thread.Sleep(500);
}
return results;
}
/// <summary>
/// Archives every file in <paramref name="sourceFiles"/> (except the logistics file
/// itself) into a "_ Logistics Archive\yyyy_Week_ww" folder next to the file,
/// bucketed by the file's last-write week. Moves are best-effort; failures are ignored.
/// </summary>
private static void MoveSourceFiles(string[] sourceFiles, string pdsfFileLogistics, Calendar calendar)
{
DateTime dateTime;
string weekOfYear;
string checkDirectory;
foreach (string pdsfFile in sourceFiles)
{
if (pdsfFile == pdsfFileLogistics)
continue;
dateTime = new FileInfo(pdsfFile).LastWriteTime;
weekOfYear = calendar.GetWeekOfYear(dateTime, CalendarWeekRule.FirstDay, DayOfWeek.Sunday).ToString("00");
checkDirectory = string.Concat(Path.GetDirectoryName(pdsfFile), @"\_ Logistics Archive\", dateTime.ToString("yyyy"), "_Week_", weekOfYear);
if (!Directory.Exists(checkDirectory))
Directory.CreateDirectory(checkDirectory);
try
{ File.Move(pdsfFile, string.Concat(checkDirectory, @"\", Path.GetFileName(pdsfFile))); }
catch (Exception) { } // best-effort archive; file may be locked or already moved
}
}
/// <summary>
/// Extracts the "LotName" field from the request envelope; returns an empty
/// string when the document is null or the field is absent.
/// </summary>
private static string GetJobsMID(IfxDoc envelopeDocument)
{
string mid;
if (envelopeDocument is null || !envelopeDocument.FieldExists("LotName"))
mid = string.Empty;
else
mid = envelopeDocument.GetFieldByName("LotName").ToString();
return mid;
}
/// <summary>
/// Builds the GETJOBS reply document: one FAJobs entry containing the job,
/// its single lot, the recipe and one item document per job item.
/// "-" is used for fields that have no meaningful value here.
/// </summary>
private static IfxDoc GetJobsReply(Job job)
{
IfxDoc result = new IfxDoc();
IfxDoc itemDoc;
IfxDoc jobDoc = new IfxDoc();
IfxDoc lotDoc = new IfxDoc();
IfxDoc recipeDoc = new IfxDoc();
List<IfxDoc> itemDocs = new List<IfxDoc>();
jobDoc.Add("AutomationMode", job.AutomationMode);
jobDoc.Add("CreationTimestamp", job.DateTime);
jobDoc.Add("CreationUser", "-");
jobDoc.Add("CurrentState", true);
jobDoc.Add("Equipment", job.Equipment);
jobDoc.Add("JobName", job.JobName);
jobDoc.Add("LastUpdateTimestamp", job.DateTime);
jobDoc.Add("LastUpdateUser", "-");
jobDoc.Add("ProcessType", job.ProcessType);
jobDoc.Add("StateModel", job.StateModel);
jobDoc.Add("Status", "-");
lotDoc.Add("BasicType", job.BasicType);
lotDoc.Add("IsActive", true);
lotDoc.Add("LotName", job.LotName);
lotDoc.Add("LotState", "-");
lotDoc.Add("PackageName", job.PackageName);
lotDoc.Add("ProcessSpecName", job.ProcessSpecName);
lotDoc.Add("ProductName", job.ProductName);
lotDoc.Add("Qty", job.Qty);
lotDoc.Add("Qty2", "-");
recipeDoc.Add("RecipeName", job.RecipeName);
lotDoc.Add("SpecName", "-");
foreach (Item item in job.Items)
{
itemDoc = new IfxDoc();
itemDoc.Add("Name", item.Name);
itemDoc.Add("Type", item.Type);
itemDoc.Add("Number", item.Number);
itemDoc.Add("Qty", item.Qty);
itemDoc.Add("CarrierName", item.CarrierName);
itemDocs.Add(itemDoc);
}
jobDoc.Add("Recipe", recipeDoc);
lotDoc.Add("Items", itemDocs.ToArray());
jobDoc.Add("Lots", new IfxDoc[] { lotDoc });
result.Add("FAJobs", new IfxDoc[] { jobDoc });
result.Add("IFX_ECD", "0");
result.Add("IFX_ETX", 0);
return result;
}
/// <summary>
/// Reliable-message handler. Saves the incoming envelope as XML (when a source
/// location is configured), rejects any subject other than *GETJOBS by throwing,
/// and for Si-area lots sends a GETJOBS reply (also saved as XML when a target
/// location is configured). On any failure, emails the exception via the EAF
/// backbone SMTP component and writes an error file — both best-effort.
/// </summary>
private static void MainTransport_ReliableMessage(string subject, string replySubject, IfxEnvelope ifxEnvelope)
{
try
{
string mid = string.Empty;
// NOTE(review): sourceFiles and pdsfFileLogistics are never assigned after
// initialization in this method, so the MoveSourceFiles call below is
// currently unreachable in effect — confirm whether this is leftover code.
string[] sourceFiles = null;
DateTime dateTime = DateTime.Now;
string pdsfFileLogistics = string.Empty;
IfxDoc envelopeDocument = ifxEnvelope.ExtractDocument();
CultureInfo cultureInfo = new CultureInfo("en-US");
Calendar calendar = cultureInfo.Calendar;
string weekOfYear = calendar.GetWeekOfYear(dateTime, CalendarWeekRule.FirstDay, DayOfWeek.Sunday).ToString("00");
string weekOfYearSegment = string.Concat(@"\", dateTime.ToString("yyyy"), "_Week_", weekOfYear, @"\", dateTime.ToString("yyyy-MM-dd"));
if (!string.IsNullOrEmpty(_ConfigData.FileConnectorConfiguration.SourceFileLocation))
{
string directory = string.Concat(_ConfigData.FileConnectorConfiguration.SourceFileLocation, weekOfYearSegment);
if (!Directory.Exists(directory))
Directory.CreateDirectory(directory);
// Subject dots are replaced so the subject can be used in a file name.
string fileName = string.Concat(directory, @"\", subject.Replace(".", "~"), " - ", DateTime.Now.Ticks, ".xml");
try
{ envelopeDocument.SaveAsXml(fileName); }
catch (Exception) { } // audit copy only; ignore write failures
}
if (!subject.EndsWith("GETJOBS"))
throw new Exception();
mid = GetJobsMID(envelopeDocument);
Job job = new Job(_ConfigData, mid);
if (job.IsAreaSi)
{
IfxDoc sendReply = GetJobsReply(job);
ifxEnvelope.Transport.SendReply(ifxEnvelope, sendReply);
if (!string.IsNullOrEmpty(_ConfigData.FileConnectorConfiguration.TargetFileLocation))
{
string directory = string.Concat(_ConfigData.FileConnectorConfiguration.TargetFileLocation, weekOfYearSegment);
if (!Directory.Exists(directory))
Directory.CreateDirectory(directory);
string fileName = string.Concat(directory, @"\", subject.Replace(".", "~"), " - ", DateTime.Now.Ticks, ".xml");
try
{ sendReply.SaveAsXml(fileName); }
catch (Exception) { } // audit copy only; ignore write failures
}
}
if (!(sourceFiles is null) && !string.IsNullOrEmpty(pdsfFileLogistics))
MoveSourceFiles(sourceFiles, pdsfFileLogistics, calendar);
}
catch (Exception exception)
{
try
{
Eaf.Core.Smtp.ISmtp smtp = Eaf.Core.Backbone.Instance.GetBackboneComponentsOfType<Eaf.Core.Smtp.ISmtp>().SingleOrDefault();
Eaf.Core.Smtp.EmailMessage emailMessage = new Eaf.Core.Smtp.EmailMessage(string.Concat("Exception:", _ConfigData.EquipmentElementName, ":MainTransport_ReliableMessage"), string.Concat(exception.Message, Environment.NewLine, Environment.NewLine, exception.StackTrace), Eaf.Core.Smtp.MailPriority.High);
smtp.Send(emailMessage);
}
catch (Exception) { } // notification is best-effort; fall through to the error file
string directory = _ConfigData.FileConnectorConfiguration.ErrorTargetFileLocation;
if (!string.IsNullOrEmpty(directory) && Directory.Exists(directory))
{
string fileName = string.Concat(directory, @"\", subject.Replace(".", "~"), " - ", DateTime.Now.Ticks, ".txt");
try
{ File.WriteAllLines(fileName, new string[] { exception.Message, string.Empty, string.Empty, exception.StackTrace }); }
catch (Exception) { } // last-resort logging; nothing left to do on failure
}
}
}
}
}

View File

@ -1,13 +1,17 @@
namespace Adaptation.Ifx.Eaf.Common.Configuration
{
[System.Runtime.Serialization.DataContractAttribute]
public class ConnectionSetting
{
public ConnectionSetting(string name, string value) { }
namespace Adaptation.Ifx.Eaf.Common.Configuration;
[System.Runtime.Serialization.DataContractAttribute]
public class ConnectionSetting
{
#pragma warning disable CA2254
#pragma warning disable IDE0060
public ConnectionSetting(string name, string value) { }
[System.Runtime.Serialization.DataMemberAttribute]
public string Name { get; set; }
[System.Runtime.Serialization.DataMemberAttribute]
public string Value { get; set; }
[System.Runtime.Serialization.DataMemberAttribute]
public string Name { get; set; }
[System.Runtime.Serialization.DataMemberAttribute]
public string Value { get; set; }
}
}

View File

@ -1,19 +1,23 @@
using System;
using System.Collections.Generic;
namespace Adaptation.Ifx.Eaf.EquipmentConnector.File.Component
namespace Adaptation.Ifx.Eaf.EquipmentConnector.File.Component;
public class File
{
public class File
{
public File(string filePath) { throw new NotImplementedException(); }
public File(string filePath, DateTime timeFileFound) { throw new NotImplementedException(); }
public string Path { get; }
public DateTime TimeFound { get; }
public bool IsErrorFile { get; }
public Dictionary<string, string> ContentParameters { get; }
#pragma warning disable CA2254
#pragma warning disable IDE0060
public File(string filePath) => throw new NotImplementedException();
public File(string filePath, DateTime timeFileFound) => throw new NotImplementedException();
public string Path { get; }
public DateTime TimeFound { get; }
public bool IsErrorFile { get; }
public Dictionary<string, string> ContentParameters { get; }
public File UpdateContentParameters(Dictionary<string, string> contentParameters) => throw new NotImplementedException();
public File UpdateParsingStatus(bool isErrorFile) => throw new NotImplementedException();
public File UpdateContentParameters(Dictionary<string, string> contentParameters) { throw new NotImplementedException(); }
public File UpdateParsingStatus(bool isErrorFile) { throw new NotImplementedException(); }
}
}

View File

@ -2,34 +2,39 @@
using System;
using System.Collections.Generic;
namespace Adaptation.Ifx.Eaf.EquipmentConnector.File.Component
namespace Adaptation.Ifx.Eaf.EquipmentConnector.File.Component;
public class FilePathGenerator
{
public class FilePathGenerator
{
public const char PLACEHOLDER_IDENTIFIER = '%';
public const char PLACEHOLDER_SEPARATOR = ':';
public const string PLACEHOLDER_NOT_AVAILABLE = "NA";
public const string PLACEHOLDER_ORIGINAL_FILE_NAME = "OriginalFileName";
public const string PLACEHOLDER_ORIGINAL_FILE_EXTENSION = "OriginalFileExtension";
public const string PLACEHOLDER_DATE_TIME = "DateTime";
public const string PLACEHOLDER_SUB_FOLDER = "SubFolder";
public const string PLACEHOLDER_CELL_NAME = "CellName";
public FilePathGenerator(FileConnectorConfiguration config, Dictionary<string, string> customPattern = null) { throw new NotImplementedException(); }
public FilePathGenerator(FileConnectorConfiguration config, File file, bool isErrorFile = false, Dictionary<string, string> customPattern = null) { throw new NotImplementedException(); }
public FilePathGenerator(FileConnectorConfiguration config, string sourceFilePath, bool isErrorFile = false, Dictionary<string, string> customPattern = null) { throw new NotImplementedException(); }
#pragma warning disable CA1822
#pragma warning disable CA2254
#pragma warning disable IDE0060
protected string SubFolderPath { get; }
protected FileConnectorConfiguration Configuration { get; }
protected File File { get; }
protected bool IsErrorFile { get; }
protected string DefaultPlaceHolderValue { get; }
public const char PLACEHOLDER_IDENTIFIER = '%';
public const char PLACEHOLDER_SEPARATOR = ':';
public const string PLACEHOLDER_NOT_AVAILABLE = "NA";
public const string PLACEHOLDER_ORIGINAL_FILE_NAME = "OriginalFileName";
public const string PLACEHOLDER_ORIGINAL_FILE_EXTENSION = "OriginalFileExtension";
public const string PLACEHOLDER_DATE_TIME = "DateTime";
public const string PLACEHOLDER_SUB_FOLDER = "SubFolder";
public const string PLACEHOLDER_CELL_NAME = "CellName";
public FilePathGenerator(FileConnectorConfiguration config, Dictionary<string, string> customPattern = null) => throw new NotImplementedException();
public FilePathGenerator(FileConnectorConfiguration config, File file, bool isErrorFile = false, Dictionary<string, string> customPattern = null) => throw new NotImplementedException();
public FilePathGenerator(FileConnectorConfiguration config, string sourceFilePath, bool isErrorFile = false, Dictionary<string, string> customPattern = null) => throw new NotImplementedException();
protected string SubFolderPath { get; }
protected FileConnectorConfiguration Configuration { get; }
protected File File { get; }
protected bool IsErrorFile { get; }
protected string DefaultPlaceHolderValue { get; }
public string GetFullTargetPath() => throw new NotImplementedException();
public virtual string GetTargetFileName() => throw new NotImplementedException();
public string GetTargetFolder(bool throwExceptionIfNotExist = true) => throw new NotImplementedException();
protected virtual string GetSubFolder(string folderPattern, string subFolderPath) => throw new NotImplementedException();
protected virtual string PrepareFolderPath(string targetFolderPath, string subFolderPath) => throw new NotImplementedException();
protected string ReplacePlaceholder(string inputPath) => throw new NotImplementedException();
public string GetFullTargetPath() { throw new NotImplementedException(); }
public virtual string GetTargetFileName() { throw new NotImplementedException(); }
public string GetTargetFolder(bool throwExceptionIfNotExist = true) { throw new NotImplementedException(); }
protected virtual string GetSubFolder(string folderPattern, string subFolderPath) { throw new NotImplementedException(); }
protected virtual string PrepareFolderPath(string targetFolderPath, string subFolderPath) { throw new NotImplementedException(); }
protected string ReplacePlaceholder(string inputPath) { throw new NotImplementedException(); }
}
}

View File

@ -2,134 +2,133 @@
using System;
using System.Collections.Generic;
namespace Adaptation.Ifx.Eaf.EquipmentConnector.File.Configuration
namespace Adaptation.Ifx.Eaf.EquipmentConnector.File.Configuration;
[System.Runtime.Serialization.DataContractAttribute]
public class FileConnectorConfiguration
{
[System.Runtime.Serialization.DataContractAttribute]
public class FileConnectorConfiguration
public const ulong IDLE_EVENT_WAIT_TIME_DEFAULT = 360;
public const ulong FILE_HANDLE_TIMEOUT_DEFAULT = 15;
[System.Runtime.Serialization.DataMemberAttribute]
public virtual bool? TriggerOnChanged { get; set; }
[System.Runtime.Serialization.DataMemberAttribute]
public virtual long? PostProcessingRetries { get; set; }
[System.Runtime.Serialization.DataMemberAttribute]
public virtual bool? CopySourceFolderStructure { get; set; }
[System.Runtime.Serialization.DataMemberAttribute]
public IfPostProcessingFailsEnum? IfPostProcessingFailsAction { get; set; }
[System.Runtime.Serialization.DataMemberAttribute]
public string AlternateTargetFolder { get; set; }
[System.Runtime.Serialization.DataMemberAttribute]
public long? FileHandleTimeout { get; set; }
[System.Runtime.Serialization.DataMemberAttribute]
public bool? DeleteEmptySourceSubFolders { get; set; }
[System.Runtime.Serialization.DataMemberAttribute]
public long? IdleEventWaitTimeInSeconds { get; set; }
[System.Runtime.Serialization.DataMemberAttribute]
public string FileAgeThreshold { get; set; }
public bool? FolderAgeCheckIndividualSubFolders { get; set; }
[System.Runtime.Serialization.DataMemberAttribute]
public virtual ZipModeEnum? ZipMode { get; set; }
[System.Runtime.Serialization.DataMemberAttribute]
public FileAgeFilterEnum? FileAgeFilterMode { get; set; }
[System.Runtime.Serialization.DataMemberAttribute]
public string ZipTargetFileName { get; set; }
[System.Runtime.Serialization.DataMemberAttribute]
public string ZipErrorTargetFileName { get; set; }
[System.Runtime.Serialization.DataMemberAttribute]
public long? ZipFileSubFolderLevel { get; set; }
[System.Runtime.Serialization.DataMemberAttribute]
public string DefaultPlaceHolderValue { get; set; }
[System.Runtime.Serialization.DataMemberAttribute]
public bool? UseZip64Mode { get; set; }
[System.Runtime.Serialization.DataMemberAttribute]
public List<ConnectionSetting> ConnectionSettings { get; set; }
public string SourceDirectoryCloaking { get; set; }
public string FolderAgeThreshold { get; set; }
[System.Runtime.Serialization.DataMemberAttribute]
public virtual long? FileScanningIntervalInSeconds { get; set; }
[System.Runtime.Serialization.DataMemberAttribute]
public virtual bool? TriggerOnCreated { get; set; }
[System.Runtime.Serialization.DataMemberAttribute]
public virtual long? ZipFileTime { get; set; }
[System.Runtime.Serialization.DataMemberAttribute]
public string SourceFileLocation { get; set; }
[System.Runtime.Serialization.DataMemberAttribute]
public string SourceFileFilter { get; set; }
public List<string> SourceFileFilters { get; set; }
[System.Runtime.Serialization.DataMemberAttribute]
public virtual bool? IncludeSubDirectories { get; set; }
[System.Runtime.Serialization.DataMemberAttribute]
public virtual FileScanningOptionEnum? FileScanningOption { get; set; }
[System.Runtime.Serialization.DataMemberAttribute]
public string TargetFileLocation { get; set; }
[System.Runtime.Serialization.DataMemberAttribute]
public string ErrorTargetFileLocation { get; set; }
[System.Runtime.Serialization.DataMemberAttribute]
public string TargetFileName { get; set; }
[System.Runtime.Serialization.DataMemberAttribute]
public virtual long? FileHandleWaitTime { get; set; }
[System.Runtime.Serialization.DataMemberAttribute]
public IfFileExistEnum? IfFileExistAction { get; set; }
[System.Runtime.Serialization.DataMemberAttribute]
public long? ConnectionRetryInterval { get; set; }
[System.Runtime.Serialization.DataMemberAttribute]
public PreProcessingModeEnum? PreProcessingMode { get; set; }
[System.Runtime.Serialization.DataMemberAttribute]
public PostProcessingModeEnum? PostProcessingMode { get; set; }
[System.Runtime.Serialization.DataMemberAttribute]
public PostProcessingModeEnum? ErrorPostProcessingMode { get; set; }
[System.Runtime.Serialization.DataMemberAttribute]
public virtual long? ZipFileAmount { get; set; }
[System.Runtime.Serialization.DataMemberAttribute]
public string ErrorTargetFileName { get; set; }
public void Initialize() => throw new NotImplementedException();
public enum PostProcessingModeEnum
{
public const ulong IDLE_EVENT_WAIT_TIME_DEFAULT = 360;
public const ulong FILE_HANDLE_TIMEOUT_DEFAULT = 15;
[System.Runtime.Serialization.DataMemberAttribute]
public virtual bool? TriggerOnChanged { get; set; }
[System.Runtime.Serialization.DataMemberAttribute]
public virtual long? PostProcessingRetries { get; set; }
[System.Runtime.Serialization.DataMemberAttribute]
public virtual bool? CopySourceFolderStructure { get; set; }
[System.Runtime.Serialization.DataMemberAttribute]
public IfPostProcessingFailsEnum? IfPostProcessingFailsAction { get; set; }
[System.Runtime.Serialization.DataMemberAttribute]
public string AlternateTargetFolder { get; set; }
[System.Runtime.Serialization.DataMemberAttribute]
public long? FileHandleTimeout { get; set; }
[System.Runtime.Serialization.DataMemberAttribute]
public bool? DeleteEmptySourceSubFolders { get; set; }
[System.Runtime.Serialization.DataMemberAttribute]
public long? IdleEventWaitTimeInSeconds { get; set; }
[System.Runtime.Serialization.DataMemberAttribute]
public string FileAgeThreshold { get; set; }
public bool? FolderAgeCheckIndividualSubFolders { get; set; }
[System.Runtime.Serialization.DataMemberAttribute]
public virtual ZipModeEnum? ZipMode { get; set; }
[System.Runtime.Serialization.DataMemberAttribute]
public FileAgeFilterEnum? FileAgeFilterMode { get; set; }
[System.Runtime.Serialization.DataMemberAttribute]
public string ZipTargetFileName { get; set; }
[System.Runtime.Serialization.DataMemberAttribute]
public string ZipErrorTargetFileName { get; set; }
[System.Runtime.Serialization.DataMemberAttribute]
public long? ZipFileSubFolderLevel { get; set; }
[System.Runtime.Serialization.DataMemberAttribute]
public string DefaultPlaceHolderValue { get; set; }
[System.Runtime.Serialization.DataMemberAttribute]
public bool? UseZip64Mode { get; set; }
[System.Runtime.Serialization.DataMemberAttribute]
public List<ConnectionSetting> ConnectionSettings { get; set; }
public string SourceDirectoryCloaking { get; set; }
public string FolderAgeThreshold { get; set; }
[System.Runtime.Serialization.DataMemberAttribute]
public virtual long? FileScanningIntervalInSeconds { get; set; }
[System.Runtime.Serialization.DataMemberAttribute]
public virtual bool? TriggerOnCreated { get; set; }
[System.Runtime.Serialization.DataMemberAttribute]
public virtual long? ZipFileTime { get; set; }
[System.Runtime.Serialization.DataMemberAttribute]
public string SourceFileLocation { get; set; }
[System.Runtime.Serialization.DataMemberAttribute]
public string SourceFileFilter { get; set; }
public List<string> SourceFileFilters { get; set; }
[System.Runtime.Serialization.DataMemberAttribute]
public virtual bool? IncludeSubDirectories { get; set; }
[System.Runtime.Serialization.DataMemberAttribute]
public virtual FileScanningOptionEnum? FileScanningOption { get; set; }
[System.Runtime.Serialization.DataMemberAttribute]
public string TargetFileLocation { get; set; }
[System.Runtime.Serialization.DataMemberAttribute]
public string ErrorTargetFileLocation { get; set; }
[System.Runtime.Serialization.DataMemberAttribute]
public string TargetFileName { get; set; }
[System.Runtime.Serialization.DataMemberAttribute]
public virtual long? FileHandleWaitTime { get; set; }
[System.Runtime.Serialization.DataMemberAttribute]
public IfFileExistEnum? IfFileExistAction { get; set; }
[System.Runtime.Serialization.DataMemberAttribute]
public long? ConnectionRetryInterval { get; set; }
[System.Runtime.Serialization.DataMemberAttribute]
public PreProcessingModeEnum? PreProcessingMode { get; set; }
[System.Runtime.Serialization.DataMemberAttribute]
public PostProcessingModeEnum? PostProcessingMode { get; set; }
[System.Runtime.Serialization.DataMemberAttribute]
public PostProcessingModeEnum? ErrorPostProcessingMode { get; set; }
[System.Runtime.Serialization.DataMemberAttribute]
public virtual long? ZipFileAmount { get; set; }
[System.Runtime.Serialization.DataMemberAttribute]
public string ErrorTargetFileName { get; set; }
public void Initialize() { throw new NotImplementedException(); }
public enum PostProcessingModeEnum
{
None = 0,
Move = 1,
Copy = 2,
Rename = 3,
Zip = 4,
Delete = 5,
MoveFolder = 6,
CopyFolder = 7,
DeleteFolder = 8
}
public enum PreProcessingModeEnum
{
None = 0,
Process = 1
}
public enum IfFileExistEnum
{
Overwrite = 0,
LeaveFiles = 1,
Delete = 2
}
public enum IfPostProcessingFailsEnum
{
LeaveFiles = 0,
Delete = 1
}
public enum FileScanningOptionEnum
{
FileWatcher = 0,
TimeBased = 1
}
public enum ZipModeEnum
{
ZipByAmountOrTime = 0,
ZipByFileName = 1,
ZipBySubFolderName = 2
}
public enum FileAgeFilterEnum
{
IgnoreNewer = 0,
IgnoreOlder = 1
}
None = 0,
Move = 1,
Copy = 2,
Rename = 3,
Zip = 4,
Delete = 5,
MoveFolder = 6,
CopyFolder = 7,
DeleteFolder = 8
}
public enum PreProcessingModeEnum
{
None = 0,
Process = 1
}
public enum IfFileExistEnum
{
Overwrite = 0,
LeaveFiles = 1,
Delete = 2
}
public enum IfPostProcessingFailsEnum
{
LeaveFiles = 0,
Delete = 1
}
public enum FileScanningOptionEnum
{
FileWatcher = 0,
TimeBased = 1
}
public enum ZipModeEnum
{
ZipByAmountOrTime = 0,
ZipByFileName = 1,
ZipBySubFolderName = 2
}
public enum FileAgeFilterEnum
{
IgnoreNewer = 0,
IgnoreOlder = 1
}
}

View File

@ -2,13 +2,18 @@
using System;
using System.Collections.Generic;
namespace Adaptation.Ifx.Eaf.EquipmentConnector.File.SelfDescription
{
public class FileConnectorParameterTypeDefinitionProvider
{
public FileConnectorParameterTypeDefinitionProvider() { }
namespace Adaptation.Ifx.Eaf.EquipmentConnector.File.SelfDescription;
public class FileConnectorParameterTypeDefinitionProvider
{
#pragma warning disable CA1822
#pragma warning disable CA2254
#pragma warning disable IDE0060
public FileConnectorParameterTypeDefinitionProvider() { }
public IEnumerable<ParameterTypeDefinition> GetAllParameterTypeDefinition() => null;
public ParameterTypeDefinition GetParameterTypeDefinition(string name) => null;
public IEnumerable<ParameterTypeDefinition> GetAllParameterTypeDefinition() { return null; }
public ParameterTypeDefinition GetParameterTypeDefinition(string name) { return null; }
}
}

View File

@ -11,7 +11,7 @@
</PropertyGroup>
<PropertyGroup>
<VSTestLogger>trx</VSTestLogger>
<VSTestResultsDirectory>../../../Trunk/MET08DDUPSFS6420/05_TestResults/TestResults</VSTestResultsDirectory>
<VSTestResultsDirectory>../../../../MET08DDUPSFS6420/05_TestResults/TestResults</VSTestResultsDirectory>
</PropertyGroup>
<PropertyGroup>
<IsWindows Condition="'$([System.Runtime.InteropServices.RuntimeInformation]::IsOSPlatform($([System.Runtime.InteropServices.OSPlatform]::Windows)))' == 'true'">true</IsWindows>

View File

@ -1,10 +1,9 @@
using System;
namespace Adaptation.PeerGroup.GCL.Annotations
namespace Adaptation.PeerGroup.GCL.Annotations;
[AttributeUsage(AttributeTargets.Method | AttributeTargets.Property | AttributeTargets.Field | AttributeTargets.Event | AttributeTargets.Parameter | AttributeTargets.Delegate, AllowMultiple = false, Inherited = true)]
public sealed class NotNullAttribute : Attribute
{
[AttributeUsage(AttributeTargets.Method | AttributeTargets.Property | AttributeTargets.Field | AttributeTargets.Event | AttributeTargets.Parameter | AttributeTargets.Delegate, AllowMultiple = false, Inherited = true)]
public sealed class NotNullAttribute : Attribute
{
public NotNullAttribute() { }
}
public NotNullAttribute() { }
}

View File

@ -1,8 +1,7 @@
namespace Adaptation.PeerGroup.GCL.SecsDriver
namespace Adaptation.PeerGroup.GCL.SecsDriver;
public enum HsmsConnectionMode
{
public enum HsmsConnectionMode
{
Active = 0,
Passive = 1
}
Active = 0,
Passive = 1
}

View File

@ -1,8 +1,7 @@
namespace Adaptation.PeerGroup.GCL.SecsDriver
namespace Adaptation.PeerGroup.GCL.SecsDriver;
public enum HsmsSessionMode
{
public enum HsmsSessionMode
{
MultiSession = 0,
SingleSession = 1
}
MultiSession = 0,
SingleSession = 1
}

View File

@ -1,8 +1,7 @@
namespace Adaptation.PeerGroup.GCL.SecsDriver
namespace Adaptation.PeerGroup.GCL.SecsDriver;
public enum SecsTransportType
{
public enum SecsTransportType
{
HSMS = 0,
Serial = 1
}
HSMS = 0,
Serial = 1
}

View File

@ -1,16 +1,15 @@
namespace Adaptation.PeerGroup.GCL.SecsDriver
namespace Adaptation.PeerGroup.GCL.SecsDriver;
public enum SerialBaudRate
{
public enum SerialBaudRate
{
Baud9600 = 0,
Baud19200 = 1,
Baud4800 = 2,
Baud2400 = 3,
Baud1200 = 4,
Baud300 = 5,
Baud150 = 6,
Baud38400 = 7,
Baud57600 = 8,
Baud115200 = 9
}
Baud9600 = 0,
Baud19200 = 1,
Baud4800 = 2,
Baud2400 = 3,
Baud1200 = 4,
Baud300 = 5,
Baud150 = 6,
Baud38400 = 7,
Baud57600 = 8,
Baud115200 = 9
}

View File

@ -1,20 +0,0 @@
namespace Adaptation.Shared.Deposition
{
/// <summary>
/// Tool-specific view of the shared <see cref="Deposition.Test"/> enumeration for
/// DEP08EGANAIXG5: each member aliases the corresponding shared value, so casts
/// between the two enums are value-compatible. AFMRoughness is intentionally absent.
/// </summary>
public class DEP08EGANAIXG5
{
public enum Test
{
GRATXTCenter = Deposition.Test.GRATXTCenter,
GRATXTEdge = Deposition.Test.GRATXTEdge,
GRAXMLCenter = Deposition.Test.GRAXMLCenter,
GRAXMLEdgeN = Deposition.Test.GRAXMLEdgeN,
Health = Deposition.Test.Health,
Temps = Deposition.Test.Temps,
ToolTime = Deposition.Test.ToolTime
}
}
}

View File

@ -1,16 +0,0 @@
namespace Adaptation.Shared.Deposition
{
/// <summary>
/// Shared test identifiers for Deposition; per-tool enums (e.g.
/// <c>DEP08EGANAIXG5.Test</c>) alias these values.
/// NOTE(review): the numeric values appear to be stable identifiers referenced
/// elsewhere — confirm before renumbering or reordering.
/// </summary>
public enum Test
{
AFMRoughness = -1,
GRATXTCenter = 0,
GRATXTEdge = 1,
GRAXMLCenter = 2,
GRAXMLEdgeN = 3,
Health = 4,
Temps = 5,
ToolTime = 6
}
}

View File

@ -1,192 +0,0 @@
using Adaptation.Eaf.Core;
using Adaptation.Eaf.EquipmentCore.Control;
using Adaptation.Eaf.EquipmentCore.DataCollection.Reporting;
using Adaptation.Eaf.EquipmentCore.SelfDescription.ElementDescription;
using Adaptation.Eaf.EquipmentCore.SelfDescription.ParameterTypes;
using Adaptation.Ifx.Eaf.EquipmentConnector.File.SelfDescription;
using Adaptation.Shared.Metrology;
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text.Json;
namespace Adaptation.Shared
{
public class Description
{
// Column identifiers for per-row bookkeeping; base value 1000 keeps them
// disjoint from the LogisticsColumn range below.
public enum RowColumn
{
Test = 1000,
Count,
Index
}
// Column identifiers for logistics fields; base value 2000 keeps them
// disjoint from the RowColumn range above.
public enum LogisticsColumn
{
EventName = 2000,
NullData,
JobID,
Sequence,
MesEntity,
ReportFullPath,
ProcessJobID,
MID
}
// Parameter value type discriminator used when mapping JSON values to
// equipment parameters. NOTE(review): value 1 is intentionally skipped here —
// presumably reserved by an external protocol; confirm before adding members.
public enum Param
{
String = 0,
Integer = 2,
Double = 3,
Boolean = 4,
StructuredType = 5
}
internal const string FileFound = "FileFound";
// Built once by the constructor (via GetParameterValues) and then treated as read-only.
public List<EquipmentParameter> EquipmentParameters { get; private set; }
public List<ParameterTypeDefinition> ParameterTypeDefinitions { get; private set; }
// When true, non-header keys are collected as cyclical [index, value] pairs.
private readonly bool _UseCyclical;
private readonly List<string> _HeaderNames;
// Maps parameter key -> index into EquipmentParameters, so later calls reuse
// the parameters created by the constructor.
private readonly Dictionary<string, int> _KeyIndexPairs;
// Fallback type definition for structured (object/array) values.
private readonly ParameterTypeDefinition _StructuredType;
private readonly FileConnectorParameterTypeDefinitionProvider _FileConnectorParameterTypeDefinitionProvider;
/// <summary>
/// Builds the equipment-parameter catalog from the configuration's parameter info
/// and primes the key-to-index cache used by subsequent value lookups.
/// </summary>
/// <param name="logic">Logic instance forwarded to the configuration queries.</param>
/// <param name="configDataBase">Source of header names, the cyclical flag and parameter info.</param>
/// <param name="equipmentControl">Used to create structured parameter values; may be null.</param>
public Description(ILogic logic, ConfigDataBase configDataBase, IEquipmentControl equipmentControl)
{
_KeyIndexPairs = new Dictionary<string, int>();
_HeaderNames = configDataBase.GetHeaderNames(logic);
_UseCyclical = configDataBase.UseCyclicalForDescription;
_StructuredType = new StructuredType(nameof(StructuredType), string.Empty, new List<Field>());
_FileConnectorParameterTypeDefinitionProvider = new FileConnectorParameterTypeDefinitionProvider();
EquipmentParameters = new List<EquipmentParameter>();
ParameterTypeDefinitions = new List<ParameterTypeDefinition> { _StructuredType };
Dictionary<string, List<Tuple<Enum, string, string, object>>> keyValuePairsCollection = configDataBase.GetParameterInfo(logic, allowNull: false);
// Called for its side effects (populates EquipmentParameters and _KeyIndexPairs);
// the returned values are not needed here, so discard them instead of binding an
// unused local (the original assigned them to a never-read 'results' variable).
_ = GetParameterValues(equipmentControl, keyValuePairsCollection);
}
/// <summary>
/// Converts a key -> tuple-list collection into parameter values, creating (on the
/// first call) or reusing (on later calls, via _KeyIndexPairs) the corresponding
/// EquipmentParameter entries. Cyclical non-header keys are emitted as
/// [index, value] pair lists; structured values are created through
/// <paramref name="equipmentControl"/> when one is supplied.
/// </summary>
/// <exception cref="Exception">
/// Thrown when a key's tuples disagree on the Param kind, or when a tuple's key
/// field does not match its dictionary key.
/// </exception>
private List<ParameterValue> GetParameterValues(IEquipmentControl equipmentControl, Dictionary<string, List<Tuple<Enum, string, string, object>>> keyValuePairsCollection)
{
List<ParameterValue> results = new List<ParameterValue>();
Enum param;
object value;
Enum[] @params;
string description;
List<object[]> list;
EquipmentParameter equipmentParameter;
ParameterTypeDefinition parameterTypeDefinition;
// First call (constructor) populates EquipmentParameters; later calls only
// accept keys that were registered then.
bool addToEquipmentParameters = !EquipmentParameters.Any();
foreach (KeyValuePair<string, List<Tuple<Enum, string, string, object>>> keyValuePair in keyValuePairsCollection)
{
if (!addToEquipmentParameters && !_KeyIndexPairs.ContainsKey(keyValuePair.Key))
continue;
// All tuples for one key must agree on the Param kind.
@params = (from l in keyValuePair.Value select l.Item1).Distinct().ToArray();
if (@params.Length != 1)
throw new Exception();
if (keyValuePair.Value[0].Item2 != keyValuePair.Key)
throw new Exception();
param = @params[0];
if (!addToEquipmentParameters)
equipmentParameter = EquipmentParameters[_KeyIndexPairs[keyValuePair.Key]];
else
{
description = keyValuePair.Value[0].Item3;
// Use the Count property, not Enumerable.Count(), on a List (CA1829).
_KeyIndexPairs.Add(keyValuePair.Key, EquipmentParameters.Count);
if (param is Param.StructuredType || (_UseCyclical && !_HeaderNames.Contains(keyValuePair.Key)))
parameterTypeDefinition = _StructuredType;
else
parameterTypeDefinition = _FileConnectorParameterTypeDefinitionProvider.GetParameterTypeDefinition(param.ToString());
equipmentParameter = new EquipmentParameter(keyValuePair.Key, parameterTypeDefinition, description);
EquipmentParameters.Add(equipmentParameter);
}
if (!_UseCyclical || _HeaderNames.Contains(keyValuePair.Key))
value = keyValuePair.Value[0].Item4;
else
{
// Cyclical key: wrap every occurrence as an [index, value] pair.
list = new List<object[]>();
for (int i = 0; i < keyValuePair.Value.Count; i++)
list.Add(new object[] { i, keyValuePair.Value[i].Item4 });
value = list;
}
if (equipmentControl is null || !(param is Param.StructuredType))
results.Add(new ParameterValue(equipmentParameter, value, DateTime.Now));
else
results.Add(equipmentControl.DataCollection.CreateParameterValue(equipmentParameter, value));
}
return results;
}
// Parses a JSON array of flat objects into ParameterValues: each property's
// JsonValueKind is mapped to a Param kind and forwarded, as name/description/value
// tuples, to the tuple-based GetParameterValues overload.
// logic and keyValuePairs are unused but kept for caller compatibility.
// i is the start index into the array; when cyclical it must be 0 (all rows are
// consumed), otherwise it defaults to 0 when null and only the first row is read.
// Throws NotImplementedException for nested objects/arrays (not supported yet).
public List<ParameterValue> GetParameterValues(ILogic logic, IEquipmentControl equipmentControl, JsonElement jsonElement, int? i = null, Dictionary<string, object> keyValuePairs = null)
{
    if (_UseCyclical && (i is null || i.Value > 0))
        throw new Exception();
    if (jsonElement.ValueKind != JsonValueKind.Array)
        throw new Exception();
    Enum param;
    Tuple<Enum, string, string, object> tuple;
    JsonElement[] jsonElements = jsonElement.EnumerateArray().ToArray();
    Dictionary<string, List<Tuple<Enum, string, string, object>>> keyValuePairsCollection = new Dictionary<string, List<Tuple<Enum, string, string, object>>>();
    // Fix: the original dereferenced i.Value even though i is null by default and
    // null is permitted when not cyclical; treat null as "start at the first row".
    for (int r = i ?? 0; r < jsonElements.Length; r++)
    {
        foreach (JsonProperty jsonProperty in jsonElements[r].EnumerateObject())
        {
            if (jsonProperty.Value.ValueKind == JsonValueKind.Object || jsonProperty.Value.ValueKind == JsonValueKind.Array)
                throw new NotImplementedException("Item1"); // nested structures are not supported
            switch (jsonProperty.Value.ValueKind)
            {
                case JsonValueKind.String:
                case JsonValueKind.Null:
                    param = Param.String;
                    break;
                case JsonValueKind.Number:
                    param = Param.Double;
                    break;
                case JsonValueKind.True:
                case JsonValueKind.False:
                    param = Param.Boolean;
                    break;
                default:
                    param = Param.StructuredType;
                    break;
            }
            tuple = new Tuple<Enum, string, string, object>(param, jsonProperty.Name, string.Empty, jsonProperty.Value.ToString());
            if (!keyValuePairsCollection.ContainsKey(jsonProperty.Name))
                keyValuePairsCollection.Add(jsonProperty.Name, new List<Tuple<Enum, string, string, object>>());
            keyValuePairsCollection[jsonProperty.Name].Add(tuple);
        }
        // Non-cyclical extracts consume only the requested row.
        if (!_UseCyclical)
            break;
    }
    return GetParameterValues(equipmentControl, keyValuePairsCollection);
}
// Returns the backbone's cell name with any "-IO" marker removed;
// empty when no backbone instance (or no cell name) is available.
public static string GetCellName()
{
    string cellName = Backbone.Instance?.CellName ?? string.Empty;
    return cellName.Replace("-IO", string.Empty);
}
}
}

View File

@ -0,0 +1,142 @@
using Adaptation.Shared.Methods;
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text.Json;
namespace Adaptation.Shared.Duplicator;
// Duplicator description: carries only the shared logistics/identity columns and
// implements IDescription with empty detail/header/parameter collections.
public class Description : IDescription, Properties.IDescription
{

    public int Test { get; set; }
    public int Count { get; set; }
    public int Index { get; set; }
    //
    public string EventName { get; set; }
    public string NullData { get; set; }
    public string JobID { get; set; }
    public string Sequence { get; set; }
    public string MesEntity { get; set; }
    public string ReportFullPath { get; set; }
    public string ProcessJobID { get; set; }
    public string MID { get; set; }
    public string Date { get; set; } //2021-10-23

    string IDescription.GetEventDescription() => "File Has been read and parsed";

    // Property names of the default description, discovered via a JSON round trip.
    List<string> IDescription.GetNames(IFileRead fileRead, Logistics logistics)
    {
        IDescription defaultDescription = GetDefault(fileRead, logistics);
        string serialized = JsonSerializer.Serialize(defaultDescription, defaultDescription.GetType());
        object deserialized = JsonSerializer.Deserialize<object>(serialized);
        if (deserialized is not JsonElement element)
            throw new Exception();
        List<string> names = new();
        foreach (JsonProperty property in element.EnumerateObject())
            names.Add(property.Name);
        return names;
    }

    // The duplicator defines no detail columns.
    List<string> IDescription.GetDetailNames() => new();

    // The duplicator defines no header columns.
    List<string> IDescription.GetHeaderNames() => new();

    IDescription IDescription.GetDisplayNames() => GetDisplayNames();

    // The duplicator defines no parameter columns.
    List<string> IDescription.GetParameterNames() => new();

    // Default description rendered as raw JSON properties.
    JsonProperty[] IDescription.GetDefault(IFileRead fileRead, Logistics logistics)
    {
        IDescription defaultDescription = GetDefault(fileRead, logistics);
        string serialized = JsonSerializer.Serialize(defaultDescription, defaultDescription.GetType());
        object deserialized = JsonSerializer.Deserialize<object>(serialized);
        return ((JsonElement)deserialized).EnumerateObject().ToArray();
    }

    List<string> IDescription.GetPairedParameterNames() => new();

    List<string> IDescription.GetIgnoreParameterNames(Test test) => new();

    IDescription IDescription.GetDefaultDescription(IFileRead fileRead, Logistics logistics) => GetDefault(fileRead, logistics);

    // Maps each display-name property to its rendered value (empty when absent).
    Dictionary<string, string> IDescription.GetDisplayNamesJsonElement(IFileRead fileRead)
    {
        Dictionary<string, string> displayNames = new();
        IDescription displayDescription = GetDisplayNames();
        string serialized = JsonSerializer.Serialize(displayDescription, displayDescription.GetType());
        JsonElement element = JsonSerializer.Deserialize<JsonElement>(serialized);
        foreach (JsonProperty property in element.EnumerateObject())
        {
            if (!displayNames.ContainsKey(property.Name))
                displayNames.Add(property.Name, string.Empty);
            if (property.Value is JsonElement propertyValue)
                displayNames[property.Name] = propertyValue.ToString();
        }
        return displayNames;
    }

    // The duplicator produces no per-test descriptions.
    List<IDescription> IDescription.GetDescriptions(IFileRead fileRead, Logistics logistics, List<Test> tests, IProcessData iProcessData) => new();

    private static Description GetDisplayNames() => new();

    // Default instance populated from the file-read configuration and logistics;
    // Date is the sequence timestamp converted to UTC.
    private static Description GetDefault(IFileRead fileRead, Logistics logistics) =>
        new()
        {
            Test = -1,
            Count = 0,
            Index = -1,
            //
            EventName = fileRead.EventName,
            NullData = fileRead.NullData,
            JobID = fileRead.CellInstanceName,
            Sequence = logistics.Sequence.ToString(),
            MesEntity = fileRead.MesEntity,
            ReportFullPath = logistics.ReportFullPath,
            ProcessJobID = logistics.ProcessJobID,
            MID = logistics.MID,
            Date = logistics.DateTimeFromSequence.ToUniversalTime().ToString("MM/dd/yyyy HH:mm:ss")
        };
}

View File

@ -1,53 +0,0 @@
namespace Adaptation.Shared
{
// Equipment / cell-instance types recognized by the shared adaptation layer.
// Member names mirror cell-instance names; underscore suffixes (e.g. _Daily,
// _Weekly, _Monthly, _Verification, _Villach) denote scheduled or site variants.
public enum EquipmentType
{
// Generic connector kinds
FileEquipment,
SemiEquipment,
//
DEP08EGANAIXG5,
//
MET08ANLYSDIFAAST230_Semi,
MET08DDUPSFS6420,
MET08DDUPSP1TBI,
MET08RESIHGCV,
MET08RESIMAPCDE,
MET08THFTIRQS408M,
MET08THFTIRSTRATUS,
//
MET08AFMD3100,
MET08BVHGPROBE,
MET08CVHGPROBE802B150,
MET08CVHGPROBE802B150_Monthly,
MET08CVHGPROBE802B150_Weekly,
MET08DDINCAN8620,
MET08DDINCAN8620_Daily,
MET08EBEAMINTEGRITY26,
MET08HALLHL5580,
MET08HALLHL5580_Monthly,
MET08HALLHL5580_Weekly,
MET08MESMICROSCOPE,
MET08NDFRESIMAP151C,
MET08NDFRESIMAP151C_Verification,
MET08PLMAPRPM,
MET08PLMAPRPM_Daily,
MET08PLMAPRPM_Verification,
MET08PLMPPLATO,
MET08PRFUSB4000,
MET08PRFUSB4000_Daily,
MET08PRFUSB4000_Monthly,
MET08PRFUSB4000_Weekly,
MET08PRFUSB4000_Verification,
MET08PRFUSB4000_Villach,
MET08UVH44GS100M,
MET08VPDSUBCON,
MET08WGEOMX203641Q,
MET08WGEOMX203641Q_Verification,
MET08XRDXPERTPROMRDXL,
MET08XRDXPERTPROMRDXL_Monthly,
MET08XRDXPERTPROMRDXL_Weekly,
METBRXRAYJV7300L
}
}

View File

@ -1,178 +0,0 @@
using Adaptation.Shared.Metrology;
using System;
using System.Collections.Generic;
namespace Adaptation.Shared
{
    // One extended (IQS-style) parameter row: limit/identity metadata plus the
    // extractor-key and data-key context it was collected under.
    public class ExtendedParameter
    {

        // NOTE(review): "DiplayName" is misspelled but kept - renaming would break
        // callers and any serialized form of this type.
        public string DiplayName { get; set; }
        public string ControlPlanName { get; set; }
        public bool? CriticalToShip { get; set; }
        public string Unit { get; set; }
        public double? LSL { get; set; }
        public double? TSL { get; set; }
        public double? USL { get; set; }
        public string Value { get; set; }
        public bool? Ignore { get; set; }

        //public class ExtractorKeys
        public string Lot { get; set; } //1
        public string ToolID { get; set; } //2
        public string Process { get; set; } //3
        public string WaferID { get; set; } //4
        public string Part { get; set; } //5
        public string Recipe { get; set; } //6
        public string ProcessFlow { get; set; } //7

        //public class DataKeys
        public string Employee { get; set; } //1
        public string SID { get; set; } //2
        public string WaferRegion { get; set; } //3
        public string WaferScribe { get; set; } //4
        public string WaferPosition { get; set; } //5
        public string X { get; set; } //6
        public string Y { get; set; } //7
        public string EAFCellInstance { get; set; } //8
        public string EAFReference { get; set; } //9
        public string IQSReference { get; set; } //10

        // Initializes every field to its neutral value ("-" for Lot/SID/Part,
        // empty string otherwise; limits null) and copies tool/cell identity
        // from logistics when one is supplied. The original assigned Lot/SID/Part
        // twice (empty then "-"); the redundant first assignment was removed.
        public ExtendedParameter(Logistics logistics, string diplayName, string controlPlanName)
        {
            DiplayName = diplayName;
            ControlPlanName = controlPlanName;
            CriticalToShip = null;
            Unit = string.Empty;
            LSL = null;
            TSL = null;
            USL = null;
            Ignore = null;
            Value = string.Empty;
            //public class ExtractorKeys
            Lot = "-"; //1
            ToolID = string.Empty; //2
            Process = string.Empty; //3
            WaferID = string.Empty; //4
            Part = "-"; //5
            Recipe = string.Empty; //6
            ProcessFlow = string.Empty; //7
            //public class DataKeys
            Employee = string.Empty; //1
            SID = "-"; //2
            WaferRegion = string.Empty; //3
            WaferScribe = string.Empty; //4
            WaferPosition = string.Empty; //5
            X = string.Empty; //6
            Y = string.Empty; //7
            EAFCellInstance = string.Empty; //8
            EAFReference = string.Empty; //9
            IQSReference = string.Empty; //10
            if (!(logistics is null))
            {
                ToolID = logistics.MesEntity;
                EAFCellInstance = logistics.JobID;
            }
        }

        public override string ToString()
        {
            return Value;
        }

        // Copies the matching columns from the extract's column collection into this
        // row. Aliased targets (WaferID, WaferPosition) take the last alias present,
        // matching the original check order. ContainsKey+indexer double lookups were
        // replaced with TryGetValue, and the original's verbatim duplicate of the
        // WaferID alias block (a no-op second pass) was removed.
        internal void Set(EquipmentType equipmentType, string queryFilter, Dictionary<Enum, string> allColumnCollection)
        {
            string value;
            EAFReference = equipmentType.ToString();
            IQSReference = string.IsNullOrEmpty(queryFilter) ? null : queryFilter;
            //
            SID = allColumnCollection.TryGetValue(Column.SID, out value) ? value : "-";
            Employee = allColumnCollection.TryGetValue(Column.Employee, out value) ? value : "AUTO";
            //
            if (allColumnCollection.TryGetValue(Column.Lot, out value))
                Lot = value;
            if (allColumnCollection.TryGetValue(Column.Part, out value))
                Part = value;
            if (allColumnCollection.TryGetValue(Column.Process, out value))
                Process = value;
            if (allColumnCollection.TryGetValue(Column.Recipe, out value))
                Recipe = value;
            // WaferID aliases (last present key wins)
            if (allColumnCollection.TryGetValue(Column.Wafer_ID, out value))
                WaferID = value;
            if (allColumnCollection.TryGetValue(Column.Denton_Gun_Pocket, out value))
                WaferID = value;
            if (allColumnCollection.TryGetValue(Column.WaferPocket_Candela, out value))
                WaferID = value;
            if (allColumnCollection.TryGetValue(Column.WaferPocket_Warp, out value))
                WaferID = value;
            //
            if (allColumnCollection.TryGetValue(Column.Wafer_Region, out value))
                WaferRegion = value;
            if (allColumnCollection.TryGetValue(Column.Wafer_Scribe, out value))
                WaferScribe = value;
            // WaferPosition aliases (last present key wins)
            if (allColumnCollection.TryGetValue(Column.WaferPosition_BV, out value))
                WaferPosition = value;
            if (allColumnCollection.TryGetValue(Column.WaferPosition_CV, out value))
                WaferPosition = value;
            if (allColumnCollection.TryGetValue(Column.WaferPosition_Hall, out value))
                WaferPosition = value;
            if (allColumnCollection.TryGetValue(Column.WaferPosition_PR, out value))
                WaferPosition = value;
            if (allColumnCollection.TryGetValue(Column.X_Coord, out value))
                X = value;
            if (allColumnCollection.TryGetValue(Column.Y_Coord, out value))
                Y = value;
        }
    }
}

View File

@ -1,652 +0,0 @@
using Adaptation.Shared.Metrology;
using System;
using System.Collections.Generic;
using System.Linq;
namespace Adaptation.Shared
{
// Accumulates one extraction run's column data - tool headers/details, parameters,
// database headers/details, and per-row bookkeeping - keyed by enum columns, and
// exposes merged read views over those collections.
public class ExtractResult
{
public object ProcessData { get; internal set; }
public long LastTicksDuration { get; private set; }
public long BreakAfterSeconds { get; private set; }
public Enum[] EnumColumns { get; protected set; }
public List<string> SourceFiles { get; private set; }
public Column[] PairedColumns { get; protected set; }
public Dictionary<Enum, List<string>> Headers { get; protected set; }
public Dictionary<Enum, List<string>> Details { get; protected set; }
public Dictionary<Enum, List<string>> Parameters { get; protected set; }
public Dictionary<Enum, List<ExtendedParameter>> ExtendedParameters { get; protected set; }
public Dictionary<Enum, List<string>> DatabaseHeaders { get; protected set; }
public Dictionary<Enum, List<string>> DatabaseDetails { get; protected set; }
public Dictionary<Description.RowColumn, List<int>> RowColumns { get; protected set; }
// NOTE(review): "IgnoreIndeices" is misspelled (indices) but is public surface; kept.
// Per test, per column: row indices whose value is empty or equals the null-data marker.
public Dictionary<Test, Dictionary<Enum, List<int>>> IgnoreIndeices { get; protected set; }
public Dictionary<Description.LogisticsColumn, List<string>> LogisticsColumns { get; protected set; }
// Builds an empty result; carries over SourceFiles from a prior result when given.
// All column dictionaries start empty (the local Enum lists passed to Common are empty).
public ExtractResult(ExtractResult extractResult, long breakAfterSeconds, Enum[] enumColumns, Column[] pairedColumns)
{
if (enumColumns is null)
enumColumns = new Enum[] { };
if (pairedColumns is null)
pairedColumns = new Column[] { };
ProcessData = null;
EnumColumns = enumColumns;
PairedColumns = pairedColumns;
SourceFiles = new List<string>();
if (!(extractResult is null) && !(extractResult.SourceFiles is null))
SourceFiles.AddRange(extractResult.SourceFiles);
BreakAfterSeconds = breakAfterSeconds;
List<Enum> headers = new List<Enum>();
List<Enum> details = new List<Enum>();
List<Enum> parameters = new List<Enum>();
List<Enum> databaseHeaders = new List<Enum>();
List<Enum> databaseDetails = new List<Enum>();
// seconds -> ticks (1 tick = 100 ns, so 10,000,000 ticks per second)
UpdateLastTicksDuration(breakAfterSeconds * 10000000);
Common(headers, details, parameters, databaseHeaders, databaseDetails);
}
// (Re)creates every column dictionary, seeding an empty list per supplied key and
// per RowColumn / LogisticsColumn enum member.
private void Common(List<Enum> headers, List<Enum> details, List<Enum> parameters, List<Enum> databaseHeaders, List<Enum> databaseDetails)
{
Headers = new Dictionary<Enum, List<string>>();
Details = new Dictionary<Enum, List<string>>();
Parameters = new Dictionary<Enum, List<string>>();
ExtendedParameters = new Dictionary<Enum, List<ExtendedParameter>>();
DatabaseHeaders = new Dictionary<Enum, List<string>>();
DatabaseDetails = new Dictionary<Enum, List<string>>();
IgnoreIndeices = new Dictionary<Test, Dictionary<Enum, List<int>>>();
LogisticsColumns = new Dictionary<Description.LogisticsColumn, List<string>>();
foreach (var item in headers)
Headers.Add(item, new List<string>());
foreach (var item in details)
Details.Add(item, new List<string>());
foreach (var item in parameters)
Parameters.Add(item, new List<string>());
foreach (var item in parameters)
ExtendedParameters.Add(item, new List<ExtendedParameter>());
foreach (var item in databaseHeaders)
DatabaseHeaders.Add(item, new List<string>());
foreach (var item in databaseDetails)
DatabaseDetails.Add(item, new List<string>());
Array array;
array = Enum.GetValues(typeof(Description.RowColumn));
RowColumns = new Dictionary<Description.RowColumn, List<int>>();
foreach (Description.RowColumn item in array)
RowColumns.Add(item, new List<int>());
array = Enum.GetValues(typeof(Description.LogisticsColumn));
foreach (Description.LogisticsColumn item in array)
LogisticsColumns.Add(item, new List<string>());
}
// Clears ProcessData/SourceFiles and rebuilds all dictionaries with the same keys
// they currently have (values are discarded).
internal void Reset()
{
ProcessData = null;
SourceFiles.Clear();
List<Enum> headers = new List<Enum>();
List<Enum> details = new List<Enum>();
List<Enum> parameters = new List<Enum>();
List<Enum> databaseHeaders = new List<Enum>();
List<Enum> databaseDetails = new List<Enum>();
foreach (var item in Headers)
headers.Add(item.Key);
foreach (var item in Details)
details.Add(item.Key);
foreach (var item in Parameters)
parameters.Add(item.Key);
foreach (var item in DatabaseHeaders)
databaseHeaders.Add(item.Key);
foreach (var item in DatabaseDetails)
databaseDetails.Add(item.Key);
Common(headers, details, parameters, databaseHeaders, databaseDetails);
}
// Shallow copy: the clone shares all dictionary/list instances with this object.
public ExtractResult ShallowCopy()
{
return (ExtractResult)MemberwiseClone();
}
// The *AddRange methods below register new column keys (with empty value lists)
// in their respective dictionaries; they throw on duplicate keys.
internal void HeadersAddRange(Enum column)
{
Headers.Add(column, new List<string>());
}
internal void HeadersAddRange(params Enum[] columns)
{
foreach (var item in columns)
Headers.Add(item, new List<string>());
}
internal void HeadersAddRange(List<Enum> columns)
{
foreach (var item in columns)
Headers.Add(item, new List<string>());
}
internal void DetailsAddRange(Enum column)
{
Details.Add(column, new List<string>());
}
internal void DetailsAddRange(params Enum[] columns)
{
foreach (var item in columns)
Details.Add(item, new List<string>());
}
internal void DetailsAddRange(List<Enum> columns)
{
foreach (var item in columns)
Details.Add(item, new List<string>());
}
internal void ParametersAddRange(Enum column)
{
Parameters.Add(column, new List<string>());
}
internal void ParametersAddRange(params Enum[] columns)
{
foreach (var item in columns)
Parameters.Add(item, new List<string>());
}
internal void ParametersAddRange(List<Enum> columns)
{
foreach (var item in columns)
Parameters.Add(item, new List<string>());
}
internal void DatabaseHeadersAddRange(Enum column)
{
DatabaseHeaders.Add(column, new List<string>());
}
internal void DatabaseHeadersAddRange(params Enum[] columns)
{
foreach (var item in columns)
DatabaseHeaders.Add(item, new List<string>());
}
internal void DatabaseHeadersAddRange(List<Enum> columns)
{
foreach (var item in columns)
DatabaseHeaders.Add(item, new List<string>());
}
internal void DatabaseDetailsAddRange(Enum column)
{
DatabaseDetails.Add(column, new List<string>());
}
internal void DatabaseDetailsAddRange(params Enum[] columns)
{
foreach (var item in columns)
DatabaseDetails.Add(item, new List<string>());
}
internal void DatabaseDetailsAddRange(List<Enum> columns)
{
foreach (var item in columns)
DatabaseDetails.Add(item, new List<string>());
}
// Returns the common row count across all column lists (and the Test row column);
// throws if any list disagrees, i.e. the collections are ragged.
internal int GetCount()
{
int result = 0;
List<int> counts = new List<int>
{
RowColumns[Description.RowColumn.Test].Count()
};
foreach (var item in Headers)
counts.Add(item.Value.Count());
foreach (var item in Details)
counts.Add(item.Value.Count());
foreach (var item in Parameters)
counts.Add(item.Value.Count());
foreach (var item in DatabaseHeaders)
counts.Add(item.Value.Count());
foreach (var item in DatabaseDetails)
counts.Add(item.Value.Count());
result = counts.Max();
if (counts.Distinct().Count() != 1)
throw new Exception();
return result;
}
// Pairs -> dictionary; throws on duplicate keys.
private Dictionary<Enum, List<string>> Merge(List<KeyValuePair<Enum, List<string>>> keyValuePairs)
{
Dictionary<Enum, List<string>> results = new Dictionary<Enum, List<string>>();
foreach (var element in keyValuePairs)
results.Add(element.Key, element.Value);
return results;
}
// All five column dictionaries flattened into one pair list (list references shared).
private List<KeyValuePair<Enum, List<string>>> GetAllColumnKeyValuePairs()
{
List<KeyValuePair<Enum, List<string>>> results = new List<KeyValuePair<Enum, List<string>>>();
foreach (var item in Headers)
results.Add(new KeyValuePair<Enum, List<string>>(item.Key, item.Value));
foreach (var item in Details)
results.Add(new KeyValuePair<Enum, List<string>>(item.Key, item.Value));
foreach (var item in Parameters)
results.Add(new KeyValuePair<Enum, List<string>>(item.Key, item.Value));
foreach (var item in DatabaseHeaders)
results.Add(new KeyValuePair<Enum, List<string>>(item.Key, item.Value));
foreach (var item in DatabaseDetails)
results.Add(new KeyValuePair<Enum, List<string>>(item.Key, item.Value));
return results;
}
// Merged view of every column. With no EnumColumns configured this is a plain merge;
// otherwise values are copied into the configured EnumColumns/PairedColumns keys and
// short lists are padded with empty strings up to the common row count.
// NOTE(review): in the EnumColumns branch, a source key absent from
// EnumColumns/PairedColumns would throw KeyNotFound on results[item.Key] - presumably
// configuration guarantees coverage; confirm.
internal Dictionary<Enum, List<string>> GetAllColumnCollection()
{
Dictionary<Enum, List<string>> results;
if (!EnumColumns.Any())
{
List<KeyValuePair<Enum, List<string>>> keyValuePairs = GetAllColumnKeyValuePairs();
results = Merge(keyValuePairs);
}
else
{
results = new Dictionary<Enum, List<string>>();
foreach (var item in EnumColumns)
results.Add(item, new List<string>());
foreach (var item in PairedColumns)
results.Add(item, new List<string>());
foreach (var item in Headers)
results[item.Key].AddRange(item.Value);
foreach (var item in Details)
results[item.Key].AddRange(item.Value);
foreach (var item in Parameters)
results[item.Key].AddRange(item.Value);
foreach (var item in DatabaseHeaders)
results[item.Key].AddRange(item.Value);
foreach (var item in DatabaseDetails)
results[item.Key].AddRange(item.Value);
int count = GetCount();
foreach (var keyValuePair in results)
{
for (int i = keyValuePair.Value.Count; i < count; i++)
results[keyValuePair.Key].Add(string.Empty);
}
}
return results;
}
// Pairs -> dictionary (single-row variant); throws on duplicate keys.
private Dictionary<Enum, string> Merge(List<KeyValuePair<Enum, string>> keyValuePairs)
{
Dictionary<Enum, string> results = new Dictionary<Enum, string>();
foreach (var element in keyValuePairs)
results.Add(element.Key, element.Value);
return results;
}
// One row (index i) from every column; empty when i is null.
private List<KeyValuePair<Enum, string>> GetAllColumnKeyValuePairs(int? i)
{
List<KeyValuePair<Enum, string>> results = new List<KeyValuePair<Enum, string>>();
if (i.HasValue)
{
foreach (var item in Headers)
results.Add(new KeyValuePair<Enum, string>(item.Key, item.Value[i.Value]));
foreach (var item in Details)
results.Add(new KeyValuePair<Enum, string>(item.Key, item.Value[i.Value]));
foreach (var item in Parameters)
results.Add(new KeyValuePair<Enum, string>(item.Key, item.Value[i.Value]));
foreach (var item in DatabaseHeaders)
results.Add(new KeyValuePair<Enum, string>(item.Key, item.Value[i.Value]));
foreach (var item in DatabaseDetails)
results.Add(new KeyValuePair<Enum, string>(item.Key, item.Value[i.Value]));
}
return results;
}
// Single row (index i) across all columns, as a column -> value map.
internal Dictionary<Enum, string> GetAllColumnCollection(int? i)
{
Dictionary<Enum, string> results;
List<KeyValuePair<Enum, string>> keyValuePairs = GetAllColumnKeyValuePairs(i);
results = Merge(keyValuePairs);
return results;
}
private List<KeyValuePair<Enum, List<string>>> GetToolHeadersAndDatabaseHeadersColumnKeyValuePairs()
{
List<KeyValuePair<Enum, List<string>>> results = new List<KeyValuePair<Enum, List<string>>>();
foreach (var item in Headers)
results.Add(new KeyValuePair<Enum, List<string>>(item.Key, item.Value));
foreach (var item in DatabaseHeaders)
results.Add(new KeyValuePair<Enum, List<string>>(item.Key, item.Value));
return results;
}
// Merged view of tool headers + database headers only.
internal Dictionary<Enum, List<string>> GetToolHeadersAndDatabaseHeadersCollection()
{
Dictionary<Enum, List<string>> results;
List<KeyValuePair<Enum, List<string>>> keyValuePairs = GetToolHeadersAndDatabaseHeadersColumnKeyValuePairs();
results = Merge(keyValuePairs);
return results;
}
private List<KeyValuePair<Enum, List<string>>> GetToolDetailsAndDatabaseDetailsColumnKeyValuePairs()
{
List<KeyValuePair<Enum, List<string>>> results = new List<KeyValuePair<Enum, List<string>>>();
foreach (var item in Details)
results.Add(new KeyValuePair<Enum, List<string>>(item.Key, item.Value));
foreach (var item in Parameters)
results.Add(new KeyValuePair<Enum, List<string>>(item.Key, item.Value));
foreach (var item in DatabaseDetails)
results.Add(new KeyValuePair<Enum, List<string>>(item.Key, item.Value));
return results;
}
// Merged view of tool details + parameters + database details.
internal Dictionary<Enum, List<string>> GetToolDetailsAndDatabaseDetailsCollection()
{
Dictionary<Enum, List<string>> results;
List<KeyValuePair<Enum, List<string>>> keyValuePairs = GetToolDetailsAndDatabaseDetailsColumnKeyValuePairs();
results = Merge(keyValuePairs);
return results;
}
// Groups row indices by the Test value stored in the Test row column.
internal Dictionary<Test, List<int>> GetTests()
{
Dictionary<Test, List<int>> results = new Dictionary<Test, List<int>>();
Test test;
for (int i = 0; i < RowColumns[Description.RowColumn.Test].Count; i++)
{
test = (Test)RowColumns[Description.RowColumn.Test][i];
if (!results.ContainsKey(test))
results.Add(test, new List<int>());
results[test].Add(i);
}
return results;
}
// Pads every column list up to 'count' rows: headers/details (and the
// currentColumns subset of parameters) with nullData, everything else with
// string.Empty; then rebuilds the Count row column if it got out of sync.
internal void FillIn(string nullData, int count, Enum[] currentColumns)
{
foreach (Enum column in Headers.Keys)
{
for (int i = Headers[column].Count(); i < count; i++)
Headers[column].Add(nullData);
}
foreach (Enum column in Details.Keys)
{
for (int i = Details[column].Count(); i < count; i++)
Details[column].Add(nullData);
}
if (!(currentColumns is null))
{
foreach (Enum column in currentColumns)
{
for (int i = Parameters[column].Count(); i < count; i++)
Parameters[column].Add(nullData);
}
}
foreach (Enum column in Parameters.Keys)
{
for (int i = Parameters[column].Count(); i < count; i++)
Parameters[column].Add(string.Empty);
}
foreach (Enum column in DatabaseHeaders.Keys)
{
for (int i = DatabaseHeaders[column].Count(); i < count; i++)
DatabaseHeaders[column].Add(string.Empty);
}
foreach (Enum column in DatabaseDetails.Keys)
{
for (int i = DatabaseDetails[column].Count(); i < count; i++)
DatabaseDetails[column].Add(string.Empty);
}
if (RowColumns[Description.RowColumn.Count].Count() != RowColumns[Description.RowColumn.Test].Count())
{
count = RowColumns[Description.RowColumn.Test].Count();
RowColumns[Description.RowColumn.Count].Clear();
for (int i = 0; i < count; i++)
RowColumns[Description.RowColumn.Count].Add(count);
}
}
// Routes raw PDSF-style data into the column dictionaries (by Column /
// LogisticsColumn / RowColumn key resolution), pads database columns, and builds
// IgnoreIndeices for empty / null-data cells per test.
internal void SetCollections(Logistics logistics, Dictionary<Test, Dictionary<string, List<string>>> rawData)
{
Array array;
Column? column;
bool recordStartPresent = false;
Description.RowColumn? rowColumn;
Description.LogisticsColumn? logisticsColumn;
array = Enum.GetValues(typeof(Description.RowColumn));
Dictionary<string, Enum> headers = new Dictionary<string, Enum>();
Dictionary<string, Enum> details = new Dictionary<string, Enum>();
Dictionary<string, Enum> parameters = new Dictionary<string, Enum>();
Dictionary<string, Enum> databaseHeaders = new Dictionary<string, Enum>();
Dictionary<string, Enum> databaseDetails = new Dictionary<string, Enum>();
Dictionary<Description.RowColumn, List<string>> rowColumns = new Dictionary<Description.RowColumn, List<string>>();
// NOTE(review): the next four loops add Details/Parameters/DatabaseHeaders/
// DatabaseDetails keys into 'headers' as well, leaving 'details', 'parameters',
// 'databaseHeaders' and 'databaseDetails' empty - so the name-based fallback
// below can only ever resolve through 'headers'. Looks like a copy/paste issue;
// confirm intended behavior before changing.
foreach (var item in Headers)
headers.Add(item.Key.ToString(), item.Key);
foreach (var item in Details)
headers.Add(item.Key.ToString(), item.Key);
foreach (var item in Parameters)
headers.Add(item.Key.ToString(), item.Key);
foreach (var item in DatabaseHeaders)
headers.Add(item.Key.ToString(), item.Key);
foreach (var item in DatabaseDetails)
headers.Add(item.Key.ToString(), item.Key);
foreach (Description.RowColumn item in array)
rowColumns.Add(item, new List<string>());
foreach (var element in rawData)
{
foreach (var item in element.Value)
{
column = null;
rowColumn = null;
logisticsColumn = null;
// Bookkeeping keys that never map to a column.
if (item.Key == "Time")
continue;
else if (item.Key == "A_LOGISTICS")
continue;
else if (item.Key == "B_LOGISTICS")
continue;
else if (item.Key == "EventId")
continue;
else if (item.Key == ProcessDataStandardFormat.RecordStart)
{
// RecordStart relaxes the "unknown key" checks below.
recordStartPresent = true;
continue;
}
// Resolve the key: Column first, then LogisticsColumn, then RowColumn.
if (Enum.TryParse(item.Key, out Column columnTry))
column = columnTry;
else
{
if (Enum.TryParse(item.Key, out Description.LogisticsColumn logisticsColumnTry))
logisticsColumn = logisticsColumnTry;
else
{
if (Enum.TryParse(item.Key, out Description.RowColumn rowColumnTry))
rowColumn = rowColumnTry;
}
}
if (rowColumn.HasValue)
rowColumns[rowColumn.Value].AddRange(item.Value);
else if (logisticsColumn.HasValue)
LogisticsColumns[logisticsColumn.Value].AddRange(item.Value);
else if (column.HasValue)
{
if (Headers.ContainsKey(column.Value))
Headers[column.Value].AddRange(item.Value);
else if (Details.ContainsKey(column.Value))
Details[column.Value].AddRange(item.Value);
else if (Parameters.ContainsKey(column.Value))
Parameters[column.Value].AddRange(item.Value);
else if (DatabaseHeaders.ContainsKey(column.Value))
DatabaseHeaders[column.Value].AddRange(item.Value);
else if (DatabaseDetails.ContainsKey(column.Value))
DatabaseDetails[column.Value].AddRange(item.Value);
else
{
// Unregistered column: only tolerated when RecordStart was seen.
if (!recordStartPresent)
throw new Exception();
}
}
else
{
// Name-based fallback for keys that are not parseable enum members.
if (headers.ContainsKey(item.Key))
Headers[headers[item.Key]].AddRange(item.Value);
else if (details.ContainsKey(item.Key))
Details[details[item.Key]].AddRange(item.Value);
else if (parameters.ContainsKey(item.Key))
Parameters[parameters[item.Key]].AddRange(item.Value);
else if (databaseHeaders.ContainsKey(item.Key))
DatabaseHeaders[databaseHeaders[item.Key]].AddRange(item.Value);
else if (databaseDetails.ContainsKey(item.Key))
DatabaseDetails[databaseDetails[item.Key]].AddRange(item.Value);
else
{
if (!recordStartPresent)
throw new Exception();
}
}
}
}
// Raw row-column strings -> ints; unparseable values silently become 0.
foreach (var element in rowColumns)
{
for (int i = 0; i < element.Value.Count(); i++)
{
int.TryParse(element.Value[i], out int rowColumnTry);
RowColumns[element.Key].Add(rowColumnTry);
}
}
array = Enum.GetValues(typeof(Description.RowColumn));
foreach (Description.RowColumn item in array)
{
if (!RowColumns.ContainsKey(item))
throw new Exception();
}
array = Enum.GetValues(typeof(Description.LogisticsColumn));
foreach (Description.LogisticsColumn item in array)
{
if (!LogisticsColumns.ContainsKey(item))
throw new Exception();
}
// Pad database columns to the row count.
int count = rowColumns[Description.RowColumn.Test].Count();
foreach (var element in DatabaseHeaders)
{
for (int i = element.Value.Count(); i < count; i++)
element.Value.Add(string.Empty);
}
foreach (var element in DatabaseDetails)
{
for (int i = element.Value.Count(); i < count; i++)
element.Value.Add(string.Empty);
}
string nullData;
if (logistics.NullData is null)
nullData = string.Empty;
else
nullData = logistics.NullData.ToString();
// Build ignore indices: for each test, per Details/Parameters/DatabaseDetails
// column, record rows that are empty or match the null-data marker; columns
// with no values are fully ignored and padded with empty strings.
Dictionary<Enum, List<string>> keyValuePairs;
foreach (Test key in rawData.Keys)
{
IgnoreIndeices.Add(key, new Dictionary<Enum, List<int>>());
for (int g = 1; g < 4; g++)
{
switch (g)
{
case 1:
keyValuePairs = Details;
break;
case 2:
keyValuePairs = Parameters;
break;
case 3:
keyValuePairs = DatabaseDetails;
break;
default:
throw new Exception();
}
foreach (var element in keyValuePairs)
{
IgnoreIndeices[key].Add(element.Key, new List<int>());
if (!element.Value.Any())
{
for (int i = 0; i < RowColumns[Description.RowColumn.Test].Count(); i++)
{
IgnoreIndeices[key][element.Key].Add(i);
element.Value.Add(string.Empty);
}
}
else
{
for (int i = 0; i < element.Value.Count(); i++)
{
if (RowColumns[Description.RowColumn.Test][i] == (int)key)
{
if (string.IsNullOrEmpty(element.Value[i]))
IgnoreIndeices[key][element.Key].Add(i);
else if (!(logistics.NullData is null) && element.Value[i] == nullData)
IgnoreIndeices[key][element.Key].Add(i);
}
}
}
}
}
}
if (recordStartPresent)
FillIn(string.Empty, RowColumns[Description.RowColumn.Test].Count(), currentColumns: null);
GetCount();
}
// Records roughly two thirds of the observed duration, floored at 5 s
// (50,000,000 ticks), as the new LastTicksDuration.
internal void UpdateLastTicksDuration(long ticksDuration)
{
if (ticksDuration < 50000000)
ticksDuration = 50000000;
LastTicksDuration = (long)Math.Ceiling(ticksDuration * .667);
}
// Builds the duplicate-file path from scopeInfo's file-name template, replacing
// %RDS% (second hyphen-separated segment of logistics.MID) and any
// %DateTime:<format>% placeholder; throws if any '%' placeholder survives.
private string GetTupleFile(Logistics logistics, ScopeInfo scopeInfo, string duplicateDirectory)
{
string result;
string rds;
string dateValue;
string datePlaceholder;
string[] segments = logistics.MID.Split('-');
if (segments.Length < 2)
rds = "%RDS%";
else
rds = segments[1];
segments = scopeInfo.FileName.Split(new string[] { "DateTime:" }, StringSplitOptions.RemoveEmptyEntries);
if (segments.Length == 0)
result = string.Concat(duplicateDirectory, @"\", scopeInfo.FileNameWithoutExtension.Replace("%RDS%", rds));
else
{
datePlaceholder = "%DateTime%";
segments = segments[1].Split('%');
dateValue = logistics.DateTimeFromSequence.ToString(segments[0]);
foreach (string segment in scopeInfo.FileName.Split('%'))
{
if (!segment.Contains(segments[0]))
continue;
datePlaceholder = string.Concat('%', segment, '%');
}
result = string.Concat(duplicateDirectory, @"\", scopeInfo.FileName.Replace("%RDS%", rds).Replace(datePlaceholder, dateValue));
}
if (result.Contains('%'))
throw new Exception("Placeholder exists!");
return result;
}
// Appends a value to whichever column dictionary owns the key; throws when the
// key is not registered anywhere.
internal void AutoAdd(Enum key, string value)
{
if (Headers.ContainsKey(key))
Headers[key].Add(value);
else if (Details.ContainsKey(key))
Details[key].Add(value);
else if (Parameters.ContainsKey(key))
Parameters[key].Add(value);
else if (DatabaseHeaders.ContainsKey(key))
DatabaseHeaders[key].Add(value);
else if (DatabaseDetails.ContainsKey(key))
DatabaseDetails[key].Add(value);
else
throw new Exception();
}
}
}

View File

@ -0,0 +1,813 @@
using Adaptation.Eaf.Management.ConfigurationData.CellAutomation;
using Adaptation.Ifx.Eaf.EquipmentConnector.File.Configuration;
using Adaptation.Shared.Methods;
using log4net;
using System;
using System.Collections.Generic;
using System.Globalization;
using System.IO;
using System.Linq;
using System.Text;
using System.Text.Json;
using System.Text.Json.Serialization;
using System.Threading;
namespace Adaptation.Shared;
public class FileRead : Properties.IFileRead
{
// Mutable state written by derived handlers.
protected string _NullData;
protected readonly ILog _Log;
protected long _MinFileLength;
protected Logistics _Logistics;
protected readonly ISMTP _SMTP;
protected readonly int _Hyphens;
protected readonly bool _IsEvent;
protected string _ReportFullPath;
protected long _LastTicksDuration;
protected readonly bool _IsEAFHosted;
protected readonly string _EventName;
protected readonly string _MesEntity;
protected readonly string _TracePath;
protected readonly bool _IsDuplicator;
protected readonly Calendar _Calendar;
protected readonly bool _IsSourceTimer;
protected readonly string _VillachPath;
protected readonly string _ProgressPath;
protected readonly string _EquipmentType;
protected readonly long _BreakAfterSeconds;
protected readonly string _ExceptionSubject;
protected readonly string _CellInstanceName;
protected readonly string _EventNameFileRead;
protected readonly IDescription _Description;
protected readonly bool _UseCyclicalForDescription;
protected readonly string _CellInstanceConnectionName;
protected readonly string _CellInstanceConnectionNameBase;
protected readonly Dictionary<string, List<long>> _DummyRuns;
protected readonly Dictionary<string, string> _FileParameter;
protected readonly string _ParameterizedModelObjectDefinitionType;
protected readonly FileConnectorConfiguration _FileConnectorConfiguration;
protected readonly IList<ModelObjectParameterDefinition> _ModelObjectParameterDefinitions;
// Explicit Properties.IFileRead implementation: expose the protected fields
// read-only through the interface.
bool Properties.IFileRead.IsEvent => _IsEvent;
string Properties.IFileRead.NullData => _NullData;
string Properties.IFileRead.EventName => _EventName;
string Properties.IFileRead.MesEntity => _MesEntity;
bool Properties.IFileRead.IsEAFHosted => _IsEAFHosted;
string Properties.IFileRead.EquipmentType => _EquipmentType;
string Properties.IFileRead.ReportFullPath => _ReportFullPath;
string Properties.IFileRead.CellInstanceName => _CellInstanceName;
string Properties.IFileRead.ExceptionSubject => _ExceptionSubject;
bool Properties.IFileRead.UseCyclicalForDescription => _UseCyclicalForDescription;
string Properties.IFileRead.CellInstanceConnectionName => _CellInstanceConnectionName;
string Properties.IFileRead.ParameterizedModelObjectDefinitionType => _ParameterizedModelObjectDefinitionType;
// Wires up all handler state from the cell-instance configuration. Validation
// is intentionally stricter when not EAF-hosted (i.e. when run under tests).
public FileRead(IDescription description, bool isEvent, ISMTP smtp, Dictionary<string, string> fileParameter, string cellInstanceName, string cellInstanceConnectionName, FileConnectorConfiguration fileConnectorConfiguration, string equipmentTypeName, string parameterizedModelObjectDefinitionType, IList<ModelObjectParameterDefinition> modelObjectParameters, string equipmentDictionaryName, Dictionary<string, List<long>> dummyRuns, bool useCyclicalForDescription, bool isEAFHosted)
{
    _SMTP = smtp;
    _IsEvent = isEvent;
    _DummyRuns = dummyRuns;
    _LastTicksDuration = 0;
    _IsEAFHosted = isEAFHosted;
    _Description = description;
    _FileParameter = fileParameter;
    _ReportFullPath = string.Empty;
    _CellInstanceName = cellInstanceName;
    _Calendar = new CultureInfo("en-US").Calendar;
    _Log = LogManager.GetLogger(typeof(FileRead));
    _UseCyclicalForDescription = useCyclicalForDescription;
    _CellInstanceConnectionName = cellInstanceConnectionName;
    _ModelObjectParameterDefinitions = modelObjectParameters;
    _FileConnectorConfiguration = fileConnectorConfiguration;
    _ParameterizedModelObjectDefinitionType = parameterizedModelObjectDefinitionType;
    _IsSourceTimer = fileConnectorConfiguration.SourceFileFilter.StartsWith("*Timer.txt");
    // Hyphen count in the connection name encodes nesting depth (used when
    // building the progress directory name).
    string cellInstanceConnectionNameBase = cellInstanceConnectionName.Replace("-", string.Empty);
    _Hyphens = cellInstanceConnectionName.Length - cellInstanceConnectionNameBase.Length;
    _ExceptionSubject = string.Concat("Exception:", _CellInstanceConnectionName, _FileConnectorConfiguration?.SourceDirectoryCloaking);
    string suffix;
    string[] segments = _ParameterizedModelObjectDefinitionType.Split('.');
    string @namespace = segments[0];
    string eventNameFileRead = "FileRead";
    string eventName = segments[segments.Length - 1];
    // A duplicator's namespace matches the cell instance name.
    bool isDuplicator = segments[0] == cellInstanceName;
    _IsDuplicator = isDuplicator;
    _CellInstanceConnectionNameBase = cellInstanceConnectionNameBase;
    // Derive the equipment-type suffix from anything after "FileRead" in the
    // event name (e.g. FileReadDaily -> "_Daily").
    if (eventName == eventNameFileRead)
        suffix = string.Empty;
    else
        suffix = string.Concat('_', eventName.Split(new string[] { eventNameFileRead }, StringSplitOptions.RemoveEmptyEntries)[1]);
    string parameterizedModelObjectDefinitionTypeAppended = string.Concat(@namespace, suffix);
    if (!isEAFHosted)
    {
        // Outside EAF hosting, enforce that the configured names line up:
        // the equipment type must match, and an equipment dictionary must be
        // present exactly when this handler is event-driven.
        if (string.IsNullOrEmpty(equipmentTypeName) || equipmentTypeName != parameterizedModelObjectDefinitionTypeAppended)
            throw new Exception(cellInstanceConnectionName);
        if (string.IsNullOrEmpty(equipmentDictionaryName) && isEvent)
            throw new Exception(cellInstanceConnectionName);
        if (!string.IsNullOrEmpty(equipmentDictionaryName) && !isEvent)
            throw new Exception(cellInstanceConnectionName);
        // if (string.IsNullOrEmpty(equipmentDictionaryName) && !isEvent)
        //     throw new Exception(cellInstanceConnectionName);
        // if (!string.IsNullOrEmpty(equipmentDictionaryName) && isEvent)
        //     throw new Exception(cellInstanceConnectionName);
    }
    // At least four "Path."-prefixed parameters (Trace/Villach/Progress/...)
    // are expected per connection.
    ModelObjectParameterDefinition[] paths = GetProperties(cellInstanceConnectionName, modelObjectParameters, "Path.");
    if (paths.Length < 4)
        throw new Exception(cellInstanceConnectionName);
    if (isDuplicator)
        _MesEntity = string.Empty;
    else
        _MesEntity = GetPropertyValue(cellInstanceConnectionName, modelObjectParameters, string.Concat("CellInstance.", cellInstanceName, ".Alias"));
    _TracePath = (from l in paths where l.Name.EndsWith("Trace") select l.Value).FirstOrDefault();
    _VillachPath = (from l in paths where l.Name.EndsWith("Villach") select l.Value).FirstOrDefault();
    _ProgressPath = (from l in paths where l.Name.EndsWith("Progress") select l.Value).FirstOrDefault();
    _EventName = eventName;
    _EventNameFileRead = eventNameFileRead;
    _EquipmentType = parameterizedModelObjectDefinitionTypeAppended;
    // Default the consumption timeout to 360 s for time-based scanning;
    // otherwise use the configured scanning interval.
    long breakAfterSeconds;
    if (_FileConnectorConfiguration is null)
        breakAfterSeconds = 360;
    else
    {
        if (_FileConnectorConfiguration.FileScanningOption == FileConnectorConfiguration.FileScanningOptionEnum.TimeBased)
            breakAfterSeconds = 360;
        else
            breakAfterSeconds = Math.Abs(_FileConnectorConfiguration.FileScanningIntervalInSeconds.Value);
    }
    _BreakAfterSeconds = breakAfterSeconds;
    UpdateLastTicksDuration(breakAfterSeconds * 10000000);
    if (_IsDuplicator)
    {
        // Duplicators must have concrete (placeholder-free) target locations
        // and a '~'-cloaked source directory.
        if (string.IsNullOrEmpty(_FileConnectorConfiguration.TargetFileLocation) || string.IsNullOrEmpty(_FileConnectorConfiguration.ErrorTargetFileLocation))
            throw new Exception("_Configuration is empty?");
        if (_FileConnectorConfiguration.TargetFileLocation.Contains('%') || _FileConnectorConfiguration.ErrorTargetFileLocation.Contains('%'))
            throw new Exception("_Configuration is incorrect for a duplicator!");
        if (_FileConnectorConfiguration is not null)
        {
            if (string.IsNullOrEmpty(_FileConnectorConfiguration.SourceDirectoryCloaking))
                throw new Exception("SourceDirectoryCloaking is empty?");
            if (!_FileConnectorConfiguration.SourceDirectoryCloaking.StartsWith("~"))
                throw new Exception("SourceDirectoryCloaking is incorrect for a duplicator!");
        }
    }
}
// Returns the value of the single model-object parameter named propertyName;
// throws (with the connection name as the message) unless exactly one matches.
protected static string GetPropertyValue(string cellInstanceConnectionName, IList<ModelObjectParameterDefinition> modelObjectParameters, string propertyName)
{
    List<string> matches = modelObjectParameters
        .Where(parameter => parameter.Name == propertyName)
        .Select(parameter => parameter.Value)
        .ToList();
    if (matches.Count != 1)
        throw new Exception(cellInstanceConnectionName);
    return matches[0];
}
// Returns every model-object parameter whose name starts with the prefix;
// throws (with the connection name as the message) when none match.
protected static ModelObjectParameterDefinition[] GetProperties(string cellInstanceConnectionName, IList<ModelObjectParameterDefinition> modelObjectParameters, string propertyNamePrefix)
{
    ModelObjectParameterDefinition[] results = modelObjectParameters
        .Where(parameter => parameter.Name.StartsWith(propertyNamePrefix))
        .ToArray();
    if (results.Length == 0)
        throw new Exception(cellInstanceConnectionName);
    return results;
}
// Overload that additionally constrains the name to end with the given suffix.
protected static ModelObjectParameterDefinition[] GetProperties(string cellInstanceConnectionName, IList<ModelObjectParameterDefinition> modelObjectParameters, string propertyNamePrefix, string propertyNameSuffix)
{
    ModelObjectParameterDefinition[] results = modelObjectParameters
        .Where(parameter => parameter.Name.StartsWith(propertyNamePrefix) && parameter.Name.EndsWith(propertyNameSuffix))
        .ToArray();
    if (results.Length == 0)
        throw new Exception(cellInstanceConnectionName);
    return results;
}
// Stores roughly two-thirds of the observed duration in _LastTicksDuration,
// clamping the input to a 50,000,000-tick (5 second) floor first.
protected void UpdateLastTicksDuration(long ticksDuration)
{
    const long minimumTicks = 50000000;
    long clamped = ticksDuration < minimumTicks ? minimumTicks : ticksDuration;
    _LastTicksDuration = (long)Math.Ceiling(clamped * .667);
}
// Polls until the worker thread stops running (or the poll counter is
// exhausted), then logs any exceptions the thread recorded and rethrows the
// first one. The shared list is cleared under its own lock.
protected void WaitForThread(Thread thread, List<Exception> threadExceptions)
{
    if (thread is not null)
    {
        ThreadState threadState;
        // Poll every 500 ms; short.MaxValue iterations bounds the wait.
        for (short i = 0; i < short.MaxValue; i++)
        {
            if (thread is null)
                break;
            else
            {
                threadState = thread.ThreadState;
                // Any state other than Running/WaitSleepJoin means done.
                if (threadState is not ThreadState.Running and not ThreadState.WaitSleepJoin)
                    break;
            }
            Thread.Sleep(500);
        }
        lock (threadExceptions)
        {
            if (threadExceptions.Any())
            {
                foreach (Exception item in threadExceptions)
                    _Log.Error(string.Concat(item.Message, Environment.NewLine, Environment.NewLine, item.StackTrace));
                // Surface the first exception to the caller; clear first so a
                // later wait does not re-throw stale entries.
                Exception exception = threadExceptions[0];
                threadExceptions.Clear();
                throw exception;
            }
        }
    }
}
// Creates the per-run progress directory (named from the connection's hyphen
// depth, year/week, MID and sequence) and optionally drops a readme.txt with
// the supplied exception lines.
protected void CreateProgressDirectory(string[] exceptionLines)
{
    string progressDirectory;
    StringBuilder stringBuilder = new();
    if (_Hyphens == 0)
        progressDirectory = Path.Combine(_ProgressPath, _CellInstanceConnectionName);
    else
    {
        // Build a visual depth marker: one '-' per hyphen, space every two.
        _ = stringBuilder.Clear();
        for (int i = 0; i < _Hyphens; i++)
        {
            if (i > 0 && (i % 2) == 0)
                _ = stringBuilder.Append(' ');
            _ = stringBuilder.Append('-');
        }
        progressDirectory = string.Concat(_ProgressPath, @"\", (_Hyphens + 1).ToString().PadLeft(2, '0'), " ", stringBuilder).Trim();
    }
    DateTime dateTime = DateTime.Now;
    string weekOfYear = _Calendar.GetWeekOfYear(dateTime, CalendarWeekRule.FirstDay, DayOfWeek.Sunday).ToString("00");
    progressDirectory = string.Concat(progressDirectory, @"\", dateTime.ToString("yyyy"), "_Week_", weekOfYear, @"\", _Logistics.MID, "_", _Logistics.Sequence, "_", DateTime.Now.Ticks - _Logistics.Sequence);
    if (!Directory.Exists(progressDirectory))
        _ = Directory.CreateDirectory(progressDirectory);
    if (exceptionLines is not null)
    {
        string fileName = string.Concat(progressDirectory, @"\readme.txt");
        // Best-effort write: progress reporting must never fail the run.
        try
        { File.WriteAllLines(fileName, exceptionLines); }
        catch (Exception) { }
    }
}
// Moves/copies the files gathered during extraction from 'from' to 'to'
// (per the connector's post-processing mode) and, on error, records an
// error-report row set that is also written via Shared0449.
protected string[] Move(Tuple<string, Test[], JsonElement[], List<FileInfo>> extractResults, string to, string from, string resolvedFileLocation, Exception exception)
{
    string[] results;
    bool isErrorFile = exception is not null;
    // NOTE(review): the Concat result is discarded, so 'to' never actually
    // gains a trailing backslash — looks like an analyzer "discard" fix that
    // broke an assignment; confirm intent before changing (it also affects the
    // Replace(from, to) below).
    if (!to.EndsWith(@"\"))
        _ = string.Concat(to, @"\");
    if (!isErrorFile)
        results = Array.Empty<string>();
    else
    {
        results = new string[] { _Logistics.Sequence.ToString(), _Logistics.ReportFullPath, from, resolvedFileLocation, to, string.Empty, string.Empty, exception.Message, string.Empty, string.Empty, exception.StackTrace };
        Shared0449(to, results);
    }
    if (extractResults is not null && extractResults.Item4 is not null && extractResults.Item4.Any())
    {
        string itemFile;
        List<string> directories = new();
        foreach (FileInfo sourceFile in extractResults.Item4)
        {
            // The report file itself is handled by Shared1811 (undersized-file
            // quarantine); every other file is relocated under 'to'.
            if (sourceFile.FullName != _Logistics.ReportFullPath)
            {
                itemFile = sourceFile.FullName.Replace(from, to);
                Shared1880(itemFile, directories, sourceFile, isErrorFile);
            }
            else if (!isErrorFile && _Logistics is not null)
                Shared1811(to, sourceFile);
        }
        // Clean up now-empty source directories.
        Shared0231(directories);
    }
    return results;
}
// Lazily enumerates every directory under 'path' breadth-first; when
// directoryNameSegment is provided, only directories whose final name
// contains that segment are yielded (all are still traversed).
protected static IEnumerable<string> GetDirectoriesRecursively(string path, string directoryNameSegment = null)
{
    Queue<string> pending = new();
    pending.Enqueue(path);
    while (pending.Count != 0)
    {
        string current = pending.Dequeue();
        foreach (string child in Directory.GetDirectories(current))
        {
            pending.Enqueue(child);
            bool matches = string.IsNullOrEmpty(directoryNameSegment) || Path.GetFileName(child).Contains(directoryNameSegment);
            if (matches)
                yield return child;
        }
    }
}
// Locates the single run directory for this logistics sequence and, if it has
// not been renamed yet, renames it with a "..._processed" stamp and returns a
// sequence-named subdirectory inside it.
protected string GetProcessedDirectory(string progressPath, Logistics logistics, DateTime dateTime, string duplicateDirectory)
{
    string result = duplicateDirectory;
    string logisticsSequence = logistics.Sequence.ToString();
    string[] matchDirectories;
    // Hosted: search the progress tree by sequence; unhosted (tests): the
    // report's grandparent directory is the run directory.
    if (!_IsEAFHosted)
        matchDirectories = new string[] { Path.GetDirectoryName(Path.GetDirectoryName(logistics.ReportFullPath)) };
    else
        matchDirectories = new string[] { GetDirectoriesRecursively(Path.GetDirectoryName(progressPath), logisticsSequence).FirstOrDefault() };
    // Fall back to searching the duplicate directory itself.
    if (matchDirectories.Length == 0 || string.IsNullOrEmpty(matchDirectories[0]))
        matchDirectories = Directory.GetDirectories(duplicateDirectory, string.Concat('*', logisticsSequence, '*'), SearchOption.AllDirectories);
    if ((matchDirectories is null) || matchDirectories.Length != 1)
        throw new Exception("Didn't find directory by logistics sequence");
    if (!matchDirectories[0].Contains("_processed"))
    {
        // Rename: <prefix><date>_<elapsed>_processed, then create a
        // sequence-named subdirectory to hold the processed artifacts.
        result = string.Concat(matchDirectories[0].Split(new string[] { logisticsSequence }, StringSplitOptions.None)[0], logistics.DateTimeFromSequence.ToString("yyyy-MM-dd_hh;mm_tt_"), dateTime.Ticks - logistics.Sequence, "_processed");
        Directory.Move(matchDirectories[0], result);
        result = string.Concat(result, @"\", logistics.Sequence);
        if (!Directory.Exists(result))
            _ = Directory.CreateDirectory(result);
    }
    return result;
}
// Writes each scope payload into the processed run directory and copies the
// source report alongside them as a .pdsf; returns the processed directory.
protected string WriteScopeInfo(string progressPath, Logistics logistics, DateTime dateTime, string duplicateDirectory, List<Tuple<Properties.IScopeInfo, string>> tuples)
{
    string processedDirectory = GetProcessedDirectory(progressPath, logistics, dateTime, duplicateDirectory);
    string baseName = Path.GetFileNameWithoutExtension(logistics.ReportFullPath);
    string duplicateFile = string.Concat(processedDirectory, @"\", baseName, ".pdsf");
    foreach (Tuple<Properties.IScopeInfo, string> tuple in tuples)
    {
        // A scope FileName starting with '\' is treated as a full path;
        // anything else is composed inside the processed directory.
        string tupleFile = tuple.Item1.FileName.StartsWith(@"\")
            ? tuple.Item1.FileName
            : string.Concat(processedDirectory, @"\", baseName, "_", tuple.Item1.FileNameWithoutExtension, ".pdsfc");
        File.WriteAllText(tupleFile, tuple.Item2);
    }
    File.Copy(logistics.ReportFullPath, duplicateFile, overwrite: true);
    return processedDirectory;
}
// Resolves the concrete file path for a scope payload by substituting the
// %RDS% and %DateTime...% placeholders in scopeInfo.FileName; throws if any
// '%' placeholder remains unresolved.
protected static string GetTupleFile(Logistics logistics, Properties.IScopeInfo scopeInfo, string duplicateDirectory)
{
    string result;
    string rds;
    string dateValue;
    string datePlaceholder;
    // RDS is the second hyphen-separated segment of the MID; without one the
    // literal "%RDS%" is kept (and will trigger the throw below).
    string[] segments = logistics.MID.Split('-');
    if (segments.Length < 2)
        rds = "%RDS%";
    else
        rds = segments[1];
    // "DateTime:" introduces a custom date format string in the file name.
    segments = scopeInfo.FileName.Split(new string[] { "DateTime:" }, StringSplitOptions.RemoveEmptyEntries);
    if (segments.Length == 0)
        result = string.Concat(duplicateDirectory, @"\", scopeInfo.FileNameWithoutExtension.Replace("%RDS%", rds));
    else
    {
        datePlaceholder = "%DateTime%";
        // segments[1] starts with the format string, terminated by '%'.
        segments = segments[1].Split('%');
        dateValue = logistics.DateTimeFromSequence.ToString(segments[0]);
        // Find the full '%...%' token carrying the format so the whole token
        // can be replaced by the formatted date.
        foreach (string segment in scopeInfo.FileName.Split('%'))
        {
            if (!segment.Contains(segments[0]))
                continue;
            datePlaceholder = string.Concat('%', segment, '%');
        }
        result = string.Concat(duplicateDirectory, @"\", scopeInfo.FileName.Replace("%RDS%", rds).Replace(datePlaceholder, dateValue));
    }
    if (result.Contains('%'))
        throw new Exception("Placeholder exists!");
    return result;
}
// Writes the duplicate/tuple files, then waits for the downstream consumer
// (the "right side" of the cloaked source directory) to delete them. On
// timeout the leftover files are removed and an exception names them.
protected void WaitForFileConsumption(string sourceDirectoryCloaking, Logistics logistics, DateTime dateTime, string successDirectory, string duplicateDirectory, string duplicateFile, List<Tuple<Properties.IScopeInfo, string>> tuples)
{
    bool check;
    long preWait;
    string tupleFile;
    List<int> consumedFileIndices = new();
    List<string> duplicateFiles = new();
    bool moreThanAnHour = (_BreakAfterSeconds > 3600);
    StringBuilder stringBuilder = new();
    long breakAfter = dateTime.AddSeconds(_BreakAfterSeconds).Ticks;
    // Long-interval connections get a fixed 30 s pre-wait; otherwise reuse the
    // adaptive duration learned from the previous run.
    if (moreThanAnHour)
        preWait = dateTime.AddSeconds(30).Ticks;
    else
        preWait = dateTime.AddTicks(_LastTicksDuration).Ticks;
    if (!tuples.Any())
        duplicateFiles.Add(duplicateFile);
    string fileName = Path.GetFileNameWithoutExtension(logistics.ReportFullPath);
    string successFile = string.Concat(successDirectory, @"\", Path.GetFileName(logistics.ReportFullPath));
    foreach (Tuple<Properties.IScopeInfo, string> tuple in tuples)
    {
        // Rooted name -> as-is; no '%' -> composed name; otherwise resolve
        // placeholders via GetTupleFile.
        if (tuple.Item1.FileName.StartsWith(@"\"))
            tupleFile = tuple.Item1.FileName;
        else if (!tuple.Item1.FileName.Contains('%'))
            tupleFile = string.Concat(duplicateDirectory, @"\", fileName, "_", tuple.Item1.FileNameWithoutExtension, ".pdsfc");
        else
            tupleFile = GetTupleFile(logistics, tuple.Item1, duplicateDirectory);
        duplicateFiles.Add(tupleFile);
        File.WriteAllText(tupleFile, tuple.Item2);
    }
    // Pre-wait phase: give the consumer a head start before polling.
    for (short i = 0; i < short.MaxValue; i++)
    {
        if (DateTime.Now.Ticks > preWait)
            break;
        Thread.Sleep(500);
    }
    if (!moreThanAnHour)
    {
        for (short z = 0; z < short.MaxValue; z++)
        {
            try
            {
                // Consumption is confirmed once the success file exists (when
                // configured) and every duplicate file has been deleted.
                check = (string.IsNullOrEmpty(successDirectory) || File.Exists(successFile));
                if (check)
                {
                    consumedFileIndices.Clear();
                    for (int i = 0; i < duplicateFiles.Count; i++)
                    {
                        if (!File.Exists(duplicateFiles[i]))
                            consumedFileIndices.Add(i);
                    }
                    if (consumedFileIndices.Count == duplicateFiles.Count)
                        break;
                }
            }
            catch (Exception) { }
            if (DateTime.Now.Ticks > breakAfter)
            {
                // Timeout: best-effort delete of leftovers, then report which
                // files were never consumed.
                for (int i = 0; i < duplicateFiles.Count; i++)
                {
                    if (File.Exists(duplicateFiles[i]))
                    {
                        try
                        { File.Delete(duplicateFiles[i]); }
                        catch (Exception) { }
                        _ = stringBuilder.Append('<').Append(duplicateFiles[i]).Append("> ");
                    }
                }
                throw new Exception(string.Concat("After {", _BreakAfterSeconds, "} seconds, right side of {", sourceDirectoryCloaking, "} didn't consume file(s) ", stringBuilder));
            }
            Thread.Sleep(500);
        }
    }
}
// Records a %key% substitution value, but only when the connector actually
// references that placeholder in one of its target/error locations or names
// (or when there is no configuration at all).
protected void SetFileParameter(string key, string value)
{
    string placeholder = string.Concat("%", key, "%");
    bool relevant = _FileConnectorConfiguration is null
        || _FileConnectorConfiguration.TargetFileLocation.Contains(placeholder)
        || _FileConnectorConfiguration.ErrorTargetFileLocation.Contains(placeholder)
        || _FileConnectorConfiguration.TargetFileName.Contains(placeholder)
        || _FileConnectorConfiguration.ErrorTargetFileName.Contains(placeholder);
    if (relevant)
        _FileParameter[key] = value;
}
// Sets the LotID (or LotIDWithLogisticsSequence) file parameter from the
// current logistics MID, sequence and elapsed ticks.
protected void SetFileParameterLotIDToLogisticsMID(bool includeLogisticsSequence = true)
{
    string key = includeLogisticsSequence ? "LotIDWithLogisticsSequence" : "LotID";
    string value = string.Concat(_Logistics.MID, "_", _Logistics.Sequence, "_", DateTime.Now.Ticks - _Logistics.Sequence);
    SetFileParameter(key, value);
}
// Sets the LotID file parameter to the supplied value; when the logistics
// sequence is included, the sequence and elapsed ticks are appended and the
// LotIDWithLogisticsSequence key is used instead.
protected void SetFileParameterLotID(string value, bool includeLogisticsSequence = true)
{
    string key = "LotID";
    if (includeLogisticsSequence)
    {
        key = "LotIDWithLogisticsSequence";
        value = string.Concat(value, "_", _Logistics.Sequence, "_", DateTime.Now.Ticks - _Logistics.Sequence);
    }
    SetFileParameter(key, value);
}
// Serializes the extract to PDSF text and writes it as an .ipdsf trace file
// under either the Villach target tree or the per-connection trace tree.
protected void WritePDSF(IFileRead fileRead, JsonElement[] jsonElements)
{
    string directory;
    if (!_CellInstanceConnectionName.StartsWith(_CellInstanceName) && _CellInstanceConnectionNameBase == _EquipmentType)
        directory = Path.Combine(_VillachPath, _EquipmentType, "Target");
    else
        directory = Path.Combine(_TracePath, _EquipmentType, "Source", _CellInstanceName, _CellInstanceConnectionName);
    if (!Directory.Exists(directory))
        _ = Directory.CreateDirectory(directory);
    string file = Path.Combine(directory, string.Concat(_Logistics.MesEntity, "_", _Logistics.Sequence, ".ipdsf"));
    string lines = ProcessDataStandardFormat.GetPDSFText(fileRead, _Logistics, jsonElements, logisticsText: string.Empty);
    File.WriteAllText(file, lines);
    // For late-arriving files (>10 min old), back-date the trace file to the
    // sequence time; best-effort only.
    if (_Logistics.TotalSecondsSinceLastWriteTimeFromSequence > 600)
    {
        try
        { File.SetLastWriteTime(file, _Logistics.DateTimeFromSequence); }
        catch (Exception) { }
    }
}
// Post-extract move driver: on success for a duplicator it records progress;
// when not EAF-hosted it resolves the target location's %placeholders% from
// _FileParameter and delegates to the five-argument Move.
protected void Move(Tuple<string, Test[], JsonElement[], List<FileInfo>> extractResults, Exception exception)
{
    bool isErrorFile = exception is not null;
    if (!isErrorFile && _IsDuplicator)
    {
        if (_IsEAFHosted && !string.IsNullOrEmpty(_ProgressPath))
            CreateProgressDirectory(exceptionLines: null);
    }
    if (!_IsEAFHosted)
    {
        string to;
        if (!_FileConnectorConfiguration.TargetFileLocation.EndsWith(Path.DirectorySeparatorChar.ToString()))
            to = _FileConnectorConfiguration.TargetFileLocation;
        else
            to = Path.GetDirectoryName(_FileConnectorConfiguration.TargetFileLocation);
        // Substitute every known %key% placeholder in the target location.
        foreach (KeyValuePair<string, string> keyValuePair in _FileParameter)
            to = to.Replace(string.Concat('%', keyValuePair.Key, '%'), keyValuePair.Value);
        // An unresolved placeholder means EAF-only parameters are required.
        if (to.Contains('%'))
            _Log.Debug("Can't debug without EAF Hosting");
        else
            _ = Move(extractResults, to, _FileConnectorConfiguration.SourceFileLocation, resolvedFileLocation: string.Empty, exception: null);
    }
}
// Walks every extracted JSON record and builds the per-property description
// and value (scalar, or a cyclical [index, value] list) for event triggering.
// NOTE(review): 'value' and 'description' are computed but never consumed in
// this method — presumably the actual event dispatch only exists in the
// EAF-hosted build; confirm before relying on this for side effects.
protected void TriggerEvents(Tuple<string, Test[], JsonElement[], List<FileInfo>> extractResults, List<string> headerNames, Dictionary<string, string> keyValuePairs)
{
    object value;
    string description;
    List<object[]> list;
    for (int i = 0; i < extractResults.Item3.Length; i++)
    {
        _Log.Debug(string.Concat("TriggerEvent - {", _Logistics.ReportFullPath, "} ", i, " of ", extractResults.Item3.Length));
        foreach (JsonProperty jsonProperty in extractResults.Item3[i].EnumerateObject())
        {
            // Description comes from the mapping's first '|'-separated field.
            if (jsonProperty.Value.ValueKind != JsonValueKind.String || !keyValuePairs.ContainsKey(jsonProperty.Name))
                description = string.Empty;
            else
                description = keyValuePairs[jsonProperty.Name].Split('|')[0];
            if (!_UseCyclicalForDescription || headerNames.Contains(jsonProperty.Name))
                value = jsonProperty.Value.ToString();
            else
            {
                // Cyclical mode: gather this property across all records.
                list = new List<object[]>();
                for (int z = 0; z < extractResults.Item3.Length; z++)
                    list.Add(new object[] { z, extractResults.Item3[z].GetProperty(jsonProperty.Name).ToString() });
                value = list;
            }
        }
        // Cyclical descriptions only need the first record's pass.
        if (_UseCyclicalForDescription)
            break;
    }
}
// Re-runs extraction against the first file currently matching the source
// filters (used for replays/tests); returns null when the source location is
// missing or no file matches. Triggers events when not EAF-hosted.
protected Tuple<string, Test[], JsonElement[], List<FileInfo>> ReExtract(IFileRead fileRead, List<string> headerNames, Dictionary<string, string> keyValuePairs)
{
    Tuple<string, Test[], JsonElement[], List<FileInfo>> results;
    if (!Directory.Exists(_FileConnectorConfiguration.SourceFileLocation))
        results = null;
    else
    {
        string[] segments;
        string[] matches = null;
        // Try each configured filter in order; only the final path segment of
        // the filter is used as the search pattern.
        foreach (string subSourceFileFilter in _FileConnectorConfiguration.SourceFileFilters)
        {
            segments = subSourceFileFilter.Split('\\');
            if (_FileConnectorConfiguration.IncludeSubDirectories.Value)
                matches = Directory.GetFiles(_FileConnectorConfiguration.SourceFileLocation, segments.Last(), SearchOption.AllDirectories);
            else
                matches = Directory.GetFiles(_FileConnectorConfiguration.SourceFileLocation, segments.Last(), SearchOption.TopDirectoryOnly);
            if (matches.Any())
                break;
        }
        if (matches is null || !matches.Any())
            results = null;
        else
        {
            _ReportFullPath = matches[0];
            results = fileRead.GetExtractResult(_ReportFullPath, _EventName);
            if (!_IsEAFHosted)
                TriggerEvents(results, headerNames, keyValuePairs);
        }
    }
    return results;
}
// Groups descriptions by their Test value, preserving encounter order within
// each group.
protected static Dictionary<Test, List<Properties.IDescription>> GetKeyValuePairs(List<Properties.IDescription> descriptions)
{
    Dictionary<Test, List<Properties.IDescription>> results = new();
    foreach (Properties.IDescription description in descriptions)
    {
        Test test = (Test)description.Test;
        if (!results.TryGetValue(test, out List<Properties.IDescription> collection))
        {
            collection = new List<Properties.IDescription>();
            results.Add(test, collection);
        }
        collection.Add(description);
    }
    return results;
}
// Deserializes each JSON object into a Duplicator.Description; numbers may
// arrive as strings, hence the relaxed number handling. Any non-object
// element indicates a malformed extract and throws.
protected static List<Properties.IDescription> GetDuplicatorDescriptions(JsonElement[] jsonElements)
{
    JsonSerializerOptions options = new() { NumberHandling = JsonNumberHandling.AllowReadingFromString | JsonNumberHandling.WriteAsString };
    List<Properties.IDescription> results = new();
    foreach (JsonElement jsonElement in jsonElements)
    {
        if (jsonElement.ValueKind != JsonValueKind.Object)
            throw new Exception();
        results.Add(JsonSerializer.Deserialize<Duplicator.Description>(jsonElement.ToString(), options));
    }
    return results;
}
// Buckets the descriptions by test, lets the handler validate the test set,
// and returns both the test array and the grouped descriptions.
protected static Tuple<Test[], Dictionary<Test, List<Properties.IDescription>>> GetTuple(IFileRead fileRead, IEnumerable<Properties.IDescription> descriptions, bool extra = false)
{
    Dictionary<Test, List<Properties.IDescription>> keyValuePairs = GetKeyValuePairs(descriptions.ToList());
    Test[] tests = keyValuePairs.Keys.ToArray();
    fileRead.CheckTests(tests, extra);
    return new Tuple<Test[], Dictionary<Test, List<Properties.IDescription>>>(tests, keyValuePairs);
}
// Error-report sink: duplicators record a progress directory, everything else
// writes the exception lines to <to>\readme.txt (best-effort, logged on
// failure).
// NOTE(review): in the duplicator branch exceptionLines is NOT forwarded
// (CreateProgressDirectory is called with null) — confirm that is intended.
protected void Shared0449(string to, string[] exceptionLines)
{
    if (_IsDuplicator)
        CreateProgressDirectory(exceptionLines: null);
    else
    {
        string fileName = string.Concat(to, @"\readme.txt");
        try
        {
            if (!Directory.Exists(to))
                _ = Directory.CreateDirectory(to);
            File.WriteAllLines(fileName, exceptionLines);
        }
        catch (Exception ex) { _Log.Error(ex.Message); }
    }
}
// Applies the connector's post-processing mode (Move/Copy/Delete) to one
// source file, creating the destination directory (stamped with the report's
// last-write time) when needed; records the source directory for later
// cleanup via Shared0231.
protected void Shared1880(string itemFile, List<string> directories, FileInfo sourceFile, bool isErrorFile)
{
    string itemDirectory;
    directories.Add(Path.GetDirectoryName(sourceFile.FullName));
    itemDirectory = Path.GetDirectoryName(itemFile);
    FileConnectorConfiguration.PostProcessingModeEnum processingModeEnum;
    // Error files follow the error post-processing mode.
    if (!isErrorFile)
        processingModeEnum = _FileConnectorConfiguration.PostProcessingMode.Value;
    else
        processingModeEnum = _FileConnectorConfiguration.ErrorPostProcessingMode.Value;
    if (processingModeEnum != FileConnectorConfiguration.PostProcessingModeEnum.Delete && !Directory.Exists(itemDirectory))
    {
        _ = Directory.CreateDirectory(itemDirectory);
        // Stamp the new directory with the report's last-write time.
        FileInfo fileInfo = new(_Logistics.ReportFullPath);
        Directory.SetCreationTime(itemDirectory, fileInfo.LastWriteTime);
    }
    // Only touch the filesystem when actually hosted by EAF.
    if (_IsEAFHosted)
    {
        switch (processingModeEnum)
        {
            case FileConnectorConfiguration.PostProcessingModeEnum.Move:
                File.Move(sourceFile.FullName, itemFile);
                break;
            case FileConnectorConfiguration.PostProcessingModeEnum.Copy:
                File.Copy(sourceFile.FullName, itemFile);
                break;
            case FileConnectorConfiguration.PostProcessingModeEnum.Delete:
                File.Delete(sourceFile.FullName);
                break;
            default:
                throw new Exception();
        }
    }
}
// Quarantines undersized source files (length below _MinFileLength) into an
// "_ Ignore 100 bytes" week-stamped tree next to the job directory, then
// prunes empty, underscore-named sibling directories older than 15 minutes.
// Fix: the original wrapped the pruning loop in try/catch (Exception)
// { throw; }, a rethrow-only wrapper with no effect — removed.
protected void Shared1811(string to, FileInfo sourceFile)
{
    if (!_IsDuplicator && _FileConnectorConfiguration.SourceFileFilter != "*" && sourceFile.Exists && sourceFile.Length < _MinFileLength)
    {
        string directoryName = Path.GetFileName(to);
        string jobIdDirectory = Path.GetDirectoryName(to);
        DateTime dateTime = DateTime.Now.AddMinutes(-15);
        string weekOfYear = _Calendar.GetWeekOfYear(_Logistics.DateTimeFromSequence, CalendarWeekRule.FirstDay, DayOfWeek.Sunday).ToString("00");
        string weekDirectory = string.Concat(_Logistics.DateTimeFromSequence.ToString("yyyy"), "_Week_", weekOfYear, @"\", _Logistics.DateTimeFromSequence.ToString("yyyy-MM-dd"));
        string destinationDirectory = string.Concat(jobIdDirectory, @"\_ Ignore 100 bytes\", weekDirectory, @"\", directoryName);
        if (!Directory.Exists(destinationDirectory))
            _ = Directory.CreateDirectory(destinationDirectory);
        File.Move(sourceFile.FullName, string.Concat(destinationDirectory, @"\", sourceFile.Name));
        // Housekeeping: delete underscore-named directories that are fully
        // empty (no files or directories at any depth) and at least 15
        // minutes old.
        string[] checkDirectories = Directory.GetDirectories(jobIdDirectory, "*", SearchOption.TopDirectoryOnly);
        foreach (string checkDirectory in checkDirectories)
        {
            if (!checkDirectory.Contains('_'))
                continue;
            if (Directory.GetDirectories(checkDirectory, "*", SearchOption.TopDirectoryOnly).Any())
                continue;
            if (Directory.GetFiles(checkDirectory, "*", SearchOption.TopDirectoryOnly).Any())
                continue;
            if (Directory.GetDirectories(checkDirectory, "*", SearchOption.AllDirectories).Any())
                continue;
            if (Directory.GetFiles(checkDirectory, "*", SearchOption.AllDirectories).Any())
                continue;
            if (new DirectoryInfo(checkDirectory).CreationTime > dateTime)
                continue;
            Directory.Delete(checkDirectory, recursive: false);
        }
    }
}
// Deletes now-empty source directories, deepest first, unless the connector
// runs in Copy mode (Copy leaves sources in place on purpose).
protected void Shared0231(List<string> directories)
{
    if (_FileConnectorConfiguration.PostProcessingMode == FileConnectorConfiguration.PostProcessingModeEnum.Copy)
        return;
    foreach (string directory in directories.OrderByDescending(d => d.Split('\\').Length).Distinct())
    {
        if (Directory.Exists(directory) && !Directory.EnumerateFiles(directory).Any())
            Directory.Delete(directory);
    }
}
// Real hosted runs wait for the downstream consumer to pick up the files;
// dummy or unhosted runs just pause for the configured retry interval.
protected void Shared0413(DateTime dateTime, bool isDummyRun, string successDirectory, string duplicateDirectory, List<Tuple<Properties.IScopeInfo, string>> tuples, string duplicateFile)
{
    if (!isDummyRun && _IsEAFHosted)
    {
        WaitForFileConsumption(_FileConnectorConfiguration.SourceDirectoryCloaking, _Logistics, dateTime, successDirectory, duplicateDirectory, duplicateFile, tuples);
        return;
    }
    long breakAfter = DateTime.Now.AddSeconds(_FileConnectorConfiguration.ConnectionRetryInterval.Value).Ticks;
    for (short i = 0; i < short.MaxValue; i++)
    {
        if (!_IsEAFHosted || DateTime.Now.Ticks > breakAfter)
            break;
        Thread.Sleep(500);
    }
}
// Destructively removes a processed destination tree plus the source report.
// Guard rails: refuses to delete the duplicate directory itself, and aborts
// if more than 250 files would be deleted.
protected static void Shared0607(string reportFullPath, string duplicateDirectory, string logisticsSequence, string destinationDirectory)
{
    if (destinationDirectory == duplicateDirectory)
        throw new Exception("Check Target File Folder for %LotIDWithLogisticsSequence%_in process on CI (not Duplicator)");
    // Delete from the parent of the sequence-named leaf directory.
    if (destinationDirectory.EndsWith(logisticsSequence))
        destinationDirectory = Path.GetDirectoryName(destinationDirectory);
    string[] deleteFiles = Directory.GetFiles(destinationDirectory, "*", SearchOption.AllDirectories);
    if (deleteFiles.Length > 250)
        throw new Exception("Safety net!");
    foreach (string file in deleteFiles)
        File.Delete(file);
    Directory.Delete(destinationDirectory, recursive: true);
    File.Delete(reportFullPath);
}
// Writes each scope payload into a sequence-named subdirectory of the single
// job/run directory matching this logistics sequence; when not EAF-hosted the
// payloads are instead compared against the historical files on disk.
// Returns the matched run directory.
// Fix: the original reassigned 'fileName' with its own full path on every
// loop iteration, so a second tuple produced a nested, invalid path; a
// per-iteration 'tupleFile' variable is used instead.
protected string[] Shared1567(string reportFullPath, List<Tuple<Properties.IScopeInfo, string>> tuples)
{
    string[] results;
    string historicalText;
    string logisticsSequence = _Logistics.Sequence.ToString();
    string jobIdDirectory = string.Concat(Path.GetDirectoryName(Path.GetDirectoryName(_FileConnectorConfiguration.TargetFileLocation)), @"\", _Logistics.JobID);
    if (!Directory.Exists(jobIdDirectory))
        _ = Directory.CreateDirectory(jobIdDirectory);
    string[] matchDirectories;
    // Hosted: locate the run directory by MID + sequence; unhosted (tests):
    // the report's grandparent directory is the run directory.
    if (!_IsEAFHosted)
        matchDirectories = new string[] { Path.GetDirectoryName(Path.GetDirectoryName(reportFullPath)) };
    else
        matchDirectories = Directory.GetDirectories(jobIdDirectory, string.Concat(_Logistics.MID, '*', logisticsSequence, '*'), SearchOption.TopDirectoryOnly);
    if ((matchDirectories is null) || matchDirectories.Length != 1)
        throw new Exception("Didn't find directory by logistics sequence");
    string fileName = Path.GetFileNameWithoutExtension(reportFullPath);
    string sequenceDirectory = string.Concat(matchDirectories[0], @"\", logisticsSequence);
    if (!Directory.Exists(sequenceDirectory))
        _ = Directory.CreateDirectory(sequenceDirectory);
    string tupleFile;
    foreach (Tuple<Properties.IScopeInfo, string> tuple in tuples)
    {
        tupleFile = string.Concat(sequenceDirectory, @"\", fileName, "_", tuple.Item1.FileNameWithoutExtension, ".pdsfc");
        if (_IsEAFHosted)
            File.WriteAllText(tupleFile, tuple.Item2);
        else
        {
            if (File.Exists(tupleFile))
            {
                historicalText = File.ReadAllText(tupleFile);
                if (tuple.Item2 != historicalText)
                    throw new Exception("File doesn't match historical!");
            }
        }
    }
    results = matchDirectories;
    return results;
}
// Archives a completed run: moves the destination directory into the
// Processed\<JobID> tree (renamed with a date/elapsed-ticks stamp), then
// copies the report and writes its JSON alongside in the sequence directory.
protected void Shared1277(string reportFullPath, string destinationDirectory, string logisticsSequence, string jobIdDirectory, string json)
{
    string ecCharacterizationSi = Path.GetDirectoryName(Path.GetDirectoryName(jobIdDirectory));
    string destinationJobIdDirectory = string.Concat(ecCharacterizationSi, @"\Processed\", _Logistics.JobID);
    if (!Directory.Exists(destinationJobIdDirectory))
        _ = Directory.CreateDirectory(destinationJobIdDirectory);
    // New name: <original prefix before the sequence> + date + elapsed ticks.
    destinationJobIdDirectory = string.Concat(destinationJobIdDirectory, @"\", Path.GetFileName(destinationDirectory).Split(new string[] { logisticsSequence }, StringSplitOptions.None)[0], _Logistics.DateTimeFromSequence.ToString("yyyy-MM-dd_hh;mm_tt_"), DateTime.Now.Ticks - _Logistics.Sequence);
    string sequenceDirectory = string.Concat(destinationJobIdDirectory, @"\", logisticsSequence);
    string jsonFileName = string.Concat(sequenceDirectory, @"\", Path.GetFileNameWithoutExtension(reportFullPath), ".json");
    Directory.Move(destinationDirectory, destinationJobIdDirectory);
    if (!Directory.Exists(sequenceDirectory))
        _ = Directory.CreateDirectory(sequenceDirectory);
    File.Copy(reportFullPath, string.Concat(sequenceDirectory, @"\", Path.GetFileName(reportFullPath)), overwrite: true);
    File.WriteAllText(jsonFileName, json);
}
}
// 2022-02-14 -> Shared - FileRead

View File

@ -1,16 +0,0 @@
using Adaptation.Shared.Metrology;
using System;
using System.Collections.Generic;
using System.IO;
using System.Text.Json;
namespace Adaptation.Shared
{
/// <summary>
/// Contract for per-equipment parsers that turn a report file set into results.
/// </summary>
public interface IProcessData
{
/// <summary>
/// Produces results for the given configuration and file collection.
/// NOTE(review): the meaning of each tuple item is not visible from this
/// interface — presumably (logistics/header text, parsed results as JSON,
/// files gathered during extraction); confirm against an implementation.
/// </summary>
Tuple<string, JsonElement?, List<FileInfo>> GetResults(ILogic logic, ConfigDataBase configData, List<FileInfo> fileInfoCollection);
}
}

View File

@ -1,26 +0,0 @@
using Adaptation.Shared.Metrology;
using System.Collections.Generic;
namespace Adaptation.Shared
{
/// <summary>
/// Describes one logical result record produced by a process-data parser:
/// its position within a run plus name/display metadata lookups.
/// NOTE(review): member semantics below are inferred from names only —
/// confirm against an implementation.
/// </summary>
public interface IProcessDataDescription
{
// Position of this description within a run: test id, total count, index.
int Test { get; set; }
int Count { get; set; }
int Index { get; set; }
/// <summary>Returns a default-initialized description instance.</summary>
IProcessDataDescription GetDefault(ILogic logic, ConfigDataBase configDataBase);
/// <summary>Returns an instance carrying display-name values.</summary>
IProcessDataDescription GetDisplayNames(ILogic logic, ConfigDataBase configDataBase);
/// <summary>Builds the description list for the given tests and parsed data.</summary>
List<IProcessDataDescription> GetDescription(ILogic logic, ConfigDataBase configDataBase, List<Test> tests, IProcessData iProcessData);
List<string> GetDetailNames(ILogic logic, ConfigDataBase configDataBase);
List<string> GetHeaderNames(ILogic logic, ConfigDataBase configDataBase);
/// <summary>Parameter names to exclude for the given test.</summary>
List<string> GetIgnoreParameterNames(ILogic logic, ConfigDataBase configDataBase, Test test);
List<string> GetNames(ILogic logic, ConfigDataBase configDataBase);
List<string> GetPairedParameterNames(ILogic logic, ConfigDataBase configDataBase);
List<string> GetParameterNames(ILogic logic, ConfigDataBase configDataBase);
/// <summary>Name of the EAF event this description is reported under.</summary>
string GetEventDescription();
}
}

View File

@ -1,239 +0,0 @@
using Adaptation.Shared.Metrology;
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
namespace Adaptation.Shared
{

    /// <summary>
    /// One row of IQS sample-group data read from the [irmnspc] database: a
    /// single measured parameter value with its identifying metadata and the
    /// <see cref="Column"/> the parameter name maps to.
    /// </summary>
    public class IQSRecord
    {

        public string SID { get; protected set; }
        public string Part { get; protected set; }
        public string Process { get; protected set; }
        public string Lot { get; protected set; }
        public string SampleSize { get; protected set; }
        public string ParameterName { get; protected set; }
        public string TestNumber { get; protected set; }
        public string ParameterValue { get; protected set; }
        public string WaferID { get; protected set; }
        public string WaferScribe { get; protected set; }
        public string Pocket { get; protected set; }
        public string EpiThicknessMean { get; protected set; }
        public string WaferRegion { get; protected set; }
        public string ToolID { get; protected set; }
        public string EmployeeID { get; protected set; }
        public string EmployeeName { get; protected set; }
        public string Date { get; protected set; }
        public Column Column { get; protected set; }

        /// <summary>
        /// Builds a record from raw query cells. A null cell becomes
        /// string.Empty; the parameter name is mapped to a <see cref="Column"/>
        /// via <paramref name="keyValuePairs"/>, defaulting to
        /// Column.AFM_Roughness when the name is null or unknown.
        /// </summary>
        public IQSRecord(object sID, object part, object process, object lot, object sampleSize, object parameterName, object testNumber, object parameterValue, object waferID, object waferScribe, object pocket, object epiThicknessMean, object waferRegion, object toolID, object employeeID, object employeeName, object date, Dictionary<string, Column> keyValuePairs)
        {
            SID = AsString(sID);
            Part = AsString(part);
            Process = AsString(process);
            Lot = AsString(lot);
            SampleSize = AsString(sampleSize);
            ParameterName = AsString(parameterName);
            TestNumber = AsString(testNumber);
            ParameterValue = AsString(parameterValue);
            WaferID = AsString(waferID);
            WaferScribe = AsString(waferScribe);
            Pocket = AsString(pocket);
            EpiThicknessMean = AsString(epiThicknessMean);
            WaferRegion = AsString(waferRegion);
            ToolID = AsString(toolID);
            EmployeeID = AsString(employeeID);
            EmployeeName = AsString(employeeName);
            Date = AsString(date);
            // TryGetValue avoids the previous ContainsKey + indexer double lookup.
            if (parameterName is null || !keyValuePairs.TryGetValue(parameterName.ToString(), out Column column))
                Column = Column.AFM_Roughness;
            else
                Column = column;
        }

        /// <summary>Null-safe conversion of a raw query cell (null becomes string.Empty).</summary>
        private static string AsString(object value) => value is null ? string.Empty : value.ToString();

        /// <summary>Shared FROM/JOIN clause used by both record queries.</summary>
        private static string GetBaseTableJoins()
        {
            StringBuilder result = new StringBuilder();
            result.Append(" from [irmnspc].[dbo].sgrp_ext se ").
                Append(" join [irmnspc].[dbo].test_dat td on se.f_test = td.f_test ").
                Append(" join [irmnspc].[dbo].part_dat pd on se.f_part = pd.f_part ").
                Append(" join [irmnspc].[dbo].part_lot pl on se.f_lot = pl.f_lot ").
                Append(" join [irmnspc].[dbo].prcs_dat pr on se.f_prcs = pr.f_prcs ").
                Append(" join [irmnspc].[dbo].empl_inf em on se.f_empl = em.f_empl ");
            return result.ToString();
        }

        /// <summary>
        /// SQL returning (sid, csv summary) for sample groups newer than the
        /// @lastSID parameter (less a 20-group overlap) for test name @key.
        /// </summary>
        internal static StringBuilder GetIqsRecordsSinceSql()
        {
            StringBuilder result = new StringBuilder();
            result.Append(" select ").
                Append(" se.f_sgrp [sid], concat(se.f_sgrp, ', ', td.f_name, ', ', pd.f_name, ', ', pl.f_name, ', ', pr.f_name, ', ', em.f_name, ', ', se.f_sgtm) [csv] ").
                Append(GetBaseTableJoins()).
                Append(" where se.f_sgrp >= 1543459064 ").
                Append(" and se.f_sgrp > ( @lastSID - 20 ) ").
                Append(" /* and dateadd(hh, -7, (dateadd(ss, convert(bigint, se.f_sgtm), '19700101'))) >= '2019-08-25 00:00:00.000' */ ").
                Append(" and td.f_name = @key ").
                Append(" group by se.f_sgrp, td.f_name, pd.f_name, pl.f_name, pr.f_name, em.f_name, se.f_sgtm ").
                Append(" order by se.f_sgrp, pd.f_name, td.f_name ");
            return result;
        }

        /// <summary>
        /// SQL returning the full 17-column record set for sample group @sid,
        /// pivoting the descriptor rows (wafer id/scribe, pocket, epi thickness
        /// mean, wafer region, tool id) into columns.
        /// </summary>
        internal static StringBuilder GetIqsRecordsSql()
        {
            StringBuilder result = new StringBuilder();
            result.Append(" select ").
                Append(" ta.id [SID], ").
                Append(" ta.ms [Part], ").
                Append(" ta.pr [Process], ").
                Append(" ta.lt [Lot], ").
                Append(" ta.sz [Sample Size], ").
                Append(" ta.pn [Parameter Name], ").
                Append(" ta.tn [Test Number], ").
                Append(" ta.pv [Parameter Value], ").
                Append(" tb.v1337859646 [Wafer ID], ").
                Append(" tb.v1337859592 [Wafer Scribe], ").
                Append(" tb.v1342510661 [Pocket], ").
                Append(" tb.v1340294286 [Epi Thickness Mean], ").
                Append(" tb.v1345566180 [Wafer Region], ").
                Append(" tb.v1363881711 [Tool ID], ").
                Append(" ta.em [Employee ID], ").
                Append(" ta.en [Employee Name], ").
                Append(" ta.dt [Date] ").
                Append(" from ( ").
                Append(" select ").
                Append(" se.f_sgrp id, ").
                Append(" se.f_sgsz sz, ").
                Append(" concat(se.f_tsno, '.', se.f_sbno) tn, ").
                Append(" se.f_val pv, ").
                Append(" se.f_empl em, ").
                Append(" dateadd(hh, -7, (dateadd(ss, convert(bigint, se.f_sgtm), '19700101'))) dt, ").
                Append(" td.f_name pn, ").
                Append(" pd.f_name as ms, ").
                Append(" pl.f_name lt, ").
                Append(" pr.f_name pr, ").
                Append(" em.f_name en ").
                Append(GetBaseTableJoins()).
                Append(" where se.f_sgrp = @sid ").
                Append(" ) as ta ").
                Append(" join ( ").
                Append(" select ").
                Append(" se.f_sgrp id, ").
                Append(" max(case when dd.f_dsgp = 1337859646 then dd.f_name end) as v1337859646, ").
                Append(" max(case when dd.f_dsgp = 1337859592 then dd.f_name end) as v1337859592, ").
                Append(" max(case when dd.f_dsgp = 1342510661 then dd.f_name end) as v1342510661, ").
                Append(" max(case when dd.f_dsgp = 1340294286 then dd.f_name end) as v1340294286, ").
                Append(" max(case when dd.f_dsgp = 1345566180 then dd.f_name end) as v1345566180, ").
                Append(" max(case when dd.f_dsgp = 1363881711 then dd.f_name end) as v1363881711 ").
                Append(" from [irmnspc].[dbo].sgrp_ext se ").
                Append(" join [irmnspc].[dbo].test_dat td on se.f_test = td.f_test ").
                Append(" join [irmnspc].[dbo].sgrp_dsc sd on se.f_sgrp = sd.f_sgrp ").
                Append(" join [irmnspc].[dbo].desc_dat dd on sd.f_desc = dd.f_desc ").
                Append(" and isnull(dd.f_name, '') <> '' ").
                Append(" where se.f_sgrp = @sid ").
                Append(" and dd.f_dsgp in (1337859646 /* Wafer ID */, 1337859592 /* Wafer Scribe */, 1342510661 /* Pocket */, 1340294286 /* Epi Thickness Mean */, 1345566180 /* Wafer Region */, 1363881711 /* Tool ID */) ").
                Append(" group by se.f_sgrp ").
                Append(" ) tb on ta.id = tb.id ").
                Append(" order by ta.id desc, ta.ms, ta.pr, ta.lt, ta.sz, ta.tn, ta.dt, ta.pn ");
            return result;
        }

        /// <summary>
        /// Materializes IQSRecord instances from raw column-indexed query data
        /// (rawData[0..16], one list per result column, parallel by row index).
        /// NOTE(review): the count parameter is unused (row count comes from
        /// rawData[0]); kept for caller compatibility.
        /// </summary>
        internal static List<IQSRecord> GetIqsRecords(Dictionary<int, List<object>> rawData, int count)
        {
            List<IQSRecord> results = new List<IQSRecord>();
            // Snapshot the 17 result columns in order; any missing key throws, as before.
            List<object>[] columns = new List<object>[17];
            for (int c = 0; c < columns.Length; c++)
                columns[c] = rawData[c];
            if (columns[0].Any())
            {
                Dictionary<string, Column> keyValuePairs = new Dictionary<string, Column>();
                foreach (Column column in Enum.GetValues(typeof(Column)))
                    keyValuePairs.Add(column.GetDiplayName(), column);
                for (int i = 0; i < columns[0].Count; i++)
                    results.Add(new IQSRecord(columns[0][i], columns[1][i], columns[2][i], columns[3][i], columns[4][i], columns[5][i], columns[6][i], columns[7][i], columns[8][i], columns[9][i], columns[10][i], columns[11][i], columns[12][i], columns[13][i], columns[14][i], columns[15][i], columns[16][i], keyValuePairs));
            }
            return results;
        }

        /// <summary>
        /// Separator-free concatenation of the identifying fields.
        /// NOTE(review): ParameterName/ParameterValue and the wafer descriptor
        /// fields are deliberately excluded — presumably used as a grouping or
        /// change-detection key; confirm callers before altering.
        /// </summary>
        public override string ToString()
        {
            return string.Concat(SID, Part, Process, Lot, SampleSize, TestNumber, EmployeeID, EmployeeName, Date);
        }

    }
}

View File

@ -1,21 +0,0 @@
using System;
namespace Adaptation.Shared
{
/// <summary>
/// Read-only metadata for a reporting scope: display strings, file-naming
/// values, a query filter and the equipment type the scope applies to.
/// NOTE(review): member semantics are inferred from names only — confirm
/// against an implementation.
/// </summary>
public interface IScopeInfo
{
// The enum value this scope was built from.
Enum Enum { get; }
string HTML { get; }
string Title { get; }
string FileName { get; }
int TestValue { get; }
string Header { get; }
// Filter text applied when querying records for this scope.
string QueryFilter { get; }
string FileNameWithoutExtension { get; }
EquipmentType EquipmentType { get; }
}
}

View File

@ -1,171 +0,0 @@
using System;
using System.Diagnostics;
using System.Runtime.InteropServices;
namespace Shared
{
/// <summary>
/// Describes the runtime environment: host OS (Windows / Linux / OSX) and
/// deployment stage (Development / Staging / Production), plus the derived
/// launch-profile name and appsettings file name.
/// </summary>
public class IsEnvironment
{

    public enum Name
    {
        LinuxDevelopment,
        LinuxProduction,
        LinuxStaging,
        OSXDevelopment,
        OSXProduction,
        OSXStaging,
        WindowsDevelopment,
        WindowsProduction,
        WindowsStaging
    }

    // True when a debugger was attached while this instance was being constructed.
    public bool DebuggerWasAttachedDuringConstructor { get; private set; }
    public bool Development { get; private set; }
    public bool Linux { get; private set; }
    public bool OSX { get; private set; }
    public bool Production { get; private set; }
    public bool Staging { get; private set; }
    public bool Windows { get; private set; }
    // Launch-profile name derived from the OS and stage flags (see GetProfile).
    public string Profile { get; private set; }
    // "appsettings[.{Stage}][.{processesCount}].json" (see GetAppSettingsFileName).
    public string AppSettingsFileName { get; private set; }
    // Raw value of the ASPNETCORE_ENVIRONMENT variable (may be null).
    public string ASPNetCoreEnvironment { get; private set; }

    /// <summary>
    /// Infers the environment either from an appsettings file name
    /// ("appsettings.json", "appsettings.Staging.json", ...) or from a test
    /// category string that starts with an OS name and/or ends with a stage name.
    /// </summary>
    /// <exception cref="Exception">When no valid OS/stage combination results (via GetProfile).</exception>
    public IsEnvironment(string testCategory)
    {
        if (testCategory.EndsWith(".json"))
        {
            Production = testCategory == "appsettings.json";
            // Bug fix: the stage suffix sits *before* the ".json" extension
            // ("appsettings.Staging.json"), so test the name with the extension
            // stripped; the previous EndsWith(nameof(Staging)) on the full file
            // name could never be true.
            string withoutExtension = testCategory.Substring(0, testCategory.Length - ".json".Length);
            Staging = withoutExtension.EndsWith(nameof(Staging));
            OSX = RuntimeInformation.IsOSPlatform(OSPlatform.OSX);
            Development = withoutExtension.EndsWith(nameof(Development));
            Linux = RuntimeInformation.IsOSPlatform(OSPlatform.Linux);
            DebuggerWasAttachedDuringConstructor = Debugger.IsAttached;
            Windows = RuntimeInformation.IsOSPlatform(OSPlatform.Windows);
            ASPNetCoreEnvironment = Environment.GetEnvironmentVariable("ASPNETCORE_ENVIRONMENT");
        }
        else
        {
            // OS comes from the test-category prefix, stage from its suffix;
            // an empty category leaves every flag false (GetProfile will throw).
            DebuggerWasAttachedDuringConstructor = Debugger.IsAttached;
            OSX = !string.IsNullOrEmpty(testCategory) && testCategory.StartsWith(nameof(OSX));
            ASPNetCoreEnvironment = Environment.GetEnvironmentVariable("ASPNETCORE_ENVIRONMENT");
            Linux = !string.IsNullOrEmpty(testCategory) && testCategory.StartsWith(nameof(Linux));
            Staging = !string.IsNullOrEmpty(testCategory) && testCategory.EndsWith(nameof(Staging));
            Windows = !string.IsNullOrEmpty(testCategory) && testCategory.StartsWith(nameof(Windows));
            Production = !string.IsNullOrEmpty(testCategory) && testCategory.EndsWith(nameof(Production));
            Development = !string.IsNullOrEmpty(testCategory) && testCategory.EndsWith(nameof(Development));
        }
        Profile = GetProfile();
        AppSettingsFileName = GetAppSettingsFileName(processesCount: null);
    }

    /// <summary>
    /// Uses explicitly supplied stage flags; OS flags come from the runtime.
    /// </summary>
    public IsEnvironment(bool isDevelopment, bool isStaging, bool isProduction)
    {
        Staging = isStaging;
        Production = isProduction;
        Development = isDevelopment;
        OSX = RuntimeInformation.IsOSPlatform(OSPlatform.OSX);
        Linux = RuntimeInformation.IsOSPlatform(OSPlatform.Linux);
        DebuggerWasAttachedDuringConstructor = Debugger.IsAttached;
        Windows = RuntimeInformation.IsOSPlatform(OSPlatform.Windows);
        ASPNetCoreEnvironment = Environment.GetEnvironmentVariable("ASPNETCORE_ENVIRONMENT");
        Profile = GetProfile();
        AppSettingsFileName = GetAppSettingsFileName(processesCount: null);
    }

    /// <summary>
    /// Derives the stage from ASPNETCORE_ENVIRONMENT, with explicit fallbacks
    /// for when the variable is unset. Exactly one fallback flag may be true.
    /// </summary>
    /// <exception cref="Exception">When both or neither fallback flag is set while the variable is empty.</exception>
    public IsEnvironment(int? processesCount, bool nullASPNetCoreEnvironmentIsDevelopment, bool nullASPNetCoreEnvironmentIsProduction)
    {
        OSX = RuntimeInformation.IsOSPlatform(OSPlatform.OSX);
        Linux = RuntimeInformation.IsOSPlatform(OSPlatform.Linux);
        DebuggerWasAttachedDuringConstructor = Debugger.IsAttached;
        Windows = RuntimeInformation.IsOSPlatform(OSPlatform.Windows);
        ASPNetCoreEnvironment = Environment.GetEnvironmentVariable("ASPNETCORE_ENVIRONMENT");
        if (nullASPNetCoreEnvironmentIsDevelopment && nullASPNetCoreEnvironmentIsProduction)
            throw new Exception();
        else if (string.IsNullOrEmpty(ASPNetCoreEnvironment) && nullASPNetCoreEnvironmentIsProduction)
            Production = true;
        else if (string.IsNullOrEmpty(ASPNetCoreEnvironment) && nullASPNetCoreEnvironmentIsDevelopment)
            Development = true;
        else if (string.IsNullOrEmpty(ASPNetCoreEnvironment) && !nullASPNetCoreEnvironmentIsDevelopment && !nullASPNetCoreEnvironmentIsProduction)
            throw new Exception();
        else
        {
            Staging = ASPNetCoreEnvironment is not null && ASPNetCoreEnvironment.EndsWith(nameof(Staging));
            Production = ASPNetCoreEnvironment is not null && ASPNetCoreEnvironment.EndsWith(nameof(Production));
            Development = ASPNetCoreEnvironment is not null && ASPNetCoreEnvironment.EndsWith(nameof(Development));
        }
        Profile = GetProfile();
        AppSettingsFileName = GetAppSettingsFileName(processesCount);
    }

    /// <summary>
    /// Maps the OS + stage flags to a launch-profile name; Windows profiles
    /// use the bare stage name, Linux/OSX use the combined Name enum names.
    /// </summary>
    /// <exception cref="Exception">When no OS flag or no stage flag is set.</exception>
    private string GetProfile()
    {
        string result;
        if (Windows && Production)
            result = nameof(Production);
        else if (Windows && Staging)
            result = nameof(Staging);
        else if (Windows && Development)
            result = nameof(Development);
        else if (Linux && Production)
            result = nameof(Name.LinuxProduction);
        else if (Linux && Staging)
            result = nameof(Name.LinuxStaging);
        else if (Linux && Development)
            result = nameof(Name.LinuxDevelopment);
        else if (OSX && Production)
            result = nameof(Name.OSXProduction);
        else if (OSX && Staging)
            result = nameof(Name.OSXStaging);
        else if (OSX && Development)
            result = nameof(Name.OSXDevelopment);
        else
            throw new Exception();
        return result;
    }

    /// <summary>
    /// Builds the appsettings file name for the stage, optionally inserting a
    /// processes-count segment (e.g. "appsettings.Staging.2.json").
    /// </summary>
    /// <exception cref="Exception">When no stage flag is set.</exception>
    private string GetAppSettingsFileName(int? processesCount)
    {
        string result;
        if (Production)
        {
            if (processesCount is null)
                result = "appsettings.json";
            else
                result = $"appsettings.{processesCount}.json";
        }
        else
        {
            string environment;
            if (Staging)
                environment = nameof(Staging);
            else if (Development)
                environment = nameof(Development);
            else
                throw new Exception();
            if (processesCount is null)
                result = $"appsettings.{environment}.json";
            else
                result = $"appsettings.{environment}.{processesCount}.json";
        }
        return result;
    }

    /// <summary>Returns the OS name ("Windows", "Linux" or "OSX") for the instance.</summary>
    /// <exception cref="Exception">When no OS flag is set.</exception>
    public static string GetEnvironmentName(IsEnvironment isEnvironment)
    {
        string result;
        if (isEnvironment.Windows)
            result = nameof(IsEnvironment.Windows);
        else if (isEnvironment.Linux)
            result = nameof(IsEnvironment.Linux);
        else if (isEnvironment.OSX)
            result = nameof(IsEnvironment.OSX);
        else
            throw new Exception();
        return result;
    }

}
}

View File

@ -1,244 +1,208 @@
using System;
using Adaptation.Shared.Methods;
using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
namespace Adaptation.Shared
namespace Adaptation.Shared;
public class Logistics : ILogistics
{
public class Logistics
public object NullData { get; private set; }
public string JobID { get; private set; } //CellName
public long Sequence { get; private set; } //Ticks
public DateTime DateTimeFromSequence { get; private set; }
public double TotalSecondsSinceLastWriteTimeFromSequence { get; private set; }
public string MesEntity { get; private set; } //SPC
public string ReportFullPath { get; private set; } //Extract file
public string ProcessJobID { get; set; } //Reactor (duplicate but I want it in the logistics)
public string MID { get; set; } //Lot & Pocket || Lot
public List<string> Tags { get; set; }
public List<string> Logistics1 { get; set; }
public List<Logistics2> Logistics2 { get; set; }
public Logistics(IFileRead fileRead)
{
DateTime dateTime = DateTime.Now;
NullData = null;
Sequence = dateTime.Ticks;
DateTimeFromSequence = dateTime;
JobID = fileRead.CellInstanceName;
TotalSecondsSinceLastWriteTimeFromSequence = (DateTime.Now - dateTime).TotalSeconds;
MesEntity = DefaultMesEntity(dateTime);
ReportFullPath = string.Empty;
ProcessJobID = nameof(ProcessJobID);
MID = nameof(MID);
Tags = new List<string>();
Logistics1 = new string[] { string.Concat("LOGISTICS_1", '\t', "A_JOBID=", JobID, ";A_MES_ENTITY=", MesEntity, ";") }.ToList();
Logistics2 = new List<Logistics2>();
}
public object NullData { get; private set; }
public string JobID { get; private set; } //CellName
public long Sequence { get; private set; } //Ticks
public DateTime DateTimeFromSequence { get; private set; }
public double TotalSecondsSinceLastWriteTimeFromSequence { get; private set; }
public string MesEntity { get; private set; } //SPC
public string ReportFullPath { get; private set; } //Extract file
public string ProcessJobID { get; internal set; } //Reactor (duplicate but I want it in the logistics)
public string MID { get; internal set; } //Lot & Pocket || Lot
public List<string> Tags { get; internal set; }
public List<string> Logistics1 { get; internal set; }
public List<Logistics2> Logistics2 { get; internal set; }
public Logistics()
public Logistics(IFileRead fileRead, string reportFullPath, bool useSplitForMID, int? fileInfoLength = null)
{
if (string.IsNullOrEmpty(fileRead.CellInstanceName))
throw new Exception();
if (string.IsNullOrEmpty(fileRead.MesEntity))
throw new Exception();
NullData = fileRead.NullData;
FileInfo fileInfo = new(reportFullPath);
DateTime dateTime = fileInfo.LastWriteTime;
if (fileInfoLength.HasValue && fileInfo.Length < fileInfoLength.Value)
dateTime = dateTime.AddTicks(-1);
JobID = fileRead.CellInstanceName;
Sequence = dateTime.Ticks;
DateTimeFromSequence = dateTime;
TotalSecondsSinceLastWriteTimeFromSequence = (DateTime.Now - dateTime).TotalSeconds;
MesEntity = fileRead.MesEntity;
ReportFullPath = fileInfo.FullName;
ProcessJobID = nameof(ProcessJobID);
string fileNameWithoutExtension = Path.GetFileNameWithoutExtension(fileInfo.FullName);
if (useSplitForMID)
{
if (fileNameWithoutExtension.IndexOf(".") > -1)
fileNameWithoutExtension = fileNameWithoutExtension.Split('.')[0].Trim();
if (fileNameWithoutExtension.IndexOf("_") > -1)
fileNameWithoutExtension = fileNameWithoutExtension.Split('_')[0].Trim();
if (fileNameWithoutExtension.IndexOf("-") > -1)
fileNameWithoutExtension = fileNameWithoutExtension.Split('-')[0].Trim();
}
MID = string.Concat(fileNameWithoutExtension.Substring(0, 1).ToUpper(), fileNameWithoutExtension.Substring(1).ToLower());
Tags = new List<string>();
Logistics1 = new string[] { string.Concat("LOGISTICS_1", '\t', "A_JOBID=", JobID, ";A_MES_ENTITY=", MesEntity, ";") }.ToList();
Logistics2 = new List<Logistics2>();
}
public Logistics(string reportFullPath, string logistics)
{
string key;
DateTime dateTime;
string[] segments;
Logistics1 = logistics.Split(new string[] { Environment.NewLine }, StringSplitOptions.RemoveEmptyEntries).ToList();
if (!Logistics1.Any() || !Logistics1[0].StartsWith("LOGISTICS_1"))
{
DateTime dateTime = DateTime.Now;
NullData = null;
JobID = Description.GetCellName();
JobID = "null";
dateTime = new FileInfo(reportFullPath).LastWriteTime;
Sequence = dateTime.Ticks;
DateTimeFromSequence = dateTime;
TotalSecondsSinceLastWriteTimeFromSequence = (DateTime.Now - dateTime).TotalSeconds;
MesEntity = DefaultMesEntity(dateTime);
ReportFullPath = string.Empty;
ProcessJobID = nameof(ProcessJobID);
MID = nameof(MID);
ReportFullPath = reportFullPath;
ProcessJobID = "R##";
MID = "null";
Tags = new List<string>();
Logistics1 = new string[] { string.Concat("LOGISTICS_1", '\t', "A_JOBID=", JobID, ";A_MES_ENTITY=", MesEntity, ";") }.ToList();
Logistics2 = new List<Logistics2>();
}
public Logistics(object nullData, Dictionary<string, string> cellNames, Dictionary<string, string> mesEntities, FileInfo fileInfo, bool useSplitForMID, int? fileInfoLength = null)
else
{
NullData = nullData;
string mesEntity = string.Empty;
string jobID = Description.GetCellName();
DateTime dateTime = fileInfo.LastWriteTime;
if (fileInfoLength.HasValue && fileInfo.Length < fileInfoLength.Value)
dateTime = dateTime.AddTicks(-1);
if (string.IsNullOrEmpty(jobID))
string logistics1Line1 = Logistics1[0];
key = "NULL_DATA=";
if (!logistics1Line1.Contains(key))
NullData = null;
else
{
if (cellNames.Count == 1)
jobID = cellNames.ElementAt(0).Key;
else
{
foreach (var element in cellNames)
{
if (fileInfo.FullName.IndexOf(element.Key, StringComparison.OrdinalIgnoreCase) > -1 || fileInfo.FullName.IndexOf(element.Value, StringComparison.OrdinalIgnoreCase) > -1)
{
jobID = element.Key;
break;
}
}
}
segments = logistics1Line1.Split(new string[] { key }, StringSplitOptions.RemoveEmptyEntries);
NullData = segments[1].Split(';')[0];
}
key = "JOBID=";
if (!logistics1Line1.Contains(key))
JobID = "null";
else
{
segments = logistics1Line1.Split(new string[] { key }, StringSplitOptions.RemoveEmptyEntries);
JobID = segments[1].Split(';')[0];
}
key = "SEQUENCE=";
if (!logistics1Line1.Contains(key))
dateTime = new FileInfo(reportFullPath).LastWriteTime;
else
{
segments = logistics1Line1.Split(new string[] { key }, StringSplitOptions.RemoveEmptyEntries);
if (!long.TryParse(segments[1].Split(';')[0].Split('.')[0], out long sequence) || sequence < new DateTime(1999, 1, 1).Ticks)
dateTime = new FileInfo(reportFullPath).LastWriteTime;
else
dateTime = new DateTime(sequence);
}
if (string.IsNullOrEmpty(jobID))
throw new Exception();
if (mesEntities.ContainsKey(jobID))
mesEntity = mesEntities[jobID];
else if (mesEntities.Count == 1)
mesEntity = mesEntities.ElementAt(0).Value;
//
if (string.IsNullOrEmpty(mesEntity))
throw new Exception();
JobID = jobID;
Sequence = dateTime.Ticks;
DateTimeFromSequence = dateTime;
TotalSecondsSinceLastWriteTimeFromSequence = (DateTime.Now - dateTime).TotalSeconds;
MesEntity = mesEntity;
ReportFullPath = fileInfo.FullName;
ProcessJobID = nameof(ProcessJobID);
string fileNameWithoutExtension = Path.GetFileNameWithoutExtension(fileInfo.FullName);
if (useSplitForMID)
DateTime lastWriteTime = new FileInfo(reportFullPath).LastWriteTime;
if (TotalSecondsSinceLastWriteTimeFromSequence > 600)
{
if (fileNameWithoutExtension.IndexOf(".") > -1)
fileNameWithoutExtension = fileNameWithoutExtension.Split('.')[0].Trim();
if (fileNameWithoutExtension.IndexOf("_") > -1)
fileNameWithoutExtension = fileNameWithoutExtension.Split('_')[0].Trim();
if (fileNameWithoutExtension.IndexOf("-") > -1)
fileNameWithoutExtension = fileNameWithoutExtension.Split('-')[0].Trim();
if (lastWriteTime != dateTime)
try
{ File.SetLastWriteTime(reportFullPath, dateTime); }
catch (Exception) { }
}
MID = string.Concat(fileNameWithoutExtension.Substring(0, 1).ToUpper(), fileNameWithoutExtension.Substring(1).ToLower());
Tags = new List<string>();
Logistics1 = new string[] { string.Concat("LOGISTICS_1", '\t', "A_JOBID=", JobID, ";A_MES_ENTITY=", MesEntity, ";") }.ToList();
Logistics2 = new List<Logistics2>();
}
public Logistics(string reportFullPath, string logistics)
{
string key;
DateTime dateTime;
string[] segments;
Logistics1 = logistics.Split(new string[] { Environment.NewLine }, StringSplitOptions.RemoveEmptyEntries).ToList();
if (!Logistics1.Any() || !Logistics1[0].StartsWith("LOGISTICS_1"))
{
NullData = null;
JobID = "null";
dateTime = new FileInfo(reportFullPath).LastWriteTime;
Sequence = dateTime.Ticks;
DateTimeFromSequence = dateTime;
TotalSecondsSinceLastWriteTimeFromSequence = (DateTime.Now - dateTime).TotalSeconds;
key = "MES_ENTITY=";
if (!logistics1Line1.Contains(key))
MesEntity = DefaultMesEntity(dateTime);
ReportFullPath = reportFullPath;
ProcessJobID = "R##";
MID = "null";
Tags = new List<string>();
Logistics1 = new string[] { string.Concat("LOGISTICS_1", '\t', "A_JOBID=", JobID, ";A_MES_ENTITY=", MesEntity, ";") }.ToList();
Logistics2 = new List<Logistics2>();
}
else
{
string logistics1Line1 = Logistics1[0];
key = "NULL_DATA=";
if (!logistics1Line1.Contains(key))
NullData = null;
else
{
segments = logistics1Line1.Split(new string[] { key }, StringSplitOptions.RemoveEmptyEntries);
NullData = segments[1].Split(';')[0];
}
key = "JOBID=";
if (!logistics1Line1.Contains(key))
JobID = "null";
else
{
segments = logistics1Line1.Split(new string[] { key }, StringSplitOptions.RemoveEmptyEntries);
JobID = segments[1].Split(';')[0];
}
key = "SEQUENCE=";
if (!logistics1Line1.Contains(key))
dateTime = new FileInfo(reportFullPath).LastWriteTime;
else
{
segments = logistics1Line1.Split(new string[] { key }, StringSplitOptions.RemoveEmptyEntries);
if (!long.TryParse(segments[1].Split(';')[0].Split('.')[0], out long sequence) || sequence < new DateTime(1999, 1, 1).Ticks)
dateTime = new FileInfo(reportFullPath).LastWriteTime;
else
dateTime = new DateTime(sequence);
}
Sequence = dateTime.Ticks;
DateTimeFromSequence = dateTime;
TotalSecondsSinceLastWriteTimeFromSequence = (DateTime.Now - dateTime).TotalSeconds;
DateTime lastWriteTime = new FileInfo(reportFullPath).LastWriteTime;
if (TotalSecondsSinceLastWriteTimeFromSequence > 600)
{
if (lastWriteTime != dateTime)
try
{ File.SetLastWriteTime(reportFullPath, dateTime); }
catch (Exception) { }
}
key = "MES_ENTITY=";
if (!logistics1Line1.Contains(key))
MesEntity = DefaultMesEntity(dateTime);
else
{
segments = logistics1Line1.Split(new string[] { key }, StringSplitOptions.RemoveEmptyEntries);
MesEntity = segments[1].Split(';')[0];
}
ReportFullPath = reportFullPath;
key = "PROCESS_JOBID=";
if (!logistics1Line1.Contains(key))
ProcessJobID = "R##";
else
{
segments = logistics1Line1.Split(new string[] { key }, StringSplitOptions.RemoveEmptyEntries);
ProcessJobID = segments[1].Split(';')[0];
}
key = "MID=";
if (!logistics1Line1.Contains(key))
MID = "null";
else
{
segments = logistics1Line1.Split(new string[] { key }, StringSplitOptions.RemoveEmptyEntries);
MID = segments[1].Split(';')[0];
}
segments = logistics1Line1.Split(new string[] { key }, StringSplitOptions.RemoveEmptyEntries);
MesEntity = segments[1].Split(';')[0];
}
Logistics2 logistics2;
Tags = new List<string>();
Logistics2 = new List<Logistics2>();
for (int i = 1; i < Logistics1.Count(); i++)
ReportFullPath = reportFullPath;
key = "PROCESS_JOBID=";
if (!logistics1Line1.Contains(key))
ProcessJobID = "R##";
else
{
if (Logistics1[i].StartsWith("LOGISTICS_2"))
{
logistics2 = new Logistics2(Logistics1[i]);
Logistics2.Add(logistics2);
}
segments = logistics1Line1.Split(new string[] { key }, StringSplitOptions.RemoveEmptyEntries);
ProcessJobID = segments[1].Split(';')[0];
}
for (int i = Logistics1.Count() - 1; i > -1; i--)
key = "MID=";
if (!logistics1Line1.Contains(key))
MID = "null";
else
{
if (Logistics1[i].StartsWith("LOGISTICS_2"))
Logistics1.RemoveAt(i);
segments = logistics1Line1.Split(new string[] { key }, StringSplitOptions.RemoveEmptyEntries);
MID = segments[1].Split(';')[0];
}
}
public Logistics ShallowCopy()
Logistics2 logistics2;
Tags = new List<string>();
Logistics2 = new List<Logistics2>();
for (int i = 1; i < Logistics1.Count; i++)
{
return (Logistics)MemberwiseClone();
if (Logistics1[i].StartsWith("LOGISTICS_2"))
{
logistics2 = new Logistics2(Logistics1[i]);
Logistics2.Add(logistics2);
}
}
private string DefaultMesEntity(DateTime dateTime)
for (int i = Logistics1.Count - 1; i > -1; i--)
{
return string.Concat(dateTime.Ticks, "_MES_ENTITY");
}
internal string GetLotViaMostCommonMethod()
{
return MID.Substring(0, MID.Length - 2);
}
internal string GetPocketNumberViaMostCommonMethod()
{
return MID.Substring(MID.Length - 2);
}
internal void Update(string dateTime, string processJobID, string mid)
{
if (!DateTime.TryParse(dateTime, out DateTime dateTimeCasted))
dateTimeCasted = DateTime.Now;
NullData = null;
//JobID = Description.GetCellName();
Sequence = dateTimeCasted.Ticks;
DateTimeFromSequence = dateTimeCasted;
TotalSecondsSinceLastWriteTimeFromSequence = (DateTime.Now - dateTimeCasted).TotalSeconds;
//MesEntity = DefaultMesEntity(dateTime);
//ReportFullPath = string.Empty;
ProcessJobID = processJobID;
MID = mid;
Tags = new List<string>();
Logistics1 = new string[] { string.Concat("LOGISTICS_1", '\t', "A_JOBID=", JobID, ";A_MES_ENTITY=", MesEntity, ";") }.ToList();
Logistics2 = new List<Logistics2>();
if (Logistics1[i].StartsWith("LOGISTICS_2"))
Logistics1.RemoveAt(i);
}
}
public Logistics ShallowCopy() => (Logistics)MemberwiseClone();
private static string DefaultMesEntity(DateTime dateTime) => string.Concat(dateTime.Ticks, "_MES_ENTITY");
internal string GetLotViaMostCommonMethod() => MID.Substring(0, MID.Length - 2);
internal string GetPocketNumberViaMostCommonMethod() => MID.Substring(MID.Length - 2);
internal void Update(string dateTime, string processJobID, string mid)
{
if (!DateTime.TryParse(dateTime, out DateTime dateTimeCasted))
dateTimeCasted = DateTime.Now;
NullData = null;
//JobID = Description.GetCellName();
Sequence = dateTimeCasted.Ticks;
DateTimeFromSequence = dateTimeCasted;
TotalSecondsSinceLastWriteTimeFromSequence = (DateTime.Now - dateTimeCasted).TotalSeconds;
//MesEntity = DefaultMesEntity(dateTime);
//ReportFullPath = string.Empty;
ProcessJobID = processJobID;
MID = mid;
Tags = new List<string>();
Logistics1 = new string[] { string.Concat("LOGISTICS_1", '\t', "A_JOBID=", JobID, ";A_MES_ENTITY=", MesEntity, ";") }.ToList();
Logistics2 = new List<Logistics2>();
}
}

View File

@ -1,80 +1,78 @@
using System;
namespace Adaptation.Shared;

/// <summary>
/// Parsed LOGISTICS_2 record. The input is a single line of <c>KEY=value;</c>
/// pairs; each property captures the value of one well-known key, or the
/// literal string <c>"null"</c> when that key is absent.
/// </summary>
public class Logistics2 : Methods.ILogistics2
{

    public string MID { get; private set; }
    public string RunNumber { get; private set; }
    public string SatelliteGroup { get; private set; }
    public string PartNumber { get; private set; }
    public string PocketNumber { get; private set; }
    public string WaferLot { get; private set; }
    public string Recipe { get; private set; }

    /// <summary>Extracts the seven known fields from <paramref name="logistics2"/>.</summary>
    public Logistics2(string logistics2)
    {
        MID = GetValue(logistics2, "JOBID=");
        RunNumber = GetValue(logistics2, "MID=");
        SatelliteGroup = GetValue(logistics2, "INFO=");
        PartNumber = GetValue(logistics2, "PRODUCT=");
        PocketNumber = GetValue(logistics2, "CHAMBER=");
        WaferLot = GetValue(logistics2, "WAFER_ID=");
        Recipe = GetValue(logistics2, "PPID=");
    }

    // Returns the text between the first occurrence of <key> and the next ';',
    // or "null" when <key> does not occur. Mirrors the original inline logic
    // exactly (Split on the key, then take segments[1] up to the first ';'),
    // including its behavior when the key starts the string.
    private static string GetValue(string logistics2, string key)
    {
        if (!logistics2.Contains(key))
            return "null";
        string[] segments = logistics2.Split(new string[] { key }, StringSplitOptions.RemoveEmptyEntries);
        return segments[1].Split(';')[0];
    }

}

View File

@ -0,0 +1,22 @@
using System.Collections.Generic;
using System.Text.Json;
namespace Adaptation.Shared.Methods;
/// <summary>
/// Contract for report "description" providers: exposes the event description,
/// the various name lists (header/detail/parameter), and default/display-name
/// instances used when serializing extraction results.
/// </summary>
public interface IDescription
{
/// <summary>Text describing the event this description belongs to.</summary>
string GetEventDescription();
List<string> GetDetailNames();
List<string> GetHeaderNames();
/// <summary>Instance whose property values carry the display names.</summary>
IDescription GetDisplayNames();
List<string> GetParameterNames();
/// <summary>Parameter names that are reported as pairs.</summary>
List<string> GetPairedParameterNames();
/// <summary>Parameter names to skip for the given <paramref name="test"/>.</summary>
List<string> GetIgnoreParameterNames(Test test);
List<string> GetNames(IFileRead fileRead, Logistics logistics);
/// <summary>Properties of a default instance, as JSON properties.</summary>
JsonProperty[] GetDefault(IFileRead fileRead, Logistics logistics);
Dictionary<string, string> GetDisplayNamesJsonElement(IFileRead fileRead);
IDescription GetDefaultDescription(IFileRead fileRead, Logistics logistics);
/// <summary>Builds one description per result in <paramref name="iProcessData"/> for the given tests.</summary>
List<IDescription> GetDescriptions(IFileRead fileRead, Logistics logistics, List<Test> tests, IProcessData iProcessData);
}

View File

@ -0,0 +1,24 @@
using System;
using System.Collections.Generic;
using System.IO;
using System.Text.Json;
namespace Adaptation.Shared.Methods;
/// <summary>
/// File-processing contract layered on <see cref="Properties.IFileRead"/>:
/// extraction, result movement, and description/name queries for a single
/// cell connection. Extract results travel as a
/// (text, tests, JSON payloads, files) tuple.
/// </summary>
public interface IFileRead : Properties.IFileRead
{
void WaitForThread();
JsonProperty[] GetDefault();
/// <summary>Timer/worker callback entry point.</summary>
void Callback(object state);
string GetEventDescription();
List<string> GetHeaderNames();
void CheckTests(Test[] tests, bool extra);
Dictionary<string, string> GetDisplayNamesJsonElement();
/// <summary>Re-runs extraction for already-seen input.</summary>
Tuple<string, Test[], JsonElement[], List<FileInfo>> ReExtract();
List<IDescription> GetDescriptions(IFileRead fileRead, List<Test> tests, IProcessData processData);
/// <summary>Moves the files of an extract result; <paramref name="exception"/> marks a failed run.</summary>
void Move(Tuple<string, Test[], JsonElement[], List<FileInfo>> extractResults, Exception exception = null);
Tuple<string, Test[], JsonElement[], List<FileInfo>> GetExtractResult(string reportFullPath, string eventName);
string[] Move(Tuple<string, Test[], JsonElement[], List<FileInfo>> extractResults, string to, string from, string resolvedFileLocation, Exception exception);
}

View File

@ -0,0 +1,5 @@
namespace Adaptation.Shared.Methods;
/// <summary>
/// Marker interface re-exposing the read-only logistics properties of
/// <see cref="Properties.ILogistics"/>; adds no members of its own.
/// </summary>
public interface ILogistics : Properties.ILogistics
{
}

View File

@ -0,0 +1,5 @@
namespace Adaptation.Shared.Methods;
/// <summary>
/// Marker interface re-exposing the read-only LOGISTICS_2 properties of
/// <see cref="Properties.ILogistics2"/>; adds no members of its own.
/// </summary>
public interface ILogistics2 : Properties.ILogistics2
{
}

View File

@ -0,0 +1,14 @@
using System;
using System.Collections.Generic;
using System.IO;
using System.Text.Json;
namespace Adaptation.Shared.Methods;
/// <summary>
/// Contract for per-equipment process-data parsers layered on
/// <see cref="Properties.IProcessData"/>.
/// </summary>
public interface IProcessData : Properties.IProcessData
{
/// <summary>Resolves the current reactor name from the configured reactor map.</summary>
string GetCurrentReactor(IFileRead fileRead, Logistics logistics, Dictionary<string, string> reactors);
/// <summary>Produces the extract result tuple (text, tests, JSON payloads, files consumed) for the run.</summary>
Tuple<string, Test[], JsonElement[], List<FileInfo>> GetResults(IFileRead fileRead, Logistics logistics, List<FileInfo> fileInfoCollection);
}

View File

@ -0,0 +1,8 @@
namespace Adaptation.Shared.Methods;
/// <summary>Outbound e-mail notification contract (subject + body per priority level).</summary>
public interface ISMTP
{
/// <summary>Sends an e-mail flagged low priority.</summary>
void SendLowPriorityEmailMessage(string subject, string body);
/// <summary>Sends an e-mail flagged high priority.</summary>
void SendHighPriorityEmailMessage(string subject, string body);
/// <summary>Sends an e-mail flagged normal priority.</summary>
void SendNormalPriorityEmailMessage(string subject, string body);
}

File diff suppressed because it is too large Load Diff

View File

@ -1,419 +0,0 @@
using Adaptation.Ifx.Eaf.EquipmentConnector.File.Configuration;
using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Text;
using System.Text.Json;
using System.Text.Json.Serialization;
namespace Adaptation.Shared.Metrology
{
/// <summary>
/// Base class for cell-instance configuration. The constructor derives the event
/// name, equipment type/connection and hosting flags from the connection name and
/// file-connector configuration; most query methods delegate to
/// <see cref="ProcessDataDescription"/>.
/// </summary>
public class ConfigDataBase
{
public bool UseCyclicalForDescription { get; protected set; }
public Dictionary<string, string> CellNames { get; protected set; }
public Dictionary<string, string> MesEntities { get; protected set; }
public IProcessDataDescription ProcessDataDescription { get; protected set; }
public bool IsEvent { get; private set; }
public bool EafHosted { get; private set; }
public string CellName { get; private set; }
public bool IsSourceTimer { get; private set; }
public EquipmentType EquipmentType => _EquipmentType;
public string EquipmentElementName { get; private set; }
public bool IsDatabaseExportToIPDSF { get; private set; }
public EquipmentType? EquipmentConnection => _EquipmentConnection;
public FileConnectorConfiguration FileConnectorConfiguration { get; private set; }
protected readonly EventName _EventName;
protected readonly EquipmentType _EquipmentType;
protected readonly EquipmentType? _EquipmentConnection;
protected readonly Dictionary<string, string> _Reactors;
/// <summary>
/// Parses the connection name / parameterized model type into EventName and
/// EquipmentType values; throws when the names do not follow the expected
/// "&lt;type&gt;&lt;cadence suffix&gt;" convention.
/// </summary>
public ConfigDataBase(string cellName, string cellInstanceConnectionName, FileConnectorConfiguration fileConnectorConfiguration, string equipmentTypeName, string parameterizedModelObjectDefinitionType, bool isEAFHosted)
{
CellName = cellName;
EafHosted = isEAFHosted;
EquipmentType equipmentTypeValue;
_Reactors = new Dictionary<string, string>();
CellNames = new Dictionary<string, string>();
MesEntities = new Dictionary<string, string>();
EquipmentElementName = cellInstanceConnectionName;
FileConnectorConfiguration = fileConnectorConfiguration;
string[] segments = parameterizedModelObjectDefinitionType.Split('.');
IsSourceTimer = (fileConnectorConfiguration.SourceFileFilter.StartsWith("*Timer.txt"));
string cellInstanceConnectionNameBase = cellInstanceConnectionName.Replace("-", string.Empty);
IsDatabaseExportToIPDSF = (fileConnectorConfiguration.SourceFileLocation.Contains("DatabaseExport"));
// The last dotted segment of the model type must name a known EventName.
if (!Enum.TryParse(segments[segments.Length - 1], out EventName eventNameValue))
throw new Exception(cellInstanceConnectionName);
// The de-hyphenated connection name may itself name an EquipmentType.
if (!Enum.TryParse(cellInstanceConnectionNameBase, out equipmentTypeValue))
_EquipmentConnection = null;
else
_EquipmentConnection = equipmentTypeValue;
string suffix;
switch (eventNameValue)
{
case EventName.FileRead:
suffix = string.Empty;
break;
case EventName.FileReadDaily:
suffix = "_Daily";
break;
case EventName.FileReadWeekly:
suffix = "_Weekly";
break;
case EventName.FileReadMonthly:
suffix = "_Monthly";
break;
case EventName.FileReadVerification:
suffix = "_Verification";
break;
default:
throw new Exception(cellInstanceConnectionName);
}
string parameterizedModelObjectDefinitionTypeAppended = string.Concat(segments[0], suffix);
IsEvent = cellInstanceConnectionNameBase != parameterizedModelObjectDefinitionTypeAppended;
_EventName = eventNameValue;
if (!Enum.TryParse(parameterizedModelObjectDefinitionTypeAppended, out equipmentTypeValue))
throw new Exception(cellInstanceConnectionName);
_EquipmentType = equipmentTypeValue;
// Outside EAF hosting, the configured equipment type name must agree with the model type.
if (!isEAFHosted && equipmentTypeName != parameterizedModelObjectDefinitionTypeAppended)
throw new Exception(cellInstanceConnectionName);
}
// Event name as text.
public string GetEventName()
{
string result = _EventName.ToString();
return result;
}
// Event name as its enum value.
public EventName GetEventNameValue()
{
EventName result = _EventName;
return result;
}
// The connection's equipment type; falls back to the model's type when no connection type was parsed.
public string GetEquipmentType()
{
string result;
if (_EquipmentConnection is null)
result = _EquipmentType.ToString();
else
result = _EquipmentConnection.Value.ToString();
return result;
}
// ---- Thin delegations to ProcessDataDescription ----
public string GetEventDescription()
{
string result = ProcessDataDescription.GetEventDescription();
return result;
}
public IProcessDataDescription GetDefault(ILogic logic)
{
IProcessDataDescription result = ProcessDataDescription.GetDefault(logic, this);
return result;
}
public IProcessDataDescription GetDisplayNames(ILogic logic)
{
IProcessDataDescription result = ProcessDataDescription.GetDisplayNames(logic, this);
return result;
}
public List<string> GetDetailNames(ILogic logic)
{
List<string> results = ProcessDataDescription.GetDetailNames(logic, this);
return results;
}
public List<string> GetHeaderNames(ILogic logic)
{
List<string> results = ProcessDataDescription.GetHeaderNames(logic, this);
return results;
}
public List<string> GetNames(ILogic logic)
{
List<string> results = ProcessDataDescription.GetNames(logic, this);
return results;
}
public List<string> GetPairedParameterNames(ILogic logic)
{
List<string> results = ProcessDataDescription.GetPairedParameterNames(logic, this);
return results;
}
public List<string> GetParameterNames(ILogic logic)
{
List<string> results = ProcessDataDescription.GetParameterNames(logic, this);
return results;
}
public List<IProcessDataDescription> GetDescription(ILogic logic, List<Test> tests, IProcessData iProcessData)
{
List<IProcessDataDescription> results = ProcessDataDescription.GetDescription(logic, this, tests, iProcessData);
return results;
}
/// <summary>
/// Finds the reactor whose file-prefix list ('|'-separated) matches the current MID
/// (or MES-entity mapping for non-FileRead events); when nothing matches and exactly
/// one reactor is configured, that reactor wins.
/// </summary>
public string GetCurrentReactor(ILogic logic)
{
string result = string.Empty;
foreach (KeyValuePair<string, string> keyValuePair in _Reactors)
{
foreach (string filePrefix in keyValuePair.Value.Split('|'))
{
if (logic.Logistics.MID.StartsWith(filePrefix) || (_EventName != EventName.FileRead && MesEntities.ContainsKey(logic.Logistics.JobID) && keyValuePair.Value == MesEntities[logic.Logistics.JobID]))
{
result = keyValuePair.Key;
break;
}
}
}
if (string.IsNullOrEmpty(result) && _Reactors.Count == 1)
result = _Reactors.ElementAt(0).Key;
return result;
}
// Serializes the default description and re-reads it as a JsonElement.
protected JsonElement GetDefaultJsonElement(ILogic logic)
{
JsonElement result;
IProcessDataDescription processDataDescription = ProcessDataDescription.GetDefault(logic, this);
string json = JsonSerializer.Serialize(processDataDescription, processDataDescription.GetType());
object @object = JsonSerializer.Deserialize<object>(json);
result = (JsonElement)@object;
return result;
}
/// <summary>
/// Maps each default-description property to (param kind, name, display name, value)
/// tuples, inferring the param kind from the JSON value kind. Structured values are
/// currently unsupported (NotImplementedException).
/// </summary>
public Dictionary<string, List<Tuple<Enum, string, string, object>>> GetParameterInfo(ILogic logic, bool allowNull)
{
Dictionary<string, List<Tuple<Enum, string, string, object>>> results = new Dictionary<string, List<Tuple<Enum, string, string, object>>>();
string description;
Enum param;
Tuple<Enum, string, string, object> tuple;
JsonElement defaultJsonElement = GetDefaultJsonElement(logic);
Dictionary<string, string> keyValuePairs = GetDisplayNamesJsonElement(logic);
foreach (JsonProperty jsonProperty in defaultJsonElement.EnumerateObject())
{
if (jsonProperty.Value.ValueKind == JsonValueKind.Null && !allowNull)
throw new Exception();
if (jsonProperty.Value.ValueKind == JsonValueKind.Object || jsonProperty.Value.ValueKind == JsonValueKind.Array)
{
description = string.Empty;
param = Description.Param.StructuredType;
//jValue = jObject.Value<JValue>("Item1");
throw new NotImplementedException("Item1");
}
else
{
// Map JSON value kind to the closest Description.Param kind.
switch (jsonProperty.Value.ValueKind)
{
case JsonValueKind.String:
param = Description.Param.String;
break;
case JsonValueKind.Number:
param = Description.Param.Double;
break;
case JsonValueKind.True:
case JsonValueKind.False:
param = Description.Param.Boolean;
break;
case JsonValueKind.Null:
param = Description.Param.String;
break;
default:
param = Description.Param.StructuredType;
break;
}
}
if (!keyValuePairs.ContainsKey(jsonProperty.Name))
description = string.Empty;
else
description = keyValuePairs[jsonProperty.Name];
tuple = new Tuple<Enum, string, string, object>(param, jsonProperty.Name, description, jsonProperty.Value.ToString());
if (!results.ContainsKey(jsonProperty.Name))
results.Add(jsonProperty.Name, new List<Tuple<Enum, string, string, object>>());
results[jsonProperty.Name].Add(tuple);
}
return results;
}
/// <summary>
/// Writes a semicolon-separated export-alias CSV for this cell to the EC_EDA share;
/// silently returns when the share root is unreachable or there is nothing to write.
/// </summary>
protected void WriteExportAliases(ILogic logic, string cellName, string equipmentElementName)
{
int i = 0;
Enum param;
object value;
Enum[] @params;
string description;
StringBuilder stringBuilder = new StringBuilder();
string shareRoot = @"\\messv02ecc1.ec.local\EC_EDA";
string shareDirectory = string.Concat(shareRoot, @"\Staging\Pdsf\", cellName, @"\ExportAliases\", equipmentElementName);
Dictionary<string, List<Tuple<Enum, string, string, object>>> keyValuePairs;
if (!(logic is null))
keyValuePairs = GetParameterInfo(logic, allowNull: false);
else
keyValuePairs = new Dictionary<string, List<Tuple<Enum, string, string, object>>>();
stringBuilder.AppendLine("\"AliasName\";\"Condition\";\"EventId\";\"ExceptionId\";\"Formula\";\"HardwareId\";\"OrderId\";\"ParameterName\";\"Remark\";\"ReportName\";\"SourceId\";\"Use\"");
if (!Directory.Exists(shareRoot))
return;
if (!Directory.Exists(shareDirectory))
Directory.CreateDirectory(shareDirectory);
// One uniquely-named file per invocation (ticks-based).
string shareFile = string.Concat(shareDirectory, @"\", DateTime.Now.Ticks, ".csv");
foreach (KeyValuePair<string, List<Tuple<Enum, string, string, object>>> keyValuePair in keyValuePairs)
{
i += 1;
// Every tuple for a parameter must agree on kind and name.
@params = (from l in keyValuePair.Value select l.Item1).Distinct().ToArray();
if (@params.Length != 1)
throw new Exception();
if (keyValuePair.Value[0].Item2 != keyValuePair.Key)
throw new Exception();
param = @params[0];
if (!(param is Description.Param.String))
stringBuilder.AppendLine($"\"{keyValuePair.Key}\";\"\";\"\";\"\";\"\";\"\";\"{i}\";\"{cellName}/{EquipmentElementName}/{keyValuePair.Key}\";\"\";\"{cellName}/{EquipmentElementName}/{_EventName}\";\"\";\"True\"");
else
{
// String parameters are exported under their display name; blank display names are skipped.
description = keyValuePair.Value[0].Item3.Split('|')[0];
if (string.IsNullOrEmpty(description))
continue;
value = keyValuePair.Value[0].Item4;
stringBuilder.AppendLine($"\"'{description}'\";\"\";\"\";\"\";\"\";\"\";\"{i}\";\"{cellName}/{EquipmentElementName}/{value}\";\"\";\"{cellName}/{EquipmentElementName}/{_EventName}\";\"\";\"True\"");
}
}
if (keyValuePairs.Any())
File.WriteAllText(shareFile, stringBuilder.ToString());
}
// Property name -> display-name text, read from the serialized display-names description.
public Dictionary<string, string> GetDisplayNamesJsonElement(ILogic logic)
{
Dictionary<string, string> results = new Dictionary<string, string>();
IProcessDataDescription processDataDescription = ProcessDataDescription.GetDisplayNames(logic, this);
string json = JsonSerializer.Serialize(processDataDescription, processDataDescription.GetType());
JsonElement jsonElement = JsonSerializer.Deserialize<JsonElement>(json);
foreach (JsonProperty jsonProperty in jsonElement.EnumerateObject())
{
if (!results.ContainsKey(jsonProperty.Name))
results.Add(jsonProperty.Name, string.Empty);
if (jsonProperty.Value is JsonElement jsonPropertyValue)
results[jsonProperty.Name] = jsonPropertyValue.ToString();
}
return results;
}
/// <summary>
/// Ignore list from the description; optionally also ignores paired parameters whose
/// display name is empty or starts with '|'.
/// </summary>
public List<string> GetIgnoreParameterNames(ILogic logic, Test test, bool includePairedParameterNames)
{
List<string> results = ProcessDataDescription.GetIgnoreParameterNames(logic, this, test);
if (includePairedParameterNames)
{
string value;
List<string> pairedParameterNames = ProcessDataDescription.GetPairedParameterNames(logic, this);
IProcessDataDescription processDataDescription = ProcessDataDescription.GetDisplayNames(logic, this);
string json = JsonSerializer.Serialize(processDataDescription, processDataDescription.GetType());
object @object = JsonSerializer.Deserialize<object>(json);
if (!(@object is JsonElement jsonElement))
throw new Exception();
foreach (JsonProperty jsonProperty in jsonElement.EnumerateObject())
{
if (jsonProperty.Value.ValueKind == JsonValueKind.Object || jsonProperty.Value.ValueKind == JsonValueKind.Array)
throw new Exception();
value = jsonProperty.Value.ToString();
if (!results.Contains(jsonProperty.Name) && pairedParameterNames.Contains(jsonProperty.Name) && (string.IsNullOrEmpty(value) || value[0] == '|'))
results.Add(jsonProperty.Name);
}
}
return results;
}
// Deserializes a JSON array into Duplicator.Description records (numbers tolerated as strings).
public List<Duplicator.Description> GetProcessDataDescriptions(JsonElement jsonElement)
{
List<Duplicator.Description> results;
if (jsonElement.ValueKind != JsonValueKind.Array)
throw new Exception();
JsonSerializerOptions jsonSerializerOptions = new JsonSerializerOptions { NumberHandling = JsonNumberHandling.AllowReadingFromString | JsonNumberHandling.WriteAsString };
results = JsonSerializer.Deserialize<List<Duplicator.Description>>(jsonElement.ToString(), jsonSerializerOptions);
return results;
}
// Groups descriptions by their Test value.
public Dictionary<Test, List<Duplicator.Description>> GetKeyValuePairs(List<Duplicator.Description> processDataDescriptions)
{
Dictionary<Test, List<Duplicator.Description>> results = new Dictionary<Test, List<Duplicator.Description>>();
Test testKey;
for (int i = 0; i < processDataDescriptions.Count; i++)
{
testKey = (Test)processDataDescriptions[i].Test;
if (!results.ContainsKey(testKey))
results.Add(testKey, new List<Duplicator.Description>());
results[testKey].Add(processDataDescriptions[i]);
}
return results;
}
/// <summary>
/// For the descriptions matching <paramref name="test"/>, collects each scalar JSON
/// property's values (the array and the description list must be index-aligned).
/// </summary>
public Dictionary<string, List<string>> GetKeyValuePairs(JsonElement jsonElement, List<Duplicator.Description> processDataDescriptions, Test test)
{
Dictionary<string, List<string>> results = new Dictionary<string, List<string>>();
Test testKey;
if (jsonElement.ValueKind != JsonValueKind.Array)
throw new Exception();
JsonElement[] jsonElements = jsonElement.EnumerateArray().ToArray();
if (processDataDescriptions.Count != jsonElements.Length)
throw new Exception();
for (int i = 0; i < processDataDescriptions.Count; i++)
{
testKey = (Test)processDataDescriptions[i].Test;
if (testKey != test)
continue;
foreach (JsonProperty jsonProperty in jsonElements[i].EnumerateObject())
{
if (jsonProperty.Value.ValueKind == JsonValueKind.Object || jsonProperty.Value.ValueKind == JsonValueKind.Array)
throw new Exception();
if (!results.ContainsKey(jsonProperty.Name))
results.Add(jsonProperty.Name, new List<string>());
results[jsonProperty.Name].Add(jsonProperty.Value.ToString());
}
}
return results;
}
// Sanity pass over the default description: throws on null-valued properties.
// NOTE(review): the computed 'description' local is never used beyond the loop —
// presumably the side effect wanted here is only the null check; confirm.
protected void VerifyProcessDataDescription(ILogic logic)
{
string description;
bool allowNull = false;
JsonElement defaultJsonElement = GetDefaultJsonElement(logic);
Dictionary<string, string> keyValuePairs = GetDisplayNamesJsonElement(logic);
JsonProperty[] jsonProperties = defaultJsonElement.EnumerateObject().ToArray();
foreach (JsonProperty jsonProperty in jsonProperties)
{
if (jsonProperty.Value.ValueKind == JsonValueKind.Null && !allowNull)
throw new Exception();
if (!(jsonProperty.Value.ValueKind is JsonValueKind.String) || !keyValuePairs.ContainsKey(jsonProperty.Name))
description = string.Empty;
else
description = keyValuePairs[jsonProperty.Name].Split('|')[0];
}
}
// Deserializes each array element into the concrete ProcessDataDescription type,
// keeping only elements that implement IProcessDataDescription.
public List<IProcessDataDescription> GetIProcessDataDescriptions(JsonElement jsonElement)
{
List<IProcessDataDescription> results = new List<IProcessDataDescription>();
if (jsonElement.ValueKind != JsonValueKind.Array)
throw new Exception();
object @object;
Type type = ProcessDataDescription.GetType();
JsonElement[] jsonElements = jsonElement.EnumerateArray().ToArray();
JsonSerializerOptions jsonSerializerOptions = new JsonSerializerOptions { NumberHandling = JsonNumberHandling.AllowReadingFromString | JsonNumberHandling.WriteAsString };
for (int i = 0; i < jsonElements.Length; i++)
{
@object = JsonSerializer.Deserialize(jsonElements[i].ToString(), type, jsonSerializerOptions);
if (!(@object is IProcessDataDescription processDataDescription))
continue;
results.Add(processDataDescription);
}
return results;
}
}
}

View File

@ -1,13 +0,0 @@
namespace Adaptation.Shared.Metrology
{
/// <summary>
/// Trigger events handled by the file-read pipeline; the suffix denotes the
/// reporting cadence (no suffix = per-file). Member order defines the
/// underlying values — do not reorder.
/// </summary>
public enum EventName
{
FileRead,
FileReadDaily,
FileReadMonthly,
FileReadVerification,
FileReadWeekly
}
}

View File

@ -1,45 +0,0 @@
using Adaptation.Eaf.Management.ConfigurationData.CellAutomation;
using Adaptation.Ifx.Eaf.EquipmentConnector.File.Configuration;
using System;
using System.Collections.Generic;
using System.IO;
using System.Reflection;
using System.Text.Json;
namespace Adaptation.Shared.Metrology
{
/// <summary>
/// Orchestration contract for a cell connection: configuration path lookup,
/// place-holder resolution, file-parameter management, and extract/move
/// operations. Extract results travel as (text, optional JSON element, files).
/// </summary>
public interface ILogic
{
ILogic ShallowCopy();
/// <summary>Logistics record for the run currently being processed.</summary>
Logistics Logistics { get; }
void ConfigurationRestore();
// Configuration file-location accessors.
string GetConfigurationErrorTargetFileLocation();
string GetConfigurationSourceFileLocation();
string GetConfigurationTarget2FileLocation();
string GetConfigurationTargetFileLocation();
string GetConfigurationTargetFileName();
/// <summary>Runs extraction for <paramref name="reportFullPath"/>.</summary>
Tuple<string, JsonElement?, List<FileInfo>> GetExtractResult(string reportFullPath, string eventName);
object GetFilePathGeneratorInfo(string reportFullPath, bool isErrorFile);
string GetReportFullPath(Dictionary<string, object> keyValuePairs);
string GetTarget2FileLocation();
/// <summary>Moves an extract result's files; <paramref name="exception"/> marks a failed run.</summary>
void Move(string reportFullPath, Tuple<string, JsonElement?, List<FileInfo>> extractResults, Exception exception = null);
Tuple<string, JsonElement?, List<FileInfo>> ReExtract(string searchDirectory, string sourceFileFilter);
void ReflectionCreateSelfDescription(string equipmentElementName, int? input, string cellName, string debugConfig, string[] strings, bool[] booleans, long[] numbers, string[] enums);
ConfigDataBase ReflectionCreateSelfDescriptionV2(string json);
// Place-holder resolution for source/target/error paths.
string ResolveErrorTargetPlaceHolders(string reportFullPath, bool createDirectory = true, string fileFoundPath = "");
string ResolveSourcePlaceHolders(string reportFullPath, bool createDirectory = true);
string ResolveTarget2PlaceHolders(string reportFullPath, bool createDirectory = true, string fileFoundPath = "");
string ResolveTargetPlaceHolders(string reportFullPath, bool createDirectory = true, string fileFoundPath = "");
// File-parameter setters.
void SetFileParameter(string key, string value);
void SetFileParameterLotID(string value, bool includeLogisticsSequence = false);
void SetFileParameterLotIDToLogisticsMID(bool includeLogisticsSequence = true);
void SetFileParameterSystemDateTimeToLogisticsSequence();
void SetPlaceHolder(string reportFullPath, string key, string value);
void SetTarget2FileLocation(string value);
}
}

Some files were not shown because too many files have changed in this diff Show More