Match TFS Changeset 303350

Mike Phares 2022-02-01 18:21:37 -07:00
parent 512ee138ae
commit 7531f7994a
110 changed files with 17588 additions and 13 deletions

.gitignore
@@ -328,3 +328,12 @@ ASALocalRun/
# MFractors (Xamarin productivity tool) working folder
.mfractor/
##
## Visual Studio Code
##
.vscode/*
!.vscode/settings.json
!.vscode/tasks.json
!.vscode/launch.json
!.vscode/extensions.json

Adaptation/.editorconfig
@@ -0,0 +1,336 @@
# Remove the line below if you want to inherit .editorconfig settings from higher directories
root = true
# C# files
[*.cs]
#### Core EditorConfig Options ####
# Indentation and spacing
indent_size = 4
indent_style = space
tab_width = 4
# New line preferences
end_of_line = crlf
insert_final_newline = false
#### .NET Coding Conventions ####
# Organize usings
dotnet_separate_import_directive_groups = false
dotnet_sort_system_directives_first = false
file_header_template = unset
# this. and Me. preferences
dotnet_style_qualification_for_event = false:error
dotnet_style_qualification_for_field = false
dotnet_style_qualification_for_method = false:error
dotnet_style_qualification_for_property = false:error
# Language keywords vs BCL types preferences
dotnet_style_predefined_type_for_locals_parameters_members = true
dotnet_style_predefined_type_for_member_access = true
# Parentheses preferences
dotnet_style_parentheses_in_arithmetic_binary_operators = always_for_clarity
dotnet_style_parentheses_in_other_binary_operators = always_for_clarity
dotnet_style_parentheses_in_other_operators = never_if_unnecessary
dotnet_style_parentheses_in_relational_binary_operators = always_for_clarity
# Modifier preferences
dotnet_style_require_accessibility_modifiers = for_non_interface_members
# Expression-level preferences
dotnet_style_coalesce_expression = true
dotnet_style_collection_initializer = true:warning
dotnet_style_explicit_tuple_names = true:warning
dotnet_style_namespace_match_folder = true
dotnet_style_null_propagation = true:warning
dotnet_style_object_initializer = true:warning
dotnet_style_operator_placement_when_wrapping = beginning_of_line
dotnet_style_prefer_auto_properties = true:warning
dotnet_style_prefer_compound_assignment = true:warning
dotnet_style_prefer_conditional_expression_over_assignment = false
dotnet_style_prefer_conditional_expression_over_return = false
dotnet_style_prefer_inferred_anonymous_type_member_names = true:warning
dotnet_style_prefer_inferred_tuple_names = true:warning
dotnet_style_prefer_is_null_check_over_reference_equality_method = true:warning
dotnet_style_prefer_simplified_boolean_expressions = true:warning
dotnet_style_prefer_simplified_interpolation = true
# Field preferences
dotnet_style_readonly_field = true:warning
# Parameter preferences
dotnet_code_quality_unused_parameters = all
# Suppression preferences
dotnet_remove_unnecessary_suppression_exclusions = 0
# New line preferences
dotnet_style_allow_multiple_blank_lines_experimental = false:warning
dotnet_style_allow_statement_immediately_after_block_experimental = true
#### C# Coding Conventions ####
# var preferences
csharp_style_var_elsewhere = false:warning
csharp_style_var_for_built_in_types = false:warning
csharp_style_var_when_type_is_apparent = false:warning
# Expression-bodied members
csharp_style_expression_bodied_accessors = when_on_single_line:warning
csharp_style_expression_bodied_constructors = when_on_single_line:warning
csharp_style_expression_bodied_indexers = when_on_single_line:warning
csharp_style_expression_bodied_lambdas = when_on_single_line:warning
csharp_style_expression_bodied_local_functions = when_on_single_line:warning
csharp_style_expression_bodied_methods = when_on_single_line:warning
csharp_style_expression_bodied_operators = when_on_single_line:warning
csharp_style_expression_bodied_properties = when_on_single_line:warning
# Pattern matching preferences
csharp_style_pattern_matching_over_as_with_null_check = true:warning
csharp_style_pattern_matching_over_is_with_cast_check = true:warning
csharp_style_prefer_not_pattern = true:warning
csharp_style_prefer_pattern_matching = true:warning
csharp_style_prefer_switch_expression = true:warning
# Null-checking preferences
csharp_style_conditional_delegate_call = true
# Modifier preferences
csharp_prefer_static_local_function = true:warning
csharp_preferred_modifier_order = public,private,protected,internal,static,extern,new,virtual,abstract,sealed,override,readonly,unsafe,volatile,async
# Code-block preferences
csharp_prefer_braces = false
csharp_prefer_simple_using_statement = true:warning
csharp_style_namespace_declarations = file_scoped:warning
# Expression-level preferences
csharp_prefer_simple_default_expression = true:warning
csharp_style_deconstructed_variable_declaration = false
csharp_style_implicit_object_creation_when_type_is_apparent = true:warning
csharp_style_inlined_variable_declaration = false
csharp_style_pattern_local_over_anonymous_function = true:warning
csharp_style_prefer_index_operator = false:warning
csharp_style_prefer_null_check_over_type_check = true
csharp_style_prefer_range_operator = false:warning
csharp_style_throw_expression = true
csharp_style_unused_value_assignment_preference = discard_variable:warning
csharp_style_unused_value_expression_statement_preference = discard_variable:warning
# 'using' directive preferences
csharp_using_directive_placement = outside_namespace
# New line preferences
csharp_style_allow_blank_line_after_colon_in_constructor_initializer_experimental = true
csharp_style_allow_blank_lines_between_consecutive_braces_experimental = true
csharp_style_allow_embedded_statements_on_same_line_experimental = true
#### C# Formatting Rules ####
# New line preferences
csharp_new_line_before_catch = true
csharp_new_line_before_else = true
csharp_new_line_before_finally = true
csharp_new_line_before_members_in_anonymous_types = true
csharp_new_line_before_members_in_object_initializers = true
csharp_new_line_before_open_brace = all
csharp_new_line_between_query_expression_clauses = true
# Indentation preferences
csharp_indent_block_contents = true
csharp_indent_braces = false
csharp_indent_case_contents = true
csharp_indent_case_contents_when_block = true
csharp_indent_labels = one_less_than_current
csharp_indent_switch_labels = true
# Space preferences
csharp_space_after_cast = false
csharp_space_after_colon_in_inheritance_clause = true
csharp_space_after_comma = true
csharp_space_after_dot = false
csharp_space_after_keywords_in_control_flow_statements = true
csharp_space_after_semicolon_in_for_statement = true
csharp_space_around_binary_operators = before_and_after
csharp_space_around_declaration_statements = false
csharp_space_before_colon_in_inheritance_clause = true
csharp_space_before_comma = false
csharp_space_before_dot = false
csharp_space_before_open_square_brackets = false
csharp_space_before_semicolon_in_for_statement = false
csharp_space_between_empty_square_brackets = false
csharp_space_between_method_call_empty_parameter_list_parentheses = false
csharp_space_between_method_call_name_and_opening_parenthesis = false
csharp_space_between_method_call_parameter_list_parentheses = false
csharp_space_between_method_declaration_empty_parameter_list_parentheses = false
csharp_space_between_method_declaration_name_and_open_parenthesis = false
csharp_space_between_method_declaration_parameter_list_parentheses = false
csharp_space_between_parentheses = false
csharp_space_between_square_brackets = false
# Wrapping preferences
csharp_preserve_single_line_blocks = true
csharp_preserve_single_line_statements = false
#### Naming styles ####
# Naming rules
dotnet_naming_rule.interface_should_be_begins_with_i.severity = warning
dotnet_naming_rule.interface_should_be_begins_with_i.symbols = interface
dotnet_naming_rule.interface_should_be_begins_with_i.style = begins_with_i
dotnet_naming_rule.types_should_be_pascal_case.severity = warning
dotnet_naming_rule.types_should_be_pascal_case.symbols = types
dotnet_naming_rule.types_should_be_pascal_case.style = pascal_case
dotnet_naming_rule.private_or_internal_field_should_be_private_of_internal_field.severity = warning
dotnet_naming_rule.private_or_internal_field_should_be_private_of_internal_field.symbols = private_or_internal_field
dotnet_naming_rule.private_or_internal_field_should_be_private_of_internal_field.style = private_of_internal_field
dotnet_naming_rule.enum_should_be_pascal_case.severity = warning
dotnet_naming_rule.enum_should_be_pascal_case.symbols = enum
dotnet_naming_rule.enum_should_be_pascal_case.style = pascal_case
dotnet_naming_rule.public_or_protected_field_should_be_private_of_internal_field.severity = warning
dotnet_naming_rule.public_or_protected_field_should_be_private_of_internal_field.symbols = public_or_protected_field
dotnet_naming_rule.public_or_protected_field_should_be_private_of_internal_field.style = private_of_internal_field
dotnet_naming_rule.class_should_be_pascal_case.severity = warning
dotnet_naming_rule.class_should_be_pascal_case.symbols = class
dotnet_naming_rule.class_should_be_pascal_case.style = pascal_case
dotnet_naming_rule.method_should_be_pascal_case.severity = warning
dotnet_naming_rule.method_should_be_pascal_case.symbols = method
dotnet_naming_rule.method_should_be_pascal_case.style = pascal_case
dotnet_naming_rule.private_or_internal_static_field_should_be_private_of_internal_field.severity = warning
dotnet_naming_rule.private_or_internal_static_field_should_be_private_of_internal_field.symbols = private_or_internal_static_field
dotnet_naming_rule.private_or_internal_static_field_should_be_private_of_internal_field.style = private_of_internal_field
dotnet_naming_rule.static_field_should_be_pascal_case.severity = warning
dotnet_naming_rule.static_field_should_be_pascal_case.symbols = static_field
dotnet_naming_rule.static_field_should_be_pascal_case.style = pascal_case
dotnet_naming_rule.property_should_be_pascal_case.severity = warning
dotnet_naming_rule.property_should_be_pascal_case.symbols = property
dotnet_naming_rule.property_should_be_pascal_case.style = pascal_case
dotnet_naming_rule.static_method_should_be_pascal_case.severity = warning
dotnet_naming_rule.static_method_should_be_pascal_case.symbols = static_method
dotnet_naming_rule.static_method_should_be_pascal_case.style = pascal_case
dotnet_naming_rule.abstract_method_should_be_pascal_case.severity = warning
dotnet_naming_rule.abstract_method_should_be_pascal_case.symbols = abstract_method
dotnet_naming_rule.abstract_method_should_be_pascal_case.style = pascal_case
dotnet_naming_rule.private_method_should_be_pascal_case.severity = warning
dotnet_naming_rule.private_method_should_be_pascal_case.symbols = private_method
dotnet_naming_rule.private_method_should_be_pascal_case.style = pascal_case
dotnet_naming_rule.event_should_be_pascal_case.severity = warning
dotnet_naming_rule.event_should_be_pascal_case.symbols = event
dotnet_naming_rule.event_should_be_pascal_case.style = pascal_case
dotnet_naming_rule.delegate_should_be_pascal_case.severity = warning
dotnet_naming_rule.delegate_should_be_pascal_case.symbols = delegate
dotnet_naming_rule.delegate_should_be_pascal_case.style = pascal_case
dotnet_naming_rule.struct_should_be_pascal_case.severity = warning
dotnet_naming_rule.struct_should_be_pascal_case.symbols = struct
dotnet_naming_rule.struct_should_be_pascal_case.style = pascal_case
dotnet_naming_rule.non_field_members_should_be_pascal_case.severity = warning
dotnet_naming_rule.non_field_members_should_be_pascal_case.symbols = non_field_members
dotnet_naming_rule.non_field_members_should_be_pascal_case.style = pascal_case
# Symbol specifications
dotnet_naming_symbols.class.applicable_kinds = class
dotnet_naming_symbols.class.applicable_accessibilities = public, internal, private, protected, protected_internal, private_protected
dotnet_naming_symbols.class.required_modifiers =
dotnet_naming_symbols.interface.applicable_kinds = interface
dotnet_naming_symbols.interface.applicable_accessibilities = public, internal, private, protected, protected_internal, private_protected
dotnet_naming_symbols.interface.required_modifiers =
dotnet_naming_symbols.struct.applicable_kinds = struct
dotnet_naming_symbols.struct.applicable_accessibilities = public, internal, private, protected, protected_internal, private_protected
dotnet_naming_symbols.struct.required_modifiers =
dotnet_naming_symbols.enum.applicable_kinds = enum
dotnet_naming_symbols.enum.applicable_accessibilities = public, internal, private, protected, protected_internal, private_protected
dotnet_naming_symbols.enum.required_modifiers =
dotnet_naming_symbols.delegate.applicable_kinds = delegate
dotnet_naming_symbols.delegate.applicable_accessibilities = public, internal, private, protected, protected_internal, private_protected
dotnet_naming_symbols.delegate.required_modifiers =
dotnet_naming_symbols.event.applicable_kinds = event
dotnet_naming_symbols.event.applicable_accessibilities = public, internal, private, protected, protected_internal, private_protected
dotnet_naming_symbols.event.required_modifiers =
dotnet_naming_symbols.method.applicable_kinds = method
dotnet_naming_symbols.method.applicable_accessibilities = public
dotnet_naming_symbols.method.required_modifiers =
dotnet_naming_symbols.private_method.applicable_kinds = method
dotnet_naming_symbols.private_method.applicable_accessibilities = private
dotnet_naming_symbols.private_method.required_modifiers =
dotnet_naming_symbols.abstract_method.applicable_kinds = method
dotnet_naming_symbols.abstract_method.applicable_accessibilities = public, internal, private, protected, protected_internal, private_protected
dotnet_naming_symbols.abstract_method.required_modifiers = abstract
dotnet_naming_symbols.static_method.applicable_kinds = method
dotnet_naming_symbols.static_method.applicable_accessibilities = public, internal, private, protected, protected_internal, private_protected
dotnet_naming_symbols.static_method.required_modifiers = static
dotnet_naming_symbols.property.applicable_kinds = property
dotnet_naming_symbols.property.applicable_accessibilities = public, internal, private, protected, protected_internal, private_protected
dotnet_naming_symbols.property.required_modifiers =
dotnet_naming_symbols.public_or_protected_field.applicable_kinds = field
dotnet_naming_symbols.public_or_protected_field.applicable_accessibilities = public, protected
dotnet_naming_symbols.public_or_protected_field.required_modifiers =
dotnet_naming_symbols.static_field.applicable_kinds = field
dotnet_naming_symbols.static_field.applicable_accessibilities = public, internal, private, protected, protected_internal, private_protected
dotnet_naming_symbols.static_field.required_modifiers = static
dotnet_naming_symbols.private_or_internal_field.applicable_kinds = field
dotnet_naming_symbols.private_or_internal_field.applicable_accessibilities = internal, private, private_protected
dotnet_naming_symbols.private_or_internal_field.required_modifiers =
dotnet_naming_symbols.private_or_internal_static_field.applicable_kinds = field
dotnet_naming_symbols.private_or_internal_static_field.applicable_accessibilities = internal, private, private_protected
dotnet_naming_symbols.private_or_internal_static_field.required_modifiers = static
dotnet_naming_symbols.types.applicable_kinds = class, struct, interface, enum
dotnet_naming_symbols.types.applicable_accessibilities = public, internal, private, protected, protected_internal, private_protected
dotnet_naming_symbols.types.required_modifiers =
dotnet_naming_symbols.non_field_members.applicable_kinds = property, event, method
dotnet_naming_symbols.non_field_members.applicable_accessibilities = public, internal, private, protected, protected_internal, private_protected
dotnet_naming_symbols.non_field_members.required_modifiers =
# Naming styles
dotnet_naming_style.pascal_case.required_prefix =
dotnet_naming_style.pascal_case.required_suffix =
dotnet_naming_style.pascal_case.word_separator =
dotnet_naming_style.pascal_case.capitalization = pascal_case
dotnet_naming_style.begins_with_i.required_prefix = I
dotnet_naming_style.begins_with_i.required_suffix =
dotnet_naming_style.begins_with_i.word_separator =
dotnet_naming_style.begins_with_i.capitalization = pascal_case
dotnet_naming_style.private_of_internal_field.required_prefix = _
dotnet_naming_style.private_of_internal_field.required_suffix =
dotnet_naming_style.private_of_internal_field.word_separator =
dotnet_naming_style.private_of_internal_field.capitalization = pascal_case
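
Taken together, the rules above enforce file-scoped namespaces, discourage var, require an I prefix on interfaces, and require private or internal fields to begin with an underscore followed by PascalCase (the same _Name pattern used by the FileRead classes later in this commit). A minimal sketch of code that satisfies those rules; the namespace and type names here are hypothetical and not part of the commit:

namespace Adaptation.Example;

public interface IWidgetService
{
    string GetName();
}

public class WidgetService : IWidgetService
{
    // Matches the private_of_internal_field style: required prefix "_", PascalCase.
    private readonly string _Name;

    public WidgetService(string name) => _Name = name;

    // Expression-bodied member, allowed because it fits on a single line.
    public string GetName() => _Name;
}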

Adaptation/.vscode/launch.json
@@ -0,0 +1,10 @@
{
"configurations": [
{
"name": ".NET Core Attach",
"type": "coreclr",
"request": "attach",
"processId": 90560
}
]
}

@@ -0,0 +1,6 @@
namespace Adaptation.Eaf.Core
{
public class BackboneComponent
{
}
}

@@ -0,0 +1,6 @@
namespace Adaptation.Eaf.Core
{
public class BackboneStatusCache
{
}
}

@@ -0,0 +1,6 @@
namespace Adaptation.Eaf.Core
{
public interface ILoggingSetupManager
{
}
}

@@ -0,0 +1,6 @@
namespace Adaptation.Eaf.Core
{
public class StatusItem
{
}
}

@@ -0,0 +1,48 @@
using Adaptation.PeerGroup.GCL.Annotations;
using System;
using System.Collections.Generic;
namespace Adaptation.Eaf.Core
{
public class Backbone
{
public const string STATE_ERROR = "Error";
public const string STATE_OFFLINE = "Offline";
public const string STATE_RUNNING = "Running";
public const string STATE_SHUTDOWN = "Shutting Down";
public const string STATE_STARTING = "Starting";
protected Backbone() { }
[NotNull]
public static Backbone Instance { get; }
[NotNull]
public ILoggingSetupManager LoggingConfigurationManager { get; set; }
public BackboneStatusCache Status { get; }
public bool IsAutomatedRestartActive { get; }
public bool IsReadyForRestart { get; }
public string StartTime { get; }
public string State { get; }
public string Name { get; }
public string ConfigurationServiceAddress { get; }
public string CellName { get; }
protected bool IsInitialized { get; set; }
protected Dictionary<string, BackboneComponent> BackboneComponents { get; }
public void AddBackboneComponent(BackboneComponent backboneComponent) { }
public bool ContainsBackboneComponent(string id) { throw new NotImplementedException(); }
[Obsolete("Use the capabilities exposed via the Status property -> GetAll. Will be removed with next major release.")]
public List<StatusItem> GetAllStatuses() { throw new NotImplementedException(); }
public BackboneComponent GetBackboneComponentById(string id) { throw new NotImplementedException(); }
public List<T> GetBackboneComponentsOfType<T>() { throw new NotImplementedException(); }
public List<BackboneComponent> GetBackboneComponentsOfType(Type type) { throw new NotImplementedException(); }
public void RegisterSubprocess(int pid) { }
[Obsolete("Use the capabilities exposed via the Status property -> SetValue. Will be removed with next major release.")]
public void SetStatus(string statusName, string statusValue) { }
[Obsolete("Use the capabilities exposed via the Status property -> SetValue. Will be removed with next major release.")]
public void SetStatus(BackboneComponent source, string statusName, string statusValue) { }
protected void CloseConnectionOfComponents(List<BackboneComponent> components) { }
protected virtual void StopAllComponents() { }
protected void StopComponents(List<BackboneComponent> components) { }
}
}

@@ -0,0 +1,24 @@
using System;
namespace Adaptation.Eaf.Core.Smtp
{
public class EmailMessage
{
public EmailMessage() { }
public EmailMessage(string subject, string body, MailPriority priority = MailPriority.Normal) { }
public string Body { get; }
public MailPriority Priority { get; }
public string Subject { get; }
public EmailMessage PriorityHigh() { throw new NotImplementedException(); }
public EmailMessage PriorityLow() { throw new NotImplementedException(); }
public EmailMessage PriorityNormal() { throw new NotImplementedException(); }
public EmailMessage SetBody(string body) { throw new NotImplementedException(); }
public EmailMessage SetPriority(MailPriority priority) { throw new NotImplementedException(); }
public EmailMessage SetSubject(string subject) { throw new NotImplementedException(); }
}
}

@@ -0,0 +1,9 @@
namespace Adaptation.Eaf.Core.Smtp
{
public interface ISmtp
{
void Send(EmailMessage message);
}
}

@@ -0,0 +1,11 @@
namespace Adaptation.Eaf.Core.Smtp
{
public enum MailPriority
{
Low = 0,
Normal = 1,
High = 2
}
}
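
The three Core.Smtp types above (ISmtp, EmailMessage, MailPriority) appear to be stub declarations mirroring the EAF mail-sending surface. A hedged usage sketch, assuming an ISmtp implementation is supplied by the hosting framework; AlertSender and its members are hypothetical and not part of the commit:

using Adaptation.Eaf.Core.Smtp;

public class AlertSender
{
    private readonly ISmtp _Smtp;

    public AlertSender(ISmtp smtp) => _Smtp = smtp;

    // Builds a high-priority message and hands it to whatever ISmtp implementation was injected.
    public void SendAlert(string subject, string body) =>
        _Smtp.Send(new EmailMessage(subject, body, MailPriority.High));
}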

@@ -0,0 +1,6 @@
namespace Adaptation.Eaf.EquipmentCore.Control
{
public class ChangeDataCollectionHandler
{
}
}

@@ -0,0 +1,6 @@
namespace Adaptation.Eaf.EquipmentCore.Control
{
public class DataCollectionRequest
{
}
}

@@ -0,0 +1,6 @@
namespace Adaptation.Eaf.EquipmentCore.Control
{
public class EquipmentEvent
{
}
}

@@ -0,0 +1,6 @@
namespace Adaptation.Eaf.EquipmentCore.Control
{
public class EquipmentException
{
}
}

@@ -0,0 +1,6 @@
namespace Adaptation.Eaf.EquipmentCore.Control
{
public class EquipmentSelfDescription
{
}
}

@@ -0,0 +1,6 @@
namespace Adaptation.Eaf.EquipmentCore.Control
{
public class GetParameterValuesHandler
{
}
}

@@ -0,0 +1,6 @@
namespace Adaptation.Eaf.EquipmentCore.Control
{
public interface IConnectionControl
{
}
}

@@ -0,0 +1,6 @@
namespace Adaptation.Eaf.EquipmentCore.Control
{
public interface IDataTracingHandler
{
}
}

@@ -0,0 +1,6 @@
namespace Adaptation.Eaf.EquipmentCore.Control
{
public interface IEquipmentCommandService
{
}
}

@@ -0,0 +1,16 @@
using Adaptation.PeerGroup.GCL.Annotations;
namespace Adaptation.Eaf.EquipmentCore.Control
{
public interface IEquipmentControl : IPackageSource
{
[NotNull]
IEquipmentSelfDescriptionBuilder SelfDescriptionBuilder { get; }
[NotNull]
IEquipmentDataCollection DataCollection { get; }
[NotNull]
IEquipmentCommandService Commands { get; }
[NotNull]
IConnectionControl Connection { get; }
}
}

@@ -0,0 +1,6 @@
namespace Adaptation.Eaf.EquipmentCore.Control
{
public interface IEquipmentSelfDescriptionBuilder
{
}
}

@@ -0,0 +1,6 @@
namespace Adaptation.Eaf.EquipmentCore.Control
{
public interface IPackage
{
}
}

@@ -0,0 +1,6 @@
namespace Adaptation.Eaf.EquipmentCore.Control
{
public interface ISelfDescriptionLookup
{
}
}

@@ -0,0 +1,6 @@
namespace Adaptation.Eaf.EquipmentCore.Control
{
public interface IVirtualParameterValuesHandler
{
}
}

@@ -0,0 +1,6 @@
namespace Adaptation.Eaf.EquipmentCore.Control
{
public class SetParameterValuesHandler
{
}
}

@@ -0,0 +1,6 @@
namespace Adaptation.Eaf.EquipmentCore.Control
{
public class TraceRequest
{
}
}

@@ -0,0 +1,39 @@
using Adaptation.Eaf.EquipmentCore.DataCollection.Reporting;
using Adaptation.Eaf.EquipmentCore.SelfDescription.ElementDescription;
using System;
using System.Collections.Generic;
namespace Adaptation.Eaf.EquipmentCore.Control
{
public interface IEquipmentDataCollection
{
IVirtualParameterValuesHandler VirtualParameterValuesHandler { get; }
ISelfDescriptionLookup SelfDescriptionLookup { get; }
EquipmentSelfDescription SelfDescription { get; }
IEnumerable<DataCollectionRequest> ActiveRequests { get; }
IDataTracingHandler DataTracingHandler { get; }
ParameterValue CreateParameterValue(EquipmentParameter parameter, object value);
void NotifyDataTracingAvailable(bool isAvailable);
void RegisterChangeDataCollectionHandler(ChangeDataCollectionHandler handler);
void RegisterDataTracingHandler(IDataTracingHandler handler);
void RegisterGetParameterValuesHandler(GetParameterValuesHandler handler);
void RegisterSetParameterValuesHandler(SetParameterValuesHandler handler);
void TriggerDeactivate(DataCollectionRequest deactivateRequest);
void TriggerEvent(EquipmentEvent equipmentEvent, IEnumerable<ParameterValue> parameters);
void TriggerEvent(EquipmentEvent equipmentEvent, IEnumerable<ParameterValue> parameters, IPackage sourcePackage);
void TriggerExceptionClear(EquipmentException equipmentException, IEnumerable<ParameterValue> parameters);
void TriggerExceptionClear(EquipmentException equipmentException, IEnumerable<ParameterValue> parameters, IPackage sourcePackage);
void TriggerExceptionClear(EquipmentException equipmentException, IEnumerable<ParameterValue> parameters, string severityOverride, string descriptionOverride);
void TriggerExceptionClear(EquipmentException equipmentException, IEnumerable<ParameterValue> parameters, string severityOverride, string descriptionOverride, IPackage sourcePackage);
void TriggerExceptionSet(EquipmentException equipmentException, IEnumerable<ParameterValue> parameters, string severityOverride, string descriptionOverride, IPackage sourcePackage);
void TriggerExceptionSet(EquipmentException equipmentException, IEnumerable<ParameterValue> parameters, string severityOverride, string descriptionOverride);
void TriggerExceptionSet(EquipmentException equipmentException, IEnumerable<ParameterValue> parameters, IPackage sourcePackage);
void TriggerExceptionSet(EquipmentException equipmentException, IEnumerable<ParameterValue> parameters);
void TriggerPerformanceRestored();
void TriggerPerformanceWarning();
void TriggerTraceSample(TraceRequest traceRequest, long sampleId, IEnumerable<ParameterValue> parameters);
void TriggerTraceSample(TraceRequest traceRequest, long sampleId, IEnumerable<ParameterValue> parameters, IPackage sourcePackage);
void TriggerTraceSample(TraceRequest traceRequest, long sampleId, IEnumerable<ParameterValue> parameters, DateTime equipmentTimeStamp);
}
}

@@ -0,0 +1,6 @@
namespace Adaptation.Eaf.EquipmentCore.Control
{
public interface IPackageSource
{
}
}

@@ -0,0 +1,20 @@
using Adaptation.Eaf.EquipmentCore.SelfDescription.ElementDescription;
using Adaptation.PeerGroup.GCL.Annotations;
using System;
namespace Adaptation.Eaf.EquipmentCore.DataCollection.Reporting
{
public class ParameterValue
{
public ParameterValue(EquipmentParameter definition, object value) { }
public ParameterValue(EquipmentParameter definition, object value, DateTime timestamp) { }
public virtual object Value { get; protected internal set; }
[NotNull]
public EquipmentParameter Definition { get; }
public DateTime Timestamp { get; protected set; }
public virtual ParameterValue Clone(EquipmentParameter newDefinition) { throw new NotImplementedException(); }
public override string ToString() { return base.ToString(); }
}
}

@@ -0,0 +1,24 @@
using Adaptation.Eaf.EquipmentCore.SelfDescription.ParameterTypes;
namespace Adaptation.Eaf.EquipmentCore.SelfDescription.ElementDescription
{
public class EquipmentParameter
{
public EquipmentParameter(EquipmentParameter source, ParameterTypeDefinition typeDefinition) { }
public EquipmentParameter(string name, ParameterTypeDefinition typeDefinition, string description, bool isTransient = false, bool isReadOnly = true) { }
public EquipmentParameter(string id, string name, ParameterTypeDefinition typeDefinition, string description, bool isTransient = false, bool isReadOnly = true) { }
public string Name { get; }
public string Id { get; }
public string Description { get; }
public string SourcePath { get; }
public string SourceEquipment { get; }
public ParameterTypeDefinition TypeDefinition { get; }
public bool IsTransient { get; }
public bool IsReadOnly { get; }
public override string ToString() { return base.ToString(); }
public string ToStringWithDetails() { return base.ToString(); }
}
}

@@ -0,0 +1,12 @@
namespace Adaptation.Eaf.EquipmentCore.SelfDescription.ParameterTypes
{
public class Field
{
public Field(string name, string description, bool canBeNull, ParameterTypeDefinition typeDefinition) { }
public string Name { get; }
public string Description { get; }
public ParameterTypeDefinition TypeDefinition { get; }
public bool CanBeNull { get; }
}
}

@@ -0,0 +1,12 @@
namespace Adaptation.Eaf.EquipmentCore.SelfDescription.ParameterTypes
{
public abstract class ParameterTypeDefinition
{
public ParameterTypeDefinition(string name, string description) { }
public string Name { get; }
public string Description { get; }
public override string ToString() { return base.ToString(); }
}
}

@@ -0,0 +1,12 @@
using System.Collections.Generic;
namespace Adaptation.Eaf.EquipmentCore.SelfDescription.ParameterTypes
{
public class StructuredType : ParameterTypeDefinition
{
public StructuredType(string name, string description, IList<Field> fields) : base(name, description) { }
public IList<Field> Fields { get; }
}
}

@@ -0,0 +1,6 @@
namespace Adaptation.Eaf.Management.ConfigurationData.CellAutomation
{
public interface IConfigurationObject
{
}
}

@@ -0,0 +1,26 @@
using System;
namespace Adaptation.Eaf.Management.ConfigurationData.CellAutomation
{
[System.Runtime.Serialization.DataContractAttribute(IsReference = true)]
public class ModelObjectParameterDefinition : IConfigurationObject
{
public ModelObjectParameterDefinition() { }
public ModelObjectParameterDefinition(string name, ModelObjectParameterType valueType, object defaultValue) { }
public ModelObjectParameterDefinition(string name, Type enumType, object defaultValue) { }
[System.Runtime.Serialization.DataMemberAttribute]
public virtual long Id { get; set; }
[System.Runtime.Serialization.DataMemberAttribute]
public virtual string Name { get; set; }
[System.Runtime.Serialization.DataMemberAttribute]
public virtual string Value { get; set; }
[System.Runtime.Serialization.DataMemberAttribute]
public virtual ModelObjectParameterType ValueType { get; set; }
[System.Runtime.Serialization.DataMemberAttribute]
public virtual string EnumType { get; set; }
public virtual ModelObjectParameterDefinition Clone() { return null; }
public virtual bool IsValidValue(string value) { return false; }
}
}

@@ -0,0 +1,17 @@
namespace Adaptation.Eaf.Management.ConfigurationData.CellAutomation
{
public enum ModelObjectParameterType
{
String = 0,
Bool = 1,
Byte = 2,
SignedByte = 3,
Integer = 4,
UnsignedInteger = 5,
LongInteger = 6,
UnsignedLongInteger = 7,
Double = 8,
Float = 9,
Enum = 10
}
}

@@ -0,0 +1,44 @@
using Adaptation.PeerGroup.GCL.SecsDriver;
using System;
namespace Adaptation.Eaf.Management.ConfigurationData.Semiconductor.CellInstances
{
[System.Runtime.Serialization.DataContractAttribute]
public class SecsConnectionConfiguration
{
public SecsConnectionConfiguration() { }
[System.Runtime.Serialization.DataMemberAttribute]
public virtual TimeSpan T6HsmsControlMessage { get; set; }
[System.Runtime.Serialization.DataMemberAttribute]
public virtual TimeSpan T5ConnectionSeperation { get; set; }
[System.Runtime.Serialization.DataMemberAttribute]
public virtual TimeSpan T4InterBlock { get; set; }
[System.Runtime.Serialization.DataMemberAttribute]
public virtual TimeSpan T3MessageReply { get; set; }
[System.Runtime.Serialization.DataMemberAttribute]
public virtual TimeSpan T2Protocol { get; set; }
[System.Runtime.Serialization.DataMemberAttribute]
public virtual TimeSpan T1InterCharacter { get; set; }
[System.Runtime.Serialization.DataMemberAttribute]
public virtual SerialBaudRate? BaudRate { get; set; }
[System.Runtime.Serialization.DataMemberAttribute]
public virtual SecsTransportType? PortType { get; set; }
[System.Runtime.Serialization.DataMemberAttribute]
public virtual long? Port { get; set; }
[System.Runtime.Serialization.DataMemberAttribute]
public virtual TimeSpan LinkTestTimer { get; set; }
[System.Runtime.Serialization.DataMemberAttribute]
public virtual string Host { get; set; }
[System.Runtime.Serialization.DataMemberAttribute]
public virtual long? DeviceId { get; set; }
[System.Runtime.Serialization.DataMemberAttribute]
public virtual HsmsSessionMode? SessionMode { get; set; }
[System.Runtime.Serialization.DataMemberAttribute]
public virtual HsmsConnectionMode? ConnectionMode { get; set; }
[System.Runtime.Serialization.DataMemberAttribute]
public virtual TimeSpan T7ConnectionIdle { get; set; }
[System.Runtime.Serialization.DataMemberAttribute]
public virtual TimeSpan T8NetworkIntercharacter { get; set; }
}
}

@@ -0,0 +1,36 @@
using Adaptation.Eaf.Management.ConfigurationData.CellAutomation;
using Adaptation.Ifx.Eaf.EquipmentConnector.File.Configuration;
using Adaptation.Shared.Methods;
using System;
using System.Collections.Generic;
namespace Adaptation.FileHandlers
{
public class CellInstanceConnectionName
{
internal static IFileRead Get(ISMTP smtp, Dictionary<string, string> fileParameter, string cellInstanceName, string cellInstanceConnectionName, FileConnectorConfiguration fileConnectorConfiguration, string equipmentTypeName, string parameterizedModelObjectDefinitionType, IList<ModelObjectParameterDefinition> modelObjectParameters, string equipmentDictionaryName, Dictionary<string, List<long>> dummyRuns, bool useCyclicalForDescription, bool isEAFHosted)
{
IFileRead result;
int levelIsArchive = 7;
int levelIsXToArchive = 6;
bool isDuplicator = cellInstanceConnectionName.StartsWith(cellInstanceName);
if (isDuplicator)
result = new MET08THFTIRQS408M.FileRead(smtp, fileParameter, cellInstanceName, cellInstanceConnectionName, fileConnectorConfiguration, equipmentTypeName, parameterizedModelObjectDefinitionType, modelObjectParameters, equipmentDictionaryName, dummyRuns, useCyclicalForDescription, isEAFHosted, levelIsXToArchive, levelIsArchive);
else
{
result = cellInstanceConnectionName switch
{
nameof(txt) => new txt.FileRead(smtp, fileParameter, cellInstanceName, cellInstanceConnectionName, fileConnectorConfiguration, equipmentTypeName, parameterizedModelObjectDefinitionType, modelObjectParameters, equipmentDictionaryName, dummyRuns, useCyclicalForDescription, isEAFHosted, levelIsXToArchive, levelIsArchive),
nameof(Stratus) => new Stratus.FileRead(smtp, fileParameter, cellInstanceName, cellInstanceConnectionName, fileConnectorConfiguration, equipmentTypeName, parameterizedModelObjectDefinitionType, modelObjectParameters, equipmentDictionaryName, dummyRuns, useCyclicalForDescription, isEAFHosted, levelIsXToArchive, levelIsArchive),
//"QS408M" => new Stratus.FileRead(smtp, fileParameter, cellInstanceName, cellInstanceConnectionName, fileConnectorConfiguration, equipmentTypeName, parameterizedModelObjectDefinitionType, modelObjectParameters, equipmentDictionaryName, dummyRuns, useCyclicalForDescription, isEAFHosted, levelIsXToArchive, levelIsArchive),
_ => throw new Exception(),
};
}
return result;
}
}
}
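
CellInstanceConnectionName.Get above is a small factory: duplicator connections (where the connection name starts with the cell instance name) always get the MET08THFTIRQS408M handler, and every other connection is routed by name through a switch expression. A simplified sketch of the same routing shape, with hypothetical handler types standing in for the project-specific FileRead classes and their long constructor parameter lists:

using System;

public interface IHandler { }
public class DuplicatorHandler : IHandler { }
public class TxtHandler : IHandler { }
public class StratusHandler : IHandler { }

public static class HandlerFactory
{
    // Duplicators are detected first; remaining connections are matched by name.
    public static IHandler Get(string cellInstanceName, string cellInstanceConnectionName)
    {
        if (cellInstanceConnectionName.StartsWith(cellInstanceName))
            return new DuplicatorHandler();
        return cellInstanceConnectionName switch
        {
            "txt" => new TxtHandler(),
            "Stratus" => new StratusHandler(),
            _ => throw new Exception($"Unknown connection: {cellInstanceConnectionName}"),
        };
    }
}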

@@ -0,0 +1,555 @@
using Adaptation.Eaf.Management.ConfigurationData.CellAutomation;
using Adaptation.Ifx.Eaf.EquipmentConnector.File.Configuration;
using Adaptation.Shared;
using Adaptation.Shared.Duplicator;
using Adaptation.Shared.Methods;
using Adaptation.Shared.Metrology;
using Infineon.Monitoring.MonA;
using System;
using System.Collections.Generic;
using System.Diagnostics;
using System.Globalization;
using System.IO;
using System.IO.Compression;
using System.Linq;
using System.Text.Json;
using System.Text.Json.Serialization;
using System.Threading;
namespace Adaptation.FileHandlers.MET08THFTIRQS408M
{
public class FileRead : Shared.FileRead, IFileRead
{
private readonly Timer _Timer;
private int _LastDummyRunIndex;
private readonly string _IqsFile;
private readonly int _HyphenIsDummy;
private readonly int _HyphenIsNaEDA;
private readonly string _MemoryPath;
private readonly int _HyphenIsXToAPC;
private readonly int _HyphenIsXToIQSSi;
private readonly int _HyphenIsXToSPaCe;
private readonly string _OriginalDataBioRad;
private readonly int _HyphenIsXToOpenInsight;
private readonly string _EventNameFileReadDaily;
private readonly string _OpenInsightFilePattern;
private readonly string _OpenInsightMetrologyViewerAPI;
private readonly Dictionary<string, string> _CellNames;
private readonly int _HyphenIsXToOpenInsightMetrologyViewer;
private readonly int _HyphenIsXToOpenInsightMetrologyViewerAttachments;
public FileRead(ISMTP smtp, Dictionary<string, string> fileParameter, string cellInstanceName, string cellInstanceConnectionName, FileConnectorConfiguration fileConnectorConfiguration, string equipmentTypeName, string parameterizedModelObjectDefinitionType, IList<ModelObjectParameterDefinition> modelObjectParameters, string equipmentDictionaryName, Dictionary<string, List<long>> dummyRuns, bool useCyclicalForDescription, bool isEAFHosted, int hyphenXToArchive, int hyphenIsArchive) :
base(new Description(), false, smtp, fileParameter, cellInstanceName, cellInstanceConnectionName, fileConnectorConfiguration, equipmentTypeName, parameterizedModelObjectDefinitionType, modelObjectParameters, equipmentDictionaryName, dummyRuns, useCyclicalForDescription, isEAFHosted, hyphenXToArchive, hyphenIsArchive)
{
_MinFileLength = 10;
_NullData = string.Empty;
_Logistics = new Logistics(this);
if (_FileParameter is null)
throw new Exception(cellInstanceConnectionName);
if (_ModelObjectParameterDefinitions is null)
throw new Exception(cellInstanceConnectionName);
if (!_IsDuplicator)
throw new Exception(cellInstanceConnectionName);
if (hyphenIsArchive != (int)Hyphen.IsArchive)
throw new Exception(cellInstanceConnectionName);
if (hyphenXToArchive != (int)Hyphen.IsXToArchive)
throw new Exception(cellInstanceConnectionName);
_OriginalDataBioRad = "OriginalDataBioRad_";
_LastDummyRunIndex = -1;
if (_HyphenIsNaEDA == 0)
{ }
if (_HyphenIsXToSPaCe == 0)
{ }
if (_HyphenIsXToIQSSi == 0)
{ }
_CellNames = new Dictionary<string, string>();
_HyphenIsNaEDA = (int)Hyphen.IsNaEDA;
_HyphenIsDummy = (int)Hyphen.IsDummy;
_HyphenIsXToAPC = (int)Hyphen.IsXToAPC;
_HyphenIsXToIQSSi = (int)Hyphen.IsXToIQSSi;
_HyphenIsXToSPaCe = (int)Hyphen.IsXToSPaCe;
_HyphenIsXToOpenInsight = (int)Hyphen.IsXToOpenInsight;
_EventNameFileReadDaily = string.Concat(_EventNameFileRead, "Daily");
_IqsFile = GetPropertyValue(cellInstanceConnectionName, modelObjectParameters, "IQS.File");
_MemoryPath = GetPropertyValue(cellInstanceConnectionName, modelObjectParameters, "Path.Memory");
_HyphenIsXToOpenInsightMetrologyViewer = (int)Hyphen.IsXToOpenInsightMetrologyViewer;
_HyphenIsXToOpenInsightMetrologyViewerAttachments = (int)Hyphen.IsXToOpenInsightMetrologyViewerAttachments;
_OpenInsightFilePattern = GetPropertyValue(cellInstanceConnectionName, modelObjectParameters, "OpenInsight.FilePattern");
_OpenInsightMetrologyViewerAPI = GetPropertyValue(cellInstanceConnectionName, modelObjectParameters, "OpenInsight.MetrologyViewerAPI");
ModelObjectParameterDefinition[] cellInstanceCollection = GetProperties(cellInstanceConnectionName, modelObjectParameters, "CellInstance.", ".Path");
foreach (ModelObjectParameterDefinition modelObjectParameterDefinition in cellInstanceCollection)
_CellNames.Add(modelObjectParameterDefinition.Name.Split('.')[1], modelObjectParameterDefinition.Value);
if (_Hyphens == _HyphenIsDummy)
{
if (Debugger.IsAttached || fileConnectorConfiguration.PreProcessingMode == FileConnectorConfiguration.PreProcessingModeEnum.Process)
{
_Timer = new Timer(Callback, null, Timeout.Infinite, Timeout.Infinite);
Callback(null);
}
else
{
int milliSeconds;
milliSeconds = (int)((fileConnectorConfiguration.FileScanningIntervalInSeconds * 1000) / 2);
_Timer = new Timer(Callback, null, milliSeconds, Timeout.Infinite);
milliSeconds += 2000;
}
}
}
void IFileRead.Move(Tuple<string, Test[], JsonElement[], List<FileInfo>> extractResults, Exception exception)
{
Move(this, extractResults, exception);
}
void IFileRead.WaitForThread()
{
WaitForThread(thread: null, threadExceptions: null);
}
string IFileRead.GetEventDescription()
{
string result = _Description.GetEventDescription();
return result;
}
List<string> IFileRead.GetHeaderNames()
{
List<string> results = _Description.GetHeaderNames();
return results;
}
string[] IFileRead.Move(Tuple<string, Test[], JsonElement[], List<FileInfo>> extractResults, string to, string from, string resolvedFileLocation, Exception exception)
{
string[] results = Move(extractResults, to, from, resolvedFileLocation, exception);
return results;
}
JsonProperty[] IFileRead.GetDefault()
{
JsonProperty[] results = _Description.GetDefault(this, _Logistics);
return results;
}
Dictionary<string, string> IFileRead.GetDisplayNamesJsonElement()
{
Dictionary<string, string> results = _Description.GetDisplayNamesJsonElement(this);
return results;
}
List<IDescription> IFileRead.GetDescriptions(IFileRead fileRead, List<Test> tests, IProcessData processData)
{
List<IDescription> results = _Description.GetDescriptions(fileRead, _Logistics, tests, processData);
return results;
}
Tuple<string, Test[], JsonElement[], List<FileInfo>> IFileRead.GetExtractResult(string reportFullPath, string eventName)
{
Tuple<string, Test[], JsonElement[], List<FileInfo>> results;
if (string.IsNullOrEmpty(eventName))
throw new Exception();
_ReportFullPath = reportFullPath;
DateTime dateTime = DateTime.Now;
results = GetExtractResult(reportFullPath, dateTime);
if (results.Item3 is null)
results = new Tuple<string, Test[], JsonElement[], List<FileInfo>>(results.Item1, new Test[] { }, JsonSerializer.Deserialize<JsonElement[]>("[]"), results.Item4);
if (results.Item3.Length > 0 && _IsEAFHosted)
WritePDSF(this, results.Item3);
UpdateLastTicksDuration(DateTime.Now.Ticks - dateTime.Ticks);
return results;
}
Tuple<string, Test[], JsonElement[], List<FileInfo>> IFileRead.ReExtract()
{
Tuple<string, Test[], JsonElement[], List<FileInfo>> results;
List<string> headerNames = _Description.GetHeaderNames();
Dictionary<string, string> keyValuePairs = _Description.GetDisplayNamesJsonElement(this);
results = ReExtract(this, headerNames, keyValuePairs);
return results;
}
void IFileRead.CheckTests(Test[] tests, bool extra)
{
if (!(_Description is Description))
throw new Exception();
}
void IFileRead.Callback(object state)
{
Callback(state);
}
void IFileRead.MoveArchive()
{
string logisticsSequence = _Logistics.Sequence.ToString();
string weekOfYear = _Calendar.GetWeekOfYear(_Logistics.DateTimeFromSequence, CalendarWeekRule.FirstDay, DayOfWeek.Sunday).ToString("00");
string weekDirectory = string.Concat(_Logistics.DateTimeFromSequence.ToString("yyyy"), "_Week_", weekOfYear, @"\", _Logistics.DateTimeFromSequence.ToString("yyyy-MM-dd"));
string jobIdDirectory = string.Concat(_FileConnectorConfiguration.TargetFileLocation, @"\", _Logistics.JobID);
if (!Directory.Exists(jobIdDirectory))
Directory.CreateDirectory(jobIdDirectory);
//string destinationArchiveDirectory = string.Concat(jobIdDirectory, @"\!Archive\", weekDirectory);
string destinationArchiveDirectory = string.Concat(Path.GetDirectoryName(_FileConnectorConfiguration.TargetFileLocation), @"\Archive\", _Logistics.JobID, @"\", weekDirectory);
if (!Directory.Exists(destinationArchiveDirectory))
Directory.CreateDirectory(destinationArchiveDirectory);
string[] matchDirectories = new string[] { GetDirectoriesRecursively(jobIdDirectory, logisticsSequence).FirstOrDefault() };
if ((matchDirectories is null) || matchDirectories.Length != 1)
throw new Exception("Didn't find directory by logistics sequence");
string sourceDirectory = Path.GetDirectoryName(matchDirectories[0]);
destinationArchiveDirectory = string.Concat(destinationArchiveDirectory, @"\", Path.GetFileName(sourceDirectory));
Directory.Move(sourceDirectory, destinationArchiveDirectory);
}
protected List<Stratus.Description> GetDescriptions(JsonElement[] jsonElements)
{
List<Stratus.Description> results = new();
Stratus.Description description;
JsonSerializerOptions jsonSerializerOptions = new() { NumberHandling = JsonNumberHandling.AllowReadingFromString | JsonNumberHandling.WriteAsString };
foreach (JsonElement jsonElement in jsonElements)
{
if (jsonElement.ValueKind != JsonValueKind.Object)
throw new Exception();
description = JsonSerializer.Deserialize<Stratus.Description>(jsonElement.ToString(), jsonSerializerOptions);
results.Add(description);
}
return results;
}
private Tuple<string, Test[], JsonElement[], List<FileInfo>> GetExtractResult(string reportFullPath, DateTime dateTime)
{
Tuple<string, Test[], JsonElement[], List<FileInfo>> results;
string duplicateDirectory;
Tuple<string, string[], string[]> pdsf = ProcessDataStandardFormat.GetLogisticsColumnsAndBody(reportFullPath);
_Logistics = new Logistics(reportFullPath, pdsf.Item1);
SetFileParameterLotIDToLogisticsMID();
JsonElement[] jsonElements = ProcessDataStandardFormat.GetArray(pdsf);
List<Stratus.Description> descriptions = GetDescriptions(jsonElements);
Tuple<Test[], Dictionary<Test, List<Shared.Properties.IDescription>>> tuple = GetTuple(this, from l in descriptions select (Shared.Properties.IDescription)l, extra: false);
results = new Tuple<string, Test[], JsonElement[], List<FileInfo>>(pdsf.Item1, tuple.Item1, jsonElements, new List<FileInfo>());
bool isNotUsedInsightMetrologyViewerAttachments = (!(_FileConnectorConfiguration.FileScanningIntervalInSeconds > 0) && _Hyphens == _HyphenIsXToOpenInsightMetrologyViewerAttachments);
bool isDummyRun = (_DummyRuns.Any() && _DummyRuns.ContainsKey(_Logistics.JobID) && _DummyRuns[_Logistics.JobID].Any() && (from l in _DummyRuns[_Logistics.JobID] where l == _Logistics.Sequence select 1).Any());
if (isDummyRun)
{
try
{ File.SetLastWriteTime(reportFullPath, dateTime); }
catch (Exception) { }
}
string[] segments = Path.GetFileNameWithoutExtension(reportFullPath).Split('_');
if (_Hyphens != _HyphenIsXToOpenInsight)
duplicateDirectory = string.Concat(_FileConnectorConfiguration.TargetFileLocation, @"\", segments[0]);
else
duplicateDirectory = string.Concat(Path.GetDirectoryName(Path.GetDirectoryName(_FileConnectorConfiguration.TargetFileLocation)), @"\Data");
if (segments.Length > 2)
duplicateDirectory = string.Concat(duplicateDirectory, @"-", segments[2]);
if (!Directory.Exists(duplicateDirectory))
Directory.CreateDirectory(duplicateDirectory);
if ((isDummyRun || isNotUsedInsightMetrologyViewerAttachments || _FileConnectorConfiguration.FileScanningIntervalInSeconds > 0) && _Hyphens != _HyphenIsXToArchive && _Hyphens != _HyphenIsArchive)
{
if (!Directory.Exists(duplicateDirectory))
Directory.CreateDirectory(duplicateDirectory);
string successDirectory;
if (_Hyphens != _HyphenIsXToAPC)
successDirectory = string.Empty;
else
{
successDirectory = string.Concat(Path.GetDirectoryName(_FileConnectorConfiguration.TargetFileLocation), @"\ViewerPath");
if (!Directory.Exists(successDirectory))
Directory.CreateDirectory(successDirectory);
}
List<Tuple<Shared.Properties.IScopeInfo, string>> tuples = new();
string duplicateFile = string.Concat(duplicateDirectory, @"\", Path.GetFileName(reportFullPath));
string weekOfYear = _Calendar.GetWeekOfYear(_Logistics.DateTimeFromSequence, CalendarWeekRule.FirstDay, DayOfWeek.Sunday).ToString("00");
string weekDirectory = string.Concat(_Logistics.DateTimeFromSequence.ToString("yyyy"), "_Week_", weekOfYear, @"\", _Logistics.DateTimeFromSequence.ToString("yyyy-MM-dd"));
string logisticsSequenceMemoryDirectory = string.Concat(_MemoryPath, @"\", _EquipmentType, @"\Source\", weekDirectory, @"\", _Logistics.Sequence);
if (!Directory.Exists(logisticsSequenceMemoryDirectory))
Directory.CreateDirectory(logisticsSequenceMemoryDirectory);
if (_Hyphens == _HyphenIsXToAPC)
{
if (!isDummyRun && _IsEAFHosted)
File.Copy(reportFullPath, duplicateFile, overwrite: true);
}
else
{
if (_Hyphens == _HyphenIsXToOpenInsightMetrologyViewer)
{
WSRequest wsRequest = new(this, _Logistics, descriptions);
if (!isDummyRun && _IsEAFHosted)
{
Tuple<string, WS.Results> wsResults = WS.SendData(_OpenInsightMetrologyViewerAPI, wsRequest);
if (!wsResults.Item2.Success)
throw new Exception(wsResults.ToString());
_Log.Debug(wsResults.Item2.HeaderID);
File.WriteAllText(string.Concat(logisticsSequenceMemoryDirectory, @"\", nameof(WS.Results), ".json"), wsResults.Item1);
}
}
else
{
Test test;
string lines;
Shared.Properties.IScopeInfo scopeInfo;
foreach (KeyValuePair<Test, List<Shared.Properties.IDescription>> keyValuePair in tuple.Item2)
{
test = keyValuePair.Key;
//scopeInfo = new ScopeInfo(test);
if (_Hyphens != _HyphenIsXToOpenInsight)
scopeInfo = new ScopeInfo(test, _IqsFile);
else
scopeInfo = new ScopeInfo(test, _OpenInsightFilePattern);
//lines = ProcessDataStandardFormat.GetLines(this, scopeInfo, names, values, dateFormat: "M/d/yyyy hh:mm:ss tt", timeFormat: string.Empty, pairedColumns: ExtractResultPairedColumns);
lines = ProcessData.GetLines(this, _Logistics, descriptions);
tuples.Add(new Tuple<Shared.Properties.IScopeInfo, string>(scopeInfo, lines));
}
}
if (_Hyphens == _HyphenIsXToOpenInsightMetrologyViewerAttachments)
{
string[] matchDirectories = Shared1567(reportFullPath, tuples);
if (!isDummyRun && _IsEAFHosted && !isNotUsedInsightMetrologyViewerAttachments)
ProcessData.PostOpenInsightMetrologyViewerAttachments(this, _Logistics, _OpenInsightMetrologyViewerAPI, _OriginalDataBioRad, dateTime, logisticsSequenceMemoryDirectory, descriptions, matchDirectories[0]);
}
}
if (_Hyphens != _HyphenIsXToOpenInsightMetrologyViewer && _Hyphens != _HyphenIsXToOpenInsightMetrologyViewerAttachments)
Shared0413(dateTime, isDummyRun, successDirectory, duplicateDirectory, tuples, duplicateFile);
}
if (_Hyphens == _HyphenIsXToOpenInsightMetrologyViewerAttachments)
{
string destinationDirectory;
//string destinationDirectory = WriteScopeInfo(_ProgressPath, _Logistics, dateTime, duplicateDirectory, tuples);
FileInfo fileInfo = new(reportFullPath);
string logisticsSequence = _Logistics.Sequence.ToString();
if (fileInfo.Exists && fileInfo.LastWriteTime < fileInfo.CreationTime)
File.SetLastWriteTime(reportFullPath, fileInfo.CreationTime);
string jobIdDirectory = string.Concat(Path.GetDirectoryName(Path.GetDirectoryName(_FileConnectorConfiguration.TargetFileLocation)), @"\", _Logistics.JobID);
if (!Directory.Exists(jobIdDirectory))
Directory.CreateDirectory(jobIdDirectory);
string[] matchDirectories;
if (!_IsEAFHosted)
matchDirectories = new string[] { Path.GetDirectoryName(Path.GetDirectoryName(reportFullPath)) };
else
matchDirectories = Directory.GetDirectories(jobIdDirectory, string.Concat(_Logistics.MID, '*', logisticsSequence, '*'), SearchOption.TopDirectoryOnly);
if ((matchDirectories is null) || matchDirectories.Length != 1)
throw new Exception("Didn't find directory by logistics sequence");
destinationDirectory = matchDirectories[0];
if (isDummyRun)
Shared0607(reportFullPath, duplicateDirectory, logisticsSequence, destinationDirectory);
else
{
WSRequest wsRequest = new(this, _Logistics, descriptions);
JsonSerializerOptions jsonSerializerOptions = new() { WriteIndented = true };
string json = JsonSerializer.Serialize(wsRequest, wsRequest.GetType(), jsonSerializerOptions);
if (_IsEAFHosted)
Shared1277(reportFullPath, destinationDirectory, logisticsSequence, jobIdDirectory, json);
else
{
string jsonFileName = Path.ChangeExtension(reportFullPath, ".json");
string historicalText = File.ReadAllText(jsonFileName);
if (json != historicalText)
throw new Exception("File doesn't match historical!");
}
}
}
return results;
}
private void CallbackIsDummy(string traceDummyFile, List<Tuple<string, string, string, string, int>> tuples, bool fileConnectorConfigurationIncludeSubDirectories, bool includeSubDirectoriesExtra)
{
int fileCount;
string[] files;
string monARessource;
string checkDirectory;
string sourceArchiveFile;
string inProcessDirectory;
const string site = "sjc";
string stateName = string.Concat("Dummy_", _EventName);
const string monInURL = "http://moninhttp.sjc.infineon.com/input/text";
MonIn monIn = MonIn.GetInstance(monInURL);
foreach (Tuple<string, string, string, string, int> item in tuples)
{
monARessource = item.Item1;
sourceArchiveFile = item.Item2;
inProcessDirectory = item.Item3;
checkDirectory = item.Item4;
fileCount = item.Item5;
try
{
if (fileCount > 0 || string.IsNullOrEmpty(checkDirectory))
{
File.AppendAllLines(traceDummyFile, new string[] { site, monARessource, stateName, State.Warning.ToString() });
monIn.SendStatus(site, monARessource, stateName, State.Warning);
for (int i = 1; i < 12; i++)
Thread.Sleep(500);
}
else if (inProcessDirectory == checkDirectory)
continue;
if (!_IsEAFHosted)
continue;
if (!File.Exists(sourceArchiveFile))
continue;
if (!long.TryParse(Path.GetFileNameWithoutExtension(sourceArchiveFile).Replace("x", string.Empty), out long sequence))
continue;
ZipFile.ExtractToDirectory(sourceArchiveFile, inProcessDirectory);
if (fileConnectorConfigurationIncludeSubDirectories && includeSubDirectoriesExtra)
{
if (_EventName == _EventNameFileRead)
checkDirectory = string.Concat(checkDirectory, @"\", sequence);
else if (_EventName == _EventNameFileReadDaily)
checkDirectory = string.Concat(checkDirectory, @"\Source\", sequence);
else
throw new Exception();
}
if (fileConnectorConfigurationIncludeSubDirectories)
files = Directory.GetFiles(inProcessDirectory, "*", SearchOption.AllDirectories);
else
files = Directory.GetFiles(inProcessDirectory, "*", SearchOption.TopDirectoryOnly);
if (files.Length > 250)
throw new Exception("Safety net!");
foreach (string file in files)
File.SetLastWriteTime(file, new DateTime(sequence));
if (!fileConnectorConfigurationIncludeSubDirectories)
{
foreach (string file in files)
File.Move(file, string.Concat(checkDirectory, @"\", Path.GetFileName(file)));
}
else
{
string[] directories = Directory.GetDirectories(inProcessDirectory, "*", SearchOption.AllDirectories);
foreach (string directory in directories)
Directory.CreateDirectory(string.Concat(checkDirectory, directory.Substring(inProcessDirectory.Length)));
foreach (string file in files)
File.Move(file, string.Concat(checkDirectory, file.Substring(inProcessDirectory.Length)));
}
File.AppendAllLines(traceDummyFile, new string[] { site, monARessource, stateName, State.Ok.ToString() });
monIn.SendStatus(site, monARessource, stateName, State.Ok);
}
catch (Exception exception)
{
string subject = string.Concat("Exception:", _CellInstanceConnectionName);
string body = string.Concat(exception.Message, Environment.NewLine, Environment.NewLine, exception.StackTrace);
try
{ _SMTP.SendHighPriorityEmailMessage(subject, body); }
catch (Exception) { }
File.AppendAllLines(traceDummyFile, new string[] { site, monARessource, stateName, State.Critical.ToString(), exception.Message, exception.StackTrace });
monIn.SendStatus(site, monARessource, stateName, State.Critical);
}
}
}
private void Callback(object state)
{
if (_Hyphens != _HyphenIsDummy)
throw new Exception();
try
{
DateTime dateTime = DateTime.Now;
bool check = (dateTime.Hour > 7 && dateTime.Hour < 18 && dateTime.DayOfWeek != DayOfWeek.Sunday && dateTime.DayOfWeek != DayOfWeek.Saturday);
if (check)
{
int fileCount;
string[] files;
string monARessource;
string checkDirectory;
string sourceArchiveFile;
string sourceFileLocation;
string inProcessDirectory;
string weekOfYear = _Calendar.GetWeekOfYear(dateTime, CalendarWeekRule.FirstDay, DayOfWeek.Sunday).ToString("00");
string traceDummyDirectory = string.Concat(Path.GetPathRoot(_TracePath), @"\TracesDummy\", _CellInstanceName, @"\Source\", dateTime.ToString("yyyy"), "___Week_", weekOfYear);
if (!Directory.Exists(traceDummyDirectory))
Directory.CreateDirectory(traceDummyDirectory);
string traceDummyFile = string.Concat(traceDummyDirectory, @"\", dateTime.Ticks, " - ", _CellInstanceName, ".txt");
File.AppendAllText(traceDummyFile, string.Empty);
List<Tuple<string, string, string, string, int>> tuples = new();
string progressDirectory = Path.GetFullPath(string.Concat(_FileConnectorConfiguration.SourceFileLocation, @"\_ Progress"));
if (progressDirectory != _ProgressPath || !Directory.Exists(progressDirectory))
throw new Exception("Invalid progress path");
foreach (KeyValuePair<string, string> keyValuePair in _CellNames)
{
monARessource = keyValuePair.Key;
if (!keyValuePair.Value.Contains(@"\"))
continue;
foreach (string sourceFileFilter in _FileConnectorConfiguration.SourceFileFilter.Split('|'))
{
if (sourceFileFilter.ToLower().StartsWith(keyValuePair.Value.Replace(@"\", string.Empty)))
sourceFileLocation = Path.GetFullPath(_FileConnectorConfiguration.SourceFileLocation);
else if (_FileConnectorConfiguration.SourceFileLocation.ToLower().EndsWith(keyValuePair.Value))
sourceFileLocation = Path.GetFullPath(_FileConnectorConfiguration.SourceFileLocation);
else
sourceFileLocation = Path.GetFullPath(string.Concat(_FileConnectorConfiguration.SourceFileLocation, @"\", keyValuePair.Value));
sourceArchiveFile = Path.GetFullPath(string.Concat(sourceFileLocation, @"\", sourceFileFilter));
if (!File.Exists(sourceArchiveFile))
continue;
if (!_DummyRuns.ContainsKey(monARessource))
_DummyRuns.Add(monARessource, new List<long>());
tuples.Add(new Tuple<string, string, string, string, int>(monARessource, sourceFileFilter, sourceFileLocation, sourceArchiveFile, 0));
}
}
File.AppendAllLines(traceDummyFile, from l in tuples select l.Item4);
if (tuples.Any())
{
_LastDummyRunIndex += 1;
if (_LastDummyRunIndex >= tuples.Count)
_LastDummyRunIndex = 0;
monARessource = tuples[_LastDummyRunIndex].Item1;
string sourceFileFilter = tuples[_LastDummyRunIndex].Item2;
sourceFileLocation = tuples[_LastDummyRunIndex].Item3;
sourceArchiveFile = tuples[_LastDummyRunIndex].Item4;
//fileCount = tuples[_LastDummyRunIndex].Item5;
tuples.Clear();
if (long.TryParse(Path.GetFileNameWithoutExtension(sourceArchiveFile).Replace("x", string.Empty), out long sequence))
{
if (!_DummyRuns[monARessource].Contains(sequence))
_DummyRuns[monARessource].Add(sequence);
inProcessDirectory = string.Concat(progressDirectory, @"\Dummy_in process\", sequence);
checkDirectory = inProcessDirectory;
if (!Directory.Exists(checkDirectory))
Directory.CreateDirectory(checkDirectory);
files = Directory.GetFiles(checkDirectory, "*", SearchOption.AllDirectories);
fileCount = files.Length;
if (files.Any())
{
if (files.Length > 250)
throw new Exception("Safety net!");
try
{
foreach (string file in files)
File.Delete(file);
}
catch (Exception) { }
}
tuples.Add(new Tuple<string, string, string, string, int>(monARessource, sourceArchiveFile, inProcessDirectory, checkDirectory, fileCount));
checkDirectory = sourceFileLocation;
files = Directory.GetFiles(checkDirectory, string.Concat("*", sequence, "*"), SearchOption.TopDirectoryOnly);
fileCount = files.Length;
tuples.Add(new Tuple<string, string, string, string, int>(monARessource, sourceArchiveFile, inProcessDirectory, checkDirectory, fileCount));
}
}
if (tuples.Any())
//CallbackIsDummy(traceDummyFile, tuples, FileConnectorConfiguration.IncludeSubDirectories.Value, includeSubDirectoriesExtra: false);
CallbackIsDummy(traceDummyFile, tuples, fileConnectorConfigurationIncludeSubDirectories: true, includeSubDirectoriesExtra: true);
}
}
catch (Exception exception)
{
string subject = string.Concat("Exception:", _CellInstanceConnectionName);
string body = string.Concat(exception.Message, Environment.NewLine, Environment.NewLine, exception.StackTrace);
try
{ _SMTP.SendHighPriorityEmailMessage(subject, body); }
catch (Exception) { }
}
try
{
TimeSpan timeSpan = new(DateTime.Now.AddSeconds(_FileConnectorConfiguration.FileScanningIntervalInSeconds.Value).Ticks - DateTime.Now.Ticks);
_Timer.Change((long)timeSpan.TotalMilliseconds, Timeout.Infinite);
}
catch (Exception exception)
{
string subject = string.Concat("Exception:", _CellInstanceConnectionName);
string body = string.Concat(exception.Message, Environment.NewLine, Environment.NewLine, exception.StackTrace);
try
{ _SMTP.SendHighPriorityEmailMessage(subject, body); }
catch (Exception) { }
}
}
}
}

View File

@ -0,0 +1,18 @@
namespace Adaptation.FileHandlers.MET08THFTIRQS408M
{
public enum Hyphen
{
IsXToOpenInsightMetrologyViewer, //MetrologyWS.SendData(file, string.Concat("http://", serverName, "/api/inbound/StratusBioRad"), headerAttachments);
IsXToIQSSi, //NA <d7p1:FileScanningIntervalInSeconds>-361</d7p1:FileScanningIntervalInSeconds>
IsXToOpenInsight, //bool WriteFileOpenInsight(StratusBioRadFile
IsXToOpenInsightMetrologyViewerAttachments, //Site-One
IsXToAPC,
IsXToSPaCe,
IsXToArchive,
IsArchive,
IsDummy,
IsNaEDA
}
}

View File

@ -0,0 +1,97 @@
using Adaptation.Shared;
using Adaptation.Shared.Metrology;
using Adaptation.Shared.Properties;
using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Text;
using System.Text.Json;
namespace Adaptation.FileHandlers.MET08THFTIRQS408M
{
public class ProcessData
{
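// (interval, Hyphen, path pattern) tuples; Item1 looks like a per-destination scan interval (negative values
// appearing to mean not applicable, per the IsXToIQSSi comment in the Hyphen enum) and '~' in the paths appears
// to be a placeholder for the cell name. Both readings are assumptions inferred from nearby code, not confirmed here.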
internal static List<Tuple<int, Enum, string>> HyphenTuples => new()
{
new Tuple<int, Enum, string>(0, Hyphen.IsNaEDA, @"\EC_EDA\Staging\Traces\~\Source"),
new Tuple<int, Enum, string>(15, Hyphen.IsXToOpenInsightMetrologyViewer, @"\EC_EAFLog\TracesMES\~\Source"),
new Tuple<int, Enum, string>(-36, Hyphen.IsXToIQSSi, @"\EC_SPC_Si\Traces\~\PollPath"),
new Tuple<int, Enum, string>(36, Hyphen.IsXToOpenInsight, @"\\messa01ec.ec.local\APPS\Metrology\~\Source"),
new Tuple<int, Enum, string>(36, Hyphen.IsXToOpenInsightMetrologyViewerAttachments, @"\EC_Characterization_Si\In Process\~\Source"),
new Tuple<int, Enum, string>(360, Hyphen.IsXToAPC, @"\EC_APC\Staging\Traces\~\PollPath"),
new Tuple<int, Enum, string>(-36, Hyphen.IsXToSPaCe, @"\EC_SPC_Si\Traces\~\Source"),
new Tuple<int, Enum, string>(180, Hyphen.IsXToArchive, @"\EC_EAFLog\TracesArchive\~\Source"),
new Tuple<int, Enum, string>(36, Hyphen.IsArchive, @"\EC_Characterization_Si\Processed")
//new Tuple<int, Enum, string>("IsDummy"
};
internal static string GetLines(IFileRead fileRead, Logistics logistics, List<Stratus.Description> descriptions)
{
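// Builds a single tab-delimited line: Stratus_<MID>_<yyyyMMddhhmmssfff>, Date, JobID, "FQA Thickness", Employee,
// Recipe, Reactor, RDS, PSN, Lot, Cassette, MeanThickness, followed by a Slot and Mean pair for every description.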
StringBuilder results = new();
if (fileRead is null)
{ }
Stratus.Description x = descriptions[0];
results.Append("Stratus_").Append(logistics.MID).Append('_').Append(logistics.DateTimeFromSequence.ToString("yyyyMMddhhmmssfff")).Append('\t').
Append(x.Date).Append('\t').
Append(logistics.JobID).Append('\t').
Append("FQA Thickness").Append('\t').
Append(x.Employee).Append('\t').
Append(x.Recipe).Append('\t').
Append(x.Reactor).Append('\t').
Append(x.RDS).Append('\t').
Append(x.PSN).Append('\t').
Append(x.Lot).Append('\t').
Append(x.Cassette).Append('\t').
Append(x.MeanThickness);
for (int i = 0; i < descriptions.Count; i++)
results.Append('\t').Append(descriptions[i].Slot).Append('\t').Append(descriptions[i].Mean);
return results.ToString();
}
internal static void PostOpenInsightMetrologyViewerAttachments(IFileRead fileRead, Logistics logistics, string openInsightMetrologyViewerAPI, string originalDataBioRad, DateTime dateTime, string logisticsSequenceMemoryDirectory, List<Stratus.Description> descriptions, string matchDirectory)
{
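// Reads the cached WS.Results JSON to recover the header ID, renders the remaining non-empty lines (skipping the
// first) of the single matching <originalDataBioRad>*.txt file onto a one-page PDF via PDFBox, and attaches the
// PDF to that header through WS.AttachFiles.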
if (fileRead is null)
{ }
if (dateTime == DateTime.MinValue)
{ }
string wsResultsMemoryFile = string.Concat(logisticsSequenceMemoryDirectory, @"\", nameof(WS.Results), ".json");
if (!File.Exists(wsResultsMemoryFile))
throw new Exception(string.Concat("Memory file <", wsResultsMemoryFile, "> doesn't exist!"));
string json = File.ReadAllText(wsResultsMemoryFile);
WS.Results metrologyWSRequest = JsonSerializer.Deserialize<WS.Results>(json);
long wsResultsHeaderID = metrologyWSRequest.HeaderID;
string dataPDFFile = Path.Combine(matchDirectory, $"{wsResultsHeaderID}.pdf");
string[] txtFiles = Directory.GetFiles(matchDirectory, string.Concat(originalDataBioRad, "*.txt"), SearchOption.TopDirectoryOnly);
if (txtFiles.Length != 1)
throw new Exception("Invalid source file count!");
string[] lines = File.ReadAllLines(txtFiles[0]);
lines = (from l in lines where !string.IsNullOrEmpty(l) select l).ToArray();
if (lines.Length > 1)
{
org.apache.pdfbox.pdmodel.PDDocument pdDocument = new();
org.apache.pdfbox.pdmodel.PDPage pdPage = new();
pdDocument.addPage(pdPage);
org.apache.pdfbox.pdmodel.edit.PDPageContentStream pdPageContentStream = new(pdDocument, pdPage);
org.apache.pdfbox.pdmodel.font.PDFont pdFont = org.apache.pdfbox.pdmodel.font.PDType1Font.HELVETICA;
pdPageContentStream.setFont(pdFont, 16);
for (int i = 1; i < lines.Length; i++)
{
pdPageContentStream.beginText();
pdPageContentStream.moveTextPositionByAmount(16, 750 - (i * 16));
pdPageContentStream.drawString(lines[i]);
pdPageContentStream.endText();
}
pdPageContentStream.close();
pdDocument.save(dataPDFFile);
pdDocument.close();
List<WS.Attachment> headerAttachments = new() { new WS.Attachment(descriptions[0].HeaderUniqueId, "Data.pdf", dataPDFFile) };
WS.AttachFiles(openInsightMetrologyViewerAPI, wsResultsHeaderID, headerAttachments, dataAttachments: null);
}
}
}
}

View File

@ -0,0 +1,93 @@
using Adaptation.Shared;
using Adaptation.Shared.Properties;
using System;
using System.Collections.Generic;
using System.Linq;
namespace Adaptation.FileHandlers.MET08THFTIRQS408M
{
public class WSRequest
{
public long Id { get; set; }
public string Batch { get; set; }
public string Cassette { get; set; }
public string CellName { get; set; }
public string Date { get; set; }
public string FilePath { get; set; }
public string MeanThickness { get; set; }
public string Operator { get; set; }
public string PSN { get; set; }
public string RDS { get; set; }
public string Reactor { get; set; }
public string Recipe { get; set; }
public string StdDev { get; set; }
public string Title { get; set; }
public string UniqueId { get; set; }
public List<Stratus.Detail> Details { get; protected set; }
[Obsolete("For json")] public WSRequest() { }
internal WSRequest(IFileRead fileRead, Logistics logistics, List<Stratus.Description> descriptions)
{
Id = 0;
FilePath = string.Empty;
if (fileRead is null)
{ }
CellName = logistics.MesEntity;
if (descriptions[0] is not Stratus.Description x)
throw new Exception();
Details = new List<Stratus.Detail>();
//Header
{
Batch = x.Lot;
Cassette = x.Cassette;
Date = x.Date;
MeanThickness = x.MeanThickness;
Operator = x.Employee;
PSN = x.PSN;
RDS = x.RDS;
Reactor = x.Reactor;
Recipe = x.Recipe;
StdDev = x.GradeStdDev;
Title = x.Title;
UniqueId = x.UniqueId;
}
string[] segments;
Stratus.Detail detail;
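// One Detail per description; the comma-separated Position and Thickness strings are expanded into matching
// per-point entries, and the two counts must agree.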
foreach (Stratus.Description description in descriptions)
{
detail = new Stratus.Detail
{
HeaderUniqueId = description.HeaderUniqueId,
Mean = description.Mean,
PassFail = description.PassFail,
Position = description.Position,
Recipe = description.Recipe,
Slot = description.Slot,
StdDev = description.StdDev,
Thickness = description.Thickness,
UniqueId = description.UniqueId,
Wafer = description.Wafer,
};
detail.Points = new();
segments = description.Position.Split(',');
foreach (string segment in segments)
detail.Points.Add(new Stratus.Point { HeaderUniqueId = description.HeaderUniqueId, UniqueId = description.UniqueId, Position = segment });
segments = description.Thickness.Split(',');
if (detail.Points.Count != segments.Length)
throw new Exception();
for (int i = 0; i < detail.Points.Count; i++)
detail.Points[i].Thickness = segments[i];
Details.Add(detail);
}
if (Date is null)
Date = logistics.DateTimeFromSequence.ToString();
if (UniqueId is null && Details.Any())
UniqueId = Details[0].HeaderUniqueId;
}
}
}

View File

@ -0,0 +1,276 @@
using Adaptation.Shared;
using Adaptation.Shared.Methods;
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text.Json;
namespace Adaptation.FileHandlers.Stratus
{
public class Description : IDescription, Shared.Properties.IDescription
{
public int Test { get; set; }
public int Count { get; set; }
public int Index { get; set; }
//
public string EventName { get; set; }
public string NullData { get; set; }
public string JobID { get; set; }
public string Sequence { get; set; }
public string MesEntity { get; set; }
public string ReportFullPath { get; set; }
public string ProcessJobID { get; set; }
public string MID { get; set; }
//
public string Date { get; set; }
public string Employee { get; set; }
public string Lot { get; set; }
public string PSN { get; set; }
public string Reactor { get; set; }
public string Recipe { get; set; }
//
public string Cassette { get; set; }
public string GradeStdDev { get; set; }
public string HeaderUniqueId { get; set; }
public string MeanThickness { get; set; }
public string PassFail { get; set; }
public string RDS { get; set; }
public string Slot { get; set; }
public string Title { get; set; }
public string UniqueId { get; set; }
public string Wafer { get; set; }
//
public string Mean { get; set; }
public string Position { get; set; }
public string StdDev { get; set; }
public string Thickness { get; set; }
string IDescription.GetEventDescription()
{
return "File Has been read and parsed";
}
List<string> IDescription.GetNames(IFileRead fileRead, Logistics logistics)
{
List<string> results = new();
IDescription description = GetDefault(fileRead, logistics);
string json = JsonSerializer.Serialize(description, description.GetType());
object @object = JsonSerializer.Deserialize<object>(json);
if (@object is not JsonElement jsonElement)
throw new Exception();
foreach (JsonProperty jsonProperty in jsonElement.EnumerateObject())
results.Add(jsonProperty.Name);
return results;
}
List<string> IDescription.GetDetailNames()
{
List<string> results = new()
{
nameof(Cassette),
nameof(GradeStdDev),
nameof(HeaderUniqueId),
nameof(MeanThickness),
nameof(PassFail),
nameof(RDS),
nameof(Slot),
nameof(Title),
nameof(UniqueId),
nameof(Wafer)
};
return results;
}
List<string> IDescription.GetHeaderNames()
{
List<string> results = new()
{
nameof(Date),
nameof(Employee),
nameof(Lot),
nameof(PSN),
nameof(Reactor),
nameof(Recipe)
};
return results;
}
IDescription IDescription.GetDisplayNames()
{
Description result = GetDisplayNames();
return result;
}
List<string> IDescription.GetParameterNames()
{
List<string> results = new()
{
nameof(Mean),
nameof(Position),
nameof(StdDev),
nameof(Thickness)
};
return results;
}
JsonProperty[] IDescription.GetDefault(IFileRead fileRead, Logistics logistics)
{
JsonProperty[] results;
IDescription description = GetDefault(fileRead, logistics);
string json = JsonSerializer.Serialize(description, description.GetType());
object @object = JsonSerializer.Deserialize<object>(json);
results = ((JsonElement)@object).EnumerateObject().ToArray();
return results;
}
List<string> IDescription.GetPairedParameterNames()
{
List<string> results = new();
return results;
}
List<string> IDescription.GetIgnoreParameterNames(Test test)
{
List<string> results = new();
return results;
}
IDescription IDescription.GetDefaultDescription(IFileRead fileRead, Logistics logistics)
{
Description result = GetDefault(fileRead, logistics);
return result;
}
Dictionary<string, string> IDescription.GetDisplayNamesJsonElement(IFileRead fileRead)
{
Dictionary<string, string> results = new();
IDescription description = GetDisplayNames();
string json = JsonSerializer.Serialize(description, description.GetType());
JsonElement jsonElement = JsonSerializer.Deserialize<JsonElement>(json);
foreach (JsonProperty jsonProperty in jsonElement.EnumerateObject())
{
if (!results.ContainsKey(jsonProperty.Name))
results.Add(jsonProperty.Name, string.Empty);
if (jsonProperty.Value is JsonElement jsonPropertyValue)
results[jsonProperty.Name] = jsonPropertyValue.ToString();
}
return results;
}
List<IDescription> IDescription.GetDescriptions(IFileRead fileRead, Logistics logistics, List<Test> tests, IProcessData iProcessData)
{
List<IDescription> results = new();
if (iProcessData is null || !iProcessData.Details.Any() || iProcessData is not ProcessData processData)
results.Add(GetDefault(fileRead, logistics));
else
{
string nullData;
Description description;
object configDataNullData = fileRead.NullData;
if (configDataNullData is null)
nullData = string.Empty;
else
nullData = configDataNullData.ToString();
for (int i = 0; i < iProcessData.Details.Count; i++)
{
if (iProcessData.Details[i] is not Detail detail)
continue;
description = new Description
{
Test = (int)tests[i],
Count = tests.Count,
Index = i,
//
EventName = fileRead.EventName,
NullData = nullData,
JobID = fileRead.CellInstanceName,
Sequence = logistics.Sequence.ToString(),
MesEntity = logistics.MesEntity,
ReportFullPath = logistics.ReportFullPath,
ProcessJobID = logistics.ProcessJobID,
MID = logistics.MID,
//
Date = processData.Date,
Employee = processData.Employee,
Lot = processData.Batch,
PSN = processData.PSN,
Reactor = processData.Reactor,
Recipe = processData.Recipe,
//
Cassette = processData.Cassette,
GradeStdDev = processData.StdDev,
HeaderUniqueId = detail.HeaderUniqueId,
MeanThickness = processData.MeanThickness,
PassFail = detail.PassFail,
RDS = processData.RDS,
Slot = detail.Slot,
Title = processData.Title,
UniqueId = detail.UniqueId,
Wafer = detail.Wafer,
//
Mean = detail.Mean,
Position = detail.Position,
StdDev = detail.StdDev,
Thickness = detail.Thickness
};
results.Add(description);
}
}
return results;
}
private Description GetDisplayNames()
{
Description result = new();
return result;
}
private Description GetDefault(IFileRead fileRead, Logistics logistics)
{
Description result = new()
{
Test = -1,
Count = 0,
Index = -1,
//
EventName = fileRead.EventName,
NullData = fileRead.NullData,
JobID = fileRead.CellInstanceName,
Sequence = logistics.Sequence.ToString(),
MesEntity = fileRead.MesEntity,
ReportFullPath = logistics.ReportFullPath,
ProcessJobID = logistics.ProcessJobID,
MID = logistics.MID,
//
Date = nameof(Date),
Employee = nameof(Employee),
Lot = nameof(Lot),
PSN = nameof(PSN),
Reactor = nameof(Reactor),
Recipe = nameof(Recipe),
//
Cassette = nameof(Cassette),
GradeStdDev = nameof(GradeStdDev),
HeaderUniqueId = nameof(HeaderUniqueId),
MeanThickness = nameof(MeanThickness),
PassFail = nameof(PassFail),
RDS = nameof(RDS),
Slot = nameof(Slot),
Title = nameof(Title),
UniqueId = nameof(UniqueId),
Wafer = nameof(Wafer),
//
Mean = nameof(Mean),
Position = nameof(Position),
StdDev = nameof(StdDev),
Thickness = nameof(Thickness)
};
return result;
}
}
}

View File

@ -0,0 +1,23 @@
using System.Collections.Generic;
namespace Adaptation.FileHandlers.Stratus
{
public class Detail
{
public string HeaderUniqueId { get; set; }
public string Mean { get; set; }
public string PassFail { get; set; }
public string Position { get; set; }
public string Recipe { get; set; }
public string Slot { get; set; }
public string StdDev { get; set; }
public string UniqueId { get; set; }
public string Thickness { get; set; }
public string Wafer { get; set; }
public List<Point> Points { get; set; }
}
}

View File

@ -0,0 +1,148 @@
using Adaptation.Eaf.Management.ConfigurationData.CellAutomation;
using Adaptation.Ifx.Eaf.EquipmentConnector.File.Configuration;
using Adaptation.Shared;
using Adaptation.Shared.Methods;
using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Text.Json;
using System.Text.RegularExpressions;
namespace Adaptation.FileHandlers.Stratus
{
public class FileRead : Shared.FileRead, IFileRead
{
private readonly string _OriginalDataBioRad;
public FileRead(ISMTP smtp, Dictionary<string, string> fileParameter, string cellInstanceName, string cellInstanceConnectionName, FileConnectorConfiguration fileConnectorConfiguration, string equipmentTypeName, string parameterizedModelObjectDefinitionType, IList<ModelObjectParameterDefinition> modelObjectParameters, string equipmentDictionaryName, Dictionary<string, List<long>> dummyRuns, bool useCyclicalForDescription, bool isEAFHosted, int hyphenXToArchive, int hyphenIsArchive) :
base(new Description(), true, smtp, fileParameter, cellInstanceName, cellInstanceConnectionName, fileConnectorConfiguration, equipmentTypeName, parameterizedModelObjectDefinitionType, modelObjectParameters, equipmentDictionaryName, dummyRuns, useCyclicalForDescription, isEAFHosted, hyphenXToArchive, hyphenIsArchive)
{
_MinFileLength = 10;
_NullData = string.Empty;
_Logistics = new Logistics(this);
if (_FileParameter is null)
throw new Exception(cellInstanceConnectionName);
if (_ModelObjectParameterDefinitions is null)
throw new Exception(cellInstanceConnectionName);
if (_IsDuplicator)
throw new Exception(cellInstanceConnectionName);
_OriginalDataBioRad = "OriginalDataBioRad_";
}
void IFileRead.Move(Tuple<string, Test[], JsonElement[], List<FileInfo>> extractResults, Exception exception)
{
Move(this, extractResults, exception);
}
void IFileRead.WaitForThread()
{
WaitForThread(thread: null, threadExceptions: null);
}
string IFileRead.GetEventDescription()
{
string result = _Description.GetEventDescription();
return result;
}
List<string> IFileRead.GetHeaderNames()
{
List<string> results = _Description.GetHeaderNames();
return results;
}
string[] IFileRead.Move(Tuple<string, Test[], JsonElement[], List<FileInfo>> extractResults, string to, string from, string resolvedFileLocation, Exception exception)
{
string[] results = Move(extractResults, to, from, resolvedFileLocation, exception);
return results;
}
JsonProperty[] IFileRead.GetDefault()
{
JsonProperty[] results = _Description.GetDefault(this, _Logistics);
return results;
}
Dictionary<string, string> IFileRead.GetDisplayNamesJsonElement()
{
Dictionary<string, string> results = _Description.GetDisplayNamesJsonElement(this);
return results;
}
List<IDescription> IFileRead.GetDescriptions(IFileRead fileRead, List<Test> tests, IProcessData processData)
{
List<IDescription> results = _Description.GetDescriptions(fileRead, _Logistics, tests, processData);
return results;
}
Tuple<string, Test[], JsonElement[], List<FileInfo>> IFileRead.GetExtractResult(string reportFullPath, string eventName)
{
Tuple<string, Test[], JsonElement[], List<FileInfo>> results;
if (string.IsNullOrEmpty(eventName))
throw new Exception();
_ReportFullPath = reportFullPath;
DateTime dateTime = DateTime.Now;
results = GetExtractResult(reportFullPath, dateTime);
if (results.Item3 is null)
results = new Tuple<string, Test[], JsonElement[], List<FileInfo>>(results.Item1, new Test[] { }, JsonSerializer.Deserialize<JsonElement[]>("[]"), results.Item4);
if (results.Item3.Length > 0 && _IsEAFHosted)
WritePDSF(this, results.Item3);
UpdateLastTicksDuration(DateTime.Now.Ticks - dateTime.Ticks);
return results;
}
Tuple<string, Test[], JsonElement[], List<FileInfo>> IFileRead.ReExtract()
{
Tuple<string, Test[], JsonElement[], List<FileInfo>> results;
List<string> headerNames = _Description.GetHeaderNames();
Dictionary<string, string> keyValuePairs = _Description.GetDisplayNamesJsonElement(this);
results = ReExtract(this, headerNames, keyValuePairs);
return results;
}
void IFileRead.CheckTests(Test[] tests, bool extra)
{
throw new Exception(string.Concat("Not ", nameof(_IsDuplicator)));
}
void IFileRead.MoveArchive()
{
throw new Exception(string.Concat("Not ", nameof(_IsDuplicator)));
}
void IFileRead.Callback(object state)
{
throw new Exception(string.Concat("Not ", nameof(_IsDuplicator)));
}
private Tuple<string, Test[], JsonElement[], List<FileInfo>> GetExtractResult(string reportFullPath, DateTime dateTime)
{
Tuple<string, Test[], JsonElement[], List<FileInfo>> results = new(string.Empty, null, null, new List<FileInfo>());
_Logistics = new Logistics(this, reportFullPath, useSplitForMID: true);
SetFileParameterLotIDToLogisticsMID();
if (reportFullPath.Length < _MinFileLength)
results.Item4.Add(new FileInfo(reportFullPath));
else
{
IProcessData iProcessData = new ProcessData(this, _Logistics, results.Item4, _OriginalDataBioRad, dataText: string.Empty);
if (iProcessData is ProcessData processData)
{
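// MID becomes "<Reactor>-<RDS>-<PSN>" with filesystem-illegal characters replaced by '_' and anything after a line break dropped.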
string mid = string.Concat(processData.Reactor, "-", processData.RDS, "-", processData.PSN);
mid = Regex.Replace(mid, @"[\\,\/,\:,\*,\?,\"",\<,\>,\|]", "_").Split('\r')[0].Split('\n')[0];
_Logistics.MID = mid;
SetFileParameterLotID(mid);
_Logistics.ProcessJobID = processData.Reactor;
}
if (!iProcessData.Details.Any())
throw new Exception(string.Concat("No Data - ", dateTime.Ticks));
results = iProcessData.GetResults(this, _Logistics, results.Item4);
}
return results;
}
}
}

View File

@ -0,0 +1,19 @@
namespace Adaptation.FileHandlers.Stratus
{
public class Point
{
public string HeaderUniqueId { get; set; }
public string Position { get; set; }
public string Thickness { get; set; }
public string UniqueId { get; set; }
public override string ToString()
{
return string.Concat(Position, ";", Thickness, ";");
}
}
}

View File

@ -0,0 +1,467 @@
using Adaptation.Shared;
using Adaptation.Shared.Methods;
using log4net;
using System;
using System.Collections.Generic;
using System.Data;
using System.Globalization;
using System.IO;
using System.Linq;
using System.Text;
using System.Text.Json;
using System.Text.RegularExpressions;
namespace Adaptation.FileHandlers.Stratus
{
public partial class ProcessData : IProcessData
{
private readonly List<object> _Details;
public string JobID { get; set; }
public string MesEntity { get; set; }
public string Batch { get; set; }
public string Cassette { get; set; }
public string Date { get; set; }
public string FilePath { get; set; }
public string MeanThickness { get; set; }
public string Employee { get; set; }
public string PSN { get; set; }
public string RDS { get; set; }
public string Reactor { get; set; }
public string Recipe { get; set; }
public string StdDev { get; set; }
public string Title { get; set; }
public string UniqueId { get; set; }
List<object> Shared.Properties.IProcessData.Details => _Details;
private int _I;
private string _Data;
private readonly ILog _Log;
public ProcessData(IFileRead fileRead, Logistics logistics, List<FileInfo> fileInfoCollection, string originalDataBioRad, string dataText)
{
JobID = logistics.JobID;
fileInfoCollection.Clear();
_Details = new List<object>();
MesEntity = logistics.MesEntity;
_Log = LogManager.GetLogger(typeof(ProcessData));
Parse(fileRead, logistics, fileInfoCollection, originalDataBioRad, dataText);
}
string IProcessData.GetCurrentReactor(IFileRead fileRead, Logistics logistics, Dictionary<string, string> reactors)
{
throw new Exception(string.Concat("See ", nameof(Parse)));
}
Tuple<string, Test[], JsonElement[], List<FileInfo>> IProcessData.GetResults(IFileRead fileRead, Logistics logistics, List<FileInfo> fileInfoCollection)
{
Tuple<string, Test[], JsonElement[], List<FileInfo>> results;
List<Test> tests = new();
foreach (object item in _Details)
tests.Add(Test.BioRadStratus);
List<IDescription> descriptions = fileRead.GetDescriptions(fileRead, tests, this);
if (tests.Count != descriptions.Count)
throw new Exception();
for (int i = 0; i < tests.Count; i++)
{
if (descriptions[i] is not Description description)
throw new Exception();
if (description.Test != (int)tests[i])
throw new Exception();
}
List<Description> fileReadDescriptions = (from l in descriptions select (Description)l).ToList();
string json = JsonSerializer.Serialize(fileReadDescriptions, fileReadDescriptions.GetType());
JsonElement[] jsonElements = JsonSerializer.Deserialize<JsonElement[]>(json);
results = new Tuple<string, Test[], JsonElement[], List<FileInfo>>(logistics.Logistics1[0], tests.ToArray(), jsonElements, fileInfoCollection);
return results;
}
private string GetBefore(string text)
{
string str;
string str1;
int num = _Data.IndexOf(text, _I);
if (num <= -1)
{
str = _Data.Substring(_I);
_I = _Data.Length;
str1 = str.Trim();
}
else
{
str = _Data.Substring(_I, num - _I);
_I = num + text.Length;
str1 = str.Trim();
}
return str1;
}
private string GetBefore(string text, bool trim)
{
string str;
string before;
if (!trim)
{
int num = _Data.IndexOf(text, _I);
if (num <= -1)
{
str = _Data.Substring(_I);
_I = _Data.Length;
before = str;
}
else
{
str = _Data.Substring(_I, num - _I);
_I = num + text.Length;
before = str;
}
}
else
{
before = GetBefore(text);
}
return before;
}
private string GetToEOL()
{
string result;
if (_Data.IndexOf("\n", _I) > -1)
result = GetBefore("\n");
else
result = GetBefore(Environment.NewLine);
return result;
}
private string GetToEOL(bool trim)
{
string str;
if (_Data.IndexOf("\n", _I) > -1)
str = (!trim ? GetBefore("\n", false) : GetToEOL());
else
str = (!trim ? GetBefore(Environment.NewLine, false) : GetToEOL());
return str;
}
private string GetToken()
{
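// Skips any leading whitespace, then returns the next whitespace-delimited token and advances _I past it.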
while (true)
{
if ((_I >= _Data.Length || !IsNullOrWhiteSpace(_Data.Substring(_I, 1))))
{
break;
}
_I++;
}
int num = _I;
while (true)
{
if (num >= _Data.Length || IsNullOrWhiteSpace(_Data.Substring(num, 1)))
{
break;
}
num++;
}
string str = _Data.Substring(_I, num - _I);
_I = num;
return str.Trim();
}
private string GetToText(string text)
{
string str = _Data.Substring(_I, _Data.IndexOf(text, _I) - _I).Trim();
return str;
}
private bool IsBlankLine()
{
int num = _Data.IndexOf("\n", _I);
return IsNullOrWhiteSpace((num > -1 ? _Data.Substring(_I, num - _I) : _Data.Substring(_I)));
}
private bool IsNullOrWhiteSpace(string text)
{
bool flag;
int num = 0;
while (true)
{
if (num >= text.Length)
{
flag = true;
break;
}
else if (char.IsWhiteSpace(text[num]))
{
num++;
}
else
{
flag = false;
break;
}
}
return flag;
}
private string PeekNextLine()
{
int num = _I;
string toEOL = GetToEOL();
_I = num;
return toEOL;
}
private void ScanPast(string text)
{
int num = _Data.IndexOf(text, _I);
if (num <= -1)
{
_I = _Data.Length;
}
else
{
_I = num + text.Length;
}
}
internal static DateTime GetDateTime(Logistics logistics, string dateTimeText)
{
DateTime result;
string inputDateFormat = "MM/dd/yy HH:mm";
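// e.g. "02/01/22 18:21" (illustrative); values that fail to parse, or that parse more than a day away from the
// sequence timestamp, fall back to logistics.DateTimeFromSequence.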
if (dateTimeText.Length != inputDateFormat.Length)
result = logistics.DateTimeFromSequence;
else
{
if (!DateTime.TryParseExact(dateTimeText, inputDateFormat, CultureInfo.InvariantCulture, DateTimeStyles.None, out DateTime dateTimeParsed))
result = logistics.DateTimeFromSequence;
else
{
if (dateTimeParsed < logistics.DateTimeFromSequence.AddDays(1) && dateTimeParsed > logistics.DateTimeFromSequence.AddDays(-1))
result = dateTimeParsed;
else
result = logistics.DateTimeFromSequence;
}
}
return result;
}
private void Parse(IFileRead fileRead, Logistics logistics, List<FileInfo> fileInfoCollection, string originalDataBioRad, string receivedData)
{
if (fileRead is null)
{ }
_I = 0;
_Data = string.Empty;
List<Detail> details = new();
if (string.IsNullOrEmpty(receivedData))
receivedData = File.ReadAllText(logistics.ReportFullPath);
_Log.Debug($"****ParseData - Source file contents:");
_Log.Debug(receivedData);
string[] files = Directory.GetFiles(Path.GetDirectoryName(logistics.ReportFullPath), string.Concat(originalDataBioRad, logistics.Sequence, "*"), SearchOption.TopDirectoryOnly);
foreach (string file in files)
fileInfoCollection.Add(new FileInfo(file));
if (!string.IsNullOrEmpty(receivedData))
{
int i;
int num;
int num1;
int num2;
Point point;
Detail detail;
string[] segments;
string batch = "Batch";
string started = "started";
string cassette = "Cassette";
string startedAt = "started at";
_I = 0;
_Data = receivedData;
if (!_Data.Contains(batch) || !_Data.Contains(started))
Batch = string.Empty;
else
{
for (int z = 0; z < int.MaxValue; z++)
{
ScanPast(batch);
if (!_Data.Substring(_I).Contains(batch))
break;
}
Batch = GetToText(started);
ScanPast(startedAt);
}
ScanPast(cassette);
if (!_Data.Substring(_I).Contains(started))
Cassette = string.Empty;
else
Cassette = GetToText(started);
// Remove illegal characters \/:*?"<>| found in the Cassette.
Cassette = Regex.Replace(Cassette, @"[\\,\/,\:,\*,\?,\"",\<,\>,\|]", "_").Split('\r')[0].Split('\n')[0];
if (Cassette.StartsWith("1T") || Cassette.StartsWith("1t"))
Cassette = Cassette.Substring(2);
Title = (!string.IsNullOrEmpty(Batch) ? Batch : Cassette);
ScanPast(startedAt);
string dateTimeText = GetToEOL();
if (dateTimeText.EndsWith("."))
dateTimeText = dateTimeText.Remove(dateTimeText.Length - 1, 1);
DateTime dateTime = GetDateTime(logistics, dateTimeText);
Date = dateTime.ToString();
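// Cassette IDs typically look like <Reactor>-<RDS>-<PSN>[-<Employee>] (e.g. "47-241330-4238", per the example in
// the txt handler), but '.', '_' or a space may be used as the separator, so pick the delimiter that is present.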
if (Cassette.Contains('.'))
segments = Cassette.Split(new char[] { '.' });
else if (Cassette.Contains('-'))
segments = Cassette.Split(new char[] { '-' });
else if (!Cassette.Contains('\u005F'))
segments = Cassette.Split(new char[] { ' ' });
else
segments = Cassette.Split(new char[] { '\u005F' });
if (segments.Length >= 1)
Reactor = segments[0];
if (segments.Length >= 2)
RDS = segments[1];
if (segments.Length >= 3)
PSN = segments[2];
if (segments.Length >= 4)
Employee = segments[3];
if (Reactor.Length > 3)
{
RDS = Reactor;
Reactor = string.Empty;
}
num1 = 0;
if (PeekNextLine().Contains("Wafer"))
{
_Log.Debug("****ProcessData Contains Wafer");
while (!PeekNextLine().Contains(cassette))
{
num2 = num1;
num1 = num2 + 1;
if (num2 > 25)
break;
else
{
_Log.Debug("****ProcessData new stratusBioRadWaferDetail");
detail = new Detail();
ScanPast("Wafer");
detail.Wafer = GetToEOL();
if (detail.Wafer.EndsWith("."))
{
_Log.Debug("****ProcessData Removing Wafer");
detail.Wafer = detail.Wafer.Remove(detail.Wafer.Length - 1, 1);
}
ScanPast("Slot");
detail.Slot = GetToEOL();
ScanPast("Recipe");
Recipe = GetToEOL();
if (Recipe.EndsWith("."))
{
_Log.Debug("****ProcessData Removing Recipe");
Recipe = Recipe.Remove(Recipe.Length - 1, 1);
}
detail.Recipe = Recipe;
GetToEOL();
if (PeekNextLine().Contains("Thickness"))
{
ScanPast("1 - ");
num = Convert.ToInt32(GetToken());
_Log.Debug(string.Concat("****ProcessData Thickness =", num));
detail.Points = new();
for (i = 0; i < num; i++)
{
point = new() { Thickness = GetToken() };
detail.Points.Add(point);
point.Position = Convert.ToString(detail.Points.Count);
}
}
GetToEOL();
if (PeekNextLine().Contains("Thickness"))
{
ScanPast("11 - ");
num = Convert.ToInt32(GetToken());
for (i = detail.Points.Count; i < num; i++)
{
point = new() { Thickness = GetToken() };
detail.Points.Add(point);
point.Position = Convert.ToString(detail.Points.Count);
}
}
ScanPast("Slot");
GetToken();
detail.PassFail = GetToken();
if (detail.PassFail.EndsWith("."))
{
_Log.Debug("****ProcessData Removing PassFail");
detail.PassFail = detail.PassFail.Remove(detail.PassFail.Length - 1, 1);
}
ScanPast("Mean");
detail.Mean = GetToken();
if (detail.Mean.EndsWith(","))
{
_Log.Debug("****ProcessData Removing Mean");
detail.Mean = detail.Mean.Remove(detail.Mean.Length - 1, 1);
}
ScanPast("STDD");
detail.StdDev = GetToEOL();
if (detail.StdDev.EndsWith("."))
{
_Log.Debug("****ProcessData Removing stdDev");
detail.StdDev = detail.StdDev.Remove(detail.StdDev.Length - 1, 1);
}
detail.UniqueId = string.Concat("_Wafer-", detail.Wafer, "_Slot-", detail.Slot, "_Point-", detail.Position);
details.Add(detail);
if (PeekNextLine().Contains(cassette))
GetToEOL();
if (PeekNextLine().Contains(cassette))
GetToEOL();
if (PeekNextLine().Contains("Process failed"))
GetToEOL();
}
}
ScanPast("Mean");
MeanThickness = GetToken();
if (MeanThickness.EndsWith(","))
{
_Log.Debug("****ProcessData Removing MeanThickness");
MeanThickness = MeanThickness.Remove(MeanThickness.Length - 1, 1);
}
ScanPast("STDD");
StdDev = GetToken();
if (StdDev.EndsWith(","))
{
_Log.Debug("****ProcessData Removing thi.StdDev");
StdDev = StdDev.Remove(StdDev.Length - 1, 1);
}
}
}
StringBuilder stringBuilder = new();
UniqueId = string.Concat("StratusBioRad_", Reactor, "_", RDS, "_", PSN, "_", logistics.DateTimeFromSequence.ToString("yyyyMMddHHmmssffff"));
foreach (Detail detail in details)
{
detail.HeaderUniqueId = UniqueId;
detail.UniqueId = string.Concat(UniqueId, detail.UniqueId);
if (detail.Points is null)
detail.Points = new List<Point>();
foreach (Point bioRadDetail in detail.Points)
{
bioRadDetail.HeaderUniqueId = detail.HeaderUniqueId;
bioRadDetail.UniqueId = detail.UniqueId;
}
stringBuilder.Clear();
foreach (Point point in detail.Points)
stringBuilder.Append(point.Thickness).Append(',');
if (stringBuilder.Length > 0)
stringBuilder.Remove(stringBuilder.Length - 1, 1);
detail.Thickness = stringBuilder.ToString();
stringBuilder.Clear();
foreach (Point point in detail.Points)
stringBuilder.Append(point.Position).Append(',');
if (stringBuilder.Length > 0)
stringBuilder.Remove(stringBuilder.Length - 1, 1);
detail.Position = stringBuilder.ToString();
}
fileInfoCollection.Add(new FileInfo(logistics.ReportFullPath));
_Details.AddRange(details);
}
}
}

View File

@ -0,0 +1,175 @@
using Adaptation.Eaf.Management.ConfigurationData.CellAutomation;
using Adaptation.Ifx.Eaf.EquipmentConnector.File.Configuration;
using Adaptation.Shared;
using Adaptation.Shared.Duplicator;
using Adaptation.Shared.Methods;
using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Text.Json;
namespace Adaptation.FileHandlers.txt
{
public class FileRead : Shared.FileRead, IFileRead
{
private readonly string _OriginalDataBioRad;
public FileRead(ISMTP smtp, Dictionary<string, string> fileParameter, string cellInstanceName, string cellInstanceConnectionName, FileConnectorConfiguration fileConnectorConfiguration, string equipmentTypeName, string parameterizedModelObjectDefinitionType, IList<ModelObjectParameterDefinition> modelObjectParameters, string equipmentDictionaryName, Dictionary<string, List<long>> dummyRuns, bool useCyclicalForDescription, bool isEAFHosted, int hyphenXToArchive, int hyphenIsArchive) :
base(new Description(), true, smtp, fileParameter, cellInstanceName, cellInstanceConnectionName, fileConnectorConfiguration, equipmentTypeName, parameterizedModelObjectDefinitionType, modelObjectParameters, equipmentDictionaryName, dummyRuns, useCyclicalForDescription, isEAFHosted, hyphenXToArchive, hyphenIsArchive)
{
_MinFileLength = 10;
_NullData = string.Empty;
_Logistics = new Logistics(this);
if (_FileParameter is null)
throw new Exception(cellInstanceConnectionName);
if (_ModelObjectParameterDefinitions is null)
throw new Exception(cellInstanceConnectionName);
if (_IsDuplicator)
throw new Exception(cellInstanceConnectionName);
_OriginalDataBioRad = "OriginalDataBioRad_";
}
void IFileRead.Move(Tuple<string, Test[], JsonElement[], List<FileInfo>> extractResults, Exception exception)
{
Move(this, extractResults, exception);
}
void IFileRead.WaitForThread()
{
WaitForThread(thread: null, threadExceptions: null);
}
string IFileRead.GetEventDescription()
{
string result = _Description.GetEventDescription();
return result;
}
List<string> IFileRead.GetHeaderNames()
{
List<string> results = _Description.GetHeaderNames();
return results;
}
string[] IFileRead.Move(Tuple<string, Test[], JsonElement[], List<FileInfo>> extractResults, string to, string from, string resolvedFileLocation, Exception exception)
{
string[] results = Move(extractResults, to, from, resolvedFileLocation, exception);
return results;
}
JsonProperty[] IFileRead.GetDefault()
{
JsonProperty[] results = _Description.GetDefault(this, _Logistics);
return results;
}
Dictionary<string, string> IFileRead.GetDisplayNamesJsonElement()
{
Dictionary<string, string> results = _Description.GetDisplayNamesJsonElement(this);
return results;
}
List<IDescription> IFileRead.GetDescriptions(IFileRead fileRead, List<Test> tests, IProcessData processData)
{
List<IDescription> results = _Description.GetDescriptions(fileRead, _Logistics, tests, processData);
return results;
}
Tuple<string, Test[], JsonElement[], List<FileInfo>> IFileRead.GetExtractResult(string reportFullPath, string eventName)
{
Tuple<string, Test[], JsonElement[], List<FileInfo>> results;
if (string.IsNullOrEmpty(eventName))
throw new Exception();
_ReportFullPath = reportFullPath;
DateTime dateTime = DateTime.Now;
results = GetExtractResult(reportFullPath, dateTime);
if (results.Item3 is null)
results = new Tuple<string, Test[], JsonElement[], List<FileInfo>>(results.Item1, new Test[] { }, JsonSerializer.Deserialize<JsonElement[]>("[]"), results.Item4);
if (results.Item3.Length > 0 && _IsEAFHosted)
WritePDSF(this, results.Item3);
UpdateLastTicksDuration(DateTime.Now.Ticks - dateTime.Ticks);
return results;
}
Tuple<string, Test[], JsonElement[], List<FileInfo>> IFileRead.ReExtract()
{
Tuple<string, Test[], JsonElement[], List<FileInfo>> results;
List<string> headerNames = _Description.GetHeaderNames();
Dictionary<string, string> keyValuePairs = _Description.GetDisplayNamesJsonElement(this);
results = ReExtract(this, headerNames, keyValuePairs);
return results;
}
void IFileRead.CheckTests(Test[] tests, bool extra)
{
throw new Exception(string.Concat("Not ", nameof(_IsDuplicator)));
}
void IFileRead.MoveArchive()
{
throw new Exception(string.Concat("Not ", nameof(_IsDuplicator)));
}
void IFileRead.Callback(object state)
{
throw new Exception(string.Concat("Not ", nameof(_IsDuplicator)));
}
private Tuple<string, Test[], JsonElement[], List<FileInfo>> GetExtractResult(string reportFullPath, DateTime dateTime)
{
Tuple<string, Test[], JsonElement[], List<FileInfo>> results = new(string.Empty, null, null, new List<FileInfo>());
_Logistics = new Logistics(this, reportFullPath, useSplitForMID: false);
SetFileParameterLotID(_Logistics.MID);
if (reportFullPath.Length < _MinFileLength)
results.Item4.Add(new FileInfo(reportFullPath));
else
{
bool isBioRad;
string dataText;
string cassetteID;
string fileNameTemp;
string tupleFileName;
DateTime cassetteTicks;
string directoryName = Path.GetDirectoryName(reportFullPath);
string sequenceDirectoryName = string.Concat(Path.GetDirectoryName(reportFullPath), @"\", _Logistics.Sequence);
string originalDataBioRad = string.Concat(Path.GetDirectoryName(reportFullPath), @"\", _OriginalDataBioRad, _Logistics.Sequence, ".txt");
List<Tuple<string, bool, DateTime, string>> tuples = ProcessData.GetTuples(this, _Logistics, dateTime, results.Item4, _OriginalDataBioRad);
if (_IsEAFHosted)
{
if (tuples.Any())
{
if (!Directory.Exists(sequenceDirectoryName))
Directory.CreateDirectory(sequenceDirectoryName);
File.Move(reportFullPath, originalDataBioRad);
_Log.Debug(string.Concat("****Extract() - Renamed [", reportFullPath, "] to [", originalDataBioRad, "]"));
}
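// Each tuple is (cassetteID, isBioRad, cassetteDateTime, dataText); BioRad-style sets are written as
// DetailDataBioRad_*.txt and the remaining sets as CassetteDataBioRad_*.txt, each back-dated to the cassette's
// timestamp before being moved up out of the sequence directory.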
foreach (Tuple<string, bool, DateTime, string> tuple in tuples)
{
isBioRad = tuple.Item2;
dataText = tuple.Item4;
cassetteID = tuple.Item1;
cassetteTicks = tuple.Item3;
if (isBioRad)
tupleFileName = string.Concat("DetailDataBioRad_", cassetteID, "_", cassetteTicks.Ticks, ".txt");
else
tupleFileName = string.Concat("CassetteDataBioRad_", cassetteID, "_", cassetteTicks.Ticks, ".txt");
fileNameTemp = string.Concat(sequenceDirectoryName, @"\", tupleFileName);
File.WriteAllText(fileNameTemp, dataText);
File.SetLastWriteTime(fileNameTemp, cassetteTicks);
if (_Logistics.Sequence != cassetteTicks.Ticks && File.Exists(originalDataBioRad))
File.Copy(originalDataBioRad, string.Concat(Path.GetDirectoryName(reportFullPath), @"\", _OriginalDataBioRad, cassetteTicks.Ticks, ".txt"));
File.Move(fileNameTemp, string.Concat(directoryName, @"\", tupleFileName));
}
if (Directory.Exists(sequenceDirectoryName))
Directory.Delete(sequenceDirectoryName);
}
}
return results;
}
}
}

View File

@ -0,0 +1,510 @@
using Adaptation.Shared;
using Adaptation.Shared.Methods;
using log4net;
using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Text;
using System.Text.RegularExpressions;
namespace Adaptation.FileHandlers.txt
{
public partial class ProcessData
{
internal static List<Tuple<string, bool, DateTime, string>> GetTuples(FileRead fileRead, Logistics logistics, DateTime dateTime, List<FileInfo> fileInfoCollection, string originalDataBioRad)
{
List<Tuple<string, bool, DateTime, string>> results = new();
ILog log = LogManager.GetLogger(typeof(ProcessData));
// ***********************************************************************************
// * Step #2 - Verify completeness of each cassette scan in the raw data source file *
// ***********************************************************************************
string line;
StreamReader rawDataFilePtr;
bool? cassetteScanCompleted = null;
// Scrub the source file to verify that, for each cassette present in the file, there is a complete
// data set (i.e., that there is a "started" and a "finished" statement).
//
// Scenario #1 - Normal
// For every cassette "started" there must be a matching cassette "finished".
// Scenario #2 - Only Cassette "finished" (with or without additional complete cassette data sets)
// Incomplete data file. The file will be processed and an error will be generated for the incomplete portion.
// Scenario #3 - Only Cassette "Started"
// Bail out of the solution. Source data file not ready to be processed.
using (rawDataFilePtr = new StreamReader(logistics.ReportFullPath))
{
for (short i = 0; i < short.MaxValue; i++)
{
line = rawDataFilePtr.ReadLine();
if (line is null)
break;
if (line.Contains("Cassette") && line.Contains("started") && (cassetteScanCompleted is null || cassetteScanCompleted.Value))
{
cassetteScanCompleted = false;
log.Debug("****Extract() - CassetteScanCompleted = FALSE");
}
else if (line.Contains("Cassette") && line.Contains("finished") && (cassetteScanCompleted is null || !cassetteScanCompleted.Value))
{
cassetteScanCompleted = true;
log.Debug("****Extract() - CassetteScanCompleted = TRUE");
}
}
// Making sure that the file has been released
rawDataFilePtr.Close();
if (!(rawDataFilePtr is null))
rawDataFilePtr.Dispose();
}
if (cassetteScanCompleted is null || !cassetteScanCompleted.Value)
// Raw source file has an incomplete data set or it only contains a "Process failed" and should not be
// processed/split yet. Simply get out of this routine until enough data has been appended to the file.
log.Debug($"****Extract() - Raw source file has an incomplete data set and should not be processed yet.");
else
{
Dictionary<string, List<string>> cassetteIDAndDataSets = new();
if (!string.IsNullOrEmpty(logistics.ReportFullPath))
{
string[] segments;
int cassetteEndIndex;
int thicknessCounter;
string thicknessHead;
string thicknessInfo;
string thicknessTail;
int cassetteStartIndex;
StringBuilder lines = new();
string slotID = string.Empty;
string cassetteID = string.Empty;
string batchHeader = string.Empty;
bool finishedReadingThicknessInfo;
bool slotInformationCaptured = false;
bool pointsInformationCaptured = false;
bool sourceInformationCaptured = false;
bool waferWaferInformationCaptured = false;
bool destinationInformationCaptured = false;
string[] reportFullPathlines = File.ReadAllLines(logistics.ReportFullPath);
List<Tuple<string, int, int>> cassetteStartAndEnds = new();
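// First pass: record (batchHeader, startIndex, endIndex) for every "Cassette ... started" block in the raw file.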
for (int i = 0; i < reportFullPathlines.Length; i++)
{
line = reportFullPathlines[i].Trim();
if (string.IsNullOrEmpty(line))
continue;
if (line.StartsWith("Batch") && line.Contains("started"))
batchHeader = line;
if (i + 1 == reportFullPathlines.Length)
continue;
if (line.StartsWith("Cassette") && line.Contains("started"))
{
for (int j = i + 1; j < reportFullPathlines.Length; j++)
{
if (j + 1 == reportFullPathlines.Length)
cassetteStartAndEnds.Add(new Tuple<string, int, int>(batchHeader, i, j));
else
{
line = reportFullPathlines[j].Trim();
if (line.StartsWith("Cassette") && line.Contains("started"))
{
cassetteStartAndEnds.Add(new Tuple<string, int, int>(batchHeader, i, j - 1));
break;
}
}
}
}
}
foreach (Tuple<string, int, int> tuple in cassetteStartAndEnds)
{
lines.Clear();
batchHeader = tuple.Item1;
cassetteEndIndex = tuple.Item3;
cassetteStartIndex = tuple.Item2;
for (int l = cassetteStartIndex; l <= cassetteEndIndex; l++)
{
line = reportFullPathlines[l].Trim();
if (string.IsNullOrEmpty(line))
continue;
if (l == cassetteStartIndex)
{
// Write out the previously captured "Batch Header"
lines.AppendLine(batchHeader);
// Save the first line of the cassette scan information
lines.AppendLine(line);
// Each new cassette initialize the WaferWafer information flag
waferWaferInformationCaptured = false;
slotInformationCaptured = false;
if (line.Length > 9)
{
// Detected a new cassette data scan. Extract the cassette ID.
// Example: "Cassette 47-241330-4238 started."
segments = line.Substring(9).Split(new string[] { "started" }, StringSplitOptions.RemoveEmptyEntries);
if (segments.Any())
{
// Detected a new cassette scan in the raw source file
cassetteID = segments[0].Trim();
cassetteID = cassetteID.Replace(":", string.Empty);
cassetteID = cassetteID.Replace("*", string.Empty);
cassetteID = cassetteID.Replace("\\", string.Empty);
}
}
}
// Continue reading and saving the cassette scan information into the cassette
// scan output file until the end-of-cassette-scan "Finished" statement has
// been detected.
// Maintain a standard format between the various BioRad tools. The "Points" and "Thickness"
// values from the various BioRad tools might be spread over multiple lines. The following
// simply regroups the "Points" and "Thickness" information onto the same line accordingly.
if (line.StartsWith("Wafer Wafer"))
{
lines.AppendLine(line);
slotInformationCaptured = false;
waferWaferInformationCaptured = true;
}
else if (line.StartsWith("Slot"))
{
slotID = string.Empty;
segments = line.Split(' ');
if (segments.Length > 1)
slotID = segments[1];
// There are cases where the WaferWafer information is missing. Create a
// WaferWafer entry based off the slot number.
if (!waferWaferInformationCaptured)
{
waferWaferInformationCaptured = true;
lines.AppendLine("Wafer Wafer " + slotID + ".");
}
lines.AppendLine(line);
slotInformationCaptured = true;
}
else if (line.StartsWith("Recipe"))
{
lines.AppendLine(line);
pointsInformationCaptured = false;
}
else if (line.StartsWith("Points"))
{
lines.AppendLine(line);
pointsInformationCaptured = true;
}
else if (line.Contains("Thickness"))
{
// Before addressing the "Thickness" section, ensure that the "Points" section
// has been found. Otherwise, we need to write out a default value.
if (!pointsInformationCaptured)
{
// No "Points" information has been capture. Default to "Points : 0 0"
lines.AppendLine("Points : 0 0");
pointsInformationCaptured = true;
}
// The "Thickness" output section comes out differently between various Stratus tools. In some
// cases, the thickness values are either empty (no values), on the same line or on different lines.
// Below are examples of how the data needs to be formatted after being parsed:
// Thickness, um 1 - 1 0
// Thickness, um 1 - 1 13.630
// Thickness, um 1 - 9 1.197 1.231 1.248 1.235 1.199 1.202 1.236 1.242 1.212
thicknessCounter = 0;
thicknessHead = line;
thicknessInfo = "";
thicknessTail = "";
finishedReadingThicknessInfo = false;
for (int t = l + 1; t <= cassetteEndIndex; t++)
{
l = t;
line = reportFullPathlines[l].Trim();
if (string.IsNullOrEmpty(line))
continue;
if (!line.StartsWith("Slot"))
{
thicknessCounter++;
thicknessTail = string.Concat(thicknessTail, " ", line);
}
else
{
finishedReadingThicknessInfo = true;
if (thicknessCounter != 0)
thicknessInfo = string.Concat(" 1 - ", thicknessCounter);
else
{
// Two possible formatting scenarios at this point: either the data was already
// formatted properly on one line, or the Thickness value was missing, in which
// case we need to default the thickness value to zero (0).
segments = thicknessHead.Split(' ');
if (segments.Length > 2)
{
// The "Thickness" raw data if formatted as a normal single line format and
// already include the Header + Info + Tail
}
else
{
// The "Thikness raw data has no values. Formatting the output with zero.
thicknessInfo = " 1 - 1";
thicknessTail = " 0";
}
}
lines.AppendLine(string.Concat(thicknessHead, thicknessInfo, thicknessTail));
// The "Slot" keyword is the tag that determines the end of the Thickness section. The "Slot"
// information has already been read. Simply write it back.
lines.AppendLine(line);
}
if (finishedReadingThicknessInfo)
break;
}
}
else if (line.StartsWith("Mean"))
{
lines.AppendLine(line);
sourceInformationCaptured = false;
destinationInformationCaptured = false;
}
else if (line.StartsWith("Source:") && slotInformationCaptured)
{
lines.AppendLine(line);
sourceInformationCaptured = true;
}
else if (line.StartsWith("Destination:") && slotInformationCaptured)
{
if (!sourceInformationCaptured)
{
sourceInformationCaptured = true;
lines.AppendLine(string.Concat("Source: Slot ", slotID, ", Cassette"));
}
lines.AppendLine(line);
destinationInformationCaptured = true;
// Each time a cassette slot section has been completed, we must reinitialize
// the "Wafer Wafer" information flag in case there are multiple slots in the
// same cassette
slotInformationCaptured = false;
waferWaferInformationCaptured = false;
}
else if (line.StartsWith("Cassette") && line.Contains("finished."))
{
// Reached the end of the cassette data set information
if (!sourceInformationCaptured)
{
sourceInformationCaptured = true;
lines.AppendLine(string.Concat("Source: Slot ", slotID, ", Cassette"));
}
if (!destinationInformationCaptured)
{
destinationInformationCaptured = true;
lines.AppendLine(string.Concat("Destination: Slot ", slotID, ", Cassette"));
// Each time a cassette slot section has been completed, we must reinitialize
// the "Wafer Wafer" information flag in case there are multiple slots in the
// same cassette
slotInformationCaptured = false;
waferWaferInformationCaptured = false;
}
// Write the end of cassette statement to the output file
lines.AppendLine(line);
// Read the Mean-Average line information, post the cassette "Finished" statement
for (int a = l + 1; a <= cassetteEndIndex; a++)
{
l = a;
line = reportFullPathlines[l].Trim();
if (string.IsNullOrEmpty(line))
continue;
// There are many blank lines in the source file. Search for the first
// occurrence of the string "Mean".
if (line.StartsWith("Mean"))
{
lines.AppendLine(line);
break;
}
// The Mean/Average information is missing. We are done reading the cassette information.
if (line.StartsWith("Batch"))
break;
}
if (!cassetteIDAndDataSets.ContainsKey(cassetteID))
cassetteIDAndDataSets.Add(cassetteID, new List<string>());
cassetteIDAndDataSets[cassetteID].Add(lines.ToString());
}
}
}
if (cassetteStartAndEnds is null)
{ }
}
if (cassetteIDAndDataSets.Any())
{
int wafer;
string user;
string runID;
bool isBioRad;
string recipe;
int count = -1;
int stringIndex;
string dataText;
string dataType;
string[] segments;
string cassetteID;
string recipeName;
IProcessData iProcessData;
DateTime cassetteDateTime;
string recipeSearch = "Recipe";
string toolType = string.Empty;
StringBuilder contents = new();
Stratus.ProcessData processData;
foreach (KeyValuePair<string, List<string>> keyValuePair in cassetteIDAndDataSets)
{
isBioRad = false;
dataType = string.Empty;
cassetteID = keyValuePair.Key;
for (int i = 0; i < keyValuePair.Value.Count; i++)
{
dataText = keyValuePair.Value[i];
// Finished capturing the complete cassette scan data information. Release the cassette file.
if (dataText.Contains("Cassette") &&
dataText.Contains("Wafer") &&
dataText.Contains("Slot") &&
dataText.Contains("Recipe") &&
dataText.Contains("Points") &&
dataText.Contains("Thickness") &&
dataText.Contains("Mean") &&
dataText.Contains("Source:") &&
dataText.Contains("Destination:"))
{
// Extract the recipe name
runID = string.Empty;
recipeName = string.Empty;
stringIndex = dataText.IndexOf(recipeSearch);
recipeName = dataText.Substring(stringIndex + recipeSearch.Length);
log.Debug($"****Extract(FDR): recipeName = {recipeName}");
if (!(string.IsNullOrEmpty(recipeName)) && (recipeName.IndexOf("center", StringComparison.CurrentCultureIgnoreCase) >= 0))
{
/***************************************/
/* STRATUS Measurement = FQA Thickness */
/***************************************/
// Recipes that contains the substring "Center" are STRATUS centerpoint recipes. They are used for Inspection and FQA measurements.
// measurement. The data from thise scans should be uploaded to the Metrology Viewer database as STRATUS and uploaded to the
// OpenInsight [FQA Thickness - Post Epi - QA Metrology / Thk/RHO Value for each slotID] automatically.
isBioRad = false;
toolType = "STRATUS";
dataType = "FQA Thickness";
}
else if (!(string.IsNullOrEmpty(recipeName)) && (recipeName.IndexOf("prod_", StringComparison.CurrentCultureIgnoreCase) >= 0))
{
/******************************************/
/* BIORAD Measurement = Product Thickness */
/******************************************/
// Recipes that contains the substring "Center" are STRATUS centerpoint recipes. They are used for Inspection and FQA measurements.
// measurement. The data from thise scans should be uploaded to the Metrology Viewer database as STRATUS and uploaded to the
// OpenInsight [FQA Thickness - Post Epi - QA Metrology / Thk/RHO Value for each slotID] automatically.
isBioRad = true;
toolType = "BIORAD";
dataType = "Product Thickness";
}
else if (!(string.IsNullOrEmpty(recipeName)) &&
((recipeName.IndexOf("T-Low", StringComparison.CurrentCultureIgnoreCase) >= 0) ||
(recipeName.IndexOf("T_Low", StringComparison.CurrentCultureIgnoreCase) >= 0) ||
(recipeName.IndexOf("T-Mid", StringComparison.CurrentCultureIgnoreCase) >= 0) ||
(recipeName.IndexOf("T_Mid", StringComparison.CurrentCultureIgnoreCase) >= 0) ||
(recipeName.IndexOf("T-High", StringComparison.CurrentCultureIgnoreCase) >= 0) ||
(recipeName.IndexOf("T_High", StringComparison.CurrentCultureIgnoreCase) >= 0)))
{
/*************************************/
/* BIORAD Measurement = No Uploading */
/*************************************/
// Recipes that contains the substring "T-Low, T_Low, T-Mid, T_Mid and T-High, T_High" are BIORAD verification recipe. The information
// should be uploaded to the Metrology Viewer database as BIORAD. No OpenInsight.
isBioRad = true;
toolType = "BIORAD";
dataType = "Verification";
}
else
{
// Count the number of wafers (ref. "Source: Slot") in the cassette
int waferCount = Regex.Matches(dataText, "Source: Slot").Count;
if (waferCount == 1)
{
// Metrology Thickness. Upload to OpenInsight same as BR2 and BR3
isBioRad = true;
toolType = "BIORAD";
dataType = "Metrology Thickness";
}
else if (waferCount > 1)
{
// Inspection Measurement. Do not upload to OpenInsight.
isBioRad = true;
toolType = "BIORAD";
dataType = "Inspection";
}
}
}
log.Debug($"****Extract(FDR): ToolType = {toolType}");
log.Debug($"****Extract(FDR): DataType = {dataType}");
if (!isBioRad)
{
cassetteDateTime = logistics.DateTimeFromSequence.AddTicks(i * -1);
results.Add(new Tuple<string, bool, DateTime, string>(cassetteID, isBioRad, cassetteDateTime, dataText));
}
else
{
processData = new Stratus.ProcessData(fileRead, logistics, fileInfoCollection, originalDataBioRad, dataText: dataText);
iProcessData = processData;
if (!iProcessData.Details.Any())
log.Warn("No Details!");
else
{
foreach (object item in iProcessData.Details)
{
if (item is not Stratus.Detail detail)
throw new Exception();
count += 1;
contents.Clear();
cassetteDateTime = logistics.DateTimeFromSequence.AddTicks(count * -1);
user = processData.Employee?.ToString() ?? "";
recipe = detail.Recipe?.ToString() ?? "";
contents.Append("Bio-Rad ").Append("QS400MEPI".PadRight(17)).Append("Recipe: ").Append(recipe.PadRight(25)).AppendLine(processData.Date);
contents.Append("operator: ").Append(user.PadRight(22)).Append("batch: BIORAD #").AppendLine(logistics.JobID.Substring(6, 1));
contents.Append("cassette: ").Append("".PadRight(22)).Append("wafer: ").AppendLine(processData.Cassette);
contents.AppendLine("--------------------------------------------------------------------------------");
contents.AppendLine(" position thickness position thickness position thickness");
segments = detail.Thickness.Split(',');
for (int j = 0; j < segments.Length; j++)
{
wafer = j + 1;
contents.Append(wafer.ToString().PadLeft(11));
if ((wafer % 3) > 0)
contents.Append(segments[j].PadLeft(10));
else
contents.AppendLine(segments[j].PadLeft(10));
}
if ((segments.Length % 3) > 0)
contents.AppendLine();
contents.Append(" wafer mean thickness = ").Append(detail.Mean).Append(", std. dev = ").Append(detail.StdDev).Append(" ").AppendLine(detail.PassFail);
contents.AppendLine("================================================================================");
contents.AppendLine("");
contents.AppendLine("Radial variation (computation B) PASS:");
contents.AppendLine("");
contents.AppendLine(" thickness 0.0000");
results.Add(new Tuple<string, bool, DateTime, string>(cassetteID, isBioRad, cassetteDateTime, contents.ToString()));
}
}
}
}
}
}
}
// **********************************************
// * Step #3 - Protect the raw data source file *
// **********************************************
// The multi-cassette raw source file is ready to be split. Each cassette scan set has
// been determined to be complete (i.e., has the started & finished statements). At this point
// it is important to rename the multi-cassette raw data source file, located in the RawData
// folder, to a different name so that the tool does not attempt to update the file while it is
// being processed by the EAF cell instance.
// Get the last date/time the DataBioRad.txt file was updated
DateTime afterCheck = File.GetLastWriteTime(logistics.ReportFullPath);
// Ensure that the DataBioRad.txt file has not been updated since the FileReader began the healthcheck.
// If the date/time values are different between the "Before" and "After" checks, then let it go. The
// tool is still busy trying to update the file. The FileReader will try to catch the data on the
// next update.
if (logistics.DateTimeFromSequence != afterCheck)
{
results.Clear();
log.Debug($"****Extract() - DataBioRad.txt file is getting updated fast");
log.Debug($"****Extract() - DataBioRadDateTime_AfterCheck = {afterCheck.Ticks}");
log.Debug($"****Extract() - DataBioRadDateTime_BeforeCheck = {logistics.Sequence}");
}
return results;
}
}
}
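
The branching above maps recipe-name substrings and wafer count to a tool type and data type. A minimal standalone sketch of that decision table (hypothetical helper ClassifyRecipe, illustrative only, not part of this changeset):

using System;
using System.Text.RegularExpressions;
public static class RecipeClassifierSketch
{
    // Mirrors the Extract(FDR) classification above: "center" recipes are STRATUS FQA scans,
    // "prod_" recipes are BIORAD product scans, T-Low/T-Mid/T-High recipes are BIORAD verification
    // scans, and anything else is classified by the number of "Source: Slot" entries in the text.
    public static (bool IsBioRad, string ToolType, string DataType) ClassifyRecipe(string recipeName, string dataText)
    {
        if (!string.IsNullOrEmpty(recipeName) && recipeName.IndexOf("center", StringComparison.CurrentCultureIgnoreCase) >= 0)
            return (false, "STRATUS", "FQA Thickness");
        if (!string.IsNullOrEmpty(recipeName) && recipeName.IndexOf("prod_", StringComparison.CurrentCultureIgnoreCase) >= 0)
            return (true, "BIORAD", "Product Thickness");
        string[] verificationTokens = new string[] { "T-Low", "T_Low", "T-Mid", "T_Mid", "T-High", "T_High" };
        foreach (string token in verificationTokens)
        {
            if (!string.IsNullOrEmpty(recipeName) && recipeName.IndexOf(token, StringComparison.CurrentCultureIgnoreCase) >= 0)
                return (true, "BIORAD", "Verification");
        }
        // Count the wafers in the cassette ("Source: Slot" occurrences) to distinguish
        // single-wafer metrology scans from multi-wafer inspection scans.
        int waferCount = Regex.Matches(dataText, "Source: Slot").Count;
        if (waferCount == 1)
            return (true, "BIORAD", "Metrology Thickness");
        if (waferCount > 1)
            return (true, "BIORAD", "Inspection");
        return (false, string.Empty, string.Empty); // no wafers found; leave unclassified
    }
}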

View File

@@ -0,0 +1,13 @@
namespace Adaptation.Ifx.Eaf.Common.Configuration
{
[System.Runtime.Serialization.DataContractAttribute]
public class ConnectionSetting
{
public ConnectionSetting(string name, string value) { }
[System.Runtime.Serialization.DataMemberAttribute]
public string Name { get; set; }
[System.Runtime.Serialization.DataMemberAttribute]
public string Value { get; set; }
}
}

View File

@@ -0,0 +1,19 @@
using System;
using System.Collections.Generic;
namespace Adaptation.Ifx.Eaf.EquipmentConnector.File.Component
{
public class File
{
public File(string filePath) { throw new NotImplementedException(); }
public File(string filePath, DateTime timeFileFound) { throw new NotImplementedException(); }
public string Path { get; }
public DateTime TimeFound { get; }
public bool IsErrorFile { get; }
public Dictionary<string, string> ContentParameters { get; }
public File UpdateContentParameters(Dictionary<string, string> contentParameters) { throw new NotImplementedException(); }
public File UpdateParsingStatus(bool isErrorFile) { throw new NotImplementedException(); }
}
}

View File

@@ -0,0 +1,35 @@
using Adaptation.Ifx.Eaf.EquipmentConnector.File.Configuration;
using System;
using System.Collections.Generic;
namespace Adaptation.Ifx.Eaf.EquipmentConnector.File.Component
{
public class FilePathGenerator
{
public const char PLACEHOLDER_IDENTIFIER = '%';
public const char PLACEHOLDER_SEPARATOR = ':';
public const string PLACEHOLDER_NOT_AVAILABLE = "NA";
public const string PLACEHOLDER_ORIGINAL_FILE_NAME = "OriginalFileName";
public const string PLACEHOLDER_ORIGINAL_FILE_EXTENSION = "OriginalFileExtension";
public const string PLACEHOLDER_DATE_TIME = "DateTime";
public const string PLACEHOLDER_SUB_FOLDER = "SubFolder";
public const string PLACEHOLDER_CELL_NAME = "CellName";
public FilePathGenerator(FileConnectorConfiguration config, Dictionary<string, string> customPattern = null) { throw new NotImplementedException(); }
public FilePathGenerator(FileConnectorConfiguration config, File file, bool isErrorFile = false, Dictionary<string, string> customPattern = null) { throw new NotImplementedException(); }
public FilePathGenerator(FileConnectorConfiguration config, string sourceFilePath, bool isErrorFile = false, Dictionary<string, string> customPattern = null) { throw new NotImplementedException(); }
protected string SubFolderPath { get; }
protected FileConnectorConfiguration Configuration { get; }
protected File File { get; }
protected bool IsErrorFile { get; }
protected string DefaultPlaceHolderValue { get; }
public string GetFullTargetPath() { throw new NotImplementedException(); }
public virtual string GetTargetFileName() { throw new NotImplementedException(); }
public string GetTargetFolder(bool throwExceptionIfNotExist = true) { throw new NotImplementedException(); }
protected virtual string GetSubFolder(string folderPattern, string subFolderPath) { throw new NotImplementedException(); }
protected virtual string PrepareFolderPath(string targetFolderPath, string subFolderPath) { throw new NotImplementedException(); }
protected string ReplacePlaceholder(string inputPath) { throw new NotImplementedException(); }
}
}
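
The generator itself is stubbed here (the adaptation only needs the shape of the API), so the expansion behaviour is not shown. A minimal sketch of how %Name% and %Name:Format% tokens built from the PLACEHOLDER_* constants above might be expanded (assumed semantics, illustrative only):

using System;
using System.Collections.Generic;
using System.Text.RegularExpressions;
public static class PlaceholderExpansionSketch
{
    // Expands %Name% and %Name:Format% tokens (assumed semantics of the PLACEHOLDER_* constants above).
    // Unknown names fall back to "NA" (PLACEHOLDER_NOT_AVAILABLE); the real generator is supplied by the EAF runtime.
    public static string Expand(string pattern, DateTime timeFileFound, IReadOnlyDictionary<string, string> values)
    {
        return Regex.Replace(pattern, "%(?<name>[A-Za-z]+)(:(?<format>[^%]+))?%", match =>
        {
            string name = match.Groups["name"].Value;
            string format = match.Groups["format"].Value;
            if (name == "DateTime")
                return timeFileFound.ToString(string.IsNullOrEmpty(format) ? "yyyyMMddHHmmss" : format);
            return values.TryGetValue(name, out string value) ? value : "NA";
        });
    }
}

// Example (assumed usage):
//   Expand("%OriginalFileName%_%DateTime:yyyy-MM-dd%%OriginalFileExtension%", new DateTime(2022, 2, 1),
//       new Dictionary<string, string> { ["OriginalFileName"] = "DataBioRad", ["OriginalFileExtension"] = ".txt" })
//   returns "DataBioRad_2022-02-01.txt"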

View File

@@ -0,0 +1,135 @@
using Adaptation.Ifx.Eaf.Common.Configuration;
using System;
using System.Collections.Generic;
namespace Adaptation.Ifx.Eaf.EquipmentConnector.File.Configuration
{
[System.Runtime.Serialization.DataContractAttribute]
public class FileConnectorConfiguration
{
public const ulong IDLE_EVENT_WAIT_TIME_DEFAULT = 360;
public const ulong FILE_HANDLE_TIMEOUT_DEFAULT = 15;
[System.Runtime.Serialization.DataMemberAttribute]
public virtual bool? TriggerOnChanged { get; set; }
[System.Runtime.Serialization.DataMemberAttribute]
public virtual long? PostProcessingRetries { get; set; }
[System.Runtime.Serialization.DataMemberAttribute]
public virtual bool? CopySourceFolderStructure { get; set; }
[System.Runtime.Serialization.DataMemberAttribute]
public IfPostProcessingFailsEnum? IfPostProcessingFailsAction { get; set; }
[System.Runtime.Serialization.DataMemberAttribute]
public string AlternateTargetFolder { get; set; }
[System.Runtime.Serialization.DataMemberAttribute]
public long? FileHandleTimeout { get; set; }
[System.Runtime.Serialization.DataMemberAttribute]
public bool? DeleteEmptySourceSubFolders { get; set; }
[System.Runtime.Serialization.DataMemberAttribute]
public long? IdleEventWaitTimeInSeconds { get; set; }
[System.Runtime.Serialization.DataMemberAttribute]
public string FileAgeThreshold { get; set; }
public bool? FolderAgeCheckIndividualSubFolders { get; set; }
[System.Runtime.Serialization.DataMemberAttribute]
public virtual ZipModeEnum? ZipMode { get; set; }
[System.Runtime.Serialization.DataMemberAttribute]
public FileAgeFilterEnum? FileAgeFilterMode { get; set; }
[System.Runtime.Serialization.DataMemberAttribute]
public string ZipTargetFileName { get; set; }
[System.Runtime.Serialization.DataMemberAttribute]
public string ZipErrorTargetFileName { get; set; }
[System.Runtime.Serialization.DataMemberAttribute]
public long? ZipFileSubFolderLevel { get; set; }
[System.Runtime.Serialization.DataMemberAttribute]
public string DefaultPlaceHolderValue { get; set; }
[System.Runtime.Serialization.DataMemberAttribute]
public bool? UseZip64Mode { get; set; }
[System.Runtime.Serialization.DataMemberAttribute]
public List<ConnectionSetting> ConnectionSettings { get; set; }
public string SourceDirectoryCloaking { get; set; }
public string FolderAgeThreshold { get; set; }
[System.Runtime.Serialization.DataMemberAttribute]
public virtual long? FileScanningIntervalInSeconds { get; set; }
[System.Runtime.Serialization.DataMemberAttribute]
public virtual bool? TriggerOnCreated { get; set; }
[System.Runtime.Serialization.DataMemberAttribute]
public virtual long? ZipFileTime { get; set; }
[System.Runtime.Serialization.DataMemberAttribute]
public string SourceFileLocation { get; set; }
[System.Runtime.Serialization.DataMemberAttribute]
public string SourceFileFilter { get; set; }
public List<string> SourceFileFilters { get; set; }
[System.Runtime.Serialization.DataMemberAttribute]
public virtual bool? IncludeSubDirectories { get; set; }
[System.Runtime.Serialization.DataMemberAttribute]
public virtual FileScanningOptionEnum? FileScanningOption { get; set; }
[System.Runtime.Serialization.DataMemberAttribute]
public string TargetFileLocation { get; set; }
[System.Runtime.Serialization.DataMemberAttribute]
public string ErrorTargetFileLocation { get; set; }
[System.Runtime.Serialization.DataMemberAttribute]
public string TargetFileName { get; set; }
[System.Runtime.Serialization.DataMemberAttribute]
public virtual long? FileHandleWaitTime { get; set; }
[System.Runtime.Serialization.DataMemberAttribute]
public IfFileExistEnum? IfFileExistAction { get; set; }
[System.Runtime.Serialization.DataMemberAttribute]
public long? ConnectionRetryInterval { get; set; }
[System.Runtime.Serialization.DataMemberAttribute]
public PreProcessingModeEnum? PreProcessingMode { get; set; }
[System.Runtime.Serialization.DataMemberAttribute]
public PostProcessingModeEnum? PostProcessingMode { get; set; }
[System.Runtime.Serialization.DataMemberAttribute]
public PostProcessingModeEnum? ErrorPostProcessingMode { get; set; }
[System.Runtime.Serialization.DataMemberAttribute]
public virtual long? ZipFileAmount { get; set; }
[System.Runtime.Serialization.DataMemberAttribute]
public string ErrorTargetFileName { get; set; }
public void Initialize() { throw new NotImplementedException(); }
public enum PostProcessingModeEnum
{
None = 0,
Move = 1,
Copy = 2,
Rename = 3,
Zip = 4,
Delete = 5,
MoveFolder = 6,
CopyFolder = 7,
DeleteFolder = 8
}
public enum PreProcessingModeEnum
{
None = 0,
Process = 1
}
public enum IfFileExistEnum
{
Overwrite = 0,
LeaveFiles = 1,
Delete = 2
}
public enum IfPostProcessingFailsEnum
{
LeaveFiles = 0,
Delete = 1
}
public enum FileScanningOptionEnum
{
FileWatcher = 0,
TimeBased = 1
}
public enum ZipModeEnum
{
ZipByAmountOrTime = 0,
ZipByFileName = 1,
ZipBySubFolderName = 2
}
public enum FileAgeFilterEnum
{
IgnoreNewer = 0,
IgnoreOlder = 1
}
}
}
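
These settings drive the file connector's polling and post-processing. A minimal sketch of a time-based configuration (folder paths and values are illustrative assumptions, not taken from the changeset):

using Adaptation.Ifx.Eaf.EquipmentConnector.File.Configuration;
public static class FileConnectorConfigurationSketch
{
    public static FileConnectorConfiguration CreateTimeBased()
    {
        // Poll a source folder every 60 seconds and move consumed files to a target folder.
        return new FileConnectorConfiguration
        {
            SourceFileLocation = @"D:\BIORAD\RawData",          // illustrative path
            SourceFileFilter = "DataBioRad.txt",
            FileScanningOption = FileConnectorConfiguration.FileScanningOptionEnum.TimeBased,
            FileScanningIntervalInSeconds = 60,
            TargetFileLocation = @"D:\BIORAD\Target",            // illustrative path
            TargetFileName = "%OriginalFileName%%OriginalFileExtension%",
            ErrorTargetFileLocation = @"D:\BIORAD\Error",        // illustrative path
            ErrorTargetFileName = "%OriginalFileName%%OriginalFileExtension%",
            PostProcessingMode = FileConnectorConfiguration.PostProcessingModeEnum.Move,
            ErrorPostProcessingMode = FileConnectorConfiguration.PostProcessingModeEnum.Move,
            IfFileExistAction = FileConnectorConfiguration.IfFileExistEnum.Overwrite,
            IdleEventWaitTimeInSeconds = (long)FileConnectorConfiguration.IDLE_EVENT_WAIT_TIME_DEFAULT,
        };
    }
}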

View File

@@ -0,0 +1,14 @@
using Adaptation.Eaf.EquipmentCore.SelfDescription.ParameterTypes;
using System;
using System.Collections.Generic;
namespace Adaptation.Ifx.Eaf.EquipmentConnector.File.SelfDescription
{
public class FileConnectorParameterTypeDefinitionProvider
{
public FileConnectorParameterTypeDefinitionProvider() { }
public IEnumerable<ParameterTypeDefinition> GetAllParameterTypeDefinition() { return null; }
public ParameterTypeDefinition GetParameterTypeDefinition(string name) { return null; }
}
}

View File

@@ -0,0 +1,103 @@
<Project Sdk="Microsoft.NET.Sdk">
<PropertyGroup Label="Globals">
<SccProjectName>SAK</SccProjectName>
<SccProvider>SAK</SccProvider>
<SccAuxPath>SAK</SccAuxPath>
<SccLocalPath>SAK</SccLocalPath>
</PropertyGroup>
<PropertyGroup>
<TargetFramework>net6.0</TargetFramework>
<IsPackable>false</IsPackable>
</PropertyGroup>
<PropertyGroup>
<VSTestLogger>trx</VSTestLogger>
<VSTestResultsDirectory>../../../Trunk/MET08THFTIRSTRATUS/05_TestResults/TestResults</VSTestResultsDirectory>
</PropertyGroup>
<PropertyGroup>
<IsWindows Condition="'$([System.Runtime.InteropServices.RuntimeInformation]::IsOSPlatform($([System.Runtime.InteropServices.OSPlatform]::Windows)))' == 'true'">true</IsWindows>
<IsOSX Condition="'$([System.Runtime.InteropServices.RuntimeInformation]::IsOSPlatform($([System.Runtime.InteropServices.OSPlatform]::OSX)))' == 'true'">true</IsOSX>
<IsLinux Condition="'$([System.Runtime.InteropServices.RuntimeInformation]::IsOSPlatform($([System.Runtime.InteropServices.OSPlatform]::Linux)))' == 'true'">true</IsLinux>
</PropertyGroup>
<PropertyGroup Condition="'$(IsWindows)'=='true'">
<DefineConstants>Windows</DefineConstants>
</PropertyGroup>
<PropertyGroup Condition="'$(IsOSX)'=='true'">
<DefineConstants>OSX</DefineConstants>
</PropertyGroup>
<PropertyGroup Condition="'$(IsLinux)'=='true'">
<DefineConstants>Linux</DefineConstants>
</PropertyGroup>
<ItemGroup>
<PackageReference Include="coverlet.collector" Version="3.1.0"/>
<PackageReference Include="Microsoft.Extensions.Configuration.Binder" Version="6.0.0"/>
<PackageReference Include="Microsoft.Extensions.Configuration.CommandLine" Version="6.0.0"/>
<PackageReference Include="Microsoft.Extensions.Configuration.EnvironmentVariables" Version="6.0.0"/>
<PackageReference Include="Microsoft.Extensions.Configuration.FileExtensions" Version="6.0.0"/>
<PackageReference Include="Microsoft.Extensions.Configuration.json" Version="6.0.0"/>
<PackageReference Include="Microsoft.Extensions.Configuration" Version="6.0.0"/>
<PackageReference Include="Microsoft.Extensions.DependencyInjection" Version="6.0.0"/>
<PackageReference Include="Microsoft.Extensions.Logging.Console" Version="6.0.0"/>
<PackageReference Include="Microsoft.Extensions.Logging.Debug" Version="6.0.0"/>
<PackageReference Include="Microsoft.Extensions.Logging" Version="6.0.0"/>
<PackageReference Include="Microsoft.NET.Test.Sdk" Version="17.0.0"/>
<PackageReference Include="MSTest.TestAdapter" Version="2.2.8"/>
<PackageReference Include="MSTest.TestFramework" Version="2.2.8"/>
<PackageReference Include="System.Configuration.ConfigurationManager" Version="6.0.0"/>
<PackageReference Include="System.Data.OleDb" Version="6.0.0"/>
<PackageReference Include="System.Data.SqlClient" Version="4.8.3"/>
<PackageReference Include="System.Text.Json" Version="6.0.1"/>
</ItemGroup>
<ItemGroup>
<PackageReference Include="External.Common.Logging.Core" Version="3.3.1">
<NoWarn>NU1701</NoWarn>
</PackageReference>
<PackageReference Include="External.Common.Logging" Version="3.3.1">
<NoWarn>NU1701</NoWarn>
</PackageReference>
<PackageReference Include="External.Infineon.Monitoring.MonA" Version="1.2.0.1">
<NoWarn>NU1701</NoWarn>
</PackageReference>
<PackageReference Include="External.Infineon.Yoda" Version="5.2.1">
<NoWarn>NU1701</NoWarn>
</PackageReference>
<PackageReference Include="External.log4net" Version="2.0.8">
<NoWarn>NU1701</NoWarn>
</PackageReference>
</ItemGroup>
<ItemGroup>
<PackageReference Include="IKVM.AWT.WinForms" Version="7.2.4630.5">
<NoWarn>NU1701</NoWarn>
</PackageReference>
<PackageReference Include="IKVM.OpenJDK.Core" Version="7.2.4630.5">
<NoWarn>NU1701</NoWarn>
</PackageReference>
<PackageReference Include="IKVM.OpenJDK.Media" Version="7.2.4630.5">
<NoWarn>NU1701</NoWarn>
</PackageReference>
<PackageReference Include="IKVM.OpenJDK.Text" Version="7.2.4630.5">
<NoWarn>NU1701</NoWarn>
</PackageReference>
<PackageReference Include="IKVM.OpenJDK.Util" Version="7.2.4630.5">
<NoWarn>NU1701</NoWarn>
</PackageReference>
<PackageReference Include="IKVM.OpenJDK.XML.API" Version="7.2.4630.5">
<NoWarn>NU1701</NoWarn>
</PackageReference>
<PackageReference Include="IKVM.Runtime" Version="7.2.4630.5">
<NoWarn>NU1701</NoWarn>
</PackageReference>
<PackageReference Include="Pdfbox" Version="1.1.1">
<NoWarn>NU1701</NoWarn>
</PackageReference>
<PackageReference Include="RoboSharp" Version="1.2.4" />
<PackageReference Include="Tibco.Rendezvous" Version="8.5.0" />
</ItemGroup>
<ItemGroup>
<None Include="appsettings.json">
<CopyToOutputDirectory>Always</CopyToOutputDirectory>
</None>
<None Include="appsettings.Development.json">
<CopyToOutputDirectory>Always</CopyToOutputDirectory>
</None>
</ItemGroup>
</Project>

View File

@@ -0,0 +1,10 @@
using System;
namespace Adaptation.PeerGroup.GCL.Annotations
{
[AttributeUsage(AttributeTargets.Method | AttributeTargets.Property | AttributeTargets.Field | AttributeTargets.Event | AttributeTargets.Parameter | AttributeTargets.Delegate, AllowMultiple = false, Inherited = true)]
public sealed class NotNullAttribute : Attribute
{
public NotNullAttribute() { }
}
}

View File

@@ -0,0 +1,8 @@
namespace Adaptation.PeerGroup.GCL.SecsDriver
{
public enum HsmsConnectionMode
{
Active = 0,
Passive = 1
}
}

View File

@@ -0,0 +1,8 @@
namespace Adaptation.PeerGroup.GCL.SecsDriver
{
public enum HsmsSessionMode
{
MultiSession = 0,
SingleSession = 1
}
}

View File

@@ -0,0 +1,8 @@
namespace Adaptation.PeerGroup.GCL.SecsDriver
{
public enum SecsTransportType
{
HSMS = 0,
Serial = 1
}
}

View File

@@ -0,0 +1,16 @@
namespace Adaptation.PeerGroup.GCL.SecsDriver
{
public enum SerialBaudRate
{
Baud9600 = 0,
Baud19200 = 1,
Baud4800 = 2,
Baud2400 = 3,
Baud1200 = 4,
Baud300 = 5,
Baud150 = 6,
Baud38400 = 7,
Baud57600 = 8,
Baud115200 = 9
}
}

View File

@@ -0,0 +1,148 @@
using Adaptation.Shared.Methods;
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text.Json;
namespace Adaptation.Shared.Duplicator
{
public class Description : IDescription, Properties.IDescription
{
public int Test { get; set; }
public int Count { get; set; }
public int Index { get; set; }
//
public string EventName { get; set; }
public string NullData { get; set; }
public string JobID { get; set; }
public string Sequence { get; set; }
public string MesEntity { get; set; }
public string ReportFullPath { get; set; }
public string ProcessJobID { get; set; }
public string MID { get; set; }
public string Date { get; set; } //2021-10-23
string IDescription.GetEventDescription()
{
return "File Has been read and parsed";
}
List<string> IDescription.GetNames(IFileRead fileRead, Logistics logistics)
{
List<string> results = new();
IDescription description = GetDefault(fileRead, logistics);
string json = JsonSerializer.Serialize(description, description.GetType());
object @object = JsonSerializer.Deserialize<object>(json);
if (@object is not JsonElement jsonElement)
throw new Exception();
foreach (JsonProperty jsonProperty in jsonElement.EnumerateObject())
results.Add(jsonProperty.Name);
return results;
}
List<string> IDescription.GetDetailNames()
{
List<string> results = new();
return results;
}
List<string> IDescription.GetHeaderNames()
{
List<string> results = new();
return results;
}
IDescription IDescription.GetDisplayNames()
{
Description result = GetDisplayNames();
return result;
}
List<string> IDescription.GetParameterNames()
{
List<string> results = new();
return results;
}
JsonProperty[] IDescription.GetDefault(IFileRead fileRead, Logistics logistics)
{
JsonProperty[] results;
IDescription description = GetDefault(fileRead, logistics);
string json = JsonSerializer.Serialize(description, description.GetType());
object @object = JsonSerializer.Deserialize<object>(json);
results = ((JsonElement)@object).EnumerateObject().ToArray();
return results;
}
List<string> IDescription.GetPairedParameterNames()
{
List<string> results = new();
return results;
}
List<string> IDescription.GetIgnoreParameterNames(Test test)
{
List<string> results = new();
return results;
}
IDescription IDescription.GetDefaultDescription(IFileRead fileRead, Logistics logistics)
{
Description result = GetDefault(fileRead, logistics);
return result;
}
Dictionary<string, string> IDescription.GetDisplayNamesJsonElement(IFileRead fileRead)
{
Dictionary<string, string> results = new();
IDescription description = GetDisplayNames();
string json = JsonSerializer.Serialize(description, description.GetType());
JsonElement jsonElement = JsonSerializer.Deserialize<JsonElement>(json);
foreach (JsonProperty jsonProperty in jsonElement.EnumerateObject())
{
if (!results.ContainsKey(jsonProperty.Name))
results.Add(jsonProperty.Name, string.Empty);
if (jsonProperty.Value is JsonElement jsonPropertyValue)
results[jsonProperty.Name] = jsonPropertyValue.ToString();
}
return results;
}
List<IDescription> IDescription.GetDescriptions(IFileRead fileRead, Logistics logistics, List<Test> tests, IProcessData iProcessData)
{
List<IDescription> results = new();
return results;
}
private Description GetDisplayNames()
{
Description result = new();
return result;
}
private Description GetDefault(IFileRead fileRead, Logistics logistics)
{
Description result = new()
{
Test = -1,
Count = 0,
Index = -1,
//
EventName = fileRead.EventName,
NullData = fileRead.NullData,
JobID = fileRead.CellInstanceName,
Sequence = logistics.Sequence.ToString(),
MesEntity = fileRead.MesEntity,
ReportFullPath = logistics.ReportFullPath,
ProcessJobID = logistics.ProcessJobID,
MID = logistics.MID,
Date = logistics.DateTimeFromSequence.ToUniversalTime().ToString("MM/dd/yyyy HH:mm:ss")
};
return result;
}
}
}
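
GetNames and GetDefault above discover column names by round-tripping a description through System.Text.Json and enumerating the resulting JSON properties. A minimal standalone sketch of the same technique (illustrative only):

using System.Collections.Generic;
using System.Text.Json;
public static class PropertyNameSketch
{
    // Serializes any object and reads back its top-level JSON property names,
    // the same round-trip used by IDescription.GetNames above.
    public static List<string> GetJsonPropertyNames(object instance)
    {
        List<string> results = new();
        string json = JsonSerializer.Serialize(instance, instance.GetType());
        JsonElement jsonElement = JsonSerializer.Deserialize<JsonElement>(json);
        foreach (JsonProperty jsonProperty in jsonElement.EnumerateObject())
            results.Add(jsonProperty.Name);
        return results;
    }
}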

View File

@@ -0,0 +1,834 @@
using Adaptation.Eaf.Management.ConfigurationData.CellAutomation;
using Adaptation.Ifx.Eaf.EquipmentConnector.File.Configuration;
using Adaptation.Shared.Methods;
using log4net;
using System;
using System.Collections.Generic;
using System.Globalization;
using System.IO;
using System.Linq;
using System.Text;
using System.Text.Json;
using System.Text.Json.Serialization;
using System.Threading;
namespace Adaptation.Shared
{
public class FileRead : Properties.IFileRead
{
protected string _NullData;
protected readonly ILog _Log;
protected long _MinFileLength;
protected Logistics _Logistics;
protected readonly ISMTP _SMTP;
protected readonly int _Hyphens;
protected readonly bool _IsEvent;
protected string _ReportFullPath;
protected long _LastTicksDuration;
protected readonly bool _IsEAFHosted;
protected readonly string _EventName;
protected readonly string _MesEntity;
protected readonly string _TracePath;
protected readonly bool _IsDuplicator;
protected readonly Calendar _Calendar;
protected readonly bool _IsSourceTimer;
protected readonly string _VillachPath;
protected readonly int _HyphenIsArchive;
protected readonly string _ProgressPath;
protected readonly string _EquipmentType;
protected readonly int _HyphenIsXToArchive;
protected readonly long _BreakAfterSeconds;
protected readonly string _ExceptionSubject;
protected readonly string _CellInstanceName;
protected readonly string _EventNameFileRead;
protected readonly IDescription _Description;
protected readonly bool _UseCyclicalForDescription;
protected readonly string _CellInstanceConnectionName;
protected readonly string _CellInstanceConnectionNameBase;
protected readonly Dictionary<string, List<long>> _DummyRuns;
protected readonly Dictionary<string, string> _FileParameter;
protected readonly string _ParameterizedModelObjectDefinitionType;
protected readonly FileConnectorConfiguration _FileConnectorConfiguration;
protected readonly IList<ModelObjectParameterDefinition> _ModelObjectParameterDefinitions;
bool Properties.IFileRead.IsEvent => _IsEvent;
string Properties.IFileRead.NullData => _NullData;
string Properties.IFileRead.EventName => _EventName;
string Properties.IFileRead.MesEntity => _MesEntity;
bool Properties.IFileRead.IsEAFHosted => _IsEAFHosted;
string Properties.IFileRead.EquipmentType => _EquipmentType;
string Properties.IFileRead.ReportFullPath => _ReportFullPath;
string Properties.IFileRead.CellInstanceName => _CellInstanceName;
string Properties.IFileRead.ExceptionSubject => _ExceptionSubject;
bool Properties.IFileRead.UseCyclicalForDescription => _UseCyclicalForDescription;
string Properties.IFileRead.CellInstanceConnectionName => _CellInstanceConnectionName;
string Properties.IFileRead.ParameterizedModelObjectDefinitionType => _ParameterizedModelObjectDefinitionType;
public FileRead(IDescription description, bool isEvent, ISMTP smtp, Dictionary<string, string> fileParameter, string cellInstanceName, string cellInstanceConnectionName, FileConnectorConfiguration fileConnectorConfiguration, string equipmentTypeName, string parameterizedModelObjectDefinitionType, IList<ModelObjectParameterDefinition> modelObjectParameters, string equipmentDictionaryName, Dictionary<string, List<long>> dummyRuns, bool useCyclicalForDescription, bool isEAFHosted, int hyphenXToArchive, int hyphenIsArchive)
{
_SMTP = smtp;
_IsEvent = isEvent;
_DummyRuns = dummyRuns;
_LastTicksDuration = 0;
_IsEAFHosted = isEAFHosted;
_Description = description;
_FileParameter = fileParameter;
_ReportFullPath = string.Empty;
_HyphenIsArchive = hyphenIsArchive;
_CellInstanceName = cellInstanceName;
_HyphenIsXToArchive = hyphenXToArchive;
_Calendar = new CultureInfo("en-US").Calendar;
_Log = LogManager.GetLogger(typeof(FileRead));
_UseCyclicalForDescription = useCyclicalForDescription;
_CellInstanceConnectionName = cellInstanceConnectionName;
_ModelObjectParameterDefinitions = modelObjectParameters;
_FileConnectorConfiguration = fileConnectorConfiguration;
_ParameterizedModelObjectDefinitionType = parameterizedModelObjectDefinitionType;
_IsSourceTimer = (fileConnectorConfiguration.SourceFileFilter.StartsWith("*Timer.txt"));
string cellInstanceConnectionNameBase = cellInstanceConnectionName.Replace("-", string.Empty);
_Hyphens = (cellInstanceConnectionName.Length - cellInstanceConnectionNameBase.Length);
_ExceptionSubject = string.Concat("Exception:", _CellInstanceConnectionName, _FileConnectorConfiguration?.SourceDirectoryCloaking);
string suffix;
string[] segments = _ParameterizedModelObjectDefinitionType.Split('.');
string @namespace = segments[0];
string eventNameFileRead = "FileRead";
string eventName = segments[segments.Length - 1];
bool isDuplicator = segments[0] == cellInstanceName;
_IsDuplicator = isDuplicator;
_CellInstanceConnectionNameBase = cellInstanceConnectionNameBase;
if (eventName == eventNameFileRead)
suffix = string.Empty;
else
suffix = string.Concat('_', eventName.Split(new string[] { eventNameFileRead }, StringSplitOptions.RemoveEmptyEntries)[1]);
string parameterizedModelObjectDefinitionTypeAppended = string.Concat(@namespace, suffix);
if (!isEAFHosted)
{
if (string.IsNullOrEmpty(equipmentTypeName) || equipmentTypeName != parameterizedModelObjectDefinitionTypeAppended)
throw new Exception(cellInstanceConnectionName);
if (string.IsNullOrEmpty(equipmentDictionaryName) && isEvent)
throw new Exception(cellInstanceConnectionName);
if (!string.IsNullOrEmpty(equipmentDictionaryName) && !isEvent)
throw new Exception(cellInstanceConnectionName);
// if (string.IsNullOrEmpty(equipmentDictionaryName) && !isEvent)
// throw new Exception(cellInstanceConnectionName);
// if (!string.IsNullOrEmpty(equipmentDictionaryName) && isEvent)
// throw new Exception(cellInstanceConnectionName);
}
ModelObjectParameterDefinition[] paths = GetProperties(cellInstanceConnectionName, modelObjectParameters, "Path.");
if (paths.Length < 4)
throw new Exception(cellInstanceConnectionName);
if (isDuplicator)
_MesEntity = string.Empty;
else
_MesEntity = GetPropertyValue(cellInstanceConnectionName, modelObjectParameters, string.Concat("CellInstance.", cellInstanceName, ".Alias"));
_TracePath = (from l in paths where l.Name.EndsWith("Trace") select l.Value).FirstOrDefault();
_VillachPath = (from l in paths where l.Name.EndsWith("Villach") select l.Value).FirstOrDefault();
_ProgressPath = (from l in paths where l.Name.EndsWith("Progress") select l.Value).FirstOrDefault();
_EventName = eventName;
_EventNameFileRead = eventNameFileRead;
_EquipmentType = parameterizedModelObjectDefinitionTypeAppended;
long breakAfterSeconds;
if (_FileConnectorConfiguration is null)
breakAfterSeconds = 360;
else
{
if (_FileConnectorConfiguration.FileScanningOption == FileConnectorConfiguration.FileScanningOptionEnum.TimeBased)
breakAfterSeconds = 360;
else
breakAfterSeconds = Math.Abs(_FileConnectorConfiguration.FileScanningIntervalInSeconds.Value);
}
_BreakAfterSeconds = breakAfterSeconds;
UpdateLastTicksDuration(breakAfterSeconds * 10000000);
if (_IsDuplicator)
{
if (string.IsNullOrEmpty(_FileConnectorConfiguration.TargetFileLocation) || string.IsNullOrEmpty(_FileConnectorConfiguration.ErrorTargetFileLocation))
throw new Exception("_Configuration is empty?");
if (_FileConnectorConfiguration.TargetFileLocation.Contains('%') || _FileConnectorConfiguration.ErrorTargetFileLocation.Contains('%'))
throw new Exception("_Configuration is incorrect for a duplicator!");
if (!(_FileConnectorConfiguration is null))
{
if (string.IsNullOrEmpty(_FileConnectorConfiguration.SourceDirectoryCloaking))
throw new Exception("SourceDirectoryCloaking is empty?");
if (!_FileConnectorConfiguration.SourceDirectoryCloaking.StartsWith("~"))
throw new Exception("SourceDirectoryCloaking is incorrect for a duplicator!");
}
}
}
protected string GetPropertyValue(string cellInstanceConnectionName, IList<ModelObjectParameterDefinition> modelObjectParameters, string propertyName)
{
string result;
List<string> results = (from l in modelObjectParameters where l.Name == propertyName select l.Value).ToList();
if (results.Count != 1)
throw new Exception(cellInstanceConnectionName);
result = results[0];
return result;
}
protected ModelObjectParameterDefinition[] GetProperties(string cellInstanceConnectionName, IList<ModelObjectParameterDefinition> modelObjectParameters, string propertyNamePrefix)
{
ModelObjectParameterDefinition[] results = (from l in modelObjectParameters where l.Name.StartsWith(propertyNamePrefix) select l).ToArray();
if (!results.Any())
throw new Exception(cellInstanceConnectionName);
return results;
}
protected ModelObjectParameterDefinition[] GetProperties(string cellInstanceConnectionName, IList<ModelObjectParameterDefinition> modelObjectParameters, string propertyNamePrefix, string propertyNameSuffix)
{
ModelObjectParameterDefinition[] results = (from l in modelObjectParameters where l.Name.StartsWith(propertyNamePrefix) && l.Name.EndsWith(propertyNameSuffix) select l).ToArray();
if (!results.Any())
throw new Exception(cellInstanceConnectionName);
return results;
}
protected void UpdateLastTicksDuration(long ticksDuration)
{
if (ticksDuration < 50000000)
ticksDuration = 50000000;
_LastTicksDuration = (long)Math.Ceiling(ticksDuration * .667);
}
protected void WaitForThread(Thread thread, List<Exception> threadExceptions)
{
if (!(thread is null))
{
ThreadState threadState;
for (short i = 0; i < short.MaxValue; i++)
{
if (thread is null)
break;
else
{
threadState = thread.ThreadState;
if (threadState != ThreadState.Running && threadState != ThreadState.WaitSleepJoin)
break;
}
Thread.Sleep(500);
}
lock (threadExceptions)
{
if (threadExceptions.Any())
{
foreach (Exception item in threadExceptions)
_Log.Error(string.Concat(item.Message, Environment.NewLine, Environment.NewLine, item.StackTrace));
Exception exception = threadExceptions[0];
threadExceptions.Clear();
throw exception;
}
}
}
}
protected void CreateProgressDirectory(string progressPath, Logistics logistics, int? duplicator, string[] exceptionLines)
{
string progressDirectory;
StringBuilder stringBuilder = new();
if (duplicator is null || duplicator.Value == 0)
progressDirectory = string.Concat(progressPath, @"\EquipmentIntegration");
else
{
stringBuilder.Clear();
for (int i = 0; i < duplicator.Value; i++)
{
if (i > 0 && (i % 2) == 0)
stringBuilder.Append(' ');
stringBuilder.Append('-');
}
progressDirectory = string.Concat(progressPath, @"\", (duplicator.Value + 1).ToString().PadLeft(2, '0'), " ", stringBuilder).Trim();
}
DateTime dateTime = DateTime.Now;
string weekOfYear = _Calendar.GetWeekOfYear(dateTime, CalendarWeekRule.FirstDay, DayOfWeek.Sunday).ToString("00");
progressDirectory = string.Concat(progressDirectory, @"\", dateTime.ToString("yyyy"), "_Week_", weekOfYear, @"\", logistics.MID, "_", logistics.Sequence, "_", DateTime.Now.Ticks - logistics.Sequence);
if (!Directory.Exists(progressDirectory))
Directory.CreateDirectory(progressDirectory);
if (!(exceptionLines is null))
{
string fileName = string.Concat(progressDirectory, @"\readme.txt");
try
{ File.WriteAllLines(fileName, exceptionLines); }
catch (Exception) { }
}
}
protected string[] Move(Tuple<string, Test[], JsonElement[], List<FileInfo>> extractResults, string to, string from, string resolvedFileLocation, Exception exception)
{
string[] results;
bool isErrorFile = !(exception is null);
if (!to.EndsWith(@"\"))
string.Concat(to, @"\");
if (!isErrorFile)
results = new string[] { };
else
{
results = new string[] { _Logistics.Sequence.ToString(), _Logistics.ReportFullPath, from, resolvedFileLocation, to, string.Empty, string.Empty, exception.Message, string.Empty, string.Empty, exception.StackTrace };
Shared0449(to, results);
}
if (!(extractResults is null) && !(extractResults.Item4 is null) && extractResults.Item4.Any())
{
string itemFile;
List<string> directories = new();
foreach (FileInfo sourceFile in extractResults.Item4)
{
if (sourceFile.FullName != _Logistics.ReportFullPath)
{
itemFile = sourceFile.FullName.Replace(from, to);
Shared1880(itemFile, directories, sourceFile, isErrorFile);
}
else if (!isErrorFile && !(_Logistics is null))
Shared1811(to, sourceFile);
}
Shared0231(directories);
}
return results;
}
protected IEnumerable<string> GetDirectoriesRecursively(string path, string directoryNameSegment = null)
{
Queue<string> queue = new();
queue.Enqueue(path);
while (queue.Count > 0)
{
path = queue.Dequeue();
foreach (string subDirectory in Directory.GetDirectories(path))
{
queue.Enqueue(subDirectory);
if (string.IsNullOrEmpty(directoryNameSegment) || Path.GetFileName(subDirectory).Contains(directoryNameSegment))
yield return subDirectory;
}
}
}
protected string GetProcessedDirectory(string progressPath, Logistics logistics, DateTime dateTime, string duplicateDirectory)
{
string result = duplicateDirectory;
string logisticsSequence = logistics.Sequence.ToString();
string[] matchDirectories;
if (!_IsEAFHosted)
matchDirectories = new string[] { Path.GetDirectoryName(Path.GetDirectoryName(logistics.ReportFullPath)) };
else
matchDirectories = new string[] { GetDirectoriesRecursively(Path.GetDirectoryName(progressPath), logisticsSequence).FirstOrDefault() };
if (matchDirectories.Length == 0 || string.IsNullOrEmpty(matchDirectories[0]))
matchDirectories = Directory.GetDirectories(duplicateDirectory, string.Concat('*', logisticsSequence, '*'), SearchOption.AllDirectories);
if ((matchDirectories is null) || matchDirectories.Length != 1)
throw new Exception("Didn't find directory by logistics sequence");
if (!matchDirectories[0].Contains("_processed"))
{
result = string.Concat(matchDirectories[0].Split(new string[] { logisticsSequence }, StringSplitOptions.None)[0], logistics.DateTimeFromSequence.ToString("yyyy-MM-dd_hh;mm_tt_"), dateTime.Ticks - logistics.Sequence, "_processed");
Directory.Move(matchDirectories[0], result);
result = string.Concat(result, @"\", logistics.Sequence);
if (!Directory.Exists(result))
Directory.CreateDirectory(result);
}
return result;
}
protected string WriteScopeInfo(string progressPath, Logistics logistics, DateTime dateTime, string duplicateDirectory, List<Tuple<Properties.IScopeInfo, string>> tuples)
{
string result = GetProcessedDirectory(progressPath, logistics, dateTime, duplicateDirectory);
string tupleFile;
string fileName = Path.GetFileNameWithoutExtension(logistics.ReportFullPath);
string duplicateFile = string.Concat(result, @"\", fileName, ".pdsf");
foreach (Tuple<Properties.IScopeInfo, string> tuple in tuples)
{
if (tuple.Item1.FileName.StartsWith(@"\"))
tupleFile = tuple.Item1.FileName;
else
tupleFile = string.Concat(result, @"\", fileName, "_", tuple.Item1.FileNameWithoutExtension, ".pdsfc");
File.WriteAllText(tupleFile, tuple.Item2);
}
File.Copy(logistics.ReportFullPath, duplicateFile, overwrite: true);
return result;
}
protected string GetTupleFile(Logistics logistics, Properties.IScopeInfo scopeInfo, string duplicateDirectory)
{
string result;
string rds;
string dateValue;
string datePlaceholder;
string[] segments = logistics.MID.Split('-');
if (segments.Length < 2)
rds = "%RDS%";
else
rds = segments[1];
segments = scopeInfo.FileName.Split(new string[] { "DateTime:" }, StringSplitOptions.RemoveEmptyEntries);
if (segments.Length == 0)
result = string.Concat(duplicateDirectory, @"\", scopeInfo.FileNameWithoutExtension.Replace("%RDS%", rds));
else
{
datePlaceholder = "%DateTime%";
segments = segments[1].Split('%');
dateValue = logistics.DateTimeFromSequence.ToString(segments[0]);
foreach (string segment in scopeInfo.FileName.Split('%'))
{
if (!segment.Contains(segments[0]))
continue;
datePlaceholder = string.Concat('%', segment, '%');
}
result = string.Concat(duplicateDirectory, @"\", scopeInfo.FileName.Replace("%RDS%", rds).Replace(datePlaceholder, dateValue));
}
if (result.Contains('%'))
throw new Exception("Placeholder exists!");
return result;
}
protected void WaitForFileConsumption(string sourceDirectoryCloaking, Logistics logistics, DateTime dateTime, string successDirectory, string duplicateDirectory, string duplicateFile, List<Tuple<Properties.IScopeInfo, string>> tuples)
{
bool check;
long preWait;
string tupleFile;
List<int> consumedFileIndices = new();
List<string> duplicateFiles = new();
bool moreThanAnHour = (_BreakAfterSeconds > 3600);
StringBuilder stringBuilder = new();
long breakAfter = dateTime.AddSeconds(_BreakAfterSeconds).Ticks;
if (moreThanAnHour)
preWait = dateTime.AddSeconds(30).Ticks;
else
preWait = dateTime.AddTicks(_LastTicksDuration).Ticks;
if (!tuples.Any())
duplicateFiles.Add(duplicateFile);
string fileName = Path.GetFileNameWithoutExtension(logistics.ReportFullPath);
string successFile = string.Concat(successDirectory, @"\", Path.GetFileName(logistics.ReportFullPath));
foreach (Tuple<Properties.IScopeInfo, string> tuple in tuples)
{
if (tuple.Item1.FileName.StartsWith(@"\"))
tupleFile = tuple.Item1.FileName;
else if (!tuple.Item1.FileName.Contains('%'))
tupleFile = string.Concat(duplicateDirectory, @"\", fileName, "_", tuple.Item1.FileNameWithoutExtension, ".pdsfc");
else
tupleFile = GetTupleFile(logistics, tuple.Item1, duplicateDirectory);
duplicateFiles.Add(tupleFile);
File.WriteAllText(tupleFile, tuple.Item2);
}
for (short i = 0; i < short.MaxValue; i++)
{
if (DateTime.Now.Ticks > preWait)
break;
Thread.Sleep(500);
}
if (!moreThanAnHour)
{
for (short z = 0; z < short.MaxValue; z++)
{
try
{
check = (string.IsNullOrEmpty(successDirectory) || File.Exists(successFile));
if (check)
{
consumedFileIndices.Clear();
for (int i = 0; i < duplicateFiles.Count; i++)
{
if (!File.Exists(duplicateFiles[i]))
consumedFileIndices.Add(i);
}
if (consumedFileIndices.Count == duplicateFiles.Count)
break;
}
}
catch (Exception) { }
if (DateTime.Now.Ticks > breakAfter)
{
for (int i = 0; i < duplicateFiles.Count; i++)
{
if (File.Exists(duplicateFiles[i]))
{
try
{ File.Delete(duplicateFiles[i]); }
catch (Exception) { }
stringBuilder.Append("<").Append(duplicateFiles[i]).Append("> ");
}
}
throw new Exception(string.Concat("After {", _BreakAfterSeconds, "} seconds, right side of {", sourceDirectoryCloaking, "} didn't consume file(s) ", stringBuilder));
}
Thread.Sleep(500);
}
}
}
protected void SetFileParameter(string key, string value)
{
if (_FileConnectorConfiguration is null || _FileConnectorConfiguration.TargetFileLocation.Contains(string.Concat("%", key, "%")) || _FileConnectorConfiguration.ErrorTargetFileLocation.Contains(string.Concat("%", key, "%")) || _FileConnectorConfiguration.TargetFileName.Contains(string.Concat("%", key, "%")) || _FileConnectorConfiguration.ErrorTargetFileName.Contains(string.Concat("%", key, "%")))
{
if (_FileParameter.ContainsKey(key))
_FileParameter[key] = value;
else
_FileParameter.Add(key, value);
}
}
protected void SetFileParameterLotIDToLogisticsMID(bool includeLogisticsSequence = true)
{
string key;
if (!includeLogisticsSequence)
key = "LotID";
else
key = "LotIDWithLogisticsSequence";
string value = string.Concat(_Logistics.MID, "_", _Logistics.Sequence, "_", DateTime.Now.Ticks - _Logistics.Sequence);
SetFileParameter(key, value);
}
protected void SetFileParameterLotID(string value, bool includeLogisticsSequence = true)
{
string key;
if (!includeLogisticsSequence)
key = "LotID";
else
{
key = "LotIDWithLogisticsSequence";
value = string.Concat(value, "_", _Logistics.Sequence, "_", DateTime.Now.Ticks - _Logistics.Sequence);
}
SetFileParameter(key, value);
}
protected void WritePDSF(IFileRead fileRead, JsonElement[] jsonElements)
{
string directory;
if (!_CellInstanceConnectionName.StartsWith(_CellInstanceName) && _CellInstanceConnectionNameBase == _EquipmentType)
directory = Path.Combine(_VillachPath, _EquipmentType, "Target");
else
directory = Path.Combine(_TracePath, _EquipmentType, "Source", _CellInstanceName, _CellInstanceConnectionName);
if (!Directory.Exists(directory))
Directory.CreateDirectory(directory);
string file = Path.Combine(directory, string.Concat(_Logistics.MesEntity, "_", _Logistics.Sequence, ".ipdsf"));
string lines = ProcessDataStandardFormat.GetPDSFText(fileRead, _Logistics, jsonElements, logisticsText: string.Empty);
File.WriteAllText(file, lines);
if (_Logistics.TotalSecondsSinceLastWriteTimeFromSequence > 600)
{
try
{ File.SetLastWriteTime(file, _Logistics.DateTimeFromSequence); }
catch (Exception) { }
}
}
protected void Move(IFileRead fileRead, Tuple<string, Test[], JsonElement[], List<FileInfo>> extractResults, Exception exception)
{
bool isErrorFile = !(exception is null);
if (!isErrorFile && _IsDuplicator)
{
if (_Hyphens == _HyphenIsXToArchive)
Shared0192();
else if (_IsEAFHosted && _Hyphens == _HyphenIsArchive)
fileRead.MoveArchive();
if (_IsEAFHosted && !string.IsNullOrEmpty(_ProgressPath))
CreateProgressDirectory(_ProgressPath, _Logistics, _Hyphens, exceptionLines: null);
}
if (!_IsEAFHosted)
{
string to;
if (!_FileConnectorConfiguration.TargetFileLocation.EndsWith(Path.DirectorySeparatorChar.ToString()))
to = _FileConnectorConfiguration.TargetFileLocation;
else
to = Path.GetDirectoryName(_FileConnectorConfiguration.TargetFileLocation);
foreach (KeyValuePair<string, string> keyValuePair in _FileParameter)
to = to.Replace(string.Concat('%', keyValuePair.Key, '%'), keyValuePair.Value);
if (to.Contains("%"))
_Log.Debug("Can't debug without EAF Hosting");
else
Move(extractResults, to, _FileConnectorConfiguration.SourceFileLocation, resolvedFileLocation: string.Empty, exception: null);
}
}
protected void TriggerEvents(Tuple<string, Test[], JsonElement[], List<FileInfo>> extractResults, List<string> headerNames, Dictionary<string, string> keyValuePairs)
{
object value;
string description;
List<object[]> list;
for (int i = 0; i < extractResults.Item3.Length; i++)
{
_Log.Debug(string.Concat("TriggerEvent - {", _Logistics.ReportFullPath, "} ", i, " of ", extractResults.Item3.Length));
foreach (JsonProperty jsonProperty in extractResults.Item3[i].EnumerateObject())
{
if (jsonProperty.Value.ValueKind != JsonValueKind.String || !keyValuePairs.ContainsKey(jsonProperty.Name))
description = string.Empty;
else
description = keyValuePairs[jsonProperty.Name].Split('|')[0];
if (!_UseCyclicalForDescription || headerNames.Contains(jsonProperty.Name))
value = jsonProperty.Value.ToString();
else
{
list = new List<object[]>();
for (int z = 0; z < extractResults.Item3.Length; z++)
list.Add(new object[] { z, extractResults.Item3[z].GetProperty(jsonProperty.Name).ToString() });
value = list;
}
}
if (_UseCyclicalForDescription)
break;
}
}
protected Tuple<string, Test[], JsonElement[], List<FileInfo>> ReExtract(IFileRead fileRead, List<string> headerNames, Dictionary<string, string> keyValuePairs)
{
Tuple<string, Test[], JsonElement[], List<FileInfo>> results;
if (!Directory.Exists(_FileConnectorConfiguration.SourceFileLocation))
results = null;
else
{
string[] segments;
string[] matches = null;
foreach (string subSourceFileFilter in _FileConnectorConfiguration.SourceFileFilters)
{
segments = subSourceFileFilter.Split('\\');
if (_FileConnectorConfiguration.IncludeSubDirectories.Value)
matches = Directory.GetFiles(_FileConnectorConfiguration.SourceFileLocation, segments.Last(), SearchOption.AllDirectories);
else
matches = Directory.GetFiles(_FileConnectorConfiguration.SourceFileLocation, segments.Last(), SearchOption.TopDirectoryOnly);
if (matches.Any())
break;
}
if (matches is null || !matches.Any())
results = null;
else
{
_ReportFullPath = matches[0];
results = fileRead.GetExtractResult(_ReportFullPath, _EventName);
if (!_IsEAFHosted)
TriggerEvents(results, headerNames, keyValuePairs);
}
}
return results;
}
protected Dictionary<Test, List<Properties.IDescription>> GetKeyValuePairs(List<Properties.IDescription> descriptions)
{
Dictionary<Test, List<Properties.IDescription>> results = new();
Test testKey;
for (int i = 0; i < descriptions.Count; i++)
{
testKey = (Test)descriptions[i].Test;
if (!results.ContainsKey(testKey))
results.Add(testKey, new List<Properties.IDescription>());
results[testKey].Add(descriptions[i]);
}
return results;
}
protected List<Properties.IDescription> GetDuplicatorDescriptions(JsonElement[] jsonElements)
{
List<Properties.IDescription> results = new();
Duplicator.Description description;
JsonSerializerOptions jsonSerializerOptions = new() { NumberHandling = JsonNumberHandling.AllowReadingFromString | JsonNumberHandling.WriteAsString };
foreach (JsonElement jsonElement in jsonElements)
{
if (jsonElement.ValueKind != JsonValueKind.Object)
throw new Exception();
description = JsonSerializer.Deserialize<Duplicator.Description>(jsonElement.ToString(), jsonSerializerOptions);
results.Add(description);
}
return results;
}
protected Tuple<Test[], Dictionary<Test, List<Properties.IDescription>>> GetTuple(IFileRead fileRead, IEnumerable<Properties.IDescription> descriptions, bool extra = false)
{
Tuple<Test[], Dictionary<Test, List<Properties.IDescription>>> result;
Dictionary<Test, List<Properties.IDescription>> keyValuePairs = GetKeyValuePairs(descriptions.ToList());
Test[] tests = (from l in keyValuePairs select l.Key).ToArray();
fileRead.CheckTests(tests, extra);
result = new Tuple<Test[], Dictionary<Test, List<Properties.IDescription>>>(tests, keyValuePairs);
return result;
}
protected void Shared0449(string to, string[] exceptionLines)
{
if (_IsDuplicator)
CreateProgressDirectory(_ProgressPath, _Logistics, _Hyphens, exceptionLines);
else
{
string fileName = string.Concat(to, @"\readme.txt");
try
{
if (!Directory.Exists(to))
Directory.CreateDirectory(to);
File.WriteAllLines(fileName, exceptionLines);
}
catch (Exception ex) { _Log.Error(ex.Message); }
}
}
protected void Shared1880(string itemFile, List<string> directories, FileInfo sourceFile, bool isErrorFile)
{
string itemDirectory;
directories.Add(Path.GetDirectoryName(sourceFile.FullName));
itemDirectory = Path.GetDirectoryName(itemFile);
FileConnectorConfiguration.PostProcessingModeEnum processingModeEnum;
if (!isErrorFile)
processingModeEnum = _FileConnectorConfiguration.PostProcessingMode.Value;
else
processingModeEnum = _FileConnectorConfiguration.ErrorPostProcessingMode.Value;
if (processingModeEnum != FileConnectorConfiguration.PostProcessingModeEnum.Delete && !Directory.Exists(itemDirectory))
{
Directory.CreateDirectory(itemDirectory);
FileInfo fileInfo = new(_Logistics.ReportFullPath);
Directory.SetCreationTime(itemDirectory, fileInfo.LastWriteTime);
}
if (_IsEAFHosted)
{
switch (processingModeEnum)
{
case FileConnectorConfiguration.PostProcessingModeEnum.Move:
File.Move(sourceFile.FullName, itemFile);
break;
case FileConnectorConfiguration.PostProcessingModeEnum.Copy:
File.Copy(sourceFile.FullName, itemFile);
break;
case FileConnectorConfiguration.PostProcessingModeEnum.Delete:
File.Delete(sourceFile.FullName);
break;
default:
throw new Exception();
}
}
}
protected void Shared1811(string to, FileInfo sourceFile)
{
if (!_IsDuplicator && _FileConnectorConfiguration.SourceFileFilter != "*" && sourceFile.Exists && sourceFile.Length < _MinFileLength)
{
string directoryName = Path.GetFileName(to);
string jobIdDirectory = Path.GetDirectoryName(to);
DateTime dateTime = DateTime.Now.AddMinutes(-15);
string weekOfYear = _Calendar.GetWeekOfYear(_Logistics.DateTimeFromSequence, CalendarWeekRule.FirstDay, DayOfWeek.Sunday).ToString("00");
string weekDirectory = string.Concat(_Logistics.DateTimeFromSequence.ToString("yyyy"), "_Week_", weekOfYear, @"\", _Logistics.DateTimeFromSequence.ToString("yyyy-MM-dd"));
string destinationDirectory = string.Concat(jobIdDirectory, @"\_ Ignore 100 bytes\", weekDirectory, @"\", directoryName);
if (!Directory.Exists(destinationDirectory))
Directory.CreateDirectory(destinationDirectory);
File.Move(sourceFile.FullName, string.Concat(destinationDirectory, @"\", sourceFile.Name));
try
{
string[] checkDirectories = Directory.GetDirectories(jobIdDirectory, "*", SearchOption.TopDirectoryOnly);
foreach (string checkDirectory in checkDirectories)
{
if (!checkDirectory.Contains("_"))
continue;
if (Directory.GetDirectories(checkDirectory, "*", SearchOption.TopDirectoryOnly).Any())
continue;
if (Directory.GetFiles(checkDirectory, "*", SearchOption.TopDirectoryOnly).Any())
continue;
if (Directory.GetDirectories(checkDirectory, "*", SearchOption.AllDirectories).Any())
continue;
if (Directory.GetFiles(checkDirectory, "*", SearchOption.AllDirectories).Any())
continue;
if (new DirectoryInfo(checkDirectory).CreationTime > dateTime)
continue;
Directory.Delete(checkDirectory, recursive: false);
}
}
catch (Exception) { throw; }
}
}
protected void Shared0231(List<string> directories)
{
if (_FileConnectorConfiguration.PostProcessingMode != FileConnectorConfiguration.PostProcessingModeEnum.Copy)
{
foreach (string directory in (from l in directories orderby l.Split('\\').Length descending select l).Distinct())
{
if (Directory.Exists(directory) && !Directory.GetFiles(directory).Any())
Directory.Delete(directory);
}
}
}
protected void Shared0413(DateTime dateTime, bool isDummyRun, string successDirectory, string duplicateDirectory, List<Tuple<Properties.IScopeInfo, string>> tuples, string duplicateFile)
{
if (!isDummyRun && _IsEAFHosted)
WaitForFileConsumption(_FileConnectorConfiguration.SourceDirectoryCloaking, _Logistics, dateTime, successDirectory, duplicateDirectory, duplicateFile, tuples);
else
{
long breakAfter = DateTime.Now.AddSeconds(_FileConnectorConfiguration.ConnectionRetryInterval.Value).Ticks;
for (short i = 0; i < short.MaxValue; i++)
{
if (!_IsEAFHosted || DateTime.Now.Ticks > breakAfter)
break;
Thread.Sleep(500);
}
}
}
protected void Shared0607(string reportFullPath, string duplicateDirectory, string logisticsSequence, string destinationDirectory)
{
if (destinationDirectory == duplicateDirectory)
throw new Exception("Check Target File Folder for %LotIDWithLogisticsSequence%_in process on CI (not Duplicator)");
if (destinationDirectory.EndsWith(logisticsSequence))
destinationDirectory = Path.GetDirectoryName(destinationDirectory);
string[] deleteFiles = Directory.GetFiles(destinationDirectory, "*", SearchOption.AllDirectories);
if (deleteFiles.Length > 250)
throw new Exception("Safety net!");
foreach (string file in deleteFiles)
File.Delete(file);
Directory.Delete(destinationDirectory, recursive: true);
File.Delete(reportFullPath);
}
protected void Shared0192()
{
if (!string.IsNullOrEmpty(_Logistics.ReportFullPath))
{
FileInfo fileInfo = new(_Logistics.ReportFullPath);
if (fileInfo.Exists && fileInfo.LastWriteTime < fileInfo.CreationTime)
File.SetLastWriteTime(_Logistics.ReportFullPath, fileInfo.CreationTime);
}
}
protected string[] Shared1567(string reportFullPath, List<Tuple<Properties.IScopeInfo, string>> tuples)
{
string[] results;
string historicalText;
string logisticsSequence = _Logistics.Sequence.ToString();
string jobIdDirectory = string.Concat(Path.GetDirectoryName(Path.GetDirectoryName(_FileConnectorConfiguration.TargetFileLocation)), @"\", _Logistics.JobID);
if (!Directory.Exists(jobIdDirectory))
Directory.CreateDirectory(jobIdDirectory);
string[] matchDirectories;
if (!_IsEAFHosted)
matchDirectories = new string[] { Path.GetDirectoryName(Path.GetDirectoryName(reportFullPath)) };
else
matchDirectories = Directory.GetDirectories(jobIdDirectory, string.Concat(_Logistics.MID, '*', logisticsSequence, '*'), SearchOption.TopDirectoryOnly);
if ((matchDirectories is null) || matchDirectories.Length != 1)
throw new Exception("Didn't find directory by logistics sequence");
string fileName = Path.GetFileNameWithoutExtension(reportFullPath);
string sequenceDirectory = string.Concat(matchDirectories[0], @"\", logisticsSequence);
if (!Directory.Exists(sequenceDirectory))
Directory.CreateDirectory(sequenceDirectory);
foreach (Tuple<Properties.IScopeInfo, string> tuple in tuples)
{
fileName = string.Concat(sequenceDirectory, @"\", fileName, "_", tuple.Item1.FileNameWithoutExtension, ".pdsfc");
if (_IsEAFHosted)
File.WriteAllText(fileName, tuple.Item2);
else
{
if (File.Exists(fileName))
{
historicalText = File.ReadAllText(fileName);
if (tuple.Item2 != historicalText)
throw new Exception("File doesn't match historical!");
}
}
}
results = matchDirectories;
return results;
}
protected void Shared1277(string reportFullPath, string destinationDirectory, string logisticsSequence, string jobIdDirectory, string json)
{
string ecCharacterizationSi = Path.GetDirectoryName(Path.GetDirectoryName(jobIdDirectory));
string destinationJobIdDirectory = string.Concat(ecCharacterizationSi, @"\Processed\", _Logistics.JobID);
if (!Directory.Exists(destinationJobIdDirectory))
Directory.CreateDirectory(destinationJobIdDirectory);
destinationJobIdDirectory = string.Concat(destinationJobIdDirectory, @"\", Path.GetFileName(destinationDirectory).Split(new string[] { logisticsSequence }, StringSplitOptions.None)[0], _Logistics.DateTimeFromSequence.ToString("yyyy-MM-dd_hh;mm_tt_"), DateTime.Now.Ticks - _Logistics.Sequence);
string sequenceDirectory = string.Concat(destinationJobIdDirectory, @"\", logisticsSequence);
string jsonFileName = string.Concat(sequenceDirectory, @"\", Path.GetFileNameWithoutExtension(reportFullPath), ".json");
Directory.Move(destinationDirectory, destinationJobIdDirectory);
if (!Directory.Exists(sequenceDirectory))
Directory.CreateDirectory(sequenceDirectory);
File.Copy(reportFullPath, string.Concat(sequenceDirectory, @"\", Path.GetFileName(reportFullPath)), overwrite: true);
File.WriteAllText(jsonFileName, json);
}
}
}
// 2021-12-17 -> Shared - FileRead
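
GetTupleFile above resolves %RDS% from the second '-'-separated segment of the MID and a %DateTime:<format>% token from the logistics timestamp. A simplified sketch of that placeholder handling, with a worked example (names and values are illustrative assumptions):

using System;
public static class TupleFileNameSketch
{
    // Simplified mirror of FileRead.GetTupleFile: substitute %RDS% from the MID and
    // render %DateTime:<format>% using the DateTime derived from the logistics sequence.
    public static string Resolve(string fileNamePattern, string mid, DateTime dateTimeFromSequence)
    {
        string[] segments = mid.Split('-');
        string rds = segments.Length < 2 ? "%RDS%" : segments[1];
        string result = fileNamePattern.Replace("%RDS%", rds);
        segments = fileNamePattern.Split(new string[] { "DateTime:" }, StringSplitOptions.RemoveEmptyEntries);
        if (segments.Length > 1)
        {
            string format = segments[1].Split('%')[0];
            result = result.Replace(string.Concat("%DateTime:", format, "%"), dateTimeFromSequence.ToString(format));
        }
        return result; // the real method throws if any '%' placeholder remains unresolved
    }
}

// Example (assumed MID format "TOOL-RDS-POCKET"):
//   Resolve("CassetteSummary_%RDS%_%DateTime:yyyy-MM-dd%.txt", "BIORAD2-123456-1", new DateTime(2022, 2, 1))
//   returns "CassetteSummary_123456_2022-02-01.txt"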

View File

@@ -0,0 +1,223 @@
using Adaptation.Shared.Methods;
using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
namespace Adaptation.Shared
{
public class Logistics : ILogistics
{
public object NullData { get; private set; }
public string JobID { get; private set; } //CellName
public long Sequence { get; private set; } //Ticks
public DateTime DateTimeFromSequence { get; private set; }
public double TotalSecondsSinceLastWriteTimeFromSequence { get; private set; }
public string MesEntity { get; private set; } //SPC
public string ReportFullPath { get; private set; } //Extract file
public string ProcessJobID { get; set; } //Reactor (duplicate but I want it in the logistics)
public string MID { get; set; } //Lot & Pocket || Lot
public List<string> Tags { get; set; }
public List<string> Logistics1 { get; set; }
public List<Logistics2> Logistics2 { get; set; }
public Logistics(IFileRead fileRead)
{
DateTime dateTime = DateTime.Now;
NullData = null;
Sequence = dateTime.Ticks;
DateTimeFromSequence = dateTime;
JobID = fileRead.CellInstanceName;
TotalSecondsSinceLastWriteTimeFromSequence = (DateTime.Now - dateTime).TotalSeconds;
MesEntity = DefaultMesEntity(dateTime);
ReportFullPath = string.Empty;
ProcessJobID = nameof(ProcessJobID);
MID = nameof(MID);
Tags = new List<string>();
Logistics1 = new string[] { string.Concat("LOGISTICS_1", '\t', "A_JOBID=", JobID, ";A_MES_ENTITY=", MesEntity, ";") }.ToList();
Logistics2 = new List<Logistics2>();
}
public Logistics(IFileRead fileRead, string reportFullPath, bool useSplitForMID, int? fileInfoLength = null)
{
if (string.IsNullOrEmpty(fileRead.CellInstanceName))
throw new Exception();
if (string.IsNullOrEmpty(fileRead.MesEntity))
throw new Exception();
NullData = fileRead.NullData;
FileInfo fileInfo = new(reportFullPath);
DateTime dateTime = fileInfo.LastWriteTime;
if (fileInfoLength.HasValue && fileInfo.Length < fileInfoLength.Value)
dateTime = dateTime.AddTicks(-1);
JobID = fileRead.CellInstanceName;
Sequence = dateTime.Ticks;
DateTimeFromSequence = dateTime;
TotalSecondsSinceLastWriteTimeFromSequence = (DateTime.Now - dateTime).TotalSeconds;
MesEntity = fileRead.MesEntity;
ReportFullPath = fileInfo.FullName;
ProcessJobID = nameof(ProcessJobID);
string fileNameWithoutExtension = Path.GetFileNameWithoutExtension(fileInfo.FullName);
if (useSplitForMID)
{
if (fileNameWithoutExtension.IndexOf(".") > -1)
fileNameWithoutExtension = fileNameWithoutExtension.Split('.')[0].Trim();
if (fileNameWithoutExtension.IndexOf("_") > -1)
fileNameWithoutExtension = fileNameWithoutExtension.Split('_')[0].Trim();
if (fileNameWithoutExtension.IndexOf("-") > -1)
fileNameWithoutExtension = fileNameWithoutExtension.Split('-')[0].Trim();
}
MID = string.Concat(fileNameWithoutExtension.Substring(0, 1).ToUpper(), fileNameWithoutExtension.Substring(1).ToLower());
Tags = new List<string>();
Logistics1 = new string[] { string.Concat("LOGISTICS_1", '\t', "A_JOBID=", JobID, ";A_MES_ENTITY=", MesEntity, ";") }.ToList();
Logistics2 = new List<Logistics2>();
}
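// Parses a raw LOGISTICS_1/LOGISTICS_2 header block (key=value pairs separated by ';') captured from a
// PDSF file; any missing key falls back to the report file's metadata or a "null"/"R##" placeholder.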
public Logistics(string reportFullPath, string logistics)
{
string key;
DateTime dateTime;
string[] segments;
Logistics1 = logistics.Split(new string[] { Environment.NewLine }, StringSplitOptions.RemoveEmptyEntries).ToList();
if (!Logistics1.Any() || !Logistics1[0].StartsWith("LOGISTICS_1"))
{
NullData = null;
JobID = "null";
dateTime = new FileInfo(reportFullPath).LastWriteTime;
Sequence = dateTime.Ticks;
DateTimeFromSequence = dateTime;
TotalSecondsSinceLastWriteTimeFromSequence = (DateTime.Now - dateTime).TotalSeconds;
MesEntity = DefaultMesEntity(dateTime);
ReportFullPath = reportFullPath;
ProcessJobID = "R##";
MID = "null";
Tags = new List<string>();
Logistics1 = new string[] { string.Concat("LOGISTICS_1", '\t', "A_JOBID=", JobID, ";A_MES_ENTITY=", MesEntity, ";") }.ToList();
Logistics2 = new List<Logistics2>();
}
else
{
string logistics1Line1 = Logistics1[0];
key = "NULL_DATA=";
if (!logistics1Line1.Contains(key))
NullData = null;
else
{
segments = logistics1Line1.Split(new string[] { key }, StringSplitOptions.RemoveEmptyEntries);
NullData = segments[1].Split(';')[0];
}
key = "JOBID=";
if (!logistics1Line1.Contains(key))
JobID = "null";
else
{
segments = logistics1Line1.Split(new string[] { key }, StringSplitOptions.RemoveEmptyEntries);
JobID = segments[1].Split(';')[0];
}
key = "SEQUENCE=";
if (!logistics1Line1.Contains(key))
dateTime = new FileInfo(reportFullPath).LastWriteTime;
else
{
segments = logistics1Line1.Split(new string[] { key }, StringSplitOptions.RemoveEmptyEntries);
if (!long.TryParse(segments[1].Split(';')[0].Split('.')[0], out long sequence) || sequence < new DateTime(1999, 1, 1).Ticks)
dateTime = new FileInfo(reportFullPath).LastWriteTime;
else
dateTime = new DateTime(sequence);
}
Sequence = dateTime.Ticks;
DateTimeFromSequence = dateTime;
TotalSecondsSinceLastWriteTimeFromSequence = (DateTime.Now - dateTime).TotalSeconds;
DateTime lastWriteTime = new FileInfo(reportFullPath).LastWriteTime;
if (TotalSecondsSinceLastWriteTimeFromSequence > 600)
{
if (lastWriteTime != dateTime)
try
{ File.SetLastWriteTime(reportFullPath, dateTime); }
catch (Exception) { }
}
key = "MES_ENTITY=";
if (!logistics1Line1.Contains(key))
MesEntity = DefaultMesEntity(dateTime);
else
{
segments = logistics1Line1.Split(new string[] { key }, StringSplitOptions.RemoveEmptyEntries);
MesEntity = segments[1].Split(';')[0];
}
ReportFullPath = reportFullPath;
key = "PROCESS_JOBID=";
if (!logistics1Line1.Contains(key))
ProcessJobID = "R##";
else
{
segments = logistics1Line1.Split(new string[] { key }, StringSplitOptions.RemoveEmptyEntries);
ProcessJobID = segments[1].Split(';')[0];
}
key = "MID=";
if (!logistics1Line1.Contains(key))
MID = "null";
else
{
segments = logistics1Line1.Split(new string[] { key }, StringSplitOptions.RemoveEmptyEntries);
MID = segments[1].Split(';')[0];
}
}
Logistics2 logistics2;
Tags = new List<string>();
Logistics2 = new List<Logistics2>();
for (int i = 1; i < Logistics1.Count(); i++)
{
if (Logistics1[i].StartsWith("LOGISTICS_2"))
{
logistics2 = new Logistics2(Logistics1[i]);
Logistics2.Add(logistics2);
}
}
for (int i = Logistics1.Count() - 1; i > -1; i--)
{
if (Logistics1[i].StartsWith("LOGISTICS_2"))
Logistics1.RemoveAt(i);
}
}
public Logistics ShallowCopy()
{
return (Logistics)MemberwiseClone();
}
private string DefaultMesEntity(DateTime dateTime)
{
return string.Concat(dateTime.Ticks, "_MES_ENTITY");
}
internal string GetLotViaMostCommonMethod()
{
return MID.Substring(0, MID.Length - 2);
}
internal string GetPocketNumberViaMostCommonMethod()
{
return MID.Substring(MID.Length - 2);
}
internal void Update(string dateTime, string processJobID, string mid)
{
if (!DateTime.TryParse(dateTime, out DateTime dateTimeCasted))
dateTimeCasted = DateTime.Now;
NullData = null;
//JobID = Description.GetCellName();
Sequence = dateTimeCasted.Ticks;
DateTimeFromSequence = dateTimeCasted;
TotalSecondsSinceLastWriteTimeFromSequence = (DateTime.Now - dateTimeCasted).TotalSeconds;
//MesEntity = DefaultMesEntity(dateTime);
//ReportFullPath = string.Empty;
ProcessJobID = processJobID;
MID = mid;
Tags = new List<string>();
Logistics1 = new string[] { string.Concat("LOGISTICS_1", '\t', "A_JOBID=", JobID, ";A_MES_ENTITY=", MesEntity, ";") }.ToList();
Logistics2 = new List<Logistics2>();
}
}
}

View File

@ -0,0 +1,81 @@
using System;
namespace Adaptation.Shared
{
public class Logistics2 : Methods.ILogistics2
{
public string MID { get; private set; }
public string RunNumber { get; private set; }
public string SatelliteGroup { get; private set; }
public string PartNumber { get; private set; }
public string PocketNumber { get; private set; }
public string WaferLot { get; private set; }
public string Recipe { get; private set; }
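// Maps a LOGISTICS_2 line's key=value pairs onto these properties; note the intentional re-mapping
// (JOBID -> MID, MID -> RunNumber, INFO -> SatelliteGroup, PRODUCT -> PartNumber, CHAMBER -> PocketNumber,
// WAFER_ID -> WaferLot, PPID -> Recipe).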
public Logistics2(string logistics2)
{
string key;
string[] segments;
key = "JOBID=";
if (!logistics2.Contains(key))
MID = "null";
else
{
segments = logistics2.Split(new string[] { key }, StringSplitOptions.RemoveEmptyEntries);
MID = segments[1].Split(';')[0];
}
key = "MID=";
if (!logistics2.Contains(key))
RunNumber = "null";
else
{
segments = logistics2.Split(new string[] { key }, StringSplitOptions.RemoveEmptyEntries);
RunNumber = segments[1].Split(';')[0];
}
key = "INFO=";
if (!logistics2.Contains(key))
SatelliteGroup = "null";
else
{
segments = logistics2.Split(new string[] { key }, StringSplitOptions.RemoveEmptyEntries);
SatelliteGroup = segments[1].Split(';')[0];
}
key = "PRODUCT=";
if (!logistics2.Contains(key))
PartNumber = "null";
else
{
segments = logistics2.Split(new string[] { key }, StringSplitOptions.RemoveEmptyEntries);
PartNumber = segments[1].Split(';')[0];
}
key = "CHAMBER=";
if (!logistics2.Contains(key))
PocketNumber = "null";
else
{
segments = logistics2.Split(new string[] { key }, StringSplitOptions.RemoveEmptyEntries);
PocketNumber = segments[1].Split(';')[0];
}
key = "WAFER_ID=";
if (!logistics2.Contains(key))
WaferLot = "null";
else
{
segments = logistics2.Split(new string[] { key }, StringSplitOptions.RemoveEmptyEntries);
WaferLot = segments[1].Split(';')[0];
}
key = "PPID=";
if (!logistics2.Contains(key))
Recipe = "null";
else
{
segments = logistics2.Split(new string[] { key }, StringSplitOptions.RemoveEmptyEntries);
Recipe = segments[1].Split(';')[0];
}
}
}
}

View File

@ -0,0 +1,25 @@
using System.Collections.Generic;
using System.Text.Json;
namespace Adaptation.Shared.Methods
{
public interface IDescription
{
string GetEventDescription();
List<string> GetDetailNames();
List<string> GetHeaderNames();
IDescription GetDisplayNames();
List<string> GetParameterNames();
List<string> GetPairedParameterNames();
List<string> GetIgnoreParameterNames(Test test);
List<string> GetNames(IFileRead fileRead, Logistics logistics);
JsonProperty[] GetDefault(IFileRead fileRead, Logistics logistics);
Dictionary<string, string> GetDisplayNamesJsonElement(IFileRead fileRead);
IDescription GetDefaultDescription(IFileRead fileRead, Logistics logistics);
List<IDescription> GetDescriptions(IFileRead fileRead, Logistics logistics, List<Test> tests, IProcessData iProcessData);
}
}

View File

@ -0,0 +1,26 @@
using System;
using System.Collections.Generic;
using System.IO;
using System.Text.Json;
namespace Adaptation.Shared.Methods
{
public interface IFileRead : Properties.IFileRead
{
void MoveArchive();
void WaitForThread();
JsonProperty[] GetDefault();
void Callback(object state);
string GetEventDescription();
List<string> GetHeaderNames();
void CheckTests(Test[] tests, bool extra);
Dictionary<string, string> GetDisplayNamesJsonElement();
Tuple<string, Test[], JsonElement[], List<FileInfo>> ReExtract();
List<IDescription> GetDescriptions(IFileRead fileRead, List<Test> tests, IProcessData processData);
void Move(Tuple<string, Test[], JsonElement[], List<FileInfo>> extractResults, Exception exception = null);
Tuple<string, Test[], JsonElement[], List<FileInfo>> GetExtractResult(string reportFullPath, string eventName);
string[] Move(Tuple<string, Test[], JsonElement[], List<FileInfo>> extractResults, string to, string from, string resolvedFileLocation, Exception exception);
}
}

View File

@ -0,0 +1,8 @@
namespace Adaptation.Shared.Methods
{
public interface ILogistics : Properties.ILogistics
{
}
}

View File

@ -0,0 +1,8 @@
namespace Adaptation.Shared.Methods
{
public interface ILogistics2 : Properties.ILogistics2
{
}
}

View File

@ -0,0 +1,17 @@
using System;
using System.Collections.Generic;
using System.IO;
using System.Text.Json;
namespace Adaptation.Shared.Methods
{
public interface IProcessData : Properties.IProcessData
{
string GetCurrentReactor(IFileRead fileRead, Logistics logistics, Dictionary<string, string> reactors);
Tuple<string, Test[], JsonElement[], List<FileInfo>> GetResults(IFileRead fileRead, Logistics logistics, List<FileInfo> fileInfoCollection);
}
}

View File

@ -0,0 +1,9 @@
namespace Adaptation.Shared.Methods
{
public interface ISMTP
{
void SendLowPriorityEmailMessage(string subject, string body);
void SendHighPriorityEmailMessage(string subject, string body);
void SendNormalPriorityEmailMessage(string subject, string body);
}
}

View File

@ -0,0 +1,306 @@
using System;
using System.IO;
namespace Adaptation.Shared.Metrology
{
public class ScopeInfo : Properties.IScopeInfo
{
public Test Test { get; private set; }
public Enum Enum { get; private set; }
public string HTML { get; private set; }
public string Title { get; private set; }
public string FileName { get; private set; }
public int TestValue { get; private set; }
public string Header { get; private set; }
public string QueryFilter { get; private set; }
public string FileNameWithoutExtension { get; private set; }
public ScopeInfo(Test test, string fileName, string queryFilter = "", string title = "", string html = "")
{
Enum = test;
Test = test;
HTML = html;
Title = title;
FileName = fileName;
TestValue = (int)test;
Header = string.Empty;
QueryFilter = queryFilter;
FileNameWithoutExtension = Path.GetFileNameWithoutExtension(fileName);
}
public ScopeInfo(Test test)
{
Enum = test;
Test = test;
TestValue = (int)test;
switch (Test)
{
case Test.AFMRoughness:
FileNameWithoutExtension = "afm_iqs_01";
Header = string.Empty;
QueryFilter = "AFM Roughness";
Title = "AFM";
HTML = @"GaN Epi Data\10 - afm.html";
break;
case Test.BreakdownVoltageCenter:
FileNameWithoutExtension = "bv_iqs_01";
Header = "Reactor;fDate;fRecipeName;Lot;fPocketNumber;g4Scribe;BV Position;BV Value;Tool";
QueryFilter = "Breakdown Voltage";
Title = "Breakdown Voltage-Center";
HTML = @"GaN Epi Data\03 - bv-production.html";
break;
case Test.BreakdownVoltageEdge:
FileNameWithoutExtension = "bv_iqs_01_Edge";
Header = "Reactor;fDate;fRecipeName;Lot;fPocketNumber;g4Scribe;BV Position;BV Value;Tool";
QueryFilter = "Breakdown Voltage - Edge";
Title = "Breakdown Voltage-Edge";
HTML = @"GaN Epi Data\03 - bv-production.html";
break;
case Test.BreakdownVoltageMiddle8in:
FileNameWithoutExtension = "bv_iqs_01_Middle";
Header = "Reactor;fDate;fRecipeName;Lot;fPocketNumber;g4Scribe;BV Position;BV Value;Tool";
QueryFilter = "Breakdown Voltage - Middle";
Title = "Breakdown Voltage-Middle (8 in)";
HTML = @"GaN Epi Data\03 - bv-production.html";
break;
case Test.CV:
FileNameWithoutExtension = "cv_iqs_01";
Header = "Reactor;fDate;fPart;Lot;pocketNumber;g4Scribe;Position;Vp;NdMin;Tool ID;CV Ns;CV Cap";
QueryFilter = "CV_Ns";
Title = "CV";
HTML = @"GaN Epi Data\05 - cv.html";
break;
case Test.MonthlyCV:
FileNameWithoutExtension = "cv_iqs_01";
Header = "Reactor;fDate;fPart;Lot;pocketNumber;g4Scribe;Position;Vp;NdMin;Tool ID;CV Ns;CV Cap";
QueryFilter = "CV_Ns";
Title = "CV Monthly Verification";
HTML = @"Metrology\07 - cv_verif_monthly.html";
break;
case Test.WeeklyCV:
FileNameWithoutExtension = "cv_iqs_01";
Header = "Reactor;fDate;fPart;Lot;pocketNumber;g4Scribe;Position;Vp;NdMin;Tool ID;CV Ns;CV Cap";
QueryFilter = "CV_Ns";
Title = "CV Weekly Verification";
HTML = @"Metrology\16 - cv_verif_weekly.html";
break;
case Test.CandelaKlarfDC:
FileNameWithoutExtension = "candela_iqs_01";
Header = "LotID;OperatorID;RecipeName;CandelaRecipe;WaferID;PocketNumber;RunDate;Epi;SlipLines;Cracks;EpiDef;HazeSpot;SmallLpd;MediumLpd;LargeLpd;Cracks_A;Spirals;Craters;8620 Small;Pits;Tool ID;Defect Count";
QueryFilter = "Candela Cracking";
Title = "Candela";
HTML = @"GaN Epi Data\12 - candela.html";
break;
case Test.CandelaLaser:
FileNameWithoutExtension = "candela_iqs_01";
Header = "LotID;OperatorID;RecipeName;CandelaRecipe;WaferID;PocketNumber;RunDate;Epi;SlipLines;Cracks;EpiDef;HazeSpot;SmallLpd;MediumLpd;LargeLpd;Cracks_A;Spirals;Craters;Pits;Tool ID;Defect Count";
QueryFilter = "Candela Cracking";
Title = "Candela";
HTML = @"GaN Epi Data\12 - candela.html";
break;
case Test.CandelaVerify:
FileNameWithoutExtension = "candela_iqs_01";
Header = string.Concat("LotID;OperatorID;RecipeName;CandelaRecipe;WaferID;PocketNumber;RunDate;RunID;Reactor;", "Slip Lines;Cracks;Epi Def;Haze Spot;Small LPD;Medium LPD;Large LPD;Cracks_A;Spirals;Craters;8620 Small;Pits;Tool ID;Defect Count");
QueryFilter = "Candela Cracking";
Title = "Candela";
HTML = @"GaN Epi Data\12 - candela.html";
break;
case Test.CandelaPSL:
FileNameWithoutExtension = "candela_iqs_01";
Header = string.Empty;
QueryFilter = "102-83nm";
Title = "Candela";
HTML = @"GaN Epi Data\12 - candela.html";
break;
case Test.CandelaProdU:
FileNameWithoutExtension = "candela_iqs_01";
Header = string.Empty;
QueryFilter = "SPE verification";
Title = "Candela";
HTML = @"GaN Epi Data\12 - candela.html";
break;
case Test.Denton:
FileNameWithoutExtension = "denton_iqs_01";
Header = "Tool;fDate;Run;Recipe;Operator;Name;Value";
QueryFilter = "Denton_Voltage_AVG";
Title = "Denton Data";
HTML = @"Support Process\03 - ebeam02_denton_v1.html";
break;
case Test.Hall:
FileNameWithoutExtension = "hall_iqs_01";
Header = "Lot;Tool;TimeDate;RunDate;RunID;Part;Reactor;Scribe;PocketNumber;Tool ID;Name;Value";
QueryFilter = "Hall Rs";
Title = "Hall Data";
HTML = @"GaN Epi Data\04 - hall.html";
break;
case Test.MonthlyHall:
FileNameWithoutExtension = "hall_iqs_01";
Header = "Lot;Tool;TimeDate;RunDate;RunID;Part;Reactor;Scribe;PocketNumber;Tool ID;Name;Value";
QueryFilter = "Hall Rs";
Title = "Hall Monthly Verification";
HTML = @"Metrology\06 - hall_verif_monthly.html";
break;
case Test.WeeklyHall:
FileNameWithoutExtension = "hall_iqs_01";
Header = "Lot;Tool;TimeDate;RunDate;RunID;Part;Reactor;Scribe;PocketNumber;Tool ID;Name;Value";
QueryFilter = "Hall Rs";
Title = "Hall Weekly Verification";
HTML = @"Metrology\15 - hall_verif_weekly.html";
break;
case Test.Lehighton:
FileNameWithoutExtension = "lei_iqs_01";
Header = "Reactor;Date;Recipe;Lot;Pocket;Scribe;Tool;Name;Value";
QueryFilter = "LEI RS Average value";
Title = "Lehighton";
HTML = @"GaN Epi Data\13 - lehighton.html";
break;
case Test.VerificationLehighton:
FileNameWithoutExtension = "___";
Header = "Reactor;Date;Recipe;Lot;Pocket;Scribe;Tool;Name;Value";
QueryFilter = "___";
Title = "LEI Weekly Verification 2 Ohm cm";
HTML = @"Metrology\14 - lei_verif_weekly.html.html";
break;
case Test.Microscope:
FileNameWithoutExtension = string.Empty;
Header = string.Empty;
QueryFilter = "Microscope Center 5x";
Title = "Total Microscope Defects";
HTML = string.Empty;
break;
case Test.RPMXY:
FileNameWithoutExtension = "RPM_Data";
Header = "Lot;Date;Recipe;Reactor;Scribe;Pocket;Tool;Name;Value";
QueryFilter = "Barrier_Composition_RPM_XY";
Title = "RPM XY Data ***&*** View Data";
HTML = @"GaN Epi Data\09 - rpm --- 08 - photoluminescence.html";
break;
case Test.RPMAverage:
FileNameWithoutExtension = "RPMdata-short";
Header = "fProductId;fDate;average;stdDev;fRecipeName;Reactor;g4Scribe;Pocket Number;Tool ID;Recipe From Rpm File";
QueryFilter = "Epi Thickness Mean";
Title = "RPM Average Data";
HTML = @"GaN Epi Data\09 - rpm.html";
break;
case Test.RPMPLRatio:
FileNameWithoutExtension = "PHOTOLUMINESCENCE_data-short";
Header = "fProductId;fDate;g4Scribe;fRecipeName;bandEdge_nm;bandEdge_V;yellowBand_Pmw;yellowBand_nm;yellowBand_V;Reactor;Pocket Number;Tool ID";
QueryFilter = "PL Ratio";
Title = "Photoluminescence: PL Ratio";
HTML = @"GaN Epi Data\08 - photoluminescence.html";
break;
case Test.DailyRPMXY:
FileNameWithoutExtension = "RPM_Data";
Header = "Lot;Date;Recipe;Reactor;Scribe;Pocket;Tool;Name;Value";
QueryFilter = "Barrier_Composition_RPM_XY";
Title = "";
HTML = @"Metrology\?";
break;
case Test.DailyRPMAverage:
FileNameWithoutExtension = "RPMdata-short";
Header = "fProductId;fDate;average;stdDev;fRecipeName;Reactor;g4Scribe;Pocket Number;Tool ID;Recipe From Rpm File";
QueryFilter = "Epi Thickness Mean";
Title = "";
HTML = @"Metrology\?";
break;
case Test.DailyRPMPLRatio:
FileNameWithoutExtension = "PHOTOLUMINESCENCE_data-short";
Header = "fProductId;fDate;g4Scribe;fRecipeName;bandEdge_nm;bandEdge_V;yellowBand_Pmw;yellowBand_nm;yellowBand_V;Reactor;Pocket Number;Tool ID";
QueryFilter = "PL Ratio";
Title = "RPM Daily Verification";
HTML = @"Metrology\17 - rpm_verif_daily.html";
break;
case Test.VerificationRPM:
FileNameWithoutExtension = "PhotoLuminescence_Ver";
Header = "Part;Process;Date;Test;Value";
QueryFilter = "PL Edge Wavelength";
Title = "PL Daily Verification - [PL Edge Wavelength]";
HTML = @"Metrology\18 - photoluminescence_verif_daily.html";
break;
case Test.Photoreflectance:
FileNameWithoutExtension = "photoreflect_iqs_01";
Header = "Lot;Date;Part;Reactor;Scribe;Pocket;Tool;Point;WaferPosition_PR;PR_Peak";
QueryFilter = "PR Barrier Composition";
Title = "Photoreflectance 6 in, Photoreflectance 8 in";
HTML = @"GaN Epi Data\07 - photoreflectance.html";
break;
case Test.UV:
FileNameWithoutExtension = "uv_iqs_01";
Header = string.Empty;
QueryFilter = "UV Broken";
Title = "UV";
HTML = @"GaN Epi Data\15 - uv 2.1.html";
break;
case Test.VpdIcpmsAnalyte:
FileNameWithoutExtension = "VPD_iqs_01";
Header = "Reactor;RunID;RunDate;PartNumber;PocketNumber;WaferScribe;Analyte;Value";
QueryFilter = "Mg";
Title = "VpdIcpmsAnalyteData";
HTML = @"";
break;
case Test.WarpAndBow:
FileNameWithoutExtension = "warp_iqs_01";
Header = "fDate;fRecipeName;fProductId;g4Scribe;warp;bow;tool;Reactor;Pocket ID;bow_range;BowX;BowY;CenterBow";
QueryFilter = "BowCenter";
Title = "Warp and Bow";
HTML = @"GaN Epi Data\14 - warp.html";
break;
case Test.VerificationWarpAndBow:
FileNameWithoutExtension = "warp_ver_iqs_01";
Header = "Part;Process;Date;WaferScribe;totWarp;bow";
QueryFilter = "Bow Calibration";
Title = "6 Inch Warp/Bow Daily Verification, 8 Inch Warp/Bow Daily Verification";
HTML = @"Metrology\19 - warp_cal_daily.html";
break;
case Test.XRDXY:
FileNameWithoutExtension = "xrd_iqs_NEW_01";
Header = "Reactor;fDate;fRecipeName;Lot;pocketNumber;g4Scribe;ToolID;Name;Value;Group";
QueryFilter = "SL Period";
Title = "XRD XY Raw Data Viewer";
HTML = @"GaN Epi Data\11 - xrd.html";
break;
case Test.XRDWeightedAverage:
FileNameWithoutExtension = "xrd_iqs_NEW_01_WtAVG";
Header = "Reactor;fDate;fRecipeName;Lot;pocketNumber;g4Scribe;Name;Value;Group";
//QueryFilter = "Al% Barrier WTAVG";
QueryFilter = "SL Period WTAVG";
Title = "XRD Weighted Average Data";
HTML = @"GaN Epi Data\11 - xrd.html";
break;
case Test.MonthlyXRD:
FileNameWithoutExtension = "xrd_monthly_ver_iqs_01";
Header = "Part;Process;Date;TestName;Value";
QueryFilter = "XRD 2-Theta Position";
Title = "XRD Monthly Verification";
HTML = @"Metrology\03 - xrd_verif_monthly.html";
break;
case Test.WeeklyXRD:
FileNameWithoutExtension = "xrd_weekly_ver_iqs_01";
Header = "Part;Process;Lot;Date;TestName;Value";
QueryFilter = "XRD Weekly AL% Center";
Title = "XRD Weekly Verification";
HTML = @"Metrology\12 - xrd_verif_weekly.html";
break;
case Test.JVXRD:
FileNameWithoutExtension = "xrd_iqs_NEW_01";
Header = "Reactor;fDate;fRecipeName;Lot;pocketNumber;g4Scribe;ToolID;Name;Value;Group";
QueryFilter = "SL Period";
Title = "XRD XY Raw Data Viewer";
HTML = @"GaN Epi Data\11 - xrd.html";
break;
default:
throw new Exception();
}
FileName = string.Concat(FileNameWithoutExtension, ".txt");
}
public ScopeInfo ShallowCopy()
{
return (ScopeInfo)MemberwiseClone();
}
}
}

View File

@ -0,0 +1,24 @@
namespace Adaptation.Shared.Metrology
{
public partial class WS
{
public class Attachment
{
public string UniqueId { get; set; }
public string DestinationFileName { get; set; }
public string SourceFileName { get; set; }
public Attachment(string uniqueId, string destinationFileName, string sourceFileName)
{
UniqueId = uniqueId;
DestinationFileName = destinationFileName;
SourceFileName = sourceFileName;
}
}
}
}

View File

@ -0,0 +1,33 @@
using System.Collections.Generic;
using System.Text.Json;
namespace Adaptation.Shared.Metrology
{
public partial class WS
{
// this class represents the response from the Inbound API endpoint
public class Results
{
// true when the data was written to the database; otherwise false
public bool Success { get; set; }
// when Success is true, contains the ID of the Header record in the database
public long HeaderID { get; set; }
// when Success is false, contains the list of errors
public List<string> Errors { get; set; }
// contains any warnings; warnings do not prevent data from being saved
public List<string> Warnings { get; set; }
// this is just a helper function to make displaying the results easier
public override string ToString()
{
return JsonSerializer.Serialize(this, GetType());
}
}
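// Illustrative sketch (not part of the original commit) of how a caller might inspect a Results
// instance returned by SendData; "results" here is a hypothetical local variable:
//   if (!results.Success)
//       throw new Exception(string.Join("; ", results.Errors));
//   foreach (string warning in results.Warnings ?? new List<string>())
//       Console.WriteLine(warning);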
}
}

View File

@ -0,0 +1,127 @@
using System;
using System.Collections.Generic;
using System.Net.Http;
using System.Text;
using System.Text.Json;
namespace Adaptation.Shared.Metrology
{
public partial class WS
{
public static Tuple<string, Results> SendData(string url, object payload, int timeoutSeconds = 120)
{
Results results = new Results();
string resultsJson = string.Empty;
try
{
string json = JsonSerializer.Serialize(payload, payload.GetType());
if (string.IsNullOrEmpty(url) || !url.Contains(":") || !url.Contains("."))
throw new Exception("Invalid URL");
using (HttpClient httpClient = new HttpClient())
{
httpClient.Timeout = new TimeSpan(0, 0, 0, timeoutSeconds, 0);
HttpRequestMessage httpRequestMessage = new HttpRequestMessage
{
RequestUri = new Uri(url),
Method = HttpMethod.Post,
Content = new StringContent(json, Encoding.UTF8, "application/json")
};
HttpResponseMessage httpResponseMessage = httpClient.SendAsync(httpRequestMessage, HttpCompletionOption.ResponseContentRead).Result;
resultsJson = httpResponseMessage.Content.ReadAsStringAsync().Result;
results = JsonSerializer.Deserialize<Results>(resultsJson);
}
if (!results.Success)
results.Errors.Add(results.ToString());
}
catch (Exception e)
{
Exception exception = e;
StringBuilder stringBuilder = new StringBuilder();
while (!(exception is null))
{
stringBuilder.AppendLine(exception.Message);
exception = exception.InnerException;
}
if (results.Errors is null)
results.Errors = new List<string>();
results.Errors.Add(stringBuilder.ToString());
}
return new Tuple<string, Results>(resultsJson, results);
}
// this method is a wrapper for attaching a file to either a header or data record
// URL is the same URL used for SendData, ex: http://localhost/api/inbound/CDE
// attachToHeaderId is the ID returned by SendData
// attachToDataUniqueId is the string unique ID for the data record (i.e., the Title of the SharePoint list entry)
// fileContents is a byte array with the contents of the file
// fileName identifies which attachment this is: image.pdf, data.pdf, data.txt, header.pdf, etc.
// timeoutSeconds is configured as the request timeout
// this method will either succeed or throw an exception
// also, this has been made synchronous
public static void AttachFile(string url, long attachToHeaderId, string attachToDataUniqueId, byte[] fileContents, string fileName, int timeoutSeconds = 60)
{
using (HttpClient httpClient = new HttpClient())
{
string requestUrl = url + "/attachment?headerid=" + attachToHeaderId.ToString();
if (!string.IsNullOrWhiteSpace(attachToDataUniqueId))
{
requestUrl += "&datauniqueid=";
requestUrl += System.Net.WebUtility.UrlEncode(attachToDataUniqueId);
}
requestUrl += "&filename="; // this is just so the web server log shows the filename
requestUrl += System.Net.WebUtility.UrlEncode(fileName);
httpClient.Timeout = new TimeSpan(0, 0, 0, timeoutSeconds, 0);
MultipartFormDataContent multipartFormDataContent = new MultipartFormDataContent();
ByteArrayContent byteArrayContent = new ByteArrayContent(fileContents);
byteArrayContent.Headers.ContentType = new System.Net.Http.Headers.MediaTypeHeaderValue("application/octet-stream");
multipartFormDataContent.Add(byteArrayContent, "attachment", fileName);
HttpResponseMessage httpResponseMessage = httpClient.PostAsync(requestUrl, multipartFormDataContent).Result;
if (httpResponseMessage.IsSuccessStatusCode)
return;
string resultBody = httpResponseMessage.Content.ReadAsStringAsync().Result;
throw new Exception("Attachment failed: " + resultBody);
}
}
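// Illustrative end-to-end sketch (not part of the original commit); the URL matches the example above,
// while "payload" and the attachment path/name are hypothetical:
//   Tuple<string, Results> sendDataResult = SendData("http://localhost/api/inbound/CDE", payload);
//   if (sendDataResult.Item2.Success)
//       AttachFile("http://localhost/api/inbound/CDE", sendDataResult.Item2.HeaderID, string.Empty,
//           System.IO.File.ReadAllBytes(@"C:\Temp\data.pdf"), "data.pdf");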
public static void AttachFiles(string url, long headerID, List<Attachment> headerAttachments = null, List<Attachment> dataAttachments = null)
{
try
{
if (!(headerAttachments is null))
{
foreach (Attachment attachment in headerAttachments)
AttachFile(url, headerID, "", System.IO.File.ReadAllBytes(attachment.SourceFileName), attachment.DestinationFileName);
}
if (!(dataAttachments is null))
{
foreach (Attachment attachment in dataAttachments)
AttachFile(url, headerID, attachment.UniqueId, System.IO.File.ReadAllBytes(attachment.SourceFileName), attachment.DestinationFileName);
}
//MessageBox.Show(r.ToString());
}
catch (Exception e)
{
Exception exception = e;
StringBuilder stringBuilder = new StringBuilder();
while (!(exception is null))
{
stringBuilder.AppendLine(exception.Message);
exception = exception.InnerException;
}
//MessageBox.Show(msgs.ToString(), "Exception", //MessageBoxButtons.OK, //MessageBoxIcon.Error);
throw new Exception(stringBuilder.ToString());
}
}
}
}

View File

@ -0,0 +1,13 @@
namespace Adaptation.Shared
{
public enum ParameterType
{
String = 0,
Integer = 2,
Double = 3,
Boolean = 4,
StructuredType = 5
}
}

View File

@ -0,0 +1,426 @@
using Adaptation.Shared.Methods;
using System;
using System.Collections.Generic;
using System.Globalization;
using System.IO;
using System.Linq;
using System.Text;
using System.Text.Json;
namespace Adaptation.Shared
{
public class ProcessDataStandardFormat
{
public const string RecordStart = "RECORD_START";
public enum SearchFor
{
EquipmentIntegration = 1,
BusinessIntegration = 2,
SystemExport = 3,
Archive = 4
}
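// Serializes the extracted JSON rows into PDSF text: a fixed header whose offset placeholders are
// back-filled once the final length is known, one tab-delimited data row per JSON object, and a
// LOGISTICS_1/LOGISTICS_2 footer taken from logisticsText or synthesized from fileRead and logistics.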
public static string GetPDSFText(IFileRead fileRead, Logistics logistics, JsonElement[] jsonElements, string logisticsText)
{
string result;
if (!jsonElements.Any())
result = string.Empty;
else
{
int columns = 0;
List<string> lines;
string endOffset = "E#######T";
string dataOffset = "D#######T";
string headerOffset = "H#######T";
string format = "MM/dd/yyyy HH:mm:ss";
StringBuilder stringBuilder = new();
lines = new string[] { "HEADER_TAG\tHEADER_VALUE", "FORMAT\t2.00", "NUMBER_PASSES\t0001", string.Concat("HEADER_OFFSET\t", headerOffset), string.Concat("DATA_OFFSET\t", dataOffset), string.Concat("END_OFFSET\t", endOffset) }.ToList();
stringBuilder.Append("\"Time\"").Append('\t');
stringBuilder.Append("\"A_LOGISTICS\"").Append('\t');
stringBuilder.Append("\"B_LOGISTICS\"").Append('\t');
for (int i = 0; i < jsonElements.Length;)
{
foreach (JsonProperty jsonProperty in jsonElements[0].EnumerateObject())
{
columns += 1;
stringBuilder.Append("\"").Append(jsonProperty.Name).Append("\"").Append('\t');
}
break;
}
stringBuilder.Remove(stringBuilder.Length - 1, 1);
lines.Add(stringBuilder.ToString());
for (int i = 0; i < jsonElements.Length; i++)
{
stringBuilder.Clear();
stringBuilder.Append("0.1").Append('\t');
stringBuilder.Append("1").Append('\t');
stringBuilder.Append("2").Append('\t');
foreach (JsonProperty jsonProperty in jsonElements[i].EnumerateObject())
stringBuilder.Append(jsonProperty.Value).Append('\t');
stringBuilder.Remove(stringBuilder.Length - 1, 1);
lines.Add(stringBuilder.ToString());
}
lines.Add(string.Concat("NUM_DATA_ROWS ", jsonElements.Length.ToString().PadLeft(9, '0')));
lines.Add(string.Concat("NUM_DATA_COLUMNS ", (columns + 3).ToString().PadLeft(9, '0')));
lines.Add("DELIMITER ;");
lines.Add(string.Concat("START_TIME_FORMAT ", format));
lines.Add(string.Concat("START_TIME ", logistics.DateTimeFromSequence.ToString(format))); //12/26/2019 15:22:44
lines.Add(string.Concat("LOGISTICS_COLUMN", '\t', "A_LOGISTICS"));
lines.Add(string.Concat("LOGISTICS_COLUMN", '\t', "B_LOGISTICS"));
if (!string.IsNullOrEmpty(logisticsText))
lines.Add(logisticsText);
else
{
lines.Add(string.Concat("LOGISTICS_1", '\t', "A_CHAMBER=;A_INFO=", fileRead.EventName, ";A_INFO2=", fileRead.EquipmentType, ";A_JOBID=", fileRead.CellInstanceName, ";A_MES_ENTITY=", fileRead.MesEntity, ";A_MID=", logistics.MID, ";A_NULL_DATA=", fileRead.NullData, ";A_PPID=NO_PPID;A_PROCESS_JOBID=", logistics.ProcessJobID, ";A_PRODUCT=;A_SEQUENCE=", logistics.Sequence, ";A_WAFER_ID=;"));
lines.Add(string.Concat("LOGISTICS_2", '\t', "B_CHAMBER=;B_INFO=", fileRead.EventName, ";B_INFO2=", fileRead.EquipmentType, ";B_JOBID=", fileRead.CellInstanceName, ";B_MES_ENTITY=", fileRead.MesEntity, ";B_MID=", logistics.MID, ";B_NULL_DATA=", fileRead.NullData, ";B_PPID=NO_PPID;B_PROCESS_JOBID=", logistics.ProcessJobID, ";B_PRODUCT=;B_SEQUENCE=", logistics.Sequence, ";B_WAFER_ID=;"));
lines.Add("END_HEADER");
}
stringBuilder.Clear();
foreach (string line in lines)
stringBuilder.AppendLine(line);
result = stringBuilder.ToString();
result = result.Replace(headerOffset, result.IndexOf("NUM_DATA_ROWS").ToString().PadLeft(9, '0')).
Replace(dataOffset, result.IndexOf('"').ToString().PadLeft(9, '0')).
Replace(endOffset, result.Length.ToString().PadLeft(9, '0'));
}
return result;
}
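// Splits a PDSF report into three parts: the LOGISTICS_* header text, the column names taken from
// line 7 (duplicate names receive a numeric suffix), and the raw tab-delimited body rows.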
public static Tuple<string, string[], string[]> GetLogisticsColumnsAndBody(string reportFullPath, string[] lines = null)
{
string segment;
List<string> body = new();
StringBuilder logistics = new();
if (lines is null)
lines = File.ReadAllLines(reportFullPath);
string[] segments;
if (lines.Length < 7)
segments = new string[] { };
else
segments = lines[6].Trim().Split('\t');
List<string> columns = new();
for (int c = 0; c < segments.Length; c++)
{
segment = segments[c].Substring(1, segments[c].Length - 2);
if (!columns.Contains(segment))
columns.Add(segment);
else
{
// de-duplicate repeated column names with an "_n" suffix (e.g. "Time" -> "Time_1")
string segmentWithSuffix;
for (short i = 1; i < short.MaxValue; i++)
{
segmentWithSuffix = string.Concat(segment, "_", i);
if (!columns.Contains(segmentWithSuffix))
{
columns.Add(segmentWithSuffix);
break;
}
}
}
}
bool lookForLogistics = false;
for (int r = 7; r < lines.Count(); r++)
{
if (lines[r].StartsWith("NUM_DATA_ROWS"))
lookForLogistics = true;
if (!lookForLogistics)
{
body.Add(lines[r]);
continue;
}
if (lines[r].StartsWith("LOGISTICS_1"))
{
for (int i = r; i < lines.Count(); i++)
{
if (lines[i].StartsWith("END_HEADER"))
break;
logistics.AppendLine(lines[i]);
}
break;
}
}
return new Tuple<string, string[], string[]>(logistics.ToString(), columns.ToArray(), body.ToArray());
}
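// Projects the body rows into JSON objects keyed by column name; when lookForNumbers is true,
// all-digit values are emitted as JSON numbers and empty values as null.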
public static JsonElement[] GetArray(Tuple<string, string[], string[]> pdsf, bool lookForNumbers = false)
{
JsonElement[] results;
string logistics = pdsf.Item1;
string[] columns = pdsf.Item2;
string[] bodyLines = pdsf.Item3;
if (!bodyLines.Any() || !bodyLines[0].Contains('\t'))
results = JsonSerializer.Deserialize<JsonElement[]>("[]");
else
{
string value;
string[] segments;
StringBuilder stringBuilder = new();
foreach (string bodyLine in bodyLines)
{
stringBuilder.Append('{');
segments = bodyLine.Trim().Split('\t');
if (!lookForNumbers)
{
for (int c = 1; c < segments.Length; c++)
{
value = segments[c].Replace("\\", "\\\\").Replace("\"", "\\\"");
stringBuilder.Append('"').Append(columns[c]).Append("\":\"").Append(value).Append("\",");
}
}
else
{
for (int c = 1; c < segments.Length; c++)
{
value = segments[c].Replace("\\", "\\\\").Replace("\"", "\\\"");
if (string.IsNullOrEmpty(value))
stringBuilder.Append('"').Append(columns[c]).Append("\":").Append(value).Append("null,");
else if (value.All(char.IsDigit))
stringBuilder.Append('"').Append(columns[c]).Append("\":").Append(value).Append(",");
else
stringBuilder.Append('"').Append(columns[c]).Append("\":\"").Append(value).Append("\",");
}
}
stringBuilder.Remove(stringBuilder.Length - 1, 1);
stringBuilder.AppendLine("},");
}
stringBuilder.Remove(stringBuilder.Length - 3, 3);
results = JsonSerializer.Deserialize<JsonElement[]>(string.Concat("[", stringBuilder, "]"));
}
return results;
}
public static Dictionary<string, List<string>> GetDictionary(Tuple<string, string[], string[]> pdsf)
{
Dictionary<string, List<string>> results = new();
string[] segments;
string[] columns = pdsf.Item2;
string[] bodyLines = pdsf.Item3;
foreach (string column in columns)
results.Add(column, new List<string>());
foreach (string bodyLine in bodyLines)
{
segments = bodyLine.Split('\t');
for (int c = 1; c < segments.Length; c++)
{
if (c >= columns.Length)
continue;
results[columns[c]].Add(segments[c]);
}
}
return results;
}
public static Tuple<string, Dictionary<Test, Dictionary<string, List<string>>>> GetTestDictionary(Tuple<string, string[], string[]> pdsf)
{
Dictionary<Test, Dictionary<string, List<string>>> results = new();
string testColumn = nameof(Test);
Dictionary<string, List<string>> keyValuePairs = GetDictionary(pdsf);
if (!keyValuePairs.ContainsKey(testColumn))
throw new Exception();
int min;
int max;
Test testKey;
List<string> vs;
string columnKey;
Dictionary<Test, List<int>> tests = new();
for (int i = 0; i < keyValuePairs[testColumn].Count; i++)
{
if (Enum.TryParse(keyValuePairs[testColumn][i], out Test test))
{
if (!results.ContainsKey(test))
{
tests.Add(test, new List<int>());
results.Add(test, new Dictionary<string, List<string>>());
}
tests[test].Add(i);
}
}
foreach (KeyValuePair<Test, List<int>> testKeyValuePair in tests)
{
testKey = testKeyValuePair.Key;
min = testKeyValuePair.Value.Min();
max = testKeyValuePair.Value.Max() + 1;
foreach (KeyValuePair<string, List<string>> keyValuePair in keyValuePairs)
results[testKey].Add(keyValuePair.Key, new List<string>());
foreach (KeyValuePair<string, List<string>> keyValuePair in keyValuePairs)
{
vs = keyValuePair.Value;
columnKey = keyValuePair.Key;
for (int i = min; i < max; i++)
{
if (vs.Count > i)
results[testKey][columnKey].Add(vs[i]);
else
results[testKey][columnKey].Add(string.Empty);
}
}
}
return new Tuple<string, Dictionary<Test, Dictionary<string, List<string>>>>(pdsf.Item1, results);
}
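// For example, GetString(SearchFor.EquipmentIntegration, addSpaces: true) returns "01 Equipment Integration",
// while GetString(SearchFor.SystemExport, addSpaces: false) returns "03SystemExport".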
private static string GetString(SearchFor searchFor, bool addSpaces, char separator = ' ')
{
if (!addSpaces)
return string.Concat(((int)searchFor).ToString().PadLeft(2, '0'), searchFor);
else
return string.Concat(((int)searchFor).ToString().PadLeft(2, '0'), separator, searchFor.ToString().Replace("In", string.Concat(separator, "In")).Replace("Ex", string.Concat(separator, "Ex")));
}
public static string EquipmentIntegration(bool addSpaces = true, char separator = ' ')
{
return GetString(SearchFor.EquipmentIntegration, addSpaces, separator);
}
public static string BusinessIntegration(bool addSpaces = true, char separator = ' ')
{
return GetString(SearchFor.BusinessIntegration, addSpaces, separator);
}
public static string SystemExport(bool addSpaces = true, char separator = ' ')
{
return GetString(SearchFor.SystemExport, addSpaces, separator);
}
public static string Archive(bool addSpaces = true, char separator = ' ')
{
return GetString(SearchFor.Archive, addSpaces, separator);
}
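// Builds the scope's delimited export text beneath scopeInfo.Header: one '!'-prefixed row per record,
// with Date/Time columns rewritten from the logistics sequence (or the supplied format) and paired
// parameters emitted as separate name;value lines.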
public static string GetLines(Logistics logistics, Properties.IScopeInfo scopeInfo, List<string> names, Dictionary<string, List<string>> keyValuePairs, string dateFormat, string timeFormat, List<string> pairedParameterNames, bool useDateTimeFromSequence = true, string format = "", List<string> ignoreParameterNames = null)
{
StringBuilder result = new();
if (ignoreParameterNames is null)
ignoreParameterNames = new List<string>();
if (useDateTimeFromSequence && !string.IsNullOrEmpty(format))
throw new Exception();
else if (!useDateTimeFromSequence && string.IsNullOrEmpty(format))
throw new Exception();
string nullData;
const string columnDate = "Date";
const string columnTime = "Time";
const string firstDuplicate = "_1";
result.AppendLine(scopeInfo.Header);
StringBuilder line = new();
if (logistics.NullData is null)
nullData = string.Empty;
else
nullData = logistics.NullData.ToString();
int count = (from l in keyValuePairs select l.Value.Count).Min();
for (int r = 0; r < count; r++)
{
line.Clear();
line.Append("!");
foreach (KeyValuePair<string, List<string>> keyValuePair in keyValuePairs)
{
if (!names.Contains(keyValuePair.Key))
continue;
if (ignoreParameterNames.Contains(keyValuePair.Key))
continue;
if (pairedParameterNames.Contains(keyValuePair.Key))
{
if (string.IsNullOrEmpty(keyValuePair.Value[r]) || keyValuePair.Value[r] == nullData)
continue;
else
result.Append(line).Append(keyValuePair.Key).Append(';').AppendLine(keyValuePair.Value[r]);
}
else
{
if (useDateTimeFromSequence && keyValuePair.Key == columnDate)
line.Append(logistics.DateTimeFromSequence.ToString(dateFormat));
else if (useDateTimeFromSequence && keyValuePair.Key == columnTime)
line.Append(logistics.DateTimeFromSequence.ToString(timeFormat));
else if (!useDateTimeFromSequence && keyValuePair.Key == columnDate && keyValuePair.Value[r].Length == format.Length)
line.Append(DateTime.ParseExact(keyValuePair.Value[r], format, CultureInfo.InvariantCulture).ToString(dateFormat));
else if (!useDateTimeFromSequence && keyValuePair.Key == columnTime && keyValuePairs.ContainsKey(string.Concat(keyValuePair.Key, firstDuplicate)) && keyValuePairs[string.Concat(keyValuePair.Key, firstDuplicate)][r].Length == format.Length)
line.Append(DateTime.ParseExact(keyValuePairs[string.Concat(keyValuePair.Key, firstDuplicate)][r], format, CultureInfo.InvariantCulture).ToString(timeFormat));
else if (string.IsNullOrEmpty(keyValuePair.Value[r]) || keyValuePair.Value[r] == nullData)
line.Append(nullData);
else
line.Append(keyValuePair.Value[r]);
line.Append(';');
}
}
if (!pairedParameterNames.Any())
{
line.Remove(line.Length - 1, 1);
result.AppendLine(line.ToString());
}
}
return result.ToString();
}
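// Re-emits the tab-delimited report with every column padded (right-aligned) to the width of its widest
// value so it lines up in a fixed-width viewer; rows are grouped wherever the column count changes, and
// each group is preceded by a line of right-aligned column indexes.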
public static List<string> PDSFToFixedWidth(string reportFullPath)
{
List<string> results = new();
if (!File.Exists(reportFullPath))
throw new Exception();
int[] group;
string line;
int startsAt = 0;
string[] segments;
int? currentGroup = null;
char inputSeperator = '\t';
char outputSeperator = '\t';
List<int> vs = new();
List<int[]> groups = new();
string[] lines = File.ReadAllLines(reportFullPath);
StringBuilder stringBuilder = new();
for (int i = 0; i < lines.Length; i++)
{
if (string.IsNullOrEmpty(lines[i]))
continue;
segments = lines[i].Split(inputSeperator);
if (currentGroup is null)
currentGroup = segments.Length;
if (segments.Length != currentGroup)
{
currentGroup = segments.Length;
groups.Add(new int[] { startsAt, i - 1 });
startsAt = i;
}
}
if (startsAt == lines.Length - 1 && lines[0].Split(inputSeperator).Length != currentGroup)
groups.Add(new int[] { lines.Length - 1, lines.Length - 1 });
for (int g = 0; g < groups.Count; g++)
{
vs.Clear();
group = groups[g];
line = lines[group[0]];
segments = line.Split(inputSeperator);
for (int s = 0; s < segments.Length; s++)
vs.Add(segments[s].Length);
for (int i = group[0]; i <= group[1]; i++)
{
line = lines[i];
segments = line.Split(inputSeperator);
for (int s = 0; s < segments.Length; s++)
{
if (vs[s] < segments[s].Length)
vs[s] = segments[s].Length;
}
}
stringBuilder.Clear();
for (int s = 0; s < segments.Length; s++)
stringBuilder.Append((s + 1).ToString().PadLeft(vs[s], ' ')).Append(outputSeperator);
stringBuilder.Remove(stringBuilder.Length - 1, 1);
results.Add(stringBuilder.ToString());
for (int i = group[0]; i <= group[1]; i++)
{
line = lines[i];
stringBuilder.Clear();
segments = line.Split(inputSeperator);
for (int s = 0; s < segments.Length; s++)
stringBuilder.Append(segments[s].PadLeft(vs[s], ' ')).Append(outputSeperator);
stringBuilder.Remove(stringBuilder.Length - 1, 1);
results.Add(stringBuilder.ToString());
}
results.Add(string.Empty);
}
return results;
}
}
}

View File

@ -0,0 +1,13 @@
namespace Adaptation.Shared.Properties
{
public interface IDescription
{
int Test { get; }
int Count { get; }
int Index { get; }
}
}

View File

@ -0,0 +1,20 @@
namespace Adaptation.Shared.Properties
{
public interface IFileRead
{
bool IsEvent { get; }
string NullData { get; }
string MesEntity { get; }
bool IsEAFHosted { get; }
string EventName { get; }
string EquipmentType { get; }
string ReportFullPath { get; }
string CellInstanceName { get; }
string ExceptionSubject { get; }
bool UseCyclicalForDescription { get; }
string CellInstanceConnectionName { get; }
string ParameterizedModelObjectDefinitionType { get; }
}
}

View File

@ -0,0 +1,25 @@
using System;
using System.Collections.Generic;
namespace Adaptation.Shared.Properties
{
public interface ILogistics
{
public object NullData { get; }
public string JobID { get; } //CellName
public long Sequence { get; } //Ticks
public DateTime DateTimeFromSequence { get; }
public double TotalSecondsSinceLastWriteTimeFromSequence { get; }
public string MesEntity { get; } //SPC
public string ReportFullPath { get; } //Extract file
public string ProcessJobID { get; set; } //Reactor (duplicate but I want it in the logistics)
public string MID { get; set; } //Lot & Pocket || Lot
public List<string> Tags { get; set; }
public List<string> Logistics1 { get; set; }
public List<Logistics2> Logistics2 { get; set; }
}
}

View File

@ -0,0 +1,17 @@
namespace Adaptation.Shared.Properties
{
public interface ILogistics2
{
public string MID { get; }
public string RunNumber { get; }
public string SatelliteGroup { get; }
public string PartNumber { get; }
public string PocketNumber { get; }
public string WaferLot { get; }
public string Recipe { get; }
}
}

View File

@ -0,0 +1,13 @@
using System.Collections.Generic;
namespace Adaptation.Shared.Properties
{
public interface IProcessData
{
List<object> Details { get; }
}
}

View File

@ -0,0 +1,20 @@
using System;
namespace Adaptation.Shared.Properties
{
public interface IScopeInfo
{
Enum Enum { get; }
string HTML { get; }
string Title { get; }
string FileName { get; }
int TestValue { get; }
string Header { get; }
string QueryFilter { get; }
string FileNameWithoutExtension { get; }
}
}

57
Adaptation/Shared/Test.cs Normal file
View File

@ -0,0 +1,57 @@
namespace Adaptation.Shared
{
public enum Test
{
AFMRoughness = 34,
BioRadQS408M = 25,
BioRadStratus = 26,
BreakdownVoltageCenter = 0,
BreakdownVoltageEdge = 1,
BreakdownVoltageMiddle8in = 2,
CandelaKlarfDC = 6,
CandelaLaser = 36,
CandelaProdU = 39,
CandelaPSL = 38,
CandelaVerify = 37,
CDE = 24,
CV = 3,
DailyRPMAverage = 19,
DailyRPMPLRatio = 20,
DailyRPMXY = 18,
Denton = 9,
DiffusionLength = 45,
Hall = 10,
HgCV = 23,
Lehighton = 13,
Microscope = 46,
MonthlyCV = 4,
MonthlyHall = 11,
MonthlyXRD = 32,
Photoreflectance = 22,
PlatoA = 48, //Largest
RPMAverage = 16,
RPMPLRatio = 17,
RPMXY = 15,
SP1 = 8,
Tencor = 7,
UV = 35,
VerificationLehighton = 14,
VerificationRPM = 21,
VerificationWarpAndBow = 29,
VpdIcpmsAnalyte = 27,
WarpAndBow = 28,
WeeklyCV = 5,
WeeklyHall = 12,
WeeklyXRD = 33,
WeeklyXRDAIcomp = 40,
WeeklyXRDFWHM002 = 41,
WeeklyXRDFWHM105 = 42,
WeeklyXRDSLStks = 43,
WeeklyXRDXRR = 44,
XRDWeightedAverage = 31,
JVXRD = 47,
XRDXY = 30
}
}

View File

@ -0,0 +1,97 @@
using Adaptation.Shared.Methods;
using Microsoft.Extensions.Logging;
using Microsoft.VisualStudio.TestTools.UnitTesting;
using Shared;
using System;
using System.Diagnostics;
using System.IO;
using System.Reflection;
namespace _Tests.CreateSelfDescription.Staging.v2_36_3
{
[TestClass]
public class BIORAD4 : EAFLoggingUnitTesting
{
private static BIORAD4 _EAFLoggingUnitTesting;
internal static BIORAD4 EAFLoggingUnitTesting => _EAFLoggingUnitTesting;
public BIORAD4() : base(testContext: null, declaringType: null, skipEquipmentDictionary: false)
{
if (_EAFLoggingUnitTesting is null)
throw new Exception();
}
public BIORAD4(TestContext testContext) : base(testContext, new StackFrame().GetMethod().DeclaringType, skipEquipmentDictionary: false)
{
}
[ClassInitialize]
public static void ClassInitialize(TestContext testContext)
{
if (_EAFLoggingUnitTesting is null)
_EAFLoggingUnitTesting = new BIORAD4(testContext);
_EAFLoggingUnitTesting.Logger.LogInformation(string.Concat(testContext.TestName, " - ClassInitialize"));
string[] fileNameAndText = _EAFLoggingUnitTesting.AdaptationTesting.GetCSharpText(testContext.TestName);
File.WriteAllText(fileNameAndText[0], fileNameAndText[1]);
File.WriteAllText(fileNameAndText[2], fileNameAndText[3]);
}
[ClassCleanup()]
public static void ClassCleanup()
{
if (!(_EAFLoggingUnitTesting.Logger is null))
_EAFLoggingUnitTesting.Logger.LogInformation("Cleanup");
if (!(_EAFLoggingUnitTesting is null))
_EAFLoggingUnitTesting.Dispose();
}
[TestMethod]
public void Staging__v2_36_3__BIORAD4__txt()
{
string check = "*DataBioRad.txt";
MethodBase methodBase = new StackFrame().GetMethod();
_EAFLoggingUnitTesting.Logger.LogInformation(string.Concat(methodBase.Name, " - Getting configuration"));
string[] fileNameAndJson = _EAFLoggingUnitTesting.AdaptationTesting.GetConfiguration(methodBase);
Assert.IsTrue(fileNameAndJson[1].Contains(check));
File.WriteAllText(fileNameAndJson[0], fileNameAndJson[1]);
IFileRead fileRead = _EAFLoggingUnitTesting.AdaptationTesting.Get(methodBase, sourceFileLocation: string.Empty, sourceFileFilter: string.Empty, useCyclicalForDescription: false);
Assert.IsFalse(string.IsNullOrEmpty(fileRead.CellInstanceConnectionName));
_EAFLoggingUnitTesting.Logger.LogInformation(string.Concat(methodBase.Name, " - Exit"));
}
[TestMethod]
public void Staging__v2_36_3__BIORAD4__Stratus()
{
string check = "CassetteDataBioRad_*.txt";
MethodBase methodBase = new StackFrame().GetMethod();
_EAFLoggingUnitTesting.Logger.LogInformation(string.Concat(methodBase.Name, " - Getting configuration"));
string[] fileNameAndJson = _EAFLoggingUnitTesting.AdaptationTesting.GetConfiguration(methodBase);
Assert.IsTrue(fileNameAndJson[1].Contains(check));
File.WriteAllText(fileNameAndJson[0], fileNameAndJson[1]);
IFileRead fileRead = _EAFLoggingUnitTesting.AdaptationTesting.Get(methodBase, sourceFileLocation: string.Empty, sourceFileFilter: string.Empty, useCyclicalForDescription: false);
Assert.IsFalse(string.IsNullOrEmpty(fileRead.CellInstanceConnectionName));
_EAFLoggingUnitTesting.Logger.LogInformation(string.Concat(methodBase.Name, " - Exit"));
}
[TestMethod]
public void Staging__v2_36_3__BIORAD4__QS408M()
{
string check = "DetailDataBioRad_*.txt";
MethodBase methodBase = new StackFrame().GetMethod();
_EAFLoggingUnitTesting.Logger.LogInformation(string.Concat(methodBase.Name, " - Getting configuration"));
string[] fileNameAndJson = _EAFLoggingUnitTesting.AdaptationTesting.GetConfiguration(methodBase);
Assert.IsTrue(fileNameAndJson[1].Contains(check));
File.WriteAllText(fileNameAndJson[0], fileNameAndJson[1]);
_EAFLoggingUnitTesting.Logger.LogInformation(string.Concat(methodBase.Name, " - Exit"));
}
}
}
// dotnet build --runtime win-x64
// dotnet test --runtime win-x64 --no-build --filter "FullyQualifiedName~_Tests.CreateSelfDescription.Staging.v2_36_3" --% -- TestRunParameters.Parameter(name=\"Debug\", value=\"Debugger.IsAttached\")
// dotnet test --runtime win-x64 --no-build --filter "FullyQualifiedName~_Tests.CreateSelfDescription.Staging.v2_36_3 & ClassName~BIORAD4" --% -- TestRunParameters.Parameter(name=\"Debug\", value=\"Debugger.IsAttached\")
// dotnet test --runtime win-x64 --no-build --filter "FullyQualifiedName~_Tests.CreateSelfDescription.Staging.v2_36_3 & ClassName~BIORAD4 & Staging__v2_36_3__BIORAD4__QS408M" --% -- TestRunParameters.Parameter(name=\"Debug\", value=\"Debugger.IsAttached\")

View File

@ -0,0 +1,97 @@
using Adaptation.Shared.Methods;
using Microsoft.Extensions.Logging;
using Microsoft.VisualStudio.TestTools.UnitTesting;
using Shared;
using System;
using System.Diagnostics;
using System.IO;
using System.Reflection;
namespace _Tests.CreateSelfDescription.Staging.v2_36_3
{
[TestClass]
public class BIORAD5 : EAFLoggingUnitTesting
{
private static BIORAD5 _EAFLoggingUnitTesting;
internal static BIORAD5 EAFLoggingUnitTesting => _EAFLoggingUnitTesting;
public BIORAD5() : base(testContext: null, declaringType: null, skipEquipmentDictionary: false)
{
if (_EAFLoggingUnitTesting is null)
throw new Exception();
}
public BIORAD5(TestContext testContext) : base(testContext, new StackFrame().GetMethod().DeclaringType, skipEquipmentDictionary: false)
{
}
[ClassInitialize]
public static void ClassInitialize(TestContext testContext)
{
if (_EAFLoggingUnitTesting is null)
_EAFLoggingUnitTesting = new BIORAD5(testContext);
_EAFLoggingUnitTesting.Logger.LogInformation(string.Concat(testContext.TestName, " - ClassInitialize"));
string[] fileNameAndText = _EAFLoggingUnitTesting.AdaptationTesting.GetCSharpText(testContext.TestName);
File.WriteAllText(fileNameAndText[0], fileNameAndText[1]);
File.WriteAllText(fileNameAndText[2], fileNameAndText[3]);
}
[ClassCleanup()]
public static void ClassCleanup()
{
if (!(_EAFLoggingUnitTesting.Logger is null))
_EAFLoggingUnitTesting.Logger.LogInformation("Cleanup");
if (!(_EAFLoggingUnitTesting is null))
_EAFLoggingUnitTesting.Dispose();
}
[TestMethod]
public void Staging__v2_36_3__BIORAD5__txt()
{
string check = "*DataBioRad.txt";
MethodBase methodBase = new StackFrame().GetMethod();
_EAFLoggingUnitTesting.Logger.LogInformation(string.Concat(methodBase.Name, " - Getting configuration"));
string[] fileNameAndJson = _EAFLoggingUnitTesting.AdaptationTesting.GetConfiguration(methodBase);
Assert.IsTrue(fileNameAndJson[1].Contains(check));
File.WriteAllText(fileNameAndJson[0], fileNameAndJson[1]);
IFileRead fileRead = _EAFLoggingUnitTesting.AdaptationTesting.Get(methodBase, sourceFileLocation: string.Empty, sourceFileFilter: string.Empty, useCyclicalForDescription: false);
Assert.IsFalse(string.IsNullOrEmpty(fileRead.CellInstanceConnectionName));
_EAFLoggingUnitTesting.Logger.LogInformation(string.Concat(methodBase.Name, " - Exit"));
}
[TestMethod]
public void Staging__v2_36_3__BIORAD5__Stratus()
{
string check = "CassetteDataBioRad_*.txt";
MethodBase methodBase = new StackFrame().GetMethod();
_EAFLoggingUnitTesting.Logger.LogInformation(string.Concat(methodBase.Name, " - Getting configuration"));
string[] fileNameAndJson = _EAFLoggingUnitTesting.AdaptationTesting.GetConfiguration(methodBase);
Assert.IsTrue(fileNameAndJson[1].Contains(check));
File.WriteAllText(fileNameAndJson[0], fileNameAndJson[1]);
IFileRead fileRead = _EAFLoggingUnitTesting.AdaptationTesting.Get(methodBase, sourceFileLocation: string.Empty, sourceFileFilter: string.Empty, useCyclicalForDescription: false);
Assert.IsFalse(string.IsNullOrEmpty(fileRead.CellInstanceConnectionName));
_EAFLoggingUnitTesting.Logger.LogInformation(string.Concat(methodBase.Name, " - Exit"));
}
[TestMethod]
public void Staging__v2_36_3__BIORAD5__QS408M()
{
string check = "DetailDataBioRad_*.txt";
MethodBase methodBase = new StackFrame().GetMethod();
_EAFLoggingUnitTesting.Logger.LogInformation(string.Concat(methodBase.Name, " - Getting configuration"));
string[] fileNameAndJson = _EAFLoggingUnitTesting.AdaptationTesting.GetConfiguration(methodBase);
Assert.IsTrue(fileNameAndJson[1].Contains(check));
File.WriteAllText(fileNameAndJson[0], fileNameAndJson[1]);
_EAFLoggingUnitTesting.Logger.LogInformation(string.Concat(methodBase.Name, " - Exit"));
}
}
}
// dotnet build --runtime win-x64
// dotnet test --runtime win-x64 --no-build --filter "FullyQualifiedName~_Tests.CreateSelfDescription.Staging.v2_36_3" --% -- TestRunParameters.Parameter(name=\"Debug\", value=\"Debugger.IsAttached\")
// dotnet test --runtime win-x64 --no-build --filter "FullyQualifiedName~_Tests.CreateSelfDescription.Staging.v2_36_3 & ClassName~BIORAD5" --% -- TestRunParameters.Parameter(name=\"Debug\", value=\"Debugger.IsAttached\")
// dotnet test --runtime win-x64 --no-build --filter "FullyQualifiedName~_Tests.CreateSelfDescription.Staging.v2_36_3 & ClassName~BIORAD5 & Staging__v2_36_3__BIORAD5__QS408M" --% -- TestRunParameters.Parameter(name=\"Debug\", value=\"Debugger.IsAttached\")

View File

@ -0,0 +1,183 @@
using Adaptation.Shared.Methods;
using Microsoft.Extensions.Logging;
using Microsoft.VisualStudio.TestTools.UnitTesting;
using Shared;
using System;
using System.Diagnostics;
using System.IO;
using System.Reflection;
namespace _Tests.CreateSelfDescription.Staging.v2_36_3
{
[TestClass]
public class MET08THFTIRSTRATUS : EAFLoggingUnitTesting
{
private static MET08THFTIRSTRATUS _EAFLoggingUnitTesting;
internal static MET08THFTIRSTRATUS EAFLoggingUnitTesting => _EAFLoggingUnitTesting;
public MET08THFTIRSTRATUS() : base(testContext: null, declaringType: null, skipEquipmentDictionary: false)
{
if (_EAFLoggingUnitTesting is null)
throw new Exception();
}
public MET08THFTIRSTRATUS(TestContext testContext) : base(testContext, new StackFrame().GetMethod().DeclaringType, skipEquipmentDictionary: false)
{
}
[ClassInitialize]
public static void ClassInitialize(TestContext testContext)
{
if (_EAFLoggingUnitTesting is null)
_EAFLoggingUnitTesting = new MET08THFTIRSTRATUS(testContext);
_EAFLoggingUnitTesting.Logger.LogInformation(string.Concat(testContext.TestName, " - ClassInitialize"));
string[] fileNameAndText = _EAFLoggingUnitTesting.AdaptationTesting.GetCSharpText(testContext.TestName);
File.WriteAllText(fileNameAndText[0], fileNameAndText[1]);
File.WriteAllText(fileNameAndText[2], fileNameAndText[3]);
}
[ClassCleanup()]
public static void ClassCleanup()
{
if (!(_EAFLoggingUnitTesting.Logger is null))
_EAFLoggingUnitTesting.Logger.LogInformation("Cleanup");
if (!(_EAFLoggingUnitTesting is null))
_EAFLoggingUnitTesting.Dispose();
}
[TestMethod]
public void Staging__v2_36_3__MET08THFTIRSTRATUS__MET08THFTIRSTRATUS()
{
string check = "~IsXToOpenInsightMetrologyViewer";
MethodBase methodBase = new StackFrame().GetMethod();
_EAFLoggingUnitTesting.Logger.LogInformation(string.Concat(methodBase.Name, " - Getting configuration"));
string[] fileNameAndJson = _EAFLoggingUnitTesting.AdaptationTesting.GetConfiguration(methodBase);
Assert.IsTrue(fileNameAndJson[1].Contains(check));
File.WriteAllText(fileNameAndJson[0], fileNameAndJson[1]);
IFileRead fileRead = _EAFLoggingUnitTesting.AdaptationTesting.Get(methodBase, sourceFileLocation: string.Empty, sourceFileFilter: string.Empty, useCyclicalForDescription: false);
Assert.IsFalse(string.IsNullOrEmpty(fileRead.CellInstanceConnectionName));
_EAFLoggingUnitTesting.Logger.LogInformation(string.Concat(methodBase.Name, " - Exit"));
}
[TestMethod]
public void Staging__v2_36_3__MET08THFTIRSTRATUS__MET08THFTIRSTRATUS_()
{
string check = "~IsXToIQSSi";
MethodBase methodBase = new StackFrame().GetMethod();
_EAFLoggingUnitTesting.Logger.LogInformation(string.Concat(methodBase.Name, " - Getting configuration"));
string[] fileNameAndJson = _EAFLoggingUnitTesting.AdaptationTesting.GetConfiguration(methodBase);
Assert.IsTrue(fileNameAndJson[1].Contains(check));
File.WriteAllText(fileNameAndJson[0], fileNameAndJson[1]);
IFileRead fileRead = _EAFLoggingUnitTesting.AdaptationTesting.Get(methodBase, sourceFileLocation: string.Empty, sourceFileFilter: string.Empty, useCyclicalForDescription: false);
Assert.IsFalse(string.IsNullOrEmpty(fileRead.CellInstanceConnectionName));
_EAFLoggingUnitTesting.Logger.LogInformation(string.Concat(methodBase.Name, " - Exit"));
}
[TestMethod]
public void Staging__v2_36_3__MET08THFTIRSTRATUS__MET08THFTIRSTRATUS__()
{
string check = "~IsXToOpenInsight";
MethodBase methodBase = new StackFrame().GetMethod();
_EAFLoggingUnitTesting.Logger.LogInformation(string.Concat(methodBase.Name, " - Getting configuration"));
string[] fileNameAndJson = _EAFLoggingUnitTesting.AdaptationTesting.GetConfiguration(methodBase);
Assert.IsTrue(fileNameAndJson[1].Contains(check));
File.WriteAllText(fileNameAndJson[0], fileNameAndJson[1]);
IFileRead fileRead = _EAFLoggingUnitTesting.AdaptationTesting.Get(methodBase, sourceFileLocation: string.Empty, sourceFileFilter: string.Empty, useCyclicalForDescription: false);
Assert.IsFalse(string.IsNullOrEmpty(fileRead.CellInstanceConnectionName));
_EAFLoggingUnitTesting.Logger.LogInformation(string.Concat(methodBase.Name, " - Exit"));
}
[TestMethod]
public void Staging__v2_36_3__MET08THFTIRSTRATUS__MET08THFTIRSTRATUS___()
{
string check = "~IsXToOpenInsightMetrologyViewerAttachments";
MethodBase methodBase = new StackFrame().GetMethod();
_EAFLoggingUnitTesting.Logger.LogInformation(string.Concat(methodBase.Name, " - Getting configuration"));
string[] fileNameAndJson = _EAFLoggingUnitTesting.AdaptationTesting.GetConfiguration(methodBase);
Assert.IsTrue(fileNameAndJson[1].Contains(check));
File.WriteAllText(fileNameAndJson[0], fileNameAndJson[1]);
IFileRead fileRead = _EAFLoggingUnitTesting.AdaptationTesting.Get(methodBase, sourceFileLocation: string.Empty, sourceFileFilter: string.Empty, useCyclicalForDescription: false);
Assert.IsFalse(string.IsNullOrEmpty(fileRead.CellInstanceConnectionName));
_EAFLoggingUnitTesting.Logger.LogInformation(string.Concat(methodBase.Name, " - Exit"));
}
[TestMethod]
public void Staging__v2_36_3__MET08THFTIRSTRATUS__MET08THFTIRSTRATUS____()
{
string check = "~IsXToAPC";
MethodBase methodBase = new StackFrame().GetMethod();
_EAFLoggingUnitTesting.Logger.LogInformation(string.Concat(methodBase.Name, " - Getting configuration"));
string[] fileNameAndJson = _EAFLoggingUnitTesting.AdaptationTesting.GetConfiguration(methodBase);
Assert.IsTrue(fileNameAndJson[1].Contains(check));
File.WriteAllText(fileNameAndJson[0], fileNameAndJson[1]);
IFileRead fileRead = _EAFLoggingUnitTesting.AdaptationTesting.Get(methodBase, sourceFileLocation: string.Empty, sourceFileFilter: string.Empty, useCyclicalForDescription: false);
Assert.IsFalse(string.IsNullOrEmpty(fileRead.CellInstanceConnectionName));
_EAFLoggingUnitTesting.Logger.LogInformation(string.Concat(methodBase.Name, " - Exit"));
}
[TestMethod]
public void Staging__v2_36_3__MET08THFTIRSTRATUS__MET08THFTIRSTRATUS_____()
{
string check = "~IsXToSPaCe";
MethodBase methodBase = new StackFrame().GetMethod();
_EAFLoggingUnitTesting.Logger.LogInformation(string.Concat(methodBase.Name, " - Getting configuration"));
string[] fileNameAndJson = _EAFLoggingUnitTesting.AdaptationTesting.GetConfiguration(methodBase);
Assert.IsTrue(fileNameAndJson[1].Contains(check));
File.WriteAllText(fileNameAndJson[0], fileNameAndJson[1]);
IFileRead fileRead = _EAFLoggingUnitTesting.AdaptationTesting.Get(methodBase, sourceFileLocation: string.Empty, sourceFileFilter: string.Empty, useCyclicalForDescription: false);
Assert.IsFalse(string.IsNullOrEmpty(fileRead.CellInstanceConnectionName));
_EAFLoggingUnitTesting.Logger.LogInformation(string.Concat(methodBase.Name, " - Exit"));
}
[TestMethod]
public void Staging__v2_36_3__MET08THFTIRSTRATUS__MET08THFTIRSTRATUS______()
{
string check = "~IsXToArchive";
MethodBase methodBase = new StackFrame().GetMethod();
_EAFLoggingUnitTesting.Logger.LogInformation(string.Concat(methodBase.Name, " - Getting configuration"));
string[] fileNameAndJson = _EAFLoggingUnitTesting.AdaptationTesting.GetConfiguration(methodBase);
Assert.IsTrue(fileNameAndJson[1].Contains(check));
File.WriteAllText(fileNameAndJson[0], fileNameAndJson[1]);
IFileRead fileRead = _EAFLoggingUnitTesting.AdaptationTesting.Get(methodBase, sourceFileLocation: string.Empty, sourceFileFilter: string.Empty, useCyclicalForDescription: false);
Assert.IsFalse(string.IsNullOrEmpty(fileRead.CellInstanceConnectionName));
_EAFLoggingUnitTesting.Logger.LogInformation(string.Concat(methodBase.Name, " - Exit"));
}
[TestMethod]
public void Staging__v2_36_3__MET08THFTIRSTRATUS__MET08THFTIRSTRATUS_______()
{
string check = "~IsArchive";
MethodBase methodBase = new StackFrame().GetMethod();
_EAFLoggingUnitTesting.Logger.LogInformation(string.Concat(methodBase.Name, " - Getting configuration"));
string[] fileNameAndJson = _EAFLoggingUnitTesting.AdaptationTesting.GetConfiguration(methodBase);
Assert.IsTrue(fileNameAndJson[1].Contains(check));
File.WriteAllText(fileNameAndJson[0], fileNameAndJson[1]);
IFileRead fileRead = _EAFLoggingUnitTesting.AdaptationTesting.Get(methodBase, sourceFileLocation: string.Empty, sourceFileFilter: string.Empty, useCyclicalForDescription: false);
Assert.IsFalse(string.IsNullOrEmpty(fileRead.CellInstanceConnectionName));
_EAFLoggingUnitTesting.Logger.LogInformation(string.Concat(methodBase.Name, " - Exit"));
}
[TestMethod]
public void Staging__v2_36_3__MET08THFTIRSTRATUS__MET08THFTIRSTRATUS________()
{
string check = "~IsDummy";
MethodBase methodBase = new StackFrame().GetMethod();
_EAFLoggingUnitTesting.Logger.LogInformation(string.Concat(methodBase.Name, " - Getting configuration"));
string[] fileNameAndJson = _EAFLoggingUnitTesting.AdaptationTesting.GetConfiguration(methodBase);
Assert.IsTrue(fileNameAndJson[1].Contains(check));
File.WriteAllText(fileNameAndJson[0], fileNameAndJson[1]);
IFileRead fileRead = _EAFLoggingUnitTesting.AdaptationTesting.Get(methodBase, sourceFileLocation: string.Empty, sourceFileFilter: string.Empty, useCyclicalForDescription: false);
Assert.IsFalse(string.IsNullOrEmpty(fileRead.CellInstanceConnectionName));
_EAFLoggingUnitTesting.Logger.LogInformation(string.Concat(methodBase.Name, " - Exit"));
}
}
}
// dotnet build --runtime win-x64
// dotnet test --runtime win-x64 --no-build --filter "FullyQualifiedName~_Tests.CreateSelfDescription.Staging.v2_36_3" --% -- TestRunParameters.Parameter(name=\"Debug\", value=\"Debugger.IsAttached\")
// dotnet test --runtime win-x64 --no-build --filter "FullyQualifiedName~_Tests.CreateSelfDescription.Staging.v2_36_3 & ClassName~MET08THFTIRSTRATUS" --% -- TestRunParameters.Parameter(name=\"Debug\", value=\"Debugger.IsAttached\")
// dotnet test --runtime win-x64 --no-build --filter "FullyQualifiedName~_Tests.CreateSelfDescription.Staging.v2_36_3 & ClassName~MET08THFTIRSTRATUS & Staging__v2_36_3__MET08THFTIRSTRATUS__MET08THFTIRSTRATUS________" --% -- TestRunParameters.Parameter(name=\"Debug\", value=\"Debugger.IsAttached\")
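// Note: the "Debug" TestRunParameters value above is read by Shared.UnitTesting; when the serialized
// TestContext properties contain "Debugger.IsAttached", its constructor loops (sleeping 500 ms per
// iteration) until a debugger attaches, so these commands pause the test host for attaching a debugger.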
View File
@ -0,0 +1,164 @@
using Adaptation.Shared;
using Adaptation.Shared.Methods;
using Microsoft.VisualStudio.TestTools.UnitTesting;
using System;
using System.Collections.Generic;
using System.Diagnostics;
using System.IO;
using System.Reflection;
using System.Text.Json;
namespace _Tests.Extract.Staging.v2_36_3
{
[TestClass]
public class BIORAD4
{
private static CreateSelfDescription.Staging.v2_36_3.BIORAD4 _BIORAD4;
[ClassInitialize]
public static void ClassInitialize(TestContext testContext)
{
CreateSelfDescription.Staging.v2_36_3.BIORAD4.ClassInitialize(testContext);
_BIORAD4 = CreateSelfDescription.Staging.v2_36_3.BIORAD4.EAFLoggingUnitTesting;
}
[TestMethod]
public void Staging__v2_36_3__BIORAD4__txt()
{
_BIORAD4.Staging__v2_36_3__BIORAD4__txt();
}
[TestMethod]
public void Staging__v2_36_3__BIORAD4__txt637730081979221342__Normal()
{
string check = "*DataBioRad.txt";
_BIORAD4.Staging__v2_36_3__BIORAD4__txt();
MethodBase methodBase = new StackFrame().GetMethod();
string[] variables = _BIORAD4.AdaptationTesting.GetVariables(methodBase, check);
Tuple<string, string[], string[]> pdsf = Helpers.Metrology.GetLogisticsColumnsAndBody(variables[2], variables[4]);
IFileRead fileRead = _BIORAD4.AdaptationTesting.Get(methodBase, sourceFileLocation: variables[2], sourceFileFilter: variables[3], useCyclicalForDescription: false);
Tuple<string, Test[], JsonElement[], List<FileInfo>> extractResult = fileRead.ReExtract();
Helpers.Metrology.UpdatePassDirectory(variables[2]);
}
[TestMethod]
public void Staging__v2_36_3__BIORAD4__txt637746296480404920__Failure()
{
string check = "*DataBioRad.txt";
_BIORAD4.Staging__v2_36_3__BIORAD4__txt();
MethodBase methodBase = new StackFrame().GetMethod();
string[] variables = _BIORAD4.AdaptationTesting.GetVariables(methodBase, check);
Tuple<string, string[], string[]> pdsf = Helpers.Metrology.GetLogisticsColumnsAndBody(variables[2], variables[4]);
IFileRead fileRead = _BIORAD4.AdaptationTesting.Get(methodBase, sourceFileLocation: variables[2], sourceFileFilter: variables[3], useCyclicalForDescription: false);
Tuple<string, Test[], JsonElement[], List<FileInfo>> extractResult = fileRead.ReExtract();
Helpers.Metrology.UpdatePassDirectory(variables[2]);
}
[TestMethod]
public void Staging__v2_36_3__BIORAD4__Stratus()
{
_BIORAD4.Staging__v2_36_3__BIORAD4__Stratus();
}
[TestMethod]
public void Staging__v2_36_3__BIORAD4__Stratus637730081979221342__RDS()
{
string check = "CassetteDataBioRad_*.txt";
_BIORAD4.Staging__v2_36_3__BIORAD4__Stratus();
MethodBase methodBase = new StackFrame().GetMethod();
string[] variables = _BIORAD4.AdaptationTesting.GetVariables(methodBase, check);
Tuple<string, string[], string[]> pdsf = Helpers.Metrology.GetLogisticsColumnsAndBody(variables[2], variables[4]);
IFileRead fileRead = _BIORAD4.AdaptationTesting.Get(methodBase, sourceFileLocation: variables[2], sourceFileFilter: variables[3], useCyclicalForDescription: false);
Tuple<string, Test[], JsonElement[], List<FileInfo>> extractResult = fileRead.ReExtract();
Assert.IsFalse(string.IsNullOrEmpty(extractResult?.Item1));
Assert.IsTrue(extractResult.Item3.Length > 0, "extractResult Array Length check!");
Assert.IsNotNull(extractResult.Item4);
Logistics logistics = new Logistics(fileRead);
Tuple<string, string[], string[]> pdsfNew = Helpers.Metrology.GetLogisticsColumnsAndBody(fileRead, logistics, extractResult, pdsf);
Helpers.Metrology.CompareSave(variables[5], pdsf, pdsfNew);
Assert.IsTrue(pdsf.Item1 == pdsfNew.Item1, "Item1 check!");
string[] json = Helpers.Metrology.GetItem2(pdsf, pdsfNew);
Helpers.Metrology.CompareSaveJSON(variables[5], json);
Assert.IsTrue(json[0] == json[1], "Item2 check!");
string[] join = Helpers.Metrology.GetItem3(pdsf, pdsfNew);
Helpers.Metrology.CompareSaveTSV(variables[5], join);
Assert.IsTrue(join[0] == join[1], "Item3 (Join) check!");
Helpers.Metrology.UpdatePassDirectory(variables[2]);
}
[TestMethod]
public void Staging__v2_36_3__BIORAD4__Stratus637730081979221342__1TRDS()
{
string check = "CassetteDataBioRad_*.txt";
_BIORAD4.Staging__v2_36_3__BIORAD4__Stratus();
MethodBase methodBase = new StackFrame().GetMethod();
string[] variables = _BIORAD4.AdaptationTesting.GetVariables(methodBase, check);
Tuple<string, string[], string[]> pdsf = Helpers.Metrology.GetLogisticsColumnsAndBody(variables[2], variables[4]);
IFileRead fileRead = _BIORAD4.AdaptationTesting.Get(methodBase, sourceFileLocation: variables[2], sourceFileFilter: variables[3], useCyclicalForDescription: false);
Tuple<string, Test[], JsonElement[], List<FileInfo>> extractResult = fileRead.ReExtract();
Assert.IsFalse(string.IsNullOrEmpty(extractResult?.Item1));
Assert.IsTrue(extractResult.Item3.Length > 0, "extractResult Array Length check!");
Assert.IsNotNull(extractResult.Item4);
Logistics logistics = new Logistics(fileRead);
Tuple<string, string[], string[]> pdsfNew = Helpers.Metrology.GetLogisticsColumnsAndBody(fileRead, logistics, extractResult, pdsf);
Helpers.Metrology.CompareSave(variables[5], pdsf, pdsfNew);
Assert.IsTrue(pdsf.Item1 == pdsfNew.Item1, "Item1 check!");
string[] json = Helpers.Metrology.GetItem2(pdsf, pdsfNew);
Helpers.Metrology.CompareSaveJSON(variables[5], json);
Assert.IsTrue(json[0] == json[1], "Item2 check!");
string[] join = Helpers.Metrology.GetItem3(pdsf, pdsfNew);
Helpers.Metrology.CompareSaveTSV(variables[5], join);
Assert.IsTrue(join[0] == join[1], "Item3 (Join) check!");
Helpers.Metrology.UpdatePassDirectory(variables[2]);
}
[TestMethod]
public void Staging__v2_36_3__BIORAD4__Stratus637733400573863329__ReactorAndRDS()
{
DateTime dateTime;
string check = "CassetteDataBioRad_*.txt";
_BIORAD4.Staging__v2_36_3__BIORAD4__Stratus();
MethodBase methodBase = new StackFrame().GetMethod();
string[] variables = _BIORAD4.AdaptationTesting.GetVariables(methodBase, check);
Tuple<string, string[], string[]> pdsf = Helpers.Metrology.GetLogisticsColumnsAndBody(variables[2], variables[4]);
IFileRead fileRead = _BIORAD4.AdaptationTesting.Get(methodBase, sourceFileLocation: variables[2], sourceFileFilter: variables[3], useCyclicalForDescription: false);
Tuple<string, Test[], JsonElement[], List<FileInfo>> extractResult = fileRead.ReExtract();
Assert.IsFalse(string.IsNullOrEmpty(extractResult?.Item1));
Assert.IsTrue(extractResult.Item3.Length > 0, "extractResult Array Length check!");
Assert.IsNotNull(extractResult.Item4);
Logistics logistics = new Logistics(fileRead);
dateTime = Adaptation.FileHandlers.Stratus.ProcessData.GetDateTime(logistics, string.Empty);
Assert.IsTrue(dateTime == logistics.DateTimeFromSequence);
dateTime = Adaptation.FileHandlers.Stratus.ProcessData.GetDateTime(logistics, "11/24/21 08:39");
Assert.IsTrue(dateTime == logistics.DateTimeFromSequence);
Tuple<string, string[], string[]> pdsfNew = Helpers.Metrology.GetLogisticsColumnsAndBody(fileRead, logistics, extractResult, pdsf);
Helpers.Metrology.CompareSave(variables[5], pdsf, pdsfNew);
Assert.IsTrue(pdsf.Item1 == pdsfNew.Item1, "Item1 check!");
string[] json = Helpers.Metrology.GetItem2(pdsf, pdsfNew);
Helpers.Metrology.CompareSaveJSON(variables[5], json);
Assert.IsTrue(json[0] == json[1], "Item2 check!");
string[] join = Helpers.Metrology.GetItem3(pdsf, pdsfNew);
Helpers.Metrology.CompareSaveTSV(variables[5], join);
Assert.IsTrue(join[0] == join[1], "Item3 (Join) check!");
Helpers.Metrology.UpdatePassDirectory(variables[2]);
}
[TestMethod]
public void Staging__v2_36_3__BIORAD4__QS408M()
{
_BIORAD4.Staging__v2_36_3__BIORAD4__QS408M();
}
}
}
// dotnet build --runtime win-x64
// dotnet test --runtime win-x64 --no-build --filter "FullyQualifiedName~_Tests.Extract.Staging.v2_36_3" --% -- TestRunParameters.Parameter(name=\"Debug\", value=\"Debugger.IsAttached\")
// dotnet test --runtime win-x64 --no-build --filter "FullyQualifiedName~_Tests.Extract.Staging.v2_36_3 & ClassName~BIORAD4" --% -- TestRunParameters.Parameter(name=\"Debug\", value=\"Debugger.IsAttached\")
// dotnet test --runtime win-x64 --no-build --filter "FullyQualifiedName~_Tests.Extract.Staging.v2_36_3 & ClassName~BIORAD4 & Staging__v2_36_3__BIORAD4__QS408M" --% -- TestRunParameters.Parameter(name=\"Debug\", value=\"Debugger.IsAttached\")
// dotnet test --runtime win-x64 --no-build --filter "FullyQualifiedName~_Tests.Extract.Staging.v2_36_3 & ClassName~BIORAD4 & Staging__v2_36_3__BIORAD4__txt637730081979221342__Normal" --% -- TestRunParameters.Parameter(name=\"Debug\", value=\"Debugger.IsAttached\")
// dotnet test --runtime win-x64 --no-build --filter "FullyQualifiedName~_Tests.Extract.Staging.v2_36_3 & ClassName~BIORAD4 & Staging__v2_36_3__BIORAD4__Stratus637733400573863329__ReactorAndRDS" --% -- TestRunParameters.Parameter(name=\"Debug\", value=\"Debugger.IsAttached\")
//
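// The long numeric suffix in names such as Staging__v2_36_3__BIORAD4__txt637730081979221342__Normal
// appears to be a DateTime tick count identifying the captured source-file folder that
// AdaptationTesting.GetVariables resolves for the test (compare Helpers.Metrology.GetFileName below).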
View File
@ -0,0 +1,83 @@
using Adaptation.Shared;
using Adaptation.Shared.Methods;
using Microsoft.VisualStudio.TestTools.UnitTesting;
using System;
using System.Collections.Generic;
using System.Diagnostics;
using System.IO;
using System.Reflection;
using System.Text.Json;
namespace _Tests.Extract.Staging.v2_36_3
{
[TestClass]
public class BIORAD5
{
private static CreateSelfDescription.Staging.v2_36_3.BIORAD5 _BIORAD5;
[ClassInitialize]
public static void ClassInitialize(TestContext testContext)
{
CreateSelfDescription.Staging.v2_36_3.BIORAD5.ClassInitialize(testContext);
_BIORAD5 = CreateSelfDescription.Staging.v2_36_3.BIORAD5.EAFLoggingUnitTesting;
}
[TestMethod]
public void Staging__v2_36_3__BIORAD5__txt()
{
_BIORAD5.Staging__v2_36_3__BIORAD5__txt();
}
[TestMethod]
public void Staging__v2_36_3__BIORAD5__Stratus()
{
_BIORAD5.Staging__v2_36_3__BIORAD5__Stratus();
}
[TestMethod]
public void Staging__v2_36_3__BIORAD5__Stratus637738592809956919__ReactorAndRDS()
{
DateTime dateTime;
string check = "CassetteDataBioRad_*.txt";
_BIORAD5.Staging__v2_36_3__BIORAD5__Stratus();
MethodBase methodBase = new StackFrame().GetMethod();
string[] variables = _BIORAD5.AdaptationTesting.GetVariables(methodBase, check);
Tuple<string, string[], string[]> pdsf = Helpers.Metrology.GetLogisticsColumnsAndBody(variables[2], variables[4]);
IFileRead fileRead = _BIORAD5.AdaptationTesting.Get(methodBase, sourceFileLocation: variables[2], sourceFileFilter: variables[3], useCyclicalForDescription: false);
Tuple<string, Test[], JsonElement[], List<FileInfo>> extractResult = fileRead.ReExtract();
Assert.IsFalse(string.IsNullOrEmpty(extractResult?.Item1));
Assert.IsTrue(extractResult.Item3.Length > 0, "extractResult Array Length check!");
Assert.IsNotNull(extractResult.Item4);
Logistics logistics = new Logistics(fileRead);
dateTime = Adaptation.FileHandlers.Stratus.ProcessData.GetDateTime(logistics, string.Empty);
Assert.IsTrue(dateTime == logistics.DateTimeFromSequence);
dateTime = Adaptation.FileHandlers.Stratus.ProcessData.GetDateTime(logistics, "11/24/21 08:39");
Assert.IsTrue(dateTime == logistics.DateTimeFromSequence);
Tuple<string, string[], string[]> pdsfNew = Helpers.Metrology.GetLogisticsColumnsAndBody(fileRead, logistics, extractResult, pdsf);
Helpers.Metrology.CompareSave(variables[5], pdsf, pdsfNew);
Assert.IsTrue(pdsf.Item1 == pdsfNew.Item1, "Item1 check!");
string[] json = Helpers.Metrology.GetItem2(pdsf, pdsfNew);
Helpers.Metrology.CompareSaveJSON(variables[5], json);
Assert.IsTrue(json[0] == json[1], "Item2 check!");
string[] join = Helpers.Metrology.GetItem3(pdsf, pdsfNew);
Helpers.Metrology.CompareSaveTSV(variables[5], join);
Assert.IsTrue(join[0] == join[1], "Item3 (Join) check!");
Helpers.Metrology.UpdatePassDirectory(variables[2]);
}
[TestMethod]
public void Staging__v2_36_3__BIORAD5__QS408M()
{
_BIORAD5.Staging__v2_36_3__BIORAD5__QS408M();
}
}
}
// dotnet build --runtime win-x64
// dotnet test --runtime win-x64 --no-build --filter "FullyQualifiedName~_Tests.Extract.Staging.v2_36_3" --% -- TestRunParameters.Parameter(name=\"Debug\", value=\"Debugger.IsAttached\")
// dotnet test --runtime win-x64 --no-build --filter "FullyQualifiedName~_Tests.Extract.Staging.v2_36_3 & ClassName~BIORAD5" --% -- TestRunParameters.Parameter(name=\"Debug\", value=\"Debugger.IsAttached\")
// dotnet test --runtime win-x64 --no-build --filter "FullyQualifiedName~_Tests.Extract.Staging.v2_36_3 & ClassName~BIORAD5 & Staging__v2_36_3__BIORAD5__Stratus637738592809956919__ReactorAndRDS" --% -- TestRunParameters.Parameter(name=\"Debug\", value=\"Debugger.IsAttached\")
View File
@ -0,0 +1,114 @@
using Adaptation.Shared;
using Adaptation.Shared.Methods;
using Microsoft.VisualStudio.TestTools.UnitTesting;
using System;
using System.Collections.Generic;
using System.Diagnostics;
using System.IO;
using System.Reflection;
using System.Text.Json;
namespace _Tests.Extract.Staging.v2_36_3
{
[TestClass]
public class MET08THFTIRSTRATUS
{
private static CreateSelfDescription.Staging.v2_36_3.MET08THFTIRSTRATUS _MET08THFTIRSTRATUS;
[ClassInitialize]
public static void ClassInitialize(TestContext testContext)
{
CreateSelfDescription.Staging.v2_36_3.MET08THFTIRSTRATUS.ClassInitialize(testContext);
_MET08THFTIRSTRATUS = CreateSelfDescription.Staging.v2_36_3.MET08THFTIRSTRATUS.EAFLoggingUnitTesting;
}
[TestMethod]
public void Staging__v2_36_3__MET08THFTIRSTRATUS__MET08THFTIRSTRATUS()
{
_MET08THFTIRSTRATUS.Staging__v2_36_3__MET08THFTIRSTRATUS__MET08THFTIRSTRATUS();
}
[TestMethod]
public void Staging__v2_36_3__MET08THFTIRSTRATUS__MET08THFTIRSTRATUS_()
{
_MET08THFTIRSTRATUS.Staging__v2_36_3__MET08THFTIRSTRATUS__MET08THFTIRSTRATUS_();
}
[TestMethod]
public void Staging__v2_36_3__MET08THFTIRSTRATUS__MET08THFTIRSTRATUS__()
{
_MET08THFTIRSTRATUS.Staging__v2_36_3__MET08THFTIRSTRATUS__MET08THFTIRSTRATUS__();
}
[TestMethod]
public void Staging__v2_36_3__MET08THFTIRSTRATUS__MET08THFTIRSTRATUS__637745411457972777__First()
{
string check = "~IsXToOpenInsight";
MethodBase methodBase = new StackFrame().GetMethod();
_MET08THFTIRSTRATUS.Staging__v2_36_3__MET08THFTIRSTRATUS__MET08THFTIRSTRATUS__();
string[] variables = _MET08THFTIRSTRATUS.AdaptationTesting.GetVariables(methodBase, check);
Tuple<string, string[], string[]> pdsf = Helpers.Metrology.GetLogisticsColumnsAndBody(variables[2], variables[4]);
IFileRead fileRead = _MET08THFTIRSTRATUS.AdaptationTesting.Get(methodBase, sourceFileLocation: variables[2], sourceFileFilter: variables[3], useCyclicalForDescription: false);
Tuple<string, Test[], JsonElement[], List<FileInfo>> extractResult = fileRead.ReExtract();
Helpers.Metrology.UpdatePassDirectory(variables[2]);
}
[TestMethod]
public void Staging__v2_36_3__MET08THFTIRSTRATUS__MET08THFTIRSTRATUS___()
{
_MET08THFTIRSTRATUS.Staging__v2_36_3__MET08THFTIRSTRATUS__MET08THFTIRSTRATUS___();
}
[TestMethod]
public void Staging__v2_36_3__MET08THFTIRSTRATUS__MET08THFTIRSTRATUS___637745411457972777__First()
{
MethodBase methodBase = new StackFrame().GetMethod();
string check = "~IsXToOpenInsightMetrologyViewerAttachments";
_MET08THFTIRSTRATUS.Staging__v2_36_3__MET08THFTIRSTRATUS__MET08THFTIRSTRATUS___();
string[] variables = _MET08THFTIRSTRATUS.AdaptationTesting.GetVariables(methodBase, check);
Tuple<string, string[], string[]> pdsf = Helpers.Metrology.GetLogisticsColumnsAndBody(variables[2], variables[4]);
IFileRead fileRead = _MET08THFTIRSTRATUS.AdaptationTesting.Get(methodBase, sourceFileLocation: variables[2], sourceFileFilter: variables[3], useCyclicalForDescription: false);
Tuple<string, Test[], JsonElement[], List<FileInfo>> extractResult = fileRead.ReExtract();
Helpers.Metrology.UpdatePassDirectory(variables[2]);
}
[TestMethod]
public void Staging__v2_36_3__MET08THFTIRSTRATUS__MET08THFTIRSTRATUS____()
{
_MET08THFTIRSTRATUS.Staging__v2_36_3__MET08THFTIRSTRATUS__MET08THFTIRSTRATUS____();
}
[TestMethod]
public void Staging__v2_36_3__MET08THFTIRSTRATUS__MET08THFTIRSTRATUS_____()
{
_MET08THFTIRSTRATUS.Staging__v2_36_3__MET08THFTIRSTRATUS__MET08THFTIRSTRATUS_____();
}
[TestMethod]
public void Staging__v2_36_3__MET08THFTIRSTRATUS__MET08THFTIRSTRATUS______()
{
_MET08THFTIRSTRATUS.Staging__v2_36_3__MET08THFTIRSTRATUS__MET08THFTIRSTRATUS______();
}
[TestMethod]
public void Staging__v2_36_3__MET08THFTIRSTRATUS__MET08THFTIRSTRATUS_______()
{
_MET08THFTIRSTRATUS.Staging__v2_36_3__MET08THFTIRSTRATUS__MET08THFTIRSTRATUS_______();
}
[TestMethod]
public void Staging__v2_36_3__MET08THFTIRSTRATUS__MET08THFTIRSTRATUS________()
{
_MET08THFTIRSTRATUS.Staging__v2_36_3__MET08THFTIRSTRATUS__MET08THFTIRSTRATUS________();
}
}
}
// dotnet build --runtime win-x64
// dotnet test --runtime win-x64 --no-build --filter "FullyQualifiedName~_Tests.Extract.Staging.v2_36_3" --% -- TestRunParameters.Parameter(name=\"Debug\", value=\"Debugger.IsAttached\")
// dotnet test --runtime win-x64 --no-build --filter "FullyQualifiedName~_Tests.Extract.Staging.v2_36_3 & ClassName~MET08THFTIRSTRATUS" --% -- TestRunParameters.Parameter(name=\"Debug\", value=\"Debugger.IsAttached\")
// dotnet test --runtime win-x64 --no-build --filter "FullyQualifiedName~_Tests.Extract.Staging.v2_36_3 & ClassName~MET08THFTIRSTRATUS & Staging__v2_36_3__MET08THFTIRSTRATUS__MET08THFTIRSTRATUS___637745411457972777__First" --% -- TestRunParameters.Parameter(name=\"Debug\", value=\"Debugger.IsAttached\")
View File
@ -0,0 +1,160 @@
using Adaptation.Shared;
using Adaptation.Shared.Methods;
using Microsoft.VisualStudio.TestTools.UnitTesting;
using System;
using System.Collections.Generic;
using System.Diagnostics;
using System.IO;
using System.Linq;
using System.Reflection;
using System.Text.Json;
namespace _Tests.Helpers
{
public class Metrology
{
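// These helpers compare a previously captured process data standard format (PDSF) file against a
// re-extracted one: GetLogisticsColumnsAndBody loads or rebuilds the logistics text, columns and body;
// GetItem2 and GetItem3 project the columns and body into comparable JSON and tab-separated strings
// (GetItem3 drops tokens containing "\\", presumably so machine-specific UNC paths do not break the
// comparison); and the CompareSave* methods open the directory in Explorer and write paired 0.*/1.*
// files whenever the old and new values differ, so a failing assert can be diffed by hand.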
internal static Tuple<string, string[], string[]> GetLogisticsColumnsAndBody(string fileFullName)
{
Tuple<string, string[], string[]> results;
results = ProcessDataStandardFormat.GetLogisticsColumnsAndBody(fileFullName);
Assert.IsFalse(string.IsNullOrEmpty(results.Item1));
Assert.IsTrue(results.Item2.Length > 0, "Column check");
Assert.IsTrue(results.Item3.Length > 0, "Body check");
return results;
}
internal static Tuple<string, string[], string[]> GetLogisticsColumnsAndBody(string searchDirectory, string searchPattern)
{
Tuple<string, string[], string[]> results;
if (searchPattern.Length > 3 && !searchPattern.Contains('*') && File.Exists(searchPattern))
results = GetLogisticsColumnsAndBody(searchPattern);
else
{
string[] pdsfFiles;
pdsfFiles = Directory.GetFiles(searchDirectory, searchPattern, SearchOption.TopDirectoryOnly);
if (!pdsfFiles.Any())
Process.Start("explorer.exe", searchDirectory);
Assert.IsTrue(pdsfFiles.Any(), "GetFiles check");
results = GetLogisticsColumnsAndBody(pdsfFiles[0]);
}
Assert.IsFalse(string.IsNullOrEmpty(results.Item1));
Assert.IsTrue(results.Item2.Length > 0, "Column check");
Assert.IsTrue(results.Item3.Length > 0, "Body check");
return results;
}
internal static Tuple<string, string[], string[]> GetLogisticsColumnsAndBody(IFileRead fileRead, Logistics logistics, Tuple<string, Test[], JsonElement[], List<FileInfo>> extractResult, Tuple<string, string[], string[]> pdsf)
{
Tuple<string, string[], string[]> results;
string text = ProcessDataStandardFormat.GetPDSFText(fileRead, logistics, extractResult.Item3, logisticsText: pdsf.Item1);
string[] lines = text.Split(new string[] { Environment.NewLine }, StringSplitOptions.RemoveEmptyEntries);
results = ProcessDataStandardFormat.GetLogisticsColumnsAndBody(logistics.ReportFullPath, lines);
Assert.IsFalse(string.IsNullOrEmpty(results.Item1));
Assert.IsTrue(results.Item2.Length > 0, "Column check");
Assert.IsTrue(results.Item3.Length > 0, "Body check");
return results;
}
internal static string[] GetItem2(Tuple<string, string[], string[]> pdsf, Tuple<string, string[], string[]> pdsfNew)
{
JsonSerializerOptions jsonSerializerOptions = new JsonSerializerOptions() { WriteIndented = true };
string jsonOld = JsonSerializer.Serialize(pdsf.Item2, pdsf.Item2.GetType(), jsonSerializerOptions);
string jsonNew = JsonSerializer.Serialize(pdsfNew.Item2, pdsfNew.Item2.GetType(), jsonSerializerOptions);
return new string[] { jsonOld, jsonNew };
}
internal static string[] GetItem3(Tuple<string, string[], string[]> pdsf, Tuple<string, string[], string[]> pdsfNew)
{
string joinOld = string.Join(Environment.NewLine, from l in pdsf.Item3 select string.Join('\t', from t in l.Split('\t') where !t.Contains(@"\\") select t));
string joinNew = string.Join(Environment.NewLine, from l in pdsfNew.Item3 select string.Join('\t', from t in l.Split('\t') where !t.Contains(@"\\") select t));
return new string[] { joinOld, joinNew };
}
internal static void UpdatePassDirectory(string searchDirectory)
{
DateTime dateTime = DateTime.Now;
try
{ Directory.SetLastWriteTime(searchDirectory, dateTime); }
catch (System.Exception) { }
string ticksDirectory = Path.GetDirectoryName(searchDirectory);
try
{ Directory.SetLastWriteTime(ticksDirectory, dateTime); }
catch (System.Exception) { }
string[] directories = Directory.GetDirectories(searchDirectory, "*", SearchOption.TopDirectoryOnly);
foreach (string directory in directories)
{
try
{ Directory.SetLastWriteTime(directory, dateTime); }
catch (System.Exception) { }
}
}
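// GetFileName parses the calling test method's name, which follows the convention
// Environment__Version__EquipmentType__ConnectionName[Ticks__Comment] (for example
// Staging__v2_36_3__BIORAD4__Stratus637733400573863329__ReactorAndRDS), and maps it to a
// ConnectionName.json path under Environment/EquipmentType/Version; when no ticks are embedded in
// the name, the current DateTime.Now.Ticks value is used instead.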
internal static string GetFileName(MethodBase methodBase)
{
string result;
string connectionName;
string separator = "__";
string connectionNameAndTicks;
string[] segments = methodBase.Name.Split(new string[] { separator }, StringSplitOptions.None);
string environment = segments[0];
string rawVersionName = segments[1];
string equipmentTypeDirectory = segments[2];
string ticks = DateTime.Now.Ticks.ToString();
string comment = segments[segments.Length - 1];
string versionName = segments[1].Replace('_', '.');
string before = string.Concat(environment, separator, rawVersionName, separator, equipmentTypeDirectory, separator);
string after = methodBase.Name.Substring(before.Length);
if (after.Length < ticks.Length)
{
connectionName = after;
connectionNameAndTicks = ticks;
}
else
{
connectionNameAndTicks = after.Substring(0, after.Length - 2 - comment.Length);
connectionName = connectionNameAndTicks.Substring(0, connectionNameAndTicks.Length - ticks.Length);
ticks = connectionNameAndTicks.Substring(connectionName.Length);
}
result = Path.Combine(environment, equipmentTypeDirectory, versionName, $"{environment}__{rawVersionName}__{equipmentTypeDirectory}__{connectionName}", ticks, $"{connectionName.Replace('_', '-')}.json");
if (result.Contains('/'))
result = string.Concat('/', result);
else
result = string.Concat('\\', result);
return result;
}
internal static void CompareSaveTSV(string textFileDirectory, string[] join)
{
if (join[0] != join[1])
{
Process.Start("explorer.exe", textFileDirectory);
File.WriteAllText(Path.Combine(textFileDirectory, "0.tsv"), join[0]);
File.WriteAllText(Path.Combine(textFileDirectory, "1.tsv"), join[1]);
}
}
internal static void CompareSaveJSON(string textFileDirectory, string[] json)
{
if (json[0] != json[1])
{
Process.Start("explorer.exe", textFileDirectory);
File.WriteAllText(Path.Combine(textFileDirectory, "0.json"), json[0]);
File.WriteAllText(Path.Combine(textFileDirectory, "1.json"), json[1]);
}
}
internal static void CompareSave(string textFileDirectory, Tuple<string, string[], string[]> pdsf, Tuple<string, string[], string[]> pdsfNew)
{
if (pdsf.Item1 != pdsfNew.Item1)
{
Process.Start("explorer.exe", textFileDirectory);
File.WriteAllText(Path.Combine(textFileDirectory, "0.dat"), pdsf.Item1);
File.WriteAllText(Path.Combine(textFileDirectory, "1.dat"), pdsfNew.Item1);
}
}
}
}
File diff suppressed because it is too large
View File
@ -0,0 +1,30 @@
using Microsoft.VisualStudio.TestTools.UnitTesting;
using System;
namespace Shared
{
public class EAFLoggingUnitTesting : LoggingUnitTesting, IDisposable
{
protected readonly AdaptationTesting _AdaptationTesting;
public AdaptationTesting AdaptationTesting => _AdaptationTesting;
public EAFLoggingUnitTesting(TestContext testContext, Type declaringType, bool skipEquipmentDictionary) :
base(testContext, declaringType)
{
if (testContext is null || declaringType is null)
_AdaptationTesting = null;
else
_AdaptationTesting = new AdaptationTesting(testContext, skipEquipmentDictionary);
}
public new void Dispose()
{
base.Dispose();
}
}
}
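// Usage note (inferred from the test classes above): the CreateSelfDescription test classes appear to
// derive from EAFLoggingUnitTesting, constructing themselves with the MSTest TestContext in
// ClassInitialize and exposing Logger and AdaptationTesting to the Extract test classes.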
View File
@ -0,0 +1,171 @@
using System;
using System.Diagnostics;
using System.Runtime.InteropServices;
namespace Shared
{
public class IsEnvironment
{
public enum Name
{
LinuxDevelopment,
LinuxProduction,
LinuxStaging,
OSXDevelopment,
OSXProduction,
OSXStaging,
WindowsDevelopment,
WindowsProduction,
WindowsStaging
}
public bool DebuggerWasAttachedDuringConstructor { get; private set; }
public bool Development { get; private set; }
public bool Linux { get; private set; }
public bool OSX { get; private set; }
public bool Production { get; private set; }
public bool Staging { get; private set; }
public bool Windows { get; private set; }
public string Profile { get; private set; }
public string AppSettingsFileName { get; private set; }
public string ASPNetCoreEnvironment { get; private set; }
public IsEnvironment(string testCategory)
{
if (testCategory.EndsWith(".json"))
{
Production = testCategory == "appsettings.json";
Staging = testCategory.EndsWith(nameof(Staging));
OSX = RuntimeInformation.IsOSPlatform(OSPlatform.OSX);
Development = testCategory.EndsWith(nameof(Development));
Linux = RuntimeInformation.IsOSPlatform(OSPlatform.Linux);
DebuggerWasAttachedDuringConstructor = Debugger.IsAttached;
Windows = RuntimeInformation.IsOSPlatform(OSPlatform.Windows);
ASPNetCoreEnvironment = Environment.GetEnvironmentVariable("ASPNETCORE_ENVIRONMENT");
}
else
{
DebuggerWasAttachedDuringConstructor = Debugger.IsAttached;
OSX = !string.IsNullOrEmpty(testCategory) && testCategory.StartsWith(nameof(OSX));
ASPNetCoreEnvironment = Environment.GetEnvironmentVariable("ASPNETCORE_ENVIRONMENT");
Linux = !string.IsNullOrEmpty(testCategory) && testCategory.StartsWith(nameof(Linux));
Staging = !string.IsNullOrEmpty(testCategory) && testCategory.EndsWith(nameof(Staging));
Windows = !string.IsNullOrEmpty(testCategory) && testCategory.StartsWith(nameof(Windows));
Production = !string.IsNullOrEmpty(testCategory) && testCategory.EndsWith(nameof(Production));
Development = !string.IsNullOrEmpty(testCategory) && testCategory.EndsWith(nameof(Development));
}
Profile = GetProfile();
AppSettingsFileName = GetAppSettingsFileName(processesCount: null);
}
public IsEnvironment(bool isDevelopment, bool isStaging, bool isProduction)
{
Staging = isStaging;
Production = isProduction;
Development = isDevelopment;
OSX = RuntimeInformation.IsOSPlatform(OSPlatform.OSX);
Linux = RuntimeInformation.IsOSPlatform(OSPlatform.Linux);
DebuggerWasAttachedDuringConstructor = Debugger.IsAttached;
Windows = RuntimeInformation.IsOSPlatform(OSPlatform.Windows);
ASPNetCoreEnvironment = Environment.GetEnvironmentVariable("ASPNETCORE_ENVIRONMENT");
Profile = GetProfile();
AppSettingsFileName = GetAppSettingsFileName(processesCount: null);
}
public IsEnvironment(int? processesCount, bool nullASPNetCoreEnvironmentIsDevelopment, bool nullASPNetCoreEnvironmentIsProduction)
{
OSX = RuntimeInformation.IsOSPlatform(OSPlatform.OSX);
Linux = RuntimeInformation.IsOSPlatform(OSPlatform.Linux);
DebuggerWasAttachedDuringConstructor = Debugger.IsAttached;
Windows = RuntimeInformation.IsOSPlatform(OSPlatform.Windows);
ASPNetCoreEnvironment = Environment.GetEnvironmentVariable("ASPNETCORE_ENVIRONMENT");
if (nullASPNetCoreEnvironmentIsDevelopment && nullASPNetCoreEnvironmentIsProduction)
throw new Exception();
else if (string.IsNullOrEmpty(ASPNetCoreEnvironment) && nullASPNetCoreEnvironmentIsProduction)
Production = true;
else if (string.IsNullOrEmpty(ASPNetCoreEnvironment) && nullASPNetCoreEnvironmentIsDevelopment)
Development = true;
else if (string.IsNullOrEmpty(ASPNetCoreEnvironment) && !nullASPNetCoreEnvironmentIsDevelopment && !nullASPNetCoreEnvironmentIsProduction)
throw new Exception();
else
{
Staging = ASPNetCoreEnvironment is not null && ASPNetCoreEnvironment.EndsWith(nameof(Staging));
Production = ASPNetCoreEnvironment is not null && ASPNetCoreEnvironment.EndsWith(nameof(Production));
Development = ASPNetCoreEnvironment is not null && ASPNetCoreEnvironment.EndsWith(nameof(Development));
}
Profile = GetProfile();
AppSettingsFileName = GetAppSettingsFileName(processesCount);
}
private string GetProfile()
{
string result;
if (Windows && Production)
result = nameof(Production);
else if (Windows && Staging)
result = nameof(Staging);
else if (Windows && Development)
result = nameof(Development);
else if (Linux && Production)
result = nameof(Name.LinuxProduction);
else if (Linux && Staging)
result = nameof(Name.LinuxStaging);
else if (Linux && Development)
result = nameof(Name.LinuxDevelopment);
else if (OSX && Production)
result = nameof(Name.OSXProduction);
else if (OSX && Staging)
result = nameof(Name.OSXStaging);
else if (OSX && Development)
result = nameof(Name.OSXDevelopment);
else
throw new Exception();
return result;
}
private string GetAppSettingsFileName(int? processesCount)
{
string result;
if (Production)
{
if (processesCount is null)
result = "appsettings.json";
else
result = $"appsettings.{processesCount}.json";
}
else
{
string environment;
if (Staging)
environment = nameof(Staging);
else if (Development)
environment = nameof(Development);
else
throw new Exception();
if (processesCount is null)
result = $"appsettings.{environment}.json";
else
result = $"appsettings.{environment}.{processesCount}.json";
}
return result;
}
public static string GetEnvironmentName(IsEnvironment isEnvironment)
{
string result;
if (isEnvironment.Windows)
result = nameof(IsEnvironment.Windows);
else if (isEnvironment.Linux)
result = nameof(IsEnvironment.Linux);
else if (isEnvironment.OSX)
result = nameof(IsEnvironment.OSX);
else
throw new Exception();
return result;
}
}
}
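// Example (sketch, illustrative test category string): new IsEnvironment("WindowsStaging") sets
// Windows from the prefix and Staging from the suffix, so Profile resolves to "Staging" and
// AppSettingsFileName to "appsettings.Staging.json".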
View File
@ -0,0 +1,113 @@
using Microsoft.Extensions.Configuration;
using Microsoft.Extensions.Logging;
using Microsoft.VisualStudio.TestTools.UnitTesting;
using System;
using System.Collections.Generic;
using System.IO;
namespace Shared
{
public class LoggingUnitTesting : UnitTesting, IDisposable
{
protected ILogger<object> _Logger;
protected ILoggerFactory _LoggerFactory;
protected readonly LogLevel? _DefaultLogLevel;
protected readonly LogLevel? _Log4netProviderLogLevel;
protected readonly IConfigurationRoot _ConfigurationRoot;
public ILogger<object> Logger => _Logger;
public LogLevel? DefaultLogLevel => _DefaultLogLevel;
public ILoggerFactory LoggerFactory => _LoggerFactory;
public IConfigurationRoot ConfigurationRoot => _ConfigurationRoot;
public LogLevel? Log4netProviderLogLevel => _Log4netProviderLogLevel;
public LoggingUnitTesting(TestContext testContext, Type declaringType) :
base(testContext, declaringType)
{
_LoggerFactory = new LoggerFactory();
if (testContext is null || declaringType is null)
{
_ConfigurationRoot = null;
_DefaultLogLevel = null;
_Log4netProviderLogLevel = null;
}
else
{
LogLevel logLevel;
IConfigurationSection configurationSection;
List<LogLevel> logLevels = new List<LogLevel>();
string defaultLogLevelSection = "Logging:LogLevel:Default";
string log4netProviderLogLevelSection = "Logging:LogLevel:Log4netProvider";
string[] sections = new string[] { defaultLogLevelSection, log4netProviderLogLevelSection };
IConfigurationBuilder configurationBuilder = new ConfigurationBuilder()
.AddEnvironmentVariables()
.AddJsonFile(_IsEnvironment.AppSettingsFileName, optional: false, reloadOnChange: true);
_ConfigurationRoot = configurationBuilder.Build();
foreach (string section in sections)
{
configurationSection = _ConfigurationRoot.GetSection(section);
if (configurationSection is null)
logLevel = LogLevel.Debug;
else if (!Enum.TryParse<LogLevel>(configurationSection.Value, out logLevel))
logLevel = LogLevel.Debug;
logLevels.Add(logLevel);
}
_DefaultLogLevel = logLevels[0];
_Log4netProviderLogLevel = logLevels[1];
}
if (DefaultLogLevel.HasValue)
_LoggerFactory.AddProvider(new DebugProvider(DefaultLogLevel.Value));
if (DefaultLogLevel.HasValue)
_LoggerFactory.AddProvider(new ConsoleProvider(DefaultLogLevel.Value));
_Logger = _LoggerFactory.CreateLogger<object>();
}
public static string GetEnvironmentSpecialDirectory()
{
string result = string.Empty;
string traceFile;
List<string> directories = new List<string>();
Environment.SpecialFolder[] specialFolders = new Environment.SpecialFolder[]
{
Environment.SpecialFolder.LocalApplicationData,
Environment.SpecialFolder.ApplicationData,
Environment.SpecialFolder.History,
Environment.SpecialFolder.CommonApplicationData,
Environment.SpecialFolder.InternetCache
};
foreach (Environment.SpecialFolder specialFolder in specialFolders)
directories.Add(Environment.GetFolderPath(specialFolder));
foreach (string directory in directories)
{
for (int i = 1; i < 3; i++)
{
if (i == 1)
result = directory;
else
result = string.Concat("D", directory.Substring(1));
try
{
if (!Directory.Exists(result))
Directory.CreateDirectory(result);
traceFile = string.Concat(result, @"\", DateTime.Now.Ticks, ".txt");
File.WriteAllText(traceFile, traceFile);
File.Delete(traceFile);
break;
}
catch (Exception) { result = string.Empty; }
}
if (!string.IsNullOrEmpty(result))
break;
}
return result;
}
public void Dispose()
{
_LoggerFactory.Dispose();
}
}
}
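// The configuration keys read above are "Logging:LogLevel:Default" and "Logging:LogLevel:Log4netProvider";
// a minimal appsettings fragment that satisfies them could look like this (illustrative values):
// {
//   "Logging": {
//     "LogLevel": {
//       "Default": "Debug",
//       "Log4netProvider": "Debug"
//     }
//   }
// }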
File diff suppressed because it is too large
View File
@ -0,0 +1,25 @@
using System.Xml.Serialization;
namespace Shared.PasteSpecialXml.EAF.XML.API.ConfigurationData
{
[XmlRoot(ElementName = "ChildBackboneMembers", Namespace = "http://schemas.datacontract.org/2004/07/EafManagement.Configuration.Services")]
public class ChildBackboneMembers
{
[XmlAttribute(AttributeName = "nil", Namespace = "http://www.w3.org/2001/XMLSchema-instance")]
public string Nil { get; set; }
}
[XmlRoot(ElementName = "ConfigurationData", Namespace = "http://schemas.datacontract.org/2004/07/EafManagement.Configuration.Services")]
public class ConfigurationData
{
[XmlElement(ElementName = "ChildBackboneMembers", Namespace = "http://schemas.datacontract.org/2004/07/EafManagement.Configuration.Services")]
public ChildBackboneMembers ChildBackboneMembers { get; set; }
[XmlElement(ElementName = "Data", Namespace = "http://schemas.datacontract.org/2004/07/EafManagement.Configuration.Services")]
public string Data { get; set; }
[XmlAttribute(AttributeName = "xmlns")]
public string Xmlns { get; set; }
[XmlAttribute(AttributeName = "i", Namespace = "http://www.w3.org/2000/xmlns/")]
public string I { get; set; }
}
}
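// Sketch of the XML shape these classes are written to deserialize (element values illustrative):
// <ConfigurationData xmlns="http://schemas.datacontract.org/2004/07/EafManagement.Configuration.Services"
//                    xmlns:i="http://www.w3.org/2001/XMLSchema-instance">
//   <ChildBackboneMembers i:nil="true" />
//   <Data>...</Data>
// </ConfigurationData>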
File diff suppressed because it is too large
File diff suppressed because it is too large
View File
@ -0,0 +1,93 @@
using Microsoft.VisualStudio.TestTools.UnitTesting;
using System;
using System.Diagnostics;
using System.IO;
using System.Linq;
using System.Reflection;
using System.Text;
using System.Text.Json;
using System.Threading;
namespace Shared
{
public class UnitTesting
{
protected readonly IsEnvironment _IsEnvironment;
public IsEnvironment IsEnvironment => _IsEnvironment;
public UnitTesting(TestContext testContext, Type declaringType)
{
if (testContext is null || declaringType is null)
_IsEnvironment = null;
else
{
string projectDirectory = GetProjectDirectory(testContext);
string json = JsonSerializer.Serialize(testContext.Properties);
string vsCodeDirectory = Path.Combine(projectDirectory, ".vscode");
if (!Directory.Exists(vsCodeDirectory))
Directory.CreateDirectory(vsCodeDirectory);
string launchText = GetLaunchText();
File.WriteAllText(Path.Combine(vsCodeDirectory, "launch.json"), launchText);
for (int i = 0; i < int.MaxValue; i++)
{
if (!json.Contains("Debugger.IsAttached") || Debugger.IsAttached)
break;
Thread.Sleep(500);
}
MethodBase methodBase = declaringType.GetMethod(testContext.TestName);
if (!(methodBase is null))
{
TestCategoryAttribute testCategoryAttribute = methodBase.GetCustomAttribute<TestCategoryAttribute>();
if (!(testCategoryAttribute is null))
{
foreach (string testCategory in testCategoryAttribute.TestCategories)
_IsEnvironment = new IsEnvironment(testCategory);
}
}
if (_IsEnvironment is null)
_IsEnvironment = new IsEnvironment(processesCount: null, nullASPNetCoreEnvironmentIsDevelopment: Debugger.IsAttached, nullASPNetCoreEnvironmentIsProduction: !Debugger.IsAttached);
}
}
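// GetProjectDirectory walks up from TestContext.DeploymentDirectory until it finds a directory that
// contains a "*.Tests.*proj" project file and throws if the search reaches the drive root first;
// GetLaunchText below emits a minimal .vscode/launch.json that attaches to the current test host process.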
internal static string GetProjectDirectory(TestContext testContext)
{
string result;
string[] checkFiles = null;
result = Path.GetDirectoryName(testContext.DeploymentDirectory);
for (int i = 0; i < int.MaxValue; i++)
{
if (string.IsNullOrEmpty(result))
break;
checkFiles = Directory.GetFiles(result, "*.Tests.*proj", SearchOption.TopDirectoryOnly);
if (checkFiles.Any())
break;
result = Path.GetDirectoryName(result);
}
if (string.IsNullOrEmpty(result) || checkFiles is null || !checkFiles.Any())
throw new Exception(result);
return result;
}
internal static string GetLaunchText()
{
StringBuilder result = new StringBuilder();
result.
AppendLine("{").
AppendLine(" \"configurations\": [").
AppendLine(" {").
AppendLine(" \"name\": \".NET Core Attach\",").
AppendLine(" \"type\": \"coreclr\",").
AppendLine(" \"request\": \"attach\",").
AppendLine($" \"processId\": {System.Diagnostics.Process.GetCurrentProcess().Id}").
AppendLine(" }").
AppendLine(" ]").
AppendLine("}");
return result.ToString();
}
}
}
Some files were not shown because too many files have changed in this diff