Initial Commit

2023-05-08 15:54:57 -07:00
commit 5924860f6c
166 changed files with 19013 additions and 0 deletions

.gitignore (vendored, 341 lines added)

@@ -0,0 +1,341 @@
## Ignore Visual Studio temporary files, build results, and
## files generated by popular Visual Studio add-ons.
##
## Get latest from https://github.com/github/gitignore/blob/master/VisualStudio.gitignore
# User-specific files
*.suo
*.user
*.userosscache
*.sln.docstates
# User-specific files (MonoDevelop/Xamarin Studio)
*.userprefs
# Build results
[Dd]ebug/
[Dd]ebugPublic/
[Rr]elease/
[Rr]eleases/
x64/
x86/
bld/
[Bb]in/
[Oo]bj/
# Visual Studio 2015/2017 cache/options directory
.vs/
# Uncomment if you have tasks that create the project's static files in wwwroot
#wwwroot/
# Visual Studio 2017 auto generated files
Generated\ Files/
# MSTest test Results
[Tt]est[Rr]esult*/
[Bb]uild[Ll]og.*
# NUNIT
*.VisualState.xml
TestResult.xml
# Build Results of an ATL Project
[Dd]ebugPS/
[Rr]eleasePS/
dlldata.c
# Benchmark Results
BenchmarkDotNet.Artifacts/
# .NET Core
project.lock.json
project.fragment.lock.json
artifacts/
**/Properties/launchSettings.json
# StyleCop
StyleCopReport.xml
# Files built by Visual Studio
*_i.c
*_p.c
*_i.h
*.ilk
*.meta
*.obj
*.iobj
*.pch
*.pdb
*.ipdb
*.pgc
*.pgd
*.rsp
*.sbr
*.tlb
*.tli
*.tlh
*.tmp
*.tmp_proj
*.log
*.vspscc
*.vssscc
.builds
*.pidb
*.svclog
*.scc
# Chutzpah Test files
_Chutzpah*
# Visual C++ cache files
ipch/
*.aps
*.ncb
*.opendb
*.opensdf
*.sdf
*.cachefile
*.VC.db
*.VC.VC.opendb
# Visual Studio profiler
*.psess
*.vsp
*.vspx
*.sap
# Visual Studio Trace Files
*.e2e
# TFS 2012 Local Workspace
$tf/
# Guidance Automation Toolkit
*.gpState
# ReSharper is a .NET coding add-in
_ReSharper*/
*.[Rr]e[Ss]harper
*.DotSettings.user
# JustCode is a .NET coding add-in
.JustCode
# TeamCity is a build add-in
_TeamCity*
# DotCover is a Code Coverage Tool
*.dotCover
# AxoCover is a Code Coverage Tool
.axoCover/*
!.axoCover/settings.json
# Visual Studio code coverage results
*.coverage
*.coveragexml
# NCrunch
_NCrunch_*
.*crunch*.local.xml
nCrunchTemp_*
# MightyMoose
*.mm.*
AutoTest.Net/
# Web workbench (sass)
.sass-cache/
# Installshield output folder
[Ee]xpress/
# DocProject is a documentation generator add-in
DocProject/buildhelp/
DocProject/Help/*.HxT
DocProject/Help/*.HxC
DocProject/Help/*.hhc
DocProject/Help/*.hhk
DocProject/Help/*.hhp
DocProject/Help/Html2
DocProject/Help/html
# Click-Once directory
publish/
# Publish Web Output
*.[Pp]ublish.xml
*.azurePubxml
# Note: Comment the next line if you want to checkin your web deploy settings,
# but database connection strings (with potential passwords) will be unencrypted
*.pubxml
*.publishproj
# Microsoft Azure Web App publish settings. Comment the next line if you want to
# checkin your Azure Web App publish settings, but sensitive information contained
# in these scripts will be unencrypted
PublishScripts/
# NuGet Packages
*.nupkg
# The packages folder can be ignored because of Package Restore
**/[Pp]ackages/*
# except build/, which is used as an MSBuild target.
!**/[Pp]ackages/build/
# Uncomment if necessary however generally it will be regenerated when needed
#!**/[Pp]ackages/repositories.config
# NuGet v3's project.json files produces more ignorable files
*.nuget.props
*.nuget.targets
# Microsoft Azure Build Output
csx/
*.build.csdef
# Microsoft Azure Emulator
ecf/
rcf/
# Windows Store app package directories and files
AppPackages/
BundleArtifacts/
Package.StoreAssociation.xml
_pkginfo.txt
*.appx
# Visual Studio cache files
# files ending in .cache can be ignored
*.[Cc]ache
# but keep track of directories ending in .cache
!*.[Cc]ache/
# Others
ClientBin/
~$*
*~
*.dbmdl
*.dbproj.schemaview
*.jfm
*.pfx
*.publishsettings
orleans.codegen.cs
# Including strong name files can present a security risk
# (https://github.com/github/gitignore/pull/2483#issue-259490424)
#*.snk
# Since there are multiple workflows, uncomment next line to ignore bower_components
# (https://github.com/github/gitignore/pull/1529#issuecomment-104372622)
#bower_components/
# RIA/Silverlight projects
Generated_Code/
# Backup & report files from converting an old project file
# to a newer Visual Studio version. Backup files are not needed,
# because we have git ;-)
_UpgradeReport_Files/
Backup*/
UpgradeLog*.XML
UpgradeLog*.htm
ServiceFabricBackup/
*.rptproj.bak
# SQL Server files
*.mdf
*.ldf
*.ndf
# Business Intelligence projects
*.rdl.data
*.bim.layout
*.bim_*.settings
*.rptproj.rsuser
# Microsoft Fakes
FakesAssemblies/
# GhostDoc plugin setting file
*.GhostDoc.xml
# Node.js Tools for Visual Studio
.ntvs_analysis.dat
node_modules/
# Visual Studio 6 build log
*.plg
# Visual Studio 6 workspace options file
*.opt
# Visual Studio 6 auto-generated workspace file (contains which files were open etc.)
*.vbw
# Visual Studio LightSwitch build output
**/*.HTMLClient/GeneratedArtifacts
**/*.DesktopClient/GeneratedArtifacts
**/*.DesktopClient/ModelManifest.xml
**/*.Server/GeneratedArtifacts
**/*.Server/ModelManifest.xml
_Pvt_Extensions
# Paket dependency manager
.paket/paket.exe
paket-files/
# FAKE - F# Make
.fake/
# JetBrains Rider
.idea/
*.sln.iml
# CodeRush
.cr/
# Python Tools for Visual Studio (PTVS)
__pycache__/
*.pyc
# Cake - Uncomment if you are using it
# tools/**
# !tools/packages.config
# Tabs Studio
*.tss
# Telerik's JustMock configuration file
*.jmconfig
# BizTalk build output
*.btp.cs
*.btm.cs
*.odx.cs
*.xsd.cs
# OpenCover UI analysis results
OpenCover/
# Azure Stream Analytics local run output
ASALocalRun/
# MSBuild Binary and Structured Log
*.binlog
# NVidia Nsight GPU debugger configuration file
*.nvuser
# MFractors (Xamarin productivity tool) working folder
.mfractor/
##
## Visual Studio Code
##
*/!.vscode/extensions.json
*/!.vscode/launch.json
*/!.vscode/settings.json
*/!.vscode/tasks.json
*/.vscode/*
*/.vscode/ReportGenerator/*
*.lnk

@@ -0,0 +1,12 @@
{
"version": 1,
"isRoot": true,
"tools": {
"dotnet-reportgenerator-globaltool": {
"version": "5.1.15",
"commands": [
"reportgenerator"
]
}
}
}

Adaptation/.editorconfig (248 lines added)

@@ -0,0 +1,248 @@
[*.cs]
csharp_indent_block_contents = true
csharp_indent_braces = false
csharp_indent_case_contents = true
csharp_indent_case_contents_when_block = true
csharp_indent_labels = one_less_than_current
csharp_indent_switch_labels = true
csharp_new_line_before_catch = true
csharp_new_line_before_else = true
csharp_new_line_before_finally = true
csharp_new_line_before_members_in_anonymous_types = true
csharp_new_line_before_members_in_object_initializers = true
csharp_new_line_before_open_brace = all
csharp_new_line_between_query_expression_clauses = true
csharp_prefer_braces = false
csharp_prefer_simple_default_expression = true:warning
csharp_prefer_simple_using_statement = true:warning
csharp_prefer_static_local_function = true:warning
csharp_preferred_modifier_order = public,private,protected,internal,static,extern,new,virtual,abstract,sealed,override,readonly,unsafe,volatile,async
csharp_preserve_single_line_blocks = true
csharp_preserve_single_line_statements = false
csharp_space_after_cast = false
csharp_space_after_colon_in_inheritance_clause = true
csharp_space_after_comma = true
csharp_space_after_dot = false
csharp_space_after_keywords_in_control_flow_statements = true
csharp_space_after_semicolon_in_for_statement = true
csharp_space_around_binary_operators = before_and_after
csharp_space_around_declaration_statements = false
csharp_space_before_colon_in_inheritance_clause = true
csharp_space_before_comma = false
csharp_space_before_dot = false
csharp_space_before_open_square_brackets = false
csharp_space_before_semicolon_in_for_statement = false
csharp_space_between_empty_square_brackets = false
csharp_space_between_method_call_empty_parameter_list_parentheses = false
csharp_space_between_method_call_name_and_opening_parenthesis = false
csharp_space_between_method_call_parameter_list_parentheses = false
csharp_space_between_method_declaration_empty_parameter_list_parentheses = false
csharp_space_between_method_declaration_name_and_open_parenthesis = false
csharp_space_between_method_declaration_parameter_list_parentheses = false
csharp_space_between_parentheses = false
csharp_space_between_square_brackets = false
csharp_style_allow_blank_line_after_colon_in_constructor_initializer_experimental = true
csharp_style_allow_blank_lines_between_consecutive_braces_experimental = true
csharp_style_allow_embedded_statements_on_same_line_experimental = true
csharp_style_conditional_delegate_call = true
csharp_style_deconstructed_variable_declaration = false
csharp_style_expression_bodied_accessors = when_on_single_line:warning
csharp_style_expression_bodied_constructors = when_on_single_line:warning
csharp_style_expression_bodied_indexers = when_on_single_line:warning
csharp_style_expression_bodied_lambdas = when_on_single_line:warning
csharp_style_expression_bodied_local_functions = when_on_single_line:warning
csharp_style_expression_bodied_methods = when_on_single_line:warning
csharp_style_expression_bodied_operators = when_on_single_line:warning
csharp_style_expression_bodied_properties = when_on_single_line:warning
csharp_style_implicit_object_creation_when_type_is_apparent = true:warning
csharp_style_inlined_variable_declaration = false
csharp_style_namespace_declarations = file_scoped:warning
csharp_style_pattern_local_over_anonymous_function = true:warning
csharp_style_pattern_matching_over_as_with_null_check = true:warning
csharp_style_pattern_matching_over_is_with_cast_check = true:warning
csharp_style_prefer_index_operator = false:warning
csharp_style_prefer_not_pattern = true:warning
csharp_style_prefer_null_check_over_type_check = true
csharp_style_prefer_pattern_matching = true:warning
csharp_style_prefer_range_operator = false:warning
csharp_style_prefer_switch_expression = true:warning
csharp_style_throw_expression = true
csharp_style_unused_value_assignment_preference = discard_variable:warning
csharp_style_unused_value_expression_statement_preference = discard_variable:warning
csharp_style_var_elsewhere = false:warning
csharp_style_var_for_built_in_types = false:warning
csharp_style_var_when_type_is_apparent = false:warning
csharp_using_directive_placement = outside_namespace
dotnet_code_quality_unused_parameters = all
dotnet_code_quality_unused_parameters = non_public # IDE0060: Remove unused parameter
dotnet_code_quality.CAXXXX.api_surface = private, internal
dotnet_diagnostic.CA1816.severity = none # CA1816: Call GC.SuppressFinalize correctly
dotnet_diagnostic.CA1825.severity = warning # CA1823: Avoid zero-length array allocations
dotnet_diagnostic.CA1829.severity = warning # CA1829: Use Length/Count property instead of Count() when available
dotnet_diagnostic.CA1834.severity = warning # CA1834: Consider using 'StringBuilder.Append(char)' when applicable
dotnet_diagnostic.CA1846.severity = none # CA1846: Prefer AsSpan over Substring
dotnet_diagnostic.CA1847.severity = none # CA1847: Use string.Contains(char) instead of string.Contains(string) with single characters
dotnet_diagnostic.IDE0001.severity = warning # IDE0001: Simplify name
dotnet_diagnostic.IDE0002.severity = warning # Simplify (member access) - System.Version.Equals("1", "2"); Version.Equals("1", "2");
dotnet_diagnostic.IDE0004.severity = warning # IDE0004: Cast is redundant.
dotnet_diagnostic.IDE0005.severity = warning # Using directive is unnecessary
dotnet_diagnostic.IDE0047.severity = warning # IDE0047: Parentheses can be removed
dotnet_diagnostic.IDE0049.severity = warning # Use language keywords instead of framework type names for type references (IDE0049)
dotnet_diagnostic.IDE0060.severity = warning # IDE0060: Remove unused parameter
dotnet_naming_rule.abstract_method_should_be_pascal_case.severity = warning
dotnet_naming_rule.abstract_method_should_be_pascal_case.style = pascal_case
dotnet_naming_rule.abstract_method_should_be_pascal_case.symbols = abstract_method
dotnet_naming_rule.class_should_be_pascal_case.severity = warning
dotnet_naming_rule.class_should_be_pascal_case.style = pascal_case
dotnet_naming_rule.class_should_be_pascal_case.symbols = class
dotnet_naming_rule.delegate_should_be_pascal_case.severity = warning
dotnet_naming_rule.delegate_should_be_pascal_case.style = pascal_case
dotnet_naming_rule.delegate_should_be_pascal_case.symbols = delegate
dotnet_naming_rule.enum_should_be_pascal_case.severity = warning
dotnet_naming_rule.enum_should_be_pascal_case.style = pascal_case
dotnet_naming_rule.enum_should_be_pascal_case.symbols = enum
dotnet_naming_rule.event_should_be_pascal_case.severity = warning
dotnet_naming_rule.event_should_be_pascal_case.style = pascal_case
dotnet_naming_rule.event_should_be_pascal_case.symbols = event
dotnet_naming_rule.interface_should_be_begins_with_i.severity = warning
dotnet_naming_rule.interface_should_be_begins_with_i.style = begins_with_i
dotnet_naming_rule.interface_should_be_begins_with_i.symbols = interface
dotnet_naming_rule.method_should_be_pascal_case.severity = warning
dotnet_naming_rule.method_should_be_pascal_case.style = pascal_case
dotnet_naming_rule.method_should_be_pascal_case.symbols = method
dotnet_naming_rule.non_field_members_should_be_pascal_case.severity = warning
dotnet_naming_rule.non_field_members_should_be_pascal_case.style = pascal_case
dotnet_naming_rule.non_field_members_should_be_pascal_case.symbols = non_field_members
dotnet_naming_rule.private_method_should_be_pascal_case.severity = warning
dotnet_naming_rule.private_method_should_be_pascal_case.style = pascal_case
dotnet_naming_rule.private_method_should_be_pascal_case.symbols = private_method
dotnet_naming_rule.private_or_internal_field_should_be_private_of_internal_field.severity = warning
dotnet_naming_rule.private_or_internal_field_should_be_private_of_internal_field.style = private_of_internal_field
dotnet_naming_rule.private_or_internal_field_should_be_private_of_internal_field.symbols = private_or_internal_field
dotnet_naming_rule.private_or_internal_static_field_should_be_private_of_internal_field.severity = warning
dotnet_naming_rule.private_or_internal_static_field_should_be_private_of_internal_field.style = private_of_internal_field
dotnet_naming_rule.private_or_internal_static_field_should_be_private_of_internal_field.symbols = private_or_internal_static_field
dotnet_naming_rule.property_should_be_pascal_case.severity = warning
dotnet_naming_rule.property_should_be_pascal_case.style = pascal_case
dotnet_naming_rule.property_should_be_pascal_case.symbols = property
dotnet_naming_rule.public_or_protected_field_should_be_private_of_internal_field.severity = warning
dotnet_naming_rule.public_or_protected_field_should_be_private_of_internal_field.style = private_of_internal_field
dotnet_naming_rule.public_or_protected_field_should_be_private_of_internal_field.symbols = public_or_protected_field
dotnet_naming_rule.static_field_should_be_pascal_case.severity = warning
dotnet_naming_rule.static_field_should_be_pascal_case.style = pascal_case
dotnet_naming_rule.static_field_should_be_pascal_case.symbols = static_field
dotnet_naming_rule.static_method_should_be_pascal_case.severity = warning
dotnet_naming_rule.static_method_should_be_pascal_case.style = pascal_case
dotnet_naming_rule.static_method_should_be_pascal_case.symbols = static_method
dotnet_naming_rule.struct_should_be_pascal_case.severity = warning
dotnet_naming_rule.struct_should_be_pascal_case.style = pascal_case
dotnet_naming_rule.struct_should_be_pascal_case.symbols = struct
dotnet_naming_rule.types_should_be_pascal_case.severity = warning
dotnet_naming_rule.types_should_be_pascal_case.style = pascal_case
dotnet_naming_rule.types_should_be_pascal_case.symbols = types
dotnet_naming_style.begins_with_i.capitalization = pascal_case
dotnet_naming_style.begins_with_i.required_prefix = I
dotnet_naming_style.begins_with_i.required_suffix =
dotnet_naming_style.begins_with_i.word_separator =
dotnet_naming_style.pascal_case.capitalization = pascal_case
dotnet_naming_style.pascal_case.required_prefix =
dotnet_naming_style.pascal_case.required_suffix =
dotnet_naming_style.pascal_case.word_separator =
dotnet_naming_style.private_of_internal_field.capitalization = pascal_case
dotnet_naming_style.private_of_internal_field.required_prefix = _
dotnet_naming_style.private_of_internal_field.required_suffix =
dotnet_naming_style.private_of_internal_field.word_separator =
dotnet_naming_symbols.abstract_method.applicable_accessibilities = public, internal, private, protected, protected_internal, private_protected
dotnet_naming_symbols.abstract_method.applicable_kinds = method
dotnet_naming_symbols.abstract_method.required_modifiers = abstract
dotnet_naming_symbols.class.applicable_accessibilities = public, internal, private, protected, protected_internal, private_protected
dotnet_naming_symbols.class.applicable_kinds = class
dotnet_naming_symbols.class.required_modifiers =
dotnet_naming_symbols.delegate.applicable_accessibilities = public, internal, private, protected, protected_internal, private_protected
dotnet_naming_symbols.delegate.applicable_kinds = delegate
dotnet_naming_symbols.delegate.required_modifiers =
dotnet_naming_symbols.enum.applicable_accessibilities = public, internal, private, protected, protected_internal, private_protected
dotnet_naming_symbols.enum.applicable_kinds = enum
dotnet_naming_symbols.enum.required_modifiers =
dotnet_naming_symbols.event.applicable_accessibilities = public, internal, private, protected, protected_internal, private_protected
dotnet_naming_symbols.event.applicable_kinds = event
dotnet_naming_symbols.event.required_modifiers =
dotnet_naming_symbols.interface.applicable_accessibilities = public, internal, private, protected, protected_internal, private_protected
dotnet_naming_symbols.interface.applicable_kinds = interface
dotnet_naming_symbols.interface.required_modifiers =
dotnet_naming_symbols.method.applicable_accessibilities = public
dotnet_naming_symbols.method.applicable_kinds = method
dotnet_naming_symbols.method.required_modifiers =
dotnet_naming_symbols.non_field_members.applicable_accessibilities = public, internal, private, protected, protected_internal, private_protected
dotnet_naming_symbols.non_field_members.applicable_kinds = property, event, method
dotnet_naming_symbols.non_field_members.required_modifiers =
dotnet_naming_symbols.private_method.applicable_accessibilities = private
dotnet_naming_symbols.private_method.applicable_kinds = method
dotnet_naming_symbols.private_method.required_modifiers =
dotnet_naming_symbols.private_or_internal_field.applicable_accessibilities = internal, private, private_protected
dotnet_naming_symbols.private_or_internal_field.applicable_kinds = field
dotnet_naming_symbols.private_or_internal_field.required_modifiers =
dotnet_naming_symbols.private_or_internal_static_field.applicable_accessibilities = internal, private, private_protected
dotnet_naming_symbols.private_or_internal_static_field.applicable_kinds = field
dotnet_naming_symbols.private_or_internal_static_field.required_modifiers = static
dotnet_naming_symbols.property.applicable_accessibilities = public, internal, private, protected, protected_internal, private_protected
dotnet_naming_symbols.property.applicable_kinds = property
dotnet_naming_symbols.property.required_modifiers =
dotnet_naming_symbols.public_or_protected_field.applicable_accessibilities = public, protected
dotnet_naming_symbols.public_or_protected_field.applicable_kinds = field
dotnet_naming_symbols.public_or_protected_field.required_modifiers =
dotnet_naming_symbols.static_field.applicable_accessibilities = public, internal, private, protected, protected_internal, private_protected
dotnet_naming_symbols.static_field.applicable_kinds = field
dotnet_naming_symbols.static_field.required_modifiers = static
dotnet_naming_symbols.static_method.applicable_accessibilities = public, internal, private, protected, protected_internal, private_protected
dotnet_naming_symbols.static_method.applicable_kinds = method
dotnet_naming_symbols.static_method.required_modifiers = static
dotnet_naming_symbols.struct.applicable_accessibilities = public, internal, private, protected, protected_internal, private_protected
dotnet_naming_symbols.struct.applicable_kinds = struct
dotnet_naming_symbols.struct.required_modifiers =
dotnet_naming_symbols.types.applicable_accessibilities = public, internal, private, protected, protected_internal, private_protected
dotnet_naming_symbols.types.applicable_kinds = class, struct, interface, enum
dotnet_naming_symbols.types.required_modifiers =
dotnet_remove_unnecessary_suppression_exclusions = 0
dotnet_separate_import_directive_groups = false
dotnet_sort_system_directives_first = false
dotnet_style_allow_multiple_blank_lines_experimental = false:warning
dotnet_style_allow_statement_immediately_after_block_experimental = true
dotnet_style_coalesce_expression = true
dotnet_style_collection_initializer = true:warning
dotnet_style_explicit_tuple_names = true:warning
dotnet_style_namespace_match_folder = true
dotnet_style_null_propagation = true:warning
dotnet_style_object_initializer = true:warning
dotnet_style_operator_placement_when_wrapping = beginning_of_line
dotnet_style_parentheses_in_arithmetic_binary_operators = always_for_clarity
dotnet_style_parentheses_in_other_binary_operators = always_for_clarity
dotnet_style_parentheses_in_other_operators = never_if_unnecessary
dotnet_style_parentheses_in_relational_binary_operators = always_for_clarity
dotnet_style_predefined_type_for_locals_parameters_members = true
dotnet_style_predefined_type_for_member_access = true:warning
dotnet_style_prefer_auto_properties = true:warning
dotnet_style_prefer_compound_assignment = true:warning
dotnet_style_prefer_conditional_expression_over_assignment = false
dotnet_style_prefer_conditional_expression_over_return = false
dotnet_style_prefer_inferred_anonymous_type_member_names = true:warning
dotnet_style_prefer_inferred_tuple_names = true:warning
dotnet_style_prefer_is_null_check_over_reference_equality_method = true:warning
dotnet_style_prefer_simplified_boolean_expressions = true:warning
dotnet_style_prefer_simplified_interpolation = true
dotnet_style_qualification_for_event = false:error
dotnet_style_qualification_for_field = false
dotnet_style_qualification_for_method = false:error
dotnet_style_qualification_for_property = false:error
dotnet_style_readonly_field = true:warning
dotnet_style_require_accessibility_modifiers = for_non_interface_members
end_of_line = crlf
file_header_template = unset
indent_size = 4
indent_style = space
insert_final_newline = false
root = true
tab_width = 4
# https://docs.microsoft.com/en-us/dotnet/fundamentals/code-analysis/quality-rules/ca1822
# https://github.com/dotnet/aspnetcore/blob/main/.editorconfig
# https://github.com/dotnet/project-system/blob/main/.editorconfig
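To make a few of the rules above concrete, the following short snippet (an editor-added illustration, not part of the commit; the namespace and type names are hypothetical) conforms to the configured style: file-scoped namespace, explicit types instead of var, underscore-prefixed Pascal-case private fields, and single-line expression-bodied members.

namespace Adaptation.StyleSample;

// Illustrative only; demonstrates the naming and expression-body preferences configured above.
public class StyleSample
{
    private readonly int _Count;

    public StyleSample(int count) => _Count = count;

    public int Count => _Count;

    public bool IsEmpty() => _Count == 0;
}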

@@ -0,0 +1,5 @@
namespace Adaptation.Eaf.Core;
public class BackboneComponent
{
}

@@ -0,0 +1,5 @@
namespace Adaptation.Eaf.Core;
public class BackboneStatusCache
{
}

@@ -0,0 +1,5 @@
namespace Adaptation.Eaf.Core;
public interface ILoggingSetupManager
{
}

@@ -0,0 +1,5 @@
namespace Adaptation.Eaf.Core;
public class StatusItem
{
}

@@ -0,0 +1,53 @@
using Adaptation.PeerGroup.GCL.Annotations;
using System;
using System.Collections.Generic;
namespace Adaptation.Eaf.Core;
public class Backbone
{
#pragma warning disable CA1822
#pragma warning disable CA2254
#pragma warning disable IDE0060
public const string STATE_ERROR = "Error";
public const string STATE_OFFLINE = "Offline";
public const string STATE_RUNNING = "Running";
public const string STATE_SHUTDOWN = "Shutting Down";
public const string STATE_STARTING = "Starting";
protected Backbone() { }
[NotNull]
public static Backbone Instance { get; }
[NotNull]
public ILoggingSetupManager LoggingConfigurationManager { get; set; }
public BackboneStatusCache Status { get; }
public bool IsAutomatedRestartActive { get; }
public bool IsReadyForRestart { get; }
public string StartTime { get; }
public string State { get; }
public string Name { get; }
public string ConfigurationServiceAddress { get; }
public string CellName { get; }
protected bool IsInitialized { get; set; }
protected Dictionary<string, BackboneComponent> BackboneComponents { get; }
public void AddBackboneComponent(BackboneComponent backboneComponent) { }
public bool ContainsBackboneComponent(string id) => throw new NotImplementedException();
[Obsolete("Use the capabilities exposed via the Status property -> GetAll. Will be removed with next major release.")]
public List<StatusItem> GetAllStatuses() => throw new NotImplementedException();
public BackboneComponent GetBackboneComponentById(string id) => throw new NotImplementedException();
public List<T> GetBackboneComponentsOfType<T>() => throw new NotImplementedException();
public List<BackboneComponent> GetBackboneComponentsOfType(Type type) => throw new NotImplementedException();
public void RegisterSubprocess(int pid) { }
[Obsolete("Use the capabilities exposed via the Status property -> SetValue. Will be removed with next major release.")]
public void SetStatus(string statusName, string statusValue) { }
[Obsolete("Use the capabilities exposed via the Status property -> SetValue. Will be removed with next major release.")]
public void SetStatus(BackboneComponent source, string statusName, string statusValue) { }
protected void CloseConnectionOfComponents(List<BackboneComponent> components) { }
protected virtual void StopAllComponents() { }
protected void StopComponents(List<BackboneComponent> components) { }
}
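The Backbone type above is a compile-time shim; a minimal usage sketch against its declared surface (editor-added illustration, not part of the commit) could look like this:

using Adaptation.Eaf.Core;

// Hypothetical caller; compiles against the stub, while the real EAF assembly supplies runtime behavior.
internal static class BackboneSketch
{
    internal static void Register(BackboneComponent backboneComponent)
    {
        Backbone backbone = Backbone.Instance;
        backbone.AddBackboneComponent(backboneComponent);
        bool isRunning = backbone.State == Backbone.STATE_RUNNING;
        System.Console.WriteLine($"{backbone.Name}: running={isRunning}");
    }
}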

@@ -0,0 +1,25 @@
using System;
namespace Adaptation.Eaf.Core.Smtp;
public class EmailMessage
{
#pragma warning disable CA2254
#pragma warning disable IDE0060
public EmailMessage() { }
public EmailMessage(string subject, string body, MailPriority priority = MailPriority.Normal) { }
public string Body { get; }
public MailPriority Priority { get; }
public string Subject { get; }
public EmailMessage PriorityHigh() => throw new NotImplementedException();
public EmailMessage PriorityLow() => throw new NotImplementedException();
public EmailMessage PriorityNormal() => throw new NotImplementedException();
public EmailMessage SetBody(string body) => throw new NotImplementedException();
public EmailMessage SetPriority(MailPriority priority) => throw new NotImplementedException();
public EmailMessage SetSubject(string subject) => throw new NotImplementedException();
}

@@ -0,0 +1,6 @@
namespace Adaptation.Eaf.Core.Smtp;
public interface ISmtp
{
void Send(EmailMessage message);
}
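Taken together with EmailMessage above, an ISmtp implementation would be consumed roughly like this (editor-added sketch, not part of the commit):

using Adaptation.Eaf.Core.Smtp;

// Hypothetical helper; EmailMessage's fluent setters and ISmtp.Send are the stubs declared above.
internal static class SmtpSketch
{
    internal static void Notify(ISmtp smtp, string subject, string body)
    {
        EmailMessage emailMessage = new EmailMessage(subject, body).PriorityHigh();
        smtp.Send(emailMessage);
    }
}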

@@ -0,0 +1,8 @@
namespace Adaptation.Eaf.Core.Smtp;
public enum MailPriority
{
Low = 0,
Normal = 1,
High = 2
}

@@ -0,0 +1,5 @@
namespace Adaptation.Eaf.EquipmentCore.Control;
public class ChangeDataCollectionHandler
{
}

@@ -0,0 +1,5 @@
namespace Adaptation.Eaf.EquipmentCore.Control;
public class DataCollectionRequest
{
}

@@ -0,0 +1,5 @@
namespace Adaptation.Eaf.EquipmentCore.Control;
public class EquipmentEvent
{
}

@@ -0,0 +1,5 @@
namespace Adaptation.Eaf.EquipmentCore.Control;
public class EquipmentException
{
}

@@ -0,0 +1,5 @@
namespace Adaptation.Eaf.EquipmentCore.Control;
public class EquipmentSelfDescription
{
}

@@ -0,0 +1,5 @@
namespace Adaptation.Eaf.EquipmentCore.Control;
public class GetParameterValuesHandler
{
}

@@ -0,0 +1,5 @@
namespace Adaptation.Eaf.EquipmentCore.Control;
public interface IConnectionControl
{
}

@@ -0,0 +1,5 @@
namespace Adaptation.Eaf.EquipmentCore.Control;
public interface IDataTracingHandler
{
}

@@ -0,0 +1,5 @@
namespace Adaptation.Eaf.EquipmentCore.Control;
public interface IEquipmentCommandService
{
}

@@ -0,0 +1,15 @@
using Adaptation.PeerGroup.GCL.Annotations;
namespace Adaptation.Eaf.EquipmentCore.Control;
public interface IEquipmentControl : IPackageSource
{
[NotNull]
IEquipmentSelfDescriptionBuilder SelfDescriptionBuilder { get; }
[NotNull]
IEquipmentDataCollection DataCollection { get; }
[NotNull]
IEquipmentCommandService Commands { get; }
[NotNull]
IConnectionControl Connection { get; }
}

@@ -0,0 +1,5 @@
namespace Adaptation.Eaf.EquipmentCore.Control;
public interface IEquipmentSelfDescriptionBuilder
{
}

@@ -0,0 +1,5 @@
namespace Adaptation.Eaf.EquipmentCore.Control;
public interface IPackage
{
}

@@ -0,0 +1,5 @@
namespace Adaptation.Eaf.EquipmentCore.Control;
public interface ISelfDescriptionLookup
{
}

@@ -0,0 +1,5 @@
namespace Adaptation.Eaf.EquipmentCore.Control;
public interface IVirtualParameterValuesHandler
{
}

@@ -0,0 +1,5 @@
namespace Adaptation.Eaf.EquipmentCore.Control;
public class SetParameterValuesHandler
{
}

@@ -0,0 +1,5 @@
namespace Adaptation.Eaf.EquipmentCore.Control;
public class TraceRequest
{
}

@@ -0,0 +1,38 @@
using Adaptation.Eaf.EquipmentCore.DataCollection.Reporting;
using Adaptation.Eaf.EquipmentCore.SelfDescription.ElementDescription;
using System;
using System.Collections.Generic;
namespace Adaptation.Eaf.EquipmentCore.Control;
public interface IEquipmentDataCollection
{
IVirtualParameterValuesHandler VirtualParameterValuesHandler { get; }
ISelfDescriptionLookup SelfDescriptionLookup { get; }
EquipmentSelfDescription SelfDescription { get; }
IEnumerable<DataCollectionRequest> ActiveRequests { get; }
IDataTracingHandler DataTracingHandler { get; }
ParameterValue CreateParameterValue(EquipmentParameter parameter, object value);
void NotifyDataTracingAvailable(bool isAvailable);
void RegisterChangeDataCollectionHandler(ChangeDataCollectionHandler handler);
void RegisterDataTracingHandler(IDataTracingHandler handler);
void RegisterGetParameterValuesHandler(GetParameterValuesHandler handler);
void RegisterSetParameterValuesHandler(SetParameterValuesHandler handler);
void TriggerDeactivate(DataCollectionRequest deactivateRequest);
void TriggerEvent(EquipmentEvent equipmentEvent, IEnumerable<ParameterValue> parameters);
void TriggerEvent(EquipmentEvent equipmentEvent, IEnumerable<ParameterValue> parameters, IPackage sourcePackage);
void TriggerExceptionClear(EquipmentException equipmentException, IEnumerable<ParameterValue> parameters);
void TriggerExceptionClear(EquipmentException equipmentException, IEnumerable<ParameterValue> parameters, IPackage sourcePackage);
void TriggerExceptionClear(EquipmentException equipmentException, IEnumerable<ParameterValue> parameters, string severityOverride, string descriptionOverride);
void TriggerExceptionClear(EquipmentException equipmentException, IEnumerable<ParameterValue> parameters, string severityOverride, string descriptionOverride, IPackage sourcePackage);
void TriggerExceptionSet(EquipmentException equipmentException, IEnumerable<ParameterValue> parameters, string severityOverride, string descriptionOverride, IPackage sourcePackage);
void TriggerExceptionSet(EquipmentException equipmentException, IEnumerable<ParameterValue> parameters, string severityOverride, string descriptionOverride);
void TriggerExceptionSet(EquipmentException equipmentException, IEnumerable<ParameterValue> parameters, IPackage sourcePackage);
void TriggerExceptionSet(EquipmentException equipmentException, IEnumerable<ParameterValue> parameters);
void TriggerPerformanceRestored();
void TriggerPerformanceWarning();
void TriggerTraceSample(TraceRequest traceRequest, long sampleId, IEnumerable<ParameterValue> parameters);
void TriggerTraceSample(TraceRequest traceRequest, long sampleId, IEnumerable<ParameterValue> parameters, IPackage sourcePackage);
void TriggerTraceSample(TraceRequest traceRequest, long sampleId, IEnumerable<ParameterValue> parameters, DateTime equipmentTimeStamp);
}
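As a rough illustration of how this interface composes with the parameter types declared later in this commit (editor-added sketch, not part of the commit):

using Adaptation.Eaf.EquipmentCore.Control;
using Adaptation.Eaf.EquipmentCore.DataCollection.Reporting;
using Adaptation.Eaf.EquipmentCore.SelfDescription.ElementDescription;
using System.Collections.Generic;

// Hypothetical publisher: builds a ParameterValue through the interface and raises an event with it.
internal static class DataCollectionSketch
{
    internal static void Publish(IEquipmentDataCollection dataCollection, EquipmentEvent equipmentEvent, EquipmentParameter equipmentParameter, object value)
    {
        ParameterValue parameterValue = dataCollection.CreateParameterValue(equipmentParameter, value);
        List<ParameterValue> parameterValues = new() { parameterValue };
        dataCollection.TriggerEvent(equipmentEvent, parameterValues);
    }
}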

@@ -0,0 +1,5 @@
namespace Adaptation.Eaf.EquipmentCore.Control;
public interface IPackageSource
{
}

@@ -0,0 +1,25 @@
using Adaptation.Eaf.EquipmentCore.SelfDescription.ElementDescription;
using Adaptation.PeerGroup.GCL.Annotations;
using System;
namespace Adaptation.Eaf.EquipmentCore.DataCollection.Reporting;
public class ParameterValue
{
#pragma warning disable CA1822
#pragma warning disable CA2254
#pragma warning disable IDE0060
public ParameterValue(EquipmentParameter definition, object value) { }
public ParameterValue(EquipmentParameter definition, object value, DateTime timestamp) { }
public virtual object Value { get; protected internal set; }
[NotNull]
public EquipmentParameter Definition { get; }
public DateTime Timestamp { get; protected set; }
public virtual ParameterValue Clone(EquipmentParameter newDefinition) => throw new NotImplementedException();
public override string ToString() => base.ToString();
}

@@ -0,0 +1,27 @@
using Adaptation.Eaf.EquipmentCore.SelfDescription.ParameterTypes;
namespace Adaptation.Eaf.EquipmentCore.SelfDescription.ElementDescription;
public class EquipmentParameter
{
#pragma warning disable CA2254
#pragma warning disable IDE0060
public EquipmentParameter(EquipmentParameter source, ParameterTypeDefinition typeDefinition) { }
public EquipmentParameter(string name, ParameterTypeDefinition typeDefinition, string description, bool isTransient = false, bool isReadOnly = true) { }
public EquipmentParameter(string id, string name, ParameterTypeDefinition typeDefinition, string description, bool isTransient = false, bool isReadOnly = true) { }
public string Name { get; }
public string Id { get; }
public string Description { get; }
public string SourcePath { get; }
public string SourceEquipment { get; }
public ParameterTypeDefinition TypeDefinition { get; }
public bool IsTransient { get; }
public bool IsReadOnly { get; }
public override string ToString() => base.ToString();
public string ToStringWithDetails() => base.ToString();
}

@@ -0,0 +1,16 @@
namespace Adaptation.Eaf.EquipmentCore.SelfDescription.ParameterTypes;
public class Field
{
#pragma warning disable CA2254
#pragma warning disable IDE0060
public Field(string name, string description, bool canBeNull, ParameterTypeDefinition typeDefinition) { }
public string Name { get; }
public string Description { get; }
public ParameterTypeDefinition TypeDefinition { get; }
public bool CanBeNull { get; }
}

@@ -0,0 +1,16 @@
namespace Adaptation.Eaf.EquipmentCore.SelfDescription.ParameterTypes;
public abstract class ParameterTypeDefinition
{
#pragma warning disable CA2254
#pragma warning disable IDE0060
public ParameterTypeDefinition(string name, string description) { }
public string Name { get; }
public string Description { get; }
public override string ToString() => base.ToString();
}

@@ -0,0 +1,16 @@
using System.Collections.Generic;
namespace Adaptation.Eaf.EquipmentCore.SelfDescription.ParameterTypes;
public class StructuredType : ParameterTypeDefinition
{
#pragma warning disable CA1822
#pragma warning disable CA2254
#pragma warning disable IDE0060
public StructuredType(string name, string description, IList<Field> fields) : base(name, description) { }
public IList<Field> Fields { get; }
}

@@ -0,0 +1,5 @@
namespace Adaptation.Eaf.Management.ConfigurationData.CellAutomation;
public interface IConfigurationObject
{
}

@@ -0,0 +1,30 @@
using System;
namespace Adaptation.Eaf.Management.ConfigurationData.CellAutomation;
[System.Runtime.Serialization.DataContract(IsReference = true)]
public class ModelObjectParameterDefinition : IConfigurationObject
{
#pragma warning disable CA2254
#pragma warning disable IDE0060
public ModelObjectParameterDefinition() { }
public ModelObjectParameterDefinition(string name, ModelObjectParameterType valueType, object defaultValue) { }
public ModelObjectParameterDefinition(string name, Type enumType, object defaultValue) { }
[System.Runtime.Serialization.DataMember]
public virtual long Id { get; set; }
[System.Runtime.Serialization.DataMember]
public virtual string Name { get; set; }
[System.Runtime.Serialization.DataMember]
public virtual string Value { get; set; }
[System.Runtime.Serialization.DataMember]
public virtual ModelObjectParameterType ValueType { get; set; }
[System.Runtime.Serialization.DataMember]
public virtual string EnumType { get; set; }
public virtual ModelObjectParameterDefinition Clone() => null;
public virtual bool IsValidValue(string value) => false;
}
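Configuration lookups against a list of these definitions could be sketched as follows (editor-added illustration, not part of the commit; the helper name is hypothetical):

using Adaptation.Eaf.Management.ConfigurationData.CellAutomation;
using System.Collections.Generic;

// Hypothetical lookup: returns the Value of the first definition whose Name matches, or null when absent.
internal static class ModelObjectParameterSketch
{
    internal static string GetValueOrNull(IList<ModelObjectParameterDefinition> modelObjectParameters, string name)
    {
        foreach (ModelObjectParameterDefinition modelObjectParameter in modelObjectParameters)
        {
            if (modelObjectParameter.Name == name)
                return modelObjectParameter.Value;
        }
        return null;
    }
}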

@@ -0,0 +1,16 @@
namespace Adaptation.Eaf.Management.ConfigurationData.CellAutomation;
public enum ModelObjectParameterType
{
String = 0,
Bool = 1,
Byte = 2,
SignedByte = 3,
Integer = 4,
UnsignedInteger = 5,
LongInteger = 6,
UnsignedLongInteger = 7,
Double = 8,
Float = 9,
Enum = 10
}

@@ -0,0 +1,43 @@
using Adaptation.PeerGroup.GCL.SecsDriver;
using System;
namespace Adaptation.Eaf.Management.ConfigurationData.Semiconductor.CellInstances;
[System.Runtime.Serialization.DataContract]
public class SecsConnectionConfiguration
{
public SecsConnectionConfiguration() { }
[System.Runtime.Serialization.DataMember]
public virtual TimeSpan T6HsmsControlMessage { get; set; }
[System.Runtime.Serialization.DataMember]
public virtual TimeSpan T5ConnectionSeperation { get; set; }
[System.Runtime.Serialization.DataMember]
public virtual TimeSpan T4InterBlock { get; set; }
[System.Runtime.Serialization.DataMember]
public virtual TimeSpan T3MessageReply { get; set; }
[System.Runtime.Serialization.DataMember]
public virtual TimeSpan T2Protocol { get; set; }
[System.Runtime.Serialization.DataMember]
public virtual TimeSpan T1InterCharacter { get; set; }
[System.Runtime.Serialization.DataMember]
public virtual SerialBaudRate? BaudRate { get; set; }
[System.Runtime.Serialization.DataMember]
public virtual SecsTransportType? PortType { get; set; }
[System.Runtime.Serialization.DataMember]
public virtual long? Port { get; set; }
[System.Runtime.Serialization.DataMember]
public virtual TimeSpan LinkTestTimer { get; set; }
[System.Runtime.Serialization.DataMember]
public virtual string Host { get; set; }
[System.Runtime.Serialization.DataMember]
public virtual long? DeviceId { get; set; }
[System.Runtime.Serialization.DataMember]
public virtual HsmsSessionMode? SessionMode { get; set; }
[System.Runtime.Serialization.DataMember]
public virtual HsmsConnectionMode? ConnectionMode { get; set; }
[System.Runtime.Serialization.DataMember]
public virtual TimeSpan T7ConnectionIdle { get; set; }
[System.Runtime.Serialization.DataMember]
public virtual TimeSpan T8NetworkIntercharacter { get; set; }
}

@@ -0,0 +1,135 @@
using Adaptation.Eaf.Management.ConfigurationData.CellAutomation;
using Adaptation.Ifx.Eaf.EquipmentConnector.File.Configuration;
using Adaptation.Shared;
using Adaptation.Shared.Duplicator;
using Adaptation.Shared.Methods;
using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Text.Json;
namespace Adaptation.FileHandlers.APC;
public class FileRead : Shared.FileRead, IFileRead
{
public FileRead(ISMTP smtp, Dictionary<string, string> fileParameter, string cellInstanceName, int? connectionCount, string cellInstanceConnectionName, FileConnectorConfiguration fileConnectorConfiguration, string equipmentTypeName, string parameterizedModelObjectDefinitionType, IList<ModelObjectParameterDefinition> modelObjectParameters, string equipmentDictionaryName, Dictionary<string, List<long>> dummyRuns, Dictionary<long, List<string>> staticRuns, bool useCyclicalForDescription, bool isEAFHosted) :
base(new Description(), false, smtp, fileParameter, cellInstanceName, connectionCount, cellInstanceConnectionName, fileConnectorConfiguration, equipmentTypeName, parameterizedModelObjectDefinitionType, modelObjectParameters, equipmentDictionaryName, dummyRuns, staticRuns, useCyclicalForDescription, isEAFHosted: connectionCount is null)
{
_MinFileLength = 10;
_NullData = string.Empty;
_Logistics = new(this);
if (_FileParameter is null)
throw new Exception(cellInstanceConnectionName);
if (_ModelObjectParameterDefinitions is null)
throw new Exception(cellInstanceConnectionName);
if (!_IsDuplicator)
throw new Exception(cellInstanceConnectionName);
}
void IFileRead.Move(Tuple<string, Test[], JsonElement[], List<FileInfo>> extractResults, Exception exception)
{
bool isErrorFile = exception is not null;
if (!isErrorFile && !string.IsNullOrEmpty(_Logistics.ReportFullPath))
{
FileInfo fileInfo = new(_Logistics.ReportFullPath);
if (fileInfo.Exists && fileInfo.LastWriteTime < fileInfo.CreationTime)
File.SetLastWriteTime(_Logistics.ReportFullPath, fileInfo.CreationTime);
}
Move(extractResults);
}
void IFileRead.WaitForThread() => WaitForThread(thread: null, threadExceptions: null);
string IFileRead.GetEventDescription()
{
string result = _Description.GetEventDescription();
return result;
}
List<string> IFileRead.GetHeaderNames()
{
List<string> results = _Description.GetHeaderNames();
return results;
}
string[] IFileRead.Move(Tuple<string, Test[], JsonElement[], List<FileInfo>> extractResults, string to, string from, string resolvedFileLocation, Exception exception)
{
string[] results = Move(extractResults, to, from, resolvedFileLocation, exception);
return results;
}
JsonProperty[] IFileRead.GetDefault()
{
JsonProperty[] results = _Description.GetDefault(this, _Logistics);
return results;
}
Dictionary<string, string> IFileRead.GetDisplayNamesJsonElement()
{
Dictionary<string, string> results = _Description.GetDisplayNamesJsonElement(this);
return results;
}
List<IDescription> IFileRead.GetDescriptions(IFileRead fileRead, List<Test> tests, IProcessData processData)
{
List<IDescription> results = _Description.GetDescriptions(fileRead, _Logistics, tests, processData);
return results;
}
Tuple<string, Test[], JsonElement[], List<FileInfo>> IFileRead.GetExtractResult(string reportFullPath, string eventName)
{
Tuple<string, Test[], JsonElement[], List<FileInfo>> results;
if (string.IsNullOrEmpty(eventName))
throw new Exception();
_ReportFullPath = reportFullPath;
DateTime dateTime = DateTime.Now;
results = GetExtractResult(reportFullPath, dateTime);
if (results.Item3 is null)
results = new Tuple<string, Test[], JsonElement[], List<FileInfo>>(results.Item1, Array.Empty<Test>(), JsonSerializer.Deserialize<JsonElement[]>("[]"), results.Item4);
if (results.Item3.Length > 0 && _IsEAFHosted)
WritePDSF(this, results.Item3);
UpdateLastTicksDuration(DateTime.Now.Ticks - dateTime.Ticks);
return results;
}
Tuple<string, Test[], JsonElement[], List<FileInfo>> IFileRead.ReExtract()
{
Tuple<string, Test[], JsonElement[], List<FileInfo>> results;
List<string> headerNames = _Description.GetHeaderNames();
Dictionary<string, string> keyValuePairs = _Description.GetDisplayNamesJsonElement(this);
results = ReExtract(this, headerNames, keyValuePairs);
return results;
}
private void FileCopy<T>(string reportFullPath, DateTime dateTime, List<T> descriptions) where T : Shared.Properties.IDescription
{
bool isDummyRun = false;
List<(Shared.Properties.IScopeInfo, string)> collection = new();
string successDirectory = _FileConnectorConfiguration.AlternateTargetFolder;
string fileNameAfterUnderscoreSplit = GetFileNameAfterUnderscoreSplit(reportFullPath);
string duplicateDirectory = Path.Combine(_FileConnectorConfiguration.TargetFileLocation, fileNameAfterUnderscoreSplit);
if (!Directory.Exists(duplicateDirectory))
_ = Directory.CreateDirectory(duplicateDirectory);
string duplicateFile = Path.Combine(duplicateDirectory, Path.GetFileName(reportFullPath));
File.Copy(reportFullPath, duplicateFile, overwrite: true);
WaitForFileConsumption(dateTime, descriptions, isDummyRun, successDirectory, duplicateDirectory, collection, duplicateFile);
}
private Tuple<string, Test[], JsonElement[], List<FileInfo>> GetExtractResult(string reportFullPath, DateTime dateTime)
{
Tuple<string, Test[], JsonElement[], List<FileInfo>> results;
Tuple<string, string[], string[]> pdsf = ProcessDataStandardFormat.GetLogisticsColumnsAndBody(reportFullPath);
_Logistics = new Logistics(reportFullPath, pdsf.Item1);
SetFileParameterLotIDToLogisticsMID();
JsonElement[] jsonElements = ProcessDataStandardFormat.GetArray(pdsf);
List<Shared.Properties.IDescription> descriptions = GetDuplicatorDescriptions(jsonElements);
Test[] tests = (from l in descriptions select (Test)l.Test).ToArray();
if (_IsEAFHosted && _FileConnectorConfiguration.FileScanningIntervalInSeconds > 0)
FileCopy(reportFullPath, dateTime, descriptions);
results = new Tuple<string, Test[], JsonElement[], List<FileInfo>>(pdsf.Item1, tests, jsonElements, new List<FileInfo>());
return results;
}
}

@@ -0,0 +1,159 @@
using Adaptation.Eaf.Management.ConfigurationData.CellAutomation;
using Adaptation.Ifx.Eaf.EquipmentConnector.File.Configuration;
using Adaptation.Shared;
using Adaptation.Shared.Duplicator;
using Adaptation.Shared.Methods;
using System;
using System.Collections.Generic;
using System.Globalization;
using System.IO;
using System.Linq;
using System.Text.Json;
namespace Adaptation.FileHandlers.Archive;
public class FileRead : Shared.FileRead, IFileRead
{
private readonly string _JobIdParentDirectory;
private readonly string _JobIdArchiveParentDirectory;
public FileRead(ISMTP smtp, Dictionary<string, string> fileParameter, string cellInstanceName, int? connectionCount, string cellInstanceConnectionName, FileConnectorConfiguration fileConnectorConfiguration, string equipmentTypeName, string parameterizedModelObjectDefinitionType, IList<ModelObjectParameterDefinition> modelObjectParameters, string equipmentDictionaryName, Dictionary<string, List<long>> dummyRuns, Dictionary<long, List<string>> staticRuns, bool useCyclicalForDescription, bool isEAFHosted) :
base(new Description(), false, smtp, fileParameter, cellInstanceName, connectionCount, cellInstanceConnectionName, fileConnectorConfiguration, equipmentTypeName, parameterizedModelObjectDefinitionType, modelObjectParameters, equipmentDictionaryName, dummyRuns, staticRuns, useCyclicalForDescription, isEAFHosted: connectionCount is null)
{
_MinFileLength = 10;
_NullData = string.Empty;
_Logistics = new(this);
if (_FileParameter is null)
throw new Exception(cellInstanceConnectionName);
if (_ModelObjectParameterDefinitions is null)
throw new Exception(cellInstanceConnectionName);
if (!_IsDuplicator)
throw new Exception(cellInstanceConnectionName);
_JobIdParentDirectory = GetJobIdParentDirectory(_FileConnectorConfiguration.SourceFileLocation);
_JobIdArchiveParentDirectory = GetJobIdParentDirectory(_FileConnectorConfiguration.TargetFileLocation);
}
void IFileRead.Move(Tuple<string, Test[], JsonElement[], List<FileInfo>> extractResults, Exception exception) => Move(extractResults);
void IFileRead.WaitForThread() => WaitForThread(thread: null, threadExceptions: null);
string IFileRead.GetEventDescription()
{
string result = _Description.GetEventDescription();
return result;
}
List<string> IFileRead.GetHeaderNames()
{
List<string> results = _Description.GetHeaderNames();
return results;
}
string[] IFileRead.Move(Tuple<string, Test[], JsonElement[], List<FileInfo>> extractResults, string to, string from, string resolvedFileLocation, Exception exception)
{
string[] results = Move(extractResults, to, from, resolvedFileLocation, exception);
return results;
}
JsonProperty[] IFileRead.GetDefault()
{
JsonProperty[] results = _Description.GetDefault(this, _Logistics);
return results;
}
Dictionary<string, string> IFileRead.GetDisplayNamesJsonElement()
{
Dictionary<string, string> results = _Description.GetDisplayNamesJsonElement(this);
return results;
}
List<IDescription> IFileRead.GetDescriptions(IFileRead fileRead, List<Test> tests, IProcessData processData)
{
List<IDescription> results = _Description.GetDescriptions(fileRead, _Logistics, tests, processData);
return results;
}
Tuple<string, Test[], JsonElement[], List<FileInfo>> IFileRead.GetExtractResult(string reportFullPath, string eventName)
{
Tuple<string, Test[], JsonElement[], List<FileInfo>> results;
if (string.IsNullOrEmpty(eventName))
throw new Exception();
_ReportFullPath = reportFullPath;
DateTime dateTime = DateTime.Now;
results = GetExtractResult(reportFullPath, dateTime);
if (results.Item3 is null)
results = new Tuple<string, Test[], JsonElement[], List<FileInfo>>(results.Item1, Array.Empty<Test>(), JsonSerializer.Deserialize<JsonElement[]>("[]"), results.Item4);
if (results.Item3.Length > 0 && _IsEAFHosted)
WritePDSF(this, results.Item3);
UpdateLastTicksDuration(DateTime.Now.Ticks - dateTime.Ticks);
return results;
}
Tuple<string, Test[], JsonElement[], List<FileInfo>> IFileRead.ReExtract()
{
Tuple<string, Test[], JsonElement[], List<FileInfo>> results;
List<string> headerNames = _Description.GetHeaderNames();
Dictionary<string, string> keyValuePairs = _Description.GetDisplayNamesJsonElement(this);
results = ReExtract(this, headerNames, keyValuePairs);
return results;
}
private static IEnumerable<string> GetDirectoriesRecursively(string path, string directoryNameSegment = null)
{
Queue<string> queue = new();
queue.Enqueue(path);
while (queue.Count > 0)
{
path = queue.Dequeue();
foreach (string subDirectory in Directory.GetDirectories(path))
{
queue.Enqueue(subDirectory);
if (string.IsNullOrEmpty(directoryNameSegment) || Path.GetFileName(subDirectory).Contains(directoryNameSegment))
yield return subDirectory;
}
}
}
#pragma warning disable IDE0060
private void MoveArchive(string reportFullPath, DateTime dateTime)
#pragma warning restore IDE0060
{
string logisticsSequence = _Logistics.Sequence.ToString();
string weekOfYear = _Calendar.GetWeekOfYear(_Logistics.DateTimeFromSequence, CalendarWeekRule.FirstDay, DayOfWeek.Sunday).ToString("00");
string weekDirectory = $"{_Logistics.DateTimeFromSequence:yyyy}_Week_{weekOfYear}{@"\"}{_Logistics.DateTimeFromSequence:yyyy-MM-dd}";
string destinationArchiveDirectory = Path.Combine(_JobIdArchiveParentDirectory, _Logistics.JobID, weekDirectory);
if (!Directory.Exists(destinationArchiveDirectory))
_ = Directory.CreateDirectory(destinationArchiveDirectory);
string jobIdDirectory = Path.Combine(_JobIdParentDirectory, _Logistics.JobID);
if (!Directory.Exists(jobIdDirectory))
_ = Directory.CreateDirectory(jobIdDirectory);
if (!Directory.GetDirectories(jobIdDirectory).Any())
File.Copy(reportFullPath, Path.Combine(destinationArchiveDirectory, Path.GetFileName(reportFullPath)));
else
{
string[] matchDirectories = GetDirectoriesRecursively(jobIdDirectory, logisticsSequence).ToArray();
if (matchDirectories.Length != 1)
throw new Exception("Didn't find directory by logistics sequence");
string sourceDirectory = Path.GetDirectoryName(matchDirectories[0]);
destinationArchiveDirectory = Path.Combine(destinationArchiveDirectory, Path.GetFileName(sourceDirectory));
Directory.Move(sourceDirectory, destinationArchiveDirectory);
}
}
private Tuple<string, Test[], JsonElement[], List<FileInfo>> GetExtractResult(string reportFullPath, DateTime dateTime)
{
Tuple<string, Test[], JsonElement[], List<FileInfo>> results;
Tuple<string, string[], string[]> pdsf = ProcessDataStandardFormat.GetLogisticsColumnsAndBody(reportFullPath);
_Logistics = new Logistics(reportFullPath, pdsf.Item1);
SetFileParameterLotIDToLogisticsMID();
JsonElement[] jsonElements = ProcessDataStandardFormat.GetArray(pdsf);
List<Shared.Properties.IDescription> descriptions = GetDuplicatorDescriptions(jsonElements);
Test[] tests = (from l in descriptions select (Test)l.Test).ToArray();
if (_IsEAFHosted && _FileConnectorConfiguration.FileScanningIntervalInSeconds > 0)
MoveArchive(reportFullPath, dateTime);
results = new Tuple<string, Test[], JsonElement[], List<FileInfo>>(pdsf.Item1, tests, jsonElements, new List<FileInfo>());
return results;
}
}

@@ -0,0 +1,34 @@
using Adaptation.Eaf.Management.ConfigurationData.CellAutomation;
using Adaptation.Ifx.Eaf.EquipmentConnector.File.Configuration;
using Adaptation.Shared.Methods;
using System;
using System.Collections.Generic;
namespace Adaptation.FileHandlers;
public class CellInstanceConnectionName
{
internal static IFileRead Get(ISMTP smtp, Dictionary<string, string> fileParameter, string cellInstanceName, string cellInstanceConnectionName, FileConnectorConfiguration fileConnectorConfiguration, string equipmentTypeName, string parameterizedModelObjectDefinitionType, IList<ModelObjectParameterDefinition> modelObjectParameters, string equipmentDictionaryName, Dictionary<string, List<long>> dummyRuns, Dictionary<long, List<string>> staticRuns, bool useCyclicalForDescription, int? connectionCount)
{
IFileRead result = cellInstanceConnectionName switch
{
nameof(APC) => new APC.FileRead(smtp, fileParameter, cellInstanceName, connectionCount, cellInstanceConnectionName, fileConnectorConfiguration, equipmentTypeName, parameterizedModelObjectDefinitionType, modelObjectParameters, equipmentDictionaryName, dummyRuns, staticRuns, useCyclicalForDescription, isEAFHosted: connectionCount is null),
nameof(Archive) => new Archive.FileRead(smtp, fileParameter, cellInstanceName, connectionCount, cellInstanceConnectionName, fileConnectorConfiguration, equipmentTypeName, parameterizedModelObjectDefinitionType, modelObjectParameters, equipmentDictionaryName, dummyRuns, staticRuns, useCyclicalForDescription, isEAFHosted: connectionCount is null),
nameof(DownloadSRPxFile) => new DownloadSRPxFile.FileRead(smtp, fileParameter, cellInstanceName, connectionCount, cellInstanceConnectionName, fileConnectorConfiguration, equipmentTypeName, parameterizedModelObjectDefinitionType, modelObjectParameters, equipmentDictionaryName, dummyRuns, staticRuns, useCyclicalForDescription, isEAFHosted: connectionCount is null),
nameof(Dummy) => new Dummy.FileRead(smtp, fileParameter, cellInstanceName, connectionCount, cellInstanceConnectionName, fileConnectorConfiguration, equipmentTypeName, parameterizedModelObjectDefinitionType, modelObjectParameters, equipmentDictionaryName, dummyRuns, staticRuns, useCyclicalForDescription, isEAFHosted: connectionCount is null),
nameof(IQSSi) => new IQSSi.FileRead(smtp, fileParameter, cellInstanceName, connectionCount, cellInstanceConnectionName, fileConnectorConfiguration, equipmentTypeName, parameterizedModelObjectDefinitionType, modelObjectParameters, equipmentDictionaryName, dummyRuns, staticRuns, useCyclicalForDescription, isEAFHosted: connectionCount is null),
nameof(MoveMatchingFiles) => new MoveMatchingFiles.FileRead(smtp, fileParameter, cellInstanceName, connectionCount, cellInstanceConnectionName, fileConnectorConfiguration, equipmentTypeName, parameterizedModelObjectDefinitionType, modelObjectParameters, equipmentDictionaryName, dummyRuns, staticRuns, useCyclicalForDescription, isEAFHosted: connectionCount is null),
nameof(OpenInsight) => new OpenInsight.FileRead(smtp, fileParameter, cellInstanceName, connectionCount, cellInstanceConnectionName, fileConnectorConfiguration, equipmentTypeName, parameterizedModelObjectDefinitionType, modelObjectParameters, equipmentDictionaryName, dummyRuns, staticRuns, useCyclicalForDescription, isEAFHosted: connectionCount is null),
nameof(OpenInsightMetrologyViewer) => new OpenInsightMetrologyViewer.FileRead(smtp, fileParameter, cellInstanceName, connectionCount, cellInstanceConnectionName, fileConnectorConfiguration, equipmentTypeName, parameterizedModelObjectDefinitionType, modelObjectParameters, equipmentDictionaryName, dummyRuns, staticRuns, useCyclicalForDescription, isEAFHosted: connectionCount is null),
nameof(OpenInsightMetrologyViewerAttachments) => new OpenInsightMetrologyViewerAttachments.FileRead(smtp, fileParameter, cellInstanceName, connectionCount, cellInstanceConnectionName, fileConnectorConfiguration, equipmentTypeName, parameterizedModelObjectDefinitionType, modelObjectParameters, equipmentDictionaryName, dummyRuns, staticRuns, useCyclicalForDescription, isEAFHosted: connectionCount is null),
nameof(Processed) => new Processed.FileRead(smtp, fileParameter, cellInstanceName, connectionCount, cellInstanceConnectionName, fileConnectorConfiguration, equipmentTypeName, parameterizedModelObjectDefinitionType, modelObjectParameters, equipmentDictionaryName, dummyRuns, staticRuns, useCyclicalForDescription, isEAFHosted: connectionCount is null),
nameof(SPaCe) => new SPaCe.FileRead(smtp, fileParameter, cellInstanceName, connectionCount, cellInstanceConnectionName, fileConnectorConfiguration, equipmentTypeName, parameterizedModelObjectDefinitionType, modelObjectParameters, equipmentDictionaryName, dummyRuns, staticRuns, useCyclicalForDescription, isEAFHosted: connectionCount is null),
nameof(csv) => new csv.FileRead(smtp, fileParameter, cellInstanceName, connectionCount, cellInstanceConnectionName, fileConnectorConfiguration, equipmentTypeName, parameterizedModelObjectDefinitionType, modelObjectParameters, equipmentDictionaryName, dummyRuns, staticRuns, useCyclicalForDescription, isEAFHosted: connectionCount is null),
nameof(json) => new json.FileRead(smtp, fileParameter, cellInstanceName, connectionCount, cellInstanceConnectionName, fileConnectorConfiguration, equipmentTypeName, parameterizedModelObjectDefinitionType, modelObjectParameters, equipmentDictionaryName, dummyRuns, staticRuns, useCyclicalForDescription, isEAFHosted: connectionCount is null),
_ => throw new Exception($"\"{cellInstanceConnectionName}\" not mapped")
};
return result;
}
}

View File

@ -0,0 +1,273 @@
using Adaptation.Eaf.Management.ConfigurationData.CellAutomation;
using Adaptation.Ifx.Eaf.EquipmentConnector.File.Configuration;
using Adaptation.Shared;
using Adaptation.Shared.Duplicator;
using Adaptation.Shared.Methods;
using System;
using System.Collections.Generic;
using System.Diagnostics;
using System.Globalization;
using System.IO;
using System.Linq;
using System.Net.Http;
using System.Text.Json;
using System.Threading;
namespace Adaptation.FileHandlers.DownloadSRPxFile;
public class FileRead : Shared.FileRead, IFileRead
{
private readonly Timer _Timer;
private readonly HttpClient _HttpClient;
private readonly string _StaticFileServer;
public FileRead(ISMTP smtp, Dictionary<string, string> fileParameter, string cellInstanceName, int? connectionCount, string cellInstanceConnectionName, FileConnectorConfiguration fileConnectorConfiguration, string equipmentTypeName, string parameterizedModelObjectDefinitionType, IList<ModelObjectParameterDefinition> modelObjectParameters, string equipmentDictionaryName, Dictionary<string, List<long>> dummyRuns, Dictionary<long, List<string>> staticRuns, bool useCyclicalForDescription, bool isEAFHosted) :
base(new Description(), false, smtp, fileParameter, cellInstanceName, connectionCount, cellInstanceConnectionName, fileConnectorConfiguration, equipmentTypeName, parameterizedModelObjectDefinitionType, modelObjectParameters, equipmentDictionaryName, dummyRuns, staticRuns, useCyclicalForDescription, isEAFHosted: connectionCount is null)
{
_MinFileLength = 10;
_NullData = string.Empty;
_Logistics = new(this);
if (_FileParameter is null)
throw new Exception(cellInstanceConnectionName);
if (_ModelObjectParameterDefinitions is null)
throw new Exception(cellInstanceConnectionName);
if (_IsDuplicator)
throw new Exception(cellInstanceConnectionName);
_HttpClient = new();
_StaticFileServer = GetPropertyValue(cellInstanceConnectionName, modelObjectParameters, string.Concat("CellInstance.", cellInstanceName, ".StaticFileServer"));
if (!Debugger.IsAttached && fileConnectorConfiguration.PreProcessingMode != FileConnectorConfiguration.PreProcessingModeEnum.Process)
_Timer = new Timer(Callback, null, (int)(fileConnectorConfiguration.FileScanningIntervalInSeconds * 1000), Timeout.Infinite);
else
{
_Timer = new Timer(Callback, null, Timeout.Infinite, Timeout.Infinite);
Callback(null);
}
}
void IFileRead.Move(Tuple<string, Test[], JsonElement[], List<FileInfo>> extractResults, Exception exception) => Move(extractResults);
void IFileRead.WaitForThread() => WaitForThread(thread: null, threadExceptions: null);
string IFileRead.GetEventDescription()
{
string result = _Description.GetEventDescription();
return result;
}
List<string> IFileRead.GetHeaderNames()
{
List<string> results = _Description.GetHeaderNames();
return results;
}
string[] IFileRead.Move(Tuple<string, Test[], JsonElement[], List<FileInfo>> extractResults, string to, string from, string resolvedFileLocation, Exception exception)
{
string[] results = Move(extractResults, to, from, resolvedFileLocation, exception);
return results;
}
JsonProperty[] IFileRead.GetDefault()
{
JsonProperty[] results = _Description.GetDefault(this, _Logistics);
return results;
}
Dictionary<string, string> IFileRead.GetDisplayNamesJsonElement()
{
Dictionary<string, string> results = _Description.GetDisplayNamesJsonElement(this);
return results;
}
List<IDescription> IFileRead.GetDescriptions(IFileRead fileRead, List<Test> tests, IProcessData processData)
{
List<IDescription> results = _Description.GetDescriptions(fileRead, _Logistics, tests, processData);
return results;
}
Tuple<string, Test[], JsonElement[], List<FileInfo>> IFileRead.GetExtractResult(string reportFullPath, string eventName)
{
Tuple<string, Test[], JsonElement[], List<FileInfo>> results;
if (string.IsNullOrEmpty(eventName))
throw new Exception();
_ReportFullPath = reportFullPath;
DateTime dateTime = DateTime.Now;
results = GetExtractResult(reportFullPath, dateTime);
if (results.Item3 is null)
results = new Tuple<string, Test[], JsonElement[], List<FileInfo>>(results.Item1, Array.Empty<Test>(), JsonSerializer.Deserialize<JsonElement[]>("[]"), results.Item4);
if (results.Item3.Length > 0 && _IsEAFHosted)
WritePDSF(this, results.Item3);
UpdateLastTicksDuration(DateTime.Now.Ticks - dateTime.Ticks);
return results;
}
Tuple<string, Test[], JsonElement[], List<FileInfo>> IFileRead.ReExtract()
{
Tuple<string, Test[], JsonElement[], List<FileInfo>> results;
List<string> headerNames = _Description.GetHeaderNames();
Dictionary<string, string> keyValuePairs = _Description.GetDisplayNamesJsonElement(this);
results = ReExtract(this, headerNames, keyValuePairs);
return results;
}
private Tuple<string, Test[], JsonElement[], List<FileInfo>> GetExtractResult(string reportFullPath, DateTime dateTime) => throw new Exception(string.Concat("See ", nameof(Callback)));
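// Note: despite the "Async" suffix, this method runs synchronously (every HTTP call blocks on .Result).
// It reads the static file server's JSON directory listing (descending one level into sub-directories),
// fetches the matched log, and scans its lines for run files newer than FileAgeThreshold. The first
// qualifying file that is not already present locally with the same LastWriteTime is downloaded into
// TargetFileLocation, stamped with the listed modification time, and copied to AlternateTargetFolder.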
private void DownloadSRPxFileAsync()
{
if (_HttpClient is null)
throw new Exception();
if (string.IsNullOrEmpty(_StaticFileServer))
throw new Exception();
string logUrl;
string logText;
string runJson;
string rootJson;
string targetJson;
string[] logLines;
string runFileName;
string[] logSegments;
string targetFileName;
string runFullFileName;
FileInfo targetFileInfo;
FileInfo alternateFileInfo;
List<string> runFullFileNameSegments;
string dateTimeFormat = "yy/MM/dd HH:mm:ss";
NginxFileSystem[] runNginxFileSystemCollection;
NginxFileSystem[] rootNginxFileSystemCollection;
NginxFileSystem[] innerNginxFileSystemCollection;
DateTime fileAgeThresholdDateTime = DateTime.Now;
string nginxFormat = "ddd, dd MMM yyyy HH:mm:ss zzz";
List<Tuple<DateTime, FileInfo, FileInfo, string>> possibleDownload = new();
string[] segments = _FileConnectorConfiguration.FileAgeThreshold.Split(':');
JsonSerializerOptions propertyNameCaseInsensitiveJsonSerializerOptions = new() { PropertyNameCaseInsensitive = true };
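// FileAgeThreshold is read as up to four ':'-separated values (days:hours:minutes:seconds), each
// subtracted from the current time to form the cut-off applied to log entries and file mtimes below.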
for (int i = 0; i < segments.Length; i++)
{
fileAgeThresholdDateTime = i switch
{
0 => fileAgeThresholdDateTime.AddDays(double.Parse(segments[i]) * -1),
1 => fileAgeThresholdDateTime.AddHours(double.Parse(segments[i]) * -1),
2 => fileAgeThresholdDateTime.AddMinutes(double.Parse(segments[i]) * -1),
3 => fileAgeThresholdDateTime.AddSeconds(double.Parse(segments[i]) * -1),
_ => throw new Exception(),
};
}
rootJson = _HttpClient.GetStringAsync(string.Concat("http://", _StaticFileServer)).Result;
rootNginxFileSystemCollection = JsonSerializer.Deserialize<NginxFileSystem[]>(rootJson, propertyNameCaseInsensitiveJsonSerializerOptions);
foreach (NginxFileSystem rootNginxFileSystem in rootNginxFileSystemCollection)
{
if (!(from l in _FileConnectorConfiguration.SourceFileFilters where rootNginxFileSystem.Name == l select false).Any())
continue;
logUrl = string.Empty;
if (rootNginxFileSystem.Name.Contains("."))
logUrl = string.Concat("http://", _StaticFileServer, '/', rootNginxFileSystem.Name);
else
{
rootJson = _HttpClient.GetStringAsync(string.Concat("http://", _StaticFileServer, '/', rootNginxFileSystem.Name)).Result;
innerNginxFileSystemCollection = JsonSerializer.Deserialize<NginxFileSystem[]>(rootJson, propertyNameCaseInsensitiveJsonSerializerOptions);
foreach (NginxFileSystem innerNginxFileSystem in innerNginxFileSystemCollection)
{
if (!(from l in _FileConnectorConfiguration.SourceFileFilters where innerNginxFileSystem.Name == l select false).Any())
continue;
logUrl = string.Concat("http://", _StaticFileServer, '/', rootNginxFileSystem.Name, '/', innerNginxFileSystem.Name);
break;
}
}
if (string.IsNullOrEmpty(logUrl))
continue;
logText = _HttpClient.GetStringAsync(logUrl).Result;
logLines = logText.Split(new string[] { Environment.NewLine }, StringSplitOptions.None);
foreach (string logLine in logLines)
{
if (string.IsNullOrEmpty(logLine))
continue;
logSegments = logLine.Split('<');
if (logSegments.Length < 1 || !DateTime.TryParseExact(logSegments[0].Trim(), dateTimeFormat, CultureInfo.InvariantCulture, DateTimeStyles.None, out DateTime tvsDateTime))
continue;
if (tvsDateTime < fileAgeThresholdDateTime)
continue;
if (logSegments.Length < 2)
continue;
runFullFileName = logSegments[1].Split('>')[0];
if (!(from l in _FileConnectorConfiguration.SourceFileFilters where runFullFileName.EndsWith(l) select false).Any())
continue;
runFullFileNameSegments = runFullFileName.Split('\\').ToList();
runFileName = runFullFileNameSegments[runFullFileNameSegments.Count - 1];
runFullFileNameSegments.RemoveAt(runFullFileNameSegments.Count - 1);
runJson = _HttpClient.GetStringAsync(string.Concat("http://", _StaticFileServer, '/', string.Join("/", runFullFileNameSegments))).Result;
runFullFileNameSegments.Add(runFileName);
runNginxFileSystemCollection = JsonSerializer.Deserialize<NginxFileSystem[]>(runJson, propertyNameCaseInsensitiveJsonSerializerOptions);
foreach (NginxFileSystem matchNginxFileSystem in runNginxFileSystemCollection)
{
if (matchNginxFileSystem.Name != runFileName)
continue;
if (!(from l in _FileConnectorConfiguration.SourceFileFilters where matchNginxFileSystem.Name.EndsWith(l) select false).Any())
continue;
if (!DateTime.TryParseExact(matchNginxFileSystem.MTime.Replace("GMT", "+00:00"), nginxFormat, CultureInfo.InvariantCulture, DateTimeStyles.None, out DateTime matchNginxFileSystemDateTime))
continue;
if (matchNginxFileSystemDateTime < fileAgeThresholdDateTime)
continue;
targetFileInfo = new FileInfo(Path.Combine(_FileConnectorConfiguration.TargetFileLocation, runFullFileName));
if (!Directory.Exists(targetFileInfo.Directory.FullName))
_ = Directory.CreateDirectory(targetFileInfo.Directory.FullName);
if (targetFileInfo.Exists && targetFileInfo.LastWriteTime == matchNginxFileSystemDateTime)
continue;
alternateFileInfo = new(Path.Combine(_FileConnectorConfiguration.AlternateTargetFolder, matchNginxFileSystem.Name));
targetFileName = string.Concat("http://", _StaticFileServer, '/', string.Join("/", runFullFileNameSegments));
possibleDownload.Add(new(matchNginxFileSystemDateTime, targetFileInfo, alternateFileInfo, targetFileName));
break;
}
if (possibleDownload.Any())
break;
}
if (possibleDownload.Any())
break;
}
if (possibleDownload.Any())
{
possibleDownload = (from l in possibleDownload orderby l.Item1 select l).ToList();
alternateFileInfo = possibleDownload[0].Item3;
targetFileName = possibleDownload[0].Item4;
targetFileInfo = possibleDownload[0].Item2;
DateTime matchNginxFileSystemDateTime = possibleDownload[0].Item1;
if (alternateFileInfo.Exists)
File.Delete(alternateFileInfo.FullName);
if (targetFileInfo.Exists)
File.Delete(targetFileInfo.FullName);
targetJson = _HttpClient.GetStringAsync(targetFileName).Result;
File.WriteAllText(targetFileInfo.FullName, targetJson);
targetFileInfo.LastWriteTime = matchNginxFileSystemDateTime;
File.Copy(targetFileInfo.FullName, alternateFileInfo.FullName);
}
}
private void Callback(object state)
{
try
{
if (_IsEAFHosted)
DownloadSRPxFileAsync();
}
catch (Exception exception)
{
string subject = string.Concat("Exception:", _CellInstanceConnectionName);
string body = string.Concat(exception.Message, Environment.NewLine, Environment.NewLine, exception.StackTrace);
try
{ _SMTP.SendHighPriorityEmailMessage(subject, body); }
catch (Exception) { }
}
try
{
TimeSpan timeSpan = new(DateTime.Now.AddSeconds(_FileConnectorConfiguration.FileScanningIntervalInSeconds.Value).Ticks - DateTime.Now.Ticks);
_ = _Timer.Change((long)timeSpan.TotalMilliseconds, Timeout.Infinite);
}
catch (Exception exception)
{
string subject = string.Concat("Exception:", _CellInstanceConnectionName);
string body = string.Concat(exception.Message, Environment.NewLine, Environment.NewLine, exception.StackTrace);
try
{ _SMTP.SendHighPriorityEmailMessage(subject, body); }
catch (Exception) { }
}
}
}

View File

@ -0,0 +1,11 @@
namespace Adaptation.FileHandlers.DownloadSRPxFile;
internal class NginxFileSystem
{
public string Name { get; set; }
public string Type { get; set; }
public string MTime { get; set; }
public float Size { get; set; }
}
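
This class appears to mirror one entry of nginx's JSON directory listing (autoindex_format json), which the DownloadSRPxFile handler deserializes case-insensitively. A minimal sketch of that round trip, using a hand-written sample listing rather than a live server; the type name and sample values below are illustrative only:

using System;
using System.Globalization;
using System.Text.Json;
namespace Adaptation.FileHandlers.DownloadSRPxFile;
internal static class NginxListingSketch
{
    // Sample payload shaped like the listings the handler consumes; not captured from a real server.
    private const string SampleListing =
        "[{\"name\":\"SRP2100\",\"type\":\"directory\",\"mtime\":\"Mon, 01 May 2023 18:00:00 GMT\"}," +
        "{\"name\":\"run.json\",\"type\":\"file\",\"mtime\":\"Mon, 01 May 2023 18:05:00 GMT\",\"size\":1234}]";
    internal static void Run()
    {
        JsonSerializerOptions options = new() { PropertyNameCaseInsensitive = true };
        NginxFileSystem[] entries = JsonSerializer.Deserialize<NginxFileSystem[]>(SampleListing, options);
        foreach (NginxFileSystem entry in entries)
        {
            // Same parse the handler uses: "GMT" is swapped for an explicit offset so "zzz" matches.
            bool parsed = DateTime.TryParseExact(entry.MTime?.Replace("GMT", "+00:00"),
                "ddd, dd MMM yyyy HH:mm:ss zzz", CultureInfo.InvariantCulture, DateTimeStyles.None, out DateTime mTime);
            Console.WriteLine($"{entry.Type,-9} {entry.Name} {(parsed ? mTime.ToString("u") : "<unparsed>")}");
        }
    }
}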

View File

@ -0,0 +1,286 @@
using Adaptation.Eaf.Management.ConfigurationData.CellAutomation;
using Adaptation.Ifx.Eaf.EquipmentConnector.File.Configuration;
using Adaptation.Shared;
using Adaptation.Shared.Duplicator;
using Adaptation.Shared.Methods;
using Infineon.Monitoring.MonA;
using System;
using System.Collections.Generic;
using System.Diagnostics;
using System.Globalization;
using System.IO;
using System.IO.Compression;
using System.Linq;
using System.Text.Json;
using System.Threading;
namespace Adaptation.FileHandlers.Dummy;
public class FileRead : Shared.FileRead, IFileRead
{
private readonly Timer _Timer;
private int _LastDummyRunIndex;
private readonly string[] _CellNames;
public FileRead(ISMTP smtp, Dictionary<string, string> fileParameter, string cellInstanceName, int? connectionCount, string cellInstanceConnectionName, FileConnectorConfiguration fileConnectorConfiguration, string equipmentTypeName, string parameterizedModelObjectDefinitionType, IList<ModelObjectParameterDefinition> modelObjectParameters, string equipmentDictionaryName, Dictionary<string, List<long>> dummyRuns, Dictionary<long, List<string>> staticRuns, bool useCyclicalForDescription, bool isEAFHosted) :
base(new Description(), false, smtp, fileParameter, cellInstanceName, connectionCount, cellInstanceConnectionName, fileConnectorConfiguration, equipmentTypeName, parameterizedModelObjectDefinitionType, modelObjectParameters, equipmentDictionaryName, dummyRuns, staticRuns, useCyclicalForDescription, isEAFHosted: connectionCount is null)
{
_MinFileLength = 10;
_NullData = string.Empty;
_Logistics = new(this);
if (_FileParameter is null)
throw new Exception(cellInstanceConnectionName);
if (_ModelObjectParameterDefinitions is null)
throw new Exception(cellInstanceConnectionName);
if (!_IsDuplicator)
throw new Exception(cellInstanceConnectionName);
_LastDummyRunIndex = -1;
List<string> cellNames = new();
_Timer = new Timer(Callback, null, Timeout.Infinite, Timeout.Infinite);
ModelObjectParameterDefinition[] cellInstanceCollection = GetProperties(cellInstanceConnectionName, modelObjectParameters, "CellInstance.", ".Alias");
foreach (ModelObjectParameterDefinition modelObjectParameterDefinition in cellInstanceCollection)
cellNames.Add(modelObjectParameterDefinition.Name.Split('.')[1]);
_CellNames = cellNames.ToArray();
if (Debugger.IsAttached || fileConnectorConfiguration.PreProcessingMode == FileConnectorConfiguration.PreProcessingModeEnum.Process)
Callback(null);
else
{
TimeSpan timeSpan = new(DateTime.Now.AddSeconds(_FileConnectorConfiguration.FileScanningIntervalInSeconds.Value).Ticks - DateTime.Now.Ticks);
_ = _Timer.Change((long)timeSpan.TotalMilliseconds, Timeout.Infinite);
}
}
void IFileRead.Move(Tuple<string, Test[], JsonElement[], List<FileInfo>> extractResults, Exception exception) => Move(extractResults);
void IFileRead.WaitForThread() => WaitForThread(thread: null, threadExceptions: null);
string IFileRead.GetEventDescription()
{
string result = _Description.GetEventDescription();
return result;
}
List<string> IFileRead.GetHeaderNames()
{
List<string> results = _Description.GetHeaderNames();
return results;
}
string[] IFileRead.Move(Tuple<string, Test[], JsonElement[], List<FileInfo>> extractResults, string to, string from, string resolvedFileLocation, Exception exception)
{
string[] results = Move(extractResults, to, from, resolvedFileLocation, exception);
return results;
}
JsonProperty[] IFileRead.GetDefault()
{
JsonProperty[] results = _Description.GetDefault(this, _Logistics);
return results;
}
Dictionary<string, string> IFileRead.GetDisplayNamesJsonElement()
{
Dictionary<string, string> results = _Description.GetDisplayNamesJsonElement(this);
return results;
}
List<IDescription> IFileRead.GetDescriptions(IFileRead fileRead, List<Test> tests, IProcessData processData)
{
List<IDescription> results = _Description.GetDescriptions(fileRead, _Logistics, tests, processData);
return results;
}
Tuple<string, Test[], JsonElement[], List<FileInfo>> IFileRead.GetExtractResult(string reportFullPath, string eventName) => throw new Exception(string.Concat("See ", nameof(CallbackFileExists)));
Tuple<string, Test[], JsonElement[], List<FileInfo>> IFileRead.ReExtract() => throw new Exception(string.Concat("See ", nameof(CallbackFileExists)));
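// CallbackFileExists stages a dummy run: it records the run sequence for the owning cell, clears and
// recreates the in-process directory, and flags a warning when the target already contains files
// stamped with the same sequence. CallbackInProcessCleared then extracts the source archive, stamps
// every extracted file's LastWriteTime with the sequence, moves the files into the target (optionally
// preserving sub-directories), and reports Warning/Ok/Critical states to MonIn next to a trace file.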
private void CallbackInProcessCleared(string sourceArchiveFile, string traceDummyFile, string targetFileLocation, string monARessource, string inProcessDirectory, long sequence, bool warning)
{
const string site = "sjc";
string stateName = string.Concat("Dummy_", _EventName);
const string monInURL = "http://moninhttp.sjc.infineon.com/input/text";
MonIn monIn = MonIn.GetInstance(monInURL);
try
{
if (warning)
{
File.AppendAllLines(traceDummyFile, new string[] { site, monARessource, stateName, State.Warning.ToString() });
_ = monIn.SendStatus(site, monARessource, stateName, State.Warning);
for (int i = 1; i < 12; i++)
Thread.Sleep(500);
}
ZipFile.ExtractToDirectory(sourceArchiveFile, inProcessDirectory);
string[] files = Directory.GetFiles(inProcessDirectory, "*", SearchOption.TopDirectoryOnly);
if (files.Length > 250)
throw new Exception("Safety net!");
foreach (string file in files)
File.SetLastWriteTime(file, new DateTime(sequence));
if (!_FileConnectorConfiguration.IncludeSubDirectories.Value)
{
foreach (string file in files)
File.Move(file, Path.Combine(targetFileLocation, Path.GetFileName(file)));
}
else
{
string[] directories = Directory.GetDirectories(inProcessDirectory, "*", SearchOption.AllDirectories);
foreach (string directory in directories)
_ = Directory.CreateDirectory(string.Concat(targetFileLocation, directory.Substring(inProcessDirectory.Length)));
foreach (string file in files)
File.Move(file, string.Concat(targetFileLocation, file.Substring(inProcessDirectory.Length)));
}
File.AppendAllLines(traceDummyFile, new string[] { site, monARessource, stateName, State.Ok.ToString() });
_ = monIn.SendStatus(site, monARessource, stateName, State.Ok);
}
catch (Exception exception)
{
string subject = string.Concat("Exception:", _CellInstanceConnectionName);
string body = string.Concat(exception.Message, Environment.NewLine, Environment.NewLine, exception.StackTrace);
try
{ _SMTP.SendHighPriorityEmailMessage(subject, body); }
catch (Exception) { }
File.AppendAllLines(traceDummyFile, new string[] { site, monARessource, stateName, State.Critical.ToString(), exception.Message, exception.StackTrace });
_ = monIn.SendStatus(site, monARessource, stateName, State.Critical);
}
}
private void CallbackFileExists(string sourceArchiveFile, string traceDummyFile, string targetFileLocation, string monARessource, long sequence)
{
string[] files;
bool warning = false;
if (!_DummyRuns.ContainsKey(monARessource))
_DummyRuns.Add(monARessource, new List<long>());
if (!_DummyRuns[monARessource].Contains(sequence))
_DummyRuns[monARessource].Add(sequence);
File.AppendAllLines(traceDummyFile, new string[] { sourceArchiveFile });
string inProcessDirectory = Path.Combine(_ProgressPath, "Dummy In-Process", sequence.ToString()); // assumes the shared base exposes a _ProgressPath field (alongside _TracePath)
if (!Directory.Exists(inProcessDirectory))
_ = Directory.CreateDirectory(inProcessDirectory);
files = Directory.GetFiles(inProcessDirectory, "*", SearchOption.AllDirectories);
if (files.Any())
{
if (files.Length > 250)
throw new Exception("Safety net!");
try
{
foreach (string file in files)
File.Delete(file);
}
catch (Exception) { }
}
if (_FileConnectorConfiguration.IncludeSubDirectories.Value)
files = Directory.GetFiles(targetFileLocation, "*", SearchOption.AllDirectories);
else
files = Directory.GetFiles(targetFileLocation, "*", SearchOption.TopDirectoryOnly);
foreach (string file in files)
{
if (new FileInfo(file).LastWriteTime.Ticks == sequence)
{
warning = true;
break;
}
}
CallbackInProcessCleared(sourceArchiveFile, traceDummyFile, targetFileLocation, monARessource, inProcessDirectory, sequence, warning);
}
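// GetCellName first looks for a cell alias contained (case-insensitively) in the path segment; if none
// matches, it counts how often each alias's characters occur in the segment and returns the shortest
// alias, using that count as the tie-breaker between aliases of equal length.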
private string GetCellName(string pathSegment)
{
string result = string.Empty;
foreach (string cellName in _CellNames)
{
if (pathSegment.ToLower().Contains(cellName.ToLower()))
{
result = cellName;
break;
}
}
if (string.IsNullOrEmpty(result))
{
int count;
List<(string CellName, int Count)> cellNames = new();
foreach (string cellName in _CellNames)
{
count = 0;
foreach (char @char in cellName.ToLower())
count += pathSegment.Length - pathSegment.ToLower().Replace(@char.ToString(), string.Empty).Length;
cellNames.Add(new(cellName, count));
}
result = (from l in cellNames orderby l.CellName.Length, l.Count descending select l.CellName).First();
}
return result;
}
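// Callback runs on the timer: when not EAF hosted, or on weekdays between 08:00 and 17:59, it walks the
// configured source file filters round-robin; when the current archive exists, it derives the run
// sequence from the file name, resolves the owning cell, replays the archive via CallbackFileExists,
// and finally reschedules itself.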
private void Callback(object state)
{
try
{
string sourceParentDirectory;
string targetParentDirectory;
DateTime dateTime = DateTime.Now;
if (!string.IsNullOrEmpty(Path.GetFileName(_FileConnectorConfiguration.SourceFileLocation)))
sourceParentDirectory = Path.GetDirectoryName(_FileConnectorConfiguration.SourceFileLocation);
else
sourceParentDirectory = GetParentParent(_FileConnectorConfiguration.SourceFileLocation);
if (!string.IsNullOrEmpty(Path.GetFileName(_FileConnectorConfiguration.TargetFileLocation)))
targetParentDirectory = Path.GetDirectoryName(_FileConnectorConfiguration.TargetFileLocation);
else
targetParentDirectory = GetParentParent(_FileConnectorConfiguration.TargetFileLocation);
if (sourceParentDirectory != targetParentDirectory)
throw new Exception("Target and source must have the same parent for Si Dummy FileConnectorConfiguration!");
bool check = dateTime.Hour > 7 && dateTime.Hour < 18 && dateTime.DayOfWeek != DayOfWeek.Sunday && dateTime.DayOfWeek != DayOfWeek.Saturday;
if (!_IsEAFHosted || check)
{
string monARessource;
string sourceFileFilter;
string sourceArchiveFile;
string weekOfYear = _Calendar.GetWeekOfYear(dateTime, CalendarWeekRule.FirstDay, DayOfWeek.Sunday).ToString("00");
string traceDummyDirectory = Path.Combine(Path.GetPathRoot(_TracePath), "TracesDummy", _CellInstanceName, "Source", $"{dateTime:yyyy}___Week_{weekOfYear}");
if (!Directory.Exists(traceDummyDirectory))
_ = Directory.CreateDirectory(traceDummyDirectory);
string traceDummyFile = Path.Combine(traceDummyDirectory, $"{dateTime.Ticks} - {_CellInstanceName}.txt");
File.AppendAllText(traceDummyFile, string.Empty);
for (int i = 0; i < _FileConnectorConfiguration.SourceFileFilters.Count; i++)
{
_LastDummyRunIndex += 1;
if (_LastDummyRunIndex >= _FileConnectorConfiguration.SourceFileFilters.Count)
_LastDummyRunIndex = 0;
sourceFileFilter = _FileConnectorConfiguration.SourceFileFilters[_LastDummyRunIndex];
sourceArchiveFile = Path.Combine(_FileConnectorConfiguration.SourceFileLocation, sourceFileFilter);
if (File.Exists(sourceArchiveFile))
{
if (!long.TryParse(Path.GetFileNameWithoutExtension(sourceArchiveFile).Replace("x", string.Empty), out long sequence))
throw new Exception("Invalid file name for source archive file!");
monARessource = GetCellName(sourceArchiveFile);
if (string.IsNullOrEmpty(monARessource))
throw new Exception("Could not determine which cell archive file is associated with!");
if (_IsEAFHosted)
CallbackFileExists(sourceArchiveFile, traceDummyFile, _FileConnectorConfiguration.TargetFileLocation, monARessource, sequence);
break;
}
}
}
}
catch (Exception exception)
{
string subject = string.Concat("Exception:", _CellInstanceConnectionName);
string body = string.Concat(exception.Message, Environment.NewLine, Environment.NewLine, exception.StackTrace);
try
{ _SMTP.SendHighPriorityEmailMessage(subject, body); }
catch (Exception) { }
}
try
{
TimeSpan timeSpan = new(DateTime.Now.AddSeconds(_FileConnectorConfiguration.FileScanningIntervalInSeconds.Value).Ticks - DateTime.Now.Ticks);
_ = _Timer.Change((long)timeSpan.TotalMilliseconds, Timeout.Infinite);
}
catch (Exception exception)
{
string subject = string.Concat("Exception:", _CellInstanceConnectionName);
string body = string.Concat(exception.Message, Environment.NewLine, Environment.NewLine, exception.StackTrace);
try
{ _SMTP.SendHighPriorityEmailMessage(subject, body); }
catch (Exception) { }
}
}
}

View File

@ -0,0 +1,134 @@
using Adaptation.Eaf.Management.ConfigurationData.CellAutomation;
using Adaptation.Ifx.Eaf.EquipmentConnector.File.Configuration;
using Adaptation.Shared;
using Adaptation.Shared.Duplicator;
using Adaptation.Shared.Methods;
using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Text.Json;
namespace Adaptation.FileHandlers.IQSSi;
public class FileRead : Shared.FileRead, IFileRead
{
public FileRead(ISMTP smtp, Dictionary<string, string> fileParameter, string cellInstanceName, int? connectionCount, string cellInstanceConnectionName, FileConnectorConfiguration fileConnectorConfiguration, string equipmentTypeName, string parameterizedModelObjectDefinitionType, IList<ModelObjectParameterDefinition> modelObjectParameters, string equipmentDictionaryName, Dictionary<string, List<long>> dummyRuns, Dictionary<long, List<string>> staticRuns, bool useCyclicalForDescription, bool isEAFHosted) :
base(new Description(), false, smtp, fileParameter, cellInstanceName, connectionCount, cellInstanceConnectionName, fileConnectorConfiguration, equipmentTypeName, parameterizedModelObjectDefinitionType, modelObjectParameters, equipmentDictionaryName, dummyRuns, staticRuns, useCyclicalForDescription, isEAFHosted: connectionCount is null)
{
_MinFileLength = 10;
_NullData = string.Empty;
_Logistics = new(this);
if (_FileParameter is null)
throw new Exception(cellInstanceConnectionName);
if (_ModelObjectParameterDefinitions is null)
throw new Exception(cellInstanceConnectionName);
if (!_IsDuplicator)
throw new Exception(cellInstanceConnectionName);
}
void IFileRead.Move(Tuple<string, Test[], JsonElement[], List<FileInfo>> extractResults, Exception exception)
{
bool isErrorFile = exception is not null;
if (!isErrorFile && !string.IsNullOrEmpty(_Logistics.ReportFullPath))
{
FileInfo fileInfo = new(_Logistics.ReportFullPath);
if (fileInfo.Exists && fileInfo.LastWriteTime < fileInfo.CreationTime)
File.SetLastWriteTime(_Logistics.ReportFullPath, fileInfo.CreationTime);
}
Move(extractResults);
}
void IFileRead.WaitForThread() => WaitForThread(thread: null, threadExceptions: null);
string IFileRead.GetEventDescription()
{
string result = _Description.GetEventDescription();
return result;
}
List<string> IFileRead.GetHeaderNames()
{
List<string> results = _Description.GetHeaderNames();
return results;
}
string[] IFileRead.Move(Tuple<string, Test[], JsonElement[], List<FileInfo>> extractResults, string to, string from, string resolvedFileLocation, Exception exception)
{
string[] results = Move(extractResults, to, from, resolvedFileLocation, exception);
return results;
}
JsonProperty[] IFileRead.GetDefault()
{
JsonProperty[] results = _Description.GetDefault(this, _Logistics);
return results;
}
Dictionary<string, string> IFileRead.GetDisplayNamesJsonElement()
{
Dictionary<string, string> results = _Description.GetDisplayNamesJsonElement(this);
return results;
}
List<IDescription> IFileRead.GetDescriptions(IFileRead fileRead, List<Test> tests, IProcessData processData)
{
List<IDescription> results = _Description.GetDescriptions(fileRead, _Logistics, tests, processData);
return results;
}
Tuple<string, Test[], JsonElement[], List<FileInfo>> IFileRead.GetExtractResult(string reportFullPath, string eventName)
{
Tuple<string, Test[], JsonElement[], List<FileInfo>> results;
if (string.IsNullOrEmpty(eventName))
throw new Exception();
_ReportFullPath = reportFullPath;
DateTime dateTime = DateTime.Now;
results = GetExtractResult(reportFullPath, dateTime);
if (results.Item3 is null)
results = new Tuple<string, Test[], JsonElement[], List<FileInfo>>(results.Item1, Array.Empty<Test>(), JsonSerializer.Deserialize<JsonElement[]>("[]"), results.Item4);
if (results.Item3.Length > 0 && _IsEAFHosted)
WritePDSF(this, results.Item3);
UpdateLastTicksDuration(DateTime.Now.Ticks - dateTime.Ticks);
return results;
}
Tuple<string, Test[], JsonElement[], List<FileInfo>> IFileRead.ReExtract()
{
Tuple<string, Test[], JsonElement[], List<FileInfo>> results;
List<string> headerNames = _Description.GetHeaderNames();
Dictionary<string, string> keyValuePairs = _Description.GetDisplayNamesJsonElement(this);
results = ReExtract(this, headerNames, keyValuePairs);
return results;
}
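// FileCopy duplicates the report into a "<SourceFileLocation>\<CellInstanceName>" folder and then waits
// for that duplicate to be consumed before the extract result is returned.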
private void FileCopy<T>(string reportFullPath, DateTime dateTime, List<T> descriptions) where T : Shared.Properties.IDescription
{
bool isDummyRun = false;
string successDirectory = string.Empty;
List<(Shared.Properties.IScopeInfo, string)> collection = new();
string duplicateDirectory = Path.Combine(_FileConnectorConfiguration.SourceFileLocation, _CellInstanceName);
if (!Directory.Exists(duplicateDirectory))
_ = Directory.CreateDirectory(duplicateDirectory);
string duplicateFile = Path.Combine(duplicateDirectory, Path.GetFileName(reportFullPath));
File.Copy(reportFullPath, duplicateFile, overwrite: true);
WaitForFileConsumption(dateTime, descriptions, isDummyRun, successDirectory, duplicateDirectory, collection, duplicateFile);
}
private Tuple<string, Test[], JsonElement[], List<FileInfo>> GetExtractResult(string reportFullPath, DateTime dateTime)
{
Tuple<string, Test[], JsonElement[], List<FileInfo>> results;
Tuple<string, string[], string[]> pdsf = ProcessDataStandardFormat.GetLogisticsColumnsAndBody(reportFullPath);
_Logistics = new Logistics(reportFullPath, pdsf.Item1);
SetFileParameterLotIDToLogisticsMID();
JsonElement[] jsonElements = ProcessDataStandardFormat.GetArray(pdsf);
List<Shared.Properties.IDescription> descriptions = GetDuplicatorDescriptions(jsonElements);
Test[] tests = (from l in descriptions select (Test)l.Test).ToArray();
if (_IsEAFHosted && _FileConnectorConfiguration.FileScanningIntervalInSeconds > 0)
FileCopy(reportFullPath, dateTime, descriptions);
results = new Tuple<string, Test[], JsonElement[], List<FileInfo>>(pdsf.Item1, tests, jsonElements, new List<FileInfo>());
return results;
}
}

View File

@ -0,0 +1,266 @@
using Adaptation.Eaf.Management.ConfigurationData.CellAutomation;
using Adaptation.Ifx.Eaf.EquipmentConnector.File.Configuration;
using Adaptation.Shared;
using Adaptation.Shared.Duplicator;
using Adaptation.Shared.Methods;
using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Text;
using System.Text.Json;
using System.Threading;
namespace Adaptation.FileHandlers.MoveMatchingFiles;
public class FileRead : Shared.FileRead, IFileRead
{
public FileRead(ISMTP smtp, Dictionary<string, string> fileParameter, string cellInstanceName, int? connectionCount, string cellInstanceConnectionName, FileConnectorConfiguration fileConnectorConfiguration, string equipmentTypeName, string parameterizedModelObjectDefinitionType, IList<ModelObjectParameterDefinition> modelObjectParameters, string equipmentDictionaryName, Dictionary<string, List<long>> dummyRuns, Dictionary<long, List<string>> staticRuns, bool useCyclicalForDescription, bool isEAFHosted) :
base(new Description(), false, smtp, fileParameter, cellInstanceName, connectionCount, cellInstanceConnectionName, fileConnectorConfiguration, equipmentTypeName, parameterizedModelObjectDefinitionType, modelObjectParameters, equipmentDictionaryName, dummyRuns, staticRuns, useCyclicalForDescription, isEAFHosted: connectionCount is null)
{
_MinFileLength = 10;
_NullData = string.Empty;
_Logistics = new(this);
if (_FileParameter is null)
throw new Exception(cellInstanceConnectionName);
if (_ModelObjectParameterDefinitions is null)
throw new Exception(cellInstanceConnectionName);
if (!_IsDuplicator)
throw new Exception(cellInstanceConnectionName);
}
void IFileRead.Move(Tuple<string, Test[], JsonElement[], List<FileInfo>> extractResults, Exception exception)
{
bool isErrorFile = exception is not null;
if (!isErrorFile && !string.IsNullOrEmpty(_Logistics.ReportFullPath))
{
FileInfo fileInfo = new(_Logistics.ReportFullPath);
if (fileInfo.Exists && fileInfo.LastWriteTime < fileInfo.CreationTime)
File.SetLastWriteTime(_Logistics.ReportFullPath, fileInfo.CreationTime);
}
Move(extractResults);
}
void IFileRead.WaitForThread() => WaitForThread(thread: null, threadExceptions: null);
string IFileRead.GetEventDescription()
{
string result = _Description.GetEventDescription();
return result;
}
List<string> IFileRead.GetHeaderNames()
{
List<string> results = _Description.GetHeaderNames();
return results;
}
string[] IFileRead.Move(Tuple<string, Test[], JsonElement[], List<FileInfo>> extractResults, string to, string from, string resolvedFileLocation, Exception exception)
{
string[] results = Move(extractResults, to, from, resolvedFileLocation, exception);
return results;
}
JsonProperty[] IFileRead.GetDefault()
{
JsonProperty[] results = _Description.GetDefault(this, _Logistics);
return results;
}
Dictionary<string, string> IFileRead.GetDisplayNamesJsonElement()
{
Dictionary<string, string> results = _Description.GetDisplayNamesJsonElement(this);
return results;
}
List<IDescription> IFileRead.GetDescriptions(IFileRead fileRead, List<Test> tests, IProcessData processData)
{
List<IDescription> results = _Description.GetDescriptions(fileRead, _Logistics, tests, processData);
return results;
}
Tuple<string, Test[], JsonElement[], List<FileInfo>> IFileRead.GetExtractResult(string reportFullPath, string eventName)
{
Tuple<string, Test[], JsonElement[], List<FileInfo>> results;
if (string.IsNullOrEmpty(eventName))
throw new Exception();
_ReportFullPath = reportFullPath;
DateTime dateTime = DateTime.Now;
results = GetExtractResult(reportFullPath, dateTime);
if (results.Item3 is null)
results = new Tuple<string, Test[], JsonElement[], List<FileInfo>>(results.Item1, Array.Empty<Test>(), JsonSerializer.Deserialize<JsonElement[]>("[]"), results.Item4);
if (results.Item3.Length > 0 && _IsEAFHosted)
WritePDSF(this, results.Item3);
UpdateLastTicksDuration(DateTime.Now.Ticks - dateTime.Ticks);
return results;
}
Tuple<string, Test[], JsonElement[], List<FileInfo>> IFileRead.ReExtract()
{
Tuple<string, Test[], JsonElement[], List<FileInfo>> results;
List<string> headerNames = _Description.GetHeaderNames();
Dictionary<string, string> keyValuePairs = _Description.GetDisplayNamesJsonElement(this);
results = ReExtract(this, headerNames, keyValuePairs);
return results;
}
private static List<string> GetSearchDirectories(int numberLength, string parentDirectory)
{
List<string> results = new();
string[] directories = Directory.GetDirectories(parentDirectory, "*", SearchOption.TopDirectoryOnly);
foreach (string directory in directories)
{
if (Path.GetFileName(directory).Length != numberLength)
continue;
results.Add(directory);
}
results.Sort();
return results;
}
private List<string> GetMatchingFiles(long ticks, string reportFullPath, List<string> searchDirectories)
{
List<string> results = new();
string[] found;
string fileName = Path.GetFileName(reportFullPath);
foreach (string searchDirectory in searchDirectories)
{
for (int i = 0; i < int.MaxValue; i++)
{
found = Directory.GetFiles(searchDirectory, fileName, SearchOption.AllDirectories);
if (found.Any())
{
results.AddRange(found);
break;
}
if (new TimeSpan(DateTime.Now.Ticks - ticks).TotalSeconds > _BreakAfterSeconds)
break;
}
}
return results;
}
private static List<(string matchingFile, string checkFile)> GetCollection(int numberLength, string parentDirectory, List<string> matchingFiles)
{
List<(string matchingFile, string checkFile)> results = new();
string checkFile;
int parentDirectoryLength = parentDirectory.Length;
foreach (string matchingFile in matchingFiles)
{
checkFile = $"{matchingFile[0]}{matchingFile.Substring(parentDirectoryLength + numberLength + 1)}";
results.Add(new(matchingFile, checkFile));
}
return results;
}
private static List<(string, string, string, string, string)> GetCollection(List<(string matchingFile, string checkFile)> collection)
{
List<(string, string, string, string, string)> results = new();
string errFile;
string checkDirectory;
string noWaitDirectory;
foreach ((string matchingFile, string checkFile) in collection)
{
errFile = string.Concat(checkFile, ".err");
checkDirectory = Path.GetDirectoryName(checkFile);
if (!Directory.Exists(checkDirectory))
_ = Directory.CreateDirectory(checkDirectory);
noWaitDirectory = Path.Combine(checkDirectory, "NoWaitDirectory");
results.Add(new(matchingFile, checkFile, errFile, checkDirectory, noWaitDirectory));
}
return results;
}
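// MoveCollection renames each matching file onto its check path and waits for a consumer: the wait ends
// when the check file disappears, and an accompanying ".err" file aborts the run with its contents.
// Entries whose check directory already holds a "NoWaitDirectory" folder skip the per-file wait and are
// verified once, collectively, at the end.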
private void MoveCollection(DateTime dateTime, List<(string matchingFile, string checkFile)> collection)
{
long preWait;
List<(string checkFile, string errFile)> postCollection = new();
foreach ((string matchingFile, string checkFile, string errFile, string checkDirectory, string noWaitDirectory) in GetCollection(collection))
{
File.Move(matchingFile, checkFile);
if (Directory.Exists(noWaitDirectory))
{
postCollection.Add(new(checkFile, errFile));
continue;
}
if (_FileConnectorConfiguration?.FileHandleWaitTime is null)
preWait = DateTime.Now.AddMilliseconds(1234).Ticks;
else
preWait = DateTime.Now.AddMilliseconds(_FileConnectorConfiguration.FileHandleWaitTime.Value).Ticks;
for (short i = 0; i < short.MaxValue; i++)
{
if (DateTime.Now.Ticks > preWait)
break;
Thread.Sleep(500);
}
for (int i = 0; i < int.MaxValue; i++)
{
if (File.Exists(errFile))
throw new Exception(File.ReadAllText(errFile));
if (!File.Exists(checkFile))
break;
if (new TimeSpan(DateTime.Now.Ticks - dateTime.Ticks).TotalSeconds > _BreakAfterSeconds)
throw new Exception($"Not all files were consumed after {_BreakAfterSeconds} second(s)!");
Thread.Sleep(500);
}
}
if (postCollection.Any())
{
Thread.Sleep(500);
StringBuilder stringBuilder = new();
foreach ((string checkFile, string errFile) in postCollection)
{
if (File.Exists(errFile))
_ = stringBuilder.AppendLine(File.ReadAllText(errFile));
if (File.Exists(checkFile))
_ = stringBuilder.AppendLine($"<{checkFile}> was not consumed by the end of the wait!");
}
if (stringBuilder.Length > 0)
throw new Exception(stringBuilder.ToString());
}
}
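// CreatePointerFile drops a "<two-digit directory>.txt" breadcrumb in the parent directory for each
// matching file, recording the parent directory, the matching file, its directory, and the derived
// check path; existing breadcrumbs are left untouched.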
private static void CreatePointerFile(int numberLength, string parentDirectory, List<string> matchingFiles)
{
#nullable enable
string checkFile;
string writeFile;
string? directoryName;
int parentDirectoryLength = parentDirectory.Length;
foreach (string matchingFile in matchingFiles)
{
directoryName = Path.GetDirectoryName(matchingFile);
if (directoryName is null)
continue;
checkFile = $"{matchingFile[0]}{directoryName.Substring(parentDirectoryLength + numberLength + 1)}";
writeFile = Path.Combine(parentDirectory, $"{directoryName.Substring(parentDirectory.Length + 1, numberLength)}.txt");
if (File.Exists(writeFile))
continue;
File.AppendAllLines(writeFile, new string[] { parentDirectory, matchingFile, directoryName, checkFile });
}
#nullable disable
}
private Tuple<string, Test[], JsonElement[], List<FileInfo>> GetExtractResult(string reportFullPath, DateTime dateTime)
{
Tuple<string, Test[], JsonElement[], List<FileInfo>> results = new(string.Empty, null, null, new List<FileInfo>());
Tuple<string, string[], string[]> pdsf = ProcessDataStandardFormat.GetLogisticsColumnsAndBody(reportFullPath);
_Logistics = new Logistics(reportFullPath, pdsf.Item1);
SetFileParameterLotIDToLogisticsMID();
int numberLength = 2;
long ticks = dateTime.Ticks;
string parentParentDirectory = GetParentParent(reportFullPath);
List<string> searchDirectories = GetSearchDirectories(numberLength, parentParentDirectory);
List<string> matchingFiles = GetMatchingFiles(ticks, reportFullPath, searchDirectories);
if (matchingFiles.Count != searchDirectories.Count)
throw new Exception($"Didn't find all files after {_BreakAfterSeconds} second(s)!");
try
{ CreatePointerFile(numberLength, parentParentDirectory, matchingFiles); }
catch (Exception) { }
List<(string matchingFile, string checkFile)> collection = GetCollection(numberLength, parentParentDirectory, matchingFiles);
MoveCollection(dateTime, collection);
return results;
}
}
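
The wait loop above implies a simple handshake with whatever consumes the check files: delete the check file on success, or write <checkFile>.err with a reason on failure. Below is a minimal sketch of such a consumer under that assumption; ConsumeCheckFile and the process delegate are illustrative and not part of this code base:

using System;
using System.IO;
internal static class CheckFileConsumerSketch
{
    // Processes one check file and signals the producer the way MoveCollection expects:
    // remove the file when processing succeeds, write "<checkFile>.err" when it fails.
    internal static void ConsumeCheckFile(string checkFile, Action<string> process)
    {
        string errFile = string.Concat(checkFile, ".err");
        try
        {
            process(checkFile);     // hypothetical processing step supplied by the consumer
            File.Delete(checkFile); // success: the producer's wait loop sees the file vanish
        }
        catch (Exception exception)
        {
            File.WriteAllText(errFile, exception.Message); // failure: the producer throws with this text
        }
    }
}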

View File

@ -0,0 +1,186 @@
using Adaptation.Eaf.Management.ConfigurationData.CellAutomation;
using Adaptation.Ifx.Eaf.EquipmentConnector.File.Configuration;
using Adaptation.Shared;
using Adaptation.Shared.Duplicator;
using Adaptation.Shared.Methods;
using Adaptation.Shared.Metrology;
using System;
using System.Collections.Generic;
using System.Globalization;
using System.IO;
using System.Linq;
using System.Text;
using System.Text.Json;
namespace Adaptation.FileHandlers.OpenInsight;
public class FileRead : Shared.FileRead, IFileRead
{
private readonly string _IqsConnectionString;
private readonly string _OpenInsightFilePattern;
private readonly string _OpenInsightApiECDirectory;
private readonly string _OpenInsightApiIFXDirectory;
public FileRead(ISMTP smtp, Dictionary<string, string> fileParameter, string cellInstanceName, int? connectionCount, string cellInstanceConnectionName, FileConnectorConfiguration fileConnectorConfiguration, string equipmentTypeName, string parameterizedModelObjectDefinitionType, IList<ModelObjectParameterDefinition> modelObjectParameters, string equipmentDictionaryName, Dictionary<string, List<long>> dummyRuns, Dictionary<long, List<string>> staticRuns, bool useCyclicalForDescription, bool isEAFHosted) :
base(new Description(), false, smtp, fileParameter, cellInstanceName, connectionCount, cellInstanceConnectionName, fileConnectorConfiguration, equipmentTypeName, parameterizedModelObjectDefinitionType, modelObjectParameters, equipmentDictionaryName, dummyRuns, staticRuns, useCyclicalForDescription, isEAFHosted: connectionCount is null)
{
_MinFileLength = 10;
_NullData = string.Empty;
_Logistics = new(this);
if (_FileParameter is null)
throw new Exception(cellInstanceConnectionName);
if (_ModelObjectParameterDefinitions is null)
throw new Exception(cellInstanceConnectionName);
if (!_IsDuplicator)
throw new Exception(cellInstanceConnectionName);
_OpenInsightApiIFXDirectory = @"\\messdv002.na.infineon.com\Candela\Archive\API";
_OpenInsightApiECDirectory = @"\\messv02ecc1.ec.local\EC_Metrology_Si\Archive\API";
_IqsConnectionString = GetPropertyValue(cellInstanceConnectionName, modelObjectParameters, "IQS.ConnectionString");
_OpenInsightFilePattern = GetPropertyValue(cellInstanceConnectionName, modelObjectParameters, "OpenInsight.FilePattern");
}
void IFileRead.Move(Tuple<string, Test[], JsonElement[], List<FileInfo>> extractResults, Exception exception)
{
bool isErrorFile = exception is not null;
if (!isErrorFile && !string.IsNullOrEmpty(_Logistics.ReportFullPath))
{
FileInfo fileInfo = new(_Logistics.ReportFullPath);
if (fileInfo.Exists && fileInfo.LastWriteTime < fileInfo.CreationTime)
File.SetLastWriteTime(_Logistics.ReportFullPath, fileInfo.CreationTime);
}
Move(extractResults);
}
void IFileRead.WaitForThread() => WaitForThread(thread: null, threadExceptions: null);
string IFileRead.GetEventDescription()
{
string result = _Description.GetEventDescription();
return result;
}
List<string> IFileRead.GetHeaderNames()
{
List<string> results = _Description.GetHeaderNames();
return results;
}
string[] IFileRead.Move(Tuple<string, Test[], JsonElement[], List<FileInfo>> extractResults, string to, string from, string resolvedFileLocation, Exception exception)
{
string[] results = Move(extractResults, to, from, resolvedFileLocation, exception);
return results;
}
JsonProperty[] IFileRead.GetDefault()
{
JsonProperty[] results = _Description.GetDefault(this, _Logistics);
return results;
}
Dictionary<string, string> IFileRead.GetDisplayNamesJsonElement()
{
Dictionary<string, string> results = _Description.GetDisplayNamesJsonElement(this);
return results;
}
List<IDescription> IFileRead.GetDescriptions(IFileRead fileRead, List<Test> tests, IProcessData processData)
{
List<IDescription> results = _Description.GetDescriptions(fileRead, _Logistics, tests, processData);
return results;
}
Tuple<string, Test[], JsonElement[], List<FileInfo>> IFileRead.GetExtractResult(string reportFullPath, string eventName)
{
Tuple<string, Test[], JsonElement[], List<FileInfo>> results;
if (string.IsNullOrEmpty(eventName))
throw new Exception();
_ReportFullPath = reportFullPath;
DateTime dateTime = DateTime.Now;
results = GetExtractResult(reportFullPath, dateTime);
if (results.Item3 is null)
results = new Tuple<string, Test[], JsonElement[], List<FileInfo>>(results.Item1, Array.Empty<Test>(), JsonSerializer.Deserialize<JsonElement[]>("[]"), results.Item4);
if (results.Item3.Length > 0 && _IsEAFHosted)
WritePDSF(this, results.Item3);
UpdateLastTicksDuration(DateTime.Now.Ticks - dateTime.Ticks);
return results;
}
Tuple<string, Test[], JsonElement[], List<FileInfo>> IFileRead.ReExtract()
{
Tuple<string, Test[], JsonElement[], List<FileInfo>> results;
List<string> headerNames = _Description.GetHeaderNames();
Dictionary<string, string> keyValuePairs = _Description.GetDisplayNamesJsonElement(this);
results = ReExtract(this, headerNames, keyValuePairs);
return results;
}
private static string GetLines(List<json.Description> descriptions)
{
StringBuilder results = new();
json.Description x = descriptions[0];
char del = x.MesEntity == x.Reactor ? '\t' : '~';
_ = results.Append(x.Date).Append(del).
Append(x.Employee).Append(del).
Append(x.Recipe).Append(del).
Append(x.Reactor).Append(del).
Append(x.RDS).Append(del).
Append(x.PSN).Append(del).
Append(x.Layer).Append(del).
Append(x.Zone);
return results.ToString();
}
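// SaveOpenInsightFile resolves the IQS sub group id for the run (when reactor and PSN are present),
// queues the delimited line under the OpenInsight file pattern (prefixed with the sub group id when one
// was found), and hands the report plus derived .txt/.json companions to FromIQS.Save for archiving
// under week/PSN/reactor/RDS/sub-group folders on the EC and IFX API shares, before waiting for the
// duplicate file to be consumed.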
private void SaveOpenInsightFile(string reportFullPath, DateTime dateTime, string logistics, List<json.Description> descriptions, Test[] tests)
{
bool isDummyRun = false;
List<(Shared.Properties.IScopeInfo, string)> collection = new();
string successDirectory = _FileConnectorConfiguration.AlternateTargetFolder;
string parentParent = GetParentParent(_FileConnectorConfiguration.SourceFileLocation);
if (parentParent.Contains(_CellInstanceName))
parentParent = Path.GetDirectoryName(parentParent);
string duplicateDirectory = Path.Combine(parentParent, "Data");
if (!Directory.Exists(duplicateDirectory))
_ = Directory.CreateDirectory(duplicateDirectory);
string duplicateFile = Path.Combine(duplicateDirectory, Path.GetFileName(reportFullPath));
if (descriptions.Any() && tests.Any())
{
string lines = GetLines(descriptions);
if (!string.IsNullOrEmpty(lines))
{
long? subGroupId;
long breakAfter = dateTime.AddSeconds(_BreakAfterSeconds).Ticks;
long preWait = _FileConnectorConfiguration?.FileHandleWaitTime is null ? dateTime.AddMilliseconds(1234).Ticks : dateTime.AddMilliseconds(_FileConnectorConfiguration.FileHandleWaitTime.Value).Ticks;
if (string.IsNullOrEmpty(descriptions[0].Reactor) || string.IsNullOrEmpty(descriptions[0].PSN))
subGroupId = null;
else
(subGroupId, int? _, string _) = FromIQS.GetCommandText(_IqsConnectionString, _Logistics, descriptions[0], breakAfter, preWait);
if (subGroupId is null)
collection.Add(new(new ScopeInfo(tests[0], _OpenInsightFilePattern), lines));
else
collection.Add(new(new ScopeInfo(tests[0], $"{subGroupId.Value} {_OpenInsightFilePattern}"), lines));
string weekOfYear = _Calendar.GetWeekOfYear(_Logistics.DateTimeFromSequence, CalendarWeekRule.FirstDay, DayOfWeek.Sunday).ToString("00");
FromIQS.Save(_OpenInsightApiECDirectory, _OpenInsightApiIFXDirectory, _Logistics, reportFullPath, logistics, descriptions.First(), lines, subGroupId, weekOfYear);
}
}
if (_IsEAFHosted && _FileConnectorConfiguration.FileScanningIntervalInSeconds > 0)
WaitForFileConsumption(dateTime, descriptions, isDummyRun, successDirectory, duplicateDirectory, collection, duplicateFile);
}
private Tuple<string, Test[], JsonElement[], List<FileInfo>> GetExtractResult(string reportFullPath, DateTime dateTime)
{
Tuple<string, Test[], JsonElement[], List<FileInfo>> results;
Tuple<string, string[], string[]> pdsf = ProcessDataStandardFormat.GetLogisticsColumnsAndBody(reportFullPath);
_Logistics = new Logistics(reportFullPath, pdsf.Item1);
SetFileParameterLotIDToLogisticsMID();
JsonElement[] jsonElements = ProcessDataStandardFormat.GetArray(pdsf);
// List<Shared.Properties.IDescription> descriptions = GetDuplicatorDescriptions(jsonElements);
List<json.Description> descriptions = json.ProcessData.GetDescriptions(jsonElements);
Test[] tests = (from l in descriptions select (Test)l.Test).ToArray();
if (_IsEAFHosted && _FileConnectorConfiguration.FileScanningIntervalInSeconds > 0)
SaveOpenInsightFile(reportFullPath, dateTime, pdsf.Item1, descriptions, tests);
results = new Tuple<string, Test[], JsonElement[], List<FileInfo>>(pdsf.Item1, tests, jsonElements, new List<FileInfo>());
return results;
}
}

View File

@ -0,0 +1,245 @@
using Adaptation.Shared;
using System;
using System.Data;
using System.Data.SqlClient;
using System.Globalization;
using System.IO;
using System.Linq;
using System.Text;
using System.Text.Json;
using System.Threading;
namespace Adaptation.FileHandlers.OpenInsight;
public class FromIQS
{
#nullable enable
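// Builds the SPC (spcepiworld) lookup that locates the sub group recorded for this run: sgrp_ext joined
// to its process, job, lot, part and test rows, filtered on reactor, PSN, RDS, MES entity and the run
// timestamp (epoch seconds shifted by -7 hours), returned as JSON via FOR JSON PATH and ordered so the
// best-populated sub group comes first.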
private static string GetCommandText(Logistics logistics, json.Description description, string dateTime, long? subGroupId)
{
StringBuilder result = new();
_ = result
.AppendLine(" select iq.ev_count, iq.cl_count, iq.sl_count, iq.se_sgrp, iq.se_sgtm, iq.se_tsno, iq.td_test, iq.pr_name, iq.jd_name, iq.pl_name, iq.pd_name, iq.td_name, iq.se_val ")
.AppendLine(" from ( ")
.AppendLine(" select ")
.AppendLine(" se.f_sgrp se_sgrp, ")
.AppendLine(" se.f_sgtm se_sgtm, ")
.AppendLine(" se.f_tsno se_tsno, ")
.AppendLine(" se.f_val se_val, ")
.AppendLine(" pr.f_name pr_name, ")
.AppendLine(" jd.f_name jd_name, ")
.AppendLine(" pl.f_name pl_name, ")
.AppendLine(" pd.f_name pd_name, ")
.AppendLine(" td.f_test td_test, ")
.AppendLine(" td.f_name td_name, ")
.AppendLine(" (select count(cl.f_part) ")
.AppendLine(" from [spcepiworld].[dbo].[ctrl_lim] cl ")
.AppendLine(" where cl.f_part = pd.f_part ")
.AppendLine(" and cl.f_test = td.f_test ")
.AppendLine(" ) cl_count, ")
.AppendLine(" (select count(sl.f_part) ")
.AppendLine(" from [spcepiworld].[dbo].[spec_lim] sl ")
.AppendLine(" where sl.f_part = pd.f_part ")
.AppendLine(" and sl.f_test = td.f_test ")
.AppendLine(" ) sl_count, ")
.AppendLine(" (select count(ev.f_evnt) ")
.AppendLine(" from [spcepiworld].[dbo].[evnt_inf] ev ")
.AppendLine(" where ev.f_prcs = pr.f_prcs ")
.AppendLine(" and ev.f_part = pd.f_part ")
.AppendLine(" and ev.f_sgtm = se.f_sgtm ")
.AppendLine(" ) ev_count ")
.AppendLine(" from [spcepiworld].[dbo].[sgrp_ext] se ")
.AppendLine(" join [spcepiworld].[dbo].[prcs_dat] pr ")
.AppendLine(" on se.f_prcs = pr.f_prcs ")
.AppendLine(" join [spcepiworld].[dbo].[job_dat] jd ")
.AppendLine(" on se.f_job = jd.f_job ")
.AppendLine(" join [spcepiworld].[dbo].[part_lot] pl ")
.AppendLine(" on se.f_lot = pl.f_lot ")
.AppendLine(" join [spcepiworld].[dbo].[part_dat] pd ")
.AppendLine(" on se.f_part = pd.f_part ")
.AppendLine(" join [spcepiworld].[dbo].[test_dat] td ")
.AppendLine(" on se.f_test = td.f_test ")
.AppendLine(" where se.f_flag = 0 ");
if (subGroupId is not null)
_ = result.Append(" and se.f_sgrp = ").Append(subGroupId).AppendLine(" ");
if (!string.IsNullOrEmpty(description.RDS))
_ = result.Append(" and pl.f_name = '").Append(description.RDS).AppendLine("' ");
_ = result
.Append(" and pr.f_name = '").Append(description.Reactor).AppendLine("' ")
.Append(" and pd.f_name = '").Append(description.PSN).AppendLine("' ")
.AppendLine(" and jd.f_name in ('SRP2100') ")
.Append(" and jd.f_name = '").Append(logistics.MesEntity).AppendLine("' ")
.Append(" and dateadd(HH, -7, (dateadd(SS, convert(bigint, se.f_sgtm), '19700101'))) = '").Append(dateTime).AppendLine("' ")
.AppendLine(" ) as iq ")
.AppendLine(" order by iq.ev_count desc, iq.cl_count desc, iq.sl_count desc, iq.se_sgrp, iq.se_tsno, iq.td_test ")
.AppendLine(" for json path ");
return result.ToString();
}
private static StringBuilder GetForJsonPath(string connectionString, string commandText)
{
StringBuilder stringBuilder = new();
using SqlConnection sqlConnection = new(connectionString);
sqlConnection.Open();
using SqlCommand sqlCommand = new(commandText, sqlConnection);
SqlDataReader sqlDataReader = sqlCommand.ExecuteReader(CommandBehavior.SequentialAccess);
while (sqlDataReader.Read())
_ = stringBuilder.Append(sqlDataReader.GetString(0));
return stringBuilder;
}
internal static (long?, int?, string) GetCommandText(string connectionString, Logistics logistics, json.Description description, long breakAfter, long preWait)
{
string dateTime;
int? count = null;
string commandText;
long? result = null;
string dateFormat = json.Description.GetDateFormat();
if (DateTime.TryParseExact(description.Date, dateFormat, CultureInfo.InvariantCulture, DateTimeStyles.None, out DateTime dateTimeParsed))
dateTime = dateTimeParsed.ToString("yyyy-MM-dd HH:mm:ss");
else if (DateTime.TryParse(description.Date, CultureInfo.InvariantCulture, DateTimeStyles.None, out dateTimeParsed))
dateTime = dateTimeParsed.ToString("yyyy-MM-dd HH:mm:ss");
else
dateTime = logistics.DateTimeFromSequence.ToString("yyyy-MM-dd HH:mm:ss");
commandText = GetCommandText(logistics, description, dateTime, subGroupId: null);
for (short i = 0; i < short.MaxValue; i++)
{
if (DateTime.Now.Ticks > preWait)
break;
Thread.Sleep(100);
}
StringBuilder stringBuilder = new();
for (short z = 0; z < short.MaxValue; z++)
{
stringBuilder = GetForJsonPath(connectionString, commandText);
if (stringBuilder.Length > 0)
{
long postBreakAfter = DateTime.Now.AddSeconds(5).Ticks;
for (short y = 0; y < short.MaxValue; y++)
{
if (DateTime.Now.Ticks > postBreakAfter)
break;
Thread.Sleep(250);
}
stringBuilder = GetForJsonPath(connectionString, commandText);
break;
}
if (DateTime.Now.Ticks > breakAfter)
// throw new Exception($"After {breakAfterSeconds} seconds, didn't find sub group id!");
break;
Thread.Sleep(250);
}
if (stringBuilder.Length == 0)
commandText = stringBuilder.ToString();
else
{
JsonElement[]? jsonElements = JsonSerializer.Deserialize<JsonElement[]>(stringBuilder.ToString());
if (jsonElements is null || !jsonElements.Any() || jsonElements[0].ValueKind != JsonValueKind.Object)
commandText = stringBuilder.ToString();
else
{
JsonProperty[] jsonProperties = jsonElements[0].EnumerateObject().ToArray();
if (!jsonProperties.Any() || jsonProperties[3].Name != "se_sgrp" || !long.TryParse(jsonProperties[3].Value.ToString(), out long subGroupId))
commandText = stringBuilder.ToString();
else
{
result = subGroupId;
if (jsonProperties.Any() && jsonProperties[0].Name == "ev_count" && int.TryParse(jsonProperties[0].Value.ToString(), out int evCount))
count = evCount;
}
}
}
return new(result, count, commandText);
}
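// GetJson merges the run header (MES entity, employee, layer, PSN, RDS, reactor, recipe, zone and the
// sequence ticks) with the key=value pairs parsed from each logistics line (the two-character key prefix
// is dropped) into one indented JSON document for the API archive.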
private static string GetJson(Logistics logistics, string logisticLines, json.Description description)
{
string result;
StringBuilder stringBuilder = new();
var @object = new
{
description.MesEntity,
description.Employee,
description.Layer,
description.PSN,
description.RDS,
description.Reactor,
description.Recipe,
description.Zone,
logistics.DateTimeFromSequence.Ticks
};
string[] pair;
string safeValue;
string[] segments;
string serializerValue;
foreach (string line in logisticLines.Split(new string[] { Environment.NewLine }, StringSplitOptions.None))
{
segments = line.Split('\t');
if (segments.Length < 2)
continue;
segments = segments[1].Split(';');
_ = stringBuilder.Append('{');
foreach (string segment in segments)
{
pair = segment.Split('=');
if (pair.Length != 2 || pair[0].Length < 3)
continue;
serializerValue = JsonSerializer.Serialize(pair[1]);
safeValue = serializerValue.Substring(1, serializerValue.Length - 2);
_ = stringBuilder.Append('"').Append(pair[0].Substring(2)).Append('"').Append(':').Append('"').Append(safeValue).Append('"').Append(',');
}
if (stringBuilder.Length > 0)
_ = stringBuilder.Remove(stringBuilder.Length - 1, 1);
_ = stringBuilder.Append('}').Append(',');
}
if (stringBuilder.Length > 0)
_ = stringBuilder.Remove(stringBuilder.Length - 1, 1);
_ = stringBuilder.Append(']').Append('}');
_ = stringBuilder.Insert(0, ",\"Logistics\":[");
string json = JsonSerializer.Serialize(@object);
_ = stringBuilder.Insert(0, json.Substring(0, json.Length - 1));
JsonElement? jsonElement = JsonSerializer.Deserialize<JsonElement>(stringBuilder.ToString());
result = jsonElement is null ? "{}" : JsonSerializer.Serialize(jsonElement, new JsonSerializerOptions { WriteIndented = true });
return result;
}
internal static void Save(string openInsightApiECDirectory, string openInsightApiIFXDirectory, Logistics logistics, string reportFullPath, string logisticLines, json.Description description, string lines, long? subGroupId, string weekOfYear)
{
string checkFile;
string fileName = Path.GetFileName(reportFullPath);
string json = GetJson(logistics, logisticLines, description);
string? ecPathRoot = Path.GetPathRoot(openInsightApiECDirectory);
string? ifxPathRoot = Path.GetPathRoot(openInsightApiIFXDirectory);
bool ecExists = ecPathRoot is not null && Directory.Exists(ecPathRoot);
bool ifxExists = ifxPathRoot is not null && Directory.Exists(ifxPathRoot);
string weekYear = $"{logistics.DateTimeFromSequence:yyyy}_Week_{weekOfYear}";
string ecDirectory = Path.Combine(openInsightApiECDirectory, weekYear, $"-{description.PSN}", $"-{description.Reactor}", $"-{description.RDS}", $"-{subGroupId}");
string ifxDirectory = Path.Combine(openInsightApiIFXDirectory, weekYear, $"-{description.PSN}", $"-{description.Reactor}", $"-{description.RDS}", $"-{subGroupId}");
if (ecExists && !Directory.Exists(ecDirectory))
_ = Directory.CreateDirectory(ecDirectory);
if (ifxExists && !Directory.Exists(ifxDirectory))
_ = Directory.CreateDirectory(ifxDirectory);
checkFile = Path.Combine(ecDirectory, fileName);
if (ecExists && !File.Exists(checkFile))
File.Copy(reportFullPath, checkFile);
checkFile = Path.Combine(ifxDirectory, fileName);
if (ifxExists && !File.Exists(checkFile))
File.Copy(reportFullPath, checkFile);
checkFile = Path.Combine(ecDirectory, $"{logistics.DateTimeFromSequence.Ticks}.txt");
if (ecExists && !File.Exists(checkFile))
File.WriteAllText(checkFile, lines);
checkFile = Path.Combine(ifxDirectory, $"{logistics.DateTimeFromSequence.Ticks}.txt");
if (ifxExists && !File.Exists(checkFile))
File.WriteAllText(checkFile, lines);
checkFile = Path.Combine(ecDirectory, $"{logistics.DateTimeFromSequence.Ticks}.json");
if (ecExists && !File.Exists(checkFile))
File.WriteAllText(checkFile, json);
checkFile = Path.Combine(ifxDirectory, $"{logistics.DateTimeFromSequence.Ticks}.json");
if (ifxExists && !File.Exists(checkFile))
File.WriteAllText(checkFile, json);
}
#nullable disable
}

View File

@ -0,0 +1,142 @@
using Adaptation.Eaf.Management.ConfigurationData.CellAutomation;
using Adaptation.Ifx.Eaf.EquipmentConnector.File.Configuration;
using Adaptation.Shared;
using Adaptation.Shared.Duplicator;
using Adaptation.Shared.Methods;
using Adaptation.Shared.Metrology;
using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Text.Json;
namespace Adaptation.FileHandlers.OpenInsightMetrologyViewer;
public class FileRead : Shared.FileRead, IFileRead
{
private readonly string _OpenInsightMetrologyViewerAPI;
public FileRead(ISMTP smtp, Dictionary<string, string> fileParameter, string cellInstanceName, int? connectionCount, string cellInstanceConnectionName, FileConnectorConfiguration fileConnectorConfiguration, string equipmentTypeName, string parameterizedModelObjectDefinitionType, IList<ModelObjectParameterDefinition> modelObjectParameters, string equipmentDictionaryName, Dictionary<string, List<long>> dummyRuns, Dictionary<long, List<string>> staticRuns, bool useCyclicalForDescription, bool isEAFHosted) :
base(new Description(), false, smtp, fileParameter, cellInstanceName, connectionCount, cellInstanceConnectionName, fileConnectorConfiguration, equipmentTypeName, parameterizedModelObjectDefinitionType, modelObjectParameters, equipmentDictionaryName, dummyRuns, staticRuns, useCyclicalForDescription, isEAFHosted: connectionCount is null)
{
_MinFileLength = 10;
_NullData = string.Empty;
_Logistics = new(this);
if (_FileParameter is null)
throw new Exception(cellInstanceConnectionName);
if (_ModelObjectParameterDefinitions is null)
throw new Exception(cellInstanceConnectionName);
if (!_IsDuplicator)
throw new Exception(cellInstanceConnectionName);
_OpenInsightMetrologyViewerAPI = GetPropertyValue(cellInstanceConnectionName, modelObjectParameters, "OpenInsight.MetrologyViewerAPI");
}
void IFileRead.Move(Tuple<string, Test[], JsonElement[], List<FileInfo>> extractResults, Exception exception)
{
bool isErrorFile = exception is not null;
if (!isErrorFile && !string.IsNullOrEmpty(_Logistics.ReportFullPath))
{
FileInfo fileInfo = new(_Logistics.ReportFullPath);
if (fileInfo.Exists && fileInfo.LastWriteTime < fileInfo.CreationTime)
File.SetLastWriteTime(_Logistics.ReportFullPath, fileInfo.CreationTime);
}
Move(extractResults);
}
void IFileRead.WaitForThread() => WaitForThread(thread: null, threadExceptions: null);
string IFileRead.GetEventDescription()
{
string result = _Description.GetEventDescription();
return result;
}
List<string> IFileRead.GetHeaderNames()
{
List<string> results = _Description.GetHeaderNames();
return results;
}
string[] IFileRead.Move(Tuple<string, Test[], JsonElement[], List<FileInfo>> extractResults, string to, string from, string resolvedFileLocation, Exception exception)
{
string[] results = Move(extractResults, to, from, resolvedFileLocation, exception);
return results;
}
JsonProperty[] IFileRead.GetDefault()
{
JsonProperty[] results = _Description.GetDefault(this, _Logistics);
return results;
}
Dictionary<string, string> IFileRead.GetDisplayNamesJsonElement()
{
Dictionary<string, string> results = _Description.GetDisplayNamesJsonElement(this);
return results;
}
List<IDescription> IFileRead.GetDescriptions(IFileRead fileRead, List<Test> tests, IProcessData processData)
{
List<IDescription> results = _Description.GetDescriptions(fileRead, _Logistics, tests, processData);
return results;
}
Tuple<string, Test[], JsonElement[], List<FileInfo>> IFileRead.GetExtractResult(string reportFullPath, string eventName)
{
Tuple<string, Test[], JsonElement[], List<FileInfo>> results;
if (string.IsNullOrEmpty(eventName))
throw new Exception();
_ReportFullPath = reportFullPath;
DateTime dateTime = DateTime.Now;
results = GetExtractResult(reportFullPath, dateTime);
if (results.Item3 is null)
results = new Tuple<string, Test[], JsonElement[], List<FileInfo>>(results.Item1, Array.Empty<Test>(), JsonSerializer.Deserialize<JsonElement[]>("[]"), results.Item4);
if (results.Item3.Length > 0 && _IsEAFHosted)
WritePDSF(this, results.Item3);
UpdateLastTicksDuration(DateTime.Now.Ticks - dateTime.Ticks);
return results;
}
Tuple<string, Test[], JsonElement[], List<FileInfo>> IFileRead.ReExtract()
{
Tuple<string, Test[], JsonElement[], List<FileInfo>> results;
List<string> headerNames = _Description.GetHeaderNames();
Dictionary<string, string> keyValuePairs = _Description.GetDisplayNamesJsonElement(this);
results = ReExtract(this, headerNames, keyValuePairs);
return results;
}
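// Posts the run to the OpenInsight Metrology Viewer API and, on success, caches the
// returned JSON under the logistics sequence in _StaticRuns, which appears to be consumed
// later by the attachments handler so the run is not posted twice.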
#pragma warning disable IDE0060
private void SendData(DateTime dateTime, List<json.Description> descriptions)
#pragma warning restore IDE0060
{
WSRequest wsRequest = new(this, _Logistics, descriptions);
(string json, WS.Results wsResults) = WS.SendData(_OpenInsightMetrologyViewerAPI, wsRequest);
if (!wsResults.Success)
throw new Exception(wsResults.ToString());
_Log.Debug(wsResults.HeaderID);
lock (_StaticRuns)
{
if (!_StaticRuns.ContainsKey(_Logistics.Sequence))
_StaticRuns.Add(_Logistics.Sequence, new());
_StaticRuns[_Logistics.Sequence].Add(json);
}
}
private Tuple<string, Test[], JsonElement[], List<FileInfo>> GetExtractResult(string reportFullPath, DateTime dateTime)
{
Tuple<string, Test[], JsonElement[], List<FileInfo>> results;
Tuple<string, string[], string[]> pdsf = ProcessDataStandardFormat.GetLogisticsColumnsAndBody(reportFullPath);
_Logistics = new Logistics(reportFullPath, pdsf.Item1);
SetFileParameterLotIDToLogisticsMID();
JsonElement[] jsonElements = ProcessDataStandardFormat.GetArray(pdsf);
List<json.Description> descriptions = json.ProcessData.GetDescriptions(jsonElements);
Test[] tests = (from l in descriptions select (Test)l.Test).ToArray();
if (_IsEAFHosted && _FileConnectorConfiguration.FileScanningIntervalInSeconds > 0)
SendData(dateTime, descriptions);
results = new Tuple<string, Test[], JsonElement[], List<FileInfo>>(pdsf.Item1, tests, jsonElements, new List<FileInfo>());
return results;
}
}

View File

@ -0,0 +1,96 @@
using Adaptation.Shared;
using Adaptation.Shared.Metrology;
using Adaptation.Shared.Properties;
using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Text.Json;
namespace Adaptation.FileHandlers.OpenInsightMetrologyViewer;
public class WSRequest
{
public bool SentToMetrology { get; set; }
public bool SentToSPC { get; set; }
//
public long Id { get; set; }
public string CellName { get; set; }
public string Date { get; set; }
public string FilePath { get; set; }
public string Layer { get; set; }
public string Operator { get; set; }
public string PSN { get; set; }
public string RDS { get; set; }
public string Reactor { get; set; }
public string Recipe { get; set; }
public string Title { get; set; }
public string UniqueId { get; set; }
public string Zone { get; set; }
public List<json.Detail> Details { get; protected set; }
[Obsolete("For json")] public WSRequest() { }
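// Maps the first description onto the header fields (Date, Layer, Operator, PSN, RDS,
// Reactor, Recipe, UniqueId, Zone) and every description onto a json.Detail entry;
// falls back to the logistics timestamp when no Date was parsed.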
#pragma warning disable IDE0060
internal WSRequest(IFileRead fileRead, Logistics logistics, List<json.Description> descriptions)
#pragma warning restore IDE0060
{
Id = 0;
FilePath = string.Empty;
CellName = logistics.MesEntity;
if (descriptions[0] is not json.Description x)
throw new Exception();
Details = new List<json.Detail>();
//Header
{
Date = x.Date;
Layer = x.Layer;
Operator = x.Employee;
PSN = x.PSN;
RDS = x.RDS;
Reactor = x.Reactor;
Recipe = x.Recipe;
UniqueId = x.UniqueId;
Zone = x.Zone;
}
json.Detail detail;
foreach (json.Description description in descriptions)
{
detail = new json.Detail
{
HeaderUniqueId = description.HeaderUniqueId,
UniqueId = description.UniqueId,
Depth = description.Depth,
Raw = description.Raw,
Edited = description.Edited,
Resistivity = description.Resistivity,
CD = description.CD,
};
Details.Add(detail);
}
Date ??= logistics.DateTimeFromSequence.ToString();
if (UniqueId is null && Details.Any())
throw new Exception();
}
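// When no cached JSON is supplied, the run is (re)posted to the Metrology Viewer API;
// the HeaderID from the response is then used to attach the single *.data.json file
// found in the match directory as "Data.json".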
#pragma warning disable IDE0060
internal static void PostOpenInsightMetrologyViewerAttachments(IFileRead fileRead, Logistics logistics, string openInsightMetrologyViewerAPI, DateTime dateTime, string json, List<json.Description> descriptions, string matchDirectory)
#pragma warning restore IDE0060
{
if (string.IsNullOrEmpty(json))
{
WSRequest wsRequest = new(fileRead, logistics, descriptions);
(json, WS.Results wsResults) = WS.SendData(openInsightMetrologyViewerAPI, wsRequest);
if (!wsResults.Success)
throw new Exception(wsResults.ToString());
}
WS.Results metrologyWSRequest = JsonSerializer.Deserialize<WS.Results>(json, new JsonSerializerOptions { PropertyNameCaseInsensitive = true });
long wsResultsHeaderID = metrologyWSRequest.HeaderID;
string[] jsonFiles = Directory.GetFiles(matchDirectory, "*.data.json", SearchOption.TopDirectoryOnly);
if (jsonFiles.Length != 1)
throw new Exception($"Invalid source file count for <{wsResultsHeaderID}>!{Environment.NewLine}{json}");
List<WS.Attachment> headerAttachments = new() { new WS.Attachment(descriptions[0].HeaderUniqueId, "Data.json", jsonFiles.First()) };
WS.AttachFiles(openInsightMetrologyViewerAPI, wsResultsHeaderID, headerAttachments, dataAttachments: null);
}
}

View File

@ -0,0 +1,149 @@
using Adaptation.Eaf.Management.ConfigurationData.CellAutomation;
using Adaptation.Ifx.Eaf.EquipmentConnector.File.Configuration;
using Adaptation.Shared;
using Adaptation.Shared.Duplicator;
using Adaptation.Shared.Methods;
using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Text.Json;
namespace Adaptation.FileHandlers.OpenInsightMetrologyViewerAttachments;
public class FileRead : Shared.FileRead, IFileRead
{
private readonly string _JobIdParentDirectory;
private readonly string _OpenInsightMetrologyViewerAPI;
public FileRead(ISMTP smtp, Dictionary<string, string> fileParameter, string cellInstanceName, int? connectionCount, string cellInstanceConnectionName, FileConnectorConfiguration fileConnectorConfiguration, string equipmentTypeName, string parameterizedModelObjectDefinitionType, IList<ModelObjectParameterDefinition> modelObjectParameters, string equipmentDictionaryName, Dictionary<string, List<long>> dummyRuns, Dictionary<long, List<string>> staticRuns, bool useCyclicalForDescription, bool isEAFHosted) :
base(new Description(), false, smtp, fileParameter, cellInstanceName, connectionCount, cellInstanceConnectionName, fileConnectorConfiguration, equipmentTypeName, parameterizedModelObjectDefinitionType, modelObjectParameters, equipmentDictionaryName, dummyRuns, staticRuns, useCyclicalForDescription, isEAFHosted: connectionCount is null)
{
_MinFileLength = 10;
_NullData = string.Empty;
_Logistics = new(this);
if (_FileParameter is null)
throw new Exception(cellInstanceConnectionName);
if (_ModelObjectParameterDefinitions is null)
throw new Exception(cellInstanceConnectionName);
if (!_IsDuplicator)
throw new Exception(cellInstanceConnectionName);
_JobIdParentDirectory = GetJobIdParentDirectory(_FileConnectorConfiguration.SourceFileLocation);
_OpenInsightMetrologyViewerAPI = GetPropertyValue(cellInstanceConnectionName, modelObjectParameters, "OpenInsight.MetrologyViewerAPI");
}
void IFileRead.Move(Tuple<string, Test[], JsonElement[], List<FileInfo>> extractResults, Exception exception)
{
bool isErrorFile = exception is not null;
if (!isErrorFile && !string.IsNullOrEmpty(_Logistics.ReportFullPath))
{
FileInfo fileInfo = new(_Logistics.ReportFullPath);
if (fileInfo.Exists && fileInfo.LastWriteTime < fileInfo.CreationTime)
File.SetLastWriteTime(_Logistics.ReportFullPath, fileInfo.CreationTime);
}
Move(extractResults);
}
void IFileRead.WaitForThread() => WaitForThread(thread: null, threadExceptions: null);
string IFileRead.GetEventDescription()
{
string result = _Description.GetEventDescription();
return result;
}
List<string> IFileRead.GetHeaderNames()
{
List<string> results = _Description.GetHeaderNames();
return results;
}
string[] IFileRead.Move(Tuple<string, Test[], JsonElement[], List<FileInfo>> extractResults, string to, string from, string resolvedFileLocation, Exception exception)
{
string[] results = Move(extractResults, to, from, resolvedFileLocation, exception);
return results;
}
JsonProperty[] IFileRead.GetDefault()
{
JsonProperty[] results = _Description.GetDefault(this, _Logistics);
return results;
}
Dictionary<string, string> IFileRead.GetDisplayNamesJsonElement()
{
Dictionary<string, string> results = _Description.GetDisplayNamesJsonElement(this);
return results;
}
List<IDescription> IFileRead.GetDescriptions(IFileRead fileRead, List<Test> tests, IProcessData processData)
{
List<IDescription> results = _Description.GetDescriptions(fileRead, _Logistics, tests, processData);
return results;
}
Tuple<string, Test[], JsonElement[], List<FileInfo>> IFileRead.GetExtractResult(string reportFullPath, string eventName)
{
Tuple<string, Test[], JsonElement[], List<FileInfo>> results;
if (string.IsNullOrEmpty(eventName))
throw new Exception();
_ReportFullPath = reportFullPath;
DateTime dateTime = DateTime.Now;
results = GetExtractResult(reportFullPath, dateTime);
if (results.Item3 is null)
results = new Tuple<string, Test[], JsonElement[], List<FileInfo>>(results.Item1, Array.Empty<Test>(), JsonSerializer.Deserialize<JsonElement[]>("[]"), results.Item4);
if (results.Item3.Length > 0 && _IsEAFHosted)
WritePDSF(this, results.Item3);
UpdateLastTicksDuration(DateTime.Now.Ticks - dateTime.Ticks);
return results;
}
Tuple<string, Test[], JsonElement[], List<FileInfo>> IFileRead.ReExtract()
{
Tuple<string, Test[], JsonElement[], List<FileInfo>> results;
List<string> headerNames = _Description.GetHeaderNames();
Dictionary<string, string> keyValuePairs = _Description.GetDisplayNamesJsonElement(this);
results = ReExtract(this, headerNames, keyValuePairs);
return results;
}
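// Looks up the JSON cached by the OpenInsightMetrologyViewer handler for this logistics
// sequence (empty when nothing was cached, removed once read) and forwards it, together
// with the matched in-process directory, to WSRequest.PostOpenInsightMetrologyViewerAttachments.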
#pragma warning disable IDE0060
private void PostOpenInsightMetrologyViewerAttachments(string reportFullPath, DateTime dateTime, List<json.Description> descriptions)
#pragma warning restore IDE0060
{
string jobIdDirectory = Path.Combine(_JobIdParentDirectory, _Logistics.JobID);
if (!Directory.Exists(jobIdDirectory))
_ = Directory.CreateDirectory(jobIdDirectory);
string json;
string[] matchDirectories = GetInProcessDirectory(jobIdDirectory);
if (!_StaticRuns.ContainsKey(_Logistics.Sequence))
json = string.Empty;
else
{
if (_StaticRuns[_Logistics.Sequence].Count != 1)
throw new Exception($"{nameof(_StaticRuns)} has too many values for {_Logistics.Sequence}!");
json = _StaticRuns[_Logistics.Sequence][0];
lock (_StaticRuns)
_ = _StaticRuns.Remove(_Logistics.Sequence);
}
OpenInsightMetrologyViewer.WSRequest.PostOpenInsightMetrologyViewerAttachments(this, _Logistics, _OpenInsightMetrologyViewerAPI, dateTime, json, descriptions, matchDirectories[0]);
}
private Tuple<string, Test[], JsonElement[], List<FileInfo>> GetExtractResult(string reportFullPath, DateTime dateTime)
{
Tuple<string, Test[], JsonElement[], List<FileInfo>> results;
Tuple<string, string[], string[]> pdsf = ProcessDataStandardFormat.GetLogisticsColumnsAndBody(reportFullPath);
_Logistics = new Logistics(reportFullPath, pdsf.Item1);
SetFileParameterLotIDToLogisticsMID();
JsonElement[] jsonElements = ProcessDataStandardFormat.GetArray(pdsf);
List<json.Description> descriptions = json.ProcessData.GetDescriptions(jsonElements);
Test[] tests = (from l in descriptions select (Test)l.Test).ToArray();
if (_IsEAFHosted && _FileConnectorConfiguration.FileScanningIntervalInSeconds > 0)
PostOpenInsightMetrologyViewerAttachments(reportFullPath, dateTime, descriptions);
results = new Tuple<string, Test[], JsonElement[], List<FileInfo>>(pdsf.Item1, tests, jsonElements, new List<FileInfo>());
return results;
}
}

View File

@ -0,0 +1,164 @@
using Adaptation.Eaf.Management.ConfigurationData.CellAutomation;
using Adaptation.Ifx.Eaf.EquipmentConnector.File.Configuration;
using Adaptation.Shared;
using Adaptation.Shared.Duplicator;
using Adaptation.Shared.Methods;
using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Text.Json;
namespace Adaptation.FileHandlers.Processed;
public class FileRead : Shared.FileRead, IFileRead
{
private readonly string _JobIdParentDirectory;
private readonly string _JobIdProcessParentDirectory;
public FileRead(ISMTP smtp, Dictionary<string, string> fileParameter, string cellInstanceName, int? connectionCount, string cellInstanceConnectionName, FileConnectorConfiguration fileConnectorConfiguration, string equipmentTypeName, string parameterizedModelObjectDefinitionType, IList<ModelObjectParameterDefinition> modelObjectParameters, string equipmentDictionaryName, Dictionary<string, List<long>> dummyRuns, Dictionary<long, List<string>> staticRuns, bool useCyclicalForDescription, bool isEAFHosted) :
base(new Description(), false, smtp, fileParameter, cellInstanceName, connectionCount, cellInstanceConnectionName, fileConnectorConfiguration, equipmentTypeName, parameterizedModelObjectDefinitionType, modelObjectParameters, equipmentDictionaryName, dummyRuns, staticRuns, useCyclicalForDescription, isEAFHosted: connectionCount is null)
{
_MinFileLength = 10;
_NullData = string.Empty;
_Logistics = new(this);
if (_FileParameter is null)
throw new Exception(cellInstanceConnectionName);
if (_ModelObjectParameterDefinitions is null)
throw new Exception(cellInstanceConnectionName);
if (!_IsDuplicator)
throw new Exception(cellInstanceConnectionName);
_JobIdParentDirectory = GetJobIdParentDirectory(_FileConnectorConfiguration.SourceFileLocation);
_JobIdProcessParentDirectory = GetJobIdParentDirectory(_FileConnectorConfiguration.TargetFileLocation);
}
void IFileRead.Move(Tuple<string, Test[], JsonElement[], List<FileInfo>> extractResults, Exception exception)
{
bool isErrorFile = exception is not null;
if (!isErrorFile && !string.IsNullOrEmpty(_Logistics.ReportFullPath))
{
FileInfo fileInfo = new(_Logistics.ReportFullPath);
if (fileInfo.Exists && fileInfo.LastWriteTime < fileInfo.CreationTime)
File.SetLastWriteTime(_Logistics.ReportFullPath, fileInfo.CreationTime);
}
Move(extractResults);
}
void IFileRead.WaitForThread() => WaitForThread(thread: null, threadExceptions: null);
string IFileRead.GetEventDescription()
{
string result = _Description.GetEventDescription();
return result;
}
List<string> IFileRead.GetHeaderNames()
{
List<string> results = _Description.GetHeaderNames();
return results;
}
string[] IFileRead.Move(Tuple<string, Test[], JsonElement[], List<FileInfo>> extractResults, string to, string from, string resolvedFileLocation, Exception exception)
{
string[] results = Move(extractResults, to, from, resolvedFileLocation, exception);
return results;
}
JsonProperty[] IFileRead.GetDefault()
{
JsonProperty[] results = _Description.GetDefault(this, _Logistics);
return results;
}
Dictionary<string, string> IFileRead.GetDisplayNamesJsonElement()
{
Dictionary<string, string> results = _Description.GetDisplayNamesJsonElement(this);
return results;
}
List<IDescription> IFileRead.GetDescriptions(IFileRead fileRead, List<Test> tests, IProcessData processData)
{
List<IDescription> results = _Description.GetDescriptions(fileRead, _Logistics, tests, processData);
return results;
}
Tuple<string, Test[], JsonElement[], List<FileInfo>> IFileRead.GetExtractResult(string reportFullPath, string eventName)
{
Tuple<string, Test[], JsonElement[], List<FileInfo>> results;
if (string.IsNullOrEmpty(eventName))
throw new Exception();
_ReportFullPath = reportFullPath;
DateTime dateTime = DateTime.Now;
results = GetExtractResult(reportFullPath, dateTime);
if (results.Item3 is null)
results = new Tuple<string, Test[], JsonElement[], List<FileInfo>>(results.Item1, Array.Empty<Test>(), JsonSerializer.Deserialize<JsonElement[]>("[]"), results.Item4);
if (results.Item3.Length > 0 && _IsEAFHosted)
WritePDSF(this, results.Item3);
UpdateLastTicksDuration(DateTime.Now.Ticks - dateTime.Ticks);
return results;
}
Tuple<string, Test[], JsonElement[], List<FileInfo>> IFileRead.ReExtract()
{
Tuple<string, Test[], JsonElement[], List<FileInfo>> results;
List<string> headerNames = _Description.GetHeaderNames();
Dictionary<string, string> keyValuePairs = _Description.GetDisplayNamesJsonElement(this);
results = ReExtract(this, headerNames, keyValuePairs);
return results;
}
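// Moves the matched in-process directory into the processed JobID tree (the new folder
// name is derived from the logistics sequence and timestamp), then drops a copy of the
// report and the serialized WSRequest JSON into a sequence sub-directory of that location.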
#pragma warning disable IDE0060
private void DirectoryMove(string reportFullPath, DateTime dateTime, List<json.Description> descriptions)
#pragma warning restore IDE0060
{
FileInfo fileInfo = new(reportFullPath);
string logisticsSequence = _Logistics.Sequence.ToString();
string jobIdDirectory = Path.Combine(_JobIdParentDirectory, _Logistics.JobID);
if (!Directory.Exists(jobIdDirectory))
_ = Directory.CreateDirectory(jobIdDirectory);
string[] matchDirectories = GetInProcessDirectory(jobIdDirectory);
if (matchDirectories.Length != 1)
throw new Exception("Didn't find directory by logistics sequence");
if (fileInfo.Exists && fileInfo.LastWriteTime < fileInfo.CreationTime)
File.SetLastWriteTime(reportFullPath, fileInfo.CreationTime);
OpenInsightMetrologyViewer.WSRequest wsRequest = new(this, _Logistics, descriptions);
JsonSerializerOptions jsonSerializerOptions = new() { WriteIndented = true };
string json = JsonSerializer.Serialize(wsRequest, wsRequest.GetType(), jsonSerializerOptions);
string directoryName = $"{Path.GetFileName(matchDirectories[0]).Split(new string[] { logisticsSequence }, StringSplitOptions.None)[0]}{_Logistics.DateTimeFromSequence:yyyy-MM-dd_hh;mm_tt_}{DateTime.Now.Ticks - _Logistics.Sequence}";
string destinationJobIdDirectory = Path.Combine(_JobIdProcessParentDirectory, _Logistics.JobID, directoryName);
string sequenceDirectory = Path.Combine(destinationJobIdDirectory, logisticsSequence);
string jsonFileName = Path.Combine(sequenceDirectory, $"{Path.GetFileNameWithoutExtension(reportFullPath)}.json");
Directory.Move(matchDirectories[0], destinationJobIdDirectory);
if (!Directory.Exists(sequenceDirectory))
_ = Directory.CreateDirectory(sequenceDirectory);
File.Copy(reportFullPath, Path.Combine(sequenceDirectory, Path.GetFileName(reportFullPath)), overwrite: true);
File.WriteAllText(jsonFileName, json);
}
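// Standard PDSF extraction; when EAF hosted, DirectoryMove relocates the matched
// in-process directory, and when not hosted the regenerated WSRequest JSON must match
// the historical .json next to the report or an exception is thrown.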
private Tuple<string, Test[], JsonElement[], List<FileInfo>> GetExtractResult(string reportFullPath, DateTime dateTime)
{
Tuple<string, Test[], JsonElement[], List<FileInfo>> results;
Tuple<string, string[], string[]> pdsf = ProcessDataStandardFormat.GetLogisticsColumnsAndBody(reportFullPath);
_Logistics = new Logistics(reportFullPath, pdsf.Item1);
SetFileParameterLotIDToLogisticsMID();
JsonElement[] jsonElements = ProcessDataStandardFormat.GetArray(pdsf);
List<json.Description> descriptions = json.ProcessData.GetDescriptions(jsonElements);
Test[] tests = (from l in descriptions select (Test)l.Test).ToArray();
results = new Tuple<string, Test[], JsonElement[], List<FileInfo>>(pdsf.Item1, tests, jsonElements, new List<FileInfo>());
if (_IsEAFHosted && _FileConnectorConfiguration.FileScanningIntervalInSeconds > 0)
DirectoryMove(reportFullPath, dateTime, descriptions);
else if (!_IsEAFHosted)
{
OpenInsightMetrologyViewer.WSRequest wsRequest = new(this, _Logistics, descriptions);
JsonSerializerOptions jsonSerializerOptions = new() { WriteIndented = true };
string json = JsonSerializer.Serialize(wsRequest, wsRequest.GetType(), jsonSerializerOptions);
string jsonFileName = Path.ChangeExtension(reportFullPath, ".json");
string historicalText = File.ReadAllText(jsonFileName);
if (json != historicalText)
throw new Exception("File doesn't match historical!");
}
return results;
}
}

View File

@ -0,0 +1,132 @@
using Adaptation.Eaf.Management.ConfigurationData.CellAutomation;
using Adaptation.Ifx.Eaf.EquipmentConnector.File.Configuration;
using Adaptation.Shared;
using Adaptation.Shared.Duplicator;
using Adaptation.Shared.Methods;
using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Text.Json;
namespace Adaptation.FileHandlers.SPaCe;
public class FileRead : Shared.FileRead, IFileRead
{
public FileRead(ISMTP smtp, Dictionary<string, string> fileParameter, string cellInstanceName, int? connectionCount, string cellInstanceConnectionName, FileConnectorConfiguration fileConnectorConfiguration, string equipmentTypeName, string parameterizedModelObjectDefinitionType, IList<ModelObjectParameterDefinition> modelObjectParameters, string equipmentDictionaryName, Dictionary<string, List<long>> dummyRuns, Dictionary<long, List<string>> staticRuns, bool useCyclicalForDescription, bool isEAFHosted) :
base(new Description(), false, smtp, fileParameter, cellInstanceName, connectionCount, cellInstanceConnectionName, fileConnectorConfiguration, equipmentTypeName, parameterizedModelObjectDefinitionType, modelObjectParameters, equipmentDictionaryName, dummyRuns, staticRuns, useCyclicalForDescription, isEAFHosted: connectionCount is null)
{
_MinFileLength = 10;
_NullData = string.Empty;
_Logistics = new(this);
if (_FileParameter is null)
throw new Exception(cellInstanceConnectionName);
if (_ModelObjectParameterDefinitions is null)
throw new Exception(cellInstanceConnectionName);
if (!_IsDuplicator)
throw new Exception(cellInstanceConnectionName);
}
void IFileRead.Move(Tuple<string, Test[], JsonElement[], List<FileInfo>> extractResults, Exception exception)
{
bool isErrorFile = exception is not null;
if (!isErrorFile && !string.IsNullOrEmpty(_Logistics.ReportFullPath))
{
FileInfo fileInfo = new(_Logistics.ReportFullPath);
if (fileInfo.Exists && fileInfo.LastWriteTime < fileInfo.CreationTime)
File.SetLastWriteTime(_Logistics.ReportFullPath, fileInfo.CreationTime);
}
Move(extractResults);
}
void IFileRead.WaitForThread() => WaitForThread(thread: null, threadExceptions: null);
string IFileRead.GetEventDescription()
{
string result = _Description.GetEventDescription();
return result;
}
List<string> IFileRead.GetHeaderNames()
{
List<string> results = _Description.GetHeaderNames();
return results;
}
string[] IFileRead.Move(Tuple<string, Test[], JsonElement[], List<FileInfo>> extractResults, string to, string from, string resolvedFileLocation, Exception exception)
{
string[] results = Move(extractResults, to, from, resolvedFileLocation, exception);
return results;
}
JsonProperty[] IFileRead.GetDefault()
{
JsonProperty[] results = _Description.GetDefault(this, _Logistics);
return results;
}
Dictionary<string, string> IFileRead.GetDisplayNamesJsonElement()
{
Dictionary<string, string> results = _Description.GetDisplayNamesJsonElement(this);
return results;
}
List<IDescription> IFileRead.GetDescriptions(IFileRead fileRead, List<Test> tests, IProcessData processData)
{
List<IDescription> results = _Description.GetDescriptions(fileRead, _Logistics, tests, processData);
return results;
}
Tuple<string, Test[], JsonElement[], List<FileInfo>> IFileRead.GetExtractResult(string reportFullPath, string eventName)
{
Tuple<string, Test[], JsonElement[], List<FileInfo>> results;
if (string.IsNullOrEmpty(eventName))
throw new Exception();
_ReportFullPath = reportFullPath;
DateTime dateTime = DateTime.Now;
results = GetExtractResult(reportFullPath, dateTime);
if (results.Item3 is null)
results = new Tuple<string, Test[], JsonElement[], List<FileInfo>>(results.Item1, Array.Empty<Test>(), JsonSerializer.Deserialize<JsonElement[]>("[]"), results.Item4);
if (results.Item3.Length > 0 && _IsEAFHosted)
WritePDSF(this, results.Item3);
UpdateLastTicksDuration(DateTime.Now.Ticks - dateTime.Ticks);
return results;
}
Tuple<string, Test[], JsonElement[], List<FileInfo>> IFileRead.ReExtract()
{
Tuple<string, Test[], JsonElement[], List<FileInfo>> results;
List<string> headerNames = _Description.GetHeaderNames();
Dictionary<string, string> keyValuePairs = _Description.GetDisplayNamesJsonElement(this);
results = ReExtract(this, headerNames, keyValuePairs);
return results;
}
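// Copies the report into the connector's TargetFileLocation and then calls the shared
// WaitForFileConsumption helper, presumably to wait until the duplicate has been picked up.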
private void FileCopy<T>(string reportFullPath, DateTime dateTime, List<T> descriptions) where T : Shared.Properties.IDescription
{
bool isDummyRun = false;
string successDirectory = string.Empty;
List<(Shared.Properties.IScopeInfo, string)> collection = new();
string duplicateDirectory = _FileConnectorConfiguration.TargetFileLocation;
string duplicateFile = Path.Combine(duplicateDirectory, Path.GetFileName(reportFullPath));
File.Copy(reportFullPath, duplicateFile, overwrite: true);
WaitForFileConsumption(dateTime, descriptions, isDummyRun, successDirectory, duplicateDirectory, collection, duplicateFile);
}
private Tuple<string, Test[], JsonElement[], List<FileInfo>> GetExtractResult(string reportFullPath, DateTime dateTime)
{
Tuple<string, Test[], JsonElement[], List<FileInfo>> results;
Tuple<string, string[], string[]> pdsf = ProcessDataStandardFormat.GetLogisticsColumnsAndBody(reportFullPath);
_Logistics = new Logistics(reportFullPath, pdsf.Item1);
SetFileParameterLotIDToLogisticsMID();
JsonElement[] jsonElements = ProcessDataStandardFormat.GetArray(pdsf);
List<Shared.Properties.IDescription> descriptions = GetDuplicatorDescriptions(jsonElements);
Test[] tests = (from l in descriptions select (Test)l.Test).ToArray();
if (_IsEAFHosted && _FileConnectorConfiguration.FileScanningIntervalInSeconds > 0)
FileCopy(reportFullPath, dateTime, descriptions);
results = new Tuple<string, Test[], JsonElement[], List<FileInfo>>(pdsf.Item1, tests, jsonElements, new List<FileInfo>());
return results;
}
}

View File

@ -0,0 +1,86 @@
using System.IO;
using System.Linq;
namespace Adaptation.FileHandlers.csv;
public class CSV
{
#nullable enable
#pragma warning disable CA1834
public string? FileVersion { get; set; }
public Info? Info { get; set; }
public Setup? Setup { get; set; }
public LayerHeader? LayerHeader { get; set; }
public ProfileHeader? ProfileHeader { get; set; }
public Calibration? Calibration { get; set; }
public RawData? RawData { get; set; }
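// Splits the export into sections by scanning for the --INFO--, --SETUP--, --LAYERS--,
// --PROFILES--, --CALIBRATION-- and --RAWDATA-- markers, then delegates each section to
// its parser; a section parser returns null when its expected header is not found.
// Latin-1 encoding is used, presumably so the µ/³/® characters in the headers survive.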
internal static CSV GetCSV(string path)
{
CSV result;
int? endInfo = null;
int? endSetup = null;
int? endLayers = null;
int? startInfo = null;
int? startSetup = null;
int? endProfiles = null;
int? startLayers = null;
int? startRawData = null;
int? startProfiles = null;
int? endCalibration = null;
int? startCalibration = null;
#if NET
string[] lines = File.ReadAllLines(path, System.Text.Encoding.Latin1); // µ³®
#else
string[] lines = File.ReadAllLines(path, System.Text.Encoding.GetEncoding("ISO-8859-1")); // µ³®
#endif
string? fileVersion = !lines.Any() ? null : GetFileVersion(lines.First());
for (int i = 1; i < lines.Length; i++)
{
if (lines[i].StartsWith("--INFO--"))
startInfo = i + 1;
else if (lines[i].StartsWith("--SETUP--"))
(endInfo, startSetup) = (i, i + 1);
else if (lines[i].StartsWith("--LAYERS--"))
(endSetup, startLayers) = (i, i + 1);
else if (lines[i].StartsWith("--PROFILES--"))
(endLayers, startProfiles) = (i, i + 1);
else if (lines[i].StartsWith("--CALIBRATION--"))
(endProfiles, startCalibration) = (i, i + 1);
else if (lines[i].StartsWith("--RAWDATA--"))
(endCalibration, startRawData) = (i, i + 1);
}
RawData? rawData = startRawData is null ? null : RawData.GetRawData(lines, startRawData.Value, lines.Length);
Info? info = startInfo is null || endInfo is null ? null : Info.GetInfo(lines, startInfo.Value, endInfo.Value);
Setup? setup = startSetup is null || endSetup is null ? null : Setup.GetSetup(lines, startSetup.Value, endSetup.Value);
LayerHeader? layerHeader = startLayers is null || endLayers is null ? null : LayerHeader.GetLayerHeader(lines, startLayers.Value, endLayers.Value);
ProfileHeader? profileHeader = startProfiles is null || endProfiles is null ? null : ProfileHeader.GetProfileHeader(lines, startProfiles.Value, endProfiles.Value);
Calibration? calibration = startCalibration is null || endCalibration is null ? null : Calibration.GetCalibration(lines, startCalibration.Value, endCalibration.Value);
result = new()
{
FileVersion = fileVersion,
Info = info,
Setup = setup,
LayerHeader = layerHeader,
ProfileHeader = profileHeader,
Calibration = calibration,
RawData = rawData,
};
return result;
}
private static string GetFileVersion(string line)
{
string result;
string[] segments = line.Split(',');
if (segments.Length < 2)
result = string.Empty;
else
result = segments.Last();
return result;
}
}

View File

@ -0,0 +1,133 @@
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
namespace Adaptation.FileHandlers.csv;
public class Calibration
{
public string NumberOfCalibrationSets { get; set; }
public List<DataSet> DataSets { get; set; }
#nullable enable
#pragma warning disable CA1834
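// Parses the --CALIBRATION-- section: the "Number of Calibration Sets" header, then one
// DataSet block per "DataSet:" marker (operator/probe/plate metadata) followed by its
// "Resistivity(ohm-cm)" rows, which become Position entries. Returns null whenever the
// expected headers are not matched.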
internal static Calibration? GetCalibration(string[] lines, int start, int stop)
{
Calibration? result;
string first;
DataSet dataSet;
Position position;
string[] segments;
int? thirdStart = null;
int? secondStart = null;
List<Position> positions;
List<string> values = new();
List<DataSet> dataSets = new();
StringBuilder stringBuilder = new();
for (int i = start; i < stop; i++)
{
segments = lines[i].Split(new string[] { "," }, StringSplitOptions.None);
first = segments.First();
if (first == "DataSet:")
{
secondStart = i + 1;
break;
}
_ = stringBuilder.Append(first).Append(",");
if (segments.Length > 1)
values.Add(segments[1]);
else
values.Add(string.Empty);
}
string header = "Number of Calibration Sets,";
if (secondStart is null || stringBuilder.Length != header.Length || stringBuilder.ToString() != header)
result = null;
else
{
result = new()
{
NumberOfCalibrationSets = values[0],
DataSets = dataSets,
};
for (int x = 0; x < int.MaxValue; x++)
{
values.Clear();
_ = stringBuilder.Clear();
if (secondStart is null)
break;
for (int i = secondStart.Value; i < stop; i++)
{
segments = lines[i].Split(new string[] { "," }, StringSplitOptions.None);
first = segments.First();
if (first == "Resistivity(ohm-cm)")
{
thirdStart = i + 1;
break;
}
if (first == " Sample Set:")
continue;
_ = stringBuilder.Append(first).Append(",");
if (segments.Length > 1)
values.Add(segments[1]);
else
values.Add(string.Empty);
}
secondStart = null;
header = "Operator,Date & Time,Finish,Orientation,North Probe ID,South Probe ID,Polarity,Contact Radius (µm),Probe Spacing (µm),Load (gm),X Step (µm),Name,Plate ID,Type,Points per Sample,Number of Pairs,";
if (thirdStart is null || stringBuilder.Length != header.Length || stringBuilder.ToString() != header)
result = null;
else
{
positions = new();
dataSet = new()
{
Operator = values[0],
DateTime = values[1],
Finish = values[2],
Orientation = values[3],
NorthProbeID = values[4],
SouthProbeID = values[5],
Polarity = values[6],
ContactRadius = values[7],
ProbeSpacing = values[8],
Load = values[9],
XStep = values[10],
Name = values[11],
PlateId = values[12],
Type = values[13],
PointsPerSample = values[14],
NumberOfPairs = values[15],
Positions = positions,
};
dataSets.Add(dataSet);
for (int i = thirdStart.Value; i < stop; i++)
{
segments = lines[i].Split(new string[] { "," }, StringSplitOptions.None);
first = segments.First();
if (first == "DataSet:")
{
secondStart = i + 1;
break;
}
if (segments.Length < 5)
continue;
position = new()
{
Resistivity = segments[0],
Resistance = segments[1],
PercentStandardDeviation = segments[2],
Number = segments[3],
Name = segments[4],
};
positions.Add(position);
}
}
}
}
return result;
}
}

View File

@ -0,0 +1,27 @@
using System.Collections.Generic;
namespace Adaptation.FileHandlers.csv;
public class DataSet
{
public string Operator { get; set; }
public string DateTime { get; set; }
public string Finish { get; set; }
public string Orientation { get; set; }
public string NorthProbeID { get; set; }
public string SouthProbeID { get; set; }
public string Polarity { get; set; }
public string ContactRadius { get; set; }
public string ProbeSpacing { get; set; }
public string Load { get; set; }
public string XStep { get; set; }
// public string SampleSet { get; set; }
public string Name { get; set; }
public string PlateId { get; set; }
public string Type { get; set; }
public string PointsPerSample { get; set; }
public string NumberOfPairs { get; set; }
public List<Position> Positions { get; set; }
}

View File

@ -0,0 +1,194 @@
using Adaptation.Shared;
using Adaptation.Shared.Methods;
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text.Json;
namespace Adaptation.FileHandlers.csv;
public class Description : IDescription, Shared.Properties.IDescription
{
public int Test { get; set; }
public int Count { get; set; }
public int Index { get; set; }
//
public string EventName { get; set; }
public string NullData { get; set; }
public string JobID { get; set; }
public string Sequence { get; set; }
public string MesEntity { get; set; }
public string ReportFullPath { get; set; }
public string ProcessJobID { get; set; }
public string MID { get; set; }
//
public string RDS { get; set; }
//
public string Path { get; set; }
string IDescription.GetEventDescription() => "File Has been read and parsed";
List<string> IDescription.GetNames(IFileRead fileRead, Logistics logistics)
{
List<string> results = new();
IDescription description = GetDefault(fileRead, logistics);
string json = JsonSerializer.Serialize(description, description.GetType());
object @object = JsonSerializer.Deserialize<object>(json);
if (@object is not JsonElement jsonElement)
throw new Exception();
foreach (JsonProperty jsonProperty in jsonElement.EnumerateObject())
results.Add(jsonProperty.Name);
return results;
}
List<string> IDescription.GetDetailNames()
{
List<string> results = new()
{
nameof(Layer),
nameof(RDS),
};
return results;
}
List<string> IDescription.GetHeaderNames()
{
List<string> results = new()
{
nameof(RDS),
};
return results;
}
IDescription IDescription.GetDisplayNames()
{
Description result = GetDisplayNames();
return result;
}
List<string> IDescription.GetParameterNames()
{
List<string> results = new()
{ };
return results;
}
JsonProperty[] IDescription.GetDefault(IFileRead fileRead, Logistics logistics)
{
JsonProperty[] results;
IDescription description = GetDefault(fileRead, logistics);
string json = JsonSerializer.Serialize(description, description.GetType());
object @object = JsonSerializer.Deserialize<object>(json);
results = ((JsonElement)@object).EnumerateObject().ToArray();
return results;
}
List<string> IDescription.GetPairedParameterNames()
{
List<string> results = new();
return results;
}
List<string> IDescription.GetIgnoreParameterNames(Test test)
{
List<string> results = new();
return results;
}
IDescription IDescription.GetDefaultDescription(IFileRead fileRead, Logistics logistics)
{
Description result = GetDefault(fileRead, logistics);
return result;
}
Dictionary<string, string> IDescription.GetDisplayNamesJsonElement(IFileRead fileRead)
{
Dictionary<string, string> results = new();
IDescription description = GetDisplayNames();
string json = JsonSerializer.Serialize(description, description.GetType());
JsonElement jsonElement = JsonSerializer.Deserialize<JsonElement>(json);
foreach (JsonProperty jsonProperty in jsonElement.EnumerateObject())
{
if (!results.ContainsKey(jsonProperty.Name))
results.Add(jsonProperty.Name, string.Empty);
if (jsonProperty.Value is JsonElement jsonPropertyValue)
results[jsonProperty.Name] = jsonPropertyValue.ToString();
}
return results;
}
List<IDescription> IDescription.GetDescriptions(IFileRead fileRead, Logistics logistics, List<Test> tests, IProcessData iProcessData)
{
List<IDescription> results = new();
if (iProcessData is null || !iProcessData.Details.Any() || iProcessData is not ProcessData _)
results.Add(GetDefault(fileRead, logistics));
else
{
string nullData;
Description description;
object configDataNullData = fileRead.NullData;
if (configDataNullData is null)
nullData = string.Empty;
else
nullData = configDataNullData.ToString();
for (int i = 0; i < iProcessData.Details.Count; i++)
{
if (iProcessData.Details[i] is not string path)
continue;
description = new Description
{
Test = (int)tests[i],
Count = tests.Count,
Index = i,
//
EventName = fileRead.EventName,
NullData = nullData,
JobID = fileRead.CellInstanceName,
Sequence = logistics.Sequence.ToString(),
MesEntity = logistics.MesEntity,
ReportFullPath = logistics.ReportFullPath,
ProcessJobID = logistics.ProcessJobID,
MID = logistics.MID,
//
Path = path,
};
results.Add(description);
}
}
return results;
}
private static Description GetDisplayNames()
{
Description result = new();
return result;
}
private Description GetDefault(IFileRead fileRead, Logistics logistics)
{
Description result = new()
{
Test = -1,
Count = 0,
Index = -1,
//
EventName = fileRead.EventName,
NullData = fileRead.NullData,
JobID = fileRead.CellInstanceName,
Sequence = logistics.Sequence.ToString(),
MesEntity = fileRead.MesEntity,
ReportFullPath = logistics.ReportFullPath,
ProcessJobID = logistics.ProcessJobID,
MID = logistics.MID,
//
RDS = nameof(RDS),
//
Path = nameof(Path),
};
return result;
}
internal static string GetDateFormat() => "MM/dd/yyyy hh:mm:ss tt";
}

View File

@ -0,0 +1,25 @@
namespace Adaptation.FileHandlers.csv;
public class Descriptor
{
public string Employee { get; private set; }
public string Layer { get; private set; }
public string PSN { get; private set; }
public string RDS { get; private set; }
public string Reactor { get; private set; }
public string Wafer { get; private set; }
public string Zone { get; private set; }
public Descriptor(string employee, string layer, string psn, string rds, string reactor, string wafer, string zone)
{
Employee = employee;
Layer = layer;
PSN = psn;
RDS = rds;
Reactor = reactor;
Wafer = wafer;
Zone = zone;
}
}

View File

@ -0,0 +1,115 @@
using Adaptation.Eaf.Management.ConfigurationData.CellAutomation;
using Adaptation.Ifx.Eaf.EquipmentConnector.File.Configuration;
using Adaptation.Shared;
using Adaptation.Shared.Methods;
using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Text.Json;
namespace Adaptation.FileHandlers.csv;
public class FileRead : Shared.FileRead, IFileRead
{
private long? _TickOffset;
public FileRead(ISMTP smtp, Dictionary<string, string> fileParameter, string cellInstanceName, int? connectionCount, string cellInstanceConnectionName, FileConnectorConfiguration fileConnectorConfiguration, string equipmentTypeName, string parameterizedModelObjectDefinitionType, IList<ModelObjectParameterDefinition> modelObjectParameters, string equipmentDictionaryName, Dictionary<string, List<long>> dummyRuns, Dictionary<long, List<string>> staticRuns, bool useCyclicalForDescription, bool isEAFHosted) :
base(new Description(), true, smtp, fileParameter, cellInstanceName, connectionCount, cellInstanceConnectionName, fileConnectorConfiguration, equipmentTypeName, parameterizedModelObjectDefinitionType, modelObjectParameters, equipmentDictionaryName, dummyRuns, staticRuns, useCyclicalForDescription, isEAFHosted: connectionCount is null)
{
_MinFileLength = 10;
_NullData = string.Empty;
_Logistics = new(this);
if (_FileParameter is null)
throw new Exception(cellInstanceConnectionName);
if (_ModelObjectParameterDefinitions is null)
throw new Exception(cellInstanceConnectionName);
if (_IsDuplicator)
throw new Exception(cellInstanceConnectionName);
}
void IFileRead.Move(Tuple<string, Test[], JsonElement[], List<FileInfo>> extractResults, Exception exception) => Move(extractResults);
void IFileRead.WaitForThread() => WaitForThread(thread: null, threadExceptions: null);
string IFileRead.GetEventDescription()
{
string result = _Description.GetEventDescription();
return result;
}
List<string> IFileRead.GetHeaderNames()
{
List<string> results = _Description.GetHeaderNames();
return results;
}
string[] IFileRead.Move(Tuple<string, Test[], JsonElement[], List<FileInfo>> extractResults, string to, string from, string resolvedFileLocation, Exception exception)
{
string[] results = Move(extractResults, to, from, resolvedFileLocation, exception);
return results;
}
JsonProperty[] IFileRead.GetDefault()
{
JsonProperty[] results = _Description.GetDefault(this, _Logistics);
return results;
}
Dictionary<string, string> IFileRead.GetDisplayNamesJsonElement()
{
Dictionary<string, string> results = _Description.GetDisplayNamesJsonElement(this);
return results;
}
List<IDescription> IFileRead.GetDescriptions(IFileRead fileRead, List<Test> tests, IProcessData processData)
{
List<IDescription> results = _Description.GetDescriptions(fileRead, _Logistics, tests, processData);
return results;
}
Tuple<string, Test[], JsonElement[], List<FileInfo>> IFileRead.GetExtractResult(string reportFullPath, string eventName)
{
Tuple<string, Test[], JsonElement[], List<FileInfo>> results;
if (string.IsNullOrEmpty(eventName))
throw new Exception();
_ReportFullPath = reportFullPath;
DateTime dateTime = DateTime.Now;
results = GetExtractResult(reportFullPath, dateTime);
if (results.Item3 is null)
results = new Tuple<string, Test[], JsonElement[], List<FileInfo>>(results.Item1, Array.Empty<Test>(), JsonSerializer.Deserialize<JsonElement[]>("[]"), results.Item4);
if (results.Item3.Length > 0 && _IsEAFHosted)
WritePDSF(this, results.Item3);
UpdateLastTicksDuration(DateTime.Now.Ticks - dateTime.Ticks);
return results;
}
Tuple<string, Test[], JsonElement[], List<FileInfo>> IFileRead.ReExtract()
{
Tuple<string, Test[], JsonElement[], List<FileInfo>> results;
List<string> headerNames = _Description.GetHeaderNames();
Dictionary<string, string> keyValuePairs = _Description.GetDisplayNamesJsonElement(this);
results = ReExtract(this, headerNames, keyValuePairs);
return results;
}
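// Unlike the duplicator handlers above, this connector parses the raw CSV itself: the
// tick offset between file time and wall clock is captured once, Logistics is rebuilt
// from the file, and ProcessData produces the results (files shorter than _MinFileLength
// are only added to the file collection).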
private Tuple<string, Test[], JsonElement[], List<FileInfo>> GetExtractResult(string reportFullPath, DateTime dateTime)
{
Tuple<string, Test[], JsonElement[], List<FileInfo>> results = new(string.Empty, null, null, new List<FileInfo>());
_TickOffset ??= new FileInfo(reportFullPath).LastWriteTime.Ticks - dateTime.Ticks;
_Logistics = new Logistics(this, _TickOffset.Value, reportFullPath, useSplitForMID: true);
SetFileParameterLotIDToLogisticsMID();
if (_Logistics.FileInfo.Length < _MinFileLength)
results.Item4.Add(_Logistics.FileInfo);
else
{
IProcessData iProcessData = new ProcessData(this, _Logistics, results.Item4, tickOffset: _TickOffset.Value);
if (!iProcessData.Details.Any())
throw new Exception(string.Concat("B) No Data - ", dateTime.Ticks));
results = iProcessData.GetResults(this, _Logistics, results.Item4);
}
return results;
}
}

View File

@ -0,0 +1,73 @@
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
namespace Adaptation.FileHandlers.csv;
public class Info
{
public string Operator { get; set; }
public string SampleName { get; set; }
public string SoftwareVersion { get; set; }
public string DateTime { get; set; }
public string SystemId { get; set; }
public string SystemSite { get; set; }
public string SamplePosition { get; set; }
public string Units { get; set; }
public string CommentLength { get; set; }
public List<string> Comments { get; set; }
#nullable enable
#pragma warning disable CA1834
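// Parses the --INFO-- section: fixed key/value header lines up to the "Comment:" marker,
// after which every remaining line is kept verbatim as a comment. Returns null when the
// header sequence does not match the expected layout.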
internal static Info? GetInfo(string[] lines, int start, int stop)
{
Info? result;
string first;
string[] segments;
int? secondStart = null;
List<string> values = new();
List<string> comments = new();
StringBuilder stringBuilder = new();
for (int i = start; i < stop; i++)
{
segments = lines[i].Split(new string[] { "," }, StringSplitOptions.None);
first = segments.First();
if (first == "Comment:")
{
secondStart = i + 1;
break;
}
_ = stringBuilder.Append(first).Append(",");
if (segments.Length > 1)
values.Add(segments[1]);
else
values.Add(string.Empty);
}
string header = "Operator,Sample Name,Software Version,Date & Time,System ID,System Site,Sample Position,Units,Comment Length,";
if (secondStart is null || stringBuilder.Length != header.Length || stringBuilder.ToString() != header)
result = null;
else
{
result = new()
{
Operator = values[0],
SampleName = values[1],
SoftwareVersion = values[2],
DateTime = values[3],
SystemId = values[4],
SystemSite = values[5],
SamplePosition = values[6],
Units = values[7],
CommentLength = values[8],
Comments = comments,
};
for (int i = secondStart.Value; i < stop; i++)
comments.Add(lines[i]);
}
return result;
}
}

View File

@ -0,0 +1,22 @@
namespace Adaptation.FileHandlers.csv;
public class Layer
{
public string FirstPoint { get; set; }
public string LastPoint { get; set; }
public string Type { get; set; }
public string Smoothing { get; set; }
public string Apply { get; set; }
public string SOrder { get; set; }
public string GOrder { get; set; }
public string Correction { get; set; }
public string Conversion { get; set; }
public string JunctionOption { get; set; }
public string JunctionConstant { get; set; }
public string CurrentDensity { get; set; }
public string M1M2Tolerance { get; set; }
public string Sheet { get; set; }
public string Dose { get; set; }
}

View File

@ -0,0 +1,81 @@
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
namespace Adaptation.FileHandlers.csv;
public class LayerHeader
{
public string NumberOfLayers { get; set; }
public List<Layer> Layers { get; set; }
#nullable enable
#pragma warning disable CA1834
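// Parses the --LAYERS-- section: the "Number of Layers" header followed by one
// 15-column row per layer. Returns null when the header does not match.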
internal static LayerHeader? GetLayerHeader(string[] lines, int start, int stop)
{
LayerHeader? result;
Layer layer;
string first;
string[] segments;
int? secondStart = null;
List<Layer> layerCollection = new();
List<string> values = new();
StringBuilder stringBuilder = new();
for (int i = start; i < stop; i++)
{
segments = lines[i].Split(new string[] { "," }, StringSplitOptions.None);
first = segments.First();
if (first == "Layer")
{
secondStart = i + 1;
break;
}
_ = stringBuilder.Append(first).Append(",");
if (segments.Length > 1)
values.Add(segments[1]);
else
values.Add(string.Empty);
}
string header = "Number of Layers,";
if (secondStart is null || stringBuilder.Length != header.Length || stringBuilder.ToString() != header)
result = null;
else
{
result = new()
{
NumberOfLayers = values[0],
Layers = layerCollection,
};
for (int i = secondStart.Value; i < stop; i++)
{
segments = lines[i].Split(new string[] { "," }, StringSplitOptions.None);
if (segments.Length < 15)
continue;
layer = new()
{
FirstPoint = segments[0],
LastPoint = segments[1],
Type = segments[2],
Smoothing = segments[3],
Apply = segments[4],
SOrder = segments[5],
GOrder = segments[6],
Correction = segments[7],
Conversion = segments[8],
JunctionOption = segments[9],
JunctionConstant = segments[10],
CurrentDensity = segments[11],
M1M2Tolerance = segments[12],
Sheet = segments[13],
Dose = segments[14],
};
layerCollection.Add(layer);
}
}
return result;
}
}

View File

@ -0,0 +1,14 @@
namespace Adaptation.FileHandlers.csv;
public class Point
{
public string Number { get; set; }
public string Depth { get; set; }
public string Resistance { get; set; }
public string StageX { get; set; }
public string StageY { get; set; }
public string StageZ { get; set; }
public string StageT { get; set; }
}

View File

@ -0,0 +1,12 @@
namespace Adaptation.FileHandlers.csv;
public class Position
{
public string Resistivity { get; set; }
public string Resistance { get; set; }
public string PercentStandardDeviation { get; set; }
public string Number { get; set; }
public string Name { get; set; }
}

View File

@ -0,0 +1,135 @@
using Adaptation.Shared;
using Adaptation.Shared.Methods;
using log4net;
using System;
using System.Collections.Generic;
using System.Data;
using System.IO;
using System.Linq;
using System.Text.Json;
using System.Text.Json.Serialization;
namespace Adaptation.FileHandlers.csv;
public partial class ProcessData : IProcessData
{
private readonly List<object> _Details;
List<object> Shared.Properties.IProcessData.Details => _Details;
private readonly ILog _Log;
public ProcessData()
{
}
public ProcessData(IFileRead fileRead, Logistics logistics, List<FileInfo> fileInfoCollection, long tickOffset)
{
fileInfoCollection.Clear();
_Details = new List<object>();
_Log = LogManager.GetLogger(typeof(ProcessData));
_ = Parse(fileRead, logistics, fileInfoCollection);
}
string IProcessData.GetCurrentReactor(IFileRead fileRead, Logistics logistics, Dictionary<string, string> reactors) => throw new Exception(string.Concat("See ", nameof(Parse)));
Tuple<string, Test[], JsonElement[], List<FileInfo>> IProcessData.GetResults(IFileRead fileRead, Logistics logistics, List<FileInfo> fileInfoCollection)
{
Tuple<string, Test[], JsonElement[], List<FileInfo>> results;
List<Test> tests = new();
foreach (object item in _Details)
tests.Add(Test.SRP2100);
List<IDescription> descriptions = fileRead.GetDescriptions(fileRead, tests, this);
if (tests.Count != descriptions.Count)
throw new Exception();
for (int i = 0; i < tests.Count; i++)
{
if (descriptions[i] is not Description description)
throw new Exception();
if (description.Test != (int)tests[i])
throw new Exception();
}
List<Description> fileReadDescriptions = (from l in descriptions select (Description)l).ToList();
string json = JsonSerializer.Serialize(fileReadDescriptions, fileReadDescriptions.GetType());
JsonElement[] jsonElements = JsonSerializer.Deserialize<JsonElement[]>(json);
results = new Tuple<string, Test[], JsonElement[], List<FileInfo>>(logistics.Logistics1[0], tests.ToArray(), jsonElements, fileInfoCollection);
return results;
}
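// Appears to be a one-time migration helper (its call site is guarded by the OnlyOnce
// symbol): moves each loose *.csv into its own sub-directory, writes the parsed CSV next
// to it as indented JSON, renames the contained files after their directory and stamps
// the directory with the CSV's last-write time.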
private static void OnlyOnce(string directory)
{
CSV csv;
string json;
string fileName;
string innerDirectory;
string fileNameWithoutExtension;
foreach (string path in Directory.GetFiles(directory, "*.csv", SearchOption.TopDirectoryOnly))
{
csv = CSV.GetCSV(path);
fileNameWithoutExtension = Path.GetFileNameWithoutExtension(path);
innerDirectory = Path.Combine(directory, fileNameWithoutExtension);
_ = Directory.CreateDirectory(innerDirectory);
fileName = Path.Combine(innerDirectory, $"{fileNameWithoutExtension}.csv");
File.Move(path, fileName);
fileName = Path.Combine(innerDirectory, $"{fileNameWithoutExtension}.json");
json = JsonSerializer.Serialize(csv, new JsonSerializerOptions { WriteIndented = true });
File.WriteAllText(fileName, json);
}
foreach (string path in Directory.GetFiles(directory, "*", SearchOption.AllDirectories))
{
innerDirectory = Path.GetDirectoryName(path);
fileName = Path.Combine(innerDirectory, $"{Path.GetFileName(innerDirectory)}{Path.GetExtension(path)}");
File.Move(path, fileName);
}
foreach (string path in Directory.GetFiles(directory, "*.csv", SearchOption.AllDirectories))
{
innerDirectory = Path.GetDirectoryName(path);
Directory.SetLastWriteTime(innerDirectory, new FileInfo(path).LastWriteTime);
}
}
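// Parses the CSV report, round-trips it through JSON (written next to the source file as
// an indented .json) and records that JSON path as the single detail for this run.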
#pragma warning disable IDE0060
private CSV Parse(IFileRead fileRead, Logistics logistics, List<FileInfo> fileInfoCollection)
#pragma warning restore IDE0060
{
CSV result;
result = CSV.GetCSV(logistics.ReportFullPath);
string directory = Path.GetDirectoryName(logistics.ReportFullPath);
#if OnlyOnce
OnlyOnce(directory);
#endif
string fileName = Path.Combine(directory, $"{Path.GetFileNameWithoutExtension(logistics.ReportFullPath)}.json");
string json = JsonSerializer.Serialize(result, new JsonSerializerOptions { WriteIndented = true });
result = JsonSerializer.Deserialize<CSV>(json);
if (result is null)
throw new NullReferenceException(nameof(result));
File.WriteAllText(fileName, json);
#if NotSure
fileInfoCollection.Add(new(fileName));
#endif
fileInfoCollection.Add(logistics.FileInfo);
_Details.Add(fileName);
return result;
}
#nullable enable
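// Rehydrates csv Description records from previously serialized JsonElements, tolerating
// numbers stored as strings; non-object elements throw and elements that fail to
// deserialize are skipped.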
internal static List<Description> GetDescriptions(JsonElement[] jsonElements)
{
List<Description> results = new();
Description? description;
JsonSerializerOptions jsonSerializerOptions = new() { NumberHandling = JsonNumberHandling.AllowReadingFromString | JsonNumberHandling.WriteAsString };
foreach (JsonElement jsonElement in jsonElements)
{
if (jsonElement.ValueKind != JsonValueKind.Object)
throw new Exception();
description = JsonSerializer.Deserialize<Description>(jsonElement.ToString(), jsonSerializerOptions);
if (description is null)
continue;
results.Add(description);
}
return results;
}
}

View File

@ -0,0 +1,13 @@
namespace Adaptation.FileHandlers.csv;
public class Profile
{
public string Id { get; set; }
public string Name { get; set; }
public string Label { get; set; }
public string Allocated { get; set; }
public string Used { get; set; }
public string List { get; set; }
}

View File

@ -0,0 +1,102 @@
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
namespace Adaptation.FileHandlers.csv;
public class ProfileHeader
{
public string NumberOfProfiles { get; set; }
public List<Profile> Profiles { get; set; }
public List<ProfilePoint> ProfilePoints { get; set; }
#nullable enable
#pragma warning disable CA1834
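// Sentinel-driven parse: reads "key,value" rows until the "ID" row starts the profile table, then
// profile rows until the "Point" row starts the profile-point table. Returns null when the collected
// keys do not match the expected "Number of Profiles," header.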
internal static ProfileHeader? GetProfileHeader(string[] lines, int start, int stop)
{
ProfileHeader? result;
string first;
string[] segments;
Profile profileInfo;
ProfilePoint profile;
int? thirdStart = null;
int? secondStart = null;
List<string> values = new();
List<Profile> profiles = new();
StringBuilder stringBuilder = new();
List<ProfilePoint> profilePoints = new();
for (int i = start; i < stop; i++)
{
segments = lines[i].Split(new string[] { "," }, StringSplitOptions.None);
first = segments.First();
if (first == "ID")
{
secondStart = i + 1;
break;
}
_ = stringBuilder.Append(first).Append(",");
if (segments.Length > 1)
values.Add(segments[1]);
else
values.Add(string.Empty);
}
string header = "Number of Profiles,";
if (secondStart is null || stringBuilder.Length != header.Length || stringBuilder.ToString() != header)
result = null;
else
{
result = new()
{
NumberOfProfiles = values[0],
Profiles = profiles,
ProfilePoints = profilePoints,
};
for (int i = secondStart.Value; i < stop; i++)
{
segments = lines[i].Split(new string[] { "," }, StringSplitOptions.None);
first = segments.First();
if (first == "Point")
{
thirdStart = i + 1;
break;
}
if (segments.Length < 6)
continue;
profileInfo = new()
{
Id = segments[0],
Name = segments[1],
Label = segments[2],
Allocated = segments[3],
Used = segments[4],
List = segments[5],
};
profiles.Add(profileInfo);
}
if (thirdStart is not null)
{
for (int i = thirdStart.Value; i < stop; i++)
{
segments = lines[i].Split(new string[] { "," }, StringSplitOptions.None);
if (segments.Length < 6)
continue;
profile = new()
{
Number = segments[0],
Depth = segments[1],
Raw = segments[2],
Edited = segments[3],
Resistivity = segments[4],
CD = segments[5],
};
profilePoints.Add(profile);
}
}
}
return result;
}
}

View File

@ -0,0 +1,13 @@
namespace Adaptation.FileHandlers.csv;
public class ProfilePoint
{
public string Number { get; set; }
public string Depth { get; set; }
public string Raw { get; set; }
public string Edited { get; set; }
public string Resistivity { get; set; }
public string CD { get; set; }
}

View File

@ -0,0 +1,73 @@
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
namespace Adaptation.FileHandlers.csv;
public class RawData
{
public string TotalPoints { get; set; }
public List<Point> Points { get; set; }
#nullable enable
#pragma warning disable CA1834
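// Reads "key,value" rows until the "Point" sentinel, validates the "Total Points," header, then
// parses each raw-data row into a Point (number, depth, resistance, stage X/Y/Z/T).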
internal static RawData? GetRawData(string[] lines, int start, int stop)
{
RawData? result;
Point point;
string first;
string[] segments;
int? secondStart = null;
List<Point> points = new();
List<string> values = new();
StringBuilder stringBuilder = new();
for (int i = start; i < stop; i++)
{
segments = lines[i].Split(new string[] { "," }, StringSplitOptions.None);
first = segments.First();
if (first == "Point")
{
secondStart = i + 1;
break;
}
_ = stringBuilder.Append(first).Append(",");
if (segments.Length > 1)
values.Add(segments[1]);
else
values.Add(string.Empty);
}
string header = "Total Points,";
if (secondStart is null || stringBuilder.Length != header.Length || stringBuilder.ToString() != header)
result = null;
else
{
result = new()
{
TotalPoints = values[0],
Points = points,
};
for (int i = secondStart.Value; i < stop; i++)
{
segments = lines[i].Split(new string[] { "," }, StringSplitOptions.None);
if (segments.Length < 7)
continue;
point = new()
{
Number = segments[0],
Depth = segments[1],
Resistance = segments[2],
StageX = segments[3],
StageY = segments[4],
StageZ = segments[5],
StageT = segments[6],
};
points.Add(point);
}
}
return result;
}
}

View File

@ -0,0 +1,88 @@
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
namespace Adaptation.FileHandlers.csv;
public class Setup
{
public string Finish { get; set; }
public string NorthProbeID { get; set; }
public string SouthProbeID { get; set; }
public string MeasurementPolarity { get; set; }
public string SineBevelAngle { get; set; }
public string ContactRadius { get; set; }
public string ProbeSpacing { get; set; }
public string ProbeLoad { get; set; }
public string Orientation { get; set; }
public string NumberOfStepSizes { get; set; }
public List<Step> Steps { get; set; }
#nullable enable
#pragma warning disable CA1834
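// Reads the setup "key,value" rows until the "Step" sentinel, validates the expected header keys,
// then parses each step row (number, points, X, Y).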
internal static Setup? GetSetup(string[] lines, int start, int stop)
{
Setup? result;
Step step;
string first;
string[] segments;
int? secondStart = null;
List<Step> steps = new();
List<string> values = new();
StringBuilder stringBuilder = new();
for (int i = start; i < stop; i++)
{
segments = lines[i].Split(new string[] { "," }, StringSplitOptions.None);
first = segments.First();
if (first == "Step")
{
secondStart = i + 1;
break;
}
_ = stringBuilder.Append(first).Append(",");
if (segments.Length > 1)
values.Add(segments[1]);
else
values.Add(string.Empty);
}
string header = "Finish,North Probe ID,South Probe ID,Measurement Polarity,Sine Bevel Angle,Contact Radius (µm),Probe Spacing (µm),Probe Load (gm),Orientation,Number of Step Sizes,";
if (secondStart is null || stringBuilder.Length != header.Length || stringBuilder.ToString() != header)
result = null;
else
{
result = new()
{
Finish = values[0],
NorthProbeID = values[1],
SouthProbeID = values[2],
MeasurementPolarity = values[3],
SineBevelAngle = values[4],
ContactRadius = values[5],
ProbeSpacing = values[6],
ProbeLoad = values[7],
Orientation = values[8],
NumberOfStepSizes = values[9],
Steps = steps,
};
for (int i = secondStart.Value; i < stop; i++)
{
segments = lines[i].Split(new string[] { "," }, StringSplitOptions.None);
if (segments.Length < 4)
continue;
step = new()
{
Number = segments[0],
Points = segments[1],
X = segments[2],
Y = segments[3],
};
steps.Add(step);
}
}
return result;
}
}

View File

@ -0,0 +1,11 @@
namespace Adaptation.FileHandlers.csv;
public class Step
{
public string Number { get; set; }
public string Points { get; set; }
public string X { get; set; }
public string Y { get; set; }
}

View File

@ -0,0 +1,27 @@
namespace Adaptation.FileHandlers.json;
public class CSV
{
#nullable enable
public string? FileVersion { get; }
public Info? Info { get; }
public Setup? Setup { get; }
public LayerHeader? LayerHeader { get; }
public ProfileHeader? ProfileHeader { get; }
public Calibration? Calibration { get; }
public RawData? RawData { get; }
internal CSV(csv.CSV csv)
{
FileVersion = csv.FileVersion;
Info = csv.Info is null ? null : new(csv.Info);
Setup = csv.Setup is null ? null : new(csv.Setup);
RawData = csv.RawData is null ? null : new(csv.RawData);
Calibration = csv.Calibration is null ? null : new(csv.Calibration);
LayerHeader = csv.LayerHeader is null ? null : new(csv.LayerHeader);
ProfileHeader = csv.ProfileHeader is null ? null : new(csv.ProfileHeader);
}
}

View File

@ -0,0 +1,59 @@
using System;
using System.Collections.Generic;
namespace Adaptation.FileHandlers.json;
public class Calibration
{
public int NumberOfCalibrationSets { get; }
public List<DataSet> DataSets { get; }
internal Calibration(csv.Calibration calibration)
{
DataSet dataSet;
Position position;
List<Position> positions;
List<DataSet> dataSets = new();
NumberOfCalibrationSets = int.Parse(calibration.NumberOfCalibrationSets);
DataSets = dataSets;
foreach (csv.DataSet csvDataSet in calibration.DataSets)
{
positions = new();
foreach (csv.Position csvPosition in csvDataSet.Positions)
{
position = new
(
resistivity: double.Parse(csvPosition.Resistivity),
resistance: double.Parse(csvPosition.Resistance),
percentStandardDeviation: double.Parse(csvPosition.PercentStandardDeviation),
number: int.Parse(csvPosition.Number),
name: csvPosition.Name
);
positions.Add(position);
}
dataSet = new
(
@operator: csvDataSet.Operator,
dateTime: DateTime.Parse(csvDataSet.DateTime),
finish: csvDataSet.Finish,
orientation: csvDataSet.Orientation,
northProbeID: csvDataSet.NorthProbeID,
southProbeID: csvDataSet.SouthProbeID,
polarity: csvDataSet.Polarity,
contactRadius: double.Parse(csvDataSet.ContactRadius),
probeSpacing: double.Parse(csvDataSet.ProbeSpacing),
load: double.Parse(csvDataSet.Load),
xStep: double.Parse(csvDataSet.XStep),
name: csvDataSet.Name,
plateId: csvDataSet.PlateId,
type: csvDataSet.Type,
pointsPerSample: int.Parse(csvDataSet.PointsPerSample),
numberOfPairs: int.Parse(csvDataSet.NumberOfPairs),
positions: positions
);
dataSets.Add(dataSet);
}
}
}

View File

@ -0,0 +1,65 @@
using System;
using System.Collections.Generic;
namespace Adaptation.FileHandlers.json;
public class DataSet
{
public string Operator { get; }
public DateTime DateTime { get; }
public string Finish { get; }
public string Orientation { get; }
public string NorthProbeID { get; }
public string SouthProbeID { get; }
public string Polarity { get; }
public double ContactRadius { get; }
public double ProbeSpacing { get; }
public double Load { get; }
public double XStep { get; }
// public string SampleSet { get; }
public string Name { get; }
public string PlateId { get; }
public string Type { get; }
public int PointsPerSample { get; }
public int NumberOfPairs { get; }
public List<Position> Positions { get; }
public DataSet(string @operator,
DateTime dateTime,
string finish,
string orientation,
string northProbeID,
string southProbeID,
string polarity,
double contactRadius,
double probeSpacing,
double load,
double xStep,
string name,
string plateId,
string type,
int pointsPerSample,
int numberOfPairs,
List<Position> positions)
{
Operator = @operator;
DateTime = dateTime;
Finish = finish;
Orientation = orientation;
NorthProbeID = northProbeID;
SouthProbeID = southProbeID;
Polarity = polarity;
ContactRadius = contactRadius;
ProbeSpacing = probeSpacing;
Load = load;
XStep = xStep;
Name = name;
PlateId = plateId;
Type = type;
PointsPerSample = pointsPerSample;
NumberOfPairs = numberOfPairs;
Positions = positions;
}
}

View File

@ -0,0 +1,245 @@
using Adaptation.Shared;
using Adaptation.Shared.Methods;
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text.Json;
namespace Adaptation.FileHandlers.json;
public class Description : IDescription, Shared.Properties.IDescription
{
public int Test { get; set; }
public int Count { get; set; }
public int Index { get; set; }
//
public string EventName { get; set; }
public string NullData { get; set; }
public string JobID { get; set; }
public string Sequence { get; set; }
public string MesEntity { get; set; }
public string ReportFullPath { get; set; }
public string ProcessJobID { get; set; }
public string MID { get; set; }
//
public string Date { get; set; }
public string Employee { get; set; }
public string HeaderUniqueId { get; set; }
public string Layer { get; set; }
public string PSN { get; set; }
public string RDS { get; set; }
public string Reactor { get; set; }
public string Recipe { get; set; }
public string UniqueId { get; set; }
public string Zone { get; set; }
//
public string Number { get; set; }
public string Depth { get; set; }
public string Raw { get; set; }
public string Edited { get; set; }
public string Resistivity { get; set; }
public string CD { get; set; }
string IDescription.GetEventDescription() => "File Has been read and parsed";
List<string> IDescription.GetNames(IFileRead fileRead, Logistics logistics)
{
List<string> results = new();
IDescription description = GetDefault(fileRead, logistics);
string json = JsonSerializer.Serialize(description, description.GetType());
object @object = JsonSerializer.Deserialize<object>(json);
if (@object is not JsonElement jsonElement)
throw new Exception();
foreach (JsonProperty jsonProperty in jsonElement.EnumerateObject())
results.Add(jsonProperty.Name);
return results;
}
List<string> IDescription.GetDetailNames()
{
List<string> results = new()
{
nameof(HeaderUniqueId),
nameof(Layer),
nameof(RDS),
nameof(UniqueId),
nameof(Zone)
};
return results;
}
List<string> IDescription.GetHeaderNames()
{
List<string> results = new()
{
nameof(Date),
nameof(Employee),
nameof(PSN),
nameof(RDS),
nameof(Reactor),
nameof(Recipe)
};
return results;
}
IDescription IDescription.GetDisplayNames()
{
Description result = GetDisplayNames();
return result;
}
List<string> IDescription.GetParameterNames()
{
List<string> results = new();
return results;
}
JsonProperty[] IDescription.GetDefault(IFileRead fileRead, Logistics logistics)
{
JsonProperty[] results;
IDescription description = GetDefault(fileRead, logistics);
string json = JsonSerializer.Serialize(description, description.GetType());
object @object = JsonSerializer.Deserialize<object>(json);
results = ((JsonElement)@object).EnumerateObject().ToArray();
return results;
}
List<string> IDescription.GetPairedParameterNames()
{
List<string> results = new();
return results;
}
List<string> IDescription.GetIgnoreParameterNames(Test test)
{
List<string> results = new();
return results;
}
IDescription IDescription.GetDefaultDescription(IFileRead fileRead, Logistics logistics)
{
Description result = GetDefault(fileRead, logistics);
return result;
}
Dictionary<string, string> IDescription.GetDisplayNamesJsonElement(IFileRead fileRead)
{
Dictionary<string, string> results = new();
IDescription description = GetDisplayNames();
string json = JsonSerializer.Serialize(description, description.GetType());
JsonElement jsonElement = JsonSerializer.Deserialize<JsonElement>(json);
foreach (JsonProperty jsonProperty in jsonElement.EnumerateObject())
{
if (!results.ContainsKey(jsonProperty.Name))
results.Add(jsonProperty.Name, string.Empty);
if (jsonProperty.Value is JsonElement jsonPropertyValue)
results[jsonProperty.Name] = jsonPropertyValue.ToString();
}
return results;
}
List<IDescription> IDescription.GetDescriptions(IFileRead fileRead, Logistics logistics, List<Test> tests, IProcessData iProcessData)
{
List<IDescription> results = new();
if (iProcessData is null || !iProcessData.Details.Any() || iProcessData is not ProcessData processData)
results.Add(GetDefault(fileRead, logistics));
else
{
string nullData;
Description description;
object configDataNullData = fileRead.NullData;
if (configDataNullData is null)
nullData = string.Empty;
else
nullData = configDataNullData.ToString();
for (int i = 0; i < iProcessData.Details.Count; i++)
{
if (iProcessData.Details[i] is not Detail detail)
continue;
description = new Description
{
Test = (int)tests[i],
Count = tests.Count,
Index = i,
//
EventName = fileRead.EventName,
NullData = nullData,
JobID = fileRead.CellInstanceName,
Sequence = logistics.Sequence.ToString(),
MesEntity = logistics.MesEntity,
ReportFullPath = logistics.ReportFullPath,
ProcessJobID = logistics.ProcessJobID,
MID = logistics.MID,
//
Date = processData.Date.ToString(GetDateFormat()),
Employee = processData.Employee,
Layer = processData.Layer,
PSN = processData.PSN,
RDS = processData.RDS,
Reactor = processData.Reactor,
Recipe = processData.Recipe,
Zone = processData.Zone,
//
UniqueId = detail.UniqueId,
HeaderUniqueId = detail.HeaderUniqueId,
Depth = detail.Depth,
Raw = detail.Raw,
Edited = detail.Edited,
Resistivity = detail.Resistivity,
CD = detail.CD,
};
results.Add(description);
}
}
return results;
}
private static Description GetDisplayNames()
{
Description result = new();
return result;
}
private Description GetDefault(IFileRead fileRead, Logistics logistics)
{
Description result = new()
{
Test = -1,
Count = 0,
Index = -1,
//
EventName = fileRead.EventName,
NullData = fileRead.NullData,
JobID = fileRead.CellInstanceName,
Sequence = logistics.Sequence.ToString(),
MesEntity = fileRead.MesEntity,
ReportFullPath = logistics.ReportFullPath,
ProcessJobID = logistics.ProcessJobID,
MID = logistics.MID,
//
Date = nameof(Date),
Employee = nameof(Employee),
HeaderUniqueId = nameof(HeaderUniqueId),
Layer = nameof(Layer),
PSN = nameof(PSN),
RDS = nameof(RDS),
Reactor = nameof(Reactor),
Recipe = nameof(Recipe),
UniqueId = nameof(UniqueId),
Zone = nameof(Zone),
//
Number = nameof(Number),
Depth = nameof(Depth),
Raw = nameof(Raw),
Edited = nameof(Edited),
Resistivity = nameof(Resistivity),
CD = nameof(CD),
};
return result;
}
internal static string GetDateFormat() => "MM/dd/yyyy hh:mm:ss tt";
}

View File

@ -0,0 +1,25 @@
namespace Adaptation.FileHandlers.json;
public class Descriptor
{
public string Employee { get; private set; }
public string Layer { get; private set; }
public string PSN { get; private set; }
public string RDS { get; private set; }
public string Reactor { get; private set; }
public string Wafer { get; private set; }
public string Zone { get; private set; }
public Descriptor(string employee, string layer, string psn, string rds, string reactor, string wafer, string zone)
{
Employee = employee;
Layer = layer;
PSN = psn;
RDS = rds;
Reactor = reactor;
Wafer = wafer;
Zone = zone;
}
}

View File

@ -0,0 +1,14 @@
namespace Adaptation.FileHandlers.json;
public class Detail
{
public string HeaderUniqueId { get; set; }
public string UniqueId { get; set; }
public string Depth { get; set; }
public string Raw { get; set; }
public string Edited { get; set; }
public string Resistivity { get; set; }
public string CD { get; set; }
}

View File

@ -0,0 +1,130 @@
using Adaptation.Eaf.Management.ConfigurationData.CellAutomation;
using Adaptation.Ifx.Eaf.EquipmentConnector.File.Configuration;
using Adaptation.Shared;
using Adaptation.Shared.Methods;
using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Text.Json;
using System.Text.RegularExpressions;
namespace Adaptation.FileHandlers.json;
public class FileRead : Shared.FileRead, IFileRead
{
private long? _TickOffset;
public FileRead(ISMTP smtp, Dictionary<string, string> fileParameter, string cellInstanceName, int? connectionCount, string cellInstanceConnectionName, FileConnectorConfiguration fileConnectorConfiguration, string equipmentTypeName, string parameterizedModelObjectDefinitionType, IList<ModelObjectParameterDefinition> modelObjectParameters, string equipmentDictionaryName, Dictionary<string, List<long>> dummyRuns, Dictionary<long, List<string>> staticRuns, bool useCyclicalForDescription, bool isEAFHosted) :
base(new Description(), true, smtp, fileParameter, cellInstanceName, connectionCount, cellInstanceConnectionName, fileConnectorConfiguration, equipmentTypeName, parameterizedModelObjectDefinitionType, modelObjectParameters, equipmentDictionaryName, dummyRuns, staticRuns, useCyclicalForDescription, isEAFHosted: connectionCount is null)
{
_MinFileLength = 10;
_NullData = string.Empty;
_Logistics = new(this);
if (_FileParameter is null)
throw new Exception(cellInstanceConnectionName);
if (_ModelObjectParameterDefinitions is null)
throw new Exception(cellInstanceConnectionName);
if (_IsDuplicator)
throw new Exception(cellInstanceConnectionName);
}
void IFileRead.Move(Tuple<string, Test[], JsonElement[], List<FileInfo>> extractResults, Exception exception) => Move(extractResults);
void IFileRead.WaitForThread() => WaitForThread(thread: null, threadExceptions: null);
string IFileRead.GetEventDescription()
{
string result = _Description.GetEventDescription();
return result;
}
List<string> IFileRead.GetHeaderNames()
{
List<string> results = _Description.GetHeaderNames();
return results;
}
string[] IFileRead.Move(Tuple<string, Test[], JsonElement[], List<FileInfo>> extractResults, string to, string from, string resolvedFileLocation, Exception exception)
{
string[] results = Move(extractResults, to, from, resolvedFileLocation, exception);
return results;
}
JsonProperty[] IFileRead.GetDefault()
{
JsonProperty[] results = _Description.GetDefault(this, _Logistics);
return results;
}
Dictionary<string, string> IFileRead.GetDisplayNamesJsonElement()
{
Dictionary<string, string> results = _Description.GetDisplayNamesJsonElement(this);
return results;
}
List<IDescription> IFileRead.GetDescriptions(IFileRead fileRead, List<Test> tests, IProcessData processData)
{
List<IDescription> results = _Description.GetDescriptions(fileRead, _Logistics, tests, processData);
return results;
}
Tuple<string, Test[], JsonElement[], List<FileInfo>> IFileRead.GetExtractResult(string reportFullPath, string eventName)
{
Tuple<string, Test[], JsonElement[], List<FileInfo>> results;
if (string.IsNullOrEmpty(eventName))
throw new Exception();
_ReportFullPath = reportFullPath;
DateTime dateTime = DateTime.Now;
results = GetExtractResult(reportFullPath, dateTime);
if (results.Item3 is null)
results = new Tuple<string, Test[], JsonElement[], List<FileInfo>>(results.Item1, Array.Empty<Test>(), JsonSerializer.Deserialize<JsonElement[]>("[]"), results.Item4);
if (results.Item3.Length > 0 && _IsEAFHosted)
WritePDSF(this, results.Item3);
UpdateLastTicksDuration(DateTime.Now.Ticks - dateTime.Ticks);
return results;
}
Tuple<string, Test[], JsonElement[], List<FileInfo>> IFileRead.ReExtract()
{
Tuple<string, Test[], JsonElement[], List<FileInfo>> results;
List<string> headerNames = _Description.GetHeaderNames();
Dictionary<string, string> keyValuePairs = _Description.GetDisplayNamesJsonElement(this);
results = ReExtract(this, headerNames, keyValuePairs);
return results;
}
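// Builds Logistics from the report path, derives the MID (zone, employee, or reactor-RDS-PSN,
// depending on which fields were parsed), then delegates to ProcessData for the extract results.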
private Tuple<string, Test[], JsonElement[], List<FileInfo>> GetExtractResult(string reportFullPath, DateTime dateTime)
{
Tuple<string, Test[], JsonElement[], List<FileInfo>> results = new(string.Empty, null, null, new List<FileInfo>());
_TickOffset ??= new FileInfo(reportFullPath).LastWriteTime.Ticks - dateTime.Ticks;
_Logistics = new Logistics(this, _TickOffset.Value, reportFullPath, useSplitForMID: true);
SetFileParameterLotIDToLogisticsMID();
if (_Logistics.FileInfo.Length < _MinFileLength)
results.Item4.Add(_Logistics.FileInfo);
else
{
IProcessData iProcessData = new ProcessData(this, _Logistics, results.Item4, tickOffset: _TickOffset.Value);
if (iProcessData is not ProcessData processData)
throw new Exception(string.Concat("A) No Data - ", dateTime.Ticks));
string mid;
if (!string.IsNullOrEmpty(processData.Zone) && string.IsNullOrEmpty(processData.Reactor) && string.IsNullOrEmpty(processData.RDS) && string.IsNullOrEmpty(processData.PSN))
mid = processData.Zone;
else if (!string.IsNullOrEmpty(processData.Employee) && string.IsNullOrEmpty(processData.Reactor) && string.IsNullOrEmpty(processData.RDS) && string.IsNullOrEmpty(processData.PSN))
mid = processData.Employee;
else
{
mid = string.Concat(processData.Reactor, "-", processData.RDS, "-", processData.PSN);
mid = Regex.Replace(mid, @"[\\,\/,\:,\*,\?,\"",\<,\>,\|]", "_").Split('\r')[0].Split('\n')[0];
}
SetFileParameterLotID(mid);
_Logistics.Update(mid, processData.Reactor);
if (!iProcessData.Details.Any())
throw new Exception(string.Concat("B) No Data - ", dateTime.Ticks));
results = iProcessData.GetResults(this, _Logistics, results.Item4);
}
return results;
}
}

View File

@ -0,0 +1,34 @@
using System;
using System.Collections.Generic;
namespace Adaptation.FileHandlers.json;
public class Info
{
public string Operator { get; }
public string SampleName { get; }
public string SoftwareVersion { get; }
public DateTime DateTime { get; }
public string SystemId { get; }
public string SystemSite { get; }
public int SamplePosition { get; }
public int Units { get; }
public int CommentLength { get; }
public List<string> Comments { get; }
internal Info(csv.Info info)
{
Operator = info.Operator;
SampleName = info.SampleName;
SoftwareVersion = info.SoftwareVersion;
DateTime = DateTime.Parse(info.DateTime);
SystemId = info.SystemId;
SystemSite = info.SystemSite;
SamplePosition = int.Parse(info.SamplePosition);
Units = int.Parse(info.Units);
CommentLength = int.Parse(info.CommentLength);
Comments = info.Comments;
}
}

View File

@ -0,0 +1,55 @@
namespace Adaptation.FileHandlers.json;
public class Layer
{
public int FirstPoint { get; }
public int? LastPoint { get; }
public string Type { get; }
public string Smoothing { get; }
public string Apply { get; }
public int SOrder { get; }
public int GOrder { get; }
public string Correction { get; }
public string Conversion { get; }
public string JunctionOption { get; }
public int JunctionConstant { get; }
public double CurrentDensity { get; }
public string M1M2Tolerance { get; }
public string Sheet { get; }
public string Dose { get; }
public Layer(int firstPoint,
int? lastPoint,
string type,
string smoothing,
string apply,
int sOrder,
int gOrder,
string correction,
string conversion,
string junctionOption,
int junctionConstant,
double currentDensity,
string m1M2Tolerance,
string sheet,
string dose)
{
FirstPoint = firstPoint;
LastPoint = lastPoint;
Type = type;
Smoothing = smoothing;
Apply = apply;
SOrder = sOrder;
GOrder = gOrder;
Correction = correction;
Conversion = conversion;
JunctionOption = junctionOption;
JunctionConstant = junctionConstant;
CurrentDensity = currentDensity;
M1M2Tolerance = m1M2Tolerance;
Sheet = sheet;
Dose = dose;
}
}

View File

@ -0,0 +1,41 @@
using System.Collections.Generic;
namespace Adaptation.FileHandlers.json;
public class LayerHeader
{
public int NumberOfLayers { get; }
public List<Layer> Layers { get; }
internal LayerHeader(csv.LayerHeader layerHeader)
{
Layer layer;
List<Layer> layers = new();
NumberOfLayers = int.Parse(layerHeader.NumberOfLayers);
Layers = layers;
foreach (csv.Layer csvLayer in layerHeader.Layers)
{
layer = new
(
firstPoint: int.Parse(csvLayer.FirstPoint),
lastPoint: string.IsNullOrEmpty(csvLayer.LastPoint) ? null : int.Parse(csvLayer.LastPoint),
type: csvLayer.Type,
smoothing: csvLayer.Smoothing,
apply: csvLayer.Apply,
sOrder: int.Parse(csvLayer.SOrder),
gOrder: int.Parse(csvLayer.GOrder),
correction: csvLayer.Correction,
conversion: csvLayer.Conversion,
junctionOption: csvLayer.JunctionOption,
junctionConstant: int.Parse(csvLayer.JunctionConstant),
currentDensity: double.Parse(csvLayer.CurrentDensity),
m1M2Tolerance: csvLayer.M1M2Tolerance,
sheet: csvLayer.Sheet,
dose: csvLayer.Dose
);
layers.Add(layer);
}
}
}

View File

@ -0,0 +1,31 @@
namespace Adaptation.FileHandlers.json;
public class Point
{
public double Number { get; }
public double Depth { get; }
public double Resistance { get; }
public double StageX { get; }
public double StageY { get; }
public double StageZ { get; }
public double StageT { get; }
public Point(double number,
double depth,
double resistance,
double stageX,
double stageY,
double stageZ,
double stageT)
{
Number = number;
Depth = depth;
Resistance = resistance;
StageX = stageX;
StageY = stageY;
StageZ = stageZ;
StageT = stageT;
}
}

View File

@ -0,0 +1,25 @@
namespace Adaptation.FileHandlers.json;
public class Position
{
public double Resistivity { get; }
public double Resistance { get; }
public double PercentStandardDeviation { get; }
public int Number { get; }
public string Name { get; }
public Position(double resistivity,
double resistance,
double percentStandardDeviation,
int number,
string name)
{
Resistivity = resistivity;
Resistance = resistance;
PercentStandardDeviation = percentStandardDeviation;
Number = number;
Name = name;
}
}

View File

@ -0,0 +1,558 @@
using Adaptation.Shared;
using Adaptation.Shared.Methods;
using log4net;
using System;
using System.Collections.Generic;
using System.Data;
using System.Drawing;
using System.IO;
using System.Linq;
using System.Text;
using System.Text.Json;
using System.Text.Json.Serialization;
using System.Text.RegularExpressions;
namespace Adaptation.FileHandlers.json;
public partial class ProcessData : IProcessData
{
private readonly List<object> _Details;
public DateTime Date { get; set; }
public string Employee { get; set; }
public string JobID { get; set; }
public string Layer { get; set; }
public string MesEntity { get; set; }
public string PSN { get; set; }
public string RDS { get; set; }
public string Reactor { get; set; }
public string Recipe { get; set; }
public string Zone { get; set; }
List<object> Shared.Properties.IProcessData.Details => _Details;
private readonly ILog _Log;
public ProcessData()
{
}
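// Parses the JSON report, calls GetImageBytes (result discarded), writes an indented *.data.json
// snapshot next to the report, and populates the header and detail values when a sample name and
// profile points are present.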
public ProcessData(IFileRead fileRead, Logistics logistics, List<FileInfo> fileInfoCollection, long tickOffset)
{
JobID = logistics.JobID;
fileInfoCollection.Clear();
_Details = new List<object>();
MesEntity = logistics.MesEntity;
_Log = LogManager.GetLogger(typeof(ProcessData));
CSV csv = Parse(fileRead, logistics, fileInfoCollection);
_ = GetImageBytes(csv);
string directory = Path.GetDirectoryName(logistics.ReportFullPath);
string fileName = Path.Combine(directory, $"{Path.GetFileNameWithoutExtension(logistics.ReportFullPath)}.data.json");
string json = JsonSerializer.Serialize(csv, new JsonSerializerOptions { WriteIndented = true });
File.WriteAllText(fileName, json);
if (!string.IsNullOrEmpty(csv.Info?.SampleName) && csv.ProfileHeader.ProfilePoints.Any())
SetValues(logistics, tickOffset, csv);
}
string IProcessData.GetCurrentReactor(IFileRead fileRead, Logistics logistics, Dictionary<string, string> reactors) => throw new Exception(string.Concat("See ", nameof(Parse)));
Tuple<string, Test[], JsonElement[], List<FileInfo>> IProcessData.GetResults(IFileRead fileRead, Logistics logistics, List<FileInfo> fileInfoCollection)
{
Tuple<string, Test[], JsonElement[], List<FileInfo>> results;
List<Test> tests = new();
foreach (object item in _Details)
tests.Add(Test.SRP2100);
List<IDescription> descriptions = fileRead.GetDescriptions(fileRead, tests, this);
if (tests.Count != descriptions.Count)
throw new Exception();
for (int i = 0; i < tests.Count; i++)
{
if (descriptions[i] is not Description description)
throw new Exception();
if (description.Test != (int)tests[i])
throw new Exception();
}
List<Description> fileReadDescriptions = (from l in descriptions select (Description)l).ToList();
string json = JsonSerializer.Serialize(fileReadDescriptions, fileReadDescriptions.GetType());
JsonElement[] jsonElements = JsonSerializer.Deserialize<JsonElement[]>(json);
results = new Tuple<string, Test[], JsonElement[], List<FileInfo>>(logistics.Logistics1[0], tests.ToArray(), jsonElements, fileInfoCollection);
return results;
}
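// Applies the file-system tick offset when the parsed timestamp is within a day of the sequence
// time; otherwise falls back to the sequence time itself.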
internal static DateTime GetDateTime(Logistics logistics, long tickOffset, DateTime dateTime)
{
DateTime result;
if (dateTime < logistics.DateTimeFromSequence.AddDays(1) && dateTime > logistics.DateTimeFromSequence.AddDays(-1))
result = new(dateTime.Ticks + tickOffset);
else
result = logistics.DateTimeFromSequence;
return result;
}
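// First segment is the reactor and the second is the RDS (when it parses as a number >= 99); a
// first segment longer than three characters is treated as the RDS instead.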
private static (string, string) GetReactorAndRDS(string defaultReactor, string defaultRDS, string text, string formattedText, string[] segments)
{
string rds;
string reactor;
if (string.IsNullOrEmpty(text) || segments.Length == 0 || string.IsNullOrEmpty(formattedText))
reactor = defaultReactor;
else
reactor = segments[0];
if (segments.Length <= 1 || !int.TryParse(segments[1], out int rdsValue) || rdsValue < 99)
rds = defaultRDS;
else
rds = segments[1];
if (reactor.Length > 3)
{
rds = reactor;
reactor = defaultReactor;
}
return new(reactor, rds);
}
private static (string, string) GetLayerAndPSN(string defaultLayer, string defaultPSN, string[] segments)
{
string psn;
string layer;
if (segments.Length <= 2)
{
psn = defaultPSN;
layer = defaultLayer;
}
else
{
string[] segmentsB = segments[2].Split('.');
psn = segmentsB[0];
if (segmentsB.Length <= 1)
layer = defaultLayer;
else
{
layer = segmentsB[1];
if (layer.Length > 1 && layer[0] == '0')
layer = layer.Substring(1);
}
}
return (layer, psn);
}
private static string GetZone(string[] segments)
{
string result;
if (segments.Length <= 3)
result = string.Empty;
else
{
result = segments[3];
if (result.Length > 1 && result[0] == '0')
result = result.Substring(1);
}
return result;
}
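// Heuristic breakdown of the sample name: a letter plus 2-4 digits is a bare wafer id, two or three
// letters are treated as operator initials, "reactor.layer[.zone]" is split on '.', and anything
// else is sanitized and split on '-' into reactor, RDS, PSN.layer, zone, and employee.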
public static Descriptor GetDescriptor(string text)
{
Descriptor result;
string psn;
string rds;
string zone;
string layer;
string wafer;
string reactor;
string employee;
string defaultPSN = string.Empty;
string defaultRDS = string.Empty;
string defaultZone = string.Empty;
string defaultLayer = string.Empty;
string defaultReactor = string.Empty;
string defaultEmployee = string.Empty;
if (Regex.IsMatch(text, @"^[a-zA-z][0-9]{2,4}$"))
{
wafer = text.ToUpper();
psn = defaultPSN;
rds = defaultRDS;
zone = defaultZone;
layer = defaultLayer;
reactor = defaultReactor;
employee = defaultEmployee;
}
else if (string.IsNullOrEmpty(text) || (text.Length is 2 or 3 && Regex.IsMatch(text, "^[a-zA-Z]{2,3}")))
{
wafer = text;
employee = text;
psn = defaultPSN;
rds = defaultRDS;
zone = defaultZone;
layer = defaultLayer;
reactor = defaultReactor;
}
else if (Regex.IsMatch(text, @"^[0-9]{2}[.][0-9]{1}[.]?[0-9]{0,1}"))
{
string[] segments = text.Split('.');
wafer = text;
psn = defaultPSN;
rds = defaultRDS;
layer = segments[1];
reactor = segments[0];
employee = defaultEmployee;
if (segments.Length <= 2)
zone = defaultZone;
else
zone = segments[2];
}
else
{
// Remove illegal characters \/:*?"<>| found in the Batch
wafer = Regex.Replace(text, @"[\\,\/,\:,\*,\?,\"",\<,\>,\|]", "_").Split('\r')[0].Split('\n')[0];
if (wafer.Length > 2 && wafer[0] == '1' && (wafer[1] == 'T' || wafer[1] == 't'))
wafer = wafer.Substring(2);
string[] segments = wafer.Split('-');
// bool hasRDS = Regex.IsMatch(wafer, "[-]?([QP][0-9]{4,}|[0-9]{5,})[-]?");
(reactor, rds) = GetReactorAndRDS(defaultReactor, defaultRDS, text, wafer, segments);
(layer, psn) = GetLayerAndPSN(defaultLayer, defaultPSN, segments);
zone = GetZone(segments);
if (segments.Length <= 4)
employee = defaultEmployee;
else
employee = segments[4];
}
result = new(employee, layer, psn, rds, reactor, wafer, zone);
return result;
}
#pragma warning disable IDE0060
private static CSV Parse(IFileRead fileRead, Logistics logistics, List<FileInfo> fileInfoCollection)
#pragma warning restore IDE0060
{
CSV result;
string json = File.ReadAllText(logistics.ReportFullPath);
string directory = Path.GetDirectoryName(logistics.ReportFullPath);
string fileName = Path.Combine(directory, $"{Path.GetFileNameWithoutExtension(logistics.ReportFullPath)}.csv");
csv.CSV csv = JsonSerializer.Deserialize<csv.CSV>(json);
if (csv is null)
throw new NullReferenceException(nameof(csv));
result = new(csv);
if (File.Exists(fileName))
fileInfoCollection.Add(new(fileName));
fileInfoCollection.Add(logistics.FileInfo);
return result;
}
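// Populates the header fields from the parsed CSV (falling back to the descriptor derived from the
// sample name) and emits one Detail per profile point, keyed by the header unique id.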
private void SetValues(Logistics logistics, long tickOffset, CSV csv)
{
string psn;
string rds;
string zone;
string layer;
Detail detail;
string reactor;
List<Detail> details = new();
string recipe = csv.Info.SampleName;
string employee = csv.Info.Operator;
string timeFormat = "yyyyMMddHHmmss";
DateTime dateTime = GetDateTime(logistics, tickOffset, csv.Info.DateTime);
string uniqueId = string.Concat(logistics.MesEntity, "_", logistics.DateTimeFromSequence.ToString(timeFormat));
// Remove illegal characters \/:*?"<>| found in the Batch
Descriptor descriptor = GetDescriptor(csv.Info.SampleName);
psn = descriptor.PSN;
rds = descriptor.RDS;
zone = descriptor.Zone;
layer = descriptor.Layer;
reactor = descriptor.Reactor;
if (string.IsNullOrEmpty(employee))
employee = descriptor.Employee;
PSN = psn;
RDS = rds;
Date = dateTime;
Zone = zone;
Layer = layer;
Recipe = recipe;
Reactor = reactor;
Employee = employee;
JobID = logistics.JobID;
foreach (ProfilePoint profilePoint in csv.ProfileHeader.ProfilePoints)
{
detail = new()
{
HeaderUniqueId = uniqueId,
UniqueId = string.Concat(uniqueId, "_Point-", profilePoint.Number),
Depth = profilePoint.Depth.ToString(),
Raw = profilePoint.Raw.ToString(),
Edited = string.Concat(profilePoint.Edited),
Resistivity = string.Concat(profilePoint.Resistivity),
CD = string.Concat(profilePoint.CD),
};
details.Add(detail);
}
_Details.AddRange(details);
}
#nullable enable
#pragma warning disable CA1416
internal static List<Description> GetDescriptions(JsonElement[] jsonElements)
{
List<Description> results = new();
Description? description;
JsonSerializerOptions jsonSerializerOptions = new() { NumberHandling = JsonNumberHandling.AllowReadingFromString | JsonNumberHandling.WriteAsString };
foreach (JsonElement jsonElement in jsonElements)
{
if (jsonElement.ValueKind != JsonValueKind.Object)
throw new Exception();
description = JsonSerializer.Deserialize<Description>(jsonElement.ToString(), jsonSerializerOptions);
if (description is null)
continue;
results.Add(description);
}
return results;
}
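// Scans the profile points for the deepest measured point and the log10 decade ranges of
// concentration, edited resistance, and resistivity; these bounds drive the chart axis scaling.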
private static void GetMinMax(List<ProfilePoint> profilePoints, out double maxDepth, out int concentrationMaxD, out int concentrationMinD, out int resistanceEditedMaxD, out int resistanceEditedMinD, out int resistivityMaxD, out int resistivityMinD)
{
maxDepth = 30;
concentrationMaxD = -99;
concentrationMinD = 99;
resistanceEditedMaxD = -99;
resistanceEditedMinD = 99;
resistivityMaxD = -99;
resistivityMinD = 99;
foreach (ProfilePoint profilePoint in profilePoints)
{
if (profilePoint.Depth > 0 && profilePoint.Edited is not null && profilePoint.Resistivity is not null && profilePoint.CD is not null)
{
maxDepth = profilePoint.Depth;
if (Math.Log10(profilePoint.Resistivity.Value) < resistivityMinD)
resistivityMinD = (int)Math.Log10(profilePoint.Resistivity.Value);
if (Math.Ceiling(Math.Log10(profilePoint.Resistivity.Value)) > resistivityMaxD)
resistivityMaxD = (int)Math.Ceiling(Math.Log10(profilePoint.Resistivity.Value));
if (Math.Log10(profilePoint.Edited.Value) < resistanceEditedMinD)
resistanceEditedMinD = (int)Math.Log10(profilePoint.Edited.Value);
if (Math.Ceiling(Math.Log10(profilePoint.Edited.Value)) > resistanceEditedMaxD)
resistanceEditedMaxD = (int)Math.Ceiling(Math.Log10(profilePoint.Edited.Value));
if (Math.Log10(profilePoint.CD.Value) < concentrationMinD)
concentrationMinD = (int)Math.Log10(profilePoint.CD.Value);
if (Math.Ceiling(Math.Log10(profilePoint.CD.Value)) > concentrationMaxD)
concentrationMaxD = (int)Math.Ceiling(Math.Log10(profilePoint.CD.Value));
}
}
}
private static PointF GetPointF(double x, double y) =>
new((float)x, (float)y);
private static RectangleF GetRectangleF(double left, double top, double width, double height) =>
new((float)left, (float)top, (float)width, (float)height);
private static void DrawLine(Graphics graphics, Pen pen, double x1, double y1, double x2, double y2) =>
graphics.DrawLine(pen, (float)x1, (float)y1, (float)x2, (float)y2);
private static void DrawString(Graphics graphics, string s, Font font, Brush brush, double x, double y) =>
graphics.DrawString(s, font, brush, (float)x, (float)y);
private static void FillEllipse(Graphics graphics, Brush brush, double x, double y, double width, double height) =>
graphics.FillEllipse(brush, (float)x, (float)y, (float)width, (float)height);
private static void DrawString(Graphics graphics, string s, Font font, Brush brush, double x, double y, StringFormat stringFormat) =>
graphics.DrawString(s, font, brush, (float)x, (float)y, stringFormat);
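// Renders the SRP depth profile as a 694x714 PNG: a log-scaled chart of resistivity, carrier
// concentration, and raw/edited resistance versus depth, plus the setup and layer summary blocks
// along the top and right edge. Returns the encoded image bytes.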
internal static byte[] GetImageBytes(CSV csv)
{
if (csv.Info is null)
throw new NullReferenceException(nameof(csv.Info));
if (csv.Setup is null)
throw new NullReferenceException(nameof(csv.Setup));
if (csv.ProfileHeader is null)
throw new NullReferenceException(nameof(csv.ProfileHeader));
if (csv.LayerHeader is null)
throw new NullReferenceException(nameof(csv.LayerHeader));
double maxDepth;
int concentrationMaxD, concentrationMinD, resistanceEditedMaxD, resistanceEditedMinD, resistivityMaxD, resistivityMinD;
GetMinMax(csv.ProfileHeader.ProfilePoints, out maxDepth, out concentrationMaxD, out concentrationMinD, out resistanceEditedMaxD, out resistanceEditedMinD, out resistivityMaxD, out resistivityMinD);
int decades;
byte[] bytes;
double point;
int width = 694;
int height = 714;
int position = -1;
Pen pen = Pens.Black;
int blocksOfDepth = 6;
RectangleF[] rectangles;
double topChartArea = 90;
double widthOfDepthBlock;
double leftChartArea = 60;
double widthOfBlacks = 120;
Brush brush = Brushes.Black;
double widthChartArea = 500;
double heightChartArea = 600;
StringBuilder layers = new();
Font consolas = new("Consolas", 9);
PointF[]? resistivityPoints = null;
StringFormat sfFormatRight = new();
Color backgroundColor = Color.White;
PointF[]? concentrationPoints = null;
Brush resistivityBrush = Brushes.Green;
Brush concentrationBrush = Brushes.Blue;
Brush resistanceRawBrush = Brushes.Black;
Pen resistivityPen = new(Color.Green, 2);
Brush resistanceEditedBrush = Brushes.Red;
double sizeOfBlock = heightChartArea / 18;
Pen concentrationPen = new(Color.Blue, 2);
Brush backgroundBrush = Brushes.WhiteSmoke;
Pen resistanceRawPen = new(Color.Black, 2);
Pen transitionWidthPen = new(Color.Orange);
Pen resistanceEditedPen = new(Color.Red, 2);
Font consolasBold = new("Consolas", 9); // , FontStyle.Bold)
concentrationPen.DashStyle = System.Drawing.Drawing2D.DashStyle.Dash;
concentrationPen.DashPattern = new float[] { 5, 4 };
resistanceEditedPen.DashStyle = System.Drawing.Drawing2D.DashStyle.Custom;
resistanceEditedPen.DashPattern = new float[] { 1, 3 };
resistanceRawPen.DashStyle = System.Drawing.Drawing2D.DashStyle.Custom;
resistanceRawPen.DashPattern = new float[] { 1, 3 };
transitionWidthPen.DashStyle = System.Drawing.Drawing2D.DashStyle.Dash;
transitionWidthPen.DashPattern = new float[] { 5, 4 };
sfFormatRight.Alignment = StringAlignment.Far;
Bitmap bitmap = new(width, height, System.Drawing.Imaging.PixelFormat.Format32bppArgb);
Graphics graphics = Graphics.FromImage(bitmap);
graphics.Clear(backgroundColor);
widthOfDepthBlock = Math.Ceiling(maxDepth / 3) * 3 / 6;
// maxDepth = widthOfDepthBlock * blocksOfDepth;
decades = resistivityMaxD - resistivityMinD;
if (resistanceEditedMaxD - resistanceEditedMinD > decades)
decades = resistanceEditedMaxD - resistanceEditedMinD;
if (concentrationMaxD - concentrationMinD > decades)
decades = concentrationMaxD - concentrationMinD;
rectangles = new RectangleF[1];
rectangles[0] = GetRectangleF(leftChartArea, topChartArea, widthChartArea, heightChartArea);
graphics.FillRectangles(backgroundBrush, rectangles);
rectangles = new RectangleF[18];
rectangles[0] = GetRectangleF(leftChartArea, 10, widthChartArea, 65);
// rectangles[1] = GetRectangleF(leftChartArea + widthChartArea, topChartArea, widthOfBlacks, sizeOfBlock * 5);
rectangles[1] = GetRectangleF(leftChartArea + widthChartArea, topChartArea + sizeOfBlock * 0, widthOfBlacks, sizeOfBlock);
rectangles[2] = GetRectangleF(leftChartArea + widthChartArea, topChartArea + sizeOfBlock * 1, widthOfBlacks, sizeOfBlock);
rectangles[3] = GetRectangleF(leftChartArea + widthChartArea, topChartArea + sizeOfBlock * 2, widthOfBlacks, sizeOfBlock);
rectangles[4] = GetRectangleF(leftChartArea + widthChartArea, topChartArea + sizeOfBlock * 3, widthOfBlacks, sizeOfBlock);
rectangles[5] = GetRectangleF(leftChartArea + widthChartArea, topChartArea + sizeOfBlock * 4, widthOfBlacks, sizeOfBlock);
rectangles[6] = GetRectangleF(leftChartArea + widthChartArea, topChartArea + sizeOfBlock * 5, widthOfBlacks, sizeOfBlock);
rectangles[7] = GetRectangleF(leftChartArea + widthChartArea, topChartArea + sizeOfBlock * 6, widthOfBlacks, sizeOfBlock);
rectangles[8] = GetRectangleF(leftChartArea + widthChartArea, topChartArea + sizeOfBlock * 7, widthOfBlacks, sizeOfBlock);
rectangles[9] = GetRectangleF(leftChartArea + widthChartArea, topChartArea + sizeOfBlock * 8, widthOfBlacks, sizeOfBlock);
rectangles[10] = GetRectangleF(leftChartArea + widthChartArea, topChartArea + sizeOfBlock * 9, widthOfBlacks, sizeOfBlock);
rectangles[11] = GetRectangleF(leftChartArea + widthChartArea, topChartArea + sizeOfBlock * 10, widthOfBlacks, sizeOfBlock);
rectangles[12] = GetRectangleF(leftChartArea + widthChartArea, topChartArea + sizeOfBlock * 11, widthOfBlacks, sizeOfBlock);
rectangles[13] = GetRectangleF(leftChartArea + widthChartArea, topChartArea + sizeOfBlock * 12, widthOfBlacks, sizeOfBlock);
rectangles[14] = GetRectangleF(leftChartArea + widthChartArea, topChartArea + sizeOfBlock * 13, widthOfBlacks, sizeOfBlock);
rectangles[15] = GetRectangleF(leftChartArea + widthChartArea, topChartArea + sizeOfBlock * 14, widthOfBlacks, sizeOfBlock);
rectangles[16] = GetRectangleF(leftChartArea + widthChartArea, topChartArea + sizeOfBlock * 15, widthOfBlacks, sizeOfBlock);
rectangles[17] = GetRectangleF(leftChartArea + widthChartArea, topChartArea + sizeOfBlock * 16, widthOfBlacks, sizeOfBlock * 2);
graphics.FillRectangles(Brushes.White, rectangles);
graphics.DrawRectangles(pen, rectangles);
foreach (Layer layer in csv.LayerHeader.Layers)
_ = layers.AppendLine(string.Concat("First Pt. ", layer.FirstPoint, " Last Pt. ", layer.LastPoint, " Type ", layer.Type, " Smoothing ", layer.Smoothing, " Correction ", layer.Correction));
_ = layers.AppendLine(string.Join(" ", csv.Info.Comments));
graphics.DrawString(layers.ToString(), consolas, brush, rectangles[0]);
graphics.DrawString(string.Concat(csv.Info.SystemId, Environment.NewLine, csv.Info.SoftwareVersion), consolas, brush, rectangles[9]);
graphics.DrawString(string.Concat("SURFACE FINISH", Environment.NewLine, csv.Setup.Finish), consolas, brush, rectangles[10]);
graphics.DrawString(string.Concat("ORIENTATION", Environment.NewLine, csv.Setup.Orientation), consolas, brush, rectangles[11]);
graphics.DrawString(string.Concat("BEVEL ANGLE", Environment.NewLine, csv.Setup.SineBevelAngle), consolas, brush, rectangles[12]);
graphics.DrawString(string.Concat("X-STEP (um)", Environment.NewLine, csv.Setup.Steps.First().X), consolas, brush, rectangles[13]);
graphics.DrawString(string.Concat("PROBE LOAD (gm)", Environment.NewLine, csv.Setup.ProbeLoad), consolas, brush, rectangles[14]);
graphics.DrawString(string.Concat("SPACING (um)", Environment.NewLine, csv.Setup.ProbeSpacing), consolas, brush, rectangles[15]);
graphics.DrawString(string.Concat("OPERATOR", Environment.NewLine, csv.Info.Operator), consolas, brush, rectangles[16]);
graphics.DrawString(string.Concat("DATE", Environment.NewLine, csv.Info.DateTime.ToString("dd MMM yy"), Environment.NewLine, "TIME", Environment.NewLine, csv.Info.DateTime.ToString("HH:mm:ss tt")), consolas, brush, rectangles[17]);
DrawLine(graphics, concentrationPen, 13, 6, 13, 40);
graphics.DrawString("C", consolasBold, concentrationBrush, 8, 41);
graphics.DrawString("D", consolasBold, concentrationBrush, 8, 53);
DrawLine(graphics, resistivityPen, 28, 6, 28, 40);
graphics.DrawString("ρ", consolasBold, resistivityBrush, 21, 41);
graphics.DrawString("c", consolasBold, resistivityBrush, 21, 53);
graphics.DrawString("m", consolasBold, resistivityBrush, 21, 62);
DrawLine(graphics, resistanceRawPen, 39, 7, 39, 41);
graphics.DrawString("Ω", consolasBold, resistanceRawBrush, 34, 41);
DrawLine(graphics, resistanceEditedPen, 51, 7, 51, 41);
graphics.DrawString("Ω", consolasBold, resistanceEditedBrush, 46, 41);
graphics.DrawString("E", consolasBold, resistanceEditedBrush, 46, 53);
for (int iLoop = decades - 1; iLoop >= 0; iLoop--)
{
for (int iLoop_Sub = 1; iLoop_Sub <= 10; iLoop_Sub++)
DrawLine(graphics, Pens.LightGray, leftChartArea, topChartArea + heightChartArea - (iLoop * heightChartArea / decades) + heightChartArea / decades * Math.Log10(iLoop_Sub), leftChartArea + widthChartArea, topChartArea + heightChartArea - (iLoop * heightChartArea / decades) + heightChartArea / decades * Math.Log10(iLoop_Sub));
}
DrawString(graphics, "0", consolas, brush, leftChartArea - 5, topChartArea + heightChartArea + 5);
for (int iLoop = 0; iLoop <= blocksOfDepth - 1; iLoop++)
{
for (int iLoop_Sub = 1; iLoop_Sub <= 10; iLoop_Sub++)
DrawLine(graphics, Pens.LightGray, leftChartArea + (iLoop * widthChartArea / blocksOfDepth) + iLoop_Sub * widthChartArea / blocksOfDepth / 10, topChartArea, leftChartArea + (iLoop * widthChartArea / blocksOfDepth) + (iLoop_Sub * widthChartArea / blocksOfDepth / 10), topChartArea + heightChartArea);
DrawString(graphics, ((iLoop + 1) * widthOfDepthBlock).ToString("0.0"), consolas, brush, leftChartArea + 13 + (iLoop + 1) * widthChartArea / blocksOfDepth, topChartArea + heightChartArea + 5, sfFormatRight);
}
DrawString(graphics, "(um)", consolas, brush, leftChartArea + widthChartArea + 12, topChartArea + heightChartArea + 5);
for (int iLoop = 0; iLoop <= decades; iLoop++)
{
DrawLine(graphics, pen, leftChartArea, topChartArea + (iLoop * heightChartArea / decades), leftChartArea + widthChartArea, topChartArea + (iLoop * heightChartArea / decades));
DrawString(graphics, (decades - iLoop + concentrationMinD).ToString("0"), consolasBold, concentrationBrush, 20, topChartArea - 10 + (iLoop * heightChartArea / decades), sfFormatRight);
DrawString(graphics, (decades - iLoop + resistivityMinD).ToString("0"), consolasBold, resistivityBrush, 33, topChartArea - 10 + (iLoop * heightChartArea / decades), sfFormatRight);
DrawString(graphics, (decades - iLoop + resistanceEditedMinD).ToString("0"), consolasBold, resistanceRawBrush, 45, topChartArea - 10 + (iLoop * heightChartArea / decades), sfFormatRight);
DrawString(graphics, (decades - iLoop + resistanceEditedMinD).ToString("0"), consolasBold, resistanceEditedBrush, 58, topChartArea - 10 + (iLoop * heightChartArea / decades), sfFormatRight);
}
for (int iLoop = 0; iLoop <= blocksOfDepth; iLoop++)
DrawLine(graphics, pen, leftChartArea + (iLoop * widthChartArea / blocksOfDepth), topChartArea, leftChartArea + (iLoop * widthChartArea / blocksOfDepth), topChartArea + heightChartArea);
foreach (ProfilePoint profilePoint in csv.ProfileHeader.ProfilePoints)
{
if (profilePoint.Depth < 0 || profilePoint.Edited is null || profilePoint.Resistivity is null || profilePoint.CD is null)
continue;
// Rho (Resistivity || Resistivity_ohm_cm) = Green
if (profilePoint.Depth < 0 || profilePoint.Edited.Value == 0 || profilePoint.Resistivity.Value == 0 || profilePoint.CD.Value == 0)
continue;
point = topChartArea + heightChartArea - ((Math.Log10(profilePoint.Resistivity.Value) - resistivityMinD) / decades * heightChartArea);
if (point < (90 - 2))
continue;
if (point > (topChartArea + heightChartArea + 2))
continue;
position += 1;
Array.Resize(ref resistivityPoints, position + 1);
Array.Resize(ref concentrationPoints, position + 1);
resistivityPoints[position] = GetPointF(leftChartArea + profilePoint.Depth / maxDepth * widthChartArea, topChartArea + heightChartArea - ((Math.Log10(profilePoint.Resistivity.Value) - resistivityMinD) / decades * heightChartArea));
concentrationPoints[position] = GetPointF(leftChartArea + profilePoint.Depth / maxDepth * widthChartArea, topChartArea + heightChartArea - ((Math.Log10(profilePoint.CD.Value) - concentrationMinD) / decades * heightChartArea));
graphics.SmoothingMode = System.Drawing.Drawing2D.SmoothingMode.HighQuality;
FillEllipse(graphics, resistanceRawBrush, leftChartArea + profilePoint.Depth / maxDepth * widthChartArea - 2, topChartArea + heightChartArea - ((Math.Log10(profilePoint.Raw) - resistanceEditedMinD) / decades * heightChartArea) - 2, 3, 3);
FillEllipse(graphics, resistanceEditedBrush, leftChartArea + profilePoint.Depth / maxDepth * widthChartArea - 2, topChartArea + heightChartArea - ((Math.Log10(profilePoint.Edited.Value) - resistanceEditedMinD) / decades * heightChartArea) - 2, 3, 3);
graphics.SmoothingMode = System.Drawing.Drawing2D.SmoothingMode.Default;
}
graphics.SmoothingMode = System.Drawing.Drawing2D.SmoothingMode.HighQuality;
if (resistivityPoints is not null)
graphics.DrawLines(resistivityPen, resistivityPoints);
graphics.SmoothingMode = System.Drawing.Drawing2D.SmoothingMode.Default;
graphics.SmoothingMode = System.Drawing.Drawing2D.SmoothingMode.HighQuality;
if (concentrationPoints is not null)
graphics.DrawLines(concentrationPen, concentrationPoints);
graphics.SmoothingMode = System.Drawing.Drawing2D.SmoothingMode.Default;
using MemoryStream msMemoryStream = new();
bitmap.Save(msMemoryStream, System.Drawing.Imaging.ImageFormat.Png);
bytes = msMemoryStream.ToArray();
return bytes;
}
}

View File

@ -0,0 +1,28 @@
namespace Adaptation.FileHandlers.json;
public class Profile
{
public string Id { get; }
public string Name { get; }
public string Label { get; }
public int Allocated { get; }
public int Used { get; }
public string List { get; }
public Profile(string id,
string name,
string label,
int allocated,
int used,
string list)
{
Id = id;
Name = name;
Label = label;
Allocated = allocated;
Used = used;
List = list;
}
}

View File

@ -0,0 +1,49 @@
using System.Collections.Generic;
namespace Adaptation.FileHandlers.json;
public class ProfileHeader
{
public int NumberOfProfiles { get; }
public List<Profile> Profiles { get; }
public List<ProfilePoint> ProfilePoints { get; }
internal ProfileHeader(csv.ProfileHeader profileHeader)
{
Profile profile;
ProfilePoint profilePoint;
List<Profile> profiles = new();
List<ProfilePoint> profilePoints = new();
NumberOfProfiles = int.Parse(profileHeader.NumberOfProfiles);
Profiles = profiles;
ProfilePoints = profilePoints;
foreach (csv.Profile csvProfile in profileHeader.Profiles)
{
profile = new
(
id: csvProfile.Id,
name: csvProfile.Name,
label: csvProfile.Label,
allocated: int.Parse(csvProfile.Allocated),
used: int.Parse(csvProfile.Used),
list: csvProfile.List
);
profiles.Add(profile);
}
foreach (csv.ProfilePoint csvProfilePoint in profileHeader.ProfilePoints)
{
profilePoint = new
(
number: int.Parse(csvProfilePoint.Number),
depth: double.Parse(csvProfilePoint.Depth),
raw: double.Parse(csvProfilePoint.Raw),
edited: double.Parse(csvProfilePoint.Edited),
resistivity: double.Parse(csvProfilePoint.Resistivity),
cd: double.Parse(csvProfilePoint.CD)
);
profilePoints.Add(profilePoint);
}
}
}

View File

@ -0,0 +1,28 @@
namespace Adaptation.FileHandlers.json;
public class ProfilePoint
{
public int Number { get; }
public double Depth { get; }
public double Raw { get; }
public double? Edited { get; }
public double? Resistivity { get; }
public double? CD { get; }
public ProfilePoint(int number,
double depth,
double raw,
double? edited,
double? resistivity,
double? cd)
{
Number = number;
Depth = depth;
Raw = raw;
Edited = edited;
Resistivity = resistivity;
CD = cd;
}
}

View File

@ -0,0 +1,33 @@
using System.Collections.Generic;
namespace Adaptation.FileHandlers.json;
public class RawData
{
public int TotalPoints { get; }
public List<Point> Points { get; }
internal RawData(csv.RawData rawData)
{
Point point;
List<Point> points = new();
TotalPoints = int.Parse(rawData.TotalPoints);
Points = points;
foreach (csv.Point csvPoint in rawData.Points)
{
point = new
(
number: double.Parse(csvPoint.Number),
depth: double.Parse(csvPoint.Depth),
resistance: double.Parse(csvPoint.Resistance),
stageX: double.Parse(csvPoint.StageX),
stageY: double.Parse(csvPoint.StageY),
stageZ: double.Parse(csvPoint.StageZ),
stageT: double.Parse(csvPoint.StageT)
);
points.Add(point);
}
}
}

View File

@ -0,0 +1,49 @@
using System.Collections.Generic;
namespace Adaptation.FileHandlers.json;
public class Setup
{
public string Finish { get; }
public string NorthProbeID { get; }
public string SouthProbeID { get; }
public string MeasurementPolarity { get; }
public double SineBevelAngle { get; }
public double ContactRadius { get; }
public double ProbeSpacing { get; }
public double ProbeLoad { get; }
public string Orientation { get; }
public int NumberOfStepSizes { get; }
public List<Step> Steps { get; }
internal Setup(csv.Setup setup)
{
Step step;
List<Step> steps = new();
Finish = setup.Finish;
NorthProbeID = setup.NorthProbeID;
SouthProbeID = setup.SouthProbeID;
MeasurementPolarity = setup.MeasurementPolarity;
SineBevelAngle = double.Parse(setup.SineBevelAngle);
ContactRadius = double.Parse(setup.ContactRadius);
ProbeSpacing = double.Parse(setup.ProbeSpacing);
ProbeLoad = double.Parse(setup.ProbeLoad);
Orientation = setup.Orientation;
NumberOfStepSizes = int.Parse(setup.NumberOfStepSizes);
Steps = steps;
foreach (csv.Step csvStep in setup.Steps)
{
step = new
(
number: int.Parse(csvStep.Number),
points: int.Parse(csvStep.Points),
x: double.Parse(csvStep.X),
y: double.Parse(csvStep.Y)
);
steps.Add(step);
}
}
}

View File

@ -0,0 +1,22 @@
namespace Adaptation.FileHandlers.json;
public class Step
{
public int Number { get; }
public int Points { get; }
public double X { get; }
public double Y { get; }
public Step(int number,
int points,
double x,
double y)
{
Number = number;
Points = points;
X = x;
Y = y;
}
}

View File

@ -0,0 +1,17 @@
namespace Adaptation.Ifx.Eaf.Common.Configuration;
[System.Runtime.Serialization.DataContract]
public class ConnectionSetting
{
#pragma warning disable CA2254
#pragma warning disable IDE0060
public ConnectionSetting(string name, string value) { }
[System.Runtime.Serialization.DataMember]
public string Name { get; set; }
[System.Runtime.Serialization.DataMember]
public string Value { get; set; }
}

View File

@ -0,0 +1,23 @@
using System;
using System.Collections.Generic;
namespace Adaptation.Ifx.Eaf.EquipmentConnector.File.Component;
public class File
{
#pragma warning disable CA2254
#pragma warning disable IDE0060
public File(string filePath) => throw new NotImplementedException();
public File(string filePath, DateTime timeFileFound) => throw new NotImplementedException();
public string Path { get; }
public DateTime TimeFound { get; }
public bool IsErrorFile { get; }
public Dictionary<string, string> ContentParameters { get; }
public File UpdateContentParameters(Dictionary<string, string> contentParameters) => throw new NotImplementedException();
public File UpdateParsingStatus(bool isErrorFile) => throw new NotImplementedException();
}

View File

@ -0,0 +1,40 @@
using Adaptation.Ifx.Eaf.EquipmentConnector.File.Configuration;
using System;
using System.Collections.Generic;
namespace Adaptation.Ifx.Eaf.EquipmentConnector.File.Component;
public class FilePathGenerator
{
#pragma warning disable CA1822
#pragma warning disable CA2254
#pragma warning disable IDE0060
public const char PLACEHOLDER_IDENTIFIER = '%';
public const char PLACEHOLDER_SEPARATOR = ':';
public const string PLACEHOLDER_NOT_AVAILABLE = "NA";
public const string PLACEHOLDER_ORIGINAL_FILE_NAME = "OriginalFileName";
public const string PLACEHOLDER_ORIGINAL_FILE_EXTENSION = "OriginalFileExtension";
public const string PLACEHOLDER_DATE_TIME = "DateTime";
public const string PLACEHOLDER_SUB_FOLDER = "SubFolder";
public const string PLACEHOLDER_CELL_NAME = "CellName";
public FilePathGenerator(FileConnectorConfiguration config, Dictionary<string, string> customPattern = null) => throw new NotImplementedException();
public FilePathGenerator(FileConnectorConfiguration config, File file, bool isErrorFile = false, Dictionary<string, string> customPattern = null) => throw new NotImplementedException();
public FilePathGenerator(FileConnectorConfiguration config, string sourceFilePath, bool isErrorFile = false, Dictionary<string, string> customPattern = null) => throw new NotImplementedException();
protected string SubFolderPath { get; }
protected FileConnectorConfiguration Configuration { get; }
protected File File { get; }
protected bool IsErrorFile { get; }
protected string DefaultPlaceHolderValue { get; }
public string GetFullTargetPath() => throw new NotImplementedException();
public virtual string GetTargetFileName() => throw new NotImplementedException();
public string GetTargetFolder(bool throwExceptionIfNotExist = true) => throw new NotImplementedException();
protected virtual string GetSubFolder(string folderPattern, string subFolderPath) => throw new NotImplementedException();
protected virtual string PrepareFolderPath(string targetFolderPath, string subFolderPath) => throw new NotImplementedException();
protected string ReplacePlaceholder(string inputPath) => throw new NotImplementedException();
}
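
The PLACEHOLDER_* constants above suggest a %Name%-style token syntax (with ':' separating a token from a default such as "NA") for target file and folder patterns. The following is a minimal sketch of composing such a pattern from the constants; it is illustrative only, since every member of the stub above throws NotImplementedException, and the exact grammar accepted by the real generator is an assumption.

using Adaptation.Ifx.Eaf.EquipmentConnector.File.Component;

internal static class FilePathGeneratorPatternSketch
{
    // Composes "%OriginalFileName%_%DateTime%%OriginalFileExtension%" from the
    // constants instead of hard-coding '%'; the token grammar itself is assumed.
    internal static string BuildTargetFileNamePattern()
    {
        char id = FilePathGenerator.PLACEHOLDER_IDENTIFIER;
        return $"{id}{FilePathGenerator.PLACEHOLDER_ORIGINAL_FILE_NAME}{id}" +
               $"_{id}{FilePathGenerator.PLACEHOLDER_DATE_TIME}{id}" +
               $"{id}{FilePathGenerator.PLACEHOLDER_ORIGINAL_FILE_EXTENSION}{id}";
    }
}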

View File

@ -0,0 +1,134 @@
using Adaptation.Ifx.Eaf.Common.Configuration;
using System;
using System.Collections.Generic;
namespace Adaptation.Ifx.Eaf.EquipmentConnector.File.Configuration;
[System.Runtime.Serialization.DataContract]
public class FileConnectorConfiguration
{
public const ulong IDLE_EVENT_WAIT_TIME_DEFAULT = 360;
public const ulong FILE_HANDLE_TIMEOUT_DEFAULT = 15;
[System.Runtime.Serialization.DataMember]
public virtual bool? TriggerOnChanged { get; set; }
[System.Runtime.Serialization.DataMember]
public virtual long? PostProcessingRetries { get; set; }
[System.Runtime.Serialization.DataMember]
public virtual bool? CopySourceFolderStructure { get; set; }
[System.Runtime.Serialization.DataMember]
public IfPostProcessingFailsEnum? IfPostProcessingFailsAction { get; set; }
[System.Runtime.Serialization.DataMember]
public string AlternateTargetFolder { get; set; }
[System.Runtime.Serialization.DataMember]
public long? FileHandleTimeout { get; set; }
[System.Runtime.Serialization.DataMember]
public bool? DeleteEmptySourceSubFolders { get; set; }
[System.Runtime.Serialization.DataMember]
public long? IdleEventWaitTimeInSeconds { get; set; }
[System.Runtime.Serialization.DataMember]
public string FileAgeThreshold { get; set; }
public bool? FolderAgeCheckIndividualSubFolders { get; set; }
[System.Runtime.Serialization.DataMember]
public virtual ZipModeEnum? ZipMode { get; set; }
[System.Runtime.Serialization.DataMember]
public FileAgeFilterEnum? FileAgeFilterMode { get; set; }
[System.Runtime.Serialization.DataMember]
public string ZipTargetFileName { get; set; }
[System.Runtime.Serialization.DataMember]
public string ZipErrorTargetFileName { get; set; }
[System.Runtime.Serialization.DataMember]
public long? ZipFileSubFolderLevel { get; set; }
[System.Runtime.Serialization.DataMember]
public string DefaultPlaceHolderValue { get; set; }
[System.Runtime.Serialization.DataMember]
public bool? UseZip64Mode { get; set; }
[System.Runtime.Serialization.DataMember]
public List<ConnectionSetting> ConnectionSettings { get; set; }
public string SourceDirectoryCloaking { get; set; }
public string FolderAgeThreshold { get; set; }
[System.Runtime.Serialization.DataMember]
public virtual long? FileScanningIntervalInSeconds { get; set; }
[System.Runtime.Serialization.DataMember]
public virtual bool? TriggerOnCreated { get; set; }
[System.Runtime.Serialization.DataMember]
public virtual long? ZipFileTime { get; set; }
[System.Runtime.Serialization.DataMember]
public string SourceFileLocation { get; set; }
[System.Runtime.Serialization.DataMember]
public string SourceFileFilter { get; set; }
public List<string> SourceFileFilters { get; set; }
[System.Runtime.Serialization.DataMember]
public virtual bool? IncludeSubDirectories { get; set; }
[System.Runtime.Serialization.DataMember]
public virtual FileScanningOptionEnum? FileScanningOption { get; set; }
[System.Runtime.Serialization.DataMember]
public string TargetFileLocation { get; set; }
[System.Runtime.Serialization.DataMember]
public string ErrorTargetFileLocation { get; set; }
[System.Runtime.Serialization.DataMember]
public string TargetFileName { get; set; }
[System.Runtime.Serialization.DataMember]
public virtual long? FileHandleWaitTime { get; set; }
[System.Runtime.Serialization.DataMember]
public IfFileExistEnum? IfFileExistAction { get; set; }
[System.Runtime.Serialization.DataMember]
public long? ConnectionRetryInterval { get; set; }
[System.Runtime.Serialization.DataMember]
public PreProcessingModeEnum? PreProcessingMode { get; set; }
[System.Runtime.Serialization.DataMember]
public PostProcessingModeEnum? PostProcessingMode { get; set; }
[System.Runtime.Serialization.DataMember]
public PostProcessingModeEnum? ErrorPostProcessingMode { get; set; }
[System.Runtime.Serialization.DataMember]
public virtual long? ZipFileAmount { get; set; }
[System.Runtime.Serialization.DataMember]
public string ErrorTargetFileName { get; set; }
public void Initialize() => throw new NotImplementedException();
public enum PostProcessingModeEnum
{
None = 0,
Move = 1,
Copy = 2,
Rename = 3,
Zip = 4,
Delete = 5,
MoveFolder = 6,
CopyFolder = 7,
DeleteFolder = 8
}
public enum PreProcessingModeEnum
{
None = 0,
Process = 1
}
public enum IfFileExistEnum
{
Overwrite = 0,
LeaveFiles = 1,
Delete = 2
}
public enum IfPostProcessingFailsEnum
{
LeaveFiles = 0,
Delete = 1
}
public enum FileScanningOptionEnum
{
FileWatcher = 0,
TimeBased = 1
}
public enum ZipModeEnum
{
ZipByAmountOrTime = 0,
ZipByFileName = 1,
ZipBySubFolderName = 2
}
public enum FileAgeFilterEnum
{
IgnoreNewer = 0,
IgnoreOlder = 1
}
}
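
As a reading aid, here is a minimal sketch of populating this configuration for a time-based scan. The property and enum names come from the class above; the folder paths, filter, and interval are hypothetical values.

using Adaptation.Ifx.Eaf.EquipmentConnector.File.Configuration;

internal static class FileConnectorConfigurationSketch
{
    // Illustrative values only; swap in real source/target locations before use.
    internal static FileConnectorConfiguration CreateTimeBasedSweep() =>
        new()
        {
            FileScanningOption = FileConnectorConfiguration.FileScanningOptionEnum.TimeBased,
            FileScanningIntervalInSeconds = 30,
            SourceFileLocation = @"D:\Incoming",   // hypothetical path
            SourceFileFilter = "*.csv",
            IncludeSubDirectories = false,
            PostProcessingMode = FileConnectorConfiguration.PostProcessingModeEnum.Move,
            TargetFileLocation = @"D:\Processed",  // hypothetical path
            IfFileExistAction = FileConnectorConfiguration.IfFileExistEnum.LeaveFiles,
            IfPostProcessingFailsAction = FileConnectorConfiguration.IfPostProcessingFailsEnum.LeaveFiles,
        };
}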

View File

@ -0,0 +1,18 @@
using Adaptation.Eaf.EquipmentCore.SelfDescription.ParameterTypes;
using System.Collections.Generic;
namespace Adaptation.Ifx.Eaf.EquipmentConnector.File.SelfDescription;
public class FileConnectorParameterTypeDefinitionProvider
{
#pragma warning disable CA1822
#pragma warning disable CA2254
#pragma warning disable IDE0060
public FileConnectorParameterTypeDefinitionProvider() { }
public IEnumerable<ParameterTypeDefinition> GetAllParameterTypeDefinition() => null;
public ParameterTypeDefinition GetParameterTypeDefinition(string name) => null;
}

View File

@ -0,0 +1,20 @@
using System;
using System.Net;
namespace Infineon.Monitoring.MonA;
#nullable disable
#pragma warning disable SYSLIB0014
public class ExtWebClient : WebClient
{
protected override WebRequest GetWebRequest(Uri address)
{
WebRequest webRequest = base.GetWebRequest(address);
if (webRequest != null)
webRequest.PreAuthenticate = PreAuthenticate;
return webRequest;
}
public bool PreAuthenticate { get; set; }
}

View File

@ -0,0 +1,167 @@
using System;
namespace Infineon.Monitoring.MonA;
public interface IMonIn
{
string SendStatus(string site, string resource, string stateName, State state);
string SendStatus(
string site,
DateTime timeStamp,
string resource,
string stateName,
State state);
string SendStatus(
string site,
string resource,
string stateName,
State state,
string description);
string SendStatus(
string site,
DateTime timeStamp,
string resource,
string stateName,
State state,
string description);
string SendStatus(
string site,
string resource,
string subresource,
string stateName,
State state);
string SendStatus(
string site,
DateTime timeStamp,
string resource,
string subresource,
string stateName,
State state);
string SendStatus(
string site,
string resource,
string subresource,
string stateName,
State state,
string description);
string SendStatus(
string site,
DateTime? timeStamp,
string resource,
string subresource,
string stateName,
State state,
string description);
string SendPerformanceMessage(
string site,
string resource,
string performanceName,
double value);
string SendPerformanceMessage(
string site,
DateTime? timeStamp,
string resource,
string performanceName,
double value);
string SendPerformanceMessage(
string site,
string resource,
string performanceName,
double value,
string description);
string SendPerformanceMessage(
string site,
DateTime? timeStamp,
string resource,
string performanceName,
double value,
string description);
string SendPerformanceMessage(
string site,
DateTime? timeStamp,
string resource,
string performanceName,
double value,
int? interval);
string SendPerformanceMessage(
string site,
string resource,
DateTime? timeStamp,
string performanceName,
double value,
string unit);
string SendPerformanceMessage(
string site,
DateTime? timeStamp,
string resource,
string performanceName,
double value,
string unit,
int? interval);
string SendPerformanceMessage(
string site,
string resource,
string subresource,
string performanceName,
double value);
string SendPerformanceMessage(
string site,
DateTime? timeStamp,
string resource,
string subresource,
string performanceName,
double value);
string SendPerformanceMessage(
string site,
string resource,
string subresource,
string performanceName,
double value,
string description);
string SendPerformanceMessage(
string site,
DateTime? timeStamp,
string resource,
string subresource,
string performanceName,
double value,
int? interval);
string SendPerformanceMessage(
string site,
DateTime? timeStamp,
string resource,
string subresource,
string performanceName,
double value,
string unit);
string SendPerformanceMessage(
string site,
DateTime? timeStamp,
string resource,
string subresource,
string performanceName,
double value,
string description,
string unit,
int? interval);
}

View File

@ -0,0 +1,292 @@
using System;
using System.Collections.Generic;
using System.Globalization;
using System.Net;
using System.Text;
namespace Infineon.Monitoring.MonA;
public class MonIn : IMonIn, IDisposable
{
private static readonly DateTime _Utc1970DateTime = new(1970, 1, 1, 0, 0, 0, DateTimeKind.Utc);
public const string MonInUrl = "http://moninhttp.{0}.infineon.com/input/text";
private static readonly Dictionary<string, MonIn> _Instances = new();
private readonly ExtWebClient _WebClient;
private readonly string _MonInUrl;
private static CultureInfo _CultureInfo;
public static MonIn GetInstance(string url = MonInUrl)
{
MonIn instance;
if (_Instances.ContainsKey(url))
{
instance = _Instances[url];
}
else
{
lock (_Instances)
{
if (!_Instances.ContainsKey(url))
{
instance = new MonIn(url);
_Instances.Add(url, instance);
}
else
instance = _Instances[url];
}
}
return instance;
}
private MonIn(string url)
{
_WebClient = new ExtWebClient();
_WebClient.Headers[HttpRequestHeader.ContentType] = "application/text";
_WebClient.Encoding = Encoding.UTF8;
_CultureInfo = new CultureInfo("en-US");
_MonInUrl = url;
}
public void SetBasicAuthentication(string username, string password)
{
_WebClient.PreAuthenticate = true;
_WebClient.Headers[HttpRequestHeader.Authorization] = "Basic " + Convert.ToBase64String(Encoding.ASCII.GetBytes(username + ":" + password));
}
public string SendStatus(string site, string resource, string stateName, State state) => SendStatus(site, new DateTime?(), resource, string.Empty, stateName, state, string.Empty);
public string SendStatus(
string site,
DateTime timeStamp,
string resource,
string stateName,
State state) => SendStatus(site, new DateTime?(timeStamp), resource, string.Empty, stateName, state, string.Empty);
public string SendStatus(
string site,
string resource,
string stateName,
State state,
string description) => SendStatus(site, new DateTime?(), resource, string.Empty, stateName, state, description);
public string SendStatus(
string site,
DateTime timeStamp,
string resource,
string stateName,
State state,
string description) => SendStatus(site, new DateTime?(timeStamp), resource, string.Empty, stateName, state, description);
public string SendStatus(
string site,
string resource,
string subresource,
string stateName,
State state) => SendStatus(site, new DateTime?(), resource, subresource, stateName, state, string.Empty);
public string SendStatus(
string site,
DateTime timeStamp,
string resource,
string subresource,
string stateName,
State state) => SendStatus(site, new DateTime?(timeStamp), resource, subresource, stateName, state, string.Empty);
public string SendStatus(
string site,
string resource,
string subresource,
string stateName,
State state,
string description) => SendStatus(site, new DateTime?(), resource, subresource, stateName, state, description);
public string SendStatus(
string site,
DateTime? timeStamp,
string resource,
string subresource,
string stateName,
State state,
string description)
{
string statusMessage = CreateStatusMessage(site, timeStamp, resource, subresource, stateName, state.ToString(), description);
lock (_WebClient)
return _WebClient.UploadString(string.Format(_MonInUrl, site), statusMessage);
}
public string SendPerformanceMessage(
string site,
string resource,
string performanceName,
double value) => SendPerformanceMessage(site, new DateTime?(), resource, string.Empty, performanceName, value, string.Empty, string.Empty, new int?());
public string SendPerformanceMessage(
string site,
DateTime? timeStamp,
string resource,
string performanceName,
double value) => SendPerformanceMessage(site, timeStamp, resource, string.Empty, performanceName, value, string.Empty, string.Empty, new int?());
public string SendPerformanceMessage(
string site,
string resource,
string performanceName,
double value,
string description) => SendPerformanceMessage(site, new DateTime?(), resource, string.Empty, performanceName, value, description, string.Empty, new int?());
public string SendPerformanceMessage(
string site,
DateTime? timeStamp,
string resource,
string performanceName,
double value,
string description) => SendPerformanceMessage(site, timeStamp, resource, string.Empty, performanceName, value, description, string.Empty, new int?());
public string SendPerformanceMessage(
string site,
DateTime? timeStamp,
string resource,
string performanceName,
double value,
int? interval) => SendPerformanceMessage(site, timeStamp, resource, string.Empty, performanceName, value, string.Empty, string.Empty, interval);
public string SendPerformanceMessage(
string site,
string resource,
DateTime? timeStamp,
string performanceName,
double value,
string unit) => SendPerformanceMessage(site, timeStamp, resource, string.Empty, performanceName, value, string.Empty, unit, new int?());
public string SendPerformanceMessage(
string site,
DateTime? timeStamp,
string resource,
string performanceName,
double value,
string unit,
int? interval) => SendPerformanceMessage(site, timeStamp, resource, string.Empty, performanceName, value, string.Empty, unit, interval);
public string SendPerformanceMessage(
string site,
string resource,
string subresource,
string performanceName,
double value) => SendPerformanceMessage(site, new DateTime?(), resource, subresource, performanceName, value, string.Empty, string.Empty, new int?());
public string SendPerformanceMessage(
string site,
DateTime? timeStamp,
string resource,
string subresource,
string performanceName,
double value) => SendPerformanceMessage(site, timeStamp, resource, subresource, performanceName, value, string.Empty, string.Empty, new int?());
public string SendPerformanceMessage(
string site,
string resource,
string subresource,
string performanceName,
double value,
string description) => SendPerformanceMessage(site, new DateTime?(), resource, subresource, performanceName, value, description, string.Empty, new int?());
public string SendPerformanceMessage(
string site,
DateTime? timeStamp,
string resource,
string subresource,
string performanceName,
double value,
int? interval) => SendPerformanceMessage(site, timeStamp, resource, subresource, performanceName, value, string.Empty, string.Empty, interval);
public string SendPerformanceMessage(
string site,
DateTime? timeStamp,
string resource,
string subresource,
string performanceName,
double value,
string unit) => SendPerformanceMessage(site, timeStamp, resource, subresource, performanceName, value, string.Empty, unit, new int?());
public string SendPerformanceMessage(
string site,
DateTime? timeStamp,
string resource,
string subresource,
string performanceName,
double value,
string description,
string unit,
int? interval)
{
string performanceMessage = CreatePerformanceMessage(site, timeStamp, resource, subresource, performanceName, value, description, unit, interval);
lock (_WebClient)
return _WebClient.UploadString(string.Format(_MonInUrl, site), performanceMessage);
}
private static string CreateStatusMessage(
string site,
DateTime? timeStamp,
string resource,
string subresource,
string stateName,
string state,
string description)
{
StringBuilder stringBuilder = new();
if (string.IsNullOrEmpty(subresource))
_ = stringBuilder.AppendFormat(_CultureInfo, "> {0} {1} \"{2}\" \"{3}\" {4} \n{5}", site.Trim(), timeStamp.HasValue ? GetDateTimeNowAsPosix(timeStamp.Value) : (object)"now", resource.Trim(), stateName.Trim(), state.Trim(), description.Trim());
else
_ = stringBuilder.AppendFormat(_CultureInfo, "> {0} {1} \"{2}\" \"{3}\" \"{4}\" {5} \n{6}", site.Trim(), timeStamp.HasValue ? GetDateTimeNowAsPosix(timeStamp.Value) : (object)"now", resource.Trim(), subresource.Trim(), stateName.Trim(), state.Trim(), description.Trim());
return stringBuilder.ToString();
}
private static string CreatePerformanceMessage(
string site,
DateTime? timeStamp,
string resource,
string subresource,
string performanceName,
double value,
string description,
string unit,
int? interval)
{
StringBuilder stringBuilder = new();
if (string.IsNullOrEmpty(subresource))
{
if (unit.Equals(string.Empty) && !interval.HasValue)
_ = stringBuilder.AppendFormat(_CultureInfo, "> {0} {1} \"{2}\" \"{3}\" {4} \n{5}", site.Trim(), timeStamp.HasValue ? GetDateTimeNowAsPosix(timeStamp.Value) : (object)"now", resource.Trim(), performanceName.Trim(), value, description.Trim());
else
_ = stringBuilder.AppendFormat(_CultureInfo, "> {0} {1} \"{2}\" \"{3}\" {4} {5} {{interval={6}, unit={7}}}\n", site.Trim(), timeStamp.HasValue ? GetDateTimeNowAsPosix(timeStamp.Value) : (object)"now", resource.Trim(), performanceName.Trim(), value, description.Trim(), interval.HasValue ? interval.Value.ToString() : (object)string.Empty, unit.Trim());
}
else if (unit.Equals(string.Empty) && !interval.HasValue)
_ = stringBuilder.AppendFormat(_CultureInfo, "> {0} {1} \"{2}\" \"{3}\" \"{4}\" {5} \n{6}", site.Trim(), timeStamp.HasValue ? GetDateTimeNowAsPosix(timeStamp.Value) : (object)"now", resource.Trim(), subresource.Trim(), performanceName.Trim(), value, description.Trim());
else
_ = stringBuilder.AppendFormat(_CultureInfo, "> {0} {1} \"{2}\" \"{3}\" \"{4}\" {5} {6} {{interval={7}, unit={8}}}\n", site.Trim(), timeStamp.HasValue ? GetDateTimeNowAsPosix(timeStamp.Value) : (object)"now", resource.Trim(), subresource.Trim(), performanceName.Trim(), value, description.Trim(), interval.HasValue ? interval.Value.ToString() : (object)string.Empty, unit.Trim());
return stringBuilder.ToString();
}
private static string GetDateTimeNowAsPosix(DateTime timeStamp)
{
if (timeStamp > DateTime.Now)
timeStamp = DateTime.Now;
return ((int)timeStamp.ToUniversalTime().Subtract(_Utc1970DateTime).TotalSeconds).ToString(CultureInfo.InvariantCulture);
}
public void Dispose()
{
KeyValuePair<string, MonIn> keyValuePair = new();
foreach (KeyValuePair<string, MonIn> instance in _Instances)
{
if (instance.Value == this)
{
keyValuePair = instance;
break;
}
}
if (keyValuePair.Key is not null)
    _ = _Instances.Remove(keyValuePair.Key);
_WebClient?.Dispose();
}
}
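
A minimal usage sketch follows, assuming placeholder site, resource, and credential values. GetInstance caches one MonIn per URL template, and the "{0}" in that template is replaced with the site name when a message is uploaded.

using Infineon.Monitoring.MonA;

internal static class MonInUsageSketch
{
    internal static void Send()
    {
        // One MonIn instance is cached per URL template.
        MonIn monIn = MonIn.GetInstance();
        monIn.SetBasicAuthentication("serviceUser", "servicePassword"); // placeholder credentials
        // Posts to the MonA endpoint for the given site; names below are placeholders.
        _ = monIn.SendStatus("sitename", "MET08RESISRP2100", "HeartBeat", State.Up);
        _ = monIn.SendPerformanceMessage("sitename", "MET08RESISRP2100", "FilesPerHour", 42d);
    }
}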

View File

@ -0,0 +1,11 @@
namespace Infineon.Monitoring.MonA;
public enum State
{
Up,
Ok,
Warning,
Critical,
Down,
Unknown,
}

View File

@ -0,0 +1,187 @@
trigger:
branches:
include:
- development
paths:
include:
- "Adaptation/*"
pool:
name: Mesa-Windows-Service
demands: MET08RESISRP2100-Development
steps:
- script: |
set coreVersion=net7.0
echo %coreVersion%
echo ##vso[task.setvariable variable=CoreVersion;]%coreVersion%
echo $(CoreVersion)
displayName: CoreVersion
- script: |
set configuration=Debug
echo %configuration%
echo ##vso[task.setvariable variable=Configuration;]%configuration%
echo $(Configuration)
displayName: Configuration
- script: |
set nugetSource=https://messa017.infineon.com/v3/index.json
echo %nugetSource%
echo ##vso[task.setvariable variable=NugetSource;]%nugetSource%
echo $(NugetSource)
displayName: NugetSource
- script: |
set gitCommit=$(Build.SourceVersion)
set gitCommitSeven=%gitCommit:~0,7%
echo %gitCommitSeven%
echo ##vso[task.setvariable variable=GitCommitSeven;]%gitCommitSeven%
echo $(GitCommitSeven)
displayName: GitCommitSeven
- script: |
echo $(Build.BuildId)
echo $(Build.Reason)
echo $(Build.Repository.Id)
echo $(Build.Repository.Name)
echo $(Build.SourceVersion)
echo $(CoreVersion)
echo $(Configuration)
echo $(NugetSource)
echo $(GitCommitSeven)
REM echo $(pipelinePassword)
displayName: "Echo Check"
- script: '"C:\program files\dotnet\dotnet.exe" nuget locals all --clear'
displayName: "Nuget Clear"
enabled: false
- script: |
"C:\program files\dotnet\dotnet.exe" user-secrets init
"C:\program files\dotnet\dotnet.exe" user-secrets set "BuildNumber" "$(Build.BuildId)"
"C:\program files\dotnet\dotnet.exe" user-secrets set "GitCommitSeven" "$(GitCommitSeven)"
"C:\program files\dotnet\dotnet.exe" user-secrets list
workingDirectory: Adaptation
displayName: "Safe storage of app secrets - Adaptation"
- task: CopyFiles@2
displayName: "Copy Files to: - GhostPCL"
inputs:
SourceFolder: '\\messv02ecc1.ec.local\EC_EAFRepository\Staging\DeploymentStorage\GhostPCL\gpcl6win64'
TargetFolder: 'Adaptation\bin\$(Configuration)\net6.0\win-x64'
OverWrite: true
enabled: false
- task: CopyFiles@2
displayName: "Copy Files to: - LincPDFC"
inputs:
SourceFolder: '\\messv02ecc1.ec.local\EC_EAFRepository\Staging\DeploymentStorage\LincPDFC\v2.6.6.21'
TargetFolder: 'Adaptation\bin\$(Configuration)\net6.0\win-x64'
OverWrite: true
enabled: false
- task: CopyFiles@2
displayName: "Copy Files to: - PDF-Text-Stripper"
inputs:
SourceFolder: '\\messv02ecc1.ec.local\EC_EAFRepository\Staging\DeploymentStorage\PDF-Text-Stripper\v4.8.0.1'
TargetFolder: 'Adaptation\bin\$(Configuration)\net6.0\win-x64'
OverWrite: true
enabled: false
- script: '"C:\program files\dotnet\dotnet.exe" build --configuration $(Configuration) --source $(NugetSource)'
workingDirectory: Adaptation
displayName: "Core Build - Adaptation"
- powershell: Get-ChildItem .\ -include TestResults -Recurse | foreach ($_) { remove-item $_.fullname -Force -Recurse }
workingDirectory: "$(System.DefaultWorkingDirectory)/../../../$(Build.Repository.Name)"
displayName: "PowerShell Script"
continueOnError: true
- script: "dotnet test --configuration $(Configuration)"
workingDirectory: Adaptation
displayName: "Core Test"
- script: 'move /y "$(System.DefaultWorkingDirectory)/../../../$(Build.Repository.Name)/05_TestResults/TestResults" $(System.DefaultWorkingDirectory)'
displayName: "Move Results"
- script: '"C:\program files\dotnet\dotnet.exe" tool restore'
workingDirectory: Adaptation
displayName: "Tool Restore"
enabled: false
- script: '"C:\program files\dotnet\dotnet.exe" ReportGenerator -reports:$(System.DefaultWorkingDirectory)/TestResults/**/coverage.cobertura.xml -targetDir:$(Build.ArtifactStagingDirectory)\Coverage -reportTypes:Html_Dark'
workingDirectory: Adaptation
displayName: "Report Generator"
enabled: false
- task: PublishTestResults@2
displayName: "Publish Test Results **/*.trx"
inputs:
testResultsFormat: VSTest
testResultsFiles: "**/*.trx"
testRunTitle: "$(GitCommitSeven)-$(Build.BuildId)-$(CoreVersion)-$(Configuration)-$(Build.Repository.Name)"
searchFolder: "$(System.DefaultWorkingDirectory)/TestResults"
- task: PublishTestResults@2
displayName: "Publish Test Results */coverage.cobertura.xml"
inputs:
testResultsFormat: VSTest
testResultsFiles: "*/coverage.cobertura.xml"
testRunTitle: "$(GitCommitSeven)-$(Build.BuildId)-$(CoreVersion)-$(Configuration)-$(Build.Repository.Name)"
searchFolder: "$(System.DefaultWorkingDirectory)/TestResults"
- task: mspremier.CreateWorkItem.CreateWorkItem-task.CreateWorkItem@1
displayName: "Create work item"
inputs:
teamProject: "Mesa_FI"
workItemType: Bug
title: $(GitCommitSeven)-$(Build.BuildId)-$(Build.Repository.Name)-$(Configuration)
assignedTo: "$(Build.RequestedForId)"
enabled: false
- script: '"C:\program files\dotnet\dotnet.exe" publish --configuration $(Configuration) --runtime win-x64 --self-contained -o $(Build.ArtifactStagingDirectory)\Adaptation --source $(NugetSource)'
workingDirectory: Adaptation
displayName: "Core Publish"
enabled: false
- script: '"C:/Program Files (x86)/Microsoft Visual Studio/2022/BuildTools/MSBuild/Current/Bin/MSBuild.exe" /target:Restore /detailedsummary /consoleloggerparameters:PerformanceSummary;ErrorsOnly; /property:Configuration=$(Configuration);TargetFrameworkVersion=v4.8 /p:RestoreSources=$(NugetSource)'
displayName: "MSBuild Restore"
- script: '"C:/Program Files (x86)/Microsoft Visual Studio/2022/BuildTools/MSBuild/Current/Bin/MSBuild.exe" /target:Build /detailedsummary /consoleloggerparameters:PerformanceSummary;ErrorsOnly; /property:Configuration=$(Configuration);TargetFrameworkVersion=v4.8'
displayName: MSBuild
- script: 'echo $(Build.SourceVersion)-$(Build.BuildId)-$(Build.Repository.Name)>bin\$(Configuration)\$(Build.Repository.Name).txt'
displayName: "Commit Id"
- task: CopyFiles@2
displayName: 'Copy Files to: D:\Framework4.8'
inputs:
SourceFolder: 'bin\$(Configuration)'
Contents: "*$(Build.Repository.Name)*"
TargetFolder: 'D:\Framework4.8\$(GitCommitSeven)-$(Build.BuildId)-$(Build.Repository.Name)-$(Configuration)'
OverWrite: true
enabled: false
- task: CopyFiles@2
displayName: 'Copy Files to: \\messv02ecc1.ec.local\EC_EAFRepository'
inputs:
SourceFolder: 'bin\$(Configuration)'
Contents: "*$(Build.Repository.Name)*"
TargetFolder: '\\messv02ecc1.ec.local\EC_EAFRepository\Staging\DeploymentStorage\Adaptation_$(Build.Repository.Name)'
OverWrite: true
enabled: false
- script: |
"C:\program files\dotnet\dotnet.exe" clean --configuration $(Configuration)
workingDirectory: Adaptation
displayName: "Core Clean - Tests"
- script: |
"C:\program files\dotnet\dotnet.exe" clean --configuration $(Configuration)
workingDirectory: Adaptation
displayName: "Core Clean - Adaptation"
- script: 'echo $(Build.SourceVersion)-$(Build.BuildId)>bin_x_x_\$(Configuration)\$(CoreVersion)\win-x64\$(Build.Repository.Name).txt'
displayName: "Force Fail"
enabled: false

Some files were not shown because too many files have changed in this diff.