JavaScript methods for sequence to readable date

C# like Java for PI5

Helper 2025-02-19: more updates for Compare
This commit is contained in:
Mike Phares 2025-03-26 17:02:35 -07:00
parent 0621d0f07e
commit 919279a917
12 changed files with 1119 additions and 738 deletions
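The commit title mentions JavaScript helpers for turning a sequence into a readable date; those files are not among the diffs shown below, but the same conversion appears in C# in Helper20250219.Write, where Sequence holds a DateTime tick count. A minimal standalone sketch of that conversion (class and method names here are illustrative, not from the commit):

using System;
using System.Globalization;

internal static class SequenceDate {
    // A PDSF "SEQUENCE=" value in these helpers is a .NET DateTime tick count,
    // so making it readable is a DateTime round-trip (format string as in Helper20250219.Write).
    internal static string ToReadableDate(long sequence) =>
        new DateTime(sequence).ToString("MM/dd/yyyy HH:mm:ss", CultureInfo.InvariantCulture);
}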

381
ADO2025/PI5/.editorconfig Normal file
View File

@@ -0,0 +1,381 @@
root = true

[*.md]
end_of_line = crlf
file_header_template = unset
indent_size = 2
indent_style = space
insert_final_newline = false
tab_width = 2
[*.csproj]
end_of_line = crlf
file_header_template = unset
indent_size = 2
indent_style = space
insert_final_newline = false
tab_width = 2
[*.cs]
csharp_indent_block_contents = true
csharp_indent_braces = false
csharp_indent_case_contents = true
csharp_indent_case_contents_when_block = true
csharp_indent_labels = one_less_than_current
csharp_indent_switch_labels = true
csharp_new_line_before_catch = false
csharp_new_line_before_else = false
csharp_new_line_before_finally = false
csharp_new_line_before_members_in_anonymous_types = true
csharp_new_line_before_members_in_object_initializers = true
csharp_new_line_before_open_brace = none
csharp_new_line_between_query_expression_clauses = true
csharp_prefer_braces = false
csharp_prefer_qualified_reference = true:error
csharp_prefer_simple_default_expression = true:warning
csharp_prefer_simple_using_statement = true:warning
csharp_prefer_static_local_function = true:warning
csharp_preferred_modifier_order = public,private,protected,internal,static,extern,new,virtual,abstract,sealed,override,readonly,unsafe,volatile,async
csharp_preserve_single_line_blocks = true
csharp_preserve_single_line_statements = false
csharp_space_after_cast = false
csharp_space_after_colon_in_inheritance_clause = true
csharp_space_after_comma = true
csharp_space_after_dot = false
csharp_space_after_keywords_in_control_flow_statements = true
csharp_space_after_semicolon_in_for_statement = true
csharp_space_around_binary_operators = before_and_after
csharp_space_around_declaration_statements = false
csharp_space_before_colon_in_inheritance_clause = true
csharp_space_before_comma = false
csharp_space_before_dot = false
csharp_space_before_open_square_brackets = false
csharp_space_before_semicolon_in_for_statement = false
csharp_space_between_empty_square_brackets = false
csharp_space_between_method_call_empty_parameter_list_parentheses = false
csharp_space_between_method_call_name_and_opening_parenthesis = false
csharp_space_between_method_call_parameter_list_parentheses = false
csharp_space_between_method_declaration_empty_parameter_list_parentheses = false
csharp_space_between_method_declaration_name_and_open_parenthesis = false
csharp_space_between_method_declaration_parameter_list_parentheses = false
csharp_space_between_parentheses = false
csharp_space_between_square_brackets = false
csharp_style_allow_blank_line_after_colon_in_constructor_initializer_experimental = true
csharp_style_allow_blank_line_after_token_in_arrow_expression_clause_experimental = true
csharp_style_allow_blank_line_after_token_in_conditional_expression_experimental = true
csharp_style_allow_blank_lines_between_consecutive_braces_experimental = true
csharp_style_allow_embedded_statements_on_same_line_experimental = true
csharp_style_conditional_delegate_call = true
csharp_style_deconstructed_variable_declaration = false
csharp_style_expression_bodied_accessors = when_on_single_line:warning
csharp_style_expression_bodied_constructors = when_on_single_line:warning
csharp_style_expression_bodied_indexers = when_on_single_line:warning
csharp_style_expression_bodied_lambdas = when_on_single_line:warning
csharp_style_expression_bodied_local_functions = when_on_single_line:warning
csharp_style_expression_bodied_methods = when_on_single_line:warning
csharp_style_expression_bodied_operators = when_on_single_line:warning
csharp_style_expression_bodied_properties = when_on_single_line:warning
csharp_style_implicit_object_creation_when_type_is_apparent = true:warning
csharp_style_inlined_variable_declaration = false
csharp_style_namespace_declarations = file_scoped:warning
csharp_style_pattern_local_over_anonymous_function = true:warning
csharp_style_pattern_matching_over_as_with_null_check = true:warning
csharp_style_pattern_matching_over_is_with_cast_check = true:warning
csharp_style_prefer_index_operator = true:warning
csharp_style_prefer_not_pattern = true:warning
csharp_style_prefer_null_check_over_type_check = true
csharp_style_prefer_pattern_matching = true:warning
csharp_style_prefer_range_operator = true:warning
csharp_style_prefer_switch_expression = true:warning
csharp_style_throw_expression = true
csharp_style_unused_value_assignment_preference = discard_variable:warning
csharp_style_unused_value_expression_statement_preference = discard_variable:warning
csharp_style_var_elsewhere = false:warning
csharp_style_var_for_built_in_types = false:warning
csharp_style_var_when_type_is_apparent = false:warning
csharp_using_directive_placement = outside_namespace
dotnet_analyzer_diagnostic.category-Design.severity = error
dotnet_analyzer_diagnostic.category-Documentation.severity = error
dotnet_analyzer_diagnostic.category-Globalization.severity = none
dotnet_analyzer_diagnostic.category-Interoperability.severity = error
dotnet_analyzer_diagnostic.category-Maintainability.severity = error
dotnet_analyzer_diagnostic.category-Naming.severity = none
dotnet_analyzer_diagnostic.category-Performance.severity = none
dotnet_analyzer_diagnostic.category-Reliability.severity = error
dotnet_analyzer_diagnostic.category-Security.severity = error
dotnet_analyzer_diagnostic.category-SingleFile.severity = error
dotnet_analyzer_diagnostic.category-Style.severity = error
dotnet_analyzer_diagnostic.category-Usage.severity = error
dotnet_code_quality_unused_parameters = non_public
dotnet_code_quality.CAXXXX.api_surface = private, internal
dotnet_diagnostic.CA1001.severity = error # CA1001: Types that own disposable fields should be disposable
dotnet_diagnostic.CA1051.severity = error # CA1051: Do not declare visible instance fields
dotnet_diagnostic.CA1511.severity = warning # CA1511: Use 'ArgumentException.ThrowIfNullOrEmpty' instead of explicitly throwing a new exception instance
dotnet_diagnostic.CA1513.severity = warning # Use 'ObjectDisposedException.ThrowIf' instead of explicitly throwing a new exception instance
dotnet_diagnostic.CA1825.severity = warning # CA1825: Avoid zero-length array allocations
dotnet_diagnostic.CA1829.severity = error # CA1829: Use Length/Count property instead of Count() when available
dotnet_diagnostic.CA1834.severity = warning # CA1834: Consider using 'StringBuilder.Append(char)' when applicable
dotnet_diagnostic.CA1860.severity = error # CA1860: Prefer comparing 'Count' to 0 rather than using 'Any()', both for clarity and for performance
dotnet_diagnostic.CA1862.severity = warning # CA1862: Prefer using 'string.Equals(string, StringComparison)' to perform a case-insensitive comparison, but keep in mind that this might cause subtle changes in behavior, so make sure to conduct thorough testing after applying the suggestion, or if culturally sensitive comparison is not required, consider using 'StringComparison.OrdinalIgnoreCase'
dotnet_diagnostic.CA1869.severity = none # CA1869: Avoid creating a new 'JsonSerializerOptions' instance for every serialization operation. Cache and reuse instances instead.
dotnet_diagnostic.CA2201.severity = none # CA2201: Exception type System.NullReferenceException is reserved by the runtime
dotnet_diagnostic.CA2254.severity = none # CA2254: The logging message template should not vary between calls to 'LoggerExtensions.LogInformation(ILogger, string?, params object?[])'
dotnet_diagnostic.IDE0001.severity = warning # IDE0001: Simplify name
dotnet_diagnostic.IDE0002.severity = warning # Simplify (member access) - System.Version.Equals("1", "2"); Version.Equals("1", "2");
dotnet_diagnostic.IDE0004.severity = warning # IDE0004: Cast is redundant.
dotnet_diagnostic.IDE0005.severity = error # Using directive is unnecessary
dotnet_diagnostic.IDE0010.severity = none # Add missing cases to switch statement (IDE0010)
dotnet_diagnostic.IDE0028.severity = error # IDE0028: Collection initialization can be simplified
dotnet_diagnostic.IDE0031.severity = warning # Use null propagation (IDE0031)
dotnet_diagnostic.IDE0047.severity = warning # IDE0047: Parentheses can be removed
dotnet_diagnostic.IDE0048.severity = none # Parentheses preferences (IDE0047 and IDE0048)
dotnet_diagnostic.IDE0049.severity = warning # Use language keywords instead of framework type names for type references (IDE0049)
dotnet_diagnostic.IDE0051.severity = error # Private member '' is unused [, ]
dotnet_diagnostic.IDE0058.severity = error # IDE0058: Expression value is never used
dotnet_diagnostic.IDE0060.severity = error # IDE0060: Remove unused parameter
dotnet_diagnostic.IDE0074.severity = warning # IDE0074: Use compound assignment
dotnet_diagnostic.IDE0130.severity = none # Namespace does not match folder structure (IDE0130)
dotnet_diagnostic.IDE0270.severity = warning # IDE0270: Null check can be simplified
dotnet_diagnostic.IDE0290.severity = none # Use primary constructor [Distance]csharp(IDE0290)
dotnet_diagnostic.IDE0300.severity = error # IDE0300: Collection initialization can be simplified
dotnet_diagnostic.IDE0301.severity = error # IDE0301: Collection initialization can be simplified
dotnet_diagnostic.IDE0305.severity = none # IDE0305: Collection initialization can be simplified
dotnet_diagnostic.IDE2000.severity = error # IDE2000: Avoid multiple blank lines
dotnet_naming_rule.abstract_method_should_be_pascal_case.severity = warning
dotnet_naming_rule.abstract_method_should_be_pascal_case.style = pascal_case
dotnet_naming_rule.abstract_method_should_be_pascal_case.symbols = abstract_method
dotnet_naming_rule.class_should_be_pascal_case.severity = warning
dotnet_naming_rule.class_should_be_pascal_case.style = pascal_case
dotnet_naming_rule.class_should_be_pascal_case.symbols = class
dotnet_naming_rule.delegate_should_be_pascal_case.severity = warning
dotnet_naming_rule.delegate_should_be_pascal_case.style = pascal_case
dotnet_naming_rule.delegate_should_be_pascal_case.symbols = delegate
dotnet_naming_rule.enum_should_be_pascal_case.severity = warning
dotnet_naming_rule.enum_should_be_pascal_case.style = pascal_case
dotnet_naming_rule.enum_should_be_pascal_case.symbols = enum
dotnet_naming_rule.event_should_be_pascal_case.severity = warning
dotnet_naming_rule.event_should_be_pascal_case.style = pascal_case
dotnet_naming_rule.event_should_be_pascal_case.symbols = event
dotnet_naming_rule.interface_should_be_begins_with_i.severity = warning
dotnet_naming_rule.interface_should_be_begins_with_i.style = begins_with_i
dotnet_naming_rule.interface_should_be_begins_with_i.symbols = interface
dotnet_naming_rule.method_should_be_pascal_case.severity = warning
dotnet_naming_rule.method_should_be_pascal_case.style = pascal_case
dotnet_naming_rule.method_should_be_pascal_case.symbols = method
dotnet_naming_rule.non_field_members_should_be_pascal_case.severity = warning
dotnet_naming_rule.non_field_members_should_be_pascal_case.style = pascal_case
dotnet_naming_rule.non_field_members_should_be_pascal_case.symbols = non_field_members
dotnet_naming_rule.private_method_should_be_pascal_case.severity = warning
dotnet_naming_rule.private_method_should_be_pascal_case.style = pascal_case
dotnet_naming_rule.private_method_should_be_pascal_case.symbols = private_method
dotnet_naming_rule.private_or_internal_field_should_be_private_of_internal_field.severity = warning
dotnet_naming_rule.private_or_internal_field_should_be_private_of_internal_field.style = private_of_internal_field
dotnet_naming_rule.private_or_internal_field_should_be_private_of_internal_field.symbols = private_or_internal_field
dotnet_naming_rule.private_or_internal_static_field_should_be_private_of_internal_field.severity = warning
dotnet_naming_rule.private_or_internal_static_field_should_be_private_of_internal_field.style = private_of_internal_field
dotnet_naming_rule.private_or_internal_static_field_should_be_private_of_internal_field.symbols = private_or_internal_static_field
dotnet_naming_rule.property_should_be_pascal_case.severity = warning
dotnet_naming_rule.property_should_be_pascal_case.style = pascal_case
dotnet_naming_rule.property_should_be_pascal_case.symbols = property
dotnet_naming_rule.public_or_protected_field_should_be_private_of_internal_field.severity = warning
dotnet_naming_rule.public_or_protected_field_should_be_private_of_internal_field.style = private_of_internal_field
dotnet_naming_rule.public_or_protected_field_should_be_private_of_internal_field.symbols = public_or_protected_field
dotnet_naming_rule.static_field_should_be_pascal_case.severity = warning
dotnet_naming_rule.static_field_should_be_pascal_case.style = pascal_case
dotnet_naming_rule.static_field_should_be_pascal_case.symbols = static_field
dotnet_naming_rule.static_method_should_be_pascal_case.severity = warning
dotnet_naming_rule.static_method_should_be_pascal_case.style = pascal_case
dotnet_naming_rule.static_method_should_be_pascal_case.symbols = static_method
dotnet_naming_rule.struct_should_be_pascal_case.severity = warning
dotnet_naming_rule.struct_should_be_pascal_case.style = pascal_case
dotnet_naming_rule.struct_should_be_pascal_case.symbols = struct
dotnet_naming_rule.types_should_be_pascal_case.severity = warning
dotnet_naming_rule.types_should_be_pascal_case.style = pascal_case
dotnet_naming_rule.types_should_be_pascal_case.symbols = types
dotnet_naming_style.begins_with_i.capitalization = pascal_case
dotnet_naming_style.begins_with_i.required_prefix = I
dotnet_naming_style.begins_with_i.required_suffix =
dotnet_naming_style.begins_with_i.word_separator =
dotnet_naming_style.pascal_case.capitalization = pascal_case
dotnet_naming_style.pascal_case.required_prefix =
dotnet_naming_style.pascal_case.required_suffix =
dotnet_naming_style.pascal_case.word_separator =
dotnet_naming_style.private_of_internal_field.capitalization = pascal_case
dotnet_naming_style.private_of_internal_field.required_prefix = _
dotnet_naming_style.private_of_internal_field.required_suffix =
dotnet_naming_style.private_of_internal_field.word_separator =
dotnet_naming_symbols.abstract_method.applicable_accessibilities = public, internal, private, protected, protected_internal, private_protected
dotnet_naming_symbols.abstract_method.applicable_kinds = method
dotnet_naming_symbols.abstract_method.required_modifiers = abstract
dotnet_naming_symbols.class.applicable_accessibilities = public, internal, private, protected, protected_internal, private_protected
dotnet_naming_symbols.class.applicable_kinds = class
dotnet_naming_symbols.class.required_modifiers =
dotnet_naming_symbols.delegate.applicable_accessibilities = public, internal, private, protected, protected_internal, private_protected
dotnet_naming_symbols.delegate.applicable_kinds = delegate
dotnet_naming_symbols.delegate.required_modifiers =
dotnet_naming_symbols.enum.applicable_accessibilities = public, internal, private, protected, protected_internal, private_protected
dotnet_naming_symbols.enum.applicable_kinds = enum
dotnet_naming_symbols.enum.required_modifiers =
dotnet_naming_symbols.event.applicable_accessibilities = public, internal, private, protected, protected_internal, private_protected
dotnet_naming_symbols.event.applicable_kinds = event
dotnet_naming_symbols.event.required_modifiers =
dotnet_naming_symbols.interface.applicable_accessibilities = public, internal, private, protected, protected_internal, private_protected
dotnet_naming_symbols.interface.applicable_kinds = interface
dotnet_naming_symbols.interface.required_modifiers =
dotnet_naming_symbols.method.applicable_accessibilities = public
dotnet_naming_symbols.method.applicable_kinds = method
dotnet_naming_symbols.method.required_modifiers =
dotnet_naming_symbols.non_field_members.applicable_accessibilities = public, internal, private, protected, protected_internal, private_protected
dotnet_naming_symbols.non_field_members.applicable_kinds = property, event, method
dotnet_naming_symbols.non_field_members.required_modifiers =
dotnet_naming_symbols.private_method.applicable_accessibilities = private
dotnet_naming_symbols.private_method.applicable_kinds = method
dotnet_naming_symbols.private_method.required_modifiers =
dotnet_naming_symbols.private_or_internal_field.applicable_accessibilities = internal, private, private_protected
dotnet_naming_symbols.private_or_internal_field.applicable_kinds = field
dotnet_naming_symbols.private_or_internal_field.required_modifiers =
dotnet_naming_symbols.private_or_internal_static_field.applicable_accessibilities = internal, private, private_protected
dotnet_naming_symbols.private_or_internal_static_field.applicable_kinds = field
dotnet_naming_symbols.private_or_internal_static_field.required_modifiers = static
dotnet_naming_symbols.property.applicable_accessibilities = public, internal, private, protected, protected_internal, private_protected
dotnet_naming_symbols.property.applicable_kinds = property
dotnet_naming_symbols.property.required_modifiers =
dotnet_naming_symbols.public_or_protected_field.applicable_accessibilities = public, protected
dotnet_naming_symbols.public_or_protected_field.applicable_kinds = field
dotnet_naming_symbols.public_or_protected_field.required_modifiers =
dotnet_naming_symbols.static_field.applicable_accessibilities = public, internal, private, protected, protected_internal, private_protected
dotnet_naming_symbols.static_field.applicable_kinds = field
dotnet_naming_symbols.static_field.required_modifiers = static
dotnet_naming_symbols.static_method.applicable_accessibilities = public, internal, private, protected, protected_internal, private_protected
dotnet_naming_symbols.static_method.applicable_kinds = method
dotnet_naming_symbols.static_method.required_modifiers = static
dotnet_naming_symbols.struct.applicable_accessibilities = public, internal, private, protected, protected_internal, private_protected
dotnet_naming_symbols.struct.applicable_kinds = struct
dotnet_naming_symbols.struct.required_modifiers =
dotnet_naming_symbols.types.applicable_accessibilities = public, internal, private, protected, protected_internal, private_protected
dotnet_naming_symbols.types.applicable_kinds = class, struct, interface, enum
dotnet_naming_symbols.types.required_modifiers =
dotnet_remove_unnecessary_suppression_exclusions = 0
dotnet_separate_import_directive_groups = true
dotnet_sort_system_directives_first = true
dotnet_style_allow_multiple_blank_lines_experimental = false:warning
dotnet_style_allow_statement_immediately_after_block_experimental = true
dotnet_style_coalesce_expression = true
dotnet_style_collection_initializer = true:warning
dotnet_style_explicit_tuple_names = true:warning
dotnet_style_namespace_match_folder = true
dotnet_style_null_propagation = true:warning
dotnet_style_object_initializer = true:warning
dotnet_style_operator_placement_when_wrapping = beginning_of_line
dotnet_style_parentheses_in_arithmetic_binary_operators = always_for_clarity
dotnet_style_parentheses_in_other_binary_operators = always_for_clarity
dotnet_style_parentheses_in_other_operators = never_if_unnecessary
dotnet_style_parentheses_in_relational_binary_operators = always_for_clarity
dotnet_style_predefined_type_for_locals_parameters_members = true
dotnet_style_predefined_type_for_member_access = true:warning
dotnet_style_prefer_auto_properties = true:warning
dotnet_style_prefer_compound_assignment = true:warning
dotnet_style_prefer_conditional_expression_over_assignment = false
dotnet_style_prefer_conditional_expression_over_return = false
dotnet_style_prefer_inferred_anonymous_type_member_names = true:warning
dotnet_style_prefer_inferred_tuple_names = true:warning
dotnet_style_prefer_is_null_check_over_reference_equality_method = true:warning
dotnet_style_prefer_simplified_boolean_expressions = true:warning
dotnet_style_prefer_simplified_interpolation = true
dotnet_style_qualification_for_event = false:error
dotnet_style_qualification_for_field = false
dotnet_style_qualification_for_method = false:error
dotnet_style_qualification_for_property = false:error
dotnet_style_readonly_field = true:warning
dotnet_style_require_accessibility_modifiers = for_non_interface_members
end_of_line = crlf
file_header_template = unset
indent_size = 4
indent_style = space
insert_final_newline = false
tab_width = 4
# https://docs.microsoft.com/en-us/dotnet/fundamentals/code-analysis/quality-rules/ca1822
# https://github.com/dotnet/aspnetcore/blob/main/.editorconfig
# https://github.com/dotnet/project-system/blob/main/.editorconfig
# Question
csharp_prefer_simple_using_statement = false # Question
csharp_style_expression_bodied_constructors = when_on_single_line:none # Question
csharp_style_expression_bodied_properties = true # Question
csharp_style_implicit_object_creation_when_type_is_apparent = true:warning # Question
csharp_style_pattern_matching_over_as_with_null_check = false # Question
csharp_style_prefer_pattern_matching = false # Question
csharp_style_prefer_range_operator = false # Question
csharp_style_prefer_switch_expression = false # Question
csharp_style_unused_value_assignment_preference = unused_local_variable # Question
csharp_style_unused_value_expression_statement_preference = false # Question
csharp_style_var_elsewhere = false:none # Question
csharp_style_var_for_built_in_types = false:none # Question
csharp_style_var_when_type_is_apparent = false:warning # Question
dotnet_diagnostic.CA1001.severity = none # Question - Types that own disposable fields should be disposable
dotnet_diagnostic.CA1051.severity = none # Question - Do not declare visible instance fields
dotnet_diagnostic.CA1416.severity = none # Question - This call site is reachable on all platforms.
dotnet_diagnostic.CA1510.severity = none # Question - Use 'ArgumentNullException.ThrowIfNull' instead of explicitly throwing a new exception instance
dotnet_diagnostic.CA1834.severity = none # CA1834: Consider using 'StringBuilder.Append(char)' when applicable
dotnet_diagnostic.CA1860.severity = none # Question - Avoid using 'Enumerable.Any()' extension method
dotnet_diagnostic.CA1862.severity = none # Question - Prefer using 'string.Equals(string, StringComparison)' to perform a case-insensitive comparison
dotnet_diagnostic.CA2208.severity = none # Question - Instantiate argument exceptions correctly
dotnet_diagnostic.CA2211.severity = none # Question - Non-constant fields should not be visible
dotnet_diagnostic.CA2249.severity = none # Question - Consider using 'string.Contains' instead of 'string.IndexOf'
dotnet_diagnostic.CA2253.severity = none # Question - Named placeholders should not be numeric values
dotnet_diagnostic.CS0103.severity = none # Question - The name 'identifier' does not exist in the current context
dotnet_diagnostic.CS0168.severity = none # Question - The variable 'variable' is declared but never used
dotnet_diagnostic.CS0219.severity = none # Question - The variable 'variable' is assigned but its value is never used
dotnet_diagnostic.CS0612.severity = none # Question - 'member' is obsolete
dotnet_diagnostic.CS0618.severity = none # Question - 'member' is obsolete: 'text' (Compiler Warning, level 2)
dotnet_diagnostic.CS0659.severity = none # Question - 'type' overrides Object.Equals but not Object.GetHashCode (Compiler Warning, level 3)
dotnet_diagnostic.CS8019.severity = warning # Question - Unnecessary using directive.
dotnet_diagnostic.CS8600.severity = none # Question - Converting null literal or possible null value to non-nullable type
dotnet_diagnostic.CS8602.severity = none # Question - Dereference of a possibly null reference.
dotnet_diagnostic.CS8603.severity = none # Question - Possible null reference return
dotnet_diagnostic.CS8604.severity = none # Question - Possible null reference argument for parameter.
dotnet_diagnostic.CS8618.severity = none # Question - Non-nullable variable must contain a non-null value when exiting constructor
dotnet_diagnostic.CS8625.severity = none # Question - Cannot convert null literal to non-nullable reference type.
dotnet_diagnostic.CS8629.severity = none # Question - Nullable value type may be null
dotnet_diagnostic.CS8765.severity = none # Question - Nullability of type of parameter
dotnet_diagnostic.IDE0005.severity = none # Question - Remove unnecessary using directives
dotnet_diagnostic.IDE0008.severity = warning # Question - Use explicit type instead of 'var'
dotnet_diagnostic.IDE0017.severity = none # Question - Object initialization can be simplified
dotnet_diagnostic.IDE0019.severity = none # Question - Use pattern matching
dotnet_diagnostic.IDE0021.severity = none # Question - Use expression body for constructor
dotnet_diagnostic.IDE0022.severity = none # Question - Use expression body for method
dotnet_diagnostic.IDE0025.severity = none # Question - Use expression body for property
dotnet_diagnostic.IDE0027.severity = none # Question - Use expression body for accessor
dotnet_diagnostic.IDE0028.severity = none # Question - Use collection initializers or expressions
dotnet_diagnostic.IDE0031.severity = none # Question - Null check can be simplified
dotnet_diagnostic.IDE0032.severity = none # Question - Use auto property
dotnet_diagnostic.IDE0037.severity = none # Question - Member name can be simplified
dotnet_diagnostic.IDE0041.severity = none # Question - Null check can be simplified
dotnet_diagnostic.IDE0047.severity = none # Question - Parentheses preferences
dotnet_diagnostic.IDE0049.severity = warning # Question - Name can be simplified
dotnet_diagnostic.IDE0051.severity = none # Question - Remove unused private member
dotnet_diagnostic.IDE0053.severity = none # Question - Use expression body for lambdas
dotnet_diagnostic.IDE0054.severity = none # Question - Use compound assignment
dotnet_diagnostic.IDE0055.severity = none # Question - Formatting rule
dotnet_diagnostic.IDE0057.severity = none # Question - Substring can be simplified
dotnet_diagnostic.IDE0058.severity = none # Question - Remove unnecessary expression value
dotnet_diagnostic.IDE0059.severity = none # Question - Unnecessary assignment of a value to
dotnet_diagnostic.IDE0060.severity = none # Question - Remove unused parameter
dotnet_diagnostic.IDE0063.severity = none # Question - Use simple 'using' statement
dotnet_diagnostic.IDE0065.severity = none # Question - Misplaced 'using' directive
dotnet_diagnostic.IDE0066.severity = none # Question - Use 'switch' expression
dotnet_diagnostic.IDE0078.severity = none # Question - Use pattern matching (may change code meaning)
dotnet_diagnostic.IDE0090.severity = warning # Question - Simplify new expression
dotnet_diagnostic.IDE0100.severity = error # Question - Remove redundant equality
dotnet_diagnostic.IDE0160.severity = warning # Question - Use block-scoped namespace
dotnet_diagnostic.IDE0161.severity = warning # Question - Namespace declaration preferences
dotnet_diagnostic.IDE0270.severity = none # Question - Null check can be simplified
dotnet_diagnostic.IDE0300.severity = none # Question - Collection initialization can be simplified
dotnet_diagnostic.IDE1006.severity = none # Question - Naming rule violation (naming styles)
dotnet_style_null_propagation = false # Question
dotnet_style_object_initializer = false # Question
dotnet_style_prefer_auto_properties = false # Question
dotnet_style_allow_statement_immediately_after_block_experimental = true # Question
dotnet_style_prefer_inferred_anonymous_type_member_names = false:warning # Question
dotnet_style_prefer_is_null_check_over_reference_equality_method = false # Question
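For orientation, a short illustrative snippet (not part of the commit; the class and member names are made up) showing code shaped by the headline [*.cs] rules above: file-scoped namespace, no new line before an open brace, explicit types rather than var, expression bodies when they fit on one line, and private/internal fields prefixed with _ and PascalCased per dotnet_naming_style.private_of_internal_field:

using System;

namespace File_Folder_Helper.ADO2025.PI5;

internal static class EditorConfigSample {
    // dotnet_naming_style.private_of_internal_field: required prefix "_" with pascal_case capitalization
    private static readonly TimeSpan _DefaultDelay = TimeSpan.FromSeconds(1);

    // expression body when it fits on a single line; explicit return type, no var
    internal static string Describe(int count) =>
        $"{count} item(s), default delay {_DefaultDelay.TotalSeconds}s";
}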

View File

@@ -1,13 +1,23 @@
using Microsoft.Extensions.Logging;
using System.Globalization;
using Microsoft.Extensions.Logging;
namespace File_Folder_Helper.ADO2025.PI5;
internal static partial class Helper20250218
{
internal static partial class Helper20250218 {
private static void MoveToArchive(ILogger<Worker> logger, string searchMES, string searchSequence, string destinationRoot, string[] files)
{
internal static void MoveToArchive(ILogger<Worker> logger, List<string> args) {
string searchMES = args[3];
string searchPattern = args[2];
string searchSequence = args[4];
string destinationRoot = args[5];
string sourceDirectory = Path.GetFullPath(args[0]);
string[] files = Directory.GetFiles(sourceDirectory, searchPattern, SearchOption.AllDirectories);
logger.LogInformation("<{files}>(s)", files.Length);
MoveToArchive(logger, searchMES, searchSequence, destinationRoot, files);
}
private static void MoveToArchive(ILogger<Worker> logger, string searchMES, string searchSequence, string destinationRoot, string[] files) {
string mes;
string text;
string sequence;
@@ -20,8 +30,7 @@ internal static partial class Helper20250218
string[] segmentsC;
string checkDirectory;
Calendar calendar = new CultureInfo("en-US").Calendar;
foreach (string file in files)
{
foreach (string file in files) {
fileInfo = new(file);
if (string.IsNullOrEmpty(fileInfo.DirectoryName))
continue;
@@ -37,20 +46,17 @@ internal static partial class Helper20250218
segmentsC = Path.GetFileName(fileInfo.DirectoryName).Split('-');
weekOfYear = $"{fileInfo.LastWriteTime.Year}_Week_{calendar.GetWeekOfYear(fileInfo.LastWriteTime, CalendarWeekRule.FirstDay, DayOfWeek.Sunday):00}";
checkDirectory = Path.GetFullPath(Path.Combine(destinationRoot, mes, weekOfYear, fileInfo.LastWriteTime.ToString("yyyy-MM-dd")));
if (!Directory.Exists(checkDirectory))
{
if (!Directory.Exists(checkDirectory)) {
logger.LogInformation("<{checkDirectory}>", checkDirectory);
continue;
}
matches = Directory.GetDirectories(checkDirectory, sequence, SearchOption.AllDirectories);
if (matches.Length != 1)
{
if (matches.Length != 1) {
logger.LogInformation("!= 1 <{checkDirectory}>", checkDirectory);
continue;
}
checkFile = segmentsC.Length == 2 ? Path.Combine(matches[0], $"csv-{segmentsC[1]}-{fileInfo.Name}") : Path.Combine(matches[0], $"csv-{fileInfo.Name}");
if (File.Exists(checkFile))
{
if (File.Exists(checkFile)) {
logger.LogInformation("csv- {segmentsC} <{checkDirectory}>", segmentsC.Length, checkDirectory);
continue;
}
@@ -58,18 +64,6 @@ internal static partial class Helper20250218
}
}
internal static void MoveToArchive(ILogger<Worker> logger, List<string> args)
{
string searchMES = args[3];
string searchPattern = args[2];
string searchSequence = args[4];
string destinationRoot = args[5];
string sourceDirectory = Path.GetFullPath(args[0]);
string[] files = Directory.GetFiles(sourceDirectory, searchPattern, SearchOption.AllDirectories);
logger.LogInformation("<{files}>(s)", files.Length);
MoveToArchive(logger, searchMES, searchSequence, destinationRoot, files);
}
}
// L:\DevOps\MESA_FI\file-folder-helper\bin\Debug\net8.0\win-x64>dotnet File-Folder-Helper.dll X D:/Tmp Day-Helper-2025-02-18 *.pdsf A_MES_ENTITY= B_SEQUENCE= \\mesfs.infineon.com\EC_Characterization_Si\Archive
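The archive layout MoveToArchive checks for is destinationRoot/<MES>/<year>_Week_<NN>/<yyyy-MM-dd>/..., with the week number taken from the en-US calendar using the FirstDay rule and Sunday as the first day of the week. A minimal sketch of that path construction, lifted from the loop above (the helper class name below is hypothetical):

using System;
using System.Globalization;
using System.IO;

internal static class ArchivePath {
    internal static string GetCheckDirectory(string destinationRoot, string mes, DateTime lastWriteTime) {
        Calendar calendar = new CultureInfo("en-US").Calendar;
        // e.g. 2025_Week_08 for a file last written on 2025-02-19
        string weekOfYear = $"{lastWriteTime.Year}_Week_{calendar.GetWeekOfYear(lastWriteTime, CalendarWeekRule.FirstDay, DayOfWeek.Sunday):00}";
        return Path.GetFullPath(Path.Combine(destinationRoot, mes, weekOfYear, lastWriteTime.ToString("yyyy-MM-dd")));
    }
}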

View File

@@ -1,110 +1,280 @@
using Microsoft.Extensions.Logging;
using System.Collections.ObjectModel;
using System.Text;
using System.Text.Json;
using System.Text.Json.Serialization;
using Microsoft.Extensions.Logging;
namespace File_Folder_Helper.ADO2025.PI5;
internal static partial class Helper20250219
{
internal static partial class Helper20250219 {
private record ProcessDataStandardFormat(ReadOnlyCollection<string> Body,
ReadOnlyCollection<string> Columns,
string Logistics);
ReadOnlyCollection<string> Logistics,
long? Sequence);
[JsonSourceGenerationOptions(WriteIndented = true)]
[JsonSerializable(typeof(JsonElement[]))]
private partial class JsonElementCollectionSourceGenerationContext : JsonSerializerContext
{
private partial class JsonElementCollectionSourceGenerationContext : JsonSerializerContext {
}
private static ProcessDataStandardFormat GetLogisticsColumnsAndBody(string path, string[]? lines)
{
ProcessDataStandardFormat result;
string segment;
List<string> body = [];
private record Input(ReadOnlyCollection<string> Backfill,
ReadOnlyCollection<int> ColumnIndices,
ReadOnlyCollection<string> Ignore,
ReadOnlyCollection<string> IndexOnly,
ReadOnlyDictionary<string, string> KeyValuePairs,
ReadOnlyCollection<string> NewColumnNames,
ReadOnlyCollection<string> OldColumnNames);
internal static void Compare(ILogger<Worker> logger, List<string> args) {
string[] segmentsB;
List<string> distinct = [];
string searchPattern = args[2];
string searchPatternB = args[3];
string[] segments = args[7].Split(',');
Dictionary<string, string> keyValuePairs = [];
ReadOnlyCollection<string> ignore = args[4].Split(',').AsReadOnly();
ReadOnlyCollection<string> backfill = args[5].Split(',').AsReadOnly();
ReadOnlyCollection<string> indexOnly = args[6].Split(',').AsReadOnly();
ReadOnlyCollection<string> oldColumnNames = args[8].Split(',').AsReadOnly();
ReadOnlyCollection<string> newColumnNames = args[9].Split(',').AsReadOnly();
ReadOnlyCollection<int> columnIndices = args[10].Split(',').Select(int.Parse).ToArray().AsReadOnly();
foreach (string segment in segments) {
segmentsB = segment.Split('|');
if (segmentsB.Length != 2)
continue;
if (distinct.Contains(segmentsB[0]))
continue;
distinct.Add(segmentsB[0]);
keyValuePairs.Add(segmentsB[0], segmentsB[1]);
}
Input input = new(Backfill: backfill,
ColumnIndices: columnIndices,
NewColumnNames: newColumnNames,
Ignore: ignore,
IndexOnly: indexOnly,
KeyValuePairs: keyValuePairs.AsReadOnly(),
OldColumnNames: oldColumnNames);
string sourceDirectory = Path.GetFullPath(args[0]);
string[] files = Directory.GetFiles(sourceDirectory, searchPattern, SearchOption.AllDirectories);
logger.LogInformation("<{files}>(s)", files.Length);
Compare(logger, sourceDirectory.Length, searchPatternB, input, files);
}
private static void Compare(ILogger<Worker> logger, int sourceDirectoryLength, string searchPattern, Input input, string[] files) {
bool compare;
string directory;
string[] matches;
string directorySegment;
string[] directoryFiles;
const int columnsLine = 6;
JsonElement[]? jsonElementsNew;
JsonElement[]? jsonElementsOld;
ProcessDataStandardFormat processDataStandardFormat;
FileInfo[] collection = files.Select(l => new FileInfo(l)).ToArray();
foreach (FileInfo fileInfo in collection) {
directory = fileInfo.DirectoryName ?? throw new Exception();
directoryFiles = Directory.GetFiles(directory, searchPattern, SearchOption.TopDirectoryOnly);
matches = (from l in directoryFiles where l != fileInfo.FullName select l).ToArray();
if (matches.Length < 1)
continue;
directorySegment = directory[sourceDirectoryLength..];
processDataStandardFormat = GetProcessDataStandardFormat(logger, fileInfo.LastWriteTime, input.NewColumnNames.Count, columnsLine, fileInfo.FullName, lines: null);
jsonElementsNew = GetArray(logger, input.NewColumnNames.Count, processDataStandardFormat, lookForNumbers: false);
if (jsonElementsNew is null)
continue;
if (input.OldColumnNames.Count == input.ColumnIndices.Count) {
processDataStandardFormat = Get(logger, input, jsonElementsNew, processDataStandardFormat);
Write(logger, fileInfo, processDataStandardFormat);
}
foreach (string match in matches) {
processDataStandardFormat = GetProcessDataStandardFormat(logger, fileInfo.LastWriteTime, input.OldColumnNames.Count, columnsLine, match, lines: null);
jsonElementsOld = GetArray(logger, input.OldColumnNames.Count, processDataStandardFormat, lookForNumbers: false);
if (jsonElementsOld is null || jsonElementsOld.Length != jsonElementsNew.Length) {
logger.LogWarning("! <{match}> (jsonElementsOld.Length:{jsonElementsOld} != jsonElementsNew.Length:{jsonElementsNew})", match, jsonElementsOld?.Length, jsonElementsNew.Length);
continue;
}
compare = Compare(logger, input, directorySegment, jsonElementsNew, jsonElementsOld);
if (!compare) {
logger.LogWarning("! <{match}>", match);
continue;
}
logger.LogInformation("<{match}>", match);
}
}
}
private static bool Compare(ILogger<Worker> logger, Input input, string directory, JsonElement[] jsonElementsNew, JsonElement[] jsonElementsOld) {
bool result;
int? q;
string valueNew;
string valueOld;
List<string> columns = [];
StringBuilder logistics = new();
lines ??= File.ReadAllLines(path);
string[] segments;
if (lines.Length < 7)
segments = [];
else
segments = lines[6].Trim().Split('\t');
for (int c = 0; c < segments.Length; c++)
{
segment = segments[c][1..^1];
if (!columns.Contains(segment))
columns.Add(segment);
else
{
for (short i = 1; i < short.MaxValue; i++)
{
segment = string.Concat(segment, "_", i);
if (!columns.Contains(segment))
{
columns.Add(segment);
break;
JsonProperty jsonPropertyOld;
JsonProperty jsonPropertyNew;
JsonProperty[] jsonPropertiesOld;
JsonProperty[] jsonPropertiesNew;
List<string> unknownColumns = [];
List<string> differentColumns = [];
int last = jsonElementsOld.Length - 1;
List<string> sameAfterSpaceSplitColumns = [];
for (int i = last; i > 0; i--) {
if (jsonElementsOld[i].ValueKind != JsonValueKind.Object) {
unknownColumns.Add(string.Empty);
break;
}
jsonPropertiesOld = jsonElementsOld[i].EnumerateObject().ToArray();
jsonPropertiesNew = jsonElementsNew[i].EnumerateObject().ToArray();
for (int p = 0; p < jsonPropertiesOld.Length; p++) {
jsonPropertyOld = jsonPropertiesOld[p];
valueOld = jsonPropertyOld.Value.ToString();
if (input.KeyValuePairs.TryGetValue(jsonPropertyOld.Name, out string? name) && !string.IsNullOrEmpty(name)) {
q = TryGetPropertyIndex(jsonPropertiesNew, name);
if (q is null && i == 0)
unknownColumns.Add($"{jsonPropertyOld.Name}|{name}");
} else {
q = TryGetPropertyIndex(jsonPropertiesNew, jsonPropertyOld.Name);
if (q is null) {
if (i == 0)
unknownColumns.Add(jsonPropertyOld.Name);
}
}
if (q is null) {
if (input.Ignore.Contains(jsonPropertyOld.Name)) {
if (i == last) {
columns.Add("-1");
logger.LogDebug("{p} )) {jsonPropertyOld.Name} **", p, jsonPropertyOld.Name);
}
continue;
}
if (i == last) {
columns.Add("-1");
if (!string.IsNullOrEmpty(valueOld))
logger.LogDebug("{p} )) {jsonPropertyOld.Name} ??", p, jsonPropertyOld.Name);
}
} else {
if (i == last)
columns.Add(q.Value.ToString());
jsonPropertyNew = jsonPropertiesNew[q.Value];
valueNew = jsonPropertyNew.Value.ToString();
if (i == last)
logger.LogDebug("{p} )) {jsonPropertyOld.Name} ~~ {q.Value} => {jsonPropertyNew.Name}", p, jsonPropertyOld.Name, q.Value, jsonPropertyNew.Name);
if (valueNew != valueOld && !differentColumns.Contains(jsonPropertyOld.Name)) {
if (valueNew.Length >= 2 && valueNew.Split(' ')[0] == valueOld)
sameAfterSpaceSplitColumns.Add(jsonPropertyOld.Name);
else {
if (input.Backfill.Contains(jsonPropertyOld.Name) && i != last)
continue;
if (input.IndexOnly.Contains(jsonPropertyOld.Name) && int.TryParse(jsonPropertyOld.Name[^2..], out int index) && i != index - 1)
continue;
logger.LogWarning("For [{jsonProperty.Name}] <{directory}> doesn't match (valueNew:{valueNew} != valueOld:{valueOld})!", jsonPropertyOld.Name, directory, valueNew, valueOld);
differentColumns.Add(jsonPropertyOld.Name);
}
}
}
}
if (i == last)
logger.LogInformation(string.Join(',', columns));
}
result = unknownColumns.Count == 0 && differentColumns.Count == 0 && sameAfterSpaceSplitColumns.Count == 0;
return result;
}
private static int? TryGetPropertyIndex(JsonProperty[] jsonProperties, string propertyName) {
int? result = null;
for (int i = 0; i < jsonProperties.Length; i++) {
if (jsonProperties[i].Name != propertyName)
continue;
result = i;
break;
}
if (result is null) {
for (int i = 0; i < jsonProperties.Length; i++) {
if (jsonProperties[i].Name[0] != propertyName[0])
continue;
if (jsonProperties[i].Name.Length != propertyName.Length)
continue;
if (jsonProperties[i].Name != propertyName)
continue;
result = i;
break;
}
}
return result;
}
private static ProcessDataStandardFormat GetProcessDataStandardFormat(ILogger<Worker> logger, DateTime lastWriteTime, int expectedColumns, int columnsLine, string path, string[]? lines) {
ProcessDataStandardFormat result;
long sequence;
string[] segments;
List<string> body = [];
List<string> logistics = [];
bool lookForLogistics = false;
for (int r = 7; r < lines.Length; r++)
{
lines ??= File.ReadAllLines(path);
if (lines.Length <= columnsLine)
segments = [];
else {
segments = lines[columnsLine].Split('\t');
if (segments.Length != expectedColumns) {
logger.LogWarning("{segments} != {expectedColumns}", segments.Length, expectedColumns);
segments = [];
}
}
string[] columns = segments.Select(l => l.Trim('"')).ToArray();
for (int r = columnsLine + 1; r < lines.Length; r++) {
if (lines[r].StartsWith("NUM_DATA_ROWS"))
lookForLogistics = true;
if (!lookForLogistics)
{
if (!lookForLogistics) {
body.Add(lines[r]);
continue;
}
if (lines[r].StartsWith("LOGISTICS_1"))
{
for (int i = r; i < lines.Length; i++)
{
if (lines[r].StartsWith("LOGISTICS_1")) {
for (int i = r; i < lines.Length; i++) {
if (lines[r].StartsWith("END_HEADER"))
break;
_ = logistics.AppendLine(lines[i]);
logistics.Add(lines[i]);
}
break;
}
}
if (logistics.Count == 0)
sequence = lastWriteTime.Ticks;
else {
segments = logistics[0].Split("SEQUENCE=");
sequence = segments.Length < 2 || !long.TryParse(segments[1].Split(';')[0], out long s) ? lastWriteTime.Ticks : s;
}
result = new(Body: body.AsReadOnly(),
Columns: columns.AsReadOnly(),
logistics.ToString());
Logistics: logistics.AsReadOnly(),
Sequence: sequence);
return result;
}
private static JsonElement[]? GetArray(ProcessDataStandardFormat processDataStandardFormat, bool lookForNumbers = false)
{
private static JsonElement[]? GetArray(ILogger<Worker> logger, int expectedColumns, ProcessDataStandardFormat processDataStandardFormat, bool lookForNumbers) {
JsonElement[]? results;
if (processDataStandardFormat.Body.Count == 0 || !processDataStandardFormat.Body[0].Contains('\t'))
results = JsonSerializer.Deserialize("[]", JsonElementCollectionSourceGenerationContext.Default.JsonElementArray) ?? throw new Exception();
else
{
else {
string value;
string[] segments;
List<string> lines = [];
StringBuilder stringBuilder = new();
foreach (string bodyLine in processDataStandardFormat.Body)
{
foreach (string bodyLine in processDataStandardFormat.Body) {
_ = stringBuilder.Clear();
_ = stringBuilder.Append('{');
segments = bodyLine.Trim().Split('\t');
if (!lookForNumbers)
{
for (int c = 1; c < segments.Length; c++)
{
segments = bodyLine.Split('\t');
if (segments.Length != expectedColumns) {
logger.LogWarning("{segments} != {expectedColumns}", segments.Length, expectedColumns);
continue;
}
if (!lookForNumbers) {
for (int c = 0; c < segments.Length; c++) {
value = segments[c].Replace("\"", "\\\"").Replace("\\", "\\\\");
_ = stringBuilder.Append('"').Append(processDataStandardFormat.Columns[c]).Append("\":\"").Append(value).Append("\",");
}
}
else
{
for (int c = 1; c < segments.Length; c++)
{
} else {
for (int c = 0; c < segments.Length; c++) {
value = segments[c].Replace("\"", "\\\"").Replace("\\", "\\\\");
if (string.IsNullOrEmpty(value))
_ = stringBuilder.Append('"').Append(processDataStandardFormat.Columns[c]).Append("\":").Append(value).Append("null,");
@@ -124,182 +294,72 @@ internal static partial class Helper20250219
return results;
}
private static int? TryGetPropertyIndex(JsonProperty[] jsonProperties, string propertyName)
{
int? result = null;
for (int i = 0; i < jsonProperties.Length; i++)
{
if (jsonProperties[i].Name != propertyName)
continue;
result = i;
break;
}
if (result is null)
{
for (int i = 0; i < jsonProperties.Length; i++)
{
if (jsonProperties[i].Name[0] != propertyName[0])
continue;
if (jsonProperties[i].Name.Length != propertyName.Length)
continue;
if (jsonProperties[i].Name != propertyName)
continue;
result = i;
break;
}
}
return result;
}
private static bool Compare(ILogger<Worker> logger, ReadOnlyCollection<string> ignore, ReadOnlyCollection<string> backfill, ReadOnlyCollection<string> indexOnly, ReadOnlyDictionary<string, string> keyValuePairs, string directory, JsonElement[] jsonElementsNew, JsonElement[] jsonElementsOld)
{
bool result;
int? q;
string valueNew;
string valueOld;
JsonProperty jsonPropertyOld;
JsonProperty jsonPropertyNew;
JsonProperty[] jsonPropertiesOld;
JsonProperty[] jsonPropertiesNew;
private static ProcessDataStandardFormat Get(ILogger<Worker> logger, Input input, JsonElement[] jsonElements, ProcessDataStandardFormat processDataStandardFormat) {
ProcessDataStandardFormat result;
int column;
string value;
List<string> values = [];
List<string> results = [];
JsonProperty jsonProperty;
JsonProperty[] jsonProperties;
List<string> unknownColumns = [];
List<string> differentColumns = [];
int last = jsonElementsOld.Length - 1;
List<string> sameAfterSpaceSplitColumns = [];
for (int i = last; i > 0; i--)
{
if (jsonElementsOld[i].ValueKind != JsonValueKind.Object)
{
for (int i = 0; i < jsonElements.Length; i++) {
values.Clear();
if (jsonElements[i].ValueKind != JsonValueKind.Object) {
unknownColumns.Add(string.Empty);
break;
}
jsonPropertiesOld = jsonElementsOld[i].EnumerateObject().ToArray();
jsonPropertiesNew = jsonElementsNew[i].EnumerateObject().ToArray();
for (int p = 0; p < jsonPropertiesOld.Length; p++)
{
jsonPropertyOld = jsonPropertiesOld[p];
valueOld = jsonPropertyOld.Value.ToString();
if (ignore.Contains(jsonPropertyOld.Name))
{
if (i == last)
logger.LogDebug("{p} )) {jsonPropertyOld.Name} **", p, jsonPropertyOld.Name);
continue;
}
if (keyValuePairs.TryGetValue(jsonPropertyOld.Name, out string? name) && !string.IsNullOrEmpty(name))
{
q = TryGetPropertyIndex(jsonPropertiesNew, name);
if (q is null && i == 0)
unknownColumns.Add($"{jsonPropertyOld.Name}|{name}");
}
else
{
q = TryGetPropertyIndex(jsonPropertiesNew, jsonPropertyOld.Name);
if (q is null)
{
if (i == 0)
unknownColumns.Add(jsonPropertyOld.Name);
}
}
if (q is null)
{
if (i == last && !string.IsNullOrEmpty(valueOld))
logger.LogDebug("{p} )) {jsonPropertyOld.Name} ??", p, jsonPropertyOld.Name);
}
else
{
jsonPropertyNew = jsonPropertiesNew[q.Value];
valueNew = jsonPropertyNew.Value.ToString();
if (i == last)
logger.LogDebug("{p} )) {jsonPropertyOld.Name} ~~ {q.Value} => {jsonPropertyNew.Name}", p, jsonPropertyOld.Name, q.Value, jsonPropertyNew.Name);
if (valueNew != valueOld && !differentColumns.Contains(jsonPropertyOld.Name))
{
if (valueNew.Length >= 2 && valueNew.Split(' ')[0] == valueOld)
sameAfterSpaceSplitColumns.Add(jsonPropertyOld.Name);
else
{
if (backfill.Contains(jsonPropertyOld.Name) && i != last)
continue;
if (indexOnly.Contains(jsonPropertyOld.Name) && int.TryParse(jsonPropertyOld.Name[^2..], out int index) && i != index - 1)
continue;
logger.LogWarning("For [{jsonProperty.Name}] <{directory}> doesn't match (valueNew:{valueNew} != valueOld:{valueOld})!", jsonPropertyOld.Name, directory, valueNew, valueOld);
differentColumns.Add(jsonPropertyOld.Name);
}
}
}
jsonProperties = jsonElements[i].EnumerateObject().ToArray();
if (jsonProperties.Length != input.NewColumnNames.Count) {
logger.LogWarning("{jsonProperties} != {NewColumnNames}", jsonProperties.Length, input.NewColumnNames.Count);
continue;
}
for (int c = 0; c < input.ColumnIndices.Count; c++) {
column = input.ColumnIndices[c];
if (column == -1)
value = input.OldColumnNames[c];
else {
jsonProperty = jsonProperties[column];
value = jsonProperty.Value.ToString();
}
values.Add(value);
}
results.Add(string.Join('\t', values));
}
result = unknownColumns.Count == 0 && differentColumns.Count == 0 && sameAfterSpaceSplitColumns.Count == 0;
result = new(Body: new(results),
Columns: processDataStandardFormat.Columns,
Logistics: processDataStandardFormat.Logistics,
Sequence: processDataStandardFormat.Sequence);
return result;
}
private static void Compare(ILogger<Worker> logger, int sourceDirectoryLength, ReadOnlyCollection<string> ignore, ReadOnlyCollection<string> backfill, ReadOnlyCollection<string> indexOnly, ReadOnlyDictionary<string, string> keyValuePairs, string searchPattern, string[] files)
{
bool isMatch;
string directory;
string[] matches;
string directorySegment;
string[] directoryFiles;
JsonElement[]? jsonElementsNew;
JsonElement[]? jsonElementsOld;
ProcessDataStandardFormat processDataStandardFormat;
FileInfo[] collection = files.Select(l => new FileInfo(l)).ToArray();
string[] sorted = (from l in collection orderby l.CreationTime descending select l.FullName).ToArray();
foreach (string file in sorted)
{
directory = Path.GetDirectoryName(file) ?? throw new Exception();
directoryFiles = Directory.GetFiles(directory, searchPattern, SearchOption.TopDirectoryOnly);
matches = (from l in directoryFiles where l != file select l).ToArray();
if (matches.Length < 1)
continue;
directorySegment = directory[sourceDirectoryLength..];
processDataStandardFormat = GetLogisticsColumnsAndBody(file, lines: null);
jsonElementsNew = GetArray(processDataStandardFormat);
if (jsonElementsNew is null)
continue;
foreach (string match in matches)
{
processDataStandardFormat = GetLogisticsColumnsAndBody(match, lines: null);
jsonElementsOld = GetArray(processDataStandardFormat);
if (jsonElementsOld is null || jsonElementsOld.Length != jsonElementsNew.Length)
{
logger.LogWarning("! <{match}> (jsonElementsOld.Length:{jsonElementsOld} != jsonElementsNew.Length:{jsonElementsNew})", match, jsonElementsOld?.Length, jsonElementsNew.Length);
continue;
}
isMatch = Compare(logger, ignore, backfill, indexOnly, keyValuePairs, directorySegment, jsonElementsNew, jsonElementsOld);
if (!isMatch)
{
logger.LogWarning("! <{match}>", match);
continue;
}
logger.LogInformation("<{match}>", match);
}
}
}
internal static void Compare(ILogger<Worker> logger, List<string> args)
{
string[] segmentsB;
List<string> distinct = [];
string searchPattern = args[2];
string searchPatternB = args[3];
string[] segments = args[7].Split(',');
Dictionary<string, string> keyValuePairs = [];
ReadOnlyCollection<string> ignore = args[4].Split(',').AsReadOnly();
ReadOnlyCollection<string> backfill = args[5].Split(',').AsReadOnly();
ReadOnlyCollection<string> indexOnly = args[6].Split(',').AsReadOnly();
foreach (string segment in segments)
{
segmentsB = segment.Split('|');
if (segmentsB.Length != 2)
continue;
if (distinct.Contains(segmentsB[0]))
continue;
distinct.Add(segmentsB[0]);
keyValuePairs.Add(segmentsB[0], segmentsB[1]);
}
string sourceDirectory = Path.GetFullPath(args[0]);
string[] files = Directory.GetFiles(sourceDirectory, searchPattern, SearchOption.AllDirectories);
logger.LogInformation("<{files}>(s)", files.Length);
Compare(logger, sourceDirectory.Length, ignore, backfill, indexOnly, keyValuePairs.AsReadOnly(), searchPatternB, files);
private static void Write(ILogger<Worker> logger, FileInfo fileInfo, ProcessDataStandardFormat processDataStandardFormat) {
List<string> results = [];
if (processDataStandardFormat.Sequence is null)
throw new NullReferenceException(nameof(processDataStandardFormat.Sequence));
string endOffset = "E#######T";
string dataOffset = "D#######T";
string headerOffset = "H#######T";
string format = "MM/dd/yyyy HH:mm:ss";
string startTime = new DateTime(processDataStandardFormat.Sequence.Value).ToString(format);
results.Add("HEADER_TAG\tHEADER_VALUE");
results.Add("FORMAT\t2.00");
results.Add("NUMBER_PASSES\t0001");
results.Add($"HEADER_OFFSET\t{headerOffset}");
results.Add($"DATA_OFFSET\t{dataOffset}");
results.Add($"END_OFFSET\t{endOffset}");
results.Add($"\"{string.Join("\",\t\"", processDataStandardFormat.Columns)}\"");
results.AddRange(processDataStandardFormat.Body);
results.Add($"NUM_DATA_ROWS\t{processDataStandardFormat.Body.Count.ToString().PadLeft(9, '0')}");
results.Add($"NUM_DATA_COLUMNS\t{processDataStandardFormat.Columns.Count.ToString().PadLeft(9, '0')}");
results.Add("DELIMITER\t;");
results.Add($"START_TIME_FORMAT\t{format}");
results.Add($"START_TIME\t{startTime}");
results.Add("LOGISTICS_COLUMN\tA_LOGISTICS");
results.Add("LOGISTICS_COLUMN\tB_LOGISTICS");
results.AddRange(processDataStandardFormat.Logistics);
File.WriteAllText($"{fileInfo.FullName}.tsv", string.Join(Environment.NewLine, results));
logger.LogDebug("<{fileInfo}>", fileInfo);
}
}
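In Get above, input.ColumnIndices has one entry per old column: a non-negative value is the property index to copy from the new-format row, and -1 means the column has no source in the new file, so the old column name itself is written as the value. A toy sketch of that remapping (the names and indices here are invented):

using System;
using System.Collections.Generic;

internal static class ColumnRemapSample {
    internal static string Remap(string[] newRow, string[] oldColumnNames, int[] columnIndices) {
        List<string> values = [];
        for (int c = 0; c < columnIndices.Length; c++) {
            // -1: no matching new column, fall back to the old column name as a literal
            values.Add(columnIndices[c] == -1 ? oldColumnNames[c] : newRow[columnIndices[c]]);
        }
        return string.Join('\t', values);
    }
}

// Remap(["A", "B"], ["Old1", "Old2", "Old3"], [1, -1, 0]) returns "B\tOld2\tA"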

View File

@@ -1,16 +1,35 @@
using Microsoft.Extensions.Logging;
using System.Collections.ObjectModel;
using System.Text.Json;
using Microsoft.Extensions.Logging;
namespace File_Folder_Helper.ADO2025.PI5;
internal static partial class Helper20250228
{
internal static partial class Helper20250228 {
private record Record(string TableName, ReadOnlyCollection<string> Columns, ReadOnlyCollection<string[]> Rows);
private static ReadOnlyCollection<Record> GetRecords(string headerA, string headerB, string file)
{
internal static void PostgresDumpToJson(ILogger<Worker> logger, List<string> args) {
string searchPattern = args[2];
string headerA = args[3].Replace('_', ' ');
string headerB = args[4].Replace('_', ' ');
string sourceDirectory = Path.GetFullPath(args[0]);
string[] files = Directory.GetFiles(sourceDirectory, searchPattern, SearchOption.AllDirectories);
if (files.Length != 1)
logger.LogWarning("<{files}>(s)", files.Length);
else
PostgresDumpToJson(logger, headerA, headerB, files[0]);
}
private static void PostgresDumpToJson(ILogger<Worker> logger, string headerA, string headerB, string file) {
ReadOnlyCollection<Record> records = GetRecords(headerA, headerB, file);
if (records.Count > 0)
WriteFile(file, records);
else
logger.LogWarning("<{records}>(s)", records.Count);
}
private static ReadOnlyCollection<Record> GetRecords(string headerA, string headerB, string file) {
List<Record> results = [];
string line;
string[] segmentsA;
@@ -23,11 +42,9 @@ internal static partial class Helper20250228
string? tableName = null;
string[] lines = File.ReadAllLines(file);
ReadOnlyCollection<string>? columns = null;
for (int i = 0; i < lines.Length; i++)
{
for (int i = 0; i < lines.Length; i++) {
line = lines[i];
if (tableName is null)
{
if (tableName is null) {
segmentsA = line.Split(headerA);
if (segmentsA.Length != 2)
continue;
@@ -45,18 +62,14 @@ internal static partial class Helper20250228
continue;
segmentsE = segmentsB[0].Split(' ');
tableName = segmentsE[0];
}
else if (columns is null)
} else if (columns is null)
break;
else
{
else {
rows = [];
for (int j = i + 1; j < lines.Length; j++)
{
for (int j = i + 1; j < lines.Length; j++) {
i = j;
segmentsF = lines[j].Split('\t');
if (segmentsF.Length != columns.Count)
{
if (segmentsF.Length != columns.Count) {
if (rows.Count > 0)
results.Add(new(TableName: tableName, Columns: columns, Rows: rows.AsReadOnly()));
break;
@@ -70,20 +83,16 @@ internal static partial class Helper20250228
return results.AsReadOnly();
}
private static void WriteFile(string file, ReadOnlyCollection<Record> records)
{
private static void WriteFile(string file, ReadOnlyCollection<Record> records) {
List<string> results = [];
string json;
string text;
Dictionary<string, string?> keyValuePairs = [];
foreach (Record record in records)
{
foreach (Record record in records) {
results.Clear();
foreach (string[] row in record.Rows)
{
foreach (string[] row in record.Rows) {
keyValuePairs.Clear();
for (int i = 0; i < row.Length; i++)
{
for (int i = 0; i < row.Length; i++) {
if (row[i] == "\\N")
keyValuePairs.Add(record.Columns[i], null);
else
@@ -97,26 +106,4 @@ internal static partial class Helper20250228
}
}
private static void PostgresDumpToJson(ILogger<Worker> logger, string headerA, string headerB, string file)
{
ReadOnlyCollection<Record> records = GetRecords(headerA, headerB, file);
if (records.Count > 0)
WriteFile(file, records);
else
logger.LogWarning("<{records}>(s)", records.Count);
}
internal static void PostgresDumpToJson(ILogger<Worker> logger, List<string> args)
{
string searchPattern = args[2];
string headerA = args[3].Replace('_', ' ');
string headerB = args[4].Replace('_', ' ');
string sourceDirectory = Path.GetFullPath(args[0]);
string[] files = Directory.GetFiles(sourceDirectory, searchPattern, SearchOption.AllDirectories);
if (files.Length != 1)
logger.LogWarning("<{files}>(s)", files.Length);
else
PostgresDumpToJson(logger, headerA, headerB, files[0]);
}
}
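GetRecords walks a plain-text PostgreSQL dump: after a table header matching headerA/headerB, each following line is split on tabs and collected while the field count matches the column count, and WriteFile then maps \N fields to JSON nulls. A small sketch of that per-row conversion (the column names and dump line in the usage comment are invented):

using System;
using System.Collections.Generic;

internal static class DumpRowSample {
    internal static Dictionary<string, string?> ToKeyValuePairs(string[] columns, string dumpLine) {
        Dictionary<string, string?> keyValuePairs = [];
        string[] row = dumpLine.Split('\t');
        for (int i = 0; i < row.Length; i++) {
            // pg_dump COPY output writes NULL as the two characters \N
            keyValuePairs.Add(columns[i], row[i] == "\\N" ? null : row[i]);
        }
        return keyValuePairs;
    }
}

// ToKeyValuePairs(["id", "name"], "42\t\\N") yields { "id": "42", "name": null }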

View File

@@ -1,51 +1,12 @@
using Microsoft.Extensions.Logging;
using System.Collections.ObjectModel;
using Microsoft.Extensions.Logging;
namespace File_Folder_Helper.ADO2025.PI5;
internal static partial class Helper20250301
{
internal static partial class Helper20250301 {
private static ReadOnlyCollection<string> CopyFiles(char split, string workingDirectory, string directory, string[] files)
{
List<string> results = [];
string fileName;
string checkFile;
string checkDirectory = Path.Combine(workingDirectory, directory);
if (!Directory.Exists(checkDirectory))
_ = Directory.CreateDirectory(checkDirectory);
foreach (string file in files)
{
fileName = Path.GetFileName(file).Split(split)[^1];
checkFile = Path.Combine(checkDirectory, fileName);
if (File.Exists(checkFile))
File.Delete(checkFile);
File.Copy(file, checkFile);
results.Add(fileName);
}
return results.AsReadOnly();
}
private static void PocketBaseImportWithDeno(ILogger<Worker> logger, char split, string workingDirectory, string scriptName, string directory, string[] files)
{
string checkFile = Path.Combine(workingDirectory, scriptName);
if (!File.Exists(checkFile))
logger.LogWarning("<{checkFile}> doesn't exist!", checkFile);
else
{
ReadOnlyCollection<string> fileNames = CopyFiles(split, workingDirectory, directory, files);
if (fileNames.Count == 0)
logger.LogWarning("<{fileNames}>(s)", fileNames.Count);
else
{
foreach (string fileName in fileNames)
logger.LogInformation("deno run --unstable --allow-read --allow-env --allow-net {scriptName} --id=true --input={fileName}", scriptName, fileName);
}
}
}
internal static void PocketBaseImportWithDeno(ILogger<Worker> logger, List<string> args)
{
internal static void PocketBaseImportWithDeno(ILogger<Worker> logger, List<string> args) {
char split = args[3][0];
string directory = args[6];
string scriptName = args[5];
@ -59,4 +20,37 @@ internal static partial class Helper20250301
PocketBaseImportWithDeno(logger, split, workingDirectory, scriptName, directory, files);
}
private static void PocketBaseImportWithDeno(ILogger<Worker> logger, char split, string workingDirectory, string scriptName, string directory, string[] files) {
string checkFile = Path.Combine(workingDirectory, scriptName);
if (!File.Exists(checkFile))
logger.LogWarning("<{checkFile}> doesn't exist!", checkFile);
else {
ReadOnlyCollection<string> fileNames = CopyFiles(split, workingDirectory, directory, files);
if (fileNames.Count == 0)
logger.LogWarning("<{fileNames}>(s)", fileNames.Count);
else {
foreach (string fileName in fileNames)
logger.LogInformation("deno run --unstable --allow-read --allow-env --allow-net {scriptName} --id=true --input={fileName}", scriptName, fileName);
}
}
}
private static ReadOnlyCollection<string> CopyFiles(char split, string workingDirectory, string directory, string[] files) {
List<string> results = [];
string fileName;
string checkFile;
string checkDirectory = Path.Combine(workingDirectory, directory);
if (!Directory.Exists(checkDirectory))
_ = Directory.CreateDirectory(checkDirectory);
foreach (string file in files) {
fileName = Path.GetFileName(file).Split(split)[^1];
checkFile = Path.Combine(checkDirectory, fileName);
if (File.Exists(checkFile))
File.Delete(checkFile);
File.Copy(file, checkFile);
results.Add(fileName);
}
return results.AsReadOnly();
}
}
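
The helper above only logs the deno run command line; a minimal sketch of actually launching it with System.Diagnostics.Process (not what the commit does; the working directory, script name, and input file below are hypothetical):

using System.Diagnostics;

string workingDirectory = @"C:\pocketbase";   // hypothetical
string scriptName = "import.ts";              // hypothetical
string fileName = "users.json";               // hypothetical
ProcessStartInfo startInfo = new("deno")
{
    WorkingDirectory = workingDirectory,
    Arguments = $"run --unstable --allow-read --allow-env --allow-net {scriptName} --id=true --input={fileName}",
};
using Process? process = Process.Start(startInfo);
process?.WaitForExit();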


@ -1,131 +1,19 @@
using File_Folder_Helper.Models;
using Microsoft.Extensions.Logging;
using System.Collections.ObjectModel;
using System.Text.Json;
using File_Folder_Helper.Models;
using Microsoft.Extensions.Logging;
namespace File_Folder_Helper.ADO2025.PI5;
internal static partial class Helper20250305
{
internal static partial class Helper20250305 {
private static readonly HttpClient _HttpClient = new();
private record Record(Uri URI, string Path, DateTime LastModified, int? TotalSeconds);
private static ReadOnlyCollection<NginxFileSystem>? GetCollection(string format, TimeZoneInfo timeZoneInfo, Uri uri)
{
List<NginxFileSystem>? results;
Task<HttpResponseMessage> taskHttpResponseMessage = _HttpClient.GetAsync(uri);
taskHttpResponseMessage.Wait();
if (!taskHttpResponseMessage.Result.IsSuccessStatusCode)
results = null;
else
{
Task<string> taskString = taskHttpResponseMessage.Result.Content.ReadAsStringAsync();
taskString.Wait();
if (taskString.Result.StartsWith('<'))
results = null;
else
{
NginxFileSystem[]? nginxFileSystems = JsonSerializer.Deserialize(taskString.Result, NginxFileSystemCollectionSourceGenerationContext.Default.NginxFileSystemArray);
if (nginxFileSystems is null)
results = null;
else
{
results = [];
NginxFileSystem nginxFileSystem;
for (int i = 0; i < nginxFileSystems.Length; i++)
{
nginxFileSystem = NginxFileSystem.Get(format, timeZoneInfo, uri, nginxFileSystems[i]);
results.Add(nginxFileSystem);
}
}
}
}
return results?.AsReadOnly();
}
private static ReadOnlyCollection<Record> GetRecords(string format, TimeZoneInfo timeZoneInfo, string host, ReadOnlyCollection<string> directoryNames, string compareDirectory)
{
List<Record> results = [];
Uri uri = new($"https://{host}/{string.Join('/', directoryNames)}");
ReadOnlyCollection<NginxFileSystem>? nginxFileSystems = GetCollection(format, timeZoneInfo, uri);
if (nginxFileSystems is not null)
{
NginxFileSystem nginxFileSystem;
ReadOnlyCollection<Record> records;
string checkDirectory = $"{compareDirectory}\\{string.Join('\\', directoryNames)}";
if (!Directory.Exists(checkDirectory))
_ = Directory.CreateDirectory(checkDirectory);
for (int i = 0; i < nginxFileSystems.Count; i++)
{
nginxFileSystem = NginxFileSystem.Get(format, timeZoneInfo, uri, nginxFileSystems[i]);
if (nginxFileSystem.Type == "file")
{
Record? record = CompareFile(host, directoryNames, compareDirectory, nginxFileSystem);
if (record is not null)
results.Add(record);
}
else
{
records = CompareDirectory(format, timeZoneInfo, host, directoryNames, compareDirectory, nginxFileSystem);
foreach (Record record in records)
results.Add(record);
}
}
}
return results.AsReadOnly();
}
private static ReadOnlyCollection<Record> CompareDirectory(string format, TimeZoneInfo timeZoneInfo, string host, ReadOnlyCollection<string> directoryNames, string compareDirectory, NginxFileSystem nginxFileSystem)
{
ReadOnlyCollection<Record> results;
List<string> collection = directoryNames.ToList();
collection.Add(nginxFileSystem.Name);
results = GetRecords(format, timeZoneInfo, host, collection.AsReadOnly(), compareDirectory);
return results;
}
private static Record? CompareFile(string host, ReadOnlyCollection<string> directoryNames, string compareDirectory, NginxFileSystem nginxFileSystem)
{
Record? result;
if (nginxFileSystem.LastModified is null || nginxFileSystem.Length is null)
result = null;
else
{
Uri uri = new($"https://{host}/{string.Join('/', directoryNames)}/{nginxFileSystem.Name}");
FileInfo fileInfo = new($"{compareDirectory}\\{string.Join('\\', directoryNames)}\\{nginxFileSystem.Name}");
if (!fileInfo.Exists)
result = new(URI: uri, Path: fileInfo.FullName, LastModified: nginxFileSystem.LastModified.Value, TotalSeconds: null);
else
{
int totalSeconds = (int)new TimeSpan(fileInfo.LastWriteTime.Ticks - nginxFileSystem.LastModified.Value.Ticks).TotalSeconds;
if (totalSeconds is not < 2 or not > -2)
result = new(URI: uri, Path: fileInfo.FullName, LastModified: nginxFileSystem.LastModified.Value, TotalSeconds: totalSeconds);
else if (fileInfo.Length != nginxFileSystem.Length.Value)
result = new(URI: uri, Path: fileInfo.FullName, LastModified: nginxFileSystem.LastModified.Value, TotalSeconds: 0);
else
result = null;
}
}
return result;
}
private static void Download(Record record)
{
Task<HttpResponseMessage> taskHttpResponseMessage = _HttpClient.GetAsync(record.URI);
taskHttpResponseMessage.Wait();
if (taskHttpResponseMessage.Result.IsSuccessStatusCode)
{
Task<string> taskString = taskHttpResponseMessage.Result.Content.ReadAsStringAsync();
taskString.Wait();
File.WriteAllText(record.Path, taskString.Result);
File.SetLastWriteTime(record.Path, record.LastModified);
}
}
internal static void WriteNginxFileSystemDelta(ILogger<Worker> logger, List<string> args)
{
internal static void WriteNginxFileSystemDelta(ILogger<Worker> logger, List<string> args) {
string host = args[2];
string rootDirectoryName = args[3];
string format = NginxFileSystem.GetFormat();
@ -136,8 +24,7 @@ internal static partial class Helper20250305
#if ShellProgressBar
ProgressBar progressBar = new(records.Count, "Downloading", new ProgressBarOptions() { ProgressCharacter = '─', ProgressBarOnBottom = true, DisableBottomPercentage = true });
#endif
foreach (Record record in records)
{
foreach (Record record in records) {
#if ShellProgressBar
progressBar.Tick();
#endif
@ -155,4 +42,99 @@ internal static partial class Helper20250305
#endif
}
private static ReadOnlyCollection<Record> GetRecords(string format, TimeZoneInfo timeZoneInfo, string host, ReadOnlyCollection<string> directoryNames, string compareDirectory) {
List<Record> results = [];
Uri uri = new($"https://{host}/{string.Join('/', directoryNames)}");
ReadOnlyCollection<NginxFileSystem>? nginxFileSystems = GetCollection(format, timeZoneInfo, uri);
if (nginxFileSystems is not null) {
NginxFileSystem nginxFileSystem;
ReadOnlyCollection<Record> records;
string checkDirectory = $"{compareDirectory}\\{string.Join('\\', directoryNames)}";
if (!Directory.Exists(checkDirectory))
_ = Directory.CreateDirectory(checkDirectory);
for (int i = 0; i < nginxFileSystems.Count; i++) {
nginxFileSystem = NginxFileSystem.Get(format, timeZoneInfo, uri, nginxFileSystems[i]);
if (nginxFileSystem.Type == "file") {
Record? record = CompareFile(host, directoryNames, compareDirectory, nginxFileSystem);
if (record is not null)
results.Add(record);
} else {
records = CompareDirectory(format, timeZoneInfo, host, directoryNames, compareDirectory, nginxFileSystem);
foreach (Record record in records)
results.Add(record);
}
}
}
return results.AsReadOnly();
}
private static ReadOnlyCollection<NginxFileSystem>? GetCollection(string format, TimeZoneInfo timeZoneInfo, Uri uri) {
List<NginxFileSystem>? results;
Task<HttpResponseMessage> taskHttpResponseMessage = _HttpClient.GetAsync(uri);
taskHttpResponseMessage.Wait();
if (!taskHttpResponseMessage.Result.IsSuccessStatusCode)
results = null;
else {
Task<string> taskString = taskHttpResponseMessage.Result.Content.ReadAsStringAsync();
taskString.Wait();
if (taskString.Result.StartsWith('<'))
results = null;
else {
NginxFileSystem[]? nginxFileSystems = JsonSerializer.Deserialize(taskString.Result, NginxFileSystemCollectionSourceGenerationContext.Default.NginxFileSystemArray);
if (nginxFileSystems is null)
results = null;
else {
results = [];
NginxFileSystem nginxFileSystem;
for (int i = 0; i < nginxFileSystems.Length; i++) {
nginxFileSystem = NginxFileSystem.Get(format, timeZoneInfo, uri, nginxFileSystems[i]);
results.Add(nginxFileSystem);
}
}
}
}
return results?.AsReadOnly();
}
private static Record? CompareFile(string host, ReadOnlyCollection<string> directoryNames, string compareDirectory, NginxFileSystem nginxFileSystem) {
Record? result;
if (nginxFileSystem.LastModified is null || nginxFileSystem.Length is null)
result = null;
else {
Uri uri = new($"https://{host}/{string.Join('/', directoryNames)}/{nginxFileSystem.Name}");
FileInfo fileInfo = new($"{compareDirectory}\\{string.Join('\\', directoryNames)}\\{nginxFileSystem.Name}");
if (!fileInfo.Exists)
result = new(URI: uri, Path: fileInfo.FullName, LastModified: nginxFileSystem.LastModified.Value, TotalSeconds: null);
else {
int totalSeconds = (int)new TimeSpan(fileInfo.LastWriteTime.Ticks - nginxFileSystem.LastModified.Value.Ticks).TotalSeconds;
if (totalSeconds is not < 2 or not > -2)
result = new(URI: uri, Path: fileInfo.FullName, LastModified: nginxFileSystem.LastModified.Value, TotalSeconds: totalSeconds);
else if (fileInfo.Length != nginxFileSystem.Length.Value)
result = new(URI: uri, Path: fileInfo.FullName, LastModified: nginxFileSystem.LastModified.Value, TotalSeconds: 0);
else
result = null;
}
}
return result;
}
private static ReadOnlyCollection<Record> CompareDirectory(string format, TimeZoneInfo timeZoneInfo, string host, ReadOnlyCollection<string> directoryNames, string compareDirectory, NginxFileSystem nginxFileSystem) {
ReadOnlyCollection<Record> results;
List<string> collection = directoryNames.ToList();
collection.Add(nginxFileSystem.Name);
results = GetRecords(format, timeZoneInfo, host, collection.AsReadOnly(), compareDirectory);
return results;
}
private static void Download(Record record) {
Task<HttpResponseMessage> taskHttpResponseMessage = _HttpClient.GetAsync(record.URI);
taskHttpResponseMessage.Wait();
if (taskHttpResponseMessage.Result.IsSuccessStatusCode) {
Task<string> taskString = taskHttpResponseMessage.Result.Content.ReadAsStringAsync();
taskString.Wait();
File.WriteAllText(record.Path, taskString.Result);
File.SetLastWriteTime(record.Path, record.LastModified);
}
}
}
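
One note on Download above: it round-trips the response through ReadAsStringAsync and WriteAllText, which only preserves text content. A byte-for-byte variant is a small change; the sketch below is not part of the commit, and the URI, path, and timestamp are placeholders:

using System;
using System.IO;
using System.Net.Http;

HttpClient httpClient = new();
Uri uri = new("https://example.com/data/file.json");                 // placeholder
string path = @"C:\compare\file.json";                               // placeholder
DateTime lastModified = new(2025, 3, 26, 0, 0, 0, DateTimeKind.Utc); // placeholder
byte[] bytes = httpClient.GetByteArrayAsync(uri).Result;
File.WriteAllBytes(path, bytes);
File.SetLastWriteTime(path, lastModified);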


@ -2,16 +2,39 @@ using Microsoft.Extensions.Logging;
namespace File_Folder_Helper.ADO2025.PI5;
internal static partial class Helper20250306
{
internal static partial class Helper20250306 {
private static int? GetProcessDataStandardFormatColumnTitlesLine(string[] lines)
{
internal static void ProcessDataStandardFormatToJson(ILogger<Worker> logger, List<string> args) {
string searchPattern = args[2];
string sourceDirectory = Path.GetFullPath(args[0]);
string[] files = Directory.GetFiles(sourceDirectory, searchPattern, SearchOption.AllDirectories);
if (files.Length != 1)
logger.LogWarning("<{files}>(s)", files.Length);
else
ProcessDataStandardFormatToJson(logger, files[0]);
}
private static void ProcessDataStandardFormatToJson(ILogger<Worker> logger, string file) {
string[] lines = File.ReadAllLines(file);
int? columnTitlesLine = GetProcessDataStandardFormatColumnTitlesLine(lines);
if (columnTitlesLine is null)
logger.LogWarning("<{columnTitlesLine}> is null", nameof(columnTitlesLine));
else {
string? text = ProcessDataStandardFormatToLastDataLine(lines, columnTitlesLine.Value);
File.WriteAllText(".lbl", text);
if (lines.Length < columnTitlesLine.Value + 1)
logger.LogWarning("<{lines}>(s)", lines.Length);
else {
string json = ProcessDataStandardFormatToJson(columnTitlesLine.Value, [], lines);
File.WriteAllText(".json", json);
}
}
}
private static int? GetProcessDataStandardFormatColumnTitlesLine(string[] lines) {
int? result = null;
for (int i = 0; i < lines.Length; i++)
{
if (lines[i].StartsWith("END_OFFSET") && i + 2 < lines.Length)
{
for (int i = 0; i < lines.Length; i++) {
if (lines[i].StartsWith("END_OFFSET") && i + 2 < lines.Length) {
result = i + 1;
break;
}
@ -19,13 +42,10 @@ internal static partial class Helper20250306
return result;
}
private static string? ProcessDataStandardFormatToLastDataLine(string[] lines, int columnTitlesLine)
{
private static string? ProcessDataStandardFormatToLastDataLine(string[] lines, int columnTitlesLine) {
string? result = null;
for (int i = columnTitlesLine + 1; i < lines.Length; i++)
{
if (lines[i].StartsWith("NUM_DATA_ROWS"))
{
for (int i = columnTitlesLine + 1; i < lines.Length; i++) {
if (lines[i].StartsWith("NUM_DATA_ROWS")) {
result = lines[i - 1];
break;
}
@ -33,8 +53,7 @@ internal static partial class Helper20250306
return result;
}
private static string ProcessDataStandardFormatToJson(int columnTitlesLine, string[] columns, string[] lines)
{
private static string ProcessDataStandardFormatToJson(int columnTitlesLine, string[] columns, string[] lines) {
#pragma warning disable CA1845, IDE0057
string result = "[\n";
string line;
@ -42,14 +61,12 @@ internal static partial class Helper20250306
string[] segments;
if (columns.Length == 0)
columns = lines[columnTitlesLine].Trim().Split('\t');
for (int i = columnTitlesLine + 1; i < lines.Length; i++)
{
for (int i = columnTitlesLine + 1; i < lines.Length; i++) {
line = "{";
segments = lines[i].Trim().Split('\t');
if (segments.Length != columns.Length)
break;
for (int c = 1; c < segments.Length; c++)
{
for (int c = 1; c < segments.Length; c++) {
value = segments[c].Replace("\\", "\\\\").Replace("\"", "\\\"");
line += '"' + columns[c].Trim('"') + '"' + ':' + '"' + value + '"' + ',';
}
@ -61,35 +78,4 @@ internal static partial class Helper20250306
#pragma warning restore CA1845, IDE0057
}
private static void ProcessDataStandardFormatToJson(ILogger<Worker> logger, string file)
{
string[] lines = File.ReadAllLines(file);
int? columnTitlesLine = GetProcessDataStandardFormatColumnTitlesLine(lines);
if (columnTitlesLine is null)
logger.LogWarning("<{columnTitlesLine}> is null", nameof(columnTitlesLine));
else
{
string? text = ProcessDataStandardFormatToLastDataLine(lines, columnTitlesLine.Value);
File.WriteAllText(".lbl", text);
if (lines.Length < columnTitlesLine.Value + 1)
logger.LogWarning("<{lines}>(s)", lines.Length);
else
{
string json = ProcessDataStandardFormatToJson(columnTitlesLine.Value, [], lines);
File.WriteAllText(".json", json);
}
}
}
internal static void ProcessDataStandardFormatToJson(ILogger<Worker> logger, List<string> args)
{
string searchPattern = args[2];
string sourceDirectory = Path.GetFullPath(args[0]);
string[] files = Directory.GetFiles(sourceDirectory, searchPattern, SearchOption.AllDirectories);
if (files.Length != 1)
logger.LogWarning("<{files}>(s)", files.Length);
else
ProcessDataStandardFormatToJson(logger, files[0]);
}
}
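
As a point of comparison for the hand-built JSON above, the same tab-separated row can be turned into a JSON object by letting JsonSerializer handle the escaping (a sketch with hypothetical column titles and data, not the commit's code):

using System.Collections.Generic;
using System.Text.Json;

string[] columns = ["Time", "A_LOGISTICS"];              // hypothetical column titles
string dataLine = "2025-03-26\tvalue with \\ and \"";    // hypothetical tab-separated data line
string[] segments = dataLine.Split('\t');
Dictionary<string, string> row = [];
for (int c = 0; c < segments.Length; c++)
    row[columns[c].Trim('"')] = segments[c];
string json = JsonSerializer.Serialize(row);             // backslashes and quotes escaped by the serializer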


@ -1,47 +1,38 @@
using File_Folder_Helper.Helpers;
using Microsoft.Extensions.Logging;
namespace File_Folder_Helper.ADO2025.PI5;
internal static partial class Helper20250315
{
internal static partial class Helper20250315 {
internal static void Empty(ILogger<Worker> logger, List<string> args)
{
internal static void Empty(ILogger<Worker> logger, List<string> args) {
string[] searchPatterns = args[2].Split('|');
string sourceDirectory = Path.GetFullPath(args[0]);
if (searchPatterns.Length == 1)
{
if (searchPatterns.Length == 1) {
string[] files = Directory.GetFiles(sourceDirectory, searchPatterns[0], SearchOption.AllDirectories);
if (files.Length == 0)
logger.LogWarning("<{files}>(s)", files.Length);
else
{
else {
string directoryName;
string[] directories;
foreach (string file in files)
{
foreach (string file in files) {
directoryName = Path.GetDirectoryName(file) ?? throw new Exception();
directories = Directory.GetDirectories(directoryName, "*", SearchOption.TopDirectoryOnly);
foreach (string directory in directories)
HelperDeleteEmptyDirectories.DeleteEmptyDirectories(logger, directory);
}
}
}
else
{
} else {
string[] files;
string checkFile;
HelperDeleteEmptyDirectories.DeleteEmptyDirectories(logger, sourceDirectory);
foreach (string searchPattern in searchPatterns)
{
foreach (string searchPattern in searchPatterns) {
files = Directory.GetFiles(sourceDirectory, searchPattern, SearchOption.AllDirectories);
if (files.Length == 0)
logger.LogWarning("<{files}>(s)", files.Length);
else
{
foreach (string file in files)
{
else {
foreach (string file in files) {
checkFile = $"{file}.json";
if (File.Exists(checkFile))
continue;


@ -1,14 +1,14 @@
using Microsoft.Extensions.Logging;
using System.Collections.ObjectModel;
using System.Diagnostics;
using System.Text.Json;
using System.Text.Json.Serialization;
using System.Text.RegularExpressions;
using Microsoft.Extensions.Logging;
namespace File_Folder_Helper.ADO2025.PI5;
internal static partial class Helper20250320
{
internal static partial class Helper20250320 {
private record Match(string Name,
string Parameters,
@ -34,11 +34,9 @@ internal static partial class Helper20250320
ReadOnlyCollection<int> ReferenceToLineNumbers,
int? ScopeEnum,
Search Search,
int StartLine)
{
int StartLine) {
public override string ToString()
{
public override string ToString() {
string result = JsonSerializer.Serialize(this, MethodCollectionCommonSourceGenerationContext.Default.Method);
return result;
}
@ -47,8 +45,7 @@ internal static partial class Helper20250320
[JsonSourceGenerationOptions(WriteIndented = true)]
[JsonSerializable(typeof(Method[]))]
private partial class MethodCollectionCommonSourceGenerationContext : JsonSerializerContext
{
private partial class MethodCollectionCommonSourceGenerationContext : JsonSerializerContext {
}
private record MethodWith(int? EndLine,
@ -60,11 +57,9 @@ internal static partial class Helper20250320
ReadOnlyCollection<int> ReferenceToLineNumbers,
int? ScopeEnum,
Search Search,
int StartLine)
{
int StartLine) {
public override string ToString()
{
public override string ToString() {
string result = JsonSerializer.Serialize(this, MethodCollectionCommonSourceGenerationContext.Default.Method);
return result;
}
@ -73,8 +68,7 @@ internal static partial class Helper20250320
[JsonSourceGenerationOptions(WriteIndented = true)]
[JsonSerializable(typeof(MethodWith[]))]
private partial class MethodWithCollectionCommonSourceGenerationContext : JsonSerializerContext
{
private partial class MethodWithCollectionCommonSourceGenerationContext : JsonSerializerContext {
}
private const string _Name = "name";
@ -88,11 +82,14 @@ internal static partial class Helper20250320
[GeneratedRegex(@"[[\]<,>?a-zA-Z0-9_()\s]*?\s[a-z_]{1}[a-zA-Z0-9_]*?,")]
private static partial Regex CSharpParameter();
[GeneratedRegex(@"(?<scope>public|private|internal|protected|\sI[a-zA-Z0-9_]*\.)\s?\b(?<static>static)?\s?\b(?<partial>partial)?\s?\b(?<async>async)?\s?\b(?<result>[\[\]\.\?<,>a-zA-Z0-9_()\s]*?)\s?\b(?<name>[A-Z_]{1}[a-zA-Z0-9_]*)+\((?<parameters>.*)\)")]
// VSCode Search ^\s*\b(?<scope>public|private|internal|protected|\sI[a-zA-Z0-9_]*\.)\s?\b(?<static>static)?\s?\b(?<partial>partial)?\s?\b(?<async>async)?\s?\b(?<result>[\[\]\.\?<,>a-zA-Z0-9_()\s]*?)\s?\b(?<name>[A-Z_]{1}[a-zA-Z0-9_])+\((?<parameters>.*)\)\s?\{?$
[GeneratedRegex(@"^\s*\b(?<scope>public|private|internal|protected|\sI[a-zA-Z0-9_]*\.)\s?\b(?<static>static)?\s?\b(?<partial>partial)?\s?\b(?<async>async)?\s?\b(?<result>[\[\]\.\?<,>a-zA-Z0-9_()\s]*?)\s?\b(?<name>[A-Z_]{1}[a-zA-Z0-9_]*)+\((?<parameters>.*)\)\s?\{?$")]
private static partial Regex CSharpMethodLine();
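For reference, one declaration this pattern is written to capture is the CopyFiles line from Helper20250301 earlier in this commit; a worked example of the named groups it extracts (a sketch, assuming it is run against the generated CSharpMethodLine() regex above):

string line = "private static ReadOnlyCollection<string> CopyFiles(char split, string workingDirectory, string directory, string[] files) {";
System.Text.RegularExpressions.Match m = CSharpMethodLine().Match(line);
// m.Groups["scope"].Value      == "private"
// m.Groups["static"].Value     == "static"
// m.Groups["result"].Value     == "ReadOnlyCollection<string>"
// m.Groups["name"].Value       == "CopyFiles"
// m.Groups["parameters"].Value == "char split, string workingDirectory, string directory, string[] files"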
internal static void SortCodeMethods(ILogger<Worker> logger, List<string> args, CancellationToken cancellationToken)
{
private static ReadOnlyCollection<Method> GetSortedMethods(ReadOnlyCollection<Method> methods) =>
(from l in methods orderby l.ScopeEnum descending, l.ReferenceToLineNumbers.Count descending, l.Line.Length, l.Match.Name.Length, l.Match.Name select l).ToArray().AsReadOnly();
internal static void SortCodeMethods(ILogger<Worker> logger, List<string> args, CancellationToken cancellationToken) {
bool check;
string[] lines;
List<string> changed = [];
@ -104,16 +101,13 @@ internal static partial class Helper20250320
string repositoryDirectory = Path.GetFullPath(args[0]);
string[] cSharpFiles = Directory.GetFiles(repositoryDirectory, "*.cs", SearchOption.AllDirectories);
ReadOnlyCollection<string> gitOthersModifiedAndDeletedExcludingStandardFiles = logOnly ? new(cSharpFiles) : Helpers.HelperGit.GetOthersModifiedAndDeletedExcludingStandardFiles(repositoryDirectory, usePathCombine, cancellationToken);
foreach (string cSharpFile in cSharpFiles)
{
foreach (string cSharpFile in cSharpFiles) {
if (!gitOthersModifiedAndDeletedExcludingStandardFiles.Contains(cSharpFile))
continue;
for (int i = 0; i < 10; i++)
{
for (int i = 0; i < 10; i++) {
lines = File.ReadAllLines(cSharpFile);
check = SortFile(logger, logOnly, scopeSpaces, cSharpFile, lines);
if (check)
{
if (check) {
Thread.Sleep(500);
changed.Add($"{i + 1:00}) {cSharpFile}");
}
@ -123,41 +117,35 @@ internal static partial class Helper20250320
}
if (changed.Count == 0)
logger.LogInformation("No changes :)");
else
{
else {
changed.Reverse();
foreach (string c in changed)
logger.LogInformation(c);
}
}
private static bool SortFile(ILogger<Worker> logger, bool logOnly, int scopeSpaces, string cSharpFile, string[] lines)
{
private static bool SortFile(ILogger<Worker> logger, bool logOnly, int scopeSpaces, string cSharpFile, string[] lines) {
bool result;
ReadOnlyCollection<Method> methods = GetMethods(logger, scopeSpaces, cSharpFile, lines);
if (methods.Count == 0)
result = false;
else if (methods.Any(l => l.EndLine is null))
result = false;
else if (logOnly)
{
else if (logOnly) {
foreach (Method method in methods)
logger.LogInformation("{cSharpFile} - {Name} has {lines} line(s)", cSharpFile, method.Match.Name, (method.EndLine is null ? 999999 : method.EndLine.Value - method.StartLine).ToString("000000"));
result = false;
}
else
{
} else {
ReadOnlyCollection<Method> sortedMethods = GetSortedMethods(methods);
if (Debugger.IsAttached)
File.WriteAllText(Path.Combine(".vscode", "helper", ".txt"), string.Join(Environment.NewLine, sortedMethods.Select(l => $"{l.Match.Name} => {l.Parameters.Count}")));
File.WriteAllText(Path.Combine(".vscode", "helper", ".json"), JsonSerializer.Serialize(sortedMethods.ToArray(), MethodCollectionCommonSourceGenerationContext.Default.MethodArray));
ReadOnlyCollection<MethodWith> collection = GetCollection(logger, lines, sortedMethods);
result = WriteAllLines(cSharpFile, lines, collection);
}
return result;
}
private static ReadOnlyCollection<Method> GetMethods(ILogger<Worker> logger, int scopeSpaces, string cSharpFile, string[] lines)
{
private static ReadOnlyCollection<Method> GetMethods(ILogger<Worker> logger, int scopeSpaces, string cSharpFile, string[] lines) {
List<Method> results = [];
int check;
int blocks;
@ -176,8 +164,7 @@ internal static partial class Helper20250320
Regex parameterRegex = CSharpParameter();
ReadOnlyDictionary<string, string> parameters;
System.Text.RegularExpressions.Match regularExpressionsMatch;
for (int i = 0; i < lines.Length; i++)
{
for (int i = 0; i < lines.Length; i++) {
check = GetNumberOfStartSpaces(lines, i);
if (check != scopeSpaces)
continue;
@ -215,17 +202,17 @@ internal static partial class Helper20250320
firstLine = lines[startLine].Trim();
else
firstLine = lines[startLine + 1].Trim();
isLinq = !lines[i + 1].StartsWith("#pragma") && !lines[i + 1].StartsWith("#nullable") && lines[i + 1].Trim() != "{";
isLinq = !lines[i + 1].StartsWith("#pragma") && !lines[i + 1].StartsWith("#nullable") && lines[i].Trim()[^1] != '{' && lines[i + 1].Trim() != "{";
if (isLinq)
blocks++;
endLine = null;
for (int j = i + 1; j < lines.Length; j++)
{
if (lines[i].Trim()[^1] == '{')
blocks++;
for (int j = i + 1; j < lines.Length; j++) {
innerLine = lines[j].Trim();
if (innerLine.StartsWith("#pragma") || innerLine.StartsWith("#nullable"))
continue;
if (isLinq && string.IsNullOrEmpty(innerLine))
{
if (isLinq && string.IsNullOrEmpty(innerLine)) {
if (line.EndsWith(';'))
blocks--;
}
@ -240,11 +227,9 @@ internal static partial class Helper20250320
break;
}
referenceToLineNumbers = GetReferenceToLineNumbers(lines: lines, start: 0, end: lines.Length, i: i, search: search, parameters: parameters);
if (referenceToLineNumbers.Count == 0)
{
if (referenceToLineNumbers.Count == 0) {
lineSegmentFirst = line.Split(match.Name)[0];
if (!lines[i - 1].Trim().StartsWith("[Obsolete"))
{
if (!lines[i - 1].Trim().StartsWith("[Obsolete")) {
if (lineSegmentFirst.StartsWith("private"))
logger.LogWarning("// <{cSharpFileName}> {name} with {parameters} parameter(s) <{line}>", Path.GetFileName(cSharpFile), match.Name, parameters, lineSegmentFirst);
else
@ -269,11 +254,9 @@ internal static partial class Helper20250320
return results.AsReadOnly();
}
private static int GetNumberOfStartSpaces(string[] lines, int i)
{
private static int GetNumberOfStartSpaces(string[] lines, int i) {
int result = 0;
foreach (char @char in lines[i])
{
foreach (char @char in lines[i]) {
if (@char != ' ')
break;
result += 1;
@ -281,11 +264,9 @@ internal static partial class Helper20250320
return result;
}
private static int GetScopeEnum(Match match)
{
private static int GetScopeEnum(Match match) {
int result;
int value = match.Scope switch
{
int value = match.Scope switch {
"public" => 8000,
"internal" => 7000,
"protected" => 6000,
@ -301,29 +282,36 @@ internal static partial class Helper20250320
return result;
}
private static ReadOnlyDictionary<string, string> GetParameters(Regex parameterRegex, Match match)
{
private static ReadOnlyDictionary<string, string> GetParameters(Regex parameterRegex, Match match) {
Dictionary<string, string> results = [];
string value;
string[] segments;
System.Text.RegularExpressions.Match[] matches = parameterRegex.Matches($"{match.Parameters},").ToArray();
foreach (System.Text.RegularExpressions.Match m in matches)
{
if (!m.Success)
continue;
value = m.Value.Trim()[..^1];
segments = value.Split(' ');
results.Add(segments[^1], value);
try {
foreach (System.Text.RegularExpressions.Match m in matches) {
if (!m.Success)
continue;
value = m.Value.Trim()[..^1];
segments = value.Split(' ');
results.Add(segments[^1], value);
}
} catch (Exception) {
results.Clear();
System.Text.RegularExpressions.Match m;
for (int i = 0; i < matches.Length; i++) {
m = matches[i];
if (!m.Success)
continue;
results.Add(i.ToString(), i.ToString());
}
}
return new(results);
}
private static int GetStartLine(string[] lines, int i)
{
private static int GetStartLine(string[] lines, int i) {
int result = i;
string line;
for (int j = i - 1; j > -1; j--)
{
for (int j = i - 1; j > -1; j--) {
line = lines[j].Trim();
if (!line.StartsWith('[') && !line.StartsWith('#') && !line.StartsWith("/// "))
break;
@ -332,12 +320,10 @@ internal static partial class Helper20250320
return result;
}
private static int GetLineBlockCount(string line, bool isLinq)
{
private static int GetLineBlockCount(string line, bool isLinq) {
int result = 0;
bool ignore = false;
for (int i = 0; i < line.Length; i++)
{
for (int i = 0; i < line.Length; i++) {
if (line[i] == '\'')
i++;
else if (!isLinq && !ignore && line[i] == '{')
@ -352,27 +338,21 @@ internal static partial class Helper20250320
return result;
}
private static List<int> GetReferenceToLineNumbers(string[] lines, int start, int end, int i, Search search, ReadOnlyDictionary<string, string> parameters)
{
private static List<int> GetReferenceToLineNumbers(string[] lines, int start, int end, int i, Search search, ReadOnlyDictionary<string, string> parameters) {
List<int> results = [];
string[] segments;
string[] afterSegments;
string lastSegmentBeforeDot;
for (int j = start; j < end; j++)
{
for (int j = start; j < end; j++) {
if (j == i)
continue;
segments = lines[j].Split(search.Name);
if (segments.Length == 1)
{
if (segments.Length == 1) {
segments = lines[j].Split(search.Not);
if (segments.Length == 1)
{
if (segments.Length == 1) {
segments = lines[j].Split(search.Wrap);
if (segments.Length == 1)
{
if (!lines[j].EndsWith(search.Delegate))
{
if (segments.Length == 1) {
if (!lines[j].EndsWith(search.Delegate)) {
segments = lines[j].Split(search.Constructor);
if (segments.Length == 1)
continue;
@ -382,16 +362,12 @@ internal static partial class Helper20250320
}
if (lines[j].EndsWith(search.Delegate))
results.Add(j);
else
{
else {
lastSegmentBeforeDot = segments[^1].Split(").")[0];
if (parameters.Count == 0)
{
if (parameters.Count == 0) {
if (lastSegmentBeforeDot.Contains(','))
continue;
}
else
{
} else {
afterSegments = lastSegmentBeforeDot.Split(',');
if (afterSegments.Length != parameters.Count)
continue;
@ -402,15 +378,10 @@ internal static partial class Helper20250320
return results;
}
private static ReadOnlyCollection<Method> GetSortedMethods(ReadOnlyCollection<Method> methods) =>
(from l in methods orderby l.ScopeEnum descending, l.ReferenceToLineNumbers.Count descending, l.Line.Length, l.Match.Name.Length, l.Match.Name select l).ToArray().AsReadOnly();
private static ReadOnlyCollection<MethodWith> GetCollection(ILogger<Worker> logger, string[] lines, ReadOnlyCollection<Method> sortedMethods)
{
private static ReadOnlyCollection<MethodWith> GetCollection(ILogger<Worker> logger, string[] lines, ReadOnlyCollection<Method> sortedMethods) {
List<MethodWith> results = [];
List<Method> check = sortedMethods.ToList();
foreach (Method method in sortedMethods)
{
foreach (Method method in sortedMethods) {
logger.LogInformation($"{method.Match.Name} => {method.Parameters.Count}");
if (method.EndLine is null)
continue;
@ -422,25 +393,21 @@ internal static partial class Helper20250320
return results.AsReadOnly();
}
private static MethodWith GetMethodWith(string[] lines, ReadOnlyCollection<Method> methods, List<Method> check, Method method, int methodEndLineValue)
{
private static MethodWith GetMethodWith(string[] lines, ReadOnlyCollection<Method> methods, List<Method> check, Method method, int methodEndLineValue) {
MethodWith methodWith;
List<int> referenceToLineNumbers;
MethodWith[] sortedReferences;
Dictionary<int, MethodWith> references = [];
foreach (Method m in methods)
{
foreach (Method m in methods) {
if (m.EndLine is null)
continue;
if (m == method)
continue;
referenceToLineNumbers = GetReferenceToLineNumbers(lines: lines, start: method.StartLine, end: methodEndLineValue, i: -1, search: m.Search, parameters: m.Parameters);
if (referenceToLineNumbers.Count > 0)
{
if (referenceToLineNumbers.Count > 0) {
if (!check.Remove(m))
continue;
foreach (int i in referenceToLineNumbers)
{
foreach (int i in referenceToLineNumbers) {
if (references.ContainsKey(i))
continue;
methodWith = GetMethodWith(lines, methods, check, m, m.EndLine.Value);
@ -466,23 +433,20 @@ internal static partial class Helper20250320
return methodWith;
}
private static bool WriteAllLines(string cSharpFile, string[] lines, ReadOnlyCollection<MethodWith> collection)
{
private static bool WriteAllLines(string cSharpFile, string[] lines, ReadOnlyCollection<MethodWith> collection) {
bool result;
if (Debugger.IsAttached)
WriteDebug(collection);
List<string> results = [];
ReadOnlyCollection<int> methodLines = GetMethodLines(collection);
int maxMethodLines = methodLines.Max();
for (int i = 0; i < maxMethodLines; i++)
{
for (int i = 0; i < maxMethodLines; i++) {
if (methodLines.Contains(i))
continue;
results.Add(lines[i]);
}
List<bool> nests = [true];
foreach (MethodWith methodWith in collection)
{
foreach (MethodWith methodWith in collection) {
if (methodWith.EndLine is null)
continue;
AppendLines(results, nests, lines, methodWith, methodWith.EndLine.Value);
@ -493,16 +457,14 @@ internal static partial class Helper20250320
string join = string.Join(Environment.NewLine, results);
if (join == text)
result = false;
else
{
else {
result = true;
File.WriteAllText(cSharpFile, join);
}
return result;
}
private static void WriteDebug(ReadOnlyCollection<MethodWith> collection)
{
private static void WriteDebug(ReadOnlyCollection<MethodWith> collection) {
List<string> results = [];
List<bool> nests = [true];
foreach (MethodWith methodWith in collection)
@ -510,8 +472,7 @@ internal static partial class Helper20250320
File.WriteAllText(Path.Combine(".vscode", "helper", ".md"), string.Join(Environment.NewLine, results));
}
private static void AppendLines(List<string> results, List<bool> nests, MethodWith methodWith)
{
private static void AppendLines(List<string> results, List<bool> nests, MethodWith methodWith) {
nests.Add(true);
results.Add($" - {new string('#', nests.Count)} {methodWith.Match.Name} => {methodWith.Parameters.Count}");
foreach (MethodWith m in methodWith.References)
@ -519,12 +480,10 @@ internal static partial class Helper20250320
nests.RemoveAt(nests.Count - 1);
}
private static ReadOnlyCollection<int> GetMethodLines(ReadOnlyCollection<MethodWith> collection)
{
private static ReadOnlyCollection<int> GetMethodLines(ReadOnlyCollection<MethodWith> collection) {
List<int> results = [];
List<bool> nests = [true];
foreach (MethodWith methodWith in collection)
{
foreach (MethodWith methodWith in collection) {
if (methodWith.EndLine is null)
continue;
AppendLineNumbers(results, nests, methodWith, methodWith.EndLine.Value);
@ -535,13 +494,11 @@ internal static partial class Helper20250320
return new(results);
}
private static void AppendLineNumbers(List<int> results, List<bool> nests, MethodWith methodWith, int methodWithEndLineValue)
{
private static void AppendLineNumbers(List<int> results, List<bool> nests, MethodWith methodWith, int methodWithEndLineValue) {
nests.Add(true);
for (int i = methodWith.StartLine; i < methodWithEndLineValue + 1; i++)
results.Add(i);
foreach (MethodWith m in methodWith.References)
{
foreach (MethodWith m in methodWith.References) {
if (m.EndLine is null)
continue;
AppendLineNumbers(results, nests, m, m.EndLine.Value);
@ -549,13 +506,11 @@ internal static partial class Helper20250320
nests.RemoveAt(nests.Count - 1);
}
private static void AppendLines(List<string> results, List<bool> nests, string[] lines, MethodWith methodWith, int methodWithEndLineValue)
{
private static void AppendLines(List<string> results, List<bool> nests, string[] lines, MethodWith methodWith, int methodWithEndLineValue) {
nests.Add(true);
for (int i = methodWith.StartLine; i < methodWithEndLineValue + 1; i++)
results.Add(lines[i]);
foreach (MethodWith m in methodWith.References)
{
foreach (MethodWith m in methodWith.References) {
if (m.EndLine is null)
continue;
AppendLines(results, nests, lines, m, m.EndLine.Value);


@ -1,19 +1,18 @@
using File_Folder_Helper.Helpers;
using Microsoft.Extensions.Logging;
using System.Collections.ObjectModel;
using File_Folder_Helper.Helpers;
using Microsoft.Extensions.Logging;
namespace File_Folder_Helper.ADO2025.PI5;
internal static partial class Helper20250321
{
internal static partial class Helper20250321 {
private record Record(string Directory,
string File,
ThreeDeep ThreeDeep)
{
ThreeDeep ThreeDeep) {
public static ReadOnlyCollection<Record> GetCollection(string sourceDirectory, string searchPattern, string[] files)
{
public static ReadOnlyCollection<Record> GetCollection(string sourceDirectory, string searchPattern, string[] files) {
List<Record> results = [];
Record record;
string directory;
@ -21,8 +20,7 @@ internal static partial class Helper20250321
bool json = searchPattern.Contains(".json");
bool check = searchPattern.Split('.').Length == 3;
ReadOnlyCollection<ThreeDeep> collection = ThreeDeep.GetCollection(files);
foreach (ThreeDeep threeDeep in collection)
{
foreach (ThreeDeep threeDeep in collection) {
if (!json && check)
fileNameWithoutExtension = threeDeep.DirectoryName;
else if (!json && !check)
@ -32,19 +30,15 @@ internal static partial class Helper20250321
else
throw new NotImplementedException();
directory = $"{fileNameWithoutExtension[^1]}{fileNameWithoutExtension[^3..][..2]}";
if (json || (!json && !check))
{
if (json || (!json && !check)) {
record = new(Directory: Path.Combine(sourceDirectory, "new-a", directory),
File: $"{threeDeep.FileNameWithoutExtension}{threeDeep.Extension}",
ThreeDeep: threeDeep);
}
else if (!json && check)
{
} else if (!json && check) {
record = new(Directory: Path.Combine(sourceDirectory, "new-b", directory, threeDeep.DirectoryName),
File: $"{threeDeep.FileNameWithoutExtension}{threeDeep.Extension}",
ThreeDeep: threeDeep);
}
else
} else
throw new NotImplementedException();
results.Add(record);
}
@ -59,17 +53,14 @@ internal static partial class Helper20250321
long Length,
string DirectoryName,
string ParentDirectoryName,
string Root)
{
string Root) {
public static ReadOnlyCollection<ThreeDeep> GetCollection(string[] files)
{
public static ReadOnlyCollection<ThreeDeep> GetCollection(string[] files) {
List<ThreeDeep> results = [];
ThreeDeep record;
FileInfo fileInfo;
string parentDirectory;
foreach (string file in files)
{
foreach (string file in files) {
fileInfo = new(file);
parentDirectory = Path.GetDirectoryName(fileInfo.DirectoryName) ?? throw new Exception();
record = new(Extension: Path.GetExtension(file),
@ -89,14 +80,12 @@ internal static partial class Helper20250321
}
internal static void MoveToLast(ILogger<Worker> logger, List<string> args)
{
internal static void MoveToLast(ILogger<Worker> logger, List<string> args) {
string[] searchPatterns = args[2].Split('|');
string sourceDirectory = Path.GetFullPath(args[0]);
if (searchPatterns.Length == 1)
logger.LogInformation("No code for just one!");
else
{
else {
HelperDeleteEmptyDirectories.DeleteEmptyDirectories(logger, sourceDirectory);
ReadOnlyCollection<Record> collection = GetCollection(logger, searchPatterns, sourceDirectory);
if (collection.Count != 0)
@ -108,17 +97,14 @@ internal static partial class Helper20250321
}
}
private static ReadOnlyCollection<Record> GetCollection(ILogger<Worker> logger, string[] searchPatterns, string sourceDirectory)
{
private static ReadOnlyCollection<Record> GetCollection(ILogger<Worker> logger, string[] searchPatterns, string sourceDirectory) {
string[] files;
List<Record> results = [];
foreach (string searchPattern in searchPatterns)
{
foreach (string searchPattern in searchPatterns) {
files = Directory.GetFiles(sourceDirectory, searchPattern, SearchOption.AllDirectories);
if (files.Length == 0)
logger.LogWarning("<{files}>(s)", files.Length);
else
{
else {
ReadOnlyCollection<Record> collection = Record.GetCollection(sourceDirectory, searchPattern, files);
results.AddRange(collection);
}
@ -126,25 +112,21 @@ internal static partial class Helper20250321
return results.AsReadOnly();
}
private static void UseCollection(ReadOnlyCollection<Record> collection)
{
private static void UseCollection(ReadOnlyCollection<Record> collection) {
string fullPath;
string checkFile;
List<string> distinct = [];
foreach (Record record in collection)
{
foreach (Record record in collection) {
if (distinct.Contains(record.Directory))
continue;
distinct.Add(record.Directory);
}
foreach (string directory in distinct)
{
foreach (string directory in distinct) {
if (Directory.Exists(directory))
continue;
_ = Directory.CreateDirectory(directory);
}
foreach (Record record in collection)
{
foreach (Record record in collection) {
checkFile = Path.Combine(record.Directory, record.File);
if (File.Exists(checkFile))
continue;


@ -1,4 +1,5 @@
"use strict";
// DateTime normal = DateTime.Now.ToUniversalTime();
// logger.LogInformation("Now - ToUniversalTime: {ticks}", normal.Ticks);
// DateTime utc1970DateTime = new(1970, 1, 1, 0, 0, 0, DateTimeKind.Utc);
@ -16,42 +17,7 @@
// console.log("dateText: " + dateText);
// DateTime utcMeDateTime = new(1980, 1, 17, 0, 0, 0, DateTimeKind.Utc);
// long meTotalSeconds = (long)Math.Floor(fileInfo.LastWriteTime.ToUniversalTime().Subtract(utcMeDateTime).TotalSeconds);
const now = new Date();
const time = now.getTime();
const year = now.getFullYear();
const start = new Date(year, 0, 0);
const oneDay = 1000 * 60 * 60 * 24;
const timezoneOffset = now.getTimezoneOffset();
const diff = (now - start) + ((start.getTimezoneOffset() - timezoneOffset) * 60 * 1000);
const day = Math.floor(diff / oneDay);
console.log('Day of year: ' + day);
var season = year + "-";
if (day < 78)
season = season + "0.Winter";
else if (day < 124)
season = season + "1.Spring";
else if (day < 171)
season = season + "2.Spring";
else if (day < 217)
season = season + "3.Summer";
else if (day < 264)
season = season + "4.Summer";
else if (day < 309)
season = season + "5.Fall";
else if (day < 354)
season = season + "6.Fall";
else
season = season + "7.Winter";
let seconds = time.valueOf() + timezoneOffset;
let epoch = seconds * 10000;
let ticks = epoch + 621355968000000000;
let dateText = seconds + " - " + ticks + " - " + now.toString();
console.log("dateText: " + dateText);
console.log("end");
let original = "d:\\5-Other-Small\\Kanban\\Year-Season\\2025\\2025-0.Winter\\1737913505637";
let segments = original.split('\\');
let path = segments.slice(0, -3).join('\\') + '\\2021\\2021-0.Summer\\123';
console.log(path);
// epoch: 25201000
// ticks: 638665132483790000
// dateText: 638665132483790000 - Wed Nov 06 2024 10:55:58 GMT-0700 (Mountain Standard Time)
@ -59,3 +25,82 @@ console.log(path);
// 638665135325760000
// 638665136814890000
// utc1970DateTime: 621355968000000000
function getDateText() {
let result;
const now = new Date();
const time = now.getTime();
const year = now.getFullYear();
const start = new Date(year, 0, 0);
const oneDay = 1000 * 60 * 60 * 24;
const timezoneOffset = now.getTimezoneOffset();
const diff = (now - start) + ((start.getTimezoneOffset() - timezoneOffset) * 60 * 1000);
const day = Math.floor(diff / oneDay);
console.log('Day of year: ' + day);
var season = year + "-";
if (day < 78)
season = season + "0.Winter";
else if (day < 124)
season = season + "1.Spring";
else if (day < 171)
season = season + "2.Spring";
else if (day < 217)
season = season + "3.Summer";
else if (day < 264)
season = season + "4.Summer";
else if (day < 309)
season = season + "5.Fall";
else if (day < 354)
season = season + "6.Fall";
else
season = season + "7.Winter";
const seconds = time.valueOf() + timezoneOffset;
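// note: in the line above, getTime() is milliseconds since the Unix epoch and getTimezoneOffset() is minutes, so the offset term only shifts the value by a few hundred milliseconds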
const epoch = seconds * 10000;
const ticks = epoch + 621355968000000000;
result = seconds + " - " + ticks + " - " + now.toString();
return result;
}
const dateText = getDateText();
console.log("dateText: " + dateText);
function getPath() {
let result;
const original = "d:\\5-Other-Small\\Kanban\\Year-Season\\2025\\2025-0.Winter\\1737913505637";
const segments = original.split('\\');
result = segments.slice(0, -3).join('\\') + '\\2021\\2021-0.Summer\\123';
return result;
}
const path = getPath();
console.log("path: " + path);
// https://tickstodatetime.wassupy.com/?ticks=638784250251441727
function getInfinityQS(value, sequence) {
let result;
if (sequence.length < 18)
result = [value];
else {
const now = new Date();
const epochHour = 36000000000;
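// 1 hour expressed in .NET ticks (a tick is 100 ns, i.e. 10,000,000 ticks per second)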
const epochTicks = 621355968000000000;
const timezoneHourOffset = now.getTimezoneOffset() / 60;
const timezoneOffset = timezoneHourOffset * epochHour;
const milliseconds = (sequence - epochTicks + timezoneOffset) / 10000;
const flooredMilliseconds = Math.floor(milliseconds / (60 * 1000)) * (60 * 1000);
const ceiledMilliseconds = Math.ceil(milliseconds / (60 * 1000)) * (60 * 1000);
result = [(flooredMilliseconds / 1000), (ceiledMilliseconds / 1000)];
}
return result;
}
const date = '3/26/2025';
const infinityQS = 1742853453;
const sequence = '638784250251441727';
const values = getInfinityQS(date, sequence);
console.info("InfinityQS: " + values);
if (values[0] < infinityQS && values[1] > infinityQS)
console.info("InfinityQS: Pass");
else
console.warn("InfinityQS: Fail");


@ -127,4 +127,28 @@ const nineResRangePercent = getNineResRangePercent(criticalRhoPoints);
const nineCriticalPointsStdDev = Math.sqrt(getVariance(criticalRhoPoints));
const nineCriticalPointsPhaseAngleAverage = getAverage(criticalPhasePoints);
const nineEdgeMeanDelta = getNineEdgeMeanDelta(edge4mmRhoPoints, edge10mmRhoPoints);
console.log(nineCriticalPointsStdDev);
console.log(nineCriticalPointsStdDev);
// Sequence to date string
// getValue(self, getContextData('1', 'cds.SEQUENCE', ''));
function getValue(value, sequence) {
let result;
if (sequence.length < 18)
result = value;
else {
const epochTicks = 621355968000000000;
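// .NET tick count at the Unix epoch (1970-01-01T00:00:00Z)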
const milliseconds = (sequence - epochTicks) / 10000;
const date = new Date(milliseconds);
result = date.toString();
}
return result;
}
const date = '3/26/2025';
const sequence = '638784250251441727';
const value = getValue(date, sequence);
// the 18-digit sequence should convert, so getValue returns a readable date string rather than the fallback value
if (value !== date)
console.info("Pass: " + value);
else
console.warn("Fail");