Compare commits

...

10 Commits

1cd20fa08b  2024-07-10 12:08:41 -07:00
  YamlDotNet sort yaml
  Helper20240623 Sorted
  Improved log
  Force root directory modified date when updating file
  Switched to bash over link
  MoveUpOneDirectory
  Only write if needed
  UpdateSubTasksInMarkdownFiles

47e6b85c21  2024-06-13 08:51:00 -07:00
  Write index.yml.md
  Helper to diff video files
  Move matches from directory
  Bug fix for DirectoryToISO
  Find replace instead of remove
  Rename Directory
  Amazon
  Immich Person
  PersonKeyToName
  PullIconsForBLM
  New links

299aa19d53  2024-05-01 17:08:21 -07:00
  Kanban Index for Typescript Helper

4e3f06bb44  2024-05-01 09:05:08 -07:00
  Minor changes
  Empty file ISO
  Add date back for just .kanbn
  Removed HardcodedFileSearchAndSort
  Sync with 01-23
  JsonToTsv
  System.Text.Json
  White-List
  Ready to move to Move Helper
  Remove Whitelist
  Force Start At
  Check for .git directory before ls
  Optional
  Allow root for unc path
  nuget bump
  PreVerify
  EnforceCodeStyleInBuild
  dotnet_analyzer_diagnostic
  HelperGit
  searchDelegate
  Host File
  AlertIfNewDeviceIsConnected
  AOT
  SetFrontMatterAndH1
  Match Error
  Unknown with better logging
  Undo 04-05
  WriteAppendToHostConfFile
  MonA
  IsKanbanIndex
  Dotnet Format Pre-commit
  NPM
  CreateWindowsShortcut
  Working directory
  Split description
  Copy tests
  Ready to test
  Delete after a couple of days
  GitConfigCleanUp
  knb Files

84ad97ac6e  2024-01-08 10:02:30 -07:00
  SortCodeMethods
  DirectoryToISO
  TextToJson

ccea8de8cf  2024-01-05 18:39:49 -07:00
  Removed updated and created front data
  Helper-2023-12-21 -> Helper-2024-01-05

90380fdd43  2023-12-25 17:10:26 -07:00
  CUDA -> ConvertId
  KeePass -> ConvertKeePassExport
  NMap -> SplitJsonFile

1f5743cf74  2023-12-13 13:19:37 -07:00
  .editorconfig
  2023-12-12 SplitJsonFile
  2023-11-28 Migrated to File-Watcher
  2023-12-05 SplitMarkdownFile
  2023-11-30 RenameReactorProcessDataStandardFormatFiles
  2023-11-22 ProcessDataStandardFormat

5fe51ef645  2023-11-14 07:54:34 -07:00
  nuget bump
  2023-11-02
  2023-11-08
  net8.0
  editorconfig
  NuGet NuSpec
  Kanban

948937d8a5  2023-10-16 15:17:21 -07:00
  Day Helpers
  net8.0
  Wrap in try
  Zip with directories
65 changed files with 5969 additions and 880 deletions

View File

@@ -1,3 +1,19 @@
[*.md]
end_of_line = crlf
file_header_template = unset
indent_size = 2
indent_style = space
insert_final_newline = false
root = true
tab_width = 2
[*.csproj]
end_of_line = crlf
file_header_template = unset
indent_size = 2
indent_style = space
insert_final_newline = false
root = true
tab_width = 2
[*.cs]
csharp_indent_block_contents = true
csharp_indent_braces = false
@@ -13,6 +29,7 @@ csharp_new_line_before_members_in_object_initializers = true
csharp_new_line_before_open_brace = all
csharp_new_line_between_query_expression_clauses = true
csharp_prefer_braces = false
csharp_prefer_qualified_reference = true:error
csharp_prefer_simple_default_expression = true:warning
csharp_prefer_simple_using_statement = true:warning
csharp_prefer_static_local_function = true:warning
@@ -73,17 +90,53 @@ csharp_style_var_elsewhere = false:warning
csharp_style_var_for_built_in_types = false:warning
csharp_style_var_when_type_is_apparent = false:warning
csharp_using_directive_placement = outside_namespace
dotnet_analyzer_diagnostic.category-Design.severity = error
dotnet_analyzer_diagnostic.category-Documentation.severity = error
dotnet_analyzer_diagnostic.category-Globalization.severity = none
dotnet_analyzer_diagnostic.category-Interoperability.severity = error
dotnet_analyzer_diagnostic.category-Maintainability.severity = error
dotnet_analyzer_diagnostic.category-Naming.severity = none
dotnet_analyzer_diagnostic.category-Performance.severity = none
dotnet_analyzer_diagnostic.category-Reliability.severity = error
dotnet_analyzer_diagnostic.category-Security.severity = error
dotnet_analyzer_diagnostic.category-SingleFile.severity = error
dotnet_analyzer_diagnostic.category-Style.severity = error
dotnet_analyzer_diagnostic.category-Usage.severity = error
dotnet_code_quality_unused_parameters = all
dotnet_code_quality_unused_parameters = non_public # IDE0060: Remove unused parameter
dotnet_code_quality_unused_parameters = non_public
dotnet_code_quality.CAXXXX.api_surface = private, internal
dotnet_diagnostic.CA1825.severity = warning # CA1823: Avoid zero-length array allocations
dotnet_diagnostic.CA1001.severity = error # CA1001: Types that own disposable fields should be disposable
dotnet_diagnostic.CA1051.severity = error # CA1051: Do not declare visible instance fields
dotnet_diagnostic.CA1511.severity = warning # CA1511: Use 'ArgumentException.ThrowIfNullOrEmpty' instead of explicitly throwing a new exception instance
dotnet_diagnostic.CA1513.severity = warning # Use 'ObjectDisposedException.ThrowIf' instead of explicitly throwing a new exception instance
dotnet_diagnostic.CA1825.severity = warning # CA1825: Avoid zero-length array allocations
dotnet_diagnostic.CA1829.severity = warning # CA1829: Use Length/Count property instead of Count() when available
dotnet_diagnostic.CA1834.severity = warning # CA1834: Consider using 'StringBuilder.Append(char)' when applicable
dotnet_diagnostic.CA1860.severity = error # CA1860: Prefer comparing 'Count' to 0 rather than using 'Any()', both for clarity and for performance
dotnet_diagnostic.CA1862.severity = warning # CA1862: Prefer using 'string.Equals(string, StringComparison)' to perform a case-insensitive comparison, but keep in mind that this might cause subtle changes in behavior, so make sure to conduct thorough testing after applying the suggestion, or if culturally sensitive comparison is not required, consider using 'StringComparison.OrdinalIgnoreCase'
dotnet_diagnostic.CA1869.severity = none # CA1869: Avoid creating a new 'JsonSerializerOptions' instance for every serialization operation. Cache and reuse instances instead.
dotnet_diagnostic.CA2201.severity = none # CA2201: Exception type System.NullReferenceException is reserved by the runtime
dotnet_diagnostic.CA2254.severity = none # CA2254: The logging message template should not vary between calls to 'LoggerExtensions.LogInformation(ILogger, string?, params object?[])'
dotnet_diagnostic.IDE0001.severity = warning # IDE0001: Simplify name
dotnet_diagnostic.IDE0002.severity = warning # Simplify (member access) - System.Version.Equals("1", "2"); Version.Equals("1", "2");
dotnet_diagnostic.IDE0004.severity = warning # IDE0004: Cast is redundant.
dotnet_diagnostic.IDE0005.severity = warning # Using directive is unnecessary
dotnet_diagnostic.IDE0010.severity = none # Add missing cases to switch statement (IDE0010)
dotnet_diagnostic.IDE0028.severity = error # IDE0028: Collection initialization can be simplified
dotnet_diagnostic.IDE0031.severity = warning # Use null propagation (IDE0031)
dotnet_diagnostic.IDE0047.severity = warning # IDE0047: Parentheses can be removed
dotnet_diagnostic.IDE0060.severity = warning # IDE0060: Remove unused parameter
dotnet_diagnostic.IDE0048.severity = none # Parentheses preferences (IDE0047 and IDE0048)
dotnet_diagnostic.IDE0049.severity = warning # Use language keywords instead of framework type names for type references (IDE0049)
dotnet_diagnostic.IDE0051.severity = error # Private member '' is unused [, ]
dotnet_diagnostic.IDE0058.severity = warning # IDE0058: Expression value is never used
dotnet_diagnostic.IDE0060.severity = error # IDE0060: Remove unused parameter
dotnet_diagnostic.IDE0074.severity = warning # IDE0074: Use compound assignment
dotnet_diagnostic.IDE0130.severity = none # Namespace does not match folder structure (IDE0130)
dotnet_diagnostic.IDE0270.severity = warning # IDE0270: Null check can be simplified
dotnet_diagnostic.IDE0290.severity = none # Use primary constructor [Distance]csharp(IDE0290)
dotnet_diagnostic.IDE0300.severity = error # IDE0300: Collection initialization can be simplified
dotnet_diagnostic.IDE0301.severity = error #IDE0301: Collection initialization can be simplified
dotnet_diagnostic.IDE0305.severity = none # IDE0305: Collection initialization can be simplified
dotnet_naming_rule.abstract_method_should_be_pascal_case.severity = warning
dotnet_naming_rule.abstract_method_should_be_pascal_case.style = pascal_case
dotnet_naming_rule.abstract_method_should_be_pascal_case.symbols = abstract_method
@@ -215,7 +268,7 @@ dotnet_style_parentheses_in_other_binary_operators = always_for_clarity
dotnet_style_parentheses_in_other_operators = never_if_unnecessary
dotnet_style_parentheses_in_relational_binary_operators = always_for_clarity
dotnet_style_predefined_type_for_locals_parameters_members = true
dotnet_style_predefined_type_for_member_access = true
dotnet_style_predefined_type_for_member_access = true:warning
dotnet_style_prefer_auto_properties = true:warning
dotnet_style_prefer_compound_assignment = true:warning
dotnet_style_prefer_conditional_expression_over_assignment = false
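
The analyzer severities above only take effect outside the IDE if the build actually runs the analyzers; the commit messages mention EnforceCodeStyleInBuild and a dotnet format pre-commit hook, but the project wiring is not visible in this diff, so the commands below are only a sketch under that assumption:

```bash
# Sketch of a local check (assumes the csproj opts into EnforceCodeStyleInBuild;
# that property is not shown anywhere in this diff).
dotnet format --verify-no-changes              # fail if files drift from the .editorconfig formatting
dotnet build -p:EnforceCodeStyleInBuild=true   # run the IDExxxx style analyzers so the severities above apply at build time
```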

6
.gitignore vendored
View File

@@ -330,4 +330,8 @@ ASALocalRun/
.vscode/Helper/**
.kanbn/board*
.kanbn
.extensions-vscode
.extensions-vscode-oss
.extensions-vscode-insiders

View File

@@ -1,34 +0,0 @@
---
startedColumns:
- 'In Progress'
completedColumns:
- Done
dateFormat: mm/dd
taskTemplate: '^+^_${overdue ? ''^R'' : ''''}${name}^: ${relations ? (''\n^-^/^g'' + relations.reduce((accumulator, currentValue) => accumulator.task + currentValue.task + '' '', '''')) : ''''}'
created: "2023-10-01T18:07:00.085Z"
updated: "2023-10-01T18:07:00.085Z"
type: "Kanban"
---
# File File Helper
## Backlog
- [markdown-links-to-json](tasks/markdown-links-to-json.md)
- [markdown-to-json](tasks/markdown-to-json.md)
## Todo
- [find-orphan-links](tasks/find-orphan-links.md)
## In Progress
## Done
- [break-circular-references](tasks/break-circular-references.md)
- [populate-a-collection-of-file-info-front-matter-and-first-indent-of-1](tasks/populate-a-collection-of-file-info-front-matter-and-first-indent-of-1.md)
- [use-humanizer-on-title-of-links](tasks/use-humanizer-on-title-of-links.md)
- [convert-files-to-slug-name](tasks/convert-files-to-slug-name.md)
- [convert-wiki-links-to-markdown-links](tasks/convert-wiki-links-to-markdown-links.md)
- [add-update-date-in-front-matter](tasks/add-update-date-in-front-matter.md)
- [relative-path-to-relative-to-content](tasks/relative-path-to-relative-to-content.md)

312
.vscode/.json vendored Normal file
View File

@@ -0,0 +1,312 @@
[
{
"id": "403675d4-631e-40bb-900e-fae36d9c9cdd",
"deviceAssetId": "449501900719.jpg",
"ownerId": "fc9fd5a1-d1b3-4080-a21c-daf9b1c24593",
"deviceId": "Library Import",
"type": "IMAGE",
"originalPath": "/var/snap/immich-distribution/pictures/71/449501900719.jpg",
"previewPath": "/var/snap/immich-distribution/common/upload/thumbs/fc9fd5a1-d1b3-4080-a21c-daf9b1c24593/40/36/403675d4-631e-40bb-900e-fae36d9c9cdd-preview.jpeg",
"fileCreatedAt": "2016-12-02T02:34:23-07:00",
"fileModifiedAt": "2016-12-02T02:34:22-07:00",
"isFavorite": false,
"duration": null,
"thumbnailPath": "/var/snap/immich-distribution/common/upload/thumbs/fc9fd5a1-d1b3-4080-a21c-daf9b1c24593/40/36/403675d4-631e-40bb-900e-fae36d9c9cdd-thumbnail.webp",
"encodedVideoPath": "",
"checksum": "\\x28b46dbf4864b92f18800815cf8145c38d037e92",
"isVisible": true,
"livePhotoVideoId": null,
"updatedAt": "2024-04-25T13:13:20.074314-07:00",
"createdAt": "2024-04-25T10:14:24.253144-07:00",
"isArchived": false,
"originalFileName": "449501900719.jpg",
"sidecarPath": null,
"isReadOnly": true,
"thumbhash": "\\xe5a9090d8257787870788886886877776870760aa9",
"isOffline": false,
"libraryId": "af11ab7c-0782-4b7a-ba8e-fe68cf9a718d",
"isExternal": true,
"deletedAt": null,
"localDateTime": "2016-12-01T19:34:23-07:00",
"stackId": null
},
{
"id": "11ceb05f-8c94-46cd-9a7e-1c06be5a18b8",
"deviceAssetId": "015516300831.jpg",
"ownerId": "fc9fd5a1-d1b3-4080-a21c-daf9b1c24593",
"deviceId": "Library Import",
"type": "IMAGE",
"originalPath": "/var/snap/immich-distribution/pictures/83/015516300831.jpg",
"previewPath": "/var/snap/immich-distribution/common/upload/thumbs/fc9fd5a1-d1b3-4080-a21c-daf9b1c24593/11/ce/11ceb05f-8c94-46cd-9a7e-1c06be5a18b8-preview.jpeg",
"fileCreatedAt": "2014-05-03T14:44:20-07:00",
"fileModifiedAt": "2014-11-17T11:18:58-07:00",
"isFavorite": false,
"duration": null,
"thumbnailPath": "/var/snap/immich-distribution/common/upload/thumbs/fc9fd5a1-d1b3-4080-a21c-daf9b1c24593/11/ce/11ceb05f-8c94-46cd-9a7e-1c06be5a18b8-thumbnail.webp",
"encodedVideoPath": "",
"checksum": "\\x5b976715bab319b3bdc69d5f337701a062494e0b",
"isVisible": true,
"livePhotoVideoId": null,
"updatedAt": "2024-04-25T13:07:55.048725-07:00",
"createdAt": "2024-04-25T10:14:12.923101-07:00",
"isArchived": false,
"originalFileName": "015516300831.jpg",
"sidecarPath": null,
"isReadOnly": true,
"thumbhash": "\\x5a08120c00771777f87778979877597fbef365",
"isOffline": false,
"libraryId": "af11ab7c-0782-4b7a-ba8e-fe68cf9a718d",
"isExternal": true,
"deletedAt": null,
"localDateTime": "2014-05-03T14:44:20-07:00",
"stackId": null
},
{
"id": "e8e94a75-2b0c-48f6-b26a-76f5cbe46233",
"deviceAssetId": "985177500821.jpg",
"ownerId": "fc9fd5a1-d1b3-4080-a21c-daf9b1c24593",
"deviceId": "Library Import",
"type": "IMAGE",
"originalPath": "/var/snap/immich-distribution/pictures/82/985177500821.jpg",
"previewPath": "/var/snap/immich-distribution/common/upload/thumbs/fc9fd5a1-d1b3-4080-a21c-daf9b1c24593/e8/e9/e8e94a75-2b0c-48f6-b26a-76f5cbe46233-preview.jpeg",
"fileCreatedAt": "2004-04-28T20:31:40-07:00",
"fileModifiedAt": "2018-05-16T21:41:26.093-07:00",
"isFavorite": false,
"duration": null,
"thumbnailPath": "/var/snap/immich-distribution/common/upload/thumbs/fc9fd5a1-d1b3-4080-a21c-daf9b1c24593/e8/e9/e8e94a75-2b0c-48f6-b26a-76f5cbe46233-thumbnail.webp",
"encodedVideoPath": "",
"checksum": "\\x176b222fa88bc72aaf81031f3b7f73644b178de4",
"isVisible": true,
"livePhotoVideoId": null,
"updatedAt": "2024-04-25T13:07:55.667409-07:00",
"createdAt": "2024-04-25T10:14:12.945414-07:00",
"isArchived": false,
"originalFileName": "985177500821.jpg",
"sidecarPath": null,
"isReadOnly": true,
"thumbhash": "\\xdf07121d0687868f87378788887877887780670789",
"isOffline": false,
"libraryId": "af11ab7c-0782-4b7a-ba8e-fe68cf9a718d",
"isExternal": true,
"deletedAt": null,
"localDateTime": "2004-04-28T20:31:40-07:00",
"stackId": null
},
{
"id": "4091bebd-4c26-4d30-bd3a-f2160a54b451",
"deviceAssetId": "956694610829.jpg",
"ownerId": "fc9fd5a1-d1b3-4080-a21c-daf9b1c24593",
"deviceId": "Library Import",
"type": "IMAGE",
"originalPath": "/var/snap/immich-distribution/pictures/82/956694610829.jpg",
"previewPath": "/var/snap/immich-distribution/common/upload/thumbs/fc9fd5a1-d1b3-4080-a21c-daf9b1c24593/40/91/4091bebd-4c26-4d30-bd3a-f2160a54b451-preview.jpeg",
"fileCreatedAt": "2010-07-05T09:10:13.2-07:00",
"fileModifiedAt": "2010-07-05T08:10:12-07:00",
"isFavorite": false,
"duration": null,
"thumbnailPath": "/var/snap/immich-distribution/common/upload/thumbs/fc9fd5a1-d1b3-4080-a21c-daf9b1c24593/40/91/4091bebd-4c26-4d30-bd3a-f2160a54b451-thumbnail.webp",
"encodedVideoPath": "",
"checksum": "\\xc2eb5667d6da5ead1be71c51064ea293ad413ea6",
"isVisible": true,
"livePhotoVideoId": null,
"updatedAt": "2024-04-25T13:07:56.364375-07:00",
"createdAt": "2024-04-25T10:14:12.976197-07:00",
"isArchived": false,
"originalFileName": "956694610829.jpg",
"sidecarPath": null,
"isReadOnly": true,
"thumbhash": "\\x12080a0d82668886808887867877877867807906b7",
"isOffline": false,
"libraryId": "af11ab7c-0782-4b7a-ba8e-fe68cf9a718d",
"isExternal": true,
"deletedAt": null,
"localDateTime": "2010-07-05T09:10:13.2-07:00",
"stackId": null
},
{
"id": "c7bf1944-9f71-4808-8ff9-b0f972e907b0",
"deviceAssetId": "948800300821.jpg",
"ownerId": "fc9fd5a1-d1b3-4080-a21c-daf9b1c24593",
"deviceId": "Library Import",
"type": "IMAGE",
"originalPath": "/var/snap/immich-distribution/pictures/82/948800300821.jpg",
"previewPath": "/var/snap/immich-distribution/common/upload/thumbs/fc9fd5a1-d1b3-4080-a21c-daf9b1c24593/c7/bf/c7bf1944-9f71-4808-8ff9-b0f972e907b0-preview.jpeg",
"fileCreatedAt": "2009-10-09T05:35:00.2-07:00",
"fileModifiedAt": "2009-10-09T04:35:00-07:00",
"isFavorite": false,
"duration": null,
"thumbnailPath": "/var/snap/immich-distribution/common/upload/thumbs/fc9fd5a1-d1b3-4080-a21c-daf9b1c24593/c7/bf/c7bf1944-9f71-4808-8ff9-b0f972e907b0-thumbnail.webp",
"encodedVideoPath": "",
"checksum": "\\x3c5c87ab7e442d1f7a0f2a12678c1d6be00dbc7b",
"isVisible": true,
"livePhotoVideoId": null,
"updatedAt": "2024-04-25T13:07:56.553262-07:00",
"createdAt": "2024-04-25T10:14:12.982686-07:00",
"isArchived": false,
"originalFileName": "948800300821.jpg",
"sidecarPath": null,
"isReadOnly": true,
"thumbhash": "\\x103806258e02bd47937779a478997768fd3bcb9fa4",
"isOffline": false,
"libraryId": "af11ab7c-0782-4b7a-ba8e-fe68cf9a718d",
"isExternal": true,
"deletedAt": null,
"localDateTime": "2009-10-09T05:35:00.2-07:00",
"stackId": null
},
{
"id": "4f5ea703-47e9-48c6-9366-0cc10630dac2",
"deviceAssetId": "898525300821.jpg",
"ownerId": "fc9fd5a1-d1b3-4080-a21c-daf9b1c24593",
"deviceId": "Library Import",
"type": "IMAGE",
"originalPath": "/var/snap/immich-distribution/pictures/82/898525300821.jpg",
"previewPath": "/var/snap/immich-distribution/common/upload/thumbs/fc9fd5a1-d1b3-4080-a21c-daf9b1c24593/4f/5e/4f5ea703-47e9-48c6-9366-0cc10630dac2-preview.jpeg",
"fileCreatedAt": "2020-12-25T08:35:04.92-07:00",
"fileModifiedAt": "2020-12-25T08:35:04-07:00",
"isFavorite": false,
"duration": null,
"thumbnailPath": "/var/snap/immich-distribution/common/upload/thumbs/fc9fd5a1-d1b3-4080-a21c-daf9b1c24593/4f/5e/4f5ea703-47e9-48c6-9366-0cc10630dac2-thumbnail.webp",
"encodedVideoPath": "",
"checksum": "\\xf706263e450c9a26feaeba2dd14fe0fd8f22e623",
"isVisible": true,
"livePhotoVideoId": null,
"updatedAt": "2024-04-25T13:07:58.530848-07:00",
"createdAt": "2024-04-25T10:14:13.048275-07:00",
"isArchived": false,
"originalFileName": "898525300821.jpg",
"sidecarPath": null,
"isReadOnly": true,
"thumbhash": "\\x5518060d8208976849959a99687678687f8dae48f6",
"isOffline": false,
"libraryId": "af11ab7c-0782-4b7a-ba8e-fe68cf9a718d",
"isExternal": true,
"deletedAt": null,
"localDateTime": "2020-12-25T01:35:04.92-07:00",
"stackId": null
},
{
"id": "86c813ad-2a1c-489f-8fc2-0b76a21889c0",
"deviceAssetId": "864710800829.jpg",
"ownerId": "fc9fd5a1-d1b3-4080-a21c-daf9b1c24593",
"deviceId": "Library Import",
"type": "IMAGE",
"originalPath": "/var/snap/immich-distribution/pictures/82/864710800829.jpg",
"previewPath": "/var/snap/immich-distribution/common/upload/thumbs/fc9fd5a1-d1b3-4080-a21c-daf9b1c24593/86/c8/86c813ad-2a1c-489f-8fc2-0b76a21889c0-preview.jpeg",
"fileCreatedAt": "2004-04-28T20:00:46-07:00",
"fileModifiedAt": "2004-04-28T19:00:46-07:00",
"isFavorite": false,
"duration": null,
"thumbnailPath": "/var/snap/immich-distribution/common/upload/thumbs/fc9fd5a1-d1b3-4080-a21c-daf9b1c24593/86/c8/86c813ad-2a1c-489f-8fc2-0b76a21889c0-thumbnail.webp",
"encodedVideoPath": "",
"checksum": "\\x9061edbf75f11526cef2c832ba339267509eaec4",
"isVisible": true,
"livePhotoVideoId": null,
"updatedAt": "2024-04-25T13:07:59.171233-07:00",
"createdAt": "2024-04-25T10:14:13.078169-07:00",
"isArchived": false,
"originalFileName": "864710800829.jpg",
"sidecarPath": null,
"isReadOnly": true,
"thumbhash": "\\x140812250674874f87777669788778887a93a0470a",
"isOffline": false,
"libraryId": "af11ab7c-0782-4b7a-ba8e-fe68cf9a718d",
"isExternal": true,
"deletedAt": null,
"localDateTime": "2004-04-28T20:00:46-07:00",
"stackId": null
},
{
"id": "b65121d8-4a74-4f27-9d6f-c582ffc444dc",
"deviceAssetId": "862274900829.jpg",
"ownerId": "fc9fd5a1-d1b3-4080-a21c-daf9b1c24593",
"deviceId": "Library Import",
"type": "IMAGE",
"originalPath": "/var/snap/immich-distribution/pictures/82/862274900829.jpg",
"previewPath": "/var/snap/immich-distribution/common/upload/thumbs/fc9fd5a1-d1b3-4080-a21c-daf9b1c24593/b6/51/b65121d8-4a74-4f27-9d6f-c582ffc444dc-preview.jpeg",
"fileCreatedAt": "2018-08-17T22:50:55.15-07:00",
"fileModifiedAt": "2022-11-03T20:25:09.161-07:00",
"isFavorite": false,
"duration": null,
"thumbnailPath": "/var/snap/immich-distribution/common/upload/thumbs/fc9fd5a1-d1b3-4080-a21c-daf9b1c24593/b6/51/b65121d8-4a74-4f27-9d6f-c582ffc444dc-thumbnail.webp",
"encodedVideoPath": "",
"checksum": "\\xd4f623e97acd727868fe0e191c170e449d4456a5",
"isVisible": true,
"livePhotoVideoId": null,
"updatedAt": "2024-04-25T13:07:59.640678-07:00",
"createdAt": "2024-04-25T10:14:13.087927-07:00",
"isArchived": false,
"originalFileName": "862274900829.jpg",
"sidecarPath": null,
"isReadOnly": true,
"thumbhash": "\\x99180a0d045977a077687887777678876a806b0867",
"isOffline": false,
"libraryId": "af11ab7c-0782-4b7a-ba8e-fe68cf9a718d",
"isExternal": true,
"deletedAt": null,
"localDateTime": "2018-08-17T15:50:55.15-07:00",
"stackId": null
},
{
"id": "09fa281c-b828-47f6-8fbb-a5856edb63b5",
"deviceAssetId": "840656100829.jpg",
"ownerId": "fc9fd5a1-d1b3-4080-a21c-daf9b1c24593",
"deviceId": "Library Import",
"type": "IMAGE",
"originalPath": "/var/snap/immich-distribution/pictures/82/840656100829.jpg",
"previewPath": "/var/snap/immich-distribution/common/upload/thumbs/fc9fd5a1-d1b3-4080-a21c-daf9b1c24593/09/fa/09fa281c-b828-47f6-8fbb-a5856edb63b5-preview.jpeg",
"fileCreatedAt": "2019-05-30T14:56:36.82-07:00",
"fileModifiedAt": "2019-05-30T14:56:36-07:00",
"isFavorite": false,
"duration": null,
"thumbnailPath": "/var/snap/immich-distribution/common/upload/thumbs/fc9fd5a1-d1b3-4080-a21c-daf9b1c24593/09/fa/09fa281c-b828-47f6-8fbb-a5856edb63b5-thumbnail.webp",
"encodedVideoPath": "",
"checksum": "\\xd215606441cefcc295130262bad9fed96d9ac40e",
"isVisible": true,
"livePhotoVideoId": null,
"updatedAt": "2024-04-25T13:08:00.211274-07:00",
"createdAt": "2024-04-25T10:14:13.104556-07:00",
"isArchived": false,
"originalFileName": "840656100829.jpg",
"sidecarPath": null,
"isReadOnly": true,
"thumbhash": "\\xd6070a0d826f62873c788799993a7777137f679058",
"isOffline": false,
"libraryId": "af11ab7c-0782-4b7a-ba8e-fe68cf9a718d",
"isExternal": true,
"deletedAt": null,
"localDateTime": "2019-05-30T07:56:36.82-07:00",
"stackId": null
},
{
"id": "8c239624-2bea-479d-b7fa-9f2cd5ebc9b7",
"deviceAssetId": "812813100821.jpg",
"ownerId": "fc9fd5a1-d1b3-4080-a21c-daf9b1c24593",
"deviceId": "Library Import",
"type": "IMAGE",
"originalPath": "/var/snap/immich-distribution/pictures/82/812813100821.jpg",
"previewPath": "/var/snap/immich-distribution/common/upload/thumbs/fc9fd5a1-d1b3-4080-a21c-daf9b1c24593/8c/23/8c239624-2bea-479d-b7fa-9f2cd5ebc9b7-preview.jpeg",
"fileCreatedAt": "2021-08-28T16:29:07.65-07:00",
"fileModifiedAt": "2021-08-28T16:29:08-07:00",
"isFavorite": false,
"duration": null,
"thumbnailPath": "/var/snap/immich-distribution/common/upload/thumbs/fc9fd5a1-d1b3-4080-a21c-daf9b1c24593/8c/23/8c239624-2bea-479d-b7fa-9f2cd5ebc9b7-thumbnail.webp",
"encodedVideoPath": "",
"checksum": "\\x25a9ffe84298f0e3e7151aaf2eb339908574c035",
"isVisible": true,
"livePhotoVideoId": null,
"updatedAt": "2024-04-25T13:08:00.918411-07:00",
"createdAt": "2024-04-25T10:14:13.134038-07:00",
"isArchived": false,
"originalFileName": "812813100821.jpg",
"sidecarPath": null,
"isReadOnly": true,
"thumbhash": "\\x21080e0d825878767f9678bf7747799612c3b0308a",
"isOffline": false,
"libraryId": "af11ab7c-0782-4b7a-ba8e-fe68cf9a718d",
"isExternal": true,
"deletedAt": null,
"localDateTime": "2021-08-28T09:29:07.65-07:00",
"stackId": null
}
]

23
.vscode/launch.json vendored
View File

@@ -9,11 +9,12 @@
"type": "coreclr",
"request": "launch",
"preLaunchTask": "build",
"program": "${workspaceFolder}/bin/Debug/net7.0/win-x64/File-Folder-Helper.dll",
"program": "${workspaceFolder}/bin/Debug/net8.0/win-x64/File-Folder-Helper.dll",
"args": [
"s",
"\\\\messv02ecc1.ec.local\\EC_EDA\\Staging\\Traces\\HTR-PLC\\R72-PLC\\PollPath"
],
"K",
".kanbn"
],
"cwd": "${workspaceFolder}",
"console": "integratedTerminal",
"stopAtEntry": false
@@ -24,18 +25,4 @@
"request": "attach"
}
]
}
// dotnet build
// dotnet run "s" "\\messv02ecc1.ec.local\EC_EDA\Staging\Traces\HTR-PLC\R45-PLC\PollPath"
// dotnet run "s" "\\messv02ecc1.ec.local\EC_EDA\Staging\Traces\HTR-PLC\R47-PLC\PollPath"
// dotnet run "s" "\\messv02ecc1.ec.local\EC_EDA\Staging\Traces\HTR-PLC\R49-PLC\PollPath"
// dotnet run "s" "\\messv02ecc1.ec.local\EC_EDA\Staging\Traces\HTR-PLC\R51-PLC\PollPath"
// dotnet run "s" "\\messv02ecc1.ec.local\EC_EDA\Staging\Traces\HTR-PLC\R70-PLC\PollPath"
// dotnet run "s" "\\messv02ecc1.ec.local\EC_EDA\Staging\Traces\HTR-PLC\R72-PLC\PollPath"
// dotnet run "s" "\\messv02ecc1.ec.local\EC_EDA\Staging\Traces\HTR-PLC\R73-PLC\PollPath"
// dotnet run "s" "\\messv02ecc1.ec.local\EC_EDA\Staging\Traces\HTR-PLC\R74-PLC\PollPath"
// dotnet run "s" "C:/Users/phares/.nuget/packages"
// dotnet run "s" "D:/ProgramData/EC_EAFRepository/nupkg"
// dotnet run "s" "D:/Baget/packages"
// dotnet run "s" "\\messdv002.na.infineon.com\Candela\BaGet\packages"
// dotnet run "s" "T:/MESAFIBACKLOG/06_SourceCode/MESAFIBACKLOG/Adaptation/.kanbn"
}

24
.vscode/mklink.md vendored Normal file
View File

@@ -0,0 +1,24 @@
---
type: "note"
created: "2023-10-20T03:53:13.742Z"
updated: "2023-10-20T04:00:37.259Z"
---
# mklink
```bash
mklink /J "L:\DevOps\Mesa_FI\File-Folder-Helper\.kanbn" "D:\5-Other-Small\Kanban\File-Folder-Helper"
```
```bash
mklink /J "L:\DevOps\Mesa_FI\File-Folder-Helper\.kanbn" "D:\5-Other-Small\Kanban\File-Folder-Helper"
```
```bash
del "L:\DevOps\Mesa_FI\File-Folder-Helper\.extensions-vscode"
del "L:\DevOps\Mesa_FI\File-Folder-Helper\.extensions-vscode-oss"
del "L:\DevOps\Mesa_FI\File-Folder-Helper\.extensions-vscode-insiders"
mklink /J "L:\DevOps\Mesa_FI\File-Folder-Helper\.extensions-vscode" "C:\Users\phares\.vscode\extensions\ifx.type-script-helper-1.6.2"
mklink /J "L:\DevOps\Mesa_FI\File-Folder-Helper\.extensions-vscode-oss" "C:\Users\phares\.vscode-oss\extensions\ifx.type-script-helper-1.6.2"
mklink /J "L:\DevOps\Mesa_FI\File-Folder-Helper\.extensions-vscode-insiders" "C:\Users\phares\.vscode-insiders\extensions\ifx.type-script-helper-1.6.2"
```

32
.vscode/pre-commit vendored Normal file
View File

@@ -0,0 +1,32 @@
#!/bin/sh
echo "*****eclint fix******"
before=$(git ls-files --others --modified --deleted --exclude-standard | wc -l)
echo "before: $before"
dotnet format whitespace
err=$?
case $err in
0) echo success ;;
1) echo fail ;;
2) echo "unexpected failure" ;;
*) echo "I never thought this would happen" ;;
esac
if [ $err -ne 0 ]; then
exit $err
fi
after=$(git ls-files --others --modified --deleted --exclude-standard | wc -l)
echo "after: $after"
if [ $after -gt $before ]; then
files=$(git ls-files --others --modified --deleted --exclude-standard)
echo "Files not formatted correctly: $files"
exit 1
fi
exit $err
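
A hook script like the one above only runs if git can find it under the repository's hooks path; this diff does not show how it gets installed, so the steps below are an assumption rather than part of the change:

```bash
# Assumed setup: copy the tracked script into the local hooks directory and
# make it executable (git silently skips non-executable hooks).
cp .vscode/pre-commit .git/hooks/pre-commit
chmod +x .git/hooks/pre-commit

# Alternative: point git at a tracked hooks directory instead of copying.
# git config core.hooksPath .vscode
```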

View File

@@ -19,16 +19,19 @@
"FAMS",
"GIVN",
"HUSB",
"Immich",
"INDI",
"Infineon",
"Kanban",
"kanbn",
"Kofax",
"NpgSql",
"NSFX",
"OBJE",
"onenote",
"pged",
"Phares",
"Rijndael",
"Serilog",
"SUBM",
"SURN",

48
.vscode/tasks.json vendored
View File

@@ -22,8 +22,8 @@
"-p",
"${workspaceFolder}/File-Folder-Helper.csproj",
"set",
"asdf",
"123"
"_UserSecretsId",
"0c43f9aa-96e9-4298-967c-ed069d79e262"
],
"problemMatcher": "$msCompile"
},
@@ -42,6 +42,16 @@
],
"problemMatcher": "$msCompile"
},
{
"label": "Format-Whitespaces",
"command": "dotnet",
"type": "process",
"args": [
"format",
"whitespace"
],
"problemMatcher": "$msCompile"
},
{
"label": "build",
"command": "dotnet",
@@ -89,8 +99,6 @@
"-c",
"Release",
"-p:PublishAot=true",
"--source",
"https://pkgs.dev.azure.com/dnceng/public/_packaging/dotnet7/nuget/v3/index.json",
"${workspaceFolder}/File-Folder-Helper.csproj",
"/property:GenerateFullPaths=true",
"/consoleloggerparameters:NoSummary"
@@ -100,91 +108,91 @@
{
"label": "File-Folder-Helper AOT s H Run Data Repository",
"type": "shell",
"command": "& L:/DevOps/Mesa_FI/File-Folder-Helper/bin/Release/net7.0/win-x64/publish/File-Folder-Helper.exe s H '\\\\messa01ec.ec.local\\apps\\Metrology\\Run Data Repository'",
"command": "L:/DevOps/Mesa_FI/File-Folder-Helper/bin/Release/net8.0/win-x64/publish/File-Folder-Helper.exe s H '\\\\messa01ec.infineon.com\\apps\\Metrology\\Run Data Repository'",
"problemMatcher": []
},
{
"label": "File-Folder-Helper AOT s J Verdaccio",
"type": "shell",
"command": "& L:/DevOps/Mesa_FI/File-Folder-Helper/bin/Release/net7.0/win-x64/publish/File-Folder-Helper.exe s J 'L:/Verdaccio/storage'",
"command": "L:/DevOps/Mesa_FI/File-Folder-Helper/bin/Release/net8.0/win-x64/publish/File-Folder-Helper.exe s J 'L:/Verdaccio/storage'",
"problemMatcher": []
},
{
"label": "File-Folder-Helper AOT s M Self .Kanbn Tasks",
"type": "shell",
"command": "& L:/DevOps/Mesa_FI/File-Folder-Helper/bin/Release/net7.0/win-x64/publish/File-Folder-Helper.exe s M '.kanbn/tasks'",
"command": "L:/DevOps/Mesa_FI/File-Folder-Helper/bin/Release/net8.0/win-x64/publish/File-Folder-Helper.exe s M L:/DevOps/Mesa_FI/File-Folder-Helper L:/DevOps/Mesa_FI/File-Folder-Helper/.kanbn/tasks",
"problemMatcher": []
},
{
"label": "File-Folder-Helper AOT s S BaGet",
"type": "shell",
"command": "& L:/DevOps/Mesa_FI/File-Folder-Helper/bin/Release/net7.0/win-x64/publish/File-Folder-Helper.exe s S 'L:/BaGet/packages'",
"command": "L:/DevOps/Mesa_FI/File-Folder-Helper/bin/Release/net8.0/win-x64/publish/File-Folder-Helper.exe s S 'L:/BaGet/packages'",
"problemMatcher": []
},
{
"label": "File-Folder-Helper AOT s V Helpers",
"label": "File-Folder-Helper AOT s X SortCodeMethods",
"type": "shell",
"command": "& L:/DevOps/Mesa_FI/File-Folder-Helper/bin/Release/net7.0/win-x64/publish/File-Folder-Helper.exe s V Helpers",
"command": "L:/DevOps/Mesa_FI/File-Folder-Helper/bin/Release/net8.0/win-x64/publish/File-Folder-Helper.exe s X L:/DevOps/Mesa_FI/File-Folder-Helper Day-Helper-2024-01-08 L:/DevOps/Mesa_FI/File-Folder-Helper/Day/2024-Q2",
"problemMatcher": []
},
{
"label": "File-Folder-Helper AOT s H MET08ANLYSDIFAAST230",
"type": "shell",
"command": "& L:/DevOps/Mesa_FI/File-Folder-Helper/bin/Release/net7.0/win-x64/publish/File-Folder-Helper.exe s H '\\\\messa01ec.ec.local\\apps\\Metrology\\Run Data Repository\\MET08ANLYSDIFAAST230\\Source\\MET08ANLYSDIFAAST230'",
"command": "L:/DevOps/Mesa_FI/File-Folder-Helper/bin/Release/net8.0/win-x64/publish/File-Folder-Helper.exe s H '\\\\messa01ec.infineon.com\\apps\\Metrology\\Run Data Repository\\MET08ANLYSDIFAAST230\\Source\\MET08ANLYSDIFAAST230'",
"problemMatcher": []
},
{
"label": "File-Folder-Helper AOT s H MET08DDUPSFS6420",
"type": "shell",
"command": "& L:/DevOps/Mesa_FI/File-Folder-Helper/bin/Release/net7.0/win-x64/publish/File-Folder-Helper.exe s H '\\\\messa01ec.ec.local\\apps\\Metrology\\Run Data Repository\\MET08DDUPSFS6420\\Source\\MET08DDUPSFS6420'",
"command": "L:/DevOps/Mesa_FI/File-Folder-Helper/bin/Release/net8.0/win-x64/publish/File-Folder-Helper.exe s H '\\\\messa01ec.infineon.com\\apps\\Metrology\\Run Data Repository\\MET08DDUPSFS6420\\Source\\MET08DDUPSFS6420'",
"problemMatcher": []
},
{
"label": "File-Folder-Helper AOT s H MET08DDUPSP1TBI",
"type": "shell",
"command": "& L:/DevOps/Mesa_FI/File-Folder-Helper/bin/Release/net7.0/win-x64/publish/File-Folder-Helper.exe s H '\\\\messa01ec.ec.local\\apps\\Metrology\\Run Data Repository\\MET08DDUPSP1TBI\\Source\\MET08DDUPSP1TBI'",
"command": "L:/DevOps/Mesa_FI/File-Folder-Helper/bin/Release/net8.0/win-x64/publish/File-Folder-Helper.exe s H '\\\\messa01ec.infineon.com\\apps\\Metrology\\Run Data Repository\\MET08DDUPSP1TBI\\Source\\MET08DDUPSP1TBI'",
"problemMatcher": []
},
{
"label": "File-Folder-Helper AOT s H MET08RESIHGCV",
"type": "shell",
"command": "& L:/DevOps/Mesa_FI/File-Folder-Helper/bin/Release/net7.0/win-x64/publish/File-Folder-Helper.exe s H '\\\\messa01ec.ec.local\\apps\\Metrology\\Run Data Repository\\MET08RESIHGCV\\Source\\MET08RESIHGCV'",
"command": "L:/DevOps/Mesa_FI/File-Folder-Helper/bin/Release/net8.0/win-x64/publish/File-Folder-Helper.exe s H '\\\\messa01ec.infineon.com\\apps\\Metrology\\Run Data Repository\\MET08RESIHGCV\\Source\\MET08RESIHGCV'",
"problemMatcher": []
},
{
"label": "File-Folder-Helper AOT s H MET08RESIMAPCDE",
"type": "shell",
"command": "& L:/DevOps/Mesa_FI/File-Folder-Helper/bin/Release/net7.0/win-x64/publish/File-Folder-Helper.exe s H '\\\\messa01ec.ec.local\\apps\\Metrology\\Run Data Repository\\MET08RESIMAPCDE\\Source\\MET08RESIMAPCDE'",
"command": "L:/DevOps/Mesa_FI/File-Folder-Helper/bin/Release/net8.0/win-x64/publish/File-Folder-Helper.exe s H '\\\\messa01ec.infineon.com\\apps\\Metrology\\Run Data Repository\\MET08RESIMAPCDE\\Source\\MET08RESIMAPCDE'",
"problemMatcher": []
},
{
"label": "File-Folder-Helper AOT s H MET08RESISRP2100",
"type": "shell",
"command": "& L:/DevOps/Mesa_FI/File-Folder-Helper/bin/Release/net7.0/win-x64/publish/File-Folder-Helper.exe s H '\\\\messa01ec.ec.local\\apps\\Metrology\\Run Data Repository\\MET08RESISRP2100\\Source\\MET08RESISRP2100'",
"command": "L:/DevOps/Mesa_FI/File-Folder-Helper/bin/Release/net8.0/win-x64/publish/File-Folder-Helper.exe s H '\\\\messa01ec.infineon.com\\apps\\Metrology\\Run Data Repository\\MET08RESISRP2100\\Source\\MET08RESISRP2100'",
"problemMatcher": []
},
{
"label": "File-Folder-Helper AOT s H MET08THFTIRQS408M",
"type": "shell",
"command": "& L:/DevOps/Mesa_FI/File-Folder-Helper/bin/Release/net7.0/win-x64/publish/File-Folder-Helper.exe s H '\\\\messa01ec.ec.local\\apps\\Metrology\\Run Data Repository\\MET08THFTIRQS408M\\Source\\MET08THFTIRQS408M'",
"command": "L:/DevOps/Mesa_FI/File-Folder-Helper/bin/Release/net8.0/win-x64/publish/File-Folder-Helper.exe s H '\\\\messa01ec.infineon.com\\apps\\Metrology\\Run Data Repository\\MET08THFTIRQS408M\\Source\\MET08THFTIRQS408M'",
"problemMatcher": []
},
{
"label": "File-Folder-Helper AOT s H MET08THFTIRSTRATUS",
"type": "shell",
"command": "& L:/DevOps/Mesa_FI/File-Folder-Helper/bin/Release/net7.0/win-x64/publish/File-Folder-Helper.exe s H '\\\\messa01ec.ec.local\\apps\\Metrology\\Run Data Repository\\MET08THFTIRSTRATUS\\Source\\MET08THFTIRSTRATUS'",
"command": "L:/DevOps/Mesa_FI/File-Folder-Helper/bin/Release/net8.0/win-x64/publish/File-Folder-Helper.exe s H '\\\\messa01ec.infineon.com\\apps\\Metrology\\Run Data Repository\\MET08THFTIRSTRATUS\\Source\\MET08THFTIRSTRATUS'",
"problemMatcher": []
},
{
"label": "File-Folder-Helper AOT s H WaferCounter",
"type": "shell",
"command": "& L:/DevOps/Mesa_FI/File-Folder-Helper/bin/Release/net7.0/win-x64/publish/File-Folder-Helper.exe s H '\\\\messa01ec.ec.local\\apps\\WaferCounter\\BackupFiles'",
"command": "L:/DevOps/Mesa_FI/File-Folder-Helper/bin/Release/net8.0/win-x64/publish/File-Folder-Helper.exe s H '\\\\messa01ec.infineon.com\\apps\\WaferCounter\\BackupFiles'",
"problemMatcher": []
},
{
"label": "File-Folder-Helper AOT s F Staging _Logs",
"type": "shell",
"command": "& L:/DevOps/Mesa_FI/File-Folder-Helper/bin/Release/net7.0/win-x64/publish/File-Folder-Helper.exe s F '\\\\messv02ecc1.ec.local\\EC_EAFLog\\Staging\\_ Logs'",
"command": "L:/DevOps/Mesa_FI/File-Folder-Helper/bin/Release/net8.0/win-x64/publish/File-Folder-Helper.exe s F '\\\\messv02ecc1.ec.local\\EC_EAFLog\\Staging\\_ Logs'",
"problemMatcher": []
},
{

View File

@@ -1,9 +1,9 @@
using Microsoft.Extensions.Logging;
using System.Net.Http.Json;
namespace File_Folder_Helper.Helpers;
namespace File_Folder_Helper.Day;
internal static class HelperDownload
internal static class Helper20230906
{
internal static void SaveJson(ILogger log, string argsZero)
@@ -43,8 +43,10 @@ internal static class HelperDownload
httpRequestMessage.Headers.Add(segments[0], segments[1]);
}
}
#pragma warning disable IL2026, IL3050
if (jsonBodyLine is not null)
httpRequestMessage.Content = JsonContent.Create(lines[jsonBodyLine.Value]);
#pragma warning restore IL2026, IL3050
httpClient = new(new HttpClientHandler { UseCookies = false }) { BaseAddress = new Uri(lines[0]) };
if (userAgent is not null)
httpClient.DefaultRequestHeaders.UserAgent.ParseAdd(userAgent);

View File

@@ -0,0 +1,31 @@
using Microsoft.Extensions.Logging;
using System.Collections.ObjectModel;
namespace File_Folder_Helper.Day;
internal static class Helper20231010
{
internal static void HgCV(ILogger<Worker> logger, string argsZero)
{
long ticks;
DateTime dateTime;
TimeSpan timeSpan;
string[] segments;
DirectoryInfo argsZeroDirectoryInfo = new(argsZero);
int padLength = "64-612464-4626_2023-10-10_06;37_AM_2529222011".Length;
ReadOnlyCollection<DirectoryInfo> directoryInfoCollection = new(argsZeroDirectoryInfo.GetDirectories().OrderBy(l => l.LastWriteTime).ToArray());
foreach (DirectoryInfo directoryInfo in directoryInfoCollection)
{
segments = directoryInfo.Name.Split('_');
if (!long.TryParse(segments[^1], out ticks))
continue;
dateTime = new(ticks);
timeSpan = new(ticks);
dateTime = new(ticks);
logger.LogInformation("{directory.Name} at {LastWriteTime} took {TotalMinutes} minutes(s)", directoryInfo.Name.PadRight(padLength, ' '), directoryInfo.LastWriteTime, Math.Round(timeSpan.TotalMinutes, 3));
}
}
}

View File

@@ -0,0 +1,37 @@
using Microsoft.Extensions.Logging;
using System.Globalization;
namespace File_Folder_Helper.Day;
internal static class Helper20231016
{
internal static void MoveDirectory(ILogger<Worker> logger, string argsZero)
{
string weekOfYear;
string checkDirectory;
long ticks = DateTime.Now.AddHours(-84).Ticks;
DirectoryInfo argsZeroDirectoryInfo = new(argsZero);
Calendar calendar = new CultureInfo("en-US").Calendar;
DirectoryInfo[] directoryInfoCollection = argsZeroDirectoryInfo.GetDirectories();
string fileShareDirectory = Path.GetDirectoryName(argsZero) ?? throw new Exception();
string archiveDirectory = Path.Combine(fileShareDirectory, "Archive");
if (!Directory.Exists(archiveDirectory))
throw new NotSupportedException();
foreach (DirectoryInfo directoryInfo in directoryInfoCollection)
{
foreach (DirectoryInfo directory in directoryInfo.GetDirectories())
{
if (directory.CreationTime.Ticks > ticks)
continue;
weekOfYear = calendar.GetWeekOfYear(directory.CreationTime, CalendarWeekRule.FirstDay, DayOfWeek.Sunday).ToString("00");
checkDirectory = Path.Combine(archiveDirectory, directoryInfo.Name, $"{directory.CreationTime.Year}_Week_{weekOfYear}", directory.CreationTime.ToString("yyyy-MM-dd"));
if (!Directory.Exists(checkDirectory))
continue;
Directory.Move(directory.FullName, Path.Combine(checkDirectory, directory.Name));
logger.LogInformation("{directory.CreationTime} - {directory.Name}", directory.CreationTime, directory.Name);
}
}
}
}

View File

@@ -0,0 +1,125 @@
using Microsoft.Extensions.Logging;
using System.Diagnostics;
using System.Text;
using System.Text.Json;
using System.Text.Json.Serialization;
namespace File_Folder_Helper.Day;
internal record Drive(string Share,
bool Use,
string User);
internal static class Helper20231024
{
internal static void NetUse(ILogger<Worker> logger, string argsZero)
{
Process? process;
string arguments;
string[] segments;
string standardError;
string standardOutput;
string fileName = "net";
StringBuilder stringBuilder = new();
string json = File.ReadAllText(Path.Combine(argsZero, ".json"));
string decrypted = File.ReadAllText(Path.Combine(argsZero, ".password"));
List<Drive> drives = JsonSerializer.Deserialize(json, DrivesSourceGenerationContext.Default.ListDrive) ?? throw new NullReferenceException();
foreach (Drive drive in drives)
{
if (!drive.Use)
continue;
arguments = $"use * \"{drive.Share}\" /p:yes /user:{drive.User} {decrypted}";
_ = stringBuilder.Clear();
segments = arguments.Split(' ');
for (int j = 0; j < segments.Length - 1; j++)
_ = stringBuilder.Append(segments[j]).Append(' ');
logger.LogInformation("// {stringBuilder}", stringBuilder);
ProcessStartInfo processStartInfo = new(fileName, arguments)
{
RedirectStandardError = true,
RedirectStandardOutput = true,
UseShellExecute = false
};
try
{
process = Process.Start(processStartInfo);
if (process is null)
continue;
for (int j = 1; j < 45; j++)
{
_ = process.WaitForExit(1000);
if (process.HasExited)
break;
}
if (!process.HasExited)
logger.LogError("// Never exited!");
else
{
standardError = process.StandardError.ReadToEnd();
standardOutput = process.StandardOutput.ReadToEnd();
logger.LogInformation("// {standardError}{Environment.NewLine}{Environment.NewLine}// {standardOutput}", standardError, Environment.NewLine, Environment.NewLine, standardOutput);
try
{
_ = Directory.CreateDirectory($"{drive.Share}\\Tmp\\Phares");
string reg = """
Windows Registry Editor Version 5.00
[HKEY_LOCAL_MACHINE\SOFTWARE\WOW6432Node\ODBC\ODBC.INI\SPCEPIWORLD]
"Driver"="C:\\WINDOWS\\system32\\SQLSRV32.dll"
"Description"="InfinityQS SPC (Si)"
"Server"="messqlec1.infineon.com\\PROD1,53959"
"Database"="SPCEPIWORLD"
"LastUser"=""
"Trusted_Connection"="Yes"
[HKEY_LOCAL_MACHINE\SOFTWARE\WOW6432Node\ODBC\ODBC.INI\ODBC Data Sources]
"SPCEPIWORLD"="SQL Server"
""";
string bat = """
@ECHO OFF
NET USE G: /delete
NET USE H: /delete
NET USE M: /delete
NET USE P: /delete
NET USE R: /delete
NET USE T: /delete
NET USE V: /delete
NET USE Y: /delete
NET USE G: \\mesfs.infineon.com\EC_Engineering /PERSISTENT:YES
NET USE H: \\mesfs.infineon.com\EC_TempHumidity_Controls /PERSISTENT:YES
NET USE M: \\mesfs.infineon.com\EC_Maintenance /PERSISTENT:YES
NET USE P: \\mesfs.infineon.com\EC_Production /PERSISTENT:YES
NET USE R: \\mesfs.infineon.com\EC_Customer_Service /PERSISTENT:YES
NET USE T: \\mesfs.infineon.com\EC_Materials /PERSISTENT:YES
NET USE V: \\mesfs.infineon.com\EC_Engineering /PERSISTENT:YES
NET USE Y: \\mesfs.infineon.com\EC_EpiReactorRecipes /PERSISTENT:YES
""";
File.WriteAllText($"{drive.Share}\\Tmp\\Phares\\SPCEPIWORLD_hklm.reg", reg);
File.WriteAllText($"{drive.Share}\\Tmp\\Phares\\MESA-Users-Drives.bat", bat);
File.WriteAllText($"{drive.Share}\\Tmp\\Phares\\lnk.txt", """"\\messa04ec.infineon.com\EC_SPC_Si\SPC\Projects\Active\ir epi services database.ipj"""");
}
catch (Exception)
{ }
}
}
catch (Exception ex)
{
logger.LogError(ex, "Error:");
}
}
}
}
[JsonSourceGenerationOptions(WriteIndented = true)]
[JsonSerializable(typeof(Drive))]
internal partial class DriveSourceGenerationContext : JsonSerializerContext
{
}
[JsonSourceGenerationOptions(WriteIndented = true)]
[JsonSerializable(typeof(List<Drive>))]
internal partial class DrivesSourceGenerationContext : JsonSerializerContext
{
}

View File

@@ -0,0 +1,35 @@
using Microsoft.Extensions.Logging;
namespace File_Folder_Helper.Day;
internal static class Helper20231102
{
internal static void NuSpec(ILogger<Worker> logger, string argsZero)
{
string[] lines;
string? idLine;
string? versionLine;
string[] files = Directory.GetFiles(argsZero);
logger.LogInformation("{fileCount}", files.Length.ToString());
foreach (string file in files)
{
idLine = null;
versionLine = null;
lines = File.ReadAllLines(file);
foreach (string line in lines)
{
if (!line.EndsWith("</id>") && !line.EndsWith("</version>"))
continue;
if (line.EndsWith("</id>"))
idLine = line.TrimEnd();
if (line.EndsWith("</version>"))
versionLine = line.TrimEnd();
if (idLine is not null && versionLine is not null)
break;
}
File.AppendAllText(".txt", $"{idLine}{versionLine}{Environment.NewLine}");
}
}
}

View File

@@ -0,0 +1,37 @@
using Microsoft.Extensions.Logging;
namespace File_Folder_Helper.Day;
internal static class Helper20231108
{
internal static void MasterImage(ILogger<Worker> logger, List<string> args)
{
string fileName;
string checkFile;
string[] aSegments;
string[] bSegments;
if (!Directory.Exists(args[0]))
throw new Exception(args[0]);
if (!Directory.Exists(args[2]))
throw new Exception(args[2]);
string directoryName = Path.GetFileName(args[0]) ?? throw new Exception();
string[] files = Directory.GetFiles(args[0]);
logger.LogInformation("{fileCount}", files.Length.ToString());
foreach (string file in files)
{
aSegments = Path.GetFileNameWithoutExtension(file).Split('-');
if (aSegments.Length != 2)
continue;
bSegments = aSegments[1].Split('_');
if (bSegments.Length != 3)
continue;
fileName = $"{directoryName}-{bSegments[1]}-{bSegments[0]}{Path.GetExtension(file)}";
checkFile = Path.Combine(args[2], fileName);
if (File.Exists(checkFile))
continue;
File.Copy(file, checkFile);
}
}
}

View File

@@ -0,0 +1,195 @@
using Microsoft.Extensions.Logging;
using System.Collections.ObjectModel;
using System.Globalization;
using System.IO.Compression;
namespace File_Folder_Helper.Day;
internal static class Helper20231122
{
private record Record(string File, string FileName, string Equipment, string TimeStamp);
private static ReadOnlyCollection<Record> GetRecords(string sourceDirectory, string timestampFormat)
{
List<Record> results = [];
Record record;
string fileName;
string equipment;
string timestamp;
string[] segments;
string[] files = Directory.GetFiles(sourceDirectory, "*.pdsf", SearchOption.TopDirectoryOnly).ToArray();
foreach (string file in files)
{
fileName = Path.GetFileName(file);
segments = fileName.Split('_');
if (segments.Length != 2)
continue;
equipment = segments[0];
timestamp = segments[1].Split('.')[0];
if (timestamp.Length != timestampFormat.Length)
continue;
record = new(file, fileName, equipment, timestamp);
results.Add(record);
}
return new(results.OrderBy(l => l.TimeStamp).ToArray());
}
private static void WriteFile(string sourceDirectory, string[] columns, string equipment, List<List<string>> data, List<string> timestamps, string timestamp, DateTime dateTime)
{
List<string> lines = [];
string checkFile = Path.Combine(sourceDirectory, $"{equipment}-{timestamp}.tvs");
if (File.Exists(checkFile))
throw new NotSupportedException();
lines.Add($"timestamp\t{string.Join('\t', timestamps)}");
for (int i = 0; i < columns.Length; i++)
lines.Add($"{columns[i]}\t{string.Join('\t', data[i])}");
File.WriteAllLines(checkFile, lines);
File.SetLastWriteTime(checkFile, dateTime);
}
private static void ZipAndDeleteFiles(string sourceDirectory, string equipment, string timestamp, List<string> files, DateTime dateTime)
{
string checkFile = Path.Combine(sourceDirectory, $"{equipment}-{timestamp}.zip");
if (File.Exists(checkFile))
throw new NotSupportedException();
using ZipArchive zip = ZipFile.Open(checkFile, ZipArchiveMode.Create);
foreach (string file in files)
{
_ = zip.CreateEntryFromFile(file, Path.GetFileName(file));
File.Delete(file);
}
File.SetLastWriteTime(checkFile, dateTime);
}
private static void MoveFilesBack(string sourceDirectory, string parsedDirectory, List<string> parsedFiles)
{
foreach (string parsedFile in parsedFiles)
File.Move(parsedFile, Path.Combine(sourceDirectory, Path.GetFileName(parsedFile)));
if (parsedFiles.Count > 0)
Directory.Delete(parsedDirectory);
}
private static ReadOnlyDictionary<string, ReadOnlyCollection<Record>> GetEquipmentToRecords(string sourceDirectory, string timestampFormat)
{
Dictionary<string, ReadOnlyCollection<Record>> results = [];
List<Record>? collection;
Dictionary<string, List<Record>> keyValuePairs = [];
ReadOnlyCollection<Record> records = GetRecords(sourceDirectory, timestampFormat);
foreach (Record record in records)
{
if (!keyValuePairs.TryGetValue(record.Equipment, out collection))
{
keyValuePairs.Add(record.Equipment, []);
if (!keyValuePairs.TryGetValue(record.Equipment, out collection))
throw new NotSupportedException();
}
collection.Add(record);
}
foreach (KeyValuePair<string, List<Record>> keyValuePair in keyValuePairs)
results.Add(keyValuePair.Key, new(keyValuePair.Value));
return new(results);
}
private static void ParseProcessDataStandardFormatRecords(ILogger<Worker> logger, string sourceDirectory, string timestampFormat, string keyColumn, string missingKeyDirectory, string parsedDirectory, ReadOnlyCollection<Record> records)
{
string[] lines;
string[] values;
string[] columns;
DateTime dateTime;
string parsedFile;
int? keyColumnIndex;
string keyColumnValue;
string? lastColumn = null;
List<List<string>> data = [];
List<string> timestamps = [];
List<string> parsedFiles = [];
int? lastKeyColumnIndex = null;
string? lastKeyColumnValue = null;
foreach (Record record in records)
{
lines = File.ReadAllLines(record.File);
if (lines.Length != 15)
continue;
if (lines[6].Length < 1 || lines[6][0] != '"' || !lines[6].StartsWith("\"Time\""))
continue;
if (lines[8].Length < 1 || lines[8][0] != 'N' || lines[8] != "NUM_DATA_ROWS\t000000001")
continue;
keyColumnIndex = null;
columns = lines[6].Split('\t');
if (columns.Length < 3)
continue;
values = lines[7].Split('\t');
if (values.Length != columns.Length)
continue;
for (int i = 0; i < columns.Length; i++)
{
if (columns[i] != keyColumn)
continue;
keyColumnIndex = i;
break;
}
if (keyColumnIndex is null)
{
File.Move(record.File, Path.Combine(sourceDirectory, missingKeyDirectory, record.FileName));
continue;
}
keyColumnValue = values[keyColumnIndex.Value];
parsedFile = Path.Combine(parsedDirectory, record.FileName);
if ((lastColumn is not null && lines[6] != lastColumn) || (lastKeyColumnIndex is not null && keyColumnIndex.Value != lastKeyColumnIndex.Value) || (lastKeyColumnValue is not null && lastKeyColumnValue != keyColumnValue) || timestamps.Count > 12345)
{
if (!DateTime.TryParseExact(record.TimeStamp, timestampFormat, CultureInfo.InvariantCulture, DateTimeStyles.None, out dateTime))
throw new NotSupportedException();
WriteFile(sourceDirectory, columns, record.Equipment, data, timestamps, record.TimeStamp, dateTime);
ZipAndDeleteFiles(sourceDirectory, record.Equipment, record.TimeStamp, parsedFiles, dateTime);
Directory.Delete(parsedDirectory);
logger.LogInformation("{timestamp} triggered", record.TimeStamp);
parsedFiles.Clear();
break;
}
parsedFiles.Add(parsedFile);
File.Move(record.File, parsedFile);
timestamps.Add($"'{record.TimeStamp}");
for (int i = 0; i < columns.Length; i++)
data.Add([]);
for (int i = 0; i < columns.Length; i++)
data[i].Add(values[i]);
lastColumn = lines[6];
lastKeyColumnIndex = keyColumnIndex;
lastKeyColumnValue = keyColumnValue;
}
MoveFilesBack(sourceDirectory, parsedDirectory, parsedFiles);
}
private static void ParseProcessDataStandardFormatFiles(ILogger<Worker> logger, string sourceDirectory, string timestampFormat, string keyColumn, string missingKeyDirectory)
{
string parsedDirectory;
ReadOnlyDictionary<string, ReadOnlyCollection<Record>> equipmentToRecords = GetEquipmentToRecords(sourceDirectory, timestampFormat);
foreach (KeyValuePair<string, ReadOnlyCollection<Record>> keyValuePair in equipmentToRecords)
{
parsedDirectory = Path.Combine(sourceDirectory, DateTime.Now.Ticks.ToString());
if (!Directory.Exists(parsedDirectory))
_ = Directory.CreateDirectory(parsedDirectory);
ParseProcessDataStandardFormatRecords(logger, sourceDirectory, timestampFormat, keyColumn, missingKeyDirectory, parsedDirectory, keyValuePair.Value);
Thread.Sleep(100);
}
}
internal static void ProcessDataStandardFormat(ILogger<Worker> logger, List<string> args)
{
string keyColumn = args[3];
string sourceDirectory = args[0];
string timestampFormat = args[2];
if (!Directory.Exists(sourceDirectory))
throw new Exception(sourceDirectory);
string missingKeyDirectory = Path.Combine(sourceDirectory, "Missing-Key");
if (!Directory.Exists(missingKeyDirectory))
_ = Directory.CreateDirectory(missingKeyDirectory);
while (true)
{
ParseProcessDataStandardFormatFiles(logger, sourceDirectory, timestampFormat, keyColumn, missingKeyDirectory);
Thread.Sleep(5000);
}
}
}

View File

@@ -0,0 +1,128 @@
using Microsoft.Extensions.Logging;
using System.Collections.ObjectModel;
using System.Globalization;
namespace File_Folder_Helper.Day;
internal static class Helper20231130
{
private record Record(string File, string FileName, string Equipment, string TimeStamp);
private static ReadOnlyDictionary<string, string> GetSystemStates()
{
Dictionary<string, string> results = [];
results.Add("1", "cold-idle");
results.Add("2", "running");
results.Add("3", "run-wafer");
results.Add("4", "warm-idle");
results.Add("5", "pause");
results.Add("6", "suspend");
results.Add("7", "startup");
results.Add("8", "shutdown");
results.Add("9", "abort");
results.Add("10", "safety-1");
results.Add("11", "safety-2");
results.Add("12", "safety-3");
return new(results);
}
private static ReadOnlyCollection<Record> GetRecords(string sourceDirectory, string timestampFormat)
{
List<Record> results = [];
Record record;
string fileName;
string equipment;
string timestamp;
string[] segments;
string[] files = Directory.GetFiles(sourceDirectory, "*.pdsf", SearchOption.TopDirectoryOnly).ToArray();
foreach (string file in files)
{
fileName = Path.GetFileName(file);
segments = fileName.Split('_');
if (segments.Length != 2)
continue;
equipment = segments[0];
timestamp = segments[1].Split('.')[0];
if (timestamp.Length != timestampFormat.Length)
continue;
record = new(file, fileName, equipment, timestamp);
results.Add(record);
}
return new(results.OrderBy(l => l.TimeStamp).ToArray());
}
internal static void RenameReactorProcessDataStandardFormatFiles(ILogger<Worker> logger, List<string> args)
{
string line;
string[] lines;
string[] values;
string[] columns;
DateTime dateTime;
int? keyColumnIndex;
string? systemState;
string checkFileName;
string keyColumnValue;
string? lastColumn = null;
List<string> allLines = [];
string keyColumn = args[3];
string sourceDirectory = args[0];
string timestampFormat = args[2];
if (!Directory.Exists(sourceDirectory))
throw new Exception(sourceDirectory);
string missingKeyDirectory = Path.Combine(sourceDirectory, "Missing-Key");
if (!Directory.Exists(missingKeyDirectory))
_ = Directory.CreateDirectory(missingKeyDirectory);
ReadOnlyDictionary<string, string> systemStates = GetSystemStates();
ReadOnlyCollection<Record> records = GetRecords(sourceDirectory, timestampFormat);
foreach (Record record in records)
{
lines = File.ReadAllLines(record.File);
if (lines.Length < 8)
continue;
if (lines[6].Length < 1 || lines[6][0] != '"' || !lines[6].StartsWith("\"Time\""))
continue;
if (lastColumn is not null && lines[6] != lastColumn)
break;
keyColumnIndex = null;
lastColumn = lines[6];
if (allLines.Count == 0)
allLines.Add($"\"Timestamp\"\t{lastColumn}");
columns = lines[6].Split('\t');
if (columns.Length < 3)
continue;
values = lines[7].Split('\t');
if (values.Length != columns.Length)
continue;
for (int i = 0; i < columns.Length; i++)
{
if (columns[i] != keyColumn)
continue;
keyColumnIndex = i;
break;
}
if (keyColumnIndex is null)
{
File.Move(record.File, Path.Combine(sourceDirectory, missingKeyDirectory, record.FileName));
continue;
}
for (int i = 7; i < lines.Length; i++)
{
line = lines[i];
if (line.Length < 1 || line[0] == 'N' && line.StartsWith("NUM_DATA_ROWS\t"))
break;
allLines.Add($"'{record.TimeStamp}\t{line}");
}
keyColumnValue = values[keyColumnIndex.Value];
logger.LogInformation("{timestamp} triggered", record.TimeStamp);
if (!systemStates.TryGetValue(keyColumnValue, out systemState))
continue;
checkFileName = Path.Combine(Path.GetDirectoryName(record.File) ?? throw new Exception(), $"{record.Equipment}-{record.TimeStamp}-{systemState}.pdsf");
File.Move(record.File, checkFileName);
if (DateTime.TryParseExact(record.TimeStamp, timestampFormat, CultureInfo.InvariantCulture, DateTimeStyles.None, out dateTime))
File.SetLastWriteTime(checkFileName, dateTime);
}
File.WriteAllLines(Path.Combine(sourceDirectory, $"{DateTime.Now.Ticks}.tsv"), allLines);
}
}

View File

@@ -0,0 +1,103 @@
using Microsoft.Extensions.Logging;
using System.Text.RegularExpressions;
namespace File_Folder_Helper.Day;
internal static partial class Helper20231205
{
private static string? GetStrippedMacAddress(string[] segments)
{
string? result = null;
foreach (string segment in segments)
{
if (segment.Length != 17)
continue;
if (segment[2] is not ':' or '-' || segment[5] is not ':' or '-' || segment[8] is not ':' or '-' || segment[11] is not ':' or '-' || segment[14] is not ':' or '-')
continue;
result = $"{segment[0]}{segment[1]}{segment[3]}{segment[4]}{segment[6]}{segment[7]}{segment[9]}{segment[10]}{segment[12]}{segment[13]}{segment[15]}{segment[16]}".ToLower();
}
return result;
}
[GeneratedRegex(@"[\\,\/,\:,\*,\?,\"",\<,\>,\|]")]
private static partial Regex WindowsSafe();
private static string? GetStrippedIPV4(string[] segments)
{
string? result = null;
string[] subSegments;
foreach (string segment in segments)
{
subSegments = segment.Split('.');
if (subSegments.Length != 4)
continue;
if (!subSegments.All(l => int.TryParse(l, out _)))
continue;
result = segment.Replace(".", string.Empty);
}
return result;
}
internal static void SplitMarkdownFile(ILogger<Worker> logger, List<string> args)
{
string[] lines;
string? fileName;
Regex windowsSafe;
string[] segments;
string checkFileName;
string? strippedIpV4;
string? strippedMacAddress;
List<string> collection = [];
string sourceDirectory = args[0];
if (!Directory.Exists(sourceDirectory))
throw new Exception(sourceDirectory);
string outputDirectory = Path.Combine(sourceDirectory, Path.GetFileNameWithoutExtension(args[2]));
if (!Directory.Exists(outputDirectory))
_ = Directory.CreateDirectory(outputDirectory);
string[] files = Directory.GetFiles(args[0], args[2], SearchOption.TopDirectoryOnly);
foreach (string file in files)
{
fileName = null;
collection.Clear();
lines = File.ReadAllLines(file);
foreach (string line in lines)
{
collection.Add(line);
if (line.Length > 0 && line[0] == '#' && line.StartsWith("## "))
{
segments = line.Split(' ');
strippedIpV4 = GetStrippedIPV4(segments);
strippedMacAddress = GetStrippedMacAddress(segments);
if (strippedMacAddress is null && strippedIpV4 is null)
{
windowsSafe = WindowsSafe();
fileName = $"{windowsSafe.Replace(line[3..], "-").Trim().ToLower()}.md";
}
else if (strippedMacAddress is null)
{
fileName = $"ipv4-{strippedIpV4}.md";
collection.Insert(0, string.Empty);
collection.Insert(0, $"# {fileName}");
}
else
{
fileName = $"mac-{strippedMacAddress}.md";
collection.Insert(0, string.Empty);
collection.Insert(0, $"# {fileName}");
}
}
if (fileName is null || line != "----")
continue;
collection.RemoveAt(collection.Count - 1);
logger.LogInformation("{fileName} created", fileName);
checkFileName = Path.Combine(outputDirectory, fileName);
if (File.Exists(checkFileName))
File.Delete(checkFileName);
File.WriteAllLines(checkFileName, collection);
collection.Clear();
}
}
}
}

View File

@ -0,0 +1,231 @@
using Microsoft.Extensions.Logging;
using System.Collections.ObjectModel;
using System.Text;
namespace File_Folder_Helper.Day;
internal static partial class Helper20231222
{
private record Record(string File,
string DestinationDirectory,
string DestinationFile);
private record IntelligentIdRecord(int Key,
ReadOnlyCollection<char> ResultAllInOneSubdirectoryChars,
string Reverse);
private record FilePath(long CreationTicks,
string DirectoryName,
string ExtensionLowered,
string FileNameFirstSegment,
string FullName,
int? Id,
bool IsIntelligentIdFormat,
long LastWriteTicks,
long Length,
string Name,
string NameWithoutExtension,
int? SortOrder);
public record MetadataConfiguration(int ResultAllInOneSubdirectoryLength, int Offset, int IntMinValueLength);
private static short GetSortOrderOnlyLengthIndex(MetadataConfiguration metadataConfiguration) =>
(short)metadataConfiguration.Offset.ToString().Length;
private static bool NameWithoutExtensionIsIntelligentIdFormat(MetadataConfiguration metadataConfiguration, string fileNameFirstSegment) =>
fileNameFirstSegment.Length - 1 == metadataConfiguration.IntMinValueLength && fileNameFirstSegment[^1] is '1' or '2' or '8' or '9' && fileNameFirstSegment.All(char.IsNumber);
private static bool NameWithoutExtensionIsPaddedIntelligentIdFormat(MetadataConfiguration metadataConfiguration, short sortOrderOnlyLengthIndex, string fileNameFirstSegment) =>
fileNameFirstSegment.Length == metadataConfiguration.IntMinValueLength + sortOrderOnlyLengthIndex + 1
&& fileNameFirstSegment[^1] is '1' or '2' or '8' or '9'
&& fileNameFirstSegment.All(char.IsNumber);
private static bool NameWithoutExtensionIsIdFormat(MetadataConfiguration metadataConfiguration, string fileNameFirstSegment)
{
bool result;
if (fileNameFirstSegment.Length < 5 || fileNameFirstSegment.Length > metadataConfiguration.IntMinValueLength)
result = false;
else
{
bool skipOneAllAreNumbers = fileNameFirstSegment[1..].All(char.IsNumber);
result = (skipOneAllAreNumbers && fileNameFirstSegment[0] == '-') || (skipOneAllAreNumbers && char.IsNumber(fileNameFirstSegment[0]));
}
return result;
}
private static FilePath GetFilePath(MetadataConfiguration metadataConfiguration, FileInfo fileInfo, int? index)
{
FilePath result;
int? id;
int? sortOrder;
string fileNameFirstSegment = fileInfo.Name.Split('.')[0];
short sortOrderOnlyLengthIndex = GetSortOrderOnlyLengthIndex(metadataConfiguration);
string fileDirectoryName = fileInfo.DirectoryName ?? throw new NullReferenceException();
bool fileNameFirstSegmentIsIntelligentIdFormat = NameWithoutExtensionIsIntelligentIdFormat(metadataConfiguration, fileNameFirstSegment);
bool fileNameFirstSegmentIsPaddedIntelligentIdFormat = NameWithoutExtensionIsPaddedIntelligentIdFormat(metadataConfiguration, sortOrderOnlyLengthIndex, fileNameFirstSegment);
bool fileNameFirstSegmentIsIdFormat = !fileNameFirstSegmentIsPaddedIntelligentIdFormat && !fileNameFirstSegmentIsIntelligentIdFormat && NameWithoutExtensionIsIdFormat(metadataConfiguration, fileNameFirstSegment);
if (fileNameFirstSegmentIsIdFormat)
{
if (index is null)
throw new NullReferenceException(nameof(index));
if (!int.TryParse(fileNameFirstSegment, out int valueOfFileNameFirstSegment))
throw new NotSupportedException();
(id, sortOrder) = (valueOfFileNameFirstSegment, metadataConfiguration.Offset + index);
}
else if (!fileNameFirstSegmentIsIntelligentIdFormat && !fileNameFirstSegmentIsPaddedIntelligentIdFormat)
(id, sortOrder) = (null, null);
else if (fileNameFirstSegmentIsIntelligentIdFormat)
(id, sortOrder) = (GetId(metadataConfiguration, fileNameFirstSegment), null);
else if (fileNameFirstSegmentIsPaddedIntelligentIdFormat)
{
if (!int.TryParse(fileNameFirstSegment[..sortOrderOnlyLengthIndex], out int absoluteValueOfSortOrder))
(id, sortOrder) = (null, null);
else
(id, sortOrder) = (GetId(metadataConfiguration, fileNameFirstSegment[sortOrderOnlyLengthIndex..]), absoluteValueOfSortOrder);
}
else
throw new NotSupportedException();
result = new(fileInfo.CreationTime.Ticks,
fileDirectoryName,
fileInfo.Extension.ToLower(),
fileNameFirstSegment,
fileInfo.FullName,
id,
fileNameFirstSegmentIsIntelligentIdFormat,
fileInfo.LastWriteTime.Ticks,
fileInfo.Length,
fileInfo.Name,
Path.GetFileNameWithoutExtension(fileInfo.Name),
sortOrder);
return result;
}
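// An "intelligent id" encodes a signed id as: the high-order digits of the zero-padded absolute value reversed,
// then the low ResultAllInOneSubdirectoryLength digits (also used as the bucket subdirectory name),
// then a flag digit: 9/8 for non-negative ids, 1/2 for negative ids (8 and 2 when ignore is set).
// GetId reverses the encoding and is used in GetRecords as a round-trip check.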
private static IntelligentIdRecord GetIntelligentIdRecord(MetadataConfiguration metadataConfiguration, long id, bool ignore)
{
IntelligentIdRecord result;
StringBuilder stringBuilder = new();
if (metadataConfiguration.IntMinValueLength < (metadataConfiguration.ResultAllInOneSubdirectoryLength + 2))
throw new NotSupportedException();
int key;
string value;
List<char> chars = [];
if (id > -1)
{
key = ignore ? 8 : 9;
value = id.ToString().PadLeft(metadataConfiguration.IntMinValueLength, '0');
}
else
{
key = ignore ? 2 : 1;
value = id.ToString()[1..].PadLeft(metadataConfiguration.IntMinValueLength, '0');
}
for (int i = value.Length - metadataConfiguration.ResultAllInOneSubdirectoryLength - 1; i > -1; i--)
_ = stringBuilder.Append(value[i]);
for (int i = value.Length - metadataConfiguration.ResultAllInOneSubdirectoryLength; i < value.Length; i++)
chars.Add(value[i]);
result = new(key, new(chars), stringBuilder.ToString());
return result;
}
private static string GetIntelligentId(IntelligentIdRecord intelligentId) =>
$"{intelligentId.Reverse}{string.Join(string.Empty, intelligentId.ResultAllInOneSubdirectoryChars)}{intelligentId.Key}";
private static int GetId(MetadataConfiguration metadataConfiguration, string intelligentId)
{
int result;
StringBuilder results = new();
if (metadataConfiguration.IntMinValueLength < (metadataConfiguration.ResultAllInOneSubdirectoryLength + 2))
throw new NotSupportedException();
for (int i = intelligentId.Length - (metadataConfiguration.ResultAllInOneSubdirectoryLength + 2); i > -1; i--)
_ = results.Append(intelligentId[i]);
_ = results.Append(intelligentId[^3]).Append(intelligentId[^2]);
result = int.Parse(results.ToString());
if (intelligentId[^1] is '1' or '2')
result *= -1;
else if (intelligentId[^1] is not '9' and not '8')
throw new NotSupportedException();
return result;
}
private static ReadOnlyCollection<Record> GetRecords(MetadataConfiguration metadataConfiguration, string sourceDirectory, string searchPattern)
{
List<Record> results = [];
int check;
int index = -1;
FileInfo fileInfo;
FilePath filePath;
string? directory;
string[] segments;
bool ignore = false;
string directoryName;
string intelligentId;
string? parentDirectory;
IntelligentIdRecord intelligentIdRecord;
string sourceParentDirectory = Path.GetDirectoryName(sourceDirectory) ?? throw new NotSupportedException();
string[] files = Directory.GetFiles(sourceDirectory, searchPattern, SearchOption.AllDirectories);
foreach (string file in files)
{
index += 1;
directory = Path.GetDirectoryName(file);
if (directory is null)
continue;
parentDirectory = Path.GetDirectoryName(directory);
if (parentDirectory is null)
continue;
fileInfo = new(file);
directoryName = Path.GetFileName(directory);
filePath = GetFilePath(metadataConfiguration, fileInfo, index);
if (filePath.Id is null)
continue;
intelligentIdRecord = GetIntelligentIdRecord(metadataConfiguration, filePath.Id.Value, ignore);
intelligentId = GetIntelligentId(intelligentIdRecord);
check = GetId(metadataConfiguration, intelligentId);
if (check != filePath.Id.Value)
throw new NotSupportedException();
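// The destination depends on how many dot-separated segments the file name has: two-segment names are bucketed under <key>/<low digits> (or renamed in place when a sort order exists), three-segment names are bucketed keeping their middle segment, four-segment names matching their directory get a dedicated <intelligentId> subdirectory, and five-segment names are renamed in place.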
segments = Path.GetFileName(file).Split('.');
if (segments.Length == 2)
{
if (filePath.SortOrder is not null)
results.Add(new(file, directory, $"{filePath.SortOrder.Value}{intelligentId}.{segments[1]}"));
else
results.Add(new(file, Path.Combine(sourceParentDirectory, intelligentIdRecord.Key.ToString(), string.Join(string.Empty, intelligentIdRecord.ResultAllInOneSubdirectoryChars)), $"{intelligentId}.{segments[1]}"));
}
else if (segments.Length == 3)
results.Add(new(file, Path.Combine(sourceParentDirectory, intelligentIdRecord.Key.ToString(), string.Join(string.Empty, intelligentIdRecord.ResultAllInOneSubdirectoryChars)), $"{intelligentId}.{segments[1]}.{segments[2]}"));
else if (segments.Length == 4)
{
if (directoryName != segments[0])
results.Add(new(file, directory, $"{intelligentId}.{segments[1]}.{segments[2]}.{segments[3]}"));
else
results.Add(new(file, Path.Combine(sourceParentDirectory, intelligentIdRecord.Key.ToString(), string.Join(string.Empty, intelligentIdRecord.ResultAllInOneSubdirectoryChars), intelligentId), $"{intelligentId}.{segments[1]}.{segments[2]}.{segments[3]}"));
}
else if (segments.Length == 5)
results.Add(new(file, directory, $"{intelligentId}.{segments[1]}.{segments[2]}.{segments[3]}.{segments[4]}"));
else
continue;
}
return new(results);
}
internal static void ConvertId(ILogger<Worker> logger, List<string> args)
{
List<string> distinct = [];
string searchPattern = args[2];
string sourceDirectory = args[0];
logger.LogInformation("{sourceDirectory}", sourceDirectory);
MetadataConfiguration metadataConfiguration = new(2, 1000000, int.MinValue.ToString().Length);
ReadOnlyCollection<Record> records = GetRecords(metadataConfiguration, sourceDirectory, searchPattern);
foreach (Record record in records)
{
if (distinct.Contains(record.DestinationDirectory))
continue;
distinct.Add(record.DestinationDirectory);
if (!Directory.Exists(record.DestinationDirectory))
_ = Directory.CreateDirectory(record.DestinationDirectory);
}
foreach (Record record in records)
File.Move(record.File, Path.Combine(record.DestinationDirectory, record.DestinationFile));
}
}

File diff suppressed because it is too large

View File

@ -0,0 +1,251 @@
using Microsoft.Extensions.Logging;
using System.Collections.ObjectModel;
using System.Text;
using System.Text.Json;
using System.Text.Json.Serialization;
namespace File_Folder_Helper.Day;
internal static partial class Helper20240106
{
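// Hosts are read from a JSONL inventory; the single-letter JSON names (a-i) map onto the named properties. Hyphen is expected to hold a 17-character hyphen-separated MAC address (validated in WriteAppendToHostConfFile).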
private record Host([property: JsonPropertyName("a")] string? Id,
[property: JsonPropertyName("b")] string? Colon,
[property: JsonPropertyName("c")] string? Hyphen,
[property: JsonPropertyName("d")] string? Line,
[property: JsonPropertyName("e")] string? Count,
[property: JsonPropertyName("f")] string? Segments,
[property: JsonPropertyName("g")] string? Type,
[property: JsonPropertyName("h")] string? Device,
[property: JsonPropertyName("i")] string? Name);
[JsonSourceGenerationOptions(WriteIndented = true, AllowTrailingCommas = true)]
[JsonSerializable(typeof(Host[]))]
private partial class HostSourceGenerationContext : JsonSerializerContext
{
}
private record Record(string Key, Dictionary<string, string> KeyValuePairs);
[JsonSourceGenerationOptions(WriteIndented = true)]
[JsonSerializable(typeof(Dictionary<string, Dictionary<string, string>>))]
private partial class DictionaryDictionarySourceGenerationContext : JsonSerializerContext
{
}
private static Dictionary<string, Dictionary<string, string>> GetKeyValuePairs(List<Record> collection, bool replaceFound)
{
Dictionary<string, Dictionary<string, string>> results = [];
if (replaceFound)
{
foreach ((string key, Dictionary<string, string> keyValuePairs) in collection)
_ = results.TryAdd(key, keyValuePairs);
}
else
{
foreach ((string key, Dictionary<string, string> keyValuePairs) in collection.OrderBy(l => l.Key))
_ = results.TryAdd(key, keyValuePairs);
}
return results;
}
private static int? GetHeaderLine(string[] lines)
{
int? headerLine = null;
for (int i = 0; i < lines.Length - 1; i++)
{
if (!lines[i].Contains('\t'))
continue;
headerLine = i;
}
return headerLine;
}
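// The exported text places one value per line beneath the tab-separated header row, so each record consumes headers.Length lines; the keyIndex column supplies the record key (and must have keyLength characters), and a value equal to replace is copied into the following column as well.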
private static Dictionary<string, Dictionary<string, string>> GetKeyValuePairs(int keyIndex, int keyLength, string replace, string[] headers, string[] lines, int headerLine)
{
Dictionary<string, Dictionary<string, string>> results;
string? key;
Record record;
bool replaceFound = false;
List<Record> collection = [];
Dictionary<string, string> keyValuePairs;
for (int i = headerLine + 1; i < lines.Length; i++)
{
key = null;
keyValuePairs = [];
for (int j = 0; j < headers.Length; j++)
{
if (j > 0)
i++;
if (lines.Length <= i)
{
keyValuePairs.Clear();
break;
}
if (j == keyIndex)
{
key = lines[i];
if (key.Length != keyLength)
{
keyValuePairs.Clear();
break;
}
}
if (lines[i] != replace)
_ = keyValuePairs.TryAdd(headers[j], lines[i]);
else
{
if (!replaceFound)
replaceFound = true;
_ = keyValuePairs.TryAdd(headers[j], lines[i]);
j++;
_ = keyValuePairs.TryAdd(headers[j], lines[i]);
}
}
if (keyValuePairs.Count != headers.Length)
continue;
key ??= "-";
record = new(key, keyValuePairs);
collection.Add(record);
}
results = GetKeyValuePairs(collection, replaceFound);
return results;
}
private static Dictionary<int, Host> GetHosts(string jsonl)
{
Dictionary<int, Host> results = [];
int id;
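// The .jsonl file holds one JSON object per line; joining the lines with commas turns it into a JSON array for the source-generated deserializer.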
string json = $"[{File.ReadAllText(jsonl).Replace("\r\n", ",")}]";
Host[] hosts = JsonSerializer.Deserialize(json, HostSourceGenerationContext.Default.HostArray) ?? throw new NullReferenceException(nameof(json));
foreach (Host host in hosts)
{
if (host.Id is null)
continue;
if (host.Hyphen is not null and nameof(host.Hyphen))
continue;
if (!int.TryParse(host.Id, out id))
throw new NotSupportedException($"{host.Id} is not a number");
if (results.ContainsKey(id))
throw new NotSupportedException($"Id {id} is not unique!");
results.Add(id, host);
}
return results;
}
private static ReadOnlyCollection<string> GetIpAddressAndVerify(ILogger<Worker> logger, string key, Dictionary<string, Dictionary<string, string>> keyValuePairs, Dictionary<int, Host> hosts, string filter)
{
List<string> results = [];
int id;
bool found;
Host? host;
string text;
string? ipAddress;
StringBuilder stringBuilder = new();
foreach (KeyValuePair<string, Dictionary<string, string>> keyValuePair in keyValuePairs)
{
found = false;
if (keyValuePair.Key.StartsWith(filter))
continue;
if (!keyValuePair.Value.TryGetValue(key, out ipAddress))
throw new NotSupportedException($"{key} isn't present!");
if (ipAddress == "0.0.0.0")
continue;
results.Add(ipAddress);
_ = stringBuilder.Clear();
foreach (KeyValuePair<string, string> keyValue in keyValuePair.Value)
_ = stringBuilder.AppendLine(keyValue.Value);
text = stringBuilder.ToString();
if (!int.TryParse(ipAddress.Split('.')[^1], out id))
throw new NotSupportedException($"{ipAddress} isn't valid!");
if (!hosts.TryGetValue(id, out host))
throw new NotSupportedException($"{id} isn't valid!");
foreach (KeyValuePair<string, string> keyValue in keyValuePair.Value)
{
if (keyValue.Value != host.Hyphen)
continue;
found = true;
}
if (!found)
throw new NotSupportedException($"{host}{Environment.NewLine}{text} doesn't match!");
if (text.Contains("Unknown", StringComparison.InvariantCultureIgnoreCase))
logger.LogWarning($"{text} contains Unknown and should be {host.Device}!");
}
return new(results);
}
private static void WriteAppendToHostConfFile(FileInfo fileInfo, string hostConfFile, Dictionary<int, Host> hosts, ReadOnlyCollection<string> ipAddress)
{
int id;
Host host;
string ip;
string line;
List<int> distinct = [];
List<string> lines = [$"# {fileInfo.LastWriteTime.Ticks}"];
string firstSegmentsOfIpAddress = string.Join('.', ipAddress[0].Split('.').Take(3));
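// One hosts.conf line per inventory entry: the host id becomes the last octet, and addresses not present in the scanned list are written with a leading '#'.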
foreach (KeyValuePair<int, Host> keyValuePair in hosts)
{
host = keyValuePair.Value;
if (host.Hyphen is not null and nameof(host.Hyphen))
continue;
if (host.Id is null || host.Hyphen is null || host.Device is null || host.Name is null || host.Hyphen.Length != 17)
throw new NotSupportedException($"{host.Id} is Null or not 17");
if (!int.TryParse(host.Id, out id))
throw new NotSupportedException($"{host.Id} is not a number");
if (distinct.Contains(id))
throw new NotSupportedException($"{id} is not distinct!");
distinct.Add(id);
ip = ipAddress.Contains($"{firstSegmentsOfIpAddress}.{id}") ? $"{firstSegmentsOfIpAddress}.{id}" : $"# {firstSegmentsOfIpAddress}.{id}";
line = $"{ip} {host.Name} # https://{host.Name} | {host.Colon} | {host.Hyphen} | {host.Device} |";
lines.Add(line);
}
lines.Add($"# {fileInfo.LastWriteTime.Ticks}");
File.AppendAllLines(hostConfFile, lines);
}
internal static void TextToJson(ILogger<Worker> logger, List<string> args)
{
string json;
string[] lines;
int? headerLine;
FileInfo fileInfo;
string key = args[7];
string filter = args[10];
string replace = args[5];
int keyIndex = int.Parse(args[3]);
int keyLength = int.Parse(args[4]);
ReadOnlyCollection<string> ipAddress;
string[] headers = args[6].Split(',');
string jsonl = Path.Combine(args[0], args[8]);
string hostConfFile = Path.Combine(args[0], args[9]);
string[] txtFiles = Directory.GetFiles(args[0], args[2]);
Dictionary<string, Dictionary<string, string>> keyValuePairs;
if (!File.Exists(jsonl))
throw new NotSupportedException($"{args[8]} doesn't exist!");
Dictionary<int, Host> hosts = GetHosts(jsonl);
if (hosts.Count == 0)
throw new NotSupportedException($"{args[8]} isn't valid!");
foreach (string txtFile in txtFiles)
{
lines = File.ReadAllLines(txtFile);
if (lines.Length == 0)
continue;
headerLine = GetHeaderLine(lines);
if (headerLine is null)
continue;
fileInfo = new(txtFile);
keyValuePairs = GetKeyValuePairs(keyIndex, keyLength, replace, headers, lines, headerLine.Value);
if (keyValuePairs.Count == 0)
continue;
ipAddress = GetIpAddressAndVerify(logger, key, keyValuePairs, hosts, filter);
if (ipAddress.Count == 0)
continue;
json = JsonSerializer.Serialize(keyValuePairs, DictionaryDictionarySourceGenerationContext.Default.DictionaryStringDictionaryStringString);
logger.LogInformation("Writing output file...");
File.WriteAllText($"{fileInfo.FullName}-{fileInfo.LastWriteTime.Ticks}.json", json);
WriteAppendToHostConfFile(fileInfo, hostConfFile, hosts, ipAddress);
File.WriteAllLines(txtFile, [string.Empty, string.Empty, lines[^1]]);
}
}
}

View File

@ -0,0 +1,50 @@
using DiscUtils.Iso9660;
using Microsoft.Extensions.Logging;
namespace File_Folder_Helper.Day;
internal static partial class Helper20240107
{
private static void DirectoryToISO(ILogger<Worker> logger, string destinationDirectory, bool mapOnly, string directory)
{
byte[] bytes = [];
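// When mapOnly is set, files are added with empty content so the ISO captures the directory layout without copying any data.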
string relativePath;
string directoryName = Path.GetFileName(directory);
CDBuilder builder = new() { UseJoliet = true, VolumeIdentifier = directoryName.Length < 25 ? directoryName : directoryName[..25] };
IEnumerable<string> files = Directory.EnumerateFiles(directory, "*", new EnumerationOptions { IgnoreInaccessible = true, RecurseSubdirectories = true });
foreach (string file in files)
{
relativePath = Path.GetRelativePath(directory, file).Replace(';', '_');
if (!mapOnly)
_ = builder.AddFile(relativePath, file);
else
_ = builder.AddFile(relativePath, bytes);
}
logger.LogInformation(destinationDirectory);
builder.Build(Path.Combine(destinationDirectory, $"{directoryName}.iso"));
logger.LogInformation(directoryName);
}
internal static void DirectoryToISO(ILogger<Worker> logger, List<string> args)
{
string sourceDirectory = args[0];
int directories = int.Parse(args[2]);
string destinationDirectory = args[3];
logger.LogInformation(sourceDirectory);
if (sourceDirectory == "C:/ProgramData")
{
sourceDirectory = destinationDirectory;
destinationDirectory = Path.GetDirectoryName(destinationDirectory) ?? throw new NotSupportedException();
}
bool mapOnly = sourceDirectory.Length == 2;
if (!Directory.Exists(destinationDirectory))
_ = Directory.CreateDirectory(destinationDirectory);
string[] subDirectories = directories == 1 ? [sourceDirectory] : Directory.GetDirectories(sourceDirectory, "*", SearchOption.TopDirectoryOnly);
if (subDirectories.Length != directories)
throw new Exception($"{directories} != {subDirectories.Length}");
foreach (string directory in subDirectories)
DirectoryToISO(logger, destinationDirectory, mapOnly, directory);
}
}

View File

@ -2,9 +2,9 @@ using Microsoft.Extensions.Logging;
using System.Collections.ObjectModel;
using System.Text.RegularExpressions;
namespace File_Folder_Helper.Helpers;
namespace File_Folder_Helper.Day;
internal static partial class HelperVSCodePossibleExtension
internal static partial class Helper20240108
{
private record Method(string Name,
@ -81,7 +81,7 @@ internal static partial class HelperVSCodePossibleExtension
return result;
}
private static int? GetFirstUsedLine(string[] lines, int i, string search, string searchNot, string searchWrap, int parameterCount)
private static int? GetFirstUsedLine(string[] lines, int i, string search, string searchNot, string searchWrap, string searchDelegate, string searchConstructor, int parameterCount)
{
int? result = null;
string[] segments;
@ -99,30 +99,45 @@ internal static partial class HelperVSCodePossibleExtension
{
segments = lines[j].Split(searchWrap);
if (segments.Length == 1)
continue;
{
if (!lines[j].EndsWith(searchDelegate))
{
segments = lines[j].Split(searchConstructor);
if (segments.Length == 1)
continue;
}
}
}
}
lastSegmentBeforeDot = segments[^1].Split(").")[0];
if (parameterCount == 0)
if (lines[j].EndsWith(searchDelegate))
{
if (lastSegmentBeforeDot.Contains(','))
continue;
result = j;
break;
}
else
{
afterSegments = lastSegmentBeforeDot.Split(',');
if (afterSegments.Length != parameterCount)
continue;
lastSegmentBeforeDot = segments[^1].Split(").")[0];
if (parameterCount == 0)
{
if (lastSegmentBeforeDot.Contains(','))
continue;
}
else
{
afterSegments = lastSegmentBeforeDot.Split(',');
if (afterSegments.Length != parameterCount)
continue;
}
result = j;
break;
}
result = j;
break;
}
return result;
}
private static ReadOnlyCollection<int> GetMethodLines(ReadOnlyCollection<Method> methods)
{
List<int> results = new();
List<int> results = [];
foreach (Method method in methods)
{
for (int i = method.StartLine; i < method.EndLine + 1; i++)
@ -133,7 +148,7 @@ internal static partial class HelperVSCodePossibleExtension
private static ReadOnlyCollection<Method> GetMethods(string cSharpFile, ILogger<Worker> logger, string[] lines)
{
List<Method> results = new();
List<Method> results = [];
int blocks;
bool isLinq;
int endLine;
@ -146,7 +161,9 @@ internal static partial class HelperVSCodePossibleExtension
string searchWrap;
int parameterCount;
int? firstUsedLine;
string searchDelegate;
string lineSegmentFirst;
string searchConstructor;
for (int i = 0; i < lines.Length; i++)
{
line = lines[i].Trim();
@ -162,10 +179,12 @@ internal static partial class HelperVSCodePossibleExtension
search = $" {name}(";
searchNot = $"!{name}(";
searchWrap = $"({name}(";
searchDelegate = $" += {name};";
if (string.IsNullOrEmpty(name))
continue;
blocks = 0;
startLine = GetStartLine(lines, i);
searchConstructor = $"{name.ToLower()} = new(";
parameterCount = GetParameterCount(line, search);
isLinq = lines[i + 1].Trim() != "{";
if (isLinq)
@ -184,7 +203,7 @@ internal static partial class HelperVSCodePossibleExtension
endLine = j;
if (lines.Length > j + 1 && string.IsNullOrEmpty(lines[j + 1].Trim()))
endLine++;
firstUsedLine = GetFirstUsedLine(lines, i, search, searchNot, searchWrap, parameterCount);
firstUsedLine = GetFirstUsedLine(lines, i, search, searchNot, searchWrap, searchDelegate, searchConstructor, parameterCount);
if (firstUsedLine is null)
{
lineSegmentFirst = line.Split(search)[0];
@ -210,7 +229,7 @@ internal static partial class HelperVSCodePossibleExtension
private static bool WriteAllLines(string cSharpFile, string[] lines, ReadOnlyCollection<Method> methods)
{
bool result;
List<string> results = new();
List<string> results = [];
ReadOnlyCollection<int> methodLines = GetMethodLines(methods);
int minMethodLines = methodLines.Min();
for (int i = 0; i < minMethodLines; i++)
@ -249,18 +268,24 @@ internal static partial class HelperVSCodePossibleExtension
return result;
}
internal static void Sort(ILogger<Worker> logger, List<string> args)
internal static void SortCodeMethods(ILogger<Worker> logger, List<string> args, CancellationToken cancellationToken)
{
bool result = false;
bool check;
string[] lines;
bool usePathCombine = true;
long ticks = DateTime.Now.Ticks;
logger.LogInformation("{ticks}", ticks);
string[] cSharpFiles = Directory.GetFiles(args[0], "*.cs", SearchOption.TopDirectoryOnly);
string directory = Path.GetFullPath(args[2]);
string repositoryDirectory = Path.GetFullPath(args[0]);
string[] cSharpFiles = Directory.GetFiles(directory, "*.cs", SearchOption.AllDirectories);
ReadOnlyCollection<string> gitOthersModifiedAndDeletedExcludingStandardFiles = Helpers.HelperGit.GetOthersModifiedAndDeletedExcludingStandardFiles(repositoryDirectory, usePathCombine, cancellationToken);
for (int i = 0; i < 10; i++)
{
foreach (string cSharpFile in cSharpFiles)
{
if (!gitOthersModifiedAndDeletedExcludingStandardFiles.Contains(cSharpFile))
continue;
lines = File.ReadAllLines(cSharpFile);
check = SortFile(logger, cSharpFile, lines);
if (check && !result)

View File

@ -0,0 +1,83 @@
using Microsoft.Extensions.Logging;
using System.Text.Json;
using System.Text.Json.Serialization;
namespace File_Folder_Helper.Day;
internal static partial class Helper20240129
{
private record Record([property: JsonPropertyName("Part Name")] string? PartName,
[property: JsonPropertyName("Part Revision")] string? PartRevision,
[property: JsonPropertyName("Test Name")] string? TestName,
[property: JsonPropertyName("Description")] string? Description,
[property: JsonPropertyName("Lot Number")] string? LotNumber,
[property: JsonPropertyName("Job Name")] string? JobName,
[property: JsonPropertyName("Process Name")] string? ProcessName,
[property: JsonPropertyName("Reasonable Limit (Upper)")] double? ReasonableLimitUpper,
[property: JsonPropertyName("Alarm Reasonable Limit (Upper)")] double? AlarmReasonableLimitUpper,
[property: JsonPropertyName("Specification Limit (Upper)")] double? SpecificationLimitUpper,
[property: JsonPropertyName("Alarm Specification Limit (Upper)")] double? AlarmSpecificationLimitUpper,
[property: JsonPropertyName("Warning Limit (Upper)")] double? WarningLimitUpper,
[property: JsonPropertyName("Alarm Warning Limit (Upper)")] double? AlarmWarningLimitUpper,
[property: JsonPropertyName("Specification Limit (Target)")] double? SpecificationLimitTarget,
[property: JsonPropertyName("Warning Limit (Lower)")] double? WarningLimitLower,
[property: JsonPropertyName("Alarm Warning Limit (Lower)")] double? AlarmWarningLimitLower,
[property: JsonPropertyName("Specification Limit (Lower)")] double? SpecificationLimitLower,
[property: JsonPropertyName("Alarm Specification Limit (Lower)")] double? AlarmSpecificationLimitLower,
[property: JsonPropertyName("Reasonable Limit (Lower)")] double? ReasonableLimitLower,
[property: JsonPropertyName("Alarm Reasonable Limit (Lower)")] double? AlarmReasonableLimitLower,
[property: JsonPropertyName("Original Test Name")] string? OriginalTestName,
[property: JsonPropertyName("Test Id")] int? TestId,
[property: JsonPropertyName("count")] int? Count);
[JsonSourceGenerationOptions(WriteIndented = true, DefaultIgnoreCondition = JsonIgnoreCondition.WhenWritingNull)]
[JsonSerializable(typeof(Record[]))]
private partial class Helper20240129RecordCollectionSourceGenerationContext : JsonSerializerContext
{
}
private static List<string> GetLines(Record[] records)
{
List<string> lines = [];
lines.Add($"{nameof(Record.PartName)}\t{nameof(Record.PartRevision)}\t{nameof(Record.TestName)}\t{nameof(Record.Description)}\t{nameof(Record.LotNumber)}\t{nameof(Record.JobName)}\t{nameof(Record.ProcessName)}\t{nameof(Record.ReasonableLimitUpper)}\t{nameof(Record.AlarmReasonableLimitUpper)}\t{nameof(Record.SpecificationLimitUpper)}\t{nameof(Record.AlarmSpecificationLimitUpper)}\t{nameof(Record.WarningLimitUpper)}\t{nameof(Record.AlarmWarningLimitUpper)}\t{nameof(Record.SpecificationLimitTarget)}\t{nameof(Record.WarningLimitLower)}\t{nameof(Record.AlarmWarningLimitLower)}\t{nameof(Record.SpecificationLimitLower)}\t{nameof(Record.AlarmSpecificationLimitLower)}\t{nameof(Record.ReasonableLimitLower)}\t{nameof(Record.AlarmReasonableLimitLower)}\t{nameof(Record.OriginalTestName)}\t{nameof(Record.TestId)}\t{nameof(Record.Count)}");
foreach (Record record in records)
lines.Add($"{record.PartName}\t{record.PartRevision}\t{record.TestName}\t{record.Description}\t{record.LotNumber}\t{record.JobName}\t{record.ProcessName}\t{record.ReasonableLimitUpper}\t{record.AlarmReasonableLimitUpper}\t{record.SpecificationLimitUpper}\t{record.AlarmSpecificationLimitUpper}\t{record.WarningLimitUpper}\t{record.AlarmWarningLimitUpper}\t{record.SpecificationLimitTarget}\t{record.WarningLimitLower}\t{record.AlarmWarningLimitLower}\t{record.SpecificationLimitLower}\t{record.AlarmSpecificationLimitLower}\t{record.ReasonableLimitLower}\t{record.AlarmReasonableLimitLower}\t{record.OriginalTestName}\t{record.TestId}\t{record.Count}");
return lines;
}
private static void ConvertAndWrite(string pattern, string sourceDirectory)
{
long ticks;
string json;
string fileName;
string checkFile;
Record[]? records;
List<string> lines;
string[] files = Directory.GetFiles(sourceDirectory, pattern, SearchOption.TopDirectoryOnly);
foreach (string file in files)
{
ticks = DateTime.Now.Ticks;
json = File.ReadAllText(file);
fileName = Path.GetFileName(file);
checkFile = Path.Combine(sourceDirectory, $"{fileName}.{ticks}.tsv");
records = JsonSerializer.Deserialize(json, Helper20240129RecordCollectionSourceGenerationContext.Default.RecordArray);
if (records is null)
continue;
lines = GetLines(records);
File.WriteAllLines(checkFile, lines);
checkFile = Path.Combine(sourceDirectory, $"{fileName}.{ticks}.done");
File.Move(file, checkFile);
Thread.Sleep(100);
}
}
internal static void JsonToTsv(ILogger<Worker> logger, List<string> args)
{
string pattern = args[2];
string sourceDirectory = args[0];
logger.LogInformation(sourceDirectory);
ConvertAndWrite(pattern, sourceDirectory);
}
}

View File

@ -0,0 +1,58 @@
using Microsoft.Extensions.Logging;
using System.Globalization;
namespace File_Folder_Helper.Day;
internal static partial class Helper20240305
{
private static void TryArchiveFiles(string sourceDirectory, string pattern, string archiveDirectory, string days)
{
string checkFile;
FileInfo fileInfo;
string weekOfYear;
string checkDirectory;
string[] directorySegments;
Calendar calendar = new CultureInfo("en-US").Calendar;
DateTime dateTime = DateTime.Now.AddDays(-int.Parse(days));
string[] sourceDirectorySegments = sourceDirectory.Split('/');
// DirectoryInfo directoryInfo = new(Path.GetFullPath(sourceDirectory));
// IEnumerable<FileInfo> fileInfoCollection = directoryInfo.EnumerateFiles(pattern, new EnumerationOptions { IgnoreInaccessible = true, RecurseSubdirectories = true });
string[] files = Directory.GetFiles(sourceDirectory, pattern, SearchOption.AllDirectories);
if (!sourceDirectory.StartsWith('\\') && sourceDirectorySegments.Length < 2)
throw new Exception("Can't be root drive!");
// foreach (FileInfo fileInfo in fileInfoCollection)
foreach (string file in files)
{
fileInfo = new FileInfo(file);
if (string.IsNullOrEmpty(fileInfo.DirectoryName) || fileInfo.LastWriteTime > dateTime)
continue;
directorySegments = fileInfo.DirectoryName.Split(Path.DirectorySeparatorChar);
if (directorySegments.Length < sourceDirectorySegments.Length)
continue;
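// Mirror the file's directory (relative to sourceDirectory) under <archiveDirectory>/<CreationYear>_Week_<ww>.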
weekOfYear = $"{fileInfo.CreationTime.Year}_Week_{calendar.GetWeekOfYear(fileInfo.CreationTime, CalendarWeekRule.FirstDay, DayOfWeek.Sunday):00}";
// checkDirectory = string.Concat(archiveDirectory, Path.DirectorySeparatorChar, weekOfYear, Path.DirectorySeparatorChar, string.Join(Path.DirectorySeparatorChar, directorySegments.Skip(sourceDirectorySegments.Length)));
checkDirectory = string.Concat(archiveDirectory, Path.DirectorySeparatorChar, weekOfYear);
for (int i = sourceDirectorySegments.Length; i < directorySegments.Length; i++)
checkDirectory = string.Concat(checkDirectory, Path.DirectorySeparatorChar, directorySegments[i]);
if (!Directory.Exists(checkDirectory))
_ = Directory.CreateDirectory(checkDirectory);
checkFile = Path.Combine(checkDirectory, fileInfo.Name);
if (File.Exists(checkFile))
continue;
// File.WriteAllText(checkFile, string.Empty);
File.Move(fileInfo.FullName, checkFile);
}
}
internal static void ArchiveFiles(ILogger<Worker> logger, List<string> args)
{
string days = args[4];
string pattern = args[2];
string sourceDirectory = args[0];
string archiveDirectory = Path.GetFullPath(args[3]);
logger.LogInformation(sourceDirectory);
TryArchiveFiles(sourceDirectory, pattern, archiveDirectory, days);
}
}

View File

@ -0,0 +1,151 @@
using Microsoft.Extensions.Logging;
using System.Collections.ObjectModel;
namespace File_Folder_Helper.Day;
internal static partial class Helper20240403
{
private record DynamicHostConfigurationProtocolConfiguration(string[] Columns,
string Directory,
string Ignore,
int KeyIndex,
string Pattern,
string Primary);
private static string? GetMatch(DynamicHostConfigurationProtocolConfiguration dynamicHostConfigurationProtocolConfiguration, string[] files)
{
string? result = null;
foreach (string file in files)
{
if (file.EndsWith(dynamicHostConfigurationProtocolConfiguration.Primary))
result = file;
}
return result;
}
private static ReadOnlyDictionary<string, int> GetKeyToCounts(DynamicHostConfigurationProtocolConfiguration dynamicHostConfigurationProtocolConfiguration, string[] files)
{
Dictionary<string, int> results = [];
bool check;
string key;
string[] lines;
string[] segments;
List<int>? collection;
Dictionary<string, List<int>> keyValuePairs = [];
int columnsLength = dynamicHostConfigurationProtocolConfiguration.Columns.Length;
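// Rows are markdown-table lines ('|'-delimited); counting starts after the separator row (whose key cell is all dashes) and stops at the configured Ignore key.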
foreach (string file in files)
{
check = false;
lines = File.ReadAllLines(file);
foreach (string line in lines)
{
if (line.Length < 3 || line[0] != '|' || line[^1] != '|')
continue;
segments = line.Split('|');
if (segments.Length != columnsLength)
continue;
key = segments[dynamicHostConfigurationProtocolConfiguration.KeyIndex].Trim();
if (!check)
{
if (!key.StartsWith('-') || !key.EndsWith('-'))
continue;
check = true;
continue;
}
if (key == dynamicHostConfigurationProtocolConfiguration.Ignore)
break;
if (!keyValuePairs.TryGetValue(key, out collection))
{
keyValuePairs.Add(key, []);
if (!keyValuePairs.TryGetValue(key, out collection))
throw new Exception();
}
collection.Add(1);
}
}
foreach (KeyValuePair<string, List<int>> keyValuePair in keyValuePairs.OrderByDescending(l => l.Value.Count))
results.Add(keyValuePair.Key, keyValuePair.Value.Count);
return new(results);
}
private static ReadOnlyDictionary<string, ReadOnlyDictionary<string, ReadOnlyCollection<string>>> GetKeyValuePairs(DynamicHostConfigurationProtocolConfiguration dynamicHostConfigurationProtocolConfiguration, string[] files)
{
ReadOnlyDictionary<string, ReadOnlyDictionary<string, ReadOnlyCollection<string>>> results;
bool check;
string key;
string[] lines;
string[] segments;
List<string> keys = [];
Dictionary<string, ReadOnlyCollection<string>> keyValuePairs;
Dictionary<string, ReadOnlyDictionary<string, ReadOnlyCollection<string>>> fileTo = [];
int columnsLength = dynamicHostConfigurationProtocolConfiguration.Columns.Length;
foreach (string file in files)
{
keys.Clear();
check = false;
keyValuePairs = [];
lines = File.ReadAllLines(file);
foreach (string line in lines)
{
if (line.Length < 3 || line[0] != '|' || line[^1] != '|')
continue;
segments = line.Split('|');
if (segments.Length != columnsLength)
continue;
key = segments[dynamicHostConfigurationProtocolConfiguration.KeyIndex].Trim();
if (!check)
{
if (!key.StartsWith('-') || !key.EndsWith('-'))
continue;
check = true;
continue;
}
if (key == dynamicHostConfigurationProtocolConfiguration.Ignore)
break;
if (keys.Contains(key))
throw new NotSupportedException($"{key}: is a duplicate!");
keys.Add(key);
keyValuePairs.Add(key, new(segments.Select(l => l.Trim()).ToArray()));
}
fileTo.Add(Path.GetFullPath(file), new(keyValuePairs));
}
results = new(fileTo);
return results;
}
private static void AlertIfNewDeviceIsConnected(DynamicHostConfigurationProtocolConfiguration dynamicHostConfigurationProtocolConfiguration, ILogger<Worker> logger)
{
string[] files = Directory.GetFiles(dynamicHostConfigurationProtocolConfiguration.Directory, dynamicHostConfigurationProtocolConfiguration.Pattern, SearchOption.TopDirectoryOnly);
string? match = GetMatch(dynamicHostConfigurationProtocolConfiguration, files);
if (string.IsNullOrEmpty(match))
throw new NotSupportedException($"{dynamicHostConfigurationProtocolConfiguration.Primary} doesn't exist!");
ReadOnlyDictionary<string, int> keyToCounts = GetKeyToCounts(dynamicHostConfigurationProtocolConfiguration, files);
foreach (KeyValuePair<string, int> keyToCount in keyToCounts)
{
if (keyToCount.Value < 2)
continue;
logger.LogInformation("{Key}: {Count}", keyToCount.Key, keyToCount.Value);
}
ReadOnlyDictionary<string, ReadOnlyDictionary<string, ReadOnlyCollection<string>>> keyValuePairs = GetKeyValuePairs(dynamicHostConfigurationProtocolConfiguration, files);
foreach (KeyValuePair<string, ReadOnlyDictionary<string, ReadOnlyCollection<string>>> keyValuePair in keyValuePairs)
{
if (!keyValuePair.Key.EndsWith(dynamicHostConfigurationProtocolConfiguration.Primary))
continue;
}
}
internal static void AlertIfNewDeviceIsConnected(ILogger<Worker> logger, List<string> args)
{
string ignore = args[6];
string pattern = args[2];
string primary = args[3];
string keyIndex = args[5];
string directory = args[0];
logger.LogInformation(directory);
string[] columns = args[4].Split('|');
DynamicHostConfigurationProtocolConfiguration dynamicHostConfigurationProtocolConfiguration = new(columns, directory, ignore, int.Parse(keyIndex), pattern, primary);
AlertIfNewDeviceIsConnected(dynamicHostConfigurationProtocolConfiguration, logger);
}
}

View File

@ -0,0 +1,96 @@
using Microsoft.Extensions.Logging;
using System.Text.RegularExpressions;
namespace File_Folder_Helper.Day;
internal static partial class Helper20240404
{
private record MetaData(int DeviceId,
string DeviceType,
int DeviceNumber,
string DescriptionName,
string DescriptionTest,
string Frequency,
string Date);
private record CommaSeparatedValuesConfiguration(int Columns,
string Directory,
int TestIndex,
int RegularExpressionGroupCount,
string RegularExpressionPattern,
string SearchPattern);
private static MetaData? GetMetaData(CommaSeparatedValuesConfiguration commaSeparatedValuesConfiguration, string fileNameWithoutExtension)
{
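// The configured pattern must expose the named groups DeviceId, DeviceType, DeviceNumber, DescriptionName, DescriptionTest, Frequency and Date.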
MetaData? result;
Match match = Regex.Match(fileNameWithoutExtension, commaSeparatedValuesConfiguration.RegularExpressionPattern);
if (!match.Success || match.Groups.Count != commaSeparatedValuesConfiguration.RegularExpressionGroupCount)
result = null;
else
{
int deviceId = int.Parse(match.Groups["DeviceId"].Value);
int deviceNumber = int.Parse(match.Groups["DeviceNumber"].Value);
result = new(deviceId,
match.Groups["DeviceType"].Value,
deviceNumber,
match.Groups["DescriptionName"].Value,
match.Groups["DescriptionTest"].Value,
match.Groups["Frequency"].Value,
match.Groups["Date"].Value);
}
return result;
}
private static void ParseCSV(CommaSeparatedValuesConfiguration commaSeparatedValuesConfiguration, ILogger<Worker> logger)
{
string line;
string test;
string[] lines;
string[] columns;
MetaData? metaData;
string fileNameWithoutExtension;
string[] files = Directory.GetFiles(commaSeparatedValuesConfiguration.Directory, commaSeparatedValuesConfiguration.SearchPattern, SearchOption.TopDirectoryOnly);
logger.LogInformation(files.Length.ToString());
foreach (string file in files)
{
fileNameWithoutExtension = Path.GetFileNameWithoutExtension(file);
metaData = GetMetaData(commaSeparatedValuesConfiguration, fileNameWithoutExtension);
if (metaData is null)
continue;
lines = File.ReadAllLines(file);
if (lines.Length < 2)
continue;
line = lines[0];
columns = line.Split('"');
if (columns.Length != commaSeparatedValuesConfiguration.Columns)
continue;
test = columns[commaSeparatedValuesConfiguration.TestIndex].Trim().Trim('"');
if (test.IndexOf(metaData.DescriptionTest, 0, StringComparison.CurrentCultureIgnoreCase) == -1)
continue;
for (int i = 1; i < lines.Length; i++)
{
line = lines[i];
if (line.Length < 1)
continue;
columns = line.Split(',').Select(l => l.Trim().Trim('"')).ToArray();
if (columns.Length != commaSeparatedValuesConfiguration.Columns)
continue;
}
}
}
internal static void ParseCSV(ILogger<Worker> logger, List<string> args)
{
string directory = args[0];
string regularExpressionPattern = args[2];
string regularExpressionGroupCount = args[3];
string searchPattern = args[4];
string columns = args[5];
string testIndex = args[6];
logger.LogInformation(directory);
CommaSeparatedValuesConfiguration commaSeparatedValuesConfiguration = new(int.Parse(columns), directory, int.Parse(testIndex), int.Parse(regularExpressionGroupCount), regularExpressionPattern, searchPattern);
ParseCSV(commaSeparatedValuesConfiguration, logger);
}
}

View File

@ -0,0 +1,91 @@
using Microsoft.Extensions.Logging;
using System.Text.Json;
using System.Text.Json.Serialization;
namespace File_Folder_Helper.Day;
internal static partial class Helper20240409
{
internal record FsSize( // cSpell:disable
[property: JsonPropertyName("name")] string Name,
[property: JsonPropertyName("object")] string Object,
[property: JsonPropertyName("pmon")] string PMon,
[property: JsonPropertyName("unit")] string Unit,
[property: JsonPropertyName("timeresolution")] string TimeResolution,
[property: JsonPropertyName("aggr")] string Aggr,
[property: JsonPropertyName("data")] List<double[]> Data
); // cSpell:restore
[JsonSourceGenerationOptions(WriteIndented = true, AllowTrailingCommas = true)]
[JsonSerializable(typeof(FsSize))]
internal partial class FsSizeSourceGenerationContext : JsonSerializerContext
{
}
internal static void MonA(ILogger<Worker> logger, List<string> args)
{
string url;
FsSize? fsSize;
string[] segments;
Task<string> json;
string jsonSection;
string urlBase = args[3];
string selector = args[6];
string urlPartA = args[4];
string urlPartC = args[5];
string directory = args[0];
string aggregation = args[8];
int skip = int.Parse(args[7]);
string searchPattern = args[2];
string fileNameWithoutExtension;
string[] ignore = args[9].Split(',');
Task<HttpResponseMessage> httpResponseMessage;
string[] files = Directory.GetFiles(directory, searchPattern, SearchOption.TopDirectoryOnly);
HttpClient httpClient = new(new HttpClientHandler { UseCookies = false }) { BaseAddress = new Uri(urlBase) };
logger.LogInformation("{directory} has {files}(s)", directory, files.Length);
foreach (string file in files)
{
fileNameWithoutExtension = Path.GetFileNameWithoutExtension(file);
if (ignore.Contains(fileNameWithoutExtension))
continue;
url = $"{httpClient.BaseAddress}{urlPartA}{fileNameWithoutExtension}{urlPartC}";
httpResponseMessage = httpClient.GetAsync(url);
httpResponseMessage.Wait();
if (!httpResponseMessage.Result.IsSuccessStatusCode)
{
logger.LogWarning("{StatusCode} for {url}", httpResponseMessage.Result.StatusCode, url);
continue;
}
json = httpResponseMessage.Result.Content.ReadAsStringAsync();
json.Wait();
if (json.Result.Length < 3)
{
logger.LogInformation("{Size} | {Julian} | {PMon} | {FileNameWithoutExtension}", -1, -1, "{}", fileNameWithoutExtension);
continue;
}
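// Each monitor's JSON object follows the "<file name><selector>" marker; skip the configured number of characters past the marker and trim the trailing character left over from the split.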
segments = json.Result.Split($"\"{fileNameWithoutExtension}{selector}");
if (segments.Length < 2)
{
logger.LogInformation("{Size} | {Julian} | {PMon} | {FileNameWithoutExtension}", -1, -1, selector, fileNameWithoutExtension);
continue;
}
for (int i = 1; i < segments.Length; i++)
{
jsonSection = segments[i][skip..][..^1];
fsSize = JsonSerializer.Deserialize(jsonSection, FsSizeSourceGenerationContext.Default.FsSize);
if (fsSize is null)
continue;
if (fsSize.Aggr != aggregation)
continue;
if (fsSize.Data.Count == 0 || fsSize.Data[0].Length == 0)
{
logger.LogInformation("{Size} | {Julian} | {PMon} | {FileNameWithoutExtension}", -1, -1, nameof(FsSize.Data), fileNameWithoutExtension);
continue;
}
logger.LogInformation("{Size} | {Julian} | {PMon} | {FileNameWithoutExtension}", fsSize.Data[0][1].ToString().PadLeft(20, '0'), fsSize.Data[0][0], fsSize.PMon, fileNameWithoutExtension);
}
}
}
}

View File

@ -0,0 +1,62 @@
using File_Folder_Helper.Helpers;
using Microsoft.Extensions.Logging;
using System.Collections.ObjectModel;
namespace File_Folder_Helper.Day;
internal static partial class Helper20240417
{
private static void RunCommand(ILogger<Worker> logger, string directory, string commandFileName, List<string> files, int before, CancellationToken cancellationToken)
{
bool usePathCombine = false;
string command = $"npx eclint fix {string.Join(' ', files)}";
string output = HelperNPM.RunCommand(commandFileName, command, directory);
logger.LogInformation(output);
if (output.Contains("error", StringComparison.OrdinalIgnoreCase))
{
Environment.ExitCode = 2;
logger.LogCritical("Setting exit code {ExitCode}", Environment.ExitCode);
throw new Exception(output);
}
ReadOnlyCollection<string> afterGitOthersModifiedAndDeletedExcludingStandardFiles = HelperGit.GetOthersModifiedAndDeletedExcludingStandardFiles(directory, usePathCombine, cancellationToken);
int after = afterGitOthersModifiedAndDeletedExcludingStandardFiles.Count;
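// If eclint changed anything, the count of untracked/modified files grows; fail with exit code 1 and list the offending file names.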
if (before != after)
{
List<string> fileNames = [];
foreach (string file in afterGitOthersModifiedAndDeletedExcludingStandardFiles)
{
if (file.Contains(' '))
continue;
if (files.Contains(file))
continue;
fileNames.Add(Path.GetFileName(file));
}
Environment.ExitCode = 1;
throw new Exception($"Files were modified exiting with exit code {Environment.ExitCode}!{Environment.NewLine}{string.Join(Environment.NewLine, fileNames)}");
}
}
internal static void FilteredRunCommand(ILogger<Worker> logger, List<string> args, CancellationToken cancellationToken)
{
List<string> files = [];
string directory = args[0];
bool usePathCombine = false;
string commandFileName = args[2];
ReadOnlyCollection<string> gitOthersModifiedAndDeletedExcludingStandardFiles = HelperGit.GetOthersModifiedAndDeletedExcludingStandardFiles(directory, usePathCombine, cancellationToken);
int before = gitOthersModifiedAndDeletedExcludingStandardFiles.Count;
foreach (string file in gitOthersModifiedAndDeletedExcludingStandardFiles)
{
if (file.Contains(' '))
{
logger.LogInformation("Skipping <{File}>", file);
continue;
}
files.Add(file);
}
logger.LogInformation("{directory} has {files} file(s)", directory, before);
if (files.Count > 0)
RunCommand(logger, directory, commandFileName, files, before, cancellationToken);
}
}

View File

@ -0,0 +1,93 @@
using Microsoft.Extensions.Logging;
using System.Text;
namespace File_Folder_Helper.Day;
internal static partial class Helper20240426
{
private static void Write(string toA, string toB, string matchA, string matchB, string file, List<string> lines)
{
StringBuilder stringBuilder = new();
foreach (string line in lines)
_ = stringBuilder.AppendLine(line.Replace(matchB, toB));
string checkFile = file.Replace(matchA, toA);
_ = Directory.CreateDirectory(Path.GetDirectoryName(checkFile) ?? throw new NullReferenceException());
if (!File.Exists(checkFile))
File.WriteAllText(checkFile, stringBuilder.ToString());
}
private static void UpdateTestFiles(ILogger<Worker> logger, string to, string match, string staticDirectoryName, List<string> files)
{
List<string> lines;
string directoryName;
string firstLineCheck;
string lastLineCheck = "#endif";
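// Wrap each test file in #if <DIRECTORY_NAME> ... #endif (dots become underscores) so it only compiles for the matching configuration; files already wrapped, starting with #if true, or containing only comments are left unchanged.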
foreach (string file in files)
{
directoryName = Path.GetFileName(Path.GetDirectoryName(file) ?? throw new NullReferenceException(nameof(directoryName)));
if (directoryName == staticDirectoryName)
continue;
lines = File.ReadAllLines(file).ToList();
if (directoryName == match)
Write(to, to.Replace('.', '_'), match, match.Replace('.', '_'), file, lines);
if (lines.Count < 3)
continue;
fistLineCheck = $"#if {directoryName.Replace('.', '_')}";
if (lines[0].StartsWith("#if true") || lines[0].StartsWith(fistLineCheck))
continue;
for (int i = lines.Count - 1; i > 0; i--)
{
if (!string.IsNullOrEmpty(lines[i]))
break;
lines.RemoveAt(i);
}
if (lines[^1].StartsWith(lastLineCheck))
continue;
if (lines.All(l => string.IsNullOrEmpty(l) || l.StartsWith("//")))
continue;
lines.Add(lastLineCheck);
lines.Insert(0, firstLineCheck);
File.WriteAllText(file, string.Join(Environment.NewLine, lines));
logger.LogDebug(file);
}
}
private static void UpdateProjectTests(ILogger<Worker> logger, string to, string match, string searchPattern, string staticDirectoryName, string extractDirectoryName, string createSelfDescriptionDirectoryName, string sourceCodeDirectory)
{
List<string> files = [];
List<string> knownDirectories = [];
knownDirectories.AddRange(Directory.GetDirectories(sourceCodeDirectory, createSelfDescriptionDirectoryName, SearchOption.AllDirectories));
knownDirectories.AddRange(Directory.GetDirectories(sourceCodeDirectory, extractDirectoryName, SearchOption.AllDirectories));
knownDirectories.AddRange(Directory.GetDirectories(sourceCodeDirectory, staticDirectoryName, SearchOption.AllDirectories));
foreach (string knownDirectory in knownDirectories)
files.AddRange(Directory.GetFiles(knownDirectory, searchPattern, SearchOption.AllDirectories));
logger.LogInformation("{directory} has {files} file(s)", sourceCodeDirectory, files.Count);
UpdateTestFiles(logger, to, match, staticDirectoryName, files);
}
internal static void UpdateTests(ILogger<Worker> logger, List<string> args)
{
string to = args[9];
string match = args[8];
string searchPattern = args[7];
string[] sourceCodeDirectories;
string staticDirectoryName = args[6];
string extractDirectoryName = args[5];
string sourceCodeDirectoryName = args[3];
string directory = Path.GetFullPath(args[0]);
string createSelfDescriptionDirectoryName = args[4];
string systemVolumeInformation = Path.Combine(directory, args[2]);
string[] subDirectories = Directory.GetDirectories(directory, "*", SearchOption.TopDirectoryOnly);
foreach (string subDirectory in subDirectories)
{
if (subDirectory == systemVolumeInformation)
continue;
sourceCodeDirectories = Directory.GetDirectories(subDirectory, sourceCodeDirectoryName, SearchOption.AllDirectories);
if (sourceCodeDirectories.Length != 1)
continue;
UpdateProjectTests(logger, to, match, searchPattern, staticDirectoryName, extractDirectoryName, createSelfDescriptionDirectoryName, sourceCodeDirectories[0]);
}
}
}

View File

@ -0,0 +1,145 @@
using File_Folder_Helper.Helpers;
using Microsoft.Extensions.Logging;
using System.Text.Json;
using System.Text.Json.Serialization;
namespace File_Folder_Helper.Day;
internal static partial class Helper20240427
{
internal record Asset( // cSpell:disable
[property: JsonPropertyName("id")] string? Id,
[property: JsonPropertyName("deviceAssetId")] string? DeviceAssetId,
[property: JsonPropertyName("ownerId")] string? OwnerId,
[property: JsonPropertyName("deviceId")] string? DeviceId,
[property: JsonPropertyName("type")] string? Type,
[property: JsonPropertyName("originalPath")] string? OriginalPath,
[property: JsonPropertyName("previewPath")] string? PreviewPath,
[property: JsonPropertyName("fileCreatedAt")] DateTime? FileCreatedAt,
[property: JsonPropertyName("fileModifiedAt")] DateTime? FileModifiedAt,
[property: JsonPropertyName("isFavorite")] bool? IsFavorite,
[property: JsonPropertyName("duration")] object? Duration,
[property: JsonPropertyName("thumbnailPath")] string? ThumbnailPath,
[property: JsonPropertyName("encodedVideoPath")] string? EncodedVideoPath,
[property: JsonPropertyName("checksum")] string? Checksum,
[property: JsonPropertyName("isVisible")] bool? IsVisible,
[property: JsonPropertyName("livePhotoVideoId")] object? LivePhotoVideoId,
[property: JsonPropertyName("updatedAt")] DateTime? UpdatedAt,
[property: JsonPropertyName("createdAt")] DateTime? CreatedAt,
[property: JsonPropertyName("isArchived")] bool? IsArchived,
[property: JsonPropertyName("originalFileName")] string? OriginalFileName,
[property: JsonPropertyName("sidecarPath")] string? SidecarPath,
[property: JsonPropertyName("isReadOnly")] bool? IsReadOnly,
[property: JsonPropertyName("thumbhash")] string? Thumbhash,
[property: JsonPropertyName("isOffline")] bool? IsOffline,
[property: JsonPropertyName("libraryId")] string? LibraryId,
[property: JsonPropertyName("isExternal")] bool? IsExternal,
[property: JsonPropertyName("deletedAt")] DateTime? DeletedAt,
[property: JsonPropertyName("localDateTime")] DateTime? LocalDateTime,
[property: JsonPropertyName("stackId")] object? StackId); // cSpell:restore
[JsonSourceGenerationOptions(WriteIndented = true, AllowTrailingCommas = true)]
[JsonSerializable(typeof(List<Asset>))]
internal partial class AssetCollectionSourceGenerationContext : JsonSerializerContext
{
}
private static string? MoveAsset(string home, string originalFileName, string lastVarDirectoryName, string path)
{
string? result;
string checkFile;
string sourceFile;
int? match = null;
string checkDirectory = home;
string sourceDirectory = home;
string originalFileNameWithoutExtension = Path.GetFileNameWithoutExtension(originalFileName);
List<string> directoryNames = HelperDirectory.GetDirectoryNames(path);
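// Map the cached preview/thumbnail path onto the home tree: find the segment matching the last directory name of home, mirror the remaining directory segments under home, and rename the file there to the asset's original name plus the trailing '-'-delimited segment of the cached name.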
for (int i = 0; i < directoryNames.Count; i++)
{
if (directoryNames[i] != lastVarDirectoryName)
continue;
match = i;
break;
}
if (match is null)
result = null;
else
{
string[] fileNameSegments = directoryNames[^1].Split('-');
foreach (string directory in directoryNames.Skip(match.Value + 1).Take(directoryNames.Count - match.Value - 2))
sourceDirectory = Path.Combine(sourceDirectory, directory);
if (!Directory.Exists(sourceDirectory))
result = null;
else
{
sourceFile = Path.Combine(sourceDirectory, directoryNames[^1]);
if (!File.Exists(sourceFile))
result = null;
else
{
foreach (string directory in directoryNames.Skip(match.Value + 1).Take(directoryNames.Count - match.Value - 2))
checkDirectory = Path.Combine(checkDirectory, directory);
if (!Directory.Exists(checkDirectory))
_ = Directory.CreateDirectory(checkDirectory);
checkFile = Path.Combine(checkDirectory, $"{originalFileNameWithoutExtension}{fileNameSegments[^1]}");
if (File.Exists(checkFile))
result = null;
else
{
File.Move(sourceFile, checkFile);
result = checkFile;
}
}
}
}
return result;
}
private static void MoveAssets(ILogger<Worker> logger, string var, string home, string pictures, List<Asset> assets)
{
string? checkFile;
List<string> varDirectoryNames = HelperDirectory.GetDirectoryNames(home);
string lastVarDirectoryName = varDirectoryNames[^1];
foreach (Asset asset in assets)
{
if (asset.OriginalFileName is null)
continue;
if (asset.Type is null or not "IMAGE")
continue;
if (asset.PreviewPath is null || !asset.PreviewPath.StartsWith(var))
continue;
if (asset.ThumbnailPath is null || !asset.ThumbnailPath.StartsWith(var))
continue;
if (asset.OriginalPath is null || !asset.OriginalPath.StartsWith(pictures))
continue;
checkFile = MoveAsset(home, asset.OriginalFileName, lastVarDirectoryName, asset.PreviewPath);
if (checkFile is null)
continue;
checkFile = MoveAsset(home, asset.OriginalFileName, lastVarDirectoryName, asset.ThumbnailPath);
if (checkFile is null)
continue;
logger.LogDebug("<{OriginalFileName}> moved.", asset.OriginalFileName);
}
}
#pragma warning restore IDE0055, IDE0059, IDE0060, IDE2000, CS0168
internal static void Immich(Models.AppSettings appSettings, ILogger<Worker> logger, List<string> args)
{
string var = args[5];
string home = args[6];
string query = args[3];
string jsonFile = args[4];
string pictures = args[7];
string directory = args[0];
logger.LogInformation(query);
string databasePassword = args[2];
string json = File.ReadAllText(Path.Combine(directory, jsonFile));
List<Asset> assets = JsonSerializer.Deserialize(json, AssetCollectionSourceGenerationContext.Default.ListAsset) ?? throw new NullReferenceException();
string encryptedPassword = RijndaelEncryption.Encrypt(databasePassword, appSettings.Company);
MoveAssets(logger, var, home, pictures, assets);
logger.LogInformation("Encrypted Password: [{EncryptedPassword}]", encryptedPassword);
}
}

View File

@ -0,0 +1,56 @@
using Microsoft.Extensions.Logging;
using System.Diagnostics;
namespace File_Folder_Helper.Day;
internal static partial class Helper20240429
{
internal static void GitConfigCleanUp(ILogger<Worker> logger, List<string> args)
{
string[] files;
Process? process;
string? directory;
string standardError;
string ignoreError = args[5];
string searchPattern = args[3];
ProcessStartInfo processStartInfo;
string root = Path.GetFullPath(args[0]);
string[] removeRemotes = args[4].Split(',');
string systemVolumeInformation = Path.Combine(root, args[2]);
string[] subDirectories = Directory.GetDirectories(root, "*", SearchOption.TopDirectoryOnly);
foreach (string subDirectory in subDirectories)
{
if (subDirectory == systemVolumeInformation)
continue;
files = Directory.GetFiles(subDirectory, searchPattern, SearchOption.AllDirectories);
foreach (string file in files)
{
directory = Path.GetDirectoryName(file);
if (directory is null)
continue;
foreach (string removeRemote in removeRemotes)
{
processStartInfo = new()
{
FileName = "git",
WorkingDirectory = directory,
Arguments = $"remote rm {removeRemote}",
RedirectStandardError = true
};
process = Process.Start(processStartInfo);
if (process is null)
continue;
#pragma warning disable IDE0058
process.WaitForExit(7000);
#pragma warning restore IDE0058
standardError = process.StandardError.ReadToEnd();
if (!standardError.Contains(ignoreError))
logger.LogInformation(standardError);
logger.LogInformation("for <{directoryName}> remote rm {removeRemote}", directory, removeRemote);
}
}
}
}
}
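GitConfigCleanUp shells out to git once per remote per repository; a minimal standalone sketch of that single call, with a hypothetical working copy and remote name, mirroring the error handling above.

using System;
using System.Diagnostics;

ProcessStartInfo processStartInfo = new()
{
    FileName = "git",
    WorkingDirectory = @"C:\repos\example", // hypothetical repository working copy
    Arguments = "remote rm backup",         // hypothetical remote to drop
    RedirectStandardError = true
};
Process? process = Process.Start(processStartInfo);
if (process is not null)
{
    _ = process.WaitForExit(7000);          // same seven second cap as above
    string standardError = process.StandardError.ReadToEnd();
    if (standardError.Length > 0)
        Console.WriteLine(standardError);   // the helper suppresses lines containing the ignore text
}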

View File

@ -0,0 +1,34 @@
using Microsoft.Extensions.Logging;
namespace File_Folder_Helper.Day;
internal static partial class Helper20240510
{
internal static void PullIconsForBLM(ILogger<Worker> logger, List<string> args)
{
string fileName;
FileInfo fileInfo;
string searchPattern = args[4];
string sourceDirectory = args[3];
string root = Path.GetFullPath(args[0]);
string createDirectory = Path.Combine(root, args[2]);
if (!Directory.Exists(createDirectory))
_ = Directory.CreateDirectory(createDirectory);
string[] files = Directory.GetFiles(sourceDirectory, searchPattern, SearchOption.TopDirectoryOnly);
foreach (string file in files)
{
fileName = Path.GetFileName(file);
fileInfo = new(Path.Combine(createDirectory, fileName));
if (fileInfo.Exists && fileInfo.LastWriteTime == new FileInfo(file).LastWriteTime)
continue;
File.Copy(file, fileInfo.FullName, overwrite: true);
logger.LogInformation("<{fileName}> copied", fileName);
}
logger.LogWarning("What reactor is this near?");
string? reactor = Console.ReadLine();
if (!string.IsNullOrEmpty(reactor))
_ = Directory.CreateDirectory(Path.Combine(sourceDirectory, Environment.MachineName, reactor));
}
}
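PullIconsForBLM only copies when the destination is missing or its timestamp differs from the source; the same check in isolation, with hypothetical paths.

using System.IO;

FileInfo source = new(@"C:\icons\source\reactor.png");
FileInfo target = new(@"C:\icons\blm\reactor.png");
bool skip = target.Exists && target.LastWriteTime == source.LastWriteTime;
if (!skip)
    File.Copy(source.FullName, target.FullName, overwrite: true);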

View File

@ -0,0 +1,39 @@
using File_Folder_Helper.Models;
using Microsoft.Extensions.Logging;
using System.Text.Json;
namespace File_Folder_Helper.Day;
internal static partial class Helper20240513
{
internal static void PersonKeyToName(ILogger<Worker> logger, List<string> args)
{
Person? person;
string directoryName;
string checkDirectory;
string root = Path.GetFullPath(args[0]);
string json = File.ReadAllText(args[2]);
Dictionary<string, Person> keyValuePairs = [];
string[] directories = Directory.GetDirectories(root, "*", SearchOption.TopDirectoryOnly);
Dictionary<long, Person> people = JsonSerializer.Deserialize(json, PeopleSourceGenerationContext.Default.DictionaryInt64Person) ?? throw new NullReferenceException();
foreach (KeyValuePair<long, Person> keyValuePair in people)
{
if (keyValuePair.Value.Birth?.Note is null)
continue;
keyValuePairs.Add(keyValuePair.Value.Birth.Note, keyValuePair.Value);
}
foreach (string directory in directories)
{
directoryName = Path.GetFileName(directory);
if (!keyValuePairs.TryGetValue(directoryName, out person) || person.Name?.ForwardSlashFull is null)
continue;
checkDirectory = Path.Combine(root, $"{person.Name.ForwardSlashFull.Replace('/', '-')}{directoryName}-{person.Id}");
if (Directory.Exists(checkDirectory))
continue;
Directory.Move(directory, checkDirectory);
logger.LogInformation("<{directory}> was moved", directory);
}
}
}
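PersonKeyToName matches a directory name against Birth.Note and folds the person's display name and id into the new directory name; a small sketch of that composition with invented values.

string forwardSlashFull = "John/Doe";        // hypothetical Name.ForwardSlashFull
string directoryName = "636000000000000000"; // hypothetical directory named by the Birth.Note key
long id = 42;                                // hypothetical person id
string checkDirectory = $"{forwardSlashFull.Replace('/', '-')}{directoryName}-{id}";
// checkDirectory == "John-Doe636000000000000000-42"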

View File

@ -0,0 +1,334 @@
using Microsoft.Extensions.Logging;
using System.Collections.ObjectModel;
using System.Text.Json;
using System.Text.Json.Serialization;
namespace File_Folder_Helper.Day;
internal static partial class Helper20240517
{
public record ContentSignature([property: JsonPropertyName("contentSignature")] string Value,
[property: JsonPropertyName("contentSignatureType")] string ContentSignatureType);
[JsonSourceGenerationOptions(WriteIndented = true)]
[JsonSerializable(typeof(ContentSignature))]
public partial class ContentSignatureGenerationContext : JsonSerializerContext
{
}
public record Type([property: JsonPropertyName("count")] int Count,
[property: JsonPropertyName("match")] string Match,
[property: JsonPropertyName("searchData")] SearchData SearchData);
[JsonSourceGenerationOptions(WriteIndented = true)]
[JsonSerializable(typeof(Type))]
public partial class TypeGenerationContext : JsonSerializerContext
{
}
public record ImageAmazon([property: JsonPropertyName("colorSpace")] string ColorSpace,
[property: JsonPropertyName("dateTime")] DateTime DateTime,
[property: JsonPropertyName("dateTimeDigitized")] DateTime DateTimeDigitized,
[property: JsonPropertyName("dateTimeOriginal")] DateTime DateTimeOriginal,
[property: JsonPropertyName("exposureMode")] string ExposureMode,
[property: JsonPropertyName("exposureProgram")] string ExposureProgram,
[property: JsonPropertyName("exposureTime")] string ExposureTime,
[property: JsonPropertyName("flash")] string Flash,
[property: JsonPropertyName("focalLength")] string FocalLength,
[property: JsonPropertyName("height")] int Height,
[property: JsonPropertyName("make")] string Make,
[property: JsonPropertyName("meteringMode")] string MeteringMode,
[property: JsonPropertyName("model")] string Model,
[property: JsonPropertyName("orientation")] string Orientation,
[property: JsonPropertyName("resolutionUnit")] string ResolutionUnit,
[property: JsonPropertyName("sensingMethod")] string SensingMethod,
[property: JsonPropertyName("sharpness")] string Sharpness,
[property: JsonPropertyName("software")] string Software,
[property: JsonPropertyName("subSecTime")] string SubSecTime,
[property: JsonPropertyName("subSecTimeDigitized")] string SubSecTimeDigitized,
[property: JsonPropertyName("subSecTimeOriginal")] string SubSecTimeOriginal,
[property: JsonPropertyName("whiteBalance")] string WhiteBalance,
[property: JsonPropertyName("width")] int Width,
[property: JsonPropertyName("apertureValue")] string ApertureValue);
[JsonSourceGenerationOptions(WriteIndented = true)]
[JsonSerializable(typeof(ImageAmazon))]
public partial class ImageAmazonGenerationContext : JsonSerializerContext
{
}
public record ContentProperties([property: JsonPropertyName("contentDate")] DateTime ContentDate,
[property: JsonPropertyName("contentSignatures")] IReadOnlyList<ContentSignature> ContentSignatures,
[property: JsonPropertyName("contentType")] string ContentType,
[property: JsonPropertyName("extension")] string Extension,
[property: JsonPropertyName("image")] ImageAmazon Image,
[property: JsonPropertyName("md5")] string Md5,
[property: JsonPropertyName("size")] int Size,
[property: JsonPropertyName("version")] int Version);
[JsonSourceGenerationOptions(WriteIndented = true)]
[JsonSerializable(typeof(ContentProperties))]
public partial class ContentPropertiesGenerationContext : JsonSerializerContext
{
}
public record XAccntParentMap();
[JsonSourceGenerationOptions(WriteIndented = true)]
[JsonSerializable(typeof(XAccntParentMap))]
public partial class XAccntParentMapGenerationContext : JsonSerializerContext
{
}
public record Datum([property: JsonPropertyName("accessRuleIds")] IReadOnlyList<object> AccessRuleIds,
[property: JsonPropertyName("childAssetTypeInfo")] IReadOnlyList<object> ChildAssetTypeInfo,
[property: JsonPropertyName("contentProperties")] ContentProperties ContentProperties,
[property: JsonPropertyName("createdBy")] string CreatedBy,
[property: JsonPropertyName("createdDate")] DateTime CreatedDate,
[property: JsonPropertyName("eTagResponse")] string ETagResponse,
[property: JsonPropertyName("groupPermissions")] IReadOnlyList<object> GroupPermissions,
[property: JsonPropertyName("id")] string Id,
[property: JsonPropertyName("isRoot")] bool IsRoot,
[property: JsonPropertyName("isShared")] bool IsShared,
[property: JsonPropertyName("keywords")] IReadOnlyList<object> Keywords,
[property: JsonPropertyName("kind")] string Kind,
[property: JsonPropertyName("labels")] IReadOnlyList<object> Labels,
[property: JsonPropertyName("modifiedDate")] DateTime ModifiedDate,
[property: JsonPropertyName("name")] string Name,
[property: JsonPropertyName("ownerId")] string OwnerId,
[property: JsonPropertyName("parentMap")] ParentMap ParentMap,
[property: JsonPropertyName("parents")] IReadOnlyList<string> Parents,
[property: JsonPropertyName("protectedFolder")] bool ProtectedFolder,
[property: JsonPropertyName("restricted")] bool Restricted,
[property: JsonPropertyName("status")] string Status,
[property: JsonPropertyName("subKinds")] IReadOnlyList<object> SubKinds,
[property: JsonPropertyName("transforms")] IReadOnlyList<string> Transforms,
[property: JsonPropertyName("version")] int Version,
[property: JsonPropertyName("xAccntParentMap")] XAccntParentMap XAccntParentMap,
[property: JsonPropertyName("xAccntParents")] IReadOnlyList<object> XAccntParents,
[property: JsonPropertyName("match")] bool? Match);
[JsonSourceGenerationOptions(WriteIndented = true)]
[JsonSerializable(typeof(Datum))]
public partial class DatumGenerationContext : JsonSerializerContext
{
}
[JsonSourceGenerationOptions(WriteIndented = true)]
[JsonSerializable(typeof(Dictionary<string, Datum>))]
public partial class DictionaryDatumGenerationContext : JsonSerializerContext
{
}
public record LocationAmazon([property: JsonPropertyName("count")] int Count,
[property: JsonPropertyName("match")] string Match,
[property: JsonPropertyName("searchData")] SearchData SearchData);
[JsonSourceGenerationOptions(WriteIndented = true)]
[JsonSerializable(typeof(LocationAmazon))]
public partial class LocationAmazonGenerationContext : JsonSerializerContext
{
}
public record LocationInfo([property: JsonPropertyName("city")] string City,
[property: JsonPropertyName("country")] string Country,
[property: JsonPropertyName("countryIso3Code")] string CountryIso3Code,
[property: JsonPropertyName("state")] string State,
[property: JsonPropertyName("stateCode")] string StateCode);
[JsonSourceGenerationOptions(WriteIndented = true)]
[JsonSerializable(typeof(LocationInfo))]
public partial class LocationInfoGenerationContext : JsonSerializerContext
{
}
public record SearchData([property: JsonPropertyName("clusterName")] string ClusterName,
[property: JsonPropertyName("locationId")] string LocationId,
[property: JsonPropertyName("locationInfo")] LocationInfo LocationInfo,
[property: JsonPropertyName("thingId")] string ThingId);
[JsonSourceGenerationOptions(WriteIndented = true)]
[JsonSerializable(typeof(SearchData))]
public partial class SearchDataGenerationContext : JsonSerializerContext
{
}
public record AllPerson([property: JsonPropertyName("count")] int Count,
[property: JsonPropertyName("match")] string Match,
[property: JsonPropertyName("searchData")] SearchData SearchData);
[JsonSourceGenerationOptions(WriteIndented = true)]
[JsonSerializable(typeof(AllPerson))]
public partial class AllPersonGenerationContext : JsonSerializerContext
{
}
public record PersonAmazon([property: JsonPropertyName("count")] int Count,
[property: JsonPropertyName("match")] string Match,
[property: JsonPropertyName("searchData")] SearchData SearchData);
[JsonSourceGenerationOptions(WriteIndented = true)]
[JsonSerializable(typeof(PersonAmazon))]
public partial class PersonAmazonGenerationContext : JsonSerializerContext
{
}
public record ClusterId([property: JsonPropertyName("count")] int Count,
[property: JsonPropertyName("match")] string Match,
[property: JsonPropertyName("searchData")] SearchData SearchData);
[JsonSourceGenerationOptions(WriteIndented = true)]
[JsonSerializable(typeof(ClusterId))]
public partial class ClusterIdGenerationContext : JsonSerializerContext
{
}
public record Thing([property: JsonPropertyName("count")] int Count,
[property: JsonPropertyName("match")] string Match,
[property: JsonPropertyName("searchData")] SearchData SearchData);
[JsonSourceGenerationOptions(WriteIndented = true)]
[JsonSerializable(typeof(Thing))]
public partial class ThingGenerationContext : JsonSerializerContext
{
}
public record Time([property: JsonPropertyName("count")] int Count,
[property: JsonPropertyName("match")] string Match,
[property: JsonPropertyName("searchData")] SearchData SearchData);
[JsonSourceGenerationOptions(WriteIndented = true)]
[JsonSerializable(typeof(Time))]
public partial class TimeGenerationContext : JsonSerializerContext
{
}
public record ParentMap([property: JsonPropertyName("FOLDER")] IReadOnlyList<string> FOLDER);
public record Aggregations([property: JsonPropertyName("allPeople")] IReadOnlyList<AllPerson> AllPeople,
[property: JsonPropertyName("clusterId")] IReadOnlyList<ClusterId> ClusterId,
[property: JsonPropertyName("location")] IReadOnlyList<LocationAmazon> Location,
[property: JsonPropertyName("people")] IReadOnlyList<PersonAmazon> People,
[property: JsonPropertyName("things")] IReadOnlyList<Thing> Things,
[property: JsonPropertyName("time")] IReadOnlyList<Time> Time,
[property: JsonPropertyName("type")] IReadOnlyList<Type> Type);
[JsonSourceGenerationOptions(WriteIndented = true)]
[JsonSerializable(typeof(Aggregations))]
public partial class AggregationsGenerationContext : JsonSerializerContext
{
}
[JsonSourceGenerationOptions(WriteIndented = true)]
[JsonSerializable(typeof(ParentMap))]
public partial class ParentMapGenerationContext : JsonSerializerContext
{
}
public record RootAmazon([property: JsonPropertyName("aggregations")] Aggregations Aggregations,
[property: JsonPropertyName("count")] int Count,
[property: JsonPropertyName("data")] IReadOnlyList<Datum> Data);
[JsonSourceGenerationOptions(WriteIndented = true)]
[JsonSerializable(typeof(RootAmazon))]
public partial class RootAmazonGenerationContext : JsonSerializerContext
{
}
private static ReadOnlyCollection<(string, string)> GetAggregationLines(string harFile)
{
List<(string, string)> results = [];
if (!File.Exists(harFile))
throw new Exception();
string lastUrl = string.Empty;
string text = "\"text\": \"{";
string[] lines = File.ReadAllLines(harFile);
foreach (string line in lines)
{
if (line.Contains("\"url\": \""))
lastUrl = line;
if (!line.Contains(text))
continue;
if (!line.Contains("aggregations"))
continue;
if (lastUrl.Contains("search?asset=NONE"))
continue;
results.Add(new(lastUrl, line.Trim()[(text.Length - 1)..^1].Replace("\\\"", "\"")));
lastUrl = string.Empty;
}
return new(results);
}
private static void SaveAmazon(IReadOnlyList<Datum> data, string personIdFile)
{
string json;
Dictionary<string, Datum> keyValuePairs = [];
foreach (Datum datum in data)
_ = keyValuePairs.TryAdd(datum.Name.Split('.')[0], datum);
json = JsonSerializer.Serialize(keyValuePairs, DictionaryDatumGenerationContext.Default.DictionaryStringDatum);
File.WriteAllText(personIdFile, json);
}
private static void SaveAmazon(string destination, string harFile)
{
string offset;
string personId;
RootAmazon amazon;
string? personName;
string personIdFile;
string personDirectory;
PersonAmazon personAmazon;
Dictionary<string, string> keyValuePairs = [];
ReadOnlyCollection<(string Url, string AggregationLine)> aggregationLines = GetAggregationLines(harFile);
foreach ((string url, string aggregationLine) in aggregationLines)
{
if (aggregationLine.Contains(",\"category\":\"allPeople\"}"))
continue;
amazon = JsonSerializer.Deserialize(aggregationLine, RootAmazonGenerationContext.Default.RootAmazon) ?? throw new Exception();
if (amazon.Aggregations?.People is null || amazon.Aggregations.People.Count < 1)
continue;
personAmazon = amazon.Aggregations.People[0];
if (!url.Contains(personAmazon.Match))
continue;
personDirectory = Path.Combine(destination, personAmazon.SearchData.ClusterName);
_ = Directory.CreateDirectory(personDirectory);
personIdFile = Path.Combine(personDirectory, $"000) {personAmazon.Match}.json");
_ = keyValuePairs.TryAdd(personAmazon.Match, personAmazon.SearchData.ClusterName);
SaveAmazon(amazon.Data, personIdFile);
}
foreach ((string url, string aggregationLine) in aggregationLines)
{
if (aggregationLine.Contains(",\"category\":\"allPeople\"}"))
continue;
amazon = JsonSerializer.Deserialize(aggregationLine, RootAmazonGenerationContext.Default.RootAmazon) ?? throw new Exception();
if (amazon.Aggregations?.People is not null && amazon.Aggregations.People.Count > 0)
continue;
if (!url.Contains("offset="))
continue;
offset = url.Split("offset=")[1];
if (!url.Contains("people%3A("))
continue;
personId = url.Split("people%3A(")[1].Split(')')[0];
if (!keyValuePairs.TryGetValue(personId, out personName))
continue;
personDirectory = Path.Combine(destination, personName);
_ = Directory.CreateDirectory(personDirectory);
personIdFile = Path.Combine(personDirectory, $"{offset.Split('&')[0]}) {personId}.json");
SaveAmazon(amazon.Data, personIdFile);
}
}
internal static void SaveAmazon(ILogger<Worker> logger, List<string> args)
{
string root = Path.GetFullPath(args[0]);
string destination = Path.GetFullPath(args[2]);
if (string.IsNullOrEmpty(root))
throw new NullReferenceException(nameof(root));
string[] harFiles = Directory.GetFiles(root, "*.har", SearchOption.TopDirectoryOnly);
foreach (string harFile in harFiles)
SaveAmazon(destination, harFile);
logger?.LogInformation("{harFiles} count", harFiles.Length);
}
}
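GetAggregationLines never parses the HAR as JSON; it slices each matching raw line between the text marker and the trailing quote and unescapes it. One hypothetical entry line through that slice:

string text = "\"text\": \"{";
string line = "        \"text\": \"{\\\"aggregations\\\":{},\\\"count\\\":0,\\\"data\\\":[]}\"";
string payload = line.Trim()[(text.Length - 1)..^1].Replace("\\\"", "\"");
// payload is now the plain JSON text {"aggregations":{},"count":0,"data":[]}
// ready for RootAmazonGenerationContext.Default.RootAmazon.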

View File

@ -0,0 +1,35 @@
using File_Folder_Helper.Models;
using Microsoft.Extensions.Logging;
using System.Text.Json;
namespace File_Folder_Helper.Day;
internal static partial class Helper20240518
{
internal static void PersonKeyToImmichImport(ILogger<Worker> logger, List<string> args)
{
string name;
string birthDate;
string ownerId = args[5];
List<string> inserts = [];
string tableName = args[3];
string birthDateFormat = args[6];
string[] columns = args[4].Split(',');
string json = File.ReadAllText(args[2]);
string root = Path.GetFullPath(args[0]);
Dictionary<long, Person> people = JsonSerializer.Deserialize(json, PeopleSourceGenerationContext.Default.DictionaryInt64Person) ?? throw new NullReferenceException();
foreach (KeyValuePair<long, Person> keyValuePair in people)
{
if (keyValuePair.Value.Birth?.Note is null || keyValuePair.Value.Name?.ForwardSlashFull is null || keyValuePair.Value.Birth?.Date is null)
continue;
birthDate = keyValuePair.Value.Birth.Date.Value.ToString(birthDateFormat);
name = keyValuePair.Value.Name.ForwardSlashFull.Replace("/", string.Empty);
inserts.Add($"insert into \"{tableName}\" (\"{string.Join("\", \"", columns)}\") values ('{ownerId}', '{name}', '{birthDate}');");
}
string file = Path.Combine(root, $"{DateTime.Now.Ticks}.sql");
File.WriteAllLines(file, inserts);
logger.LogInformation("<{file}> saved", file);
}
}
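PersonKeyToImmichImport emits one SQL insert per person; a sketch of a single generated line, with hypothetical values standing in for args[3] through args[6].

string tableName = "person";                              // hypothetical args[3]
string[] columns = "ownerId,name,birthDate".Split(',');   // hypothetical args[4]
string ownerId = "9e0f0000-0000-0000-0000-000000000000";  // hypothetical args[5]
string name = "John Doe";
string birthDate = "1970-01-01";                          // hypothetical args[6] format applied
string insert = $"insert into \"{tableName}\" (\"{string.Join("\", \"", columns)}\") values ('{ownerId}', '{name}', '{birthDate}');";
// insert into "person" ("ownerId", "name", "birthDate") values ('9e0f0000-0000-0000-0000-000000000000', 'John Doe', '1970-01-01');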

View File

@ -0,0 +1,80 @@
using File_Folder_Helper.Helpers;
using Microsoft.Extensions.Logging;
using System.Collections.ObjectModel;
namespace File_Folder_Helper.Day;
internal static partial class Helper20240519
{
private record Record(long Length, long Ticks);
private static ReadOnlyDictionary<string, Record> GetKeyValuePairs(string source, string[] sourceFiles)
{
Dictionary<string, Record> results = [];
string key;
Record? record;
FileInfo fileInfo;
int sourceLength = source.Length;
foreach (string sourceFile in sourceFiles)
{
fileInfo = new(sourceFile);
key = sourceFile[sourceLength..];
if (results.TryGetValue(key, out record))
throw new NotSupportedException();
results.Add(key, new(fileInfo.Length, fileInfo.LastWriteTime.Ticks));
}
return new(results);
}
internal static void FindReplaceDirectoryName(ILogger<Worker> logger, List<string> args)
{
string checkDirectory;
string replaceText = args[3];
string[] findTexts = args[2].Split(',');
string root = Path.GetFullPath(args[0]);
string[] directories = Directory.GetDirectories(root, "*", SearchOption.TopDirectoryOnly);
foreach (string directory in directories)
{
checkDirectory = directory;
foreach (string findText in findTexts)
checkDirectory = checkDirectory.Replace(findText, replaceText);
if (checkDirectory == directory)
continue;
if (Directory.Exists(checkDirectory))
continue;
logger.LogInformation("<{directory}> to <{checkDirectory}>", directory, checkDirectory);
Directory.Move(directory, checkDirectory);
}
string key;
Record? record;
string checkFile;
FileInfo fileInfo;
string target = Path.GetFullPath(args[6]);
string source = Path.GetFullPath(args[4]);
string compare = Path.GetFullPath(args[5]);
string[] sourceFiles = Directory.GetFiles(source, "*", SearchOption.AllDirectories);
ReadOnlyDictionary<string, Record> keyValuePairs = GetKeyValuePairs(source, sourceFiles);
string[] compareFiles = Directory.GetFiles(compare, "*", SearchOption.AllDirectories);
int compareLength = compare.Length;
foreach (string compareFile in compareFiles)
{
fileInfo = new(compareFile);
key = compareFile[compareLength..];
if (!keyValuePairs.TryGetValue(key, out record))
continue;
if (fileInfo.Length != record.Length || fileInfo.LastWriteTime.Ticks != record.Ticks)
continue;
checkFile = $"{target}{key}";
checkDirectory = Path.GetDirectoryName(checkFile) ?? throw new NotSupportedException();
if (!Directory.Exists(checkDirectory))
_ = Directory.CreateDirectory(checkDirectory);
if (File.Exists(checkFile))
continue;
logger.LogInformation("<{compareFile}> to <{checkFile}>", compareFile, checkFile);
File.Move(compareFile, checkFile);
}
HelperDeleteEmptyDirectories.DeleteEmptyDirectories(logger, compare);
}
}
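The second half of FindReplaceDirectoryName only moves a compare file when its source-relative key, byte length, and LastWriteTime ticks all match the source snapshot; that rule in isolation, with hypothetical paths.

using System.IO;

string source = Path.GetFullPath(@"C:\source");
string compare = Path.GetFullPath(@"C:\compare");
string compareFile = Path.Combine(compare, "albums", "photo.jpg");
string key = compareFile[compare.Length..];   // source-relative key, e.g. \albums\photo.jpg
FileInfo sourceInfo = new($"{source}{key}");
FileInfo compareInfo = new(compareFile);
bool sameFile = sourceInfo.Exists && compareInfo.Exists
    && sourceInfo.Length == compareInfo.Length
    && sourceInfo.LastWriteTime.Ticks == compareInfo.LastWriteTime.Ticks;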

View File

@ -0,0 +1,160 @@
using File_Folder_Helper.Helpers;
using Microsoft.Extensions.Logging;
using System.Collections.ObjectModel;
using System.Text.Json;
using System.Text.Json.Serialization;
namespace File_Folder_Helper.Day;
internal static partial class Helper20240520
{
private record RecordA(string Directory, string Extension, string SourceFile, Identifier Identifier);
private record RecordB(ReadOnlyDictionary<int, Identifier> IdTo, ReadOnlyDictionary<long, Identifier> LengthTo, ReadOnlyDictionary<string, Identifier> PaddedTo);
internal sealed record Identifier(int Id, long Length, string PaddedId, long Ticks)
{
public override string ToString()
{
string result = JsonSerializer.Serialize(this, IdentifierSourceGenerationContext.Default.Identifier);
return result;
}
}
[JsonSourceGenerationOptions(WriteIndented = true)]
[JsonSerializable(typeof(Identifier))]
internal partial class IdentifierSourceGenerationContext : JsonSerializerContext
{
}
[JsonSourceGenerationOptions(WriteIndented = true)]
[JsonSerializable(typeof(Identifier[]))]
internal partial class IdentifierCollectionSourceGenerationContext : JsonSerializerContext
{
}
private static RecordB GetRecordB(string jsonFile)
{
RecordB result;
Dictionary<int, Identifier> idTo = [];
Dictionary<long, Identifier> lengthTo = [];
Dictionary<string, Identifier> paddedTo = [];
string? json = !File.Exists(jsonFile) ? null : File.ReadAllText(jsonFile);
Identifier[]? identifiers = json is null ? null : JsonSerializer.Deserialize(json, IdentifierCollectionSourceGenerationContext.Default.IdentifierArray);
if (identifiers is null && !string.IsNullOrEmpty(jsonFile))
throw new Exception($"Invalid {nameof(jsonFile)}");
if (identifiers is not null)
{
foreach (Identifier identifier in identifiers)
{
idTo.Add(identifier.Id, identifier);
paddedTo.Add(identifier.PaddedId, identifier);
if (lengthTo.ContainsKey(identifier.Length))
{
_ = lengthTo.Remove(identifier.Length);
continue;
}
lengthTo.Add(identifier.Length, identifier);
}
}
result = new(new(idTo), new(lengthTo), new(paddedTo));
return result;
}
internal static void IdentifierRename(ILogger<Worker> logger, List<string> args)
{
int id;
string key;
RecordA recordA;
string checkFile;
FileInfo fileInfo;
string checkDirectory;
Identifier? identifier;
string offset = args[5];
string option = args[7];
string jsonFile = args[4];
string fileNameWithoutExtension;
List<RecordA> recordACollection = [];
RecordB recordB = GetRecordB(jsonFile);
string deterministicHashCode = args[3];
string source = Path.GetFullPath(args[0]);
int intMinValueLength = int.Parse(args[2]);
string destination = Path.GetFullPath(args[6]);
bool isOffsetDeterministicHashCode = offset == deterministicHashCode;
string[] sourceFiles = Directory.GetFiles(source, "*", SearchOption.AllDirectories);
logger.LogInformation("Found {files}(s)", sourceFiles.Length);
int sourceLength = source.Length;
foreach (string sourceFile in sourceFiles)
{
fileInfo = new(sourceFile);
fileNameWithoutExtension = Path.GetFileNameWithoutExtension(fileInfo.FullName);
if (fileInfo.Directory is null)
throw new NotSupportedException();
if (option == "Padded")
{
if (fileNameWithoutExtension.Length < intMinValueLength)
continue;
key = fileNameWithoutExtension;
if (recordB.PaddedTo.TryGetValue(key, out identifier))
{
recordACollection.Add(new($"{destination}{fileInfo.Directory.FullName[sourceLength..]}", fileInfo.Extension, fileInfo.FullName, identifier));
continue;
}
}
if (option == "Length")
{
if (recordB.LengthTo.TryGetValue(fileInfo.Length, out identifier))
{
checkDirectory = $"{destination}{fileInfo.Directory.FullName[sourceLength..]}";
if (!Directory.Exists(checkDirectory))
_ = Directory.CreateDirectory(checkDirectory);
checkFile = Path.Combine(checkDirectory, $"{identifier.PaddedId}{fileInfo.Extension}");
if (File.Exists(checkFile))
continue;
File.Copy(fileInfo.FullName, checkFile);
logger.LogInformation("<{fileInfo.FullName}> was moved to <{checkFile}>", fileInfo.FullName, checkFile);
continue;
}
}
if (option == "Id")
{
if (int.TryParse(fileNameWithoutExtension, out id))
{
if (recordB.IdTo.TryGetValue(id, out identifier))
{
checkDirectory = $"{destination}{fileInfo.Directory.FullName[sourceLength..]}";
if (!Directory.Exists(checkDirectory))
_ = Directory.CreateDirectory(checkDirectory);
checkFile = Path.Combine(checkDirectory, $"{identifier.PaddedId}{fileInfo.Extension}");
if (File.Exists(checkFile))
continue;
File.Move(fileInfo.FullName, checkFile);
logger.LogInformation("<{fileInfo.FullName}> was moved to <{checkFile}>", fileInfo.FullName, checkFile);
continue;
}
}
}
}
if (option == "Padded")
{
if (!isOffsetDeterministicHashCode)
recordACollection = (from l in recordACollection orderby l.Identifier.Ticks select l).ToList();
for (int i = 0; i < recordACollection.Count; i++)
{
recordA = recordACollection[i];
if (!Directory.Exists(recordA.Directory))
_ = Directory.CreateDirectory(recordA.Directory);
checkFile = Path.Combine(recordA.Directory, isOffsetDeterministicHashCode ? $"{recordA.Identifier.PaddedId}{recordA.Extension}" : $"{offset + i}{recordA.Identifier.PaddedId}{recordA.Extension}");
if (File.Exists(checkFile))
continue;
File.Move(recordA.SourceFile, checkFile);
logger.LogInformation("<{recordA.SourceFile}> was moved to <{checkFile}>", recordA.SourceFile, checkFile);
}
}
HelperDeleteEmptyDirectories.DeleteEmptyDirectories(logger, source);
}
}
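GetRecordB drops any byte length shared by two identifiers so the "Length" option only copies files whose size is unambiguous; the collision rule reduced to a few lines, with hypothetical identifiers.

using System;
using System.Collections.Generic;

Dictionary<long, string> lengthTo = [];
(long Length, string PaddedId)[] identifiers = [(1024, "0001"), (2048, "0002"), (1024, "0003")];
foreach ((long length, string paddedId) in identifiers)
{
    if (lengthTo.ContainsKey(length))
    {
        _ = lengthTo.Remove(length);
        continue;
    }
    lengthTo.Add(length, paddedId);
}
Console.WriteLine(string.Join(", ", lengthTo));   // only [2048, 0002] survives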

View File

@ -0,0 +1,156 @@
using Microsoft.Extensions.Logging;
namespace File_Folder_Helper.Day;
internal static partial class Helper20240623
{
private record Record(int? CodeInsidersLine, string File, string[] Lines, int? StopLine, int? SubTasksLine);
private static List<Record> GetRecords(string sourceDirectory, string searchPattern, string codeInsiders, string subTasks)
{
List<Record> results = [];
int? stopLine;
int? subTasksLine;
int? codeInsidersLine;
string[] lines;
string[] files = Directory.GetFiles(sourceDirectory, searchPattern, SearchOption.AllDirectories);
foreach (string file in files)
{
stopLine = null;
subTasksLine = null;
codeInsidersLine = null;
lines = File.ReadAllLines(file);
for (int i = 0; i < lines.Length; i++)
{
if (lines[i].StartsWith(codeInsiders) && lines[i][^1] == '"')
{
if (lines.Length > i + 1 && lines[i + 1] == "```")
codeInsidersLine = i;
}
if (lines[i] != subTasks)
continue;
subTasksLine = i;
if (codeInsidersLine is null)
break;
if (lines.Length > i)
{
for (int j = i + 1; j < lines.Length; j++)
{
if (lines[j].Length > 0 && lines[j][0] == '#')
{
stopLine = j;
break;
}
}
}
stopLine ??= lines.Length;
break;
}
results.Add(new(codeInsidersLine, file, lines, stopLine, subTasksLine));
}
return results;
}
internal static void UpdateSubTasksInMarkdownFiles(ILogger<Worker> logger, List<string> args)
{
int lineCheck;
bool? foundDone;
string[] segments;
List<string> lines;
string[] indexLines;
string checkDirectory;
string done = args[7];
List<string> indexFiles;
string subTasks = args[3];
List<string> newLines = [];
List<string> oldLines = [];
string indexFile = args[5];
string searchPattern = args[2];
string directoryFilter = args[8];
string[] tasks = args[6].Split(',');
string codeInsiders = $"{args[4]} \"";
string sourceDirectory = Path.GetFullPath(args[0]);
List<Record> records = GetRecords(sourceDirectory, searchPattern, codeInsiders, subTasks);
foreach (Record record in from l in records orderby l.SubTasksLine is null, l.CodeInsidersLine is null select l)
{
if (record.SubTasksLine is null)
continue;
if (record.CodeInsidersLine is not null)
logger.LogInformation("<{file}> has [{subTasks}]", Path.GetFileNameWithoutExtension(record.File), subTasks);
else
{
logger.LogWarning("<{file}> has [{subTasks}] but doesn't have [{codeInsiders}]!", Path.GetFileNameWithoutExtension(record.File), subTasks, codeInsiders);
continue;
}
if (record.StopLine is null)
continue;
checkDirectory = record.Lines[record.CodeInsidersLine.Value][codeInsiders.Length..^1];
if (!Directory.Exists(checkDirectory))
{
logger.LogError("<{checkDirectory}> doesn't exist", Path.GetFileName(checkDirectory));
continue;
}
indexFiles = Directory.GetFiles(checkDirectory, indexFile, SearchOption.AllDirectories).ToList();
if (indexFiles.Count != 1)
{
for (int i = indexFiles.Count - 1; i > -1; i--)
{
if (!indexFiles[i].Contains(directoryFilter, StringComparison.CurrentCultureIgnoreCase))
indexFiles.RemoveAt(i);
}
if (indexFiles.Count != 1)
{
logger.LogError("<{checkDirectory}> doesn't have a [{indexFile}]", Path.GetFileName(checkDirectory), indexFile);
continue;
}
}
foundDone = null;
newLines.Clear();
oldLines.Clear();
indexLines = File.ReadAllLines(indexFiles[0]);
for (int i = 0; i < indexLines.Length; i++)
{
if (indexLines[i] == done)
foundDone = true;
segments = indexLines[i].Split(tasks[1]);
if (segments.Length > 2 || !segments[0].StartsWith(tasks[0]))
continue;
if (foundDone is null || !foundDone.Value)
newLines.Add($"- [ ] {segments[0][tasks[0].Length..]}");
else
newLines.Add($"- [x] {segments[0][tasks[0].Length..]}");
}
if (newLines.Count == 0)
continue;
lineCheck = 0;
newLines.Insert(0, string.Empty);
for (int i = record.SubTasksLine.Value + 1; i < record.StopLine.Value - 1; i++)
oldLines.Add(record.Lines[i]);
if (newLines.Count == oldLines.Count)
{
for (int i = 0; i < newLines.Count; i++)
{
if (newLines[i] != record.Lines[record.SubTasksLine.Value + 1 + i])
continue;
lineCheck++;
}
if (lineCheck == newLines.Count)
continue;
}
checkDirectory = Path.Combine(checkDirectory, DateTime.Now.Ticks.ToString());
_ = Directory.CreateDirectory(checkDirectory);
Thread.Sleep(500);
Directory.Delete(checkDirectory);
lines = record.Lines.ToList();
for (int i = record.StopLine.Value - 1; i > record.SubTasksLine.Value + 1; i--)
lines.RemoveAt(i);
if (record.StopLine.Value == record.Lines.Length && lines[^1].Length == 0)
lines.RemoveAt(lines.Count - 1);
for (int i = 0; i < newLines.Count; i++)
lines.Insert(record.SubTasksLine.Value + 1 + i, newLines[i]);
File.WriteAllLines(record.File, lines);
}
}
}
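UpdateSubTasksInMarkdownFiles rewrites the sub-task block as markdown check boxes, unchecked until the done heading has been passed in the index file; one hypothetical card line through that mapping (the tasks markers here are assumed, not taken from a real configuration).

using System;

string[] tasks = ["- [", "]("];   // hypothetical args[6] value of "- [,](" split on ','
string indexLine = "- [Write unit tests](tasks/write-unit-tests.md)";
string[] segments = indexLine.Split(tasks[1]);
if (segments.Length <= 2 && segments[0].StartsWith(tasks[0]))
    Console.WriteLine($"- [ ] {segments[0][tasks[0].Length..]}");   // prints "- [ ] Write unit tests"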

View File

@ -0,0 +1,88 @@
using Microsoft.Extensions.Logging;
namespace File_Folder_Helper.Day;
internal static partial class Helper20240624
{
private static void MoveUpOneDirectory(ILogger<Worker> logger, string sourceDirectory, string[] files)
{
string? match;
string checkFile;
FileInfo fileInfoA;
FileInfo fileInfoB;
string? checkDirectory;
List<string> deleteFiles = [];
Dictionary<string, string> keyValuePairs = [];
foreach (string file in files)
{
checkDirectory = Path.GetDirectoryName(Path.GetDirectoryName(file)) ?? throw new NotSupportedException();
checkFile = Path.Combine(checkDirectory, Path.GetFileName(file));
if (File.Exists(checkFile))
throw new NotSupportedException();
if (keyValuePairs.TryGetValue(checkFile, out match))
{
fileInfoA = new(file);
fileInfoB = new(match);
if (fileInfoA.Length != fileInfoB.Length)
throw new NotSupportedException("Files don't match!");
logger.LogWarning("<{file}> already exists!", file);
deleteFiles.Add(file);
continue;
}
keyValuePairs.Add(checkFile, file);
}
foreach (string file in deleteFiles)
File.Delete(file);
foreach (KeyValuePair<string, string> keyValuePair in keyValuePairs)
File.Move(keyValuePair.Value, keyValuePair.Key);
Helpers.HelperDeleteEmptyDirectories.DeleteEmptyDirectories(logger, sourceDirectory);
}
private static void Distinct(List<string> args, string sourceDirectory, string[] deleteMatchingIdsDirectoryNames, string[] files)
{
string fileName;
string directory;
List<string> distinct = [];
List<string> duplicate = [];
string[] keepMatchingIdsDirectoryNames = args[4].Split(',');
if (deleteMatchingIdsDirectoryNames.Length != keepMatchingIdsDirectoryNames.Length)
throw new NotSupportedException("Check arg lengths!");
string[] keepMatchingIdsDirectories = keepMatchingIdsDirectoryNames.Select(l => Path.Combine(sourceDirectory, l)).ToArray();
string[] deleteMatchingIdsDirectories = deleteMatchingIdsDirectoryNames.Select(l => Path.Combine(sourceDirectory, l)).ToArray();
foreach (string file in files)
{
fileName = Path.GetFileName(file);
if (distinct.Contains(fileName))
{
duplicate.Add(fileName);
continue;
}
distinct.Add(Path.GetFileName(file));
}
foreach (string file in files)
{
fileName = Path.GetFileName(file);
directory = Path.GetDirectoryName(file) ?? throw new NotSupportedException();
if (!duplicate.Contains(fileName))
continue;
if (deleteMatchingIdsDirectories.Contains(directory))
File.Move(file, $"{file}.del");
else if (!keepMatchingIdsDirectories.Contains(directory))
throw new NotSupportedException($"Missing <{Path.GetFileName(directory)}> as a directory for {fileName}");
}
}
internal static void MoveUpOneDirectory(ILogger<Worker> logger, List<string> args)
{
string searchPattern = args[2];
string sourceDirectory = Path.GetFullPath(args[0]);
string[] deleteMatchingIdsDirectoryNames = args[3].Split(',');
string[] files = Directory.GetFiles(sourceDirectory, searchPattern, SearchOption.AllDirectories);
if (deleteMatchingIdsDirectoryNames.Length == 0)
MoveUpOneDirectory(logger, sourceDirectory, files);
else
Distinct(args, sourceDirectory, deleteMatchingIdsDirectoryNames, files);
}
}
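The move above keeps each file name but re-parents the file onto its grandparent directory; the target computation with a hypothetical path.

using System;
using System.IO;

string file = @"C:\photos\2024\duplicates\IMG_0001.jpg";
string checkDirectory = Path.GetDirectoryName(Path.GetDirectoryName(file)) ?? throw new NotSupportedException();
string checkFile = Path.Combine(checkDirectory, Path.GetFileName(file));
Console.WriteLine(checkFile);   // C:\photos\2024\IMG_0001.jpg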

84
Day/HelperDay.cs Normal file
View File

@ -0,0 +1,84 @@
using File_Folder_Helper.Models;
using Microsoft.Extensions.Logging;
namespace File_Folder_Helper.Helpers;
internal static class HelperDay
{
internal static void Select(AppSettings appSettings, ILogger<Worker> logger, List<string> args, CancellationToken cancellationToken)
{
logger.LogInformation("X) Day Helpers,");
if (args[1] == "Day-Helper-2023-09-06")
Day.Helper20230906.SaveJson(logger, args[0]);
else if (args[1] == "Day-Helper-2023-10-10")
Day.Helper20231010.HgCV(logger, args[0]);
else if (args[1] == "Day-Helper-2023-10-16")
Day.Helper20231016.MoveDirectory(logger, args[0]);
else if (args[1] == "Day-Helper-2023-10-24")
Day.Helper20231024.NetUse(logger, args[0]);
else if (args[1] == "Day-Helper-2023-11-02")
Day.Helper20231102.NuSpec(logger, args[0]);
else if (args[1] == "Day-Helper-2023-11-08")
Day.Helper20231108.MasterImage(logger, args);
else if (args[1] == "Day-Helper-2023-11-22")
Day.Helper20231122.ProcessDataStandardFormat(logger, args);
else if (args[1] == "Day-Helper-2023-11-28")
logger.LogError("{arg} - has been migrated to File-Watcher", args[1]);
else if (args[1] == "Day-Helper-2023-11-30")
Day.Helper20231130.RenameReactorProcessDataStandardFormatFiles(logger, args);
else if (args[1] == "Day-Helper-2023-12-05")
Day.Helper20231205.SplitMarkdownFile(logger, args);
else if (args[1] == "Day-Helper-2023-12-12")
logger.LogError("{arg} - was deleted on 2024-04-08", args[1]);
else if (args[1] == "Day-Helper-2023-12-22")
Day.Helper20231222.ConvertId(logger, args);
else if (args[1] == "Day-Helper-2024-01-05")
Day.Helper20240105.ConvertKeePassExport(logger, args);
else if (args[1] == "Day-Helper-2024-01-06")
Day.Helper20240106.TextToJson(logger, args);
else if (args[1] == "Day-Helper-2024-01-07")
Day.Helper20240107.DirectoryToISO(logger, args);
else if (args[1] == "Day-Helper-2024-01-08")
Day.Helper20240108.SortCodeMethods(logger, args, cancellationToken);
else if (args[1] == "Day-Helper-2024-01-27")
logger.LogError("{arg} - has been migrated to Clipboard_Send_Keys", args[1]);
else if (args[1] == "Day-Helper-2024-01-29")
Day.Helper20240129.JsonToTsv(logger, args);
else if (args[1] == "Day-Helper-2024-03-05")
Day.Helper20240305.ArchiveFiles(logger, args);
else if (args[1] == "Day-Helper-2024-04-03")
Day.Helper20240403.AlertIfNewDeviceIsConnected(logger, args);
else if (args[1] == "Day-Helper-2024-04-04")
Day.Helper20240404.ParseCSV(logger, args);
else if (args[1] == "Day-Helper-2024-04-09")
Day.Helper20240409.MonA(logger, args);
else if (args[1] == "Day-Helper-2024-04-17")
Day.Helper20240417.FilteredRunCommand(logger, args, cancellationToken);
else if (args[1] == "Day-Helper-2024-04-26")
Day.Helper20240426.UpdateTests(logger, args);
else if (args[1] == "Day-Helper-2024-04-27")
Day.Helper20240427.Immich(appSettings, logger, args);
else if (args[1] == "Day-Helper-2024-04-29")
Day.Helper20240429.GitConfigCleanUp(logger, args);
else if (args[1] == "Day-Helper-2024-05-10")
Day.Helper20240510.PullIconsForBLM(logger, args);
else if (args[1] == "Day-Helper-2024-05-13")
Day.Helper20240513.PersonKeyToName(logger, args);
else if (args[1] == "Day-Helper-2024-05-17")
Day.Helper20240517.SaveAmazon(logger, args);
else if (args[1] == "Day-Helper-2024-05-18")
Day.Helper20240518.PersonKeyToImmichImport(logger, args);
else if (args[1] == "Day-Helper-2024-05-19")
Day.Helper20240519.FindReplaceDirectoryName(logger, args);
else if (args[1] == "Day-Helper-2024-05-20")
Day.Helper20240520.IdentifierRename(logger, args);
else if (args[1] == "Day-Helper-2024-06-23")
Day.Helper20240623.UpdateSubTasksInMarkdownFiles(logger, args);
else if (args[1] == "Day-Helper-2024-06-24")
Day.Helper20240624.MoveUpOneDirectory(logger, args);
else
throw new Exception(appSettings.Company);
}
}
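Select is a pure dispatcher: args[1] carries a date key that chooses the helper and the rest of the list is passed through untouched. A hypothetical argument list routed to PersonKeyToName:

using System.Collections.Generic;

List<string> args =
[
    @"C:\people",              // args[0]: root the chosen helper works in
    "Day-Helper-2024-05-13",   // args[1]: routes to Day.Helper20240513.PersonKeyToName
    @"C:\people\people.json",  // args[2] onward: helper-specific inputs
];
// HelperDay.Select(appSettings, logger, args, CancellationToken.None);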

View File

@ -1,18 +1,25 @@
<Project Sdk="Microsoft.NET.Sdk">
<PropertyGroup>
<ImplicitUsings>enable</ImplicitUsings>
<LangVersion>11.0</LangVersion>
<Nullable>enable</Nullable>
<OutputType>Exe</OutputType>
<ImplicitUsings>enable</ImplicitUsings>
<TargetFramework>net8.0</TargetFramework>
<RuntimeIdentifier>win-x64</RuntimeIdentifier>
<TargetFramework>net7.0</TargetFramework>
<UserSecretsId>eb9e8f58-fcb5-45bb-9d4d-54f064c485b1</UserSecretsId>
<EnforceCodeStyleInBuild>true</EnforceCodeStyleInBuild>
<UserSecretsId>8da397d4-13ec-4576-9722-3c79cad25563</UserSecretsId>
<UserSecretsIdWindowsShortcut>eb9e8f58-fcb5-45bb-9d4d-54f064c485b1</UserSecretsIdWindowsShortcut>
</PropertyGroup>
<ItemGroup>
<PackageReference Include="runtime.win-x64.Microsoft.DotNet.ILCompiler" Version="7.0.11" />
<PackageReference Include="Microsoft.Extensions.Hosting" Version="7.0.1" />
<PackageReference Include="Microsoft.Extensions.Logging.Console" Version="7.0.0" />
<PackageReference Include="System.Text.Json" Version="7.0.3" />
<RuntimeHostConfigurationOption Include="AssemblyName" Value="File-Folder-Helper" />
</ItemGroup>
<ItemGroup>
<PackageReference Include="DiscUtils.Iso9660" Version="0.16.13" />
<PackageReference Include="Microsoft.Extensions.Hosting" Version="8.0.0" />
<PackageReference Include="Microsoft.Extensions.Logging.Console" Version="8.0.0" />
<PackageReference Include="runtime.win-x64.Microsoft.DotNet.ILCompiler" Version="8.0.6" />
<PackageReference Include="System.Text.Json" Version="8.0.3" />
<PackageReference Include="TextCopy" Version="6.2.1" />
<PackageReference Include="WindowsShortcutFactory" Version="1.2.0" />
<PackageReference Include="YamlDotNet" Version="15.3.0" />
</ItemGroup>
</Project>

View File

@ -146,8 +146,8 @@ internal static partial class HelperCreateNoteFiles
file = string.Concat(Path.Combine(directory, $"{dateTime:yyyy-MM-dd}.md"));
if (File.Exists(file))
continue;
File.WriteAllLines(file, new string[]
{
File.WriteAllLines(file,
[
"---",
"type: daily-note",
$"created: {dateTime:yyyy-MM-dd}",
@ -158,7 +158,7 @@ internal static partial class HelperCreateNoteFiles
"```bash",
string.Empty,
"```",
});
]);
if (directory != lastDirectory)
{
Directory.SetCreationTime(directory, dateTime);
@ -192,8 +192,8 @@ internal static partial class HelperCreateNoteFiles
string? attributes;
int descriptionKey = 2;
string[]? headerColumns;
List<string> allLines = [];
string destinationDirectory;
List<string> allLines = new();
DateTime dateTime = new(ticks);
string csvHeader = "type,title,description,tags,links,body";
string tsvHeader = "type\ttitle\tdescription\ttags\tlinks\tbody";
@ -275,8 +275,8 @@ internal static partial class HelperCreateNoteFiles
internal static void CreateNoteFiles(ILogger<Worker> logger, string argsZero)
{
List<string> importFiles = [];
long ticks = DateTime.Now.Ticks;
List<string> importFiles = new();
CleanExistingFiles(logger, argsZero, ticks);
importFiles.AddRange(Directory.GetFiles(argsZero, "*.csv", SearchOption.TopDirectoryOnly));
importFiles.AddRange(Directory.GetFiles(argsZero, "*.tsv", SearchOption.TopDirectoryOnly));

View File

@ -16,13 +16,15 @@ internal static class HelperDeleteEmptyDirectories
foreach (string file in files)
{
fileInfo = new(file);
if (fileInfo.LastWriteTime.Ticks > ticks)
if (fileInfo.LastWriteTime.Ticks > ticks.Value)
continue;
File.Delete(file);
try
{ File.Delete(file); }
catch (IOException) { }
}
}
if (directories.Length > 0)
files = Array.Empty<string>();
files = [];
else
files = Directory.GetFiles(checkDirectory, "*", SearchOption.TopDirectoryOnly);
if (directories.Length == 0 && files.Length == 0)
@ -38,7 +40,7 @@ internal static class HelperDeleteEmptyDirectories
}
else
{
List<string> check = new();
List<string> check = [];
foreach (string directory in directories)
{
DeleteOldLogFilesAndDeleteEmptyDirectories(ticks, searchPattern, directory, check);
@ -51,7 +53,7 @@ internal static class HelperDeleteEmptyDirectories
private static void DeleteOldLogFilesAndDeleteEmptyDirectories(ILogger<Worker> logger, long? ticks, string? searchPattern, string rootDirectory)
{
List<string> check = new();
List<string> check = [];
List<string> directories = Directory.GetDirectories(rootDirectory, "*", SearchOption.TopDirectoryOnly).ToList();
directories.Add(rootDirectory);
foreach (string directory in directories)
@ -78,7 +80,7 @@ internal static class HelperDeleteEmptyDirectories
internal static void DeleteOldLogFilesAndDeleteEmptyDirectories(ILogger<Worker> logger, string rootDirectory)
{
long ticks = DateTime.Now.AddHours(-120).Ticks;
long ticks = DateTime.Now.AddHours(-84).Ticks;
DeleteOldLogFilesAndDeleteEmptyDirectories(logger, ticks, "*.log*", rootDirectory);
}

View File

@ -0,0 +1,42 @@
namespace File_Folder_Helper.Helpers;
internal static class HelperDirectory
{
internal static List<string> GetDirectoryNames(string directory)
{
List<string> results = [];
string? fileName;
string? checkDirectory = directory;
string? pathRoot = Path.GetPathRoot(directory);
string extension = Path.GetExtension(directory);
if (string.IsNullOrEmpty(pathRoot))
throw new NullReferenceException(nameof(pathRoot));
if (Directory.Exists(directory))
{
fileName = Path.GetFileName(directory);
if (!string.IsNullOrEmpty(fileName))
results.Add(fileName);
}
else if ((string.IsNullOrEmpty(extension) || extension.Length > 3) && !File.Exists(directory))
{
fileName = Path.GetFileName(directory);
if (!string.IsNullOrEmpty(fileName))
results.Add(fileName);
}
for (int i = 0; i < int.MaxValue; i++)
{
checkDirectory = Path.GetDirectoryName(checkDirectory);
if (string.IsNullOrEmpty(checkDirectory) || checkDirectory == pathRoot)
break;
fileName = Path.GetFileName(checkDirectory);
if (string.IsNullOrEmpty(fileName))
continue;
results.Add(fileName);
}
results.Add(pathRoot);
results.Reverse();
return results;
}
}
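GetDirectoryNames walks a path from leaf to root and returns every segment in root-first order; the expected result for a hypothetical Windows path, assuming the call is made from inside this project.

using System.Collections.Generic;
using File_Folder_Helper.Helpers;

List<string> names = HelperDirectory.GetDirectoryNames(@"C:\Users\Public\Pictures\Immich");
// names: "C:\", "Users", "Public", "Pictures", "Immich"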

View File

@ -23,7 +23,7 @@ internal static class HelperFindReplace
private static ReadOnlyCollection<(string, string)> GetFindReplace(string tnsNamesOraFile)
{
List<(string, string)> results = new();
List<(string, string)> results = [];
string[] segments;
string[] lines = File.ReadAllLines(tnsNamesOraFile);
foreach (string line in lines)

View File

@ -73,7 +73,7 @@ internal static partial class HelperGenealogicalDataCommunication
private static ReadOnlyDictionary<long, ReadOnlyCollection<string>> Convert(Dictionary<long, List<string>> keyValuePairs)
{
Dictionary<long, ReadOnlyCollection<string>> results = new();
Dictionary<long, ReadOnlyCollection<string>> results = [];
foreach (KeyValuePair<long, List<string>> keyValuePair in keyValuePairs)
results.Add(keyValuePair.Key, new(keyValuePair.Value));
return new(results);
@ -81,7 +81,7 @@ internal static partial class HelperGenealogicalDataCommunication
private static Dictionary<string, ReadOnlyCollection<ReadOnlyCollection<string>>> Convert(Dictionary<string, List<ReadOnlyCollection<string>>> keyValuePairs)
{
Dictionary<string, ReadOnlyCollection<ReadOnlyCollection<string>>> results = new();
Dictionary<string, ReadOnlyCollection<ReadOnlyCollection<string>>> results = [];
foreach (KeyValuePair<string, List<ReadOnlyCollection<string>>> keyValuePair in keyValuePairs)
results.Add(keyValuePair.Key, new(keyValuePair.Value));
return new(results);
@ -91,13 +91,13 @@ internal static partial class HelperGenealogicalDataCommunication
{
List<string> results;
if (person is null)
results = new() { "Id", "First-Name", "Last-Name", "Birth-Date", "Sex", "Address", "City", "State", "Zip", "Phone", "E-mail", "Facebook", "Facebook-Id", "Comment", "U-Id" };
results = ["Id", "First-Name", "Last-Name", "Birth-Date", "Sex", "Address", "City", "State", "Zip", "Phone", "E-mail", "Facebook", "Facebook-Id", "Comment", "U-Id"];
else
{
string? facebook = GetFaceBook(person);
string? facebookId = GetFaceBookId(person);
results = new()
{
results =
[
person.Id.ToString(),
string.Concat(person.Name?.Given),
string.Concat(person.Name?.Sur),
@ -113,7 +113,7 @@ internal static partial class HelperGenealogicalDataCommunication
string.Concat(facebookId),
string.Empty,
string.Concat(person.UId)
};
];
}
return new(results);
}
@ -122,7 +122,7 @@ internal static partial class HelperGenealogicalDataCommunication
{
string[] results;
string key;
List<(string? Index, string Key)> collection = new();
List<(string? Index, string Key)> collection = [];
foreach (Family family in familyCollection)
{
if (family.Id is null)
@ -138,7 +138,7 @@ internal static partial class HelperGenealogicalDataCommunication
private static Dictionary<string, List<ReadOnlyCollection<string>>> Convert(ReadOnlyCollection<string> distinctSortedKKeys)
{
Dictionary<string, List<ReadOnlyCollection<string>>> results = new();
Dictionary<string, List<ReadOnlyCollection<string>>> results = [];
ReadOnlyCollection<string> collection;
List<ReadOnlyCollection<string>>? objectCollection;
foreach (string key in distinctSortedKKeys)
@ -147,7 +147,7 @@ internal static partial class HelperGenealogicalDataCommunication
continue;
if (!results.TryGetValue(key, out objectCollection))
{
results.Add(key, new());
results.Add(key, []);
if (!results.TryGetValue(key, out objectCollection))
throw new NotSupportedException();
}
@ -159,7 +159,7 @@ internal static partial class HelperGenealogicalDataCommunication
private static ReadOnlyCollection<string> GetHeaderLines(string startsWith, string[] sourceLines)
{
List<string> results = new();
List<string> results = [];
for (int i = 0; i < sourceLines.Length; i++)
{
if (sourceLines[i].StartsWith(startsWith))
@ -179,7 +179,7 @@ internal static partial class HelperGenealogicalDataCommunication
private static Dictionary<string, List<string>> GetTxtFileCollection(Input input)
{
Dictionary<string, List<string>> results = new();
Dictionary<string, List<string>> results = [];
string[] lines;
string[] directories;
string directoryName;
@ -188,7 +188,7 @@ internal static partial class HelperGenealogicalDataCommunication
List<string>? collectionA;
List<string>? collectionB;
string siblingDirectoryName;
string[] files = input.SingletonDirectory is null || !Directory.Exists(input.SingletonDirectory) ? Array.Empty<string>() : Directory.GetFiles(input.SingletonDirectory, "*.txt", SearchOption.AllDirectories);
string[] files = input.SingletonDirectory is null || !Directory.Exists(input.SingletonDirectory) ? [] : Directory.GetFiles(input.SingletonDirectory, "*.txt", SearchOption.AllDirectories);
foreach (string file in files)
{
sourceDirectory = Path.GetDirectoryName(file);
@ -203,7 +203,7 @@ internal static partial class HelperGenealogicalDataCommunication
directoryName = Path.GetFileName(sourceDirectory);
if (!results.TryGetValue(directoryName, out collectionA))
{
results.Add(directoryName, new());
results.Add(directoryName, []);
if (!results.TryGetValue(directoryName, out collectionA))
throw new Exception();
}
@ -219,7 +219,7 @@ internal static partial class HelperGenealogicalDataCommunication
siblingDirectoryName = Path.GetFileName(directory);
if (!results.TryGetValue(siblingDirectoryName, out collectionB))
{
results.Add(siblingDirectoryName, new());
results.Add(siblingDirectoryName, []);
if (!results.TryGetValue(siblingDirectoryName, out collectionB))
throw new Exception();
}
@ -270,8 +270,8 @@ internal static partial class HelperGenealogicalDataCommunication
bool moreAdded = false;
List<string>? collection;
DateOnly? dateOnly = null;
List<string> distinct = new();
List<string> @continue = new();
List<string> distinct = [];
List<string> @continue = [];
for (int j = i + 1; j < lines.Count; j++)
{
if (lines[j][0] == '1')
@ -324,7 +324,7 @@ internal static partial class HelperGenealogicalDataCommunication
string seven;
string? note = null;
DateOnly? dateOnly = null;
List<string> @continue = new();
List<string> @continue = [];
bool? isDead = lines[i].Length == 8 && lines[i][..8] == "1 DEAT Y" ? true : lines[i].Length == 8 && lines[i][..8] == "1 DEAT N" ? false : null;
for (int j = i + 1; j < lines.Count; j++)
{
@ -364,7 +364,7 @@ internal static partial class HelperGenealogicalDataCommunication
string seven;
string? note = null;
DateOnly? dateOnly = null;
List<string> @continue = new();
List<string> @continue = [];
for (int j = i + 1; j < lines.Count; j++)
{
if (lines[j][0] == '1')
@ -399,11 +399,11 @@ internal static partial class HelperGenealogicalDataCommunication
private static string[] GetNewLines(ReadOnlyCollection<string> lines, Birth? birth)
{
List<string> results = new();
List<string> results = [];
string six;
string text;
string seven;
List<string> @continue = birth is null ? new() : birth.Continue.ToList();
List<string> @continue = birth is null ? [] : birth.Continue.ToList();
for (int i = 0; i < lines.Count; i++)
{
if (birth is null)
@ -447,7 +447,7 @@ internal static partial class HelperGenealogicalDataCommunication
private static List<long> GetIdsWhenPersonHasTitle(ReadOnlyDictionary<long, Person> people)
{
List<long> results = new();
List<long> results = [];
foreach (KeyValuePair<long, Person> keyValuePair in people)
{
if (keyValuePair.Value.Title is null)
@ -459,7 +459,7 @@ internal static partial class HelperGenealogicalDataCommunication
private static ReadOnlyCollection<GenealogicalDataCommunicationRelation> GetRelations(ReadOnlyCollection<ReadOnlyCollection<string>> familyGroupLines)
{
List<GenealogicalDataCommunicationRelation> results = new();
List<GenealogicalDataCommunicationRelation> results = [];
int id;
string relation;
string[] segments;
@ -585,12 +585,12 @@ internal static partial class HelperGenealogicalDataCommunication
{
GenealogicalDataCommunicationCollections result;
long? id;
List<string> lines = new();
List<string> lines = [];
List<string> footerLines = [];
const string startsWith = "0 @";
List<string> footerLines = new();
Dictionary<long, List<string>> keyValuePairs = new();
List<ReadOnlyCollection<string>> familyGroupLines = new();
string[] sourceLines = string.IsNullOrEmpty(input.GenealogicalDataCommunicationFile) || !File.Exists(input.GenealogicalDataCommunicationFile) ? Array.Empty<string>() : File.ReadAllLines(input.GenealogicalDataCommunicationFile);
Dictionary<long, List<string>> keyValuePairs = [];
List<ReadOnlyCollection<string>> familyGroupLines = [];
string[] sourceLines = string.IsNullOrEmpty(input.GenealogicalDataCommunicationFile) || !File.Exists(input.GenealogicalDataCommunicationFile) ? [] : File.ReadAllLines(input.GenealogicalDataCommunicationFile);
ReadOnlyCollection<string> headerLines = GetHeaderLines(startsWith, sourceLines);
for (int i = headerLines.Count; i < sourceLines.Length; i++)
{
@ -632,7 +632,7 @@ internal static partial class HelperGenealogicalDataCommunication
}
if (id is null)
throw new Exception(string.Join(Environment.NewLine, lines));
keyValuePairs.Add(id.Value, new());
keyValuePairs.Add(id.Value, []);
if (lines.Count == 0)
continue;
keyValuePairs[id.Value].AddRange(lines);
@ -648,7 +648,7 @@ internal static partial class HelperGenealogicalDataCommunication
private static ReadOnlyDictionary<long, Person> GetPeople(Input input, GenealogicalDataCommunicationCollections genealogicalDataCommunicationCollections)
{
Dictionary<long, Person> results = new();
Dictionary<long, Person> results = [];
long? id;
char? sex;
Name? name;
@ -724,11 +724,11 @@ internal static partial class HelperGenealogicalDataCommunication
Collections result;
long personKey;
char[] ageCollection;
Dictionary<long, string> idToName = new();
Dictionary<long, long> idToPersonKey = new();
Dictionary<long, string> idToGivenName = new();
List<PersonExport> collection = [];
Dictionary<long, string> idToName = [];
Dictionary<long, long> idToPersonKey = [];
Dictionary<long, string> idToGivenName = [];
int length = appSettings.PersonBirthdayFormat.Length;
List<PersonExport> collection = new();
foreach (KeyValuePair<long, Person> keyValuePair in people)
{
if (keyValuePair.Value.Birth?.Note is null)
@ -742,7 +742,7 @@ internal static partial class HelperGenealogicalDataCommunication
personKey = dateTime.Ticks;
idToPersonKey.Add(keyValuePair.Key, personKey);
idToName.Add(keyValuePair.Key, WindowsFileSystem().Replace(keyValuePair.Value.Name.ForwardSlashFull, "_"));
ageCollection = keyValuePair.Value.Birth.Continue.Count == 0 ? Array.Empty<char>() : keyValuePair.Value.Birth.Continue[0].ToArray();
ageCollection = keyValuePair.Value.Birth.Continue.Count == 0 ? [] : keyValuePair.Value.Birth.Continue[0].ToArray();
idToGivenName.Add(keyValuePair.Key, string.IsNullOrEmpty(keyValuePair.Value.Name.Given) ? WindowsFileSystem().Replace(keyValuePair.Value.Name.ForwardSlashFull, "_") : WindowsFileSystem().Replace(keyValuePair.Value.Name.Given, "_"));
collection.Add(new(keyValuePair.Key, new(keyValuePair.Value.Lines), keyValuePair.Value.Birth.Note, ageCollection, dateTime, personKey));
}
@ -752,7 +752,7 @@ internal static partial class HelperGenealogicalDataCommunication
private static List<Family> GetFamilyCollection(ReadOnlyCollection<ReadOnlyCollection<string>> familyGroupLines, ReadOnlyDictionary<long, Person> people, ReadOnlyDictionary<long, long> idToPersonKey, ReadOnlyDictionary<long, string> idToName, ReadOnlyDictionary<long, string> idToGivenName)
{
List<Family> results = new();
List<Family> results = [];
string? name;
long personKey;
Person? person;
@ -862,8 +862,8 @@ internal static partial class HelperGenealogicalDataCommunication
private static void WriteGenealogicalDataCommunicationCollections(ILogger<Worker> logger, Input input, GenealogicalDataCommunicationCollections genealogicalDataCommunicationCollections, ReadOnlyDictionary<long, Person> people)
{
List<string> lines = new();
List<string> allLines = new();
List<string> lines = [];
List<string> allLines = [];
if (genealogicalDataCommunicationCollections.HeaderLines.Count > 0)
{
allLines.AddRange(genealogicalDataCommunicationCollections.HeaderLines);
@ -931,8 +931,8 @@ internal static partial class HelperGenealogicalDataCommunication
long count = ticks;
string rootDirectory;
string approximateYears;
List<string> distinct = new();
List<string> duplicates = new();
List<string> distinct = [];
List<string> duplicates = [];
string personDisplayDirectoryName;
foreach (PersonExport personExport in personExportCollection)
{
@ -1010,7 +1010,7 @@ internal static partial class HelperGenealogicalDataCommunication
[Obsolete]
private static string[] GetNewLines(ReadOnlyCollection<string> lines, Name? name, string[] kFiles, string[] mFiles, string[] k2Files, string[] m2Files)
{
List<string> results = new();
List<string> results = [];
string six;
int? birthLastLine = null;
string slugName = WindowsFileSystem().Replace(string.Concat(name?.ForwardSlashFull), "_");

66
Helpers/HelperGit.cs Normal file
View File

@ -0,0 +1,66 @@
using System.Collections.ObjectModel;
using System.Diagnostics;
using System.Text;
namespace File_Folder_Helper.Helpers;
internal static class HelperGit
{
private record ProcessResult(string Errors,
int ExitCode,
string Output);
private static async Task<ProcessResult> RunProcessAsync(string application, string arguments, string workingDirectory, CancellationToken cancellationToken)
{
using Process process = new();
StringBuilder outputBuilder = new();
StringBuilder errorsBuilder = new();
process.StartInfo = new ProcessStartInfo
{
CreateNoWindow = true,
UseShellExecute = false,
RedirectStandardError = true,
RedirectStandardOutput = true,
FileName = application,
Arguments = arguments,
WorkingDirectory = workingDirectory,
};
process.OutputDataReceived += (_, args) => outputBuilder.AppendLine(args.Data);
process.ErrorDataReceived += (_, args) => errorsBuilder.AppendLine(args.Data);
_ = process.Start();
process.BeginErrorReadLine();
process.BeginOutputReadLine();
await process.WaitForExitAsync(cancellationToken);
return new(errorsBuilder.ToString().Trim(), process.ExitCode, outputBuilder.ToString().Trim());
}
private static async Task<string> RunAsync(string arguments, string workingDirectory, CancellationToken cancellationToken)
{
ProcessResult result = await RunProcessAsync("git", arguments, workingDirectory, cancellationToken);
if (result.ExitCode != 0)
throw new Exception($"{result.ExitCode} {result.Errors}");
return result.Output;
}
internal static ReadOnlyCollection<string> GetOthersModifiedAndDeletedExcludingStandardFiles(string repositoryDirectory, bool usePathCombine, CancellationToken cancellationToken)
{
List<string> results = [];
string checkDirectory = Path.Combine(repositoryDirectory, ".git");
if (Directory.Exists(checkDirectory))
{
Task<string> task = RunAsync($"ls-files --others --modified --deleted --exclude-standard", repositoryDirectory, cancellationToken);
task.Wait(cancellationToken);
string[] files = task.Result.Split("\r\n");
foreach (string file in files)
{
if (!usePathCombine)
results.Add(file);
else
results.Add(Path.GetFullPath(Path.Combine(repositoryDirectory, file)));
}
}
return new(results);
}
}
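A minimal usage sketch for the new HelperGit class (not part of the commit; it assumes a caller inside the same project, and the repository path is an assumption):
// Hypothetical caller: list the untracked, modified, and deleted files git reports
// for a repository, resolved to full paths because usePathCombine is true.
ReadOnlyCollection<string> files = HelperGit.GetOthersModifiedAndDeletedExcludingStandardFiles(
    "C:/Repos/Example", usePathCombine: true, CancellationToken.None);
foreach (string file in files)
    Console.WriteLine(file);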


@ -1,84 +0,0 @@
using Microsoft.Extensions.Logging;
using System.Globalization;
namespace File_Folder_Helper.Helpers;
internal static class HelperHardcodedFileSearchAndSort
{
internal static void HardcodedFileSearchAndSort(ILogger log, string sourceDirectory, SearchOption searchOption = SearchOption.TopDirectoryOnly)
{
bool check;
string lines;
string checkFile;
string? directory;
FileInfo fileInfo;
string weekOfYear;
string checkDirectory;
CultureInfo cultureInfo = new("en-US");
Calendar calendar = cultureInfo.Calendar;
string[] hardcodedValues = new string[]
{
"BIORAD2",
"BIORAD3",
"BIORAD4",
"BIORAD5",
"CDE2",
"CDE3",
"CDE4",
"CDE5",
"CDE6",
"HGCV1",
"HGCV2",
"HGCV3",
"TENCOR1",
"TENCOR2",
"TENCOR3",
"SP101",
"SPV01",
"SRP",
"WC6Inch",
"WC8Inch",
"Bio-Rad"
};
string[] files = Directory.GetFiles(sourceDirectory, "*", searchOption);
foreach (string file in files)
{
directory = Path.GetDirectoryName(file);
if (string.IsNullOrEmpty(directory))
continue;
check = false;
fileInfo = new(file);
weekOfYear = calendar.GetWeekOfYear(fileInfo.LastWriteTime, CalendarWeekRule.FirstDay, DayOfWeek.Sunday).ToString("00");
for (int i = 1; i < 3; i++)
{
if (check)
break;
lines = i switch
{
1 => fileInfo.Name,
2 => File.ReadAllText(file),
_ => throw new NotImplementedException()
};
foreach (string hardcodedValue in hardcodedValues)
{
if (!lines.Contains(hardcodedValue))
continue;
checkDirectory = Path.Combine(directory, $"{fileInfo.LastWriteTime:yyyy}_Week_{weekOfYear}", fileInfo.LastWriteTime.ToString("yyyy-MM-dd"), hardcodedValue);
if (!Directory.Exists(checkDirectory))
_ = Directory.CreateDirectory(checkDirectory);
checkFile = Path.Combine(checkDirectory, Path.GetFileName(file));
if (File.Exists(checkFile) || !File.Exists(file))
continue;
try
{ File.Move(file, checkFile); }
catch (Exception) { }
check = true;
break;
}
}
}
log.LogInformation("{sourceDirectory}", sourceDirectory);
}
}


@ -11,8 +11,8 @@ internal static class HelperILMerge
bool result;
ProcessStartInfo processStartInfo;
StringBuilder stringBuilder = new();
string[] successChecks = new string[] { "success" };
string[] errorChecks = new string[] { "Error", "Conflict", "error:" };
string[] successChecks = ["success"];
string[] errorChecks = ["Error", "Conflict", "error:"];
FileInfo ilMerge = new("C:/Users/phares/AppData/Local/IFXApps/ILMerge/ILMerge.exe");
string[] dllFiles = Directory.GetFiles(workingDirectory, "*.dll", SearchOption.TopDirectoryOnly);
string errorFile = Path.Combine(workingDirectory, string.Concat(Path.GetFileName(workingDirectory), ".err"));


@ -1,5 +1,6 @@
using File_Folder_Helper.Models;
using Microsoft.Extensions.Logging;
using System.Collections.ObjectModel;
using System.Text;
using System.Text.RegularExpressions;
@ -14,6 +15,11 @@ internal static partial class HelperKanbanMetadata
[GeneratedRegex("[\\s!?.,@:;|\\\\/\"'`£$%\\^&*{}[\\]()<>~#+\\-=_¬]+")]
private static partial Regex InvalidCharacter();
private record Record(FileInfo FileInfo,
string Group,
int GroupCount,
int ItemLineNumber);
private static string GetParamCase(string value)
{
string result;
@ -27,7 +33,7 @@ internal static partial class HelperKanbanMetadata
}
private static void TestParamCases()
{
{ // cSpell:disable
if (GetParamCase("PascalCase") != "pascal-case")
throw new Exception("PascalCase");
if (GetParamCase("camelCase") != "camel-case")
@ -62,17 +68,17 @@ internal static partial class HelperKanbanMetadata
throw new Exception("יקספּערמענאַל פּרובירן");
if (GetParamCase("я надеюсь, что это сработает") != "я-надеюсь-что-это-сработает")
throw new Exception("я надеюсь, что это сработает");
}
} // cSpell:restore
private static List<(int, int, string, FileInfo)> GetCollectionFromIndex(string sourceDirectory, string[] lines)
private static List<Record> GetCollectionFromIndex(string sourceDirectory, ReadOnlyCollection<string> lines)
{
List<(int, int, string, FileInfo)> results = new();
List<Record> results = [];
string line;
FileInfo fileInfo;
string[] segments;
int groupCount = 0;
string? group = null;
for (int i = 0; i < lines.Length; i++)
for (int i = 0; i < lines.Count; i++)
{
line = lines[i];
if (line.Length < 4)
@ -91,57 +97,146 @@ internal static partial class HelperKanbanMetadata
fileInfo = new(Path.Combine(sourceDirectory, segments[1][..^1]));
if (!fileInfo.Exists)
continue;
results.Add((groupCount, i, group, fileInfo));
results.Add(new(fileInfo, group, groupCount, i));
}
return results;
}
internal static void SetMetadata(ILogger log, AppSettings appSettings, string sourceDirectory, bool addTicks)
private static void WriteKanbanBoardFile(string directory, List<Record> records, string h1)
{
string? last = null;
List<string> results = [h1];
foreach (Record record in records)
{
if (last is null || record.Group != last)
{
results.Add(string.Empty);
results.Add($"## {record.Group}");
results.Add(string.Empty);
}
results.Add($"- [ ] {Path.GetFileNameWithoutExtension(record.FileInfo.Name)}");
last = record.Group;
}
string file = Path.Combine(directory, "index.knb.md");
if (File.Exists(file))
{
string allText = File.ReadAllText(file);
if (string.Join(Environment.NewLine, results) == allText)
results.Clear();
}
if (results.Count > 0)
File.WriteAllText(file, string.Join(Environment.NewLine, results));
}
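// Illustration only, not part of the commit: the index.knb.md written above would
// look roughly like this (group and card names are hypothetical; the H1 line is
// taken from the kanban index file):
//
//   # Example Board
//
//   ## Todo
//
//   - [ ] first-card
//   - [ ] second-card
//
//   ## Done
//
//   - [ ] third-card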
private static void WriteKanbanBoardYmlView(string directory, List<Record> records, string kanbanIndexH1)
{
List<string> results = [kanbanIndexH1, string.Empty];
string h1;
TimeSpan timeSpan;
List<string> lines;
LineNumber lineNumber;
Record[] sorted = (from l in records orderby l.GroupCount, l.FileInfo.LastWriteTime descending select l).ToArray();
foreach (Record record in sorted)
{
if (record.ItemLineNumber == 0)
throw new NotSupportedException();
(lines, lineNumber) = HelperMarkdown.GetStatusAndFrontMatterYamlEndLineNumbers(record.FileInfo);
if (lines.Count == 0)
continue;
timeSpan = new(record.FileInfo.LastWriteTime.Ticks - record.FileInfo.CreationTime.Ticks);
h1 = lineNumber.H1 is null ? Path.GetFileNameWithoutExtension(record.FileInfo.Name) : lines[lineNumber.H1.Value];
results.Add($"#{h1}");
results.Add(string.Empty);
results.Add("```yaml");
results.Add($"CreationTime: {record.FileInfo.CreationTime:yyyy-MM-dd}");
results.Add($"LastWriteTime: {record.FileInfo.LastWriteTime:yyyy-MM-dd}");
results.Add($"TotalDays: {Math.Round(timeSpan.TotalDays, 2)}");
if (lineNumber.FrontMatterYamlEnd is not null && lines.Count >= lineNumber.FrontMatterYamlEnd.Value)
{
for (int i = 0; i < lineNumber.FrontMatterYamlEnd; i++)
{
if (lines[i] == "---")
continue;
results.Add(lines[i]);
}
}
results.Add($"status: \"{record.GroupCount}-{record.Group}\"");
results.Add("```");
results.Add(string.Empty);
}
string file = Path.Combine(directory, "index.yml.md");
if (File.Exists(file))
{
string allText = File.ReadAllText(file);
if (string.Join(Environment.NewLine, results) == allText)
results.Clear();
}
if (results.Count > 0)
File.WriteAllText(file, string.Join(Environment.NewLine, results));
}
internal static void SetMetadata(string sourceDirectory, ReadOnlyCollection<string> kanbanIndexFileLines, LineNumber kanbanIndexFileLineNumber, ReadOnlyCollection<string> gitOthersModifiedAndDeletedExcludingStandardFiles)
{
bool? match;
bool gitCheck;
string? paramCase;
string statusLine;
List<string> lines;
LineNumber lineNumber;
if (log is null)
throw new NullReferenceException();
string? directory = Path.GetDirectoryName(sourceDirectory);
List<Record> records = GetCollectionFromIndex(sourceDirectory, kanbanIndexFileLines);
if (directory is not null && kanbanIndexFileLineNumber.H1 is not null)
{
string checkDirectory = Path.Combine(directory, ".vscode", "helper");
if (Directory.Exists(checkDirectory))
{
WriteKanbanBoardFile(checkDirectory, records, kanbanIndexFileLines[kanbanIndexFileLineNumber.H1.Value]);
WriteKanbanBoardYmlView(checkDirectory, records, kanbanIndexFileLines[kanbanIndexFileLineNumber.H1.Value]);
}
}
foreach (Record record in records)
{
if (record.ItemLineNumber == 0)
throw new NotSupportedException();
(lines, lineNumber) = HelperMarkdown.GetStatusAndFrontMatterYamlEndLineNumbers(record.FileInfo);
if (lines.Count == 0)
continue;
statusLine = $"status: \"{record.GroupCount}-{record.Group}\"";
paramCase = lineNumber.H1 is null ? null : GetParamCase(lines[lineNumber.H1.Value]);
match = lineNumber.H1 is null || paramCase is null ? null : Path.GetFileNameWithoutExtension(record.FileInfo.Name) == paramCase;
if (lineNumber.FrontMatterYamlEnd is null)
throw new NotSupportedException($"{nameof(SetMetadata)} must be executed first!");
if (lineNumber.H1 is not null && paramCase is not null && match is not null && !match.Value)
lines[lineNumber.H1.Value] = $"# {paramCase}";
if (lineNumber.Status is null)
lines.Insert(lineNumber.FrontMatterYamlEnd.Value, statusLine);
else
{
if ((match is null || match.Value) && lines[lineNumber.Status.Value] == statusLine)
continue;
lines[lineNumber.Status.Value] = statusLine;
}
gitCheck = gitOthersModifiedAndDeletedExcludingStandardFiles.Contains(record.FileInfo.FullName);
if (!gitCheck)
continue;
File.WriteAllLines(record.FileInfo.FullName, lines);
}
}
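// Illustration only, not part of the commit: after this overload runs, each card's
// front matter carries a status line such as
//   status: "2-Doing"
// (hypothetical group), and a card file is rewritten only when git already lists it
// among the others/modified/deleted files passed in.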
internal static void SetMetadata(ILogger logger, string sourceDirectory)
{
TestParamCases();
string fullPath = Path.GetFullPath(sourceDirectory);
if (!Directory.Exists(fullPath))
_ = Directory.CreateDirectory(fullPath);
string indexFile = Path.Combine(fullPath, "index.md");
if (File.Exists(indexFile))
if (!File.Exists(indexFile))
logger.LogInformation("<{indexFile}> doesn't exist!", indexFile);
else
{
string[] indexFileLines = File.ReadAllLines(indexFile);
List<(int, int, string, FileInfo)> collectionFromIndex = GetCollectionFromIndex(sourceDirectory, indexFileLines);
foreach ((int groupCount, int itemLineNumber, string group, FileInfo fileInfo) in collectionFromIndex)
{
if (itemLineNumber == 0)
throw new NotSupportedException();
(lines, lineNumber) = HelperMarkdown.GetStatusAndFrontMatterYamlEndLineNumbers(fileInfo);
if (lines.Count == 0)
continue;
statusLine = $"status: \"{groupCount}-{group}\"";
paramCase = lineNumber.H1 is null ? null : GetParamCase(lines[lineNumber.H1.Value]);
if (addTicks)
indexFileLines[itemLineNumber] = $"{fileInfo.LastWriteTime.Ticks}~~~{indexFileLines[itemLineNumber]}";
match = lineNumber.H1 is null || paramCase is null ? null : Path.GetFileNameWithoutExtension(fileInfo.Name) == paramCase;
if (lineNumber.FrontMatterYamlEnd is null)
throw new NotSupportedException($"{nameof(SetMetadata)} must be executed first!");
if (lineNumber.H1 is not null && paramCase is not null && match is not null && !match.Value)
lines[lineNumber.H1.Value] = $"# {paramCase}";
if (lineNumber.Status is null)
lines.Insert(lineNumber.FrontMatterYamlEnd.Value, statusLine);
else
{
if ((match is null || match.Value) && lines[lineNumber.Status.Value] == statusLine)
continue;
lines[lineNumber.Status.Value] = statusLine;
}
File.WriteAllLines(fileInfo.FullName, lines);
}
if (addTicks)
File.WriteAllLines(indexFile, indexFileLines);
FileInfo fileInfo = new(indexFile);
(List<string> lines, LineNumber lineNumber) = HelperMarkdown.GetStatusAndFrontMatterYamlEndLineNumbers(fileInfo);
SetMetadata(fullPath, new(lines), lineNumber, gitOthersModifiedAndDeletedExcludingStandardFiles: new([]));
}
}


@ -13,14 +13,14 @@ internal static class HelperLogMerge
FileInfo fileInfo;
string[] segments;
string checkDirectory;
List<string> lines = [];
string format = "yyyyMMdd";
List<string> moveFiles = [];
string segment1 = string.Empty;
string sourceFileNameWithoutExtension;
List<string> lines = new();
List<string> moveFiles = new();
DateTime dateTime = DateTime.Now.AddMinutes(2);
string[] sourceFiles = Directory.GetFiles(argsZero, "*.log", SearchOption.TopDirectoryOnly);
Dictionary<string, Dictionary<int, string[]>> keyValuePairs = new();
Dictionary<string, Dictionary<int, string[]>> keyValuePairs = [];
foreach (string sourceFile in sourceFiles)
{
sourceFileNameWithoutExtension = Path.GetFileNameWithoutExtension(sourceFile);
@ -33,7 +33,7 @@ internal static class HelperLogMerge
moveFiles.Add(sourceFile);
day = sourceFileNameWithoutExtension[..8];
if (!keyValuePairs.ContainsKey(day))
keyValuePairs.Add(day, new Dictionary<int, string[]>());
keyValuePairs.Add(day, []);
if (sourceFileNameWithoutExtension.Substring(8, 1) == "_")
continue;
segments = sourceFileNameWithoutExtension.Split('_');

File diff suppressed because it is too large.

Helpers/HelperNPM.cs (new file, 30 lines)

@ -0,0 +1,30 @@
using System.Diagnostics;
namespace File_Folder_Helper.Helpers;
internal static class HelperNPM
{
internal static string RunCommand(string commandFileName, string commandToRun, string workingDirectory)
{
string result;
if (!string.IsNullOrEmpty(commandFileName))
File.WriteAllText(Path.Combine(workingDirectory, commandFileName), commandToRun);
if (string.IsNullOrEmpty(workingDirectory))
workingDirectory = Directory.GetDirectoryRoot(Directory.GetCurrentDirectory());
ProcessStartInfo processStartInfo = new()
{
FileName = "cmd",
RedirectStandardError = true,
RedirectStandardInput = true,
RedirectStandardOutput = true,
WorkingDirectory = workingDirectory
};
Process? process = Process.Start(processStartInfo) ?? throw new Exception("Process should not be null.");
process.StandardInput.WriteLine($"{commandToRun} & exit");
process.WaitForExit();
result = $"{process.StandardOutput.ReadToEnd()}{Environment.NewLine}{process.StandardError.ReadToEnd()}{Environment.NewLine}{process.ExitCode}";
return result;
}
}
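A hedged usage sketch for HelperNPM.RunCommand (not part of the commit; the command file name, command, and working directory are assumptions):
// Hypothetical caller: write the command to a file in the working directory, run it
// through cmd, and get stdout, stderr, and the exit code back as one string.
string output = HelperNPM.RunCommand("npm-install.cmd", "npm install", "C:/Repos/Example");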


@ -17,9 +17,9 @@ internal static class HelperPackageFilesByDate
PackageJson? packageJson;
string[] packageJsonFiles;
string? packageJsonDirectory;
List<DateTime> dateTimes = [];
string fileNameWithoutExtension;
string packageJsonDirectoryName;
List<DateTime> dateTimes = new();
if (log is null)
throw new NullReferenceException();
if (!Directory.Exists(sourceDirectory))


@ -113,7 +113,7 @@ internal static class HelperPdfStripperWrapper
string[] lines;
string fileName;
string[] segments;
List<string> collection = new();
List<string> collection = [];
foreach (string file in files)
{
lines = File.ReadAllLines(file);


@ -14,7 +14,7 @@ internal static class HelperRenameToOldMoveDeleteOldMerge
{
string renameFile;
string destinationFile;
List<string> deleteFiles = new();
List<string> deleteFiles = [];
string[] moveFiles = Directory.GetFiles(argsZero, "*", SearchOption.TopDirectoryOnly);
log.LogInformation("<{moveFiles.Length}> to move", moveFiles.Length);
foreach (string moveFile in moveFiles)


@ -34,6 +34,7 @@ internal static class HelperSaveOrCopyContents
log.LogInformation("{empty}", string.Empty);
continue;
}
#pragma warning disable IDE0072
string fileName = dfb switch
{
ConsoleKey.D => "Directories",
@ -41,6 +42,7 @@ internal static class HelperSaveOrCopyContents
ConsoleKey.B => "Both",
_ => throw new NotSupportedException(),
};
#pragma warning restore IDE0072
string filePathAndName = Path.Combine(parentDirectory, $"{fileName}.txt");
if (dfb == ConsoleKey.F)
collection = Directory.GetFiles(argsZero, "*", searchOption).ToList();
@ -79,7 +81,7 @@ internal static class HelperSaveOrCopyContents
foreach (string file in files)
{
fileName = Path.GetFileName(file);
if (fileName == fileName.ToLower())
if (fileName.Equals(fileName, StringComparison.CurrentCultureIgnoreCase))
continue;
File.Move(file, file.ToLower());
filesRenamed++;


@ -48,7 +48,7 @@ internal static class HelperStart
if (!result)
result = standardOutput == string.Empty && standardError == string.Empty;
if (!result)
File.WriteAllLines(errorFile, new string[] { standardOutput, Environment.NewLine, Environment.NewLine, standardError });
File.WriteAllLines(errorFile, [standardOutput, Environment.NewLine, Environment.NewLine, standardError]);
return result;
}


@ -8,32 +8,64 @@ namespace File_Folder_Helper.Helpers;
internal static partial class HelperZipFilesBy
{
private static DateTimeOffset? GetDateTimeOffset(string keyFileExtension, FileInfo fileInfo, FileInfo extractKeyFileInfo)
{
DateTimeOffset? dateTimeOffset = null;
using ZipArchive zip = ZipFile.Open(fileInfo.FullName, ZipArchiveMode.Read);
foreach (ZipArchiveEntry zipArchiveEntry in zip.Entries)
{
if (!zipArchiveEntry.Name.EndsWith(keyFileExtension))
continue;
dateTimeOffset = zipArchiveEntry.LastWriteTime;
if (fileInfo.FullName[0] != '\\')
{
zipArchiveEntry.ExtractToFile(extractKeyFileInfo.FullName);
File.SetCreationTime(extractKeyFileInfo.FullName, fileInfo.CreationTime);
File.SetLastWriteTime(extractKeyFileInfo.FullName, dateTimeOffset.Value.LocalDateTime);
}
break;
}
return dateTimeOffset;
}
[GeneratedRegex("[a-zA-Z0-9]{1,}")]
private static partial Regex LowerAlphaAlphaAndNumber();
private static bool SetDateFromZipEntry(ILogger<Worker> logger, string[] zipFiles, string keyFile, string keyFileB, string keyFileC)
private static bool ExtractKeyFileAndSetDateFromZipEntry(ILogger<Worker> logger, string[] zipFiles, string keyFileExtension, string keyFileExtensionB, string keyFileExtensionC, bool renameToLower)
{
bool result = false;
string[] files;
string checkFile;
string? lowerName;
FileInfo fileInfo;
string? zipDirectory;
FileInfo extractKeyFileInfo;
DateTimeOffset? dateTimeOffset;
foreach (string zipFile in zipFiles)
{
fileInfo = new(zipFile);
if (fileInfo.DirectoryName is null)
throw new NullReferenceException(nameof(fileInfo.DirectoryName));
lowerName = !renameToLower ? null : Path.Combine(fileInfo.DirectoryName, fileInfo.Name.ToLower());
if (renameToLower && lowerName is not null && lowerName != fileInfo.FullName)
{
files = Directory.GetFiles(fileInfo.DirectoryName, $"{Path.GetFileNameWithoutExtension(fileInfo.Name)}*", SearchOption.TopDirectoryOnly);
foreach (string file in files)
File.Move(file, Path.Combine(fileInfo.DirectoryName, Path.GetFileName(file).ToLower()));
fileInfo = new(lowerName);
if (fileInfo.DirectoryName is null)
throw new NullReferenceException(nameof(fileInfo.DirectoryName));
}
extractKeyFileInfo = new(Path.Combine(fileInfo.DirectoryName, $"{Path.GetFileNameWithoutExtension(fileInfo.Name)}{keyFileExtension}"));
if (extractKeyFileInfo.Exists)
{
if (extractKeyFileInfo.CreationTime.ToString("yyyy-MM-dd") == fileInfo.CreationTime.ToString("yyyy-MM-dd") && extractKeyFileInfo.LastWriteTime.ToString("yyyy-MM-dd") == fileInfo.LastWriteTime.ToString("yyyy-MM-dd"))
continue;
File.Delete(extractKeyFileInfo.FullName);
}
try
{
dateTimeOffset = null;
fileInfo = new(zipFile);
using ZipArchive zip = ZipFile.Open(zipFile, ZipArchiveMode.Read);
foreach (ZipArchiveEntry zipArchiveEntry in zip.Entries)
{
if (!zipArchiveEntry.Name.EndsWith(keyFile))
continue;
dateTimeOffset = zipArchiveEntry.LastWriteTime;
break;
}
zipDirectory = Path.GetDirectoryName(zipFile);
if (dateTimeOffset is null || zipDirectory is null)
dateTimeOffset = GetDateTimeOffset(keyFileExtension, fileInfo, extractKeyFileInfo);
if (dateTimeOffset is null)
continue;
if (fileInfo.LastWriteTime != dateTimeOffset.Value.LocalDateTime)
{
@ -41,7 +73,9 @@ internal static partial class HelperZipFilesBy
if (!result)
result = true;
}
files = Directory.GetFiles(zipDirectory, $"*{keyFile}", SearchOption.TopDirectoryOnly);
if (string.IsNullOrEmpty(keyFileExtensionB))
continue;
files = Directory.GetFiles(fileInfo.DirectoryName, keyFileExtensionB, SearchOption.TopDirectoryOnly);
foreach (string file in files)
{
fileInfo = new(file);
@ -52,22 +86,11 @@ internal static partial class HelperZipFilesBy
result = true;
}
}
if (string.IsNullOrEmpty(keyFileB))
if (string.IsNullOrEmpty(keyFileExtensionC))
continue;
files = Directory.GetFiles(zipDirectory, keyFileB, SearchOption.TopDirectoryOnly);
foreach (string file in files)
{
fileInfo = new(file);
if (fileInfo.LastWriteTime != dateTimeOffset.Value.LocalDateTime)
{
File.SetLastWriteTime(fileInfo.FullName, dateTimeOffset.Value.LocalDateTime);
if (!result)
result = true;
}
}
if (string.IsNullOrEmpty(keyFileC))
continue;
files = Directory.GetFiles(zipDirectory, keyFileC, SearchOption.TopDirectoryOnly);
if (fileInfo.DirectoryName is null)
throw new NullReferenceException(nameof(fileInfo.DirectoryName));
files = Directory.GetFiles(fileInfo.DirectoryName, keyFileExtensionC, SearchOption.TopDirectoryOnly);
foreach (string file in files)
{
fileInfo = new(file);
@ -93,9 +116,41 @@ internal static partial class HelperZipFilesBy
{ File.Move(zipFile, checkFile); }
catch (Exception) { logger.LogInformation("<{zipFile}> couldn't be moved!", zipFile); }
}
}
return result;
}
private static void ZipDirectory(ILogger<Worker> logger, string directory)
{
logger.LogInformation("{directory}", directory);
string zipFile = $"{directory}.zip";
int skipChars = directory.Length + 1;
string[] files = Directory.GetFiles(directory, "*", SearchOption.AllDirectories);
string[] directories = Directory.GetDirectories(directory, "*", SearchOption.AllDirectories);
ZipArchiveMode zipArchiveMode = File.Exists(zipFile) ? ZipArchiveMode.Update : ZipArchiveMode.Create;
for (int i = 1; i < 3; i++)
{
try
{
using ZipArchive zip = ZipFile.Open(zipFile, zipArchiveMode);
for (int j = 0; j < directories.Length; j++)
_ = zip.CreateEntry($"{directories[j][skipChars..]}/");
foreach (string file in files)
{
_ = zip.CreateEntryFromFile(file, file[skipChars..]);
File.Delete(file);
}
break;
}
catch (Exception)
{
File.Delete(zipFile);
zipArchiveMode = ZipArchiveMode.Create;
}
}
Directory.Delete(directory, recursive: true);
}
internal static bool ZipFilesByDate(ILogger<Worker> logger, string sourceDirectory, SearchOption searchOption = SearchOption.TopDirectoryOnly, string dayFormat = "")
{
bool result = false;
@ -115,9 +170,9 @@ internal static partial class HelperZipFilesBy
DateTime dateTime = DateTime.MinValue;
DateTime firstEmail = new(2019, 3, 8);
CultureInfo cultureInfo = new("en-US");
Dictionary<string, DateTime> weeks = [];
Calendar calendar = cultureInfo.Calendar;
Regex regex = LowerAlphaAlphaAndNumber();
Dictionary<string, DateTime> weeks = new();
int ticksLength = nowDateTime.AddDays(-6).Ticks.ToString().Length;
for (int i = 0; i < int.MaxValue; i++)
{
@ -131,10 +186,10 @@ internal static partial class HelperZipFilesBy
}
weekOfYear = calendar.GetWeekOfYear(nowDateTime, CalendarWeekRule.FirstDay, DayOfWeek.Sunday).ToString("00");
string skipKey = string.Concat(nowDateTime.ToString("yyyy"), "_Week_", weekOfYear);
Dictionary<string, List<string>> keyValuePairs = new();
Dictionary<string, List<string>> keyValuePairs = [];
string[] topDirectories = Directory.GetDirectories(sourceDirectory, "*", SearchOption.TopDirectoryOnly);
if (topDirectories.Length == 0)
topDirectories = new string[] { sourceDirectory };
topDirectories = [sourceDirectory];
foreach (string topDirectory in topDirectories)
{
keyValuePairs.Clear();
@ -187,7 +242,7 @@ internal static partial class HelperZipFilesBy
if (key == skipKey)
continue;
if (!keyValuePairs.ContainsKey(key))
keyValuePairs.Add(key, new List<string>());
keyValuePairs.Add(key, []);
keyValuePairs[key].Add(subFile);
}
}
@ -237,49 +292,36 @@ internal static partial class HelperZipFilesBy
return result;
}
internal static bool SetDateFromZipEntryForNuspec(ILogger<Worker> logger, string[] files) =>
SetDateFromZipEntry(logger, files, ".nuspec", "icon", "readme");
internal static bool ExportNuspecAndSetDateFromZipEntry(ILogger<Worker> logger, string[] files, bool renameToLower) =>
ExtractKeyFileAndSetDateFromZipEntry(logger, files, ".nuspec", "icon", "readme", renameToLower);
internal static bool SetDateFromZipEntry(ILogger<Worker> logger, string sourceDirectory, SearchOption searchOption = SearchOption.AllDirectories)
internal static bool ExtractKeyFileAndSetDateFromZipEntry(ILogger<Worker> logger, string sourceDirectory, SearchOption searchOption = SearchOption.AllDirectories, bool renameToLower = false)
{
bool result = false;
bool loop;
string keyFile;
string keyFileB;
string keyFileC;
string[] zipFiles;
string searchPattern;
string keyFileExtension;
string keyFileExtensionB;
string keyFileExtensionC;
if (!Directory.Exists(sourceDirectory))
_ = Directory.CreateDirectory(sourceDirectory);
for (int i = 1; i < 3; i++)
{
(searchPattern, keyFile, keyFileB, keyFileC) = i switch
(searchPattern, keyFileExtension, keyFileExtensionB, keyFileExtensionC) = i switch
{
1 => ("*.nupkg", ".nuspec", "icon", "readme"),
2 => ("*.vsix", ".vsixmanifest", string.Empty, string.Empty),
_ => throw new NotSupportedException()
};
zipFiles = Directory.GetFiles(sourceDirectory, searchPattern, searchOption);
loop = SetDateFromZipEntry(logger, zipFiles, keyFile, keyFileB, keyFileC);
loop = ExtractKeyFileAndSetDateFromZipEntry(logger, zipFiles, keyFileExtension, keyFileExtensionB, keyFileExtensionC, renameToLower);
if (loop && !result)
result = true;
}
return result;
}
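A usage sketch for the renamed entry point (not part of the commit; the logger variable and source directory are assumptions):
// Hypothetical caller: scan a packages directory for *.nupkg and *.vsix files,
// extract the key file (.nuspec / .vsixmanifest), and set file dates from the
// key entry's timestamp.
bool changed = HelperZipFilesBy.ExtractKeyFileAndSetDateFromZipEntry(
    logger, "C:/Packages", SearchOption.AllDirectories, renameToLower: true);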
private static void ZipDirectory(ILogger<Worker> logger, string directory)
{
logger.LogInformation("{directory}", directory);
string[] files = Directory.GetFiles(directory, "*", SearchOption.AllDirectories);
using ZipArchive zip = ZipFile.Open($"{directory}.zip", ZipArchiveMode.Create);
foreach (string file in files)
{
_ = zip.CreateEntryFromFile(file, Path.GetFileName(file));
File.Delete(file);
}
Directory.Delete(directory, recursive: true);
}
internal static void ZipFilesByDirectoryWithFile(ILogger<Worker> logger, string sourceDirectory)
{
string[] files1;


@ -0,0 +1,100 @@
using System.Security.Cryptography;
using System.Text;
using System.Text.RegularExpressions;
namespace File_Folder_Helper.Helpers;
public static class RijndaelEncryption
{ // cSpell:disable
/// <summary>
/// Change the input key GUID when you use this code in your own program.
/// Keep this input key very safe and prevent anyone from recovering it.
/// Generated 2021-08-10
/// </summary>
internal const string _InputKey = "970CCEF6-4307-4F6A-9AC8-377DADB889BD";
/// <summary>
/// Encrypt the given text and give the byte array back as a BASE64 string
/// </summary>
/// <param name="text">The text to encrypt</param>
/// <param name="salt">The password salt</param>
/// <returns>The encrypted text</returns>
public static string Encrypt(string text, string salt)
{
string result;
if (string.IsNullOrEmpty(text))
throw new ArgumentNullException(nameof(text));
#pragma warning disable SYSLIB0022
RijndaelManaged aesAlg = NewRijndaelManaged(salt);
#pragma warning restore
ICryptoTransform encryptor = aesAlg.CreateEncryptor(aesAlg.Key, aesAlg.IV);
MemoryStream msEncrypt = new();
using (CryptoStream csEncrypt = new(msEncrypt, encryptor, CryptoStreamMode.Write))
using (StreamWriter swEncrypt = new(csEncrypt))
swEncrypt.Write(text);
result = Convert.ToBase64String(msEncrypt.ToArray());
return result;
}
/// <summary>
/// Checks if a string is base64 encoded
/// </summary>
/// <param name="base64String">The base64 encoded string</param>
/// <returns></returns>
public static bool IsBase64String(string base64String)
{
bool result;
base64String = base64String.Trim();
#pragma warning restore
result = (base64String.Length % 4 == 0) && Regex.IsMatch(base64String, @"^[a-zA-Z0-9\+/]*={0,3}$", RegexOptions.None);
#pragma warning restore
return result;
}
/// <summary>
/// Decrypts the given text
/// </summary>
/// <param name="cipherText">The encrypted BASE64 text</param>
/// <param name="salt">The password salt</param>
/// <returns>The decrypted text</returns>
public static string Decrypt(string cipherText, string salt)
{
if (string.IsNullOrEmpty(cipherText))
throw new ArgumentNullException(nameof(cipherText));
if (!IsBase64String(cipherText))
throw new Exception("The cipherText input parameter is not base64 encoded");
string text;
#pragma warning disable SYSLIB0022
RijndaelManaged aesAlg = NewRijndaelManaged(salt);
#pragma warning restore
ICryptoTransform decryptor = aesAlg.CreateDecryptor(aesAlg.Key, aesAlg.IV);
byte[] cipher = Convert.FromBase64String(cipherText);
using (MemoryStream msDecrypt = new(cipher))
{
using CryptoStream csDecrypt = new(msDecrypt, decryptor, CryptoStreamMode.Read);
using StreamReader srDecrypt = new(csDecrypt);
text = srDecrypt.ReadToEnd();
}
return text;
}
/// <summary>
/// Create a new RijndaelManaged class and initialize it
/// </summary>
/// <param name="salt">The password salt</param>
/// <returns></returns>
#pragma warning disable SYSLIB0022, SYSLIB0041, CA5379
private static RijndaelManaged NewRijndaelManaged(string salt)
{
ArgumentNullException.ThrowIfNull(salt);
byte[] saltBytes = Encoding.ASCII.GetBytes(salt);
Rfc2898DeriveBytes key = new(_InputKey, saltBytes);
RijndaelManaged aesAlg = new();
#pragma warning restore
aesAlg.Key = key.GetBytes(aesAlg.KeySize / 8);
aesAlg.IV = key.GetBytes(aesAlg.BlockSize / 8);
return aesAlg;
}
}
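A hedged round-trip sketch for RijndaelEncryption (not part of the commit; the plain text and salt values are assumptions):
// Hypothetical round trip: Encrypt returns a BASE64 string, Decrypt reverses it.
string cipherText = RijndaelEncryption.Encrypt("hello world", "example-salt");
string plainText = RijndaelEncryption.Decrypt(cipherText, "example-salt");
// plainText should be "hello world" again.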


@ -8,7 +8,6 @@ public record AppSettings(string Company,
string[] ExcludeDirectoryNames,
string[] ExcludeSchemes,
string PersonBirthdayFormat,
char[] PersonCharacters,
char[] PersonTitleFilters,
string WorkingDirectoryName)
{


@ -12,7 +12,6 @@ public class AppSettings
public string[]? ExcludeDirectoryNames { get; set; }
public string[]? ExcludeSchemes { get; set; }
public string? PersonBirthdayFormat { get; set; }
public string? PersonCharacters { get; set; }
public string? PersonTitleFilters { get; set; }
public string? WorkingDirectoryName { get; set; }
@ -22,6 +21,23 @@ public class AppSettings
return result;
}
private static void PreVerify(IConfigurationRoot configurationRoot, AppSettings? appSettings)
{
if (appSettings?.Company is null)
{
List<string> paths = [];
foreach (IConfigurationProvider configurationProvider in configurationRoot.Providers)
{
if (configurationProvider is not Microsoft.Extensions.Configuration.Json.JsonConfigurationProvider jsonConfigurationProvider)
continue;
if (jsonConfigurationProvider.Source.FileProvider is not Microsoft.Extensions.FileProviders.PhysicalFileProvider physicalFileProvider)
continue;
paths.Add(physicalFileProvider.Root);
}
throw new NotSupportedException($"Not found!{Environment.NewLine}{string.Join(Environment.NewLine, paths.Distinct())}");
}
}
private static Models.AppSettings Get(AppSettings? appSettings)
{
Models.AppSettings result;
@ -30,7 +46,6 @@ public class AppSettings
if (appSettings?.ExcludeDirectoryNames is null) throw new NullReferenceException(nameof(appSettings.ExcludeDirectoryNames));
if (appSettings?.ExcludeSchemes is null) throw new NullReferenceException(nameof(appSettings.ExcludeSchemes));
if (appSettings?.PersonBirthdayFormat is null) throw new NullReferenceException(nameof(appSettings.PersonBirthdayFormat));
if (appSettings?.PersonCharacters is null) throw new NullReferenceException(nameof(appSettings.PersonCharacters));
if (appSettings?.PersonTitleFilters is null) throw new NullReferenceException(nameof(appSettings.PersonTitleFilters));
if (appSettings?.WorkingDirectoryName is null) throw new NullReferenceException(nameof(appSettings.WorkingDirectoryName));
result = new(
@ -39,7 +54,6 @@ public class AppSettings
appSettings.ExcludeDirectoryNames,
appSettings.ExcludeSchemes,
appSettings.PersonBirthdayFormat,
appSettings.PersonCharacters.ToArray(),
appSettings.PersonTitleFilters.ToArray(),
appSettings.WorkingDirectoryName
);
@ -52,19 +66,7 @@ public class AppSettings
#pragma warning disable IL3050, IL2026
AppSettings? appSettings = configurationRoot.Get<AppSettings>();
#pragma warning restore IL3050, IL2026
if (appSettings?.Company is null)
{
foreach (IConfigurationProvider configurationProvider in configurationRoot.Providers)
{
if (configurationProvider is not Microsoft.Extensions.Configuration.Json.JsonConfigurationProvider jsonConfigurationProvider)
continue;
if (jsonConfigurationProvider.Source.FileProvider is not Microsoft.Extensions.FileProviders.PhysicalFileProvider physicalFileProvider)
continue;
if (!physicalFileProvider.Root.Contains("UserSecrets"))
continue;
throw new NotSupportedException(physicalFileProvider.Root);
}
}
PreVerify(configurationRoot, appSettings);
result = Get(appSettings);
return result;
}

Worker.cs (140 lines changed)

@ -1,6 +1,7 @@
using File_Folder_Helper.Models;
using Microsoft.Extensions.Hosting;
using Microsoft.Extensions.Logging;
using WindowsShortcutFactory;
namespace File_Folder_Helper;
@ -24,8 +25,8 @@ public class Worker : BackgroundService
_IsSilent = silentIndex > -1;
if (_IsSilent)
args.RemoveAt(silentIndex);
_ConsoleKeys = new ConsoleKey[]
{
_ConsoleKeys =
[
ConsoleKey.A,
ConsoleKey.B,
ConsoleKey.C,
@ -47,11 +48,12 @@ public class Worker : BackgroundService
ConsoleKey.T,
ConsoleKey.U,
ConsoleKey.V,
ConsoleKey.W,
ConsoleKey.X,
ConsoleKey.Y,
ConsoleKey.Z,
ConsoleKey.Delete
};
];
}
public override Task StartAsync(CancellationToken cancellationToken) =>
@ -60,10 +62,66 @@ public class Worker : BackgroundService
public override Task StopAsync(CancellationToken cancellationToken) =>
base.StopAsync(cancellationToken);
protected override async Task ExecuteAsync(CancellationToken stoppingToken)
private void CreateWindowsShortcut()
{
if (!stoppingToken.IsCancellationRequested)
await Task.Delay(500, stoppingToken);
object? assemblyName = AppContext.GetData("AssemblyName");
string sendToDirectory = Environment.GetFolderPath(Environment.SpecialFolder.SendTo);
string localApplicationData = Environment.GetFolderPath(Environment.SpecialFolder.LocalApplicationData);
if (Directory.Exists(sendToDirectory) && Directory.Exists(localApplicationData) && assemblyName is string assemblyNameValue)
{
string shortcut = Path.Combine(sendToDirectory, $"{assemblyNameValue}.lnk");
string directory = Path.Combine(localApplicationData, _AppSettings.WorkingDirectoryName, assemblyNameValue);
if (!Directory.Exists(directory))
_ = Directory.CreateDirectory(directory);
if (!File.Exists(sendToDirectory))
{
WindowsShortcut windowsShortcut = new()
{
Arguments = "s X C:/ProgramData Day-Helper-2024-01-07 1",
Path = Path.Combine(directory, $"{assemblyNameValue}.exe"),
WorkingDirectory = AppContext.BaseDirectory
};
windowsShortcut.Save(shortcut);
windowsShortcut.Dispose();
}
}
}
private void LogOptions()
{
_Logger.LogInformation("A) Save (Top Directory Only),");
_Logger.LogInformation("B) Save (All Directories),");
_Logger.LogInformation("C) Clipboard (Top Directory Only),");
_Logger.LogInformation("D) Clipboard (All Directories),");
_Logger.LogInformation("E) Everything delete recursive,");
_Logger.LogInformation("F) Find and delete all *.log.* files then empty directories,");
_Logger.LogInformation("G) Genealogical Data Communication,");
// H
_Logger.LogInformation("I) Ignore case and rename files to lowercase,");
_Logger.LogInformation("J) Set Date from Json Entry,");
_Logger.LogInformation("K) Kanban support,");
_Logger.LogInformation("L) Log Merge (APC Log [0-9(8)]_*.log),");
_Logger.LogInformation("M) Markdown Wiki Link Verification,");
_Logger.LogInformation("N) Create Note Files,");
_Logger.LogInformation("O) Oracle tnsNames.ora,");
_Logger.LogInformation("P) PDF parse,");
// Q
_Logger.LogInformation("R) Rename to old, copy, delete old,");
_Logger.LogInformation("S) Set Date from Zip Entry,");
_Logger.LogInformation("T) *Ticks ~~Too long rename~~,");
// U
// V
// W
_Logger.LogInformation("X) Day Helpers,");
_Logger.LogInformation("Y) Zip file(s) by directory with file,");
_Logger.LogInformation("Z) Zip file(s) by date,");
_Logger.LogInformation("Delete) Delete empty directories,");
}
protected override async Task ExecuteAsync(CancellationToken cancellationToken)
{
if (!cancellationToken.IsCancellationRequested)
await Task.Delay(500, cancellationToken);
if (_AppSettings is null)
throw new NullReferenceException(nameof(_AppSettings));
try
@ -82,45 +140,24 @@ public class Worker : BackgroundService
consoleKey = ConsoleKey.End;
if (singleCharIndex is not null)
_Args.RemoveAt(singleCharIndex.Value);
_Logger.LogInformation(consoleKey.ToString());
if (_Args.Count == 0)
_Logger.LogInformation("Must pass a argument!");
else if (Directory.Exists(_Args[0]) && File.Exists(Path.Combine(_Args[0], string.Concat(Path.GetFileName(_Args[0]), ".dll"))))
Helpers.HelperILMerge.ILMerge(_Args[0]);
{
_Logger.LogWarning("Must pass an argument!");
CreateWindowsShortcut();
}
else if (Directory.Exists(_Args[0]))
{
for (int i = 0; i < int.MaxValue; i++)
if (!_ConsoleKeys.Contains(consoleKey))
{
if (_ConsoleKeys.Contains(consoleKey))
break;
_Logger.LogInformation("A) Save (Top Directory Only),");
_Logger.LogInformation("B) Save (All Directories),");
_Logger.LogInformation("C) Clipboard (Top Directory Only),");
_Logger.LogInformation("D) Clipboard (All Directories),");
_Logger.LogInformation("E) Everything delete recursive,");
_Logger.LogInformation("F) Find and delete all *.log.* files then empty directories,");
_Logger.LogInformation("G) Genealogical Data Communication,");
_Logger.LogInformation("H) Hardcoded file search and sort,");
_Logger.LogInformation("I) Ignore case and rename files to lowercase,");
_Logger.LogInformation("J) Set Date from Json Entry,");
_Logger.LogInformation("K) Kanban support,");
_Logger.LogInformation("L) Log Merge (APC Log [0-9(8)]_*.log),");
_Logger.LogInformation("N) Create Note Files,");
_Logger.LogInformation("M) Markdown Wiki Link Verification,");
_Logger.LogInformation("O) Oracle tnsNames.ora,");
_Logger.LogInformation("P) PDF parse,");
// Q
_Logger.LogInformation("R) Rename to old, copy, delete old,");
_Logger.LogInformation("S) Set Date from Zip Entry,");
_Logger.LogInformation("T) *Ticks ~~Too long rename~~,");
_Logger.LogInformation("U) Links for Hugo,");
_Logger.LogInformation("V) VSCode Hope Sort,");
// W
_Logger.LogInformation("X) Download,");
_Logger.LogInformation("Y) Zip file(s) by directory with file,");
_Logger.LogInformation("Z) Zip file(s) by date,");
_Logger.LogInformation("Delete) Delete empty directories,");
consoleKey = Console.ReadKey().Key;
_Logger.LogInformation(" ");
for (int i = 0; i < int.MaxValue; i++)
{
if (_ConsoleKeys.Contains(consoleKey))
break;
LogOptions();
consoleKey = Console.ReadKey().Key;
_Logger.LogInformation(" ");
}
}
switch (consoleKey)
{
@ -139,9 +176,6 @@ public class Worker : BackgroundService
case ConsoleKey.G:
Helpers.HelperGenealogicalDataCommunication.FileSystemToGenealogicalDataCommunication(_AppSettings, _Logger, _Args);
break;
case ConsoleKey.H:
Helpers.HelperHardcodedFileSearchAndSort.HardcodedFileSearchAndSort(_Logger, _Args[0]);
break;
case ConsoleKey.I:
Helpers.HelperSaveOrCopyContents.IgnoreCaseAndRenameFilesToLowercase(_Logger, _Args[0]);
break;
@ -149,7 +183,7 @@ public class Worker : BackgroundService
Helpers.HelperPackageFilesByDate.SetDateFromJsonEntry(_Logger, _Args[0]);
break;
case ConsoleKey.K:
Helpers.HelperKanbanMetadata.SetMetadata(_Logger, _AppSettings, _Args[0], addTicks: true);
Helpers.HelperKanbanMetadata.SetMetadata(_Logger, _Args[0]);
break;
case ConsoleKey.L:
Helpers.HelperLogMerge.LogMerge(_Args[0]);
@ -158,9 +192,7 @@ public class Worker : BackgroundService
Helpers.HelperCreateNoteFiles.CreateNoteFiles(_Logger, _Args[0]);
break;
case ConsoleKey.M:
if (_Args[0].EndsWith(".kanbn") && Directory.Exists(_Args[0]))
Helpers.HelperKanbanMetadata.SetMetadata(_Logger, _AppSettings, _Args[0], addTicks: false);
Helpers.HelperMarkdown.MarkdownWikiLinkVerification(_AppSettings, _Logger, _Args);
Helpers.HelperMarkdown.MarkdownWikiLinkVerification(_AppSettings, _Logger, _Args, cancellationToken);
break;
case ConsoleKey.O:
Helpers.HelperFindReplace.UpdateTnsNames(_Logger, _Args);
@ -173,21 +205,15 @@ public class Worker : BackgroundService
Helpers.HelperRenameToOldMoveDeleteOldMerge.RenameToOldMoveDeleteOld(_Logger, _Args[0]);
break;
case ConsoleKey.S:
_ = Helpers.HelperZipFilesBy.SetDateFromZipEntry(_Logger, _Args[0]);
_ = Helpers.HelperZipFilesBy.ExtractKeyFileAndSetDateFromZipEntry(_Logger, _Args[0]);
break;
case ConsoleKey.T:
Helpers.HelperTooLong.UpdateDateVerifyAndGetTicksDirectories(_Logger, _AppSettings, _Args[0]);
// Helpers.HelperTooLong.TooLong(_Args[0], delete: false);
// Helpers.HelperTooLong.TooLong(_Args[0], delete: true);
break;
case ConsoleKey.U:
Helpers.HelperMarkdown.MarkdownConvertLinksForHugo(_AppSettings, _Logger, _Args);
break;
case ConsoleKey.V:
Helpers.HelperVSCodePossibleExtension.Sort(_Logger, _Args);
break;
case ConsoleKey.X:
Helpers.HelperDownload.SaveJson(_Logger, _Args[0]);
Helpers.HelperDay.Select(_AppSettings, _Logger, _Args, cancellationToken);
break;
case ConsoleKey.Y:
Helpers.HelperZipFilesBy.ZipFilesByDirectoryWithFile(_Logger, _Args[0]);
@ -211,7 +237,7 @@ public class Worker : BackgroundService
throw new Exception(_Args[0]);
}
catch (Exception ex)
{ _Logger.LogError("{Message}{NewLine}{StackTrace}", ex.Message, Environment.NewLine, ex.StackTrace); }
{ _Logger.LogError("{Message}{NewLine}{_Args}{NewLine}{StackTrace}", ex.Message, Environment.NewLine, string.Join(' ', _Args), Environment.NewLine, ex.StackTrace); }
if (_IsSilent)
_Logger.LogInformation("Done. Bye");
else


@ -1,10 +1,11 @@
{
"scripts": {
"prettier.check": "prettier . --check",
"prettier.write": "prettier . --write",
"kanbn.board": "kanbn board",
"garbage-collect": "git gc",
"git:pre-commit": "cp .vscode/pre-commit .git/hooks/ && chmod +x .git/hooks/pre-commit && echo 'hook copied'",
"kanbn.board.json": "kanbn board -j > .kanbn/board.json",
"garbage-collect": "git gc"
"kanbn.board": "kanbn board",
"prettier.check": "prettier . --check",
"prettier.write": "prettier . --write"
},
"devDependencies": {
"prettier": "3.0.0"