Compare commits

..

10 Commits

Author SHA1 Message Date
8ca489d818 ~ over | split 2025-05-17 07:51:53 -07:00
8f22f188a2 WriteNginxFileSystem 2025-04-29 15:12:17 -07:00
d23f802cdb free-file-sync-change-created-date 2025-04-21 10:27:06 -07:00
74e9fc33af process-data-standard-format-to-json update 2025-04-19 08:15:08 -07:00
aa6461c62d Scripts 2025-04-11 20:31:29 -07:00
fad2db46b5 https sync 2025-04-10 09:16:01 -07:00
cc9c5013a9 KumaToGatus 2025-04-04 18:59:31 -07:00
23c0ff9683 Update to 2025-02-18 and 2025-02-19 2025-03-31 19:52:26 -07:00
919279a917 javascript methods for sequence to readable date
c# like java for PI5

Helper 2025-02-19 more updates for Compare
2025-03-26 17:02:35 -07:00
0621d0f07e Updates to Backup method 2025-03-23 15:57:07 -07:00
31 changed files with 2517 additions and 1242 deletions

View File

@ -11,3 +11,11 @@ Authorization: Basic {{pat}}
GET {{host}}/tfs/FactoryIntegration/_apis/wit/workitems/{{ids}}/updates GET {{host}}/tfs/FactoryIntegration/_apis/wit/workitems/{{ids}}/updates
Accept: application/json Accept: application/json
Authorization: Basic {{pat}} Authorization: Basic {{pat}}
### Iterations
GET {{host}}/tfs/FactoryIntegration/ART%20SPS/cea9f426-6fb1-4d65-93d5-dbf471056212/_apis/work/teamsettings/iterations?
Accept: application/json
Authorization: Basic {{pat}}
###

83
.vscode/launch.json vendored
View File

@ -13,43 +13,66 @@
"args": [ "args": [
"s", "s",
"X", "X",
"L:/DevOps/EAF-Mesa-Integration/met08thftirqs408m/bin/Debug",
"Day-Helper-2025-03-06",
"*.pdsf",
"s",
"X",
"D:/ProgramData/VisualStudioCode",
"Day-Helper-2025-03-05",
"isccvm57294f1ed.infineon.com",
".vscode/extensions/bennycode.sort-everything-1.4.1",
"s",
"X",
"\\\\mesfs.infineon.com\\EC_Characterization_Si\\Archive\\BIORAD4\\2025_Week_10\\2025-03-03\\03--_2025-03-03_05;54_AM_1292405457",
"Day-Helper-2025-02-19",
"csv-*.pdsf",
"*.pdsf",
"Test,Count,MesEntity,HeaderUniqueId,UniqueId,Date,Wafer,Position,BIORAD4",
",BIORAD4",
",BIORAD4",
"MID|Cassette,Lot|Batch,Title|Batch,Wafer|Text,Thickness|Site,MeanThickness|GradeMean,|BIORAD4",
"888",
"999",
"s",
"X",
"D:/Tmp",
"Day-Helper-2025-02-18",
"*.pdsf",
"A_MES_ENTITY=",
"B_SEQUENCE=",
"\\\\mesfs.infineon.com\\EC_Characterization_Si\\Archive", "\\\\mesfs.infineon.com\\EC_Characterization_Si\\Archive",
"Day-Helper-2025-04-29",
"*.pdsf",
"333",
"444",
"555",
"666", "666",
"777", "777",
"888", "888",
"999", "999",
"s", "s",
"X", "X",
"D:/5-Other-Small/Kanban-mestsa003/{}/2025/2025_Week_05/223065", "\\\\mesfs.infineon.com\\EC_Characterization_Si\\Archive\\BIORAD4\\2025_Week_16\\2025-04-17",
"Day-Helper-2025-02-04", "Day-Helper-2025-02-19",
"csv-*.pdsf",
"*.pdsf",
"Time,HeaderUniqueId,UniqueId,Date,Wafer,Position,BIORAD4",
",BIORAD4",
",BIORAD4",
"Test|EventId,Date|DateTime,Position|Slot,DeltaThicknessSlotsOneAndTwentyFive|Actual Delta Thick Pts 1 and 25,PercentDeltaThicknessSlotsOneAndTwentyFive|% Delta Thick Pts 1 and 25,MID|Cassette,Lot|Batch,Title|Batch,Wafer|Text,Thickness|Site,MeanThickness|GradeMean,|BIORAD4",
"Time,A_LOGISTICS,B_LOGISTICS,Test,Count,Index,MesEntity,MID,Date,Employee,Lot,PSN,Reactor,Recipe,Cassette,GradeStdDev,HeaderUniqueId,Layer,MeanThickness,PassFail,RDS,Slot,Title,UniqueId,Wafer,Zone,Mean,Position,StdDev,Thickness,ThicknessSlotOne,ThicknessSlotTwentyFive,DeltaThicknessSlotsOneAndTwentyFive,PercentDeltaThicknessSlotsOneAndTwentyFive",
"Time,A_LOGISTICS,B_LOGISTICS,Count,Sequence,MesEntity,Index,Batch,Cassette,DateTime,Destination,Mean,PassFail,Recipe,Reference,Site,Slot,Source,StdDev,Text,GradeMean,GradeStdDev,RDS,PSN,Reactor,Layer,Zone,Employee,InferredLot,Thickness First Slot,Thickness Last Slot,Actual Delta Thick Pts 1 and 25,% Delta Thick Pts 1 and 25,EventId",
"0,1,2,31,3,6,5,8,9,27,7,23,24,13,8,21,-1,25,20,12,22,16,7,-1,19,26,11,16,18,15,-1,-1,29,30",
"s",
"X",
"C:/Users/phares/AppData/Roaming/FreeFileSync",
"Day-Helper-2025-04-21",
"GlobalSettings.xml",
"LastSync|Config",
"s",
"X",
"L:/Tmp/MET08ANLYSDIFAAST230",
"Day-Helper-2025-03-06",
"*.pdsf",
"s",
"X",
"D:/ProgramData/VisualStudioCode|D:/6-Other-Large-Z/Linux-Ubuntu-Phares/home/lphares/dorico",
"Day-Helper-2025-04-07",
"z-include-patterns.nsv",
"z-exclude-patterns.nsv",
"https://isccvm57294f1ed/VisualStudioCode|hxttps://dorico.phares.duckdns.org|hxttps://mestsa006.infineon.com/VisualStudioCode",
"+|G|G|G|-",
"||||",
"666",
"777",
"888",
"999",
"s",
"X",
"C:/Users/PHARES/AppData/Local/IFXApps/gatus",
"Day-Helper-2025-04-04",
"*.json",
".metrics",
"https://messa010ec.infineon.com/metrics",
"gatus_results_endpoint_success",
"666",
"777",
"888",
"999",
""
], ],
"cwd": "${workspaceFolder}", "cwd": "${workspaceFolder}",
"console": "integratedTerminal", "console": "integratedTerminal",

View File

@ -19,6 +19,7 @@
"Exif", "Exif",
"FAMC", "FAMC",
"FAMS", "FAMS",
"Gatus",
"GIVN", "GIVN",
"HUSB", "HUSB",
"Immich", "Immich",
@ -35,6 +36,7 @@
"OBJE", "OBJE",
"onenote", "onenote",
"PDFC", "PDFC",
"pdsf",
"Permyriad", "Permyriad",
"pged", "pged",
"Phares", "Phares",

14
.vscode/tasks.json vendored
View File

@ -228,6 +228,20 @@
"type": "shell", "type": "shell",
"command": "npx jest", "command": "npx jest",
"problemMatcher": [] "problemMatcher": []
},
{
"label": "File-Folder-Helper AOT s X Day-Helper-2025-03-20",
"type": "shell",
"command": "L:/DevOps/Mesa_FI/File-Folder-Helper/bin/Release/net8.0/win-x64/publish/File-Folder-Helper.exe",
"args": [
"s",
"X",
"L:/DevOps/Mesa_FI/File-Folder-Helper",
"Day-Helper-2025-03-20",
"false",
"4"
],
"problemMatcher": []
} }
] ]
} }

View File

@ -42,7 +42,7 @@ internal static partial class Helper20240403
string keyIndex = args[5]; string keyIndex = args[5];
string directory = args[0]; string directory = args[0];
logger.LogInformation(directory); logger.LogInformation(directory);
string[] columns = args[4].Split('|'); string[] columns = args[4].Split('~');
DynamicHostConfigurationProtocolConfiguration dynamicHostConfigurationProtocolConfiguration = new(columns, directory, ignore, int.Parse(keyIndex), pattern, primary); DynamicHostConfigurationProtocolConfiguration dynamicHostConfigurationProtocolConfiguration = new(columns, directory, ignore, int.Parse(keyIndex), pattern, primary);
AlertIfNewDeviceIsConnected(dynamicHostConfigurationProtocolConfiguration, logger); AlertIfNewDeviceIsConnected(dynamicHostConfigurationProtocolConfiguration, logger);
} }

View File

@ -701,7 +701,7 @@ internal static partial class Helper20240911
List<char> spaces = []; List<char> spaces = [];
List<string> lines = []; List<string> lines = [];
ReadOnlyCollection<WorkItem> results; ReadOnlyCollection<WorkItem> results;
string[] workItemTypes = args[4].Split('|'); string[] workItemTypes = args[4].Split('~');
string sourceDirectory = Path.GetFullPath(args[0]); string sourceDirectory = Path.GetFullPath(args[0]);
string destinationDirectory = Path.GetFullPath(args[6]); string destinationDirectory = Path.GetFullPath(args[6]);
if (!Directory.Exists(destinationDirectory)) if (!Directory.Exists(destinationDirectory))

View File

@ -12,7 +12,7 @@ internal static partial class Helper20241212
string newFileName; string newFileName;
string directoryName; string directoryName;
string searchPattern = args[2]; string searchPattern = args[2];
string[] searchPatterns = args[3].Split('|'); string[] searchPatterns = args[3].Split('~');
string sourceDirectory = Path.GetFullPath(args[0]); string sourceDirectory = Path.GetFullPath(args[0]);
string[] files = Directory.GetFiles(sourceDirectory, searchPattern, SearchOption.AllDirectories); string[] files = Directory.GetFiles(sourceDirectory, searchPattern, SearchOption.AllDirectories);
if (files.Length == 0) if (files.Length == 0)

View File

@ -11,12 +11,31 @@ namespace File_Folder_Helper.ADO2024.PI4;
internal static partial class Helper20241217 internal static partial class Helper20241217
{ {
private record Record(string Directory, Job? Job, string Path); private record SecureShell(
private record Job(string AlternatePath, string Directory, string Extension, File[] Files, int FilesCount, double FilesTotalLength, int Keep, Target[] Targets); );
private record SecureShell();
private record ServerMessageBlock(string Path, bool Required); private record ServerMessageBlock(string Path,
private record Target(SecureShell? SecureShell, ServerMessageBlock? ServerMessageBlock); bool Required);
private record File(long LastWriteTicks, long Length, string RelativePath);
private record Target(SecureShell? SecureShell,
ServerMessageBlock? ServerMessageBlock);
private record File(long LastWriteTicks,
long Length,
string RelativePath);
private record Record(string Directory,
Job Job,
string Path);
private record Job(string? AlternatePath,
string Directory,
string Extension,
File[] Files,
int FilesCount,
double FilesTotalLength,
int Keep,
Target[] Targets);
[JsonSourceGenerationOptions(WriteIndented = true)] [JsonSourceGenerationOptions(WriteIndented = true)]
[JsonSerializable(typeof(Job))] [JsonSerializable(typeof(Job))]
@ -30,6 +49,95 @@ internal static partial class Helper20241217
{ {
} }
internal static void Backup(ILogger<Worker> logger, List<string> args)
{
Job jobNew;
string path;
string? json;
bool areTheyTheSame;
string directoryName;
ReadOnlyCollection<File> files;
string searchPattern = args[2];
string[] ignoreFileNames = args[3].Split('~');
string sourceDirectory = Path.GetFullPath(args[0]);
char destinationDriveLetter = args[4].Split(':')[0][0];
logger.LogInformation("Searching <{sourceDirectory}> with search pattern {searchPattern}", args[0], searchPattern);
if (Debugger.IsAttached)
Verify(searchPattern, ignoreFileNames);
IEnumerable<Record> records = GetRecords(sourceDirectory, searchPattern);
foreach (Record record in records)
{
if (record.Job is null || string.IsNullOrEmpty(record.Job.Extension))
continue;
logger.LogInformation("Searching <{directory}>", record.Directory);
files = GetFiles(searchPattern, ignoreFileNames, record);
jobNew = GetJob(searchPattern, ignoreFileNames, record, files);
json = JsonSerializer.Serialize(jobNew, JobSourceGenerationContext.Default.Job);
areTheyTheSame = GetAreTheyTheSame(logger, searchPattern, ignoreFileNames, record, jobNew);
if (areTheyTheSame)
{
WriteAllText(record.Path, json);
continue;
}
directoryName = Path.GetFileName(record.Directory);
path = Path.Combine(record.Directory, $"{directoryName}-{DateTime.Now:yyyy-MM-dd-HH-mm-ss-fff}{record.Job.Extension}");
logger.LogInformation("Writing <{directory}> extension", record.Directory);
WritePassedExtension(destinationDriveLetter, record, files, path);
WriteAllText(record.Path, json);
}
if (Debugger.IsAttached && records.Count() == 0)
{
files = GetFiles(sourceDirectory, searchPattern, ignoreFileNames);
json = JsonSerializer.Serialize(files.ToArray(), FilesSourceGenerationContext.Default.FileArray);
WriteAllText(Path.Combine(Environment.CurrentDirectory, ".vscode", "helper", ".json"), json);
}
}
private static void Verify(string searchPattern, string[] ignoreFileNames)
{
List<Target> targets = [
new(new SecureShell(), null),
new(null, new ServerMessageBlock("\\\\mesfs.infineon.com\\EC_APC\\DEV", true))
];
string directory = Path.Combine(Environment.CurrentDirectory, ".vscode", "helper");
if (!Directory.Exists(directory))
_ = Directory.CreateDirectory(directory);
string path = Path.Combine(directory, "verify.json");
ReadOnlyCollection<File> files = GetFiles(directory, searchPattern, ignoreFileNames);
ReadOnlyCollection<File> collection = GetFilteredFiles(searchPattern, ignoreFileNames, files);
double filesTotalLength = collection.Select(l => l.Length).Sum();
Job job = new(AlternatePath: "C:/Users/phares",
Directory: directory,
Extension: ".iso",
Files: collection.ToArray(),
FilesCount: files.Count,
FilesTotalLength: filesTotalLength,
Keep: 3,
Targets: targets.ToArray());
string json = JsonSerializer.Serialize(job, JobSourceGenerationContext.Default.Job);
WriteAllText(path, json);
}
private static ReadOnlyCollection<File> GetFilteredFiles(string searchPattern, string[] ignoreFileNames, ReadOnlyCollection<File> files)
{
List<File> results = [];
string fileName;
foreach (File file in files)
{
if (file.RelativePath == searchPattern)
continue;
fileName = Path.GetFileName(file.RelativePath);
if (fileName == searchPattern)
throw new Exception("Found nested file!");
if (ignoreFileNames.Any(l => l == fileName))
continue;
if (file.Length == 0)
continue;
results.Add(file);
}
return results.AsReadOnly();
}
private static IEnumerable<Record> GetRecords(string directory, string searchPattern) private static IEnumerable<Record> GetRecords(string directory, string searchPattern)
{ {
Job? job; Job? job;
@ -48,8 +156,19 @@ internal static partial class Helper20241217
continue; continue;
} }
json = System.IO.File.ReadAllText(file); json = System.IO.File.ReadAllText(file);
if (string.IsNullOrEmpty(json) || json is "{}" or "[]")
job = null;
else
job = JsonSerializer.Deserialize(json, JobSourceGenerationContext.Default.Job); job = JsonSerializer.Deserialize(json, JobSourceGenerationContext.Default.Job);
record = new(directoryName, job, file); job ??= new(AlternatePath: null,
Directory: directory,
Extension: ".iso",
Files: [],
FilesCount: 0,
FilesTotalLength: 0,
Keep: 3,
Targets: []);
record = new(Directory: directoryName, Job: job, Path: file);
yield return record; yield return record;
} }
} }
@ -72,7 +191,7 @@ internal static partial class Helper20241217
relativePath = Path.GetRelativePath(directory, fileInfo.FullName).Replace(';', '_'); relativePath = Path.GetRelativePath(directory, fileInfo.FullName).Replace(';', '_');
if (relativePath.StartsWith("..")) if (relativePath.StartsWith(".."))
relativePath = relativePath[3..]; relativePath = relativePath[3..];
file = new(fileInfo.LastWriteTime.Ticks, fileInfo.Length, relativePath); file = new(LastWriteTicks: fileInfo.LastWriteTime.Ticks, Length: fileInfo.Length, RelativePath: relativePath);
results.Add(file); results.Add(file);
} }
return results.AsReadOnly(); return results.AsReadOnly();
@ -81,61 +200,78 @@ internal static partial class Helper20241217
private static ReadOnlyCollection<File> GetFiles(string searchPattern, string[] ignoreFileNames, Record record) => private static ReadOnlyCollection<File> GetFiles(string searchPattern, string[] ignoreFileNames, Record record) =>
GetFiles(record.Directory, searchPattern, ignoreFileNames); GetFiles(record.Directory, searchPattern, ignoreFileNames);
private static string? GetJsonIfNotEqual(string searchPattern, string[] ignoreFileNames, Record record, Job job, ReadOnlyCollection<File> files) private static Job GetJob(string searchPattern, string[] ignoreFileNames, Record record, ReadOnlyCollection<File> files)
{ {
string? result; Job result;
string? jsonNew; ReadOnlyCollection<File> collection = GetFilteredFiles(searchPattern, ignoreFileNames, files);
string? jsonOld; double filesTotalLengthNew = collection.Select(l => l.Length).Sum();
string fileName; result = new(AlternatePath: record.Job.AlternatePath,
int ignoreCount = 0; Directory: record.Directory,
double filesTotalLengthNew = 0; Extension: record.Job.Extension,
File[] filesArray = files.ToArray(); Files: collection.ToArray(),
double filesTotalLengthOld = job.FilesTotalLength; FilesCount: collection.Count,
foreach (File file in files) FilesTotalLength: filesTotalLengthNew,
filesTotalLengthNew += file.Length; Keep: record.Job.Keep,
Job jobNew = new(job.AlternatePath, Targets: record.Job.Targets);
record.Directory, return result;
job.Extension,
filesArray,
files.Count,
filesTotalLengthNew,
job.Keep,
job.Targets);
result = JsonSerializer.Serialize(jobNew, JobSourceGenerationContext.Default.Job);
if (filesTotalLengthNew != filesTotalLengthOld)
{
filesTotalLengthOld = 0;
foreach (File file in job.Files)
{
fileName = Path.GetFileName(file.RelativePath);
if (fileName == searchPattern || ignoreFileNames.Any(l => l == fileName))
{
ignoreCount += 1;
continue;
} }
if (file.Length == 0)
private static bool GetAreTheyTheSame(ILogger<Worker> logger, string searchPattern, string[] ignoreFileNames, Record record, Job jobNew)
{ {
ignoreCount += 1; bool result;
continue; ReadOnlyCollection<File> collection = GetFilteredFiles(searchPattern, ignoreFileNames, record.Job.Files.AsReadOnly());
} int filesCountOld = collection.Count;
filesTotalLengthOld += file.Length; int filesCountNew = jobNew.Files.Length;
} if (filesCountNew != filesCountOld)
}
if (filesTotalLengthNew != filesTotalLengthOld || files.Count != (job.Files.Length - ignoreCount))
{ {
jsonNew = null; result = false;
jsonOld = null; logger.LogInformation("<{directory}> file count has changed {filesCountNew} != {filesCountOld}", record.Directory, filesCountNew, filesCountOld);
} }
else else
{ {
jsonNew = JsonSerializer.Serialize((from l in filesArray orderby l.RelativePath.Length, l.RelativePath select l).ToArray(), FilesSourceGenerationContext.Default.FileArray); double filesTotalLengthOld = collection.Select(l => l.Length).Sum();
jsonOld = JsonSerializer.Serialize((from l in job.Files orderby l.RelativePath.Length, l.RelativePath where l.RelativePath != searchPattern select l).ToArray(), FilesSourceGenerationContext.Default.FileArray); double filesTotalLengthNew = jobNew.Files.Select(l => l.Length).Sum();
if (filesTotalLengthNew != filesTotalLengthOld)
{
result = false;
logger.LogInformation("<{directory}> file length has changed {filesTotalLengthNew} != {filesTotalLengthOld}", record.Directory, filesTotalLengthNew, filesTotalLengthOld);
}
else
{
string jsonNew = JsonSerializer.Serialize(jobNew.Files, FilesSourceGenerationContext.Default.FileArray);
string jsonOld = JsonSerializer.Serialize(collection.ToArray(), FilesSourceGenerationContext.Default.FileArray);
if (jsonNew == jsonOld)
result = true;
else
{
result = false;
WriteAllText(Path.Combine(Environment.CurrentDirectory, ".vscode", "helper", "old.json"), jsonOld);
WriteAllText(Path.Combine(Environment.CurrentDirectory, ".vscode", "helper", "new.json"), jsonNew);
logger.LogInformation("<{directory}> file serialized are different {filesTotalLengthNew} != {filesTotalLengthOld}", record.Directory, filesTotalLengthNew, filesTotalLengthOld);
}
}
} }
if (!string.IsNullOrEmpty(jsonNew) && !string.IsNullOrEmpty(jsonOld) && jsonNew == jsonOld)
result = null;
return result; return result;
} }
private static void WriteAllText(string path, string text)
{
string check = !System.IO.File.Exists(path) ? string.Empty : System.IO.File.ReadAllText(path);
if (check != text)
System.IO.File.WriteAllText(path, text);
}
private static void WritePassedExtension(char destinationDriveLetter, Record record, ReadOnlyCollection<File> files, string path)
{
string directoryName = Path.GetFileName(record.Directory);
if (record.Job.Extension.Equals(".iso", StringComparison.OrdinalIgnoreCase))
WriteISO(destinationDriveLetter, record, files, path, directoryName);
else if (record.Job.Extension.Equals(".zip", StringComparison.OrdinalIgnoreCase))
WriteZIP(destinationDriveLetter, record, files, path);
else
throw new NotImplementedException();
}
private static void WriteISO(char destinationDriveLetter, Record record, ReadOnlyCollection<File> files, string path, string directoryName) private static void WriteISO(char destinationDriveLetter, Record record, ReadOnlyCollection<File> files, string path, string directoryName)
{ {
string checkFile = $"{destinationDriveLetter}{path[1..]}"; string checkFile = $"{destinationDriveLetter}{path[1..]}";
@ -169,167 +305,4 @@ internal static partial class Helper20241217
_ = zip.CreateEntryFromFile(Path.Combine(record.Directory, file.RelativePath), file.RelativePath); _ = zip.CreateEntryFromFile(Path.Combine(record.Directory, file.RelativePath), file.RelativePath);
} }
private static void WriteExtension(char destinationDriveLetter, Record record, Job job, ReadOnlyCollection<File> files, string path)
{
string directoryName = Path.GetFileName(record.Directory);
if (job.Extension.Equals(".iso", StringComparison.OrdinalIgnoreCase))
WriteISO(destinationDriveLetter, record, files, path, directoryName);
else if (job.Extension.Equals(".zip", StringComparison.OrdinalIgnoreCase))
WriteZIP(destinationDriveLetter, record, files, path);
else
throw new NotImplementedException();
}
private static void PushTo(ServerMessageBlock serverMessageBlock, string path)
{
string remotePath = Path.Combine(serverMessageBlock.Path, Path.GetFileName(path));
System.IO.File.Copy(path, remotePath);
}
private static void PushTo(string directory, string path)
{
string remotePath = Path.Combine(directory, Path.GetFileName(path));
System.IO.File.Copy(path, remotePath);
}
private static ReadOnlyCollection<Exception> PushTo(Job job, string path)
{
List<Exception> results = [];
foreach (Target target in job.Targets)
{
if (target.SecureShell is not null)
continue;
else if (target.ServerMessageBlock is not null)
{
try
{ PushTo(target.ServerMessageBlock, path); }
catch (Exception ex)
{
if (target.ServerMessageBlock.Required)
results.Add(ex);
}
}
else
throw new NotImplementedException();
}
return results.AsReadOnly();
}
private static void DeleteOld(Job job, ServerMessageBlock serverMessageBlock, string path)
{
List<string> results = [];
string[] files = Directory.GetFiles(serverMessageBlock.Path, $"*{job.Extension}", SearchOption.TopDirectoryOnly);
foreach (string file in files)
{
if (file == path)
continue;
results.Add(file);
}
for (int i = job.Keep - 1; i < results.Count; i++)
System.IO.File.Delete(results[i]);
}
private static ReadOnlyCollection<Exception> DeleteOld(Job job, string path)
{
List<Exception> results = [];
foreach (Target target in job.Targets)
{
if (target.SecureShell is not null)
continue;
else if (target.ServerMessageBlock is not null)
{
try
{ DeleteOld(job, target.ServerMessageBlock, path); }
catch (Exception ex)
{
if (target.ServerMessageBlock.Required)
results.Add(ex);
}
}
else
throw new NotImplementedException();
}
return results.AsReadOnly();
}
private static void Verify(string searchPattern, string[] ignoreFileNames)
{
List<Target> targets = [
new(new SecureShell(), null),
new(null, new ServerMessageBlock("\\\\mesfs.infineon.com\\EC_APC\\DEV", true))
];
string directory = Path.Combine(Environment.CurrentDirectory, ".vscode", "helper");
if (!Directory.Exists(directory))
_ = Directory.CreateDirectory(directory);
ReadOnlyCollection<File> files = GetFiles(directory, searchPattern, ignoreFileNames);
double filesTotalLength = 0;
foreach (File file in files)
filesTotalLength += file.Length;
Job job = new(
"C:/Users/phares",
directory,
"*.iso",
files.ToArray(),
files.Count,
filesTotalLength,
3,
targets.ToArray());
string json = JsonSerializer.Serialize(job, JobSourceGenerationContext.Default.Job);
System.IO.File.WriteAllText(Path.Combine(directory, "verify.json"), json);
}
internal static void Backup(ILogger<Worker> logger, List<string> args)
{
string path;
string? json;
string directoryName;
ReadOnlyCollection<File> files;
string searchPattern = args[2];
ReadOnlyCollection<Exception> exceptions;
string[] ignoreFileNames = args[3].Split('|');
string sourceDirectory = Path.GetFullPath(args[0]);
char destinationDriveLetter = args[4].Split(':')[0][0];
logger.LogInformation("Searching <{sourceDirectory}> with search pattern {searchPattern}", args[0], searchPattern);
if (Debugger.IsAttached)
Verify(searchPattern, ignoreFileNames);
IEnumerable<Record> records = GetRecords(sourceDirectory, searchPattern);
foreach (Record record in records)
{
if (record.Job is null || record.Job.Targets.Length == 0 || string.IsNullOrEmpty(record.Job.Extension))
continue;
logger.LogInformation("Searching <{directory}>", record.Directory);
files = GetFiles(searchPattern, ignoreFileNames, record);
json = GetJsonIfNotEqual(searchPattern, ignoreFileNames, record, record.Job, files);
if (string.IsNullOrEmpty(json))
continue;
directoryName = Path.GetFileName(record.Directory);
path = Path.Combine(record.Directory, $"{directoryName}-{DateTime.Now:yyyy-MM-dd-HH-mm-ss-fff}{record.Job.Extension}");
logger.LogInformation("Writing <{directory}> extension", record.Directory);
WriteExtension(destinationDriveLetter, record, record.Job, files, path);
logger.LogInformation("Pushing <{directory}> extension", record.Directory);
exceptions = PushTo(record.Job, path);
if (exceptions.Count != 0)
{
foreach (Exception exception in exceptions)
logger.LogError(exception, exception.Message);
PushTo(record.Job.AlternatePath, path);
}
System.IO.File.WriteAllText(record.Path, json);
System.IO.File.Delete(path);
logger.LogInformation("Deleting old <{directory}> extension", record.Directory);
exceptions = DeleteOld(record.Job, path);
if (exceptions.Count != 0)
{
foreach (Exception exception in exceptions)
logger.LogError(exception, exception.Message);
}
}
if (Debugger.IsAttached && records.Count() == 0)
{
files = GetFiles(sourceDirectory, searchPattern, ignoreFileNames);
json = JsonSerializer.Serialize(files.ToArray(), FilesSourceGenerationContext.Default.FileArray);
System.IO.File.WriteAllText(Path.Combine(Environment.CurrentDirectory, ".vscode", "helper", ".json"), json);
}
}
} }

View File

@ -45,7 +45,7 @@ internal static partial class Helper20250114
internal static void Rename(ILogger<Worker> logger, List<string> args) internal static void Rename(ILogger<Worker> logger, List<string> args)
{ {
string dateFormat = args[3]; string dateFormat = args[3];
string[] searchPatterns = args[2].Split('|'); string[] searchPatterns = args[2].Split('~');
string sourceDirectory = Path.GetFullPath(args[0]); string sourceDirectory = Path.GetFullPath(args[0]);
foreach (string searchPattern in searchPatterns) foreach (string searchPattern in searchPatterns)
Rename(logger, sourceDirectory, searchPattern, dateFormat); Rename(logger, sourceDirectory, searchPattern, dateFormat);

381
ADO2025/PI5/.editorconfig Normal file
View File

@ -0,0 +1,381 @@
[*.md]
end_of_line = crlf
file_header_template = unset
indent_size = 2
indent_style = space
insert_final_newline = false
root = true
tab_width = 2
[*.csproj]
end_of_line = crlf
file_header_template = unset
indent_size = 2
indent_style = space
insert_final_newline = false
root = true
tab_width = 2
[*.cs]
csharp_indent_block_contents = true
csharp_indent_braces = false
csharp_indent_case_contents = true
csharp_indent_case_contents_when_block = true
csharp_indent_labels = one_less_than_current
csharp_indent_switch_labels = true
csharp_new_line_before_catch = false
csharp_new_line_before_else = false
csharp_new_line_before_finally = false
csharp_new_line_before_members_in_anonymous_types = true
csharp_new_line_before_members_in_object_initializers = true
csharp_new_line_before_open_brace = none
csharp_new_line_between_query_expression_clauses = true
csharp_prefer_braces = false
csharp_prefer_qualified_reference = true:error
csharp_prefer_simple_default_expression = true:warning
csharp_prefer_simple_using_statement = true:warning
csharp_prefer_static_local_function = true:warning
csharp_preferred_modifier_order = public,private,protected,internal,static,extern,new,virtual,abstract,sealed,override,readonly,unsafe,volatile,async
csharp_preserve_single_line_blocks = true
csharp_preserve_single_line_statements = false
csharp_space_after_cast = false
csharp_space_after_colon_in_inheritance_clause = true
csharp_space_after_comma = true
csharp_space_after_dot = false
csharp_space_after_keywords_in_control_flow_statements = true
csharp_space_after_semicolon_in_for_statement = true
csharp_space_around_binary_operators = before_and_after
csharp_space_around_declaration_statements = false
csharp_space_before_colon_in_inheritance_clause = true
csharp_space_before_comma = false
csharp_space_before_dot = false
csharp_space_before_open_square_brackets = false
csharp_space_before_semicolon_in_for_statement = false
csharp_space_between_empty_square_brackets = false
csharp_space_between_method_call_empty_parameter_list_parentheses = false
csharp_space_between_method_call_name_and_opening_parenthesis = false
csharp_space_between_method_call_parameter_list_parentheses = false
csharp_space_between_method_declaration_empty_parameter_list_parentheses = false
csharp_space_between_method_declaration_name_and_open_parenthesis = false
csharp_space_between_method_declaration_parameter_list_parentheses = false
csharp_space_between_parentheses = false
csharp_space_between_square_brackets = false
csharp_style_allow_blank_line_after_colon_in_constructor_initializer_experimental = true
csharp_style_allow_blank_line_after_token_in_arrow_expression_clause_experimental = true
csharp_style_allow_blank_line_after_token_in_conditional_expression_experimental = true
csharp_style_allow_blank_lines_between_consecutive_braces_experimental = false
csharp_style_allow_blank_lines_between_consecutive_braces_experimental = true
csharp_style_allow_embedded_statements_on_same_line_experimental = true
csharp_style_conditional_delegate_call = true
csharp_style_deconstructed_variable_declaration = false
csharp_style_expression_bodied_accessors = when_on_single_line:warning
csharp_style_expression_bodied_constructors = when_on_single_line:warning
csharp_style_expression_bodied_indexers = when_on_single_line:warning
csharp_style_expression_bodied_lambdas = when_on_single_line:warning
csharp_style_expression_bodied_local_functions = when_on_single_line:warning
csharp_style_expression_bodied_methods = when_on_single_line:warning
csharp_style_expression_bodied_operators = when_on_single_line:warning
csharp_style_expression_bodied_properties = when_on_single_line:warning
csharp_style_implicit_object_creation_when_type_is_apparent = true:warning
csharp_style_inlined_variable_declaration = false
csharp_style_namespace_declarations = file_scoped:warning
csharp_style_pattern_local_over_anonymous_function = true:warning
csharp_style_pattern_matching_over_as_with_null_check = true:warning
csharp_style_pattern_matching_over_is_with_cast_check = true:warning
csharp_style_prefer_index_operator = true:warning
csharp_style_prefer_not_pattern = true:warning
csharp_style_prefer_null_check_over_type_check = true
csharp_style_prefer_pattern_matching = true:warning
csharp_style_prefer_range_operator = true:warning
csharp_style_prefer_switch_expression = true:warning
csharp_style_throw_expression = true
csharp_style_unused_value_assignment_preference = discard_variable:warning
csharp_style_unused_value_expression_statement_preference = discard_variable:warning
csharp_style_var_elsewhere = false:warning
csharp_style_var_for_built_in_types = false:warning
csharp_style_var_when_type_is_apparent = false:warning
csharp_using_directive_placement = outside_namespace
dotnet_analyzer_diagnostic.category-Design.severity = error
dotnet_analyzer_diagnostic.category-Documentation.severity = error
dotnet_analyzer_diagnostic.category-Globalization.severity = none
dotnet_analyzer_diagnostic.category-Interoperability.severity = error
dotnet_analyzer_diagnostic.category-Maintainability.severity = error
dotnet_analyzer_diagnostic.category-Naming.severity = none
dotnet_analyzer_diagnostic.category-Performance.severity = none
dotnet_analyzer_diagnostic.category-Reliability.severity = error
dotnet_analyzer_diagnostic.category-Security.severity = error
dotnet_analyzer_diagnostic.category-SingleFile.severity = error
dotnet_analyzer_diagnostic.category-Style.severity = error
dotnet_analyzer_diagnostic.category-Usage.severity = error
dotnet_code_quality_unused_parameters = all
dotnet_code_quality_unused_parameters = non_public
dotnet_code_quality.CAXXXX.api_surface = private, internal
dotnet_diagnostic.CA1001.severity = error # CA1001: Types that own disposable fields should be disposable
dotnet_diagnostic.CA1051.severity = error # CA1051: Do not declare visible instance fields
dotnet_diagnostic.CA1511.severity = warning # CA1511: Use 'ArgumentException.ThrowIfNullOrEmpty' instead of explicitly throwing a new exception instance
dotnet_diagnostic.CA1513.severity = warning # Use 'ObjectDisposedException.ThrowIf' instead of explicitly throwing a new exception instance
dotnet_diagnostic.CA1825.severity = warning # CA1825: Avoid zero-length array allocations
dotnet_diagnostic.CA1829.severity = error # CA1829: Use Length/Count property instead of Count() when available
dotnet_diagnostic.CA1834.severity = warning # CA1834: Consider using 'StringBuilder.Append(char)' when applicable
dotnet_diagnostic.CA1860.severity = error # CA1860: Prefer comparing 'Count' to 0 rather than using 'Any()', both for clarity and for performance
dotnet_diagnostic.CA1862.severity = warning # CA1862: Prefer using 'string.Equals(string, StringComparison)' to perform a case-insensitive comparison, but keep in mind that this might cause subtle changes in behavior, so make sure to conduct thorough testing after applying the suggestion, or if culturally sensitive comparison is not required, consider using 'StringComparison.OrdinalIgnoreCase'
dotnet_diagnostic.CA1869.severity = none # CA1869: Avoid creating a new 'JsonSerializerOptions' instance for every serialization operation. Cache and reuse instances instead.
dotnet_diagnostic.CA2201.severity = none # CA2201: Exception type System.NullReferenceException is reserved by the runtime
dotnet_diagnostic.CA2254.severity = none # CA2254: The logging message template should not vary between calls to 'LoggerExtensions.LogInformation(ILogger, string?, params object?[])'
dotnet_diagnostic.IDE0001.severity = warning # IDE0001: Simplify name
dotnet_diagnostic.IDE0002.severity = warning # Simplify (member access) - System.Version.Equals("1", "2"); Version.Equals("1", "2");
dotnet_diagnostic.IDE0004.severity = warning # IDE0004: Cast is redundant.
dotnet_diagnostic.IDE0005.severity = error # Using directive is unnecessary
dotnet_diagnostic.IDE0010.severity = none # Add missing cases to switch statement (IDE0010)
dotnet_diagnostic.IDE0028.severity = error # IDE0028: Collection initialization can be simplified
dotnet_diagnostic.IDE0031.severity = warning # Use null propagation (IDE0031)
dotnet_diagnostic.IDE0047.severity = warning # IDE0047: Parentheses can be removed
dotnet_diagnostic.IDE0048.severity = none # Parentheses preferences (IDE0047 and IDE0048)
dotnet_diagnostic.IDE0049.severity = warning # Use language keywords instead of framework type names for type references (IDE0049)
dotnet_diagnostic.IDE0051.severity = error # Private member '' is unused [, ]
dotnet_diagnostic.IDE0058.severity = error # IDE0058: Expression value is never used
dotnet_diagnostic.IDE0060.severity = error # IDE0060: Remove unused parameter
dotnet_diagnostic.IDE0074.severity = warning # IDE0074: Use compound assignment
dotnet_diagnostic.IDE0130.severity = none # Namespace does not match folder structure (IDE0130)
dotnet_diagnostic.IDE0270.severity = warning # IDE0270: Null check can be simplified
dotnet_diagnostic.IDE0290.severity = none # Use primary constructor [Distance]csharp(IDE0290)
dotnet_diagnostic.IDE0300.severity = error # IDE0300: Collection initialization can be simplified
dotnet_diagnostic.IDE0301.severity = error #IDE0301: Collection initialization can be simplified
dotnet_diagnostic.IDE0305.severity = none # IDE0305: Collection initialization can be simplified
dotnet_diagnostic.IDE2000.severity = error # IDE2000: Allow multiple blank lines
dotnet_naming_rule.abstract_method_should_be_pascal_case.severity = warning
dotnet_naming_rule.abstract_method_should_be_pascal_case.style = pascal_case
dotnet_naming_rule.abstract_method_should_be_pascal_case.symbols = abstract_method
dotnet_naming_rule.class_should_be_pascal_case.severity = warning
dotnet_naming_rule.class_should_be_pascal_case.style = pascal_case
dotnet_naming_rule.class_should_be_pascal_case.symbols = class
dotnet_naming_rule.delegate_should_be_pascal_case.severity = warning
dotnet_naming_rule.delegate_should_be_pascal_case.style = pascal_case
dotnet_naming_rule.delegate_should_be_pascal_case.symbols = delegate
dotnet_naming_rule.enum_should_be_pascal_case.severity = warning
dotnet_naming_rule.enum_should_be_pascal_case.style = pascal_case
dotnet_naming_rule.enum_should_be_pascal_case.symbols = enum
dotnet_naming_rule.event_should_be_pascal_case.severity = warning
dotnet_naming_rule.event_should_be_pascal_case.style = pascal_case
dotnet_naming_rule.event_should_be_pascal_case.symbols = event
dotnet_naming_rule.interface_should_be_begins_with_i.severity = warning
dotnet_naming_rule.interface_should_be_begins_with_i.style = begins_with_i
dotnet_naming_rule.interface_should_be_begins_with_i.symbols = interface
dotnet_naming_rule.method_should_be_pascal_case.severity = warning
dotnet_naming_rule.method_should_be_pascal_case.style = pascal_case
dotnet_naming_rule.method_should_be_pascal_case.symbols = method
dotnet_naming_rule.non_field_members_should_be_pascal_case.severity = warning
dotnet_naming_rule.non_field_members_should_be_pascal_case.style = pascal_case
dotnet_naming_rule.non_field_members_should_be_pascal_case.symbols = non_field_members
dotnet_naming_rule.private_method_should_be_pascal_case.severity = warning
dotnet_naming_rule.private_method_should_be_pascal_case.style = pascal_case
dotnet_naming_rule.private_method_should_be_pascal_case.symbols = private_method
dotnet_naming_rule.private_or_internal_field_should_be_private_of_internal_field.severity = warning
dotnet_naming_rule.private_or_internal_field_should_be_private_of_internal_field.style = private_of_internal_field
dotnet_naming_rule.private_or_internal_field_should_be_private_of_internal_field.symbols = private_or_internal_field
dotnet_naming_rule.private_or_internal_static_field_should_be_private_of_internal_field.severity = warning
dotnet_naming_rule.private_or_internal_static_field_should_be_private_of_internal_field.style = private_of_internal_field
dotnet_naming_rule.private_or_internal_static_field_should_be_private_of_internal_field.symbols = private_or_internal_static_field
dotnet_naming_rule.property_should_be_pascal_case.severity = warning
dotnet_naming_rule.property_should_be_pascal_case.style = pascal_case
dotnet_naming_rule.property_should_be_pascal_case.symbols = property
dotnet_naming_rule.public_or_protected_field_should_be_private_of_internal_field.severity = warning
dotnet_naming_rule.public_or_protected_field_should_be_private_of_internal_field.style = private_of_internal_field
dotnet_naming_rule.public_or_protected_field_should_be_private_of_internal_field.symbols = public_or_protected_field
dotnet_naming_rule.static_field_should_be_pascal_case.severity = warning
dotnet_naming_rule.static_field_should_be_pascal_case.style = pascal_case
dotnet_naming_rule.static_field_should_be_pascal_case.symbols = static_field
dotnet_naming_rule.static_method_should_be_pascal_case.severity = warning
dotnet_naming_rule.static_method_should_be_pascal_case.style = pascal_case
dotnet_naming_rule.static_method_should_be_pascal_case.symbols = static_method
dotnet_naming_rule.struct_should_be_pascal_case.severity = warning
dotnet_naming_rule.struct_should_be_pascal_case.style = pascal_case
dotnet_naming_rule.struct_should_be_pascal_case.symbols = struct
dotnet_naming_rule.types_should_be_pascal_case.severity = warning
dotnet_naming_rule.types_should_be_pascal_case.style = pascal_case
dotnet_naming_rule.types_should_be_pascal_case.symbols = types
dotnet_naming_style.begins_with_i.capitalization = pascal_case
dotnet_naming_style.begins_with_i.required_prefix = I
dotnet_naming_style.begins_with_i.required_suffix =
dotnet_naming_style.begins_with_i.word_separator =
dotnet_naming_style.pascal_case.capitalization = pascal_case
dotnet_naming_style.pascal_case.required_prefix =
dotnet_naming_style.pascal_case.required_suffix =
dotnet_naming_style.pascal_case.word_separator =
dotnet_naming_style.private_of_internal_field.capitalization = pascal_case
dotnet_naming_style.private_of_internal_field.required_prefix = _
dotnet_naming_style.private_of_internal_field.required_suffix =
dotnet_naming_style.private_of_internal_field.word_separator =
dotnet_naming_symbols.abstract_method.applicable_accessibilities = public, internal, private, protected, protected_internal, private_protected
dotnet_naming_symbols.abstract_method.applicable_kinds = method
dotnet_naming_symbols.abstract_method.required_modifiers = abstract
dotnet_naming_symbols.class.applicable_accessibilities = public, internal, private, protected, protected_internal, private_protected
dotnet_naming_symbols.class.applicable_kinds = class
dotnet_naming_symbols.class.required_modifiers =
dotnet_naming_symbols.delegate.applicable_accessibilities = public, internal, private, protected, protected_internal, private_protected
dotnet_naming_symbols.delegate.applicable_kinds = delegate
dotnet_naming_symbols.delegate.required_modifiers =
dotnet_naming_symbols.enum.applicable_accessibilities = public, internal, private, protected, protected_internal, private_protected
dotnet_naming_symbols.enum.applicable_kinds = enum
dotnet_naming_symbols.enum.required_modifiers =
dotnet_naming_symbols.event.applicable_accessibilities = public, internal, private, protected, protected_internal, private_protected
dotnet_naming_symbols.event.applicable_kinds = event
dotnet_naming_symbols.event.required_modifiers =
dotnet_naming_symbols.interface.applicable_accessibilities = public, internal, private, protected, protected_internal, private_protected
dotnet_naming_symbols.interface.applicable_kinds = interface
dotnet_naming_symbols.interface.required_modifiers =
dotnet_naming_symbols.method.applicable_accessibilities = public
dotnet_naming_symbols.method.applicable_kinds = method
dotnet_naming_symbols.method.required_modifiers =
dotnet_naming_symbols.non_field_members.applicable_accessibilities = public, internal, private, protected, protected_internal, private_protected
dotnet_naming_symbols.non_field_members.applicable_kinds = property, event, method
dotnet_naming_symbols.non_field_members.required_modifiers =
dotnet_naming_symbols.private_method.applicable_accessibilities = private
dotnet_naming_symbols.private_method.applicable_kinds = method
dotnet_naming_symbols.private_method.required_modifiers =
dotnet_naming_symbols.private_or_internal_field.applicable_accessibilities = internal, private, private_protected
dotnet_naming_symbols.private_or_internal_field.applicable_kinds = field
dotnet_naming_symbols.private_or_internal_field.required_modifiers =
dotnet_naming_symbols.private_or_internal_static_field.applicable_accessibilities = internal, private, private_protected
dotnet_naming_symbols.private_or_internal_static_field.applicable_kinds = field
dotnet_naming_symbols.private_or_internal_static_field.required_modifiers = static
dotnet_naming_symbols.property.applicable_accessibilities = public, internal, private, protected, protected_internal, private_protected
dotnet_naming_symbols.property.applicable_kinds = property
dotnet_naming_symbols.property.required_modifiers =
dotnet_naming_symbols.public_or_protected_field.applicable_accessibilities = public, protected
dotnet_naming_symbols.public_or_protected_field.applicable_kinds = field
dotnet_naming_symbols.public_or_protected_field.required_modifiers =
dotnet_naming_symbols.static_field.applicable_accessibilities = public, internal, private, protected, protected_internal, private_protected
dotnet_naming_symbols.static_field.applicable_kinds = field
dotnet_naming_symbols.static_field.required_modifiers = static
dotnet_naming_symbols.static_method.applicable_accessibilities = public, internal, private, protected, protected_internal, private_protected
dotnet_naming_symbols.static_method.applicable_kinds = method
dotnet_naming_symbols.static_method.required_modifiers = static
dotnet_naming_symbols.struct.applicable_accessibilities = public, internal, private, protected, protected_internal, private_protected
dotnet_naming_symbols.struct.applicable_kinds = struct
dotnet_naming_symbols.struct.required_modifiers =
dotnet_naming_symbols.types.applicable_accessibilities = public, internal, private, protected, protected_internal, private_protected
dotnet_naming_symbols.types.applicable_kinds = class, struct, interface, enum
dotnet_naming_symbols.types.required_modifiers =
dotnet_remove_unnecessary_suppression_exclusions = 0
dotnet_separate_import_directive_groups = true
dotnet_sort_system_directives_first = true
dotnet_style_allow_multiple_blank_lines_experimental = false:warning
dotnet_style_allow_statement_immediately_after_block_experimental = true
dotnet_style_coalesce_expression = true
dotnet_style_collection_initializer = true:warning
dotnet_style_explicit_tuple_names = true:warning
dotnet_style_namespace_match_folder = true
dotnet_style_null_propagation = true:warning
dotnet_style_object_initializer = true:warning
dotnet_style_operator_placement_when_wrapping = beginning_of_line
dotnet_style_parentheses_in_arithmetic_binary_operators = always_for_clarity
dotnet_style_parentheses_in_other_binary_operators = always_for_clarity
dotnet_style_parentheses_in_other_operators = never_if_unnecessary
dotnet_style_parentheses_in_relational_binary_operators = always_for_clarity
dotnet_style_predefined_type_for_locals_parameters_members = true
dotnet_style_predefined_type_for_member_access = true:warning
dotnet_style_prefer_auto_properties = true:warning
dotnet_style_prefer_compound_assignment = true:warning
dotnet_style_prefer_conditional_expression_over_assignment = false
dotnet_style_prefer_conditional_expression_over_return = false
dotnet_style_prefer_inferred_anonymous_type_member_names = true:warning
dotnet_style_prefer_inferred_tuple_names = true:warning
dotnet_style_prefer_is_null_check_over_reference_equality_method = true:warning
dotnet_style_prefer_simplified_boolean_expressions = true:warning
dotnet_style_prefer_simplified_interpolation = true
dotnet_style_qualification_for_event = false:error
dotnet_style_qualification_for_field = false
dotnet_style_qualification_for_method = false:error
dotnet_style_qualification_for_property = false:error
dotnet_style_readonly_field = true:warning
dotnet_style_require_accessibility_modifiers = for_non_interface_members
end_of_line = crlf
file_header_template = unset
indent_size = 4
indent_style = space
insert_final_newline = false
root = true
tab_width = 4
# https://docs.microsoft.com/en-us/dotnet/fundamentals/code-analysis/quality-rules/ca1822
# https://github.com/dotnet/aspnetcore/blob/main/.editorconfig
# https://github.com/dotnet/project-system/blob/main/.editorconfig
# Question
csharp_prefer_simple_using_statement = false # Question
csharp_style_expression_bodied_constructors = when_on_single_line:none # Question
csharp_style_expression_bodied_properties = true # Question
csharp_style_implicit_object_creation_when_type_is_apparent = true:warning # Question
csharp_style_pattern_matching_over_as_with_null_check = false # Question
csharp_style_prefer_pattern_matching = false # Question
csharp_style_prefer_range_operator = false # Question
csharp_style_prefer_switch_expression = false # Question
csharp_style_unused_value_assignment_preference = unused_local_variable # Question
csharp_style_unused_value_expression_statement_preference = false # Question
csharp_style_var_elsewhere = false:none # Question
csharp_style_var_for_built_in_types = false:none # Question
csharp_style_var_when_type_is_apparent = false:warning # Question
dotnet_diagnostic.CA1001.severity = none # Question - Types that own disposable fields should be disposable
dotnet_diagnostic.CA1051.severity = none # Question - Do not declare visible instance fields
dotnet_diagnostic.CA1416.severity = none # Question - This call site is reachable on all platforms.
dotnet_diagnostic.CA1510.severity = none # Question - Use
dotnet_diagnostic.CA1834.severity = none # CA1834: Consider using 'StringBuilder.Append(char)' when applicable
dotnet_diagnostic.CA1860.severity = none # Question - Avoid using
dotnet_diagnostic.CA1862.severity = none # Question - Prefer using
dotnet_diagnostic.CA2208.severity = none # Question - Instantiate argument exceptions correctly
dotnet_diagnostic.CA2211.severity = none # Question - Non-constant fields should not be visible
dotnet_diagnostic.CA2249.severity = none # Question - Use
dotnet_diagnostic.CA2253.severity = none # Question - Named placeholders should not be numeric values
dotnet_diagnostic.CS0103.severity = none # Question - The name
dotnet_diagnostic.CS0168.severity = none # Question - The variable
dotnet_diagnostic.CS0219.severity = none # Question - The variable
dotnet_diagnostic.CS0612.severity = none # Question - is obsolete
dotnet_diagnostic.CS0618.severity = none # Question - Compiler Warning (level 2)
dotnet_diagnostic.CS0659.severity = none # Question - Compiler Warning (level 3)
dotnet_diagnostic.CS8019.severity = warning # Question - Unnecessary using directive.
dotnet_diagnostic.CS8600.severity = none # Question - Converting null literal or possible null value to non-nullable type
dotnet_diagnostic.CS8602.severity = none # Question - Dereference of a possibly null reference.
dotnet_diagnostic.CS8603.severity = none # Question - Possible null reference return
dotnet_diagnostic.CS8604.severity = none # Question - Possible null reference argument for parameter.
dotnet_diagnostic.CS8618.severity = none # Question - Non-nullable variable must contain a non-null value when exiting constructor
dotnet_diagnostic.CS8625.severity = none # Question - Cannot convert null literal to non-nullable reference type.
dotnet_diagnostic.CS8629.severity = none # Question - Nullable value type may be null
dotnet_diagnostic.CS8765.severity = none # Question - Nullability of type of parameter
dotnet_diagnostic.IDE0005.severity = none # Question - Remove unnecessary using directives
dotnet_diagnostic.IDE0008.severity = warning # Question - Use explicit type instead of
dotnet_diagnostic.IDE0017.severity = none # Question - Object initialization can be simplified
dotnet_diagnostic.IDE0019.severity = none # Question - Use pattern matching
dotnet_diagnostic.IDE0021.severity = none # Question - Use expression body for constructor
dotnet_diagnostic.IDE0022.severity = none # Question - Use expression body for method
dotnet_diagnostic.IDE0025.severity = none # Question - Use expression body for property
dotnet_diagnostic.IDE0027.severity = none # Question - Use expression body for accessor
dotnet_diagnostic.IDE0028.severity = none # Question - Use collection initializers or expressions
dotnet_diagnostic.IDE0031.severity = none # Question - Null check can be simplified
dotnet_diagnostic.IDE0032.severity = none # Question - Use auto property
dotnet_diagnostic.IDE0037.severity = none # Question - Member name can be simplified
dotnet_diagnostic.IDE0041.severity = none # Question - Null check can be simplified
dotnet_diagnostic.IDE0047.severity = none # Question - Parentheses preferences
dotnet_diagnostic.IDE0049.severity = warning # Question - Name can be simplified
dotnet_diagnostic.IDE0051.severity = none # Question - Remove unused private member
dotnet_diagnostic.IDE0053.severity = none # Question - Use expression body for lambdas
dotnet_diagnostic.IDE0054.severity = none # Question - Use compound assignment
dotnet_diagnostic.IDE0055.severity = none # Question - Formatting rule
dotnet_diagnostic.IDE0057.severity = none # Question - Substring can be simplified
dotnet_diagnostic.IDE0058.severity = none # Question - Remove unnecessary expression value
dotnet_diagnostic.IDE0059.severity = none # Question - Unnecessary assignment of a value to
dotnet_diagnostic.IDE0060.severity = none # Question - Remove unused parameter
dotnet_diagnostic.IDE0063.severity = none # Question - Use simple
dotnet_diagnostic.IDE0065.severity = none # Question -
dotnet_diagnostic.IDE0066.severity = none # Question - Use
dotnet_diagnostic.IDE0078.severity = none # Question - Use pattern matching (may change code meaning)
dotnet_diagnostic.IDE0090.severity = warning # Question - Simplify new expression
dotnet_diagnostic.IDE0100.severity = error # Question - Remove redundant equality
dotnet_diagnostic.IDE0160.severity = warning # Question - Use block-scoped namespace
dotnet_diagnostic.IDE0161.severity = warning # Question - Namespace declaration preferences
dotnet_diagnostic.IDE0270.severity = none # Question - Null check can be simplified
dotnet_diagnostic.IDE0300.severity = none # Question - Collection initialization can be simplified
dotnet_diagnostic.IDE1006.severity = none # Question - Use collection expression for builder dotnet_style_prefer_collection_expression
dotnet_style_null_propagation = false # Question
dotnet_style_object_initializer = false # Question
dotnet_style_prefer_auto_properties = false # Question
dotnet_style_allow_statement_immediately_after_block_experimental = true # Question
dotnet_style_prefer_inferred_anonymous_type_member_names = false:warning # Question
dotnet_style_prefer_is_null_check_over_reference_equality_method = false # Question

View File

@ -1,13 +1,37 @@
using Microsoft.Extensions.Logging;
using System.Globalization; using System.Globalization;
using Microsoft.Extensions.Logging;
namespace File_Folder_Helper.ADO2025.PI5; namespace File_Folder_Helper.ADO2025.PI5;
internal static partial class Helper20250218 internal static partial class Helper20250218 {
{
private static void MoveToArchive(ILogger<Worker> logger, string searchMES, string searchSequence, string destinationRoot, string[] files) internal static void MoveToArchive(ILogger<Worker> logger, List<string> args) {
{ string checkDirectory;
string searchMES = args[4];
string searchPattern = args[3];
string searchSequence = args[5];
string destinationRoot = args[6];
string checkDirectoryName = args[2];
string sourceDirectory = Path.GetFullPath(args[0]);
string[] directories = Directory.GetDirectories(sourceDirectory, "*", SearchOption.TopDirectoryOnly);
foreach (string directory in directories) {
checkDirectory = Path.Combine(directory, checkDirectoryName);
if (!Directory.Exists(checkDirectory))
continue;
MoveToArchive(logger, searchPattern, searchMES, searchSequence, destinationRoot, checkDirectory);
}
}
private static void MoveToArchive(ILogger<Worker> logger, string searchPattern, string searchMES, string searchSequence, string destinationRoot, string checkDirectory) {
string[] files = Directory.GetFiles(checkDirectory, searchPattern, SearchOption.AllDirectories);
if (files.Length == 0)
logger.LogInformation("<{files}>(s)", files.Length);
else
MoveToArchive(logger, searchMES, searchSequence, destinationRoot, files);
}
private static void MoveToArchive(ILogger<Worker> logger, string searchMES, string searchSequence, string destinationRoot, string[] files) {
string mes; string mes;
string text; string text;
string sequence; string sequence;
@ -20,8 +44,7 @@ internal static partial class Helper20250218
string[] segmentsC; string[] segmentsC;
string checkDirectory; string checkDirectory;
Calendar calendar = new CultureInfo("en-US").Calendar; Calendar calendar = new CultureInfo("en-US").Calendar;
foreach (string file in files) foreach (string file in files) {
{
fileInfo = new(file); fileInfo = new(file);
if (string.IsNullOrEmpty(fileInfo.DirectoryName)) if (string.IsNullOrEmpty(fileInfo.DirectoryName))
continue; continue;
@ -37,20 +60,17 @@ internal static partial class Helper20250218
segmentsC = Path.GetFileName(fileInfo.DirectoryName).Split('-'); segmentsC = Path.GetFileName(fileInfo.DirectoryName).Split('-');
weekOfYear = $"{fileInfo.LastWriteTime.Year}_Week_{calendar.GetWeekOfYear(fileInfo.LastWriteTime, CalendarWeekRule.FirstDay, DayOfWeek.Sunday):00}"; weekOfYear = $"{fileInfo.LastWriteTime.Year}_Week_{calendar.GetWeekOfYear(fileInfo.LastWriteTime, CalendarWeekRule.FirstDay, DayOfWeek.Sunday):00}";
checkDirectory = Path.GetFullPath(Path.Combine(destinationRoot, mes, weekOfYear, fileInfo.LastWriteTime.ToString("yyyy-MM-dd"))); checkDirectory = Path.GetFullPath(Path.Combine(destinationRoot, mes, weekOfYear, fileInfo.LastWriteTime.ToString("yyyy-MM-dd")));
if (!Directory.Exists(checkDirectory)) if (!Directory.Exists(checkDirectory)) {
{
logger.LogInformation("<{checkDirectory}>", checkDirectory); logger.LogInformation("<{checkDirectory}>", checkDirectory);
continue; continue;
} }
matches = Directory.GetDirectories(checkDirectory, sequence, SearchOption.AllDirectories); matches = Directory.GetDirectories(checkDirectory, sequence, SearchOption.AllDirectories);
if (matches.Length != 1) if (matches.Length != 1) {
{
logger.LogInformation("!= 1 <{checkDirectory}>", checkDirectory); logger.LogInformation("!= 1 <{checkDirectory}>", checkDirectory);
continue; continue;
} }
checkFile = segmentsC.Length == 2 ? Path.Combine(matches[0], $"csv-{segmentsC[1]}-{fileInfo.Name}") : Path.Combine(matches[0], $"csv-{fileInfo.Name}"); checkFile = segmentsC.Length == 2 ? Path.Combine(matches[0], $"csv-{segmentsC[1]}-{fileInfo.Name}") : Path.Combine(matches[0], $"csv-{fileInfo.Name}");
if (File.Exists(checkFile)) if (File.Exists(checkFile)) {
{
logger.LogInformation("csv- {segmentsC} <{checkDirectory}>", segmentsC.Length, checkDirectory); logger.LogInformation("csv- {segmentsC} <{checkDirectory}>", segmentsC.Length, checkDirectory);
continue; continue;
} }
@ -58,18 +78,6 @@ internal static partial class Helper20250218
} }
} }
internal static void MoveToArchive(ILogger<Worker> logger, List<string> args)
{
string searchMES = args[3];
string searchPattern = args[2];
string searchSequence = args[4];
string destinationRoot = args[5];
string sourceDirectory = Path.GetFullPath(args[0]);
string[] files = Directory.GetFiles(sourceDirectory, searchPattern, SearchOption.AllDirectories);
logger.LogInformation("<{files}>(s)", files.Length);
MoveToArchive(logger, searchMES, searchSequence, destinationRoot, files);
}
} }
// L:\DevOps\MESA_FI\file-folder-helper\bin\Debug\net8.0\win-x64>dotnet File-Folder-Helper.dll X D:/Tmp Day-Helper-2025-02-18 *.pdsf A_MES_ENTITY= B_SEQUENCE= \\mesfs.infineon.com\EC_Characterization_Si\Archive // L:\DevOps\MESA_FI\file-folder-helper\bin\Debug\net8.0\win-x64>dotnet File-Folder-Helper.dll X \\mesfs.infineon.com\EC_EDA\Production\Traces Day-Helper-2025-02-18 Source *.pdsf A_MES_ENTITY= B_SEQUENCE= \\mesfs.infineon.com\EC_Characterization_Si\Archive
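Note: a minimal sketch, not part of the commit, of the destination rule MoveToArchive applies above; the share, MES entity, and timestamp are hypothetical.

using System.Globalization;

// Files land under <destinationRoot>\<MES>\<yyyy>_Week_<ww>\<yyyy-MM-dd>,
// with the week number taken from the en-US calendar (FirstDay rule, weeks start Sunday).
Calendar calendar = new CultureInfo("en-US").Calendar;
DateTime lastWriteTime = new(2025, 4, 17);                 // hypothetical file timestamp
string mes = "MES01";                                      // hypothetical A_MES_ENTITY= value
string destinationRoot = @"\\server\Archive";              // hypothetical share
int week = calendar.GetWeekOfYear(lastWriteTime, CalendarWeekRule.FirstDay, DayOfWeek.Sunday);
string weekOfYear = $"{lastWriteTime.Year}_Week_{week:00}";
string checkDirectory = Path.GetFullPath(Path.Combine(destinationRoot, mes, weekOfYear, lastWriteTime.ToString("yyyy-MM-dd")));
Console.WriteLine(checkDirectory);                         // \\server\Archive\MES01\2025_Week_16\2025-04-17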

View File

@ -1,110 +1,287 @@
using Microsoft.Extensions.Logging;
using System.Collections.ObjectModel; using System.Collections.ObjectModel;
using System.Text; using System.Text;
using System.Text.Json; using System.Text.Json;
using System.Text.Json.Serialization; using System.Text.Json.Serialization;
using Microsoft.Extensions.Logging;
namespace File_Folder_Helper.ADO2025.PI5; namespace File_Folder_Helper.ADO2025.PI5;
internal static partial class Helper20250219 internal static partial class Helper20250219 {
{
private record ProcessDataStandardFormat(ReadOnlyCollection<string> Body, private record ProcessDataStandardFormat(ReadOnlyCollection<string> Body,
ReadOnlyCollection<string> Columns, ReadOnlyCollection<string> Columns,
string Logistics); ReadOnlyCollection<string> Logistics,
long? Sequence);
[JsonSourceGenerationOptions(WriteIndented = true)] [JsonSourceGenerationOptions(WriteIndented = true)]
[JsonSerializable(typeof(JsonElement[]))] [JsonSerializable(typeof(JsonElement[]))]
private partial class JsonElementCollectionSourceGenerationContext : JsonSerializerContext private partial class JsonElementCollectionSourceGenerationContext : JsonSerializerContext {
{
} }
private static ProcessDataStandardFormat GetLogisticsColumnsAndBody(string path, string[]? lines) private record ProcessDataStandardFormatMapping(ReadOnlyCollection<string> BackfillColumns,
{ ReadOnlyCollection<int> ColumnIndices,
ProcessDataStandardFormat result; ReadOnlyCollection<string> IgnoreColumns,
string segment; ReadOnlyCollection<string> IndexOnlyColumns,
List<string> body = []; ReadOnlyDictionary<string, string> KeyValuePairs,
List<string> columns = []; ReadOnlyCollection<string> NewColumnNames,
StringBuilder logistics = new(); ReadOnlyCollection<string> OldColumnNames);
lines ??= File.ReadAllLines(path);
string[] segments; internal static void Compare(ILogger<Worker> logger, List<string> args) {
if (lines.Length < 7) string[] segmentsB;
segments = []; List<string> distinct = [];
else string searchPattern = args[2];
segments = lines[6].Trim().Split('\t'); string searchPatternB = args[3];
for (int c = 0; c < segments.Length; c++) string[] segments = args[7].Split(',');
{ Dictionary<string, string> keyValuePairs = [];
segment = segments[c][1..^1]; ReadOnlyCollection<string> ignore = args[4].Split(',').AsReadOnly();
if (!columns.Contains(segment)) ReadOnlyCollection<string> backfill = args[5].Split(',').AsReadOnly();
columns.Add(segment); ReadOnlyCollection<string> indexOnly = args[6].Split(',').AsReadOnly();
else ReadOnlyCollection<string> oldColumnNames = args[8].Split(',').AsReadOnly();
{ ReadOnlyCollection<string> newColumnNames = args[9].Split(',').AsReadOnly();
for (short i = 1; i < short.MaxValue; i++) ReadOnlyCollection<int> columnIndices = args[10].Split(',').Select(int.Parse).ToArray().AsReadOnly();
{ foreach (string segment in segments) {
segment = string.Concat(segment, "_", i); segmentsB = segment.Split('|');
if (!columns.Contains(segment)) if (segmentsB.Length != 2)
{ continue;
columns.Add(segment); if (distinct.Contains(segmentsB[0]))
break; continue;
distinct.Add(segmentsB[0]);
keyValuePairs.Add(segmentsB[0], segmentsB[1]);
} }
ProcessDataStandardFormatMapping processDataStandardFormatMapping = new(BackfillColumns: backfill,
ColumnIndices: columnIndices,
NewColumnNames: newColumnNames,
IgnoreColumns: ignore,
IndexOnlyColumns: indexOnly,
KeyValuePairs: keyValuePairs.AsReadOnly(),
OldColumnNames: oldColumnNames);
string sourceDirectory = Path.GetFullPath(args[0]);
string[] files = Directory.GetFiles(sourceDirectory, searchPattern, SearchOption.AllDirectories);
logger.LogInformation("<{files}>(s)", files.Length);
Compare(logger, sourceDirectory.Length, searchPatternB, processDataStandardFormatMapping, files);
} }
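Note: a hedged sketch of how Compare parses the args[7] rename pairs above; the pair values are hypothetical. Each comma-separated "Old|New" entry becomes one KeyValuePairs mapping, and malformed or duplicate old names are skipped.

string[] segments = "Position|Slot,Lot|Batch,Title|Batch|X".Split(',');
List<string> distinct = [];
Dictionary<string, string> keyValuePairs = [];
foreach (string segment in segments)
{
    string[] segmentsB = segment.Split('|');               // "Title|Batch|X" has 3 parts: skipped
    if (segmentsB.Length != 2 || distinct.Contains(segmentsB[0]))
        continue;
    distinct.Add(segmentsB[0]);
    keyValuePairs.Add(segmentsB[0], segmentsB[1]);
}
Console.WriteLine(string.Join(';', keyValuePairs));        // [Position, Slot];[Lot, Batch]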
private static void Compare(ILogger<Worker> logger, int sourceDirectoryLength, string searchPattern, ProcessDataStandardFormatMapping pdsfMapping, string[] files) {
bool compare;
string directory;
string[] matches;
string directorySegment;
string[] directoryFiles;
const int columnsLine = 6;
JsonElement[]? jsonElementsNew;
JsonElement[]? jsonElementsOld;
ProcessDataStandardFormat processDataStandardFormat;
FileInfo[] collection = files.Select(l => new FileInfo(l)).ToArray();
foreach (FileInfo fileInfo in collection) {
directory = fileInfo.DirectoryName ?? throw new Exception();
directoryFiles = Directory.GetFiles(directory, searchPattern, SearchOption.TopDirectoryOnly);
matches = (from l in directoryFiles where l != fileInfo.FullName select l).ToArray();
if (matches.Length < 1)
continue;
directorySegment = directory[sourceDirectoryLength..];
processDataStandardFormat = GetProcessDataStandardFormat(logger, fileInfo.LastWriteTime, pdsfMapping.NewColumnNames.Count, columnsLine, fileInfo.FullName, lines: null);
jsonElementsNew = GetArray(logger, pdsfMapping.NewColumnNames.Count, processDataStandardFormat, lookForNumbers: false);
if (jsonElementsNew is null)
continue;
if (pdsfMapping.OldColumnNames.Count == pdsfMapping.ColumnIndices.Count) {
processDataStandardFormat = GetProcessDataStandardFormat(logger, pdsfMapping, jsonElementsNew, processDataStandardFormat);
Write(logger, fileInfo, processDataStandardFormat);
} }
} foreach (string match in matches) {
bool lookForLogistics = false; processDataStandardFormat = GetProcessDataStandardFormat(logger, fileInfo.LastWriteTime, pdsfMapping.OldColumnNames.Count, columnsLine, match, lines: null);
for (int r = 7; r < lines.Length; r++) jsonElementsOld = GetArray(logger, pdsfMapping.OldColumnNames.Count, processDataStandardFormat, lookForNumbers: false);
{ if (jsonElementsOld is null || jsonElementsOld.Length != jsonElementsNew.Length) {
if (lines[r].StartsWith("NUM_DATA_ROWS")) logger.LogWarning("! <{match}> (jsonElementsOld.Length:{jsonElementsOld} != jsonElementsNew.Length:{jsonElementsNew})", match, jsonElementsOld?.Length, jsonElementsNew.Length);
lookForLogistics = true;
if (!lookForLogistics)
{
body.Add(lines[r]);
continue; continue;
} }
if (lines[r].StartsWith("LOGISTICS_1")) compare = Compare(logger, pdsfMapping, directorySegment, jsonElementsNew, jsonElementsOld);
{ if (!compare) {
for (int i = r; i < lines.Length; i++) logger.LogWarning("! <{match}>", match);
{ continue;
if (lines[r].StartsWith("END_HEADER"))
break;
_ = logistics.AppendLine(lines[i]);
} }
logger.LogInformation("<{match}>", match);
}
}
}
private static bool Compare(ILogger<Worker> logger, ProcessDataStandardFormatMapping processDataStandardFormatMapping, string directory, JsonElement[] jsonElementsNew, JsonElement[] jsonElementsOld) {
bool result;
int? q;
string valueNew;
string valueOld;
List<string> columns = [];
JsonProperty jsonPropertyOld;
JsonProperty jsonPropertyNew;
List<string> columnPairs = [];
JsonProperty[] jsonPropertiesOld;
JsonProperty[] jsonPropertiesNew;
List<string> unknownColumns = [];
List<string> differentColumns = [];
int last = jsonElementsOld.Length - 1;
List<string> sameAfterSpaceSplitColumns = [];
for (int i = last; i > -1; i--) {
if (jsonElementsOld[i].ValueKind != JsonValueKind.Object) {
unknownColumns.Add(string.Empty);
break; break;
} }
jsonPropertiesOld = jsonElementsOld[i].EnumerateObject().ToArray();
jsonPropertiesNew = jsonElementsNew[i].EnumerateObject().ToArray();
for (int p = 0; p < jsonPropertiesOld.Length; p++) {
jsonPropertyOld = jsonPropertiesOld[p];
valueOld = jsonPropertyOld.Value.ToString();
if (processDataStandardFormatMapping.KeyValuePairs.TryGetValue(jsonPropertyOld.Name, out string? name) && !string.IsNullOrEmpty(name)) {
q = TryGetPropertyIndex(jsonPropertiesNew, name);
if (q is null && i == 0)
unknownColumns.Add($"{jsonPropertyOld.Name}|{name}");
} else {
q = TryGetPropertyIndex(jsonPropertiesNew, jsonPropertyOld.Name);
if (q is null) {
if (i == 0)
unknownColumns.Add(jsonPropertyOld.Name);
} }
result = new(Body: body.AsReadOnly(), }
Columns: columns.AsReadOnly(), if (q is null) {
logistics.ToString()); if (processDataStandardFormatMapping.IgnoreColumns.Contains(jsonPropertyOld.Name)) {
if (i == last) {
columns.Add("-1");
columnPairs.Add($"{jsonPropertyOld.Name}:");
logger.LogDebug("{p} )) {jsonPropertyOld.Name} **", p, jsonPropertyOld.Name);
}
continue;
}
if (i == last) {
columns.Add("-1");
columnPairs.Add($"{jsonPropertyOld.Name}:");
if (!string.IsNullOrEmpty(valueOld))
logger.LogDebug("{p} )) {jsonPropertyOld.Name} ??", p, jsonPropertyOld.Name);
}
} else {
jsonPropertyNew = jsonPropertiesNew[q.Value];
if (i == last) {
columns.Add(q.Value.ToString());
columnPairs.Add($"{jsonPropertyOld.Name}:{jsonPropertyNew.Name}");
}
valueNew = jsonPropertyNew.Value.ToString();
if (i == last)
logger.LogDebug("{p} )) {jsonPropertyOld.Name} ~~ {q.Value} => {jsonPropertyNew.Name}", p, jsonPropertyOld.Name, q.Value, jsonPropertyNew.Name);
if (valueNew != valueOld && !differentColumns.Contains(jsonPropertyOld.Name)) {
if (valueNew.Length >= 2 && valueNew.Split(' ')[0] == valueOld)
sameAfterSpaceSplitColumns.Add(jsonPropertyOld.Name);
else {
if (processDataStandardFormatMapping.BackfillColumns.Contains(jsonPropertyOld.Name) && i != last)
continue;
if (processDataStandardFormatMapping.IndexOnlyColumns.Contains(jsonPropertyOld.Name) && int.TryParse(jsonPropertyOld.Name[^2..], out int index) && i != index - 1)
continue;
logger.LogWarning("For [{jsonProperty.Name}] <{directory}> doesn't match (valueNew:{valueNew} != valueOld:{valueOld})!", jsonPropertyOld.Name, directory, valueNew, valueOld);
differentColumns.Add(jsonPropertyOld.Name);
}
}
}
}
if (i == last) {
logger.LogInformation(string.Join(',', columns));
logger.LogInformation($"{string.Join(';', columnPairs)};");
}
}
result = unknownColumns.Count == 0 && differentColumns.Count == 0 && sameAfterSpaceSplitColumns.Count == 0;
return result; return result;
} }
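Note: a hedged reading of the two skip rules above, with a hypothetical column name. BackfillColumns only have to agree on the last row; IndexOnlyColumns read their last two characters as a 1-based row index and are only compared on that row.

string name = "Site01";                                    // hypothetical index-only column
int i = 5;                                                 // current 0-based row
bool skipIndexOnly = int.TryParse(name[^2..], out int index) && i != index - 1;
Console.WriteLine(skipIndexOnly);                          // True: "Site01" is only checked on row 0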
private static JsonElement[]? GetArray(ProcessDataStandardFormat processDataStandardFormat, bool lookForNumbers = false) private static int? TryGetPropertyIndex(JsonProperty[] jsonProperties, string propertyName) {
{ int? result = null;
for (int i = 0; i < jsonProperties.Length; i++) {
if (jsonProperties[i].Name != propertyName)
continue;
result = i;
break;
}
if (result is null) {
for (int i = 0; i < jsonProperties.Length; i++) {
if (jsonProperties[i].Name[0] != propertyName[0])
continue;
if (jsonProperties[i].Name.Length != propertyName.Length)
continue;
if (jsonProperties[i].Name != propertyName)
continue;
result = i;
break;
}
}
return result;
}
private static ProcessDataStandardFormat GetProcessDataStandardFormat(ILogger<Worker> logger, DateTime lastWriteTime, int expectedColumns, int columnsLine, string path, string[]? lines) {
ProcessDataStandardFormat result;
long sequence;
string[] segments;
List<string> body = [];
List<string> logistics = [];
bool lookForLogistics = false;
lines ??= File.ReadAllLines(path);
if (lines.Length <= columnsLine)
segments = [];
else {
segments = lines[columnsLine].Split('\t');
if (segments.Length != expectedColumns) {
logger.LogWarning("{segments} != {expectedColumns}", segments.Length, expectedColumns);
segments = [];
}
}
string[] columns = segments.Select(l => l.Trim('"')).ToArray();
for (int r = columnsLine + 1; r < lines.Length; r++) {
if (lines[r].StartsWith("NUM_DATA_ROWS"))
lookForLogistics = true;
if (!lookForLogistics) {
body.Add(lines[r]);
continue;
}
if (lines[r].StartsWith("LOGISTICS_1")) {
for (int i = r; i < lines.Length; i++) {
if (lines[r].StartsWith("END_HEADER"))
break;
logistics.Add(lines[i]);
}
break;
}
}
if (logistics.Count == 0)
sequence = lastWriteTime.Ticks;
else {
segments = logistics[0].Split("SEQUENCE=");
sequence = segments.Length < 2 || !long.TryParse(segments[1].Split(';')[0], out long s) ? lastWriteTime.Ticks : s;
}
result = new(Body: body.AsReadOnly(),
Columns: columns.AsReadOnly(),
Logistics: logistics.AsReadOnly(),
Sequence: sequence);
return result;
}
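Note: a small sketch of the sequence fallback above; the logistics line is hypothetical. Split("SEQUENCE=") matches A_SEQUENCE= (and B_SEQUENCE=) as well, and the file timestamp's ticks stand in when no parsable value is found.

DateTime lastWriteTime = DateTime.UtcNow;                  // stand-in for fileInfo.LastWriteTime
string logisticsLine = "LOGISTICS_1\tA_JOBID=X;A_SEQUENCE=638497118253016557;";
string[] segments = logisticsLine.Split("SEQUENCE=");
long sequence = segments.Length < 2 || !long.TryParse(segments[1].Split(';')[0], out long s)
    ? lastWriteTime.Ticks                                  // fallback: timestamp ticks
    : s;                                                   // 638497118253016557
Console.WriteLine(sequence);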
private static JsonElement[]? GetArray(ILogger<Worker> logger, int expectedColumns, ProcessDataStandardFormat processDataStandardFormat, bool lookForNumbers) {
JsonElement[]? results; JsonElement[]? results;
if (processDataStandardFormat.Body.Count == 0 || !processDataStandardFormat.Body[0].Contains('\t')) if (processDataStandardFormat.Body.Count == 0 || !processDataStandardFormat.Body[0].Contains('\t'))
results = JsonSerializer.Deserialize("[]", JsonElementCollectionSourceGenerationContext.Default.JsonElementArray) ?? throw new Exception(); results = JsonSerializer.Deserialize("[]", JsonElementCollectionSourceGenerationContext.Default.JsonElementArray) ?? throw new Exception();
else else {
{
string value; string value;
string[] segments; string[] segments;
List<string> lines = []; List<string> lines = [];
StringBuilder stringBuilder = new(); StringBuilder stringBuilder = new();
foreach (string bodyLine in processDataStandardFormat.Body) foreach (string bodyLine in processDataStandardFormat.Body) {
{
_ = stringBuilder.Clear(); _ = stringBuilder.Clear();
_ = stringBuilder.Append('{'); _ = stringBuilder.Append('{');
segments = bodyLine.Trim().Split('\t'); segments = bodyLine.Split('\t');
if (!lookForNumbers) if (segments.Length != expectedColumns) {
{ logger.LogWarning("{segments} != {expectedColumns}", segments.Length, expectedColumns);
for (int c = 1; c < segments.Length; c++) continue;
{ }
if (!lookForNumbers) {
for (int c = 0; c < segments.Length; c++) {
value = segments[c].Replace("\"", "\\\"").Replace("\\", "\\\\"); value = segments[c].Replace("\"", "\\\"").Replace("\\", "\\\\");
_ = stringBuilder.Append('"').Append(processDataStandardFormat.Columns[c]).Append("\":\"").Append(value).Append("\","); _ = stringBuilder.Append('"').Append(processDataStandardFormat.Columns[c]).Append("\":\"").Append(value).Append("\",");
} }
} } else {
else for (int c = 0; c < segments.Length; c++) {
{
for (int c = 1; c < segments.Length; c++)
{
value = segments[c].Replace("\"", "\\\"").Replace("\\", "\\\\"); value = segments[c].Replace("\"", "\\\"").Replace("\\", "\\\\");
if (string.IsNullOrEmpty(value)) if (string.IsNullOrEmpty(value))
_ = stringBuilder.Append('"').Append(processDataStandardFormat.Columns[c]).Append("\":").Append(value).Append("null,"); _ = stringBuilder.Append('"').Append(processDataStandardFormat.Columns[c]).Append("\":").Append(value).Append("null,");
@ -124,182 +301,72 @@ internal static partial class Helper20250219
return results; return results;
} }
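Note: an observation about GetArray as shown, not a change in the commit. The quote replacement runs before the backslash replacement, so the backslash inserted for a quote is itself doubled, and a value containing a quote serializes as \\" rather than \", which ends the JSON string early. Doubling backslashes first avoids that; a minimal sketch:

static string EscapeJson(string value) =>
    value.Replace("\\", "\\\\").Replace("\"", "\\\"");     // backslashes first, then quotes

Console.WriteLine(EscapeJson("5\" wafer"));                // 5\" wafer (valid inside a JSON string)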
private static int? TryGetPropertyIndex(JsonProperty[] jsonProperties, string propertyName) private static ProcessDataStandardFormat GetProcessDataStandardFormat(ILogger<Worker> logger, ProcessDataStandardFormatMapping processDataStandardFormatMapping, JsonElement[] jsonElements, ProcessDataStandardFormat processDataStandardFormat) {
{ ProcessDataStandardFormat result;
int? result = null; int column;
for (int i = 0; i < jsonProperties.Length; i++) string value;
{ List<string> values = [];
if (jsonProperties[i].Name != propertyName) List<string> results = [];
continue; JsonProperty jsonProperty;
result = i; JsonProperty[] jsonProperties;
break;
}
if (result is null)
{
for (int i = 0; i < jsonProperties.Length; i++)
{
if (jsonProperties[i].Name[0] != propertyName[0])
continue;
if (jsonProperties[i].Name.Length != propertyName.Length)
continue;
if (jsonProperties[i].Name != propertyName)
continue;
result = i;
break;
}
}
return result;
}
private static bool Compare(ILogger<Worker> logger, ReadOnlyCollection<string> ignore, ReadOnlyCollection<string> backfill, ReadOnlyCollection<string> indexOnly, ReadOnlyDictionary<string, string> keyValuePairs, string directory, JsonElement[] jsonElementsNew, JsonElement[] jsonElementsOld)
{
bool result;
int? q;
string valueNew;
string valueOld;
JsonProperty jsonPropertyOld;
JsonProperty jsonPropertyNew;
JsonProperty[] jsonPropertiesOld;
JsonProperty[] jsonPropertiesNew;
List<string> unknownColumns = []; List<string> unknownColumns = [];
List<string> differentColumns = []; for (int i = 0; i < jsonElements.Length; i++) {
int last = jsonElementsOld.Length - 1; values.Clear();
List<string> sameAfterSpaceSplitColumns = []; if (jsonElements[i].ValueKind != JsonValueKind.Object) {
for (int i = last; i > 0; i--)
{
if (jsonElementsOld[i].ValueKind != JsonValueKind.Object)
{
unknownColumns.Add(string.Empty); unknownColumns.Add(string.Empty);
break; break;
} }
jsonPropertiesOld = jsonElementsOld[i].EnumerateObject().ToArray(); jsonProperties = jsonElements[i].EnumerateObject().ToArray();
jsonPropertiesNew = jsonElementsNew[i].EnumerateObject().ToArray(); if (jsonProperties.Length != processDataStandardFormatMapping.NewColumnNames.Count) {
for (int p = 0; p < jsonPropertiesOld.Length; p++) logger.LogWarning("{jsonProperties} != {NewColumnNames}", jsonProperties.Length, processDataStandardFormatMapping.NewColumnNames.Count);
{
jsonPropertyOld = jsonPropertiesOld[p];
valueOld = jsonPropertyOld.Value.ToString();
if (ignore.Contains(jsonPropertyOld.Name))
{
if (i == last)
logger.LogDebug("{p} )) {jsonPropertyOld.Name} **", p, jsonPropertyOld.Name);
continue; continue;
} }
if (keyValuePairs.TryGetValue(jsonPropertyOld.Name, out string? name) && !string.IsNullOrEmpty(name)) for (int c = 0; c < processDataStandardFormatMapping.ColumnIndices.Count; c++) {
{ column = processDataStandardFormatMapping.ColumnIndices[c];
q = TryGetPropertyIndex(jsonPropertiesNew, name); if (column == -1)
if (q is null && i == 0) value = processDataStandardFormatMapping.OldColumnNames[c];
unknownColumns.Add($"{jsonPropertyOld.Name}|{name}"); else {
jsonProperty = jsonProperties[column];
value = jsonProperty.Value.ToString();
} }
else values.Add(value);
{
q = TryGetPropertyIndex(jsonPropertiesNew, jsonPropertyOld.Name);
if (q is null)
{
if (i == 0)
unknownColumns.Add(jsonPropertyOld.Name);
} }
results.Add(string.Join('\t', values));
} }
if (q is null) result = new(Body: new(results),
{ Columns: processDataStandardFormatMapping.OldColumnNames,
if (i == last && !string.IsNullOrEmpty(valueOld)) Logistics: processDataStandardFormat.Logistics,
logger.LogDebug("{p} )) {jsonPropertyOld.Name} ??", p, jsonPropertyOld.Name); Sequence: processDataStandardFormat.Sequence);
}
else
{
jsonPropertyNew = jsonPropertiesNew[q.Value];
valueNew = jsonPropertyNew.Value.ToString();
if (i == last)
logger.LogDebug("{p} )) {jsonPropertyOld.Name} ~~ {q.Value} => {jsonPropertyNew.Name}", p, jsonPropertyOld.Name, q.Value, jsonPropertyNew.Name);
if (valueNew != valueOld && !differentColumns.Contains(jsonPropertyOld.Name))
{
if (valueNew.Length >= 2 && valueNew.Split(' ')[0] == valueOld)
sameAfterSpaceSplitColumns.Add(jsonPropertyOld.Name);
else
{
if (backfill.Contains(jsonPropertyOld.Name) && i != last)
continue;
if (indexOnly.Contains(jsonPropertyOld.Name) && int.TryParse(jsonPropertyOld.Name[^2..], out int index) && i != index - 1)
continue;
logger.LogWarning("For [{jsonProperty.Name}] <{directory}> doesn't match (valueNew:{valueNew} != valueOld:{valueOld})!", jsonPropertyOld.Name, directory, valueNew, valueOld);
differentColumns.Add(jsonPropertyOld.Name);
}
}
}
}
}
result = unknownColumns.Count == 0 && differentColumns.Count == 0 && sameAfterSpaceSplitColumns.Count == 0;
return result; return result;
} }
private static void Compare(ILogger<Worker> logger, int sourceDirectoryLength, ReadOnlyCollection<string> ignore, ReadOnlyCollection<string> backfill, ReadOnlyCollection<string> indexOnly, ReadOnlyDictionary<string, string> keyValuePairs, string searchPattern, string[] files) private static void Write(ILogger<Worker> logger, FileInfo fileInfo, ProcessDataStandardFormat processDataStandardFormat) {
{ List<string> results = [];
bool isMatch; if (processDataStandardFormat.Sequence is null)
string directory; throw new NullReferenceException(nameof(processDataStandardFormat.Sequence));
string[] matches; string endOffset = "E#######T";
string directorySegment; string dataOffset = "D#######T";
string[] directoryFiles; string headerOffset = "H#######T";
JsonElement[]? jsonElementsNew; string format = "MM/dd/yyyy HH:mm:ss";
JsonElement[]? jsonElementsOld; string startTime = new DateTime(processDataStandardFormat.Sequence.Value).ToString(format);
ProcessDataStandardFormat processDataStandardFormat; results.Add("HEADER_TAG\tHEADER_VALUE");
FileInfo[] collection = files.Select(l => new FileInfo(l)).ToArray(); results.Add("FORMAT\t2.00");
string[] sorted = (from l in collection orderby l.CreationTime descending select l.FullName).ToArray(); results.Add("NUMBER_PASSES\t0001");
foreach (string file in sorted) results.Add($"HEADER_OFFSET\t{headerOffset}");
{ results.Add($"DATA_OFFSET\t{dataOffset}");
directory = Path.GetDirectoryName(file) ?? throw new Exception(); results.Add($"END_OFFSET\t{endOffset}");
directoryFiles = Directory.GetFiles(directory, searchPattern, SearchOption.TopDirectoryOnly); results.Add($"\"{string.Join("\"\t\"", processDataStandardFormat.Columns)}\"");
matches = (from l in directoryFiles where l != file select l).ToArray(); results.AddRange(processDataStandardFormat.Body);
if (matches.Length < 1) results.Add($"NUM_DATA_ROWS\t{processDataStandardFormat.Body.Count.ToString().PadLeft(9, '0')}");
continue; results.Add($"NUM_DATA_COLUMNS\t{processDataStandardFormat.Columns.Count.ToString().PadLeft(9, '0')}");
directorySegment = directory[sourceDirectoryLength..]; results.Add("DELIMITER\t;");
processDataStandardFormat = GetLogisticsColumnsAndBody(file, lines: null); results.Add($"START_TIME_FORMAT\t{format}");
jsonElementsNew = GetArray(processDataStandardFormat); results.Add($"START_TIME\t{startTime}");
if (jsonElementsNew is null) results.Add("LOGISTICS_COLUMN\tA_LOGISTICS");
continue; results.Add("LOGISTICS_COLUMN\tB_LOGISTICS");
foreach (string match in matches) results.AddRange(processDataStandardFormat.Logistics);
{ File.WriteAllText($"{fileInfo.FullName}.tsv", string.Join(Environment.NewLine, results));
processDataStandardFormat = GetLogisticsColumnsAndBody(match, lines: null); logger.LogDebug("<{fileInfo}>", fileInfo);
jsonElementsOld = GetArray(processDataStandardFormat);
if (jsonElementsOld is null || jsonElementsOld.Length != jsonElementsNew.Length)
{
logger.LogWarning("! <{match}> (jsonElementsOld.Length:{jsonElementsOld} != jsonElementsNew.Length:{jsonElementsNew})", match, jsonElementsOld?.Length, jsonElementsNew.Length);
continue;
}
isMatch = Compare(logger, ignore, backfill, indexOnly, keyValuePairs, directorySegment, jsonElementsNew, jsonElementsOld);
if (!isMatch)
{
logger.LogWarning("! <{match}>", match);
continue;
}
logger.LogInformation("<{match}>", match);
}
}
}
internal static void Compare(ILogger<Worker> logger, List<string> args)
{
string[] segmentsB;
List<string> distinct = [];
string searchPattern = args[2];
string searchPatternB = args[3];
string[] segments = args[7].Split(',');
Dictionary<string, string> keyValuePairs = [];
ReadOnlyCollection<string> ignore = args[4].Split(',').AsReadOnly();
ReadOnlyCollection<string> backfill = args[5].Split(',').AsReadOnly();
ReadOnlyCollection<string> indexOnly = args[6].Split(',').AsReadOnly();
foreach (string segment in segments)
{
segmentsB = segment.Split('|');
if (segmentsB.Length != 2)
continue;
if (distinct.Contains(segmentsB[0]))
continue;
distinct.Add(segmentsB[0]);
keyValuePairs.Add(segmentsB[0], segmentsB[1]);
}
string sourceDirectory = Path.GetFullPath(args[0]);
string[] files = Directory.GetFiles(sourceDirectory, searchPattern, SearchOption.AllDirectories);
logger.LogInformation("<{files}>(s)", files.Length);
Compare(logger, sourceDirectory.Length, ignore, backfill, indexOnly, keyValuePairs.AsReadOnly(), searchPatternB, files);
} }
} }
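Note: Write rebuilds the PDSF envelope around the re-projected body: header tags, quoted column titles, body rows, padded counters, a start time derived from the Sequence ticks, then the logistics block, saved beside the source as <file>.tsv, with the H/D/E offset placeholders emitted literally. A hedged sketch of the derived fields, counts hypothetical:

int rows = 25, columnCount = 33;                           // hypothetical counts
long sequence = 638497118253016557;                        // hypothetical Sequence ticks
Console.WriteLine($"NUM_DATA_ROWS\t{rows.ToString().PadLeft(9, '0')}");           // NUM_DATA_ROWS   000000025
Console.WriteLine($"NUM_DATA_COLUMNS\t{columnCount.ToString().PadLeft(9, '0')}"); // NUM_DATA_COLUMNS   000000033
Console.WriteLine($"START_TIME\t{new DateTime(sequence).ToString("MM/dd/yyyy HH:mm:ss")}");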

View File

@ -1,16 +1,35 @@
using Microsoft.Extensions.Logging;
using System.Collections.ObjectModel; using System.Collections.ObjectModel;
using System.Text.Json; using System.Text.Json;
using Microsoft.Extensions.Logging;
namespace File_Folder_Helper.ADO2025.PI5; namespace File_Folder_Helper.ADO2025.PI5;
internal static partial class Helper20250228 internal static partial class Helper20250228 {
{
private record Record(string TableName, ReadOnlyCollection<string> Columns, ReadOnlyCollection<string[]> Rows); private record Record(string TableName, ReadOnlyCollection<string> Columns, ReadOnlyCollection<string[]> Rows);
private static ReadOnlyCollection<Record> GetRecords(string headerA, string headerB, string file) internal static void PostgresDumpToJson(ILogger<Worker> logger, List<string> args) {
{ string searchPattern = args[2];
string headerA = args[3].Replace('_', ' ');
string headerB = args[4].Replace('_', ' ');
string sourceDirectory = Path.GetFullPath(args[0]);
string[] files = Directory.GetFiles(sourceDirectory, searchPattern, SearchOption.AllDirectories);
if (files.Length != 1)
logger.LogWarning("<{files}>(s)", files.Length);
else
PostgresDumpToJson(logger, headerA, headerB, files[0]);
}
private static void PostgresDumpToJson(ILogger<Worker> logger, string headerA, string headerB, string file) {
ReadOnlyCollection<Record> records = GetRecords(headerA, headerB, file);
if (records.Count > 0)
WriteFile(file, records);
else
logger.LogWarning("<{records}>(s)", records.Count);
}
private static ReadOnlyCollection<Record> GetRecords(string headerA, string headerB, string file) {
List<Record> results = []; List<Record> results = [];
string line; string line;
string[] segmentsA; string[] segmentsA;
@ -23,11 +42,9 @@ internal static partial class Helper20250228
string? tableName = null; string? tableName = null;
string[] lines = File.ReadAllLines(file); string[] lines = File.ReadAllLines(file);
ReadOnlyCollection<string>? columns = null; ReadOnlyCollection<string>? columns = null;
for (int i = 0; i < lines.Length; i++) for (int i = 0; i < lines.Length; i++) {
{
line = lines[i]; line = lines[i];
if (tableName is null) if (tableName is null) {
{
segmentsA = line.Split(headerA); segmentsA = line.Split(headerA);
if (segmentsA.Length != 2) if (segmentsA.Length != 2)
continue; continue;
@ -45,18 +62,14 @@ internal static partial class Helper20250228
continue; continue;
segmentsE = segmentsB[0].Split(' '); segmentsE = segmentsB[0].Split(' ');
tableName = segmentsE[0]; tableName = segmentsE[0];
} } else if (columns is null)
else if (columns is null)
break; break;
else else {
{
rows = []; rows = [];
for (int j = i + 1; j < lines.Length; j++) for (int j = i + 1; j < lines.Length; j++) {
{
i = j; i = j;
segmentsF = lines[j].Split('\t'); segmentsF = lines[j].Split('\t');
if (segmentsF.Length != columns.Count) if (segmentsF.Length != columns.Count) {
{
if (rows.Count > 0) if (rows.Count > 0)
results.Add(new(TableName: tableName, Columns: columns, Rows: rows.AsReadOnly())); results.Add(new(TableName: tableName, Columns: columns, Rows: rows.AsReadOnly()));
break; break;
@ -70,26 +83,24 @@ internal static partial class Helper20250228
return results.AsReadOnly(); return results.AsReadOnly();
} }
private static void WriteFile(string file, ReadOnlyCollection<Record> records) private static void WriteFile(string file, ReadOnlyCollection<Record> records) {
{
List<string> results = []; List<string> results = [];
string json; string json;
string text; string text;
Dictionary<string, string?> keyValuePairs = []; Dictionary<string, string?> keyValuePairs = [];
foreach (Record record in records) foreach (Record record in records) {
{
results.Clear(); results.Clear();
foreach (string[] row in record.Rows) foreach (string[] row in record.Rows) {
{
keyValuePairs.Clear(); keyValuePairs.Clear();
for (int i = 0; i < row.Length; i++) for (int i = 0; i < row.Length; i++) {
{
if (row[i] == "\\N") if (row[i] == "\\N")
keyValuePairs.Add(record.Columns[i], null); keyValuePairs.Add(record.Columns[i], null);
else else
keyValuePairs.Add(record.Columns[i], row[i]); keyValuePairs.Add(record.Columns[i], row[i]);
} }
#pragma warning disable IL3050, IL2026
json = JsonSerializer.Serialize(keyValuePairs); json = JsonSerializer.Serialize(keyValuePairs);
#pragma warning restore IL3050, IL2026
results.Add(json); results.Add(json);
} }
text = string.Join($",{Environment.NewLine}", results); text = string.Join($",{Environment.NewLine}", results);
@ -97,26 +108,4 @@ internal static partial class Helper20250228
} }
} }
private static void PostgresDumpToJson(ILogger<Worker> logger, string headerA, string headerB, string file)
{
ReadOnlyCollection<Record> records = GetRecords(headerA, headerB, file);
if (records.Count > 0)
WriteFile(file, records);
else
logger.LogWarning("<{records}>(s)", records.Count);
}
internal static void PostgresDumpToJson(ILogger<Worker> logger, List<string> args)
{
string searchPattern = args[2];
string headerA = args[3].Replace('_', ' ');
string headerB = args[4].Replace('_', ' ');
string sourceDirectory = Path.GetFullPath(args[0]);
string[] files = Directory.GetFiles(sourceDirectory, searchPattern, SearchOption.AllDirectories);
if (files.Length != 1)
logger.LogWarning("<{files}>(s)", files.Length);
else
PostgresDumpToJson(logger, headerA, headerB, files[0]);
}
} }
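Note: a hedged sketch of the row-to-JSON mapping above; the table is hypothetical. Postgres COPY dumps encode NULL as \N, which WriteFile turns into a JSON null.

using System.Text.Json;

string[] columns = ["id", "name"];                         // hypothetical dump columns
string[] row = "42\t\\N".Split('\t');                      // one dump row: id=42, name=NULL
Dictionary<string, string?> keyValuePairs = [];
for (int i = 0; i < row.Length; i++)
    keyValuePairs.Add(columns[i], row[i] == "\\N" ? null : row[i]);
Console.WriteLine(JsonSerializer.Serialize(keyValuePairs)); // {"id":"42","name":null}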

View File

@ -1,51 +1,12 @@
using Microsoft.Extensions.Logging;
using System.Collections.ObjectModel; using System.Collections.ObjectModel;
using Microsoft.Extensions.Logging;
namespace File_Folder_Helper.ADO2025.PI5; namespace File_Folder_Helper.ADO2025.PI5;
internal static partial class Helper20250301 internal static partial class Helper20250301 {
{
private static ReadOnlyCollection<string> CopyFiles(char split, string workingDirectory, string directory, string[] files) internal static void PocketBaseImportWithDeno(ILogger<Worker> logger, List<string> args) {
{
List<string> results = [];
string fileName;
string checkFile;
string checkDirectory = Path.Combine(workingDirectory, directory);
if (!Directory.Exists(checkDirectory))
_ = Directory.CreateDirectory(checkDirectory);
foreach (string file in files)
{
fileName = Path.GetFileName(file).Split(split)[^1];
checkFile = Path.Combine(checkDirectory, fileName);
if (File.Exists(checkFile))
File.Delete(checkFile);
File.Copy(file, checkFile);
results.Add(fileName);
}
return results.AsReadOnly();
}
private static void PocketBaseImportWithDeno(ILogger<Worker> logger, char split, string workingDirectory, string scriptName, string directory, string[] files)
{
string checkFile = Path.Combine(workingDirectory, scriptName);
if (!File.Exists(checkFile))
logger.LogWarning("<{checkFile}> doesn't exist!", checkFile);
else
{
ReadOnlyCollection<string> fileNames = CopyFiles(split, workingDirectory, directory, files);
if (fileNames.Count == 0)
logger.LogWarning("<{fileNames}>(s)", fileNames.Count);
else
{
foreach (string fileName in fileNames)
logger.LogInformation("deno run --unstable --allow-read --allow-env --allow-net {scriptName} --id=true --input={fileName}", scriptName, fileName);
}
}
}
internal static void PocketBaseImportWithDeno(ILogger<Worker> logger, List<string> args)
{
char split = args[3][0]; char split = args[3][0];
string directory = args[6]; string directory = args[6];
string scriptName = args[5]; string scriptName = args[5];
@ -59,4 +20,37 @@ internal static partial class Helper20250301
PocketBaseImportWithDeno(logger, split, workingDirectory, scriptName, directory, files); PocketBaseImportWithDeno(logger, split, workingDirectory, scriptName, directory, files);
} }
private static void PocketBaseImportWithDeno(ILogger<Worker> logger, char split, string workingDirectory, string scriptName, string directory, string[] files) {
string checkFile = Path.Combine(workingDirectory, scriptName);
if (!File.Exists(checkFile))
logger.LogWarning("<{checkFile}> doesn't exist!", checkFile);
else {
ReadOnlyCollection<string> fileNames = CopyFiles(split, workingDirectory, directory, files);
if (fileNames.Count == 0)
logger.LogWarning("<{fileNames}>(s)", fileNames.Count);
else {
foreach (string fileName in fileNames)
logger.LogInformation("deno run --unstable --allow-read --allow-env --allow-net {scriptName} --id=true --input={fileName}", scriptName, fileName);
}
}
}
private static ReadOnlyCollection<string> CopyFiles(char split, string workingDirectory, string directory, string[] files) {
List<string> results = [];
string fileName;
string checkFile;
string checkDirectory = Path.Combine(workingDirectory, directory);
if (!Directory.Exists(checkDirectory))
_ = Directory.CreateDirectory(checkDirectory);
foreach (string file in files) {
fileName = Path.GetFileName(file).Split(split)[^1];
checkFile = Path.Combine(checkDirectory, fileName);
if (File.Exists(checkFile))
File.Delete(checkFile);
File.Copy(file, checkFile);
results.Add(fileName);
}
return results.AsReadOnly();
}
} }
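Note: a hedged illustration of CopyFiles' name derivation above; the path, split character, and script name are hypothetical. Only the text after the last split character survives, and the helper prints, rather than runs, the matching deno command.

char split = '-';                                          // stand-in for args[3][0]
string file = @"C:\data\2025-03-01-users.json";            // hypothetical export
string fileName = Path.GetFileName(file).Split(split)[^1]; // "users.json"
Console.WriteLine($"deno run --unstable --allow-read --allow-env --allow-net import.ts --id=true --input={fileName}");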

View File

@ -1,131 +1,19 @@
using File_Folder_Helper.Models;
using Microsoft.Extensions.Logging;
using System.Collections.ObjectModel; using System.Collections.ObjectModel;
using System.Text.Json; using System.Text.Json;
using File_Folder_Helper.Models;
using Microsoft.Extensions.Logging;
namespace File_Folder_Helper.ADO2025.PI5; namespace File_Folder_Helper.ADO2025.PI5;
internal static partial class Helper20250305 internal static partial class Helper20250305 {
{
private static readonly HttpClient _HttpClient = new(); private static readonly HttpClient _HttpClient = new();
private record Record(Uri URI, string Path, DateTime LastModified, int? TotalSeconds); private record Record(Uri URI, string Path, DateTime LastModified, int? TotalSeconds);
private static ReadOnlyCollection<NginxFileSystem>? GetCollection(string format, TimeZoneInfo timeZoneInfo, Uri uri) internal static void WriteNginxFileSystemDelta(ILogger<Worker> logger, List<string> args) {
{
List<NginxFileSystem>? results;
Task<HttpResponseMessage> taskHttpResponseMessage = _HttpClient.GetAsync(uri);
taskHttpResponseMessage.Wait();
if (!taskHttpResponseMessage.Result.IsSuccessStatusCode)
results = null;
else
{
Task<string> taskString = taskHttpResponseMessage.Result.Content.ReadAsStringAsync();
taskString.Wait();
if (taskString.Result.StartsWith('<'))
results = null;
else
{
NginxFileSystem[]? nginxFileSystems = JsonSerializer.Deserialize(taskString.Result, NginxFileSystemCollectionSourceGenerationContext.Default.NginxFileSystemArray);
if (nginxFileSystems is null)
results = null;
else
{
results = [];
NginxFileSystem nginxFileSystem;
for (int i = 0; i < nginxFileSystems.Length; i++)
{
nginxFileSystem = NginxFileSystem.Get(format, timeZoneInfo, uri, nginxFileSystems[i]);
results.Add(nginxFileSystem);
}
}
}
}
return results?.AsReadOnly();
}
private static ReadOnlyCollection<Record> GetRecords(string format, TimeZoneInfo timeZoneInfo, string host, ReadOnlyCollection<string> directoryNames, string compareDirectory)
{
List<Record> results = [];
Uri uri = new($"https://{host}/{string.Join('/', directoryNames)}");
ReadOnlyCollection<NginxFileSystem>? nginxFileSystems = GetCollection(format, timeZoneInfo, uri);
if (nginxFileSystems is not null)
{
NginxFileSystem nginxFileSystem;
ReadOnlyCollection<Record> records;
string checkDirectory = $"{compareDirectory}\\{string.Join('\\', directoryNames)}";
if (!Directory.Exists(checkDirectory))
_ = Directory.CreateDirectory(checkDirectory);
for (int i = 0; i < nginxFileSystems.Count; i++)
{
nginxFileSystem = NginxFileSystem.Get(format, timeZoneInfo, uri, nginxFileSystems[i]);
if (nginxFileSystem.Type == "file")
{
Record? record = CompareFile(host, directoryNames, compareDirectory, nginxFileSystem);
if (record is not null)
results.Add(record);
}
else
{
records = CompareDirectory(format, timeZoneInfo, host, directoryNames, compareDirectory, nginxFileSystem);
foreach (Record record in records)
results.Add(record);
}
}
}
return results.AsReadOnly();
}
private static ReadOnlyCollection<Record> CompareDirectory(string format, TimeZoneInfo timeZoneInfo, string host, ReadOnlyCollection<string> directoryNames, string compareDirectory, NginxFileSystem nginxFileSystem)
{
ReadOnlyCollection<Record> results;
List<string> collection = directoryNames.ToList();
collection.Add(nginxFileSystem.Name);
results = GetRecords(format, timeZoneInfo, host, collection.AsReadOnly(), compareDirectory);
return results;
}
private static Record? CompareFile(string host, ReadOnlyCollection<string> directoryNames, string compareDirectory, NginxFileSystem nginxFileSystem)
{
Record? result;
if (nginxFileSystem.LastModified is null || nginxFileSystem.Length is null)
result = null;
else
{
Uri uri = new($"https://{host}/{string.Join('/', directoryNames)}/{nginxFileSystem.Name}");
FileInfo fileInfo = new($"{compareDirectory}\\{string.Join('\\', directoryNames)}\\{nginxFileSystem.Name}");
if (!fileInfo.Exists)
result = new(URI: uri, Path: fileInfo.FullName, LastModified: nginxFileSystem.LastModified.Value, TotalSeconds: null);
else
{
int totalSeconds = (int)new TimeSpan(fileInfo.LastWriteTime.Ticks - nginxFileSystem.LastModified.Value.Ticks).TotalSeconds;
if (totalSeconds is not < 2 or not > -2)
result = new(URI: uri, Path: fileInfo.FullName, LastModified: nginxFileSystem.LastModified.Value, TotalSeconds: totalSeconds);
else if (fileInfo.Length != nginxFileSystem.Length.Value)
result = new(URI: uri, Path: fileInfo.FullName, LastModified: nginxFileSystem.LastModified.Value, TotalSeconds: 0);
else
result = null;
}
}
return result;
}
private static void Download(Record record)
{
Task<HttpResponseMessage> taskHttpResponseMessage = _HttpClient.GetAsync(record.URI);
taskHttpResponseMessage.Wait();
if (taskHttpResponseMessage.Result.IsSuccessStatusCode)
{
Task<string> taskString = taskHttpResponseMessage.Result.Content.ReadAsStringAsync();
taskString.Wait();
File.WriteAllText(record.Path, taskString.Result);
File.SetLastWriteTime(record.Path, record.LastModified);
}
}
internal static void WriteNginxFileSystemDelta(ILogger<Worker> logger, List<string> args)
{
string host = args[2]; string host = args[2];
string rootDirectoryName = args[3]; string rootDirectoryName = args[3];
string format = NginxFileSystem.GetFormat(); string format = NginxFileSystem.GetFormat();
@ -136,8 +24,7 @@ internal static partial class Helper20250305
#if ShellProgressBar #if ShellProgressBar
ProgressBar progressBar = new(records.Count, "Downloading", new ProgressBarOptions() { ProgressCharacter = '─', ProgressBarOnBottom = true, DisableBottomPercentage = true }); ProgressBar progressBar = new(records.Count, "Downloading", new ProgressBarOptions() { ProgressCharacter = '─', ProgressBarOnBottom = true, DisableBottomPercentage = true });
#endif #endif
foreach (Record record in records) foreach (Record record in records) {
{
#if ShellProgressBar #if ShellProgressBar
progressBar.Tick(); progressBar.Tick();
#endif #endif
@ -155,4 +42,99 @@ internal static partial class Helper20250305
#endif #endif
} }
private static ReadOnlyCollection<Record> GetRecords(string format, TimeZoneInfo timeZoneInfo, string host, ReadOnlyCollection<string> directoryNames, string compareDirectory) {
List<Record> results = [];
Uri uri = new($"https://{host}/{string.Join('/', directoryNames)}");
ReadOnlyCollection<NginxFileSystem>? nginxFileSystems = GetCollection(format, timeZoneInfo, uri);
if (nginxFileSystems is not null) {
NginxFileSystem nginxFileSystem;
ReadOnlyCollection<Record> records;
string checkDirectory = $"{compareDirectory}\\{string.Join('\\', directoryNames)}";
if (!Directory.Exists(checkDirectory))
_ = Directory.CreateDirectory(checkDirectory);
for (int i = 0; i < nginxFileSystems.Count; i++) {
nginxFileSystem = NginxFileSystem.Get(format, timeZoneInfo, uri, nginxFileSystems[i]);
if (nginxFileSystem.Type == "file") {
Record? record = CompareFile(host, directoryNames, compareDirectory, nginxFileSystem);
if (record is not null)
results.Add(record);
} else {
records = CompareDirectory(format, timeZoneInfo, host, directoryNames, compareDirectory, nginxFileSystem);
foreach (Record record in records)
results.Add(record);
}
}
}
return results.AsReadOnly();
}
private static ReadOnlyCollection<NginxFileSystem>? GetCollection(string format, TimeZoneInfo timeZoneInfo, Uri uri) {
List<NginxFileSystem>? results;
Task<HttpResponseMessage> taskHttpResponseMessage = _HttpClient.GetAsync(uri);
taskHttpResponseMessage.Wait();
if (!taskHttpResponseMessage.Result.IsSuccessStatusCode)
results = null;
else {
Task<string> taskString = taskHttpResponseMessage.Result.Content.ReadAsStringAsync();
taskString.Wait();
if (taskString.Result.StartsWith('<'))
results = null;
else {
NginxFileSystem[]? nginxFileSystems = JsonSerializer.Deserialize(taskString.Result, NginxFileSystemCollectionSourceGenerationContext.Default.NginxFileSystemArray);
if (nginxFileSystems is null)
results = null;
else {
results = [];
NginxFileSystem nginxFileSystem;
for (int i = 0; i < nginxFileSystems.Length; i++) {
nginxFileSystem = NginxFileSystem.Get(format, timeZoneInfo, uri, nginxFileSystems[i]);
results.Add(nginxFileSystem);
}
}
}
}
return results?.AsReadOnly();
}
private static Record? CompareFile(string host, ReadOnlyCollection<string> directoryNames, string compareDirectory, NginxFileSystem nginxFileSystem) {
Record? result;
if (nginxFileSystem.LastModified is null || nginxFileSystem.Length is null)
result = null;
else {
Uri uri = new($"https://{host}/{string.Join('/', directoryNames)}/{nginxFileSystem.Name}");
FileInfo fileInfo = new($"{compareDirectory}\\{string.Join('\\', directoryNames)}\\{nginxFileSystem.Name}");
if (!fileInfo.Exists)
result = new(URI: uri, Path: fileInfo.FullName, LastModified: nginxFileSystem.LastModified.Value, TotalSeconds: null);
else {
int totalSeconds = (int)new TimeSpan(fileInfo.LastWriteTime.Ticks - nginxFileSystem.LastModified.Value.Ticks).TotalSeconds;
if (totalSeconds is not < 2 or not > -2)
result = new(URI: uri, Path: fileInfo.FullName, LastModified: nginxFileSystem.LastModified.Value, TotalSeconds: totalSeconds);
else if (fileInfo.Length != nginxFileSystem.Length.Value)
result = new(URI: uri, Path: fileInfo.FullName, LastModified: nginxFileSystem.LastModified.Value, TotalSeconds: 0);
else
result = null;
}
}
return result;
}
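Note: the freshness test above is easy to misread; a hedged expansion of the relational pattern, which treats a clock skew of two seconds or more in either direction as changed.

static bool IsStale(int totalSeconds) => totalSeconds is not < 2 or not > -2;
// Equivalent to: totalSeconds >= 2 || totalSeconds <= -2

Console.WriteLine(IsStale(1));                             // False: within +/-1s, fall through to the length check
Console.WriteLine(IsStale(2));                             // True
Console.WriteLine(IsStale(-3));                            // True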
private static ReadOnlyCollection<Record> CompareDirectory(string format, TimeZoneInfo timeZoneInfo, string host, ReadOnlyCollection<string> directoryNames, string compareDirectory, NginxFileSystem nginxFileSystem) {
ReadOnlyCollection<Record> results;
List<string> collection = directoryNames.ToList();
collection.Add(nginxFileSystem.Name);
results = GetRecords(format, timeZoneInfo, host, collection.AsReadOnly(), compareDirectory);
return results;
}
private static void Download(Record record) {
Task<HttpResponseMessage> taskHttpResponseMessage = _HttpClient.GetAsync(record.URI);
taskHttpResponseMessage.Wait();
if (taskHttpResponseMessage.Result.IsSuccessStatusCode) {
Task<string> taskString = taskHttpResponseMessage.Result.Content.ReadAsStringAsync();
taskString.Wait();
File.WriteAllText(record.Path, taskString.Result);
File.SetLastWriteTime(record.Path, record.LastModified);
}
}
} }

View File

@ -2,94 +2,84 @@ using Microsoft.Extensions.Logging;
namespace File_Folder_Helper.ADO2025.PI5; namespace File_Folder_Helper.ADO2025.PI5;
internal static partial class Helper20250306 internal static partial class Helper20250306 {
{
private static int? GetProcessDataStandardFormatColumnTitlesLine(string[] lines) internal static void ProcessDataStandardFormatToJson(ILogger<Worker> logger, List<string> args) {
{
int? result = null;
for (int i = 0; i < lines.Length; i++)
{
if (lines[i].StartsWith("END_OFFSET") && i + 2 < lines.Length)
{
result = i + 1;
break;
}
}
return result;
}
private static string? ProcessDataStandardFormatToLastDataLine(string[] lines, int columnTitlesLine)
{
string? result = null;
for (int i = columnTitlesLine + 1; i < lines.Length; i++)
{
if (lines[i].StartsWith("NUM_DATA_ROWS"))
{
result = lines[i - 1];
break;
}
}
return result;
}
private static string ProcessDataStandardFormatToJson(int columnTitlesLine, string[] columns, string[] lines)
{
#pragma warning disable CA1845, IDE0057
string result = "[\n";
string line;
string value;
string[] segments;
if (columns.Length == 0)
columns = lines[columnTitlesLine].Trim().Split('\t');
for (int i = columnTitlesLine + 1; i < lines.Length; i++)
{
line = "{";
segments = lines[i].Trim().Split('\t');
if (segments.Length != columns.Length)
break;
for (int c = 1; c < segments.Length; c++)
{
value = segments[c].Replace("\"", "\\\"").Replace("\\", "\\\\");
line += '"' + columns[c].Trim('"') + '"' + ':' + '"' + value + '"' + ',';
}
line = line.Substring(0, line.Length - 1) + '}' + ',' + '\n';
result += line;
}
result = result.Substring(0, result.Length - 1) + ']';
return result;
#pragma warning restore CA1845, IDE0057
}
private static void ProcessDataStandardFormatToJson(ILogger<Worker> logger, string file)
{
string[] lines = File.ReadAllLines(file);
int? columnTitlesLine = GetProcessDataStandardFormatColumnTitlesLine(lines);
if (columnTitlesLine is null)
logger.LogWarning("<{columnTitlesLine}> is null", nameof(columnTitlesLine));
else
{
string? text = ProcessDataStandardFormatToLastDataLine(lines, columnTitlesLine.Value);
File.WriteAllText(".lbl", text);
if (lines.Length < columnTitlesLine.Value + 1)
logger.LogWarning("<{lines}>(s)", lines.Length);
else
{
string json = ProcessDataStandardFormatToJson(columnTitlesLine.Value, [], lines);
File.WriteAllText(".json", json);
}
}
}
internal static void ProcessDataStandardFormatToJson(ILogger<Worker> logger, List<string> args)
{
string searchPattern = args[2]; string searchPattern = args[2];
string sourceDirectory = Path.GetFullPath(args[0]); string sourceDirectory = Path.GetFullPath(args[0]);
string[] files = Directory.GetFiles(sourceDirectory, searchPattern, SearchOption.AllDirectories); string[] files = Directory.GetFiles(sourceDirectory, searchPattern, SearchOption.TopDirectoryOnly);
if (files.Length != 1) if (files.Length != 1)
logger.LogWarning("<{files}>(s)", files.Length); logger.LogWarning("<{files}>(s)", files.Length);
else else
ProcessDataStandardFormatToJson(logger, files[0]); ProcessDataStandardFormatToJson(logger, files[0]);
} }
private static void ProcessDataStandardFormatToJson(ILogger<Worker> logger, string file) {
string[] lines = File.ReadAllLines(file);
int? columnTitlesLine = GetProcessDataStandardFormatColumnTitlesLine(lines);
if (columnTitlesLine is null)
logger.LogWarning("<{columnTitlesLine}> is null", nameof(columnTitlesLine));
else {
string? text = ProcessDataStandardFormatToLastDataLine(lines, columnTitlesLine.Value);
File.WriteAllText(Path.Combine(".vscode", "helper", ".lbl"), text);
if (lines.Length < columnTitlesLine.Value + 1)
logger.LogWarning("<{lines}>(s)", lines.Length);
else {
string json = ProcessDataStandardFormatToJson(columnTitlesLine.Value, [], lines);
File.WriteAllText(Path.Combine(".vscode", "helper", ".json"), json);
}
}
}
private static int? GetProcessDataStandardFormatColumnTitlesLine(string[] lines) {
int? result = null;
bool foundEndOfFile = false;
for (int i = 0; i < lines.Length; i++) {
if (lines[i] == "EOF")
foundEndOfFile = true;
if (foundEndOfFile && lines[i].StartsWith("END_OFFSET") && i + 3 < lines.Length) {
result = i + 2;
break;
}
}
return result;
}
private static string? ProcessDataStandardFormatToLastDataLine(string[] lines, int columnTitlesLine) {
string? result = null;
for (int i = columnTitlesLine + 1; i < lines.Length; i++) {
if (lines[i].StartsWith("NUM_DATA_ROWS")) {
result = lines[i - 2];
break;
}
}
return result;
}
private static string ProcessDataStandardFormatToJson(int columnTitlesLine, string[] columns, string[] lines) {
#pragma warning disable CA1845, IDE0057
string result = "[\n";
string line;
string value;
string[] segments;
if (columns.Length == 0)
columns = lines[columnTitlesLine].Trim().Split('|');
int columnsLength = columns.Length - 2;
for (int i = columnTitlesLine + 1; i < lines.Length; i++) {
line = "{";
segments = lines[i].Trim().Split('|');
if (segments.Length != columnsLength)
continue;
for (int c = 1; c < segments.Length; c++) {
value = segments[c].Replace("\"", "\\\"").Replace("\\", "\\\\");
line += '"' + columns[c].Trim('"') + '"' + ':' + '"' + value + '"' + ',';
}
line = line.Substring(0, line.Length - 1) + '}' + ',' + '\n';
result += line;
}
result = result.Substring(0, result.Length - 2) + ']';
return result;
#pragma warning restore CA1845, IDE0057
}
} }
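Note: each emitted row now ends with ",\n", so trimming two characters before appending ']' removes the trailing comma that the old "Length - 1" version left behind; with zero accepted rows, though, only "[\n" remains and the trim leaves a bare "]". A hedged check of the arithmetic, row content hypothetical:

string result = "[\n" + "{\"Wafer\":\"01\"},\n";
result = result.Substring(0, result.Length - 2) + ']';
Console.WriteLine(result);                                 // [
                                                           // {"Wafer":"01"}]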

View File

@ -1,47 +1,38 @@
using File_Folder_Helper.Helpers; using File_Folder_Helper.Helpers;
using Microsoft.Extensions.Logging; using Microsoft.Extensions.Logging;
namespace File_Folder_Helper.ADO2025.PI5; namespace File_Folder_Helper.ADO2025.PI5;
internal static partial class Helper20250315 internal static partial class Helper20250315 {
{
internal static void Empty(ILogger<Worker> logger, List<string> args) internal static void Empty(ILogger<Worker> logger, List<string> args) {
{ string[] searchPatterns = args[2].Split('~');
string[] searchPatterns = args[2].Split('|');
string sourceDirectory = Path.GetFullPath(args[0]); string sourceDirectory = Path.GetFullPath(args[0]);
if (searchPatterns.Length == 1) if (searchPatterns.Length == 1) {
{
string[] files = Directory.GetFiles(sourceDirectory, searchPatterns[0], SearchOption.AllDirectories); string[] files = Directory.GetFiles(sourceDirectory, searchPatterns[0], SearchOption.AllDirectories);
if (files.Length == 0) if (files.Length == 0)
logger.LogWarning("<{files}>(s)", files.Length); logger.LogWarning("<{files}>(s)", files.Length);
else else {
{
string directoryName; string directoryName;
string[] directories; string[] directories;
foreach (string file in files) foreach (string file in files) {
{
directoryName = Path.GetDirectoryName(file) ?? throw new Exception(); directoryName = Path.GetDirectoryName(file) ?? throw new Exception();
directories = Directory.GetDirectories(directoryName, "*", SearchOption.TopDirectoryOnly); directories = Directory.GetDirectories(directoryName, "*", SearchOption.TopDirectoryOnly);
foreach (string directory in directories) foreach (string directory in directories)
HelperDeleteEmptyDirectories.DeleteEmptyDirectories(logger, directory); HelperDeleteEmptyDirectories.DeleteEmptyDirectories(logger, directory);
} }
} }
} } else {
else
{
string[] files; string[] files;
string checkFile; string checkFile;
HelperDeleteEmptyDirectories.DeleteEmptyDirectories(logger, sourceDirectory); HelperDeleteEmptyDirectories.DeleteEmptyDirectories(logger, sourceDirectory);
foreach (string searchPattern in searchPatterns) foreach (string searchPattern in searchPatterns) {
{
files = Directory.GetFiles(sourceDirectory, searchPattern, SearchOption.AllDirectories); files = Directory.GetFiles(sourceDirectory, searchPattern, SearchOption.AllDirectories);
if (files.Length == 0) if (files.Length == 0)
logger.LogWarning("<{files}>(s)", files.Length); logger.LogWarning("<{files}>(s)", files.Length);
else else {
{ foreach (string file in files) {
foreach (string file in files)
{
checkFile = $"{file}.json"; checkFile = $"{file}.json";
if (File.Exists(checkFile)) if (File.Exists(checkFile))
continue; continue;

View File

@ -1,14 +1,14 @@
using Microsoft.Extensions.Logging;
using System.Collections.ObjectModel; using System.Collections.ObjectModel;
using System.Diagnostics; using System.Diagnostics;
using System.Text.Json; using System.Text.Json;
using System.Text.Json.Serialization; using System.Text.Json.Serialization;
using System.Text.RegularExpressions; using System.Text.RegularExpressions;
using Microsoft.Extensions.Logging;
namespace File_Folder_Helper.ADO2025.PI5; namespace File_Folder_Helper.ADO2025.PI5;
internal static partial class Helper20250320 internal static partial class Helper20250320 {
{
private record Match(string Name, private record Match(string Name,
string Parameters, string Parameters,
@ -34,11 +34,9 @@ internal static partial class Helper20250320
ReadOnlyCollection<int> ReferenceToLineNumbers, ReadOnlyCollection<int> ReferenceToLineNumbers,
int? ScopeEnum, int? ScopeEnum,
Search Search, Search Search,
int StartLine) int StartLine) {
{
public override string ToString() public override string ToString() {
{
string result = JsonSerializer.Serialize(this, MethodCollectionCommonSourceGenerationContext.Default.Method); string result = JsonSerializer.Serialize(this, MethodCollectionCommonSourceGenerationContext.Default.Method);
return result; return result;
} }
@ -47,8 +45,7 @@ internal static partial class Helper20250320
[JsonSourceGenerationOptions(WriteIndented = true)] [JsonSourceGenerationOptions(WriteIndented = true)]
[JsonSerializable(typeof(Method[]))] [JsonSerializable(typeof(Method[]))]
private partial class MethodCollectionCommonSourceGenerationContext : JsonSerializerContext private partial class MethodCollectionCommonSourceGenerationContext : JsonSerializerContext {
{
} }
private record MethodWith(int? EndLine, private record MethodWith(int? EndLine,
@ -60,11 +57,9 @@ internal static partial class Helper20250320
ReadOnlyCollection<int> ReferenceToLineNumbers, ReadOnlyCollection<int> ReferenceToLineNumbers,
int? ScopeEnum, int? ScopeEnum,
Search Search, Search Search,
int StartLine) int StartLine) {
{
public override string ToString() public override string ToString() {
{
string result = JsonSerializer.Serialize(this, MethodCollectionCommonSourceGenerationContext.Default.Method); string result = JsonSerializer.Serialize(this, MethodCollectionCommonSourceGenerationContext.Default.Method);
return result; return result;
} }
@ -73,8 +68,7 @@ internal static partial class Helper20250320
[JsonSourceGenerationOptions(WriteIndented = true)] [JsonSourceGenerationOptions(WriteIndented = true)]
[JsonSerializable(typeof(MethodWith[]))] [JsonSerializable(typeof(MethodWith[]))]
private partial class MethodWithCollectionCommonSourceGenerationContext : JsonSerializerContext private partial class MethodWithCollectionCommonSourceGenerationContext : JsonSerializerContext {
{
} }
private const string _Name = "name"; private const string _Name = "name";
@ -88,11 +82,14 @@ internal static partial class Helper20250320
[GeneratedRegex(@"[[\]<,>?a-zA-Z0-9_()\s]*?\s[a-z_]{1}[a-zA-Z0-9_]*?,")] [GeneratedRegex(@"[[\]<,>?a-zA-Z0-9_()\s]*?\s[a-z_]{1}[a-zA-Z0-9_]*?,")]
private static partial Regex CSharpParameter(); private static partial Regex CSharpParameter();
[GeneratedRegex(@"(?<scope>public|private|internal|protected|\sI[a-zA-Z0-9_]*\.)\s?\b(?<static>static)?\s?\b(?<partial>partial)?\s?\b(?<async>async)?\s?\b(?<result>[\[\]\.\?<,>a-zA-Z0-9_()\s]*?)\s?\b(?<name>[A-Z_]{1}[a-zA-Z0-9_]*)+\((?<parameters>.*)\)")] // VSCode Search ^\s*\b(?<scope>public|private|internal|protected|\sI[a-zA-Z0-9_]*\.)\s?\b(?<static>static)?\s?\b(?<partial>partial)?\s?\b(?<async>async)?\s?\b(?<result>[\[\]\.\?<,>a-zA-Z0-9_()\s]*?)\s?\b(?<name>[A-Z_]{1}[a-zA-Z0-9_])+\((?<parameters>.*)\)\s?\{?$
[GeneratedRegex(@"^\s*\b(?<scope>public|private|internal|protected|\sI[a-zA-Z0-9_]*\.)\s?\b(?<static>static)?\s?\b(?<partial>partial)?\s?\b(?<async>async)?\s?\b(?<result>[\[\]\.\?<,>a-zA-Z0-9_()\s]*?)\s?\b(?<name>[A-Z_]{1}[a-zA-Z0-9_]*)+\((?<parameters>.*)\)\s?\{?$")]
private static partial Regex CSharpMethodLine(); private static partial Regex CSharpMethodLine();
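Note: a hedged probe of the method-line pattern above; the input line is hypothetical.

using System.Text.RegularExpressions;

Regex regex = new(@"^\s*\b(?<scope>public|private|internal|protected|\sI[a-zA-Z0-9_]*\.)\s?\b(?<static>static)?\s?\b(?<partial>partial)?\s?\b(?<async>async)?\s?\b(?<result>[\[\]\.\?<,>a-zA-Z0-9_()\s]*?)\s?\b(?<name>[A-Z_]{1}[a-zA-Z0-9_]*)+\((?<parameters>.*)\)\s?\{?$");
Match match = regex.Match("    private static bool SortFile(ILogger<Worker> logger, string[] lines) {");
Console.WriteLine(match.Groups["name"].Value);             // SortFile
Console.WriteLine(match.Groups["parameters"].Value);       // ILogger<Worker> logger, string[] lines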
internal static void SortCodeMethods(ILogger<Worker> logger, List<string> args, CancellationToken cancellationToken) private static ReadOnlyCollection<Method> GetSortedMethods(ReadOnlyCollection<Method> methods) =>
{ (from l in methods orderby l.ScopeEnum descending, l.ReferenceToLineNumbers.Count descending, l.Line.Length, l.Match.Name.Length, l.Match.Name select l).ToArray().AsReadOnly();
internal static void SortCodeMethods(ILogger<Worker> logger, List<string> args, CancellationToken cancellationToken) {
bool check; bool check;
string[] lines; string[] lines;
List<string> changed = []; List<string> changed = [];
@ -104,16 +101,13 @@ internal static partial class Helper20250320
string repositoryDirectory = Path.GetFullPath(args[0]); string repositoryDirectory = Path.GetFullPath(args[0]);
string[] cSharpFiles = Directory.GetFiles(repositoryDirectory, "*.cs", SearchOption.AllDirectories); string[] cSharpFiles = Directory.GetFiles(repositoryDirectory, "*.cs", SearchOption.AllDirectories);
ReadOnlyCollection<string> gitOthersModifiedAndDeletedExcludingStandardFiles = logOnly ? new(cSharpFiles) : Helpers.HelperGit.GetOthersModifiedAndDeletedExcludingStandardFiles(repositoryDirectory, usePathCombine, cancellationToken); ReadOnlyCollection<string> gitOthersModifiedAndDeletedExcludingStandardFiles = logOnly ? new(cSharpFiles) : Helpers.HelperGit.GetOthersModifiedAndDeletedExcludingStandardFiles(repositoryDirectory, usePathCombine, cancellationToken);
foreach (string cSharpFile in cSharpFiles) foreach (string cSharpFile in cSharpFiles) {
{
if (!gitOthersModifiedAndDeletedExcludingStandardFiles.Contains(cSharpFile)) if (!gitOthersModifiedAndDeletedExcludingStandardFiles.Contains(cSharpFile))
continue; continue;
for (int i = 0; i < 10; i++) for (int i = 0; i < 10; i++) {
{
lines = File.ReadAllLines(cSharpFile); lines = File.ReadAllLines(cSharpFile);
check = SortFile(logger, logOnly, scopeSpaces, cSharpFile, lines); check = SortFile(logger, logOnly, scopeSpaces, cSharpFile, lines);
if (check) if (check) {
{
Thread.Sleep(500); Thread.Sleep(500);
changed.Add($"{i + 1:00}) {cSharpFile}"); changed.Add($"{i + 1:00}) {cSharpFile}");
} }
@ -123,41 +117,35 @@ internal static partial class Helper20250320
} }
if (changed.Count == 0) if (changed.Count == 0)
logger.LogInformation("No changes :)"); logger.LogInformation("No changes :)");
else else {
{
changed.Reverse(); changed.Reverse();
foreach (string c in changed) foreach (string c in changed)
logger.LogInformation(c); logger.LogInformation(c);
} }
} }
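SortCodeMethods loops because one sorting pass can change the reference counts that drive the next one. A minimal sketch of the same read-transform-repeat pattern, with hypothetical names and the ten-pass cap taken from the code above:

using System;
using System.IO;

internal static class FixedPointSketch {
    // Re-run a transform until it stops changing the file, capped at maxPasses.
    internal static int RunUntilStable(string path, Func<string[], bool> transform, int maxPasses = 10) {
        int passes = 0;
        for (int i = 0; i < maxPasses; i++) {
            string[] lines = File.ReadAllLines(path); // re-read: the previous pass may have rewritten the file
            if (!transform(lines))                    // transform returns true only when it wrote changes
                break;
            passes++;
        }
        return passes;
    }
}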
private static bool SortFile(ILogger<Worker> logger, bool logOnly, int scopeSpaces, string cSharpFile, string[] lines) private static bool SortFile(ILogger<Worker> logger, bool logOnly, int scopeSpaces, string cSharpFile, string[] lines) {
{
bool result; bool result;
ReadOnlyCollection<Method> methods = GetMethods(logger, scopeSpaces, cSharpFile, lines); ReadOnlyCollection<Method> methods = GetMethods(logger, scopeSpaces, cSharpFile, lines);
if (methods.Count == 0) if (methods.Count == 0)
result = false; result = false;
else if (methods.Any(l => l.EndLine is null)) else if (methods.Any(l => l.EndLine is null))
result = false; result = false;
else if (logOnly) else if (logOnly) {
{
foreach (Method method in methods) foreach (Method method in methods)
logger.LogInformation("{cSharpFile} - {Name} has {lines} line(s)", cSharpFile, method.Match.Name, (method.EndLine is null ? 999999 : method.EndLine.Value - method.StartLine).ToString("000000")); logger.LogInformation("{cSharpFile} - {Name} has {lines} line(s)", cSharpFile, method.Match.Name, (method.EndLine is null ? 999999 : method.EndLine.Value - method.StartLine).ToString("000000"));
result = false; result = false;
} } else {
else
{
ReadOnlyCollection<Method> sortedMethods = GetSortedMethods(methods); ReadOnlyCollection<Method> sortedMethods = GetSortedMethods(methods);
if (Debugger.IsAttached) if (Debugger.IsAttached)
File.WriteAllText(Path.Combine(".vscode", "helper", ".txt"), string.Join(Environment.NewLine, sortedMethods.Select(l => $"{l.Match.Name} => {l.Parameters.Count}"))); File.WriteAllText(Path.Combine(".vscode", "helper", ".json"), JsonSerializer.Serialize(sortedMethods.ToArray(), MethodCollectionCommonSourceGenerationContext.Default.MethodArray));
ReadOnlyCollection<MethodWith> collection = GetCollection(logger, lines, sortedMethods); ReadOnlyCollection<MethodWith> collection = GetCollection(logger, lines, sortedMethods);
result = WriteAllLines(cSharpFile, lines, collection); result = WriteAllLines(cSharpFile, lines, collection);
} }
return result; return result;
} }
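For reference, the key chain in GetSortedMethods above orders methods by:
// 1. ScopeEnum descending                     wider scopes (public) first
// 2. ReferenceToLineNumbers.Count descending  most-referenced methods first
// 3. Line.Length, Match.Name.Length, Match.Name as deterministic tie-breakers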
private static ReadOnlyCollection<Method> GetMethods(ILogger<Worker> logger, int scopeSpaces, string cSharpFile, string[] lines) private static ReadOnlyCollection<Method> GetMethods(ILogger<Worker> logger, int scopeSpaces, string cSharpFile, string[] lines) {
{
List<Method> results = []; List<Method> results = [];
int check; int check;
int blocks; int blocks;
@ -176,8 +164,7 @@ internal static partial class Helper20250320
Regex parameterRegex = CSharpParameter(); Regex parameterRegex = CSharpParameter();
ReadOnlyDictionary<string, string> parameters; ReadOnlyDictionary<string, string> parameters;
System.Text.RegularExpressions.Match regularExpressionsMatch; System.Text.RegularExpressions.Match regularExpressionsMatch;
for (int i = 0; i < lines.Length; i++) for (int i = 0; i < lines.Length; i++) {
{
check = GetNumberOfStartSpaces(lines, i); check = GetNumberOfStartSpaces(lines, i);
if (check != scopeSpaces) if (check != scopeSpaces)
continue; continue;
@ -215,17 +202,17 @@ internal static partial class Helper20250320
firstLine = lines[startLine].Trim(); firstLine = lines[startLine].Trim();
else else
firstLine = lines[startLine + 1].Trim(); firstLine = lines[startLine + 1].Trim();
isLinq = !lines[i + 1].StartsWith("#pragma") && !lines[i + 1].StartsWith("#nullable") && lines[i + 1].Trim() != "{"; isLinq = !lines[i + 1].StartsWith("#pragma") && !lines[i + 1].StartsWith("#nullable") && lines[i].Trim()[^1] != '{' && lines[i + 1].Trim() != "{";
if (isLinq) if (isLinq)
blocks++; blocks++;
endLine = null; endLine = null;
for (int j = i + 1; j < lines.Length; j++) if (lines[i].Trim()[^1] == '{')
{ blocks++;
for (int j = i + 1; j < lines.Length; j++) {
innerLine = lines[j].Trim(); innerLine = lines[j].Trim();
if (innerLine.StartsWith("#pragma") || innerLine.StartsWith("#nullable")) if (innerLine.StartsWith("#pragma") || innerLine.StartsWith("#nullable"))
continue; continue;
if (isLinq && string.IsNullOrEmpty(innerLine)) if (isLinq && string.IsNullOrEmpty(innerLine)) {
{
if (line.EndsWith(';')) if (line.EndsWith(';'))
blocks--; blocks--;
} }
@ -240,11 +227,9 @@ internal static partial class Helper20250320
break; break;
} }
referenceToLineNumbers = GetReferenceToLineNumbers(lines: lines, start: 0, end: lines.Length, i: i, search: search, parameters: parameters); referenceToLineNumbers = GetReferenceToLineNumbers(lines: lines, start: 0, end: lines.Length, i: i, search: search, parameters: parameters);
if (referenceToLineNumbers.Count == 0) if (referenceToLineNumbers.Count == 0) {
{
lineSegmentFirst = line.Split(match.Name)[0]; lineSegmentFirst = line.Split(match.Name)[0];
if (!lines[i - 1].Trim().StartsWith("[Obsolete")) if (!lines[i - 1].Trim().StartsWith("[Obsolete")) {
{
if (lineSegmentFirst.StartsWith("private")) if (lineSegmentFirst.StartsWith("private"))
logger.LogWarning("// <{cSharpFileName}> {name} with {parameters} parameter(s) <{line}>", Path.GetFileName(cSharpFile), match.Name, parameters, lineSegmentFirst); logger.LogWarning("// <{cSharpFileName}> {name} with {parameters} parameter(s) <{line}>", Path.GetFileName(cSharpFile), match.Name, parameters, lineSegmentFirst);
else else
@ -269,11 +254,9 @@ internal static partial class Helper20250320
return results.AsReadOnly(); return results.AsReadOnly();
} }
private static int GetNumberOfStartSpaces(string[] lines, int i) private static int GetNumberOfStartSpaces(string[] lines, int i) {
{
int result = 0; int result = 0;
foreach (char @char in lines[i]) foreach (char @char in lines[i]) {
{
if (@char != ' ') if (@char != ' ')
break; break;
result += 1; result += 1;
@ -281,11 +264,9 @@ internal static partial class Helper20250320
return result; return result;
} }
private static int GetScopeEnum(Match match) private static int GetScopeEnum(Match match) {
{
int result; int result;
int value = match.Scope switch int value = match.Scope switch {
{
"public" => 8000, "public" => 8000,
"internal" => 7000, "internal" => 7000,
"protected" => 6000, "protected" => 6000,
@ -301,29 +282,36 @@ internal static partial class Helper20250320
return result; return result;
} }
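Only the top of the switch survives the hunk above; a minimal sketch of the visible idea, where the fallback arm is my assumption:

internal static class ScopeWeightSketch {
    // Map scope keywords to descending weights so ordering by ScopeEnum
    // descending lists public members before internal before protected.
    internal static int GetWeight(string scope) =>
        scope switch {
            "public" => 8000,    // weights taken from the diff above
            "internal" => 7000,
            "protected" => 6000,
            _ => 0,              // assumption: the remaining arms are elided
        };
}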
private static ReadOnlyDictionary<string, string> GetParameters(Regex parameterRegex, Match match) private static ReadOnlyDictionary<string, string> GetParameters(Regex parameterRegex, Match match) {
{
Dictionary<string, string> results = []; Dictionary<string, string> results = [];
string value; string value;
string[] segments; string[] segments;
System.Text.RegularExpressions.Match[] matches = parameterRegex.Matches($"{match.Parameters},").ToArray(); System.Text.RegularExpressions.Match[] matches = parameterRegex.Matches($"{match.Parameters},").ToArray();
foreach (System.Text.RegularExpressions.Match m in matches) try {
{ foreach (System.Text.RegularExpressions.Match m in matches) {
if (!m.Success) if (!m.Success)
continue; continue;
value = m.Value.Trim()[..^1]; value = m.Value.Trim()[..^1];
segments = value.Split(' '); segments = value.Split(' ');
results.Add(segments[^1], value); results.Add(segments[^1], value);
} }
} catch (Exception) {
results.Clear();
System.Text.RegularExpressions.Match m;
for (int i = 0; i < matches.Length; i++) {
m = matches[i];
if (!m.Success)
continue;
results.Add(i.ToString(), i.ToString());
}
}
return new(results); return new(results);
} }
private static int GetStartLine(string[] lines, int i) private static int GetStartLine(string[] lines, int i) {
{
int result = i; int result = i;
string line; string line;
for (int j = i - 1; j > -1; j--) for (int j = i - 1; j > -1; j--) {
{
line = lines[j].Trim(); line = lines[j].Trim();
if (!line.StartsWith('[') && !line.StartsWith('#') && !line.StartsWith("/// ")) if (!line.StartsWith('[') && !line.StartsWith('#') && !line.StartsWith("/// "))
break; break;
@ -332,12 +320,10 @@ internal static partial class Helper20250320
return result; return result;
} }
private static int GetLineBlockCount(string line, bool isLinq) private static int GetLineBlockCount(string line, bool isLinq) {
{
int result = 0; int result = 0;
bool ignore = false; bool ignore = false;
for (int i = 0; i < line.Length; i++) for (int i = 0; i < line.Length; i++) {
{
if (line[i] == '\'') if (line[i] == '\'')
i++; i++;
else if (!isLinq && !ignore && line[i] == '{') else if (!isLinq && !ignore && line[i] == '{')
@ -352,27 +338,21 @@ internal static partial class Helper20250320
return result; return result;
} }
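Only the opening half of the counter is visible; a minimal sketch of the same idea, assuming the elided half decrements on '}' and toggles ignore across double-quoted strings (the isLinq special case is dropped for brevity):

internal static class BraceCountSketch {
    internal static int CountBraces(string line) {
        int result = 0;
        bool inString = false;
        for (int i = 0; i < line.Length; i++) {
            if (line[i] == '\'')
                i++;                  // skip character literals such as '{'
            else if (line[i] == '"')
                inString = !inString; // braces inside string literals do not count
            else if (!inString && line[i] == '{')
                result++;
            else if (!inString && line[i] == '}')
                result--;
        }
        return result;
    }
}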
private static List<int> GetReferenceToLineNumbers(string[] lines, int start, int end, int i, Search search, ReadOnlyDictionary<string, string> parameters) private static List<int> GetReferenceToLineNumbers(string[] lines, int start, int end, int i, Search search, ReadOnlyDictionary<string, string> parameters) {
{
List<int> results = []; List<int> results = [];
string[] segments; string[] segments;
string[] afterSegments; string[] afterSegments;
string lastSegmentBeforeDot; string lastSegmentBeforeDot;
for (int j = start; j < end; j++) for (int j = start; j < end; j++) {
{
if (j == i) if (j == i)
continue; continue;
segments = lines[j].Split(search.Name); segments = lines[j].Split(search.Name);
if (segments.Length == 1) if (segments.Length == 1) {
{
segments = lines[j].Split(search.Not); segments = lines[j].Split(search.Not);
if (segments.Length == 1) if (segments.Length == 1) {
{
segments = lines[j].Split(search.Wrap); segments = lines[j].Split(search.Wrap);
if (segments.Length == 1) if (segments.Length == 1) {
{ if (!lines[j].EndsWith(search.Delegate)) {
if (!lines[j].EndsWith(search.Delegate))
{
segments = lines[j].Split(search.Constructor); segments = lines[j].Split(search.Constructor);
if (segments.Length == 1) if (segments.Length == 1)
continue; continue;
@ -382,16 +362,12 @@ internal static partial class Helper20250320
} }
if (lines[j].EndsWith(search.Delegate)) if (lines[j].EndsWith(search.Delegate))
results.Add(j); results.Add(j);
else else {
{
lastSegmentBeforeDot = segments[^1].Split(").")[0]; lastSegmentBeforeDot = segments[^1].Split(").")[0];
if (parameters.Count == 0) if (parameters.Count == 0) {
{
if (lastSegmentBeforeDot.Contains(',')) if (lastSegmentBeforeDot.Contains(','))
continue; continue;
} } else {
else
{
afterSegments = lastSegmentBeforeDot.Split(','); afterSegments = lastSegmentBeforeDot.Split(',');
if (afterSegments.Length != parameters.Count) if (afterSegments.Length != parameters.Count)
continue; continue;
@ -402,15 +378,10 @@ internal static partial class Helper20250320
return results; return results;
} }
private static ReadOnlyCollection<Method> GetSortedMethods(ReadOnlyCollection<Method> methods) => private static ReadOnlyCollection<MethodWith> GetCollection(ILogger<Worker> logger, string[] lines, ReadOnlyCollection<Method> sortedMethods) {
(from l in methods orderby l.ScopeEnum descending, l.ReferenceToLineNumbers.Count descending, l.Line.Length, l.Match.Name.Length, l.Match.Name select l).ToArray().AsReadOnly();
private static ReadOnlyCollection<MethodWith> GetCollection(ILogger<Worker> logger, string[] lines, ReadOnlyCollection<Method> sortedMethods)
{
List<MethodWith> results = []; List<MethodWith> results = [];
List<Method> check = sortedMethods.ToList(); List<Method> check = sortedMethods.ToList();
foreach (Method method in sortedMethods) foreach (Method method in sortedMethods) {
{
logger.LogInformation($"{method.Match.Name} => {method.Parameters.Count}"); logger.LogInformation($"{method.Match.Name} => {method.Parameters.Count}");
if (method.EndLine is null) if (method.EndLine is null)
continue; continue;
@ -422,25 +393,21 @@ internal static partial class Helper20250320
return results.AsReadOnly(); return results.AsReadOnly();
} }
private static MethodWith GetMethodWith(string[] lines, ReadOnlyCollection<Method> methods, List<Method> check, Method method, int methodEndLineValue) private static MethodWith GetMethodWith(string[] lines, ReadOnlyCollection<Method> methods, List<Method> check, Method method, int methodEndLineValue) {
{
MethodWith methodWith; MethodWith methodWith;
List<int> referenceToLineNumbers; List<int> referenceToLineNumbers;
MethodWith[] sortedReferences; MethodWith[] sortedReferences;
Dictionary<int, MethodWith> references = []; Dictionary<int, MethodWith> references = [];
foreach (Method m in methods) foreach (Method m in methods) {
{
if (m.EndLine is null) if (m.EndLine is null)
continue; continue;
if (m == method) if (m == method)
continue; continue;
referenceToLineNumbers = GetReferenceToLineNumbers(lines: lines, start: method.StartLine, end: methodEndLineValue, i: -1, search: m.Search, parameters: m.Parameters); referenceToLineNumbers = GetReferenceToLineNumbers(lines: lines, start: method.StartLine, end: methodEndLineValue, i: -1, search: m.Search, parameters: m.Parameters);
if (referenceToLineNumbers.Count > 0) if (referenceToLineNumbers.Count > 0) {
{
if (!check.Remove(m)) if (!check.Remove(m))
continue; continue;
foreach (int i in referenceToLineNumbers) foreach (int i in referenceToLineNumbers) {
{
if (references.ContainsKey(i)) if (references.ContainsKey(i))
continue; continue;
methodWith = GetMethodWith(lines, methods, check, m, m.EndLine.Value); methodWith = GetMethodWith(lines, methods, check, m, m.EndLine.Value);
@ -466,23 +433,20 @@ internal static partial class Helper20250320
return methodWith; return methodWith;
} }
private static bool WriteAllLines(string cSharpFile, string[] lines, ReadOnlyCollection<MethodWith> collection) private static bool WriteAllLines(string cSharpFile, string[] lines, ReadOnlyCollection<MethodWith> collection) {
{
bool result; bool result;
if (Debugger.IsAttached) if (Debugger.IsAttached)
WriteDebug(collection); WriteDebug(collection);
List<string> results = []; List<string> results = [];
ReadOnlyCollection<int> methodLines = GetMethodLines(collection); ReadOnlyCollection<int> methodLines = GetMethodLines(collection);
int maxMethodLines = methodLines.Max(); int maxMethodLines = methodLines.Max();
for (int i = 0; i < maxMethodLines; i++) for (int i = 0; i < maxMethodLines; i++) {
{
if (methodLines.Contains(i)) if (methodLines.Contains(i))
continue; continue;
results.Add(lines[i]); results.Add(lines[i]);
} }
List<bool> nests = [true]; List<bool> nests = [true];
foreach (MethodWith methodWith in collection) foreach (MethodWith methodWith in collection) {
{
if (methodWith.EndLine is null) if (methodWith.EndLine is null)
continue; continue;
AppendLines(results, nests, lines, methodWith, methodWith.EndLine.Value); AppendLines(results, nests, lines, methodWith, methodWith.EndLine.Value);
@ -493,16 +457,14 @@ internal static partial class Helper20250320
string join = string.Join(Environment.NewLine, results); string join = string.Join(Environment.NewLine, results);
if (join == text) if (join == text)
result = false; result = false;
else else {
{
result = true; result = true;
File.WriteAllText(cSharpFile, join); File.WriteAllText(cSharpFile, join);
} }
return result; return result;
} }
private static void WriteDebug(ReadOnlyCollection<MethodWith> collection) private static void WriteDebug(ReadOnlyCollection<MethodWith> collection) {
{
List<string> results = []; List<string> results = [];
List<bool> nests = [true]; List<bool> nests = [true];
foreach (MethodWith methodWith in collection) foreach (MethodWith methodWith in collection)
@ -510,8 +472,7 @@ internal static partial class Helper20250320
File.WriteAllText(Path.Combine(".vscode", "helper", ".md"), string.Join(Environment.NewLine, results)); File.WriteAllText(Path.Combine(".vscode", "helper", ".md"), string.Join(Environment.NewLine, results));
} }
private static void AppendLines(List<string> results, List<bool> nests, MethodWith methodWith) private static void AppendLines(List<string> results, List<bool> nests, MethodWith methodWith) {
{
nests.Add(true); nests.Add(true);
results.Add($" - {new string('#', nests.Count)} {methodWith.Match.Name} => {methodWith.Parameters.Count}"); results.Add($" - {new string('#', nests.Count)} {methodWith.Match.Name} => {methodWith.Parameters.Count}");
foreach (MethodWith m in methodWith.References) foreach (MethodWith m in methodWith.References)
@ -519,12 +480,10 @@ internal static partial class Helper20250320
nests.RemoveAt(nests.Count - 1); nests.RemoveAt(nests.Count - 1);
} }
private static ReadOnlyCollection<int> GetMethodLines(ReadOnlyCollection<MethodWith> collection) private static ReadOnlyCollection<int> GetMethodLines(ReadOnlyCollection<MethodWith> collection) {
{
List<int> results = []; List<int> results = [];
List<bool> nests = [true]; List<bool> nests = [true];
foreach (MethodWith methodWith in collection) foreach (MethodWith methodWith in collection) {
{
if (methodWith.EndLine is null) if (methodWith.EndLine is null)
continue; continue;
AppendLineNumbers(results, nests, methodWith, methodWith.EndLine.Value); AppendLineNumbers(results, nests, methodWith, methodWith.EndLine.Value);
@ -535,13 +494,11 @@ internal static partial class Helper20250320
return new(results); return new(results);
} }
private static void AppendLineNumbers(List<int> results, List<bool> nests, MethodWith methodWith, int methodWithEndLineValue) private static void AppendLineNumbers(List<int> results, List<bool> nests, MethodWith methodWith, int methodWithEndLineValue) {
{
nests.Add(true); nests.Add(true);
for (int i = methodWith.StartLine; i < methodWithEndLineValue + 1; i++) for (int i = methodWith.StartLine; i < methodWithEndLineValue + 1; i++)
results.Add(i); results.Add(i);
foreach (MethodWith m in methodWith.References) foreach (MethodWith m in methodWith.References) {
{
if (m.EndLine is null) if (m.EndLine is null)
continue; continue;
AppendLineNumbers(results, nests, m, m.EndLine.Value); AppendLineNumbers(results, nests, m, m.EndLine.Value);
@ -549,13 +506,11 @@ internal static partial class Helper20250320
nests.RemoveAt(nests.Count - 1); nests.RemoveAt(nests.Count - 1);
} }
private static void AppendLines(List<string> results, List<bool> nests, string[] lines, MethodWith methodWith, int methodWithEndLineValue) private static void AppendLines(List<string> results, List<bool> nests, string[] lines, MethodWith methodWith, int methodWithEndLineValue) {
{
nests.Add(true); nests.Add(true);
for (int i = methodWith.StartLine; i < methodWithEndLineValue + 1; i++) for (int i = methodWith.StartLine; i < methodWithEndLineValue + 1; i++)
results.Add(lines[i]); results.Add(lines[i]);
foreach (MethodWith m in methodWith.References) foreach (MethodWith m in methodWith.References) {
{
if (m.EndLine is null) if (m.EndLine is null)
continue; continue;
AppendLines(results, nests, lines, m, m.EndLine.Value); AppendLines(results, nests, lines, m, m.EndLine.Value);
View File
@ -1,19 +1,18 @@
using File_Folder_Helper.Helpers;
using Microsoft.Extensions.Logging;
using System.Collections.ObjectModel; using System.Collections.ObjectModel;
using File_Folder_Helper.Helpers;
using Microsoft.Extensions.Logging;
namespace File_Folder_Helper.ADO2025.PI5; namespace File_Folder_Helper.ADO2025.PI5;
internal static partial class Helper20250321 internal static partial class Helper20250321 {
{
private record Record(string Directory, private record Record(string Directory,
string File, string File,
ThreeDeep ThreeDeep) ThreeDeep ThreeDeep) {
{
public static ReadOnlyCollection<Record> GetCollection(string sourceDirectory, string searchPattern, string[] files) public static ReadOnlyCollection<Record> GetCollection(string sourceDirectory, string searchPattern, string[] files) {
{
List<Record> results = []; List<Record> results = [];
Record record; Record record;
string directory; string directory;
@ -21,8 +20,7 @@ internal static partial class Helper20250321
bool json = searchPattern.Contains(".json"); bool json = searchPattern.Contains(".json");
bool check = searchPattern.Split('.').Length == 3; bool check = searchPattern.Split('.').Length == 3;
ReadOnlyCollection<ThreeDeep> collection = ThreeDeep.GetCollection(files); ReadOnlyCollection<ThreeDeep> collection = ThreeDeep.GetCollection(files);
foreach (ThreeDeep threeDeep in collection) foreach (ThreeDeep threeDeep in collection) {
{
if (!json && check) if (!json && check)
fileNameWithoutExtension = threeDeep.DirectoryName; fileNameWithoutExtension = threeDeep.DirectoryName;
else if (!json && !check) else if (!json && !check)
@ -32,19 +30,15 @@ internal static partial class Helper20250321
else else
throw new NotImplementedException(); throw new NotImplementedException();
directory = $"{fileNameWithoutExtension[^1]}{fileNameWithoutExtension[^3..][..2]}"; directory = $"{fileNameWithoutExtension[^1]}{fileNameWithoutExtension[^3..][..2]}";
if (json || (!json && !check)) if (json || (!json && !check)) {
{
record = new(Directory: Path.Combine(sourceDirectory, "new-a", directory), record = new(Directory: Path.Combine(sourceDirectory, "new-a", directory),
File: $"{threeDeep.FileNameWithoutExtension}{threeDeep.Extension}", File: $"{threeDeep.FileNameWithoutExtension}{threeDeep.Extension}",
ThreeDeep: threeDeep); ThreeDeep: threeDeep);
} } else if (!json && check) {
else if (!json && check)
{
record = new(Directory: Path.Combine(sourceDirectory, "new-b", directory, threeDeep.DirectoryName), record = new(Directory: Path.Combine(sourceDirectory, "new-b", directory, threeDeep.DirectoryName),
File: $"{threeDeep.FileNameWithoutExtension}{threeDeep.Extension}", File: $"{threeDeep.FileNameWithoutExtension}{threeDeep.Extension}",
ThreeDeep: threeDeep); ThreeDeep: threeDeep);
} } else
else
throw new NotImplementedException(); throw new NotImplementedException();
results.Add(record); results.Add(record);
} }
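Worked through for a hypothetical input, the bucket expression above behaves like this:
// fileNameWithoutExtension        = "1234567890"  (hypothetical)
// fileNameWithoutExtension[^1]    = "0"           last character
// fileNameWithoutExtension[^3..]  = "890"         last three characters
// ...[..2]                        = "89"          first two of those
// directory                       = "089"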
@ -59,17 +53,14 @@ internal static partial class Helper20250321
long Length, long Length,
string DirectoryName, string DirectoryName,
string ParentDirectoryName, string ParentDirectoryName,
string Root) string Root) {
{
public static ReadOnlyCollection<ThreeDeep> GetCollection(string[] files) public static ReadOnlyCollection<ThreeDeep> GetCollection(string[] files) {
{
List<ThreeDeep> results = []; List<ThreeDeep> results = [];
ThreeDeep record; ThreeDeep record;
FileInfo fileInfo; FileInfo fileInfo;
string parentDirectory; string parentDirectory;
foreach (string file in files) foreach (string file in files) {
{
fileInfo = new(file); fileInfo = new(file);
parentDirectory = Path.GetDirectoryName(fileInfo.DirectoryName) ?? throw new Exception(); parentDirectory = Path.GetDirectoryName(fileInfo.DirectoryName) ?? throw new Exception();
record = new(Extension: Path.GetExtension(file), record = new(Extension: Path.GetExtension(file),
@ -89,14 +80,12 @@ internal static partial class Helper20250321
} }
internal static void MoveToLast(ILogger<Worker> logger, List<string> args) internal static void MoveToLast(ILogger<Worker> logger, List<string> args) {
{ string[] searchPatterns = args[2].Split('~');
string[] searchPatterns = args[2].Split('|');
string sourceDirectory = Path.GetFullPath(args[0]); string sourceDirectory = Path.GetFullPath(args[0]);
if (searchPatterns.Length == 1) if (searchPatterns.Length == 1)
logger.LogInformation("No code for just one!"); logger.LogInformation("No code for just one!");
else else {
{
HelperDeleteEmptyDirectories.DeleteEmptyDirectories(logger, sourceDirectory); HelperDeleteEmptyDirectories.DeleteEmptyDirectories(logger, sourceDirectory);
ReadOnlyCollection<Record> collection = GetCollection(logger, searchPatterns, sourceDirectory); ReadOnlyCollection<Record> collection = GetCollection(logger, searchPatterns, sourceDirectory);
if (collection.Count != 0) if (collection.Count != 0)
@ -108,17 +97,14 @@ internal static partial class Helper20250321
} }
} }
private static ReadOnlyCollection<Record> GetCollection(ILogger<Worker> logger, string[] searchPatterns, string sourceDirectory) private static ReadOnlyCollection<Record> GetCollection(ILogger<Worker> logger, string[] searchPatterns, string sourceDirectory) {
{
string[] files; string[] files;
List<Record> results = []; List<Record> results = [];
foreach (string searchPattern in searchPatterns) foreach (string searchPattern in searchPatterns) {
{
files = Directory.GetFiles(sourceDirectory, searchPattern, SearchOption.AllDirectories); files = Directory.GetFiles(sourceDirectory, searchPattern, SearchOption.AllDirectories);
if (files.Length == 0) if (files.Length == 0)
logger.LogWarning("<{files}>(s)", files.Length); logger.LogWarning("<{files}>(s)", files.Length);
else else {
{
ReadOnlyCollection<Record> collection = Record.GetCollection(sourceDirectory, searchPattern, files); ReadOnlyCollection<Record> collection = Record.GetCollection(sourceDirectory, searchPattern, files);
results.AddRange(collection); results.AddRange(collection);
} }
@ -126,25 +112,21 @@ internal static partial class Helper20250321
return results.AsReadOnly(); return results.AsReadOnly();
} }
private static void UseCollection(ReadOnlyCollection<Record> collection) private static void UseCollection(ReadOnlyCollection<Record> collection) {
{
string fullPath; string fullPath;
string checkFile; string checkFile;
List<string> distinct = []; List<string> distinct = [];
foreach (Record record in collection) foreach (Record record in collection) {
{
if (distinct.Contains(record.Directory)) if (distinct.Contains(record.Directory))
continue; continue;
distinct.Add(record.Directory); distinct.Add(record.Directory);
} }
foreach (string directory in distinct) foreach (string directory in distinct) {
{
if (Directory.Exists(directory)) if (Directory.Exists(directory))
continue; continue;
_ = Directory.CreateDirectory(directory); _ = Directory.CreateDirectory(directory);
} }
foreach (Record record in collection) foreach (Record record in collection) {
{
checkFile = Path.Combine(record.Directory, record.File); checkFile = Path.Combine(record.Directory, record.File);
if (File.Exists(checkFile)) if (File.Exists(checkFile))
continue; continue;
View File
@ -0,0 +1,236 @@
using System.Text.Json;
using System.Text.Json.Serialization;
using Microsoft.Extensions.Logging;
namespace File_Folder_Helper.ADO2025.PI5;
internal static partial class Helper20250404 {
internal record KafkaProducerSaslOptions(
[property: JsonPropertyName("mechanism")] string Mechanism
);
internal record MonitorList(
[property: JsonPropertyName("id")] int Id,
[property: JsonPropertyName("name")] string Name,
[property: JsonPropertyName("description")] string Description,
[property: JsonPropertyName("pathName")] string PathName,
[property: JsonPropertyName("parent")] int? Parent,
[property: JsonPropertyName("childrenIDs")] IReadOnlyList<int> ChildrenIDs,
[property: JsonPropertyName("url")] string Url,
[property: JsonPropertyName("method")] string Method,
[property: JsonPropertyName("hostname")] object Hostname,
[property: JsonPropertyName("port")] object Port,
[property: JsonPropertyName("maxretries")] int MaxRetries,
[property: JsonPropertyName("weight")] int Weight,
[property: JsonPropertyName("active")] bool Active,
[property: JsonPropertyName("forceInactive")] bool ForceInactive,
[property: JsonPropertyName("type")] string Type,
[property: JsonPropertyName("timeout")] int Timeout,
[property: JsonPropertyName("interval")] int Interval,
[property: JsonPropertyName("retryInterval")] int RetryInterval,
[property: JsonPropertyName("resendInterval")] int ResendInterval,
[property: JsonPropertyName("keyword")] object Keyword,
[property: JsonPropertyName("invertKeyword")] bool InvertKeyword,
[property: JsonPropertyName("expiryNotification")] bool ExpiryNotification,
[property: JsonPropertyName("ignoreTls")] bool IgnoreTls,
[property: JsonPropertyName("upsideDown")] bool UpsideDown,
[property: JsonPropertyName("packetSize")] int PacketSize,
[property: JsonPropertyName("maxredirects")] int MaxRedirects,
[property: JsonPropertyName("accepted_statuscodes")] IReadOnlyList<string> AcceptedStatusCodes,
[property: JsonPropertyName("dns_resolve_type")] string DnsResolveType,
[property: JsonPropertyName("dns_resolve_server")] string DnsResolveServer,
[property: JsonPropertyName("dns_last_result")] object DnsLastResult,
[property: JsonPropertyName("docker_container")] string DockerContainer,
[property: JsonPropertyName("docker_host")] object DockerHost,
[property: JsonPropertyName("proxyId")] object ProxyId,
[property: JsonPropertyName("notificationIDList")] NotificationIDList NotificationIDList,
[property: JsonPropertyName("tags")] IReadOnlyList<object> Tags,
[property: JsonPropertyName("maintenance")] bool Maintenance,
[property: JsonPropertyName("mqttTopic")] string MqttTopic,
[property: JsonPropertyName("mqttSuccessMessage")] string MqttSuccessMessage,
[property: JsonPropertyName("databaseQuery")] object DatabaseQuery,
[property: JsonPropertyName("authMethod")] string AuthMethod,
[property: JsonPropertyName("grpcUrl")] object GrpcUrl,
[property: JsonPropertyName("grpcProtobuf")] object GrpcProtobuf,
[property: JsonPropertyName("grpcMethod")] object GrpcMethod,
[property: JsonPropertyName("grpcServiceName")] object GrpcServiceName,
[property: JsonPropertyName("grpcEnableTls")] bool GrpcEnableTls,
[property: JsonPropertyName("radiusCalledStationId")] object RadiusCalledStationId,
[property: JsonPropertyName("radiusCallingStationId")] object RadiusCallingStationId,
[property: JsonPropertyName("game")] object Game,
[property: JsonPropertyName("gamedigGivenPortOnly")] bool GameDigGivenPortOnly,
[property: JsonPropertyName("httpBodyEncoding")] string HttpBodyEncoding,
[property: JsonPropertyName("jsonPath")] object JsonPath,
[property: JsonPropertyName("expectedValue")] object ExpectedValue,
[property: JsonPropertyName("kafkaProducerTopic")] object KafkaProducerTopic,
[property: JsonPropertyName("kafkaProducerBrokers")] IReadOnlyList<object> KafkaProducerBrokers,
[property: JsonPropertyName("kafkaProducerSsl")] bool KafkaProducerSsl,
[property: JsonPropertyName("kafkaProducerAllowAutoTopicCreation")] bool KafkaProducerAllowAutoTopicCreation,
[property: JsonPropertyName("kafkaProducerMessage")] object KafkaProducerMessage,
[property: JsonPropertyName("screenshot")] object Screenshot,
[property: JsonPropertyName("headers")] object Headers,
[property: JsonPropertyName("body")] object Body,
[property: JsonPropertyName("grpcBody")] object GrpcBody,
[property: JsonPropertyName("grpcMetadata")] object GrpcMetadata,
[property: JsonPropertyName("basic_auth_user")] string BasicAuthUser,
[property: JsonPropertyName("basic_auth_pass")] string BasicAuthPass,
[property: JsonPropertyName("oauth_client_id")] object OauthClientId,
[property: JsonPropertyName("oauth_client_secret")] object OauthClientSecret,
[property: JsonPropertyName("oauth_token_url")] object OauthTokenUrl,
[property: JsonPropertyName("oauth_scopes")] object OauthScopes,
[property: JsonPropertyName("oauth_auth_method")] string OauthAuthMethod,
[property: JsonPropertyName("pushToken")] string PushToken,
[property: JsonPropertyName("databaseConnectionString")] string DatabaseConnectionString,
[property: JsonPropertyName("radiusUsername")] object RadiusUsername,
[property: JsonPropertyName("radiusPassword")] object RadiusPassword,
[property: JsonPropertyName("radiusSecret")] object RadiusSecret,
[property: JsonPropertyName("mqttUsername")] string MqttUsername,
[property: JsonPropertyName("mqttPassword")] string MqttPassword,
[property: JsonPropertyName("authWorkstation")] object AuthWorkstation,
[property: JsonPropertyName("authDomain")] object AuthDomain,
[property: JsonPropertyName("tlsCa")] object TlsCa,
[property: JsonPropertyName("tlsCert")] object TlsCert,
[property: JsonPropertyName("tlsKey")] object TlsKey,
[property: JsonPropertyName("kafkaProducerSaslOptions")] KafkaProducerSaslOptions KafkaProducerSaslOptions,
[property: JsonPropertyName("includeSensitiveData")] bool IncludeSensitiveData
);
internal record NotificationIDList(
[property: JsonPropertyName("4")] bool _4
);
internal record NotificationList(
[property: JsonPropertyName("id")] int Id,
[property: JsonPropertyName("name")] string Name,
[property: JsonPropertyName("active")] bool Active,
[property: JsonPropertyName("userId")] int UserId,
[property: JsonPropertyName("isDefault")] bool IsDefault,
[property: JsonPropertyName("config")] string Config
);
internal record Kuma(
[property: JsonPropertyName("version")] string Version,
[property: JsonPropertyName("notificationList")] IReadOnlyList<NotificationList> NotificationList,
[property: JsonPropertyName("monitorList")] IReadOnlyList<MonitorList> MonitorList
);
[JsonSourceGenerationOptions(WriteIndented = true)]
[JsonSerializable(typeof(Kuma))]
private partial class KumaCommonSourceGenerationContext : JsonSerializerContext {
}
internal static void KumaToGatus(ILogger<Worker> logger, List<string> args) {
string url = args[4];
string fileName = args[3];
string searchPattern = args[2];
ParseMetrics(logger, fileName, url);
string sourceDirectory = Path.GetFullPath(args[0]);
string[] files = Directory.GetFiles(sourceDirectory, searchPattern, SearchOption.AllDirectories);
if (files.Length == 0)
logger.LogWarning("<{files}>(s)", files.Length);
else
KumaToGatus(files);
}
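The positional arguments as read back from the code above (sample values are hypothetical):
// args[0] = "D:/Tmp/kuma"                         source directory scanned for export files
// args[2] = "*.json"                              search pattern for Kuma exports
// args[3] = "metrics.txt"                         local copy of the metrics endpoint; note that
//                                                 FileMode.Truncate presumes the file already exists
// args[4] = "https://uptime.example.com/metrics"  metrics URL handed to ParseMetrics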
private static void ParseMetrics(ILogger<Worker> logger, string fileName, string url) {
FileStream fileStream = new(fileName, FileMode.Truncate);
HttpClient httpClient = new();
Task<Stream> streamTask = httpClient.GetStreamAsync(url);
streamTask.Wait();
Task task = streamTask.Result.CopyToAsync(fileStream);
task.Wait();
ParseMetrics(logger, fileStream);
fileStream.Dispose();
streamTask.Dispose();
httpClient.Dispose();
}
private static void ParseMetrics(ILogger<Worker> _, FileStream __) {
// Task<List<IMetric>> metrics = PrometheusMetricsParser.ParseAsync(fileStream);
// metrics.Wait();
// foreach (IMetric metric in metrics.Result) {
// if (metric is not Gauge gauge)
// continue;
// foreach (GaugeMeasurement gaugeMeasurement in gauge.Measurements) {
// if (string.IsNullOrEmpty(metric.Name))
// continue;
// foreach (KeyValuePair<string, string> keyValuePair in gaugeMeasurement.Labels) {
// logger.LogInformation("name:{name}; timestamp:{timestamp}; value:{value}; key-name:{key-name}; key-value:{key-value}",
// metric.Name,
// gaugeMeasurement.Timestamp,
// gaugeMeasurement.Value,
// keyValuePair.Key,
// keyValuePair.Value);
// }
// }
// }
}
private static void KumaToGatus(string[] files) {
Kuma? kuma;
string json;
string checkFile;
foreach (string file in files) {
checkFile = file.ToLower().Replace('_', '-');
if (checkFile != file)
File.Move(file, checkFile);
json = File.ReadAllText(checkFile);
kuma = JsonSerializer.Deserialize(json, KumaCommonSourceGenerationContext.Default.Kuma);
if (kuma is null)
continue;
WriteGatus(checkFile, kuma);
}
}
private static void WriteGatus(string file, Kuma kuma) {
List<string> results = [
string.Empty,
$"# set GATUS_CONFIG_PATH=./{Path.GetFileName(file)}.yaml",
string.Empty,
"endpoints:"
];
string[] segments;
foreach (MonitorList monitorList in kuma.MonitorList) {
if (monitorList.Type is not "http" and not "postgres")
continue;
results.Add($" - name: {monitorList.Name}");
results.Add($" group: {monitorList.PathName.Split(' ')[0]}");
results.Add($" enabled: {monitorList.Active.ToString().ToLower()}");
results.Add($" interval: {monitorList.Interval}s");
if (monitorList.Type == "http") {
results.Add($" method: {monitorList.Method}");
results.Add($" url: \"{monitorList.Url}\"");
if (monitorList.AuthMethod == "basic") {
results.Add($" # user: \"{monitorList.BasicAuthUser}\"");
results.Add($" # password: \"{monitorList.BasicAuthPass}\"");
}
results.Add(" conditions:");
results.Add(" - \"[STATUS] < 300\"");
if (monitorList.Url.Contains("https"))
results.Add(" - \"[CERTIFICATE_EXPIRATION] > 48h\"");
results.Add($" - \"[RESPONSE_TIME] < {monitorList.Timeout}\"");
} else if (monitorList.Type == "postgres") {
segments = monitorList.DatabaseConnectionString.Split('@');
if (segments.Length != 2)
continue;
results.Add($" # connectionString: \"{monitorList.DatabaseConnectionString}\"");
results.Add($" url: \"tcp://{segments[1].Split('/')[0]}\"");
results.Add(" conditions:");
results.Add(" - \"[CONNECTED] == true\"");
} else
throw new NotImplementedException();
results.Add(" alerts:");
results.Add(" - type: email");
results.Add(" description: \"healthcheck failed\"");
results.Add(" send-on-resolved: true");
results.Add(" - type: gotify");
results.Add(" description: \"healthcheck failed\"");
results.Add(" send-on-resolved: true");
results.Add(string.Empty);
}
File.WriteAllText($"{file}.yaml", string.Join(Environment.NewLine, results));
}
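For a hypothetical http monitor (Name "Intranet", PathName "Home Lab", Interval 60, Timeout 48, an https URL, basic auth), WriteGatus emits roughly:

# set GATUS_CONFIG_PATH=./export.json.yaml

endpoints:
  - name: Intranet
    group: Home
    enabled: true
    interval: 60s
    method: GET
    url: "https://intranet.example.com"
    # user: "monitor"
    # password: "secret"
    conditions:
      - "[STATUS] < 300"
      - "[CERTIFICATE_EXPIRATION] > 48h"
      - "[RESPONSE_TIME] < 48"
    alerts:
      - type: email
        description: "healthcheck failed"
        send-on-resolved: true
      - type: gotify
        description: "healthcheck failed"
        send-on-resolved: true

Note that the Kuma Timeout value is copied verbatim into the [RESPONSE_TIME] condition, so its unit is whatever Gatus assumes by default.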
}
View File
@ -0,0 +1,693 @@
using System.Collections.ObjectModel;
using System.Text.Json;
using System.Text.Json.Serialization;
using File_Folder_Helper.Models;
using Microsoft.Extensions.FileSystemGlobbing;
using Microsoft.Extensions.Logging;
#if ShellProgressBar
using ShellProgressBar;
#endif
namespace File_Folder_Helper.ADO2025.PI5;
internal static partial class Helper20250407 {
private record Record(string RelativePath,
long Size,
long Ticks);
private record Download(string Directory,
string Display,
string File,
long Size,
long Ticks,
string UniformResourceLocator);
private record Segment(Record? Left,
string? LeftDirectory,
Record? Right,
string RightDirectory,
string RootUniformResourceLocator);
private record Logic(char GreaterThan,
bool? LeftSideIsNewer,
int LeftSideIsNewerIndex,
bool? LeftSideOnly,
int LeftSideOnlyIndex,
char LessThan,
char Minus,
bool? NotEqualBut,
int NotEqualButIndex,
char Plus,
string[] Raw,
bool? RightSideIsNewer,
int RightSideIsNewerIndex,
bool? RightSideOnly,
int RightSideOnlyIndex) {
internal static Logic? Get(string[] segments) {
Logic? result;
bool check = true;
bool? notEqualBut;
bool? leftSideOnly;
bool? rightSideOnly;
bool? leftSideIsNewer;
const char plus = '+';
bool? rightSideIsNewer;
const char minus = '-';
const char lessThan = 'L';
const char greaterThan = 'G';
const int notEqualButIndex = 2;
const int leftSideOnlyIndex = 0;
const int rightSideOnlyIndex = 4;
const int leftSideIsNewerIndex = 1;
const int rightSideIsNewerIndex = 3;
if (string.IsNullOrEmpty(segments[leftSideOnlyIndex]))
leftSideOnly = null;
else if (segments[leftSideOnlyIndex][0] == plus)
leftSideOnly = true;
else if (segments[leftSideOnlyIndex][0] == minus)
leftSideOnly = false;
else {
check = false;
leftSideOnly = null;
}
if (string.IsNullOrEmpty(segments[leftSideIsNewerIndex]))
leftSideIsNewer = null;
else if (segments[leftSideIsNewerIndex][0] == greaterThan)
leftSideIsNewer = true;
else if (segments[leftSideIsNewerIndex][0] == lessThan)
leftSideIsNewer = false;
else {
check = false;
leftSideIsNewer = null;
}
if (string.IsNullOrEmpty(segments[notEqualButIndex]))
notEqualBut = null;
else if (segments[notEqualButIndex][0] == greaterThan)
notEqualBut = true;
else if (segments[notEqualButIndex][0] == lessThan)
notEqualBut = false;
else {
check = false;
notEqualBut = null;
}
if (string.IsNullOrEmpty(segments[rightSideIsNewerIndex]))
rightSideIsNewer = null;
else if (segments[rightSideIsNewerIndex][0] == greaterThan)
rightSideIsNewer = true;
else if (segments[rightSideIsNewerIndex][0] == lessThan)
rightSideIsNewer = false;
else {
check = false;
rightSideIsNewer = null;
}
if (string.IsNullOrEmpty(segments[rightSideOnlyIndex]))
rightSideOnly = null;
else if (segments[rightSideOnlyIndex][0] == plus)
rightSideOnly = true;
else if (segments[rightSideOnlyIndex][0] == minus)
rightSideOnly = false;
else {
check = false;
rightSideOnly = null;
}
result = !check ? null : new(GreaterThan: greaterThan,
LeftSideIsNewerIndex: leftSideIsNewerIndex,
LeftSideIsNewer: leftSideIsNewer,
LeftSideOnly: leftSideOnly,
LeftSideOnlyIndex: leftSideOnlyIndex,
LessThan: lessThan,
Minus: minus,
NotEqualBut: notEqualBut,
NotEqualButIndex: notEqualButIndex,
Plus: plus,
RightSideIsNewer: rightSideIsNewer,
RightSideIsNewerIndex: rightSideIsNewerIndex,
RightSideOnly: rightSideOnly,
Raw: segments,
RightSideOnlyIndex: rightSideOnlyIndex);
return result;
}
}
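A hypothetical args[5] value and how Logic.Get reads its '~'-separated positions:
// "+~G~G~G~-"
//  [0] '+'  LeftSideOnly     = true   ('+' or '-')
//  [1] 'G'  LeftSideIsNewer  = true   ('G' or 'L')
//  [2] 'G'  NotEqualBut      = true   ('G' or 'L')
//  [3] 'G'  RightSideIsNewer = true   ('G' or 'L')
//  [4] '-'  RightSideOnly    = false  ('+' or '-')
// An empty position yields null for that flag; any other character in a
// non-empty position makes Get return null for the whole Logic.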
private record Review(Segment[]? AreEqual,
Segment[]? LeftSideIsNewer,
Segment[]? LeftSideOnly,
Segment[]? NotEqualBut,
Record[]? Records,
Segment[]? RightSideIsNewer,
Segment[]? RightSideOnly);
[JsonSourceGenerationOptions(WriteIndented = true)]
[JsonSerializable(typeof(Review))]
private partial class ReviewCommonSourceGenerationContext : JsonSerializerContext {
}
internal static void Sync(ILogger<Worker> logger, List<string> args) {
Matcher matcher = new();
string fileName = $"{args[1]}.json";
string[] segments = args[5].Split('~');
string rightDirectory = Path.GetFullPath(args[0].Split('~')[0]);
Logic? logic = segments.Length != 5 ? null : Logic.Get(segments);
string includePatternsFile = Path.Combine(rightDirectory, args[2]);
string excludePatternsFile = Path.Combine(rightDirectory, args[3]);
string[] rootUniformResourceLocators = args.Count < 5 ? [] : args[4].Split('~');
matcher.AddIncludePatterns(!File.Exists(includePatternsFile) ? ["*"] : File.ReadAllLines(includePatternsFile));
matcher.AddExcludePatterns(!File.Exists(excludePatternsFile) ? ["System Volume Information"] : File.ReadAllLines(excludePatternsFile));
ReadOnlyCollection<Record> rightRecords = GetRecords(rightDirectory, matcher);
if (rightRecords.Count == 0)
logger.LogInformation("No source records");
else {
string checkFile = Path.Combine(rightDirectory, fileName);
Review review = new(AreEqual: null,
LeftSideIsNewer: null,
LeftSideOnly: null,
NotEqualBut: null,
Records: rightRecords.ToArray(),
RightSideIsNewer: null,
RightSideOnly: null);
string json = JsonSerializer.Serialize(review, ReviewCommonSourceGenerationContext.Default.Review);
WriteAllText(checkFile, json);
if (rootUniformResourceLocators.Length == 0)
logger.LogInformation("No urls");
else {
string format = NginxFileSystem.GetFormat();
TimeZoneInfo timeZoneInfo = TimeZoneInfo.Local;
Sync(logger, rightDirectory, fileName, logic, rootUniformResourceLocators, rightRecords, format, timeZoneInfo);
}
}
}
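The positional arguments as consumed above (sample values are hypothetical):
// args[0] = "D:/Sync"             right (local) directory, first '~' segment
// args[1] = "review"              becomes the state file name "review.json"
// args[2] = "include.txt"         include-patterns file, defaults to "*" when missing
// args[3] = "exclude.txt"         exclude-patterns file, defaults to "System Volume Information"
// args[4] = "https://a.example.com~https://b.example.com"  root URLs, '~'-separated
// args[5] = "+~G~G~G~-"           Logic flags, decoded by Logic.Get above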
private static ReadOnlyCollection<Record> GetRecords(string rightDirectory, Matcher matcher) {
List<Record> results = [
new(RelativePath: rightDirectory,
Size: 0,
Ticks: 0)];
Record record;
FileInfo fileInfo;
string relativePath;
ReadOnlyCollection<ReadOnlyCollection<string>> collection = Helpers.HelperDirectory.GetFilesCollection(rightDirectory, "*", "*");
foreach (ReadOnlyCollection<string> c in collection) {
foreach (string f in c) {
if (!matcher.Match(rightDirectory, f).HasMatches)
continue;
fileInfo = new(f);
if (fileInfo.Length == 0)
continue;
relativePath = Path.GetRelativePath(rightDirectory, fileInfo.FullName);
record = new(RelativePath: relativePath,
Size: fileInfo.Length,
Ticks: fileInfo.LastWriteTime.ToUniversalTime().Ticks);
results.Add(record);
}
}
return results.AsReadOnly();
}
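One detail worth flagging in GetRecords:
// The seeded first record (RelativePath = rightDirectory, Size = 0, Ticks = 0)
// is a sentinel: the Get* comparers below read it into checkDirectory and skip
// it instead of treating it as a file.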
private static void WriteAllText(string path, string text) {
string check = !File.Exists(path) ? string.Empty : File.ReadAllText(path);
if (check != text)
File.WriteAllText(path, text);
}
private static void Sync(ILogger<Worker> logger, string rightDirectory, string fileName, Logic? logic, string[] rootUniformResourceLocators, ReadOnlyCollection<Record> rightRecords, string format, TimeZoneInfo timeZoneInfo) {
Review? review;
foreach (string rootUniformResourceLocator in rootUniformResourceLocators) {
if (!rootUniformResourceLocator.StartsWith("https:"))
logger.LogInformation("Not supported URL <{url}>", rootUniformResourceLocator);
else {
review = GetJsonResponse(logger, fileName, rootUniformResourceLocator, format, timeZoneInfo);
if (review?.Records is null || review.Records.Length == 0)
logger.LogInformation("No response records");
else {
ReadOnlyCollection<Record> leftRecords = review.Records.AsReadOnly();
Sync(logger, rightDirectory, fileName, logic, rightRecords, rootUniformResourceLocator, leftRecords);
}
}
}
}
private static Review? GetJsonResponse(ILogger<Worker> logger, string fileName, string rootUniformResourceLocator, string format, TimeZoneInfo timeZoneInfo) {
Review? result;
Task<string> response;
HttpClient httpClient = new();
Task<HttpResponseMessage> httpResponseMessage;
string url = new(rootUniformResourceLocator.EndsWith('/') ?
$"{rootUniformResourceLocator[..^1]}/{fileName}" :
$"{rootUniformResourceLocator}/{fileName}");
httpResponseMessage = httpClient.GetAsync(rootUniformResourceLocator);
httpResponseMessage.Wait();
if (!httpResponseMessage.Result.IsSuccessStatusCode) {
logger.LogInformation("Failed to download: <{rootUniformResourceLocator}>;", rootUniformResourceLocator);
result = null;
} else {
response = httpResponseMessage.Result.Content.ReadAsStringAsync();
response.Wait();
NginxFileSystem[]? nginxFileSystems = JsonSerializer.Deserialize(response.Result, NginxFileSystemCollectionSourceGenerationContext.Default.NginxFileSystemArray);
bool isNewest = nginxFileSystems is not null && IsNewest(fileName, format, timeZoneInfo, new(rootUniformResourceLocator), nginxFileSystems);
if (nginxFileSystems is null) {
logger.LogInformation("Failed to parse: <{rootUniformResourceLocator}>;", rootUniformResourceLocator);
result = null;
} else if (!isNewest) {
logger.LogInformation("Outdated remote file: <{rootUniformResourceLocator}>;", rootUniformResourceLocator);
result = null;
} else {
httpResponseMessage = httpClient.GetAsync(url);
httpResponseMessage.Wait();
if (!httpResponseMessage.Result.IsSuccessStatusCode) {
logger.LogInformation("Failed to download: <{url}>;", url);
result = null;
} else {
response = httpResponseMessage.Result.Content.ReadAsStringAsync();
response.Wait();
result = string.IsNullOrEmpty(response.Result) ?
null :
JsonSerializer.Deserialize(response.Result, ReviewCommonSourceGenerationContext.Default.Review);
}
}
}
return result;
}
private static bool IsNewest(string fileName, string format, TimeZoneInfo timeZoneInfo, Uri uri, NginxFileSystem[] nginxFileSystems) {
bool result;
DateTime? match = null;
NginxFileSystem nginxFileSystem;
DateTime dateTime = DateTime.MinValue;
for (int i = 0; i < nginxFileSystems.Length; i++) {
nginxFileSystem = NginxFileSystem.Get(format, timeZoneInfo, uri, nginxFileSystems[i]);
if (nginxFileSystem.LastModified is not null && nginxFileSystem.Name == fileName) {
match = nginxFileSystem.LastModified.Value;
continue;
}
if (nginxFileSystem.LastModified is null || nginxFileSystem.LastModified <= dateTime)
continue;
dateTime = nginxFileSystem.LastModified.Value;
}
result = match is not null && match.Value > dateTime;
return result;
}
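In other words:
// IsNewest returns true only when the entry named fileName was modified
// strictly later than every other file in the listing, i.e. the remote
// review file post-dates the content it describes.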
private static void Sync(ILogger<Worker> logger, string rightDirectory, string fileName, Logic? l, ReadOnlyCollection<Record> rightRecords, string rootUniformResourceLocators, ReadOnlyCollection<Record> leftRecords) {
string json;
string checkFile;
HttpClient httpClient = new();
checkFile = Path.Combine(rightDirectory, fileName);
if (File.Exists(checkFile))
File.Delete(checkFile);
ReadOnlyCollection<Segment> areEqual = GetAreEqual(rightDirectory, fileName, rightRecords, rootUniformResourceLocators, leftRecords);
ReadOnlyCollection<Segment> notEqualBut = GetNotEqualBut(rightDirectory, fileName, rightRecords, rootUniformResourceLocators, leftRecords);
ReadOnlyCollection<Segment> leftSideOnly = GetLeftSideOnly(rightDirectory, fileName, rightRecords, rootUniformResourceLocators, leftRecords);
ReadOnlyCollection<Segment> rightSideOnly = GetRightSideOnly(rightDirectory, fileName, rightRecords, rootUniformResourceLocators, leftRecords);
ReadOnlyCollection<Segment> leftSideIsNewer = GetLeftSideIsNewer(rightDirectory, fileName, rightRecords, rootUniformResourceLocators, leftRecords);
ReadOnlyCollection<Segment> rightSideIsNewer = GetRightSideIsNewer(rightDirectory, fileName, rightRecords, rootUniformResourceLocators, leftRecords);
Review review = new(AreEqual: areEqual.ToArray(),
LeftSideIsNewer: leftSideIsNewer.ToArray(),
LeftSideOnly: leftSideOnly.ToArray(),
NotEqualBut: notEqualBut.ToArray(),
Records: null,
RightSideIsNewer: rightSideIsNewer.ToArray(),
RightSideOnly: rightSideOnly.ToArray());
json = JsonSerializer.Serialize(review, ReviewCommonSourceGenerationContext.Default.Review);
checkFile = Path.Combine(rightDirectory, fileName);
WriteAllText(checkFile, json);
if (notEqualBut.Count > 0 && l is not null && l.NotEqualBut is not null && l.Raw[l.NotEqualButIndex][0] == l.Minus && !l.NotEqualBut.Value)
logger.LogDebug("Doing nothing with {name}", nameof(Logic.NotEqualBut));
if (leftSideOnly.Count > 0 && l is not null && l.LeftSideOnly is not null && l.Raw[l.LeftSideOnlyIndex][0] == l.Minus && !l.LeftSideOnly.Value)
throw new NotImplementedException("Not possible with https!");
if (leftSideIsNewer.Count > 0 && l is not null && l.LeftSideIsNewer is not null && l.Raw[l.LeftSideIsNewerIndex][0] == l.LessThan && !l.LeftSideIsNewer.Value)
throw new NotImplementedException("Not possible with https!");
if (rightSideIsNewer.Count > 0 && l is not null && l.RightSideIsNewer is not null && l.Raw[l.RightSideIsNewerIndex][0] == l.LessThan && !l.RightSideIsNewer.Value)
throw new NotImplementedException("Not possible with https!");
if (rightSideOnly.Count > 0 && l is not null && l.RightSideOnly is not null && l.Raw[l.RightSideOnlyIndex][0] == l.Plus && l.RightSideOnly.Value)
throw new NotImplementedException("Not possible with https!");
if (rightSideOnly.Count > 0 && l is not null && l.RightSideOnly is not null && l.Raw[l.RightSideOnlyIndex][0] == l.Minus && !l.RightSideOnly.Value)
DoWork(logger, rightDirectory, httpClient, rightSideOnly, delete: true, download: false);
if (leftSideOnly.Count > 0 && l is not null && l.LeftSideOnly is not null && l.Raw[l.LeftSideOnlyIndex][0] == l.Plus && l.LeftSideOnly.Value)
DoWork(logger, rightDirectory, httpClient, leftSideOnly, delete: false, download: true);
if (leftSideIsNewer.Count > 0 && l is not null && l.LeftSideIsNewer is not null && l.Raw[l.LeftSideIsNewerIndex][0] == l.GreaterThan && l.LeftSideIsNewer.Value)
DoWork(logger, rightDirectory, httpClient, leftSideIsNewer, delete: true, download: true);
if (notEqualBut.Count > 0 && l is not null && l.NotEqualBut is not null && l.Raw[l.NotEqualButIndex][0] == l.Plus && l.NotEqualBut.Value)
DoWork(logger, rightDirectory, httpClient, notEqualBut, delete: true, download: true);
if (rightSideIsNewer.Count > 0 && l is not null && l.RightSideIsNewer is not null && l.Raw[l.RightSideIsNewerIndex][0] == l.GreaterThan && l.RightSideIsNewer.Value)
DoWork(logger, rightDirectory, httpClient, rightSideIsNewer, delete: true, download: true);
}
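A summary of the dispatch block above (flag sign parsed from args[5] => action):
//   rightSideOnly    '-'  delete files that exist only on the right (local) side
//   leftSideOnly     '+'  download files that exist only on the left (remote) side
//   leftSideIsNewer  'G'  delete the local copy and re-download the newer remote one
//   rightSideIsNewer 'G'  delete the newer local copy and re-download the remote one
//   notEqualBut      '+'  delete and re-download same-age files whose sizes differ
// The mirrored signs ('+' rightSideOnly, '-' leftSideOnly, and the 'L' variants)
// throw, since pushing to or deleting from the https side is not possible. Note
// that Logic.Get only accepts 'G'/'L' at the NotEqualBut position, so the two
// branches testing it against '+'/'-' cannot fire as written.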
private static ReadOnlyCollection<Segment> GetAreEqual(string rightDirectory, string fileName, ReadOnlyCollection<Record> rightRecords, string rootUniformResourceLocators, ReadOnlyCollection<Record> leftRecords) {
List<Segment> results = [];
Record? record;
Segment segment;
double totalSeconds;
string? checkDirectory = null;
ReadOnlyDictionary<string, Record> keyValuePairs = GetKeyValuePairs(rightRecords);
foreach (Record r in leftRecords) {
if (checkDirectory is null && r.Size == 0 && r.Ticks == 0) {
checkDirectory = r.RelativePath;
continue;
}
if (r.RelativePath == rightDirectory || r.RelativePath == fileName)
continue;
if (!keyValuePairs.TryGetValue(r.RelativePath, out record))
continue;
totalSeconds = new TimeSpan(record.Ticks - r.Ticks).TotalSeconds;
if (record.Size != r.Size || totalSeconds is > 2 or < -2)
continue;
segment = new(Left: r,
LeftDirectory: checkDirectory,
Right: record,
RightDirectory: rightDirectory,
RootUniformResourceLocator: rootUniformResourceLocators);
results.Add(segment);
}
return results.AsReadOnly();
}
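On the comparison window:
// The two-second window presumably tolerates file systems that store
// last-write times at 2 s granularity (FAT, some archive formats): "equal"
// means same size and same timestamp within that tolerance.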
private static ReadOnlyDictionary<string, Record> GetKeyValuePairs(ReadOnlyCollection<Record> records) {
Dictionary<string, Record> results = [];
foreach (Record record in records)
results.Add(record.RelativePath, record);
return new(results);
}
private static ReadOnlyCollection<Segment> GetNotEqualBut(string rightDirectory, string fileName, ReadOnlyCollection<Record> rightRecords, string rootUniformResourceLocators, ReadOnlyCollection<Record> leftRecords) {
List<Segment> results = [];
Record? record;
Segment segment;
double totalSeconds;
string? checkDirectory = null;
ReadOnlyDictionary<string, Record> keyValuePairs = GetKeyValuePairs(rightRecords);
foreach (Record r in leftRecords) {
if (checkDirectory is null && r.Size == 0 && r.Ticks == 0) {
checkDirectory = r.RelativePath;
continue;
}
if (r.RelativePath == rightDirectory || r.RelativePath == fileName)
continue;
if (!keyValuePairs.TryGetValue(r.RelativePath, out record))
continue;
if (record.Size == r.Size)
continue;
totalSeconds = new TimeSpan(record.Ticks - r.Ticks).TotalSeconds;
if (totalSeconds is >= 2 or <= -2)
continue;
segment = new(Left: r,
LeftDirectory: checkDirectory,
Right: record,
RightDirectory: rightDirectory,
RootUniformResourceLocator: rootUniformResourceLocators);
results.Add(segment);
}
return results.AsReadOnly();
}
private static ReadOnlyCollection<Segment> GetLeftSideOnly(string rightDirectory, string fileName, ReadOnlyCollection<Record> rightRecords, string rootUniformResourceLocators, ReadOnlyCollection<Record> leftRecords) {
List<Segment> results = [];
Record? record;
Segment segment;
string? checkDirectory = null;
ReadOnlyDictionary<string, Record> keyValuePairs = GetKeyValuePairs(rightRecords);
foreach (Record r in leftRecords) {
if (checkDirectory is null && r.Size == 0 && r.Ticks == 0) {
checkDirectory = r.RelativePath;
continue;
}
if (r.RelativePath == rightDirectory || r.RelativePath == fileName)
continue;
if (keyValuePairs.TryGetValue(r.RelativePath, out record))
continue;
segment = new(Left: r,
LeftDirectory: checkDirectory,
Right: record,
RightDirectory: rightDirectory,
RootUniformResourceLocator: rootUniformResourceLocators);
results.Add(segment);
}
return results.AsReadOnly();
}
private static ReadOnlyCollection<Segment> GetRightSideOnly(string rightDirectory, string fileName, ReadOnlyCollection<Record> rightRecords, string rootUniformResourceLocators, ReadOnlyCollection<Record> leftRecords) {
List<Segment> results = [];
Record? record;
Segment segment;
string? checkDirectory = null;
ReadOnlyDictionary<string, Record> keyValuePairs = GetKeyValuePairs(leftRecords);
foreach (Record r in rightRecords) {
if (checkDirectory is null && r.Size == 0 && r.Ticks == 0) {
checkDirectory = r.RelativePath;
continue;
}
if (r.RelativePath == rightDirectory || r.RelativePath == fileName)
continue;
if (keyValuePairs.TryGetValue(r.RelativePath, out record))
continue;
segment = new(Left: record,
LeftDirectory: null,
Right: r,
RightDirectory: rightDirectory,
RootUniformResourceLocator: rootUniformResourceLocators);
results.Add(segment);
}
return results.AsReadOnly();
}
private static ReadOnlyCollection<Segment> GetLeftSideIsNewer(string rightDirectory, string fileName, ReadOnlyCollection<Record> rightRecords, string rootUniformResourceLocators, ReadOnlyCollection<Record> leftRecords) {
List<Segment> results = [];
Record? record;
Segment segment;
double totalSeconds;
string? checkDirectory = null;
ReadOnlyDictionary<string, Record> keyValuePairs = GetKeyValuePairs(rightRecords);
foreach (Record r in leftRecords) {
if (checkDirectory is null && r.Size == 0 && r.Ticks == 0) {
checkDirectory = r.RelativePath;
continue;
}
if (r.RelativePath == rightDirectory || r.RelativePath == fileName)
continue;
if (!keyValuePairs.TryGetValue(r.RelativePath, out record))
continue;
totalSeconds = new TimeSpan(record.Ticks - r.Ticks).TotalSeconds;
if (totalSeconds is > -2)
continue;
segment = new(Left: r,
LeftDirectory: checkDirectory,
Right: record,
RightDirectory: rightDirectory,
RootUniformResourceLocator: rootUniformResourceLocators);
results.Add(segment);
}
return results.AsReadOnly();
}
private static ReadOnlyCollection<Segment> GetRightSideIsNewer(string rightDirectory, string fileName, ReadOnlyCollection<Record> rightRecords, string rootUniformResourceLocators, ReadOnlyCollection<Record> leftRecords) {
List<Segment> results = [];
Record? record;
Segment segment;
double totalSeconds;
string? checkDirectory = null;
ReadOnlyDictionary<string, Record> keyValuePairs = GetKeyValuePairs(leftRecords);
foreach (Record r in rightRecords) {
if (checkDirectory is null && r.Size == 0 && r.Ticks == 0) {
checkDirectory = r.RelativePath;
continue;
}
if (r.RelativePath == rightDirectory || r.RelativePath == fileName)
continue;
if (!keyValuePairs.TryGetValue(r.RelativePath, out record))
continue;
totalSeconds = new TimeSpan(record.Ticks - r.Ticks).TotalSeconds;
if (totalSeconds is > -2)
continue;
segment = new(Left: record,
LeftDirectory: null,
Right: r,
RightDirectory: rightDirectory,
RootUniformResourceLocator: rootUniformResourceLocators);
results.Add(segment);
}
return results.AsReadOnly();
}
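All four classifiers above hinge on the same tick arithmetic: a left/right pair is treated as in sync while its timestamps differ by less than two seconds, and one side only counts as newer once the gap reaches two full seconds. A minimal standalone sketch of that comparison (the class, helper name, and sample values are illustrative, not from this repo):

using System;

internal static class TickToleranceDemo {
    // Positive when rightTicks is ahead, negative when leftTicks is ahead.
    private static double TotalSecondsBetween(long rightTicks, long leftTicks) =>
        new TimeSpan(rightTicks - leftTicks).TotalSeconds;

    private static void Main() {
        long left = DateTime.UtcNow.Ticks;
        long right = left + TimeSpan.FromSeconds(3).Ticks;
        double totalSeconds = TotalSecondsBetween(right, left);
        // Same pattern as the methods above: within (-2, 2) counts as "same".
        Console.WriteLine(totalSeconds is >= 2 or <= -2
            ? "outside tolerance (one side is newer)"
            : "within the two-second tolerance");
    }
}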
private static void DoWork(ILogger<Worker> logger, string rightDirectory, HttpClient httpClient, ReadOnlyCollection<Segment> segments, bool delete, bool download) {
long sum;
string size;
if (delete) {
// Deletes operate on the right-side records (see DoDeletes), so size the log from Right; Left is null for right-side-only segments.
Record[] records = (from l in segments where l.Right is not null select l.Right).ToArray();
try { sum = records.Sum(l => l.Size); } catch (Exception) { sum = 0; }
size = GetSizeWithSuffix(sum);
logger.LogInformation("Starting to delete {count} file(s) [{sum}]", segments.Count, size);
DoDeletes(logger, rightDirectory, segments);
logger.LogInformation("Deleted {count} file(s) [{sum}]", segments.Count, size);
}
if (download) {
// Downloads pull the left-side records, so size the log from Left.
Record[] records = (from l in segments where l.Left is not null select l.Left).ToArray();
try { sum = records.Sum(l => l.Size); } catch (Exception) { sum = 0; }
size = GetSizeWithSuffix(sum);
logger.LogInformation("Starting to download {count} file(s) [{sum}]", segments.Count, size);
DoDownloads(logger, rightDirectory, segments, httpClient);
logger.LogInformation("Downloaded {count} file(s) [{sum}]", segments.Count, size);
}
}
private static string GetSizeWithSuffix(long value) {
string result;
int i = 0;
// Track the shrinking value in floating point so the "{0:n1}" format below can show a fraction (long division would always print x.0).
double d = value;
string[] sizeSuffixes = ["bytes", "KB", "MB", "GB", "TB", "PB", "EB", "ZB", "YB"];
if (value < 0) {
result = "-" + GetSizeWithSuffix(-value);
} else {
while (Math.Round(d / 1024) >= 1) {
d /= 1024;
i++;
}
result = string.Format("{0:n1} {1}", d, sizeSuffixes[i]);
}
return result;
}
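As a quick sanity check of the suffix loop, a self-contained mirror of the helper with a few sample values (everything here is illustrative):

using System;

internal static class SizeSuffixDemo {
    // Mirror of GetSizeWithSuffix above, kept in floating point so the
    // one-decimal "{0:n1}" format actually shows a fraction.
    private static string Format(long value) {
        if (value < 0) return "-" + Format(-value);
        string[] suffixes = ["bytes", "KB", "MB", "GB", "TB"];
        double d = value;
        int i = 0;
        while (Math.Round(d / 1024) >= 1) { d /= 1024; i++; }
        return string.Format("{0:n1} {1}", d, suffixes[i]);
    }

    private static void Main() {
        Console.WriteLine(Format(512));        // 512.0 bytes
        Console.WriteLine(Format(1536));       // 1.5 KB
        Console.WriteLine(Format(10_485_760)); // 10.0 MB
    }
}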
private static string GetDurationWithSuffix(long ticks) {
string result;
TimeSpan timeSpan = new(DateTime.Now.Ticks - ticks);
if (timeSpan.TotalMilliseconds < 1000)
result = $"{timeSpan.Milliseconds} ms";
else if (timeSpan.TotalMilliseconds < 60000)
result = $"{Math.Floor(timeSpan.TotalSeconds)} s";
else if (timeSpan.TotalMilliseconds < 3600000)
result = $"{Math.Floor(timeSpan.TotalMinutes)} m";
else
result = $"{Math.Floor(timeSpan.TotalHours)} h";
return result;
}
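The duration helper only buckets by magnitude; this sketch restates the same thresholds against a TimeSpan directly so the buckets can be exercised without touching the clock (names and values are illustrative):

using System;

internal static class DurationSuffixDemo {
    // Same bucket boundaries as GetDurationWithSuffix above, taking a
    // TimeSpan instead of raw ticks.
    private static string Format(TimeSpan t) =>
        t.TotalMilliseconds < 1_000 ? $"{t.Milliseconds} ms"
        : t.TotalMilliseconds < 60_000 ? $"{Math.Floor(t.TotalSeconds)} s"
        : t.TotalMilliseconds < 3_600_000 ? $"{Math.Floor(t.TotalMinutes)} m"
        : $"{Math.Floor(t.TotalHours)} h";

    private static void Main() {
        Console.WriteLine(Format(TimeSpan.FromMilliseconds(250))); // 250 ms
        Console.WriteLine(Format(TimeSpan.FromSeconds(45)));       // 45 s
        Console.WriteLine(Format(TimeSpan.FromMinutes(42)));       // 42 m
        Console.WriteLine(Format(TimeSpan.FromHours(5)));          // 5 h
    }
}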
private static void DoDeletes(ILogger<Worker> logger, string rightDirectory, ReadOnlyCollection<Segment> segments) {
Record? record;
string size;
string count = segments.Count.ToString("000000");
#if ShellProgressBar
ProgressBar progressBar = new(segments.Count, $"Deleting: {count};", new ProgressBarOptions() { ProgressCharacter = '─', ProgressBarOnBottom = true, DisableBottomPercentage = true });
#endif
for (int i = 0; i < segments.Count; i++) {
#if ShellProgressBar
progressBar.Tick();
#endif
record = segments[i].Right;
if (record is null)
continue;
size = GetSizeWithSuffix(record.Size);
try {
File.Delete(Path.Combine(rightDirectory, record.RelativePath));
logger.LogInformation("{i} of {count} - Deleted: <{RelativePath}> - {size};", i.ToString("000000"), count, record.RelativePath, size);
} catch (Exception) {
logger.LogInformation("Failed to delete: <{RelativePath}> - {size};", record.RelativePath, size);
}
}
#if ShellProgressBar
progressBar.Dispose();
#endif
}
private static void DoDownloads(ILogger<Worker> logger, string rightDirectory, ReadOnlyCollection<Segment> segments, HttpClient httpClient) {
int i = 0;
long ticks;
string size;
string duration;
DateTime dateTime;
Task<string> response;
string count = segments.Count.ToString("000000");
ReadOnlyCollection<Download> downloads = GetDownloads(rightDirectory, segments);
Task<HttpResponseMessage> httpResponseMessage;
#if ShellProgressBar
ProgressBar progressBar = new(downloads.Count, $"Downloading: {count};", new ProgressBarOptions() { ProgressCharacter = '─', ProgressBarOnBottom = true, DisableBottomPercentage = true });
#endif
foreach (Download download in downloads) {
#if ShellProgressBar
progressBar.Tick();
#endif
i += 1;
ticks = DateTime.Now.Ticks;
size = GetSizeWithSuffix(download.Size);
httpResponseMessage = httpClient.GetAsync(download.UniformResourceLocator);
httpResponseMessage.Wait(-1);
if (!httpResponseMessage.Result.IsSuccessStatusCode)
logger.LogInformation("Failed to download: <{checkURL}> - {size};", download.UniformResourceLocator, size);
else {
// Content is read and written as text; binary payloads would need ReadAsByteArrayAsync/File.WriteAllBytes.
response = httpResponseMessage.Result.Content.ReadAsStringAsync();
response.Wait();
try {
File.WriteAllText(download.File, response.Result);
duration = GetDurationWithSuffix(ticks);
dateTime = new DateTime(download.Ticks).ToLocalTime();
File.SetLastWriteTime(download.File, dateTime);
logger.LogInformation("{i} of {count} - Downloaded: <{checkURL}> - {size} - {timeSpan};",
i.ToString("000000"),
count,
download.Display,
size,
duration);
} catch (Exception) {
logger.LogInformation("Failed to download: <{checkURL}> - {size};", download.UniformResourceLocator, size);
}
}
}
#if ShellProgressBar
progressBar.Dispose();
#endif
}
private static ReadOnlyCollection<Download> GetDownloads(string rightDirectory, ReadOnlyCollection<Segment> segments) {
List<Download> results = [];
string checkFile;
Download download;
string? checkDirectory;
List<Download> collection = [];
string? checkUniformResourceLocator;
foreach (Segment segment in segments) {
if (segment.Left is null)
continue;
checkFile = Path.Combine(rightDirectory, segment.Left.RelativePath);
checkDirectory = Path.GetDirectoryName(checkFile);
if (string.IsNullOrEmpty(checkDirectory))
continue;
if (!Directory.Exists(checkDirectory))
_ = Directory.CreateDirectory(checkDirectory);
if (File.Exists(checkFile) && new FileInfo(checkFile).Length == 0)
File.Delete(checkFile);
checkUniformResourceLocator = ConvertTo(segment.RootUniformResourceLocator, segment.Left.RelativePath);
if (string.IsNullOrEmpty(checkUniformResourceLocator))
continue;
download = new(Directory: checkDirectory,
Display: checkUniformResourceLocator[segment.RootUniformResourceLocator.Length..],
File: checkFile,
Size: segment.Left.Size,
Ticks: segment.Left.Ticks,
UniformResourceLocator: checkUniformResourceLocator);
collection.Add(download);
}
Download[] sorted = (from l in collection orderby l.Size select l).ToArray();
int stop = sorted.Length < 100 ? sorted.Length : 100;
for (int i = 0; i < stop; i++)
results.Add(sorted[i]);
for (int i = sorted.Length - 1; i > stop - 1; i--)
results.Add(sorted[i]);
if (collection.Count != results.Count)
throw new Exception("Reordered download count doesn't match the source collection count!");
return results.AsReadOnly();
}
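GetDownloads queues the 100 smallest files first and then works through the remainder from the largest back down to the cutoff; a toy run of the same index arithmetic, with 3 standing in for the 100 threshold:

using System;
using System.Collections.Generic;
using System.Linq;

internal static class DownloadOrderDemo {
    private static void Main() {
        // After an ascending sort by size, the first `stop` (smallest) items
        // go out first, then the rest from the largest end down to index `stop`.
        int[] sorted = Enumerable.Range(1, 7).ToArray(); // stand-in sizes
        int stop = sorted.Length < 3 ? sorted.Length : 3; // 3 stands in for 100
        List<int> results = [];
        for (int i = 0; i < stop; i++)
            results.Add(sorted[i]);
        for (int i = sorted.Length - 1; i > stop - 1; i--)
            results.Add(sorted[i]);
        Console.WriteLine(string.Join(", ", results)); // 1, 2, 3, 7, 6, 5, 4
    }
}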
private static string? ConvertTo(string rootURL, string relativePath) {
string? result = rootURL.EndsWith('/') ? rootURL[..^1] : rootURL;
string windowsRoot = "c:\\";
string windowsMock = $"{windowsRoot}{relativePath}";
string fileName = Path.GetFileName(windowsMock);
ReadOnlyCollection<string> directoryNames = Helpers.HelperDirectory.GetDirectoryNames(windowsMock);
foreach (string directoryName in directoryNames) {
if (directoryName == windowsRoot || directoryName == fileName)
continue;
result = $"{result}/{directoryName}";
}
result = result == rootURL ? null : $"{result}/{fileName}";
return result;
}
}
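ConvertTo rebuilds a forward-slash URL from a Windows-style relative path by walking its directory names. Since Helpers.HelperDirectory.GetDirectoryNames is defined elsewhere, this sketch substitutes a plain split to show the same mapping (the host and file names are illustrative):

using System;

internal static class ConvertToDemo {
    // Stand-in for ConvertTo above: GetDirectoryNames is replaced with a
    // plain split, which is enough to show the path-to-URL mapping.
    private static string? ToUrl(string rootUrl, string relativePath) {
        string result = rootUrl.EndsWith('/') ? rootUrl[..^1] : rootUrl;
        foreach (string part in relativePath.Split(['\\', '/'], StringSplitOptions.RemoveEmptyEntries))
            result = $"{result}/{part}";
        return result == rootUrl ? null : result;
    }

    private static void Main() =>
        // Prints: https://example.com/archive/2025_Week_16/report.pdsf
        Console.WriteLine(ToUrl("https://example.com/archive/", "2025_Week_16\\report.pdsf"));
}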

View File

@ -0,0 +1,47 @@
using Microsoft.Extensions.Logging;
namespace File_Folder_Helper.ADO2025.PI5;
internal static partial class Helper20250421 {
internal static void FreeFileSyncChangeCreatedDate(ILogger<Worker> logger, List<string> args) {
string searchPattern = args[2];
string[] searchPatterns = args[3].Split('~');
string sourceDirectory = Path.GetFullPath(args[0]);
if (searchPatterns.Length != 2)
throw new NotImplementedException($"Expected exactly 2 '~'-separated search patterns but {searchPatterns.Length} were passed!");
string lastSyncSearch = $"{searchPatterns[0]}=\"";
string configurationFileSearch = $"{searchPatterns[1]}=\"";
string[] files = Directory.GetFiles(sourceDirectory, searchPattern, SearchOption.AllDirectories);
if (files.Length != 1)
logger.LogWarning("<{files}>(s)", files.Length);
else
ChangeCreatedDate(lastSyncSearch, configurationFileSearch, files[0]);
}
private static void ChangeCreatedDate(string lastSyncSearch, string configurationFileSearch, string sourceFile) {
long epoch;
string lastSync;
string[] segments;
string[] segmentsB;
DateTime creationTime;
string configurationFile;
string[] lines = File.ReadAllLines(sourceFile);
foreach (string line in lines) {
segments = line.Split(lastSyncSearch);
if (segments.Length != 2)
continue;
segmentsB = line.Split(configurationFileSearch);
if (segmentsB.Length != 2)
continue;
lastSync = segments[1].Split('"')[0];
if (!long.TryParse(lastSync, out epoch) || epoch == 0)
continue;
configurationFile = segmentsB[1].Split('"')[0];
if (!File.Exists(configurationFile))
continue;
creationTime = new(DateTimeOffset.UnixEpoch.AddSeconds(epoch).ToLocalTime().Ticks);
File.SetCreationTime(configurationFile, creationTime);
}
}
}
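The helper reads an epoch-seconds value out of the matched line (the two attribute names arrive via args[3]) and replays it as the configuration file's creation time. The conversion step in isolation, with an illustrative epoch value:

using System;

internal static class EpochToCreatedDemo {
    private static void Main() {
        long epoch = 1_745_000_000; // illustrative LastSync-style epoch seconds
        // Same conversion as the helper above: UTC epoch seconds -> local ticks.
        DateTime creationTime = new(DateTimeOffset.UnixEpoch.AddSeconds(epoch).ToLocalTime().Ticks);
        Console.WriteLine(creationTime); // local time for 2025-04-18T18:13:20Z
    }
}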

View File

@ -0,0 +1,72 @@
using System.Collections.ObjectModel;
using System.Text.Json;
using File_Folder_Helper.Models;
using Microsoft.Extensions.Logging;
namespace File_Folder_Helper.ADO2025.PI5;
internal static partial class Helper20250429 {
private record Record(string Directory, string File, bool FileExists);
internal static void WriteNginxFileSystem(ILogger<Worker> logger, List<string> args) {
string searchPattern = args[2];
string sourceDirectory = Path.GetFullPath(args[0]);
ReadOnlyCollection<Record> subDirectories = GetSubDirectories(searchPattern, sourceDirectory);
if (subDirectories.Count == 0)
logger.LogWarning("<{results}>(s)", subDirectories.Count);
else
WriteNginxFileSystem(searchPattern, subDirectories);
}
private static ReadOnlyCollection<Record> GetSubDirectories(string searchPattern, string sourceDirectory) {
List<Record> results = [];
bool exists;
Record record;
string checkFile;
string[] subDirectories;
string[] directories = Directory.GetDirectories(sourceDirectory, "*", SearchOption.TopDirectoryOnly);
foreach (string directory in directories) {
subDirectories = Directory.GetDirectories(directory, "*", SearchOption.TopDirectoryOnly);
foreach (string subDirectory in subDirectories) {
checkFile = Path.Combine(subDirectory, $"{searchPattern.Split('*')[^1]}.json");
exists = File.Exists(checkFile);
record = new(Directory: subDirectory, File: checkFile, FileExists: exists);
results.Add(record);
}
}
return results.OrderByDescending(l => l.FileExists).ToArray().AsReadOnly();
}
private static void WriteNginxFileSystem(string searchPattern, ReadOnlyCollection<Record> subDirectories) {
string lines;
string result;
string[] files;
FileInfo fileInfo;
List<string> results = [];
NginxFileSystem nginxFileSystem;
foreach (Record record in subDirectories) {
results.Clear();
files = Directory.GetFiles(record.Directory, searchPattern, SearchOption.AllDirectories);
foreach (string file in files) {
fileInfo = new(file);
nginxFileSystem = new(Name: fileInfo.FullName,
LastModified: null,
MTime: fileInfo.LastWriteTime.ToUniversalTime().ToString(),
URI: null,
Type: "file",
Length: fileInfo.Length);
results.Add(JsonSerializer.Serialize(nginxFileSystem, NginxFileSystemSingleLineSourceGenerationContext.Default.NginxFileSystem));
}
if (results.Count == 0)
continue;
result = $"[{Environment.NewLine}{string.Join($",{Environment.NewLine}", results)}{Environment.NewLine}]";
lines = !record.FileExists ? string.Empty : File.ReadAllText(record.File);
if (result == lines)
continue;
File.WriteAllText(record.File, result);
}
}
}
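Each subdirectory's .json file comes out as a bracketed array with one single-line object per file. A sketch of that emitted shape using a stand-in record and the reflection-based serializer (the real model and source-generation context live in File_Folder_Helper.Models and may differ):

using System;
using System.Text.Json;
using System.Text.Json.Serialization;

// Stand-in for the NginxFileSystem model; member names are assumptions.
internal record NginxFileSystemSketch(string Name, string? LastModified, string MTime, string? URI, string Type, long Length);

internal static class WriteNginxDemo {
    private static void Main() {
        NginxFileSystemSketch item = new(
            Name: @"C:\archive\BIORAD4\report.pdsf",
            LastModified: null,
            MTime: new DateTime(2025, 4, 29).ToUniversalTime().ToString(),
            URI: null,
            Type: "file",
            Length: 1234);
        // WhenWritingNull mirrors the single-line source-generation context above.
        JsonSerializerOptions options = new() { DefaultIgnoreCondition = JsonIgnoreCondition.WhenWritingNull };
        string line = JsonSerializer.Serialize(item, options);
        Console.WriteLine($"[{Environment.NewLine}{line}{Environment.NewLine}]");
    }
}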

View File

@ -151,6 +151,14 @@ internal static class HelperDay
ADO2025.PI5.Helper20250320.SortCodeMethods(logger, args, cancellationToken);
else if (args[1] == "Day-Helper-2025-03-21")
ADO2025.PI5.Helper20250321.MoveToLast(logger, args);
else if (args[1] == "Day-Helper-2025-04-04")
ADO2025.PI5.Helper20250404.KumaToGatus(logger, args);
else if (args[1] == "Day-Helper-2025-04-07")
ADO2025.PI5.Helper20250407.Sync(logger, args);
else if (args[1] == "Day-Helper-2025-04-21")
ADO2025.PI5.Helper20250421.FreeFileSyncChangeCreatedDate(logger, args);
else if (args[1] == "Day-Helper-2025-04-29")
ADO2025.PI5.Helper20250429.WriteNginxFileSystem(logger, args);
else
throw new Exception(appSettings.Company);
}

View File

@ -17,7 +17,7 @@
<PackageReference Include="MetadataExtractor" Version="2.8.1" /> <PackageReference Include="MetadataExtractor" Version="2.8.1" />
<PackageReference Include="Microsoft.Extensions.Hosting" Version="8.0.1" /> <PackageReference Include="Microsoft.Extensions.Hosting" Version="8.0.1" />
<PackageReference Include="Microsoft.Extensions.Logging.Console" Version="8.0.1" /> <PackageReference Include="Microsoft.Extensions.Logging.Console" Version="8.0.1" />
<PackageReference Include="runtime.win-x64.Microsoft.DotNet.ILCompiler" Version="8.0.14" /> <PackageReference Include="runtime.win-x64.Microsoft.DotNet.ILCompiler" Version="8.0.15" />
<PackageReference Include="System.Text.Json" Version="8.0.5" /> <PackageReference Include="System.Text.Json" Version="8.0.5" />
<PackageReference Include="TextCopy" Version="6.2.1" /> <PackageReference Include="TextCopy" Version="6.2.1" />
<PackageReference Include="WindowsShortcutFactory" Version="1.2.0" /> <PackageReference Include="WindowsShortcutFactory" Version="1.2.0" />

View File

@ -41,4 +41,31 @@ internal static class HelperDirectory
return new(results);
}
internal static ReadOnlyCollection<ReadOnlyCollection<string>> GetFilesCollection(string directory, string directorySearchFilter, string fileSearchFilter)
{
List<ReadOnlyCollection<string>> results = [];
string[] files;
if (!fileSearchFilter.Contains('*'))
fileSearchFilter = string.Concat('*', fileSearchFilter);
if (!directorySearchFilter.Contains('*'))
directorySearchFilter = string.Concat('*', directorySearchFilter);
if (!Directory.Exists(directory))
_ = Directory.CreateDirectory(directory);
results.Add(Directory.GetFiles(directory, fileSearchFilter, SearchOption.TopDirectoryOnly).AsReadOnly());
string[] directories = Directory.GetDirectories(directory, directorySearchFilter, SearchOption.TopDirectoryOnly);
foreach (string innerDirectory in directories)
{
try
{
files = Directory.GetFiles(innerDirectory, fileSearchFilter, SearchOption.AllDirectories);
if (files.Length == 0)
continue;
results.Add(files.AsReadOnly());
}
catch (UnauthorizedAccessException)
{ continue; }
}
return results.AsReadOnly();
}
}
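A hypothetical call site for GetFilesCollection, assuming the caller sits in the same assembly: the first inner collection holds the top-level files and each later collection is one matching subdirectory tree, with unreadable trees skipped by the UnauthorizedAccessException catch:

using System;
using System.Collections.ObjectModel;

internal static class GetFilesCollectionDemo {
    private static void Main() {
        // Hypothetical arguments; the directory is created if it does not exist.
        ReadOnlyCollection<ReadOnlyCollection<string>> collections =
            HelperDirectory.GetFilesCollection(@"D:\Tmp", "*", "*.pdsf");
        foreach (ReadOnlyCollection<string> files in collections)
            Console.WriteLine($"{files.Count} file(s)");
    }
}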

View File

@ -45,6 +45,12 @@ internal partial class NginxFileSystemSourceGenerationContext : JsonSerializerCo
{
}
[JsonSourceGenerationOptions(WriteIndented = false, DefaultIgnoreCondition = JsonIgnoreCondition.WhenWritingNull)]
[JsonSerializable(typeof(NginxFileSystem))]
internal partial class NginxFileSystemSingleLineSourceGenerationContext : JsonSerializerContext
{
}
[JsonSourceGenerationOptions(WriteIndented = true, DefaultIgnoreCondition = JsonIgnoreCondition.WhenWritingNull)]
[JsonSerializable(typeof(NginxFileSystem[]))]
internal partial class NginxFileSystemCollectionSourceGenerationContext : JsonSerializerContext

View File

@ -1,88 +0,0 @@
"use strict";
getValue($('gv.thicknessPoints', ''), $('dcp.BIORAD2/csv/Index', '0'));
function getCollectionParseFloat(collection) {
let result = [];
let value;
for (let i = 0; i < collection.length; i++) {
value = parseFloat(collection[i]);
result.push(value);
}
return result;
}
function getSum(collection) {
let result = 0;
if (!collection || collection.length === 0) {
result = 0;
}
else {
for (let i = 0; i < collection.length; i++) {
result += collection[i];
}
}
return result;
}
function getAverage(collection) {
let result = null;
if (collection == null || collection.length === 0)
result = 0;
else {
let sum = getSum(collection);
result = sum / collection.length;
}
return result;
}
function getValue(thicknessPoints, index) {
let result = null;
if (index === 13) {
if (thicknessPoints != undefined && thicknessPoints.length > 1) {
let collection = thicknessPoints[0] === '|' ? thicknessPoints.substring(1).split('|') : thicknessPoints.split('|');
let collectionParseFloat = getCollectionParseFloat(collection);
let thicknessFourteen3mmEdgeMean = getAverage([collectionParseFloat[10], collectionParseFloat[11], collectionParseFloat[12], collectionParseFloat[13]]);
let thicknessFourteenMeanFrom = getAverage([collectionParseFloat[1], collectionParseFloat[2], collectionParseFloat[6], collectionParseFloat[7]]);
result = (thicknessFourteen3mmEdgeMean - thicknessFourteenMeanFrom) / thicknessFourteenMeanFrom * 100;
}
}
return result;
}
function getVariance(collection) {
let result = null;
if (collection == null || collection.length === 0)
result = null;
else {
let variance = 0;
let t = collection[0];
for (let i = 1; i < collection.length; i++) {
t += collection[i];
const diff = ((i + 1) * collection[i]) - t;
variance += diff * diff / ((i + 1.0) * i);
}
result = variance / (collection.length - 1);
}
return result;
}
// $('gv.thicknessPoints', '') + '|' + $('dcp.BIORAD2/csv/Thickness', '')
// $('gv.thicknessPoints', '') + '|' + $('dcp.BIORAD3/csv/Thickness', '')
// $('gv.thicknessPoints', '') + '|' + $('dcp.BIORAD4/csv/Thickness', '')
// $('gv.thicknessPoints', '') + '|' + $('dcp.BIORAD5/b-csv/Thickness', '')
// \\mesfs.infineon.com\EC_Characterization_Si\Archive\BIORAD2\2025_Week_08\2025-02-20\64-659712-4626_2025-02-20_11;50_AM_5144331401\638756490128318288
// 0 1 2 3 4 5 6 7 8 9 10 11 12 13
// 1 2 3 4 5 6 7 8 9 10 11 12 13 14
const thicknessPoints = getCollectionParseFloat('|4.022|3.952|3.936|3.971|3.954|3.976|3.949|3.906|3.967|3.995|3.997|3.932|3.766|3.890'.substring(1).split('|'));
const thicknessTenPoints = thicknessPoints.slice(0, 10);
const thicknessFourteenCriticalPointsAverage = getAverage(thicknessTenPoints); // 15 // *3.962799999999999
const thicknessFourteenCriticalPointsStdDev = Math.sqrt(getVariance(thicknessTenPoints)); // 16 // *0.0318496467798311
const thicknessFourteenCenterMean = thicknessPoints[4]; // 17 // 3.954
const thicknessFourteenMeanFrom = getAverage([thicknessPoints[1], thicknessPoints[2], thicknessPoints[6], thicknessPoints[7]]); // 18 // *3.954
const thicknessFourteen5mmEdgeMean = getAverage([thicknessPoints[0], thicknessPoints[9]]); // 19 // *4.0085
const thicknessFourteen3mmEdgeMean = getAverage([thicknessPoints[10], thicknessPoints[11], thicknessPoints[12], thicknessPoints[13]]); // 20 // *3.89625
const thicknessFourteen5mmEdgePercent = (thicknessFourteen5mmEdgeMean - thicknessFourteenMeanFrom) / thicknessFourteenMeanFrom * 100; // 21 // *1.848440576764267
const thicknessFourteen3mmEdgePercent = (thicknessFourteen3mmEdgeMean - thicknessFourteenMeanFrom) / thicknessFourteenMeanFrom * 100; // 22 // *-1.0036206567998442
console.log(thicknessFourteenCriticalPointsAverage);

View File

@ -1,4 +1,5 @@
"use strict"; "use strict";
// DateTime normal = DateTime.Now.ToUniversalTime(); // DateTime normal = DateTime.Now.ToUniversalTime();
// logger.LogInformation("Now - ToUniversalTime: {ticks}", normal.Ticks); // logger.LogInformation("Now - ToUniversalTime: {ticks}", normal.Ticks);
// DateTime utc1970DateTime = new(1970, 1, 1, 0, 0, 0, DateTimeKind.Utc); // DateTime utc1970DateTime = new(1970, 1, 1, 0, 0, 0, DateTimeKind.Utc);
@ -16,42 +17,7 @@
// console.log("dateText: " + dateText); // console.log("dateText: " + dateText);
// DateTime utcMeDateTime = new(1980, 1, 17, 0, 0, 0, DateTimeKind.Utc); // DateTime utcMeDateTime = new(1980, 1, 17, 0, 0, 0, DateTimeKind.Utc);
// long meTotalSeconds = (long)Math.Floor(fileInfo.LastWriteTime.ToUniversalTime().Subtract(utcMeDateTime).TotalSeconds); // long meTotalSeconds = (long)Math.Floor(fileInfo.LastWriteTime.ToUniversalTime().Subtract(utcMeDateTime).TotalSeconds);
const now = new Date();
const time = now.getTime();
const year = now.getFullYear();
const start = new Date(year, 0, 0);
const oneDay = 1000 * 60 * 60 * 24;
const timezoneOffset = now.getTimezoneOffset();
const diff = (now - start) + ((start.getTimezoneOffset() - timezoneOffset) * 60 * 1000);
const day = Math.floor(diff / oneDay);
console.log('Day of year: ' + day);
var season = year + "-";
if (day < 78)
season = season + "0.Winter";
else if (day < 124)
season = season + "1.Spring";
else if (day < 171)
season = season + "2.Spring";
else if (day < 217)
season = season + "3.Summer";
else if (day < 264)
season = season + "4.Summer";
else if (day < 309)
season = season + "5.Fall";
else if (day < 354)
season = season + "6.Fall";
else
season = season + "7.Winter";
let seconds = time.valueOf() + timezoneOffset;
let epoch = seconds * 10000;
let ticks = epoch + 621355968000000000;
let dateText = seconds + " - " + ticks + " - " + now.toString();
console.log("dateText: " + dateText);
console.log("end");
let original = "d:\\5-Other-Small\\Kanban\\Year-Season\\2025\\2025-0.Winter\\1737913505637";
let segments = original.split('\\');
let path = segments.slice(0, -3).join('\\') + '\\2021\\2021-0.Summer\\123';
console.log(path);
// epoch: 25201000
// ticks: 638665132483790000
// dateText: 638665132483790000 - Wed Nov 06 2024 10:55:58 GMT-0700 (Mountain Standard Time)
@ -59,3 +25,82 @@ console.log(path);
// 638665135325760000
// 638665136814890000
// utc1970DateTime: 621355968000000000
function getDateText() {
let result;
const now = new Date();
const time = now.getTime();
const year = now.getFullYear();
const start = new Date(year, 0, 0);
const oneDay = 1000 * 60 * 60 * 24;
const timezoneOffset = now.getTimezoneOffset();
const diff = (now - start) + ((start.getTimezoneOffset() - timezoneOffset) * 60 * 1000);
const day = Math.floor(diff / oneDay);
console.log('Day of year: ' + day);
var season = year + "-";
if (day < 78)
season = season + "0.Winter";
else if (day < 124)
season = season + "1.Spring";
else if (day < 171)
season = season + "2.Spring";
else if (day < 217)
season = season + "3.Summer";
else if (day < 264)
season = season + "4.Summer";
else if (day < 309)
season = season + "5.Fall";
else if (day < 354)
season = season + "6.Fall";
else
season = season + "7.Winter";
const seconds = time.valueOf() + timezoneOffset;
const epoch = seconds * 10000;
const ticks = epoch + 621355968000000000;
result = seconds + " - " + ticks + " - " + now.toString();
return result;
}
const dateText = getDateText();
console.log("dateText: " + dateText);
function getPath() {
let result;
const original = "d:\\5-Other-Small\\Kanban\\Year-Season\\2025\\2025-0.Winter\\1737913505637";
const segments = original.split('\\');
result = segments.slice(0, -3).join('\\') + '\\2021\\2021-0.Summer\\123';
return result;
}
const path = getPath();
console.log("path: " + path);
// https://tickstodatetime.wassupy.com/?ticks=638784250251441727
function getInfinityQS(value, sequence) {
let result;
if (sequence.length < 18)
result = [value];
else {
const now = new Date();
const epochHour = 36000000000;
const epochTicks = 621355968000000000;
const timezoneHourOffset = now.getTimezoneOffset() / 60;
const timezoneOffset = timezoneHourOffset * epochHour;
const milliseconds = (sequence - epochTicks + timezoneOffset) / 10000;
const flooredMilliseconds = Math.floor(milliseconds / (60 * 1000)) * (60 * 1000);
const ceiledMilliseconds = Math.ceil(milliseconds / (60 * 1000)) * (60 * 1000);
result = [(flooredMilliseconds / 1000), (ceiledMilliseconds / 1000)];
}
return result;
}
const date = '3/26/2025';
const infinityQS = 1742853453;
const sequence = '638784250251441727';
const values = getInfinityQS(date, sequence);
console.info("InfinityQS: " + values);
if (values[0] < infinityQS && values[1] > infinityQS)
console.info("InfinityQS: Pass");
else
console.warn("InfinityQS: Fail");

View File

@ -1,130 +0,0 @@
"use strict";
function getCollectionParseFloat(collection) {
let result = [];
let value;
for (let i = 0; i < collection.length; i++) {
value = parseFloat(collection[i]);
result.push(value);
}
return result;
}
function getSum(collection) {
let result = 0;
if (!collection || collection.length === 0) {
result = 0;
}
else {
for (let i = 0; i < collection.length; i++) {
result += collection[i];
}
}
return result;
}
function getAverage(collection) {
let result = null;
if (collection == null || collection.length === 0)
result = 0;
else {
let sum = getSum(collection);
result = sum / collection.length;
}
return result;
}
function getVariance(collection) {
let result = null;
if (collection == null || collection.length === 0)
result = null;
else {
let variance = 0;
let t = collection[0];
for (let i = 1; i < collection.length; i++) {
t += collection[i];
const diff = ((i + 1) * collection[i]) - t;
variance += diff * diff / ((i + 1.0) * i);
}
result = variance / (collection.length - 1);
}
return result;
}
function getNineEdgeMeanDelta(edge4mmRhoPoints, edge10mmRhoPoints) {
let result;
const nine4mmEdgeSum = getSum(edge4mmRhoPoints);
const nine10mmEdgeSum = getSum(edge10mmRhoPoints);
result = (nine4mmEdgeSum - nine10mmEdgeSum) / nine10mmEdgeSum * 100;
return result;
}
function getMax(collection) {
let result = collection[0];
for (let i = 1; i < collection.length; i++) {
if (collection[i] > result) {
result = collection[i];
}
}
return result;
}
function getMin(collection) {
let result = collection[0];
for (let i = 1; i < collection.length; i++) {
if (collection[i] < result) {
result = collection[i];
}
}
return result;
}
function getNineResRangePercent(criticalRhoPoints) {
let result;
const nineCriticalPointsAverage = getAverage(criticalRhoPoints);
// result = (Math.max(...criticalRhoPoints) - Math.min(...criticalRhoPoints)) / nineCriticalPointsAverage * 100;
// let max = criticalRhoPoints.reduce((a, b) => Math.max(a, b));
// let min = criticalRhoPoints.reduce((a, b) => Math.min(a, b));
// result = (max - min) / nineCriticalPointsAverage * 100;
// let max = criticalRhoPoints.sort((a, b) => b - a);
// let min = criticalRhoPoints.sort((a, b) => a - b);
// result = (max[0] - min[0]) / nineCriticalPointsAverage * 100;
let max = getMax(criticalRhoPoints);
let min = getMin(criticalRhoPoints);
result = (max - min) / nineCriticalPointsAverage * 100;
return result;
}
function getValue(allRhoAvg, index) {
let result = null;
if (index === 8) {
if (allRhoAvg != undefined && allRhoAvg.length > 1) {
let collection = allRhoAvg[0] === '|' ? allRhoAvg.substring(1).split('|') : allRhoAvg.split('|');
let collectionParseFloat = getCollectionParseFloat(collection);
result = getAverage(collectionParseFloat);
}
}
return result;
}
// 0 1 2 3 4 5 6 7 8
// 1 2 3 4 5 6 7 8 9
const allRhoAvg = getCollectionParseFloat('|2.648|3.076|2.877|2.747|2.821|2.765|2.669|2.814|2.876'.substring(1).split('|'));
const edge4mmRhoPoints = getCollectionParseFloat('|2.877|2.747|2.669|2.814'.substring(1).split('|'));
const edge10mmRhoPoints = getCollectionParseFloat('|3.076|2.821|2.765|2.876'.substring(1).split('|'));
const criticalRhoPoints = getCollectionParseFloat('|2.648|3.076|2.821|2.765|2.876'.substring(1).split('|'));
// 0 1 2 3 4 5 6 7 8
// 1 2 3 4 5 6 7 8 9
const allPhase = getCollectionParseFloat('|88.874|88.999|89.085|89.029|89.018|89.007|89.049|89.024|89.007'.substring(1).split('|'));
const criticalPhasePoints = getCollectionParseFloat('|88.874|88.999|89.018|89.007|89.007'.substring(1).split('|'));
const nineMean = getAverage(allRhoAvg);
const nine4mmEdgeMean = getAverage(edge4mmRhoPoints);
const nine10mmEdgeMean = getAverage(edge10mmRhoPoints);
const nineCriticalPointsAverage = getAverage(criticalRhoPoints);
const nineResRangePercent = getNineResRangePercent(criticalRhoPoints);
const nineCriticalPointsStdDev = Math.sqrt(getVariance(criticalRhoPoints));
const nineCriticalPointsPhaseAngleAverage = getAverage(criticalPhasePoints);
const nineEdgeMeanDelta = getNineEdgeMeanDelta(edge4mmRhoPoints, edge10mmRhoPoints);
console.log(nineCriticalPointsStdDev);
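getVariance above is a one-pass (incremental) sample-variance algorithm rather than the usual two-pass mean-then-deviations form; a C# cross-check that both give the same answer on the criticalRhoPoints values (class and method names are illustrative):

using System;
using System.Linq;

internal static class VarianceCheck {
    // Same incremental update as getVariance above: t accumulates the running
    // sum and diff is ((i+1)*x_i - t), yielding the sample variance in one pass.
    private static double OnePassSampleVariance(double[] values) {
        double variance = 0;
        double t = values[0];
        for (int i = 1; i < values.Length; i++) {
            t += values[i];
            double diff = ((i + 1) * values[i]) - t;
            variance += diff * diff / ((i + 1.0) * i);
        }
        return variance / (values.Length - 1);
    }

    private static void Main() {
        double[] values = [2.648, 3.076, 2.821, 2.765, 2.876]; // criticalRhoPoints above
        double mean = values.Average();
        double twoPass = values.Sum(v => (v - mean) * (v - mean)) / (values.Length - 1);
        // The two results agree up to floating-point noise.
        Console.WriteLine($"{OnePassSampleVariance(values)} ~ {twoPass}");
    }
}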

View File

@ -147,7 +147,7 @@ public class Worker : BackgroundService
_Logger.LogWarning("Must pass a argument!"); _Logger.LogWarning("Must pass a argument!");
CreateWindowsShortcut();
}
else if (Directory.Exists(_Args[0])) else if (Directory.Exists(_Args[0].Split('~')[0]) || Directory.Exists(_Args[0]))
{
if (!_ConsoleKeys.Contains(consoleKey))
{