Files
file-folder-helper/ADO2025/PI7/Helper-2025-10-22.cs

142 lines
6.6 KiB
C#

using Microsoft.Extensions.Logging;
using System.Collections.ObjectModel;
using System.Globalization;
using System.Text.Json;
using System.Text.Json.Serialization;
namespace File_Folder_Helper.ADO2025.PI7;
internal static partial class Helper20251022 {
/// <summary>
/// Deserialized shape of one source JSON file: a record count, the raw record
/// elements, and an optional footer/logistics metadata block.
/// </summary>
private record Root(int Count, JsonElement[] Records, ProcessDataStandardFormat? ProcessDataStandardFormat) {

    /// <summary>
    /// Reads and deserializes <paramref name="file"/> into a <see cref="Root"/>.
    /// Returns null when deserialization fails, when the
    /// <see cref="ProcessDataStandardFormat"/> block is missing, or when the
    /// file contains no records.
    /// </summary>
    internal static Root? Get(string file) {
        // File.ReadAllText never returns null, so the local needs no null annotation.
        string json = File.ReadAllText(file);
        Root? result = JsonSerializer.Deserialize(json, Helper20251022RootSourceGenerationContext.Default.Root);
        if (result is null || result.ProcessDataStandardFormat is null || result.Records.Length == 0) {
            return null;
        }
        return result;
    }
}
// Source-generated System.Text.Json context for Root (indented output, null
// properties omitted); the generator supplies the partial class body at build time.
[JsonSourceGenerationOptions(WriteIndented = true, DefaultIgnoreCondition = JsonIgnoreCondition.WhenWritingNull)]
[JsonSerializable(typeof(Root))]
private partial class Helper20251022RootSourceGenerationContext : JsonSerializerContext {
}
// Source-generated System.Text.Json context for re-parsing spliced record text as a
// JsonElement (compact output); the generator supplies the partial class body.
[JsonSourceGenerationOptions(WriteIndented = false, DefaultIgnoreCondition = JsonIgnoreCondition.WhenWritingNull)]
[JsonSerializable(typeof(JsonElement))]
private partial class Helper20251022JsonElementSourceGenerationContext : JsonSerializerContext {
}
// Optional per-file metadata merged into each record; the JSON property names
// ("Footer", "LOGISTICS_1") differ from the C# member names.
private record ProcessDataStandardFormat([property: JsonPropertyName("Footer")] JsonElement? Footer, [property: JsonPropertyName("LOGISTICS_1")] JsonElement? Logistics);
/// <summary>
/// Entry point: finds files matching the search pattern (args[2]) in the top-level
/// subdirectories of the source directory (args[0]), groups them by week of year,
/// and writes one combined JSON file per group to the destination directory
/// (args[3]). args[1] is only logged.
/// </summary>
/// <exception cref="ArgumentException">Fewer than four arguments were supplied.</exception>
internal static void CombineFiles(ILogger<Worker> logger, List<string> args) {
    // Fail fast with a clear message instead of an IndexOutOfRangeException below.
    if (args.Count < 4) {
        throw new ArgumentException("Expected at least four arguments!", nameof(args));
    }
    logger.LogInformation(args[0]);
    logger.LogInformation(args[1]);
    logger.LogInformation(args[2]);
    logger.LogInformation(args[3]);
    string searchPattern = args[2];
    // Only the portion before the first '~' is treated as the directory path.
    string sourceDirectory = Path.GetFullPath(args[0].Split('~')[0]);
    string destinationDirectory = Path.GetFullPath(args[3].Split('~')[0]);
    string[] directories = Directory.GetDirectories(sourceDirectory, "*", SearchOption.TopDirectoryOnly);
    ReadOnlyDictionary<string, List<string>> files = GetFiles(logger, searchPattern, directories.AsReadOnly());
    CombineFiles(logger, destinationDirectory, files);
}
/// <summary>
/// Scans each directory (top level only) for files matching
/// <paramref name="searchPattern"/> and buckets them by a key of the form
/// "{first-underscore-segment-of-name}_{year}_Week_{week-of-year}", where the week
/// is derived from the file's last write time using the en-US calendar
/// (FirstDay rule, Sunday week start). Files whose name has fewer than two
/// underscore segments are skipped with a warning.
/// </summary>
private static ReadOnlyDictionary<string, List<string>> GetFiles(ILogger<Worker> logger, string searchPattern, ReadOnlyCollection<string> directories) {
    Dictionary<string, List<string>> buckets = [];
    Calendar calendar = new CultureInfo("en-US").Calendar;
    foreach (string directory in directories) {
        IEnumerable<string> ordered = Directory
            .GetFiles(directory, searchPattern, SearchOption.TopDirectoryOnly)
            .OrderBy(f => f)
            .ThenBy(f => f.Length);
        foreach (string file in ordered) {
            FileInfo fileInfo = new(file);
            string[] nameSegments = fileInfo.Name.Split('_');
            if (nameSegments.Length < 2) {
                logger.LogWarning("{fileInfoName} does not have enough segments!", fileInfo.Name);
                continue;
            }
            int week = calendar.GetWeekOfYear(fileInfo.LastWriteTime, CalendarWeekRule.FirstDay, DayOfWeek.Sunday);
            string key = $"{nameSegments[0]}_{fileInfo.LastWriteTime.Year}_Week_{week:00}";
            if (!buckets.TryGetValue(key, out List<string>? bucket)) {
                bucket = [];
                buckets.Add(key, bucket);
            }
            bucket.Add(file);
        }
    }
    return buckets.AsReadOnly();
}
/// <summary>
/// Writes one combined JSON array file per group key into
/// <paramref name="destinationDirectory"/> (created if missing). Each record is
/// optionally merged with its source file's Footer / LOGISTICS_1 metadata before
/// being appended; destination files whose content is unchanged are left untouched.
/// </summary>
private static void CombineFiles(ILogger<Worker> logger, string destinationDirectory, ReadOnlyDictionary<string, List<string>> files) {
    List<string> jsonLines = [];
    if (!Directory.Exists(destinationDirectory)) {
        _ = Directory.CreateDirectory(destinationDirectory);
    }
    foreach (KeyValuePair<string, List<string>> keyValuePair in files) {
        jsonLines.Clear();
        string fileName = Path.Combine(destinationDirectory, $"{keyValuePair.Key}.json");
        foreach (string file in keyValuePair.Value) {
            Root? root = Root.Get(file);
            if (root is null) {
                // Name the actual variable and the offending file so the warning is actionable.
                logger.LogWarning("root is null for {file}", file);
                continue;
            }
            // Footer / logistics are per-file, not per-record: compute them once
            // here instead of recomputing on every record iteration.
            string? jsonFooter = root.ProcessDataStandardFormat?.Footer?.ToString();
            string? jsonLogistics = root.ProcessDataStandardFormat?.Logistics?.ToString();
            bool hasMetadata = !string.IsNullOrEmpty(jsonFooter) || !string.IsNullOrEmpty(jsonLogistics);
            foreach (JsonElement jsonElement in root.Records) {
                jsonLines.Add(hasMetadata
                    ? GetJsonWithMetadata(jsonElement, jsonFooter, jsonLogistics)
                    : jsonElement.ToString());
            }
        }
        if (jsonLines.Count == 0) {
            logger.LogWarning("jsonLines is empty");
            continue;
        }
        string json = string.Concat('[', Environment.NewLine, string.Join($",{Environment.NewLine}", jsonLines), Environment.NewLine, ']');
        string? jsonOld = File.Exists(fileName) ? File.ReadAllText(fileName) : null;
        if (!string.IsNullOrEmpty(jsonOld) && jsonOld == json) {
            logger.LogInformation("{fileName} has no changes", fileName);
            continue;
        }
        File.WriteAllText(fileName, json);
    }
}

/// <summary>
/// Splices the footer and/or logistics JSON object bodies into the record's JSON
/// object text, then round-trips the result through the serializer to normalize it.
/// Assumes the record and metadata elements render as JSON objects ('{'...'}')
/// — TODO(review): confirm against the source data.
/// </summary>
private static string GetJsonWithMetadata(JsonElement jsonElement, string? jsonFooter, string? jsonLogistics) {
    List<string> segments = [];
    // Drop the record's closing '}' so extra properties can be appended.
    segments.Add(jsonElement.ToString()[..^1]);
    if (!string.IsNullOrEmpty(jsonFooter)) {
        segments.Add(",");
        // Strip the metadata object's surrounding braces and inline its trimmed lines.
        segments.AddRange(jsonFooter[1..^1].Split(Environment.NewLine).Select(l => l.Trim()));
    }
    if (!string.IsNullOrEmpty(jsonLogistics)) {
        segments.Add(",");
        segments.AddRange(jsonLogistics[1..^1].Split(Environment.NewLine).Select(l => l.Trim()));
    }
    segments.Add("}");
    JsonElement jsonElementB = JsonSerializer.Deserialize(string.Join(' ', segments), Helper20251022JsonElementSourceGenerationContext.Default.JsonElement);
    return jsonElementB.ToString();
}
}