Moved to ADO2024 PI#

Ran SortCodeMethods
2024-10-11 09:15:32 -07:00
parent 3d114918e4
commit b6d8d4c52f
58 changed files with 25064 additions and 1423 deletions

View File

@@ -0,0 +1,56 @@
using Microsoft.Extensions.Logging;
using System.Diagnostics;
namespace File_Folder_Helper.ADO2024.PI2;
internal static partial class Helper20240429
{
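// For every file matching the search pattern under each top-level directory (except the
// system-volume folder), runs "git remote rm <remote>" in that file's directory.
// args: [0] root, [2] system-volume folder name, [3] search pattern,
// [4] comma-separated remote names to remove, [5] error text to suppress from git's standard error.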
internal static void GitConfigCleanUp(ILogger<Worker> logger, List<string> args)
{
string[] files;
Process? process;
string? directory;
string standardError;
string ignoreError = args[5];
string searchPattern = args[3];
ProcessStartInfo processStartInfo;
string root = Path.GetFullPath(args[0]);
string[] removeRemotes = args[4].Split(',');
string systemVolumeInformation = Path.Combine(root, args[2]);
string[] subDirectories = Directory.GetDirectories(root, "*", SearchOption.TopDirectoryOnly);
foreach (string subDirectory in subDirectories)
{
if (subDirectory == systemVolumeInformation)
continue;
files = Directory.GetFiles(subDirectory, searchPattern, SearchOption.AllDirectories);
foreach (string file in files)
{
directory = Path.GetDirectoryName(file);
if (directory is null)
continue;
foreach (string removeRemote in removeRemotes)
{
processStartInfo = new()
{
FileName = "git",
WorkingDirectory = directory,
Arguments = $"remote rm {removeRemote}",
RedirectStandardError = true
};
process = Process.Start(processStartInfo);
if (process is null)
continue;
#pragma warning disable IDE0058
process.WaitForExit(7000);
#pragma warning restore IDE0058
standardError = process.StandardError.ReadToEnd();
if (!standardError.Contains(ignoreError))
logger.LogInformation(standardError);
logger.LogInformation("for <{directoryName}> remote rm {removeRemote}", directory, removeRemote);
}
}
}
}
}

View File

@@ -0,0 +1,34 @@
using Microsoft.Extensions.Logging;
namespace File_Folder_Helper.ADO2024.PI2;
internal static partial class Helper20240510
{
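// Copies files matching the search pattern from the source directory into a destination folder
// under root, skipping files whose last-write time is unchanged, then prompts for a reactor name
// and creates "<source>/<machine name>/<reactor>" as a marker directory.
// args: [0] root, [2] destination folder name, [3] source directory, [4] search pattern.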
internal static void PullIconsForBLM(ILogger<Worker> logger, List<string> args)
{
string fileName;
FileInfo fileInfo;
string searchPattern = args[4];
string sourceDirectory = args[3];
string root = Path.GetFullPath(args[0]);
string createDirectory = Path.Combine(root, args[2]);
if (!Directory.Exists(createDirectory))
_ = Directory.CreateDirectory(createDirectory);
string[] files = Directory.GetFiles(sourceDirectory, searchPattern, SearchOption.TopDirectoryOnly);
foreach (string file in files)
{
fileName = Path.GetFileName(file);
fileInfo = new(Path.Combine(createDirectory, fileName));
if (fileInfo.Exists && fileInfo.LastWriteTime == new FileInfo(file).LastWriteTime)
continue;
File.Copy(file, fileInfo.FullName, overwrite: true);
logger.LogInformation("<{fileName}> copied", fileName);
}
logger.LogWarning("What reactor is this near?");
string? reactor = Console.ReadLine();
if (!string.IsNullOrEmpty(reactor))
_ = Directory.CreateDirectory(Path.Combine(sourceDirectory, Environment.MachineName, reactor));
}
}

View File

@@ -0,0 +1,39 @@
using File_Folder_Helper.Models;
using Microsoft.Extensions.Logging;
using System.Text.Json;
namespace File_Folder_Helper.ADO2024.PI2;
internal static partial class Helper20240513
{
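// Renames person-key directories under root, matching each directory name against a person's
// birth note in the people JSON and building the new name from the person's full name, the
// original key, and the person id.
// args: [0] root, [2] path to the people JSON (Dictionary<long, Person>).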
internal static void PersonKeyToName(ILogger<Worker> logger, List<string> args)
{
Person? person;
string directoryName;
string checkDirectory;
string root = Path.GetFullPath(args[0]);
string json = File.ReadAllText(args[2]);
Dictionary<string, Person> keyValuePairs = [];
string[] directories = Directory.GetDirectories(root, "*", SearchOption.TopDirectoryOnly);
Dictionary<long, Person> people = JsonSerializer.Deserialize(json, PeopleSourceGenerationContext.Default.DictionaryInt64Person) ?? throw new NullReferenceException();
foreach (KeyValuePair<long, Person> keyValuePair in people)
{
if (keyValuePair.Value.Birth?.Note is null)
continue;
keyValuePairs.Add(keyValuePair.Value.Birth.Note, keyValuePair.Value);
}
foreach (string directory in directories)
{
directoryName = Path.GetFileName(directory);
if (!keyValuePairs.TryGetValue(directoryName, out person) || person.Name?.ForwardSlashFull is null)
continue;
checkDirectory = Path.Combine(root, $"{person.Name.ForwardSlashFull.Replace('/', '-')}{directoryName}-{person.Id}");
if (Directory.Exists(checkDirectory))
continue;
Directory.Move(directory, checkDirectory);
logger.LogInformation("<{directory}> was moved", directory);
}
}
}

View File

@@ -0,0 +1,334 @@
using Microsoft.Extensions.Logging;
using System.Collections.ObjectModel;
using System.Text.Json;
using System.Text.Json.Serialization;
namespace File_Folder_Helper.ADO2024.PI2;
internal static partial class Helper20240517
{
public record ContentSignature([property: JsonPropertyName("contentSignature")] string Value,
[property: JsonPropertyName("contentSignatureType")] string ContentSignatureType);
[JsonSourceGenerationOptions(WriteIndented = true)]
[JsonSerializable(typeof(ContentSignature))]
public partial class ContentSignatureGenerationContext : JsonSerializerContext
{
}
public record Type([property: JsonPropertyName("count")] int Count,
[property: JsonPropertyName("match")] string Match,
[property: JsonPropertyName("searchData")] SearchData SearchData);
[JsonSourceGenerationOptions(WriteIndented = true)]
[JsonSerializable(typeof(Type))]
public partial class TypeGenerationContext : JsonSerializerContext
{
}
public record ImageAmazon([property: JsonPropertyName("colorSpace")] string ColorSpace,
[property: JsonPropertyName("dateTime")] DateTime DateTime,
[property: JsonPropertyName("dateTimeDigitized")] DateTime DateTimeDigitized,
[property: JsonPropertyName("dateTimeOriginal")] DateTime DateTimeOriginal,
[property: JsonPropertyName("exposureMode")] string ExposureMode,
[property: JsonPropertyName("exposureProgram")] string ExposureProgram,
[property: JsonPropertyName("exposureTime")] string ExposureTime,
[property: JsonPropertyName("flash")] string Flash,
[property: JsonPropertyName("focalLength")] string FocalLength,
[property: JsonPropertyName("height")] int Height,
[property: JsonPropertyName("make")] string Make,
[property: JsonPropertyName("meteringMode")] string MeteringMode,
[property: JsonPropertyName("model")] string Model,
[property: JsonPropertyName("orientation")] string Orientation,
[property: JsonPropertyName("resolutionUnit")] string ResolutionUnit,
[property: JsonPropertyName("sensingMethod")] string SensingMethod,
[property: JsonPropertyName("sharpness")] string Sharpness,
[property: JsonPropertyName("software")] string Software,
[property: JsonPropertyName("subSecTime")] string SubSecTime,
[property: JsonPropertyName("subSecTimeDigitized")] string SubSecTimeDigitized,
[property: JsonPropertyName("subSecTimeOriginal")] string SubSecTimeOriginal,
[property: JsonPropertyName("whiteBalance")] string WhiteBalance,
[property: JsonPropertyName("width")] int Width,
[property: JsonPropertyName("apertureValue")] string ApertureValue);
[JsonSourceGenerationOptions(WriteIndented = true)]
[JsonSerializable(typeof(ImageAmazon))]
public partial class ImageAmazonGenerationContext : JsonSerializerContext
{
}
public record ContentProperties([property: JsonPropertyName("contentDate")] DateTime ContentDate,
[property: JsonPropertyName("contentSignatures")] IReadOnlyList<ContentSignature> ContentSignatures,
[property: JsonPropertyName("contentType")] string ContentType,
[property: JsonPropertyName("extension")] string Extension,
[property: JsonPropertyName("image")] ImageAmazon Image,
[property: JsonPropertyName("md5")] string Md5,
[property: JsonPropertyName("size")] int Size,
[property: JsonPropertyName("version")] int Version);
[JsonSourceGenerationOptions(WriteIndented = true)]
[JsonSerializable(typeof(ContentProperties))]
public partial class ContentPropertiesGenerationContext : JsonSerializerContext
{
}
public record XAccntParentMap();
[JsonSourceGenerationOptions(WriteIndented = true)]
[JsonSerializable(typeof(XAccntParentMap))]
public partial class XAccntParentMapGenerationContext : JsonSerializerContext
{
}
public record Datum([property: JsonPropertyName("accessRuleIds")] IReadOnlyList<object> AccessRuleIds,
[property: JsonPropertyName("childAssetTypeInfo")] IReadOnlyList<object> ChildAssetTypeInfo,
[property: JsonPropertyName("contentProperties")] ContentProperties ContentProperties,
[property: JsonPropertyName("createdBy")] string CreatedBy,
[property: JsonPropertyName("createdDate")] DateTime CreatedDate,
[property: JsonPropertyName("eTagResponse")] string ETagResponse,
[property: JsonPropertyName("groupPermissions")] IReadOnlyList<object> GroupPermissions,
[property: JsonPropertyName("id")] string Id,
[property: JsonPropertyName("isRoot")] bool IsRoot,
[property: JsonPropertyName("isShared")] bool IsShared,
[property: JsonPropertyName("keywords")] IReadOnlyList<object> Keywords,
[property: JsonPropertyName("kind")] string Kind,
[property: JsonPropertyName("labels")] IReadOnlyList<object> Labels,
[property: JsonPropertyName("modifiedDate")] DateTime ModifiedDate,
[property: JsonPropertyName("name")] string Name,
[property: JsonPropertyName("ownerId")] string OwnerId,
[property: JsonPropertyName("parentMap")] ParentMap ParentMap,
[property: JsonPropertyName("parents")] IReadOnlyList<string> Parents,
[property: JsonPropertyName("protectedFolder")] bool ProtectedFolder,
[property: JsonPropertyName("restricted")] bool Restricted,
[property: JsonPropertyName("status")] string Status,
[property: JsonPropertyName("subKinds")] IReadOnlyList<object> SubKinds,
[property: JsonPropertyName("transforms")] IReadOnlyList<string> Transforms,
[property: JsonPropertyName("version")] int Version,
[property: JsonPropertyName("xAccntParentMap")] XAccntParentMap XAccntParentMap,
[property: JsonPropertyName("xAccntParents")] IReadOnlyList<object> XAccntParents,
[property: JsonPropertyName("match")] bool? Match);
[JsonSourceGenerationOptions(WriteIndented = true)]
[JsonSerializable(typeof(Datum))]
public partial class DatumGenerationContext : JsonSerializerContext
{
}
[JsonSourceGenerationOptions(WriteIndented = true)]
[JsonSerializable(typeof(Dictionary<string, Datum>))]
public partial class DictionaryDatumGenerationContext : JsonSerializerContext
{
}
public record LocationAmazon([property: JsonPropertyName("count")] int Count,
[property: JsonPropertyName("match")] string Match,
[property: JsonPropertyName("searchData")] SearchData SearchData);
[JsonSourceGenerationOptions(WriteIndented = true)]
[JsonSerializable(typeof(LocationAmazon))]
public partial class LocationAmazonGenerationContext : JsonSerializerContext
{
}
public record LocationInfo([property: JsonPropertyName("city")] string City,
[property: JsonPropertyName("country")] string Country,
[property: JsonPropertyName("countryIso3Code")] string CountryIso3Code,
[property: JsonPropertyName("state")] string State,
[property: JsonPropertyName("stateCode")] string StateCode);
[JsonSourceGenerationOptions(WriteIndented = true)]
[JsonSerializable(typeof(LocationInfo))]
public partial class LocationInfoGenerationContext : JsonSerializerContext
{
}
public record SearchData([property: JsonPropertyName("clusterName")] string ClusterName,
[property: JsonPropertyName("locationId")] string LocationId,
[property: JsonPropertyName("locationInfo")] LocationInfo LocationInfo,
[property: JsonPropertyName("thingId")] string ThingId);
[JsonSourceGenerationOptions(WriteIndented = true)]
[JsonSerializable(typeof(SearchData))]
public partial class SearchDataGenerationContext : JsonSerializerContext
{
}
public record AllPerson([property: JsonPropertyName("count")] int Count,
[property: JsonPropertyName("match")] string Match,
[property: JsonPropertyName("searchData")] SearchData SearchData);
[JsonSourceGenerationOptions(WriteIndented = true)]
[JsonSerializable(typeof(AllPerson))]
public partial class AllPersonGenerationContext : JsonSerializerContext
{
}
public record PersonAmazon([property: JsonPropertyName("count")] int Count,
[property: JsonPropertyName("match")] string Match,
[property: JsonPropertyName("searchData")] SearchData SearchData);
[JsonSourceGenerationOptions(WriteIndented = true)]
[JsonSerializable(typeof(PersonAmazon))]
public partial class PersonAmazonGenerationContext : JsonSerializerContext
{
}
public record ClusterId([property: JsonPropertyName("count")] int Count,
[property: JsonPropertyName("match")] string Match,
[property: JsonPropertyName("searchData")] SearchData SearchData);
[JsonSourceGenerationOptions(WriteIndented = true)]
[JsonSerializable(typeof(ClusterId))]
public partial class ClusterIdGenerationContext : JsonSerializerContext
{
}
public record Thing([property: JsonPropertyName("count")] int Count,
[property: JsonPropertyName("match")] string Match,
[property: JsonPropertyName("searchData")] SearchData SearchData);
[JsonSourceGenerationOptions(WriteIndented = true)]
[JsonSerializable(typeof(Thing))]
public partial class ThingGenerationContext : JsonSerializerContext
{
}
public record Time([property: JsonPropertyName("count")] int Count,
[property: JsonPropertyName("match")] string Match,
[property: JsonPropertyName("searchData")] SearchData SearchData);
[JsonSourceGenerationOptions(WriteIndented = true)]
[JsonSerializable(typeof(Time))]
public partial class TimeGenerationContext : JsonSerializerContext
{
}
public record ParentMap([property: JsonPropertyName("FOLDER")] IReadOnlyList<string> FOLDER);
public record Aggregations([property: JsonPropertyName("allPeople")] IReadOnlyList<AllPerson> AllPeople,
[property: JsonPropertyName("clusterId")] IReadOnlyList<ClusterId> ClusterId,
[property: JsonPropertyName("location")] IReadOnlyList<LocationAmazon> Location,
[property: JsonPropertyName("people")] IReadOnlyList<PersonAmazon> People,
[property: JsonPropertyName("things")] IReadOnlyList<Thing> Things,
[property: JsonPropertyName("time")] IReadOnlyList<Time> Time,
[property: JsonPropertyName("type")] IReadOnlyList<Type> Type);
[JsonSourceGenerationOptions(WriteIndented = true)]
[JsonSerializable(typeof(Aggregations))]
public partial class AggregationsGenerationContext : JsonSerializerContext
{
}
[JsonSourceGenerationOptions(WriteIndented = true)]
[JsonSerializable(typeof(ParentMap))]
public partial class ParentMapGenerationContext : JsonSerializerContext
{
}
public record RootAmazon([property: JsonPropertyName("aggregations")] Aggregations Aggregations,
[property: JsonPropertyName("count")] int Count,
[property: JsonPropertyName("data")] IReadOnlyList<Datum> Data);
[JsonSourceGenerationOptions(WriteIndented = true)]
[JsonSerializable(typeof(RootAmazon))]
public partial class RootAmazonGenerationContext : JsonSerializerContext
{
}
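// Parses one HAR capture (presumably of Amazon Photos search traffic): the first pass writes the
// aggregation result for each person cluster to "<destination>/<cluster name>/000) <person id>.json";
// the second pass writes the paged results ("offset=" requests) for that person alongside it.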
private static void SaveAmazon(string destination, string harFile)
{
string offset;
string personId;
RootAmazon amazon;
string? personName;
string personIdFile;
string personDirectory;
PersonAmazon personAmazon;
Dictionary<string, string> keyValuePairs = [];
ReadOnlyCollection<(string Url, string AggregationLine)> aggregationLines = GetAggregationLines(harFile);
foreach ((string url, string aggregationLine) in aggregationLines)
{
if (aggregationLine.Contains(",\"category\":\"allPeople\"}"))
continue;
amazon = JsonSerializer.Deserialize(aggregationLine, RootAmazonGenerationContext.Default.RootAmazon) ?? throw new Exception();
if (amazon.Aggregations?.People is null || amazon.Aggregations.People.Count < 1)
continue;
personAmazon = amazon.Aggregations.People[0];
if (!url.Contains(personAmazon.Match))
continue;
personDirectory = Path.Combine(destination, personAmazon.SearchData.ClusterName);
_ = Directory.CreateDirectory(personDirectory);
personIdFile = Path.Combine(personDirectory, $"000) {personAmazon.Match}.json");
_ = keyValuePairs.TryAdd(personAmazon.Match, personAmazon.SearchData.ClusterName);
SaveAmazon(amazon.Data, personIdFile);
}
foreach ((string url, string aggregationLine) in aggregationLines)
{
if (aggregationLine.Contains(",\"category\":\"allPeople\"}"))
continue;
amazon = JsonSerializer.Deserialize(aggregationLine, RootAmazonGenerationContext.Default.RootAmazon) ?? throw new Exception();
if (amazon.Aggregations?.People is not null && amazon.Aggregations.People.Count > 0)
continue;
if (!url.Contains("offset="))
continue;
offset = url.Split("offset=")[1];
if (!url.Contains("people%3A("))
continue;
personId = url.Split("people%3A(")[1].Split(')')[0];
if (!keyValuePairs.TryGetValue(personId, out personName))
continue;
personDirectory = Path.Combine(destination, personName);
_ = Directory.CreateDirectory(personDirectory);
personIdFile = Path.Combine(personDirectory, $"{offset.Split('&')[0]}) {personId}.json");
SaveAmazon(amazon.Data, personIdFile);
}
}
internal static void SaveAmazon(ILogger<Worker> logger, List<string> args)
{
string root = Path.GetFullPath(args[0]);
string destination = Path.GetFullPath(args[2]);
if (string.IsNullOrEmpty(root))
throw new NullReferenceException(nameof(root));
string[] harFiles = Directory.GetFiles(root, "*.har", SearchOption.TopDirectoryOnly);
foreach (string harFile in harFiles)
SaveAmazon(destination, harFile);
logger?.LogInformation("{harFiles} count", harFiles.Length);
}
private static void SaveAmazon(IReadOnlyList<Datum> data, string personIdFile)
{
string json;
Dictionary<string, Datum> keyValuePairs = [];
foreach (Datum datum in data)
_ = keyValuePairs.TryAdd(datum.Name.Split('.')[0], datum);
json = JsonSerializer.Serialize(keyValuePairs, DictionaryDatumGenerationContext.Default.DictionaryStringDatum);
File.WriteAllText(personIdFile, json);
}
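// Scans the HAR file line by line, pairing each response body that contains "aggregations"
// with the most recently seen "url" line, and un-escapes the embedded JSON payload.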
private static ReadOnlyCollection<(string, string)> GetAggregationLines(string harFile)
{
List<(string, string)> results = [];
if (!File.Exists(harFile))
throw new Exception();
string lastUrl = string.Empty;
string text = "\"text\": \"{";
string[] lines = File.ReadAllLines(harFile);
foreach (string line in lines)
{
if (line.Contains("\"url\": \""))
lastUrl = line;
if (!line.Contains(text))
continue;
if (!line.Contains("aggregations"))
continue;
if (lastUrl.Contains("search?asset=NONE"))
continue;
results.Add(new(lastUrl, line.Trim()[(text.Length - 1)..^1].Replace("\\\"", "\"")));
lastUrl = string.Empty;
}
return new(results);
}
}

View File

@@ -0,0 +1,35 @@
using File_Folder_Helper.Models;
using Microsoft.Extensions.Logging;
using System.Text.Json;
namespace File_Folder_Helper.ADO2024.PI2;
internal static partial class Helper20240518
{
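// Builds SQL insert statements (one per person with a birth note, name, and birth date) for an
// Immich person table and writes them to "<root>/<ticks>.sql".
// args: [0] root, [2] people JSON path, [3] table name, [4] comma-separated column names,
// [5] owner id, [6] birth-date format string.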
internal static void PersonKeyToImmichImport(ILogger<Worker> logger, List<string> args)
{
string name;
string birthDate;
string ownerId = args[5];
List<string> inserts = [];
string tableName = args[3];
string birthDateFormat = args[6];
string[] columns = args[4].Split(',');
string json = File.ReadAllText(args[2]);
string root = Path.GetFullPath(args[0]);
Dictionary<long, Person> people = JsonSerializer.Deserialize(json, PeopleSourceGenerationContext.Default.DictionaryInt64Person) ?? throw new NullReferenceException();
foreach (KeyValuePair<long, Person> keyValuePair in people)
{
if (keyValuePair.Value.Birth?.Note is null || keyValuePair.Value.Name?.ForwardSlashFull is null || keyValuePair.Value.Birth?.Date is null)
continue;
birthDate = keyValuePair.Value.Birth.Date.Value.ToString(birthDateFormat);
name = keyValuePair.Value.Name.ForwardSlashFull.Replace("/", string.Empty);
inserts.Add($"insert into \"{tableName}\" (\"{string.Join("\", \"", columns)}\") values ('{ownerId}', '{name}', '{birthDate}');");
}
string file = Path.Combine(root, $"{DateTime.Now.Ticks}.sql");
logger.LogInformation("<{file}> saved", file);
File.WriteAllLines(file, inserts);
}
}

View File

@@ -0,0 +1,80 @@
using File_Folder_Helper.Helpers;
using Microsoft.Extensions.Logging;
using System.Collections.ObjectModel;
namespace File_Folder_Helper.ADO2024.PI2;
internal static partial class Helper20240519
{
private record Record(long Length, long Ticks);
private static ReadOnlyDictionary<string, Record> GetKeyValuePairs(string source, string[] sourceFiles)
{
Dictionary<string, Record> results = [];
string key;
Record? record;
FileInfo fileInfo;
int sourceLength = source.Length;
foreach (string sourceFile in sourceFiles)
{
fileInfo = new(sourceFile);
key = sourceFile[sourceLength..];
if (results.TryGetValue(key, out record))
throw new NotSupportedException();
results.Add(key, new(fileInfo.Length, fileInfo.LastWriteTime.Ticks));
}
return new(results);
}
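// First renames top-level directories under root by applying the find/replace texts, then moves
// files from the compare tree into the target tree when a file with the same relative path, length,
// and last-write ticks exists in the source tree, and finally deletes empty directories under compare.
// args: [0] root, [2] comma-separated find texts, [3] replace text, [4] source, [5] compare, [6] target.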
internal static void FindReplaceDirectoryName(ILogger<Worker> logger, List<string> args)
{
string checkDirectory;
string replaceText = args[3];
string[] findTexts = args[2].Split(',');
string root = Path.GetFullPath(args[0]);
string[] directories = Directory.GetDirectories(root, "*", SearchOption.TopDirectoryOnly);
foreach (string directory in directories)
{
checkDirectory = directory;
foreach (string findText in findTexts)
checkDirectory = checkDirectory.Replace(findText, replaceText);
if (checkDirectory == directory)
continue;
if (Directory.Exists(checkDirectory))
continue;
logger.LogInformation("<{directory}> to <{checkDirectory}>", directory, checkDirectory);
Directory.Move(directory, checkDirectory);
}
string key;
Record? record;
string checkFile;
FileInfo fileInfo;
string target = Path.GetFullPath(args[6]);
string source = Path.GetFullPath(args[4]);
string compare = Path.GetFullPath(args[5]);
string[] sourceFiles = Directory.GetFiles(source, "*", SearchOption.AllDirectories);
ReadOnlyDictionary<string, Record> keyValuePairs = GetKeyValuePairs(source, sourceFiles);
string[] compareFiles = Directory.GetFiles(compare, "*", SearchOption.AllDirectories);
int compareLength = compare.Length;
foreach (string compareFile in compareFiles)
{
fileInfo = new(compareFile);
key = compareFile[compareLength..];
if (!keyValuePairs.TryGetValue(key, out record))
continue;
if (fileInfo.Length != record.Length || fileInfo.LastWriteTime.Ticks != record.Ticks)
continue;
checkFile = $"{target}{key}";
checkDirectory = Path.GetDirectoryName(checkFile) ?? throw new NotSupportedException();
if (!Directory.Exists(checkDirectory))
_ = Directory.CreateDirectory(checkDirectory);
if (File.Exists(checkFile))
continue;
logger.LogInformation("<{compareFile}> to <{checkFile}>", compareFile, checkFile);
File.Move(compareFile, checkFile);
}
HelperDeleteEmptyDirectories.DeleteEmptyDirectories(logger, compare);
}
}

View File

@@ -0,0 +1,160 @@
using File_Folder_Helper.Helpers;
using Microsoft.Extensions.Logging;
using System.Collections.ObjectModel;
using System.Text.Json;
using System.Text.Json.Serialization;
namespace File_Folder_Helper.ADO2024.PI2;
internal static partial class Helper20240520
{
private record RecordA(string Directory, string Extension, string SourceFile, Identifier Identifier);
private record RecordB(ReadOnlyDictionary<int, Identifier> IdTo, ReadOnlyDictionary<long, Identifier> LengthTo, ReadOnlyDictionary<string, Identifier> PaddedTo);
internal sealed record Identifier(int Id, long Length, string PaddedId, long Ticks)
{
public override string ToString()
{
string result = JsonSerializer.Serialize(this, IdentifierSourceGenerationContext.Default.Identifier);
return result;
}
}
[JsonSourceGenerationOptions(WriteIndented = true)]
[JsonSerializable(typeof(Identifier))]
internal partial class IdentifierSourceGenerationContext : JsonSerializerContext
{
}
[JsonSourceGenerationOptions(WriteIndented = true)]
[JsonSerializable(typeof(Identifier[]))]
internal partial class IdentifierCollectionSourceGenerationContext : JsonSerializerContext
{
}
private static RecordB GetRecordB(string jsonFile)
{
RecordB result;
Dictionary<int, Identifier> idTo = [];
Dictionary<long, Identifier> lengthTo = [];
Dictionary<string, Identifier> paddedTo = [];
string? json = !File.Exists(jsonFile) ? null : File.ReadAllText(jsonFile);
Identifier[]? identifiers = json is null ? null : JsonSerializer.Deserialize(json, IdentifierCollectionSourceGenerationContext.Default.IdentifierArray);
if (identifiers is null && !string.IsNullOrEmpty(jsonFile))
throw new Exception($"Invalid {nameof(jsonFile)}");
if (identifiers is not null)
{
foreach (Identifier identifier in identifiers)
{
idTo.Add(identifier.Id, identifier);
paddedTo.Add(identifier.PaddedId, identifier);
if (lengthTo.ContainsKey(identifier.Length))
{
_ = lengthTo.Remove(identifier.Length);
continue;
}
lengthTo.Add(identifier.Length, identifier);
}
}
result = new(new(idTo), new(lengthTo), new(paddedTo));
return result;
}
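// Renames or copies files based on a JSON list of Identifier records, matched by one of three options:
// "Padded" (file name equals the padded id; files are moved in a second pass, prefixed with the offset
// and index unless the offset equals the deterministic-hash-code marker), "Length" (match by file
// length; files are copied), or "Id" (file name parses to the id; files are moved).
// args: [0] source, [2] minimum name length, [3] deterministic-hash-code marker, [4] identifier JSON
// file, [5] offset, [6] destination, [7] option.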
internal static void IdentifierRename(ILogger<Worker> logger, List<string> args)
{
int id;
string key;
RecordA recordA;
string checkFile;
FileInfo fileInfo;
string checkDirectory;
Identifier? identifier;
string offset = args[5];
string option = args[7];
string jsonFile = args[4];
string fileNameWithoutExtension;
List<RecordA> recordACollection = [];
RecordB recordB = GetRecordB(jsonFile);
string deterministicHashCode = args[3];
string source = Path.GetFullPath(args[0]);
int intMinValueLength = int.Parse(args[2]);
string destination = Path.GetFullPath(args[6]);
bool isOffsetDeterministicHashCode = offset == deterministicHashCode;
string[] sourceFiles = Directory.GetFiles(source, "*", SearchOption.AllDirectories);
logger.LogInformation("Found {files}(s)", sourceFiles.Length);
int sourceLength = source.Length;
foreach (string sourceFile in sourceFiles)
{
fileInfo = new(sourceFile);
fileNameWithoutExtension = Path.GetFileNameWithoutExtension(fileInfo.FullName);
if (fileInfo.Directory is null)
throw new NotSupportedException();
if (option == "Padded")
{
if (fileNameWithoutExtension.Length < intMinValueLength)
continue;
key = fileNameWithoutExtension;
if (recordB.PaddedTo.TryGetValue(key, out identifier))
{
recordACollection.Add(new($"{destination}{fileInfo.Directory.FullName[sourceLength..]}", fileInfo.Extension, fileInfo.FullName, identifier));
continue;
}
}
if (option == "Length")
{
if (recordB.LengthTo.TryGetValue(fileInfo.Length, out identifier))
{
checkDirectory = $"{destination}{fileInfo.Directory.FullName[sourceLength..]}";
if (!Directory.Exists(checkDirectory))
_ = Directory.CreateDirectory(checkDirectory);
checkFile = Path.Combine(checkDirectory, $"{identifier.PaddedId}{fileInfo.Extension}");
if (File.Exists(checkFile))
continue;
File.Copy(fileInfo.FullName, checkFile);
logger.LogInformation("<{fileInfo.FullName}> was moved to <{checkFile}>", fileInfo.FullName, checkFile);
continue;
}
}
if (option == "Id")
{
if (int.TryParse(fileNameWithoutExtension, out id))
{
if (recordB.IdTo.TryGetValue(id, out identifier))
{
checkDirectory = $"{destination}{fileInfo.Directory.FullName[sourceLength..]}";
if (!Directory.Exists(checkDirectory))
_ = Directory.CreateDirectory(checkDirectory);
checkFile = Path.Combine(checkDirectory, $"{identifier.PaddedId}{fileInfo.Extension}");
if (File.Exists(checkFile))
continue;
File.Move(fileInfo.FullName, checkFile);
logger.LogInformation("<{fileInfo.FullName}> was moved to <{checkFile}>", fileInfo.FullName, checkFile);
continue;
}
}
}
}
if (option == "Padded")
{
if (!isOffsetDeterministicHashCode)
recordACollection = (from l in recordACollection orderby l.Identifier.Ticks select l).ToList();
for (int i = 0; i < recordACollection.Count; i++)
{
recordA = recordACollection[i];
if (!Directory.Exists(recordA.Directory))
_ = Directory.CreateDirectory(recordA.Directory);
checkFile = Path.Combine(recordA.Directory, isOffsetDeterministicHashCode ? $"{recordA.Identifier.PaddedId}{recordA.Extension}" : $"{offset + i}{recordA.Identifier.PaddedId}{recordA.Extension}");
if (File.Exists(checkFile))
continue;
File.Move(recordA.SourceFile, checkFile);
logger.LogInformation("<{recordA.SourceFile}> was moved to <{checkFile}>", recordA.SourceFile, checkFile);
}
}
HelperDeleteEmptyDirectories.DeleteEmptyDirectories(logger, source);
}
}

View File

@@ -0,0 +1,206 @@
using Microsoft.Extensions.Logging;
using System.Collections.ObjectModel;
namespace File_Folder_Helper.ADO2024.PI2;
internal static partial class Helper20240623
{
private record SubTaskLine(string Text, bool Done, long? Ticks, int? Line);
private record Record(int? CodeInsidersLine, string File, string[] Lines, int? StopLine, int? SubTasksLine);
private static List<Record> GetRecords(string sourceDirectory, string searchPattern, string codeInsiders, string subTasks)
{
List<Record> results = [];
int? stopLine;
string[] lines;
int? subTasksLine;
int? codeInsidersLine;
string[] files = Directory.GetFiles(sourceDirectory, searchPattern, SearchOption.AllDirectories);
foreach (string file in files)
{
stopLine = null;
subTasksLine = null;
codeInsidersLine = null;
lines = File.ReadAllLines(file);
for (int i = 0; i < lines.Length; i++)
{
if (lines[i].StartsWith(codeInsiders) && lines[i][^1] == '"')
{
if (lines.Length > i + 1 && lines[i + 1] == "```")
codeInsidersLine = i;
}
if (lines[i] != subTasks)
continue;
subTasksLine = i;
if (codeInsidersLine is null)
break;
if (lines.Length > i)
{
for (int j = i + 1; j < lines.Length; j++)
{
if (lines[j].Length > 0 && lines[j][0] == '#')
{
stopLine = j;
break;
}
}
}
stopLine ??= lines.Length;
break;
}
results.Add(new(codeInsidersLine, file, lines, stopLine, subTasksLine));
}
return results;
}
private static ReadOnlyCollection<SubTaskLine> GetSubTasks(string subTasks, string[] tasks, bool? foundDone, string fallbackLine, FileInfo fileInfo)
{
List<SubTaskLine> results = [];
string line;
bool doneValue;
string? h1 = null;
bool foundSubTasks = false;
int tasksZeroLength = tasks[0].Length;
string[] lines = File.ReadAllLines(fileInfo.FullName);
for (int i = 0; i < lines.Length; i++)
{
line = lines[i];
if (line.StartsWith("# "))
h1 = line[2..];
if (!foundSubTasks && line == subTasks)
foundSubTasks = true;
if (!foundSubTasks)
continue;
if (line.Length <= tasksZeroLength || !line.StartsWith(tasks[0]) || line[tasksZeroLength] is not ' ' and not 'x' || line[tasksZeroLength + 1] != ']')
continue;
doneValue = foundDone is not null && foundDone.Value;
results.Add(new($" {line}", doneValue, fileInfo.LastWriteTime.Ticks, i));
}
doneValue = foundDone is not null && foundDone.Value;
if (h1 is null)
results.Add(new(fallbackLine, doneValue, fileInfo.LastWriteTime.Ticks, Line: null));
else
results.Add(new(foundDone is null || !foundDone.Value ? $"- [ ] {fileInfo.LastWriteTime.Ticks} ~~~ {h1}" : $"- [x] {fileInfo.LastWriteTime.Ticks} ~~~ {h1}", doneValue, fileInfo.LastWriteTime.Ticks, Line: 0));
return new(results);
}
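// Rewrites the sub-task section of each markdown file that has both the sub-tasks heading and a
// code-insiders line pointing at a directory: collects task lines from the files linked in that
// directory's index file, orders them by done state and timestamp, and replaces the old section
// only when the lines differ.
// args: [0] source directory, [2] search pattern, [3] sub-tasks heading, [4] code-insiders command
// prefix, [5] index file name, [6] comma-separated task markers, [7] done heading, [8] directory filter.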
internal static void UpdateSubTasksInMarkdownFiles(ILogger<Worker> logger, List<string> args)
{
int lineCheck;
bool doneValue;
bool? foundDone;
FileInfo fileInfo;
string[] newLines;
string[] segments;
List<string> lines;
string fallbackLine;
string[] indexLines;
string checkDirectory;
string done = args[7];
List<string> indexFiles;
string subTasks = args[3];
List<string> oldLines = [];
string indexFile = args[5];
string searchPattern = args[2];
string directoryFilter = args[8];
string[] tasks = args[6].Split(',');
string codeInsiders = $"{args[4]} \"";
List<SubTaskLine> allSubTaskLines = [];
ReadOnlyCollection<SubTaskLine> subTaskLines;
string sourceDirectory = Path.GetFullPath(args[0]);
List<Record> records = GetRecords(sourceDirectory, searchPattern, codeInsiders, subTasks);
foreach (Record record in from l in records orderby l.SubTasksLine is null, l.CodeInsidersLine is null select l)
{
if (record.SubTasksLine is null)
continue;
if (record.CodeInsidersLine is not null)
logger.LogInformation("<{file}> has [{subTasks}]", Path.GetFileNameWithoutExtension(record.File), subTasks);
else
{
logger.LogWarning("<{file}> has [{subTasks}] but doesn't have [{codeInsiders}]!", Path.GetFileNameWithoutExtension(record.File), subTasks, codeInsiders);
continue;
}
if (record.StopLine is null)
continue;
checkDirectory = record.Lines[record.CodeInsidersLine.Value][codeInsiders.Length..^1];
if (!Directory.Exists(checkDirectory))
{
logger.LogError("<{checkDirectory}> doesn't exist", Path.GetFileName(checkDirectory));
continue;
}
indexFiles = Directory.GetFiles(checkDirectory, indexFile, SearchOption.AllDirectories).ToList();
if (indexFiles.Count != 1)
{
for (int i = indexFiles.Count - 1; i > -1; i--)
{
if (!indexFiles[i].Contains(directoryFilter, StringComparison.CurrentCultureIgnoreCase))
indexFiles.RemoveAt(i);
}
if (indexFiles.Count != 1)
{
logger.LogError("<{checkDirectory}> doesn't have a [{indexFile}]", Path.GetFileName(checkDirectory), indexFile);
continue;
}
}
foundDone = null;
oldLines.Clear();
allSubTaskLines.Clear();
indexLines = File.ReadAllLines(indexFiles[0]);
checkDirectory = Path.GetDirectoryName(indexFiles[0]) ?? throw new Exception();
for (int i = 0; i < indexLines.Length; i++)
{
if (indexLines[i] == done)
foundDone = true;
segments = indexLines[i].Split(tasks[1]);
doneValue = foundDone is not null && foundDone.Value;
if (segments.Length != 2 || !segments[0].StartsWith(tasks[0]))
continue;
fallbackLine = foundDone is null || !foundDone.Value ? $"- [ ] {segments[0][tasks[0].Length..]}" : $"- [x] {segments[0][tasks[0].Length..]}";
fileInfo = new(Path.GetFullPath(Path.Combine(checkDirectory, segments[1][..^1])));
if (!fileInfo.Exists)
{
allSubTaskLines.Add(new(fallbackLine, doneValue, Ticks: null, Line: null));
continue;
}
subTaskLines = GetSubTasks(subTasks, tasks, doneValue, fallbackLine, fileInfo);
for (int j = subTaskLines.Count - 1; j >= 0; j--)
allSubTaskLines.Add(subTaskLines[j]);
}
if (allSubTaskLines.Count == 0)
continue;
lineCheck = 0;
for (int i = record.SubTasksLine.Value + 1; i < record.StopLine.Value - 1; i++)
oldLines.Add(record.Lines[i]);
if (allSubTaskLines.Any(l => l.Ticks is null))
newLines = (from l in allSubTaskLines select l.Text).ToArray();
else
newLines = (from l in allSubTaskLines orderby l.Done descending, l.Ticks, l.Line select l.Text).ToArray();
if (newLines.Length == oldLines.Count)
{
for (int i = 0; i < newLines.Length; i++)
{
if (newLines[i] != record.Lines[record.SubTasksLine.Value + 1 + i])
continue;
lineCheck++;
}
if (lineCheck == newLines.Length)
continue;
}
checkDirectory = Path.Combine(checkDirectory, DateTime.Now.Ticks.ToString());
_ = Directory.CreateDirectory(checkDirectory);
Thread.Sleep(500);
Directory.Delete(checkDirectory);
lines = record.Lines.ToList();
for (int i = record.StopLine.Value - 1; i > record.SubTasksLine.Value + 1; i--)
lines.RemoveAt(i);
if (record.StopLine.Value == record.Lines.Length && lines[^1].Length == 0)
lines.RemoveAt(lines.Count - 1);
for (int i = 0; i < newLines.Length; i++)
lines.Insert(record.SubTasksLine.Value + 1 + i, newLines[i]);
lines.Insert(record.SubTasksLine.Value + 1, string.Empty);
File.WriteAllLines(record.File, lines);
}
}
}

View File

@@ -0,0 +1,88 @@
using Microsoft.Extensions.Logging;
namespace File_Folder_Helper.ADO2024.PI2;
internal static partial class Helper20240624
{
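// Moves each matched file up one directory level; when two files would collapse onto the same
// target path, verifies they have the same length and deletes the duplicate before moving.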
private static void MoveUpOneDirectory(ILogger<Worker> logger, string sourceDirectory, string[] files)
{
string? match;
string checkFile;
FileInfo fileInfoA;
FileInfo fileInfoB;
string? checkDirectory;
List<string> deleteFiles = [];
Dictionary<string, string> keyValuePairs = [];
foreach (string file in files)
{
checkDirectory = Path.GetDirectoryName(Path.GetDirectoryName(file)) ?? throw new NotSupportedException();
checkFile = Path.Combine(checkDirectory, Path.GetFileName(file));
if (File.Exists(checkFile))
throw new NotSupportedException();
if (keyValuePairs.TryGetValue(checkFile, out match))
{
fileInfoA = new(file);
fileInfoB = new(match);
if (fileInfoA.Length != fileInfoB.Length)
throw new NotSupportedException("Files don't match!");
logger.LogWarning("<{file}> already exists!", file);
deleteFiles.Add(file);
continue;
}
keyValuePairs.Add(checkFile, file);
}
foreach (string file in deleteFiles)
File.Delete(file);
foreach (KeyValuePair<string, string> keyValuePair in keyValuePairs)
File.Move(keyValuePair.Value, keyValuePair.Key);
Helpers.HelperDeleteEmptyDirectories.DeleteEmptyDirectories(logger, sourceDirectory);
}
private static void Distinct(List<string> args, string sourceDirectory, string[] deleteMatchingIdsDirectoryNames, string[] files)
{
string fileName;
string directory;
List<string> distinct = [];
List<string> duplicate = [];
string[] keepMatchingIdsDirectoryNames = args[4].Split(',');
if (deleteMatchingIdsDirectoryNames.Length != keepMatchingIdsDirectoryNames.Length)
throw new NotSupportedException("Check arg lengths!");
string[] keepMatchingIdsDirectories = keepMatchingIdsDirectoryNames.Select(l => Path.Combine(sourceDirectory, l)).ToArray();
string[] deleteMatchingIdsDirectories = deleteMatchingIdsDirectoryNames.Select(l => Path.Combine(sourceDirectory, l)).ToArray();
foreach (string file in files)
{
fileName = Path.GetFileName(file);
if (distinct.Contains(fileName))
{
duplicate.Add(fileName);
continue;
}
distinct.Add(Path.GetFileName(file));
}
foreach (string file in files)
{
fileName = Path.GetFileName(file);
directory = Path.GetDirectoryName(file) ?? throw new NotSupportedException();
if (!duplicate.Contains(fileName))
continue;
if (deleteMatchingIdsDirectories.Contains(directory))
File.Move(file, $"{file}.del");
else if (!keepMatchingIdsDirectories.Contains(directory))
throw new NotSupportedException($"Missing <{Path.GetFileName(directory)}> as a directory for {fileName}");
}
}
internal static void MoveUpOneDirectory(ILogger<Worker> logger, List<string> args)
{
string searchPattern = args[2];
string sourceDirectory = Path.GetFullPath(args[0]);
string[] deleteMatchingIdsDirectoryNames = args[3].Split(',');
string[] files = Directory.GetFiles(sourceDirectory, searchPattern, SearchOption.AllDirectories);
if (deleteMatchingIdsDirectoryNames.Length == 0)
MoveUpOneDirectory(logger, sourceDirectory, files);
else
Distinct(args, sourceDirectory, deleteMatchingIdsDirectoryNames, files);
}
}

View File

@@ -0,0 +1,79 @@
using Microsoft.Extensions.Logging;
using System.Collections.ObjectModel;
namespace File_Folder_Helper.ADO2024.PI2;
internal static partial class Helper20240711
{
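// For every git config file that still references the remote URL to remove (filtered by the
// preceding line), removes that remote, re-adds it under the new name with a URL built from the
// parent directory name, and pushes the branch when the config already mentions it.
// args: [0] source directory, [2] search pattern, [3] remote URL to remove, [4] filter for the
// preceding line, [5] remote name to add, [6] base URL to add, [7] extension (used when longer
// than two characters), [8] branch name.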
internal static void GitRemoteRemove(ILogger<Worker> logger, List<string> args)
{
string line;
string[] lines;
bool branchCheck;
bool remoteCheck;
string? directory;
string? parentDirectory;
string parentDirectoryName;
string branchName = args[8];
string searchPattern = args[2];
string remoteToAddUrl = args[6];
string remoteToRemove = args[3];
string remoteToAddName = args[5];
ReadOnlyCollection<string> messages;
string remoteToRemoveFilter = args[4];
string sourceDirectory = Path.GetFullPath(args[0]);
string lastRemoteSegment = remoteToRemove.Split('/')[^1];
string extension = args[7].Length > 2 ? args[7] : string.Empty;
string[] files = Directory.EnumerateFiles(sourceDirectory, searchPattern, new EnumerationOptions() { IgnoreInaccessible = true, RecurseSubdirectories = true, AttributesToSkip = FileAttributes.None }).ToArray();
logger.LogInformation("Found {files} file(s)", files.Length);
foreach (string file in files)
{
branchCheck = false;
remoteCheck = false;
lines = File.ReadAllLines(file);
for (int i = 0; i < lines.Length; i++)
{
line = lines[i];
if (!line.Contains(remoteToRemove))
continue;
if (!lines[i - 1].Contains(remoteToRemoveFilter))
continue;
remoteCheck = true;
break;
}
for (int i = 0; i < lines.Length; i++)
{
line = lines[i];
if (!line.Contains(branchName))
continue;
branchCheck = true;
break;
}
if (!remoteCheck)
continue;
directory = Path.GetDirectoryName(file);
if (directory is null)
continue;
parentDirectory = Path.GetDirectoryName(directory);
if (parentDirectory is null)
continue;
parentDirectoryName = Path.GetFileName(parentDirectory).ToLower();
messages = Helpers.HelperGit.RemoteRemove(parentDirectory, lastRemoteSegment, CancellationToken.None);
foreach (string message in messages)
logger.LogInformation("{function} => {parentDirectoryName}: [{message}]", nameof(Helpers.HelperGit.RemoteRemove), parentDirectoryName, message);
messages = Helpers.HelperGit.RemoteAdd(parentDirectory, remoteToAddName, $"{remoteToAddUrl}{parentDirectoryName}{extension}", CancellationToken.None);
foreach (string message in messages)
logger.LogInformation("{function} => {parentDirectoryName}: [{message}]", nameof(Helpers.HelperGit.RemoteAdd), parentDirectoryName, message);
if (!branchCheck)
continue;
try
{ messages = Helpers.HelperGit.PushBranch(parentDirectory, remoteToAddName, branchName, CancellationToken.None); }
catch (Exception ex)
{ messages = new([ex.Message]); }
foreach (string message in messages)
logger.LogInformation("{function} => {parentDirectoryName}: [{message}]", nameof(Helpers.HelperGit.PushBranch), parentDirectoryName, message);
}
}
}

View File

@@ -0,0 +1,84 @@
using File_Folder_Helper.Models;
using Microsoft.Extensions.Logging;
using System.Collections.ObjectModel;
using System.Text.Json;
namespace File_Folder_Helper.ADO2024.PI2;
internal static partial class Helper20240718
{
private static Host[] GetHosts(ILogger<Worker> logger, string file)
{
Host[] results;
string lines = File.ReadAllText(file);
string json = $"[{lines.Replace("\r\n", ",")}]";
logger.LogDebug(lines);
results = JsonSerializer.Deserialize(json, HostSourceGenerationContext.Default.HostArray) ?? throw new NullReferenceException();
return results;
}
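// Builds a Mermaid flowchart (as markdown) from the host records: one node per host, solid arrows
// ("-->") for the wired type and dashed ("-.->") otherwise, grouped into subgraphs for hosts,
// host-to-location links, and location-to-line links.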
private static ReadOnlyCollection<string> GetLines(Host[] hosts, string title, string wired)
{
List<string> results = [$"# {title}", string.Empty, "```mermaid", "flowchart TB", $" subgraph {title}"];
int id;
string check;
List<int> distinct = [];
string newLine = $"{Environment.NewLine} ";
foreach (Host host in hosts)
{
if (host.Id is null || host.Hyphen is null || host.Device is null || host.Name is null || host.Hyphen.Length != 17)
continue;
if (!int.TryParse(host.Id, out id))
throw new NotSupportedException($"{host.Id} is not a number");
if (distinct.Contains(id))
throw new NotSupportedException($"{id} is not distinct!");
distinct.Add(id);
results.Add($" {id}(fa:{host.Type}{newLine}{host.Colon}{newLine}{host.Hyphen}{newLine}{host.Device}{newLine}https://{host.Name}/)");
}
results.Add(" end");
results.Add($" subgraph {title}");
foreach (Host host in from l in hosts orderby l.Location, l.Type, l.Line select l)
{
if (host.Id is null || host.Hyphen is null || host.Device is null || host.Name is null || host.Hyphen.Length != 17)
continue;
if (!int.TryParse(host.Id, out id))
throw new NotSupportedException($"{host.Id} is not a number");
check = host.Type == wired ? "-->" : "-.->";
results.Add($" {id} {check} |{id}| {host.Location}{host.Type}{host.Line}");
}
results.Add(" end");
results.Add($" subgraph {title}");
foreach (Host host in from l in hosts orderby l.Line, l.Location, l.Type select l)
{
if (host.Id is null || host.Hyphen is null || host.Device is null || host.Name is null || host.Line is null || host.Hyphen.Length != 17)
continue;
if (!int.TryParse(host.Id, out id))
throw new NotSupportedException($"{host.Id} is not a number");
check = host.Type == wired ? "-->" : "-.->";
results.Add($" {host.Location}{host.Type}{host.Line} {check} Line{host.Line}");
}
results.Add(" end");
results.Add("```");
return results.AsReadOnly();
}
internal static void JsonToMarkdown(ILogger<Worker> logger, List<string> args)
{
Host[] hosts;
string title = args[3];
string wired = args[4];
string extension = args[5];
string searchPattern = args[2];
ReadOnlyCollection<string> lines;
string sourceDirectory = Path.GetFullPath(args[0]);
string[] files = Directory.GetFiles(sourceDirectory, searchPattern, SearchOption.TopDirectoryOnly);
foreach (string file in files)
{
hosts = GetHosts(logger, file);
lines = GetLines(hosts, title, wired);
File.WriteAllText($"{file}{extension}", string.Join(Environment.NewLine, lines));
}
}
}

View File

@@ -0,0 +1,206 @@
using File_Folder_Helper.Models;
using Microsoft.Extensions.Logging;
using System.Collections.ObjectModel;
using System.Diagnostics;
using System.Globalization;
using System.Text.Json;
namespace File_Folder_Helper.ADO2024.PI2;
internal static partial class Helper20240724
{
private record FileConnectorConfigurationSystem(string AlternateTargetFolder,
string FileAgeThreshold,
string[] SourceFileFilters,
string TargetFileLocation);
#pragma warning disable IDE0028, IDE0056, IDE0300, IDE0240, IDE0241
private static readonly HttpClient _HttpClient = new();
private static readonly string _StaticFileServer = "localhost:5054";
private static readonly FileConnectorConfigurationSystem _FileConnectorConfiguration = new(
"D:/Tmp/Phares/AlternateTargetFolder",
"000:20:00:01",
[".txt"],
"D:/Tmp/Phares/TargetFileLocation");
private static string[] GetValidDays(DateTime fileAgeThresholdDateTime)
{
DateTime dateTime = DateTime.Now;
return new string[] { dateTime.ToString("yyyy-MM-dd"), fileAgeThresholdDateTime.ToString("yyyy-MM-dd") }.Distinct().ToArray();
}
private static string[] GetValidWeeks(DateTime fileAgeThresholdDateTime)
{
DateTime dateTime = DateTime.Now;
Calendar calendar = new CultureInfo("en-US").Calendar;
string weekOfYear = $"{dateTime:yyyy}_Week_{calendar.GetWeekOfYear(dateTime, CalendarWeekRule.FirstDay, DayOfWeek.Sunday):00}";
string lastWeekOfYear = $"{fileAgeThresholdDateTime:yyyy}_Week_{calendar.GetWeekOfYear(fileAgeThresholdDateTime, CalendarWeekRule.FirstDay, DayOfWeek.Sunday):00}";
return new string[] { weekOfYear, lastWeekOfYear }.Distinct().ToArray();
}
private static ReadOnlyCollection<NginxFileSystem> GetDayNginxFileSystemCollection(DateTime fileAgeThresholdDateTime, string week, string day, string dayUrl, NginxFileSystem[] dayNginxFileSystemCollection)
{
List<NginxFileSystem> results = new();
DateTime dateTime;
string nginxFormat = "ddd, dd MMM yyyy HH:mm:ss zzz";
foreach (NginxFileSystem dayNginxFileSystem in dayNginxFileSystemCollection)
{
if (!DateTime.TryParseExact(dayNginxFileSystem.MTime.Replace("GMT", "+00:00"), nginxFormat, CultureInfo.InvariantCulture, DateTimeStyles.None, out dateTime))
continue;
if (dateTime < fileAgeThresholdDateTime)
continue;
results.Add(new(
Path.GetFullPath(Path.Combine(_FileConnectorConfiguration.TargetFileLocation, week, day, dayNginxFileSystem.Name)),
string.Concat(dayUrl, '/', dayNginxFileSystem.Name),
dateTime.ToString(),
dayNginxFileSystem.Size));
}
return results.AsReadOnly();
}
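// Parses the file-age threshold as colon-separated "days:hours:minutes:seconds"
// (e.g. "000:20:00:01") and subtracts it from the current time.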
private static DateTime GetFileAgeThresholdDateTime(string fileAgeThreshold)
{
DateTime result = DateTime.Now;
string[] segments = fileAgeThreshold.Split(':');
for (int i = 0; i < segments.Length; i++)
{
result = i switch
{
0 => result.AddDays(double.Parse(segments[i]) * -1),
1 => result.AddHours(double.Parse(segments[i]) * -1),
2 => result.AddMinutes(double.Parse(segments[i]) * -1),
3 => result.AddSeconds(double.Parse(segments[i]) * -1),
_ => throw new Exception(),
};
}
return result;
}
private static ReadOnlyCollection<NginxFileSystem> GetDayNginxFileSystemCollection(DateTime fileAgeThresholdDateTime)
{
#nullable enable
List<NginxFileSystem> results = new();
string dayUrl;
string dayJson;
string weekJson;
string checkWeek;
Task<HttpResponseMessage> task;
NginxFileSystem[]? dayNginxFileSystemCollection;
NginxFileSystem[]? weekNginxFileSystemCollection;
string[] days = GetValidDays(fileAgeThresholdDateTime);
string[] weeks = GetValidWeeks(fileAgeThresholdDateTime);
foreach (string week in weeks)
{
checkWeek = string.Concat("http://", _StaticFileServer, '/', week);
task = _HttpClient.GetAsync(checkWeek);
task.Wait();
if (!task.Result.IsSuccessStatusCode)
continue;
weekJson = _HttpClient.GetStringAsync(checkWeek).Result;
weekNginxFileSystemCollection = JsonSerializer.Deserialize(weekJson, NginxFileSystemCollectionSourceGenerationContext.Default.NginxFileSystemArray);
if (weekNginxFileSystemCollection is null)
continue;
foreach (NginxFileSystem weekNginxFileSystem in weekNginxFileSystemCollection)
{
if (!days.Contains(weekNginxFileSystem.Name))
continue;
dayUrl = string.Concat(checkWeek, '/', weekNginxFileSystem.Name);
dayJson = _HttpClient.GetStringAsync(dayUrl).Result;
dayNginxFileSystemCollection = JsonSerializer.Deserialize(dayJson, NginxFileSystemCollectionSourceGenerationContext.Default.NginxFileSystemArray);
if (dayNginxFileSystemCollection is null)
continue;
results.AddRange(GetDayNginxFileSystemCollection(fileAgeThresholdDateTime, week, weekNginxFileSystem.Name, dayUrl, dayNginxFileSystemCollection));
}
}
return results.AsReadOnly();
#nullable disable
}
private static ReadOnlyCollection<Tuple<DateTime, FileInfo, FileInfo, string>> GetPossible()
{
List<Tuple<DateTime, FileInfo, FileInfo, string>> results = new();
DateTime dateTime;
FileInfo targetFileInfo;
FileInfo alternateFileInfo;
DateTime fileAgeThresholdDateTime = GetFileAgeThresholdDateTime(_FileConnectorConfiguration.FileAgeThreshold);
ReadOnlyCollection<NginxFileSystem> dayNginxFileSystemCollection = GetDayNginxFileSystemCollection(fileAgeThresholdDateTime);
foreach (NginxFileSystem nginxFileSystem in dayNginxFileSystemCollection)
{
targetFileInfo = new FileInfo(nginxFileSystem.Name);
if (targetFileInfo.Directory is null)
continue;
if (!Directory.Exists(targetFileInfo.Directory.FullName))
_ = Directory.CreateDirectory(targetFileInfo.Directory.FullName);
if (!DateTime.TryParse(nginxFileSystem.MTime, out dateTime))
continue;
if (targetFileInfo.Exists && targetFileInfo.LastWriteTime == dateTime)
continue;
alternateFileInfo = new(Path.Combine(_FileConnectorConfiguration.AlternateTargetFolder, nginxFileSystem.Name));
results.Add(new(dateTime, targetFileInfo, alternateFileInfo, nginxFileSystem.Type));
}
return (from l in results orderby l.Item1 select l).ToList().AsReadOnly();
}
private static void Test()
{
#nullable enable
if (_HttpClient is null)
throw new Exception();
if (string.IsNullOrEmpty(_StaticFileServer))
throw new Exception();
if (!string.IsNullOrEmpty(_StaticFileServer))
{
ReadOnlyCollection<Tuple<DateTime, FileInfo, FileInfo, string>> possibleDownload = GetPossible();
if (possibleDownload.Count > 0)
{
string targetFileName = possibleDownload[0].Item4;
FileInfo targetFileInfo = possibleDownload[0].Item2;
FileInfo alternateFileInfo = possibleDownload[0].Item3;
DateTime matchNginxFileSystemDateTime = possibleDownload[0].Item1;
// if (alternateFileInfo.Exists)
// File.Delete(alternateFileInfo.FullName);
if (targetFileInfo.Exists)
File.Delete(targetFileInfo.FullName);
string targetJson = _HttpClient.GetStringAsync(targetFileName).Result;
File.WriteAllText(targetFileInfo.FullName, targetJson);
targetFileInfo.LastWriteTime = matchNginxFileSystemDateTime;
// File.Copy(targetFileInfo.FullName, alternateFileInfo.FullName);
File.AppendAllText(alternateFileInfo.FullName, targetJson);
}
}
#nullable disable
}
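// Mirrors every directory whose path contains the filter text to the same path with the filter
// replaced, using xcopy; an existing empty destination directory is deleted first so xcopy
// recreates it.
// args: [0] source directory, [2] search pattern, [3] filter text, [4] replacement text.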
internal static void CopyDirectories(ILogger<Worker> logger, List<string> args)
{
Test();
string[] files;
Process process;
string checkDirectory;
string filter = args[3];
string replaceWith = args[4];
string searchPattern = args[2];
string sourceDirectory = Path.GetFullPath(args[0]);
string[] foundDirectories = Directory.GetDirectories(sourceDirectory, searchPattern, SearchOption.AllDirectories);
logger.LogInformation($"Found {foundDirectories.Length} directories");
foreach (string foundDirectory in foundDirectories)
{
if (!foundDirectory.Contains(filter))
continue;
logger.LogDebug(foundDirectory);
checkDirectory = foundDirectory.Replace(filter, replaceWith);
if (Directory.Exists(checkDirectory))
{
files = Directory.GetFiles(checkDirectory, "*", SearchOption.AllDirectories);
if (files.Length > 0)
continue;
Directory.Delete(checkDirectory);
}
process = Process.Start("cmd.exe", $"/c xCopy \"{foundDirectory}\" \"{checkDirectory}\" /S /E /I /H /Y");
process.WaitForExit();
}
}
}

View File

@@ -0,0 +1,64 @@
using Microsoft.Extensions.Logging;
using System.Diagnostics;
namespace File_Folder_Helper.ADO2024.PI2;
internal static partial class Helper20240728
{
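// Runs the configured TLS client (args[4], presumably openssl) with "s_client -connect" against
// each subdomain, extracts the PEM block between the begin/end markers, writes it to
// "<subdomain>.<domain>.cert", and records an import command line per certificate in a
// ticks-named text file.
// args: [0] source directory, [2] domain, [3] comma-separated subdomains, [4] client executable,
// [5] wait-for-exit milliseconds, [6] begin-certificate marker, [7] end-certificate marker,
// [8] log/import command segment, [9] certificate store name.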
internal static void DownloadSslCertificates(ILogger<Worker> logger, List<string> args)
{
string file;
Process? process;
string[] segments;
string standardError;
string standardOutput;
string argumentSegment;
string store = args[9];
string domain = args[2];
List<string> lines = [];
string logSegment = args[8];
string endCertificate = args[7];
string beginCertificate = args[6];
int waitForExit = int.Parse(args[5]);
string[] subdomains = args[3].Split(',');
string sourceDirectory = Path.GetFullPath(args[0]);
ProcessStartInfo processStartInfo = new()
{
CreateNoWindow = true,
RedirectStandardError = true,
RedirectStandardOutput = true,
UseShellExecute = false,
FileName = args[4],
WorkingDirectory = sourceDirectory
};
foreach (string subdomain in subdomains)
{
argumentSegment = $"{subdomain}.{domain}:443 -servername {subdomain}.{domain}";
processStartInfo.Arguments = $"s_client -connect {argumentSegment}";
process = Process.Start(processStartInfo);
if (process is null)
continue;
_ = process.WaitForExit(waitForExit);
process.Kill(entireProcessTree: true);
standardOutput = process.StandardOutput.ReadToEnd();
if (!standardOutput.Contains(beginCertificate) || !standardOutput.Contains(endCertificate))
{
standardError = process.StandardError.ReadToEnd();
logger.LogWarning($"Error: {subdomain}{Environment.NewLine}{standardOutput}{Environment.NewLine}{standardError}");
continue;
}
segments = standardOutput.Split(beginCertificate);
if (segments.Length != 2)
break;
segments = segments[1].Split(endCertificate);
if (segments.Length != 2)
break;
lines.Add($"{logSegment} \"{store}\" {subdomain}.{domain}.cert");
file = Path.Combine(sourceDirectory, $"{subdomain}.{domain}.cert");
File.WriteAllText(file, $"{beginCertificate}{segments[0]}{endCertificate}{Environment.NewLine}");
}
File.WriteAllLines(Path.Combine(sourceDirectory, $"{DateTime.Now.Ticks}.txt"), lines);
}
}