VSCodeTask

Download SSL Certificates
Sort Subtasks of Markdown files
Test BioRad
EAF CopyDirectories
json to Markdown
Sort Day 2024 Q2
GitRemoteRemove
Handle directoryInfo.LinkTarget better
Remove StartAt
Handle directoryInfo.LinkTarget
This commit is contained in:
2024-08-02 13:32:23 -07:00
parent 1cd20fa08b
commit 2923e86a94
23 changed files with 1211 additions and 731 deletions


@@ -0,0 +1,79 @@
using Microsoft.Extensions.Logging;

using System.Collections.ObjectModel;

namespace File_Folder_Helper.Day;

internal static partial class Helper20240711
{

    internal static void GitRemoteRemove(ILogger<Worker> logger, List<string> args)
    {
        string line;
        string[] lines;
        bool branchCheck;
        bool remoteCheck;
        string? directory;
        string? parentDirectory;
        string parentDirectoryName;
        string branchName = args[8];
        string searchPattern = args[2];
        string remoteToAddUrl = args[6];
        string remoteToRemove = args[3];
        string remoteToAddName = args[5];
        ReadOnlyCollection<string> messages;
        string remoteToRemoveFilter = args[4];
        string sourceDirectory = Path.GetFullPath(args[0]);
        string lastRemoteSegment = remoteToRemove.Split('/')[^1];
        string extension = args[7].Length > 2 ? args[7] : string.Empty;
        string[] files = Directory.EnumerateFiles(sourceDirectory, searchPattern, new EnumerationOptions() { IgnoreInaccessible = true, RecurseSubdirectories = true, AttributesToSkip = FileAttributes.None }).ToArray();
        logger.LogInformation("Found {files} file(s)", files.Length);
        foreach (string file in files)
        {
            branchCheck = false;
            remoteCheck = false;
            lines = File.ReadAllLines(file);
            // Does this config reference the remote to remove, with the preceding line matching the filter?
            for (int i = 0; i < lines.Length; i++)
            {
                line = lines[i];
                if (!line.Contains(remoteToRemove))
                    continue;
                if (i == 0 || !lines[i - 1].Contains(remoteToRemoveFilter))
                    continue;
                remoteCheck = true;
                break;
            }
            // Does this config reference the branch that should be pushed afterwards?
            for (int i = 0; i < lines.Length; i++)
            {
                line = lines[i];
                if (!line.Contains(branchName))
                    continue;
                branchCheck = true;
                break;
            }
            if (!remoteCheck)
                continue;
            directory = Path.GetDirectoryName(file);
            if (directory is null)
                continue;
            parentDirectory = Path.GetDirectoryName(directory);
            if (parentDirectory is null)
                continue;
            parentDirectoryName = Path.GetFileName(parentDirectory).ToLower();
            // Swap the matched remote for the new one, then push the branch when it is referenced.
            messages = Helpers.HelperGit.RemoteRemove(parentDirectory, lastRemoteSegment, CancellationToken.None);
            foreach (string message in messages)
                logger.LogInformation("{function} => {parentDirectoryName}: [{message}]", nameof(Helpers.HelperGit.RemoteRemove), parentDirectoryName, message);
            messages = Helpers.HelperGit.RemoteAdd(parentDirectory, remoteToAddName, $"{remoteToAddUrl}{parentDirectoryName}{extension}", CancellationToken.None);
            foreach (string message in messages)
                logger.LogInformation("{function} => {parentDirectoryName}: [{message}]", nameof(Helpers.HelperGit.RemoteAdd), parentDirectoryName, message);
            if (!branchCheck)
                continue;
            try
            { messages = Helpers.HelperGit.PushBranch(parentDirectory, remoteToAddName, branchName, CancellationToken.None); }
            catch (Exception ex)
            { messages = new([ex.Message]); }
            foreach (string message in messages)
                logger.LogInformation("{function} => {parentDirectoryName}: [{message}]", nameof(Helpers.HelperGit.PushBranch), parentDirectoryName, message);
        }
    }

}
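
GitRemoteRemove reads its configuration positionally from args. The sketch below maps each index to its role; every value is an illustrative placeholder (the real task definitions are not part of this diff).

// Hypothetical invocation; all values are placeholders, not the project's actual configuration.
List<string> args = new()
{
    "D:/Source",             // [0] source directory scanned recursively for matching files
    "unused",                // [1] not read by GitRemoteRemove
    "config",                // [2] search pattern for Directory.EnumerateFiles
    "https://old-host/user", // [3] remote URL fragment to match; its last '/' segment becomes the remote name to remove
    "[remote",               // [4] text the preceding config line must contain (e.g. a section header)
    "origin",                // [5] name of the remote to add
    "https://new-host/user/",// [6] base URL; the repository folder name and extension are appended
    ".git",                  // [7] extension appended to the new remote URL (ignored when length <= 2)
    "main",                  // [8] branch pushed to the new remote when it appears in the config
};
// Helper20240711.GitRemoteRemove(logger, args);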


@@ -0,0 +1,84 @@
using File_Folder_Helper.Models;

using Microsoft.Extensions.Logging;

using System.Collections.ObjectModel;
using System.Text.Json;

namespace File_Folder_Helper.Day;

internal static partial class Helper20240718
{

    private static Host[] GetHosts(ILogger<Worker> logger, string file)
    {
        Host[] results;
        // The source file holds one JSON object per line; wrap the lines in brackets to deserialize them as an array.
        string lines = File.ReadAllText(file);
        string json = $"[{lines.Replace("\r\n", ",")}]";
        logger.LogDebug(lines);
        results = JsonSerializer.Deserialize(json, HostSourceGenerationContext.Default.HostArray) ?? throw new NullReferenceException();
        return results;
    }

    private static ReadOnlyCollection<string> GetLines(Host[] hosts, string title, string wired)
    {
        List<string> results = [$"# {title}", string.Empty, "```mermaid", "flowchart TB", $" subgraph {title}"];
        int id;
        string check;
        List<int> distinct = [];
        string newLine = $"{Environment.NewLine} ";
        // First subgraph: one node per host.
        foreach (Host host in hosts)
        {
            if (host.Id is null || host.Hyphen is null || host.Device is null || host.Name is null || host.Hyphen.Length != 17)
                continue;
            if (!int.TryParse(host.Id, out id))
                throw new NotSupportedException($"{host.Id} is not a number");
            if (distinct.Contains(id))
                throw new NotSupportedException($"{id} is not distinct!");
            distinct.Add(id);
            results.Add($" {id}(fa:{host.Type}{newLine}{host.Colon}{newLine}{host.Hyphen}{newLine}{host.Device}{newLine}https://{host.Name}/)");
        }
        results.Add(" end");
        results.Add($" subgraph {title}");
        // Second subgraph: edge from each host to its location/type/line group (solid arrow when wired, dotted otherwise).
        foreach (Host host in from l in hosts orderby l.Location, l.Type, l.Line select l)
        {
            if (host.Id is null || host.Hyphen is null || host.Device is null || host.Name is null || host.Hyphen.Length != 17)
                continue;
            if (!int.TryParse(host.Id, out id))
                throw new NotSupportedException($"{host.Id} is not a number");
            check = host.Type == wired ? "-->" : "-.->";
            results.Add($" {id} {check} |{id}| {host.Location}{host.Type}{host.Line}");
        }
        results.Add(" end");
        results.Add($" subgraph {title}");
        // Third subgraph: edge from each location/type/line group to its physical line.
        foreach (Host host in from l in hosts orderby l.Line, l.Location, l.Type select l)
        {
            if (host.Id is null || host.Hyphen is null || host.Device is null || host.Name is null || host.Line is null || host.Hyphen.Length != 17)
                continue;
            if (!int.TryParse(host.Id, out id))
                throw new NotSupportedException($"{host.Id} is not a number");
            check = host.Type == wired ? "-->" : "-.->";
            results.Add($" {host.Location}{host.Type}{host.Line} {check} Line{host.Line}");
        }
        results.Add(" end");
        results.Add("```");
        return results.AsReadOnly();
    }

    internal static void JsonToMarkdown(ILogger<Worker> logger, List<string> args)
    {
        Host[] hosts;
        string title = args[3];
        string wired = args[4];
        string extension = args[5];
        string searchPattern = args[2];
        ReadOnlyCollection<string> lines;
        string sourceDirectory = Path.GetFullPath(args[0]);
        string[] files = Directory.GetFiles(sourceDirectory, searchPattern, SearchOption.TopDirectoryOnly);
        foreach (string file in files)
        {
            hosts = GetHosts(logger, file);
            lines = GetLines(hosts, title, wired);
            File.WriteAllText($"{file}{extension}", string.Join(Environment.NewLine, lines));
        }
    }

}
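
GetHosts expects one JSON object per line and joins them into an array before deserializing. The Host model itself is not part of this diff; a minimal sketch of the shape the code appears to rely on, with an illustrative input line, could look like this (record name and property types are assumptions derived from usage):

// Illustrative only: the real Host record and HostSourceGenerationContext live in File_Folder_Helper.Models.
internal record Host(string? Id, string? Type, string? Colon, string? Hyphen, string? Device, string? Name, string? Location, string? Line);

// One object per line in the source file, e.g.:
// {"Id":"1","Type":"server","Colon":"00:11:22:33:44:55","Hyphen":"00-11-22-33-44-55","Device":"nas","Name":"nas.example.com","Location":"Office","Line":"1"}
// GetHosts wraps the lines in brackets ("[{...},{...}]") so they deserialize as Host[].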


@@ -0,0 +1,206 @@
using File_Folder_Helper.Models;

using Microsoft.Extensions.Logging;

using System.Collections.ObjectModel;
using System.Diagnostics;
using System.Globalization;
using System.Text.Json;

namespace File_Folder_Helper.Day;

internal static partial class Helper20240724
{

    private record FileConnectorConfigurationSystem(string AlternateTargetFolder,
                                                    string FileAgeThreshold,
                                                    string[] SourceFileFilters,
                                                    string TargetFileLocation);

#pragma warning disable IDE0028, IDE0056, IDE0300, IDE0240, IDE0241

    private static readonly HttpClient _HttpClient = new();
    private static readonly string _StaticFileServer = "localhost:5054";
    private static readonly FileConnectorConfigurationSystem _FileConnectorConfiguration = new(
        "D:/Tmp/Phares/AlternateTargetFolder",
        "000:20:00:01",
        [".txt"],
        "D:/Tmp/Phares/TargetFileLocation");

    private static DateTime GetFileAgeThresholdDateTime(string fileAgeThreshold)
    {
        // The threshold is "days:hours:minutes:seconds"; each segment is subtracted from the current time.
        DateTime result = DateTime.Now;
        string[] segments = fileAgeThreshold.Split(':');
        for (int i = 0; i < segments.Length; i++)
        {
            result = i switch
            {
                0 => result.AddDays(double.Parse(segments[i]) * -1),
                1 => result.AddHours(double.Parse(segments[i]) * -1),
                2 => result.AddMinutes(double.Parse(segments[i]) * -1),
                3 => result.AddSeconds(double.Parse(segments[i]) * -1),
                _ => throw new Exception(),
            };
        }
        return result;
    }

    private static string[] GetValidWeeks(DateTime fileAgeThresholdDateTime)
    {
        DateTime dateTime = DateTime.Now;
        Calendar calendar = new CultureInfo("en-US").Calendar;
        string weekOfYear = $"{dateTime:yyyy}_Week_{calendar.GetWeekOfYear(dateTime, CalendarWeekRule.FirstDay, DayOfWeek.Sunday):00}";
        string lastWeekOfYear = $"{fileAgeThresholdDateTime:yyyy}_Week_{calendar.GetWeekOfYear(fileAgeThresholdDateTime, CalendarWeekRule.FirstDay, DayOfWeek.Sunday):00}";
        return new string[] { weekOfYear, lastWeekOfYear }.Distinct().ToArray();
    }

    private static string[] GetValidDays(DateTime fileAgeThresholdDateTime)
    {
        DateTime dateTime = DateTime.Now;
        return new string[] { dateTime.ToString("yyyy-MM-dd"), fileAgeThresholdDateTime.ToString("yyyy-MM-dd") }.Distinct().ToArray();
    }

    private static ReadOnlyCollection<NginxFileSystem> GetDayNginxFileSystemCollection(DateTime fileAgeThresholdDateTime, string week, string day, string dayUrl, NginxFileSystem[] dayNginxFileSystemCollection)
    {
        List<NginxFileSystem> results = new();
        DateTime dateTime;
        string nginxFormat = "ddd, dd MMM yyyy HH:mm:ss zzz";
        foreach (NginxFileSystem dayNginxFileSystem in dayNginxFileSystemCollection)
        {
            if (!DateTime.TryParseExact(dayNginxFileSystem.MTime.Replace("GMT", "+00:00"), nginxFormat, CultureInfo.InvariantCulture, DateTimeStyles.None, out dateTime))
                continue;
            if (dateTime < fileAgeThresholdDateTime)
                continue;
            // Re-purpose the record: Name carries the local target path and Type carries the download URL.
            results.Add(new(
                Path.GetFullPath(Path.Combine(_FileConnectorConfiguration.TargetFileLocation, week, day, dayNginxFileSystem.Name)),
                string.Concat(dayUrl, '/', dayNginxFileSystem.Name),
                dateTime.ToString(),
                dayNginxFileSystem.Size));
        }
        return results.AsReadOnly();
    }

    private static ReadOnlyCollection<NginxFileSystem> GetDayNginxFileSystemCollection(DateTime fileAgeThresholdDateTime)
    {
#nullable enable
        List<NginxFileSystem> results = new();
        string dayUrl;
        string dayJson;
        string weekJson;
        string checkWeek;
        Task<HttpResponseMessage> task;
        NginxFileSystem[]? dayNginxFileSystemCollection;
        NginxFileSystem[]? weekNginxFileSystemCollection;
        string[] days = GetValidDays(fileAgeThresholdDateTime);
        string[] weeks = GetValidWeeks(fileAgeThresholdDateTime);
        foreach (string week in weeks)
        {
            checkWeek = string.Concat("http://", _StaticFileServer, '/', week);
            task = _HttpClient.GetAsync(checkWeek);
            task.Wait();
            if (!task.Result.IsSuccessStatusCode)
                continue;
            weekJson = _HttpClient.GetStringAsync(checkWeek).Result;
            weekNginxFileSystemCollection = JsonSerializer.Deserialize(weekJson, NginxFileSystemCollectionSourceGenerationContext.Default.NginxFileSystemArray);
            if (weekNginxFileSystemCollection is null)
                continue;
            foreach (NginxFileSystem weekNginxFileSystem in weekNginxFileSystemCollection)
            {
                // Only descend into entries whose name matches one of the valid day folders.
                if (!(from l in days where weekNginxFileSystem.Name == l select false).Any())
                    continue;
                dayUrl = string.Concat(checkWeek, '/', weekNginxFileSystem.Name);
                dayJson = _HttpClient.GetStringAsync(dayUrl).Result;
                dayNginxFileSystemCollection = JsonSerializer.Deserialize(dayJson, NginxFileSystemCollectionSourceGenerationContext.Default.NginxFileSystemArray);
                if (dayNginxFileSystemCollection is null)
                    continue;
                results.AddRange(GetDayNginxFileSystemCollection(fileAgeThresholdDateTime, week, weekNginxFileSystem.Name, dayUrl, dayNginxFileSystemCollection));
            }
        }
        return results.AsReadOnly();
#nullable disable
    }
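
    // Note (illustrative, not part of this commit): NginxFileSystem lives in File_Folder_Helper.Models; the code
    // above only relies on it exposing Name, Type, MTime and Size, which matches the fields a directory listing
    // served with nginx's "autoindex on; autoindex_format json;" returns, e.g.
    //     [{ "name":"2024-08-02", "type":"directory", "mtime":"Fri, 02 Aug 2024 20:00:00 GMT" }]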

    private static ReadOnlyCollection<Tuple<DateTime, FileInfo, FileInfo, string>> GetPossible()
    {
        List<Tuple<DateTime, FileInfo, FileInfo, string>> results = new();
        DateTime dateTime;
        FileInfo targetFileInfo;
        FileInfo alternateFileInfo;
        DateTime fileAgeThresholdDateTime = GetFileAgeThresholdDateTime(_FileConnectorConfiguration.FileAgeThreshold);
        ReadOnlyCollection<NginxFileSystem> dayNginxFileSystemCollection = GetDayNginxFileSystemCollection(fileAgeThresholdDateTime);
        foreach (NginxFileSystem nginxFileSystem in dayNginxFileSystemCollection)
        {
            // Name holds the local target path and Type holds the download URL (see GetDayNginxFileSystemCollection).
            targetFileInfo = new FileInfo(nginxFileSystem.Name);
            if (targetFileInfo.Directory is null)
                continue;
            if (!Directory.Exists(targetFileInfo.Directory.FullName))
                _ = Directory.CreateDirectory(targetFileInfo.Directory.FullName);
            if (!DateTime.TryParse(nginxFileSystem.MTime, out dateTime))
                continue;
            // Skip files that already exist locally with a matching timestamp.
            if (targetFileInfo.Exists && targetFileInfo.LastWriteTime == dateTime)
                continue;
            alternateFileInfo = new(Path.Combine(_FileConnectorConfiguration.AlternateTargetFolder, nginxFileSystem.Name));
            results.Add(new(dateTime, targetFileInfo, alternateFileInfo, nginxFileSystem.Type));
        }
        return (from l in results orderby l.Item1 select l).ToList().AsReadOnly();
    }

    private static void Test()
    {
#nullable enable
        if (_HttpClient is null)
            throw new Exception();
        if (string.IsNullOrEmpty(_StaticFileServer))
            throw new Exception();
        // With a non-empty _StaticFileServer the guard above makes this block unreachable; it is kept as a disabled test path.
        if (string.IsNullOrEmpty(_StaticFileServer))
        {
            ReadOnlyCollection<Tuple<DateTime, FileInfo, FileInfo, string>> possibleDownload = GetPossible();
            if (possibleDownload.Count > 0)
            {
                string targetFileName = possibleDownload[0].Item4;
                FileInfo targetFileInfo = possibleDownload[0].Item2;
                FileInfo alternateFileInfo = possibleDownload[0].Item3;
                DateTime matchNginxFileSystemDateTime = possibleDownload[0].Item1;
                // if (alternateFileInfo.Exists)
                //     File.Delete(alternateFileInfo.FullName);
                if (targetFileInfo.Exists)
                    File.Delete(targetFileInfo.FullName);
                string targetJson = _HttpClient.GetStringAsync(targetFileName).Result;
                File.WriteAllText(targetFileInfo.FullName, targetJson);
                targetFileInfo.LastWriteTime = matchNginxFileSystemDateTime;
                // File.Copy(targetFileInfo.FullName, alternateFileInfo.FullName);
                File.AppendAllText(alternateFileInfo.FullName, targetJson);
            }
        }
#nullable disable
    }

    internal static void CopyDirectories(ILogger<Worker> logger, List<string> args)
    {
        Test();
        string[] files;
        Process process;
        string checkDirectory;
        string filter = args[3];
        string replaceWith = args[4];
        string searchPattern = args[2];
        string sourceDirectory = Path.GetFullPath(args[0]);
        string[] foundDirectories = Directory.GetDirectories(sourceDirectory, searchPattern, SearchOption.AllDirectories);
        logger.LogInformation($"Found {foundDirectories.Length} directories");
        foreach (string foundDirectory in foundDirectories)
        {
            if (!foundDirectory.Contains(filter))
                continue;
            logger.LogDebug(foundDirectory);
            checkDirectory = foundDirectory.Replace(filter, replaceWith);
            // If the destination already exists and contains files, skip it; an empty destination is deleted and re-copied.
            if (Directory.Exists(checkDirectory))
            {
                files = Directory.GetFiles(checkDirectory, "*", SearchOption.AllDirectories);
                if (files.Length > 0)
                    continue;
                Directory.Delete(checkDirectory);
            }
            process = Process.Start("cmd.exe", $"/c xCopy \"{foundDirectory}\" \"{checkDirectory}\" /S /E /I /H /Y");
            process.WaitForExit();
        }
    }

}
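
The FileAgeThreshold string is parsed as days:hours:minutes:seconds, so the hard-coded "000:20:00:01" keeps anything newer than 20 hours and 1 second. A minimal standalone sketch of the same arithmetic (GetFileAgeThresholdDateTime is private, so this only repeats its logic for illustration):

// Standalone illustration of GetFileAgeThresholdDateTime("000:20:00:01"); not a call into the helper itself.
DateTime threshold = DateTime.Now;
string[] segments = "000:20:00:01".Split(':');   // ["000", "20", "00", "01"] => days, hours, minutes, seconds
threshold = threshold.AddDays(-double.Parse(segments[0]))
                     .AddHours(-double.Parse(segments[1]))
                     .AddMinutes(-double.Parse(segments[2]))
                     .AddSeconds(-double.Parse(segments[3]));
// Entries whose modification time is older than 'threshold' (now minus 20:00:01) are ignored by GetPossible().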


@@ -0,0 +1,64 @@
using Microsoft.Extensions.Logging;

using System.Diagnostics;

namespace File_Folder_Helper.Day;

internal static partial class Helper20240728
{

    internal static void DownloadSslCertificates(ILogger<Worker> logger, List<string> args)
    {
        string file;
        Process? process;
        string[] segments;
        string standardError;
        string standardOutput;
        string argumentSegment;
        string store = args[9];
        string domain = args[2];
        List<string> lines = [];
        string logSegment = args[8];
        string endCertificate = args[7];
        string beginCertificate = args[6];
        int waitForExit = int.Parse(args[5]);
        string[] subdomains = args[3].Split(',');
        string sourceDirectory = Path.GetFullPath(args[0]);
        ProcessStartInfo processStartInfo = new()
        {
            CreateNoWindow = true,
            RedirectStandardError = true,
            RedirectStandardOutput = true,
            UseShellExecute = false,
            FileName = args[4],
            WorkingDirectory = sourceDirectory
        };
        foreach (string subdomain in subdomains)
        {
            argumentSegment = $"{subdomain}.{domain}:443 -servername {subdomain}.{domain}";
            processStartInfo.Arguments = $"s_client -connect {argumentSegment}";
            process = Process.Start(processStartInfo);
            if (process is null)
                continue;
            _ = process.WaitForExit(waitForExit);
            process.Kill(entireProcessTree: true);
            standardOutput = process.StandardOutput.ReadToEnd();
            if (!standardOutput.Contains(beginCertificate) || !standardOutput.Contains(endCertificate))
            {
                standardError = process.StandardError.ReadToEnd();
                logger.LogWarning($"Error: {subdomain}{Environment.NewLine}{standardOutput}{Environment.NewLine}{standardError}");
                continue;
            }
            // Cut the PEM block out of the s_client output and save it into the source directory.
            segments = standardOutput.Split(beginCertificate);
            if (segments.Length != 2)
                break;
            segments = segments[1].Split(endCertificate);
            if (segments.Length != 2)
                break;
            lines.Add($"{logSegment} \"{store}\" {subdomain}.{domain}.cert");
            file = Path.Combine(sourceDirectory, $"{subdomain}.{domain}.cert");
            File.WriteAllText(file, $"{beginCertificate}{segments[0]}{endCertificate}{Environment.NewLine}");
        }
        File.WriteAllLines(Path.Combine(sourceDirectory, $"{DateTime.Now.Ticks}.txt"), lines);
    }

}
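
As with the other helpers, DownloadSslCertificates is driven entirely by positional args. A hedged sketch of the expected layout follows; every value is a placeholder, and args[4] pointing at an openssl executable is an assumption, not something this diff states.

// Hypothetical invocation; all values are illustrative.
List<string> args = new()
{
    "D:/Certificates",             // [0] working/output directory
    "unused",                      // [1] not read by DownloadSslCertificates
    "example.com",                 // [2] domain appended to each subdomain
    "www,api,mail",                // [3] comma-separated subdomains to probe
    "openssl",                     // [4] executable started with "s_client -connect <host>:443 -servername <host>" (assumed)
    "5000",                        // [5] milliseconds to wait before killing the process tree
    "-----BEGIN CERTIFICATE-----", // [6] marker that starts the certificate block
    "-----END CERTIFICATE-----",   // [7] marker that ends the certificate block
    "import",                      // [8] log segment written per certificate into the summary file
    "Root",                        // [9] store name quoted inside each summary line
};
// Helper20240728.DownloadSslCertificates(logger, args);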