Compare commits

1 commit

Commit 42c264e68f (2025-07-18 14:23:33 -07:00):
Added ts file for bun testing
Added http to text area box
Moved CoD column to match query
Alignment
48 changed files with 2200 additions and 1312 deletions

View File

@@ -109,7 +109,7 @@ dotnet_diagnostic.CA2254.severity = none # CA2254: The logging message template
 dotnet_diagnostic.IDE0001.severity = warning # IDE0001: Simplify name
 dotnet_diagnostic.IDE0002.severity = warning # Simplify (member access) - System.Version.Equals("1", "2"); Version.Equals("1", "2");
 dotnet_diagnostic.IDE0004.severity = warning # IDE0004: Cast is redundant.
-dotnet_diagnostic.IDE0005.severity = none # Using directive is unnecessary
+dotnet_diagnostic.IDE0005.severity = warning # Using directive is unnecessary
 dotnet_diagnostic.IDE0028.severity = none # IDE0028: Collection initialization can be simplified
 dotnet_diagnostic.IDE0031.severity = warning # Use null propagation (IDE0031)
 dotnet_diagnostic.IDE0047.severity = warning # IDE0047: Parentheses can be removed
@@ -121,7 +121,6 @@ dotnet_diagnostic.IDE0290.severity = none # Use primary constructor [Distance]cs
 dotnet_diagnostic.IDE0300.severity = none # IDE0300: Collection initialization can be simplified
 dotnet_diagnostic.IDE0301.severity = none #IDE0301: Collection initialization can be simplified
 dotnet_diagnostic.IDE0305.severity = none # IDE0305: Collection initialization can be simplified
-dotnet_diagnostic.MSTEST0015.severity = none # MSTEST0015: Test method {method} should not be ignored
 dotnet_diagnostic.MSTEST0037.severity = error # MSTEST0037: Use proper 'Assert' methods
 dotnet_diagnostic.SYSLIB1045.severity = none # SYSLIB1045: diagnostics for regex source generation
 dotnet_naming_rule.abstract_method_should_be_pascal_case.severity = warning
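Of note in this hunk: IDE0005 (unnecessary using directive) moves from none to warning, and the MSTEST0015 suppression is dropped. A minimal, hypothetical C# illustration of what IDE0005 now reports (not code from this repository):

using System;
using System.Text; // IDE0005: unnecessary using directive - nothing below uses System.Text

internal static class Ide0005Demo
{
    internal static void Main() => Console.WriteLine("Only System is used here.");
}

Depending on the SDK setup, the warning may only surface on command-line builds when code-style analysis is enforced there (for example via EnforceCodeStyleInBuild); treating that as enabled for this project is an assumption.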

View File

@@ -1,43 +1,36 @@
 {
     "configurations": [
         {
-            "mode": "debug",
-            "name": "Go launch file",
-            "program": "${file}",
-            "request": "launch",
-            "type": "go"
+            "name": ".NET Core Attach",
+            "type": "coreclr",
+            "request": "attach",
+            "processId": 22868
         },
         {
+            "type": "node",
+            "request": "launch",
             "name": "node Launch Current Opened File",
-            "program": "${file}",
-            "request": "launch",
-            "type": "node"
+            "program": "${file}"
         },
         {
-            "cwd": "${workspaceFolder}",
+            "type": "bun",
             "internalConsoleOptions": "neverOpen",
+            "request": "launch",
             "name": "Debug File",
             "program": "${file}",
-            "request": "launch",
-            "stopOnEntry": false,
-            "type": "bun",
-            "watchMode": false
-        },
-        {
             "cwd": "${workspaceFolder}",
-            "internalConsoleOptions": "neverOpen",
-            "name": "Run File",
-            "noDebug": true,
-            "program": "${file}",
-            "request": "launch",
-            "type": "bun",
+            "stopOnEntry": false,
             "watchMode": false
         },
         {
-            "name": ".NET Core Attach",
-            "processId": 32760,
-            "request": "attach",
-            "type": "coreclr"
-        }
+            "type": "bun",
+            "internalConsoleOptions": "neverOpen",
+            "request": "launch",
+            "name": "Run File",
+            "program": "${file}",
+            "cwd": "${workspaceFolder}",
+            "noDebug": true,
+            "watchMode": false
+        },
     ]
 }

View File

@@ -1,11 +1,4 @@
 {
-    "files.associations": {
-        "*.ffs_gui": "xml",
-        "*.hurl": "http",
-        "*.org": "ini",
-        "*.net": "ini",
-        "default": "ini"
-    },
     "[markdown]": {
         "editor.wordWrap": "off"
     },
@@ -33,7 +26,6 @@
         "Smpl",
         "Villach",
         "Vrng",
-        "VSTS",
         "Weightest",
         "WSJF"
     ],

View File

@@ -1,134 +1,19 @@
 {
     "version": "2.0.0",
-    "inputs": [
-        {
-            "default": "Development",
-            "description": "Which ASP Net Core Environment?",
-            "id": "ASPNETCORE_ENVIRONMENT",
-            "options": [
-                "Development",
-                "Production"
-            ],
-            "type": "pickString"
-        },
-        {
-            "default": "{AssemblyTitle}",
-            "description": "What Assembly Title?",
-            "id": "AssemblyTitle",
-            "type": "promptString"
-        },
-        {
-            "default": "{Build.BuildId}",
-            "description": "Which Build BuildId?",
-            "id": "Build.BuildId",
-            "type": "promptString"
-        },
-        {
-            "default": "{Build.Reason}",
-            "description": "Which Build Reason?",
-            "id": "Build.Reason",
-            "type": "promptString"
-        },
-        {
-            "default": "{Build.Repository.Id}",
-            "description": "Which Build Repository Id?",
-            "id": "Build.Repository.Id",
-            "type": "promptString"
-        },
-        {
-            "default": "{Build.Repository.Name}",
-            "description": "Which Build Repository Name?",
-            "id": "Build.Repository.Name",
-            "type": "promptString"
-        },
-        {
-            "default": "{Build.SourceVersion}",
-            "description": "Which Build Source Version?",
-            "id": "Build.SourceVersion",
-            "type": "promptString"
-        },
-        {
-            "default": "Debug",
-            "description": "Which Configuration?",
-            "id": "Configuration",
-            "options": [
-                "Debug",
-                "Release"
-            ],
-            "type": "pickString"
-        },
-        {
-            "default": "net8.0",
-            "description": "Which Core Version?",
-            "id": "CoreVersion",
-            "options": [
-                "net8.0"
-            ],
-            "type": "pickString"
-        },
-        {
-            "default": "C:/Program Files (x86)/Microsoft Visual Studio/2022/BuildTools/MSBuild/Current/Bin/MSBuild.exe",
-            "description": "Which MS Build?",
-            "id": "MSBuild",
-            "type": "promptString"
-        },
-        {
-            "default": "https://artifactory.intra.infineon.com/artifactory/api/nuget/ngt-fi-package-main-vir/",
-            "description": "Which Nuget Source?",
-            "id": "NugetSource",
-            "type": "promptString"
-        },
-        {
-            "default": "win-x64",
-            "description": "Which Runtime?",
-            "id": "Runtime",
-            "options": [
-                "win-x64",
-                "win-x32",
-                "linux-x64",
-                "linux-x32"
-            ],
-            "type": "pickString"
-        },
-        {
-            "default": "L:/",
-            "description": "Which System DefaultWorkingDirectory?",
-            "id": "System.DefaultWorkingDirectory",
-            "options": [
-                "L:/",
-                "D:/",
-                "C:/"
-            ],
-            "type": "pickString"
-        },
-        {
-            "default": "v4.8",
-            "description": "Which Core Target Framework Version?",
-            "id": "TargetFrameworkVersion",
-            "options": [
-                "v4.8"
-            ],
-            "type": "pickString"
-        },
-        {
-            "default": "{UserSecretsId}",
-            "description": "Which Core User Secrets Id?",
-            "id": "UserSecretsId",
-            "type": "promptString"
-        }
-    ],
     "tasks": [
         {
             "label": "Build",
             "command": "dotnet",
             "type": "process",
             "args": [
-                "build"
+                "build",
+                "/property:GenerateFullPaths=true",
+                "/consoleloggerparameters:NoSummary"
             ],
             "problemMatcher": "$msCompile"
         },
         {
-            "label": "Test Debug",
+            "label": "Test-Debug",
             "command": "dotnet",
             "type": "process",
             "args": [
@@ -139,7 +24,7 @@
             "problemMatcher": "$msCompile"
         },
         {
-            "label": "Test Release",
+            "label": "Test-Release",
             "command": "dotnet",
             "type": "process",
             "args": [
@@ -165,7 +50,7 @@
             "problemMatcher": "$msCompile"
         },
         {
-            "label": "Format Whitespaces",
+            "label": "Format-Whitespaces",
             "command": "dotnet",
             "type": "process",
             "args": [
@@ -202,13 +87,13 @@
             "problemMatcher": "$msCompile"
         },
         {
-            "label": "Code Project",
+            "label": "Project",
             "type": "shell",
             "command": "code ../MESAFIBACKLOG.csproj",
             "problemMatcher": []
         },
         {
-            "label": "Code Read Me",
+            "label": "Readme",
             "type": "shell",
             "command": "code ../README.md",
             "problemMatcher": []
@@ -228,7 +113,7 @@
             "problemMatcher": []
         },
         {
-            "label": "Code Git Config",
+            "label": "Git Config",
             "type": "shell",
             "command": "code ../.git/config",
             "problemMatcher": []

View File

@@ -7,7 +7,6 @@ using System;
 using System.Collections.Generic;
 using System.Collections.ObjectModel;
 using System.IO;
-using System.Linq;
 using System.Text.Json;
 using System.Text.Json.Serialization;
@@ -45,11 +44,12 @@ public class ProcessData : IProcessData
     {
         List<Description> results = new();
         Description? description;
+        JsonSerializerOptions jsonSerializerOptions = new() { NumberHandling = JsonNumberHandling.AllowReadingFromString | JsonNumberHandling.WriteAsString };
         foreach (JsonElement jsonElement in jsonElements)
         {
             if (jsonElement.ValueKind != JsonValueKind.Object)
                 throw new Exception();
-            description = JsonSerializer.Deserialize(jsonElement.ToString(), SharedDescriptionSourceGenerationContext.Default.Description);
+            description = JsonSerializer.Deserialize<Description>(jsonElement.ToString(), jsonSerializerOptions);
             if (description is null)
                 continue;
             results.Add(description);
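The change above replaces the source-generated SharedDescriptionSourceGenerationContext with reflection-based deserialization configured for JsonNumberHandling, so numeric fields that arrive as JSON strings still bind. A minimal sketch of that behavior, using a hypothetical Sample type rather than the repository's Description:

using System;
using System.Text.Json;
using System.Text.Json.Serialization;

internal sealed record Sample(int Id, double Value);

internal static class NumberHandlingDemo
{
    // Built once and reused; JsonSerializerOptions caches its serialization metadata per instance.
    private static readonly JsonSerializerOptions _Options = new()
    {
        NumberHandling = JsonNumberHandling.AllowReadingFromString | JsonNumberHandling.WriteAsString
    };

    internal static void Main()
    {
        // "123" and "4.5" are JSON strings; AllowReadingFromString lets them bind to int/double.
        Sample? sample = JsonSerializer.Deserialize<Sample>("{\"Id\":\"123\",\"Value\":\"4.5\"}", _Options);
        Console.WriteLine(sample); // Sample { Id = 123, Value = 4.5 }
    }
}

Creating the options once before the loop, as the new code does, avoids rebuilding that metadata for every element.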

View File

@@ -0,0 +1,135 @@
using Adaptation.Eaf.Management.ConfigurationData.CellAutomation;
using Adaptation.Ifx.Eaf.EquipmentConnector.File.Configuration;
using Adaptation.Shared;
using Adaptation.Shared.Duplicator;
using Adaptation.Shared.Methods;
using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Text.Json;
namespace Adaptation.FileHandlers.APC;
public class FileRead : Shared.FileRead, IFileRead
{
public FileRead(ISMTP smtp, Dictionary<string, string> fileParameter, string cellInstanceName, int? connectionCount, string cellInstanceConnectionName, FileConnectorConfiguration fileConnectorConfiguration, string equipmentTypeName, string parameterizedModelObjectDefinitionType, IList<ModelObjectParameterDefinition> modelObjectParameters, string equipmentDictionaryName, Dictionary<string, List<long>> dummyRuns, Dictionary<long, List<Shared.Metrology.WS.Results>> staticRuns, bool useCyclicalForDescription, bool isEAFHosted) :
base(new Description(), false, smtp, fileParameter, cellInstanceName, connectionCount, cellInstanceConnectionName, fileConnectorConfiguration, equipmentTypeName, parameterizedModelObjectDefinitionType, modelObjectParameters, equipmentDictionaryName, dummyRuns, staticRuns, useCyclicalForDescription, isEAFHosted: connectionCount is null)
{
_MinFileLength = 10;
_NullData = string.Empty;
_Logistics = new(this);
if (_FileParameter is null)
throw new Exception(cellInstanceConnectionName);
if (_ModelObjectParameterDefinitions is null)
throw new Exception(cellInstanceConnectionName);
if (!_IsDuplicator)
throw new Exception(cellInstanceConnectionName);
}
void IFileRead.Move(Tuple<string, Test[], JsonElement[], List<FileInfo>> extractResults, Exception exception)
{
bool isErrorFile = exception is not null;
if (!isErrorFile && !string.IsNullOrEmpty(_Logistics.ReportFullPath))
{
FileInfo fileInfo = new(_Logistics.ReportFullPath);
if (fileInfo.Exists && fileInfo.LastWriteTime < fileInfo.CreationTime)
File.SetLastWriteTime(_Logistics.ReportFullPath, fileInfo.CreationTime);
}
Move(extractResults);
}
void IFileRead.WaitForThread() => WaitForThread(thread: null, threadExceptions: null);
string IFileRead.GetEventDescription()
{
string result = _Description.GetEventDescription();
return result;
}
List<string> IFileRead.GetHeaderNames()
{
List<string> results = _Description.GetHeaderNames();
return results;
}
string[] IFileRead.Move(Tuple<string, Test[], JsonElement[], List<FileInfo>> extractResults, string to, string from, string resolvedFileLocation, Exception exception)
{
string[] results = Move(extractResults, to, from, resolvedFileLocation, exception);
return results;
}
JsonProperty[] IFileRead.GetDefault()
{
JsonProperty[] results = _Description.GetDefault(this, _Logistics);
return results;
}
Dictionary<string, string> IFileRead.GetDisplayNamesJsonElement()
{
Dictionary<string, string> results = _Description.GetDisplayNamesJsonElement(this);
return results;
}
List<IDescription> IFileRead.GetDescriptions(IFileRead fileRead, List<Test> tests, IProcessData processData)
{
List<IDescription> results = _Description.GetDescriptions(fileRead, _Logistics, tests, processData);
return results;
}
Tuple<string, Test[], JsonElement[], List<FileInfo>> IFileRead.GetExtractResult(string reportFullPath, string eventName)
{
Tuple<string, Test[], JsonElement[], List<FileInfo>> results;
if (string.IsNullOrEmpty(eventName))
throw new Exception();
_ReportFullPath = reportFullPath;
DateTime dateTime = DateTime.Now;
results = GetExtractResult(reportFullPath, dateTime);
if (results.Item3 is null)
results = new Tuple<string, Test[], JsonElement[], List<FileInfo>>(results.Item1, Array.Empty<Test>(), JsonSerializer.Deserialize<JsonElement[]>("[]"), results.Item4);
if (results.Item3.Length > 0 && _IsEAFHosted)
WritePDSF(this, results.Item3);
UpdateLastTicksDuration(DateTime.Now.Ticks - dateTime.Ticks);
return results;
}
Tuple<string, Test[], JsonElement[], List<FileInfo>> IFileRead.ReExtract()
{
Tuple<string, Test[], JsonElement[], List<FileInfo>> results;
List<string> headerNames = _Description.GetHeaderNames();
Dictionary<string, string> keyValuePairs = _Description.GetDisplayNamesJsonElement(this);
results = ReExtract(this, headerNames, keyValuePairs);
return results;
}
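// Copies the report into a per-run duplicate directory under the target location, then waits for the duplicate to be consumed downstream.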
private void FileCopy<T>(string reportFullPath, DateTime dateTime, List<T> descriptions) where T : Shared.Properties.IDescription
{
bool isDummyRun = false;
List<(Shared.Properties.IScopeInfo, string)> collection = new();
string successDirectory = _FileConnectorConfiguration.AlternateTargetFolder;
string fileNameAfterUnderscoreSplit = GetFileNameAfterUnderscoreSplit(reportFullPath);
string duplicateDirectory = Path.Combine(_FileConnectorConfiguration.TargetFileLocation, fileNameAfterUnderscoreSplit);
if (!Directory.Exists(duplicateDirectory))
_ = Directory.CreateDirectory(duplicateDirectory);
string duplicateFile = Path.Combine(duplicateDirectory, Path.GetFileName(reportFullPath));
File.Copy(reportFullPath, duplicateFile, overwrite: true);
WaitForFileConsumption(dateTime, descriptions, isDummyRun, successDirectory, duplicateDirectory, collection, duplicateFile);
}
private Tuple<string, Test[], JsonElement[], List<FileInfo>> GetExtractResult(string reportFullPath, DateTime dateTime)
{
Tuple<string, Test[], JsonElement[], List<FileInfo>> results;
ProcessDataStandardFormat processDataStandardFormat = ProcessDataStandardFormat.GetProcessDataStandardFormat(reportFullPath);
_Logistics = new Logistics(reportFullPath, processDataStandardFormat);
SetFileParameterLotIDToLogisticsMID();
JsonElement[] jsonElements = ProcessDataStandardFormat.GetArray(processDataStandardFormat);
List<Shared.Properties.IDescription> descriptions = GetDuplicatorDescriptions(jsonElements);
Test[] tests = (from l in descriptions select (Test)l.Test).ToArray();
if (_IsEAFHosted && _FileConnectorConfiguration.FileScanningIntervalInSeconds > 0)
FileCopy(reportFullPath, dateTime, descriptions);
results = new Tuple<string, Test[], JsonElement[], List<FileInfo>>(string.Join(Environment.NewLine, processDataStandardFormat.Logistics), tests, jsonElements, new List<FileInfo>());
return results;
}
}

View File

@@ -0,0 +1,160 @@
using Adaptation.Eaf.Management.ConfigurationData.CellAutomation;
using Adaptation.Ifx.Eaf.EquipmentConnector.File.Configuration;
using Adaptation.Shared;
using Adaptation.Shared.Duplicator;
using Adaptation.Shared.Methods;
using System;
using System.Collections.Generic;
using System.Globalization;
using System.IO;
using System.Linq;
using System.Text.Json;
namespace Adaptation.FileHandlers.Archive;
public class FileRead : Shared.FileRead, IFileRead
{
private readonly string _JobIdParentDirectory;
private readonly string _JobIdArchiveParentDirectory;
public FileRead(ISMTP smtp, Dictionary<string, string> fileParameter, string cellInstanceName, int? connectionCount, string cellInstanceConnectionName, FileConnectorConfiguration fileConnectorConfiguration, string equipmentTypeName, string parameterizedModelObjectDefinitionType, IList<ModelObjectParameterDefinition> modelObjectParameters, string equipmentDictionaryName, Dictionary<string, List<long>> dummyRuns, Dictionary<long, List<Shared.Metrology.WS.Results>> staticRuns, bool useCyclicalForDescription, bool isEAFHosted) :
base(new Description(), false, smtp, fileParameter, cellInstanceName, connectionCount, cellInstanceConnectionName, fileConnectorConfiguration, equipmentTypeName, parameterizedModelObjectDefinitionType, modelObjectParameters, equipmentDictionaryName, dummyRuns, staticRuns, useCyclicalForDescription, isEAFHosted: connectionCount is null)
{
_MinFileLength = 10;
_NullData = string.Empty;
_Logistics = new(this);
if (_FileParameter is null)
throw new Exception(cellInstanceConnectionName);
if (_ModelObjectParameterDefinitions is null)
throw new Exception(cellInstanceConnectionName);
if (!_IsDuplicator)
throw new Exception(cellInstanceConnectionName);
_JobIdParentDirectory = GetJobIdParentDirectory(_FileConnectorConfiguration.SourceFileLocation);
_JobIdArchiveParentDirectory = GetJobIdParentDirectory(_FileConnectorConfiguration.TargetFileLocation);
}
void IFileRead.Move(Tuple<string, Test[], JsonElement[], List<FileInfo>> extractResults, Exception exception) => Move(extractResults);
void IFileRead.WaitForThread() => WaitForThread(thread: null, threadExceptions: null);
string IFileRead.GetEventDescription()
{
string result = _Description.GetEventDescription();
return result;
}
List<string> IFileRead.GetHeaderNames()
{
List<string> results = _Description.GetHeaderNames();
return results;
}
string[] IFileRead.Move(Tuple<string, Test[], JsonElement[], List<FileInfo>> extractResults, string to, string from, string resolvedFileLocation, Exception exception)
{
string[] results = Move(extractResults, to, from, resolvedFileLocation, exception);
return results;
}
JsonProperty[] IFileRead.GetDefault()
{
JsonProperty[] results = _Description.GetDefault(this, _Logistics);
return results;
}
Dictionary<string, string> IFileRead.GetDisplayNamesJsonElement()
{
Dictionary<string, string> results = _Description.GetDisplayNamesJsonElement(this);
return results;
}
List<IDescription> IFileRead.GetDescriptions(IFileRead fileRead, List<Test> tests, IProcessData processData)
{
List<IDescription> results = _Description.GetDescriptions(fileRead, _Logistics, tests, processData);
return results;
}
Tuple<string, Test[], JsonElement[], List<FileInfo>> IFileRead.GetExtractResult(string reportFullPath, string eventName)
{
Tuple<string, Test[], JsonElement[], List<FileInfo>> results;
if (string.IsNullOrEmpty(eventName))
throw new Exception();
_ReportFullPath = reportFullPath;
DateTime dateTime = DateTime.Now;
results = GetExtractResult(reportFullPath, dateTime);
if (results.Item3 is null)
results = new Tuple<string, Test[], JsonElement[], List<FileInfo>>(results.Item1, Array.Empty<Test>(), JsonSerializer.Deserialize<JsonElement[]>("[]"), results.Item4);
if (results.Item3.Length > 0 && _IsEAFHosted)
WritePDSF(this, results.Item3);
UpdateLastTicksDuration(DateTime.Now.Ticks - dateTime.Ticks);
return results;
}
Tuple<string, Test[], JsonElement[], List<FileInfo>> IFileRead.ReExtract()
{
Tuple<string, Test[], JsonElement[], List<FileInfo>> results;
List<string> headerNames = _Description.GetHeaderNames();
Dictionary<string, string> keyValuePairs = _Description.GetDisplayNamesJsonElement(this);
results = ReExtract(this, headerNames, keyValuePairs);
return results;
}
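// Breadth-first walk of the directory tree; lazily yields each subdirectory whose name contains directoryNameSegment (or every subdirectory when no segment is given).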
private static IEnumerable<string> GetDirectoriesRecursively(string path, string directoryNameSegment = null)
{
Queue<string> queue = new();
queue.Enqueue(path);
while (queue.Count > 0)
{
path = queue.Dequeue();
foreach (string subDirectory in Directory.GetDirectories(path))
{
queue.Enqueue(subDirectory);
if (string.IsNullOrEmpty(directoryNameSegment) || Path.GetFileName(subDirectory).Contains(directoryNameSegment))
yield return subDirectory;
}
}
}
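// Archives the run matching this logistics sequence (or copies the report when the job directory is still empty) into a tree laid out as JobID/yyyy_Week_WW/yyyy-MM-dd.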
private void MoveArchive(string reportFullPath, DateTime dateTime)
{
if (dateTime == DateTime.MinValue)
throw new ArgumentNullException(nameof(dateTime));
string logisticsSequence = _Logistics.Sequence.ToString();
string day = $"{_Logistics.DateTimeFromSequence:yyyy-MM-dd}";
string weekOfYear = _Calendar.GetWeekOfYear(_Logistics.DateTimeFromSequence, CalendarWeekRule.FirstDay, DayOfWeek.Sunday).ToString("00");
string weekDirectory = $"{_Logistics.DateTimeFromSequence:yyyy}_Week_{weekOfYear}";
string destinationArchiveDirectory = Path.Combine(_JobIdArchiveParentDirectory, _Logistics.JobID, weekDirectory, day);
if (!Directory.Exists(destinationArchiveDirectory))
_ = Directory.CreateDirectory(destinationArchiveDirectory);
string jobIdDirectory = Path.Combine(_JobIdParentDirectory, _Logistics.JobID);
if (!Directory.Exists(jobIdDirectory))
_ = Directory.CreateDirectory(jobIdDirectory);
if (Directory.GetDirectories(jobIdDirectory).Length == 0)
File.Copy(reportFullPath, Path.Combine(destinationArchiveDirectory, Path.GetFileName(reportFullPath)));
else
{
string[] matchDirectories = GetDirectoriesRecursively(jobIdDirectory, logisticsSequence).ToArray();
if (matchDirectories.Length != 1)
throw new Exception("Didn't find directory by logistics sequence");
string sourceDirectory = Path.GetDirectoryName(matchDirectories[0]);
destinationArchiveDirectory = Path.Combine(destinationArchiveDirectory, Path.GetFileName(sourceDirectory));
Directory.Move(sourceDirectory, destinationArchiveDirectory);
}
}
private Tuple<string, Test[], JsonElement[], List<FileInfo>> GetExtractResult(string reportFullPath, DateTime dateTime)
{
Tuple<string, Test[], JsonElement[], List<FileInfo>> results;
ProcessDataStandardFormat processDataStandardFormat = ProcessDataStandardFormat.GetProcessDataStandardFormat(reportFullPath);
_Logistics = new Logistics(reportFullPath, processDataStandardFormat);
SetFileParameterLotIDToLogisticsMID();
JsonElement[] jsonElements = ProcessDataStandardFormat.GetArray(processDataStandardFormat);
List<Shared.Properties.IDescription> descriptions = GetDuplicatorDescriptions(jsonElements);
Test[] tests = (from l in descriptions select (Test)l.Test).ToArray();
if (_IsEAFHosted && _FileConnectorConfiguration.FileScanningIntervalInSeconds > 0)
MoveArchive(reportFullPath, dateTime);
results = new Tuple<string, Test[], JsonElement[], List<FileInfo>>(string.Join(Environment.NewLine, processDataStandardFormat.Logistics), tests, jsonElements, new List<FileInfo>());
return results;
}
}

View File

@@ -14,11 +14,21 @@ public class CellInstanceConnectionName
         IFileRead result = cellInstanceConnectionName switch
         {
             nameof(ADO) => new ADO.FileRead(smtp, fileParameter, cellInstanceName, connectionCount, cellInstanceConnectionName, fileConnectorConfiguration, equipmentTypeName, parameterizedModelObjectDefinitionType, modelObjectParameters, equipmentDictionaryName, dummyRuns, staticRuns, useCyclicalForDescription, isEAFHosted: connectionCount is null),
+            nameof(APC) => new APC.FileRead(smtp, fileParameter, cellInstanceName, connectionCount, cellInstanceConnectionName, fileConnectorConfiguration, equipmentTypeName, parameterizedModelObjectDefinitionType, modelObjectParameters, equipmentDictionaryName, dummyRuns, staticRuns, useCyclicalForDescription, isEAFHosted: connectionCount is null),
+            nameof(Archive) => new Archive.FileRead(smtp, fileParameter, cellInstanceName, connectionCount, cellInstanceConnectionName, fileConnectorConfiguration, equipmentTypeName, parameterizedModelObjectDefinitionType, modelObjectParameters, equipmentDictionaryName, dummyRuns, staticRuns, useCyclicalForDescription, isEAFHosted: connectionCount is null),
             nameof(DownloadWorkItems) => new DownloadWorkItems.FileRead(smtp, fileParameter, cellInstanceName, connectionCount, cellInstanceConnectionName, fileConnectorConfiguration, equipmentTypeName, parameterizedModelObjectDefinitionType, modelObjectParameters, equipmentDictionaryName, dummyRuns, staticRuns, useCyclicalForDescription, isEAFHosted: connectionCount is null),
+            nameof(Dummy) => new Dummy.FileRead(smtp, fileParameter, cellInstanceName, connectionCount, cellInstanceConnectionName, fileConnectorConfiguration, equipmentTypeName, parameterizedModelObjectDefinitionType, modelObjectParameters, equipmentDictionaryName, dummyRuns, staticRuns, useCyclicalForDescription, isEAFHosted: connectionCount is null),
+            nameof(IQSSi) => new IQSSi.FileRead(smtp, fileParameter, cellInstanceName, connectionCount, cellInstanceConnectionName, fileConnectorConfiguration, equipmentTypeName, parameterizedModelObjectDefinitionType, modelObjectParameters, equipmentDictionaryName, dummyRuns, staticRuns, useCyclicalForDescription, isEAFHosted: connectionCount is null),
             nameof(json) => new json.FileRead(smtp, fileParameter, cellInstanceName, connectionCount, cellInstanceConnectionName, fileConnectorConfiguration, equipmentTypeName, parameterizedModelObjectDefinitionType, modelObjectParameters, equipmentDictionaryName, dummyRuns, staticRuns, useCyclicalForDescription, isEAFHosted: connectionCount is null),
             nameof(Kanban) => new Kanban.FileRead(smtp, fileParameter, cellInstanceName, connectionCount, cellInstanceConnectionName, fileConnectorConfiguration, equipmentTypeName, parameterizedModelObjectDefinitionType, modelObjectParameters, equipmentDictionaryName, dummyRuns, staticRuns, useCyclicalForDescription, isEAFHosted: connectionCount is null),
             nameof(Markdown) => new Markdown.FileRead(smtp, fileParameter, cellInstanceName, connectionCount, cellInstanceConnectionName, fileConnectorConfiguration, equipmentTypeName, parameterizedModelObjectDefinitionType, modelObjectParameters, equipmentDictionaryName, dummyRuns, staticRuns, useCyclicalForDescription, isEAFHosted: connectionCount is null),
+            nameof(MoveMatchingFiles) => new MoveMatchingFiles.FileRead(smtp, fileParameter, cellInstanceName, connectionCount, cellInstanceConnectionName, fileConnectorConfiguration, equipmentTypeName, parameterizedModelObjectDefinitionType, modelObjectParameters, equipmentDictionaryName, dummyRuns, staticRuns, useCyclicalForDescription, isEAFHosted: connectionCount is null),
+            nameof(OpenInsight) => new OpenInsight.FileRead(smtp, fileParameter, cellInstanceName, connectionCount, cellInstanceConnectionName, fileConnectorConfiguration, equipmentTypeName, parameterizedModelObjectDefinitionType, modelObjectParameters, equipmentDictionaryName, dummyRuns, staticRuns, useCyclicalForDescription, isEAFHosted: connectionCount is null),
+            nameof(OpenInsightMetrologyViewer) => new OpenInsightMetrologyViewer.FileRead(smtp, fileParameter, cellInstanceName, connectionCount, cellInstanceConnectionName, fileConnectorConfiguration, equipmentTypeName, parameterizedModelObjectDefinitionType, modelObjectParameters, equipmentDictionaryName, dummyRuns, staticRuns, useCyclicalForDescription, isEAFHosted: connectionCount is null),
+            nameof(OpenInsightMetrologyViewerAttachments) => new OpenInsightMetrologyViewerAttachments.FileRead(smtp, fileParameter, cellInstanceName, connectionCount, cellInstanceConnectionName, fileConnectorConfiguration, equipmentTypeName, parameterizedModelObjectDefinitionType, modelObjectParameters, equipmentDictionaryName, dummyRuns, staticRuns, useCyclicalForDescription, isEAFHosted: connectionCount is null),
             nameof(Priority) => new Priority.FileRead(smtp, fileParameter, cellInstanceName, connectionCount, cellInstanceConnectionName, fileConnectorConfiguration, equipmentTypeName, parameterizedModelObjectDefinitionType, modelObjectParameters, equipmentDictionaryName, dummyRuns, staticRuns, useCyclicalForDescription, isEAFHosted: connectionCount is null),
+            nameof(Processed) => new Processed.FileRead(smtp, fileParameter, cellInstanceName, connectionCount, cellInstanceConnectionName, fileConnectorConfiguration, equipmentTypeName, parameterizedModelObjectDefinitionType, modelObjectParameters, equipmentDictionaryName, dummyRuns, staticRuns, useCyclicalForDescription, isEAFHosted: connectionCount is null),
+            nameof(SPaCe) => new SPaCe.FileRead(smtp, fileParameter, cellInstanceName, connectionCount, cellInstanceConnectionName, fileConnectorConfiguration, equipmentTypeName, parameterizedModelObjectDefinitionType, modelObjectParameters, equipmentDictionaryName, dummyRuns, staticRuns, useCyclicalForDescription, isEAFHosted: connectionCount is null),
             nameof(Violation) => new Violation.FileRead(smtp, fileParameter, cellInstanceName, connectionCount, cellInstanceConnectionName, fileConnectorConfiguration, equipmentTypeName, parameterizedModelObjectDefinitionType, modelObjectParameters, equipmentDictionaryName, dummyRuns, staticRuns, useCyclicalForDescription, isEAFHosted: connectionCount is null),
             _ => throw new Exception($"\"{cellInstanceConnectionName}\" not mapped")
         };

View File

@@ -0,0 +1,295 @@
using Adaptation.Eaf.Management.ConfigurationData.CellAutomation;
using Adaptation.Ifx.Eaf.EquipmentConnector.File.Configuration;
using Adaptation.Shared;
using Adaptation.Shared.Duplicator;
using Adaptation.Shared.Methods;
using Infineon.Monitoring.MonA;
using System;
using System.Collections.Generic;
using System.Diagnostics;
using System.Globalization;
using System.IO;
using System.IO.Compression;
using System.Linq;
using System.Text.Json;
using System.Threading;
namespace Adaptation.FileHandlers.Dummy;
public class FileRead : Shared.FileRead, IFileRead
{
private readonly Timer _Timer;
private int _LastDummyRunIndex;
private readonly string[] _CellNames;
public FileRead(ISMTP smtp, Dictionary<string, string> fileParameter, string cellInstanceName, int? connectionCount, string cellInstanceConnectionName, FileConnectorConfiguration fileConnectorConfiguration, string equipmentTypeName, string parameterizedModelObjectDefinitionType, IList<ModelObjectParameterDefinition> modelObjectParameters, string equipmentDictionaryName, Dictionary<string, List<long>> dummyRuns, Dictionary<long, List<Shared.Metrology.WS.Results>> staticRuns, bool useCyclicalForDescription, bool isEAFHosted) :
base(new Description(), false, smtp, fileParameter, cellInstanceName, connectionCount, cellInstanceConnectionName, fileConnectorConfiguration, equipmentTypeName, parameterizedModelObjectDefinitionType, modelObjectParameters, equipmentDictionaryName, dummyRuns, staticRuns, useCyclicalForDescription, isEAFHosted: connectionCount is null)
{
_MinFileLength = 10;
_NullData = string.Empty;
_Logistics = new(this);
if (_FileParameter is null)
throw new Exception(cellInstanceConnectionName);
if (_ModelObjectParameterDefinitions is null)
throw new Exception(cellInstanceConnectionName);
if (!_IsDuplicator)
throw new Exception(cellInstanceConnectionName);
_LastDummyRunIndex = -1;
List<string> cellNames = new();
_Timer = new Timer(Callback, null, Timeout.Infinite, Timeout.Infinite);
ModelObjectParameterDefinition[] cellInstanceCollection = GetProperties(cellInstanceConnectionName, modelObjectParameters, "CellInstance.", ".Alias");
foreach (ModelObjectParameterDefinition modelObjectParameterDefinition in cellInstanceCollection)
cellNames.Add(modelObjectParameterDefinition.Name.Split('.')[1]);
_CellNames = cellNames.ToArray();
if (Debugger.IsAttached || fileConnectorConfiguration.PreProcessingMode == FileConnectorConfiguration.PreProcessingModeEnum.Process)
Callback(null);
else
{
TimeSpan timeSpan = new(DateTime.Now.AddSeconds(_FileConnectorConfiguration.FileScanningIntervalInSeconds.Value).Ticks - DateTime.Now.Ticks);
_ = _Timer.Change((long)timeSpan.TotalMilliseconds, Timeout.Infinite);
}
}
void IFileRead.Move(Tuple<string, Test[], JsonElement[], List<FileInfo>> extractResults, Exception exception) => Move(extractResults);
void IFileRead.WaitForThread() => WaitForThread(thread: null, threadExceptions: null);
string IFileRead.GetEventDescription()
{
string result = _Description.GetEventDescription();
return result;
}
List<string> IFileRead.GetHeaderNames()
{
List<string> results = _Description.GetHeaderNames();
return results;
}
string[] IFileRead.Move(Tuple<string, Test[], JsonElement[], List<FileInfo>> extractResults, string to, string from, string resolvedFileLocation, Exception exception)
{
string[] results = Move(extractResults, to, from, resolvedFileLocation, exception);
return results;
}
JsonProperty[] IFileRead.GetDefault()
{
JsonProperty[] results = _Description.GetDefault(this, _Logistics);
return results;
}
Dictionary<string, string> IFileRead.GetDisplayNamesJsonElement()
{
Dictionary<string, string> results = _Description.GetDisplayNamesJsonElement(this);
return results;
}
List<IDescription> IFileRead.GetDescriptions(IFileRead fileRead, List<Test> tests, IProcessData processData)
{
List<IDescription> results = _Description.GetDescriptions(fileRead, _Logistics, tests, processData);
return results;
}
Tuple<string, Test[], JsonElement[], List<FileInfo>> IFileRead.GetExtractResult(string reportFullPath, string eventName) => throw new Exception(string.Concat("See ", nameof(CallbackFileExists)));
Tuple<string, Test[], JsonElement[], List<FileInfo>> IFileRead.ReExtract() => throw new Exception(string.Concat("See ", nameof(CallbackFileExists)));
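// Extracts the dummy source archive into an in-process folder, re-stamps the files with the run sequence, moves them to the target location, and reports Ok/Warning/Critical to MonIn.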
private void CallbackInProcessCleared(string sourceArchiveFile, string traceDummyFile, string targetFileLocation, string monARessource, string inProcessDirectory, long sequence, bool warning)
{
const string site = "sjc";
string stateName = string.Concat("Dummy_", _EventName);
const string monInURL = "http://moninhttp.sjc.infineon.com/input/text";
MonIn monIn = MonIn.GetInstance(monInURL);
try
{
if (warning)
{
File.AppendAllLines(traceDummyFile, new string[] { site, monARessource, stateName, State.Warning.ToString() });
_ = monIn.SendStatus(site, monARessource, stateName, State.Warning);
for (int i = 1; i < 12; i++)
Thread.Sleep(500);
}
ZipFile.ExtractToDirectory(sourceArchiveFile, inProcessDirectory);
string[] files = Directory.GetFiles(inProcessDirectory, "*", SearchOption.TopDirectoryOnly);
if (files.Length > 250)
throw new Exception("Safety net!");
foreach (string file in files)
File.SetLastWriteTime(file, new DateTime(sequence));
if (!_FileConnectorConfiguration.IncludeSubDirectories.Value)
{
foreach (string file in files)
File.Move(file, Path.Combine(targetFileLocation, Path.GetFileName(file)));
}
else
{
string[] directories = Directory.GetDirectories(inProcessDirectory, "*", SearchOption.AllDirectories);
foreach (string directory in directories)
_ = Directory.CreateDirectory(string.Concat(targetFileLocation, directory.Substring(inProcessDirectory.Length)));
foreach (string file in files)
File.Move(file, string.Concat(targetFileLocation, file.Substring(inProcessDirectory.Length)));
}
File.AppendAllLines(traceDummyFile, new string[] { site, monARessource, stateName, State.Ok.ToString() });
_ = monIn.SendStatus(site, monARessource, stateName, State.Ok);
}
catch (Exception exception)
{
string subject = string.Concat("Exception:", _CellInstanceConnectionName);
string body = string.Concat(exception.Message, Environment.NewLine, Environment.NewLine, exception.StackTrace);
try
{
_SMTP.SendHighPriorityEmailMessage(subject, body);
File.WriteAllText(".email", body);
}
catch (Exception) { }
File.AppendAllLines(traceDummyFile, new string[] { site, monARessource, stateName, State.Critical.ToString(), exception.Message, exception.StackTrace });
_ = monIn.SendStatus(site, monARessource, stateName, State.Critical);
}
}
private void CallbackFileExists(string sourceArchiveFile, string traceDummyFile, string targetFileLocation, string monARessource, long sequence)
{
string[] files;
bool warning = false;
if (!_DummyRuns.ContainsKey(monARessource))
_DummyRuns.Add(monARessource, new List<long>());
if (!_DummyRuns[monARessource].Contains(sequence))
_DummyRuns[monARessource].Add(sequence);
File.AppendAllLines(traceDummyFile, new string[] { sourceArchiveFile });
string inProcessDirectory = Path.Combine("_ProgressPath", "Dummy In-Process", sequence.ToString());
if (!Directory.Exists(inProcessDirectory))
_ = Directory.CreateDirectory(inProcessDirectory);
files = Directory.GetFiles(inProcessDirectory, "*", SearchOption.AllDirectories);
if (files.Length != 0)
{
if (files.Length > 250)
throw new Exception("Safety net!");
try
{
foreach (string file in files)
File.Delete(file);
}
catch (Exception) { }
}
if (_FileConnectorConfiguration.IncludeSubDirectories.Value)
files = Directory.GetFiles(targetFileLocation, "*", SearchOption.AllDirectories);
else
files = Directory.GetFiles(targetFileLocation, "*", SearchOption.TopDirectoryOnly);
foreach (string file in files)
{
if (new FileInfo(file).LastWriteTime.Ticks == sequence)
{
warning = true;
break;
}
}
CallbackInProcessCleared(sourceArchiveFile, traceDummyFile, targetFileLocation, monARessource, inProcessDirectory, sequence, warning);
}
private string GetCellName(string pathSegment)
{
string result = string.Empty;
foreach (string cellName in _CellNames)
{
if (pathSegment.ToLower().Contains(cellName.ToLower()))
{
result = cellName;
break;
}
}
if (string.IsNullOrEmpty(result))
{
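// Fallback when no cell name appears verbatim in the path: count how often each candidate's characters occur in the path, then take the shortest candidate, breaking ties by the higher count.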
int count;
List<(string CellName, int Count)> cellNames = new();
foreach (string cellName in _CellNames)
{
count = 0;
foreach (char @char in cellName.ToLower())
count += pathSegment.Length - pathSegment.ToLower().Replace(@char.ToString(), string.Empty).Length;
cellNames.Add(new(cellName, count));
}
result = (from l in cellNames orderby l.CellName.Length, l.Count descending select l.CellName).First();
}
return result;
}
private void Callback(object state)
{
try
{
string sourceParentDirectory;
string targetParentDirectory;
DateTime dateTime = DateTime.Now;
if (!string.IsNullOrEmpty(Path.GetFileName(_FileConnectorConfiguration.SourceFileLocation)))
sourceParentDirectory = Path.GetDirectoryName(_FileConnectorConfiguration.SourceFileLocation);
else
sourceParentDirectory = GetParentParent(_FileConnectorConfiguration.SourceFileLocation);
if (!string.IsNullOrEmpty(Path.GetFileName(_FileConnectorConfiguration.TargetFileLocation)))
targetParentDirectory = Path.GetDirectoryName(_FileConnectorConfiguration.TargetFileLocation);
else
targetParentDirectory = GetParentParent(_FileConnectorConfiguration.TargetFileLocation);
if (sourceParentDirectory != targetParentDirectory)
throw new Exception("Target and source must have the same parent for Si Dummy FileConnectorConfiguration!");
bool check = dateTime.Hour > 7 && dateTime.Hour < 18 && dateTime.DayOfWeek != DayOfWeek.Sunday && dateTime.DayOfWeek != DayOfWeek.Saturday;
if (!_IsEAFHosted || check)
{
string monARessource;
string sourceFileFilter;
string sourceArchiveFile;
string weekOfYear = _Calendar.GetWeekOfYear(dateTime, CalendarWeekRule.FirstDay, DayOfWeek.Sunday).ToString("00");
string traceDummyDirectory = Path.Combine(Path.GetPathRoot(_TracePath), "TracesDummy", _CellInstanceName, "Source", $"{dateTime:yyyy}___Week_{weekOfYear}");
if (!Directory.Exists(traceDummyDirectory))
_ = Directory.CreateDirectory(traceDummyDirectory);
string traceDummyFile = Path.Combine(traceDummyDirectory, $"{dateTime.Ticks} - {_CellInstanceName}.txt");
File.AppendAllText(traceDummyFile, string.Empty);
for (int i = 0; i < _FileConnectorConfiguration.SourceFileFilters.Count; i++)
{
_LastDummyRunIndex += 1;
if (_LastDummyRunIndex >= _FileConnectorConfiguration.SourceFileFilters.Count)
_LastDummyRunIndex = 0;
sourceFileFilter = _FileConnectorConfiguration.SourceFileFilters[_LastDummyRunIndex];
sourceArchiveFile = Path.Combine(_FileConnectorConfiguration.SourceFileLocation, sourceFileFilter);
if (File.Exists(sourceArchiveFile))
{
if (!long.TryParse(Path.GetFileNameWithoutExtension(sourceArchiveFile).Replace("x", string.Empty), out long sequence))
throw new Exception("Invalid file name for source archive file!");
monARessource = GetCellName(sourceArchiveFile);
if (string.IsNullOrEmpty(monARessource))
throw new Exception("Could not determine which cell archive file is associated with!");
if (_IsEAFHosted)
CallbackFileExists(sourceArchiveFile, traceDummyFile, _FileConnectorConfiguration.TargetFileLocation, monARessource, sequence);
break;
}
}
}
}
catch (Exception exception)
{
string subject = string.Concat("Exception:", _CellInstanceConnectionName);
string body = string.Concat(exception.Message, Environment.NewLine, Environment.NewLine, exception.StackTrace);
try
{
_SMTP.SendHighPriorityEmailMessage(subject, body);
File.WriteAllText(".email", body);
}
catch (Exception) { }
}
try
{
TimeSpan timeSpan = new(DateTime.Now.AddSeconds(_FileConnectorConfiguration.FileScanningIntervalInSeconds.Value).Ticks - DateTime.Now.Ticks);
_ = _Timer.Change((long)timeSpan.TotalMilliseconds, Timeout.Infinite);
}
catch (Exception exception)
{
string subject = string.Concat("Exception:", _CellInstanceConnectionName);
string body = string.Concat(exception.Message, Environment.NewLine, Environment.NewLine, exception.StackTrace);
try
{
_SMTP.SendHighPriorityEmailMessage(subject, body);
File.WriteAllText(".email", body);
}
catch (Exception) { }
}
}
}

View File

@@ -0,0 +1,134 @@
using Adaptation.Eaf.Management.ConfigurationData.CellAutomation;
using Adaptation.Ifx.Eaf.EquipmentConnector.File.Configuration;
using Adaptation.Shared;
using Adaptation.Shared.Duplicator;
using Adaptation.Shared.Methods;
using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Text.Json;
namespace Adaptation.FileHandlers.IQSSi;
public class FileRead : Shared.FileRead, IFileRead
{
public FileRead(ISMTP smtp, Dictionary<string, string> fileParameter, string cellInstanceName, int? connectionCount, string cellInstanceConnectionName, FileConnectorConfiguration fileConnectorConfiguration, string equipmentTypeName, string parameterizedModelObjectDefinitionType, IList<ModelObjectParameterDefinition> modelObjectParameters, string equipmentDictionaryName, Dictionary<string, List<long>> dummyRuns, Dictionary<long, List<Shared.Metrology.WS.Results>> staticRuns, bool useCyclicalForDescription, bool isEAFHosted) :
base(new Description(), false, smtp, fileParameter, cellInstanceName, connectionCount, cellInstanceConnectionName, fileConnectorConfiguration, equipmentTypeName, parameterizedModelObjectDefinitionType, modelObjectParameters, equipmentDictionaryName, dummyRuns, staticRuns, useCyclicalForDescription, isEAFHosted: connectionCount is null)
{
_MinFileLength = 10;
_NullData = string.Empty;
_Logistics = new(this);
if (_FileParameter is null)
throw new Exception(cellInstanceConnectionName);
if (_ModelObjectParameterDefinitions is null)
throw new Exception(cellInstanceConnectionName);
if (!_IsDuplicator)
throw new Exception(cellInstanceConnectionName);
}
void IFileRead.Move(Tuple<string, Test[], JsonElement[], List<FileInfo>> extractResults, Exception exception)
{
bool isErrorFile = exception is not null;
if (!isErrorFile && !string.IsNullOrEmpty(_Logistics.ReportFullPath))
{
FileInfo fileInfo = new(_Logistics.ReportFullPath);
if (fileInfo.Exists && fileInfo.LastWriteTime < fileInfo.CreationTime)
File.SetLastWriteTime(_Logistics.ReportFullPath, fileInfo.CreationTime);
}
Move(extractResults);
}
void IFileRead.WaitForThread() => WaitForThread(thread: null, threadExceptions: null);
string IFileRead.GetEventDescription()
{
string result = _Description.GetEventDescription();
return result;
}
List<string> IFileRead.GetHeaderNames()
{
List<string> results = _Description.GetHeaderNames();
return results;
}
string[] IFileRead.Move(Tuple<string, Test[], JsonElement[], List<FileInfo>> extractResults, string to, string from, string resolvedFileLocation, Exception exception)
{
string[] results = Move(extractResults, to, from, resolvedFileLocation, exception);
return results;
}
JsonProperty[] IFileRead.GetDefault()
{
JsonProperty[] results = _Description.GetDefault(this, _Logistics);
return results;
}
Dictionary<string, string> IFileRead.GetDisplayNamesJsonElement()
{
Dictionary<string, string> results = _Description.GetDisplayNamesJsonElement(this);
return results;
}
List<IDescription> IFileRead.GetDescriptions(IFileRead fileRead, List<Test> tests, IProcessData processData)
{
List<IDescription> results = _Description.GetDescriptions(fileRead, _Logistics, tests, processData);
return results;
}
Tuple<string, Test[], JsonElement[], List<FileInfo>> IFileRead.GetExtractResult(string reportFullPath, string eventName)
{
Tuple<string, Test[], JsonElement[], List<FileInfo>> results;
if (string.IsNullOrEmpty(eventName))
throw new Exception();
_ReportFullPath = reportFullPath;
DateTime dateTime = DateTime.Now;
results = GetExtractResult(reportFullPath, dateTime);
if (results.Item3 is null)
results = new Tuple<string, Test[], JsonElement[], List<FileInfo>>(results.Item1, Array.Empty<Test>(), JsonSerializer.Deserialize<JsonElement[]>("[]"), results.Item4);
if (results.Item3.Length > 0 && _IsEAFHosted)
WritePDSF(this, results.Item3);
UpdateLastTicksDuration(DateTime.Now.Ticks - dateTime.Ticks);
return results;
}
Tuple<string, Test[], JsonElement[], List<FileInfo>> IFileRead.ReExtract()
{
Tuple<string, Test[], JsonElement[], List<FileInfo>> results;
List<string> headerNames = _Description.GetHeaderNames();
Dictionary<string, string> keyValuePairs = _Description.GetDisplayNamesJsonElement(this);
results = ReExtract(this, headerNames, keyValuePairs);
return results;
}
private void FileCopy<T>(string reportFullPath, DateTime dateTime, List<T> descriptions) where T : Shared.Properties.IDescription
{
bool isDummyRun = false;
string successDirectory = string.Empty;
List<(Shared.Properties.IScopeInfo, string)> collection = new();
string duplicateDirectory = Path.Combine(_FileConnectorConfiguration.SourceFileLocation, _CellInstanceName);
if (!Directory.Exists(duplicateDirectory))
_ = Directory.CreateDirectory(duplicateDirectory);
string duplicateFile = Path.Combine(duplicateDirectory, Path.GetFileName(reportFullPath));
File.Copy(reportFullPath, duplicateFile, overwrite: true);
WaitForFileConsumption(dateTime, descriptions, isDummyRun, successDirectory, duplicateDirectory, collection, duplicateFile);
}
private Tuple<string, Test[], JsonElement[], List<FileInfo>> GetExtractResult(string reportFullPath, DateTime dateTime)
{
Tuple<string, Test[], JsonElement[], List<FileInfo>> results;
ProcessDataStandardFormat processDataStandardFormat = ProcessDataStandardFormat.GetProcessDataStandardFormat(reportFullPath);
_Logistics = new Logistics(reportFullPath, processDataStandardFormat);
SetFileParameterLotIDToLogisticsMID();
JsonElement[] jsonElements = ProcessDataStandardFormat.GetArray(processDataStandardFormat);
List<Shared.Properties.IDescription> descriptions = GetDuplicatorDescriptions(jsonElements);
Test[] tests = (from l in descriptions select (Test)l.Test).ToArray();
if (_IsEAFHosted && _FileConnectorConfiguration.FileScanningIntervalInSeconds > 0)
FileCopy(reportFullPath, dateTime, descriptions);
results = new Tuple<string, Test[], JsonElement[], List<FileInfo>>(string.Join(Environment.NewLine, processDataStandardFormat.Logistics), tests, jsonElements, new List<FileInfo>());
return results;
}
}

View File

@@ -8,7 +8,6 @@ using System.Collections.Generic;
 using System.Collections.ObjectModel;
 using System.Globalization;
 using System.IO;
-using System.Linq;
 using System.Text.Json;
 using System.Text.Json.Serialization;
@@ -222,11 +221,12 @@ public class ProcessData : IProcessData
     {
         List<Description> results = new();
         Description? description;
+        JsonSerializerOptions jsonSerializerOptions = new() { NumberHandling = JsonNumberHandling.AllowReadingFromString | JsonNumberHandling.WriteAsString };
         foreach (JsonElement jsonElement in jsonElements)
        {
             if (jsonElement.ValueKind != JsonValueKind.Object)
                 throw new Exception();
-            description = JsonSerializer.Deserialize(jsonElement.ToString(), SharedDescriptionSourceGenerationContext.Default.Description);
+            description = JsonSerializer.Deserialize<Description>(jsonElement.ToString(), jsonSerializerOptions);
             if (description is null)
                 continue;
             results.Add(description);

View File

@@ -399,11 +399,12 @@ public class ProcessData : IProcessData
     {
         List<Description> results = new();
         Description? description;
+        JsonSerializerOptions jsonSerializerOptions = new() { NumberHandling = JsonNumberHandling.AllowReadingFromString | JsonNumberHandling.WriteAsString };
         foreach (JsonElement jsonElement in jsonElements)
         {
             if (jsonElement.ValueKind != JsonValueKind.Object)
                 throw new Exception();
-            description = JsonSerializer.Deserialize(jsonElement.ToString(), SharedDescriptionSourceGenerationContext.Default.Description);
+            description = JsonSerializer.Deserialize<Description>(jsonElement.ToString(), jsonSerializerOptions);
             if (description is null)
                 continue;
             results.Add(description);

View File

@@ -0,0 +1,397 @@
using Adaptation.Eaf.Management.ConfigurationData.CellAutomation;
using Adaptation.Ifx.Eaf.EquipmentConnector.File.Configuration;
using Adaptation.Shared;
using Adaptation.Shared.Duplicator;
using Adaptation.Shared.Methods;
using System;
using System.Collections.Generic;
using System.Collections.ObjectModel;
using System.IO;
using System.Linq;
using System.Text;
using System.Text.Json;
using System.Threading;
namespace Adaptation.FileHandlers.MoveMatchingFiles;
#nullable enable
public class FileRead : Shared.FileRead, IFileRead
{
internal class PreWith
{
internal string ErrFile { get; private set; }
internal string CheckFile { get; private set; }
internal string MatchingFile { get; private set; }
internal string CheckDirectory { get; private set; }
internal string NoWaitDirectory { get; private set; }
internal PreWith(string checkDirectory,
string checkFile,
string errFile,
string matchingFile,
string noWaitDirectory)
{
ErrFile = errFile;
CheckFile = checkFile;
MatchingFile = matchingFile;
CheckDirectory = checkDirectory;
NoWaitDirectory = noWaitDirectory;
}
}
internal class Pre
{
internal string MatchingFile { get; private set; }
internal string CheckFile { get; private set; }
internal Pre(string matchingFile, string checkFile)
{
MatchingFile = matchingFile;
CheckFile = checkFile;
}
}
internal class Post
{
internal string ErrFile { get; private set; }
internal string CheckFile { get; private set; }
internal Post(string checkFile, string errFile)
{
ErrFile = errFile;
CheckFile = checkFile;
}
}
private readonly ProcessDataStandardFormatMapping _ProcessDataStandardFormatMapping;
public FileRead(ISMTP smtp, Dictionary<string, string> fileParameter, string cellInstanceName, int? connectionCount, string cellInstanceConnectionName, FileConnectorConfiguration fileConnectorConfiguration, string equipmentTypeName, string parameterizedModelObjectDefinitionType, IList<ModelObjectParameterDefinition> modelObjectParameters, string equipmentDictionaryName, Dictionary<string, List<long>> dummyRuns, Dictionary<long, List<Shared.Metrology.WS.Results>> staticRuns, bool useCyclicalForDescription, bool isEAFHosted) :
base(new Description(), false, smtp, fileParameter, cellInstanceName, connectionCount, cellInstanceConnectionName, fileConnectorConfiguration, equipmentTypeName, parameterizedModelObjectDefinitionType, modelObjectParameters, equipmentDictionaryName, dummyRuns, staticRuns, useCyclicalForDescription, isEAFHosted: connectionCount is null)
{
_MinFileLength = 10;
_NullData = string.Empty;
_Logistics = new(this);
if (_FileParameter is null)
throw new Exception(cellInstanceConnectionName);
if (_ModelObjectParameterDefinitions is null)
throw new Exception(cellInstanceConnectionName);
if (!_IsDuplicator)
throw new Exception(cellInstanceConnectionName);
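// The old-to-new column mapping for the process data standard format is read below from three cell-instance model object parameters.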
string processDataStandardFormatMappingOldColumnNames = GetPropertyValue(cellInstanceConnectionName, modelObjectParameters, "Process.Data.Standard.Format.Mapping.Old.Column.Names");
string processDataStandardFormatMappingNewColumnNames = GetPropertyValue(cellInstanceConnectionName, modelObjectParameters, "Process.Data.Standard.Format.Mapping.New.Column.Names");
string processDataStandardFormatMappingColumnIndices = GetPropertyValue(cellInstanceConnectionName, modelObjectParameters, "Process.Data.Standard.Format.Mapping.Column.Indices");
_ProcessDataStandardFormatMapping = GetProcessDataStandardFormatMapping(processDataStandardFormatMappingOldColumnNames,
processDataStandardFormatMappingNewColumnNames,
processDataStandardFormatMappingColumnIndices);
}
void IFileRead.Move(Tuple<string, Test[], JsonElement[], List<FileInfo>> extractResults, Exception exception)
{
bool isErrorFile = exception is not null;
if (!isErrorFile && !string.IsNullOrEmpty(_Logistics.ReportFullPath))
{
FileInfo fileInfo = new(_Logistics.ReportFullPath);
if (fileInfo.Exists && fileInfo.LastWriteTime < fileInfo.CreationTime)
File.SetLastWriteTime(_Logistics.ReportFullPath, fileInfo.CreationTime);
}
Move(extractResults);
}
void IFileRead.WaitForThread() =>
WaitForThread(thread: null, threadExceptions: null);
string IFileRead.GetEventDescription()
{
string result = _Description.GetEventDescription();
return result;
}
List<string> IFileRead.GetHeaderNames()
{
List<string> results = _Description.GetHeaderNames();
return results;
}
string[] IFileRead.Move(Tuple<string, Test[], JsonElement[], List<FileInfo>> extractResults, string to, string from, string resolvedFileLocation, Exception exception)
{
string[] results = Move(extractResults, to, from, resolvedFileLocation, exception);
return results;
}
JsonProperty[] IFileRead.GetDefault()
{
JsonProperty[] results = _Description.GetDefault(this, _Logistics);
return results;
}
Dictionary<string, string> IFileRead.GetDisplayNamesJsonElement()
{
Dictionary<string, string> results = _Description.GetDisplayNamesJsonElement(this);
return results;
}
List<IDescription> IFileRead.GetDescriptions(IFileRead fileRead, List<Test> tests, IProcessData processData)
{
List<IDescription> results = _Description.GetDescriptions(fileRead, _Logistics, tests, processData);
return results;
}
Tuple<string, Test[], JsonElement[], List<FileInfo>> IFileRead.GetExtractResult(string reportFullPath, string eventName)
{
Tuple<string, Test[], JsonElement[], List<FileInfo>> results;
if (string.IsNullOrEmpty(eventName))
throw new Exception();
_ReportFullPath = reportFullPath;
DateTime dateTime = DateTime.Now;
results = GetExtractResult(reportFullPath, dateTime);
if (results.Item3 is null)
results = new Tuple<string, Test[], JsonElement[], List<FileInfo>>(results.Item1, Array.Empty<Test>(), JsonSerializer.Deserialize<JsonElement[]>("[]") ?? throw new Exception(), results.Item4);
if (results.Item3.Length > 0 && _IsEAFHosted)
WritePDSF(this, results.Item3);
UpdateLastTicksDuration(DateTime.Now.Ticks - dateTime.Ticks);
return results;
}
Tuple<string, Test[], JsonElement[], List<FileInfo>> IFileRead.ReExtract()
{
Tuple<string, Test[], JsonElement[], List<FileInfo>> results;
List<string> headerNames = _Description.GetHeaderNames();
Dictionary<string, string> keyValuePairs = _Description.GetDisplayNamesJsonElement(this);
results = ReExtract(this, headerNames, keyValuePairs);
return results;
}
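// Builds the PDSF column mapping from the three comma-delimited cell-instance parameters.
// The ignore, backfill and index-only lists plus the old|new rename pairs are hard-coded in
// args4-args7; the commented args8-args10 strings inside the method appear to be example
// values for the three mapping parameters (kept as a reference, not read at runtime).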
private static ProcessDataStandardFormatMapping GetProcessDataStandardFormatMapping(string processDataStandardFormatMappingOldColumnNames, string processDataStandardFormatMappingNewColumnNames, string processDataStandardFormatMappingColumnIndices)
{
ProcessDataStandardFormatMapping result;
string[] segmentsB;
List<string> distinct = new();
Dictionary<string, string> keyValuePairs = new();
string args4 = "Time,Test,Count,MesEntity,HeaderUniqueId,UniqueId,Id,Recipe,Date,AreaDeltaFromLastRun,GLimit,HGCV1";
string args5 = "Nine10mmEdgeMean,Nine4mmEdgeMean,NineCriticalPointsAverage,NineCriticalPointsPhaseAngleAverage,NineCriticalPointsStdDev,NineEdgeMeanDelta,NineMean,NineResRangePercent,AreaDeltaFromLastRun,Variation,Percentage HgCV 4PP Delta,HGCV1";
string args6 = "RhoAvg01,RhoAvg02,RhoAvg03,RhoAvg04,RhoAvg05,RhoAvg06,RhoAvg07,RhoAvg08,RhoAvg09,HGCV1";
string args7 = "FlatZMean|MeanFlatZ,GradeMean|MeanGrade,NAvgMean|MeanNAvg,NslMean|MeanNsl,PhaseMean|MeanPhase,RhoAvgMean|MeanRhoAvg,RhoslMean|MeanRhosl,RsMean|MeanRs,VdMean|MeanVd,FlatZRadialGradient|RadialGradientFlatZ,GradeRadialGradient|RadialGradientGrade,NAvgRadialGradient|RadialGradientNAvg,NslRadialGradient|RadialGradientNsl,PhaseRadialGradient|RadialGradientPhase,RhoAvgRadialGradient|RadialGradientRhoAvg,RhoslRadialGradient|RadialGradientRhosl,RsRadialGradient|RadialGradientRs,VdRadialGradient|RadialGradientVd,FlatZStdDev|StandardDeviationPercentageFlatZ,GradeStdDev|StandardDeviationPercentageGrade,NAvgStdDev|StandardDeviationPercentageNAvg,NslStdDev|StandardDeviationPercentageNsl,PhaseStdDev|StandardDeviationPercentagePhase,RhoAvgStdDev|StandardDeviationPercentageRhoAvg,RhoslStdDev|StandardDeviationPercentageRhosl,RsStdDev|StandardDeviationPercentageRs,VdStdDev|StandardDeviationPercentageVd,|HGCV1";
// string args8 = "Time,A_LOGISTICS,B_LOGISTICS,Test,Count,Index,MesEntity,Date,Employee,Lot,PSN,Reactor,Recipe,Area,Folder,HeaderUniqueId,Id,Layer,Model,Pattern,Phase,Plan,RampRate,RDS,SetupFile,StartVoltage,StopVoltage,UniqueId,Wafer,WaferSize,Zone,Ccomp,CondType,FlatZ,FlatZMean,FlatZRadialGradient,FlatZStdDev,GLimit,Grade,GradeMean,GradeRadialGradient,GradeStdDev,NAvg,NAvgMean,NAvgRadialGradient,NAvgStdDev,Nsl,NslMean,NslRadialGradient,NslStdDev,PhaseMean,PhaseRadialGradient,PhaseStdDev,RhoAvg,RhoAvgMean,RhoAvgRadialGradient,RhoAvgStdDev,RhoMethod,Rhosl,RhoslMean,RhoslRadialGradient,RhoslStdDev,RsMean,RsRadialGradient,RsStdDev,Vd,VdMean,VdRadialGradient,VdStdDev,Variation,AreaDeltaFromLastRun,Nine10mmEdgeMean,Nine4mmEdgeMean,NineCriticalPointsAverage,NineCriticalPointsPhaseAngleAverage,NineCriticalPointsStdDev,NineEdgeMeanDelta,NineMean,NineResRangePercent,RhoAvg01,RhoAvg02,RhoAvg03,RhoAvg04,RhoAvg05,RhoAvg06,RhoAvg07,RhoAvg08,RhoAvg09";
// string args9 = "Time,A_LOGISTICS,B_LOGISTICS,Index,Operator,StartVoltage,Wafer,StopVoltage,Lot,RampRate,Plan,GLimit,Date,Time,SetupFile,WaferSize,Folder,Ccomp,Pattern,Area,CondType,RhoMethod,Model,MeanNAvg,MeanNsl,MeanVd,MeanFlatZ,MeanRhoAvg,MeanRhosl,MeanPhase,MeanGrade,MeanRs,StandardDeviationPercentageNAvg,StandardDeviationPercentageNsl,StandardDeviationPercentageVd,StandardDeviationPercentageFlatZ,StandardDeviationPercentageRhoAvg,StandardDeviationPercentageRhosl,StandardDeviationPercentagePhase,StandardDeviationPercentageGrade,StandardDeviationPercentageRs,RadialGradientNAvg,RadialGradientNsl,RadialGradientVd,RadialGradientFlatZ,RadialGradientRhoAvg,RadialGradientRhosl,RadialGradientPhase,RadialGradientGrade,RadialGradientRs,Site,X,Y,NAvg,RhoAvg,Nsl,Rhosl,Vd,Phase,FlatZ,Grade,XLeft,XRight,BottomY,TopY,RDS,PSN,Reactor,Layer,Zone,Employee,InferredLot,Nine10mmEdgeMean,Nine4mmEdgeMean,NineCriticalPointsAverage,NineCriticalPointsPhaseAngleAverage,NineCriticalPointsStdDev,NineEdgeMeanDelta,NineMean,NineResRangePercent,AreaDeltaFromLastRun,Variation,Percentage HgCV 4PP Delta,RhoAvg01,RhoAvg02,RhoAvg03,RhoAvg04,RhoAvg05,RhoAvg06,RhoAvg07,RhoAvg08,RhoAvg09";
// string args10 = "0,1,2,-1,-1,3,-1,12,70,8,66,67,-1,19,16,-1,-1,68,22,18,58,10,9,65,14,5,7,-1,6,15,69,17,20,59,26,44,35,11,60,30,48,39,53,23,41,32,55,24,42,33,29,47,38,54,27,45,36,21,56,28,46,37,31,49,40,57,25,43,34,81,80,72,73,74,75,76,77,78,79,83,84,85,86,87,88,89,90,91";
string[] segments = args7.Split(',');
ReadOnlyCollection<string> ignoreColumns = new(args4.Split(','));
ReadOnlyCollection<string> backfillColumns = new(args5.Split(','));
ReadOnlyCollection<string> indexOnlyColumns = new(args6.Split(','));
ReadOnlyCollection<string> newColumnNames = new(processDataStandardFormatMappingNewColumnNames.Split(','));
ReadOnlyCollection<string> oldColumnNames = new(processDataStandardFormatMappingOldColumnNames.Split(','));
ReadOnlyCollection<int> columnIndices = new(processDataStandardFormatMappingColumnIndices.Split(',').Select(int.Parse).ToArray());
foreach (string segment in segments)
{
segmentsB = segment.Split('|');
if (segmentsB.Length != 2)
continue;
if (distinct.Contains(segmentsB[0]))
continue;
distinct.Add(segmentsB[0]);
keyValuePairs.Add(segmentsB[0], segmentsB[1]);
}
result = new(backfillColumns: backfillColumns,
columnIndices: columnIndices,
newColumnNames: newColumnNames,
ignoreColumns: ignoreColumns,
indexOnlyColumns: indexOnlyColumns,
keyValuePairs: new(keyValuePairs),
oldColumnNames: oldColumnNames);
return result;
}
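// For each Pre, derives the companion ".err" file name and a sibling "NoWaitDirectory" path
// next to the check file, creating the check file's directory when it does not exist yet.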
private static ReadOnlyCollection<PreWith> GetPreWithCollection(ReadOnlyCollection<Pre> preCollection)
{
List<PreWith> results = new();
string errFile;
PreWith preWith;
string? checkDirectory;
string noWaitDirectory;
foreach (Pre pre in preCollection)
{
errFile = string.Concat(pre.CheckFile, ".err");
checkDirectory = Path.GetDirectoryName(pre.CheckFile);
if (string.IsNullOrEmpty(checkDirectory))
continue;
if (!Directory.Exists(checkDirectory))
_ = Directory.CreateDirectory(checkDirectory);
noWaitDirectory = Path.Combine(checkDirectory, "NoWaitDirectory");
preWith = new(checkDirectory: checkDirectory,
checkFile: pre.CheckFile,
errFile: errFile,
matchingFile: pre.MatchingFile,
noWaitDirectory: noWaitDirectory);
results.Add(preWith);
}
return results.AsReadOnly();
}
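// Returns the top-level subdirectories of parentDirectory whose names are exactly
// numberLength characters long (two characters for the value used below), sorted by name.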
private static ReadOnlyCollection<string> GetSearchDirectories(int numberLength, string parentDirectory)
{
List<string> results = new();
string[] directories = Directory.GetDirectories(parentDirectory, "*", SearchOption.TopDirectoryOnly);
foreach (string directory in directories)
{
if (Path.GetFileName(directory).Length != numberLength)
continue;
results.Add(directory);
}
results.Sort();
return results.AsReadOnly();
}
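// Writes a "<NN>.txt" pointer file in parentDirectory for each matching file, recording the
// parent directory, the matching file, its directory and the derived check-file path
// (presumably for later tracing); existing pointer files are never overwritten.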
private static void CreatePointerFile(int numberLength, string parentDirectory, ReadOnlyCollection<string> matchingFiles)
{
string checkFile;
string writeFile;
string? directoryName;
int parentDirectoryLength = parentDirectory.Length;
foreach (string matchingFile in matchingFiles)
{
directoryName = Path.GetDirectoryName(matchingFile);
if (directoryName is null)
continue;
checkFile = $"{matchingFile[0]}{directoryName.Substring(parentDirectoryLength + numberLength + 1)}";
writeFile = Path.Combine(parentDirectory, $"{directoryName.Substring(parentDirectory.Length + 1, numberLength)}.txt");
if (File.Exists(writeFile))
continue;
File.AppendAllLines(writeFile, new string[] { parentDirectory, matchingFile, directoryName, checkFile });
}
}
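// Pairs each matching file with a check-file path built by collapsing the numbered
// subdirectory segment out of the path while keeping the path's leading character.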
private static ReadOnlyCollection<Pre> GetPreCollection(int numberLength, string parentDirectory, ReadOnlyCollection<string> matchingFiles)
{
List<Pre> results = new();
Pre pre;
string checkFile;
int parentDirectoryLength = parentDirectory.Length;
foreach (string matchingFile in matchingFiles)
{
checkFile = $"{matchingFile[0]}{matchingFile.Substring(parentDirectoryLength + numberLength + 1)}";
pre = new(matchingFile, checkFile);
results.Add(pre);
}
return results.AsReadOnly();
}
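// Distributes the PDSF to every check file, then verifies consumption: any ".err" content or a
// check file that is still present afterwards is collected and rethrown as a single exception.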
private void MoveCollection(DateTime dateTime, ProcessDataStandardFormat processDataStandardFormat, ReadOnlyCollection<PreWith> preWithCollection)
{
ReadOnlyCollection<Post> postCollection = GetPostCollection(dateTime, processDataStandardFormat, preWithCollection);
if (postCollection.Count != 0)
{
Thread.Sleep(500);
StringBuilder stringBuilder = new();
foreach (Post post in postCollection)
{
if (File.Exists(post.ErrFile))
_ = stringBuilder.AppendLine(File.ReadAllText(post.ErrFile));
if (File.Exists(post.CheckFile))
_ = stringBuilder.AppendLine($"<{post.CheckFile}> was not consumed by the end!");
}
if (stringBuilder.Length > 0)
throw new Exception(stringBuilder.ToString());
}
}
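// EAF-hosted only: writes the PDSF (plus any static-run results) to each check file, deletes the
// matching file, then either defers verification when a NoWaitDirectory exists or waits
// (FileHandleWaitTime, 1234 ms when unset) and polls every 500 ms until the check file is
// consumed, an ".err" file appears, or _BreakAfterSeconds elapses.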
private ReadOnlyCollection<Post> GetPostCollection(DateTime dateTime, ProcessDataStandardFormat processDataStandardFormat, ReadOnlyCollection<PreWith> preWithCollection)
{
List<Post> results = new();
Post post;
long preWait;
foreach (PreWith preWith in preWithCollection)
{
if (!_IsEAFHosted)
continue;
if (!_StaticRuns.TryGetValue(_Logistics.Sequence, out List<Shared.Metrology.WS.Results>? wsResults))
wsResults = null;
ProcessDataStandardFormat.Write(preWith.CheckFile, processDataStandardFormat, wsResults);
File.Delete(preWith.MatchingFile);
if (Directory.Exists(preWith.NoWaitDirectory))
{
post = new(preWith.CheckFile, preWith.ErrFile);
results.Add(post);
continue;
}
if (_FileConnectorConfiguration?.FileHandleWaitTime is null)
preWait = DateTime.Now.AddMilliseconds(1234).Ticks;
else
preWait = DateTime.Now.AddMilliseconds(_FileConnectorConfiguration.FileHandleWaitTime.Value).Ticks;
for (short i = 0; i < short.MaxValue; i++)
{
if (DateTime.Now.Ticks > preWait)
break;
Thread.Sleep(500);
}
for (int i = 0; i < int.MaxValue; i++)
{
if (File.Exists(preWith.ErrFile))
throw new Exception(File.ReadAllText(preWith.ErrFile));
if (!File.Exists(preWith.CheckFile))
break;
if (new TimeSpan(DateTime.Now.Ticks - dateTime.Ticks).TotalSeconds > _BreakAfterSeconds)
throw new Exception($"Not all files were consumed after {_BreakAfterSeconds} second(s)!");
Thread.Sleep(500);
}
}
return results.AsReadOnly();
}
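// Duplicator flow: parse the report with the column mapping, locate the same file name under
// every two-character sibling directory, optionally drop pointer files when EAF-hosted, then
// hand the PDSF to MoveCollection for distribution; the returned tuple stays empty.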
private Tuple<string, Test[], JsonElement[], List<FileInfo>> GetExtractResult(string reportFullPath, DateTime dateTime)
{
Tuple<string, Test[], JsonElement[], List<FileInfo>> results = new(string.Empty, Array.Empty<Test>(), Array.Empty<JsonElement>(), new List<FileInfo>());
ProcessDataStandardFormat processDataStandardFormat = ProcessDataStandardFormat.GetProcessDataStandardFormat(reportFullPath, _ProcessDataStandardFormatMapping);
_Logistics = new Logistics(reportFullPath, processDataStandardFormat);
if (!_IsEAFHosted)
ProcessDataStandardFormat.Write("../../.pdsf", processDataStandardFormat, wsResults: null);
SetFileParameterLotIDToLogisticsMID();
int numberLength = 2;
long ticks = dateTime.Ticks;
string parentParentDirectory = GetParentParent(reportFullPath);
ReadOnlyCollection<string> searchDirectories = GetSearchDirectories(numberLength, parentParentDirectory);
ReadOnlyCollection<string> matchingFiles = GetMatchingFiles(ticks, reportFullPath, searchDirectories);
if (matchingFiles.Count != searchDirectories.Count)
throw new Exception($"Didn't find all files after {_BreakAfterSeconds} second(s)!");
if (_IsEAFHosted)
{
try
{ CreatePointerFile(numberLength, parentParentDirectory, matchingFiles); }
catch (Exception) { }
}
ReadOnlyCollection<Pre> preCollection = GetPreCollection(numberLength, parentParentDirectory, matchingFiles);
ReadOnlyCollection<PreWith> preWithCollection = GetPreWithCollection(preCollection);
MoveCollection(dateTime, processDataStandardFormat, preWithCollection);
return results;
}
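// Polls each search directory (recursively) for a file with the report's name until one is
// found or _BreakAfterSeconds is exceeded for that directory.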
private ReadOnlyCollection<string> GetMatchingFiles(long ticks, string reportFullPath, ReadOnlyCollection<string> searchDirectories)
{
List<string> results = new();
string[] found;
string fileName = Path.GetFileName(reportFullPath);
foreach (string searchDirectory in searchDirectories)
{
for (int i = 0; i < int.MaxValue; i++)
{
found = Directory.GetFiles(searchDirectory, fileName, SearchOption.AllDirectories);
if (found.Length != 0)
{
results.AddRange(found);
break;
}
if (new TimeSpan(DateTime.Now.Ticks - ticks).TotalSeconds > _BreakAfterSeconds)
break;
}
}
return results.AsReadOnly();
}
}

View File

@@ -0,0 +1,133 @@
using Adaptation.Eaf.Management.ConfigurationData.CellAutomation;
using Adaptation.Ifx.Eaf.EquipmentConnector.File.Configuration;
using Adaptation.Shared;
using Adaptation.Shared.Duplicator;
using Adaptation.Shared.Methods;
using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Text.Json;
namespace Adaptation.FileHandlers.OpenInsight;
public class FileRead : Shared.FileRead, IFileRead
{
public FileRead(ISMTP smtp, Dictionary<string, string> fileParameter, string cellInstanceName, int? connectionCount, string cellInstanceConnectionName, FileConnectorConfiguration fileConnectorConfiguration, string equipmentTypeName, string parameterizedModelObjectDefinitionType, IList<ModelObjectParameterDefinition> modelObjectParameters, string equipmentDictionaryName, Dictionary<string, List<long>> dummyRuns, Dictionary<long, List<Shared.Metrology.WS.Results>> staticRuns, bool useCyclicalForDescription, bool isEAFHosted) :
base(new Description(), false, smtp, fileParameter, cellInstanceName, connectionCount, cellInstanceConnectionName, fileConnectorConfiguration, equipmentTypeName, parameterizedModelObjectDefinitionType, modelObjectParameters, equipmentDictionaryName, dummyRuns, staticRuns, useCyclicalForDescription, isEAFHosted: connectionCount is null)
{
_MinFileLength = 10;
_NullData = string.Empty;
_Logistics = new(this);
if (_FileParameter is null)
throw new Exception(cellInstanceConnectionName);
if (_ModelObjectParameterDefinitions is null)
throw new Exception(cellInstanceConnectionName);
if (!_IsDuplicator)
throw new Exception(cellInstanceConnectionName);
}
void IFileRead.Move(Tuple<string, Test[], JsonElement[], List<FileInfo>> extractResults, Exception exception)
{
bool isErrorFile = exception is not null;
if (!isErrorFile && !string.IsNullOrEmpty(_Logistics.ReportFullPath))
{
FileInfo fileInfo = new(_Logistics.ReportFullPath);
if (fileInfo.Exists && fileInfo.LastWriteTime < fileInfo.CreationTime)
File.SetLastWriteTime(_Logistics.ReportFullPath, fileInfo.CreationTime);
}
Move(extractResults);
}
void IFileRead.WaitForThread() => WaitForThread(thread: null, threadExceptions: null);
string IFileRead.GetEventDescription()
{
string result = _Description.GetEventDescription();
return result;
}
List<string> IFileRead.GetHeaderNames()
{
List<string> results = _Description.GetHeaderNames();
return results;
}
string[] IFileRead.Move(Tuple<string, Test[], JsonElement[], List<FileInfo>> extractResults, string to, string from, string resolvedFileLocation, Exception exception)
{
string[] results = Move(extractResults, to, from, resolvedFileLocation, exception);
return results;
}
JsonProperty[] IFileRead.GetDefault()
{
JsonProperty[] results = _Description.GetDefault(this, _Logistics);
return results;
}
Dictionary<string, string> IFileRead.GetDisplayNamesJsonElement()
{
Dictionary<string, string> results = _Description.GetDisplayNamesJsonElement(this);
return results;
}
List<IDescription> IFileRead.GetDescriptions(IFileRead fileRead, List<Test> tests, IProcessData processData)
{
List<IDescription> results = _Description.GetDescriptions(fileRead, _Logistics, tests, processData);
return results;
}
Tuple<string, Test[], JsonElement[], List<FileInfo>> IFileRead.GetExtractResult(string reportFullPath, string eventName)
{
Tuple<string, Test[], JsonElement[], List<FileInfo>> results;
if (string.IsNullOrEmpty(eventName))
throw new Exception();
_ReportFullPath = reportFullPath;
DateTime dateTime = DateTime.Now;
results = GetExtractResult(reportFullPath, dateTime);
if (results.Item3 is null)
results = new Tuple<string, Test[], JsonElement[], List<FileInfo>>(results.Item1, Array.Empty<Test>(), JsonSerializer.Deserialize<JsonElement[]>("[]"), results.Item4);
if (results.Item3.Length > 0 && _IsEAFHosted)
WritePDSF(this, results.Item3);
UpdateLastTicksDuration(DateTime.Now.Ticks - dateTime.Ticks);
return results;
}
Tuple<string, Test[], JsonElement[], List<FileInfo>> IFileRead.ReExtract()
{
Tuple<string, Test[], JsonElement[], List<FileInfo>> results;
List<string> headerNames = _Description.GetHeaderNames();
Dictionary<string, string> keyValuePairs = _Description.GetDisplayNamesJsonElement(this);
results = ReExtract(this, headerNames, keyValuePairs);
return results;
}
private static void SaveOpenInsightFile(string reportFullPath, DateTime dateTime, List<Description> descriptions, Test[] tests)
{
if (string.IsNullOrEmpty(reportFullPath))
throw new ArgumentException($"'{nameof(reportFullPath)}' cannot be null or empty.", nameof(reportFullPath));
if (dateTime == DateTime.MinValue)
throw new ArgumentNullException(nameof(dateTime));
if (descriptions is null)
throw new ArgumentNullException(nameof(descriptions));
if (tests is null)
throw new ArgumentNullException(nameof(tests));
}
private Tuple<string, Test[], JsonElement[], List<FileInfo>> GetExtractResult(string reportFullPath, DateTime dateTime)
{
Tuple<string, Test[], JsonElement[], List<FileInfo>> results;
ProcessDataStandardFormat processDataStandardFormat = ProcessDataStandardFormat.GetProcessDataStandardFormat(reportFullPath);
_Logistics = new Logistics(reportFullPath, processDataStandardFormat);
SetFileParameterLotIDToLogisticsMID();
JsonElement[] jsonElements = ProcessDataStandardFormat.GetArray(processDataStandardFormat);
List<Description> descriptions = json.ProcessData.GetDescriptions(jsonElements);
Test[] tests = (from l in descriptions select (Test)l.Test).ToArray();
if (_IsEAFHosted && _FileConnectorConfiguration.FileScanningIntervalInSeconds > 0)
SaveOpenInsightFile(reportFullPath, dateTime, descriptions, tests);
results = new Tuple<string, Test[], JsonElement[], List<FileInfo>>(string.Join(Environment.NewLine, processDataStandardFormat.Logistics), tests, jsonElements, new List<FileInfo>());
return results;
}
}

View File

@@ -0,0 +1,144 @@
using Adaptation.Eaf.Management.ConfigurationData.CellAutomation;
using Adaptation.Ifx.Eaf.EquipmentConnector.File.Configuration;
using Adaptation.Shared;
using Adaptation.Shared.Duplicator;
using Adaptation.Shared.Methods;
using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Text.Json;
namespace Adaptation.FileHandlers.OpenInsightMetrologyViewer;
public class FileRead : Shared.FileRead, IFileRead
{
public FileRead(ISMTP smtp, Dictionary<string, string> fileParameter, string cellInstanceName, int? connectionCount, string cellInstanceConnectionName, FileConnectorConfiguration fileConnectorConfiguration, string equipmentTypeName, string parameterizedModelObjectDefinitionType, IList<ModelObjectParameterDefinition> modelObjectParameters, string equipmentDictionaryName, Dictionary<string, List<long>> dummyRuns, Dictionary<long, List<Shared.Metrology.WS.Results>> staticRuns, bool useCyclicalForDescription, bool isEAFHosted) :
base(new Description(), false, smtp, fileParameter, cellInstanceName, connectionCount, cellInstanceConnectionName, fileConnectorConfiguration, equipmentTypeName, parameterizedModelObjectDefinitionType, modelObjectParameters, equipmentDictionaryName, dummyRuns, staticRuns, useCyclicalForDescription, isEAFHosted: connectionCount is null)
{
_MinFileLength = 10;
_NullData = string.Empty;
_Logistics = new(this);
if (_FileParameter is null)
throw new Exception(cellInstanceConnectionName);
if (_ModelObjectParameterDefinitions is null)
throw new Exception(cellInstanceConnectionName);
if (!_IsDuplicator)
throw new Exception(cellInstanceConnectionName);
}
void IFileRead.Move(Tuple<string, Test[], JsonElement[], List<FileInfo>> extractResults, Exception exception)
{
bool isErrorFile = exception is not null;
if (!isErrorFile && !string.IsNullOrEmpty(_Logistics.ReportFullPath))
{
FileInfo fileInfo = new(_Logistics.ReportFullPath);
if (fileInfo.Exists && fileInfo.LastWriteTime < fileInfo.CreationTime)
File.SetLastWriteTime(_Logistics.ReportFullPath, fileInfo.CreationTime);
}
Move(extractResults);
}
void IFileRead.WaitForThread() => WaitForThread(thread: null, threadExceptions: null);
string IFileRead.GetEventDescription()
{
string result = _Description.GetEventDescription();
return result;
}
List<string> IFileRead.GetHeaderNames()
{
List<string> results = _Description.GetHeaderNames();
return results;
}
string[] IFileRead.Move(Tuple<string, Test[], JsonElement[], List<FileInfo>> extractResults, string to, string from, string resolvedFileLocation, Exception exception)
{
string[] results = Move(extractResults, to, from, resolvedFileLocation, exception);
return results;
}
JsonProperty[] IFileRead.GetDefault()
{
JsonProperty[] results = _Description.GetDefault(this, _Logistics);
return results;
}
Dictionary<string, string> IFileRead.GetDisplayNamesJsonElement()
{
Dictionary<string, string> results = _Description.GetDisplayNamesJsonElement(this);
return results;
}
List<IDescription> IFileRead.GetDescriptions(IFileRead fileRead, List<Test> tests, IProcessData processData)
{
List<IDescription> results = _Description.GetDescriptions(fileRead, _Logistics, tests, processData);
return results;
}
Tuple<string, Test[], JsonElement[], List<FileInfo>> IFileRead.GetExtractResult(string reportFullPath, string eventName)
{
Tuple<string, Test[], JsonElement[], List<FileInfo>> results;
if (string.IsNullOrEmpty(eventName))
throw new Exception();
_ReportFullPath = reportFullPath;
DateTime dateTime = DateTime.Now;
results = GetExtractResult(reportFullPath, dateTime);
if (results.Item3 is null)
results = new Tuple<string, Test[], JsonElement[], List<FileInfo>>(results.Item1, Array.Empty<Test>(), JsonSerializer.Deserialize<JsonElement[]>("[]"), results.Item4);
if (results.Item3.Length > 0 && _IsEAFHosted)
WritePDSF(this, results.Item3);
UpdateLastTicksDuration(DateTime.Now.Ticks - dateTime.Ticks);
return results;
}
Tuple<string, Test[], JsonElement[], List<FileInfo>> IFileRead.ReExtract()
{
Tuple<string, Test[], JsonElement[], List<FileInfo>> results;
List<string> headerNames = _Description.GetHeaderNames();
Dictionary<string, string> keyValuePairs = _Description.GetDisplayNamesJsonElement(this);
results = ReExtract(this, headerNames, keyValuePairs);
return results;
}
#pragma warning disable IDE0060, CA1822
private void SendData(string reportFullPath, DateTime dateTime, List<Description> descriptions)
#pragma warning restore IDE0060, CA1822
{
// WSRequest wsRequest = new(this, _Logistics, descriptions);
// int weekOfYear = _Calendar.GetWeekOfYear(dateTime, CalendarWeekRule.FirstDay, DayOfWeek.Sunday);
// string directory = Path.Combine(_OpenInsightMetrologyViewerFileShare, dateTime.Year.ToString(), $"WW{weekOfYear:00}");
// (string jsonResults, WS.Results wsResults) = WS.SendData(_OpenInsightMetrologyViewerAPI, _Logistics.Sequence, directory, wsRequest);
// if (!wsResults.Success)
// throw new Exception(wsResults.ToString());
// _Log.Debug(wsResults.HeaderId);
// lock (_StaticRuns)
// {
// if (!_StaticRuns.ContainsKey(_Logistics.Sequence))
// _StaticRuns.Add(_Logistics.Sequence, new());
// _StaticRuns[_Logistics.Sequence].Add(jsonResults);
// }
// string checkDirectory = Path.Combine(directory, wsResults.HeaderId.ToString());
// if (!Directory.Exists(checkDirectory))
// _ = Directory.CreateDirectory(checkDirectory);
// File.Copy(reportFullPath, Path.Combine(checkDirectory, Path.GetFileName(reportFullPath)), overwrite: true);
}
private Tuple<string, Test[], JsonElement[], List<FileInfo>> GetExtractResult(string reportFullPath, DateTime dateTime)
{
Tuple<string, Test[], JsonElement[], List<FileInfo>> results;
ProcessDataStandardFormat processDataStandardFormat = ProcessDataStandardFormat.GetProcessDataStandardFormat(reportFullPath);
_Logistics = new Logistics(reportFullPath, processDataStandardFormat);
SetFileParameterLotIDToLogisticsMID();
JsonElement[] jsonElements = ProcessDataStandardFormat.GetArray(processDataStandardFormat);
List<Description> descriptions = json.ProcessData.GetDescriptions(jsonElements);
Test[] tests = (from l in descriptions select (Test)l.Test).ToArray();
if (_IsEAFHosted && _FileConnectorConfiguration.FileScanningIntervalInSeconds > 0)
SendData(reportFullPath, dateTime, descriptions);
results = new Tuple<string, Test[], JsonElement[], List<FileInfo>>(string.Join(Environment.NewLine, processDataStandardFormat.Logistics), tests, jsonElements, new List<FileInfo>());
return results;
}
}

View File

@@ -0,0 +1,161 @@
using Adaptation.Eaf.Management.ConfigurationData.CellAutomation;
using Adaptation.Ifx.Eaf.EquipmentConnector.File.Configuration;
using Adaptation.Shared;
using Adaptation.Shared.Duplicator;
using Adaptation.Shared.Methods;
using System;
using System.Collections.Generic;
using System.Globalization;
using System.IO;
using System.Linq;
using System.Text.Json;
namespace Adaptation.FileHandlers.OpenInsightMetrologyViewerAttachments;
public class FileRead : Shared.FileRead, IFileRead
{
private readonly string _OpenInsightMetrologyViewerAPI;
private readonly string _OpenInsightMetrologyViewerFileShare;
public FileRead(ISMTP smtp, Dictionary<string, string> fileParameter, string cellInstanceName, int? connectionCount, string cellInstanceConnectionName, FileConnectorConfiguration fileConnectorConfiguration, string equipmentTypeName, string parameterizedModelObjectDefinitionType, IList<ModelObjectParameterDefinition> modelObjectParameters, string equipmentDictionaryName, Dictionary<string, List<long>> dummyRuns, Dictionary<long, List<Shared.Metrology.WS.Results>> staticRuns, bool useCyclicalForDescription, bool isEAFHosted) :
base(new Description(), false, smtp, fileParameter, cellInstanceName, connectionCount, cellInstanceConnectionName, fileConnectorConfiguration, equipmentTypeName, parameterizedModelObjectDefinitionType, modelObjectParameters, equipmentDictionaryName, dummyRuns, staticRuns, useCyclicalForDescription, isEAFHosted: connectionCount is null)
{
_MinFileLength = 10;
_NullData = string.Empty;
_Logistics = new(this);
if (_FileParameter is null)
throw new Exception(cellInstanceConnectionName);
if (_ModelObjectParameterDefinitions is null)
throw new Exception(cellInstanceConnectionName);
if (!_IsDuplicator)
throw new Exception(cellInstanceConnectionName);
_OpenInsightMetrologyViewerAPI = GetPropertyValue(cellInstanceConnectionName, modelObjectParameters, "OpenInsight.MetrologyViewerAPI");
_OpenInsightMetrologyViewerFileShare = GetPropertyValue(cellInstanceConnectionName, modelObjectParameters, "OpenInsight.MetrologyViewerFileShare");
}
void IFileRead.Move(Tuple<string, Test[], JsonElement[], List<FileInfo>> extractResults, Exception exception)
{
bool isErrorFile = exception is not null;
if (!isErrorFile && !string.IsNullOrEmpty(_Logistics.ReportFullPath))
{
FileInfo fileInfo = new(_Logistics.ReportFullPath);
if (fileInfo.Exists && fileInfo.LastWriteTime < fileInfo.CreationTime)
File.SetLastWriteTime(_Logistics.ReportFullPath, fileInfo.CreationTime);
}
Move(extractResults);
}
void IFileRead.WaitForThread() => WaitForThread(thread: null, threadExceptions: null);
string IFileRead.GetEventDescription()
{
string result = _Description.GetEventDescription();
return result;
}
List<string> IFileRead.GetHeaderNames()
{
List<string> results = _Description.GetHeaderNames();
return results;
}
string[] IFileRead.Move(Tuple<string, Test[], JsonElement[], List<FileInfo>> extractResults, string to, string from, string resolvedFileLocation, Exception exception)
{
string[] results = Move(extractResults, to, from, resolvedFileLocation, exception);
return results;
}
JsonProperty[] IFileRead.GetDefault()
{
JsonProperty[] results = _Description.GetDefault(this, _Logistics);
return results;
}
Dictionary<string, string> IFileRead.GetDisplayNamesJsonElement()
{
Dictionary<string, string> results = _Description.GetDisplayNamesJsonElement(this);
return results;
}
List<IDescription> IFileRead.GetDescriptions(IFileRead fileRead, List<Test> tests, IProcessData processData)
{
List<IDescription> results = _Description.GetDescriptions(fileRead, _Logistics, tests, processData);
return results;
}
Tuple<string, Test[], JsonElement[], List<FileInfo>> IFileRead.GetExtractResult(string reportFullPath, string eventName)
{
Tuple<string, Test[], JsonElement[], List<FileInfo>> results;
if (string.IsNullOrEmpty(eventName))
throw new Exception();
_ReportFullPath = reportFullPath;
DateTime dateTime = DateTime.Now;
results = GetExtractResult(reportFullPath, dateTime);
if (results.Item3 is null)
results = new Tuple<string, Test[], JsonElement[], List<FileInfo>>(results.Item1, Array.Empty<Test>(), JsonSerializer.Deserialize<JsonElement[]>("[]"), results.Item4);
if (results.Item3.Length > 0 && _IsEAFHosted)
WritePDSF(this, results.Item3);
UpdateLastTicksDuration(DateTime.Now.Ticks - dateTime.Ticks);
return results;
}
Tuple<string, Test[], JsonElement[], List<FileInfo>> IFileRead.ReExtract()
{
Tuple<string, Test[], JsonElement[], List<FileInfo>> results;
List<string> headerNames = _Description.GetHeaderNames();
Dictionary<string, string> keyValuePairs = _Description.GetDisplayNamesJsonElement(this);
results = ReExtract(this, headerNames, keyValuePairs);
return results;
}
#nullable enable
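// Probes the week-of-year file share for a "-{headerId}" folder, checking the run's week and
// the previous week (sequence date minus ~6.66 days); missing week folders are created.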
private string? GetHeaderIdDirectory(long headerId)
{
string? result = null;
int weekNum;
string year;
string weekDirectory;
string checkDirectory;
DateTime[] dateTimes = new DateTime[] { _Logistics.DateTimeFromSequence, _Logistics.DateTimeFromSequence.AddDays(-6.66) };
foreach (DateTime dateTime in dateTimes)
{
year = dateTime.Year.ToString();
weekNum = _Calendar.GetWeekOfYear(dateTime, CalendarWeekRule.FirstDay, DayOfWeek.Sunday);
weekDirectory = Path.Combine(_OpenInsightMetrologyViewerFileShare, dateTime.Year.ToString(), $"WW{weekNum:00}");
if (!Directory.Exists(weekDirectory))
_ = Directory.CreateDirectory(weekDirectory);
checkDirectory = Path.Combine(weekDirectory, $"-{headerId}");
if (!Directory.Exists(checkDirectory))
continue;
result = checkDirectory;
break;
}
return result;
}
private static void PostOpenInsightMetrologyViewerAttachments(List<Description> descriptions)
{
if (descriptions is null)
throw new ArgumentNullException(nameof(descriptions));
}
private Tuple<string, Test[], JsonElement[], List<FileInfo>> GetExtractResult(string reportFullPath, DateTime dateTime)
{
if (dateTime == DateTime.MinValue)
throw new ArgumentNullException(nameof(dateTime));
Tuple<string, Test[], JsonElement[], List<FileInfo>> results;
ProcessDataStandardFormat processDataStandardFormat = ProcessDataStandardFormat.GetProcessDataStandardFormat(reportFullPath);
_Logistics = new Logistics(reportFullPath, processDataStandardFormat);
SetFileParameterLotIDToLogisticsMID();
JsonElement[] jsonElements = ProcessDataStandardFormat.GetArray(processDataStandardFormat);
List<Description> descriptions = json.ProcessData.GetDescriptions(jsonElements);
Test[] tests = (from l in descriptions select (Test)l.Test).ToArray();
if (_IsEAFHosted && _FileConnectorConfiguration.FileScanningIntervalInSeconds > 0)
PostOpenInsightMetrologyViewerAttachments(descriptions);
results = new Tuple<string, Test[], JsonElement[], List<FileInfo>>(string.Join(Environment.NewLine, processDataStandardFormat.Logistics), tests, jsonElements, new List<FileInfo>());
return results;
}
}

View File

@@ -0,0 +1,192 @@
using Adaptation.Eaf.Management.ConfigurationData.CellAutomation;
using Adaptation.Ifx.Eaf.EquipmentConnector.File.Configuration;
using Adaptation.Shared;
using Adaptation.Shared.Duplicator;
using Adaptation.Shared.Methods;
using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Text.Json;
namespace Adaptation.FileHandlers.Processed;
public class FileRead : Shared.FileRead, IFileRead
{
private readonly string _JobIdParentDirectory;
private readonly string _JobIdProcessParentDirectory;
public FileRead(ISMTP smtp, Dictionary<string, string> fileParameter, string cellInstanceName, int? connectionCount, string cellInstanceConnectionName, FileConnectorConfiguration fileConnectorConfiguration, string equipmentTypeName, string parameterizedModelObjectDefinitionType, IList<ModelObjectParameterDefinition> modelObjectParameters, string equipmentDictionaryName, Dictionary<string, List<long>> dummyRuns, Dictionary<long, List<Shared.Metrology.WS.Results>> staticRuns, bool useCyclicalForDescription, bool isEAFHosted) :
base(new Description(), false, smtp, fileParameter, cellInstanceName, connectionCount, cellInstanceConnectionName, fileConnectorConfiguration, equipmentTypeName, parameterizedModelObjectDefinitionType, modelObjectParameters, equipmentDictionaryName, dummyRuns, staticRuns, useCyclicalForDescription, isEAFHosted: connectionCount is null)
{
_MinFileLength = 10;
_NullData = string.Empty;
_Logistics = new(this);
if (_FileParameter is null)
throw new Exception(cellInstanceConnectionName);
if (_ModelObjectParameterDefinitions is null)
throw new Exception(cellInstanceConnectionName);
if (!_IsDuplicator)
throw new Exception(cellInstanceConnectionName);
_JobIdParentDirectory = GetJobIdParentDirectory(_FileConnectorConfiguration.SourceFileLocation);
_JobIdProcessParentDirectory = GetJobIdParentDirectory(_FileConnectorConfiguration.TargetFileLocation);
}
void IFileRead.Move(Tuple<string, Test[], JsonElement[], List<FileInfo>> extractResults, Exception exception)
{
bool isErrorFile = exception is not null;
if (!isErrorFile && !string.IsNullOrEmpty(_Logistics.ReportFullPath))
{
FileInfo fileInfo = new(_Logistics.ReportFullPath);
if (fileInfo.Exists && fileInfo.LastWriteTime < fileInfo.CreationTime)
File.SetLastWriteTime(_Logistics.ReportFullPath, fileInfo.CreationTime);
}
Move(extractResults);
}
void IFileRead.WaitForThread() => WaitForThread(thread: null, threadExceptions: null);
string IFileRead.GetEventDescription()
{
string result = _Description.GetEventDescription();
return result;
}
List<string> IFileRead.GetHeaderNames()
{
List<string> results = _Description.GetHeaderNames();
return results;
}
string[] IFileRead.Move(Tuple<string, Test[], JsonElement[], List<FileInfo>> extractResults, string to, string from, string resolvedFileLocation, Exception exception)
{
string[] results = Move(extractResults, to, from, resolvedFileLocation, exception);
return results;
}
JsonProperty[] IFileRead.GetDefault()
{
JsonProperty[] results = _Description.GetDefault(this, _Logistics);
return results;
}
Dictionary<string, string> IFileRead.GetDisplayNamesJsonElement()
{
Dictionary<string, string> results = _Description.GetDisplayNamesJsonElement(this);
return results;
}
List<IDescription> IFileRead.GetDescriptions(IFileRead fileRead, List<Test> tests, IProcessData processData)
{
List<IDescription> results = _Description.GetDescriptions(fileRead, _Logistics, tests, processData);
return results;
}
Tuple<string, Test[], JsonElement[], List<FileInfo>> IFileRead.GetExtractResult(string reportFullPath, string eventName)
{
Tuple<string, Test[], JsonElement[], List<FileInfo>> results;
if (string.IsNullOrEmpty(eventName))
throw new Exception();
_ReportFullPath = reportFullPath;
DateTime dateTime = DateTime.Now;
results = GetExtractResult(reportFullPath, dateTime);
if (results.Item3 is null)
results = new Tuple<string, Test[], JsonElement[], List<FileInfo>>(results.Item1, Array.Empty<Test>(), JsonSerializer.Deserialize<JsonElement[]>("[]"), results.Item4);
if (results.Item3.Length > 0 && _IsEAFHosted)
WritePDSF(this, results.Item3);
UpdateLastTicksDuration(DateTime.Now.Ticks - dateTime.Ticks);
return results;
}
Tuple<string, Test[], JsonElement[], List<FileInfo>> IFileRead.ReExtract()
{
Tuple<string, Test[], JsonElement[], List<FileInfo>> results;
List<string> headerNames = _Description.GetHeaderNames();
Dictionary<string, string> keyValuePairs = _Description.GetDisplayNamesJsonElement(this);
results = ReExtract(this, headerNames, keyValuePairs);
return results;
}
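// Locates the single in-process directory for this job id by logistics sequence and normalizes
// the report's LastWriteTime; the actual move into _JobIdProcessParentDirectory is currently
// commented out below.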
private void DirectoryMove(string reportFullPath, DateTime dateTime, List<Description> descriptions)
{
if (dateTime == DateTime.MinValue)
throw new ArgumentNullException(nameof(dateTime));
if (descriptions is null)
throw new ArgumentNullException(nameof(descriptions));
FileInfo fileInfo = new(reportFullPath);
_ = _Logistics.Sequence.ToString();
string jobIdDirectory = Path.Combine(_JobIdParentDirectory, _Logistics.JobID);
if (!Directory.Exists(jobIdDirectory))
_ = Directory.CreateDirectory(jobIdDirectory);
string[] matchDirectories = GetInProcessDirectory(jobIdDirectory);
if (matchDirectories.Length != 1)
throw new Exception("Didn't find directory by logistics sequence");
if (fileInfo.Exists && fileInfo.LastWriteTime < fileInfo.CreationTime)
File.SetLastWriteTime(reportFullPath, fileInfo.CreationTime);
if (_JobIdProcessParentDirectory is null)
{ }
// OpenInsightMetrologyViewer.WSRequest wsRequest = new(this, _Logistics, descriptions);
// JsonSerializerOptions jsonSerializerOptions = new() { WriteIndented = true };
// string json = JsonSerializer.Serialize(wsRequest, wsRequest.GetType(), jsonSerializerOptions);
// string directoryName = $"{Path.GetFileName(matchDirectories[0]).Split(new string[] { logisticsSequence }, StringSplitOptions.None)[0]}{_Logistics.DateTimeFromSequence:yyyy-MM-dd_hh;mm_tt_}{DateTime.Now.Ticks - _Logistics.Sequence}";
// string destinationJobIdDirectory = Path.Combine(_JobIdProcessParentDirectory, _Logistics.JobID, directoryName);
// string sequenceDirectory = Path.Combine(destinationJobIdDirectory, logisticsSequence);
// string jsonFileName = Path.Combine(sequenceDirectory, $"{Path.GetFileNameWithoutExtension(reportFullPath)}.json");
// Directory.Move(matchDirectories[0], destinationJobIdDirectory);
// if (!Directory.Exists(sequenceDirectory))
// _ = Directory.CreateDirectory(sequenceDirectory);
// File.Copy(reportFullPath, Path.Combine(sequenceDirectory, Path.GetFileName(reportFullPath)), overwrite: true);
// File.WriteAllText(jsonFileName, json);
}
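// Moves files from the job-id directory into the match directory when their names start with
// the name of a file already present there, skipping targets that already exist.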
private static void MoveMatchingFile(string jobIdDirectory, string matchDirectory)
{
string checkFile;
string jobIdDirectoryFileName;
string matchDirectoryFileName;
string[] jobIdDirectoryFiles = Directory.GetFiles(jobIdDirectory, "*", SearchOption.TopDirectoryOnly);
string[] matchDirectoryFiles = Directory.GetFiles(matchDirectory, "*", SearchOption.TopDirectoryOnly);
foreach (string jobIdDirectoryFile in jobIdDirectoryFiles)
{
jobIdDirectoryFileName = Path.GetFileName(jobIdDirectoryFile);
foreach (string matchDirectoryFile in matchDirectoryFiles)
{
matchDirectoryFileName = Path.GetFileName(matchDirectoryFile);
if (jobIdDirectoryFileName.StartsWith(matchDirectoryFileName))
{
checkFile = Path.Combine(matchDirectory, jobIdDirectoryFileName);
if (File.Exists(checkFile))
continue;
File.Move(jobIdDirectoryFile, checkFile);
}
}
}
}
private Tuple<string, Test[], JsonElement[], List<FileInfo>> GetExtractResult(string reportFullPath, DateTime dateTime)
{
Tuple<string, Test[], JsonElement[], List<FileInfo>> results;
ProcessDataStandardFormat processDataStandardFormat = ProcessDataStandardFormat.GetProcessDataStandardFormat(reportFullPath);
_Logistics = new Logistics(reportFullPath, processDataStandardFormat);
SetFileParameterLotIDToLogisticsMID();
JsonElement[] jsonElements = ProcessDataStandardFormat.GetArray(processDataStandardFormat);
List<Description> descriptions = json.ProcessData.GetDescriptions(jsonElements);
Test[] tests = (from l in descriptions select (Test)l.Test).ToArray();
results = new Tuple<string, Test[], JsonElement[], List<FileInfo>>(string.Join(Environment.NewLine, processDataStandardFormat.Logistics), tests, jsonElements, new List<FileInfo>());
if (_IsEAFHosted && _FileConnectorConfiguration.FileScanningIntervalInSeconds > 0)
DirectoryMove(reportFullPath, dateTime, descriptions);
else if (!_IsEAFHosted)
{
// OpenInsightMetrologyViewer.WSRequest wsRequest = new(this, _Logistics, descriptions);
// JsonSerializerOptions jsonSerializerOptions = new() { WriteIndented = true };
// string json = JsonSerializer.Serialize(wsRequest, wsRequest.GetType(), jsonSerializerOptions);
// string jsonFileName = Path.ChangeExtension(reportFullPath, ".json");
// string historicalText = File.ReadAllText(jsonFileName);
// if (json != historicalText)
// throw new Exception("File doesn't match historical!");
}
return results;
}
}

View File

@@ -0,0 +1,132 @@
using Adaptation.Eaf.Management.ConfigurationData.CellAutomation;
using Adaptation.Ifx.Eaf.EquipmentConnector.File.Configuration;
using Adaptation.Shared;
using Adaptation.Shared.Duplicator;
using Adaptation.Shared.Methods;
using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Text.Json;
namespace Adaptation.FileHandlers.SPaCe;
public class FileRead : Shared.FileRead, IFileRead
{
public FileRead(ISMTP smtp, Dictionary<string, string> fileParameter, string cellInstanceName, int? connectionCount, string cellInstanceConnectionName, FileConnectorConfiguration fileConnectorConfiguration, string equipmentTypeName, string parameterizedModelObjectDefinitionType, IList<ModelObjectParameterDefinition> modelObjectParameters, string equipmentDictionaryName, Dictionary<string, List<long>> dummyRuns, Dictionary<long, List<Shared.Metrology.WS.Results>> staticRuns, bool useCyclicalForDescription, bool isEAFHosted) :
base(new Description(), false, smtp, fileParameter, cellInstanceName, connectionCount, cellInstanceConnectionName, fileConnectorConfiguration, equipmentTypeName, parameterizedModelObjectDefinitionType, modelObjectParameters, equipmentDictionaryName, dummyRuns, staticRuns, useCyclicalForDescription, isEAFHosted: connectionCount is null)
{
_MinFileLength = 10;
_NullData = string.Empty;
_Logistics = new(this);
if (_FileParameter is null)
throw new Exception(cellInstanceConnectionName);
if (_ModelObjectParameterDefinitions is null)
throw new Exception(cellInstanceConnectionName);
if (!_IsDuplicator)
throw new Exception(cellInstanceConnectionName);
}
void IFileRead.Move(Tuple<string, Test[], JsonElement[], List<FileInfo>> extractResults, Exception exception)
{
bool isErrorFile = exception is not null;
if (!isErrorFile && !string.IsNullOrEmpty(_Logistics.ReportFullPath))
{
FileInfo fileInfo = new(_Logistics.ReportFullPath);
if (fileInfo.Exists && fileInfo.LastWriteTime < fileInfo.CreationTime)
File.SetLastWriteTime(_Logistics.ReportFullPath, fileInfo.CreationTime);
}
Move(extractResults);
}
void IFileRead.WaitForThread() => WaitForThread(thread: null, threadExceptions: null);
string IFileRead.GetEventDescription()
{
string result = _Description.GetEventDescription();
return result;
}
List<string> IFileRead.GetHeaderNames()
{
List<string> results = _Description.GetHeaderNames();
return results;
}
string[] IFileRead.Move(Tuple<string, Test[], JsonElement[], List<FileInfo>> extractResults, string to, string from, string resolvedFileLocation, Exception exception)
{
string[] results = Move(extractResults, to, from, resolvedFileLocation, exception);
return results;
}
JsonProperty[] IFileRead.GetDefault()
{
JsonProperty[] results = _Description.GetDefault(this, _Logistics);
return results;
}
Dictionary<string, string> IFileRead.GetDisplayNamesJsonElement()
{
Dictionary<string, string> results = _Description.GetDisplayNamesJsonElement(this);
return results;
}
List<IDescription> IFileRead.GetDescriptions(IFileRead fileRead, List<Test> tests, IProcessData processData)
{
List<IDescription> results = _Description.GetDescriptions(fileRead, _Logistics, tests, processData);
return results;
}
Tuple<string, Test[], JsonElement[], List<FileInfo>> IFileRead.GetExtractResult(string reportFullPath, string eventName)
{
Tuple<string, Test[], JsonElement[], List<FileInfo>> results;
if (string.IsNullOrEmpty(eventName))
throw new Exception();
_ReportFullPath = reportFullPath;
DateTime dateTime = DateTime.Now;
results = GetExtractResult(reportFullPath, dateTime);
if (results.Item3 is null)
results = new Tuple<string, Test[], JsonElement[], List<FileInfo>>(results.Item1, Array.Empty<Test>(), JsonSerializer.Deserialize<JsonElement[]>("[]"), results.Item4);
if (results.Item3.Length > 0 && _IsEAFHosted)
WritePDSF(this, results.Item3);
UpdateLastTicksDuration(DateTime.Now.Ticks - dateTime.Ticks);
return results;
}
Tuple<string, Test[], JsonElement[], List<FileInfo>> IFileRead.ReExtract()
{
Tuple<string, Test[], JsonElement[], List<FileInfo>> results;
List<string> headerNames = _Description.GetHeaderNames();
Dictionary<string, string> keyValuePairs = _Description.GetDisplayNamesJsonElement(this);
results = ReExtract(this, headerNames, keyValuePairs);
return results;
}
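// Copies the report into the connector's TargetFileLocation and waits for the duplicate to be
// consumed via the shared WaitForFileConsumption helper.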
private void FileCopy<T>(string reportFullPath, DateTime dateTime, List<T> descriptions) where T : Shared.Properties.IDescription
{
bool isDummyRun = false;
string successDirectory = string.Empty;
List<(Shared.Properties.IScopeInfo, string)> collection = new();
string duplicateDirectory = _FileConnectorConfiguration.TargetFileLocation;
string duplicateFile = Path.Combine(duplicateDirectory, Path.GetFileName(reportFullPath));
File.Copy(reportFullPath, duplicateFile, overwrite: true);
WaitForFileConsumption(dateTime, descriptions, isDummyRun, successDirectory, duplicateDirectory, collection, duplicateFile);
}
private Tuple<string, Test[], JsonElement[], List<FileInfo>> GetExtractResult(string reportFullPath, DateTime dateTime)
{
Tuple<string, Test[], JsonElement[], List<FileInfo>> results;
ProcessDataStandardFormat processDataStandardFormat = ProcessDataStandardFormat.GetProcessDataStandardFormat(reportFullPath);
_Logistics = new Logistics(reportFullPath, processDataStandardFormat);
SetFileParameterLotIDToLogisticsMID();
JsonElement[] jsonElements = ProcessDataStandardFormat.GetArray(processDataStandardFormat);
List<Shared.Properties.IDescription> descriptions = GetDuplicatorDescriptions(jsonElements);
Test[] tests = (from l in descriptions select (Test)l.Test).ToArray();
if (_IsEAFHosted && _FileConnectorConfiguration.FileScanningIntervalInSeconds > 0)
FileCopy(reportFullPath, dateTime, descriptions);
results = new Tuple<string, Test[], JsonElement[], List<FileInfo>>(string.Join(Environment.NewLine, processDataStandardFormat.Logistics), tests, jsonElements, new List<FileInfo>());
return results;
}
}

View File

@@ -615,11 +615,12 @@ public class ProcessData : IProcessData
 {
 List<Description> results = new();
 Description? description;
+JsonSerializerOptions jsonSerializerOptions = new() { NumberHandling = JsonNumberHandling.AllowReadingFromString | JsonNumberHandling.WriteAsString };
 foreach (JsonElement jsonElement in jsonElements)
 {
 if (jsonElement.ValueKind != JsonValueKind.Object)
 throw new Exception();
-description = JsonSerializer.Deserialize(jsonElement.ToString(), SharedDescriptionSourceGenerationContext.Default.Description);
+description = JsonSerializer.Deserialize<Description>(jsonElement.ToString(), jsonSerializerOptions);
 if (description is null)
 continue;
 results.Add(description);

View File

@@ -189,8 +189,7 @@ public class FileRead : Shared.FileRead, IFileRead
 timeCriticality: fields.MicrosoftVSTSCommonTimeCriticality is null or 0 ? null : (long)fields.MicrosoftVSTSCommonTimeCriticality,
 title: fields.SystemTitle.Trim(),
 violation: null,
-weightedShortestJobFirst: fields.CustomWSJF is null or 0 ? null : fields.CustomWSJF,
-weightedShortestJobFirstFibonacci: fields.CustomWSJFFib is null or 0 ? null : fields.CustomWSJFFib,
+weightedShortestJobFirst: fields.CustomWSJF is null or 0 ? null : (long)fields.CustomWSJF,
 workItemType: fields.SystemWorkItemType);
 results.Add(workItem.Id, workItem);
 }

View File

@@ -13,32 +13,16 @@ var _machineId = '';
 var _sessionId = '';
 var _windowLocationHRef = '';
-function compareFunctionEffort(a: any, b: any) {
-if (a.Effort == undefined || a.Effort === '&nbsp;') {
-return -1;
-}
-if (b.Effort !== a.Effort) {
-return b.Effort - a.Effort;
-}
-return b.WeightedShortestJobFirst - a.WeightedShortestJobFirst;
+function compareFunctionSortOrder(a: any, b: any) {
+return a.SortOrder - b.SortOrder;
 }
-function compareFunctionSortOrder(a: any, b: any) {
-if (a.SortOrder == undefined || a.SortOrder === '&nbsp;') {
-return -1;
-}
-return a.SortOrder - b.SortOrder
+function compareFunctionParentId(a: any, b: any) {
+return a.ParentId - b.ParentId || a.Id - b.Id;
 }
 function compareFunctionWeightedShortestJobFirst(a: any, b: any) {
-if (b.WeightedShortestJobFirst == undefined || b.WeightedShortestJobFirst === '&nbsp;') {
-return -1;
-}
-return b.WeightedShortestJobFirst - a.WeightedShortestJobFirst;
-}
-function compareFunctionWeightedShortestJobFirstNullFirst(a: any, b: any) {
-if (a.WeightedShortestJobFirst == undefined || a.WeightedShortestJobFirst === '&nbsp;') {
+if (b.WeightedShortestJobFirst === '&nbsp;') {
 return -1;
 }
 return b.WeightedShortestJobFirst - a.WeightedShortestJobFirst;
@@ -158,19 +142,8 @@ function updateRecordCoD(b: any, r: any, t: any, c: any, e: any, w: any, workIte
 let effort = workItem.Effort;
 let businessValue = workItem.BusinessValue;
 let timeCriticality = workItem.TimeCriticality;
+let weightedShortestJobFirst = workItem.WeightedShortestJobFirst.toFixed(2);
 let riskReductionMinusOpportunityEnablement = workItem.RiskReductionMinusOpportunityEnablement;
-let weightedShortestJobFirst = workItem.WeightedShortestJobFirst == undefined ? null : workItem.WeightedShortestJobFirst.toFixed(2);
-let weightedShortestJobFirstFibonacci = workItem.WeightedShortestJobFirstFibonacci == undefined ? null : workItem.WeightedShortestJobFirstFibonacci.toFixed(2);
-if (weightedShortestJobFirst == null || weightedShortestJobFirstFibonacci == null || weightedShortestJobFirst != weightedShortestJobFirstFibonacci) {
-if (weightedShortestJobFirst != weightedShortestJobFirstFibonacci) {
-}
-}
-if (workItem.Id === 330178) {
-if (weightedShortestJobFirst != weightedShortestJobFirstFibonacci) {
-}
-}
 workItem.CumulativeStoryPoints = '&nbsp;';
 workItem.TotalStoryPoints = totalStoryPoints + ' User Story Point(s)';
 workItem.AbsoluteDelta = data.Effort == undefined || data.Effort.FibonacciAverage == undefined || totalStoryPoints == undefined || totalStoryPoints === 0 ? '&nbsp;' : round(Math.abs(data.Effort.FibonacciAverage - ((totalStoryPoints / highestTotalStoryPoints) * 5)), 1);
@@ -186,20 +159,7 @@ function updateRecordCoD(b: any, r: any, t: any, c: any, e: any, w: any, workIte
 workItem.TimeCriticalityNotifications = data.TimeCriticality == undefined ? '&nbsp;' : getNotifications(t, data.TimeCriticality);
 workItem.RiskReductionMinusOpportunityEnablementNotifications = data.RiskReductionOpportunityEnablement == undefined ? '&nbsp;' : getNotifications(r, data.RiskReductionOpportunityEnablement);
 workItem.SortOrder = data.SortOrder == undefined ? 0 : data.SortOrder;
-if (workItem.Effort == undefined ||
-workItem.BusinessValue == undefined ||
-workItem.TimeCriticality == undefined ||
-workItem.RiskReductionMinusOpportunityEnablement == undefined ||
-workItem.WeightedShortestJobFirst == undefined ||
-workItem.Effort == '&nbsp;' ||
-workItem.BusinessValue == '&nbsp;' ||
-workItem.TimeCriticality == '&nbsp;' ||
-workItem.RiskReductionMinusOpportunityEnablement == '&nbsp;' ||
-workItem.WeightedShortestJobFirst == '&nbsp;') {
-workItem.api = '';
-}
-else {
-let check = effort === workItem.Effort
+let check = effort == workItem.Effort
 && businessValue === workItem.BusinessValue
 && timeCriticality === workItem.TimeCriticality
 && riskReductionMinusOpportunityEnablement === workItem.RiskReductionMinusOpportunityEnablement;
@@ -207,13 +167,13 @@ function updateRecordCoD(b: any, r: any, t: any, c: any, e: any, w: any, workIte
 workItem.api = '';
 } else {
 workItem.api = `
-### Work Item Patch ${check} WSJF ${workItem.Id} ${effort}:${workItem.Effort}; ${businessValue}:${workItem.BusinessValue}; ${timeCriticality}:${workItem.TimeCriticality}; ${riskReductionMinusOpportunityEnablement}:${workItem.RiskReductionMinusOpportunityEnablement}; ${weightedShortestJobFirst}:${workItem.WeightedShortestJobFirst};
-PATCH https://tfs.intra.infineon.com/tfs/FactoryIntegration/_apis/wit/workitems/${workItem.Id}?api-version=7.0
+### Work Item Patch ${check} WSJF ${workItem.Id} ${weightedShortestJobFirst} != ${workItem.WeightedShortestJobFirst}
+patch {{Factory-Integration}}/_apis/wit/workitems/${workItem.Id}?api-version=7.0
 Authorization: Basic {{PAT}}
 Content-Type: application/json-patch+json

 [
 {
 "op": "replace",
 "path": "/fields/Microsoft.VSTS.Common.BusinessValue",
@@ -244,19 +204,7 @@ Content-Type: application/json-patch+json
 "path": "/fields/Custom.WSJFFib",
 "value": "${workItem.WeightedShortestJobFirst}"
 }
-]
-HTTP 200
-[Asserts]
-header "Content-Type" == "application/json; charset=utf-8; api-version=7.0"
-jsonpath "$.id" == ${workItem.Id}
-jsonpath "$.fields['Microsoft.VSTS.Common.BusinessValue']" == ${workItem.BusinessValue}
-jsonpath "$.fields['Microsoft.VSTS.Scheduling.Effort']" == ${workItem.Effort}
-jsonpath "$.fields['Custom.RRminusOE']" == ${workItem.RiskReductionMinusOpportunityEnablement}
-jsonpath "$.fields['Microsoft.VSTS.Common.TimeCriticality']" == ${workItem.TimeCriticality}
-jsonpath "$.fields['Custom.WSJF']" == ${workItem.WeightedShortestJobFirst}
-jsonpath "$.fields['Custom.WSJFFib']" == ${workItem.WeightedShortestJobFirst}`;
-}
+]`;
 }
 }
 }
@@ -316,42 +264,30 @@ function getRecords(b: any, r: any, t: any, c: any, e: any, w: any, data: any, d
 workItem.State = getState(workItem.State);
 records.push(workItem);
 }
-if (_windowLocationHRef.indexOf('=EFFORT') > -1) {
-records.sort(compareFunctionEffort);
+if (_windowLocationHRef.indexOf('=WSJF') > -1) {
+records.sort(compareFunctionWeightedShortestJobFirst);
 }
 else if (_windowLocationHRef.indexOf('=LIVE') > -1) {
 records.sort(compareFunctionSortOrder);
 }
-else if (_windowLocationHRef.indexOf('=WSJF') > -1) {
-records.sort(compareFunctionWeightedShortestJobFirst);
-}
 else {
-records.sort(compareFunctionWeightedShortestJobFirstNullFirst);
+records.sort(compareFunctionParentId);
 }
 return records;
 }
-function getHtmlTextAndApi(fromHtml: any, b: any, r: any, t: any, c: any, e: any, w: any, records: any) {
+function getHtmlTextAndHttp(fromHtml: any, b: any, r: any, t: any, c: any, e: any, w: any, records: any) {
 let record;
+let http = '';
 let lineA = '';
 let lineB = '';
 let lineC = '';
-let api = `
-### FactoryIntegration (hurl.exe)
-GET https://tfs.intra.infineon.com/tfs/FactoryIntegration
-HTTP 401
-[Asserts]
-duration < 12345
-`;
 let text = 'Id\tRisk Reduction and/or Opportunity Enablement\tTime Criticality\tBusiness Value\tEffort\tWSJF\tCoD\tFeature Total Story Points\tAbsolute Delta\tState\tRequester\tAssigned To\tIteration Path\tSystem\tTitle-123\r\n';
 let html = '<tr><th>Parent Id</th><th>Parent Title</th><th>Id</th><th>Requester</th><th>Title</th><th>Assigned To</th><th>System(s)</th><th>State</th><th>Priority</th><th>Risk Reduction and/or Opportunity Enablement</th><th>Time Criticality</th><th>Business Value</th><th>Cost of Delay (CoD)</th><th>Effort</th><th>WSJF</th></tr>';
 for (let i = 0; i < records.length; i++) {
 record = records[i];
 if (record.api !== '') {
-api += record.api + '\r\n';
+http += record.api + '\r\n';
 }
 text += record.Id + '\t' +
 record.RiskReductionMinusOpportunityEnablement + '\t' +
@ -426,7 +362,7 @@ duration < 12345
console.log(text); console.log(text);
html += lineA + lineB + lineC; html += lineA + lineB + lineC;
} }
return { html, text, api }; return { html, text, http };
} }
const username = ''; const username = '';
@ -489,7 +425,7 @@ fetch(workItems.b)
if (dataA.length > 0) if (dataA.length > 0)
console.log(dataA[0]); console.log(dataA[0]);
const records = getRecords(b, r, t, c, e, w, dataA, dataB, workItems); const records = getRecords(b, r, t, c, e, w, dataA, dataB, workItems);
let result = getHtmlTextAndApi(fromHtml, b, r, t, c, e, w, records); let result = getHtmlTextAndHttp(fromHtml, b, r, t, c, e, w, records);
if (result == undefined) { } if (result == undefined) { }
}) })
.catch((e) => console.error(e)); .catch((e) => console.error(e));
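For reference, the sorting behavior after this change is: descending WSJF (with the '&nbsp;' placeholder pushed last) when the URL contains '=WSJF', the delivered SortOrder when it contains '=LIVE', and ParentId then Id otherwise. A minimal TypeScript sketch of the three comparators follows; the Rec type, sample data, and the symmetric '&nbsp;' check on the a-side are illustrative additions, not the production shape.

// Sketch only: simplified record shape and the three comparators used above.
type Rec = {
    Id: number;
    ParentId: number;
    SortOrder: number;
    WeightedShortestJobFirst: number | string; // '&nbsp;' stands in for "not yet scored"
};

// '=LIVE': ascending by the SortOrder delivered with the data.
const bySortOrder = (a: Rec, b: Rec): number => a.SortOrder - b.SortOrder;

// default: group by ParentId, then by Id inside each parent.
const byParentId = (a: Rec, b: Rec): number => a.ParentId - b.ParentId || a.Id - b.Id;

// '=WSJF': highest WSJF first; unscored rows sort last (a-side check added here for symmetry).
const byWeightedShortestJobFirst = (a: Rec, b: Rec): number => {
    if (b.WeightedShortestJobFirst === '&nbsp;') return -1;
    if (a.WeightedShortestJobFirst === '&nbsp;') return 1;
    return (b.WeightedShortestJobFirst as number) - (a.WeightedShortestJobFirst as number);
};

const records: Rec[] = [
    { Id: 2, ParentId: 1, SortOrder: 3, WeightedShortestJobFirst: 5 },
    { Id: 3, ParentId: 1, SortOrder: 1, WeightedShortestJobFirst: '&nbsp;' },
    { Id: 4, ParentId: 2, SortOrder: 2, WeightedShortestJobFirst: 13 },
];

const href = '?display=WSJF'; // stand-in for _windowLocationHRef
if (href.indexOf('=WSJF') > -1) records.sort(byWeightedShortestJobFirst);
else if (href.indexOf('=LIVE') > -1) records.sort(bySortOrder);
else records.sort(byParentId);

console.log(records.map(r => r.Id)); // [4, 2, 3] with the sample data above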

View File

@ -13,29 +13,16 @@ var _machineId = '';
var _sessionId = ''; var _sessionId = '';
var _windowLocationHRef = ''; var _windowLocationHRef = '';
function compareFunctionEffort(a, b) { function compareFunctionSortOrder(a, b) {
if (a.Effort == undefined || a.Effort === '&nbsp;') { return a.SortOrder - b.SortOrder;
return -1;
}
if (b.Effort !== a.Effort) {
return b.Effort - a.Effort;
}
return b.WeightedShortestJobFirst - a.WeightedShortestJobFirst;
} }
function compareFunctionSortOrder(a, b) { function compareFunctionParentId(a, b) {
return a.SortOrder - b.SortOrder return a.ParentId - b.ParentId || a.Id - b.Id;
} }
function compareFunctionWeightedShortestJobFirst(a, b) { function compareFunctionWeightedShortestJobFirst(a, b) {
if (b.WeightedShortestJobFirst == undefined || b.WeightedShortestJobFirst === '&nbsp;') { if (b.WeightedShortestJobFirst === '&nbsp;') {
return -1;
}
return b.WeightedShortestJobFirst - a.WeightedShortestJobFirst;
}
function compareFunctionWeightedShortestJobFirstNullFirst(a, b) {
if (a.WeightedShortestJobFirst == undefined || a.WeightedShortestJobFirst === '&nbsp;') {
return -1; return -1;
} }
return b.WeightedShortestJobFirst - a.WeightedShortestJobFirst; return b.WeightedShortestJobFirst - a.WeightedShortestJobFirst;
@ -155,8 +142,8 @@ function updateRecordCoD(b, r, t, c, e, w, workItem, highestTotalStoryPoints, da
let effort = workItem.Effort; let effort = workItem.Effort;
let businessValue = workItem.BusinessValue; let businessValue = workItem.BusinessValue;
let timeCriticality = workItem.TimeCriticality; let timeCriticality = workItem.TimeCriticality;
let weightedShortestJobFirst = workItem.WeightedShortestJobFirst.toFixed(2);
let riskReductionMinusOpportunityEnablement = workItem.RiskReductionMinusOpportunityEnablement; let riskReductionMinusOpportunityEnablement = workItem.RiskReductionMinusOpportunityEnablement;
let weightedShortestJobFirst = workItem.WeightedShortestJobFirst == undefined ? '&nbsp;' : workItem.WeightedShortestJobFirst.toFixed(2);
workItem.CumulativeStoryPoints = '&nbsp;'; workItem.CumulativeStoryPoints = '&nbsp;';
workItem.TotalStoryPoints = totalStoryPoints + ' User Story Point(s)'; workItem.TotalStoryPoints = totalStoryPoints + ' User Story Point(s)';
workItem.AbsoluteDelta = data.Effort == undefined || data.Effort.FibonacciAverage == undefined || totalStoryPoints == undefined || totalStoryPoints === 0 ? '&nbsp;' : round(Math.abs(data.Effort.FibonacciAverage - ((totalStoryPoints / highestTotalStoryPoints) * 5)), 1); workItem.AbsoluteDelta = data.Effort == undefined || data.Effort.FibonacciAverage == undefined || totalStoryPoints == undefined || totalStoryPoints === 0 ? '&nbsp;' : round(Math.abs(data.Effort.FibonacciAverage - ((totalStoryPoints / highestTotalStoryPoints) * 5)), 1);
@ -172,20 +159,7 @@ function updateRecordCoD(b, r, t, c, e, w, workItem, highestTotalStoryPoints, da
workItem.TimeCriticalityNotifications = data.TimeCriticality == undefined ? '&nbsp;' : getNotifications(t, data.TimeCriticality); workItem.TimeCriticalityNotifications = data.TimeCriticality == undefined ? '&nbsp;' : getNotifications(t, data.TimeCriticality);
workItem.RiskReductionMinusOpportunityEnablementNotifications = data.RiskReductionOpportunityEnablement == undefined ? '&nbsp;' : getNotifications(r, data.RiskReductionOpportunityEnablement); workItem.RiskReductionMinusOpportunityEnablementNotifications = data.RiskReductionOpportunityEnablement == undefined ? '&nbsp;' : getNotifications(r, data.RiskReductionOpportunityEnablement);
workItem.SortOrder = data.SortOrder == undefined ? 0 : data.SortOrder; workItem.SortOrder = data.SortOrder == undefined ? 0 : data.SortOrder;
if (workItem.Effort == undefined || let check = effort == workItem.Effort
workItem.BusinessValue == undefined ||
workItem.TimeCriticality == undefined ||
workItem.RiskReductionMinusOpportunityEnablement == undefined ||
workItem.WeightedShortestJobFirst == undefined ||
workItem.Effort == '&nbsp;' ||
workItem.BusinessValue == '&nbsp;' ||
workItem.TimeCriticality == '&nbsp;' ||
workItem.RiskReductionMinusOpportunityEnablement == '&nbsp;' ||
workItem.WeightedShortestJobFirst == '&nbsp;') {
workItem.api = '';
}
else {
let check = effort === workItem.Effort
&& businessValue === workItem.BusinessValue && businessValue === workItem.BusinessValue
&& timeCriticality === workItem.TimeCriticality && timeCriticality === workItem.TimeCriticality
&& riskReductionMinusOpportunityEnablement === workItem.RiskReductionMinusOpportunityEnablement; && riskReductionMinusOpportunityEnablement === workItem.RiskReductionMinusOpportunityEnablement;
@ -193,13 +167,13 @@ function updateRecordCoD(b, r, t, c, e, w, workItem, highestTotalStoryPoints, da
workItem.api = ''; workItem.api = '';
} else { } else {
workItem.api = ` workItem.api = `
### Work Item Patch ${check} WSJF ${workItem.Id} ${effort}:${workItem.Effort}; ${businessValue}:${workItem.BusinessValue}; ${timeCriticality}:${workItem.TimeCriticality}; ${riskReductionMinusOpportunityEnablement}:${workItem.RiskReductionMinusOpportunityEnablement}; ${weightedShortestJobFirst}:${workItem.WeightedShortestJobFirst}; ### Work Item Patch ${check} WSJF ${workItem.Id} ${weightedShortestJobFirst} != ${workItem.WeightedShortestJobFirst}
PATCH https://tfs.intra.infineon.com/tfs/FactoryIntegration/_apis/wit/workitems/${workItem.Id}?api-version=7.0 patch {{Factory-Integration}}/_apis/wit/workitems/${workItem.Id}?api-version=7.0
Authorization: Basic {{PAT}} Authorization: Basic {{PAT}}
Content-Type: application/json-patch+json Content-Type: application/json-patch+json
[ [
{ {
"op": "replace", "op": "replace",
"path": "/fields/Microsoft.VSTS.Common.BusinessValue", "path": "/fields/Microsoft.VSTS.Common.BusinessValue",
@ -230,19 +204,7 @@ Content-Type: application/json-patch+json
"path": "/fields/Custom.WSJFFib", "path": "/fields/Custom.WSJFFib",
"value": "${workItem.WeightedShortestJobFirst}" "value": "${workItem.WeightedShortestJobFirst}"
} }
] ]`;
HTTP 200
[Asserts]
header "Content-Type" == "application/json; charset=utf-8; api-version=7.0"
jsonpath "$.id" == ${workItem.Id}
jsonpath "$.fields['Microsoft.VSTS.Common.BusinessValue']" == ${workItem.BusinessValue}
jsonpath "$.fields['Microsoft.VSTS.Scheduling.Effort']" == ${workItem.Effort}
jsonpath "$.fields['Custom.RRminusOE']" == ${workItem.RiskReductionMinusOpportunityEnablement}
jsonpath "$.fields['Microsoft.VSTS.Common.TimeCriticality']" == ${workItem.TimeCriticality}
jsonpath "$.fields['Custom.WSJF']" == ${workItem.WeightedShortestJobFirst}
jsonpath "$.fields['Custom.WSJFFib']" == ${workItem.WeightedShortestJobFirst}`;
}
} }
} }
} }
@ -302,17 +264,14 @@ function getRecords(b, r, t, c, e, w, data, dataB, workItems) {
workItem.State = getState(workItem.State); workItem.State = getState(workItem.State);
records.push(workItem); records.push(workItem);
} }
if (_windowLocationHRef.indexOf('=EFFORT') > -1) { if (_windowLocationHRef.indexOf('=WSJF') > -1) {
records.sort(compareFunctionEffort); records.sort(compareFunctionWeightedShortestJobFirst);
} }
else if (_windowLocationHRef.indexOf('=LIVE') > -1) { else if (_windowLocationHRef.indexOf('=LIVE') > -1) {
records.sort(compareFunctionSortOrder); records.sort(compareFunctionSortOrder);
} }
else if (_windowLocationHRef.indexOf('=WSJF') > -1) {
records.sort(compareFunctionWeightedShortestJobFirst);
}
else { else {
records.sort(compareFunctionWeightedShortestJobFirstNullFirst); records.sort(compareFunctionParentId);
} }
return records; return records;
} }
@ -347,27 +306,18 @@ function sendValue(fromHtml, element, page, id) {
} }
} }
function getHtmlTextAndApi(fromHtml, b, r, t, c, e, w, records) { function getHtmlTextAndHttp(fromHtml, b, r, t, c, e, w, records) {
let record; let record;
let http = '';
let lineA = ''; let lineA = '';
let lineB = ''; let lineB = '';
let lineC = ''; let lineC = '';
let api = `
### FactoryIntegration (hurl.exe)
GET https://tfs.intra.infineon.com/tfs/FactoryIntegration
HTTP 401
[Asserts]
duration < 12345
`;
let text = 'Id\tRisk Reduction and/or Opportunity Enablement\tTime Criticality\tBusiness Value\tEffort\tWSJF\tCoD\tFeature Total Story Points\tAbsolute Delta\tState\tRequester\tAssigned To\tIteration Path\tSystem\tTitle-123\r\n'; let text = 'Id\tRisk Reduction and/or Opportunity Enablement\tTime Criticality\tBusiness Value\tEffort\tWSJF\tCoD\tFeature Total Story Points\tAbsolute Delta\tState\tRequester\tAssigned To\tIteration Path\tSystem\tTitle-123\r\n';
let html = '<tr><th>Parent Id</th><th>Parent Title</th><th>Id</th><th>Requester</th><th>Title</th><th>Assigned To</th><th>System(s)</th><th>State</th><th>Priority</th><th>Risk Reduction and/or Opportunity Enablement</th><th>Time Criticality</th><th>Business Value</th><th>Cost of Delay (CoD)</th><th>Effort</th><th>WSJF</th></tr>'; let html = '<tr><th>Parent Id</th><th>Parent Title</th><th>Id</th><th>Requester</th><th>Title</th><th>Assigned To</th><th>System(s)</th><th>State</th><th>Priority</th><th>Risk Reduction and/or Opportunity Enablement</th><th>Time Criticality</th><th>Business Value</th><th>Cost of Delay (CoD)</th><th>Effort</th><th>WSJF</th></tr>';
for (let i = 0; i < records.length; i++) { for (let i = 0; i < records.length; i++) {
record = records[i]; record = records[i];
if (record.api !== '') { if (record.api !== '') {
api += record.api + '\r\n'; http += record.api + '\r\n';
} }
text += record.Id + '\t' + text += record.Id + '\t' +
record.RiskReductionMinusOpportunityEnablement + '\t' + record.RiskReductionMinusOpportunityEnablement + '\t' +
@ -442,7 +392,7 @@ duration < 12345
console.log(text); console.log(text);
html += lineA + lineB + lineC; html += lineA + lineB + lineC;
} }
return { html, text, api }; return { html, text, http };
} }
function updateSite(c, w) { function updateSite(c, w) {
@ -461,17 +411,17 @@ function updateSite(c, w) {
document.title = document.title.replace('Infineon', 'Infineon'); document.title = document.title.replace('Infineon', 'Infineon');
document.getElementById('siteHeader').innerText = 'Infineon'; document.getElementById('siteHeader').innerText = 'Infineon';
} }
if (_windowLocationHRef.indexOf('=EFFORT') > -1) { if (_windowLocationHRef.indexOf('=WSJF') > -1) {
document.getElementById('th-span').innerHTML = c.th + ' sorted by Effort (null, highest, ..., lowest)'; document.getElementById('th-span').innerHTML = w.th + ' sorted by WSJF';
} }
else if (_windowLocationHRef.indexOf('=LIVE') > -1) { else if (_windowLocationHRef.indexOf('=LIVE') > -1) {
document.getElementById('th-span').innerHTML = c.th + ' Live View'; document.getElementById('th-span').innerHTML = c.th + ' sorted by CoD';
} }
else if (_windowLocationHRef.indexOf('=WSJF') > -1) { else if (_windowLocationHRef.indexOf('=EFFORT') > -1) {
document.getElementById('th-span').innerHTML = w.th + ' sorted by WSJF (highest, ..., lowest, null)'; document.getElementById('th-span').innerHTML = c.th + ' sorted by Parent Id';
} }
else { else {
document.getElementById('th-span').innerHTML = c.th + ' sorted by WSJF (null, highest, ..., lowest)'; document.getElementById('th-span').innerHTML = c.th + ' sorted by Parent Id';
} }
} }
@ -480,16 +430,11 @@ function setDocument(fromHtml, b, r, t, c, e, w, dataA, dataB, workItems) {
console.log(dataA.length); console.log(dataA.length);
if (dataA.length > 0) if (dataA.length > 0)
console.log(dataA[0]); console.log(dataA[0]);
let result = getHtmlTextAndApi(fromHtml, b, r, t, c, e, w, records); let result = getHtmlTextAndHttp(fromHtml, b, r, t, c, e, w, records);
if (fromHtml) { if (fromHtml) {
document.getElementById('HeaderGrid').innerHTML = result.html.replaceAll('>null<', '>&nbsp;<'); document.getElementById('HeaderGrid').innerHTML = result.html.replaceAll('>null<', '>&nbsp;<');
if (_windowLocationHRef.indexOf('=WSJF') === -1) { if (_windowLocationHRef.indexOf('=WSJF') === -1) {
if (_windowLocationHRef.indexOf('=EFFORT') > -1) { document.getElementById('AllTextarea').value = result.text.replaceAll('null', '').replaceAll('&nbsp;', '') + result.http;
document.getElementById('AllTextarea').value = result.api;
}
else {
document.getElementById('AllTextarea').value = result.text.replaceAll('null', '').replaceAll('&nbsp;', '');
}
} }
else { else {
_toggle = !_toggle; _toggle = !_toggle;
@ -714,7 +659,7 @@ if (typeof document == 'undefined') {
if (dataA.length > 0) if (dataA.length > 0)
console.log(dataA[0]); console.log(dataA[0]);
const records = getRecords(b, r, t, c, e, w, dataA, dataB, workItems); const records = getRecords(b, r, t, c, e, w, dataA, dataB, workItems);
let result = getHtmlTextAndApi(fromHtml, b, r, t, c, e, w, records); let result = getHtmlTextAndHttp(fromHtml, b, r, t, c, e, w, records);
if (result == undefined) { } if (result == undefined) { }
initIndex(fromHtml, username, machineId, windowLocationHRef, workItems, b, r, t, c, e, w, apiUrl, signalRUrl); initIndex(fromHtml, username, machineId, windowLocationHRef, workItems, b, r, t, c, e, w, apiUrl, signalRUrl);
}) })
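The per-record `api` string generated above is an .http/hurl-style request: a PATCH against `{{Factory-Integration}}/_apis/wit/workitems/{id}?api-version=7.0` with Basic auth (`{{PAT}}`), a `application/json-patch+json` body, and one `replace` operation per WSJF-related field. The middle of the body is elided in the hunks above, so the exact operation list below is inferred from the field names that do appear (BusinessValue, Effort, RRminusOE, TimeCriticality, WSJF, WSJFFib); the `buildPatchBody` helper and its input type are hypothetical names used only for this sketch.

// Sketch only: assembles the JSON Patch document sent by the generated PATCH request.
type WorkItemValues = {
    businessValue: number;
    effort: number;
    riskReductionMinusOpportunityEnablement: number;
    timeCriticality: number;
    weightedShortestJobFirst: number;
};

type PatchOperation = { op: 'replace'; path: string; value: string };

function buildPatchBody(v: WorkItemValues): PatchOperation[] {
    // Values are written as strings, matching the "${...}" interpolation in the template above.
    return [
        { op: 'replace', path: '/fields/Microsoft.VSTS.Common.BusinessValue', value: `${v.businessValue}` },
        { op: 'replace', path: '/fields/Microsoft.VSTS.Scheduling.Effort', value: `${v.effort}` },
        { op: 'replace', path: '/fields/Custom.RRminusOE', value: `${v.riskReductionMinusOpportunityEnablement}` },
        { op: 'replace', path: '/fields/Microsoft.VSTS.Common.TimeCriticality', value: `${v.timeCriticality}` },
        { op: 'replace', path: '/fields/Custom.WSJF', value: `${v.weightedShortestJobFirst}` },
        { op: 'replace', path: '/fields/Custom.WSJFFib', value: `${v.weightedShortestJobFirst}` },
    ];
}

console.log(JSON.stringify(buildPatchBody({
    businessValue: 8, effort: 5, riskReductionMinusOpportunityEnablement: 3,
    timeCriticality: 5, weightedShortestJobFirst: 3.2,
}), null, 2));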

View File

@ -9,21 +9,20 @@ internal class Fields
#nullable enable #nullable enable
[JsonConstructor] [JsonConstructor]
public Fields(decimal? customRRminusOE, public Fields(float? customRRminusOE,
CustomRequester? customRequester, CustomRequester? customRequester,
decimal? customWSJF, float? customWSJF,
decimal? customWSJFFib,
DateTime microsoftVSTSCommonActivatedDate, DateTime microsoftVSTSCommonActivatedDate,
decimal? microsoftVSTSCommonBusinessValue, float? microsoftVSTSCommonBusinessValue,
DateTime microsoftVSTSCommonClosedDate, DateTime microsoftVSTSCommonClosedDate,
int microsoftVSTSCommonPriority, int microsoftVSTSCommonPriority,
DateTime microsoftVSTSCommonResolvedDate, DateTime microsoftVSTSCommonResolvedDate,
DateTime microsoftVSTSCommonStateChangeDate, DateTime microsoftVSTSCommonStateChangeDate,
decimal? microsoftVSTSCommonTimeCriticality, float? microsoftVSTSCommonTimeCriticality,
decimal? microsoftVSTSSchedulingEffort, float? microsoftVSTSSchedulingEffort,
decimal? microsoftVSTSSchedulingRemainingWork, float? microsoftVSTSSchedulingRemainingWork,
DateTime microsoftVSTSSchedulingStartDate, DateTime microsoftVSTSSchedulingStartDate,
decimal? microsoftVSTSSchedulingStoryPoints, float? microsoftVSTSSchedulingStoryPoints,
DateTime microsoftVSTSSchedulingTargetDate, DateTime microsoftVSTSSchedulingTargetDate,
string systemAreaPath, string systemAreaPath,
SystemAssignedTo systemAssignedTo, SystemAssignedTo systemAssignedTo,
@ -46,7 +45,6 @@ internal class Fields
CustomRRminusOE = customRRminusOE; CustomRRminusOE = customRRminusOE;
CustomRequester = customRequester; CustomRequester = customRequester;
CustomWSJF = customWSJF; CustomWSJF = customWSJF;
CustomWSJFFib = customWSJFFib;
MicrosoftVSTSCommonActivatedDate = microsoftVSTSCommonActivatedDate; MicrosoftVSTSCommonActivatedDate = microsoftVSTSCommonActivatedDate;
MicrosoftVSTSCommonBusinessValue = microsoftVSTSCommonBusinessValue; MicrosoftVSTSCommonBusinessValue = microsoftVSTSCommonBusinessValue;
MicrosoftVSTSCommonClosedDate = microsoftVSTSCommonClosedDate; MicrosoftVSTSCommonClosedDate = microsoftVSTSCommonClosedDate;
@ -78,21 +76,20 @@ internal class Fields
SystemWorkItemType = systemWorkItemType; SystemWorkItemType = systemWorkItemType;
} }
[JsonPropertyName("Custom.RRminusOE")] public decimal? CustomRRminusOE { get; } [JsonPropertyName("Custom.RRminusOE")] public float? CustomRRminusOE { get; }
[JsonPropertyName("Custom.Requester")] public CustomRequester? CustomRequester { get; } [JsonPropertyName("Custom.Requester")] public CustomRequester? CustomRequester { get; }
[JsonPropertyName("Custom.WSJF")] public decimal? CustomWSJF { get; } [JsonPropertyName("Custom.WSJF")] public float? CustomWSJF { get; }
[JsonPropertyName("Custom.WSJFFib")] public decimal? CustomWSJFFib { get; }
[JsonPropertyName("Microsoft.VSTS.Common.ActivatedDate")] public DateTime MicrosoftVSTSCommonActivatedDate { get; } [JsonPropertyName("Microsoft.VSTS.Common.ActivatedDate")] public DateTime MicrosoftVSTSCommonActivatedDate { get; }
[JsonPropertyName("Microsoft.VSTS.Common.BusinessValue")] public decimal? MicrosoftVSTSCommonBusinessValue { get; } [JsonPropertyName("Microsoft.VSTS.Common.BusinessValue")] public float? MicrosoftVSTSCommonBusinessValue { get; }
[JsonPropertyName("Microsoft.VSTS.Common.ClosedDate")] public DateTime MicrosoftVSTSCommonClosedDate { get; } [JsonPropertyName("Microsoft.VSTS.Common.ClosedDate")] public DateTime MicrosoftVSTSCommonClosedDate { get; }
[JsonPropertyName("Microsoft.VSTS.Common.Priority")] public int MicrosoftVSTSCommonPriority { get; } [JsonPropertyName("Microsoft.VSTS.Common.Priority")] public int MicrosoftVSTSCommonPriority { get; }
[JsonPropertyName("Microsoft.VSTS.Common.ResolvedDate")] public DateTime MicrosoftVSTSCommonResolvedDate { get; } [JsonPropertyName("Microsoft.VSTS.Common.ResolvedDate")] public DateTime MicrosoftVSTSCommonResolvedDate { get; }
[JsonPropertyName("Microsoft.VSTS.Common.StateChangeDate")] public DateTime MicrosoftVSTSCommonStateChangeDate { get; } [JsonPropertyName("Microsoft.VSTS.Common.StateChangeDate")] public DateTime MicrosoftVSTSCommonStateChangeDate { get; }
[JsonPropertyName("Microsoft.VSTS.Common.TimeCriticality")] public decimal? MicrosoftVSTSCommonTimeCriticality { get; } [JsonPropertyName("Microsoft.VSTS.Common.TimeCriticality")] public float? MicrosoftVSTSCommonTimeCriticality { get; }
[JsonPropertyName("Microsoft.VSTS.Scheduling.Effort")] public decimal? MicrosoftVSTSSchedulingEffort { get; } [JsonPropertyName("Microsoft.VSTS.Scheduling.Effort")] public float? MicrosoftVSTSSchedulingEffort { get; }
[JsonPropertyName("Microsoft.VSTS.Scheduling.RemainingWork")] public decimal? MicrosoftVSTSSchedulingRemainingWork { get; } [JsonPropertyName("Microsoft.VSTS.Scheduling.RemainingWork")] public float? MicrosoftVSTSSchedulingRemainingWork { get; }
[JsonPropertyName("Microsoft.VSTS.Scheduling.StartDate")] public DateTime MicrosoftVSTSSchedulingStartDate { get; } [JsonPropertyName("Microsoft.VSTS.Scheduling.StartDate")] public DateTime MicrosoftVSTSSchedulingStartDate { get; }
[JsonPropertyName("Microsoft.VSTS.Scheduling.StoryPoints")] public decimal? MicrosoftVSTSSchedulingStoryPoints { get; } [JsonPropertyName("Microsoft.VSTS.Scheduling.StoryPoints")] public float? MicrosoftVSTSSchedulingStoryPoints { get; }
[JsonPropertyName("Microsoft.VSTS.Scheduling.TargetDate")] public DateTime MicrosoftVSTSSchedulingTargetDate { get; } [JsonPropertyName("Microsoft.VSTS.Scheduling.TargetDate")] public DateTime MicrosoftVSTSSchedulingTargetDate { get; }
[JsonPropertyName("System.AreaPath")] public string SystemAreaPath { get; } [JsonPropertyName("System.AreaPath")] public string SystemAreaPath { get; }
[JsonPropertyName("System.AssignedTo")] public SystemAssignedTo? SystemAssignedTo { get; } [JsonPropertyName("System.AssignedTo")] public SystemAssignedTo? SystemAssignedTo { get; }

View File

@ -62,7 +62,6 @@ internal class Record
title: record.WorkItem.Title, title: record.WorkItem.Title,
violation: record.WorkItem.Violation is null ? violation : record.WorkItem.Violation, violation: record.WorkItem.Violation is null ? violation : record.WorkItem.Violation,
weightedShortestJobFirst: record.WorkItem.WeightedShortestJobFirst, weightedShortestJobFirst: record.WorkItem.WeightedShortestJobFirst,
weightedShortestJobFirstFibonacci: record.WorkItem.WeightedShortestJobFirstFibonacci,
workItemType: record.WorkItem.WorkItemType); workItemType: record.WorkItem.WorkItemType);
result = new(workItem, record.Parent, Array.Empty<Record>(), Array.Empty<Record>(), Array.Empty<Record>(), record.Tag); result = new(workItem, record.Parent, Array.Empty<Record>(), Array.Empty<Record>(), Array.Empty<Record>(), record.Tag);
return result; return result;

View File

@ -37,8 +37,7 @@ internal class WorkItem
long? timeCriticality, long? timeCriticality,
string title, string title,
string? violation, string? violation,
decimal? weightedShortestJobFirst, long? weightedShortestJobFirst,
decimal? weightedShortestJobFirstFibonacci,
string workItemType) string workItemType)
{ {
ActivatedDate = activatedDate; ActivatedDate = activatedDate;
@ -70,7 +69,6 @@ internal class WorkItem
Title = title; Title = title;
Violation = violation; Violation = violation;
WeightedShortestJobFirst = weightedShortestJobFirst; WeightedShortestJobFirst = weightedShortestJobFirst;
WeightedShortestJobFirstFibonacci = weightedShortestJobFirstFibonacci;
WorkItemType = workItemType; WorkItemType = workItemType;
} }
@ -107,7 +105,6 @@ internal class WorkItem
title: workItem.Title, title: workItem.Title,
violation: workItem.Violation, violation: workItem.Violation,
weightedShortestJobFirst: workItem.WeightedShortestJobFirst, weightedShortestJobFirst: workItem.WeightedShortestJobFirst,
weightedShortestJobFirstFibonacci: workItem.WeightedShortestJobFirstFibonacci,
workItemType: workItem.WorkItemType); workItemType: workItem.WorkItemType);
return result; return result;
} }
@ -143,7 +140,6 @@ internal class WorkItem
title: workItem.Title, title: workItem.Title,
violation: workItem.Violation, violation: workItem.Violation,
weightedShortestJobFirst: workItem.WeightedShortestJobFirst, weightedShortestJobFirst: workItem.WeightedShortestJobFirst,
weightedShortestJobFirstFibonacci: workItem.WeightedShortestJobFirstFibonacci,
workItemType: workItem.WorkItemType); workItemType: workItem.WorkItemType);
return result; return result;
} }
@ -176,8 +172,7 @@ internal class WorkItem
[JsonPropertyName("TimeCriticality")] public long? TimeCriticality { get; } [JsonPropertyName("TimeCriticality")] public long? TimeCriticality { get; }
[JsonPropertyName("Title")] public string Title { get; } [JsonPropertyName("Title")] public string Title { get; }
[JsonPropertyName("Violation")] public string? Violation { get; } [JsonPropertyName("Violation")] public string? Violation { get; }
[JsonPropertyName("WeightedShortestJobFirst")] public decimal? WeightedShortestJobFirst { get; } [JsonPropertyName("WeightedShortestJobFirst")] public long? WeightedShortestJobFirst { get; }
[JsonPropertyName("WeightedShortestJobFirstFibonacci")] public decimal? WeightedShortestJobFirstFibonacci { get; }
[JsonPropertyName("WorkItemType")] public string WorkItemType { get; } [JsonPropertyName("WorkItemType")] public string WorkItemType { get; }
} }

View File

@ -226,9 +226,9 @@ public class MonIn : IMonIn, IDisposable
{ {
StringBuilder stringBuilder = new(); StringBuilder stringBuilder = new();
if (string.IsNullOrEmpty(subresource)) if (string.IsNullOrEmpty(subresource))
_ = stringBuilder.AppendFormat(_CultureInfo, "> {0} {1} \"{2}\" \"{3}\" {4} \n{5}", site.Trim(), timeStamp.HasValue ? GetDateTimeNowAsPosix(timeStamp.Value) : "now", resource.Trim(), stateName.Trim(), state.Trim(), description.Trim()); _ = stringBuilder.AppendFormat(_CultureInfo, "> {0} {1} \"{2}\" \"{3}\" {4} \n{5}", site.Trim(), timeStamp.HasValue ? GetDateTimeNowAsPosix(timeStamp.Value) : (object)"now", resource.Trim(), stateName.Trim(), state.Trim(), description.Trim());
else else
_ = stringBuilder.AppendFormat(_CultureInfo, "> {0} {1} \"{2}\" \"{3}\" \"{4}\" {5} \n{6}", site.Trim(), timeStamp.HasValue ? GetDateTimeNowAsPosix(timeStamp.Value) : "now", resource.Trim(), subresource.Trim(), stateName.Trim(), state.Trim(), description.Trim()); _ = stringBuilder.AppendFormat(_CultureInfo, "> {0} {1} \"{2}\" \"{3}\" \"{4}\" {5} \n{6}", site.Trim(), timeStamp.HasValue ? GetDateTimeNowAsPosix(timeStamp.Value) : (object)"now", resource.Trim(), subresource.Trim(), stateName.Trim(), state.Trim(), description.Trim());
return stringBuilder.ToString(); return stringBuilder.ToString();
} }
@ -247,14 +247,14 @@ public class MonIn : IMonIn, IDisposable
if (string.IsNullOrEmpty(subresource)) if (string.IsNullOrEmpty(subresource))
{ {
if (unit.Equals(string.Empty) && !interval.HasValue) if (unit.Equals(string.Empty) && !interval.HasValue)
_ = stringBuilder.AppendFormat(_CultureInfo, "> {0} {1} \"{2}\" \"{3}\" {4} \n{5}", site.Trim(), timeStamp.HasValue ? GetDateTimeNowAsPosix(timeStamp.Value) : "now", resource.Trim(), performanceName.Trim(), value, description.Trim()); _ = stringBuilder.AppendFormat(_CultureInfo, "> {0} {1} \"{2}\" \"{3}\" {4} \n{5}", site.Trim(), timeStamp.HasValue ? GetDateTimeNowAsPosix(timeStamp.Value) : (object)"now", resource.Trim(), performanceName.Trim(), value, description.Trim());
else else
_ = stringBuilder.AppendFormat(_CultureInfo, "> {0} {1} \"{2}\" \"{3}\" {4} {5} {{interval={6}, unit={7}}}\n", site.Trim(), timeStamp.HasValue ? GetDateTimeNowAsPosix(timeStamp.Value) : "now", resource.Trim(), performanceName.Trim(), value, description.Trim(), interval.HasValue ? interval.Value.ToString() : string.Empty, unit.Trim()); _ = stringBuilder.AppendFormat(_CultureInfo, "> {0} {1} \"{2}\" \"{3}\" {4} {5} {{interval={6}, unit={7}}}\n", site.Trim(), timeStamp.HasValue ? GetDateTimeNowAsPosix(timeStamp.Value) : (object)"now", resource.Trim(), performanceName.Trim(), value, description.Trim(), interval.HasValue ? interval.Value.ToString() : (object)string.Empty, unit.Trim());
} }
else if (unit.Equals(string.Empty) && !interval.HasValue) else if (unit.Equals(string.Empty) && !interval.HasValue)
_ = stringBuilder.AppendFormat(_CultureInfo, "> {0} {1} \"{2}\" \"{3}\" \"{4}\" {5} \n{6}", site.Trim(), timeStamp.HasValue ? GetDateTimeNowAsPosix(timeStamp.Value) : "now", resource.Trim(), subresource.Trim(), performanceName.Trim(), value, description.Trim()); _ = stringBuilder.AppendFormat(_CultureInfo, "> {0} {1} \"{2}\" \"{3}\" \"{4}\" {5} \n{6}", site.Trim(), timeStamp.HasValue ? GetDateTimeNowAsPosix(timeStamp.Value) : (object)"now", resource.Trim(), subresource.Trim(), performanceName.Trim(), value, description.Trim());
else else
_ = stringBuilder.AppendFormat(_CultureInfo, "> {0} {1} \"{2}\" \"{3}\" \"{4}\" {5} {6} {{interval={7}, unit={8}}}\n", site.Trim(), timeStamp.HasValue ? GetDateTimeNowAsPosix(timeStamp.Value) : "now", resource.Trim(), subresource.Trim(), performanceName.Trim(), value, description.Trim(), interval.HasValue ? interval.Value.ToString() : string.Empty, unit.Trim()); _ = stringBuilder.AppendFormat(_CultureInfo, "> {0} {1} \"{2}\" \"{3}\" \"{4}\" {5} {6} {{interval={7}, unit={8}}}\n", site.Trim(), timeStamp.HasValue ? GetDateTimeNowAsPosix(timeStamp.Value) : (object)"now", resource.Trim(), subresource.Trim(), performanceName.Trim(), value, description.Trim(), interval.HasValue ? interval.Value.ToString() : (object)string.Empty, unit.Trim());
return stringBuilder.ToString(); return stringBuilder.ToString();
} }
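The `(object)` casts added above exist because both branches of a C# conditional expression must share a type: one branch is the POSIX timestamp value and the other is the string "now" (or an empty string for the interval), so both are widened to object before being passed to AppendFormat. The message layout itself is unchanged. A small TypeScript sketch of that layout, where the type-unification issue does not arise; toPosix, statusMessage, and the sample values are illustrative only.

// Sketch only: the "> site when "resource" ["subresource"] "stateName" state \n description" layout.
function toPosix(d: Date): number {
    return Math.floor(d.getTime() / 1000);
}

function statusMessage(
    site: string,
    resource: string,
    stateName: string,
    state: string,
    description: string,
    timeStamp?: Date,
    subresource?: string,
): string {
    const when = timeStamp ? toPosix(timeStamp) : 'now';
    return subresource
        ? `> ${site.trim()} ${when} "${resource.trim()}" "${subresource.trim()}" "${stateName.trim()}" ${state.trim()} \n${description.trim()}`
        : `> ${site.trim()} ${when} "${resource.trim()}" "${stateName.trim()}" ${state.trim()} \n${description.trim()}`;
}

console.log(statusMessage('site1', 'resourceA', 'Availability', 'UP', 'ok'));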

View File

@ -41,6 +41,24 @@ stages:
displayName: "Nuget Clear" displayName: "Nuget Clear"
enabled: false enabled: false
- task: CopyFiles@2
displayName: 'Copy GhostPCL Files to: D:\EAF-Mesa-Integration\copy'
inputs:
Contents: "*"
SourceFolder: '\\mesfs.infineon.com\EC_EAFRepository\Staging\DeploymentStorage\GhostPCL'
TargetFolder: 'D:\EAF-Mesa-Integration\copy\GhostPCL'
OverWrite: true
enabled: true
- task: CopyFiles@2
displayName: 'Copy LincPDFC Files to: D:\EAF-Mesa-Integration\copy'
inputs:
Contents: "*"
SourceFolder: '\\mesfs.infineon.com\EC_EAFRepository\Staging\DeploymentStorage\LincPDFC'
TargetFolder: 'D:\EAF-Mesa-Integration\copy\LincPDFC'
OverWrite: true
enabled: true
- script: | - script: |
"C:\program files\dotnet\dotnet.exe" user-secrets init "C:\program files\dotnet\dotnet.exe" user-secrets init
"C:\program files\dotnet\dotnet.exe" user-secrets set "BuildNumber" "$(Build.BuildId)" "C:\program files\dotnet\dotnet.exe" user-secrets set "BuildNumber" "$(Build.BuildId)"
@ -184,6 +202,24 @@ stages:
displayName: "Nuget Clear" displayName: "Nuget Clear"
enabled: false enabled: false
- task: CopyFiles@2
displayName: 'Copy GhostPCL Files to: D:\EAF-Mesa-Integration\copy'
inputs:
Contents: "*"
SourceFolder: '\\mestsa003.infineon.com\EC_EAFRepository\Staging\DeploymentStorage\GhostPCL'
TargetFolder: 'D:\EAF-Mesa-Integration\copy\GhostPCL'
OverWrite: true
enabled: true
- task: CopyFiles@2
displayName: 'Copy LincPDFC Files to: D:\EAF-Mesa-Integration\copy'
inputs:
Contents: "*"
SourceFolder: '\\mestsa003.infineon.com\EC_EAFRepository\Staging\DeploymentStorage\LincPDFC'
TargetFolder: 'D:\EAF-Mesa-Integration\copy\LincPDFC'
OverWrite: true
enabled: true
- script: | - script: |
"C:\program files\dotnet\dotnet.exe" user-secrets init "C:\program files\dotnet\dotnet.exe" user-secrets init
"C:\program files\dotnet\dotnet.exe" user-secrets set "BuildNumber" "$(Build.BuildId)" "C:\program files\dotnet\dotnet.exe" user-secrets set "BuildNumber" "$(Build.BuildId)"

View File

@ -3,7 +3,6 @@ using System;
using System.Collections.Generic; using System.Collections.Generic;
using System.Linq; using System.Linq;
using System.Text.Json; using System.Text.Json;
using System.Text.Json.Serialization;
namespace Adaptation.Shared.Duplicator; namespace Adaptation.Shared.Duplicator;
@ -144,15 +143,3 @@ public class Description : IDescription, Properties.IDescription
internal static string GetDateFormat() => "MM/dd/yyyy hh:mm:ss tt"; internal static string GetDateFormat() => "MM/dd/yyyy hh:mm:ss tt";
} }
[JsonSourceGenerationOptions(WriteIndented = true, NumberHandling = JsonNumberHandling.AllowReadingFromString | JsonNumberHandling.WriteAsString)]
[JsonSerializable(typeof(Description))]
internal partial class SharedDescriptionSourceGenerationContext : JsonSerializerContext
{
}
[JsonSourceGenerationOptions(WriteIndented = true, NumberHandling = JsonNumberHandling.AllowReadingFromString | JsonNumberHandling.WriteAsString)]
[JsonSerializable(typeof(Description[]))]
internal partial class SharedDescriptionArraySourceGenerationContext : JsonSerializerContext
{
}

View File

@ -9,6 +9,7 @@ using System.IO;
using System.Linq; using System.Linq;
using System.Text; using System.Text;
using System.Text.Json; using System.Text.Json;
using System.Text.Json.Serialization;
using System.Threading; using System.Threading;
namespace Adaptation.Shared; namespace Adaptation.Shared;
@ -382,12 +383,8 @@ public class FileRead : Properties.IFileRead
else else
{ {
string[] files; string[] files;
string[] directories; string logisticsSequence = _Logistics.Sequence.ToString();
string logisticsSequence; string[] directories = Directory.GetDirectories(jobIdDirectory, $"*{logisticsSequence}*", SearchOption.TopDirectoryOnly);
for (int i = 0; i < 10; i++)
{
logisticsSequence = (_Logistics.Sequence + -i).ToString();
directories = Directory.GetDirectories(jobIdDirectory, $"*{logisticsSequence}*", SearchOption.TopDirectoryOnly);
foreach (string directory in directories) foreach (string directory in directories)
{ {
files = Directory.GetFiles(directory, "*", SearchOption.TopDirectoryOnly); files = Directory.GetFiles(directory, "*", SearchOption.TopDirectoryOnly);
@ -395,11 +392,8 @@ public class FileRead : Properties.IFileRead
continue; continue;
results.Add(directory); results.Add(directory);
} }
if (results.Count == 1)
break;
} }
} if ((results is null) || results.Count != 1)
if (results.Count != 1)
throw new Exception("Didn't find directory by logistics sequence"); throw new Exception("Didn't find directory by logistics sequence");
return results.ToArray(); return results.ToArray();
} }
@ -446,13 +440,12 @@ public class FileRead : Properties.IFileRead
{ {
List<Properties.IDescription> results = new(); List<Properties.IDescription> results = new();
Duplicator.Description description; Duplicator.Description description;
JsonSerializerOptions jsonSerializerOptions = new() { NumberHandling = JsonNumberHandling.AllowReadingFromString | JsonNumberHandling.WriteAsString };
foreach (JsonElement jsonElement in jsonElements) foreach (JsonElement jsonElement in jsonElements)
{ {
if (jsonElement.ValueKind != JsonValueKind.Object) if (jsonElement.ValueKind != JsonValueKind.Object)
throw new Exception(); throw new Exception();
description = JsonSerializer.Deserialize(jsonElement.ToString(), Duplicator.SharedDescriptionSourceGenerationContext.Default.Description); description = JsonSerializer.Deserialize<Duplicator.Description>(jsonElement.ToString(), jsonSerializerOptions);
if (description is null)
continue;
results.Add(description); results.Add(description);
} }
return results; return results;
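The directory lookup in the first hunk above was simplified from a ten-iteration retry over decremented sequence values to a single search for directories whose name contains the exact logistics sequence, still requiring exactly one non-empty match. A TypeScript sketch of the same idea using Node's fs API; the function name and the jobIdDirectory/sequence parameters are illustrative.

import { readdirSync, statSync } from 'node:fs';
import { join } from 'node:path';

// Sketch only: single-pass lookup of the one directory whose name contains
// the logistics sequence and which holds at least one file.
function getDirectoryBySequence(jobIdDirectory: string, sequence: string): string {
    const results: string[] = [];
    for (const name of readdirSync(jobIdDirectory)) {
        const directory = join(jobIdDirectory, name);
        if (!statSync(directory).isDirectory() || !name.includes(sequence))
            continue;
        const files = readdirSync(directory).filter(f => statSync(join(directory, f)).isFile());
        if (files.length === 0)
            continue;
        results.push(directory);
    }
    if (results.length !== 1)
        throw new Error("Didn't find directory by logistics sequence");
    return results[0];
}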

View File

@ -2,14 +2,12 @@ using Adaptation.Shared.Methods;
using System; using System;
using System.Collections.Generic; using System.Collections.Generic;
using System.Collections.ObjectModel; using System.Collections.ObjectModel;
using System.Diagnostics;
using System.Globalization; using System.Globalization;
using System.IO; using System.IO;
using System.Linq; using System.Linq;
using System.Text; using System.Text;
using System.Text.Json; using System.Text.Json;
using System.Text.Json.Serialization; using System.Text.Json.Serialization;
using System.Text.RegularExpressions;
namespace Adaptation.Shared; namespace Adaptation.Shared;
@ -187,7 +185,7 @@ internal class ProcessDataStandardFormat
break; break;
} }
} }
string? linesOne = lines.Length > 1 && body.Count == 0 && columns.Count == 0 ? lines[1] : null; string? linesOne = lines.Length > 0 && body.Count == 0 && columns.Count == 0 ? lines[1] : null;
logistics = GetLogistics(footer, linesOne: linesOne); logistics = GetLogistics(footer, linesOne: linesOne);
if (logistics.Count == 0) if (logistics.Count == 0)
sequence = null; sequence = null;
@ -229,19 +227,19 @@ internal class ProcessDataStandardFormat
return results.AsReadOnly(); return results.AsReadOnly();
} }
internal static ProcessDataStandardFormat GetProcessDataStandardFormat(string reportFullPath, ProcessDataStandardFormatMapping processDataStandardFormatMapping) internal static ProcessDataStandardFormat GetProcessDataStandardFormat(string reportFullPath, ProcessDataStandardFormatMapping pdsfMapping)
{ {
ProcessDataStandardFormat result; ProcessDataStandardFormat result;
const int columnsLine = 6; const int columnsLine = 6;
FileInfo fileInfo = new(reportFullPath); FileInfo fileInfo = new(reportFullPath);
ProcessDataStandardFormat processDataStandardFormat = GetProcessDataStandardFormat(fileInfo.LastWriteTime, columnsLine, fileInfo.FullName, lines: null); ProcessDataStandardFormat processDataStandardFormat = GetProcessDataStandardFormat(fileInfo.LastWriteTime, columnsLine, fileInfo.FullName, lines: null);
JsonElement[]? jsonElements = processDataStandardFormatMapping.OldColumnNames.Count == 0 ? null : GetFullArray(processDataStandardFormat); JsonElement[]? jsonElements = pdsfMapping.OldColumnNames.Count != pdsfMapping.ColumnIndices.Count ? null : GetFullArray(processDataStandardFormat);
JsonProperty[]? jsonProperties = jsonElements is null || jsonElements.Length == 0 ? null : jsonElements[0].EnumerateObject().ToArray(); JsonProperty[]? jsonProperties = jsonElements is null || jsonElements.Length == 0 ? null : jsonElements[0].EnumerateObject().ToArray();
if (jsonElements is null || jsonProperties is null || jsonProperties.Length != processDataStandardFormatMapping.NewColumnNames.Count) if (jsonElements is null || jsonProperties is null || jsonProperties.Length != pdsfMapping.NewColumnNames.Count)
result = processDataStandardFormat; result = processDataStandardFormat;
else else
{ {
result = GetProcessDataStandardFormat(processDataStandardFormatMapping, jsonElements, processDataStandardFormat); result = GetProcessDataStandardFormat(pdsfMapping, jsonElements, processDataStandardFormat);
if (result.Sequence is null || result.Columns.Count == 0 || result.Body.Count == 0 || result.Logistics.Count == 0) if (result.Sequence is null || result.Columns.Count == 0 || result.Body.Count == 0 || result.Logistics.Count == 0)
result = processDataStandardFormat; result = processDataStandardFormat;
} }
@ -337,14 +335,12 @@ internal class ProcessDataStandardFormat
int column; int column;
string value; string value;
JsonProperty jsonProperty; JsonProperty jsonProperty;
List<string> debug = new();
List<string> values = new(); List<string> values = new();
List<string> results = new(); List<string> results = new();
JsonProperty[] jsonProperties; JsonProperty[] jsonProperties;
List<string> unknownColumns = new(); List<string> unknownColumns = new();
for (int i = 0; i < jsonElements.Length; i++) for (int i = 0; i < jsonElements.Length; i++)
{ {
debug.Clear();
values.Clear(); values.Clear();
if (jsonElements[i].ValueKind != JsonValueKind.Object) if (jsonElements[i].ValueKind != JsonValueKind.Object)
{ {
@ -358,22 +354,16 @@ internal class ProcessDataStandardFormat
{ {
column = processDataStandardFormatMapping.ColumnIndices[c]; column = processDataStandardFormatMapping.ColumnIndices[c];
if (column == -1) if (column == -1)
{
value = processDataStandardFormatMapping.OldColumnNames[c]; value = processDataStandardFormatMapping.OldColumnNames[c];
debug.Add($"<Item C=-01 Name=\"{value}\" DataType=\"8\" XmlType=\"1\" XPath=\"//records/record/{value}\" />");
}
else else
{ {
jsonProperty = jsonProperties[column]; jsonProperty = jsonProperties[column];
value = jsonProperty.Value.ToString(); value = jsonProperty.Value.ToString();
debug.Add($"<Item C={column + 2:000} Name=\"{processDataStandardFormatMapping.OldColumnNames[c]}\" DataType=\"8\" XmlType=\"1\" XPath=\"//records/record/{jsonProperty.Name}\" />");
} }
values.Add(value); values.Add(value);
} }
results.Add(string.Join("\t", values)); results.Add(string.Join("\t", values));
} }
if (Debugger.IsAttached)
File.WriteAllText("../../.txt", string.Join(Environment.NewLine, debug.OrderBy(l => l)));
result = new(body: new(results), result = new(body: new(results),
columns: processDataStandardFormatMapping.OldColumnNames, columns: processDataStandardFormatMapping.OldColumnNames,
footer: processDataStandardFormat.Footer, footer: processDataStandardFormat.Footer,
@ -388,6 +378,7 @@ internal class ProcessDataStandardFormat
{ {
if (processDataStandardFormat.InputPDSF is null) if (processDataStandardFormat.InputPDSF is null)
throw new NullReferenceException(nameof(processDataStandardFormat.InputPDSF)); throw new NullReferenceException(nameof(processDataStandardFormat.InputPDSF));
#pragma warning disable CA1845, IDE0057
string result; string result;
string line; string line;
string value; string value;
@ -408,8 +399,7 @@ internal class ProcessDataStandardFormat
lines.Add(line); lines.Add(line);
} }
string? json = null; string? json = null;
if (processDataStandardFormat.Footer is not null && processDataStandardFormat.Footer.Count > 0) if (processDataStandardFormat.Footer is not null && processDataStandardFormat.Footer.Count > 0) {
{
Dictionary<string, string> footerKeyValuePairs = GetFooterKeyValuePairs(processDataStandardFormat.Footer); Dictionary<string, string> footerKeyValuePairs = GetFooterKeyValuePairs(processDataStandardFormat.Footer);
Dictionary<string, Dictionary<string, string>> logisticKeyValuePairs = GetLogisticKeyValuePairs(processDataStandardFormat.Footer, footerKeyValuePairs); Dictionary<string, Dictionary<string, string>> logisticKeyValuePairs = GetLogisticKeyValuePairs(processDataStandardFormat.Footer, footerKeyValuePairs);
json = JsonSerializer.Serialize(logisticKeyValuePairs, DictionaryStringDictionaryStringStringSourceGenerationContext.Default.DictionaryStringDictionaryStringString); json = JsonSerializer.Serialize(logisticKeyValuePairs, DictionaryStringDictionaryStringStringSourceGenerationContext.Default.DictionaryStringDictionaryStringString);
@ -449,72 +439,51 @@ internal class ProcessDataStandardFormat
return result; return result;
} }
private static Dictionary<string, string> GetFooterKeyValuePairs(ReadOnlyCollection<string> footerLines) private static Dictionary<string, string> GetFooterKeyValuePairs(ReadOnlyCollection<string> footerLines) {
{
Dictionary<string, string> results = new(); Dictionary<string, string> results = new();
string[] segments; string[] segments;
foreach (string footerLine in footerLines) foreach (string footerLine in footerLines) {
{
segments = footerLine.Split('\t'); segments = footerLine.Split('\t');
if (segments.Length != 2 || string.IsNullOrEmpty(segments[1].Trim())) if (segments.Length != 2 || string.IsNullOrEmpty(segments[1].Trim())) {
{
continue; continue;
} }
if (segments[1].Contains(';')) if (segments[1].Contains(';')) {
{
continue; continue;
} } else {
else
{
if (results.ContainsKey(segments[0]))
{
continue;
}
results.Add(segments[0], segments[1]); results.Add(segments[0], segments[1]);
} }
} }
return results; return results;
} }
private static Dictionary<string, Dictionary<string, string>> GetLogisticKeyValuePairs(ReadOnlyCollection<string> footerLines, Dictionary<string, string> footerKeyValuePairs) private static Dictionary<string, Dictionary<string, string>> GetLogisticKeyValuePairs(ReadOnlyCollection<string> footerLines, Dictionary<string, string> footerKeyValuePairs) {
{
Dictionary<string, Dictionary<string, string>> results = new(); Dictionary<string, Dictionary<string, string>> results = new();
string[] segments; string[] segments;
string[] subSegments; string[] subSegments;
string[] subSubSegments; string[] subSubSegments;
Dictionary<string, string>? keyValue; Dictionary<string, string>? keyValue;
results.Add("Footer", footerKeyValuePairs); results.Add("Footer", footerKeyValuePairs);
foreach (string footerLine in footerLines) foreach (string footerLine in footerLines) {
{
segments = footerLine.Split('\t'); segments = footerLine.Split('\t');
if (segments.Length != 2 || string.IsNullOrEmpty(segments[1].Trim())) if (segments.Length != 2 || string.IsNullOrEmpty(segments[1].Trim())) {
{
continue; continue;
} }
if (!segments[1].Contains(';') || !segments[1].Contains('=')) if (!segments[1].Contains(';') || !segments[1].Contains('=')) {
{
continue; continue;
} } else {
else
{
subSegments = segments[1].Split(';'); subSegments = segments[1].Split(';');
if (subSegments.Length < 1) if (subSegments.Length < 1) {
{
continue; continue;
} }
if (!results.TryGetValue(segments[0], out keyValue)) if (!results.TryGetValue(segments[0], out keyValue)) {
{
results.Add(segments[0], new()); results.Add(segments[0], new());
if (!results.TryGetValue(segments[0], out keyValue)) if (!results.TryGetValue(segments[0], out keyValue)) {
{
throw new Exception(); throw new Exception();
} }
} }
foreach (string segment in subSegments) foreach (string segment in subSegments) {
{
subSubSegments = segment.Split('='); subSubSegments = segment.Split('=');
if (subSubSegments.Length != 2) if (subSubSegments.Length != 2) {
{
continue; continue;
} }
keyValue.Add(subSubSegments[0], subSubSegments[1]); keyValue.Add(subSubSegments[0], subSubSegments[1]);
@ -527,8 +496,6 @@ internal class ProcessDataStandardFormat
internal static void Write(string path, ProcessDataStandardFormat processDataStandardFormat, List<Metrology.WS.Results>? wsResults) internal static void Write(string path, ProcessDataStandardFormat processDataStandardFormat, List<Metrology.WS.Results>? wsResults)
{ {
List<string> results = new(); List<string> results = new();
if (processDataStandardFormat.InputPDSF is null)
throw new NullReferenceException(nameof(processDataStandardFormat.InputPDSF));
if (processDataStandardFormat.Sequence is null) if (processDataStandardFormat.Sequence is null)
throw new NullReferenceException(nameof(processDataStandardFormat.Sequence)); throw new NullReferenceException(nameof(processDataStandardFormat.Sequence));
string endOffset = "E#######T"; string endOffset = "E#######T";
@ -566,25 +533,25 @@ internal class ProcessDataStandardFormat
} }
} }
results.Add("END_HEADER"); results.Add("END_HEADER");
if (processDataStandardFormat.InputPDSF is not null)
{
results.Add(string.Empty); results.Add(string.Empty);
List<char> hyphens = new(); List<char> hyphens = new();
results.AddRange(processDataStandardFormat.InputPDSF.Header.Select(l => $"|{l.Replace('\t', '|')}|")); results.AddRange(processDataStandardFormat.InputPDSF.Header.Select(l => l.Replace('\t', '|')));
results.Add(string.Empty); results.Add(string.Empty);
results.Add($"|{string.Join("|", processDataStandardFormat.InputPDSF.Columns)}|"); results.Add($"|{string.Join("|", processDataStandardFormat.InputPDSF.Columns)}|");
for (int i = 0; i < processDataStandardFormat.InputPDSF.Columns.Count; i++) for (int i = 0; i < processDataStandardFormat.InputPDSF.Columns.Count; i++)
hyphens.Add('-'); hyphens.Add('-');
results.Add($"|{string.Join("|", hyphens)}|"); results.Add($"|{string.Join("|", hyphens)}|");
results.AddRange(processDataStandardFormat.InputPDSF.Body.Select(l => $"|{l.Replace('\t', '|')}|")); results.AddRange(processDataStandardFormat.InputPDSF.Body.Select(l => l.Replace('\t', '|')));
results.Add(string.Empty); results.Add(string.Empty);
results.AddRange(processDataStandardFormat.InputPDSF.Footer.Select(l => $"|{l.Replace('\t', '|')}|")); results.AddRange(processDataStandardFormat.InputPDSF.Footer.Select(l => l.Replace('\t', '|')));
results.Add(string.Empty);
string xml = GetXml(processDataStandardFormat);
results.Add(xml);
results.Add(string.Empty); results.Add(string.Empty);
results.Add("EOF"); results.Add("EOF");
results.Add(string.Empty); results.Add(string.Empty);
string json = GetJson(processDataStandardFormat); string json = GetJson(processDataStandardFormat);
results.Add(json); results.Add(json);
}
File.WriteAllText(path, string.Join(Environment.NewLine, results)); File.WriteAllText(path, string.Join(Environment.NewLine, results));
} }
@ -654,17 +621,6 @@ internal class ProcessDataStandardFormat
return results; return results;
} }
internal static JsonElement[] GetArray(string reportFullPath, string[] lines, ProcessDataStandardFormat processDataStandardFormat)
{
JsonElement[] results;
string? json = GetRecordsJson(reportFullPath, lines);
if (string.IsNullOrEmpty(json))
results = GetArray(processDataStandardFormat);
else
results = JsonSerializer.Deserialize(json, JsonElementCollectionSourceGenerationContext.Default.JsonElementArray) ?? throw new Exception();
return results;
}
internal static string GetPDSFText(IFileRead fileRead, Logistics logistics, JsonElement[] jsonElements, string logisticsText) internal static string GetPDSFText(IFileRead fileRead, Logistics logistics, JsonElement[] jsonElements, string logisticsText)
{ {
string result; string result;
@ -877,116 +833,6 @@ internal class ProcessDataStandardFormat
return result; return result;
} }
internal static string GetXml(ProcessDataStandardFormat processDataStandardFormat)
{
string result;
string tag;
string value;
string[] segments;
List<string> values;
Dictionary<string, List<string>> results = new();
ReadOnlyCollection<string> body = processDataStandardFormat.InputPDSF is null ?
processDataStandardFormat.Body : processDataStandardFormat.InputPDSF.Body;
ReadOnlyCollection<string> columns = processDataStandardFormat.InputPDSF is null ?
processDataStandardFormat.Columns : processDataStandardFormat.InputPDSF.Columns;
List<string> lines = new() { "<?xml version=\"1.0\" encoding=\"UTF-8\"?>", "<records>" };
for (int i = 0; i < body.Count; i++)
{
segments = body[i].Trim().Split('\t');
if (segments.Length != columns.Count)
break;
for (int c = 0; c < segments.Length; c++)
{
value = segments[c].Replace("&", "&amp;")
.Replace("<", "&lt;")
.Replace(">", "&gt;")
.Replace("\"", "&quot;")
.Replace("'", "&apos;");
tag = Regex.Replace(columns[c].Trim('"'), @"[^a-zA-Z0-9]", "_").Split('\r')[0].Split('\n')[0];
if (i == 0)
{
if (results.ContainsKey(tag))
continue;
results.Add(tag, new List<string>());
}
results[tag].Add(value);
}
}
foreach (KeyValuePair<string, List<string>> keyValuePair in results)
{
if (body.Count < 2)
break;
if (keyValuePair.Value.Count != body.Count)
continue;
values = keyValuePair.Value.Distinct().ToList();
if (values.Count == 2 && (string.IsNullOrEmpty(values[0]) || string.IsNullOrEmpty(values[1])))
{
for (int i = 0; i < body.Count; i++)
keyValuePair.Value[i] = string.Empty;
foreach (string v in values)
{
if (string.IsNullOrEmpty(v))
continue;
keyValuePair.Value[0] = v;
}
}
}
for (int i = 0; i < body.Count; i++)
{
lines.Add(" <record>");
foreach (KeyValuePair<string, List<string>> keyValuePair in results)
{
if (keyValuePair.Value.Count != body.Count)
continue;
lines.Add(string.Concat(" <", keyValuePair.Key, '>', keyValuePair.Value[i], "</", keyValuePair.Key, '>'));
}
lines.Add(" </record>");
}
lines.Add("</records>");
result = string.Join(Environment.NewLine, lines);
return result;
}
internal static string GetXml(string reportFullPath, string[]? lines = null)
{
string result;
bool foundXml = false;
List<string> results = new();
lines ??= File.ReadAllLines(reportFullPath);
foreach (string line in lines)
{
if (line.StartsWith("<?xml"))
foundXml = true;
if (!foundXml)
continue;
if (line.StartsWith("EOF"))
break;
results.Add(line);
}
result = string.Join(Environment.NewLine, results);
return result;
}
private static string? GetRecordsJson(string reportFullPath, string[] lines)
{
string? result;
bool foundRecords = false;
List<string> results = new();
lines ??= File.ReadAllLines(reportFullPath);
foreach (string line in lines)
{
if (line.StartsWith("\"Records\""))
foundRecords = true;
if (!foundRecords)
continue;
if (line == "],")
break;
results.Add(line);
}
result = results.Count == 0 ? null : $"{string.Join(Environment.NewLine, results.Skip(1))}{Environment.NewLine}]";
return result;
}
} }
[JsonSourceGenerationOptions(WriteIndented = true)] [JsonSourceGenerationOptions(WriteIndented = true)]
@ -997,6 +843,5 @@ internal partial class JsonElementCollectionSourceGenerationContext : JsonSerial
[JsonSourceGenerationOptions(WriteIndented = true, DefaultIgnoreCondition = JsonIgnoreCondition.WhenWritingNull)] [JsonSourceGenerationOptions(WriteIndented = true, DefaultIgnoreCondition = JsonIgnoreCondition.WhenWritingNull)]
[JsonSerializable(typeof(Dictionary<string, Dictionary<string, string>>))] [JsonSerializable(typeof(Dictionary<string, Dictionary<string, string>>))]
internal partial class DictionaryStringDictionaryStringStringSourceGenerationContext : JsonSerializerContext internal partial class DictionaryStringDictionaryStringStringSourceGenerationContext : JsonSerializerContext {
{
} }
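GetFooterKeyValuePairs and GetLogisticKeyValuePairs above split the tab-separated footer lines into two maps: plain "key<TAB>value" lines (no ';') land in a flat Footer map, while lines whose value is a ';'-separated list of name=value pairs become a nested map keyed by the footer key. A compact TypeScript sketch of that parsing, assuming the same line shapes; the function name and sample input are illustrative.

// Sketch only: lines are "key<TAB>value"; a value containing ';' is treated
// as a list of name=value pairs and grouped under its key.
function parseFooter(footerLines: string[]): Record<string, Record<string, string>> {
    const footer: Record<string, string> = {};
    const results: Record<string, Record<string, string>> = { Footer: footer };
    for (const line of footerLines) {
        const segments = line.split('\t');
        if (segments.length !== 2 || segments[1].trim() === '')
            continue;
        if (!segments[1].includes(';')) {
            footer[segments[0]] = segments[1];
            continue;
        }
        if (!segments[1].includes('='))
            continue;
        const keyValue = (results[segments[0]] ??= {});
        for (const segment of segments[1].split(';')) {
            const pair = segment.split('=');
            if (pair.length !== 2)
                continue;
            keyValue[pair[0]] = pair[1];
        }
    }
    return results;
}

console.log(parseFooter([
    'DELIMITER\t;',
    'LOGISTICS_1\tA_JOBID=BACKLOG;A_MES_ENTITY=BACKLOG;',
]));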

View File

@ -1,34 +1,33 @@
using System.Collections.ObjectModel; using System.Collections.ObjectModel;
using System.Linq;
namespace Adaptation.Shared; namespace Adaptation.Shared;
public class ProcessDataStandardFormatMapping public class ProcessDataStandardFormatMapping
{ {
public ReadOnlyCollection<string> BackfillColumns { get; private set; }
public ReadOnlyCollection<int> ColumnIndices { get; private set; } public ReadOnlyCollection<int> ColumnIndices { get; private set; }
public ReadOnlyCollection<string> IgnoreColumns { get; private set; }
public ReadOnlyCollection<string> IndexOnlyColumns { get; private set; }
public ReadOnlyDictionary<string, string> KeyValuePairs { get; private set; }
public ReadOnlyCollection<string> NewColumnNames { get; private set; } public ReadOnlyCollection<string> NewColumnNames { get; private set; }
public ReadOnlyCollection<string> OldColumnNames { get; private set; } public ReadOnlyCollection<string> OldColumnNames { get; private set; }
public ProcessDataStandardFormatMapping(ReadOnlyCollection<int> columnIndices, public ProcessDataStandardFormatMapping(ReadOnlyCollection<string> backfillColumns,
ReadOnlyCollection<int> columnIndices,
ReadOnlyCollection<string> ignoreColumns,
ReadOnlyCollection<string> indexOnlyColumns,
ReadOnlyDictionary<string, string> keyValuePairs,
ReadOnlyCollection<string> newColumnNames, ReadOnlyCollection<string> newColumnNames,
ReadOnlyCollection<string> oldColumnNames) ReadOnlyCollection<string> oldColumnNames)
{ {
BackfillColumns = backfillColumns;
ColumnIndices = columnIndices; ColumnIndices = columnIndices;
IgnoreColumns = ignoreColumns;
IndexOnlyColumns = indexOnlyColumns;
KeyValuePairs = keyValuePairs;
NewColumnNames = newColumnNames; NewColumnNames = newColumnNames;
OldColumnNames = oldColumnNames; OldColumnNames = oldColumnNames;
} }
internal static ProcessDataStandardFormatMapping Get(string processDataStandardFormatMappingOldColumnNames, string processDataStandardFormatMappingNewColumnNames, string processDataStandardFormatMappingColumnIndices)
{
ProcessDataStandardFormatMapping result;
ReadOnlyCollection<string> newColumnNames = new(processDataStandardFormatMappingNewColumnNames.Split(','));
ReadOnlyCollection<string> oldColumnNames = new(processDataStandardFormatMappingOldColumnNames.Split(','));
ReadOnlyCollection<int> columnIndices = new(processDataStandardFormatMappingColumnIndices.Split(',').Select(int.Parse).ToArray());
result = new(columnIndices: columnIndices,
newColumnNames: newColumnNames,
oldColumnNames: oldColumnNames);
return result;
}
} }
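The new static `Get` helper above builds the mapping from three comma-separated configuration strings: the old column names, the new column names, and the integer index (or -1) that maps each old column onto the new layout. A TypeScript sketch of the same parsing, with a trivial lookup showing how the indices are consumed in GetProcessDataStandardFormat (value from the mapped column, or the old column name when the index is -1); the names and sample strings here are illustrative, not the production mapping.

// Sketch only: three comma-separated configuration strings become parallel
// collections; -1 means the old column has no counterpart in the new data.
type Mapping = {
    columnIndices: number[];
    newColumnNames: string[];
    oldColumnNames: string[];
};

function getMapping(oldNames: string, newNames: string, indices: string): Mapping {
    return {
        columnIndices: indices.split(',').map(s => parseInt(s, 10)),
        newColumnNames: newNames.split(','),
        oldColumnNames: oldNames.split(','),
    };
}

const mapping = getMapping('A,B,C', 'ColumnA,ColumnC', '0,-1,1');
const row = { ColumnA: '1.23', ColumnC: '4.56' };
const newValues = Object.values(row);
// Rebuild the old layout: mapped value, or the old column name when the index is -1.
const oldLayout = mapping.columnIndices.map((index, c) =>
    index === -1 ? mapping.oldColumnNames[c] : newValues[index]);
console.log(oldLayout.join('\t')); // "1.23\tB\t4.56"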

View File

@ -1,4 +1,4 @@
#if v2_60_0 #if true
using Adaptation._Tests.Shared; using Adaptation._Tests.Shared;
using Microsoft.Extensions.Logging; using Microsoft.Extensions.Logging;
using Microsoft.VisualStudio.TestTools.UnitTesting; using Microsoft.VisualStudio.TestTools.UnitTesting;

View File

@ -1,4 +1,4 @@
#if v2_60_0 #if true
using Adaptation._Tests.Shared; using Adaptation._Tests.Shared;
using Microsoft.Extensions.Logging; using Microsoft.Extensions.Logging;
using Microsoft.VisualStudio.TestTools.UnitTesting; using Microsoft.VisualStudio.TestTools.UnitTesting;

View File

@ -1,4 +1,4 @@
#if v2_60_0 #if true
using Adaptation._Tests.Shared; using Adaptation._Tests.Shared;
using Microsoft.Extensions.Logging; using Microsoft.Extensions.Logging;
using Microsoft.VisualStudio.TestTools.UnitTesting; using Microsoft.VisualStudio.TestTools.UnitTesting;

View File

@ -1,65 +0,0 @@
#if true
using Adaptation._Tests.Shared;
using Microsoft.Extensions.Logging;
using Microsoft.VisualStudio.TestTools.UnitTesting;
using System;
using System.Diagnostics;
using System.IO;
using System.Reflection;
namespace Adaptation._Tests.CreateSelfDescription.Development.v2_61_1;
[TestClass]
public class BACKLOG_EQPT : EAFLoggingUnitTesting
{
#pragma warning disable CA2254
#pragma warning disable IDE0060
internal static string DummyRoot { get; private set; }
internal static BACKLOG_EQPT EAFLoggingUnitTesting { get; private set; }
static BACKLOG_EQPT() => DummyRoot = @"\\mesfs.infineon.com\EC_Characterization_Si\Dummy";
public BACKLOG_EQPT() : base(DummyRoot, testContext: null, declaringType: null, skipEquipmentDictionary: false)
{
if (EAFLoggingUnitTesting is null)
throw new Exception();
}
public BACKLOG_EQPT(TestContext testContext) : base(DummyRoot, testContext, new StackFrame().GetMethod().DeclaringType, skipEquipmentDictionary: false)
{
}
[ClassInitialize]
public static void ClassInitialize(TestContext testContext)
{
EAFLoggingUnitTesting ??= new BACKLOG_EQPT(testContext);
EAFLoggingUnitTesting.Logger.LogInformation(string.Concat(testContext.TestName, " - ClassInitialize"));
string[] fileNameAndText = EAFLoggingUnitTesting.AdaptationTesting.GetCSharpText(testContext.TestName);
File.WriteAllText(fileNameAndText[0], fileNameAndText[1]);
File.WriteAllText(fileNameAndText[2], fileNameAndText[3]);
}
[ClassCleanup()]
public static void ClassCleanup()
{
EAFLoggingUnitTesting.Logger?.LogInformation("Cleanup");
EAFLoggingUnitTesting?.Dispose();
}
#if DEBUG
[Ignore]
#endif
[TestMethod]
public void Development__v2_61_1__BACKLOG_EQPT__DownloadWorkItems()
{
string check = ".xlsx";
MethodBase methodBase = new StackFrame().GetMethod();
EAFLoggingUnitTesting.Logger.LogInformation(string.Concat(methodBase.Name, " - Getting configuration"));
_ = AdaptationTesting.GetWriteConfigurationGetFileRead(methodBase, check, EAFLoggingUnitTesting.AdaptationTesting);
EAFLoggingUnitTesting.Logger.LogInformation(string.Concat(methodBase.Name, " - Exit"));
}
}
#endif
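The deleted BACKLOG_EQPT fixture above, and the BACKLOG and MESAFIBACKLOG fixtures that follow, share the same MSTest scaffolding: a static singleton built in ClassInitialize, disposed in ClassCleanup, with test methods ignored in Debug builds. A condensed, self-contained sketch of that lifecycle, with a placeholder class name:

using Microsoft.VisualStudio.TestTools.UnitTesting;

[TestClass]
public class LifecycleSketch
{
    // Shared fixture instance created once per test class, mirroring EAFLoggingUnitTesting above.
    internal static LifecycleSketch Instance { get; private set; }

    [ClassInitialize]
    public static void ClassInitialize(TestContext testContext) =>
        Instance ??= new LifecycleSketch();

    [ClassCleanup]
    public static void ClassCleanup() => Instance = null;

#if DEBUG
    [Ignore] // skipped in Debug builds, the same gate the deleted tests used
#endif
    [TestMethod]
    public void RunsOnlyInReleaseBuilds() => Assert.IsNotNull(Instance);
}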

View File

@ -1,65 +0,0 @@
#if true
using Adaptation._Tests.Shared;
using Microsoft.Extensions.Logging;
using Microsoft.VisualStudio.TestTools.UnitTesting;
using System;
using System.Diagnostics;
using System.IO;
using System.Reflection;
namespace Adaptation._Tests.CreateSelfDescription.Development.v2_61_1;
[TestClass]
public class BACKLOG : EAFLoggingUnitTesting
{
#pragma warning disable CA2254
#pragma warning disable IDE0060
internal static string DummyRoot { get; private set; }
internal static BACKLOG EAFLoggingUnitTesting { get; private set; }
static BACKLOG() => DummyRoot = @"\\mesfs.infineon.com\EC_Characterization_Si\Dummy";
public BACKLOG() : base(DummyRoot, testContext: null, declaringType: null, skipEquipmentDictionary: false)
{
if (EAFLoggingUnitTesting is null)
throw new Exception();
}
public BACKLOG(TestContext testContext) : base(DummyRoot, testContext, new StackFrame().GetMethod().DeclaringType, skipEquipmentDictionary: false)
{
}
[ClassInitialize]
public static void ClassInitialize(TestContext testContext)
{
EAFLoggingUnitTesting ??= new BACKLOG(testContext);
EAFLoggingUnitTesting.Logger.LogInformation(string.Concat(testContext.TestName, " - ClassInitialize"));
string[] fileNameAndText = EAFLoggingUnitTesting.AdaptationTesting.GetCSharpText(testContext.TestName);
File.WriteAllText(fileNameAndText[0], fileNameAndText[1]);
File.WriteAllText(fileNameAndText[2], fileNameAndText[3]);
}
[ClassCleanup()]
public static void ClassCleanup()
{
EAFLoggingUnitTesting.Logger?.LogInformation("Cleanup");
EAFLoggingUnitTesting?.Dispose();
}
#if DEBUG
[Ignore]
#endif
[TestMethod]
public void Development__v2_61_1__BACKLOG__json()
{
string check = "*.json";
MethodBase methodBase = new StackFrame().GetMethod();
EAFLoggingUnitTesting.Logger.LogInformation(string.Concat(methodBase.Name, " - Getting configuration"));
_ = AdaptationTesting.GetWriteConfigurationGetFileRead(methodBase, check, EAFLoggingUnitTesting.AdaptationTesting);
EAFLoggingUnitTesting.Logger.LogInformation(string.Concat(methodBase.Name, " - Exit"));
}
}
#endif

View File

@ -1,117 +0,0 @@
#if true
using Adaptation._Tests.Shared;
using Microsoft.Extensions.Logging;
using Microsoft.VisualStudio.TestTools.UnitTesting;
using System;
using System.Diagnostics;
using System.IO;
using System.Reflection;
namespace Adaptation._Tests.CreateSelfDescription.Development.v2_61_1;
[TestClass]
public class MESAFIBACKLOG : EAFLoggingUnitTesting
{
#pragma warning disable CA2254
#pragma warning disable IDE0060
internal static string DummyRoot { get; private set; }
internal static MESAFIBACKLOG EAFLoggingUnitTesting { get; private set; }
static MESAFIBACKLOG() => DummyRoot = @"\\mesfs.infineon.com\EC_Characterization_Si\Dummy";
public MESAFIBACKLOG() : base(DummyRoot, testContext: null, declaringType: null, skipEquipmentDictionary: false)
{
if (EAFLoggingUnitTesting is null)
throw new Exception();
}
public MESAFIBACKLOG(TestContext testContext) : base(DummyRoot, testContext, new StackFrame().GetMethod().DeclaringType, skipEquipmentDictionary: false)
{
}
[ClassInitialize]
public static void ClassInitialize(TestContext testContext)
{
EAFLoggingUnitTesting ??= new MESAFIBACKLOG(testContext);
EAFLoggingUnitTesting.Logger.LogInformation(string.Concat(testContext.TestName, " - ClassInitialize"));
string[] fileNameAndText = EAFLoggingUnitTesting.AdaptationTesting.GetCSharpText(testContext.TestName);
File.WriteAllText(fileNameAndText[0], fileNameAndText[1]);
File.WriteAllText(fileNameAndText[2], fileNameAndText[3]);
}
[ClassCleanup()]
public static void ClassCleanup()
{
EAFLoggingUnitTesting.Logger?.LogInformation("Cleanup");
EAFLoggingUnitTesting?.Dispose();
}
#if DEBUG
[Ignore]
#endif
[TestMethod]
public void Development__v2_61_1__MESAFIBACKLOG__Kanban()
{
string check = "*.json";
MethodBase methodBase = new StackFrame().GetMethod();
EAFLoggingUnitTesting.Logger.LogInformation(string.Concat(methodBase.Name, " - Getting configuration"));
_ = AdaptationTesting.GetWriteConfigurationGetFileRead(methodBase, check, EAFLoggingUnitTesting.AdaptationTesting);
EAFLoggingUnitTesting.Logger.LogInformation(string.Concat(methodBase.Name, " - Exit"));
}
#if DEBUG
[Ignore]
#endif
[TestMethod]
public void Development__v2_61_1__MESAFIBACKLOG__Markdown()
{
string check = "*.json";
MethodBase methodBase = new StackFrame().GetMethod();
EAFLoggingUnitTesting.Logger.LogInformation(string.Concat(methodBase.Name, " - Getting configuration"));
_ = AdaptationTesting.GetWriteConfigurationGetFileRead(methodBase, check, EAFLoggingUnitTesting.AdaptationTesting);
EAFLoggingUnitTesting.Logger.LogInformation(string.Concat(methodBase.Name, " - Exit"));
}
#if DEBUG
[Ignore]
#endif
[TestMethod]
public void Development__v2_61_1__MESAFIBACKLOG__ADO()
{
string check = "*.json";
MethodBase methodBase = new StackFrame().GetMethod();
EAFLoggingUnitTesting.Logger.LogInformation(string.Concat(methodBase.Name, " - Getting configuration"));
_ = AdaptationTesting.GetWriteConfigurationGetFileRead(methodBase, check, EAFLoggingUnitTesting.AdaptationTesting);
EAFLoggingUnitTesting.Logger.LogInformation(string.Concat(methodBase.Name, " - Exit"));
}
#if DEBUG
[Ignore]
#endif
[TestMethod]
public void Development__v2_61_1__MESAFIBACKLOG__Priority()
{
string check = "*.json";
MethodBase methodBase = new StackFrame().GetMethod();
EAFLoggingUnitTesting.Logger.LogInformation(string.Concat(methodBase.Name, " - Getting configuration"));
_ = AdaptationTesting.GetWriteConfigurationGetFileRead(methodBase, check, EAFLoggingUnitTesting.AdaptationTesting);
EAFLoggingUnitTesting.Logger.LogInformation(string.Concat(methodBase.Name, " - Exit"));
}
#if DEBUG
[Ignore]
#endif
[TestMethod]
public void Development__v2_61_1__MESAFIBACKLOG__Violation()
{
string check = "*.json";
MethodBase methodBase = new StackFrame().GetMethod();
EAFLoggingUnitTesting.Logger.LogInformation(string.Concat(methodBase.Name, " - Getting configuration"));
_ = AdaptationTesting.GetWriteConfigurationGetFileRead(methodBase, check, EAFLoggingUnitTesting.AdaptationTesting);
EAFLoggingUnitTesting.Logger.LogInformation(string.Concat(methodBase.Name, " - Exit"));
}
}
#endif

View File

@ -1,4 +1,4 @@
#if v2_60_0 #if true
using Microsoft.VisualStudio.TestTools.UnitTesting; using Microsoft.VisualStudio.TestTools.UnitTesting;
using System; using System;
using System.Diagnostics; using System.Diagnostics;

View File

@ -1,4 +1,4 @@
#if v2_60_0 #if true
using Adaptation.Shared; using Adaptation.Shared;
using Adaptation.Shared.Methods; using Adaptation.Shared.Methods;
using Microsoft.VisualStudio.TestTools.UnitTesting; using Microsoft.VisualStudio.TestTools.UnitTesting;

View File

@ -1,4 +1,4 @@
#if v2_60_0 #if true
using Adaptation.FileHandlers.json.WorkItems; using Adaptation.FileHandlers.json.WorkItems;
using Adaptation.Shared; using Adaptation.Shared;
using Adaptation.Shared.Methods; using Adaptation.Shared.Methods;

View File

@ -1,52 +0,0 @@
#if true
using Microsoft.VisualStudio.TestTools.UnitTesting;
using System;
using System.Diagnostics;
using System.Reflection;
using System.Threading;
namespace Adaptation._Tests.Extract.Development.v2_61_1;
[TestClass]
public class BACKLOG_EQPT
{
#pragma warning disable CA2254
#pragma warning disable IDE0060
private static CreateSelfDescription.Development.v2_61_1.BACKLOG_EQPT _BACKLOG_EQPT;
[ClassInitialize]
public static void ClassInitialize(TestContext testContext)
{
CreateSelfDescription.Development.v2_61_1.BACKLOG_EQPT.ClassInitialize(testContext);
_BACKLOG_EQPT = CreateSelfDescription.Development.v2_61_1.BACKLOG_EQPT.EAFLoggingUnitTesting;
}
private static void NonThrowTryCatch()
{
try
{ throw new Exception(); }
catch (Exception) { }
}
[Ignore]
[TestMethod]
public void Development__v2_61_1__BACKLOG_EQPT__DownloadWorkItems() => _BACKLOG_EQPT.Development__v2_61_1__BACKLOG_EQPT__DownloadWorkItems();
[Ignore]
[TestMethod]
public void Development__v2_61_1__BACKLOG_EQPT__DownloadWorkItems638612245609095845__Normal()
{
string check = ".json";
bool validatePDSF = false;
MethodBase methodBase = new StackFrame().GetMethod();
_BACKLOG_EQPT.Development__v2_61_1__BACKLOG_EQPT__DownloadWorkItems();
_ = _BACKLOG_EQPT.AdaptationTesting.GetVariables(methodBase, check, validatePDSF);
for (int i = 0; i < int.MinValue; i++)
Thread.Sleep(500);
NonThrowTryCatch();
}
}
#endif

View File

@ -1,64 +0,0 @@
#if true
using Adaptation.Shared;
using Adaptation.Shared.Methods;
using Microsoft.VisualStudio.TestTools.UnitTesting;
using System;
using System.Collections.Generic;
using System.Diagnostics;
using System.IO;
using System.Reflection;
using System.Text.Json;
namespace Adaptation._Tests.Extract.Development.v2_61_1;
[TestClass]
public class BACKLOG
{
#pragma warning disable CA2254
#pragma warning disable IDE0060
private static CreateSelfDescription.Development.v2_61_1.BACKLOG _BACKLOG;
[ClassInitialize]
public static void ClassInitialize(TestContext testContext)
{
CreateSelfDescription.Development.v2_61_1.BACKLOG.ClassInitialize(testContext);
_BACKLOG = CreateSelfDescription.Development.v2_61_1.BACKLOG.EAFLoggingUnitTesting;
}
private static void NonThrowTryCatch()
{
try
{ throw new Exception(); }
catch (Exception) { }
}
#if DEBUG
[Ignore]
#endif
[TestMethod]
public void Development__v2_61_1__BACKLOG__json() => _BACKLOG.Development__v2_61_1__BACKLOG__json();
#if DEBUG
[Ignore]
#endif
[TestMethod]
public void Development__v2_61_1__BACKLOG__json638612245609095846__Normal()
{
string check = "*.json";
bool validatePDSF = false;
_BACKLOG.Development__v2_61_1__BACKLOG__json();
MethodBase methodBase = new StackFrame().GetMethod();
Assert.IsFalse(string.IsNullOrEmpty(_BACKLOG.AdaptationTesting.TestContext.FullyQualifiedTestClassName));
string[] variables = _BACKLOG.AdaptationTesting.GetVariables(methodBase, check, validatePDSF);
IFileRead fileRead = _BACKLOG.AdaptationTesting.Get(methodBase, sourceFileLocation: variables[2], sourceFileFilter: variables[3], useCyclicalForDescription: false);
Tuple<string, Test[], JsonElement[], List<FileInfo>> extractResult = fileRead.ReExtract();
Assert.IsFalse(string.IsNullOrEmpty(extractResult?.Item1));
Assert.IsNotNull(extractResult.Item3);
Assert.IsNotNull(extractResult.Item4);
NonThrowTryCatch();
}
}
#endif

View File

@ -1,249 +0,0 @@
#if true
using Adaptation.FileHandlers.json.WorkItems;
using Adaptation.Shared;
using Adaptation.Shared.Methods;
using Microsoft.VisualStudio.TestTools.UnitTesting;
using System;
using System.Collections.Generic;
using System.Collections.ObjectModel;
using System.Diagnostics;
using System.IO;
using System.Reflection;
using System.Text.Json;
namespace Adaptation._Tests.Extract.Development.v2_61_1;
[TestClass]
public class MESAFIBACKLOG
{
#pragma warning disable CA2254
#pragma warning disable IDE0060
private static CreateSelfDescription.Development.v2_61_1.MESAFIBACKLOG _MESAFIBACKLOG;
[ClassInitialize]
public static void ClassInitialize(TestContext testContext)
{
CreateSelfDescription.Development.v2_61_1.MESAFIBACKLOG.ClassInitialize(testContext);
_MESAFIBACKLOG = CreateSelfDescription.Development.v2_61_1.MESAFIBACKLOG.EAFLoggingUnitTesting;
}
private static void NonThrowTryCatch()
{
try
{ throw new Exception(); }
catch (Exception) { }
}
#if DEBUG
[Ignore]
#endif
[TestMethod]
public void Development__v2_61_1__MESAFIBACKLOG__Kanban() => _MESAFIBACKLOG.Development__v2_61_1__MESAFIBACKLOG__Kanban();
#if DEBUG
[Ignore]
#endif
[TestMethod]
public void Development__v2_61_1__MESAFIBACKLOG__Kanban638323658386612552__Normal()
{
string check = "*.json";
bool validatePDSF = false;
MethodBase methodBase = new StackFrame().GetMethod();
_MESAFIBACKLOG.Development__v2_61_1__MESAFIBACKLOG__Kanban();
Assert.IsFalse(string.IsNullOrEmpty(_MESAFIBACKLOG.AdaptationTesting.TestContext.FullyQualifiedTestClassName));
string[] variables = _MESAFIBACKLOG.AdaptationTesting.GetVariables(methodBase, check, validatePDSF);
IFileRead fileRead = _MESAFIBACKLOG.AdaptationTesting.Get(methodBase, sourceFileLocation: variables[2], sourceFileFilter: variables[3], useCyclicalForDescription: false);
Tuple<string, Test[], JsonElement[], List<FileInfo>> extractResult = fileRead.ReExtract();
Assert.IsFalse(string.IsNullOrEmpty(extractResult?.Item1));
Assert.IsNotNull(extractResult.Item3);
Assert.IsNotNull(extractResult.Item4);
NonThrowTryCatch();
}
#if DEBUG
[Ignore]
#endif
[TestMethod]
public void Development__v2_61_1__MESAFIBACKLOG__Markdown() => _MESAFIBACKLOG.Development__v2_61_1__MESAFIBACKLOG__Markdown();
private static ReadOnlyDictionary<string, FileInfo> GetKeyValuePairs(List<FileInfo> collection)
{
Dictionary<string, FileInfo> results = [];
foreach (FileInfo fileInfo in collection)
results.Add(fileInfo.Name, fileInfo);
return new(results);
}
private static ReadOnlyCollection<Record> GetRecords(FileInfo fileInfo)
{
Record[] results;
string json = File.ReadAllText(fileInfo.FullName);
results = JsonSerializer.Deserialize<Record[]>(json);
return new(results);
}
private static void Verify122508(FileInfo fileInfo)
{
ReadOnlyCollection<Record> records = GetRecords(fileInfo);
Assert.IsNotNull(records);
// Assert.IsTrue(records.Count == 10);
// Assert.IsTrue(records.Count == 6);
}
private static void Verify122514(FileInfo fileInfo)
{
ReadOnlyCollection<Record> records = GetRecords(fileInfo);
Assert.IsNotNull(records);
// Assert.IsTrue(records.Count == 25);
// Assert.IsTrue(records.Count == 6);
}
private static void Verify126169(FileInfo fileInfo)
{
ReadOnlyCollection<Record> records = GetRecords(fileInfo);
Assert.IsNotNull(records);
// Assert.IsTrue(records.Count == 1);
// Assert.IsTrue(records.Count == 1);
}
private static void Verify123066(FileInfo fileInfo)
{
ReadOnlyCollection<Record> records = GetRecords(fileInfo);
Assert.IsNotNull(records);
// Assert.IsTrue(records.Count == 24);
// Assert.IsTrue(records.Count == 5);
}
private static void Verify123067(FileInfo fileInfo)
{
ReadOnlyCollection<Record> records = GetRecords(fileInfo);
Assert.IsNotNull(records);
// Assert.IsTrue(records.Count == 24);
// Assert.IsTrue(records.Count == 5);
}
private static void Verify122517(FileInfo fileInfo)
{
ReadOnlyCollection<Record> records = GetRecords(fileInfo);
Assert.IsNotNull(records);
// Assert.IsTrue(records.Count == 14);
// Assert.IsTrue(records.Count == 14);
}
#if DEBUG
[Ignore]
#endif
[TestMethod]
public void Development__v2_61_1__MESAFIBACKLOG__Markdown638323658386612552__Normal()
{
string check = "*.json";
bool validatePDSF = false;
MethodBase methodBase = new StackFrame().GetMethod();
_MESAFIBACKLOG.Development__v2_61_1__MESAFIBACKLOG__Markdown();
Assert.IsFalse(string.IsNullOrEmpty(_MESAFIBACKLOG.AdaptationTesting.TestContext.FullyQualifiedTestClassName));
string[] variables = _MESAFIBACKLOG.AdaptationTesting.GetVariables(methodBase, check, validatePDSF);
IFileRead fileRead = _MESAFIBACKLOG.AdaptationTesting.Get(methodBase, sourceFileLocation: variables[2], sourceFileFilter: variables[3], useCyclicalForDescription: false);
Tuple<string, Test[], JsonElement[], List<FileInfo>> extractResult = fileRead.ReExtract();
Assert.IsFalse(string.IsNullOrEmpty(extractResult?.Item1));
Assert.IsNotNull(extractResult.Item3);
Assert.IsNotNull(extractResult.Item4);
NonThrowTryCatch();
}
#if DEBUG
[Ignore]
#endif
[TestMethod]
public void Development__v2_61_1__MESAFIBACKLOG__ADO638323658386612552__Normal()
{
string check = "*.json";
bool validatePDSF = false;
MethodBase methodBase = new StackFrame().GetMethod();
_MESAFIBACKLOG.Development__v2_61_1__MESAFIBACKLOG__ADO();
Assert.IsFalse(string.IsNullOrEmpty(_MESAFIBACKLOG.AdaptationTesting.TestContext.FullyQualifiedTestClassName));
string[] variables = _MESAFIBACKLOG.AdaptationTesting.GetVariables(methodBase, check, validatePDSF);
IFileRead fileRead = _MESAFIBACKLOG.AdaptationTesting.Get(methodBase, sourceFileLocation: variables[2], sourceFileFilter: variables[3], useCyclicalForDescription: false);
Tuple<string, Test[], JsonElement[], List<FileInfo>> extractResult = fileRead.ReExtract();
Assert.IsFalse(string.IsNullOrEmpty(extractResult?.Item1));
Assert.IsNotNull(extractResult.Item3);
Assert.IsNotNull(extractResult.Item4);
NonThrowTryCatch();
}
#if DEBUG
[Ignore]
#endif
[TestMethod]
public void Development__v2_61_1__MESAFIBACKLOG__Priority638323658386612552__Normal()
{
string check = "*.json";
bool validatePDSF = false;
MethodBase methodBase = new StackFrame().GetMethod();
_MESAFIBACKLOG.Development__v2_61_1__MESAFIBACKLOG__Priority();
Assert.IsFalse(string.IsNullOrEmpty(_MESAFIBACKLOG.AdaptationTesting.TestContext.FullyQualifiedTestClassName));
string[] variables = _MESAFIBACKLOG.AdaptationTesting.GetVariables(methodBase, check, validatePDSF);
IFileRead fileRead = _MESAFIBACKLOG.AdaptationTesting.Get(methodBase, sourceFileLocation: variables[2], sourceFileFilter: variables[3], useCyclicalForDescription: false);
Tuple<string, Test[], JsonElement[], List<FileInfo>> extractResult = fileRead.ReExtract();
Assert.IsFalse(string.IsNullOrEmpty(extractResult?.Item1));
Assert.IsNotNull(extractResult.Item3);
Assert.IsNotNull(extractResult.Item4);
NonThrowTryCatch();
}
#if DEBUG
[Ignore]
#endif
[TestMethod]
public void Development__v2_61_1__MESAFIBACKLOG__Markdown638779784153157286__Normal()
{
string check = "*.json";
bool validatePDSF = false;
MethodBase methodBase = new StackFrame().GetMethod();
_MESAFIBACKLOG.Development__v2_61_1__MESAFIBACKLOG__Markdown();
Assert.IsFalse(string.IsNullOrEmpty(_MESAFIBACKLOG.AdaptationTesting.TestContext.FullyQualifiedTestClassName));
string[] variables = _MESAFIBACKLOG.AdaptationTesting.GetVariables(methodBase, check, validatePDSF);
IFileRead fileRead = _MESAFIBACKLOG.AdaptationTesting.Get(methodBase, sourceFileLocation: variables[2], sourceFileFilter: variables[3], useCyclicalForDescription: false);
Tuple<string, Test[], JsonElement[], List<FileInfo>> extractResult = fileRead.ReExtract();
Assert.IsFalse(string.IsNullOrEmpty(extractResult?.Item1));
Assert.IsNotNull(extractResult.Item3);
Assert.IsNotNull(extractResult.Item4);
NonThrowTryCatch();
}
#if DEBUG
[Ignore]
#endif
[TestMethod]
public void Development__v2_61_1__MESAFIBACKLOG__Violation638779784153157287__Normal()
{
string check = "*.json";
bool validatePDSF = false;
MethodBase methodBase = new StackFrame().GetMethod();
_MESAFIBACKLOG.Development__v2_61_1__MESAFIBACKLOG__Violation();
Assert.IsFalse(string.IsNullOrEmpty(_MESAFIBACKLOG.AdaptationTesting.TestContext.FullyQualifiedTestClassName));
string[] variables = _MESAFIBACKLOG.AdaptationTesting.GetVariables(methodBase, check, validatePDSF);
IFileRead fileRead = _MESAFIBACKLOG.AdaptationTesting.Get(methodBase, sourceFileLocation: variables[2], sourceFileFilter: variables[3], useCyclicalForDescription: false);
Tuple<string, Test[], JsonElement[], List<FileInfo>> extractResult = fileRead.ReExtract();
Assert.IsFalse(string.IsNullOrEmpty(extractResult?.Item1));
Assert.IsNotNull(extractResult.Item3);
Assert.IsNotNull(extractResult.Item4);
ReadOnlyDictionary<string, FileInfo> keyValuePairs = GetKeyValuePairs(extractResult.Item4);
Assert.IsTrue(keyValuePairs.ContainsKey("check-122508.json"));
Assert.IsTrue(keyValuePairs.ContainsKey("check-122514.json"));
Assert.IsTrue(keyValuePairs.ContainsKey("check-126169.json"));
Assert.IsTrue(keyValuePairs.ContainsKey("check-123066.json"));
Assert.IsTrue(keyValuePairs.ContainsKey("check-123067.json"));
Assert.IsTrue(keyValuePairs.ContainsKey("check-122517.json"));
Verify122508(keyValuePairs["check-122508.json"]);
Verify122514(keyValuePairs["check-122514.json"]);
Verify126169(keyValuePairs["check-126169.json"]);
Verify123066(keyValuePairs["check-123066.json"]);
Verify123067(keyValuePairs["check-123067.json"]);
Verify122517(keyValuePairs["check-122517.json"]);
NonThrowTryCatch();
}
}
#endif

View File

@ -193,12 +193,7 @@ public class AdaptationTesting : ISMTP
segments = withActualCICN.Split(new string[] { ticks }, StringSplitOptions.None); segments = withActualCICN.Split(new string[] { ticks }, StringSplitOptions.None);
dummyDirectory = Path.Combine(dummyRoot, cellInstanceName, ticks, string.Join(null, segments)); dummyDirectory = Path.Combine(dummyRoot, cellInstanceName, ticks, string.Join(null, segments));
if (!Directory.Exists(dummyDirectory)) if (!Directory.Exists(dummyDirectory))
{
_ = Directory.CreateDirectory(dummyDirectory); _ = Directory.CreateDirectory(dummyDirectory);
try
{ Directory.SetLastWriteTime(Path.Combine(dummyRoot, cellInstanceName), DateTime.Now); }
catch { }
}
} }
if (string.IsNullOrEmpty(ticks)) if (string.IsNullOrEmpty(ticks))
{ {
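This hunk drops the branch that, after creating the dummy directory, also refreshed the parent cell-instance folder's last-write time while swallowing IO errors. A standalone sketch of that older behavior, with placeholder method and parameter names:

using System;
using System.IO;

internal static class DummyDirectorySketch
{
    private static void EnsureDummyDirectory(string dummyRoot, string cellInstanceName, string dummyDirectory)
    {
        if (!Directory.Exists(dummyDirectory))
        {
            _ = Directory.CreateDirectory(dummyDirectory);
            try
            { Directory.SetLastWriteTime(Path.Combine(dummyRoot, cellInstanceName), DateTime.Now); }
            catch { } // ignore IO/permission failures, as the removed code did
        }
    }
}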

View File

@ -64,8 +64,8 @@ public class BACKLOG : LoggingUnitTesting, IDisposable
StringBuilder results = new(); StringBuilder results = new();
(string cellInstanceName, string cellInstanceVersionName)[] collection = new (string, string)[] (string cellInstanceName, string cellInstanceVersionName)[] collection = new (string, string)[]
{ {
new("BACKLOG", "v2.61.1"), new("BACKLOG", "v2.60.0"),
new("BACKLOG-EQPT", "v2.61.1"), new("BACKLOG-EQPT", "v2.60.0"),
}; };
string development = "http://eaf-dev.mes.infineon.com:9003/CellInstanceServiceV2"; string development = "http://eaf-dev.mes.infineon.com:9003/CellInstanceServiceV2";
Shared.PasteSpecialXml.EAF.XML.API.CellInstance.CellInstanceVersion cellInstanceVersion; Shared.PasteSpecialXml.EAF.XML.API.CellInstance.CellInstanceVersion cellInstanceVersion;

View File

@ -64,7 +64,7 @@ public class MESAFIBACKLOG : LoggingUnitTesting, IDisposable
StringBuilder results = new(); StringBuilder results = new();
(string cellInstanceName, string cellInstanceVersionName)[] collection = new (string, string)[] (string cellInstanceName, string cellInstanceVersionName)[] collection = new (string, string)[]
{ {
new("MESAFIBACKLOG", "v2.61.1"), new("MESAFIBACKLOG", "v2.60.0"),
}; };
string development = "http://eaf-dev.mes.infineon.com:9003/CellInstanceServiceV2"; string development = "http://eaf-dev.mes.infineon.com:9003/CellInstanceServiceV2";
Shared.PasteSpecialXml.EAF.XML.API.CellInstance.CellInstanceVersion cellInstanceVersion; Shared.PasteSpecialXml.EAF.XML.API.CellInstance.CellInstanceVersion cellInstanceVersion;

View File

@ -106,6 +106,8 @@
<Compile Include="Adaptation\Eaf\Management\ConfigurationData\Semiconductor\CellInstances\SecsConnectionConfiguration.cs" /> <Compile Include="Adaptation\Eaf\Management\ConfigurationData\Semiconductor\CellInstances\SecsConnectionConfiguration.cs" />
<Compile Include="Adaptation\FileHandlers\ADO\FileRead.cs" /> <Compile Include="Adaptation\FileHandlers\ADO\FileRead.cs" />
<Compile Include="Adaptation\FileHandlers\ADO\ProcessData.cs" /> <Compile Include="Adaptation\FileHandlers\ADO\ProcessData.cs" />
<Compile Include="Adaptation\FileHandlers\APC\FileRead.cs" />
<Compile Include="Adaptation\FileHandlers\Archive\FileRead.cs" />
<Compile Include="Adaptation\FileHandlers\CellInstanceConnectionName.cs" /> <Compile Include="Adaptation\FileHandlers\CellInstanceConnectionName.cs" />
<Compile Include="Adaptation\FileHandlers\ConvertExcelToJson\FIBacklogMesa.cs" /> <Compile Include="Adaptation\FileHandlers\ConvertExcelToJson\FIBacklogMesa.cs" />
<Compile Include="Adaptation\FileHandlers\ConvertExcelToJson\FileRead.cs" /> <Compile Include="Adaptation\FileHandlers\ConvertExcelToJson\FileRead.cs" />
@ -113,6 +115,8 @@
<Compile Include="Adaptation\FileHandlers\CopyToPaths\FileRead.cs" /> <Compile Include="Adaptation\FileHandlers\CopyToPaths\FileRead.cs" />
<Compile Include="Adaptation\FileHandlers\DownloadExcelFile\FileRead.cs" /> <Compile Include="Adaptation\FileHandlers\DownloadExcelFile\FileRead.cs" />
<Compile Include="Adaptation\FileHandlers\DownloadWorkItems\FileRead.cs" /> <Compile Include="Adaptation\FileHandlers\DownloadWorkItems\FileRead.cs" />
<Compile Include="Adaptation\FileHandlers\Dummy\FileRead.cs" />
<Compile Include="Adaptation\FileHandlers\IQSSi\FileRead.cs" />
<Compile Include="Adaptation\FileHandlers\json\FileRead.cs" /> <Compile Include="Adaptation\FileHandlers\json\FileRead.cs" />
<Compile Include="Adaptation\FileHandlers\json\ProcessData.cs" /> <Compile Include="Adaptation\FileHandlers\json\ProcessData.cs" />
<Compile Include="Adaptation\FileHandlers\json\WIQL\Attribute.cs" /> <Compile Include="Adaptation\FileHandlers\json\WIQL\Attribute.cs" />
@ -140,6 +144,10 @@
<Compile Include="Adaptation\FileHandlers\Kanban\ProcessData.cs" /> <Compile Include="Adaptation\FileHandlers\Kanban\ProcessData.cs" />
<Compile Include="Adaptation\FileHandlers\Markdown\FileRead.cs" /> <Compile Include="Adaptation\FileHandlers\Markdown\FileRead.cs" />
<Compile Include="Adaptation\FileHandlers\Markdown\ProcessData.cs" /> <Compile Include="Adaptation\FileHandlers\Markdown\ProcessData.cs" />
<Compile Include="Adaptation\FileHandlers\MoveMatchingFiles\FileRead.cs" />
<Compile Include="Adaptation\FileHandlers\OpenInsight\FileRead.cs" />
<Compile Include="Adaptation\FileHandlers\OpenInsightMetrologyViewer\FileRead.cs" />
<Compile Include="Adaptation\FileHandlers\OpenInsightMetrologyViewerAttachments\FileRead.cs" />
<Compile Include="Adaptation\FileHandlers\Priority\Aggregation.cs" /> <Compile Include="Adaptation\FileHandlers\Priority\Aggregation.cs" />
<Compile Include="Adaptation\FileHandlers\Priority\FileRead.cs" /> <Compile Include="Adaptation\FileHandlers\Priority\FileRead.cs" />
<Compile Include="Adaptation\FileHandlers\Priority\Notification.cs" /> <Compile Include="Adaptation\FileHandlers\Priority\Notification.cs" />
@ -147,6 +155,8 @@
<Compile Include="Adaptation\FileHandlers\Priority\Startup.cs" /> <Compile Include="Adaptation\FileHandlers\Priority\Startup.cs" />
<Compile Include="Adaptation\FileHandlers\Priority\WeightedShortestJobFirstModule.cs" /> <Compile Include="Adaptation\FileHandlers\Priority\WeightedShortestJobFirstModule.cs" />
<Compile Include="Adaptation\FileHandlers\Priority\WorkItem.cs" /> <Compile Include="Adaptation\FileHandlers\Priority\WorkItem.cs" />
<Compile Include="Adaptation\FileHandlers\Processed\FileRead.cs" />
<Compile Include="Adaptation\FileHandlers\SPaCe\FileRead.cs" />
<Compile Include="Adaptation\FileHandlers\Violation\FileRead.cs" /> <Compile Include="Adaptation\FileHandlers\Violation\FileRead.cs" />
<Compile Include="Adaptation\FileHandlers\Violation\ProcessData.cs" /> <Compile Include="Adaptation\FileHandlers\Violation\ProcessData.cs" />
<Compile Include="Adaptation\Ifx\Eaf\Common\Configuration\ConnectionSetting.cs" /> <Compile Include="Adaptation\Ifx\Eaf\Common\Configuration\ConnectionSetting.cs" />
@ -204,7 +214,7 @@
<Version>0.15.1</Version> <Version>0.15.1</Version>
</PackageReference> </PackageReference>
<PackageReference Include="Infineon.EAF.Runtime"> <PackageReference Include="Infineon.EAF.Runtime">
<Version>2.61.1</Version> <Version>2.60.0</Version>
</PackageReference> </PackageReference>
<PackageReference Include="Nancy.Owin"> <PackageReference Include="Nancy.Owin">
<Version>2.0.0</Version> <Version>2.0.0</Version>
@ -213,7 +223,7 @@
<Version>1.0.0</Version> <Version>1.0.0</Version>
</PackageReference> </PackageReference>
<PackageReference Include="System.Text.Json"> <PackageReference Include="System.Text.Json">
<Version>8.0.3</Version> <Version>8.0.5</Version>
</PackageReference> </PackageReference>
</ItemGroup> </ItemGroup>
<Import Project="$(MSBuildToolsPath)\Microsoft.CSharp.targets" /> <Import Project="$(MSBuildToolsPath)\Microsoft.CSharp.targets" />

View File

@ -32,5 +32,5 @@ using System.Runtime.InteropServices;
// You can specify all the values or you can default the Build and Revision Numbers // You can specify all the values or you can default the Build and Revision Numbers
// by using the '*' as shown below: // by using the '*' as shown below:
// [assembly: AssemblyVersion("1.0.*")] // [assembly: AssemblyVersion("1.0.*")]
[assembly: AssemblyVersion("2.61.1.0")] [assembly: AssemblyVersion("2.60.0.0")]
[assembly: AssemblyFileVersion("2.61.1.0")] [assembly: AssemblyFileVersion("2.60.0.0")]
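A quick, purely illustrative way to confirm which side of these attribute hunks a given build carries:

using System;
using System.Reflection;

internal static class AssemblyVersionCheck
{
    private static void Main()
    {
        AssemblyName name = Assembly.GetExecutingAssembly().GetName();
        // Prints 2.61.1.0 or 2.60.0.0 depending on which AssemblyVersion attribute was compiled in.
        Console.WriteLine(name.Version);
    }
}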