Compare commits

...

13 Commits

SHA1        Date                        Message
2fc83bb54d  2025-03-28 12:01:05 -07:00  Split violations from Markdown; ProcessDataStandardFormat over Tuple; MoveMatchingFiles to use ProcessDataStandardFormatMapping
40177bfb51  2025-03-19 11:23:01 -07:00  MSBuild back to v2.58.0 because of Owin dependency; FeatureCheckIterationPath122508 ignore features not planned; Removed ALIGNMENT-EQPT and ALIGNMENT Tests
e02b70e258  2025-03-17 15:07:57 -07:00  ProcessData over Tuple
534d0ccc5c  2025-02-17 18:40:58 -07:00  Infineon.EAF.Runtime v2.59.0
9769e1e106  2025-02-06 13:07:57 -07:00  Switch from oi-metrology-viewer-prod.mes.infineon.com:4438; New Markdown for Total StoryPoints by iteration and assigned to
c3b309347c  2025-02-06 08:58:26 -07:00  Added RemainingWork and StoryPoints
0f23ba19cc  2025-02-04 15:49:43 -07:00  Changed to Year-Week; Update build pipeline
a343243576  2025-01-31 14:25:34 -07:00  Username form process of VSCode
304bf04afe  2025-01-28 13:41:18 -07:00  Cost of Delay
08a23114c9  2025-01-28 13:29:28 -07:00  Weighted Shortest Job First Hub
ca4ebff54c  2025-01-22 09:36:43 -07:00  v2.58.0
2d82216d25  2025-01-07 16:11:17 -07:00  Bump
7bcb87c5e5  2025-01-07 15:57:37 -07:00  Tasks
123 changed files with 61622 additions and 3463 deletions

View File

@ -121,6 +121,7 @@ dotnet_diagnostic.IDE0290.severity = none # Use primary constructor [Distance]cs
dotnet_diagnostic.IDE0300.severity = none # IDE0300: Collection initialization can be simplified
dotnet_diagnostic.IDE0301.severity = none #IDE0301: Collection initialization can be simplified
dotnet_diagnostic.IDE0305.severity = none # IDE0305: Collection initialization can be simplified
dotnet_diagnostic.MSTEST0037.severity = error # MSTEST0037: Use proper 'Assert' methods
dotnet_diagnostic.SYSLIB1045.severity = none # SYSLIB1045: diagnostics for regex source generation
dotnet_naming_rule.abstract_method_should_be_pascal_case.severity = warning
dotnet_naming_rule.abstract_method_should_be_pascal_case.style = pascal_case

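The hunk above raises MSTEST0037 ("Use proper 'Assert' methods") to error, so the analyzer now fails the build instead of only warning. A minimal illustration of the kind of assertion that rule rewrites; the test class and values below are made up for the example, not taken from this repository.

using Microsoft.VisualStudio.TestTools.UnitTesting;

[TestClass]
public class Mstest0037Sketch
{
    [TestMethod]
    public void UsesProperAssertMethods()
    {
        object value = null;
        int count = 3;
        // Flagged once MSTEST0037 is an error:
        //   Assert.IsTrue(value == null);
        //   Assert.IsTrue(count == 3);
        // Preferred, intent-revealing equivalents:
        Assert.IsNull(value);
        Assert.AreEqual(3, count);
    }
}
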
View File

@ -1 +1,20 @@
[]
[
{
"DocumentId": {
"ProjectId": {
"Id": "88b6bb05-fef2-487e-bb6c-9ae68922c0bb"
},
"Id": "63c5cda5-30ee-4e20-9ec8-d45777057452"
},
"FileName": "MonIn.cs",
"FilePath": "L:\\DevOps\\EAF-Mesa-Integration\\mesafibacklog\\Adaptation\\Infineon\\Monitoring\\MonA\\MonIn.cs",
"FileChanges": [
{
"LineNumber": 268,
"CharNumber": 17,
"DiagnosticId": "CA1816",
"FormatDescription": "warning CA1816: Change MonIn.Dispose() to call GC.SuppressFinalize(object). This will prevent derived types that introduce a finalizer from needing to re-implement \u0027IDisposable\u0027 to call it."
}
]
}
]

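The new analysis file above records one CA1816 warning against MonIn.Dispose(). For reference, a minimal sketch of the dispose pattern that diagnostic asks for, using only BCL types; MonIn.cs itself is not part of this compare, so everything here other than the GC.SuppressFinalize call is assumed.

using System;

public class DisposableSketch : IDisposable
{
    private bool _Disposed;

    public void Dispose()
    {
        Dispose(disposing: true);
        // CA1816: lets derived types that add a finalizer avoid re-implementing IDisposable.
        GC.SuppressFinalize(this);
    }

    protected virtual void Dispose(bool disposing)
    {
        if (_Disposed)
            return;
        if (disposing)
        {
            // release managed resources here
        }
        _Disposed = true;
    }
}
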
View File

@ -4,7 +4,7 @@
"name": ".NET Core Attach",
"type": "coreclr",
"request": "attach",
"processId": 25140
"processId": 23840
}
]
}

View File

@ -45,6 +45,5 @@
"titleBar.inactiveForeground": "#e7e7e799",
"commandCenter.border": "#e7e7e799"
},
"peacock.color": "#4a727e",
"cSpell.enabled": false
"peacock.color": "#4a727e"
}

View File

@ -78,7 +78,7 @@
"args": [
"/target:Build",
"/restore:True",
"/p:RestoreSources=https://artifactory.intra.infineon.com/artifactory/api/nuget/ngt-fi-package-main-vir/%3Bhttps://packagemanagement.eu.infineon.com:4430/api/v2/%3Bhttps://tfs.intra.infineon.com/tfs/FactoryIntegration/_packaging/EAF/nuget/v3/index.json%3Bhttps://tfs.intra.infineon.com/tfs/FactoryIntegration/_packaging/EAF%40Local/nuget/v3/index.json%3Bhttps://api.nuget.org/v3/index.json",
"/p:RestoreSources=https://artifactory.intra.infineon.com/artifactory/api/nuget/ngt-fi-package-main-vir/%3Bhttps://tfs.intra.infineon.com/tfs/FactoryIntegration/_packaging/EAF/nuget/v3/index.json%3Bhttps://tfs.intra.infineon.com/tfs/FactoryIntegration/_packaging/EAF%40Local/nuget/v3/index.json%3Bhttps://api.nuget.org/v3/index.json",
"/detailedsummary",
"/consoleloggerparameters:PerformanceSummary;ErrorsOnly;",
"/property:Configuration=Debug;TargetFrameworkVersion=v4.8",
@ -86,6 +86,38 @@
],
"problemMatcher": "$msCompile"
},
{
"label": "Project",
"type": "shell",
"command": "code ../MESAFIBACKLOG.csproj",
"problemMatcher": []
},
{
"label": "Readme",
"type": "shell",
"command": "code ../README.md",
"problemMatcher": []
},
{
"label": "File-Folder-Helper AOT s X Day-Helper-2025-03-20",
"type": "shell",
"command": "L:/DevOps/Mesa_FI/File-Folder-Helper/bin/Release/net8.0/win-x64/publish/File-Folder-Helper.exe",
"args": [
"s",
"X",
"L:/DevOps/EAF-Mesa-Integration/MESAFIBACKLOG",
"Day-Helper-2025-03-20",
"false",
"4"
],
"problemMatcher": []
},
{
"label": "Git Config",
"type": "shell",
"command": "code ../.git/config",
"problemMatcher": []
},
{
"label": "Kanbn Console",
"type": "npm",

View File

@ -103,7 +103,9 @@ public class FileRead : Shared.FileRead, IFileRead
{
Tuple<string, Test[], JsonElement[], List<FileInfo>> results = new(string.Empty, null, null, new List<FileInfo>());
_TickOffset ??= 0; // new FileInfo(reportFullPath).LastWriteTime.Ticks - dateTime.Ticks;
_Logistics = new Logistics(reportFullPath, $"LOGISTICS_1{'\t'}A_JOBID={"BACKLOG"};A_MES_ENTITY={"BACKLOG"};");
string[] lines = new string[] { string.Empty, "NUM_DATA_ROWS", $"LOGISTICS_1{'\t'}A_JOBID={"BACKLOG"};A_MES_ENTITY={"BACKLOG"};" };
ProcessDataStandardFormat processDataStandardFormat = ProcessDataStandardFormat.GetProcessDataStandardFormat(reportFullPath, lines);
_Logistics = new Logistics(reportFullPath, processDataStandardFormat);
SetFileParameterLotIDToLogisticsMID();
if (_Logistics.FileInfo.Length < _MinFileLength)
results.Item4.Add(_Logistics.FileInfo);
@ -111,7 +113,8 @@ public class FileRead : Shared.FileRead, IFileRead
{
IProcessData iProcessData = new ProcessData(this, _Logistics, _FileConnectorConfiguration.TargetFileLocation, _URL, results.Item4);
if (iProcessData.Details.Count == 0)
throw new Exception(string.Concat("B) No Data - ", dateTime.Ticks));
results = new(string.Concat("B) No Data - ", dateTime.Ticks), Array.Empty<Test>(), Array.Empty<JsonElement>(), results.Item4);
else
results = iProcessData.GetResults(this, _Logistics, results.Item4);
}
return results;

View File

@ -28,7 +28,6 @@ public class ProcessData : IProcessData
{ }
if (url is null)
throw new ArgumentNullException(nameof(url));
fileInfoCollection.Clear();
_Details = new List<object>();
_Log = LogManager.GetLogger(typeof(ProcessData));
WriteFiles(fileRead, logistics, targetFileLocation, fileInfoCollection);

View File

@ -120,15 +120,15 @@ public class FileRead : Shared.FileRead, IFileRead
private Tuple<string, Test[], JsonElement[], List<FileInfo>> GetExtractResult(string reportFullPath, DateTime dateTime)
{
Tuple<string, Test[], JsonElement[], List<FileInfo>> results;
Tuple<string, string[], string[]> pdsf = ProcessDataStandardFormat.GetLogisticsColumnsAndBody(reportFullPath);
_Logistics = new Logistics(reportFullPath, pdsf.Item1);
ProcessDataStandardFormat processDataStandardFormat = ProcessDataStandardFormat.GetProcessDataStandardFormat(reportFullPath);
_Logistics = new Logistics(reportFullPath, processDataStandardFormat);
SetFileParameterLotIDToLogisticsMID();
JsonElement[] jsonElements = ProcessDataStandardFormat.GetArray(pdsf);
JsonElement[] jsonElements = ProcessDataStandardFormat.GetArray(processDataStandardFormat);
List<Shared.Properties.IDescription> descriptions = GetDuplicatorDescriptions(jsonElements);
Test[] tests = (from l in descriptions select (Test)l.Test).ToArray();
if (_IsEAFHosted && _FileConnectorConfiguration.FileScanningIntervalInSeconds > 0)
FileCopy(reportFullPath, dateTime, descriptions);
results = new Tuple<string, Test[], JsonElement[], List<FileInfo>>(pdsf.Item1, tests, jsonElements, new List<FileInfo>());
results = new Tuple<string, Test[], JsonElement[], List<FileInfo>>(string.Join(Environment.NewLine, processDataStandardFormat.Logistics[0]), tests, jsonElements, new List<FileInfo>());
return results;
}

View File

@ -120,9 +120,10 @@ public class FileRead : Shared.FileRead, IFileRead
if (dateTime == DateTime.MinValue)
throw new ArgumentNullException(nameof(dateTime));
string logisticsSequence = _Logistics.Sequence.ToString();
string day = $"{_Logistics.DateTimeFromSequence:yyyy-MM-dd}";
string weekOfYear = _Calendar.GetWeekOfYear(_Logistics.DateTimeFromSequence, CalendarWeekRule.FirstDay, DayOfWeek.Sunday).ToString("00");
string weekDirectory = $"{_Logistics.DateTimeFromSequence:yyyy}_Week_{weekOfYear}{@"\"}{_Logistics.DateTimeFromSequence:yyyy-MM-dd}";
string destinationArchiveDirectory = Path.Combine(_JobIdArchiveParentDirectory, _Logistics.JobID, weekDirectory);
string weekDirectory = $"{_Logistics.DateTimeFromSequence:yyyy}_Week_{weekOfYear}";
string destinationArchiveDirectory = Path.Combine(_JobIdArchiveParentDirectory, _Logistics.JobID, weekDirectory, day);
if (!Directory.Exists(destinationArchiveDirectory))
_ = Directory.CreateDirectory(destinationArchiveDirectory);
string jobIdDirectory = Path.Combine(_JobIdParentDirectory, _Logistics.JobID);
@ -144,15 +145,15 @@ public class FileRead : Shared.FileRead, IFileRead
private Tuple<string, Test[], JsonElement[], List<FileInfo>> GetExtractResult(string reportFullPath, DateTime dateTime)
{
Tuple<string, Test[], JsonElement[], List<FileInfo>> results;
Tuple<string, string[], string[]> pdsf = ProcessDataStandardFormat.GetLogisticsColumnsAndBody(reportFullPath);
_Logistics = new Logistics(reportFullPath, pdsf.Item1);
ProcessDataStandardFormat processDataStandardFormat = ProcessDataStandardFormat.GetProcessDataStandardFormat(reportFullPath);
_Logistics = new Logistics(reportFullPath, processDataStandardFormat);
SetFileParameterLotIDToLogisticsMID();
JsonElement[] jsonElements = ProcessDataStandardFormat.GetArray(pdsf);
JsonElement[] jsonElements = ProcessDataStandardFormat.GetArray(processDataStandardFormat);
List<Shared.Properties.IDescription> descriptions = GetDuplicatorDescriptions(jsonElements);
Test[] tests = (from l in descriptions select (Test)l.Test).ToArray();
if (_IsEAFHosted && _FileConnectorConfiguration.FileScanningIntervalInSeconds > 0)
MoveArchive(reportFullPath, dateTime);
results = new Tuple<string, Test[], JsonElement[], List<FileInfo>>(pdsf.Item1, tests, jsonElements, new List<FileInfo>());
results = new Tuple<string, Test[], JsonElement[], List<FileInfo>>(string.Join(Environment.NewLine, processDataStandardFormat.Logistics[0]), tests, jsonElements, new List<FileInfo>());
return results;
}

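The hunk above (from the "Changed to Year-Week" commit) moves the per-day folder underneath a yyyy_Week_ww folder. A small, self-contained sketch of how that path is computed with the BCL calendar; the sample timestamp and the roots are placeholders for _Logistics.DateTimeFromSequence, _JobIdArchiveParentDirectory, and _Logistics.JobID.

using System;
using System.Globalization;
using System.IO;

public static class ArchivePathSketch
{
    public static void Main()
    {
        // Placeholder for _Logistics.DateTimeFromSequence.
        DateTime dateTimeFromSequence = new(2025, 2, 4, 15, 49, 43);
        Calendar calendar = new GregorianCalendar();
        string day = $"{dateTimeFromSequence:yyyy-MM-dd}";
        string weekOfYear = calendar.GetWeekOfYear(dateTimeFromSequence, CalendarWeekRule.FirstDay, DayOfWeek.Sunday).ToString("00");
        string weekDirectory = $"{dateTimeFromSequence:yyyy}_Week_{weekOfYear}";
        // Placeholder roots standing in for _JobIdArchiveParentDirectory and _Logistics.JobID.
        string destinationArchiveDirectory = Path.Combine(@"D:\Archive", "BACKLOG", weekDirectory, day);
        Console.WriteLine(destinationArchiveDirectory); // D:\Archive\BACKLOG\2025_Week_06\2025-02-04 on Windows
    }
}
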
View File

@ -29,6 +29,7 @@ public class CellInstanceConnectionName
nameof(Priority) => new Priority.FileRead(smtp, fileParameter, cellInstanceName, connectionCount, cellInstanceConnectionName, fileConnectorConfiguration, equipmentTypeName, parameterizedModelObjectDefinitionType, modelObjectParameters, equipmentDictionaryName, dummyRuns, staticRuns, useCyclicalForDescription, isEAFHosted: connectionCount is null),
nameof(Processed) => new Processed.FileRead(smtp, fileParameter, cellInstanceName, connectionCount, cellInstanceConnectionName, fileConnectorConfiguration, equipmentTypeName, parameterizedModelObjectDefinitionType, modelObjectParameters, equipmentDictionaryName, dummyRuns, staticRuns, useCyclicalForDescription, isEAFHosted: connectionCount is null),
nameof(SPaCe) => new SPaCe.FileRead(smtp, fileParameter, cellInstanceName, connectionCount, cellInstanceConnectionName, fileConnectorConfiguration, equipmentTypeName, parameterizedModelObjectDefinitionType, modelObjectParameters, equipmentDictionaryName, dummyRuns, staticRuns, useCyclicalForDescription, isEAFHosted: connectionCount is null),
nameof(Violation) => new Violation.FileRead(smtp, fileParameter, cellInstanceName, connectionCount, cellInstanceConnectionName, fileConnectorConfiguration, equipmentTypeName, parameterizedModelObjectDefinitionType, modelObjectParameters, equipmentDictionaryName, dummyRuns, staticRuns, useCyclicalForDescription, isEAFHosted: connectionCount is null),
_ => throw new Exception($"\"{cellInstanceConnectionName}\" not mapped")
};
return result;

View File

@ -119,15 +119,15 @@ public class FileRead : Shared.FileRead, IFileRead
private Tuple<string, Test[], JsonElement[], List<FileInfo>> GetExtractResult(string reportFullPath, DateTime dateTime)
{
Tuple<string, Test[], JsonElement[], List<FileInfo>> results;
Tuple<string, string[], string[]> pdsf = ProcessDataStandardFormat.GetLogisticsColumnsAndBody(reportFullPath);
_Logistics = new Logistics(reportFullPath, pdsf.Item1);
ProcessDataStandardFormat processDataStandardFormat = ProcessDataStandardFormat.GetProcessDataStandardFormat(reportFullPath);
_Logistics = new Logistics(reportFullPath, processDataStandardFormat);
SetFileParameterLotIDToLogisticsMID();
JsonElement[] jsonElements = ProcessDataStandardFormat.GetArray(pdsf);
JsonElement[] jsonElements = ProcessDataStandardFormat.GetArray(processDataStandardFormat);
List<Shared.Properties.IDescription> descriptions = GetDuplicatorDescriptions(jsonElements);
Test[] tests = (from l in descriptions select (Test)l.Test).ToArray();
if (_IsEAFHosted && _FileConnectorConfiguration.FileScanningIntervalInSeconds > 0)
FileCopy(reportFullPath, dateTime, descriptions);
results = new Tuple<string, Test[], JsonElement[], List<FileInfo>>(pdsf.Item1, tests, jsonElements, new List<FileInfo>());
results = new Tuple<string, Test[], JsonElement[], List<FileInfo>>(string.Join(Environment.NewLine, processDataStandardFormat.Logistics[0]), tests, jsonElements, new List<FileInfo>());
return results;
}

View File

@ -103,15 +103,18 @@ public class FileRead : Shared.FileRead, IFileRead
{
Tuple<string, Test[], JsonElement[], List<FileInfo>> results = new(string.Empty, null, null, new List<FileInfo>());
_TickOffset ??= 0; // new FileInfo(reportFullPath).LastWriteTime.Ticks - dateTime.Ticks;
_Logistics = new Logistics(reportFullPath, $"LOGISTICS_1{'\t'}A_JOBID={"BACKLOG"};A_MES_ENTITY={"BACKLOG"};");
string[] lines = new string[] { string.Empty, "NUM_DATA_ROWS", $"LOGISTICS_1{'\t'}A_JOBID={"BACKLOG"};A_MES_ENTITY={"BACKLOG"};" };
ProcessDataStandardFormat processDataStandardFormat = ProcessDataStandardFormat.GetProcessDataStandardFormat(reportFullPath, lines);
_Logistics = new Logistics(reportFullPath, processDataStandardFormat);
SetFileParameterLotIDToLogisticsMID();
if (_Logistics.FileInfo.Length < _MinFileLength)
results.Item4.Add(_Logistics.FileInfo);
else
{
IProcessData iProcessData = new ProcessData(this, _Logistics, _FileConnectorConfiguration.TargetFileLocation, _URL, results.Item4);
IProcessData iProcessData = new ProcessData(this, _Logistics, _Calendar, _FileConnectorConfiguration.TargetFileLocation, _URL, results.Item4);
if (iProcessData.Details.Count == 0)
throw new Exception(string.Concat("B) No Data - ", dateTime.Ticks));
results = new(string.Concat("B) No Data - ", dateTime.Ticks), Array.Empty<Test>(), Array.Empty<JsonElement>(), results.Item4);
else
results = iProcessData.GetResults(this, _Logistics, results.Item4);
}
return results;

View File

@ -6,13 +6,15 @@ using log4net;
using System;
using System.Collections.Generic;
using System.Collections.ObjectModel;
using System.Globalization;
using System.IO;
using System.Linq;
using System.Text.Json;
using System.Text.Json.Serialization;
namespace Adaptation.FileHandlers.Kanban;
#nullable enable
public class ProcessData : IProcessData
{
@ -22,47 +24,82 @@ public class ProcessData : IProcessData
private readonly ILog _Log;
public ProcessData(IFileRead fileRead, Logistics logistics, string targetFileLocation, string url, List<FileInfo> fileInfoCollection)
{
if (fileRead.IsEAFHosted)
{ }
if (url is null)
throw new ArgumentNullException(nameof(url));
fileInfoCollection.Clear();
_Details = new List<object>();
_Log = LogManager.GetLogger(typeof(ProcessData));
WriteFiles(fileRead, logistics, targetFileLocation, fileInfoCollection);
}
string IProcessData.GetCurrentReactor(IFileRead fileRead, Logistics logistics, Dictionary<string, string> reactors) =>
throw new Exception(string.Concat("See ", nameof(WriteFiles)));
Tuple<string, Test[], JsonElement[], List<FileInfo>> IProcessData.GetResults(IFileRead fileRead, Logistics logistics, List<FileInfo> fileInfoCollection) =>
new(logistics.Logistics1[0], Array.Empty<Test>(), Array.Empty<JsonElement>(), fileInfoCollection);
#nullable enable
public ProcessData(IFileRead fileRead, Logistics logistics, Calendar calendar, string targetFileLocation, string url, List<FileInfo> fileInfoCollection)
{
if (fileRead.IsEAFHosted)
{
internal static List<Description> GetDescriptions(JsonElement[] jsonElements)
{
List<Description> results = new();
Description? description;
JsonSerializerOptions jsonSerializerOptions = new() { NumberHandling = JsonNumberHandling.AllowReadingFromString | JsonNumberHandling.WriteAsString };
foreach (JsonElement jsonElement in jsonElements)
{
if (jsonElement.ValueKind != JsonValueKind.Object)
throw new Exception();
description = JsonSerializer.Deserialize<Description>(jsonElement.ToString(), jsonSerializerOptions);
if (description is null)
continue;
results.Add(description);
}
return results;
if (url is null)
throw new ArgumentNullException(nameof(url));
_Details = new List<object>();
_Log = LogManager.GetLogger(typeof(ProcessData));
WriteFiles(fileRead, logistics, calendar, targetFileLocation, fileInfoCollection);
}
private void WriteFiles(IFileRead fileRead, Logistics logistics, string destinationDirectory, List<FileInfo> fileInfoCollection)
private static void WriteFiles(IFileRead fileRead, Calendar calendar, string destinationDirectory, bool keepRelations, WorkItem[] workItems)
{
string json;
string text;
string jsonOld;
string jsonFile;
string textFile;
string weekOfYear;
WorkItem workItem;
DirectoryInfo directory;
DirectoryInfo kanbnDirectory;
DirectoryInfo tasksDirectory;
DirectoryInfo visualStudioCodeDirectory;
ReadOnlyDictionary<int, Record> keyValuePairs = GetWorkItems(workItems, keepRelations);
foreach (KeyValuePair<int, Record> keyValuePair in keyValuePairs)
{
workItem = keyValuePair.Value.WorkItem;
json = JsonSerializer.Serialize(workItem, WorkItemSourceGenerationContext.Default.WorkItem);
weekOfYear = calendar.GetWeekOfYear(workItem.CreatedDate, CalendarWeekRule.FirstDay, DayOfWeek.Sunday).ToString("00");
directory = new(Path.Combine(destinationDirectory, "{}", $"{workItem.CreatedDate:yyyy}", $"{workItem.CreatedDate:yyyy}_Week_{weekOfYear}", $"{workItem.Id}"));
text = GetTaskText(directory.FullName);
visualStudioCodeDirectory = new(Path.Combine(directory.FullName, ".vscode"));
if (!visualStudioCodeDirectory.Exists)
_ = Directory.CreateDirectory(visualStudioCodeDirectory.FullName);
textFile = Path.Combine(visualStudioCodeDirectory.FullName, "tasks.json");
if (fileRead.IsEAFHosted && !File.Exists(textFile))
File.WriteAllText(textFile, text);
kanbnDirectory = new(Path.Combine(directory.FullName, ".kanbn"));
tasksDirectory = new(Path.Combine(kanbnDirectory.FullName, "tasks"));
if (!tasksDirectory.Exists)
_ = Directory.CreateDirectory(tasksDirectory.FullName);
jsonFile = Path.Combine(kanbnDirectory.FullName, $"{workItem.Id}.json");
jsonOld = File.Exists(jsonFile) ? File.ReadAllText(jsonFile) : string.Empty;
if (fileRead.IsEAFHosted && jsonOld != json)
File.WriteAllText(jsonFile, json);
if (keyValuePair.Value.Children is not null && keyValuePair.Value.Children.Length > 0)
WriteFiles(fileRead, tasksDirectory, keyValuePair.Value.Children);
if (visualStudioCodeDirectory.LastWriteTime != workItem.CreatedDate)
Directory.SetLastWriteTime(visualStudioCodeDirectory.FullName, workItem.CreatedDate);
if (kanbnDirectory.LastWriteTime != workItem.CreatedDate)
Directory.SetLastWriteTime(kanbnDirectory.FullName, workItem.CreatedDate);
if (directory.LastWriteTime != workItem.CreatedDate)
Directory.SetLastWriteTime(directory.FullName, workItem.CreatedDate);
if (visualStudioCodeDirectory.CreationTime != workItem.CreatedDate)
Directory.SetCreationTime(visualStudioCodeDirectory.FullName, workItem.CreatedDate);
if (kanbnDirectory.CreationTime != workItem.CreatedDate)
Directory.SetCreationTime(kanbnDirectory.FullName, workItem.CreatedDate);
if (directory.CreationTime != workItem.CreatedDate)
Directory.SetCreationTime(directory.FullName, workItem.CreatedDate);
}
}
private void WriteFiles(IFileRead fileRead, Logistics logistics, Calendar calendar, string destinationDirectory, List<FileInfo> fileInfoCollection)
{
if (fileInfoCollection is null)
throw new ArgumentNullException(nameof(fileInfoCollection));
bool keepRelations = true;
const string taskWorkItemType = "Task";
string json = File.ReadAllText(logistics.ReportFullPath);
WorkItem[]? workItems = JsonSerializer.Deserialize<WorkItem[]>(json);
if (workItems is null)
@ -70,10 +107,30 @@ public class ProcessData : IProcessData
_Details.Add(workItems);
if (!Directory.Exists(destinationDirectory))
_ = Directory.CreateDirectory(destinationDirectory);
ReadOnlyDictionary<int, Record> keyValuePairs = GetWorkItems(workItems, keepRelations);
ReadOnlyCollection<string> bugUserStoryTaskWorkItemTypes = new(new string[] { "Bug", "User Story", "Task" });
ReadOnlyDictionary<int, string> collection = MoveCurrentAndGetExpectedDirectoriesAndFileCopy(taskWorkItemType, destinationDirectory, json, bugUserStoryTaskWorkItemTypes, keyValuePairs);
WriteFiles(fileRead, fileInfoCollection, taskWorkItemType, destinationDirectory, keyValuePairs, collection);
WriteFiles(fileRead, calendar, destinationDirectory, workItems);
WriteFiles(fileRead, calendar, destinationDirectory, keepRelations, workItems);
}
private static void WriteFiles(IFileRead fileRead, Calendar calendar, string destinationDirectory, WorkItem[] workItems)
{
string old;
string json;
string directory;
string checkFile;
string weekOfYear;
foreach (WorkItem workItem in workItems)
{
weekOfYear = calendar.GetWeekOfYear(workItem.CreatedDate, CalendarWeekRule.FirstDay, DayOfWeek.Sunday).ToString("00");
directory = Path.Combine(destinationDirectory, "[]", $"{workItem.CreatedDate:yyyy}", $"{workItem.CreatedDate:yyyy}_Week_{weekOfYear}", $"{workItem.Id}");
if (!Directory.Exists(directory))
_ = Directory.CreateDirectory(directory);
json = JsonSerializer.Serialize(workItem, WorkItemSourceGenerationContext.Default.WorkItem);
checkFile = Path.Combine(directory, $"{workItem.Id}.json");
old = File.Exists(checkFile) ? File.ReadAllText(checkFile) : string.Empty;
if (!fileRead.IsEAFHosted || old == json)
continue;
File.WriteAllText(checkFile, json);
}
}
private static ReadOnlyDictionary<int, Record> GetWorkItems(WorkItem[] workItems, bool keepRelations)
@ -86,68 +143,6 @@ public class ProcessData : IProcessData
return results;
}
private static ReadOnlyDictionary<int, string> MoveCurrentAndGetExpectedDirectoriesAndFileCopy(string taskWorkItemType, string destinationDirectory, string json, ReadOnlyCollection<string> bugUserStoryTaskWorkItemTypes, ReadOnlyDictionary<int, Record> keyValuePairs)
{
ReadOnlyDictionary<int, string> results;
string? directory;
ReadOnlyDictionary<int, string> collection = GetCurrentDirectories(destinationDirectory, bugUserStoryTaskWorkItemTypes);
results = GetExpectedDirectoriesAndFileCopy(taskWorkItemType, destinationDirectory, json, bugUserStoryTaskWorkItemTypes, keyValuePairs);
foreach (KeyValuePair<int, string> keyValuePair in collection)
{
if (!results.TryGetValue(keyValuePair.Key, out directory))
MoveToUnknown(destinationDirectory, keyValuePair.Key, keyValuePair.Value);
else
{
if (keyValuePair.Value == directory)
continue;
else
MoveToNew(destinationDirectory, keyValuePair.Key, keyValuePair.Value, directory);
}
}
return results;
}
private static FileInfo GetFileInfoAndMaybeWriteFile(string directory, WorkItem workItem)
{
FileInfo result;
string json = JsonSerializer.Serialize(workItem, WorkItemSourceGenerationContext.Default.WorkItem);
string singletonDirectory = Path.Combine(directory, $"{workItem.Id}");
if (Directory.Exists(singletonDirectory))
{
List<string> files = Directory.GetFiles(singletonDirectory, "*", SearchOption.AllDirectories).ToList();
string checkFile = Path.Combine(singletonDirectory, ".json");
if (files.Remove(checkFile))
File.Delete(checkFile);
if (files.Count == 0)
Directory.Delete(singletonDirectory);
}
result = new(Path.Combine(directory, $"{workItem.Id}.json"));
string old = result.Exists ? File.ReadAllText(result.FullName) : string.Empty;
if (old != json)
File.WriteAllText(result.FullName, json);
return result;
}
private static void WriteFiles(IFileRead fileRead, List<FileInfo> fileInfoCollection, string taskWorkItemType, string destinationDirectory, ReadOnlyDictionary<int, Record> keyValuePairs, ReadOnlyDictionary<int, string> collection)
{
string? directory;
FileInfo fileInfo;
WorkItem workItem;
foreach (KeyValuePair<int, Record> keyValuePair in keyValuePairs)
{
workItem = keyValuePair.Value.WorkItem;
if (!collection.TryGetValue(keyValuePair.Key, out directory))
{
if (workItem.WorkItemType != taskWorkItemType || workItem.Parent is not null)
continue;
directory = GetDirectory(destinationDirectory, workItem);
}
fileInfo = GetFileInfoAndMaybeWriteFile(directory, workItem);
if (!fileRead.IsEAFHosted)
fileInfoCollection.Add(fileInfo);
}
}
private static ReadOnlyDictionary<int, Record> GetKeyValuePairs(ReadOnlyDictionary<int, WorkItem> keyValuePairs, bool keepRelations)
{
Dictionary<int, Record> results = new();
@ -181,116 +176,61 @@ public class ProcessData : IProcessData
return new(results);
}
private static ReadOnlyDictionary<int, string> GetCurrentDirectories(string destinationDirectory, ReadOnlyCollection<string> bugUserStoryTaskWorkItemTypes)
private static string GetTaskText(string directory) =>
string.Join(Environment.NewLine, new string[]
{
Dictionary<int, string> results = new();
int id;
string idCheck;
string? fileName;
string[] directories;
string[] split = new string[] { "-" };
foreach (string w in bugUserStoryTaskWorkItemTypes)
{
directories = Directory.GetDirectories(destinationDirectory, $"*-{w.Replace(" ", "-")}", SearchOption.AllDirectories);
foreach (string directory in directories)
{
fileName = Path.GetFileName(directory);
if (string.IsNullOrEmpty(fileName))
continue;
idCheck = fileName.Split(split, StringSplitOptions.None)[0];
if (!int.TryParse(idCheck, out id))
continue;
if (!results.ContainsKey(id))
results.Add(id, directory);
else
MoveToDuplicate(destinationDirectory, directory);
}
}
return new(results);
}
"{",
"\"version\": \"2.0.0\",",
"\"tasks\": [",
"{",
"\"label\": \"File-Folder-Helper AOT s X Day-Helper-2025-02-04\",",
"\"type\": \"shell\",",
"\"command\": \"L:/DevOps/Mesa_FI/File-Folder-Helper/bin/Release/net8.0/win-x64/publish/File-Folder-Helper.exe\",",
"\"args\": [",
"\"s\",",
"\"X\",",
$"\"{directory.Replace('\\', '/')}\",",
"\"Day-Helper-2025-02-04\",",
"],",
"\"problemMatcher\": []",
"}",
"]",
"}",
});
private static void FileCopy(string destinationDirectory, string json, List<string> distinct)
private static void WriteFiles(IFileRead fileRead, DirectoryInfo tasksDirectory, Record[] records)
{
string old;
string json;
string checkFile;
foreach (string iterationPath in distinct)
WorkItem workItem;
foreach (Record record in records)
{
checkFile = Path.Combine(destinationDirectory, iterationPath, "[].json");
workItem = record.WorkItem;
json = JsonSerializer.Serialize(workItem, WorkItemSourceGenerationContext.Default.WorkItem);
checkFile = Path.Combine(tasksDirectory.FullName, $"{workItem.Id}.json");
old = File.Exists(checkFile) ? File.ReadAllText(checkFile) : string.Empty;
if (old != json)
if (!fileRead.IsEAFHosted || old == json)
continue;
File.WriteAllText(checkFile, json);
}
}
private static ReadOnlyDictionary<int, string> GetExpectedDirectoriesAndFileCopy(string taskWorkItemType, string destinationDirectory, string json, ReadOnlyCollection<string> bugUserStoryTaskWorkItemTypes, ReadOnlyDictionary<int, Record> keyValuePairs)
internal static List<Description> GetDescriptions(JsonElement[] jsonElements)
{
Dictionary<int, string> results = new();
string directory;
WorkItem workItem;
string iterationPath;
List<string> distinct = new();
foreach (KeyValuePair<int, Record> keyValuePair in keyValuePairs)
List<Description> results = new();
Description? description;
JsonSerializerOptions jsonSerializerOptions = new() { NumberHandling = JsonNumberHandling.AllowReadingFromString | JsonNumberHandling.WriteAsString };
foreach (JsonElement jsonElement in jsonElements)
{
workItem = keyValuePair.Value.WorkItem;
iterationPath = workItem.IterationPath.Replace(" ", "-");
if (!distinct.Contains(iterationPath))
distinct.Add(iterationPath);
if (!bugUserStoryTaskWorkItemTypes.Contains(workItem.WorkItemType))
if (jsonElement.ValueKind != JsonValueKind.Object)
throw new Exception();
description = JsonSerializer.Deserialize<Description>(jsonElement.ToString(), jsonSerializerOptions);
if (description is null)
continue;
if (workItem.WorkItemType == taskWorkItemType && workItem.Parent is not null)
continue;
directory = GetDirectory(destinationDirectory, workItem);
results.Add(workItem.Id, directory);
results.Add(description);
}
if (distinct.Count > 0)
FileCopy(destinationDirectory, json, distinct);
return new(results);
}
private static void MoveToUnknown(string destinationDirectory, int id, string directory)
{
if (string.IsNullOrEmpty(destinationDirectory))
throw new ArgumentException($"'{nameof(destinationDirectory)}' {id} cannot be null or empty.", nameof(destinationDirectory));
if (string.IsNullOrEmpty(directory))
throw new ArgumentException($"'{nameof(directory)}' cannot be null or empty.", nameof(directory));
}
private static void MoveToNew(string destinationDirectory, int id, string oldDirectory, string newDirectory)
{
if (Directory.Exists(newDirectory))
MoveToDuplicate(destinationDirectory, id, oldDirectory);
else
{
string directory = Path.GetDirectoryName(newDirectory) ?? throw new NotImplementedException();
if (!Directory.Exists(directory))
_ = Directory.CreateDirectory(directory);
Directory.Move(oldDirectory, newDirectory);
}
}
private static string GetDirectory(string destinationDirectory, WorkItem workItem)
{
string result;
string workItemType = workItem.WorkItemType.Replace(" ", "-");
string iterationPath = workItem.IterationPath.Replace(" ", "-");
result = Path.Combine(destinationDirectory, iterationPath, $"{workItem.Id}-{workItemType}");
return result;
}
private static void MoveToDuplicate(string destinationDirectory, string directory)
{
if (string.IsNullOrEmpty(destinationDirectory))
throw new ArgumentException($"'{nameof(destinationDirectory)}' cannot be null or empty.", nameof(destinationDirectory));
if (string.IsNullOrEmpty(directory))
throw new ArgumentException($"'{nameof(directory)}' cannot be null or empty.", nameof(directory));
}
private static void MoveToDuplicate(string destinationDirectory, int id, string oldDirectory)
{
if (string.IsNullOrEmpty(destinationDirectory))
throw new ArgumentException($"'{nameof(destinationDirectory)}' {id} cannot be null or empty.", nameof(destinationDirectory));
if (string.IsNullOrEmpty(oldDirectory))
throw new ArgumentException($"'{nameof(oldDirectory)}' cannot be null or empty.", nameof(oldDirectory));
return results;
}
}

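The rewritten WriteFiles above serializes each WorkItem through WorkItemSourceGenerationContext.Default.WorkItem and only rewrites a file when its content actually changed, which keeps the later SetLastWriteTime/SetCreationTime calls meaningful. A sketch of that pattern under stated assumptions: the WorkItem record is trimmed to two properties and the context declaration mirrors the usual System.Text.Json source-generation setup, since the real declarations are not shown in this compare.

using System;
using System.IO;
using System.Text.Json;
using System.Text.Json.Serialization;

// Hypothetical, trimmed-down WorkItem; the real type has many more fields.
public record WorkItem(int Id, DateTime CreatedDate);

// Assumed shape of the project's WorkItemSourceGenerationContext.
[JsonSourceGenerationOptions(WriteIndented = true)]
[JsonSerializable(typeof(WorkItem))]
public partial class WorkItemSourceGenerationContext : JsonSerializerContext
{
}

public static class WriteIfChangedSketch
{
    public static void Write(string checkFile, WorkItem workItem)
    {
        string json = JsonSerializer.Serialize(workItem, WorkItemSourceGenerationContext.Default.WorkItem);
        string old = File.Exists(checkFile) ? File.ReadAllText(checkFile) : string.Empty;
        if (old == json)
            return; // identical content: skip the write so file and directory timestamps stay put
        File.WriteAllText(checkFile, json);
    }
}
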
View File

@ -107,7 +107,9 @@ public class FileRead : Shared.FileRead, IFileRead
{
Tuple<string, Test[], JsonElement[], List<FileInfo>> results = new(string.Empty, null, null, new List<FileInfo>());
_TickOffset ??= 0; // new FileInfo(reportFullPath).LastWriteTime.Ticks - dateTime.Ticks;
_Logistics = new Logistics(reportFullPath, $"LOGISTICS_1{'\t'}A_JOBID={"BACKLOG"};A_MES_ENTITY={"BACKLOG"};");
string[] lines = new string[] { string.Empty, "NUM_DATA_ROWS", $"LOGISTICS_1{'\t'}A_JOBID={"BACKLOG"};A_MES_ENTITY={"BACKLOG"};" };
ProcessDataStandardFormat processDataStandardFormat = ProcessDataStandardFormat.GetProcessDataStandardFormat(reportFullPath, lines);
_Logistics = new Logistics(reportFullPath, processDataStandardFormat);
SetFileParameterLotIDToLogisticsMID();
if (_Logistics.FileInfo.Length < _MinFileLength)
results.Item4.Add(_Logistics.FileInfo);
@ -115,7 +117,8 @@ public class FileRead : Shared.FileRead, IFileRead
{
IProcessData iProcessData = new ProcessData(this, _Logistics, _FileConnectorConfiguration.TargetFileLocation, _URL, _WorkItemTypes, results.Item4);
if (iProcessData.Details.Count == 0)
throw new Exception(string.Concat("B) No Data - ", dateTime.Ticks));
results = new(string.Concat("B) No Data - ", dateTime.Ticks), Array.Empty<Test>(), Array.Empty<JsonElement>(), results.Item4);
else
results = iProcessData.GetResults(this, _Logistics, results.Item4);
}
return results;

View File

@ -13,6 +13,8 @@ using System.Text.Json.Serialization;
namespace Adaptation.FileHandlers.Markdown;
#nullable enable
public class ProcessData : IProcessData
{
@ -22,39 +24,46 @@ public class ProcessData : IProcessData
private readonly ILog _Log;
public ProcessData(IFileRead fileRead, Logistics logistics, string targetFileLocation, string url, ReadOnlyCollection<string> workItemTypes, List<FileInfo> fileInfoCollection)
{
if (fileRead.IsEAFHosted)
{ }
fileInfoCollection.Clear();
_Details = new List<object>();
_Log = LogManager.GetLogger(typeof(ProcessData));
WriteFiles(fileRead, logistics, url, workItemTypes, targetFileLocation, fileInfoCollection);
}
string IProcessData.GetCurrentReactor(IFileRead fileRead, Logistics logistics, Dictionary<string, string> reactors) =>
throw new Exception(string.Concat("See ", nameof(WriteFiles)));
Tuple<string, Test[], JsonElement[], List<FileInfo>> IProcessData.GetResults(IFileRead fileRead, Logistics logistics, List<FileInfo> fileInfoCollection) =>
new(logistics.Logistics1[0], Array.Empty<Test>(), Array.Empty<JsonElement>(), fileInfoCollection);
#nullable enable
public ProcessData(IFileRead fileRead, Logistics logistics, string targetFileLocation, string url, ReadOnlyCollection<string> workItemTypes, List<FileInfo> fileInfoCollection)
{
if (fileRead.IsEAFHosted)
{
internal static List<Description> GetDescriptions(JsonElement[] jsonElements)
{
List<Description> results = new();
Description? description;
JsonSerializerOptions jsonSerializerOptions = new() { NumberHandling = JsonNumberHandling.AllowReadingFromString | JsonNumberHandling.WriteAsString };
foreach (JsonElement jsonElement in jsonElements)
{
if (jsonElement.ValueKind != JsonValueKind.Object)
throw new Exception();
description = JsonSerializer.Deserialize<Description>(jsonElement.ToString(), jsonSerializerOptions);
if (description is null)
continue;
results.Add(description);
}
return results;
_Details = new List<object>();
_Log = LogManager.GetLogger(typeof(ProcessData));
WriteFiles(fileRead, logistics, url, workItemTypes, targetFileLocation, fileInfoCollection);
}
private static void WriteFiles(IFileRead fileRead, string destinationDirectory, List<FileInfo> fileInfoCollection, ReadOnlyCollection<string> lines, ReadOnlyCollection<Record> records, string fileName)
{
string markdown = string.Join(Environment.NewLine, lines);
string markdownFile = Path.Combine(destinationDirectory, $"{fileName}.md");
string markdownOld = !File.Exists(markdownFile) ? string.Empty : File.ReadAllText(markdownFile);
if (markdown != markdownOld)
File.WriteAllText(markdownFile, markdown);
if (!fileRead.IsEAFHosted)
fileInfoCollection.Add(new(markdownFile));
string html = CommonMark.CommonMarkConverter.Convert(markdown).Replace("<a href", "<a target='_blank' href");
string htmlFile = Path.Combine(destinationDirectory, $"{fileName}.html");
string htmlOld = !File.Exists(htmlFile) ? string.Empty : File.ReadAllText(htmlFile);
if (html != htmlOld)
File.WriteAllText(htmlFile, html);
if (!fileRead.IsEAFHosted)
fileInfoCollection.Add(new(htmlFile));
string json = JsonSerializer.Serialize(records, new JsonSerializerOptions() { WriteIndented = true });
string jsonFile = Path.Combine(destinationDirectory, $"{fileName}.json");
string jsonOld = !File.Exists(jsonFile) ? string.Empty : File.ReadAllText(jsonFile);
if (json != jsonOld)
File.WriteAllText(jsonFile, json);
if (!fileRead.IsEAFHosted)
fileInfoCollection.Add(new(jsonFile));
}
private void WriteFiles(IFileRead fileRead, Logistics logistics, string url, ReadOnlyCollection<string> workItemTypes, string destinationDirectory, List<FileInfo> fileInfoCollection)
@ -86,10 +95,9 @@ public class ProcessData : IProcessData
ReadOnlyCollection<Record> records = new(keyValuePairs.Values.ToArray());
ReadOnlyCollection<string> bugFeatureWorkItemTypes = new(new string[] { "Bug", "Feature" });
ReadOnlyCollection<string> bugUserStoryWorkItemTypes = new(new string[] { "Bug", "User Story" });
ReadOnlyCollection<string> bugUserStoryTaskWorkItemTypes = new(new string[] { "Bug", "User Story", "Task" });
messages.AddRange(WriteFile(fileRead, destinationDirectory, fileInfoCollection, records, "records"));
messages.AddRange(WriteWithPartentsFile(fileRead, destinationDirectory, fileInfoCollection, records, bugFeatureWorkItemTypes, "bugs-features-with-parents"));
messages.AddRange(WriteWithPartentsFile(fileRead, destinationDirectory, fileInfoCollection, records, bugUserStoryWorkItemTypes, "bugs-user-stories-with-parents"));
messages.AddRange(WriteWithParentsFile(fileRead, destinationDirectory, fileInfoCollection, records, bugFeatureWorkItemTypes, "bugs-features-with-parents"));
messages.AddRange(WriteWithParentsFile(fileRead, destinationDirectory, fileInfoCollection, records, bugUserStoryWorkItemTypes, "bugs-user-stories-with-parents"));
foreach (string workItemType in workItemTypes)
{
lines.Clear();
@ -100,89 +108,10 @@ public class ProcessData : IProcessData
WriteFiles(fileRead, destinationDirectory, fileInfoCollection, new(lines), results, workItemType);
_Details.Add(results);
}
{
lines.Clear();
string workItemType = "Feature";
lines.Add($"# {nameof(FeatureCheckIterationPath122508)}");
lines.Add(string.Empty);
results = FeatureCheckIterationPath122508(url, lines, bugUserStoryTaskWorkItemTypes, keyValuePairs, workItemType);
WriteFiles(fileRead, destinationDirectory, fileInfoCollection, new(lines), results, "check-122508");
_Details.Add(results);
}
{
lines.Clear();
string workItemType = "Feature";
lines.Add($"# {nameof(FeatureCheckTag122514)}");
lines.Add(string.Empty);
results = FeatureCheckTag122514(url, lines, bugUserStoryWorkItemTypes, keyValuePairs, workItemType);
WriteFiles(fileRead, destinationDirectory, fileInfoCollection, new(lines), results, "check-122514");
_Details.Add(results);
}
{
lines.Clear();
string workItemType = "Feature";
lines.Add($"# {nameof(FeatureCheckPriority126169)}");
lines.Add(string.Empty);
results = FeatureCheckPriority126169(url, lines, bugUserStoryWorkItemTypes, keyValuePairs, workItemType);
WriteFiles(fileRead, destinationDirectory, fileInfoCollection, new(lines), results, "check-126169");
_Details.Add(results);
}
{
lines.Clear();
string workItemType = "Feature";
lines.Add($"# {nameof(FeatureCheckState123066)}");
lines.Add(string.Empty);
results = FeatureCheckState123066(url, lines, bugUserStoryTaskWorkItemTypes, keyValuePairs, workItemType);
WriteFiles(fileRead, destinationDirectory, fileInfoCollection, new(lines), results, "check-123066");
_Details.Add(results);
}
{
lines.Clear();
string workItemType = "Feature";
lines.Add($"# {nameof(FeatureCheckState123067)}");
lines.Add(string.Empty);
results = FeatureCheckState123067(url, lines, bugUserStoryTaskWorkItemTypes, keyValuePairs, workItemType);
WriteFiles(fileRead, destinationDirectory, fileInfoCollection, new(lines), results, "check-123067");
_Details.Add(results);
}
{
lines.Clear();
string workItemType = "Feature";
lines.Add($"# {nameof(FeatureCheckStart122517)}");
lines.Add(string.Empty);
results = FeatureCheckStart122517(url, lines, bugUserStoryTaskWorkItemTypes, keyValuePairs, workItemType);
WriteFiles(fileRead, destinationDirectory, fileInfoCollection, new(lines), results, "check-122517");
_Details.Add(results);
}
if (messages.Count > 0)
throw new Exception($"{messages.Count}{Environment.NewLine}{string.Join(Environment.NewLine, messages)}");
}
private static void WriteFiles(IFileRead fileRead, string destinationDirectory, List<FileInfo> fileInfoCollection, ReadOnlyCollection<string> lines, ReadOnlyCollection<Record> records, string fileName)
{
string markdown = string.Join(Environment.NewLine, lines);
string markdownFile = Path.Combine(destinationDirectory, $"{fileName}.md");
string markdownOld = !File.Exists(markdownFile) ? string.Empty : File.ReadAllText(markdownFile);
if (markdown != markdownOld)
File.WriteAllText(markdownFile, markdown);
if (!fileRead.IsEAFHosted)
fileInfoCollection.Add(new(markdownFile));
string html = CommonMark.CommonMarkConverter.Convert(markdown).Replace("<a href", "<a target='_blank' href");
string htmlFile = Path.Combine(destinationDirectory, $"{fileName}.html");
string htmlOld = !File.Exists(htmlFile) ? string.Empty : File.ReadAllText(htmlFile);
if (html != htmlOld)
File.WriteAllText(htmlFile, html);
if (!fileRead.IsEAFHosted)
fileInfoCollection.Add(new(htmlFile));
string json = JsonSerializer.Serialize(records, new JsonSerializerOptions() { WriteIndented = true });
string jsonFile = Path.Combine(destinationDirectory, $"{fileName}.json");
string jsonOld = !File.Exists(jsonFile) ? string.Empty : File.ReadAllText(jsonFile);
if (json != jsonOld)
File.WriteAllText(jsonFile, json);
if (!fileRead.IsEAFHosted)
fileInfoCollection.Add(new(jsonFile));
}
private static ReadOnlyDictionary<int, Record> GetWorkItems(IEnumerable<WorkItem> workItems, bool keepRelations)
{
ReadOnlyDictionary<int, Record> results;
@ -193,6 +122,74 @@ public class ProcessData : IProcessData
return results;
}
private static ReadOnlyDictionary<int, Record> GetKeyValuePairs(ReadOnlyDictionary<int, WorkItem> keyValuePairs, bool keepRelations)
{
Dictionary<int, Record> results = new();
Record record;
List<bool> nests = new();
WorkItem? parentWorkItem;
ReadOnlyCollection<Record> childRecords;
ReadOnlyCollection<Record> relatedRecords;
ReadOnlyCollection<Record> successorRecords;
foreach (KeyValuePair<int, WorkItem> keyValuePair in keyValuePairs)
{
nests.Clear();
if (keyValuePair.Value.Parent is null)
parentWorkItem = null;
else
_ = keyValuePairs.TryGetValue(keyValuePair.Value.Parent.Value, out parentWorkItem);
try
{
childRecords = Record.GetKeyValuePairs(keyValuePairs, keyValuePair.Value, "Child", nests, keepRelations); // Forward
relatedRecords = Record.GetKeyValuePairs(keyValuePairs, keyValuePair.Value, "Related", nests, keepRelations); // Related
successorRecords = Record.GetKeyValuePairs(keyValuePairs, keyValuePair.Value, "Successor", nests, keepRelations); // Forward
// predecessorRecords = Record.GetKeyValuePairs(keyValuePairs, keyValuePair.Value, "Predecessor", nests, keepRelations); // Reverse
record = Record.Get(keyValuePair.Value, parentWorkItem, childRecords, relatedRecords, successorRecords, keepRelations);
}
catch (Exception)
{
record = new(keyValuePair.Value, parentWorkItem, Array.Empty<Record>(), Array.Empty<Record>(), Array.Empty<Record>());
}
results.Add(keyValuePair.Key, record);
}
return new(results);
}
private static ReadOnlyCollection<string> WriteFile(IFileRead fileRead, string destinationDirectory, List<FileInfo> fileInfoCollection, ReadOnlyCollection<Record> records, string fileName)
{
List<string> results = new();
string? json = GetJson(records, results);
string jsonFile = Path.Combine(destinationDirectory, $"{fileName}.json");
string jsonOld = !File.Exists(jsonFile) ? string.Empty : File.ReadAllText(jsonFile);
if (!string.IsNullOrEmpty(json) && json != jsonOld)
File.WriteAllText(jsonFile, json);
if (!fileRead.IsEAFHosted)
fileInfoCollection.Add(new(jsonFile));
return new(results);
}
private static ReadOnlyCollection<string> WriteWithParentsFile(IFileRead fileRead, string destinationDirectory, List<FileInfo> fileInfoCollection, ReadOnlyCollection<Record> records, ReadOnlyCollection<string> workItemTypes, string fileName)
{
List<string> results = new();
Record record;
List<Record> filtered = new();
foreach (Record r in records)
{
if (r.WorkItem.State == "Removed" || !workItemTypes.Contains(r.WorkItem.WorkItemType))
continue;
record = new(r.WorkItem, r.Parent, Array.Empty<Record>(), Array.Empty<Record>(), Array.Empty<Record>());
filtered.Add(record);
}
string? json = GetJson(filtered, results);
string jsonFile = Path.Combine(destinationDirectory, $"{fileName}.json");
string jsonOld = !File.Exists(jsonFile) ? string.Empty : File.ReadAllText(jsonFile);
if (!string.IsNullOrEmpty(json) && json != jsonOld)
File.WriteAllText(jsonFile, json);
if (!fileRead.IsEAFHosted)
fileInfoCollection.Add(new(jsonFile));
return new(results);
}
private static string? GetJson(IEnumerable<Record> records, List<string> results)
{
string? result;
@ -212,39 +209,22 @@ public class ProcessData : IProcessData
return result;
}
private static ReadOnlyCollection<string> WriteFile(IFileRead fileRead, string destinationDirectory, List<FileInfo> fileInfoCollection, ReadOnlyCollection<Record> records, string fileName)
private static void AppendLines(List<char> spaces, List<string> lines, Record record, bool condensed, bool sprintOnly)
{
List<string> results = new();
string? json = GetJson(records, results);
string jsonFile = Path.Combine(destinationDirectory, $"{fileName}.json");
string jsonOld = !File.Exists(jsonFile) ? string.Empty : File.ReadAllText(jsonFile);
if (!string.IsNullOrEmpty(json) && json != jsonOld)
File.WriteAllText(jsonFile, json);
if (!fileRead.IsEAFHosted)
fileInfoCollection.Add(new(jsonFile));
return new(results);
string line;
spaces.Add('\t');
WorkItem workItem;
if (record.Children is not null)
{
foreach (Record child in record.Children)
{
workItem = child.WorkItem;
line = GetLine(spaces, workItem, child, condensed, sprintOnly).TrimEnd();
lines.Add(line);
AppendLines(spaces, lines, child, condensed, sprintOnly);
}
private static ReadOnlyCollection<string> WriteWithPartentsFile(IFileRead fileRead, string destinationDirectory, List<FileInfo> fileInfoCollection, ReadOnlyCollection<Record> records, ReadOnlyCollection<string> workItemTypes, string fileName)
{
List<string> results = new();
List<Record> filtered = new();
Record record;
foreach (Record r in records)
{
if (r.WorkItem.State == "Removed" || !workItemTypes.Contains(r.WorkItem.WorkItemType))
continue;
record = new(r.WorkItem, r.Parent, Array.Empty<Record>(), Array.Empty<Record>(), Array.Empty<Record>());
filtered.Add(record);
}
string? json = GetJson(filtered, results);
string jsonFile = Path.Combine(destinationDirectory, $"{fileName}.json");
string jsonOld = !File.Exists(jsonFile) ? string.Empty : File.ReadAllText(jsonFile);
if (!string.IsNullOrEmpty(json) && json != jsonOld)
File.WriteAllText(jsonFile, json);
if (!fileRead.IsEAFHosted)
fileInfoCollection.Add(new(jsonFile));
return new(results);
spaces.RemoveAt(0);
}
private static void AppendLines(string url, List<char> spaces, List<string> lines, ReadOnlyCollection<Record> records, string workItemType)
@ -295,57 +275,6 @@ public class ProcessData : IProcessData
}
}
private static void AppendLines(List<char> spaces, List<string> lines, Record record, bool condensed, bool sprintOnly)
{
string line;
spaces.Add('\t');
WorkItem workItem;
if (record.Children is not null)
{
foreach (Record child in record.Children)
{
workItem = child.WorkItem;
line = GetLine(spaces, workItem, child, condensed, sprintOnly).TrimEnd();
lines.Add(line);
AppendLines(spaces, lines, child, condensed, sprintOnly);
}
}
spaces.RemoveAt(0);
}
private static ReadOnlyDictionary<int, Record> GetKeyValuePairs(ReadOnlyDictionary<int, WorkItem> keyValuePairs, bool keepRelations)
{
Dictionary<int, Record> results = new();
Record record;
List<bool> nests = new();
WorkItem? parentWorkItem;
ReadOnlyCollection<Record> childRecords;
ReadOnlyCollection<Record> relatedRecords;
ReadOnlyCollection<Record> successorRecords;
foreach (KeyValuePair<int, WorkItem> keyValuePair in keyValuePairs)
{
nests.Clear();
if (keyValuePair.Value.Parent is null)
parentWorkItem = null;
else
_ = keyValuePairs.TryGetValue(keyValuePair.Value.Parent.Value, out parentWorkItem);
try
{
childRecords = Record.GetKeyValuePairs(keyValuePairs, keyValuePair.Value, "Child", nests, keepRelations); // Forward
relatedRecords = Record.GetKeyValuePairs(keyValuePairs, keyValuePair.Value, "Related", nests, keepRelations); // Related
successorRecords = Record.GetKeyValuePairs(keyValuePairs, keyValuePair.Value, "Successor", nests, keepRelations); // Forward
// predecessorRecords = Record.GetKeyValuePairs(keyValuePairs, keyValuePair.Value, "Predecessor", nests, keepRelations); // Reverse
record = Record.Get(keyValuePair.Value, parentWorkItem, childRecords, relatedRecords, successorRecords, keepRelations);
}
catch (Exception)
{
record = new(keyValuePair.Value, parentWorkItem, Array.Empty<Record>(), Array.Empty<Record>(), Array.Empty<Record>());
}
results.Add(keyValuePair.Key, record);
}
return new(results);
}
private static string GetLine(List<char> spaces, WorkItem workItem, Record record, bool condensed, bool sprintOnly)
{
string result;
@ -359,403 +288,21 @@ public class ProcessData : IProcessData
private static string GetClosed(WorkItem workItem) =>
workItem.State != "Closed" ? "[ ]" : "[x]";
private static void FilterChildren(ReadOnlyCollection<string> workItemTypes, Record record, List<Record> results)
internal static List<Description> GetDescriptions(JsonElement[] jsonElements)
{
if (record.Children is not null)
List<Description> results = new();
Description? description;
JsonSerializerOptions jsonSerializerOptions = new() { NumberHandling = JsonNumberHandling.AllowReadingFromString | JsonNumberHandling.WriteAsString };
foreach (JsonElement jsonElement in jsonElements)
{
foreach (Record r in record.Children)
{
if (!workItemTypes.Contains(r.WorkItem.WorkItemType))
continue;
results.Add(r);
FilterChildren(workItemTypes, r, results);
}
}
}
private static ReadOnlyCollection<Record> FilterChildren(ReadOnlyCollection<string> workItemTypes, Record record)
{
List<Record> results = new();
FilterChildren(workItemTypes, record, results);
return new(results);
}
private static int GetState(WorkItem workItem) =>
workItem.State switch
{
"New" => 1,
"Active" => 2,
"Resolved" => 3,
"Closed" => 4,
"Removed" => 5,
_ => 8
};
private static string? GetMaxIterationPath122508(ReadOnlyCollection<Record> records)
{
string? result;
List<string> results = new();
foreach (Record record in records)
{
if (results.Contains(record.WorkItem.IterationPath))
continue;
results.Add(record.WorkItem.IterationPath);
}
result = results.Count == 0 ? null : results.Max();
return result;
}
private static ReadOnlyCollection<Record> FeatureCheckIterationPath122508(string url, List<string> lines, ReadOnlyCollection<string> workItemTypes, ReadOnlyDictionary<int, Record> keyValuePairs, string workItemType)
{
List<Record> results = new();
Record record;
string? maxIterationPath;
List<string> collection = new();
ReadOnlyCollection<Record> records;
foreach (KeyValuePair<int, Record> keyValuePair in keyValuePairs)
{
record = keyValuePair.Value;
if (record.WorkItem.State is "Removed")
continue;
if (record.WorkItem.WorkItemType != workItemType)
continue;
collection.Clear();
if (record.Children is null || record.Children.Length == 0)
continue;
records = FilterChildren(workItemTypes, record);
maxIterationPath = GetMaxIterationPath122508(records);
if (string.IsNullOrEmpty(maxIterationPath) || record.WorkItem.IterationPath == maxIterationPath)
continue;
collection.Add($"## {record.WorkItem.AssignedTo} - {record.WorkItem.Id} - {record.WorkItem.Title}");
collection.Add(string.Empty);
collection.Add($"- [ ] [{record.WorkItem.Id}]({url}{record.WorkItem.Id}) => {record.WorkItem.IterationPath} != {maxIterationPath}");
collection.Add(string.Empty);
lines.AddRange(collection);
results.Add(Record.GetWithoutNesting(record, $"IterationPath:<a target='_blank' href='{url}{record.WorkItem.Id}'>{record.WorkItem.Id}</a>;{record.WorkItem.IterationPath} != {maxIterationPath}"));
}
return new(results);
}
private static ReadOnlyCollection<Record> GetWorkItemsNotMatching122514(Record record, ReadOnlyCollection<Record> records)
{
List<Record> results = new();
string[] segments;
string[] parentTags = record.WorkItem.Tags.Split(';').Select(l => l.Trim()).ToArray();
foreach (Record r in records)
{
segments = string.IsNullOrEmpty(r.WorkItem.Tags) ? Array.Empty<string>() : r.WorkItem.Tags.Split(';').Select(l => l.Trim()).ToArray();
if (segments.Length > 0 && parentTags.Any(l => segments.Contains(l)))
continue;
results.Add(r);
}
return new(results);
}
private static ReadOnlyCollection<Record> FeatureCheckTag122514(string url, List<string> lines, ReadOnlyCollection<string> workItemTypes, ReadOnlyDictionary<int, Record> keyValuePairs, string workItemType)
{
List<Record> results = new();
Record record;
List<string> collection = new();
List<string> violations = new();
ReadOnlyCollection<Record> records;
ReadOnlyCollection<Record> recordsNotMatching;
foreach (KeyValuePair<int, Record> keyValuePair in keyValuePairs)
{
record = keyValuePair.Value;
if (record.WorkItem.State is "Removed")
continue;
if (record.WorkItem.WorkItemType != workItemType)
continue;
collection.Clear();
violations.Clear();
if (record.Children is null || record.Children.Length == 0)
continue;
if (string.IsNullOrEmpty(record.WorkItem.Tags))
recordsNotMatching = new(new Record[] { record });
else
{
records = FilterChildren(workItemTypes, record);
recordsNotMatching = GetWorkItemsNotMatching122514(record, records);
if (!string.IsNullOrEmpty(record.WorkItem.Tags) && recordsNotMatching.Count == 0)
continue;
}
collection.Add($"## {record.WorkItem.AssignedTo} - {record.WorkItem.Id} - {record.WorkItem.Title}");
collection.Add(string.Empty);
foreach (Record r in recordsNotMatching)
collection.Add($"- [ ] [{r.WorkItem}]({url}{r.WorkItem}) {nameof(record.WorkItem.Tags)} != {record.WorkItem.Tags}");
collection.Add(string.Empty);
lines.AddRange(collection);
violations.Add($"Tag:{record.WorkItem.Tags};");
foreach (Record r in recordsNotMatching)
violations.Add($"<a target='_blank' href='{url}{r.WorkItem.Id}'>{r.WorkItem.Id}</a>:{r.WorkItem.Tags};");
results.Add(Record.GetWithoutNesting(record, string.Join(" ", violations)));
}
return new(results);
}
private static ReadOnlyCollection<Record> GetWorkItemsNotMatching126169(Record record, ReadOnlyCollection<Record> records)
{
List<Record> results = new();
foreach (Record r in records)
{
if (record.WorkItem.Priority is null)
{
results.Add(record);
break;
}
if (r.WorkItem.Priority == record.WorkItem.Priority.Value)
continue;
results.Add(r);
}
return new(results);
}
private static ReadOnlyCollection<Record> FeatureCheckPriority126169(string url, List<string> lines, ReadOnlyCollection<string> workItemTypes, ReadOnlyDictionary<int, Record> keyValuePairs, string workItemType)
{
List<Record> results = new();
Record record;
List<string> collection = new();
List<string> violations = new();
ReadOnlyCollection<Record> records;
ReadOnlyCollection<Record> recordsNotMatching;
foreach (KeyValuePair<int, Record> keyValuePair in keyValuePairs)
{
record = keyValuePair.Value;
if (record.WorkItem.State is "Removed")
continue;
if (record.WorkItem.WorkItemType != workItemType)
continue;
collection.Clear();
violations.Clear();
if (record.Children is null || record.Children.Length == 0)
continue;
records = FilterChildren(workItemTypes, record);
recordsNotMatching = GetWorkItemsNotMatching126169(record, records);
if (recordsNotMatching.Count == 0)
continue;
collection.Add($"## {record.WorkItem.AssignedTo} - {record.WorkItem.Id} - {record.WorkItem.Title}");
collection.Add(string.Empty);
collection.Add($"- [{record.WorkItem.Id}]({url}{record.WorkItem.Id})");
foreach (Record r in recordsNotMatching)
collection.Add($"- [ ] [{r.WorkItem.Id}]({url}{r.WorkItem.Id}) {nameof(record.WorkItem.Priority)} != {record.WorkItem.Priority}");
collection.Add(string.Empty);
lines.AddRange(collection);
violations.Add($"Priority:{record.WorkItem.Priority};");
foreach (Record r in recordsNotMatching)
violations.Add($"<a target='_blank' href='{url}{r.WorkItem.Id}'>{r.WorkItem.Id}</a>:{r.WorkItem.Priority};");
results.Add(Record.GetWithoutNesting(record, string.Join(" ", violations)));
}
return new(results);
}
private static ReadOnlyCollection<Record> GetWorkItemsNotMatching123066(Record record, ReadOnlyCollection<Record> records)
{
List<Record> results = new();
int check;
int state = GetState(record.WorkItem);
List<KeyValuePair<int, Record>> collection = new();
foreach (Record r in records)
{
if (r.WorkItem.State is "Removed")
continue;
check = GetState(r.WorkItem);
if (check == state)
continue;
collection.Add(new(check, r));
}
if (collection.Count > 0)
{
KeyValuePair<int, Record>[] notNewState = (from l in collection where l.Value.WorkItem.State != "New" select l).ToArray();
if (notNewState.Length == 0 && record.WorkItem.State is "New" or "Active")
collection.Clear();
else if (notNewState.Length > 0)
{
int minimum = notNewState.Min(l => l.Key);
if (minimum == state)
collection.Clear();
else if (minimum == 1 && record.WorkItem.State == "New")
collection.Clear();
else if (notNewState.Length > 0 && record.WorkItem.State == "Active")
collection.Clear();
}
}
foreach (KeyValuePair<int, Record> keyValuePair in collection.OrderByDescending(l => l.Key))
results.Add(keyValuePair.Value);
return new(results);
}
private static ReadOnlyCollection<Record> GetWorkItemsNotMatching123067(Record record, ReadOnlyCollection<Record> records)
{
List<Record> results = new();
int check;
int state = GetState(record.WorkItem);
List<KeyValuePair<int, Record>> collection = new();
foreach (Record r in records)
{
if (r.WorkItem.State is "Removed")
continue;
check = GetState(r.WorkItem);
if (check == state)
continue;
collection.Add(new(check, r));
}
if (collection.Count > 0)
{
KeyValuePair<int, Record>[] notNewState = (from l in collection where l.Value.WorkItem.State != "New" select l).ToArray();
if (notNewState.Length == 0 && record.WorkItem.State is "New" or "Active")
collection.Clear();
else if (notNewState.Length > 0)
{
int minimum = notNewState.Min(l => l.Key);
if (minimum == state)
collection.Clear();
else if (minimum == 1 && record.WorkItem.State == "New")
collection.Clear();
else if (notNewState.Length > 0 && record.WorkItem.State == "Active")
collection.Clear();
}
}
foreach (KeyValuePair<int, Record> keyValuePair in collection.OrderByDescending(l => l.Key))
results.Add(keyValuePair.Value);
return new(results);
}
private static ReadOnlyCollection<Record> GetWorkItemsNotMatching122517(Record record, ReadOnlyCollection<Record> records)
{
List<Record> results = new();
if (record.WorkItem.StartDate is null)
throw new Exception();
DateTime dateTime = record.WorkItem.StartDate.Value;
List<KeyValuePair<long, Record>> collection = new();
foreach (Record r in records)
{
if (r.WorkItem.State is "Removed")
continue;
if (r.WorkItem.ActivatedDate is null)
continue;
if (dateTime >= r.WorkItem.ActivatedDate.Value)
continue;
collection.Add(new(r.WorkItem.ActivatedDate.Value.Ticks, r));
}
foreach (KeyValuePair<long, Record> keyValuePair in collection.OrderBy(l => l.Key))
results.Add(keyValuePair.Value);
return new(results);
}
private static ReadOnlyCollection<Record> FeatureCheckState123066(string url, List<string> lines, ReadOnlyCollection<string> workItemTypes, ReadOnlyDictionary<int, Record> keyValuePairs, string workItemType)
{
List<Record> results = new();
Record record;
List<string> collection = new();
List<string> violations = new();
ReadOnlyCollection<Record> records;
ReadOnlyCollection<Record> recordsNotMatching;
foreach (KeyValuePair<int, Record> keyValuePair in keyValuePairs)
{
record = keyValuePair.Value;
if (record.WorkItem.State is "Removed")
continue;
if (record.WorkItem.WorkItemType != workItemType)
continue;
collection.Clear();
violations.Clear();
if (record.Children is null || record.Children.Length == 0)
continue;
records = FilterChildren(workItemTypes, record);
recordsNotMatching = GetWorkItemsNotMatching123066(record, records);
if (recordsNotMatching.Count == 0)
continue;
collection.Add($"## {record.WorkItem.AssignedTo} - {record.WorkItem.Id} - {record.WorkItem.Title}");
collection.Add(string.Empty);
collection.Add($"- [{record.WorkItem.Id}]({url}{record.WorkItem.Id})");
foreach (Record r in recordsNotMatching)
collection.Add($"- [ ] [{r.WorkItem.Id}]({url}{r.WorkItem.Id}) {nameof(record.WorkItem.State)} != {record.WorkItem.State}");
collection.Add(string.Empty);
lines.AddRange(collection);
violations.Add($"State:{record.WorkItem.State};");
foreach (Record r in recordsNotMatching)
violations.Add($"<a target='_blank' href='{url}{r.WorkItem.Id}'>{r.WorkItem.Id}</a>:{r.WorkItem.State};");
results.Add(Record.GetWithoutNesting(record, string.Join(" ", violations)));
}
return new(results);
}
private static ReadOnlyCollection<Record> FeatureCheckState123067(string url, List<string> lines, ReadOnlyCollection<string> workItemTypes, ReadOnlyDictionary<int, Record> keyValuePairs, string workItemType)
{
List<Record> results = new();
Record record;
List<string> collection = new();
List<string> violations = new();
ReadOnlyCollection<Record> records;
ReadOnlyCollection<Record> recordsNotMatching;
foreach (KeyValuePair<int, Record> keyValuePair in keyValuePairs)
{
record = keyValuePair.Value;
if (record.WorkItem.State is "Removed")
continue;
if (record.WorkItem.WorkItemType != workItemType)
continue;
collection.Clear();
violations.Clear();
if (record.Children is null || record.Children.Length == 0)
continue;
records = FilterChildren(workItemTypes, record);
recordsNotMatching = GetWorkItemsNotMatching123067(record, records);
if (recordsNotMatching.Count == 0)
continue;
collection.Add($"## {record.WorkItem.AssignedTo} - {record.WorkItem.Id} - {record.WorkItem.Title}");
collection.Add(string.Empty);
collection.Add($"- [{record.WorkItem.Id}]({url}{record.WorkItem.Id})");
foreach (Record r in recordsNotMatching)
collection.Add($"- [ ] [{r.WorkItem.Id}]({url}{r.WorkItem.Id}) {nameof(record.WorkItem.State)} != {record.WorkItem.State}");
collection.Add(string.Empty);
lines.AddRange(collection);
violations.Add($"State:{record.WorkItem.State};");
foreach (Record r in recordsNotMatching)
violations.Add($"<a target='_blank' href='{url}{r.WorkItem.Id}'>{r.WorkItem.Id}</a>:{r.WorkItem.State};");
results.Add(Record.GetWithoutNesting(record, string.Join(" ", violations)));
}
return new(results);
}
private static ReadOnlyCollection<Record> FeatureCheckStart122517(string url, List<string> lines, ReadOnlyCollection<string> workItemTypes, ReadOnlyDictionary<int, Record> keyValuePairs, string workItemType)
{
List<Record> results = new();
Record record;
List<string> collection = new();
List<string> violations = new();
ReadOnlyCollection<Record> records;
ReadOnlyCollection<Record> recordsNotMatching;
foreach (KeyValuePair<int, Record> keyValuePair in keyValuePairs)
{
record = keyValuePair.Value;
if (record.WorkItem.State is "Removed")
continue;
if (record.WorkItem.WorkItemType != workItemType)
continue;
collection.Clear();
violations.Clear();
if (record.Children is null || record.Children.Length == 0)
continue;
if (record.WorkItem.StartDate is null)
continue;
records = FilterChildren(workItemTypes, record);
recordsNotMatching = GetWorkItemsNotMatching122517(record, records);
if (recordsNotMatching.Count == 0)
continue;
collection.Add($"## {record.WorkItem.AssignedTo} - {record.WorkItem.Id} - {record.WorkItem.Title}");
collection.Add(string.Empty);
collection.Add($"- [{record.WorkItem.Id}]({url}{record.WorkItem.Id})");
foreach (Record r in recordsNotMatching)
collection.Add($"- [ ] [{r.WorkItem.Id}]({url}{r.WorkItem.Id}) {nameof(record.WorkItem.ActivatedDate)} != {record.WorkItem.ActivatedDate}");
collection.Add(string.Empty);
lines.AddRange(collection);
violations.Add($"StartDate:{record.WorkItem.StartDate};");
foreach (Record r in recordsNotMatching)
violations.Add($"<a target='_blank' href='{url}{r.WorkItem.Id}'>{r.WorkItem.Id}</a>:{r.WorkItem.ActivatedDate};");
results.Add(Record.GetWithoutNesting(record, string.Join(" ", violations)));
}
return new(results);
return results;
}
}
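
A minimal standalone sketch (hypothetical dates, not from this commit) of the 122517 rule enforced by FeatureCheckStart122517 and GetWorkItemsNotMatching122517 above: a child work item is flagged when its ActivatedDate is later than the parent feature's StartDate, and flagged children are reported in ascending ActivatedDate order.

```csharp
using System;
using System.Linq;

internal static class FeatureStartSketch
{
    internal static void Main()
    {
        DateTime featureStart = new(2025, 1, 6);                       // hypothetical feature StartDate
        DateTime?[] childActivatedDates = { new DateTime(2025, 1, 9), new DateTime(2025, 1, 3), null };
        DateTime[] flagged = childActivatedDates
            .Where(activated => activated is not null && featureStart < activated.Value)
            .Select(activated => activated.Value)
            .OrderBy(activated => activated)
            .ToArray();                                                 // only 2025-01-09 survives the filters
        foreach (DateTime activated in flagged)
            Console.WriteLine($"ActivatedDate {activated:yyyy-MM-dd} is after StartDate {featureStart:yyyy-MM-dd}");
    }
}
```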

View File

@ -5,16 +5,68 @@ using Adaptation.Shared.Duplicator;
using Adaptation.Shared.Methods;
using System;
using System.Collections.Generic;
using System.Collections.ObjectModel;
using System.IO;
using System.Linq;
using System.Text;
using System.Text.Json;
using System.Threading;
namespace Adaptation.FileHandlers.MoveMatchingFiles;
#nullable enable
public class FileRead : Shared.FileRead, IFileRead
{
internal class PreWith
{
internal string MatchingFile { get; private set; }
internal string CheckFile { get; private set; }
internal string ErrFile { get; private set; }
internal string CheckDirectory { get; private set; }
internal string NoWaitDirectory { get; private set; }
internal PreWith(string matchingFile, string checkFile, string errFile, string checkDirectory, string noWaitDirectory)
{
MatchingFile = matchingFile;
CheckFile = checkFile;
ErrFile = errFile;
CheckDirectory = checkDirectory;
NoWaitDirectory = noWaitDirectory;
}
}
internal class Pre
{
internal string MatchingFile { get; private set; }
internal string CheckFile { get; private set; }
internal Pre(string matchingFile, string checkFile)
{
MatchingFile = matchingFile;
CheckFile = checkFile;
}
}
internal class Post
{
internal string ErrFile { get; private set; }
internal string CheckFile { get; private set; }
internal Post(string checkFile, string errFile)
{
ErrFile = errFile;
CheckFile = checkFile;
}
}
public FileRead(ISMTP smtp, Dictionary<string, string> fileParameter, string cellInstanceName, int? connectionCount, string cellInstanceConnectionName, FileConnectorConfiguration fileConnectorConfiguration, string equipmentTypeName, string parameterizedModelObjectDefinitionType, IList<ModelObjectParameterDefinition> modelObjectParameters, string equipmentDictionaryName, Dictionary<string, List<long>> dummyRuns, Dictionary<long, List<string>> staticRuns, bool useCyclicalForDescription, bool isEAFHosted) :
base(new Description(), false, smtp, fileParameter, cellInstanceName, connectionCount, cellInstanceConnectionName, fileConnectorConfiguration, equipmentTypeName, parameterizedModelObjectDefinitionType, modelObjectParameters, equipmentDictionaryName, dummyRuns, staticRuns, useCyclicalForDescription, isEAFHosted: connectionCount is null)
{
@ -41,7 +93,8 @@ public class FileRead : Shared.FileRead, IFileRead
Move(extractResults);
}
void IFileRead.WaitForThread() => WaitForThread(thread: null, threadExceptions: null);
void IFileRead.WaitForThread() =>
WaitForThread(thread: null, threadExceptions: null);
string IFileRead.GetEventDescription()
{
@ -88,7 +141,7 @@ public class FileRead : Shared.FileRead, IFileRead
DateTime dateTime = DateTime.Now;
results = GetExtractResult(reportFullPath, dateTime);
if (results.Item3 is null)
results = new Tuple<string, Test[], JsonElement[], List<FileInfo>>(results.Item1, Array.Empty<Test>(), JsonSerializer.Deserialize<JsonElement[]>("[]"), results.Item4);
results = new Tuple<string, Test[], JsonElement[], List<FileInfo>>(results.Item1, Array.Empty<Test>(), JsonSerializer.Deserialize<JsonElement[]>("[]") ?? throw new Exception(), results.Item4);
if (results.Item3.Length > 0 && _IsEAFHosted)
WritePDSF(this, results.Item3);
UpdateLastTicksDuration(DateTime.Now.Ticks - dateTime.Ticks);
@ -104,7 +157,69 @@ public class FileRead : Shared.FileRead, IFileRead
return results;
}
private static List<string> GetSearchDirectories(int numberLength, string parentDirectory)
private static ProcessDataStandardFormatMapping GetProcessDataStandardFormatMapping()
{
ProcessDataStandardFormatMapping result;
string[] segmentsB;
List<string> distinct = new();
Dictionary<string, string> keyValuePairs = new();
string args4 = "Time,Test,Count,MesEntity,HeaderUniqueId,UniqueId,Id,Recipe,Date,AreaDeltaFromLastRun,GLimit,HGCV1";
string args5 = "Nine10mmEdgeMean,Nine4mmEdgeMean,NineCriticalPointsAverage,NineCriticalPointsPhaseAngleAverage,NineCriticalPointsStdDev,NineEdgeMeanDelta,NineMean,NineResRangePercent,AreaDeltaFromLastRun,Variation,Percentage HgCV 4PP Delta,HGCV1";
string args6 = "RhoAvg01,RhoAvg02,RhoAvg03,RhoAvg04,RhoAvg05,RhoAvg06,RhoAvg07,RhoAvg08,RhoAvg09,HGCV1";
string args7 = "FlatZMean|MeanFlatZ,GradeMean|MeanGrade,NAvgMean|MeanNAvg,NslMean|MeanNsl,PhaseMean|MeanPhase,RhoAvgMean|MeanRhoAvg,RhoslMean|MeanRhosl,RsMean|MeanRs,VdMean|MeanVd,FlatZRadialGradient|RadialGradientFlatZ,GradeRadialGradient|RadialGradientGrade,NAvgRadialGradient|RadialGradientNAvg,NslRadialGradient|RadialGradientNsl,PhaseRadialGradient|RadialGradientPhase,RhoAvgRadialGradient|RadialGradientRhoAvg,RhoslRadialGradient|RadialGradientRhosl,RsRadialGradient|RadialGradientRs,VdRadialGradient|RadialGradientVd,FlatZStdDev|StandardDeviationPercentageFlatZ,GradeStdDev|StandardDeviationPercentageGrade,NAvgStdDev|StandardDeviationPercentageNAvg,NslStdDev|StandardDeviationPercentageNsl,PhaseStdDev|StandardDeviationPercentagePhase,RhoAvgStdDev|StandardDeviationPercentageRhoAvg,RhoslStdDev|StandardDeviationPercentageRhosl,RsStdDev|StandardDeviationPercentageRs,VdStdDev|StandardDeviationPercentageVd,|HGCV1";
string args8 = "Time,A_LOGISTICS,B_LOGISTICS,Test,Count,Index,MesEntity,Date,Employee,Lot,PSN,Reactor,Recipe,Area,Folder,HeaderUniqueId,Id,Layer,Model,Pattern,Phase,Plan,RampRate,RDS,SetupFile,StartVoltage,StopVoltage,UniqueId,Wafer,WaferSize,Zone,Ccomp,CondType,FlatZ,FlatZMean,FlatZRadialGradient,FlatZStdDev,GLimit,Grade,GradeMean,GradeRadialGradient,GradeStdDev,NAvg,NAvgMean,NAvgRadialGradient,NAvgStdDev,Nsl,NslMean,NslRadialGradient,NslStdDev,PhaseMean,PhaseRadialGradient,PhaseStdDev,RhoAvg,RhoAvgMean,RhoAvgRadialGradient,RhoAvgStdDev,RhoMethod,Rhosl,RhoslMean,RhoslRadialGradient,RhoslStdDev,RsMean,RsRadialGradient,RsStdDev,Vd,VdMean,VdRadialGradient,VdStdDev,Variation,AreaDeltaFromLastRun,Nine10mmEdgeMean,Nine4mmEdgeMean,NineCriticalPointsAverage,NineCriticalPointsPhaseAngleAverage,NineCriticalPointsStdDev,NineEdgeMeanDelta,NineMean,NineResRangePercent,RhoAvg01,RhoAvg02,RhoAvg03,RhoAvg04,RhoAvg05,RhoAvg06,RhoAvg07,RhoAvg08,RhoAvg09";
string args9 = "Time,A_LOGISTICS,B_LOGISTICS,Index,Operator,StartVoltage,Wafer,StopVoltage,Lot,RampRate,Plan,GLimit,Date,Time,SetupFile,WaferSize,Folder,Ccomp,Pattern,Area,CondType,RhoMethod,Model,MeanNAvg,MeanNsl,MeanVd,MeanFlatZ,MeanRhoAvg,MeanRhosl,MeanPhase,MeanGrade,MeanRs,StandardDeviationPercentageNAvg,StandardDeviationPercentageNsl,StandardDeviationPercentageVd,StandardDeviationPercentageFlatZ,StandardDeviationPercentageRhoAvg,StandardDeviationPercentageRhosl,StandardDeviationPercentagePhase,StandardDeviationPercentageGrade,StandardDeviationPercentageRs,RadialGradientNAvg,RadialGradientNsl,RadialGradientVd,RadialGradientFlatZ,RadialGradientRhoAvg,RadialGradientRhosl,RadialGradientPhase,RadialGradientGrade,RadialGradientRs,Site,X,Y,NAvg,RhoAvg,Nsl,Rhosl,Vd,Phase,FlatZ,Grade,XLeft,XRight,BottomY,TopY,RDS,PSN,Reactor,Layer,Zone,Employee,InferredLot,Nine10mmEdgeMean,Nine4mmEdgeMean,NineCriticalPointsAverage,NineCriticalPointsPhaseAngleAverage,NineCriticalPointsStdDev,NineEdgeMeanDelta,NineMean,NineResRangePercent,AreaDeltaFromLastRun,Variation,Percentage HgCV 4PP Delta,RhoAvg01,RhoAvg02,RhoAvg03,RhoAvg04,RhoAvg05,RhoAvg06,RhoAvg07,RhoAvg08,RhoAvg09";
string args10 = "0,1,2,-1,-1,3,-1,12,70,8,66,67,-1,19,16,-1,-1,68,22,18,58,10,9,65,14,5,7,-1,6,15,69,17,20,59,26,44,35,11,60,30,48,39,53,23,41,32,55,24,42,33,29,47,38,54,27,45,36,21,56,28,46,37,31,49,40,57,25,43,34,81,80,72,73,74,75,76,77,78,79,83,84,85,86,87,88,89,90,91";
string[] segments = args7.Split(',');
ReadOnlyCollection<string> ignoreColumns = new(args4.Split(','));
ReadOnlyCollection<string> newColumnNames = new(args9.Split(','));
ReadOnlyCollection<string> oldColumnNames = new(args8.Split(','));
ReadOnlyCollection<string> backfillColumns = new(args5.Split(','));
ReadOnlyCollection<string> indexOnlyColumns = new(args6.Split(','));
ReadOnlyCollection<int> columnIndices = new(args10.Split(',').Select(int.Parse).ToArray());
foreach (string segment in segments)
{
segmentsB = segment.Split('|');
if (segmentsB.Length != 2)
continue;
if (distinct.Contains(segmentsB[0]))
continue;
distinct.Add(segmentsB[0]);
keyValuePairs.Add(segmentsB[0], segmentsB[1]);
}
result = new(backfillColumns: backfillColumns,
columnIndices: columnIndices,
newColumnNames: newColumnNames,
ignoreColumns: ignoreColumns,
indexOnlyColumns: indexOnlyColumns,
keyValuePairs: new(keyValuePairs),
oldColumnNames: oldColumnNames);
return result;
}
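
The args7 string above packs column renames as comma-separated old|new pairs; a standalone sketch of that parsing, with a shortened, hypothetical input:

```csharp
using System;
using System.Collections.Generic;

internal static class ColumnRenameSketch
{
    internal static void Main()
    {
        // Shortened, hypothetical pair list; the real args7 carries the full column set.
        const string pairs = "FlatZMean|MeanFlatZ,GradeMean|MeanGrade,|HGCV1";
        Dictionary<string, string> keyValuePairs = new();
        foreach (string segment in pairs.Split(','))
        {
            string[] segmentsB = segment.Split('|');
            if (segmentsB.Length != 2 || keyValuePairs.ContainsKey(segmentsB[0]))
                continue;
            keyValuePairs.Add(segmentsB[0], segmentsB[1]);
        }
        foreach (KeyValuePair<string, string> keyValuePair in keyValuePairs)
            Console.WriteLine($"{keyValuePair.Key} -> {keyValuePair.Value}");   // e.g. FlatZMean -> MeanFlatZ
    }
}
```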
private static ReadOnlyCollection<PreWith> GetPreWithCollection(ReadOnlyCollection<Pre> preCollection)
{
List<PreWith> results = new();
string errFile;
PreWith preWith;
string? checkDirectory;
string noWaitDirectory;
foreach (Pre pre in preCollection)
{
errFile = string.Concat(pre.CheckFile, ".err");
checkDirectory = Path.GetDirectoryName(pre.CheckFile);
if (string.IsNullOrEmpty(checkDirectory))
continue;
if (!Directory.Exists(checkDirectory))
_ = Directory.CreateDirectory(checkDirectory);
noWaitDirectory = Path.Combine(checkDirectory, "NoWaitDirectory");
preWith = new(pre.MatchingFile, pre.CheckFile, errFile, checkDirectory, noWaitDirectory);
results.Add(preWith);
}
return results.AsReadOnly();
}
private static ReadOnlyCollection<string> GetSearchDirectories(int numberLength, string parentDirectory)
{
List<string> results = new();
string[] directories = Directory.GetDirectories(parentDirectory, "*", SearchOption.TopDirectoryOnly);
@ -115,10 +230,137 @@ public class FileRead : Shared.FileRead, IFileRead
results.Add(directory);
}
results.Sort();
return results.AsReadOnly();
}
private static void CreatePointerFile(int numberLength, string parentDirectory, ReadOnlyCollection<string> matchingFiles)
{
string checkFile;
string writeFile;
string? directoryName;
int parentDirectoryLength = parentDirectory.Length;
foreach (string matchingFile in matchingFiles)
{
directoryName = Path.GetDirectoryName(matchingFile);
if (directoryName is null)
continue;
checkFile = $"{matchingFile[0]}{directoryName.Substring(parentDirectoryLength + numberLength + 1)}";
writeFile = Path.Combine(parentDirectory, $"{directoryName.Substring(parentDirectory.Length + 1, numberLength)}.txt");
if (File.Exists(writeFile))
continue;
File.AppendAllLines(writeFile, new string[] { parentDirectory, matchingFile, directoryName, checkFile });
}
}
private static ReadOnlyCollection<Pre> GetPreCollection(int numberLength, string parentDirectory, ReadOnlyCollection<string> matchingFiles)
{
List<Pre> results = new();
Pre pre;
string checkFile;
int parentDirectoryLength = parentDirectory.Length;
foreach (string matchingFile in matchingFiles)
{
checkFile = $"{matchingFile[0]}{matchingFile.Substring(parentDirectoryLength + numberLength + 1)}";
pre = new(matchingFile, checkFile);
results.Add(pre);
}
return results.AsReadOnly();
}
private void MoveCollection(DateTime dateTime, ProcessDataStandardFormat? processDataStandardFormat, ReadOnlyCollection<PreWith> preWithCollection)
{
ReadOnlyCollection<Post> postCollection = GetPostCollection(dateTime, processDataStandardFormat, preWithCollection);
if (postCollection.Count != 0)
{
Thread.Sleep(500);
StringBuilder stringBuilder = new();
foreach (Post post in postCollection)
{
if (File.Exists(post.ErrFile))
_ = stringBuilder.AppendLine(File.ReadAllText(post.ErrFile));
if (File.Exists(post.CheckFile))
_ = stringBuilder.AppendLine($"<{post.CheckFile}> was not consumed by the end!");
}
if (stringBuilder.Length > 0)
throw new Exception(stringBuilder.ToString());
}
}
private ReadOnlyCollection<Post> GetPostCollection(DateTime dateTime, ProcessDataStandardFormat? processDataStandardFormat, ReadOnlyCollection<PreWith> preWithCollection)
{
List<Post> results = new();
Post post;
long preWait;
foreach (PreWith preWith in preWithCollection)
{
if (processDataStandardFormat is null)
File.Move(preWith.MatchingFile, preWith.CheckFile);
else
{
ProcessDataStandardFormat.Write(preWith.CheckFile, processDataStandardFormat);
File.Delete(preWith.MatchingFile);
}
if (Directory.Exists(preWith.NoWaitDirectory))
{
post = new(preWith.CheckFile, preWith.ErrFile);
results.Add(post);
continue;
}
if (_FileConnectorConfiguration?.FileHandleWaitTime is null)
preWait = DateTime.Now.AddMilliseconds(1234).Ticks;
else
preWait = DateTime.Now.AddMilliseconds(_FileConnectorConfiguration.FileHandleWaitTime.Value).Ticks;
for (short i = 0; i < short.MaxValue; i++)
{
if (DateTime.Now.Ticks > preWait)
break;
Thread.Sleep(500);
}
for (int i = 0; i < int.MaxValue; i++)
{
if (File.Exists(preWith.ErrFile))
throw new Exception(File.ReadAllText(preWith.ErrFile));
if (!File.Exists(preWith.CheckFile))
break;
if (new TimeSpan(DateTime.Now.Ticks - dateTime.Ticks).TotalSeconds > _BreakAfterSeconds)
throw new Exception($"Not all files were consumed after {_BreakAfterSeconds} second(s)!");
Thread.Sleep(500);
}
}
return results.AsReadOnly();
}
private Tuple<string, Test[], JsonElement[], List<FileInfo>> GetExtractResult(string reportFullPath, DateTime dateTime)
{
Tuple<string, Test[], JsonElement[], List<FileInfo>> results = new(string.Empty, Array.Empty<Test>(), Array.Empty<JsonElement>(), new List<FileInfo>());
ProcessDataStandardFormatMapping processDataStandardFormatMapping = GetProcessDataStandardFormatMapping();
ProcessDataStandardFormat? processDataStandardFormat = ProcessDataStandardFormat.GetProcessDataStandardFormat(reportFullPath, processDataStandardFormatMapping);
if (processDataStandardFormat is not null)
_Logistics = new Logistics(reportFullPath, processDataStandardFormat);
else
{
processDataStandardFormat = ProcessDataStandardFormat.GetProcessDataStandardFormat(reportFullPath);
_Logistics = new Logistics(reportFullPath, processDataStandardFormat);
processDataStandardFormat = null;
}
SetFileParameterLotIDToLogisticsMID();
int numberLength = 2;
long ticks = dateTime.Ticks;
string parentParentDirectory = GetParentParent(reportFullPath);
ReadOnlyCollection<string> searchDirectories = GetSearchDirectories(numberLength, parentParentDirectory);
ReadOnlyCollection<string> matchingFiles = GetMatchingFiles(ticks, reportFullPath, searchDirectories);
if (matchingFiles.Count != searchDirectories.Count)
throw new Exception($"Didn't find all files after {_BreakAfterSeconds} second(s)!");
try
{ CreatePointerFile(numberLength, parentParentDirectory, matchingFiles); }
catch (Exception) { }
ReadOnlyCollection<Pre> preCollection = GetPreCollection(numberLength, parentParentDirectory, matchingFiles);
ReadOnlyCollection<PreWith> preWithCollection = GetPreWithCollection(preCollection);
MoveCollection(dateTime, processDataStandardFormat, preWithCollection);
return results;
}
private List<string> GetMatchingFiles(long ticks, string reportFullPath, List<string> searchDirectories)
private ReadOnlyCollection<string> GetMatchingFiles(long ticks, string reportFullPath, ReadOnlyCollection<string> searchDirectories)
{
List<string> results = new();
string[] found;
@ -137,129 +379,7 @@ public class FileRead : Shared.FileRead, IFileRead
break;
}
}
return results;
}
private static List<(string matchingFile, string checkFile)> GetCollection(int numberLength, string parentDirectory, List<string> matchingFiles)
{
List<(string matchingFile, string checkFile)> results = new();
string checkFile;
int parentDirectoryLength = parentDirectory.Length;
foreach (string matchingFile in matchingFiles)
{
checkFile = $"{matchingFile[0]}{matchingFile.Substring(parentDirectoryLength + numberLength + 1)}";
results.Add(new(matchingFile, checkFile));
}
return results;
}
private static List<(string, string, string, string, string)> GetCollection(List<(string matchingFile, string checkFile)> collection)
{
List<(string, string, string, string, string)> results = new();
string errFile;
string checkDirectory;
string noWaitDirectory;
foreach ((string matchingFile, string checkFile) in collection)
{
errFile = string.Concat(checkFile, ".err");
checkDirectory = Path.GetDirectoryName(checkFile);
if (!Directory.Exists(checkDirectory))
_ = Directory.CreateDirectory(checkDirectory);
noWaitDirectory = Path.Combine(checkDirectory, "NoWaitDirectory");
results.Add(new(matchingFile, checkFile, errFile, checkDirectory, noWaitDirectory));
}
return results;
}
private void MoveCollection(DateTime dateTime, List<(string matchingFile, string checkFile)> collection)
{
long preWait;
List<(string checkFile, string errFile)> postCollection = new();
foreach ((string matchingFile, string checkFile, string errFile, string checkDirectory, string noWaitDirectory) in GetCollection(collection))
{
File.Move(matchingFile, checkFile);
if (Directory.Exists(noWaitDirectory))
{
postCollection.Add(new(checkFile, errFile));
continue;
}
if (_FileConnectorConfiguration?.FileHandleWaitTime is null)
preWait = DateTime.Now.AddMilliseconds(1234).Ticks;
else
preWait = DateTime.Now.AddMilliseconds(_FileConnectorConfiguration.FileHandleWaitTime.Value).Ticks;
for (short i = 0; i < short.MaxValue; i++)
{
if (DateTime.Now.Ticks > preWait)
break;
Thread.Sleep(500);
}
for (int i = 0; i < int.MaxValue; i++)
{
if (File.Exists(errFile))
throw new Exception(File.ReadAllText(errFile));
if (!File.Exists(checkFile))
break;
if (new TimeSpan(DateTime.Now.Ticks - dateTime.Ticks).TotalSeconds > _BreakAfterSeconds)
throw new Exception($"Not all files were consumed after {_BreakAfterSeconds} second(s)!");
Thread.Sleep(500);
}
}
if (postCollection.Count != 0)
{
Thread.Sleep(500);
StringBuilder stringBuilder = new();
foreach ((string checkFile, string errFile) in postCollection)
{
if (File.Exists(errFile))
_ = stringBuilder.AppendLine(File.ReadAllText(errFile));
if (File.Exists(checkFile))
_ = stringBuilder.AppendLine($"<{checkFile}> was not consumed by the end!");
}
if (stringBuilder.Length > 0)
throw new Exception(stringBuilder.ToString());
}
}
private static void CreatePointerFile(int numberLength, string parentDirectory, List<string> matchingFiles)
{
#nullable enable
string checkFile;
string writeFile;
string? directoryName;
int parentDirectoryLength = parentDirectory.Length;
foreach (string matchingFile in matchingFiles)
{
directoryName = Path.GetDirectoryName(matchingFile);
if (directoryName is null)
continue;
checkFile = $"{matchingFile[0]}{directoryName.Substring(parentDirectoryLength + numberLength + 1)}";
writeFile = Path.Combine(parentDirectory, $"{directoryName.Substring(parentDirectory.Length + 1, numberLength)}.txt");
if (File.Exists(writeFile))
continue;
File.AppendAllLines(writeFile, new string[] { parentDirectory, matchingFile, directoryName, checkFile });
}
#nullable disable
}
private Tuple<string, Test[], JsonElement[], List<FileInfo>> GetExtractResult(string reportFullPath, DateTime dateTime)
{
Tuple<string, Test[], JsonElement[], List<FileInfo>> results = new(string.Empty, null, null, new List<FileInfo>());
Tuple<string, string[], string[]> pdsf = ProcessDataStandardFormat.GetLogisticsColumnsAndBody(reportFullPath);
_Logistics = new Logistics(reportFullPath, pdsf.Item1);
SetFileParameterLotIDToLogisticsMID();
int numberLength = 2;
long ticks = dateTime.Ticks;
string parentParentDirectory = GetParentParent(reportFullPath);
List<string> searchDirectories = GetSearchDirectories(numberLength, parentParentDirectory);
List<string> matchingFiles = GetMatchingFiles(ticks, reportFullPath, searchDirectories);
if (matchingFiles.Count != searchDirectories.Count)
throw new Exception($"Didn't find all files after {_BreakAfterSeconds} second(s)!");
try
{ CreatePointerFile(numberLength, parentParentDirectory, matchingFiles); }
catch (Exception) { }
List<(string matchingFile, string checkFile)> collection = GetCollection(numberLength, parentParentDirectory, matchingFiles);
MoveCollection(dateTime, collection);
return results;
return results.AsReadOnly();
}
}

View File

@ -118,15 +118,15 @@ public class FileRead : Shared.FileRead, IFileRead
private Tuple<string, Test[], JsonElement[], List<FileInfo>> GetExtractResult(string reportFullPath, DateTime dateTime)
{
Tuple<string, Test[], JsonElement[], List<FileInfo>> results;
Tuple<string, string[], string[]> pdsf = ProcessDataStandardFormat.GetLogisticsColumnsAndBody(reportFullPath);
_Logistics = new Logistics(reportFullPath, pdsf.Item1);
ProcessDataStandardFormat processDataStandardFormat = ProcessDataStandardFormat.GetProcessDataStandardFormat(reportFullPath);
_Logistics = new Logistics(reportFullPath, processDataStandardFormat);
SetFileParameterLotIDToLogisticsMID();
JsonElement[] jsonElements = ProcessDataStandardFormat.GetArray(pdsf);
JsonElement[] jsonElements = ProcessDataStandardFormat.GetArray(processDataStandardFormat);
List<Description> descriptions = json.ProcessData.GetDescriptions(jsonElements);
Test[] tests = (from l in descriptions select (Test)l.Test).ToArray();
if (_IsEAFHosted && _FileConnectorConfiguration.FileScanningIntervalInSeconds > 0)
SaveOpenInsightFile(reportFullPath, dateTime, descriptions, tests);
results = new Tuple<string, Test[], JsonElement[], List<FileInfo>>(pdsf.Item1, tests, jsonElements, new List<FileInfo>());
results = new Tuple<string, Test[], JsonElement[], List<FileInfo>>(string.Join(Environment.NewLine, processDataStandardFormat.Logistics[0]), tests, jsonElements, new List<FileInfo>());
return results;
}

View File

@ -129,15 +129,15 @@ public class FileRead : Shared.FileRead, IFileRead
private Tuple<string, Test[], JsonElement[], List<FileInfo>> GetExtractResult(string reportFullPath, DateTime dateTime)
{
Tuple<string, Test[], JsonElement[], List<FileInfo>> results;
Tuple<string, string[], string[]> pdsf = ProcessDataStandardFormat.GetLogisticsColumnsAndBody(reportFullPath);
_Logistics = new Logistics(reportFullPath, pdsf.Item1);
ProcessDataStandardFormat processDataStandardFormat = ProcessDataStandardFormat.GetProcessDataStandardFormat(reportFullPath);
_Logistics = new Logistics(reportFullPath, processDataStandardFormat);
SetFileParameterLotIDToLogisticsMID();
JsonElement[] jsonElements = ProcessDataStandardFormat.GetArray(pdsf);
JsonElement[] jsonElements = ProcessDataStandardFormat.GetArray(processDataStandardFormat);
List<Description> descriptions = json.ProcessData.GetDescriptions(jsonElements);
Test[] tests = (from l in descriptions select (Test)l.Test).ToArray();
if (_IsEAFHosted && _FileConnectorConfiguration.FileScanningIntervalInSeconds > 0)
SendData(reportFullPath, dateTime, descriptions);
results = new Tuple<string, Test[], JsonElement[], List<FileInfo>>(pdsf.Item1, tests, jsonElements, new List<FileInfo>());
results = new Tuple<string, Test[], JsonElement[], List<FileInfo>>(string.Join(Environment.NewLine, processDataStandardFormat.Logistics[0]), tests, jsonElements, new List<FileInfo>());
return results;
}

View File

@ -146,15 +146,15 @@ public class FileRead : Shared.FileRead, IFileRead
if (dateTime == DateTime.MinValue)
throw new ArgumentNullException(nameof(dateTime));
Tuple<string, Test[], JsonElement[], List<FileInfo>> results;
Tuple<string, string[], string[]> pdsf = ProcessDataStandardFormat.GetLogisticsColumnsAndBody(reportFullPath);
_Logistics = new Logistics(reportFullPath, pdsf.Item1);
ProcessDataStandardFormat processDataStandardFormat = ProcessDataStandardFormat.GetProcessDataStandardFormat(reportFullPath);
_Logistics = new Logistics(reportFullPath, processDataStandardFormat);
SetFileParameterLotIDToLogisticsMID();
JsonElement[] jsonElements = ProcessDataStandardFormat.GetArray(pdsf);
JsonElement[] jsonElements = ProcessDataStandardFormat.GetArray(processDataStandardFormat);
List<Description> descriptions = json.ProcessData.GetDescriptions(jsonElements);
Test[] tests = (from l in descriptions select (Test)l.Test).ToArray();
if (_IsEAFHosted && _FileConnectorConfiguration.FileScanningIntervalInSeconds > 0)
PostOpenInsightMetrologyViewerAttachments(descriptions);
results = new Tuple<string, Test[], JsonElement[], List<FileInfo>>(pdsf.Item1, tests, jsonElements, new List<FileInfo>());
results = new Tuple<string, Test[], JsonElement[], List<FileInfo>>(string.Join(Environment.NewLine, processDataStandardFormat.Logistics[0]), tests, jsonElements, new List<FileInfo>());
return results;
}

View File

@ -1,38 +1,194 @@
using System;
using System.Collections.Generic;
using System.Collections.ObjectModel;
using System.IO;
using System.Linq;
using System.Text.Json;
using System.Text.Json.Serialization;
namespace Adaptation.FileHandlers.Priority;
#nullable enable
public class Aggregation
{
[JsonConstructor]
public Aggregation(
string average,
int count,
int? inverse,
int maximum,
int minimum,
ReadOnlyCollection<Record> records,
int sum
)
public Aggregation(double inverseAverage,
int valueCount,
double fibonacciAverage,
int? inverseValue,
int valueMaximum,
int valueMinimum,
Notification[] notifications,
int valueSum)
{
Average = average;
Count = count;
Inverse = inverse;
Maximum = maximum;
Minimum = minimum;
Records = records;
Sum = sum;
InverseAverage = inverseAverage;
ValueCount = valueCount;
FibonacciAverage = fibonacciAverage;
InverseValue = inverseValue;
ValueMaximum = valueMaximum;
ValueMinimum = valueMinimum;
Notifications = notifications;
ValueSum = valueSum;
}
[JsonPropertyName("Average")] public string Average { get; }
[JsonPropertyName("Count")] public int Count { get; }
[JsonPropertyName("Inverse")] public int? Inverse { get; }
[JsonPropertyName("Maximum")] public int Maximum { get; }
[JsonPropertyName("Minimum")] public int Minimum { get; }
[JsonPropertyName("Records")] public ReadOnlyCollection<Record> Records { get; }
[JsonPropertyName("Sum")] public int Sum { get; }
public double InverseAverage { get; } // [JsonPropertyName("InverseAverage")]
public int ValueCount { get; } // [JsonPropertyName("ValueCount")]
public double FibonacciAverage { get; } // [JsonPropertyName("Fibonacci")]
public int? InverseValue { get; } // [JsonPropertyName("InverseValue")]
public int ValueMaximum { get; } // [JsonPropertyName("ValueMaximum")]
public int ValueMinimum { get; } // [JsonPropertyName("ValueMinimum")]
public Notification[] Notifications { get; } // [JsonPropertyName("Notifications")]
public int ValueSum { get; } // [JsonPropertyName("ValueSum")]
private static ReadOnlyDictionary<int, Aggregation> GetKeyValuePairs(Settings settings, Dictionary<int, List<Notification>> keyValuePairs)
{
Dictionary<int, Aggregation> results = new();
int? inverseValue;
double inverseAverage;
Aggregation aggregation;
double fibonacciAverage;
List<int> collection = new();
int averageFromInverseCeiling;
List<int> inverseCollection = new();
List<int> fibonacciCollection = new();
foreach (KeyValuePair<int, List<Notification>> keyValuePair in keyValuePairs)
{
collection.Clear();
inverseCollection.Clear();
fibonacciCollection.Clear();
foreach (Notification notification in keyValuePair.Value)
{
collection.Add(notification.Value);
if (notification.Inverse is null)
continue;
inverseCollection.Add(notification.Inverse.Value);
if (notification.Fibonacci is null)
continue;
fibonacciCollection.Add(notification.Fibonacci.Value);
}
if (inverseCollection.Count == 0 || fibonacciCollection.Count == 0)
continue;
inverseAverage = Math.Round(inverseCollection.Average(), settings.Digits);
averageFromInverseCeiling = (int)Math.Ceiling(inverseAverage);
inverseValue = Notification.GetInverse(averageFromInverseCeiling);
fibonacciAverage = Math.Round(fibonacciCollection.Average(), settings.Digits);
aggregation = new(inverseAverage: inverseAverage,
valueCount: collection.Count,
fibonacciAverage: fibonacciAverage,
inverseValue: inverseValue,
valueMaximum: collection.Max(),
valueMinimum: collection.Min(),
notifications: keyValuePair.Value.ToArray(),
valueSum: collection.Sum());
results.Add(keyValuePair.Key, aggregation);
}
return new(results);
}
private static ReadOnlyCollection<Notification> GetNotifications(Settings settings, string directory)
{
List<Notification> results = new();
string? key;
string text;
string[] files;
Notification? notification;
List<Notification>? collection;
Dictionary<string, List<Notification>> keyValuePairs = new();
string[] directories = Directory.GetDirectories(directory, "*", SearchOption.TopDirectoryOnly);
foreach (string subDirectory in directories)
{
keyValuePairs.Clear();
files = Directory.GetFiles(subDirectory, settings.SourceFileFilter, SearchOption.TopDirectoryOnly);
foreach (string file in files)
{
text = File.ReadAllText(file);
if (string.IsNullOrEmpty(text) || text[0] == '[')
continue;
notification = JsonSerializer.Deserialize(text, NotificationSourceGenerationContext.Default.Notification);
if (notification is null || notification.Id == 0)
continue;
key = !string.IsNullOrEmpty(notification.Username) ? notification.Username : notification.RemoteIpAddress;
if (string.IsNullOrEmpty(key))
continue;
if (!keyValuePairs.TryGetValue(key, out collection))
{
keyValuePairs.Add(key, new());
if (!keyValuePairs.TryGetValue(key, out collection))
throw new Exception();
}
collection.Add(notification);
}
foreach (KeyValuePair<string, List<Notification>> keyValuePair in keyValuePairs)
{
if (keyValuePair.Value.Count == 1)
results.Add(keyValuePair.Value[0]);
else
{
notification = keyValuePair.Value.Select(record => new KeyValuePair<long, Notification>(record.Time, record)).OrderBy(pair => pair.Key).Last().Value;
results.Add(notification);
}
}
}
return new(results);
}
private static ReadOnlyDictionary<int, Aggregation> GetKeyValuePairs(Settings settings, string directory)
{
ReadOnlyDictionary<int, Aggregation> results;
List<Notification>? collection;
Dictionary<int, List<Notification>> keyValuePairs = new();
ReadOnlyCollection<Notification> notifications = GetNotifications(settings, directory);
foreach (Notification notification in notifications)
{
if (!keyValuePairs.TryGetValue(notification.Id, out collection))
{
keyValuePairs.Add(notification.Id, new());
if (!keyValuePairs.TryGetValue(notification.Id, out collection))
throw new Exception();
}
collection.Add(notification);
}
results = GetKeyValuePairs(settings, keyValuePairs);
return results;
}
internal static ReadOnlyDictionary<string, ReadOnlyDictionary<int, Aggregation>> GetKeyValuePairsAndWriteFiles(Settings settings)
{
Dictionary<string, ReadOnlyDictionary<int, Aggregation>> results = new();
string json;
string jsonOld;
string jsonFile;
string directoryName;
ReadOnlyDictionary<int, Aggregation> keyValuePairs;
if (!Directory.Exists(settings.SourceFileLocation))
_ = Directory.CreateDirectory(settings.SourceFileLocation);
if (!Directory.Exists(settings.TargetFileLocation))
_ = Directory.CreateDirectory(settings.TargetFileLocation);
string[] directories = Directory.GetDirectories(settings.SourceFileLocation, "*", SearchOption.TopDirectoryOnly);
foreach (string directory in directories)
{
directoryName = Path.GetFileName(directory);
keyValuePairs = GetKeyValuePairs(settings, directory);
jsonFile = Path.Combine(settings.TargetFileLocation, $"{directoryName}.json");
json = JsonSerializer.Serialize(keyValuePairs, AggregationReadOnlyDictionarySourceGenerationContext.Default.ReadOnlyDictionaryInt32Aggregation);
// keyValuePairs = JsonSerializer.Deserialize(json, AggregationReadOnlyDictionarySourceGenerationContext.Default.ReadOnlyDictionaryInt32Aggregation);
jsonOld = File.Exists(jsonFile) ? File.ReadAllText(jsonFile) : string.Empty;
if (json != jsonOld)
File.WriteAllText(jsonFile, json);
results.Add(directoryName, keyValuePairs);
}
return new(results);
}
internal static ReadOnlyDictionary<int, Aggregation> GetKeyValuePairs(Settings settings, Notification notification)
{
ReadOnlyDictionary<int, Aggregation> results;
Dictionary<int, List<Notification>> keyValuePairs = new() { { notification.Id, new Notification[] { notification }.ToList() } };
results = GetKeyValuePairs(settings, keyValuePairs);
return results;
}
}
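
A worked sketch (hypothetical votes, not from this commit) of the aggregation above: values 1 and 2 map to Inverse 5 and 4 and Fibonacci 8 and 5, so InverseAverage is 4.5, its ceiling maps back through the inverse table to InverseValue 1, and FibonacciAverage is 6.5.

```csharp
using System;
using System.Linq;

internal static class AggregationSketch
{
    internal static void Main()
    {
        int digits = 5;
        int[] inverses = { 5, 4 };                                         // from votes with Value 1 and 2
        int[] fibonacci = { 8, 5 };                                        // GetFibonacci(5) and GetFibonacci(4)
        double inverseAverage = Math.Round(inverses.Average(), digits);    // 4.5
        int averageFromInverseCeiling = (int)Math.Ceiling(inverseAverage); // 5
        int? inverseValue = averageFromInverseCeiling switch               // same table as Notification.GetInverse
        {
            1 => 5, 2 => 4, 3 => 3, 4 => 2, 5 => 1, _ => null
        };
        double fibonacciAverage = Math.Round(fibonacci.Average(), digits); // 6.5
        Console.WriteLine($"{inverseAverage} {inverseValue} {fibonacciAverage}"); // 4.5 1 6.5
    }
}
```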
@ -47,3 +203,9 @@ internal partial class AggregationSourceGenerationContext : JsonSerializerContex
internal partial class AggregationCollectionSourceGenerationContext : JsonSerializerContext
{
}
[JsonSourceGenerationOptions(WriteIndented = true)]
[JsonSerializable(typeof(ReadOnlyDictionary<int, Aggregation>))]
internal partial class AggregationReadOnlyDictionarySourceGenerationContext : JsonSerializerContext
{
}

View File

@ -3,28 +3,36 @@ using Adaptation.Ifx.Eaf.EquipmentConnector.File.Configuration;
using Adaptation.Shared;
using Adaptation.Shared.Duplicator;
using Adaptation.Shared.Methods;
using log4net;
using System;
using System.Collections.Generic;
using System.Collections.ObjectModel;
using System.Diagnostics;
using System.IO;
using System.Linq;
using System.Text.Json;
using System.Threading;
namespace Adaptation.FileHandlers.Priority;
#nullable enable
public class FileRead : Shared.FileRead, IFileRead
{
private readonly Timer _Timer;
internal static ILog Log => _Log;
internal static Settings Settings => _Settings;
internal static Dictionary<int, WorkItem> WorkItems => _WorkItems;
#pragma warning disable IDE0032, CS8618
private static new ILog _Log;
private static Settings _Settings;
private static Dictionary<int, WorkItem> _WorkItems;
#pragma warning restore IDE0032, CS8618
public FileRead(ISMTP smtp, Dictionary<string, string> fileParameter, string cellInstanceName, int? connectionCount, string cellInstanceConnectionName, FileConnectorConfiguration fileConnectorConfiguration, string equipmentTypeName, string parameterizedModelObjectDefinitionType, IList<ModelObjectParameterDefinition> modelObjectParameters, string equipmentDictionaryName, Dictionary<string, List<long>> dummyRuns, Dictionary<long, List<string>> staticRuns, bool useCyclicalForDescription, bool isEAFHosted) :
base(new Description(), false, smtp, fileParameter, cellInstanceName, connectionCount, cellInstanceConnectionName, fileConnectorConfiguration, equipmentTypeName, parameterizedModelObjectDefinitionType, modelObjectParameters, equipmentDictionaryName, dummyRuns, staticRuns, useCyclicalForDescription, isEAFHosted: connectionCount is null)
{
_WorkItems = new();
_MinFileLength = 10;
_NullData = string.Empty;
_Logistics = new(this);
_NullData = string.Empty;
_Log = LogManager.GetLogger(typeof(FileRead));
if (_FileParameter is null)
throw new Exception(cellInstanceConnectionName);
if (_ModelObjectParameterDefinitions is null)
@ -33,12 +41,23 @@ public class FileRead : Shared.FileRead, IFileRead
throw new Exception(cellInstanceConnectionName);
if (_IsEAFHosted)
NestExistingFiles(_FileConnectorConfiguration);
if (!Debugger.IsAttached && fileConnectorConfiguration.PreProcessingMode != FileConnectorConfiguration.PreProcessingModeEnum.Process)
_Timer = new Timer(Callback, null, (int)(fileConnectorConfiguration.FileScanningIntervalInSeconds * 1000), Timeout.Infinite);
else
string parentDirectory = Path.GetDirectoryName(_FileConnectorConfiguration.TargetFileLocation) ?? throw new Exception();
_Settings = new(digits: 5,
parentDirectory: parentDirectory,
priorities: 3,
priorityGroups: 9,
sourceFileFilter: _FileConnectorConfiguration.SourceFileFilter,
sourceFileLocation: _FileConnectorConfiguration.SourceFileLocation,
targetFileLocation: _FileConnectorConfiguration.TargetFileLocation);
string? json = WeightedShortestJobFirstHub.PopulatedWorkItemsAndGetJson(_Settings);
if (!string.IsNullOrEmpty(json))
WeightedShortestJobFirstHub.WriteJson(json);
string cellInstanceNamed = string.Concat("CellInstance.", _EquipmentType);
string url = GetPropertyValue(cellInstanceConnectionName, modelObjectParameters, $"{cellInstanceNamed}.Microsoft.Owin.Hosting.WebApp.Start.URL");
if (_IsEAFHosted)
{
_Timer = new Timer(Callback, null, Timeout.Infinite, Timeout.Infinite);
Callback(null);
_ = Microsoft.Owin.Hosting.WebApp.Start(url);
_Log.Info($"Server running on {url}");
}
}
@ -91,7 +110,7 @@ public class FileRead : Shared.FileRead, IFileRead
DateTime dateTime = DateTime.Now;
results = GetExtractResult(reportFullPath, dateTime);
if (results.Item3 is null)
results = new Tuple<string, Test[], JsonElement[], List<FileInfo>>(results.Item1, Array.Empty<Test>(), JsonSerializer.Deserialize<JsonElement[]>("[]"), results.Item4);
results = new Tuple<string, Test[], JsonElement[], List<FileInfo>>(results.Item1, Array.Empty<Test>(), JsonSerializer.Deserialize<JsonElement[]>("[]") ?? throw new Exception(), results.Item4);
if (results.Item3.Length > 0 && _IsEAFHosted)
WritePDSF(this, results.Item3);
UpdateLastTicksDuration(DateTime.Now.Ticks - dateTime.Ticks);
@ -107,193 +126,14 @@ public class FileRead : Shared.FileRead, IFileRead
return results;
}
#nullable enable
private static ReadOnlyCollection<Record> GetRecords(string directory, string searchPattern)
{
List<Record> results = new();
string text;
Record? record;
string[] files;
List<Record>? collection;
Dictionary<string, List<Record>> keyValuePairs = new();
string[] directories = Directory.GetDirectories(directory, "*", SearchOption.TopDirectoryOnly);
foreach (string subDirectory in directories)
{
keyValuePairs.Clear();
files = Directory.GetFiles(subDirectory, searchPattern, SearchOption.TopDirectoryOnly);
foreach (string file in files)
{
text = File.ReadAllText(file);
if (string.IsNullOrEmpty(text) || text[0] == '[')
continue;
record = JsonSerializer.Deserialize<Record>(text);
if (record is null || record.Id == 0)
continue;
if (!keyValuePairs.TryGetValue(record.RemoteIpAddress, out collection))
{
keyValuePairs.Add(record.RemoteIpAddress, new());
if (!keyValuePairs.TryGetValue(record.RemoteIpAddress, out collection))
throw new Exception();
}
collection.Add(record);
}
foreach (KeyValuePair<string, List<Record>> keyValuePair in keyValuePairs)
{
if (keyValuePair.Value.Count == 1)
results.Add(keyValuePair.Value[0]);
else
{
record = keyValuePair.Value.Select(record => new KeyValuePair<long, Record>(record.Time, record)).OrderBy(pair => pair.Key).Last().Value;
results.Add(record);
}
}
}
return new(results);
}
private static int? GetInverse(int value) =>
value switch
{
1 => 3,
2 => 2,
3 => 1,
_ => null
};
private static int? GetInverse(double value)
{
int? result;
if (value > 3)
result = null;
else if (value > 2)
result = 1;
else if (value > 1)
result = 2;
else if (value > 0)
result = 3;
else
result = null;
return result;
}
private static ReadOnlyDictionary<int, Aggregation> GetKeyValuePairs(Dictionary<int, List<Record>> keyValuePairs)
{
Dictionary<int, Aggregation> results = new();
Aggregation aggregation;
int? inverse;
double average;
List<int> collection = new();
foreach (KeyValuePair<int, List<Record>> keyValuePair in keyValuePairs)
{
collection.Clear();
foreach (Record record in keyValuePair.Value)
{
inverse = GetInverse(record.Value);
if (inverse is null)
continue;
collection.Add(inverse.Value);
}
average = collection.Average();
inverse = GetInverse(average);
aggregation = new(average.ToString("0.000"),
keyValuePair.Value.Count,
inverse,
keyValuePair.Value.Max(record => record.Value),
keyValuePair.Value.Min(record => record.Value),
new(keyValuePair.Value),
keyValuePair.Value.Sum(record => record.Value));
results.Add(keyValuePair.Key, aggregation);
}
return new(results);
}
private static ReadOnlyDictionary<int, Aggregation> GetKeyValuePairs(string directory, string searchPattern)
{
ReadOnlyDictionary<int, Aggregation> results;
List<Record>? collection;
Dictionary<int, List<Record>> keyValuePairs = new();
ReadOnlyCollection<Record> records = GetRecords(directory, searchPattern);
foreach (Record record in records)
{
if (!keyValuePairs.TryGetValue(record.Id, out collection))
{
keyValuePairs.Add(record.Id, new());
if (!keyValuePairs.TryGetValue(record.Id, out collection))
throw new Exception();
}
collection.Add(record);
}
results = GetKeyValuePairs(keyValuePairs);
return results;
}
private static void WriteFiles(string sourceFileLocation, string sourceFileFilter, string targetFileLocation)
{
string json;
string jsonFile;
string directoryName;
if (!Directory.Exists(sourceFileLocation))
_ = Directory.CreateDirectory(sourceFileLocation);
if (!Directory.Exists(targetFileLocation))
_ = Directory.CreateDirectory(targetFileLocation);
ReadOnlyDictionary<int, Aggregation> keyValuePairs;
JsonSerializerOptions jsonSerializerOptions = new() { WriteIndented = true };
string[] directories = Directory.GetDirectories(sourceFileLocation, "*", SearchOption.TopDirectoryOnly);
foreach (string directory in directories)
{
directoryName = Path.GetFileName(directory);
keyValuePairs = GetKeyValuePairs(directory, sourceFileFilter);
jsonFile = Path.Combine(targetFileLocation, $"{directoryName}.json");
json = JsonSerializer.Serialize(keyValuePairs, jsonSerializerOptions);
File.WriteAllText(jsonFile, json);
}
}
private void Callback(object state)
{
try
{
if (_IsEAFHosted)
WriteFiles(_FileConnectorConfiguration.SourceFileLocation, _FileConnectorConfiguration.SourceFileFilter, _FileConnectorConfiguration.TargetFileLocation);
}
catch (Exception exception)
{
string subject = string.Concat("Exception:", _CellInstanceConnectionName);
string body = string.Concat(exception.Message, Environment.NewLine, Environment.NewLine, exception.StackTrace);
try
{
_SMTP.SendHighPriorityEmailMessage(subject, body);
File.WriteAllText(".email", body);
}
catch (Exception) { }
}
try
{
if (_FileConnectorConfiguration?.FileScanningIntervalInSeconds is null)
throw new Exception();
TimeSpan timeSpan = new(DateTime.Now.AddSeconds(_FileConnectorConfiguration.FileScanningIntervalInSeconds.Value).Ticks - DateTime.Now.Ticks);
_ = _Timer.Change((long)timeSpan.TotalMilliseconds, Timeout.Infinite);
}
catch (Exception exception)
{
string subject = string.Concat("Exception:", _CellInstanceConnectionName);
string body = string.Concat(exception.Message, Environment.NewLine, Environment.NewLine, exception.StackTrace);
try
{
_SMTP.SendHighPriorityEmailMessage(subject, body);
File.WriteAllText(".email", body);
}
catch (Exception) { }
}
}
private Tuple<string, Test[], JsonElement[], List<FileInfo>> GetExtractResult(string reportFullPath, DateTime dateTime)
{
Tuple<string, Test[], JsonElement[], List<FileInfo>> results;
if (dateTime == DateTime.MinValue)
throw new ArgumentNullException(nameof(dateTime));
_Logistics = new Logistics(reportFullPath, $"LOGISTICS_1{'\t'}A_JOBID={"BACKLOG"};A_MES_ENTITY={"BACKLOG"};");
string[] lines = new string[] { string.Empty, "NUM_DATA_ROWS", $"LOGISTICS_1{'\t'}A_JOBID={"BACKLOG"};A_MES_ENTITY={"BACKLOG"};" };
ProcessDataStandardFormat processDataStandardFormat = ProcessDataStandardFormat.GetProcessDataStandardFormat(reportFullPath, lines);
_Logistics = new Logistics(reportFullPath, processDataStandardFormat);
results = new(_Logistics.Logistics1[0], Array.Empty<Test>(), Array.Empty<JsonElement>(), new List<FileInfo>());
return results;
}

View File

@ -0,0 +1,90 @@
using System.Text.Json.Serialization;
namespace Adaptation.FileHandlers.Priority;
#nullable enable
public class Notification
{
[JsonConstructor]
public Notification(int? fibonacci,
int id,
int? inverse,
string? machineId,
string page,
string? remoteIpAddress,
string? site,
long time,
string? username,
int value)
{
int? i = inverse is not null ? inverse : GetInverse(value);
Fibonacci = fibonacci is not null ? fibonacci : i is null ? null : GetFibonacci(i.Value);
Id = id;
Inverse = i;
MachineId = machineId;
Page = page;
RemoteIpAddress = remoteIpAddress is not null ? remoteIpAddress : null;
Site = site is not null ? site : "MES";
Time = time;
Username = username;
Value = value;
}
[JsonPropertyName("id")] public int Id { get; }
[JsonPropertyName("fibonacci")] public int? Fibonacci { get; }
[JsonPropertyName("inverse")] public int? Inverse { get; }
[JsonPropertyName("machineId")] public string? MachineId { get; }
[JsonPropertyName("page")] public string Page { get; }
[JsonPropertyName("RemoteIpAddress")] public string? RemoteIpAddress { get; }
[JsonPropertyName("site")] public string? Site { get; }
[JsonPropertyName("time")] public long Time { get; }
[JsonPropertyName("username")] public string? Username { get; }
[JsonPropertyName("value")] public int Value { get; }
internal static int? GetInverse(int value) =>
value switch
{
1 => 5,
2 => 4,
3 => 3,
4 => 2,
5 => 1,
_ => null
};
private static int? GetFibonacci(int value) =>
value switch
{
9 => 55,
8 => 34,
7 => 21,
6 => 13,
5 => 8,
4 => 5,
3 => 3,
2 => 2,
1 => 1,
_ => null
};
internal static Notification GetNotification(Notification notification, string? remoteIpAddress, string? connectionId) =>
new(notification.Fibonacci,
notification.Id,
notification.Inverse,
notification.MachineId,
notification.Page,
remoteIpAddress ?? connectionId,
notification.Site,
notification.Time,
notification.Username,
notification.Value);
}
[JsonSourceGenerationOptions(WriteIndented = true)]
[JsonSerializable(typeof(Notification))]
public partial class NotificationSourceGenerationContext : JsonSerializerContext
{
}
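
A small sketch (hypothetical payload, assuming the Notification type above is in scope) showing how the constructor backfills Inverse and Fibonacci when a client only sends a raw value: value 2 becomes Inverse 4 and Fibonacci 5, and a missing site defaults to MES.

```csharp
using System;
using System.Text.Json;
using Adaptation.FileHandlers.Priority;

internal static class NotificationSketch
{
    internal static void Main()
    {
        // Hypothetical vote payload: only id, page, time and value are supplied.
        const string json = "{\"id\":12345,\"page\":\"business\",\"time\":1738512000000,\"value\":2}";
        Notification? notification = JsonSerializer.Deserialize<Notification>(json);
        Console.WriteLine($"{notification?.Inverse} {notification?.Fibonacci} {notification?.Site}"); // 4 5 MES
    }
}
```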

View File

@ -1,37 +0,0 @@
using System.Collections.ObjectModel;
using System.Text.Json.Serialization;
namespace Adaptation.FileHandlers.Priority;
public class Record
{
[JsonConstructor]
public Record(
string json,
int id,
string page,
string queryString,
string remoteIpAddress,
long time,
int value
)
{
Json = json;
Id = id;
Page = page;
QueryString = queryString;
RemoteIpAddress = remoteIpAddress;
Time = time;
Value = value;
}
[JsonPropertyName("Json")] public string Json { get; }
[JsonPropertyName("id")] public int Id { get; }
[JsonPropertyName("page")] public string Page { get; }
[JsonPropertyName("QueryString")] public string QueryString { get; }
[JsonPropertyName("RemoteIpAddress")] public string RemoteIpAddress { get; }
[JsonPropertyName("time")] public long Time { get; }
[JsonPropertyName("value")] public int Value { get; }
}

View File

@ -0,0 +1,43 @@
using System.Collections.Generic;
using System.Text.Json.Serialization;
namespace Adaptation.FileHandlers.Priority;
#nullable enable
public class Settings
{
[JsonConstructor]
public Settings(int digits,
string parentDirectory,
int priorities,
int priorityGroups,
string sourceFileFilter,
string sourceFileLocation,
string targetFileLocation)
{
Digits = digits;
ParentDirectory = parentDirectory;
Priorities = priorities;
PriorityGroups = priorityGroups;
SourceFileFilter = sourceFileFilter;
SourceFileLocation = sourceFileLocation;
TargetFileLocation = targetFileLocation;
}
public int Digits { get; } // [JsonPropertyName("Digits")]
public string ParentDirectory { get; } // [JsonPropertyName("ParentDirectory")]
public int Priorities { get; } // [JsonPropertyName("Priorities")]
public int PriorityGroups { get; } // [JsonPropertyName("PriorityGroups")]
public string SourceFileFilter { get; } // [JsonPropertyName("SourceFileFilter")]
public string SourceFileLocation { get; } // [JsonPropertyName("SourceFileLocation")]
public string TargetFileLocation { get; } // [JsonPropertyName("TargetFileLocation")]
}
[JsonSourceGenerationOptions(WriteIndented = true)]
[JsonSerializable(typeof(Dictionary<int, Settings>))]
internal partial class SettingsDictionarySourceGenerationContext : JsonSerializerContext
{
}

View File

@ -0,0 +1,13 @@
using Microsoft.Owin.Cors;
using Owin;
public class Startup
{
public void Configuration(IAppBuilder app)
{
_ = app.UseCors(CorsOptions.AllowAll);
_ = app.MapSignalR();
}
}

View File

@ -0,0 +1,158 @@
using System;
using System.Collections.Generic;
using System.Collections.ObjectModel;
using System.IO;
using System.Linq;
using System.Text.Json;
#nullable enable
namespace Adaptation.FileHandlers.Priority;
public class WeightedShortestJobFirstHub : Microsoft.AspNet.SignalR.Hub
{
// public async Task Send(int n)
// {
// await Clients.All.send(n);
// }
private string? GetRemoteIpAddress() =>
Context?.Headers?.Get("X-Real-IP");
public void Send(string name, string message)
{
Console.WriteLine($"{name}:{message};");
// FileRead.Logger.LogWarning($"{name}:{message};");
// FileRead.Log?.Info($"{name}:{message};");
Console.WriteLine(Context?.ConnectionId);
// FileRead.Logger.LogWarning(Context?.ConnectionId);
// FileRead.Log?.Info(Context?.ConnectionId);
string? remoteIpAddress = GetRemoteIpAddress();
Console.WriteLine(remoteIpAddress);
// FileRead.Logger.LogWarning(remoteIpAddress);
// FileRead.Log?.Info(remoteIpAddress);
Clients.All.addMessage(name, message);
}
private static void FileWriteAllText(Settings settings, Notification n)
{
string json = JsonSerializer.Serialize(n, NotificationSourceGenerationContext.Default.Notification);
string directory = Path.Combine(settings.SourceFileLocation, n.Page, n.Id.ToString());
if (!Directory.Exists(directory))
_ = Directory.CreateDirectory(directory);
string checkFile = Path.Combine(directory, $"{n.Time}.json");
File.WriteAllText(checkFile, json);
}
internal static void WriteJson(string json)
{
string jsonFile = Path.Combine(FileRead.Settings.ParentDirectory, "{}.json");
string jsonFileWith = Path.Combine(FileRead.Settings.ParentDirectory, "{[]}.json");
string jsonOld = File.Exists(jsonFileWith) ? File.ReadAllText(jsonFileWith) : string.Empty;
if (json != jsonOld)
{
File.WriteAllText(jsonFileWith, json);
Dictionary<int, WorkItem> w = JsonSerializer.Deserialize(json.Replace($"\"{nameof(Aggregation.Notifications)}\":", "\"ignore\":"), WorkItemDictionarySourceGenerationContext.Default.DictionaryInt32WorkItem) ?? throw new Exception();
json = JsonSerializer.Serialize(w, WorkItemDictionarySourceGenerationContext.Default.DictionaryInt32WorkItem);
File.WriteAllText(jsonFile, json);
}
}
internal static string? PopulatedWorkItemsAndGetJson(Settings settings)
{
string? result = null;
ReadOnlyDictionary<int, WorkItem?> workItems = WorkItem.GetKeyValuePairs(settings);
int useCount = (from l in workItems where l.Value.CostOfDelay is not null select true).Count();
double prioritySize = useCount / settings.Priorities;
double priorityGroupSize = useCount / settings.PriorityGroups;
WorkItem[] sorted = (from l in workItems
where l.Value is not null
orderby l.Value.Site is not null,
l.Value.Site descending,
l.Value.CostOfDelay is not null,
l.Value.CostOfDelay descending,
l.Value.BusinessValue?.FibonacciAverage is not null,
l.Value.BusinessValue?.FibonacciAverage descending,
l.Key
select l.Value).ToArray();
lock (FileRead.WorkItems)
{
int j = 0;
WorkItem w;
double value;
int lastId = -1;
int? sortBeforeId;
WorkItem workItem;
int? sortPriority;
int? sortPriorityGroup;
FileRead.WorkItems.Clear();
for (int i = 0; i < sorted.Length; i++)
{
w = sorted[i];
if (w.CostOfDelay is null)
{
sortBeforeId = null;
sortPriority = null;
sortPriorityGroup = null;
}
else
{
j += 1;
sortBeforeId = lastId;
value = (j / prioritySize) + 1;
sortPriority = (int)Math.Floor(value);
if (sortPriority > settings.Priorities)
sortPriority = settings.Priorities;
value = (j / priorityGroupSize) + 1;
sortPriorityGroup = (int)Math.Floor(value);
if (sortPriorityGroup > settings.PriorityGroups)
sortPriorityGroup = settings.PriorityGroups;
}
workItem = WorkItem.GetWorkItem(w, i, sortBeforeId, sortPriority, sortPriorityGroup);
FileRead.WorkItems.Add(workItem.Id, workItem);
lastId = w.Id;
}
result = JsonSerializer.Serialize(FileRead.WorkItems, WorkItemDictionarySourceGenerationContext.Default.DictionaryInt32WorkItem);
}
return result;
}
private static WorkItem GetWorkItem(Notification notification)
{
WorkItem? result;
lock (FileRead.WorkItems)
{
if (!FileRead.WorkItems.TryGetValue(notification.Id, out result))
throw new Exception();
}
return result;
}
public void NotifyAll(Notification notification)
{
try
{
string? json = null;
string? remoteIpAddress = GetRemoteIpAddress();
Notification n = Notification.GetNotification(notification, remoteIpAddress, Context?.ConnectionId);
Console.WriteLine(n.ToString());
// FileRead.Logger.LogWarning(n.ToString());
// FileRead.Log?.Info(n.ToString());
FileWriteAllText(FileRead.Settings, n);
json = PopulatedWorkItemsAndGetJson(FileRead.Settings);
if (!string.IsNullOrEmpty(json))
WriteJson(json);
if (!string.IsNullOrEmpty(n.RemoteIpAddress))
{
WorkItem workItem = GetWorkItem(n);
Clients.All.updateWorkItem(n.Page, workItem);
}
}
catch (Exception ex)
{ Console.WriteLine($"{ex.Message}{Environment.NewLine}{ex.StackTrace}"); }
// { FileRead.Logger.LogError(ex, "Error!"); }
// { FileRead.Log?.Error("Error!", ex); }
}
}
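For context, a hedged client-side sketch for the hub above using the Microsoft.AspNet.SignalR.Client package; the server URL is hypothetical, and the hub is addressed by its class name because no HubName attribute is declared.
// Sketch only: assumes Microsoft.AspNet.SignalR.Client is referenced; the URL is hypothetical.
using System;
using System.Threading.Tasks;
using Microsoft.AspNet.SignalR.Client;
internal static class HubClientSketch
{
    private static async Task Main()
    {
        HubConnection connection = new("http://localhost:8080/");
        IHubProxy proxy = connection.CreateHubProxy("WeightedShortestJobFirstHub");
        // Mirrors the Clients.All.addMessage(name, message) broadcast performed by Send.
        _ = proxy.On<string, string>("addMessage", (name, message) => Console.WriteLine($"{name}: {message}"));
        await connection.Start();
        await proxy.Invoke("Send", "tester", "hello");
    }
}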

View File

@ -0,0 +1,205 @@
using System;
using System.Collections.Generic;
using System.Collections.ObjectModel;
using System.Text.Json.Serialization;
namespace Adaptation.FileHandlers.Priority;
#nullable enable
public class WorkItem
{
[JsonConstructor]
public WorkItem(double? costOfDelay,
Aggregation? businessValue,
Aggregation? effort,
int id,
int? sortBeforeId,
int? sortPriority,
int? sortPriorityGroup,
Aggregation? riskReductionOpportunityEnablement,
string? site,
int? sortOrder,
Aggregation? timeCriticality,
double? weightedShortestJobFirst)
{
CostOfDelay = costOfDelay;
BusinessValue = businessValue;
Effort = effort;
Id = id;
Site = site;
SortBeforeId = sortBeforeId;
SortPriority = sortPriority;
SortPriorityGroup = sortPriorityGroup;
RiskReductionOpportunityEnablement = riskReductionOpportunityEnablement;
SortOrder = sortOrder;
TimeCriticality = timeCriticality;
WeightedShortestJobFirst = weightedShortestJobFirst;
}
const string _PageEffort = "effort";
const string _PageTimeCriticality = "time";
const string _PageBusinessValue = "business";
const string _PageRiskReductionOpportunityEnablement = "risk";
public double? CostOfDelay { get; } // [JsonPropertyName("CostOfDelay")]
public Aggregation? BusinessValue { get; } // [JsonPropertyName("BusinessValue")]
public Aggregation? Effort { get; } // [JsonPropertyName("Effort")]
public int Id { get; } // [JsonPropertyName("Id")]
public string? Site { get; } // [JsonPropertyName("Site")]
public int? SortBeforeId { get; } // [JsonPropertyName("SortBeforeId")]
public int? SortPriority { get; } // [JsonPropertyName("SortPriority")]
public int? SortPriorityGroup { get; } // [JsonPropertyName("SortPriorityGroup")]
public Aggregation? RiskReductionOpportunityEnablement { get; } // [JsonPropertyName("RiskReductionOpportunityEnablement")]
public int? SortOrder { get; } // [JsonPropertyName("SortOrder")]
public Aggregation? TimeCriticality { get; } // [JsonPropertyName("TimeCriticality")]
public double? WeightedShortestJobFirst { get; } // [JsonPropertyName("WeightedShortestJobFirst")]
internal static WorkItem GetWorkItem(WorkItem workItem, int i, int? sortBeforeId, int? sortPriority, int? sortPriorityGroup) =>
new(workItem.CostOfDelay,
workItem.BusinessValue,
workItem.Effort,
workItem.Id,
sortBeforeId,
sortPriority,
sortPriorityGroup,
workItem.RiskReductionOpportunityEnablement,
workItem.Site,
i,
workItem.TimeCriticality,
workItem.WeightedShortestJobFirst);
private static string? GetSite(Aggregation? effort, Aggregation? businessValue, Aggregation? timeCriticality, Aggregation? riskReductionOpportunityEnablement)
{
string? result = null;
if (result is null && effort is not null)
{
foreach (Notification notification in effort.Notifications)
{
if (notification.Site is not null)
{
result = notification.Site;
break;
}
}
}
if (result is null && businessValue is not null)
{
foreach (Notification notification in businessValue.Notifications)
{
if (notification.Site is not null)
{
result = notification.Site;
break;
}
}
}
if (result is null && timeCriticality is not null)
{
foreach (Notification notification in timeCriticality.Notifications)
{
if (notification.Site is not null)
{
result = notification.Site;
break;
}
}
}
if (result is null && riskReductionOpportunityEnablement is not null)
{
foreach (Notification notification in riskReductionOpportunityEnablement.Notifications)
{
if (notification.Site is not null)
{
result = notification.Site;
break;
}
}
}
return result;
}
internal static ReadOnlyDictionary<int, WorkItem?> GetWorkItems(Settings settings, ReadOnlyDictionary<string, ReadOnlyDictionary<int, Aggregation>> keyValuePairs)
{
Dictionary<int, WorkItem?> results = new();
string? site;
WorkItem? workItem;
double? costOfDelay;
Aggregation? effort;
List<int> ids = new();
Aggregation? businessValue;
Aggregation? timeCriticality;
double? weightedShortestJobFirst;
Aggregation? riskReductionOpportunityEnablement;
Dictionary<int, Aggregation?> effortCollection = new();
Dictionary<int, Aggregation?> businessValueCollection = new();
Dictionary<int, Aggregation?> timeCriticalityCollection = new();
Dictionary<int, Aggregation?> riskReductionOpportunityEnablementCollection = new();
foreach (KeyValuePair<string, ReadOnlyDictionary<int, Aggregation>> keyValuePair in keyValuePairs)
{
foreach (KeyValuePair<int, Aggregation> keyValue in keyValuePair.Value)
{
if (!ids.Contains(keyValue.Key))
ids.Add(keyValue.Key);
if (keyValuePair.Key == _PageEffort)
effortCollection.Add(keyValue.Key, keyValue.Value);
else if (keyValuePair.Key == _PageTimeCriticality)
timeCriticalityCollection.Add(keyValue.Key, keyValue.Value);
else if (keyValuePair.Key == _PageBusinessValue)
businessValueCollection.Add(keyValue.Key, keyValue.Value);
else if (keyValuePair.Key == _PageRiskReductionOpportunityEnablement)
riskReductionOpportunityEnablementCollection.Add(keyValue.Key, keyValue.Value);
else
throw new NotImplementedException();
}
}
foreach (int id in ids)
{
if (!effortCollection.TryGetValue(id, out effort))
effort = null;
if (!businessValueCollection.TryGetValue(id, out businessValue))
businessValue = null;
if (!timeCriticalityCollection.TryGetValue(id, out timeCriticality))
timeCriticality = null;
if (!riskReductionOpportunityEnablementCollection.TryGetValue(id, out riskReductionOpportunityEnablement))
riskReductionOpportunityEnablement = null;
site = GetSite(effort, businessValue, timeCriticality, riskReductionOpportunityEnablement);
costOfDelay = businessValue is null
|| timeCriticality is null
|| riskReductionOpportunityEnablement is null ? null : businessValue.FibonacciAverage
+ timeCriticality.FibonacciAverage
+ riskReductionOpportunityEnablement.FibonacciAverage;
weightedShortestJobFirst = costOfDelay is null || effort is null ? null : Math.Round(costOfDelay.Value / effort.FibonacciAverage, settings.Digits);
workItem = new(costOfDelay: costOfDelay,
businessValue: businessValue,
effort: effort,
id: id,
sortBeforeId: null,
sortPriority: null,
sortPriorityGroup: null,
riskReductionOpportunityEnablement: riskReductionOpportunityEnablement,
site: site,
sortOrder: null,
timeCriticality: timeCriticality,
weightedShortestJobFirst: weightedShortestJobFirst);
results.Add(id, workItem);
}
return new(results);
}
internal static ReadOnlyDictionary<int, WorkItem?> GetKeyValuePairs(Settings settings)
{
ReadOnlyDictionary<int, WorkItem?> results;
ReadOnlyDictionary<string, ReadOnlyDictionary<int, Aggregation>> keyValuePairs = Aggregation.GetKeyValuePairsAndWriteFiles(settings);
results = GetWorkItems(settings, keyValuePairs);
return results;
}
}
[JsonSourceGenerationOptions(WriteIndented = true)]
[JsonSerializable(typeof(Dictionary<int, WorkItem>))]
internal partial class WorkItemDictionarySourceGenerationContext : JsonSerializerContext
{
}
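As a worked example of the cost-of-delay arithmetic in GetWorkItems above (the numbers are made up): Fibonacci averages of 8 for business value, 5 for time criticality, 3 for risk reduction / opportunity enablement, and 5 for effort give CostOfDelay = 8 + 5 + 3 = 16 and WeightedShortestJobFirst = 16 / 5 = 3.2 when settings.Digits is 2.
// Sketch only: example Fibonacci averages, not real backlog data.
double businessValue = 8, timeCriticality = 5, riskReductionOpportunityEnablement = 3, effort = 5;
double costOfDelay = businessValue + timeCriticality + riskReductionOpportunityEnablement; // 16
double weightedShortestJobFirst = Math.Round(costOfDelay / effort, 2);                     // 3.2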

View File

@ -143,13 +143,13 @@ public class FileRead : Shared.FileRead, IFileRead
private Tuple<string, Test[], JsonElement[], List<FileInfo>> GetExtractResult(string reportFullPath, DateTime dateTime)
{
Tuple<string, Test[], JsonElement[], List<FileInfo>> results;
Tuple<string, string[], string[]> pdsf = ProcessDataStandardFormat.GetLogisticsColumnsAndBody(reportFullPath);
_Logistics = new Logistics(reportFullPath, pdsf.Item1);
ProcessDataStandardFormat processDataStandardFormat = ProcessDataStandardFormat.GetProcessDataStandardFormat(reportFullPath);
_Logistics = new Logistics(reportFullPath, processDataStandardFormat);
SetFileParameterLotIDToLogisticsMID();
JsonElement[] jsonElements = ProcessDataStandardFormat.GetArray(pdsf);
JsonElement[] jsonElements = ProcessDataStandardFormat.GetArray(processDataStandardFormat);
List<Description> descriptions = json.ProcessData.GetDescriptions(jsonElements);
Test[] tests = (from l in descriptions select (Test)l.Test).ToArray();
results = new Tuple<string, Test[], JsonElement[], List<FileInfo>>(pdsf.Item1, tests, jsonElements, new List<FileInfo>());
results = new Tuple<string, Test[], JsonElement[], List<FileInfo>>(string.Join(Environment.NewLine, processDataStandardFormat.Logistics[0]), tests, jsonElements, new List<FileInfo>());
if (_IsEAFHosted && _FileConnectorConfiguration.FileScanningIntervalInSeconds > 0)
DirectoryMove(reportFullPath, dateTime, descriptions);
else if (!_IsEAFHosted)

View File

@ -117,15 +117,15 @@ public class FileRead : Shared.FileRead, IFileRead
private Tuple<string, Test[], JsonElement[], List<FileInfo>> GetExtractResult(string reportFullPath, DateTime dateTime)
{
Tuple<string, Test[], JsonElement[], List<FileInfo>> results;
Tuple<string, string[], string[]> pdsf = ProcessDataStandardFormat.GetLogisticsColumnsAndBody(reportFullPath);
_Logistics = new Logistics(reportFullPath, pdsf.Item1);
ProcessDataStandardFormat processDataStandardFormat = ProcessDataStandardFormat.GetProcessDataStandardFormat(reportFullPath);
_Logistics = new Logistics(reportFullPath, processDataStandardFormat);
SetFileParameterLotIDToLogisticsMID();
JsonElement[] jsonElements = ProcessDataStandardFormat.GetArray(pdsf);
JsonElement[] jsonElements = ProcessDataStandardFormat.GetArray(processDataStandardFormat);
List<Shared.Properties.IDescription> descriptions = GetDuplicatorDescriptions(jsonElements);
Test[] tests = (from l in descriptions select (Test)l.Test).ToArray();
if (_IsEAFHosted && _FileConnectorConfiguration.FileScanningIntervalInSeconds > 0)
FileCopy(reportFullPath, dateTime, descriptions);
results = new Tuple<string, Test[], JsonElement[], List<FileInfo>>(pdsf.Item1, tests, jsonElements, new List<FileInfo>());
results = new Tuple<string, Test[], JsonElement[], List<FileInfo>>(string.Join(Environment.NewLine, processDataStandardFormat.Logistics[0]), tests, jsonElements, new List<FileInfo>());
return results;
}

View File

@ -0,0 +1,127 @@
using Adaptation.Eaf.Management.ConfigurationData.CellAutomation;
using Adaptation.Ifx.Eaf.EquipmentConnector.File.Configuration;
using Adaptation.Shared;
using Adaptation.Shared.Duplicator;
using Adaptation.Shared.Methods;
using System;
using System.Collections.Generic;
using System.Collections.ObjectModel;
using System.IO;
using System.Text.Json;
namespace Adaptation.FileHandlers.Violation;
public class FileRead : Shared.FileRead, IFileRead
{
private long? _TickOffset;
private readonly string _URL;
private readonly ReadOnlyCollection<string> _WorkItemTypes;
public FileRead(ISMTP smtp, Dictionary<string, string> fileParameter, string cellInstanceName, int? connectionCount, string cellInstanceConnectionName, FileConnectorConfiguration fileConnectorConfiguration, string equipmentTypeName, string parameterizedModelObjectDefinitionType, IList<ModelObjectParameterDefinition> modelObjectParameters, string equipmentDictionaryName, Dictionary<string, List<long>> dummyRuns, Dictionary<long, List<string>> staticRuns, bool useCyclicalForDescription, bool isEAFHosted) :
base(new Description(), false, smtp, fileParameter, cellInstanceName, connectionCount, cellInstanceConnectionName, fileConnectorConfiguration, equipmentTypeName, parameterizedModelObjectDefinitionType, modelObjectParameters, equipmentDictionaryName, dummyRuns, staticRuns, useCyclicalForDescription, isEAFHosted: connectionCount is null)
{
_MinFileLength = 10;
_NullData = string.Empty;
_Logistics = new(this);
if (_FileParameter is null)
throw new Exception(cellInstanceConnectionName);
if (_ModelObjectParameterDefinitions is null)
throw new Exception(cellInstanceConnectionName);
if (!_IsDuplicator)
throw new Exception(cellInstanceConnectionName);
string cellInstanceNamed = string.Concat("CellInstance.", _EquipmentType);
_URL = GetPropertyValue(cellInstanceConnectionName, modelObjectParameters, $"{cellInstanceNamed}.URL");
string workItemTypes = GetPropertyValue(cellInstanceConnectionName, modelObjectParameters, $"{cellInstanceNamed}.WorkItemTypes");
_WorkItemTypes = new(workItemTypes.Split('|'));
if (_IsEAFHosted)
NestExistingFiles(_FileConnectorConfiguration);
}
void IFileRead.Move(Tuple<string, Test[], JsonElement[], List<FileInfo>> extractResults, Exception exception) => Move(extractResults);
void IFileRead.WaitForThread() => WaitForThread(thread: null, threadExceptions: null);
string IFileRead.GetEventDescription()
{
string result = _Description.GetEventDescription();
return result;
}
List<string> IFileRead.GetHeaderNames()
{
List<string> results = _Description.GetHeaderNames();
return results;
}
string[] IFileRead.Move(Tuple<string, Test[], JsonElement[], List<FileInfo>> extractResults, string to, string from, string resolvedFileLocation, Exception exception)
{
string[] results = Move(extractResults, to, from, resolvedFileLocation, exception);
return results;
}
JsonProperty[] IFileRead.GetDefault()
{
JsonProperty[] results = _Description.GetDefault(this, _Logistics);
return results;
}
Dictionary<string, string> IFileRead.GetDisplayNamesJsonElement()
{
Dictionary<string, string> results = _Description.GetDisplayNamesJsonElement(this);
return results;
}
List<IDescription> IFileRead.GetDescriptions(IFileRead fileRead, List<Test> tests, IProcessData processData)
{
List<IDescription> results = _Description.GetDescriptions(fileRead, _Logistics, tests, processData);
return results;
}
Tuple<string, Test[], JsonElement[], List<FileInfo>> IFileRead.GetExtractResult(string reportFullPath, string eventName)
{
Tuple<string, Test[], JsonElement[], List<FileInfo>> results;
if (string.IsNullOrEmpty(eventName))
throw new Exception();
_ReportFullPath = reportFullPath;
DateTime dateTime = DateTime.Now;
results = GetExtractResult(reportFullPath, dateTime);
if (results.Item3 is null)
results = new Tuple<string, Test[], JsonElement[], List<FileInfo>>(results.Item1, Array.Empty<Test>(), JsonSerializer.Deserialize<JsonElement[]>("[]"), results.Item4);
if (results.Item3.Length > 0 && _IsEAFHosted)
WritePDSF(this, results.Item3);
UpdateLastTicksDuration(DateTime.Now.Ticks - dateTime.Ticks);
return results;
}
Tuple<string, Test[], JsonElement[], List<FileInfo>> IFileRead.ReExtract()
{
Tuple<string, Test[], JsonElement[], List<FileInfo>> results;
List<string> headerNames = _Description.GetHeaderNames();
Dictionary<string, string> keyValuePairs = _Description.GetDisplayNamesJsonElement(this);
results = ReExtract(this, headerNames, keyValuePairs);
return results;
}
private Tuple<string, Test[], JsonElement[], List<FileInfo>> GetExtractResult(string reportFullPath, DateTime dateTime)
{
Tuple<string, Test[], JsonElement[], List<FileInfo>> results = new(string.Empty, null, null, new List<FileInfo>());
_TickOffset ??= 0; // new FileInfo(reportFullPath).LastWriteTime.Ticks - dateTime.Ticks;
string[] lines = new string[] { string.Empty, "NUM_DATA_ROWS", $"LOGISTICS_1{'\t'}A_JOBID={"BACKLOG"};A_MES_ENTITY={"BACKLOG"};" };
ProcessDataStandardFormat processDataStandardFormat = ProcessDataStandardFormat.GetProcessDataStandardFormat(reportFullPath, lines);
_Logistics = new Logistics(reportFullPath, processDataStandardFormat);
SetFileParameterLotIDToLogisticsMID();
if (_Logistics.FileInfo.Length < _MinFileLength)
results.Item4.Add(_Logistics.FileInfo);
else
{
IProcessData iProcessData = new ProcessData(this, _Logistics, _FileConnectorConfiguration.TargetFileLocation, _URL, _WorkItemTypes, results.Item4);
if (iProcessData.Details.Count == 0)
results = new(string.Concat("B) No Data - ", dateTime.Ticks), Array.Empty<Test>(), Array.Empty<JsonElement>(), results.Item4);
else
results = iProcessData.GetResults(this, _Logistics, results.Item4);
}
return results;
}
}
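The constructor above reads its URL and WorkItemTypes from CellInstance.&lt;EquipmentType&gt; model object parameters and splits the latter on '|'; the value below is hypothetical and only shows the expected shape.
// Sketch only: a hypothetical pipe-delimited value for the CellInstance.<EquipmentType>.WorkItemTypes parameter.
string workItemTypes = "Bug|Feature|User Story";
System.Collections.ObjectModel.ReadOnlyCollection<string> parsed = new(workItemTypes.Split('|')); // ["Bug", "Feature", "User Story"]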

View File

@ -0,0 +1,706 @@
using Adaptation.FileHandlers.json.WorkItems;
using Adaptation.Shared;
using Adaptation.Shared.Duplicator;
using Adaptation.Shared.Methods;
using log4net;
using System;
using System.Collections.Generic;
using System.Collections.ObjectModel;
using System.IO;
using System.Linq;
using System.Text.Json;
using System.Text.Json.Serialization;
namespace Adaptation.FileHandlers.Violation;
#nullable enable
public class ProcessData : IProcessData
{
private readonly List<object> _Details;
List<object> Shared.Properties.IProcessData.Details => _Details;
private readonly ILog _Log;
string IProcessData.GetCurrentReactor(IFileRead fileRead, Logistics logistics, Dictionary<string, string> reactors) =>
throw new Exception(string.Concat("See ", nameof(WriteFiles)));
Tuple<string, Test[], JsonElement[], List<FileInfo>> IProcessData.GetResults(IFileRead fileRead, Logistics logistics, List<FileInfo> fileInfoCollection) =>
new(logistics.Logistics1[0], Array.Empty<Test>(), Array.Empty<JsonElement>(), fileInfoCollection);
public ProcessData(IFileRead fileRead, Logistics logistics, string targetFileLocation, string url, ReadOnlyCollection<string> workItemTypes, List<FileInfo> fileInfoCollection)
{
if (fileRead.IsEAFHosted)
{ }
_Details = new List<object>();
_Log = LogManager.GetLogger(typeof(ProcessData));
WriteFiles(fileRead, logistics, targetFileLocation, url, fileInfoCollection, workItemTypes);
}
private void WriteFiles(IFileRead fileRead, Logistics logistics, string destinationDirectory, string url, List<FileInfo> fileInfoCollection, ReadOnlyCollection<string> __)
{
if (!Directory.Exists(destinationDirectory))
_ = Directory.CreateDirectory(destinationDirectory);
string json = File.ReadAllText(logistics.ReportFullPath);
// WorkItem[]? workItems = JsonSerializer.Deserialize<WorkItem[]>(json);
// if (workItems is null)
// throw new Exception(nameof(workItems));
JsonElement[]? jsonElements = JsonSerializer.Deserialize<JsonElement[]>(json);
if (jsonElements is null)
throw new Exception(nameof(jsonElements));
WorkItem? workItem;
List<WorkItem> workItems = new();
foreach (JsonElement jsonElement in jsonElements)
{
workItem = JsonSerializer.Deserialize<WorkItem>(jsonElement.ToString());
if (workItem is null)
continue;
workItems.Add(workItem);
}
List<char> spaces = new();
bool keepRelations = false;
List<string> lines = new();
List<string> messages = new();
ReadOnlyCollection<Record> results;
ReadOnlyDictionary<int, Record> keyValuePairs = GetWorkItems(workItems, keepRelations);
ReadOnlyCollection<Record> records = new(keyValuePairs.Values.ToArray());
ReadOnlyCollection<string> userStoryWorkItemTypes = new(new string[] { "User Story" });
ReadOnlyCollection<string> bugFeatureWorkItemTypes = new(new string[] { "Bug", "Feature" });
ReadOnlyCollection<string> bugUserStoryWorkItemTypes = new(new string[] { "Bug", "User Story" });
ReadOnlyCollection<string> bugUserStoryTaskWorkItemTypes = new(new string[] { "Bug", "User Story", "Task" });
{
lines.Clear();
string workItemType = "Feature";
lines.Add($"# {nameof(FeatureCheckIterationPath122508)}");
lines.Add(string.Empty);
results = FeatureCheckIterationPath122508(url, lines, bugUserStoryTaskWorkItemTypes, keyValuePairs, workItemType);
WriteFiles(fileRead, destinationDirectory, fileInfoCollection, new(lines), workItemType, results, "check-122508");
_Details.Add(results);
}
{
lines.Clear();
string workItemType = "Feature";
lines.Add($"# {nameof(FeatureCheckTag122514)}");
lines.Add(string.Empty);
results = FeatureCheckTag122514(url, lines, bugUserStoryWorkItemTypes, keyValuePairs, workItemType);
WriteFiles(fileRead, destinationDirectory, fileInfoCollection, new(lines), workItemType, results, "check-122514");
_Details.Add(results);
}
{
lines.Clear();
string workItemType = "Feature";
lines.Add($"# {nameof(FeatureCheckPriority126169)}");
lines.Add(string.Empty);
results = FeatureCheckPriority126169(url, lines, bugUserStoryWorkItemTypes, keyValuePairs, workItemType);
WriteFiles(fileRead, destinationDirectory, fileInfoCollection, new(lines), workItemType, results, "check-126169");
_Details.Add(results);
}
{
lines.Clear();
string workItemType = "Feature";
lines.Add($"# {nameof(FeatureCheckState123066)}");
lines.Add(string.Empty);
results = FeatureCheckState123066(url, lines, bugUserStoryTaskWorkItemTypes, keyValuePairs, workItemType);
WriteFiles(fileRead, destinationDirectory, fileInfoCollection, new(lines), workItemType, results, "check-123066");
_Details.Add(results);
}
{
lines.Clear();
string workItemType = "Feature";
lines.Add($"# {nameof(FeatureCheckState123067)}");
lines.Add(string.Empty);
results = FeatureCheckState123067(url, lines, bugUserStoryTaskWorkItemTypes, keyValuePairs, workItemType);
WriteFiles(fileRead, destinationDirectory, fileInfoCollection, new(lines), workItemType, results, "check-123067");
_Details.Add(results);
}
{
lines.Clear();
string workItemType = "Feature";
lines.Add($"# {nameof(FeatureCheckStart122517)}");
lines.Add(string.Empty);
results = FeatureCheckStart122517(url, lines, bugUserStoryTaskWorkItemTypes, keyValuePairs, workItemType);
WriteFiles(fileRead, destinationDirectory, fileInfoCollection, new(lines), workItemType, results, "check-122517");
_Details.Add(results);
}
{
lines.Clear();
string workItemType = "User Story";
lines.Add($"# {nameof(UserStoryCheckIterationPath228385)}");
lines.Add(string.Empty);
results = UserStoryCheckIterationPath228385(url, lines, userStoryWorkItemTypes, keyValuePairs, workItemType);
WriteFiles(fileRead, destinationDirectory, fileInfoCollection, new(lines), workItemType, results, "check-228385");
_Details.Add(results);
}
if (messages.Count > 0)
throw new Exception($"{messages.Count}{Environment.NewLine}{string.Join(Environment.NewLine, messages)}");
}
private static ReadOnlyDictionary<int, Record> GetWorkItems(IEnumerable<WorkItem> workItems, bool keepRelations)
{
ReadOnlyDictionary<int, Record> results;
Dictionary<int, WorkItem> keyValuePairs = new();
foreach (WorkItem workItem in workItems)
keyValuePairs.Add(workItem.Id, workItem);
results = GetKeyValuePairs(new(keyValuePairs), keepRelations);
return results;
}
private static ReadOnlyDictionary<int, Record> GetKeyValuePairs(ReadOnlyDictionary<int, WorkItem> keyValuePairs, bool keepRelations)
{
Dictionary<int, Record> results = new();
Record record;
List<bool> nests = new();
WorkItem? parentWorkItem;
ReadOnlyCollection<Record> childRecords;
ReadOnlyCollection<Record> relatedRecords;
ReadOnlyCollection<Record> successorRecords;
foreach (KeyValuePair<int, WorkItem> keyValuePair in keyValuePairs)
{
nests.Clear();
if (keyValuePair.Value.Parent is null)
parentWorkItem = null;
else
_ = keyValuePairs.TryGetValue(keyValuePair.Value.Parent.Value, out parentWorkItem);
try
{
childRecords = Record.GetKeyValuePairs(keyValuePairs, keyValuePair.Value, "Child", nests, keepRelations); // Forward
relatedRecords = Record.GetKeyValuePairs(keyValuePairs, keyValuePair.Value, "Related", nests, keepRelations); // Related
successorRecords = Record.GetKeyValuePairs(keyValuePairs, keyValuePair.Value, "Successor", nests, keepRelations); // Forward
// predecessorRecords = Record.GetKeyValuePairs(keyValuePairs, keyValuePair.Value, "Predecessor", nests, keepRelations); // Reverse
record = Record.Get(keyValuePair.Value, parentWorkItem, childRecords, relatedRecords, successorRecords, keepRelations);
}
catch (Exception)
{
record = new(keyValuePair.Value, parentWorkItem, Array.Empty<Record>(), Array.Empty<Record>(), Array.Empty<Record>());
}
results.Add(keyValuePair.Key, record);
}
return new(results);
}
private static ReadOnlyCollection<Record> FeatureCheckIterationPath122508(string url, List<string> lines, ReadOnlyCollection<string> workItemTypes, ReadOnlyDictionary<int, Record> keyValuePairs, string workItemType)
{
List<Record> results = new();
Record record;
List<string> violations = new();
List<string> collection = new();
ReadOnlyCollection<Record> records;
ReadOnlyCollection<Record> maxIterationPaths;
foreach (KeyValuePair<int, Record> keyValuePair in keyValuePairs)
{
record = keyValuePair.Value;
if (record.WorkItem.State is "Removed")
continue;
if (!record.WorkItem.IterationPath.Contains('\\'))
continue;
if (record.WorkItem.WorkItemType != workItemType)
continue;
collection.Clear();
violations.Clear();
if (record.Children is null || record.Children.Length == 0)
continue;
records = FilterChildren(workItemTypes, record);
maxIterationPaths = GetMaxIterationPaths122508(records);
foreach (Record r in maxIterationPaths)
{
if (string.IsNullOrEmpty(r.WorkItem.IterationPath) || record.WorkItem.IterationPath == r.WorkItem.IterationPath)
continue;
violations.Add($"<a target='_blank' href='{url}{r.WorkItem.Id}'>{r.WorkItem.Id}</a>:{r.WorkItem.IterationPath};");
}
if (violations.Count > 0)
{
collection.Insert(0, string.Empty);
collection.Insert(0, $"## {record.WorkItem.AssignedTo} - {record.WorkItem.Id} - {record.WorkItem.Title}");
lines.AddRange(collection);
violations.Insert(0, $"<a target='_blank' href='{url}{record.WorkItem.Id}'>IterationPath</a>:{record.WorkItem.IterationPath};");
results.Add(Record.GetWithoutNesting(record, string.Join(" ", violations)));
}
}
return new(results);
}
private static ReadOnlyCollection<Record> GetMaxIterationPaths122508(ReadOnlyCollection<Record> records)
{
List<Record> results;
List<Record>? collection;
Dictionary<string, List<Record>> keyValuePairs = new();
foreach (Record record in records)
{
if (!keyValuePairs.TryGetValue(record.WorkItem.IterationPath, out collection))
{
keyValuePairs.Add(record.WorkItem.IterationPath, new());
if (!keyValuePairs.TryGetValue(record.WorkItem.IterationPath, out collection))
throw new Exception();
}
collection.Add(record);
}
string? max = keyValuePairs.Keys.Max();
results = string.IsNullOrEmpty(max) ? new() : keyValuePairs[max];
return results.AsReadOnly();
}
private static void WriteFiles(IFileRead fileRead, string destinationDirectory, List<FileInfo> fileInfoCollection, ReadOnlyCollection<string> lines, string _, ReadOnlyCollection<Record> records, string fileName)
{
string markdown = string.Join(Environment.NewLine, lines);
string markdownFile = Path.Combine(destinationDirectory, $"{fileName}.md");
string markdownOld = !File.Exists(markdownFile) ? string.Empty : File.ReadAllText(markdownFile);
if (markdown != markdownOld)
File.WriteAllText(markdownFile, markdown);
if (!fileRead.IsEAFHosted)
fileInfoCollection.Add(new(markdownFile));
string html = CommonMark.CommonMarkConverter.Convert(markdown).Replace("<a href", "<a target='_blank' href");
string htmlFile = Path.Combine(destinationDirectory, $"{fileName}.html");
string htmlOld = !File.Exists(htmlFile) ? string.Empty : File.ReadAllText(htmlFile);
if (html != htmlOld)
File.WriteAllText(htmlFile, html);
if (!fileRead.IsEAFHosted)
fileInfoCollection.Add(new(htmlFile));
string json = JsonSerializer.Serialize(records, new JsonSerializerOptions() { WriteIndented = true });
string jsonFile = Path.Combine(destinationDirectory, $"{fileName}.json");
string jsonOld = !File.Exists(jsonFile) ? string.Empty : File.ReadAllText(jsonFile);
if (json != jsonOld)
File.WriteAllText(jsonFile, json);
if (!fileRead.IsEAFHosted)
fileInfoCollection.Add(new(jsonFile));
}
private static ReadOnlyCollection<Record> FeatureCheckTag122514(string url, List<string> lines, ReadOnlyCollection<string> workItemTypes, ReadOnlyDictionary<int, Record> keyValuePairs, string workItemType)
{
List<Record> results = new();
Record record;
List<string> collection = new();
List<string> violations = new();
ReadOnlyCollection<Record> records;
ReadOnlyCollection<Record> recordsNotMatching;
foreach (KeyValuePair<int, Record> keyValuePair in keyValuePairs)
{
record = keyValuePair.Value;
if (record.WorkItem.State is "Removed")
continue;
if (record.WorkItem.WorkItemType != workItemType)
continue;
collection.Clear();
violations.Clear();
if (record.Children is null || record.Children.Length == 0)
continue;
if (string.IsNullOrEmpty(record.WorkItem.Tags))
recordsNotMatching = new(new Record[] { record });
else
{
records = FilterChildren(workItemTypes, record);
recordsNotMatching = GetWorkItemsNotMatching122514(record, records);
if (!string.IsNullOrEmpty(record.WorkItem.Tags) && recordsNotMatching.Count == 0)
continue;
}
collection.Add($"## {record.WorkItem.AssignedTo} - {record.WorkItem.Id} - {record.WorkItem.Title}");
collection.Add(string.Empty);
foreach (Record r in recordsNotMatching)
collection.Add($"- [ ] [{r.WorkItem}]({url}{r.WorkItem}) {nameof(record.WorkItem.Tags)} != {record.WorkItem.Tags}");
collection.Add(string.Empty);
lines.AddRange(collection);
violations.Add($"<a target='_blank' href='{url}{record.WorkItem.Id}'>Tag</a>:{record.WorkItem.Tags};");
foreach (Record r in recordsNotMatching)
violations.Add($"<a target='_blank' href='{url}{r.WorkItem.Id}'>{r.WorkItem.Id}</a>:{r.WorkItem.Tags};");
results.Add(Record.GetWithoutNesting(record, string.Join(" ", violations)));
}
return new(results);
}
private static ReadOnlyCollection<Record> FilterChildren(ReadOnlyCollection<string> workItemTypes, Record record)
{
List<Record> results = new();
FilterChildren(workItemTypes, record, results);
return new(results);
}
private static void FilterChildren(ReadOnlyCollection<string> workItemTypes, Record record, List<Record> results)
{
if (record.Children is not null)
{
foreach (Record r in record.Children)
{
if (!workItemTypes.Contains(r.WorkItem.WorkItemType))
continue;
results.Add(r);
FilterChildren(workItemTypes, r, results);
}
}
}
private static ReadOnlyCollection<Record> GetWorkItemsNotMatching122514(Record record, ReadOnlyCollection<Record> records)
{
List<Record> results = new();
string[] segments;
string[] parentTags = record.WorkItem.Tags.Split(';').Select(l => l.Trim()).ToArray();
foreach (Record r in records)
{
segments = string.IsNullOrEmpty(r.WorkItem.Tags) ? Array.Empty<string>() : r.WorkItem.Tags.Split(';').Select(l => l.Trim()).ToArray();
if (segments.Length > 0 && parentTags.Any(l => segments.Contains(l)))
continue;
results.Add(r);
}
return new(results);
}
private static ReadOnlyCollection<Record> FeatureCheckPriority126169(string url, List<string> lines, ReadOnlyCollection<string> workItemTypes, ReadOnlyDictionary<int, Record> keyValuePairs, string workItemType)
{
List<Record> results = new();
Record record;
List<string> collection = new();
List<string> violations = new();
ReadOnlyCollection<Record> records;
ReadOnlyCollection<Record> recordsNotMatching;
foreach (KeyValuePair<int, Record> keyValuePair in keyValuePairs)
{
record = keyValuePair.Value;
if (record.WorkItem.State is "Removed")
continue;
if (record.WorkItem.WorkItemType != workItemType)
continue;
collection.Clear();
violations.Clear();
if (record.Children is null || record.Children.Length == 0)
continue;
records = FilterChildren(workItemTypes, record);
recordsNotMatching = GetWorkItemsNotMatching126169(record, records);
if (recordsNotMatching.Count == 0)
continue;
collection.Add($"## {record.WorkItem.AssignedTo} - {record.WorkItem.Id} - {record.WorkItem.Title}");
collection.Add(string.Empty);
collection.Add($"- [{record.WorkItem.Id}]({url}{record.WorkItem.Id})");
foreach (Record r in recordsNotMatching)
collection.Add($"- [ ] [{r.WorkItem.Id}]({url}{r.WorkItem.Id}) {nameof(record.WorkItem.Priority)} != {record.WorkItem.Priority}");
collection.Add(string.Empty);
lines.AddRange(collection);
violations.Add($"<a target='_blank' href='{url}{record.WorkItem.Id}'>Priority</a>:{record.WorkItem.Priority};");
foreach (Record r in recordsNotMatching)
violations.Add($"<a target='_blank' href='{url}{r.WorkItem.Id}'>{r.WorkItem.Id}</a>:{r.WorkItem.Priority};");
results.Add(Record.GetWithoutNesting(record, string.Join(" ", violations)));
}
return new(results);
}
private static ReadOnlyCollection<Record> GetWorkItemsNotMatching126169(Record record, ReadOnlyCollection<Record> records)
{
List<Record> results = new();
foreach (Record r in records)
{
if (record.WorkItem.Priority is null)
{
results.Add(record);
break;
}
if (r.WorkItem.Priority == record.WorkItem.Priority.Value)
continue;
results.Add(r);
}
return new(results);
}
private static ReadOnlyCollection<Record> FeatureCheckState123066(string url, List<string> lines, ReadOnlyCollection<string> workItemTypes, ReadOnlyDictionary<int, Record> keyValuePairs, string workItemType)
{
List<Record> results = new();
Record record;
List<string> collection = new();
List<string> violations = new();
ReadOnlyCollection<Record> records;
ReadOnlyCollection<Record> recordsNotMatching;
foreach (KeyValuePair<int, Record> keyValuePair in keyValuePairs)
{
record = keyValuePair.Value;
if (record.WorkItem.State is "Removed")
continue;
if (record.WorkItem.WorkItemType != workItemType)
continue;
collection.Clear();
violations.Clear();
if (record.Children is null || record.Children.Length == 0)
continue;
records = FilterChildren(workItemTypes, record);
recordsNotMatching = GetWorkItemsNotMatching123066(record, records);
if (recordsNotMatching.Count == 0)
continue;
collection.Add($"## {record.WorkItem.AssignedTo} - {record.WorkItem.Id} - {record.WorkItem.Title}");
collection.Add(string.Empty);
collection.Add($"- [{record.WorkItem.Id}]({url}{record.WorkItem.Id})");
foreach (Record r in recordsNotMatching)
collection.Add($"- [ ] [{r.WorkItem.Id}]({url}{r.WorkItem.Id}) {nameof(record.WorkItem.State)} != {record.WorkItem.State}");
collection.Add(string.Empty);
lines.AddRange(collection);
violations.Add($"<a target='_blank' href='{url}{record.WorkItem.Id}'>State</a>:{record.WorkItem.State};");
foreach (Record r in recordsNotMatching)
violations.Add($"<a target='_blank' href='{url}{r.WorkItem.Id}'>{r.WorkItem.Id}</a>:{r.WorkItem.State};");
results.Add(Record.GetWithoutNesting(record, string.Join(" ", violations)));
}
return new(results);
}
private static ReadOnlyCollection<Record> GetWorkItemsNotMatching123066(Record record, ReadOnlyCollection<Record> records)
{
List<Record> results = new();
int check;
int state = GetState(record.WorkItem);
List<KeyValuePair<int, Record>> collection = new();
foreach (Record r in records)
{
if (r.WorkItem.State is "Removed")
continue;
check = GetState(r.WorkItem);
if (check == state)
continue;
collection.Add(new(check, r));
}
if (collection.Count > 0)
{
KeyValuePair<int, Record>[] notNewState = (from l in collection where l.Value.WorkItem.State != "New" select l).ToArray();
if (notNewState.Length == 0 && record.WorkItem.State is "New" or "Active")
collection.Clear();
else if (notNewState.Length > 0)
{
int minimum = notNewState.Min(l => l.Key);
if (minimum == state)
collection.Clear();
else if (minimum == 1 && record.WorkItem.State == "New")
collection.Clear();
else if (notNewState.Length > 0 && record.WorkItem.State == "Active")
collection.Clear();
}
}
foreach (KeyValuePair<int, Record> keyValuePair in collection.OrderByDescending(l => l.Key))
results.Add(keyValuePair.Value);
return new(results);
}
private static int GetState(WorkItem workItem) =>
workItem.State switch
{
"New" => 1,
"Active" => 2,
"Resolved" => 3,
"Closed" => 4,
"Removed" => 5,
_ => 8
};
private static ReadOnlyCollection<Record> FeatureCheckState123067(string url, List<string> lines, ReadOnlyCollection<string> workItemTypes, ReadOnlyDictionary<int, Record> keyValuePairs, string workItemType)
{
List<Record> results = new();
Record record;
List<string> collection = new();
List<string> violations = new();
ReadOnlyCollection<Record> records;
ReadOnlyCollection<Record> recordsNotMatching;
foreach (KeyValuePair<int, Record> keyValuePair in keyValuePairs)
{
record = keyValuePair.Value;
if (record.WorkItem.State is "Removed")
continue;
if (record.WorkItem.WorkItemType != workItemType)
continue;
collection.Clear();
violations.Clear();
if (record.Children is null || record.Children.Length == 0)
continue;
records = FilterChildren(workItemTypes, record);
recordsNotMatching = GetWorkItemsNotMatching123067(record, records);
if (recordsNotMatching.Count == 0)
continue;
collection.Add($"## {record.WorkItem.AssignedTo} - {record.WorkItem.Id} - {record.WorkItem.Title}");
collection.Add(string.Empty);
collection.Add($"- [{record.WorkItem.Id}]({url}{record.WorkItem.Id})");
foreach (Record r in recordsNotMatching)
collection.Add($"- [ ] [{r.WorkItem.Id}]({url}{r.WorkItem.Id}) {nameof(record.WorkItem.State)} != {record.WorkItem.State}");
collection.Add(string.Empty);
lines.AddRange(collection);
violations.Add($"<a target='_blank' href='{url}{record.WorkItem.Id}'>State</a>:{record.WorkItem.State};");
foreach (Record r in recordsNotMatching)
violations.Add($"<a target='_blank' href='{url}{r.WorkItem.Id}'>{r.WorkItem.Id}</a>:{r.WorkItem.State};");
results.Add(Record.GetWithoutNesting(record, string.Join(" ", violations)));
}
return new(results);
}
private static ReadOnlyCollection<Record> GetWorkItemsNotMatching123067(Record record, ReadOnlyCollection<Record> records)
{
List<Record> results = new();
int check;
int state = GetState(record.WorkItem);
List<KeyValuePair<int, Record>> collection = new();
foreach (Record r in records)
{
if (r.WorkItem.State is "Removed")
continue;
check = GetState(r.WorkItem);
if (check == state)
continue;
collection.Add(new(check, r));
}
if (collection.Count > 0)
{
KeyValuePair<int, Record>[] notNewState = (from l in collection where l.Value.WorkItem.State != "New" select l).ToArray();
if (notNewState.Length == 0 && record.WorkItem.State is "New" or "Active")
collection.Clear();
else if (notNewState.Length > 0)
{
int minimum = notNewState.Min(l => l.Key);
if (minimum == state)
collection.Clear();
else if (minimum == 1 && record.WorkItem.State == "New")
collection.Clear();
else if (notNewState.Length > 0 && record.WorkItem.State == "Active")
collection.Clear();
}
}
foreach (KeyValuePair<int, Record> keyValuePair in collection.OrderByDescending(l => l.Key))
results.Add(keyValuePair.Value);
return new(results);
}
private static ReadOnlyCollection<Record> FeatureCheckStart122517(string url, List<string> lines, ReadOnlyCollection<string> workItemTypes, ReadOnlyDictionary<int, Record> keyValuePairs, string workItemType)
{
List<Record> results = new();
Record record;
List<string> collection = new();
List<string> violations = new();
ReadOnlyCollection<Record> records;
ReadOnlyCollection<Record> recordsNotMatching;
foreach (KeyValuePair<int, Record> keyValuePair in keyValuePairs)
{
record = keyValuePair.Value;
if (record.WorkItem.State is "Removed")
continue;
if (record.WorkItem.WorkItemType != workItemType)
continue;
collection.Clear();
violations.Clear();
if (record.Children is null || record.Children.Length == 0)
continue;
if (record.WorkItem.StartDate is null)
continue;
records = FilterChildren(workItemTypes, record);
recordsNotMatching = GetWorkItemsNotMatching122517(record, records);
if (recordsNotMatching.Count == 0)
continue;
collection.Add($"## {record.WorkItem.AssignedTo} - {record.WorkItem.Id} - {record.WorkItem.Title}");
collection.Add(string.Empty);
collection.Add($"- [{record.WorkItem.Id}]({url}{record.WorkItem.Id})");
foreach (Record r in recordsNotMatching)
collection.Add($"- [ ] [{r.WorkItem.Id}]({url}{r.WorkItem.Id}) {nameof(record.WorkItem.ActivatedDate)} != {record.WorkItem.ActivatedDate}");
collection.Add(string.Empty);
lines.AddRange(collection);
violations.Add($"<a target='_blank' href='{url}{record.WorkItem.Id}'>StartDate</a>:{record.WorkItem.StartDate};");
foreach (Record r in recordsNotMatching)
violations.Add($"<a target='_blank' href='{url}{r.WorkItem.Id}'>{r.WorkItem.Id}</a>:{r.WorkItem.ActivatedDate};");
results.Add(Record.GetWithoutNesting(record, string.Join(" ", violations)));
}
return new(results);
}
private static ReadOnlyCollection<Record> GetWorkItemsNotMatching122517(Record record, ReadOnlyCollection<Record> records)
{
List<Record> results = new();
if (record.WorkItem.StartDate is null)
throw new Exception();
DateTime dateTime = record.WorkItem.StartDate.Value;
List<KeyValuePair<long, Record>> collection = new();
foreach (Record r in records)
{
if (r.WorkItem.State is "Removed")
continue;
if (r.WorkItem.ActivatedDate is null)
continue;
if (dateTime >= r.WorkItem.ActivatedDate.Value)
continue;
collection.Add(new(r.WorkItem.ActivatedDate.Value.Ticks, r));
}
foreach (KeyValuePair<long, Record> keyValuePair in collection.OrderBy(l => l.Key))
results.Add(keyValuePair.Value);
return new(results);
}
private static ReadOnlyCollection<Record> UserStoryCheckIterationPath228385(string url, List<string> lines, ReadOnlyCollection<string> _, ReadOnlyDictionary<int, Record> keyValuePairs, string workItemType)
{
List<Record> results = new();
long totalStoryPoints;
ReadOnlyDictionary<string, List<Record>> records = GetWorkItemsMatching228385(keyValuePairs, workItemType);
foreach (KeyValuePair<string, List<Record>> keyValuePair in records)
{
totalStoryPoints = 0;
foreach (Record record in keyValuePair.Value)
{
if (record.WorkItem.StoryPoints is null)
continue;
totalStoryPoints += record.WorkItem.StoryPoints.Value;
}
lines.Add(string.Empty);
lines.Add($"## {keyValuePair.Key} => {totalStoryPoints}");
lines.Add(string.Empty);
foreach (Record record in keyValuePair.Value)
lines.Add($"- [ ] [{record.WorkItem.Id}]({url}{record.WorkItem.Id}) - {record.WorkItem.Title}");
}
return new(results);
}
private static ReadOnlyDictionary<string, List<Record>> GetWorkItemsMatching228385(ReadOnlyDictionary<int, Record> keyValuePairs, string workItemType)
{
ReadOnlyDictionary<string, List<Record>> results;
Record record;
List<Record> records = new();
foreach (KeyValuePair<int, Record> keyValuePair in keyValuePairs)
{
record = keyValuePair.Value;
if (record.WorkItem.State is "Removed" or "Closed")
continue;
if (!record.WorkItem.IterationPath.Contains('\\'))
continue;
if (record.WorkItem.StoryPoints is null)
continue;
if (record.WorkItem.WorkItemType != workItemType)
continue;
records.Add(record);
}
Record[] sorted = records.OrderByDescending(l => l.WorkItem.IterationPath).ToArray();
results = GetWorkItemsMatching228385(new(sorted));
return results;
}
private static ReadOnlyDictionary<string, List<Record>> GetWorkItemsMatching228385(ReadOnlyCollection<Record> records)
{
Dictionary<string, List<Record>> results = new();
string key;
List<Record>? collection;
foreach (Record record in records)
{
key = $"{record.WorkItem.IterationPath}-{record.WorkItem.AssignedTo}";
if (!results.TryGetValue(key, out collection))
{
results.Add(key, new());
if (!results.TryGetValue(key, out collection))
throw new Exception();
}
collection.Add(record);
}
return new(results);
}
internal static List<Description> GetDescriptions(JsonElement[] jsonElements)
{
List<Description> results = new();
Description? description;
JsonSerializerOptions jsonSerializerOptions = new() { NumberHandling = JsonNumberHandling.AllowReadingFromString | JsonNumberHandling.WriteAsString };
foreach (JsonElement jsonElement in jsonElements)
{
if (jsonElement.ValueKind != JsonValueKind.Object)
throw new Exception();
description = JsonSerializer.Deserialize<Description>(jsonElement.ToString(), jsonSerializerOptions);
if (description is null)
continue;
results.Add(description);
}
return results;
}
}
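A small sketch of the tag rule enforced by GetWorkItemsNotMatching122514 above: a child is compliant when it shares at least one trimmed, semicolon-delimited tag with its parent Feature; the tag strings here are made up.
// Sketch only: hypothetical tag strings illustrating the semicolon-split overlap check.
using System;
using System.Linq;
string parentTags = "EAF; Mesa";
string childTags = "Mesa";
string[] parent = parentTags.Split(';').Select(l => l.Trim()).ToArray();
string[] child = childTags.Split(';').Select(l => l.Trim()).ToArray();
bool compliant = child.Length > 0 && parent.Any(l => child.Contains(l));
Console.WriteLine(compliant); // True, so this child would not be reported as a violation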

View File

@ -161,34 +161,36 @@ public class FileRead : Shared.FileRead, IFileRead
relations.Add(relation);
}
}
workItem = new(fields.MicrosoftVSTSCommonActivatedDate == DateTime.MinValue ? null : fields.MicrosoftVSTSCommonActivatedDate,
fields.SystemAreaPath,
fields.SystemAssignedTo?.DisplayName,
fields.MicrosoftVSTSCommonBusinessValue is null or 0 ? null : (long)fields.MicrosoftVSTSCommonBusinessValue,
fields.SystemChangedDate,
fields.MicrosoftVSTSCommonClosedDate == DateTime.MinValue ? null : fields.MicrosoftVSTSCommonClosedDate,
fields.SystemCommentCount,
fields.SystemCreatedDate,
fields.SystemDescription,
fields.MicrosoftVSTSSchedulingEffort is null or 0 ? null : (long)fields.MicrosoftVSTSSchedulingEffort,
value.Id,
fields.SystemIterationPath,
fields.SystemParent == 0 ? null : fields.SystemParent,
fields.MicrosoftVSTSCommonPriority == 0 ? null : fields.MicrosoftVSTSCommonPriority,
relations.ToArray(),
fields.CustomRequester?.DisplayName,
fields.MicrosoftVSTSCommonResolvedDate == DateTime.MinValue ? null : fields.MicrosoftVSTSCommonResolvedDate,
value.Rev,
fields.CustomRRminusOE is null or 0 ? null : (long)fields.CustomRRminusOE,
fields.MicrosoftVSTSSchedulingStartDate == DateTime.MinValue ? null : fields.MicrosoftVSTSSchedulingStartDate,
fields.SystemState,
fields.SystemTags,
fields.MicrosoftVSTSSchedulingTargetDate == DateTime.MinValue ? null : fields.MicrosoftVSTSSchedulingTargetDate,
fields.MicrosoftVSTSCommonTimeCriticality is null or 0 ? null : (long)fields.MicrosoftVSTSCommonTimeCriticality,
fields.SystemTitle.Trim(),
null,
fields.CustomWSJF is null or 0 ? null : (long)fields.CustomWSJF,
fields.SystemWorkItemType);
workItem = new(activatedDate: fields.MicrosoftVSTSCommonActivatedDate == DateTime.MinValue ? null : fields.MicrosoftVSTSCommonActivatedDate,
areaPath: fields.SystemAreaPath,
assignedTo: fields.SystemAssignedTo?.DisplayName,
businessValue: fields.MicrosoftVSTSCommonBusinessValue is null or 0 ? null : (long)fields.MicrosoftVSTSCommonBusinessValue,
changedDate: fields.SystemChangedDate,
closedDate: fields.MicrosoftVSTSCommonClosedDate == DateTime.MinValue ? null : fields.MicrosoftVSTSCommonClosedDate,
commentCount: fields.SystemCommentCount,
createdDate: fields.SystemCreatedDate,
description: fields.SystemDescription,
effort: fields.MicrosoftVSTSSchedulingEffort is null or 0 ? null : (long)fields.MicrosoftVSTSSchedulingEffort,
id: value.Id,
iterationPath: fields.SystemIterationPath,
parent: fields.SystemParent == 0 ? null : fields.SystemParent,
priority: fields.MicrosoftVSTSCommonPriority == 0 ? null : fields.MicrosoftVSTSCommonPriority,
relations: relations.ToArray(),
remainingWork: fields.MicrosoftVSTSSchedulingRemainingWork is null ? null : (long)fields.MicrosoftVSTSSchedulingRemainingWork,
requester: fields.CustomRequester?.DisplayName,
resolvedDate: fields.MicrosoftVSTSCommonResolvedDate == DateTime.MinValue ? null : fields.MicrosoftVSTSCommonResolvedDate,
revision: value.Rev,
riskReductionMinusOpportunityEnablement: fields.CustomRRminusOE is null or 0 ? null : (long)fields.CustomRRminusOE,
startDate: fields.MicrosoftVSTSSchedulingStartDate == DateTime.MinValue ? null : fields.MicrosoftVSTSSchedulingStartDate,
state: fields.SystemState,
storyPoints: fields.MicrosoftVSTSSchedulingStoryPoints is null ? null : (long)fields.MicrosoftVSTSSchedulingStoryPoints,
tags: fields.SystemTags,
targetDate: fields.MicrosoftVSTSSchedulingTargetDate == DateTime.MinValue ? null : fields.MicrosoftVSTSSchedulingTargetDate,
timeCriticality: fields.MicrosoftVSTSCommonTimeCriticality is null or 0 ? null : (long)fields.MicrosoftVSTSCommonTimeCriticality,
title: fields.SystemTitle.Trim(),
violation: null,
weightedShortestJobFirst: fields.CustomWSJF is null or 0 ? null : (long)fields.CustomWSJF,
workItemType: fields.SystemWorkItemType);
results.Add(workItem.Id, workItem);
}
return new(results);
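The named-argument constructor call above repeats a DateTime.MinValue-to-null normalization for several date fields; a hypothetical helper (not part of the change) shows that pattern in isolation.
// Sketch only: hypothetical helper; the refactor above inlines this check per date field.
using System;
internal static class DateSketch
{
    internal static DateTime? NullIfMinValue(DateTime? value) =>
        value is null || value == DateTime.MinValue ? null : value;
}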
@ -356,7 +358,9 @@ public class FileRead : Shared.FileRead, IFileRead
private Tuple<string, Test[], JsonElement[], List<FileInfo>> GetExtractResult(string reportFullPath, DateTime dateTime)
{
Tuple<string, Test[], JsonElement[], List<FileInfo>> results;
_Logistics = new Logistics(reportFullPath, $"LOGISTICS_1{'\t'}A_JOBID={"BACKLOG"};A_MES_ENTITY={"BACKLOG"};");
string[] lines = new string[] { string.Empty, "NUM_DATA_ROWS", $"LOGISTICS_1{'\t'}A_JOBID={"BACKLOG"};A_MES_ENTITY={"BACKLOG"};" };
ProcessDataStandardFormat processDataStandardFormat = ProcessDataStandardFormat.GetProcessDataStandardFormat(reportFullPath, lines);
_Logistics = new Logistics(reportFullPath, processDataStandardFormat);
if (_IsEAFHosted && _FileConnectorConfiguration.FileScanningIntervalInSeconds > 0)
MoveJson(reportFullPath, dateTime);
results = new(_Logistics.Logistics1[0], Array.Empty<Test>(), Array.Empty<JsonElement>(), new List<FileInfo>());

View File

@ -19,7 +19,6 @@ public class ProcessData : IProcessData
Logistics logistics,
List<FileInfo> fileInfoCollection)
{
fileInfoCollection.Clear();
_Details = new List<object>();
Parse();
}

View File

@ -6,15 +6,15 @@
<meta name="viewport" content="width=device-width" />
<title>Infineon - 122508 - Feature iteration should be set to max of children</title>
<link href="/styles/bootstrap.min.css?no-cache=2024-10-04-08-34" rel="stylesheet" />
<link href="/igniteui/css/themes/bootstrap3/default/infragistics.theme.css?v=2024-10-07-18-50" rel="stylesheet" />
<link href="/igniteui/css/structure/infragistics.css?v=2024-10-07-18-50" rel="stylesheet" />
<link href="/igniteui/css/themes/bootstrap3/default/infragistics.theme.css?v=2025-01-22-10-49" rel="stylesheet" />
<link href="/igniteui/css/structure/infragistics.css?v=2025-01-22-10-49" rel="stylesheet" />
<link href="/styles/122508.css?no-cache=2024-10-04-08-34" rel="stylesheet" />
<script src="/js/jquery-3.6.0.min.js?v=2024-10-07-18-50" type="text/javascript"></script>
<script src="/js/jquery-ui.min.js?v=2024-10-07-18-50" type="text/javascript"></script>
<script src="/js/122508.js?v=2024-10-07-18-50" type="text/javascript"></script>
<script src="/igniteui/js/infragistics.core.js?v=2024-10-07-18-50" type="text/javascript"></script>
<script src="/igniteui/js/infragistics.lob.js?v=2024-10-07-18-50" type="text/javascript"></script>
<script src="/igniteui/js/infragistics.dv.js?v=2024-10-07-18-50" type="text/javascript"></script>
<script src="/js/jquery-3.6.0.min.js?v=2025-01-22-10-49" type="text/javascript"></script>
<script src="/js/jquery-ui.min.js?v=2025-01-22-10-49" type="text/javascript"></script>
<script src="/js/122508.js?v=2025-01-22-10-49" type="text/javascript"></script>
<script src="/igniteui/js/infragistics.core.js?v=2025-01-22-10-49" type="text/javascript"></script>
<script src="/igniteui/js/infragistics.lob.js?v=2025-01-22-10-49" type="text/javascript"></script>
<script src="/igniteui/js/infragistics.dv.js?v=2025-01-22-10-49" type="text/javascript"></script>
</head>
<body>
@ -56,7 +56,7 @@
<script>
$(document).ready(function () {
initIndex("/markdown/check-122508.json?v=2024-10-07-18-50");
initIndex("/markdown/check-122508.json?v=2025-01-22-10-49");
});
</script>

View File

@ -6,15 +6,15 @@
<meta name="viewport" content="width=device-width" />
<title>Infineon - 122514 - Features and children must have a Tag</title>
<link href="/styles/bootstrap.min.css?no-cache=2024-10-04-08-34" rel="stylesheet" />
<link href="/igniteui/css/themes/bootstrap3/default/infragistics.theme.css?v=2024-10-07-18-50" rel="stylesheet" />
<link href="/igniteui/css/structure/infragistics.css?v=2024-10-07-18-50" rel="stylesheet" />
<link href="/igniteui/css/themes/bootstrap3/default/infragistics.theme.css?v=2025-01-22-10-49" rel="stylesheet" />
<link href="/igniteui/css/structure/infragistics.css?v=2025-01-22-10-49" rel="stylesheet" />
<link href="/styles/122514.css?no-cache=2024-10-04-08-34" rel="stylesheet" />
<script src="/js/jquery-3.6.0.min.js?v=2024-10-07-18-50" type="text/javascript"></script>
<script src="/js/jquery-ui.min.js?v=2024-10-07-18-50" type="text/javascript"></script>
<script src="/js/122514.js?v=2024-10-07-18-50" type="text/javascript"></script>
<script src="/igniteui/js/infragistics.core.js?v=2024-10-07-18-50" type="text/javascript"></script>
<script src="/igniteui/js/infragistics.lob.js?v=2024-10-07-18-50" type="text/javascript"></script>
<script src="/igniteui/js/infragistics.dv.js?v=2024-10-07-18-50" type="text/javascript"></script>
<script src="/js/jquery-3.6.0.min.js?v=2025-01-22-10-49" type="text/javascript"></script>
<script src="/js/jquery-ui.min.js?v=2025-01-22-10-49" type="text/javascript"></script>
<script src="/js/122514.js?v=2025-01-22-10-49" type="text/javascript"></script>
<script src="/igniteui/js/infragistics.core.js?v=2025-01-22-10-49" type="text/javascript"></script>
<script src="/igniteui/js/infragistics.lob.js?v=2025-01-22-10-49" type="text/javascript"></script>
<script src="/igniteui/js/infragistics.dv.js?v=2025-01-22-10-49" type="text/javascript"></script>
</head>
<body>
@ -56,7 +56,7 @@
<script>
$(document).ready(function () {
initIndex("/markdown/check-122514.json?v=2024-10-07-18-50");
initIndex("/markdown/check-122514.json?v=2025-01-22-10-49");
});
</script>

View File

@ -6,15 +6,15 @@
<meta name="viewport" content="width=device-width" />
<title>Infineon - 122517 - Feature start date should be min activated date of children</title>
<link href="/styles/bootstrap.min.css?no-cache=2024-10-04-08-34" rel="stylesheet" />
<link href="/igniteui/css/themes/bootstrap3/default/infragistics.theme.css?v=2024-10-07-18-50" rel="stylesheet" />
<link href="/igniteui/css/structure/infragistics.css?v=2024-10-07-18-50" rel="stylesheet" />
<link href="/igniteui/css/themes/bootstrap3/default/infragistics.theme.css?v=2025-01-22-10-49" rel="stylesheet" />
<link href="/igniteui/css/structure/infragistics.css?v=2025-01-22-10-49" rel="stylesheet" />
<link href="/styles/122517.css?no-cache=2024-10-04-08-34" rel="stylesheet" />
<script src="/js/jquery-3.6.0.min.js?v=2024-10-07-18-50" type="text/javascript"></script>
<script src="/js/jquery-ui.min.js?v=2024-10-07-18-50" type="text/javascript"></script>
<script src="/js/122517.js?v=2024-10-07-18-50" type="text/javascript"></script>
<script src="/igniteui/js/infragistics.core.js?v=2024-10-07-18-50" type="text/javascript"></script>
<script src="/igniteui/js/infragistics.lob.js?v=2024-10-07-18-50" type="text/javascript"></script>
<script src="/igniteui/js/infragistics.dv.js?v=2024-10-07-18-50" type="text/javascript"></script>
<script src="/js/jquery-3.6.0.min.js?v=2025-01-22-10-49" type="text/javascript"></script>
<script src="/js/jquery-ui.min.js?v=2025-01-22-10-49" type="text/javascript"></script>
<script src="/js/122517.js?v=2025-01-22-10-49" type="text/javascript"></script>
<script src="/igniteui/js/infragistics.core.js?v=2025-01-22-10-49" type="text/javascript"></script>
<script src="/igniteui/js/infragistics.lob.js?v=2025-01-22-10-49" type="text/javascript"></script>
<script src="/igniteui/js/infragistics.dv.js?v=2025-01-22-10-49" type="text/javascript"></script>
</head>
<body>
@ -56,7 +56,7 @@
<script>
$(document).ready(function () {
initIndex("/markdown/check-122517.json?v=2024-10-07-18-50");
initIndex("/markdown/check-122517.json?v=2025-01-22-10-49");
});
</script>

View File

@ -6,15 +6,15 @@
<meta name="viewport" content="width=device-width" />
<title>Infineon - 123066 - When children of a Feature are not New Feature must also not be New</title>
<link href="/styles/bootstrap.min.css?no-cache=2024-10-04-08-34" rel="stylesheet" />
<link href="/igniteui/css/themes/bootstrap3/default/infragistics.theme.css?v=2024-10-07-18-50" rel="stylesheet" />
<link href="/igniteui/css/structure/infragistics.css?v=2024-10-07-18-50" rel="stylesheet" />
<link href="/igniteui/css/themes/bootstrap3/default/infragistics.theme.css?v=2025-01-22-10-49" rel="stylesheet" />
<link href="/igniteui/css/structure/infragistics.css?v=2025-01-22-10-49" rel="stylesheet" />
<link href="/styles/123066.css?no-cache=2024-10-04-08-34" rel="stylesheet" />
<script src="/js/jquery-3.6.0.min.js?v=2024-10-07-18-50" type="text/javascript"></script>
<script src="/js/jquery-ui.min.js?v=2024-10-07-18-50" type="text/javascript"></script>
<script src="/js/123066.js?v=2024-10-07-18-50" type="text/javascript"></script>
<script src="/igniteui/js/infragistics.core.js?v=2024-10-07-18-50" type="text/javascript"></script>
<script src="/igniteui/js/infragistics.lob.js?v=2024-10-07-18-50" type="text/javascript"></script>
<script src="/igniteui/js/infragistics.dv.js?v=2024-10-07-18-50" type="text/javascript"></script>
<script src="/js/jquery-3.6.0.min.js?v=2025-01-22-10-49" type="text/javascript"></script>
<script src="/js/jquery-ui.min.js?v=2025-01-22-10-49" type="text/javascript"></script>
<script src="/js/123066.js?v=2025-01-22-10-49" type="text/javascript"></script>
<script src="/igniteui/js/infragistics.core.js?v=2025-01-22-10-49" type="text/javascript"></script>
<script src="/igniteui/js/infragistics.lob.js?v=2025-01-22-10-49" type="text/javascript"></script>
<script src="/igniteui/js/infragistics.dv.js?v=2025-01-22-10-49" type="text/javascript"></script>
</head>
<body>
@ -56,7 +56,7 @@
<script>
$(document).ready(function () {
initIndex("/markdown/check-123066.json?v=2024-10-07-18-50");
initIndex("/markdown/check-123066.json?v=2025-01-22-10-49");
});
</script>

View File

@ -6,15 +6,15 @@
<meta name="viewport" content="width=device-width" />
<title>Infineon - 123067 - WIP</title>
<link href="/styles/bootstrap.min.css?no-cache=2024-10-04-08-34" rel="stylesheet" />
<link href="/igniteui/css/themes/bootstrap3/default/infragistics.theme.css?v=2024-10-07-18-50" rel="stylesheet" />
<link href="/igniteui/css/structure/infragistics.css?v=2024-10-07-18-50" rel="stylesheet" />
<link href="/igniteui/css/themes/bootstrap3/default/infragistics.theme.css?v=2025-01-22-10-49" rel="stylesheet" />
<link href="/igniteui/css/structure/infragistics.css?v=2025-01-22-10-49" rel="stylesheet" />
<link href="/styles/123067.css?no-cache=2024-10-04-08-34" rel="stylesheet" />
<script src="/js/jquery-3.6.0.min.js?v=2024-10-07-18-50" type="text/javascript"></script>
<script src="/js/jquery-ui.min.js?v=2024-10-07-18-50" type="text/javascript"></script>
<script src="/js/123067.js?v=2024-10-07-18-50" type="text/javascript"></script>
<script src="/igniteui/js/infragistics.core.js?v=2024-10-07-18-50" type="text/javascript"></script>
<script src="/igniteui/js/infragistics.lob.js?v=2024-10-07-18-50" type="text/javascript"></script>
<script src="/igniteui/js/infragistics.dv.js?v=2024-10-07-18-50" type="text/javascript"></script>
<script src="/js/jquery-3.6.0.min.js?v=2025-01-22-10-49" type="text/javascript"></script>
<script src="/js/jquery-ui.min.js?v=2025-01-22-10-49" type="text/javascript"></script>
<script src="/js/123067.js?v=2025-01-22-10-49" type="text/javascript"></script>
<script src="/igniteui/js/infragistics.core.js?v=2025-01-22-10-49" type="text/javascript"></script>
<script src="/igniteui/js/infragistics.lob.js?v=2025-01-22-10-49" type="text/javascript"></script>
<script src="/igniteui/js/infragistics.dv.js?v=2025-01-22-10-49" type="text/javascript"></script>
</head>
<body>
@ -56,7 +56,7 @@
<script>
$(document).ready(function () {
initIndex("/markdown/check-123067.json?v=2024-10-07-18-50");
initIndex("/markdown/check-123067.json?v=2025-01-22-10-49");
});
</script>

View File

@ -6,15 +6,15 @@
<meta name="viewport" content="width=device-width" />
<title>Infineon - 126169 - Children of a Feature should have the same priority</title>
<link href="/styles/bootstrap.min.css?no-cache=2024-10-04-08-34" rel="stylesheet" />
<link href="/igniteui/css/themes/bootstrap3/default/infragistics.theme.css?v=2024-10-07-18-50" rel="stylesheet" />
<link href="/igniteui/css/structure/infragistics.css?v=2024-10-07-18-50" rel="stylesheet" />
<link href="/igniteui/css/themes/bootstrap3/default/infragistics.theme.css?v=2025-01-22-10-49" rel="stylesheet" />
<link href="/igniteui/css/structure/infragistics.css?v=2025-01-22-10-49" rel="stylesheet" />
<link href="/styles/126169.css?no-cache=2024-10-04-08-34" rel="stylesheet" />
<script src="/js/jquery-3.6.0.min.js?v=2024-10-07-18-50" type="text/javascript"></script>
<script src="/js/jquery-ui.min.js?v=2024-10-07-18-50" type="text/javascript"></script>
<script src="/js/126169.js?v=2024-10-07-18-50" type="text/javascript"></script>
<script src="/igniteui/js/infragistics.core.js?v=2024-10-07-18-50" type="text/javascript"></script>
<script src="/igniteui/js/infragistics.lob.js?v=2024-10-07-18-50" type="text/javascript"></script>
<script src="/igniteui/js/infragistics.dv.js?v=2024-10-07-18-50" type="text/javascript"></script>
<script src="/js/jquery-3.6.0.min.js?v=2025-01-22-10-49" type="text/javascript"></script>
<script src="/js/jquery-ui.min.js?v=2025-01-22-10-49" type="text/javascript"></script>
<script src="/js/126169.js?v=2025-01-22-10-49" type="text/javascript"></script>
<script src="/igniteui/js/infragistics.core.js?v=2025-01-22-10-49" type="text/javascript"></script>
<script src="/igniteui/js/infragistics.lob.js?v=2025-01-22-10-49" type="text/javascript"></script>
<script src="/igniteui/js/infragistics.dv.js?v=2025-01-22-10-49" type="text/javascript"></script>
</head>
<body>
@ -56,7 +56,7 @@
<script>
$(document).ready(function () {
initIndex("/markdown/check-126169.json?v=2024-10-07-18-50");
initIndex("/markdown/check-126169.json?v=2025-01-22-10-49");
});
</script>

View File

@ -6,15 +6,15 @@
<meta name="viewport" content="width=device-width" />
<title>Infineon - Business Value</title>
<link href="/styles/bootstrap.min.css?no-cache=2024-10-04-08-34" rel="stylesheet" />
<link href="/igniteui/css/themes/bootstrap3/default/infragistics.theme.css?v=2024-10-07-18-50" rel="stylesheet" />
<link href="/igniteui/css/structure/infragistics.css?v=2024-10-07-18-50" rel="stylesheet" />
<link href="/igniteui/css/themes/bootstrap3/default/infragistics.theme.css?v=2025-01-22-10-49" rel="stylesheet" />
<link href="/igniteui/css/structure/infragistics.css?v=2025-01-22-10-49" rel="stylesheet" />
<link href="/styles/business.css?no-cache=2024-10-04-08-34" rel="stylesheet" />
<script src="/js/jquery-3.6.0.min.js?v=2024-10-07-18-50" type="text/javascript"></script>
<script src="/js/jquery-ui.min.js?v=2024-10-07-18-50" type="text/javascript"></script>
<script src="/js/business.js?v=2024-10-07-18-50" type="text/javascript"></script>
<script src="/igniteui/js/infragistics.core.js?v=2024-10-07-18-50" type="text/javascript"></script>
<script src="/igniteui/js/infragistics.lob.js?v=2024-10-07-18-50" type="text/javascript"></script>
<script src="/igniteui/js/infragistics.dv.js?v=2024-10-07-18-50" type="text/javascript"></script>
<script src="/js/jquery-3.6.0.min.js?v=2025-01-22-10-49" type="text/javascript"></script>
<script src="/js/jquery-ui.min.js?v=2025-01-22-10-49" type="text/javascript"></script>
<script src="/js/business.js?v=2025-01-22-10-49" type="text/javascript"></script>
<script src="/igniteui/js/infragistics.core.js?v=2025-01-22-10-49" type="text/javascript"></script>
<script src="/igniteui/js/infragistics.lob.js?v=2025-01-22-10-49" type="text/javascript"></script>
<script src="/igniteui/js/infragistics.dv.js?v=2025-01-22-10-49" type="text/javascript"></script>
</head>
<body>
@ -59,7 +59,7 @@ What is the relative value to the Customer or business?
<script>
$(document).ready(function () {
initIndex("/markdown/bugs-features-with-parents.json?v=2024-10-07-18-50", "https://oi-metrology-viewer-prod.mes.infineon.com:4438/api/v1/ado/", "business", "Value", "Business Value", "/markdown/PI4-Results/business.json?v=2024-10-07-18-50");
initIndex("/markdown/bugs-features-with-parents.json?v=2025-01-22-10-49", "https://eaf-dev.mes.infineon.com/api/v1/ado/", "business", "Value", "Business Value", "/markdown/PI5-Results/business.json?v=2025-01-22-10-49");
});
</script>

View File

@ -0,0 +1,109 @@
<!DOCTYPE html>
<html lang="en">
<head>
<meta charset="utf-8" />
<meta name="viewport" content="width=device-width" />
<title>Infineon - Cost of Delay (CoD) (see @SCALE formula)</title>
<link href="/styles/bootstrap.min.css?no-cache=2024-10-04-08-34" rel="stylesheet" />
<link href="/styles/cod.css?no-cache=2024-10-04-08-34" rel="stylesheet" />
<script src="/js/scripts/jquery-1.6.4.min.js"></script>
<script src="/js/scripts/jquery.signalR-2.4.3.min.js"></script>
<script src="/signalr/hubs"></script>
<script src="/js/cod-b.js?v=2025-01-22-10-49" type="text/javascript"></script>
</head>
<body>
<div class="navbar navbar-fixed-top">
<div class="container-fluid">
<div class="navbar-header">
<button type="button" class="navbar-toggle" data-toggle="collapse" data-target=".navbar-collapse">
<span class="icon-bar"></span>
<span class="icon-bar"></span>
<span class="icon-bar"></span>
</button>
<div class="navbar-brand">
<span id="siteHeader">&nbsp;</span> -
<span id="th-span">&nbsp;</span>
<button id="toggle">Toggle</button>
</div>
</div>
<div class="navbar-collapse collapse">
<ul class="nav navbar-nav">
</ul>
<p class="navbar-text navbar-right">
&nbsp;
</p>
</div>
</div>
</div>
<div class="container-fluid body-content" style="margin-top: 40px; margin-left: 15px;">
<div id="HeaderGridDiv">
<table id="HeaderGrid" border="1"></table>
</div>
<br />&nbsp;
<div id="AllGridDiv">
<table id="AllGrid"></table>
</div>
<textarea id="AllTextarea" rows="20" cols="147"></textarea>
</div>
<script>
$(document).ready(function () {
const username = '';
const machineId = '';
const fromHtml = true;
const signalRUrl = "/signalr";
const windowLocationHRef = window.location.href;
const apiUrl = "https://eaf-dev.mes.infineon.com/api/v1/ado/";
const workItems = {
a: "/markdown/bugs-features-with-parents.json?v=2025-01-22-10-49",
b: "/markdown/{[]}.json?v=2025-01-22-10-49"
};
const b = {
page: "business",
description: "Value",
th: "Business Value",
span: "What is the relative value to the Customer or business?<br>• Do our users prefer this over that?<br>• What is the revenue impact on our business?<br>• Is there a potential penalty or other negative effects if we delay?"
};
const r = {
page: "risk",
description: "Risk",
th: "Risk Reduction and/or Opportunity Enablement",
span: "What else does this do for our business?<br>• Reduce the risk of this or future delivery?<br>• Is there value in the information we will receive?<br>• Enable new business opportunities?"
};
const t = {
page: "time",
description: "Critical",
th: "Time Criticality",
span: "How does user/business value decay over time?<br>• Is there a fixed deadline?<br>• Will they wait for us or move to another Solution?<br>• What is the current effect on Customer satisfaction?"
};
const c = {
page: "cod",
description: "CoD",
th: "Cost of Delay (CoD)",
span: "Cost of Delay (CoD) is the money lost by delaying or not doing a job for a specific time. It's a measure of the economic value of a job over time."
};
const e = {
page: "effort",
description: "Effort",
th: "Effort",
span: "Effort"
};
const w = {
page: "wsjf",
description: "WSJF",
th: "Weightest Shortest Job First calculation (WSJF)",
span: "Weightest Shortest Job First calculation (see @SCALE formula)"
};
initIndex(fromHtml, username, machineId, windowLocationHRef, workItems, b, r, t, c, e, w, apiUrl, signalRUrl);
});
</script>
</body>
</html>
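The page above collects the four WSJF inputs (business value, risk reduction/opportunity enablement, time criticality, effort) and hands them to cod-b.js. A minimal sketch of the calculation those inputs feed: CoD as the sum of the three value components matches updateRecordCoD later in this diff, while dividing by Effort to obtain WSJF is an assumption based on the usual SAFe formula (the referenced @SCALE formula itself is not shown in this compare).

// Sketch only. CoD-as-sum is taken from updateRecordCoD in this diff;
// the division by Effort for WSJF is an assumed, standard SAFe-style step.
function computeCoD(workItem) {
    return workItem.RiskReductionMinusOpportunityEnablement
        + workItem.TimeCriticality
        + workItem.BusinessValue;
}
function computeWsjf(workItem) {
    return computeCoD(workItem) / workItem.Effort;
}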

View File

@ -0,0 +1,97 @@
<!DOCTYPE html>
<html>
<head>
<meta charset="utf-8" />
<meta name="viewport" content="width=device-width" />
<title>Infineon - Cost of Delay (CoD) (see @SCALE formula)</title>
<link href="/styles/bootstrap.min.css?no-cache=2024-10-04-08-34" rel="stylesheet" />
<link href="/igniteui/css/themes/bootstrap3/default/infragistics.theme.css?v=2025-01-22-10-49" rel="stylesheet" />
<link href="/igniteui/css/structure/infragistics.css?v=2025-01-22-10-49" rel="stylesheet" />
<link href="/styles/cod.css?no-cache=2024-10-04-08-34" rel="stylesheet" />
<script src="/js/jquery-3.6.0.min.js?v=2025-01-22-10-49" type="text/javascript"></script>
<script src="/js/jquery-ui.min.js?v=2025-01-22-10-49" type="text/javascript"></script>
<script src="/js/scripts/jquery.signalR-2.4.3.min.js"></script>
<script src="/signalr/hubs"></script>
<script src="/js/cod.js?v=2025-01-22-10-49" type="text/javascript"></script>
<script src="/igniteui/js/infragistics.core.js?v=2025-01-22-10-49" type="text/javascript"></script>
<script src="/igniteui/js/infragistics.lob.js?v=2025-01-22-10-49" type="text/javascript"></script>
<script src="/igniteui/js/infragistics.dv.js?v=2025-01-22-10-49" type="text/javascript"></script>
</head>
<body>
<div class="navbar navbar-fixed-top">
<div class="container-fluid">
<div class="navbar-header">
<button type="button" class="navbar-toggle" data-toggle="collapse" data-target=".navbar-collapse">
<span class="icon-bar"></span>
<span class="icon-bar"></span>
<span class="icon-bar"></span>
</button>
<div class="navbar-brand">
<span id="siteHeader">&nbsp;</span> - Cost of Delay (CoD) (see @SCALE formula)
</div>
</div>
<div class="navbar-collapse collapse">
<ul class="nav navbar-nav">
</ul>
<p class="navbar-text navbar-right">
&nbsp;
</p>
</div>
</div>
</div>
<div class="container-fluid body-content" style="margin-top: 40px; margin-left: 15px;">
<div id="HeaderGridDiv">
<table id="HeaderGrid" border="1"></table>
</div>
<br />&nbsp;
<div id="AllGridDiv">
<table id="AllGrid"></table>
</div>
</div>
<script>
$(document).ready(function () {
const fromHtml = true;
const windowLocationHRef = window.location.href;
const headerGrid = document.getElementById("HeaderGrid");
const workItems = {
a: "/markdown/bugs-features-with-parents.json?v=2025-01-22-10-49",
b: "/markdown/{}.json?v=2025-01-22-10-49"
};
const b = {
page: "business",
description: "Value",
th: "Business Value",
span: "What is the relative value to the Customer or business?<br>• Do our users prefer this over that?<br>• What is the revenue impact on our business?<br>• Is there a potential penalty or other negative effects if we delay?"
};
const r = {
page: "risk",
description: "Risk",
th: "Risk Reduction and/or Opportunity Enablement",
span: "What else does this do for our business?<br>• Reduce the risk of this or future delivery?<br>• Is there value in the information we will receive?<br>• Enable new business opportunities?"
};
const t = {
page: "time",
description: "Critical",
th: "Time Criticality",
span: "How does user/business value decay over time?<br>• Is there a fixed deadline?<br>• Will they wait for us or move to another Solution?<br>• What is the current effect on Customer satisfaction?"
};
const c = {
page: "cod",
description: "CoD",
th: "Cost of Delay (CoD)",
span: "Cost of Delay (CoD) is the money lost by delaying or not doing a job for a specific time. It's a measure of the economic value of a job over time."
};
initIndex(fromHtml, windowLocationHRef, headerGrid, workItems, b, r, t, c, "https://eaf-dev.mes.infineon.com/api/v1/ado/");
});
</script>
</body>
</html>

View File

@ -6,15 +6,15 @@
<meta name="viewport" content="width=device-width" />
<title>Infineon - Effort</title>
<link href="/styles/bootstrap.min.css?no-cache=2024-10-04-08-34" rel="stylesheet" />
<link href="/igniteui/css/themes/bootstrap3/default/infragistics.theme.css?v=2024-10-07-18-50" rel="stylesheet" />
<link href="/igniteui/css/structure/infragistics.css?v=2024-10-07-18-50" rel="stylesheet" />
<link href="/igniteui/css/themes/bootstrap3/default/infragistics.theme.css?v=2025-01-22-10-49" rel="stylesheet" />
<link href="/igniteui/css/structure/infragistics.css?v=2025-01-22-10-49" rel="stylesheet" />
<link href="/styles/effort.css?no-cache=2024-10-04-08-34" rel="stylesheet" />
<script src="/js/jquery-3.6.0.min.js?v=2024-10-07-18-50" type="text/javascript"></script>
<script src="/js/jquery-ui.min.js?v=2024-10-07-18-50" type="text/javascript"></script>
<script src="/js/effort.js?v=2024-10-07-18-50" type="text/javascript"></script>
<script src="/igniteui/js/infragistics.core.js?v=2024-10-07-18-50" type="text/javascript"></script>
<script src="/igniteui/js/infragistics.lob.js?v=2024-10-07-18-50" type="text/javascript"></script>
<script src="/igniteui/js/infragistics.dv.js?v=2024-10-07-18-50" type="text/javascript"></script>
<script src="/js/jquery-3.6.0.min.js?v=2025-01-22-10-49" type="text/javascript"></script>
<script src="/js/jquery-ui.min.js?v=2025-01-22-10-49" type="text/javascript"></script>
<script src="/js/effort.js?v=2025-01-22-10-49" type="text/javascript"></script>
<script src="/igniteui/js/infragistics.core.js?v=2025-01-22-10-49" type="text/javascript"></script>
<script src="/igniteui/js/infragistics.lob.js?v=2025-01-22-10-49" type="text/javascript"></script>
<script src="/igniteui/js/infragistics.dv.js?v=2025-01-22-10-49" type="text/javascript"></script>
</head>
<body>
@ -55,7 +55,7 @@
<script>
$(document).ready(function () {
initIndex("/markdown/bugs-features-with-parents.json?v=2024-10-07-18-50", "https://oi-metrology-viewer-prod.mes.infineon.com:4438/api/v1/ado/", "effort", "Effort", "Effort", "/markdown/PI4-Results/effort.json?v=2024-10-07-18-50");
initIndex("/markdown/bugs-features-with-parents.json?v=2025-01-22-10-49", "https://eaf-dev.mes.infineon.com/api/v1/ado/", "effort", "Effort", "Effort", "/markdown/PI5-Results/effort.json?v=2025-01-22-10-49");
});
</script>

View File

@ -6,15 +6,15 @@
<meta name="viewport" content="width=device-width" />
<title>Infineon - Risk Reduction and/or Opportunity Enablement</title>
<link href="/styles/bootstrap.min.css?no-cache=2024-10-04-08-34" rel="stylesheet" />
<link href="/igniteui/css/themes/bootstrap3/default/infragistics.theme.css?v=2024-10-07-18-50" rel="stylesheet" />
<link href="/igniteui/css/structure/infragistics.css?v=2024-10-07-18-50" rel="stylesheet" />
<link href="/igniteui/css/themes/bootstrap3/default/infragistics.theme.css?v=2025-01-22-10-49" rel="stylesheet" />
<link href="/igniteui/css/structure/infragistics.css?v=2025-01-22-10-49" rel="stylesheet" />
<link href="/styles/risk.css?no-cache=2024-10-04-08-34" rel="stylesheet" />
<script src="/js/jquery-3.6.0.min.js?v=2024-10-07-18-50" type="text/javascript"></script>
<script src="/js/jquery-ui.min.js?v=2024-10-07-18-50" type="text/javascript"></script>
<script src="/js/risk.js?v=2024-10-07-18-50" type="text/javascript"></script>
<script src="/igniteui/js/infragistics.core.js?v=2024-10-07-18-50" type="text/javascript"></script>
<script src="/igniteui/js/infragistics.lob.js?v=2024-10-07-18-50" type="text/javascript"></script>
<script src="/igniteui/js/infragistics.dv.js?v=2024-10-07-18-50" type="text/javascript"></script>
<script src="/js/jquery-3.6.0.min.js?v=2025-01-22-10-49" type="text/javascript"></script>
<script src="/js/jquery-ui.min.js?v=2025-01-22-10-49" type="text/javascript"></script>
<script src="/js/risk.js?v=2025-01-22-10-49" type="text/javascript"></script>
<script src="/igniteui/js/infragistics.core.js?v=2025-01-22-10-49" type="text/javascript"></script>
<script src="/igniteui/js/infragistics.lob.js?v=2025-01-22-10-49" type="text/javascript"></script>
<script src="/igniteui/js/infragistics.dv.js?v=2025-01-22-10-49" type="text/javascript"></script>
</head>
<body>
@ -59,7 +59,7 @@ What else does this do for our business?
<script>
$(document).ready(function () {
initIndex("/markdown/bugs-features-with-parents.json?v=2024-10-07-18-50", "https://oi-metrology-viewer-prod.mes.infineon.com:4438/api/v1/ado/", "risk", "Risk", "Risk Reduction and/or Opportunity Enablement", "/markdown/PI4-Results/risk.json?v=2024-10-07-18-50");
initIndex("/markdown/bugs-features-with-parents.json?v=2025-01-22-10-49", "https://eaf-dev.mes.infineon.com/api/v1/ado/", "risk", "Risk", "Risk Reduction and/or Opportunity Enablement", "/markdown/PI5-Results/risk.json?v=2025-01-22-10-49");
});
</script>

View File

@ -6,15 +6,15 @@
<meta name="viewport" content="width=device-width" />
<title>Infineon - Time Criticality</title>
<link href="/styles/bootstrap.min.css?no-cache=2024-10-04-08-34" rel="stylesheet" />
<link href="/igniteui/css/themes/bootstrap3/default/infragistics.theme.css?v=2024-10-07-18-50" rel="stylesheet" />
<link href="/igniteui/css/structure/infragistics.css?v=2024-10-07-18-50" rel="stylesheet" />
<link href="/igniteui/css/themes/bootstrap3/default/infragistics.theme.css?v=2025-01-22-10-49" rel="stylesheet" />
<link href="/igniteui/css/structure/infragistics.css?v=2025-01-22-10-49" rel="stylesheet" />
<link href="/styles/time.css?no-cache=2024-10-04-08-34" rel="stylesheet" />
<script src="/js/jquery-3.6.0.min.js?v=2024-10-07-18-50" type="text/javascript"></script>
<script src="/js/jquery-ui.min.js?v=2024-10-07-18-50" type="text/javascript"></script>
<script src="/js/time.js?v=2024-10-07-18-50" type="text/javascript"></script>
<script src="/igniteui/js/infragistics.core.js?v=2024-10-07-18-50" type="text/javascript"></script>
<script src="/igniteui/js/infragistics.lob.js?v=2024-10-07-18-50" type="text/javascript"></script>
<script src="/igniteui/js/infragistics.dv.js?v=2024-10-07-18-50" type="text/javascript"></script>
<script src="/js/jquery-3.6.0.min.js?v=2025-01-22-10-49" type="text/javascript"></script>
<script src="/js/jquery-ui.min.js?v=2025-01-22-10-49" type="text/javascript"></script>
<script src="/js/time.js?v=2025-01-22-10-49" type="text/javascript"></script>
<script src="/igniteui/js/infragistics.core.js?v=2025-01-22-10-49" type="text/javascript"></script>
<script src="/igniteui/js/infragistics.lob.js?v=2025-01-22-10-49" type="text/javascript"></script>
<script src="/igniteui/js/infragistics.dv.js?v=2025-01-22-10-49" type="text/javascript"></script>
</head>
<body>
@ -32,7 +32,6 @@ How does user/business value decay over time?
• Is there a fixed deadline?
• Will they wait for us or move to another Solution?
• What is the current effect on Customer satisfaction?
</div>
</div>
<div class="navbar-collapse collapse">
@ -60,7 +59,7 @@ How does user/business value decay over time?
<script>
$(document).ready(function () {
initIndex("/markdown/bugs-features-with-parents.json?v=2024-10-07-18-50", "https://oi-metrology-viewer-prod.mes.infineon.com:4438/api/v1/ado/", "time", "Critical", "Time Criticality", "/markdown/PI4-Results/time.json?v=2024-10-07-18-50");
initIndex("/markdown/bugs-features-with-parents.json?v=2025-01-22-10-49", "https://eaf-dev.mes.infineon.com/api/v1/ado/", "time", "Critical", "Time Criticality", "/markdown/PI5-Results/time.json?v=2025-01-22-10-49");
});
</script>

View File

@ -6,15 +6,15 @@
<meta name="viewport" content="width=device-width" />
<title>Infineon - User Stor(ies) with parents</title>
<link href="/styles/bootstrap.min.css?no-cache=2024-10-04-08-34" rel="stylesheet" />
<link href="/igniteui/css/themes/bootstrap3/default/infragistics.theme.css?v=2024-10-07-18-50" rel="stylesheet" />
<link href="/igniteui/css/structure/infragistics.css?v=2024-10-07-18-50" rel="stylesheet" />
<link href="/igniteui/css/themes/bootstrap3/default/infragistics.theme.css?v=2025-01-22-10-49" rel="stylesheet" />
<link href="/igniteui/css/structure/infragistics.css?v=2025-01-22-10-49" rel="stylesheet" />
<link href="/styles/with-parents.css?no-cache=2024-10-04-08-34" rel="stylesheet" />
<script src="/js/jquery-3.6.0.min.js?v=2024-10-07-18-50" type="text/javascript"></script>
<script src="/js/jquery-ui.min.js?v=2024-10-07-18-50" type="text/javascript"></script>
<script src="/js/with-parents.js?v=2024-10-07-18-50" type="text/javascript"></script>
<script src="/igniteui/js/infragistics.core.js?v=2024-10-07-18-50" type="text/javascript"></script>
<script src="/igniteui/js/infragistics.lob.js?v=2024-10-07-18-50" type="text/javascript"></script>
<script src="/igniteui/js/infragistics.dv.js?v=2024-10-07-18-50" type="text/javascript"></script>
<script src="/js/jquery-3.6.0.min.js?v=2025-01-22-10-49" type="text/javascript"></script>
<script src="/js/jquery-ui.min.js?v=2025-01-22-10-49" type="text/javascript"></script>
<script src="/js/with-parents.js?v=2025-01-22-10-49" type="text/javascript"></script>
<script src="/igniteui/js/infragistics.core.js?v=2025-01-22-10-49" type="text/javascript"></script>
<script src="/igniteui/js/infragistics.lob.js?v=2025-01-22-10-49" type="text/javascript"></script>
<script src="/igniteui/js/infragistics.dv.js?v=2025-01-22-10-49" type="text/javascript"></script>
</head>
<body>
@ -55,7 +55,7 @@
<script>
$(document).ready(function () {
initIndex("/markdown/bugs-user-stories-with-parents.json?v=2024-10-07-18-50");
initIndex("/markdown/bugs-user-stories-with-parents.json?v=2025-01-22-10-49");
});
</script>

View File

@ -0,0 +1,64 @@
<!DOCTYPE html>
<html>
<head>
<meta charset="utf-8" />
<meta name="viewport" content="width=device-width" />
<title>Infineon - Result of Weightest Shortest Job First calculation (see @SCALE formula)</title>
<link href="/styles/bootstrap.min.css?no-cache=2024-10-04-08-34" rel="stylesheet" />
<link href="/igniteui/css/themes/bootstrap3/default/infragistics.theme.css?v=2025-01-22-10-49" rel="stylesheet" />
<link href="/igniteui/css/structure/infragistics.css?v=2025-01-22-10-49" rel="stylesheet" />
<link href="/styles/wsjf.css?no-cache=2024-10-04-08-34" rel="stylesheet" />
<script src="/js/jquery-3.6.0.min.js?v=2025-01-22-10-49" type="text/javascript"></script>
<script src="/js/jquery-ui.min.js?v=2025-01-22-10-49" type="text/javascript"></script>
<script src="/js/wsjf-b.js?v=2025-01-22-10-49" type="text/javascript"></script>
<script src="/igniteui/js/infragistics.core.js?v=2025-01-22-10-49" type="text/javascript"></script>
<script src="/igniteui/js/infragistics.lob.js?v=2025-01-22-10-49" type="text/javascript"></script>
<script src="/igniteui/js/infragistics.dv.js?v=2025-01-22-10-49" type="text/javascript"></script>
</head>
<body>
<div class="navbar navbar-fixed-top">
<div class="container-fluid">
<div class="navbar-header">
<button type="button" class="navbar-toggle" data-toggle="collapse" data-target=".navbar-collapse">
<span class="icon-bar"></span>
<span class="icon-bar"></span>
<span class="icon-bar"></span>
</button>
<div class="navbar-brand">
<span id="siteHeader">&nbsp;</span> - Result of Weightest Shortest Job First calculation (see @SCALE formula)
</div>
</div>
<div class="navbar-collapse collapse">
<ul class="nav navbar-nav">
</ul>
<p class="navbar-text navbar-right">
&nbsp;
</p>
</div>
</div>
</div>
<div class="container-fluid body-content" style="margin-top: 40px; margin-left: 15px;">
<div style="height: 550px;" id="HeaderGridDiv">
<table id="HeaderGrid" border="1"></table>
</div>
<br />&nbsp;
<div id="AllGridDiv">
<table id="AllGrid"></table>
</div>
</div>
<script>
$(document).ready(function () {
initIndex("/markdown/bugs-features-with-parents.json?v=2025-01-22-10-49", "https://eaf-dev.mes.infineon.com/api/v1/ado/");
});
</script>
</body>
</html>

View File

@ -0,0 +1,34 @@
<!DOCTYPE html>
<html>
<head>
<title>SignalR Simple Chat</title>
<style type="text/css">
.container {
background-color: #99CCFF;
border: thick solid #808080;
padding: 20px;
margin: 20px;
}
</style>
</head>
<body>
<div class="container">
<input type="text" id="message" />
<input type="button" id="sendmessage" value="Send" />
<input type="hidden" id="displayname" />
<ul id="discussion"></ul>
</div>
<script src="/js/scripts/jquery-1.6.4.min.js"></script>
<script src="/js/scripts/jquery.signalR-2.4.3.min.js"></script>
<script src="/signalr/hubs"></script>
<script src="/js/wsjf-c.js?v=2025-01-22-10-49" type="text/javascript"></script>
<script>
$(document).ready(function () {
initIndex("/signalr", "https://eaf-dev.mes.infineon.com/api/v1/ado/");
});
</script>
</body>
</html>
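This "SignalR Simple Chat" page loads jquery.signalR-2.4.3 and the generated proxy script at /signalr/hubs, with the page logic in wsjf-c.js (not shown in this compare). A hypothetical sketch of how such a page typically talks to a SignalR 2.x hub through the generated proxy; the hub name "chatHub" and the addMessage/send methods are placeholders, not taken from this repository.

// Hypothetical SignalR 2.x client using the generated proxy (/signalr/hubs).
// "chatHub", "addMessage", and "send" are illustrative names only; the real
// hub and client wiring live in wsjf-c.js and the server, outside this diff.
$(function () {
    var chat = $.connection.chatHub;                  // generated hub proxy
    chat.client.addMessage = function (name, message) {
        $("#discussion").append("<li>" + name + ": " + message + "</li>");
    };
    $.connection.hub.start().done(function () {
        $("#sendmessage").click(function () {
            chat.server.send($("#displayname").val(), $("#message").val());
        });
    });
});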

View File

@ -6,15 +6,15 @@
<meta name="viewport" content="width=device-width" />
<title>Infineon - Result of Weightest Shortest Job First calculation (see @SCALE formula)</title>
<link href="/styles/bootstrap.min.css?no-cache=2024-10-04-08-34" rel="stylesheet" />
<link href="/igniteui/css/themes/bootstrap3/default/infragistics.theme.css?v=2024-10-07-18-50" rel="stylesheet" />
<link href="/igniteui/css/structure/infragistics.css?v=2024-10-07-18-50" rel="stylesheet" />
<link href="/igniteui/css/themes/bootstrap3/default/infragistics.theme.css?v=2025-01-22-10-49" rel="stylesheet" />
<link href="/igniteui/css/structure/infragistics.css?v=2025-01-22-10-49" rel="stylesheet" />
<link href="/styles/wsjf.css?no-cache=2024-10-04-08-34" rel="stylesheet" />
<script src="/js/jquery-3.6.0.min.js?v=2024-10-07-18-50" type="text/javascript"></script>
<script src="/js/jquery-ui.min.js?v=2024-10-07-18-50" type="text/javascript"></script>
<script src="/js/wsjf.js?v=2024-10-07-18-50" type="text/javascript"></script>
<script src="/igniteui/js/infragistics.core.js?v=2024-10-07-18-50" type="text/javascript"></script>
<script src="/igniteui/js/infragistics.lob.js?v=2024-10-07-18-50" type="text/javascript"></script>
<script src="/igniteui/js/infragistics.dv.js?v=2024-10-07-18-50" type="text/javascript"></script>
<script src="/js/jquery-3.6.0.min.js?v=2025-01-22-10-49" type="text/javascript"></script>
<script src="/js/jquery-ui.min.js?v=2025-01-22-10-49" type="text/javascript"></script>
<script src="/js/wsjf.js?v=2025-01-22-10-49" type="text/javascript"></script>
<script src="/igniteui/js/infragistics.core.js?v=2025-01-22-10-49" type="text/javascript"></script>
<script src="/igniteui/js/infragistics.lob.js?v=2025-01-22-10-49" type="text/javascript"></script>
<script src="/igniteui/js/infragistics.dv.js?v=2025-01-22-10-49" type="text/javascript"></script>
</head>
<body>
@ -55,7 +55,7 @@
<script>
$(document).ready(function () {
initIndex("/markdown/bugs-features-with-parents.json?v=2024-10-07-18-50", "https://oi-metrology-viewer-prod.mes.infineon.com:4438/api/v1/ado/");
initIndex("/markdown/bugs-features-with-parents.json?v=2025-01-22-10-49", "https://eaf-dev.mes.infineon.com/api/v1/ado/");
});
</script>

View File

@ -1,5 +1,5 @@
function compareFunction(a, b) {
if (a.CoD === null || b.CoD === null) {
if (a.CoD == undefined || b.CoD == undefined) {
return b.Id - a.Id;
} else {
return b.CoD - a.CoD || b.State[0] - a.State[0] || b.ParentId - a.ParentId || a.Id - b.Id;
@ -7,12 +7,12 @@ function compareFunction(a, b) {
}
function showOne(rowData) {
if (rowData == null)
if (rowData == undefined)
return;
var data = [];
data.push({ name: "Edit in ADO", value: '<a target="_blank" href="https://tfs.intra.infineon.com/tfs/FactoryIntegration/ART%20SPS/_workitems/edit/' + rowData["Id"] + '">Edit in ADO ' + rowData["Id"] + '</a>' });
for (const property in rowData) {
if (rowData[property] == null)
if (rowData[property] == undefined)
continue;
data.push({ name: property, value: rowData[property].toString() });
}
@ -26,7 +26,7 @@ function showOne(rowData) {
function loadOne() {
var selectedRow = $("#HeaderGrid").data("igGridSelection").selectedRow();
if (selectedRow == null)
if (selectedRow == undefined)
return;
var rowData = $("#HeaderGrid").data("igGrid").dataSource.dataView()[selectedRow.index];
showOne(rowData);
@ -41,7 +41,7 @@ function detailSelectionChangedRunInfo(evt, ui) {
function getState(state) {
var result;
if (state == null)
if (state == undefined)
result = "9-Null";
else if (state === "New")
result = `1-${state}`;
@ -62,7 +62,7 @@ function getPriority(workItemType, priority) {
var result;
if (workItemType === "Bug")
result = "0-Bug";
else if (priority == null || priority === 0)
else if (priority == undefined || priority === 0)
result = "9-Null";
else if (priority === 1)
result = `${priority}-High`;
@ -78,14 +78,14 @@ function getPriority(workItemType, priority) {
}
function updateRecordCoD(workItem) {
if (workItem !== null) {
if (workItem.Effort === null)
if (workItem != undefined) {
if (workItem.Effort == undefined)
workItem.Effort = 10123;
if (workItem.BusinessValue === null)
if (workItem.BusinessValue == undefined)
workItem.BusinessValue = 99999;
if (workItem.TimeCriticality === null)
if (workItem.TimeCriticality == undefined)
workItem.TimeCriticality = 99999;
if (workItem.RiskReductionMinusOpportunityEnablement === null)
if (workItem.RiskReductionMinusOpportunityEnablement == undefined)
workItem.RiskReductionMinusOpportunityEnablement = 99999;
workItem.CoD = workItem.RiskReductionMinusOpportunityEnablement + workItem.TimeCriticality + workItem.BusinessValue;
}
@ -97,7 +97,7 @@ function updateRecordOther(workItem) {
}
function updateRecordParent(parent, workItem) {
if (parent === null) {
if (parent == undefined) {
workItem["ParentId"] = 9999999;
workItem["ParentTitle"] = null;
workItem["ParentState"] = null;

View File

@ -1,5 +1,5 @@
function compareFunction(a, b) {
if (a.CoD === null || b.CoD === null) {
if (a.CoD == undefined || b.CoD == undefined) {
return b.Id - a.Id;
} else {
return b.CoD - a.CoD || b.State[0] - a.State[0] || b.ParentId - a.ParentId || a.Id - b.Id;
@ -7,12 +7,12 @@ function compareFunction(a, b) {
}
function showOne(rowData) {
if (rowData == null)
if (rowData == undefined)
return;
var data = [];
data.push({ name: "Edit in ADO", value: '<a target="_blank" href="https://tfs.intra.infineon.com/tfs/FactoryIntegration/ART%20SPS/_workitems/edit/' + rowData["Id"] + '">Edit in ADO ' + rowData["Id"] + '</a>' });
for (const property in rowData) {
if (rowData[property] == null)
if (rowData[property] == undefined)
continue;
data.push({ name: property, value: rowData[property].toString() });
}
@ -26,7 +26,7 @@ function showOne(rowData) {
function loadOne() {
var selectedRow = $("#HeaderGrid").data("igGridSelection").selectedRow();
if (selectedRow == null)
if (selectedRow == undefined)
return;
var rowData = $("#HeaderGrid").data("igGrid").dataSource.dataView()[selectedRow.index];
showOne(rowData);
@ -41,7 +41,7 @@ function detailSelectionChangedRunInfo(evt, ui) {
function getState(state) {
var result;
if (state == null)
if (state == undefined)
result = "9-Null";
else if (state === "New")
result = `1-${state}`;
@ -62,7 +62,7 @@ function getPriority(workItemType, priority) {
var result;
if (workItemType === "Bug")
result = "0-Bug";
else if (priority == null || priority === 0)
else if (priority == undefined || priority === 0)
result = "9-Null";
else if (priority === 1)
result = `${priority}-High`;
@ -78,14 +78,14 @@ function getPriority(workItemType, priority) {
}
function updateRecordCoD(workItem) {
if (workItem !== null) {
if (workItem.Effort === null)
if (workItem != undefined) {
if (workItem.Effort == undefined)
workItem.Effort = 10123;
if (workItem.BusinessValue === null)
if (workItem.BusinessValue == undefined)
workItem.BusinessValue = 99999;
if (workItem.TimeCriticality === null)
if (workItem.TimeCriticality == undefined)
workItem.TimeCriticality = 99999;
if (workItem.RiskReductionMinusOpportunityEnablement === null)
if (workItem.RiskReductionMinusOpportunityEnablement == undefined)
workItem.RiskReductionMinusOpportunityEnablement = 99999;
workItem.CoD = workItem.RiskReductionMinusOpportunityEnablement + workItem.TimeCriticality + workItem.BusinessValue;
}
@ -97,7 +97,7 @@ function updateRecordOther(workItem) {
}
function updateRecordParent(parent, workItem) {
if (parent === null) {
if (parent == undefined) {
workItem["ParentId"] = 9999999;
workItem["ParentTitle"] = null;
workItem["ParentState"] = null;

View File

@ -1,5 +1,5 @@
function compareFunction(a, b) {
if (a.CoD === null || b.CoD === null) {
if (a.CoD == undefined || b.CoD == undefined) {
return b.Id - a.Id;
} else {
return b.CoD - a.CoD || b.State[0] - a.State[0] || b.ParentId - a.ParentId || a.Id - b.Id;
@ -7,12 +7,12 @@ function compareFunction(a, b) {
}
function showOne(rowData) {
if (rowData == null)
if (rowData == undefined)
return;
var data = [];
data.push({ name: "Edit in ADO", value: '<a target="_blank" href="https://tfs.intra.infineon.com/tfs/FactoryIntegration/ART%20SPS/_workitems/edit/' + rowData["Id"] + '">Edit in ADO ' + rowData["Id"] + '</a>' });
for (const property in rowData) {
if (rowData[property] == null)
if (rowData[property] == undefined)
continue;
data.push({ name: property, value: rowData[property].toString() });
}
@ -26,7 +26,7 @@ function showOne(rowData) {
function loadOne() {
var selectedRow = $("#HeaderGrid").data("igGridSelection").selectedRow();
if (selectedRow == null)
if (selectedRow == undefined)
return;
var rowData = $("#HeaderGrid").data("igGrid").dataSource.dataView()[selectedRow.index];
showOne(rowData);
@ -41,7 +41,7 @@ function detailSelectionChangedRunInfo(evt, ui) {
function getState(state) {
var result;
if (state == null)
if (state == undefined)
result = "9-Null";
else if (state === "New")
result = `1-${state}`;
@ -62,7 +62,7 @@ function getPriority(workItemType, priority) {
var result;
if (workItemType === "Bug")
result = "0-Bug";
else if (priority == null || priority === 0)
else if (priority == undefined || priority === 0)
result = "9-Null";
else if (priority === 1)
result = `${priority}-High`;
@ -78,14 +78,14 @@ function getPriority(workItemType, priority) {
}
function updateRecordCoD(workItem) {
if (workItem !== null) {
if (workItem.Effort === null)
if (workItem != undefined) {
if (workItem.Effort == undefined)
workItem.Effort = 10123;
if (workItem.BusinessValue === null)
if (workItem.BusinessValue == undefined)
workItem.BusinessValue = 99999;
if (workItem.TimeCriticality === null)
if (workItem.TimeCriticality == undefined)
workItem.TimeCriticality = 99999;
if (workItem.RiskReductionMinusOpportunityEnablement === null)
if (workItem.RiskReductionMinusOpportunityEnablement == undefined)
workItem.RiskReductionMinusOpportunityEnablement = 99999;
workItem.CoD = workItem.RiskReductionMinusOpportunityEnablement + workItem.TimeCriticality + workItem.BusinessValue;
}
@ -97,7 +97,7 @@ function updateRecordOther(workItem) {
}
function updateRecordParent(parent, workItem) {
if (parent === null) {
if (parent == undefined) {
workItem["ParentId"] = 9999999;
workItem["ParentTitle"] = null;
workItem["ParentState"] = null;

View File

@ -1,5 +1,5 @@
function compareFunction(a, b) {
if (a.CoD === null || b.CoD === null) {
if (a.CoD == undefined || b.CoD == undefined) {
return b.Id - a.Id;
} else {
return b.CoD - a.CoD || b.State[0] - a.State[0] || b.ParentId - a.ParentId || a.Id - b.Id;
@ -7,12 +7,12 @@ function compareFunction(a, b) {
}
function showOne(rowData) {
if (rowData == null)
if (rowData == undefined)
return;
var data = [];
data.push({ name: "Edit in ADO", value: '<a target="_blank" href="https://tfs.intra.infineon.com/tfs/FactoryIntegration/ART%20SPS/_workitems/edit/' + rowData["Id"] + '">Edit in ADO ' + rowData["Id"] + '</a>' });
for (const property in rowData) {
if (rowData[property] == null)
if (rowData[property] == undefined)
continue;
data.push({ name: property, value: rowData[property].toString() });
}
@ -26,7 +26,7 @@ function showOne(rowData) {
function loadOne() {
var selectedRow = $("#HeaderGrid").data("igGridSelection").selectedRow();
if (selectedRow == null)
if (selectedRow == undefined)
return;
var rowData = $("#HeaderGrid").data("igGrid").dataSource.dataView()[selectedRow.index];
showOne(rowData);
@ -41,7 +41,7 @@ function detailSelectionChangedRunInfo(evt, ui) {
function getState(state) {
var result;
if (state == null)
if (state == undefined)
result = "9-Null";
else if (state === "New")
result = `1-${state}`;
@ -62,7 +62,7 @@ function getPriority(workItemType, priority) {
var result;
if (workItemType === "Bug")
result = "0-Bug";
else if (priority == null || priority === 0)
else if (priority == undefined || priority === 0)
result = "9-Null";
else if (priority === 1)
result = `${priority}-High`;
@ -78,14 +78,14 @@ function getPriority(workItemType, priority) {
}
function updateRecordCoD(workItem) {
if (workItem !== null) {
if (workItem.Effort === null)
if (workItem != undefined) {
if (workItem.Effort == undefined)
workItem.Effort = 10123;
if (workItem.BusinessValue === null)
if (workItem.BusinessValue == undefined)
workItem.BusinessValue = 99999;
if (workItem.TimeCriticality === null)
if (workItem.TimeCriticality == undefined)
workItem.TimeCriticality = 99999;
if (workItem.RiskReductionMinusOpportunityEnablement === null)
if (workItem.RiskReductionMinusOpportunityEnablement == undefined)
workItem.RiskReductionMinusOpportunityEnablement = 99999;
workItem.CoD = workItem.RiskReductionMinusOpportunityEnablement + workItem.TimeCriticality + workItem.BusinessValue;
}
@ -97,7 +97,7 @@ function updateRecordOther(workItem) {
}
function updateRecordParent(parent, workItem) {
if (parent === null) {
if (parent == undefined) {
workItem["ParentId"] = 9999999;
workItem["ParentTitle"] = null;
workItem["ParentState"] = null;

View File

@ -1,5 +1,5 @@
function compareFunction(a, b) {
if (a.CoD === null || b.CoD === null) {
if (a.CoD == undefined || b.CoD == undefined) {
return b.Id - a.Id;
} else {
return b.CoD - a.CoD || b.State[0] - a.State[0] || b.ParentId - a.ParentId || a.Id - b.Id;
@ -7,12 +7,12 @@ function compareFunction(a, b) {
}
function showOne(rowData) {
if (rowData == null)
if (rowData == undefined)
return;
var data = [];
data.push({ name: "Edit in ADO", value: '<a target="_blank" href="https://tfs.intra.infineon.com/tfs/FactoryIntegration/ART%20SPS/_workitems/edit/' + rowData["Id"] + '">Edit in ADO ' + rowData["Id"] + '</a>' });
for (const property in rowData) {
if (rowData[property] == null)
if (rowData[property] == undefined)
continue;
data.push({ name: property, value: rowData[property].toString() });
}
@ -26,7 +26,7 @@ function showOne(rowData) {
function loadOne() {
var selectedRow = $("#HeaderGrid").data("igGridSelection").selectedRow();
if (selectedRow == null)
if (selectedRow == undefined)
return;
var rowData = $("#HeaderGrid").data("igGrid").dataSource.dataView()[selectedRow.index];
showOne(rowData);
@ -41,7 +41,7 @@ function detailSelectionChangedRunInfo(evt, ui) {
function getState(state) {
var result;
if (state == null)
if (state == undefined)
result = "9-Null";
else if (state === "New")
result = `1-${state}`;
@ -62,7 +62,7 @@ function getPriority(workItemType, priority) {
var result;
if (workItemType === "Bug")
result = "0-Bug";
else if (priority == null || priority === 0)
else if (priority == undefined || priority === 0)
result = "9-Null";
else if (priority === 1)
result = `${priority}-High`;
@ -78,14 +78,14 @@ function getPriority(workItemType, priority) {
}
function updateRecordCoD(workItem) {
if (workItem !== null) {
if (workItem.Effort === null)
if (workItem != undefined) {
if (workItem.Effort == undefined)
workItem.Effort = 10123;
if (workItem.BusinessValue === null)
if (workItem.BusinessValue == undefined)
workItem.BusinessValue = 99999;
if (workItem.TimeCriticality === null)
if (workItem.TimeCriticality == undefined)
workItem.TimeCriticality = 99999;
if (workItem.RiskReductionMinusOpportunityEnablement === null)
if (workItem.RiskReductionMinusOpportunityEnablement == undefined)
workItem.RiskReductionMinusOpportunityEnablement = 99999;
workItem.CoD = workItem.RiskReductionMinusOpportunityEnablement + workItem.TimeCriticality + workItem.BusinessValue;
}
@ -97,7 +97,7 @@ function updateRecordOther(workItem) {
}
function updateRecordParent(parent, workItem) {
if (parent === null) {
if (parent == undefined) {
workItem["ParentId"] = 9999999;
workItem["ParentTitle"] = null;
workItem["ParentState"] = null;

View File

@ -1,5 +1,5 @@
function compareFunction(a, b) {
if (a.CoD === null || b.CoD === null) {
if (a.CoD == undefined || b.CoD == undefined) {
return b.Id - a.Id;
} else {
return b.CoD - a.CoD || b.State[0] - a.State[0] || b.ParentId - a.ParentId || a.Id - b.Id;
@ -7,12 +7,12 @@ function compareFunction(a, b) {
}
function showOne(rowData) {
if (rowData == null)
if (rowData == undefined)
return;
var data = [];
data.push({ name: "Edit in ADO", value: '<a target="_blank" href="https://tfs.intra.infineon.com/tfs/FactoryIntegration/ART%20SPS/_workitems/edit/' + rowData["Id"] + '">Edit in ADO ' + rowData["Id"] + '</a>' });
for (const property in rowData) {
if (rowData[property] == null)
if (rowData[property] == undefined)
continue;
data.push({ name: property, value: rowData[property].toString() });
}
@ -26,7 +26,7 @@ function showOne(rowData) {
function loadOne() {
var selectedRow = $("#HeaderGrid").data("igGridSelection").selectedRow();
if (selectedRow == null)
if (selectedRow == undefined)
return;
var rowData = $("#HeaderGrid").data("igGrid").dataSource.dataView()[selectedRow.index];
showOne(rowData);
@ -41,7 +41,7 @@ function detailSelectionChangedRunInfo(evt, ui) {
function getState(state) {
var result;
if (state == null)
if (state == undefined)
result = "9-Null";
else if (state === "New")
result = `1-${state}`;
@ -62,7 +62,7 @@ function getPriority(workItemType, priority) {
var result;
if (workItemType === "Bug")
result = "0-Bug";
else if (priority == null || priority === 0)
else if (priority == undefined || priority === 0)
result = "9-Null";
else if (priority === 1)
result = `${priority}-High`;
@ -78,14 +78,14 @@ function getPriority(workItemType, priority) {
}
function updateRecordCoD(workItem) {
if (workItem !== null) {
if (workItem.Effort === null)
if (workItem != undefined) {
if (workItem.Effort == undefined)
workItem.Effort = 10123;
if (workItem.BusinessValue === null)
if (workItem.BusinessValue == undefined)
workItem.BusinessValue = 99999;
if (workItem.TimeCriticality === null)
if (workItem.TimeCriticality == undefined)
workItem.TimeCriticality = 99999;
if (workItem.RiskReductionMinusOpportunityEnablement === null)
if (workItem.RiskReductionMinusOpportunityEnablement == undefined)
workItem.RiskReductionMinusOpportunityEnablement = 99999;
workItem.CoD = workItem.RiskReductionMinusOpportunityEnablement + workItem.TimeCriticality + workItem.BusinessValue;
}
@ -97,7 +97,7 @@ function updateRecordOther(workItem) {
}
function updateRecordParent(parent, workItem) {
if (parent === null) {
if (parent == undefined) {
workItem["ParentId"] = 9999999;
workItem["ParentTitle"] = null;
workItem["ParentState"] = null;

View File

@ -1,22 +1,22 @@
var _apiUrl = null;
function compareFunction(a, b) {
if (a.BusinessValue === null || b.BusinessValue === null) {
if (a.BusinessValue == undefined || b.BusinessValue == undefined) {
var aPollValue = a.PollValue.split('-');
var bPollValue = b.PollValue.split('-');
return bPollValue[0].trim() - aPollValue[0].trim() || b.State[0] - a.State[0] || bPollValue[bPollValue.length - 1].trim()[0] - aPollValue[aPollValue.length - 1].trim()[0] || b.ParentId - a.ParentId || a.Id - b.Id;
return bPollValue[0] - aPollValue[0] || b.State[0] - a.State[0] || bPollValue[bPollValue.length - 1].trim()[0] - aPollValue[aPollValue.length - 1].trim()[0] || b.ParentId - a.ParentId || a.Id - b.Id;
} else {
return b.BusinessValue - a.BusinessValue || b.State[0] - a.State[0] || b.ParentId - a.ParentId || a.Id - b.Id;
}
}
function showOne(rowData) {
if (rowData == null)
if (rowData == undefined)
return;
var data = [];
data.push({ name: "Edit in ADO", value: '<a target="_blank" href="https://tfs.intra.infineon.com/tfs/FactoryIntegration/ART%20SPS/_workitems/edit/' + rowData["Id"] + '">Edit in ADO ' + rowData["Id"] + '</a>' });
for (const property in rowData) {
if (rowData[property] == null)
if (rowData[property] == undefined)
continue;
data.push({ name: property, value: rowData[property].toString() });
}
@ -30,7 +30,7 @@ function showOne(rowData) {
function loadOne() {
var selectedRow = $("#HeaderGrid").data("igGridSelection").selectedRow();
if (selectedRow == null)
if (selectedRow == undefined)
return;
var rowData = $("#HeaderGrid").data("igGrid").dataSource.dataView()[selectedRow.index];
showOne(rowData);
@ -45,7 +45,7 @@ function detailSelectionChangedRunInfo(evt, ui) {
function getState(state) {
var result;
if (state == null)
if (state == undefined)
result = "9-Null";
else if (state === "New")
result = `1-${state}`;
@ -66,7 +66,7 @@ function getPriority(workItemType, priority) {
var result;
if (workItemType === "Bug")
result = "0-Bug";
else if (priority == null || priority === 0)
else if (priority == undefined || priority === 0)
result = "9-Null";
else if (priority === 1)
result = `${priority}-High`;
@ -81,45 +81,34 @@ function getPriority(workItemType, priority) {
return result;
}
function getInversePriority(priority) {
function getPollValue(description, pollValue) {
var result;
if (priority == null || priority === 0)
result = "0.000";
else if (priority === 1)
result = "3.000";
else if (priority === 2)
result = "2.000";
else if (priority === 3)
result = "1.000";
if (pollValue == undefined || pollValue.BusinessValue == undefined || pollValue.BusinessValue.InverseAverage == undefined)
result = "";
else if (pollValue.BusinessValue.InverseAverage >= 4)
result = `${pollValue.BusinessValue.InverseAverage} - 1-Highest (Most ${description}) - ${pollValue.Count} Vote(s)`;
else if (pollValue.BusinessValue.InverseAverage >= 3)
result = `${pollValue.BusinessValue.InverseAverage} - 2-High - ${pollValue.Count} Vote(s)`;
else if (pollValue.BusinessValue.InverseAverage >= 2)
result = `${pollValue.BusinessValue.InverseAverage} - 3-Medium - ${pollValue.Count} Vote(s)`;
else if (pollValue.BusinessValue.InverseAverage >= 1)
result = `${pollValue.BusinessValue.InverseAverage} - 4-Low - ${pollValue.Count} Vote(s)`;
else if (pollValue.BusinessValue.InverseAverage >= 0)
result = `${pollValue.BusinessValue.InverseAverage} - 5-Lowest - ${pollValue.Count} Vote(s)`;
else
result = "0.000";
return result;
}
function getPollValue(description, priority, priorityDisplay, pollValue) {
var result;
if (pollValue === undefined || pollValue.Records.length === undefined || pollValue.Records.length === 0 || pollValue.Average === null)
result = getInversePriority(priority) + ' - ' + priorityDisplay + ' - *Priority';
else if (pollValue.Average > 2)
result = `${pollValue.Average} - 1-High (Most ${description}) - ${pollValue.Count} Vote(s)`;
else if (pollValue.Average > 1)
result = `${pollValue.Average} - 2-Medium - ${pollValue.Count} Vote(s)`;
else if (pollValue.Average > 0)
result = `${pollValue.Average} - 3-Low - ${pollValue.Count} Vote(s)`;
else
result = getInversePriority(priority) + ' - ' + priorityDisplay + ' - *Priority';
result = "";
return result;
}
function updateRecordCoD(workItem) {
if (workItem !== null) {
if (workItem.Effort === null)
if (workItem != undefined) {
if (workItem.Effort == undefined)
workItem.Effort = 10123;
if (workItem.BusinessValue === null)
if (workItem.BusinessValue == undefined)
workItem.BusinessValue = 99999;
if (workItem.TimeCriticality === null)
if (workItem.TimeCriticality == undefined)
workItem.TimeCriticality = 99999;
if (workItem.RiskReductionMinusOpportunityEnablement === null)
if (workItem.RiskReductionMinusOpportunityEnablement == undefined)
workItem.RiskReductionMinusOpportunityEnablement = 99999;
workItem.CoD = workItem.RiskReductionMinusOpportunityEnablement + workItem.TimeCriticality + workItem.BusinessValue;
}
@ -128,12 +117,12 @@ function updateRecordCoD(workItem) {
function updateRecordOther(workItem, dataB, description) {
workItem["State"] = getState(workItem["State"]);
var priority = getPriority(workItem["WorkItemType"], workItem["Priority"]);
workItem["PollValue"] = getPollValue(description, workItem["Priority"], priority, dataB[workItem.Id]);
workItem["PollValue"] = getPollValue(description, dataB[workItem.Id]);
workItem["Priority"] = priority;
}
function updateRecordParent(parent, workItem) {
if (parent === null) {
if (parent == undefined) {
workItem["ParentId"] = 9999999;
workItem["ParentTitle"] = null;
workItem["ParentState"] = null;
@ -183,13 +172,46 @@ function sendValue(element, page, id) {
});
}
function getFibonacciValue(average) {
var result;
if (average >= 7)
result = 34;
else if (average >= 6)
result = 21;
else if (average >= 5)
result = 13;
else if (average >= 4)
result = 8;
else if (average >= 3)
result = 5;
else if (average >= 2)
result = 3;
else if (average >= 1)
result = 2;
else if (average >= 0)
result = 1;
else
result = "";
return result;
}
function setRecords(workItems, page, description, th) {
var record;
var html = "<tr><th>Parent Id</th><th>Parent Title</th><th>Id</th><th>Requester</th><th>Title</th><th>Assigned To</th><th>System(s)</th><th>State</th><th>Value</th><th>Up</th><th>Down</th><th>Poll Value</th><th>" + th + "</th></tr>";
var array = [];
var count = "";
var select = "";
var average = "";
var fibonacciValue = "";
var html = "<tr><th>Parent Id</th><th>Parent Title</th><th>Id</th><th>Requester</th><th>Title</th><th>Assigned To</th><th>System(s)</th><th>State</th><th>Value</th><th>Poll Average</th><th>Fibonacci</th><th>Poll Description</th><th>Vote(s)</th><th>" + th + "</th><th>Up</th><th>Down</th></tr>";
const element = document.getElementById("HeaderGrid");
for (var i = 0; i < workItems.length; i++) {
record = workItems[i];
var length = record.BusinessValue === null ? "" : record.BusinessValue.toString().length;
array = record.PollValue.split('-')
average = array.length > 0 ? array[0] : "";
fibonacciValue = getFibonacciValue(average);
select = array.length > 2 ? array[1] + '-' + array[2] : "";
count = array.length > 3 ? array[3].trim().split(' ')[0] : "";
var length = record.BusinessValue == undefined ? "" : record.BusinessValue.toString().length;
html += "<tr><td>" + '<a target="_blank" href="https://tfs.intra.infineon.com/tfs/FactoryIntegration/ART%20SPS/_workitems/edit/' + record.ParentId + '">' + record.ParentId + "</a>" +
"</td><td>" + record.ParentTitle +
"</td><td>" + '<a target="_blank" href="https://tfs.intra.infineon.com/tfs/FactoryIntegration/ART%20SPS/_workitems/edit/' + record.Id + '">' + record.Id + "</a>" +
@ -201,14 +223,19 @@ function setRecords(workItems, page, description, th) {
"</td><td>" +
'<select onchange="sendValue(this, \'' + page + '\', ' + record.Id + ')">' +
'<option value="9">Unknown</option>' +
'<option value="1">High (Most ' + description + ')</option>' +
'<option value="2">Medium</option>' +
'<option value="3">Low</option>' +
'<option value="1">Highest (Most ' + description + ')</option>' +
'<option value="2">High</option>' +
'<option value="3">Medium</option>' +
'<option value="4">Low</option>' +
'<option value="5">Lowest</option>' +
"</select>" +
"</td><td>" + average +
"</td><td>" + fibonacciValue +
"</td><td>" + select +
"</td><td>" + count +
"</td><td>" + length + " - " + record.BusinessValue +
"</td><td><a href='#' class='up'>Up</a>" +
"</td><td><a href='#' class='down'>Down</a>" +
"</td><td>" + record.PollValue +
"</td><td>" + length + " - " + record.BusinessValue +
"</td></tr>";
}
element.innerHTML = html.replaceAll(">null<", ">&nbsp;<");
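With this change, setRecords splits the PollValue string to recover the poll average, its description, and the vote count, and the new getFibonacciValue buckets the average onto a Fibonacci scale for the added "Fibonacci" column. A minimal sketch of that mapping using the thresholds added in this diff; the sample poll string is made up for illustration.

// Illustrative use of the new PollValue format and the getFibonacciValue
// thresholds introduced in this diff; the sample string is hypothetical.
function getFibonacciValue(average) {
    if (average >= 7) return 34;
    if (average >= 6) return 21;
    if (average >= 5) return 13;
    if (average >= 4) return 8;
    if (average >= 3) return 5;
    if (average >= 2) return 3;
    if (average >= 1) return 2;
    if (average >= 0) return 1;
    return "";
}
const pollValue = "3.5 - 2-High - 4 Vote(s)";   // e.g. "average - label - votes"
const parts = pollValue.split('-');
const average = parts[0];                        // "3.5 " (coerced to 3.5)
console.log(getFibonacciValue(average));         // 5
console.log(parts[3].trim().split(' ')[0]);      // "4" votes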

File diff suppressed because one or more lines are too long

View File

@ -1,22 +1,22 @@
var _apiUrl = null;
function compareFunction(a, b) {
if (a.Effort === null || b.Effort === null) {
if (a.Effort == undefined || b.Effort == undefined) {
var aPollValue = a.PollValue.split('-');
var bPollValue = b.PollValue.split('-');
return bPollValue[0].trim() - aPollValue[0].trim() || b.State[0] - a.State[0] || bPollValue[bPollValue.length - 1].trim()[0] - aPollValue[aPollValue.length - 1].trim()[0] || b.ParentId - a.ParentId || a.Id - b.Id;
return bPollValue[0] - aPollValue[0] || b.State[0] - a.State[0] || bPollValue[bPollValue.length - 1].trim()[0] - aPollValue[aPollValue.length - 1].trim()[0] || b.ParentId - a.ParentId || a.Id - b.Id;
} else {
return b.Effort - a.Effort || b.State[0] - a.State[0] || b.ParentId - a.ParentId || a.Id - b.Id;
}
}
function showOne(rowData) {
if (rowData == null)
if (rowData == undefined)
return;
var data = [];
data.push({ name: "Edit in ADO", value: '<a target="_blank" href="https://tfs.intra.infineon.com/tfs/FactoryIntegration/ART%20SPS/_workitems/edit/' + rowData["Id"] + '">Edit in ADO ' + rowData["Id"] + '</a>' });
for (const property in rowData) {
if (rowData[property] == null)
if (rowData[property] == undefined)
continue;
data.push({ name: property, value: rowData[property].toString() });
}
@ -30,7 +30,7 @@ function showOne(rowData) {
function loadOne() {
var selectedRow = $("#HeaderGrid").data("igGridSelection").selectedRow();
if (selectedRow == null)
if (selectedRow == undefined)
return;
var rowData = $("#HeaderGrid").data("igGrid").dataSource.dataView()[selectedRow.index];
showOne(rowData);
@ -45,7 +45,7 @@ function detailSelectionChangedRunInfo(evt, ui) {
function getState(state) {
var result;
if (state == null)
if (state == undefined)
result = "9-Null";
else if (state === "New")
result = `1-${state}`;
@ -66,7 +66,7 @@ function getPriority(workItemType, priority) {
var result;
if (workItemType === "Bug")
result = "0-Bug";
else if (priority == null || priority === 0)
else if (priority == undefined || priority === 0)
result = "9-Null";
else if (priority === 1)
result = `${priority}-High`;
@ -81,45 +81,34 @@ function getPriority(workItemType, priority) {
return result;
}
function getInversePriority(priority) {
function getPollValue(description, pollValue) {
var result;
if (priority == null || priority === 0)
result = "0.000";
else if (priority === 1)
result = "3.000";
else if (priority === 2)
result = "2.000";
else if (priority === 3)
result = "1.000";
if (pollValue == undefined || pollValue.Effort == undefined || pollValue.Effort.InverseAverage == undefined)
result = "";
else if (pollValue.Effort.InverseAverage >= 4)
result = `${pollValue.Effort.InverseAverage} - 1-Highest (Most ${description}) - ${pollValue.Count} Vote(s)`;
else if (pollValue.Effort.InverseAverage >= 3)
result = `${pollValue.Effort.InverseAverage} - 2-High - ${pollValue.Count} Vote(s)`;
else if (pollValue.Effort.InverseAverage >= 2)
result = `${pollValue.Effort.InverseAverage} - 3-Medium - ${pollValue.Count} Vote(s)`;
else if (pollValue.Effort.InverseAverage >= 1)
result = `${pollValue.Effort.InverseAverage} - 4-Low - ${pollValue.Count} Vote(s)`;
else if (pollValue.Effort.InverseAverage >= 0)
result = `${pollValue.Effort.InverseAverage} - 5-Lowest - ${pollValue.Count} Vote(s)`;
else
result = "0.000";
return result;
}
function getPollValue(description, priority, priorityDisplay, pollValue) {
var result;
if (pollValue === undefined || pollValue.Records.length === undefined || pollValue.Records.length === 0 || pollValue.Average === null)
result = getInversePriority(priority) + ' - ' + priorityDisplay + ' - *Priority';
else if (pollValue.Average > 2)
result = `${pollValue.Average} - 1-High (Most ${description}) - ${pollValue.Count} Vote(s)`;
else if (pollValue.Average > 1)
result = `${pollValue.Average} - 2-Medium - ${pollValue.Count} Vote(s)`;
else if (pollValue.Average > 0)
result = `${pollValue.Average} - 3-Low - ${pollValue.Count} Vote(s)`;
else
result = getInversePriority(priority) + ' - ' + priorityDisplay + ' - *Priority';
result = "";
return result;
}
function updateRecordCoD(workItem) {
if (workItem !== null) {
if (workItem.Effort === null)
if (workItem != undefined) {
if (workItem.Effort == undefined)
workItem.Effort = 10123;
if (workItem.BusinessValue === null)
if (workItem.BusinessValue == undefined)
workItem.BusinessValue = 99999;
if (workItem.TimeCriticality === null)
if (workItem.TimeCriticality == undefined)
workItem.TimeCriticality = 99999;
if (workItem.RiskReductionMinusOpportunityEnablement === null)
if (workItem.RiskReductionMinusOpportunityEnablement == undefined)
workItem.RiskReductionMinusOpportunityEnablement = 99999;
workItem.CoD = workItem.RiskReductionMinusOpportunityEnablement + workItem.TimeCriticality + workItem.BusinessValue;
}
@ -128,12 +117,12 @@ function updateRecordCoD(workItem) {
function updateRecordOther(workItem, dataB, description) {
workItem["State"] = getState(workItem["State"]);
var priority = getPriority(workItem["WorkItemType"], workItem["Priority"]);
workItem["PollValue"] = getPollValue(description, workItem["Priority"], priority, dataB[workItem.Id]);
workItem["PollValue"] = getPollValue(description, dataB[workItem.Id]);
workItem["Priority"] = priority;
}
function updateRecordParent(parent, workItem) {
if (parent === null) {
if (parent == undefined) {
workItem["ParentId"] = 9999999;
workItem["ParentTitle"] = null;
workItem["ParentState"] = null;
@ -183,13 +172,46 @@ function sendValue(element, page, id) {
});
}
function getFibonacciValue(average) {
var result;
if (average >= 7)
result = 34;
else if (average >= 6)
result = 21;
else if (average >= 5)
result = 13;
else if (average >= 4)
result = 8;
else if (average >= 3)
result = 5;
else if (average >= 2)
result = 3;
else if (average >= 1)
result = 2;
else if (average >= 0)
result = 1;
else
result = "";
return result;
}
function setRecords(workItems, page, description, th) {
var record;
var html = "<tr><th>Parent Id</th><th>Parent Title</th><th>Id</th><th>Requester</th><th>Title</th><th>Assigned To</th><th>System(s)</th><th>State</th><th>Value</th><th>Up</th><th>Down</th><th>Poll Value</th><th>" + th + "</th></tr>";
var array = [];
var count = "";
var select = "";
var average = "";
var fibonacciValue = "";
var html = "<tr><th>Parent Id</th><th>Parent Title</th><th>Id</th><th>Requester</th><th>Title</th><th>Assigned To</th><th>System(s)</th><th>State</th><th>Value</th><th>Poll Average</th><th>Fibonacci</th><th>Poll Description</th><th>Vote(s)</th><th>" + th + "</th><th>Up</th><th>Down</th></tr>";
const element = document.getElementById("HeaderGrid");
for (var i = 0; i < workItems.length; i++) {
record = workItems[i];
var length = record.Effort === null ? "" : record.Effort.toString().length;
array = record.PollValue.split('-')
average = array.length > 0 ? array[0] : "";
fibonacciValue = getFibonacciValue(average);
select = array.length > 2 ? array[1] + '-' + array[2] : "";
count = array.length > 3 ? array[3].trim().split(' ')[0] : "";
var length = record.Effort == undefined ? "" : record.Effort.toString().length;
html += "<tr><td>" + '<a target="_blank" href="https://tfs.intra.infineon.com/tfs/FactoryIntegration/ART%20SPS/_workitems/edit/' + record.ParentId + '">' + record.ParentId + "</a>" +
"</td><td>" + record.ParentTitle +
"</td><td>" + '<a target="_blank" href="https://tfs.intra.infineon.com/tfs/FactoryIntegration/ART%20SPS/_workitems/edit/' + record.Id + '">' + record.Id + "</a>" +
@ -201,14 +223,19 @@ function setRecords(workItems, page, description, th) {
"</td><td>" +
'<select onchange="sendValue(this, \'' + page + '\', ' + record.Id + ')">' +
'<option value="9">Unknown</option>' +
'<option value="1">High (Most ' + description + ')</option>' +
'<option value="2">Medium</option>' +
'<option value="3">Low</option>' +
'<option value="1">Highest (Most ' + description + ')</option>' +
'<option value="2">High</option>' +
'<option value="3">Medium</option>' +
'<option value="4">Low</option>' +
'<option value="5">Lowest</option>' +
"</select>" +
"</td><td>" + average +
"</td><td>" + fibonacciValue +
"</td><td>" + select +
"</td><td>" + count +
"</td><td>" + length + " - " + record.Effort +
"</td><td><a href='#' class='up'>Up</a>" +
"</td><td><a href='#' class='down'>Down</a>" +
"</td><td>" + record.PollValue +
"</td><td>" + length + " - " + record.Effort +
"</td></tr>";
}
element.innerHTML = html.replaceAll(">null<", ">&nbsp;<");
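
A minimal sketch, assuming the "average - n-Label - count Vote(s)" string format produced by getPollValue above, of how setRecords decomposes PollValue back into the grid columns:

// Hypothetical value, not taken from the commits.
var pollValue = "3.5 - 2-High - 4 Vote(s)";
var array = pollValue.split('-');                                   // ["3.5 ", " 2", "High ", " 4 Vote(s)"]
var average = array.length > 0 ? array[0] : "";                     // "3.5 " (coerces to 3.5 in comparisons)
var fibonacciValue = getFibonacciValue(average);                    // 5, via the average >= 3 branch
var select = array.length > 2 ? array[1] + '-' + array[2] : "";     // " 2-High "
var count = array.length > 3 ? array[3].trim().split(' ')[0] : "";  // "4"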

File diff suppressed because it is too large

View File

@ -1,5 +1,5 @@
function compareFunction(a, b) {
if (a.CoD === null || b.CoD === null) {
if (a.CoD == undefined || b.CoD == undefined) {
return b.Id - a.Id;
} else {
return b.CoD - a.CoD || b.State[0] - a.State[0] || b.ParentId - a.ParentId || a.Id - b.Id;
@ -7,12 +7,12 @@ function compareFunction(a, b) {
}
function showOne(rowData) {
if (rowData == null)
if (rowData == undefined)
return;
var data = [];
data.push({ name: "Edit in ADO", value: '<a target="_blank" href="https://tfs.intra.infineon.com/tfs/FactoryIntegration/ART%20SPS/_workitems/edit/' + rowData["Id"] + '">Edit in ADO ' + rowData["Id"] + '</a>' });
for (const property in rowData) {
if (rowData[property] == null)
if (rowData[property] == undefined)
continue;
data.push({ name: property, value: rowData[property].toString() });
}
@ -26,7 +26,7 @@ function showOne(rowData) {
function loadOne() {
var selectedRow = $("#HeaderGrid").data("igGridSelection").selectedRow();
if (selectedRow == null)
if (selectedRow == undefined)
return;
var rowData = $("#HeaderGrid").data("igGrid").dataSource.dataView()[selectedRow.index];
showOne(rowData);
@ -41,7 +41,7 @@ function detailSelectionChangedRunInfo(evt, ui) {
function getState(state) {
var result;
if (state == null)
if (state == undefined)
result = "9-Null";
else if (state === "New")
result = `1-${state}`;
@ -62,7 +62,7 @@ function getPriority(workItemType, priority) {
var result;
if (workItemType === "Bug")
result = "0-Bug";
else if (priority == null || priority === 0)
else if (priority == undefined || priority === 0)
result = "9-Null";
else if (priority === 1)
result = `${priority}-High`;
@ -78,14 +78,14 @@ function getPriority(workItemType, priority) {
}
function updateRecordCoD(workItem) {
if (workItem !== null) {
if (workItem.Effort === null)
if (workItem != undefined) {
if (workItem.Effort == undefined)
workItem.Effort = 10123;
if (workItem.BusinessValue === null)
if (workItem.BusinessValue == undefined)
workItem.BusinessValue = 99999;
if (workItem.TimeCriticality === null)
if (workItem.TimeCriticality == undefined)
workItem.TimeCriticality = 99999;
if (workItem.RiskReductionMinusOpportunityEnablement === null)
if (workItem.RiskReductionMinusOpportunityEnablement == undefined)
workItem.RiskReductionMinusOpportunityEnablement = 99999;
workItem.CoD = workItem.RiskReductionMinusOpportunityEnablement + workItem.TimeCriticality + workItem.BusinessValue;
}
@ -97,7 +97,7 @@ function updateRecordOther(workItem) {
}
function updateRecordParent(parent, workItem) {
if (parent === null) {
if (parent == undefined) {
workItem["ParentId"] = 9999999;
workItem["ParentTitle"] = null;
workItem["ParentState"] = null;

View File

@ -1,5 +1,5 @@
function compareFunction(a, b) {
if (a.CoD === null || b.CoD === null) {
if (a.CoD == undefined || b.CoD == undefined) {
return b.Id - a.Id;
} else {
return b.CoD - a.CoD || b.State[0] - a.State[0] || b.ParentId - a.ParentId || a.Id - b.Id;
@ -7,12 +7,12 @@ function compareFunction(a, b) {
}
function showOne(rowData) {
if (rowData == null)
if (rowData == undefined)
return;
var data = [];
data.push({ name: "Edit in ADO", value: '<a target="_blank" href="https://tfs.intra.infineon.com/tfs/FactoryIntegration/ART%20SPS/_workitems/edit/' + rowData["Id"] + '">Edit in ADO ' + rowData["Id"] + '</a>' });
for (const property in rowData) {
if (rowData[property] == null)
if (rowData[property] == undefined)
continue;
data.push({ name: property, value: rowData[property].toString() });
}
@ -26,7 +26,7 @@ function showOne(rowData) {
function loadOne() {
var selectedRow = $("#HeaderGrid").data("igGridSelection").selectedRow();
if (selectedRow == null)
if (selectedRow == undefined)
return;
var rowData = $("#HeaderGrid").data("igGrid").dataSource.dataView()[selectedRow.index];
showOne(rowData);
@ -41,7 +41,7 @@ function detailSelectionChangedRunInfo(evt, ui) {
function getState(state) {
var result;
if (state == null)
if (state == undefined)
result = "9-Null";
else if (state === "New")
result = `1-${state}`;
@ -62,7 +62,7 @@ function getPriority(workItemType, priority) {
var result;
if (workItemType === "Bug")
result = "0-Bug";
else if (priority == null || priority === 0)
else if (priority == undefined || priority === 0)
result = "9-Null";
else if (priority === 1)
result = `${priority}-High`;
@ -78,14 +78,14 @@ function getPriority(workItemType, priority) {
}
function updateRecordCoD(workItem) {
if (workItem !== null) {
if (workItem.Effort === null)
if (workItem != undefined) {
if (workItem.Effort == undefined)
workItem.Effort = 10123;
if (workItem.BusinessValue === null)
if (workItem.BusinessValue == undefined)
workItem.BusinessValue = 99999;
if (workItem.TimeCriticality === null)
if (workItem.TimeCriticality == undefined)
workItem.TimeCriticality = 99999;
if (workItem.RiskReductionMinusOpportunityEnablement === null)
if (workItem.RiskReductionMinusOpportunityEnablement == undefined)
workItem.RiskReductionMinusOpportunityEnablement = 99999;
workItem.CoD = workItem.RiskReductionMinusOpportunityEnablement + workItem.TimeCriticality + workItem.BusinessValue;
}
@ -97,7 +97,7 @@ function updateRecordOther(workItem) {
}
function updateRecordParent(parent, workItem) {
if (parent === null) {
if (parent == undefined) {
workItem["ParentId"] = 9999999;
workItem["ParentTitle"] = null;
workItem["ParentState"] = null;

View File

@ -1,22 +1,22 @@
var _apiUrl = null;
function compareFunction(a, b) {
if (a.RiskReductionMinusOpportunityEnablement === null || b.RiskReductionMinusOpportunityEnablement === null) {
if (a.RiskReductionMinusOpportunityEnablement == undefined || b.RiskReductionMinusOpportunityEnablement == undefined) {
var aPollValue = a.PollValue.split('-');
var bPollValue = b.PollValue.split('-');
return bPollValue[0].trim() - aPollValue[0].trim() || b.State[0] - a.State[0] || bPollValue[bPollValue.length - 1].trim()[0] - aPollValue[aPollValue.length - 1].trim()[0] || b.ParentId - a.ParentId || a.Id - b.Id;
return bPollValue[0] - aPollValue[0] || b.State[0] - a.State[0] || bPollValue[bPollValue.length - 1].trim()[0] - aPollValue[aPollValue.length - 1].trim()[0] || b.ParentId - a.ParentId || a.Id - b.Id;
} else {
return b.RiskReductionMinusOpportunityEnablement - a.RiskReductionMinusOpportunityEnablement || b.State[0] - a.State[0] || b.ParentId - a.ParentId || a.Id - b.Id;
}
}
function showOne(rowData) {
if (rowData == null)
if (rowData == undefined)
return;
var data = [];
data.push({ name: "Edit in ADO", value: '<a target="_blank" href="https://tfs.intra.infineon.com/tfs/FactoryIntegration/ART%20SPS/_workitems/edit/' + rowData["Id"] + '">Edit in ADO ' + rowData["Id"] + '</a>' });
for (const property in rowData) {
if (rowData[property] == null)
if (rowData[property] == undefined)
continue;
data.push({ name: property, value: rowData[property].toString() });
}
@ -30,7 +30,7 @@ function showOne(rowData) {
function loadOne() {
var selectedRow = $("#HeaderGrid").data("igGridSelection").selectedRow();
if (selectedRow == null)
if (selectedRow == undefined)
return;
var rowData = $("#HeaderGrid").data("igGrid").dataSource.dataView()[selectedRow.index];
showOne(rowData);
@ -45,7 +45,7 @@ function detailSelectionChangedRunInfo(evt, ui) {
function getState(state) {
var result;
if (state == null)
if (state == undefined)
result = "9-Null";
else if (state === "New")
result = `1-${state}`;
@ -66,7 +66,7 @@ function getPriority(workItemType, priority) {
var result;
if (workItemType === "Bug")
result = "0-Bug";
else if (priority == null || priority === 0)
else if (priority == undefined || priority === 0)
result = "9-Null";
else if (priority === 1)
result = `${priority}-High`;
@ -81,45 +81,34 @@ function getPriority(workItemType, priority) {
return result;
}
function getInversePriority(priority) {
function getPollValue(description, pollValue) {
var result;
if (priority == null || priority === 0)
result = "0.000";
else if (priority === 1)
result = "3.000";
else if (priority === 2)
result = "2.000";
else if (priority === 3)
result = "1.000";
if (pollValue == undefined || pollValue.RiskReductionOpportunityEnablement == undefined || pollValue.RiskReductionOpportunityEnablement.InverseAverage == undefined)
result = "";
else if (pollValue.RiskReductionOpportunityEnablement.InverseAverage >= 4)
result = `${pollValue.RiskReductionOpportunityEnablement.InverseAverage} - 1-Highest (Most ${description}) - ${pollValue.Count} Vote(s)`;
else if (pollValue.RiskReductionOpportunityEnablement.InverseAverage >= 3)
result = `${pollValue.RiskReductionOpportunityEnablement.InverseAverage} - 2-High - ${pollValue.Count} Vote(s)`;
else if (pollValue.RiskReductionOpportunityEnablement.InverseAverage >= 2)
result = `${pollValue.RiskReductionOpportunityEnablement.InverseAverage} - 3-Medium - ${pollValue.Count} Vote(s)`;
else if (pollValue.RiskReductionOpportunityEnablement.InverseAverage >= 1)
result = `${pollValue.RiskReductionOpportunityEnablement.InverseAverage} - 4-Low - ${pollValue.Count} Vote(s)`;
else if (pollValue.RiskReductionOpportunityEnablement.InverseAverage >= 0)
result = `${pollValue.RiskReductionOpportunityEnablement.InverseAverage} - 5-Lowest - ${pollValue.Count} Vote(s)`;
else
result = "0.000";
return result;
}
function getPollValue(description, priority, priorityDisplay, pollValue) {
var result;
if (pollValue === undefined || pollValue.Records.length === undefined || pollValue.Records.length === 0 || pollValue.Average === null)
result = getInversePriority(priority) + ' - ' + priorityDisplay + ' - *Priority';
else if (pollValue.Average > 2)
result = `${pollValue.Average} - 1-High (Most ${description}) - ${pollValue.Count} Vote(s)`;
else if (pollValue.Average > 1)
result = `${pollValue.Average} - 2-Medium - ${pollValue.Count} Vote(s)`;
else if (pollValue.Average > 0)
result = `${pollValue.Average} - 3-Low - ${pollValue.Count} Vote(s)`;
else
result = getInversePriority(priority) + ' - ' + priorityDisplay + ' - *Priority';
result = "";
return result;
}
function updateRecordCoD(workItem) {
if (workItem !== null) {
if (workItem.Effort === null)
if (workItem != undefined) {
if (workItem.Effort == undefined)
workItem.Effort = 10123;
if (workItem.BusinessValue === null)
if (workItem.BusinessValue == undefined)
workItem.BusinessValue = 99999;
if (workItem.TimeCriticality === null)
if (workItem.TimeCriticality == undefined)
workItem.TimeCriticality = 99999;
if (workItem.RiskReductionMinusOpportunityEnablement === null)
if (workItem.RiskReductionMinusOpportunityEnablement == undefined)
workItem.RiskReductionMinusOpportunityEnablement = 99999;
workItem.CoD = workItem.RiskReductionMinusOpportunityEnablement + workItem.TimeCriticality + workItem.BusinessValue;
}
@ -128,12 +117,12 @@ function updateRecordCoD(workItem) {
function updateRecordOther(workItem, dataB, description) {
workItem["State"] = getState(workItem["State"]);
var priority = getPriority(workItem["WorkItemType"], workItem["Priority"]);
workItem["PollValue"] = getPollValue(description, workItem["Priority"], priority, dataB[workItem.Id]);
workItem["PollValue"] = getPollValue(description, dataB[workItem.Id]);
workItem["Priority"] = priority;
}
function updateRecordParent(parent, workItem) {
if (parent === null) {
if (parent == undefined) {
workItem["ParentId"] = 9999999;
workItem["ParentTitle"] = null;
workItem["ParentState"] = null;
@ -183,13 +172,46 @@ function sendValue(element, page, id) {
});
}
function getFibonacciValue(average) {
var result;
if (average >= 7)
result = 34;
else if (average >= 6)
result = 21;
else if (average >= 5)
result = 13;
else if (average >= 4)
result = 8;
else if (average >= 3)
result = 5;
else if (average >= 2)
result = 3;
else if (average >= 1)
result = 2;
else if (average >= 0)
result = 1;
else
result = "";
return result;
}
function setRecords(workItems, page, description, th) {
var record;
var html = "<tr><th>Parent Id</th><th>Parent Title</th><th>Id</th><th>Requester</th><th>Title</th><th>Assigned To</th><th>System(s)</th><th>State</th><th>Value</th><th>Up</th><th>Down</th><th>Poll Value</th><th>" + th + "</th></tr>";
var array = [];
var count = "";
var select = "";
var average = "";
var fibonacciValue = "";
var html = "<tr><th>Parent Id</th><th>Parent Title</th><th>Id</th><th>Requester</th><th>Title</th><th>Assigned To</th><th>System(s)</th><th>State</th><th>Value</th><th>Poll Average</th><th>Fibonacci</th><th>Poll Description</th><th>Vote(s)</th><th>" + th + "</th><th>Up</th><th>Down</th></tr>";
const element = document.getElementById("HeaderGrid");
for (var i = 0; i < workItems.length; i++) {
record = workItems[i];
var length = record.RiskReductionMinusOpportunityEnablement === null ? "" : record.RiskReductionMinusOpportunityEnablement.toString().length;
array = record.PollValue.split('-')
average = array.length > 0 ? array[0] : "";
fibonacciValue = getFibonacciValue(average);
select = array.length > 2 ? array[1] + '-' + array[2] : "";
count = array.length > 3 ? array[3].trim().split(' ')[0] : "";
var length = record.RiskReductionMinusOpportunityEnablement == undefined ? "" : record.RiskReductionMinusOpportunityEnablement.toString().length;
html += "<tr><td>" + '<a target="_blank" href="https://tfs.intra.infineon.com/tfs/FactoryIntegration/ART%20SPS/_workitems/edit/' + record.ParentId + '">' + record.ParentId + "</a>" +
"</td><td>" + record.ParentTitle +
"</td><td>" + '<a target="_blank" href="https://tfs.intra.infineon.com/tfs/FactoryIntegration/ART%20SPS/_workitems/edit/' + record.Id + '">' + record.Id + "</a>" +
@ -201,14 +223,19 @@ function setRecords(workItems, page, description, th) {
"</td><td>" +
'<select onchange="sendValue(this, \'' + page + '\', ' + record.Id + ')">' +
'<option value="9">Unknown</option>' +
'<option value="1">High (Most ' + description + ')</option>' +
'<option value="2">Medium</option>' +
'<option value="3">Low</option>' +
'<option value="1">Highest (Most ' + description + ')</option>' +
'<option value="2">High</option>' +
'<option value="3">Medium</option>' +
'<option value="4">Low</option>' +
'<option value="5">Lowest</option>' +
"</select>" +
"</td><td>" + average +
"</td><td>" + fibonacciValue +
"</td><td>" + select +
"</td><td>" + count +
"</td><td>" + length + " - " + record.RiskReductionMinusOpportunityEnablement +
"</td><td><a href='#' class='up'>Up</a>" +
"</td><td><a href='#' class='down'>Down</a>" +
"</td><td>" + record.PollValue +
"</td><td>" + length + " - " + record.RiskReductionMinusOpportunityEnablement +
"</td></tr>";
}
element.innerHTML = html.replaceAll(">null<", ">&nbsp;<");
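
A small illustration with made-up records of the fallback branch in compareFunction above: when RiskReductionMinusOpportunityEnablement is undefined on either side, ordering falls back to the poll average, then the state digit, the first digit of the vote count, ParentId, and Id:

// Hypothetical records, not taken from the commits.
var a = { RiskReductionMinusOpportunityEnablement: undefined, PollValue: "4.5 - 1-Highest (Most Risk) - 3 Vote(s)", State: "2-Active", ParentId: 101, Id: 11 };
var b = { RiskReductionMinusOpportunityEnablement: undefined, PollValue: "2.0 - 3-Medium - 5 Vote(s)", State: "2-Active", ParentId: 101, Id: 12 };
// bPollValue[0] - aPollValue[0] evaluates to 2.0 - 4.5 = -2.5, so a sorts ahead of b: higher poll average first.
[a, b].sort(compareFunction);   // [a, b]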

File diff suppressed because it is too large

File diff suppressed because it is too large

File diff suppressed because one or more lines are too long

File diff suppressed because it is too large

File diff suppressed because one or more lines are too long

File diff suppressed because it is too large

View File

@ -0,0 +1,95 @@
/*!
* ASP.NET SignalR JavaScript Library 2.4.3
* http://signalr.net/
*
* Copyright (c) .NET Foundation. All rights reserved.
* Licensed under the Apache License, Version 2.0. See License.txt in the project root for license information.
*
*/
/// <reference path="..\..\SignalR.Client.JS\Scripts\jquery-1.6.4.js" />
/// <reference path="jquery.signalR.js" />
(function ($, window, undefined) {
/// <param name="$" type="jQuery" />
"use strict";
if (typeof ($.signalR) !== "function") {
throw new Error("SignalR: SignalR is not loaded. Please ensure jquery.signalR-x.js is referenced before ~/signalr/js.");
}
var signalR = $.signalR;
function makeProxyCallback(hub, callback) {
return function () {
// Call the client hub method
callback.apply(hub, $.makeArray(arguments));
};
}
function registerHubProxies(instance, shouldSubscribe) {
var key, hub, memberKey, memberValue, subscriptionMethod;
for (key in instance) {
if (instance.hasOwnProperty(key)) {
hub = instance[key];
if (!(hub.hubName)) {
// Not a client hub
continue;
}
if (shouldSubscribe) {
// We want to subscribe to the hub events
subscriptionMethod = hub.on;
} else {
// We want to unsubscribe from the hub events
subscriptionMethod = hub.off;
}
// Loop through all members on the hub and find client hub functions to subscribe/unsubscribe
for (memberKey in hub.client) {
if (hub.client.hasOwnProperty(memberKey)) {
memberValue = hub.client[memberKey];
if (!$.isFunction(memberValue)) {
// Not a client hub function
continue;
}
// Use the actual user-provided callback as the "identity" value for the registration.
subscriptionMethod.call(hub, memberKey, makeProxyCallback(hub, memberValue), memberValue);
}
}
}
}
}
$.hubConnection.prototype.createHubProxies = function () {
var proxies = {};
this.starting(function () {
// Register the hub proxies as subscribed
// (instance, shouldSubscribe)
registerHubProxies(proxies, true);
this._registerSubscribedHubs();
}).disconnected(function () {
// Unsubscribe all hub proxies when we "disconnect". This is to ensure that we do not re-add functional call backs.
// (instance, shouldSubscribe)
registerHubProxies(proxies, false);
});
proxies['weightedShortestJobFirstHub'] = this.createHubProxy('weightedShortestJobFirstHub');
proxies['weightedShortestJobFirstHub'].client = { };
proxies['weightedShortestJobFirstHub'].server = {
send: function (name, message) {
return proxies['weightedShortestJobFirstHub'].invoke.apply(proxies['weightedShortestJobFirstHub'], $.merge(["Send"], $.makeArray(arguments)));
}
};
return proxies;
};
signalR.hub = $.hubConnection("/signalr", { useDefaultPath: false });
$.extend(signalR, signalR.hub.createHubProxies());
}(window.jQuery, window));
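
A minimal usage sketch, assuming the generated weightedShortestJobFirstHub proxy above and jquery.signalR already loaded; client callbacks must be attached before the connection starts so registerHubProxies can subscribe them:

// Hypothetical wiring, not part of the generated file above.
var hub = $.connection.weightedShortestJobFirstHub;
hub.client.addNotification = function (name, message) {
    // Attached before start() so registerHubProxies(proxies, true) picks it up.
    console.log(name, message);
};
$.connection.hub.start().done(function () {
    // server.send maps to the hub's Send(name, message) method via invoke("Send", ...).
    hub.server.send("effort", "refresh");
});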

File diff suppressed because it is too large

File diff suppressed because it is too large

View File

@ -1,22 +1,22 @@
var _apiUrl = null;
function compareFunction(a, b) {
if (a.TimeCriticality === null || b.TimeCriticality === null) {
if (a.TimeCriticality == undefined || b.TimeCriticality == undefined) {
var aPollValue = a.PollValue.split('-');
var bPollValue = b.PollValue.split('-');
return bPollValue[0].trim() - aPollValue[0].trim() || b.State[0] - a.State[0] || bPollValue[bPollValue.length - 1].trim()[0] - aPollValue[aPollValue.length - 1].trim()[0] || b.ParentId - a.ParentId || a.Id - b.Id;
return b.State[0] - a.State[0] || bPollValue[0] - aPollValue[0] || bPollValue[bPollValue.length - 1].trim()[0] - aPollValue[aPollValue.length - 1].trim()[0] || b.ParentId - a.ParentId || a.Id - b.Id;
} else {
return b.TimeCriticality - a.TimeCriticality || b.State[0] - a.State[0] || b.ParentId - a.ParentId || a.Id - b.Id;
}
}
function showOne(rowData) {
if (rowData == null)
if (rowData == undefined)
return;
var data = [];
data.push({ name: "Edit in ADO", value: '<a target="_blank" href="https://tfs.intra.infineon.com/tfs/FactoryIntegration/ART%20SPS/_workitems/edit/' + rowData["Id"] + '">Edit in ADO ' + rowData["Id"] + '</a>' });
for (const property in rowData) {
if (rowData[property] == null)
if (rowData[property] == undefined)
continue;
data.push({ name: property, value: rowData[property].toString() });
}
@ -30,7 +30,7 @@ function showOne(rowData) {
function loadOne() {
var selectedRow = $("#HeaderGrid").data("igGridSelection").selectedRow();
if (selectedRow == null)
if (selectedRow == undefined)
return;
var rowData = $("#HeaderGrid").data("igGrid").dataSource.dataView()[selectedRow.index];
showOne(rowData);
@ -45,7 +45,7 @@ function detailSelectionChangedRunInfo(evt, ui) {
function getState(state) {
var result;
if (state == null)
if (state == undefined)
result = "9-Null";
else if (state === "New")
result = `1-${state}`;
@ -66,7 +66,7 @@ function getPriority(workItemType, priority) {
var result;
if (workItemType === "Bug")
result = "0-Bug";
else if (priority == null || priority === 0)
else if (priority == undefined || priority === 0)
result = "9-Null";
else if (priority === 1)
result = `${priority}-High`;
@ -81,45 +81,34 @@ function getPriority(workItemType, priority) {
return result;
}
function getInversePriority(priority) {
function getPollValue(description, pollValue) {
var result;
if (priority == null || priority === 0)
result = "0.000";
else if (priority === 1)
result = "3.000";
else if (priority === 2)
result = "2.000";
else if (priority === 3)
result = "1.000";
if (pollValue == undefined || pollValue.TimeCriticality == undefined || pollValue.TimeCriticality.InverseAverage == undefined)
result = "";
else if (pollValue.TimeCriticality.InverseAverage >= 4)
result = `${pollValue.TimeCriticality.InverseAverage} - 1-Highest (Most ${description}) - ${pollValue.Count} Vote(s)`;
else if (pollValue.TimeCriticality.InverseAverage >= 3)
result = `${pollValue.TimeCriticality.InverseAverage} - 2-High - ${pollValue.Count} Vote(s)`;
else if (pollValue.TimeCriticality.InverseAverage >= 2)
result = `${pollValue.TimeCriticality.InverseAverage} - 3-Medium - ${pollValue.Count} Vote(s)`;
else if (pollValue.TimeCriticality.InverseAverage >= 1)
result = `${pollValue.TimeCriticality.InverseAverage} - 4-Low - ${pollValue.Count} Vote(s)`;
else if (pollValue.TimeCriticality.InverseAverage >= 0)
result = `${pollValue.TimeCriticality.InverseAverage} - 5-Lowest - ${pollValue.Count} Vote(s)`;
else
result = "0.000";
return result;
}
function getPollValue(description, priority, priorityDisplay, pollValue) {
var result;
if (pollValue === undefined || pollValue.Records.length === undefined || pollValue.Records.length === 0 || pollValue.Average === null)
result = getInversePriority(priority) + ' - ' + priorityDisplay + ' - *Priority';
else if (pollValue.Average > 2)
result = `${pollValue.Average} - 1-High (Most ${description}) - ${pollValue.Count} Vote(s)`;
else if (pollValue.Average > 1)
result = `${pollValue.Average} - 2-Medium - ${pollValue.Count} Vote(s)`;
else if (pollValue.Average > 0)
result = `${pollValue.Average} - 3-Low - ${pollValue.Count} Vote(s)`;
else
result = getInversePriority(priority) + ' - ' + priorityDisplay + ' - *Priority';
result = "";
return result;
}
function updateRecordCoD(workItem) {
if (workItem !== null) {
if (workItem.Effort === null)
if (workItem != undefined) {
if (workItem.Effort == undefined)
workItem.Effort = 10123;
if (workItem.BusinessValue === null)
if (workItem.BusinessValue == undefined)
workItem.BusinessValue = 99999;
if (workItem.TimeCriticality === null)
if (workItem.TimeCriticality == undefined)
workItem.TimeCriticality = 99999;
if (workItem.RiskReductionMinusOpportunityEnablement === null)
if (workItem.RiskReductionMinusOpportunityEnablement == undefined)
workItem.RiskReductionMinusOpportunityEnablement = 99999;
workItem.CoD = workItem.RiskReductionMinusOpportunityEnablement + workItem.TimeCriticality + workItem.BusinessValue;
}
@ -128,12 +117,12 @@ function updateRecordCoD(workItem) {
function updateRecordOther(workItem, dataB, description) {
workItem["State"] = getState(workItem["State"]);
var priority = getPriority(workItem["WorkItemType"], workItem["Priority"]);
workItem["PollValue"] = getPollValue(description, workItem["Priority"], priority, dataB[workItem.Id]);
workItem["PollValue"] = getPollValue(description, dataB[workItem.Id]);
workItem["Priority"] = priority;
}
function updateRecordParent(parent, workItem) {
if (parent === null) {
if (parent == undefined) {
workItem["ParentId"] = 9999999;
workItem["ParentTitle"] = null;
workItem["ParentState"] = null;
@ -183,13 +172,47 @@ function sendValue(element, page, id) {
});
}
function getFibonacciValue(average) {
var result;
if (average >= 7)
result = 34;
else if (average >= 6)
result = 21;
else if (average >= 5)
result = 13;
else if (average >= 4)
result = 8;
else if (average >= 3)
result = 5;
else if (average >= 2)
result = 3;
else if (average >= 1)
result = 2;
else if (average >= 0)
result = 1;
else
result = "";
return result;
}
function setRecords(workItems, page, description, th) {
var record;
var html = "<tr><th>Parent Id</th><th>Parent Title</th><th>Id</th><th>Requester</th><th>Title</th><th>Assigned To</th><th>System(s)</th><th>State</th><th>Value</th><th>Up</th><th>Down</th><th>Poll Value</th><th>" + th + "</th></tr>";
var array = [];
var count = "";
var select = "";
var average = "";
var fibonacciValue = "";
var html = "<tr><th>Parent Id</th><th>Parent Title</th><th>Id</th><th>Requester</th><th>Title</th><th>Assigned To</th><th>System(s)</th><th>State</th><th>Value</th><th>Poll Average</th><th>Fibonacci</th><th>Poll Description</th><th>Vote(s)</th><th>" + th + "</th><th>Up</th><th>Down</th></tr>";
const element = document.getElementById("HeaderGrid");
for (var i = 0; i < workItems.length; i++) {
record = workItems[i];
var length = record.TimeCriticality === null ? "" : record.TimeCriticality.toString().length;
array = record.PollValue.split('-')
average = array.length > 0 ? array[0] : "";
fibonacciValue = getFibonacciValue(average);
select = array.length > 2 ? array[1] + '-' + array[2] : "";
count = array.length > 3 ? array[3].trim().split(' ')[0] : "";
var length = record.TimeCriticality == undefined ? "" : record.TimeCriticality.toString().length;
html += "<tr><td>" + '<a target="_blank" href="https://tfs.intra.infineon.com/tfs/FactoryIntegration/ART%20SPS/_workitems/edit/' + record.ParentId + '">' + record.ParentId + "</a>" +
"</td><td>" + record.ParentTitle +
"</td><td>" + '<a target="_blank" href="https://tfs.intra.infineon.com/tfs/FactoryIntegration/ART%20SPS/_workitems/edit/' + record.Id + '">' + record.Id + "</a>" +
@ -201,14 +224,19 @@ function setRecords(workItems, page, description, th) {
"</td><td>" +
'<select onchange="sendValue(this, \'' + page + '\', ' + record.Id + ')">' +
'<option value="9">Unknown</option>' +
'<option value="1">High (Most ' + description + ')</option>' +
'<option value="2">Medium</option>' +
'<option value="3">Low</option>' +
'<option value="1">Highest (Most ' + description + ')</option>' +
'<option value="2">High</option>' +
'<option value="3">Medium</option>' +
'<option value="4">Low</option>' +
'<option value="5">Lowest</option>' +
"</select>" +
"</td><td>" + average +
"</td><td>" + fibonacciValue +
"</td><td>" + select +
"</td><td>" + count +
"</td><td>" + length + " - " + record.TimeCriticality +
"</td><td><a href='#' class='up'>Up</a>" +
"</td><td><a href='#' class='down'>Down</a>" +
"</td><td>" + record.PollValue +
"</td><td>" + length + " - " + record.TimeCriticality +
"</td></tr>";
}
element.innerHTML = html.replaceAll(">null<", ">&nbsp;<");
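
A quick check, under the thresholds in getFibonacciValue above, of how poll averages land in the Fibonacci buckets:

// Hypothetical inputs, not taken from the commits.
[0, 1.2, 2.8, 3, 4.9, 5.5, 6.1, 7].map(getFibonacciValue);   // [1, 2, 3, 5, 8, 13, 21, 34]
getFibonacciValue(undefined);                                 // "" (no numeric average yet)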

View File

@ -1,5 +1,5 @@
function compareFunction(a, b) {
if (a.ParentCoD === null || b.ParentCoD === null) {
if (a.ParentCoD == undefined || b.ParentCoD == undefined) {
return b.ParentCoD - a.ParentCoD || b.Id - a.Id;
} else {
return b.State[0] - a.State[0] || b.ParentId - a.ParentId || a.Id - b.Id;
@ -7,12 +7,12 @@ function compareFunction(a, b) {
}
function showOne(rowData) {
if (rowData == null)
if (rowData == undefined)
return;
var data = [];
data.push({ name: "Edit in ADO", value: '<a target="_blank" href="https://tfs.intra.infineon.com/tfs/FactoryIntegration/ART%20SPS/_workitems/edit/' + rowData["Id"] + '">Edit in ADO ' + rowData["Id"] + '</a>' });
for (const property in rowData) {
if (rowData[property] == null)
if (rowData[property] == undefined)
continue;
data.push({ name: property, value: rowData[property].toString() });
}
@ -26,7 +26,7 @@ function showOne(rowData) {
function loadOne() {
var selectedRow = $("#HeaderGrid").data("igGridSelection").selectedRow();
if (selectedRow == null)
if (selectedRow == undefined)
return;
var rowData = $("#HeaderGrid").data("igGrid").dataSource.dataView()[selectedRow.index];
showOne(rowData);
@ -41,7 +41,7 @@ function detailSelectionChangedRunInfo(evt, ui) {
function getState(state) {
var result;
if (state == null)
if (state == undefined)
result = "9-Null";
else if (state === "New")
result = `1-${state}`;
@ -62,7 +62,7 @@ function getPriority(workItemType, priority) {
var result;
if (workItemType === "Bug")
result = "0-Bug";
else if (priority == null || priority === 0)
else if (priority == undefined || priority === 0)
result = "9-Null";
else if (priority === 1)
result = `${priority}-High`;
@ -78,14 +78,14 @@ function getPriority(workItemType, priority) {
}
function updateRecordCoD(workItem) {
if (workItem !== null) {
if (workItem.Effort === null)
if (workItem != undefined) {
if (workItem.Effort == undefined)
workItem.Effort = 10123;
if (workItem.BusinessValue === null)
if (workItem.BusinessValue == undefined)
workItem.BusinessValue = 99999;
if (workItem.TimeCriticality === null)
if (workItem.TimeCriticality == undefined)
workItem.TimeCriticality = 99999;
if (workItem.RiskReductionMinusOpportunityEnablement === null)
if (workItem.RiskReductionMinusOpportunityEnablement == undefined)
workItem.RiskReductionMinusOpportunityEnablement = 99999;
workItem.CoD = workItem.RiskReductionMinusOpportunityEnablement + workItem.TimeCriticality + workItem.BusinessValue;
}
@ -97,7 +97,7 @@ function updateRecordOther(workItem) {
}
function updateRecordParent(parent, workItem) {
if (parent === null) {
if (parent == undefined) {
workItem["ParentId"] = 9999999;
workItem["ParentTitle"] = null;
workItem["ParentState"] = null;

View File

@ -0,0 +1,321 @@
var _apiUrl = null;
function compareFunction(a, b) {
return b.WeightedShortestJobFirst - a.WeightedShortestJobFirst || b.CoDRank - a.CoDRank || b.ParentId - a.ParentId || a.Id - b.Id;
}
function compareEffortFunction(a, b) {
return a.Effort - b.Effort || b.ParentId - a.ParentId || a.Id - b.Id;
}
function compareBusinessValueFunction(a, b) {
return a.BusinessValue - b.BusinessValue || b.ParentId - a.ParentId || a.Id - b.Id;
}
function compareTimeCriticalityFunction(a, b) {
return a.TimeCriticality - b.TimeCriticality || b.ParentId - a.ParentId || a.Id - b.Id;
}
function compareRiskReductionMinusOpportunityEnablementFunction(a, b) {
return a.RiskReductionMinusOpportunityEnablement - b.RiskReductionMinusOpportunityEnablement || b.ParentId - a.ParentId || a.Id - b.Id;
}
function compareCostOfDelay(a, b) {
return a.CoD - b.CoD || b.ParentId - a.ParentId || a.Id - b.Id;
}
function showOne(rowData) {
if (rowData == undefined)
return;
var data = [];
data.push({ name: "Edit in ADO", value: '<a target="_blank" href="https://tfs.intra.infineon.com/tfs/FactoryIntegration/ART%20SPS/_workitems/edit/' + rowData["Id"] + '">Edit in ADO ' + rowData["Id"] + '</a>' });
for (const property in rowData) {
if (rowData[property] == undefined)
continue;
data.push({ name: property, value: rowData[property].toString() });
}
$("#AllGrid").igGrid({
autoGenerateColumns: true,
dataSource: data,
width: "100%",
showHeader: false,
});
}
function loadOne() {
var selectedRow = $("#HeaderGrid").data("igGridSelection").selectedRow();
if (selectedRow == undefined)
return;
var rowData = $("#HeaderGrid").data("igGrid").dataSource.dataView()[selectedRow.index];
showOne(rowData);
}
function detailSelectionChangedRunInfo(evt, ui) {
if (ui.row.index === 0)
return;
var rowData = ui.owner.grid.dataSource.dataView()[ui.row.index];
showOne(rowData);
}
function getState(state) {
var result;
if (state == undefined)
result = "9-Null";
else if (state === "New")
result = `1-${state}`;
else if (state === "Active")
result = `2-${state}`;
else if (state === "Resolved")
result = `3-${state}`;
else if (state === "Closed")
result = `4-${state}`;
else if (state === "Removed")
result = `5-${state}`;
else
result = `8-${state}`;
return result;
}
function getPriority(workItemType, priority) {
var result;
if (workItemType === "Bug")
result = "0-Bug";
else if (priority == undefined || priority === 0)
result = "9-Null";
else if (priority === 1)
result = `${priority}-High`;
else if (priority === 2)
result = `${priority}-Med`;
else if (priority === 3)
result = `${priority}-Low`;
else if (priority === 4)
result = `${priority}-TBD`;
else
result = "8-Not";
return result;
}
function updateRecordCoD(workItem) {
if (workItem != undefined) {
if (workItem["Effort"] == undefined)
workItem["Effort"] = 1;
if (workItem["BusinessValue"] == undefined)
workItem["BusinessValue"] = 99999;
if (workItem["TimeCriticality"] == undefined)
workItem["TimeCriticality"] = 99999;
if (workItem["RiskReductionMinusOpportunityEnablement"] == undefined)
workItem["RiskReductionMinusOpportunityEnablement"] = 99999;
}
}
function getFibonacci(length) {
var results = [];
for (var i = 0; i < length; i++) {
results.push(21);
}
var index = 0;
var fibonacci = [3, 5, 8, 13, 20];
var factor = (length / fibonacci.length).toFixed();
for (var j = 0; j < fibonacci.length; j++) {
for (var i = 0; i < factor; i++) {
results[index] = fibonacci[j];
index += 1;
}
}
// for (var i = 0; i < results.length; i++) {
// console.log(results[i]);
// }
return results;
}
function updateCoD(records) {
var workItem;
var collection = [];
for (var i = 0; i < records.length; i++) {
workItem = records[i];
if (workItem.Priority[0] === '2') {
workItem.EffortFibonacci = 2;
workItem.BusinessValueFibonacci = 2;
workItem.TimeCriticalityFibonacci = 2;
workItem.RiskReductionMinusOpportunityEnablementFibonacci = 2;
continue;
}
else if (workItem.Priority[0] === '3') {
workItem.EffortFibonacci = 1;
workItem.BusinessValueFibonacci = 1;
workItem.TimeCriticalityFibonacci = 1;
workItem.RiskReductionMinusOpportunityEnablementFibonacci = 1;
continue;
}
collection.push(workItem);
}
var fibonacci = getFibonacci(collection.length);
collection.sort(compareEffortFunction);
for (var i = 0; i < collection.length; i++) {
workItem = collection[i];
workItem.EffortFibonacci = fibonacci[i];
}
records.sort(compareEffortFunction);
for (var i = 0; i < records.length; i++) {
workItem = records[i];
workItem.EffortRank = (((i + 1) / records.length) * 100).toFixed();
}
collection.sort(compareBusinessValueFunction);
for (var i = 0; i < collection.length; i++) {
workItem = collection[i];
workItem.BusinessValueFibonacci = fibonacci[i];
}
records.sort(compareBusinessValueFunction);
for (var i = 0; i < records.length; i++) {
workItem = records[i];
workItem.BusinessValueRank = (((i + 1) / records.length) * 100).toFixed();
}
collection.sort(compareTimeCriticalityFunction);
for (var i = 0; i < collection.length; i++) {
workItem = collection[i];
workItem.TimeCriticalityFibonacci = fibonacci[i];
}
records.sort(compareTimeCriticalityFunction);
for (var i = 0; i < records.length; i++) {
workItem = records[i];
workItem.TimeCriticalityRank = (((i + 1) / records.length) * 100).toFixed();
}
collection.sort(compareRiskReductionMinusOpportunityEnablementFunction);
for (var i = 0; i < collection.length; i++) {
workItem = collection[i];
workItem.RiskReductionMinusOpportunityEnablementFibonacci = fibonacci[i];
}
records.sort(compareRiskReductionMinusOpportunityEnablementFunction);
for (var i = 0; i < records.length; i++) {
workItem = records[i];
workItem.RiskReductionMinusOpportunityEnablementRank = (((i + 1) / records.length) * 100).toFixed();
}
for (var i = 0; i < records.length; i++) {
workItem = records[i];
workItem.CoD = workItem.RiskReductionMinusOpportunityEnablementFibonacci + workItem.TimeCriticalityFibonacci + workItem.BusinessValueFibonacci;
}
records.sort(compareCostOfDelay);
for (var i = 0; i < records.length; i++) {
workItem = records[i];
workItem.CoDRank = (((i + 1) / records.length) * 100).toFixed();
}
for (var i = 0; i < records.length; i++) {
workItem = records[i];
if (workItem.Priority[0] !== '1' && workItem.Priority[0] !== '4') {
workItem.WeightedShortestJobFirst = 0.000001;
workItem.WeightedShortestJobFirstRank = 0;
}
else {
workItem.WeightedShortestJobFirstRank = (((i + 1) / records.length) * 100).toFixed();
workItem.WeightedShortestJobFirst = (workItem.CoD / workItem.EffortFibonacci).toFixed(3);
}
}
}
function updateRecordOther(workItem) {
workItem["State"] = getState(workItem["State"]);
workItem["Priority"] = getPriority(workItem["WorkItemType"], workItem["Priority"]);
}
function updateRecordParent(parent, workItem) {
if (parent == undefined) {
workItem["ParentId"] = 9999999;
workItem["ParentTitle"] = null;
workItem["ParentState"] = null;
workItem["ParentCoD"] = 9999999;
}
else {
workItem["ParentId"] = parent["Id"];
workItem["ParentCoD"] = parent["CoD"];
workItem["ParentTitle"] = parent["Title"];
workItem["ParentState"] = getState(parent["State"]);
}
}
function getRecords(data) {
var parent;
var workItem;
var records = [];
for (var i = 0; i < data.length; i++) {
parent = data[i].Parent;
workItem = data[i].WorkItem;
if (workItem.WorkItemType !== 'Feature')
continue;
if (workItem.State !== 'Active' && workItem.State !== 'New')
continue;
if (workItem.Tags != null && workItem.Tags.includes("Ignore"))
continue;
if ((window.location.href.indexOf('=LEO') > -1 && workItem.AreaPath !== 'ART SPS\\LEO') || (window.location.href.indexOf('=MES') > -1 && workItem.AreaPath !== 'ART SPS\\MES'))
continue;
updateRecordCoD(parent);
updateRecordCoD(workItem);
updateRecordOther(workItem);
updateRecordParent(parent, workItem);
records.push(workItem);
}
updateCoD(records);
records.sort(compareFunction);
return records;
}
function setRecords(workItems) {
var record;
var html = "<tr><th>Parent Id</th><th>Parent Title</th><th>Id</th><th>Requester</th><th>Title</th><th>Assigned To</th><th>System(s)</th><th>State</th><th>Priority</th><th>Risk Reduction and/or Opportunity Enablement</th><th>Time Criticality</th><th>Business Value</th><th>CoD</th><th>Effort</th><th>WSJF</th><th>Up</th><th>Down</th></tr>";
const element = document.getElementById("HeaderGrid");
for (var i = 0; i < workItems.length; i++) {
record = workItems[i];
html += "<tr><td>" + '<a target="_blank" href="https://tfs.intra.infineon.com/tfs/FactoryIntegration/ART%20SPS/_workitems/edit/' + record.ParentId + '">' + record.ParentId + "</a>" +
"</td><td>" + record.ParentTitle +
"</td><td>" + '<a target="_blank" href="https://tfs.intra.infineon.com/tfs/FactoryIntegration/ART%20SPS/_workitems/edit/' + record.Id + '">' + record.Id + "</a>" +
"</td><td>" + record.Requester +
"</td><td>" + record.Title +
"</td><td>" + record.AssignedTo +
"</td><td>" + record.Tags +
"</td><td>" + record.State +
"</td><td>" + record.Priority +
"</td><td>" + record.RiskReductionMinusOpportunityEnablementRank + '% - ' + record.RiskReductionMinusOpportunityEnablement + ' - ' + record.RiskReductionMinusOpportunityEnablementFibonacci +
"</td><td>" + record.TimeCriticalityRank + '% - ' +record.TimeCriticality + ' - ' + record.TimeCriticalityFibonacci +
"</td><td>" + record.BusinessValueRank + '% - ' +record.BusinessValue + ' - ' + record.BusinessValueFibonacci +
"</td><td>" + record.CoDRank + '% - ' + record.CoD +
"</td><td>" + record.EffortRank + '% - ' +record.Effort + ' - ' + record.EffortFibonacci +
"</td><td>" + record.WeightedShortestJobFirst +
"</td><td><a href='#' class='up'>Up</a></td><td><a href='#' class='down'>Down</a></td></tr>";
}
element.innerHTML = html.replaceAll(">null<", ">&nbsp;<");
}
function updateSite() {
if (window.location.href.indexOf('=LEO') > -1) {
document.title = document.title.replace("Infineon", "HiRel (Leominster)");
document.getElementById("siteHeader").innerText = "HiRel (Leominster)";
}
else if (window.location.href.indexOf('=MES') > -1) {
document.title = document.title.replace("Infineon", "Mesa");
document.getElementById("siteHeader").innerText = "Mesa";
}
else {
document.title = document.title.replace("Infineon", "Infineon");
document.getElementById("siteHeader").innerText = "Infineon";
}
}
function initIndex(url, apiUrl) {
_apiUrl = apiUrl;
updateSite();
$.getJSON(url, { _: new Date().getTime() }, function (data) {
var records = getRecords(data);
console.log(data.length);
if (data.length > 0)
console.log(data[0]);
setRecords(records);
$(".up,.down").click(function () {
var row = $(this).parents("tr:first");
if ($(this).is(".up")) {
row.insertBefore(row.prev());
} else {
row.insertAfter(row.next());
}
});
});
$("#HeaderGrid").on("dblclick", "tr", loadOne);
}
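
A worked example, with made-up Fibonacci assignments, of the cost-of-delay and WSJF arithmetic that updateCoD applies to each priority 1 or 4 feature above:

// Hypothetical bucket values, not taken from the commits.
var riskReductionFib = 13, timeCriticalityFib = 8, businessValueFib = 20, effortFib = 5;
var cod = riskReductionFib + timeCriticalityFib + businessValueFib;   // 41
var wsjf = (cod / effortFib).toFixed(3);                              // "8.200"
// A larger CoD or a smaller Effort bucket both push the feature up the WeightedShortestJobFirst sort.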

View File

@ -0,0 +1,47 @@
var _apiUrl = null;
function initIndex(url, apiUrl) {
_apiUrl = apiUrl;
//Set the hubs URL for the connection
$.connection.hub.url = url;
$.connection.hub.logging = true;
// Declare a proxy to reference the hub.
// var chat = $.connection.myHub;
var chat = $.connection.weightedShortestJobFirstHub;
// Create a function that the hub can call to broadcast messages.
chat.client.addNotification = function (name, message) {
// Html encode display name and message.
console.log(message);
var encodedName = $('<div />').text(name).html();
var encodedMsg = $('<div />').text("hash").html();
// Add the message to the page.
$('#discussion').append('<li><strong>' + encodedName
+ '</strong>:&nbsp;&nbsp;' + encodedMsg + '</li>');
};
// Get the user name and store it to prepend to messages.
$('#displayname').val(prompt('Enter your name:', ''));
// Set initial focus to message input box.
$('#message').focus();
// Start the connection.
$.connection.hub.start({ transport: 'longPolling' }).done(function () {
$('#sendmessage').click(function () {
// Call the Send method on the hub.
// chat.server.send($('#displayname').val(), $('#message').val());
var notification = {
"Json": null,
"id": 110743,
"page": "effort",
"QueryString": "time=1737573418926\u0026id=110743\u0026page=effort\u0026value=1",
"RemoteIpAddress": "10.95.36.87",
"time": 1737573418926,
"value": 1
};
chat.server.notifyAll(notification);
// Clear text box and reset focus for next comment.
$('#message').val('').focus();
});
});
}

View File

@ -25,12 +25,12 @@ function compareCostOfDelay(a, b) {
}
function showOne(rowData) {
if (rowData == null)
if (rowData == undefined)
return;
var data = [];
data.push({ name: "Edit in ADO", value: '<a target="_blank" href="https://tfs.intra.infineon.com/tfs/FactoryIntegration/ART%20SPS/_workitems/edit/' + rowData["Id"] + '">Edit in ADO ' + rowData["Id"] + '</a>' });
for (const property in rowData) {
if (rowData[property] == null)
if (rowData[property] == undefined)
continue;
data.push({ name: property, value: rowData[property].toString() });
}
@ -44,7 +44,7 @@ function showOne(rowData) {
function loadOne() {
var selectedRow = $("#HeaderGrid").data("igGridSelection").selectedRow();
if (selectedRow == null)
if (selectedRow == undefined)
return;
var rowData = $("#HeaderGrid").data("igGrid").dataSource.dataView()[selectedRow.index];
showOne(rowData);
@ -59,7 +59,7 @@ function detailSelectionChangedRunInfo(evt, ui) {
function getState(state) {
var result;
if (state == null)
if (state == undefined)
result = "9-Null";
else if (state === "New")
result = `1-${state}`;
@ -80,7 +80,7 @@ function getPriority(workItemType, priority) {
var result;
if (workItemType === "Bug")
result = "0-Bug";
else if (priority == null || priority === 0)
else if (priority == undefined || priority === 0)
result = "9-Null";
else if (priority === 1)
result = `${priority}-High`;
@ -96,14 +96,14 @@ function getPriority(workItemType, priority) {
}
function updateRecordCoD(workItem) {
if (workItem !== null) {
if (workItem["Effort"] === null)
if (workItem != undefined) {
if (workItem["Effort"] == undefined)
workItem["Effort"] = 1;
if (workItem["BusinessValue"] === null)
if (workItem["BusinessValue"] == undefined)
workItem["BusinessValue"] = 99999;
if (workItem["TimeCriticality"] === null)
if (workItem["TimeCriticality"] == undefined)
workItem["TimeCriticality"] = 99999;
if (workItem["RiskReductionMinusOpportunityEnablement"] === null)
if (workItem["RiskReductionMinusOpportunityEnablement"] == undefined)
workItem["RiskReductionMinusOpportunityEnablement"] = 99999;
}
}
@ -218,7 +218,7 @@ function updateRecordOther(workItem) {
}
function updateRecordParent(parent, workItem) {
if (parent === null) {
if (parent == undefined) {
workItem["ParentId"] = 9999999;
workItem["ParentTitle"] = null;
workItem["ParentState"] = null;
@ -241,7 +241,7 @@ function getRecords(data) {
workItem = data[i].WorkItem;
if (workItem.WorkItemType !== 'Feature')
continue;
if (workItem.State !== 'New' && workItem.State !== 'Active')
if (workItem.State !== 'Active' && workItem.State !== 'New')
continue;
if (workItem.Tags != null && workItem.Tags.includes("Ignore"))
continue;
@ -265,7 +265,7 @@ function setRecords(workItems) {
for (var i = 0; i < workItems.length; i++) {
record = workItems[i];
html += "<tr><td>" + '<a target="_blank" href="https://tfs.intra.infineon.com/tfs/FactoryIntegration/ART%20SPS/_workitems/edit/' + record.ParentId + '">' + record.ParentId + "</a>" +
"</td><td>" + record.Title +
"</td><td>" + record.ParentTitle +
"</td><td>" + '<a target="_blank" href="https://tfs.intra.infineon.com/tfs/FactoryIntegration/ART%20SPS/_workitems/edit/' + record.Id + '">' + record.Id + "</a>" +
"</td><td>" + record.Requester +
"</td><td>" + record.Title +

View File

@ -0,0 +1,37 @@
#HeaderGridDiv,
#DetailsGridDiv {
font-size: 12px;
min-width: 1200px;
max-width: 1200px;
}
#HeaderGrid {
font-family: monospace;
margin-top: 60px;
}
#HeaderGrid tr td {
max-width: 200px;
padding: 5px;
}
#AllGrid {
font-family: monospace;
}
.navbar-brand {
min-width: 1200px;
background-color: whitesmoke;
}
tr:nth-of-type(odd) {
background-color: #eee;
}
#AllTextarea {
font-family: monospace;
}
#th-span {
margin-right: 500px;
}

View File

@ -20,7 +20,9 @@ internal class Fields
DateTime microsoftVSTSCommonStateChangeDate,
float? microsoftVSTSCommonTimeCriticality,
float? microsoftVSTSSchedulingEffort,
float? microsoftVSTSSchedulingRemainingWork,
DateTime microsoftVSTSSchedulingStartDate,
float? microsoftVSTSSchedulingStoryPoints,
DateTime microsoftVSTSSchedulingTargetDate,
string systemAreaPath,
SystemAssignedTo systemAssignedTo,
@ -40,8 +42,8 @@ internal class Fields
string systemTitle,
string systemWorkItemType)
{
CustomRequester = customRequester;
CustomRRminusOE = customRRminusOE;
CustomRequester = customRequester;
CustomWSJF = customWSJF;
MicrosoftVSTSCommonActivatedDate = microsoftVSTSCommonActivatedDate;
MicrosoftVSTSCommonBusinessValue = microsoftVSTSCommonBusinessValue;
@ -51,7 +53,9 @@ internal class Fields
MicrosoftVSTSCommonStateChangeDate = microsoftVSTSCommonStateChangeDate;
MicrosoftVSTSCommonTimeCriticality = microsoftVSTSCommonTimeCriticality;
MicrosoftVSTSSchedulingEffort = microsoftVSTSSchedulingEffort;
MicrosoftVSTSSchedulingRemainingWork = microsoftVSTSSchedulingRemainingWork;
MicrosoftVSTSSchedulingStartDate = microsoftVSTSSchedulingStartDate;
MicrosoftVSTSSchedulingStoryPoints = microsoftVSTSSchedulingStoryPoints;
MicrosoftVSTSSchedulingTargetDate = microsoftVSTSSchedulingTargetDate;
SystemAreaPath = systemAreaPath;
SystemAssignedTo = systemAssignedTo;
@ -72,8 +76,8 @@ internal class Fields
SystemWorkItemType = systemWorkItemType;
}
[JsonPropertyName("Custom.Requester")] public CustomRequester? CustomRequester { get; }
[JsonPropertyName("Custom.RRminusOE")] public float? CustomRRminusOE { get; }
[JsonPropertyName("Custom.Requester")] public CustomRequester? CustomRequester { get; }
[JsonPropertyName("Custom.WSJF")] public float? CustomWSJF { get; }
[JsonPropertyName("Microsoft.VSTS.Common.ActivatedDate")] public DateTime MicrosoftVSTSCommonActivatedDate { get; }
[JsonPropertyName("Microsoft.VSTS.Common.BusinessValue")] public float? MicrosoftVSTSCommonBusinessValue { get; }
@ -83,7 +87,9 @@ internal class Fields
[JsonPropertyName("Microsoft.VSTS.Common.StateChangeDate")] public DateTime MicrosoftVSTSCommonStateChangeDate { get; }
[JsonPropertyName("Microsoft.VSTS.Common.TimeCriticality")] public float? MicrosoftVSTSCommonTimeCriticality { get; }
[JsonPropertyName("Microsoft.VSTS.Scheduling.Effort")] public float? MicrosoftVSTSSchedulingEffort { get; }
[JsonPropertyName("Microsoft.VSTS.Scheduling.RemainingWork")] public float? MicrosoftVSTSSchedulingRemainingWork { get; }
[JsonPropertyName("Microsoft.VSTS.Scheduling.StartDate")] public DateTime MicrosoftVSTSSchedulingStartDate { get; }
[JsonPropertyName("Microsoft.VSTS.Scheduling.StoryPoints")] public float? MicrosoftVSTSSchedulingStoryPoints { get; }
[JsonPropertyName("Microsoft.VSTS.Scheduling.TargetDate")] public DateTime MicrosoftVSTSSchedulingTargetDate { get; }
[JsonPropertyName("System.AreaPath")] public string SystemAreaPath { get; }
[JsonPropertyName("System.AssignedTo")] public SystemAssignedTo? SystemAssignedTo { get; }

View File

@ -30,34 +30,36 @@ internal class Record
internal static Record GetWithoutNesting(Record record, string? violation)
{
Record result;
WorkItem workItem = new(record.WorkItem.ActivatedDate,
record.WorkItem.AreaPath,
record.WorkItem.AssignedTo,
record.WorkItem.BusinessValue,
record.WorkItem.ChangedDate,
record.WorkItem.ClosedDate,
record.WorkItem.CommentCount,
record.WorkItem.CreatedDate,
record.WorkItem.Description,
record.WorkItem.Effort,
record.WorkItem.Id,
record.WorkItem.IterationPath,
record.WorkItem.Parent,
record.WorkItem.Priority,
record.WorkItem.Relations,
record.WorkItem.Requester,
record.WorkItem.ResolvedDate,
record.WorkItem.Revision,
record.WorkItem.RiskReductionMinusOpportunityEnablement,
record.WorkItem.StartDate,
record.WorkItem.State,
record.WorkItem.Tags,
record.WorkItem.TargetDate,
record.WorkItem.TimeCriticality,
record.WorkItem.Title,
record.WorkItem.Violation is null ? violation : record.WorkItem.Violation,
record.WorkItem.WeightedShortestJobFirst,
record.WorkItem.WorkItemType);
WorkItem workItem = new(activatedDate: record.WorkItem.ActivatedDate,
areaPath: record.WorkItem.AreaPath,
assignedTo: record.WorkItem.AssignedTo,
businessValue: record.WorkItem.BusinessValue,
changedDate: record.WorkItem.ChangedDate,
closedDate: record.WorkItem.ClosedDate,
commentCount: record.WorkItem.CommentCount,
createdDate: record.WorkItem.CreatedDate,
description: record.WorkItem.Description,
effort: record.WorkItem.Effort,
id: record.WorkItem.Id,
iterationPath: record.WorkItem.IterationPath,
parent: record.WorkItem.Parent,
priority: record.WorkItem.Priority,
relations: record.WorkItem.Relations,
remainingWork: record.WorkItem.RemainingWork,
requester: record.WorkItem.Requester,
resolvedDate: record.WorkItem.ResolvedDate,
revision: record.WorkItem.Revision,
riskReductionMinusOpportunityEnablement: record.WorkItem.RiskReductionMinusOpportunityEnablement,
startDate: record.WorkItem.StartDate,
state: record.WorkItem.State,
storyPoints: record.WorkItem.StoryPoints,
tags: record.WorkItem.Tags,
targetDate: record.WorkItem.TargetDate,
timeCriticality: record.WorkItem.TimeCriticality,
title: record.WorkItem.Title,
violation: record.WorkItem.Violation is null ? violation : record.WorkItem.Violation,
weightedShortestJobFirst: record.WorkItem.WeightedShortestJobFirst,
workItemType: record.WorkItem.WorkItemType);
result = new(workItem, record.Parent, Array.Empty<Record>(), Array.Empty<Record>(), Array.Empty<Record>());
return result;
}

View File

@ -24,12 +24,14 @@ internal class WorkItem
int? parent,
int? priority,
Relation[]? relations,
long? remainingWork,
string? requester,
DateTime? resolvedDate,
int revision,
long? riskReductionMinusOpportunityEnablement,
DateTime? startDate,
string state,
long? storyPoints,
string tags,
DateTime? targetDate,
long? timeCriticality,
@ -53,12 +55,14 @@ internal class WorkItem
Parent = parent;
Priority = priority;
Relations = relations;
RemainingWork = remainingWork;
Requester = requester;
ResolvedDate = resolvedDate;
Revision = revision;
RiskReductionMinusOpportunityEnablement = riskReductionMinusOpportunityEnablement;
StartDate = startDate;
State = state;
StoryPoints = storyPoints;
Tags = tags;
TargetDate = targetDate;
TimeCriticality = timeCriticality;
@ -72,67 +76,71 @@ internal class WorkItem
public static WorkItem Get(WorkItem workItem, Relation[] relations)
{
WorkItem result = new(workItem.ActivatedDate,
workItem.AreaPath,
workItem.AssignedTo,
workItem.BusinessValue,
workItem.ChangedDate,
workItem.ClosedDate,
workItem.CommentCount,
workItem.CreatedDate,
workItem.Description,
workItem.Effort,
workItem.Id,
workItem.IterationPath,
workItem.Parent,
workItem.Priority,
relations,
workItem.Requester,
workItem.ResolvedDate,
workItem.Revision,
workItem.RiskReductionMinusOpportunityEnablement,
workItem.StartDate,
workItem.State,
workItem.Tags,
workItem.TargetDate,
workItem.TimeCriticality,
workItem.Title,
workItem.Violation,
workItem.WeightedShortestJobFirst,
workItem.WorkItemType);
WorkItem result = new(activatedDate: workItem.ActivatedDate,
areaPath: workItem.AreaPath,
assignedTo: workItem.AssignedTo,
businessValue: workItem.BusinessValue,
changedDate: workItem.ChangedDate,
closedDate: workItem.ClosedDate,
commentCount: workItem.CommentCount,
createdDate: workItem.CreatedDate,
description: workItem.Description,
effort: workItem.Effort,
id: workItem.Id,
iterationPath: workItem.IterationPath,
parent: workItem.Parent,
priority: workItem.Priority,
relations: relations,
remainingWork: workItem.RemainingWork,
requester: workItem.Requester,
resolvedDate: workItem.ResolvedDate,
revision: workItem.Revision,
riskReductionMinusOpportunityEnablement: workItem.RiskReductionMinusOpportunityEnablement,
startDate: workItem.StartDate,
state: workItem.State,
storyPoints: workItem.StoryPoints,
tags: workItem.Tags,
targetDate: workItem.TargetDate,
timeCriticality: workItem.TimeCriticality,
title: workItem.Title,
violation: workItem.Violation,
weightedShortestJobFirst: workItem.WeightedShortestJobFirst,
workItemType: workItem.WorkItemType);
return result;
}
public static WorkItem? GetWithOutRelations(WorkItem? workItem)
{
WorkItem? result = workItem is null ? null : new(workItem.ActivatedDate,
workItem.AreaPath,
workItem.AssignedTo,
workItem.BusinessValue,
workItem.ChangedDate,
workItem.ClosedDate,
workItem.CommentCount,
workItem.CreatedDate,
workItem.Description,
workItem.Effort,
workItem.Id,
workItem.IterationPath,
workItem.Parent,
workItem.Priority,
Array.Empty<Relation>(),
workItem.Requester,
workItem.ResolvedDate,
workItem.Revision,
workItem.RiskReductionMinusOpportunityEnablement,
workItem.StartDate,
workItem.State,
workItem.Tags,
workItem.TargetDate,
workItem.TimeCriticality,
workItem.Title,
workItem.Violation,
workItem.WeightedShortestJobFirst,
workItem.WorkItemType);
WorkItem? result = workItem is null ? null : new(activatedDate: workItem.ActivatedDate,
areaPath: workItem.AreaPath,
assignedTo: workItem.AssignedTo,
businessValue: workItem.BusinessValue,
changedDate: workItem.ChangedDate,
closedDate: workItem.ClosedDate,
commentCount: workItem.CommentCount,
createdDate: workItem.CreatedDate,
description: workItem.Description,
effort: workItem.Effort,
id: workItem.Id,
iterationPath: workItem.IterationPath,
parent: workItem.Parent,
priority: workItem.Priority,
relations: Array.Empty<Relation>(),
remainingWork: workItem.RemainingWork,
requester: workItem.Requester,
resolvedDate: workItem.ResolvedDate,
revision: workItem.Revision,
riskReductionMinusOpportunityEnablement: workItem.RiskReductionMinusOpportunityEnablement,
startDate: workItem.StartDate,
state: workItem.State,
storyPoints: workItem.StoryPoints,
tags: workItem.Tags,
targetDate: workItem.TargetDate,
timeCriticality: workItem.TimeCriticality,
title: workItem.Title,
violation: workItem.Violation,
weightedShortestJobFirst: workItem.WeightedShortestJobFirst,
workItemType: workItem.WorkItemType);
return result;
}
@ -151,12 +159,14 @@ internal class WorkItem
[JsonPropertyName("Parent")] public int? Parent { get; }
[JsonPropertyName("Priority")] public int? Priority { get; }
[JsonPropertyName("Relations")] public Relation[]? Relations { get; }
[JsonPropertyName("RemainingWork")] public long? RemainingWork { get; }
[JsonPropertyName("Requester")] public string? Requester { get; }
[JsonPropertyName("ResolvedDate")] public DateTime? ResolvedDate { get; }
[JsonPropertyName("Revision")] public int Revision { get; }
[JsonPropertyName("RiskReductionMinusOpportunityEnablement")] public long? RiskReductionMinusOpportunityEnablement { get; }
[JsonPropertyName("StartDate")] public DateTime? StartDate { get; }
[JsonPropertyName("State")] public string State { get; }
[JsonPropertyName("StoryPoints")] public long? StoryPoints { get; }
[JsonPropertyName("Tags")] public string Tags { get; }
[JsonPropertyName("TargetDate")] public DateTime? TargetDate { get; }
[JsonPropertyName("TimeCriticality")] public long? TimeCriticality { get; }

View File

@ -1,163 +1 @@
trigger:
branches:
include:
- Development
paths:
include:
- "Adaptation/*"
pool:
name: eaf
demands: MESAFIBACKLOG-Development
steps:
- script: |
set coreVersion=net7.0
echo %coreVersion%
echo ##vso[task.setvariable variable=CoreVersion;]%coreVersion%
echo $(CoreVersion)
displayName: CoreVersion
- script: |
set configuration=Debug
echo %configuration%
echo ##vso[task.setvariable variable=Configuration;]%configuration%
echo $(Configuration)
displayName: Configuration
- script: |
set nugetSource=https://eaf-dev-reporting.mes.infineon.com/v3/index.json
echo %nugetSource%
echo ##vso[task.setvariable variable=NugetSource;]%nugetSource%
echo $(NugetSource)
displayName: NugetSource
- script: |
set gitCommit=$(Build.SourceVersion)
set gitCommitSeven=%gitCommit:~0,7%
echo %gitCommitSeven%
echo ##vso[task.setvariable variable=GitCommitSeven;]%gitCommitSeven%
echo $(GitCommitSeven)
displayName: GitCommitSeven
- script: |
echo $(Build.BuildId)
echo $(Build.Reason)
echo $(Build.Repository.Id)
echo $(Build.Repository.Name)
echo $(Build.SourceVersion)
echo $(CoreVersion)
echo $(Configuration)
echo $(NugetSource)
echo $(GitCommitSeven)
REM echo $(pipelinePassword)
displayName: "Echo Check"
- script: '"C:\program files\dotnet\dotnet.exe" nuget locals all --clear'
displayName: "Nuget Clear"
enabled: false
- script: |
"C:\program files\dotnet\dotnet.exe" user-secrets init
"C:\program files\dotnet\dotnet.exe" user-secrets set "BuildNumber" "$(Build.BuildId)"
"C:\program files\dotnet\dotnet.exe" user-secrets set "GitCommitSeven" "$(GitCommitSeven)"
"C:\program files\dotnet\dotnet.exe" user-secrets list
workingDirectory: Adaptation
displayName: "Safe storage of app secrets - Adaptation"
- script: '"C:\program files\dotnet\dotnet.exe" build --configuration $(Configuration) --source $(NugetSource)'
workingDirectory: Adaptation
displayName: "Core Build - Adaptation"
- powershell: Get-ChildItem .\ -include TestResults -Recurse | foreach ($_) { remove-item $_.fullname -Force -Recurse }
workingDirectory: "$(System.DefaultWorkingDirectory)/../../../$(Build.Repository.Name)"
displayName: "PowerShell Script"
continueOnError: true
- script: "dotnet test --configuration $(Configuration)"
workingDirectory: Adaptation
displayName: "Core Test"
- script: 'move /y "$(System.DefaultWorkingDirectory)/../../../$(Build.Repository.Name)/05_TestResults/TestResults" $(System.DefaultWorkingDirectory)'
displayName: "Move Results"
- script: '"C:\program files\dotnet\dotnet.exe" tool restore'
workingDirectory: Adaptation
displayName: "Tool Restore"
enabled: false
- script: '"C:\program files\dotnet\dotnet.exe" ReportGenerator -reports:$(System.DefaultWorkingDirectory)/TestResults/**/coverage.cobertura.xml -targetDir:$(Build.ArtifactStagingDirectory)\Coverage -reportTypes:Html_Dark'
workingDirectory: Adaptation
displayName: "Report Generator"
enabled: false
- task: PublishTestResults@2
displayName: "Publish Test Results **/*.trx"
inputs:
testResultsFormat: VSTest
testResultsFiles: "**/*.trx"
testRunTitle: "$(GitCommitSeven)-$(Build.BuildId)-$(CoreVersion)-$(Configuration)-$(Build.Repository.Name)"
searchFolder: "$(System.DefaultWorkingDirectory)/TestResults"
- task: PublishTestResults@2
displayName: "Publish Test Results */coverage.cobertura.xml"
inputs:
testResultsFormat: VSTest
testResultsFiles: "*/coverage.cobertura.xml"
testRunTitle: "$(GitCommitSeven)-$(Build.BuildId)-$(CoreVersion)-$(Configuration)-$(Build.Repository.Name)"
searchFolder: "$(System.DefaultWorkingDirectory)/TestResults"
- task: mspremier.CreateWorkItem.CreateWorkItem-task.CreateWorkItem@1
displayName: "Create work item"
inputs:
teamProject: "Mesa_FI"
workItemType: Bug
title: $(GitCommitSeven)-$(Build.BuildId)-$(Build.Repository.Name)-$(Configuration)
assignedTo: "$(Build.RequestedForId)"
enabled: false
- script: '"C:\program files\dotnet\dotnet.exe" publish --configuration $(Configuration) --runtime win-x64 --self-contained -o $(Build.ArtifactStagingDirectory)\Adaptation --source $(NugetSource)'
workingDirectory: Adaptation
displayName: "Core Publish"
enabled: false
- script: '"C:/Program Files (x86)/Microsoft Visual Studio/2022/BuildTools/MSBuild/Current/Bin/MSBuild.exe" /target:Restore /detailedsummary /consoleloggerparameters:PerformanceSummary;ErrorsOnly; /property:Configuration=$(Configuration);TargetFrameworkVersion=v4.8 /p:RestoreSources=$(NugetSource)'
displayName: "MSBuild Restore"
- script: '"C:/Program Files (x86)/Microsoft Visual Studio/2022/BuildTools/MSBuild/Current/Bin/MSBuild.exe" /target:Build /detailedsummary /consoleloggerparameters:PerformanceSummary;ErrorsOnly; /property:Configuration=$(Configuration);TargetFrameworkVersion=v4.8'
displayName: MSBuild
- script: 'echo $(Build.SourceVersion)-$(Build.BuildId)-$(Build.Repository.Name)>bin\$(Configuration)\$(Build.Repository.Name).txt'
displayName: "Commit Id"
- task: CopyFiles@2
displayName: 'Copy Files to: D:\Framework4.8'
inputs:
Contents: "*$(Build.Repository.Name)*"
SourceFolder: 'bin\$(Configuration)'
TargetFolder: 'D:\Framework4.8\$(GitCommitSeven)-$(Build.BuildId)-$(Build.Repository.Name)-$(Configuration)'
OverWrite: true
enabled: false
- task: CopyFiles@2
displayName: 'Copy Files to: \\mesfs.infineon.com\EC_EAFRepository'
inputs:
Contents: "*$(Build.Repository.Name)*"
SourceFolder: 'bin\$(Configuration)'
TargetFolder: '\\mesfs.infineon.com\EC_EAFRepository\Staging\DeploymentStorage\Adaptation_$(Build.Repository.Name)'
OverWrite: true
enabled: false
- script: |
"C:\program files\dotnet\dotnet.exe" clean --configuration $(Configuration)
workingDirectory: Adaptation
displayName: "Core Clean - Tests"
- script: |
"C:\program files\dotnet\dotnet.exe" clean --configuration $(Configuration)
workingDirectory: Adaptation
displayName: "Core Clean - Adaptation"
- script: 'echo $(Build.SourceVersion)-$(Build.BuildId)>bin_x_x_\$(Configuration)\$(CoreVersion)\win-x64\$(Build.Repository.Name).txt'
displayName: "Force Fail"
enabled: false
# dotnet nuget push --source https://tfs.intra.infineon.com/tfs/FactoryIntegration/fccf8303-9cf7-4788-a9e0-4042541503fa/_packaging/EAF-Mesa-Integration/nuget/v3/index.json --api-key az D:\vsts-agent-win-x64-2.210.1-eaf\nuget\infineon.mesa.pdf.text.stripper.4.8.0.1.nupkg

View File

@ -35,39 +35,57 @@
<RuntimeHostConfigurationOption Include="AssemblyName" Value="MESAFIBACKLOG" />
</ItemGroup>
<ItemGroup>
<PackageReference Include="coverlet.collector" Version="6.0.2" />
<PackageReference Include="CommonMark.NET" Version="0.15.1" />
<PackageReference Include="coverlet.collector" Version="6.0.3" />
<PackageReference Include="FFMpegCore" Version="5.1.0" />
<PackageReference Include="IKVM.AWT.WinForms" Version="7.2.4630.5"><NoWarn>NU1701</NoWarn></PackageReference>
<PackageReference Include="IKVM.OpenJDK.Core" Version="7.2.4630.5"><NoWarn>NU1701</NoWarn></PackageReference>
<PackageReference Include="IKVM.OpenJDK.Media" Version="7.2.4630.5"><NoWarn>NU1701</NoWarn></PackageReference>
<PackageReference Include="IKVM.OpenJDK.Text" Version="7.2.4630.5"><NoWarn>NU1701</NoWarn></PackageReference>
<PackageReference Include="IKVM.OpenJDK.Util" Version="7.2.4630.5"><NoWarn>NU1701</NoWarn></PackageReference>
<PackageReference Include="IKVM.OpenJDK.XML.API" Version="7.2.4630.5"><NoWarn>NU1701</NoWarn></PackageReference>
<PackageReference Include="IKVM.Runtime" Version="7.2.4630.5"><NoWarn>NU1701</NoWarn></PackageReference>
<PackageReference Include="Instances" Version="3.0.0" />
<PackageReference Include="Microsoft.Extensions.Configuration.Binder" Version="8.0.2" />
<PackageReference Include="Microsoft.Extensions.Configuration.CommandLine" Version="8.0.0" />
<PackageReference Include="Microsoft.Extensions.Configuration.EnvironmentVariables" Version="8.0.0" />
<PackageReference Include="Microsoft.Extensions.Configuration.FileExtensions" Version="8.0.1" />
<PackageReference Include="Microsoft.Extensions.Configuration.json" Version="8.0.1" />
<PackageReference Include="Microsoft.Extensions.Configuration" Version="8.0.0" />
<PackageReference Include="Microsoft.Extensions.DependencyInjection" Version="8.0.1" />
<PackageReference Include="Microsoft.Extensions.Logging.Console" Version="8.0.1" />
<PackageReference Include="Microsoft.Extensions.Logging.Debug" Version="8.0.1" />
<PackageReference Include="Microsoft.Extensions.Logging" Version="8.0.1" />
<PackageReference Include="Microsoft.NET.Test.Sdk" Version="17.11.1" />
<PackageReference Include="Microsoft.Win32.SystemEvents" Version="8.0.0" />
<PackageReference Include="MSTest.TestAdapter" Version="3.6.1" />
<PackageReference Include="MSTest.TestFramework" Version="3.6.1" />
<PackageReference Include="Pdfbox" Version="1.1.1"><NoWarn>NU1701</NoWarn></PackageReference>
<PackageReference Include="RoboSharp" Version="1.5.3" />
<PackageReference Include="System.Configuration.ConfigurationManager" Version="8.0.1" />
<PackageReference Include="System.Data.OleDb" Version="8.0.1" />
<PackageReference Include="IKVM.AWT.WinForms" Version="7.2.4630.5">
<NoWarn>NU1701</NoWarn>
</PackageReference>
<PackageReference Include="IKVM.OpenJDK.Core" Version="7.2.4630.5">
<NoWarn>NU1701</NoWarn>
</PackageReference>
<PackageReference Include="IKVM.OpenJDK.Media" Version="7.2.4630.5">
<NoWarn>NU1701</NoWarn>
</PackageReference>
<PackageReference Include="IKVM.OpenJDK.Text" Version="7.2.4630.5">
<NoWarn>NU1701</NoWarn>
</PackageReference>
<PackageReference Include="IKVM.OpenJDK.Util" Version="7.2.4630.5">
<NoWarn>NU1701</NoWarn>
</PackageReference>
<PackageReference Include="IKVM.OpenJDK.XML.API" Version="7.2.4630.5">
<NoWarn>NU1701</NoWarn>
</PackageReference>
<PackageReference Include="IKVM.Runtime" Version="7.2.4630.5">
<NoWarn>NU1701</NoWarn>
</PackageReference>
<PackageReference Include="Instances" Version="3.0.1" />
<PackageReference Include="log4net" Version="3.0.3"></PackageReference>
<PackageReference Include="Microsoft.Extensions.Configuration.Binder" Version="9.0.0" />
<PackageReference Include="Microsoft.Extensions.Configuration.CommandLine" Version="9.0.0" />
<PackageReference Include="Microsoft.Extensions.Configuration.EnvironmentVariables" Version="9.0.0" />
<PackageReference Include="Microsoft.Extensions.Configuration.FileExtensions" Version="9.0.0" />
<PackageReference Include="Microsoft.Extensions.Configuration.json" Version="9.0.0" />
<PackageReference Include="Microsoft.Extensions.Configuration" Version="9.0.0" />
<PackageReference Include="Microsoft.Extensions.DependencyInjection" Version="9.0.0" />
<PackageReference Include="Microsoft.Extensions.Logging.Console" Version="9.0.0" />
<PackageReference Include="Microsoft.Extensions.Logging.Debug" Version="9.0.0" />
<PackageReference Include="Microsoft.Extensions.Logging" Version="9.0.0" />
<PackageReference Include="Microsoft.NET.Test.Sdk" Version="17.12.0" />
<PackageReference Include="Microsoft.Win32.SystemEvents" Version="9.0.0" />
<PackageReference Include="MSTest.TestAdapter" Version="3.7.0" />
<PackageReference Include="MSTest.TestFramework" Version="3.7.0" />
<PackageReference Include="Pdfbox" Version="1.1.1">
<NoWarn>NU1701</NoWarn>
</PackageReference>
<PackageReference Include="RoboSharp" Version="1.6.0" />
<PackageReference Include="System.Configuration.ConfigurationManager" Version="9.0.0" />
<PackageReference Include="System.Data.OleDb" Version="9.0.0" />
<PackageReference Include="System.Data.SqlClient" Version="4.8.6" />
<PackageReference Include="System.Drawing.Common" Version="8.0.10" />
<PackageReference Include="System.Text.Json" Version="8.0.5" />
<PackageReference Include="Tesseract" Version="5.2.0" />
<PackageReference Include="System.Drawing.Common" Version="9.0.0" />
<PackageReference Include="System.Text.Json" Version="9.0.0" />
</ItemGroup>
<ItemGroup>
<PackageReference Include="CommonMark.NET" Version="0.15.1" />
</ItemGroup>
<ItemGroup>
<PackageReference Include="Microsoft.AspNet.WebApi.SelfHost" Version="5.2.9" />
@ -76,16 +94,24 @@
<PackageReference Include="Microsoft.TeamFoundationServer.Client" Version="16.205.1" />
</ItemGroup>
<ItemGroup>
<PackageReference Include="Infineon.Mesa.PDF.Text.Stripper" Version="4.8.0.2">
<NoWarn>NU1701</NoWarn>
</PackageReference>
</ItemGroup>
<ItemGroup>
<PackageReference Include="Infineon.Yoda.DotNetCore" Version="5.4.3" />
<PackageReference Include="Tibco.Rendezvous.DotNetCore" Version="8.5.0" />
<PackageReference Include="Infineon.Yoda.DotNetCore" Version="5.4.1" />
</ItemGroup>
<ItemGroup>
<PackageReference Include="Infineon.Mesa.PDF.Text.Stripper" Version="4.8.0.1"><NoWarn>NU1701</NoWarn></PackageReference>
<PackageReference Include="Tesseract" Version="5.2.0" />
</ItemGroup>
<ItemGroup>
<PackageReference Include="External.Common.Logging.Core" Version="3.3.1"><NoWarn>NU1701</NoWarn></PackageReference>
<PackageReference Include="External.Common.Logging" Version="3.3.1"><NoWarn>NU1701</NoWarn></PackageReference>
<PackageReference Include="External.log4net" Version="2.0.8"><NoWarn>NU1701</NoWarn></PackageReference>
<PackageReference Include="Microsoft.AspNet.SignalR" Version="2.4.3" />
<PackageReference Include="Microsoft.AspNet.SignalR.Core" Version="2.4.3" />
<PackageReference Include="Microsoft.Owin" Version="4.0.0" />
<PackageReference Include="Microsoft.Owin.Cors" Version="4.0.0" />
<PackageReference Include="Microsoft.Owin.Hosting" Version="4.0.0" />
<PackageReference Include="Microsoft.Owin.Host.HttpListener" Version="4.0.0" />
</ItemGroup>
<ItemGroup>
<None Include="appsettings.json">

View File

@ -1,3 +1,9 @@
# D:
# cd D:\EAF-Mesa-Integration
# config --url https://tfs.intra.infineon.com/tfs/FactoryIntegration --pool "EAF Mesa Integration" --agent mestsa003-meseafsvc --work _work --runAsService --auth negotiate --userName infineon\phares
# D:
# cd D:\EAF-Mesa-Integration
# config --url https://tfs.intra.infineon.com/tfs/FactoryIntegration --pool "EAF Mesa Integration" --agent mestsa07ec-ecmeseaf --work _work --runAsService --auth negotiate --userName infineon\phares
trigger:
branches:
include:
@ -6,41 +12,21 @@ trigger:
include:
- "Adaptation/*"
stages:
- stage: DevServer
displayName: DevServer
pool:
name: eaf
demands: MESAFIBACKLOG
name: EAF Mesa Integration
demands: MESAFIBACKLOG-Development
variables:
coreVersion: "net8.0"
nugetSource: "https://artifactory.intra.infineon.com/artifactory/api/nuget/ngt-fi-package-main-vir/"
jobs:
- job: SetupEnvironment
steps:
- script: |
set coreVersion=net7.0
echo %coreVersion%
echo ##vso[task.setvariable variable=CoreVersion;]%coreVersion%
echo $(CoreVersion)
displayName: CoreVersion
- script: |
set configuration=Release
echo %configuration%
echo ##vso[task.setvariable variable=Configuration;]%configuration%
echo $(Configuration)
displayName: Configuration
- script: |
REM set nugetSource=https://eaf-prod.mes.infineon.com/v3/index.json
set nugetSource=https://eaf-dev-reporting.mes.infineon.com/v3/index.json
echo %nugetSource%
echo ##vso[task.setvariable variable=NugetSource;]%nugetSource%
echo $(NugetSource)
displayName: NugetSource
- script: |
set gitCommit=$(Build.SourceVersion)
set gitCommitSeven=%gitCommit:~0,7%
echo %gitCommitSeven%
echo ##vso[task.setvariable variable=GitCommitSeven;]%gitCommitSeven%
echo $(GitCommitSeven)
displayName: GitCommitSeven
- script: |
echo $(Build.BuildId)
echo $(Build.Reason)
@ -48,10 +34,7 @@ steps:
echo $(Build.Repository.Name)
echo $(Build.SourceVersion)
echo $(CoreVersion)
echo $(Configuration)
echo $(NugetSource)
echo $(GitCommitSeven)
REM echo $(pipelinePassword)
displayName: "Echo Check"
- script: '"C:\program files\dotnet\dotnet.exe" nuget locals all --clear'
@ -61,21 +44,77 @@ steps:
- script: |
"C:\program files\dotnet\dotnet.exe" user-secrets init
"C:\program files\dotnet\dotnet.exe" user-secrets set "BuildNumber" "$(Build.BuildId)"
"C:\program files\dotnet\dotnet.exe" user-secrets set "GitCommitSeven" "$(GitCommitSeven)"
"C:\program files\dotnet\dotnet.exe" user-secrets set "GitCommit" "$(Build.SourceVersion)"
"C:\program files\dotnet\dotnet.exe" user-secrets list
workingDirectory: Adaptation
displayName: "Safe storage of app secrets - Adaptation"
- job: BuildDebug
dependsOn:
- SetupEnvironment
steps:
- script: |
set configuration=Debug
echo %configuration%
echo ##vso[task.setvariable variable=Configuration;]%configuration%
echo $(Configuration)
displayName: Configuration
- script: '"C:\program files\dotnet\dotnet.exe" build --configuration $(Configuration) --source $(NugetSource)'
workingDirectory: Adaptation
displayName: "Core Build - Adaptation"
- job: BuildRelease
dependsOn:
- SetupEnvironment
steps:
- script: |
set configuration=Release
echo %configuration%
echo ##vso[task.setvariable variable=Configuration;]%configuration%
echo $(Configuration)
displayName: Configuration
- script: '"C:/Program Files (x86)/Microsoft Visual Studio/2022/BuildTools/MSBuild/Current/Bin/MSBuild.exe" /target:Restore /detailedsummary /consoleloggerparameters:PerformanceSummary;ErrorsOnly; /property:Configuration=$(Configuration);TargetFrameworkVersion=v4.8 /p:RestoreSources=$(NugetSource)'
displayName: "MSBuild Restore"
- script: '"C:/Program Files (x86)/Microsoft Visual Studio/2022/BuildTools/MSBuild/Current/Bin/MSBuild.exe" /target:Build /detailedsummary /consoleloggerparameters:PerformanceSummary;ErrorsOnly; /property:Configuration=$(Configuration);TargetFrameworkVersion=v4.8'
displayName: MSBuild
- script: '"C:/Program Files (x86)/Microsoft Visual Studio/2022/BuildTools/MSBuild/Current/Bin/MSBuild.exe" /target:Build /detailedsummary /consoleloggerparameters:PerformanceSummary;ErrorsOnly; /property:Configuration=$(Configuration);TargetFrameworkVersion=v4.8'
displayName: MSBuild
- script: 'echo $(Build.Repository.Name)-$(Build.BuildId)-$(Build.SourceVersion)>bin\$(Configuration)\$(Build.Repository.Name).txt'
displayName: "Commit Id"
- task: CopyFiles@2
displayName: 'Copy Files to: D:\EAF'
inputs:
Contents: "*$(Build.Repository.Name)*"
SourceFolder: 'bin\$(Configuration)'
TargetFolder: 'D:\EAF\EAF Deployment Storage\Adaptation_$(Build.Repository.Name)'
OverWrite: true
enabled: true
- job: TestDebug
dependsOn:
- SetupEnvironment
- BuildDebug
- BuildRelease
steps:
- script: |
set configuration=Debug
echo %configuration%
echo ##vso[task.setvariable variable=Configuration;]%configuration%
echo $(Configuration)
displayName: Configuration
- powershell: Get-ChildItem .\ -include TestResults -Recurse | foreach ($_) { remove-item $_.fullname -Force -Recurse }
workingDirectory: "$(System.DefaultWorkingDirectory)/../../../$(Build.Repository.Name)"
displayName: "PowerShell Script"
continueOnError: true
- script: "dotnet test --configuration $(Configuration) --runtime win-x86"
- script: "dotnet test --configuration $(Configuration)"
workingDirectory: Adaptation
displayName: "Core Test"
@ -97,7 +136,7 @@ steps:
inputs:
testResultsFormat: VSTest
testResultsFiles: "**/*.trx"
testRunTitle: "$(GitCommitSeven)-$(Build.BuildId)-$(CoreVersion)-$(Configuration)-$(Build.Repository.Name)"
testRunTitle: "$(Build.BuildId)-$(Build.SourceVersion)-$(CoreVersion)-$(Configuration)-$(Build.Repository.Name)"
searchFolder: "$(System.DefaultWorkingDirectory)/TestResults"
- task: PublishTestResults@2
@ -105,60 +144,141 @@ steps:
inputs:
testResultsFormat: VSTest
testResultsFiles: "*/coverage.cobertura.xml"
testRunTitle: "$(GitCommitSeven)-$(Build.BuildId)-$(CoreVersion)-$(Configuration)-$(Build.Repository.Name)"
testRunTitle: "$(Build.BuildId)-$(Build.SourceVersion)-$(CoreVersion)-$(Configuration)-$(Build.Repository.Name)"
searchFolder: "$(System.DefaultWorkingDirectory)/TestResults"
- task: mspremier.CreateWorkItem.CreateWorkItem-task.CreateWorkItem@1
displayName: "Create work item"
inputs:
teamProject: "Mesa_FI"
workItemType: Bug
title: $(GitCommitSeven)-$(Build.BuildId)-$(Build.Repository.Name)-$(Configuration)
assignedTo: "$(Build.RequestedForId)"
- script: |
"C:\program files\dotnet\dotnet.exe" clean --configuration $(Configuration)
workingDirectory: Adaptation
displayName: "Core Clean - Adaptation"
- script: 'echo $(Build.BuildId)-$(Build.SourceVersion)-bin_x_x_\$(Configuration)\$(CoreVersion)\win-x64\$(Build.Repository.Name).txt'
displayName: "Force Fail"
enabled: false
- script: '"C:\program files\dotnet\dotnet.exe" publish --configuration $(Configuration) --runtime win-x64 --self-contained -o $(Build.ArtifactStagingDirectory)\Adaptation --source $(NugetSource)'
workingDirectory: Adaptation
displayName: "Core Publish"
- stage: ProductionServer
displayName: ProductionServer
pool:
name: EAF Mesa Integration
demands: MESAFIBACKLOG
variables:
coreVersion: "net8.0"
nugetSource: "https://artifactory.intra.infineon.com/artifactory/api/nuget/ngt-fi-package-main-vir/"
jobs:
- job: SetupEnvironment
steps:
- script: |
echo $(Build.BuildId)
echo $(Build.Reason)
echo $(Build.Repository.Id)
echo $(Build.Repository.Name)
echo $(Build.SourceVersion)
echo $(CoreVersion)
echo $(NugetSource)
displayName: "Echo Check"
- script: '"C:\program files\dotnet\dotnet.exe" nuget locals all --clear'
displayName: "Nuget Clear"
enabled: false
- script: |
"C:\program files\dotnet\dotnet.exe" user-secrets init
"C:\program files\dotnet\dotnet.exe" user-secrets set "BuildNumber" "$(Build.BuildId)"
"C:\program files\dotnet\dotnet.exe" user-secrets set "GitCommit" "$(Build.SourceVersion)"
"C:\program files\dotnet\dotnet.exe" user-secrets list
workingDirectory: Adaptation
displayName: "Safe storage of app secrets - Adaptation"
- job: BuildRelease
dependsOn:
- SetupEnvironment
steps:
- script: |
set configuration=Release
echo %configuration%
echo ##vso[task.setvariable variable=Configuration;]%configuration%
echo $(Configuration)
displayName: Configuration
- script: '"C:/Program Files (x86)/Microsoft Visual Studio/2022/BuildTools/MSBuild/Current/Bin/MSBuild.exe" /target:Restore /detailedsummary /consoleloggerparameters:PerformanceSummary;ErrorsOnly; /property:Configuration=$(Configuration);TargetFrameworkVersion=v4.8 /p:RestoreSources=$(NugetSource)'
displayName: "MSBuild Restore"
- script: '"C:/Program Files (x86)/Microsoft Visual Studio/2022/BuildTools/MSBuild/Current/Bin/MSBuild.exe" /target:Build /detailedsummary /consoleloggerparameters:PerformanceSummary;ErrorsOnly; /property:Configuration=$(Configuration);TargetFrameworkVersion=v4.8'
displayName: MSBuild
- script: 'echo $(Build.SourceVersion)-$(Build.BuildId)-$(Build.Repository.Name)>bin\$(Configuration)\$(Build.Repository.Name).txt'
- script: 'echo $(Build.Repository.Name)-$(Build.BuildId)-$(Build.SourceVersion)>bin\$(Configuration)\$(Build.Repository.Name).txt'
displayName: "Commit Id"
- task: CopyFiles@2
displayName: 'Copy Files to: D:\Framework4.8'
inputs:
Contents: "*$(Build.Repository.Name)*"
SourceFolder: 'bin\$(Configuration)'
TargetFolder: 'D:\Framework4.8\$(GitCommitSeven)-$(Build.BuildId)-$(Build.Repository.Name)-$(Configuration)'
OverWrite: true
- task: CopyFiles@2
displayName: 'Copy Files to: \\mesfs.infineon.com\EC_EAFRepository'
displayName: 'Copy Files to: D:\EAF'
inputs:
Contents: "*$(Build.Repository.Name)*"
SourceFolder: 'bin\$(Configuration)'
TargetFolder: 'D:\EAF\EAF Deployment Storage\Adaptation_$(Build.Repository.Name)'
OverWrite: true
- script: |
"C:\program files\dotnet\dotnet.exe" clean --configuration $(Configuration)
- script: '"C:\program files\dotnet\dotnet.exe" build --configuration $(Configuration) --source $(NugetSource)'
workingDirectory: Adaptation
displayName: "Core Clean - Tests"
displayName: "Core Build - Adaptation"
- job: TestRelease
dependsOn:
- SetupEnvironment
- BuildRelease
steps:
- script: |
set configuration=Release
echo %configuration%
echo ##vso[task.setvariable variable=Configuration;]%configuration%
echo $(Configuration)
displayName: Configuration
- powershell: Get-ChildItem .\ -include TestResults -Recurse | foreach ($_) { remove-item $_.fullname -Force -Recurse }
workingDirectory: "$(System.DefaultWorkingDirectory)/../../../$(Build.Repository.Name)"
displayName: "PowerShell Script"
continueOnError: true
- script: "dotnet test --configuration $(Configuration)"
workingDirectory: Adaptation
displayName: "Core Test"
- script: 'move /y "$(System.DefaultWorkingDirectory)/../../../$(Build.Repository.Name)/05_TestResults/TestResults" $(System.DefaultWorkingDirectory)'
displayName: "Move Results"
- script: '"C:\program files\dotnet\dotnet.exe" tool restore'
workingDirectory: Adaptation
displayName: "Tool Restore"
enabled: false
- script: '"C:\program files\dotnet\dotnet.exe" ReportGenerator -reports:$(System.DefaultWorkingDirectory)/TestResults/**/coverage.cobertura.xml -targetDir:$(Build.ArtifactStagingDirectory)\Coverage -reportTypes:Html_Dark'
workingDirectory: Adaptation
displayName: "Report Generator"
enabled: false
- task: PublishTestResults@2
displayName: "Publish Test Results **/*.trx"
inputs:
testResultsFormat: VSTest
testResultsFiles: "**/*.trx"
testRunTitle: "$(Build.BuildId)-$(Build.SourceVersion)-$(CoreVersion)-$(Configuration)-$(Build.Repository.Name)"
searchFolder: "$(System.DefaultWorkingDirectory)/TestResults"
- task: PublishTestResults@2
displayName: "Publish Test Results */coverage.cobertura.xml"
inputs:
testResultsFormat: VSTest
testResultsFiles: "*/coverage.cobertura.xml"
testRunTitle: "$(Build.BuildId)-$(Build.SourceVersion)-$(CoreVersion)-$(Configuration)-$(Build.Repository.Name)"
searchFolder: "$(System.DefaultWorkingDirectory)/TestResults"
- script: |
"C:\program files\dotnet\dotnet.exe" clean --configuration $(Configuration)
workingDirectory: Adaptation
displayName: "Core Clean - Adaptation"
continueOnError: true
- script: 'echo $(Build.SourceVersion)-$(Build.BuildId)>bin_x_x_\$(Configuration)\$(CoreVersion)\win-x64\$(Build.Repository.Name).txt'
- script: 'echo $(Build.BuildId)-$(Build.SourceVersion)-bin_x_x_\$(Configuration)\$(CoreVersion)\win-x64\$(Build.Repository.Name).txt'
displayName: "Force Fail"
enabled: false
enabled: true

File diff suppressed because it is too large

View File

@ -35,6 +35,9 @@ public class Logistics : ILogistics
public long Sequence => _Sequence;
public double TotalSecondsSinceLastWriteTimeFromSequence => _TotalSecondsSinceLastWriteTimeFromSequence;
private static string DefaultMesEntity(DateTime dateTime) =>
string.Concat(dateTime.Ticks, "_MES_ENTITY");
public Logistics(IFileRead fileRead)
{
DateTime dateTime = DateTime.Now;
@ -84,13 +87,13 @@ public class Logistics : ILogistics
_Logistics2 = new List<Logistics2>();
}
public Logistics(string reportFullPath, string logistics)
internal Logistics(string reportFullPath, ProcessDataStandardFormat processDataStandardFormat)
{
string key;
DateTime dateTime;
string[] segments;
_FileInfo = new(reportFullPath);
_Logistics1 = logistics.Split(new string[] { Environment.NewLine }, StringSplitOptions.RemoveEmptyEntries).ToList();
_Logistics1 = processDataStandardFormat.Logistics.ToList();
if (Logistics1.Count == 0 || !Logistics1[0].StartsWith("LOGISTICS_1"))
{
_NullData = null;
@ -190,8 +193,6 @@ public class Logistics : ILogistics
}
}
private static string DefaultMesEntity(DateTime dateTime) => string.Concat(dateTime.Ticks, "_MES_ENTITY");
internal void Update(string mid, string processJobID)
{
_MID = mid;
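Logistics now accepts the parsed ProcessDataStandardFormat instead of a raw logistics string, so call sites parse the report once and hand the result over. A hedged sketch of the new construction path; both members are internal, so this assumes code in the same assembly, and Logistics is assumed to share the Adaptation.Shared namespace:

using Adaptation.Shared;

// Illustrative sketch only, not part of the diff: parse a PDSF report once and
// build Logistics from it via the internal constructor shown above.
internal static class LogisticsExample
{
    internal static Logistics GetLogistics(string reportFullPath)
    {
        ProcessDataStandardFormat processDataStandardFormat =
            ProcessDataStandardFormat.GetProcessDataStandardFormat(reportFullPath);
        return new Logistics(reportFullPath, processDataStandardFormat);
    }
}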

View File

@ -0,0 +1 @@


View File
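The rewrite below replaces the Tuple-based helpers with a small ProcessDataStandardFormat class (Body, Columns, Logistics, Sequence), a mapping-aware reader, and a Write method. A hedged sketch of the intended round trip, assuming a ProcessDataStandardFormatMapping constructed elsewhere with matching old/new column names and indices:

#nullable enable
using Adaptation.Shared;

// Illustrative sketch only: re-map an old-layout PDSF report and write it back out.
// ProcessDataStandardFormatMapping is assumed to live alongside ProcessDataStandardFormat.
internal static class PdsfRoundTripExample
{
    internal static bool TryRewrite(string reportFullPath, string outputPath, ProcessDataStandardFormatMapping mapping)
    {
        ProcessDataStandardFormat? processDataStandardFormat =
            ProcessDataStandardFormat.GetProcessDataStandardFormat(reportFullPath, mapping);
        if (processDataStandardFormat is null)
            return false; // column-count mismatch or empty body/columns/logistics
        ProcessDataStandardFormat.Write(outputPath, processDataStandardFormat);
        return true;
    }
}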

@ -1,18 +1,22 @@
using Adaptation.Shared.Methods;
using System;
using System.Collections.Generic;
using System.Collections.ObjectModel;
using System.Globalization;
using System.IO;
using System.Linq;
using System.Text;
using System.Text.Json;
using System.Text.Json.Serialization;
namespace Adaptation.Shared;
public class ProcessDataStandardFormat
#nullable enable
internal class ProcessDataStandardFormat
{
public enum SearchFor
internal enum SearchFor
{
EquipmentIntegration = 1,
BusinessIntegration = 2,
@ -20,322 +24,38 @@ public class ProcessDataStandardFormat
Archive = 4
}
public static string GetPDSFText(IFileRead fileRead, Logistics logistics, JsonElement[] jsonElements, string logisticsText)
internal long? Sequence { get; private set; }
internal ReadOnlyCollection<string> Body { get; private set; }
internal ReadOnlyCollection<string> Columns { get; private set; }
internal ReadOnlyCollection<string> Logistics { get; private set; }
internal ProcessDataStandardFormat(ReadOnlyCollection<string> body,
ReadOnlyCollection<string> columns,
ReadOnlyCollection<string> logistics,
long? sequence)
{
string result;
if (jsonElements.Length == 0)
result = string.Empty;
else
{
int columns = 0;
List<string> lines;
string endOffset = "E#######T";
string dataOffset = "D#######T";
string headerOffset = "H#######T";
string format = "MM/dd/yyyy HH:mm:ss";
StringBuilder stringBuilder = new();
lines = new string[] { "HEADER_TAG\tHEADER_VALUE", "FORMAT\t2.00", "NUMBER_PASSES\t0001", string.Concat("HEADER_OFFSET\t", headerOffset), string.Concat("DATA_OFFSET\t", dataOffset), string.Concat("END_OFFSET\t", endOffset) }.ToList();
_ = stringBuilder.Append("\"Time\"").Append('\t');
_ = stringBuilder.Append("\"A_LOGISTICS\"").Append('\t');
_ = stringBuilder.Append("\"B_LOGISTICS\"").Append('\t');
for (int i = 0; i < jsonElements.Length;)
{
foreach (JsonProperty jsonProperty in jsonElements[0].EnumerateObject())
{
columns += 1;
_ = stringBuilder.Append('"').Append(jsonProperty.Name).Append('"').Append('\t');
}
break;
}
_ = stringBuilder.Remove(stringBuilder.Length - 1, 1);
lines.Add(stringBuilder.ToString());
for (int i = 0; i < jsonElements.Length; i++)
{
_ = stringBuilder.Clear();
_ = stringBuilder.Append("0.1").Append('\t');
_ = stringBuilder.Append('1').Append('\t');
_ = stringBuilder.Append('2').Append('\t');
foreach (JsonProperty jsonProperty in jsonElements[i].EnumerateObject())
_ = stringBuilder.Append(jsonProperty.Value).Append('\t');
_ = stringBuilder.Remove(stringBuilder.Length - 1, 1);
lines.Add(stringBuilder.ToString());
}
lines.Add(string.Concat("NUM_DATA_ROWS ", jsonElements.Length.ToString().PadLeft(9, '0')));
lines.Add(string.Concat("NUM_DATA_COLUMNS ", (columns + 3).ToString().PadLeft(9, '0')));
lines.Add("DELIMITER ;");
lines.Add(string.Concat("START_TIME_FORMAT ", format));
lines.Add(string.Concat("START_TIME ", logistics.DateTimeFromSequence.ToString(format))); //12/26/2019 15:22:44
lines.Add(string.Concat("LOGISTICS_COLUMN", '\t', "A_LOGISTICS"));
lines.Add(string.Concat("LOGISTICS_COLUMN", '\t', "B_LOGISTICS"));
if (!string.IsNullOrEmpty(logisticsText))
lines.Add(logisticsText);
else
{
lines.Add(string.Concat("LOGISTICS_1", '\t', "A_CHAMBER=;A_INFO=", fileRead.EventName, ";A_INFO2=", fileRead.EquipmentType, ";A_JOBID=", fileRead.CellInstanceName, ";A_MES_ENTITY=", fileRead.MesEntity, ";A_MID=", logistics.MID, ";A_NULL_DATA=", fileRead.NullData, ";A_PPID=NO_PPID;A_PROCESS_JOBID=", logistics.ProcessJobID, ";A_PRODUCT=;A_SEQUENCE=", logistics.Sequence, ";A_WAFER_ID=;"));
lines.Add(string.Concat("LOGISTICS_2", '\t', "B_CHAMBER=;B_INFO=", fileRead.EventName, ";B_INFO2=", fileRead.EquipmentType, ";B_JOBID=", fileRead.CellInstanceName, ";B_MES_ENTITY=", fileRead.MesEntity, ";B_MID=", logistics.MID, ";B_NULL_DATA=", fileRead.NullData, ";B_PPID=NO_PPID;B_PROCESS_JOBID=", logistics.ProcessJobID, ";B_PRODUCT=;B_SEQUENCE=", logistics.Sequence, ";B_WAFER_ID=;"));
lines.Add("END_HEADER");
}
_ = stringBuilder.Clear();
foreach (string line in lines)
_ = stringBuilder.AppendLine(line);
result = stringBuilder.ToString();
result = result.Replace(headerOffset, result.IndexOf("NUM_DATA_ROWS").ToString().PadLeft(9, '0')).
Replace(dataOffset, result.IndexOf('"').ToString().PadLeft(9, '0')).
Replace(endOffset, result.Length.ToString().PadLeft(9, '0'));
}
return result;
Body = body;
Columns = columns;
Logistics = logistics;
Sequence = sequence;
}
public static Tuple<string, string[], string[]> GetLogisticsColumnsAndBody(string reportFullPath, string[] lines = null)
{
string segment;
List<string> body = new();
StringBuilder logistics = new();
lines ??= File.ReadAllLines(reportFullPath);
string[] segments;
if (lines.Length < 7)
segments = Array.Empty<string>();
else
segments = lines[6].Trim().Split('\t');
List<string> columns = new();
for (int c = 0; c < segments.Length; c++)
{
segment = segments[c].Substring(1, segments[c].Length - 2);
if (!columns.Contains(segment))
columns.Add(segment);
else
{
for (short i = 1; i < short.MaxValue; i++)
{
segment = string.Concat(segment, "_", i);
if (!columns.Contains(segment))
{
columns.Add(segment);
break;
}
}
}
}
bool lookForLogistics = false;
for (int r = 7; r < lines.Length; r++)
{
if (lines[r].StartsWith("NUM_DATA_ROWS"))
lookForLogistics = true;
if (!lookForLogistics)
{
body.Add(lines[r]);
continue;
}
if (lines[r].StartsWith("LOGISTICS_1"))
{
for (int i = r; i < lines.Length; i++)
{
if (lines[r].StartsWith("END_HEADER"))
break;
_ = logistics.AppendLine(lines[i]);
}
break;
}
}
return new Tuple<string, string[], string[]>(logistics.ToString(), columns.ToArray(), body.ToArray());
}
internal static string EquipmentIntegration(bool addSpaces = true, char separator = ' ') =>
GetString(SearchFor.EquipmentIntegration, addSpaces, separator);
public static JsonElement[] GetArray(Tuple<string, string[], string[]> pdsf, bool lookForNumbers = false)
{
JsonElement[] results;
string logistics = pdsf.Item1;
string[] columns = pdsf.Item2;
string[] bodyLines = pdsf.Item3;
if (bodyLines.Length == 0 || !bodyLines[0].Contains('\t'))
results = JsonSerializer.Deserialize<JsonElement[]>("[]");
else
{
string value;
string[] segments;
StringBuilder stringBuilder = new();
foreach (string bodyLine in bodyLines)
{
_ = stringBuilder.Append('{');
segments = bodyLine.Trim().Split('\t');
if (!lookForNumbers)
{
for (int c = 1; c < segments.Length; c++)
{
value = segments[c].Replace("\"", "\\\"").Replace("\\", "\\\\");
_ = stringBuilder.Append('"').Append(columns[c]).Append("\":\"").Append(value).Append("\",");
}
}
else
{
for (int c = 1; c < segments.Length; c++)
{
value = segments[c].Replace("\"", "\\\"").Replace("\\", "\\\\");
if (string.IsNullOrEmpty(value))
_ = stringBuilder.Append('"').Append(columns[c]).Append("\":").Append(value).Append("null,");
else if (value.All(char.IsDigit))
_ = stringBuilder.Append('"').Append(columns[c]).Append("\":").Append(value).Append(',');
else
_ = stringBuilder.Append('"').Append(columns[c]).Append("\":\"").Append(value).Append("\",");
}
}
_ = stringBuilder.Remove(stringBuilder.Length - 1, 1);
_ = stringBuilder.AppendLine("},");
}
_ = stringBuilder.Remove(stringBuilder.Length - 3, 3);
results = JsonSerializer.Deserialize<JsonElement[]>(string.Concat("[", stringBuilder, "]"));
}
return results;
}
internal static string BusinessIntegration(bool addSpaces = true, char separator = ' ') =>
GetString(SearchFor.BusinessIntegration, addSpaces, separator);
public static Dictionary<string, List<string>> GetDictionary(Tuple<string, string[], string[]> pdsf)
{
Dictionary<string, List<string>> results = new();
string[] segments;
string[] columns = pdsf.Item2;
string[] bodyLines = pdsf.Item3;
foreach (string column in columns)
results.Add(column, new List<string>());
foreach (string bodyLine in bodyLines)
{
segments = bodyLine.Split('\t');
for (int c = 1; c < segments.Length; c++)
{
if (c >= columns.Length)
continue;
results[columns[c]].Add(segments[c]);
}
}
return results;
}
internal static string SystemExport(bool addSpaces = true, char separator = ' ') =>
GetString(SearchFor.SystemExport, addSpaces, separator);
public static Tuple<string, Dictionary<Test, Dictionary<string, List<string>>>> GetTestDictionary(Tuple<string, string[], string[]> pdsf)
{
Dictionary<Test, Dictionary<string, List<string>>> results = new();
List<string> collection;
string testColumn = nameof(Test);
Dictionary<string, List<string>> keyValuePairs = GetDictionary(pdsf);
if (!keyValuePairs.TryGetValue(testColumn, out collection))
throw new Exception();
int min;
int max;
Test testKey;
List<string> vs;
string columnKey;
Dictionary<Test, List<int>> tests = new();
for (int i = 0; i < collection.Count; i++)
{
if (Enum.TryParse(collection[i], out Test test))
{
if (!results.ContainsKey(test))
{
tests.Add(test, new List<int>());
results.Add(test, new Dictionary<string, List<string>>());
}
tests[test].Add(i);
}
}
foreach (KeyValuePair<Test, List<int>> testKeyValuePair in tests)
{
testKey = testKeyValuePair.Key;
min = testKeyValuePair.Value.Min();
max = testKeyValuePair.Value.Max() + 1;
foreach (KeyValuePair<string, List<string>> keyValuePair in keyValuePairs)
results[testKey].Add(keyValuePair.Key, new List<string>());
foreach (KeyValuePair<string, List<string>> keyValuePair in keyValuePairs)
{
vs = keyValuePair.Value;
columnKey = keyValuePair.Key;
for (int i = min; i < max; i++)
{
if (vs.Count > i)
results[testKey][columnKey].Add(vs[i]);
else
results[testKey][columnKey].Add(string.Empty);
}
}
}
return new Tuple<string, Dictionary<Test, Dictionary<string, List<string>>>>(pdsf.Item1, results);
}
internal static string Archive(bool addSpaces = true, char separator = ' ') =>
GetString(SearchFor.Archive, addSpaces, separator);
private static string GetString(SearchFor searchFor, bool addSpaces, char separator = ' ')
{
if (!addSpaces)
return string.Concat(((int)searchFor).ToString().PadLeft(2, '0'), searchFor);
else
return string.Concat(((int)searchFor).ToString().PadLeft(2, '0'), separator, searchFor.ToString().Replace("In", string.Concat(separator, "In")).Replace("Ex", string.Concat(separator, "Ex")));
}
internal static ProcessDataStandardFormat GetEmpty() =>
new(new(Array.Empty<string>()), new(Array.Empty<string>()), new(Array.Empty<string>()), null);
public static string EquipmentIntegration(bool addSpaces = true, char separator = ' ') => GetString(SearchFor.EquipmentIntegration, addSpaces, separator);
public static string BusinessIntegration(bool addSpaces = true, char separator = ' ') => GetString(SearchFor.BusinessIntegration, addSpaces, separator);
public static string SystemExport(bool addSpaces = true, char separator = ' ') => GetString(SearchFor.SystemExport, addSpaces, separator);
public static string Archive(bool addSpaces = true, char separator = ' ') => GetString(SearchFor.Archive, addSpaces, separator);
public static string GetLines(Logistics logistics, Properties.IScopeInfo scopeInfo, List<string> names, Dictionary<string, List<string>> keyValuePairs, string dateFormat, string timeFormat, List<string> pairedParameterNames, bool useDateTimeFromSequence = true, string format = "", List<string> ignoreParameterNames = null)
{
StringBuilder result = new();
ignoreParameterNames ??= new List<string>();
if (useDateTimeFromSequence && !string.IsNullOrEmpty(format))
throw new Exception();
else if (!useDateTimeFromSequence && string.IsNullOrEmpty(format))
throw new Exception();
string nullData;
const string columnDate = "Date";
const string columnTime = "Time";
const string firstDuplicate = "_1";
_ = result.AppendLine(scopeInfo.Header);
StringBuilder line = new();
if (logistics.NullData is null)
nullData = string.Empty;
else
nullData = logistics.NullData.ToString();
int count = (from l in keyValuePairs select l.Value.Count).Min();
for (int r = 0; r < count; r++)
{
_ = line.Clear();
_ = line.Append('!');
foreach (KeyValuePair<string, List<string>> keyValuePair in keyValuePairs)
{
if (!names.Contains(keyValuePair.Key))
continue;
if (ignoreParameterNames.Contains(keyValuePair.Key))
continue;
if (pairedParameterNames.Contains(keyValuePair.Key))
{
if (string.IsNullOrEmpty(keyValuePair.Value[r]) || keyValuePair.Value[r] == nullData)
continue;
else
_ = result.Append(line).Append(keyValuePair.Key).Append(';').AppendLine(keyValuePair.Value[r]);
}
else
{
if (useDateTimeFromSequence && keyValuePair.Key == columnDate)
_ = line.Append(logistics.DateTimeFromSequence.ToString(dateFormat));
else if (useDateTimeFromSequence && keyValuePair.Key == columnTime)
_ = line.Append(logistics.DateTimeFromSequence.ToString(timeFormat));
else if (!useDateTimeFromSequence && keyValuePair.Key == columnDate && keyValuePair.Value[r].Length == format.Length)
_ = line.Append(DateTime.ParseExact(keyValuePair.Value[r], format, CultureInfo.InvariantCulture).ToString(dateFormat));
else if (!useDateTimeFromSequence && keyValuePair.Key == columnTime && keyValuePairs.TryGetValue(string.Concat(keyValuePair.Key, firstDuplicate), out List<string> value) && value[r].Length == format.Length)
_ = line.Append(DateTime.ParseExact(keyValuePairs[string.Concat(keyValuePair.Key, firstDuplicate)][r], format, CultureInfo.InvariantCulture).ToString(timeFormat));
else if (string.IsNullOrEmpty(keyValuePair.Value[r]) || keyValuePair.Value[r] == nullData)
_ = line.Append(nullData);
else
_ = line.Append(keyValuePair.Value[r]);
_ = line.Append(';');
}
}
if (pairedParameterNames.Count == 0)
{
_ = line.Remove(line.Length - 1, 1);
_ = result.AppendLine(line.ToString());
}
}
return result.ToString();
}
public static List<string> PDSFToFixedWidth(string reportFullPath)
internal static List<string> PDSFToFixedWidth(string reportFullPath)
{
List<string> results = new();
if (!File.Exists(reportFullPath))
@ -404,4 +124,534 @@ public class ProcessDataStandardFormat
return results;
}
internal static ProcessDataStandardFormat GetProcessDataStandardFormat(string reportFullPath, string[]? lines = null)
{
ProcessDataStandardFormat result;
string segment;
List<string> body = new();
List<string> logistics = new();
lines ??= File.ReadAllLines(reportFullPath);
string[] segments;
if (lines.Length < 7)
segments = Array.Empty<string>();
else
segments = lines[6].Trim().Split('\t');
List<string> columns = new();
for (int c = 0; c < segments.Length; c++)
{
segment = segments[c].Substring(1, segments[c].Length - 2);
if (!columns.Contains(segment))
columns.Add(segment);
else
{
for (short i = 1; i < short.MaxValue; i++)
{
segment = string.Concat(segment, "_", i);
if (!columns.Contains(segment))
{
columns.Add(segment);
break;
}
}
}
}
bool lookForLogistics = false;
for (int r = 7; r < lines.Length; r++)
{
if (lines[r].StartsWith("NUM_DATA_ROWS"))
lookForLogistics = true;
if (!lookForLogistics)
{
body.Add(lines[r]);
continue;
}
if (lines[r].StartsWith("LOGISTICS_1"))
{
for (int i = r; i < lines.Length; i++)
{
if (lines[r].StartsWith("END_HEADER"))
break;
logistics.Add(lines[i]);
}
break;
}
}
result = new(logistics.AsReadOnly(), columns.AsReadOnly(), body.AsReadOnly(), null);
return result;
}
internal static ProcessDataStandardFormat? GetProcessDataStandardFormat(string reportFullPath, ProcessDataStandardFormatMapping pdsfMapping)
{
ProcessDataStandardFormat? result;
const int columnsLine = 6;
FileInfo fileInfo = new(reportFullPath);
ProcessDataStandardFormat processDataStandardFormat = GetProcessDataStandardFormat(fileInfo.LastWriteTime, pdsfMapping.NewColumnNames.Count, columnsLine, fileInfo.FullName, lines: null);
JsonElement[]? jsonElements = GetArray(pdsfMapping.NewColumnNames.Count, processDataStandardFormat, lookForNumbers: false);
if (jsonElements is null || pdsfMapping.OldColumnNames.Count != pdsfMapping.ColumnIndices.Count)
result = null;
else
{
result = GetProcessDataStandardFormat(pdsfMapping, jsonElements, processDataStandardFormat);
if (result.Sequence is null || result.Columns.Count == 0 || result.Body.Count == 0 || result.Logistics.Count == 0)
result = null;
}
return result;
}
private static ProcessDataStandardFormat GetProcessDataStandardFormat(DateTime lastWriteTime, int expectedColumns, int columnsLine, string path, string[]? lines)
{
ProcessDataStandardFormat result;
long sequence;
string[] segments;
List<string> body = new();
bool lookForLogistics = false;
List<string> logistics = new();
lines ??= File.ReadAllLines(path);
if (lines.Length <= columnsLine)
segments = Array.Empty<string>();
else
{
segments = lines[columnsLine].Split('\t');
if (segments.Length != expectedColumns)
segments = Array.Empty<string>();
}
string[] columns = segments.Select(l => l.Trim('"')).ToArray();
for (int r = columnsLine + 1; r < lines.Length; r++)
{
if (lines[r].StartsWith("NUM_DATA_ROWS"))
lookForLogistics = true;
if (!lookForLogistics)
{
body.Add(lines[r]);
continue;
}
if (lines[r].StartsWith("LOGISTICS_1"))
{
for (int i = r; i < lines.Length; i++)
{
if (lines[r].StartsWith("END_HEADER"))
break;
logistics.Add(lines[i]);
}
break;
}
}
if (logistics.Count == 0)
sequence = lastWriteTime.Ticks;
else
{
segments = logistics[0].Split(new string[] { "SEQUENCE=" }, StringSplitOptions.None);
sequence = segments.Length < 2 || !long.TryParse(segments[1].Split(';')[0], out long s) ? lastWriteTime.Ticks : s;
}
result = new(body: body.AsReadOnly(),
columns: new(columns),
logistics: logistics.AsReadOnly(),
sequence: sequence);
return result;
}
private static JsonElement[]? GetArray(int expectedColumns, ProcessDataStandardFormat processDataStandardFormat, bool lookForNumbers)
{
JsonElement[]? results;
if (processDataStandardFormat.Body.Count == 0 || !processDataStandardFormat.Body[0].Contains('\t'))
results = JsonSerializer.Deserialize("[]", JsonElementCollectionSourceGenerationContext.Default.JsonElementArray) ?? throw new Exception();
else
{
string value;
string[] segments;
List<string> lines = new();
StringBuilder stringBuilder = new();
foreach (string bodyLine in processDataStandardFormat.Body)
{
_ = stringBuilder.Clear();
_ = stringBuilder.Append('{');
segments = bodyLine.Split('\t');
if (segments.Length != expectedColumns)
continue;
if (!lookForNumbers)
{
for (int c = 0; c < segments.Length; c++)
{
value = segments[c].Replace("\"", "\\\"").Replace("\\", "\\\\");
_ = stringBuilder.Append('"').Append(processDataStandardFormat.Columns[c]).Append("\":\"").Append(value).Append("\",");
}
}
else
{
for (int c = 0; c < segments.Length; c++)
{
value = segments[c].Replace("\"", "\\\"").Replace("\\", "\\\\");
if (string.IsNullOrEmpty(value))
_ = stringBuilder.Append('"').Append(processDataStandardFormat.Columns[c]).Append("\":").Append(value).Append("null,");
else if (value.All(char.IsDigit))
_ = stringBuilder.Append('"').Append(processDataStandardFormat.Columns[c]).Append("\":").Append(value).Append(',');
else
_ = stringBuilder.Append('"').Append(processDataStandardFormat.Columns[c]).Append("\":\"").Append(value).Append("\",");
}
}
_ = stringBuilder.Remove(stringBuilder.Length - 1, 1);
_ = stringBuilder.AppendLine("}");
lines.Add(stringBuilder.ToString());
}
string json = $"[{string.Join(",", lines)}]";
results = JsonSerializer.Deserialize(json, JsonElementCollectionSourceGenerationContext.Default.JsonElementArray);
}
return results;
}
private static ProcessDataStandardFormat GetProcessDataStandardFormat(ProcessDataStandardFormatMapping processDataStandardFormatMapping, JsonElement[] jsonElements, ProcessDataStandardFormat processDataStandardFormat)
{
ProcessDataStandardFormat result;
int column;
string value;
JsonProperty jsonProperty;
List<string> values = new();
List<string> results = new();
JsonProperty[] jsonProperties;
List<string> unknownColumns = new();
for (int i = 0; i < jsonElements.Length; i++)
{
values.Clear();
if (jsonElements[i].ValueKind != JsonValueKind.Object)
{
unknownColumns.Add(string.Empty);
break;
}
jsonProperties = jsonElements[i].EnumerateObject().ToArray();
if (jsonProperties.Length != processDataStandardFormatMapping.NewColumnNames.Count)
continue;
for (int c = 0; c < processDataStandardFormatMapping.ColumnIndices.Count; c++)
{
column = processDataStandardFormatMapping.ColumnIndices[c];
if (column == -1)
value = processDataStandardFormatMapping.OldColumnNames[c];
else
{
jsonProperty = jsonProperties[column];
value = jsonProperty.Value.ToString();
}
values.Add(value);
}
results.Add(string.Join("\t", values));
}
result = new(body: new(results),
columns: processDataStandardFormat.Columns,
logistics: processDataStandardFormat.Logistics,
sequence: processDataStandardFormat.Sequence);
return result;
}
internal static void Write(string path, ProcessDataStandardFormat processDataStandardFormat)
{
List<string> results = new();
if (processDataStandardFormat.Sequence is null)
throw new NullReferenceException(nameof(processDataStandardFormat.Sequence));
string endOffset = "E#######T";
string dataOffset = "D#######T";
string headerOffset = "H#######T";
string format = "MM/dd/yyyy HH:mm:ss";
string startTime = new DateTime(processDataStandardFormat.Sequence.Value).ToString(format);
results.Add("HEADER_TAG\tHEADER_VALUE");
results.Add("FORMAT\t2.00");
results.Add("NUMBER_PASSES\t0001");
results.Add($"HEADER_OFFSET\t{headerOffset}");
results.Add($"DATA_OFFSET\t{dataOffset}");
results.Add($"END_OFFSET\t{endOffset}");
results.Add($"\"{string.Join("\",\t\"", processDataStandardFormat.Columns)}\"");
results.AddRange(processDataStandardFormat.Body);
results.Add($"NUM_DATA_ROWS\t{processDataStandardFormat.Body.Count.ToString().PadLeft(9, '0')}");
results.Add($"NUM_DATA_COLUMNS\t{processDataStandardFormat.Columns.Count.ToString().PadLeft(9, '0')}");
results.Add("DELIMITER\t;");
results.Add($"START_TIME_FORMAT\t{format}");
results.Add($"START_TIME\t{startTime}");
results.Add("LOGISTICS_COLUMN\tA_LOGISTICS");
results.Add("LOGISTICS_COLUMN\tB_LOGISTICS");
results.AddRange(processDataStandardFormat.Logistics);
File.WriteAllText(path, string.Join(Environment.NewLine, results));
}
internal static Dictionary<string, List<string>> GetDictionary(ProcessDataStandardFormat processDataStandardFormat)
{
Dictionary<string, List<string>> results = new();
string[] segments;
foreach (string column in processDataStandardFormat.Columns)
results.Add(column, new List<string>());
foreach (string bodyLine in processDataStandardFormat.Body)
{
segments = bodyLine.Split('\t');
for (int c = 1; c < segments.Length; c++)
{
if (c >= processDataStandardFormat.Columns.Count)
continue;
results[processDataStandardFormat.Columns[c]].Add(segments[c]);
}
}
return results;
}
internal static JsonElement[] GetArray(ProcessDataStandardFormat processDataStandardFormat, bool lookForNumbers = false)
{
JsonElement[] results;
if (processDataStandardFormat.Body.Count == 0 || !processDataStandardFormat.Body[0].Contains('\t'))
results = JsonSerializer.Deserialize("[]", JsonElementCollectionSourceGenerationContext.Default.JsonElementArray) ?? throw new Exception();
else
{
string value;
string[] segments;
List<string> lines = new();
StringBuilder stringBuilder = new();
foreach (string bodyLine in processDataStandardFormat.Body)
{
_ = stringBuilder.Clear();
_ = stringBuilder.Append('{');
segments = bodyLine.Trim().Split('\t');
if (!lookForNumbers)
{
for (int c = 1; c < segments.Length; c++)
{
value = segments[c].Replace("\"", "\\\"").Replace("\\", "\\\\");
_ = stringBuilder.Append('"').Append(processDataStandardFormat.Columns[c]).Append("\":\"").Append(value).Append("\",");
}
}
else
{
for (int c = 1; c < segments.Length; c++)
{
value = segments[c].Replace("\"", "\\\"").Replace("\\", "\\\\");
if (string.IsNullOrEmpty(value))
_ = stringBuilder.Append('"').Append(processDataStandardFormat.Columns[c]).Append("\":").Append(value).Append("null,");
else if (value.All(char.IsDigit))
_ = stringBuilder.Append('"').Append(processDataStandardFormat.Columns[c]).Append("\":").Append(value).Append(',');
else
_ = stringBuilder.Append('"').Append(processDataStandardFormat.Columns[c]).Append("\":\"").Append(value).Append("\",");
}
}
_ = stringBuilder.Remove(stringBuilder.Length - 1, 1);
_ = stringBuilder.AppendLine("}");
lines.Add(stringBuilder.ToString());
}
string json = $"[{string.Join(",", lines)}]";
results = JsonSerializer.Deserialize<JsonElement[]>(json) ?? throw new Exception();
}
return results;
}
internal static string GetPDSFText(IFileRead fileRead, Logistics logistics, JsonElement[] jsonElements, string logisticsText)
{
string result;
if (jsonElements.Length == 0)
result = string.Empty;
else
{
int columns = 0;
List<string> lines;
string endOffset = "E#######T";
string dataOffset = "D#######T";
string headerOffset = "H#######T";
string format = "MM/dd/yyyy HH:mm:ss";
StringBuilder stringBuilder = new();
lines = new string[] { "HEADER_TAG\tHEADER_VALUE", "FORMAT\t2.00", "NUMBER_PASSES\t0001", string.Concat("HEADER_OFFSET\t", headerOffset), string.Concat("DATA_OFFSET\t", dataOffset), string.Concat("END_OFFSET\t", endOffset) }.ToList();
_ = stringBuilder.Append("\"Time\"").Append('\t');
_ = stringBuilder.Append("\"A_LOGISTICS\"").Append('\t');
_ = stringBuilder.Append("\"B_LOGISTICS\"").Append('\t');
for (int i = 0; i < jsonElements.Length;)
{
foreach (JsonProperty jsonProperty in jsonElements[0].EnumerateObject())
{
columns += 1;
_ = stringBuilder.Append('"').Append(jsonProperty.Name).Append('"').Append('\t');
}
break;
}
_ = stringBuilder.Remove(stringBuilder.Length - 1, 1);
lines.Add(stringBuilder.ToString());
for (int i = 0; i < jsonElements.Length; i++)
{
_ = stringBuilder.Clear();
_ = stringBuilder.Append("0.1").Append('\t');
_ = stringBuilder.Append('1').Append('\t');
_ = stringBuilder.Append('2').Append('\t');
foreach (JsonProperty jsonProperty in jsonElements[i].EnumerateObject())
_ = stringBuilder.Append(jsonProperty.Value).Append('\t');
_ = stringBuilder.Remove(stringBuilder.Length - 1, 1);
lines.Add(stringBuilder.ToString());
}
lines.Add(string.Concat("NUM_DATA_ROWS ", jsonElements.Length.ToString().PadLeft(9, '0')));
lines.Add(string.Concat("NUM_DATA_COLUMNS ", (columns + 3).ToString().PadLeft(9, '0')));
lines.Add("DELIMITER ;");
lines.Add(string.Concat("START_TIME_FORMAT ", format));
lines.Add(string.Concat("START_TIME ", logistics.DateTimeFromSequence.ToString(format))); //12/26/2019 15:22:44
lines.Add(string.Concat("LOGISTICS_COLUMN", '\t', "A_LOGISTICS"));
lines.Add(string.Concat("LOGISTICS_COLUMN", '\t', "B_LOGISTICS"));
if (!string.IsNullOrEmpty(logisticsText))
lines.Add(logisticsText);
else
{
lines.Add(string.Concat("LOGISTICS_1", '\t', "A_CHAMBER=;A_INFO=", fileRead.EventName, ";A_INFO2=", fileRead.EquipmentType, ";A_JOBID=", fileRead.CellInstanceName, ";A_MES_ENTITY=", fileRead.MesEntity, ";A_MID=", logistics.MID, ";A_NULL_DATA=", fileRead.NullData, ";A_PPID=NO_PPID;A_PROCESS_JOBID=", logistics.ProcessJobID, ";A_PRODUCT=;A_SEQUENCE=", logistics.Sequence, ";A_WAFER_ID=;"));
lines.Add(string.Concat("LOGISTICS_2", '\t', "B_CHAMBER=;B_INFO=", fileRead.EventName, ";B_INFO2=", fileRead.EquipmentType, ";B_JOBID=", fileRead.CellInstanceName, ";B_MES_ENTITY=", fileRead.MesEntity, ";B_MID=", logistics.MID, ";B_NULL_DATA=", fileRead.NullData, ";B_PPID=NO_PPID;B_PROCESS_JOBID=", logistics.ProcessJobID, ";B_PRODUCT=;B_SEQUENCE=", logistics.Sequence, ";B_WAFER_ID=;"));
lines.Add("END_HEADER");
}
_ = stringBuilder.Clear();
foreach (string line in lines)
_ = stringBuilder.AppendLine(line);
result = stringBuilder.ToString();
result = result.Replace(headerOffset, result.IndexOf("NUM_DATA_ROWS").ToString().PadLeft(9, '0')).
Replace(dataOffset, result.IndexOf('"').ToString().PadLeft(9, '0')).
Replace(endOffset, result.Length.ToString().PadLeft(9, '0'));
}
return result;
}
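// Groups PDSF rows by their Test column value: for each distinct Test, the rows between its
// first and last occurrence are copied into a per-test dictionary of column -> values,
// padding with empty strings where a column has fewer rows.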
internal static Tuple<string, Dictionary<Test, Dictionary<string, List<string>>>> GetTestDictionary(ProcessDataStandardFormat processDataStandardFormat)
{
Dictionary<Test, Dictionary<string, List<string>>> results = new();
List<string>? collection;
string testColumn = nameof(Test);
Dictionary<string, List<string>> keyValuePairs = GetDictionary(processDataStandardFormat);
if (!keyValuePairs.TryGetValue(testColumn, out collection))
throw new Exception();
int min;
int max;
Test testKey;
List<string> vs;
string columnKey;
Dictionary<Test, List<int>> tests = new();
for (int i = 0; i < collection.Count; i++)
{
if (Enum.TryParse(collection[i], out Test test))
{
if (!results.ContainsKey(test))
{
tests.Add(test, new List<int>());
results.Add(test, new Dictionary<string, List<string>>());
}
tests[test].Add(i);
}
}
foreach (KeyValuePair<Test, List<int>> testKeyValuePair in tests)
{
testKey = testKeyValuePair.Key;
min = testKeyValuePair.Value.Min();
max = testKeyValuePair.Value.Max() + 1;
foreach (KeyValuePair<string, List<string>> keyValuePair in keyValuePairs)
results[testKey].Add(keyValuePair.Key, new List<string>());
foreach (KeyValuePair<string, List<string>> keyValuePair in keyValuePairs)
{
vs = keyValuePair.Value;
columnKey = keyValuePair.Key;
for (int i = min; i < max; i++)
{
if (vs.Count > i)
results[testKey][columnKey].Add(vs[i]);
else
results[testKey][columnKey].Add(string.Empty);
}
}
}
return new Tuple<string, Dictionary<Test, Dictionary<string, List<string>>>>(processDataStandardFormat.Logistics[0], results);
}
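// Builds a ';'-delimited report beneath scopeInfo.Header: one '!'-prefixed line per row when
// no paired parameter names are given; otherwise each paired parameter is written as its own
// line, prefixed with the row values accumulated so far. Date/Time columns are rewritten from
// logistics.DateTimeFromSequence or re-parsed with the supplied format string.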
internal static string GetLines(Logistics logistics, Properties.IScopeInfo scopeInfo, List<string> names, Dictionary<string, List<string>> keyValuePairs, string dateFormat, string timeFormat, List<string> pairedParameterNames, bool useDateTimeFromSequence = true, string format = "", List<string>? ignoreParameterNames = null)
{
StringBuilder result = new();
ignoreParameterNames ??= new List<string>();
if (useDateTimeFromSequence && !string.IsNullOrEmpty(format))
throw new Exception();
else if (!useDateTimeFromSequence && string.IsNullOrEmpty(format))
throw new Exception();
string? nullData;
const string columnDate = "Date";
const string columnTime = "Time";
const string firstDuplicate = "_1";
_ = result.AppendLine(scopeInfo.Header);
StringBuilder line = new();
if (logistics.NullData is null)
nullData = string.Empty;
else
nullData = logistics.NullData.ToString();
int count = (from l in keyValuePairs select l.Value.Count).Min();
for (int r = 0; r < count; r++)
{
_ = line.Clear();
_ = line.Append('!');
foreach (KeyValuePair<string, List<string>> keyValuePair in keyValuePairs)
{
if (!names.Contains(keyValuePair.Key))
continue;
if (ignoreParameterNames.Contains(keyValuePair.Key))
continue;
if (pairedParameterNames.Contains(keyValuePair.Key))
{
if (string.IsNullOrEmpty(keyValuePair.Value[r]) || keyValuePair.Value[r] == nullData)
continue;
else
_ = result.Append(line).Append(keyValuePair.Key).Append(';').AppendLine(keyValuePair.Value[r]);
}
else
{
if (useDateTimeFromSequence && keyValuePair.Key == columnDate)
_ = line.Append(logistics.DateTimeFromSequence.ToString(dateFormat));
else if (useDateTimeFromSequence && keyValuePair.Key == columnTime)
_ = line.Append(logistics.DateTimeFromSequence.ToString(timeFormat));
else if (!useDateTimeFromSequence && keyValuePair.Key == columnDate && keyValuePair.Value[r].Length == format.Length)
_ = line.Append(DateTime.ParseExact(keyValuePair.Value[r], format, CultureInfo.InvariantCulture).ToString(dateFormat));
else if (!useDateTimeFromSequence && keyValuePair.Key == columnTime && keyValuePairs.ContainsKey(string.Concat(keyValuePair.Key, firstDuplicate)) && keyValuePairs[string.Concat(keyValuePair.Key, firstDuplicate)][r].Length == format.Length)
_ = line.Append(DateTime.ParseExact(keyValuePairs[string.Concat(keyValuePair.Key, firstDuplicate)][r], format, CultureInfo.InvariantCulture).ToString(timeFormat));
else if (string.IsNullOrEmpty(keyValuePair.Value[r]) || keyValuePair.Value[r] == nullData)
_ = line.Append(nullData);
else
_ = line.Append(keyValuePair.Value[r]);
_ = line.Append(';');
}
}
if (pairedParameterNames.Count == 0)
{
_ = line.Remove(line.Length - 1, 1);
_ = result.AppendLine(line.ToString());
}
}
return result.ToString();
}
private static string GetString(SearchFor searchFor, bool addSpaces, char separator = ' ')
{
if (!addSpaces)
return string.Concat(((int)searchFor).ToString().PadLeft(2, '0'), searchFor);
else
return string.Concat(((int)searchFor).ToString().PadLeft(2, '0'), separator, searchFor.ToString().Replace("In", string.Concat(separator, "In")).Replace("Ex", string.Concat(separator, "Ex")));
}
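// Returns the index of the property with the given name, or null when no match exists. Note
// that the fallback loop re-applies the same exact-name comparison (plus first-character and
// length checks), so it cannot match anything the first pass missed.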
private static int? TryGetPropertyIndex(JsonProperty[] jsonProperties, string propertyName)
{
int? result = null;
for (int i = 0; i < jsonProperties.Length; i++)
{
if (jsonProperties[i].Name != propertyName)
continue;
result = i;
break;
}
if (result is null)
{
for (int i = 0; i < jsonProperties.Length; i++)
{
if (jsonProperties[i].Name[0] != propertyName[0])
continue;
if (jsonProperties[i].Name.Length != propertyName.Length)
continue;
if (jsonProperties[i].Name != propertyName)
continue;
result = i;
break;
}
}
return result;
}
}
[JsonSourceGenerationOptions(WriteIndented = true)]
[JsonSerializable(typeof(JsonElement[]))]
internal partial class JsonElementCollectionSourceGenerationContext : JsonSerializerContext
{
}

View File

@ -0,0 +1,33 @@
using System.Collections.ObjectModel;
namespace Adaptation.Shared;
public class ProcessDataStandardFormatMapping
{
public ReadOnlyCollection<string> BackfillColumns { get; private set; }
public ReadOnlyCollection<int> ColumnIndices { get; private set; }
public ReadOnlyCollection<string> IgnoreColumns { get; private set; }
public ReadOnlyCollection<string> IndexOnlyColumns { get; private set; }
public ReadOnlyDictionary<string, string> KeyValuePairs { get; private set; }
public ReadOnlyCollection<string> NewColumnNames { get; private set; }
public ReadOnlyCollection<string> OldColumnNames { get; private set; }
public ProcessDataStandardFormatMapping(ReadOnlyCollection<string> backfillColumns,
ReadOnlyCollection<int> columnIndices,
ReadOnlyCollection<string> ignoreColumns,
ReadOnlyCollection<string> indexOnlyColumns,
ReadOnlyDictionary<string, string> keyValuePairs,
ReadOnlyCollection<string> newColumnNames,
ReadOnlyCollection<string> oldColumnNames)
{
BackfillColumns = backfillColumns;
ColumnIndices = columnIndices;
IgnoreColumns = ignoreColumns;
IndexOnlyColumns = indexOnlyColumns;
KeyValuePairs = keyValuePairs;
NewColumnNames = newColumnNames;
OldColumnNames = oldColumnNames;
}
}
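// A minimal construction sketch (column names and values below are hypothetical, not taken
// from this repository):
//
// ProcessDataStandardFormatMapping mapping = new(
//     backfillColumns: new ReadOnlyCollection<string>(new[] { "Date" }),
//     columnIndices: new ReadOnlyCollection<int>(new[] { 0, 1, 2 }),
//     ignoreColumns: new ReadOnlyCollection<string>(new[] { "Comments" }),
//     indexOnlyColumns: new ReadOnlyCollection<string>(new[] { "Id" }),
//     keyValuePairs: new ReadOnlyDictionary<string, string>(
//         new System.Collections.Generic.Dictionary<string, string> { ["OldName"] = "NewName" }),
//     newColumnNames: new ReadOnlyCollection<string>(new[] { "NewName" }),
//     oldColumnNames: new ReadOnlyCollection<string>(new[] { "OldName" }));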

View File

@ -1,4 +1,4 @@
#if true
#if v2_57_0
using Adaptation._Tests.Shared;
using Microsoft.Extensions.Logging;
using Microsoft.VisualStudio.TestTools.UnitTesting;

View File

@ -1,4 +1,4 @@
#if true
#if v2_57_0
using Adaptation._Tests.Shared;
using Microsoft.Extensions.Logging;
using Microsoft.VisualStudio.TestTools.UnitTesting;

View File

@ -1,4 +1,4 @@
#if true
#if v2_57_0
using Adaptation._Tests.Shared;
using Microsoft.Extensions.Logging;
using Microsoft.VisualStudio.TestTools.UnitTesting;

View File

@ -1,4 +1,4 @@
#if true
#if v2_57_0
using Adaptation._Tests.Shared;
using Microsoft.Extensions.Logging;
using Microsoft.VisualStudio.TestTools.UnitTesting;

View File

@ -1,4 +1,4 @@
#if true
#if v2_57_0
using Adaptation._Tests.Shared;
using Microsoft.Extensions.Logging;
using Microsoft.VisualStudio.TestTools.UnitTesting;

View File

@ -0,0 +1,65 @@
#if v2_58_0
using Adaptation._Tests.Shared;
using Microsoft.Extensions.Logging;
using Microsoft.VisualStudio.TestTools.UnitTesting;
using System;
using System.Diagnostics;
using System.IO;
using System.Reflection;
namespace Adaptation._Tests.CreateSelfDescription.Development.v2_58_0;
[TestClass]
public class ALIGNMENT_EQPT : EAFLoggingUnitTesting
{
#pragma warning disable CA2254
#pragma warning disable IDE0060
internal static string DummyRoot { get; private set; }
internal static ALIGNMENT_EQPT EAFLoggingUnitTesting { get; private set; }
static ALIGNMENT_EQPT() => DummyRoot = @"\\mesfs.infineon.com\EC_Characterization_Si\Dummy";
public ALIGNMENT_EQPT() : base(DummyRoot, testContext: null, declaringType: null, skipEquipmentDictionary: false)
{
if (EAFLoggingUnitTesting is null)
throw new Exception();
}
public ALIGNMENT_EQPT(TestContext testContext) : base(DummyRoot, testContext, new StackFrame().GetMethod().DeclaringType, skipEquipmentDictionary: false)
{
}
[ClassInitialize]
public static void ClassInitialize(TestContext testContext)
{
EAFLoggingUnitTesting ??= new ALIGNMENT_EQPT(testContext);
EAFLoggingUnitTesting.Logger.LogInformation(string.Concat(testContext.TestName, " - ClassInitialize"));
string[] fileNameAndText = EAFLoggingUnitTesting.AdaptationTesting.GetCSharpText(testContext.TestName);
File.WriteAllText(fileNameAndText[0], fileNameAndText[1]);
File.WriteAllText(fileNameAndText[2], fileNameAndText[3]);
}
[ClassCleanup()]
public static void ClassCleanup()
{
EAFLoggingUnitTesting.Logger?.LogInformation("Cleanup");
EAFLoggingUnitTesting?.Dispose();
}
#if DEBUG
[Ignore]
#endif
[TestMethod]
public void Development__v2_58_0__ALIGNMENT_EQPT__DownloadExcelFile()
{
string check = ".xlsx";
MethodBase methodBase = new StackFrame().GetMethod();
EAFLoggingUnitTesting.Logger.LogInformation(string.Concat(methodBase.Name, " - Getting configuration"));
_ = AdaptationTesting.GetWriteConfigurationGetFileRead(methodBase, check, EAFLoggingUnitTesting.AdaptationTesting);
EAFLoggingUnitTesting.Logger.LogInformation(string.Concat(methodBase.Name, " - Exit"));
}
}
#endif

View File

@ -0,0 +1,65 @@
#if v2_58_0
using Adaptation._Tests.Shared;
using Microsoft.Extensions.Logging;
using Microsoft.VisualStudio.TestTools.UnitTesting;
using System;
using System.Diagnostics;
using System.IO;
using System.Reflection;
namespace Adaptation._Tests.CreateSelfDescription.Development.v2_58_0;
[TestClass]
public class ALIGNMENT : EAFLoggingUnitTesting
{
#pragma warning disable CA2254
#pragma warning disable IDE0060
internal static string DummyRoot { get; private set; }
internal static ALIGNMENT EAFLoggingUnitTesting { get; private set; }
static ALIGNMENT() => DummyRoot = @"\\mesfs.infineon.com\EC_Characterization_Si\Dummy";
public ALIGNMENT() : base(DummyRoot, testContext: null, declaringType: null, skipEquipmentDictionary: false)
{
if (EAFLoggingUnitTesting is null)
throw new Exception();
}
public ALIGNMENT(TestContext testContext) : base(DummyRoot, testContext, new StackFrame().GetMethod().DeclaringType, skipEquipmentDictionary: false)
{
}
[ClassInitialize]
public static void ClassInitialize(TestContext testContext)
{
EAFLoggingUnitTesting ??= new ALIGNMENT(testContext);
EAFLoggingUnitTesting.Logger.LogInformation(string.Concat(testContext.TestName, " - ClassInitialize"));
string[] fileNameAndText = EAFLoggingUnitTesting.AdaptationTesting.GetCSharpText(testContext.TestName);
File.WriteAllText(fileNameAndText[0], fileNameAndText[1]);
File.WriteAllText(fileNameAndText[2], fileNameAndText[3]);
}
[ClassCleanup()]
public static void ClassCleanup()
{
EAFLoggingUnitTesting.Logger?.LogInformation("Cleanup");
EAFLoggingUnitTesting?.Dispose();
}
#if DEBUG
[Ignore]
#endif
[TestMethod]
public void Development__v2_58_0__ALIGNMENT__ConvertExcelToJson()
{
string check = "*.xlsx";
MethodBase methodBase = new StackFrame().GetMethod();
EAFLoggingUnitTesting.Logger.LogInformation(string.Concat(methodBase.Name, " - Getting configuration"));
_ = AdaptationTesting.GetWriteConfigurationGetFileRead(methodBase, check, EAFLoggingUnitTesting.AdaptationTesting);
EAFLoggingUnitTesting.Logger.LogInformation(string.Concat(methodBase.Name, " - Exit"));
}
}
#endif

View File

@ -0,0 +1,65 @@
#if v2_58_0
using Adaptation._Tests.Shared;
using Microsoft.Extensions.Logging;
using Microsoft.VisualStudio.TestTools.UnitTesting;
using System;
using System.Diagnostics;
using System.IO;
using System.Reflection;
namespace Adaptation._Tests.CreateSelfDescription.Development.v2_58_0;
[TestClass]
public class BACKLOG_EQPT : EAFLoggingUnitTesting
{
#pragma warning disable CA2254
#pragma warning disable IDE0060
internal static string DummyRoot { get; private set; }
internal static BACKLOG_EQPT EAFLoggingUnitTesting { get; private set; }
static BACKLOG_EQPT() => DummyRoot = @"\\mesfs.infineon.com\EC_Characterization_Si\Dummy";
public BACKLOG_EQPT() : base(DummyRoot, testContext: null, declaringType: null, skipEquipmentDictionary: false)
{
if (EAFLoggingUnitTesting is null)
throw new Exception();
}
public BACKLOG_EQPT(TestContext testContext) : base(DummyRoot, testContext, new StackFrame().GetMethod().DeclaringType, skipEquipmentDictionary: false)
{
}
[ClassInitialize]
public static void ClassInitialize(TestContext testContext)
{
EAFLoggingUnitTesting ??= new BACKLOG_EQPT(testContext);
EAFLoggingUnitTesting.Logger.LogInformation(string.Concat(testContext.TestName, " - ClassInitialize"));
string[] fileNameAndText = EAFLoggingUnitTesting.AdaptationTesting.GetCSharpText(testContext.TestName);
File.WriteAllText(fileNameAndText[0], fileNameAndText[1]);
File.WriteAllText(fileNameAndText[2], fileNameAndText[3]);
}
[ClassCleanup()]
public static void ClassCleanup()
{
EAFLoggingUnitTesting.Logger?.LogInformation("Cleanup");
EAFLoggingUnitTesting?.Dispose();
}
#if DEBUG
[Ignore]
#endif
[TestMethod]
public void Development__v2_58_0__BACKLOG_EQPT__DownloadWorkItems()
{
string check = ".xlsx";
MethodBase methodBase = new StackFrame().GetMethod();
EAFLoggingUnitTesting.Logger.LogInformation(string.Concat(methodBase.Name, " - Getting configuration"));
_ = AdaptationTesting.GetWriteConfigurationGetFileRead(methodBase, check, EAFLoggingUnitTesting.AdaptationTesting);
EAFLoggingUnitTesting.Logger.LogInformation(string.Concat(methodBase.Name, " - Exit"));
}
}
#endif

View File

@ -0,0 +1,65 @@
#if v2_58_0
using Adaptation._Tests.Shared;
using Microsoft.Extensions.Logging;
using Microsoft.VisualStudio.TestTools.UnitTesting;
using System;
using System.Diagnostics;
using System.IO;
using System.Reflection;
namespace Adaptation._Tests.CreateSelfDescription.Development.v2_58_0;
[TestClass]
public class BACKLOG : EAFLoggingUnitTesting
{
#pragma warning disable CA2254
#pragma warning disable IDE0060
internal static string DummyRoot { get; private set; }
internal static BACKLOG EAFLoggingUnitTesting { get; private set; }
static BACKLOG() => DummyRoot = @"\\mesfs.infineon.com\EC_Characterization_Si\Dummy";
public BACKLOG() : base(DummyRoot, testContext: null, declaringType: null, skipEquipmentDictionary: false)
{
if (EAFLoggingUnitTesting is null)
throw new Exception();
}
public BACKLOG(TestContext testContext) : base(DummyRoot, testContext, new StackFrame().GetMethod().DeclaringType, skipEquipmentDictionary: false)
{
}
[ClassInitialize]
public static void ClassInitialize(TestContext testContext)
{
EAFLoggingUnitTesting ??= new BACKLOG(testContext);
EAFLoggingUnitTesting.Logger.LogInformation(string.Concat(testContext.TestName, " - ClassInitialize"));
string[] fileNameAndText = EAFLoggingUnitTesting.AdaptationTesting.GetCSharpText(testContext.TestName);
File.WriteAllText(fileNameAndText[0], fileNameAndText[1]);
File.WriteAllText(fileNameAndText[2], fileNameAndText[3]);
}
[ClassCleanup()]
public static void ClassCleanup()
{
EAFLoggingUnitTesting.Logger?.LogInformation("Cleanup");
EAFLoggingUnitTesting?.Dispose();
}
#if DEBUG
[Ignore]
#endif
[TestMethod]
public void Development__v2_58_0__BACKLOG__json()
{
string check = "*.json";
MethodBase methodBase = new StackFrame().GetMethod();
EAFLoggingUnitTesting.Logger.LogInformation(string.Concat(methodBase.Name, " - Getting configuration"));
_ = AdaptationTesting.GetWriteConfigurationGetFileRead(methodBase, check, EAFLoggingUnitTesting.AdaptationTesting);
EAFLoggingUnitTesting.Logger.LogInformation(string.Concat(methodBase.Name, " - Exit"));
}
}
#endif

View File

@ -0,0 +1,104 @@
#if v2_58_0
using Adaptation._Tests.Shared;
using Microsoft.Extensions.Logging;
using Microsoft.VisualStudio.TestTools.UnitTesting;
using System;
using System.Diagnostics;
using System.IO;
using System.Reflection;
namespace Adaptation._Tests.CreateSelfDescription.Development.v2_58_0;
[TestClass]
public class MESAFIBACKLOG : EAFLoggingUnitTesting
{
#pragma warning disable CA2254
#pragma warning disable IDE0060
internal static string DummyRoot { get; private set; }
internal static MESAFIBACKLOG EAFLoggingUnitTesting { get; private set; }
static MESAFIBACKLOG() => DummyRoot = @"\\mesfs.infineon.com\EC_Characterization_Si\Dummy";
public MESAFIBACKLOG() : base(DummyRoot, testContext: null, declaringType: null, skipEquipmentDictionary: false)
{
if (EAFLoggingUnitTesting is null)
throw new Exception();
}
public MESAFIBACKLOG(TestContext testContext) : base(DummyRoot, testContext, new StackFrame().GetMethod().DeclaringType, skipEquipmentDictionary: false)
{
}
[ClassInitialize]
public static void ClassInitialize(TestContext testContext)
{
EAFLoggingUnitTesting ??= new MESAFIBACKLOG(testContext);
EAFLoggingUnitTesting.Logger.LogInformation(string.Concat(testContext.TestName, " - ClassInitialize"));
string[] fileNameAndText = EAFLoggingUnitTesting.AdaptationTesting.GetCSharpText(testContext.TestName);
File.WriteAllText(fileNameAndText[0], fileNameAndText[1]);
File.WriteAllText(fileNameAndText[2], fileNameAndText[3]);
}
[ClassCleanup()]
public static void ClassCleanup()
{
EAFLoggingUnitTesting.Logger?.LogInformation("Cleanup");
EAFLoggingUnitTesting?.Dispose();
}
#if DEBUG
[Ignore]
#endif
[TestMethod]
public void Development__v2_58_0__MESAFIBACKLOG__Kanban()
{
string check = "*.json";
MethodBase methodBase = new StackFrame().GetMethod();
EAFLoggingUnitTesting.Logger.LogInformation(string.Concat(methodBase.Name, " - Getting configuration"));
_ = AdaptationTesting.GetWriteConfigurationGetFileRead(methodBase, check, EAFLoggingUnitTesting.AdaptationTesting);
EAFLoggingUnitTesting.Logger.LogInformation(string.Concat(methodBase.Name, " - Exit"));
}
#if DEBUG
[Ignore]
#endif
[TestMethod]
public void Development__v2_58_0__MESAFIBACKLOG__Markdown()
{
string check = "*.json";
MethodBase methodBase = new StackFrame().GetMethod();
EAFLoggingUnitTesting.Logger.LogInformation(string.Concat(methodBase.Name, " - Getting configuration"));
_ = AdaptationTesting.GetWriteConfigurationGetFileRead(methodBase, check, EAFLoggingUnitTesting.AdaptationTesting);
EAFLoggingUnitTesting.Logger.LogInformation(string.Concat(methodBase.Name, " - Exit"));
}
#if DEBUG
[Ignore]
#endif
[TestMethod]
public void Development__v2_58_0__MESAFIBACKLOG__ADO()
{
string check = "*.json";
MethodBase methodBase = new StackFrame().GetMethod();
EAFLoggingUnitTesting.Logger.LogInformation(string.Concat(methodBase.Name, " - Getting configuration"));
_ = AdaptationTesting.GetWriteConfigurationGetFileRead(methodBase, check, EAFLoggingUnitTesting.AdaptationTesting);
EAFLoggingUnitTesting.Logger.LogInformation(string.Concat(methodBase.Name, " - Exit"));
}
#if DEBUG
[Ignore]
#endif
[TestMethod]
public void Development__v2_58_0__MESAFIBACKLOG__Priority()
{
string check = "*.json";
MethodBase methodBase = new StackFrame().GetMethod();
EAFLoggingUnitTesting.Logger.LogInformation(string.Concat(methodBase.Name, " - Getting configuration"));
_ = AdaptationTesting.GetWriteConfigurationGetFileRead(methodBase, check, EAFLoggingUnitTesting.AdaptationTesting);
EAFLoggingUnitTesting.Logger.LogInformation(string.Concat(methodBase.Name, " - Exit"));
}
}
#endif

View File

@ -0,0 +1,65 @@
#if true
using Adaptation._Tests.Shared;
using Microsoft.Extensions.Logging;
using Microsoft.VisualStudio.TestTools.UnitTesting;
using System;
using System.Diagnostics;
using System.IO;
using System.Reflection;
namespace Adaptation._Tests.CreateSelfDescription.Development.v2_59_0;
[TestClass]
public class BACKLOG_EQPT : EAFLoggingUnitTesting
{
#pragma warning disable CA2254
#pragma warning disable IDE0060
internal static string DummyRoot { get; private set; }
internal static BACKLOG_EQPT EAFLoggingUnitTesting { get; private set; }
static BACKLOG_EQPT() => DummyRoot = @"\\mesfs.infineon.com\EC_Characterization_Si\Dummy";
public BACKLOG_EQPT() : base(DummyRoot, testContext: null, declaringType: null, skipEquipmentDictionary: false)
{
if (EAFLoggingUnitTesting is null)
throw new Exception();
}
public BACKLOG_EQPT(TestContext testContext) : base(DummyRoot, testContext, new StackFrame().GetMethod().DeclaringType, skipEquipmentDictionary: false)
{
}
[ClassInitialize]
public static void ClassInitialize(TestContext testContext)
{
EAFLoggingUnitTesting ??= new BACKLOG_EQPT(testContext);
EAFLoggingUnitTesting.Logger.LogInformation(string.Concat(testContext.TestName, " - ClassInitialize"));
string[] fileNameAndText = EAFLoggingUnitTesting.AdaptationTesting.GetCSharpText(testContext.TestName);
File.WriteAllText(fileNameAndText[0], fileNameAndText[1]);
File.WriteAllText(fileNameAndText[2], fileNameAndText[3]);
}
[ClassCleanup()]
public static void ClassCleanup()
{
EAFLoggingUnitTesting.Logger?.LogInformation("Cleanup");
EAFLoggingUnitTesting?.Dispose();
}
#if DEBUG
[Ignore]
#endif
[TestMethod]
public void Development__v2_59_0__BACKLOG_EQPT__DownloadWorkItems()
{
string check = ".xlsx";
MethodBase methodBase = new StackFrame().GetMethod();
EAFLoggingUnitTesting.Logger.LogInformation(string.Concat(methodBase.Name, " - Getting configuration"));
_ = AdaptationTesting.GetWriteConfigurationGetFileRead(methodBase, check, EAFLoggingUnitTesting.AdaptationTesting);
EAFLoggingUnitTesting.Logger.LogInformation(string.Concat(methodBase.Name, " - Exit"));
}
}
#endif

View File

@ -0,0 +1,65 @@
#if true
using Adaptation._Tests.Shared;
using Microsoft.Extensions.Logging;
using Microsoft.VisualStudio.TestTools.UnitTesting;
using System;
using System.Diagnostics;
using System.IO;
using System.Reflection;
namespace Adaptation._Tests.CreateSelfDescription.Development.v2_59_0;
[TestClass]
public class BACKLOG : EAFLoggingUnitTesting
{
#pragma warning disable CA2254
#pragma warning disable IDE0060
internal static string DummyRoot { get; private set; }
internal static BACKLOG EAFLoggingUnitTesting { get; private set; }
static BACKLOG() => DummyRoot = @"\\mesfs.infineon.com\EC_Characterization_Si\Dummy";
public BACKLOG() : base(DummyRoot, testContext: null, declaringType: null, skipEquipmentDictionary: false)
{
if (EAFLoggingUnitTesting is null)
throw new Exception();
}
public BACKLOG(TestContext testContext) : base(DummyRoot, testContext, new StackFrame().GetMethod().DeclaringType, skipEquipmentDictionary: false)
{
}
[ClassInitialize]
public static void ClassInitialize(TestContext testContext)
{
EAFLoggingUnitTesting ??= new BACKLOG(testContext);
EAFLoggingUnitTesting.Logger.LogInformation(string.Concat(testContext.TestName, " - ClassInitialize"));
string[] fileNameAndText = EAFLoggingUnitTesting.AdaptationTesting.GetCSharpText(testContext.TestName);
File.WriteAllText(fileNameAndText[0], fileNameAndText[1]);
File.WriteAllText(fileNameAndText[2], fileNameAndText[3]);
}
[ClassCleanup()]
public static void ClassCleanup()
{
EAFLoggingUnitTesting.Logger?.LogInformation("Cleanup");
EAFLoggingUnitTesting?.Dispose();
}
#if DEBUG
[Ignore]
#endif
[TestMethod]
public void Development__v2_59_0__BACKLOG__json()
{
string check = "*.json";
MethodBase methodBase = new StackFrame().GetMethod();
EAFLoggingUnitTesting.Logger.LogInformation(string.Concat(methodBase.Name, " - Getting configuration"));
_ = AdaptationTesting.GetWriteConfigurationGetFileRead(methodBase, check, EAFLoggingUnitTesting.AdaptationTesting);
EAFLoggingUnitTesting.Logger.LogInformation(string.Concat(methodBase.Name, " - Exit"));
}
}
#endif

Some files were not shown because too many files have changed in this diff