Use Priority when Poll not set

Mike Phares 2024-10-17 15:28:29 -07:00
parent 07d08a9fbc
commit c0fdbad707
13 changed files with 405 additions and 28 deletions

View File

@@ -25,6 +25,7 @@ public class CellInstanceConnectionName
nameof(OpenInsight) => new OpenInsight.FileRead(smtp, fileParameter, cellInstanceName, connectionCount, cellInstanceConnectionName, fileConnectorConfiguration, equipmentTypeName, parameterizedModelObjectDefinitionType, modelObjectParameters, equipmentDictionaryName, dummyRuns, staticRuns, useCyclicalForDescription, isEAFHosted: connectionCount is null),
nameof(OpenInsightMetrologyViewer) => new OpenInsightMetrologyViewer.FileRead(smtp, fileParameter, cellInstanceName, connectionCount, cellInstanceConnectionName, fileConnectorConfiguration, equipmentTypeName, parameterizedModelObjectDefinitionType, modelObjectParameters, equipmentDictionaryName, dummyRuns, staticRuns, useCyclicalForDescription, isEAFHosted: connectionCount is null),
nameof(OpenInsightMetrologyViewerAttachments) => new OpenInsightMetrologyViewerAttachments.FileRead(smtp, fileParameter, cellInstanceName, connectionCount, cellInstanceConnectionName, fileConnectorConfiguration, equipmentTypeName, parameterizedModelObjectDefinitionType, modelObjectParameters, equipmentDictionaryName, dummyRuns, staticRuns, useCyclicalForDescription, isEAFHosted: connectionCount is null),
+nameof(Priority) => new Priority.FileRead(smtp, fileParameter, cellInstanceName, connectionCount, cellInstanceConnectionName, fileConnectorConfiguration, equipmentTypeName, parameterizedModelObjectDefinitionType, modelObjectParameters, equipmentDictionaryName, dummyRuns, staticRuns, useCyclicalForDescription, isEAFHosted: connectionCount is null),
nameof(Processed) => new Processed.FileRead(smtp, fileParameter, cellInstanceName, connectionCount, cellInstanceConnectionName, fileConnectorConfiguration, equipmentTypeName, parameterizedModelObjectDefinitionType, modelObjectParameters, equipmentDictionaryName, dummyRuns, staticRuns, useCyclicalForDescription, isEAFHosted: connectionCount is null),
nameof(SPaCe) => new SPaCe.FileRead(smtp, fileParameter, cellInstanceName, connectionCount, cellInstanceConnectionName, fileConnectorConfiguration, equipmentTypeName, parameterizedModelObjectDefinitionType, modelObjectParameters, equipmentDictionaryName, dummyRuns, staticRuns, useCyclicalForDescription, isEAFHosted: connectionCount is null),
_ => throw new Exception($"\"{cellInstanceConnectionName}\" not mapped")

View File

@@ -0,0 +1,37 @@
using System.Collections.ObjectModel;
using System.Text.Json.Serialization;
namespace Adaptation.FileHandlers.Priority;
public class Aggregation
{
[JsonConstructor]
public Aggregation(
string average,
int count,
int? inverse,
int maximum,
int minimum,
ReadOnlyCollection<Record> records,
int sum
)
{
Average = average;
Count = count;
Inverse = inverse;
Maximum = maximum;
Minimum = minimum;
Records = records;
Sum = sum;
}
[JsonPropertyName("Average")] public string Average { get; }
[JsonPropertyName("Count")] public int Count { get; }
[JsonPropertyName("Inverse")] public int? Inverse { get; }
[JsonPropertyName("Maximum")] public int Maximum { get; }
[JsonPropertyName("Minimum")] public int Minimum { get; }
[JsonPropertyName("Records")] public ReadOnlyCollection<Record> Records { get; }
[JsonPropertyName("Sum")] public int Sum { get; }
}
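The aggregation is only ever written in this commit: the new Priority FileRead (below) serializes a dictionary of these, keyed by work-item id, into one JSON file per polled page. A minimal sketch of that output, not part of the commit, using made-up values and the Record type added further down:

using System;
using System.Collections.Generic;
using System.Collections.ObjectModel;
using System.Text.Json;
using Adaptation.FileHandlers.Priority;

// One made-up poll response: work item 110730 was voted priority 1 (id, IP, and time are illustrative).
Record record = new(json: "{}", id: 110730, page: "business", queryString: "id=110730&value=1",
    remoteIpAddress: "10.0.0.1", time: 1729200000, value: 1);
// Priority 1 inverts to 3, so the average of inverses is "3.000", which maps back to an overall 1.
Aggregation aggregation = new(average: "3.000", count: 1, inverse: 1, maximum: 1, minimum: 1,
    records: new ReadOnlyCollection<Record>(new[] { record }), sum: 1);
Dictionary<int, Aggregation> keyValuePairs = new() { [record.Id] = aggregation };
Console.WriteLine(JsonSerializer.Serialize(keyValuePairs, new JsonSerializerOptions { WriteIndented = true }));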

View File

@@ -0,0 +1,295 @@
using Adaptation.Eaf.Management.ConfigurationData.CellAutomation;
using Adaptation.Ifx.Eaf.EquipmentConnector.File.Configuration;
using Adaptation.Shared;
using Adaptation.Shared.Duplicator;
using Adaptation.Shared.Methods;
using System;
using System.Collections.Generic;
using System.Collections.ObjectModel;
using System.Diagnostics;
using System.IO;
using System.Linq;
using System.Text.Json;
using System.Threading;
namespace Adaptation.FileHandlers.Priority;
public class FileRead : Shared.FileRead, IFileRead
{
private readonly Timer _Timer;
public FileRead(ISMTP smtp, Dictionary<string, string> fileParameter, string cellInstanceName, int? connectionCount, string cellInstanceConnectionName, FileConnectorConfiguration fileConnectorConfiguration, string equipmentTypeName, string parameterizedModelObjectDefinitionType, IList<ModelObjectParameterDefinition> modelObjectParameters, string equipmentDictionaryName, Dictionary<string, List<long>> dummyRuns, Dictionary<long, List<string>> staticRuns, bool useCyclicalForDescription, bool isEAFHosted) :
base(new Description(), false, smtp, fileParameter, cellInstanceName, connectionCount, cellInstanceConnectionName, fileConnectorConfiguration, equipmentTypeName, parameterizedModelObjectDefinitionType, modelObjectParameters, equipmentDictionaryName, dummyRuns, staticRuns, useCyclicalForDescription, isEAFHosted: connectionCount is null)
{
_MinFileLength = 10;
_NullData = string.Empty;
_Logistics = new(this);
if (_FileParameter is null)
throw new Exception(cellInstanceConnectionName);
if (_ModelObjectParameterDefinitions is null)
throw new Exception(cellInstanceConnectionName);
if (!_IsDuplicator)
throw new Exception(cellInstanceConnectionName);
if (_IsEAFHosted)
NestExistingFiles(_FileConnectorConfiguration);
if (!Debugger.IsAttached && fileConnectorConfiguration.PreProcessingMode != FileConnectorConfiguration.PreProcessingModeEnum.Process)
_Timer = new Timer(Callback, null, (int)(fileConnectorConfiguration.FileScanningIntervalInSeconds * 1000), Timeout.Infinite);
else
{
_Timer = new Timer(Callback, null, Timeout.Infinite, Timeout.Infinite);
Callback(null);
}
}
void IFileRead.Move(Tuple<string, Test[], JsonElement[], List<FileInfo>> extractResults, Exception exception) => Move(extractResults);
void IFileRead.WaitForThread() => WaitForThread(thread: null, threadExceptions: null);
string IFileRead.GetEventDescription()
{
string result = _Description.GetEventDescription();
return result;
}
List<string> IFileRead.GetHeaderNames()
{
List<string> results = _Description.GetHeaderNames();
return results;
}
string[] IFileRead.Move(Tuple<string, Test[], JsonElement[], List<FileInfo>> extractResults, string to, string from, string resolvedFileLocation, Exception exception)
{
string[] results = Move(extractResults, to, from, resolvedFileLocation, exception);
return results;
}
JsonProperty[] IFileRead.GetDefault()
{
JsonProperty[] results = _Description.GetDefault(this, _Logistics);
return results;
}
Dictionary<string, string> IFileRead.GetDisplayNamesJsonElement()
{
Dictionary<string, string> results = _Description.GetDisplayNamesJsonElement(this);
return results;
}
List<IDescription> IFileRead.GetDescriptions(IFileRead fileRead, List<Test> tests, IProcessData processData)
{
List<IDescription> results = _Description.GetDescriptions(fileRead, _Logistics, tests, processData);
return results;
}
Tuple<string, Test[], JsonElement[], List<FileInfo>> IFileRead.GetExtractResult(string reportFullPath, string eventName)
{
Tuple<string, Test[], JsonElement[], List<FileInfo>> results;
if (string.IsNullOrEmpty(eventName))
throw new Exception();
_ReportFullPath = reportFullPath;
DateTime dateTime = DateTime.Now;
results = GetExtractResult(reportFullPath, dateTime);
if (results.Item3 is null)
results = new Tuple<string, Test[], JsonElement[], List<FileInfo>>(results.Item1, Array.Empty<Test>(), JsonSerializer.Deserialize<JsonElement[]>("[]"), results.Item4);
if (results.Item3.Length > 0 && _IsEAFHosted)
WritePDSF(this, results.Item3);
UpdateLastTicksDuration(DateTime.Now.Ticks - dateTime.Ticks);
return results;
}
Tuple<string, Test[], JsonElement[], List<FileInfo>> IFileRead.ReExtract()
{
Tuple<string, Test[], JsonElement[], List<FileInfo>> results;
List<string> headerNames = _Description.GetHeaderNames();
Dictionary<string, string> keyValuePairs = _Description.GetDisplayNamesJsonElement(this);
results = ReExtract(this, headerNames, keyValuePairs);
return results;
}
#nullable enable
private static ReadOnlyCollection<Record> GetRecords(string directory, string searchPattern)
{
List<Record> results = new();
string text;
Record? record;
string[] files;
List<Record>? collection;
Dictionary<string, List<Record>> keyValuePairs = new();
string[] directories = Directory.GetDirectories(directory, "*", SearchOption.TopDirectoryOnly);
foreach (string subDirectory in directories)
{
keyValuePairs.Clear();
files = Directory.GetFiles(subDirectory, searchPattern, SearchOption.TopDirectoryOnly);
foreach (string file in files)
{
text = File.ReadAllText(file);
if (string.IsNullOrEmpty(text) || text[0] == '[')
continue;
record = JsonSerializer.Deserialize<Record>(text);
if (record is null || record.Id == 0)
continue;
if (!keyValuePairs.TryGetValue(record.RemoteIpAddress, out collection))
{
keyValuePairs.Add(record.RemoteIpAddress, new());
if (!keyValuePairs.TryGetValue(record.RemoteIpAddress, out collection))
throw new Exception();
}
collection.Add(record);
}
foreach (KeyValuePair<string, List<Record>> keyValuePair in keyValuePairs)
{
if (keyValuePair.Value.Count == 1)
results.Add(keyValuePair.Value[0]);
else
{
record = keyValuePair.Value.Select(record => new KeyValuePair<long, Record>(record.Time, record)).OrderBy(pair => pair.Key).Last().Value;
results.Add(record);
}
}
}
return new(results);
}
private static int? GetInverse(int value) =>
value switch
{
1 => 3,
2 => 2,
3 => 1,
_ => null
};
private static int? GetInverse(double value)
{
int? result;
if (value > 3)
result = null;
else if (value > 2)
result = 1;
else if (value > 1)
result = 2;
else if (value > 0)
result = 3;
else
result = null;
return result;
}
private static ReadOnlyDictionary<int, Aggregation> GetKeyValuePairs(Dictionary<int, List<Record>> keyValuePairs)
{
Dictionary<int, Aggregation> results = new();
Aggregation aggregation;
int? inverse;
double average;
List<int> collection = new();
foreach (KeyValuePair<int, List<Record>> keyValuePair in keyValuePairs)
{
collection.Clear();
foreach (Record record in keyValuePair.Value)
{
inverse = GetInverse(record.Value);
if (inverse is null)
continue;
collection.Add(inverse.Value);
}
average = collection.Average();
inverse = GetInverse(average);
aggregation = new(average.ToString("0.000"),
keyValuePair.Value.Count,
inverse,
keyValuePair.Value.Max(record => record.Value),
keyValuePair.Value.Min(record => record.Value),
new(keyValuePair.Value),
keyValuePair.Value.Sum(record => record.Value));
results.Add(keyValuePair.Key, aggregation);
}
return new(results);
}
private static ReadOnlyDictionary<int, Aggregation> GetKeyValuePairs(string directory, string searchPattern)
{
ReadOnlyDictionary<int, Aggregation> results;
List<Record>? collection;
Dictionary<int, List<Record>> keyValuePairs = new();
ReadOnlyCollection<Record> records = GetRecords(directory, searchPattern);
foreach (Record record in records)
{
if (!keyValuePairs.TryGetValue(record.Id, out collection))
{
keyValuePairs.Add(record.Id, new());
if (!keyValuePairs.TryGetValue(record.Id, out collection))
throw new Exception();
}
collection.Add(record);
}
results = GetKeyValuePairs(keyValuePairs);
return results;
}
private static void WriteFiles(string sourceFileLocation, string sourceFileFilter, string targetFileLocation)
{
string json;
string jsonFile;
string directoryName;
if (!Directory.Exists(sourceFileLocation))
_ = Directory.CreateDirectory(sourceFileLocation);
if (!Directory.Exists(targetFileLocation))
_ = Directory.CreateDirectory(targetFileLocation);
ReadOnlyDictionary<int, Aggregation> keyValuePairs;
JsonSerializerOptions jsonSerializerOptions = new() { WriteIndented = true };
string[] directories = Directory.GetDirectories(sourceFileLocation, "*", SearchOption.TopDirectoryOnly);
foreach (string directory in directories)
{
directoryName = Path.GetFileName(directory);
keyValuePairs = GetKeyValuePairs(directory, sourceFileFilter);
jsonFile = Path.Combine(targetFileLocation, $"{directoryName}.json");
json = JsonSerializer.Serialize(keyValuePairs, jsonSerializerOptions);
File.WriteAllText(jsonFile, json);
}
}
private void Callback(object state)
{
try
{
if (_IsEAFHosted)
WriteFiles(_FileConnectorConfiguration.SourceFileLocation, _FileConnectorConfiguration.SourceFileFilter, _FileConnectorConfiguration.TargetFileLocation);
}
catch (Exception exception)
{
string subject = string.Concat("Exception:", _CellInstanceConnectionName);
string body = string.Concat(exception.Message, Environment.NewLine, Environment.NewLine, exception.StackTrace);
try
{ _SMTP.SendHighPriorityEmailMessage(subject, body); }
catch (Exception) { }
}
try
{
if (_FileConnectorConfiguration?.FileScanningIntervalInSeconds is null)
throw new Exception();
TimeSpan timeSpan = new(DateTime.Now.AddSeconds(_FileConnectorConfiguration.FileScanningIntervalInSeconds.Value).Ticks - DateTime.Now.Ticks);
_ = _Timer.Change((long)timeSpan.TotalMilliseconds, Timeout.Infinite);
}
catch (Exception exception)
{
string subject = string.Concat("Exception:", _CellInstanceConnectionName);
string body = string.Concat(exception.Message, Environment.NewLine, Environment.NewLine, exception.StackTrace);
try
{ _SMTP.SendHighPriorityEmailMessage(subject, body); }
catch (Exception) { }
}
}
#pragma warning disable IDE0060
private Tuple<string, Test[], JsonElement[], List<FileInfo>> GetExtractResult(string reportFullPath, DateTime dateTime)
#pragma warning restore IDE0060
{
Tuple<string, Test[], JsonElement[], List<FileInfo>> results;
_Logistics = new Logistics(reportFullPath, $"LOGISTICS_1{'\t'}A_JOBID={"BACKLOG"};A_MES_ENTITY={"BACKLOG"};");
results = new(_Logistics.Logistics1[0], Array.Empty<Test>(), Array.Empty<JsonElement>(), new List<FileInfo>());
return results;
}
}
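The two GetInverse overloads above carry the poll math: each vote's priority (1 = most critical) is flipped onto a 3..1 weight, the weights are averaged, and GetInverse(double) flips that average back into a 1..3 priority, dropping anything outside those bands. A standalone sketch of the arithmetic, not part of the commit, with made-up votes:

using System;
using System.Collections.Generic;
using System.Linq;

// Three made-up votes for one work item: two at priority 1, one at priority 2.
int[] votes = { 1, 1, 2 };
// Same mapping as GetInverse(int): 1 -> 3, 2 -> 2, 3 -> 1; anything else is skipped.
List<int> inverses = votes.Where(v => v >= 1 && v <= 3).Select(v => 4 - v).ToList();
double average = inverses.Average();                 // (3 + 3 + 2) / 3 = 2.667
// Same banding as GetInverse(double).
int? overall;
if (average > 3) overall = null;
else if (average > 2) overall = 1;
else if (average > 1) overall = 2;
else if (average > 0) overall = 3;
else overall = null;
Console.WriteLine($"{average:0.000} -> {overall}");  // 2.667 -> 1, i.e. treated as most critical

Any average strictly above 2 collapses to priority 1, so a work item's overall rating leans toward its most critical votes.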

View File

@@ -0,0 +1,37 @@
using System.Collections.ObjectModel;
using System.Text.Json.Serialization;
namespace Adaptation.FileHandlers.Priority;
public class Record
{
[JsonConstructor]
public Record(
string json,
int id,
string page,
string queryString,
string remoteIpAddress,
long time,
int value
)
{
Json = json;
Id = id;
Page = page;
QueryString = queryString;
RemoteIpAddress = remoteIpAddress;
Time = time;
Value = value;
}
[JsonPropertyName("Json")] public string Json { get; }
[JsonPropertyName("id")] public int Id { get; }
[JsonPropertyName("page")] public string Page { get; }
[JsonPropertyName("QueryString")] public string QueryString { get; }
[JsonPropertyName("RemoteIpAddress")] public string RemoteIpAddress { get; }
[JsonPropertyName("time")] public long Time { get; }
[JsonPropertyName("value")] public int Value { get; }
}
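Note the mixed-case JSON names ("Json", "QueryString", "RemoteIpAddress" Pascal-cased; "id", "page", "time", "value" lower-cased), presumably to match what the poll pages post. A minimal sketch, not part of the commit, of reading one made-up payload the way GetRecords does:

using System;
using System.Text.Json;
using Adaptation.FileHandlers.Priority;

// A made-up poll file from the SourceFileLocation tree; the property names must match the
// JsonPropertyName attributes above.
const string text = "{\"Json\":\"{}\",\"id\":110730,\"page\":\"business\"," +
    "\"QueryString\":\"id=110730&value=2\",\"RemoteIpAddress\":\"10.0.0.1\"," +
    "\"time\":1729200000,\"value\":2}";
Record? record = JsonSerializer.Deserialize<Record>(text);
// GetRecords skips empty files, files whose text starts with '[', and records with Id == 0;
// surviving records are de-duplicated per RemoteIpAddress (newest Time wins) and then grouped by Id.
Console.WriteLine($"{record?.Id} voted {record?.Value} from {record?.RemoteIpAddress}");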

View File

@@ -59,7 +59,7 @@ What is the relative value to the Customer or business?
<script>
$(document).ready(function () {
-initIndex("/markdown/with-parents.json?v=2024-10-07-18-50", "https://oi-metrology-viewer-prod.mes.infineon.com:4438/api/v1/ado/", "business", "Value", "/markdown/_PI4/business.json?v=2024-10-07-18-50");
+initIndex("/markdown/with-parents.json?v=2024-10-07-18-50", "https://oi-metrology-viewer-prod.mes.infineon.com:4438/api/v1/ado/", "business", "Value", "/markdown/PI4-Results/business.json?v=2024-10-07-18-50");
});
</script>

View File

@@ -55,7 +55,7 @@
<script>
$(document).ready(function () {
-initIndex("/markdown/with-parents.json?v=2024-10-07-18-50", "https://oi-metrology-viewer-prod.mes.infineon.com:4438/api/v1/ado/", "effort", "Effort", "/markdown/_PI4/effort.json?v=2024-10-07-18-50");
+initIndex("/markdown/with-parents.json?v=2024-10-07-18-50", "https://oi-metrology-viewer-prod.mes.infineon.com:4438/api/v1/ado/", "effort", "Effort", "/markdown/PI4-Results/effort.json?v=2024-10-07-18-50");
});
</script>

View File

@@ -59,7 +59,7 @@ What else does this do for our business?
<script>
$(document).ready(function () {
-initIndex("/markdown/with-parents.json?v=2024-10-07-18-50", "https://oi-metrology-viewer-prod.mes.infineon.com:4438/api/v1/ado/", "risk", "Risk", "/markdown/_PI4/risk.json?v=2024-10-07-18-50");
+initIndex("/markdown/with-parents.json?v=2024-10-07-18-50", "https://oi-metrology-viewer-prod.mes.infineon.com:4438/api/v1/ado/", "risk", "Risk", "/markdown/PI4-Results/risk.json?v=2024-10-07-18-50");
});
</script>

View File

@@ -60,7 +60,7 @@ How does user/business value decay over time?
<script>
$(document).ready(function () {
-initIndex("/markdown/with-parents.json?v=2024-10-07-18-50", "https://oi-metrology-viewer-prod.mes.infineon.com:4438/api/v1/ado/", "time", "Critical", "/markdown/_PI4/time.json?v=2024-10-07-18-50");
+initIndex("/markdown/with-parents.json?v=2024-10-07-18-50", "https://oi-metrology-viewer-prod.mes.infineon.com:4438/api/v1/ado/", "time", "Critical", "/markdown/PI4-Results/time.json?v=2024-10-07-18-50");
});
</script>

View File

@@ -75,13 +75,13 @@ function getPriority(workItemType, priority) {
return result;
}
-function getPollValue(pollValue) {
+function getPollValue(priority, pollValue) {
var result;
if (pollValue === undefined || pollValue.Records.length === undefined || pollValue.Records.length === 0)
result = "0-9-Unknown";
result = priority;
else {
if (pollValue.Average !== null)
result = "0-8-Not";
result = priority;
if (pollValue.Average > 2)
result = `${pollValue.Average}-1-${pollValue.Count}-High (Most Critical)`;
else if (pollValue.Average > 1)
@@ -89,7 +89,7 @@ function getPollValue(pollValue) {
else if (pollValue.Average > 0)
result = `${pollValue.Average}-3-${pollValue.Count}-Low`;
else
result = "0-8-Not";
result = priority;
}
return result;
}
@@ -120,8 +120,9 @@ function getWorkItems(data, dataB) {
workItem["ParentState"] = getState(parent["State"]);
}
workItem["State"] = getState(workItem["State"])
workItem["PollValue"] = getPollValue(dataB[workItem.Id]);
workItem["Priority"] = getPriority(workItem["WorkItemType"], workItem["Priority"])
var priority = getPriority(workItem["WorkItemType"], workItem["Priority"])
workItem["Priority"] = priority;
workItem["PollValue"] = getPollValue(priority, dataB[workItem.Id]);
workItems.push(workItem);
}
workItems.sort(compareFunction);

View File

@@ -75,13 +75,13 @@ function getPriority(workItemType, priority) {
return result;
}
-function getPollValue(pollValue) {
+function getPollValue(priority, pollValue) {
var result;
if (pollValue === undefined || pollValue.Records.length === undefined || pollValue.Records.length === 0)
result = "0-9-Unknown";
result = priority;
else {
if (pollValue.Average !== null)
result = "0-8-Not";
result = priority;
if (pollValue.Average > 2)
result = `${pollValue.Average}-1-${pollValue.Count}-High (Most Critical)`;
else if (pollValue.Average > 1)
@@ -89,7 +89,7 @@ function getPollValue(pollValue) {
else if (pollValue.Average > 0)
result = `${pollValue.Average}-3-${pollValue.Count}-Low`;
else
result = "0-8-Not";
result = priority;
}
return result;
}
@@ -120,8 +120,9 @@ function getWorkItems(data, dataB) {
workItem["ParentState"] = getState(parent["State"]);
}
workItem["State"] = getState(workItem["State"])
workItem["PollValue"] = getPollValue(dataB[workItem.Id]);
workItem["Priority"] = getPriority(workItem["WorkItemType"], workItem["Priority"])
var priority = getPriority(workItem["WorkItemType"], workItem["Priority"])
workItem["Priority"] = priority;
workItem["PollValue"] = getPollValue(priority, dataB[workItem.Id]);
workItems.push(workItem);
}
workItems.sort(compareFunction);

View File

@@ -75,13 +75,13 @@ function getPriority(workItemType, priority) {
return result;
}
-function getPollValue(pollValue) {
+function getPollValue(priority, pollValue) {
var result;
if (pollValue === undefined || pollValue.Records.length === undefined || pollValue.Records.length === 0)
result = "0-9-Unknown";
result = priority;
else {
if (pollValue.Average !== null)
result = "0-8-Not";
result = priority;
if (pollValue.Average > 2)
result = `${pollValue.Average}-1-${pollValue.Count}-High (Most Critical)`;
else if (pollValue.Average > 1)
@@ -89,7 +89,7 @@ function getPollValue(pollValue) {
else if (pollValue.Average > 0)
result = `${pollValue.Average}-3-${pollValue.Count}-Low`;
else
result = "0-8-Not";
result = priority;
}
return result;
}
@@ -120,8 +120,9 @@ function getWorkItems(data, dataB) {
workItem["ParentState"] = getState(parent["State"]);
}
workItem["State"] = getState(workItem["State"])
workItem["PollValue"] = getPollValue(dataB[workItem.Id]);
workItem["Priority"] = getPriority(workItem["WorkItemType"], workItem["Priority"])
var priority = getPriority(workItem["WorkItemType"], workItem["Priority"])
workItem["Priority"] = priority;
workItem["PollValue"] = getPollValue(priority, dataB[workItem.Id]);
workItems.push(workItem);
}
workItems.sort(compareFunction);

View File

@@ -75,13 +75,13 @@ function getPriority(workItemType, priority) {
return result;
}
-function getPollValue(pollValue) {
+function getPollValue(priority, pollValue) {
var result;
if (pollValue === undefined || pollValue.Records.length === undefined || pollValue.Records.length === 0)
result = "0-9-Unknown";
result = priority;
else {
if (pollValue.Average !== null)
result = "0-8-Not";
result = priority;
if (pollValue.Average > 2)
result = `${pollValue.Average}-1-${pollValue.Count}-High (Most Critical)`;
else if (pollValue.Average > 1)
@@ -89,7 +89,7 @@ function getPollValue(pollValue) {
else if (pollValue.Average > 0)
result = `${pollValue.Average}-3-${pollValue.Count}-Low`;
else
result = "0-8-Not";
result = priority;
}
return result;
}
@@ -120,8 +120,9 @@ function getWorkItems(data, dataB) {
workItem["ParentState"] = getState(parent["State"]);
}
workItem["State"] = getState(workItem["State"])
workItem["PollValue"] = getPollValue(dataB[workItem.Id]);
workItem["Priority"] = getPriority(workItem["WorkItemType"], workItem["Priority"])
var priority = getPriority(workItem["WorkItemType"], workItem["Priority"])
workItem["Priority"] = priority;
workItem["PollValue"] = getPollValue(priority, dataB[workItem.Id]);
workItems.push(workItem);
}
workItems.sort(compareFunction);

View File

@@ -144,6 +144,9 @@
<Compile Include="Adaptation\FileHandlers\OpenInsight\FileRead.cs" />
<Compile Include="Adaptation\FileHandlers\OpenInsightMetrologyViewer\FileRead.cs" />
<Compile Include="Adaptation\FileHandlers\OpenInsightMetrologyViewerAttachments\FileRead.cs" />
<Compile Include="Adaptation\FileHandlers\Priority\Aggregation.cs" />
<Compile Include="Adaptation\FileHandlers\Priority\FileRead.cs" />
<Compile Include="Adaptation\FileHandlers\Priority\Record.cs" />
<Compile Include="Adaptation\FileHandlers\Processed\FileRead.cs" />
<Compile Include="Adaptation\FileHandlers\SPaCe\FileRead.cs" />
<Compile Include="Adaptation\Ifx\Eaf\Common\Configuration\ConnectionSetting.cs" />