process-data-standard-format with pipes
EDA logic
This commit is contained in:
parent f8421a092b
commit 61fea7c581
Adaptation/.vscode/launch.json (vendored): 6 lines changed
@@ -5,6 +5,12 @@
"type": "coreclr",
"request": "attach",
"processId": 21452
},
{
"type": "node",
"request": "launch",
"name": "node Launch Current Opened File",
"program": "${file}"
}
]
}

@@ -301,6 +301,8 @@ public class FileRead : Shared.FileRead, IFileRead
long preWait;
foreach (PreWith preWith in preWithCollection)
{
if (!_IsEAFHosted)
continue;
if (processDataStandardFormat is null)
File.Move(preWith.MatchingFile, preWith.CheckFile);
else
@@ -350,6 +352,8 @@ public class FileRead : Shared.FileRead, IFileRead
_Logistics = new Logistics(reportFullPath, processDataStandardFormat);
processDataStandardFormat = null;
}
if (!_IsEAFHosted && processDataStandardFormat is not null)
ProcessDataStandardFormat.Write(".pdsf", processDataStandardFormat);
SetFileParameterLotIDToLogisticsMID();
int numberLength = 2;
long ticks = dateTime.Ticks;

@@ -149,34 +149,6 @@ public class FileRead : Shared.FileRead, IFileRead
return results.ToString();
}

private static string GetJson(int columnsLine, string[] columns, string[] body)
{
#pragma warning disable CA1845, IDE0057
string result = "[\n";
string line;
string value;
string[] segments;
if (columns.Length == 0)
columns = body[columnsLine].Trim().Split('\t');
for (int i = columnsLine + 1; i < body.Length; i++)
{
line = "{";
segments = body[i].Trim().Split('\t');
if (segments.Length != columns.Length)
break;
for (int c = 1; c < segments.Length; c++)
{
value = segments[c].Replace("\"", "\\\"").Replace("\\", "\\\\");
line += '"' + columns[c].Trim('"') + '"' + ':' + '"' + value + '"' + ',';
}
line = line.Substring(0, line.Length - 1) + '}' + ',' + '\n';
result += line;
}
result = result.Substring(0, result.Length - 1) + ']';
return result;
#pragma warning restore CA1845, IDE0057
}

private void SaveOpenInsightFile(string reportFullPath, DateTime dateTime, ProcessDataStandardFormat processDataStandardFormat, List<RsM.Description> descriptions, Test[] tests)
{
bool isDummyRun = false;
@@ -201,7 +173,6 @@ public class FileRead : Shared.FileRead, IFileRead
if (!string.IsNullOrEmpty(lines))
{
long? subGroupId;
_ = GetJson(0, processDataStandardFormat.Columns.ToArray(), processDataStandardFormat.Body.ToArray());
long breakAfter = dateTime.AddSeconds(_BreakAfterSeconds).Ticks;
long preWait = _FileConnectorConfiguration?.FileHandleWaitTime is null ? dateTime.AddMilliseconds(1234).Ticks : dateTime.AddMilliseconds(_FileConnectorConfiguration.FileHandleWaitTime.Value).Ticks;
if (string.IsNullOrEmpty(descriptions[0].Reactor) || string.IsNullOrEmpty(descriptions[0].PSN))

@@ -152,7 +152,7 @@ public class FileRead : Shared.FileRead, IFileRead
matchDirectoryFileName = Path.GetFileName(matchDirectoryFile);
if (jobIdDirectoryFileName.StartsWith(matchDirectoryFileName))
{
checkFile = Path.Combine(matchDirectory, Path.GetFileName(matchDirectoryFile));
checkFile = Path.Combine(matchDirectory, jobIdDirectoryFileName);
if (File.Exists(checkFile))
continue;
File.Move(jobIdDirectoryFile, checkFile);
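
A minimal sketch, not part of the commit, of what the changed line above does: the destination now reuses jobIdDirectoryFileName rather than the file name taken from matchDirectoryFile. The paths and names below are hypothetical.

// Sketch only; matchDirectory, matchDirectoryFile, and jobIdDirectoryFileName are assumed sample values.
using System;
using System.IO;

class CheckFileSketch
{
    static void Main()
    {
        string matchDirectory = @"C:\data\match";
        string matchDirectoryFile = @"C:\data\match\ABC123.pdsf";
        string jobIdDirectoryFileName = "ABC123_20240101.pdsf";

        // Old behavior: keep the matched directory file's name.
        string before = Path.Combine(matchDirectory, Path.GetFileName(matchDirectoryFile));
        // New behavior: keep the job-id directory file's name.
        string after = Path.Combine(matchDirectory, jobIdDirectoryFileName);

        Console.WriteLine(before); // C:\data\match\ABC123.pdsf
        Console.WriteLine(after);  // C:\data\match\ABC123_20240101.pdsf
    }
}
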
@@ -26,19 +26,25 @@ internal class ProcessDataStandardFormat

internal long? Sequence { get; private set; }
internal ReadOnlyCollection<string> Body { get; private set; }
internal ReadOnlyCollection<string> Footer { get; private set; }
internal ReadOnlyCollection<string> Header { get; private set; }
internal ReadOnlyCollection<string> Columns { get; private set; }
internal ProcessDataStandardFormat? InputPDSF { get; private set; }
internal ReadOnlyCollection<string> Logistics { get; private set; }
internal ReadOnlyCollection<string> InputLines { get; private set; }

internal ProcessDataStandardFormat(ReadOnlyCollection<string> body,
ReadOnlyCollection<string> columns,
ReadOnlyCollection<string> inputLines,
ReadOnlyCollection<string> footer,
ReadOnlyCollection<string> header,
ProcessDataStandardFormat? inputPDSF,
ReadOnlyCollection<string> logistics,
long? sequence)
{
Body = body;
Columns = columns;
InputLines = inputLines;
Footer = footer;
Header = header;
InputPDSF = inputPDSF;
Logistics = logistics;
Sequence = sequence;
}
@@ -56,7 +62,7 @@ internal class ProcessDataStandardFormat
GetString(SearchFor.Archive, addSpaces, separator);

internal static ProcessDataStandardFormat GetEmpty() =>
new(new(Array.Empty<string>()), new(Array.Empty<string>()), new(Array.Empty<string>()), new(Array.Empty<string>()), null);
new(new(Array.Empty<string>()), new(Array.Empty<string>()), new(Array.Empty<string>()), new(Array.Empty<string>()), null, new(Array.Empty<string>()), null);

internal static List<string> PDSFToFixedWidth(string reportFullPath)
{
@@ -127,19 +133,26 @@ internal class ProcessDataStandardFormat
return results;
}

internal static ProcessDataStandardFormat GetProcessDataStandardFormat(string reportFullPath, string[]? lines = null)
internal static ProcessDataStandardFormat GetProcessDataStandardFormat(string reportFullPath, string[]? lines = null, int columnsLine = 6)
{
ProcessDataStandardFormat result;
string segment;
List<string> body = new();
List<string> logistics = new();
lines ??= File.ReadAllLines(reportFullPath);
string[] segments;
if (lines.Length < 7)
bool addToFooter = false;
List<string> body = new();
List<string> header = new();
List<string> footer = new();
List<string> columns = new();
ReadOnlyCollection<string> logistics;
lines ??= File.ReadAllLines(reportFullPath);
if (lines.Length < columnsLine + 1)
segments = Array.Empty<string>();
else
segments = lines[6].Trim().Split('\t');
List<string> columns = new();
{
segments = lines[columnsLine].Trim().Split('\t');
for (int i = 0; i < columnsLine; i++)
header.Add(lines[i]);
}
for (int c = 0; c < segments.Length; c++)
{
segment = segments[c].Substring(1, segments[c].Length - 2);
@@ -158,37 +171,49 @@ internal class ProcessDataStandardFormat
}
}
}
bool lookForLogistics = false;
for (int r = 7; r < lines.Length; r++)
for (int r = columnsLine + 1; r < lines.Length; r++)
{
if (lines[r].StartsWith("NUM_DATA_ROWS"))
lookForLogistics = true;
if (!lookForLogistics)
{
addToFooter = true;
if (!addToFooter)
body.Add(lines[r]);
continue;
}
if (lines[r].StartsWith("LOGISTICS_1"))
else
{
for (int i = r; i < lines.Length; i++)
{
if (!lines[i].StartsWith("LOGISTICS_") || lines[i].StartsWith("END_HEADER"))
break;
logistics.Add(lines[i]);
}
break;
footer.Add(lines[r]);
if (lines[r].StartsWith("END_HEADER"))
break;
}
}
if (lines.Length > 0 && body.Count == 0 && columns.Count == 0 && logistics.Count == 0)
logistics.Add(lines[1]);
string? linesOne = lines.Length > 0 && body.Count == 0 && columns.Count == 0 ? lines[1] : null;
logistics = GetLogistics(footer, linesOne: linesOne);
result = new(body: body.AsReadOnly(),
columns: columns.AsReadOnly(),
inputLines: lines.ToList().AsReadOnly(),
logistics: logistics.AsReadOnly(),
footer: footer.AsReadOnly(),
header: header.AsReadOnly(),
inputPDSF: null,
logistics: logistics,
sequence: null);
return result;
}

private static ReadOnlyCollection<string> GetLogistics(List<string> footer, string? linesOne)
{
List<string> results = new();
bool foundLogistics1 = false;
foreach (string line in footer)
{
if (line.StartsWith("END_HEADER"))
break;
if (line.StartsWith("LOGISTICS_1"))
foundLogistics1 = true;
if (foundLogistics1 && line.StartsWith("LOGISTICS_"))
results.Add(line);
}
if (!string.IsNullOrEmpty(linesOne) && results.Count == 0)
results.Add(linesOne);
return results.AsReadOnly();
}

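A minimal sketch, not part of the commit, of what the new GetLogistics helper collects: LOGISTICS_ lines from LOGISTICS_1 onward, stopping at END_HEADER, with linesOne used only as a fallback when nothing was found. The footer lines below are hypothetical; the loop mirrors the private method above, which cannot be called from outside the class.

using System;
using System.Collections.Generic;

class GetLogisticsSketch
{
    static void Main()
    {
        // Assumed sample footer content.
        List<string> footer = new()
        {
            "NUM_DATA_ROWS\t2",
            "LOGISTICS_1\tA_CHAMBER=1;A_JOBID=ABC;",
            "LOGISTICS_2\tB_CHAMBER=2;",
            "END_HEADER",
        };

        List<string> results = new();
        bool foundLogistics1 = false;
        foreach (string line in footer)
        {
            if (line.StartsWith("END_HEADER"))
                break;
            if (line.StartsWith("LOGISTICS_1"))
                foundLogistics1 = true;
            if (foundLogistics1 && line.StartsWith("LOGISTICS_"))
                results.Add(line);
        }

        // Prints the two LOGISTICS_ lines; NUM_DATA_ROWS and END_HEADER are skipped.
        foreach (string line in results)
            Console.WriteLine(line);
    }
}
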
internal static ProcessDataStandardFormat? GetProcessDataStandardFormat(string reportFullPath, ProcessDataStandardFormatMapping pdsfMapping)
{
ProcessDataStandardFormat? result;
@@ -196,7 +221,7 @@ internal class ProcessDataStandardFormat
FileInfo fileInfo = new(reportFullPath);
ProcessDataStandardFormat processDataStandardFormat = GetProcessDataStandardFormat(fileInfo.LastWriteTime, pdsfMapping.NewColumnNames.Count, columnsLine, fileInfo.FullName, lines: null);
JsonElement[]? jsonElements = GetArray(pdsfMapping.NewColumnNames.Count, processDataStandardFormat, lookForNumbers: false);
if (jsonElements is null || pdsfMapping.OldColumnNames.Count != pdsfMapping.ColumnIndices.Count)
if (jsonElements is null || jsonElements.Length == 0 || pdsfMapping.OldColumnNames.Count != pdsfMapping.ColumnIndices.Count)
result = null;
else
{
@@ -212,9 +237,11 @@ internal class ProcessDataStandardFormat
ProcessDataStandardFormat result;
long sequence;
string[] segments;
bool addToFooter = false;
List<string> body = new();
bool lookForLogistics = false;
List<string> logistics = new();
List<string> header = new();
List<string> footer = new();
ReadOnlyCollection<string> logistics;
lines ??= File.ReadAllLines(path);
if (lines.Length <= columnsLine)
segments = Array.Empty<string>();
@@ -223,28 +250,24 @@ internal class ProcessDataStandardFormat
segments = lines[columnsLine].Split('\t');
if (segments.Length != expectedColumns)
segments = Array.Empty<string>();
for (int i = 0; i < columnsLine; i++)
header.Add(lines[i]);
}
string[] columns = segments.Select(l => l.Trim('"')).ToArray();
for (int r = columnsLine + 1; r < lines.Length; r++)
{
if (lines[r].StartsWith("NUM_DATA_ROWS"))
lookForLogistics = true;
if (!lookForLogistics)
{
addToFooter = true;
if (!addToFooter)
body.Add(lines[r]);
continue;
}
if (lines[r].StartsWith("LOGISTICS_1"))
else
{
for (int i = r; i < lines.Length; i++)
{
if (!lines[i].StartsWith("LOGISTICS_") || lines[i].StartsWith("END_HEADER"))
break;
logistics.Add(lines[i]);
}
break;
footer.Add(lines[r]);
if (lines[r].StartsWith("END_HEADER"))
break;
}
}
logistics = GetLogistics(footer, linesOne: null);
if (logistics.Count == 0)
sequence = lastWriteTime.Ticks;
else
@@ -254,8 +277,10 @@ internal class ProcessDataStandardFormat
}
result = new(body: body.AsReadOnly(),
columns: new(columns),
inputLines: lines.ToList().AsReadOnly(),
logistics: logistics.AsReadOnly(),
footer: footer.AsReadOnly(),
header: header.AsReadOnly(),
inputPDSF: null,
logistics: logistics,
sequence: sequence);
return result;
}
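
A minimal sketch, not part of the commit, of how the reworked loop above splits a file: rows after the column line go to Body until NUM_DATA_ROWS appears, and everything from NUM_DATA_ROWS through END_HEADER goes to Footer, from which GetLogistics then picks the LOGISTICS_ lines. The sample lines and column index are hypothetical.

using System;
using System.Collections.Generic;

class PdsfSplitSketch
{
    static void Main()
    {
        int columnsLine = 6; // assumed, matching the new default parameter
        string[] lines =
        {
            "(header line 0)", "(header line 1)", "(header line 2)",
            "(header line 3)", "(header line 4)", "(header line 5)",
            "\"Run\"\t\"Thickness\"\t\"RhoAvg\"", // column row at index columnsLine
            "1\t12.1\t0.018",                     // body row
            "2\t12.3\t0.017",                     // body row
            "NUM_DATA_ROWS\t2",                   // footer starts here
            "LOGISTICS_1\tA_CHAMBER=1;",          // footer, later collected as logistics
            "END_HEADER",                         // footer, loop stops here
        };

        bool addToFooter = false;
        List<string> body = new();
        List<string> footer = new();
        for (int r = columnsLine + 1; r < lines.Length; r++)
        {
            if (lines[r].StartsWith("NUM_DATA_ROWS"))
                addToFooter = true;
            if (!addToFooter)
                body.Add(lines[r]);
            else
            {
                footer.Add(lines[r]);
                if (lines[r].StartsWith("END_HEADER"))
                    break;
            }
        }

        Console.WriteLine($"body: {body.Count}, footer: {footer.Count}"); // body: 2, footer: 3
    }
}
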
@@ -346,7 +371,9 @@ internal class ProcessDataStandardFormat
}
result = new(body: new(results),
columns: processDataStandardFormatMapping.OldColumnNames,
inputLines: processDataStandardFormat.InputLines,
footer: processDataStandardFormat.Footer,
header: processDataStandardFormat.Header,
inputPDSF: processDataStandardFormat,
logistics: processDataStandardFormat.Logistics,
sequence: processDataStandardFormat.Sequence);
return result;
@@ -379,7 +406,19 @@ internal class ProcessDataStandardFormat
results.Add("LOGISTICS_COLUMN\tB_LOGISTICS");
results.AddRange(processDataStandardFormat.Logistics);
results.Add("EOF");
results.AddRange(processDataStandardFormat.InputLines.Select(l => l.Replace('\t', '|')));
if (processDataStandardFormat.InputPDSF is not null)
{
List<char> hyphens = new();
results.AddRange(processDataStandardFormat.InputPDSF.Header.Select(l => l.Replace('\t', '|')));
results.Add(string.Empty);
results.Add($"|{string.Join("|", processDataStandardFormat.InputPDSF.Columns)}|");
for (int i = 0; i < processDataStandardFormat.InputPDSF.Columns.Count; i++)
hyphens.Add('-');
results.Add($"|{string.Join("|", hyphens)}|");
results.AddRange(processDataStandardFormat.InputPDSF.Body.Select(l => l.Replace('\t', '|')));
results.Add(string.Empty);
results.AddRange(processDataStandardFormat.InputPDSF.Footer.Select(l => l.Replace('\t', '|')));
}
File.WriteAllText(path, string.Join(Environment.NewLine, results));
}
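
A minimal sketch, not part of the commit, of the pipe-delimited trailer that Write now appends when InputPDSF is set, matching the commit title's "with pipes": the original columns and tab-delimited body are rewritten as a Markdown-style table. The column names and rows below are hypothetical.

using System;
using System.Collections.Generic;
using System.Linq;

class PipeTrailerSketch
{
    static void Main()
    {
        string[] columns = { "Run", "Thickness", "RhoAvg" };     // assumed column names
        string[] body = { "1\t12.1\t0.018", "2\t12.3\t0.017" };  // assumed tab-delimited rows

        List<string> results = new();
        List<char> hyphens = new();
        results.Add($"|{string.Join("|", columns)}|");
        for (int i = 0; i < columns.Length; i++)
            hyphens.Add('-');
        results.Add($"|{string.Join("|", hyphens)}|");
        results.AddRange(body.Select(l => l.Replace('\t', '|')));

        // Produces:
        // |Run|Thickness|RhoAvg|
        // |-|-|-|
        // |1|12.1|0.018|
        // |2|12.3|0.017|
        Console.WriteLine(string.Join(Environment.NewLine, results));
    }
}
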
Adaptation/_Tests/Static/cde.js (new file, 187 lines)
@@ -0,0 +1,187 @@
"use strict";

// $('dcp.CDE4/csv/Avg', '') == '' ? '' : (parseFloat($('dcp.CDE4/csv/Dev', '0')) / parseFloat($('dcp.CDE4/csv/Avg', '0'))).toFixed(2) + '%'

// getValue($('dcp.CDE4/csv/Avg', ''), $('dcp.CDE4/csv/Dev', '0'), $('dcp.CDE4/csv/Avg', '0'));

function roundNumber(number, digits) {
let result;
const multiple = Math.pow(10, digits);
result = Math.round(number * multiple) / multiple;
return result;
}

function getValue(avgRaw, devZeroDefault, avgZeroDefault) {
let result;
if (avgRaw == '')
result = '';
else {
let raw = parseFloat(devZeroDefault) / parseFloat(avgZeroDefault);
result = roundNumber(raw, 2);
}
return result;
}

const dev = 0.383;
const avg = 33.5810;
const value = getValue(avg, dev, avg);
console.log("value: " + value);
|
||||
|
||||
// getValue(self, self.length, $('dcp.CDE4/csv/MesEntity', ''), $('dcp.CDE4/csv/Title', ''), $('dcp.CDE4/csv/Recipe', ''), getContextData('2', 'cds.PROCESS_JOBID', ''));
|
||||
|
||||
function tryParseInt(str, defaultValue) {
|
||||
const parsedValue = parseInt(str);
|
||||
return isNaN(parsedValue) ? defaultValue : parsedValue;
|
||||
}
|
||||
|
||||
function getReactorAndRDS(defaultReactor, defaultRDS, text, formattedText, segments) {
|
||||
let result = {};
|
||||
let rds;
|
||||
let reactor;
|
||||
let rdsValue;
|
||||
if (text == null || text.length === 0 || segments.length === 0 || formattedText == null || formattedText.length === 0)
|
||||
reactor = defaultReactor;
|
||||
else
|
||||
reactor = segments[0];
|
||||
rdsValue = segments.length > 1 ? tryParseInt(segments[1], false) : false;
if (segments.length <= 1 || rdsValue === false || rdsValue < 99)
rds = defaultRDS;
else
rds = segments[1];
if (reactor.length > 3) {
rds = reactor;
reactor = defaultReactor;
}
result = { reactor: reactor, rds: rds };
return result;
}

function getLayerAndPSN(defaultLayer, defaultPSN, segments) {
let result = {};
let psn;
let layer;
if (segments.length <= 2) {
psn = defaultPSN;
layer = defaultLayer;
}
else {
let segmentsB = segments[2].split('.');
psn = segmentsB[0];
if (segmentsB.length <= 1)
layer = defaultLayer;
else {
layer = segmentsB[1];
if (layer.length > 1 && layer[0] === '0')
layer = layer.substring(1);
}
}
result = { layer: layer, psn: psn };
return result;
}

function getZone(segments) {
let result = '';
if (segments.length <= 3)
result = '';
else {
result = segments[3];
if (result.length > 1 && result[0] === '0')
result = result.substring(1);
}
return result;
}

function getArray(text) {
let result = {};
let lot = '';
let psn = '';
let rds = '';
let zone = '';
let layer = '';
let reactor = '';
let employee = '';
let defaultPSN = '';
let defaultRDS = '';
let defaultZone = '';
let defaultLayer = '';
let defaultReactor = '';
let defaultEmployee = '';
const processOnly = /^[a-zA-z]{2,3}/;
const processOnlyB = /^[a-zA-z][0-9]{2,4}$/;
const normal = /^[0-9]{2}[.][0-9]{1}[.]?[0-9]{0,1}/;
const fileSystemSafe = /[\\,\/,\:,\*,\?,\"",\<,\>,\|]/;
if (text.match(processOnlyB)) {
lot = text.toUpperCase();
psn = defaultPSN;
rds = defaultRDS;
zone = defaultZone;
layer = defaultLayer;
reactor = defaultReactor;
employee = defaultEmployee;
}
else if (text == null || text.length === 0 || (text.length == 2 && text.match(processOnly))) {
lot = text;
employee = lot;
psn = defaultPSN;
rds = defaultRDS;
zone = defaultZone;
layer = defaultLayer;
reactor = defaultReactor;
}
else if (text.match(normal)) {
let segments = text.split('.');
lot = text;
psn = defaultPSN;
rds = defaultRDS;
layer = segments[1];
reactor = segments[0];
employee = defaultEmployee;
if (segments.length <= 2)
zone = defaultZone;
else
zone = segments[2];
}
else {
lot = text.replace(fileSystemSafe, "_").split('\r')[0].split('\n')[0];
if (lot.length > 2 && lot[0] == '1' && (lot[1] == 'T' || lot[1] == 't'))
lot = lot.substring(2);
let segments = lot.split('-');
let reactorAndRDS = getReactorAndRDS(defaultReactor, defaultRDS, text, lot, segments);
reactor = reactorAndRDS.reactor;
rds = reactorAndRDS.rds;
let layerAndPSN = getLayerAndPSN(defaultLayer, defaultPSN, segments);
layer = layerAndPSN.layer;
psn = layerAndPSN.psn;
zone = getZone(segments);
if (segments.length <= 4)
employee = defaultEmployee;
else
employee = segments[4];
}
result = { rds: rds, psn: psn, reactor: reactor, layer: layer, zone: zone, employee: employee, lot: lot };
return result;
};

function getValueReactor(value, length, mesEntity, title, recipe, processJobId) {
if (recipe.toUpperCase() === 'RLOW_STD')
return mesEntity;
else if (recipe.toUpperCase() === 'RMID_STD')
return mesEntity;
else if (recipe.toUpperCase() === 'RHI_STD')
return mesEntity;
else if (recipe.toUpperCase() === 'THINSPC')
return mesEntity;
else if (length === 0 && processJobId == 0)
return getArray(title).reactor;
else if (length === 0)
return processJobId;
else
return value;
}

const self = '';
const mesEntity = 'CDE4';
const title = '75-663192-5281.1';
const recipe = '8IN_INF';
const processJobId = '0';
const valueReactor = getValueReactor(self, self.length, mesEntity, title, recipe, processJobId);
console.log("value: " + valueReactor);