diff --git a/Adaptation/.vscode/launch.json b/Adaptation/.vscode/launch.json index 6a2de47..f8bf7e8 100644 --- a/Adaptation/.vscode/launch.json +++ b/Adaptation/.vscode/launch.json @@ -1,16 +1,43 @@ { "configurations": [ { - "name": ".NET Core Attach", - "type": "coreclr", - "request": "attach", - "processId": 16660 + "mode": "debug", + "name": "Go launch file", + "program": "${file}", + "request": "launch", + "type": "go" }, { - "type": "node", - "request": "launch", - "name": "node Launch Current Opened File", - "program": "${file}" + "name": "node Launch Current Opened File", + "program": "${file}", + "request": "launch", + "type": "node" + }, + { + "cwd": "${workspaceFolder}", + "internalConsoleOptions": "neverOpen", + "name": "Debug File", + "program": "${file}", + "request": "launch", + "stopOnEntry": false, + "type": "bun", + "watchMode": false + }, + { + "cwd": "${workspaceFolder}", + "internalConsoleOptions": "neverOpen", + "name": "Run File", + "noDebug": true, + "program": "${file}", + "request": "launch", + "type": "bun", + "watchMode": false + }, + { + "name": ".NET Core Attach", + "processId": 32760, + "request": "attach", + "type": "coreclr" } ] -} +} \ No newline at end of file diff --git a/Adaptation/FileHandlers/IQSSi/FileRead.cs b/Adaptation/FileHandlers/IQSSi/FileRead.cs index ebd71ad..c9dc904 100644 --- a/Adaptation/FileHandlers/IQSSi/FileRead.cs +++ b/Adaptation/FileHandlers/IQSSi/FileRead.cs @@ -116,6 +116,20 @@ public class FileRead : Shared.FileRead, IFileRead WaitForFileConsumption(dateTime, descriptions, isDummyRun, successDirectory, duplicateDirectory, collection, duplicateFile); } + private void WriteFile(string reportFullPath, DateTime dateTime, List descriptions) where T : Shared.Properties.IDescription + { + bool isDummyRun = false; + string successDirectory = string.Empty; + List<(Shared.Properties.IScopeInfo, string)> collection = new(); + string duplicateDirectory = Path.Combine(_FileConnectorConfiguration.SourceFileLocation, 
_CellInstanceName); + if (!Directory.Exists(duplicateDirectory)) + _ = Directory.CreateDirectory(duplicateDirectory); + string duplicateFile = Path.Combine(duplicateDirectory, $"{Path.GetFileName(reportFullPath)}.xml"); + string xml = ProcessDataStandardFormat.GetXml(reportFullPath); + File.WriteAllText(duplicateFile, xml); + WaitForFileConsumption(dateTime, descriptions, isDummyRun, successDirectory, duplicateDirectory, collection, duplicateFile); + } + private Tuple> GetExtractResult(string reportFullPath, DateTime dateTime) { Tuple> results; @@ -127,6 +141,8 @@ public class FileRead : Shared.FileRead, IFileRead Test[] tests = (from l in descriptions select (Test)l.Test).ToArray(); if (_IsEAFHosted && _FileConnectorConfiguration.FileScanningIntervalInSeconds > 0) FileCopy(reportFullPath, dateTime, descriptions); + if (string.IsNullOrEmpty(reportFullPath) && _IsEAFHosted && _FileConnectorConfiguration.FileScanningIntervalInSeconds > 0) + WriteFile(reportFullPath, dateTime, descriptions); results = new Tuple>(string.Join(Environment.NewLine, processDataStandardFormat.Logistics), tests, jsonElements, new List()); return results; } diff --git a/Adaptation/FileHandlers/MoveMatchingFiles/FileRead.cs b/Adaptation/FileHandlers/MoveMatchingFiles/FileRead.cs index e094654..73c6d12 100644 --- a/Adaptation/FileHandlers/MoveMatchingFiles/FileRead.cs +++ b/Adaptation/FileHandlers/MoveMatchingFiles/FileRead.cs @@ -88,9 +88,9 @@ public class FileRead : Shared.FileRead, IFileRead string processDataStandardFormatMappingOldColumnNames = GetPropertyValue(cellInstanceConnectionName, modelObjectParameters, "Process.Data.Standard.Format.Mapping.Old.Column.Names"); string processDataStandardFormatMappingNewColumnNames = GetPropertyValue(cellInstanceConnectionName, modelObjectParameters, "Process.Data.Standard.Format.Mapping.New.Column.Names"); string processDataStandardFormatMappingColumnIndices = GetPropertyValue(cellInstanceConnectionName, modelObjectParameters, 
"Process.Data.Standard.Format.Mapping.Column.Indices"); - _ProcessDataStandardFormatMapping = GetProcessDataStandardFormatMapping(processDataStandardFormatMappingOldColumnNames, - processDataStandardFormatMappingNewColumnNames, - processDataStandardFormatMappingColumnIndices); + _ProcessDataStandardFormatMapping = ProcessDataStandardFormatMapping.Get(processDataStandardFormatMappingOldColumnNames, + processDataStandardFormatMappingNewColumnNames, + processDataStandardFormatMappingColumnIndices); } void IFileRead.Move(Tuple> extractResults, Exception exception) @@ -169,46 +169,6 @@ public class FileRead : Shared.FileRead, IFileRead return results; } - private static ProcessDataStandardFormatMapping GetProcessDataStandardFormatMapping(string processDataStandardFormatMappingOldColumnNames, string processDataStandardFormatMappingNewColumnNames, string processDataStandardFormatMappingColumnIndices) - { - ProcessDataStandardFormatMapping result; - string[] segmentsB; - List distinct = new(); - Dictionary keyValuePairs = new(); - string args4 = "Time,HeaderUniqueId,UniqueId,Date"; - string args5 = "Thickness 14 3mm Edge Mean,Thickness 14 3mm Edge % from R/2,Thickness 14 5mm Edge Mean,Thickness 14 5mm Edge % from R/2,Thickness 14 Center Mean,Thickness 14 Average,Thickness 14 Std Dev,Thickness 14 R/2 Mean"; - string args6 = "Thickness01,Thickness02,Thickness03,Thickness04,Thickness05,Thickness06,Thickness07,Thickness08,Thickness09,Thickness10,Thickness11,Thickness12,Thickness13,Thickness14"; - string args7 = "Test|EventId,Employee|Operator,Lot|Wafer,MeanThickness|WaferMeanThickness,RVThickness|RadialVariationThickness,ThicknessFourteen3mmEdgeMean|Thickness 14 3mm Edge Mean,ThicknessFourteen3mmEdgePercent|Thickness 14 3mm Edge % from R/2,ThicknessFourteen5mmEdgeMean|Thickness 14 5mm Edge Mean,ThicknessFourteen5mmEdgePercent|Thickness 14 5mm Edge % from R/2,ThicknessFourteenCenterMean|Thickness 14 Center Mean,ThicknessFourteenCriticalPointsAverage|Thickness 14 
Average,ThicknessFourteenCriticalPointsStdDev|Thickness 14 Std Dev,ThicknessFourteenMeanFrom|Thickness 14 R/2 Mean,|BIORAD2"; - // string args8 = "Time,A_LOGISTICS,B_LOGISTICS,Test,Count,Index,MesEntity,Date,Employee,Lot,PSN,Reactor,Recipe,Cassette,HeaderUniqueId,Layer,PassFail,Position,RDS,Title,UniqueId,Wafer,Zone,MeanThickness,RVThickness,StdDev,Thickness,Slot,ThicknessFourteen3mmEdgeMean,ThicknessFourteen3mmEdgePercent,ThicknessFourteen5mmEdgeMean,ThicknessFourteen5mmEdgePercent,ThicknessFourteenCenterMean,ThicknessFourteenCriticalPointsAverage,ThicknessFourteenCriticalPointsStdDev,ThicknessFourteenMeanFrom,Thickness01,Thickness02,Thickness03,Thickness04,Thickness05,Thickness06,Thickness07,Thickness08,Thickness09,Thickness10,Thickness11,Thickness12,Thickness13,Thickness14"; - // string args9 = "Time,A_LOGISTICS,B_LOGISTICS,Count,Sequence,MesEntity,Index,Title,Recipe,DateTime,Operator,Batch,Cassette,UsedLast,Wafer,Position,Thickness,WaferMeanThickness,StdDev,PassFail,Line,RadialVariationThickness,Slot,RDS,PSN,Reactor,Layer,Zone,Employee,InferredLot,Thickness 14 3mm Edge Mean,Thickness 14 3mm Edge % from R/2,Thickness 14 5mm Edge Mean,Thickness 14 5mm Edge % from R/2,Thickness 14 Center Mean,Thickness 14 Average,Thickness 14 Std Dev,Thickness 14 R 2/Mean,Thickness01,Thickness02,Thickness03,Thickness04,Thickness05,Thickness06,Thickness07,Thickness08,Thickness09,Thickness10,Thickness11,Thickness12,Thickness13,Thickness14,EventId"; - // string args10 = "0,1,2,52,3,6,5,9,10,14,24,25,8,12,-1,26,19,15,23,7,-1,14,27,17,21,18,16,22,30,31,32,33,34,35,36,37,38,39,40,41,42,43,44,45,46,47,48,49,50,51"; - string[] segments = args7.Split(','); - ReadOnlyCollection ignoreColumns = new(args4.Split(',')); - ReadOnlyCollection backfillColumns = new(args5.Split(',')); - ReadOnlyCollection indexOnlyColumns = new(args6.Split(',')); - ReadOnlyCollection newColumnNames = new(processDataStandardFormatMappingNewColumnNames.Split(',')); - ReadOnlyCollection oldColumnNames = 
new(processDataStandardFormatMappingOldColumnNames.Split(',')); - ReadOnlyCollection columnIndices = new(processDataStandardFormatMappingColumnIndices.Split(',').Select(int.Parse).ToArray()); - foreach (string segment in segments) - { - segmentsB = segment.Split('|'); - if (segmentsB.Length != 2) - continue; - if (distinct.Contains(segmentsB[0])) - continue; - distinct.Add(segmentsB[0]); - keyValuePairs.Add(segmentsB[0], segmentsB[1]); - } - result = new(backfillColumns: backfillColumns, - columnIndices: columnIndices, - newColumnNames: newColumnNames, - ignoreColumns: ignoreColumns, - indexOnlyColumns: indexOnlyColumns, - keyValuePairs: new(keyValuePairs), - oldColumnNames: oldColumnNames); - return result; - } - private static ReadOnlyCollection GetPreWithCollection(ReadOnlyCollection
 preCollection)
     {
         List results = new();
diff --git a/Adaptation/Shared/FileRead.cs b/Adaptation/Shared/FileRead.cs
index 03b4109..48aacab 100644
--- a/Adaptation/Shared/FileRead.cs
+++ b/Adaptation/Shared/FileRead.cs
@@ -383,17 +383,24 @@ public class FileRead : Properties.IFileRead
         else
         {
             string[] files;
-            string logisticsSequence = _Logistics.Sequence.ToString();
-            string[] directories = Directory.GetDirectories(jobIdDirectory, $"*{logisticsSequence}*", SearchOption.TopDirectoryOnly);
-            foreach (string directory in directories)
+            string[] directories;
+            string logisticsSequence;
+            for (int i = 0; i < 10; i++)
             {
-                files = Directory.GetFiles(directory, "*", SearchOption.TopDirectoryOnly);
-                if (files.Length == 0)
-                    continue;
-                results.Add(directory);
+                logisticsSequence = (_Logistics.Sequence + -i).ToString();
+                directories = Directory.GetDirectories(jobIdDirectory, $"*{logisticsSequence}*", SearchOption.TopDirectoryOnly);
+                foreach (string directory in directories)
+                {
+                    files = Directory.GetFiles(directory, "*", SearchOption.TopDirectoryOnly);
+                    if (files.Length == 0)
+                        continue;
+                    results.Add(directory);
+                }
+                if (results.Count == 1)
+                    break;
             }
         }
-        if ((results is null) || results.Count != 1)
+        if (results.Count != 1)
             throw new Exception("Didn't find directory by logistics sequence");
         return results.ToArray();
     }
diff --git a/Adaptation/Shared/ProcessDataStandardFormat.cs b/Adaptation/Shared/ProcessDataStandardFormat.cs
index 82237b2..0b7a561 100644
--- a/Adaptation/Shared/ProcessDataStandardFormat.cs
+++ b/Adaptation/Shared/ProcessDataStandardFormat.cs
@@ -2,12 +2,14 @@ using Adaptation.Shared.Methods;
 using System;
 using System.Collections.Generic;
 using System.Collections.ObjectModel;
+using System.Diagnostics;
 using System.Globalization;
 using System.IO;
 using System.Linq;
 using System.Text;
 using System.Text.Json;
 using System.Text.Json.Serialization;
+using System.Text.RegularExpressions;
 
 namespace Adaptation.Shared;
 
@@ -227,19 +229,19 @@ internal class ProcessDataStandardFormat
         return results.AsReadOnly();
     }
 
-    internal static ProcessDataStandardFormat GetProcessDataStandardFormat(string reportFullPath, ProcessDataStandardFormatMapping pdsfMapping)
+    internal static ProcessDataStandardFormat GetProcessDataStandardFormat(string reportFullPath, ProcessDataStandardFormatMapping processDataStandardFormatMapping)
     {
         ProcessDataStandardFormat result;
         const int columnsLine = 6;
         FileInfo fileInfo = new(reportFullPath);
         ProcessDataStandardFormat processDataStandardFormat = GetProcessDataStandardFormat(fileInfo.LastWriteTime, columnsLine, fileInfo.FullName, lines: null);
-        JsonElement[]? jsonElements = pdsfMapping.OldColumnNames.Count != pdsfMapping.ColumnIndices.Count ? null : GetFullArray(processDataStandardFormat);
+        JsonElement[]? jsonElements = processDataStandardFormatMapping.OldColumnNames.Count != processDataStandardFormatMapping.ColumnIndices.Count ? null : GetFullArray(processDataStandardFormat);
         JsonProperty[]? jsonProperties = jsonElements is null || jsonElements.Length == 0 ? null : jsonElements[0].EnumerateObject().ToArray();
-        if (jsonElements is null || jsonProperties is null || jsonProperties.Length != pdsfMapping.NewColumnNames.Count)
+        if (jsonElements is null || jsonProperties is null || jsonProperties.Length != processDataStandardFormatMapping.NewColumnNames.Count)
             result = processDataStandardFormat;
         else
         {
-            result = GetProcessDataStandardFormat(pdsfMapping, jsonElements, processDataStandardFormat);
+            result = GetProcessDataStandardFormat(processDataStandardFormatMapping, jsonElements, processDataStandardFormat);
             if (result.Sequence is null || result.Columns.Count == 0 || result.Body.Count == 0 || result.Logistics.Count == 0)
                 result = processDataStandardFormat;
         }
@@ -335,12 +337,14 @@ internal class ProcessDataStandardFormat
         int column;
         string value;
         JsonProperty jsonProperty;
+        List<string> debug = new();
         List<string> values = new();
         List<string> results = new();
         JsonProperty[] jsonProperties;
         List<string> unknownColumns = new();
         for (int i = 0; i < jsonElements.Length; i++)
         {
+            debug.Clear();
             values.Clear();
             if (jsonElements[i].ValueKind != JsonValueKind.Object)
             {
@@ -354,16 +358,22 @@ internal class ProcessDataStandardFormat
             {
                 column = processDataStandardFormatMapping.ColumnIndices[c];
                 if (column == -1)
+                {
                     value = processDataStandardFormatMapping.OldColumnNames[c];
+                    debug.Add($"");
+                }
                 else
                 {
                     jsonProperty = jsonProperties[column];
                     value = jsonProperty.Value.ToString();
+                    debug.Add($"");
                 }
                 values.Add(value);
             }
             results.Add(string.Join("\t", values));
         }
+        if (Debugger.IsAttached)
+            File.WriteAllText("../../.txt", string.Join(Environment.NewLine, debug.OrderBy(l => l)));
         result = new(body: new(results),
                      columns: processDataStandardFormatMapping.OldColumnNames,
                      footer: processDataStandardFormat.Footer,
@@ -378,7 +388,6 @@ internal class ProcessDataStandardFormat
     {
         if (processDataStandardFormat.InputPDSF is null)
             throw new NullReferenceException(nameof(processDataStandardFormat.InputPDSF));
-#pragma warning disable CA1845, IDE0057
         string result;
         string line;
         string value;
@@ -569,6 +578,9 @@ internal class ProcessDataStandardFormat
         results.Add(string.Empty);
         results.AddRange(processDataStandardFormat.InputPDSF.Footer.Select(l => $"|{l.Replace('\t', '|')}|"));
         results.Add(string.Empty);
+        string xml = GetXml(processDataStandardFormat);
+        results.Add(xml);
+        results.Add(string.Empty);
         results.Add("EOF");
         results.Add(string.Empty);
         string json = GetJson(processDataStandardFormat);
@@ -854,6 +866,60 @@ internal class ProcessDataStandardFormat
         return result;
     }
 
+    internal static string GetXml(ProcessDataStandardFormat processDataStandardFormat)
+    {
+        string result;
+        string tag;
+        string value;
+        string[] segments;
+        ReadOnlyCollection<string> body = processDataStandardFormat.InputPDSF is null ?
+            processDataStandardFormat.Body : processDataStandardFormat.InputPDSF.Body;
+        ReadOnlyCollection<string> columns = processDataStandardFormat.InputPDSF is null ?
+            processDataStandardFormat.Columns : processDataStandardFormat.InputPDSF.Columns;
+        List lines = new() { "", "" };
+        for (int i = 0; i < body.Count; i++)
+        {
+            lines.Add("  ");
+            segments = body[i].Trim().Split('\t');
+            if (segments.Length != columns.Count)
+                break;
+            for (int c = 0; c < segments.Length; c++)
+            {
+                value = segments[c].Replace("&", "&amp;")
+                                   .Replace("<", "&lt;")
+                                   .Replace(">", "&gt;")
+                                   .Replace("\"", "&quot;")
+                                   .Replace("'", "&apos;");
+                tag = Regex.Replace(columns[c].Trim('"'), @"[^a-zA-Z0-9]", "_").Split('\r')[0].Split('\n')[0];
+                lines.Add(string.Concat("    <", tag, '>', value, "</", tag, '>'));
+            }
+            lines.Add("  ");
+        }
+        lines.Add("");
+        result = string.Join(Environment.NewLine, lines);
+        return result;
+    }
+
+    internal static string GetXml(string reportFullPath, string[]? lines = null)
+    {
+        string result;
+        bool foundXml = false;
+        List results = new();
+        lines ??= File.ReadAllLines(reportFullPath);
+        foreach (string line in lines)
+        {
+            if (line.StartsWith(" BackfillColumns { get; private set; }
     public ReadOnlyCollection ColumnIndices { get; private set; }
-    public ReadOnlyCollection IgnoreColumns { get; private set; }
-    public ReadOnlyCollection IndexOnlyColumns { get; private set; }
-    public ReadOnlyDictionary KeyValuePairs { get; private set; }
     public ReadOnlyCollection NewColumnNames { get; private set; }
     public ReadOnlyCollection OldColumnNames { get; private set; }
 
-    public ProcessDataStandardFormatMapping(ReadOnlyCollection backfillColumns,
-                                            ReadOnlyCollection columnIndices,
-                                            ReadOnlyCollection ignoreColumns,
-                                            ReadOnlyCollection indexOnlyColumns,
-                                            ReadOnlyDictionary keyValuePairs,
+    public ProcessDataStandardFormatMapping(ReadOnlyCollection columnIndices,
                                             ReadOnlyCollection newColumnNames,
                                             ReadOnlyCollection oldColumnNames)
     {
-        BackfillColumns = backfillColumns;
         ColumnIndices = columnIndices;
-        IgnoreColumns = ignoreColumns;
-        IndexOnlyColumns = indexOnlyColumns;
-        KeyValuePairs = keyValuePairs;
         NewColumnNames = newColumnNames;
         OldColumnNames = oldColumnNames;
     }
 
+    internal static ProcessDataStandardFormatMapping Get(string processDataStandardFormatMappingOldColumnNames, string processDataStandardFormatMappingNewColumnNames, string processDataStandardFormatMappingColumnIndices)
+    {
+        ProcessDataStandardFormatMapping result;
+        ReadOnlyCollection newColumnNames = new(processDataStandardFormatMappingNewColumnNames.Split(','));
+        ReadOnlyCollection oldColumnNames = new(processDataStandardFormatMappingOldColumnNames.Split(','));
+        ReadOnlyCollection columnIndices = new(processDataStandardFormatMappingColumnIndices.Split(',').Select(int.Parse).ToArray());
+        result = new(columnIndices: columnIndices,
+                     newColumnNames: newColumnNames,
+                     oldColumnNames: oldColumnNames);
+        return result;
+    }
+
 }
\ No newline at end of file
diff --git a/Adaptation/_Tests/Extract/Production/v2.60.0/MET08THFTIRQS408M.cs b/Adaptation/_Tests/Extract/Production/v2.60.0/MET08THFTIRQS408M.cs
index 3bcd118..5562f56 100644
--- a/Adaptation/_Tests/Extract/Production/v2.60.0/MET08THFTIRQS408M.cs
+++ b/Adaptation/_Tests/Extract/Production/v2.60.0/MET08THFTIRQS408M.cs
@@ -37,6 +37,29 @@ public class MET08THFTIRQS408M
     [TestMethod]
     public void Production__v2_60_0__MET08THFTIRQS408M__MoveMatchingFiles() => _MET08THFTIRQS408M.Production__v2_60_0__MET08THFTIRQS408M__MoveMatchingFiles();
 
+#if DEBUG
+    [Ignore]
+#endif
+    [TestMethod]
+    public void Production__v2_60_0__MET08THFTIRQS408M__MoveMatchingFiles638402505394171507__Normal()
+    {
+        DateTime dateTime;
+        string check = "*.pdsf";
+        MethodBase methodBase = new StackFrame().GetMethod();
+        _MET08THFTIRQS408M.Production__v2_60_0__MET08THFTIRQS408M__MoveMatchingFiles();
+        string test = System.Text.RegularExpressions.Regex.Replace("Thickness 14 5mm Edge % from R/2", @"[^a-zA-Z0-9]", "_").Split('\r')[0].Split('\n')[0];
+        Assert.AreEqual("Thickness_14_5mm_Edge___from_R_2", test);
+        string[] variables = _MET08THFTIRQS408M.AdaptationTesting.GetVariables(methodBase, check, validatePDSF: false);
+        IFileRead fileRead = _MET08THFTIRQS408M.AdaptationTesting.Get(methodBase, sourceFileLocation: variables[2], sourceFileFilter: variables[3], useCyclicalForDescription: false);
+        Logistics logistics = new(fileRead);
+        dateTime = FileHandlers.QS408M.ProcessData.GetDateTime(logistics, tickOffset: 0, dateTimeText: string.Empty);
+        Assert.AreEqual(logistics.DateTimeFromSequence, dateTime);
+        dateTime = FileHandlers.QS408M.ProcessData.GetDateTime(logistics, tickOffset: 0, dateTimeText: "Tue Nov 10 12:03:56 1970");
+        Assert.AreEqual(logistics.DateTimeFromSequence, dateTime);
+        _ = Shared.AdaptationTesting.ReExtractCompareUpdatePassDirectory(variables, fileRead, logistics);
+        NonThrowTryCatch();
+    }
+
 #if DEBUG
     [Ignore]
 #endif