Ready to Test

This commit is contained in:
2022-07-26 09:34:09 -07:00
commit 2afec95704
1004 changed files with 164796 additions and 0 deletions

View File

@ -0,0 +1,265 @@
using Microsoft.Extensions.Configuration;
using System;
using System.Globalization;
using System.IO;
namespace OI.Metrology.Archive.Services;
using OI.Metrology.Shared.DataModels;
using OI.Metrology.Shared.Repositories;
using OI.Metrology.Shared.Services;
using System.Data.SqlClient;
public class AttachmentsService : IAttachmentsService
{
    private IConfiguration Config { get; }

    protected IMetrologyRepo _Repo;

    public AttachmentsService(IConfiguration config, IMetrologyRepo repo)
    {
        Config = config;
        _Repo = repo;
    }

    /// <summary>
    /// Opens a read-only stream for an attachment file on disk. The newer
    /// "{table}_\{year}\WW{week}\{guid}\{file}" layout is tried first; if the file
    /// is not there, the legacy flat "{table}\{guid}\{file}" layout is checked.
    /// </summary>
    /// <exception cref="Exception">When the attachment id is empty or the file is not found in either layout.</exception>
    protected Stream GetAttachmentStream(string tableName, Guid attachmentId, string filename)
    {
        string attachmentsRootPath = Config[Constants.AttachmentPathKey];
        if (attachmentId.Equals(Guid.Empty))
            throw new Exception("No attachments found");
        // The new directory layout is keyed off the record's insert date: year + work week.
        DateTime insertDate = Convert.ToDateTime(_Repo.GetAttachmentInsertDateByGUID(tableName, attachmentId));
        CultureInfo cul = CultureInfo.CurrentCulture;
        int weekNum = cul.Calendar.GetWeekOfYear(insertDate, CalendarWeekRule.FirstDay, DayOfWeek.Sunday);
        string workWeek = "WW" + weekNum.ToString("00");
        string dateDir = insertDate.Year + @"\" + workWeek;
        // NOTE(review): the trailing underscore on the table directory appears to be how the
        // new structure is distinguished from the legacy one on disk — confirm against the share.
        string fullPath = Path.Combine(attachmentsRootPath, tableName + "_", dateDir, attachmentId.ToString(), filename);
        // Check the "new" directory structure first; if the file isn't there,
        // fall back to the old flat layout and check again.
        if (!File.Exists(fullPath))
        {
            fullPath = Path.Combine(attachmentsRootPath, tableName, attachmentId.ToString(), filename);
        }
        if (!File.Exists(fullPath))
            throw new Exception("File not found");
        return new FileStream(fullPath, FileMode.Open, FileAccess.Read, FileShare.ReadWrite);
    }

    /// <summary>
    /// Opens a read-only stream for an attachment stored in the fixed archive
    /// location ("{table}\2019\09\{guid}\{file}").
    /// </summary>
    /// <exception cref="Exception">When the attachment id is empty or the file is not found.</exception>
    protected Stream GetAttachmentStreamArchive(string tableName, Guid attachmentId, string filename)
    {
        string attachmentsRootPath = Config[Constants.AttachmentPathKey];
        if (attachmentId.Equals(Guid.Empty))
            throw new Exception("No attachments found");
        // NOTE(review): the archive date folder is hard-coded to 2019\09 — confirm whether
        // other archive periods ever need to be served from this path.
        string fullPath = Path.Combine(attachmentsRootPath, tableName, "2019\\09", attachmentId.ToString(), filename);
        if (!File.Exists(fullPath))
            throw new Exception("File not found");
        return new FileStream(fullPath, FileMode.Open, FileAccess.Read, FileShare.ReadWrite);
    }

    /// <summary>
    /// Resolves an attachment id by its title via the repository and returns a
    /// read-only stream for the attachment file.
    /// </summary>
    public Stream GetAttachmentStreamByTitle(ToolType toolType, bool header, string title, string filename)
    {
        if (toolType == null)
            throw new Exception("Invalid tool type");
        // The previous implementation also ran an ad-hoc SQL query against the archive
        // database here, but every value derived from it was discarded. The query string
        // concatenated the caller-supplied title directly (SQL injection), and the
        // non-header variant referenced an undeclared @attachmentId parameter, which made
        // ExecuteReader throw. The repository lookup below is the only code whose result
        // is actually used, so the dead query has been removed.
        string tableName;
        Guid attachmentId;
        if (header)
        {
            tableName = toolType.HeaderTableName;
            attachmentId = _Repo.GetHeaderAttachmentIDByTitle(toolType.ID, title);
        }
        else
        {
            tableName = toolType.DataTableName;
            attachmentId = _Repo.GetDataAttachmentIDByTitle(toolType.ID, title);
        }
        return GetAttachmentStream(tableName, attachmentId, filename);
    }

    /// <summary>
    /// Returns a read-only stream for the attachment with the given id, using the
    /// header or data table name depending on <paramref name="header"/>.
    /// </summary>
    public Stream GetAttachmentStreamByAttachmentId(ToolType toolType, bool header, Guid attachmentId, string filename)
    {
        if (toolType == null)
            throw new Exception("Invalid tool type");
        string tableName = header ? toolType.HeaderTableName : toolType.DataTableName;
        return GetAttachmentStream(tableName, attachmentId, filename);
    }

    /// <summary>
    /// Returns a read-only stream for the attachment with the given id from the
    /// fixed archive location.
    /// </summary>
    public Stream GetAttachmentStreamByAttachmentIdArchive(ToolType toolType, bool header, Guid attachmentId, string filename)
    {
        if (toolType == null)
            throw new Exception("Invalid tool type");
        string tableName = header ? toolType.HeaderTableName : toolType.DataTableName;
        return GetAttachmentStreamArchive(tableName, attachmentId, filename);
    }

    /// <summary>
    /// Saves an uploaded file into the attachment folder for the header row (or the
    /// data row, when <paramref name="dataUniqueId"/> is supplied), inside a
    /// repository transaction.
    /// </summary>
    /// <exception cref="Exception">When the tool type is null or no attachment id exists for the row.</exception>
    public void SaveAttachment(ToolType toolType, long headerId, string dataUniqueId, string filename, Microsoft.AspNetCore.Http.IFormFile uploadedFile)
    {
        if (toolType == null)
            throw new Exception("Invalid tool type");
        using System.Transactions.TransactionScope trans = _Repo.StartTransaction();
        Guid attachmentId;
        string tableName;
        if (string.IsNullOrWhiteSpace(dataUniqueId))
        {
            attachmentId = _Repo.GetHeaderAttachmentID(toolType.ID, headerId);
            tableName = toolType.HeaderTableName;
        }
        else
        {
            attachmentId = _Repo.GetDataAttachmentID(toolType.ID, headerId, dataUniqueId);
            tableName = toolType.DataTableName;
        }
        if (attachmentId == Guid.Empty)
            throw new Exception("Invalid attachment ID");
        string attachmentsRootPath = Config[Constants.AttachmentPathKey];
        // New attachments are written to the legacy flat layout, which matches the
        // fallback path probed by GetAttachmentStream.
        string directoryPath = Path.Combine(attachmentsRootPath, tableName, attachmentId.ToString());
        if (!Directory.Exists(directoryPath))
            _ = Directory.CreateDirectory(directoryPath);
        string fullPath = Path.Combine(directoryPath, filename);
        using (FileStream s = new(fullPath, FileMode.Create, FileAccess.ReadWrite, FileShare.None))
        {
            uploadedFile.CopyTo(s);
        }
        trans.Complete();
    }

    /// <summary>
    /// Untyped overload required by the interface: casts
    /// <paramref name="uploadedFile"/> to IFormFile and delegates.
    /// </summary>
    public void SaveAttachment(ToolType toolType, long headerId, string dataUniqueId, string filename, object uploadedFile)
    {
        Microsoft.AspNetCore.Http.IFormFile formFile = (Microsoft.AspNetCore.Http.IFormFile)uploadedFile;
        SaveAttachment(toolType, headerId, dataUniqueId, filename, formFile);
    }
}

View File

@ -0,0 +1,236 @@
using Newtonsoft.Json.Linq;
using OI.Metrology.Shared.DataModels;
using OI.Metrology.Shared.Repositories;
using OI.Metrology.Shared.Services;
using System;
using System.Collections.Generic;
using System.Linq;
namespace OI.Metrology.Archive.Services;
public class InboundDataService : IInboundDataService
{
    protected IMetrologyRepo _Repo;

    public InboundDataService(IMetrologyRepo repo) => _Repo = repo;

    /// <summary>
    /// Inserts a header/detail JSON payload into the tool type's header and data
    /// tables inside a single transaction. Any data previously submitted with the
    /// same UniqueId is purged first, so re-submission replaces the earlier rows.
    /// </summary>
    /// <returns>The id of the newly inserted header row.</returns>
    /// <exception cref="Exception">When the purge or any insert fails; the message
    /// identifies the failing step and, for details, the 1-based row number.</exception>
    public long DoSQLInsert(JToken jsonbody, ToolType toolType, List<ToolTypeMetadata> metaData)
    {
        // Extract the Details array and the UniqueId value from the top level of the payload.
        JArray detailsArray = null;
        string uniqueId = "";
        foreach (JToken jt in jsonbody.Children())
        {
            if (jt is JProperty jp)
            {
                if (string.Equals(jp.Name, "Details", StringComparison.OrdinalIgnoreCase))
                {
                    if (jp.First is JArray array)
                        detailsArray = array;
                }
                else if (string.Equals(jp.Name, "UniqueId", StringComparison.OrdinalIgnoreCase))
                {
                    uniqueId = Convert.ToString(((JValue)jp.Value).Value);
                }
            }
        }
        long headerId = 0;
        using (System.Transactions.TransactionScope transScope = _Repo.StartTransaction())
        {
            try
            {
                _Repo.PurgeExistingData(toolType.ID, uniqueId);
            }
            catch (Exception ex)
            {
                throw new Exception("Failed to purge existing data: " + ex.Message, ex);
            }
            try
            {
                // -1 signals "no parent row" for the header insert.
                headerId = _Repo.InsertToolDataJSON(jsonbody, -1, metaData, toolType.HeaderTableName);
            }
            catch (Exception ex)
            {
                throw new Exception("Insert failed for header: " + ex.Message, ex);
            }
            int detailrow = 1; // 1-based row number used in error messages
            try
            {
                if (detailsArray != null)
                {
                    foreach (JToken detail in detailsArray)
                    {
                        _ = _Repo.InsertToolDataJSON(detail, headerId, metaData, toolType.DataTableName);
                        detailrow += 1;
                    }
                }
            }
            catch (Exception ex)
            {
                throw new Exception("Insert failed for detail row " + detailrow.ToString() + ": " + ex.Message, ex);
            }
            transScope.Complete();
        }
        return headerId;
    }

    // this method is for validating the json contents of the inbound request, it will make sure all required fields are included
    // errors are generated for missing fields, and warnings are generated for additional fields not in the metadata
    // this is recursive, detailIndex = 0 is for the header, then it calls itself for each of the details rows (if any)
    public void ValidateJSONFields(JToken jsonbody, int detailIndex, List<ToolTypeMetadata> metaData, List<string> errors, List<string> warnings)
    {
        bool isHeader = detailIndex == 0;
        string rowDesc = isHeader ? "header" : "detail index " + detailIndex.ToString(); // human readable description for error messages
        // filter the metadata list by header/detail
        List<ToolTypeMetadata> fields = metaData.Where(md => md.Header == isHeader).ToList();
        // get list of ApiFields from the metadata, exclude blank ApiName
        List<string> apiFields = fields.Where(f => string.IsNullOrWhiteSpace(f.ApiName) == false).Select(f => f.ApiName.Trim().ToUpper()).ToList();
        // get list of ApiFields from the metadata with blank ColumnName - we ignore these fields in the jsonbody
        List<string> ignoreFields = fields.Where(f => (string.IsNullOrWhiteSpace(f.ApiName) == false) && string.IsNullOrWhiteSpace(f.ColumnName)).Select(f => f.ApiName.Trim().ToUpper()).ToList();
        // keep a list of valid fields found in the jsonbody so we can check for duplicates
        List<string> validFields = new();
        // get list of container fields in the ApiFields, ex: Points\Thickness will add Points to the list
        List<string> containerFields = apiFields.Where(f => f.Contains('\\')).Select(f => f.Split('\\')[0].Trim().ToUpper()).Distinct().ToList();
        // pointer to the Details array from the jsonbody, this is hard-coded as the subfield for the common Header/Detail structure
        JArray detailsArray = null;
        // process fields in the json body
        foreach (JToken jt in jsonbody.Children())
        {
            if (jt is JProperty jp)
            {
                string jpName = jp.Name.Trim().ToUpper();
                if (apiFields.Contains(jpName))
                {
                    // Normal field detected, remove it from the list so we know which fields are missing
                    _ = apiFields.Remove(jpName);
                    // Check for duplicates
                    if (validFields.Contains(jpName))
                        errors.Add("Duplicated field on " + rowDesc + ": " + jp.Name);
                    else
                        validFields.Add(jpName);
                }
                else if (string.Equals(jp.Name, "Details", StringComparison.OrdinalIgnoreCase))
                {
                    // Details container field found
                    if (!isHeader)
                        errors.Add("Details field not expected on " + rowDesc);
                    if (jp.First is JArray array)
                        detailsArray = array;
                    else if ((jp.First is JValue value) && (value.Value == null))
                        detailsArray = null;
                    else
                        errors.Add("Invalid details field");
                }
                else if (ignoreFields.Contains(jpName))
                {
                    // ignore these fields
                }
                else if (containerFields.Contains(jpName))
                {
                    // ignore fields that are container fields
                }
                else
                {
                    // output warnings if extra fields are found
                    warnings.Add("Extra field on " + rowDesc + ": " + jp.Name);
                }
            }
        }
        // process container fields, ex: Points
        ValidateJSONContainerFields(jsonbody, rowDesc, apiFields, containerFields, errors, warnings);
        if (containerFields.Count > 1)
            errors.Add("Only one container field is supported");
        if (isHeader && (containerFields.Count > 0))
            errors.Add("Container field is only allowed on detail");
        // output errors for fields that were not found in the json
        foreach (string f in apiFields)
        {
            errors.Add("Missing field on " + rowDesc + ": " + f);
        }
        // if a Details container is found, process it by recursion
        if (detailsArray != null)
        {
            int i = 1;
            foreach (JToken detail in detailsArray)
            {
                ValidateJSONFields(detail, i, metaData, errors, warnings);
                i += 1;
            }
        }
    }

    // this method is for validating the special container fields (only used for stratus biorad)
    // the container fields are used to collapse a 3 tier structure into the 2 tier used in sharepoint
    protected static void ValidateJSONContainerFields(JToken jsonbody, string rowDesc, List<string> apiFields, List<string> containerFields, List<string> errors, List<string> warnings)
    {
        // process container fields, ex: Points
        foreach (string containerField in containerFields)
        {
            // get the json data for this container field, ex: Points
            JProperty contJP = jsonbody.Children<JProperty>().Where(jp => string.Equals(jp.Name, containerField, StringComparison.OrdinalIgnoreCase)).SingleOrDefault();
            if ((contJP != null) && (contJP.Value is JArray contJPArray))
            {
                // Get a list of properties in the container field from the json body, but pre-pend the container field name, ex: Points\Position
                List<string> contFieldProperties = new();
                foreach (JToken sfJT in contJPArray.Values<JToken>()) // for each row in the json container
                {
                    foreach (JProperty subJTJP in sfJT.Children<JProperty>()) // for each property for the row
                    {
                        string propname = (containerField + '\\' + subJTJP.Name).ToUpper();
                        if (!contFieldProperties.Contains(propname))
                            contFieldProperties.Add(propname);
                    }
                }
                // Get list of field bindings for this container field, ex: Points\Position, Points\Thickness
                List<string> contFieldBindings = apiFields.Where(f => f.StartsWith(containerField, StringComparison.OrdinalIgnoreCase)).Select(f => f.ToUpper()).ToList();
                foreach (string contFieldBinding in contFieldBindings)
                {
                    // check if the jsonbody has this property in the container field
                    if (contFieldProperties.Contains(contFieldBinding))
                    {
                        _ = contFieldProperties.Remove(contFieldBinding); // remove from the list of properties so we know it was found
                        _ = apiFields.Remove(contFieldBinding); // remove from the list of missing fields
                    }
                    else
                    {
                        errors.Add("Missing field on " + rowDesc + ": " + contFieldBinding);
                    }
                }
                // Output warnings for extra properties in the container field
                foreach (string contFieldProp in contFieldProperties)
                {
                    // BUGFIX: previously concatenated the whole list (contFieldProperties),
                    // which rendered as the List type name instead of the field name.
                    warnings.Add("Extra field on " + rowDesc + ": " + contFieldProp);
                }
            }
            else
            {
                errors.Add("Missing container field on " + rowDesc + ": " + containerField);
            }
        }
    }
}