/// <summary>
/// Updates tuples of an existing dataset via the API (HTTP PUT).
/// Flow: bearer-token authentication, entity write-permission check, payload
/// validation, then either a synchronous update (payloads up to the configured
/// cell limit) or a fire-and-forget background job for larger payloads whose
/// outcome is reported by mail.
/// </summary>
/// <param name="data">Incoming payload: dataset id, column names, row data and primary keys.</param>
/// <returns>
/// An <see cref="HttpResponseMessage"/> — OK on success, otherwise the first failed
/// precondition / authorization / validation step with a describing message.
/// </returns>
// NOTE(review): the method is declared async but contains no await (the big-data
// branch deliberately does not await Task.Run), so it completes synchronously
// and the compiler emits CS1998 — confirm this is intentional.
public async Task<HttpResponseMessage> Put([FromBody] PutDataApiModel data)
{
    // NOTE(review): this response object is never returned to the caller; it is
    // created only to be disposed in the finally block below.
    var request = Request.CreateResponse();
    User user = null;
    string error = "";
    DatasetManager datasetManager = new DatasetManager();
    UserManager userManager = new UserManager();
    EntityPermissionManager entityPermissionManager = new EntityPermissionManager();
    DataStructureManager dataStructureManager = new DataStructureManager();
    ApiConfigurator apiHelper = new ApiConfigurator();
    DatasetVersion workingCopy = new DatasetVersion();
    List<DataTuple> rows = new List<DataTuple>(); // NOTE(review): unused in this method

    //load from apiConfig
    // Maximum number of cells (rows * columns) handled synchronously; larger payloads
    // are handed off to a background task. Falls back to 10000 when the API
    // configuration has no CELLS entry (TryParse also leaves 0 on a malformed entry).
    int cellLimit = 10000;
    if (apiHelper != null && apiHelper.Settings.ContainsKey(ApiConfigurator.CELLS))
    {
        Int32.TryParse(apiHelper.Settings[ApiConfigurator.CELLS], out cellLimit);
    }

    try
    {
        #region security

        // Authenticate via the bearer token carried in the Authorization header.
        string token = this.Request.Headers.Authorization?.Parameter;

        if (String.IsNullOrEmpty(token))
        {
            return (Request.CreateErrorResponse(HttpStatusCode.PreconditionFailed, "Bearer token not exist."));
        }

        user = userManager.Users.Where(u => u.Token.Equals(token)).FirstOrDefault();

        if (user == null)
        {
            return (Request.CreateErrorResponse(HttpStatusCode.Unauthorized, "Token is not valid."));
        }

        //check permissions
        //entity permissions
        if (data.DatasetId > 0)
        {
            Dataset d = datasetManager.GetDataset(data.DatasetId);

            if (d == null)
            {
                return (Request.CreateErrorResponse(HttpStatusCode.PreconditionFailed, "the dataset with the id (" + data.DatasetId + ") does not exist."));
            }

            // NOTE(review): user.Name is used here while user.UserName is used further
            // down for checkout/check-in — confirm both carry the same identifier.
            if (!entityPermissionManager.HasEffectiveRight(user.Name, typeof(Dataset), data.DatasetId, RightType.Write))
            {
                return (Request.CreateErrorResponse(HttpStatusCode.Unauthorized, "The token is not authorized to write into the dataset."));
            }
        }

        #endregion security

        #region incomming values check

        // check incomming values
        // All findings are concatenated so the caller sees every problem at once.
        if (data.DatasetId == 0) { error += "dataset id should be greater then 0."; }
        //if (data.UpdateMethod == null) error += "update method is not set";
        //if (data.Count == 0) error += "count should be greater then 0. ";
        if (data.Columns == null) { error += "cloumns should not be null. "; }
        if (data.Data == null) { error += "data is empty. "; }
        if (data.PrimaryKeys == null || data.PrimaryKeys.Count() == 0) { error += "the UpdateMethod update has been selected but there are no primary keys available. "; }

        if (!string.IsNullOrEmpty(error))
        {
            return (Request.CreateErrorResponse(HttpStatusCode.PreconditionFailed, error));
        }

        #endregion incomming values check

        Dataset dataset = datasetManager.GetDataset(data.DatasetId);

        if (dataset == null)
        {
            return (Request.CreateErrorResponse(HttpStatusCode.PreconditionFailed, "Dataset not exist."));
        }

        // Title of the latest version is reused for the notification mails below.
        DatasetVersion dsv = datasetManager.GetDatasetLatestVersion(dataset);
        string title = dsv.Title;

        if ((data.Data.Count() * data.Columns.Count()) > cellLimit)
        {
            #region async upload with big data

            // if dataste is not in the dataset
            DataApiHelper helper = new DataApiHelper(dataset, user, data, title, UploadMethod.Update);

            // NOTE(review): fire-and-forget — the task is never awaited, so exceptions
            // thrown by helper.Run() are unobserved here; the helper is apparently
            // expected to report progress/failure by mail (see response text).
            Task.Run(() => helper.Run());

            #endregion async upload with big data

            Debug.WriteLine("end of api call");

            return (Request.CreateResponse(HttpStatusCode.OK, "Data has been successfully received and is being processed. For larger data, as in this case, we will keep you informed by mail about the next steps."));
        }
        else
        {
            #region direct update

            var es = new EmailService();
            UploadHelper uploadHelper = new UploadHelper();

            try
            {
                //load strutcured data structure
                StructuredDataStructure dataStructure = dataStructureManager.StructuredDataStructureRepo.Get(dataset.DataStructure.Id);
                List<Error> errors = new List<Error>();

                if (dataStructure == null)
                {
                    return (Request.CreateResponse(HttpStatusCode.ExpectationFailed, "The Datastructure does not exist."));
                }

                APIDataReader reader = new APIDataReader(dataStructure, new ApiFileReaderInfo());
                List<VariableIdentifier> source = new List<VariableIdentifier>();
                reader.SetSubmitedVariableIdentifiers(data.Columns.ToList());

                #region primary key check

                //prepare primary keys ids from the exiting dataset
                // Map the submitted primary-key names (case-insensitive) to the ids of the
                // matching variables of the dataset's structure.
                List<long> variableIds = new List<long>();
                foreach (var variable in dataStructure.Variables)
                {
                    if (data.PrimaryKeys.Any(p => p.ToLower().Equals(variable.Label.ToLower())))
                    {
                        variableIds.Add(variable.Id);
                    }
                }

                // prepare pk index list from data
                // Column positions of the primary keys inside the submitted column list.
                // NOTE(review): IndexOf yields -1 for a key name missing from Columns —
                // presumably IsUnique handles that; verify.
                int[] primaryKeyIndexes = new int[data.PrimaryKeys.Length];
                for (int i = 0; i < data.PrimaryKeys.Length; i++)
                {
                    string pk = data.PrimaryKeys[i];
                    primaryKeyIndexes[i] = data.Columns.ToList().IndexOf(pk);
                }

                //check primary with data : uniqueness
                // The key combination must be unique both in the stored dataset and in
                // the received payload.
                bool IsUniqueInDb = uploadHelper.IsUnique2(dataset.Id, variableIds);
                bool IsUniqueInData = uploadHelper.IsUnique(primaryKeyIndexes, data.Data);

                if (!IsUniqueInDb || !IsUniqueInData)
                {
                    StringBuilder sb = new StringBuilder("Error/s in Primary Keys selection:<br>");
                    if (!IsUniqueInDb) { sb.AppendLine("The selected key is not unique in the data in the dataset."); }
                    if (!IsUniqueInData) { sb.AppendLine("The selected key is not unique in the received data."); }

                    return (Request.CreateResponse(HttpStatusCode.ExpectationFailed, sb.ToString()));
                }

                #endregion primary key check

                #region validate datastructure

                // Compare the submitted column names against the data structure.
                foreach (string c in data.Columns)
                {
                    source.Add(new VariableIdentifier() { name = c });
                }

                errors = reader.ValidateComparisonWithDatatsructure(source);

                if (errors != null && errors.Count > 0)
                {
                    StringBuilder sb = new StringBuilder("The Datastructure is not valid.");
                    foreach (var e in errors)
                    {
                        sb.AppendLine(e.ToHtmlString());
                    }

                    return (Request.CreateResponse(HttpStatusCode.ExpectationFailed, sb.ToString()));
                }

                #endregion validate datastructure

                #region validate data

                errors = new List<Error>();

                // validate rows
                for (int i = 0; i < data.Data.Length; i++)
                {
                    string[] row = data.Data[i];
                    errors.AddRange(reader.ValidateRow(row.ToList(), i));
                }

                if (errors != null && errors.Count > 0)
                {
                    StringBuilder sb = new StringBuilder("The Data is not valid.");
                    foreach (var e in errors)
                    {
                        sb.AppendLine(e.ToHtmlString());
                    }

                    return (Request.CreateResponse(HttpStatusCode.ExpectationFailed, sb.ToString()));
                }

                #endregion validate data

                #region update data

                // Tuple ids of the current effective version; GetSplitDatatuples consumes
                // matched ids from this list via ref.
                List<long> datatupleFromDatabaseIds = datasetManager.GetDatasetVersionEffectiveTupleIds(datasetManager.GetDatasetLatestVersion(dataset.Id));

                // Work on a checked-out working copy; an existing checkout for this user is reused.
                if (datasetManager.IsDatasetCheckedOutFor(dataset.Id, user.UserName) || datasetManager.CheckOutDataset(dataset.Id, user.UserName))
                {
                    workingCopy = datasetManager.GetDatasetWorkingCopy(dataset.Id);

                    List<DataTuple> datatuples = new List<DataTuple>();
                    for (int i = 0; i < data.Data.Length; i++)
                    {
                        string[] row = data.Data[i];
                        datatuples.Add(reader.ReadRow(row.ToList(), i));
                    }

                    //Update Method -- UPDATE
                    //splite datatuples into new and updated tuples
                    if (datatuples.Count > 0)
                    {
                        var splittedDatatuples = uploadHelper.GetSplitDatatuples(datatuples, variableIds, workingCopy, ref datatupleFromDatabaseIds);
                        datasetManager.EditDatasetVersion(workingCopy, splittedDatatuples["new"], splittedDatatuples["edit"], null);
                    }

                    ////set modification
                    workingCopy.ModificationInfo = new EntityAuditInfo()
                    {
                        Performer = user.UserName,
                        Comment = "Data",
                        ActionType = AuditActionType.Edit
                    };

                    // Second edit call persists the audit info before check-in.
                    datasetManager.EditDatasetVersion(workingCopy, null, null, null);

                    datasetManager.CheckInDataset(dataset.Id, data.Data.Length + " rows via api.", user.UserName);

                    //send email
                    es.Send(MessageHelper.GetUpdateDatasetHeader(dataset.Id),
                        MessageHelper.GetUpdateDatasetMessage(dataset.Id, title, user.DisplayName),
                        new List<string>() { user.Email },
                        new List<string>() { ConfigurationManager.AppSettings["SystemEmail"] }
                        );
                }

                #endregion update data

                return (Request.CreateResponse(HttpStatusCode.OK, "Data successfully uploaded."));
            }
            catch (Exception ex)
            {
                //ToDo send email to user
                es.Send(MessageHelper.GetPushApiUploadFailHeader(dataset.Id, title),
                    MessageHelper.GetPushApiUploadFailMessage(dataset.Id, user.UserName, new string[] { "Upload failed: " + ex.Message }),
                    new List<string>() { user.Email },
                    new List<string>() { ConfigurationManager.AppSettings["SystemEmail"] }
                    );

                return (Request.CreateResponse(HttpStatusCode.InternalServerError, ex.Message));
            }

            #endregion direct update
        }
    }
    finally
    {
        // Release all manager resources regardless of outcome.
        datasetManager.Dispose();
        entityPermissionManager.Dispose();
        dataStructureManager.Dispose();
        userManager.Dispose();
        request.Dispose();
    }
}
/// <summary>
/// Validates the primary keys of an incoming PUT request: every submitted key
/// must match a variable label of the data structure (case-insensitive, ordinal),
/// and the key combination must be unique both in the already stored data and in
/// the uploaded file. When the key check passes, the regular <c>Validate()</c>
/// run decides the result. The collected findings are always reported by mail,
/// even on success (empty error list) and when an exception escapes.
/// </summary>
/// <returns>true when the primary keys are valid and Validate() succeeds; otherwise false.</returns>
public async Task<bool> PKCheck()
{
    List<string> errors = new List<string>();

    try
    {
        // The primary key check is only available via the PUT api, so the generic
        // payload must be a PutDataApiModel to expose the primary key list.
        // FIX: soft cast instead of a hard cast — the hard cast made the null check
        // below dead code and threw InvalidCastException for any other model type,
        // bypassing error collection entirely. Now a wrong model type is reported
        // through the "primary keys is empty" path instead.
        PutDataApiModel data = _data as PutDataApiModel;

        string[] pks = null;
        if (data != null)
        {
            pks = data.PrimaryKeys;
        }

        variableIds = new List<long>();

        // FIX: an empty (but non-null) key array is now reported as empty instead of
        // silently running the uniqueness check with no key variables.
        if (pks != null && pks.Length > 0 && _dataStructure != null)
        {
            // map each submitted primary key to the id of the matching variable in
            // the data structure (ordinal, case-insensitive; null-safe, unlike the
            // previous culture-sensitive ToLower comparison)
            foreach (var variable in _dataStructure.Variables)
            {
                if (pks.Any(p => string.Equals(p, variable.Label, StringComparison.OrdinalIgnoreCase)))
                {
                    variableIds.Add(variable.Id);
                }
            }

            // every submitted key must have a matching variable
            if (variableIds.Count != pks.Length)
            {
                errors.Add("The list of primary keys is unequal to the existing equal variables in the datatructure.");
                return false;
            }

            // uniqueness of the key combination in the stored data ...
            bool IsUniqueInDb = uploadHelper.IsUnique2(_dataset.Id, variableIds);
            // ... and in the received file (read as tab-separated text)
            bool IsUniqueInFile = uploadHelper.IsUnique(_dataset.Id, variableIds, ".tsv", Path.GetFileName(_filepath), _filepath, new AsciiFileReaderInfo(), _dataStructure.Id);

            if (!IsUniqueInDb)
            {
                errors.Add("The selected key is not unique in the data in the dataset.");
            }

            if (!IsUniqueInFile)
            {
                errors.Add("The selected key is not unique in the received data.");
            }
        }
        else
        {
            errors.Add("The list of primary keys is empty.");
        }

        // continue with the full validation only when the key check was clean
        if (errors.Count == 0)
        {
            return await Validate();
        }

        return false;
    }
    finally
    {
        // always notify the user about the outcome of the primary key check
        // (the error list is empty on success)
        var es = new EmailService();
        es.Send(MessageHelper.GetPushApiPKCheckHeader(_dataset.Id, _title),
            MessageHelper.GetPushApiPKCheckMessage(_dataset.Id, _user.UserName, errors.ToArray()),
            new List<string>() { _user.Email },
            new List<string>() { ConfigurationManager.AppSettings["SystemEmail"] }
            );
    }
}