public List <Error> FinishUpload2(TaskManager taskManager) { DataStructureManager dsm = new DataStructureManager(); try { List <Error> temp = new List <Error>(); if (TaskManager.Bus.ContainsKey(TaskManager.DATASET_ID) && TaskManager.Bus.ContainsKey(TaskManager.DATASTRUCTURE_ID)) { long id = Convert.ToInt32(TaskManager.Bus[TaskManager.DATASET_ID]); long iddsd = Convert.ToInt32(TaskManager.Bus[TaskManager.DATASTRUCTURE_ID]); //datatuple list List <DataTuple> rows; DatasetManager dm = new DatasetManager(); Dataset ds = dm.GetDataset(id); DatasetVersion workingCopy = new DatasetVersion(); #region Progress Informations if (TaskManager.Bus.ContainsKey(TaskManager.CURRENTPACKAGESIZE)) { TaskManager.Bus[TaskManager.CURRENTPACKAGESIZE] = 0; } else { TaskManager.Bus.Add(TaskManager.CURRENTPACKAGESIZE, 0); } if (TaskManager.Bus.ContainsKey(TaskManager.CURRENTPACKAGE)) { TaskManager.Bus[TaskManager.CURRENTPACKAGE] = 0; } else { TaskManager.Bus.Add(TaskManager.CURRENTPACKAGE, 0); } #endregion #region structured data if (TaskManager.Bus.ContainsKey(TaskManager.DATASTRUCTURE_TYPE) && TaskManager.Bus[TaskManager.DATASTRUCTURE_TYPE].Equals(DataStructureType.Structured)) { try { //Stopwatch fullTime = Stopwatch.StartNew(); //Stopwatch loadDT = Stopwatch.StartNew(); List <AbstractTuple> datatupleFromDatabase = dm.GetDatasetVersionEffectiveTuples(dm.GetDatasetLatestVersion(ds.Id)); //loadDT.Stop(); //Debug.WriteLine("Load DT From Db Time " + loadDT.Elapsed.TotalSeconds.ToString()); StructuredDataStructure sds = dsm.StructuredDataStructureRepo.Get(iddsd); dsm.StructuredDataStructureRepo.LoadIfNot(sds.Variables); #region excel reader if (TaskManager.Bus[TaskManager.EXTENTION].ToString().Equals(".xlsm")) { int packageSize = 10000; TaskManager.Bus[TaskManager.CURRENTPACKAGESIZE] = packageSize; int counter = 0; ExcelReader reader = new ExcelReader(); //schleife dm.CheckOutDatasetIfNot(ds.Id, GetUsernameOrDefault()); // there are cases, the dataset does not get checked out!! 
if (!dm.IsDatasetCheckedOutFor(ds.Id, GetUsernameOrDefault())) { throw new Exception(string.Format("Not able to checkout dataset '{0}' for user '{1}'!", ds.Id, GetUsernameOrDefault())); } workingCopy = dm.GetDatasetWorkingCopy(ds.Id); do { //Stopwatch packageTime = Stopwatch.StartNew(); counter++; TaskManager.Bus[TaskManager.CURRENTPACKAGE] = counter; // open file Stream = reader.Open(TaskManager.Bus[TaskManager.FILEPATH].ToString()); rows = reader.ReadFile(Stream, TaskManager.Bus[TaskManager.FILENAME].ToString(), sds, (int)id, packageSize); if (reader.ErrorMessages.Count > 0) { //model.ErrorList = reader.errorMessages; } else { //XXX Add packagesize to excel read function if (TaskManager.Bus.ContainsKey(TaskManager.DATASET_STATUS)) { if (TaskManager.Bus[TaskManager.DATASET_STATUS].ToString().Equals("new")) { //Stopwatch upload = Stopwatch.StartNew(); dm.EditDatasetVersion(workingCopy, rows, null, null); //Debug.WriteLine("Upload : " + counter + " Time " + upload.Elapsed.TotalSeconds.ToString()); //Debug.WriteLine("----"); } if (TaskManager.Bus[TaskManager.DATASET_STATUS].ToString().Equals("edit")) { if (rows.Count() > 0) { //Stopwatch split = Stopwatch.StartNew(); Dictionary <string, List <DataTuple> > splittedDatatuples = new Dictionary <string, List <DataTuple> >(); splittedDatatuples = uploadWizardHelper.GetSplitDatatuples2(rows, (List <long>)TaskManager.Bus[TaskManager.PRIMARY_KEYS], workingCopy, ref datatupleFromDatabase); //split.Stop(); //Debug.WriteLine("Split : " + counter + " Time " + split.Elapsed.TotalSeconds.ToString()); //Stopwatch upload = Stopwatch.StartNew(); dm.EditDatasetVersion(workingCopy, splittedDatatuples["new"], splittedDatatuples["edit"], null); // upload.Stop(); // Debug.WriteLine("Upload : " + counter + " Time " + upload.Elapsed.TotalSeconds.ToString()); // Debug.WriteLine("----"); } } } else { } } Stream.Close(); //packageTime.Stop(); //Debug.WriteLine("Package : " + counter + " packageTime Time " + packageTime.Elapsed.TotalSeconds.ToString()); } while (rows.Count() > 0); //fullTime.Stop(); //Debug.WriteLine("FullTime " + fullTime.Elapsed.TotalSeconds.ToString()); } #endregion #region ascii reader if (TaskManager.Bus[TaskManager.EXTENTION].ToString().Equals(".csv") || TaskManager.Bus[TaskManager.EXTENTION].ToString().Equals(".txt")) { // open file AsciiReader reader = new AsciiReader(); //Stream = reader.Open(TaskManager.Bus[TaskManager.FILEPATH].ToString()); //DatasetManager dm = new DatasetManager(); //Dataset ds = dm.GetDataset(id); Stopwatch totalTime = Stopwatch.StartNew(); if (dm.IsDatasetCheckedOutFor(ds.Id, GetUsernameOrDefault()) || dm.CheckOutDataset(ds.Id, GetUsernameOrDefault())) { workingCopy = dm.GetDatasetWorkingCopy(ds.Id); int packageSize = 100000; TaskManager.Bus[TaskManager.CURRENTPACKAGESIZE] = packageSize; //schleife int counter = 0; do { counter++; TaskManager.Bus[TaskManager.CURRENTPACKAGE] = counter; Stream = reader.Open(TaskManager.Bus[TaskManager.FILEPATH].ToString()); rows = reader.ReadFile(Stream, TaskManager.Bus[TaskManager.FILENAME].ToString(), (AsciiFileReaderInfo)TaskManager.Bus[TaskManager.FILE_READER_INFO], sds, id, packageSize); Stream.Close(); if (reader.ErrorMessages.Count > 0) { //model.ErrorList = reader.errorMessages; } else { //model.Validated = true; Stopwatch dbTimer = Stopwatch.StartNew(); if (TaskManager.Bus.ContainsKey(TaskManager.DATASET_STATUS)) { if (TaskManager.Bus[TaskManager.DATASET_STATUS].ToString().Equals("new")) { dm.EditDatasetVersion(workingCopy, rows, null, null); } if 
(TaskManager.Bus[TaskManager.DATASET_STATUS].ToString().Equals("edit")) { if (rows.Count() > 0) { Dictionary <string, List <DataTuple> > splittedDatatuples = new Dictionary <string, List <DataTuple> >(); splittedDatatuples = uploadWizardHelper.GetSplitDatatuples2(rows, (List <long>)TaskManager.Bus[TaskManager.PRIMARY_KEYS], workingCopy, ref datatupleFromDatabase); dm.EditDatasetVersion(workingCopy, splittedDatatuples["new"], splittedDatatuples["edit"], null); } } } else { if (rows.Count() > 0) { Dictionary <string, List <DataTuple> > splittedDatatuples = new Dictionary <string, List <DataTuple> >(); splittedDatatuples = uploadWizardHelper.GetSplitDatatuples2(rows, (List <long>)TaskManager.Bus[TaskManager.PRIMARY_KEYS], workingCopy, ref datatupleFromDatabase); dm.EditDatasetVersion(workingCopy, splittedDatatuples["new"], splittedDatatuples["edit"], null); } } dbTimer.Stop(); Debug.WriteLine(" db time" + dbTimer.Elapsed.TotalSeconds.ToString()); } } while (rows.Count() > 0); totalTime.Stop(); Debug.WriteLine(" Total Time " + totalTime.Elapsed.TotalSeconds.ToString()); } //Stream.Close(); } #endregion // start download generator // filepath //string path = ""; //if (workingCopy != null) //{ // path = GenerateDownloadFile(workingCopy); // dm.EditDatasetVersion(workingCopy, null, null, null); //} // ToDo: Get Comment from ui and users dm.CheckInDataset(ds.Id, "upload data from upload wizard", GetUsernameOrDefault()); LoggerFactory.LogData(id.ToString(), typeof(Dataset).Name, Vaiona.Entities.Logging.CrudState.Updated); } catch (Exception e) { temp.Add(new Error(ErrorType.Other, "Can not upload. : " + e.Message)); dm.CheckInDataset(ds.Id, "checked in but no update on data tuples", GetUsernameOrDefault(), ViewCreationBehavior.None); } finally { } } #endregion #region unstructured data if (TaskManager.Bus.ContainsKey(TaskManager.DATASTRUCTURE_TYPE) && TaskManager.Bus[TaskManager.DATASTRUCTURE_TYPE].Equals(DataStructureType.Unstructured)) { workingCopy = dm.GetDatasetLatestVersion(ds.Id); SaveFileInContentDiscriptor(workingCopy); dm.EditDatasetVersion(workingCopy, null, null, null); // ToDo: Get Comment from ui and users dm.CheckInDataset(ds.Id, "upload unstructured data", GetUsernameOrDefault(), ViewCreationBehavior.None); } #endregion } else { temp.Add(new Error(ErrorType.Dataset, "Dataset is not selected.")); } return(temp); } finally { dsm.Dispose(); } }
//temporary solution: norman :FinishUpload2 public List <Error> FinishUpload(TaskManager taskManager) { DataStructureManager dsm = new DataStructureManager(); DatasetManager dm = new DatasetManager(); try { List <Error> temp = new List <Error>(); DatasetVersion workingCopy = new DatasetVersion(); //datatuple list List <DataTuple> rows = new List <DataTuple>(); Dataset ds = null; bool inputWasAltered = false; if (TaskManager.Bus.ContainsKey(TaskManager.DATASET_ID) && TaskManager.Bus.ContainsKey(TaskManager.DATASTRUCTURE_ID)) { long id = Convert.ToInt32(TaskManager.Bus[TaskManager.DATASET_ID]); long iddsd = Convert.ToInt32(TaskManager.Bus[TaskManager.DATASTRUCTURE_ID]); ds = dm.GetDataset(id); // Javad: Please check if the dataset does exists!! //GetValues from the previus version // Status DatasetVersion latestVersion = dm.GetDatasetLatestVersion(ds); string status = DatasetStateInfo.NotValid.ToString(); if (latestVersion.StateInfo != null) { status = latestVersion.StateInfo.State; } #region Progress Informations if (TaskManager.Bus.ContainsKey(TaskManager.CURRENTPACKAGESIZE)) { TaskManager.Bus[TaskManager.CURRENTPACKAGESIZE] = 0; } else { TaskManager.Bus.Add(TaskManager.CURRENTPACKAGESIZE, 0); } if (TaskManager.Bus.ContainsKey(TaskManager.CURRENTPACKAGE)) { TaskManager.Bus[TaskManager.CURRENTPACKAGE] = 0; } else { TaskManager.Bus.Add(TaskManager.CURRENTPACKAGE, 0); } #endregion #region structured data if (TaskManager.Bus.ContainsKey(TaskManager.DATASTRUCTURE_TYPE) && TaskManager.Bus[TaskManager.DATASTRUCTURE_TYPE].Equals(DataStructureType.Structured)) { string title = ""; long datasetid = ds.Id; XmlDatasetHelper xmlDatasetHelper = new XmlDatasetHelper(); title = xmlDatasetHelper.GetInformation(ds.Id, NameAttributeValues.title); try { //Stopwatch fullTime = Stopwatch.StartNew(); //Stopwatch loadDT = Stopwatch.StartNew(); List <long> datatupleFromDatabaseIds = dm.GetDatasetVersionEffectiveTupleIds(dm.GetDatasetLatestVersion(ds.Id)); //loadDT.Stop(); //Debug.WriteLine("Load DT From Db Time " + loadDT.Elapsed.TotalSeconds.ToString()); StructuredDataStructure sds = dsm.StructuredDataStructureRepo.Get(iddsd); dsm.StructuredDataStructureRepo.LoadIfNot(sds.Variables); #region excel reader if (TaskManager.Bus[TaskManager.EXTENTION].ToString().Equals(".xlsm")) { int packageSize = 10000; TaskManager.Bus[TaskManager.CURRENTPACKAGESIZE] = packageSize; int counter = 0; ExcelReader reader = new ExcelReader(); //schleife dm.CheckOutDatasetIfNot(ds.Id, GetUsernameOrDefault()); // there are cases, the dataset does not get checked out!! 
if (!dm.IsDatasetCheckedOutFor(ds.Id, GetUsernameOrDefault())) { throw new Exception(string.Format("Not able to checkout dataset '{0}' for user '{1}'!", ds.Id, GetUsernameOrDefault())); } workingCopy = dm.GetDatasetWorkingCopy(ds.Id); //set StateInfo of the previus version if (workingCopy.StateInfo == null) { workingCopy.StateInfo = new Vaiona.Entities.Common.EntityStateInfo() { State = status }; } else { workingCopy.StateInfo.State = status; } do { //Stopwatch packageTime = Stopwatch.StartNew(); counter++; TaskManager.Bus[TaskManager.CURRENTPACKAGE] = counter; // open file Stream = reader.Open(TaskManager.Bus[TaskManager.FILEPATH].ToString()); Stopwatch upload = Stopwatch.StartNew(); rows = reader.ReadFile(Stream, TaskManager.Bus[TaskManager.FILENAME].ToString(), sds, (int)id, packageSize); upload.Stop(); Debug.WriteLine("ReadFile: " + counter + " Time " + upload.Elapsed.TotalSeconds.ToString()); if (reader.ErrorMessages.Count > 0) { //model.ErrorList = reader.errorMessages; } else { //XXX Add packagesize to excel read function if (TaskManager.Bus.ContainsKey(TaskManager.DATASET_STATUS)) { if (TaskManager.Bus[TaskManager.DATASET_STATUS].ToString().Equals("new")) { upload = Stopwatch.StartNew(); dm.EditDatasetVersion(workingCopy, rows, null, null); upload.Stop(); Debug.WriteLine("EditDatasetVersion: " + counter + " Time " + upload.Elapsed.TotalSeconds.ToString()); //Debug.WriteLine("----"); } if (TaskManager.Bus[TaskManager.DATASET_STATUS].ToString().Equals("edit")) { if (rows.Count() > 0) { //Stopwatch split = Stopwatch.StartNew(); Dictionary <string, List <DataTuple> > splittedDatatuples = new Dictionary <string, List <DataTuple> >(); splittedDatatuples = uploadWizardHelper.GetSplitDatatuples(rows, (List <long>)TaskManager.Bus[TaskManager.PRIMARY_KEYS], workingCopy, ref datatupleFromDatabaseIds); //split.Stop(); //Debug.WriteLine("Split : " + counter + " Time " + split.Elapsed.TotalSeconds.ToString()); //Stopwatch upload = Stopwatch.StartNew(); dm.EditDatasetVersion(workingCopy, splittedDatatuples["new"], splittedDatatuples["edit"], null); // upload.Stop(); // Debug.WriteLine("Upload : " + counter + " Time " + upload.Elapsed.TotalSeconds.ToString()); // Debug.WriteLine("----"); } } } else { } } Stream.Close(); //packageTime.Stop(); //Debug.WriteLine("Package : " + counter + " packageTime Time " + packageTime.Elapsed.TotalSeconds.ToString()); } while (rows.Count() > 0); //fullTime.Stop(); //Debug.WriteLine("FullTime " + fullTime.Elapsed.TotalSeconds.ToString()); } #endregion #region ascii reader if (TaskManager.Bus[TaskManager.EXTENTION].ToString().Equals(".csv") || TaskManager.Bus[TaskManager.EXTENTION].ToString().Equals(".txt")) { // open file AsciiReader reader = new AsciiReader(); //Stream = reader.Open(TaskManager.Bus[TaskManager.FILEPATH].ToString()); //DatasetManager dm = new DatasetManager(); //Dataset ds = dm.GetDataset(id); Stopwatch totalTime = Stopwatch.StartNew(); if (dm.IsDatasetCheckedOutFor(ds.Id, GetUsernameOrDefault()) || dm.CheckOutDataset(ds.Id, GetUsernameOrDefault())) { workingCopy = dm.GetDatasetWorkingCopy(ds.Id); int packageSize = 100000; TaskManager.Bus[TaskManager.CURRENTPACKAGESIZE] = packageSize; //schleife int counter = 0; //set StateInfo of the previus version if (workingCopy.StateInfo == null) { workingCopy.StateInfo = new Vaiona.Entities.Common.EntityStateInfo() { State = status }; } else { workingCopy.StateInfo.State = status; } do { counter++; inputWasAltered = false; TaskManager.Bus[TaskManager.CURRENTPACKAGE] = counter; Stream = 
reader.Open(TaskManager.Bus[TaskManager.FILEPATH].ToString()); rows = reader.ReadFile(Stream, TaskManager.Bus[TaskManager.FILENAME].ToString(), (AsciiFileReaderInfo)TaskManager.Bus[TaskManager.FILE_READER_INFO], sds, id, packageSize); Stream.Close(); if (reader.ErrorMessages.Count > 0) { foreach (var err in reader.ErrorMessages) { temp.Add(new Error(ErrorType.Dataset, err.GetMessage())); } //return temp; } //model.Validated = true; Stopwatch dbTimer = Stopwatch.StartNew(); if (TaskManager.Bus.ContainsKey(TaskManager.DATASET_STATUS)) { if (TaskManager.Bus[TaskManager.DATASET_STATUS].ToString().Equals("new")) { dm.EditDatasetVersion(workingCopy, rows, null, null); } if (TaskManager.Bus[TaskManager.DATASET_STATUS].ToString().Equals("edit")) { if (rows.Count() > 0) { //Dictionary<string, List<DataTuple>> splittedDatatuples = new Dictionary<string, List<AbstractTuple>>(); var splittedDatatuples = uploadWizardHelper.GetSplitDatatuples(rows, (List <long>)TaskManager.Bus[TaskManager.PRIMARY_KEYS], workingCopy, ref datatupleFromDatabaseIds); dm.EditDatasetVersion(workingCopy, splittedDatatuples["new"], splittedDatatuples["edit"], null); inputWasAltered = true; } } } else { if (rows.Count() > 0) { Dictionary <string, List <DataTuple> > splittedDatatuples = new Dictionary <string, List <DataTuple> >(); splittedDatatuples = uploadWizardHelper.GetSplitDatatuples(rows, (List <long>)TaskManager.Bus[TaskManager.PRIMARY_KEYS], workingCopy, ref datatupleFromDatabaseIds); dm.EditDatasetVersion(workingCopy, splittedDatatuples["new"], splittedDatatuples["edit"], null); inputWasAltered = true; } } dbTimer.Stop(); Debug.WriteLine(" db time" + dbTimer.Elapsed.TotalSeconds.ToString()); } while (rows.Count() > 0 || inputWasAltered == true); totalTime.Stop(); Debug.WriteLine(" Total Time " + totalTime.Elapsed.TotalSeconds.ToString()); } //Stream.Close(); } #endregion #region contentdescriptors //remove all contentdescriptors from the old version //generatedTXT if (workingCopy.ContentDescriptors.Any(c => c.Name.Equals("generatedTXT"))) { ContentDescriptor tmp = workingCopy.ContentDescriptors.Where(c => c.Name.Equals("generatedTXT")) .FirstOrDefault(); dm.DeleteContentDescriptor(tmp); } //generatedCSV if (workingCopy.ContentDescriptors.Any(c => c.Name.Equals("generatedCSV"))) { ContentDescriptor tmp = workingCopy.ContentDescriptors.Where(c => c.Name.Equals("generatedCSV")) .FirstOrDefault(); dm.DeleteContentDescriptor(tmp); } //generated if (workingCopy.ContentDescriptors.Any(c => c.Name.Equals("generated"))) { ContentDescriptor tmp = workingCopy.ContentDescriptors.Where(c => c.Name.Equals("generated")) .FirstOrDefault(); dm.DeleteContentDescriptor(tmp); } #endregion // ToDo: Get Comment from ui and users MoveAndSaveOriginalFileInContentDiscriptor(workingCopy); dm.CheckInDataset(ds.Id, "upload data from upload wizard", GetUsernameOrDefault()); //send email var es = new EmailService(); es.Send(MessageHelper.GetUpdateDatasetHeader(), MessageHelper.GetUpdateDatasetMessage(datasetid, title, GetUsernameOrDefault()), ConfigurationManager.AppSettings["SystemEmail"] ); } catch (Exception e) { temp.Add(new Error(ErrorType.Other, "Can not upload. : " + e.Message)); var es = new EmailService(); es.Send(MessageHelper.GetErrorHeader(), "Can not upload. 
: " + e.Message, ConfigurationManager.AppSettings["SystemEmail"] ); } finally { } } #endregion #region unstructured data if (TaskManager.Bus.ContainsKey(TaskManager.DATASTRUCTURE_TYPE) && TaskManager.Bus[TaskManager.DATASTRUCTURE_TYPE].Equals(DataStructureType.Unstructured)) { // checkout the dataset, apply the changes, and check it in. if (dm.IsDatasetCheckedOutFor(ds.Id, GetUsernameOrDefault()) || dm.CheckOutDataset(ds.Id, GetUsernameOrDefault())) { try { workingCopy = dm.GetDatasetWorkingCopy(ds.Id); using (var unitOfWork = this.GetUnitOfWork()) { workingCopy = unitOfWork.GetReadOnlyRepository <DatasetVersion>().Get(workingCopy.Id); //set StateInfo of the previus version if (workingCopy.StateInfo == null) { workingCopy.StateInfo = new Vaiona.Entities.Common.EntityStateInfo() { State = status }; } else { workingCopy.StateInfo.State = status; } unitOfWork.GetReadOnlyRepository <DatasetVersion>().Load(workingCopy.ContentDescriptors); SaveFileInContentDiscriptor(workingCopy); } dm.EditDatasetVersion(workingCopy, null, null, null); // ToDo: Get Comment from ui and users dm.CheckInDataset(ds.Id, "upload unstructured data", GetUsernameOrDefault(), ViewCreationBehavior.None); } catch (Exception ex) { throw ex; } } } #endregion } else { temp.Add(new Error(ErrorType.Dataset, "Dataset is not selected.")); } if (temp.Count <= 0) { dm.CheckInDataset(ds.Id, "checked in but no update on data tuples", GetUsernameOrDefault(), ViewCreationBehavior.None); } else { dm.UndoCheckoutDataset(ds.Id, GetUsernameOrDefault()); } return(temp); } finally { dm.Dispose(); dsm.Dispose(); } }
public async Task<HttpResponseMessage> Put(int id)
{
    var request = Request.CreateResponse();
    User user = null;
    string error = "";

    DatasetManager datasetManager = new DatasetManager();
    UserManager userManager = new UserManager();
    EntityPermissionManager entityPermissionManager = new EntityPermissionManager();

    try
    {
        #region security

        string token = this.Request.Headers.Authorization?.Parameter;

        if (String.IsNullOrEmpty(token))
        {
            return Request.CreateErrorResponse(HttpStatusCode.PreconditionFailed, "Bearer token does not exist.");
        }

        user = userManager.Users.Where(u => u.Token.Equals(token)).FirstOrDefault();

        if (user == null)
        {
            return Request.CreateErrorResponse(HttpStatusCode.Unauthorized, "Token is not valid.");
        }

        //check permissions
        //entity permissions
        if (id > 0)
        {
            Dataset d = datasetManager.GetDataset(id);
            if (d == null)
            {
                return Request.CreateErrorResponse(HttpStatusCode.PreconditionFailed, "The dataset with the id (" + id + ") does not exist.");
            }

            if (!entityPermissionManager.HasEffectiveRight(user.Name, typeof(Dataset), id, RightType.Write))
            {
                return Request.CreateErrorResponse(HttpStatusCode.Unauthorized, "The token is not authorized to write into the dataset.");
            }
        }

        #endregion security

        #region check incoming metadata

        Stream requestStream = await this.Request.Content.ReadAsStreamAsync();
        string contentType = this.Request.Content.Headers.ContentType.MediaType;

        if (string.IsNullOrEmpty(contentType) || (!contentType.Equals("application/xml") && !contentType.Equals("text/plain")))
        {
            return Request.CreateErrorResponse(HttpStatusCode.PreconditionFailed, "The transmitted file is not an XML document.");
        }

        if (requestStream == null)
        {
            return Request.CreateErrorResponse(HttpStatusCode.PreconditionFailed, "Metadata XML was not received.");
        }

        #endregion check incoming metadata

        #region incoming values check

        // check incoming values
        if (id == 0)
        {
            error += "dataset id should be greater than 0.";
        }

        ////if (data.UpdateMethod == null) error += "update method is not set";
        ////if (data.Count == 0) error += "count should be greater than 0. ";
        //if (data.Columns == null) error += "columns should not be null. ";
        //if (data.Data == null) error += "data is empty. ";
        //if (data.PrimaryKeys == null || data.PrimaryKeys.Count() == 0) error += "the UpdateMethod update has been selected but there are no primary keys available. ";

        if (!string.IsNullOrEmpty(error))
        {
            return Request.CreateErrorResponse(HttpStatusCode.PreconditionFailed, error);
        }

        #endregion incoming values check

        Dataset dataset = datasetManager.GetDataset(id);
        if (dataset == null)
        {
            return Request.CreateErrorResponse(HttpStatusCode.PreconditionFailed, "Dataset does not exist.");
        }

        #region convert metadata

        XmlDocument metadataForImport = new XmlDocument();
        metadataForImport.Load(requestStream);

        // metadataStructure ID
        var metadataStructureId = dataset.MetadataStructure.Id;
        var metadataStructrueName = this.GetUnitOfWork().GetReadOnlyRepository<MetadataStructure>().Get(metadataStructureId).Name;

        // load mapping file
        var path_mappingFile = Path.Combine(AppConfiguration.GetModuleWorkspacePath("DIM"), XmlMetadataImportHelper.GetMappingFileName(metadataStructureId, TransmissionType.mappingFileImport, metadataStructrueName));

        // XML mapper + mapping file
        var xmlMapperManager = new XmlMapperManager(TransactionDirection.ExternToIntern);
        xmlMapperManager.Load(path_mappingFile, "IDIV");

        // generate internal metadata without internal attributes
        var metadataResult = xmlMapperManager.Generate(metadataForImport, 1, true);

        // generate internal template metadata xml with the needed attributes
        var xmlMetadatWriter = new XmlMetadataWriter(BExIS.Xml.Helpers.XmlNodeMode.xPath);
        var metadataXml = xmlMetadatWriter.CreateMetadataXml(metadataStructureId, XmlUtility.ToXDocument(metadataResult));
        var metadataXmlTemplate = XmlMetadataWriter.ToXmlDocument(metadataXml);

        // set attributes FROM metadataXmlTemplate TO metadataResult
        var completeMetadata = XmlMetadataImportHelper.FillInXmlValues(metadataResult, metadataXmlTemplate);

        #endregion convert metadata

        if (completeMetadata != null)
        {
            string title = "";

            if (datasetManager.IsDatasetCheckedOutFor(id, user.Name) || datasetManager.CheckOutDataset(id, user.Name))
            {
                DatasetVersion workingCopy = datasetManager.GetDatasetWorkingCopy(id);
                workingCopy.Metadata = completeMetadata;
                workingCopy.Title = xmlDatasetHelper.GetInformation(id, completeMetadata, NameAttributeValues.title);
                workingCopy.Description = xmlDatasetHelper.GetInformation(id, completeMetadata, NameAttributeValues.description);

                //check if module exists
                int v = 1;
                if (workingCopy.Dataset.Versions != null && workingCopy.Dataset.Versions.Count > 1)
                {
                    v = workingCopy.Dataset.Versions.Count();
                }

                //set status
                if (workingCopy.StateInfo == null)
                {
                    workingCopy.StateInfo = new Vaiona.Entities.Common.EntityStateInfo();
                }
                workingCopy.StateInfo.State = DatasetStateInfo.NotValid.ToString();

                title = workingCopy.Title;
                if (string.IsNullOrEmpty(title))
                {
                    title = "No Title available.";
                }

                datasetManager.EditDatasetVersion(workingCopy, null, null, null);
                datasetManager.CheckInDataset(id, "via api.", user.Name, ViewCreationBehavior.None);
            }

            // ToDo add index update to this api
            //if (this.IsAccessible("DDM", "SearchIndex", "ReIndexSingle"))
            //{
            //    var x = this.Run("DDM", "SearchIndex", "ReIndexSingle", new RouteValueDictionary() { { "id", datasetId } });
            //}

            LoggerFactory.LogData(id.ToString(), typeof(Dataset).Name, Vaiona.Entities.Logging.CrudState.Created);

            var es = new EmailService();
            es.Send(MessageHelper.GetUpdateDatasetHeader(id),
                MessageHelper.GetUpdateDatasetMessage(id, title, user.DisplayName),
                ConfigurationManager.AppSettings["SystemEmail"]
                );
        }

        return Request.CreateErrorResponse(HttpStatusCode.OK, "Metadata successfully updated.");
    }
    catch (Exception ex)
    {
        return Request.CreateErrorResponse(HttpStatusCode.InternalServerError, ex.Message);
    }
    finally
    {
        datasetManager.Dispose();
        entityPermissionManager.Dispose();
        userManager.Dispose();
        request.Dispose();
    }
}
public async Task<HttpResponseMessage> Post([FromBody] PushDataApiModel data)
{
    var request = Request.CreateResponse();
    User user = null;
    string error = "";

    DatasetManager datasetManager = new DatasetManager();
    UserManager userManager = new UserManager();
    EntityPermissionManager entityPermissionManager = new EntityPermissionManager();
    DataStructureManager dataStructureManager = new DataStructureManager();
    ApiConfigurator apiHelper = new ApiConfigurator();

    DatasetVersion workingCopy = new DatasetVersion();
    List<DataTuple> rows = new List<DataTuple>();

    //load cell limit from apiConfig
    int cellLimit = 100000;
    if (apiHelper != null && apiHelper.Settings.ContainsKey(ApiConfigurator.CELLS))
    {
        Int32.TryParse(apiHelper.Settings[ApiConfigurator.CELLS], out cellLimit);
    }

    try
    {
        #region security

        string token = this.Request.Headers.Authorization?.Parameter;

        if (String.IsNullOrEmpty(token))
        {
            return Request.CreateErrorResponse(HttpStatusCode.PreconditionFailed, "Bearer token does not exist.");
        }

        user = userManager.Users.Where(u => u.Token.Equals(token)).FirstOrDefault();

        if (user == null)
        {
            return Request.CreateErrorResponse(HttpStatusCode.Unauthorized, "Token is not valid.");
        }

        //check permissions
        //entity permissions
        if (data.DatasetId > 0)
        {
            Dataset d = datasetManager.GetDataset(data.DatasetId);
            if (d == null)
            {
                return Request.CreateErrorResponse(HttpStatusCode.PreconditionFailed, "The dataset with the id (" + data.DatasetId + ") does not exist.");
            }

            if (!entityPermissionManager.HasEffectiveRight(user.Name, typeof(Dataset), data.DatasetId, RightType.Write))
            {
                return Request.CreateErrorResponse(HttpStatusCode.Unauthorized, "The token is not authorized to write into the dataset.");
            }
        }

        #endregion security

        #region incoming values check

        // check incoming values
        if (data.DatasetId == 0)
        {
            error += "dataset id should be greater than 0. ";
        }
        //if (data.UpdateMethod == null) error += "update method is not set";
        if (data.Columns == null)
        {
            error += "columns should not be null. ";
        }
        if (data.Data == null)
        {
            error += "data is empty. ";
        }

        if (!string.IsNullOrEmpty(error))
        {
            return Request.CreateErrorResponse(HttpStatusCode.PreconditionFailed, error);
        }

        #endregion incoming values check

        Dataset dataset = datasetManager.GetDataset(data.DatasetId);
        if (dataset == null)
        {
            return Request.CreateErrorResponse(HttpStatusCode.ExpectationFailed, "Dataset does not exist.");
        }

        DatasetVersion dsv = datasetManager.GetDatasetLatestVersion(dataset);
        string title = dsv.Title;

        if ((data.Data.Count() * data.Columns.Count()) > cellLimit)
        {
            #region async upload with big data

            // the payload exceeds the configured cell limit, so hand the upload over to a background task
            DataApiHelper helper = new DataApiHelper(dataset, user, data, title, UploadMethod.Append);
            Task.Run(() => helper.Run());

            #endregion async upload with big data

            Debug.WriteLine("end of api call");

            return Request.CreateResponse(HttpStatusCode.OK, "Data has been successfully received and is being processed. For larger data, as in this case, we will keep you informed by mail about the next steps.");
        }
        else
        {
            #region direct upload

            var es = new EmailService();

            try
            {
                //load structured data structure
                StructuredDataStructure dataStructure = dataStructureManager.StructuredDataStructureRepo.Get(dataset.DataStructure.Id);
                List<Error> errors = new List<Error>();

                if (dataStructure == null)
                {
                    return Request.CreateErrorResponse(HttpStatusCode.ExpectationFailed, "The data structure does not exist.");
                }

                APIDataReader reader = new APIDataReader(dataStructure, new ApiFileReaderInfo());

                List<VariableIdentifier> source = new List<VariableIdentifier>();
                reader.SetSubmitedVariableIdentifiers(data.Columns.ToList());

                //validate the submitted columns against the data structure
                foreach (string c in data.Columns)
                {
                    source.Add(new VariableIdentifier() { name = c });
                }

                errors = reader.ValidateComparisonWithDatatsructure(source);

                if (errors != null && errors.Count > 0)
                {
                    StringBuilder sb = new StringBuilder("The data structure is not valid.");
                    foreach (var e in errors)
                    {
                        sb.AppendLine(e.ToHtmlString());
                    }

                    return Request.CreateErrorResponse(HttpStatusCode.ExpectationFailed, sb.ToString());
                }

                errors = new List<Error>();

                // validate rows
                for (int i = 0; i < data.Data.Length; i++)
                {
                    string[] row = data.Data[i];
                    errors.AddRange(reader.ValidateRow(row.ToList(), i));
                }

                if (errors != null && errors.Count > 0)
                {
                    StringBuilder sb = new StringBuilder("The data is not valid.");
                    foreach (var e in errors)
                    {
                        sb.AppendLine(e.ToHtmlString());
                    }

                    return Request.CreateErrorResponse(HttpStatusCode.ExpectationFailed, sb.ToString());
                }

                if (datasetManager.IsDatasetCheckedOutFor(dataset.Id, user.UserName) || datasetManager.CheckOutDataset(dataset.Id, user.UserName))
                {
                    workingCopy = datasetManager.GetDatasetWorkingCopy(dataset.Id);

                    List<DataTuple> datatuples = new List<DataTuple>();
                    for (int i = 0; i < data.Data.Length; i++)
                    {
                        string[] row = data.Data[i];
                        datatuples.Add(reader.ReadRow(row.ToList(), i));
                    }

                    if (datatuples.Count > 0)
                    {
                        //set modification
                        workingCopy.ModificationInfo = new EntityAuditInfo()
                        {
                            Performer = user.UserName,
                            Comment = "Data",
                            ActionType = AuditActionType.Edit
                        };

                        datasetManager.EditDatasetVersion(workingCopy, datatuples, null, null);
                    }

                    datasetManager.CheckInDataset(dataset.Id, data.Data.Length + " rows via api.", user.UserName);

                    //send email
                    es.Send(MessageHelper.GetUpdateDatasetHeader(dataset.Id),
                        MessageHelper.GetUpdateDatasetMessage(dataset.Id, title, user.DisplayName),
                        new List<string>() { user.Email },
                        new List<string>() { ConfigurationManager.AppSettings["SystemEmail"] }
                        );
                }

                return Request.CreateResponse(HttpStatusCode.OK, "Data successfully uploaded.");
            }
            catch (Exception ex)
            {
                //send error email to user
                es.Send(MessageHelper.GetPushApiUploadFailHeader(dataset.Id, title),
                    MessageHelper.GetPushApiUploadFailMessage(dataset.Id, user.UserName, new string[] { "Upload failed: " + ex.Message }),
                    new List<string>() { user.Email },
                    new List<string>() { ConfigurationManager.AppSettings["SystemEmail"] }
                    );

                return Request.CreateResponse(HttpStatusCode.InternalServerError, ex.Message);
            }

            #endregion direct upload
        }
    }
    finally
    {
        datasetManager.Dispose();
        entityPermissionManager.Dispose();
        dataStructureManager.Dispose();
        userManager.Dispose();
        request.Dispose();
    }
}
//temporary solution: norman :FinishUpload2 public async Task <List <Error> > FinishUpload() { DataStructureManager dsm = new DataStructureManager(); DatasetManager dm = new DatasetManager(); IOUtility iOUtility = new IOUtility(); List <Error> temp = new List <Error>(); long id = 0; string title = ""; int numberOfRows = 0; int numberOfSkippedRows = 0; try { DatasetVersion workingCopy = new DatasetVersion(); //datatuple list List <DataTuple> rows = new List <DataTuple>(); //Dataset ds = null; bool inputWasAltered = false; if (Bus.ContainsKey(TaskManager.DATASET_ID) && Bus.ContainsKey(TaskManager.DATASTRUCTURE_ID)) { id = Convert.ToInt32(Bus[TaskManager.DATASET_ID]); long iddsd = Convert.ToInt32(Bus[TaskManager.DATASTRUCTURE_ID]); //GetValues from the previus version // Status DatasetVersion latestVersion = dm.GetDatasetLatestVersion(id); title = latestVersion.Title; string status = DatasetStateInfo.NotValid.ToString(); if (latestVersion.StateInfo != null) { status = latestVersion.StateInfo.State; } #region Progress Informations if (Bus.ContainsKey(TaskManager.CURRENTPACKAGESIZE)) { Bus[TaskManager.CURRENTPACKAGESIZE] = 0; } else { Bus.Add(TaskManager.CURRENTPACKAGESIZE, 0); } if (Bus.ContainsKey(TaskManager.CURRENTPACKAGE)) { Bus[TaskManager.CURRENTPACKAGE] = 0; } else { Bus.Add(TaskManager.CURRENTPACKAGE, 0); } #endregion Progress Informations #region structured data if (Bus.ContainsKey(TaskManager.DATASTRUCTURE_TYPE) && Bus[TaskManager.DATASTRUCTURE_TYPE].Equals(DataStructureType.Structured)) { long datasetid = id; XmlDatasetHelper xmlDatasetHelper = new XmlDatasetHelper(); try { // load all data tuple ids from the latest version List <long> datatupleFromDatabaseIds = dm.GetDatasetVersionEffectiveTupleIds(dm.GetDatasetLatestVersion(id)); // load structured data structure StructuredDataStructure sds = dsm.StructuredDataStructureRepo.Get(iddsd); dsm.StructuredDataStructureRepo.LoadIfNot(sds.Variables); #region excel reader if (Bus[TaskManager.EXTENTION].ToString().Equals(".xlsm") || iOUtility.IsSupportedExcelFile(Bus[TaskManager.EXTENTION].ToString())) { int packageSize = 100000; Bus[TaskManager.CURRENTPACKAGESIZE] = packageSize; int counter = 0; //schleife dm.CheckOutDatasetIfNot(id, User.Name); // there are cases, the dataset does not get checked out!! 
if (!dm.IsDatasetCheckedOutFor(id, User.Name)) { throw new Exception(string.Format("Not able to checkout dataset '{0}' for user '{1}'!", id, User.Name)); } workingCopy = dm.GetDatasetWorkingCopy(id); //set StateInfo of the previus version if (workingCopy.StateInfo == null) { workingCopy.StateInfo = new Vaiona.Entities.Common.EntityStateInfo() { State = status }; } else { workingCopy.StateInfo.State = status; } ExcelReader reader = null; ExcelFileReaderInfo excelFileReaderInfo = null; if (iOUtility.IsSupportedExcelFile(Bus[TaskManager.EXTENTION].ToString())) { excelFileReaderInfo = (ExcelFileReaderInfo)Bus[TaskManager.FILE_READER_INFO]; } reader = new ExcelReader(sds, excelFileReaderInfo); do { counter++; Bus[TaskManager.CURRENTPACKAGE] = counter; //open stream Stream = reader.Open(Bus[TaskManager.FILEPATH].ToString()); rows = new List <DataTuple>(); if (iOUtility.IsSupportedExcelFile(Bus[TaskManager.EXTENTION].ToString())) { if (reader.Position < excelFileReaderInfo.DataEndRow) { rows = reader.ReadFile(Stream, Bus[TaskManager.FILENAME].ToString(), (int)id, packageSize); } } else { rows = reader.ReadTemplateFile(Stream, Bus[TaskManager.FILENAME].ToString(), (int)id, packageSize); } //Debug.WriteLine("ReadFile: " + counter + " Time " + upload.Elapsed.TotalSeconds.ToString()); if (reader.ErrorMessages.Count > 0) { //model.ErrorList = reader.errorMessages; } else { //XXX Add packagesize to excel read function if (Bus.ContainsKey(TaskManager.DATASET_STATUS)) { if (Bus[TaskManager.DATASET_STATUS].ToString().Equals("new") || ((UploadMethod)Bus[TaskManager.UPLOAD_METHOD]).Equals(UploadMethod.Append)) { dm.EditDatasetVersion(workingCopy, rows, null, null); //Debug.WriteLine("EditDatasetVersion: " + counter + " Time " + upload.Elapsed.TotalSeconds.ToString()); //Debug.WriteLine("----"); } else if (Bus[TaskManager.DATASET_STATUS].ToString().Equals("edit")) { if (rows.Count() > 0) { Dictionary <string, List <DataTuple> > splittedDatatuples = new Dictionary <string, List <DataTuple> >(); splittedDatatuples = uploadWizardHelper.GetSplitDatatuples(rows, (List <long>)Bus[TaskManager.PRIMARY_KEYS], workingCopy, ref datatupleFromDatabaseIds); dm.EditDatasetVersion(workingCopy, splittedDatatuples["new"], splittedDatatuples["edit"], null); } } } else { } } Stream?.Close(); //count rows numberOfRows += rows.Count(); } while (rows.Count() > 0 && rows.Count() <= packageSize); numberOfSkippedRows = reader.NumberOSkippedfRows; } #endregion excel reader #region ascii reader if (iOUtility.IsSupportedAsciiFile(Bus[TaskManager.EXTENTION].ToString())) { // open file AsciiReader reader = new AsciiReader(sds, (AsciiFileReaderInfo)Bus[TaskManager.FILE_READER_INFO]); if (dm.IsDatasetCheckedOutFor(id, User.Name) || dm.CheckOutDataset(id, User.Name)) { workingCopy = dm.GetDatasetWorkingCopy(id); //set packagsize for one loop int packageSize = 100000; Bus[TaskManager.CURRENTPACKAGESIZE] = packageSize; //schleife int counter = 0; //set StateInfo of the previus version if (workingCopy.StateInfo == null) { workingCopy.StateInfo = new Vaiona.Entities.Common.EntityStateInfo() { State = status }; } else { workingCopy.StateInfo.State = status; } do { counter++; inputWasAltered = false; Bus[TaskManager.CURRENTPACKAGE] = counter; Stream = reader.Open(Bus[TaskManager.FILEPATH].ToString()); rows = reader.ReadFile(Stream, Bus[TaskManager.FILENAME].ToString(), id, packageSize); Stream.Close(); if (reader.ErrorMessages.Count > 0) { foreach (var err in reader.ErrorMessages) { temp.Add(new Error(ErrorType.Dataset, err.GetMessage())); } 
//return temp; } if (Bus.ContainsKey(TaskManager.DATASET_STATUS)) //check wheter there is a dataset status in the upload wizard bus { // based the dataset status and/ or the upload method if (Bus[TaskManager.DATASET_STATUS].ToString().Equals("new") || ((UploadMethod)Bus[TaskManager.UPLOAD_METHOD]).Equals(UploadMethod.Append)) { dm.EditDatasetVersion(workingCopy, rows, null, null); // add all datatuples to the datasetversion } else if (Bus[TaskManager.DATASET_STATUS].ToString().Equals("edit")) // datatuples allready exist { if (rows.Count() > 0) { //split the incoming datatuples to (new|edit) based on the primary keys var splittedDatatuples = uploadWizardHelper.GetSplitDatatuples(rows, (List <long>)Bus[TaskManager.PRIMARY_KEYS], workingCopy, ref datatupleFromDatabaseIds); dm.EditDatasetVersion(workingCopy, splittedDatatuples["new"], splittedDatatuples["edit"], null); inputWasAltered = true; } } } else // if there is no dataset status in the bus, use dataset status edit { if (rows.Count() > 0) { //split the incoming datatuples to (new|edit) based on the primary keys Dictionary <string, List <DataTuple> > splittedDatatuples = new Dictionary <string, List <DataTuple> >(); splittedDatatuples = uploadWizardHelper.GetSplitDatatuples(rows, (List <long>)Bus[TaskManager.PRIMARY_KEYS], workingCopy, ref datatupleFromDatabaseIds); dm.EditDatasetVersion(workingCopy, splittedDatatuples["new"], splittedDatatuples["edit"], null); inputWasAltered = true; } } //count rows numberOfRows += rows.Count(); } while ((rows.Count() > 0 && rows.Count() <= packageSize) || inputWasAltered == true); numberOfSkippedRows = reader.NumberOSkippedfRows; } //Stream.Close(); } #endregion ascii reader #region contentdescriptors //remove all contentdescriptors from the old version //generatedTXT if (workingCopy.ContentDescriptors.Any(c => c.Name.Equals("generatedTXT"))) { ContentDescriptor tmp = workingCopy.ContentDescriptors.Where(c => c.Name.Equals("generatedTXT")) .FirstOrDefault(); dm.DeleteContentDescriptor(tmp); } //generatedCSV if (workingCopy.ContentDescriptors.Any(c => c.Name.Equals("generatedCSV"))) { ContentDescriptor tmp = workingCopy.ContentDescriptors.Where(c => c.Name.Equals("generatedCSV")) .FirstOrDefault(); dm.DeleteContentDescriptor(tmp); } //generated if (workingCopy.ContentDescriptors.Any(c => c.Name.Equals("generated"))) { ContentDescriptor tmp = workingCopy.ContentDescriptors.Where(c => c.Name.Equals("generated")) .FirstOrDefault(); dm.DeleteContentDescriptor(tmp); } #endregion contentdescriptors #region set System value into metadata if (Bus.ContainsKey(TaskManager.DATASET_STATUS)) { bool newdataset = Bus[TaskManager.DATASET_STATUS].ToString().Equals("new"); int v = 1; if (workingCopy.Dataset.Versions != null && workingCopy.Dataset.Versions.Count > 1) { v = workingCopy.Dataset.Versions.Count(); } //set modification workingCopy.ModificationInfo = new EntityAuditInfo() { Performer = User.Name, Comment = "Data", ActionType = newdataset ? 
AuditActionType.Create : AuditActionType.Edit }; setSystemValuesToMetadata(id, v, workingCopy.Dataset.MetadataStructure.Id, workingCopy.Metadata, newdataset); dm.EditDatasetVersion(workingCopy, null, null, null); } #endregion set System value into metadata // ToDo: Get Comment from ui and users MoveAndSaveOriginalFileInContentDiscriptor(workingCopy); dm.CheckInDataset(id, numberOfRows + " rows", User.Name); //send email var es = new EmailService(); es.Send(MessageHelper.GetUpdateDatasetHeader(datasetid), MessageHelper.GetUpdateDatasetMessage(datasetid, title, User.DisplayName), ConfigurationManager.AppSettings["SystemEmail"] ); } catch (Exception e) { temp.Add(new Error(ErrorType.Other, "Can not upload. : " + e.Message)); var es = new EmailService(); es.Send(MessageHelper.GetErrorHeader(), "Can not upload. : " + e.Message, ConfigurationManager.AppSettings["SystemEmail"] ); } finally { } } #endregion structured data #region unstructured data if (Bus.ContainsKey(TaskManager.DATASTRUCTURE_TYPE) && Bus[TaskManager.DATASTRUCTURE_TYPE].Equals(DataStructureType.Unstructured)) { // checkout the dataset, apply the changes, and check it in. if (dm.IsDatasetCheckedOutFor(id, User.Name) || dm.CheckOutDataset(id, User.Name)) { try { workingCopy = dm.GetDatasetWorkingCopy(id); using (var unitOfWork = this.GetUnitOfWork()) { workingCopy.VersionNo += 1; //set StateInfo of the previus version if (workingCopy.StateInfo == null) { workingCopy.StateInfo = new Vaiona.Entities.Common.EntityStateInfo() { State = status }; } else { workingCopy.StateInfo.State = status; } unitOfWork.GetReadOnlyRepository <DatasetVersion>().Load(workingCopy.ContentDescriptors); SaveFileInContentDiscriptor(workingCopy); } if (Bus.ContainsKey(TaskManager.DATASET_STATUS)) { bool newdataset = Bus[TaskManager.DATASET_STATUS].ToString().Equals("new"); int v = 1; if (workingCopy.Dataset.Versions != null && workingCopy.Dataset.Versions.Count > 1) { v = workingCopy.Dataset.Versions.Count(); } //set modification workingCopy.ModificationInfo = new EntityAuditInfo() { Performer = User.Name, Comment = "File", ActionType = AuditActionType.Create }; setSystemValuesToMetadata(id, v, workingCopy.Dataset.MetadataStructure.Id, workingCopy.Metadata, newdataset); dm.EditDatasetVersion(workingCopy, null, null, null); } //filename string filename = ""; if (Bus.ContainsKey(TaskManager.FILENAME)) { filename = Bus[TaskManager.FILENAME]?.ToString(); } // ToDo: Get Comment from ui and users dm.CheckInDataset(id, filename, User.Name, ViewCreationBehavior.None); } catch (Exception ex) { throw ex; } } } #endregion unstructured data } else { temp.Add(new Error(ErrorType.Dataset, "Dataset is not selected.")); } if (temp.Count <= 0) { dm.CheckInDataset(id, "no update on data tuples", User.Name, ViewCreationBehavior.None); } else { dm.UndoCheckoutDataset(id, User.Name); } } catch (Exception ex) { temp.Add(new Error(ErrorType.Dataset, ex.Message)); dm.CheckInDataset(id, "no update on data tuples", User.Name, ViewCreationBehavior.None); } finally { if (RunningASync) { var user = User; if (temp.Any()) { var es = new EmailService(); es.Send(MessageHelper.GetPushApiUploadFailHeader(id, title), MessageHelper.GetPushApiUploadFailMessage(id, user.Name, temp.Select(e => e.ToString()).ToArray()), new List <string> { user.Email }, null, new List <string> { ConfigurationManager.AppSettings["SystemEmail"] }); } else { var es = new EmailService(); es.Send(MessageHelper.GetASyncFinishUploadHeader(id, title), MessageHelper.GetASyncFinishUploadMessage(id, title, numberOfRows, 
numberOfSkippedRows), new List <string> { user.Email }, null, new List <string> { ConfigurationManager.AppSettings["SystemEmail"] }); } } dm.Dispose(); dsm.Dispose(); } return(temp); }
/// <summary>
/// Submit a dataset based on the information
/// in the CreateTaskManager
/// </summary>
public long SubmitDataset(bool valid)
{
    #region create dataset

    DatasetManager dm = new DatasetManager();
    DataStructureManager dsm = new DataStructureManager();
    ResearchPlanManager rpm = new ResearchPlanManager();
    XmlDatasetHelper xmlDatasetHelper = new XmlDatasetHelper();

    string title = "";
    long datasetId = 0;
    bool newDataset = true;

    try
    {
        TaskManager = (CreateTaskmanager)Session["CreateDatasetTaskmanager"];

        if (TaskManager.Bus.ContainsKey(CreateTaskmanager.DATASTRUCTURE_ID) &&
            TaskManager.Bus.ContainsKey(CreateTaskmanager.RESEARCHPLAN_ID) &&
            TaskManager.Bus.ContainsKey(CreateTaskmanager.METADATASTRUCTURE_ID))
        {
            // for a new dataset
            if (!TaskManager.Bus.ContainsKey(CreateTaskmanager.ENTITY_ID))
            {
                long datastructureId = Convert.ToInt64(TaskManager.Bus[CreateTaskmanager.DATASTRUCTURE_ID]);
                long researchPlanId = Convert.ToInt64(TaskManager.Bus[CreateTaskmanager.RESEARCHPLAN_ID]);
                long metadataStructureId = Convert.ToInt64(TaskManager.Bus[CreateTaskmanager.METADATASTRUCTURE_ID]);

                DataStructure dataStructure = dsm.StructuredDataStructureRepo.Get(datastructureId);
                //if the datastructure is not a structured one
                if (dataStructure == null)
                {
                    dataStructure = dsm.UnStructuredDataStructureRepo.Get(datastructureId);
                }

                ResearchPlan rp = rpm.Repo.Get(researchPlanId);

                MetadataStructureManager msm = new MetadataStructureManager();
                MetadataStructure metadataStructure = msm.Repo.Get(metadataStructureId);

                var ds = dm.CreateEmptyDataset(dataStructure, rp, metadataStructure);
                datasetId = ds.Id;

                // add security
                if (GetUsernameOrDefault() != "DEFAULT")
                {
                    EntityPermissionManager entityPermissionManager = new EntityPermissionManager();
                    entityPermissionManager.Create<User>(GetUsernameOrDefault(), "Dataset", typeof(Dataset), ds.Id, Enum.GetValues(typeof(RightType)).Cast<RightType>().ToList());
                }
            }
            else
            {
                datasetId = Convert.ToInt64(TaskManager.Bus[CreateTaskmanager.ENTITY_ID]);
                newDataset = false;
            }

            TaskManager = (CreateTaskmanager)Session["CreateDatasetTaskmanager"];

            if (dm.IsDatasetCheckedOutFor(datasetId, GetUsernameOrDefault()) || dm.CheckOutDataset(datasetId, GetUsernameOrDefault()))
            {
                DatasetVersion workingCopy = dm.GetDatasetWorkingCopy(datasetId);

                if (TaskManager.Bus.ContainsKey(CreateTaskmanager.METADATA_XML))
                {
                    XDocument xMetadata = (XDocument)TaskManager.Bus[CreateTaskmanager.METADATA_XML];
                    workingCopy.Metadata = Xml.Helpers.XmlWriter.ToXmlDocument(xMetadata);
                }

                //set status
                if (workingCopy.StateInfo == null)
                {
                    workingCopy.StateInfo = new Vaiona.Entities.Common.EntityStateInfo();
                }

                if (valid)
                {
                    workingCopy.StateInfo.State = DatasetStateInfo.Valid.ToString();
                }
                else
                {
                    workingCopy.StateInfo.State = DatasetStateInfo.NotValid.ToString();
                }

                title = xmlDatasetHelper.GetInformationFromVersion(workingCopy.Id, NameAttributeValues.title);
                if (string.IsNullOrEmpty(title))
                {
                    title = "No Title available.";
                }

                TaskManager.AddToBus(CreateTaskmanager.ENTITY_TITLE, title); //workingCopy.Metadata.SelectNodes("Metadata/Description/Description/Title/Title")[0].InnerText);
                TaskManager.AddToBus(CreateTaskmanager.ENTITY_ID, datasetId);

                dm.EditDatasetVersion(workingCopy, null, null, null);
                dm.CheckInDataset(datasetId, "Metadata was submitted.", GetUsernameOrDefault(), ViewCreationBehavior.None);

                //add to index
                // ToDo check which SearchProvider it is, default luceneprovider
                // BUG: invalid call to ddm method
                // TODO: Modularity -> call DDM Reindex
                /*
                 * <Export tag="internalApi" id="SearchIndex"
                 *   title="Reindex Search"
                 *   description="Reindex Search" icon=""
                 *   controller="SearchIndex" action="Get"
                 *   extends="" />
                 */
                // WORKAROUND: do not reindex
                //ISearchProvider provider = IoCFactory.Container.ResolveForSession<ISearchProvider>() as ISearchProvider;
                //provider?.UpdateSingleDatasetIndex(datasetId, IndexingAction.CREATE);

                if (this.IsAccessibale("DDM", "SearchIndex", "ReIndexSingle"))
                {
                    var x = this.Run("DDM", "SearchIndex", "ReIndexSingle", new RouteValueDictionary() { { "id", datasetId } });
                }

                LoggerFactory.LogData(datasetId.ToString(), typeof(Dataset).Name, Vaiona.Entities.Logging.CrudState.Created);

                if (newDataset)
                {
                    var es = new EmailService();
                    es.Send(MessageHelper.GetCreateDatasetHeader(),
                        MessageHelper.GetCreateDatasetMessage(datasetId, title, GetUsernameOrDefault()),
                        ConfigurationManager.AppSettings["SystemEmail"]
                        );
                }
                else
                {
                    var es = new EmailService();
                    es.Send(MessageHelper.GetUpdateDatasetHeader(),
                        MessageHelper.GetUpdateDatasetMessage(datasetId, title, GetUsernameOrDefault()),
                        ConfigurationManager.AppSettings["SystemEmail"]
                        );
                }
            }

            return datasetId;
        }
    }
    catch (Exception ex)
    {
        var es = new EmailService();
        es.Send(MessageHelper.GetUpdateDatasetHeader(),
            ex.Message,
            ConfigurationManager.AppSettings["SystemEmail"]
            );
    }
    finally
    {
        dm.Dispose();
        rpm.Dispose();
        dsm.Dispose();
    }

    #endregion create dataset

    return -1;
}
public Dataset UpdateOneTupleForDataset(Dataset dataset, StructuredDataStructure dataStructure, long id, int value)
{
    dataset.Should().NotBeNull();
    dataset.Status.Should().Be(DatasetStatus.CheckedIn);

    DatasetManager dm = new DatasetManager();
    try
    {
        if (dm.IsDatasetCheckedOutFor(dataset.Id, "David") || dm.CheckOutDataset(dataset.Id, "David"))
        {
            dataset.Status.Should().Be(DatasetStatus.CheckedOut, "Dataset must be in checked-out status.");

            DatasetVersion workingCopy = dm.GetDatasetWorkingCopy(dataset.Id);

            // build a template tuple with one value per variable
            DataTuple dt = new DataTuple();
            dt.VariableValues.Add(new VariableValue() { VariableId = dataStructure.Variables.First().Id, Value = value });
            dt.VariableValues.Add(new VariableValue() { VariableId = dataStructure.Variables.Skip(1).First().Id, Value = "Test" });
            dt.VariableValues.Add(new VariableValue() { VariableId = dataStructure.Variables.Skip(2).First().Id, Value = 5 });
            dt.VariableValues.Add(new VariableValue() { VariableId = dataStructure.Variables.Skip(3).First().Id, Value = true });
            dt.VariableValues.Add(new VariableValue() { VariableId = dataStructure.Variables.Skip(4).First().Id, Value = DateTime.Now.ToString() });
            dt.Dematerialize();

            dt.Should().NotBeNull();
            dt.JsonVariableValues.Should().NotBeNull();

            // copy the serialized values onto the tuple that is to be edited
            List<DataTuple> tuples = new List<DataTuple>();
            DataTuple newDt = new DataTuple();
            newDt.Id = id;
            newDt.XmlAmendments = dt.XmlAmendments;
            newDt.JsonVariableValues = dt.JsonVariableValues;
            newDt.Materialize();
            newDt.OrderNo = 1;
            tuples.Add(newDt);

            dm.EditDatasetVersion(workingCopy, null, tuples, null);
            dataset.Status.Should().Be(DatasetStatus.CheckedOut, "Dataset must be in checked-out status.");
        }

        return dataset;
    }
    catch (Exception)
    {
        return null;
    }
    finally
    {
        dm.Dispose();
    }
}
public Dataset GenerateTuplesForDataset(Dataset dataset, StructuredDataStructure dataStructure, long numberOfTuples, string username)
{
    dataset.Should().NotBeNull();
    dataset.Status.Should().Be(DatasetStatus.CheckedIn);
    numberOfTuples.Should().BeGreaterThan(0);

    var r = new Random();
    DatasetManager dm = new DatasetManager();
    try
    {
        if (dm.IsDatasetCheckedOutFor(dataset.Id, username) || dm.CheckOutDataset(dataset.Id, username))
        {
            dataset.Status.Should().Be(DatasetStatus.CheckedOut, "Dataset must be in checked-out status.");

            DatasetVersion workingCopy = dm.GetDatasetWorkingCopy(dataset.Id);

            // build a template tuple with one value per variable
            DataTuple dt = new DataTuple();
            dt.VariableValues.Add(new VariableValue() { VariableId = dataStructure.Variables.First().Id, Value = r.Next() });
            dt.VariableValues.Add(new VariableValue() { VariableId = dataStructure.Variables.Skip(1).First().Id, Value = "Test" });
            dt.VariableValues.Add(new VariableValue() { VariableId = dataStructure.Variables.Skip(2).First().Id, Value = r.Next() });
            dt.VariableValues.Add(new VariableValue() { VariableId = dataStructure.Variables.Skip(3).First().Id, Value = true });
            dt.VariableValues.Add(new VariableValue() { VariableId = dataStructure.Variables.Skip(4).First().Id, Value = "01.01.2017" });
            dt.Dematerialize();

            dt.Should().NotBeNull();
            //dt.XmlVariableValues.Should().NotBeNull();

            // clone the template into the requested number of tuples
            List<DataTuple> tuples = new List<DataTuple>();
            for (int i = 0; i < numberOfTuples; i++)
            {
                DataTuple newDt = new DataTuple();
                newDt.XmlAmendments = dt.XmlAmendments;
                //newDt.XmlVariableValues = dt.XmlVariableValues;
                newDt.JsonVariableValues = dt.JsonVariableValues;
                newDt.Materialize();
                newDt.OrderNo = i;
                tuples.Add(newDt);
            }

            dm.EditDatasetVersion(workingCopy, tuples, null, null);
            dataset.Status.Should().Be(DatasetStatus.CheckedOut, "Dataset must be in checked-out status.");
        }

        return dataset;
    }
    catch (Exception)
    {
        return null;
    }
    finally
    {
        dm.Dispose();
    }
}
public async Task<bool> Upload()
{
    Debug.WriteLine("start upload data");

    FileStream Stream = null;
    DatasetVersion workingCopy = new DatasetVersion();
    List<DataTuple> rows = new List<DataTuple>();
    long id = _dataset.Id;
    string userName = _user.UserName;
    var es = new EmailService();

    try
    {
        List<long> datatupleFromDatabaseIds = datasetManager.GetDatasetVersionEffectiveTupleIds(datasetManager.GetDatasetLatestVersion(_dataset.Id));

        if (FileHelper.FileExist(_filepath) &&
            (datasetManager.IsDatasetCheckedOutFor(id, userName) || datasetManager.CheckOutDataset(id, userName)))
        {
            workingCopy = datasetManager.GetDatasetWorkingCopy(id);

            //set modification
            workingCopy.ModificationInfo = new EntityAuditInfo()
            {
                Performer = userName,
                Comment = "Data",
                ActionType = AuditActionType.Edit
            };

            // loop over the file in packages
            int counter = 0;
            bool inputWasAltered = false;

            do
            {
                counter++;
                inputWasAltered = false;

                Stream = reader.Open(_filepath);
                rows = reader.ReadFile(Stream, Path.GetFileName(_filepath), id, packageSize);
                Stream.Close();

                // if errors exist, send email to user and stop the process
                if (reader.ErrorMessages.Count > 0)
                {
                    List<string> errorArray = new List<string>();
                    foreach (var e in reader.ErrorMessages)
                    {
                        errorArray.Add(e.GetMessage());
                    }

                    //send error messages
                    es.Send(MessageHelper.GetPushApiUploadFailHeader(_dataset.Id, _title),
                        MessageHelper.GetPushApiUploadFailMessage(_dataset.Id, _user.UserName, errorArray.ToArray()),
                        new List<string>() { _user.Email },
                        new List<string>() { ConfigurationManager.AppSettings["SystemEmail"] }
                        );

                    return false;
                }

                //update method -- append or update
                if (_uploadMethod == UploadMethod.Append)
                {
                    if (rows.Count > 0)
                    {
                        datasetManager.EditDatasetVersion(workingCopy, rows, null, null);
                        inputWasAltered = true;
                    }
                }
                else if (_uploadMethod == UploadMethod.Update)
                {
                    if (rows.Count() > 0)
                    {
                        var splittedDatatuples = uploadHelper.GetSplitDatatuples(rows, variableIds, workingCopy, ref datatupleFromDatabaseIds);
                        datasetManager.EditDatasetVersion(workingCopy, splittedDatatuples["new"], splittedDatatuples["edit"], null);
                        inputWasAltered = true;
                    }
                }
            } while (rows.Count() > 0 || inputWasAltered == true);

            datasetManager.CheckInDataset(id, "via api", userName);

            string title = workingCopy.Title;

            //send email
            es.Send(MessageHelper.GetUpdateDatasetHeader(id),
                MessageHelper.GetUpdateDatasetMessage(id, title, _user.DisplayName),
                new List<string>() { _user.Email },
                new List<string>() { ConfigurationManager.AppSettings["SystemEmail"] }
                );
        }
        else
        {
            //send failure email to user
            es.Send(MessageHelper.GetPushApiUploadFailHeader(_dataset.Id, _title),
                MessageHelper.GetPushApiUploadFailMessage(_dataset.Id, _user.UserName, new string[] { "The temporarily stored data could not be read or the dataset is already in checkout status." }),
                new List<string>() { _user.Email },
                new List<string>() { ConfigurationManager.AppSettings["SystemEmail"] }
                );
        }

        return true;
    }
    catch (Exception ex)
    {
        if (datasetManager.IsDatasetCheckedOutFor(id, userName))
        {
            datasetManager.UndoCheckoutDataset(id, userName);
        }

        //send failure email to user
        es.Send(MessageHelper.GetPushApiUploadFailHeader(_dataset.Id, _title),
            MessageHelper.GetPushApiUploadFailMessage(_dataset.Id, _user.UserName, new string[] { ex.Message }),
            new List<string>() { _user.Email },
            new List<string>() { ConfigurationManager.AppSettings["SystemEmail"] }
            );

        return false;
    }
    finally
    {
        Debug.WriteLine("end of upload");
    }
}