//[MeasurePerformance]
//temporary solution: norman
//For original solution, look into Aquadiva Code
public List<Error> FinishUpload(EasyUploadTaskManager taskManager)
{
    DataStructureManager dsm = new DataStructureManager();
    DatasetManager dm = new DatasetManager();
    DataContainerManager dam = new DataContainerManager();
    //SubjectManager sm = new SubjectManager();
    EntityPermissionManager entityPermissionManager = new EntityPermissionManager();

    List<Error> temp = new List<Error>();

    try
    {
        using (IUnitOfWork unitOfWork = this.GetUnitOfWork())
        {
            // initialize all necessary managers
            DataTuple[] rows = null;

            //First, try to validate - if there are errors, return immediately
            string JsonArray = TaskManager.Bus[EasyUploadTaskManager.SHEET_JSON_DATA].ToString();
            List<Error> ValidationErrors = ValidateRows(JsonArray);

            if (ValidationErrors.Count != 0)
            {
                temp.AddRange(ValidationErrors);
                return temp;
            }

            string timestamp = DateTime.UtcNow.ToString("r");

            //Use the file name as title unless a description title was provided in a previous step
            string title = Convert.ToString(TaskManager.Bus[EasyUploadTaskManager.FILENAME]);
            if (TaskManager.Bus.ContainsKey(EasyUploadTaskManager.DESCRIPTIONTITLE))
            {
                string tmp = Convert.ToString(TaskManager.Bus[EasyUploadTaskManager.DESCRIPTIONTITLE]);
                if (!String.IsNullOrWhiteSpace(tmp))
                {
                    title = tmp;
                }
            }

            StructuredDataStructure sds = null;
            List<VariableIdentifier> identifiers = new List<VariableIdentifier>(); //Used in the Excel reader

            //Try to find an exactly matching existing datastructure
            Boolean foundReusableDataStructure = false;

            List<RowModel> headers = (List<RowModel>)TaskManager.Bus[EasyUploadTaskManager.ROWS];

            //The headers might be in a different order in this list than what's indicated by their indices -
            //sort them to prevent mismatching
            headers.Sort((m1, m2) => m1.Index.CompareTo(m2.Index));

            //For now a datastructure is considered an exact match if it contains variables with
            //the same names (labels), datatypes and units in the correct order
            List<StructuredDataStructure> allDatastructures = dsm.StructuredDataStructureRepo.Get().ToList();
            foreach (StructuredDataStructure existingStructure in allDatastructures)
            {
                if (!foundReusableDataStructure)
                {
                    List<Variable> variablesOfExistingStructure = existingStructure.Variables.ToList();
                    foundReusableDataStructure = true;

                    if (variablesOfExistingStructure.Count != headers.Count)
                    {
                        foundReusableDataStructure = false;
                    }
                    else
                    {
                        for (int i = 0; i < variablesOfExistingStructure.Count; i++)
                        {
                            Variable exVar = variablesOfExistingStructure.ElementAt(i);
                            RowModel currentHeader = headers.ElementAt(i);
                            if (exVar.Label != currentHeader.Name ||
                                exVar.Unit.Id != currentHeader.SelectedUnit.UnitId ||
                                exVar.DataAttribute.DataType.Id != currentHeader.SelectedDataType.DataTypeId)
                            {
                                foundReusableDataStructure = false;
                            }
                        }
                    }

                    if (foundReusableDataStructure)
                    {
                        sds = existingStructure;
                        foreach (Variable exVar in variablesOfExistingStructure)
                        {
                            VariableIdentifier vi = new VariableIdentifier
                            {
                                name = exVar.Label,
                                id = exVar.Id
                            };
                            identifiers.Add(vi);
                        }
                    }
                }
            }

            //No reusable datastructure found => create a new one
            if (!foundReusableDataStructure)
            {
                sds = dsm.CreateStructuredDataStructure(title, title + " " + timestamp, "", "", DataStructureCategory.Generic);
            }

            TaskManager.AddToBus(EasyUploadTaskManager.DATASTRUCTURE_ID, sds.Id);
            TaskManager.AddToBus(EasyUploadTaskManager.DATASTRUCTURE_TITLE, title + " " + timestamp);

            if (!TaskManager.Bus.ContainsKey(EasyUploadTaskManager.DATASET_TITLE))
            {
                TaskManager.AddToBus(EasyUploadTaskManager.DATASET_TITLE, title);
                TaskManager.AddToBus(EasyUploadTaskManager.TITLE, title);
            }
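
            //Resolve the metadata structure selected in an earlier wizard step (SCHEMA key on the bus);
            //if the key is missing, fall back to the first structure whose name contains "eml".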
            MetadataStructure metadataStructure = null;
            if (TaskManager.Bus.ContainsKey(EasyUploadTaskManager.SCHEMA))
            {
                long metadataStructureId = Convert.ToInt64(TaskManager.Bus[EasyUploadTaskManager.SCHEMA]);
                metadataStructure = unitOfWork.GetReadOnlyRepository<MetadataStructure>()
                    .Get(m => m.Id == metadataStructureId).FirstOrDefault();
            }
            else
            {
                //Fallback - shouldn't happen, because the previous steps can't be finished without selecting a metadata structure
                metadataStructure = unitOfWork.GetReadOnlyRepository<MetadataStructure>()
                    .Get(m => m.Name.ToLower().Contains("eml")).FirstOrDefault();
            }

            ResearchPlan rp = unitOfWork.GetReadOnlyRepository<ResearchPlan>().Get().FirstOrDefault();
            TaskManager.AddToBus(EasyUploadTaskManager.RESEARCHPLAN_ID, rp.Id);
            TaskManager.AddToBus(EasyUploadTaskManager.RESEARCHPLAN_TITLE, rp.Title);

            #region Progress Information

            if (TaskManager.Bus.ContainsKey(EasyUploadTaskManager.CURRENTPACKAGESIZE))
            {
                TaskManager.Bus[EasyUploadTaskManager.CURRENTPACKAGESIZE] = 0;
            }
            else
            {
                TaskManager.Bus.Add(EasyUploadTaskManager.CURRENTPACKAGESIZE, 0);
            }

            if (TaskManager.Bus.ContainsKey(EasyUploadTaskManager.CURRENTPACKAGE))
            {
                TaskManager.Bus[EasyUploadTaskManager.CURRENTPACKAGE] = 0;
            }
            else
            {
                TaskManager.Bus.Add(EasyUploadTaskManager.CURRENTPACKAGE, 0);
            }

            #endregion Progress Information

            if (!foundReusableDataStructure)
            {
                #region Set Variables and information for new DataStructure

                XmlDocument xmldoc = new XmlDocument();
                XmlElement extraElement = xmldoc.CreateElement("extra");
                XmlElement orderElement = xmldoc.CreateElement("order");

                //Sorting necessary to prevent problems when inserting the tuples
                headers = headers.OrderBy(r => r.Index).ToList();

                var dataTypeRepo = unitOfWork.GetReadOnlyRepository<DataType>();
                var unitRepo = unitOfWork.GetReadOnlyRepository<Unit>();
                var dataAttributeRepo = unitOfWork.GetReadOnlyRepository<DataAttribute>();

                List<DataAttribute> allDataAttributes = dataAttributeRepo.Get().ToList();

                foreach (RowModel header in headers)
                {
                    int i = headers.IndexOf(header);
                    DataType dataType = dataTypeRepo.Get(header.SelectedDataType.DataTypeId);
                    Unit CurrentSelectedUnit = unitRepo.Get(header.SelectedUnit.UnitId);

                    //If possible, map the chosen variable name to an existing DataAttribute whose unit has the same dimension
                    DataAttribute CurrentDataAttribute = new DataAttribute();
                    DataAttribute existingDataAttribute = allDataAttributes.Where(da =>
                        da.Name.ToLower().Equals(TrimAndLimitString(header.SelectedDataAttribute.Name).ToLower()) &&
                        da.Unit.Dimension == CurrentSelectedUnit.Dimension).FirstOrDefault();

                    if (existingDataAttribute != null)
                    {
                        CurrentDataAttribute = existingDataAttribute;
                    }
                    else
                    {
                        //No matching DataAttribute => create a new one
                        CurrentDataAttribute = dam.CreateDataAttribute(TrimAndLimitString(header.Name), header.Name,
                            header.SelectedDataAttribute.Description, false, false, "", MeasurementScale.Categorial,
                            DataContainerType.ReferenceType, "", dataType, CurrentSelectedUnit,
                            null, null, null, null, null, null);
                    }

                    Variable newVariable = dsm.AddVariableUsage(sds, CurrentDataAttribute, true, header.Name, "", "", "", CurrentSelectedUnit);
                    VariableIdentifier vi = new VariableIdentifier
                    {
                        name = newVariable.Label,
                        id = newVariable.Id
                    };
                    identifiers.Add(vi);

                    //Remember the variable order in the <extra><order> element of the data structure
                    XmlElement newVariableXml = xmldoc.CreateElement("variable");
                    newVariableXml.InnerText = Convert.ToString(newVariable.Id);
                    orderElement.AppendChild(newVariableXml);
                }

                extraElement.AppendChild(orderElement);
                xmldoc.AppendChild(extraElement);
                sds.Extra = xmldoc;
                sds.Name = "generated import structure " + timestamp;
                sds.Description = "automatically generated structured data structure by user " + GetUsernameOrDefault() + " for file " + title + " on " + timestamp;

                #endregion Set Variables and information for new DataStructure
            }
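
            //Create an empty dataset for the (new or reused) data structure, check it out for the
            //current user and attach a skeleton metadata document that carries the dataset title.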
            Dataset ds = null;
            ds = dm.CreateEmptyDataset(sds, rp, metadataStructure);

            long datasetId = ds.Id;
            long sdsId = sds.Id;

            if (dm.IsDatasetCheckedOutFor(datasetId, GetUsernameOrDefault()) || dm.CheckOutDataset(datasetId, GetUsernameOrDefault()))
            {
                DatasetVersion dsv = dm.GetDatasetWorkingCopy(datasetId);

                long METADATASTRUCTURE_ID = metadataStructure.Id;
                XmlMetadataWriter xmlMetadatWriter = new XmlMetadataWriter(XmlNodeMode.xPath);
                XDocument metadataX = xmlMetadatWriter.CreateMetadataXml(METADATASTRUCTURE_ID);
                XmlDocument metadataXml = XmlMetadataWriter.ToXmlDocument(metadataX);
                dsv.Metadata = metadataXml;

                try
                {
                    dsv.Metadata = xmlDatasetHelper.SetInformation(dsv, metadataXml, NameAttributeValues.title, title);
                    dsv.Title = title;
                }
                catch (NullReferenceException)
                {
                    //Reference of the title node is missing
                    throw new NullReferenceException("The extra-field of this metadata-structure is missing the title-node-reference!");
                }

                dm.EditDatasetVersion(dsv, null, null, null);
            }

            #region security

            // add security: the uploading user gets the full set of rights on the new dataset
            if (GetUsernameOrDefault() != "DEFAULT")
            {
                entityPermissionManager.Create<User>(GetUsernameOrDefault(), "Dataset", typeof(Dataset), ds.Id,
                    Enum.GetValues(typeof(RightType)).Cast<RightType>().ToList());
            }

            #endregion security

            #region excel reader

            int packageSize = 100000;
            int numberOfRows = 0;

            //HACK ?
            TaskManager.Bus[EasyUploadTaskManager.CURRENTPACKAGESIZE] = packageSize;

            int counter = 0;

            dm.CheckOutDatasetIfNot(ds.Id, GetUsernameOrDefault()); //there are cases where the dataset does not get checked out

            if (!dm.IsDatasetCheckedOutFor(ds.Id, GetUsernameOrDefault()))
            {
                throw new Exception(string.Format("Not able to checkout dataset '{0}' for user '{1}'!", ds.Id, GetUsernameOrDefault()));
            }

            DatasetVersion workingCopy = dm.GetDatasetWorkingCopy(ds.Id);

            counter++;
            TaskManager.Bus[EasyUploadTaskManager.CURRENTPACKAGE] = counter;

            //rows = reader.ReadFile(Stream, TaskManager.Bus[TaskManager.FILENAME].ToString(), oldSds, (int)id, packageSize).ToArray();
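
            //The marked data and header areas arrive as JSON int arrays of the form
            //[startRow, startColumn, endRow, endColumn]; the +1 below converts these
            //(presumably zero-based) marker indices to the one-based indices the reader expects.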
            List<string> selectedDataAreaJsonArray = (List<string>)TaskManager.Bus[EasyUploadTaskManager.SHEET_DATA_AREA];
            string selectedHeaderAreaJsonArray = TaskManager.Bus[EasyUploadTaskManager.SHEET_HEADER_AREA].ToString();

            List<int[]> areaDataValuesList = new List<int[]>();
            foreach (string area in selectedDataAreaJsonArray)
            {
                areaDataValuesList.Add(JsonConvert.DeserializeObject<int[]>(area));
            }
            int[] areaHeaderValues = JsonConvert.DeserializeObject<int[]>(selectedHeaderAreaJsonArray);

            Orientation orientation = 0;
            switch (TaskManager.Bus[EasyUploadTaskManager.SHEET_FORMAT].ToString())
            {
                case "LeftRight":
                    orientation = Orientation.rowwise;
                    break;

                case "Matrix":
                    //orientation = Orientation.matrix;
                    break;

                default:
                    orientation = Orientation.columnwise;
                    break;
            }

            //Get the Uri to identify the correct worksheet
            String worksheetUri = null;
            if (TaskManager.Bus.ContainsKey(EasyUploadTaskManager.ACTIVE_WOKSHEET_URI))
            {
                worksheetUri = TaskManager.Bus[EasyUploadTaskManager.ACTIVE_WOKSHEET_URI].ToString();
            }

            //Use the preferred push size of the persistence layer as batch size
            int batchSize = unitOfWork.PersistenceManager.PreferredPushSize;
            int batchnr = 1;

            foreach (int[] areaDataValues in areaDataValuesList)
            {
                //First batch starts at the start of the current data area
                int currentBatchStartRow = areaDataValues[0] + 1;

                //While the end of the current data area has not yet been reached
                while (currentBatchStartRow <= areaDataValues[2] + 1)
                {
                    //End row is start row plus batch size
                    int currentBatchEndRow = currentBatchStartRow + batchSize;

                    //Set the indices for the reader
                    EasyUploadFileReaderInfo fri = new EasyUploadFileReaderInfo
                    {
                        DataStartRow = currentBatchStartRow,
                        //End row is either at the end of the batch or at the end of the marked area
                        DataEndRow = Math.Min(currentBatchEndRow, areaDataValues[2] + 1),
                        //Column indices as marked in a previous step
                        DataStartColumn = areaDataValues[1] + 1,
                        DataEndColumn = areaDataValues[3] + 1,
                        //Header area as marked in a previous step
                        VariablesStartRow = areaHeaderValues[0] + 1,
                        VariablesStartColumn = areaHeaderValues[1] + 1,
                        VariablesEndRow = areaHeaderValues[2] + 1,
                        VariablesEndColumn = areaHeaderValues[3] + 1,
                        Offset = areaDataValues[1],
                        Orientation = orientation
                    };
                    //Create a new reader for each batch because the reader keeps ALL tuples it has read,
                    //which would break the batch processing
                    EasyUploadExcelReader reader = new EasyUploadExcelReader(sds, fri);

                    //Open the file
                    Stream = reader.Open(TaskManager.Bus[EasyUploadTaskManager.FILEPATH].ToString());

                    //Set variable identifiers because they might differ from the variable names in the file
                    reader.setSubmittedVariableIdentifiers(identifiers);

                    //Read the rows and convert them to DataTuples
                    rows = reader.ReadFile(Stream, TaskManager.Bus[EasyUploadTaskManager.FILENAME].ToString(), fri, (int)datasetId, worksheetUri);

                    //After reading the rows, add them to the dataset
                    if (rows != null)
                    {
                        dm.EditDatasetVersion(workingCopy, rows.ToList(), null, null);
                        numberOfRows += rows.Count();
                    }

                    //Close the stream so the next reader can open it again
                    Stream.Close();

                    //Debug information
                    int lines = (areaDataValues[2] + 1) - (areaDataValues[0] + 1);
                    int batches = lines / batchSize;
                    batchnr++;

                    //Next batch starts after the current one
                    currentBatchStartRow = currentBatchEndRow + 1;
                }
            }

            #endregion excel reader

            //set modification info
            workingCopy.ModificationInfo = new EntityAuditInfo()
            {
                Performer = GetUsernameOrDefault(),
                Comment = "Data",
                ActionType = AuditActionType.Create
            };

            dm.EditDatasetVersion(workingCopy, null, null, null);

            dm.CheckInDataset(ds.Id, "Import " + numberOfRows + " rows", GetUsernameOrDefault());

            //Reindex the dataset in the search index
            if (this.IsAccessible("DDM", "SearchIndex", "ReIndexSingle"))
            {
                this.Run("DDM", "SearchIndex", "ReIndexSingle", new RouteValueDictionary() { { "id", datasetId } });
            }

            TaskManager.AddToBus(EasyUploadTaskManager.DATASET_ID, ds.Id);

            return temp;
        }
    }
    catch (Exception)
    {
        temp.Add(new Error(ErrorType.Other, "An error occurred during the upload. " +
            "Please try again later. If this problem keeps occurring, please contact your administrator."));
        return temp;
    }
    finally
    {
        dsm.Dispose();
        dm.Dispose();
        dam.Dispose();
        //sm.Dispose();
        entityPermissionManager.Dispose();
    }
}
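
//NOTE: The following FinishUpload variant appears to be an alternative (presumably earlier) implementation of
//the same wizard step: it always creates a new data structure and takes its header/unit mapping from
//VERIFICATION_MAPPEDHEADERUNITS (Tuple<int, string, UnitInfo>) instead of the RowModel list used above.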
//[MeasurePerformance]
//temporary solution: norman
//For original solution, look into Aquadiva Code
public List<Error> FinishUpload(EasyUploadTaskManager taskManager)
{
    DataStructureManager dsm = new DataStructureManager();
    DatasetManager dm = new DatasetManager();
    DataContainerManager dam = new DataContainerManager();
    //SubjectManager sm = new SubjectManager();
    EntityPermissionManager entityPermissionManager = new EntityPermissionManager();

    List<Error> temp = new List<Error>();

    try
    {
        using (IUnitOfWork unitOfWork = this.GetUnitOfWork())
        {
            // initialize all necessary managers
            DataTuple[] rows = null;

            //First, try to validate - if there are errors, return immediately
            string JsonArray = TaskManager.Bus[EasyUploadTaskManager.SHEET_JSON_DATA].ToString();
            List<Error> ValidationErrors = ValidateRows(JsonArray);

            if (ValidationErrors.Count != 0)
            {
                temp.AddRange(ValidationErrors);
                return temp;
            }

            string timestamp = DateTime.UtcNow.ToString("r");

            //Use the file name as title unless a description title was provided in a previous step
            string title = Convert.ToString(TaskManager.Bus[EasyUploadTaskManager.FILENAME]);
            if (TaskManager.Bus.ContainsKey(EasyUploadTaskManager.DESCRIPTIONTITLE))
            {
                string tmp = Convert.ToString(TaskManager.Bus[EasyUploadTaskManager.DESCRIPTIONTITLE]);
                if (!String.IsNullOrWhiteSpace(tmp))
                {
                    title = tmp;
                }
            }

            StructuredDataStructure sds = dsm.CreateStructuredDataStructure(title, title + " " + timestamp, "", "", DataStructureCategory.Generic);

            TaskManager.AddToBus(EasyUploadTaskManager.DATASTRUCTURE_ID, sds.Id);
            TaskManager.AddToBus(EasyUploadTaskManager.DATASTRUCTURE_TITLE, title + " " + timestamp);

            if (!TaskManager.Bus.ContainsKey(EasyUploadTaskManager.DATASET_TITLE))
            {
                TaskManager.AddToBus(EasyUploadTaskManager.DATASET_TITLE, title);
                TaskManager.AddToBus(EasyUploadTaskManager.TITLE, title);
            }

            //Resolve the metadata structure selected in an earlier wizard step (SCHEMA key on the bus);
            //if the key is missing, fall back to the first structure whose name contains "eml".
            MetadataStructure metadataStructure = null;
            if (TaskManager.Bus.ContainsKey(EasyUploadTaskManager.SCHEMA))
            {
                long metadataStructureId = Convert.ToInt64(TaskManager.Bus[EasyUploadTaskManager.SCHEMA]);
                metadataStructure = unitOfWork.GetReadOnlyRepository<MetadataStructure>()
                    .Get(m => m.Id == metadataStructureId).FirstOrDefault();
            }
            else
            {
                //Fallback - shouldn't happen, because the previous steps can't be finished without selecting a metadata structure
                metadataStructure = unitOfWork.GetReadOnlyRepository<MetadataStructure>()
                    .Get(m => m.Name.ToLower().Contains("eml")).FirstOrDefault();
            }

            ResearchPlan rp = unitOfWork.GetReadOnlyRepository<ResearchPlan>().Get().FirstOrDefault();
            TaskManager.AddToBus(EasyUploadTaskManager.RESEARCHPLAN_ID, rp.Id);
            TaskManager.AddToBus(EasyUploadTaskManager.RESEARCHPLAN_TITLE, rp.Title);

            #region Progress Information

            if (TaskManager.Bus.ContainsKey(EasyUploadTaskManager.CURRENTPACKAGESIZE))
            {
                TaskManager.Bus[EasyUploadTaskManager.CURRENTPACKAGESIZE] = 0;
            }
            else
            {
                TaskManager.Bus.Add(EasyUploadTaskManager.CURRENTPACKAGESIZE, 0);
            }

            if (TaskManager.Bus.ContainsKey(EasyUploadTaskManager.CURRENTPACKAGE))
            {
                TaskManager.Bus[EasyUploadTaskManager.CURRENTPACKAGE] = 0;
            }
            else
            {
                TaskManager.Bus.Add(EasyUploadTaskManager.CURRENTPACKAGE, 0);
            }

            #endregion

            #region DataStructure

            XmlDocument xmldoc = new XmlDocument();
            XmlElement extraElement = xmldoc.CreateElement("extra");
            XmlElement orderElement = xmldoc.CreateElement("order");

            List<Tuple<int, string, UnitInfo>> MappedHeaders =
                (List<Tuple<int, string, UnitInfo>>)TaskManager.Bus[EasyUploadTaskManager.VERIFICATION_MAPPEDHEADERUNITS];

            //Sorting necessary to prevent problems when inserting the tuples
            MappedHeaders.Sort((head1, head2) => head1.Item1.CompareTo(head2.Item1));
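
            //For every mapped header: reuse an existing DataAttribute with the same name, data type and unit
            //if one exists, otherwise create a new one; then register it as a variable of the new data
            //structure and record the variable id in the <extra><order> element.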
            List<VariableIdentifier> identifiers = new List<VariableIdentifier>();

            var dataTypeRepo = unitOfWork.GetReadOnlyRepository<DataType>();
            var unitRepo = unitOfWork.GetReadOnlyRepository<Unit>();
            var dataAttributeRepo = unitOfWork.GetReadOnlyRepository<DataAttribute>();

            List<DataAttribute> allDataAttributes = dataAttributeRepo.Get().ToList();

            foreach (Tuple<int, string, UnitInfo> Entry in MappedHeaders)
            {
                int i = MappedHeaders.IndexOf(Entry);
                DataType dataType = dataTypeRepo.Get(Entry.Item3.SelectedDataTypeId);
                Unit CurrentSelectedUnit = unitRepo.Get(Entry.Item3.UnitId);

                DataAttribute CurrentDataAttribute = new DataAttribute();

                //If possible, map the chosen variable name, unit and datatype to an existing DataAttribute (Exact match)
                DataAttribute existingDataAttribute = allDataAttributes.Where(da =>
                    da.Name.ToLower().Equals(TrimAndLimitString(Entry.Item2).ToLower()) &&
                    da.DataType.Id == dataType.Id &&
                    da.Unit.Id == CurrentSelectedUnit.Id).FirstOrDefault();

                if (existingDataAttribute != null)
                {
                    CurrentDataAttribute = existingDataAttribute;
                }
                else
                {
                    //No matching DataAttribute => Create a new one
                    CurrentDataAttribute = dam.CreateDataAttribute(TrimAndLimitString(Entry.Item2), Entry.Item2, "",
                        false, false, "", MeasurementScale.Categorial, DataContainerType.ReferenceType, "",
                        dataType, CurrentSelectedUnit, null, null, null, null, null, null);
                }

                Variable newVariable = dsm.AddVariableUsage(sds, CurrentDataAttribute, true, Entry.Item2, "", "", "");
                VariableIdentifier vi = new VariableIdentifier
                {
                    name = newVariable.Label,
                    id = newVariable.Id
                };
                identifiers.Add(vi);

                XmlElement newVariableXml = xmldoc.CreateElement("variable");
                newVariableXml.InnerText = Convert.ToString(newVariable.Id);
                orderElement.AppendChild(newVariableXml);
            }

            extraElement.AppendChild(orderElement);
            xmldoc.AppendChild(extraElement);
            sds.Extra = xmldoc;
            sds.Name = "generated import structure " + timestamp;
            sds.Description = "automatically generated structured data structure by user " + GetUsernameOrDefault() + " for file " + title + " on " + timestamp;

            #endregion
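
            //Create an empty dataset for the generated structure, check it out for the current user and
            //attach a skeleton metadata document whose title node is filled with the upload title.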
            Dataset ds = null;
            ds = dm.CreateEmptyDataset(sds, rp, metadataStructure);

            //TODO Should a template be created?
            /*ExcelTemplateProvider etp = new ExcelTemplateProvider();
            etp.CreateTemplate(sds);*/

            long datasetId = ds.Id;
            long sdsId = sds.Id;

            if (dm.IsDatasetCheckedOutFor(datasetId, GetUsernameOrDefault()) || dm.CheckOutDataset(datasetId, GetUsernameOrDefault()))
            {
                DatasetVersion dsv = dm.GetDatasetWorkingCopy(datasetId);

                long METADATASTRUCTURE_ID = metadataStructure.Id;
                XmlMetadataWriter xmlMetadatWriter = new XmlMetadataWriter(XmlNodeMode.xPath);
                XDocument metadataX = xmlMetadatWriter.CreateMetadataXml(METADATASTRUCTURE_ID);
                XmlDocument metadataXml = XmlMetadataWriter.ToXmlDocument(metadataX);
                dsv.Metadata = metadataXml;

                try
                {
                    dsv.Metadata = xmlDatasetHelper.SetInformation(dsv, metadataXml, NameAttributeValues.title, title);
                }
                catch (NullReferenceException)
                {
                    //Reference of the title node is missing
                    throw new NullReferenceException("The extra-field of this metadata-structure is missing the title-node-reference!");
                }

                dm.EditDatasetVersion(dsv, null, null, null);
            }

            #region security

            // add security: the uploading user gets the full set of rights on the new dataset
            if (GetUsernameOrDefault() != "DEFAULT")
            {
                //PermissionManager pm = new PermissionManager();
                //User user = sm.GetUserByName(GetUsernameOrDefault());

                //Rights-Management
                /*
                 * TODO: Use the BExIS Party API for that
                 */
                /*
                 * UserPiManager upm = new UserPiManager();
                 * List<long> piList = (new UserSelectListModel(GetUsernameOrDefault())).UserList.Select(x => x.Id).ToList();
                 */

                entityPermissionManager.Create<User>(GetUsernameOrDefault(), "Dataset", typeof(Dataset), ds.Id,
                    Enum.GetValues(typeof(RightType)).Cast<RightType>().ToList());

                // adding the rights for the pis
                /*foreach (long piId in piList)
                 *{
                 *    //Each pi gets full permissions
                 *    pm.CreateDataPermission(piId, 1, ds.Id, rightType);
                 *
                 *    // get all pi members
                 *    List<UserPi> currentMembers = upm.GetAllPiMember(piId).ToList();
                 *
                 *    foreach (UserPi currentMapping in currentMembers)
                 *    {
                 *        switch (rightType)
                 *        {
                 *            //Each member of each of the pis gets view-rights
                 *            case RightType.View:
                 *                pm.CreateDataPermission(currentMapping.UserId, 1, ds.Id, rightType);
                 *                break;
                 *
                 *            //Each member of each of the pis gets download-rights
                 *            case RightType.Download:
                 *                pm.CreateDataPermission(currentMapping.UserId, 1, ds.Id, rightType);
                 *                break;
                 *
                 *            default:
                 *                //No other permissions - is this call necessary?
                 *                pm.CreateDataPermission(currentMapping.UserId, 0, ds.Id, rightType);
                 *                break;
                 *        }
                 *    }
                 *}*/
            }

            #endregion security

            #region excel reader

            int packageSize = 10000;

            //HACK ?
            TaskManager.Bus[EasyUploadTaskManager.CURRENTPACKAGESIZE] = packageSize;

            int counter = 0;

            dm.CheckOutDatasetIfNot(ds.Id, GetUsernameOrDefault()); //there are cases where the dataset does not get checked out

            if (!dm.IsDatasetCheckedOutFor(ds.Id, GetUsernameOrDefault()))
            {
                throw new Exception(string.Format("Not able to checkout dataset '{0}' for user '{1}'!", ds.Id, GetUsernameOrDefault()));
            }

            DatasetVersion workingCopy = dm.GetDatasetWorkingCopy(ds.Id);

            counter++;
            TaskManager.Bus[EasyUploadTaskManager.CURRENTPACKAGE] = counter;

            //rows = reader.ReadFile(Stream, TaskManager.Bus[TaskManager.FILENAME].ToString(), oldSds, (int)id, packageSize).ToArray();
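
            //The marked data and header areas arrive as JSON int arrays of the form
            //[startRow, startColumn, endRow, endColumn]; the +1 below converts these
            //(presumably zero-based) marker indices to the one-based indices the reader expects.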
            List<string> selectedDataAreaJsonArray = (List<string>)TaskManager.Bus[EasyUploadTaskManager.SHEET_DATA_AREA];
            string selectedHeaderAreaJsonArray = TaskManager.Bus[EasyUploadTaskManager.SHEET_HEADER_AREA].ToString();

            List<int[]> areaDataValuesList = new List<int[]>();
            foreach (string area in selectedDataAreaJsonArray)
            {
                areaDataValuesList.Add(JsonConvert.DeserializeObject<int[]>(area));
            }
            int[] areaHeaderValues = JsonConvert.DeserializeObject<int[]>(selectedHeaderAreaJsonArray);

            Orientation orientation = 0;
            switch (TaskManager.Bus[EasyUploadTaskManager.SHEET_FORMAT].ToString())
            {
                case "LeftRight":
                    orientation = Orientation.rowwise;
                    break;

                case "Matrix":
                    //orientation = Orientation.matrix;
                    break;

                default:
                    orientation = Orientation.columnwise;
                    break;
            }

            //Get the Uri to identify the correct worksheet
            String worksheetUri = null;
            if (TaskManager.Bus.ContainsKey(EasyUploadTaskManager.ACTIVE_WOKSHEET_URI))
            {
                worksheetUri = TaskManager.Bus[EasyUploadTaskManager.ACTIVE_WOKSHEET_URI].ToString();
            }

            //Use the preferred push size of the persistence layer as batch size
            int batchSize = unitOfWork.PersistenceManager.PreferredPushSize;
            int batchnr = 1;

            foreach (int[] areaDataValues in areaDataValuesList)
            {
                //First batch starts at the start of the current data area
                int currentBatchStartRow = areaDataValues[0] + 1;

                //While the end of the current data area has not yet been reached
                while (currentBatchStartRow <= areaDataValues[2] + 1)
                {
                    //Create a new reader for each batch because the reader keeps ALL tuples it has read,
                    //which would break the batch processing
                    EasyUploadExcelReader reader = new EasyUploadExcelReader();

                    //Open the file
                    Stream = reader.Open(TaskManager.Bus[EasyUploadTaskManager.FILEPATH].ToString());

                    //End row is start row plus batch size
                    int currentBatchEndRow = currentBatchStartRow + batchSize;

                    //Set the indices for the reader
                    EasyUploadFileReaderInfo fri = new EasyUploadFileReaderInfo
                    {
                        DataStartRow = currentBatchStartRow,
                        //End row is either at the end of the batch or at the end of the marked area
                        DataEndRow = Math.Min(currentBatchEndRow, areaDataValues[2] + 1),
                        //Column indices as marked in a previous step
                        DataStartColumn = areaDataValues[1] + 1,
                        DataEndColumn = areaDataValues[3] + 1,
                        //Header area as marked in a previous step
                        VariablesStartRow = areaHeaderValues[0] + 1,
                        VariablesStartColumn = areaHeaderValues[1] + 1,
                        VariablesEndRow = areaHeaderValues[2] + 1,
                        VariablesEndColumn = areaHeaderValues[3] + 1,
                        Offset = areaDataValues[1],
                        Orientation = orientation
                    };
                    //Set variable identifiers because they might differ from the variable names in the file
                    reader.setSubmittedVariableIdentifiers(identifiers);

                    //Read the rows and convert them to DataTuples
                    rows = reader.ReadFile(Stream, TaskManager.Bus[EasyUploadTaskManager.FILENAME].ToString(), fri, sds, (int)datasetId, worksheetUri);

                    //After reading the rows, add them to the dataset
                    if (rows != null)
                    {
                        dm.EditDatasetVersion(workingCopy, rows.ToList(), null, null);
                    }

                    //Close the stream so the next reader can open it again
                    Stream.Close();

                    //Debug information
                    int lines = (areaDataValues[2] + 1) - (areaDataValues[0] + 1);
                    int batches = lines / batchSize;
                    batchnr++;

                    //Next batch starts after the current one
                    currentBatchStartRow = currentBatchEndRow + 1;
                }
            }

            #endregion

            dm.CheckInDataset(ds.Id, "upload data from upload wizard", GetUsernameOrDefault());

            //Reindex the dataset in the search index
            if (this.IsAccessibale("DDM", "SearchIndex", "ReIndexSingle"))
            {
                this.Run("DDM", "SearchIndex", "ReIndexSingle", new RouteValueDictionary() { { "id", datasetId } });
            }

            TaskManager.AddToBus(EasyUploadTaskManager.DATASET_ID, ds.Id);

            return temp;
        }
    }
    catch (Exception)
    {
        temp.Add(new Error(ErrorType.Other, "An error occurred during the upload. " +
            "Please try again later. If this problem keeps occurring, please contact your administrator."));
        return temp;
    }
    finally
    {
        dsm.Dispose();
        dm.Dispose();
        dam.Dispose();
        //sm.Dispose();
        entityPermissionManager.Dispose();
    }
}