public ActionResult CreateTestDatasets(int n)
{
    DatasetManager datasetManager = new DatasetManager();
    DataStructureManager dataStructureManager = new DataStructureManager();
    MetadataStructureManager metadataStructureManager = new MetadataStructureManager();
    ResearchPlanManager researchPlanManager = new ResearchPlanManager();

    try
    {
        var structure = dataStructureManager.UnStructuredDataStructureRepo.Get(1);
        var metadatastructure = metadataStructureManager.Repo.Get(1);
        var researchplan = researchPlanManager.Repo.Get(1);
        var xmlDatasetHelper = new XmlDatasetHelper();
        var xmlMetadataWriter = new XmlMetadataWriter(XmlNodeMode.xPath);
        var metadataXml = xmlMetadataWriter.CreateMetadataXml(metadatastructure.Id);

        for (int i = 0; i < n; i++)
        {
            var dataset = datasetManager.CreateEmptyDataset(structure, researchplan, metadatastructure);

            if (datasetManager.IsDatasetCheckedOutFor(dataset.Id, "test") || datasetManager.CheckOutDataset(dataset.Id, "test"))
            {
                DatasetVersion workingCopy = datasetManager.GetDatasetWorkingCopy(dataset.Id);
                datasetManager.EditDatasetVersion(workingCopy, null, null, null);
                datasetManager.CheckInDataset(dataset.Id, "", "test", ViewCreationBehavior.None);

                workingCopy.Metadata = Xml.Helpers.XmlWriter.ToXmlDocument(metadataXml);
                string xpath = xmlDatasetHelper.GetInformationPath(metadatastructure.Id, NameAttributeValues.title);
                workingCopy.Metadata.SelectSingleNode(xpath).InnerText = i.ToString();
                workingCopy.Title = i.ToString();

                datasetManager.EditDatasetVersion(workingCopy, null, null, null);
                datasetManager.CheckInDataset(dataset.Id, "", "test", ViewCreationBehavior.None);
            }
        }
    }
    catch (Exception)
    {
        throw; // rethrow without resetting the stack trace
    }
    finally
    {
        datasetManager.Dispose();
        dataStructureManager.Dispose();
        metadataStructureManager.Dispose();
        researchPlanManager.Dispose();
    }

    return View("Index");
}
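Most of the examples in this listing repeat one core pattern: check the dataset out, modify the working copy, persist the changes with EditDatasetVersion, and check the dataset back in. The following minimal sketch isolates that lifecycle; the EditTitle name, the comment string, and the hard-coded user are hypothetical, while the DatasetManager calls mirror the ones used throughout this listing.

// Minimal sketch of the checkout / edit / check-in lifecycle used throughout these examples.
// The EditTitle name and the check-in comment are illustrative only.
public void EditTitle(long datasetId, string newTitle, string userName)
{
    using (var datasetManager = new DatasetManager())
    {
        // a dataset must be checked out for the user before its working copy can be edited
        if (datasetManager.IsDatasetCheckedOutFor(datasetId, userName) || datasetManager.CheckOutDataset(datasetId, userName))
        {
            DatasetVersion workingCopy = datasetManager.GetDatasetWorkingCopy(datasetId);
            workingCopy.Title = newTitle;

            // persist the change on the working copy, then check the dataset back in as a new version
            datasetManager.EditDatasetVersion(workingCopy, null, null, null);
            datasetManager.CheckInDataset(datasetId, "title changed", userName, ViewCreationBehavior.None);
        }
    }
}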
public void GetDatasetVersionEffectiveDataTuples_CalledOlderVersionWithPaging_ReturnListOfAbstractTuples()
{
    //Arrange
    DatasetManager datasetManager = null;
    int pageSize = 4;
    int pageNumber = 2;

    try
    {
        datasetManager = new DatasetManager();
        var latestDatasetVersionId = datasetManager.GetDatasetLatestVersionId(datasetId);

        //get the latest data tuple id before creating a new dataset and data
        using (var uow = this.GetUnitOfWork())
        {
            var latestDataTuple = uow.GetReadOnlyRepository<DataTuple>().Get().LastOrDefault();
            var firstDataTuple = uow.GetReadOnlyRepository<DataTuple>().Get().Where(dt => dt.DatasetVersion.Id.Equals(latestDatasetVersionId)).FirstOrDefault();

            if (latestDataTuple != null)
            {
                latestDataTupleId = latestDataTuple.Id;
            }

            if (firstDataTuple != null)
            {
                firstDataTupleId = firstDataTuple.Id;
            }
        }

        var dataset = datasetManager.GetDataset(datasetId);

        dataset = dsHelper.UpdateOneTupleForDataset(dataset, (StructuredDataStructure)dataset.DataStructure, firstDataTupleId, 1, datasetManager);
        datasetManager.CheckInDataset(dataset.Id, "for testing datatuples with versions", username, ViewCreationBehavior.None);

        dataset = dsHelper.UpdateOneTupleForDataset(dataset, (StructuredDataStructure)dataset.DataStructure, latestDataTupleId, 2, datasetManager);
        datasetManager.CheckInDataset(dataset.Id, "for testing datatuples with versions", username, ViewCreationBehavior.None);

        //Act
        List<DatasetVersion> datasetversions = datasetManager.GetDatasetVersions(datasetId).OrderBy(d => d.Timestamp).ToList();

        var resultAll = datasetManager.GetDatasetVersionEffectiveTuples(datasetversions.ElementAt(datasetversions.Count - 2));
        List<long> compareIds = resultAll.OrderBy(dt => dt.OrderNo).Skip(pageNumber * pageSize).Take(pageSize).Select(dt => dt.Id).ToList();

        // get data tuples from the version before the latest
        var result = datasetManager.GetDatasetVersionEffectiveTuples(datasetversions.ElementAt(datasetversions.Count - 2), pageNumber, pageSize);
        var resultIds = result.Select(dt => dt.Id).ToList();

        //Assert
        Assert.That(compareIds, Is.EquivalentTo(resultIds));
    }
    finally
    {
        datasetManager.Dispose();
    }
}
public void GetDataTuples_CallOlderVersionAfterTwoUpdates_ReturnIQueryable()
{
    List<long> datatupleIds;

    try
    {
        //Arrange
        using (var datasetManager = new DatasetManager())
        {
            var dataset = datasetManager.GetDataset(datasetId);
            var datasetVersion = datasetManager.GetDatasetLatestVersion(datasetId);

            Assert.IsNotNull(dataset);
            Assert.IsNotNull(datasetVersion);

            //get the latest data tuple ids before creating a new dataset and data
            using (var uow = this.GetUnitOfWork())
            {
                datatupleIds = uow.GetReadOnlyRepository<DataTuple>().Get().Where(dt => dt.DatasetVersion.Id.Equals(datasetVersion.Id)).Select(dt => dt.Id).ToList();
            }

            dataset = dsHelper.UpdateOneTupleForDataset(dataset, (StructuredDataStructure)dataset.DataStructure, datatupleIds[0], 1000, datasetManager);
            datasetManager.CheckInDataset(dataset.Id, "for testing datatuples with versions", username, ViewCreationBehavior.None);

            dataset = dsHelper.UpdateOneTupleForDataset(dataset, (StructuredDataStructure)dataset.DataStructure, datatupleIds[1], 2000, datasetManager);
            datasetManager.CheckInDataset(dataset.Id, "for testing datatuples with versions", username, ViewCreationBehavior.None);
        }

        //Act
        using (var datasetManager = new DatasetManager())
        {
            List<DatasetVersion> datasetversions = datasetManager.GetDatasetVersions(datasetId).OrderBy(d => d.Timestamp).ToList();
            Assert.That(datasetversions.Count, Is.EqualTo(3));

            foreach (var dsv in datasetversions)
            {
                int c = datasetManager.GetDataTuplesCount(dsv.Id);
                var result = datasetManager.GetDataTuples(dsv.Id);
                int cm = result.Count();

                Assert.That(c, Is.EqualTo(10));
                Assert.That(c, Is.EqualTo(cm));
            }
        }
    }
    catch (Exception)
    {
        throw; // rethrow without resetting the stack trace
    }
}
public ActionResult Delete(long datasetId, String fileName)
{
    using (var dm = new DatasetManager())
    {
        var filePath = Path.Combine(AppConfiguration.DataPath, "Datasets", datasetId.ToString(), "Attachments", fileName);
        FileHelper.Delete(filePath);

        var dataset = dm.GetDataset(datasetId);
        var datasetVersion = dm.GetDatasetLatestVersion(dataset);

        var contentDescriptor = datasetVersion.ContentDescriptors.FirstOrDefault(item => item.Name == fileName);
        if (contentDescriptor == null)
        {
            throw new Exception("There is no content descriptor with the file name '" + fileName + "'.");
        }

        datasetVersion.ContentDescriptors.Remove(contentDescriptor);

        //set modification
        datasetVersion.ModificationInfo = new EntityAuditInfo()
        {
            Performer = GetUsernameOrDefault(),
            Comment = "Attachment",
            ActionType = AuditActionType.Delete
        };

        dm.EditDatasetVersion(datasetVersion, null, null, null);
        dm.CheckInDataset(dataset.Id, fileName, GetUsernameOrDefault(), ViewCreationBehavior.None);
    }

    return RedirectToAction("showdata", "data", new { area = "ddm", id = datasetId });
}
//copy of Dcm.UI.Helpers.DataASyncUploadHelper.FinishUpload (adapted)
public ActionResult SaveFile(long entityId)
{
    Dataset ds = null;
    DatasetVersion workingCopy = new DatasetVersion();
    string storePath = "";

    using (var dm = new DatasetManager())
    {
        ds = dm.GetDataset(entityId);

        // checkout the dataset, apply the changes, and check it in.
        if (dm.IsDatasetCheckedOutFor(ds.Id, GetUsernameOrDefault()) || dm.CheckOutDataset(ds.Id, GetUsernameOrDefault()))
        {
            try
            {
                workingCopy = dm.GetDatasetWorkingCopy(ds.Id);

                using (var unitOfWork = this.GetUnitOfWork())
                {
                    workingCopy = unitOfWork.GetReadOnlyRepository<DatasetVersion>().Get(workingCopy.Id);
                    unitOfWork.GetReadOnlyRepository<DatasetVersion>().Load(workingCopy.ContentDescriptors);
                    storePath = SaveFileInContentDiscriptor(workingCopy);
                }

                workingCopy.StateInfo = new EntityStateInfo();
                workingCopy.StateInfo.State = DatasetStateInfo.Valid.ToString();

                //set modification
                workingCopy.ModificationInfo = new EntityAuditInfo()
                {
                    Performer = GetUsernameOrDefault(),
                    Comment = "File",
                    ActionType = AuditActionType.Create
                };

                dm.EditDatasetVersion(workingCopy, null, null, null);

                string filename = Path.GetFileName(storePath);

                //TaskManager TaskManager = (TaskManager)Session["TaskManager"];
                //if (TaskManager.Bus.ContainsKey(TaskManager.FILENAME))
                //{
                //    filename = TaskManager.Bus[TaskManager.FILENAME]?.ToString();
                //}

                // ToDo: Get Comment from ui and users
                dm.CheckInDataset(ds.Id, filename, GetUsernameOrDefault(), ViewCreationBehavior.None);
            }
            catch (Exception)
            {
                throw; // rethrow without resetting the stack trace
            }
        }
    }

    return RedirectToAction("Index", "ShowPublication", new { area = "PUB", id = entityId });
}
public void OneTimeSetUp()
{
    helper = new TestSetupHelper(WebApiConfig.Register, false);

    var dm = new DatasetManager();
    var rsm = new ResearchPlanManager();
    var mdm = new MetadataStructureManager();
    dsHelper = new DatasetHelper();

    try
    {
        dsHelper.PurgeAllDatasets();
        dsHelper.PurgeAllDataStructures();
        dsHelper.PurgeAllResearchPlans();

        // generate data
        numberOfTuples = 50000;

        StructuredDataStructure dataStructure = dsHelper.CreateADataStructure();
        dataStructure.Should().NotBeNull("Failed to meet a precondition: a data structure is required.");

        var rp = dsHelper.CreateResearchPlan();
        rp.Should().NotBeNull("Failed to meet a precondition: a research plan is required.");

        var mds = mdm.Repo.Query().First();
        mds.Should().NotBeNull("Failed to meet a precondition: a metadata structure is required.");

        Dataset dataset = dm.CreateEmptyDataset(dataStructure, rp, mds);
        datasetId = dataset.Id;

        // add data tuples
        dataset = dsHelper.GenerateTuplesForDataset(dataset, dataStructure, numberOfTuples, username);
    }
    finally
    {
        dm.CheckInDataset(datasetId, "for testing datatuples with versions", username, ViewCreationBehavior.None);

        dm.Dispose();
        rsm.Dispose();
        mdm.Dispose();
    }
}
public void EditDatasetVersion_DeleteADataTupleAfterUpdate_ReturnUpdatedVersion()
{
    Dataset dataset;
    DatasetVersion latest;

    using (DatasetManager datasetManager = new DatasetManager())
    {
        //Arrange
        dataset = datasetManager.GetDataset(datasetId);
        latest = datasetManager.GetDatasetLatestVersion(datasetId);

        //update the dataset
        dataset = dsHelper.UpdateAnyTupleForDataset(dataset, dataset.DataStructure as StructuredDataStructure, datasetManager);
        datasetManager.CheckInDataset(datasetId, "for testing update all datatuple", username, ViewCreationBehavior.None);
    }

    using (DatasetManager datasetManager = new DatasetManager())
    {
        try
        {
            latest = datasetManager.GetDatasetLatestVersion(datasetId);
            int before = datasetManager.GetDataTuplesCount(latest.Id);
            var tuple = datasetManager.GetDataTuples(latest.Id).LastOrDefault();

            //Act
            if (datasetManager.IsDatasetCheckedOutFor(datasetId, "David") || datasetManager.CheckOutDataset(datasetId, "David"))
            {
                DatasetVersion workingCopy = datasetManager.GetDatasetWorkingCopy(datasetId);

                List<AbstractTuple> deleteTuples = new List<AbstractTuple>();
                deleteTuples.Add(tuple);

                workingCopy = datasetManager.EditDatasetVersion(workingCopy, null, null, deleteTuples.Select(d => d.Id).ToList());
                datasetManager.CheckInDataset(datasetId, "delete one datatuple for testing", username, ViewCreationBehavior.None);
            }

            latest = datasetManager.GetDatasetLatestVersion(datasetId);
            int after = datasetManager.GetDataTuplesCount(latest.Id);

            //Assert
            Assert.That(before, Is.GreaterThan(after));
        }
        catch (Exception)
        {
            throw; // rethrow without resetting the stack trace
        }
    }
}
public async Task uploadFiles(List<HttpContent> attachments, long datasetId, String description, string userName, DatasetManager dm)
{
    var dataset = dm.GetDataset(datasetId);
    // var datasetVersion = dm.GetDatasetLatestVersion(dataset);

    if (dm.IsDatasetCheckedOutFor(datasetId, userName) || dm.CheckOutDataset(datasetId, userName))
    {
        DatasetVersion datasetVersion = dm.GetDatasetWorkingCopy(datasetId);
        StringBuilder files = new StringBuilder();

        foreach (var httpContent in attachments)
        {
            var dataStream = await httpContent.ReadAsStreamAsync();
            var dataPath = AppConfiguration.DataPath;

            var fileName = httpContent.Headers.ContentDisposition.FileName;
            fileName = Regex.Replace(fileName.Trim(), "[^A-Za-z0-9_.() ]+", "");

            if (!Directory.Exists(Path.Combine(dataPath, "Datasets", datasetId.ToString(), "Attachments")))
            {
                Directory.CreateDirectory(Path.Combine(dataPath, "Datasets", datasetId.ToString(), "Attachments"));
            }

            var destinationPath = Path.Combine(dataPath, "Datasets", datasetId.ToString(), "Attachments", fileName);

            using (var fileStream = File.Create(destinationPath))
            {
                FileHelper.CopyStream(dataStream, fileStream);
                fileStream.Close();

                AddFileInContentDiscriptor(datasetVersion, fileName, description);

                if (files.Length > 0)
                {
                    files.Append(", ");
                }
                files.Append(fileName);
            }
        }

        //set modification
        datasetVersion.ModificationInfo = new EntityAuditInfo()
        {
            Performer = userName,
            Comment = "Attachment",
            ActionType = AuditActionType.Edit
        };

        dm.EditDatasetVersion(datasetVersion, null, null, null);
        dm.CheckInDataset(dataset.Id, "File/s: " + files.ToString() + " via api", userName, ViewCreationBehavior.None);
    }
}
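A caller of uploadFiles still has to turn an incoming multipart request into the List&lt;HttpContent&gt; the method expects. The following is a minimal sketch for an ASP.NET Web API action; the route, the controller context, and the PostAttachments name are assumptions and not part of the original code.

// Hypothetical Web API action showing how the attachments list for uploadFiles could be built.
public async Task<HttpResponseMessage> PostAttachments(long datasetId, string description)
{
    if (!Request.Content.IsMimeMultipartContent())
    {
        return Request.CreateErrorResponse(HttpStatusCode.UnsupportedMediaType, "Multipart content expected.");
    }

    // buffer all parts of the multipart request in memory
    var provider = await Request.Content.ReadAsMultipartAsync();

    // only parts that carry a file name are treated as attachments
    var attachments = provider.Contents
        .Where(c => c.Headers.ContentDisposition?.FileName != null)
        .ToList();

    using (var dm = new DatasetManager())
    {
        await uploadFiles(attachments, datasetId, description, User.Identity.Name, dm);
    }

    return Request.CreateResponse(HttpStatusCode.OK);
}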
public void CreateDatasetVersionTest()
{
    long numberOfTuples = 1000;

    var dm = new DatasetManager();
    var rsm = new ResearchPlanManager();
    var mdm = new MetadataStructureManager();

    try
    {
        var dsHelper = new DatasetHelper();

        StructuredDataStructure dataStructure = dsHelper.CreateADataStructure();
        dataStructure.Should().NotBeNull("Failed to meet a precondition: a data structure is required.");

        var rp = dsHelper.CreateResearchPlan();
        rp.Should().NotBeNull("Failed to meet a precondition: a research plan is required.");

        var mds = mdm.Repo.Query().First();
        mds.Should().NotBeNull("Failed to meet a precondition: a metadata structure is required.");

        Dataset dataset = dm.CreateEmptyDataset(dataStructure, rp, mds);
        dataset = dsHelper.GenerateTuplesForDataset(dataset, dataStructure, numberOfTuples, "Javad");
        dataset.Should().NotBeNull("The dataset tuple generation has failed!");

        dm.CheckInDataset(dataset.Id, "for testing purposes 2", "Javad", ViewCreationBehavior.None);

        //dm.SyncView(ds.Id, ViewCreationBehavior.Create);
        //dm.SyncView(ds.Id, ViewCreationBehavior.Refresh);
        dm.SyncView(dataset.Id, ViewCreationBehavior.Create | ViewCreationBehavior.Refresh);

        dataset.Id.Should().BeGreaterThan(0, "Dataset was not persisted.");
        dataset.LastCheckIOTimestamp.Should().NotBeAfter(DateTime.UtcNow, "The dataset's timestamp is wrong.");
        dataset.DataStructure.Should().NotBeNull("Dataset must have a data structure.");
        dataset.Status.Should().Be(DatasetStatus.CheckedIn, "Dataset must be in the CheckedIn status.");

        dm.GetDatasetLatestVersionEffectiveTupleCount(dataset.Id).Should().Be(numberOfTuples);

        dm.DatasetVersionRepo.Evict();
        dm.DataTupleRepo.Evict();
        dm.DatasetRepo.Evict();

        dm.PurgeDataset(dataset.Id, true);
        dsHelper.PurgeAllDataStructures();
    }
    finally
    {
        dm.Dispose();
        rsm.Dispose();
        mdm.Dispose();
    }
}
public void EditDatasetVersion_DeleteADataTuple_ReturnUpdatedVersion()
{
    DatasetManager datasetManager = new DatasetManager();

    try
    {
        //Arrange
        DatasetVersion latest = datasetManager.GetDatasetLatestVersion(datasetId);
        int before = datasetManager.GetDataTuplesCount(latest.Id);
        var tuple = datasetManager.GetDataTuples(latest.Id).LastOrDefault();

        //Act
        if (datasetManager.IsDatasetCheckedOutFor(datasetId, "David") || datasetManager.CheckOutDataset(datasetId, "David"))
        {
            DatasetVersion workingCopy = datasetManager.GetDatasetWorkingCopy(datasetId);

            List<DataTuple> deleteTuples = new List<DataTuple>();
            deleteTuples.Add(tuple as DataTuple);

            workingCopy = datasetManager.EditDatasetVersion(workingCopy, null, null, deleteTuples.Select(d => d.Id).ToList());
            datasetManager.CheckInDataset(datasetId, "delete one datatuple for testing", username, ViewCreationBehavior.None);
        }

        latest = datasetManager.GetDatasetLatestVersion(datasetId);
        int after = datasetManager.GetDataTuplesCount(latest.Id);

        //Assert
        Assert.That(before, Is.GreaterThan(after));
    }
    catch (Exception)
    {
        throw; // rethrow without resetting the stack trace
    }
    finally
    {
        datasetManager.Dispose();
    }
}
public void GetDataTuples_WhenCalledOneOlderVersion_ReturnIQueryableOfAbstractTuples()
{
    //Arrange
    DatasetManager datasetManager = null;

    try
    {
        datasetManager = new DatasetManager();

        //get the latest data tuple id before creating a new dataset and data
        using (var uow = this.GetUnitOfWork())
        {
            var latestDataTuple = uow.GetReadOnlyRepository<DataTuple>().Get().LastOrDefault();
            if (latestDataTuple != null)
            {
                latestDataTupleId = latestDataTuple.Id;
            }
        }

        var dataset = datasetManager.GetDataset(datasetId);

        dataset = dsHelper.UpdateOneTupleForDataset(dataset, (StructuredDataStructure)dataset.DataStructure, latestDataTupleId, 1, datasetManager);
        datasetManager.CheckInDataset(dataset.Id, "for testing datatuples with versions", username, ViewCreationBehavior.None);

        //Act
        List<DatasetVersion> datasetversions = datasetManager.GetDatasetVersions(datasetId).OrderBy(d => d.Timestamp).ToList();

        // get data tuples from the version before the latest
        var result = datasetManager.GetDataTuples(datasetversions.ElementAt(datasetversions.Count - 2).Id);

        //Assert
        Assert.That(result.Count(), Is.EqualTo(10));
    }
    finally
    {
        datasetManager.Dispose();
    }
}
public bool deleteFile(string path)
{
    path = Server.UrlDecode(path);

    using (EntityPermissionManager entityPermissionManager = new EntityPermissionManager())
    using (DatasetManager datasetManager = new DatasetManager())
    {
        try
        {
            DatasetInfo datasetInfo = (DatasetInfo)Session["DatasetInfo"];
            string entityType = (string)Session["EntityType"];

            DatasetVersion workingCopy = new DatasetVersion();
            string status = DatasetStateInfo.NotValid.ToString();
            string[] temp = path.Split('\\');
            long datasetID = datasetInfo.DatasetId;

            status = datasetManager.GetDatasetLatestVersion(datasetID).StateInfo.State;

            bool access = entityPermissionManager.HasEffectiveRight(HttpContext.User.Identity.Name, typeof(Dataset), datasetID, RightType.Delete);

            if (access && (datasetManager.IsDatasetCheckedOutFor(datasetID, HttpContext.User.Identity.Name) || datasetManager.CheckOutDataset(datasetID, HttpContext.User.Identity.Name)))
            {
                try
                {
                    workingCopy = datasetManager.GetDatasetWorkingCopy(datasetID);

                    using (var unitOfWork = this.GetUnitOfWork())
                    {
                        workingCopy = unitOfWork.GetReadOnlyRepository<DatasetVersion>().Get(workingCopy.Id);

                        //set StateInfo of the previous version
                        if (workingCopy.StateInfo == null)
                        {
                            workingCopy.StateInfo = new Vaiona.Entities.Common.EntityStateInfo()
                            {
                                State = status
                            };
                        }
                        else
                        {
                            workingCopy.StateInfo.State = status;
                        }

                        unitOfWork.GetReadOnlyRepository<DatasetVersion>().Load(workingCopy.ContentDescriptors);

                        ContentDescriptor contentDescriptor = workingCopy.ContentDescriptors.Where(cd => cd.URI.Equals(path)).FirstOrDefault();
                        datasetManager.DeleteContentDescriptor(contentDescriptor);
                    }

                    //set modification
                    workingCopy.ModificationInfo = new EntityAuditInfo()
                    {
                        Performer = HttpContext.User?.Identity?.Name,
                        Comment = "File",
                        ActionType = AuditActionType.Delete
                    };

                    // set system key values
                    int v = 1;
                    if (workingCopy.Dataset.Versions != null && workingCopy.Dataset.Versions.Count > 1)
                    {
                        v = workingCopy.Dataset.Versions.Count();
                    }

                    workingCopy.Metadata = setSystemValuesToMetadata(v, workingCopy.Dataset.MetadataStructure.Id, workingCopy.Metadata);

                    datasetManager.EditDatasetVersion(workingCopy, null, null, null);

                    // ToDo: Get Comment from ui and users
                    datasetManager.CheckInDataset(datasetID, temp.Last(), HttpContext.User.Identity.Name, ViewCreationBehavior.None);

                    Session["DatasetInfo"] = datasetInfo;
                    Session["EntityType"] = entityType;

                    return true;
                }
                catch
                {
                    datasetManager.CheckInDataset(datasetID, "Failed to delete file " + temp.Last(), HttpContext.User.Identity.Name, ViewCreationBehavior.None);

                    Session["DatasetInfo"] = datasetInfo;
                    Session["EntityType"] = entityType;

                    return false;
                }
            }

            Session["DatasetInfo"] = datasetInfo;
            Session["EntityType"] = entityType;

            return false;
        }
        catch
        {
            return false;
        }
    }
}
//temporary solution: norman: FinishUpload2
public async Task<List<Error>> FinishUpload()
{
    DataStructureManager dsm = new DataStructureManager();
    DatasetManager dm = new DatasetManager();
    IOUtility iOUtility = new IOUtility();

    List<Error> temp = new List<Error>();
    long id = 0;
    string title = "";
    int numberOfRows = 0;
    int numberOfSkippedRows = 0;

    try
    {
        DatasetVersion workingCopy = new DatasetVersion();

        //datatuple list
        List<DataTuple> rows = new List<DataTuple>();
        //Dataset ds = null;
        bool inputWasAltered = false;

        if (Bus.ContainsKey(TaskManager.DATASET_ID) && Bus.ContainsKey(TaskManager.DATASTRUCTURE_ID))
        {
            id = Convert.ToInt32(Bus[TaskManager.DATASET_ID]);
            long iddsd = Convert.ToInt32(Bus[TaskManager.DATASTRUCTURE_ID]);

            //get values from the previous version
            // status
            DatasetVersion latestVersion = dm.GetDatasetLatestVersion(id);
            title = latestVersion.Title;
            string status = DatasetStateInfo.NotValid.ToString();
            if (latestVersion.StateInfo != null)
            {
                status = latestVersion.StateInfo.State;
            }

            #region Progress Informations

            if (Bus.ContainsKey(TaskManager.CURRENTPACKAGESIZE))
            {
                Bus[TaskManager.CURRENTPACKAGESIZE] = 0;
            }
            else
            {
                Bus.Add(TaskManager.CURRENTPACKAGESIZE, 0);
            }

            if (Bus.ContainsKey(TaskManager.CURRENTPACKAGE))
            {
                Bus[TaskManager.CURRENTPACKAGE] = 0;
            }
            else
            {
                Bus.Add(TaskManager.CURRENTPACKAGE, 0);
            }

            #endregion Progress Informations

            #region structured data

            if (Bus.ContainsKey(TaskManager.DATASTRUCTURE_TYPE) && Bus[TaskManager.DATASTRUCTURE_TYPE].Equals(DataStructureType.Structured))
            {
                long datasetid = id;
                XmlDatasetHelper xmlDatasetHelper = new XmlDatasetHelper();

                try
                {
                    // load all data tuple ids from the latest version
                    List<long> datatupleFromDatabaseIds = dm.GetDatasetVersionEffectiveTupleIds(dm.GetDatasetLatestVersion(id));

                    // load structured data structure
                    StructuredDataStructure sds = dsm.StructuredDataStructureRepo.Get(iddsd);
                    dsm.StructuredDataStructureRepo.LoadIfNot(sds.Variables);

                    #region excel reader

                    if (Bus[TaskManager.EXTENTION].ToString().Equals(".xlsm") || iOUtility.IsSupportedExcelFile(Bus[TaskManager.EXTENTION].ToString()))
                    {
                        int packageSize = 100000;
                        Bus[TaskManager.CURRENTPACKAGESIZE] = packageSize;

                        int counter = 0;

                        //loop
                        dm.CheckOutDatasetIfNot(id, User.Name); // there are cases, the dataset does not get checked out!!
                        if (!dm.IsDatasetCheckedOutFor(id, User.Name))
                        {
                            throw new Exception(string.Format("Not able to checkout dataset '{0}' for user '{1}'!", id, User.Name));
                        }

                        workingCopy = dm.GetDatasetWorkingCopy(id);

                        //set StateInfo of the previous version
                        if (workingCopy.StateInfo == null)
                        {
                            workingCopy.StateInfo = new Vaiona.Entities.Common.EntityStateInfo()
                            {
                                State = status
                            };
                        }
                        else
                        {
                            workingCopy.StateInfo.State = status;
                        }

                        ExcelReader reader = null;
                        ExcelFileReaderInfo excelFileReaderInfo = null;

                        if (iOUtility.IsSupportedExcelFile(Bus[TaskManager.EXTENTION].ToString()))
                        {
                            excelFileReaderInfo = (ExcelFileReaderInfo)Bus[TaskManager.FILE_READER_INFO];
                        }

                        reader = new ExcelReader(sds, excelFileReaderInfo);

                        do
                        {
                            counter++;
                            Bus[TaskManager.CURRENTPACKAGE] = counter;

                            //open stream
                            Stream = reader.Open(Bus[TaskManager.FILEPATH].ToString());
                            rows = new List<DataTuple>();

                            if (iOUtility.IsSupportedExcelFile(Bus[TaskManager.EXTENTION].ToString()))
                            {
                                if (reader.Position < excelFileReaderInfo.DataEndRow)
                                {
                                    rows = reader.ReadFile(Stream, Bus[TaskManager.FILENAME].ToString(), (int)id, packageSize);
                                }
                            }
                            else
                            {
                                rows = reader.ReadTemplateFile(Stream, Bus[TaskManager.FILENAME].ToString(), (int)id, packageSize);
                            }

                            //Debug.WriteLine("ReadFile: " + counter + " Time " + upload.Elapsed.TotalSeconds.ToString());

                            if (reader.ErrorMessages.Count > 0)
                            {
                                //model.ErrorList = reader.errorMessages;
                            }
                            else
                            {
                                //XXX Add packagesize to excel read function
                                if (Bus.ContainsKey(TaskManager.DATASET_STATUS))
                                {
                                    if (Bus[TaskManager.DATASET_STATUS].ToString().Equals("new") || ((UploadMethod)Bus[TaskManager.UPLOAD_METHOD]).Equals(UploadMethod.Append))
                                    {
                                        dm.EditDatasetVersion(workingCopy, rows, null, null);
                                        //Debug.WriteLine("EditDatasetVersion: " + counter + " Time " + upload.Elapsed.TotalSeconds.ToString());
                                    }
                                    else if (Bus[TaskManager.DATASET_STATUS].ToString().Equals("edit"))
                                    {
                                        if (rows.Count() > 0)
                                        {
                                            Dictionary<string, List<DataTuple>> splittedDatatuples = new Dictionary<string, List<DataTuple>>();
                                            splittedDatatuples = uploadWizardHelper.GetSplitDatatuples(rows, (List<long>)Bus[TaskManager.PRIMARY_KEYS], workingCopy, ref datatupleFromDatabaseIds);
                                            dm.EditDatasetVersion(workingCopy, splittedDatatuples["new"], splittedDatatuples["edit"], null);
                                        }
                                    }
                                }
                            }

                            Stream?.Close();

                            //count rows
                            numberOfRows += rows.Count();
                        } while (rows.Count() > 0 && rows.Count() <= packageSize);

                        numberOfSkippedRows = reader.NumberOSkippedfRows;
                    }

                    #endregion excel reader

                    #region ascii reader

                    if (iOUtility.IsSupportedAsciiFile(Bus[TaskManager.EXTENTION].ToString()))
                    {
                        // open file
                        AsciiReader reader = new AsciiReader(sds, (AsciiFileReaderInfo)Bus[TaskManager.FILE_READER_INFO]);

                        if (dm.IsDatasetCheckedOutFor(id, User.Name) || dm.CheckOutDataset(id, User.Name))
                        {
                            workingCopy = dm.GetDatasetWorkingCopy(id);

                            //set package size for one loop
                            int packageSize = 100000;
                            Bus[TaskManager.CURRENTPACKAGESIZE] = packageSize;

                            //loop
                            int counter = 0;

                            //set StateInfo of the previous version
                            if (workingCopy.StateInfo == null)
                            {
                                workingCopy.StateInfo = new Vaiona.Entities.Common.EntityStateInfo()
                                {
                                    State = status
                                };
                            }
                            else
                            {
                                workingCopy.StateInfo.State = status;
                            }

                            do
                            {
                                counter++;
                                inputWasAltered = false;
                                Bus[TaskManager.CURRENTPACKAGE] = counter;

                                Stream = reader.Open(Bus[TaskManager.FILEPATH].ToString());
                                rows = reader.ReadFile(Stream, Bus[TaskManager.FILENAME].ToString(), id, packageSize);
                                Stream.Close();

                                if (reader.ErrorMessages.Count > 0)
                                {
                                    foreach (var err in reader.ErrorMessages)
                                    {
                                        temp.Add(new Error(ErrorType.Dataset, err.GetMessage()));
                                    }
                                    //return temp;
                                }

                                if (Bus.ContainsKey(TaskManager.DATASET_STATUS)) //check whether there is a dataset status in the upload wizard bus
                                {
                                    // based on the dataset status and/or the upload method
                                    if (Bus[TaskManager.DATASET_STATUS].ToString().Equals("new") || ((UploadMethod)Bus[TaskManager.UPLOAD_METHOD]).Equals(UploadMethod.Append))
                                    {
                                        dm.EditDatasetVersion(workingCopy, rows, null, null); // add all datatuples to the dataset version
                                    }
                                    else if (Bus[TaskManager.DATASET_STATUS].ToString().Equals("edit")) // datatuples already exist
                                    {
                                        if (rows.Count() > 0)
                                        {
                                            //split the incoming datatuples into (new|edit) based on the primary keys
                                            var splittedDatatuples = uploadWizardHelper.GetSplitDatatuples(rows, (List<long>)Bus[TaskManager.PRIMARY_KEYS], workingCopy, ref datatupleFromDatabaseIds);
                                            dm.EditDatasetVersion(workingCopy, splittedDatatuples["new"], splittedDatatuples["edit"], null);
                                            inputWasAltered = true;
                                        }
                                    }
                                }
                                else // if there is no dataset status in the bus, use dataset status edit
                                {
                                    if (rows.Count() > 0)
                                    {
                                        //split the incoming datatuples into (new|edit) based on the primary keys
                                        Dictionary<string, List<DataTuple>> splittedDatatuples = new Dictionary<string, List<DataTuple>>();
                                        splittedDatatuples = uploadWizardHelper.GetSplitDatatuples(rows, (List<long>)Bus[TaskManager.PRIMARY_KEYS], workingCopy, ref datatupleFromDatabaseIds);
                                        dm.EditDatasetVersion(workingCopy, splittedDatatuples["new"], splittedDatatuples["edit"], null);
                                        inputWasAltered = true;
                                    }
                                }

                                //count rows
                                numberOfRows += rows.Count();
                            } while ((rows.Count() > 0 && rows.Count() <= packageSize) || inputWasAltered == true);

                            numberOfSkippedRows = reader.NumberOSkippedfRows;
                        }
                        //Stream.Close();
                    }

                    #endregion ascii reader

                    #region contentdescriptors

                    //remove all generated content descriptors from the old version

                    //generatedTXT
                    if (workingCopy.ContentDescriptors.Any(c => c.Name.Equals("generatedTXT")))
                    {
                        ContentDescriptor tmp = workingCopy.ContentDescriptors.Where(c => c.Name.Equals("generatedTXT")).FirstOrDefault();
                        dm.DeleteContentDescriptor(tmp);
                    }

                    //generatedCSV
                    if (workingCopy.ContentDescriptors.Any(c => c.Name.Equals("generatedCSV")))
                    {
                        ContentDescriptor tmp = workingCopy.ContentDescriptors.Where(c => c.Name.Equals("generatedCSV")).FirstOrDefault();
                        dm.DeleteContentDescriptor(tmp);
                    }

                    //generated
                    if (workingCopy.ContentDescriptors.Any(c => c.Name.Equals("generated")))
                    {
                        ContentDescriptor tmp = workingCopy.ContentDescriptors.Where(c => c.Name.Equals("generated")).FirstOrDefault();
                        dm.DeleteContentDescriptor(tmp);
                    }

                    #endregion contentdescriptors

                    #region set system values into metadata

                    if (Bus.ContainsKey(TaskManager.DATASET_STATUS))
                    {
                        bool newdataset = Bus[TaskManager.DATASET_STATUS].ToString().Equals("new");

                        int v = 1;
                        if (workingCopy.Dataset.Versions != null && workingCopy.Dataset.Versions.Count > 1)
                        {
                            v = workingCopy.Dataset.Versions.Count();
                        }

                        //set modification
                        workingCopy.ModificationInfo = new EntityAuditInfo()
                        {
                            Performer = User.Name,
                            Comment = "Data",
                            ActionType = newdataset ? AuditActionType.Create : AuditActionType.Edit
                        };

                        setSystemValuesToMetadata(id, v, workingCopy.Dataset.MetadataStructure.Id, workingCopy.Metadata, newdataset);
                        dm.EditDatasetVersion(workingCopy, null, null, null);
                    }

                    #endregion set system values into metadata

                    // ToDo: Get Comment from ui and users
                    MoveAndSaveOriginalFileInContentDiscriptor(workingCopy);
                    dm.CheckInDataset(id, numberOfRows + " rows", User.Name);

                    //send email
                    var es = new EmailService();
                    es.Send(MessageHelper.GetUpdateDatasetHeader(datasetid),
                        MessageHelper.GetUpdateDatasetMessage(datasetid, title, User.DisplayName),
                        ConfigurationManager.AppSettings["SystemEmail"]
                        );
                }
                catch (Exception e)
                {
                    temp.Add(new Error(ErrorType.Other, "Cannot upload: " + e.Message));

                    var es = new EmailService();
                    es.Send(MessageHelper.GetErrorHeader(),
                        "Cannot upload: " + e.Message,
                        ConfigurationManager.AppSettings["SystemEmail"]
                        );
                }
            }

            #endregion structured data

            #region unstructured data

            if (Bus.ContainsKey(TaskManager.DATASTRUCTURE_TYPE) && Bus[TaskManager.DATASTRUCTURE_TYPE].Equals(DataStructureType.Unstructured))
            {
                // checkout the dataset, apply the changes, and check it in.
                if (dm.IsDatasetCheckedOutFor(id, User.Name) || dm.CheckOutDataset(id, User.Name))
                {
                    try
                    {
                        workingCopy = dm.GetDatasetWorkingCopy(id);

                        using (var unitOfWork = this.GetUnitOfWork())
                        {
                            workingCopy.VersionNo += 1;

                            //set StateInfo of the previous version
                            if (workingCopy.StateInfo == null)
                            {
                                workingCopy.StateInfo = new Vaiona.Entities.Common.EntityStateInfo()
                                {
                                    State = status
                                };
                            }
                            else
                            {
                                workingCopy.StateInfo.State = status;
                            }

                            unitOfWork.GetReadOnlyRepository<DatasetVersion>().Load(workingCopy.ContentDescriptors);
                            SaveFileInContentDiscriptor(workingCopy);
                        }

                        if (Bus.ContainsKey(TaskManager.DATASET_STATUS))
                        {
                            bool newdataset = Bus[TaskManager.DATASET_STATUS].ToString().Equals("new");

                            int v = 1;
                            if (workingCopy.Dataset.Versions != null && workingCopy.Dataset.Versions.Count > 1)
                            {
                                v = workingCopy.Dataset.Versions.Count();
                            }

                            //set modification
                            workingCopy.ModificationInfo = new EntityAuditInfo()
                            {
                                Performer = User.Name,
                                Comment = "File",
                                ActionType = AuditActionType.Create
                            };

                            setSystemValuesToMetadata(id, v, workingCopy.Dataset.MetadataStructure.Id, workingCopy.Metadata, newdataset);
                            dm.EditDatasetVersion(workingCopy, null, null, null);
                        }

                        //filename
                        string filename = "";
                        if (Bus.ContainsKey(TaskManager.FILENAME))
                        {
                            filename = Bus[TaskManager.FILENAME]?.ToString();
                        }

                        // ToDo: Get Comment from ui and users
                        dm.CheckInDataset(id, filename, User.Name, ViewCreationBehavior.None);
                    }
                    catch (Exception)
                    {
                        throw; // rethrow without resetting the stack trace
                    }
                }
            }

            #endregion unstructured data
        }
        else
        {
            temp.Add(new Error(ErrorType.Dataset, "Dataset is not selected."));
        }

        if (temp.Count <= 0)
        {
            dm.CheckInDataset(id, "no update on data tuples", User.Name, ViewCreationBehavior.None);
        }
        else
        {
            dm.UndoCheckoutDataset(id, User.Name);
        }
    }
    catch (Exception ex)
    {
        temp.Add(new Error(ErrorType.Dataset, ex.Message));
        dm.CheckInDataset(id, "no update on data tuples", User.Name, ViewCreationBehavior.None);
    }
    finally
    {
        if (RunningASync)
        {
            var user = User;

            if (temp.Any())
            {
                var es = new EmailService();
                es.Send(MessageHelper.GetPushApiUploadFailHeader(id, title),
                    MessageHelper.GetPushApiUploadFailMessage(id, user.Name, temp.Select(e => e.ToString()).ToArray()),
                    new List<string> { user.Email },
                    null,
                    new List<string> { ConfigurationManager.AppSettings["SystemEmail"] });
            }
            else
            {
                var es = new EmailService();
                es.Send(MessageHelper.GetASyncFinishUploadHeader(id, title),
                    MessageHelper.GetASyncFinishUploadMessage(id, title, numberOfRows, numberOfSkippedRows),
                    new List<string> { user.Email },
                    null,
                    new List<string> { ConfigurationManager.AppSettings["SystemEmail"] });
            }
        }

        dm.Dispose();
        dsm.Dispose();
    }

    return temp;
}
public async Task<HttpResponseMessage> Put(int id)
{
    var request = Request.CreateResponse();
    User user = null;
    string error = "";

    DatasetManager datasetManager = new DatasetManager();
    UserManager userManager = new UserManager();
    EntityPermissionManager entityPermissionManager = new EntityPermissionManager();

    try
    {
        #region security

        string token = this.Request.Headers.Authorization?.Parameter;

        if (String.IsNullOrEmpty(token))
        {
            return Request.CreateErrorResponse(HttpStatusCode.PreconditionFailed, "Bearer token does not exist.");
        }

        user = userManager.Users.Where(u => u.Token.Equals(token)).FirstOrDefault();

        if (user == null)
        {
            return Request.CreateErrorResponse(HttpStatusCode.Unauthorized, "Token is not valid.");
        }

        //check permissions
        //entity permissions
        if (id > 0)
        {
            Dataset d = datasetManager.GetDataset(id);
            if (d == null)
            {
                return Request.CreateErrorResponse(HttpStatusCode.PreconditionFailed, "The dataset with the id (" + id + ") does not exist.");
            }

            if (!entityPermissionManager.HasEffectiveRight(user.Name, typeof(Dataset), id, RightType.Write))
            {
                return Request.CreateErrorResponse(HttpStatusCode.Unauthorized, "The token is not authorized to write into the dataset.");
            }
        }

        #endregion security

        #region check incoming metadata

        Stream requestStream = await this.Request.Content.ReadAsStreamAsync();
        string contentType = this.Request.Content.Headers.ContentType.MediaType;

        if (string.IsNullOrEmpty(contentType) || (!contentType.Equals("application/xml") && !contentType.Equals("text/plain")))
        {
            return Request.CreateErrorResponse(HttpStatusCode.PreconditionFailed, "The transmitted file is not an XML document.");
        }

        if (requestStream == null)
        {
            return Request.CreateErrorResponse(HttpStatusCode.PreconditionFailed, "Metadata XML was not received.");
        }

        #endregion check incoming metadata

        #region incoming values check

        // check incoming values
        if (id == 0)
        {
            error += "Dataset id should be greater than 0.";
        }

        ////if (data.UpdateMethod == null) error += "update method is not set";
        ////if (data.Count == 0) error += "count should be greater than 0. ";
        //if (data.Columns == null) error += "columns should not be null. ";
        //if (data.Data == null) error += "data is empty. ";
        //if (data.PrimaryKeys == null || data.PrimaryKeys.Count() == 0) error += "the UpdateMethod update has been selected but there are no primary keys available. ";

        if (!string.IsNullOrEmpty(error))
        {
            return Request.CreateErrorResponse(HttpStatusCode.PreconditionFailed, error);
        }

        #endregion incoming values check

        Dataset dataset = datasetManager.GetDataset(id);
        if (dataset == null)
        {
            return Request.CreateErrorResponse(HttpStatusCode.PreconditionFailed, "Dataset does not exist.");
        }

        #region convert metadata

        XmlDocument metadataForImport = new XmlDocument();
        metadataForImport.Load(requestStream);

        // metadata structure id
        var metadataStructureId = dataset.MetadataStructure.Id;
        var metadataStructureName = this.GetUnitOfWork().GetReadOnlyRepository<MetadataStructure>().Get(metadataStructureId).Name;

        // load mapping file
        var path_mappingFile = Path.Combine(AppConfiguration.GetModuleWorkspacePath("DIM"), XmlMetadataImportHelper.GetMappingFileName(metadataStructureId, TransmissionType.mappingFileImport, metadataStructureName));

        // XML mapper + mapping file
        var xmlMapperManager = new XmlMapperManager(TransactionDirection.ExternToIntern);
        xmlMapperManager.Load(path_mappingFile, "IDIV");

        // generate internal metadata without internal attributes
        var metadataResult = xmlMapperManager.Generate(metadataForImport, 1, true);

        // generate internal template metadata XML with the needed attributes
        var xmlMetadataWriter = new XmlMetadataWriter(BExIS.Xml.Helpers.XmlNodeMode.xPath);
        var metadataXml = xmlMetadataWriter.CreateMetadataXml(metadataStructureId, XmlUtility.ToXDocument(metadataResult));
        var metadataXmlTemplate = XmlMetadataWriter.ToXmlDocument(metadataXml);

        // set attributes FROM metadataXmlTemplate TO metadataResult
        var completeMetadata = XmlMetadataImportHelper.FillInXmlValues(metadataResult, metadataXmlTemplate);

        #endregion convert metadata

        if (completeMetadata != null)
        {
            string title = "";

            if (datasetManager.IsDatasetCheckedOutFor(id, user.Name) || datasetManager.CheckOutDataset(id, user.Name))
            {
                DatasetVersion workingCopy = datasetManager.GetDatasetWorkingCopy(id);
                workingCopy.Metadata = completeMetadata;
                workingCopy.Title = xmlDatasetHelper.GetInformation(id, completeMetadata, NameAttributeValues.title);
                workingCopy.Description = xmlDatasetHelper.GetInformation(id, completeMetadata, NameAttributeValues.description);

                //check if module exists
                int v = 1;
                if (workingCopy.Dataset.Versions != null && workingCopy.Dataset.Versions.Count > 1)
                {
                    v = workingCopy.Dataset.Versions.Count();
                }

                //set status
                if (workingCopy.StateInfo == null)
                {
                    workingCopy.StateInfo = new Vaiona.Entities.Common.EntityStateInfo();
                }
                workingCopy.StateInfo.State = DatasetStateInfo.NotValid.ToString();

                title = workingCopy.Title;
                if (string.IsNullOrEmpty(title))
                {
                    title = "No Title available.";
                }

                datasetManager.EditDatasetVersion(workingCopy, null, null, null);
                datasetManager.CheckInDataset(id, "via api.", user.Name, ViewCreationBehavior.None);
            }

            // ToDo add index update to this api
            //if (this.IsAccessible("DDM", "SearchIndex", "ReIndexSingle"))
            //{
            //    var x = this.Run("DDM", "SearchIndex", "ReIndexSingle", new RouteValueDictionary() { { "id", datasetId } });
            //}

            LoggerFactory.LogData(id.ToString(), typeof(Dataset).Name, Vaiona.Entities.Logging.CrudState.Created);

            var es = new EmailService();
            es.Send(MessageHelper.GetUpdateDatasetHeader(id),
                MessageHelper.GetUpdateDatasetMessage(id, title, user.DisplayName),
                ConfigurationManager.AppSettings["SystemEmail"]
                );
        }

        return Request.CreateErrorResponse(HttpStatusCode.OK, "Metadata successfully updated.");
    }
    catch (Exception ex)
    {
        return Request.CreateErrorResponse(HttpStatusCode.InternalServerError, ex.Message);
    }
    finally
    {
        datasetManager.Dispose();
        entityPermissionManager.Dispose();
        userManager.Dispose();
        request.Dispose();
    }
}
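For reference, a client could call this metadata update endpoint roughly as follows. The server URL and the api/metadata/{id} route are assumptions for illustration; the bearer token header and the application/xml body match the checks performed in the action above.

// Hypothetical client for the metadata update endpoint shown above.
// The server URL and the "api/metadata/{id}" route are assumptions.
public static async Task UpdateMetadataAsync()
{
    using (var client = new HttpClient { BaseAddress = new Uri("https://my-bexis-server/") })
    {
        client.DefaultRequestHeaders.Authorization = new AuthenticationHeaderValue("Bearer", "<api-token>");

        // the action accepts application/xml (or text/plain) bodies only
        var metadata = new StringContent(File.ReadAllText("metadata.xml"), Encoding.UTF8, "application/xml");

        HttpResponseMessage response = await client.PutAsync("api/metadata/5", metadata);
        Console.WriteLine(response.StatusCode);
    }
}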
[TestCase(2)] // primary key as double
public void EditDatasetVersion_UpdateAllDataTuples_SameNumberOfDatatuples(int primaryKeyIndex)
{
    Dataset dataset;
    DatasetVersion latest;
    List<DataTuple> incoming = new List<DataTuple>();
    int count = 0;
    int expectedCount = 0;
    List<long> datatupleFromDatabaseIds = new List<long>();

    using (DatasetManager datasetManager = new DatasetManager())
    {
        //Arrange
        dataset = datasetManager.GetDataset(datasetId);
        latest = datasetManager.GetDatasetLatestVersion(datasetId);
        datatupleFromDatabaseIds = datasetManager.GetDatasetVersionEffectiveTupleIds(latest);

        //get updated tuples as incoming datatuples
        incoming = dsHelper.GetUpdatedDatatuples(latest, dataset.DataStructure as StructuredDataStructure, datasetManager);

        //since all datatuples are updated, the incoming count should equal the existing one
        expectedCount = incoming.Count;
    }

    using (DatasetManager datasetManager = new DatasetManager())
    {
        try
        {
            if (datasetManager.IsDatasetCheckedOutFor(datasetId, "David") || datasetManager.CheckOutDataset(datasetId, "David"))
            {
                DatasetVersion workingCopy = datasetManager.GetDatasetWorkingCopy(datasetId);

                //get primary key ids
                // var 1 = int      = 1
                // var 2 = string   = 2
                // var 3 = double   = 3
                // var 4 = boolean  = 4
                // var 5 = datetime = 5
                List<long> primaryKeys = new List<long>();
                List<long> varIds = ((StructuredDataStructure)workingCopy.Dataset.DataStructure).Variables.Select(v => v.Id).ToList();
                primaryKeys.Add(varIds.ElementAt(primaryKeyIndex));

                //Act
                Dictionary<string, List<DataTuple>> splittedDatatuples = new Dictionary<string, List<DataTuple>>();
                UploadHelper uploadhelper = new UploadHelper();
                splittedDatatuples = uploadhelper.GetSplitDatatuples(incoming, primaryKeys, workingCopy, ref datatupleFromDatabaseIds);

                datasetManager.EditDatasetVersion(workingCopy, splittedDatatuples["new"], splittedDatatuples["edit"], null);
                datasetManager.CheckInDataset(datasetId, count + " rows", "David");

                //Assert
                long c = datasetManager.GetDatasetVersionEffectiveTupleCount(workingCopy);
                Assert.That(c, Is.EqualTo(expectedCount));
            }
        }
        catch (Exception)
        {
            throw; // rethrow without resetting the stack trace
        }
    }
}
public async Task<HttpResponseMessage> Post([FromBody] PushDataApiModel data)
{
    var request = Request.CreateResponse();
    User user = null;
    string error = "";

    DatasetManager datasetManager = new DatasetManager();
    UserManager userManager = new UserManager();
    EntityPermissionManager entityPermissionManager = new EntityPermissionManager();
    DataStructureManager dataStructureManager = new DataStructureManager();
    ApiConfigurator apiHelper = new ApiConfigurator();

    DatasetVersion workingCopy = new DatasetVersion();
    List<DataTuple> rows = new List<DataTuple>();

    //load the cell limit from the api config
    int cellLimit = 100000;
    if (apiHelper != null && apiHelper.Settings.ContainsKey(ApiConfigurator.CELLS))
    {
        Int32.TryParse(apiHelper.Settings[ApiConfigurator.CELLS], out cellLimit);
    }

    try
    {
        #region security

        string token = this.Request.Headers.Authorization?.Parameter;

        if (String.IsNullOrEmpty(token))
        {
            return Request.CreateErrorResponse(HttpStatusCode.PreconditionFailed, "Bearer token does not exist.");
        }

        user = userManager.Users.Where(u => u.Token.Equals(token)).FirstOrDefault();

        if (user == null)
        {
            return Request.CreateErrorResponse(HttpStatusCode.Unauthorized, "Token is not valid.");
        }

        //check permissions
        //entity permissions
        if (data.DatasetId > 0)
        {
            Dataset d = datasetManager.GetDataset(data.DatasetId);
            if (d == null)
            {
                return Request.CreateErrorResponse(HttpStatusCode.PreconditionFailed, "The dataset with the id (" + data.DatasetId + ") does not exist.");
            }

            if (!entityPermissionManager.HasEffectiveRight(user.Name, typeof(Dataset), data.DatasetId, RightType.Write))
            {
                return Request.CreateErrorResponse(HttpStatusCode.Unauthorized, "The token is not authorized to write into the dataset.");
            }
        }

        #endregion security

        #region incoming values check

        // check incoming values
        if (data.DatasetId == 0)
        {
            error += "Dataset id should be greater than 0.";
        }

        //if (data.UpdateMethod == null) error += "update method is not set";
        if (data.Columns == null)
        {
            error += "Columns should not be null. ";
        }

        if (data.Data == null)
        {
            error += "Data is empty. ";
        }

        if (!string.IsNullOrEmpty(error))
        {
            return Request.CreateErrorResponse(HttpStatusCode.PreconditionFailed, error);
        }

        #endregion incoming values check

        Dataset dataset = datasetManager.GetDataset(data.DatasetId);
        if (dataset == null)
        {
            return Request.CreateErrorResponse(HttpStatusCode.ExpectationFailed, "Dataset does not exist.");
        }

        DatasetVersion dsv = datasetManager.GetDatasetLatestVersion(dataset);
        string title = dsv.Title;

        if ((data.Data.Count() * data.Columns.Count()) > cellLimit)
        {
            #region async upload with big data

            // if the data exceeds the cell limit, process it asynchronously
            DataApiHelper helper = new DataApiHelper(dataset, user, data, title, UploadMethod.Append);
            Task.Run(() => helper.Run());

            #endregion async upload with big data

            Debug.WriteLine("end of api call");

            return Request.CreateResponse(HttpStatusCode.OK, "Data has been successfully received and is being processed. For larger data, as in this case, we will keep you informed by mail about the next steps.");
        }
        else
        {
            #region direct upload

            var es = new EmailService();

            try
            {
                //load structured data structure
                StructuredDataStructure dataStructure = dataStructureManager.StructuredDataStructureRepo.Get(dataset.DataStructure.Id);
                List<Error> errors = new List<Error>();

                if (dataStructure == null)
                {
                    return Request.CreateErrorResponse(HttpStatusCode.ExpectationFailed, "The data structure does not exist.");
                }

                APIDataReader reader = new APIDataReader(dataStructure, new ApiFileReaderInfo());

                List<VariableIdentifier> source = new List<VariableIdentifier>();
                reader.SetSubmitedVariableIdentifiers(data.Columns.ToList());

                //validate the data structure
                foreach (string c in data.Columns)
                {
                    source.Add(new VariableIdentifier() { name = c });
                }

                errors = reader.ValidateComparisonWithDatatsructure(source);

                if (errors != null && errors.Count > 0)
                {
                    StringBuilder sb = new StringBuilder("The data structure is not valid.");
                    foreach (var e in errors)
                    {
                        sb.AppendLine(e.ToHtmlString());
                    }

                    return Request.CreateErrorResponse(HttpStatusCode.ExpectationFailed, sb.ToString());
                }

                errors = new List<Error>();

                // validate rows
                for (int i = 0; i < data.Data.Length; i++)
                {
                    string[] row = data.Data[i];
                    errors.AddRange(reader.ValidateRow(row.ToList(), i));
                }

                if (errors != null && errors.Count > 0)
                {
                    StringBuilder sb = new StringBuilder("The data is not valid.");
                    foreach (var e in errors)
                    {
                        sb.AppendLine(e.ToHtmlString());
                    }

                    return Request.CreateErrorResponse(HttpStatusCode.ExpectationFailed, sb.ToString());
                }

                if (datasetManager.IsDatasetCheckedOutFor(dataset.Id, user.UserName) || datasetManager.CheckOutDataset(dataset.Id, user.UserName))
                {
                    workingCopy = datasetManager.GetDatasetWorkingCopy(dataset.Id);

                    List<DataTuple> datatuples = new List<DataTuple>();
                    for (int i = 0; i < data.Data.Length; i++)
                    {
                        string[] row = data.Data[i];
                        datatuples.Add(reader.ReadRow(row.ToList(), i));
                    }

                    if (datatuples.Count > 0)
                    {
                        //set modification
                        workingCopy.ModificationInfo = new EntityAuditInfo()
                        {
                            Performer = user.UserName,
                            Comment = "Data",
                            ActionType = AuditActionType.Edit
                        };

                        datasetManager.EditDatasetVersion(workingCopy, datatuples, null, null);
                    }

                    datasetManager.CheckInDataset(dataset.Id, data.Data.Length + " rows via api.", user.UserName);

                    //send email
                    es.Send(MessageHelper.GetUpdateDatasetHeader(dataset.Id),
                        MessageHelper.GetUpdateDatasetMessage(dataset.Id, title, user.DisplayName),
                        new List<string>() { user.Email },
                        new List<string>() { ConfigurationManager.AppSettings["SystemEmail"] }
                        );
                }

                return Request.CreateResponse(HttpStatusCode.OK, "Data successfully uploaded.");
            }
            catch (Exception ex)
            {
                //send a failure email to the user
                es.Send(MessageHelper.GetPushApiUploadFailHeader(dataset.Id, title),
                    MessageHelper.GetPushApiUploadFailMessage(dataset.Id, user.UserName, new string[] { "Upload failed: " + ex.Message }),
                    new List<string>() { user.Email },
                    new List<string>() { ConfigurationManager.AppSettings["SystemEmail"] }
                    );

                return Request.CreateResponse(HttpStatusCode.InternalServerError, ex.Message);
            }

            #endregion direct upload
        }
    }
    finally
    {
        datasetManager.Dispose();
        entityPermissionManager.Dispose();
        dataStructureManager.Dispose();
        userManager.Dispose();
        request.Dispose();
    }
}
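The request body for this action is a PushDataApiModel whose DatasetId, Columns, and Data properties are validated above. A hedged client-side sketch; the server URL and the api/data route are assumptions and not taken from the original code.

// Hypothetical client for the data push endpoint shown above.
// The payload properties correspond to the PushDataApiModel checks in the action.
public static async Task PushDataAsync()
{
    var payload = new
    {
        DatasetId = 5,
        Columns = new[] { "var1", "var2" },
        Data = new[]
        {
            new[] { "1", "2.5" },
            new[] { "2", "3.5" }
        }
    };

    using (var client = new HttpClient { BaseAddress = new Uri("https://my-bexis-server/") })
    {
        client.DefaultRequestHeaders.Authorization = new AuthenticationHeaderValue("Bearer", "<api-token>");

        var body = new StringContent(JsonConvert.SerializeObject(payload), Encoding.UTF8, "application/json");

        HttpResponseMessage response = await client.PostAsync("api/data", body);
        Console.WriteLine(await response.Content.ReadAsStringAsync());
    }
}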
//[MeasurePerformance] //temporary solution: norman //For original solution, look into Aquadiva Code public List <Error> FinishUpload(EasyUploadTaskManager taskManager) { DataStructureManager dsm = new DataStructureManager(); DatasetManager dm = new DatasetManager(); DataContainerManager dam = new DataContainerManager(); //SubjectManager sm = new SubjectManager(); EntityPermissionManager entityPermissionManager = new EntityPermissionManager(); List <Error> temp = new List <Error>(); try { using (IUnitOfWork unitOfWork = this.GetUnitOfWork()) { // initialize all necessary manager DataTuple[] rows = null; //First, try to validate - if there are errors, return immediately string JsonArray = TaskManager.Bus[EasyUploadTaskManager.SHEET_JSON_DATA].ToString(); List <Error> ValidationErrors = ValidateRows(JsonArray); if (ValidationErrors.Count != 0) { temp.AddRange(ValidationErrors); return(temp); } string timestamp = DateTime.UtcNow.ToString("r"); string title = Convert.ToString(TaskManager.Bus[EasyUploadTaskManager.FILENAME]); if (TaskManager.Bus.ContainsKey(EasyUploadTaskManager.DESCRIPTIONTITLE)) { string tmp = Convert.ToString(TaskManager.Bus[EasyUploadTaskManager.DESCRIPTIONTITLE]); if (!String.IsNullOrWhiteSpace(tmp)) { title = Convert.ToString(TaskManager.Bus[EasyUploadTaskManager.DESCRIPTIONTITLE]); } } StructuredDataStructure sds = dsm.CreateStructuredDataStructure(title, title + " " + timestamp, "", "", DataStructureCategory.Generic); TaskManager.AddToBus(EasyUploadTaskManager.DATASTRUCTURE_ID, sds.Id); TaskManager.AddToBus(EasyUploadTaskManager.DATASTRUCTURE_TITLE, title + " " + timestamp); if (!TaskManager.Bus.ContainsKey(EasyUploadTaskManager.DATASET_TITLE)) { TaskManager.AddToBus(EasyUploadTaskManager.DATASET_TITLE, title); TaskManager.AddToBus(EasyUploadTaskManager.TITLE, title); } MetadataStructure metadataStructure = null; if (TaskManager.Bus.ContainsKey(EasyUploadTaskManager.SCHEMA)) { long metadataStructureId = Convert.ToInt64(TaskManager.Bus[EasyUploadTaskManager.SCHEMA]); metadataStructure = unitOfWork.GetReadOnlyRepository <MetadataStructure>() .Get(m => m.Id == metadataStructureId).FirstOrDefault(); } else { //Default option but shouldn't happen because previous steps can't be finished without selecting the metadata-structure metadataStructure = unitOfWork.GetReadOnlyRepository <MetadataStructure>() .Get(m => m.Name.ToLower().Contains("eml")).FirstOrDefault(); } ResearchPlan rp = unitOfWork.GetReadOnlyRepository <ResearchPlan>().Get().FirstOrDefault(); TaskManager.AddToBus(EasyUploadTaskManager.RESEARCHPLAN_ID, rp.Id); TaskManager.AddToBus(EasyUploadTaskManager.RESEARCHPLAN_TITLE, rp.Title); #region Progress Information if (TaskManager.Bus.ContainsKey(EasyUploadTaskManager.CURRENTPACKAGESIZE)) { TaskManager.Bus[EasyUploadTaskManager.CURRENTPACKAGESIZE] = 0; } else { TaskManager.Bus.Add(EasyUploadTaskManager.CURRENTPACKAGESIZE, 0); } if (TaskManager.Bus.ContainsKey(EasyUploadTaskManager.CURRENTPACKAGE)) { TaskManager.Bus[EasyUploadTaskManager.CURRENTPACKAGE] = 0; } else { TaskManager.Bus.Add(EasyUploadTaskManager.CURRENTPACKAGE, 0); } #endregion #region DataStructure XmlDocument xmldoc = new XmlDocument(); XmlElement extraElement = xmldoc.CreateElement("extra"); XmlElement orderElement = xmldoc.CreateElement("order"); List <Tuple <int, string, UnitInfo> > MappedHeaders = (List <Tuple <int, string, UnitInfo> >)TaskManager.Bus[EasyUploadTaskManager.VERIFICATION_MAPPEDHEADERUNITS]; //Sorting necessary to prevent problems when inserting the tuples MappedHeaders.Sort((head1, 
head2) => head1.Item1.CompareTo(head2.Item1)); List <VariableIdentifier> identifiers = new List <VariableIdentifier>(); var dataTypeRepo = unitOfWork.GetReadOnlyRepository <DataType>(); var unitRepo = unitOfWork.GetReadOnlyRepository <Unit>(); var dataAttributeRepo = unitOfWork.GetReadOnlyRepository <DataAttribute>(); List <DataAttribute> allDataAttributes = dataAttributeRepo.Get().ToList(); foreach (Tuple <int, string, UnitInfo> Entry in MappedHeaders) { int i = MappedHeaders.IndexOf(Entry); DataType dataType = dataTypeRepo.Get(Entry.Item3.SelectedDataTypeId); Unit CurrentSelectedUnit = unitRepo.Get(Entry.Item3.UnitId); DataAttribute CurrentDataAttribute = new DataAttribute(); //If possible, map the chosen variable name, unit and datatype to an existing DataAttribute (Exact match) DataAttribute existingDataAttribute = allDataAttributes.Where(da => da.Name.ToLower().Equals(TrimAndLimitString(Entry.Item2).ToLower()) && da.DataType.Id == dataType.Id && da.Unit.Id == CurrentSelectedUnit.Id).FirstOrDefault(); if (existingDataAttribute != null) { CurrentDataAttribute = existingDataAttribute; } else { //No matching DataAttribute => Create a new one CurrentDataAttribute = dam.CreateDataAttribute(TrimAndLimitString(Entry.Item2), Entry.Item2, "", false, false, "", MeasurementScale.Categorial, DataContainerType.ReferenceType, "", dataType, CurrentSelectedUnit, null, null, null, null, null, null); } Variable newVariable = dsm.AddVariableUsage(sds, CurrentDataAttribute, true, Entry.Item2, "", "", ""); VariableIdentifier vi = new VariableIdentifier { name = newVariable.Label, id = newVariable.Id }; identifiers.Add(vi); XmlElement newVariableXml = xmldoc.CreateElement("variable"); newVariableXml.InnerText = Convert.ToString(newVariable.Id); orderElement.AppendChild(newVariableXml); } extraElement.AppendChild(orderElement); xmldoc.AppendChild(extraElement); sds.Extra = xmldoc; sds.Name = "generated import structure " + timestamp; sds.Description = "automatically generated structured data structure by user " + GetUsernameOrDefault() + " for file " + title + " on " + timestamp; #endregion Dataset ds = null; ds = dm.CreateEmptyDataset(sds, rp, metadataStructure); //TODO Should a template be created? 
/*ExcelTemplateProvider etp = new ExcelTemplateProvider(); * etp.CreateTemplate(sds);*/ long datasetId = ds.Id; long sdsId = sds.Id; if (dm.IsDatasetCheckedOutFor(datasetId, GetUsernameOrDefault()) || dm.CheckOutDataset(datasetId, GetUsernameOrDefault())) { DatasetVersion dsv = dm.GetDatasetWorkingCopy(datasetId); long METADATASTRUCTURE_ID = metadataStructure.Id; XmlMetadataWriter xmlMetadatWriter = new XmlMetadataWriter(XmlNodeMode.xPath); XDocument metadataX = xmlMetadatWriter.CreateMetadataXml(METADATASTRUCTURE_ID); XmlDocument metadataXml = XmlMetadataWriter.ToXmlDocument(metadataX); dsv.Metadata = metadataXml; try { dsv.Metadata = xmlDatasetHelper.SetInformation(dsv, metadataXml, NameAttributeValues.title, title); } catch (NullReferenceException ex) { //Reference of the title node is missing throw new NullReferenceException("The extra-field of this metadata-structure is missing the title-node-reference!"); } dm.EditDatasetVersion(dsv, null, null, null); } #region security // add security if (GetUsernameOrDefault() != "DEFAULT") { //PermissionManager pm = new PermissionManager(); //User user = sm.GetUserByName(GetUsernameOrDefault()); //Rights-Management /* * TODO: Use the BExIS Party API for that * * */ /* * UserPiManager upm = new UserPiManager(); * List<long> piList = (new UserSelectListModel(GetUsernameOrDefault())).UserList.Select(x => x.Id).ToList(); */ foreach (RightType rightType in Enum.GetValues(typeof(RightType)).Cast <RightType>()) { //The user gets full permissions // add security if (GetUsernameOrDefault() != "DEFAULT") { entityPermissionManager.Create <User>(GetUsernameOrDefault(), "Dataset", typeof(Dataset), ds.Id, Enum.GetValues(typeof(RightType)).Cast <RightType>().ToList()); } // adding the rights for the pis /*foreach (long piId in piList) * { * //Each pi gets full permissions * pm.CreateDataPermission(piId, 1, ds.Id, rightType); * * // get all pi members * List<UserPi> currentMembers = upm.GetAllPiMember(piId).ToList(); * * foreach (UserPi currentMapping in currentMembers) * { * switch (rightType) * { * //Each member of each of the pis gets view-rights * case RightType.View: * pm.CreateDataPermission(currentMapping.UserId, 1, ds.Id, rightType); * break; * //Each member of each of the pis gets download-rights * case RightType.Download: * pm.CreateDataPermission(currentMapping.UserId, 1, ds.Id, rightType); * break; * default: * //No other permissions - is this call necessary? * pm.CreateDataPermission(currentMapping.UserId, 0, ds.Id, rightType); * break; * } * } * }*/ } } #endregion security #region excel reader int packageSize = 10000; //HACK ? TaskManager.Bus[EasyUploadTaskManager.CURRENTPACKAGESIZE] = packageSize; int counter = 0; dm.CheckOutDatasetIfNot(ds.Id, GetUsernameOrDefault()); // there are cases, the dataset does not get checked out!! 
if (!dm.IsDatasetCheckedOutFor(ds.Id, GetUsernameOrDefault())) { throw new Exception(string.Format("Not able to checkout dataset '{0}' for user '{1}'!", ds.Id, GetUsernameOrDefault())); } DatasetVersion workingCopy = dm.GetDatasetWorkingCopy(ds.Id); counter++; TaskManager.Bus[EasyUploadTaskManager.CURRENTPACKAGE] = counter; //rows = reader.ReadFile(Stream, TaskManager.Bus[TaskManager.FILENAME].ToString(), oldSds, (int)id, packageSize).ToArray(); List <string> selectedDataAreaJsonArray = (List <string>)TaskManager.Bus[EasyUploadTaskManager.SHEET_DATA_AREA]; string selectedHeaderAreaJsonArray = TaskManager.Bus[EasyUploadTaskManager.SHEET_HEADER_AREA].ToString(); List <int[]> areaDataValuesList = new List <int[]>(); foreach (string area in selectedDataAreaJsonArray) { areaDataValuesList.Add(JsonConvert.DeserializeObject <int[]>(area)); } int[] areaHeaderValues = JsonConvert.DeserializeObject <int[]>(selectedHeaderAreaJsonArray); Orientation orientation = 0; switch (TaskManager.Bus[EasyUploadTaskManager.SHEET_FORMAT].ToString()) { case "LeftRight": orientation = Orientation.rowwise; break; case "Matrix": //orientation = Orientation.matrix; break; default: orientation = Orientation.columnwise; break; } String worksheetUri = null; //Get the Uri to identify the correct worksheet if (TaskManager.Bus.ContainsKey(EasyUploadTaskManager.ACTIVE_WOKSHEET_URI)) { worksheetUri = TaskManager.Bus[EasyUploadTaskManager.ACTIVE_WOKSHEET_URI].ToString(); } int batchSize = (new Object()).GetUnitOfWork().PersistenceManager.PreferredPushSize; int batchnr = 1; foreach (int[] areaDataValues in areaDataValuesList) { //First batch starts at the start of the current data area int currentBatchStartRow = areaDataValues[0] + 1; while (currentBatchStartRow <= areaDataValues[2] + 1) //While the end of the current data area has not yet been reached { //Create a new reader each time because the reader saves ALL tuples it read and therefore the batch processing wouldn't work EasyUploadExcelReader reader = new EasyUploadExcelReader(); // open file Stream = reader.Open(TaskManager.Bus[EasyUploadTaskManager.FILEPATH].ToString()); //End row is start row plus batch size int currentBatchEndRow = currentBatchStartRow + batchSize; //Set the indices for the reader EasyUploadFileReaderInfo fri = new EasyUploadFileReaderInfo { DataStartRow = currentBatchStartRow, //End row is either at the end of the batch or the end of the marked area //DataEndRow = (currentBatchEndRow > areaDataValues[2] + 1) ? 
areaDataValues[2] + 1 : currentBatchEndRow, DataEndRow = Math.Min(currentBatchEndRow, areaDataValues[2] + 1), //Column indices as marked in a previous step DataStartColumn = areaDataValues[1] + 1, DataEndColumn = areaDataValues[3] + 1, //Header area as marked in a previous step VariablesStartRow = areaHeaderValues[0] + 1, VariablesStartColumn = areaHeaderValues[1] + 1, VariablesEndRow = areaHeaderValues[2] + 1, VariablesEndColumn = areaHeaderValues[3] + 1, Offset = areaDataValues[1], Orientation = orientation }; //Set variable identifiers because they might differ from the variable names in the file reader.setSubmittedVariableIdentifiers(identifiers); //Read the rows and convert them to DataTuples rows = reader.ReadFile(Stream, TaskManager.Bus[EasyUploadTaskManager.FILENAME].ToString(), fri, sds, (int)datasetId, worksheetUri); //After reading the rows, add them to the dataset if (rows != null) { dm.EditDatasetVersion(workingCopy, rows.ToList(), null, null); } //Close the Stream so the next ExcelReader can open it again Stream.Close(); //Debug information int lines = (areaDataValues[2] + 1) - (areaDataValues[0] + 1); int batches = lines / batchSize; batchnr++; //Next batch starts after the current one currentBatchStartRow = currentBatchEndRow + 1; } } #endregion dm.CheckInDataset(ds.Id, "upload data from upload wizard", GetUsernameOrDefault()); //Reindex search if (this.IsAccessibale("DDM", "SearchIndex", "ReIndexSingle")) { this.Run("DDM", "SearchIndex", "ReIndexSingle", new RouteValueDictionary() { { "id", datasetId } }); } TaskManager.AddToBus(EasyUploadTaskManager.DATASET_ID, ds.Id); return(temp); } } catch (Exception ex) { temp.Add(new Error(ErrorType.Other, "An error occurred during the upload. " + "Please try again later. If this problem keeps occurring, please contact your administrator.")); return(temp); } finally { dsm.Dispose(); dm.Dispose(); dam.Dispose(); //sm.Dispose(); entityPermissionManager.Dispose(); } }
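// --- Editor's note (added): the wizard methods in this file all repeat the same check-out / edit /
// check-in lifecycle around DatasetManager. The sketch below condenses that pattern; it is illustrative
// only - the method name and comment strings are not part of the original source, while the
// DatasetManager calls and the (created, edited, deleted) parameter order of EditDatasetVersion are
// taken from the code above.
private void AppendTuplesSketch(long datasetId, string username, List<DataTuple> newTuples)
{
    using (var dm = new DatasetManager())
    {
        // CheckOutDatasetIfNot is preferred above because a plain CheckOutDataset sometimes fails silently
        dm.CheckOutDatasetIfNot(datasetId, username);
        if (!dm.IsDatasetCheckedOutFor(datasetId, username))
            throw new Exception(string.Format("Not able to checkout dataset '{0}' for user '{1}'!", datasetId, username));

        DatasetVersion workingCopy = dm.GetDatasetWorkingCopy(datasetId);
        dm.EditDatasetVersion(workingCopy, newTuples, null, null); // created, edited, deleted tuples
        dm.CheckInDataset(datasetId, "appended tuples", username); // persists a new version
    }
}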
/// <summary> /// Submit a Dataset based on the information /// in the CreateTaskManager /// </summary> public long SubmitDataset(bool valid) { #region create dataset DatasetManager dm = new DatasetManager(); DataStructureManager dsm = new DataStructureManager(); ResearchPlanManager rpm = new ResearchPlanManager(); XmlDatasetHelper xmlDatasetHelper = new XmlDatasetHelper(); string title = ""; long datasetId = 0; bool newDataset = true; try { TaskManager = (CreateTaskmanager)Session["CreateDatasetTaskmanager"]; if (TaskManager.Bus.ContainsKey(CreateTaskmanager.DATASTRUCTURE_ID) && TaskManager.Bus.ContainsKey(CreateTaskmanager.RESEARCHPLAN_ID) && TaskManager.Bus.ContainsKey(CreateTaskmanager.METADATASTRUCTURE_ID)) { // for a new dataset if (!TaskManager.Bus.ContainsKey(CreateTaskmanager.ENTITY_ID)) { long datastructureId = Convert.ToInt64(TaskManager.Bus[CreateTaskmanager.DATASTRUCTURE_ID]); long researchPlanId = Convert.ToInt64(TaskManager.Bus[CreateTaskmanager.RESEARCHPLAN_ID]); long metadataStructureId = Convert.ToInt64(TaskManager.Bus[CreateTaskmanager.METADATASTRUCTURE_ID]); DataStructure dataStructure = dsm.StructuredDataStructureRepo.Get(datastructureId); //if datastructure is not a structured one if (dataStructure == null) { dataStructure = dsm.UnStructuredDataStructureRepo.Get(datastructureId); } ResearchPlan rp = rpm.Repo.Get(researchPlanId); MetadataStructureManager msm = new MetadataStructureManager(); MetadataStructure metadataStructure = msm.Repo.Get(metadataStructureId); var ds = dm.CreateEmptyDataset(dataStructure, rp, metadataStructure); datasetId = ds.Id; // add security if (GetUsernameOrDefault() != "DEFAULT") { EntityPermissionManager entityPermissionManager = new EntityPermissionManager(); entityPermissionManager.Create <User>(GetUsernameOrDefault(), "Dataset", typeof(Dataset), ds.Id, Enum.GetValues(typeof(RightType)).Cast <RightType>().ToList()); } } else { datasetId = Convert.ToInt64(TaskManager.Bus[CreateTaskmanager.ENTITY_ID]); newDataset = false; } TaskManager = (CreateTaskmanager)Session["CreateDatasetTaskmanager"]; if (dm.IsDatasetCheckedOutFor(datasetId, GetUsernameOrDefault()) || dm.CheckOutDataset(datasetId, GetUsernameOrDefault())) { DatasetVersion workingCopy = dm.GetDatasetWorkingCopy(datasetId); if (TaskManager.Bus.ContainsKey(CreateTaskmanager.METADATA_XML)) { XDocument xMetadata = (XDocument)TaskManager.Bus[CreateTaskmanager.METADATA_XML]; workingCopy.Metadata = Xml.Helpers.XmlWriter.ToXmlDocument(xMetadata); } //set status if (workingCopy.StateInfo == null) { workingCopy.StateInfo = new Vaiona.Entities.Common.EntityStateInfo(); } if (valid) { workingCopy.StateInfo.State = DatasetStateInfo.Valid.ToString(); } else { workingCopy.StateInfo.State = DatasetStateInfo.NotValid.ToString(); } title = xmlDatasetHelper.GetInformationFromVersion(workingCopy.Id, NameAttributeValues.title); if (string.IsNullOrEmpty(title)) { title = "No Title available."; } TaskManager.AddToBus(CreateTaskmanager.ENTITY_TITLE, title);//workingCopy.Metadata.SelectNodes("Metadata/Description/Description/Title/Title")[0].InnerText); TaskManager.AddToBus(CreateTaskmanager.ENTITY_ID, datasetId); dm.EditDatasetVersion(workingCopy, null, null, null); dm.CheckInDataset(datasetId, "Metadata was submitted.", GetUsernameOrDefault(), ViewCreationBehavior.None); //add to index // ToDo check which SearchProvider it is, default luceneprovider // BUG: invalid call to ddm method // TODO: modularity -> call DDM Reindex /* * <Export tag="internalApi" id="SearchIndex" * title="Reindex Search"
description="Reindex Search" icon="" * controller="SearchIndex" action="Get" * extends="" /> */ // WORKAROUND: do not reindex //ISearchProvider provider = IoCFactory.Container.ResolveForSession<ISearchProvider>() as ISearchProvider; //provider?.UpdateSingleDatasetIndex(datasetId, IndexingAction.CREATE); if (this.IsAccessibale("DDM", "SearchIndex", "ReIndexSingle")) { var x = this.Run("DDM", "SearchIndex", "ReIndexSingle", new RouteValueDictionary() { { "id", datasetId } }); } LoggerFactory.LogData(datasetId.ToString(), typeof(Dataset).Name, Vaiona.Entities.Logging.CrudState.Created); if (newDataset) { var es = new EmailService(); es.Send(MessageHelper.GetCreateDatasetHeader(), MessageHelper.GetCreateDatasetMessage(datasetId, title, GetUsernameOrDefault()), ConfigurationManager.AppSettings["SystemEmail"] ); } else { var es = new EmailService(); es.Send(MessageHelper.GetUpdateDatasetHeader(), MessageHelper.GetUpdateDatasetMessage(datasetId, title, GetUsernameOrDefault()), ConfigurationManager.AppSettings["SystemEmail"] ); } } return(datasetId); } } catch (Exception ex) { var es = new EmailService(); es.Send(MessageHelper.GetUpdateDatasetHeader(), ex.Message, ConfigurationManager.AppSettings["SystemEmail"] ); } finally { dm.Dispose(); rpm.Dispose(); dsm.Dispose(); } #endregion create dataset return(-1); }
public void uploadFiles(IEnumerable <HttpPostedFileBase> attachments, long datasetId, String description) { var fileNames = ""; using (var dm = new DatasetManager()) { var dataset = dm.GetDataset(datasetId); // var datasetVersion = dm.GetDatasetLatestVersion(dataset); DatasetVersion latestVersion = dm.GetDatasetLatestVersion(datasetId); string status = DatasetStateInfo.NotValid.ToString(); if (latestVersion.StateInfo != null) { status = latestVersion.StateInfo.State; } if (dm.IsDatasetCheckedOutFor(datasetId, GetUsernameOrDefault()) || dm.CheckOutDataset(datasetId, GetUsernameOrDefault())) { DatasetVersion datasetVersion = dm.GetDatasetWorkingCopy(datasetId); //set StateInfo of the previous version if (datasetVersion.StateInfo == null) { datasetVersion.StateInfo = new Vaiona.Entities.Common.EntityStateInfo() { State = status }; } else { datasetVersion.StateInfo.State = status; } foreach (var file in attachments) { var fileName = Path.GetFileName(file.FileName); fileNames += fileName + ","; var dataPath = AppConfiguration.DataPath; if (!Directory.Exists(Path.Combine(dataPath, "Datasets", datasetId.ToString(), "Attachments"))) { Directory.CreateDirectory(Path.Combine(dataPath, "Datasets", datasetId.ToString(), "Attachments")); } var destinationPath = Path.Combine(dataPath, "Datasets", datasetId.ToString(), "Attachments", fileName); file.SaveAs(destinationPath); AddFileInContentDiscriptor(datasetVersion, fileName, description); } //set modification datasetVersion.ModificationInfo = new EntityAuditInfo() { Performer = GetUsernameOrDefault(), Comment = "Attachment", ActionType = AuditActionType.Create }; string filenameList = string.Join(", ", attachments.Select(f => f.FileName).ToArray()); dm.EditDatasetVersion(datasetVersion, null, null, null); dm.CheckInDataset(dataset.Id, filenameList, GetUsernameOrDefault(), ViewCreationBehavior.None); } } }
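// --- Editor's note (added): uploadFiles stores attachments under <DataPath>/Datasets/<id>/Attachments.
// A small sketch of that path convention, assuming only System.IO and the AppConfiguration.DataPath
// setting used above; the helper name is illustrative.
private static string GetAttachmentPath(long datasetId, string fileName)
{
    string dir = Path.Combine(AppConfiguration.DataPath, "Datasets", datasetId.ToString(), "Attachments");
    if (!Directory.Exists(dir))
    {
        Directory.CreateDirectory(dir); // created lazily on the first attachment upload
    }
    return Path.Combine(dir, fileName);
}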
//[MeasurePerformance] //temporary solution: norman //For original solution, look into Aquadiva Code public List <Error> FinishUpload(EasyUploadTaskManager taskManager) { DataStructureManager dsm = new DataStructureManager(); DatasetManager dm = new DatasetManager(); DataContainerManager dam = new DataContainerManager(); //SubjectManager sm = new SubjectManager(); EntityPermissionManager entityPermissionManager = new EntityPermissionManager(); List <Error> temp = new List <Error>(); try { using (IUnitOfWork unitOfWork = this.GetUnitOfWork()) { // initialize all necessary manager DataTuple[] rows = null; //First, try to validate - if there are errors, return immediately string JsonArray = TaskManager.Bus[EasyUploadTaskManager.SHEET_JSON_DATA].ToString(); List <Error> ValidationErrors = ValidateRows(JsonArray); if (ValidationErrors.Count != 0) { temp.AddRange(ValidationErrors); return(temp); } string timestamp = DateTime.UtcNow.ToString("r"); string title = Convert.ToString(TaskManager.Bus[EasyUploadTaskManager.FILENAME]); if (TaskManager.Bus.ContainsKey(EasyUploadTaskManager.DESCRIPTIONTITLE)) { string tmp = Convert.ToString(TaskManager.Bus[EasyUploadTaskManager.DESCRIPTIONTITLE]); if (!String.IsNullOrWhiteSpace(tmp)) { title = Convert.ToString(TaskManager.Bus[EasyUploadTaskManager.DESCRIPTIONTITLE]); } } StructuredDataStructure sds = null; List <VariableIdentifier> identifiers = new List <VariableIdentifier>(); //Used in Excel reader //Try to find an exact matching datastructure Boolean foundReusableDataStructure = false; List <RowModel> headers = (List <RowModel>)TaskManager.Bus[EasyUploadTaskManager.ROWS]; //For some reason, MappedHeaders might be in a different order in this list than what's indicated by its IDs - to prevent mismatching, sort the headers headers.Sort((m1, m2) => m1.Index.CompareTo(m2.Index)); List <StructuredDataStructure> allDatastructures = dsm.StructuredDataStructureRepo.Get().ToList(); foreach (StructuredDataStructure existingStructure in allDatastructures) { if (!foundReusableDataStructure) { //For now a datastructure is considered an exact match if it contains variables with //the same names (labels), datatypes and units in the correct order List <Variable> variablesOfExistingStructure = existingStructure.Variables.ToList(); foundReusableDataStructure = true; if (variablesOfExistingStructure.Count != headers.Count) { foundReusableDataStructure = false; } else { for (int i = 0; i < variablesOfExistingStructure.Count; i++) { Variable exVar = variablesOfExistingStructure.ElementAt(i); RowModel currentHeader = headers.ElementAt(i); if (exVar.Label != currentHeader.Name || exVar.Unit.Id != currentHeader.SelectedUnit.UnitId || exVar.DataAttribute.DataType.Id != currentHeader.SelectedDataType.DataTypeId) { foundReusableDataStructure = false; } } } if (foundReusableDataStructure) { sds = existingStructure; foreach (Variable exVar in variablesOfExistingStructure) { VariableIdentifier vi = new VariableIdentifier { name = exVar.Label, id = exVar.Id }; identifiers.Add(vi); } } } } if (!foundReusableDataStructure) { sds = dsm.CreateStructuredDataStructure(title, title + " " + timestamp, "", "", DataStructureCategory.Generic); } TaskManager.AddToBus(EasyUploadTaskManager.DATASTRUCTURE_ID, sds.Id); TaskManager.AddToBus(EasyUploadTaskManager.DATASTRUCTURE_TITLE, title + " " + timestamp); if (!TaskManager.Bus.ContainsKey(EasyUploadTaskManager.DATASET_TITLE)) { TaskManager.AddToBus(EasyUploadTaskManager.DATASET_TITLE, title); TaskManager.AddToBus(EasyUploadTaskManager.TITLE, 
title); } MetadataStructure metadataStructure = null; if (TaskManager.Bus.ContainsKey(EasyUploadTaskManager.SCHEMA)) { long metadataStructureId = Convert.ToInt64(TaskManager.Bus[EasyUploadTaskManager.SCHEMA]); metadataStructure = unitOfWork.GetReadOnlyRepository <MetadataStructure>() .Get(m => m.Id == metadataStructureId).FirstOrDefault(); } else { //Default option but shouldn't happen because previous steps can't be finished without selecting the metadata-structure metadataStructure = unitOfWork.GetReadOnlyRepository <MetadataStructure>() .Get(m => m.Name.ToLower().Contains("eml")).FirstOrDefault(); } ResearchPlan rp = unitOfWork.GetReadOnlyRepository <ResearchPlan>().Get().FirstOrDefault(); TaskManager.AddToBus(EasyUploadTaskManager.RESEARCHPLAN_ID, rp.Id); TaskManager.AddToBus(EasyUploadTaskManager.RESEARCHPLAN_TITLE, rp.Title); #region Progress Information if (TaskManager.Bus.ContainsKey(EasyUploadTaskManager.CURRENTPACKAGESIZE)) { TaskManager.Bus[EasyUploadTaskManager.CURRENTPACKAGESIZE] = 0; } else { TaskManager.Bus.Add(EasyUploadTaskManager.CURRENTPACKAGESIZE, 0); } if (TaskManager.Bus.ContainsKey(EasyUploadTaskManager.CURRENTPACKAGE)) { TaskManager.Bus[EasyUploadTaskManager.CURRENTPACKAGE] = 0; } else { TaskManager.Bus.Add(EasyUploadTaskManager.CURRENTPACKAGE, 0); } #endregion Progress Information if (!foundReusableDataStructure) { #region Set Variables and information for new DataStructure XmlDocument xmldoc = new XmlDocument(); XmlElement extraElement = xmldoc.CreateElement("extra"); XmlElement orderElement = xmldoc.CreateElement("order"); //Sorting necessary to prevent problems when inserting the tuples; OrderBy alone is a no-op, so the ordered result must be assigned back headers = headers.OrderBy(r => r.Index).ToList(); var dataTypeRepo = unitOfWork.GetReadOnlyRepository <DataType>(); var unitRepo = unitOfWork.GetReadOnlyRepository <Unit>(); var dataAttributeRepo = unitOfWork.GetReadOnlyRepository <DataAttribute>(); List <DataAttribute> allDataAttributes = dataAttributeRepo.Get().ToList(); foreach (RowModel header in headers) { int i = headers.IndexOf(header); DataType dataType = dataTypeRepo.Get(header.SelectedDataType.DataTypeId); Unit CurrentSelectedUnit = unitRepo.Get(header.SelectedUnit.UnitId); DataAttribute CurrentDataAttribute = new DataAttribute(); //If possible, map the chosen variable name, unit and datatype to an existing DataAttribute (Exact match) DataAttribute existingDataAttribute = allDataAttributes.Where(da => da.Name.ToLower().Equals(TrimAndLimitString(header.SelectedDataAttribute.Name).ToLower()) && da.Unit.Dimension == CurrentSelectedUnit.Dimension).FirstOrDefault(); if (existingDataAttribute != null) { CurrentDataAttribute = existingDataAttribute; } else { //No matching DataAttribute => Create a new one CurrentDataAttribute = dam.CreateDataAttribute(TrimAndLimitString(header.Name), header.Name, header.SelectedDataAttribute.Description, false, false, "", MeasurementScale.Categorial, DataContainerType.ReferenceType, "", dataType, CurrentSelectedUnit, null, null, null, null, null, null); } Variable newVariable = dsm.AddVariableUsage(sds, CurrentDataAttribute, true, header.Name, "", "", "", CurrentSelectedUnit); VariableIdentifier vi = new VariableIdentifier { name = newVariable.Label, id = newVariable.Id }; identifiers.Add(vi); XmlElement newVariableXml = xmldoc.CreateElement("variable"); newVariableXml.InnerText = Convert.ToString(newVariable.Id); orderElement.AppendChild(newVariableXml); } extraElement.AppendChild(orderElement); xmldoc.AppendChild(extraElement); sds.Extra = xmldoc; sds.Name = "generated import structure " + timestamp;
sds.Description = "automatically generated structured data structure by user " + GetUsernameOrDefault() + " for file " + title + " on " + timestamp; #endregion Set Variables and information for new DataStructure } Dataset ds = null; ds = dm.CreateEmptyDataset(sds, rp, metadataStructure); long datasetId = ds.Id; long sdsId = sds.Id; if (dm.IsDatasetCheckedOutFor(datasetId, GetUsernameOrDefault()) || dm.CheckOutDataset(datasetId, GetUsernameOrDefault())) { DatasetVersion dsv = dm.GetDatasetWorkingCopy(datasetId); long METADATASTRUCTURE_ID = metadataStructure.Id; XmlMetadataWriter xmlMetadatWriter = new XmlMetadataWriter(XmlNodeMode.xPath); XDocument metadataX = xmlMetadatWriter.CreateMetadataXml(METADATASTRUCTURE_ID); XmlDocument metadataXml = XmlMetadataWriter.ToXmlDocument(metadataX); dsv.Metadata = metadataXml; try { dsv.Metadata = xmlDatasetHelper.SetInformation(dsv, metadataXml, NameAttributeValues.title, title); dsv.Title = title; } catch (NullReferenceException ex) { //Reference of the title node is missing throw new NullReferenceException("The extra-field of this metadata-structure is missing the title-node-reference!"); } dm.EditDatasetVersion(dsv, null, null, null); } #region security // add security if (GetUsernameOrDefault() != "DEFAULT") { foreach (RightType rightType in Enum.GetValues(typeof(RightType)).Cast <RightType>()) { //The user gets full permissions // add security if (GetUsernameOrDefault() != "DEFAULT") { entityPermissionManager.Create <User>(GetUsernameOrDefault(), "Dataset", typeof(Dataset), ds.Id, Enum.GetValues(typeof(RightType)).Cast <RightType>().ToList()); } } } #endregion security #region excel reader int packageSize = 100000; int numberOfRows = 0; //HACK ? TaskManager.Bus[EasyUploadTaskManager.CURRENTPACKAGESIZE] = packageSize; int counter = 0; dm.CheckOutDatasetIfNot(ds.Id, GetUsernameOrDefault()); // there are cases, the dataset does not get checked out!! 
if (!dm.IsDatasetCheckedOutFor(ds.Id, GetUsernameOrDefault())) { throw new Exception(string.Format("Not able to checkout dataset '{0}' for user '{1}'!", ds.Id, GetUsernameOrDefault())); } DatasetVersion workingCopy = dm.GetDatasetWorkingCopy(ds.Id); counter++; TaskManager.Bus[EasyUploadTaskManager.CURRENTPACKAGE] = counter; //rows = reader.ReadFile(Stream, TaskManager.Bus[TaskManager.FILENAME].ToString(), oldSds, (int)id, packageSize).ToArray(); List <string> selectedDataAreaJsonArray = (List <string>)TaskManager.Bus[EasyUploadTaskManager.SHEET_DATA_AREA]; string selectedHeaderAreaJsonArray = TaskManager.Bus[EasyUploadTaskManager.SHEET_HEADER_AREA].ToString(); List <int[]> areaDataValuesList = new List <int[]>(); foreach (string area in selectedDataAreaJsonArray) { areaDataValuesList.Add(JsonConvert.DeserializeObject <int[]>(area)); } int[] areaHeaderValues = JsonConvert.DeserializeObject <int[]>(selectedHeaderAreaJsonArray); Orientation orientation = 0; switch (TaskManager.Bus[EasyUploadTaskManager.SHEET_FORMAT].ToString()) { case "LeftRight": orientation = Orientation.rowwise; break; case "Matrix": //orientation = Orientation.matrix; break; default: orientation = Orientation.columnwise; break; } String worksheetUri = null; //Get the Uri to identify the correct worksheet if (TaskManager.Bus.ContainsKey(EasyUploadTaskManager.ACTIVE_WOKSHEET_URI)) { worksheetUri = TaskManager.Bus[EasyUploadTaskManager.ACTIVE_WOKSHEET_URI].ToString(); } int batchSize = (new Object()).GetUnitOfWork().PersistenceManager.PreferredPushSize; int batchnr = 1; foreach (int[] areaDataValues in areaDataValuesList) { //First batch starts at the start of the current data area int currentBatchStartRow = areaDataValues[0] + 1; while (currentBatchStartRow <= areaDataValues[2] + 1) //While the end of the current data area has not yet been reached { //End row is start row plus batch size int currentBatchEndRow = currentBatchStartRow + batchSize; //Set the indices for the reader EasyUploadFileReaderInfo fri = new EasyUploadFileReaderInfo { DataStartRow = currentBatchStartRow, //End row is either at the end of the batch or the end of the marked area //DataEndRow = (currentBatchEndRow > areaDataValues[2] + 1) ? 
areaDataValues[2] + 1 : currentBatchEndRow, DataEndRow = Math.Min(currentBatchEndRow, areaDataValues[2] + 1), //Column indices as marked in a previous step DataStartColumn = areaDataValues[1] + 1, DataEndColumn = areaDataValues[3] + 1, //Header area as marked in a previous step VariablesStartRow = areaHeaderValues[0] + 1, VariablesStartColumn = areaHeaderValues[1] + 1, VariablesEndRow = areaHeaderValues[2] + 1, VariablesEndColumn = areaHeaderValues[3] + 1, Offset = areaDataValues[1], Orientation = orientation }; //Create a new reader each time because the reader saves ALL tuples it read and therefore the batch processing wouldn't work EasyUploadExcelReader reader = new EasyUploadExcelReader(sds, fri); // open file Stream = reader.Open(TaskManager.Bus[EasyUploadTaskManager.FILEPATH].ToString()); //Set variable identifiers because they might differ from the variable names in the file reader.setSubmittedVariableIdentifiers(identifiers); //Read the rows and convert them to DataTuples rows = reader.ReadFile(Stream, TaskManager.Bus[EasyUploadTaskManager.FILENAME].ToString(), fri, (int)datasetId, worksheetUri); //After reading the rows, add them to the dataset if (rows != null) { dm.EditDatasetVersion(workingCopy, rows.ToList(), null, null); numberOfRows += rows.Count(); } //Close the Stream so the next ExcelReader can open it again Stream.Close(); //Debug information int lines = (areaDataValues[2] + 1) - (areaDataValues[0] + 1); int batches = lines / batchSize; batchnr++; //Next batch starts after the current one currentBatchStartRow = currentBatchEndRow + 1; } } #endregion excel reader //set modification workingCopy.ModificationInfo = new EntityAuditInfo() { Performer = GetUsernameOrDefault(), Comment = "Data", ActionType = AuditActionType.Create }; dm.EditDatasetVersion(workingCopy, null, null, null); dm.CheckInDataset(ds.Id, "Import " + numberOfRows + " rows", GetUsernameOrDefault()); //Reindex search if (this.IsAccessible("DDM", "SearchIndex", "ReIndexSingle")) { this.Run("DDM", "SearchIndex", "ReIndexSingle", new RouteValueDictionary() { { "id", datasetId } }); } TaskManager.AddToBus(EasyUploadTaskManager.DATASET_ID, ds.Id); return(temp); } } catch (Exception ex) { temp.Add(new Error(ErrorType.Other, "An error occurred during the upload. " + "Please try again later. If this problem keeps occurring, please contact your administrator.")); return(temp); } finally { dsm.Dispose(); dm.Dispose(); dam.Dispose(); //sm.Dispose(); entityPermissionManager.Dispose(); } }
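// --- Editor's note (added): both FinishUpload variants walk each marked data area in windows of
// batchSize rows, clamping the last window with Math.Min. The sketch below isolates that arithmetic;
// the method itself is illustrative, while the 0-based [startRow, startCol, endRow, endCol] layout and
// the +1 conversion to 1-based reader rows are taken from the code above.
private static IEnumerable<Tuple<int, int>> BatchWindows(int[] areaDataValues, int batchSize)
{
    int start = areaDataValues[0] + 1;                  // reader rows are 1-based
    int lastRow = areaDataValues[2] + 1;                // inclusive end of the marked area
    while (start <= lastRow)
    {
        int end = Math.Min(start + batchSize, lastRow); // clamp the final window to the area end
        yield return Tuple.Create(start, end);
        start = end + 1;                                // next window starts after the current one
    }
}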
public List <Error> FinishUpload2(TaskManager taskManager) { DataStructureManager dsm = new DataStructureManager(); try { List <Error> temp = new List <Error>(); if (TaskManager.Bus.ContainsKey(TaskManager.DATASET_ID) && TaskManager.Bus.ContainsKey(TaskManager.DATASTRUCTURE_ID)) { long id = Convert.ToInt32(TaskManager.Bus[TaskManager.DATASET_ID]); long iddsd = Convert.ToInt32(TaskManager.Bus[TaskManager.DATASTRUCTURE_ID]); //datatuple list List <DataTuple> rows; DatasetManager dm = new DatasetManager(); Dataset ds = dm.GetDataset(id); DatasetVersion workingCopy = new DatasetVersion(); #region Progress Information if (TaskManager.Bus.ContainsKey(TaskManager.CURRENTPACKAGESIZE)) { TaskManager.Bus[TaskManager.CURRENTPACKAGESIZE] = 0; } else { TaskManager.Bus.Add(TaskManager.CURRENTPACKAGESIZE, 0); } if (TaskManager.Bus.ContainsKey(TaskManager.CURRENTPACKAGE)) { TaskManager.Bus[TaskManager.CURRENTPACKAGE] = 0; } else { TaskManager.Bus.Add(TaskManager.CURRENTPACKAGE, 0); } #endregion #region structured data if (TaskManager.Bus.ContainsKey(TaskManager.DATASTRUCTURE_TYPE) && TaskManager.Bus[TaskManager.DATASTRUCTURE_TYPE].Equals(DataStructureType.Structured)) { try { //Stopwatch fullTime = Stopwatch.StartNew(); //Stopwatch loadDT = Stopwatch.StartNew(); List <AbstractTuple> datatupleFromDatabase = dm.GetDatasetVersionEffectiveTuples(dm.GetDatasetLatestVersion(ds.Id)); //loadDT.Stop(); //Debug.WriteLine("Load DT From Db Time " + loadDT.Elapsed.TotalSeconds.ToString()); StructuredDataStructure sds = dsm.StructuredDataStructureRepo.Get(iddsd); dsm.StructuredDataStructureRepo.LoadIfNot(sds.Variables); #region excel reader if (TaskManager.Bus[TaskManager.EXTENTION].ToString().Equals(".xlsm")) { int packageSize = 10000; TaskManager.Bus[TaskManager.CURRENTPACKAGESIZE] = packageSize; int counter = 0; ExcelReader reader = new ExcelReader(); //loop dm.CheckOutDatasetIfNot(ds.Id, GetUsernameOrDefault()); // in some cases the dataset does not get checked out!
if (!dm.IsDatasetCheckedOutFor(ds.Id, GetUsernameOrDefault())) { throw new Exception(string.Format("Not able to checkout dataset '{0}' for user '{1}'!", ds.Id, GetUsernameOrDefault())); } workingCopy = dm.GetDatasetWorkingCopy(ds.Id); do { //Stopwatch packageTime = Stopwatch.StartNew(); counter++; TaskManager.Bus[TaskManager.CURRENTPACKAGE] = counter; // open file Stream = reader.Open(TaskManager.Bus[TaskManager.FILEPATH].ToString()); rows = reader.ReadFile(Stream, TaskManager.Bus[TaskManager.FILENAME].ToString(), sds, (int)id, packageSize); if (reader.ErrorMessages.Count > 0) { //model.ErrorList = reader.errorMessages; } else { //XXX Add packagesize to excel read function if (TaskManager.Bus.ContainsKey(TaskManager.DATASET_STATUS)) { if (TaskManager.Bus[TaskManager.DATASET_STATUS].ToString().Equals("new")) { //Stopwatch upload = Stopwatch.StartNew(); dm.EditDatasetVersion(workingCopy, rows, null, null); //Debug.WriteLine("Upload : " + counter + " Time " + upload.Elapsed.TotalSeconds.ToString()); //Debug.WriteLine("----"); } if (TaskManager.Bus[TaskManager.DATASET_STATUS].ToString().Equals("edit")) { if (rows.Count() > 0) { //Stopwatch split = Stopwatch.StartNew(); Dictionary <string, List <DataTuple> > splittedDatatuples = new Dictionary <string, List <DataTuple> >(); splittedDatatuples = uploadWizardHelper.GetSplitDatatuples2(rows, (List <long>)TaskManager.Bus[TaskManager.PRIMARY_KEYS], workingCopy, ref datatupleFromDatabase); //split.Stop(); //Debug.WriteLine("Split : " + counter + " Time " + split.Elapsed.TotalSeconds.ToString()); //Stopwatch upload = Stopwatch.StartNew(); dm.EditDatasetVersion(workingCopy, splittedDatatuples["new"], splittedDatatuples["edit"], null); // upload.Stop(); // Debug.WriteLine("Upload : " + counter + " Time " + upload.Elapsed.TotalSeconds.ToString()); // Debug.WriteLine("----"); } } } else { } } Stream.Close(); //packageTime.Stop(); //Debug.WriteLine("Package : " + counter + " packageTime Time " + packageTime.Elapsed.TotalSeconds.ToString()); } while (rows.Count() > 0); //fullTime.Stop(); //Debug.WriteLine("FullTime " + fullTime.Elapsed.TotalSeconds.ToString()); } #endregion #region ascii reader if (TaskManager.Bus[TaskManager.EXTENTION].ToString().Equals(".csv") || TaskManager.Bus[TaskManager.EXTENTION].ToString().Equals(".txt")) { // open file AsciiReader reader = new AsciiReader(); //Stream = reader.Open(TaskManager.Bus[TaskManager.FILEPATH].ToString()); //DatasetManager dm = new DatasetManager(); //Dataset ds = dm.GetDataset(id); Stopwatch totalTime = Stopwatch.StartNew(); if (dm.IsDatasetCheckedOutFor(ds.Id, GetUsernameOrDefault()) || dm.CheckOutDataset(ds.Id, GetUsernameOrDefault())) { workingCopy = dm.GetDatasetWorkingCopy(ds.Id); int packageSize = 100000; TaskManager.Bus[TaskManager.CURRENTPACKAGESIZE] = packageSize; //loop int counter = 0; do { counter++; TaskManager.Bus[TaskManager.CURRENTPACKAGE] = counter; Stream = reader.Open(TaskManager.Bus[TaskManager.FILEPATH].ToString()); rows = reader.ReadFile(Stream, TaskManager.Bus[TaskManager.FILENAME].ToString(), (AsciiFileReaderInfo)TaskManager.Bus[TaskManager.FILE_READER_INFO], sds, id, packageSize); Stream.Close(); if (reader.ErrorMessages.Count > 0) { //model.ErrorList = reader.errorMessages; } else { //model.Validated = true; Stopwatch dbTimer = Stopwatch.StartNew(); if (TaskManager.Bus.ContainsKey(TaskManager.DATASET_STATUS)) { if (TaskManager.Bus[TaskManager.DATASET_STATUS].ToString().Equals("new")) { dm.EditDatasetVersion(workingCopy, rows, null, null); } if
(TaskManager.Bus[TaskManager.DATASET_STATUS].ToString().Equals("edit")) { if (rows.Count() > 0) { Dictionary <string, List <DataTuple> > splittedDatatuples = new Dictionary <string, List <DataTuple> >(); splittedDatatuples = uploadWizardHelper.GetSplitDatatuples2(rows, (List <long>)TaskManager.Bus[TaskManager.PRIMARY_KEYS], workingCopy, ref datatupleFromDatabase); dm.EditDatasetVersion(workingCopy, splittedDatatuples["new"], splittedDatatuples["edit"], null); } } } else { if (rows.Count() > 0) { Dictionary <string, List <DataTuple> > splittedDatatuples = new Dictionary <string, List <DataTuple> >(); splittedDatatuples = uploadWizardHelper.GetSplitDatatuples2(rows, (List <long>)TaskManager.Bus[TaskManager.PRIMARY_KEYS], workingCopy, ref datatupleFromDatabase); dm.EditDatasetVersion(workingCopy, splittedDatatuples["new"], splittedDatatuples["edit"], null); } } dbTimer.Stop(); Debug.WriteLine(" db time" + dbTimer.Elapsed.TotalSeconds.ToString()); } } while (rows.Count() > 0); totalTime.Stop(); Debug.WriteLine(" Total Time " + totalTime.Elapsed.TotalSeconds.ToString()); } //Stream.Close(); } #endregion // start download generator // filepath //string path = ""; //if (workingCopy != null) //{ // path = GenerateDownloadFile(workingCopy); // dm.EditDatasetVersion(workingCopy, null, null, null); //} // ToDo: Get Comment from ui and users dm.CheckInDataset(ds.Id, "upload data from upload wizard", GetUsernameOrDefault()); LoggerFactory.LogData(id.ToString(), typeof(Dataset).Name, Vaiona.Entities.Logging.CrudState.Updated); } catch (Exception e) { temp.Add(new Error(ErrorType.Other, "Cannot upload: " + e.Message)); dm.CheckInDataset(ds.Id, "checked in but no update on data tuples", GetUsernameOrDefault(), ViewCreationBehavior.None); } finally { } } #endregion #region unstructured data if (TaskManager.Bus.ContainsKey(TaskManager.DATASTRUCTURE_TYPE) && TaskManager.Bus[TaskManager.DATASTRUCTURE_TYPE].Equals(DataStructureType.Unstructured)) { workingCopy = dm.GetDatasetLatestVersion(ds.Id); SaveFileInContentDiscriptor(workingCopy); dm.EditDatasetVersion(workingCopy, null, null, null); // ToDo: Get Comment from ui and users dm.CheckInDataset(ds.Id, "upload unstructured data", GetUsernameOrDefault(), ViewCreationBehavior.None); } #endregion } else { temp.Add(new Error(ErrorType.Dataset, "Dataset is not selected.")); } return(temp); } finally { dsm.Dispose(); } }
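// --- Editor's note (added): per package, FinishUpload2 either inserts all rows ("new") or splits them
// into inserts and updates against the tuples already in the database ("edit"). An illustrative
// condensation of those branches; GetSplitDatatuples2 and its "new"/"edit" result keys come from the
// code above, while the wrapper method and its signature are assumptions.
private void SavePackageSketch(DatasetManager dm, DatasetVersion workingCopy, List<DataTuple> rows,
                               bool isEdit, List<long> primaryKeys, ref List<AbstractTuple> tuplesFromDatabase)
{
    if (rows.Count == 0) return;
    if (!isEdit)
    {
        dm.EditDatasetVersion(workingCopy, rows, null, null); // every row is a new tuple
    }
    else
    {
        var split = uploadWizardHelper.GetSplitDatatuples2(rows, primaryKeys, workingCopy, ref tuplesFromDatabase);
        dm.EditDatasetVersion(workingCopy, split["new"], split["edit"], null);
    }
}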
//temporary solution: norman :FinishUpload2 public List <Error> FinishUpload(TaskManager taskManager) { DataStructureManager dsm = new DataStructureManager(); DatasetManager dm = new DatasetManager(); try { List <Error> temp = new List <Error>(); DatasetVersion workingCopy = new DatasetVersion(); //datatuple list List <DataTuple> rows = new List <DataTuple>(); Dataset ds = null; bool inputWasAltered = false; if (TaskManager.Bus.ContainsKey(TaskManager.DATASET_ID) && TaskManager.Bus.ContainsKey(TaskManager.DATASTRUCTURE_ID)) { long id = Convert.ToInt32(TaskManager.Bus[TaskManager.DATASET_ID]); long iddsd = Convert.ToInt32(TaskManager.Bus[TaskManager.DATASTRUCTURE_ID]); ds = dm.GetDataset(id); // Javad: Please check if the dataset does exist! //GetValues from the previous version // Status DatasetVersion latestVersion = dm.GetDatasetLatestVersion(ds); string status = DatasetStateInfo.NotValid.ToString(); if (latestVersion.StateInfo != null) { status = latestVersion.StateInfo.State; } #region Progress Information if (TaskManager.Bus.ContainsKey(TaskManager.CURRENTPACKAGESIZE)) { TaskManager.Bus[TaskManager.CURRENTPACKAGESIZE] = 0; } else { TaskManager.Bus.Add(TaskManager.CURRENTPACKAGESIZE, 0); } if (TaskManager.Bus.ContainsKey(TaskManager.CURRENTPACKAGE)) { TaskManager.Bus[TaskManager.CURRENTPACKAGE] = 0; } else { TaskManager.Bus.Add(TaskManager.CURRENTPACKAGE, 0); } #endregion #region structured data if (TaskManager.Bus.ContainsKey(TaskManager.DATASTRUCTURE_TYPE) && TaskManager.Bus[TaskManager.DATASTRUCTURE_TYPE].Equals(DataStructureType.Structured)) { string title = ""; long datasetid = ds.Id; XmlDatasetHelper xmlDatasetHelper = new XmlDatasetHelper(); title = xmlDatasetHelper.GetInformation(ds.Id, NameAttributeValues.title); try { //Stopwatch fullTime = Stopwatch.StartNew(); //Stopwatch loadDT = Stopwatch.StartNew(); List <long> datatupleFromDatabaseIds = dm.GetDatasetVersionEffectiveTupleIds(dm.GetDatasetLatestVersion(ds.Id)); //loadDT.Stop(); //Debug.WriteLine("Load DT From Db Time " + loadDT.Elapsed.TotalSeconds.ToString()); StructuredDataStructure sds = dsm.StructuredDataStructureRepo.Get(iddsd); dsm.StructuredDataStructureRepo.LoadIfNot(sds.Variables); #region excel reader if (TaskManager.Bus[TaskManager.EXTENTION].ToString().Equals(".xlsm")) { int packageSize = 10000; TaskManager.Bus[TaskManager.CURRENTPACKAGESIZE] = packageSize; int counter = 0; ExcelReader reader = new ExcelReader(); //loop dm.CheckOutDatasetIfNot(ds.Id, GetUsernameOrDefault()); // in some cases the dataset does not get checked out!
if (!dm.IsDatasetCheckedOutFor(ds.Id, GetUsernameOrDefault())) { throw new Exception(string.Format("Not able to checkout dataset '{0}' for user '{1}'!", ds.Id, GetUsernameOrDefault())); } workingCopy = dm.GetDatasetWorkingCopy(ds.Id); //set StateInfo of the previous version if (workingCopy.StateInfo == null) { workingCopy.StateInfo = new Vaiona.Entities.Common.EntityStateInfo() { State = status }; } else { workingCopy.StateInfo.State = status; } do { //Stopwatch packageTime = Stopwatch.StartNew(); counter++; TaskManager.Bus[TaskManager.CURRENTPACKAGE] = counter; // open file Stream = reader.Open(TaskManager.Bus[TaskManager.FILEPATH].ToString()); Stopwatch upload = Stopwatch.StartNew(); rows = reader.ReadFile(Stream, TaskManager.Bus[TaskManager.FILENAME].ToString(), sds, (int)id, packageSize); upload.Stop(); Debug.WriteLine("ReadFile: " + counter + " Time " + upload.Elapsed.TotalSeconds.ToString()); if (reader.ErrorMessages.Count > 0) { //model.ErrorList = reader.errorMessages; } else { //XXX Add packagesize to excel read function if (TaskManager.Bus.ContainsKey(TaskManager.DATASET_STATUS)) { if (TaskManager.Bus[TaskManager.DATASET_STATUS].ToString().Equals("new")) { upload = Stopwatch.StartNew(); dm.EditDatasetVersion(workingCopy, rows, null, null); upload.Stop(); Debug.WriteLine("EditDatasetVersion: " + counter + " Time " + upload.Elapsed.TotalSeconds.ToString()); //Debug.WriteLine("----"); } if (TaskManager.Bus[TaskManager.DATASET_STATUS].ToString().Equals("edit")) { if (rows.Count() > 0) { //Stopwatch split = Stopwatch.StartNew(); Dictionary <string, List <DataTuple> > splittedDatatuples = new Dictionary <string, List <DataTuple> >(); splittedDatatuples = uploadWizardHelper.GetSplitDatatuples(rows, (List <long>)TaskManager.Bus[TaskManager.PRIMARY_KEYS], workingCopy, ref datatupleFromDatabaseIds); //split.Stop(); //Debug.WriteLine("Split : " + counter + " Time " + split.Elapsed.TotalSeconds.ToString()); //Stopwatch upload = Stopwatch.StartNew(); dm.EditDatasetVersion(workingCopy, splittedDatatuples["new"], splittedDatatuples["edit"], null); // upload.Stop(); // Debug.WriteLine("Upload : " + counter + " Time " + upload.Elapsed.TotalSeconds.ToString()); // Debug.WriteLine("----"); } } } else { } } Stream.Close(); //packageTime.Stop(); //Debug.WriteLine("Package : " + counter + " packageTime Time " + packageTime.Elapsed.TotalSeconds.ToString()); } while (rows.Count() > 0); //fullTime.Stop(); //Debug.WriteLine("FullTime " + fullTime.Elapsed.TotalSeconds.ToString()); } #endregion #region ascii reader if (TaskManager.Bus[TaskManager.EXTENTION].ToString().Equals(".csv") || TaskManager.Bus[TaskManager.EXTENTION].ToString().Equals(".txt")) { // open file AsciiReader reader = new AsciiReader(); //Stream = reader.Open(TaskManager.Bus[TaskManager.FILEPATH].ToString()); //DatasetManager dm = new DatasetManager(); //Dataset ds = dm.GetDataset(id); Stopwatch totalTime = Stopwatch.StartNew(); if (dm.IsDatasetCheckedOutFor(ds.Id, GetUsernameOrDefault()) || dm.CheckOutDataset(ds.Id, GetUsernameOrDefault())) { workingCopy = dm.GetDatasetWorkingCopy(ds.Id); int packageSize = 100000; TaskManager.Bus[TaskManager.CURRENTPACKAGESIZE] = packageSize; //loop int counter = 0; //set StateInfo of the previous version if (workingCopy.StateInfo == null) { workingCopy.StateInfo = new Vaiona.Entities.Common.EntityStateInfo() { State = status }; } else { workingCopy.StateInfo.State = status; } do { counter++; inputWasAltered = false; TaskManager.Bus[TaskManager.CURRENTPACKAGE] = counter; Stream =
reader.Open(TaskManager.Bus[TaskManager.FILEPATH].ToString()); rows = reader.ReadFile(Stream, TaskManager.Bus[TaskManager.FILENAME].ToString(), (AsciiFileReaderInfo)TaskManager.Bus[TaskManager.FILE_READER_INFO], sds, id, packageSize); Stream.Close(); if (reader.ErrorMessages.Count > 0) { foreach (var err in reader.ErrorMessages) { temp.Add(new Error(ErrorType.Dataset, err.GetMessage())); } //return temp; } //model.Validated = true; Stopwatch dbTimer = Stopwatch.StartNew(); if (TaskManager.Bus.ContainsKey(TaskManager.DATASET_STATUS)) { if (TaskManager.Bus[TaskManager.DATASET_STATUS].ToString().Equals("new")) { dm.EditDatasetVersion(workingCopy, rows, null, null); } if (TaskManager.Bus[TaskManager.DATASET_STATUS].ToString().Equals("edit")) { if (rows.Count() > 0) { //Dictionary<string, List<DataTuple>> splittedDatatuples = new Dictionary<string, List<AbstractTuple>>(); var splittedDatatuples = uploadWizardHelper.GetSplitDatatuples(rows, (List <long>)TaskManager.Bus[TaskManager.PRIMARY_KEYS], workingCopy, ref datatupleFromDatabaseIds); dm.EditDatasetVersion(workingCopy, splittedDatatuples["new"], splittedDatatuples["edit"], null); inputWasAltered = true; } } } else { if (rows.Count() > 0) { Dictionary <string, List <DataTuple> > splittedDatatuples = new Dictionary <string, List <DataTuple> >(); splittedDatatuples = uploadWizardHelper.GetSplitDatatuples(rows, (List <long>)TaskManager.Bus[TaskManager.PRIMARY_KEYS], workingCopy, ref datatupleFromDatabaseIds); dm.EditDatasetVersion(workingCopy, splittedDatatuples["new"], splittedDatatuples["edit"], null); inputWasAltered = true; } } dbTimer.Stop(); Debug.WriteLine(" db time" + dbTimer.Elapsed.TotalSeconds.ToString()); } while (rows.Count() > 0 || inputWasAltered); totalTime.Stop(); Debug.WriteLine(" Total Time " + totalTime.Elapsed.TotalSeconds.ToString()); } //Stream.Close(); } #endregion #region contentdescriptors //remove all contentdescriptors from the old version //generatedTXT if (workingCopy.ContentDescriptors.Any(c => c.Name.Equals("generatedTXT"))) { ContentDescriptor tmp = workingCopy.ContentDescriptors.Where(c => c.Name.Equals("generatedTXT")) .FirstOrDefault(); dm.DeleteContentDescriptor(tmp); } //generatedCSV if (workingCopy.ContentDescriptors.Any(c => c.Name.Equals("generatedCSV"))) { ContentDescriptor tmp = workingCopy.ContentDescriptors.Where(c => c.Name.Equals("generatedCSV")) .FirstOrDefault(); dm.DeleteContentDescriptor(tmp); } //generated if (workingCopy.ContentDescriptors.Any(c => c.Name.Equals("generated"))) { ContentDescriptor tmp = workingCopy.ContentDescriptors.Where(c => c.Name.Equals("generated")) .FirstOrDefault(); dm.DeleteContentDescriptor(tmp); } #endregion // ToDo: Get Comment from ui and users MoveAndSaveOriginalFileInContentDiscriptor(workingCopy); dm.CheckInDataset(ds.Id, "upload data from upload wizard", GetUsernameOrDefault()); //send email var es = new EmailService(); es.Send(MessageHelper.GetUpdateDatasetHeader(), MessageHelper.GetUpdateDatasetMessage(datasetid, title, GetUsernameOrDefault()), ConfigurationManager.AppSettings["SystemEmail"] ); } catch (Exception e) { temp.Add(new Error(ErrorType.Other, "Cannot upload: " + e.Message)); var es = new EmailService(); es.Send(MessageHelper.GetErrorHeader(), "Cannot upload: " + e.Message, ConfigurationManager.AppSettings["SystemEmail"] ); } finally { } } #endregion #region unstructured data if (TaskManager.Bus.ContainsKey(TaskManager.DATASTRUCTURE_TYPE) && TaskManager.Bus[TaskManager.DATASTRUCTURE_TYPE].Equals(DataStructureType.Unstructured)) { // checkout the dataset, apply the changes, and check it in. if (dm.IsDatasetCheckedOutFor(ds.Id, GetUsernameOrDefault()) || dm.CheckOutDataset(ds.Id, GetUsernameOrDefault())) { try { workingCopy = dm.GetDatasetWorkingCopy(ds.Id); using (var unitOfWork = this.GetUnitOfWork()) { workingCopy = unitOfWork.GetReadOnlyRepository <DatasetVersion>().Get(workingCopy.Id); //set StateInfo of the previous version if (workingCopy.StateInfo == null) { workingCopy.StateInfo = new Vaiona.Entities.Common.EntityStateInfo() { State = status }; } else { workingCopy.StateInfo.State = status; } unitOfWork.GetReadOnlyRepository <DatasetVersion>().Load(workingCopy.ContentDescriptors); SaveFileInContentDiscriptor(workingCopy); } dm.EditDatasetVersion(workingCopy, null, null, null); // ToDo: Get Comment from ui and users dm.CheckInDataset(ds.Id, "upload unstructured data", GetUsernameOrDefault(), ViewCreationBehavior.None); } catch (Exception) { throw; } } } #endregion } else { temp.Add(new Error(ErrorType.Dataset, "Dataset is not selected.")); } if (temp.Count <= 0) { dm.CheckInDataset(ds.Id, "checked in but no update on data tuples", GetUsernameOrDefault(), ViewCreationBehavior.None); } else { dm.UndoCheckoutDataset(ds.Id, GetUsernameOrDefault()); } return(temp); } finally { dm.Dispose(); dsm.Dispose(); } }
public HttpResponseMessage Post([FromBody] PostApiDatasetModel dataset) { var request = Request.CreateResponse(); User user = null; string error = ""; long datasetId = 0; long researchPlanId = 1; using (DatasetManager datasetManager = new DatasetManager()) using (DataStructureManager dataStructureManager = new DataStructureManager()) using (ResearchPlanManager researchPlanManager = new ResearchPlanManager()) using (UserManager userManager = new UserManager()) using (EntityPermissionManager entityPermissionManager = new EntityPermissionManager()) using (MetadataStructureManager metadataStructureManager = new MetadataStructureManager()) { try { #region security string token = this.Request.Headers.Authorization?.Parameter; if (String.IsNullOrEmpty(token)) { request.Content = new StringContent("Bearer token does not exist."); return(request); } user = userManager.Users.Where(u => u.Token.Equals(token)).FirstOrDefault(); if (user == null) { request.Content = new StringContent("Token is not valid."); return(request); } #endregion security #region incoming values check // check incoming values if (dataset.Title == null) { error += "title is missing. "; } if (dataset.Description == null) { error += "description is missing. "; } if (dataset.MetadataStructureId == 0) { error += "metadata structure id must not be 0. "; } if (dataset.DataStructureId == 0) { error += "data structure id must not be 0. "; } if (!string.IsNullOrEmpty(error)) { request.Content = new StringContent(error); return(request); } #endregion incoming values check #region create dataset DataStructure dataStructure = dataStructureManager.StructuredDataStructureRepo.Get(dataset.DataStructureId); //if datastructure is not a structured one if (dataStructure == null) { dataStructure = dataStructureManager.UnStructuredDataStructureRepo.Get(dataset.DataStructureId); } if (dataStructure == null) { request.Content = new StringContent("A data structure with id " + dataset.DataStructureId + " does not exist."); return(request); } ResearchPlan rp = researchPlanManager.Repo.Get(researchPlanId); if (rp == null) { request.Content = new StringContent("A research plan with id " + researchPlanId + " does not exist."); return(request); } MetadataStructure metadataStructure = metadataStructureManager.Repo.Get(dataset.MetadataStructureId); if (metadataStructure == null) { request.Content = new StringContent("A metadata structure with id " + dataset.MetadataStructureId + " does not exist."); return(request); } var newDataset = datasetManager.CreateEmptyDataset(dataStructure, rp, metadataStructure); datasetId = newDataset.Id; // add security entityPermissionManager.Create <User>(user.UserName, "Dataset", typeof(Dataset), newDataset.Id, Enum.GetValues(typeof(RightType)).Cast <RightType>().ToList()); //add title and description to the metadata if (datasetManager.IsDatasetCheckedOutFor(datasetId, user.UserName) || datasetManager.CheckOutDataset(datasetId, user.UserName)) { DatasetVersion workingCopy = datasetManager.GetDatasetWorkingCopy(datasetId); XmlMetadataWriter xmlMetadataWriter = new XmlMetadataWriter(XmlNodeMode.xPath); XDocument xdoc = xmlMetadataWriter.CreateMetadataXml(dataset.MetadataStructureId); workingCopy.Metadata = XmlUtility.ToXmlDocument(xdoc); XmlDatasetHelper xmlDatasetHelper = new XmlDatasetHelper(); workingCopy.Metadata = xmlDatasetHelper.SetInformation(workingCopy, workingCopy.Metadata, NameAttributeValues.title, dataset.Title); workingCopy.Title = dataset.Title; workingCopy.Metadata =
xmlDatasetHelper.SetInformation(workingCopy, workingCopy.Metadata, NameAttributeValues.description, dataset.Description); workingCopy.Description = dataset.Description; //set modification workingCopy.ModificationInfo = new EntityAuditInfo() { Performer = user.UserName, Comment = "Metadata", ActionType = AuditActionType.Create }; datasetManager.EditDatasetVersion(workingCopy, null, null, null); datasetManager.CheckInDataset(datasetId, "Title and description were added to the dataset via the api.", user.UserName, ViewCreationBehavior.None); } request.Content = new StringContent("The dataset " + dataset.Title + " (" + datasetId + ") was successfully created."); return(request); #endregion create dataset } catch (Exception ex) { request.Content = new StringContent(ex.Message); return(request); } } }
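// --- Editor's note (added): a client-side sketch for the POST endpoint above. The JSON property names
// mirror PostApiDatasetModel and the bearer-token check in the action; the base URL, route, and token
// are placeholders, not values from the original source.
public static async Task<string> CreateDatasetViaApiSketch()
{
    using (var client = new HttpClient())
    {
        client.DefaultRequestHeaders.Authorization =
            new System.Net.Http.Headers.AuthenticationHeaderValue("Bearer", "<token>");
        var payload = new StringContent(
            "{\"Title\":\"My dataset\",\"Description\":\"created via api\",\"MetadataStructureId\":1,\"DataStructureId\":1}",
            Encoding.UTF8, "application/json");
        // route is a placeholder - substitute the instance's actual dataset endpoint
        HttpResponseMessage response = await client.PostAsync("<base-url>/api/dataset", payload);
        return await response.Content.ReadAsStringAsync(); // the action returns success or the error text here
    }
}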
public void CreateAndExpressionForQueryingTest() { var dsHelper = new DatasetHelper(); StructuredDataStructure dataStructure = dsHelper.CreateADataStructure(); dataStructure.Should().NotBeNull("Failed to meet a precondition: a data structure is required."); string var1Name = "var" + dataStructure.Variables.First().Id; string var2Name = "var" + dataStructure.Variables.Skip(1).First().Id; FilterExpression fex = BinaryFilterExpression .And( new FilterNumberItemExpression() { Field = new Field() { DataType = Utils.NH.Querying.DataType.Ineteger, Name = var1Name } , Operator = NumberOperator.Operation.GreaterThan , Value = 12 } , new FilterStringItemExpression() { Field = new Field() { DataType = Utils.NH.Querying.DataType.String, Name = var2Name } , Operator = StringOperator.Operation.EndsWith , Value = "Test" } ); fex.ToSQL().Should().Be($"(({var1Name}) > (12)) AND (({var2Name}) ILIKE ('%Test'))"); // this is to show how to apply a NOT operator on any other expression. // It can be applied on Numeric, String, Date, and any other type of expression FilterExpression notFex = UnaryFilterExpression.Not(fex); notFex.ToSQL().Should().Be($"NOT ((({var1Name}) > (12)) AND (({var2Name}) ILIKE ('%Test')))"); notFex.ToSQL().Should().Be($"NOT ({fex.ToSQL()})"); OrderByExpression orderByExpr = new OrderByExpression( new List <OrderItemExpression>() { new OrderItemExpression(var1Name), new OrderItemExpression(var2Name, SortDirection.Descending) }); orderByExpr.ToSQL().Should().Be($"{var1Name} ASC, {var2Name} DESC"); // create a dataset and test the filter, sorting, and projection long numberOfTuples = 100; var dm = new DatasetManager(); var rsm = new ResearchPlanManager(); var mdm = new MetadataStructureManager(); try { dataStructure.Should().NotBeNull("Failed to meet a precondition: a data structure is required."); var rp = dsHelper.CreateResearchPlan(); rp.Should().NotBeNull("Failed to meet a precondition: a research plan is required."); var mds = mdm.Repo.Query().First(); mds.Should().NotBeNull("Failed to meet a precondition: a metadata structure is required."); Dataset dataset = dm.CreateEmptyDataset(dataStructure, rp, mds); dataset = dsHelper.GenerateTuplesForDataset(dataset, dataStructure, numberOfTuples, "Javad"); dataset.Should().NotBeNull("The dataset tuple generation has failed!"); dm.CheckInDataset(dataset.Id, "for testing purposes 2", "Javad", ViewCreationBehavior.None); dm.SyncView(dataset.Id, ViewCreationBehavior.Create | ViewCreationBehavior.Refresh); dataset.Id.Should().BeGreaterThan(0, "Dataset was not persisted."); dataset.LastCheckIOTimestamp.Should().NotBeAfter(DateTime.UtcNow, "The dataset's timestamp is wrong."); dataset.DataStructure.Should().NotBeNull("Dataset must have a data structure."); dataset.Status.Should().Be(DatasetStatus.CheckedIn, "Dataset must be in the CheckedIn status."); dm.GetDatasetLatestVersionEffectiveTupleCount(dataset.Id).Should().Be(numberOfTuples); // pass this filter to get a subset of dataset X var dst = dm.GetLatestDatasetVersionTuples(dataset.Id, fex, null, null, 1, 10); dst.Should().NotBeNull(); dst.Rows.Count.Should().BeLessOrEqualTo(10); dm.DatasetVersionRepo.Evict(); dm.DataTupleRepo.Evict(); dm.DatasetRepo.Evict(); dm.PurgeDataset(dataset.Id, true); dsHelper.PurgeAllDataStructures(); } finally { dm.Dispose(); rsm.Dispose(); mdm.Dispose(); } }
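// --- Editor's note (added): paging through a filtered dataset with the call exercised above.
// GetLatestDatasetVersionTuples(datasetId, filter, orderBy, projection, pageNumber, pageSize) and the
// 1-based page number are taken from the test; the loop, page size, and method name are illustrative.
private static void PageThroughFilteredTuplesSketch(DatasetManager dm, long datasetId, FilterExpression fex)
{
    const int pageSize = 10;
    for (int page = 1; ; page++)
    {
        var table = dm.GetLatestDatasetVersionTuples(datasetId, fex, null, null, page, pageSize);
        if (table == null || table.Rows.Count == 0) break; // no more matching tuples
        // process table.Rows here
        if (table.Rows.Count < pageSize) break;            // short page => this was the last page
    }
}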
public void ProjectExpressionTest() { var dsHelper = new DatasetHelper(); StructuredDataStructure dataStructure = dsHelper.CreateADataStructure(); dataStructure.Should().NotBeNull("Failed to meet a precondition: a data structure is required."); string var1Name = "var" + dataStructure.Variables.First().Id; string var3Name = "var" + dataStructure.Variables.Skip(2).First().Id; //create projection expression ProjectionExpression projectionExpression = new ProjectionExpression(); projectionExpression.Items.Add(new ProjectionItemExpression() { FieldName = var1Name }); projectionExpression.Items.Add(new ProjectionItemExpression() { FieldName = var3Name }); // create a dataset and test the filter, sorting, and projection long numberOfTuples = 10; var dm = new DatasetManager(); var rsm = new ResearchPlanManager(); var mdm = new MetadataStructureManager(); try { dataStructure.Should().NotBeNull("Failed to meet a precondition: a data structure is required."); var rp = dsHelper.CreateResearchPlan(); rp.Should().NotBeNull("Failed to meet a precondition: a research plan is required."); var mds = mdm.Repo.Query().First(); mds.Should().NotBeNull("Failed to meet a precondition: a metadata structure is required."); Dataset dataset = dm.CreateEmptyDataset(dataStructure, rp, mds); dataset = dsHelper.GenerateTuplesForDataset(dataset, dataStructure, numberOfTuples, "Javad"); dataset.Should().NotBeNull("The dataset tuple generation has failed!"); dm.CheckInDataset(dataset.Id, "for testing purposes 2", "Javad", ViewCreationBehavior.None); dm.SyncView(dataset.Id, ViewCreationBehavior.Create | ViewCreationBehavior.Refresh); dataset.Id.Should().BeGreaterThan(0, "Dataset was not persisted."); dataset.LastCheckIOTimestamp.Should().NotBeAfter(DateTime.UtcNow, "The dataset's timestamp is wrong."); dataset.DataStructure.Should().NotBeNull("Dataset must have a data structure."); dataset.Status.Should().Be(DatasetStatus.CheckedIn, "Dataset must be in the CheckedIn status."); dm.GetDatasetLatestVersionEffectiveTupleCount(dataset.Id).Should().Be(numberOfTuples); // pass this filter to get a subset of dataset X var dst = dm.GetLatestDatasetVersionTuples(dataset.Id, null, null, projectionExpression, 1, 3); dst.Should().NotBeNull(); dst.Rows.Count.Should().BeLessOrEqualTo(3); dst.Columns.Count.Should().BeLessOrEqualTo(3, "Projection failed, wrong number of columns"); dm.DatasetVersionRepo.Evict(); dm.DataTupleRepo.Evict(); dm.DatasetRepo.Evict(); dm.PurgeDataset(dataset.Id, true); dsHelper.PurgeAllDataStructures(); } catch (Exception) { throw; } finally { dm.Dispose(); rsm.Dispose(); mdm.Dispose(); } }
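// --- Editor's note (added): the two tests above exercise filter, order-by, and projection separately;
// judging by the parameter positions used there, all three are optional arguments of the same call and
// can be combined. A sketch under that assumption - "var1"/"var2" stand for the generated "var<id>"
// field names, and the wrapper method is illustrative.
private static void CombinedQuerySketch(DatasetManager dm, long datasetId)
{
    FilterExpression filter = BinaryFilterExpression.And(
        new FilterNumberItemExpression() { Field = new Field() { DataType = Utils.NH.Querying.DataType.Ineteger, Name = "var1" }, Operator = NumberOperator.Operation.GreaterThan, Value = 12 },
        new FilterStringItemExpression() { Field = new Field() { DataType = Utils.NH.Querying.DataType.String, Name = "var2" }, Operator = StringOperator.Operation.EndsWith, Value = "Test" });
    OrderByExpression order = new OrderByExpression(new List<OrderItemExpression>() { new OrderItemExpression("var1") });
    ProjectionExpression projection = new ProjectionExpression();
    projection.Items.Add(new ProjectionItemExpression() { FieldName = "var1" });
    // filtered, sorted, projected first page of ten tuples
    var page1 = dm.GetLatestDatasetVersionTuples(datasetId, filter, order, projection, 1, 10);
}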
public async Task <bool> Upload() { Debug.WriteLine("start upload data"); FileStream Stream = null; DatasetVersion workingCopy = new DatasetVersion(); List <DataTuple> rows = new List <DataTuple>(); long id = _dataset.Id; string userName = _user.UserName; var es = new EmailService(); try { List <long> datatupleFromDatabaseIds = datasetManager.GetDatasetVersionEffectiveTupleIds(datasetManager.GetDatasetLatestVersion(_dataset.Id)); if (FileHelper.FileExist(_filepath) && (datasetManager.IsDatasetCheckedOutFor(id, userName) || datasetManager.CheckOutDataset(id, userName))) { workingCopy = datasetManager.GetDatasetWorkingCopy(id); //set modification workingCopy.ModificationInfo = new EntityAuditInfo() { Performer = userName, Comment = "Data", ActionType = AuditActionType.Edit }; //loop int counter = 0; bool inputWasAltered = false; do { counter++; inputWasAltered = false; Stream = reader.Open(_filepath); rows = reader.ReadFile(Stream, Path.GetFileName(_filepath), id, packageSize); Stream.Close(); // if errors exist, send email to user and stop process if (reader.ErrorMessages.Count > 0) { List <string> errorArray = new List <string>(); foreach (var e in reader.ErrorMessages) { errorArray.Add(e.GetMessage()); } //send error messages es.Send(MessageHelper.GetPushApiUploadFailHeader(_dataset.Id, _title), MessageHelper.GetPushApiUploadFailMessage(_dataset.Id, _user.UserName, errorArray.ToArray()), new List <string>() { _user.Email }, new List <string>() { ConfigurationManager.AppSettings["SystemEmail"] } ); return(false); } //Update Method -- append or update if (_uploadMethod == UploadMethod.Append) { if (rows.Count > 0) { datasetManager.EditDatasetVersion(workingCopy, rows, null, null); inputWasAltered = true; } } else if (_uploadMethod == UploadMethod.Update) { if (rows.Count() > 0) { var splittedDatatuples = uploadHelper.GetSplitDatatuples(rows, variableIds, workingCopy, ref datatupleFromDatabaseIds); datasetManager.EditDatasetVersion(workingCopy, splittedDatatuples["new"], splittedDatatuples["edit"], null); inputWasAltered = true; } } } while (rows.Count() > 0 || inputWasAltered); datasetManager.CheckInDataset(id, "via api", userName); string title = workingCopy.Title; //send email es.Send(MessageHelper.GetUpdateDatasetHeader(id), MessageHelper.GetUpdateDatasetMessage(id, title, _user.DisplayName), new List <string>() { _user.Email }, new List <string>() { ConfigurationManager.AppSettings["SystemEmail"] } ); } else { //ToDo send email to user es.Send(MessageHelper.GetPushApiUploadFailHeader(_dataset.Id, _title), MessageHelper.GetPushApiUploadFailMessage(_dataset.Id, _user.UserName, new string[] { "The temporarily stored data could not be read or the dataset is already in checkout status." }), new List <string>() { _user.Email }, new List <string>() { ConfigurationManager.AppSettings["SystemEmail"] } ); } return(true); } catch (Exception ex) { if (datasetManager.IsDatasetCheckedOutFor(id, userName)) { datasetManager.UndoCheckoutDataset(id, userName); } //ToDo send email to user es.Send(MessageHelper.GetPushApiUploadFailHeader(_dataset.Id, _title), MessageHelper.GetPushApiUploadFailMessage(_dataset.Id, _user.UserName, new string[] { ex.Message }), new List <string>() { _user.Email }, new List <string>() { ConfigurationManager.AppSettings["SystemEmail"] } ); return(false); } finally { Debug.WriteLine("end of upload"); } }
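// --- Editor's note (added): the Append/Update dispatch inside Upload, reduced to its core. Append
// treats every row as new; Update splits rows into "new" and "edit" against the tuple ids already
// stored (uploadHelper.GetSplitDatatuples above). The wrapper method and its name are illustrative.
private void ApplyRowsSketch(List<DataTuple> rows, DatasetVersion workingCopy, ref List<long> existingTupleIds)
{
    if (rows.Count == 0) return;
    if (_uploadMethod == UploadMethod.Append)
    {
        datasetManager.EditDatasetVersion(workingCopy, rows, null, null); // plain insert
    }
    else // UploadMethod.Update
    {
        var split = uploadHelper.GetSplitDatatuples(rows, variableIds, workingCopy, ref existingTupleIds);
        datasetManager.EditDatasetVersion(workingCopy, split["new"], split["edit"], null);
    }
}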