//[MeasurePerformance]
/// <summary>
/// Converts the primary (tuple) data of a dataset version into a <see cref="DataTable"/>.
/// Header and body are only built when the version has effective tuples and a
/// structured data structure; otherwise an empty, named table is returned.
/// </summary>
/// <param name="datasetManager">manager used to resolve the effective tuple ids</param>
/// <param name="datasetVersion">version whose primary data is converted</param>
/// <param name="tableName">optional table name; defaults to "Primary data table"</param>
/// <param name="useLabelsAsColumnNames">when true, variable labels are used as column names</param>
/// <returns>a DataTable holding the version's primary data (possibly empty)</returns>
public static DataTable ConvertPrimaryDataToDatatable(DatasetManager datasetManager, DatasetVersion datasetVersion, string tableName = "", bool useLabelsAsColumnNames = false)
{
    // Consistency fix: dispose the DataStructureManager via "using" like
    // ConvertDatasetVersion does, instead of a manual try/finally.
    using (DataStructureManager dsm = new DataStructureManager())
    {
        DataTable dt = new DataTable();
        if (string.IsNullOrEmpty(tableName))
        {
            dt.TableName = "Primary data table";
        }
        else
        {
            dt.TableName = tableName;
        }

        StructuredDataStructure sds = dsm.StructuredDataStructureRepo.Get(datasetVersion.Dataset.DataStructure.Id);
        var tupleIds = datasetManager.GetDatasetVersionEffectiveTupleIds(datasetVersion);

        // Only build content when there is both data and a structure to describe it.
        if (tupleIds != null && tupleIds.Count > 0 && sds != null)
        {
            buildTheHeader(sds, useLabelsAsColumnNames, dt);
            buildTheBody(datasetManager, tupleIds, dt, sds, useLabelsAsColumnNames);
        }

        return dt;
    }
}
/// <summary>
/// Builds a <see cref="DataTable"/> from the primary data of the given dataset version.
/// The table stays empty (header and body are skipped) when the version has no
/// effective tuples or no structured data structure.
/// </summary>
/// <param name="datasetManager">manager used to resolve the effective tuple ids</param>
/// <param name="datasetVersion">version whose primary data is converted</param>
/// <param name="tableName">optional table name; defaults to "Primary data table"</param>
/// <returns>the resulting data table</returns>
public DataTable ConvertDatasetVersion(DatasetManager datasetManager, DatasetVersion datasetVersion, string tableName = "")
{
    // Name the result up front; fall back to the default when no name was supplied.
    DataTable result = new DataTable();
    result.TableName = string.IsNullOrEmpty(tableName) ? "Primary data table" : tableName;

    using (DataStructureManager structureManager = new DataStructureManager())
    {
        StructuredDataStructure structure = structureManager.StructuredDataStructureRepo.Get(datasetVersion.Dataset.DataStructure.Id);
        var effectiveTupleIds = datasetManager.GetDatasetVersionEffectiveTupleIds(datasetVersion);

        bool hasContent = effectiveTupleIds != null && effectiveTupleIds.Count > 0 && structure != null;
        if (hasContent)
        {
            buildTheHeader(structure, result);
            buildTheBody(datasetManager, effectiveTupleIds, result, structure);
        }
    }

    return result;
}
/// <summary>
/// When every existing data tuple is re-submitted as an edited tuple,
/// GetSplitDatatuples must classify all of them as "edit" and none as "new".
/// </summary>
[TestCase(2)] // primary key as double
public void GetSplitDatatuples_AllDataTuplesEdited_SameNumberOfDatatuples(int primaryKeyIndex)
{
    Dataset dataset;
    DatasetVersion latest;
    List<DataTuple> incoming = new List<DataTuple>();
    int expectedCount = 0;
    List<long> datatupleFromDatabaseIds = new List<long>();

    using (DatasetManager datasetManager = new DatasetManager())
    {
        //Arrange
        dataset = datasetManager.GetDataset(datasetId);
        latest = datasetManager.GetDatasetLatestVersion(datasetId);
        datatupleFromDatabaseIds = datasetManager.GetDatasetVersionEffectiveTupleIds(latest);

        //get updated tuples as incoming datatuples
        incoming = dsHelper.GetUpdatedDatatuples(latest, dataset.DataStructure as StructuredDataStructure, datasetManager);

        //because all datatuples were updated, the incoming count should equal the existing one
        expectedCount = incoming.Count;
    }

    // Select the primary key variable by position.
    // variable order in the test structure:
    // var 1 = int, var 2 = string, var 3 = double, var 4 = boolean, var 5 = datetime
    List<long> primaryKeys = new List<long>();
    List<long> varIds = ((StructuredDataStructure)dataset.DataStructure).Variables.Select(v => v.Id).ToList();
    primaryKeys.Add(varIds.ElementAt(primaryKeyIndex));

    //Act
    // NOTE: the old version wrapped this in try { ... } catch (Exception ex) { throw ex; },
    // which only destroyed the stack trace; the catch was removed.
    UploadHelper uploadhelper = new UploadHelper();
    Dictionary<string, List<DataTuple>> splittedDatatuples =
        uploadhelper.GetSplitDatatuples(incoming, primaryKeys, null, ref datatupleFromDatabaseIds);

    //Assert
    int newCount = splittedDatatuples["new"].Count;
    int editCount = splittedDatatuples["edit"].Count;

    Assert.That(newCount, Is.EqualTo(0));
    Assert.That(editCount, Is.EqualTo(expectedCount));
}
/// <summary>
/// With a combined primary key (variables 0 and 2) and exactly one tuple edited,
/// GetSplitDatatuples must report one "edit" tuple and zero "new" tuples.
/// </summary>
public void GetSplitDatatuples_CombinedPrimaryKeyOneUpdatedDataTuple_ExpectedNumberOfEditDatatuples()
{
    Dataset dataset;
    DatasetVersion latest;
    List<DataTuple> incoming = new List<DataTuple>();
    List<long> datatupleFromDatabaseIds = new List<long>();

    //setup the primary key combination (indexes into the variable list)
    List<int> primaryKeysIndex = new List<int> { 0, 2 };
    List<long> primaryKeyIds = new List<long>();

    using (DatasetManager datasetManager = new DatasetManager())
    {
        //Arrange
        dataset = datasetManager.GetDataset(datasetId);
        latest = datasetManager.GetDatasetLatestVersion(datasetId);
        datatupleFromDatabaseIds = datasetManager.GetDatasetVersionEffectiveTupleIds(latest);

        // load every existing tuple as an incoming tuple
        foreach (var dtid in datatupleFromDatabaseIds)
        {
            var datatuple = datasetManager.DataTupleRepo.Get(dtid);
            datatuple.Materialize();
            incoming.Add(datatuple);
        }

        //update the last datatuple's text value so exactly one tuple differs
        dsHelper.GetUpdatedDatatuple(incoming.Last(), 1);

        //get variable ids of the primary key combination
        List<long> allVarIds = ((StructuredDataStructure)dataset.DataStructure).Variables.Select(v => v.Id).ToList();
        foreach (int pkIndex in primaryKeysIndex)
        {
            primaryKeyIds.Add(allVarIds.ElementAt(pkIndex));
        }

        //Act
        UploadHelper uploadhelper = new UploadHelper();
        Dictionary<string, List<DataTuple>> splittedDatatuples =
            uploadhelper.GetSplitDatatuples(incoming, primaryKeyIds, null, ref datatupleFromDatabaseIds);

        //Assert
        Assert.That(splittedDatatuples["new"].Count, Is.EqualTo(0));
        Assert.That(splittedDatatuples["edit"].Count, Is.EqualTo(1));
    }
}
/// <summary>
/// MVC action: for every effective data tuple of the latest version of dataset
/// <paramref name="id"/>, rewrites the value of the DateTime variable
/// <paramref name="variableid"/> via the <c>flip</c> helper and persists the
/// tuple when the helper reports a change. Redirects to Index afterwards.
/// (The exact transformation — presumably swapping day/month — lives in
/// <c>flip()</c> and is not visible here; TODO confirm.)
/// </summary>
/// <param name="id">dataset id</param>
/// <param name="variableid">id of the DateTime variable whose values are flipped</param>
/// <returns>redirect to the Index action</returns>
public ActionResult FlipDateTime(long id, long variableid)
{
    DatasetManager datasetManager = new DatasetManager();
    try
    {
        DatasetVersion dsv = datasetManager.GetDatasetLatestVersion(id);
        IEnumerable <long> datatupleIds = datasetManager.GetDatasetVersionEffectiveTupleIds(dsv);
        foreach (var tid in datatupleIds)
        {
            // Load and materialize the tuple so VariableValues are populated.
            DataTuple dataTuple = datasetManager.DataTupleRepo.Get(tid);
            dataTuple.Materialize();
            bool needUpdate = false;
            foreach (var vv in dataTuple.VariableValues)
            {
                string systemType = vv.DataAttribute.DataType.SystemType;
                // Only touch values of the requested variable when its declared type is DateTime.
                if (systemType.Equals(typeof(DateTime).Name) && vv.VariableId.Equals(variableid))
                {
                    string value = vv.Value.ToString();
                    // flip() reports via the out parameter whether the tuple must be saved.
                    // NOTE(review): if several VariableValues matched, only the last
                    // call's out-value would decide the update — confirm at most one matches.
                    vv.Value = flip(value, out needUpdate);
                }
            }
            if (needUpdate)
            {
                // Dematerialize writes the in-memory values back before persisting.
                dataTuple.Dematerialize();
                datasetManager.UpdateDataTuple(dataTuple);
            }
        }
    }
    catch (Exception ex)
    {
        // NOTE(review): all exceptions are silently swallowed; the user is
        // redirected to Index with no indication of failure — consider logging.
    }
    finally
    {
        datasetManager.Dispose();
    }
    return(RedirectToAction("Index"));
}
/// <summary>
/// End-to-end check: editing a version with all tuples re-submitted as updates
/// must leave the effective tuple count unchanged after check-in.
/// </summary>
[TestCase(2)] // primary key as double
public void EditDatasetVersion_UpdateAllDataTuples_SameNumberOfDatatuples(int primaryKeyIndex)
{
    Dataset dataset;
    DatasetVersion latest;
    List<DataTuple> incoming = new List<DataTuple>();
    int count = 0;
    int expectedCount = 0;
    List<long> datatupleFromDatabaseIds = new List<long>();

    using (DatasetManager datasetManager = new DatasetManager())
    {
        //Arrange
        dataset = datasetManager.GetDataset(datasetId);
        latest = datasetManager.GetDatasetLatestVersion(datasetId);
        datatupleFromDatabaseIds = datasetManager.GetDatasetVersionEffectiveTupleIds(latest);

        //get updated tuples as incoming datatuples
        incoming = dsHelper.GetUpdatedDatatuples(latest, dataset.DataStructure as StructuredDataStructure, datasetManager);

        //because all datatuples were updated, the incoming count should equal the existing one
        expectedCount = incoming.Count;
    }

    using (DatasetManager datasetManager = new DatasetManager())
    {
        try
        {
            if (datasetManager.IsDatasetCheckedOutFor(datasetId, "David") || datasetManager.CheckOutDataset(datasetId, "David"))
            {
                DatasetVersion workingCopy = datasetManager.GetDatasetWorkingCopy(datasetId);

                // Select the primary key variable by position.
                // variable order in the test structure:
                // var 1 = int, var 2 = string, var 3 = double, var 4 = boolean, var 5 = datetime
                List<long> primaryKeys = new List<long>();
                List<long> varIds = ((StructuredDataStructure)workingCopy.Dataset.DataStructure).Variables.Select(v => v.Id).ToList();
                primaryKeys.Add(varIds.ElementAt(primaryKeyIndex));

                //Act
                UploadHelper uploadhelper = new UploadHelper();
                Dictionary<string, List<DataTuple>> splittedDatatuples =
                    uploadhelper.GetSplitDatatuples(incoming, primaryKeys, workingCopy, ref datatupleFromDatabaseIds);

                datasetManager.EditDatasetVersion(workingCopy, splittedDatatuples["new"], splittedDatatuples["edit"], null);
                datasetManager.CheckInDataset(datasetId, count + " rows", "David");

                //Assert
                long c = datasetManager.GetDatasetVersionEffectiveTupleCount(workingCopy);
                Assert.That(c, Is.EqualTo(expectedCount));
            }
        }
        catch (Exception)
        {
            // BUGFIX: was "throw ex;", which resets the stack trace; rethrow as-is.
            throw;
        }
    }
}
//temporary solution: norman :FinishUpload2
/// <summary>
/// Finalizes an upload-wizard run: reads the uploaded file package by package
/// (Excel ".xlsm" or ASCII ".csv"/".txt"), creates or updates the data tuples of
/// the dataset referenced on the task manager bus, refreshes the generated content
/// descriptors, checks the dataset in and notifies via email. Unstructured data is
/// stored as a file attached to the new version instead.
/// </summary>
/// <param name="taskManager">wizard task manager (state is read from TaskManager.Bus)</param>
/// <returns>list of errors; empty when the upload succeeded</returns>
public List<Error> FinishUpload(TaskManager taskManager)
{
    DataStructureManager dsm = new DataStructureManager();
    DatasetManager dm = new DatasetManager();
    try
    {
        List<Error> temp = new List<Error>();
        DatasetVersion workingCopy = new DatasetVersion();
        //datatuple list
        List<DataTuple> rows = new List<DataTuple>();
        Dataset ds = null;
        bool inputWasAltered = false;

        if (TaskManager.Bus.ContainsKey(TaskManager.DATASET_ID) && TaskManager.Bus.ContainsKey(TaskManager.DATASTRUCTURE_ID))
        {
            // BUGFIX: ids are long, so convert with ToInt64 instead of ToInt32
            // (the old code threw/overflowed for ids beyond Int32.MaxValue).
            long id = Convert.ToInt64(TaskManager.Bus[TaskManager.DATASET_ID]);
            long iddsd = Convert.ToInt64(TaskManager.Bus[TaskManager.DATASTRUCTURE_ID]);

            ds = dm.GetDataset(id); // Javad: Please check if the dataset does exists!!

            // carry the state of the previous version over to the new one
            DatasetVersion latestVersion = dm.GetDatasetLatestVersion(ds);
            string status = DatasetStateInfo.NotValid.ToString();
            if (latestVersion.StateInfo != null)
            {
                status = latestVersion.StateInfo.State;
            }

            #region Progress Informations

            // reset package progress counters on the bus
            if (TaskManager.Bus.ContainsKey(TaskManager.CURRENTPACKAGESIZE))
            {
                TaskManager.Bus[TaskManager.CURRENTPACKAGESIZE] = 0;
            }
            else
            {
                TaskManager.Bus.Add(TaskManager.CURRENTPACKAGESIZE, 0);
            }

            if (TaskManager.Bus.ContainsKey(TaskManager.CURRENTPACKAGE))
            {
                TaskManager.Bus[TaskManager.CURRENTPACKAGE] = 0;
            }
            else
            {
                TaskManager.Bus.Add(TaskManager.CURRENTPACKAGE, 0);
            }

            #endregion

            #region structured data

            if (TaskManager.Bus.ContainsKey(TaskManager.DATASTRUCTURE_TYPE) && TaskManager.Bus[TaskManager.DATASTRUCTURE_TYPE].Equals(DataStructureType.Structured))
            {
                string title = "";
                long datasetid = ds.Id;
                XmlDatasetHelper xmlDatasetHelper = new XmlDatasetHelper();
                title = xmlDatasetHelper.GetInformation(ds.Id, NameAttributeValues.title);

                try
                {
                    // tuple ids of the latest version; consumed by GetSplitDatatuples below
                    List<long> datatupleFromDatabaseIds = dm.GetDatasetVersionEffectiveTupleIds(dm.GetDatasetLatestVersion(ds.Id));

                    StructuredDataStructure sds = dsm.StructuredDataStructureRepo.Get(iddsd);
                    dsm.StructuredDataStructureRepo.LoadIfNot(sds.Variables);

                    #region excel reader

                    if (TaskManager.Bus[TaskManager.EXTENTION].ToString().Equals(".xlsm"))
                    {
                        int packageSize = 10000;
                        TaskManager.Bus[TaskManager.CURRENTPACKAGESIZE] = packageSize;
                        int counter = 0;
                        ExcelReader reader = new ExcelReader();

                        dm.CheckOutDatasetIfNot(ds.Id, GetUsernameOrDefault());
                        // there are cases, the dataset does not get checked out!!
                        if (!dm.IsDatasetCheckedOutFor(ds.Id, GetUsernameOrDefault()))
                        {
                            throw new Exception(string.Format("Not able to checkout dataset '{0}' for user '{1}'!", ds.Id, GetUsernameOrDefault()));
                        }

                        workingCopy = dm.GetDatasetWorkingCopy(ds.Id);

                        //set StateInfo of the previus version
                        if (workingCopy.StateInfo == null)
                        {
                            workingCopy.StateInfo = new Vaiona.Entities.Common.EntityStateInfo() { State = status };
                        }
                        else
                        {
                            workingCopy.StateInfo.State = status;
                        }

                        // read and persist the file package by package until a package is empty
                        do
                        {
                            counter++;
                            TaskManager.Bus[TaskManager.CURRENTPACKAGE] = counter;

                            Stream = reader.Open(TaskManager.Bus[TaskManager.FILEPATH].ToString());
                            Stopwatch upload = Stopwatch.StartNew();
                            rows = reader.ReadFile(Stream, TaskManager.Bus[TaskManager.FILENAME].ToString(), sds, (int)id, packageSize);
                            upload.Stop();
                            Debug.WriteLine("ReadFile: " + counter + " Time " + upload.Elapsed.TotalSeconds.ToString());

                            if (reader.ErrorMessages.Count > 0)
                            {
                                // NOTE(review): reader errors are currently ignored here
                                // (the old "model.ErrorList" handling was commented out).
                            }
                            else
                            {
                                if (TaskManager.Bus.ContainsKey(TaskManager.DATASET_STATUS))
                                {
                                    if (TaskManager.Bus[TaskManager.DATASET_STATUS].ToString().Equals("new"))
                                    {
                                        // new dataset: every row is a new tuple
                                        upload = Stopwatch.StartNew();
                                        dm.EditDatasetVersion(workingCopy, rows, null, null);
                                        upload.Stop();
                                        Debug.WriteLine("EditDatasetVersion: " + counter + " Time " + upload.Elapsed.TotalSeconds.ToString());
                                    }

                                    if (TaskManager.Bus[TaskManager.DATASET_STATUS].ToString().Equals("edit"))
                                    {
                                        if (rows.Count() > 0)
                                        {
                                            // split incoming rows into new and edited tuples via the primary keys
                                            Dictionary<string, List<DataTuple>> splittedDatatuples =
                                                uploadWizardHelper.GetSplitDatatuples(rows, (List<long>)TaskManager.Bus[TaskManager.PRIMARY_KEYS], workingCopy, ref datatupleFromDatabaseIds);
                                            dm.EditDatasetVersion(workingCopy, splittedDatatuples["new"], splittedDatatuples["edit"], null);
                                        }
                                    }
                                }
                            }

                            Stream.Close();
                        } while (rows.Count() > 0);
                    }

                    #endregion

                    #region ascii reader

                    if (TaskManager.Bus[TaskManager.EXTENTION].ToString().Equals(".csv") ||
                        TaskManager.Bus[TaskManager.EXTENTION].ToString().Equals(".txt"))
                    {
                        AsciiReader reader = new AsciiReader();
                        Stopwatch totalTime = Stopwatch.StartNew();

                        if (dm.IsDatasetCheckedOutFor(ds.Id, GetUsernameOrDefault()) || dm.CheckOutDataset(ds.Id, GetUsernameOrDefault()))
                        {
                            workingCopy = dm.GetDatasetWorkingCopy(ds.Id);

                            int packageSize = 100000;
                            TaskManager.Bus[TaskManager.CURRENTPACKAGESIZE] = packageSize;
                            int counter = 0;

                            //set StateInfo of the previus version
                            if (workingCopy.StateInfo == null)
                            {
                                workingCopy.StateInfo = new Vaiona.Entities.Common.EntityStateInfo() { State = status };
                            }
                            else
                            {
                                workingCopy.StateInfo.State = status;
                            }

                            do
                            {
                                counter++;
                                inputWasAltered = false;
                                TaskManager.Bus[TaskManager.CURRENTPACKAGE] = counter;

                                Stream = reader.Open(TaskManager.Bus[TaskManager.FILEPATH].ToString());
                                rows = reader.ReadFile(Stream, TaskManager.Bus[TaskManager.FILENAME].ToString(), (AsciiFileReaderInfo)TaskManager.Bus[TaskManager.FILE_READER_INFO], sds, id, packageSize);
                                Stream.Close();

                                if (reader.ErrorMessages.Count > 0)
                                {
                                    // collect reader errors but keep processing the remaining packages
                                    foreach (var err in reader.ErrorMessages)
                                    {
                                        temp.Add(new Error(ErrorType.Dataset, err.GetMessage()));
                                    }
                                }

                                Stopwatch dbTimer = Stopwatch.StartNew();

                                if (TaskManager.Bus.ContainsKey(TaskManager.DATASET_STATUS))
                                {
                                    if (TaskManager.Bus[TaskManager.DATASET_STATUS].ToString().Equals("new"))
                                    {
                                        dm.EditDatasetVersion(workingCopy, rows, null, null);
                                    }

                                    if (TaskManager.Bus[TaskManager.DATASET_STATUS].ToString().Equals("edit"))
                                    {
                                        if (rows.Count() > 0)
                                        {
                                            var splittedDatatuples = uploadWizardHelper.GetSplitDatatuples(rows, (List<long>)TaskManager.Bus[TaskManager.PRIMARY_KEYS], workingCopy, ref datatupleFromDatabaseIds);
                                            dm.EditDatasetVersion(workingCopy, splittedDatatuples["new"], splittedDatatuples["edit"], null);
                                            inputWasAltered = true;
                                        }
                                    }
                                }
                                else
                                {
                                    // no explicit status on the bus: treat the package like an edit
                                    if (rows.Count() > 0)
                                    {
                                        Dictionary<string, List<DataTuple>> splittedDatatuples =
                                            uploadWizardHelper.GetSplitDatatuples(rows, (List<long>)TaskManager.Bus[TaskManager.PRIMARY_KEYS], workingCopy, ref datatupleFromDatabaseIds);
                                        dm.EditDatasetVersion(workingCopy, splittedDatatuples["new"], splittedDatatuples["edit"], null);
                                        inputWasAltered = true;
                                    }
                                }

                                dbTimer.Stop();
                                Debug.WriteLine(" db time" + dbTimer.Elapsed.TotalSeconds.ToString());
                            } while (rows.Count() > 0 || inputWasAltered == true);

                            totalTime.Stop();
                            Debug.WriteLine(" Total Time " + totalTime.Elapsed.TotalSeconds.ToString());
                        }
                    }

                    #endregion

                    #region contentdescriptors

                    //remove all generated contentdescriptors from the old version
                    //generatedTXT
                    if (workingCopy.ContentDescriptors.Any(c => c.Name.Equals("generatedTXT")))
                    {
                        ContentDescriptor tmp = workingCopy.ContentDescriptors.Where(c => c.Name.Equals("generatedTXT")).FirstOrDefault();
                        dm.DeleteContentDescriptor(tmp);
                    }

                    //generatedCSV
                    if (workingCopy.ContentDescriptors.Any(c => c.Name.Equals("generatedCSV")))
                    {
                        ContentDescriptor tmp = workingCopy.ContentDescriptors.Where(c => c.Name.Equals("generatedCSV")).FirstOrDefault();
                        dm.DeleteContentDescriptor(tmp);
                    }

                    //generated
                    if (workingCopy.ContentDescriptors.Any(c => c.Name.Equals("generated")))
                    {
                        ContentDescriptor tmp = workingCopy.ContentDescriptors.Where(c => c.Name.Equals("generated")).FirstOrDefault();
                        dm.DeleteContentDescriptor(tmp);
                    }

                    #endregion

                    // ToDo: Get Comment from ui and users
                    MoveAndSaveOriginalFileInContentDiscriptor(workingCopy);
                    dm.CheckInDataset(ds.Id, "upload data from upload wizard", GetUsernameOrDefault());

                    //send email
                    var es = new EmailService();
                    es.Send(MessageHelper.GetUpdateDatasetHeader(),
                            MessageHelper.GetUpdateDatasetMessage(datasetid, title, GetUsernameOrDefault()),
                            ConfigurationManager.AppSettings["SystemEmail"]
                            );
                }
                catch (Exception e)
                {
                    temp.Add(new Error(ErrorType.Other, "Can not upload. : " + e.Message));
                    var es = new EmailService();
                    es.Send(MessageHelper.GetErrorHeader(),
                            "Can not upload. : " + e.Message,
                            ConfigurationManager.AppSettings["SystemEmail"]
                            );
                }
            }

            #endregion

            #region unstructured data

            if (TaskManager.Bus.ContainsKey(TaskManager.DATASTRUCTURE_TYPE) && TaskManager.Bus[TaskManager.DATASTRUCTURE_TYPE].Equals(DataStructureType.Unstructured))
            {
                // checkout the dataset, apply the changes, and check it in.
                if (dm.IsDatasetCheckedOutFor(ds.Id, GetUsernameOrDefault()) || dm.CheckOutDataset(ds.Id, GetUsernameOrDefault()))
                {
                    try
                    {
                        workingCopy = dm.GetDatasetWorkingCopy(ds.Id);

                        using (var unitOfWork = this.GetUnitOfWork())
                        {
                            workingCopy = unitOfWork.GetReadOnlyRepository<DatasetVersion>().Get(workingCopy.Id);

                            //set StateInfo of the previus version
                            if (workingCopy.StateInfo == null)
                            {
                                workingCopy.StateInfo = new Vaiona.Entities.Common.EntityStateInfo() { State = status };
                            }
                            else
                            {
                                workingCopy.StateInfo.State = status;
                            }

                            unitOfWork.GetReadOnlyRepository<DatasetVersion>().Load(workingCopy.ContentDescriptors);
                            SaveFileInContentDiscriptor(workingCopy);
                        }

                        dm.EditDatasetVersion(workingCopy, null, null, null);

                        // ToDo: Get Comment from ui and users
                        dm.CheckInDataset(ds.Id, "upload unstructured data", GetUsernameOrDefault(), ViewCreationBehavior.None);
                    }
                    catch (Exception ex)
                    {
                        // BUGFIX: was "throw ex;", which resets the stack trace; rethrow as-is.
                        throw;
                    }
                }
            }

            #endregion
        }
        else
        {
            temp.Add(new Error(ErrorType.Dataset, "Dataset is not selected."));
        }

        // BUGFIX: the final check-in/undo dereferenced ds unconditionally and
        // threw a NullReferenceException when no dataset was selected.
        if (ds != null)
        {
            if (temp.Count <= 0)
            {
                dm.CheckInDataset(ds.Id, "checked in but no update on data tuples", GetUsernameOrDefault(), ViewCreationBehavior.None);
            }
            else
            {
                dm.UndoCheckoutDataset(ds.Id, GetUsernameOrDefault());
            }
        }

        return temp;
    }
    finally
    {
        dm.Dispose();
        dsm.Dispose();
    }
}
/// <summary>
/// Tests the uniqueness of the primary key combination across all effective data
/// tuples of the latest (checked-in) version of a dataset.
/// </summary>
/// <remarks>
/// Tuples are loaded from the repository in pages of 10000 to bound memory use.
/// </remarks>
/// <seealso cref=""/>
/// <param name="datasetId">id of the dataset to check</param>
/// <param name="primaryKeys">variable ids that form the primary key</param>
/// <returns>true when every tuple has a distinct key; false on the first duplicate</returns>
////[MeasurePerformance]
public Boolean IsUnique2(long datasetId, List<long> primaryKeys)
{
    DatasetManager datasetManager = new DatasetManager();
    try
    {
        // keys seen so far; HashSet.Add returns false on a duplicate
        // (replaces the old Hashtable + catch-on-duplicate pattern)
        HashSet<string> seenKeys = new HashSet<string>();

        // load data
        Dataset dataset = datasetManager.GetDataset(datasetId);
        DatasetVersion datasetVersion;

        if (datasetManager.IsDatasetCheckedIn(datasetId))
        {
            datasetVersion = datasetManager.GetDatasetLatestVersion(datasetId);

            #region load all datatuples page by page

            int size = 10000;
            int counter = 0;

            // BUGFIX: the fetched ids used to be stored in a second, never-read
            // variable ("dataTuplesIds") while the paging loop iterated the
            // always-empty "dataTupleIds" list — the method always returned true.
            List<long> dataTupleIds = datasetManager.GetDatasetVersionEffectiveTupleIds(datasetVersion);

            List<long> currentIds;
            do
            {
                currentIds = dataTupleIds.Skip(counter * size).Take(size).ToList();

                foreach (long dtId in currentIds)
                {
                    DataTuple dt = datasetManager.DataTupleRepo.Query(d => d.Id.Equals(dtId)).FirstOrDefault();
                    string pKey = getPrimaryKeysAsStringFromXml(dt, primaryKeys);
                    if (!string.IsNullOrEmpty(pKey))
                    {
                        if (!seenKeys.Add(pKey))
                        {
                            // duplicate primary key found
                            return false;
                        }
                    }
                }

                counter++;
                // BUGFIX: the old condition (currentIds.Count() >= size * counter)
                // terminated after at most two pages; continue while a full page was read.
            } while (currentIds.Count == size);

            #endregion
        }
        else
        {
            throw new Exception("Dataset is not checked in.");
        }

        return true;
    }
    finally
    {
        datasetManager.Dispose();
    }
}
/// <summary>
/// WebAPI PUT endpoint that updates the data of an existing dataset. After token
/// and permission checks the payload is validated (columns vs. data structure,
/// row values, primary key uniqueness). Small payloads (rows * columns within the
/// configured cell limit) are applied synchronously: tuples are split into
/// new/edited sets, the version is edited, checked in and a notification mail is
/// sent. Larger payloads are handed to a background DataApiHelper task.
/// </summary>
/// <param name="data">payload: dataset id, column names, row data, primary keys</param>
/// <returns>HTTP response describing success or the specific validation failure</returns>
public async Task <HttpResponseMessage> Put([FromBody] PutDataApiModel data)
{
    var request = Request.CreateResponse();
    User user = null;
    string error = "";

    DatasetManager datasetManager = new DatasetManager();
    UserManager userManager = new UserManager();
    EntityPermissionManager entityPermissionManager = new EntityPermissionManager();
    DataStructureManager dataStructureManager = new DataStructureManager();
    ApiConfigurator apiHelper = new ApiConfigurator();

    DatasetVersion workingCopy = new DatasetVersion();
    List <DataTuple> rows = new List <DataTuple>();

    //load cell limit from apiConfig; payloads above it are processed asynchronously
    int cellLimit = 10000;
    if (apiHelper != null && apiHelper.Settings.ContainsKey(ApiConfigurator.CELLS))
    {
        Int32.TryParse(apiHelper.Settings[ApiConfigurator.CELLS], out cellLimit);
    }

    try
    {
        #region security

        // bearer token from the Authorization header identifies the user
        string token = this.Request.Headers.Authorization?.Parameter;
        if (String.IsNullOrEmpty(token))
        {
            return(Request.CreateErrorResponse(HttpStatusCode.PreconditionFailed, "Bearer token not exist."));
        }

        user = userManager.Users.Where(u => u.Token.Equals(token)).FirstOrDefault();
        if (user == null)
        {
            return(Request.CreateErrorResponse(HttpStatusCode.Unauthorized, "Token is not valid."));
        }

        //check permissions
        //entity permissions: the user needs write access to the dataset
        if (data.DatasetId > 0)
        {
            Dataset d = datasetManager.GetDataset(data.DatasetId);
            if (d == null)
            {
                return(Request.CreateErrorResponse(HttpStatusCode.PreconditionFailed, "the dataset with the id (" + data.DatasetId + ") does not exist."));
            }

            if (!entityPermissionManager.HasEffectiveRight(user.Name, typeof(Dataset), data.DatasetId, RightType.Write))
            {
                return(Request.CreateErrorResponse(HttpStatusCode.Unauthorized, "The token is not authorized to write into the dataset."));
            }
        }

        #endregion security

        #region incomming values check

        // check incomming values; all problems are collected into one message
        if (data.DatasetId == 0) { error += "dataset id should be greater then 0."; }
        //if (data.UpdateMethod == null) error += "update method is not set";
        //if (data.Count == 0) error += "count should be greater then 0. ";
        if (data.Columns == null) { error += "cloumns should not be null. "; }
        if (data.Data == null) { error += "data is empty. "; }
        if (data.PrimaryKeys == null || data.PrimaryKeys.Count() == 0) { error += "the UpdateMethod update has been selected but there are no primary keys available. "; }

        if (!string.IsNullOrEmpty(error))
        {
            return(Request.CreateErrorResponse(HttpStatusCode.PreconditionFailed, error));
        }

        #endregion incomming values check

        Dataset dataset = datasetManager.GetDataset(data.DatasetId);
        if (dataset == null)
        {
            return(Request.CreateErrorResponse(HttpStatusCode.PreconditionFailed, "Dataset not exist."));
        }

        DatasetVersion dsv = datasetManager.GetDatasetLatestVersion(dataset);
        string title = dsv.Title;

        // large payloads are processed in the background; the user is notified by mail
        if ((data.Data.Count() * data.Columns.Count()) > cellLimit)
        {
            #region async upload with big data

            // if dataste is not in the dataset
            DataApiHelper helper = new DataApiHelper(dataset, user, data, title, UploadMethod.Update);
            // NOTE(review): fire-and-forget — the task is deliberately not awaited;
            // failures surface only through the helper's own notification path.
            Task.Run(() => helper.Run());

            #endregion async upload with big data

            Debug.WriteLine("end of api call");
            return(Request.CreateResponse(HttpStatusCode.OK, "Data has been successfully received and is being processed. For larger data, as in this case, we will keep you informed by mail about the next steps."));
        }
        else
        {
            #region direct update

            var es = new EmailService();
            UploadHelper uploadHelper = new UploadHelper();

            try
            {
                //load strutcured data structure
                StructuredDataStructure dataStructure = dataStructureManager.StructuredDataStructureRepo.Get(dataset.DataStructure.Id);
                List <Error> errors = new List <Error>();

                if (dataStructure == null)
                {
                    return(Request.CreateResponse(HttpStatusCode.ExpectationFailed, "The Datastructure does not exist."));
                }

                APIDataReader reader = new APIDataReader(dataStructure, new ApiFileReaderInfo());
                List <VariableIdentifier> source = new List <VariableIdentifier>();
                reader.SetSubmitedVariableIdentifiers(data.Columns.ToList());

                #region primary key check

                //prepare primary keys ids from the exiting dataset
                //(match submitted primary key labels against the structure's variables)
                List <long> variableIds = new List <long>();
                foreach (var variable in dataStructure.Variables)
                {
                    if (data.PrimaryKeys.Any(p => p.ToLower().Equals(variable.Label.ToLower())))
                    {
                        variableIds.Add(variable.Id);
                    }
                }

                // prepare pk index list from data (column positions of the primary keys)
                int[] primaryKeyIndexes = new int[data.PrimaryKeys.Length];
                for (int i = 0; i < data.PrimaryKeys.Length; i++)
                {
                    string pk = data.PrimaryKeys[i];
                    primaryKeyIndexes[i] = data.Columns.ToList().IndexOf(pk);
                }

                //check primary with data : uniqueness, both in the stored data and the payload
                bool IsUniqueInDb = uploadHelper.IsUnique2(dataset.Id, variableIds);
                bool IsUniqueInData = uploadHelper.IsUnique(primaryKeyIndexes, data.Data);

                if (!IsUniqueInDb || !IsUniqueInData)
                {
                    StringBuilder sb = new StringBuilder("Error/s in Primary Keys selection:<br>");
                    if (!IsUniqueInDb) { sb.AppendLine("The selected key is not unique in the data in the dataset."); }
                    if (!IsUniqueInData) { sb.AppendLine("The selected key is not unique in the received data."); }
                    return(Request.CreateResponse(HttpStatusCode.ExpectationFailed, sb.ToString()));
                }

                #endregion primary key check

                #region validate datastructure

                foreach (string c in data.Columns)
                {
                    source.Add(new VariableIdentifier() { name = c });
                }

                errors = reader.ValidateComparisonWithDatatsructure(source);
                if (errors != null && errors.Count > 0)
                {
                    StringBuilder sb = new StringBuilder("The Datastructure is not valid.");
                    foreach (var e in errors) { sb.AppendLine(e.ToHtmlString()); }
                    return(Request.CreateResponse(HttpStatusCode.ExpectationFailed, sb.ToString()));
                }

                #endregion validate datastructure

                #region validate data

                errors = new List <Error>();
                // validate rows value by value against the data structure
                for (int i = 0; i < data.Data.Length; i++)
                {
                    string[] row = data.Data[i];
                    errors.AddRange(reader.ValidateRow(row.ToList(), i));
                }

                if (errors != null && errors.Count > 0)
                {
                    StringBuilder sb = new StringBuilder("The Data is not valid.");
                    foreach (var e in errors) { sb.AppendLine(e.ToHtmlString()); }
                    return(Request.CreateResponse(HttpStatusCode.ExpectationFailed, sb.ToString()));
                }

                #endregion validate data

                #region update data

                // tuple ids of the latest version; consumed by GetSplitDatatuples below
                List <long> datatupleFromDatabaseIds = datasetManager.GetDatasetVersionEffectiveTupleIds(datasetManager.GetDatasetLatestVersion(dataset.Id));

                if (datasetManager.IsDatasetCheckedOutFor(dataset.Id, user.UserName) || datasetManager.CheckOutDataset(dataset.Id, user.UserName))
                {
                    workingCopy = datasetManager.GetDatasetWorkingCopy(dataset.Id);

                    // convert the raw rows into data tuples
                    List <DataTuple> datatuples = new List <DataTuple>();
                    for (int i = 0; i < data.Data.Length; i++)
                    {
                        string[] row = data.Data[i];
                        datatuples.Add(reader.ReadRow(row.ToList(), i));
                    }

                    //Update Method -- UPDATE
                    //splite datatuples into new and updated tuples
                    if (datatuples.Count > 0)
                    {
                        var splittedDatatuples = uploadHelper.GetSplitDatatuples(datatuples, variableIds, workingCopy, ref datatupleFromDatabaseIds);
                        datasetManager.EditDatasetVersion(workingCopy, splittedDatatuples["new"], splittedDatatuples["edit"], null);
                    }

                    ////set modification audit info on the new version
                    workingCopy.ModificationInfo = new EntityAuditInfo()
                    {
                        Performer = user.UserName,
                        Comment = "Data",
                        ActionType = AuditActionType.Edit
                    };

                    datasetManager.EditDatasetVersion(workingCopy, null, null, null);
                    datasetManager.CheckInDataset(dataset.Id, data.Data.Length + " rows via api.", user.UserName);

                    //send email
                    es.Send(MessageHelper.GetUpdateDatasetHeader(dataset.Id),
                            MessageHelper.GetUpdateDatasetMessage(dataset.Id, title, user.DisplayName),
                            new List <string>() { user.Email },
                            new List <string>() { ConfigurationManager.AppSettings["SystemEmail"] }
                            );
                }

                #endregion update data

                return(Request.CreateResponse(HttpStatusCode.OK, "Data successfully uploaded."));
            }
            catch (Exception ex)
            {
                //ToDo send email to user
                es.Send(MessageHelper.GetPushApiUploadFailHeader(dataset.Id, title),
                        MessageHelper.GetPushApiUploadFailMessage(dataset.Id, user.UserName, new string[] { "Upload failed: " + ex.Message }),
                        new List <string>() { user.Email },
                        new List <string>() { ConfigurationManager.AppSettings["SystemEmail"] }
                        );

                return(Request.CreateResponse(HttpStatusCode.InternalServerError, ex.Message));
            }

            #endregion direct update
        }
    }
    finally
    {
        datasetManager.Dispose();
        entityPermissionManager.Dispose();
        dataStructureManager.Dispose();
        userManager.Dispose();
        request.Dispose();
    }
}
//temporary solution: norman :FinishUpload2
// Finalizes an upload started in the upload wizard: reads the staged file (excel or ascii)
// package-wise into data tuples, writes them into a checked-out dataset version, refreshes
// content descriptors and system metadata, checks the dataset in and notifies the user by
// email. Returns the list of errors collected along the way (empty list = success).
// NOTE(review): declared async but contains no await — presumably runs synchronously; confirm callers.
public async Task <List <Error> > FinishUpload()
{
    DataStructureManager dsm = new DataStructureManager();
    DatasetManager dm = new DatasetManager();
    IOUtility iOUtility = new IOUtility();
    List <Error> temp = new List <Error>(); // collected errors, returned to the caller
    long id = 0;
    string title = "";
    int numberOfRows = 0;        // total tuples written across all packages
    int numberOfSkippedRows = 0;
    try
    {
        DatasetVersion workingCopy = new DatasetVersion();
        //datatuple list
        List <DataTuple> rows = new List <DataTuple>();
        //Dataset ds = null;
        bool inputWasAltered = false;

        if (Bus.ContainsKey(TaskManager.DATASET_ID) && Bus.ContainsKey(TaskManager.DATASTRUCTURE_ID))
        {
            id = Convert.ToInt32(Bus[TaskManager.DATASET_ID]);
            long iddsd = Convert.ToInt32(Bus[TaskManager.DATASTRUCTURE_ID]);

            //GetValues from the previus version
            // Status
            DatasetVersion latestVersion = dm.GetDatasetLatestVersion(id);
            title = latestVersion.Title;
            string status = DatasetStateInfo.NotValid.ToString();
            if (latestVersion.StateInfo != null)
            {
                status = latestVersion.StateInfo.State;
            }

            #region Progress Informations
            // reset the progress counters on the wizard bus so the UI starts at zero
            if (Bus.ContainsKey(TaskManager.CURRENTPACKAGESIZE))
            {
                Bus[TaskManager.CURRENTPACKAGESIZE] = 0;
            }
            else
            {
                Bus.Add(TaskManager.CURRENTPACKAGESIZE, 0);
            }

            if (Bus.ContainsKey(TaskManager.CURRENTPACKAGE))
            {
                Bus[TaskManager.CURRENTPACKAGE] = 0;
            }
            else
            {
                Bus.Add(TaskManager.CURRENTPACKAGE, 0);
            }
            #endregion Progress Informations

            #region structured data
            if (Bus.ContainsKey(TaskManager.DATASTRUCTURE_TYPE) && Bus[TaskManager.DATASTRUCTURE_TYPE].Equals(DataStructureType.Structured))
            {
                long datasetid = id;
                XmlDatasetHelper xmlDatasetHelper = new XmlDatasetHelper();
                try
                {
                    // load all data tuple ids from the latest version
                    List <long> datatupleFromDatabaseIds = dm.GetDatasetVersionEffectiveTupleIds(dm.GetDatasetLatestVersion(id));

                    // load structured data structure
                    StructuredDataStructure sds = dsm.StructuredDataStructureRepo.Get(iddsd);
                    dsm.StructuredDataStructureRepo.LoadIfNot(sds.Variables);

                    #region excel reader
                    if (Bus[TaskManager.EXTENTION].ToString().Equals(".xlsm") || iOUtility.IsSupportedExcelFile(Bus[TaskManager.EXTENTION].ToString()))
                    {
                        int packageSize = 100000; // tuples read and written per loop iteration
                        Bus[TaskManager.CURRENTPACKAGESIZE] = packageSize;

                        int counter = 0;

                        //loop
                        dm.CheckOutDatasetIfNot(id, User.Name); // there are cases, the dataset does not get checked out!!
                        if (!dm.IsDatasetCheckedOutFor(id, User.Name))
                        {
                            throw new Exception(string.Format("Not able to checkout dataset '{0}' for user '{1}'!", id, User.Name));
                        }

                        workingCopy = dm.GetDatasetWorkingCopy(id);

                        //set StateInfo of the previus version
                        if (workingCopy.StateInfo == null)
                        {
                            workingCopy.StateInfo = new Vaiona.Entities.Common.EntityStateInfo()
                            {
                                State = status
                            };
                        }
                        else
                        {
                            workingCopy.StateInfo.State = status;
                        }

                        ExcelReader reader = null;
                        ExcelFileReaderInfo excelFileReaderInfo = null;

                        // reader info is only provided for plain excel files, not for .xlsm templates
                        if (iOUtility.IsSupportedExcelFile(Bus[TaskManager.EXTENTION].ToString()))
                        {
                            excelFileReaderInfo = (ExcelFileReaderInfo)Bus[TaskManager.FILE_READER_INFO];
                        }

                        reader = new ExcelReader(sds, excelFileReaderInfo);

                        do
                        {
                            counter++;
                            Bus[TaskManager.CURRENTPACKAGE] = counter;

                            //open stream
                            Stream = reader.Open(Bus[TaskManager.FILEPATH].ToString());
                            rows = new List <DataTuple>();

                            if (iOUtility.IsSupportedExcelFile(Bus[TaskManager.EXTENTION].ToString()))
                            {
                                // plain excel file: only read while still inside the data area
                                if (reader.Position < excelFileReaderInfo.DataEndRow)
                                {
                                    rows = reader.ReadFile(Stream, Bus[TaskManager.FILENAME].ToString(), (int)id, packageSize);
                                }
                            }
                            else
                            {
                                // .xlsm template file
                                rows = reader.ReadTemplateFile(Stream, Bus[TaskManager.FILENAME].ToString(), (int)id, packageSize);
                            }

                            //Debug.WriteLine("ReadFile: " + counter + " Time " + upload.Elapsed.TotalSeconds.ToString());

                            if (reader.ErrorMessages.Count > 0)
                            {
                                // NOTE(review): reader errors are silently dropped here — the package is skipped
                                // without reporting; confirm whether that is intended.
                                //model.ErrorList = reader.errorMessages;
                            }
                            else
                            {
                                //XXX Add packagesize to excel read function
                                if (Bus.ContainsKey(TaskManager.DATASET_STATUS))
                                {
                                    if (Bus[TaskManager.DATASET_STATUS].ToString().Equals("new") || ((UploadMethod)Bus[TaskManager.UPLOAD_METHOD]).Equals(UploadMethod.Append))
                                    {
                                        // new dataset or append: add all tuples of this package
                                        dm.EditDatasetVersion(workingCopy, rows, null, null);
                                        //Debug.WriteLine("EditDatasetVersion: " + counter + " Time " + upload.Elapsed.TotalSeconds.ToString());
                                        //Debug.WriteLine("----");
                                    }
                                    else if (Bus[TaskManager.DATASET_STATUS].ToString().Equals("edit"))
                                    {
                                        if (rows.Count() > 0)
                                        {
                                            // split incoming tuples into (new|edit) based on the primary keys
                                            Dictionary <string, List <DataTuple> > splittedDatatuples = new Dictionary <string, List <DataTuple> >();
                                            splittedDatatuples = uploadWizardHelper.GetSplitDatatuples(rows, (List <long>)Bus[TaskManager.PRIMARY_KEYS], workingCopy, ref datatupleFromDatabaseIds);
                                            dm.EditDatasetVersion(workingCopy, splittedDatatuples["new"], splittedDatatuples["edit"], null);
                                        }
                                    }
                                }
                                else
                                {
                                }
                            }

                            Stream?.Close();

                            //count rows
                            numberOfRows += rows.Count();
                        } while (rows.Count() > 0 && rows.Count() <= packageSize); // a short (or empty) package means the file is exhausted

                        numberOfSkippedRows = reader.NumberOSkippedfRows;
                    }
                    #endregion excel reader

                    #region ascii reader
                    if (iOUtility.IsSupportedAsciiFile(Bus[TaskManager.EXTENTION].ToString()))
                    {
                        // open file
                        AsciiReader reader = new AsciiReader(sds, (AsciiFileReaderInfo)Bus[TaskManager.FILE_READER_INFO]);

                        if (dm.IsDatasetCheckedOutFor(id, User.Name) || dm.CheckOutDataset(id, User.Name))
                        {
                            workingCopy = dm.GetDatasetWorkingCopy(id);

                            //set packagsize for one loop
                            int packageSize = 100000;
                            Bus[TaskManager.CURRENTPACKAGESIZE] = packageSize;

                            //loop
                            int counter = 0;

                            //set StateInfo of the previus version
                            if (workingCopy.StateInfo == null)
                            {
                                workingCopy.StateInfo = new Vaiona.Entities.Common.EntityStateInfo()
                                {
                                    State = status
                                };
                            }
                            else
                            {
                                workingCopy.StateInfo.State = status;
                            }

                            do
                            {
                                counter++;
                                inputWasAltered = false;
                                Bus[TaskManager.CURRENTPACKAGE] = counter;

                                Stream = reader.Open(Bus[TaskManager.FILEPATH].ToString());
                                rows = reader.ReadFile(Stream, Bus[TaskManager.FILENAME].ToString(), id, packageSize);
                                Stream.Close();

                                if (reader.ErrorMessages.Count > 0)
                                {
                                    // collect reader errors but keep processing the package
                                    foreach (var err in reader.ErrorMessages)
                                    {
                                        temp.Add(new Error(ErrorType.Dataset, err.GetMessage()));
                                    }
                                    //return temp;
                                }

                                if (Bus.ContainsKey(TaskManager.DATASET_STATUS)) //check wheter there is a dataset status in the upload wizard bus
                                {
                                    // based the dataset status and/ or the upload method
                                    if (Bus[TaskManager.DATASET_STATUS].ToString().Equals("new") || ((UploadMethod)Bus[TaskManager.UPLOAD_METHOD]).Equals(UploadMethod.Append))
                                    {
                                        dm.EditDatasetVersion(workingCopy, rows, null, null); // add all datatuples to the datasetversion
                                    }
                                    else if (Bus[TaskManager.DATASET_STATUS].ToString().Equals("edit")) // datatuples allready exist
                                    {
                                        if (rows.Count() > 0)
                                        {
                                            //split the incoming datatuples to (new|edit) based on the primary keys
                                            var splittedDatatuples = uploadWizardHelper.GetSplitDatatuples(rows, (List <long>)Bus[TaskManager.PRIMARY_KEYS], workingCopy, ref datatupleFromDatabaseIds);
                                            dm.EditDatasetVersion(workingCopy, splittedDatatuples["new"], splittedDatatuples["edit"], null);
                                            inputWasAltered = true;
                                        }
                                    }
                                }
                                else // if there is no dataset status in the bus, use dataset status edit
                                {
                                    if (rows.Count() > 0)
                                    {
                                        //split the incoming datatuples to (new|edit) based on the primary keys
                                        Dictionary <string, List <DataTuple> > splittedDatatuples = new Dictionary <string, List <DataTuple> >();
                                        splittedDatatuples = uploadWizardHelper.GetSplitDatatuples(rows, (List <long>)Bus[TaskManager.PRIMARY_KEYS], workingCopy, ref datatupleFromDatabaseIds);
                                        dm.EditDatasetVersion(workingCopy, splittedDatatuples["new"], splittedDatatuples["edit"], null);
                                        inputWasAltered = true;
                                    }
                                }

                                //count rows
                                numberOfRows += rows.Count();
                            } while ((rows.Count() > 0 && rows.Count() <= packageSize) || inputWasAltered == true);

                            numberOfSkippedRows = reader.NumberOSkippedfRows;
                        }
                        //Stream.Close();
                    }
                    #endregion ascii reader

                    #region contentdescriptors

                    //remove all contentdescriptors from the old version
                    //generatedTXT
                    if (workingCopy.ContentDescriptors.Any(c => c.Name.Equals("generatedTXT")))
                    {
                        ContentDescriptor tmp = workingCopy.ContentDescriptors.Where(c => c.Name.Equals("generatedTXT"))
                                                .FirstOrDefault();
                        dm.DeleteContentDescriptor(tmp);
                    }
                    //generatedCSV
                    if (workingCopy.ContentDescriptors.Any(c => c.Name.Equals("generatedCSV")))
                    {
                        ContentDescriptor tmp = workingCopy.ContentDescriptors.Where(c => c.Name.Equals("generatedCSV"))
                                                .FirstOrDefault();
                        dm.DeleteContentDescriptor(tmp);
                    }
                    //generated
                    if (workingCopy.ContentDescriptors.Any(c => c.Name.Equals("generated")))
                    {
                        ContentDescriptor tmp = workingCopy.ContentDescriptors.Where(c => c.Name.Equals("generated"))
                                                .FirstOrDefault();
                        dm.DeleteContentDescriptor(tmp);
                    }

                    #endregion contentdescriptors

                    #region set System value into metadata
                    if (Bus.ContainsKey(TaskManager.DATASET_STATUS))
                    {
                        bool newdataset = Bus[TaskManager.DATASET_STATUS].ToString().Equals("new");
                        int v = 1;
                        if (workingCopy.Dataset.Versions != null && workingCopy.Dataset.Versions.Count > 1)
                        {
                            v = workingCopy.Dataset.Versions.Count();
                        }

                        //set modification
                        workingCopy.ModificationInfo = new EntityAuditInfo()
                        {
                            Performer = User.Name,
                            Comment = "Data",
                            ActionType = newdataset ? AuditActionType.Create : AuditActionType.Edit
                        };

                        setSystemValuesToMetadata(id, v, workingCopy.Dataset.MetadataStructure.Id, workingCopy.Metadata, newdataset);
                        dm.EditDatasetVersion(workingCopy, null, null, null);
                    }
                    #endregion set System value into metadata

                    // ToDo: Get Comment from ui and users
                    MoveAndSaveOriginalFileInContentDiscriptor(workingCopy);
                    dm.CheckInDataset(id, numberOfRows + " rows", User.Name);

                    //send email
                    var es = new EmailService();
                    es.Send(MessageHelper.GetUpdateDatasetHeader(datasetid),
                            MessageHelper.GetUpdateDatasetMessage(datasetid, title, User.DisplayName),
                            ConfigurationManager.AppSettings["SystemEmail"]
                            );
                }
                catch (Exception e)
                {
                    // report the failure both to the caller (via temp) and by email
                    temp.Add(new Error(ErrorType.Other, "Can not upload. : " + e.Message));

                    var es = new EmailService();
                    es.Send(MessageHelper.GetErrorHeader(),
                            "Can not upload. : " + e.Message,
                            ConfigurationManager.AppSettings["SystemEmail"]
                            );
                }
                finally
                {
                }
            }
            #endregion structured data

            #region unstructured data
            if (Bus.ContainsKey(TaskManager.DATASTRUCTURE_TYPE) && Bus[TaskManager.DATASTRUCTURE_TYPE].Equals(DataStructureType.Unstructured))
            {
                // checkout the dataset, apply the changes, and check it in.
                if (dm.IsDatasetCheckedOutFor(id, User.Name) || dm.CheckOutDataset(id, User.Name))
                {
                    try
                    {
                        workingCopy = dm.GetDatasetWorkingCopy(id);

                        using (var unitOfWork = this.GetUnitOfWork())
                        {
                            workingCopy.VersionNo += 1;

                            //set StateInfo of the previus version
                            if (workingCopy.StateInfo == null)
                            {
                                workingCopy.StateInfo = new Vaiona.Entities.Common.EntityStateInfo()
                                {
                                    State = status
                                };
                            }
                            else
                            {
                                workingCopy.StateInfo.State = status;
                            }

                            unitOfWork.GetReadOnlyRepository <DatasetVersion>().Load(workingCopy.ContentDescriptors);
                            SaveFileInContentDiscriptor(workingCopy);
                        }

                        if (Bus.ContainsKey(TaskManager.DATASET_STATUS))
                        {
                            bool newdataset = Bus[TaskManager.DATASET_STATUS].ToString().Equals("new");
                            int v = 1;
                            if (workingCopy.Dataset.Versions != null && workingCopy.Dataset.Versions.Count > 1)
                            {
                                v = workingCopy.Dataset.Versions.Count();
                            }

                            //set modification
                            workingCopy.ModificationInfo = new EntityAuditInfo()
                            {
                                Performer = User.Name,
                                Comment = "File",
                                ActionType = AuditActionType.Create
                            };

                            setSystemValuesToMetadata(id, v, workingCopy.Dataset.MetadataStructure.Id, workingCopy.Metadata, newdataset);
                            dm.EditDatasetVersion(workingCopy, null, null, null);
                        }

                        //filename
                        string filename = "";
                        if (Bus.ContainsKey(TaskManager.FILENAME))
                        {
                            filename = Bus[TaskManager.FILENAME]?.ToString();
                        }

                        // ToDo: Get Comment from ui and users
                        dm.CheckInDataset(id, filename, User.Name, ViewCreationBehavior.None);
                    }
                    catch (Exception ex)
                    {
                        // NOTE(review): `throw ex;` resets the stack trace; `throw;` would preserve it
                        throw ex;
                    }
                }
            }
            #endregion unstructured data
        }
        else
        {
            temp.Add(new Error(ErrorType.Dataset, "Dataset is not selected."));
        }

        // no errors -> create a version even if nothing changed; errors -> roll the checkout back
        if (temp.Count <= 0)
        {
            dm.CheckInDataset(id, "no update on data tuples", User.Name, ViewCreationBehavior.None);
        }
        else
        {
            dm.UndoCheckoutDataset(id, User.Name);
        }
    }
    catch (Exception ex)
    {
        temp.Add(new Error(ErrorType.Dataset, ex.Message));
        dm.CheckInDataset(id, "no update on data tuples", User.Name, ViewCreationBehavior.None);
    }
    finally
    {
        // when triggered asynchronously, inform the user about the outcome by email
        if (RunningASync)
        {
            var user = User;
            if (temp.Any())
            {
                var es = new EmailService();
                es.Send(MessageHelper.GetPushApiUploadFailHeader(id, title),
                        MessageHelper.GetPushApiUploadFailMessage(id, user.Name, temp.Select(e => e.ToString()).ToArray()),
                        new List <string> { user.Email }, null, new List <string> { ConfigurationManager.AppSettings["SystemEmail"] });
            }
            else
            {
                var es = new EmailService();
                es.Send(MessageHelper.GetASyncFinishUploadHeader(id, title),
                        MessageHelper.GetASyncFinishUploadMessage(id, title, numberOfRows, numberOfSkippedRows),
                        new List <string> { user.Email }, null, new List <string> { ConfigurationManager.AppSettings["SystemEmail"] });
            }
        }
        dm.Dispose();
        dsm.Dispose();
    }
    return(temp);
}
/// <summary>
/// Returns the path to an ascii export (csv or txt) of the given dataset version,
/// generating and registering the file first if it is missing.
/// </summary>
/// <param name="id">Dataset id.</param>
/// <param name="versionId">Id of the dataset version to export.</param>
/// <param name="title">Not used by this method; kept for interface compatibility with callers.</param>
/// <param name="mimeType">"text/csv" produces a semicolon separated .csv; anything else a tab separated .txt.</param>
/// <returns>Path of the cached or freshly generated file.</returns>
public string GenerateAsciiFile(long id, long versionId, string title, string mimeType)
{
    // using replaces the original try/finally + Dispose (same behavior, matches ConvertDatasetVersion)
    using (DatasetManager datasetManager = new DatasetManager())
    {
        DatasetVersion datasetVersion = datasetManager.GetDatasetVersion(versionId);

        // derive content descriptor name, file extension and separator from the requested mime type
        string contentDescriptorTitle;
        string ext;
        TextSeperator textSeperator;
        switch (mimeType)
        {
            case "text/csv":
                contentDescriptorTitle = "generatedCSV";
                ext = ".csv";
                textSeperator = TextSeperator.semicolon;
                break;

            default:
                contentDescriptorTitle = "generatedTXT";
                ext = ".txt";
                textSeperator = TextSeperator.tab;
                break;
        }

        AsciiWriter writer = new AsciiWriter(textSeperator);

        //ascii allready exist
        if (datasetVersion.ContentDescriptors.Any(p => p.Name.Equals(contentDescriptorTitle) && p.URI.Contains(datasetVersion.Id.ToString())))
        {
            #region FileStream exist
            ContentDescriptor contentdescriptor = datasetVersion.ContentDescriptors.FirstOrDefault(p => p.Name.Equals(contentDescriptorTitle));
            string path = Path.Combine(AppConfiguration.DataPath, contentdescriptor.URI);

            if (FileHelper.FileExist(path))
            {
                return path;
            }

            // descriptor exists but the file is gone -> regenerate ("Data" kept from the original code)
            return generateAndRegisterAsciiFile(datasetManager, datasetVersion, id, "Data", ext, writer);

            #endregion
        }

        // not exist needs to generated ("data" kept from the original code)
        #region FileStream not exist
        return generateAndRegisterAsciiFile(datasetManager, datasetVersion, id, "data", ext, writer);

        #endregion
    }
}

// Generates the download file for a dataset version, registers its path as a content
// descriptor and writes the version's effective data tuples into it. Shared by both
// branches of GenerateAsciiFile (previously duplicated code).
private string generateAndRegisterAsciiFile(DatasetManager datasetManager, DatasetVersion datasetVersion, long id, string downloadTitle, string ext, AsciiWriter writer)
{
    List <long> datatupleIds = datasetManager.GetDatasetVersionEffectiveTupleIds(datasetVersion);
    long datastuctureId = datasetVersion.Dataset.DataStructure.Id;

    string path = generateDownloadFile(id, datasetVersion.Id, datastuctureId, downloadTitle, ext, writer);
    storeGeneratedFilePathToContentDiscriptor(id, datasetVersion, ext);
    writer.AddDataTuples(datasetManager, datatupleIds, path, datastuctureId);

    return path;
}
/// <summary>
/// Returns the path to the generated excel (.xlsm) export of the latest version of a dataset,
/// creating and registering the file first when it is missing or belongs to an older version.
/// </summary>
/// <param name="id">Dataset id.</param>
/// <param name="title">Title used for the regenerated file when a descriptor already exists.</param>
/// <returns>Path of the cached or freshly generated file.</returns>
public string GenerateExcelFile(long id, string title)
{
    // note: an unused local `mimeType = "application / xlsm"` was removed
    string contentDescriptorTitle = "generated";
    string ext = ".xlsm";

    // using replaces the original try/finally + Dispose; the original
    // `catch (Exception ex) { throw ex; }` was removed because it only destroyed the stack trace
    using (DatasetManager datasetManager = new DatasetManager())
    {
        DatasetVersion datasetVersion = datasetManager.GetDatasetLatestVersion(id);
        ExcelWriter writer = new ExcelWriter();
        string path = "";

        //excel allready exist
        if (datasetVersion.ContentDescriptors.Any(p => p.Name.Equals(contentDescriptorTitle) && p.URI.Contains(datasetVersion.Id.ToString())))
        {
            #region FileStream exist
            ContentDescriptor contentdescriptor = datasetVersion.ContentDescriptors.FirstOrDefault(p => p.Name.Equals(contentDescriptorTitle));
            path = Path.Combine(AppConfiguration.DataPath, contentdescriptor.URI);

            long version = datasetVersion.Id;
            // file name convention appears to be ..._<versionId>_... — TODO confirm against generateDownloadFile
            long versionNrGeneratedFile = Convert.ToInt64(contentdescriptor.URI.Split('\\').Last().Split('_')[1]);

            // reuse the cached file only when it is on disk AND was generated for this version
            if (FileHelper.FileExist(path) && version == versionNrGeneratedFile)
            {
                return path;
            }

            // stale or missing -> regenerate with the requested title
            List <long> datatupleIds = datasetManager.GetDatasetVersionEffectiveTupleIds(datasetVersion);
            long datastuctureId = datasetVersion.Dataset.DataStructure.Id;

            path = generateDownloadFile(id, datasetVersion.Id, datastuctureId, title, ext, writer);
            storeGeneratedFilePathToContentDiscriptor(id, datasetVersion, ext);
            writer.AddDataTuplesToTemplate(datasetManager, datatupleIds, path, datastuctureId);

            return path;

            #endregion
        }
        // not exist needs to generated (the original used the fixed title "data" here)
        else
        {
            #region FileStream not exist
            List <long> datatupleIds = datasetManager.GetDatasetVersionEffectiveTupleIds(datasetVersion);
            long datastuctureId = datasetVersion.Dataset.DataStructure.Id;

            path = generateDownloadFile(id, datasetVersion.Id, datastuctureId, "data", ext, writer);
            storeGeneratedFilePathToContentDiscriptor(id, datasetVersion, ext);
            writer.AddDataTuplesToTemplate(datasetManager, datatupleIds, path, datastuctureId);

            return path;

            #endregion
        }
        // note: an unreachable `return ""` after both returning branches was removed
    }
}
/// <summary>
/// Pushes the temporarily stored file (_filepath) into the dataset as a new version,
/// reading it package-wise and either appending or updating tuples depending on
/// _uploadMethod. Sends a success or failure email to the user in every outcome.
/// </summary>
/// <returns>true when the upload finished (or the precondition email was sent); false on reader errors or exceptions.</returns>
/// <remarks>
/// NOTE(review): declared async but contains no await — presumably runs synchronously; confirm callers.
/// Fix in this revision: the FileStream is now closed in a finally block, so it no longer
/// leaks when reader.ReadFile throws (previously Close() was only reached on success).
/// </remarks>
public async Task <bool> Upload()
{
    Debug.WriteLine("start upload data");

    FileStream Stream = null;
    DatasetVersion workingCopy = new DatasetVersion();
    List <DataTuple> rows = new List <DataTuple>();
    long id = _dataset.Id;
    string userName = _user.UserName;
    var es = new EmailService();

    try
    {
        // tuple ids of the latest version; GetSplitDatatuples consumes them by ref
        List <long> datatupleFromDatabaseIds = datasetManager.GetDatasetVersionEffectiveTupleIds(datasetManager.GetDatasetLatestVersion(_dataset.Id));

        if (FileHelper.FileExist(_filepath) && (datasetManager.IsDatasetCheckedOutFor(id, userName) || datasetManager.CheckOutDataset(id, userName)))
        {
            workingCopy = datasetManager.GetDatasetWorkingCopy(id);

            ////set modification
            workingCopy.ModificationInfo = new EntityAuditInfo()
            {
                Performer = userName,
                Comment = "Data",
                ActionType = AuditActionType.Edit
            };

            //loop
            int counter = 0;
            bool inputWasAltered = false;

            do
            {
                counter++;
                inputWasAltered = false;

                Stream = reader.Open(_filepath);
                try
                {
                    rows = reader.ReadFile(Stream, Path.GetFileName(_filepath), id, packageSize);
                }
                finally
                {
                    // close even when the reader throws (the original leaked the stream on errors)
                    Stream.Close();
                }

                // if errors exist, send email to user and stop process
                if (reader.ErrorMessages.Count > 0)
                {
                    List <string> errorArray = new List <string>();
                    foreach (var e in reader.ErrorMessages)
                    {
                        errorArray.Add(e.GetMessage());
                    }

                    //send error messages
                    es.Send(MessageHelper.GetPushApiUploadFailHeader(_dataset.Id, _title),
                            MessageHelper.GetPushApiUploadFailMessage(_dataset.Id, _user.UserName, errorArray.ToArray()),
                            new List <string>() { _user.Email },
                            new List <string>() { ConfigurationManager.AppSettings["SystemEmail"] }
                            );

                    // NOTE(review): returns while the dataset stays checked out — confirm intended
                    return(false);
                }

                //Update Method -- append or update
                if (_uploadMethod == UploadMethod.Append)
                {
                    if (rows.Count > 0)
                    {
                        datasetManager.EditDatasetVersion(workingCopy, rows, null, null);
                        inputWasAltered = true;
                    }
                }
                else if (_uploadMethod == UploadMethod.Update)
                {
                    if (rows.Count() > 0)
                    {
                        // split incoming tuples into (new|edit) based on the primary keys
                        var splittedDatatuples = uploadHelper.GetSplitDatatuples(rows, variableIds, workingCopy, ref datatupleFromDatabaseIds);
                        datasetManager.EditDatasetVersion(workingCopy, splittedDatatuples["new"], splittedDatatuples["edit"], null);
                        inputWasAltered = true;
                    }
                }
            } while (rows.Count() > 0 || inputWasAltered == true);

            datasetManager.CheckInDataset(id, "via api", userName);

            string title = workingCopy.Title;

            //send email
            es.Send(MessageHelper.GetUpdateDatasetHeader(id),
                    MessageHelper.GetUpdateDatasetMessage(id, title, _user.DisplayName),
                    new List <string>() { _user.Email },
                    new List <string>() { ConfigurationManager.AppSettings["SystemEmail"] }
                    );
        }
        else
        {
            //ToDo send email to user
            es.Send(MessageHelper.GetPushApiUploadFailHeader(_dataset.Id, _title),
                    MessageHelper.GetPushApiUploadFailMessage(_dataset.Id, _user.UserName, new string[] { "The temporarily stored data could not be read or the dataset is already in checkout status." }),
                    new List <string>() { _user.Email },
                    new List <string>() { ConfigurationManager.AppSettings["SystemEmail"] }
                    );
        }

        return(true);
    }
    catch (Exception ex)
    {
        // roll back the checkout so the dataset is not left locked
        if (datasetManager.IsDatasetCheckedOutFor(id, userName))
        {
            datasetManager.UndoCheckoutDataset(id, userName);
        }

        //ToDo send email to user
        es.Send(MessageHelper.GetPushApiUploadFailHeader(_dataset.Id, _title),
                MessageHelper.GetPushApiUploadFailMessage(_dataset.Id, _user.UserName, new string[] { ex.Message }),
                new List <string>() { _user.Email },
                new List <string>() { ConfigurationManager.AppSettings["SystemEmail"] }
                );

        return(false);
    }
    finally
    {
        Debug.WriteLine("end of upload");
    }
}