public void textMarkerHandling_NotTextMarkerInRow_ReturnExpectedListOfStrings()
{
    // Arrange: a semicolon-separated row that contains no text markers at all,
    // so TextMarkerHandling should simply split on the separator.
    const string row = "V1;V2;V3;V4";
    var expected = new List <string> { "V1", "V2", "V3", "V4" };

    var info = new AsciiFileReaderInfo();
    info.Seperator = TextSeperator.semicolon;
    var reader = new AsciiReader(new StructuredDataStructure(), info);

    // Act
    List <string> values = reader.TextMarkerHandling(
        row,
        AsciiFileReaderInfo.GetSeperator(TextSeperator.semicolon),
        AsciiFileReaderInfo.GetTextMarker(TextMarker.quotes));

    // Assert: same number of values, same contents
    Assert.That(values.Count, Is.EqualTo(expected.Count));
    Assert.That(values, Is.EquivalentTo(expected));
}
/// <summary>
/// Factory method: builds a file-based format reader for the given reader type.
/// Database readers are not produced here — use CreateDbReader() for ReaderType.Db.
/// </summary>
/// <param name="type">Kind of reader to create.</param>
/// <param name="path">Path of the file the reader will operate on.</param>
/// <param name="baseOptions">Optional base options; an empty set is used when null.</param>
/// <returns>A reader configured with the file path and options.</returns>
public IFormatReader <IFileOptionsSet> CreateReader(ReaderType type, string path, IOptionsSet baseOptions = null)
{
    // Merge the caller-supplied options (or an empty set) with the file path.
    FileOptionsSet options = new FileOptionsSet(baseOptions ?? OptionsSet.Empty) { FilePath = path };

    switch (type)
    {
        case ReaderType.Ascii:
            return new AsciiReader(options);

        case ReaderType.Excel:
        case ReaderType.Excel2007:
            return new XlsReader(options);

        case ReaderType.Dbf:
            return new DbfReader(options);

        case ReaderType.Db:
            throw new InvalidOperationException("use CreateDbReader()");

        default:
            throw new UnimplementedReaderTypeException(type);
    }
}
/// <summary>
/// ValidateFile on 1,000,000 deliberately invalid rows must cap the collected
/// error messages at 1000 (the reader's error limit).
/// </summary>
public void ValidateRow_runNotValid_LimitErrors()
{
    // Arrange: generate rows that intentionally contain invalid values
    DataGeneratorHelper dgh = new DataGeneratorHelper();
    var testData = dgh.GenerateRowsWithRandomValuesBasedOnDatastructureWithErrors(dataStructure, ",", 1000000, true);
    IEnumerable <string> variableNames = dataStructure.Variables.Select(v => v.Label);

    // write header + generated rows into a temp file that the reader will validate
    string path = Path.Combine(AppConfiguration.DataPath, "testdataforvalidation.txt");
    if (File.Exists(path)) { File.Delete(path); }
    using (StreamWriter sw = new StreamWriter(path))
    {
        string header = string.Join(",", variableNames.ToArray());
        sw.WriteLine(header);
        foreach (var r in testData) { sw.WriteLine(r); }
    }

    // Mock IOUtility
    var ioUtilityMock = new Mock <IOUtility>();
    ioUtilityMock.Setup(i => i.ConvertDateToCulture("2018")).Returns("2018");

    AsciiFileReaderInfo afr = new AsciiFileReaderInfo();
    afr.TextMarker = TextMarker.doubleQuotes;
    afr.Seperator = TextSeperator.comma;

    DataReader reader = new AsciiReader(dataStructure, afr, ioUtilityMock.Object);
    var asciireader = (AsciiReader)reader;

    // Act: validate the whole file
    // (the original wrapped this in a try/catch that only did "throw ex;",
    // which resets the stack trace — the pointless catch has been removed)
    using (Stream stream = reader.Open(path))
    {
        asciireader.ValidateFile(stream, "", 1);
    }

    // Assert: error reporting stops at the 1000-message limit
    Assert.That(asciireader.ErrorMessages.Count, Is.EqualTo(1000));
}
/// <summary>
/// Passing null identifiers to ValidateComparisonWithDatatsructure must yield
/// a non-null error list containing exactly one error.
/// </summary>
public void ValidateComparisonWithDatatstructureNullTest()
{
    // Arrange
    DataReader reader = new AsciiReader(dataStructure, new AsciiFileReaderInfo());

    // Act
    List <Error> errors = reader.ValidateComparisonWithDatatsructure(null);

    // Assert
    errors.Should().NotBeNull();
    // fixed: was ".Should().Equals(1)" — Object.Equals on the assertion object,
    // a no-op whose result was discarded; .Be(1) actually asserts the count.
    errors.Count.Should().Be(1);
}
// Use this for initialization: cache the sibling reader components and
// log an error if either is missing on this GameObject.
void Start()
{
    asciiReader = GetComponent <AsciiReader>();
    if (!asciiReader)
    {
        Debug.LogError("No Ascii Reader component found.");
    }

    fileReader = GetComponent <FileReader>();
    if (!fileReader)
    {
        Debug.LogError("No File Reader component found.");
    }

    //Wait for file Index
}
/// <summary>
/// ReadRow must trim leading/trailing whitespace from each value of a
/// pipe-separated row; the parsed values are compared against their
/// trimmed expectations.
/// </summary>
/// <param name="rowString">Pipe-separated row supplied by the test case.</param>
public void ReadRow_textWithWhitspaceAtBeginningandEnd_WithspaceRemoved(string rowString)
{
    // Arrange
    List <string> row = new List <string>(rowString.Split('|'));

    // Mock IOUtility -> ConvertDateToCulture
    var ioUtilityMock = new Mock <IOUtility>();
    ioUtilityMock.Setup(i => i.ConvertDateToCulture("2018")).Returns("2018");
    // fixed: removed an unused DatasetManager mock whose CreateVariableValue
    // setup was never passed to (or consumed by) the reader under test.

    DataReader reader = new AsciiReader(dataStructure, new AsciiFileReaderInfo(), ioUtilityMock.Object);
    IEnumerable <string> variableNames = dataStructure.Variables.Select(v => v.Label);
    List <VariableIdentifier> variableIdentifiers = reader.SetSubmitedVariableIdentifiers(variableNames.ToList());
    List <Error> errors = reader.ValidateComparisonWithDatatsructure(variableIdentifiers);
    errors.Should().BeNull(); // null list = structure comparison succeeded

    // Act
    DataTuple dt = reader.ReadRow(new List <string>(row), 1);

    // Assert: every value comes back without surrounding whitespace
    Assert.That(dt.VariableValues[0].Value.ToString(), Is.EqualTo("1"));
    Assert.That(dt.VariableValues[1].Value.ToString(), Is.EqualTo("test"));
    Assert.That(dt.VariableValues[2].Value.ToString(), Is.EqualTo("2.2"));
    Assert.That(dt.VariableValues[3].Value.ToString(), Is.EqualTo("true"));
}
/// <summary>
/// When the submitted variable identifiers are taken directly from the data
/// structure itself, the comparison must succeed (the reader signals success
/// by returning a null error list).
/// </summary>
/// <param name="variableRowString">Test-case row string (kept for the test-case
/// signature; the identifiers are derived from the data structure, so the raw
/// row is not needed — the previously split local list was unused and removed).</param>
public void ValidateComparisonWithDatatstructureTest(string variableRowString)
{
    // Arrange
    DataReader reader = new AsciiReader(dataStructure, new AsciiFileReaderInfo());
    IEnumerable <string> variableNames = dataStructure.Variables.Select(v => v.Label);
    List <VariableIdentifier> variableIdentifiers = reader.SetSubmitedVariableIdentifiers(variableNames.ToList());

    // Act
    List <Error> errors = reader.ValidateComparisonWithDatatsructure(variableIdentifiers);

    // Assert: no errors -> null list
    errors.Should().BeNull();
}
/// <summary>
/// Submitting variable names that do not exist in the data structure must
/// produce a non-null error list with exactly one error.
/// </summary>
/// <param name="variableRowString">Pipe-separated variable names that do not
/// match the data structure.</param>
public void ValidateComparisonWithDatatstructureVariablNotExistTest(string variableRowString)
{
    // Arrange
    List <string> variableRow = new List <string>(variableRowString.Split('|'));
    DataReader reader = new AsciiReader(dataStructure, new AsciiFileReaderInfo());
    List <VariableIdentifier> variableIdentifiers = reader.SetSubmitedVariableIdentifiers(variableRow.ToList());

    // Act
    List <Error> errors = reader.ValidateComparisonWithDatatsructure(variableIdentifiers);

    // Assert
    errors.Should().NotBeNull();
    // fixed: was ".Should().Equals(1)" — a no-op Object.Equals call that never
    // asserted anything; .Be(1) performs the intended assertion.
    errors.Count.Should().Be(1);
}
/// <summary>
/// Wires up the managers, helpers and ASCII reader needed to push API-supplied
/// data into an existing dataset.
/// NOTE(review): the managers created here are typically IDisposable in this
/// codebase — confirm this helper disposes them elsewhere.
/// </summary>
/// <param name="dataset">Target dataset; its DataStructure.Id is used to load the structured data structure.</param>
/// <param name="user">User performing the upload.</param>
/// <param name="data">Incoming API payload.</param>
/// <param name="title">Title used for the resulting dataset version.</param>
/// <param name="uploadMethod">How the data is applied (e.g. append/replace) — semantics handled by callers of this helper.</param>
public DataApiHelper(Dataset dataset, User user, DataApiModel data, string title, UploadMethod uploadMethod)
{
    // manager/helper instances used throughout the upload workflow
    datasetManager = new DatasetManager(); userManager = new UserManager(); entityPermissionManager = new EntityPermissionManager(); dataStructureManager = new DataStructureManager(); uploadHelper = new UploadHelper();
    // capture the call parameters
    _dataset = dataset; _user = user; _data = data; _title = title; _uploadMethod = uploadMethod;
    // load the dataset's structured data structure and create an ASCII reader for it
    _dataStructure = dataStructureManager.StructuredDataStructureRepo.Get(_dataset.DataStructure.Id); reader = new AsciiReader(_dataStructure, new AsciiFileReaderInfo());
}
/// <summary>
/// Reading a row with fewer values than the data structure defines must throw.
/// </summary>
/// <param name="rowString">Pipe-separated row supplied by the test case.</param>
public void ReadRowLessValuesTest(string rowString)
{
    // Arrange
    var row = new List <string>(rowString.Split('|'));

    // Mock IOUtility
    var ioUtilityMock = new Mock <IOUtility>();
    ioUtilityMock.Setup(i => i.ConvertDateToCulture("2018")).Returns("2018");

    DataReader reader = new AsciiReader(dataStructure, new AsciiFileReaderInfo(), ioUtilityMock.Object);
    IEnumerable <string> variableNames = dataStructure.Variables.Select(v => v.Label);
    reader.SetSubmitedVariableIdentifiers(variableNames.ToList());

    // Act & Assert: an incomplete row is rejected with an exception
    Assert.Throws <Exception>(() => reader.ReadRow(new List <string>(row), 1));
}
/// <summary>
/// ReadRow must return null when the incoming row is null.
/// </summary>
public void ReadRow_RowIsNullTest_DataTupleIsNull()
{
    // Arrange: mock IOUtility and prepare the reader
    var ioUtilityMock = new Mock <IOUtility>();
    ioUtilityMock.Setup(i => i.ConvertDateToCulture("2018")).Returns("2018");

    DataReader reader = new AsciiReader(dataStructure, new AsciiFileReaderInfo(), ioUtilityMock.Object);
    IEnumerable <string> variableNames = dataStructure.Variables.Select(v => v.Label);
    reader.SetSubmitedVariableIdentifiers(variableNames.ToList());

    // Act
    DataTuple dt = reader.ReadRow(null, 1);

    // Assert
    dt.Should().BeNull();
}
/// <summary>
/// A valid pipe-separated row must be parsed into a non-null DataTuple with
/// one VariableValue per input value.
/// </summary>
/// <param name="rowString">Pipe-separated row supplied by the test case.</param>
public void ReadRow_ValidRowTest_DataTupleIsValid(string rowString)
{
    // Arrange
    List <string> row = new List <string>(rowString.Split('|'));

    // Mock IOUtility -> ConvertDateToCulture
    var ioUtilityMock = new Mock <IOUtility>();
    ioUtilityMock.Setup(i => i.ConvertDateToCulture("2018")).Returns("2018");
    // fixed: removed an unused DatasetManager mock whose CreateVariableValue
    // setup was never handed to the reader under test.

    DataReader reader = new AsciiReader(dataStructure, new AsciiFileReaderInfo(), ioUtilityMock.Object);
    IEnumerable <string> variableNames = dataStructure.Variables.Select(v => v.Label);
    List <VariableIdentifier> variableIdentifiers = reader.SetSubmitedVariableIdentifiers(variableNames.ToList());
    List <Error> errors = reader.ValidateComparisonWithDatatsructure(variableIdentifiers);
    errors.Should().BeNull(); // null list = structure comparison succeeded

    // Act
    DataTuple dt = reader.ReadRow(new List <string>(row), 1);

    // Assert
    dt.Should().NotBeNull();
    // fixed: was ".Should().Equals(row.Count)" — a no-op Object.Equals call;
    // .Be(...) performs the intended assertion.
    dt.VariableValues.Count.Should().Be(row.Count);
}
/// <summary>
/// Validating a valid row must return an empty (but non-null) error list.
/// </summary>
/// <param name="rowString">Pipe-separated row supplied by the test case.</param>
public void ValidateRow_ValidRowTest_NoErrors(string rowString)
{
    // Arrange
    List <string> row = new List <string>(rowString.Split('|'));

    // Mock IOUtility
    var ioUtilityMock = new Mock <IOUtility>();
    ioUtilityMock.Setup(i => i.ConvertDateToCulture("2018")).Returns("2018");

    DataReader reader = new AsciiReader(dataStructure, new AsciiFileReaderInfo(), ioUtilityMock.Object);
    IEnumerable <string> variableNames = dataStructure.Variables.Select(v => v.Label);
    reader.SetSubmitedVariableIdentifiers(variableNames.ToList());

    // Act
    List <Error> errors = reader.ValidateRow(new List <string>(row), 1);

    // Assert
    errors.Should().NotBeNull();
    // fixed: was ".Should().Equals(0)" — a no-op Object.Equals call that never
    // asserted anything; .Be(0) performs the intended assertion.
    errors.Count.Should().Be(0);
}
public void rowToList_RowAsQuotesAndSeperatorInQuotes_ReturnExpectedListOfStrings()
{
    // Arrange: quoted values where the last one contains the separator itself,
    // so rowToList must not split inside the text marker.
    const string row = "'V1';'V2';'V3;V4'";
    var expected = new List <string> { "V1", "V2", "V3;V4" };

    var info = new AsciiFileReaderInfo();
    info.Seperator = TextSeperator.semicolon;
    var reader = new AsciiReader(new StructuredDataStructure(), info);

    // Act
    List <string> values = reader.rowToList(row, AsciiFileReaderInfo.GetSeperator(TextSeperator.semicolon));

    // Assert: same number of values, same contents
    Assert.That(values.Count, Is.EqualTo(expected.Count));
    Assert.That(values, Is.EquivalentTo(expected));
}
/// <summary>
/// Upload-wizard step: validates the selected file against the structured data
/// structure of the chosen dataset (.xlsm via ExcelReader, .csv/.txt via
/// AsciiReader), stores the row count and VALID flag on the TaskManager bus,
/// and returns the step's partial view with the collected errors.
/// NOTE(review): the inner finally calls Stream.Close() — if an exception is
/// thrown before Stream is assigned this looks like it could fail; confirm
/// that the Stream member is always initialized. The "Can not valid." message
/// text also appears garbled — verify against the original source.
/// </summary>
public ActionResult ValidateFile() { DataStructureManager dsm = new DataStructureManager(); try { BExIS.Dcm.UploadWizard.TaskManager TaskManager = (BExIS.Dcm.UploadWizard.TaskManager)Session["TaskManager"]; ValidationModel model = new ValidationModel(); model.StepInfo = TaskManager.Current(); if (TaskManager.Bus.ContainsKey(TaskManager.DATASET_ID) && TaskManager.Bus.ContainsKey(TaskManager.DATASTRUCTURE_ID)) { try { long id = (long)Convert.ToInt32(TaskManager.Bus[TaskManager.DATASET_ID]); long iddsd = (long)Convert.ToInt32(TaskManager.Bus[TaskManager.DATASTRUCTURE_ID]); StructuredDataStructure sds = dsm.StructuredDataStructureRepo.Get(iddsd); dsm.StructuredDataStructureRepo.LoadIfNot(sds.Variables); if (TaskManager.Bus[TaskManager.EXTENTION].ToString().Equals(".xlsm")) { // open FileStream ExcelReader reader = new ExcelReader(); Stream = reader.Open(TaskManager.Bus[TaskManager.FILEPATH].ToString()); reader.ValidateFile(Stream, TaskManager.Bus[TaskManager.FILENAME].ToString(), sds, id); model.ErrorList = reader.ErrorMessages; if (TaskManager.Bus.ContainsKey(TaskManager.NUMBERSOFROWS)) { TaskManager.Bus[TaskManager.NUMBERSOFROWS] = reader.NumberOfRows; } else { TaskManager.Bus.Add(TaskManager.NUMBERSOFROWS, reader.NumberOfRows); } } if (TaskManager.Bus[TaskManager.EXTENTION].ToString().Equals(".csv") || TaskManager.Bus[TaskManager.EXTENTION].ToString().Equals(".txt")) { AsciiReader reader = new AsciiReader(); Stream = reader.Open(TaskManager.Bus[TaskManager.FILEPATH].ToString()); reader.ValidateFile(Stream, TaskManager.Bus[TaskManager.FILENAME].ToString(), (AsciiFileReaderInfo)TaskManager.Bus[TaskManager.FILE_READER_INFO], sds, id); model.ErrorList = reader.ErrorMessages; if (TaskManager.Bus.ContainsKey(TaskManager.NUMBERSOFROWS)) { TaskManager.Bus[TaskManager.NUMBERSOFROWS] = reader.NumberOfRows; } } } catch (Exception ex) { model.ErrorList.Add(new Error(ErrorType.Other, "Can not valid. 
: " + ex.Message)); TaskManager.AddToBus(TaskManager.VALID, false); } finally { Stream.Close(); } } else { model.ErrorList.Add(new Error(ErrorType.Dataset, "Dataset is not selected.")); TaskManager.AddToBus(TaskManager.VALID, false); } if (model.ErrorList.Count() == 0) { model.Validated = true; TaskManager.AddToBus(TaskManager.VALID, true); } return(PartialView(TaskManager.Current().GetActionInfo.ActionName, model)); } finally { dsm.Dispose(); } }
/// <summary>
/// Validating 1000 generated valid rows must produce no errors.
/// </summary>
public void ValidateRow_runValid_noErrors()
{
    // Arrange: generate valid random data based on the data structure
    DataGeneratorHelper dgh = new DataGeneratorHelper();
    var errors = new List <Error>();
    var testData = dgh.GenerateRowsWithRandomValuesBasedOnDatastructure(dataStructure, ",", 1000, true);

    // write the generated rows to a temp file
    Encoding encoding = Encoding.Default;
    string path = Path.Combine(AppConfiguration.DataPath, "testdataforvalidation.txt");
    if (File.Exists(path)) { File.Delete(path); }
    using (StreamWriter sw = new StreamWriter(path))
    {
        foreach (var r in testData) { sw.WriteLine(r); }
    }

    // Mock IOUtility
    var ioUtilityMock = new Mock <IOUtility>();
    ioUtilityMock.Setup(i => i.ConvertDateToCulture("2018")).Returns("2018");

    AsciiFileReaderInfo afr = new AsciiFileReaderInfo();
    afr.TextMarker = TextMarker.doubleQuotes;
    afr.Seperator = TextSeperator.comma;

    // fixed: the configured reader info (afr) is now actually passed to the
    // reader — the original built afr and then handed the reader a fresh
    // default AsciiFileReaderInfo instance.
    DataReader reader = new AsciiReader(dataStructure, afr, ioUtilityMock.Object);
    IEnumerable <string> variableNames = dataStructure.Variables.Select(v => v.Label);
    List <VariableIdentifier> variableIdentifiers = reader.SetSubmitedVariableIdentifiers(variableNames.ToList());
    reader.ValidateComparisonWithDatatsructure(variableIdentifiers);
    var asciireader = (AsciiReader)reader;

    // Act: split and validate every line of the file
    // (the original wrapped this in a try/catch that only did "throw ex;",
    // which resets the stack trace — the pointless catch has been removed)
    using (StreamReader streamReader = new StreamReader(path, encoding))
    {
        string line;
        int index = 1;
        char seperator = AsciiFileReaderInfo.GetSeperator(afr.Seperator); // comma, as configured above
        while ((line = streamReader.ReadLine()) != null)
        {
            var row = asciireader.rowToList(line, seperator);
            errors = asciireader.ValidateRow(row, index);
            index++;
        }
    }

    // Assert
    Assert.That(errors.Count, Is.EqualTo(0));
}
/// <summary>
/// Finishes an upload: for structured data, reads the uploaded file
/// (Excel/.xlsm or ASCII .csv/.txt) package-wise (100,000 rows per package),
/// writes the tuples into a checked-out dataset working copy (appending for
/// "new"/Append, splitting into new/edit tuples by primary key for "edit"),
/// removes stale generated content descriptors, stamps system metadata,
/// checks the dataset in and sends a notification e-mail. For unstructured
/// data, stores the file as a content descriptor on a new version. Errors are
/// collected and returned; on error the checkout is undone.
/// NOTE(review): catch blocks rethrow with "throw ex" (stack trace lost) and
/// the method mixes reading, versioning and mailing concerns — refactoring
/// candidates, deliberately left untouched in this documentation-only pass.
/// </summary>
/// <returns>List of errors collected during the upload; empty on success.</returns>
//temporary solution: norman :FinishUpload2 public async Task <List <Error> > FinishUpload() { DataStructureManager dsm = new DataStructureManager(); DatasetManager dm = new DatasetManager(); IOUtility iOUtility = new IOUtility(); List <Error> temp = new List <Error>(); long id = 0; string title = ""; int numberOfRows = 0; int numberOfSkippedRows = 0; try { DatasetVersion workingCopy = new DatasetVersion(); //datatuple list List <DataTuple> rows = new List <DataTuple>(); //Dataset ds = null; bool inputWasAltered = false; if (Bus.ContainsKey(TaskManager.DATASET_ID) && Bus.ContainsKey(TaskManager.DATASTRUCTURE_ID)) { id = Convert.ToInt32(Bus[TaskManager.DATASET_ID]); long iddsd = Convert.ToInt32(Bus[TaskManager.DATASTRUCTURE_ID]); //GetValues from the previus version // Status DatasetVersion latestVersion = dm.GetDatasetLatestVersion(id); title = latestVersion.Title; string status = DatasetStateInfo.NotValid.ToString(); if (latestVersion.StateInfo != null) { status = latestVersion.StateInfo.State; } #region Progress Informations if (Bus.ContainsKey(TaskManager.CURRENTPACKAGESIZE)) { Bus[TaskManager.CURRENTPACKAGESIZE] = 0; } else { Bus.Add(TaskManager.CURRENTPACKAGESIZE, 0); } if (Bus.ContainsKey(TaskManager.CURRENTPACKAGE)) { Bus[TaskManager.CURRENTPACKAGE] = 0; } else { Bus.Add(TaskManager.CURRENTPACKAGE, 0); } #endregion Progress Informations #region structured data if (Bus.ContainsKey(TaskManager.DATASTRUCTURE_TYPE) && Bus[TaskManager.DATASTRUCTURE_TYPE].Equals(DataStructureType.Structured)) { long datasetid = id; XmlDatasetHelper xmlDatasetHelper = new XmlDatasetHelper(); try { // load all data tuple ids from the latest version List <long> datatupleFromDatabaseIds = dm.GetDatasetVersionEffectiveTupleIds(dm.GetDatasetLatestVersion(id)); // load structured data structure StructuredDataStructure sds = dsm.StructuredDataStructureRepo.Get(iddsd); dsm.StructuredDataStructureRepo.LoadIfNot(sds.Variables); #region excel reader if 
(Bus[TaskManager.EXTENTION].ToString().Equals(".xlsm") || iOUtility.IsSupportedExcelFile(Bus[TaskManager.EXTENTION].ToString())) { int packageSize = 100000; Bus[TaskManager.CURRENTPACKAGESIZE] = packageSize; int counter = 0; //schleife dm.CheckOutDatasetIfNot(id, User.Name); // there are cases, the dataset does not get checked out!! if (!dm.IsDatasetCheckedOutFor(id, User.Name)) { throw new Exception(string.Format("Not able to checkout dataset '{0}' for user '{1}'!", id, User.Name)); } workingCopy = dm.GetDatasetWorkingCopy(id); //set StateInfo of the previus version if (workingCopy.StateInfo == null) { workingCopy.StateInfo = new Vaiona.Entities.Common.EntityStateInfo() { State = status }; } else { workingCopy.StateInfo.State = status; } ExcelReader reader = null; ExcelFileReaderInfo excelFileReaderInfo = null; if (iOUtility.IsSupportedExcelFile(Bus[TaskManager.EXTENTION].ToString())) { excelFileReaderInfo = (ExcelFileReaderInfo)Bus[TaskManager.FILE_READER_INFO]; } reader = new ExcelReader(sds, excelFileReaderInfo); do { counter++; Bus[TaskManager.CURRENTPACKAGE] = counter; //open stream Stream = reader.Open(Bus[TaskManager.FILEPATH].ToString()); rows = new List <DataTuple>(); if (iOUtility.IsSupportedExcelFile(Bus[TaskManager.EXTENTION].ToString())) { if (reader.Position < excelFileReaderInfo.DataEndRow) { rows = reader.ReadFile(Stream, Bus[TaskManager.FILENAME].ToString(), (int)id, packageSize); } } else { rows = reader.ReadTemplateFile(Stream, Bus[TaskManager.FILENAME].ToString(), (int)id, packageSize); } //Debug.WriteLine("ReadFile: " + counter + " Time " + upload.Elapsed.TotalSeconds.ToString()); if (reader.ErrorMessages.Count > 0) { //model.ErrorList = reader.errorMessages; } else { //XXX Add packagesize to excel read function if (Bus.ContainsKey(TaskManager.DATASET_STATUS)) { if (Bus[TaskManager.DATASET_STATUS].ToString().Equals("new") || ((UploadMethod)Bus[TaskManager.UPLOAD_METHOD]).Equals(UploadMethod.Append)) { dm.EditDatasetVersion(workingCopy, rows, 
null, null); //Debug.WriteLine("EditDatasetVersion: " + counter + " Time " + upload.Elapsed.TotalSeconds.ToString()); //Debug.WriteLine("----"); } else if (Bus[TaskManager.DATASET_STATUS].ToString().Equals("edit")) { if (rows.Count() > 0) { Dictionary <string, List <DataTuple> > splittedDatatuples = new Dictionary <string, List <DataTuple> >(); splittedDatatuples = uploadWizardHelper.GetSplitDatatuples(rows, (List <long>)Bus[TaskManager.PRIMARY_KEYS], workingCopy, ref datatupleFromDatabaseIds); dm.EditDatasetVersion(workingCopy, splittedDatatuples["new"], splittedDatatuples["edit"], null); } } } else { } } Stream?.Close(); //count rows numberOfRows += rows.Count(); } while (rows.Count() > 0 && rows.Count() <= packageSize); numberOfSkippedRows = reader.NumberOSkippedfRows; } #endregion excel reader #region ascii reader if (iOUtility.IsSupportedAsciiFile(Bus[TaskManager.EXTENTION].ToString())) { // open file AsciiReader reader = new AsciiReader(sds, (AsciiFileReaderInfo)Bus[TaskManager.FILE_READER_INFO]); if (dm.IsDatasetCheckedOutFor(id, User.Name) || dm.CheckOutDataset(id, User.Name)) { workingCopy = dm.GetDatasetWorkingCopy(id); //set packagsize for one loop int packageSize = 100000; Bus[TaskManager.CURRENTPACKAGESIZE] = packageSize; //schleife int counter = 0; //set StateInfo of the previus version if (workingCopy.StateInfo == null) { workingCopy.StateInfo = new Vaiona.Entities.Common.EntityStateInfo() { State = status }; } else { workingCopy.StateInfo.State = status; } do { counter++; inputWasAltered = false; Bus[TaskManager.CURRENTPACKAGE] = counter; Stream = reader.Open(Bus[TaskManager.FILEPATH].ToString()); rows = reader.ReadFile(Stream, Bus[TaskManager.FILENAME].ToString(), id, packageSize); Stream.Close(); if (reader.ErrorMessages.Count > 0) { foreach (var err in reader.ErrorMessages) { temp.Add(new Error(ErrorType.Dataset, err.GetMessage())); } //return temp; } if (Bus.ContainsKey(TaskManager.DATASET_STATUS)) //check wheter there is a dataset status in the 
upload wizard bus { // based the dataset status and/ or the upload method if (Bus[TaskManager.DATASET_STATUS].ToString().Equals("new") || ((UploadMethod)Bus[TaskManager.UPLOAD_METHOD]).Equals(UploadMethod.Append)) { dm.EditDatasetVersion(workingCopy, rows, null, null); // add all datatuples to the datasetversion } else if (Bus[TaskManager.DATASET_STATUS].ToString().Equals("edit")) // datatuples allready exist { if (rows.Count() > 0) { //split the incoming datatuples to (new|edit) based on the primary keys var splittedDatatuples = uploadWizardHelper.GetSplitDatatuples(rows, (List <long>)Bus[TaskManager.PRIMARY_KEYS], workingCopy, ref datatupleFromDatabaseIds); dm.EditDatasetVersion(workingCopy, splittedDatatuples["new"], splittedDatatuples["edit"], null); inputWasAltered = true; } } } else // if there is no dataset status in the bus, use dataset status edit { if (rows.Count() > 0) { //split the incoming datatuples to (new|edit) based on the primary keys Dictionary <string, List <DataTuple> > splittedDatatuples = new Dictionary <string, List <DataTuple> >(); splittedDatatuples = uploadWizardHelper.GetSplitDatatuples(rows, (List <long>)Bus[TaskManager.PRIMARY_KEYS], workingCopy, ref datatupleFromDatabaseIds); dm.EditDatasetVersion(workingCopy, splittedDatatuples["new"], splittedDatatuples["edit"], null); inputWasAltered = true; } } //count rows numberOfRows += rows.Count(); } while ((rows.Count() > 0 && rows.Count() <= packageSize) || inputWasAltered == true); numberOfSkippedRows = reader.NumberOSkippedfRows; } //Stream.Close(); } #endregion ascii reader #region contentdescriptors //remove all contentdescriptors from the old version //generatedTXT if (workingCopy.ContentDescriptors.Any(c => c.Name.Equals("generatedTXT"))) { ContentDescriptor tmp = workingCopy.ContentDescriptors.Where(c => c.Name.Equals("generatedTXT")) .FirstOrDefault(); dm.DeleteContentDescriptor(tmp); } //generatedCSV if (workingCopy.ContentDescriptors.Any(c => c.Name.Equals("generatedCSV"))) { 
ContentDescriptor tmp = workingCopy.ContentDescriptors.Where(c => c.Name.Equals("generatedCSV")) .FirstOrDefault(); dm.DeleteContentDescriptor(tmp); } //generated if (workingCopy.ContentDescriptors.Any(c => c.Name.Equals("generated"))) { ContentDescriptor tmp = workingCopy.ContentDescriptors.Where(c => c.Name.Equals("generated")) .FirstOrDefault(); dm.DeleteContentDescriptor(tmp); } #endregion contentdescriptors #region set System value into metadata if (Bus.ContainsKey(TaskManager.DATASET_STATUS)) { bool newdataset = Bus[TaskManager.DATASET_STATUS].ToString().Equals("new"); int v = 1; if (workingCopy.Dataset.Versions != null && workingCopy.Dataset.Versions.Count > 1) { v = workingCopy.Dataset.Versions.Count(); } //set modification workingCopy.ModificationInfo = new EntityAuditInfo() { Performer = User.Name, Comment = "Data", ActionType = newdataset ? AuditActionType.Create : AuditActionType.Edit }; setSystemValuesToMetadata(id, v, workingCopy.Dataset.MetadataStructure.Id, workingCopy.Metadata, newdataset); dm.EditDatasetVersion(workingCopy, null, null, null); } #endregion set System value into metadata // ToDo: Get Comment from ui and users MoveAndSaveOriginalFileInContentDiscriptor(workingCopy); dm.CheckInDataset(id, numberOfRows + " rows", User.Name); //send email var es = new EmailService(); es.Send(MessageHelper.GetUpdateDatasetHeader(datasetid), MessageHelper.GetUpdateDatasetMessage(datasetid, title, User.DisplayName), ConfigurationManager.AppSettings["SystemEmail"] ); } catch (Exception e) { temp.Add(new Error(ErrorType.Other, "Can not upload. : " + e.Message)); var es = new EmailService(); es.Send(MessageHelper.GetErrorHeader(), "Can not upload. 
: " + e.Message, ConfigurationManager.AppSettings["SystemEmail"] ); } finally { } } #endregion structured data #region unstructured data if (Bus.ContainsKey(TaskManager.DATASTRUCTURE_TYPE) && Bus[TaskManager.DATASTRUCTURE_TYPE].Equals(DataStructureType.Unstructured)) { // checkout the dataset, apply the changes, and check it in. if (dm.IsDatasetCheckedOutFor(id, User.Name) || dm.CheckOutDataset(id, User.Name)) { try { workingCopy = dm.GetDatasetWorkingCopy(id); using (var unitOfWork = this.GetUnitOfWork()) { workingCopy.VersionNo += 1; //set StateInfo of the previus version if (workingCopy.StateInfo == null) { workingCopy.StateInfo = new Vaiona.Entities.Common.EntityStateInfo() { State = status }; } else { workingCopy.StateInfo.State = status; } unitOfWork.GetReadOnlyRepository <DatasetVersion>().Load(workingCopy.ContentDescriptors); SaveFileInContentDiscriptor(workingCopy); } if (Bus.ContainsKey(TaskManager.DATASET_STATUS)) { bool newdataset = Bus[TaskManager.DATASET_STATUS].ToString().Equals("new"); int v = 1; if (workingCopy.Dataset.Versions != null && workingCopy.Dataset.Versions.Count > 1) { v = workingCopy.Dataset.Versions.Count(); } //set modification workingCopy.ModificationInfo = new EntityAuditInfo() { Performer = User.Name, Comment = "File", ActionType = AuditActionType.Create }; setSystemValuesToMetadata(id, v, workingCopy.Dataset.MetadataStructure.Id, workingCopy.Metadata, newdataset); dm.EditDatasetVersion(workingCopy, null, null, null); } //filename string filename = ""; if (Bus.ContainsKey(TaskManager.FILENAME)) { filename = Bus[TaskManager.FILENAME]?.ToString(); } // ToDo: Get Comment from ui and users dm.CheckInDataset(id, filename, User.Name, ViewCreationBehavior.None); } catch (Exception ex) { throw ex; } } } #endregion unstructured data } else { temp.Add(new Error(ErrorType.Dataset, "Dataset is not selected.")); } if (temp.Count <= 0) { dm.CheckInDataset(id, "no update on data tuples", User.Name, ViewCreationBehavior.None); } else { 
dm.UndoCheckoutDataset(id, User.Name); } } catch (Exception ex) { temp.Add(new Error(ErrorType.Dataset, ex.Message)); dm.CheckInDataset(id, "no update on data tuples", User.Name, ViewCreationBehavior.None); } finally { if (RunningASync) { var user = User; if (temp.Any()) { var es = new EmailService(); es.Send(MessageHelper.GetPushApiUploadFailHeader(id, title), MessageHelper.GetPushApiUploadFailMessage(id, user.Name, temp.Select(e => e.ToString()).ToArray()), new List <string> { user.Email }, null, new List <string> { ConfigurationManager.AppSettings["SystemEmail"] }); } else { var es = new EmailService(); es.Send(MessageHelper.GetASyncFinishUploadHeader(id, title), MessageHelper.GetASyncFinishUploadMessage(id, title, numberOfRows, numberOfSkippedRows), new List <string> { user.Email }, null, new List <string> { ConfigurationManager.AppSettings["SystemEmail"] }); } } dm.Dispose(); dsm.Dispose(); } return(temp); }
/// <summary>
/// Tests uniqueness of the primary-key values in an uploaded file.
/// </summary>
/// <remarks>
/// Reads the file package-wise (1000 rows at a time), concatenates each row's
/// primary-key values into one string and adds its hash to a Hashtable;
/// Hashtable.Add throws on a duplicate key, which signals a non-unique
/// primary-key combination.
/// </remarks>
/// <seealso cref=""/>
/// <param name="taskManager">Carries FilePath, FileReaderInfo and DataStructureId on its bus.</param>
/// <param name="datasetId">Id of the dataset the file belongs to.</param>
/// <param name="primaryKeys">Variable ids that form the primary key.</param>
/// <param name="ext">File extension (".txt", ".csv" or ".xlsm").</param>
/// <param name="filename">Name of the uploaded file.</param>
/// <returns>true if all primary-key combinations are unique, otherwise false.</returns>
public bool IsUnique(TaskManager taskManager, long datasetId, List <long> primaryKeys, string ext, string filename)
{
    Hashtable hashtable = new Hashtable();
    List <string> primaryValuesAsOneString = new List <string>();
    TaskManager TaskManager = taskManager;

    int packageSize = 1000;
    int position = 1;

    if (ext.Equals(".txt") || ext.Equals(".csv"))
    {
        #region csv

        do
        {
            primaryValuesAsOneString = new List <string>();

            AsciiReader reader = new AsciiReader();
            reader.Position = position;
            Stream stream = reader.Open(TaskManager.Bus["FilePath"].ToString());
            AsciiFileReaderInfo afri = (AsciiFileReaderInfo)TaskManager.Bus["FileReaderInfo"];

            DataStructureManager datastructureManager = new DataStructureManager();
            StructuredDataStructure sds = datastructureManager.StructuredDataStructureRepo.Get(Convert.ToInt64(TaskManager.Bus["DataStructureId"].ToString()));

            // one list of primary-key values per row, e.g.
            // primary keys id, name
            // 1 [1][David]
            // 2 [2][Javad]
            List <List <string> > tempList = reader.ReadValuesFromFile(stream, filename, afri, sds, datasetId, primaryKeys, packageSize);

            // flatten each row's key values into a single string:
            // [1][David] -> "1David", [2][Javad] -> "2Javad"
            foreach (List <string> l in tempList)
            {
                string tempString = "";
                foreach (string s in l)
                {
                    tempString += s;
                }
                if (!String.IsNullOrEmpty(tempString))
                {
                    primaryValuesAsOneString.Add(tempString);
                }
            }

            // add every primary-key string to the hashtable;
            // an Add failure means the key combination already exists
            foreach (string pKey in primaryValuesAsOneString)
            {
                if (pKey != "")
                {
                    try
                    {
                        hashtable.Add(Utility.ComputeKey(pKey), "pKey");
                    }
                    catch
                    {
                        // fixed: the stream was previously leaked on this early
                        // return (the excel branch below closed it, this one did not)
                        stream.Close();
                        return false;
                    }
                }
            }

            position = reader.Position + 1;
            stream.Close();
        } while (primaryValuesAsOneString.Count > 0);

        #endregion
    }

    if (ext.Equals(".xlsm"))
    {
        #region excel template

        do
        {
            //reset
            primaryValuesAsOneString = new List <string>();

            ExcelReader reader = new ExcelReader();
            reader.Position = position;
            Stream stream = reader.Open(TaskManager.Bus["FilePath"].ToString());

            DataStructureManager datastructureManager = new DataStructureManager();
            StructuredDataStructure sds = datastructureManager.StructuredDataStructureRepo.Get(Convert.ToInt64(TaskManager.Bus["DataStructureId"].ToString()));

            // one list of primary-key values per row (see csv branch above)
            List <List <string> > tempList = reader.ReadValuesFromFile(stream, filename, sds, datasetId, primaryKeys, packageSize);

            // flatten each row's key values into a single string
            foreach (List <string> l in tempList)
            {
                string tempString = "";
                foreach (string s in l)
                {
                    tempString += s;
                }
                if (!String.IsNullOrEmpty(tempString))
                {
                    primaryValuesAsOneString.Add(tempString);
                }
            }

            // add every primary-key string to the hashtable;
            // an Add failure means the key combination already exists
            foreach (string pKey in primaryValuesAsOneString)
            {
                if (pKey != "")
                {
                    try
                    {
                        hashtable.Add(Utility.ComputeKey(pKey), pKey);
                    }
                    catch
                    {
                        stream.Close();
                        return false;
                    }
                }
            }

            position = reader.Position + 1;
            stream.Close();
        } while (primaryValuesAsOneString.Count > 0);

        #endregion
    }

    return true;
}
/// <summary>
/// Finishes the upload wizard for a dataset: reads the uploaded file
/// (Excel .xlsm template or ASCII .csv/.txt) package-wise, writes the tuples
/// into a new dataset version and checks the dataset in again. Unstructured
/// data is stored via a content descriptor only.
/// </summary>
/// <param name="taskManager">passed by the caller but unused; the method reads the
/// session-backed TaskManager field instead — NOTE(review): confirm this is intended</param>
/// <returns>list of errors collected during the upload (empty on success)</returns>
public List <Error> FinishUpload2(TaskManager taskManager)
{
    DataStructureManager dsm = new DataStructureManager();

    try
    {
        List <Error> temp = new List <Error>();

        if (TaskManager.Bus.ContainsKey(TaskManager.DATASET_ID) && TaskManager.Bus.ContainsKey(TaskManager.DATASTRUCTURE_ID))
        {
            long id = Convert.ToInt32(TaskManager.Bus[TaskManager.DATASET_ID]);
            long iddsd = Convert.ToInt32(TaskManager.Bus[TaskManager.DATASTRUCTURE_ID]);

            //datatuple list
            List <DataTuple> rows;
            DatasetManager dm = new DatasetManager();
            Dataset ds = dm.GetDataset(id);
            DatasetVersion workingCopy = new DatasetVersion();

            #region Progress Informations

            // reset the progress counters on the bus so the UI starts at 0
            if (TaskManager.Bus.ContainsKey(TaskManager.CURRENTPACKAGESIZE))
            {
                TaskManager.Bus[TaskManager.CURRENTPACKAGESIZE] = 0;
            }
            else
            {
                TaskManager.Bus.Add(TaskManager.CURRENTPACKAGESIZE, 0);
            }

            if (TaskManager.Bus.ContainsKey(TaskManager.CURRENTPACKAGE))
            {
                TaskManager.Bus[TaskManager.CURRENTPACKAGE] = 0;
            }
            else
            {
                TaskManager.Bus.Add(TaskManager.CURRENTPACKAGE, 0);
            }

            #endregion

            #region structured data

            if (TaskManager.Bus.ContainsKey(TaskManager.DATASTRUCTURE_TYPE) && TaskManager.Bus[TaskManager.DATASTRUCTURE_TYPE].Equals(DataStructureType.Structured))
            {
                try
                {
                    //Stopwatch fullTime = Stopwatch.StartNew();
                    //Stopwatch loadDT = Stopwatch.StartNew();

                    // tuples of the latest version; needed to split incoming rows into new/edited ones
                    List <AbstractTuple> datatupleFromDatabase = dm.GetDatasetVersionEffectiveTuples(dm.GetDatasetLatestVersion(ds.Id));

                    //loadDT.Stop();
                    //Debug.WriteLine("Load DT From Db Time " + loadDT.Elapsed.TotalSeconds.ToString());

                    StructuredDataStructure sds = dsm.StructuredDataStructureRepo.Get(iddsd);
                    dsm.StructuredDataStructureRepo.LoadIfNot(sds.Variables);

                    #region excel reader

                    if (TaskManager.Bus[TaskManager.EXTENTION].ToString().Equals(".xlsm"))
                    {
                        int packageSize = 10000;
                        TaskManager.Bus[TaskManager.CURRENTPACKAGESIZE] = packageSize;

                        int counter = 0;

                        ExcelReader reader = new ExcelReader();

                        //schleife
                        dm.CheckOutDatasetIfNot(ds.Id, GetUsernameOrDefault()); // there are cases, the dataset does not get checked out!!
                        if (!dm.IsDatasetCheckedOutFor(ds.Id, GetUsernameOrDefault()))
                        {
                            throw new Exception(string.Format("Not able to checkout dataset '{0}' for user '{1}'!", ds.Id, GetUsernameOrDefault()));
                        }

                        workingCopy = dm.GetDatasetWorkingCopy(ds.Id);

                        // read and store the file package-wise until a package comes back empty
                        do
                        {
                            //Stopwatch packageTime = Stopwatch.StartNew();
                            counter++;
                            TaskManager.Bus[TaskManager.CURRENTPACKAGE] = counter;

                            // open file
                            Stream = reader.Open(TaskManager.Bus[TaskManager.FILEPATH].ToString());

                            rows = reader.ReadFile(Stream, TaskManager.Bus[TaskManager.FILENAME].ToString(), sds, (int)id, packageSize);

                            if (reader.ErrorMessages.Count > 0)
                            {
                                // NOTE(review): reader errors are silently dropped here
                                //model.ErrorList = reader.errorMessages;
                            }
                            else
                            {
                                //XXX Add packagesize to excel read function
                                if (TaskManager.Bus.ContainsKey(TaskManager.DATASET_STATUS))
                                {
                                    if (TaskManager.Bus[TaskManager.DATASET_STATUS].ToString().Equals("new"))
                                    {
                                        //Stopwatch upload = Stopwatch.StartNew();
                                        dm.EditDatasetVersion(workingCopy, rows, null, null);
                                        //Debug.WriteLine("Upload : " + counter + " Time " + upload.Elapsed.TotalSeconds.ToString());
                                        //Debug.WriteLine("----");
                                    }

                                    if (TaskManager.Bus[TaskManager.DATASET_STATUS].ToString().Equals("edit"))
                                    {
                                        if (rows.Count() > 0)
                                        {
                                            //Stopwatch split = Stopwatch.StartNew();
                                            Dictionary <string, List <DataTuple> > splittedDatatuples = new Dictionary <string, List <DataTuple> >();
                                            splittedDatatuples = uploadWizardHelper.GetSplitDatatuples2(rows, (List <long>)TaskManager.Bus[TaskManager.PRIMARY_KEYS], workingCopy, ref datatupleFromDatabase);
                                            //split.Stop();
                                            //Debug.WriteLine("Split : " + counter + " Time " + split.Elapsed.TotalSeconds.ToString());

                                            //Stopwatch upload = Stopwatch.StartNew();
                                            dm.EditDatasetVersion(workingCopy, splittedDatatuples["new"], splittedDatatuples["edit"], null);
                                            // upload.Stop();
                                            // Debug.WriteLine("Upload : " + counter + " Time " + upload.Elapsed.TotalSeconds.ToString());
                                            // Debug.WriteLine("----");
                                        }
                                    }
                                }
                                else
                                {
                                }
                            }

                            Stream.Close();

                            //packageTime.Stop();
                            //Debug.WriteLine("Package : " + counter + " packageTime Time " + packageTime.Elapsed.TotalSeconds.ToString());
                        } while (rows.Count() > 0);

                        //fullTime.Stop();
                        //Debug.WriteLine("FullTime " + fullTime.Elapsed.TotalSeconds.ToString());
                    }

                    #endregion

                    #region ascii reader

                    if (TaskManager.Bus[TaskManager.EXTENTION].ToString().Equals(".csv") || TaskManager.Bus[TaskManager.EXTENTION].ToString().Equals(".txt"))
                    {
                        // open file
                        AsciiReader reader = new AsciiReader();
                        //Stream = reader.Open(TaskManager.Bus[TaskManager.FILEPATH].ToString());

                        //DatasetManager dm = new DatasetManager();
                        //Dataset ds = dm.GetDataset(id);

                        Stopwatch totalTime = Stopwatch.StartNew();

                        if (dm.IsDatasetCheckedOutFor(ds.Id, GetUsernameOrDefault()) || dm.CheckOutDataset(ds.Id, GetUsernameOrDefault()))
                        {
                            workingCopy = dm.GetDatasetWorkingCopy(ds.Id);

                            int packageSize = 100000;
                            TaskManager.Bus[TaskManager.CURRENTPACKAGESIZE] = packageSize;

                            //schleife
                            int counter = 0;

                            // read and store the file package-wise until a package comes back empty
                            do
                            {
                                counter++;
                                TaskManager.Bus[TaskManager.CURRENTPACKAGE] = counter;

                                Stream = reader.Open(TaskManager.Bus[TaskManager.FILEPATH].ToString());
                                rows = reader.ReadFile(Stream, TaskManager.Bus[TaskManager.FILENAME].ToString(), (AsciiFileReaderInfo)TaskManager.Bus[TaskManager.FILE_READER_INFO], sds, id, packageSize);
                                Stream.Close();

                                if (reader.ErrorMessages.Count > 0)
                                {
                                    // NOTE(review): reader errors are silently dropped here
                                    //model.ErrorList = reader.errorMessages;
                                }
                                else
                                {
                                    //model.Validated = true;
                                    Stopwatch dbTimer = Stopwatch.StartNew();

                                    if (TaskManager.Bus.ContainsKey(TaskManager.DATASET_STATUS))
                                    {
                                        if (TaskManager.Bus[TaskManager.DATASET_STATUS].ToString().Equals("new"))
                                        {
                                            dm.EditDatasetVersion(workingCopy, rows, null, null);
                                        }

                                        if (TaskManager.Bus[TaskManager.DATASET_STATUS].ToString().Equals("edit"))
                                        {
                                            if (rows.Count() > 0)
                                            {
                                                Dictionary <string, List <DataTuple> > splittedDatatuples = new Dictionary <string, List <DataTuple> >();
                                                splittedDatatuples = uploadWizardHelper.GetSplitDatatuples2(rows, (List <long>)TaskManager.Bus[TaskManager.PRIMARY_KEYS], workingCopy, ref datatupleFromDatabase);
                                                dm.EditDatasetVersion(workingCopy, splittedDatatuples["new"], splittedDatatuples["edit"], null);
                                            }
                                        }
                                    }
                                    else
                                    {
                                        if (rows.Count() > 0)
                                        {
                                            Dictionary <string, List <DataTuple> > splittedDatatuples = new Dictionary <string, List <DataTuple> >();
                                            splittedDatatuples = uploadWizardHelper.GetSplitDatatuples2(rows, (List <long>)TaskManager.Bus[TaskManager.PRIMARY_KEYS], workingCopy, ref datatupleFromDatabase);
                                            dm.EditDatasetVersion(workingCopy, splittedDatatuples["new"], splittedDatatuples["edit"], null);
                                        }
                                    }

                                    dbTimer.Stop();
                                    Debug.WriteLine(" db time" + dbTimer.Elapsed.TotalSeconds.ToString());
                                }
                            } while (rows.Count() > 0);

                            totalTime.Stop();
                            Debug.WriteLine(" Total Time " + totalTime.Elapsed.TotalSeconds.ToString());
                        }

                        //Stream.Close();
                    }

                    #endregion

                    // start download generator
                    // filepath
                    //string path = "";
                    //if (workingCopy != null)
                    //{
                    //    path = GenerateDownloadFile(workingCopy);
                    //    dm.EditDatasetVersion(workingCopy, null, null, null);
                    //}

                    // ToDo: Get Comment from ui and users
                    dm.CheckInDataset(ds.Id, "upload data from upload wizard", GetUsernameOrDefault());

                    LoggerFactory.LogData(id.ToString(), typeof(Dataset).Name, Vaiona.Entities.Logging.CrudState.Updated);
                }
                catch (Exception e)
                {
                    temp.Add(new Error(ErrorType.Other, "Can not upload. : " + e.Message));
                    dm.CheckInDataset(ds.Id, "checked in but no update on data tuples", GetUsernameOrDefault(), ViewCreationBehavior.None);
                }
                finally
                {
                }
            }

            #endregion

            #region unstructured data

            if (TaskManager.Bus.ContainsKey(TaskManager.DATASTRUCTURE_TYPE) && TaskManager.Bus[TaskManager.DATASTRUCTURE_TYPE].Equals(DataStructureType.Unstructured))
            {
                workingCopy = dm.GetDatasetLatestVersion(ds.Id);
                SaveFileInContentDiscriptor(workingCopy);
                dm.EditDatasetVersion(workingCopy, null, null, null);

                // ToDo: Get Comment from ui and users
                dm.CheckInDataset(ds.Id, "upload unstructured data", GetUsernameOrDefault(), ViewCreationBehavior.None);
            }

            #endregion
        }
        else
        {
            temp.Add(new Error(ErrorType.Dataset, "Dataset is not selected."));
        }

        return (temp);
    }
    finally
    {
        dsm.Dispose();
    }
}
//temporary solution: norman :FinishUpload2
/// <summary>
/// Finishes the upload wizard: reads the uploaded file (Excel .xlsm template
/// or ASCII .csv/.txt) package-wise, writes the tuples into a checked-out
/// working copy (carrying over the state of the previous version), removes
/// stale generated content descriptors and checks the dataset back in. On
/// success an update notification email is sent; on failure an error email.
/// </summary>
/// <param name="taskManager">passed by the caller but unused; the session-backed
/// TaskManager field is read instead — NOTE(review): confirm this is intended</param>
/// <returns>errors collected during the upload (empty on success)</returns>
public List <Error> FinishUpload(TaskManager taskManager)
{
    DataStructureManager dsm = new DataStructureManager();
    DatasetManager dm = new DatasetManager();

    try
    {
        List <Error> temp = new List <Error>();
        DatasetVersion workingCopy = new DatasetVersion();

        //datatuple list
        List <DataTuple> rows = new List <DataTuple>();
        Dataset ds = null;
        bool inputWasAltered = false;

        if (TaskManager.Bus.ContainsKey(TaskManager.DATASET_ID) && TaskManager.Bus.ContainsKey(TaskManager.DATASTRUCTURE_ID))
        {
            long id = Convert.ToInt32(TaskManager.Bus[TaskManager.DATASET_ID]);
            long iddsd = Convert.ToInt32(TaskManager.Bus[TaskManager.DATASTRUCTURE_ID]);

            ds = dm.GetDataset(id);
            // Javad: Please check if the dataset does exists!!

            // take over the state of the previous version; "NotValid" when none exists
            DatasetVersion latestVersion = dm.GetDatasetLatestVersion(ds);
            string status = DatasetStateInfo.NotValid.ToString();
            if (latestVersion.StateInfo != null)
            {
                status = latestVersion.StateInfo.State;
            }

            #region Progress Informations

            // reset the progress counters on the bus so the UI starts at 0
            if (TaskManager.Bus.ContainsKey(TaskManager.CURRENTPACKAGESIZE))
            {
                TaskManager.Bus[TaskManager.CURRENTPACKAGESIZE] = 0;
            }
            else
            {
                TaskManager.Bus.Add(TaskManager.CURRENTPACKAGESIZE, 0);
            }

            if (TaskManager.Bus.ContainsKey(TaskManager.CURRENTPACKAGE))
            {
                TaskManager.Bus[TaskManager.CURRENTPACKAGE] = 0;
            }
            else
            {
                TaskManager.Bus.Add(TaskManager.CURRENTPACKAGE, 0);
            }

            #endregion

            #region structured data

            if (TaskManager.Bus.ContainsKey(TaskManager.DATASTRUCTURE_TYPE) && TaskManager.Bus[TaskManager.DATASTRUCTURE_TYPE].Equals(DataStructureType.Structured))
            {
                string title = "";
                long datasetid = ds.Id;
                XmlDatasetHelper xmlDatasetHelper = new XmlDatasetHelper();
                title = xmlDatasetHelper.GetInformation(ds.Id, NameAttributeValues.title);

                try
                {
                    // tuple ids of the latest version; needed to split incoming rows into new/edited ones
                    List <long> datatupleFromDatabaseIds = dm.GetDatasetVersionEffectiveTupleIds(dm.GetDatasetLatestVersion(ds.Id));

                    StructuredDataStructure sds = dsm.StructuredDataStructureRepo.Get(iddsd);
                    dsm.StructuredDataStructureRepo.LoadIfNot(sds.Variables);

                    #region excel reader

                    if (TaskManager.Bus[TaskManager.EXTENTION].ToString().Equals(".xlsm"))
                    {
                        int packageSize = 10000;
                        TaskManager.Bus[TaskManager.CURRENTPACKAGESIZE] = packageSize;

                        int counter = 0;

                        ExcelReader reader = new ExcelReader();

                        dm.CheckOutDatasetIfNot(ds.Id, GetUsernameOrDefault()); // there are cases, the dataset does not get checked out!!
                        if (!dm.IsDatasetCheckedOutFor(ds.Id, GetUsernameOrDefault()))
                        {
                            throw new Exception(string.Format("Not able to checkout dataset '{0}' for user '{1}'!", ds.Id, GetUsernameOrDefault()));
                        }

                        workingCopy = dm.GetDatasetWorkingCopy(ds.Id);

                        //set StateInfo of the previus version
                        if (workingCopy.StateInfo == null)
                        {
                            workingCopy.StateInfo = new Vaiona.Entities.Common.EntityStateInfo()
                            {
                                State = status
                            };
                        }
                        else
                        {
                            workingCopy.StateInfo.State = status;
                        }

                        // read and store the file package-wise until a package comes back empty
                        do
                        {
                            counter++;
                            TaskManager.Bus[TaskManager.CURRENTPACKAGE] = counter;

                            // open file
                            Stream = reader.Open(TaskManager.Bus[TaskManager.FILEPATH].ToString());

                            Stopwatch upload = Stopwatch.StartNew();
                            rows = reader.ReadFile(Stream, TaskManager.Bus[TaskManager.FILENAME].ToString(), sds, (int)id, packageSize);
                            upload.Stop();
                            Debug.WriteLine("ReadFile: " + counter + " Time " + upload.Elapsed.TotalSeconds.ToString());

                            if (reader.ErrorMessages.Count > 0)
                            {
                                // NOTE(review): reader errors are silently dropped here
                            }
                            else
                            {
                                //XXX Add packagesize to excel read function
                                if (TaskManager.Bus.ContainsKey(TaskManager.DATASET_STATUS))
                                {
                                    if (TaskManager.Bus[TaskManager.DATASET_STATUS].ToString().Equals("new"))
                                    {
                                        upload = Stopwatch.StartNew();
                                        dm.EditDatasetVersion(workingCopy, rows, null, null);
                                        upload.Stop();
                                        Debug.WriteLine("EditDatasetVersion: " + counter + " Time " + upload.Elapsed.TotalSeconds.ToString());
                                    }

                                    if (TaskManager.Bus[TaskManager.DATASET_STATUS].ToString().Equals("edit"))
                                    {
                                        if (rows.Count() > 0)
                                        {
                                            Dictionary <string, List <DataTuple> > splittedDatatuples = new Dictionary <string, List <DataTuple> >();
                                            splittedDatatuples = uploadWizardHelper.GetSplitDatatuples(rows, (List <long>)TaskManager.Bus[TaskManager.PRIMARY_KEYS], workingCopy, ref datatupleFromDatabaseIds);

                                            dm.EditDatasetVersion(workingCopy, splittedDatatuples["new"], splittedDatatuples["edit"], null);
                                        }
                                    }
                                }
                                else
                                {
                                }
                            }

                            Stream.Close();
                        } while (rows.Count() > 0);
                    }

                    #endregion

                    #region ascii reader

                    if (TaskManager.Bus[TaskManager.EXTENTION].ToString().Equals(".csv") || TaskManager.Bus[TaskManager.EXTENTION].ToString().Equals(".txt"))
                    {
                        // open file
                        AsciiReader reader = new AsciiReader();

                        Stopwatch totalTime = Stopwatch.StartNew();

                        if (dm.IsDatasetCheckedOutFor(ds.Id, GetUsernameOrDefault()) || dm.CheckOutDataset(ds.Id, GetUsernameOrDefault()))
                        {
                            workingCopy = dm.GetDatasetWorkingCopy(ds.Id);

                            int packageSize = 100000;
                            TaskManager.Bus[TaskManager.CURRENTPACKAGESIZE] = packageSize;

                            int counter = 0;

                            //set StateInfo of the previus version
                            if (workingCopy.StateInfo == null)
                            {
                                workingCopy.StateInfo = new Vaiona.Entities.Common.EntityStateInfo()
                                {
                                    State = status
                                };
                            }
                            else
                            {
                                workingCopy.StateInfo.State = status;
                            }

                            // read and store the file package-wise; a package that altered
                            // existing tuples forces one more round trip
                            do
                            {
                                counter++;
                                inputWasAltered = false;
                                TaskManager.Bus[TaskManager.CURRENTPACKAGE] = counter;

                                Stream = reader.Open(TaskManager.Bus[TaskManager.FILEPATH].ToString());
                                rows = reader.ReadFile(Stream, TaskManager.Bus[TaskManager.FILENAME].ToString(), (AsciiFileReaderInfo)TaskManager.Bus[TaskManager.FILE_READER_INFO], sds, id, packageSize);
                                Stream.Close();

                                if (reader.ErrorMessages.Count > 0)
                                {
                                    // collect reader errors; processing still continues for this package
                                    foreach (var err in reader.ErrorMessages)
                                    {
                                        temp.Add(new Error(ErrorType.Dataset, err.GetMessage()));
                                    }
                                    //return temp;
                                }

                                Stopwatch dbTimer = Stopwatch.StartNew();

                                if (TaskManager.Bus.ContainsKey(TaskManager.DATASET_STATUS))
                                {
                                    if (TaskManager.Bus[TaskManager.DATASET_STATUS].ToString().Equals("new"))
                                    {
                                        dm.EditDatasetVersion(workingCopy, rows, null, null);
                                    }

                                    if (TaskManager.Bus[TaskManager.DATASET_STATUS].ToString().Equals("edit"))
                                    {
                                        if (rows.Count() > 0)
                                        {
                                            var splittedDatatuples = uploadWizardHelper.GetSplitDatatuples(rows, (List <long>)TaskManager.Bus[TaskManager.PRIMARY_KEYS], workingCopy, ref datatupleFromDatabaseIds);
                                            dm.EditDatasetVersion(workingCopy, splittedDatatuples["new"], splittedDatatuples["edit"], null);
                                            inputWasAltered = true;
                                        }
                                    }
                                }
                                else
                                {
                                    if (rows.Count() > 0)
                                    {
                                        Dictionary <string, List <DataTuple> > splittedDatatuples = new Dictionary <string, List <DataTuple> >();
                                        splittedDatatuples = uploadWizardHelper.GetSplitDatatuples(rows, (List <long>)TaskManager.Bus[TaskManager.PRIMARY_KEYS], workingCopy, ref datatupleFromDatabaseIds);
                                        dm.EditDatasetVersion(workingCopy, splittedDatatuples["new"], splittedDatatuples["edit"], null);
                                        inputWasAltered = true;
                                    }
                                }

                                dbTimer.Stop();
                                Debug.WriteLine(" db time" + dbTimer.Elapsed.TotalSeconds.ToString());
                            } while (rows.Count() > 0 || inputWasAltered == true);

                            totalTime.Stop();
                            Debug.WriteLine(" Total Time " + totalTime.Elapsed.TotalSeconds.ToString());
                        }
                    }

                    #endregion

                    #region contentdescriptors

                    //remove all contentdescriptors from the old version
                    //generatedTXT
                    if (workingCopy.ContentDescriptors.Any(c => c.Name.Equals("generatedTXT")))
                    {
                        ContentDescriptor tmp = workingCopy.ContentDescriptors.Where(c => c.Name.Equals("generatedTXT"))
                            .FirstOrDefault();
                        dm.DeleteContentDescriptor(tmp);
                    }

                    //generatedCSV
                    if (workingCopy.ContentDescriptors.Any(c => c.Name.Equals("generatedCSV")))
                    {
                        ContentDescriptor tmp = workingCopy.ContentDescriptors.Where(c => c.Name.Equals("generatedCSV"))
                            .FirstOrDefault();
                        dm.DeleteContentDescriptor(tmp);
                    }

                    //generated
                    if (workingCopy.ContentDescriptors.Any(c => c.Name.Equals("generated")))
                    {
                        ContentDescriptor tmp = workingCopy.ContentDescriptors.Where(c => c.Name.Equals("generated"))
                            .FirstOrDefault();
                        dm.DeleteContentDescriptor(tmp);
                    }

                    #endregion

                    // ToDo: Get Comment from ui and users
                    MoveAndSaveOriginalFileInContentDiscriptor(workingCopy);
                    dm.CheckInDataset(ds.Id, "upload data from upload wizard", GetUsernameOrDefault());

                    //send email
                    var es = new EmailService();
                    es.Send(MessageHelper.GetUpdateDatasetHeader(),
                        MessageHelper.GetUpdateDatasetMessage(datasetid, title, GetUsernameOrDefault()),
                        ConfigurationManager.AppSettings["SystemEmail"]
                        );
                }
                catch (Exception e)
                {
                    temp.Add(new Error(ErrorType.Other, "Can not upload. : " + e.Message));

                    var es = new EmailService();
                    es.Send(MessageHelper.GetErrorHeader(),
                        "Can not upload. : " + e.Message,
                        ConfigurationManager.AppSettings["SystemEmail"]
                        );
                }
                finally
                {
                }
            }

            #endregion

            #region unstructured data

            if (TaskManager.Bus.ContainsKey(TaskManager.DATASTRUCTURE_TYPE) && TaskManager.Bus[TaskManager.DATASTRUCTURE_TYPE].Equals(DataStructureType.Unstructured))
            {
                // checkout the dataset, apply the changes, and check it in.
                if (dm.IsDatasetCheckedOutFor(ds.Id, GetUsernameOrDefault()) || dm.CheckOutDataset(ds.Id, GetUsernameOrDefault()))
                {
                    try
                    {
                        workingCopy = dm.GetDatasetWorkingCopy(ds.Id);

                        using (var unitOfWork = this.GetUnitOfWork())
                        {
                            workingCopy = unitOfWork.GetReadOnlyRepository <DatasetVersion>().Get(workingCopy.Id);

                            //set StateInfo of the previus version
                            if (workingCopy.StateInfo == null)
                            {
                                workingCopy.StateInfo = new Vaiona.Entities.Common.EntityStateInfo()
                                {
                                    State = status
                                };
                            }
                            else
                            {
                                workingCopy.StateInfo.State = status;
                            }

                            unitOfWork.GetReadOnlyRepository <DatasetVersion>().Load(workingCopy.ContentDescriptors);
                            SaveFileInContentDiscriptor(workingCopy);
                        }

                        dm.EditDatasetVersion(workingCopy, null, null, null);

                        // ToDo: Get Comment from ui and users
                        dm.CheckInDataset(ds.Id, "upload unstructured data", GetUsernameOrDefault(), ViewCreationBehavior.None);
                    }
                    catch (Exception)
                    {
                        // fix: rethrow with "throw;" to preserve the original stack trace
                        // (was "throw ex;", which resets it)
                        throw;
                    }
                }
            }

            #endregion
        }
        else
        {
            temp.Add(new Error(ErrorType.Dataset, "Dataset is not selected."));
        }

        // fix: guard against ds == null (no dataset selected) — the original
        // dereferenced ds.Id here and crashed with a NullReferenceException
        // instead of returning the collected error list
        if (ds != null)
        {
            if (temp.Count <= 0)
            {
                dm.CheckInDataset(ds.Id, "checked in but no update on data tuples", GetUsernameOrDefault(), ViewCreationBehavior.None);
            }
            else
            {
                dm.UndoCheckoutDataset(ds.Id, GetUsernameOrDefault());
            }
        }

        return (temp);
    }
    finally
    {
        dm.Dispose();
        dsm.Dispose();
    }
}
/// <summary>
/// Upload wizard step: checks the selected file. Depending on the target
/// data-structure type the file is opened once to verify it is readable
/// (and, for .xlsm, that it is a template); on success the wizard advances
/// to the next step, otherwise the selection view is redisplayed with errors.
/// </summary>
/// <param name="data">key/value pairs to push onto the TaskManager bus (may be null)</param>
/// <returns>redirect to the next wizard step when valid; otherwise the partial selection view</returns>
public ActionResult SelectAFile(object[] data)
{
    var model = new SelectFileViewModel();
    TaskManager = (TaskManager)Session["TaskManager"];

    if (data != null)
    {
        TaskManager.AddToBus(data);
    }

    model.StepInfo = TaskManager.Current();

    TaskManager.Current().SetValid(false);

    // NOTE(review): TaskManager was already dereferenced above, so this null
    // check can no longer prevent a NullReferenceException — confirm ordering
    if (TaskManager != null)
    {
        // is path of FileStream exist
        if (TaskManager.Bus.ContainsKey(TaskManager.FILEPATH))
        {
            if (IsSupportedExtention(TaskManager))
            {
                try
                {
                    if (GetDataStructureType().Equals(DataStructureType.Structured))
                    {
                        #region structured datastructure

                        //try save FileStream
                        var filePath = TaskManager.Bus[TaskManager.FILEPATH].ToString();

                        //if extention like a makro excel FileStream
                        if (TaskManager.Bus[TaskManager.EXTENTION].ToString().Equals(".xlsm"))
                        {
                            // open FileStream
                            var reader = new ExcelReader();
                            Stream = reader.Open(filePath);
                            //Session["Stream"] = Stream;

                            //check is it template
                            if (reader.IsTemplate(Stream))
                            {
                                TaskManager.Current().SetValid(true);
                                TaskManager.AddToBus(TaskManager.IS_TEMPLATE, "true");
                            }
                            else
                            {
                                model.ErrorList.Add(new Error(ErrorType.Other, "File is not a Template"));
                                TaskManager.AddToBus(TaskManager.IS_TEMPLATE, "false");
                            }

                            if (!ExcelReader.SUPPORTED_APPLICATIONS.Contains(reader.Application))
                            {
                                model.ErrorList.Add(new Error(ErrorType.Other, "The document was created in an application " + reader.Application + " that will currently not support"));
                            }

                            Stream.Close();
                        }
                        else
                        {
                            TaskManager.AddToBus(TaskManager.IS_TEMPLATE, "false");

                            // excel FileStream
                            if (TaskManager.Bus[TaskManager.EXTENTION].ToString().Equals(".xls"))
                            {
                                // open FileStream
                                var reader = new ExcelReader();
                                Stream = reader.Open(filePath);
                                //Session["Stream"] = Stream;
                                TaskManager.Current().SetValid(true);
                                Stream.Close();
                            }
                            // text or csv FileStream
                            else if (TaskManager.Bus[TaskManager.EXTENTION].ToString().Equals(".csv") || TaskManager.Bus[TaskManager.EXTENTION].ToString().Equals(".txt"))
                            {
                                // open FileStream
                                var reader = new AsciiReader();
                                Stream = reader.Open(filePath);
                                //Session["Stream"] = Stream;
                                TaskManager.Current().SetValid(true);
                                Stream.Close();
                            }
                        }

                        #endregion
                    }

                    if (GetDataStructureType().Equals(DataStructureType.Unstructured))
                    {
                        #region unstructured datastructure

                        // for unstructured data only the existence of the file is checked
                        var filePath = TaskManager.Bus[TaskManager.FILEPATH].ToString();
                        if (FileHelper.FileExist(filePath))
                        {
                            TaskManager.Current().SetValid(true);
                        }

                        #endregion
                    }
                }
                catch (Exception ex)
                {
                    // NOTE(review): the exception detail (ex) is swallowed here
                    model.ErrorList.Add(new Error(ErrorType.Other, "Cannot access FileStream on server."));
                }
            }
            else
            {
                model.ErrorList.Add(new Error(ErrorType.Other, "File is not supported."));
            }
        }
        else
        {
            model.ErrorList.Add(new Error(ErrorType.Other, "No FileStream selected or submitted."));
        }

        // file passed all checks -> advance the wizard to the next step
        if (TaskManager.Current().IsValid())
        {
            TaskManager.AddExecutedStep(TaskManager.Current());
            TaskManager.GoToNext();
            Session["TaskManager"] = TaskManager;
            ActionInfo actionInfo = TaskManager.Current().GetActionInfo;
            return (RedirectToAction(actionInfo.ActionName, actionInfo.ControllerName, new RouteValueDictionary { { "area", actionInfo.AreaName }, { "index", TaskManager.GetCurrentStepInfoIndex() } }));
        }
    }

    model.serverFileList = GetServerFileList();

    //get datastuctureType
    model.DataStructureType = GetDataStructureType();
    model.SupportedFileExtentions = UploadWizardHelper.GetExtentionList(model.DataStructureType, this.Session.GetTenant());

    return (PartialView(model));
}
/// <summary>
/// Upload wizard step: validates the uploaded file against the selected
/// structured data structure, de-duplicates the resulting error list and
/// groups value errors by variable name and issue for display.
/// </summary>
/// <returns>the partial view of the current wizard step with the validation model</returns>
public ActionResult ValidateFile()
{
    ViewData["SortedErrors"] = null;
    DataStructureManager dsm = new DataStructureManager();
    IOUtility iOUtility = new IOUtility();

    try
    {
        // local variable deliberately shadows the controller member; read from the session
        BExIS.Dcm.UploadWizard.TaskManager TaskManager = (BExIS.Dcm.UploadWizard.TaskManager)Session["TaskManager"];
        ValidationModel model = new ValidationModel();
        model.StepInfo = TaskManager.Current();

        if (TaskManager.Bus.ContainsKey(TaskManager.DATASET_ID) && TaskManager.Bus.ContainsKey(TaskManager.DATASTRUCTURE_ID))
        {
            try
            {
                long id = (long)Convert.ToInt32(TaskManager.Bus[TaskManager.DATASET_ID]);
                long iddsd = (long)Convert.ToInt32(TaskManager.Bus[TaskManager.DATASTRUCTURE_ID]);

                StructuredDataStructure sds = dsm.StructuredDataStructureRepo.Get(iddsd);
                dsm.StructuredDataStructureRepo.LoadIfNot(sds.Variables);

                // Add Number of Variables to the BUS
                if (sds != null)
                {
                    TaskManager.AddToBus(TaskManager.NUMBERSOFVARIABLES, sds.Variables.Count);
                }

                if (TaskManager.Bus[TaskManager.EXTENTION].ToString().Equals(".xlsm"))
                {
                    // open FileStream and validate the excel template
                    ExcelReader reader = new ExcelReader(sds, new ExcelFileReaderInfo());

                    Stream = reader.Open(TaskManager.Bus[TaskManager.FILEPATH].ToString());
                    reader.ValidateTemplateFile(Stream, TaskManager.Bus[TaskManager.FILENAME].ToString(), id);

                    model.ErrorList = reader.ErrorMessages;

                    TaskManager.AddToBus(TaskManager.NUMBERSOFROWS, reader.NumberOfRows);
                }

                if (iOUtility.IsSupportedExcelFile(TaskManager.Bus[TaskManager.EXTENTION].ToString()))
                {
                    // open FileStream and validate a plain excel file
                    ExcelReader reader = new ExcelReader(sds, (ExcelFileReaderInfo)TaskManager.Bus[TaskManager.FILE_READER_INFO]);

                    Stream = reader.Open(TaskManager.Bus[TaskManager.FILEPATH].ToString());
                    reader.ValidateFile(Stream, TaskManager.Bus[TaskManager.FILENAME].ToString(), id);

                    model.ErrorList = reader.ErrorMessages;

                    TaskManager.AddToBus(TaskManager.NUMBERSOFROWS, reader.NumberOfRows);
                }

                if (iOUtility.IsSupportedAsciiFile(TaskManager.Bus[TaskManager.EXTENTION].ToString()))
                {
                    // open FileStream and validate a csv/txt file
                    AsciiReader reader = new AsciiReader(sds, (AsciiFileReaderInfo)TaskManager.Bus[TaskManager.FILE_READER_INFO]);

                    Stream = reader.Open(TaskManager.Bus[TaskManager.FILEPATH].ToString());
                    reader.ValidateFile(Stream, TaskManager.Bus[TaskManager.FILENAME].ToString(), id);

                    model.ErrorList = reader.ErrorMessages;

                    TaskManager.AddToBus(TaskManager.NUMBERSOFROWS, reader.NumberOfRows);
                }
            }
            catch (Exception ex)
            {
                model.ErrorList.Add(new Error(ErrorType.Other, "Can not valid. : " + ex.Message));
                TaskManager.AddToBus(TaskManager.VALID, false);
            }
            finally
            {
                // fix: Stream stays null when reader.Open was never reached
                // (e.g. the data-structure lookup threw) — the unconditional
                // Close() crashed with a NullReferenceException from finally
                if (Stream != null)
                {
                    Stream.Close();
                }
            }
        }
        else
        {
            model.ErrorList.Add(new Error(ErrorType.Dataset, "Dataset is not selected."));
            TaskManager.AddToBus(TaskManager.VALID, false);
        }

        if (model.ErrorList.Count() == 0)
        {
            model.Validated = true;
            TaskManager.AddToBus(TaskManager.VALID, true);
        }

        // de-duplicate the error list in O(n) instead of the previous O(n^2)
        // nested scan; semantics unchanged: errors are equal when their
        // ToString() output matches, first occurrence wins, order preserved
        List <Error> errorList = new List <Error>();
        HashSet <string> seenErrors = new HashSet <string>();

        foreach (Error error in model.ErrorList)
        {
            if (seenErrors.Add(error.ToString()))
            {
                errorList.Add(error);
            }
        }

        model.ErrorList = errorList;

        if (errorList.Count > 0)
        {
            // split up the error messages for a better overview:
            // group all value errors by variable name and issue text.
            // NOTE(review): e.GetType() presumably resolves to a shadowing member
            // on Error that returns ErrorType (not System.Object.GetType) — confirm
            var varNames = errorList.Where(e => e.GetType().Equals(ErrorType.Value)).Select(e => e.getName()).Distinct();
            var varIssues = errorList.Where(e => e.GetType().Equals(ErrorType.Value)).Select(e => e.GetMessage()).Distinct();

            List <Tuple <string, int, string> > sortedErrors = new List <Tuple <string, int, string> >();

            foreach (string vn in varNames)
            {
                foreach (string i in varIssues)
                {
                    int c = errorList.Where(e => e.getName().Equals(vn) && e.GetMessage().Equals(i)).Count();
                    if (c > 0)
                    {
                        sortedErrors.Add(new Tuple <string, int, string>(vn, c, i));
                    }
                }
            }

            if (sortedErrors.Count > 0)
            {
                ViewData["SortedValueErrors"] = sortedErrors;
            }
        }

        return (PartialView(TaskManager.Current().GetActionInfo.ActionName, model));
    }
    finally
    {
        dsm.Dispose();
    }
}
/// <summary>
/// Creates seed data: generates the day entries and imports predicate seed
/// data from "PredicateSeedData.txt" in the workspace, skipping predicates
/// that already exist (matched by name).
/// Large regions of commented-out seed imports (plants, animals, effects,
/// interactions) were removed as dead code; recover them from version control
/// if they are ever needed again.
/// </summary>
/// <returns>the "Index" view</returns>
public ActionResult LoadData()
{
    // test create seeddata
    SeedDataGenerator.GenerateDays();

    AsciiReader reader = new AsciiReader();

    #region Predicate

    string path = Path.Combine(AppConfigHelper.GetWorkspace(), "PredicateSeedData.txt");

    if (DataReader.FileExist(path))
    {
        Stream fileStream = reader.Open(path);
        List <Predicate> predicates = reader.ReadFile <Predicate>(fileStream, "PredicateSeedData.txt", "Predicate");
        fileStream.Close(); // fix: the stream was previously leaked

        SubjectManager manager = new SubjectManager();

        foreach (var predicate in predicates)
        {
            // only create predicates that do not exist yet (matched by name)
            if (!manager.GetAll <Predicate>().Any(p => p.Name.Equals(predicate.Name)))
            {
                manager.Create(predicate);
            }
        }

        Debug.WriteLine("PredicateSeedData.txt : " + predicates.Count);
    }

    #endregion

    return (View("Index"));
}