/// <summary>
/// Converts an import data table into a <see cref="DataRepository"/>.
/// </summary>
/// <param name="importDataTable">Import data table to convert.</param>
/// <param name="columnInfos">Column specifications including meta data specification.</param>
/// <param name="errorNaN">Set to true when at least one error value was replaced by NaN because it was out of definition.</param>
/// <returns>The converted DataRepository object.</returns>
public DataRepository ConvertImportDataTable(ImportDataTable importDataTable, IEnumerable<ColumnInfo> columnInfos, out bool errorNaN)
{
   errorNaN = false;
   var repository = new DataRepository { Name = importDataTable.TableName };

   // Materialize the column infos once, then sort the table view ascending by its base grid columns.
   var infoList = columnInfos as IList<ColumnInfo> ?? columnInfos.ToList();
   importDataTable.DefaultView.Sort = getSortString(infoList);

   addExtendedPropertyForSource(importDataTable, repository);
   addExtendedPropertiesForMetaData(importDataTable, repository);
   addExtendedPropertiesForGroupBy(importDataTable, repository);

   // Convert every column, accumulating whether any error value was turned into NaN.
   foreach (ImportDataColumn column in importDataTable.Columns)
   {
      bool columnHadNaN;
      convertImportDataColumn(repository, column, infoList, out columnHadNaN);
      errorNaN |= columnHadNaN;
   }

   // Make associations of columns.
   associateColumns(infoList, repository);
   return repository;
}
/// <summary>
/// Copies the values of the first meta data row into extended properties of the repository.
/// Cells that are DBNull are skipped unless the meta data column is required.
/// </summary>
private static void addExtendedPropertiesForMetaData(ImportDataTable importDataTable, DataRepository dataRepository)
{
   var metaData = importDataTable.MetaData;
   if (metaData == null || metaData.Rows.Count <= 0)
      return;

   var firstRow = metaData.Rows.ItemByIndex(0);
   foreach (MetaDataColumn metaDataColumn in metaData.Columns)
   {
      var cellValue = firstRow[metaDataColumn];
      if (cellValue == DBNull.Value && !metaDataColumn.Required)
         continue;

      // Create an ExtendedProperty<T> matching the meta data column's declared data type.
      var property = Activator.CreateInstance(typeof(ExtendedProperty<>).MakeGenericType(metaDataColumn.DataType)) as IExtendedProperty;
      if (property == null)
         continue;

      property.Name = metaDataColumn.ColumnName;
      property.ValueAsObject = cellValue;
      dataRepository.ExtendedProperties.Add(property);
   }
}
public void should_fill_data_table_with_skipping_rows_sheet3()
{
   // Clone the template table and make the second column skip rows with null values.
   ImportDataTable table = _importDataTable.Clone();
   ImportDataColumn timeColumn = table.Columns.ItemByIndex(1);
   timeColumn.SkipNullValueRows = true;

   var hours = new Unit { IsDefault = false, Name = "h", DisplayName = "in Stunden" };
   var days = new Unit { IsDefault = true, Name = "d", DisplayName = "in Tagen" };
   timeColumn.Dimensions = new List<Dimension>
   {
      new Dimension
      {
         DisplayName = "Time",
         IsDefault = true,
         Name = "Time",
         Units = new List<Unit> { hours, days }
      }
   };

   var result = sut.ImportDataTables(table, _excelFile, _sheetNames[2], 0, 1, 2, -1, _cms);

   result[0].Rows.Count.ShouldBeEqualTo(46);
}
/// <summary>
/// Builds the control.
/// </summary>
/// <param name="table">The import data table used to configure the table.</param>
/// <param name="columnInfos">The columns used to configure the data repository.</param>
public void Build(ImportDataTable table, IReadOnlyList<ColumnInfo> columnInfos)
{
   InitializeComponent();
   Table = table;
   _columnInfos = columnInfos;
   // Build the actual UI once the table and column infos are stored.
   buildControl();
}
protected override void Context()
{
   // Create the table under edit and a fake view, then wire both into the presenter under test.
   _table = new ImportDataTable();
   _importDataTableGridView = A.Fake<IImportDataTableGridView>();
   _importerTask = new ImporterTask(new ColumnCaptionHelper(), new LowerLimitOfQuantificationTask());
   sut = new ImportDataTableGridPresenter(_importDataTableGridView, _importerTask);
}
/// <summary>
/// Opens the excel file, selects the given sheet, determines the row layout (caption, unit,
/// first and last data row) and runs <paramref name="action"/> on the resulting excel table.
/// </summary>
/// <param name="dataTableTemplate">Template describing the expected table structure.</param>
/// <param name="fileName">Path of the excel file.</param>
/// <param name="sheetName">Sheet to read from.</param>
/// <param name="mapping">Mapping between excel columns and template columns.</param>
/// <param name="action">Action executed with the extracted excel data table.</param>
/// <param name="rangesCache">Optional cache of user-selected ranges per sheet; may be null.</param>
private void runActionOnExcelTable(ImportDataTable dataTableTemplate, string fileName, string sheetName, IList<ColumnMapping> mapping, Action<DataTable> action, Cache<string, Rectangle> rangesCache)
{
   using (var wb = _importerTask.ReadExcelFile(fileName))
   {
      _importerTask.SelectSheet(fileName, wb, sheetName);
      int dataStartRow, dataEndRow, unitRow, captionRow;
      if (rangesCache != null && rangesCache.Contains(sheetName))
      {
         // A user-selected range exists for this sheet: derive the row layout from it.
         var range = rangesCache[sheetName];
         dataEndRow = range.Y + range.Height - 1;
         unitRow = _importerTask.GetUnitRowGuess(wb, range);
         captionRow = _importerTask.GetCaptionRowGuess(wb, range);
         // Data cannot start before the selected range nor on/above the caption row.
         dataStartRow = Math.Max(range.Y, captionRow + 1);
      }
      else
      {
         // No range given: let the importer task guess the layout; -1 means "read until the end".
         dataStartRow = _importerTask.GetFirstDataRowGuess(wb);
         dataEndRow = -1;
         unitRow = _importerTask.GetUnitRowGuess(wb);
         captionRow = _importerTask.GetCaptionRowGuess(wb);
      }
      using (var excelTable = GetExcelTable(wb, captionRow, unitRow, dataStartRow, dataEndRow, dataTableTemplate, mapping))
      {
         // Release the workbook lock before handing the extracted table to the caller.
         wb.unLock();
         action(excelTable);
      }
   }
}
/// <summary>
/// Refreshes image, caption and width of every grid column.
/// </summary>
/// <param name="table">NOTE(review): this parameter is unused in the body — confirm whether
/// the interface requires it or whether it should be consumed here.</param>
public void SetUnitForColumn(ImportDataTable table)
{
   // NOTE(review): despite the name, this refreshes column images and captions; the unit
   // is presumably embedded in the caption returned by the presenter — confirm against
   // GetCaptionForColumnByName.
   foreach (GridColumn gridCol in gridView.Columns)
   {
      setColumnImage(gridCol);
      gridCol.Caption = _presenter.GetCaptionForColumnByName(gridCol.FieldName);
      gridCol.BestFit();
   }
}
/// <summary>
/// Counts how many data tables would be imported for the given configuration.
/// </summary>
/// <param name="dataTableTemplate">The template ImportDataTable.</param>
/// <param name="fileName">The filename of the excel file.</param>
/// <param name="sheetName">The sheetname of the excel sheet.</param>
/// <param name="mapping">The current column mapping configuration.</param>
/// <param name="rangesCache">The cache of allowable ranges.</param>
/// <returns>The number of data tables that would be imported for the given configuration.</returns>
public int CountDataTables(ImportDataTable dataTableTemplate, string fileName, string sheetName, IList<ColumnMapping> mapping, Cache<string, Rectangle> rangesCache)
{
   var result = 0;
   runActionOnExcelTable(dataTableTemplate, fileName, sheetName, mapping,
      excelTable => result = _importerTask.CountDataTables(fileName, sheetName, dataTableTemplate, mapping, excelTable),
      rangesCache);
   return result;
}
public void should_fill_data_table_with_skipping_rows_sheet1()
{
   // Clone the template and let the second column drop rows containing null values.
   ImportDataTable table = _importDataTable.Clone();
   table.Columns.ItemByIndex(1).SkipNullValueRows = true;

   var result = sut.ImportDataTables(table, _excelFile, _sheetNames[0], 0, 0, 1, -1, _cms);

   result[0].Rows.Count.ShouldBeEqualTo(46);
}
public void should_throw_different_data_types_for_fill_data_table_data()
{
   // Force a data type that does not match the data found in the sheet.
   ImportDataTable table = _importDataTable.Clone();
   table.Columns.ItemByIndex(1).DataType = typeof(string);

   // Both overloads (explicit row layout and auto-detected layout) must reject the mismatch.
   The.Action(() => sut.ImportDataTables(table, _excelFile, _sheetNames[0], 0, 0, 1, -1, _cms)).ShouldThrowAn<DifferentDataTypeException>();
   The.Action(() => sut.ImportDataTables(table, _excelFile, _sheetNames[0], _cms)).ShouldThrowAn<DifferentDataTypeException>();
}
/// <summary>
/// Imports one sheet of an excel file, detecting caption row, unit row and the data range
/// automatically.
/// </summary>
/// <param name="dataTableTemplate">Object with all specification information.</param>
/// <param name="fileName">Name of the excel file containing the data.</param>
/// <param name="sheetName">Name of the sheet of that excel file containing the data.</param>
/// <param name="mapping">Mapping information which identifies all needed columns in the excel file.</param>
/// <param name="rangesCache">The cache of allowable ranges.</param>
/// <remarks>
/// If the unit information is within the captions it is assumed that the units are enclosed
/// in square brackets. First and last data row are determined automatically.
/// </remarks>
/// <exception cref="DifferentDataTypeException">Thrown when a mapped data column has a non-matching data type.</exception>
/// <returns>A list of filled import data table objects.</returns>
public IList<ImportDataTable> ImportDataTables(ImportDataTable dataTableTemplate, string fileName, string sheetName, IList<ColumnMapping> mapping, Cache<string, Rectangle> rangesCache = null)
{
   IList<ImportDataTable> result = null;
   runActionOnExcelTable(dataTableTemplate, fileName, sheetName, mapping,
      excelTable => { result = _importerTask.GetDataTables(fileName, sheetName, dataTableTemplate, mapping, excelTable); },
      rangesCache);
   checkTablesCompatibilityWithRepository(result);
   return result;
}
/// <summary>
/// Imports one sheet of an excel file using an explicitly given row layout.
/// </summary>
/// <param name="dataTableTemplate">Object with all specification information.</param>
/// <param name="fileName">Name of the excel file containing the data.</param>
/// <param name="sheetName">Name of the sheet of that excel file containing the data.</param>
/// <param name="captionRow">Row where the column captions are stored.</param>
/// <param name="unitRow">Row where the unit information is stored.</param>
/// <param name="dataStartRow">Row where the data begins.</param>
/// <param name="dataEndRow">Row where the data ends. If value is <c>-1</c> all rows will be read until end.</param>
/// <param name="mapping">Mapping information which identifies all needed columns in the excel file.</param>
/// <remarks>If <paramref name="unitRow"/> equals <paramref name="captionRow"/> the unit is searched within the captions, assumed to be enclosed in square brackets.</remarks>
/// <exception cref="DifferentDataTypeException">Thrown when a mapped data column has a non-matching data type.</exception>
/// <returns>A list of filled import data table objects.</returns>
public IList<ImportDataTable> ImportDataTables(ImportDataTable dataTableTemplate, string fileName, string sheetName, int captionRow, int unitRow, int dataStartRow, int dataEndRow, IList<ColumnMapping> mapping)
{
   using (var workbook = _importerTask.ReadExcelFile(fileName))
   {
      _importerTask.SelectSheet(fileName, workbook, sheetName);
      using (var excelTable = GetExcelTable(workbook, captionRow, unitRow, dataStartRow, dataEndRow, dataTableTemplate, mapping))
      {
         // Release the workbook lock before converting the extracted table.
         workbook.unLock();
         return _importerTask.GetDataTables(fileName, sheetName, dataTableTemplate, mapping, excelTable);
      }
   }
}
/// <summary>
/// Initializes the import dialog for the given source file: maps the configuration to an
/// import data table template, builds the "Source" and "Imports" pages and wires all UI events.
/// </summary>
/// <param name="sourceFile">Path of the file to import from.</param>
/// <param name="configuration">Table configuration providing meta data categories and column infos.</param>
/// <param name="mode">Import mode the dialog runs in.</param>
/// <exception cref="InvalidArgumentException">Thrown when the configuration cannot be mapped to an import table.</exception>
public void StartImport(string sourceFile, ImportTableConfiguration configuration, Mode mode)
{
   var metaDataCategories = configuration.MetaDataCategories;
   _mode = mode;
   _columnInfos = configuration.ColumnInfos;

   // Build the template table describing the expected structure of the import.
   var importDataTable = _importMapper.ConvertToImportDataTable(metaDataCategories, _columnInfos);
   if (importDataTable == null)
   {
      throw new InvalidArgumentException("Could not map to import table");
   }

   MaximizeBox = true;
   namingImportPanel.FillWith(_namingView);
   _importer = new Presentation.Services.Importer(_dataRepositoryMapper, _columnInfos, _importerTask, _dialogCreator);
   _importDataTable = importDataTable;
   _columnMappingControls = new Dictionary<string, ColumnMappingControl>();

   // Page Source: file selection, preview and range selection.
   _openSourceFileControl = new OpenSourceFileControl(_dialogCreator, sourceFile) { Dock = DockStyle.Fill };
   openSourceFileControlPanel.FillWith(_openSourceFileControl);
   _openSourceFileControl.OnOpenSourceFile += openSourceFileEvent;
   xtraTabControl.SelectedPageChanged += (s, e) => OnEvent(() => changeTabs(e.Page, e.PrevPage));
   xtraTabControl.SelectedTabPage.Appearance.Header.Font = Fonts.SelectedTabHeaderFont;
   _rangesCache = new Cache<string, Rectangle>();
   createSourceFilePreviewControl(sourceFile);
   btnImport.Click += (s, e) => OnEvent(importData);
   btnSelectRange.Click += (s, e) => OnEvent(selectRange);
   btnImportAll.Click += (s, e) => OnEvent(importAllData);
   FormClosing += onFormClosing;

   // Page Imports: shows imported data sets; OK is only enabled when required data is complete.
   _dataSetControl = new DataSetControl(_imports, _columnInfos, true);
   panelImportedTabs.FillWith(_dataSetControl);
   _dataSetControl.MissingRequiredData += (s, e) => OnEvent(() => enableOKButton(false));
   _dataSetControl.RequiredDataCompleted += (s, e) => OnEvent(() => enableOKButton(true));
   _dataSetControl.TableDeleted += (s, e) => OnEvent(enableImportsPage);
   enableImportsPage();
   enableOKButton(false);
}
/// <summary>
/// Releases everything the dialog holds: unhooks events, disposes imported tables, the
/// template table and all child controls, then nulls every reference so the form (and the
/// potentially large tables) can be garbage collected.
/// </summary>
private void cleanMemory()
{
   _namingView = null;
   if (_openSourceFileControl != null)
   {
      // Unsubscribe before dropping the reference to avoid keeping this form alive.
      _openSourceFileControl.OnOpenSourceFile -= openSourceFileEvent;
      _openSourceFileControl = null;
   }
   if (_imports != null)
   {
      // Dispose each imported table (and its meta data table) before the data set itself.
      foreach (ImportDataTable table in _imports.Tables)
      {
         table.MetaData?.Dispose();
         table.Dispose();
      }
      _imports.Dispose();
   }
   if (_importDataTable != null)
   {
      _importDataTable.MetaData?.Dispose();
      _importDataTable.Dispose();
   }
   CleanUpHelper.ReleaseEvents(_dataSetControl);
   _dataSetControl?.Dispose();
   CleanUpHelper.ReleaseEvents(_sourceFilePreviewControl);
   _sourceFilePreviewControl?.Dispose();
   CleanUpHelper.ReleaseControls(Controls);
   Controls.Clear();
   // Null out all remaining references.
   _imports = null;
   _openSourceFileControl = null;
   _sourceFilePreviewControl = null;
   _columnMappingControl = null;
   _dataSetControl = null;
   _importDataTable = null;
   _importer = null;
   _presenter = null;
   _dataRepositoryMapper = null;
   _importMapper = null;
   _columnInfos = null;
   cleanColumnMappingControls();
   _columnMappingControls?.Clear();
   _columnMappingControls = null;
   columnMappingControlPanel.Controls.Clear();
}
public void should_convert_import_data_table()
{
   ImportDataTable table = _importDataTable.Clone();

   bool errorNaN;
   var repositories = sut.ConvertImportDataTableList(new List<ImportDataTable> { table }, _columnInfos, out errorNaN);

   errorNaN.ShouldBeFalse();
   repositories.Count.ShouldBeEqualTo(1);
   // Every converted column must correspond to a column of the source table.
   foreach (var column in repositories[0])
   {
      table.Columns.ContainsName(column.Name).ShouldBeTrue();
   }
}
public void should_throw_not_all_data_columns_are_mapped_for_fill_data_table_data()
{
   ImportDataTable table = _importDataTable.Clone();
   table.Columns.ItemByIndex(1).Required = true;

   // Copy the mapping but leave out the (now required) second column.
   var incompleteMapping = _cms
      .Select(original => new ColumnMapping { SourceColumn = original.SourceColumn, Target = original.Target })
      .Where(copy => copy.SourceColumn != _columnNames[1])
      .ToList();

   // Both overloads must refuse the import because a required column has no mapping.
   The.Action(() => sut.ImportDataTables(table, _excelFile, _sheetNames[0], 0, 0, 1, -1, incompleteMapping)).ShouldThrowAn<NoMappingForDataColumnException>();
   The.Action(() => sut.ImportDataTables(table, _excelFile, _sheetNames[0], incompleteMapping)).ShouldThrowAn<NoMappingForDataColumnException>();
}
/// <summary>
/// Copies the group-by entries of the table's extended properties into the repository as
/// string extended properties, trimming units from the key. Entries already present in the
/// repository and entries with an empty value are skipped.
/// </summary>
private void addExtendedPropertiesForGroupBy(ImportDataTable importDataTable, DataRepository dataRepository)
{
   foreach (DictionaryEntry property in importDataTable.ExtendedProperties)
   {
      var key = property.Key.ToString();
      if (dataRepository.ExtendedProperties.Contains(key))
      {
         continue;
      }
      // FIX: property.Value may be null for entries without a value; the previous code
      // called ToString() on it unconditionally, which threw a NullReferenceException.
      // Null values are now skipped like empty ones.
      var value = property.Value?.ToString();
      if (string.IsNullOrEmpty(value))
      {
         continue;
      }
      dataRepository.ExtendedProperties.Add(new ExtendedProperty<string>
      {
         Name = _columnCaptionHelper.TrimUnits(key),
         Value = value
      });
   }
}
public void should_throw_invalid_unit_sheet3()
{
   ImportDataTable table = _importDataTable.Clone();
   ImportDataColumn timeColumn = table.Columns.ItemByIndex(1);
   timeColumn.Dimensions = new List<Dimension>
   {
      new Dimension
      {
         DisplayName = "Time",
         IsDefault = true,
         Name = "Time",
         Units = new List<Unit>
         {
            // Supported units are "hour" and "d"; the mapping below selects "h".
            new Unit { IsDefault = false, Name = "hour", DisplayName = "in Stunden" },
            new Unit { IsDefault = true, Name = "d", DisplayName = "in Tagen" }
         }
      }
   };

   var mappings = new List<ColumnMapping>(_cms.Count);
   foreach (ColumnMapping original in _cms)
   {
      var mapping = new ColumnMapping { SourceColumn = original.SourceColumn, Target = original.Target };
      if (mapping.SourceColumn == _columnNames[1])
      {
         mapping.SelectedUnit = new Unit { Name = "h" };
      }
      mappings.Add(mapping);
   }

   The.Action(() => sut.ImportDataTables(table, _excelFile, _sheetNames[2], 0, 1, 2, -1, mappings)).ShouldThrowAn<InvalidUnitForExcelColumnException>();
}
/// <summary>
/// Binds the grid to the given table: columns become read-only, get an image and a caption,
/// and are hidden when the underlying table column has no source. A tooltip controller is
/// attached afterwards.
/// </summary>
public void BindTo(ImportDataTable table)
{
   gridControl.DataSource = table;
   gridControl.BindingContext = new BindingContext();
   gridControl.ForceInitialize();

   foreach (GridColumn gridColumn in gridView.Columns)
   {
      gridColumn.OptionsColumn.AllowEdit = false;
      setColumnImage(gridColumn);
      var boundColumn = table.Columns.ItemByName(gridColumn.FieldName);
      gridColumn.Caption = boundColumn.GetCaptionForColumn();
      gridColumn.Visible = !string.IsNullOrEmpty(boundColumn.Source);
   }
   gridView.BestFitColumns();

   var toolTipController = new ToolTipController();
   toolTipController.GetActiveObjectInfo += (o, e) => OnEvent(() => onToolTipControllerGetActiveObjectInfo(o, e));
   gridControl.ToolTipController = toolTipController;
}
public void should_not_fill_data_table_with_more_rows_sheet2()
{
   ImportDataTable table = _importDataTable.Clone();
   ImportDataColumn timeColumn = table.Columns.ItemByIndex(1);
   timeColumn.Dimensions = new List<Dimension>
   {
      new Dimension
      {
         DisplayName = "Time",
         IsDefault = true,
         Name = "Time",
         Units = new List<Unit>
         {
            new Unit { IsDefault = false, Name = "h", DisplayName = "in Stunden" },
            new Unit { IsDefault = true, Name = "d", DisplayName = "in Tagen" }
         }
      }
   };

   var mappings = new List<ColumnMapping>(_cms.Count);
   foreach (ColumnMapping original in _cms)
   {
      var mapping = new ColumnMapping { SourceColumn = original.SourceColumn, Target = original.Target };
      if (mapping.SourceColumn == _columnNames[1])
      {
         mapping.SourceColumn += " [h]";
      }
      mappings.Add(mapping);
   }

   // Requested end row (99) exceeds the available data; only the real rows are read.
   var result = sut.ImportDataTables(table, _excelFile, _sheetNames[1], 0, 0, 1, 99, mappings);
   result[0].Rows.Count.ShouldBeEqualTo(49);
}
/// <summary>
/// Imports the given records by invoking the <c>Sql.ImportData</c> stored procedure with the
/// records packed into a table-valued parameter, passing the current user's identity, group
/// memberships and admin flag for authorization.
/// </summary>
/// <param name="id">Identifier the records are imported under.</param>
/// <param name="records">Records to import.</param>
/// <returns>The result row returned by the stored procedure (null when nothing was returned).</returns>
public async Task<DataImporter.IImportDataResult> ImportDataAsync(Guid id, IEnumerable<ImportRecord> records)
{
   using (var cn = new SqlConnection(dbOptions.ConnectionString))
   {
      var changed = await cn.QueryFirstOrDefaultAsync<Result>(
         Sql.ImportData,
         new
         {
            id,
            // Table-valued parameter built from the records for the stored procedure.
            data = ImportDataTable.From(id, records),
            user = user.UUID,
            groups = GroupMembership.From(user),
            admin = user.IsAdmin
         },
         commandType: CommandType.StoredProcedure,
         commandTimeout: dbOptions.DefaultTimeout
      );
      return changed;
   }
}
/// <summary>
/// When the table has a source, adds three string extended properties to the repository:
/// the source itself, the file name (without extension) and the sheet name.
/// </summary>
private static void addExtendedPropertyForSource(ImportDataTable importDataTable, DataRepository dataRepository)
{
   // Source is a string (checked via string.IsNullOrEmpty), so the guard doubles as a null check.
   if (string.IsNullOrEmpty(importDataTable.Source))
      return;

   // FIX: the previous code built ExtendedProperty<T> via Activator.CreateInstance/
   // MakeGenericType on Source.GetType(). Source is statically a string here, so the
   // property can be created directly — same behavior, no reflection, no silent-null branch.
   dataRepository.ExtendedProperties.Add(new ExtendedProperty<string> { Name = "Source", Value = importDataTable.Source });
   dataRepository.ExtendedProperties.Add(new ExtendedProperty<string> { Name = Constants.FILE, Value = Path.GetFileNameWithoutExtension(importDataTable.File) });
   dataRepository.ExtendedProperties.Add(new ExtendedProperty<string> { Name = Constants.SHEET, Value = importDataTable.Sheet });
}
/// <summary>
/// Builds an <see cref="ImportDataTable"/> template from meta data categories and column
/// specifications. Error columns (those with RelatedColumnOf set) additionally get an
/// "auxiliary type" meta data column and per-dimension conditions: geometric errors are
/// tied to the dimensionless dimension, arithmetic errors to concrete dimensions.
/// </summary>
/// <param name="metaDataCategories">Categories converted into the table-level meta data table; may be null or empty.</param>
/// <param name="columnInfos">Specifications for the columns of the resulting template.</param>
/// <returns>The configured import data table template.</returns>
public ImportDataTable ConvertToImportDataTable(IReadOnlyList<MetaDataCategory> metaDataCategories, IEnumerable<ColumnInfo> columnInfos)
{
   var retVal = new ImportDataTable();
   if (metaDataCategories != null && metaDataCategories.Count > 0)
   {
      retVal.MetaData = convertToMetaDataTable(metaDataCategories);
   }
   foreach (var columnInfo in columnInfos)
   {
      var column = new ImportDataColumn
      {
         ColumnName = columnInfo.Name,
         DisplayName = columnInfo.DisplayName,
         Description = columnInfo.Description,
         DataType = columnInfo.DataType,
         // Required when mandatory or when null values are not allowed at all.
         Required = (columnInfo.IsMandatory || columnInfo.NullValuesHandling == NullValuesHandlingType.NotAllowed),
         SkipNullValueRows = (columnInfo.NullValuesHandling == NullValuesHandlingType.DeleteRow),
      };
      if (columnInfo.MetaDataCategories != null && columnInfo.MetaDataCategories.Count > 0)
      {
         column.MetaData = convertToMetaDataTable(columnInfo.MetaDataCategories);
      }
      if (columnInfo.DimensionInfos != null && columnInfo.DimensionInfos.Count > 0)
      {
         column.Dimensions = columnInfo.DimensionInfos.Select(dimensioninfo => dimensioninfo.ConvertToDimensions()).ToList();
         column.ActiveDimension = DimensionHelper.FindDimension(column.Dimensions, columnInfo.DefaultDimension.Name);
      }
      if (!string.IsNullOrEmpty(columnInfo.RelatedColumnOf)) //column is error column, so we need auxiliary type as metadata
      {
         column.ColumnNameOfRelatedColumn = columnInfo.RelatedColumnOf;
         //Add AuxiliaryType meta data category
         if (column.MetaData == null)
         {
            column.MetaData = new MetaDataTable();
         }
         var listOfValues = new Dictionary<string, string>
         {
            { Constants.STD_DEV_ARITHMETIC, Constants.STD_DEV_ARITHMETIC },
            { Constants.STD_DEV_GEOMETRIC, Constants.STD_DEV_GEOMETRIC }
         };
         //if there is only the dimensionless dimension defined only geometric error make sense
         if (column.Dimensions.Count == 1)
         {
            if (column.Dimensions[0].IsDimensionless())
            {
               listOfValues.Remove(Constants.STD_DEV_ARITHMETIC);
            }
         }
         var auxiliaryTypeColumn = new MetaDataColumn
         {
            ColumnName = Constants.AUXILIARY_TYPE,
            DisplayName = "Error Type",
            DataType = typeof(string),
            Description = "What is the type of error?",
            ListOfValues = new Dictionary<string, string>(listOfValues),
            IsListOfValuesFixed = true,
            Required = true
         };
         column.MetaData.Columns.Add(auxiliaryTypeColumn);
         // add special condition to the dimensions
         // for geometric error the unit must be dimensionless
         // for arithmetic the unit must be a concrete dimension
         foreach (var dim in column.Dimensions)
         {
            if (dim.MetaDataConditions == null)
            {
               dim.MetaDataConditions = new Dictionary<string, string>();
            }
            if (dim.IsDimensionless())
            {
               dim.MetaDataConditions.Add(Constants.AUXILIARY_TYPE, Constants.STD_DEV_GEOMETRIC);
               if (dim.IsDefault)
               {
                  auxiliaryTypeColumn.DefaultValue = Constants.STD_DEV_GEOMETRIC;
               }
            }
            else
            {
               dim.MetaDataConditions.Add(Constants.AUXILIARY_TYPE, Constants.STD_DEV_ARITHMETIC);
               if (dim.IsDefault)
               {
                  auxiliaryTypeColumn.DefaultValue = Constants.STD_DEV_ARITHMETIC;
               }
            }
         }
      }
      retVal.Columns.Add(column);
   }
   return (retVal);
}
/// <summary>
/// Forwards the table to the view so it can refresh its column display
/// (see the view's SetUnitForColumn).
/// </summary>
public void SetUnitForColumn(ImportDataTable table)
{
   _view.SetUnitForColumn(table);
}
/// <summary>
/// Starts editing the given table: stores it as the current table and binds the view to it.
/// </summary>
public void Edit(ImportDataTable table)
{
   _table = table;
   _view.BindTo(table);
}
/// <summary>
/// Builds the display name for an imported table: file name, sheet name, a sheet index when
/// several tables come from the same configuration, and the template's table name in
/// parentheses when one is set.
/// </summary>
private string getTableName(string excelFile, IList<ImportDataTable> newTables, string sheet, ImportDataTable dataTable, int tableCount)
{
   var baseName = Path.GetFileNameWithoutExtension(excelFile);

   string tableName;
   if (tableCount > 1)
      tableName = nameWithSheetIndex(baseName, sheet, newTables, dataTable);
   else
      tableName = nameWithoutSheetIndex(baseName, sheet);

   if (dataTable.TableName.Length > 0)
      tableName = $"{tableName} ({dataTable.TableName})";

   return tableName;
}
/// <summary>
/// Simulation experiment: builds a normally distributed base histogram over a joined
/// papers/proceedings query, generates a simulated metric lookup for a base column, then
/// repeatedly filters the data to random subsets of column pairs and folds the filtered
/// results back into the base lookup. Finally persists the metric and the lookup via a
/// temp file and BULK INSERT.
/// </summary>
public void RandomizeWithHierarchy()
{
   //Generate histogram as follows:
   //Assume a list of columns (e.g. COnference, Publisher, Year) ordred by set size descending
   //Generate histogram for Conference
   //Generate result for it (as base)
   //a.Select a subset of conference AND a subset of Publisher (Subset ratio e.g. 15%)
   //Filter base data and generate result for it
   //Apply the results back to the base result (i.e. join back)
   //Repeat from a. for the rest.
   //See what happens.
   var container = Bootstrapper.Get();
   var dal = container.Resolve<Dal>();
   var logger = container.Resolve<ILogger>();
   var progresser = container.Resolve<IProgress>();
   const string query = @"
SELECT p.Id, r.Id, p.title, r.conference, r.title, r.publisher, r.year
FROM papers p
INNER JOIN Raw_Links l ON p.Id = l.[From]
INNER JOIN proceedings r ON l.[To] = r.Id AND l.[link-type]='in-proceedings'
";
   //Select all conferences.
   var dt = dal.GetDataTable(Dal.DefaultConnStr, query);
   dt.PrimaryKey = new[] { dt.Columns[0], dt.Columns[1] }; // Setting the primary key is necessary
   var tbl = ImportDataTable.ImportAdoNetDataTable(dt);
   var baseHist = SimulationBuilder.GetNormalDistributedBaseHistogram();
   //Now we have the base histogram.
   //We want to generate a histogram for a column and save it back to database.
   //Let's select conference
   // Column indices into the query above: 3 = conference, 5 = publisher, 6 = year.
   var selectedCols = new[] { 3, 5, 6 };
   const int metricId = 1;
   var hist = SimulationBuilder.GetHistogram(tbl.Columns[selectedCols[0]] as Column<string>, baseHist);
   var time = new Random(DateTime.Now.Second);
   // Base lookup: simulated result + random delay (up to 3s) per key.
   var baseLoockup = SimulationBuilder.CreateSimulatedLoockup(tbl, selectedCols[0], hist, r => TimeSpan.FromMilliseconds(time.Next(3000)));
   using (var fList = System.IO.File.CreateText(@"C:\Differents.csv"))
   {
      //Select subset of this column and subset of the next one.
      var colIdx = 0;
      var filteredData = tbl.Rows;
      while (colIdx < selectedCols.Length - 1)
      {
         var col1 = selectedCols[colIdx];
         var col2 = selectedCols[colIdx + 1];
         colIdx++;
         // Random subsets (SubsetLevelRatio) of both columns; keep only rows in both subsets.
         var col1Items = GetSubset(tbl.Columns[col1], SubsetLevelRatio);
         var col2Items = GetSubset(tbl.Columns[col2], SubsetLevelRatio);
         filteredData = filteredData.Where(r => col1Items.Contains(r.Rows[col1]) && col2Items.Contains(r.Rows[col2])).Select(r => r).ToList();
         fList.WriteLine("NewList, {0}, {1}", col1, col2);
         filteredData.Select(f => new { col1 = tbl.Columns[col1][f.Rows[col1]], col2 = tbl.Columns[col2][f.Rows[col2]] })
            .Distinct().ToList().ForEach(l => fList.WriteLine("{0},{1}", l.col1, l.col2));
         logger.Log("Filtered items to {0} records now replacing it back.".FormatWith(filteredData.Count));
         progresser.Reset(baseLoockup.Count);
         //replace baseLoockup with filtered data.
         var newLoockup = SimulationBuilder.CreateSimulatedLoockup(tbl, filteredData, selectedCols[0], hist, r => TimeSpan.FromMilliseconds(time.Next(3000)));
         // Fold the filtered results back into the base lookup (join on key).
         foreach (var item in baseLoockup.Where(l => newLoockup.ContainsKey(l.Key)))
         {
            progresser.Progressed(1);
            var newItem = newLoockup[item.Key];
            item.Value.Result = newItem.Result;
            item.Value.Duration = newItem.Duration;
         }
         progresser.Finish();
      }
      fList.Close();
   }
   //Now we've got the loockup. Store it somewehere. We put it back in the database.
   //We already have a table called SimulatedMetricLoockup
   const string insertMetric = "INSERT INTO Metrics (Id, MetricName, OnQuery, OnColumn) VALUES (@mId, @mName, @mQuery, @mCol);";
   const string updateMetric = "UPDATE Metrics SET Id = @mId, MetricName = @mName, OnQuery = @mQuery, OnColumn = @mCol";
   // NOTE(review): the last two parameter keys lack the "@" prefix used by the first two —
   // confirm InsertOrUpdateEntity normalizes parameter names, otherwise @mQuery/@mCol never bind.
   dal.InsertOrUpdateEntity(Dal.DefaultConnStr, insertMetric, updateMetric,
      "SELECT Count(*) FROM Metrics Where Id = @mId",
      new Dictionary<string, object> { { "@mId", metricId }, { "@mName", "NormalDistru" }, { "mQuery", query }, { "mCol", selectedCols[0] } });
   //Put everything in a temp file.
   var tmpFileName = dal.InsertAllInTempFile(baseLoockup,
      l => String.Format("{0},{1},{2},{3}", metricId, l.Key, l.Value.Result ? 1 : 0, l.Value.Duration.Milliseconds));
   //bulk insert
   string bulkIns = @"BULK INSERT SimulatedMetricLoockup FROM '{0}' WITH ( FIELDTERMINATOR = ',', ROWTERMINATOR='\n' )".FormatWith(tmpFileName);
   dal.RunCommandWithParameter(Dal.DefaultConnStr, bulkIns);
}
/// <summary>
/// This method reads the table data from a given workbook.
/// </summary>
/// <param name="wb">An open excel workbook set to a single sheet.</param>
/// <param name="captionRow">Number of row containing captions.</param>
/// <param name="unitRow">Number of row containing unit information; -1 means the units are embedded in the captions.</param>
/// <param name="dataStartRow">Number of row where the data starts.</param>
/// <param name="dataEndRow">Number of row where the import should end; -1 means read until the last row.</param>
/// <param name="dataTableTemplate">Specification of the needed table.</param>
/// <param name="mapping">Mapping between excel columns and needed table columns.</param>
/// <remarks>
/// <para>There are some checks done against the given import data table specification.</para>
/// <para>All required data columns must be mapped.</para>
/// <para>The unit of the excel column must be supported by the import data column.</para>
/// <para>Only mapped excel columns are taken into the new data table.</para>
/// </remarks>
/// <returns>A data table containing the data of the excel sheet.</returns>
public DataTable GetExcelTable(WorkBook wb, int captionRow, int unitRow, int dataStartRow, int dataEndRow, ImportDataTable dataTableTemplate, IList<ColumnMapping> mapping)
{
   // Normalize and validate row arguments against the sheet's actual extent.
   if (dataEndRow == -1 || dataEndRow > wb.LastRow)
   {
      dataEndRow = wb.LastRow;
   }
   if (captionRow < 0 || captionRow > wb.LastRow)
   {
      throw new OSPSuiteException(Error.CaptionRowOutOfRange(captionRow, wb.LastRow, wb.GetCurrentSheetName()));
   }
   if (unitRow == -1)
   {
      // No separate unit row: units are read from the captions.
      unitRow = captionRow;
   }
   if (unitRow < 0 || unitRow > wb.LastRow)
   {
      throw new OSPSuiteException(Error.UnitRowOutOfRange(unitRow, wb.LastRow, wb.GetCurrentSheetName()));
   }
   if (dataStartRow < 0 || dataStartRow > wb.LastRow)
   {
      throw new OSPSuiteException(Error.FirstDataRowOutOfRange(dataStartRow, wb.LastRow, wb.GetCurrentSheetName()));
   }
   if (dataEndRow < dataStartRow)
   {
      throw new OSPSuiteException(Error.LastDataRowLessThanFirstDataRow(dataEndRow, dataStartRow, wb.GetCurrentSheetName()));
   }
   //check that for each table column there is a mapping to an excel column.
   _importerTask.CheckWhetherAllDataColumnsAreMapped(dataTableTemplate.Columns, mapping);
   //read the excel captions and check whether all mapped columns are available.
   var columnNames = _importerTask.GetColumnNames(wb, captionRow);
   _importerTask.CheckWhetherMappedExcelColumnExist(mapping, columnNames);
   //read the units of the excel file
   IList<string> units = (unitRow == captionRow) ? _importerTask.GetUnits(columnNames) : _importerTask.GetUnits(wb, unitRow);
   //check whether the units of all mapped columns are supported.
   _importerTask.CheckUnitSupportForAllMappedColumns(dataTableTemplate.Columns, mapping, columnNames, units);
   //read the excel file
   var excelTable = getSheetTable(wb, captionRow, unitRow, 0, wb.LastCol + 1, dataStartRow, dataEndRow);
   //check data types of mapped columns
   _importerTask.CheckDataTypes(dataTableTemplate.Columns, mapping, excelTable.Columns);
   return (excelTable);
}
/// <summary>
/// Button handler: builds a normally distributed histogram over the conference column of the
/// joined papers/proceedings query, creates a simulated metric lookup with random delays,
/// persists the metric row and bulk-inserts the lookup from a temp file.
/// </summary>
private void btnRun_Click(object sender, EventArgs e)
{
   var container = Bootstrapper.Get();
   var dal = container.Resolve<Dal>();
   const string query = @"
SELECT p.Id, r.Id, p.title, r.conference, r.title, r.publisher, r.year
FROM papers p
INNER JOIN Raw_Links l ON p.Id = l.[From]
INNER JOIN proceedings r ON l.[To] = r.Id AND l.[link-type]='in-proceedings'
";
   //Select all conferences.
   var dt = dal.GetDataTable(Dal.DefaultConnStr, query);
   dt.PrimaryKey = new[] { dt.Columns[0], dt.Columns[1] }; // Setting the primary key is necessary
   var tbl = ImportDataTable.ImportAdoNetDataTable(dt);
   var baseHist = SimulationBuilder.GetNormalDistributedBaseHistogram();
   //Now we have the base histogram.
   //We want to generate a histogram for a column and save it back to database.
   //Let's select conference
   const int selectedCol = 3; // index of r.conference in the query above
   const int metricId = 1;
   var hist = SimulationBuilder.GetHistogram(tbl.Columns[selectedCol] as Column<string>, baseHist);
   var time = new Random(DateTime.Now.Second);
   // Simulated result + random delay (up to 3s) per key.
   var loockup = SimulationBuilder.CreateSimulatedLoockup(tbl, selectedCol, hist, r => TimeSpan.FromMilliseconds(time.Next(3000)));
   //Now we've got the loockup. Store it somewehere. We put it back in the database.
   //We already have a table called SimulatedMetricLoockup
   const string insertMetric = "INSERT INTO Metrics (Id, MetricName, OnQuery, OnColumn) VALUES (@mId, @mName, @mQuery, @mCol);";
   const string updateMetric = "UPDATE Metrics SET Id = @mId, MetricName = @mName, OnQuery = @mQuery, OnColumn = @mCol";
   // NOTE(review): "mQuery"/"mCol" lack the "@" prefix used by "@mId"/"@mName" — confirm
   // InsertOrUpdateEntity normalizes parameter names, otherwise @mQuery/@mCol never bind.
   dal.InsertOrUpdateEntity(Dal.DefaultConnStr, insertMetric, updateMetric,
      "SELECT Count(*) FROM Metrics Where Id = @mId",
      new Dictionary<string, object> { { "@mId", metricId }, { "@mName", "NormalDistru" }, { "mQuery", query }, { "mCol", selectedCol } });
   //Put everything in a temp file.
   var tmpFileName = dal.InsertAllInTempFile(loockup,
      l => String.Format("{0},{1},{2},{3}", metricId, l.Key, l.Value.Result ? 1 : 0, l.Value.Duration.Milliseconds));
   //bulk insert
   string bulkIns = @"BULK INSERT SimulatedMetricLoockup FROM '{0}' WITH ( FIELDTERMINATOR = ',', ROWTERMINATOR='\n' )".FormatWith(tmpFileName);
   dal.RunCommandWithParameter(Dal.DefaultConnStr, bulkIns);
   //const string insertLoockupQ = @"INSERT INTO SimulatedMetricLoockup (MetricId, LoockupKey, LoockupValue, Delay) VALUES (@mId, @lKey, @lVal, @lD)";
   //dal.RunForAll(Dal.DefaultConnStr, insertLoockupQ, loockup.ToList(),
   //              l => new Dictionary<string, object> {{"@mId", 1}, {"@lKey", l.Key}, {"@lVal", l.Value.Result}, {"@lD", l.Value.Duration}}
   //    );
}
/// <summary>
/// Builds a table name of the form "file.sheet.N" where N is the 1-based position of the
/// table within the list of newly imported tables.
/// </summary>
private static string nameWithSheetIndex(string fileNameWithoutExtension, string sheet, IList<ImportDataTable> newTables, ImportDataTable dataTable)
{
   var sheetIndex = newTables.IndexOf(dataTable) + 1;
   return string.Format("{0}.{1}.{2}", fileNameWithoutExtension, sheet, sheetIndex);
}