/// <summary>
/// Runs the full import: unpacks the archive, then seeds every dataset
/// reported by the database writer, logging per-table progress and a
/// summary of any records that failed to insert.
/// </summary>
public void Import()
{
    _dataUnpacker.StartUnpacking(_datasetImporter.FileExtension);
    var datasets = _dbWriter.GetDatasets();
    try
    {
        foreach (var tableInfo in datasets)
        {
            using (var stream = _dataUnpacker.OpenStreamForUnpacking(tableInfo.Name))
            {
                // No packed data for this table — skip it.
                if (stream == null)
                {
                    continue;
                }

                _logger?.LogInformation($"Importing {tableInfo.Name}...");

                int failedRecords = 0;
                int importedRecords = 0;

                var dataset = _datasetImporter.StartImport(stream);
                // TODO: the schema should be stored alongside the exported
                // file and read back from there instead of being taken here.
                dataset.SetSchema(tableInfo.Schema);
                _dbWriter.StartSeeding(dataset);
                try
                {
                    while (_datasetImporter.HasRecords())
                    {
                        try
                        {
                            _dbWriter.InsertRecord(_datasetImporter.NextRecord());
                            importedRecords++;
                        }
                        catch (Exception ex)
                        {
                            // Best-effort seeding: a single bad record (duplicate
                            // key, constraint violation) must not abort the table.
                            _logger?.LogDebug("ERROR: " + ex.Message);
                            failedRecords++;
                        }
                    }
                }
                finally
                {
                    // Always close the seeding session and the importer,
                    // then report what happened for this table.
                    _dbWriter.FinishSeeding();
                    _datasetImporter.FinishImport();
                    _logger?.LogInformation($"{importedRecords} records were imported");
                    if (failedRecords > 0)
                    {
                        _logger?.LogWarning($"{failedRecords} errors during import (duplicate records or violated constraints)");
                    }
                }
            }
        }
    }
    finally
    {
        _dataUnpacker.FinishUnpacking();
    }
}
/// <summary>
/// Starts the importing operation: unpacks the exported data and seeds
/// every dataset reported by the database writer, logging per-record
/// failures without aborting the table.
/// </summary>
public void Import()
{
    _dataUnpacker.StartUnpacking(_datasetImporter.FileExtension);
    var datasets = _dbWriter.GetDatasets();
    try
    {
        foreach (var table in datasets)
        {
            using (var datasetStream = _dataUnpacker.OpenStreamForUnpacking(table.Name))
            {
                // No packed data for this table — nothing to seed.
                if (datasetStream == null)
                {
                    continue;
                }

                _logger?.LogInformation($"Importing {table.Name}...");
                var dataset = _datasetImporter.StartImport(datasetStream);
                try
                {
                    // BUG FIX: FinishSeeding was previously in a finally that
                    // ran even when StartSeeding had never been called (null
                    // stream, or OpenStreamForUnpacking/StartImport threw).
                    // Each cleanup call is now scoped to its matching start.
                    _dbWriter.StartSeeding(dataset);
                    try
                    {
                        while (_datasetImporter.HasRecords())
                        {
                            try
                            {
                                _dbWriter.WriteRecord(_datasetImporter.NextRecord());
                            }
                            catch (Exception ex)
                            {
                                // Best-effort: a single bad record must not
                                // abort the rest of the table.
                                _logger?.LogError(ex.Message);
                            }
                        }
                    }
                    finally
                    {
                        // Runs only if StartSeeding succeeded.
                        _dbWriter.FinishSeeding();
                    }
                }
                finally
                {
                    // Runs only if StartImport succeeded, even when the
                    // record loop or StartSeeding throws.
                    _datasetImporter.FinishImport();
                }
            }
        }
    }
    finally
    {
        _dataUnpacker.FinishUnpacking();
    }
}