public void TestBulkInsert_ExplicitDateTimeFormats(DatabaseType type)
        {
            DiscoveredDatabase db  = GetTestDatabase(type);
            DiscoveredTable    tbl = db.CreateTable("MyDateTestTable",
                                                    new[]
            {
                new DatabaseColumnRequest("MyDate", new DatabaseTypeRequest(typeof(DateTime)))
                {
                    AllowNulls = false
                },
            });

            //There are no rows in the table yet
            Assert.AreEqual(0, tbl.GetRowCount());

            using (var dt = new DataTable())
            {
                dt.Columns.Add("MyDate");
                dt.Rows.Add("20011230");

                using (IBulkCopy bulk = tbl.BeginBulkInsert())
                {
                    bulk.Timeout = 30;
                    bulk.DateTimeDecider.Settings.ExplicitDateFormats = new [] { "yyyyMMdd" };
                    bulk.Upload(dt);
                }
            }

            var dtDown = tbl.GetDataTable();

            Assert.AreEqual(new DateTime(2001, 12, 30), dtDown.Rows[0]["MyDate"]);
        }
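        // Reads the flat file in batches (10,000 rows per batch here) and bulk inserts each batch
        // into the destination table, clearing the DataTable between batches and reporting progress.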
        private void LoadFile(DiscoveredTable tableToLoad, FileInfo fileToLoad, DiscoveredDatabase dbInfo, Stopwatch timer, IDataLoadJob job)
        {
            using (var con = dbInfo.Server.GetConnection())
            {
                DataTable dt = tableToLoad.GetDataTable(0);

                using (var insert = tableToLoad.BeginBulkInsert(Culture))
                {
                    // set up the bulk insert into the destination table
                    insert.Timeout = 500000;

                    job.OnNotify(this, new NotifyEventArgs(ProgressEventType.Information, "About to open file " + fileToLoad.FullName));
                    OpenFile(fileToLoad, job);

                    //confirm the validity of the headers
                    ConfirmFlatFileHeadersAgainstDataTable(dt, job);

                    con.Open();

                    //now we will read data out of the file in batches
                    int batchNumber         = 1;
                    int maxBatchSize        = 10000;
                    int recordsCreatedSoFar = 0;

                    try
                    {
                        //while there is data to be loaded into the table
                        while (IterativelyBatchLoadDataIntoDataTable(dt, maxBatchSize) != 0)
                        {
                            DropEmptyColumns(dt);
                            ConfirmFitToDestination(dt, tableToLoad, job);
                            try
                            {
                                recordsCreatedSoFar += insert.Upload(dt);

                                dt.Rows.Clear(); //important: otherwise each new batch is appended after the previous one and already-inserted rows get uploaded again, duplicating records and growing every upload

                                job.OnProgress(this,
                                               new ProgressEventArgs(dbInfo.GetRuntimeName(),
                                                                     new ProgressMeasurement(recordsCreatedSoFar, ProgressType.Records), timer.Elapsed));
                            }
                            catch (Exception e)
                            {
                                throw new Exception("Error processing batch number " + batchNumber + " (of batch size " + maxBatchSize + ")", e);
                            }
                        }
                    }
                    catch (Exception e)
                    {
                        throw new FlatFileLoadException("Error processing file " + fileToLoad, e);
                    }
                    finally
                    {
                        CloseFile();
                    }
                }
            }
        }
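        // Starts a bulk insert against _table, applies the configured Timeout and notifies the
        // listener with the column count and the row count of the first chunk received.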
        private IBulkCopy InitializeBulkCopy(DataTable dt, IDataLoadEventListener job)
        {
            var insert = _table.BeginBulkInsert();

            insert.Timeout = Timeout;

            job.OnNotify(this, new NotifyEventArgs(ProgressEventType.Information,
                                                   "SqlBulkCopy to " + _dbInfo.Server + ", " + _dbInfo.GetRuntimeName() + ".." + Table + " initialised for " + dt.Columns.Count + " columns, with a timeout of " + Timeout + ".  First chunk received had rowcount of " + dt.Rows.Count));

            return(insert);
        }
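        // Verifies that uploading a row whose Score value ('.') cannot be parsed as a decimal
        // fails with an error message identifying the offending value and column.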
        public void TestBulkInsert_BadDecimalFormat_DecimalError(DatabaseType type)
        {
            DiscoveredDatabase db = GetTestDatabase(type);

            DiscoveredTable tbl = db.CreateTable("MyBulkInsertTest",
                                                 new[]
            {
                new DatabaseColumnRequest("Id", new DatabaseTypeRequest(typeof(int)))
                {
                    IsAutoIncrement = true, IsPrimaryKey = true
                },
                new DatabaseColumnRequest("Name", new DatabaseTypeRequest(typeof(string), 10)),
                new DatabaseColumnRequest("Score", new DatabaseTypeRequest(typeof(decimal), null, new DecimalSize(2, 1))),
                new DatabaseColumnRequest("Age", new DatabaseTypeRequest(typeof(int)))
            });

            //There are no rows in the table yet
            Assert.AreEqual(0, tbl.GetRowCount());

            using (var dt = new DataTable())
            {
                dt.Columns.Add("age");
                dt.Columns.Add("name");
                dt.Columns.Add("score");

                dt.Rows.Add(60, "Jamie", 1.2);
                dt.Rows.Add(30, "Frank", 1.3);
                dt.Rows.Add(11, "Toad", "."); //bad data
                dt.Rows.Add(100, "King");
                dt.Rows.Add(10, "Frog");

                using (IBulkCopy bulk = tbl.BeginBulkInsert())
                {
                    bulk.Timeout = 30;

                    Exception ex = null;
                    try
                    {
                        bulk.Upload(dt);
                    }
                    catch (Exception e)
                    {
                        ex = e;
                    }

                    Assert.IsNotNull(ex, "Expected upload to fail because value on row 2 is bad");

                    Assert.AreEqual("Failed to parse value '.' in column 'score'", ex.Message);
                    Assert.IsNotNull(ex.InnerException, "Expected parse error to be an inner exception");
                    StringAssert.Contains("Could not parse string value '.' with Decider Type:DecimalTypeDecider", ex.InnerException.Message);
                }
            }
        }
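        // Verifies that uploads made through a managed transaction are only visible inside that
        // transaction until it is committed, after which the rows persist.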
        public void TestBulkInsert_Transaction(DatabaseType type)
        {
            DiscoveredDatabase db = GetTestDatabase(type);

            DiscoveredTable tbl = db.CreateTable("MyBulkInsertTest",
                                                 new[]
            {
                new DatabaseColumnRequest("Name", new DatabaseTypeRequest(typeof(string), 10)),
                new DatabaseColumnRequest("Age", new DatabaseTypeRequest(typeof(int)))
            });


            Assert.AreEqual(0, tbl.GetRowCount());

            using (var dt = new DataTable())
            {
                dt.Columns.Add("Name");
                dt.Columns.Add("Age");
                dt.Rows.Add("Dave", 50);
                dt.Rows.Add("Jamie", 60);

                using (var transaction = tbl.Database.Server.BeginNewTransactedConnection())
                {
                    using (IBulkCopy bulk = tbl.BeginBulkInsert(transaction.ManagedTransaction))
                    {
                        bulk.Timeout = 30;
                        bulk.Upload(dt);

                        //inside transaction the count is 2
                        Assert.AreEqual(2, tbl.GetRowCount(transaction.ManagedTransaction));

                        dt.Rows.Clear();
                        dt.Rows.Add("Frank", 100);

                        bulk.Upload(dt);

                        //inside transaction the count is 3
                        Assert.AreEqual(3, tbl.GetRowCount(transaction.ManagedTransaction));
                    }

                    transaction.ManagedTransaction.CommitAndCloseConnection();
                }
            }

            //Transaction was committed so the final row count should be 3
            Assert.AreEqual(3, tbl.GetRowCount());
        }
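        // Verifies that bulk insert (and a regular Insert) work against columns whose names contain spaces.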
        public void TestBulkInsert_SpacedOutNames(DatabaseType type)
        {
            DiscoveredDatabase db = GetTestDatabase(type);

            DiscoveredTable tbl = db.CreateTable("MyBulkInsertTest",
                                                 new[]
            {
                new DatabaseColumnRequest("Na me", new DatabaseTypeRequest(typeof(string), 10)),
                new DatabaseColumnRequest("A ge", new DatabaseTypeRequest(typeof(int)))
            });

            //There are no rows in the table yet
            Assert.AreEqual(0, tbl.GetRowCount());

            using (var dt = new DataTable())
            {
                dt.Columns.Add("Na me");
                dt.Columns.Add("A ge");
                dt.Rows.Add("Dave", 50);
                dt.Rows.Add("Jamie", 60);

                using (IBulkCopy bulk = tbl.BeginBulkInsert())
                {
                    bulk.Timeout = 30;
                    bulk.Upload(dt);

                    Assert.AreEqual(2, tbl.GetRowCount());

                    dt.Rows.Clear();
                    dt.Rows.Add("Frank", 100);

                    bulk.Upload(dt);

                    Assert.AreEqual(3, tbl.GetRowCount());
                }
            }

            tbl.Insert(new Dictionary <string, object>()
            {
                { "Na me", "George" },
                { "A ge", "300" }
            });

            Assert.AreEqual(4, tbl.GetRowCount());
        }
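        // Writes a report DataTable to the destination: creates the table on first use, otherwise
        // appends the rows with a bulk insert.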
        public override void WriteItems(DataTable items)
        {
            StripWhiteSpace(items);

            items.TableName = _reportName;

            if (!_tbl.Exists())
            {
                _tbl.Database.CreateTable(_tbl.GetRuntimeName(), items);
            }
            else
            {
                using (var insert = _tbl.BeginBulkInsert())
                {
                    insert.Upload(items);
                }
            }
        }
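        // Verifies that a DataTable whose column order differs from the destination table is mapped
        // by column name rather than ordinal, and that the upload does not reorder the DataTable's columns.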
        public void TestBulkInsert_ColumnOrdinals(DatabaseType type)
        {
            DiscoveredDatabase db = GetTestDatabase(type);

            DiscoveredTable tbl = db.CreateTable("MyBulkInsertTest",
                                                 new[]
            {
                new DatabaseColumnRequest("Name", new DatabaseTypeRequest(typeof(string), 10)),
                new DatabaseColumnRequest("Age", new DatabaseTypeRequest(typeof(int)))
            });

            //There are no rows in the table yet
            Assert.AreEqual(0, tbl.GetRowCount());

            using (var dt = new DataTable())
            {
                dt.Columns.Add("Age");
                dt.Columns.Add("Name");
                dt.Rows.Add("50", "David");
                dt.Rows.Add("60", "Jamie");

                Assert.AreEqual("Age", dt.Columns[0].ColumnName);
                Assert.AreEqual(typeof(string), dt.Columns[0].DataType);

                using (IBulkCopy bulk = tbl.BeginBulkInsert())
                {
                    bulk.Timeout = 30;
                    bulk.Upload(dt);

                    Assert.AreEqual(2, tbl.GetRowCount());
                }

                //columns should not be reordered
                Assert.AreEqual("Age", dt.Columns[0].ColumnName);
                Assert.AreEqual(typeof(int), dt.Columns[0].DataType); //but the data type was changed by HardTyping it
            }
        }
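        // Verifies that a decimal value exceeding the precision of the destination Score column is
        // rejected, and checks the provider-specific error message for each DBMS.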
        public void TestBulkInsert_SchemaTooNarrow_DecimalError(DatabaseType type)
        {
            DiscoveredDatabase db = GetTestDatabase(type);

            DiscoveredTable tbl = db.CreateTable("MyBulkInsertTest",
                                                 new[]
            {
                new DatabaseColumnRequest("Id", new DatabaseTypeRequest(typeof(int)))
                {
                    IsAutoIncrement = true, IsPrimaryKey = true
                },
                new DatabaseColumnRequest("Name", new DatabaseTypeRequest(typeof(string), 10)),
                new DatabaseColumnRequest("Score", new DatabaseTypeRequest(typeof(decimal), null, new DecimalSize(2, 1))),
                new DatabaseColumnRequest("Age", new DatabaseTypeRequest(typeof(int)))
            });

            //There are no rows in the table yet
            Assert.AreEqual(0, tbl.GetRowCount());

            using (var dt = new DataTable())
            {
                dt.Columns.Add("age");
                dt.Columns.Add("name");
                dt.Columns.Add("score");

                dt.Rows.Add(60, "Jamie", 1.2);
                dt.Rows.Add(30, "Frank", 1.3);
                dt.Rows.Add(11, "Toad", 111111111.11); //bad data
                dt.Rows.Add(100, "King");
                dt.Rows.Add(10, "Frog");

                using (IBulkCopy bulk = tbl.BeginBulkInsert())
                {
                    bulk.Timeout = 30;

                    Exception ex = null;
                    try
                    {
                        bulk.Upload(dt);
                    }
                    catch (Exception e)
                    {
                        ex = e;
                    }

                    Assert.IsNotNull(ex, "Expected upload to fail because value on row 2 is too long");

                    switch (type)
                    {
                    case DatabaseType.MicrosoftSQLServer:
                        StringAssert.Contains("Failed to load data row 3 the following values were rejected by the database", ex.Message);
                        StringAssert.Contains("Parameter value '111111111.1' is out of range", ex.Message);
                        break;

                    case DatabaseType.MySql:
                        Assert.AreEqual("Out of range value for column 'Score' at row 3", ex.Message);
                        break;

                    case DatabaseType.Oracle:
                        StringAssert.Contains("value larger than specified precision allowed for this column", ex.Message);

                        break;

                    case DatabaseType.PostgreSql:
                        StringAssert.Contains("numeric field overflow", ex.Message);
                        break;

                    default:
                        throw new ArgumentOutOfRangeException(nameof(type), type, null);
                    }
                }
            }
        }
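        // Verifies that a string longer than the destination Name column (varchar(10)) is rejected,
        // and checks the provider-specific error message for each DBMS.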
        public void TestBulkInsert_SchemaTooNarrow_StringError(DatabaseType type)
        {
            DiscoveredDatabase db = GetTestDatabase(type);

            DiscoveredTable tbl = db.CreateTable("MyBulkInsertTest",
                                                 new[]
            {
                new DatabaseColumnRequest("Id", new DatabaseTypeRequest(typeof(int)))
                {
                    IsAutoIncrement = true, IsPrimaryKey = true
                },
                new DatabaseColumnRequest("Name", new DatabaseTypeRequest(typeof(string), 10)),
                new DatabaseColumnRequest("Age", new DatabaseTypeRequest(typeof(int)))
            });

            //There are no rows in the table yet
            Assert.AreEqual(0, tbl.GetRowCount());

            using (var dt = new DataTable())
            {
                dt.Columns.Add("age");
                dt.Columns.Add("name");

                dt.Rows.Add(60, "Jamie");
                dt.Rows.Add(30, "Frank");
                dt.Rows.Add(11, "Toad");
                dt.Rows.Add(50, new string('A', 11));
                dt.Rows.Add(100, "King");
                dt.Rows.Add(10, "Frog");

                using (IBulkCopy bulk = tbl.BeginBulkInsert())
                {
                    bulk.Timeout = 30;

                    Exception ex = null;
                    try
                    {
                        bulk.Upload(dt);
                    }
                    catch (Exception e)
                    {
                        ex = e;
                    }

                    Assert.IsNotNull(ex, "Expected upload to fail because value on row 2 is too long");

                    switch (type)
                    {
                    case DatabaseType.MicrosoftSQLServer:
                        StringAssert.Contains("BulkInsert failed on data row 4 the complaint was about source column <<name>> which had value <<AAAAAAAAAAA>> destination data type was <<varchar(10)>>", ex.Message);
                        break;

                    case DatabaseType.MySql:
                        Assert.AreEqual("Data too long for column 'Name' at row 4", ex.Message);
                        break;

                    case DatabaseType.Oracle:
                        StringAssert.Contains("NAME", ex.Message);
                        StringAssert.Contains("maximum: 10", ex.Message);
                        StringAssert.Contains("actual: 11", ex.Message);

                        break;

                    case DatabaseType.PostgreSql:
                        StringAssert.Contains("value too long for type character varying(10)", ex.Message);
                        break;

                    default:
                        throw new ArgumentOutOfRangeException(nameof(type), type, null);
                    }
                }
            }
        }
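        // Pipeline destination: on the first chunk it resolves the target table name, creates the table
        // if needed and opens a transacted bulk insert; every chunk is then uploaded within that
        // transaction, optionally resizing columns first, with progress reported to the listener.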
        public DataTable ProcessPipelineData(DataTable toProcess, IDataLoadEventListener listener, GracefulCancellationToken cancellationToken)
        {
            if (toProcess == null)
            {
                return(null);
            }

            IDatabaseColumnRequestAdjuster adjuster = null;

            if (Adjuster != null)
            {
                var constructor = new ObjectConstructor();
                adjuster = (IDatabaseColumnRequestAdjuster)constructor.Construct(Adjuster);
            }

            //work out the name of the table we are going to create
            if (TargetTableName == null)
            {
                if (string.IsNullOrWhiteSpace(toProcess.TableName))
                {
                    throw new Exception("Chunk did not have a TableName, did not know what to call the newly created table");
                }

                TargetTableName = QuerySyntaxHelper.MakeHeaderNameSane(toProcess.TableName);
            }

            ClearPrimaryKeyFromDataTableAndExplicitWriteTypes(toProcess);

            StartAuditIfExists(TargetTableName);

            if (_loggingDatabaseListener != null)
            {
                listener = new ForkDataLoadEventListener(listener, _loggingDatabaseListener);
            }

            EnsureTableHasDataInIt(toProcess);

            bool createdTable = false;

            if (_firstTime)
            {
                bool tableAlreadyExistsButEmpty = false;

                if (!_database.Exists())
                {
                    throw new Exception("Database " + _database + " does not exist");
                }

                discoveredTable = _database.ExpectTable(TargetTableName);

                //table already exists
                if (discoveredTable.Exists())
                {
                    tableAlreadyExistsButEmpty = true;

                    if (!AllowLoadingPopulatedTables)
                    {
                        if (discoveredTable.IsEmpty())
                        {
                            listener.OnNotify(this, new NotifyEventArgs(ProgressEventType.Warning, "Found table " + TargetTableName + " already, normally this would forbid you from loading it (data duplication / no primary key etc) but it is empty so we are happy to load it, it will not be created"));
                        }
                        else
                        {
                            throw new Exception("There is already a table called " + TargetTableName + " at the destination " + _database);
                        }
                    }

                    if (AllowResizingColumnsAtUploadTime)
                    {
                        _dataTypeDictionary = discoveredTable.DiscoverColumns().ToDictionary(k => k.GetRuntimeName(), v => v.GetDataTypeComputer(), StringComparer.CurrentCultureIgnoreCase);
                    }
                }
                else
                {
                    listener.OnNotify(this, new NotifyEventArgs(ProgressEventType.Information, "Determined that the table name " + TargetTableName + " is unique at destination " + _database));
                }

                //create the destination table if it does not already exist
                if (!tableAlreadyExistsButEmpty)
                {
                    createdTable = true;

                    if (AllowResizingColumnsAtUploadTime)
                    {
                        _database.CreateTable(out _dataTypeDictionary, TargetTableName, toProcess, ExplicitTypes.ToArray(), true, adjuster);
                    }
                    else
                    {
                        _database.CreateTable(TargetTableName, toProcess, ExplicitTypes.ToArray(), true, adjuster);
                    }

                    listener.OnNotify(this, new NotifyEventArgs(ProgressEventType.Information, "Created table " + TargetTableName + " successfully."));
                }

                _managedConnection = _server.BeginNewTransactedConnection();
                _bulkcopy          = discoveredTable.BeginBulkInsert(_managedConnection.ManagedTransaction);

                if (Culture != null)
                {
                    _bulkcopy.DateTimeDecider.Culture = Culture;
                }

                _firstTime = false;
            }

            try
            {
                if (AllowResizingColumnsAtUploadTime && !createdTable)
                {
                    ResizeColumnsIfRequired(toProcess, listener);
                }

                //push the data
                swTimeSpentWritting.Start();

                _affectedRows += _bulkcopy.Upload(toProcess);

                swTimeSpentWritting.Stop();
                listener.OnProgress(this, new ProgressEventArgs("Uploading to " + TargetTableName, new ProgressMeasurement(_affectedRows, ProgressType.Records), swTimeSpentWritting.Elapsed));
            }
            catch (Exception e)
            {
                _managedConnection.ManagedTransaction.AbandonAndCloseConnection();

                if (LoggingServer != null)
                {
                    _dataLoadInfo.LogFatalError(GetType().Name, ExceptionHelper.ExceptionToListOfInnerMessages(e, true));
                }

                throw new Exception("Failed to write rows (in transaction) to table " + TargetTableName, e);
            }

            return(null);
        }
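        // A minimal stand-alone sketch assembled from the patterns above: create a table, bulk insert
        // a DataTable into it, then check the row count. It assumes the same FAnsi API used throughout
        // these examples (DiscoveredDatabase, DatabaseColumnRequest, IBulkCopy) and the usual
        // System.Data/NUnit usings; the table name "People" and the sample row are illustrative only.
        private void BulkInsertSketch(DiscoveredDatabase db)
        {
            // create a two-column destination table
            DiscoveredTable tbl = db.CreateTable("People", new[]
            {
                new DatabaseColumnRequest("Name", new DatabaseTypeRequest(typeof(string), 10)),
                new DatabaseColumnRequest("Age", new DatabaseTypeRequest(typeof(int)))
            });

            using (var dt = new DataTable())
            {
                dt.Columns.Add("Name");
                dt.Columns.Add("Age");
                dt.Rows.Add("Ada", 36);

                // bulk insert the single batch
                using (IBulkCopy bulk = tbl.BeginBulkInsert())
                {
                    bulk.Timeout = 30;
                    bulk.Upload(dt);
                }
            }

            Assert.AreEqual(1, tbl.GetRowCount());
        }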