Code example #1
        public void TestBulkInsert_ExplicitDateTimeFormats(DatabaseType type)
        {
            DiscoveredDatabase db  = GetTestDatabase(type);
            DiscoveredTable    tbl = db.CreateTable("MyDateTestTable",
                                                    new[]
            {
                new DatabaseColumnRequest("MyDate", new DatabaseTypeRequest(typeof(DateTime)))
                {
                    AllowNulls = false
                },
            });

            //There are no rows in the table yet
            Assert.AreEqual(0, tbl.GetRowCount());

            using (var dt = new DataTable())
            {
                dt.Columns.Add("MyDate");
                dt.Rows.Add("20011230");

                using (IBulkCopy bulk = tbl.BeginBulkInsert())
                {
                    bulk.Timeout = 30;
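                    //tell the DateTimeDecider to accept raw strings in yyyyMMdd format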
                    bulk.DateTimeDecider.Settings.ExplicitDateFormats = new[] { "yyyyMMdd" };
                    bulk.Upload(dt);
                }
            }

            var dtDown = tbl.GetDataTable();

            Assert.AreEqual(new DateTime(2001, 12, 30), dtDown.Rows[0]["MyDate"]);
        }
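
The ExplicitDateFormats setting above tells FAnsi's DateTimeDecider to treat strings like "20011230" as dates during the upload. As a point of reference, the same parse can be reproduced with nothing but the BCL; this is a minimal sketch (the format string is taken from the test, everything else is illustrative):

    using System;
    using System.Globalization;

    //parse the raw string exactly the way the bulk insert above expects to
    bool ok = DateTime.TryParseExact("20011230", "yyyyMMdd",
                                     CultureInfo.InvariantCulture,
                                     DateTimeStyles.None,
                                     out DateTime parsed);
    //ok is true and parsed equals new DateTime(2001, 12, 30)
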
Code example #2
        public void NowTestDataInsertion(DatabaseType dbType)
        {
            AlterTest_InvalidThenRecreateItAndItsValidAgain(dbType);

            _table.Insert(new Dictionary<string, object>
            {
                { "name", "Franky" },
                { "bubbles", 3 },
                { "hic_validFrom", new DateTime(2001, 1, 2) },
                { "hic_dataLoadRunID", 7 }
            });

            var liveOldRow = _table.GetDataTable().Rows.Cast<DataRow>().Single(r => r["bubbles"] as int? == 3);

            Assert.AreEqual(new DateTime(2001, 1, 2), ((DateTime)liveOldRow[SpecialFieldNames.ValidFrom]));

            RunSQL("UPDATE {0} SET bubbles = 99", _table.GetFullyQualifiedName());

            //new value is 99
            Assert.AreEqual(99, ExecuteScalar("Select bubbles FROM {0} where name = 'Franky'", _table.GetFullyQualifiedName()));
            //archived value is 3
            Assert.AreEqual(3, ExecuteScalar("Select bubbles FROM {0} where name = 'Franky'", _archiveTable.GetFullyQualifiedName()));

            //Legacy table valued function only works for MicrosoftSQLServer
            if (dbType == DatabaseType.MicrosoftSQLServer)
            {
                //legacy in 2001-01-01 it didn't exist
                Assert.IsNull(ExecuteScalar("Select bubbles FROM TriggerTests_Legacy('2001-01-01') where name = 'Franky'"));
                //legacy in 2001-01-03 it did exist and was 3
                Assert.AreEqual(3, ExecuteScalar("Select bubbles FROM TriggerTests_Legacy('2001-01-03') where name = 'Franky'"));
                //legacy boundary case?
                Assert.AreEqual(3, ExecuteScalar("Select bubbles FROM TriggerTests_Legacy('2001-01-02') where name = 'Franky'"));

                //legacy today it is 99
                Assert.AreEqual(99, ExecuteScalar("Select bubbles FROM TriggerTests_Legacy(GETDATE()) where name = 'Franky'"));
            }

            // Live row should now reflect that it is validFrom today
            var liveNewRow = _table.GetDataTable().Rows.Cast<DataRow>().Single(r => r["bubbles"] as int? == 99);

            Assert.AreEqual(DateTime.Now.Date, ((DateTime)liveNewRow[SpecialFieldNames.ValidFrom]).Date);

            // Archived row should not have had its validFrom field broken
            var archivedRow = _archiveTable.GetDataTable().Rows.Cast<DataRow>().Single(r => r["bubbles"] as int? == 3);

            Assert.AreEqual(new DateTime(2001, 1, 2), ((DateTime)archivedRow[SpecialFieldNames.ValidFrom]));
        }
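
The boundary assertions above show that hic_validFrom is inclusive: querying the legacy function for 2001-01-02, the exact validFrom date, still returns the archived value 3. The same "as of" selection can be sketched in LINQ over a simple history list (the Version record and BubblesAsOf helper are hypothetical, not RDMP API):

    using System;
    using System.Collections.Generic;
    using System.Linq;

    record Version(int Bubbles, DateTime ValidFrom);

    static int? BubblesAsOf(IEnumerable<Version> history, DateTime asOf)
    {
        //take the newest version whose (inclusive) validFrom is on or before asOf
        return history
            .Where(v => v.ValidFrom.Date <= asOf.Date)
            .OrderByDescending(v => v.ValidFrom)
            .Select(v => (int?)v.Bubbles)
            .FirstOrDefault();
    }

    //asOf 2001-01-01 -> null (the row did not exist yet)
    //asOf 2001-01-02 -> 3    (inclusive boundary, matching the test)
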
Code example #3
File: FlatFileAttacher.cs Project: lulzzz/RDMP
        private void LoadFile(DiscoveredTable tableToLoad, FileInfo fileToLoad, DiscoveredDatabase dbInfo, Stopwatch timer, IDataLoadJob job)
        {
            using (var con = dbInfo.Server.GetConnection())
            {
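                //fetch an empty copy of the destination table (schema only, no rows)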
                DataTable dt = tableToLoad.GetDataTable(0);

                using (var insert = tableToLoad.BeginBulkInsert(Culture))
                {
                    //set up the bulk insert into the destination
                    insert.Timeout = 500000;

                    job.OnNotify(this, new NotifyEventArgs(ProgressEventType.Information, "About to open file " + fileToLoad.FullName));
                    OpenFile(fileToLoad, job);

                    //confirm the validity of the headers
                    ConfirmFlatFileHeadersAgainstDataTable(dt, job);

                    con.Open();

                    //now we will read data out of the file in batches
                    int batchNumber         = 1;
                    int maxBatchSize        = 10000;
                    int recordsCreatedSoFar = 0;

                    try
                    {
                        //while there is data to be loaded into table
                        while (IterativelyBatchLoadDataIntoDataTable(dt, maxBatchSize) != 0)
                        {
                            DropEmptyColumns(dt);
                            ConfirmFitToDestination(dt, tableToLoad, job);
                            try
                            {
                                recordsCreatedSoFar += insert.Upload(dt);

                                dt.Rows.Clear(); //very important: otherwise the next batch is appended after this one and the earlier rows are uploaded again, duplicating records and multiplying the upload size

                                job.OnProgress(this,
                                               new ProgressEventArgs(dbInfo.GetRuntimeName(),
                                                                     new ProgressMeasurement(recordsCreatedSoFar, ProgressType.Records), timer.Elapsed));
                            }
                            catch (Exception e)
                            {
                                throw new Exception("Error processing batch number " + batchNumber + " (of batch size " + maxBatchSize + ")", e);
                            }
                        }
                    }
                    catch (Exception e)
                    {
                        throw new FlatFileLoadException("Error processing file " + fileToLoad, e);
                    }
                    finally
                    {
                        CloseFile();
                    }
                }
            }
        }
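
The important pattern in this loader is reusing a single DataTable across batches: fill it with up to maxBatchSize rows, upload it, then clear the rows (keeping the columns) before the next read. A stripped-down sketch of that loop, with a hypothetical ReadBatch helper standing in for IterativelyBatchLoadDataIntoDataTable:

    using System;
    using System.Data;
    using System.IO;

    //hypothetical helper: fills dt with up to batchSize rows and returns how many were added
    static int ReadBatch(StreamReader reader, DataTable dt, int batchSize)
    {
        int added = 0;
        string line;
        while (added < batchSize && (line = reader.ReadLine()) != null)
        {
            dt.Rows.Add(line);
            added++;
        }
        return added;
    }

    static void LoadInBatches(StreamReader reader, Action<DataTable> upload)
    {
        var dt = new DataTable();
        dt.Columns.Add("Value");

        //while there is still data in the file, upload it one batch at a time
        while (ReadBatch(reader, dt, 10000) != 0)
        {
            upload(dt);
            dt.Rows.Clear(); //keep the schema, drop the rows: prevents re-uploading earlier batches
        }
    }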