public void BigData_CSV_ACCESS_ViaOdbc()
        {
            int numberOfRows = 2000;

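            // Register the Access database (via ODBC) as the default connection for all following tasks.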
            ControlFlow.CurrentDbConnection = new AccessOdbcConnectionManager(new OdbcConnectionString(AccessConnectionStringParameter))
            {
                AlwaysUseSameConnection = false
            };
            Stopwatch       watch        = new Stopwatch();
            TableDefinition stagingTable = new TableDefinition("staging", new List <TableColumn>()
            {
                new TableColumn("Col1", "CHAR", allowNulls: true),
                new TableColumn("Col2", "CHAR", allowNulls: true),
                new TableColumn("Col3", "CHAR", allowNulls: false),
                new TableColumn("Col4", "CHAR", allowNulls: false),
            });

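            // Drop the staging table if it already exists (errors are swallowed), then recreate it from the definition above.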
            try {
                SqlTask.ExecuteNonQuery("Try to drop table", $@"DROP TABLE {stagingTable.Name};");
            } catch { }
            new CreateTableTask(stagingTable)
            {
                ThrowErrorIfTableExists = true
            }.Execute();
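            // Configure the helper that generates a CSV test file matching the staging table's schema.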
            string        fileName = "src/ConnectionManager/AccessBigData_CSV2DB.csv";
            BigDataHelper bigData  = new BigDataHelper()
            {
                FileName        = fileName,
                NumberOfRows    = numberOfRows,
                TableDefinition = stagingTable
            };

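            // Generate the CSV file and log how long it took.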
            watch.Start();
            LogTask.Info($"Create .csv file {fileName} with {numberOfRows} Rows");
            bigData.CreateBigDataCSV();
            LogTask.Info($"Needed {watch.Elapsed.TotalMinutes} to create .csv file");
            watch.Reset();

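            // Data flow: read the CSV file and write it into the staging table, 30 rows per batch.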
            CSVSource source = new CSVSource(fileName);
            DBDestination<string[]> dest = new DBDestination<string[]>(30)
            {
                DestinationTableDefinition = stagingTable
            };

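            // Link source to destination, run the flow, and wait until all rows have been written.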
            source.LinkTo(dest);
            watch.Start();
            source.Execute();
            dest.Wait();
            LogTask.Info($"Needed {watch.Elapsed.TotalMinutes} to write everything into database");

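            // Every generated row should have arrived in the staging table.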
            Assert.AreEqual(numberOfRows, RowCountTask.Count(stagingTable.Name));
        }
Example #2
        public void BigData_CSV_DB(int numberOfRows)
        {
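            // Define and create a staging table with four fixed-width nchar columns.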
            Stopwatch       watch        = new Stopwatch();
            TableDefinition stagingTable = new TableDefinition("test.Staging", new List <TableColumn>()
            {
                new TableColumn("Col1", "nchar(1000)", allowNulls: false),
                new TableColumn("Col2", "nchar(1000)", allowNulls: false),
                new TableColumn("Col3", "nchar(1000)", allowNulls: false),
                new TableColumn("Col4", "nchar(1000)", allowNulls: false),
            });

            stagingTable.CreateTable();
            string        fileName = "src/DataFlow/BigData_CSV2DB.csv";
            BigDataHelper bigData  = new BigDataHelper()
            {
                FileName        = fileName,
                NumberOfRows    = numberOfRows,
                TableDefinition = stagingTable
            };

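            // Generate the test CSV file and log the time needed.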
            watch.Start();
            LogTask.Info($"Create .csv file {fileName} with {numberOfRows} Rows");
            bigData.CreateBigDataCSV();
            LogTask.Info($"Needed {watch.Elapsed.TotalMinutes} to create .csv file");
            watch.Reset();

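            // Data flow: CSV source into the staging table, writing in batches of 1000 rows.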
            CSVSource source = new CSVSource(fileName);
            DBDestination<string[]> dest = new DBDestination<string[]>(1000)
            {
                DestinationTableDefinition = stagingTable
            };

            source.LinkTo(dest);

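            // Execute() blocks until the source has read and pushed every row; Wait() blocks until the destination has finished writing.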
            watch.Start();
            source.Execute();
            LogTask.Info($"Needed {watch.Elapsed.TotalMinutes} to read everything into memory (while constantly writing)");
            LogTask.Info($"Already {RowCountTask.Count("test.Staging", RowCountOptions.QuickQueryMode)} inserted into table");
            dest.Wait(); //TODO Wait should be part of source
            LogTask.Info($"Needed {watch.Elapsed.TotalMinutes} to write everything into database");

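            // The staging table should now contain exactly numberOfRows rows.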
            Assert.AreEqual(numberOfRows, SqlTask.ExecuteScalar <int>("Check staging table", $"select count(*) from test.Staging"));
        }
Example #3
        public void BigData_CSV_DB(int numberOfRows, bool useGenericCSVSource = false)
        {
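            // Same staging table setup and CSV generation as in the previous example.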
            Stopwatch       watch        = new Stopwatch();
            TableDefinition stagingTable = new TableDefinition("test.Staging", new List <TableColumn>()
            {
                new TableColumn("Col1", "nchar(1000)", allowNulls: false),
                new TableColumn("Col2", "nchar(1000)", allowNulls: false),
                new TableColumn("Col3", "nchar(1000)", allowNulls: false),
                new TableColumn("Col4", "nchar(1000)", allowNulls: false),
            });

            stagingTable.CreateTable();
            string        fileName = "src/DataFlow/BigData_CSV2DB.csv";
            BigDataHelper bigData  = new BigDataHelper()
            {
                FileName        = fileName,
                NumberOfRows    = numberOfRows,
                TableDefinition = stagingTable
            };

            watch.Start();
            LogTask.Info($"Create .csv file {fileName} with {numberOfRows} Rows");
            bigData.CreateBigDataCSV();
            LogTask.Info($"Needed {watch.Elapsed.TotalMinutes} to create .csv file");
            watch.Reset();

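            // Load the CSV with either the generic or the default CSVSource implementation; the selected helper runs the flow and uses the passed stopwatch for timing.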
            if (useGenericCSVSource)
            {
                StartGenericCSVLoad(watch, stagingTable, fileName);
            }
            else
            {
                StartDefaultCSVLoad(watch, stagingTable, fileName);
            }

            LogTask.Info($"Needed {watch.Elapsed.TotalMinutes} to write everything into database");

            Assert.AreEqual(numberOfRows, SqlTask.ExecuteScalar <int>("Check staging table", $"select count(*) from test.Staging"));
        }