Example #1
        private static void ProcessAndUploadDataSet(ReddDataSet dataSet, PerformanceResult performanceResult, int bulkSize)
        {
            using (MySqlConnection insertConnection = new MySqlConnection(ConnectionString))
            {
                insertConnection.Open();
                var sw = Stopwatch.StartNew();

                var rows = new List<string>();
                int pos  = 0;
                foreach (var line in File.ReadLines(dataSet.FilePath))
                {
                    // Each line is "<unix timestamp> <power reading>"
                    var split = line.Split(' ');
                    var date  = FromUnixTime(long.Parse(split[0]));
                    rows.Add($"({split[1]}, {dataSet.Id}, {date.Day}, {date.Month})");
                    if (++pos % bulkSize == 0)
                    {
                        StoreDataOnServer(rows, insertConnection);
                        // Start the next batch empty; otherwise every batch re-inserts all earlier rows
                        rows.Clear();
                    }
                }

                // Store the rows of the final, partial batch
                StoreDataOnServer(rows, insertConnection);
                performanceResult.InsertTimeMs    = sw.ElapsedMilliseconds;
                performanceResult.NumberOfNewRows = dataSet.LineCount;
            }
        }
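
The helpers FromUnixTime and StoreDataOnServer called above are not shown in this example. A minimal sketch of what they might look like, assuming MySql.Data.MySqlClient and a table whose name and columns (guessed here as readings(power, dataSetId, day, month)) match the value tuples built in the loop:

        // Hypothetical helpers; only the names and call sites come from the example above.
        private static DateTime FromUnixTime(long seconds)
        {
            // Available from .NET Framework 4.6 / .NET Core onwards
            return DateTimeOffset.FromUnixTimeSeconds(seconds).UtcDateTime;
        }

        private static void StoreDataOnServer(List<string> rows, MySqlConnection connection)
        {
            // The final batch can be empty when the line count divides evenly by bulkSize
            if (rows.Count == 0)
            {
                return;
            }

            // One multi-row INSERT per batch keeps round trips to a minimum;
            // the table and column names here are assumptions
            var sql = "INSERT INTO readings (power, dataSetId, day, month) VALUES " + string.Join(", ", rows);
            using (var command = new MySqlCommand(sql, connection))
            {
                command.ExecuteNonQuery();
            }
        }

Sending one multi-row INSERT per bulkSize rows, rather than one statement per line, is what keeps the measured InsertTimeMs from being dominated by network round trips.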
Example #2
        public static PerformanceResult BenchmarkDataSet(ReddDataSet dataSet, int numOfRepetitions = 5)
        {
            var performanceResult = new PerformanceResult {
                DataSet = dataSet
            };

            // Upload the data set in batches of 100,000 rows, timing the insert
            ProcessAndUploadDataSet(dataSet, performanceResult, 100000);

            // Time the server-side calculation over the requested number of repetitions
            BenchmarkServerCalculation(performanceResult, numOfRepetitions);

            return performanceResult;
        }
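
        // A call site for BenchmarkDataSet might look like the sketch below; the
        // ReddDataSet initializer is an assumption, since only FilePath, Id and
        // LineCount are referenced by the methods in these examples.
        public static void RunBenchmarkExample()
        {
            var dataSet = new ReddDataSet { FilePath = @"C:\data\redd\channel_1.dat" };
            var result  = BenchmarkDataSet(dataSet, numOfRepetitions: 10);
            Console.WriteLine($"Inserted {result.NumberOfNewRows} rows in {result.InsertTimeMs} ms");
        }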
        public static void LoadCsvDataIntoSqlServer(ReddDataSet dataSet)
        {
            // Requires Microsoft.VisualBasic.FileIO for TextFieldParser, plus System.Data and System.Data.SqlClient.
            // dataSet.FilePath should be the full path to the CSV file.

            var createdCount = 0;

            using (var textFieldParser = new TextFieldParser(dataSet.FilePath))
            {
                textFieldParser.TextFieldType = FieldType.Delimited;
                textFieldParser.Delimiters    = new[] { " " };

                var connectionString = "Server=localhost;Database=load_profiles;Trusted_Connection=True"; // assumes Windows authentication

                var dataTable = new DataTable(_tableName);

                // Add the columns in the temp table
                dataTable.Columns.Add("meterId");
                dataTable.Columns.Add("timestamp");
                dataTable.Columns.Add("power");

                using (var sqlConnection = new SqlConnection(connectionString))
                {
                    sqlConnection.Open();

                    // Create the bulk copy object targeting the destination table
                    using (var sqlBulkCopy = new SqlBulkCopy(sqlConnection)
                    {
                        DestinationTableName = _tableName
                    })
                    {
                        // Set up the column mappings; anything omitted is skipped
                        sqlBulkCopy.ColumnMappings.Add("meterId", "meterId");
                        sqlBulkCopy.ColumnMappings.Add("timestamp", "timestamp");
                        sqlBulkCopy.ColumnMappings.Add("power", "power");

                        // Loop through the CSV and load each set of 100,000 records into the DataTable,
                        // then send each batch to the live table
                        while (!textFieldParser.EndOfData)
                        {
                            // Each record is "<timestamp> <power>"; the meter id comes from the data set
                            var fields = textFieldParser.ReadFields();
                            dataTable.Rows.Add(dataSet.Id, fields[0], fields[1]);

                            createdCount++;

                            if (createdCount % _batchSize == 0)
                            {
                                InsertDataTable(sqlBulkCopy, sqlConnection, dataTable);

                                // Clear the batch and keep reading, instead of breaking out of the
                                // loop after the first batch
                                dataTable.Rows.Clear();
                            }
                        }

                        // Don't forget to send the last batch under 100,000
                        InsertDataTable(sqlBulkCopy, sqlConnection, dataTable);
                    }
                }
            }
        }
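
InsertDataTable is the one helper this second example leaves out. A minimal sketch under the signature used above (the body is an assumption; SqlBulkCopy.WriteToServer(DataTable) is the standard bulk-load call):

        // Hypothetical helper matching the calls above; the body is an assumption.
        // sqlConnection is unused here but kept to match the call sites.
        private static void InsertDataTable(SqlBulkCopy sqlBulkCopy, SqlConnection sqlConnection, DataTable dataTable)
        {
            // The last batch can be empty when the record count divides evenly by _batchSize
            if (dataTable.Rows.Count == 0)
            {
                return;
            }

            // Streams the whole DataTable to the destination table in one bulk operation
            sqlBulkCopy.WriteToServer(dataTable);
        }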