[Fact]
public void fill_data_table_uses_table_columns_if_specified()
        {
            var table = new DataTable();
            table.Columns.Add("First");
            table.Columns.Add("Second");
            var csv = @"Header1,Header2
1,2";

            using (var reader = CsvReader.FromCsvString(csv))
            {
                reader.ReadHeaderRecord();

                Assert.Equal(1, table.Fill(reader));
                Assert.Equal(1, table.Rows.Count);
                Assert.Equal("First", table.Columns[0].ColumnName);
                Assert.Equal("Second", table.Columns[1].ColumnName);
                Assert.Equal("1", table.Rows[0][0]);
                Assert.Equal("2", table.Rows[0][1]);
            }
        }
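The test name above implies the complementary default: when the DataTable defines no columns of its own, Fill creates them from the header record. A minimal sketch of that case, assuming this header-derived behavior and the KBCsv namespaces (KBCsv, plus the namespace that carries the Fill extension):

        [Fact]
        public void fill_data_table_uses_header_record_when_no_columns_are_specified()
        {
            // Sketch only: Fill is assumed to create columns named after the header record.
            var table = new DataTable();
            var csv = @"Header1,Header2
1,2";

            using (var reader = CsvReader.FromCsvString(csv))
            {
                reader.ReadHeaderRecord();

                Assert.Equal(1, table.Fill(reader));
                Assert.Equal("Header1", table.Columns[0].ColumnName);
                Assert.Equal("Header2", table.Columns[1].ColumnName);
            }
        }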
Example #2
        private static void FillDataTableFromCSVFile()
        {
            #region FillDataTableFromCSVFile

            var table = new DataTable();

            using (var streamReader = new StreamReader("PlanetaryData.csv"))
            using (var reader = new CsvReader(streamReader))
            {
                reader.ReadHeaderRecord();
                table.Fill(reader);
            }

            Console.WriteLine("Table contains {0} rows.", table.Rows.Count);

            #endregion
        }
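For files too large to read in one pass, the maximum-records overload of Fill exercised in the tests below can be used to process the input in batches. A rough sketch, assuming the same PlanetaryData.csv input and that Fill names the columns of each fresh table from the header record already read:

        private static void FillDataTableFromCSVFileInBatches()
        {
            // Assumed usings: System, System.Data, System.IO, KBCsv, plus the Fill extension namespace.
            using (var streamReader = new StreamReader("PlanetaryData.csv"))
            using (var reader = new CsvReader(streamReader))
            {
                reader.ReadHeaderRecord();

                while (reader.HasMoreRecords)
                {
                    // pull at most 100 records per iteration into a fresh table
                    var batch = new DataTable();
                    var read = batch.Fill(reader, 100);
                    Console.WriteLine("Read a batch of {0} rows.", read);
                }
            }
        }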
        [Fact]
        public void fill_data_table_throws_if_the_number_of_values_in_a_record_exceeds_the_number_of_columns_in_the_data_table()
        {
            var table = new DataTable();
            table.Columns.Add("First");
            table.Columns.Add("Second");

            using (var reader = CsvReader.FromCsvString("first,second,third"))
            {
                var ex = Assert.Throws<InvalidOperationException>(() => table.Fill(reader));
                Assert.Equal("DataTable has 2 columns, but a DataRecord had 3. The number of columns in the DataTable must match or exceed the number of values in each DataRecord.", ex.Message);
            }
        }
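One way to avoid that exception is to size the DataTable from the header before filling it. A sketch under the assumption that ReadHeaderRecord returns the parsed header record and that the record can be enumerated as its string values:

        // Sketch: add one DataTable column per header value before calling Fill.
        var table = new DataTable();

        using (var reader = CsvReader.FromCsvString("Header1,Header2,Header3\n1,2,3"))
        {
            // assumption: the returned header record enumerates its values as strings
            var header = reader.ReadHeaderRecord();

            foreach (var columnName in header)
            {
                table.Columns.Add(columnName);
            }

            table.Fill(reader);
        }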
        [Fact]
        public void fill_data_table_works_with_large_csv_input()
        {
            var csv = string.Empty;

            using (var stringWriter = new StringWriter())
            using (var writer = new CsvWriter(stringWriter))
            {
                writer.WriteRecord("Header1", "Header2");

                for (var i = 0; i < 1000; ++i)
                {
                    writer.WriteRecord("value0_" + i, "value1_" + i);
                }

                writer.Flush();
                csv = stringWriter.ToString();
            }

            // read less than all available records
            using (var reader = CsvReader.FromCsvString(csv))
            {
                var table = new DataTable();
                reader.ReadHeaderRecord();

                Assert.Equal(913, table.Fill(reader, 913));
                Assert.Equal(913, table.Rows.Count);
                Assert.True(reader.HasMoreRecords);
            }

            // read exactly available records
            using (var reader = CsvReader.FromCsvString(csv))
            {
                var table = new DataTable();
                reader.ReadHeaderRecord();

                Assert.Equal(1000, table.Fill(reader, 1000));
                Assert.Equal(1000, table.Rows.Count);
                Assert.False(reader.HasMoreRecords);
            }

            // attempt to read more than available records
            using (var reader = CsvReader.FromCsvString(csv))
            {
                var table = new DataTable();
                reader.ReadHeaderRecord();

                Assert.Equal(1000, table.Fill(reader, 1500));
                Assert.Equal(1000, table.Rows.Count);
                Assert.False(reader.HasMoreRecords);
            }
        }
        [Fact]
        public void fill_data_table_stops_if_it_reaches_maximum_records()
        {
            var table = new DataTable();
            var csv = @"Header1,Header2
1,2
3,4
5,6
7,8";

            using (var reader = CsvReader.FromCsvString(csv))
            {
                reader.ReadHeaderRecord();

                Assert.Equal(3, table.Fill(reader, 3));
                Assert.Equal(3, table.Rows.Count);
                Assert.True(reader.HasMoreRecords);
            }
        }
        [Fact]
        public void fill_data_table_stops_short_of_maximum_records_if_it_runs_out_of_data()
        {
            var table = new DataTable();
            var csv = @"Header1,Header2
1,2
3,4";

            using (var reader = CsvReader.FromCsvString(csv))
            {
                reader.ReadHeaderRecord();

                Assert.Equal(2, table.Fill(reader, 10));
                Assert.Equal(2, table.Rows.Count);
            }
        }
Example #7
        /// <summary>
        /// Imports the data set from a CSV file.
        /// </summary>
        /// <param name="path">Path to the file.</param>
        /// <returns>Whether the operation succeeded.</returns>
        public bool Import(String path)
        {
            try
            {
                log.Info("Import CSV file...");
                var timer = new Stopwatch();
                timer.Start();

                dataTable = new DataTable();

                using (var streamReader = new StreamReader(path))
                using (var reader = new CsvReader(streamReader))
                {
                    reader.ValueSeparator = ';';
                    reader.ReadHeaderRecord();
                    dataTable.Fill(reader);
                }

                timer.Stop();
                log.Info(String.Format("Import complete! Csv file contains {0} rows. Elapsed time: {1} ms",
                    dataTable.Rows.Count, timer.ElapsedMilliseconds));
                return true;
            }
            catch (Exception ex)
            {
                log.Error("Can't read file!", ex);
                return false;
            }
        }
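A complementary export could be sketched as follows, writing the filled dataTable back out through CsvWriter using the same WriteRecord call shown in the large-input test above. The Export name, the System.Linq usage, and the ValueSeparator property on the writer (mirroring the one set on the reader) are assumptions:

        public bool Export(String path)
        {
            try
            {
                using (var streamWriter = new StreamWriter(path))
                using (var writer = new CsvWriter(streamWriter))
                {
                    // assumed to exist on the writer, mirroring CsvReader.ValueSeparator
                    writer.ValueSeparator = ';';

                    // header record built from the DataTable's column names
                    writer.WriteRecord(dataTable.Columns.Cast<DataColumn>().Select(c => c.ColumnName).ToArray());

                    // one record per row, converting every value to its string form
                    foreach (DataRow row in dataTable.Rows)
                    {
                        writer.WriteRecord(row.ItemArray.Select(v => Convert.ToString(v)).ToArray());
                    }

                    writer.Flush();
                }

                return true;
            }
            catch (Exception ex)
            {
                log.Error("Can't write file!", ex);
                return false;
            }
        }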