public void CsvReaderThrowsIfNotInitialized()
{
    // Indexer access before the first Read() call must throw.
    using (var uninitialized = new CsvDataReader(new StreamReader(GetFileStream("testdata.csv"))))
    {
        ExceptionAssert.Throws<InvalidOperationException>(() =>
        {
            var _ = uninitialized["Name"];
        });
    }
}
        public void CanReadCsv()
        {
            // Read the sample file with an explicit UTF-8 reader and verify the resulting table.
            IDataTable table;

            var textReader = new StreamReader(GetFileStream("testdata.csv"), Encoding.UTF8);
            using (var csv = new CsvDataReader(textReader))
            {
                table = LightweightDataTable.FromDataReader(csv);
            }

            VerifyDataTable(table);
        }
Пример #3
0
        /// <summary>
        /// Seeds the Country reference data from the embedded CSV resource.
        /// Existing countries are left untouched; new ones are created with an
        /// ordinal that pins the UK and US to the top of sorted lists.
        /// </summary>
        /// <exception cref="InvalidOperationException">
        /// Thrown when a row has a missing/empty name, or when the import fails
        /// (the transaction is rolled back and the cause is wrapped).
        /// </exception>
        internal static void CheckReferenceData()
        {
            // csv...
            string data = ResourceHelper.GetString("AmxMobile.Services.Resources.Countries.csv");
            using (StringReader reader = new StringReader(data))
            {
                CsvDataReader csv = new CsvDataReader(reader, true);

                // txn...
                using (TransactionState txn = Database.StartTransaction())
                {
                    try
                    {
                        while (csv.Read())
                        {
                            // Every row must carry a non-empty country name.
                            string name = csv.GetString("Common Name");
                            if (name == null)
                                throw new InvalidOperationException("'name' is null.");
                            if (name.Length == 0)
                                throw new InvalidOperationException("'name' is zero-length.");

                            // Only create countries that don't exist yet.
                            Country country = Country.GetByName(name);
                            if (country == null)
                            {
                                country = new Country();
                                country.Name = name;

                                // BUG FIX: the original used two independent `if` statements, so the
                                // trailing `else` overwrote the United Kingdom ordinal (1000) with 9999.
                                // A single else-if chain assigns exactly one ordinal per country.
                                if (string.Compare(name, "United Kingdom", true, Cultures.System) == 0)
                                    country.Ordinal = 1000;
                                else if (string.Compare(name, "United States", true, Cultures.System) == 0)
                                    country.Ordinal = 1001;
                                else
                                    country.Ordinal = 9999;

                                // save...
                                country.SaveChanges();
                            }
                        }

                        // ok...
                        txn.Commit();
                    }
                    catch (Exception ex)
                    {
                        txn.Rollback(ex);
                        throw new InvalidOperationException("The operation failed", ex);
                    }
                }

            }
        }
Пример #4
0
        /// <summary>
        /// Pins current behavior: with HasHeaderRecord = false, loading through
        /// CsvDataReader into a DataTable yields an empty row collection here.
        /// </summary>
        public void DataTableLoadNoHeaderTest()
        {
            var config = new CsvConfiguration(CultureInfo.InvariantCulture)
            {
                HasHeaderRecord = false,
            };
            var s = new StringBuilder();

            s.AppendLine("1,one");
            s.AppendLine("2,two");

            using (var reader = new StringReader(s.ToString()))
            using (var csv = new CsvReader(reader, config))
            using (var dataReader = new CsvDataReader(csv)) // FIX: CsvDataReader is IDisposable and was never disposed
            {
                var dataTable = new DataTable();

                dataTable.Load(dataReader);

                Assert.AreEqual(0, dataTable.Rows.Count);
            }
        }
Пример #5
0
        /// <summary>
        /// Dumps every field of .\test.csv to the console, one record per line,
        /// fields separated by spaces, then waits for a key press.
        /// </summary>
        private static void Main(string[] args)
        {
            var config = new CsvConfiguration(CultureInfo.InvariantCulture)
            {
                HasHeaderRecord = false
            };

            // FIX: CsvReader and CsvDataReader are IDisposable and were never disposed.
            using (var textReader = new StreamReader(@".\test.csv"))
            using (var csvReader = new CsvReader(textReader, config))
            using (var csvDataReader = new CsvDataReader(csvReader))
            {
                while (csvDataReader.Read())
                {
                    for (int i = 0; i < csvDataReader.FieldCount; i++)
                    {
                        Console.Write(csvDataReader.GetString(i) + " ");
                    }
                    Console.WriteLine(" ");
                }
            }

            Console.Read();
        }
Пример #6
0
        public void GetOrdinalMissingTest()
        {
            // Looking up a column name that is not in the header must throw.
            var rows = new List <string[]>
            {
                new[] { "Id", "Name" },
                new[] { "1", "one" },
                null,
            };
            var parser = new ParserMock(new Queue <string[]>(rows));

            using (var csv = new CsvReader(parser))
            using (var dr = new CsvDataReader(csv))
            {
                Assert.ThrowsException <IndexOutOfRangeException>(() => dr.GetOrdinal("Foo"));
            }
        }
Пример #7
0
 /// <summary>
 /// Loads the CDC1 evidence CSV ("duplicates_base.csv", no header row) into
 /// the _cdc1Evidence dictionary, keyed on the lower-cased first column.
 /// Duplicate keys are logged as warnings and skipped.
 /// </summary>
 private void LoadEvidenceCsv()
 {
     this.loggerWrapper.Info("Reading CDC1 evidence data...");
     using (var csv = CsvDataReader.Create("duplicates_base.csv", new CsvDataReaderOptions {
         HasHeaders = false
     }))
     {
         while (csv.Read())
         {
             string key = csv.GetString(0);
             string value = csv.GetString(1);

             // BUG FIX: the original checked ContainsKey on the raw key but added the
             // lower-cased key, so keys differing only in case bypassed the duplicate
             // check and Add could throw. Check and add with the same normalised key.
             string normalisedKey = key.ToLower();
             if (!this._cdc1Evidence.ContainsKey(normalisedKey))
             {
                 this._cdc1Evidence.Add(normalisedKey, value);
             }
             else
             {
                 this.loggerWrapper.Warning($"Row with key {key}, value {value} could not be added, key already exists");
             }
         }
     }
     this.loggerWrapper.Info($"CDC1 evidence data loaded ({this._cdc1Evidence.Count} rows)");
 }
Пример #8
0
 /// <summary>
 /// Loads the given CSV file into a DataTable named "csvDataTable".
 /// On failure a message is printed and the exception is rethrown.
 /// </summary>
 private DataTable ReadCSVFile(string filename)
 {
     try
     {
         using (var reader = new StreamReader(filename))
         using (var csv = new CsvReader(reader, CultureInfo.InvariantCulture))
         using (var dr = new CsvDataReader(csv))
         {
             var table = new DataTable();
             table.TableName = "csvDataTable";
             table.Load(dr);
             return table;
         }
     }
     catch (Exception)
     {
         Console.WriteLine("There is an issue reading the input (CSV) file");
         throw;
     }
 }
Пример #9
0
        /// <summary>
        /// Imports a user-chosen CSV (columns: Id, Name, Progress) into the grid,
        /// keeps a copy in mainDT, and refreshes the progress chart.
        /// </summary>
        private void importCsvBtn_Click(object sender, EventArgs e)
        {
            // Guard clause: nothing to do if the user cancelled the dialog.
            if (openFileDialog1.ShowDialog() != DialogResult.OK)
            {
                return;
            }

            // FIX: the StreamReader/CsvReader/CsvDataReader were never disposed.
            using (var reader = new StreamReader(openFileDialog1.FileName))
            using (var csv = new CsvReader(reader, CultureInfo.InvariantCulture))
            using (var dr = new CsvDataReader(csv))
            {
                var dt = new DataTable();
                dt.Columns.Add("Id", typeof(int));
                dt.Columns.Add("Name", typeof(string));
                dt.Columns.Add("Progress", typeof(int));

                dt.Load(dr);

                mainDT = dt.Copy();

                dataGridView1.DataSource = dt;

                fillProgressChart(dt);
            }
        }
Пример #10
0
        /// <summary>
        /// Simple factory: maps a reader-type token ("xml", "csv", "xls", "db",
        /// case-insensitive) to its IDataReader implementation.
        /// </summary>
        /// <param name="readerType">Reader type token; unrecognised or null yields null.</param>
        /// <returns>The matching reader, or null when no type matches.</returns>
        public IDataReader GetReader(string readerType)
        {
            // A single case-insensitive switch replaces the repeated ToLower().Equals
            // chain; `?.` also makes a null argument return null instead of throwing NRE.
            switch (readerType?.ToLowerInvariant())
            {
                case "xml":
                    return new XmlDataReader();
                case "csv":
                    return new CsvDataReader();
                case "xls":
                    return new ExcelDataReader();
                case "db":
                    return new DbDataReader();
                default:
                    return null;
            }
        }
Пример #11
0
        public void ReadWithNoHeaderTest()
        {
            // Two data rows, no header: fields must come back by ordinal position.
            var csvText = new StringBuilder()
                          .AppendLine("1,one")
                          .AppendLine("2,two")
                          .ToString();

            using (var reader = new StringReader(csvText))
            using (var csv = new CsvReader(reader, CultureInfo.InvariantCulture))
            {
                csv.Configuration.HasHeaderRecord = false;
                csv.Configuration.Delimiter       = ",";
                var dataReader = new CsvDataReader(csv);

                dataReader.Read();
                Assert.AreEqual(1, dataReader.GetInt32(0));
                Assert.AreEqual("one", dataReader.GetString(1));

                dataReader.Read();
                Assert.AreEqual(2, dataReader.GetInt32(0));
                Assert.AreEqual("two", dataReader.GetString(1));
            }
        }
Пример #12
0
        /// <summary>
        /// Parses the CSV at _filePath into volunteer working-hours records using
        /// the "Id", "From" and "To" columns. Dates are parsed with the pl-PL culture.
        /// </summary>
        /// <returns>One record per CSV row.</returns>
        public List <VolunteerWorkingHoursData> GetVolunteersWorkingHours()
        {
            var dataReader = new CsvDataReader();
            var csvResult  = dataReader.ReadData(_filePath);

            // PERF FIX: materialise the column list once instead of calling
            // ToList() for every IndexOf lookup.
            var columnNames     = csvResult.ColumnNames.ToList();
            var indexOfId       = columnNames.IndexOf("Id");
            var indexOfDateFrom = columnNames.IndexOf("From");
            var indexOfDateTo   = columnNames.IndexOf("To");

            // Dates in the file are formatted for the Polish culture.
            var ci = new CultureInfo("pl-PL");

            var resultList = new List <VolunteerWorkingHoursData>();
            foreach (var row in csvResult.Rows)
            {
                resultList.Add(new VolunteerWorkingHoursData()
                {
                    Id       = row[indexOfId],
                    DateFrom = DateTime.Parse(row[indexOfDateFrom], ci),
                    DateTo   = DateTime.Parse(row[indexOfDateTo], ci)
                });
            }
            return resultList;
        }
Пример #13
0
        /// <summary>
        /// Loads the CSV at `path` into a DataTable, binds it to the grid,
        /// and returns it. Headers are required and lower-cased for matching.
        /// </summary>
        private DataTable Load_Method()
        {
            // NOTE: This means that "headers" are required
            var config = new CsvConfiguration(CultureInfo.InvariantCulture)
            {
                PrepareHeaderForMatch = (string header, int index) => header.ToLower()
            };

            using (var reader = new StreamReader(path))
            using (var csv = new CsvReader(reader, config))
            using (var dr = new CsvDataReader(csv))
            {
                var dt = new DataTable();
                dt.Load(dr);
                dataGrid.DataContext = dt.DefaultView;
                return dt;
            }
        }
Пример #14
0
        public void MultiLineTest()
        {
            // Quoted fields spanning lines must load correctly when
            // AttemptToFixMultiline is enabled.
            using (CsvDataReader reader = GetReader("MultiLine"))
            {
                reader.AttemptToFixMultiline = true;

                CsvUser[] users = reader.LoadList <CsvUser>();
                Assert.AreEqual(2, users.Length);

                CsvUser first = users[0];
                Assert.AreEqual(1, first.Id);
                Assert.AreEqual("John", first.FirstName);
                Assert.AreEqual("Doe", first.LastName);
                Assert.AreEqual(@"Multiline note line 1
Multiline note line 2", first.Note);

                CsvUser second = users[1];
                Assert.AreEqual(2, second.Id);
                Assert.AreEqual("Mike", second.FirstName);
                Assert.AreEqual("NotDoe", second.LastName);
                Assert.AreEqual("some note", second.Note);
            }
        }
Пример #15
0
        /// <summary>
        /// Reads the csv file https://joshclose.github.io/CsvHelper/examples/reading/
        /// into a DataTable and drops any row whose second or third column is empty.
        /// </summary>
        /// <param name="filePath">File path</param>
        /// <returns>DataTable</returns>
        /// <exception cref="FileNotFoundException">Thrown when the file does not exist.</exception>
        private static DataTable ReadCsvFile(string filePath)
        {
            var dataTable = new DataTable("Data");

            if (!File.Exists(filePath))
            {
                throw new FileNotFoundException("File not found!");
            }
            // Setting up csv configuration for reading.
            var config = new CsvConfiguration(CultureInfo.InvariantCulture)
            {
                PrepareHeaderForMatch = args => args.Header.ToLower(),
                MissingFieldFound     = null,
                TrimOptions           = TrimOptions.Trim
            };

            // Creating streams to read the file.
            using var sr  = new StreamReader(filePath);
            using var csv = new CsvReader(sr, config);
            using var dr  = new CsvDataReader(csv);
            dataTable.Load(dr);

            // BUG FIX: the original removed rows while iterating forward (the row that
            // slid into the removed slot was skipped) and repeated the same check once
            // per column, so a single empty cell could remove several rows. Iterate
            // backwards and test each row exactly once; guard the hard-coded column
            // indexes so narrow files don't throw.
            if (dataTable.Columns.Count > 2)
            {
                for (int i = dataTable.Rows.Count - 1; i >= 0; i--)
                {
                    var items = dataTable.Rows[i].ItemArray;
                    if (items[1].ToString() == String.Empty ||
                        items[2].ToString() == String.Empty)
                    {
                        dataTable.Rows.RemoveAt(i);
                    }
                }
            }

            return dataTable;
        }
Пример #16
0
        /// <summary>
        /// Projects the selected columns of a CSV file (or stdin when the filename
        /// is ".") to the output file (or stdout when the output is "."), optionally
        /// skipping leading lines first.
        /// </summary>
        /// <param name="context">Command context (unused here).</param>
        /// <param name="settings">File, Output, Skip and Columns options.</param>
        /// <returns>0 on success.</returns>
        public override int Execute(
            CommandContext context,
            SelectSettings settings
            )
        {
            var filename = settings.File;

            Stream iStream = filename == "."
                                ? Console.OpenStandardInput()
                                : File.OpenRead(settings.File);

            using (var tr = new StreamReader(iStream))
            {
                // Discard the requested number of leading lines before parsing.
                for (int i = 0; i < settings.Skip; i++)
                {
                    tr.ReadLine();
                }

                var opts =
                    new CsvDataReaderOptions
                {
                    BufferSize = 0x100000,
                };

                using (var csv = CsvDataReader.Create(tr, opts))
                {
                    var data = csv.Select(settings.Columns);

                    Stream oStream =
                        settings.Output == "."
                                        ? Console.OpenStandardOutput()
                                        : File.Create(settings.Output);

                    // BUG FIX: the StreamWriter was never flushed or disposed, so
                    // buffered output could be lost and the output handle leaked.
                    using (var tw = new StreamWriter(oStream))
                    using (var ww = CsvDataWriter.Create(tw))
                    {
                        ww.Write(data);
                    }

                    return 0;
                }
            }
        }
Пример #17
0
        /// <summary>
        /// Best-effort load of a delimited text file into a DataTable.
        /// </summary>
        /// <param name="fileName">Path of the file to read.</param>
        /// <param name="delimiter">Field delimiter to configure on the reader.</param>
        /// <returns>The populated table, or null when the file cannot be read/parsed
        /// (callers must handle null).</returns>
        public static DataTable readTextFileToTable(string fileName, string delimiter)
        {
            DataTable table = new DataTable();

            try
            {
                using (var reader = new StreamReader(fileName))

                    using (var csv = new CsvReader(reader))
                    {
                        csv.Configuration.Delimiter = delimiter;

                        using (var dr = new CsvDataReader(csv))
                        {
                            table.Load(dr);
                        }
                    }
            }
            catch (Exception)
            {
                // Deliberate best-effort contract: swallow the error and signal
                // failure via a null return. (FIX: dropped the unused `ex` local.)
                return(null);
            }
            return(table);
        }
Пример #18
0
        /// <summary>
        /// Converts the specified 'data' into a DataTable by serialising it to CSV
        /// and re-reading it. Columns are built from T's non-enumerable properties;
        /// enum and nullable properties become string columns (the CSV carries them
        /// as text), and nullable properties allow DBNull.
        /// </summary>
        /// <typeparam name="T">Element type whose public properties become columns.</typeparam>
        /// <param name="data">Items to convert.</param>
        /// <returns>A DataTable populated from the CSV representation of 'data'.</returns>
        public static DataTable ConvertToDataTable <T>(this IEnumerable <T> data)
        {
            // NOTE(review): the Replace turns ",," into ",\"\"," — presumably so empty
            // fields survive the CSV round-trip; confirm against ConvertToCSV's output.
            var csv = data.ConvertToCSV().Replace(",,", ",\"\",");

            using var stringReader = new StringReader(csv);
            using var reader       = new CsvReader(stringReader, CultureInfo.InvariantCulture);
            // Configure the CsvReader before creating the CsvDataReader.
            using var dataReader = new CsvDataReader(reader);
            var dt = new DataTable();

            var properties = typeof(T).GetCachedProperties();

            // Build the column schema up front so Load maps fields to typed columns.
            properties.ForEach(p =>
            {
                var isNullable = p.PropertyType.IsGenericType && p.PropertyType.GetGenericTypeDefinition() == typeof(Nullable <>);
                // Skip enumerable values.
                if (!p.PropertyType.IsEnumerable())
                {
                    var type = isNullable ? Nullable.GetUnderlyingType(p.PropertyType) : p.PropertyType;
                    if (type.IsEnum || isNullable)
                    {
                        type = typeof(string);                            // Need to do this because enums are converted to strings.
                    }
                    // Prefer the DisplayName attribute over the property name for the column.
                    var displayAttr = p.GetCustomAttribute <DisplayNameAttribute>();
                    var name        = displayAttr?.DisplayName ?? p.Name;
                    dt.Columns.Add(new DataColumn(name, type)
                    {
                        AllowDBNull = isNullable
                    });
                }
            });

            dt.Load(dataReader);

            return(dt);
        }
Пример #19
0
        public IActionResult Index()
        {
            // Load the demographic CSV into the view model's table, render it as
            // HTML, then compute the public-assistance selection for the view.
            IndexViewModel viewModel = new IndexViewModel();
            DataTable      dt        = new DataTable();

            using (var reader = new StreamReader(".\\Resources\\Demographic_Statistics_By_Zip_Code.csv"))
            using (var csv = new CsvReader(reader, CultureInfo.InvariantCulture))
            using (var dr = new CsvDataReader(csv))
            {
                // Do any configuration to `CsvReader` before creating CsvDataReader.
                viewModel.Table = new DataTable();
                viewModel.Table.Load(dr);
                viewModel.TableHtml = ConvertDataTableToHTML(viewModel.Table);
            }

            //  THIS LOOKS LIKE IT WORKS!!!!
            viewModel.AssistanceCount = viewModel.Table.Select("[COUNT RECEIVES PUBLIC ASSISTANCE] > '0'").Count();
            viewModel.Result          = viewModel.Table.Select("[COUNT RECEIVES PUBLIC ASSISTANCE] > '0'").ToList();

            return View(viewModel);
        }
Пример #20
0
        public void DbNullTest()
        {
            // An empty field maps to string.Empty; the configured "null" sentinel
            // maps to DBNull.Value — both via GetValue and via GetValues.
            var config = new CsvConfiguration(CultureInfo.InvariantCulture)
            {
                HasHeaderRecord = false,
            };
            var input = new StringBuilder().AppendLine(",null").ToString();

            using (var reader = new StringReader(input))
            using (var csv = new CsvReader(reader, config))
            {
                csv.Context.TypeConverterOptionsCache.GetOptions <string>().NullValues.Add("null");

                var dataReader = new CsvDataReader(csv);
                Assert.Equal(string.Empty, dataReader.GetValue(0));
                Assert.Equal(DBNull.Value, dataReader.GetValue(1));

                var values = new object[2];
                dataReader.GetValues(values);
                Assert.Equal(string.Empty, values[0]);
                Assert.Equal(DBNull.Value, values[1]);
            }
        }
Пример #21
0
        /// <summary>
        /// Imports a CSV file into a newly created SQL Server table via SqlBulkCopy.
        /// Pass 1 scans the whole file to count rows and find the widest value per
        /// column (used to size the VARCHAR columns); pass 2 re-opens the file and
        /// bulk-copies it in batches. Progress is reported through OnMessageOut
        /// with stage codes 0 (scanning), 1 (row count known) and 2 (done).
        /// </summary>
        /// <param name="dbconnectstr">SQL Server connection string.</param>
        /// <param name="csvfile">Path of the CSV file to import.</param>
        /// <param name="tablename">Destination table name (created via AutoCreatedTable).</param>
        /// <param name="batchsize">SqlBulkCopy batch and notification size.</param>
        /// <returns>Empty string on success; the exception message on failure.</returns>
        public async Task <string> ImportAsync(string dbconnectstr, string csvfile, string tablename, int batchsize = 10000)
        {
            try
            {
                // Pass 1: count total rows and collect the CREATE TABLE column data.
                if (this.OnMessageOut != null)
                {
                    // Stage 0: "exploring file" notification (message text is user-facing).
                    OnMessageOut(this, new Tuple <int, string>(0, "探索文件中..."));
                }
                total = 0;
                // Column name -> widest observed value length.
                Dictionary <string, int> colsdef = new Dictionary <string, int>();

                await Task.Run(() => {
                    using (var reader = new StreamReader(csvfile))
                        using (var csv = new CsvReader(reader, CultureInfo.InvariantCulture))
                        {
                            csv.Read();
                            csv.ReadHeader();
                            var cols = csv.GetRecord <dynamic>();

                            // Seed every column with width 0 from the header record.
                            foreach (var item in cols)
                            {
                                colsdef.Add(item.Key, 0);
                            }
                            timer.Start();
                            while (csv.Read())
                            {
                                var rows = csv.GetRecord <dynamic>();
                                foreach (var item in rows)
                                {
                                    int len  = item.Value.Length;
                                    string k = item.Key;
                                    if (len > colsdef[k])
                                    {
                                        colsdef[k] = len;
                                    }
                                }
                                total++;
                            }

                            timer.Stop();
                        }
                });


                if (this.OnMessageOut != null)
                {
                    // Stage 1: report the total row count.
                    OnMessageOut(this, new Tuple <int, string>(1, total.ToString()));
                }



                // Build the CREATE TABLE script: each column is VARCHAR sized at twice
                // the widest observed value, or 50 when the column was always empty.
                StringBuilder sb = new StringBuilder();
                sb.AppendFormat("CREATE TABLE [dbo].[{0}](", tablename);
                var list = colsdef.ToList();
                for (int i = 0; i < list.Count; i++)
                {
                    sb.AppendFormat("[{0}] [VARCHAR]({1}) NULL", list[i].Key, list[i].Value > 0 ? list[i].Value * 2 : 50);
                    if (i < list.Count - 1)
                    {
                        sb.Append(",");
                    }
                }
                sb.Append(")");

                // Pass 2: re-open the file and stream it into the new table.
                using (var reader = new StreamReader(csvfile))
                    using (var csv = new CsvReader(reader, CultureInfo.InvariantCulture))
                    {
                        using (SqlConnection con = new SqlConnection(dbconnectstr)) {
                            con.Open();
                            await this.AutoCreatedTable(tablename, con, sb.ToString());

                            using (CsvDataReader dr = new CsvDataReader(csv))
                                using (SqlBulkCopy blp = new SqlBulkCopy(con))
                                {
                                    blp.NotifyAfter          = batchsize;
                                    blp.SqlRowsCopied       += Blp_SqlRowsCopied;
                                    blp.DestinationTableName = tablename;
                                    blp.BatchSize            = batchsize;

                                    await blp.WriteToServerAsync(dr);
                                }
                        }
                    }

                if (this.OnMessageOut != null)
                {
                    // Stage 2: import finished.
                    this.OnMessageOut(this, new Tuple <int, string>(2, total.ToString()));
                }

                return("");
            }
            catch (Exception ex) {
                // Best-effort contract: the caller receives the error text, not an exception.
                timer.Stop();
                total = 0;
                return(ex.Message);
            }
        }
Пример #22
0
        /// <summary>
        /// Imports each configured data file into its mapped database table:
        /// reads only the header row to get the column list, creates the table if
        /// missing, truncates it, then bulk-copies the whole file inside a
        /// transaction. A failed file is recorded in failedFiles and logged; the
        /// loop then continues with the next file.
        /// </summary>
        private async Task ImportDataFiles()
        {
            //The REALLY big files, but all the data is in codes that ref the document tables so no data issues to worry about

            ConsoleWriteColour($"Starting Import of Data Files", ConsoleColor.Blue);

            string docsFullPath = $"{dataSetPath}/{options.DataFolder}/";

            //maybe add in a check for csv files that aren't in the settings and output a warning for if they add in new files.

            foreach (FileToDBMapping fileMapping in options.DataFileList)
            {
                DateTime fileStart = DateTime.Now;

                string filename = $"{fileMapping.FileName}.csv";

                try
                {
                    List <string> columns;

                    // NOTE(review): the "(unknown)" path segment below looks like a lost
                    // interpolation (probably {filename}) — confirm against source control.
                    using (StreamReader reader = new StreamReader($"{docsFullPath}/(unknown)"))
                    {
                        //todo set up culture stuff properly at some point

                        //For the really big data files we REALLY don't want to be reading the whole file in at this point, just need the header row to get the table set up
                        using (CsvReader csv = new CsvReader(reader, CultureInfo.InvariantCulture))
                        {
                            csv.Configuration.IgnoreBlankLines = true;

                            //Handles the file with the empty field at the end.
                            csv.Read();
                            csv.Context.Record = csv.Context.Record.Reverse().SkipWhile(string.IsNullOrWhiteSpace).Reverse().ToArray();
                            csv.ReadHeader();

                            columns = csv.Context.HeaderRecord.ToList <string>();
                        }
                    }

                    if (await createTableIfItsNotThere(fileMapping.DBTableName, columns))
                    {
                        using (SqlConnection connection = new SqlConnection(options.DBConnectionString))
                        {
                            connection.Open();

                            using (SqlTransaction transaction = connection.BeginTransaction())
                            {
                                // Empty the destination table before re-importing.
                                using (SqlCommand command = new SqlCommand())
                                {
                                    command.Connection  = connection;
                                    command.Transaction = transaction;

                                    command.CommandText = $"TRUNCATE TABLE dbo.{fileMapping.DBTableName}";
                                    await command.ExecuteNonQueryAsync();
                                }

                                using (StreamReader reader = new StreamReader($"{docsFullPath}/(unknown)"))
                                {
                                    using (CsvDataReader csvReader = new CsvDataReader(new CsvReader(reader, CultureInfo.InvariantCulture)))
                                    {
                                        using (SqlBulkCopy copy = new SqlBulkCopy(connection, SqlBulkCopyOptions.KeepNulls, transaction))
                                        {
                                            copy.BatchSize = 10000; //TODO put this in settings at some point

                                            copy.DestinationTableName = fileMapping.DBTableName;
                                            await copy.WriteToServerAsync(csvReader);

                                            transaction.Commit();
                                        }
                                    }
                                }
                            }
                        }

                        ConsoleWriteColour($"(unknown) imported into {fileMapping.DBTableName} in {GetElapsedTime(fileStart)}", ConsoleColor.Blue);
                    }
                    else
                    {
                        failedFiles.Add(filename);
                        throw new NotImplementedException($"Table {fileMapping.DBTableName} does not exist and cannot be created");
                    }
                }
                catch (Exception ex)
                {
                    failedFiles.Add(filename);
                    //TODO - add in some sensible error handling
                    ConsoleWriteColour($"(unknown) failed", ConsoleColor.Yellow);
                    ConsoleWriteColour(ex.ToString(), ConsoleColor.Red);
                }
            }
        }
Пример #23
0
        public StreamLinesSample()
        {
            InitializeComponent();


            // We will set the tube path position color based on the Density
            string colorColumnName  = "Density"; // "AngularVelocity"
            bool   invertColorValue = false;     // This needs to be set to true when using AngularVelocity


            // First create a gradient legend and texture
            var gradientStopCollection = new GradientStopCollection();

            gradientStopCollection.Add(new GradientStop(Colors.Red, 1));
            gradientStopCollection.Add(new GradientStop(Colors.Yellow, 0.75));
            gradientStopCollection.Add(new GradientStop(Colors.Lime, 0.5));
            gradientStopCollection.Add(new GradientStop(Colors.Aqua, 0.25));
            gradientStopCollection.Add(new GradientStop(Colors.Blue, 0));

            var linearGradientBrush = new LinearGradientBrush(gradientStopCollection,
                                                              new System.Windows.Point(0, 1),  // startPoint (offset == 0) - note that y axis is down (so 1 is bottom)
                                                              new System.Windows.Point(0, 0)); // endPoint (offset == 1)

            // Create Legend control
            var gradientColorLegend = new GradientColorLegend()
            {
                Width  = 70,
                Height = 200,
                Margin = new Thickness(5, 5, 5, 5)
            };

            gradientColorLegend.GradientBrush = linearGradientBrush;

            var gradientTexture = gradientColorLegend.RenderToTexture(size: 256, isHorizontal: true);

            var imageBrush = new ImageBrush(gradientTexture);

            // IMPORTANT:
            // When texture coordinates have one components (for example y) always set to 0,
            // we need to change the ViewportUnits from the default RelativeToBoundingBox to Absolute.
            imageBrush.ViewportUnits = BrushMappingMode.Absolute;

            var gradientMaterial = new DiffuseMaterial(imageBrush);


            // Now load sample data
            // Sample data was created by using ParaView application and exporting the streamlines into csv file.
            string sampleDataFileName = System.IO.Path.Combine(AppDomain.CurrentDomain.BaseDirectory, "Resources\\Streamlines.csv");


            // Create csv file reader that can read data from a csv file
            var csvDataReader = new CsvDataReader();

            csvDataReader.ReadFile(sampleDataFileName);


            float minValue, maxValue;

            csvDataReader.GetValuesRange(colorColumnName, out minValue, out maxValue);

            float dataRange = maxValue - minValue;


            var streamlineIndexes = csvDataReader.IndividualObjectIndexes;


            // Create the streamlines
            var allStreamlineBounds = new Rect3D();

            for (var i = 0; i < csvDataReader.IndividualObjectIndexes.Length - 1; i++)
            {
                Rect3D bounds;

                int startIndex = streamlineIndexes[i];
                int endIndex   = streamlineIndexes[i + 1] - 1;

                int dataCount = endIndex - startIndex;

                if (dataCount < 2) // Skip streamlines without any positions or with less then 2 positions
                {
                    continue;
                }

                var positions = csvDataReader.GetPositions(startIndex, dataCount, out bounds);

                allStreamlineBounds.Union(bounds);

                var pathPositions = new Point3DCollection(positions);


                float[] dataValues = csvDataReader.GetValues(colorColumnName, startIndex, dataCount);

                // Generate texture coordinates for each path position
                // Because our texture is one dimensional gradient image (size 256 x 1)
                // we set the x coordinate in range from 0 to 1 (0 = first gradient color; 1 = last gradient color).
                //
                // Note:
                // If we would only set x texture coordinate and preserve y at 0,
                // WPF would not render the texture because the y size would be 0
                // and because by default the ViewportUnits is set to RelativeToBoundingBox,
                // WPF "thinks" the texture is empty.
                // Therefore we need to set the imageBrush.ViewportUnits to Absolute.
                var positionsCount     = pathPositions.Count;
                var textureCoordinates = new PointCollection(positionsCount);
                for (int j = 0; j < dataCount; j++)
                {
                    float relativeDataValue = (dataValues[j] - minValue) / dataRange;

                    if (invertColorValue)
                    {
                        relativeDataValue = 1.0f - relativeDataValue;
                    }

                    textureCoordinates.Add(new Point(relativeDataValue, 0));
                }


                var tubePathMesh3D = new Ab3d.Meshes.TubePathMesh3D(
                    pathPositions: pathPositions,
                    pathPositionTextureCoordinates: textureCoordinates,
                    radius: 0.03,
                    isTubeClosed: true,
                    isPathClosed: false,
                    segments: 8);


                var geometryModel3D = new GeometryModel3D(tubePathMesh3D.Geometry, gradientMaterial);
                //var geometryModel3D = new GeometryModel3D(tubePathMesh3D.Geometry, new DiffuseMaterial(Brushes.Red));
                geometryModel3D.BackMaterial = new DiffuseMaterial(Brushes.DimGray);

                var modelVisual3D = new ModelVisual3D()
                {
                    Content = geometryModel3D
                };

                MainViewport.Children.Add(modelVisual3D);
            }

            Camera1.TargetPosition = allStreamlineBounds.GetCenterPosition();
            Camera1.Distance       = allStreamlineBounds.GetDiagonalLength();


            // Add legend control:
            int legendValuesCount = 5;

            for (int i = 0; i < legendValuesCount; i++)
            {
                float t        = (float)i / (float)(legendValuesCount - 1);
                float oneValue = minValue + t * (maxValue - minValue);

                string valueLegendText = string.Format(System.Globalization.CultureInfo.InvariantCulture, "{0:0.00}", oneValue);
                gradientColorLegend.LegendLabels.Add(new GradientColorLegend.LegendLabel(t, valueLegendText));
            }


            var legendTitleTextBlock = new TextBlock()
            {
                Text                = colorColumnName,
                FontSize            = 14,
                FontWeight          = FontWeights.Bold,
                Foreground          = Brushes.Black,
                HorizontalAlignment = HorizontalAlignment.Center
            };


            var stackPanel = new StackPanel()
            {
                Orientation         = Orientation.Vertical,
                VerticalAlignment   = VerticalAlignment.Bottom,
                HorizontalAlignment = HorizontalAlignment.Right,
                Margin = new Thickness(0, 0, 5, 5)
            };

            stackPanel.Children.Add(legendTitleTextBlock);
            stackPanel.Children.Add(gradientColorLegend);

            RootGrid.Children.Add(stackPanel);
        }
Пример #24
0
        /// <summary>
        /// Loads marks data from a CSV file: lesson themes come from the column
        /// headers, lesson dates from the "Date" row, and marks from the
        /// remaining data rows.
        /// </summary>
        /// <param name="fileName">Path of the CSV file to read.</param>
        public void SetFromFile(string fileName)
        {
            DataTable dataTable = new DataTable();

            // One CsvReader/CsvDataReader chain is sufficient; the original code
            // wrapped the same StreamReader in two CsvReader instances and never
            // used the outer one. The usings also guarantee disposal on exceptions
            // (the old manual Close() was skipped when Load threw).
            using (StreamReader streamReader = new StreamReader(fileName))
            using (var csv = new CsvReader(streamReader, CultureInfo.InvariantCulture))
            using (var dr = new CsvDataReader(csv))
            {
                dataTable.Load(dr);
            }

            // Every column except the pupil-identity columns describes a lesson theme.
            foreach (DataColumn column in dataTable.Columns)
            {
                if (column.ColumnName == "Last Name" || column.ColumnName == "First Name" || column.ColumnName == "Email Address")
                {
                    continue;
                }

                Lessons.Add(new Lesson
                {
                    Theme = column.ColumnName
                });
            }

            List <RowOfMark> rowMarks = new List <RowOfMark>();

            foreach (DataRow row in dataTable.Rows)
            {
                List <string> rowArray = new List <string>(row.ItemArray.Select(s => s.ToString()));

                if (rowArray[0] == "Date")
                {
                    // Drop the three identity columns so the dates line up with Lessons.
                    rowArray.RemoveRange(0, 3);
                    for (int i = 0; i < rowArray.Count; i++)
                    {
                        // BUGFIX: the original assigned rowArray[0] to every lesson,
                        // giving all lessons the first lesson's date.
                        Lessons[i].Date = rowArray[i];
                    }
                    continue;
                }

                if (rowArray[0] == "Points")
                {
                    continue;
                }

                rowMarks.Add(new RowOfMark(new List <string>(row.ItemArray.Select(s => s.ToString()))));
            }

            SetPupils(rowMarks);

            Marks(rowMarks);

            JoinTheSamePupil();
        }
Пример #25
0
 /// <summary>
 /// Sets up the benchmark: an endless CSV source to read from and a
 /// null sink to write to, so only reader/writer cost is measured.
 /// </summary>
 public FastJavaCsvBenchmark()
 {
     // Discard all written output; the benchmark only measures write cost.
     writer = new CsvWriter(TextWriter.Null);

     // Wrap an endless source of the sample record so reads never run dry.
     reader = CsvDataReader.Create(new InfiniteDataReader(Record));
 }
Пример #26
0
        /// <summary>
        /// Imports CSV data from the given stream using the configured column
        /// metadata and forwards it to the loader.
        /// </summary>
        /// <param name="stream">Source stream containing the CSV data.</param>
        /// <exception cref="ArgumentNullException">Thrown when <paramref name="stream"/> is null.</exception>
        /// <exception cref="ObjectDisposedException">Thrown when this activity has already been disposed.</exception>
        public void Run(Stream stream)
        {
            if (stream == null)
                throw new ArgumentNullException(nameof(stream)); // nameof is refactor-safe
            if (_disposed)
                throw new ObjectDisposedException("ImportActivity");

            using (_reader = new CsvDataReader(stream, _csvDelimiterChar, Encoding.Default))
            {
                _reader.Settings.UseTextQualifier = false;

                // Describe each expected column, honoring an optional number format
                // (a culture name stored in _numberFormatInfo).
                foreach (var col in _csvMetadata)
                {
                    CsvDataReader.Column column = new CsvDataReader.Column(col.TypeName);
                    if (!string.IsNullOrEmpty(_numberFormatInfo))
                        column.FormatProvider = new CultureInfo(_numberFormatInfo);
                    _reader.Columns.Add(column, col.Name);
                }

                // Let the validation callback veto individual records before loading.
                _reader.ReadRecord += e => e.SkipRecord = OnValidateRecord(e.Values);

                LoadCounters rowCounters;

                _loader.LoadData((IDataReader)_reader, out rowCounters);

                OnComplete(new CompleteEventArgs() { Counters = rowCounters });
            }
        }
Пример #27
0
        /// <summary>
        /// Verifies that a CsvDataReader can be opened and disposed for a simple CSV file.
        /// </summary>
        public void SimpleOpen()
        {
            // 'using' guarantees disposal even if an exception is thrown after
            // construction; the original manual Dispose() leaked on failure paths.
            using (CsvDataReader reader = new CsvDataReader(@"..\..\SimpleCsv.txt"))
            {
            }
        }
        /// <summary>
        /// Creates a lightweight data table from an uploaded file. The file
        /// extension selects the reader: ".xlsx" uses Excel, anything else CSV.
        /// </summary>
        /// <param name="fileName">Original file name; its extension selects the reader.</param>
        /// <param name="stream">Stream with the posted file content.</param>
        /// <param name="contentLength">Length of the posted content; must be greater than zero.</param>
        /// <param name="configuration">CSV parsing configuration.</param>
        /// <param name="skip">Number of leading rows to skip.</param>
        /// <param name="take">Maximum number of rows to read.</param>
        /// <returns>The populated <see cref="IDataTable"/>.</returns>
        public static IDataTable FromFile(
            string fileName,
            Stream stream,
            long contentLength,
            CsvConfiguration configuration,
            int skip = 0,
            int take = int.MaxValue)
        {
            Guard.ArgumentNotEmpty(() => fileName);
            Guard.ArgumentNotNull(() => stream);
            Guard.ArgumentNotNull(() => configuration);

            if (contentLength == 0)
            {
                throw Error.Argument("fileName", "The posted file '{0}' does not contain any data.".FormatInvariant(fileName));
            }

            IDataReader dataReader = null;

            // BUGFIX: the previous 'catch (Exception ex) { throw ex; }' reset the
            // stack trace without adding any value; letting exceptions propagate
            // through try/finally preserves it.
            try
            {
                var fileExt = System.IO.Path.GetExtension(fileName).ToLowerInvariant();

                switch (fileExt)
                {
                    case ".xlsx":
                        dataReader = new ExcelDataReader(stream, true); // TODO: let the user specify if excel file has headers
                        break;
                    default:
                        dataReader = new CsvDataReader(new StreamReader(stream), configuration);
                        break;
                }

                var table = LightweightDataTable.FromDataReader(dataReader, skip, take);

                if (table.Columns.Count == 0 || table.Rows.Count == 0)
                {
                    throw Error.InvalidOperation("The posted file '{0}' does not contain any columns or data rows.".FormatInvariant(fileName));
                }

                return table;
            }
            finally
            {
                if (dataReader != null && !dataReader.IsClosed)
                {
                    dataReader.Dispose();
                }
            }
        }
Пример #29
0
        public void processData(string PCRserial, string outputFileName)
        {
            //string rootFolderPCR = @"C:\Users\Admin\Desktop\results_csvFiles"; //For Release
            //string rootFolderResult = @"C:\Users\Admin\Desktop\Test_Results"; //For Release

            string rootFolderPCR    = @"D:\CFXManagerAPI_Examples-11242014_FINAL"; //For testing & simulation
            string rootFolderResult = @"D:\CFXManagerAPI_Examples-11242014_FINAL"; //For testing & simulation

            string jsonResultFileName = string.Format("{0}_{1}_{2}", PCRserial, outputFileName, "Result.txt");
            string jsonResultFilePath = Path.Combine(rootFolderResult, jsonResultFileName);
            string csvResultFileName  = string.Format("{0}_{1}_{2}", PCRserial, outputFileName, "Result.csv");
            string csvResultFilePath  = Path.Combine(rootFolderResult, csvResultFileName);

            string FAMampl = string.Format("{0} -  {1}", outputFileName, "Quantification Amplification Results_FAM.csv");
            string HEXampl = string.Format("{0} -  {1}", outputFileName, "Quantification Amplification Results_HEX.csv");
            string quantCq = string.Format("{0} -  {1}", outputFileName, "Quantification Cq Results.csv");
            string allelicDiscrim = string.Format("{0} -  {1}", outputFileName, "Allelic Discrimination Results_ADSheet.csv");
            string endpointFAM = string.Format("{0} -  {1}", outputFileName, "End Point Results_FAM.csv");
            string endpointHEX = string.Format("{0} -  {1}", outputFileName, "End Point Results_HEX.csv");
            string meltFAM = string.Format("{0} -  {1}", outputFileName, "Melt Curve Plate View Results_FAM.csv");
            string meltHEX = string.Format("{0} -  {1}", outputFileName, "Melt Curve Plate View Results_HEX.csv");
            string standardCurve = string.Format("{0} -  {1}", outputFileName, "Standard Curve Results.csv");
            string runInfo = string.Format("{0} - {1}", outputFileName, "Run Information.csv");
            string quantPlateFAM = string.Format("{0} -  {1}", outputFileName, "Quantification Plate View Results_FAM.csv");
            string quantPlateHEX = string.Format("{0} -  {1}", outputFileName, "Quantification Plate View Results_HEX.csv");
            string quantSumm = string.Format("{0} -  {1}", outputFileName, "Quantification Summary.csv");
            string ANOVAresults = string.Format("{0} -  {1}", outputFileName, "ANOVA Results_ANOVA.csv");
            string FAMamplPath = Path.Combine(rootFolderPCR, FAMampl);
            string HEXamplPath = Path.Combine(rootFolderPCR, HEXampl);
            string quantCqPath = Path.Combine(rootFolderPCR, quantCq);
            int    jj, kk, posCtrlIndx = 10, negCtrlIndx = 0;
            int    overallResult = 2, PCTresult = 1, NECresult = 1;
            int    numWells, numCycles;
            float  tf1, tf2, tf3, CqFAM, rfuFAM;

            DataTable FAMtable     = new DataTable();
            DataTable HEXtable     = new DataTable();
            DataTable quantCqTable = new DataTable();
            DataTable resultTable  = new DataTable();
            DataTable tempTable    = new DataTable();
            DataRow   emptyRow;

            string[] WELL_CALL = new string[3] {
                "(-) neg", "(+) POSITIVE", "inconc."
            };
            string[] PCT_RESULT = new string[2] {
                "FAIL", "SUCCESS"
            };
            string[] NEC_RESULT = new string[2] {
                "FAIL", "SUCCESS"
            };
            string[] OVERALL_RESULT = new string[3] {
                "INCONCLUSIVE", "PARTIAL SUCCESS", "SUCCESS"
            };


            /*//CODE FOR DELETING ANY FILE - STARTS
             * try
             * {
             *  if (File.Exists(filePath))
             *  {
             *      Console.WriteLine("File Found. Press any key to Delete it\n");
             *      Console.ReadKey(true);
             *      File.Delete(filePath);
             *      Console.WriteLine("File Deleted\n");
             *  }
             *  else
             *      Console.WriteLine("File not found!\n");
             * }
             * catch(IOException ioExp)
             * {
             *  Console.WriteLine(ioExp.Message);
             * }
             * //CODE FOR DELETING ANY FILE - ENDS*/


            //LOADING THE CQ QUANTIFICATION TABLE - START
            using (var reader = new StreamReader(quantCqPath))
                using (var csv = new CsvReader(reader, CultureInfo.InvariantCulture))
                {
                    using (var dr = new CsvDataReader(csv))
                    {
                        quantCqTable.Load(dr);
                    }
                }
            //LOADING THE CQ QUANTIFICATION TABLE - END


            //CREATING AND LOADING THE FAM TABLE - START
            using (var reader = new StreamReader(FAMamplPath))
                using (var csv = new CsvReader(reader, CultureInfo.InvariantCulture))
                {
                    using (var dr = new CsvDataReader(csv))
                    {
                        tempTable.Load(dr);
                    }
                }

            numWells  = tempTable.Columns.Count - 2;
            numCycles = tempTable.Rows.Count;
            FAMtable.Columns.Add("Well Num", typeof(String));
            FAMtable.Columns.Add("Content | FAM RFU -->", typeof(String));

            for (int i = 0; i < numCycles; i++) //
            {
                FAMtable.Columns.Add("Cycle" + (i + 1).ToString(), typeof(String));
            }
            FAMtable.Columns.Add("Well No.", typeof(String));
            FAMtable.Columns.Add("FAM Cq", typeof(String));
            FAMtable.Columns.Add("Set Point Temperature (Celsius)", typeof(String));

            for (int i = 0; i < numWells; i++)
            {
                emptyRow = FAMtable.NewRow();
                for (int j = 0; j < FAMtable.Columns.Count; j++)
                {
                    emptyRow[j] = string.Empty;
                }
                FAMtable.Rows.Add(emptyRow);
            }

            for (int i = 0; i < numWells; i++)
            {
                FAMtable.Rows[i][0]             = quantCqTable.Rows[i][1].ToString(); //Well number
                FAMtable.Rows[i][1]             = quantCqTable.Rows[i][4].ToString(); //Well content
                FAMtable.Rows[i][numCycles + 2] = quantCqTable.Rows[i][1].ToString(); //Well number again
            }

            jj = 0;
            for (int i = 0; i < quantCqTable.Rows.Count; i++)
            {
                if (quantCqTable.Rows[i][2].ToString() == "FAM")
                {
                    tf1 = Convert.ToSingle(quantCqTable.Rows[i][7].ToString());
                    tf1 = (float)Math.Round(tf1, 2);
                    FAMtable.Rows[jj][numCycles + 3] = tf1.ToString();                      //FAM Cq
                    FAMtable.Rows[jj][numCycles + 4] = quantCqTable.Rows[i][14].ToString(); //Set Point tempr
                    ++jj;
                }
            }

            for (int i = 0; i < numWells; i++)
            {
                for (int j = 0; j < numCycles; j++)
                {
                    tf1 = Convert.ToSingle(tempTable.Rows[j][i + 2].ToString());
                    tf1 = (float)Math.Round(tf1, 2);
                    FAMtable.Rows[i][j + 2] = tf1.ToString(); //FAM RFU values
                }
            }
            tempTable.Clear();
            //CREATING AND LOADING THE FAM TABLE - END


            //CREATING AND LOADING THE HEX TABLE - START
            using (var reader = new StreamReader(HEXamplPath))
                using (var csv = new CsvReader(reader, CultureInfo.InvariantCulture))
                {
                    using (var dr = new CsvDataReader(csv))
                    {
                        tempTable.Load(dr);
                    }
                }
            numWells  = tempTable.Columns.Count - 2;
            numCycles = tempTable.Rows.Count;
            HEXtable.Columns.Add("Well Num", typeof(String));
            HEXtable.Columns.Add("Content | HEX RFU -->", typeof(String));

            for (int i = 0; i < numCycles; i++)
            {
                HEXtable.Columns.Add("Cycle" + (i + 1).ToString(), typeof(String));
            }
            HEXtable.Columns.Add("Well No.", typeof(String));
            HEXtable.Columns.Add("HEX Cq", typeof(String));
            HEXtable.Columns.Add("Set Point Temperature (Celsius)", typeof(String));

            for (int i = 0; i < numWells; i++)
            {
                emptyRow = HEXtable.NewRow();
                for (int j = 0; j < HEXtable.Columns.Count; j++)
                {
                    emptyRow[j] = string.Empty;
                }
                HEXtable.Rows.Add(emptyRow);
            }

            for (int i = 0; i < numWells; i++)
            {
                HEXtable.Rows[i][0]             = quantCqTable.Rows[i][1].ToString(); //Well number
                HEXtable.Rows[i][1]             = quantCqTable.Rows[i][4].ToString(); //Well content
                HEXtable.Rows[i][numCycles + 2] = quantCqTable.Rows[i][1].ToString(); //Well number again
            }

            jj = 0;
            for (int i = 0; i < quantCqTable.Rows.Count; i++)
            {
                if (quantCqTable.Rows[i][2].ToString() == "HEX")
                {
                    tf1 = Convert.ToSingle(quantCqTable.Rows[i][7].ToString());
                    tf1 = (float)Math.Round(tf1, 2);
                    HEXtable.Rows[jj][numCycles + 3] = tf1.ToString();                      //HEX Cq
                    HEXtable.Rows[jj][numCycles + 4] = quantCqTable.Rows[i][14].ToString(); //Set Point Temperature
                    ++jj;
                }
            }

            for (int i = 0; i < numWells; i++)
            {
                for (int j = 0; j < numCycles; j++)
                {
                    tf1 = Convert.ToSingle(tempTable.Rows[j][i + 2].ToString());
                    tf1 = (float)Math.Round(tf1, 2);
                    HEXtable.Rows[i][j + 2] = tf1.ToString(); //HEX RFU values
                }
            }
            tempTable.Clear();
            //CREATING AND LOADING THE HEX TABLE - END


            //CREATING AND LOADING THE RESULT TABLE - START
            resultTable.Columns.Add("Well_number", typeof(String));
            resultTable.Columns.Add("Well_content", typeof(String));
            resultTable.Columns.Add("HEX_RFU_initial", typeof(String));
            resultTable.Columns.Add("HEX_RFU_3EndPoints_Avg", typeof(String));
            resultTable.Columns.Add("HEX_Cq", typeof(String));
            resultTable.Columns.Add("FAM_RFU_initial", typeof(String));
            resultTable.Columns.Add("FAM_RFU_3EndPoints_Avg", typeof(String));
            resultTable.Columns.Add("FAM_Cq", typeof(String));
            resultTable.Columns.Add("Point_of_interest", typeof(String));
            resultTable.Columns.Add("Call", typeof(String));
            resultTable.Columns.Add("Well_Number", typeof(String));
            resultTable.Columns.Add("Field", typeof(String));
            resultTable.Columns.Add("Field Information", typeof(String));

            for (int i = 1; i <= numWells; i++)
            {
                emptyRow = resultTable.NewRow();
                for (int j = 0; j < resultTable.Columns.Count; j++)
                {
                    emptyRow[j] = "--";
                }
                resultTable.Rows.Add(emptyRow);
            }

            for (int i = 0; i < numWells; i++)
            {
                resultTable.Rows[i][0] = quantCqTable.Rows[i][1].ToString(); //For Well Number
                resultTable.Rows[i][1] = quantCqTable.Rows[i][4].ToString(); //For Well Content
                resultTable.Rows[i][2] = HEXtable.Rows[i][2].ToString();     //For HEX RFU Initial
                tf1 = Convert.ToSingle(HEXtable.Rows[i][numCycles - 1].ToString());
                tf2 = Convert.ToSingle(HEXtable.Rows[i][numCycles].ToString());
                tf3 = Convert.ToSingle(HEXtable.Rows[i][numCycles + 1].ToString());
                resultTable.Rows[i][3] = ((tf1 + tf2 + tf3) / 3).ToString(); //For HEX RFU 3EndPoints-avg
                resultTable.Rows[i][5] = FAMtable.Rows[i][2].ToString();     //For FAM RFU Initial
                tf1 = Convert.ToSingle(FAMtable.Rows[i][numCycles - 1].ToString());
                tf2 = Convert.ToSingle(FAMtable.Rows[i][numCycles].ToString());
                tf3 = Convert.ToSingle(FAMtable.Rows[i][numCycles + 1].ToString());
                resultTable.Rows[i][6]  = ((tf1 + tf2 + tf3) / 3).ToString(); //For FAM RFU 3EndPoints-avg
                resultTable.Rows[i][10] = quantCqTable.Rows[i][1].ToString(); //For Well Number again
            }

            jj = 0; kk = 0;
            for (int i = 0; i < quantCqTable.Rows.Count; i++)
            {
                if (quantCqTable.Rows[i][2].ToString() == "FAM")
                {
                    tf1 = Convert.ToSingle(quantCqTable.Rows[i][7].ToString());
                    tf1 = (float)Math.Round(tf1, 2);
                    resultTable.Rows[jj][7] = tf1.ToString(); //For FAM Cq
                    ++jj;
                }
                if (quantCqTable.Rows[i][2].ToString() == "HEX")
                {
                    tf1 = Convert.ToSingle(quantCqTable.Rows[i][7].ToString());
                    tf1 = (float)Math.Round(tf1, 2);
                    resultTable.Rows[kk][4] = tf1.ToString(); //For HEX Cq
                    ++kk;
                }
            }

            for (int i = 0; i < numWells; i++) // Loop for flagging critical wells
            {
                resultTable.Rows[i][8] = "--"; //By default, there is no flag

                if (resultTable.Rows[i][1].ToString() == "Pos Ctrl" || resultTable.Rows[i][1].ToString() == "Neg Ctrl")
                {   //POSITIVE AND NEGATIVE CONTROL WELLS ARE ALWAYS FLAGGED
                    resultTable.Rows[i][8] = "<<CHECK!!";
                    if (resultTable.Rows[i][1].ToString() == "Pos Ctrl")
                    {
                        posCtrlIndx = i;
                    }
                    else
                    {
                        negCtrlIndx = i;
                    }
                }
                else
                {
                    if (resultTable.Rows[i][7].ToString() != "NaN")
                    {
                        CqFAM  = Convert.ToSingle(resultTable.Rows[i][7].ToString());
                        rfuFAM = Convert.ToSingle(resultTable.Rows[i][6].ToString());
                        if (CqFAM >= 15 && rfuFAM >= 500) // Flag thresholds defined here
                        {
                            resultTable.Rows[i][8] = "<<CHECK!!";
                        }
                    }
                }
            }

            for (int i = 0; i < numWells; i++)         //Loop for making the positive/negative call
            {
                resultTable.Rows[i][9] = WELL_CALL[2]; //By default, the test is INCONCLUSIVE
                if (resultTable.Rows[i][7].ToString() == "NaN")
                {
                    if (resultTable.Rows[i][4].ToString() != "NaN")
                    {
                        resultTable.Rows[i][9] = WELL_CALL[0];
                    }
                }
                else
                {
                    CqFAM  = Convert.ToSingle(resultTable.Rows[i][7].ToString());
                    rfuFAM = Convert.ToSingle(resultTable.Rows[i][6].ToString());
                    if (CqFAM >= 19.9 && rfuFAM >= 1000) // Call thresholds defined here
                    {
                        resultTable.Rows[i][9] = WELL_CALL[1];
                    }
                    else
                    {
                        resultTable.Rows[i][9] = WELL_CALL[0];
                    }
                }
            }

            resultTable.Rows[0][11]  = "Lab Name";
            resultTable.Rows[1][11]  = "PCR Serial num";
            resultTable.Rows[2][11]  = "Username";
            resultTable.Rows[3][11]  = "Date of Run";
            resultTable.Rows[4][11]  = "Time of Run";
            resultTable.Rows[5][11]  = "microPlate Bar Code";
            resultTable.Rows[6][11]  = "Number of Cycles";
            resultTable.Rows[7][11]  = "Set Point (Celsius)";
            resultTable.Rows[8][11]  = "PCT result";
            resultTable.Rows[9][11]  = "NEC result";
            resultTable.Rows[10][11] = "Overall Test Result";
            resultTable.Rows[6][12]  = numCycles.ToString();
            resultTable.Rows[7][12]  = quantCqTable.Rows[0][14]; // Set point temperature (Celsius)

            overallResult = 2;
            if (resultTable.Rows[posCtrlIndx][9].ToString() == WELL_CALL[1])
            {
                PCTresult = 1; resultTable.Rows[8][12] = PCT_RESULT[1];
            }
            else
            {
                PCTresult = 0;
                resultTable.Rows[8][12] = PCT_RESULT[0];
                for (int i = 0; i < numWells; i++)
                {
                    if (i != posCtrlIndx && i != negCtrlIndx)
                    {
                        resultTable.Rows[i][9] = WELL_CALL[2];
                    }
                }
                overallResult = 0;
            }

            if (resultTable.Rows[negCtrlIndx][9].ToString() == WELL_CALL[0])
            {
                NECresult = 1; resultTable.Rows[9][12] = NEC_RESULT[1];
            }
            else
            {
                NECresult = 0;
                resultTable.Rows[9][12] = NEC_RESULT[0];
                for (int i = 0; i < numWells; i++)
                {
                    if (i != posCtrlIndx && i != negCtrlIndx)
                    {
                        resultTable.Rows[i][9] = WELL_CALL[2];
                    }
                }
                overallResult = 0;
            }

            if (overallResult != 0)
            {
                for (int i = 0; i < numWells; i++)
                {
                    if (resultTable.Rows[i][9].ToString() == WELL_CALL[2])
                    {
                        overallResult = 1;
                        break;
                    }
                }
            }

            resultTable.Rows[10][12] = OVERALL_RESULT[overallResult];
            //CREATING AND LOADING THE RESULT TABLE - END


            //WRITING THE TABLES INTO CSV FILES - START
            using (var textWriter = File.CreateText(csvResultFilePath)) // Writing the result file
            {
                using (var csv = new CsvWriter(textWriter, CultureInfo.InvariantCulture))
                {
                    // Write columns
                    foreach (DataColumn column in resultTable.Columns)
                    {
                        csv.WriteField(column.ColumnName);
                    }
                    csv.NextRecord();

                    // Write row values
                    foreach (DataRow row in resultTable.Rows)
                    {
                        for (var i = 0; i < resultTable.Columns.Count; i++)
                        {
                            csv.WriteField(row[i]);
                        }
                        csv.NextRecord();
                    }
                }
            }

            /*using (var textWriter = File.CreateText(@"D:\CFXManagerAPI_Examples-11242014_FINAL\FAMtable.csv")) // Writing the FAMtable
             * {
             *  using (var csv = new CsvWriter(textWriter, CultureInfo.InvariantCulture))
             *  {
             *      // Write columns
             *      foreach (DataColumn column in FAMtable.Columns)
             *      {
             *          csv.WriteField(column.ColumnName);
             *      }
             *      csv.NextRecord();
             *
             *      // Write row values
             *      foreach (DataRow row in FAMtable.Rows)
             *      {
             *          for (var i = 0; i < FAMtable.Columns.Count; i++)
             *          {
             *              csv.WriteField(row[i]);
             *          }
             *          csv.NextRecord();
             *      }
             *  }
             * }*/

            /*using (var textWriter = File.CreateText(@"D:\CFXManagerAPI_Examples-11242014_FINAL\HEXtable.csv")) // Writing the HEXtable
             * {
             *  using (var csv = new CsvWriter(textWriter, CultureInfo.InvariantCulture))
             *  {
             *      // Write columns
             *      foreach (DataColumn column in HEXtable.Columns)
             *      {
             *          csv.WriteField(column.ColumnName);
             *      }
             *      csv.NextRecord();
             *
             *      // Write row values
             *      foreach (DataRow row in HEXtable.Rows)
             *      {
             *          for (var i = 0; i < HEXtable.Columns.Count; i++)
             *          {
             *              csv.WriteField(row[i]);
             *          }
             *          csv.NextRecord();
             *      }
             *  }
             * }*/
            //WRITING THE TABLES INTO CSV FILES - END


            //WRITING THE RESULT FILE TO OBJECTS - START
            DataPreamble    preamble     = new DataPreamble();
            List <WellData> wells        = new List <WellData>();
            FullTestData    compact_data = new FullTestData();
            string          jsonText;

            preamble.lab_name       = "Calcinate Lab"; preamble.pcr_serial = PCRserial; preamble.username = "******";
            preamble.run_date       = outputFileName.Substring(5, 10); preamble.run_time = outputFileName.Substring(16, 8);
            preamble.plate_barcode  = "BARCODE";
            preamble.num_cycles     = numCycles; preamble.setpoint_celsius = Convert.ToSingle(resultTable.Rows[7][12]);
            preamble.pct_result     = PCT_RESULT[PCTresult]; preamble.nec_result = NEC_RESULT[NECresult];
            preamble.overall_result = OVERALL_RESULT[overallResult];

            for (int i = 0; i < numWells; i++)
            {
                wells.Add(new WellData(numCycles));
            }
            jj = 0;
            foreach (WellData well in wells)
            {
                well.well_num               = resultTable.Rows[jj][0].ToString();
                well.well_content           = resultTable.Rows[jj][1].ToString();
                well.hex_rfu_initial        = Convert.ToSingle(resultTable.Rows[jj][2]);
                well.hex_rfu_3endpoints_avg = Convert.ToSingle(resultTable.Rows[jj][3]);
                well.hex_cq                 = resultTable.Rows[jj][4].ToString();
                well.fam_rfu_initial        = Convert.ToSingle(resultTable.Rows[jj][5]);
                well.fam_rfu_3endpoints_avg = Convert.ToSingle(resultTable.Rows[jj][6]);
                well.fam_cq                 = resultTable.Rows[jj][7].ToString();
                well.point_of_interest      = resultTable.Rows[jj][8].ToString();
                well.test_call              = resultTable.Rows[jj][9].ToString();
                well.well_barcode           = resultTable.Rows[jj][11].ToString();

                for (int i = 0; i < numCycles; i++)
                {
                    well.hex_rfu_points[i] = Convert.ToSingle(HEXtable.Rows[jj][i + 2]);
                    well.fam_rfu_points[i] = Convert.ToSingle(FAMtable.Rows[jj][i + 2]);
                }

                ++jj;
            }

            compact_data.data_preamble = preamble;
            compact_data.wells_data    = wells;
            // WRITING THE RESULT FILES TO OBJECTS - END

            // SERIALIZING THE OBJECTS TO JSON AND WRITING THE TXT FILE - START
            jsonText = JsonConvert.SerializeObject(compact_data, Formatting.Indented);
            File.WriteAllText(jsonResultFilePath, jsonText, Encoding.UTF8);
            // SERIALIZING THE OBJECTS TO JSON AND WRITING THE TXT FILE - END


            // DELETING THE PCR FILES AND DATATABLES - START
            if (File.Exists(quantCqPath))
            {
                File.Delete(quantCqPath);
            }
            if (File.Exists(FAMamplPath))
            {
                File.Delete(FAMamplPath);
            }
            if (File.Exists(HEXamplPath))
            {
                File.Delete(HEXamplPath);
            }
            if (File.Exists(Path.Combine(rootFolderPCR, allelicDiscrim)))
            {
                File.Delete(Path.Combine(rootFolderPCR, allelicDiscrim));
            }
            if (File.Exists(Path.Combine(rootFolderPCR, meltFAM)))
            {
                File.Delete(Path.Combine(rootFolderPCR, meltFAM));
            }
            if (File.Exists(Path.Combine(rootFolderPCR, meltHEX)))
            {
                File.Delete(Path.Combine(rootFolderPCR, meltHEX));
            }
            //if (File.Exists(Path.Combine(rootFolderPCR, endpointFAM)))
            //    File.Delete(Path.Combine(rootFolderPCR, endpointFAM));
            if (File.Exists(Path.Combine(rootFolderPCR, endpointHEX)))
            {
                File.Delete(Path.Combine(rootFolderPCR, endpointHEX));
            }
            if (File.Exists(Path.Combine(rootFolderPCR, standardCurve)))
            {
                File.Delete(Path.Combine(rootFolderPCR, standardCurve));
            }
            if (File.Exists(Path.Combine(rootFolderPCR, runInfo)))
            {
                File.Delete(Path.Combine(rootFolderPCR, runInfo));
            }
            if (File.Exists(Path.Combine(rootFolderPCR, quantPlateFAM)))
            {
                File.Delete(Path.Combine(rootFolderPCR, quantPlateFAM));
            }
            if (File.Exists(Path.Combine(rootFolderPCR, quantPlateHEX)))
            {
                File.Delete(Path.Combine(rootFolderPCR, quantPlateHEX));
            }
            if (File.Exists(Path.Combine(rootFolderPCR, quantSumm)))
            {
                File.Delete(Path.Combine(rootFolderPCR, quantSumm));
            }
            if (File.Exists(Path.Combine(rootFolderPCR, ANOVAresults)))
            {
                File.Delete(Path.Combine(rootFolderPCR, ANOVAresults));
            }
            HEXtable.Clear();
            FAMtable.Clear();
            resultTable.Clear();
            //DELETING THE PCR FILES AND DATATABLES - END
        }
Пример #30
0
        /// <summary>
        /// Reads a CSV file into a DataTable.
        ///
        /// If the CSV file has no header row, decorate your class properties with
        /// 'Index' attributes; to map a differently-named header onto a property,
        /// use the 'Name' attribute. Other supported attributes include
        /// [BooleanTrueValues("yes")], [BooleanFalseValues("no")], [Optional] and
        /// [Ignored]. See https://joshclose.github.io/CsvHelper/getting-started
        /// for details.
        /// </summary>
        /// <param name="path">File path of CSV file</param>
        /// <param name="delimiter">(Optional) - Single character separator, i.e: ";" "," "\t" (tab) or "auto" to try and detect</param>
        /// <param name="hasHeaders">(Optional) - Does the CSV file have headers or not?</param>
        /// <param name="ignoreQuotes">(Optional) - If true, quotation marks are treated like any other character</param>
        /// <param name="trimOptions">(Optional) - Trim inside quotes, trim fields, don't trim</param>
        /// <returns>DataTable result</returns>
        public static DataTable CsvFileToDataTable(
            string path,
            string delimiter           = "auto",
            bool hasHeaders            = true,
            bool ignoreQuotes          = false,
            CsvTrimOptions trimOptions = CsvTrimOptions.TrimFields)
        {
            char resolvedDelimiter;

            try
            {
                if (delimiter == "auto")
                {
                    // With headers only the first line is needed for detection;
                    // without headers, scan the whole file.
                    if (hasHeaders)
                    {
                        string headerLine;
                        using (var headerReader = new StreamReader(path))
                        {
                            headerLine = headerReader.ReadLine();
                        }

                        resolvedDelimiter = TryDetectDelimiter(new string[] { headerLine });
                    }
                    else
                    {
                        resolvedDelimiter = TryDetectDelimiter(File.ReadAllLines(path));
                    }
                }
                else
                {
                    resolvedDelimiter = delimiter[0];
                }
            }
            catch
            {
                // Detection failed (or an empty delimiter string was supplied);
                // fall back to a comma.
                resolvedDelimiter = ',';
            }

            var result = new DataTable();

            using (var reader = new StreamReader(path))
            using (var csv = new CsvReader(reader))
            {
                csv.Configuration.HasHeaderRecord  = hasHeaders;
                csv.Configuration.Delimiter        = resolvedDelimiter.ToString();
                csv.Configuration.IgnoreBlankLines = true;
                csv.Configuration.IgnoreQuotes     = ignoreQuotes;

                if (trimOptions == CsvTrimOptions.None)
                {
                    csv.Configuration.TrimOptions = TrimOptions.None;
                }
                else if (trimOptions == CsvTrimOptions.TrimInsideQuotes)
                {
                    csv.Configuration.TrimOptions = TrimOptions.InsideQuotes;
                }
                else if (trimOptions == CsvTrimOptions.TrimFields)
                {
                    csv.Configuration.TrimOptions = TrimOptions.Trim;
                }

                using (var dataReader = new CsvDataReader(csv))
                {
                    result.Load(dataReader);
                }
            }

            return result;
        }
Пример #31
0
        /// <summary>
        /// Console entry point for the HR data import job: reads employee data from a
        /// configured CSV file, merges in manager information resolved from Active
        /// Directory, and pushes the combined records into SharePoint user profiles.
        /// All progress and failures are written through Logger; exceptions never
        /// escape this method.
        /// </summary>
        private static void Main(string[] args)
        {
            Console.WriteLine("HR data import job.");
            Console.WriteLine("Please do not close this window.");

            // Counters reported in the final summary line.
            int totalImported = 0;
            int totalAdded = 0;

            try
            {
                string targetWebUrl = ConfigurationManager.AppSettings["CAWebSiteUrl"];

                Logger.Log("Start Job: Import HR data to " + targetWebUrl);

                Logger.Log("Data Mapping: Read CSV data mapping relations from " + ConfigurationManager.AppSettings["CsvDataMappingFilePath"]);

                // Column mapping describing how CSV fields map onto DataTable columns.
                var csvMapping = DataMapperFactory.Instance.GetDataMapper(ProviderType.CSV).GetMapping();

                DataTable csvTable;

                string csvFilePath = ConfigurationManager.AppSettings["CsvFilePath"];

                string encodingString = ConfigurationManager.AppSettings["CsvFileEncoding"];

                // Fall back to the system default encoding when none is configured.
                Encoding encoding = encodingString.IsNullOrWhitespace() ? Encoding.Default : Encoding.GetEncoding(encodingString);

                // NOTE(review): AppSettings["CsvDataDelimiter"][0] throws if the setting
                // is missing or empty — confirm it is mandatory in deployment config.
                using (var csvReader = new CsvDataReader(csvFilePath, encoding, ConfigurationManager.AppSettings["CsvDataDelimiter"][0]))
                {
                    csvTable = new CsvDataProvider(csvReader, csvMapping).ReadAsDataTable();
                }

                var csvRows = csvTable.Rows;

                Logger.Log("Data Import: Read CSV data [" + csvRows.Count + " rows] from " + csvFilePath);

                Logger.Log("Data Mapping: Read AD data mapping relations from " + ConfigurationManager.AppSettings["ADDataMappingFilePath"]);

                var adMapping = DataMapperFactory.Instance.GetDataMapper(ProviderType.AD).GetMapping();

                DataTable adTable;

                string ldap = ConfigurationManager.AppSettings["LDAPString"];

                Logger.Log("Data Import: Read AD data from " + ldap);

                // Pull only the AD attributes that appear in the mapping.
                using (var adReader = new ADDataReader(ldap, adMapping.ColumnMappings.Keys.ToArray()))
                {
                    adTable = new ADDataProvider(adReader, adMapping).ReadAsDataTable();
                }

                string accountNameColumn = adMapping.ColumnMappings["sAMAccountName"].DataTableColumnName;

                // Update Manager Information for each employee.
                foreach (DataRow csvRow in csvRows)
                {
                    csvRow.BeginEdit();

                    // Resolve the manager's account name in the AD table via ManagerId.
                    string name = FindManagerAccountFromADTable(adTable, csvRow.Field<string>("ManagerId"), accountNameColumn);

                    csvRow["Manager"] = name;

                    csvRow.EndEdit();
                }

                bool logUpdatedUsers;

                // Optional flag: when true, every updated user is logged individually.
                bool.TryParse(ConfigurationManager.AppSettings["LogUpdatedUsers"], out logUpdatedUsers);

                using (var site = new SPSite(targetWebUrl))
                {
                    Logger.Log("Data Import: Read data import gray list.");

                    // Accounts on this gray list are excluded from the import.
                    var exceptions = SPHelper.GetImportExceptionsFromSPList(site.RootWeb, ConfigurationManager.AppSettings["HRDataImportExceptionsListName"]);

                    var context = ServerContext.GetContext(site);

                    var upm = new UserProfileManager(context);

                    var columns = adTable.Columns;

                    foreach (DataRow r in adTable.Rows)
                    {
                        // User account and Employee ID should not be empty.
                        string account = r.Field<string>(accountNameColumn);
                        string employeeId = r.Field<string>("EmployeeId");

                        if (account.IsNullOrWhitespace() || employeeId.IsNullOrWhitespace())
                        {
                            continue;
                        }

                        // If specific account was listed in the import exceptions, ignore it.
                        if (exceptions.Any(e => e.Equals(account.Trim(), StringComparison.InvariantCultureIgnoreCase)))
                        {
                            Logger.Log("Data Import: User [" + account + "] data will not update for the account is in the gray list.");
                            continue;
                        }

                        // Find in the CSV DataTable if any row has same employee ID.
                        // Only the first match is used (note the break); non-null CSV
                        // values overwrite the corresponding AD values.
                        foreach (DataRow row in from DataRow row in csvRows let id = row.Field<string>("EmployeeId") where employeeId.Trim().Equals(id) select row)
                        {
                            r.BeginEdit();

                            foreach (string colName in (from DataColumn col in columns select col.ColumnName).Where(colName => !row.IsNull((string) colName)))
                            {
                                r[colName] = row[colName];
                            }

                            r.EndEdit();

                            break;
                        }

                        try
                        {
                            // Push the merged row into the user's SharePoint profile.
                            var status = SPHelper.UpdateUserProfileByAccount(upm, account, r, columns);

                            if (status == UserProfileUpdateStatus.Updated)
                            {
                                if (logUpdatedUsers)
                                {
                                    Logger.Log(string.Format("Data Import: User [{0}, {1}]  was updated.", account, employeeId));
                                }
                                totalImported++;
                            }
                            else
                            {
                                totalAdded++;
                            }
                        }
                        catch (SPException spex)
                        {
                            // A failure for one user must not abort the whole import.
                            Logger.Log("Error: User [" + account + "] data was not updated. Error Message = " + spex.Message);
                        }
                    }
                }
            }
            catch (Exception ex)
            {
                // NOTE(review): only ex.Message is logged, so the stack trace is lost —
                // consider logging ex.ToString() for diagnosability.
                Logger.Log("Error: " + ex.Message);
            }

            string stat = string.Format(CultureInfo.InvariantCulture, "{0} employees were created, {1} employees were updated.", totalAdded, totalImported);

            Logger.Log("End Job: Data Import Completed. " + stat + " \r\n=======================================================");
        }
Пример #32
0
        /// <summary>
        /// Wizard-style command handler for the import page. "Next" advances through the
        /// import steps (1: choose options, 2: upload the file and normalize it to XML,
        /// 3: apply the field mappings and run the import); "Back" steps backwards;
        /// "ImportMore" restarts the wizard via a redirect; "Finish" returns to the
        /// module's main page. Any exception is shown in lblError instead of propagating.
        /// </summary>
        protected void Page_Command(Object sender, CommandEventArgs e)
        {
            try
            {
                if ( e.CommandName == "Next" )
                {
                    switch ( nImportStep )
                    {
                        case 1:
                        {
                            // Nothing to process on step 1; just advance the wizard and
                            // persist the step in ViewState for the next postback.
                            nImportStep++;
                            ShowStep();
                            ViewState["ImportStep"] = nImportStep;
                            break;
                        }
                        case 2:
                        {
                            // Step 2: read the uploaded file and convert it to the
                            // import XML format, whatever the source format was.
                            if ( Page.IsValid )
                            {
                                HttpPostedFile pstIMPORT = fileIMPORT.PostedFile;
                                if ( pstIMPORT != null )
                                {
                                    if ( pstIMPORT.FileName.Length > 0 )
                                    {
                                        string sFILENAME       = Path.GetFileName (pstIMPORT.FileName);
                                        string sFILE_EXT       = Path.GetExtension(sFILENAME);
                                        string sFILE_MIME_TYPE = pstIMPORT.ContentType;
                                        if ( radXML_SPREADSHEET.Checked )
                                        {
                                            // Excel XML spreadsheet: load, then normalize to import XML.
                                            xml.Load(pstIMPORT.InputStream);
                                            ConvertXmlSpreadsheetToXml(ref xml, sImportModule.ToLower());
                                        }
                                        else if ( sFILE_MIME_TYPE == "text/xml" || radXML.Checked )
                                        {
                                            // Plain XML upload: copy the stream byte-by-byte so stray
                                            // raw 0/1 bytes can be rewritten before parsing.
                                            using ( MemoryStream mstm = new MemoryStream() )
                                            {
                                                using ( BinaryWriter mwtr = new BinaryWriter(mstm) )
                                                {
                                                    using ( BinaryReader reader = new BinaryReader(pstIMPORT.InputStream) )
                                                    {
                                                        // Copy in 8 KB chunks.
                                                        byte[] binBYTES = reader.ReadBytes(8 * 1024);
                                                        while ( binBYTES.Length > 0 )
                                                        {
                                                            for ( int i = 0; i < binBYTES.Length; i++ )
                                                            {
                                                                // MySQL dump seems to dump binary 0 & 1 for byte values.
                                                                if ( binBYTES[i] == 0 )
                                                                    mstm.WriteByte(Convert.ToByte('0'));
                                                                else if ( binBYTES[i] == 1 )
                                                                    mstm.WriteByte(Convert.ToByte('1'));
                                                                else
                                                                    mstm.WriteByte(binBYTES[i]);
                                                            }
                                                            binBYTES = reader.ReadBytes(8 * 1024);
                                                        }
                                                    }
                                                    mwtr.Flush();
                                                    mstm.Seek(0, SeekOrigin.Begin);
                                                    xml.Load(mstm);
                                                    // Detect an Excel XML spreadsheet disguised as plain
                                                    // XML via its "mso-application" processing instruction.
                                                    bool bExcelSheet = false;
                                                    foreach ( XmlNode xNode in xml )
                                                    {
                                                        if ( xNode.NodeType == XmlNodeType.ProcessingInstruction )
                                                        {
                                                            if ( xNode.Name == "mso-application" && xNode.InnerText == "progid=\"Excel.Sheet\"" )
                                                            {
                                                                bExcelSheet = true;
                                                                break;
                                                            }
                                                        }
                                                    }
                                                    if ( bExcelSheet )
                                                        ConvertXmlSpreadsheetToXml(ref xml, sImportModule.ToLower());
                                                }
                                            }
                                        }
                                        else if ( radEXCEL.Checked )
                                        {
                                            // Binary Excel workbook: convert its first sheet to XML.
                                            ExcelDataReader.ExcelDataReader spreadsheet = new ExcelDataReader.ExcelDataReader(pstIMPORT.InputStream);
                                            if ( spreadsheet.WorkbookData.Tables.Count > 0 )
                                            {
                                                xml = ConvertTableToXml(spreadsheet.WorkbookData.Tables[0], sImportModule.ToLower());
                                            }
                                        }
                                        else if ( radCUSTOM_TAB.Checked )
                                        {
                                            // Tab-separated values.
                                            CsvDataReader spreadsheet = new CsvDataReader(pstIMPORT.InputStream, ControlChars.Tab);
                                            if ( spreadsheet.Table != null )
                                            {
                                                xml = ConvertTableToXml(spreadsheet.Table, sImportModule.ToLower());
                                            }
                                        }
                                        else
                                        {
                                            // 08/21/2006 Paul.  Everything else is comma separated.  Convert to XML.
                                            CsvDataReader spreadsheet = new CsvDataReader(pstIMPORT.InputStream, ',');
                                            if ( spreadsheet.Table != null )
                                            {
                                                xml = ConvertTableToXml(spreadsheet.Table, sImportModule.ToLower());
                                            }
                                        }
                                    }
                                }
                            }
                            if ( xml.DocumentElement == null )
                                throw(new Exception(L10n.Term("Import.LBL_NOTHING")));

                            // 08/21/2006 Paul.  Don't move to next step if there is no data.
                            XmlNodeList nlRows = xml.DocumentElement.SelectNodes(sImportModule.ToLower());
                            if ( nlRows.Count == 0 )
                                throw(new Exception(L10n.Term("Import.LBL_NOTHING")));
                            // Persist the parsed XML across postbacks and refresh the
                            // mapping UI before moving to step 3.
                            ViewState["xml"] = xml.OuterXml;
                            UpdateImportMappings(true);

                            nImportStep++ ;
                            ShowStep();
                            ViewState["ImportStep"] = nImportStep;
                            break;
                        }
                        case 3:
                        {
                            // Step 3: apply the user-selected mappings and run the import.
                            UpdateImportMappings(false);
                            GenerateImport();
                            nImportStep++ ;
                            ShowStep();
                            ViewState["ImportStep"] = nImportStep;
                            break;
                        }
                    }
                }
                else if ( e.CommandName == "Back" )
                {
                    if ( nImportStep > 1 )
                    {
                        nImportStep--;
                        ShowStep();
                        ViewState["ImportStep"] = nImportStep;
                    }
                }
                else if ( e.CommandName == "ImportMore" )
                {
                    //radEXCEL.Checked = true;
                    //chkHasHeader.Checked = true;
                    //nImportStep = 1;
                    //ShowStep();
                    //ViewState["ImportStep"] = nImportStep;
                    // 08/20/2006 Paul.  Redirecting is a safer way to reset all variables.
                    Response.Redirect(Request.Path);
                }
                else if ( e.CommandName == "Finish" )
                {
                    Response.Redirect("~/" + sImportModule);
                }
            }
            catch(Exception ex)
            {
                // Surface the error on the page rather than letting ASP.NET show an
                // error screen mid-wizard.
                //SplendidError.SystemError(new StackTrace(true).GetFrame(0), ex.Message);
                lblError.Text = ex.Message;
                return;
            }
        }
        /// <summary>
        /// Bulk-inserts rows into the destination table described by <paramref name="input"/>.
        /// The source is resolved, in priority order, from: a query against a named source
        /// connection, a stored file token (copied to a local temp file), or a local CSV file.
        /// </summary>
        /// <param name="input">Bulk-insert job description (source, destination, options).</param>
        /// <param name="token">Cancellation token flowed to the database operations.</param>
        /// <returns>Always <c>true</c> on success.</returns>
        /// <exception cref="ArgumentException">
        /// Unknown destination/source connection name, no source specified, or source file missing.
        /// </exception>
        public override async Task<bool> DoWorkAsync(BulkInsertInput input, CancellationToken token)
        {
            var destinationDatabase = _databases.FirstOrDefault(x => x.Name == input.Destination.ConnectionName);
            if (destinationDatabase == null)
            {
                throw new ArgumentException("Invalid Destination ConnectionName");
            }

            if (input.Destination.TruncateFirst)
            {
                await destinationDatabase.TruncateTable(input.Destination.SchemaName, input.Destination.TableName);
            }

            DbConnection sourceConnection = null;
            DbCommand sourceCommand = null;
            IDataReader dataReader = null;

            // FIX: resource acquisition now happens inside the try so that a failure
            // part-way through (e.g. ExecuteReaderAsync throwing after the connection
            // was opened) is still cleaned up by the finally block. Previously the
            // try only wrapped BulkInsert, leaking the connection/command/reader on
            // any earlier failure.
            try
            {
                if (!string.IsNullOrEmpty(input.Source.ConnectionName))
                {
                    _logger.LogDebug($"Quering '{input.Source.ConnectionName}' as source");
                    var sourceDatabase = _databases.FirstOrDefault(x => x.Name == input.Source.ConnectionName);
                    if (sourceDatabase == null)
                    {
                        throw new ArgumentException("Invalid Source ConnectionName");
                    }

                    // The query text may live in the file store rather than inline.
                    if (input.Source.Query.QueryFileToken != null)
                    {
                        using (var fileStore = _fileStoreFactory.GetFileStoreFromFileToken(input.Source.Query.QueryFileToken))
                        using (Stream stream = await fileStore.OpenRead(input.Source.Query.QueryFileToken))
                        using (StreamReader reader = new StreamReader(stream))
                        {
                            input.Source.Query.Query = reader.ReadToEnd();
                        }
                    }

                    sourceConnection = sourceDatabase.CreateConnection();
                    await sourceConnection.OpenAsync(token);
                    sourceCommand = sourceConnection.CreateCommand(input.Source.Query);
                    dataReader = await sourceCommand.ExecuteReaderAsync(token);
                }
                else if (input.Source.FileToken != null)
                {
                    // Materialize the file-store content into a local temp file so the
                    // CSV branch below can open it by name.
                    // NOTE(review): the temp file is never deleted — confirm whether
                    // cleanup is handled elsewhere.
                    using var fileStore = _fileStoreFactory.GetFileStoreFromFileToken(input.Source.FileToken);
                    string tmpFileName = Path.GetTempFileName();

                    _logger.LogDebug($"Copying filetoken '{input.Source.FileToken}' into '{tmpFileName}'");
                    using (var reader = await fileStore.OpenRead(input.Source.FileToken))
                    using (var writer = File.OpenWrite(tmpFileName))
                    {
                        await reader.CopyToAsync(writer, 2048, token);
                    }

                    input.Source.FileName = tmpFileName;
                }

                if (dataReader == null)
                {
                    if (string.IsNullOrEmpty(input.Source.FileName))
                    {
                        throw new ArgumentException("No Source specified");
                    }
                    if (!File.Exists(input.Source.FileName))
                    {
                        throw new ArgumentException($"File '{input.Source.FileName}' not found");
                    }

                    _logger.LogDebug($"Loading file '{input.Source.FileName}' as source");

                    dataReader = new CsvDataReader(input.Source.FileName, input.Source.Separator, input.Source.Headers, CultureInfo.GetCultureInfo(input.Source.CultureName));
                }

                _logger.LogDebug("Starting BulkInsert");
                await destinationDatabase.BulkInsert(
                    dataReader,
                    input.Destination.SchemaName,
                    input.Destination.TableName,
                    input.Destination.Timeout,
                    token);

                _logger.LogDebug("BulkInsert completed");
            }
            finally
            {
                dataReader?.Dispose();
                sourceCommand?.Dispose();
                sourceConnection?.Dispose();
            }

            return true;
        }
Пример #34
0
 /// <summary>
 /// Builds a <see cref="DbDataReader"/> over the CSV content supplied by
 /// <c>GetTextReader()</c>.
 /// </summary>
 /// <returns>A reader positioned before the first record.</returns>
 public static DbDataReader GetData()
 {
     var textReader = GetTextReader();
     return CsvDataReader.Create(textReader);
 }
Пример #35
0
    /// <summary>
    /// Streams Azure usage CSV data into the dbo.AzureUsageRecords_Stage table via
    /// SqlBulkCopy. The first line of the stream (the billing-period header) is skipped.
    /// Throughput is logged when the upload finishes.
    /// </summary>
    /// <param name="stream">UTF-8 CSV stream to upload.</param>
    /// <param name="dateFrom">Lower bound passed to the record sink/filter.</param>
    /// <param name="dateTo">Upper bound passed to the record sink/filter.</param>
    /// <param name="token">Cancellation token for the bulk-copy operation.</param>
    /// <returns>The number of records uploaded.</returns>
    private async Task<int> UploadFromStream(Stream stream, DateTime dateFrom, DateTime dateTo, CancellationToken token)
    {
        int      recordCount;
        DateTime startTime;
        TimeSpan processingTime;

        using (var reader = new StreamReader(stream, Encoding.UTF8))
            using (var connection = GetSqlConnection()) {
                await connection.OpenAsync(token).ConfigureAwait(false);

                var bulkCopy = new SqlBulkCopy(connection, SqlBulkCopyOptions.FireTriggers | SqlBulkCopyOptions.TableLock, null);
                bulkCopy.DestinationTableName = "dbo.AzureUsageRecords_Stage";
                bulkCopy.BatchSize            = BatchSize;
                bulkCopy.NotifyAfter          = BatchSize;
                bulkCopy.BulkCopyTimeout      = BatchCopyTimeout;
                bulkCopy.SqlRowsCopied       += BulkCopy_SqlRowsCopied;

                startTime = DateTime.UtcNow;

                // Skip the first line - it contains the billing period, not CSV data.
                // (FIX: the line was previously captured in an unused local.)
                await reader.ReadLineAsync().ConfigureAwait(false);

                using (var recReader = new CsvDataReader <DetailedUsage>(reader, x => { return(Sink(x, dateFrom, dateTo)); }, TrackMaxLenghts)) {
                    _batchStartTime = DateTime.UtcNow;

                    // note: by default SqlBulkCopy relies on column ordinal only - create mappings
                    for (int sourceColumnOrdinal = 0; sourceColumnOrdinal < recReader.FieldCount; sourceColumnOrdinal++)
                    {
                        string destinationColumnName = recReader.GetName(sourceColumnOrdinal);
                        bulkCopy.ColumnMappings.Add(new SqlBulkCopyColumnMapping(sourceColumnOrdinal, destinationColumnName));
                    }

                    // FIX: the former `catch (Exception ex) { throw; }` was a no-op
                    // rethrow with an unused variable and has been removed.
                    try {
                        await bulkCopy.WriteToServerAsync(recReader, token).ConfigureAwait(false);
                    } catch (SqlException ex) {
                        // Note: error 40197 with code 4815 indicates some text column length is too short
                        if (TrackMaxLenghts && ex.Number == 40197)
                        {
                            string dataLengths = "Max field lengths registered:";
                            foreach (var field in recReader.MaxLenghts)
                            {
                                dataLengths += $"\n{field.Key} : {field.Value}";
                            }
                            _logger.LogDebug(dataLengths);
                        }
                        throw;
                    }

                    recordCount = recReader.RecordsAffected;
                }
            }

        processingTime = DateTime.UtcNow.Subtract(startTime);

        // Include a rec/s rate only when it is computable (avoids division by zero).
        if (recordCount != 0 && processingTime.TotalSeconds != 0)
        {
            _logger.LogInformation($"total {recordCount:n0} records uploaded in {processingTime.TotalSeconds:n1} s ({recordCount / processingTime.TotalSeconds:n1} rec/s)");
        }
        else
        {
            _logger.LogInformation($"total {recordCount:n0} records uploaded in {processingTime.TotalSeconds:n1} s");
        }

        return(recordCount);
    }
        /// <summary>
        /// Bulk-loads an exported data file into the given SQL Server table.
        /// The destination column layout is obtained by selecting a single row from the
        /// table; the file's fields are then mapped onto those columns by SqlBulkCopy.
        /// </summary>
        /// <param name="table">Destination table name.</param>
        /// <param name="filenameSql">Path of the exported data file to import.</param>
        public void ImportTableFromFile(string table, string filenameSql)
        {
            using (var fileStream = new FileStream(filenameSql, FileMode.Open, FileAccess.Read))
            {
                using (StreamReader streamReader = new StreamReader(fileStream, Encoding))
                {
                    var externalTransaction = _transaction != null ? _transaction.Transaction : null;

                    // NOTE(review): the table name is concatenated into the SQL text;
                    // table names cannot be parameterized, so "table" must never come
                    // from untrusted input.
                    var executeQuery = _sqlServer.ExecuteQuery("select top 1 * from " + table, _transaction);
                    using (var bc = new SqlBulkCopy(_sqlServer.Connection, SqlBulkCopyOptions.TableLock, externalTransaction))
                    // FIX: the reader was previously never disposed.
                    using (var dt = new CsvDataReader(streamReader, executeQuery.Columns))
                    {
                        bc.BatchSize = 10000;
                        bc.BulkCopyTimeout = 6000; // seconds, i.e. 100 minutes (the old "10 Minutes" comment was wrong)
                        bc.DestinationTableName = table;

                        bc.WriteToServer(dt);
                        bc.Close();
                    }
                }
            }
        }
Пример #37
0
 /// <summary>
 /// Performs application-defined tasks associated with freeing, releasing, or resetting unmanaged resources.
 /// </summary>
 /// <remarks>
 /// Only drops the field references so they become eligible for garbage collection.
 /// NOTE(review): _reader is not disposed here — confirm whether its type owns
 /// resources that require disposal, or whether its creator disposes it.
 /// </remarks>
 public void Dispose()
 {
     _reader  = null;
     _current = null;
 }
Пример #38
0
        /// <summary>
        /// Parses delimited text from <paramref name="stream"/> into a <see cref="DataTable"/>
        /// using CsvHelper, honoring the field/record delimiters and header flag in
        /// <paramref name="context"/>.
        /// </summary>
        /// <param name="context">Conversion options; when null, defaults are used.</param>
        /// <param name="stream">Source stream containing the delimited text.</param>
        /// <returns>A <see cref="DataTable"/> with one row per record; columns are named
        /// from the header row, or "Col0", "Col1", ... when there is no header.</returns>
        public override DataTable ToDataTable(DataTableConvertParams context, Stream stream)
        {
            if (context == null)
            {
                context = new DataTableConvertParams();
            }

            StreamReader sr = new StreamReader(stream);
            DataTable    dt = new DataTable();

            if (context.RecordDelim != '\n')
            {
                // Record delimiter is overridden: copy the stream character by character,
                // replacing each custom delimiter with '\n' so the CSV parser sees
                // standard line endings.
                //https://stackoverflow.com/questions/1232443/writing-to-then-reading-from-a-memorystream
                MemoryStream ms = new MemoryStream();
                StreamWriter sw = new StreamWriter(ms);

                while (sr.Peek() >= 0)
                {
                    char c = (char)sr.Read();
                    sw.Write(c == context.RecordDelim ? '\n' : c);
                }

                sw.Flush();
                ms.Position = 0;

                sr = new StreamReader(ms, Encoding.UTF8);
            }

            //_conf.BadDataFound = cxt =>
            //{
            //For debugging (put breakpoints here)
            //};

            _conf.Delimiter = context.FieldDelim.ToString();
            if (_conf.Delimiter != ",")
            {
                // Non-comma delimiters: treat quotes as literal characters.
                _conf.IgnoreQuotes = true;
            }
            _conf.HasHeaderRecord   = context.HasHeaders;
            _conf.MissingFieldFound = null;
            CsvReader     csvReader  = new CsvReader(sr, _conf);
            CsvDataReader dataReader = new CsvDataReader(csvReader);

            if (!context.HasHeaders)
            {
                // No header row: grow the column set on the fly as wider records appear.
                while (dataReader.Read())
                {
                    while (dt.Columns.Count < dataReader.FieldCount)
                    {
                        dt.Columns.Add($"Col{dt.Columns.Count}");
                    }

                    DataRow row = dt.NewRow();

                    for (int i = 0; i < dataReader.FieldCount; i++)
                    {
                        row[i] = dataReader.GetValue(i);
                    }

                    dt.Rows.Add(row);
                }

                dataReader.Close();
                sr.Close();
            }
            else
            {
                // Header row present: DataTable.Load handles column creation.
                // try/finally (no catch) preserves the original exception and its
                // stack trace — the previous "throw ex;" reset the trace.
                try
                {
                    dt.Load(dataReader, LoadOption.Upsert);
                }
                finally
                {
                    dataReader.Close();
                    sr.Close();
                }
            }

            //CsvHelper seems to make DataTable Columns readonly
            foreach (DataColumn dc in dt.Columns)
            {
                dc.ReadOnly = false;
            }

            return(dt);
        }