/// <summary>
/// Return all the records. T should be an interface with getter properties that match the types and names of the database fields.
/// Optionally, instead of T being an interface, you can pass in an anonymous object with properties that match the
/// database, and then you'll get an IEnumerable of that anonymous type with the data filled in.
/// </summary>
/// <typeparam name="T"></typeparam>
/// <param name="reader">The reader.</param>
/// <param name="prototype">The prototype. Anonymous class instance</param>
/// <returns></returns>
public static IEnumerable<T> AllRecords<T>(this DBFReader reader, T prototype = null) where T : class
{
    var tType = typeof(T);

    // Match properties of T to DBF fields by name (case-insensitive).
    var tProperties = tType.GetProperties()
        .Where(it => Array.FindIndex(reader.Fields,
                   f => f.Name.Equals(it.Name, StringComparison.InvariantCultureIgnoreCase)) >= 0)
        .ToList();

    var tProps = tProperties
        .Select(it => Array.FindIndex(reader.Fields,
                    jt => jt.Name.Equals(it.Name, StringComparison.InvariantCultureIgnoreCase)))
        .Where(it => it >= 0)
        .ToArray();

    var tOrderedProps = tProps.OrderBy(it => it).ToArray();
    var tReturn = new List<T>();

    // Anonymous types are compiler generated; construct them directly from the record values.
    if (tType.GetCustomAttributes(typeof(CompilerGeneratedAttribute), false).Any())
    {
        var tAnon = reader.NextRecord(tProps, tOrderedProps);
        while (tAnon != null)
        {
            tReturn.Add((T)Activator.CreateInstance(tType, tAnon));
            tAnon = reader.NextRecord(tProps, tOrderedProps);
        }
        return tReturn;
    }

    // Otherwise wrap each record in a dynamic interceptor and project it onto the interface T.
    var t = reader.NextRecord(tProps, tOrderedProps);
    while (t != null)
    {
        var interceptor = new Enumerable.DBFInterceptor(t, tProperties.Select(it => it.Name).ToArray());
        tReturn.Add(interceptor.ActLike<T>(typeof(Enumerable.IDBFInterceptor)));
        t = reader.NextRecord(tProps, tOrderedProps);
    }
    return tReturn;
}
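As a usage sketch of the anonymous-prototype overload above (the "customers.dbf" file name and the NAME and BALANCE fields, typed as string and decimal, are assumptions for illustration, not part of the original):

// Hypothetical usage: the anonymous prototype's property names and types
// are assumed to match the DBF fields (Char -> string, Numeric -> decimal).
using (var stream = File.Open("customers.dbf", FileMode.Open, FileAccess.Read))
using (var reader = new DBFReader(stream))
{
    var rows = reader.AllRecords(new { NAME = default(string), BALANCE = default(decimal) });
    foreach (var row in rows)
    {
        Console.WriteLine($"{row.NAME}: {row.BALANCE}");
    }
}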
public static List<Inv> GetInvoiceList()
{
    if (!File.Exists("inv.dbf"))
    {
        throw new FileNotFoundException("File \"inv.dbf\" not found!");
    }

    using (Stream rs = File.Open("inv.dbf", FileMode.OpenOrCreate, FileAccess.ReadWrite, FileShare.ReadWrite))
    {
        var rdr = new DBFReader(rs);
        rdr.CharEncoding = Encoding.GetEncoding(874);
        List<Inv> invoice_list = new List<Model.Inv>();

        for (int i = 0; i < rdr.RecordCount; i++)
        {
            try
            {
                object[] obj = rdr.NextRecord();
                invoice_list.Add(new Inv
                {
                    docnum = (string)obj[0],
                    email = (string)obj[1],
                    status = (string)obj[2]
                });
            }
            catch (Exception)
            {
                break;
            }
        }
        return invoice_list;
    }
}
public void checkDataType_N()
{
    Decimal writtenValue;
    using (Stream fos = File.Open(TestPath(), FileMode.OpenOrCreate, FileAccess.ReadWrite))
    using (var writer = new DBFWriter())
    {
        var field = new DBFField("F1", NativeDbType.Numeric, 15, 0);
        writer.Fields = new[] { field };

        writtenValue = 123456789012345L;
        writer.AddRecord(writtenValue);
        writer.Write(fos);
    }

    using (Stream fis = File.Open(TestPath(), FileMode.Open, FileAccess.Read))
    using (var reader = new DBFReader(fis))
    {
        var readValues = reader.NextRecord();

        Assert.That(readValues[0], EqualTo(writtenValue), "Written Value Equals Read");
    }
}
public void checkDataType_M()
{
    var fieldLength = 2400;
    MemoValue writtenValue;
    using (Stream fos = File.Open(TestMemoPath(), FileMode.OpenOrCreate, FileAccess.ReadWrite))
    using (var writer = new DBFWriter
    {
        DataMemoLoc = Path.ChangeExtension(TestMemoPath(), "DBT")
    })
    {
        var field = new DBFField("F1", NativeDbType.Memo);
        writer.Fields = new[] { field };

        writtenValue = new MemoValue(GetCharacters(fieldLength));
        writer.AddRecord(writtenValue);
        writer.Write(fos);
    }

    using (Stream fis = File.Open(TestMemoPath(), FileMode.OpenOrCreate, FileAccess.ReadWrite))
    using (var reader = new DBFReader(fis)
    {
        DataMemoLoc = Path.ChangeExtension(TestMemoPath(), "DBT")
    })
    {
        var readValues = reader.NextRecord();

        Assert.That(readValues[0], EqualTo(writtenValue), "Written Value not equaling Read");
    }
}
public void Export(string dbfile, string ascfile, int nrow, int ncol, int cellsize,
    float xcorner, float ycorner, float nodatavalue = -999)
{
    DBFReader dbf = new DBFReader(dbfile);
    int act_index = dbf.GetFiledNameIndex("active");
    int row_index = dbf.GetFiledNameIndex("row");
    int col_index = dbf.GetFiledNameIndex("col");
    int elev_index = dbf.GetFiledNameIndex("elev");

    DataCube<float> mat = new DataCube<float>(1, nrow, ncol);
    int nact = 0;
    for (int n = 0; n < dbf.RecordCount; n++)
    {
        var obj = dbf.NextRecord();
        int row = int.Parse(obj[row_index].ToString());
        int col = int.Parse(obj[col_index].ToString());
        int act = int.Parse(obj[act_index].ToString());
        if (act > 0)
        {
            mat[0, row - 1, col - 1] = float.Parse(obj[elev_index].ToString());
            nact++;
        }
        else
        {
            mat[0, row - 1, col - 1] = nodatavalue;
        }
    }
    dbf.Close();

    // this.Save<float>(ascfile, Value, nrow, ncol, cellsize, xcorner, ycorner, nodatavalue);
}
public void Export(string ac_filename, string[] dbffiles, int[] index)
{
    int steps = dbffiles.Length;
    DBFReader dbf = new DBFReader(dbffiles[0]);
    int nfeature = dbf.RecordCount;
    var buf = new DataCube<float>(index.Length, steps, nfeature);
    string[] field = new string[index.Length];
    for (int i = 0; i < index.Length; i++)
    {
        field[i] = dbf.Fields[index[i]].Name;
    }
    dbf.Close();

    for (int t = 0; t < steps; t++)
    {
        dbf = new DBFReader(dbffiles[t]);
        for (int n = 0; n < dbf.RecordCount; n++)
        {
            var obj = dbf.NextRecord();
            for (int i = 0; i < index.Length; i++)
            {
                buf[i, t, n] = float.Parse(obj[index[i]].ToString());
            }
        }
        dbf.Close();
    }

    buf.Variables = field;
    DataCubeStreamWriter ac = new DataCubeStreamWriter(ac_filename);
    ac.WriteAll(buf);
}
public void checkLongCharLengthWithClipper()
{
    var fieldLength = 750;
    string writtenValue;
    using (Stream fos = File.Open(TestClipLongPath(), FileMode.OpenOrCreate, FileAccess.ReadWrite))
    {
        var writer = new DBFWriter();
        var field = new DBFField("F1", NativeDbType.Char, fieldLength);
        writer.Fields = new[] { field };

        writtenValue = GetCharacters(fieldLength);
        writer.AddRecord(writtenValue);
        writer.Write(fos);
    }

    using (Stream fis = File.Open(TestClipLongPath(), FileMode.OpenOrCreate, FileAccess.ReadWrite))
    {
        var reader = new DBFReader(fis);
        Assert.That(reader.Fields.First().FieldLength, EqualTo(fieldLength));
        var readValues = reader.NextRecord();

        Assert.That(readValues[0], EqualTo(writtenValue), "Written Value not equaling Read");
    }
}
public static List<Data> ReadDBF(FileInfo fi)
{
    const int INDEX_ACCOUNT = 3;
    const int INDEX_KOD = 4;
    const int INDEX_NAME = 5;
    const int INDEX_RL = 6;
    const int INDEX_R2 = 7;
    const int INDEX_CODE = 8;
    const int INDEX_QUANTITY = 11;

    var datas = new List<Data>();
    try
    {
        using (var dbf_stream = File.Open(fi.FullName, FileMode.Open, FileAccess.ReadWrite))
        {
            var reader = new DBFReader(dbf_stream)
            {
                CharEncoding = Encoding.Default
            };
            for (var j = 0; j < reader.RecordCount; j++)
            {
                try
                {
                    Application.DoEvents();
                    var record = reader.NextRecord();
                    var data = new Data(
                        record[INDEX_ACCOUNT],
                        record[INDEX_NAME],
                        record[INDEX_KOD],
                        record[INDEX_CODE],
                        record[INDEX_RL],
                        record[INDEX_R2],
                        record[INDEX_QUANTITY]
                    );
                    datas.Add(data);
                }
                catch (Exception ex)
                {
                    AppLogAndEventHelper.Instance.RaiseDebugInfo($"Row = {j} Error = {ex.Message}");
                    AppLogAndEventHelper.Instance.RaiseError(ex);
                }
            }
        }
    }
    catch (Exception ex)
    {
        AppLogAndEventHelper.Instance.RaiseError(ex);
    }
    return datas;
}
/// <summary>
/// Returns a list of dynamic objects whose properties and types match up with that database name.
/// </summary>
/// <param name="reader">The reader.</param>
/// <param name="whereColumn">The where column name.</param>
/// <param name="whereColumnEquals">What the where column should equal.</param>
/// <returns></returns>
public static IEnumerable<dynamic> DynamicAllRecords(this DBFReader reader, string whereColumn = null, dynamic whereColumnEquals = null)
{
    var tProperties = reader.GetSelectFields().Select(it => it.Name).ToArray();

    int? tWhereColumn = null;
    if (!String.IsNullOrEmpty(whereColumn))
    {
        tWhereColumn = Array.FindIndex(tProperties,
            it => it.Equals(whereColumn, StringComparison.InvariantCultureIgnoreCase));
    }

    var tReturn = new List<object>();
    object[] t = reader.NextRecord();
    while (t != null)
    {
        if (tWhereColumn.HasValue)
        {
            dynamic tO = t[tWhereColumn.Value];
            if (!tO.Equals(whereColumnEquals))
            {
                t = reader.NextRecord();
                continue;
            }
        }

        var tIntercepter = new Enumerable.DBFIntercepter(t, tProperties);
        tReturn.Add(tIntercepter);
        t = reader.NextRecord();
    }

    return tReturn;
}
/// <summary>
/// Returns a list of dynamic objects whose properties and types match up with that database name.
/// </summary>
/// <param name="reader">The reader.</param>
/// <param name="whereColumn">The where column name.</param>
/// <param name="whereColumnEquals">What the where column should equal.</param>
/// <returns></returns>
public static IEnumerable<dynamic> DynamicAllRecords(this DBFReader reader, string whereColumn = null, dynamic whereColumnEquals = null)
{
    var props = reader.GetSelectFields().Select(it => it.Name).ToArray();

    int? whereColumnIndex = null;
    if (!String.IsNullOrEmpty(whereColumn))
    {
        whereColumnIndex = Array.FindIndex(props,
            it => it.Equals(whereColumn, StringComparison.InvariantCultureIgnoreCase));
    }

    var tReturn = new List<object>();
    var t = reader.NextRecord();
    while (t != null)
    {
        if (whereColumnIndex is int i)
        {
            dynamic tO = t[i];
            if (!tO.Equals(whereColumnEquals))
            {
                t = reader.NextRecord();
                continue;
            }
        }

        var interceptor = new Enumerable.DBFInterceptor(t, props);
        tReturn.Add(interceptor);
        t = reader.NextRecord();
    }

    return tReturn;
}
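A brief usage sketch of DynamicAllRecords (the "inv.dbf" file and the STATUS and DOCNUM column names are illustrative assumptions; columns are exposed as dynamic members on each returned record):

// Hypothetical usage: keep only records whose STATUS column equals "PAID".
using (var stream = File.Open("inv.dbf", FileMode.Open, FileAccess.Read))
using (var reader = new DBFReader(stream))
{
    foreach (var record in reader.DynamicAllRecords(whereColumn: "STATUS", whereColumnEquals: "PAID"))
    {
        Console.WriteLine(record.DOCNUM); // column values accessed as dynamic members
    }
}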
public void ReadTest()
{
    using (var dbfstream = DbfsData("dbase_8b.dbf"))
    using (var memoStream = DbfsData("dbase_8b.dbt"))
    using (DBFReader dbfr = new DBFReader(dbfstream) { DataMemo = memoStream })
    {
        object[] record = dbfr.NextRecord();

        // This line would hang, issue #19 https://github.com/ekonbenefits/dotnetdbf/issues/19
        Assert.Throws<DBTException>(() => record[5].ToString());
    }
}
public void Test()
{
    using (Stream fis = File.Open(@"f:\st\dev\testdata\p.dbf", FileMode.OpenOrCreate, FileAccess.ReadWrite))
    using (var reader = new DBFReader(fis)
    {
        DataMemoLoc = Path.ChangeExtension(@"f:\st\dev\testdata\p.dbf", "DBT")
    })
    {
        var readValues = reader.NextRecord();
        Console.WriteLine(readValues);
    }
}
public static ManageDataResult AddInvoiceRecord(Inv inv_to_add)
{
    if (!File.Exists("inv.dbf"))
    {
        throw new FileNotFoundException("File \"inv.dbf\" not found!");
    }

    try
    {
        using (Stream stream = File.Open("inv.dbf", FileMode.OpenOrCreate, FileAccess.ReadWrite, FileShare.Read))
        {
            var reader = new DBFReader(stream);
            reader.CharEncoding = Encoding.GetEncoding(874);
            List<Inv> invoice_list = new List<Model.Inv>();

            var writer = new DBFWriter(stream);
            for (int i = 0; i < reader.RecordCount; i++)
            {
                try
                {
                    object[] obj = reader.NextRecord();

                    // add existing record first
                    writer.AddRecord((string)obj[0], (string)obj[1], (string)obj[2]);
                }
                catch (Exception)
                {
                    break;
                }
            }

            // add a target record
            writer.AddRecord(inv_to_add.docnum, inv_to_add.email, inv_to_add.status);
        }
        return new ManageDataResult { success = true, message = "success" };
    }
    catch (Exception ex)
    {
        //Console.WriteLine("File is in use by another process.");
        return new ManageDataResult { success = false, message = ex.Message };
    }
}
public void Export(ITriangularGrid grid, string ac_filename, string[] dbffiles, int[] index)
{
    int steps = dbffiles.Length;
    int nfeature = grid.VertexCount;
    var buf = new DataCube<float>(index.Length, steps, nfeature);
    string[] field = new string[index.Length];

    for (int i = 0; i < steps; i++)
    {
        DBFReader dbf = new DBFReader(dbffiles[i]);
        var vec = new double[index.Length][];
        for (int t = 0; t < index.Length; t++)
        {
            vec[t] = new double[dbf.RecordCount];
        }

        for (int n = 0; n < dbf.RecordCount; n++)
        {
            var obj = dbf.NextRecord();
            for (int t = 0; t < index.Length; t++)
            {
                vec[t][n] = double.Parse(obj[index[t]].ToString());
            }
        }

        for (int t = 0; t < index.Length; t++)
        {
            for (int k = 0; k < grid.VertexCount; k++)
            {
                var cells = grid.Topology.NodeConnectedCells[k];
                double temp = 0;
                for (int c = 0; c < cells.Length; c++)
                {
                    temp += vec[t][cells[c]];
                }
                temp /= cells.Length;
                buf[t, i, k] = (float)temp;
            }
            field[t] = dbf.Fields[index[t]].Name;
        }
        dbf.Close();
    }

    buf.Variables = field;
    DataCubeStreamWriter ac = new DataCubeStreamWriter(ac_filename);
    ac.WriteAll(buf);
}
private object ImportText()
{
    var fileName = Project.Current.MapPath(FileName);
    if (string.IsNullOrWhiteSpace(fileName) || !File.Exists(fileName))
    {
        throw new FileNotFoundException($"File '{fileName}' not found.");
    }

    using var stream = new FileStream(fileName, FileMode.Open, FileAccess.Read);
    var reader = new DBFReader(stream);
    var table = new DataTable("DbfData");

    foreach (var field in reader.Fields)
    {
        var column = table.Columns.Add(field.Name, field.Type);
        switch (field.DataType)
        {
            case NativeDbType.Char:
            case NativeDbType.Memo:
                column.MaxLength = field.FieldLength;
                break;
        }
    }

    table.BeginLoadData();
    try
    {
        object[] records;
        while ((records = reader.NextRecord()) != null)
        {
            table.Rows.Add(records);
        }
    }
    finally
    {
        table.EndLoadData();
    }

    return table;
}
public void checkSelect()
{
    var fieldLength = 2400;
    string writtenValue;
    using (Stream fos = File.Open(TestSelectPath(), FileMode.OpenOrCreate, FileAccess.ReadWrite))
    {
        var writer = new DBFWriter
        {
            DataMemoLoc = Path.ChangeExtension(TestSelectPath(), "DBT")
        };
        var field = new DBFField("F1", NativeDbType.Memo);
        var field2 = new DBFField("F2", NativeDbType.Numeric, 10);
        var field3 = new DBFField("F3", NativeDbType.Char, 10);
        writer.Fields = new[] { field, field2, field3 };

        writtenValue = "alpha";
        writer.AddRecord(new MemoValue(GetCharacters(fieldLength)), 10, writtenValue);
        writer.Write(fos);
    }

    using (Stream fis = File.Open(TestSelectPath(), FileMode.OpenOrCreate, FileAccess.ReadWrite))
    {
        var reader = new DBFReader(fis)
        {
            DataMemoLoc = Path.ChangeExtension(TestSelectPath(), "DBT")
        };
        reader.SetSelectFields("F3");
        var readValues = reader.NextRecord();

        Assert.That(readValues[0], StartsWith(writtenValue), "Written Value not equaling Read");
    }
}
public bool NextRecord()
{
    // DBFReader.NextRecord() returns null once the end of the file is reached,
    // so guard against null before checking the length.
    currentRow = dbf.NextRecord();
    return currentRow != null && currentRow.Length > 0;
}
/// <summary>
/// Reading from a DBF file
/// </summary>
/// <typeparam name="T">Generic type</typeparam>
/// <param name="path">File path</param>
/// <param name="winCoding">Encoding windows (1251), otherwise DOS (866)</param>
/// <param name="filter">Data filtering function</param>
/// <param name="skip">Skip records</param>
/// <param name="take">Get the number of records</param>
/// <returns></returns>
public static IList<T> Read<T>(
    string path,
    bool winCoding = true,
    Func<T, bool> filter = null,
    int skip = 0,
    int take = int.MaxValue) where T : class
{
    if (!File.Exists(path))
    {
        throw new FileNotFoundException("File not found", path);
    }

    var instance = Instance;
    using (var fileStream = File.Open(path, FileMode.Open, FileAccess.Read))
    {
        var binaryReader = new BinaryReader(fileStream, Encoding.ASCII);
        var reader = new DBFReader(fileStream);
        reader.CharEncoding = Configuration.GetEncoding(winCoding);

        var type = typeof(T);
        // Only writable properties that have a matching DBF field name (case-insensitive).
        var props = type.GetProperties()
            .Where(p => p.CanWrite &&
                        Array.FindIndex(reader.Fields,
                            f => f.Name.Equals(p.Name, StringComparison.InvariantCultureIgnoreCase)) >= 0)
            .ToList();

        Dictionary<PropertyInfo, int> propIndexDictionary = new Dictionary<PropertyInfo, int>();
        foreach (var prop in props)
        {
            var index = Array.FindIndex(reader.Fields,
                f => f.Name.Equals(prop.Name, StringComparison.InvariantCultureIgnoreCase));
            if (index >= 0)
            {
                propIndexDictionary.Add(prop, index);
            }
        }

        var retval = new List<T>();
        var rowValues = reader.NextRecord();
        int filteredCount = 0;
        int addedCount = 0;
        while (rowValues != null)
        {
            var newObj = (T)Activator.CreateInstance(typeof(T));
            foreach (var propIndex in propIndexDictionary)
            {
                var fieldValue = rowValues[propIndex.Value];
                if (fieldValue != null)
                {
                    var propValue = Convert.ChangeType(fieldValue, propIndex.Key.PropertyType);
                    propIndex.Key.SetValue(newObj, propValue);
                }
                else
                {
                    //var t = propIndex.Key.PropertyType;
                    //propIndex.Key.SetValue(newObj, default(propIndex.Key.PropertyType)));
                }
            }

            // Filtering
            if (filter != null)
            {
                if (!filter(newObj))
                {
                    rowValues = reader.NextRecord();
                    continue;
                }
            }

            // Take only the requested block of records
            filteredCount++;
            if (filteredCount > skip)
            {
                if (addedCount >= take)
                {
                    break;
                }
                retval.Add(newObj);
                addedCount++;
            }

            rowValues = reader.NextRecord();
        }

        return retval;
    }
}
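A short usage sketch of Read&lt;T&gt; (the Invoice POCO, its field names, and the "inv.dbf" path are assumptions for illustration; property names must match the DBF field names, and the call assumes the static class declaring Read&lt;T&gt; is in scope):

// Hypothetical POCO whose writable properties match assumed DBF field names.
public class Invoice
{
    public string DOCNUM { get; set; }
    public string EMAIL { get; set; }
    public string STATUS { get; set; }
}

// Read up to 100 records (skipping none) whose STATUS is "PAID",
// decoding text with the windows-1251 code page.
IList<Invoice> paid = Read<Invoice>(
    "inv.dbf",
    winCoding: true,
    filter: inv => inv.STATUS == "PAID",
    skip: 0,
    take: 100);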