/// <summary>
/// Finds a row by index key bytes (exact match).
/// A key shorter than the index key length is padded with 'appendByte' before the search.
/// </summary>
/// <param name="key">Key bytes to search for; must not be longer than the index key length.</param>
/// <param name="appendByte">Fill byte used to extend a short key to full length (0x20/space by default);
/// pass null to disallow short keys.</param>
/// <returns>The row returned by GetRow for the found key position.</returns>
public DbfRow Seek(byte[] key, byte? appendByte = 0x20)
{
  if (key.Length > keyBytesLen)
  {
    throw ExceptionFactory.CreateArgumentException("key", "Key byte array length more then '{0}'!", keyBytesLen);
  }
  else if (key.Length < keyBytesLen)
  {
    if (appendByte == null)
    {
      throw ExceptionFactory.CreateArgumentException("key", "Key byte array length less then '{0}'!", keyBytesLen);
    }

    // Pad the short key up to the full key length with the fill byte.
    // (Array.Resize replaces only the local reference; the caller's array is untouched.)
    int startIx = key.Length;

    Array.Resize(ref key, keyBytesLen);

    for (int i = startIx; i < keyBytesLen; i++)
    {
      key[i] = (byte)appendByte;
    }

    // BUGFIX: the original threw NotImplementedException here, which made every
    // short-key seek fail even though the padding above had already completed.
  }

  return GetRow(SeekKey(key, false));
}
/// <summary>
/// Read content of memo field.
/// (multithread calls enabled)
/// </summary>
/// <param name="blockNo">Pointer of first block in memo (DBT) file, readed from DBF's memo field.</param>
/// <returns>The raw memo content bytes.</returns>
public override byte[] ReadMemoBytes(int blockNo)
{
  if (blockNo < 1)
  {
    throw ExceptionFactory.CreateArgumentException("blockNo", "ReadMemoBytes({0}) invalid block number!", blockNo);
  }

  // BUGFIX: multiply as long — 'blockNo * blockSize' can overflow Int32 for DBT files over 2 GB.
  if (((long)blockNo * blockSize) >= stream.Length)
  {
    throw ExceptionFactory.CreateArgumentException("blockNo", "ReadMemoBytes({0}) out of dbt stream length!", blockNo);
  }

  byte[] retBytes;

  lock (lockObject)                                         // serialize stream access for multithreaded callers
  {
    switch (memoType)
    {
      case MemoFileType.DBT_Ver3:
        retBytes = ReadMemoArray3(blockNo);
        break;

      case MemoFileType.DBT_Ver4:
        retBytes = ReadMemoArray4(blockNo);
        break;

      default:
        // Internal consistency error: memoType must be one of the handled DBT versions.
        throw new InvalidOperationException("MemoFileDBT/ReadMemoBytes: invalid switch case!");
    }
  }

  return retBytes;
}
/// <summary>
/// Reads a dBase IV (DBT version 4) memo: a 4-byte block signal, a 4-byte length, then the content.
/// </summary>
/// <param name="blockNo">First block number of the memo inside the DBT file.</param>
/// <returns>The memo content bytes.</returns>
private byte[] ReadMemoArray4(int blockNo)
{
  // Don't wrap in 'using (BinaryReader reader...)': disposing the reader would dispose 'stream' too!
  BinaryReader reader = new BinaryReader(stream);

  // BUGFIX: multiply as long — 'blockNo * blockSize' can overflow Int32 for DBT files over 2 GB.
  reader.BaseStream.Position = ((long)blockNo * blockSize);

  byte[] signal = reader.ReadBytes(4);

  if (!isDbt4MemoBlockSignal(signal))
  {
    throw ExceptionFactory.CreateArgumentException("blockNo", "ReadMemoArray4({0}) invalid block number or wrong fileformat or file corrupted!", blockNo);
  }

  int memoLen = reader.ReadInt32();

  // Guard against corrupted length fields before allocating the buffer.
  if ((memoLen < 1) || (memoLen > maxBlobSize))
  {
    throw new Exception(String.Format("ReadMemoArray4({0}): memo signal OK, but memo length error! [{1}/1..{2}]", blockNo, memoLen, maxBlobSize));
  }

  return reader.ReadBytes(memoLen);
}
/// <summary>
/// Reads a FoxPro (FPT) memo: a big-endian 4-byte block type (1 = memo), a 4-byte length, then the content.
/// </summary>
/// <param name="blockNo">First block number of the memo inside the FPT file.</param>
/// <returns>The memo content bytes.</returns>
private byte[] ReadMemoArrayFPT(int blockNo)
{
  // Don't wrap in 'using (...)': disposing the reader would dispose 'stream' too!
  var reader = new BigEndianBinaryReader(stream);

  // BUGFIX: multiply as long — 'blockNo * blockSize' can overflow Int32 for FPT files over 2 GB.
  reader.BaseStream.Position = ((long)blockNo * blockSize);

  byte[] signal = reader.ReadBytes(4);

  // The block type is a big-endian int32, so for type 1 (memo/text) the low byte is signal[3].
  if ((int)signal[3] != 1)
  {
    // BUGFIX: report signal[3] — the byte actually tested above — instead of signal[0],
    // which is always the high byte of the big-endian type and rarely informative.
    throw ExceptionFactory.CreateArgumentException("blockNo", "ReadMemoArrayFPT({0}) invalid memo type {1}", blockNo, (int)(signal[3]));
  }

  int memoLen = reader.ReadInt32();

  // Guard against corrupted length fields before allocating the buffer.
  if ((memoLen < 1) || (memoLen > maxBlobSize))
  {
    throw new Exception(String.Format(
            "ReadMemoArrayFPT({0}): memo signal OK, but memo length error! [{1}/1..{2}]",
            blockNo, memoLen, maxBlobSize));
  }

  return reader.ReadBytes(memoLen);
}
/// <summary>
/// Finds a row by a (possibly partial) index key using a soft/non-exact search.
/// </summary>
/// <param name="key">Key bytes; may be shorter than, but never longer than, the index key length.</param>
/// <returns>The row returned by GetRow for the position located by the soft key search.</returns>
public DbfRow SoftSeek(byte[] key)
{
  if (key.Length > keyBytesLen)
  {
    throw ExceptionFactory.CreateArgumentException("key", "Key byte array length more then '{0}'!", keyBytesLen);
  }

  int foundPosition = SeekKey(key, true);

  return GetRow(foundPosition);
}
// NOTE(review): presumably controls whether a record-number overflow throws — confirm against callers.
public bool recNoOverflowException = true;

/// <summary>
/// Creates a Clipper-index reader bound to a DBF table and positions it at the first row (Top).
/// </summary>
/// <param name="dbfTable">The table to read; must not be null.</param>
/// <param name="skipDeleted">True to leave deleted rows out of the results.</param>
internal ClipperReader(DbfTable dbfTable, bool skipDeleted)
{
  if (dbfTable == null)
  {
    throw ExceptionFactory.CreateArgumentException("dbfTable", "null parameter");
  }

  this.skipDeleted = skipDeleted;
  this._dbfTable   = dbfTable;

  Top();
}
public bool skipDeleted = true; // leave out deleted rows from result

/// <summary>
/// Creates a sequential reader over a DBF table, starting at 'startRecNo' (negative values are clamped to 0).
/// The skipDeleted setting is inherited from the table.
/// </summary>
/// <param name="dbfTable">The table to read; must not be null.</param>
/// <param name="startRecNo">Zero-based record number to start from.</param>
internal Reader(DbfTable dbfTable, int startRecNo = 0)
{
  if (dbfTable == null)
  {
    throw ExceptionFactory.CreateArgumentException("dbfTable", "null parameter");
  }

  this.dbfTable    = dbfTable;
  this.skipDeleted = dbfTable.skipDeleted;
  this.nextRecNo   = (startRecNo < 0) ? 0 : startRecNo;
}
public static bool[] enabledKeyChars = new bool[enabledKeyCharsLen]; // It can be modified by users if only it's a problem!

#endregion

#region constructor -----------------------------------------------------------------------------------

/// <summary>
/// Base constructor for index file handlers: stores the index stream, the owning table,
/// the deleted-row filter (inherited from the table when not given) and the page cache size.
/// </summary>
/// <param name="stream">The index file stream; must not be null.</param>
/// <param name="dbfTable">The owning DBF table; must not be null.</param>
/// <param name="skipDeleted">Deleted-row filter; null means "use the table's setting".</param>
/// <param name="indexPageCacheSize">Number of index pages to cache (0 = no cache).</param>
internal IndexFileBase(Stream stream, DbfTable dbfTable, bool? skipDeleted = null, int indexPageCacheSize = 0)
{
  disposed = false;

  if (stream == null)
  {
    throw ExceptionFactory.CreateArgumentException("stream", "IndexFileXXXX/stream is null!");
  }

  // BUGFIX: guard dbfTable too — 'dbfTable.skipDeleted' below would otherwise raise a
  // NullReferenceException instead of a descriptive argument error (sibling constructors
  // ClipperReader/Reader already validate their dbfTable parameter this way).
  if (dbfTable == null)
  {
    throw ExceptionFactory.CreateArgumentException("dbfTable", "IndexFileXXXX/dbfTable is null!");
  }

  this.stream             = stream;
  this._dbfTable          = dbfTable;
  this.skipDeleted        = skipDeleted ?? dbfTable.skipDeleted;
  this.indexPageCacheSize = indexPageCacheSize;
}
/// <summary>
/// Base constructor for memo file handlers: stores the memo stream and the text encoding
/// used to decode memo content.
/// </summary>
/// <param name="stream">The memo file stream; must not be null.</param>
/// <param name="encoding">The character encoding of memo texts; must not be null.</param>
internal MemoFileBase(Stream stream, Encoding encoding)
{
  disposed = false;

  // Validate both arguments up front; stream is checked first, matching the original order.
  if (stream == null)
  {
    throw ExceptionFactory.CreateArgumentException("stream", "MemoFileXXXX/stream is null!");
  }

  if (encoding == null)
  {
    throw ExceptionFactory.CreateArgumentException("encoding", "MemoFileXXXX/encoding is null!");
  }

  this.stream   = stream;
  this.encoding = encoding;
}
/// <summary>
/// Opens a DBF table over a readable, seekable stream: reads the header, resolves the
/// encoding (from the header's codepage byte when none was supplied), reads the column
/// definitions and sanity-checks the file length.
/// </summary>
/// <param name="stream">The DBF data stream; must be readable and seekable.</param>
/// <param name="parameters">Open parameters (encoding, memo handling, etc.).</param>
protected DbfTable(Stream stream, DbfTableParameters parameters)
{
  if (stream == null)
  {
    throw new ArgumentNullException("stream");
  }

  if (!stream.CanRead)
  {
    throw ExceptionFactory.CreateArgumentException("stream", "The stream does not allow reading (CanRead property returns false).");
  }

  if (!stream.CanSeek)
  {
    // BUGFIX: message said "reading" — copy/paste from the CanRead check above.
    throw ExceptionFactory.CreateArgumentException("stream", "The stream does not allow seeking (CanSeek property returns false).");
  }

  this._stream    = stream;
  this.parameters = parameters;

  RefreshHeaderInfo();

  // If no encoding was supplied, derive it from the DBF header's codepage byte.
  if (this.parameters.encoding == null)
  {
    this.parameters.encoding = ReadDbfHeader_Encoding(_header.codepageCode);

    if (this.parameters.encoding == null)
    {
      throw new Exception("DbfTable: the DBF file don't contains codepage information!");
    }
  }

  this._columns = ReadDbfColumns(_stream, this.parameters.encoding, _header.newHeaderStructure, this.parameters.openMemo);

  // Sanity-check the data file length: header + records, +/- 1 byte because dBase and
  // Clipper differ (there is or there isn't a terminator character at end of DBF data file).
  // BUGFIX: compute as long — 'recCount * rowLength' can overflow Int32 for tables over 2 GB.
  long calcLen = _header.firstRecordPosition + ((long)_header.recCount * _header.rowLength) + 1;

  if ((stream.Length < calcLen - 1) || (stream.Length > calcLen + 1))
  {
    throw ExceptionFactory.CreateArgumentOutOfRangeException("DBF table", "Datafile length error! [got: {0} expected: {1}]", stream.Length, calcLen);
  }
}
/// <summary>
/// Moves the cursor 'step' rows in the given direction, one row at a time.
/// </summary>
/// <param name="forward">Direction of movement.</param>
/// <param name="step">Number of rows to move; must be at least 1.</param>
/// <returns>False as soon as any single move fails; otherwise the current 'eof' flag.
/// NOTE(review): returning 'eof' on full success looks suspicious — verify intent against callers.</returns>
private bool MarchingMore(bool forward, int step)
{
  if (step < 1)
  {
    throw ExceptionFactory.CreateArgumentException("MarchingMore/step", "Step parameter value must more then 0 !");
  }

  int remaining = step;

  while (remaining > 0)
  {
    if (!MarchingOne(forward))
    {
      return false;
    }

    remaining--;
  }

  return eof;
}
/// <summary>
/// Re-binds this row to another table (or detaches it when 'dbfTable' is null).
/// The target table's column definitions must be identical to this row's columns.
/// </summary>
/// <param name="dbfTable">The table to attach to, or null to detach.</param>
/// <param name="recNo">The record number of this row in the target table.</param>
internal void AtachedToAnotherTable(DbfTable dbfTable, int recNo)
{
  this._dbfTableClassID = (dbfTable != null) ? dbfTable.dbfTableClassID : Guid.Empty;
  this._recNo = recNo;

  if ((dbfTable != null) && !IsIdenticalColumnsDefinition(this._columns, dbfTable._columns))
  {
    throw ExceptionFactory.CreateArgumentException("dbfTable", "DbfRow.AtachedToAnotherTable/this.columns and dbfTable.columns aren't identical !");
  }

  // Flag cached memo values that carry data as modified — presumably so they are
  // re-written against the new table (confirm against memo write path) — and clear
  // the corresponding memo pointer fields in the row buffer.
  foreach (var item in memoCache)
  {
    bool hasData = (item.data != null) && (item.data.Length > 0);

    item.modified = hasData;
    (item.column as Column).SetNull(_buffer);
  }
}
/// <summary>
/// Compares two column-definition collections for structural identity, independently of
/// column order: name (case-insensitive), dbf type, size, decimals, CLR type and offset
/// must all match pairwise after sorting both sides by name.
/// </summary>
/// <param name="columns1">First column collection; must not be null.</param>
/// <param name="columns2">Second column collection; must not be null.</param>
/// <returns>True when the two definitions are identical.</returns>
public static bool IsIdenticalColumnsDefinition(ICollection<IColumn> columns1, ICollection<IColumn> columns2)
{
  if (columns1 == null)
  {
    throw ExceptionFactory.CreateArgumentException("columns1", "Null parameter value not allowed!");
  }

  if (columns2 == null)
  {
    throw ExceptionFactory.CreateArgumentException("columns2", "Null parameter value not allowed!");
  }

  // Cheap size check first — no need to copy and sort when the counts already differ.
  if (columns1.Count != columns2.Count)
  {
    return false;
  }

  List<Column> cols1 = new List<Column>();
  List<Column> cols2 = new List<Column>();

  foreach (var item in columns1)
  {
    cols1.Add(item as Column);
  }

  foreach (var item in columns2)
  {
    cols2.Add(item as Column);
  }

  // BUGFIX: column names are identifiers, so compare them ordinally (culture-independent).
  // The original 'String.Compare(a, b, true)' is culture-sensitive and can misbehave
  // under some locales (e.g. the Turkish 'i'/'I' casing rules).
  cols1.Sort((colA, colB) => String.Compare(colA.name, colB.name, StringComparison.OrdinalIgnoreCase));
  cols2.Sort((colA, colB) => String.Compare(colA.name, colB.name, StringComparison.OrdinalIgnoreCase));

  for (int i = 0; i < cols1.Count; i++)
  {
    if (String.Compare(cols1[i].name, cols2[i].name, StringComparison.OrdinalIgnoreCase) != 0)
    {
      return false;
    }

    if (cols1[i].dbfType != cols2[i].dbfType)
    {
      return false;
    }

    if (cols1[i].size != cols2[i].size)
    {
      return false;
    }

    if (cols1[i].dec != cols2[i].dec)
    {
      return false;
    }

    if (cols1[i].type != cols2[i].type)
    {
      return false;
    }

    if (cols1[i].offset != cols2[i].offset)
    {
      return false;
    }
  }

  return true;
}