Example No. 1
        /// <summary>
        /// Reads a record specified by index. This method requires the stream to be seekable.
        /// If you are using an HTTP stream, or any stream that cannot seek, use the ReadNext() methods to read records sequentially.
        /// </summary>
        /// <param name="index">Zero based index.</param>
        /// <returns>Null if record can not be read, otherwise returns a new record.</returns>
        public DbfRecord Read(int index)
        {
            //create a new record and fill it.
            DbfRecord orec = new DbfRecord(_header);

            return(Read(index, orec) ? orec : null);
        }
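
A minimal usage sketch for the indexed overload above, assuming a seekable file stream and the usual usings (System, System.IO); the file name "data.dbf" and the column "NAME" are placeholders, not part of the original example:

        private static void ReadByIndexSketch()
        {
            //indexed Read() needs a seekable stream; a plain file stream qualifies
            var dbf = new DbfFile();
            dbf.Open("data.dbf", FileMode.Open);

            DbfRecord rec = dbf.Read(10);       //null if index 10 is past the end of the file
            if (rec != null)
                Console.WriteLine(rec["NAME"]); //"NAME" is a hypothetical column

            dbf.Close();
        }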
Example No. 2
        private static void TestReadFile()
        {
            //create a simple DBF file and output to args[0]
            var odbf = new DbfFile(Encoding.GetEncoding(1252));

            odbf.Open(Path.Combine(TestPath,"TestNew2.dbf"), FileMode.Open); //State_vars1.dbf  county_vars1.dbf

            //if (File.Exists("P:\\Development\\Library\\CS\\DbfLib\\DBFSamples\\filesource.txt"))
            //  File.Delete("P:\\Development\\Library\\CS\\DbfLib\\DBFSamples\\filesource.txt");

            var ofs = new FileStream(Path.Combine(TestPath,"filesource.txt"), FileMode.Create);
            var osw = new StreamWriter(ofs, Encoding.Default);

            //read and print records to screen...
            var orec = new DbfRecord(odbf.Header);

            for (int i = 0; i < odbf.Header.RecordCount; i++)
            {
                if (!odbf.Read(i, orec))
                    break;
                osw.WriteLine("index: " + orec.RecordIndex + ": " + orec);
            }

            /*
            while (odbf.ReadNext(orec))
            {
                osw.WriteLine("index: " + orec.RecordIndex + ": " + orec.ToString());
            }
            */

            osw.Flush();
            osw.Close();
            odbf.Close();   //close the DBF file as well
        }
Example No. 3
 public DbfWriter(SocialExplorer.IO.FastDBF.DbfFile dbf, DataFormatSettings dataFormat)
 {
     _dbf = dbf;
     _orec = new DbfRecord(_dbf.Header);
     _dataFormat = dataFormat;
     _formatter = new CdlValueFormatter(_dataFormat ?? new DataFormatSettings());
 }
Example No. 4
        /// <summary>
        /// Tries to read a record and returns a new record object or null if nothing was read.
        /// </summary>
        /// <returns></returns>
        public DbfRecord ReadNext()
        {
            //create a new record and fill it.
            DbfRecord orec = new DbfRecord(_header);

            return(ReadNext(orec) ? orec : null);
        }
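
A hedged usage sketch for the allocation-per-call overload above, assuming usings for System, System.IO and System.Text; the file name is a placeholder:

        private static void ReadAllSketch()
        {
            var dbf = new DbfFile(Encoding.GetEncoding(1252));
            dbf.Open("data.dbf", FileMode.Open);

            //each call allocates a fresh DbfRecord; the ReadNext(DbfRecord) overload
            //shown elsewhere reuses one record and avoids the per-row allocation.
            DbfRecord rec;
            while ((rec = dbf.ReadNext()) != null)
                Console.WriteLine(rec.RecordIndex + ": " + rec);

            dbf.Close();
        }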
Example No. 5
 public DbfReader(TableInfo structure, SocialExplorer.IO.FastDBF.DbfFile dbf)
     : base(structure)
 {
     _dbf = dbf;
     _array = new string[structure.ColumnCount];
     _irec = new DbfRecord(_dbf.Header);
 }
Example No. 6
        private static void TestReadHttpConn()
        {
            //Open a web file...
            //-------------------------------------
            var oWebDBF = new DbfFile(Encoding.GetEncoding(1252));
            var oWebFile = new WebClient();

            oWebDBF.Open(oWebFile.OpenRead("http://private.socialexplorer.com/State_vars1.dbf"));

            //read and print records to screen...
            var orecWeb = new DbfRecord(oWebDBF.Header);

            var ofs2 = new FileStream(Path.Combine(TestPath,"Webfile.txt"), FileMode.Create);
            var osw2 = new StreamWriter(ofs2);

            bool bIsForwardOnly = oWebDBF.IsForwardOnly;
            bool bIsReadOnly = oWebDBF.IsReadOnly;

            while (oWebDBF.ReadNext(orecWeb))
                osw2.WriteLine("index: " + orecWeb.RecordIndex + ": " + orecWeb);

            osw2.Flush();
            osw2.Close();

            oWebDBF.Close();
        }
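
Since this example touches IsForwardOnly, here is a small sketch of how that flag might drive the choice between indexed reads and sequential reads; the Stream parameter and the choice of encoding are assumptions:

        private static void ReadAnyStreamSketch(Stream source)
        {
            var dbf = new DbfFile(Encoding.GetEncoding(1252));
            dbf.Open(source);

            if (dbf.IsForwardOnly)
            {
                //non-seekable streams (e.g. HTTP responses): sequential access only
                var rec = new DbfRecord(dbf.Header);
                while (dbf.ReadNext(rec))
                    Console.WriteLine(rec.RecordIndex + ": " + rec);
            }
            else
            {
                //seekable streams allow random access by zero-based index
                for (int i = 0; i < dbf.Header.RecordCount; i++)
                    Console.WriteLine(dbf.Read(i));
            }

            dbf.Close();
        }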
Example No. 7
 public Certificate(DbfRecord dbfRecord)
     : base(dbfRecord)
 {
     SNILS = dbfRecord["F1"].Trim();
     SPEC_CODE = dbfRecord["F2"].Trim();
     SPEC_NAME = dbfRecord["F3"].Trim();
     CERT_DATE = ToDateTime(dbfRecord["F4"].Trim());
 }
Example No. 8
 public void Write(DbfRecord orec, bool bClearRecordAfterWrite)
 {
     Write(orec);
     if (bClearRecordAfterWrite)
     {
         orec.Clear();
     }
 }
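
A sketch of how the clear-after-write flag is typically useful when appending many rows from one reused record buffer. It assumes an already open, writable DbfFile and a System.Collections.Generic using; the row data is a placeholder:

        public void WriteRows(DbfFile dbf, IEnumerable<string[]> rows)
        {
            //one record buffer is reused; passing true clears it after every write so that
            //values left over from a previous row cannot bleed into the next one.
            var rec = new DbfRecord(dbf.Header);
            foreach (string[] row in rows)
            {
                for (int i = 0; i < row.Length && i < rec.ColumnCount; i++)
                    rec[i] = row[i];

                dbf.Write(rec, true);
            }
        }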
Example No. 9
        /// <summary>
        /// Update a record. RecordIndex (zero based index) must be more than -1, otherwise an exception is thrown.
        /// You can also use Write method which updates a record if it has RecordIndex or adds a new one if RecordIndex == -1.
        /// RecordIndex is set automatically when you call any Read() methods on this class.
        /// </summary>
        /// <param name="orec"></param>
        public void Update(DbfRecord orec)
        {
            //if header was never written, write it first, then output the record
            if (!_headerWritten)
            {
                WriteHeader();
            }


            //Check if record has an index
            if (orec.RecordIndex < 0)
            {
                throw new Exception("RecordIndex is not set, unable to update record. Set RecordIndex or call Write() method to add a new record to file.");
            }


            //Check if this record matches record size specified by header and number of columns.
            //Client can pass a record from another DBF that is incompatible with this one and that would corrupt the file.
            if (orec.Header != _header && (orec.Header.ColumnCount != _header.ColumnCount || orec.Header.RecordLength != _header.RecordLength))
            {
                throw new Exception("Record parameter does not have the same size and number of columns as the " +
                                    "header specifies. Writing this record would corrupt the DBF file. " +
                                    "This is a programming error, have you mixed up DBF file objects?");
            }

            //DBF file writer can be null if stream is not writable...
            if (_dbfFileWriter == null)
            {
                throw new Exception("Write stream is null. Either you have opened a stream that can not be " +
                                    "written to (a read-only stream) or you have not opened a stream at all.");
            }


            //move to the specified record, note that an exception will be thrown if stream is not seekable!
            //This is ok, since we provide a function to check whether the stream is seekable.
            long nSeekToPosition = (long)_header.HeaderLength + (long)((long)orec.RecordIndex * (long)_header.RecordLength);

            //check whether we can seek to this position; if the position lies beyond the end of the file
            //the record cannot exist, so throw rather than silently writing past the end.
            if (_dbfFile.Length < nSeekToPosition)
            {
                throw new Exception("Invalid record position. Unable to save record.");
            }

            //move to record start
            _dbfFile.Seek(nSeekToPosition, SeekOrigin.Begin);

            //write
            orec.Write(_dbfFile);

            _footerUpdateNeeded = true;
        }
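
A sketch of the read-modify-update cycle the summary describes: Read() sets RecordIndex, so the same record can be handed straight back to Update(). The file name and the value written to column 0 are placeholders:

        private static void UpdateFirstRecordSketch()
        {
            var dbf = new DbfFile();
            dbf.Open("data.dbf", FileMode.Open);    //stream must be seekable and writable

            var rec = new DbfRecord(dbf.Header);
            if (dbf.Read(0, rec))                   //Read() sets rec.RecordIndex = 0
            {
                rec[0] = "modified value";          //overwrite the first column (placeholder value)
                dbf.Update(rec);                    //rewrites record 0 in place
            }

            dbf.Close();
        }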
        /// <summary>
        /// Reads a record specified by index. This method requires the stream to be seekable.
        /// If you are using an HTTP stream, or any stream that cannot seek, use the ReadNext() methods to read records sequentially.
        /// </summary>
        /// <param name="index">Zero based index.</param>
        /// <returns>Null if record can not be read, otherwise returns a new record.</returns>
        public DbfRecord Read(int index)
        {
            //create a new record and fill it.
            DbfRecord orec = new DbfRecord(mHeader);

            if (Read(index, orec))
            {
                return(orec);
            }
            else
            {
                return(null);
            }
        }
Example No. 11
        /// <summary>
        /// Tries to read a record and returns a new record object or null if nothing was read.
        /// </summary>
        /// <returns></returns>
        public DbfRecord ReadNext()
        {
            //create a new record and fill it.
            DbfRecord orec = new DbfRecord(mHeader, 866);

            if (ReadNext(orec))
            {
                return(orec);
            }
            else
            {
                return(null);
            }
        }
Example No. 12
        /// <summary>
        /// Reads a record specified by index into oFillRecord object. You can use this method
        /// to read in and process records without creating and discarding record objects.
        /// Note that you should check that your stream is not forward-only! If you have a forward only stream, use ReadNext() functions.
        /// </summary>
        /// <param name="index">Zero based record index.</param>
        /// <param name="oFillRecord">Record object to fill, must have same size and number of fields as this DBF file header!</param>
        /// <returns>True if a record was read, otherwise false. If you reach the end of file, false is returned and oFillRecord is NOT modified!</returns>
        /// <remarks>
        /// The parameter record (oFillRecord) must match the record size specified by the header and the number of columns as well.
        /// It does not have to come from the same header, but it must match the structure. We are not going as far as to check the size of each field.
        /// The idea is to be flexible but safe. It's a fine balance; these two are almost always at odds.
        /// </remarks>
        public bool Read(int index, DbfRecord oFillRecord)
        {
            //check if we can fill this record with data. it must match record size specified by header and number of columns.
            //we are not checking whether it comes from another DBF file or not, we just need the same structure. Allow flexibility but be safe.
            if (oFillRecord.Header != _header && (oFillRecord.Header.ColumnCount != _header.ColumnCount || oFillRecord.Header.RecordLength != _header.RecordLength))
            {
                throw new Exception("Record parameter does not have the same size and number of columns as the " +
                                    "header specifies, so we are unable to read a record into oFillRecord. " +
                                    "This is a programming error, have you mixed up DBF file objects?");
            }

            //DBF file reader can be null if stream is not readable...
            if (_dbfFileReader == null)
            {
                throw new Exception("ReadStream is null, either you have opened a stream that can not be " +
                                    "read from (a write-only stream) or you have not opened a stream at all.");
            }


            //move to the specified record; note that an exception will be thrown if the stream is not seekable!
            //This is ok, since we provide a function to check whether the stream is seekable.
            long nSeekToPosition = _header.HeaderLength + (index * _header.RecordLength);

            //check whether requested record exists. Subtract 1 from file length (there is a terminating character 1A at the end of the file)
            //so if we hit end of file, there are no more records, so return false;
            if (index < 0 || _dbfFile.Length - 1 <= nSeekToPosition)
            {
                return(false);
            }

            //move to record and read
            _dbfFile.Seek(nSeekToPosition, SeekOrigin.Begin);

            //read the record
            bool bRead = oFillRecord.Read(_dbfFile);

            if (bRead)
            {
                oFillRecord.RecordIndex = index;
            }

            return(bRead);
        }
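
A sketch of the reuse pattern this overload enables: one fill record scanned over the whole file, stopping as soon as Read() reports end of file. It assumes the DbfFile is already open on a seekable stream:

        private static void ScanByIndexSketch(DbfFile dbf)
        {
            //one record object is filled over and over; Read() returns false at end of file
            //without touching the record, so the loop simply stops there.
            var rec = new DbfRecord(dbf.Header);
            for (int i = 0; dbf.Read(i, rec); i++)
            {
                if (!rec.IsDeleted)
                    Console.WriteLine(rec.RecordIndex + ": " + rec);
            }
        }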
Example No. 13
        /// <summary>
        /// Write a record to file. If RecordIndex is present, record will be updated, otherwise a new record will be written.
        /// Header will be output first if this is the first record being written to file.
        /// This method does not require stream seek capability to add a new record.
        /// </summary>
        /// <param name="orec"></param>
        public void Write(DbfRecord orec)
        {
            //if header was never written, write it first, then output the record
            if (!_headerWritten)
            {
                WriteHeader();
            }

            //if this is a new record (RecordIndex should be -1 in that case)
            if (orec.RecordIndex < 0)
            {
                if (_dbfFileWriter.BaseStream.CanSeek)
                {
                    //calculate number of records in file. do not rely on header's RecordCount property since client can change that value.
                    //also note that some DBF files do not have ending 0x1A byte, so we subtract 1 and round off
                    //instead of just cast since cast would just drop decimals.
                    int nNumRecords = (int)Math.Round(((double)(_dbfFile.Length - _header.HeaderLength - 1) / _header.RecordLength));
                    if (nNumRecords < 0)
                    {
                        nNumRecords = 0;
                    }

                    orec.RecordIndex = nNumRecords;
                    Update(orec);
                    _header.RecordCount++;
                }
                else
                {
                    //we can not position this stream, just write out the new record.
                    orec.Write(_dbfFile);
                    _header.RecordCount++;
                }
            }
            else
            {
                Update(orec);
            }
        }
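
A sketch of the routing described above: a record with a negative RecordIndex is appended, and once Write() has assigned an index (which it does on a seekable stream), the next Write() updates that record in place. The DbfFile is assumed open and writable; column 0 and its values are placeholders:

        private static void AppendThenUpdateSketch(DbfFile dbf)
        {
            var rec = new DbfRecord(dbf.Header);

            rec[0] = "first version";
            rec.RecordIndex = -1;       //a negative index means "append as a new record"
            dbf.Write(rec);             //on a seekable stream Write() assigns the real index via Update()

            rec[0] = "second version";
            dbf.Write(rec);             //RecordIndex is now set, so this updates that record in place
        }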
Example No. 14
        /// <summary>
        /// Read next record and fill data into parameter oFillRecord. Returns true if a record was read, otherwise false.
        /// </summary>
        /// <param name="oFillRecord"></param>
        /// <returns></returns>
        public bool ReadNext(DbfRecord oFillRecord)
        {
            //check if we can fill this record with data. it must match record size specified by header and number of columns.
            //we are not checking whether it comes from another DBF file or not, we just need the same structure. Allow flexibility but be safe.
            if (oFillRecord.Header != _header && (oFillRecord.Header.ColumnCount != _header.ColumnCount || oFillRecord.Header.RecordLength != _header.RecordLength))
            {
                throw new Exception("Record parameter does not have the same size and number of columns as the " +
                                    "header specifies, so we are unable to read a record into oFillRecord. " +
                                    "This is a programming error, have you mixed up DBF file objects?");
            }

            //DBF file reader can be null if stream is not readable...
            if (_dbfFileReader == null)
            {
                throw new Exception("Read stream is null, either you have opened a stream that can not be " +
                                    "read from (a write-only stream) or you have not opened a stream at all.");
            }

            //read next record...
            bool bRead = oFillRecord.Read(_dbfFile);

            if (bRead)
            {
                if (_isForwardOnly)
                {
                    //zero based index! set before incrementing count.
                    oFillRecord.RecordIndex = _recordsReadCount;
                    _recordsReadCount++;
                }
                else
                {
                    oFillRecord.RecordIndex = ((int)((_dbfFile.Position - _header.HeaderLength) / _header.RecordLength)) - 1;
                }
            }

            return(bRead);
        }
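
For forward-only streams, ReadNext() is also the only reliable way to count records; a small sketch under that assumption (the header's RecordCount is deliberately not trusted, echoing the writer-side comments elsewhere in these examples):

        private static int CountRecordsSketch(DbfFile dbf)
        {
            //count by reading; RecordIndex is assigned as each record is read.
            var rec = new DbfRecord(dbf.Header);
            int count = 0;
            while (dbf.ReadNext(rec))
                count++;
            return count;
        }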
        /// <summary>
        /// Create the precipitation and temperature gage location files in TXT or DBF format.
        /// </summary>
        private void createGageLocationFile()
        {
            FormatType type = Format;
            if (type != FormatType.ARCSWAT_DBF && type != FormatType.ARCSWAT_TEXT) return;
            if (_stations == null || _stations.Count == 0) return;

            //for ArcSWAT 2012 text format
            if (type == FormatType.ARCSWAT_TEXT)
            {
                StringBuilder sb_p = new StringBuilder();
                StringBuilder sb_t = new StringBuilder();
                sb_p.AppendLine("ID,NAME,LAT,LONG,ELEVATION");
                sb_t.AppendLine("ID,NAME,LAT,LONG,ELEVATION");
                foreach (ECStationInfo info in _stations)
                {
                    sb_p.AppendLine(info.ToArcSWAT2012CSVGageLocation(true)); //precipitation
                    sb_t.AppendLine(info.ToArcSWAT2012CSVGageLocation(false));//temperature
                }
                string pFileName = "pcp.txt";
                string tFileName = "tmp.txt";
                using (StreamWriter writer = new StreamWriter(_path + @"\" + pFileName))
                    writer.Write(sb_p.ToString());
                using (StreamWriter writer = new StreamWriter(_path + @"\" + tFileName))
                    writer.Write(sb_t.ToString());
            }
            else if (type == FormatType.ARCSWAT_DBF)
            {
                string pFileName = "pcp.dbf";
                string tFileName = "tmp.dbf";
                DbfFile pDBF = createDBFGageLocationFile(_path + @"\" + pFileName);
                DbfFile tDBF = createDBFGageLocationFile(_path + @"\" + tFileName);

                DbfRecord pRec = new DbfRecord(pDBF.Header);
                DbfRecord tRec = new DbfRecord(tDBF.Header);

                foreach (ECStationInfo info in _stations)
                {
                    info.ToArcSWAT2012CSVGageLocation(pDBF, true);
                    info.ToArcSWAT2012CSVGageLocation(tDBF, false);
                }
                pDBF.Close();
                tDBF.Close();
            }
        }
        /// <summary>
        /// Write data in given time range as ArcSWAT dbf file
        /// </summary>
        /// <param name="startYear"></param>
        /// <param name="endYear"></param>
        /// <param name="destinationFolder"></param>
        /// <returns></returns>
        private bool save2ArcSWATdbf(int startYear, int endYear, string destinationFolder)
        {
            string timeAffix = getTimeAffix();
            string pFile = string.Format("{0}\\P{1}{2}.dbf", Path.GetFullPath(destinationFolder), _id, timeAffix);  //precipitation
            string tFile = string.Format("{0}\\T{1}{2}.dbf", Path.GetFullPath(destinationFolder), _id, timeAffix);  //temperature

            this.setProgress(0,string.Format("Processing station {0}", _id));
            this.setProgress(0, pFile);
            this.setProgress(0, tFile);

            //create the dbf structure based on ArcSWAT document
            DbfFile pDBF = new DbfFile();
            pDBF.Open(pFile, FileMode.Create);
            pDBF.Header.AddColumn(new DbfColumn("DATE", DbfColumn.DbfColumnType.Date));
            pDBF.Header.AddColumn(new DbfColumn("PCP", DbfColumn.DbfColumnType.Number, 5, 1));

            DbfFile tDBF = new DbfFile();
            tDBF.Open(tFile, FileMode.Create);
            tDBF.Header.AddColumn(new DbfColumn("DATE", DbfColumn.DbfColumnType.Date));
            tDBF.Header.AddColumn(new DbfColumn("MAX", DbfColumn.DbfColumnType.Number, 5, 1));
            tDBF.Header.AddColumn(new DbfColumn("MIN", DbfColumn.DbfColumnType.Number, 5, 1));

            DbfRecord pRec = new DbfRecord(pDBF.Header);
            DbfRecord tRec = new DbfRecord(tDBF.Header);

            int processPercent = 0;
            bool hasResults = false;
            clearFailureYears();
            clearUncompletedYears();
            for (int i = startYear; i <= endYear; i++)
            {
                setProgress(processPercent, string.Format("Downloading data for station: {0}, year: {1}", _id, i));
                string resultsForOneYear = this.retrieveAnnualDailyClimateData(i, true);
                if (resultsForOneYear.Length == 0)
                {
                    addFailureYear(i);
                    continue;
                }

                processPercent += 1;
                setProgress(processPercent, "Writing data");

                using (CachedCsvReader csv = new CachedCsvReader(new StringReader(resultsForOneYear), true))
                {
                    if (csv.FieldCount >= 27)
                    {
                        hasResults = true;

                        string date = "";
                        while (csv.ReadNextRecord())
                        {
                            date = csv[0];
                            double p = ClimateString2Double(csv[TOTAL_PRECIPITATION_COL_INDEX]);
                            pRec[0] = date;
                            pRec[1] = p.ToString();
                            pDBF.Write(pRec, true);

                            double t_max = ClimateString2Double(csv[MAX_T_COL_INDEX]);
                            double t_min = ClimateString2Double(csv[MIN_T_COL_INDEX]);
                            tRec[0] = date;
                            tRec[1] = t_max.ToString();
                            tRec[2] = t_min.ToString();
                            tDBF.Write(tRec, true);
                        }
                        checkLastDayofYear(date);
                    }
                }
                processPercent += 1;
            }
            pDBF.Close();
            tDBF.Close();

            return hasResults;
        }
Example No. 17
        static void Main(string[] args)
        {
            if (args.Length < 2)
            {
                //print help
                Console.WriteLine("\n\n");
                Console.WriteLine("Welcome to Social Explorer DBF 2 CSV Utility");
                Console.WriteLine("-------------------------------------------------");
                Console.WriteLine("\nParameters:");
                Console.WriteLine("1. input DBF file");
                Console.WriteLine("2. output CSV file");

                Console.WriteLine("\nOptional switches:");
                Console.WriteLine("/F  - format numbers so 5.5000 comes out as 5.5");
                Console.WriteLine("/P  - padded output, fixed width (/P trumps /F)");
                Console.WriteLine("/Q  - only output quotes when comma appears in data");

                Console.WriteLine("\n\nExample: dbf2csv \"in.dbf\" \"out.csv\" /P /Q");
            }
            else
            {
                //check if input DBF file exists...
                if (!File.Exists(args[0]))
                {
                    Console.WriteLine("Input file '" + args[0] + "' does not exist!");
                    return;
                }

                //create output CSV file, overwrite if it already exists.
                if (File.Exists(args[1]))
                {
                    //ask to overwrite:
                    Console.WriteLine("Output CSV file '" + args[1] + "' already exists.");
                    Console.WriteLine("Would you like to overwrite it? Press 'Y' for yes: ");
                    if (Console.ReadKey().KeyChar.ToString().ToUpper() != "Y")
                        return;
                }

                bool bSwitchF = false;
                bool bSwitchP = false;
                bool bSwitchQ = false;

                for (int i = 0; i < args.Length; i++)
                    if (args[i] == "/F")
                        bSwitchF = true;

                for (int i = 0; i < args.Length; i++)
                    if (args[i] == "/P")
                        bSwitchP = true;

                for (int i = 0; i < args.Length; i++)
                    if (args[i] == "/Q")
                        bSwitchQ = true;

                //open DBF file and create CSV output file...
                StreamWriter swcsv = new StreamWriter(args[1], false, Encoding.Default);
                DbfFile dbf = new DbfFile(Encoding.UTF8);
                dbf.Open(args[0], FileMode.Open);

                //output column names
                for (int i = 0; i < dbf.Header.ColumnCount; i++)
                {
                    if (dbf.Header[i].ColumnType != DbfColumn.DbfColumnType.Binary &&
                        dbf.Header[i].ColumnType != DbfColumn.DbfColumnType.Memo)
                        swcsv.Write((i == 0 ? "" : ",") + dbf.Header[i].Name);
                    else
                        Console.WriteLine("WARNING: Excluding Binary/Memo field '" + dbf.Header[i].Name + "'");
                }

                swcsv.WriteLine();

                //output values for all but binary and memo...
                DbfRecord orec = new DbfRecord(dbf.Header);
                while (dbf.ReadNext(orec))
                {
                    //output column values...
                    if (!orec.IsDeleted)
                    {
                        for (int i = 0; i < orec.ColumnCount; i++)
                        {
                            if (orec.Column(i).ColumnType == DbfColumn.DbfColumnType.Character)
                            {
                                //string values: trim, enclose in quotes and escape quotes with double quotes
                                string sval = orec[i];

                                if (!bSwitchP)
                                    sval = orec[i].Trim();

                                if (!bSwitchQ || sval.IndexOf('"') > -1)
                                    sval = ("\"" + sval.Replace("\"", "\"\"") + "\"");

                                swcsv.Write(sval);
                            }
                            else if (orec.Column(i).ColumnType == DbfColumn.DbfColumnType.Date)
                                swcsv.Write(orec.GetDateValue(i).ToString("MM-dd-yyyy"));
                            else
                            {
                                if (bSwitchP)
                                    swcsv.Write(orec[i]);
                                else if (bSwitchF)
                                    swcsv.Write(FormatNumber(orec[i].Trim()));
                                else
                                    swcsv.Write(orec[i].Trim());
                            }

                            //end record with a linefeed or end column with a comma.
                            if (i < orec.ColumnCount - 1)
                                swcsv.Write(",");
                        }

                        //write line...
                        swcsv.WriteLine();
                    }
                }

                //close files...
                swcsv.Flush();
                swcsv.Close();
                dbf.Close();
            }
        }
Example No. 18
        public void Write(DbfRecord orec, bool bClearRecordAfterWrite)
        {
            Write(orec);

            if (bClearRecordAfterWrite)
                orec.Clear();
        }
Example No. 19
 public SmoPacient(DbfRecord dbfRecord)
     : base(dbfRecord)
 {
     FAM = dbfRecord["FAM"].Trim();
     IM = dbfRecord["IM"].Trim();
     OT = dbfRecord["OT"].Trim();
     DR = ToDateTime(dbfRecord["DR"].Trim());
     W = dbfRecord["W"].Trim();
     VPOLIS = dbfRecord["VPOLIS"].Trim();
     S_POL = dbfRecord["S_POL"].Trim();
     N_POL = dbfRecord["N_POL"].Trim();
     Q = dbfRecord["Q"].Trim();
     DP = ToDateTime(dbfRecord["DP"].Trim());
     DENDP = ToDateTime(dbfRecord["DENDP"].Trim());
     DOCTYPE = dbfRecord["DOCTYPE"].Trim();
     SN_PASP = dbfRecord["SN_PASP"].Trim();
     SNILS = dbfRecord["SNILS"].Trim();
     OKATO = dbfRecord["OKATO"].Trim();
     RNNAME = dbfRecord["RNNAME"].Trim();
     NPNAME = dbfRecord["NPNAME"].Trim();
     UL = dbfRecord["UL"].Trim();
     ULCODE = dbfRecord["ULCODE"].Trim();
     DOM = dbfRecord["DOM"].Trim();
     KOR = dbfRecord["KOR"].Trim();
     STR = dbfRecord["STR"].Trim();
     KV = dbfRecord["KV"].Trim();
     TEL = dbfRecord["TEL"].Trim();
     MCOD = dbfRecord["MCOD"].Trim();
     D_PR = ToDateTime(dbfRecord["D_PR"].Trim());
     D_OT = ToDateTime(dbfRecord["D_OT"].Trim());
     S_PR = dbfRecord["S_PR"].Trim();
 }
Example No. 20
        /// <summary>
        /// Update a record. RecordIndex (zero based index) must be more than -1, otherwise an exception is thrown.
        /// You can also use Write method which updates a record if it has RecordIndex or adds a new one if RecordIndex == -1.
        /// RecordIndex is set automatically when you call any Read() methods on this class.
        /// </summary>
        /// <param name="orec"></param>
        public void Update(DbfRecord orec)
        {
            //if header was never written, write it first, then output the record
            if (!_headerWritten)
                WriteHeader();

            //Check if record has an index
            if (orec.RecordIndex < 0)
                throw new Exception("RecordIndex is not set, unable to update record. Set RecordIndex or call Write() method to add a new record to file.");

            //Check if this record matches record size specified by header and number of columns.
            //Client can pass a record from another DBF that is incompatible with this one and that would corrupt the file.
            if (orec.Header != _header && (orec.Header.ColumnCount != _header.ColumnCount || orec.Header.RecordLength != _header.RecordLength))
                throw new Exception("Record parameter does not have the same size and number of columns as the " +
                                    "header specifies. Writing this record would corrupt the DBF file. " +
                                    "This is a programming error, have you mixed up DBF file objects?");

            //DBF file writer can be null if stream is not writable...
            if (_dbfFileWriter == null)
                throw new Exception("Write stream is null. Either you have opened a stream that can not be " +
                                    "written to (a read-only stream) or you have not opened a stream at all.");

            //move to the specified record, note that an exception will be thrown if stream is not seekable!
            //This is ok, since we provide a function to check whether the stream is seekable.
            long nSeekToPosition = (long)_header.HeaderLength + (long)((long)orec.RecordIndex * (long)_header.RecordLength);

            //check whether we can seek to this position; if the position lies beyond the end of the file
            //the record cannot exist, so throw rather than silently writing past the end.
            if (_dbfFile.Length < nSeekToPosition)
                throw new Exception("Invalid record position. Unable to save record.");

            //move to record start
            _dbfFile.Seek(nSeekToPosition, SeekOrigin.Begin);

            //write
            orec.Write(_dbfFile);
        }
Example No. 21
        /// <summary>
        /// Write a record to file. If RecordIndex is present, record will be updated, otherwise a new record will be written.
        /// Header will be output first if this is the first record being written to file.
        /// This method does not require stream seek capability to add a new record.
        /// </summary>
        /// <param name="orec"></param>
        public void Write(DbfRecord orec)
        {
            //if header was never written, write it first, then output the record
            if (!_headerWritten)
                WriteHeader();

            //if this is a new record (RecordIndex should be -1 in that case)
            if (orec.RecordIndex < 0)
            {
                if (_dbfFileWriter.BaseStream.CanSeek)
                {
                    //calculate number of records in file. do not rely on header's RecordCount property since client can change that value.
                    //also note that some DBF files do not have ending 0x1A byte, so we subtract 1 and round off
                    //instead of just cast since cast would just drop decimals.
                    int nNumRecords = (int)Math.Round(((double)(_dbfFile.Length - _header.HeaderLength - 1) / _header.RecordLength));
                    if (nNumRecords < 0)
                        nNumRecords = 0;

                    orec.RecordIndex = nNumRecords;
                    Update(orec);
                    _header.RecordCount++;

                }
                else
                {
                    //we can not position this stream, just write out the new record.
                    orec.Write(_dbfFile);
                    _header.RecordCount++;
                }
            }
            else
                Update(orec);
        }
Example No. 22
        /// <summary>
        /// Read next record and fill data into parameter oFillRecord. Returns true if a record was read, otherwise false.
        /// </summary>
        /// <param name="oFillRecord"></param>
        /// <returns></returns>
        public bool ReadNext(DbfRecord oFillRecord)
        {
            //check if we can fill this record with data. it must match record size specified by header and number of columns.
            //we are not checking whether it comes from another DBF file or not, we just need the same structure. Allow flexibility but be safe.
            if (oFillRecord.Header != _header && (oFillRecord.Header.ColumnCount != _header.ColumnCount || oFillRecord.Header.RecordLength != _header.RecordLength))
                throw new Exception("Record parameter does not have the same size and number of columns as the " +
                                    "header specifies, so we are unable to read a record into oFillRecord. " +
                                    "This is a programming error, have you mixed up DBF file objects?");

            //DBF file reader can be null if stream is not readable...
            if (_dbfFileReader == null)
                throw new Exception("Read stream is null, either you have opened a stream that can not be " +
                                    "read from (a write-only stream) or you have not opened a stream at all.");

            //read next record...
            bool bRead = oFillRecord.Read(_dbfFile);

            if (bRead)
            {
                if (_isForwardOnly)
                {
                    //zero based index! set before incrementing count.
                    oFillRecord.RecordIndex = _recordsReadCount;
                    _recordsReadCount++;
                }
                else
                    oFillRecord.RecordIndex = ((int)((_dbfFile.Position - _header.HeaderLength) / _header.RecordLength)) - 1;

            }

            return bRead;
        }
Example No. 23
        /// <summary>
        /// Tries to read a record and returns a new record object or null if nothing was read.
        /// </summary>
        /// <returns></returns>
        public DbfRecord ReadNext()
        {
            //create a new record and fill it.
            DbfRecord orec = new DbfRecord(_header);

            return ReadNext(orec) ? orec : null;
        }
        /// <summary>
        /// Tries to read a record and returns a new record object or null if nothing was read.
        /// </summary>
        /// <returns></returns>
        public DbfRecord ReadNext()
        {
            //create a new record and fill it.
            DbfRecord orec = new DbfRecord(mHeader);

            if (ReadNext(orec))
                return orec;
            else
                return null;
        }
Example No. 25
 protected static IEnumerable<DbfRecord> EnumRecords(DbfFile file, DbfHeader header)
 {
     var record = new DbfRecord(header);
     while (file.ReadNext(record))
     {
         yield return record;
     }
 }
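
A usage sketch for the iterator above; note that it yields the same DbfRecord instance on every iteration, so values must be copied out before advancing. The column name is a placeholder:

        protected static void PrintNamesSketch(DbfFile file)
        {
            //EnumRecords yields the SAME DbfRecord instance on every iteration, so copy the
            //values you need before advancing instead of storing the yielded records themselves.
            foreach (DbfRecord rec in EnumRecords(file, file.Header))
                Console.WriteLine(rec["NAME"].Trim());   //"NAME" is a hypothetical column
        }

Materializing the sequence (for example with ToList()) would therefore produce many references to a single mutable record rather than distinct rows.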
Example No. 26
 public Doctor(DbfRecord dbfRecord)
     : base(dbfRecord)
 {
     SNILS = dbfRecord["F1"].Trim();
     FAM = dbfRecord["F2"].Trim();
     IM = dbfRecord["F3"].Trim();
     OT = dbfRecord["F4"].Trim();
     SEX = dbfRecord["F5"].Trim();
     BIRTHDAY = ToDateTime(dbfRecord["F6"].Trim());
     ENDDAY = ToDateTime(dbfRecord["F7"].Trim());
 }
Example No. 27
        /// <summary>
        /// Compares two DbfRecords.
        /// </summary>
        /// <param name="record1">
        /// The first record to compare.
        /// </param>
        /// <param name="record2">
        /// The second record to compare.
        /// </param>
        /// <returns>
        /// The <see cref="Diff"/> between the two records.
        /// </returns>
        public static Diff Compare(DbfRecord record1, DbfRecord record2)
        {
            var data1 = record1.Data;
            var data2 = record2.Data;

            if (data1.SequenceEqual(data2))
            {
                return new Diff(Operation.Unmodified, record1.RecordIndex);
            }

            if (record2.IsDeleted)
            {
                return new Diff(Operation.Deleted, record1.RecordIndex);
            }

            var columnDiffs = new List<ColumnDiff>();
            var equalCount = 0;
            for (var columnIndex = 0; columnIndex < record1.ColumnCount; ++columnIndex)
            {
                var columnData1 = new ArraySegmentWrapper<byte>(record1.ColumnData(columnIndex));
                var columnData2 = new ArraySegmentWrapper<byte>(record2.ColumnData(columnIndex));

                if (columnData1.SequenceEqual(columnData2))
                {
                    ++equalCount;
                }
                else
                {
                    var columnDiff = new ColumnDiff(columnIndex, record1[columnIndex], record2[columnIndex]);
                    columnDiffs.Add(columnDiff);
                }
            }

            if (equalCount == 0)
            {
                return new Diff(Operation.Deleted, record1.RecordIndex);
            }

            if (equalCount == record1.ColumnCount)
            {
                return new Diff(Operation.Unmodified, record1.RecordIndex);
            }

            return new Diff(Operation.Modified, record1.RecordIndex, columnDiffs);
        }
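
A hedged sketch of how Compare() might be called for the record at the same index in two files; that Diff exposes an Operation property is an assumption, not something confirmed by the example above:

        private static void CompareAtIndexSketch(DbfFile oldFile, DbfFile newFile, int index)
        {
            DbfRecord r1 = oldFile.Read(index);
            DbfRecord r2 = newFile.Read(index);
            if (r1 == null || r2 == null)
                return;                             //index is past the end of one of the files

            Diff diff = Compare(r1, r2);            //the Diff members used below are assumed
            if (diff.Operation != Operation.Unmodified)
                Console.WriteLine("record " + index + " differs");
        }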
Example No. 28
        private static void TestWriteNewDbf()
        {
            //create a simple DBF file and output to args[0]
            var odbf = new DbfFile(Encoding.GetEncoding(1252));
            odbf.Open(Path.Combine(TestPath,"TestNew2.dbf"), FileMode.Create);

            //create a header
            odbf.Header.AddColumn(new DbfColumn("StrCol", DbfColumn.DbfColumnType.Character, 20, 0));
            odbf.Header.AddColumn(new DbfColumn("DecCol1", DbfColumn.DbfColumnType.Number, 5, 1));
            odbf.Header.AddColumn(new DbfColumn("DecCol2", DbfColumn.DbfColumnType.Number, 5, 2));
            odbf.Header.AddColumn(new DbfColumn("DecCol3", DbfColumn.DbfColumnType.Number, 5, 3));
            odbf.Header.AddColumn(new DbfColumn("DecCol4", DbfColumn.DbfColumnType.Number, 15, 5));
            odbf.Header.AddColumn(new DbfColumn("NumCol1", DbfColumn.DbfColumnType.Number, 5, 0));
            odbf.Header.AddColumn(new DbfColumn("NumCol2", DbfColumn.DbfColumnType.Number, 10, 0));
            odbf.Header.AddColumn(new DbfColumn("DateCol1", DbfColumn.DbfColumnType.Date));
            odbf.Header.AddColumn(new DbfColumn("BoolCol1", DbfColumn.DbfColumnType.Boolean));

            //add some records...
            var orec = new DbfRecord(odbf.Header) {AllowDecimalTruncate = true};
            orec[0] = "Ahmed Test";
            orec[1] = "123.5";
            orec[2] = "12.35";
            orec[3] = "1.235";
            orec[4] = "1235.123456";
            orec[5] = "1235";
            orec[6] = "123567890";
            orec[7] = "11/07/2007";
            orec[8] = "f";
            odbf.Write(orec, true);

            orec[0] = "Stéfanié Singer";
            orec[1] = "-1.5";
            orec[2] = "-1.35";
            orec[3] = "1.235";
            orec[4] = "-1235.123";
            orec[5] = "15";
            orec[6] = "12345"; //put a decimal in integer, we won't throw an exception because we do not test for that.
            orec[7] = "2008-12-21";
            orec[8] = "f";
            odbf.Write(orec, true);

            orec[0] = "Stéfanié Singer longer than fits in the DBF record!";
            orec[1] = "0.1";
            orec[2] = ".12";
            orec[3] = ".1";
            orec[4] = "";
            orec[5] = "-15";
            orec[6] = "-12345"; //put a decimal in integer, we won't throw an exception because we do not test for that.
            orec[7] = "";
            orec[8] = "no";
            odbf.Write(orec);

            //overwrite first record with last record's data...
            orec.RecordIndex = 0;
            odbf.Write(orec);

            //odbf.Header.RecordCount = 50;
            odbf.WriteHeader();

            odbf.Close();

            //open the same DBF file we just output, and append a few records to it...
            odbf.Open(Path.Combine(TestPath,"TestNew2.dbf"), FileMode.Open);

            orec.Clear();
            orec[0] = "New record added!";
            orec[6] = "100";
            orec[8] = "t";
            odbf.Write(orec, true);

            orec[0] = "New record 2";
            orec[6] = "104";
            orec[8] = "y";
            odbf.Write(orec, true);

            orec[0] = "New record 3";
            orec[6] = "104";
            orec[8] = "TRUE";
            odbf.Write(orec, true);

            if (odbf.Read(0, orec))
            {
                orec[0] = "modified first record";
                odbf.Write(orec, true);
            }

            //read 3rd record and output to console...
            Console.WriteLine(odbf.Read(2).ToString());

            //now add a new record, forcing seek to end of file...
            orec.Clear();
            orec[0] = "New record 4";
            orec[6] = "500";
            orec[8] = "FALSE";
            odbf.Write(orec, true);

            odbf.Close();

            Console.ReadKey();
        }
Example No. 29
        /// <summary>
        /// Reads a record specified by index into oFillRecord object. You can use this method 
        /// to read in and process records without creating and discarding record objects.
        /// Note that you should check that your stream is not forward-only! If you have a forward only stream, use ReadNext() functions.
        /// </summary>
        /// <param name="index">Zero based record index.</param>
        /// <param name="oFillRecord">Record object to fill, must have same size and number of fields as this DBF file header!</param>
        /// <returns>True if a record was read, otherwise false. If you reach the end of file, false is returned and oFillRecord is NOT modified!</returns>
        /// <remarks>
        /// The parameter record (oFillRecord) must match the record size specified by the header and the number of columns as well.
        /// It does not have to come from the same header, but it must match the structure. We are not going as far as to check the size of each field.
        /// The idea is to be flexible but safe. It's a fine balance; these two are almost always at odds.
        /// </remarks>
        public bool Read(int index, DbfRecord oFillRecord)
        {
            //check if we can fill this record with data. it must match record size specified by header and number of columns.
            //we are not checking whether it comes from another DBF file or not, we just need the same structure. Allow flexibility but be safe.
            if (oFillRecord.Header != _header && (oFillRecord.Header.ColumnCount != _header.ColumnCount || oFillRecord.Header.RecordLength != _header.RecordLength))
                throw new Exception("Record parameter does not have the same size and number of columns as the " +
                                    "header specifies, so we are unable to read a record into oFillRecord. " +
                                    "This is a programming error, have you mixed up DBF file objects?");

            //DBF file reader can be null if stream is not readable...
            if (_dbfFileReader == null)
                throw new Exception("ReadStream is null, either you have opened a stream that can not be " +
                                    "read from (a write-only stream) or you have not opened a stream at all.");

            //move to the specified record; note that an exception will be thrown if the stream is not seekable!
            //This is ok, since we provide a function to check whether the stream is seekable.
            long nSeekToPosition = _header.HeaderLength + (index * _header.RecordLength);

            //check whether requested record exists. Subtract 1 from file length (there is a terminating character 1A at the end of the file)
            //so if we hit end of file, there are no more records, so return false;
            if (index < 0 || _dbfFile.Length - 1 <= nSeekToPosition)
                return false;

            //move to record and read
            _dbfFile.Seek(nSeekToPosition, SeekOrigin.Begin);

            //read the record
            bool bRead = oFillRecord.Read(_dbfFile);
            if (bRead)
                oFillRecord.RecordIndex = index;

            return bRead;
        }
Example No. 30
        /// <summary>
        /// Reads a record specified by index. This method requires the stream to be seekable.
        /// If you are using an HTTP stream, or any stream that cannot seek, use the ReadNext() methods to read records sequentially.
        /// </summary>
        /// <param name="index">Zero based index.</param>
        /// <returns>Null if record can not be read, otherwise returns a new record.</returns>
        public DbfRecord Read(int index)
        {
            //create a new record and fill it.
            DbfRecord orec = new DbfRecord(_header);

            return Read(index, orec) ? orec : null;
        }
Example No. 31
        protected static AddressPartIndexWrite EnumRecord(dynamic headerObj, DbfRecord record)
        {
            string NAME = record[headerObj.NAME].Trim();
            string SOCR = record[headerObj.SOCR].Trim();
            string CODE = record[headerObj.CODE].Trim();
            string INDEX = record[headerObj.INDEX].Trim();
            //                    string GNINMB = record[headerObj.GNINMB].Trim();
            //                    string UNO = record[headerObj.UNO].Trim();
            //                    string OCATD = record[headerObj.OCATD].Trim();
            //                    string STATUS = record[headerObj.STATUS].Trim();

            if (!string.IsNullOrEmpty(INDEX) && !_Base.INDEX.IsMatch(INDEX))
            {
                AddressLoader.Log.Warn(ClassName + "Value: {0} cannot be a postal code", INDEX);
                return null;
            }

            if (!_Base.CODE.IsMatch(CODE))
            {
                AddressLoader.Log.Warn(ClassName + "Value: {0} cannot be a code", CODE);
                return null;
            }
            if (CODE.Length == 13)
            {
                // data is no longer current
                if (CODE[11] != '0' || CODE[12] != '0') return null;

                CODE = CODE.Substring(0, 11) + "0000";
            }
            else if (CODE.Length == 17)
            {
                // data is no longer current
                if (CODE[15] != '0' || CODE[16] != '0') return null;

                CODE = CODE.Substring(0, 15);
            }
            else
            {
                AddressLoader.Log.Warn(ClassName + "Value: {0} cannot be a code", CODE);
                return null;
            }

            int level = _Base.NoLevel;
            foreach (var l in _Base.LEVELS)
            {
                if (l.Key.IsMatch(CODE))
                {
                    level = l.Value;
                    break;
                }
            }

            if (level == _Base.NoLevel)
            {
                AddressLoader.Log.Warn(ClassName + "No KLADR level found for code: {0}", CODE);
                return null;
            }

            AddressPartIndexWrite d = new AddressPartIndexWrite();
            d.PostalCode = INDEX;
            d.Id = CODE;
            d.Level = level;
            d.Reduction = SOCR;
            d.Name = NAME;

            return d;
        }
        /// <summary>
        /// Reads a record specified by index. This method requires the stream to be seekable.
        /// If you are using an HTTP stream, or any stream that cannot seek, use the ReadNext() methods to read records sequentially.
        /// </summary>
        /// <param name="index">Zero based index.</param>
        /// <returns>Null if record can not be read, otherwise returns a new record.</returns>
        public DbfRecord Read(int index)
        {
            //create a new record and fill it.
            DbfRecord orec = new DbfRecord(mHeader);

            if (Read(index, orec))
                return orec;
            else
                return null;
        }