////////////////////////////////////////////////////////////////////////////////////////////////////////////////

/// <summary>Asynchronously reads the next S101 message from <paramref name="readBuffer"/>.</summary>
/// <returns>The parsed message, or <c>null</c> when the buffer yields no more data.</returns>
/// <exception cref="S101Exception">The stream ended in the middle of a message header.</exception>
internal static async Task<S101Message> ReadFromAsync(
    ReadBuffer readBuffer, CancellationToken cancellationToken)
{
    // No more data at a message boundary: the stream ended cleanly.
    if (!await readBuffer.ReadAsync(cancellationToken))
    {
        return null;
    }

    // First header byte is the slot.
    var slot = readBuffer[readBuffer.Index++];
    byte messageType;
    S101Command command;

    try
    {
        // Next comes the message type byte, then the command.
        await readBuffer.FillAsync(1, cancellationToken);
        messageType = GetMessageType(readBuffer);
        command = await S101Command.ReadFromAsync(readBuffer, cancellationToken);
    }
    catch (EndOfStreamException ex)
    {
        // A truncated header is a protocol error, not a clean end of stream.
        throw new S101Exception("Unexpected end of stream.", ex);
    }

    return new S101Message(slot, messageType, command);
}
// Incrementally consumes a response from the buffer: first a fixed-size header, then a
// body whose length the header announces. Returns true while more data is still needed,
// false once the response is complete (or the header was rejected by ProcessHeader).
bool IResponse.Read(ReadBuffer buffer)
{
    switch (state)
    {
        case STATE_NEED_HEADER:
            // Append to the header until all Protocol.HeaderLength bytes have arrived.
            remainingHeader -= buffer.Read(header, Protocol.HeaderLength - remainingHeader, remainingHeader);
            if (remainingHeader > 0)
                return true;
            Debug.Assert(remainingHeader == 0);
            // ProcessHeader yields the body length; a false result aborts this response.
            if (!ProcessHeader(header, out remainingData))
            {
                state = STATE_DONE;
                return false;
            }
            state = STATE_NEED_BODY;
            // Fall through immediately: the buffer may already hold body bytes.
            goto case STATE_NEED_BODY;
        case STATE_NEED_BODY:
            var read = buffer.Read(data, dataReadOffset, remainingData);
            remainingData -= read;
            dataReadOffset += read;
            if (remainingData > 0)
                return true;
            Debug.Assert(remainingHeader == 0);
            state = STATE_DONE;
            break;
    }
    return false;
}
// Prepares a read of a column whose type is unknown to Npgsql. Text-format values fall
// back to the base implementation; binary values cannot be decoded, so the data is
// skipped and a safe (non-connection-breaking) error is raised.
internal override void PrepareRead(ReadBuffer buf, FieldDescription fieldDescription, int len)
{
    if (!fieldDescription.IsBinaryFormat)
    {
        base.PrepareRead(buf, fieldDescription, len);
        return;
    }

    // Consume the value so the protocol stream stays in sync, then report the failure.
    buf.Skip(len);
    throw new SafeReadException(new NotSupportedException($"The field '{fieldDescription.Name}' has a type currently unknown to Npgsql (OID {fieldDescription.TypeOID}). You can retrieve it as a string by marking it as unknown, please see the FAQ."));
}
// Reads the packet-specific part of the message: after the base header, three bytes
// (packet flags, DTD, application-bytes length) followed by that many application bytes.
internal sealed override async Task ReadFromCoreAsync(ReadBuffer readBuffer, CancellationToken cancellationToken)
{
    await base.ReadFromCoreAsync(readBuffer, cancellationToken);

    // Ensure the 3 fixed header bytes are buffered before indexing into the buffer.
    await readBuffer.FillAsync(3, cancellationToken);
    this.GetPacketFlagsDtdAndAppBytesLength(readBuffer);

    // The previous call sized this.applicationBytes; now read exactly that many bytes.
    await readBuffer.FillAsync(
        this.applicationBytes, 0, this.applicationBytes.Length, cancellationToken);
}
// Parses a ParameterDescription message: an int16 parameter count followed by one
// uint32 type OID per parameter. Returns this instance for chaining.
internal ParameterDescriptionMessage Load(ReadBuffer buf)
{
    int count = buf.ReadInt16();
    TypeOIDs.Clear();
    while (count-- > 0)
    {
        TypeOIDs.Add(buf.ReadUInt32());
    }
    return this;
}
////////////////////////////////////////////////////////////////////////////////////////////////////////////////

/// <summary>Creates a decoding stream and eagerly reads the first message from it.</summary>
/// <exception cref="S101Exception">The first packet of a multi-packet message lacks the
/// <see cref="PacketFlags.FirstPacket"/> flag.</exception>
internal static async Task<MessageDecodingStream> CreateAsync(
    ReadBuffer rawBuffer,
    byte[] discardBuffer,
    Action<byte> outOfFrameByteReceived,
    CancellationToken cancellationToken)
{
    var result = new MessageDecodingStream(rawBuffer, discardBuffer, outOfFrameByteReceived);
    var newMessage = await S101Message.ReadFromAsync(result.deframedBuffer, cancellationToken);

    // A message that can span multiple packets must open with the FirstPacket flag set.
    if ((newMessage != null) && newMessage.CanHaveMultiplePackets &&
        ((newMessage.PacketFlags & PacketFlags.FirstPacket) == 0))
    {
        throw new S101Exception(string.Format(
            CultureInfo.InvariantCulture, "Missing {0} flag in first packet.", PacketFlags.FirstPacket));
    }

    result.message = newMessage;
    return result;
}
// Parses a CopyInResponse message: one byte indicating the overall copy format
// (0 = text, 1 = binary), an int16 column count, then one int16 format code per column.
// Fix: the original threw bare System.Exception for a protocol violation; a more
// specific InvalidOperationException is thrown instead (still caught by any caller
// catching Exception, so the change is backward compatible).
internal void Load(ReadBuffer buf)
{
    ColumnFormatCodes.Clear();

    var binaryIndicator = buf.ReadByte();
    switch (binaryIndicator)
    {
        case 0:
            IsBinary = false;
            break;
        case 1:
            IsBinary = true;
            break;
        default:
            // Any other value indicates a malformed message from the backend.
            throw new InvalidOperationException("Invalid binary indicator in CopyInResponse message: " + binaryIndicator);
    }

    NumColumns = buf.ReadInt16();
    for (var i = 0; i < NumColumns; i++)
        ColumnFormatCodes.Add((FormatCode)buf.ReadInt16());
}
// Loads a DataRow by scanning every column length up front and recording each column's
// start offset in the buffer, which enables non-sequential column access afterwards.
internal override DataRowMessage Load(ReadBuffer buf)
{
    NumColumns = buf.ReadInt16();
    Buffer = buf;
    Column = -1;       // no column selected yet
    ColumnLen = -1;
    PosInColumn = 0;

    // TODO: Recycle message objects rather than recreating for each row
    _columnOffsets = new List<int>(NumColumns);
    for (var i = 0; i < NumColumns; i++)
    {
        _columnOffsets.Add(buf.ReadPosition);
        var len = buf.ReadInt32();
        // A length of -1 denotes a NULL column: no payload bytes follow.
        if (len != -1)
        {
            buf.Seek(len, SeekOrigin.Current);
        }
    }
    _endOffset = buf.ReadPosition;
    return this;
}
// Positions the stream at the given offset, then reads a DiskTagInt32 from it.
public static DiskTagInt32 SeekAndReadDiskTagInt32(this FileStream fileStream, fpos_t position, ReadBuffer<DiskTagInt32> readBuffer)
{
    fileStream.SeekFromBegin(position);
    return fileStream.ReadDiskTagInt32(readBuffer);
}
// Positions the reader at the given offset, then reads a DiskTagInt32 from it.
public static DiskTagInt32 SeekAndReadDiskTagInt32(this BinaryReader binaryReader, fpos_t position, ReadBuffer<DiskTagInt32> readBuffer)
{
    binaryReader.SeekFromBegin(position);
    return binaryReader.ReadDiskTagInt32(readBuffer);
}
// Parses a ReadyForQuery message, whose single body byte encodes the backend's
// transaction status. Returns this instance for chaining.
internal ReadyForQueryMessage Load(ReadBuffer buf)
{
    var statusByte = buf.ReadByte();
    TransactionStatusIndicator = (TransactionStatus)statusByte;
    return this;
}
// Explicit interface bridge: forwards to the handler's strongly-typed Read.
decimal ISimpleTypeHandler<decimal>.Read(ReadBuffer buf, int len, FieldDescription fieldDescription)
    => Read(buf, len, fieldDescription);
/// <summary>Initializes a new instance, wiring the raw buffer through a deframing stream
/// into a small per-message read buffer.</summary>
private MessageDecodingStream(ReadBuffer rawBuffer, byte[] discardBuffer, Action<byte> outOfFrameByteReceived)
{
    this.rawBuffer = rawBuffer;
    this.discardBuffer = discardBuffer;
    this.outOfFrameByteReceived = outOfFrameByteReceived;

    // This buffer is kept small in size, because a new one is allocated for each message.
    // This has the effect that only the bytes of reads <= MessageHeaderMaxLength bytes are actually copied into
    // this buffer. Larger reads are automatically done by calling this.ReadDeframed (without copying the bytes
    // into the MessageHeaderMaxLength byte buffer first). The former happens when packet headers are read
    // (multiple small-sized reads), the latter happens when the payload is read (typically done with a buffer
    // >= 1024 bytes).
    // This approach minimizes the allocations per message, while guaranteeing the best possible performance for
    // header *and* payload reading.
    this.deframedBuffer = new ReadBuffer(this.ReadDeframedAsync, Constants.MessageHeaderMaxLength);
    this.deframingStream = new DeframingStream(this.rawBuffer, this.outOfFrameByteReceived);
}
// Decodes a circle value: three consecutive doubles, passed to NpgsqlCircle in wire
// order (presumably center X, center Y, radius — matches the constructor's arity).
public override NpgsqlCircle Read(ReadBuffer buf, int len, FieldDescription fieldDescription = null)
{
    var first = buf.ReadDouble();
    var second = buf.ReadDouble();
    var third = buf.ReadDouble();
    return new NpgsqlCircle(first, second, third);
}
// Reads the value as NpgsqlInet first, then renders its textual representation.
string ISimpleTypeHandler<string>.Read(ReadBuffer buf, int len, [CanBeNull] FieldDescription fieldDescription)
{
    var inet = ((ISimpleTypeHandler<NpgsqlInet>)this).Read(buf, len, fieldDescription);
    return inet.ToString();
}
// Parses an AuthenticationMD5Password message, whose body is exactly four salt bytes.
internal static AuthenticationMD5PasswordMessage Load(ReadBuffer buf)
{
    const int SaltLength = 4;
    var salt = new byte[SaltLength];
    buf.ReadBytes(salt, 0, SaltLength);
    return new AuthenticationMD5PasswordMessage(salt);
}
// Consumes three header bytes in wire order: packet flags, DTD, and the length of the
// application bytes (which sizes the applicationBytes array for the subsequent read).
private void GetPacketFlagsDtdAndAppBytesLength(ReadBuffer readBuffer)
{
    var flags = readBuffer[readBuffer.Index++];
    var dtdByte = readBuffer[readBuffer.Index++];
    var appBytesLength = readBuffer[readBuffer.Index++];

    this.PacketFlags = (PacketFlags)flags;
    this.dtd = dtdByte;
    this.applicationBytes = new byte[appBytesLength];
}
////////////////////////////////////////////////////////////////////////////////////////////////////////////////

// Consumes the message-type byte and validates it; only Ember messages are accepted.
private static byte GetMessageType(ReadBuffer readBuffer)
{
    var messageType = readBuffer[readBuffer.Index++];

    if (messageType == MessageType.Ember)
    {
        return messageType;
    }

    throw new S101Exception("Unexpected Message Type.");
}
// Parses a BackendKeyData message; wire order is process ID, then secret key.
internal BackendKeyDataMessage(ReadBuffer buf)
{
    var processId = buf.ReadInt32();
    var secretKey = buf.ReadInt32();
    BackendProcessId = processId;
    BackendSecretKey = secretKey;
}
/// <summary>Reads a DataRow message from the given buffer; each subclass supplies its
/// own loading strategy.</summary>
internal abstract DataRowMessage Load(ReadBuffer buf);
// Creates a data reader over the supplied read buffer.
public PeregrineDataReader(ReadBuffer readBuffer)
{
    _readBuffer = readBuffer;
}
// Bridge overload: the override receives (buf, len, field) but the internal
// implementation expects (buf, field, len), so the arguments are reordered.
public override void PrepareRead(ReadBuffer buf, int len, FieldDescription fieldDescription)
    => PrepareRead(buf, fieldDescription, len);
// Decodes the full inet value, then returns only its address component.
public override IPAddress Read(ReadBuffer buf, int len, FieldDescription fieldDescription = null)
{
    var inet = ((ISimpleTypeHandler<NpgsqlInet>)this).Read(buf, len, fieldDescription);
    return inet.Address;
}
// Delegates to the shared inet/cidr decoder; the final flag mirrors the original call
// (presumably selects the cidr variant — verify against InetHandler.DoRead).
public override NpgsqlInet Read(ReadBuffer buf, int len, FieldDescription fieldDescription = null)
{
    return InetHandler.DoRead(buf, fieldDescription, len, true);
}
// Boxes the value with the type dictated by the column's modifier: a modifier of 1
// yields a single bool (presumably bit(1) — verify), anything else a BitArray.
internal override object ReadValueAsObjectFully(ReadBuffer buf, int len, FieldDescription fieldDescription = null)
{
    if (fieldDescription?.TypeModifier == 1)
    {
        return ReadFully<bool>(buf, len, fieldDescription);
    }

    return ReadFully<BitArray>(buf, len, fieldDescription);
}
// Explicit interface bridge: narrows the typed Read result to sbyte.
sbyte ISimpleTypeHandler<sbyte>.Read(ReadBuffer buf, int len, FieldDescription fieldDescription)
    => (sbyte)Read(buf, len, fieldDescription);
/// <summary>Sends a request to the meter and receives the response.</summary>
/// <param name="Query">Request to the meter as an L1-format array.</param>
/// <param name="Answer">Meter response as an L1-format array.</param>
/// <exception cref="OblikIOException">The port could not be opened, the write failed,
/// or no valid response was received within the configured number of retries.</exception>
public void OblikQuery(byte[] Query, out byte[] Answer)
{
    bool success = false; // operation success flag
    SerialPort com = null;
    try
    {
        com = new SerialPort
        {
            PortName = "COM" + _ConParams.Port.ToString(),
            BaudRate = _ConParams.Baudrate.Value,
            Parity = Parity.None,
            DataBits = 8,
            StopBits = StopBits.One,
            ReadTimeout = 500,
            WriteTimeout = 500,
            DtrEnable = false,
            RtsEnable = false,
            Handshake = Handshake.None
        };
        Answer = null;
        try
        {
            com.Open();
        }
        catch (Exception e)
        {
            ChangeIOStatus(e.Message);
            throw new OblikIOException(e.Message);
        }
        int r = _ConParams.Repeats.Value;
        ChangeCmdStatus(StringsTable.SendReq);
        while ((r > 0) && (!success)) // retry on error
        {
            com.DiscardOutBuffer(); // clear the transmit buffer
            com.DiscardInBuffer();  // clear the receive buffer
            try
            {
                if (Query == null)
                {
                    throw new ArgumentNullException(paramName: nameof(Query));
                }
                com.Write(Query, 0, Query.Length);
            }
            catch (Exception e)
            {
                ChangeIOStatus(e.Message);
                throw new OblikIOException(e.Message);
            }
            try
            {
                Answer = new byte[2];
                r--;
                // Read the L1 result byte; anything other than 1 is a channel error.
                ReadAnswer(com, _ConParams.Timeout.Value, 1, out byte[] ReadBuffer);
                Answer[0] = ReadBuffer[0];
                if (Answer[0] != 1)
                {
                    throw new OblikIOException(ParseChannelError(Answer[0]));
                }
                // Read the byte count of the response that follows.
                ReadAnswer(com, _ConParams.Timeout.Value, 1, out ReadBuffer);
                Answer[1] = ReadBuffer[0];
                int len = ReadBuffer[0] + 1;
                Array.Resize(ref Answer, len + 2);
                // Read the entire remaining response.
                ReadAnswer(com, (int)(_ConParams.Timeout.Value / 5u), len, out ReadBuffer);
                ReadBuffer.CopyTo(Answer, 2);
                success = (ReadBuffer.Length == len);
                if (Answer[2] != 0)
                {
                    throw new OblikIOException(ParseSegmentError(Answer[2]));
                }
                // Verify the checksum: the XOR of all response bytes must be zero.
                byte cs = 0;
                for (int i = 0; i < Answer.Length; i++)
                {
                    cs ^= Answer[i];
                }
                if (cs != 0)
                {
                    throw new OblikIOException(StringsTable.CSCError);
                }
            }
            catch (Exception e)
            {
                // Per-attempt failures are logged and retried; only exhaustion of the
                // retry budget surfaces the final OblikIOException below.
                success = false;
                ChangeIOStatus(e.Message);
            }
        }
    }
    finally
    {
        if (com != null)
        {
            com.Dispose();
        }
    }
    if (!success)
    {
        ChangeIOStatus(StringsTable.ReqError);
        throw new OblikIOException(StringsTable.QueryErr);
    }
}
// Parses the CommandComplete ('C') message tag, extracting the statement type, the
// affected row count and — for INSERT — the OID of the inserted row.
internal CommandCompleteMessage Load(ReadBuffer buf, int len)
{
    Rows = 0;
    OID = 0;

    var tag = buf.ReadString(len-1);
    buf.Skip(1);   // Null terminator
    var tokens = tag.Split();
    if (tokens.Length == 0)
    {
        return this;
    }

    switch (tokens[0])
    {
        case "INSERT":
            // Tag format: "INSERT <oid> <rows>"
            StatementType = StatementType.Insert;
            uint oid;
            if (uint.TryParse(tokens[1], out oid))
            {
                OID = oid;
            }
            else
            {
                Log.Error("Ignoring unparseable OID in CommandComplete: " + tokens[1]);
            }
            ParseRows(tokens[2]);
            break;
        case "DELETE":
            StatementType = StatementType.Delete;
            ParseRows(tokens[1]);
            break;
        case "UPDATE":
            StatementType = StatementType.Update;
            ParseRows(tokens[1]);
            break;
        case "SELECT":
            StatementType = StatementType.Select;
            // PostgreSQL 8.4 and below doesn't include the number of rows
            if (tokens.Length > 1)
            {
                ParseRows(tokens[1]);
            }
            break;
        case "MOVE":
            StatementType = StatementType.Move;
            ParseRows(tokens[1]);
            break;
        case "FETCH":
            StatementType = StatementType.Fetch;
            ParseRows(tokens[1]);
            break;
        case "COPY":
            StatementType = StatementType.Copy;
            if (tokens.Length > 1)
            {
                ParseRows(tokens[1]);
            }
            break;
        case "CREATE":
            // Only "CREATE TABLE AS" carries a row count; other CREATEs fall through.
            if (tag.StartsWith("CREATE TABLE AS"))
            {
                StatementType = StatementType.CreateTableAs;
                ParseRows(tokens[3]);
                break;
            }
            goto default;
        default:
            StatementType = StatementType.Other;
            break;
    }
    return this;
}
// Resets the handler's read cursor before a column's bytes are consumed.
internal virtual void PrepareRead(ReadBuffer buf, FieldDescription fieldDescription, int len)
{
    _bytePos = -1;
    _byteLen = len;
    _readBuf = buf;
}
// Delegates parsing to the shared copy-response logic; only the static return
// type differs, enabling fluent use.
internal new CopyBothResponseMessage Load(ReadBuffer buf)
{
    base.Load(buf);
    return this;
}
// Provider-specific read: identical wire decoding to the plain timestamp read.
internal override NpgsqlDateTime ReadPsv(ReadBuffer buf, int len, FieldDescription fieldDescription = null)
{
    return ReadTimeStamp(buf, len, fieldDescription);
}
// ReSharper disable once FunctionComplexityOverflow
// Parses an ErrorResponse/NoticeResponse body: a sequence of (field-type code,
// null-terminated string) pairs terminated by a zero byte. Unknown field codes are
// consumed and discarded; unparseable numeric positions are logged and skipped.
internal ErrorOrNoticeMessage(ReadBuffer buf)
{
    while (true)
    {
        var code = (ErrorFieldTypeCode)buf.ReadByte();
        switch (code)
        {
            case ErrorFieldTypeCode.Done:
                // Null terminator; error message fully consumed.
                return;
            case ErrorFieldTypeCode.Severity:
                // Relaxed UTF-8 so a malformed message cannot throw while being reported.
                Severity = buf.ReadNullTerminatedString(PGUtil.RelaxedUTF8Encoding);
                break;
            case ErrorFieldTypeCode.Code:
                Code = buf.ReadNullTerminatedString();
                break;
            case ErrorFieldTypeCode.Message:
                Message = buf.ReadNullTerminatedString(PGUtil.RelaxedUTF8Encoding);
                break;
            case ErrorFieldTypeCode.Detail:
                Detail = buf.ReadNullTerminatedString(PGUtil.RelaxedUTF8Encoding);
                break;
            case ErrorFieldTypeCode.Hint:
                Hint = buf.ReadNullTerminatedString(PGUtil.RelaxedUTF8Encoding);
                break;
            case ErrorFieldTypeCode.Position:
                var positionStr = buf.ReadNullTerminatedString();
                int position;
                if (!int.TryParse(positionStr, out position))
                {
                    Log.Warn("Non-numeric position in ErrorResponse: " + positionStr);
                    continue;
                }
                Position = position;
                break;
            case ErrorFieldTypeCode.InternalPosition:
                var internalPositionStr = buf.ReadNullTerminatedString();
                int internalPosition;
                if (!Int32.TryParse(internalPositionStr, out internalPosition))
                {
                    Log.Warn("Non-numeric position in ErrorResponse: " + internalPositionStr);
                    continue;
                }
                InternalPosition = internalPosition;
                break;
            case ErrorFieldTypeCode.InternalQuery:
                InternalQuery = buf.ReadNullTerminatedString();
                break;
            case ErrorFieldTypeCode.Where:
                Where = buf.ReadNullTerminatedString();
                break;
            case ErrorFieldTypeCode.File:
                File = buf.ReadNullTerminatedString(PGUtil.RelaxedUTF8Encoding);
                break;
            case ErrorFieldTypeCode.Line:
                Line = buf.ReadNullTerminatedString();
                break;
            case ErrorFieldTypeCode.Routine:
                Routine = buf.ReadNullTerminatedString();
                break;
            case ErrorFieldTypeCode.SchemaName:
                SchemaName = buf.ReadNullTerminatedString();
                break;
            case ErrorFieldTypeCode.TableName:
                TableName = buf.ReadNullTerminatedString();
                break;
            case ErrorFieldTypeCode.ColumnName:
                ColumnName = buf.ReadNullTerminatedString();
                break;
            case ErrorFieldTypeCode.DataTypeName:
                DataTypeName = buf.ReadNullTerminatedString();
                break;
            case ErrorFieldTypeCode.ConstraintName:
                ConstraintName = buf.ReadNullTerminatedString();
                break;
            default:
                // Unknown error field; consume and discard.
                buf.ReadNullTerminatedString();
                break;
        }
    }
}
// Decodes a two-byte integer straight from the buffer.
public override short Read(ReadBuffer buf, int len, FieldDescription fieldDescription)
    => buf.ReadInt16();
/// <summary>See <i>"X.690"</i><cite>X.690</cite>, chapter 8.1.2.</summary>
private static EmberId ReadIdentifier(ReadBuffer readBuffer)
{
    readBuffer.Fill(1);
    var leadingOctet = readBuffer[readBuffer.Index++];

    // Leading octet layout: bits 8-7 class, bit 6 constructed flag, bits 5-1 number.
    var theClass = (Class)(leadingOctet & 0xC0);
    var isConstructed = (leadingOctet & 0x20) > 0;
    var number = leadingOctet & 0x1F;

    // Numbers up to 30 are stored directly; larger ones use the multi-octet 7-bit form.
    var identifierNumber = (number <= 30) ? number : Read7Bit(readBuffer);
    return new EmberId(theClass, isConstructed, identifierNumber);
}
// Explicit interface bridge: forwards to the handler's strongly-typed Read.
float ISimpleTypeHandler<float>.Read(ReadBuffer buf, int len, FieldDescription fieldDescription)
    => Read(buf, len, fieldDescription);
/// <summary>See <i>"X.690"</i><cite>X.690</cite>, chapter 8.5 and
/// <see href="http://technet.microsoft.com/en-us/library/0b34tf65.aspx">IEEE Floating-Point
/// Representation</see>. Of course the assumption is that C# has the same floating point representation as C++
/// (pretty safe, as floating point calculations are done by the hardware).</summary>
private static double ReadReal(ReadBuffer readBuffer, int length)
{
    if (!BitConverter.IsLittleEndian)
    {
        throw new NotSupportedException("Method is not supported for big endian system.");
    }

    var position = readBuffer.Position;

    // 8.5.2
    if (length == 0)
    {
        return 0.0;
    }

    var firstContentsOctet = readBuffer[readBuffer.Index++];
    --length;
    long signBits;
    int exponentLength;

    // 8.5.3 - 8.5.7, encoding must be base 2, so the bits 6 to 3 must be 0. Moreover, bits 8 to 7 must not
    // both be 0 (which would imply a decimal encoding). This leaves exactly the 12 cases enumerated below.
    switch (firstContentsOctet)
    {
        case 0x40: return double.PositiveInfinity; // 8.5.9
        case 0x41: return double.NegativeInfinity; // 8.5.9
        case 0x42: return double.NaN; // 8.5.9
        case 0x43: return -0.0; // 8.5.9

        // 8.5.7.4 a)
        case 0x80: signBits = 0; exponentLength = 1; break;
        case 0xC0: signBits = long.MinValue; exponentLength = 1; break;

        // 8.5.7.4 b)
        case 0x81: signBits = 0; exponentLength = 2; break;
        case 0xC1: signBits = long.MinValue; exponentLength = 2; break;

        // 8.5.7.4 c)
        case 0x82: signBits = 0; exponentLength = 3; break;
        case 0xC2: signBits = long.MinValue; exponentLength = 3; break;

        // 8.5.7.4 d) — the exponent length itself is carried in the next octet.
        case 0x83: signBits = 0; exponentLength = readBuffer[readBuffer.Index++]; --length; break;
        case 0xC3: signBits = long.MinValue; exponentLength = readBuffer[readBuffer.Index++]; --length; break;

        default:
            throw CreateEmberException("Unexpected encoding for Real at position {0}.", position);
    }

    var mantissaLength = length - exponentLength; // 8.5.7.5

    if (mantissaLength < 1)
    {
        // The mantissa can never be 0, so there must be at least one byte for the mantissa.
        throw CreateEmberException("Incorrect length for Real at position {0}.", position);
    }

    var exponent = Read8Bit(readBuffer, exponentLength, true);
    var mantissa = Read8Bit(readBuffer, mantissaLength, false);

    // Exponent 1024 transports the IEEE special values (infinities / NaN).
    if (exponent == 1024)
    {
        if (mantissa == 0)
        {
            return signBits == 0 ? double.PositiveInfinity : double.NegativeInfinity;
        }
        else
        {
            return double.NaN;
        }
    }

    // https://en.wikipedia.org/wiki/Double-precision_floating-point_format
    if ((exponent <= -Constants.DoubleExponentBias) || (exponent > Constants.DoubleExponentBias))
    {
        throw CreateEmberException(
            "The exponent of the Real at position {0} exceeds the expected range.", position);
    }

    if (mantissa == 0)
    {
        throw CreateEmberException("The mantissa of the Real at position {0} is zero.", position);
    }

    // Normalization, required for IEEE floating point representation
    while ((mantissa & (Constants.DoubleExponentMask >> Constants.BitsPerByte)) == 0)
    {
        mantissa <<= Constants.BitsPerByte;
    }

    // In the 64-bit floating point format, the first non-zero binary digit is not stored but only assumed to
    // be bit 53. We therefore shift until we have the 53rd digit == 1 and then mask it out again.
    while ((mantissa & Constants.DoubleExponentMask) == 0)
    {
        mantissa <<= 1;
    }

    mantissa &= Constants.DoubleMantissaMask;

    var exponentBits = (exponent + Constants.DoubleExponentBias) << Constants.DoubleMantissaBits;
    return BitConverter.Int64BitsToDouble(signBits | exponentBits | mantissa);
}
// Explicit interface bridge: renders the typed read result as text.
string ISimpleTypeHandler<string>.Read(ReadBuffer buf, int len, FieldDescription fieldDescription)
    => Read(buf, len, fieldDescription).ToString();
/// <summary>See <i>"X.690"</i><cite>X.690</cite>, chapter 8.1.3.</summary>
/// <returns>The definite length, or <c>null</c> for the indefinite form.</returns>
private static int? ReadLength(ReadBuffer readBuffer)
{
    var position = readBuffer.Position;
    readBuffer.Fill(1);
    var leadingOctet = readBuffer[readBuffer.Index++];

    // Short form: bit 8 clear, the octet itself is the length.
    if ((leadingOctet & 0x80) == 0)
    {
        return leadingOctet;
    }

    // Long form: the low 7 bits give the count of subsequent length octets;
    // a count of zero signals the indefinite form.
    var length = leadingOctet & 0x7F;
    if (length == 0)
    {
        return null;
    }

    readBuffer.Fill(length);
    var result = Read8Bit(readBuffer, length, false);
    if (result > int.MaxValue)
    {
        throw CreateEmberException("The length at position {0} exceeds the expected range.", position);
    }

    return (int)result;
}
// Allocates a destination for the full value and resets the copy cursor.
public override void PrepareRead(ReadBuffer buf, int len, FieldDescription fieldDescription = null)
{
    _readBuf = buf;
    _pos = 0;
    _bytes = new byte[len];
}
// Reads a big-endian base-256 integer of the given byte length, optionally
// sign-extending (two's complement) the most significant octet. Throws when the
// encoded value would not fit a long without corrupting the sign bit.
private static long Read8Bit(ReadBuffer readBuffer, int length, bool isSigned)
{
    if (length <= 0)
    {
        throw new EmberException("Unexpected zero length for integer.");
    }

    var position = readBuffer.Position;
    var mostSignificant = readBuffer[readBuffer.Index++];
    long result;
    long leading;

    // - 1 accounts for the fact that we must not overwrite the sign bit by shifting in bits
    const int MostSignificantShift = Constants.BitsPerLong - Constants.BitsPerByte - 1;

    if (isSigned && ((mostSignificant & 0x80) != 0))
    {
        // Negative value: sign-extend the first octet and remember the expected
        // leading bit pattern for the overflow check below.
        result = (Constants.AllBitsSetLong << Constants.BitsPerByte) | mostSignificant;
        leading = Constants.AllBitsSetLong << MostSignificantShift;
    }
    else
    {
        result = mostSignificant;
        leading = 0x00;
    }

    for (--length; length > 0; --length)
    {
        const long DiscardBitsMask = Constants.AllBitsSetLong << MostSignificantShift;

        // The bits about to be shifted out must still equal the leading pattern;
        // otherwise the encoded value exceeds the representable range.
        if ((result & DiscardBitsMask) != leading)
        {
            throw CreateEmberException(
                "The integer, length or exponent at position {0} exceeds the expected range.", position);
        }

        result <<= Constants.BitsPerByte;
        result |= readBuffer[readBuffer.Index++];
    }

    return result;
}
// Reads exactly one slot's worth of bytes into the scratch buffer and converts it;
// a short read is treated as a hard error.
public static DiskSlotInt32 ReadDiskSlotInt32(this BinaryReader binaryReader, ReadBuffer<DiskSlotInt32> readBuffer)
{
    var bytesRead = binaryReader.Read(readBuffer.Buffer, 0, readBuffer.Size);
    if (bytesRead != readBuffer.Size)
    {
        throw new DiskSearchEngineException("Error reading [DiskSlot] in file (result != buffer.Length).");
    }

    return readBuffer.ConvertFromBuffer();
}
// Decodes a multi-octet base-128 integer (7 value bits per octet, big-endian) as used
// for identifier numbers and subidentifiers; bit 8 of each octet is the continuation flag.
private static int Read7Bit(ReadBuffer readBuffer)
{
    var position = readBuffer.Position;
    readBuffer.Fill(1);
    byte currentByte;
    var result = 0;

    // Accumulate octets while the continuation bit is set.
    while (((currentByte = readBuffer[readBuffer.Index++]) & 0x80) > 0)
    {
        result |= currentByte & 0x7F;

        // - 1 accounts for the fact that we must not overwrite the sign bit by shifting in bits
        const int DiscardBitsMask =
            Constants.AllBitsSetInt << (Constants.BitsPerInt - Constants.BitsPerEncodedByte - 1);

        // Overflow check before shifting the accumulated value up by one encoded byte.
        if ((result & DiscardBitsMask) != 0)
        {
            throw CreateEmberException(
                "The identifier number or subidentifier at position {0} exceeds the expected range.", position);
        }

        result <<= Constants.BitsPerEncodedByte;
        readBuffer.Fill(1);
    }

    // Final octet (continuation bit clear) carries the least significant bits.
    result |= currentByte;
    return result;
}
// Reads and validates the per-sub-file parameters from the map-file header, then builds
// a zoom-level -> sub-file lookup table covering the global zoom range.
private void ReadSubFileParameters(ReadBuffer readBuffer, long fileSize, MapFileInfoBuilder mapFileInfoBuilder)
{
    // get and check the number of sub-files (1 byte)
    sbyte numberOfSubFiles = readBuffer.ReadByte();
    if (numberOfSubFiles < 1)
    {
        throw new MapFileException("invalid number of sub-files: " + numberOfSubFiles);
    }
    mapFileInfoBuilder.numberOfSubFiles = numberOfSubFiles;

    SubFileParameter[] tempSubFileParameters = new SubFileParameter[numberOfSubFiles];
    // Start with the extremes so the first sub-file always updates both bounds.
    this.zoomLevelMinimum = sbyte.MaxValue;
    this.zoomLevelMaximum = sbyte.MinValue;

    // get and check the information for each sub-file
    for (sbyte currentSubFile = 0; currentSubFile < numberOfSubFiles; ++currentSubFile)
    {
        SubFileParameterBuilder subFileParameterBuilder = new SubFileParameterBuilder();

        // get and check the base zoom level (1 byte)
        sbyte baseZoomLevel = readBuffer.ReadByte();
        if (baseZoomLevel < 0 || baseZoomLevel > BASE_ZOOM_LEVEL_MAX)
        {
            throw new MapFileException("invalid base zoom level: " + baseZoomLevel);
        }
        subFileParameterBuilder.BaseZoomLevel = baseZoomLevel;

        // get and check the minimum zoom level (1 byte)
        sbyte zoomLevelMin = readBuffer.ReadByte();
        if (zoomLevelMin < 0 || zoomLevelMin > 22)
        {
            throw new MapFileException("invalid minimum zoom level: " + zoomLevelMin);
        }
        subFileParameterBuilder.ZoomLevelMin = zoomLevelMin;

        // get and check the maximum zoom level (1 byte)
        sbyte zoomLevelMax = readBuffer.ReadByte();
        if (zoomLevelMax < 0 || zoomLevelMax > 22)
        {
            throw new MapFileException("invalid maximum zoom level: " + zoomLevelMax);
        }
        subFileParameterBuilder.ZoomLevelMax = zoomLevelMax;

        // check for valid zoom level range
        if (zoomLevelMin > zoomLevelMax)
        {
            throw new MapFileException("invalid zoom level range: " + zoomLevelMin + SPACE + zoomLevelMax);
        }

        // get and check the start address of the sub-file (8 bytes)
        long startAddress = readBuffer.ReadLong();
        if (startAddress < HEADER_SIZE_MIN || startAddress >= fileSize)
        {
            throw new MapFileException("invalid start address: " + startAddress);
        }
        subFileParameterBuilder.StartAddress = startAddress;

        long indexStartAddress = startAddress;
        if (mapFileInfoBuilder.optionalFields.IsDebugFile)
        {
            // the sub-file has an index signature before the index
            indexStartAddress += SIGNATURE_LENGTH_INDEX;
        }
        subFileParameterBuilder.IndexStartAddress = indexStartAddress;

        // get and check the size of the sub-file (8 bytes)
        long subFileSize = readBuffer.ReadLong();
        if (subFileSize < 1)
        {
            throw new MapFileException("invalid sub-file size: " + subFileSize);
        }
        subFileParameterBuilder.SubFileSize = subFileSize;

        subFileParameterBuilder.BoundingBox = mapFileInfoBuilder.boundingBox;

        // add the current sub-file to the list of sub-files
        tempSubFileParameters[currentSubFile] = subFileParameterBuilder.Build();

        // update the global minimum and maximum zoom level information
        if (this.zoomLevelMinimum > tempSubFileParameters[currentSubFile].ZoomLevelMin)
        {
            this.zoomLevelMinimum = tempSubFileParameters[currentSubFile].ZoomLevelMin;
            mapFileInfoBuilder.zoomLevelMin = this.zoomLevelMinimum;
        }
        if (this.zoomLevelMaximum < tempSubFileParameters[currentSubFile].ZoomLevelMax)
        {
            this.zoomLevelMaximum = tempSubFileParameters[currentSubFile].ZoomLevelMax;
            mapFileInfoBuilder.zoomLevelMax = this.zoomLevelMaximum;
        }
    }

    // create and fill the lookup table for the sub-files
    this.subFileParameters = new SubFileParameter[this.zoomLevelMaximum + 1];
    for (int currentMapFile = 0; currentMapFile < numberOfSubFiles; ++currentMapFile)
    {
        SubFileParameter subFileParameter = tempSubFileParameters[currentMapFile];
        // Later sub-files overwrite earlier ones on overlapping zoom levels.
        for (sbyte zoomLevel = subFileParameter.ZoomLevelMin; zoomLevel <= subFileParameter.ZoomLevelMax; ++zoomLevel)
        {
            this.subFileParameters[zoomLevel] = subFileParameter;
        }
    }
}
// Provider-specific read: the decoded timestamp is rebuilt with an explicit UTC kind
// and then converted to local time.
internal override NpgsqlDateTime ReadPsv(ReadBuffer buf, int len, FieldDescription fieldDescription = null)
{
    var timeStamp = ReadTimeStamp(buf, len, fieldDescription);
    var utcValue = new NpgsqlDateTime(timeStamp.Date, timeStamp.Time, DateTimeKind.Utc);
    return utcValue.ToLocalTime();
}
// Persists the combo-box history into a named sector of the binary history file.
// File layout (via ReadBuffer/WriteBuffer): a leading section-count byte, then per
// section a name string, an item count (int) and that many strings.
public static void save(String name, ComboBox history)
{
    if (!File.Exists(historyFile)) { File.Create(historyFile).Close(); }
    byte[] bytes = File.ReadAllBytes(historyFile);
    ReadBuffer rb = new ReadBuffer(bytes);
    // NOTE(review): on a freshly created (empty) file this readByte presumably returns
    // a non-positive value, handled by the sections >= 0 branch below — verify
    // ReadBuffer's behavior on an empty backing array.
    int sections = rb.readByte();
    bool foundSector = false;
    WriteBuffer wb = new WriteBuffer();
    // Placeholder count; patched into data[0] below once the final count is known.
    wb.writeByte(sections);
    if (sections >= 0)
    {
        for (int i = 0; i < sections; i++)
        {
            String sectorName = rb.readString();
            wb.writeString(sectorName);
            int sectorSize = rb.readInt();
            if (sectorName != name) // Not our sector, copy
            {
                wb.writeInt(sectorSize);
                for (int o = 0; o < sectorSize; o++)
                {
                    wb.writeString(rb.readString());
                }
            }
            else // Our sector, insert our data
            // Empty previous data
            {
                // Skip (discard) the old entries for this sector.
                for (int o = 0; o < sectorSize; o++)
                {
                    rb.readString();
                }
                // Insert new data, capped at historySize entries.
                int dataSize = history.Items.Count > historySize ? historySize : history.Items.Count;
                wb.writeInt(dataSize);
                for (int o = 0; o < dataSize; o++)
                {
                    wb.writeString(history.Items[o].ToString());
                }
                foundSector = true;
            }
        }
    }
    else
    {
        sections = 0;
    }
    if (!foundSector)
    {
        // Sector was absent: append it at the end and bump the section count.
        wb.writeString(name);
        int dataSize = history.Items.Count > historySize ? historySize : history.Items.Count;
        wb.writeInt(dataSize);
        for (int o = 0; o < dataSize; o++)
        {
            wb.writeString(history.Items[o].ToString());
        }
        sections++;
    }
    byte[] data = wb.ToArray();
    data[0] = (byte)sections; // patch the placeholder count written above
    File.WriteAllBytes(historyFile, data);
}
// Test handler: consumes the value so the buffer stays in sync, then deliberately
// fails with a SafeReadException.
public override int Read(ReadBuffer buf, int len, FieldDescription fieldDescription)
{
    buf.ReadInt32();
    throw new SafeReadException(new Exception("Safe read exception as requested"));
}
// Provider-specific read: shared decoder with the final flag mirroring the original
// call (presumably the non-cidr variant — verify against DoRead).
internal override NpgsqlInet ReadPsv(ReadBuffer buf, int len, FieldDescription fieldDescription = null)
{
    return DoRead(buf, fieldDescription, len, false);
}
// Test handler: fails without a SafeReadException wrapper, without consuming data.
public override int Read(ReadBuffer buf, int len, FieldDescription fieldDescription)
    => throw new Exception("Non-safe read exception as requested");
// Performs the base bookkeeping, then additionally remembers the column's metadata.
public override void PrepareRead(ReadBuffer buf, int len, FieldDescription fieldDescription)
{
    base.PrepareRead(buf, len, fieldDescription);
    _fieldDescription = fieldDescription;
}
// Test fixture setup: a fresh in-memory stream backing a default-sized read buffer.
public void SetUp()
{
    var underlying = new MemoryStream();
    Underlying = underlying;
    ReadBuffer = new ReadBuffer(null, underlying, ReadBuffer.DefaultSize, PGUtil.UTF8Encoding);
}
// Resets the cursor and computes the payload length.
public override void PrepareRead(ReadBuffer buf, int len, FieldDescription fieldDescription)
{
    _readBuf = buf;
    _len = len - 4;  // Subtract leading bit length field
    _pos = -1;
}
// Parses the CommandComplete ('C') message tag, extracting the statement type, the
// affected row count and — for INSERT — the OID of the inserted row.
// NOTE(review): near-duplicate of another CommandCompleteMessage.Load in this file;
// consider consolidating.
internal CommandCompleteMessage Load(ReadBuffer buf, int len)
{
    Rows = 0;
    OID = 0;

    var tag = buf.ReadString(len - 1);
    buf.Skip(1);   // Null terminator
    var tokens = tag.Split();
    if (tokens.Length == 0)
    {
        return(this);
    }

    switch (tokens[0])
    {
        case "INSERT":
            // Tag format: "INSERT <oid> <rows>"
            StatementType = StatementType.Insert;
            uint oid;
            if (uint.TryParse(tokens[1], out oid))
            {
                OID = oid;
            }
            else
            {
                Log.Error("Ignoring unparseable OID in CommandComplete: " + tokens[1]);
            }
            ParseRows(tokens[2]);
            break;
        case "DELETE":
            StatementType = StatementType.Delete;
            ParseRows(tokens[1]);
            break;
        case "UPDATE":
            StatementType = StatementType.Update;
            ParseRows(tokens[1]);
            break;
        case "SELECT":
            StatementType = StatementType.Select;
            // PostgreSQL 8.4 and below doesn't include the number of rows
            if (tokens.Length > 1)
            {
                ParseRows(tokens[1]);
            }
            break;
        case "MOVE":
            StatementType = StatementType.Move;
            ParseRows(tokens[1]);
            break;
        case "FETCH":
            StatementType = StatementType.Fetch;
            ParseRows(tokens[1]);
            break;
        case "COPY":
            StatementType = StatementType.Copy;
            if (tokens.Length > 1)
            {
                ParseRows(tokens[1]);
            }
            break;
        case "CREATE":
            // Only "CREATE TABLE AS" carries a row count; other CREATEs fall through.
            if (tag.StartsWith("CREATE TABLE AS"))
            {
                StatementType = StatementType.CreateTableAs;
                ParseRows(tokens[3]);
                break;
            }
            goto default;
        default:
            StatementType = StatementType.Other;
            break;
    }
    return(this);
}
// Explicit interface bridge: renders the typed read result as text.
string ISimpleTypeHandler<string>.Read(ReadBuffer buf, int len, [CanBeNull] FieldDescription fieldDescription)
{
    var value = Read(buf, len, fieldDescription);
    return value.ToString();
}
////////////////////////////////////////////////////////////////////////////////////////////////////////////////

/// <summary>Initializes a new instance of the <see cref="DeframingStream"/> class.</summary>
/// <param name="readBuffer">The buffer supplying the raw (framed) bytes; passed to the base class.</param>
/// <param name="outOfFrameByteReceived">Callback stored for bytes received outside of a frame.</param>
internal DeframingStream(ReadBuffer readBuffer, Action<byte> outOfFrameByteReceived) :
    base(readBuffer, null)
{
    this.outOfFrameByteReceived = outOfFrameByteReceived;
}
// Feeds one raw byte through the deframing state machine: a begin-of-frame byte starts
// a frame, escaped bytes are XOR-decoded, and end-of-frame validates the running
// CRC-CCITT. Returns false once a frame has ended (valid or not), true otherwise.
private bool ReadByte(ReadBuffer readBuffer)
{
    var currentByte = readBuffer[readBuffer.Index++];
    switch (this.state)
    {
        case State.BeforeFrame:
            if (currentByte == Frame.BeginOfFrame)
            {
                this.state = State.InFrame;
            }
            else
            {
                // Bytes between frames are reported to the client rather than dropped.
                this.outOfFrameByteReceived(currentByte);
            }
            break;
        case State.InFrame:
            if (currentByte < Frame.InvalidStart)
            {
                // Ordinary payload byte: fold into the CRC and queue for the reader.
                this.crc = Crc.AddCrcCcitt(this.crc, currentByte);
                this.decodedQueue.Enqueue(currentByte);
            }
            else
            {
                switch (currentByte)
                {
                    case Frame.EscapeByte:
                        this.state = State.InFrameEscaped;
                        break;
                    case Frame.BeginOfFrame:
                        // A new BOF inside a frame restarts it: discard what we had.
                        this.decodedQueue.Clear();
                        this.crc = 0xFFFF;
                        break;
                    case Frame.EndOfFrame:
                        this.state = State.AfterFrame;
                        // 0xF0B8 is the expected CRC residue; a mismatch discards the frame.
                        if (this.crc != 0xF0B8)
                        {
                            this.decodedQueue.Clear();
                        }
                        return false;
                    default:
                        // Any other invalid byte aborts the frame.
                        this.state = State.AfterFrame;
                        this.decodedQueue.Clear();
                        break;
                }
            }
            break;
        case State.InFrameEscaped:
            if (currentByte >= Frame.InvalidStart)
            {
                // NOTE(review): after clearing the queue here, execution still falls
                // through to enqueue the XOR-decoded byte and re-enter InFrame below,
                // overwriting the AfterFrame state just set — confirm this is intended
                // rather than aborting the frame immediately.
                this.state = State.AfterFrame;
                this.decodedQueue.Clear();
            }

            currentByte = (byte)(currentByte ^ Frame.EscapeXor);
            this.crc = Crc.AddCrcCcitt(this.crc, currentByte);
            this.decodedQueue.Enqueue(currentByte);
            this.state = State.InFrame;
            break;
    }
    return true;
}
// Parses an AuthenticationGSSContinue message: the remainder of the body, after the
// 4-byte AuthRequestType code, is opaque authentication data.
internal static AuthenticationGSSContinueMessage Load(ReadBuffer buf, int len)
{
    var payloadLength = len - 4; // The AuthRequestType code
    var authenticationData = new byte[payloadLength];
    buf.ReadBytes(authenticationData, 0, payloadLength);
    return new AuthenticationGSSContinueMessage(authenticationData);
}