private static Result WriteDoc(ref RowWriter writer, TypeArgument typeArg, TestDoc doc)
{
    Result r = writer.WriteString("Id", doc.Id);
    if (r != Result.Success) { return r; }

    r = writer.WriteInt32("Cost", doc.Cost);
    if (r != Result.Success) { return r; }

    r = writer.WriteString("Status", doc.Status);
    if (r != Result.Success) { return r; }

    r = writer.WriteString("Description", doc.Description);
    if (r != Result.Success) { return r; }

    return Result.Success;
}
private Result WriteOperationResult(long index, out ReadOnlyMemory<byte> buffer)
{
    if (index >= this.results.Count)
    {
        buffer = ReadOnlyMemory<byte>.Empty;
        return Result.Success;
    }

    RowBuffer row = new RowBuffer(2 * 1024);
    row.InitLayout(HybridRowVersion.V1, BatchSchemaProvider.BatchResultLayout, BatchSchemaProvider.BatchLayoutResolver);

    Result r = RowWriter.WriteBuffer(ref row, this.results[(int)index], BatchResponsePayloadWriter.WriteResult);
    if (r != Result.Success)
    {
        buffer = null;
        return r;
    }

    MemoryStream output = new MemoryStream(row.Length);
    row.WriteTo(output);
    buffer = new Memory<byte>(output.GetBuffer(), 0, (int)output.Length);
    return r;
}
private Result WriteOperation(long index, out ReadOnlyMemory<byte> buffer)
{
    if (this.bodyStream.Length > this.maxBodyLength)
    {
        this.shouldDeleteLastWrittenRecord = true;
        buffer = default(ReadOnlyMemory<byte>);
        return Result.Success;
    }

    this.bodyStreamPositionBeforeWritingCurrentRecord = this.bodyStream.Length;

    if (index >= this.operations.Count)
    {
        buffer = default(ReadOnlyMemory<byte>);
        return Result.Success;
    }

    ItemBatchOperation operation = this.operations.Array[this.operations.Offset + (int)index];

    RowBuffer row = new RowBuffer(this.operationResizableWriteBuffer.Memory.Length, this.operationResizableWriteBuffer);
    row.InitLayout(HybridRowVersion.V1, BatchSchemaProvider.BatchOperationLayout, BatchSchemaProvider.BatchLayoutResolver);

    Result r = RowWriter.WriteBuffer(ref row, operation, ItemBatchOperation.WriteOperation);
    if (r != Result.Success)
    {
        buffer = null;
        return r;
    }

    this.lastWrittenOperationIndex = (int)index;
    buffer = this.operationResizableWriteBuffer.Memory.Slice(0, row.Length);
    return Result.Success;
}
private void RowWriterButton_Click(object sender, RoutedEventArgs e)
{
    try
    {
        List<string> stringList = new List<string>();
        stringList.Add("This");
        stringList.Add("is");
        stringList.Add("a");
        stringList.Add("quote, comma");
        stringList.Add("test");

        using (MemoryStream ms = new MemoryStream())
        using (StreamWriter sw = new StreamWriter(ms))
        {
            var writer = new RowWriter(sw);
            writer.Write(stringList);
            sw.Flush();
            ms.Seek(0, SeekOrigin.Begin);

            using (StreamReader sr = new StreamReader(ms))
            {
                while (sr.EndOfStream == false)
                {
                    LogMessage(sr.ReadLine());
                }
            }
        }
    }
    catch (Exception ex)
    {
        LogError(ex);
    }
}
private static Result WriteResult(ref RowWriter writer, TypeArgument typeArg, TransactionalBatchOperationResult result)
{
    Result r = writer.WriteInt32("statusCode", (int)result.StatusCode);
    if (r != Result.Success) { return r; }

    if (result.SubStatusCode != SubStatusCodes.Unknown)
    {
        r = writer.WriteInt32("subStatusCode", (int)result.SubStatusCode);
        if (r != Result.Success) { return r; }
    }

    if (result.ETag != null)
    {
        r = writer.WriteString("eTag", result.ETag);
        if (r != Result.Success) { return r; }
    }

    if (result.ResourceStream != null)
    {
        r = writer.WriteBinary("resourceBody", BatchResponsePayloadWriter.StreamToBytes(result.ResourceStream));
        if (r != Result.Success) { return r; }
    }

    if (result.RetryAfter != null)
    {
        r = writer.WriteUInt32("retryAfterMilliseconds", (uint)result.RetryAfter.TotalMilliseconds);
        if (r != Result.Success) { return r; }
    }

    r = writer.WriteFloat64("requestCharge", result.RequestCharge);
    if (r != Result.Success) { return r; }

    return Result.Success;
}
public MemoryStream ToHybridRowStream()
{
    RowBuffer row = new RowBuffer(80000);
    row.InitLayout(HybridRowVersion.V1, BatchTestBase.TestDocLayout, BatchTestBase.LayoutResolver);
    Result r = RowWriter.WriteBuffer(ref row, this, TestDoc.WriteDoc);
    Assert.AreEqual(Result.Success, r);

    MemoryStream output = new MemoryStream(row.Length);
    row.WriteTo(output);
    output.Position = 0;
    return output;
}
public void Write(IEnumerable<TModel> models, IList<string> header = null)
{
    if (PropertyMapper.RequiredHeaderOnWrite && header == null)
    {
        throw new TableIOException(new[]
        {
            new ErrorDetail
            {
                Type = "HeaderRequired",
                Message = "Header is required on write.",
            }
        });
    }

    var propertyMaps = PropertyMapper.CreatePropertyMaps(typeof(TModel), header);

    // Decide the valid column size.
    var validColumnSize = propertyMaps.Any() ? propertyMaps.Max(m => m.ColumnIndex) + 1 : 0;
    if (ColumnSize.HasValue)
    {
        if (ColumnSize < validColumnSize)
        {
            throw new TableIOException(new[]
            {
                new ErrorDetail
                {
                    Type = "InvalidColumnSize",
                    Message = "Column size is invalid.",
                }
            });
        }
        validColumnSize = ColumnSize.Value;
    }
    else if (header != null)
    {
        if (header.Count < validColumnSize)
        {
            throw new TableIOException(new[]
            {
                new ErrorDetail
                {
                    Type = "InvalidTableHeader",
                    Message = "Table header is invalid.",
                }
            });
        }
        validColumnSize = header.Count;
        RowWriter.Write(header.Cast<object>().ToArray());
    }

    foreach (var model in models)
    {
        RowWriter.Write(ConvertToRow(model, propertyMaps, validColumnSize));
    }
}
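For context, a minimal calling sketch for the Write method above. It assumes a writer instance (here called tableWriter) whose PropertyMapper and underlying RowWriter are already configured; the Model type, its properties, and the tableWriter variable are placeholders for illustration, not part of the library's documented API.

// Hypothetical model; property names are assumed to map to the header columns.
public class Model
{
    public int Id { get; set; }
    public string Name { get; set; }
}

// Assumes 'tableWriter' is an already-constructed writer exposing the Write method above.
var models = new List<Model>
{
    new Model { Id = 1, Name = "alpha" },
    new Model { Id = 2, Name = "beta" },
};
var header = new List<string> { "Id", "Name" };

// Writes the header row (when supplied) followed by one row per model.
tableWriter.Write(models, header);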
internal virtual Result WriteRequestProperties(ref RowWriter writer, bool pkWritten)
{
    if (this.Properties == null)
    {
        return Result.Success;
    }

    if (this.Properties.TryGetValue(WFConstants.BackendHeaders.BinaryId, out object binaryIdObj)
        && binaryIdObj is byte[] binaryId)
    {
        Result r = writer.WriteBinary("binaryId", binaryId);
        if (r != Result.Success) { return r; }
    }

    if (this.Properties.TryGetValue(WFConstants.BackendHeaders.EffectivePartitionKey, out object epkObj)
        && epkObj is byte[] epk)
    {
        Result r = writer.WriteBinary("effectivePartitionKey", epk);
        if (r != Result.Success) { return r; }
    }

    if (!pkWritten
        && this.Properties.TryGetValue(HttpConstants.HttpHeaders.PartitionKey, out object pkStrObj)
        && pkStrObj is string pkString)
    {
        Result r = writer.WriteString("partitionKey", pkString);
        if (r != Result.Success) { return r; }
    }

    if (this.Properties.TryGetValue(WFConstants.BackendHeaders.TimeToLiveInSeconds, out object ttlObj)
        && ttlObj is string ttlStr
        && int.TryParse(ttlStr, out int ttl))
    {
        Result r = writer.WriteInt32("timeToLiveInSeconds", ttl);
        if (r != Result.Success) { return r; }
    }

    return Result.Success;
}
public void TestWriteOperationWithBinaryIdByteArray()
{
    ISpanResizer<byte> resizer = new MemorySpanResizer<byte>(100);
    RowBuffer row = new RowBuffer(capacity: 100, resizer: resizer);
    row.InitLayout(HybridRowVersion.V1, BatchSchemaProvider.BatchOperationLayout, BatchSchemaProvider.BatchLayoutResolver);

    byte[] testBinaryId = new byte[] { 1, 2, 3, 4, };
    ItemRequestOptions requestOptions = new();
    requestOptions.Properties = new Dictionary<string, object>()
    {
        { WFConstants.BackendHeaders.BinaryId, testBinaryId },
    };
    TransactionalBatchItemRequestOptions transactionalBatchItemRequestOptions =
        TransactionalBatchItemRequestOptions.FromItemRequestOptions(requestOptions);

    ItemBatchOperation operation = new ItemBatchOperation(
        operationType: OperationType.Patch,
        operationIndex: 0,
        partitionKey: Cosmos.PartitionKey.Null,
        requestOptions: transactionalBatchItemRequestOptions);

    int length = operation.GetApproximateSerializedLength();
    Assert.AreEqual(testBinaryId.Length, length);

    Result r = RowWriter.WriteBuffer(ref row, operation, ItemBatchOperation.WriteOperation);
    if (r != Result.Success)
    {
        Assert.Fail(r.ToString());
    }

    bool foundBinaryId = false;
    RowReader reader = new RowReader(ref row);
    while (reader.Read())
    {
        if (reader.PathSpan == Utf8String.TranscodeUtf16("binaryId"))
        {
            foundBinaryId = true;
            reader.ReadBinary(out byte[] binaryId);
            CollectionAssert.AreEqual(testBinaryId, binaryId);
        }
    }

    Assert.IsTrue(foundBinaryId);
}
private void WriteHeaderIfRequired()
{
    if (isHeaderWritten)
    {
        return;
    }

    ProtoWriter.WriteFieldHeader(1, WireType.StartGroup, writer);
    currentResultToken = ProtoWriter.StartSubItem(resultIndex, writer);

    IList<ProtoDataColumn> columns = columnFactory.GetColumns(reader, options);
    new HeaderWriter(writer).WriteHeader(columns);

    rowWriter = new RowWriter(writer, columns, options);
    isHeaderWritten = true;
}
public static void BulkInsert<T>(this TdsConnection cnn, IEnumerable<T> objects, string tableName, Dictionary<string, PropertyInfo> columnMapping)
{
    var writer = cnn.TdsPackage.Writer;
    var reader = cnn.TdsPackage.Reader;
    var parser = cnn.StreamParser;

    MetadataBulkCopy[] metaDataAllColumns = null;
    writer.SendExecuteBatch($"SET FMTONLY ON select * from {tableName} SET FMTONLY OFF", cnn.SqlTransactionId);
    parser.ParseInput(count => { metaDataAllColumns = reader.ColMetaDataBulkCopy(count); });

    writer.ColumnsMetadata = columnMapping != null
        ? GetUsedColumns(metaDataAllColumns, columnMapping)
        : GetUsedColumns(metaDataAllColumns);

    var bulkInsert = CreateBulkInsertStatement(tableName, writer.ColumnsMetadata);
    writer.SendExecuteBatch(bulkInsert, cnn.SqlTransactionId);
    parser.ParseInput();

    writer.NewPackage(TdsEnums.MT_BULK);
    var columnWriter = new TdsColumnWriter(writer);
    var rowWriter = RowWriter.GetComplexWriter<T>(columnWriter);
    WriteBulkInsertColMetaData(writer);

    foreach (var o in objects)
    {
        writer.WriteByte(TdsEnums.SQLROW);
        rowWriter(columnWriter, o);
    }

    writer.WriteByteArray(Done);
    writer.SendLastMessage();
    parser.ParseInput();
    if (parser.Status != ParseStatus.Done)
    {
        parser.ParseInput();
    }
}
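A minimal calling sketch for the BulkInsert extension method above, assuming an already-open TdsConnection named cnn (how the connection is created is library-specific and not shown here). The Person type, the table name, and the mapping keys are illustrative assumptions; the mapping keys are assumed to be destination column names.

// Hypothetical POCO mapped to the target table's columns.
public class Person
{
    public int Id { get; set; }
    public string Name { get; set; }
}

// Requires System.Reflection and System.Collections.Generic.
var columnMapping = new Dictionary<string, PropertyInfo>
{
    { "Id", typeof(Person).GetProperty(nameof(Person.Id)) },
    { "Name", typeof(Person).GetProperty(nameof(Person.Name)) },
};

var people = new List<Person>
{
    new Person { Id = 1, Name = "Ada" },
    new Person { Id = 2, Name = "Linus" },
};

// Streams the rows to the server as a single TDS bulk-insert batch.
cnn.BulkInsert(people, "dbo.Person", columnMapping);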
private void BothButton_Click(object sender, RoutedEventArgs e)
{
    try
    {
        var columnList = new List<string>();
        List<string> stringList = new List<string>();
        stringList.Add("jack, says");
        stringList.Add("he ");
        stringList.Add("ran");
        stringList.Add("over");
        stringList.Add("John's");
        stringList.Add("foot");

        using (MemoryStream ms = new MemoryStream())
        using (StreamWriter sw = new StreamWriter(ms))
        {
            var writer = new RowWriter(sw);
            writer.Write(stringList);
            sw.Flush();
            ms.Seek(0, SeekOrigin.Begin);

            using (StreamReader sr = new StreamReader(ms))
            {
                var reader = new RowReader(sr);
                while (reader.CanRead())
                {
                    PrintColumnList(reader.ReadRow());
                }
            }
        }
    }
    catch (Exception ex)
    {
        LogError(ex);
    }
}
internal static Result WriteOperation(ref RowWriter writer, TypeArgument typeArg, ItemBatchOperation operation)
{
    bool pkWritten = false;
    Result r = writer.WriteInt32("operationType", (int)operation.OperationType);
    if (r != Result.Success) { return r; }

    r = writer.WriteInt32("resourceType", (int)ResourceType.Document);
    if (r != Result.Success) { return r; }

    if (operation.PartitionKeyJson != null)
    {
        r = writer.WriteString("partitionKey", operation.PartitionKeyJson);
        if (r != Result.Success) { return r; }
        pkWritten = true;
    }

    if (operation.Id != null)
    {
        r = writer.WriteString("id", operation.Id);
        if (r != Result.Success) { return r; }
    }

    if (!operation.ResourceBody.IsEmpty)
    {
        r = writer.WriteBinary("resourceBody", operation.ResourceBody.Span);
        if (r != Result.Success) { return r; }
    }

    if (operation.RequestOptions != null)
    {
        TransactionalBatchItemRequestOptions options = operation.RequestOptions;

        if (options.IndexingDirective.HasValue)
        {
            string indexingDirectiveString = IndexingDirectiveStrings.FromIndexingDirective(options.IndexingDirective.Value);
            r = writer.WriteString("indexingDirective", indexingDirectiveString);
            if (r != Result.Success) { return r; }
        }

        if (options.IfMatchEtag != null)
        {
            r = writer.WriteString("ifMatch", options.IfMatchEtag);
            if (r != Result.Success) { return r; }
        }
        else if (options.IfNoneMatchEtag != null)
        {
            r = writer.WriteString("ifNoneMatch", options.IfNoneMatchEtag);
            if (r != Result.Success) { return r; }
        }

        if (options.Properties != null)
        {
            if (options.Properties.TryGetValue(WFConstants.BackendHeaders.BinaryId, out object binaryIdObj))
            {
                byte[] binaryId = binaryIdObj as byte[];
                if (binaryId != null)
                {
                    r = writer.WriteBinary("binaryId", binaryId);
                    if (r != Result.Success) { return r; }
                }
            }

            if (options.Properties.TryGetValue(WFConstants.BackendHeaders.EffectivePartitionKey, out object epkObj))
            {
                byte[] epk = epkObj as byte[];
                if (epk != null)
                {
                    r = writer.WriteBinary("effectivePartitionKey", epk);
                    if (r != Result.Success) { return r; }
                }
            }

            if (!pkWritten && options.Properties.TryGetValue(
                HttpConstants.HttpHeaders.PartitionKey,
                out object pkStrObj))
            {
                string pkString = pkStrObj as string;
                if (pkString != null)
                {
                    r = writer.WriteString("partitionKey", pkString);
                    if (r != Result.Success) { return r; }
                }
            }

            if (options.Properties.TryGetValue(WFConstants.BackendHeaders.TimeToLiveInSeconds, out object ttlObj))
            {
                string ttlStr = ttlObj as string;
                if (ttlStr != null && int.TryParse(ttlStr, out int ttl))
                {
                    r = writer.WriteInt32("timeToLiveInSeconds", ttl);
                    if (r != Result.Success) { return r; }
                }
            }
        }
    }

    return Result.Success;
}
/// <summary>
/// Serialize an <see cref="System.Data.IDataReader"/> to a binary stream using protocol-buffers.
/// </summary>
/// <param name="stream">The <see cref="System.IO.Stream"/> to write to.</param>
/// <param name="reader">The <see cref="System.Data.IDataReader"/> whose contents to serialize.</param>
/// <param name="options"><see cref="ProtoDataWriterOptions"/> specifying any custom serialization options.</param>
public void Serialize(Stream stream, IDataReader reader, ProtoDataWriterOptions options)
{
    if (stream == null)
    {
        throw new ArgumentNullException("stream");
    }

    if (reader == null)
    {
        throw new ArgumentNullException("reader");
    }

    // Null options are permitted to be passed in.
    options = options ?? new ProtoDataWriterOptions();

    // For a (minor) performance improvement, Serialize() has been left
    // as a single long method with functions manually inlined.
    var resultIndex = 0;
    using (var writer = new ProtoWriter(stream, null, null))
    {
        do
        {
            // This is the underlying protocol buffers structure we use:
            //
            // <1 StartGroup> each DataTable
            // <SubItem>
            //   <2 StartGroup> each DataColumn
            //   <SubItem>
            //     <1 String> Column Name
            //     <2 Variant> Column ProtoDataType (enum casted to int)
            //   </SubItem>
            //   <3 StartGroup> each DataRow
            //   <SubItem>
            //     <(# Column Index) (corresponding type)> Field Value
            //   </SubItem>
            // </SubItem>
            //
            // NB if Field Value is a DataTable, the whole DataTable is
            // serialized recursively using the same structure.

            // write the table
            ProtoWriter.WriteFieldHeader(1, WireType.StartGroup, writer);
            SubItemToken resultToken = ProtoWriter.StartSubItem(resultIndex, writer);

            var columns = new ProtoDataColumnFactory().GetColumns(reader, options);
            new HeaderWriter(writer).WriteHeader(columns);

            var rowWriter = new RowWriter(writer, columns, options);

            // write the rows
            while (reader.Read())
            {
                rowWriter.WriteRow(reader);
            }

            ProtoWriter.EndSubItem(resultToken, writer);
            resultIndex++;
        }
        while (reader.NextResult());
    }
}
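A minimal usage sketch for the Serialize method above, assuming it is declared on the library's ProtoDataWriter class (that class name is inferred from the snippet, not confirmed here). DataTable.CreateDataReader and MemoryStream come from System.Data and System.IO; default ProtoDataWriterOptions are used.

// Build a small in-memory result set to serialize.
var table = new DataTable("people");
table.Columns.Add("Id", typeof(int));
table.Columns.Add("Name", typeof(string));
table.Rows.Add(1, "Ada");
table.Rows.Add(2, "Linus");

using (var stream = new MemoryStream())
using (IDataReader reader = table.CreateDataReader())
{
    // 'ProtoDataWriter' is assumed to be the class declaring the Serialize method above.
    var writer = new ProtoDataWriter();
    writer.Serialize(stream, reader, new ProtoDataWriterOptions());

    // 'stream' now holds the protocol-buffers encoding of the result set.
}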