        public void TestWriteOperationWithBinaryIdByteArray()
        {
            ISpanResizer<byte> resizer = new MemorySpanResizer<byte>(100);
            RowBuffer row = new RowBuffer(capacity: 100, resizer: resizer);

            row.InitLayout(HybridRowVersion.V1, BatchSchemaProvider.BatchOperationLayout, BatchSchemaProvider.BatchLayoutResolver);

            // The binary id is passed to the backend through ItemRequestOptions.Properties,
            // keyed by WFConstants.BackendHeaders.BinaryId.
            byte[] testBinaryId = new byte[] { 1, 2, 3, 4 };
            ItemRequestOptions requestOptions = new();
            requestOptions.Properties = new Dictionary<string, object>()
            {
                { WFConstants.BackendHeaders.BinaryId, testBinaryId },
            };
            TransactionalBatchItemRequestOptions transactionalBatchItemRequestOptions =
                TransactionalBatchItemRequestOptions.FromItemRequestOptions(requestOptions);
            ItemBatchOperation operation = new ItemBatchOperation(
                operationType: OperationType.Patch,
                operationIndex: 0,
                partitionKey: Cosmos.PartitionKey.Null,
                requestOptions: transactionalBatchItemRequestOptions);

            // With nothing else set on the operation, the approximate serialized length
            // should equal the length of the binary id alone.
            int length = operation.GetApproximateSerializedLength();
            Assert.AreEqual(testBinaryId.Length, length);

            Result r = RowWriter.WriteBuffer(ref row, operation, ItemBatchOperation.WriteOperation);

            if (r != Result.Success)
            {
                Assert.Fail(r.ToString());
            }

            bool foundBinaryId = false;
            RowReader reader = new RowReader(ref row);

            while (reader.Read())
            {
                if (reader.PathSpan == Utf8String.TranscodeUtf16("binaryId"))
                {
                    foundBinaryId = true;
                    reader.ReadBinary(out byte[] binaryId);
                    CollectionAssert.AreEqual(testBinaryId, binaryId);
                }
            }

            Assert.IsTrue(foundBinaryId);
        }
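
        // Illustrative helper sketch, not part of the original test: it enumerates the
        // top-level paths written into a row using only the RowReader surface already
        // exercised above, which can help diagnose a missing field such as "binaryId".
        // The helper name is hypothetical.
        private static List<string> GetTopLevelPaths(ref RowBuffer row)
        {
            List<string> paths = new List<string>();
            RowReader reader = new RowReader(ref row);

            while (reader.Read())
            {
                paths.Add(reader.PathSpan.ToString());
            }

            return paths;
        }
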
        /// <summary>
        /// Adds as many operations as possible from the provided list of operations,
        /// in list order, while keeping the serialized body stream within maxBodyLength
        /// and the number of operations within maxOperationCount.
        /// </summary>
        /// <param name="operations">Operations to be added; read-only.</param>
        /// <param name="cancellationToken"><see cref="CancellationToken"/> representing request cancellation.</param>
        /// <param name="ensureContinuousOperationIndexes">Whether to stop adding operations to the request once there is a gap in the operation indexes.</param>
        /// <returns>Any pending operations that were not included in the request.</returns>
        protected async Task<ArraySegment<ItemBatchOperation>> CreateBodyStreamAsync(
            ArraySegment<ItemBatchOperation> operations,
            CancellationToken cancellationToken,
            bool ensureContinuousOperationIndexes = false)
        {
            int estimatedMaxOperationLength = 0;
            int approximateTotalLength = 0;

            int previousOperationIndex = -1;
            int materializedCount = 0;

            // Materialize operations in order until the approximate body size, the operation
            // count, or (when requested) index continuity stops us.
            foreach (ItemBatchOperation operation in operations)
            {
                if (ensureContinuousOperationIndexes && previousOperationIndex != -1 && operation.OperationIndex != previousOperationIndex + 1)
                {
                    break;
                }

                await operation.EncryptAndMaterializeResourceAsync(this.serializerCore, cancellationToken);

                materializedCount++;

                previousOperationIndex = operation.OperationIndex;

                int currentLength = operation.GetApproximateSerializedLength();
                estimatedMaxOperationLength = Math.Max(currentLength, estimatedMaxOperationLength);

                approximateTotalLength += currentLength;
                if (approximateTotalLength > this.maxBodyLength)
                {
                    break;
                }

                if (materializedCount == this.maxOperationCount)
                {
                    break;
                }
            }

            this.operations = new ArraySegment<ItemBatchOperation>(operations.Array, operations.Offset, materializedCount);

            // Pre-size the stream and the per-operation write buffer, padding the approximate
            // lengths with a fixed over-estimate of the serialization overhead per operation.
            const int operationSerializationOverheadOverEstimateInBytes = 200;

            this.bodyStream = new MemoryStream(approximateTotalLength + (operationSerializationOverheadOverEstimateInBytes * materializedCount));
            this.operationResizableWriteBuffer = new MemorySpanResizer<byte>(estimatedMaxOperationLength + operationSerializationOverheadOverEstimateInBytes);

            // Serialize each included operation into the body stream as a RecordIO record via this.WriteOperation.
            Result r = await this.bodyStream.WriteRecordIOAsync(default(Segment), this.WriteOperation);

            Debug.Assert(r == Result.Success, "Failed to serialize batch request");

            this.bodyStream.Position = 0;

            if (this.shouldDeleteLastWrittenRecord)
            {
                // Roll back the last record: truncate the stream to the position captured before
                // it was written and exclude the corresponding operation from this request.
                this.bodyStream.SetLength(this.bodyStreamPositionBeforeWritingCurrentRecord);
                this.operations = new ArraySegment<ItemBatchOperation>(operations.Array, operations.Offset, this.lastWrittenOperationIndex);
            }
            else
            {
                this.operations = new ArraySegment<ItemBatchOperation>(operations.Array, operations.Offset, this.lastWrittenOperationIndex + 1);
            }

            int overflowOperations = operations.Count - this.operations.Count;

            // Return the suffix of the input that did not make it into this request body.
            return new ArraySegment<ItemBatchOperation>(operations.Array, this.operations.Count + operations.Offset, overflowOperations);
        }
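
        /// <summary>
        /// Illustrative sketch only, not part of the original source: a minimal example of how
        /// the overflow segment returned by <see cref="CreateBodyStreamAsync"/> is meant to be
        /// consumed. The method name is hypothetical and it is not wired into anything.
        /// </summary>
        protected async Task<ArraySegment<ItemBatchOperation>> FillBodyAndGetOverflowAsync(
            ArraySegment<ItemBatchOperation> operations,
            CancellationToken cancellationToken)
        {
            // After this call, this.operations holds the prefix of the input that fit into
            // this.bodyStream; the returned segment is the suffix that needs another request.
            ArraySegment<ItemBatchOperation> overflow = await this.CreateBodyStreamAsync(operations, cancellationToken);

            Debug.Assert(
                this.operations.Count + overflow.Count == operations.Count,
                "The serialized prefix plus the overflow should cover the full input");

            return overflow;
        }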