public async Task Init()
{
    while (_parser.Read() == false)
    {
        await RefillParserBuffer(_stream, _buffer, _parser, _token);
    }

    if (_state.CurrentTokenType != JsonParserToken.StartArray)
    {
        ThrowUnexpectedToken(JsonParserToken.StartArray, _state);
    }
}
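// Note: RefillParserBuffer is awaited throughout this listing, but its body is not shown.
// Below is a minimal sketch of such a helper, inferred from the synchronous refill loop in
// ReadNextToken(Stream, UnmanagedJsonParser, ...) further down. It is an illustration of
// the pattern, not necessarily the actual implementation.
private static async Task RefillParserBuffer(Stream stream, JsonOperationContext.ManagedPinnedBuffer buffer, UnmanagedJsonParser parser, CancellationToken token = default)
{
    // Read the next chunk from the stream into the pinned buffer...
    var read = await stream.ReadAsync(buffer.Buffer.Array, buffer.Buffer.Offset, buffer.Buffer.Count, token);
    if (read == 0)
        throw new EndOfStreamException("The stream ended unexpectedly");

    // ...and hand it to the parser so subsequent Read() calls can make progress.
    parser.SetBuffer(buffer, read);
}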
private IEnumerable<(string key, long index, BlittableJsonReaderObject value)> InternalGetCompareExchangeValues()
{
    var state = new JsonParserState();
    using (var parser = new UnmanagedJsonParser(_context, state, "Import/CompareExchange"))
    using (var builder = new BlittableJsonDocumentBuilder(_context, BlittableJsonDocumentBuilder.UsageMode.ToDisk, "Import/CompareExchange", parser, state))
    {
        foreach (var reader in ReadArray())
        {
            using (reader)
            {
                if (reader.TryGet("Key", out string key) == false ||
                    reader.TryGet("Value", out LazyStringValue value) == false)
                {
                    _result.CompareExchange.ErroredCount++;
                    _result.AddWarning("Could not read compare exchange entry.");
                    continue;
                }

                builder.ReadNestedObject();
                SetBuffer(parser, value);
                parser.Read();
                builder.Read();
                builder.FinalizeDocument();

                yield return (key, 0, builder.CreateReader());

                builder.Renew("import/cmpxchg", BlittableJsonDocumentBuilder.UsageMode.ToDisk);
            }
        }
    }
}
private unsafe bool AboutToReadPropertyNameInternal(UnmanagedJsonParser reader, JsonParserState state)
{
    if (_state != State.None)
    {
        if (!AboutToReadWithStateUnlikely(reader, state))
            return false;
    }

    _state = State.None;

    while (true)
    {
        if (reader.Read() == false)
            return false;

        if (state.CurrentTokenType != JsonParserToken.String)
            return true; // let the caller handle that

        if (_readingMetadataObject == false)
        {
            // 7022344802737087853 is the 8 bytes after '@' in "@metadata", read as a
            // little-endian long, so this detects the "@metadata" property name without
            // materializing a string.
            if (state.StringSize == 9 && state.StringBuffer[0] == (byte)'@' &&
                *(long*)(state.StringBuffer + 1) == 7022344802737087853)
                _readingMetadataObject = true;

            return true;
        }

        if (AboutToReadPropertyNameInMetadataUnlikely(reader, state, out bool aboutToReadPropertyName))
            return aboutToReadPropertyName;
    }
}
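// The magic constant above can be verified in isolation. A standalone sketch (not part of
// the original listing) that recomputes it from the string "@metadata"; BitConverter uses
// the platform's byte order, so this prints the constant on little-endian machines (x86/x64):
using System;
using System.Text;

static class MetadataConstantCheck
{
    static void Main()
    {
        byte[] bytes = Encoding.ASCII.GetBytes("@metadata"); // 9 bytes, starts with '@'
        long value = BitConverter.ToInt64(bytes, 1);         // the 8 bytes after '@'
        Console.WriteLine(value);                            // 7022344802737087853
    }
}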
private static void ReadNextToken(UnmanagedJsonParser parser, MemoryStream stream, bool isFirst = false)
{
    if (parser.Read() == false)
    {
        // RefillParserBuffer returns a Task; this method is synchronous, so block until it completes.
        RefillParserBuffer(stream, parser, CancellationToken.None).Wait();
        if (parser.Read() == false)
        {
            Console.WriteLine("Parser could not advance even after the buffer was refilled.");
            Environment.Exit(2222);
        }
    }
    else if (isFirst)
    {
        Console.WriteLine("Unexpected: the first Read() succeeded before any buffer was refilled.");
        Console.Out.Flush();
    }
}
private static async Task<bool> IsClusterTransaction(Stream stream, UnmanagedJsonParser parser, JsonOperationContext.ManagedPinnedBuffer buffer, JsonParserState state)
{
    while (parser.Read() == false)
    {
        await RefillParserBuffer(stream, buffer, parser);
    }

    if (ReadClusterTransactionProperty(state))
    {
        while (parser.Read() == false)
        {
            await RefillParserBuffer(stream, buffer, parser);
        }

        return GetStringPropertyValue(state) == nameof(TransactionMode.ClusterWide);
    }

    return false;
}
private static void ReadNextToken(Stream stream, UnmanagedJsonParser parser, JsonOperationContext.ManagedPinnedBuffer buffer)
{
    while (parser.Read() == false)
    {
        var read = stream.Read(buffer.Buffer.Array, buffer.Buffer.Offset, buffer.Buffer.Count);
        if (read == 0)
        {
            throw new EndOfStreamException("The stream ended unexpectedly");
        }

        parser.SetBuffer(buffer, read);
    }
}
public unsafe void CanReadAll(string name) // unsafe is required for the fixed statement below
{
    using (var ctx = JsonOperationContext.ShortTermSingleUse())
    using (var stream = typeof(UnmanageJsonReaderTests).GetTypeInfo().Assembly.GetManifestResourceStream(name))
    using (var parser = new UnmanagedJsonParser(ctx, new JsonParserState(), "test"))
    {
        var buffer = new byte[4096];
        fixed (byte* pBuffer = buffer)
        {
            while (stream.Position != stream.Length)
            {
                var read = stream.Read(buffer, 0, buffer.Length);
                parser.SetBuffer(pBuffer, read);
                while (parser.Read())
                {
                }
            }
        }
    }
}
public static async Task BuildCommandsAsync(JsonOperationContext ctx, BatchHandler.MergedBatchCommand command, Stream stream, DocumentDatabase database, ServerStore serverStore)
{
    CommandData[] cmds = Empty;
    List<string> identities = null;
    List<int> positionInListToCommandIndex = null;
    int index = -1;

    var state = new JsonParserState();
    using (ctx.GetManagedBuffer(out JsonOperationContext.ManagedPinnedBuffer buffer))
    using (var parser = new UnmanagedJsonParser(ctx, state, "bulk_docs"))
    {
        while (parser.Read() == false)
        {
            await RefillParserBuffer(stream, buffer, parser);
        }
        if (state.CurrentTokenType != JsonParserToken.StartObject)
        {
            ThrowUnexpectedToken(JsonParserToken.StartObject, state);
        }

        while (parser.Read() == false)
        {
            await RefillParserBuffer(stream, buffer, parser);
        }
        if (state.CurrentTokenType != JsonParserToken.String)
        {
            ThrowUnexpectedToken(JsonParserToken.String, state);
        }

        // 8314892176759549763 is the 8 ASCII bytes of "Commands" read as a little-endian long,
        // so this verifies the first property is named "Commands" without allocating a string.
        if (GetLongFromStringBuffer(state) != 8314892176759549763) // Commands
        {
            ThrowUnexpectedToken(JsonParserToken.String, state);
        }

        while (parser.Read() == false)
        {
            await RefillParserBuffer(stream, buffer, parser);
        }
        if (state.CurrentTokenType != JsonParserToken.StartArray)
        {
            ThrowUnexpectedToken(JsonParserToken.StartArray, state);
        }

        while (true)
        {
            while (parser.Read() == false)
            {
                await RefillParserBuffer(stream, buffer, parser);
            }
            if (state.CurrentTokenType == JsonParserToken.EndArray)
            {
                break;
            }

            index++;
            if (index >= cmds.Length)
            {
                cmds = IncreaseSizeOfCommandsBuffer(index, cmds);
            }

            var commandData = await ReadSingleCommand(ctx, stream, state, parser, buffer, default);

            if (commandData.Type == CommandType.PATCH)
            {
                commandData.PatchCommand = new PatchDocumentCommand(ctx, commandData.Id, commandData.ChangeVector,
                    false,
                    (commandData.Patch, commandData.PatchArgs),
                    (commandData.PatchIfMissing, commandData.PatchIfMissingArgs),
                    database,
                    false, false, true);
            }

            if (commandData.Type == CommandType.PUT && string.IsNullOrEmpty(commandData.Id) == false && commandData.Id[commandData.Id.Length - 1] == '|')
            {
                if (identities == null)
                {
                    identities = new List<string>();
                    positionInListToCommandIndex = new List<int>();
                }

                // queue identities requests in order to send them at once to the leader (using List for simplicity)
                identities.Add(commandData.Id);
                positionInListToCommandIndex.Add(index);
            }

            cmds[index] = commandData;
        }

        if (identities != null)
        {
            await GetIdentitiesValues(ctx, database, serverStore, identities, positionInListToCommandIndex, cmds);
        }

        command.ParsedCommands = new ArraySegment<CommandData>(cmds, 0, index + 1);

        if (await IsClusterTransaction(stream, parser, buffer, state))
        {
            command.IsClusterTransaction = true;
        }
    }
}
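// GetLongFromStringBuffer is used above but not included in this listing. Given the parser's
// convention of exposing the current string token via state.StringBuffer/state.StringSize
// (see AboutToReadPropertyNameInternal above), a minimal sketch would be the following;
// it assumes the token has at least 8 readable bytes, which holds for the "Commands" check:
private static unsafe long GetLongFromStringBuffer(JsonParserState state)
{
    return *(long*)state.StringBuffer; // first 8 bytes of the current string token
}

// Standalone sanity check for the constant (little-endian platforms):
//   BitConverter.ToInt64(Encoding.ASCII.GetBytes("Commands"), 0) == 8314892176759549763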
private static async Task<CommandData> ReadSingleCommand(
    JsonOperationContext ctx,
    Stream stream,
    JsonParserState state,
    UnmanagedJsonParser parser,
    JsonOperationContext.ManagedPinnedBuffer buffer,
    CancellationToken token)
{
    var commandData = new CommandData();
    if (state.CurrentTokenType != JsonParserToken.StartObject)
    {
        ThrowUnexpectedToken(JsonParserToken.StartObject, state);
    }

    while (true)
    {
        while (parser.Read() == false)
        {
            await RefillParserBuffer(stream, buffer, parser, token);
        }

        if (state.CurrentTokenType == JsonParserToken.EndObject)
        {
            break;
        }

        if (state.CurrentTokenType != JsonParserToken.String)
        {
            ThrowUnexpectedToken(JsonParserToken.String, state);
        }

        switch (GetPropertyType(state))
        {
            case CommandPropertyName.Type:
                while (parser.Read() == false)
                {
                    await RefillParserBuffer(stream, buffer, parser, token);
                }
                if (state.CurrentTokenType != JsonParserToken.String)
                {
                    ThrowUnexpectedToken(JsonParserToken.String, state);
                }
                commandData.Type = GetCommandType(state, ctx);
                break;

            case CommandPropertyName.Id:
                while (parser.Read() == false)
                {
                    await RefillParserBuffer(stream, buffer, parser, token);
                }
                switch (state.CurrentTokenType)
                {
                    case JsonParserToken.Null:
                        commandData.Id = null;
                        break;
                    case JsonParserToken.String:
                        commandData.Id = GetStringPropertyValue(state);
                        break;
                    default:
                        ThrowUnexpectedToken(JsonParserToken.String, state);
                        break;
                }
                break;

            case CommandPropertyName.Name:
                while (parser.Read() == false)
                {
                    await RefillParserBuffer(stream, buffer, parser, token);
                }
                switch (state.CurrentTokenType)
                {
                    case JsonParserToken.Null:
                        commandData.Name = null;
                        break;
                    case JsonParserToken.String:
                        commandData.Name = GetStringPropertyValue(state);
                        break;
                    default:
                        ThrowUnexpectedToken(JsonParserToken.String, state);
                        break;
                }
                break;

            case CommandPropertyName.DestinationId:
                while (parser.Read() == false)
                {
                    await RefillParserBuffer(stream, buffer, parser, token);
                }
                switch (state.CurrentTokenType)
                {
                    case JsonParserToken.Null:
                        commandData.DestinationId = null;
                        break;
                    case JsonParserToken.String:
                        commandData.DestinationId = GetStringPropertyValue(state);
                        break;
                    default:
                        ThrowUnexpectedToken(JsonParserToken.String, state);
                        break;
                }
                break;

            case CommandPropertyName.DestinationName:
                while (parser.Read() == false)
                {
                    await RefillParserBuffer(stream, buffer, parser, token);
                }
                switch (state.CurrentTokenType)
                {
                    case JsonParserToken.Null:
                        commandData.DestinationName = null;
                        break;
                    case JsonParserToken.String:
                        commandData.DestinationName = GetStringPropertyValue(state);
                        break;
                    default:
                        ThrowUnexpectedToken(JsonParserToken.String, state);
                        break;
                }
                break;

            case CommandPropertyName.ContentType:
                while (parser.Read() == false)
                {
                    await RefillParserBuffer(stream, buffer, parser, token);
                }
                switch (state.CurrentTokenType)
                {
                    case JsonParserToken.Null:
                        commandData.ContentType = string.Empty;
                        break;
                    case JsonParserToken.String:
                        commandData.ContentType = GetStringPropertyValue(state);
                        break;
                    default:
                        ThrowUnexpectedToken(JsonParserToken.String, state);
                        break;
                }
                break;

            case CommandPropertyName.Document:
                while (parser.Read() == false)
                {
                    await RefillParserBuffer(stream, buffer, parser, token);
                }
                commandData.Document = await ReadJsonObject(ctx, stream, commandData.Id, parser, state, buffer, token);
                break;

            case CommandPropertyName.Patch:
                while (parser.Read() == false)
                {
                    await RefillParserBuffer(stream, buffer, parser, token);
                }
                var patch = await ReadJsonObject(ctx, stream, commandData.Id, parser, state, buffer, token);
                commandData.Patch = PatchRequest.Parse(patch, out commandData.PatchArgs);
                break;

            case CommandPropertyName.PatchIfMissing:
                while (parser.Read() == false)
                {
                    await RefillParserBuffer(stream, buffer, parser, token);
                }
                var patchIfMissing = await ReadJsonObject(ctx, stream, commandData.Id, parser, state, buffer, token);
                commandData.PatchIfMissing = PatchRequest.Parse(patchIfMissing, out commandData.PatchIfMissingArgs);
                break;

            case CommandPropertyName.ChangeVector:
                while (parser.Read() == false)
                {
                    await RefillParserBuffer(stream, buffer, parser, token);
                }
                if (state.CurrentTokenType == JsonParserToken.Null)
                {
                    commandData.ChangeVector = null;
                }
                else
                {
                    if (state.CurrentTokenType != JsonParserToken.String)
                    {
                        ThrowUnexpectedToken(JsonParserToken.String, state);
                    }
                    commandData.ChangeVector = GetLazyStringValue(ctx, state);
                }
                break;

            case CommandPropertyName.Index:
                while (parser.Read() == false)
                {
                    await RefillParserBuffer(stream, buffer, parser, token);
                }
                if (state.CurrentTokenType != JsonParserToken.Integer)
                {
                    ThrowUnexpectedToken(JsonParserToken.Integer, state);
                }
                commandData.Index = state.Long;
                break;

            case CommandPropertyName.IdPrefixed:
                while (parser.Read() == false)
                {
                    await RefillParserBuffer(stream, buffer, parser, token);
                }
                if (state.CurrentTokenType != JsonParserToken.True && state.CurrentTokenType != JsonParserToken.False)
                {
                    ThrowUnexpectedToken(JsonParserToken.True, state);
                }
                commandData.IdPrefixed = state.CurrentTokenType == JsonParserToken.True;
                break;

            case CommandPropertyName.ReturnDocument:
                while (parser.Read() == false)
                {
                    await RefillParserBuffer(stream, buffer, parser, token);
                }
                if (state.CurrentTokenType != JsonParserToken.True && state.CurrentTokenType != JsonParserToken.False)
                {
                    ThrowUnexpectedToken(JsonParserToken.True, state);
                }
                commandData.ReturnDocument = state.CurrentTokenType == JsonParserToken.True;
                break;

            case CommandPropertyName.Counters:
                while (parser.Read() == false)
                {
                    await RefillParserBuffer(stream, buffer, parser, token);
                }
                var counterOps = await ReadJsonObject(ctx, stream, commandData.Id, parser, state, buffer, token);
                commandData.Counters = DocumentCountersOperation.Parse(counterOps);
                break;

            case CommandPropertyName.FromEtl:
                while (parser.Read() == false)
                {
                    await RefillParserBuffer(stream, buffer, parser, token);
                }
                if (state.CurrentTokenType != JsonParserToken.True && state.CurrentTokenType != JsonParserToken.False)
                {
                    ThrowUnexpectedToken(JsonParserToken.True, state);
                }
                commandData.FromEtl = state.CurrentTokenType == JsonParserToken.True;
                break;

            case CommandPropertyName.NoSuchProperty:
                // unknown command - ignore it
                while (parser.Read() == false)
                {
                    await RefillParserBuffer(stream, buffer, parser, token);
                }
                if (state.CurrentTokenType == JsonParserToken.StartObject ||
                    state.CurrentTokenType == JsonParserToken.StartArray)
                {
                    await ReadJsonObject(ctx, stream, commandData.Id, parser, state, buffer, token);
                }
                break;
        }
    }

    switch (commandData.Type)
    {
        case CommandType.None:
            ThrowInvalidType();
            break;
        case CommandType.PUT:
            if (commandData.Document == null)
            {
                ThrowMissingDocumentProperty();
            }
            break;
        case CommandType.PATCH:
            if (commandData.Patch == null)
            {
                ThrowMissingPatchProperty();
            }
            break;
        case CommandType.AttachmentPUT:
            if (commandData.Name == null)
            {
                ThrowMissingNameProperty();
            }
            break;
        case CommandType.Counters:
            if (commandData.Counters == null)
            {
                ThrowMissingNameProperty();
            }
            break;
    }

    return commandData;
}
public async Task<ImportResult> Import(DocumentsOperationContext context, Stream stream, Action<IOperationProgress> onProgress = null)
{
    var result = new ImportResult();
    var progress = new IndeterminateProgress();
    var state = new JsonParserState();

    JsonOperationContext.ManagedPinnedBuffer buffer;
    using (context.GetManagedBuffer(out buffer))
    using (var parser = new UnmanagedJsonParser(context, state, "fileName"))
    {
        var operateOnType = "__top_start_object";
        var buildVersion = 0L;
        var identities = new Dictionary<string, long>();
        VersioningStorage versioningStorage = null;

        while (true)
        {
            if (parser.Read() == false)
            {
                var read = await stream.ReadAsync(buffer.Buffer.Array, buffer.Buffer.Offset, buffer.Length);
                if (read == 0)
                {
                    if (state.CurrentTokenType != JsonParserToken.EndObject)
                    {
                        throw new EndOfStreamException("Stream ended without reaching end of json content");
                    }
                    break;
                }
                parser.SetBuffer(buffer, read);
                continue;
            }

            switch (state.CurrentTokenType)
            {
                case JsonParserToken.String:
                    unsafe
                    {
                        operateOnType = new LazyStringValue(null, state.StringBuffer, state.StringSize, context).ToString();
                    }
                    break;

                case JsonParserToken.Integer:
                    switch (operateOnType)
                    {
                        case "BuildVersion":
                            buildVersion = state.Long;
                            break;
                    }
                    break;

                case JsonParserToken.StartObject:
                    if (operateOnType == "__top_start_object")
                    {
                        operateOnType = null;
                        break;
                    }

                    context.CachedProperties.NewDocument();
                    var builder = new BlittableJsonDocumentBuilder(_batchPutCommand.Context, BlittableJsonDocumentBuilder.UsageMode.ToDisk, "ImportObject", parser, state);
                    builder.ReadNestedObject();
                    while (builder.Read() == false)
                    {
                        var read = await stream.ReadAsync(buffer.Buffer.Array, buffer.Buffer.Offset, buffer.Length);
                        if (read == 0)
                        {
                            throw new EndOfStreamException("Stream ended without reaching end of json content");
                        }
                        parser.SetBuffer(buffer, read);
                    }
                    builder.FinalizeDocument();

                    if (operateOnType == "Docs" && Options.OperateOnTypes.HasFlag(DatabaseItemType.Documents))
                    {
                        progress.Progress = "Importing Documents";
                        onProgress?.Invoke(progress);

                        PatchDocument patch = null;
                        PatchRequest patchRequest = null;
                        if (string.IsNullOrWhiteSpace(Options.TransformScript) == false)
                        {
                            patch = new PatchDocument(context.DocumentDatabase);
                            patchRequest = new PatchRequest
                            {
                                Script = Options.TransformScript
                            };
                        }

                        result.DocumentsCount++;
                        var reader = builder.CreateReader();
                        var document = new Document
                        {
                            Data = reader,
                        };

                        if (Options.IncludeExpired == false && document.Expired(_database.Time.GetUtcNow()))
                        {
                            continue;
                        }

                        TransformScriptOrDisableVersioningIfNeeded(context, patch, reader, document, patchRequest);
                        _batchPutCommand.Add(document.Data);

                        if (result.DocumentsCount % 1000 == 0)
                        {
                            progress.Progress = $"Imported {result.DocumentsCount} documents";
                            onProgress?.Invoke(progress);
                        }

                        await HandleBatchOfDocuments(context, parser, buildVersion).ConfigureAwait(false);
                    }
                    else if (operateOnType == "RevisionDocuments" && Options.OperateOnTypes.HasFlag(DatabaseItemType.RevisionDocuments))
                    {
                        if (versioningStorage == null)
                        {
                            break;
                        }

                        result.RevisionDocumentsCount++;
                        var reader = builder.CreateReader();
                        _batchPutCommand.Add(reader);
                        await HandleBatchOfDocuments(context, parser, buildVersion).ConfigureAwait(false);
                    }
                    else
                    {
                        using (builder)
                        {
                            switch (operateOnType)
                            {
                                case "Attachments":
                                    result.Warnings.Add("Attachments are not supported anymore. Use RavenFS instead. Skipping.");
                                    break;

                                case "Indexes":
                                    if (Options.OperateOnTypes.HasFlag(DatabaseItemType.Indexes) == false)
                                    {
                                        continue;
                                    }
                                    result.IndexesCount++;
                                    progress.Progress = "Importing Indexes";
                                    onProgress?.Invoke(progress);
                                    try
                                    {
                                        IndexProcessor.Import(builder, _database, buildVersion, Options.RemoveAnalyzers);
                                    }
                                    catch (Exception e)
                                    {
                                        result.Warnings.Add($"Could not import index. Message: {e.Message}");
                                    }
                                    break;

                                case "Transformers":
                                    if (Options.OperateOnTypes.HasFlag(DatabaseItemType.Transformers) == false)
                                    {
                                        continue;
                                    }
                                    result.TransformersCount++;
                                    progress.Progress = "Importing Transformers";
                                    onProgress?.Invoke(progress);
                                    try
                                    {
                                        TransformerProcessor.Import(builder, _database, buildVersion);
                                    }
                                    catch (Exception e)
                                    {
                                        result.Warnings.Add($"Could not import transformer. Message: {e.Message}");
                                    }
                                    break;

                                case "Identities":
                                    if (Options.OperateOnTypes.HasFlag(DatabaseItemType.Identities))
                                    {
                                        result.IdentitiesCount++;
                                        progress.Progress = "Importing Identities";
                                        onProgress?.Invoke(progress);
                                        using (var reader = builder.CreateReader())
                                        {
                                            try
                                            {
                                                string identityKey, identityValueString;
                                                long identityValue;
                                                if (reader.TryGet("Key", out identityKey) == false ||
                                                    reader.TryGet("Value", out identityValueString) == false ||
                                                    long.TryParse(identityValueString, out identityValue) == false)
                                                {
                                                    result.Warnings.Add($"Cannot import the following identity: '{reader}'. Skipping.");
                                                }
                                                else
                                                {
                                                    identities[identityKey] = identityValue;
                                                }
                                            }
                                            catch (Exception e)
                                            {
                                                result.Warnings.Add($"Cannot import the following identity: '{reader}'. Error: {e}. Skipping.");
                                            }
                                        }
                                    }
                                    break;

                                default:
                                    result.Warnings.Add($"The following type is not recognized: '{operateOnType}'. Skipping.");
                                    break;
                            }
                        }
                    }
                    break;

                case JsonParserToken.StartArray:
                    switch (operateOnType)
                    {
                        case "RevisionDocuments":
                            // We are taking a reference here since the documents import can activate or disable the versioning.
                            // We hold a local copy because the user can disable the bundle during the import process, externally.
                            // In this case we want to continue to import the revision documents.
                            versioningStorage = _database.BundleLoader.VersioningStorage;
                            _batchPutCommand.IsRevision = true;
                            break;
                    }
                    break;

                case JsonParserToken.EndArray:
                    switch (operateOnType)
                    {
                        case "Docs":
                            await FinishBatchOfDocuments();
                            _batchPutCommand = new MergedBatchPutCommand(_database, buildVersion);
                            break;
                        case "RevisionDocuments":
                            await FinishBatchOfDocuments();
                            break;
                        case "Identities":
                            if (identities.Count > 0)
                            {
                                using (var tx = context.OpenWriteTransaction())
                                {
                                    _database.DocumentsStorage.UpdateIdentities(context, identities);
                                    tx.Commit();
                                }
                            }
                            identities = null;
                            break;
                    }
                    break;
            }
        }
    }

    return result;
}
public static async Task<ArraySegment<CommandData>> BuildCommandsAsync(JsonOperationContext ctx, Stream stream, DocumentDatabase database, ServerStore serverStore)
{
    CommandData[] cmds = Empty;
    int index = -1;

    var state = new JsonParserState();
    using (ctx.GetManagedBuffer(out JsonOperationContext.ManagedPinnedBuffer buffer))
    using (var parser = new UnmanagedJsonParser(ctx, state, "bulk_docs"))
    {
        while (parser.Read() == false)
        {
            await RefillParserBuffer(stream, buffer, parser);
        }
        if (state.CurrentTokenType != JsonParserToken.StartObject)
        {
            ThrowUnexpectedToken(JsonParserToken.StartObject, state);
        }

        while (parser.Read() == false)
        {
            await RefillParserBuffer(stream, buffer, parser);
        }
        if (state.CurrentTokenType != JsonParserToken.String)
        {
            ThrowUnexpectedToken(JsonParserToken.String, state);
        }
        if (GetLongFromStringBuffer(state) != 8314892176759549763) // "Commands" as a little-endian long
        {
            ThrowUnexpectedToken(JsonParserToken.String, state);
        }

        while (parser.Read() == false)
        {
            await RefillParserBuffer(stream, buffer, parser);
        }
        if (state.CurrentTokenType != JsonParserToken.StartArray)
        {
            ThrowUnexpectedToken(JsonParserToken.StartArray, state);
        }

        while (true)
        {
            while (parser.Read() == false)
            {
                await RefillParserBuffer(stream, buffer, parser);
            }
            if (state.CurrentTokenType == JsonParserToken.EndArray)
            {
                break;
            }

            index++;
            if (index >= cmds.Length)
            {
                cmds = IncreaseSizeOfCommandsBuffer(index, cmds);
            }

            var commandData = await ReadSingleCommand(ctx, stream, state, parser, buffer, default(CancellationToken));

            if (commandData.Type == CommandType.PATCH)
            {
                commandData.PatchCommand = new PatchDocumentCommand(ctx, commandData.Id, commandData.ChangeVector,
                    false,
                    (commandData.Patch, commandData.PatchArgs),
                    (commandData.PatchIfMissing, commandData.PatchIfMissingArgs),
                    database,
                    false, false);
            }

            if (commandData.Type == CommandType.PUT && string.IsNullOrEmpty(commandData.Id) == false && commandData.Id[commandData.Id.Length - 1] == '|')
            {
                var (_, id) = await serverStore.GenerateClusterIdentityAsync(commandData.Id, database.Name);
                commandData.Id = id;
            }

            cmds[index] = commandData;
        }
    }

    return new ArraySegment<CommandData>(cmds, 0, index + 1);
}
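// The '|' suffix handled above is RavenDB's server-side identity convention: a PUT whose Id
// ends with '|' asks the cluster to generate the numeric part of the id. A hypothetical
// request body that would exercise that path (illustrative only; the property names match
// the cases in ReadSingleCommand above):
//
// {
//     "Commands": [
//         { "Type": "PUT", "Id": "users|", "Document": { "Name": "John" } }
//     ]
// }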
public static async Task BuildCommandsAsync(JsonOperationContext ctx, BatchHandler.MergedBatchCommand command, Stream stream, DocumentDatabase database, ServerStore serverStore)
{
    CommandData[] cmds = Empty;
    List<string> identities = null;
    List<int> positionInListToCommandIndex = null;
    int index = -1;

    var state = new JsonParserState();
    using (ctx.GetManagedBuffer(out JsonOperationContext.ManagedPinnedBuffer buffer))
    using (var parser = new UnmanagedJsonParser(ctx, state, "bulk_docs"))
    // In case of a conflict between attachments with the same name, we need the attachment
    // information from the metadata. We can't know in advance whether we will need it, so we
    // keep it for all batch commands.
    using (var modifier = new BlittableMetadataModifier(ctx, false, false, DatabaseItemType.Attachments))
    {
        while (parser.Read() == false)
        {
            await RefillParserBuffer(stream, buffer, parser);
        }
        if (state.CurrentTokenType != JsonParserToken.StartObject)
        {
            ThrowUnexpectedToken(JsonParserToken.StartObject, state);
        }

        while (parser.Read() == false)
        {
            await RefillParserBuffer(stream, buffer, parser);
        }
        if (state.CurrentTokenType != JsonParserToken.String)
        {
            ThrowUnexpectedToken(JsonParserToken.String, state);
        }
        if (GetLongFromStringBuffer(state) != 8314892176759549763) // "Commands" as a little-endian long
        {
            ThrowUnexpectedToken(JsonParserToken.String, state);
        }

        while (parser.Read() == false)
        {
            await RefillParserBuffer(stream, buffer, parser);
        }
        if (state.CurrentTokenType != JsonParserToken.StartArray)
        {
            ThrowUnexpectedToken(JsonParserToken.StartArray, state);
        }

        while (true)
        {
            while (parser.Read() == false)
            {
                await RefillParserBuffer(stream, buffer, parser);
            }
            if (state.CurrentTokenType == JsonParserToken.EndArray)
            {
                break;
            }

            index++;
            if (index >= cmds.Length)
            {
                cmds = IncreaseSizeOfCommandsBuffer(index, cmds);
            }

            var commandData = await ReadSingleCommand(ctx, stream, state, parser, buffer, modifier, default);

            if (commandData.Type == CommandType.PATCH)
            {
                commandData.PatchCommand = new PatchDocumentCommand(
                    ctx,
                    commandData.Id,
                    commandData.ChangeVector,
                    skipPatchIfChangeVectorMismatch: false,
                    (commandData.Patch, commandData.PatchArgs),
                    (commandData.PatchIfMissing, commandData.PatchIfMissingArgs),
                    database,
                    isTest: false,
                    debugMode: false,
                    collectResultsNeeded: true,
                    returnDocument: commandData.ReturnDocument);
            }

            if (commandData.Type == CommandType.BatchPATCH)
            {
                commandData.PatchCommand = new BatchPatchDocumentCommand(
                    ctx,
                    commandData.Ids,
                    skipPatchIfChangeVectorMismatch: false,
                    (commandData.Patch, commandData.PatchArgs),
                    (commandData.PatchIfMissing, commandData.PatchIfMissingArgs),
                    database,
                    isTest: false,
                    debugMode: false,
                    collectResultsNeeded: true);
            }

            if (commandData.Type == CommandType.PUT && string.IsNullOrEmpty(commandData.Id) == false && commandData.Id[commandData.Id.Length - 1] == '|')
            {
                if (identities == null)
                {
                    identities = new List<string>();
                    positionInListToCommandIndex = new List<int>();
                }

                // queue identities requests in order to send them at once to the leader (using List for simplicity)
                identities.Add(commandData.Id);
                positionInListToCommandIndex.Add(index);
            }

            cmds[index] = commandData;
        }

        if (identities != null)
        {
            await GetIdentitiesValues(ctx, database, serverStore, identities, positionInListToCommandIndex, cmds);
        }

        command.ParsedCommands = new ArraySegment<CommandData>(cmds, 0, index + 1);

        if (await IsClusterTransaction(stream, parser, buffer, state))
        {
            command.IsClusterTransaction = true;
        }
    }
}