public void BlittableMetadataModifier_WhileIdContainsNoEscapeCharacters_ResultInLazyStringWithoutEscapeInformation()
{
    // Minimal document whose @metadata.@id ("u1") contains no characters that require escaping.
    const string inputJson = "{\"@metadata\": { \"@id\": \"u1\"}}";

    using (var context = JsonOperationContext.ShortTermSingleUse())
    {
        var inputBytes = Encoding.UTF8.GetBytes(inputJson);
        var parserState = new JsonParserState();
        var metadataModifier = new BlittableMetadataModifier(context);

        using (var jsonParser = new UnmanagedJsonParser(context, parserState, "test"))
        {
            fixed (byte* pInput = inputBytes)
            {
                jsonParser.SetBuffer(pInput, inputBytes.Length);

                using (var documentBuilder = new BlittableJsonDocumentBuilder(
                           context,
                           BlittableJsonDocumentBuilder.UsageMode.None,
                           "test",
                           jsonParser,
                           parserState,
                           null,
                           metadataModifier))
                {
                    // Drive the builder through a full parse of the document.
                    documentBuilder.ReadObjectDocument();
                    documentBuilder.Read();
                    documentBuilder.FinalizeDocument();
                }
            }
        }

        // The id that the modifier captured should still carry an EscapePositions instance.
        Assert.NotNull(metadataModifier.Id.EscapePositions);
    }
}
/// <summary>
/// Advances to the next command in the commands array. Returns null once the
/// array's EndArray token is reached; otherwise returns a task producing the
/// parsed command. Falls back to the buffer-refilling slow path when the
/// parser cannot produce a token from the data it already has.
/// </summary>
public Task<CommandData> MoveNext(JsonOperationContext ctx, BlittableMetadataModifier modifier)
{
    if (_parser.Read() == false)
    {
        // Not enough buffered input for a token — refill from the stream.
        return MoveNextUnlikely(ctx, modifier);
    }

    if (_state.CurrentTokenType == JsonParserToken.EndArray)
    {
        return null;
    }

    return ReadSingleCommand(ctx, _stream, _state, _parser, _buffer, modifier, _token);
}
/// <summary>
/// Reads one batch command object from the JSON stream into a <see cref="CommandData"/>.
/// On entry the parser must be positioned on the command's StartObject token; the method
/// consumes tokens through the matching EndObject, refilling the parser's buffer from
/// <paramref name="stream"/> whenever it runs dry. After parsing, the command is validated
/// for the properties its type requires.
/// </summary>
/// <param name="ctx">Context used for lazy string/enum materialization.</param>
/// <param name="stream">Source stream used to refill the parser buffer.</param>
/// <param name="state">Parser state inspected for the current token type/value.</param>
/// <param name="parser">Token-at-a-time JSON parser shared with the caller.</param>
/// <param name="buffer">Pinned buffer the parser reads from.</param>
/// <param name="modifier">Metadata modifier; also reports seen attachments/counters.</param>
/// <param name="token">Cancellation token flowed into buffer refills.</param>
private static async Task<CommandData> ReadSingleCommand(
    JsonOperationContext ctx,
    Stream stream,
    JsonParserState state,
    UnmanagedJsonParser parser,
    JsonOperationContext.ManagedPinnedBuffer buffer,
    BlittableMetadataModifier modifier,
    CancellationToken token)
{
    var commandData = new CommandData();
    if (state.CurrentTokenType != JsonParserToken.StartObject)
    {
        ThrowUnexpectedToken(JsonParserToken.StartObject, state);
    }

    while (true)
    {
        // Advance to the next property name (or the end of the command object).
        while (parser.Read() == false)
        {
            await RefillParserBuffer(stream, buffer, parser, token);
        }

        if (state.CurrentTokenType == JsonParserToken.EndObject)
        {
            break;
        }

        if (state.CurrentTokenType != JsonParserToken.String)
        {
            ThrowUnexpectedToken(JsonParserToken.String, state);
        }

        switch (GetPropertyType(state))
        {
            case CommandPropertyName.Type:
                while (parser.Read() == false)
                {
                    await RefillParserBuffer(stream, buffer, parser, token);
                }
                if (state.CurrentTokenType != JsonParserToken.String)
                {
                    ThrowUnexpectedToken(JsonParserToken.String, state);
                }
                commandData.Type = GetCommandType(state, ctx);
                break;

            case CommandPropertyName.Id:
                while (parser.Read() == false)
                {
                    await RefillParserBuffer(stream, buffer, parser, token);
                }
                switch (state.CurrentTokenType)
                {
                    case JsonParserToken.Null:
                        commandData.Id = null;
                        break;
                    case JsonParserToken.String:
                        commandData.Id = GetStringPropertyValue(state);
                        break;
                    default:
                        ThrowUnexpectedToken(JsonParserToken.String, state);
                        break;
                }
                break;

            case CommandPropertyName.Ids:
                // The array helper consumes its own tokens (including refills).
                commandData.Ids = await ReadJsonArray(ctx, stream, parser, state, buffer, token);
                break;

            case CommandPropertyName.Name:
                while (parser.Read() == false)
                {
                    await RefillParserBuffer(stream, buffer, parser, token);
                }
                switch (state.CurrentTokenType)
                {
                    case JsonParserToken.Null:
                        commandData.Name = null;
                        break;
                    case JsonParserToken.String:
                        commandData.Name = GetStringPropertyValue(state);
                        break;
                    default:
                        ThrowUnexpectedToken(JsonParserToken.String, state);
                        break;
                }
                break;

            case CommandPropertyName.DestinationId:
                while (parser.Read() == false)
                {
                    await RefillParserBuffer(stream, buffer, parser, token);
                }
                switch (state.CurrentTokenType)
                {
                    case JsonParserToken.Null:
                        commandData.DestinationId = null;
                        break;
                    case JsonParserToken.String:
                        commandData.DestinationId = GetStringPropertyValue(state);
                        break;
                    default:
                        ThrowUnexpectedToken(JsonParserToken.String, state);
                        break;
                }
                break;

            case CommandPropertyName.DestinationName:
                while (parser.Read() == false)
                {
                    await RefillParserBuffer(stream, buffer, parser, token);
                }
                switch (state.CurrentTokenType)
                {
                    case JsonParserToken.Null:
                        commandData.DestinationName = null;
                        break;
                    case JsonParserToken.String:
                        commandData.DestinationName = GetStringPropertyValue(state);
                        break;
                    default:
                        ThrowUnexpectedToken(JsonParserToken.String, state);
                        break;
                }
                break;

            case CommandPropertyName.ContentType:
                while (parser.Read() == false)
                {
                    await RefillParserBuffer(stream, buffer, parser, token);
                }
                switch (state.CurrentTokenType)
                {
                    case JsonParserToken.Null:
                        // Unlike the other string properties, a null content type
                        // is normalized to an empty string rather than null.
                        commandData.ContentType = string.Empty;
                        break;
                    case JsonParserToken.String:
                        commandData.ContentType = GetStringPropertyValue(state);
                        break;
                    default:
                        ThrowUnexpectedToken(JsonParserToken.String, state);
                        break;
                }
                break;

            case CommandPropertyName.Document:
                while (parser.Read() == false)
                {
                    await RefillParserBuffer(stream, buffer, parser, token);
                }
                commandData.Document = await ReadJsonObject(ctx, stream, commandData.Id, parser, state, buffer, modifier, token);
                // The modifier records whether attachments/counters metadata was encountered.
                commandData.SeenAttachments = modifier.SeenAttachments;
                commandData.SeenCounters = modifier.SeenCounters;
                break;

            case CommandPropertyName.Patch:
                while (parser.Read() == false)
                {
                    await RefillParserBuffer(stream, buffer, parser, token);
                }
                var patch = await ReadJsonObject(ctx, stream, commandData.Id, parser, state, buffer, modifier, token);
                commandData.Patch = PatchRequest.Parse(patch, out commandData.PatchArgs);
                break;

            case CommandPropertyName.PatchIfMissing:
                while (parser.Read() == false)
                {
                    await RefillParserBuffer(stream, buffer, parser, token);
                }
                var patchIfMissing = await ReadJsonObject(ctx, stream, commandData.Id, parser, state, buffer, modifier, token);
                commandData.PatchIfMissing = PatchRequest.Parse(patchIfMissing, out commandData.PatchIfMissingArgs);
                break;

            case CommandPropertyName.ChangeVector:
                while (parser.Read() == false)
                {
                    await RefillParserBuffer(stream, buffer, parser, token);
                }
                if (state.CurrentTokenType == JsonParserToken.Null)
                {
                    commandData.ChangeVector = null;
                }
                else
                {
                    if (state.CurrentTokenType != JsonParserToken.String)
                    {
                        ThrowUnexpectedToken(JsonParserToken.String, state);
                    }
                    commandData.ChangeVector = GetLazyStringValue(ctx, state);
                }
                break;

            case CommandPropertyName.Index:
                while (parser.Read() == false)
                {
                    await RefillParserBuffer(stream, buffer, parser, token);
                }
                if (state.CurrentTokenType != JsonParserToken.Integer)
                {
                    // BUGFIX: previously reported JsonParserToken.True as the expected
                    // token, which produced a misleading error for a numeric index.
                    ThrowUnexpectedToken(JsonParserToken.Integer, state);
                }
                commandData.Index = state.Long;
                break;

            case CommandPropertyName.IdPrefixed:
                while (parser.Read() == false)
                {
                    await RefillParserBuffer(stream, buffer, parser, token);
                }
                if (state.CurrentTokenType != JsonParserToken.True && state.CurrentTokenType != JsonParserToken.False)
                {
                    // A boolean was expected; True is reported as the representative token.
                    ThrowUnexpectedToken(JsonParserToken.True, state);
                }
                commandData.IdPrefixed = state.CurrentTokenType == JsonParserToken.True;
                break;

            case CommandPropertyName.ReturnDocument:
                while (parser.Read() == false)
                {
                    await RefillParserBuffer(stream, buffer, parser, token);
                }
                if (state.CurrentTokenType != JsonParserToken.True && state.CurrentTokenType != JsonParserToken.False)
                {
                    ThrowUnexpectedToken(JsonParserToken.True, state);
                }
                commandData.ReturnDocument = state.CurrentTokenType == JsonParserToken.True;
                break;

            case CommandPropertyName.Counters:
                while (parser.Read() == false)
                {
                    await RefillParserBuffer(stream, buffer, parser, token);
                }
                var counterOps = await ReadJsonObject(ctx, stream, commandData.Id, parser, state, buffer, modifier, token);
                commandData.Counters = DocumentCountersOperation.Parse(counterOps);
                break;

            case CommandPropertyName.FromEtl:
                while (parser.Read() == false)
                {
                    await RefillParserBuffer(stream, buffer, parser, token);
                }
                if (state.CurrentTokenType != JsonParserToken.True && state.CurrentTokenType != JsonParserToken.False)
                {
                    ThrowUnexpectedToken(JsonParserToken.True, state);
                }
                commandData.FromEtl = state.CurrentTokenType == JsonParserToken.True;
                break;

            case CommandPropertyName.AttachmentType:
                while (parser.Read() == false)
                {
                    await RefillParserBuffer(stream, buffer, parser, token);
                }
                if (state.CurrentTokenType == JsonParserToken.Null)
                {
                    // Null defaults to the document attachment type.
                    commandData.AttachmentType = AttachmentType.Document;
                }
                else
                {
                    if (state.CurrentTokenType != JsonParserToken.String)
                    {
                        ThrowUnexpectedToken(JsonParserToken.String, state);
                    }
                    commandData.AttachmentType = GetAttachmentType(state, ctx);
                }
                break;

            case CommandPropertyName.NoSuchProperty:
                // unknown command property - consume and ignore its value
                while (parser.Read() == false)
                {
                    await RefillParserBuffer(stream, buffer, parser, token);
                }
                if (state.CurrentTokenType == JsonParserToken.StartObject ||
                    state.CurrentTokenType == JsonParserToken.StartArray)
                {
                    // Skip over the entire nested object/array so the parser stays aligned.
                    await ReadJsonObject(ctx, stream, commandData.Id, parser, state, buffer, modifier, token);
                }
                break;

            case CommandPropertyName.ForceRevisionCreationStrategy:
                while (parser.Read() == false)
                {
                    await RefillParserBuffer(stream, buffer, parser, token);
                }
                if (state.CurrentTokenType != JsonParserToken.String)
                {
                    ThrowUnexpectedToken(JsonParserToken.String, state);
                }
                commandData.ForceRevisionCreationStrategy = GetEnumValue(state, ctx);
                break;
        }
    }

    // Validate that the command carries the properties its type requires.
    switch (commandData.Type)
    {
        case CommandType.None:
            ThrowInvalidType();
            break;
        case CommandType.PUT:
            if (commandData.Document == null)
            {
                ThrowMissingDocumentProperty();
            }
            break;
        case CommandType.PATCH:
            if (commandData.Patch == null)
            {
                ThrowMissingPatchProperty();
            }
            break;
        case CommandType.AttachmentPUT:
            if (commandData.Name == null)
            {
                ThrowMissingNameProperty();
            }
            break;
        case CommandType.Counters:
            // NOTE(review): reuses ThrowMissingNameProperty for a missing Counters
            // property — the error message may be misleading; confirm intent.
            if (commandData.Counters == null)
            {
                ThrowMissingNameProperty();
            }
            break;
    }
    return commandData;
}
/// <summary>
/// Slow path for <see cref="MoveNext"/>: refills the parser buffer from the stream
/// until a token can be produced. Returns a <see cref="CommandType.None"/> command
/// when the commands array has ended; otherwise parses and returns the next command.
/// </summary>
private async Task<CommandData> MoveNextUnlikely(JsonOperationContext ctx, BlittableMetadataModifier modifier)
{
    do
    {
        await RefillParserBuffer(_stream, _buffer, _parser, _token);
    } while (_parser.Read() == false);

    if (_state.CurrentTokenType == JsonParserToken.EndArray)
    {
        // BUGFIX: the return statement was missing its terminating semicolon
        // (a stray ';' sat after the if-block), which does not compile.
        return new CommandData { Type = CommandType.None };
    }

    return await ReadSingleCommand(ctx, _stream, _state, _parser, _buffer, modifier, _token);
}
}
/// <summary>
/// Parses a batch request body of the shape {"Commands": [ ... ]} into command data,
/// wires up patch commands, collects server-generated identity requests (ids ending
/// in '|'), and publishes the result into <paramref name="command"/>.ParsedCommands.
/// </summary>
/// <param name="ctx">Operation context providing the pinned parse buffer.</param>
/// <param name="command">Target merged-batch command receiving the parsed commands.</param>
/// <param name="stream">Raw request body stream.</param>
/// <param name="database">Database used to construct patch commands.</param>
/// <param name="serverStore">Server store used to resolve identity values.</param>
public static async Task BuildCommandsAsync(JsonOperationContext ctx, BatchHandler.MergedBatchCommand command, Stream stream, DocumentDatabase database, ServerStore serverStore)
{
    CommandData[] cmds = Empty;
    // Lazily allocated: only commands whose id ends with '|' need identity resolution.
    List<string> identities = null;
    List<int> positionInListToCommandIndex = null;
    int index = -1;
    var state = new JsonParserState();
    using (ctx.GetManagedBuffer(out JsonOperationContext.ManagedPinnedBuffer buffer))
    using (var parser = new UnmanagedJsonParser(ctx, state, "bulk_docs"))
    using (var modifier = new BlittableMetadataModifier(ctx))
    {
        // Expect the body to open with '{'.
        while (parser.Read() == false)
        {
            await RefillParserBuffer(stream, buffer, parser);
        }
        if (state.CurrentTokenType != JsonParserToken.StartObject)
        {
            ThrowUnexpectedToken(JsonParserToken.StartObject, state);
        }
        // Expect the first property name...
        while (parser.Read() == false)
        {
            await RefillParserBuffer(stream, buffer, parser);
        }
        if (state.CurrentTokenType != JsonParserToken.String)
        {
            ThrowUnexpectedToken(JsonParserToken.String, state);
        }
        // ...and that it is "Commands": the magic constant is compared against the raw
        // string buffer as a long (presumably the 8 ASCII bytes of "Commands" — TODO confirm).
        if (GetLongFromStringBuffer(state) != 8314892176759549763) // Commands
        {
            ThrowUnexpectedToken(JsonParserToken.String, state);
        }
        // The "Commands" value must be an array.
        while (parser.Read() == false)
        {
            await RefillParserBuffer(stream, buffer, parser);
        }
        if (state.CurrentTokenType != JsonParserToken.StartArray)
        {
            ThrowUnexpectedToken(JsonParserToken.StartArray, state);
        }
        // Read commands until the array closes.
        while (true)
        {
            while (parser.Read() == false)
            {
                await RefillParserBuffer(stream, buffer, parser);
            }
            if (state.CurrentTokenType == JsonParserToken.EndArray)
            {
                break;
            }
            index++;
            if (index >= cmds.Length)
            {
                cmds = IncreaseSizeOfCommandsBuffer(index, cmds);
            }
            var commandData = await ReadSingleCommand(ctx, stream, state, parser, buffer, modifier, default);
            if (commandData.Type == CommandType.PATCH)
            {
                commandData.PatchCommand = new PatchDocumentCommand(
                    ctx,
                    commandData.Id,
                    commandData.ChangeVector,
                    skipPatchIfChangeVectorMismatch: false,
                    (commandData.Patch, commandData.PatchArgs),
                    (commandData.PatchIfMissing, commandData.PatchIfMissingArgs),
                    database,
                    isTest: false,
                    debugMode: false,
                    collectResultsNeeded: true,
                    returnDocument: commandData.ReturnDocument
                );
            }
            if (commandData.Type == CommandType.BatchPATCH)
            {
                commandData.PatchCommand = new BatchPatchDocumentCommand(
                    ctx,
                    commandData.Ids,
                    skipPatchIfChangeVectorMismatch: false,
                    (commandData.Patch, commandData.PatchArgs),
                    (commandData.PatchIfMissing, commandData.PatchIfMissingArgs),
                    database,
                    isTest: false,
                    debugMode: false,
                    collectResultsNeeded: true
                );
            }
            // A trailing '|' in a PUT id requests a server-generated identity value.
            if (commandData.Type == CommandType.PUT && string.IsNullOrEmpty(commandData.Id) == false && commandData.Id[commandData.Id.Length - 1] == '|')
            {
                if (identities == null)
                {
                    identities = new List<string>();
                    positionInListToCommandIndex = new List<int>();
                }
                // queue identities requests in order to send them at once to the leader (using List for simplicity)
                identities.Add(commandData.Id);
                positionInListToCommandIndex.Add(index);
            }
            cmds[index] = commandData;
        }
        if (identities != null)
        {
            // Resolve all queued identity ids in a single round-trip and patch them into cmds.
            await GetIdentitiesValues(ctx, database, serverStore, identities, positionInListToCommandIndex, cmds);
        }
        command.ParsedCommands = new ArraySegment<CommandData>(cmds, 0, index + 1);
        // Trailing content after the array may mark this batch as a cluster transaction.
        if (await IsClusterTransaction(stream, parser, buffer, state))
        {
            command.IsClusterTransaction = true;
        }
    }
}
/// <summary>
/// Lazily reads a JSON array of documents from the import stream and yields them as
/// <see cref="DocumentItem"/>s. Items marked with an export document type are treated
/// as attachment streams and buffered onto the next yielded document. Legacy (V3)
/// imports get HiLo-document collection fix-ups and legacy etag propagation.
/// </summary>
/// <param name="actions">Optional sink that supplies per-document contexts and temp streams.</param>
private IEnumerable<DocumentItem> ReadDocuments(INewDocumentActions actions = null)
{
    if (UnmanagedJsonParserHelper.Read(_peepingTomStream, _parser, _state, _buffer) == false)
    {
        UnmanagedJsonParserHelper.ThrowInvalidJson("Unexpected end of json", _peepingTomStream, _parser);
    }
    if (_state.CurrentTokenType != JsonParserToken.StartArray)
    {
        UnmanagedJsonParserHelper.ThrowInvalidJson("Expected start array, but got " + _state.CurrentTokenType, _peepingTomStream, _parser);
    }
    var context = _context;
    var legacyImport = _buildVersionType == BuildVersionType.V3;
    var modifier = new BlittableMetadataModifier(context)
    {
        ReadFirstEtagOfLegacyRevision = legacyImport,
        ReadLegacyEtag = _readLegacyEtag,
        OperateOnTypes = _operateOnTypes
    };
    var builder = CreateBuilder(context, modifier);
    try
    {
        // Attachment streams accumulated since the last yielded document.
        List<DocumentItem.AttachmentStream> attachments = null;
        while (true)
        {
            if (UnmanagedJsonParserHelper.Read(_peepingTomStream, _parser, _state, _buffer) == false)
            {
                UnmanagedJsonParserHelper.ThrowInvalidJson("Unexpected end of json while reading docs", _peepingTomStream, _parser);
            }
            if (_state.CurrentTokenType == JsonParserToken.EndArray)
            {
                break;
            }
            if (actions != null)
            {
                // The actions sink may rotate contexts; rebuild the builder when it does.
                var oldContext = context;
                context = actions.GetContextForNewDocument();
                if (oldContext != context)
                {
                    builder.Dispose();
                    builder = CreateBuilder(context, modifier);
                }
            }
            builder.Renew("import/object", BlittableJsonDocumentBuilder.UsageMode.ToDisk);
            _context.CachedProperties.NewDocument();
            ReadObject(builder);
            var data = builder.CreateReader();
            builder.Reset();
            // An explicit export document type means this item is not a plain document.
            if (data.TryGet(Constants.Documents.Metadata.Key, out BlittableJsonReaderObject metadata) && metadata.TryGet(DocumentItem.ExportDocumentType.Key, out string type))
            {
                if (type != DocumentItem.ExportDocumentType.Attachment)
                {
                    // Unknown typed item: warn and skip.
                    var msg = $"Ignoring an item of type `{type}`. " + data;
                    if (_log.IsOperationsEnabled)
                    {
                        _log.Operations(msg);
                    }
                    _result.AddWarning(msg);
                    continue;
                }
                if (attachments == null)
                {
                    attachments = new List<DocumentItem.AttachmentStream>();
                }
                // NOTE(review): actions may be null here (default parameter) — this would
                // NRE for attachment items; confirm callers always pass actions in that case.
                var attachment = new DocumentItem.AttachmentStream
                {
                    Stream = actions.GetTempStream()
                };
                ProcessAttachmentStream(context, data, ref attachment);
                attachments.Add(attachment);
                continue;
            }
            if (legacyImport)
            {
                // Legacy HiLo documents are moved into the dedicated HiLo collection.
                if (modifier.Id.Contains(HiLoHandler.RavenHiloIdPrefix))
                {
                    data.Modifications = new DynamicJsonValue
                    {
                        [Constants.Documents.Metadata.Key] = new DynamicJsonValue
                        {
                            [Constants.Documents.Metadata.Collection] = CollectionName.HiLoCollection
                        }
                    };
                }
            }
            if (data.Modifications != null)
            {
                // Materialize pending modifications into a fresh blittable.
                data = context.ReadObject(data, modifier.Id, BlittableJsonDocumentBuilder.UsageMode.ToDisk);
            }
            _result.LegacyLastDocumentEtag = modifier.LegacyEtag;
            yield return(new DocumentItem
            {
                Document = new Document
                {
                    Data = data,
                    Id = modifier.Id,
                    ChangeVector = modifier.ChangeVector,
                    Flags = modifier.Flags,
                    NonPersistentFlags = modifier.NonPersistentFlags,
                    LastModified = modifier.LastModified ?? _database.Time.GetUtcNow(),
                },
                Attachments = attachments
            });
            // Attachments belong to the document just yielded; start fresh.
            attachments = null;
        }
    }
    finally
    {
        builder.Dispose();
    }
}
/// <summary>
/// Lazily reads a JSON array of legacy (pre-4.0) attachments from the import stream.
/// Each attachment is wrapped in a dummy document (flagged HasAttachments) so it can
/// flow through the regular document import pipeline. Skippable attachments are dropped.
/// </summary>
/// <param name="actions">Sink supplying per-document contexts and temp streams for attachment data.</param>
private IEnumerable<DocumentItem> ReadLegacyAttachments(INewDocumentActions actions)
{
    if (UnmanagedJsonParserHelper.Read(_peepingTomStream, _parser, _state, _buffer) == false)
    {
        UnmanagedJsonParserHelper.ThrowInvalidJson("Unexpected end of json", _peepingTomStream, _parser);
    }
    if (_state.CurrentTokenType != JsonParserToken.StartArray)
    {
        UnmanagedJsonParserHelper.ThrowInvalidJson("Expected start array, but got " + _state.CurrentTokenType, _peepingTomStream, _parser);
    }
    var context = _context;
    var modifier = new BlittableMetadataModifier(context);
    var builder = CreateBuilder(context, modifier);
    try
    {
        while (true)
        {
            if (UnmanagedJsonParserHelper.Read(_peepingTomStream, _parser, _state, _buffer) == false)
            {
                UnmanagedJsonParserHelper.ThrowInvalidJson("Unexpected end of json while reading legacy attachments", _peepingTomStream, _parser);
            }
            if (_state.CurrentTokenType == JsonParserToken.EndArray)
            {
                break;
            }
            if (actions != null)
            {
                // The actions sink may rotate contexts; rebuild the builder when it does.
                var oldContext = context;
                context = actions.GetContextForNewDocument();
                if (oldContext != context)
                {
                    builder.Dispose();
                    builder = CreateBuilder(context, modifier);
                }
            }
            builder.Renew("import/object", BlittableJsonDocumentBuilder.UsageMode.ToDisk);
            _context.CachedProperties.NewDocument();
            ReadObject(builder);
            var data = builder.CreateReader();
            builder.Reset();
            // Attachment payload is copied into a temp stream owned by the actions sink.
            var attachment = new DocumentItem.AttachmentStream
            {
                Stream = actions.GetTempStream()
            };
            var attachmentInfo = ProcessLegacyAttachment(context, data, ref attachment);
            if (ShouldSkip(attachmentInfo))
            {
                continue;
            }
            // Wrap the attachment in a synthetic document so downstream import code
            // can treat it uniformly.
            var dummyDoc = new DocumentItem
            {
                Document = new Document
                {
                    Data = WriteDummyDocumentForAttachment(context, attachmentInfo),
                    Id = attachmentInfo.Id,
                    ChangeVector = string.Empty,
                    Flags = DocumentFlags.HasAttachments,
                    NonPersistentFlags = NonPersistentDocumentFlags.FromSmuggler,
                    LastModified = _database.Time.GetUtcNow(),
                },
                Attachments = new List<DocumentItem.AttachmentStream>
                {
                    attachment
                }
            };
            yield return(dummyDoc);
        }
    }
    finally
    {
        builder.Dispose();
    }
}
/// <summary>
/// Creates a disk-mode blittable document builder bound to the shared parser and
/// parser state, using the supplied metadata modifier.
/// </summary>
private BlittableJsonDocumentBuilder CreateBuilder(JsonOperationContext context, BlittableMetadataModifier modifier)
{
    var usageMode = BlittableJsonDocumentBuilder.UsageMode.ToDisk;
    var builder = new BlittableJsonDocumentBuilder(context, usageMode, "import/object", _parser, _state, modifier: modifier);
    return builder;
}
/// <summary>
/// Streams bulk-insert commands from the request body and enqueues them to the
/// transaction merger in batches. A batch is flushed early when the parser is about
/// to block on the network (so buffered work isn't held up) or when the accumulated
/// size exceeds 16 MB. Document contexts are double-buffered: the previous context is
/// kept alive one batch longer because the in-flight batch may still reference it.
/// </summary>
/// <param name="onProgress">Callback invoked after every flushed batch.</param>
/// <param name="token">Cancellation token checked per command.</param>
private async Task<IOperationResult> DoBulkInsert(Action<IOperationProgress> onProgress, CancellationToken token)
{
    var progress = new BulkInsertProgress();
    try
    {
        var logger = LoggingSource.Instance.GetLogger<MergedInsertBulkCommand>(Database.Name);
        IDisposable currentCtxReset = null, previousCtxReset = null;
        try
        {
            using (ContextPool.AllocateOperationContext(out JsonOperationContext context))
            using (var buffer = JsonOperationContext.ManagedPinnedBuffer.LongLivedInstance())
            {
                currentCtxReset = ContextPool.AllocateOperationContext(out JsonOperationContext docsCtx);
                var requestBodyStream = RequestBodyStream();
                using (var parser = new BatchRequestParser.ReadMany(context, requestBodyStream, buffer, token))
                {
                    await parser.Init();
                    var array = new BatchRequestParser.CommandData[8];
                    var numberOfCommands = 0;
                    long totalSize = 0;
                    while (true)
                    {
                        using (var modifier = new BlittableMetadataModifier(docsCtx))
                        {
                            var task = parser.MoveNext(docsCtx, modifier);
                            if (task == null)
                            {
                                // End of the commands array.
                                break;
                            }
                            token.ThrowIfCancellationRequested();
                            // if we are going to wait on the network, flush immediately
                            if ((task.Wait(5) == false && numberOfCommands > 0) ||
                                // but don't batch too much anyway
                                totalSize > 16 * Voron.Global.Constants.Size.Megabyte)
                            {
                                using (ReplaceContextIfCurrentlyInUse(task, numberOfCommands, array))
                                {
                                    await Database.TxMerger.Enqueue(new MergedInsertBulkCommand
                                    {
                                        Commands = array,
                                        NumberOfCommands = numberOfCommands,
                                        Database = Database,
                                        Logger = logger,
                                        TotalSize = totalSize
                                    });
                                }
                                progress.BatchCount++;
                                progress.Processed += numberOfCommands;
                                progress.LastProcessedId = array[numberOfCommands - 1].Id;
                                onProgress(progress);
                                // Rotate document contexts: dispose the one from two batches
                                // ago, demote the current one, and allocate a fresh context.
                                previousCtxReset?.Dispose();
                                previousCtxReset = currentCtxReset;
                                currentCtxReset = ContextPool.AllocateOperationContext(out docsCtx);
                                numberOfCommands = 0;
                                totalSize = 0;
                            }
                            var commandData = await task;
                            if (commandData.Type == CommandType.None)
                            {
                                break;
                            }
                            totalSize += commandData.Document.Size;
                            if (numberOfCommands >= array.Length)
                            {
                                Array.Resize(ref array, array.Length * 2);
                            }
                            array[numberOfCommands++] = commandData;
                        }
                    }
                    // Flush whatever remains after the stream ends.
                    if (numberOfCommands > 0)
                    {
                        await Database.TxMerger.Enqueue(new MergedInsertBulkCommand
                        {
                            Commands = array,
                            NumberOfCommands = numberOfCommands,
                            Database = Database,
                            Logger = logger,
                            TotalSize = totalSize
                        });
                        progress.BatchCount++;
                        progress.Processed += numberOfCommands;
                        progress.LastProcessedId = array[numberOfCommands - 1].Id;
                        onProgress(progress);
                    }
                }
            }
        }
        finally
        {
            currentCtxReset?.Dispose();
            previousCtxReset?.Dispose();
        }
        HttpContext.Response.StatusCode = (int)HttpStatusCode.Created;
        return(new BulkOperationResult
        {
            Total = progress.Processed
        });
    }
    catch (Exception e)
    {
        // Force-close the connection so the client doesn't reuse a half-consumed stream.
        HttpContext.Response.Headers["Connection"] = "close";
        throw new InvalidOperationException("Failed to process bulk insert. " + progress, e);
    }
}
/// <summary>
/// Streams bulk-insert commands from the request body and enqueues them to the
/// transaction merger in batches. A batch is flushed early when the parser is about
/// to block on the network, when accumulated size exceeds 16 MB, or when the
/// operations count reaches 8192. Tracks per-type progress (documents, attachments,
/// counters, time series) and writes AttachmentPUT blobs to temp storage as they
/// arrive. Document contexts are double-buffered because the in-flight batch may
/// still reference the previous context.
/// </summary>
/// <param name="onProgress">Callback invoked after every flushed batch.</param>
/// <param name="token">Cancellation token checked per command.</param>
private async Task<IOperationResult> DoBulkInsert(Action<IOperationProgress> onProgress, CancellationToken token)
{
    var progress = new BulkInsertProgress();
    try
    {
        var logger = LoggingSource.Instance.GetLogger<MergedInsertBulkCommand>(Database.Name);
        IDisposable currentCtxReset = null, previousCtxReset = null;
        try
        {
            using (ContextPool.AllocateOperationContext(out JsonOperationContext context))
            using (context.GetMemoryBuffer(out var buffer))
            {
                currentCtxReset = ContextPool.AllocateOperationContext(out JsonOperationContext docsCtx);
                var requestBodyStream = RequestBodyStream();
                using (var parser = new BatchRequestParser.ReadMany(context, requestBodyStream, buffer, token))
                {
                    await parser.Init();
                    var array = new BatchRequestParser.CommandData[8];
                    var numberOfCommands = 0;
                    long totalSize = 0;
                    int operationsCount = 0;
                    while (true)
                    {
                        using (var modifier = new BlittableMetadataModifier(docsCtx))
                        {
                            var task = parser.MoveNext(docsCtx, modifier);
                            if (task == null)
                            {
                                // End of the commands array.
                                break;
                            }
                            token.ThrowIfCancellationRequested();
                            // if we are going to wait on the network, flush immediately
                            if ((task.Wait(5) == false && numberOfCommands > 0) ||
                                // but don't batch too much anyway
                                totalSize > 16 * Voron.Global.Constants.Size.Megabyte ||
                                operationsCount >= 8192)
                            {
                                using (ReplaceContextIfCurrentlyInUse(task, numberOfCommands, array))
                                {
                                    await Database.TxMerger.Enqueue(new MergedInsertBulkCommand
                                    {
                                        Commands = array,
                                        NumberOfCommands = numberOfCommands,
                                        Database = Database,
                                        Logger = logger,
                                        TotalSize = totalSize
                                    });
                                }
                                // Temp files backing already-enqueued attachment streams
                                // are no longer needed once the batch is handed off.
                                ClearStreamsTempFiles();
                                progress.BatchCount++;
                                progress.Total += numberOfCommands;
                                progress.LastProcessedId = array[numberOfCommands - 1].Id;
                                onProgress(progress);
                                // Rotate document contexts: dispose the one from two batches
                                // ago, demote the current one, and allocate a fresh context.
                                previousCtxReset?.Dispose();
                                previousCtxReset = currentCtxReset;
                                currentCtxReset = ContextPool.AllocateOperationContext(out docsCtx);
                                numberOfCommands = 0;
                                totalSize = 0;
                                operationsCount = 0;
                            }
                            var commandData = await task;
                            if (commandData.Type == CommandType.None)
                            {
                                break;
                            }
                            if (commandData.Type == CommandType.AttachmentPUT)
                            {
                                // Spool the attachment blob out of the request stream now,
                                // before the parser moves on.
                                commandData.AttachmentStream = await WriteAttachment(commandData.ContentLength, parser.GetBlob(commandData.ContentLength));
                            }
                            (long size, int opsCount) = GetSizeAndOperationsCount(commandData);
                            operationsCount += opsCount;
                            totalSize += size;
                            if (numberOfCommands >= array.Length)
                            {
                                // Grow by at most 1024 entries at a time.
                                Array.Resize(ref array, array.Length + Math.Min(1024, array.Length));
                            }
                            array[numberOfCommands++] = commandData;
                            switch (commandData.Type)
                            {
                                case CommandType.PUT:
                                    progress.DocumentsProcessed++;
                                    break;
                                case CommandType.AttachmentPUT:
                                    progress.AttachmentsProcessed++;
                                    break;
                                case CommandType.Counters:
                                    progress.CountersProcessed++;
                                    break;
                                case CommandType.TimeSeriesBulkInsert:
                                    progress.TimeSeriesProcessed++;
                                    break;
                            }
                        }
                    }
                    // Flush whatever remains after the stream ends.
                    if (numberOfCommands > 0)
                    {
                        await Database.TxMerger.Enqueue(new MergedInsertBulkCommand
                        {
                            Commands = array,
                            NumberOfCommands = numberOfCommands,
                            Database = Database,
                            Logger = logger,
                            TotalSize = totalSize
                        });
                        progress.BatchCount++;
                        progress.Total += numberOfCommands;
                        progress.LastProcessedId = array[numberOfCommands - 1].Id;
#pragma warning disable CS0618 // Type or member is obsolete
                        progress.Processed = progress.DocumentsProcessed;
#pragma warning restore CS0618 // Type or member is obsolete
                        onProgress(progress);
                    }
                }
            }
        }
        finally
        {
            currentCtxReset?.Dispose();
            previousCtxReset?.Dispose();
            ClearStreamsTempFiles();
        }
        HttpContext.Response.StatusCode = (int)HttpStatusCode.Created;
        return(new BulkOperationResult
        {
            Total = progress.Total,
            DocumentsProcessed = progress.DocumentsProcessed,
            AttachmentsProcessed = progress.AttachmentsProcessed,
            CountersProcessed = progress.CountersProcessed,
            TimeSeriesProcessed = progress.TimeSeriesProcessed
        });
    }
    catch (Exception e)
    {
        // Force-close the connection so the client doesn't reuse a half-consumed stream.
        HttpContext.Response.Headers["Connection"] = "close";
        throw new InvalidOperationException("Failed to process bulk insert. " + progress, e);
    }
}