/// <summary>
/// Flushes the current batch to the transaction merger when it has grown large enough,
/// overlapping the new batch's enqueue with the wait for the previous one.
/// </summary>
private void HandleBatchOfDocumentsIfNecessary()
{
    // Flush when the batch crosses the hard threshold, or opportunistically once the
    // previous batch has completed and the current one is big enough (> 1 MB) to be
    // worth submitting early.
    var prevDoneAndHasEnough = _command.Context.AllocatedMemory > Constants.Size.Megabyte && _prevCommandTask.IsCompleted;
    var currentReachedLimit = _command.Context.AllocatedMemory > _enqueueThreshold.GetValue(SizeUnit.Bytes);
    if (currentReachedLimit == false && prevDoneAndHasEnough == false)
        return;

    var prevCommand = _prevCommand;
    var prevCommandTask = _prevCommandTask;

    // Fix: enqueue the current command *before* publishing it into _prevCommand /
    // _prevCommandTask. Previously _prevCommand was assigned first, so a throwing
    // Enqueue would leave the fields pointing at a command that was never submitted,
    // while _prevCommandTask still referenced the old task.
    var commandTask = _database.TxMerger.Enqueue(_command).AsTask();
    _prevCommand = _command;
    _prevCommandTask = commandTask;

    if (prevCommand != null)
    {
        using (prevCommand)
        {
            // GetAwaiter().GetResult() rethrows the original exception rather than
            // wrapping it in an AggregateException the way Wait() does.
            prevCommandTask.GetAwaiter().GetResult();
            Debug.Assert(prevCommand.IsDisposed == false,
                "we rely on reusing this context on the next batch, so it has to be disposed here");
        }
    }

    _command = new MergedBatchPutCommand(_database, _buildType, _log)
    {
        IsRevision = _isRevision
    };
}
/// <summary>
/// Creates the document sink used during import, sizing the flush threshold
/// to the address space available to the process.
/// </summary>
public DatabaseDocumentActions(DocumentDatabase database, BuildVersionType buildType, bool isRevision, Logger log)
{
    _database = database;
    _buildType = buildType;
    _isRevision = isRevision;
    _log = log;

    // In a 32-bit process (or when the 32-bit pager is forced) keep batches small
    // to avoid exhausting the limited address space.
    var constrainedAddressSpace = sizeof(int) == IntPtr.Size || database.Configuration.Storage.ForceUsing32BitsPager;
    _enqueueThreshold = new Sparrow.Size(constrainedAddressSpace ? 2 : 32, SizeUnit.Megabytes);

    _command = new MergedBatchPutCommand(database, buildType, log)
    {
        IsRevision = isRevision
    };
}
/// <summary>
/// If the accumulated batch has reached the flush size, waits for the previous
/// batch (reclaiming its context and parser state), enqueues the current batch,
/// and starts a fresh one.
/// </summary>
private async Task HandleBatchOfDocuments(DocumentsOperationContext context, UnmanagedJsonParser parser, long buildVersion)
{
    // Nothing to do until the batch reaches 16 MB.
    if (_batchPutCommand.TotalSize < 16 * Voron.Global.Constants.Size.Megabyte)
        return;

    if (_prevCommand != null)
    {
        using (_prevCommand)
        {
            // The previous batch must complete before its context/parser state
            // can be reset for reuse.
            await _prevCommandTask;
            ResetContextAndParser(context, parser);
        }
    }

    _prevCommandTask = _database.TxMerger.Enqueue(_batchPutCommand);
    _prevCommand = _batchPutCommand;
    _batchPutCommand = new MergedBatchPutCommand(_database, buildVersion);
}
/// <summary>
/// Synchronously drains the pipeline: waits for the in-flight batch, then submits
/// whatever remains in the current batch, disposing both commands.
/// </summary>
private void FinishBatchOfDocuments()
{
    if (_prevCommand != null)
    {
        using (_prevCommand)
        {
            AsyncHelpers.RunSync(() => _prevCommandTask);
        }
        _prevCommand = null;
    }

    if (_command.Documents.Count > 0)
    {
        using (_command)
        {
            AsyncHelpers.RunSync(() => _database.TxMerger.Enqueue(_command));
        }
    }
    else
    {
        // Fix: an empty final batch was previously set to null without being
        // disposed, leaking the command's allocated context.
        _command.Dispose();
    }
    _command = null;
}
/// <summary>
/// Asynchronously drains the pipeline: awaits the in-flight batch, then submits
/// whatever remains in the current batch, disposing both commands.
/// </summary>
private async Task FinishBatchOfDocuments()
{
    if (_prevCommand != null)
    {
        using (_prevCommand)
        {
            await _prevCommandTask;
        }
        _prevCommand = null;
    }

    if (_batchPutCommand.Documents.Count > 0)
    {
        using (_batchPutCommand)
        {
            await _database.TxMerger.Enqueue(_batchPutCommand);
        }
    }
    else
    {
        // Fix: an empty final batch was previously set to null without being
        // disposed, leaking the command's allocated context.
        _batchPutCommand.Dispose();
    }
    _batchPutCommand = null;
}
/// <summary>
/// Flushes the current batch to the transaction merger when it has grown large enough.
/// Pipelines the work: the new batch is enqueued first, and only then do we block on
/// the previous batch, overlapping the two.
/// </summary>
private void HandleBatchOfDocumentsIfNecessary()
{
    var commandSize = _command.GetCommandAllocationSize();
    // Flush when the batch crosses the hard threshold, or opportunistically once the
    // previous batch has already completed and this one is at least 1 MB.
    var prevDoneAndHasEnough = commandSize > Constants.Size.Megabyte && _prevCommandTask.IsCompleted;
    var currentReachedLimit = commandSize > _enqueueThreshold.GetValue(SizeUnit.Bytes);
    if (currentReachedLimit == false && prevDoneAndHasEnough == false)
    {
        return;
    }
    var prevCommand = _prevCommand;
    var prevCommandTask = _prevCommandTask;
    var commandTask = _database.TxMerger.Enqueue(_command);
    // We make sure to enqueue the new command before publishing it into
    // _prevCommand/_prevCommandTask, so that if Enqueue fails we won't be left
    // waiting on a command that was never submitted.
    _prevCommand = _command;
    _prevCommandTask = commandTask;
    if (prevCommand != null)
    {
        using (prevCommand)
        {
            // GetAwaiter().GetResult() rethrows the original exception instead of
            // an AggregateException.
            prevCommandTask.GetAwaiter().GetResult();
            Debug.Assert(prevCommand.IsDisposed == false,
                "we rely on reusing this context on the next batch, so it has to be disposed here");
        }
    }
    _command = new MergedBatchPutCommand(_database, _buildType, _log, _missingDocumentsForRevisions)
    {
        IsRevision = _isRevision
    };
}
/// <summary>
/// Streams a smuggler dump from <paramref name="stream"/> and imports its sections
/// (documents, revision documents, indexes, transformers, identities) into the database.
/// Drives an UnmanagedJsonParser state machine keyed on the last seen property name
/// (<c>operateOnType</c>), batching document puts through the transaction merger.
/// </summary>
/// <param name="context">Operation context used for parsing and the identities write transaction.</param>
/// <param name="stream">Raw dump stream; read in buffer-sized chunks.</param>
/// <param name="onProgress">Optional progress callback.</param>
/// <returns>Counts of imported items plus any non-fatal warnings.</returns>
public async Task<ImportResult> Import(DocumentsOperationContext context, Stream stream, Action<IOperationProgress> onProgress = null)
{
    var result = new ImportResult();
    var progress = new IndeterminateProgress();
    var state = new JsonParserState();
    JsonOperationContext.ManagedPinnedBuffer buffer;
    using (context.GetManagedBuffer(out buffer))
    using (var parser = new UnmanagedJsonParser(context, state, "fileName"))
    {
        // Sentinel: we have not yet consumed the top-level '{' of the dump.
        var operateOnType = "__top_start_object";
        var buildVersion = 0L;
        var identities = new Dictionary<string, long>();
        VersioningStorage versioningStorage = null;
        while (true)
        {
            if (parser.Read() == false)
            {
                // Parser is starved; refill from the stream.
                var read = await stream.ReadAsync(buffer.Buffer.Array, buffer.Buffer.Offset, buffer.Length);
                if (read == 0)
                {
                    if (state.CurrentTokenType != JsonParserToken.EndObject)
                        throw new EndOfStreamException("Stream ended without reaching end of json content");
                    break;
                }
                parser.SetBuffer(buffer, read);
                continue;
            }
            switch (state.CurrentTokenType)
            {
                case JsonParserToken.String:
                    // A property name at the top level selects the current section.
                    unsafe
                    {
                        operateOnType = new LazyStringValue(null, state.StringBuffer, state.StringSize, context).ToString();
                    }
                    break;

                case JsonParserToken.Integer:
                    switch (operateOnType)
                    {
                        case "BuildVersion":
                            buildVersion = state.Long;
                            break;
                    }
                    break;

                case JsonParserToken.StartObject:
                    if (operateOnType == "__top_start_object")
                    {
                        // This is the dump's outermost '{'; nothing to materialize.
                        operateOnType = null;
                        break;
                    }
                    context.CachedProperties.NewDocument();
                    // Build the object on the batch command's context so its memory
                    // lives as long as the batch.
                    var builder = new BlittableJsonDocumentBuilder(_batchPutCommand.Context,
                        BlittableJsonDocumentBuilder.UsageMode.ToDisk, "ImportObject", parser, state);
                    builder.ReadNestedObject();
                    while (builder.Read() == false)
                    {
                        var read = await stream.ReadAsync(buffer.Buffer.Array, buffer.Buffer.Offset, buffer.Length);
                        if (read == 0)
                            throw new EndOfStreamException("Stream ended without reaching end of json content");
                        parser.SetBuffer(buffer, read);
                    }
                    builder.FinalizeDocument();

                    if (operateOnType == "Docs" && Options.OperateOnTypes.HasFlag(DatabaseItemType.Documents))
                    {
                        progress.Progress = "Importing Documents";
                        onProgress?.Invoke(progress);
                        PatchDocument patch = null;
                        PatchRequest patchRequest = null;
                        if (string.IsNullOrWhiteSpace(Options.TransformScript) == false)
                        {
                            patch = new PatchDocument(context.DocumentDatabase);
                            patchRequest = new PatchRequest
                            {
                                Script = Options.TransformScript
                            };
                        }
                        result.DocumentsCount++;
                        var reader = builder.CreateReader();
                        var document = new Document
                        {
                            Data = reader
                        };
                        if (Options.IncludeExpired == false && document.Expired(_database.Time.GetUtcNow()))
                            continue;
                        TransformScriptOrDisableVersioningIfNeeded(context, patch, reader, document, patchRequest);
                        _batchPutCommand.Add(document.Data);
                        if (result.DocumentsCount % 1000 == 0)
                        {
                            progress.Progress = $"Imported {result.DocumentsCount} documents";
                            onProgress?.Invoke(progress);
                        }
                        await HandleBatchOfDocuments(context, parser, buildVersion).ConfigureAwait(false);
                    }
                    else if (operateOnType == "RevisionDocuments" && Options.OperateOnTypes.HasFlag(DatabaseItemType.RevisionDocuments))
                    {
                        // NOTE(review): when versioning is disabled the revision is
                        // skipped without disposing the builder — presumably its memory
                        // is reclaimed with the batch context; confirm.
                        if (versioningStorage == null)
                            break;
                        result.RevisionDocumentsCount++;
                        var reader = builder.CreateReader();
                        _batchPutCommand.Add(reader);
                        await HandleBatchOfDocuments(context, parser, buildVersion).ConfigureAwait(false);
                    }
                    else
                    {
                        using (builder)
                        {
                            switch (operateOnType)
                            {
                                case "Attachments":
                                    result.Warnings.Add("Attachments are not supported anymore. Use RavenFS instead. Skipping.");
                                    break;
                                case "Indexes":
                                    if (Options.OperateOnTypes.HasFlag(DatabaseItemType.Indexes) == false)
                                        continue;
                                    result.IndexesCount++;
                                    progress.Progress = "Importing Indexes";
                                    onProgress?.Invoke(progress);
                                    try
                                    {
                                        IndexProcessor.Import(builder, _database, buildVersion, Options.RemoveAnalyzers);
                                    }
                                    catch (Exception e)
                                    {
                                        // A bad index definition should not abort the whole import.
                                        result.Warnings.Add($"Could not import index. Message: {e.Message}");
                                    }
                                    break;
                                case "Transformers":
                                    if (Options.OperateOnTypes.HasFlag(DatabaseItemType.Transformers) == false)
                                        continue;
                                    result.TransformersCount++;
                                    progress.Progress = "Importing Transformers";
                                    onProgress?.Invoke(progress);
                                    try
                                    {
                                        TransformerProcessor.Import(builder, _database, buildVersion);
                                    }
                                    catch (Exception e)
                                    {
                                        result.Warnings.Add($"Could not import transformer. Message: {e.Message}");
                                    }
                                    break;
                                case "Identities":
                                    if (Options.OperateOnTypes.HasFlag(DatabaseItemType.Identities))
                                    {
                                        result.IdentitiesCount++;
                                        progress.Progress = "Importing Identities";
                                        onProgress?.Invoke(progress);
                                        using (var reader = builder.CreateReader())
                                        {
                                            try
                                            {
                                                string identityKey, identityValueString;
                                                long identityValue;
                                                if (reader.TryGet("Key", out identityKey) == false ||
                                                    reader.TryGet("Value", out identityValueString) == false ||
                                                    long.TryParse(identityValueString, out identityValue) == false)
                                                {
                                                    result.Warnings.Add($"Cannot import the following identity: '{reader}'. Skipping.");
                                                }
                                                else
                                                {
                                                    // Collected here; written in one transaction at EndArray.
                                                    identities[identityKey] = identityValue;
                                                }
                                            }
                                            catch (Exception e)
                                            {
                                                result.Warnings.Add($"Cannot import the following identity: '{reader}'. Error: {e}. Skipping.");
                                            }
                                        }
                                    }
                                    break;
                                default:
                                    result.Warnings.Add($"The following type is not recognized: '{operateOnType}'. Skipping.");
                                    break;
                            }
                        }
                    }
                    break;

                case JsonParserToken.StartArray:
                    switch (operateOnType)
                    {
                        case "RevisionDocuments":
                            // We are taking a reference here since the documents import can activate or disable the versioning.
                            // We hold a local copy because the user can disable the bundle during the import process, externally.
                            // In this case we want to continue to import the revisions documents.
                            versioningStorage = _database.BundleLoader.VersioningStorage;
                            _batchPutCommand.IsRevision = true;
                            break;
                    }
                    break;

                case JsonParserToken.EndArray:
                    switch (operateOnType)
                    {
                        case "Docs":
                            await FinishBatchOfDocuments();
                            _batchPutCommand = new MergedBatchPutCommand(_database, buildVersion);
                            break;
                        case "RevisionDocuments":
                            await FinishBatchOfDocuments();
                            break;
                        case "Identities":
                            if (identities.Count > 0)
                            {
                                using (var tx = context.OpenWriteTransaction())
                                {
                                    _database.DocumentsStorage.UpdateIdentities(context, identities);
                                    tx.Commit();
                                }
                            }
                            identities = null;
                            break;
                    }
                    break;
            }
        }
    }
    return result;
}
/// <summary>
/// Creates an importer bound to <paramref name="database"/>, with an initial
/// (build-version 0) batch command and either the supplied options or defaults.
/// </summary>
public SmugglerImporter(DocumentDatabase database, DatabaseSmugglerOptions options = null)
{
    _database = database;
    Options = options ?? new DatabaseSmugglerOptions();
    _batchPutCommand = new MergedBatchPutCommand(_database, 0);
}