public IDisposable Initialize(DatabaseSmugglerOptions options, SmugglerResult result, out long buildVersion)
{
    _result = result;
    _returnBuffer = _context.GetManagedBuffer(out _buffer);
    _state = new JsonParserState();
    _parser = new UnmanagedJsonParser(_context, _state, "file");

    // Read the first token and ensure the file starts with a JSON object.
    if (UnmanagedJsonParserHelper.Read(_peepingTomStream, _parser, _state, _buffer) == false)
    {
        UnmanagedJsonParserHelper.ThrowInvalidJson("Unexpected end of json.", _peepingTomStream, _parser);
    }

    if (_state.CurrentTokenType != JsonParserToken.StartObject)
    {
        UnmanagedJsonParserHelper.ThrowInvalidJson("Expected start object, but got " + _state.CurrentTokenType, _peepingTomStream, _parser);
    }

    buildVersion = ReadBuildVersion();
    _buildVersionType = BuildVersion.Type(buildVersion);

    // Release the parser and both managed buffers when the caller is done.
    return new DisposableAction(() =>
    {
        _parser.Dispose();
        _returnBuffer.Dispose();
        _returnWriteBuffer.Dispose();
    });
}
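// A minimal usage sketch (hypothetical -- the smuggler instance, options, and result
// objects below are assumptions; only Initialize's signature comes from this file):
//
//     long buildVersion;
//     using (smuggler.Initialize(options, result, out buildVersion))
//     {
//         // The header has been consumed: buildVersion carries the exported server's
//         // "BuildVersion", already classified via BuildVersion.Type() for later use.
//     }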
public async Task<ImportResult> Import(DocumentsOperationContext context, Stream stream, Action<IOperationProgress> onProgress = null)
{
    var result = new ImportResult();
    var progress = new IndeterminateProgress();
    var state = new JsonParserState();

    JsonOperationContext.ManagedPinnedBuffer buffer;
    using (context.GetManagedBuffer(out buffer))
    using (var parser = new UnmanagedJsonParser(context, state, "fileName"))
    {
        var operateOnType = "__top_start_object";
        var buildVersion = 0L;
        var identities = new Dictionary<string, long>();
        VersioningStorage versioningStorage = null;

        while (true)
        {
            if (parser.Read() == false)
            {
                // The parser ran out of buffered input; refill it from the stream.
                var read = await stream.ReadAsync(buffer.Buffer.Array, buffer.Buffer.Offset, buffer.Length);
                if (read == 0)
                {
                    if (state.CurrentTokenType != JsonParserToken.EndObject)
                        throw new EndOfStreamException("Stream ended without reaching end of json content");
                    break;
                }
                parser.SetBuffer(buffer, read);
                continue;
            }

            switch (state.CurrentTokenType)
            {
                case JsonParserToken.String:
                    // A top-level property name: remember which section we are in.
                    unsafe
                    {
                        operateOnType = new LazyStringValue(null, state.StringBuffer, state.StringSize, context).ToString();
                    }
                    break;

                case JsonParserToken.Integer:
                    switch (operateOnType)
                    {
                        case "BuildVersion":
                            buildVersion = state.Long;
                            break;
                    }
                    break;

                case JsonParserToken.StartObject:
                    if (operateOnType == "__top_start_object")
                    {
                        operateOnType = null;
                        break;
                    }

                    // Build the next nested object into a blittable document,
                    // refilling the parser buffer from the stream as needed.
                    context.CachedProperties.NewDocument();
                    var builder = new BlittableJsonDocumentBuilder(_batchPutCommand.Context, BlittableJsonDocumentBuilder.UsageMode.ToDisk, "ImportObject", parser, state);
                    builder.ReadNestedObject();
                    while (builder.Read() == false)
                    {
                        var read = await stream.ReadAsync(buffer.Buffer.Array, buffer.Buffer.Offset, buffer.Length);
                        if (read == 0)
                            throw new EndOfStreamException("Stream ended without reaching end of json content");
                        parser.SetBuffer(buffer, read);
                    }
                    builder.FinalizeDocument();

                    if (operateOnType == "Docs" && Options.OperateOnTypes.HasFlag(DatabaseItemType.Documents))
                    {
                        progress.Progress = "Importing Documents";
                        onProgress?.Invoke(progress);

                        PatchDocument patch = null;
                        PatchRequest patchRequest = null;
                        if (string.IsNullOrWhiteSpace(Options.TransformScript) == false)
                        {
                            patch = new PatchDocument(context.DocumentDatabase);
                            patchRequest = new PatchRequest
                            {
                                Script = Options.TransformScript
                            };
                        }

                        result.DocumentsCount++;
                        var reader = builder.CreateReader();
                        var document = new Document
                        {
                            Data = reader,
                        };

                        if (Options.IncludeExpired == false && document.Expired(_database.Time.GetUtcNow()))
                            continue;

                        TransformScriptOrDisableVersioningIfNeeded(context, patch, reader, document, patchRequest);

                        _batchPutCommand.Add(document.Data);

                        if (result.DocumentsCount % 1000 == 0)
                        {
                            progress.Progress = $"Imported {result.DocumentsCount} documents";
                            onProgress?.Invoke(progress);
                        }

                        await HandleBatchOfDocuments(context, parser, buildVersion).ConfigureAwait(false);
                    }
                    else if (operateOnType == "RevisionDocuments" && Options.OperateOnTypes.HasFlag(DatabaseItemType.RevisionDocuments))
                    {
                        if (versioningStorage == null)
                            break;

                        result.RevisionDocumentsCount++;
                        var reader = builder.CreateReader();
                        _batchPutCommand.Add(reader);
                        await HandleBatchOfDocuments(context, parser, buildVersion).ConfigureAwait(false);
                    }
                    else
                    {
                        using (builder)
                        {
                            switch (operateOnType)
                            {
                                case "Attachments":
                                    result.Warnings.Add("Attachments are not supported anymore. Use RavenFS instead. Skipping.");
                                    break;
                                case "Indexes":
                                    if (Options.OperateOnTypes.HasFlag(DatabaseItemType.Indexes) == false)
                                        continue;

                                    result.IndexesCount++;
                                    progress.Progress = "Importing Indexes";
                                    onProgress?.Invoke(progress);

                                    try
                                    {
                                        IndexProcessor.Import(builder, _database, buildVersion, Options.RemoveAnalyzers);
                                    }
                                    catch (Exception e)
                                    {
                                        result.Warnings.Add($"Could not import index. Message: {e.Message}");
                                    }
                                    break;
                                case "Transformers":
                                    if (Options.OperateOnTypes.HasFlag(DatabaseItemType.Transformers) == false)
                                        continue;

                                    result.TransformersCount++;
                                    progress.Progress = "Importing Transformers";
                                    onProgress?.Invoke(progress);

                                    try
                                    {
                                        TransformerProcessor.Import(builder, _database, buildVersion);
                                    }
                                    catch (Exception e)
                                    {
                                        result.Warnings.Add($"Could not import transformer. Message: {e.Message}");
                                    }
                                    break;
                                case "Identities":
                                    if (Options.OperateOnTypes.HasFlag(DatabaseItemType.Identities))
                                    {
                                        result.IdentitiesCount++;
                                        progress.Progress = "Importing Identities";
                                        onProgress?.Invoke(progress);

                                        using (var reader = builder.CreateReader())
                                        {
                                            try
                                            {
                                                string identityKey, identityValueString;
                                                long identityValue;
                                                if (reader.TryGet("Key", out identityKey) == false ||
                                                    reader.TryGet("Value", out identityValueString) == false ||
                                                    long.TryParse(identityValueString, out identityValue) == false)
                                                {
                                                    result.Warnings.Add($"Cannot import the following identity: '{reader}'. Skipping.");
                                                }
                                                else
                                                {
                                                    identities[identityKey] = identityValue;
                                                }
                                            }
                                            catch (Exception e)
                                            {
                                                result.Warnings.Add($"Cannot import the following identity: '{reader}'. Error: {e}. Skipping.");
                                            }
                                        }
                                    }
                                    break;
                                default:
                                    result.Warnings.Add($"The following type is not recognized: '{operateOnType}'. Skipping.");
                                    break;
                            }
                        }
                    }
                    break;

                case JsonParserToken.StartArray:
                    switch (operateOnType)
                    {
                        case "RevisionDocuments":
                            // We are taking a reference here since the documents import can activate or disable the versioning.
                            // We hold a local copy because the user can disable the bundle during the import process, externally.
                            // In this case we want to continue to import the revision documents.
                            versioningStorage = _database.BundleLoader.VersioningStorage;
                            _batchPutCommand.IsRevision = true;
                            break;
                    }
                    break;

                case JsonParserToken.EndArray:
                    switch (operateOnType)
                    {
                        case "Docs":
                            await FinishBatchOfDocuments();
                            _batchPutCommand = new MergedBatchPutCommand(_database, buildVersion);
                            break;
                        case "RevisionDocuments":
                            await FinishBatchOfDocuments();
                            break;
                        case "Identities":
                            // Flush all collected identities in a single write transaction.
                            if (identities.Count > 0)
                            {
                                using (var tx = context.OpenWriteTransaction())
                                {
                                    _database.DocumentsStorage.UpdateIdentities(context, identities);
                                    tx.Commit();
                                }
                            }
                            identities = null;
                            break;
                    }
                    break;
            }
        }
    }

    return result;
}