public Task<List<CounterState>> ReadDeltasAsync(int start, int pageSize, CancellationToken cancellationToken)
{
    var results = new List<CounterState>();
    while (_reader.Read() && _reader.TokenType != JsonToken.EndArray)
    {
        cancellationToken.ThrowIfCancellationRequested();
        var snapshot = RavenJToken.ReadFrom(_reader);
        var name = snapshot.Value<string>("CounterName");
        var group = snapshot.Value<string>("GroupName");
        var sign = snapshot.Value<char>("Sign");
        var value = snapshot.Value<long>("Value");
        var delta = new CounterState
        {
            CounterName = name,
            GroupName = group,
            Value = value,
            Sign = sign
        };
        results.Add(delta);
    }

    return new CompletedTask<List<CounterState>>(results);
}
private async Task<int> ImportTransformers(JsonTextReader jsonReader)
{
    var count = 0;
    while (jsonReader.Read() && jsonReader.TokenType != JsonToken.EndArray)
    {
        Options.CancelToken.Token.ThrowIfCancellationRequested();
        var transformer = RavenJToken.ReadFrom(jsonReader);
        if ((Options.OperateOnTypes & ItemType.Transformers) != ItemType.Transformers)
            continue;

        var transformerName = transformer.Value<string>("name");
        await Operations.PutTransformer(transformerName, transformer);
        count++;
    }

    await Operations.PutTransformer(null, null); // force flush
    return count;
}
private void AssertValidVersionAndTables(Stream log)
{
    try
    {
        var versionInfo = (RavenJObject)RavenJToken.ReadFrom(new BsonReader(log));
        if (versionInfo.Value<int>("Version") != version)
            throw new InvalidOperationException("Invalid Munin file version!");

        var tableNames = versionInfo.Value<RavenJArray>("Tables");
        if (tableNames.Length != tables.Count)
            throw new InvalidOperationException("Different number of tables stored in the Munin file");

        for (int i = 0; i < tableNames.Length; i++)
        {
            if (tableNames[i].Value<string>() != tables[i].Name)
                throw new InvalidOperationException("Table at position " + i + " is expected to be " + tables[i].Name + " but was actually " + tableNames[i]);
        }
    }
    catch (Exception e)
    {
        throw new InvalidOperationException(
            "Could not open Munin data file, probably not a Munin file or an out of date file", e);
    }
}
protected object Deserialize(JsonReader reader, Type type)
{
    using (var tokenReader = new RavenJTokenReader(RavenJToken.ReadFrom(reader)))
    {
        return serializer.Deserialize(tokenReader, type);
    }
}
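A minimal usage sketch for the helper above; the `Invoice` type and the JSON payload are hypothetical, and the call assumes the protected method is reachable (e.g. from within a derived class).

// Hypothetical caller: Invoice and the payload are illustrative only.
var json = "{ \"Id\": \"invoices/1\", \"Amount\": 42 }";
using (var reader = new JsonTextReader(new StringReader(json)))
{
    var invoice = (Invoice)Deserialize(reader, typeof(Invoice));
}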
/// <summary>
/// Reads the JSON representation of the object.
/// </summary>
/// <param name="reader">The <see cref="T:Raven.Imports.Newtonsoft.Json.JsonReader"/> to read from.</param>
/// <param name="objectType">Type of the object.</param>
/// <param name="existingValue">The existing value of the object being read.</param>
/// <param name="serializer">The calling serializer.</param>
/// <returns>The object value.</returns>
public override object ReadJson(JsonReader reader, Type objectType, object existingValue, JsonSerializer serializer)
{
    var token = RavenJToken.ReadFrom(reader);
    var val = token as RavenJValue;
    if (val != null)
        return val.Value;

    var array = token as RavenJArray;
    if (array != null)
    {
        var dynamicJsonObject = new DynamicJsonObject(new RavenJObject());
        return new DynamicList(array.Select(dynamicJsonObject.TransformToValue).ToArray());
    }

    var typeName = token.Value<string>("$type");
    if (typeName != null)
    {
        var type = Type.GetType(typeName, false);
        if (type != null)
            return serializer.Deserialize(new RavenJTokenReader(token), type);
    }

    return new DynamicJsonObject((RavenJObject)((RavenJObject)token).CloneToken());
}
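A converter like the one above is only used once registered with a serializer; a minimal sketch, assuming the containing class is named `DynamicObjectConverter` (a hypothetical name).

// Hypothetical wiring: DynamicObjectConverter stands in for whatever
// JsonConverter subclass contains the ReadJson override above.
var serializer = new JsonSerializer();
serializer.Converters.Add(new DynamicObjectConverter());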
private async Task<int> ImportIndexes(JsonReader jsonReader, SmugglerOptions options)
{
    var count = 0;
    while (jsonReader.Read() && jsonReader.TokenType != JsonToken.EndArray)
    {
        var index = (RavenJObject)RavenJToken.ReadFrom(jsonReader);
        if ((options.OperateOnTypes & ItemType.Indexes) != ItemType.Indexes)
            continue;

        var indexName = index.Value<string>("name");
        if (indexName.StartsWith("Temp/"))
            continue;
        if (index.Value<RavenJObject>("definition").Value<bool>("IsCompiled"))
            continue; // can't import compiled indexes

        if ((options.OperateOnTypes & ItemType.RemoveAnalyzers) == ItemType.RemoveAnalyzers)
            index.Value<RavenJObject>("definition").Remove("Analyzers");

        await PutIndex(indexName, index);
        count++;
    }

    await PutIndex(null, null);
    return count;
}
private async Task<int> ImportDeletedAttachments(JsonReader jsonReader, SmugglerOptions options)
{
    var count = 0;
    while (jsonReader.Read() && jsonReader.TokenType != JsonToken.EndArray)
    {
        var item = RavenJToken.ReadFrom(jsonReader);
        var deletedAttachmentInfo = new JsonSerializer
        {
            Converters =
            {
                new JsonToJsonConverter(),
                new StreamFromJsonConverter()
            }
        }.Deserialize<Tombstone>(new RavenJTokenReader(item));

        ShowProgress("Importing deleted attachments {0}", deletedAttachmentInfo.Key);
        await DeleteAttachment(deletedAttachmentInfo.Key);
        count++;
    }

    return count;
}
private async Task<int> ImportAttachments(RavenConnectionStringOptions dst, JsonTextReader jsonReader, SmugglerOptions options)
{
    var count = 0;
    while (jsonReader.Read() && jsonReader.TokenType != JsonToken.EndArray)
    {
        var item = RavenJToken.ReadFrom(jsonReader);
        if ((options.OperateOnTypes & ItemType.Attachments) != ItemType.Attachments)
            continue;

        var attachmentExportInfo = new JsonSerializer
        {
            Converters =
            {
                new JsonToJsonConverter(),
                new StreamFromJsonConverter()
            }
        }.Deserialize<AttachmentExportInfo>(new RavenJTokenReader(item));

        ShowProgress("Importing attachment {0}", attachmentExportInfo.Key);
        await PutAttachment(dst, attachmentExportInfo);
        count++;
    }

    await PutAttachment(dst, null); // force flush
    return count;
}
public Task<List<CounterSummary>> ReadSnapshotsAsync(int start, int pageSize, CancellationToken cancellationToken)
{
    var results = new List<CounterSummary>();
    while (_reader.Read() && _reader.TokenType != JsonToken.EndArray)
    {
        cancellationToken.ThrowIfCancellationRequested();
        var snapshot = RavenJToken.ReadFrom(_reader);
        var group = snapshot.Value<string>("Group");
        var name = snapshot.Value<string>("Name");
        var positive = snapshot.Value<long>("Positive");
        var negative = snapshot.Value<long>("Negative");
        var summary = new CounterSummary
        {
            CounterName = name,
            GroupName = group,
            Decrements = negative,
            Increments = positive
        };
        results.Add(summary);
    }

    return new CompletedTask<List<CounterSummary>>(results);
}
private async Task<int> ImportAttachments(RavenConnectionStringOptions dst, JsonTextReader jsonReader)
{
    var count = 0;
    while (jsonReader.Read() && jsonReader.TokenType != JsonToken.EndArray)
    {
        Options.CancelToken.Token.ThrowIfCancellationRequested();
        var item = RavenJToken.ReadFrom(jsonReader);
        if ((Options.OperateOnTypes & ItemType.Attachments) != ItemType.Attachments)
            continue;

        var attachmentExportInfo = new JsonSerializer { Converters = DefaultConverters }
            .Deserialize<AttachmentExportInfo>(new RavenJTokenReader(item));

        Operations.ShowProgress("Importing attachment {0}", attachmentExportInfo.Key);
        if (Options.StripReplicationInformation)
            attachmentExportInfo.Metadata = Operations.StripReplicationInformationFromMetadata(attachmentExportInfo.Metadata);

        await Operations.PutAttachment(dst, attachmentExportInfo);
        count++;
    }

    await Operations.PutAttachment(dst, null); // force flush
    return count;
}
private async Task<int> ImportIdentities(JsonTextReader jsonReader)
{
    var count = 0;
    while (jsonReader.Read() && jsonReader.TokenType != JsonToken.EndArray)
    {
        Options.CancelToken.Token.ThrowIfCancellationRequested();
        var identity = RavenJToken.ReadFrom(jsonReader);
        var identityName = identity.Value<string>("Key");
        if (FilterIdentity(identityName, Options.OperateOnTypes) == false)
            continue;

        await Operations.SeedIdentityFor(identityName, identity.Value<long>("Value"));
        count++;
    }

    await Operations.SeedIdentityFor(null, -1); // force flush
    return count;
}
private async Task<int> ImportDeletedDocuments(JsonReader jsonReader)
{
    var count = 0;
    while (jsonReader.Read() && jsonReader.TokenType != JsonToken.EndArray)
    {
        Options.CancelToken.Token.ThrowIfCancellationRequested();
        var item = RavenJToken.ReadFrom(jsonReader);
        var deletedDocumentInfo = new JsonSerializer
        {
            Converters =
            {
                new JsonToJsonConverter(),
                new StreamFromJsonConverter()
            }
        }.Deserialize<Tombstone>(new RavenJTokenReader(item));

        Operations.ShowProgress("Importing deleted documents {0}", deletedDocumentInfo.Key);
        await Operations.DeleteDocument(deletedDocumentInfo.Key);
        count++;
    }

    return count;
}
private static string WritePackagesToRaven(Stream stream, IDocumentStore store)
{
    var json = RavenJToken.ReadFrom(new JsonTextReader(new StreamReader(stream)))
        .Value<RavenJObject>("d");

    using (var session = store.OpenSession())
    {
        foreach (RavenJObject result in json.Value<RavenJArray>("results"))
        {
            ModifyResult(result);
            session.Advanced.Defer(new PutCommandData
            {
                Document = result,
                Metadata = new RavenJObject { { "Raven-Entity-Name", "Packages" } },
                Key = "packages/" + result.Value<string>("PackageId") + "/" + result.Value<string>("Version")
            });
        }

        session.SaveChanges();
    }

    return json.Value<string>("__next");
}
/// <summary>
/// Begins the async query.
/// </summary>
/// <param name="index">The index.</param>
/// <param name="query">The query.</param>
/// <param name="includes">The include paths</param>
/// <returns></returns>
public Task<QueryResult> QueryAsync(string index, IndexQuery query, string[] includes)
{
    EnsureIsNotNullOrEmpty(index, "index");
    var path = query.GetIndexQueryUrl(url, index, "indexes");
    if (includes != null && includes.Length > 0)
    {
        path += "&" + string.Join("&", includes.Select(x => "include=" + x).ToArray());
    }

    var request = jsonRequestFactory.CreateHttpJsonRequest(this, path, "GET", credentials, convention);

    return request.ReadResponseStringAsync()
        .ContinueWith(task => AttemptToProcessResponse(() =>
        {
            RavenJObject json;
            using (var reader = new JsonTextReader(new StringReader(task.Result)))
                json = (RavenJObject)RavenJToken.ReadFrom(reader);

            return new QueryResult
            {
                IsStale = Convert.ToBoolean(json["IsStale"].ToString()),
                IndexTimestamp = json.Value<DateTime>("IndexTimestamp"),
                IndexEtag = new Guid(request.ResponseHeaders["ETag"].First()),
                Results = ((RavenJArray)json["Results"]).Cast<RavenJObject>().ToList(),
                TotalResults = Convert.ToInt32(json["TotalResults"].ToString()),
                SkippedResults = Convert.ToInt32(json["SkippedResults"].ToString()),
                Includes = ((RavenJArray)json["Includes"]).Cast<RavenJObject>().ToList(),
            };
        }));
}
private static IEnumerable<JsonDocument> YieldDocumentsInBatch(Stream partialStream)
{
    using (var stream = new GZipStream(partialStream, CompressionMode.Decompress, leaveOpen: true))
    {
        var reader = new BinaryReader(stream);
        var count = reader.ReadInt32();
        for (int i = 0; i < count; i++)
        {
            var doc = (RavenJObject)RavenJToken.ReadFrom(new BsonReader(reader));
            var metadata = doc.Value<RavenJObject>("@metadata");
            if (metadata == null)
                throw new InvalidOperationException("Could not find metadata for document");

            var id = metadata.Value<string>("@id");
            if (string.IsNullOrEmpty(id))
                throw new InvalidOperationException("Could not get id from metadata");

            doc.Remove("@metadata");
            yield return new JsonDocument
            {
                Key = id,
                DataAsJson = doc,
                Metadata = metadata
            };
        }
    }
}
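For context, a minimal sketch of the producer side that the reader above assumes: a GZip stream carrying an Int32 document count followed by that many BSON-encoded documents, each with an `@metadata` object containing `@id`. The `WriteDocumentsInBatch` helper name is hypothetical, not part of the source.

// Hypothetical writer matching the batch layout YieldDocumentsInBatch expects.
private static void WriteDocumentsInBatch(Stream output, IList<RavenJObject> docs)
{
    using (var gzip = new GZipStream(output, CompressionMode.Compress, leaveOpen: true))
    {
        var writer = new BinaryWriter(gzip);
        writer.Write(docs.Count); // Int32 count prefix read by ReadInt32()
        foreach (var doc in docs)
        {
            // Each document is BSON-encoded; it must carry @metadata with @id.
            doc.WriteTo(new BsonWriter(writer));
        }
        writer.Flush();
    }
}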
public Task<List<TransformerDefinition>> ReadTransformersAsync(int start, int batchSize, CancellationToken cancellationToken)
{
    var results = new List<TransformerDefinition>();
    while (_reader.Read() && _reader.TokenType != JsonToken.EndArray)
    {
        cancellationToken.ThrowIfCancellationRequested();
        var transformer = RavenJToken.ReadFrom(_reader);
        if (_options.OperateOnTypes.HasFlag(DatabaseItemType.Transformers) == false)
            continue;

        var transformerName = transformer.Value<string>("name");
        var definition = transformer.Value<RavenJObject>("definition");
        var transformerDefinition = definition.JsonDeserialization<TransformerDefinition>();
        transformerDefinition.Name = transformerName;
        results.Add(transformerDefinition);
    }

    return new CompletedTask<List<TransformerDefinition>>(results);
}
IEnumerable<Task> CreateSampleData()
{
    // this code assumes a small enough dataset, and doesn't do any sort
    // of paging or batching whatsoever.

    ShowCreateSampleData = false;
    IsGeneratingSampleData = true;

    WorkStarted("creating sample data");
    WorkStarted("creating sample indexes");
    using (var documentSession = Server.OpenSession())
    using (var sampleData = typeof(SummaryViewModel).Assembly.GetManifestResourceStream("Raven.Studio.SampleData.MvcMusicStore_Dump.json"))
    using (var streamReader = new StreamReader(sampleData))
    {
        var putTask = documentSession.Advanced.AsyncDatabaseCommands
            .DeleteDocumentAsync("forceAuth_" + Guid.NewGuid());
        yield return putTask;

        if (putTask.Exception != null)
            yield break;

        var musicStoreData = (RavenJObject)RavenJToken.ReadFrom(new JsonTextReader(streamReader));
        foreach (var index in musicStoreData.Value<RavenJArray>("Indexes"))
        {
            var indexName = index.Value<string>("name");
            var putDoc = documentSession.Advanced.AsyncDatabaseCommands
                .PutIndexAsync(indexName, index.Value<RavenJObject>("definition").JsonDeserialization<IndexDefinition>(), true);
            yield return putDoc;
        }

        WorkCompleted("creating sample indexes");

        var batch = documentSession.Advanced.AsyncDatabaseCommands
            .BatchAsync(
                musicStoreData.Value<RavenJArray>("Docs").OfType<RavenJObject>().Select(
                    doc =>
                    {
                        var metadata = doc.Value<RavenJObject>("@metadata");
                        doc.Remove("@metadata");
                        return new PutCommandData
                        {
                            Document = doc,
                            Metadata = metadata,
                            Key = metadata.Value<string>("@id"),
                        };
                    }).ToArray()
            );
        yield return batch;

        WorkCompleted("creating sample data");
        IsGeneratingSampleData = false;
        RecentDocumentsStatus = "Retrieving sample documents.";
        RetrieveSummary();
    }
}
protected override object DeserializeMessage(Stream input)
{
    using (var streamReader = new StreamReader(input))
    using (var jsonReader = new JsonTextReader(streamReader))
    using (var tokenReader = new RavenJTokenReader(RavenJToken.ReadFrom(jsonReader)))
    {
        return serializer.Deserialize(tokenReader);
    }
}
private async Task ImportIncrementalData(CounterConnectionStringOptions connectionString, Stream stream)
{
    CountingStream sizeStream;
    JsonTextReader jsonReader;
    if (SmugglerHelper.TryGetJsonReaderForStream(stream, out jsonReader, out sizeStream) == false)
        throw new InvalidOperationException("Failed to get reader for the data stream.");

    if (jsonReader.TokenType != JsonToken.StartObject)
        throw new InvalidDataException("StartObject was expected");

    ICounterStore store = null;
    try
    {
        // note: || here, not the original &&, which silently accepted a
        // successful read of the wrong token type
        if (jsonReader.Read() == false || jsonReader.TokenType != JsonToken.StartArray)
            throw new InvalidDataException("StartArray was expected");

        store = new CounterStore
        {
            Url = connectionString.Url,
            Name = connectionString.CounterStoreId,
            Credentials = new OperationCredentials(connectionString.ApiKey, connectionString.Credentials)
        };
        store.Initialize(true);
        ShowProgress($"Initialized connection to counter store (name = {store.Name})");

        while (jsonReader.Read() && jsonReader.TokenType != JsonToken.EndArray)
        {
            if (jsonReader.TokenType != JsonToken.StartObject)
                continue;

            var counterDelta = RavenJToken.ReadFrom(jsonReader).ToObject<CounterState>();
            ShowProgress($"Importing counter {counterDelta.GroupName} - {counterDelta.CounterName}");
            if (counterDelta.Sign == ValueSign.Negative)
                counterDelta.Value = -counterDelta.Value;

            store.Batch.ScheduleChange(counterDelta.GroupName, counterDelta.CounterName, counterDelta.Value);
        }

        ShowProgress("Finished import of the current file.");
        await store.Batch.FlushAsync().WithCancellation(CancellationToken).ConfigureAwait(false);
    }
    finally
    {
        store?.Dispose();
    }
}
public virtual object Deserialize(Stream stream, System.Type type)
{
    using (var streamReader = new StreamReader(stream))
    using (var jsonReader = new JsonTextReader(streamReader))
    using (var tokenReader = new RavenJTokenReader(RavenJToken.ReadFrom(jsonReader)))
    {
        return serializer.Deserialize(tokenReader, type);
    }
}
private async Task<int> ImportDocuments(JsonTextReader jsonReader)
{
    var now = SystemTime.UtcNow;
    var count = 0;
    while (jsonReader.Read() && jsonReader.TokenType != JsonToken.EndArray)
    {
        Options.CancelToken.Token.ThrowIfCancellationRequested();
        var document = (RavenJObject)RavenJToken.ReadFrom(jsonReader);
        var size = DocumentHelpers.GetRoughSize(document);
        if (size > 1024 * 1024)
        {
            Console.WriteLine("Large document warning: {0:#,#.##;;0} kb - {1}",
                (double)size / 1024,
                document["@metadata"].Value<string>("@id"));
        }

        if ((Options.OperateOnTypes & ItemType.Documents) != ItemType.Documents)
            continue;
        if (Options.MatchFilters(document) == false)
            continue;
        if (Options.ShouldExcludeExpired && Options.ExcludeExpired(document, now))
            continue;

        if (!string.IsNullOrEmpty(Options.TransformScript))
            document = await Operations.TransformDocument(document, Options.TransformScript);

        if (document == null)
            continue;

        await Operations.PutDocument(document, (int)size);
        count++;

        if (count % Options.BatchSize == 0)
            Operations.ShowProgress("Read {0:#,#;;0} documents", count);
    }

    await Operations.PutDocument(null, -1); // force flush
    return count;
}
protected Task SkipAsync(JsonReader reader, CancellationToken cancellationToken)
{
    while (reader.Read() && reader.TokenType != JsonToken.EndArray)
    {
        cancellationToken.ThrowIfCancellationRequested();
        RavenJToken.ReadFrom(reader);
    }

    return new CompletedTask();
}
private async Task<int> ImportAttachments(JsonTextReader jsonReader, SmugglerOptions options)
{
    var count = 0;

    if (jsonReader.Read() == false || jsonReader.TokenType == JsonToken.EndObject)
        return count;
    if (jsonReader.TokenType != JsonToken.PropertyName)
        throw new InvalidDataException("PropertyName was expected");
    if (Equals("Attachments", jsonReader.Value) == false)
        throw new InvalidDataException("Attachment property was expected");
    if (jsonReader.Read() == false)
        return count;
    if (jsonReader.TokenType != JsonToken.StartArray)
        throw new InvalidDataException("StartArray was expected");

    while (jsonReader.Read() && jsonReader.TokenType != JsonToken.EndArray)
    {
        var item = RavenJToken.ReadFrom(jsonReader);
        if ((options.OperateOnTypes & ItemType.Attachments) != ItemType.Attachments)
            continue;

        var attachmentExportInfo = new JsonSerializer
        {
            Converters = { new JsonToJsonConverter() }
        }.Deserialize<AttachmentExportInfo>(new RavenJTokenReader(item));

        ShowProgress("Importing attachment {0}", attachmentExportInfo.Key);
        await PutAttachment(attachmentExportInfo);
        count++;
    }

    await PutAttachment(null); // force flush
    return count;
}
public object ReadJson(JsonReader reader, Type objectType, object existingValue, JsonSerializer serializer)
{
    if (reader.TokenType == JsonToken.Null)
        return null;

    var json = RavenJToken.ReadFrom(reader);
    object result;
    TryRead(json as RavenJObject, out result); // Throw exception?
    return result;
}
private async Task<int> ImportIndexes(JsonReader jsonReader, SmugglerOptions options)
{
    var count = 0;

    if (jsonReader.Read() == false)
        return count;
    if (jsonReader.TokenType != JsonToken.PropertyName)
        throw new InvalidDataException("PropertyName was expected");
    if (Equals("Indexes", jsonReader.Value) == false)
        throw new InvalidDataException("Indexes property was expected");
    if (jsonReader.Read() == false)
        return count;
    if (jsonReader.TokenType != JsonToken.StartArray)
        throw new InvalidDataException("StartArray was expected");

    while (jsonReader.Read() && jsonReader.TokenType != JsonToken.EndArray)
    {
        var index = RavenJToken.ReadFrom(jsonReader);
        if ((options.OperateOnTypes & ItemType.Indexes) != ItemType.Indexes)
            continue;

        var indexName = index.Value<string>("name");
        if (indexName.StartsWith("Temp/"))
            continue;
        if (index.Value<RavenJObject>("definition").Value<bool>("IsCompiled"))
            continue; // can't import compiled indexes

        await PutIndex(indexName, index);
        count++;
    }

    await PutIndex(null, null);
    return count;
}
private IEnumerable<JsonDocument> YieldDocumentsInBatch(CancellationTimeout timeout, Stream partialStream, Action<int> increaseDocumentsCount)
{
    using (var stream = new GZipStream(partialStream, CompressionMode.Decompress, leaveOpen: true))
    {
        var reader = new BinaryReader(stream);
        var count = reader.ReadInt32();
        for (var i = 0; i < count; i++)
        {
            timeout.Delay();
            var doc = (RavenJObject)RavenJToken.ReadFrom(new BsonReader(reader)
            {
                DateTimeKindHandling = DateTimeKind.Unspecified
            });

            var metadata = doc.Value<RavenJObject>("@metadata");
            if (metadata == null)
                throw new InvalidOperationException("Could not find metadata for document");

            var id = metadata.Value<string>("@id");
            if (string.IsNullOrEmpty(id))
                throw new InvalidOperationException("Could not get id from metadata");

            if (id.Equals(Constants.BulkImportHeartbeatDocKey, StringComparison.InvariantCultureIgnoreCase))
            {
                // It's just a token document and should not get written into the database.
                // The purpose of the heartbeat document is to make sure that the connection
                // doesn't time out during long pauses in the bulk insert operation.
                // Currently used by smuggler to keep the connection alive when there is a
                // continuation token and lots of document skips.
                continue;
            }

            doc.Remove("@metadata");
            yield return new JsonDocument
            {
                Key = id,
                DataAsJson = doc,
                Metadata = metadata
            };
        }

        increaseDocumentsCount(count);
    }
}
private Task WriteDocuments(JsonTextReader jsonReader)
{
    var batch = new List<RavenJObject>();
    while (jsonReader.Read() && jsonReader.TokenType != JsonToken.EndArray)
    {
        var document = RavenJToken.ReadFrom(jsonReader);
        batch.Add((RavenJObject)document);
        if (batch.Count >= BatchSize)
        {
            return FlushBatch(batch)
                .ContinueOnSuccess(() => WriteDocuments(jsonReader));
        }
    }

    return FlushBatch(batch);
}
public Task<List<KeyValuePair<string, Etag>>> ReadDocumentDeletionsAsync(Etag fromEtag, Etag maxEtag, CancellationToken cancellationToken)
{
    var results = new List<KeyValuePair<string, Etag>>();
    while (_reader.Read() && _reader.TokenType != JsonToken.EndArray)
    {
        cancellationToken.ThrowIfCancellationRequested();
        var deletion = RavenJToken.ReadFrom(_reader);
        var key = deletion.Value<string>("Key");
        results.Add(new KeyValuePair<string, Etag>(key, null));
    }

    return new CompletedTask<List<KeyValuePair<string, Etag>>>(results);
}
private IEnumerable<Task> CreateSampleData()
{
    var commands = database.Value.AsyncDatabaseCommands;

    output("Creating Sample Data, Please wait...");

    // this code assumes a small enough dataset, and doesn't do any sort
    // of paging or batching whatsoever.
    using (var sampleData = typeof(HomeModel).Assembly.GetManifestResourceStream("Raven.Studio.Assets.EmbeddedData.MvcMusicStore_Dump.json"))
    using (var streamReader = new StreamReader(sampleData))
    {
        output("Reading documents");
        var musicStoreData = (RavenJObject)RavenJToken.ReadFrom(new JsonTextReader(streamReader));
        foreach (var index in musicStoreData.Value<RavenJArray>("Indexes"))
        {
            var indexName = index.Value<string>("name");
            var ravenJObject = index.Value<RavenJObject>("definition");
            output("Adding index " + indexName);
            var putDoc = commands
                .PutIndexAsync(indexName, ravenJObject.JsonDeserialization<IndexDefinition>(), true);
            yield return putDoc;
        }

        output("Storing documents");
        var batch = commands.BatchAsync(
            musicStoreData.Value<RavenJArray>("Docs").OfType<RavenJObject>().Select(
                doc =>
                {
                    var metadata = doc.Value<RavenJObject>("@metadata");
                    doc.Remove("@metadata");
                    return new PutCommandData
                    {
                        Document = doc,
                        Metadata = metadata,
                        Key = metadata.Value<string>("@id"),
                    };
                }).ToArray()
        );
        yield return batch;
    }
}
public Task<List<KeyValuePair<string, long>>> ReadIdentitiesAsync(CancellationToken cancellationToken)
{
    var results = new List<KeyValuePair<string, long>>();
    while (_reader.Read() && _reader.TokenType != JsonToken.EndArray)
    {
        cancellationToken.ThrowIfCancellationRequested();
        var identity = RavenJToken.ReadFrom(_reader);
        var name = identity.Value<string>("Key");
        var value = identity.Value<long>("Value");
        results.Add(new KeyValuePair<string, long>(name, value));
    }

    return new CompletedTask<List<KeyValuePair<string, long>>>(results);
}