/// <summary>
/// Exports document and attachment deletions to the given writer.
/// </summary>
/// <param name="jsonWriter">Writer that receives the deletion arrays.</param>
/// <param name="options">Smuggler options controlling the export.</param>
/// <param name="result">Accumulates the last deletion etags that were exported.</param>
/// <param name="maxEtags">Max etags are inclusive.</param>
protected override async void ExportDeletions(JsonTextWriter jsonWriter, SmugglerOptions options, ExportDataResult result, LastEtagsInfo maxEtags)
{
    jsonWriter.WritePropertyName("DocsDeletions");
    jsonWriter.WriteStartArray();
    result.LastDocDeleteEtag = await ExportDocumentsDeletion(options, jsonWriter, result.LastDocDeleteEtag, maxEtags.LastDocDeleteEtag.IncrementBy(1));
    jsonWriter.WriteEndArray();

    jsonWriter.WritePropertyName("AttachmentsDeletions");
    jsonWriter.WriteStartArray();
    result.LastAttachmentsDeleteEtag = await ExportAttachmentsDeletion(options, jsonWriter, result.LastAttachmentsDeleteEtag, maxEtags.LastAttachmentsDeleteEtag.IncrementBy(1));
    jsonWriter.WriteEndArray();
}
public override async Task ExportDeletions(JsonTextWriter jsonWriter, ExportDataResult result, LastEtagsInfo maxEtagsToFetch)
{
    jsonWriter.WritePropertyName("DocsDeletions");
    jsonWriter.WriteStartArray();
    result.LastDocDeleteEtag = await Operations.ExportDocumentsDeletion(jsonWriter, result.LastDocDeleteEtag, maxEtagsToFetch.LastDocDeleteEtag.IncrementBy(1));
    jsonWriter.WriteEndArray();

    jsonWriter.WritePropertyName("AttachmentsDeletions");
    jsonWriter.WriteStartArray();
    result.LastAttachmentsDeleteEtag = await Operations.ExportAttachmentsDeletion(jsonWriter, result.LastAttachmentsDeleteEtag, maxEtagsToFetch.LastAttachmentsDeleteEtag.IncrementBy(1));
    jsonWriter.WriteEndArray();
}
public override void Execute(object parameter)
{
    var saveFile = new SaveFileDialog
    {
        DefaultFileName = string.Format("Dump of {0}, {1}", ApplicationModel.Database.Value.Name, DateTimeOffset.Now.ToString("MMM dd yyyy HH-mm", CultureInfo.InvariantCulture)),
        DefaultExt = ".raven.dump",
        Filter = "Raven Dumps|*.raven.dump",
    };
    if (saveFile.ShowDialog() != true)
        return;

    stream = saveFile.OpenFile();
    gZipStream = new GZipStream(stream, CompressionMode.Compress);
    streamWriter = new StreamWriter(gZipStream);
    jsonWriter = new JsonTextWriter(streamWriter)
    {
        Formatting = Formatting.Indented
    };

    taskModel.TaskStatus = TaskStatus.Started;
    output(string.Format("Exporting to {0}", saveFile.SafeFileName));
    output("Begin reading indexes");

    jsonWriter.WriteStartObject();
    jsonWriter.WritePropertyName("Indexes");
    jsonWriter.WriteStartArray();

    ReadIndexes(0)
        .Catch(exception => Infrastructure.Execute.OnTheUI(() => Finish(exception)));
}
public void ValueFormatting()
{
    StringBuilder sb = new StringBuilder();
    StringWriter sw = new StringWriter(sb);

    using (JsonWriter jsonWriter = new JsonTextWriter(sw))
    {
        jsonWriter.WriteStartArray();
        jsonWriter.WriteValue('@');
        jsonWriter.WriteValue("\r\n\t\f\b?{\\r\\n\"\'");
        jsonWriter.WriteValue(true);
        jsonWriter.WriteValue(10);
        jsonWriter.WriteValue(10.99);
        jsonWriter.WriteValue(0.99);
        jsonWriter.WriteValue(0.000000000000000001d);
        jsonWriter.WriteValue(0.000000000000000001m);
        jsonWriter.WriteValue((string)null);
        jsonWriter.WriteValue((object)null);
        jsonWriter.WriteValue("This is a string.");
        jsonWriter.WriteNull();
        jsonWriter.WriteUndefined();
        jsonWriter.WriteEndArray();
    }

    string expected = @"[""@"",""\r\n\t\f\b?{\\r\\n\""'"",true,10,10.99,0.99,1E-18,0.000000000000000001,null,null,""This is a string."",null,undefined]";
    string result = sb.ToString();

    Console.WriteLine("ValueFormatting");
    Console.WriteLine(result);

    Assert.AreEqual(expected, result);
}
private void StreamToClient(Stream stream, int pageSize, Etag etag)
{
    using (var cts = new CancellationTokenSource())
    using (var timeout = cts.TimeoutAfter(FileSystemsLandlord.SystemConfiguration.DatabaseOperationTimeout))
    using (var writer = new JsonTextWriter(new StreamWriter(stream)))
    {
        writer.WriteStartObject();
        writer.WritePropertyName("Results");
        writer.WriteStartArray();

        Storage.Batch(accessor =>
        {
            var files = accessor.GetFilesAfter(etag, pageSize);
            foreach (var file in files)
            {
                timeout.Delay();

                var doc = RavenJObject.FromObject(file);
                doc.WriteTo(writer);

                writer.WriteRaw(Environment.NewLine);
            }
        });

        writer.WriteEndArray();
        writer.WriteEndObject();
        writer.Flush();
    }
}
private void StreamToClient(Stream stream, int pageSize, Etag etag, OrderedPartCollection<AbstractFileReadTrigger> readTriggers)
{
    using (var cts = new CancellationTokenSource())
    using (var timeout = cts.TimeoutAfter(FileSystemsLandlord.SystemConfiguration.DatabaseOperationTimeout))
    using (var writer = new JsonTextWriter(new StreamWriter(stream)))
    {
        writer.WriteStartObject();
        writer.WritePropertyName("Results");
        writer.WriteStartArray();

        Storage.Batch(accessor =>
        {
            var files = accessor.GetFilesAfter(etag, pageSize);
            foreach (var file in files)
            {
                if (readTriggers.CanReadFile(file.FullPath, file.Metadata, ReadOperation.Load) == false)
                    continue;

                timeout.Delay();

                var doc = RavenJObject.FromObject(file);
                doc.WriteTo(writer);

                writer.WriteRaw(Environment.NewLine);
            }
        });

        writer.WriteEndArray();
        writer.WriteEndObject();
        writer.Flush();
    }
}
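Several of the handlers in this section share one streaming shape: a single object with a "Results" array, where each item is followed by a raw newline so a client can consume the response one document at a time without buffering the whole payload. A minimal, self-contained sketch of that pattern using plain Json.NET (the item source is a hypothetical stand-in for the storage accessors above):

using System;
using System.Collections.Generic;
using System.IO;
using Newtonsoft.Json;
using Newtonsoft.Json.Linq;

static class ResultsStreamingSketch
{
    // Writes {"Results":[ ... ]} to the stream, one item per line inside the array.
    public static void StreamResults(Stream output, IEnumerable<JObject> items)
    {
        var writer = new JsonTextWriter(new StreamWriter(output));

        writer.WriteStartObject();
        writer.WritePropertyName("Results");
        writer.WriteStartArray();

        foreach (var item in items)
        {
            item.WriteTo(writer);
            // The raw newline between array items is what lets clients read
            // the stream line by line while it is still being produced.
            writer.WriteRaw(Environment.NewLine);
        }

        writer.WriteEndArray();
        writer.WriteEndObject();
        writer.Flush(); // flushes the underlying StreamWriter as well
    }
}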
public void ExportDatabase()
{
    using (var stream = File.Create(outputDirectory))
    using (var gZipStream = new GZipStream(stream, CompressionMode.Compress, leaveOpen: true))
    using (var streamWriter = new StreamWriter(gZipStream))
    {
        var jsonWriter = new JsonTextWriter(streamWriter)
        {
            Formatting = Formatting.Indented
        };
        jsonWriter.WriteStartObject();

        // Indexes
        jsonWriter.WritePropertyName("Indexes");
        jsonWriter.WriteStartArray();
        WriteIndexes(jsonWriter);
        jsonWriter.WriteEndArray();

        // Documents
        jsonWriter.WritePropertyName("Docs");
        jsonWriter.WriteStartArray();
        WriteDocuments(jsonWriter);
        jsonWriter.WriteEndArray();

        // Attachments
        jsonWriter.WritePropertyName("Attachments");
        jsonWriter.WriteStartArray();
        WriteAttachments(jsonWriter);
        jsonWriter.WriteEndArray();

        // Transformers
        jsonWriter.WritePropertyName("Transformers");
        jsonWriter.WriteStartArray();
        WriteTransformers(jsonWriter);
        jsonWriter.WriteEndArray();

        // Identities
        jsonWriter.WritePropertyName("Identities");
        jsonWriter.WriteStartArray();
        WriteIdentities(jsonWriter);
        jsonWriter.WriteEndArray();

        // End of export
        jsonWriter.WriteEndObject();
        streamWriter.Flush();
    }
}
private void StreamToClient(Stream stream, string startsWith, int start, int pageSize, Etag etag, string matches, int nextPageStart, string skipAfter)
{
    var bufferStream = new BufferedStream(stream, 1024 * 64);
    using (var cts = new CancellationTokenSource())
    using (var timeout = cts.TimeoutAfter(DatabasesLandlord.SystemConfiguration.DatabaseOperationTimeout))
    using (var writer = new JsonTextWriter(new StreamWriter(bufferStream)))
    {
        writer.WriteStartObject();
        writer.WritePropertyName("Results");
        writer.WriteStartArray();

        Action<JsonDocument> addDocument = doc =>
        {
            timeout.Delay();
            doc.ToJson().WriteTo(writer);
            writer.WriteRaw(Environment.NewLine);
        };

        Database.TransactionalStorage.Batch(accessor =>
        {
            // we may be sending a LOT of documents to the user, and most
            // of them aren't going to be relevant for other ops, so we are going to skip
            // the cache for that, to avoid filling it up very quickly
            using (DocumentCacher.SkipSettingDocumentsInDocumentCache())
            {
                if (string.IsNullOrEmpty(startsWith))
                {
                    Database.Documents.GetDocuments(start, pageSize, etag, cts.Token, addDocument);
                }
                else
                {
                    var nextPageStartInternal = nextPageStart;

                    Database.Documents.GetDocumentsWithIdStartingWith(startsWith, matches, null, start, pageSize, cts.Token, ref nextPageStartInternal, addDocument, skipAfter: skipAfter);

                    nextPageStart = nextPageStartInternal;
                }
            }
        });

        writer.WriteEndArray();
        writer.WritePropertyName("NextPageStart");
        writer.WriteValue(nextPageStart);
        writer.WriteEndObject();

        writer.Flush();
        bufferStream.Flush();
    }
}
public override void Respond(IHttpContext context)
{
    using (context.Response.Streaming())
    {
        context.Response.ContentType = "application/json; charset=utf-8";

        using (var writer = new JsonTextWriter(new StreamWriter(context.Response.OutputStream)))
        {
            writer.WriteStartObject();
            writer.WritePropertyName("Results");
            writer.WriteStartArray();

            Database.TransactionalStorage.Batch(accessor =>
            {
                var startsWith = context.Request.QueryString["startsWith"];
                int pageSize = context.GetPageSize(int.MaxValue);
                if (string.IsNullOrEmpty(context.Request.QueryString["pageSize"]))
                    pageSize = int.MaxValue;

                // we may be sending a LOT of documents to the user, and most
                // of them aren't going to be relevant for other ops, so we are going to skip
                // the cache for that, to avoid filling it up very quickly
                using (DocumentCacher.SkipSettingDocumentsInDocumentCache())
                {
                    if (string.IsNullOrEmpty(startsWith))
                    {
                        Database.GetDocuments(context.GetStart(), pageSize, context.GetEtagFromQueryString(), doc => doc.WriteTo(writer));
                    }
                    else
                    {
                        Database.GetDocumentsWithIdStartingWith(
                            startsWith,
                            context.Request.QueryString["matches"],
                            context.Request.QueryString["exclude"],
                            context.GetStart(),
                            pageSize,
                            doc => doc.WriteTo(writer));
                    }
                }
            });

            writer.WriteEndArray();
            writer.WriteEndObject();
            writer.Flush();
        }
    }
}
public override void Respond(IHttpContext context)
{
    context.Response.BufferOutput = false;

    var match = urlMatcher.Match(context.GetRequestUrl());
    var index = match.Groups[1].Value;
    var query = context.GetIndexQueryFromHttpContext(int.MaxValue);
    if (string.IsNullOrEmpty(context.Request.QueryString["pageSize"]))
        query.PageSize = int.MaxValue;
    var isHeadRequest = context.Request.HttpMethod == "HEAD";
    if (isHeadRequest)
        query.PageSize = 0;

    JsonWriter writer = null;
    Database.Query(index, query, information =>
    {
        context.Response.AddHeader("Raven-Result-Etag", information.ResultEtag.ToString());
        context.Response.AddHeader("Raven-Index-Etag", information.IndexEtag.ToString());
        context.Response.AddHeader("Raven-Is-Stale", information.IsStable ? "true" : "false");
        context.Response.AddHeader("Raven-Index", information.Index);
        context.Response.AddHeader("Raven-Total-Results", information.TotalResults.ToString(CultureInfo.InvariantCulture));
        context.Response.AddHeader("Raven-Index-Timestamp", information.IndexTimestamp.ToString(Default.DateTimeFormatsToWrite, CultureInfo.InvariantCulture));

        if (isHeadRequest)
            return;

        writer = new JsonTextWriter(new StreamWriter(context.Response.OutputStream));
        writer.WriteStartObject();
        writer.WritePropertyName("Results");
        writer.WriteStartArray();
    }, result => result.WriteTo(writer, Default.Converters));

    if (isHeadRequest)
        return;

    // The null check must guard the WriteEnd* calls as well: the header
    // callback that creates the writer may never have run.
    if (writer != null)
    {
        writer.WriteEndArray();
        writer.WriteEndObject();
        writer.Flush();
        writer.Close();
    }
}
public override void Execute(object parameter)
{
    var saveFile = new SaveFileDialog
    {
        DefaultExt = ".ravendump",
        Filter = "Raven Dumps|*.ravendump;*.raven.dump",
    };

    var name = ApplicationModel.Database.Value.Name;
    var normalizedName = new string(name.Select(ch => Path.GetInvalidPathChars().Contains(ch) ? '_' : ch).ToArray());
    var defaultFileName = string.Format("Dump of {0}, {1}", normalizedName, DateTimeOffset.Now.ToString("dd MMM yyyy HH-mm", CultureInfo.InvariantCulture));
    try
    {
        saveFile.DefaultFileName = defaultFileName;
    }
    catch { }

    if (saveFile.ShowDialog() != true)
        return;

    taskModel.CanExecute.Value = false;

    stream = saveFile.OpenFile();
    gZipStream = new GZipStream(stream, CompressionMode.Compress);
    streamWriter = new StreamWriter(gZipStream);
    jsonWriter = new JsonTextWriter(streamWriter)
    {
        Formatting = Formatting.Indented
    };

    taskModel.TaskStatus = TaskStatus.Started;
    output(string.Format("Exporting to {0}", saveFile.SafeFileName));
    output("Begin reading indexes");

    jsonWriter.WriteStartObject();
    jsonWriter.WritePropertyName("Indexes");
    jsonWriter.WriteStartArray();

    ReadIndexes(0)
        .Catch(exception =>
        {
            taskModel.ReportError(exception);
            Infrastructure.Execute.OnTheUI(() => Finish(exception));
        });
}
private void StreamToClient(Stream stream, ExportOptions options, Lazy<NameValueCollection> headers, IPrincipal user)
{
    var old = CurrentOperationContext.Headers.Value;
    var oldUser = CurrentOperationContext.User.Value;
    try
    {
        CurrentOperationContext.Headers.Value = headers;
        CurrentOperationContext.User.Value = user;

        Database.TransactionalStorage.Batch(accessor =>
        {
            var bufferStream = new BufferedStream(stream, 1024 * 64);
            using (var cts = new CancellationTokenSource())
            using (var timeout = cts.TimeoutAfter(DatabasesLandlord.SystemConfiguration.DatabaseOperationTimeout))
            using (var streamWriter = new StreamWriter(bufferStream))
            using (var writer = new JsonTextWriter(streamWriter))
            {
                writer.WriteStartObject();
                writer.WritePropertyName("Results");
                writer.WriteStartArray();

                var exporter = new SmugglerExporter(Database, options);

                exporter.Export(item => WriteToStream(writer, item, timeout), cts.Token);

                writer.WriteEndArray();
                writer.WriteEndObject();

                writer.Flush();
                bufferStream.Flush();
            }
        });
    }
    finally
    {
        CurrentOperationContext.Headers.Value = old;
        CurrentOperationContext.User.Value = oldUser;
    }
}
public override void Respond(IHttpContext context)
{
    context.Response.BufferOutput = false;

    using (var writer = new JsonTextWriter(new StreamWriter(context.Response.OutputStream)))
    {
        writer.WriteStartObject();
        writer.WritePropertyName("Results");
        writer.WriteStartArray();

        Database.TransactionalStorage.Batch(accessor =>
        {
            var startsWith = context.Request.QueryString["startsWith"];
            int pageSize = context.GetPageSize(int.MaxValue);
            if (string.IsNullOrEmpty(context.Request.QueryString["pageSize"]))
                pageSize = int.MaxValue;

            if (string.IsNullOrEmpty(startsWith))
            {
                Database.GetDocuments(context.GetStart(), pageSize, context.GetEtagFromQueryString(), doc => doc.WriteTo(writer));
            }
            else
            {
                Database.GetDocumentsWithIdStartingWith(
                    startsWith,
                    context.Request.QueryString["matches"],
                    context.GetStart(),
                    pageSize,
                    doc => doc.WriteTo(writer));
            }
        });

        writer.WriteEndArray();
        writer.WriteEndObject();
        writer.Flush();
    }
}
public void WriteBytesInArray()
{
    StringBuilder sb = new StringBuilder();
    StringWriter sw = new StringWriter(sb);

    string text = "Hello world.";
    byte[] data = Encoding.UTF8.GetBytes(text);

    using (JsonTextWriter jsonWriter = new JsonTextWriter(sw))
    {
        jsonWriter.Formatting = Formatting.Indented;
        Assert.AreEqual(Formatting.Indented, jsonWriter.Formatting);

        jsonWriter.WriteStartArray();
        jsonWriter.WriteValue(data);
        jsonWriter.WriteValue(data);
        jsonWriter.WriteValue((object)data);
        jsonWriter.WriteValue((byte[])null);
        jsonWriter.WriteValue((Uri)null);
        jsonWriter.WriteEndArray();
    }

    string expected = @"[
  ""SGVsbG8gd29ybGQu"",
  ""SGVsbG8gd29ybGQu"",
  ""SGVsbG8gd29ybGQu"",
  null,
  null
]";
    string result = sb.ToString();

    Assert.AreEqual(expected, result);
}
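The "SGVsbG8gd29ybGQu" literals in the expected output are not arbitrary: WriteValue(byte[]) base64-encodes its payload. A quick, standalone check of the round trip (not part of the test above):

using System;
using System.Text;

static class Base64RoundTrip
{
    static void Main()
    {
        // The base64 string from the expected output decodes back to the input text.
        byte[] data = Convert.FromBase64String("SGVsbG8gd29ybGQu");
        Console.WriteLine(Encoding.UTF8.GetString(data)); // prints: Hello world.
    }
}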
public void Path()
{
    StringBuilder sb = new StringBuilder();
    StringWriter sw = new StringWriter(sb);

    string text = "Hello world.";
    byte[] data = Encoding.UTF8.GetBytes(text);

    using (JsonTextWriter writer = new JsonTextWriter(sw))
    {
        writer.Formatting = Formatting.Indented;

        writer.WriteStartArray();
        Assert.AreEqual("", writer.Path);
        writer.WriteStartObject();
        Assert.AreEqual("[0]", writer.Path);
        writer.WritePropertyName("Property1");
        Assert.AreEqual("[0].Property1", writer.Path);
        writer.WriteStartArray();
        Assert.AreEqual("[0].Property1", writer.Path);
        writer.WriteValue(1);
        Assert.AreEqual("[0].Property1[0]", writer.Path);
        writer.WriteStartArray();
        Assert.AreEqual("[0].Property1[1]", writer.Path);
        writer.WriteStartArray();
        Assert.AreEqual("[0].Property1[1][0]", writer.Path);
        writer.WriteStartArray();
        Assert.AreEqual("[0].Property1[1][0][0]", writer.Path);
        writer.WriteEndObject();
        Assert.AreEqual("[0]", writer.Path);
        writer.WriteStartObject();
        Assert.AreEqual("[1]", writer.Path);
        writer.WritePropertyName("Property2");
        Assert.AreEqual("[1].Property2", writer.Path);
        writer.WriteStartConstructor("Constructor1");
        Assert.AreEqual("[1].Property2", writer.Path);
        writer.WriteNull();
        Assert.AreEqual("[1].Property2[0]", writer.Path);
        writer.WriteStartArray();
        Assert.AreEqual("[1].Property2[1]", writer.Path);
        writer.WriteValue(1);
        Assert.AreEqual("[1].Property2[1][0]", writer.Path);
        writer.WriteEnd();
        Assert.AreEqual("[1].Property2[1]", writer.Path);
        writer.WriteEndObject();
        Assert.AreEqual("[1]", writer.Path);
        writer.WriteEndArray();
        Assert.AreEqual("", writer.Path);
    }

    Assert.AreEqual(@"[
  {
    ""Property1"": [
      1,
      [
        [
          []
        ]
      ]
    ]
  },
  {
    ""Property2"": new Constructor1(
      null,
      [
        1
      ]
    )
  }
]", sb.ToString());
}
public void QuoteChar()
{
    StringWriter sw = new StringWriter();
    JsonTextWriter writer = new JsonTextWriter(sw);
    writer.Formatting = Formatting.Indented;
    writer.QuoteChar = '\'';

    writer.WriteStartArray();

    writer.WriteValue(new DateTime(2000, 1, 1, 1, 1, 1, DateTimeKind.Utc));
    writer.WriteValue(new DateTimeOffset(2000, 1, 1, 1, 1, 1, TimeSpan.Zero));

    writer.DateFormatHandling = DateFormatHandling.MicrosoftDateFormat;
    writer.WriteValue(new DateTime(2000, 1, 1, 1, 1, 1, DateTimeKind.Utc));
    writer.WriteValue(new DateTimeOffset(2000, 1, 1, 1, 1, 1, TimeSpan.Zero));

    writer.WriteValue(new byte[] { 1, 2, 3 });
    writer.WriteValue(TimeSpan.Zero);
    writer.WriteValue(new Uri("http://www.google.com/"));
    writer.WriteValue(Guid.Empty);

    writer.WriteEnd();

    Assert.AreEqual(@"[
  '2000-01-01T01:01:01Z',
  '2000-01-01T01:01:01+00:00',
  '\/Date(946688461000)\/',
  '\/Date(946688461000+0000)\/',
  'AQID',
  '00:00:00',
  'http://www.google.com/',
  '00000000-0000-0000-0000-000000000000'
]", sw.ToString());
}
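One caveat worth noting as an aside (not part of the test above): to the best of my knowledge, JsonTextWriter only accepts a single or double quote for QuoteChar, and any other character throws an ArgumentException. A minimal sketch:

using System.IO;
using Newtonsoft.Json;

static class QuoteCharAside
{
    static void Main()
    {
        var writer = new JsonTextWriter(new StringWriter());
        writer.QuoteChar = '\'';   // allowed: single quote
        writer.QuoteChar = '"';    // allowed: double quote (the default)
        // writer.QuoteChar = '`'; // would throw ArgumentException
    }
}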
private static string GenerateOutput(Dictionary<string, object> result, int indent)
{
    var stringWriter = new StringWriter();
    var writer = new JsonTextWriter(stringWriter)
    {
        Formatting = Formatting.Indented
    };

    writer.WriteStartObject();
    foreach (var o in result)
    {
        writer.WritePropertyName(o.Key);
        var ravenJToken = o.Value as RavenJToken;
        if (ravenJToken != null)
        {
            ravenJToken.WriteTo(writer);
            continue;
        }

        var conflicted = o.Value as Conflicted;
        if (conflicted != null)
        {
            writer.WriteComment(">>>> conflict start");
            writer.WriteStartArray();
            foreach (var token in conflicted.Values)
            {
                token.WriteTo(writer);
            }
            writer.WriteEndArray();
            writer.WriteComment("<<<< conflict end");
            continue;
        }

        var arrayWithWarning = o.Value as ArrayWithWarning;
        if (arrayWithWarning != null)
        {
            writer.WriteComment(">>>> auto merged array start");
            arrayWithWarning.MergedArray.WriteTo(writer);
            writer.WriteComment("<<<< auto merged array end");
            continue;
        }

        var resolver = o.Value as ConflictsResolver;
        if (resolver != null)
        {
            using (var stringReader = new StringReader(resolver.Resolve(indent + 1)))
            {
                var first = true;
                string line;
                while ((line = stringReader.ReadLine()) != null)
                {
                    if (first == false)
                    {
                        writer.WriteRaw(Environment.NewLine);

                        for (var i = 0; i < indent; i++)
                        {
                            writer.WriteRaw(new string(writer.IndentChar, writer.Indentation));
                        }
                    }

                    if (first)
                        writer.WriteRawValue(line);
                    else
                        writer.WriteRaw(line);

                    first = false;
                }
            }

            continue;
        }

        throw new InvalidOperationException("Could not understand how to deal with: " + o.Value);
    }
    writer.WriteEndObject();

    return stringWriter.GetStringBuilder().ToString();
}
private async Task ExportIdentities(JsonTextWriter jsonWriter, ItemType operateOnTypes)
{
    Operations.ShowProgress("Exporting Identities");

    var identities = await Operations.GetIdentities();

    Operations.ShowProgress("Fetched {0} identities: {1}", identities.Count, string.Join(", ", identities.Select(x => x.Key)));

    var filteredIdentities = identities.Where(x => FilterIdentity(x.Key, operateOnTypes)).ToList();

    Operations.ShowProgress("After filtering, {0} identities need to be exported: {1}", filteredIdentities.Count, string.Join(", ", filteredIdentities.Select(x => x.Key)));

    jsonWriter.WritePropertyName("Identities");
    jsonWriter.WriteStartArray();

    foreach (var identityInfo in filteredIdentities)
    {
        new RavenJObject
        {
            { "Key", identityInfo.Key },
            { "Value", identityInfo.Value }
        }.WriteTo(jsonWriter);
    }

    jsonWriter.WriteEndArray();

    Operations.ShowProgress("Done with exporting identities");
}
protected override Task SerializeToStreamAsync(Stream stream, TransportContext context)
{
    var streamWriter = new StreamWriter(stream);
    var writer = new JsonTextWriter(streamWriter);
    writer.WriteStartArray();

    foreach (var result in results)
    {
        if (result == null)
        {
            writer.WriteNull();
            continue;
        }

        writer.WriteStartObject();
        writer.WritePropertyName("Status");
        writer.WriteValue((int)result.StatusCode);
        writer.WritePropertyName("Headers");
        writer.WriteStartObject();

        foreach (var header in result.Headers.Concat(result.Content.Headers))
        {
            foreach (var val in header.Value)
            {
                writer.WritePropertyName(header.Key);
                writer.WriteValue(val);
            }
        }

        writer.WriteEndObject();
        writer.WritePropertyName("Result");

        var jsonContent = (JsonContent)result.Content;
        if (jsonContent.Data != null)
            jsonContent.Data.WriteTo(writer, Default.Converters);

        writer.WriteEndObject();
    }

    writer.WriteEndArray();
    writer.Flush();

    return new CompletedTask();
}
public void WriteReadBoundaryDecimals()
{
    StringWriter sw = new StringWriter();
    JsonTextWriter writer = new JsonTextWriter(sw);

    writer.WriteStartArray();
    writer.WriteValue(decimal.MaxValue);
    writer.WriteValue(decimal.MinValue);
    writer.WriteEndArray();

    string json = sw.ToString();

    StringReader sr = new StringReader(json);
    JsonTextReader reader = new JsonTextReader(sr);

    Assert.IsTrue(reader.Read());

    decimal? max = reader.ReadAsDecimal();
    Assert.AreEqual(decimal.MaxValue, max);

    decimal? min = reader.ReadAsDecimal();
    Assert.AreEqual(decimal.MinValue, min);

    Assert.IsTrue(reader.Read());
}
public void WriteReadWrite()
{
    StringBuilder sb = new StringBuilder();
    StringWriter sw = new StringWriter(sb);

    using (JsonWriter jsonWriter = new JsonTextWriter(sw)
    {
        Formatting = Formatting.Indented
    })
    {
        jsonWriter.WriteStartArray();
        jsonWriter.WriteValue(true);

        jsonWriter.WriteStartObject();
        jsonWriter.WritePropertyName("integer");
        jsonWriter.WriteValue(99);
        jsonWriter.WritePropertyName("string");
        jsonWriter.WriteValue("how now brown cow?");
        jsonWriter.WritePropertyName("array");

        jsonWriter.WriteStartArray();
        for (int i = 0; i < 5; i++)
        {
            jsonWriter.WriteValue(i);
        }

        jsonWriter.WriteStartObject();
        jsonWriter.WritePropertyName("decimal");
        jsonWriter.WriteValue(990.00990099m);
        jsonWriter.WriteEndObject();

        jsonWriter.WriteValue(5);
        jsonWriter.WriteEndArray();

        jsonWriter.WriteEndObject();

        jsonWriter.WriteValue("This is a string.");
        jsonWriter.WriteNull();
        jsonWriter.WriteNull();
        jsonWriter.WriteEndArray();
    }

    string json = sb.ToString();

    JsonSerializer serializer = new JsonSerializer();

    object jsonObject = serializer.Deserialize(new JsonTextReader(new StringReader(json)));

    sb = new StringBuilder();
    sw = new StringWriter(sb);

    using (JsonWriter jsonWriter = new JsonTextWriter(sw)
    {
        Formatting = Formatting.Indented
    })
    {
        serializer.Serialize(jsonWriter, jsonObject);
    }

    Assert.AreEqual(json, sb.ToString());
}
public void WriteFloatingPointNumber()
{
    StringBuilder sb = new StringBuilder();
    StringWriter sw = new StringWriter(sb);

    using (JsonWriter jsonWriter = new JsonTextWriter(sw))
    {
        jsonWriter.WriteStartArray();

        jsonWriter.WriteValue(0.0);
        jsonWriter.WriteValue(0f);
        jsonWriter.WriteValue(0.1);
        jsonWriter.WriteValue(1.0);
        jsonWriter.WriteValue(1.000001);
        jsonWriter.WriteValue(0.000001);
        jsonWriter.WriteValue(double.Epsilon);
        jsonWriter.WriteValue(double.PositiveInfinity);
        jsonWriter.WriteValue(double.NegativeInfinity);
        jsonWriter.WriteValue(double.NaN);
        jsonWriter.WriteValue(double.MaxValue);
        jsonWriter.WriteValue(double.MinValue);
        jsonWriter.WriteValue(float.PositiveInfinity);
        jsonWriter.WriteValue(float.NegativeInfinity);
        jsonWriter.WriteValue(float.NaN);

        jsonWriter.WriteEndArray();
    }

    Assert.AreEqual(@"[0.0,0.0,0.1,1.0,1.000001,1E-06,4.94065645841247E-324,Infinity,-Infinity,NaN,1.7976931348623157E+308,-1.7976931348623157E+308,Infinity,-Infinity,NaN]", sb.ToString());
}
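Note that the bare Infinity/NaN symbols asserted above are not valid strict JSON. On Json.NET 5.0 and later, the writer's FloatFormatHandling property can quote such values instead; a small sketch (an aside, not part of the original test):

using System;
using System.IO;
using Newtonsoft.Json;

static class FloatFormatSketch
{
    static void Main()
    {
        var sw = new StringWriter();
        using (JsonWriter writer = new JsonTextWriter(sw))
        {
            // Quote non-finite numbers instead of emitting bare symbols.
            writer.FloatFormatHandling = FloatFormatHandling.String;
            writer.WriteStartArray();
            writer.WriteValue(double.NaN);              // written as "NaN"
            writer.WriteValue(double.PositiveInfinity); // written as "Infinity"
            writer.WriteValue(0.1);                     // finite values are unaffected
            writer.WriteEndArray();
        }
        Console.WriteLine(sw); // ["NaN","Infinity",0.1]
    }
}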
public string ExportData(SmugglerOptions options, bool incremental, bool lastEtagsFromFile)
{
    options = options ?? smugglerOptions;
    if (options == null)
        throw new ArgumentNullException("options");

    var file = options.BackupPath;

    if (incremental)
    {
        if (Directory.Exists(options.BackupPath) == false)
        {
            if (File.Exists(options.BackupPath))
                options.BackupPath = Path.GetDirectoryName(options.BackupPath) ?? options.BackupPath;
            else
                Directory.CreateDirectory(options.BackupPath);
        }

        if (lastEtagsFromFile)
            ReadLastEtagsFromFile(options);

        file = Path.Combine(options.BackupPath, SystemTime.UtcNow.ToString("yyyy-MM-dd-HH-mm", CultureInfo.InvariantCulture) + ".ravendb-incremental-dump");
        if (File.Exists(file))
        {
            var counter = 1;
            while (true)
            {
                file = Path.Combine(options.BackupPath, SystemTime.UtcNow.ToString("yyyy-MM-dd-HH-mm", CultureInfo.InvariantCulture) + " - " + counter + ".ravendb-incremental-dump");

                if (File.Exists(file) == false)
                    break;
                counter++;
            }
        }
    }

    using (var streamWriter = new StreamWriter(new GZipStream(File.Create(file), CompressionMode.Compress)))
    {
        var jsonWriter = new JsonTextWriter(streamWriter)
        {
            Formatting = Formatting.Indented
        };
        jsonWriter.WriteStartObject();
        jsonWriter.WritePropertyName("Indexes");
        jsonWriter.WriteStartArray();
        if ((options.OperateOnTypes & ItemType.Indexes) == ItemType.Indexes)
        {
            ExportIndexes(jsonWriter);
        }
        jsonWriter.WriteEndArray();

        jsonWriter.WritePropertyName("Docs");
        jsonWriter.WriteStartArray();
        if ((options.OperateOnTypes & ItemType.Documents) == ItemType.Documents)
        {
            options.LastDocsEtag = ExportDocuments(options, jsonWriter, options.LastDocsEtag);
        }
        jsonWriter.WriteEndArray();

        jsonWriter.WritePropertyName("Attachments");
        jsonWriter.WriteStartArray();
        if ((options.OperateOnTypes & ItemType.Attachments) == ItemType.Attachments)
        {
            options.LastAttachmentEtag = ExportAttachments(jsonWriter, options.LastAttachmentEtag);
        }
        jsonWriter.WriteEndArray();

        jsonWriter.WriteEndObject();
        streamWriter.Flush();
    }

    if (incremental && lastEtagsFromFile)
        WriteLastEtagsFromFile(options);

    return file;
}
private void StreamToClient(long id, SubscriptionActions subscriptions, Stream stream)
{
    var sentDocuments = false;

    var bufferStream = new BufferedStream(stream, 1024 * 64);
    var lastBatchSentTime = Stopwatch.StartNew();

    using (var writer = new JsonTextWriter(new StreamWriter(bufferStream)))
    {
        var options = subscriptions.GetBatchOptions(id);

        writer.WriteStartObject();
        writer.WritePropertyName("Results");
        writer.WriteStartArray();

        using (var cts = new CancellationTokenSource())
        using (var timeout = cts.TimeoutAfter(DatabasesLandlord.SystemConfiguration.DatabaseOperationTimeout))
        {
            Etag lastProcessedDocEtag = null;

            var batchSize = 0;
            var batchDocCount = 0;
            var processedDocumentsCount = 0;
            var hasMoreDocs = false;
            var config = subscriptions.GetSubscriptionConfig(id);
            var startEtag = config.AckEtag;
            var criteria = config.Criteria;

            bool isPrefixCriteria = !string.IsNullOrWhiteSpace(criteria.KeyStartsWith);

            Func<JsonDocument, bool> addDocument = doc =>
            {
                timeout.Delay();

                if (doc == null)
                {
                    // We only get this heartbeat when the streaming has gone on for a long time
                    // and we haven't sent anything to the user in a while (because of filtering, skipping, etc.).
                    writer.WriteRaw(Environment.NewLine);
                    writer.Flush();
                    if (lastBatchSentTime.ElapsedMilliseconds > 30000)
                        return false;

                    return true;
                }

                processedDocumentsCount++;

                // We can't continue because we have already maxed out the batch size in bytes.
                if (options.MaxSize.HasValue && batchSize >= options.MaxSize)
                    return false;

                // We can't continue because we have already maxed out the number of documents to send.
                if (batchDocCount >= options.MaxDocCount)
                    return false;

                // We can continue; system documents are simply skipped.
                if (doc.Key.StartsWith("Raven/", StringComparison.InvariantCultureIgnoreCase))
                    return true;

                // We can continue; the document is skipped because it doesn't match the criteria.
                if (MatchCriteria(criteria, doc) == false)
                    return true;

                doc.ToJson().WriteTo(writer);
                writer.WriteRaw(Environment.NewLine);

                batchSize += doc.SerializedSizeOnDisk;
                batchDocCount++;

                return true; // Fetch the next document.
            };

            var retries = 0;
            do
            {
                var lastProcessedDocumentsCount = processedDocumentsCount;

                Database.TransactionalStorage.Batch(accessor =>
                {
                    // we may be sending a LOT of documents to the user, and most
                    // of them aren't going to be relevant for other ops, so we are going to skip
                    // the cache for that, to avoid filling it up very quickly
                    using (DocumentCacher.SkipSetAndGetDocumentsInDocumentCache())
                    {
                        if (isPrefixCriteria)
                        {
                            // If we don't get any document from GetDocumentsWithIdStartingWith, it could be that
                            // we are looking at a long run of uninteresting documents, so we are hitting a timeout.
                            lastProcessedDocEtag = Database.Documents.GetDocumentsWithIdStartingWith(criteria.KeyStartsWith, options.MaxDocCount - batchDocCount, startEtag, cts.Token, addDocument);

                            hasMoreDocs = false;
                        }
                        else
                        {
                            // Whether or not it matches the criteria, the document has already been processed.
                            lastProcessedDocEtag = Database.Documents.GetDocuments(-1, options.MaxDocCount - batchDocCount, startEtag, cts.Token, addDocument);

                            // If we don't get any document from GetDocuments, it may be a signal that something is wrong.
                            if (lastProcessedDocEtag == null)
                            {
                                hasMoreDocs = false;
                            }
                            else
                            {
                                var lastDocEtag = accessor.Staleness.GetMostRecentDocumentEtag();
                                hasMoreDocs = EtagUtil.IsGreaterThan(lastDocEtag, lastProcessedDocEtag);

                                startEtag = lastProcessedDocEtag;
                            }

                            retries = lastProcessedDocumentsCount == batchDocCount ? retries : 0;
                        }
                    }
                });

                if (lastBatchSentTime.ElapsedMilliseconds >= 30000)
                {
                    if (batchDocCount == 0)
                        log.Warn("Subscription filtered out all possible documents for {0:#,#;;0} seconds in a row, stopping operation", lastBatchSentTime.Elapsed.TotalSeconds);
                    break;
                }

                if (lastProcessedDocumentsCount == processedDocumentsCount)
                {
                    if (retries == 3)
                    {
                        log.Warn("Subscription processing did not end up replicating any documents 3 times in a row, stopping operation");
                    }
                    else
                    {
                        log.Warn("Subscription processing did not end up replicating any documents, due to possible storage error, retry number: {0}", retries);
                    }

                    retries++;
                }
            } while (retries < 3 && hasMoreDocs && batchDocCount < options.MaxDocCount && (options.MaxSize.HasValue == false || batchSize < options.MaxSize));

            writer.WriteEndArray();

            if (batchDocCount > 0 || processedDocumentsCount > 0 || isPrefixCriteria)
            {
                writer.WritePropertyName("LastProcessedEtag");
                writer.WriteValue(lastProcessedDocEtag.ToString());

                sentDocuments = true;
            }

            writer.WriteEndObject();
            writer.Flush();

            bufferStream.Flush();
        }
    }

    if (sentDocuments)
        subscriptions.UpdateBatchSentTime(id);
}
public virtual async Task<ExportDataResult> ExportData(SmugglerExportOptions exportOptions)
{
    Operations.Configure(SmugglerOptions);
    Operations.Initialize(SmugglerOptions);

    var result = new ExportDataResult
    {
        FilePath = exportOptions.ToFile,
        LastAttachmentsEtag = SmugglerOptions.StartAttachmentsEtag,
        LastDocsEtag = SmugglerOptions.StartDocsEtag,
        LastDocDeleteEtag = SmugglerOptions.StartDocsDeletionEtag,
        LastAttachmentsDeleteEtag = SmugglerOptions.StartAttachmentsDeletionEtag
    };

    if (SmugglerOptions.Incremental)
    {
        if (Directory.Exists(result.FilePath) == false)
        {
            if (File.Exists(result.FilePath))
                result.FilePath = Path.GetDirectoryName(result.FilePath) ?? result.FilePath;
            else
                Directory.CreateDirectory(result.FilePath);
        }

        if (SmugglerOptions.StartDocsEtag == Etag.Empty && SmugglerOptions.StartAttachmentsEtag == Etag.Empty)
        {
            ReadLastEtagsFromFile(result);
        }

        result.FilePath = Path.Combine(result.FilePath, SystemTime.UtcNow.ToString("yyyy-MM-dd-HH-mm", CultureInfo.InvariantCulture) + ".ravendb-incremental-dump");
        if (File.Exists(result.FilePath))
        {
            var counter = 1;
            while (true)
            {
                // ReSharper disable once AssignNullToNotNullAttribute
                result.FilePath = Path.Combine(Path.GetDirectoryName(result.FilePath), SystemTime.UtcNow.ToString("yyyy-MM-dd-HH-mm", CultureInfo.InvariantCulture) + " - " + counter + ".ravendb-incremental-dump");

                if (File.Exists(result.FilePath) == false)
                    break;

                counter++;
            }
        }
    }

    SmugglerExportException lastException = null;

    bool ownedStream = exportOptions.ToStream == null;
    var stream = exportOptions.ToStream ?? File.Create(result.FilePath);

    try
    {
        await DetectServerSupportedFeatures(exportOptions.From);
    }
    catch (WebException e)
    {
        Operations.ShowProgress("Failed to query server for supported features. Reason: " + e.Message);
        SetLegacyMode(); // could not detect supported features, so fall back to legacy mode
        // lastException = new SmugglerExportException
        // {
        //     LastEtag = Etag.Empty,
        //     File = ownedStream ? result.FilePath : null
        // };
    }

    try
    {
        using (var gZipStream = new GZipStream(stream, CompressionMode.Compress, leaveOpen: true))
        using (var streamWriter = new StreamWriter(gZipStream))
        {
            var jsonWriter = new JsonTextWriter(streamWriter)
            {
                Formatting = Formatting.Indented
            };
            jsonWriter.WriteStartObject();
            jsonWriter.WritePropertyName("Indexes");
            jsonWriter.WriteStartArray();
            if (SmugglerOptions.OperateOnTypes.HasFlag(ItemType.Indexes))
            {
                await ExportIndexes(exportOptions.From, jsonWriter);
            }
            jsonWriter.WriteEndArray();

            // used to synchronize max returned values for put/delete operations
            var maxEtags = Operations.FetchCurrentMaxEtags();

            jsonWriter.WritePropertyName("Docs");
            jsonWriter.WriteStartArray();
            if (SmugglerOptions.OperateOnTypes.HasFlag(ItemType.Documents))
            {
                try
                {
                    result.LastDocsEtag = await ExportDocuments(exportOptions.From, jsonWriter, result.LastDocsEtag, maxEtags.LastDocsEtag);
                }
                catch (SmugglerExportException e)
                {
                    result.LastDocsEtag = e.LastEtag;
                    e.File = ownedStream ? result.FilePath : null;
                    lastException = e;
                }
            }
            jsonWriter.WriteEndArray();

            jsonWriter.WritePropertyName("Attachments");
            jsonWriter.WriteStartArray();
            if (SmugglerOptions.OperateOnTypes.HasFlag(ItemType.Attachments) && lastException == null)
            {
                try
                {
                    result.LastAttachmentsEtag = await ExportAttachments(exportOptions.From, jsonWriter, result.LastAttachmentsEtag, maxEtags.LastAttachmentsEtag);
                }
                catch (SmugglerExportException e)
                {
                    result.LastAttachmentsEtag = e.LastEtag;
                    e.File = ownedStream ? result.FilePath : null;
                    lastException = e;
                }
            }
            jsonWriter.WriteEndArray();

            jsonWriter.WritePropertyName("Transformers");
            jsonWriter.WriteStartArray();
            if (SmugglerOptions.OperateOnTypes.HasFlag(ItemType.Transformers) && lastException == null)
            {
                await ExportTransformers(exportOptions.From, jsonWriter);
            }
            jsonWriter.WriteEndArray();

            if (SmugglerOptions.ExportDeletions)
            {
                await ExportDeletions(jsonWriter, result, maxEtags);
            }

            await ExportIdentities(jsonWriter, SmugglerOptions.OperateOnTypes);

            jsonWriter.WriteEndObject();
            streamWriter.Flush();
        }

        if (SmugglerOptions.Incremental)
            WriteLastEtagsToFile(result, result.FilePath);

        if (SmugglerOptions.ExportDeletions)
        {
            Operations.PurgeTombstones(result);
        }

        if (lastException != null)
            throw lastException;

        return result;
    }
    finally
    {
        if (ownedStream && stream != null)
            stream.Dispose();
    }
}
private void WriteRequestsFromQueueToFile(ConcurrentQueue<RavenJObject> messages, string filePath, bool isCompressed, bool printOutput, ManualResetEvent mre)
{
    RavenJObject notification;
    var requestsCounter = 0;

    using (var stream = File.Create(filePath))
    {
        Stream finalStream = stream;
        if (isCompressed)
            finalStream = new GZipStream(stream, CompressionMode.Compress, leaveOpen: true);

        using (var streamWriter = new StreamWriter(finalStream))
        {
            var jsonWriter = new JsonTextWriter(streamWriter)
            {
                Formatting = Formatting.Indented
            };

            jsonWriter.WriteStartArray();

            while (messages.TryDequeue(out notification) || mre.WaitOne(0) == false)
            {
                if (notification == null)
                {
                    Thread.Sleep(100);
                    continue;
                }

                requestsCounter++;
                if (printOutput)
                {
                    Console.WriteLine("Request #{0} Stored", requestsCounter);
                }

                notification.WriteTo(jsonWriter);
            }

            jsonWriter.WriteEndArray();
            streamWriter.Flush();
        }

        if (isCompressed)
            finalStream.Dispose();
    }
}
public void BadWriteEndArray()
{
    ExceptionAssert.Throws<JsonWriterException>(
        "No token to close. Path ''.",
        () =>
        {
            StringBuilder sb = new StringBuilder();
            StringWriter sw = new StringWriter(sb);

            using (JsonWriter jsonWriter = new JsonTextWriter(sw))
            {
                jsonWriter.WriteStartArray();
                jsonWriter.WriteValue(0.0);
                jsonWriter.WriteEndArray();
                jsonWriter.WriteEndArray();
            }
        });
}
public void ReadLongJsonArray()
{
    int valueCount = 10000;
    StringWriter sw = new StringWriter();
    JsonTextWriter writer = new JsonTextWriter(sw);
    writer.WriteStartArray();
    for (int i = 0; i < valueCount; i++)
    {
        writer.WriteValue(i);
    }
    writer.WriteEndArray();

    string json = sw.ToString();

    JsonTextReader reader = new JsonTextReader(new StringReader(json));
    Assert.IsTrue(reader.Read());
    for (int i = 0; i < valueCount; i++)
    {
        Assert.IsTrue(reader.Read());
        Assert.AreEqual((long)i, reader.Value);
    }
    Assert.IsTrue(reader.Read());
    Assert.IsFalse(reader.Read());
}
private void WriteToken(JsonTextWriter writer, string propertyName, object propertyValue)
{
    if (isMetadataResolver && (propertyName.StartsWith("Raven-Replication-") ||
                               propertyName.StartsWith("@") ||
                               propertyName == "Last-Modified" ||
                               propertyName == "Raven-Last-Modified"))
    {
        return;
    }

    writer.WritePropertyName(propertyName);

    var ravenJToken = propertyValue as RavenJToken;
    if (ravenJToken != null)
    {
        ravenJToken.WriteTo(writer);
        return;
    }

    var conflicted = propertyValue as Conflicted;
    if (conflicted != null)
    {
        writer.WriteComment(">>>> conflict start");
        writer.WriteStartArray();
        foreach (var token in conflicted.Values)
        {
            token.WriteTo(writer);
        }
        writer.WriteEndArray();
        writer.WriteComment("<<<< conflict end");
        return;
    }

    var arrayWithWarning = propertyValue as ArrayWithWarning;
    if (arrayWithWarning != null)
    {
        writer.WriteComment(">>>> auto merged array start");
        arrayWithWarning.MergedArray.WriteTo(writer);
        writer.WriteComment("<<<< auto merged array end");
        return;
    }

    throw new InvalidOperationException("Could not understand how to deal with: " + propertyValue);
}
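Both conflict-rendering helpers above lean on WriteComment, which emits /* ... */ blocks; since comments sit outside the JSON data model, the conflict markers never pollute the merged values themselves. A small, self-contained sketch of the resulting shape (the exact spacing of the output is approximate):

using System;
using System.IO;
using Newtonsoft.Json;

static class CommentMarkerSketch
{
    static void Main()
    {
        var sw = new StringWriter();
        using (var writer = new JsonTextWriter(sw))
        {
            writer.WriteStartObject();
            writer.WritePropertyName("Value");
            writer.WriteComment(">>>> conflict start");
            writer.WriteStartArray();
            writer.WriteValue(1);
            writer.WriteValue(2);
            writer.WriteEndArray();
            writer.WriteComment("<<<< conflict end");
            writer.WriteEndObject();
        }
        // Roughly: {"Value":/*>>>> conflict start*/[1,2]/*<<<< conflict end*/}
        Console.WriteLine(sw);
    }
}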
public virtual async Task<string> ExportData(Stream stream, SmugglerOptions options, bool incremental, bool lastEtagsFromFile, PeriodicBackupStatus backupStatus)
{
    options = options ?? SmugglerOptions;
    if (options == null)
        throw new ArgumentNullException("options");

    var file = options.BackupPath;

#if !SILVERLIGHT
    if (incremental)
    {
        if (Directory.Exists(options.BackupPath) == false)
        {
            if (File.Exists(options.BackupPath))
                options.BackupPath = Path.GetDirectoryName(options.BackupPath) ?? options.BackupPath;
            else
                Directory.CreateDirectory(options.BackupPath);
        }

        if (lastEtagsFromFile && backupStatus == null)
            ReadLastEtagsFromFile(options);
        if (backupStatus != null)
            ReadLastEtagsFromClass(options, backupStatus);

        file = Path.Combine(options.BackupPath, SystemTime.UtcNow.ToString("yyyy-MM-dd-HH-mm", CultureInfo.InvariantCulture) + ".ravendb-incremental-dump");
        if (File.Exists(file))
        {
            var counter = 1;
            while (true)
            {
                file = Path.Combine(options.BackupPath, SystemTime.UtcNow.ToString("yyyy-MM-dd-HH-mm", CultureInfo.InvariantCulture) + " - " + counter + ".ravendb-incremental-dump");

                if (File.Exists(file) == false)
                    break;
                counter++;
            }
        }
    }
#else
    if (incremental)
        throw new NotSupportedException("Incremental exports are not supported in SL.");
#endif

    await DetectServerSupportedFeatures();

    SmugglerExportException lastException = null;

    bool ownedStream = stream == null;
    try
    {
        stream = stream ?? File.Create(file);
        using (var gZipStream = new GZipStream(stream, CompressionMode.Compress,
#if SILVERLIGHT
            CompressionLevel.BestCompression,
#endif
            leaveOpen: true))
        using (var streamWriter = new StreamWriter(gZipStream))
        {
            var jsonWriter = new JsonTextWriter(streamWriter)
            {
                Formatting = Formatting.Indented
            };
            jsonWriter.WriteStartObject();
            jsonWriter.WritePropertyName("Indexes");
            jsonWriter.WriteStartArray();
            if ((options.OperateOnTypes & ItemType.Indexes) == ItemType.Indexes)
            {
                await ExportIndexes(jsonWriter);
            }
            jsonWriter.WriteEndArray();

            jsonWriter.WritePropertyName("Docs");
            jsonWriter.WriteStartArray();
            if (options.OperateOnTypes.HasFlag(ItemType.Documents))
            {
                try
                {
                    options.LastDocsEtag = await ExportDocuments(options, jsonWriter, options.LastDocsEtag);
                }
                catch (SmugglerExportException e)
                {
                    options.LastDocsEtag = e.LastEtag;
                    e.File = file;
                    lastException = e;
                }
            }
            jsonWriter.WriteEndArray();

            jsonWriter.WritePropertyName("Attachments");
            jsonWriter.WriteStartArray();
            if (options.OperateOnTypes.HasFlag(ItemType.Attachments) && lastException == null)
            {
                try
                {
                    options.LastAttachmentEtag = await ExportAttachments(jsonWriter, options.LastAttachmentEtag);
                }
                catch (SmugglerExportException e)
                {
                    options.LastAttachmentEtag = e.LastEtag;
                    e.File = file;
                    lastException = e;
                }
            }
            jsonWriter.WriteEndArray();

            jsonWriter.WritePropertyName("Transformers");
            jsonWriter.WriteStartArray();
            if (options.OperateOnTypes.HasFlag(ItemType.Transformers) && lastException == null)
            {
                await ExportTransformers(jsonWriter);
            }
            jsonWriter.WriteEndArray();

            jsonWriter.WriteEndObject();
            streamWriter.Flush();
        }

#if !SILVERLIGHT
        if (incremental && lastEtagsFromFile)
            WriteLastEtagsFromFile(options);
#endif

        if (lastException != null)
            throw lastException;

        return file;
    }
    finally
    {
        if (ownedStream && stream != null)
            stream.Dispose();
    }
}