public override void Execute(object parameter)
{
    var saveFile = new SaveFileDialog
    {
        DefaultExt = ".raven.dump",
        Filter = "Raven Dumps|*.raven.dump",
    };

    // The database name may contain characters that are illegal in file names
    // (e.g. a remote database identified by URL); replace them so assigning
    // DefaultFileName cannot throw an ArgumentException. Mirrors the other
    // export task in this codebase.
    var name = ApplicationModel.Database.Value.Name;
    var normalizedName = new string(name.Select(ch => Path.GetInvalidPathChars().Contains(ch) ? '_' : ch).ToArray());
    var defaultFileName = string.Format("Dump of {0}, {1}", normalizedName,
        DateTimeOffset.Now.ToString("MMM dd yyyy HH-mm", CultureInfo.InvariantCulture));
    try
    {
        saveFile.DefaultFileName = defaultFileName;
    }
    catch
    {
        // Best effort only — the dialog still works without a suggested name.
    }

    if (saveFile.ShowDialog() != true)
        return;

    // Writer chain: file -> gzip -> text -> json. Stored in fields so the
    // async continuation can keep writing after this method returns.
    stream = saveFile.OpenFile();
    gZipStream = new GZipStream(stream, CompressionMode.Compress);
    streamWriter = new StreamWriter(gZipStream);
    jsonWriter = new JsonTextWriter(streamWriter) { Formatting = Formatting.Indented };

    taskModel.TaskStatus = TaskStatus.Started;
    output(String.Format("Exporting to {0}", saveFile.SafeFileName));
    output("Begin reading indexes");

    jsonWriter.WriteStartObject();
    jsonWriter.WritePropertyName("Indexes");
    jsonWriter.WriteStartArray();

    ReadIndexes(0)
        .Catch(exception =>
        {
            // Surface the failure to the user instead of finishing silently,
            // consistent with the other export task.
            taskModel.ReportError(exception);
            Infrastructure.Execute.OnTheUI(() => Finish(exception));
        });
}
private void StreamToClient(Stream stream, int pageSize, Etag etag)
{
    // Streams every file header after the given etag to the client as a
    // { "Results": [ ... ] } envelope, one JSON object per line, bounded by
    // the configured database operation timeout.
    using (var cts = new CancellationTokenSource())
    using (var timeout = cts.TimeoutAfter(FileSystemsLandlord.SystemConfiguration.DatabaseOperationTimeout))
    using (var writer = new JsonTextWriter(new StreamWriter(stream)))
    {
        writer.WriteStartObject();
        writer.WritePropertyName("Results");
        writer.WriteStartArray();

        Storage.Batch(accessor =>
        {
            foreach (var fileHeader in accessor.GetFilesAfter(etag, pageSize))
            {
                // We are still making progress — push the timeout forward.
                timeout.Delay();

                RavenJObject.FromObject(fileHeader).WriteTo(writer);
                writer.WriteRaw(Environment.NewLine);
            }
        });

        writer.WriteEndArray();
        writer.WriteEndObject();
        writer.Flush();
    }
}
private void StreamToClient(Stream stream, int pageSize, Etag etag, OrderedPartCollection<AbstractFileReadTrigger> readTriggers)
{
    // Same streaming shape as the trigger-less overload, except that each
    // file is first offered to the read triggers, which may veto it.
    using (var cts = new CancellationTokenSource())
    using (var timeout = cts.TimeoutAfter(FileSystemsLandlord.SystemConfiguration.DatabaseOperationTimeout))
    using (var writer = new JsonTextWriter(new StreamWriter(stream)))
    {
        writer.WriteStartObject();
        writer.WritePropertyName("Results");
        writer.WriteStartArray();

        Storage.Batch(accessor =>
        {
            foreach (var fileHeader in accessor.GetFilesAfter(etag, pageSize))
            {
                // Skip any file that a read trigger refuses to expose.
                if (readTriggers.CanReadFile(fileHeader.FullPath, fileHeader.Metadata, ReadOperation.Load) == false)
                    continue;

                // Still making progress — extend the operation timeout.
                timeout.Delay();

                RavenJObject.FromObject(fileHeader).WriteTo(writer);
                writer.WriteRaw(Environment.NewLine);
            }
        });

        writer.WriteEndArray();
        writer.WriteEndObject();
        writer.Flush();
    }
}
public override void Execute(object parameter)
{
    // Reads the optional "Include Attachments" checkbox to decide whether
    // attachments are part of the dump.
    TaskCheckBox attachmentUI = taskModel.TaskInputs.FirstOrDefault(x => x.Name == "Include Attachments") as TaskCheckBox;
    includeAttachments = attachmentUI != null && attachmentUI.Value;

    var saveFile = new SaveFileDialog
    {
        DefaultExt = ".ravendump",
        Filter = "Raven Dumps|*.ravendump;*.raven.dump",
    };
    // Replace characters that are invalid in paths so the suggested file name
    // cannot make the SaveFileDialog setter throw.
    var name = ApplicationModel.Database.Value.Name;
    var normalizedName = new string(name.Select(ch => Path.GetInvalidPathChars().Contains(ch) ? '_' : ch).ToArray());
    var defaultFileName = string.Format("Dump of {0}, {1}", normalizedName, DateTimeOffset.Now.ToString("dd MMM yyyy HH-mm", CultureInfo.InvariantCulture));
    try
    {
        saveFile.DefaultFileName = defaultFileName;
    }
    catch { } // best effort only — the dialog still works with no default name

    if (saveFile.ShowDialog() != true)
        return;

    taskModel.CanExecute.Value = false; // prevent re-entry while the export runs

    // Writer chain: file -> gzip -> text -> json. Stored in fields so the
    // async continuations below can keep writing after this method returns.
    stream = saveFile.OpenFile();
    gZipStream = new GZipStream(stream, CompressionMode.Compress);
    streamWriter = new StreamWriter(gZipStream);
    jsonWriter = new JsonTextWriter(streamWriter) { Formatting = Formatting.Indented };

    taskModel.TaskStatus = TaskStatus.Started;
    output(String.Format("Exporting to {0}", saveFile.SafeFileName));

    jsonWriter.WriteStartObject();

    // Continuation chain: indexes -> documents -> (attachments) -> finalize.
    // "finalized" closes the root JSON object and reports completion on the UI.
    Action finalized = () =>
    {
        jsonWriter.WriteEndObject();
        Infrastructure.Execute.OnTheUI(() => Finish(null));
    };
    Action readAttachments = () => ReadAttachments(Guid.Empty, 0, callback: finalized);
    Action readDocuments = () => ReadDocuments(Guid.Empty, 0, callback: includeAttachments ? readAttachments : finalized);

    try
    {
        ReadIndexes(0, callback: readDocuments);
    }
    catch (Exception ex)
    {
        taskModel.ReportError(ex);
        Infrastructure.Execute.OnTheUI(() => Finish(ex));
    }
}
private void StreamToClient(Stream stream, string startsWith, int start, int pageSize, Etag etag, string matches, int nextPageStart, string skipAfter)
{
    // Streams documents to the client as { "Results": [...], "NextPageStart": n }.
    // 64 KB buffer: coalesce many small JSON writes into few large stream writes.
    var bufferStream = new BufferedStream(stream, 1024 * 64);
    using (var cts = new CancellationTokenSource())
    using (var timeout = cts.TimeoutAfter(DatabasesLandlord.SystemConfiguration.DatabaseOperationTimeout))
    using (var writer = new JsonTextWriter(new StreamWriter(bufferStream)))
    {
        writer.WriteStartObject();
        writer.WritePropertyName("Results");
        writer.WriteStartArray();

        // Invoked once per streamed document; each call extends the timeout.
        Action<JsonDocument> addDocument = doc =>
        {
            timeout.Delay();
            doc.ToJson().WriteTo(writer);
            writer.WriteRaw(Environment.NewLine);
        };

        Database.TransactionalStorage.Batch(accessor =>
        {
            // we may be sending a LOT of documents to the user, and most
            // of them aren't going to be relevant for other ops, so we are going to skip
            // the cache for that, to avoid filling it up very quickly
            using (DocumentCacher.SkipSettingDocumentsInDocumentCache())
            {
                if (string.IsNullOrEmpty(startsWith))
                {
                    Database.Documents.GetDocuments(start, pageSize, etag, cts.Token, addDocument);
                }
                else
                {
                    // The paging cursor is passed by ref, so copy the parameter
                    // into a local and copy the updated value back afterwards.
                    var nextPageStartInternal = nextPageStart;
                    Database.Documents.GetDocumentsWithIdStartingWith(startsWith, matches, null, start, pageSize, cts.Token, ref nextPageStartInternal, addDocument, skipAfter: skipAfter);
                    nextPageStart = nextPageStartInternal;
                }
            }
        });

        writer.WriteEndArray();
        writer.WritePropertyName("NextPageStart");
        writer.WriteValue(nextPageStart);
        writer.WriteEndObject();

        writer.Flush();
        bufferStream.Flush();
    }
}
public override void Respond(IHttpContext context)
{
    // Streams all documents (optionally filtered by an id prefix) back to the
    // caller as a {"Results":[...]} JSON envelope.
    using (context.Response.Streaming())
    {
        context.Response.ContentType = "application/json; charset=utf-8";

        using (var writer = new JsonTextWriter(new StreamWriter(context.Response.OutputStream)))
        {
            writer.WriteStartObject();
            writer.WritePropertyName("Results");
            writer.WriteStartArray();

            Database.TransactionalStorage.Batch(accessor =>
            {
                var idPrefix = context.Request.QueryString["startsWith"];

                int pageSize = context.GetPageSize(int.MaxValue);
                // No explicit page size supplied -> stream everything.
                if (string.IsNullOrEmpty(context.Request.QueryString["pageSize"]))
                    pageSize = int.MaxValue;

                // we may be sending a LOT of documents to the user, and most
                // of them aren't going to be relevant for other ops, so we are going to skip
                // the cache for that, to avoid filling it up very quickly
                using (DocumentCacher.SkipSettingDocumentsInDocumentCache())
                {
                    if (string.IsNullOrEmpty(idPrefix))
                    {
                        Database.GetDocuments(context.GetStart(), pageSize, context.GetEtagFromQueryString(),
                            doc => doc.WriteTo(writer));
                    }
                    else
                    {
                        Database.GetDocumentsWithIdStartingWith(
                            idPrefix,
                            context.Request.QueryString["matches"],
                            context.Request.QueryString["exclude"],
                            context.GetStart(),
                            pageSize,
                            doc => doc.WriteTo(writer));
                    }
                }
            });

            writer.WriteEndArray();
            writer.WriteEndObject();
            writer.Flush();
        }
    }
}
public override void Respond(IHttpContext context)
{
    // Streams query results to the client without buffering the response.
    context.Response.BufferOutput = false;

    var match = urlMatcher.Match(context.GetRequestUrl());
    var index = match.Groups[1].Value;

    var query = context.GetIndexQueryFromHttpContext(int.MaxValue);
    if (string.IsNullOrEmpty(context.Request.QueryString["pageSize"]))
        query.PageSize = int.MaxValue;

    // HEAD requests only want the headers, so do not fetch any results.
    var isHeadRequest = context.Request.HttpMethod == "HEAD";
    if (isHeadRequest)
        query.PageSize = 0;

    JsonWriter writer = null;
    Database.Query(index, query, information =>
    {
        context.Response.AddHeader("Raven-Result-Etag", information.ResultEtag.ToString());
        context.Response.AddHeader("Raven-Index-Etag", information.IndexEtag.ToString());
        context.Response.AddHeader("Raven-Is-Stale", information.IsStable ? "true" : "false");
        context.Response.AddHeader("Raven-Index", information.Index);
        context.Response.AddHeader("Raven-Total-Results", information.TotalResults.ToString(CultureInfo.InvariantCulture));
        context.Response.AddHeader("Raven-Index-Timestamp", information.IndexTimestamp.ToString(Default.DateTimeFormatsToWrite, CultureInfo.InvariantCulture));

        if (isHeadRequest)
            return;

        // The writer is only created once the header callback actually runs.
        writer = new JsonTextWriter(new StreamWriter(context.Response.OutputStream));
        writer.WriteStartObject();
        writer.WritePropertyName("Results");
        writer.WriteStartArray();
    }, result => result.WriteTo(writer, Default.Converters));

    if (isHeadRequest)
        return;

    // FIX: the header callback may never have run, leaving writer null. The
    // original code called WriteEndArray/WriteEndObject before the null check,
    // which would throw a NullReferenceException in that case — the guard must
    // cover all writer usage.
    if (writer != null)
    {
        writer.WriteEndArray();
        writer.WriteEndObject();
        writer.Flush();
        writer.Close();
    }
}
public override void Execute(object parameter)
{
    var saveFile = new SaveFileDialog
    {
        DefaultExt = ".ravendump",
        Filter = "Raven Dumps|*.ravendump;*.raven.dump",
    };
    // Replace characters that are invalid in paths so the suggested file name
    // cannot make the SaveFileDialog setter throw.
    var name = ApplicationModel.Database.Value.Name;
    var normalizedName = new string(name.Select(ch => Path.GetInvalidPathChars().Contains(ch) ? '_' : ch).ToArray());
    var defaultFileName = string.Format("Dump of {0}, {1}", normalizedName, DateTimeOffset.Now.ToString("dd MMM yyyy HH-mm", CultureInfo.InvariantCulture));
    try
    {
        saveFile.DefaultFileName = defaultFileName;
    }
    catch { } // best effort only — the dialog still works with no default name

    if (saveFile.ShowDialog() != true)
        return;

    taskModel.CanExecute.Value = false; // prevent re-entry while the export runs

    // Writer chain: file -> gzip -> text -> json. Stored in fields so the
    // async continuation can keep writing after this method returns.
    stream = saveFile.OpenFile();
    gZipStream = new GZipStream(stream, CompressionMode.Compress);
    streamWriter = new StreamWriter(gZipStream);
    jsonWriter = new JsonTextWriter(streamWriter) { Formatting = Formatting.Indented };

    taskModel.TaskStatus = TaskStatus.Started;
    output(String.Format("Exporting to {0}", saveFile.SafeFileName));
    output("Begin reading indexes");

    jsonWriter.WriteStartObject();
    jsonWriter.WritePropertyName("Indexes");
    jsonWriter.WriteStartArray();

    ReadIndexes(0)
        .Catch(exception =>
        {
            // Report the failure to the user, then finish on the UI thread.
            taskModel.ReportError(exception);
            Infrastructure.Execute.OnTheUI(() => Finish(exception));
        });
}
public void Export(string file)
{
    // Writes a gzip-compressed dump containing indexes, documents and
    // attachments as a single indented JSON object.
    var compressed = new GZipStream(File.Create(file), CompressionMode.Compress);
    using (var textWriter = new StreamWriter(compressed))
    {
        var json = new JsonTextWriter(textWriter) { Formatting = Formatting.Indented };

        json.WriteStartObject();
        WriteItemsFromDb(json, "Indexes", start => _documentDatabase.GetIndexes(start, 128));
        WriteItemsFromDb(json, "Docs", start => _documentDatabase.GetDocuments(start, 128, null));
        WriteItemsFromDb(json, "Attachments", GetAttachments);
        json.WriteEndObject();
    }
}
public void ExportDatabase()
{
    // Exports the whole database as one gzipped JSON document with a named
    // array section per item type.
    using (var stream = File.Create(outputDirectory))
    using (var gZipStream = new GZipStream(stream, CompressionMode.Compress, leaveOpen: true))
    using (var streamWriter = new StreamWriter(gZipStream))
    {
        var jsonWriter = new JsonTextWriter(streamWriter) { Formatting = Formatting.Indented };

        // Writes one named section: "Name": [ ...items... ]
        Action<string, Action<JsonTextWriter>> writeSection = (sectionName, writeItems) =>
        {
            jsonWriter.WritePropertyName(sectionName);
            jsonWriter.WriteStartArray();
            writeItems(jsonWriter);
            jsonWriter.WriteEndArray();
        };

        jsonWriter.WriteStartObject();
        writeSection("Indexes", WriteIndexes);
        writeSection("Docs", WriteDocuments);
        writeSection("Attachments", WriteAttachments);
        writeSection("Transformers", WriteTransformers);
        writeSection("Identities", WriteIdentities);
        jsonWriter.WriteEndObject();

        streamWriter.Flush();
    }
}
private void StreamToClient(Stream stream, ExportOptions options, Lazy<NameValueCollection> headers, IPrincipal user)
{
    // Streams a smuggler export to the client. The callback runs outside the
    // original request pipeline, so the ambient operation context (headers and
    // user) must be installed for the duration and restored afterwards.
    var old = CurrentOperationContext.Headers.Value;
    var oldUser = CurrentOperationContext.User.Value;
    try
    {
        CurrentOperationContext.Headers.Value = headers;
        CurrentOperationContext.User.Value = user;

        Database.TransactionalStorage.Batch(accessor =>
        {
            // 64 KB buffer: coalesce many small JSON writes into few large stream writes.
            var bufferStream = new BufferedStream(stream, 1024 * 64);
            using (var cts = new CancellationTokenSource())
            using (var timeout = cts.TimeoutAfter(DatabasesLandlord.SystemConfiguration.DatabaseOperationTimeout))
            using (var streamWriter = new StreamWriter(bufferStream))
            using (var writer = new JsonTextWriter(streamWriter))
            {
                writer.WriteStartObject();
                writer.WritePropertyName("Results");
                writer.WriteStartArray();

                var exporter = new SmugglerExporter(Database, options);

                // WriteToStream also extends the timeout per item.
                exporter.Export(item => WriteToStream(writer, item, timeout), cts.Token);

                writer.WriteEndArray();
                writer.WriteEndObject();
                writer.Flush();
                bufferStream.Flush();
            }
        });
    }
    finally
    {
        // Always restore the previous ambient context, even on failure.
        CurrentOperationContext.Headers.Value = old;
        CurrentOperationContext.User.Value = oldUser;
    }
}
public override void Respond(IHttpContext context)
{
    // Streams documents (optionally filtered by an id prefix) to the caller
    // without buffering the whole response in memory.
    context.Response.BufferOutput = false;

    using (var writer = new JsonTextWriter(new StreamWriter(context.Response.OutputStream)))
    {
        writer.WriteStartObject();
        writer.WritePropertyName("Results");
        writer.WriteStartArray();

        Database.TransactionalStorage.Batch(accessor =>
        {
            var idPrefix = context.Request.QueryString["startsWith"];

            int pageSize = context.GetPageSize(int.MaxValue);
            // No explicit limit supplied -> stream everything.
            if (string.IsNullOrEmpty(context.Request.QueryString["pageSize"]))
                pageSize = int.MaxValue;

            if (string.IsNullOrEmpty(idPrefix))
            {
                Database.GetDocuments(context.GetStart(), pageSize, context.GetEtagFromQueryString(),
                    doc => doc.WriteTo(writer));
            }
            else
            {
                Database.GetDocumentsWithIdStartingWith(
                    idPrefix,
                    context.Request.QueryString["matches"],
                    context.GetStart(),
                    pageSize,
                    doc => doc.WriteTo(writer));
            }
        });

        writer.WriteEndArray();
        writer.WriteEndObject();
        writer.Flush();
    }
}
public void WriteReadWrite()
{
    // Builds a JSON document by hand, round-trips it through the serializer,
    // and checks the re-serialized text is identical to the original.
    StringBuilder builder = new StringBuilder();
    StringWriter stringWriter = new StringWriter(builder);

    using (JsonWriter writer = new JsonTextWriter(stringWriter) { Formatting = Formatting.Indented })
    {
        writer.WriteStartArray();
        writer.WriteValue(true);

        writer.WriteStartObject();
        writer.WritePropertyName("integer");
        writer.WriteValue(99);
        writer.WritePropertyName("string");
        writer.WriteValue("how now brown cow?");
        writer.WritePropertyName("array");

        writer.WriteStartArray();
        for (int index = 0; index < 5; index++)
        {
            writer.WriteValue(index);
        }

        writer.WriteStartObject();
        writer.WritePropertyName("decimal");
        writer.WriteValue(990.00990099m);
        writer.WriteEndObject();

        writer.WriteValue(5);
        writer.WriteEndArray();

        writer.WriteEndObject();

        writer.WriteValue("This is a string.");
        writer.WriteNull();
        writer.WriteNull();
        writer.WriteEndArray();
    }

    string json = builder.ToString();

    JsonSerializer serializer = new JsonSerializer();
    object jsonObject = serializer.Deserialize(new JsonTextReader(new StringReader(json)));

    builder = new StringBuilder();
    stringWriter = new StringWriter(builder);

    using (JsonWriter writer = new JsonTextWriter(stringWriter) { Formatting = Formatting.Indented })
    {
        serializer.Serialize(writer, jsonObject);
    }

    Assert.AreEqual(json, builder.ToString());
}
private void WriteIndexes(JsonTextWriter jsonWriter)
{
    // Emits one { name, definition } object per *.index file found under the
    // index-definition folder, reporting progress after each one.
    var indexDefinitionsBasePath = Path.Combine(baseDirectory, indexDefinitionFolder);
    var indexes = Directory.GetFiles(indexDefinitionsBasePath, "*.index");

    // Use the array's Length instead of LINQ Count(): the original re-counted
    // the array via enumeration on every loop iteration.
    var totalIndexCount = indexes.Length;

    int currentIndexCount = 0;
    foreach (var file in indexes)
    {
        var ravenObj = RavenJObject.Parse(File.ReadAllText(file));

        jsonWriter.WriteStartObject();
        jsonWriter.WritePropertyName("name");
        jsonWriter.WriteValue(ravenObj.Value<string>("Name"));
        jsonWriter.WritePropertyName("definition");
        ravenObj.WriteTo(jsonWriter);
        jsonWriter.WriteEndObject();

        currentIndexCount++;
        ReportProgress("indexes", currentIndexCount, totalIndexCount);
    }
}
public void WriteObjectNestedInConstructor()
{
    // An object written inside a constructor must render as a call argument.
    var output = new StringBuilder();
    var stringWriter = new StringWriter(output);

    using (JsonWriter writer = new JsonTextWriter(stringWriter))
    {
        writer.WriteStartObject();
        writer.WritePropertyName("con");

        writer.WriteStartConstructor("Ext.data.JsonStore");
        writer.WriteStartObject();
        writer.WritePropertyName("aa");
        writer.WriteValue("aa");
        writer.WriteEndObject();
        writer.WriteEndConstructor();

        writer.WriteEndObject();
    }

    Assert.AreEqual(@"{""con"":new Ext.data.JsonStore({""aa"":""aa""})}", output.ToString());
}
protected override Task SerializeToStreamAsync(Stream stream, TransportContext context)
{
    // Serializes the batched responses as a JSON array, one entry per request.
    var streamWriter = new StreamWriter(stream);
    var writer = new JsonTextWriter(streamWriter);
    writer.WriteStartArray();
    foreach (var result in results)
    {
        // A null slot marks a request that produced no response.
        if (result == null)
        {
            writer.WriteNull();
            continue;
        }
        writer.WriteStartObject();
        writer.WritePropertyName("Status");
        writer.WriteValue((int) result.StatusCode);
        writer.WritePropertyName("Headers");
        writer.WriteStartObject();
        // NOTE(review): a multi-valued header emits the same property name once
        // per value, producing duplicate keys in the JSON object — presumably
        // the consumer tolerates this; confirm before changing the format.
        foreach (var header in result.Headers.Concat(result.Content.Headers))
        {
            foreach (var val in header.Value)
            {
                writer.WritePropertyName(header.Key);
                writer.WriteValue(val);
            }
        }
        writer.WriteEndObject();
        writer.WritePropertyName("Result");
        // When the content carries no data, "Result" is left without a value.
        var jsonContent = (JsonContent)result.Content;
        if(jsonContent.Data != null)
            jsonContent.Data.WriteTo(writer, Default.Converters);
        writer.WriteEndObject();
    }
    writer.WriteEndArray();
    // Flushing the JSON writer pushes buffered text through to the stream.
    writer.Flush();
    return new CompletedTask();
}
private static string GenerateOutput(Dictionary<string, object> result, int indent)
{
    // Renders a merged-document map into indented JSON text, marking conflict
    // regions with comments and re-indenting nested resolver output by hand.
    var stringWriter = new StringWriter();
    var writer = new JsonTextWriter(stringWriter) { Formatting = Formatting.Indented };
    writer.WriteStartObject();
    foreach (var o in result)
    {
        writer.WritePropertyName(o.Key);

        // Plain token: write as-is.
        var ravenJToken = o.Value as RavenJToken;
        if (ravenJToken != null)
        {
            ravenJToken.WriteTo(writer);
            continue;
        }

        // Unresolved conflict: emit all candidate values wrapped in markers.
        var conflicted = o.Value as Conflicted;
        if (conflicted != null)
        {
            writer.WriteComment(">>>> conflict start");
            writer.WriteStartArray();
            foreach (var token in conflicted.Values)
            {
                token.WriteTo(writer);
            }
            writer.WriteEndArray();
            writer.WriteComment("<<<< conflict end");
            continue;
        }

        // Auto-merged array: emit the merged value wrapped in warning markers.
        var arrayWithWarning = o.Value as ArrayWithWarning;
        if(arrayWithWarning != null)
        {
            writer.WriteComment(">>>> auto merged array start");
            arrayWithWarning.MergedArray.WriteTo(writer);
            writer.WriteComment("<<<< auto merged array end");
            continue;
        }

        // Nested resolver: splice its (already formatted) text in raw, adding
        // this level's indentation before every line after the first.
        var resolver = o.Value as ConflictsResolver;
        if(resolver != null)
        {
            using(var stringReader = new StringReader(resolver.Resolve(indent + 1)))
            {
                var first = true;
                string line ;
                while((line = stringReader.ReadLine()) != null)
                {
                    if(first == false)
                    {
                        writer.WriteRaw(Environment.NewLine);
                        for (var i = 0; i < indent; i++)
                        {
                            writer.WriteRaw(new string(writer.IndentChar, writer.Indentation));
                        }
                    }

                    // First line uses WriteRawValue so the writer records that a
                    // property value was produced; the rest are raw text.
                    if(first)
                        writer.WriteRawValue(line);
                    else
                        writer.WriteRaw(line);

                    first = false;
                }
            }
            continue;
        }

        throw new InvalidOperationException("Could not understand how to deal with: " + o.Value);
    }
    writer.WriteEndObject();
    return stringWriter.GetStringBuilder().ToString();
}
public void WriteRawInObject()
{
    // WriteRaw inside an object injects the text verbatim — property name
    // included — and bypasses the indented formatting entirely.
    var output = new StringBuilder();
    var stringWriter = new StringWriter(output);

    using (JsonWriter writer = new JsonTextWriter(stringWriter))
    {
        writer.Formatting = Formatting.Indented;

        writer.WriteStartObject();
        writer.WriteRaw(@"""PropertyName"":[1,2,3,4,5]");
        writer.WriteEnd();
    }

    Assert.AreEqual(@"{""PropertyName"":[1,2,3,4,5]}", output.ToString());
}
// Streams one batch of subscription documents to the client, honoring the
// subscription's criteria, batch-size limits and acknowledgement etag. Writes
// { "Results": [...], "LastProcessedEtag": ... } and records the batch-sent
// time when anything was dispatched.
private void StreamToClient(long id, SubscriptionActions subscriptions, Stream stream)
{
    var sentDocuments = false;

    // 64 KB buffer: coalesce many small JSON writes into few network writes.
    var bufferStream = new BufferedStream(stream, 1024 * 64);
    var lastBatchSentTime = Stopwatch.StartNew();
    using (var writer = new JsonTextWriter(new StreamWriter(bufferStream)))
    {
        var options = subscriptions.GetBatchOptions(id);

        writer.WriteStartObject();
        writer.WritePropertyName("Results");
        writer.WriteStartArray();

        using (var cts = new CancellationTokenSource())
        using (var timeout = cts.TimeoutAfter(DatabasesLandlord.SystemConfiguration.DatabaseOperationTimeout))
        {
            Etag lastProcessedDocEtag = null;

            var batchSize = 0;              // bytes accumulated in this batch
            var batchDocCount = 0;          // documents actually sent
            var processedDocumentsCount = 0; // documents examined (sent or not)
            var hasMoreDocs = false;
            var config = subscriptions.GetSubscriptionConfig(id);
            var startEtag = config.AckEtag;
            var criteria = config.Criteria;

            bool isPrefixCriteria = !string.IsNullOrWhiteSpace(criteria.KeyStartsWith);

            // Returns false to stop the document feed, true to keep going.
            Func<JsonDocument, bool> addDocument = doc =>
            {
                timeout.Delay();
                if (doc == null)
                {
                    // we only have this heartbeat when the streaming has gone on for a long time
                    // and we haven't sent anything to the user in a while (because of filtering, skipping, etc).
                    writer.WriteRaw(Environment.NewLine);
                    writer.Flush();
                    if (lastBatchSentTime.ElapsedMilliseconds > 30000)
                        return false;
                    return true;
                }
                processedDocumentsCount++;

                // We cant continue because we have already maxed out the batch bytes size.
                if (options.MaxSize.HasValue && batchSize >= options.MaxSize)
                    return false;

                // We cant continue because we have already maxed out the amount of documents to send.
                if (batchDocCount >= options.MaxDocCount)
                    return false;

                // We can continue because we are ignoring system documents.
                if (doc.Key.StartsWith("Raven/", StringComparison.InvariantCultureIgnoreCase))
                    return true;

                // We can continue because we are ignoring the document as it doesn't fit the criteria.
                if (MatchCriteria(criteria, doc) == false)
                    return true;

                doc.ToJson().WriteTo(writer);
                writer.WriteRaw(Environment.NewLine);

                batchSize += doc.SerializedSizeOnDisk;
                batchDocCount++;

                return true; // We get the next document
            };

            var retries = 0;
            do
            {
                var lastProcessedDocumentsCount = processedDocumentsCount;

                Database.TransactionalStorage.Batch(accessor =>
                {
                    // we may be sending a LOT of documents to the user, and most
                    // of them aren't going to be relevant for other ops, so we are going to skip
                    // the cache for that, to avoid filling it up very quickly
                    using (DocumentCacher.SkipSetAndGetDocumentsInDocumentCache())
                    {
                        if (isPrefixCriteria)
                        {
                            // If we don't get any document from GetDocumentsWithIdStartingWith it could be that we are in presence of a lagoon of uninteresting documents, so we are hitting a timeout.
                            lastProcessedDocEtag = Database.Documents.GetDocumentsWithIdStartingWith(criteria.KeyStartsWith, options.MaxDocCount - batchDocCount, startEtag, cts.Token, addDocument);

                            hasMoreDocs = false;
                        }
                        else
                        {
                            // It doesn't matter if we match the criteria or not, the document has been already processed.
                            lastProcessedDocEtag = Database.Documents.GetDocuments(-1, options.MaxDocCount - batchDocCount, startEtag, cts.Token, addDocument);

                            // If we don't get any document from GetDocuments it may be a signal that something is wrong.
                            if (lastProcessedDocEtag == null)
                            {
                                hasMoreDocs = false;
                            }
                            else
                            {
                                var lastDocEtag = accessor.Staleness.GetMostRecentDocumentEtag();
                                hasMoreDocs = EtagUtil.IsGreaterThan(lastDocEtag, lastProcessedDocEtag);

                                startEtag = lastProcessedDocEtag;
                            }

                            // Reset the retry counter as soon as a pass sends anything.
                            retries = lastProcessedDocumentsCount == batchDocCount ? retries : 0;
                        }
                    }
                });

                if (lastBatchSentTime.ElapsedMilliseconds >= 30000)
                {
                    if (batchDocCount == 0)
                        log.Warn("Subscription filtered out all possible documents for {0:#,#;;0} seconds in a row, stopping operation", lastBatchSentTime.Elapsed.TotalSeconds);
                    break;
                }

                if (lastProcessedDocumentsCount == processedDocumentsCount)
                {
                    // A pass that examined nothing at all hints at a storage
                    // problem; retry a bounded number of times.
                    if (retries == 3)
                    {
                        log.Warn("Subscription processing did not end up replicating any documents for 3 times in a row, stopping operation", retries);
                    }
                    else
                    {
                        log.Warn("Subscription processing did not end up replicating any documents, due to possible storage error, retry number: {0}", retries);
                    }
                    retries++;
                }
            } while (retries < 3 && hasMoreDocs && batchDocCount < options.MaxDocCount && (options.MaxSize.HasValue == false || batchSize < options.MaxSize));

            writer.WriteEndArray();

            if (batchDocCount > 0 || processedDocumentsCount > 0 || isPrefixCriteria)
            {
                writer.WritePropertyName("LastProcessedEtag");
                writer.WriteValue(lastProcessedDocEtag.ToString());

                sentDocuments = true;
            }

            writer.WriteEndObject();
            writer.Flush();

            bufferStream.Flush();
        }
    }

    if (sentDocuments)
        subscriptions.UpdateBatchSentTime(id);
}
public void WriteEndOnProperty()
{
    // Closing the object while a property value is still pending must force
    // the property to null.
    var stringWriter = new StringWriter();

    var writer = new JsonTextWriter(stringWriter);
    writer.QuoteChar = '\'';

    writer.WriteStartObject();
    writer.WritePropertyName("Blah");
    writer.WriteEnd();

    Assert.AreEqual("{'Blah':null}", stringWriter.ToString());
}
public void Path()
{
    // Verifies that JsonWriter.Path tracks the current position through nested
    // arrays, objects and constructors, and that the final text matches.
    // (The original declared unused "text"/"data" locals built via
    // Encoding.UTF8.GetBytes — removed, they were never referenced.)
    StringBuilder sb = new StringBuilder();
    StringWriter sw = new StringWriter(sb);

    using (JsonTextWriter writer = new JsonTextWriter(sw))
    {
        writer.Formatting = Formatting.Indented;

        writer.WriteStartArray();
        Assert.AreEqual("", writer.Path);
        writer.WriteStartObject();
        Assert.AreEqual("[0]", writer.Path);
        writer.WritePropertyName("Property1");
        Assert.AreEqual("[0].Property1", writer.Path);
        writer.WriteStartArray();
        Assert.AreEqual("[0].Property1", writer.Path);
        writer.WriteValue(1);
        Assert.AreEqual("[0].Property1[0]", writer.Path);
        writer.WriteStartArray();
        Assert.AreEqual("[0].Property1[1]", writer.Path);
        writer.WriteStartArray();
        Assert.AreEqual("[0].Property1[1][0]", writer.Path);
        writer.WriteStartArray();
        Assert.AreEqual("[0].Property1[1][0][0]", writer.Path);
        // WriteEndObject pops all the still-open arrays back out to the object.
        writer.WriteEndObject();
        Assert.AreEqual("[0]", writer.Path);
        writer.WriteStartObject();
        Assert.AreEqual("[1]", writer.Path);
        writer.WritePropertyName("Property2");
        Assert.AreEqual("[1].Property2", writer.Path);
        writer.WriteStartConstructor("Constructor1");
        Assert.AreEqual("[1].Property2", writer.Path);
        writer.WriteNull();
        Assert.AreEqual("[1].Property2[0]", writer.Path);
        writer.WriteStartArray();
        Assert.AreEqual("[1].Property2[1]", writer.Path);
        writer.WriteValue(1);
        Assert.AreEqual("[1].Property2[1][0]", writer.Path);
        writer.WriteEnd();
        Assert.AreEqual("[1].Property2[1]", writer.Path);
        writer.WriteEndObject();
        Assert.AreEqual("[1]", writer.Path);
        writer.WriteEndArray();
        Assert.AreEqual("", writer.Path);
    }

    Assert.AreEqual(@"[ { ""Property1"": [ 1, [ [ [] ] ] ] }, { ""Property2"": new Constructor1( null, [ 1 ] ) } ]", sb.ToString());
}
public void Indentation()
{
    // Exercises the formatting knobs (indentation size/char, name quoting,
    // quote char) and verifies each setter round-trips through its getter,
    // then checks the rendered output.
    StringBuilder sb = new StringBuilder();
    StringWriter sw = new StringWriter(sb);

    using (JsonTextWriter jsonWriter = new JsonTextWriter(sw))
    {
        jsonWriter.Formatting = Formatting.Indented;
        Assert.AreEqual(Formatting.Indented, jsonWriter.Formatting);

        jsonWriter.Indentation = 5;
        Assert.AreEqual(5, jsonWriter.Indentation);
        jsonWriter.IndentChar = '_';
        Assert.AreEqual('_', jsonWriter.IndentChar);
        jsonWriter.QuoteName = true;
        Assert.AreEqual(true, jsonWriter.QuoteName);
        jsonWriter.QuoteChar = '\'';
        Assert.AreEqual('\'', jsonWriter.QuoteChar);

        jsonWriter.WriteStartObject();
        jsonWriter.WritePropertyName("propertyName");
        jsonWriter.WriteValue(double.NaN); // NaN is rendered as a bare literal
        jsonWriter.WriteEndObject();
    }

    // NOTE(review): this literal looks like a flattened multi-line verbatim
    // string; it is preserved byte-for-byte here — confirm against the
    // original test before reformatting.
    string expected = @"{ _____'propertyName': NaN }";
    string result = sb.ToString();

    Assert.AreEqual(expected, result);
}
// Exports indexes, documents, attachments and transformers to a gzipped JSON
// dump. Returns the path of the file written (also the resume point recorded
// into a SmugglerExportException on partial failure). When "stream" is null
// this method creates — and owns — the output file itself.
public virtual async Task<string> ExportData(Stream stream, SmugglerOptions options, bool incremental, bool lastEtagsFromFile, PeriodicBackupStatus backupStatus)
{
    options = options ?? SmugglerOptions;
    if (options == null)
        throw new ArgumentNullException("options");

    var file = options.BackupPath;

#if !SILVERLIGHT
    if (incremental)
    {
        // Incremental mode treats BackupPath as a directory of dated dumps.
        if (Directory.Exists(options.BackupPath) == false)
        {
            if (File.Exists(options.BackupPath))
                options.BackupPath = Path.GetDirectoryName(options.BackupPath) ?? options.BackupPath;
            else
                Directory.CreateDirectory(options.BackupPath);
        }

        // Resume etags come either from the etags file or from the backup status object.
        if (lastEtagsFromFile && backupStatus == null)
            ReadLastEtagsFromFile(options);
        if (backupStatus != null)
            ReadLastEtagsFromClass(options, backupStatus);

        file = Path.Combine(options.BackupPath, SystemTime.UtcNow.ToString("yyyy-MM-dd-HH-mm", CultureInfo.InvariantCulture) + ".ravendb-incremental-dump");
        if (File.Exists(file))
        {
            // Disambiguate multiple dumps within the same minute with a counter.
            var counter = 1;
            while (true)
            {
                file = Path.Combine(options.BackupPath, SystemTime.UtcNow.ToString("yyyy-MM-dd-HH-mm", CultureInfo.InvariantCulture) + " - " + counter + ".ravendb-incremental-dump");

                if (File.Exists(file) == false)
                    break;
                counter++;
            }
        }
    }
#else
    if(incremental)
        throw new NotSupportedException("Incremental exports are not supported in SL.");
#endif

    await DetectServerSupportedFeatures();

    SmugglerExportException lastException = null;

    bool ownedStream = stream == null;
    try
    {
        stream = stream ?? File.Create(file);
        // leaveOpen: the outer stream's lifetime is managed in the finally below.
        using (var gZipStream = new GZipStream(stream, CompressionMode.Compress,
#if SILVERLIGHT
            CompressionLevel.BestCompression,
#endif
            leaveOpen: true))
        using (var streamWriter = new StreamWriter(gZipStream))
        {
            var jsonWriter = new JsonTextWriter(streamWriter)
            {
                Formatting = Formatting.Indented
            };
            jsonWriter.WriteStartObject();

            jsonWriter.WritePropertyName("Indexes");
            jsonWriter.WriteStartArray();
            if ((options.OperateOnTypes & ItemType.Indexes) == ItemType.Indexes)
            {
                await ExportIndexes(jsonWriter);
            }
            jsonWriter.WriteEndArray();

            jsonWriter.WritePropertyName("Docs");
            jsonWriter.WriteStartArray();
            if (options.OperateOnTypes.HasFlag(ItemType.Documents))
            {
                try
                {
                    options.LastDocsEtag = await ExportDocuments(options, jsonWriter, options.LastDocsEtag);
                }
                catch (SmugglerExportException e)
                {
                    // Record where we stopped so a later incremental run can resume.
                    options.LastDocsEtag = e.LastEtag;
                    e.File = file;
                    lastException = e;
                }
            }
            jsonWriter.WriteEndArray();

            jsonWriter.WritePropertyName("Attachments");
            jsonWriter.WriteStartArray();
            // Skipped when a prior stage already failed (lastException set).
            if (options.OperateOnTypes.HasFlag(ItemType.Attachments) && lastException == null)
            {
                try
                {
                    options.LastAttachmentEtag = await ExportAttachments(jsonWriter, options.LastAttachmentEtag);
                }
                catch (SmugglerExportException e)
                {
                    options.LastAttachmentEtag = e.LastEtag;
                    e.File = file;
                    lastException = e;
                }
            }
            jsonWriter.WriteEndArray();

            jsonWriter.WritePropertyName("Transformers");
            jsonWriter.WriteStartArray();
            if (options.OperateOnTypes.HasFlag(ItemType.Transformers) && lastException == null)
            {
                await ExportTransformers(jsonWriter);
            }
            jsonWriter.WriteEndArray();

            jsonWriter.WriteEndObject();
            streamWriter.Flush();
        }

#if !SILVERLIGHT
        if (incremental && lastEtagsFromFile)
            WriteLastEtagsFromFile(options);
#endif

        if (lastException != null)
            throw lastException;
        return file;
    }
    finally
    {
        // Dispose the stream only if this method created it.
        if (ownedStream && stream != null)
            stream.Dispose();
    }
}
// Exports indexes, documents, attachments, transformers, deletions and
// identities to a gzipped JSON dump, tracking per-type resume etags in the
// returned ExportDataResult. When exportOptions.ToStream is null the method
// creates — and owns — the output file itself.
public virtual async Task<ExportDataResult> ExportData(SmugglerExportOptions exportOptions)
{
    Operations.Configure(SmugglerOptions);
    Operations.Initialize(SmugglerOptions);

    var result = new ExportDataResult
    {
        FilePath = exportOptions.ToFile,
        LastAttachmentsEtag = SmugglerOptions.StartAttachmentsEtag,
        LastDocsEtag = SmugglerOptions.StartDocsEtag,
        LastDocDeleteEtag = SmugglerOptions.StartDocsDeletionEtag,
        LastAttachmentsDeleteEtag = SmugglerOptions.StartAttachmentsDeletionEtag
    };

    if (SmugglerOptions.Incremental)
    {
        // Incremental mode treats FilePath as a directory of dated dumps.
        if (Directory.Exists(result.FilePath) == false)
        {
            if (File.Exists(result.FilePath))
                result.FilePath = Path.GetDirectoryName(result.FilePath) ?? result.FilePath;
            else
                Directory.CreateDirectory(result.FilePath);
        }

        // Only read resume etags from file when the caller supplied no explicit start etags.
        if (SmugglerOptions.StartDocsEtag == Etag.Empty && SmugglerOptions.StartAttachmentsEtag == Etag.Empty)
        {
            ReadLastEtagsFromFile(result);
        }

        result.FilePath = Path.Combine(result.FilePath, SystemTime.UtcNow.ToString("yyyy-MM-dd-HH-mm", CultureInfo.InvariantCulture) + ".ravendb-incremental-dump");
        if (File.Exists(result.FilePath))
        {
            // Disambiguate multiple dumps within the same minute with a counter.
            var counter = 1;
            while (true)
            {
                // ReSharper disable once AssignNullToNotNullAttribute
                result.FilePath = Path.Combine(Path.GetDirectoryName(result.FilePath), SystemTime.UtcNow.ToString("yyyy-MM-dd-HH-mm", CultureInfo.InvariantCulture) + " - " + counter + ".ravendb-incremental-dump");

                if (File.Exists(result.FilePath) == false)
                    break;
                counter++;
            }
        }
    }

    SmugglerExportException lastException = null;

    bool ownedStream = exportOptions.ToStream == null;
    var stream = exportOptions.ToStream ?? File.Create(result.FilePath);

    try
    {
        await DetectServerSupportedFeatures(exportOptions.From);
    }
    catch (WebException e)
    {
        // Feature detection failure is non-fatal: fall back to legacy mode.
        Operations.ShowProgress("Failed to query server for supported features. Reason : " + e.Message);
        SetLegacyMode(); //could not detect supported features, then run in legacy mode

        // NOTE(review): detection failure previously aborted the export with a
        // SmugglerExportException; kept here disabled for reference.
        // lastException = new SmugglerExportException
        // {
        //     LastEtag = Etag.Empty,
        //     File = ownedStream ? result.FilePath : null
        // };
    }

    try
    {
        // leaveOpen: the outer stream's lifetime is managed in the finally below.
        using (var gZipStream = new GZipStream(stream, CompressionMode.Compress, leaveOpen: true))
        using (var streamWriter = new StreamWriter(gZipStream))
        {
            var jsonWriter = new JsonTextWriter(streamWriter)
            {
                Formatting = Formatting.Indented
            };
            jsonWriter.WriteStartObject();

            jsonWriter.WritePropertyName("Indexes");
            jsonWriter.WriteStartArray();
            if (SmugglerOptions.OperateOnTypes.HasFlag(ItemType.Indexes))
            {
                await ExportIndexes(exportOptions.From, jsonWriter);
            }
            jsonWriter.WriteEndArray();

            // used to synchronize max returned values for put/delete operations
            var maxEtags = Operations.FetchCurrentMaxEtags();

            jsonWriter.WritePropertyName("Docs");
            jsonWriter.WriteStartArray();
            if (SmugglerOptions.OperateOnTypes.HasFlag(ItemType.Documents))
            {
                try
                {
                    result.LastDocsEtag = await ExportDocuments(exportOptions.From, jsonWriter, result.LastDocsEtag, maxEtags.LastDocsEtag);
                }
                catch (SmugglerExportException e)
                {
                    // Record where we stopped so a later incremental run can resume.
                    result.LastDocsEtag = e.LastEtag;
                    e.File = ownedStream ? result.FilePath : null;
                    lastException = e;
                }
            }
            jsonWriter.WriteEndArray();

            jsonWriter.WritePropertyName("Attachments");
            jsonWriter.WriteStartArray();
            // Skipped when a prior stage already failed (lastException set).
            if (SmugglerOptions.OperateOnTypes.HasFlag(ItemType.Attachments) && lastException == null)
            {
                try
                {
                    result.LastAttachmentsEtag = await ExportAttachments(exportOptions.From, jsonWriter, result.LastAttachmentsEtag, maxEtags.LastAttachmentsEtag);
                }
                catch (SmugglerExportException e)
                {
                    result.LastAttachmentsEtag = e.LastEtag;
                    e.File = ownedStream ? result.FilePath : null;
                    lastException = e;
                }
            }
            jsonWriter.WriteEndArray();

            jsonWriter.WritePropertyName("Transformers");
            jsonWriter.WriteStartArray();
            if (SmugglerOptions.OperateOnTypes.HasFlag(ItemType.Transformers) && lastException == null)
            {
                await ExportTransformers(exportOptions.From, jsonWriter);
            }
            jsonWriter.WriteEndArray();

            if (SmugglerOptions.ExportDeletions)
            {
                await ExportDeletions(jsonWriter, result, maxEtags);
            }

            await ExportIdentities(jsonWriter, SmugglerOptions.OperateOnTypes);

            jsonWriter.WriteEndObject();
            streamWriter.Flush();
        }

        if (SmugglerOptions.Incremental)
            WriteLastEtagsToFile(result, result.FilePath);
        if (SmugglerOptions.ExportDeletions)
        {
            Operations.PurgeTombstones(result);
        }

        if (lastException != null)
            throw lastException;
        return result;
    }
    finally
    {
        // Dispose the stream only if this method created it.
        if (ownedStream && stream != null)
            stream.Dispose();
    }
}
/// <summary>
/// Verifies that <c>JsonTextWriter.WriteState</c> and <c>Path</c> track the
/// writer's position while an object containing a nested array is written,
/// and return to their initial values once the root object is closed.
/// </summary>
public void State()
{
    var builder = new StringBuilder();
    var stringWriter = new StringWriter(builder);

    using (JsonWriter writer = new JsonTextWriter(stringWriter))
    {
        // A fresh writer starts in the Start state.
        Assert.AreEqual(WriteState.Start, writer.WriteState);

        writer.WriteStartObject();
        Assert.AreEqual(WriteState.Object, writer.WriteState);
        Assert.AreEqual("", writer.Path);

        writer.WritePropertyName("CPU");
        Assert.AreEqual(WriteState.Property, writer.WriteState);
        Assert.AreEqual("CPU", writer.Path);

        writer.WriteValue("Intel");
        Assert.AreEqual(WriteState.Object, writer.WriteState);
        Assert.AreEqual("CPU", writer.Path);

        writer.WritePropertyName("Drives");
        Assert.AreEqual(WriteState.Property, writer.WriteState);
        Assert.AreEqual("Drives", writer.Path);

        writer.WriteStartArray();
        Assert.AreEqual(WriteState.Array, writer.WriteState);

        writer.WriteValue("DVD read/writer");
        Assert.AreEqual(WriteState.Array, writer.WriteState);
        Assert.AreEqual("Drives[0]", writer.Path);

        // WriteEnd closes the innermost open container (the array).
        writer.WriteEnd();
        Assert.AreEqual(WriteState.Object, writer.WriteState);
        Assert.AreEqual("Drives", writer.Path);

        writer.WriteEndObject();
        Assert.AreEqual(WriteState.Start, writer.WriteState);
        Assert.AreEqual("", writer.Path);
    }
}
/// <summary>
/// Verifies that calling <c>Close()</c> on a JsonTextWriter while an array
/// and the root object are still open auto-closes them, producing complete,
/// well-formed indented JSON.
/// </summary>
public void CloseWithRemainingContent()
{
    var builder = new StringBuilder();
    var stringWriter = new StringWriter(builder);

    using (JsonWriter writer = new JsonTextWriter(stringWriter))
    {
        writer.Formatting = Formatting.Indented;

        writer.WriteStartObject();
        writer.WritePropertyName("CPU");
        writer.WriteValue("Intel");
        writer.WritePropertyName("PSU");
        writer.WriteValue("500W");
        writer.WritePropertyName("Drives");
        writer.WriteStartArray();
        writer.WriteValue("DVD read/writer");
        writer.WriteComment("(broken)");
        writer.WriteValue("500 gigabyte hard drive");
        writer.WriteValue("200 gigabype hard drive");

        // No WriteEnd/WriteEndObject: Close() must finish the open containers.
        writer.Close();
    }

    string expected = @"{
  ""CPU"": ""Intel"",
  ""PSU"": ""500W"",
  ""Drives"": [
    ""DVD read/writer""
    /*(broken)*/,
    ""500 gigabyte hard drive"",
    ""200 gigabype hard drive""
  ]
}";

    Assert.AreEqual(expected, builder.ToString());
}
/// <summary>
/// Verifies <c>Formatting.Indented</c> output: two-space indentation, one
/// value per line, comments kept inline with the array, and the writer's
/// state returning to <c>Start</c> once the root object is closed.
/// </summary>
public void Indenting()
{
    var builder = new StringBuilder();
    var stringWriter = new StringWriter(builder);

    using (JsonWriter writer = new JsonTextWriter(stringWriter))
    {
        writer.Formatting = Formatting.Indented;

        writer.WriteStartObject();
        writer.WritePropertyName("CPU");
        writer.WriteValue("Intel");
        writer.WritePropertyName("PSU");
        writer.WriteValue("500W");
        writer.WritePropertyName("Drives");
        writer.WriteStartArray();
        writer.WriteValue("DVD read/writer");
        writer.WriteComment("(broken)");
        writer.WriteValue("500 gigabyte hard drive");
        writer.WriteValue("200 gigabype hard drive");
        writer.WriteEnd();
        writer.WriteEndObject();

        Assert.AreEqual(WriteState.Start, writer.WriteState);
    }

    // Expected layout:
    // {
    //   "CPU": "Intel",
    //   "PSU": "500W",
    //   "Drives": [
    //     "DVD read/writer"
    //     /*(broken)*/,
    //     "500 gigabyte hard drive",
    //     "200 gigabype hard drive"
    //   ]
    // }
    string expected = @"{
  ""CPU"": ""Intel"",
  ""PSU"": ""500W"",
  ""Drives"": [
    ""DVD read/writer""
    /*(broken)*/,
    ""500 gigabyte hard drive"",
    ""200 gigabype hard drive""
  ]
}";

    Assert.AreEqual(expected, builder.ToString());
}
/// <summary>
/// Exports indexes, documents and attachments (as selected by
/// <c>options.OperateOnTypes</c>) into a gzipped dump file and returns the
/// path of the file written.
/// </summary>
/// <param name="options">Export options; falls back to the instance-level
/// <c>smugglerOptions</c> when null.</param>
/// <param name="incremental">When true, writes a timestamped
/// <c>.ravendb-incremental-dump</c> file into the BackupPath directory
/// instead of writing to BackupPath directly.</param>
/// <param name="lastEtagsFromFile">When true (and incremental), reads the
/// previously exported etags before the export and persists the updated
/// ones afterwards.</param>
/// <returns>The full path of the dump file that was created.</returns>
/// <exception cref="ArgumentNullException">Thrown when no options are
/// available from either the parameter or the instance.</exception>
public string ExportData(SmugglerOptions options, bool incremental, bool lastEtagsFromFile)
{
    options = options ?? smugglerOptions;
    if (options == null)
        throw new ArgumentNullException("options");

    var file = options.BackupPath;

    if (incremental)
    {
        // Incremental dumps always go into a directory; if BackupPath points
        // at an existing file, fall back to its containing directory.
        if (Directory.Exists(options.BackupPath) == false)
        {
            if (File.Exists(options.BackupPath))
                options.BackupPath = Path.GetDirectoryName(options.BackupPath) ?? options.BackupPath;
            else
                Directory.CreateDirectory(options.BackupPath);
        }

        if (lastEtagsFromFile)
            ReadLastEtagsFromFile(options);

        file = Path.Combine(options.BackupPath,
                            SystemTime.UtcNow.ToString("yyyy-MM-dd-HH-mm", CultureInfo.InvariantCulture) +
                            ".ravendb-incremental-dump");

        if (File.Exists(file))
        {
            // Disambiguate multiple exports started within the same minute.
            var counter = 1;
            while (true)
            {
                file = Path.Combine(options.BackupPath,
                                    SystemTime.UtcNow.ToString("yyyy-MM-dd-HH-mm", CultureInfo.InvariantCulture) +
                                    " - " + counter + ".ravendb-incremental-dump");
                if (File.Exists(file) == false)
                    break;
                counter++;
            }
        }
    }

    using (var streamWriter = new StreamWriter(new GZipStream(File.Create(file), CompressionMode.Compress)))
    {
        var jsonWriter = new JsonTextWriter(streamWriter)
        {
            Formatting = Formatting.Indented
        };
        jsonWriter.WriteStartObject();

        jsonWriter.WritePropertyName("Indexes");
        jsonWriter.WriteStartArray();
        // HasFlag is behavior-identical to the (x & flag) == flag checks this
        // method used, and keeps the type tests consistent with the async
        // exporter elsewhere in this file.
        if (options.OperateOnTypes.HasFlag(ItemType.Indexes))
        {
            ExportIndexes(jsonWriter);
        }
        jsonWriter.WriteEndArray();

        jsonWriter.WritePropertyName("Docs");
        jsonWriter.WriteStartArray();
        if (options.OperateOnTypes.HasFlag(ItemType.Documents))
        {
            // Track the last exported etag so incremental runs can resume.
            options.LastDocsEtag = ExportDocuments(options, jsonWriter, options.LastDocsEtag);
        }
        jsonWriter.WriteEndArray();

        jsonWriter.WritePropertyName("Attachments");
        jsonWriter.WriteStartArray();
        if (options.OperateOnTypes.HasFlag(ItemType.Attachments))
        {
            options.LastAttachmentEtag = ExportAttachments(jsonWriter, options.LastAttachmentEtag);
        }
        jsonWriter.WriteEndArray();

        jsonWriter.WriteEndObject();
        streamWriter.Flush();
    }

    if (incremental && lastEtagsFromFile)
        WriteLastEtagsFromFile(options);

    return file;
}
/// <summary>
/// Serializes the merged conflict-resolution result into two indented JSON
/// strings: the document body (wrapped in a root object) and the "@metadata"
/// section, which is routed to its own writer.
/// </summary>
private MergeResult GenerateOutput(Dictionary<string, object> result, int indent)
{
    var documentBuffer = new StringWriter();
    var documentWriter = new JsonTextWriter(documentBuffer)
    {
        Formatting = Formatting.Indented,
        IndentChar = ' ',
        Indentation = 4
    };

    var metadataBuffer = new StringWriter();
    var metadataWriter = new JsonTextWriter(metadataBuffer)
    {
        Formatting = Formatting.Indented,
        IndentChar = ' ',
        Indentation = 4
    };

    documentWriter.WriteStartObject();

    foreach (var entry in result)
    {
        var isMetadata = entry.Key == "@metadata";
        var resolver = entry.Value as ConflictsResolver;

        if (resolver == null)
        {
            // Plain token: metadata goes to its own writer, everything else
            // into the document body.
            WriteToken(isMetadata ? metadataWriter : documentWriter, entry.Key, entry.Value);
        }
        else
        {
            // Nested conflict: metadata restarts at indent 0, document
            // content nests one level deeper.
            WriteConflictResolver(entry.Key, documentWriter, metadataWriter, resolver,
                                  isMetadata ? 0 : indent + 1);
        }
    }

    documentWriter.WriteEndObject();

    return new MergeResult()
    {
        Document = documentBuffer.GetStringBuilder().ToString(),
        Metadata = metadataBuffer.GetStringBuilder().ToString()
    };
}
/// <summary>
/// Verifies that <c>WriteRawValue</c> injects pre-serialized JSON verbatim
/// as a property value, including when repeated for several properties.
/// </summary>
public void WriteRawValue()
{
    var builder = new StringBuilder();
    var stringWriter = new StringWriter(builder);

    using (JsonWriter writer = new JsonTextWriter(stringWriter))
    {
        const string rawJson = "[1,2]";

        writer.WriteStartObject();
        for (var i = 0; i < 3; i++)
        {
            writer.WritePropertyName("d" + i);
            writer.WriteRawValue(rawJson);
        }
        writer.WriteEndObject();
    }

    Assert.AreEqual(@"{""d0"":[1,2],""d1"":[1,2],""d2"":[1,2]}", builder.ToString());
}