        private void StreamToClient(Stream stream, int pageSize, Etag etag, OrderedPartCollection<AbstractFileReadTrigger> readTriggers)
        {
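            // Streams every file after the given etag as a { "Results": [ ... ] }
            // JSON envelope; files vetoed by a read trigger are skipped. The
            // timeout.Delay() call below is assumed to push the operation
            // deadline forward while the loop keeps making progress.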
            using (var cts = new CancellationTokenSource())
            using (var timeout = cts.TimeoutAfter(FileSystemsLandlord.SystemConfiguration.DatabaseOperationTimeout))
            using (var writer = new JsonTextWriter(new StreamWriter(stream)))
            {
                writer.WriteStartObject();
                writer.WritePropertyName("Results");
                writer.WriteStartArray();

                Storage.Batch(accessor =>
                {
                    var files = accessor.GetFilesAfter(etag, pageSize);
                    foreach (var file in files)
                    {
                        if (readTriggers.CanReadFile(file.FullPath, file.Metadata, ReadOperation.Load) == false)
                            continue;

                        timeout.Delay();
                        var doc = RavenJObject.FromObject(file);
                        doc.WriteTo(writer);

                        writer.WriteRaw(Environment.NewLine);
                    }
                });

                writer.WriteEndArray();
                writer.WriteEndObject();
                writer.Flush();
            }
        }
        private void StreamToClient(Stream stream, int pageSize, Etag etag)
        {
            using (var cts = new CancellationTokenSource())
            using (var timeout = cts.TimeoutAfter(FileSystemsLandlord.SystemConfiguration.DatabaseOperationTimeout))
            using (var writer = new JsonTextWriter(new StreamWriter(stream)))
            {
                writer.WriteStartObject();
                writer.WritePropertyName("Results");
                writer.WriteStartArray();

                Storage.Batch(accessor =>
                {
                    var files = accessor.GetFilesAfter(etag, pageSize);
                    foreach (var file in files)
                    {
                        timeout.Delay();
                        var doc = RavenJObject.FromObject(file);
                        doc.WriteTo(writer);

                        writer.WriteRaw(Environment.NewLine);
                    }
                });

                writer.WriteEndArray();
                writer.WriteEndObject();
                writer.Flush();
            }
        }
Example #3
		public override void Respond(IHttpContext context)
		{
			using (context.Response.Streaming())
			{
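				// Streaming() is assumed to put the response into unbuffered
				// (chunked) mode, like BufferOutput = false in Example #8.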
				context.Response.ContentType = "application/json; charset=utf-8";

				using (var writer = new JsonTextWriter(new StreamWriter(context.Response.OutputStream)))
				{
					writer.WriteStartObject();
					writer.WritePropertyName("Results");
					writer.WriteStartArray();

					Database.TransactionalStorage.Batch(accessor =>
					{
						var startsWith = context.Request.QueryString["startsWith"];
						int pageSize = context.GetPageSize(int.MaxValue);
						if (string.IsNullOrEmpty(context.Request.QueryString["pageSize"]))
							pageSize = int.MaxValue;

						// we may be sending a LOT of documents to the user, and most 
						// of them aren't going to be relevant for other ops, so we are going to skip
						// the cache for that, to avoid filling it up very quickly
						using (DocumentCacher.SkipSettingDocumentsInDocumentCache())
						{
							if (string.IsNullOrEmpty(startsWith))
							{
								Database.GetDocuments(context.GetStart(), pageSize, context.GetEtagFromQueryString(),
								                      doc => doc.WriteTo(writer));
							}
							else
							{
								Database.GetDocumentsWithIdStartingWith(
									startsWith,
									context.Request.QueryString["matches"],
                                    context.Request.QueryString["exclude"],
									context.GetStart(),
									pageSize,
									doc => doc.WriteTo(writer));
							}
						}
					});

					writer.WriteEndArray();
					writer.WriteEndObject();
					writer.Flush();
				}
			}
		}
Example #4
        public override void ExecuteResult(ControllerContext context)
        {
            if (context == null)
                throw new ArgumentNullException("context");

            var response = context.HttpContext.Response;
            response.ContentType = !string.IsNullOrEmpty(ContentType) ? ContentType : "application/json";

            if (ContentEncoding != null)
                response.ContentEncoding = ContentEncoding;

            if (Data == null) return;
            var writer = new JsonTextWriter(response.Output) { Formatting = Formatting };

            var serializer = JsonSerializer.Create(SerializerSettings);
            serializer.Serialize(writer, Data);
            writer.Flush();
        }
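The result type defers serialization until MVC calls ExecuteResult, writing JSON straight to Response.Output instead of building an intermediate string. A minimal usage sketch, assuming the Data, Formatting, and SerializerSettings members above are settable properties of a JsonNetResult class (the controller and payload are hypothetical):

        // Assumes System.Web.Mvc and Newtonsoft.Json are referenced.
        public class OrdersController : Controller
        {
            public ActionResult Recent()
            {
                return new JsonNetResult
                {
                    Data = new { Orders = new[] { "A-1", "A-2" } },
                    Formatting = Formatting.Indented,
                    SerializerSettings = new JsonSerializerSettings { NullValueHandling = NullValueHandling.Ignore }
                };
            }
        }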
Example #5
		public Guid Save(ToDo todo)
		{
			database.BeginTransaction();

			var id = Guid.NewGuid();

			var ms = new MemoryStream();
			var jsonTextWriter = new JsonTextWriter(new StreamWriter(ms));
			new JsonSerializer().Serialize(
				jsonTextWriter,
				todo
				);
			jsonTextWriter.Flush();

			todos.Put(id.ToByteArray(), ms.ToArray());

			database.Commit();

			return id;
		}
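Reading the entry back reverses the pipeline: fetch the stored bytes, wrap them in a stream, and deserialize. A sketch under the assumption that the todos table exposes a Read lookup returning the byte array previously passed to Put (that call is hypothetical; only Put appears above):

		public ToDo Load(Guid id)
		{
			// Read(...) is hypothetical; substitute the store's actual lookup API.
			byte[] data = todos.Read(id.ToByteArray());

			using (var jsonTextReader = new JsonTextReader(new StreamReader(new MemoryStream(data))))
				return new JsonSerializer().Deserialize<ToDo>(jsonTextReader);
		}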
Example #6
        public override void ExecuteResult(ControllerContext context)
        {
            if(context == null) {
                throw new ArgumentNullException("context");
            }

            var response = context.HttpContext.Response;
            response.ContentType = "application/json";

            if(data == null) {
                return;
            }

            var writer = new JsonTextWriter(response.Output);

            var serializer = new JsonSerializer();
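            // Configure() is presumably an application-specific extension method
            // on JsonSerializer; it is not part of Json.NET itself.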
            serializer.Configure();
            serializer.Serialize(writer, data);
            writer.Flush();
        }
Example #7
        private void StreamToClient(Stream stream, ExportOptions options, Lazy<NameValueCollection> headers, IPrincipal user)
        {
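            // Stash and later restore the ambient operation context so the
            // request's headers and principal flow into the storage batch.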
            var old = CurrentOperationContext.Headers.Value;
            var oldUser = CurrentOperationContext.User.Value;
            try
            {
                CurrentOperationContext.Headers.Value = headers;
                CurrentOperationContext.User.Value = user;

                Database.TransactionalStorage.Batch(accessor =>
                {
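                    // A 64 KB buffer keeps per-document writes from hitting
                    // the network stream one at a time.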
                    var bufferStream = new BufferedStream(stream, 1024 * 64);

                    using (var cts = new CancellationTokenSource())
                    using (var timeout = cts.TimeoutAfter(DatabasesLandlord.SystemConfiguration.DatabaseOperationTimeout))
                    using (var streamWriter = new StreamWriter(bufferStream))
                    using (var writer = new JsonTextWriter(streamWriter))
                    {
                        writer.WriteStartObject();
                        writer.WritePropertyName("Results");
                        writer.WriteStartArray();

                        var exporter = new SmugglerExporter(Database, options);

                        exporter.Export(item => WriteToStream(writer, item, timeout), cts.Token);

                        writer.WriteEndArray();
                        writer.WriteEndObject();
                        writer.Flush();
                        bufferStream.Flush();
                    }
                });
            }
            finally
            {
                CurrentOperationContext.Headers.Value = old;
                CurrentOperationContext.User.Value = oldUser;
            }
        }
Example #8
		public override void Respond(IHttpContext context)
		{
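			// Turn off output buffering so documents reach the client as soon
			// as they are written.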
			context.Response.BufferOutput = false;
		
			using (var writer = new JsonTextWriter(new StreamWriter(context.Response.OutputStream)))
			{
				writer.WriteStartObject();
				writer.WritePropertyName("Results");
				writer.WriteStartArray();

				Database.TransactionalStorage.Batch(accessor =>
				{
					var startsWith = context.Request.QueryString["startsWith"];
					int pageSize = context.GetPageSize(int.MaxValue);
					if (string.IsNullOrEmpty(context.Request.QueryString["pageSize"]))
						pageSize = int.MaxValue;

					if (string.IsNullOrEmpty(startsWith))
					{
						Database.GetDocuments(context.GetStart(), pageSize, context.GetEtagFromQueryString(), 
							doc => doc.WriteTo(writer));
					}
					else
					{
						Database.GetDocumentsWithIdStartingWith(
							startsWith,
							context.Request.QueryString["matches"],
							context.GetStart(),
							pageSize,
							doc => doc.WriteTo(writer));
					}
				});

				writer.WriteEndArray();
				writer.WriteEndObject();
				writer.Flush();
			}
		}
Example #9
        public static void CreateInfoPackageForDatabase(ZipArchive package, DocumentDatabase database, RequestManager requestManager, string zipEntryPrefix = null)
        {
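            // Gathers per-database diagnostics (replication, stats, metrics,
            // logs, config, indexes, queries, version, prefetching, request
            // tracking, tasks, system utilization) as entries in the zip archive.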
            zipEntryPrefix = zipEntryPrefix ?? string.Empty;

            var databaseName = database.Name;
            if (string.IsNullOrWhiteSpace(databaseName))
                databaseName = Constants.SystemDatabase;

            var jsonSerializer = JsonExtensions.CreateDefaultJsonSerializer();
            jsonSerializer.Formatting = Formatting.Indented;

            if (database.StartupTasks.OfType<ReplicationTask>().Any())
            {
                var replication = package.CreateEntry(zipEntryPrefix + "replication.json", compressionLevel);

                using (var statsStream = replication.Open())
                using (var streamWriter = new StreamWriter(statsStream))
                {
                    jsonSerializer.Serialize(streamWriter, ReplicationUtils.GetReplicationInformation(database));
                    streamWriter.Flush();
                }
            }

            var sqlReplicationTask = database.StartupTasks.OfType<SqlReplicationTask>().FirstOrDefault();
            if (sqlReplicationTask != null)
            {
                var replication = package.CreateEntry(zipEntryPrefix + "sql_replication.json", compressionLevel);

                using (var statsStream = replication.Open())
                using (var streamWriter = new StreamWriter(statsStream))
                {
                    jsonSerializer.Serialize(streamWriter, sqlReplicationTask.Statistics);
                    streamWriter.Flush();
                }
            }

            var stats = package.CreateEntry(zipEntryPrefix + "stats.json", compressionLevel);

            using (var statsStream = stats.Open())
            using (var streamWriter = new StreamWriter(statsStream))
            {
                jsonSerializer.Serialize(streamWriter, database.Statistics);
                streamWriter.Flush();
            }

            var metrics = package.CreateEntry(zipEntryPrefix + "metrics.json", compressionLevel);

            using (var metricsStream = metrics.Open())
            using (var streamWriter = new StreamWriter(metricsStream))
            {
                jsonSerializer.Serialize(streamWriter, database.CreateMetrics());
                streamWriter.Flush();
            }

            var logs = package.CreateEntry(zipEntryPrefix + "logs.csv", compressionLevel);

            using (var logsStream = logs.Open())
            using (var streamWriter = new StreamWriter(logsStream))
            {
                var target = LogManager.GetTarget<DatabaseMemoryTarget>();

                if (target == null) streamWriter.WriteLine("DatabaseMemoryTarget was not registered in the log manager, logs are not available");
                else
                {
                    var boundedMemoryTarget = target[databaseName];
                    var log = boundedMemoryTarget.GeneralLog;

                    streamWriter.WriteLine("time,logger,level,message,exception");

                    foreach (var logEvent in log)
                    {
                        streamWriter.WriteLine("{0:O},{1},{2},{3},{4}", logEvent.TimeStamp, logEvent.LoggerName, logEvent.Level, logEvent.FormattedMessage, logEvent.Exception);
                    }
                }

                streamWriter.Flush();
            }

            var config = package.CreateEntry(zipEntryPrefix + "config.json", compressionLevel);

            using (var configStream = config.Open())
            using (var streamWriter = new StreamWriter(configStream))
            using (var jsonWriter = new JsonTextWriter(streamWriter) { Formatting = Formatting.Indented })
            {
                GetConfigForDebug(database).WriteTo(jsonWriter, new EtagJsonConverter());
                jsonWriter.Flush();
            }

            var indexes = package.CreateEntry(zipEntryPrefix + "indexes.json", compressionLevel);

            using (var indexesStream = indexes.Open())
            using (var streamWriter = new StreamWriter(indexesStream))
            {
                jsonSerializer.Serialize(streamWriter, database.IndexDefinitionStorage.IndexDefinitions.ToDictionary(x => x.Key, x => x.Value));
                streamWriter.Flush();
            }

            var currentlyIndexing = package.CreateEntry(zipEntryPrefix + "currently-indexing.json", compressionLevel);

            using (var currentlyIndexingStream = currentlyIndexing.Open())
            using (var streamWriter = new StreamWriter(currentlyIndexingStream))
            {
                jsonSerializer.Serialize(streamWriter, GetCurrentlyIndexingForDebug(database));
                streamWriter.Flush();
            }

            var queries = package.CreateEntry(zipEntryPrefix + "queries.json", compressionLevel);

            using (var queriesStream = queries.Open())
            using (var streamWriter = new StreamWriter(queriesStream))
            {
                jsonSerializer.Serialize(streamWriter, database.WorkContext.CurrentlyRunningQueries);
                streamWriter.Flush();
            }

            var version = package.CreateEntry(zipEntryPrefix + "version.json", compressionLevel);

            using (var versionStream = version.Open())
            using (var streamWriter = new StreamWriter(versionStream))
            {
                jsonSerializer.Serialize(streamWriter, new
                {
                    DocumentDatabase.ProductVersion,
                    DocumentDatabase.BuildVersion   
                });
                streamWriter.Flush();
            }

            var prefetchStatus = package.CreateEntry(zipEntryPrefix + "prefetch-status.json", compressionLevel);

            using (var prefetchStatusStream = prefetchStatus.Open())
            using (var streamWriter = new StreamWriter(prefetchStatusStream))
            {
                jsonSerializer.Serialize(streamWriter, GetPrefetchingQueueStatusForDebug(database));
                streamWriter.Flush();
            }

            var requestTracking = package.CreateEntry(zipEntryPrefix + "request-tracking.json", compressionLevel);

            using (var requestTrackingStream = requestTracking.Open())
            using (var streamWriter = new StreamWriter(requestTrackingStream))
            {
                jsonSerializer.Serialize(streamWriter, GetRequestTrackingForDebug(requestManager, databaseName));
                streamWriter.Flush();
            }

            var tasks = package.CreateEntry(zipEntryPrefix + "tasks.json", compressionLevel);

            using (var tasksStream = tasks.Open())
            using (var streamWriter = new StreamWriter(tasksStream))
            {
                jsonSerializer.Serialize(streamWriter, GetTasksForDebug(database));
                streamWriter.Flush();
            }

            var systemUtilization = package.CreateEntry(zipEntryPrefix + "system-utilization.json", compressionLevel);

            using (var systemUtilizationStream = systemUtilization.Open())
            using (var streamWriter = new StreamWriter(systemUtilizationStream))
            {
                long totalPhysicalMemory = -1;
                long availableMemory = -1;
                object cpuTimes = null;

                try
                {
                    totalPhysicalMemory = MemoryStatistics.TotalPhysicalMemory;
                    availableMemory = MemoryStatistics.AvailableMemoryInMb;

                    using (var searcher = new ManagementObjectSearcher("select * from Win32_PerfFormattedData_PerfOS_Processor"))
                    {
                        cpuTimes = searcher.Get()
                            .Cast<ManagementObject>()
                            .Select(mo => new
                            {
                                Name = mo["Name"],
                                Usage = string.Format("{0} %", mo["PercentProcessorTime"])
                            }).ToArray();	
                    }
                }
                catch (Exception e)
                {
                    cpuTimes = "Could not get CPU times" + Environment.NewLine + e;
                }

                jsonSerializer.Serialize(streamWriter, new
                {
                    TotalPhysicalMemory = string.Format("{0:#,#.##;;0} MB", totalPhysicalMemory),
                    AvailableMemory = string.Format("{0:#,#.##;;0} MB", availableMemory),
                    CurrentCpuUsage = cpuTimes
                });

                streamWriter.Flush();
            }
        }
Example #10
        protected virtual async Task SendMessage(MemoryStream memoryStream, JsonSerializer serializer, object message, WebSocketSendAsync sendAsync, CancellationToken callCancelled)
        {
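            // The caller's MemoryStream is reused across messages: rewind,
            // overwrite, then send only the bytes just written. Flushing the
            // JsonTextWriter also flushes the underlying StreamWriter.
            // In sendAsync, message type 1 = text frame, true = end of message.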
            memoryStream.Position = 0;
            var jsonTextWriter = new JsonTextWriter(new StreamWriter(memoryStream));
            serializer.Serialize(jsonTextWriter, message);
            jsonTextWriter.Flush();

            var arraySegment = new ArraySegment<byte>(memoryStream.GetBuffer(), 0, (int)memoryStream.Position);
            await sendAsync(arraySegment, 1, true, callCancelled);
        }
Example #11
        private void StreamToClient(long id, SubscriptionActions subscriptions, Stream stream)
        {
            var sentDocuments = false;

            var bufferStream = new BufferedStream(stream, 1024 * 64);

            var lastBatchSentTime = Stopwatch.StartNew();
            using (var writer = new JsonTextWriter(new StreamWriter(bufferStream)))
            {
                var options = subscriptions.GetBatchOptions(id);

                writer.WriteStartObject();
                writer.WritePropertyName("Results");
                writer.WriteStartArray();

                using (var cts = new CancellationTokenSource())
                using (var timeout = cts.TimeoutAfter(DatabasesLandlord.SystemConfiguration.DatabaseOperationTimeout))
                {
                    Etag lastProcessedDocEtag = null;

                    var batchSize = 0;
                    var batchDocCount = 0;
                    var processedDocumentsCount = 0;
                    var hasMoreDocs = false;
                    var config = subscriptions.GetSubscriptionConfig(id);
                    var startEtag = config.AckEtag;
                    var criteria = config.Criteria;

                    bool isPrefixCriteria = !string.IsNullOrWhiteSpace(criteria.KeyStartsWith);

                    Func<JsonDocument, bool> addDocument = doc =>
                    {
                        timeout.Delay();
                        if (doc == null)
                        {
                            // we only have this heartbeat when the streaming has gone on for a long time
                            // and we haven't sent anything to the user in a while (because of filtering, skipping, etc).
                            writer.WriteRaw(Environment.NewLine);
                            writer.Flush();
                            if (lastBatchSentTime.ElapsedMilliseconds > 30000)
                                return false;

                            return true;
                        }

                        processedDocumentsCount++;

                        // We can't continue because we have already maxed out the batch size in bytes.
                        if (options.MaxSize.HasValue && batchSize >= options.MaxSize)
                            return false;

                        // We can't continue because we have already reached the maximum number of documents to send.
                        if (batchDocCount >= options.MaxDocCount)
                            return false;

                        // We can continue because we are ignoring system documents.
                        if (doc.Key.StartsWith("Raven/", StringComparison.InvariantCultureIgnoreCase))
                            return true;

                        // We can continue because we are ignoring the document as it doesn't fit the criteria.
                        if (MatchCriteria(criteria, doc) == false)
                            return true;

                        doc.ToJson().WriteTo(writer);
                        writer.WriteRaw(Environment.NewLine);

                        batchSize += doc.SerializedSizeOnDisk;
                        batchDocCount++;

                        return true; // We get the next document
                    };

                    var retries = 0;
                    do
                    {
                        var lastProcessedDocumentsCount = processedDocumentsCount;

                        Database.TransactionalStorage.Batch(accessor =>
                        {
                            // we may be sending a LOT of documents to the user, and most 
                            // of them aren't going to be relevant for other ops, so we are going to skip
                            // the cache for that, to avoid filling it up very quickly
                            using (DocumentCacher.SkipSetAndGetDocumentsInDocumentCache())
                            {
                                if (isPrefixCriteria)
                                {
                                    // If GetDocumentsWithIdStartingWith returns no documents, we may be
                                    // scanning a long stretch of uninteresting documents and hitting the timeout.
                                    lastProcessedDocEtag = Database.Documents.GetDocumentsWithIdStartingWith(criteria.KeyStartsWith, options.MaxDocCount - batchDocCount, startEtag, cts.Token, addDocument);

                                    hasMoreDocs = false;
                                }
                                else
                                {
                                    // It doesn't matter if we match the criteria or not, the document has been already processed.
                                    lastProcessedDocEtag = Database.Documents.GetDocuments(-1, options.MaxDocCount - batchDocCount, startEtag, cts.Token, addDocument);

                                    // If we don't get any document from GetDocuments it may be a signal that something is wrong.
                                    if (lastProcessedDocEtag == null)
                                    {
                                        hasMoreDocs = false;
                                    }
                                    else
                                    {
                                        var lastDocEtag = accessor.Staleness.GetMostRecentDocumentEtag();
                                        hasMoreDocs = EtagUtil.IsGreaterThan(lastDocEtag, lastProcessedDocEtag);

                                        startEtag = lastProcessedDocEtag;
                                    }

                                    retries = lastProcessedDocumentsCount == batchDocCount ? retries : 0;
                                }
                            }							
                        });

                        if (lastBatchSentTime.ElapsedMilliseconds >= 30000)
                        {
                            if (batchDocCount == 0)
                                log.Warn("Subscription filtered out all possible documents for {0:#,#;;0} seconds in a row, stopping operation", lastBatchSentTime.Elapsed.TotalSeconds);
                            break;
                        }

                        if (lastProcessedDocumentsCount == processedDocumentsCount)
                        {
                            if (retries == 3)
                            {
                                log.Warn("Subscription processing did not end up replicating any documents for 3 times in a row, stopping operation", retries);
                            }
                            else
                            {
                                log.Warn("Subscription processing did not end up replicating any documents, due to possible storage error, retry number: {0}", retries);
                            }

                            retries++;
                        }
                    } while (retries < 3 && hasMoreDocs && batchDocCount < options.MaxDocCount && (options.MaxSize.HasValue == false || batchSize < options.MaxSize));

                    writer.WriteEndArray();

                    if (batchDocCount > 0 || processedDocumentsCount > 0 || isPrefixCriteria)
                    {
                        writer.WritePropertyName("LastProcessedEtag");
                        writer.WriteValue(lastProcessedDocEtag.ToString());

                        sentDocuments = true;
                    }

                    writer.WriteEndObject();
                    writer.Flush();

                    bufferStream.Flush();
                }
            }

            if (sentDocuments)
                subscriptions.UpdateBatchSentTime(id);
        }
Example #12
    public void FloatingPointNonFiniteNumbers()
    {
      StringBuilder sb = new StringBuilder();
      StringWriter sw = new StringWriter(sb);

      using (JsonWriter jsonWriter = new JsonTextWriter(sw))
      {
        jsonWriter.Formatting = Formatting.Indented;
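        // The default FloatFormatHandling.Symbol emits bare NaN/Infinity
        // literals, which strict JSON parsers reject.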

        jsonWriter.WriteStartArray();
        jsonWriter.WriteValue(double.NaN);
        jsonWriter.WriteValue(double.PositiveInfinity);
        jsonWriter.WriteValue(double.NegativeInfinity);
        jsonWriter.WriteValue(float.NaN);
        jsonWriter.WriteValue(float.PositiveInfinity);
        jsonWriter.WriteValue(float.NegativeInfinity);
        jsonWriter.WriteEndArray();

        jsonWriter.Flush();
      }

      string expected = @"[
  NaN,
  Infinity,
  -Infinity,
  NaN,
  Infinity,
  -Infinity
]";
      string result = sb.ToString();

      Assert.AreEqual(expected, result);
    }
Example #13
		private void StreamToClient(long id, SubscriptionActions subscriptions, Stream stream)
		{
			var sentDocuments = false;

			using (var streamWriter = new StreamWriter(stream))
			using (var writer = new JsonTextWriter(streamWriter))
			{
				var options = subscriptions.GetBatchOptions(id);

				writer.WriteStartObject();
				writer.WritePropertyName("Results");
				writer.WriteStartArray();

				using (var cts = new CancellationTokenSource())
				using (var timeout = cts.TimeoutAfter(DatabasesLandlord.SystemConfiguration.DatabaseOperationTimeout))
				{
                    Etag lastProcessedDocEtag = null;

					var batchSize = 0;
					var batchDocCount = 0;
					var hasMoreDocs = false;

					var config = subscriptions.GetSubscriptionConfig(id);
					var startEtag = config.AckEtag;
					var criteria = config.Criteria;

                    Action<JsonDocument> addDocument = doc =>
                    {
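                        // Unlike the Func<JsonDocument, bool> callback in the
                        // newer version above, an Action cannot stop the scan
                        // early; overflow documents are simply skipped.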
                        timeout.Delay();

                        if (options.MaxSize.HasValue && batchSize >= options.MaxSize)
                            return;

                        if (batchDocCount >= options.MaxDocCount)
                            return;

                        lastProcessedDocEtag = doc.Etag;

                        if (doc.Key.StartsWith("Raven/", StringComparison.InvariantCultureIgnoreCase))
                            return;

                        if (MatchCriteria(criteria, doc) == false)
                            return;

                        doc.ToJson().WriteTo(writer);
                        writer.WriteRaw(Environment.NewLine);

                        batchSize += doc.SerializedSizeOnDisk;
                        batchDocCount++;
                    };

					do
					{
						Database.TransactionalStorage.Batch(accessor =>
						{
							// we may be sending a LOT of documents to the user, and most 
							// of them aren't going to be relevant for other ops, so we are going to skip
							// the cache for that, to avoid filling it up very quickly
							using (DocumentCacher.SkipSettingDocumentsInDocumentCache())
							{    
                                if (!string.IsNullOrWhiteSpace(criteria.KeyStartsWith))
                                {
                                    Database.Documents.GetDocumentsWithIdStartingWith(criteria.KeyStartsWith, options.MaxDocCount - batchDocCount, startEtag, cts.Token, addDocument);
                                }
                                else
                                {
                                    Database.Documents.GetDocuments(-1, options.MaxDocCount - batchDocCount, startEtag, cts.Token, addDocument);
                                }
							}

							if (lastProcessedDocEtag == null)
								hasMoreDocs = false;
							else
							{
								var lastDocEtag = accessor.Staleness.GetMostRecentDocumentEtag();
								hasMoreDocs = EtagUtil.IsGreaterThan(lastDocEtag, lastProcessedDocEtag);

								startEtag = lastProcessedDocEtag;
							}
						});
					} while (hasMoreDocs && batchDocCount < options.MaxDocCount && (options.MaxSize.HasValue == false || batchSize < options.MaxSize));

					writer.WriteEndArray();

					if (batchDocCount > 0)
					{
						writer.WritePropertyName("LastProcessedEtag");
						writer.WriteValue(lastProcessedDocEtag.ToString());

						sentDocuments = true;
					}

					writer.WriteEndObject();
					writer.Flush();
				}
			}

			if (sentDocuments)
				subscriptions.UpdateBatchSentTime(id);
		}
Example #14
        private void StreamToClient(Stream stream, string startsWith, int start, int pageSize, Etag etag, string matches, int nextPageStart, string skipAfter, 
            Lazy<NameValueCollection> headers, IPrincipal user)
        {
            var old = CurrentOperationContext.Headers.Value;
            var oldUser = CurrentOperationContext.User.Value;
            try
            {
                CurrentOperationContext.Headers.Value = headers;
                CurrentOperationContext.User.Value = user;


                var bufferStream = new BufferedStream(stream, 1024 * 64);
                using (var cts = new CancellationTokenSource())
                using (var timeout = cts.TimeoutAfter(DatabasesLandlord.SystemConfiguration.DatabaseOperationTimeout))
                using (var writer = new JsonTextWriter(new StreamWriter(bufferStream)))
                {
                    writer.WriteStartObject();
                    writer.WritePropertyName("Results");
                    writer.WriteStartArray();

                    Action<JsonDocument> addDocument = doc =>
                    {
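                        // Shared write callback: refresh the operation timeout,
                        // then emit the document followed by a newline separator.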
                        timeout.Delay();
                        doc.ToJson().WriteTo(writer);
                        writer.WriteRaw(Environment.NewLine);
                    };

                    Database.TransactionalStorage.Batch(accessor =>
                    {
                        // we may be sending a LOT of documents to the user, and most 
                        // of them aren't going to be relevant for other ops, so we are going to skip
                        // the cache for that, to avoid filling it up very quickly
                        using (DocumentCacher.SkipSetAndGetDocumentsInDocumentCache())
                        {
                            if (string.IsNullOrEmpty(startsWith))
                            {
                                Database.Documents.GetDocuments(start, pageSize, etag, cts.Token, addDocument);
                            }
                            else
                            {
                                var nextPageStartInternal = nextPageStart;

                                Database.Documents.GetDocumentsWithIdStartingWith(startsWith, matches, null, start, pageSize, cts.Token, ref nextPageStartInternal, addDocument, skipAfter: skipAfter);

                                nextPageStart = nextPageStartInternal;
                            }
                        }
                    });

                    writer.WriteEndArray();
                    writer.WritePropertyName("NextPageStart");
                    writer.WriteValue(nextPageStart);
                    writer.WriteEndObject();
                    writer.Flush();
                    bufferStream.Flush();
                }
            }
            finally
            {
                CurrentOperationContext.Headers.Value = old;
                CurrentOperationContext.User.Value = oldUser;
            }
        }
Example #15
		private void StreamToClient(Stream stream, string startsWith, int start, int pageSize, Etag etag, string matches, int nextPageStart, string skipAfter)
		{
			using (var cts = new CancellationTokenSource())
			using (var timeout = cts.TimeoutAfter(DatabasesLandlord.SystemConfiguration.DatabaseOperationTimeout))
			using (var writer = new JsonTextWriter(new StreamWriter(stream)))
			{
				writer.WriteStartObject();
				writer.WritePropertyName("Results");
				writer.WriteStartArray();

				Database.TransactionalStorage.Batch(accessor =>
				{
					// we may be sending a LOT of documents to the user, and most 
					// of them aren't going to be relevant for other ops, so we are going to skip
					// the cache for that, to avoid filling it up very quickly
					using (DocumentCacher.SkipSettingDocumentsInDocumentCache())
					{
						if (string.IsNullOrEmpty(startsWith))
							Database.Documents.GetDocuments(start, pageSize, etag, cts.Token, doc =>
							{
								timeout.Delay();
								doc.WriteTo(writer);
                                writer.WriteRaw(Environment.NewLine);
							});
						else
						{
							var nextPageStartInternal = nextPageStart;

							Database.Documents.GetDocumentsWithIdStartingWith(startsWith, matches, null, start, pageSize, cts.Token, ref nextPageStartInternal, doc =>
							{
								timeout.Delay();
								doc.WriteTo(writer);
                                writer.WriteRaw(Environment.NewLine);
							},skipAfter: skipAfter);

							nextPageStart = nextPageStartInternal;
						}
					}
				});

				writer.WriteEndArray();
				writer.WritePropertyName("NextPageStart");
				writer.WriteValue(nextPageStart);
				writer.WriteEndObject();
				writer.Flush();
			}
		}
Example #16
			protected override Task SerializeToStreamAsync(Stream stream, TransportContext context)
			{
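				// Writes each batched response's status, headers, and body as one
				// entry of a JSON array. The writers are flushed but deliberately
				// not disposed, presumably so the transport stream stays open.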
				var streamWriter = new StreamWriter(stream);
				var writer = new JsonTextWriter(streamWriter);
				writer.WriteStartArray();

				foreach (var result in results)
				{
					if (result == null)
					{
						writer.WriteNull();
						continue;
					}

					writer.WriteStartObject();
					writer.WritePropertyName("Status");
					writer.WriteValue((int) result.StatusCode);
					writer.WritePropertyName("Headers");
					writer.WriteStartObject();

					foreach (var header in result.Headers.Concat(result.Content.Headers))
					{
						foreach (var val in header.Value)
						{
							writer.WritePropertyName(header.Key);
							writer.WriteValue(val);
						}
					}

					writer.WriteEndObject();
					writer.WritePropertyName("Result");

					var jsonContent = (JsonContent)result.Content;

					if(jsonContent.Data != null)
						jsonContent.Data.WriteTo(writer, Default.Converters);

					writer.WriteEndObject();
				}

				writer.WriteEndArray();
				writer.Flush();
				return new CompletedTask();
			}
Example #17
    public void FloatingPointNonFiniteNumbers_Zero()
    {
      StringBuilder sb = new StringBuilder();
      StringWriter sw = new StringWriter(sb);

      using (JsonWriter jsonWriter = new JsonTextWriter(sw))
      {
        jsonWriter.Formatting = Formatting.Indented;
        jsonWriter.FloatFormatHandling = FloatFormatHandling.DefaultValue;

        jsonWriter.WriteStartArray();
        jsonWriter.WriteValue(double.NaN);
        jsonWriter.WriteValue(double.PositiveInfinity);
        jsonWriter.WriteValue(double.NegativeInfinity);
        jsonWriter.WriteValue(float.NaN);
        jsonWriter.WriteValue(float.PositiveInfinity);
        jsonWriter.WriteValue(float.NegativeInfinity);
        jsonWriter.WriteValue((double?)double.NaN);
        jsonWriter.WriteValue((double?)double.PositiveInfinity);
        jsonWriter.WriteValue((double?)double.NegativeInfinity);
        jsonWriter.WriteValue((float?)float.NaN);
        jsonWriter.WriteValue((float?)float.PositiveInfinity);
        jsonWriter.WriteValue((float?)float.NegativeInfinity);
        jsonWriter.WriteEndArray();

        jsonWriter.Flush();
      }

      string expected = @"[
  0.0,
  0.0,
  0.0,
  0.0,
  0.0,
  0.0,
  null,
  null,
  null,
  null,
  null,
  null
]";
      string result = sb.ToString();

      Assert.AreEqual(expected, result);
    }
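Examples #12 and #17 exercise FloatFormatHandling.Symbol (the default) and FloatFormatHandling.DefaultValue. The third option, FloatFormatHandling.String, quotes the symbols so the output stays valid JSON; a minimal sketch in the same style (not part of the original test suite):

    public void FloatingPointNonFiniteNumbers_String()
    {
      StringBuilder sb = new StringBuilder();
      StringWriter sw = new StringWriter(sb);

      using (JsonWriter jsonWriter = new JsonTextWriter(sw))
      {
        jsonWriter.Formatting = Formatting.Indented;
        // String mode writes the special values as quoted JSON strings.
        jsonWriter.FloatFormatHandling = FloatFormatHandling.String;

        jsonWriter.WriteStartArray();
        jsonWriter.WriteValue(double.NaN);
        jsonWriter.WriteValue(double.PositiveInfinity);
        jsonWriter.WriteValue(double.NegativeInfinity);
        jsonWriter.WriteEndArray();

        jsonWriter.Flush();
      }

      string expected = @"[
  ""NaN"",
  ""Infinity"",
  ""-Infinity""
]";

      Assert.AreEqual(expected, sb.ToString());
    }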