Example #1
        private void StreamToClient(Stream stream, int pageSize, Etag etag, OrderedPartCollection<AbstractFileReadTrigger> readTriggers)
        {
            using (var cts = new CancellationTokenSource())
            using (var timeout = cts.TimeoutAfter(FileSystemsLandlord.SystemConfiguration.DatabaseOperationTimeout))
            using (var writer = new JsonTextWriter(new StreamWriter(stream)))
            {
                writer.WriteStartObject();
                writer.WritePropertyName("Results");
                writer.WriteStartArray();

                Storage.Batch(accessor =>
                {
                    var files = accessor.GetFilesAfter(etag, pageSize);
                    foreach (var file in files)
                    {
                        if (readTriggers.CanReadFile(file.FullPath, file.Metadata, ReadOperation.Load) == false)
                            continue;

                        timeout.Delay();
                        var doc = RavenJObject.FromObject(file);
                        doc.WriteTo(writer);

                        writer.WriteRaw(Environment.NewLine);
                    }
                });

                writer.WriteEndArray();
                writer.WriteEndObject();
                writer.Flush();
            }
        }

        private void StreamToClient(Stream stream, int pageSize, Etag etag)
        {
            using (var cts = new CancellationTokenSource())
            using (var timeout = cts.TimeoutAfter(FileSystemsLandlord.SystemConfiguration.DatabaseOperationTimeout))
            using (var writer = new JsonTextWriter(new StreamWriter(stream)))
            {
                writer.WriteStartObject();
                writer.WritePropertyName("Results");
                writer.WriteStartArray();

                Storage.Batch(accessor =>
                {
                    var files = accessor.GetFilesAfter(etag, pageSize);
                    foreach (var file in files)
                    {
                        timeout.Delay();
                        var doc = RavenJObject.FromObject(file);
                        doc.WriteTo(writer);

                        writer.WriteRaw(Environment.NewLine);
                    }
                });

                writer.WriteEndArray();
                writer.WriteEndObject();
                writer.Flush();
            }
        }
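In both overloads above, the commas between the streamed results are emitted by the writer itself when the next document is written; WriteRaw(Environment.NewLine) only appends a line break so that each result lands on its own line. Below is a minimal sketch of that behaviour, using plain Newtonsoft.Json.Linq JObject values in place of RavenDB's RavenJObject (an assumption made only to keep the sketch self-contained):

        // Minimal sketch (not from the scraped source): WriteTo lets the writer insert the
        // array delimiters, while WriteRaw(Environment.NewLine) adds the line break verbatim
        // and leaves the writer's JSON state untouched.
        private static string StreamTwoDocuments()
        {
            var stringWriter = new StringWriter();
            using (var writer = new JsonTextWriter(stringWriter))
            {
                writer.WriteStartObject();
                writer.WritePropertyName("Results");
                writer.WriteStartArray();

                foreach (var doc in new[] { JObject.FromObject(new { Id = 1 }), JObject.FromObject(new { Id = 2 }) })
                {
                    doc.WriteTo(writer);                  // the writer emits ',' before the second element
                    writer.WriteRaw(Environment.NewLine); // newline only; no delimiter handling
                }

                writer.WriteEndArray();
                writer.WriteEndObject();
                writer.Flush();
            }

            return stringWriter.ToString(); // {"Results":[{"Id":1}\n,{"Id":2}\n]}
        }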
Example #3
        private void StreamToClient(Stream stream, string startsWith, int start, int pageSize, Etag etag, string matches, int nextPageStart, string skipAfter)
        {
            var bufferStream = new BufferedStream(stream, 1024 * 64);
            using (var cts = new CancellationTokenSource())
            using (var timeout = cts.TimeoutAfter(DatabasesLandlord.SystemConfiguration.DatabaseOperationTimeout))
            using (var writer = new JsonTextWriter(new StreamWriter(bufferStream)))
            {
                writer.WriteStartObject();
                writer.WritePropertyName("Results");
                writer.WriteStartArray();

                Action<JsonDocument> addDocument = doc =>
                {
                    timeout.Delay();
                    doc.ToJson().WriteTo(writer);
                    writer.WriteRaw(Environment.NewLine);
                };

                Database.TransactionalStorage.Batch(accessor =>
                {
                    // we may be sending a LOT of documents to the user, and most 
                    // of them aren't going to be relevant for other ops, so we are going to skip
                    // the cache for that, to avoid filling it up very quickly
                    using (DocumentCacher.SkipSettingDocumentsInDocumentCache())
                    {
                        if (string.IsNullOrEmpty(startsWith))
                        {
                            Database.Documents.GetDocuments(start, pageSize, etag, cts.Token, addDocument);
                        }
                        else
                        {
                            var nextPageStartInternal = nextPageStart;

                            Database.Documents.GetDocumentsWithIdStartingWith(startsWith, matches, null, start, pageSize, cts.Token, ref nextPageStartInternal, addDocument, skipAfter: skipAfter);

                            nextPageStart = nextPageStartInternal;
                        }
                    }
                });

                writer.WriteEndArray();
                writer.WritePropertyName("NextPageStart");
                writer.WriteValue(nextPageStart);
                writer.WriteEndObject();
                writer.Flush();
                bufferStream.Flush();
            }
        }
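Example #3 wraps the response stream in a 64 KB BufferedStream, which is why it ends with two flushes. A minimal sketch of that layering follows (the MemoryStream stands in for the real response stream and is an assumption of this sketch):

        // Minimal sketch (not from the scraped source): writer.Flush() drains the StreamWriter
        // into the BufferedStream, and bufferStream.Flush() pushes the buffered bytes down to
        // the underlying stream; skipping either can leave the tail of the JSON unsent.
        private static byte[] WriteEmptyResults()
        {
            using (var responseStream = new MemoryStream()) // stands in for the HTTP response stream
            {
                var bufferStream = new BufferedStream(responseStream, 1024 * 64);
                using (var writer = new JsonTextWriter(new StreamWriter(bufferStream)))
                {
                    writer.WriteStartObject();
                    writer.WritePropertyName("Results");
                    writer.WriteStartArray();
                    writer.WriteEndArray();
                    writer.WriteEndObject();

                    writer.Flush();       // StreamWriter -> BufferedStream
                    bufferStream.Flush(); // BufferedStream -> underlying stream
                }

                return responseStream.ToArray(); // {"Results":[]}
            }
        }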
Example #4
		private static string GenerateOutput(Dictionary<string, object> result, int indent)
		{
			var stringWriter = new StringWriter();
			var writer = new JsonTextWriter(stringWriter)
			{
				Formatting = Formatting.Indented
			};

			writer.WriteStartObject();
			foreach (var o in result)
			{
				writer.WritePropertyName(o.Key);
				var ravenJToken = o.Value as RavenJToken;
				if (ravenJToken != null)
				{
					ravenJToken.WriteTo(writer);
					continue;
				}
				var conflicted = o.Value as Conflicted;
				if (conflicted != null)
				{
					writer.WriteComment(">>>> conflict start");
					writer.WriteStartArray();
					foreach (var token in conflicted.Values)
					{
						token.WriteTo(writer);
					}
					writer.WriteEndArray();
					writer.WriteComment("<<<< conflict end");
					continue;
				}

				var arrayWithWarning = o.Value as ArrayWithWarning;
				if(arrayWithWarning != null)
				{
					writer.WriteComment(">>>> auto merged array start");
					arrayWithWarning.MergedArray.WriteTo(writer);
					writer.WriteComment("<<<< auto merged array end");
					continue;
				}

				var resolver = o.Value as ConflictsResolver;
				if(resolver != null)
				{
					using(var stringReader = new StringReader(resolver.Resolve(indent + 1)))
					{
						var first = true;
						string line;
						while((line = stringReader.ReadLine()) != null)
						{
							if(first == false)
							{
								writer.WriteRaw(Environment.NewLine);
								for (var i = 0; i < indent; i++)
								{
									writer.WriteRaw(new string(writer.IndentChar, writer.Indentation));
								}
							}
							if(first)
								writer.WriteRawValue(line);
							else
								writer.WriteRaw(line);


							first = false;

						}
					}
					continue;
				}
				throw new InvalidOperationException("Could not understand how to deal with: " + o.Value);
			}
			writer.WriteEndObject();
			return stringWriter.GetStringBuilder().ToString();
		}
Example #5
        private void StreamToClient(long id, SubscriptionActions subscriptions, Stream stream)
        {
            var sentDocuments = false;

            var bufferStream = new BufferedStream(stream, 1024 * 64);

            var lastBatchSentTime = Stopwatch.StartNew();
            using (var writer = new JsonTextWriter(new StreamWriter(bufferStream)))
            {
                var options = subscriptions.GetBatchOptions(id);

                writer.WriteStartObject();
                writer.WritePropertyName("Results");
                writer.WriteStartArray();

                using (var cts = new CancellationTokenSource())
                using (var timeout = cts.TimeoutAfter(DatabasesLandlord.SystemConfiguration.DatabaseOperationTimeout))
                {
                    Etag lastProcessedDocEtag = null;

                    var batchSize = 0;
                    var batchDocCount = 0;
                    var processedDocumentsCount = 0;
                    var hasMoreDocs = false;
                    var config = subscriptions.GetSubscriptionConfig(id);
                    var startEtag =  config.AckEtag;
                    var criteria = config.Criteria;

                    bool isPrefixCriteria = !string.IsNullOrWhiteSpace(criteria.KeyStartsWith);

                    Func<JsonDocument, bool> addDocument = doc =>
                    {
                        timeout.Delay();
                        if (doc == null)
                        {
                            // we only have this heartbeat when the streaming has gone on for a long time
                            // and we haven't sent anything to the user in a while (because of filtering, skipping, etc).
                            writer.WriteRaw(Environment.NewLine);
                            writer.Flush();
                            if (lastBatchSentTime.ElapsedMilliseconds > 30000)
                                return false;

                            return true;
                        }

                        processedDocumentsCount++;

                        // We can't continue because we have already maxed out the batch size in bytes.
                        if (options.MaxSize.HasValue && batchSize >= options.MaxSize)
                            return false;

                        // We can't continue because we have already reached the maximum number of documents to send.
                        if (batchDocCount >= options.MaxDocCount)
                            return false;

                        // We can continue because we are ignoring system documents.
                        if (doc.Key.StartsWith("Raven/", StringComparison.InvariantCultureIgnoreCase))
                            return true;

                        // We can continue because we are ignoring the document as it doesn't fit the criteria.
                        if (MatchCriteria(criteria, doc) == false)
                            return true;

                        doc.ToJson().WriteTo(writer);
                        writer.WriteRaw(Environment.NewLine);

                        batchSize += doc.SerializedSizeOnDisk;
                        batchDocCount++;

                        return true; // We get the next document
                    };

                    var retries = 0;
                    do
                    {
                        var lastProcessedDocumentsCount = processedDocumentsCount;

                        Database.TransactionalStorage.Batch(accessor =>
                        {
                            // we may be sending a LOT of documents to the user, and most 
                            // of them aren't going to be relevant for other ops, so we are going to skip
                            // the cache for that, to avoid filling it up very quickly
                            using (DocumentCacher.SkipSetAndGetDocumentsInDocumentCache())
                            {
                                if (isPrefixCriteria)
                                {
                                    // If we don't get any documents back from GetDocumentsWithIdStartingWith, we may be wading through a long run of uninteresting documents and simply hitting the timeout.
                                    lastProcessedDocEtag = Database.Documents.GetDocumentsWithIdStartingWith(criteria.KeyStartsWith, options.MaxDocCount - batchDocCount, startEtag, cts.Token, addDocument);

                                    hasMoreDocs = false;
                                }
                                else
                                {
                                    // It doesn't matter whether the document matches the criteria or not; it has already been processed.
                                    lastProcessedDocEtag = Database.Documents.GetDocuments(-1, options.MaxDocCount - batchDocCount, startEtag, cts.Token, addDocument);

                                    // If we don't get any documents back from GetDocuments, it may be a signal that something is wrong.
                                    if (lastProcessedDocEtag == null)
                                    {
                                        hasMoreDocs = false;
                                    }
                                    else
                                    {
                                        var lastDocEtag = accessor.Staleness.GetMostRecentDocumentEtag();
                                        hasMoreDocs = EtagUtil.IsGreaterThan(lastDocEtag, lastProcessedDocEtag);

                                        startEtag = lastProcessedDocEtag;
                                    }

                                    retries = lastProcessedDocumentsCount == batchDocCount ? retries : 0;
                                }
                            }							
                        });

                        if (lastBatchSentTime.ElapsedMilliseconds >= 30000)
                        {
                            if (batchDocCount == 0)
                                log.Warn("Subscription filtered out all possible documents for {0:#,#;;0} seconds in a row, stopping operation", lastBatchSentTime.Elapsed.TotalSeconds);
                            break;
                        }

                        if (lastProcessedDocumentsCount == processedDocumentsCount)
                        {
                            if (retries == 3)
                            {
                                log.Warn("Subscription processing did not end up replicating any documents {0} times in a row, stopping operation", retries);
                            }
                            else
                            {
                                log.Warn("Subscription processing did not end up replicating any documents, due to possible storage error, retry number: {0}", retries);
                            }

                            retries++;
                        }
                    } while (retries < 3 && hasMoreDocs && batchDocCount < options.MaxDocCount && (options.MaxSize.HasValue == false || batchSize < options.MaxSize));

                    writer.WriteEndArray();

                    if (batchDocCount > 0 || processedDocumentsCount > 0 || isPrefixCriteria)
                    {
                        writer.WritePropertyName("LastProcessedEtag");
                        writer.WriteValue(lastProcessedDocEtag.ToString());

                        sentDocuments = true;
                    }

                    writer.WriteEndObject();
                    writer.Flush();

                    bufferStream.Flush();
                }
            }

            if (sentDocuments)
                subscriptions.UpdateBatchSentTime(id);
        }
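The null-document branch in the lambda above is a keep-alive: when filtering and skipping mean nothing has been sent for a while, the handler pushes a bare newline so the client knows the connection is still open. The same pattern in isolation (hypothetical helper, not from the scraped source):

        // Minimal sketch: a heartbeat emits no JSON value, only a raw newline that is flushed
        // immediately so the client sees traffic while the server keeps filtering documents.
        private static void WriteHeartbeat(JsonTextWriter writer)
        {
            writer.WriteRaw(Environment.NewLine); // raw text; the writer's JSON state is unchanged
            writer.Flush();
        }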
Example #6
    public void WriteRawInObject()
    {
      StringBuilder sb = new StringBuilder();
      StringWriter sw = new StringWriter(sb);

      using (JsonWriter jsonWriter = new JsonTextWriter(sw))
      {
        jsonWriter.Formatting = Formatting.Indented;

        jsonWriter.WriteStartObject();
        jsonWriter.WriteRaw(@"""PropertyName"":[1,2,3,4,5]");
        jsonWriter.WriteEnd();
      }

      string expected = @"{""PropertyName"":[1,2,3,4,5]}";
      string result = sb.ToString();

      Assert.AreEqual(expected, result);
    }
Example #7
    public void WriteRawInArray()
    {
      StringBuilder sb = new StringBuilder();
      StringWriter sw = new StringWriter(sb);

      using (JsonWriter jsonWriter = new JsonTextWriter(sw))
      {
        jsonWriter.Formatting = Formatting.Indented;
        jsonWriter.FloatFormatHandling = FloatFormatHandling.Symbol; // write NaN as a bare token, as the expected output assumes

        jsonWriter.WriteStartArray();
        jsonWriter.WriteValue(double.NaN);
        jsonWriter.WriteRaw(",[1,2,3,4,5]");
        jsonWriter.WriteRaw(",[1,2,3,4,5]");
        jsonWriter.WriteValue(float.NaN);
        jsonWriter.WriteEndArray();
      }

      string expected = @"[
  NaN,[1,2,3,4,5],[1,2,3,4,5],
  NaN
]";
      string result = sb.ToString();

      Assert.AreEqual(expected, result);
    }
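The test above has to put the leading commas into the raw text itself because WriteRaw appends characters verbatim and does not update the writer's state. WriteRawValue, by contrast, writes raw JSON where a value is expected and lets the writer supply the delimiter. A minimal sketch of the difference (not one of the scraped tests):

    public void WriteRawValueLetsWriterAddDelimiters()
    {
      StringBuilder sb = new StringBuilder();
      StringWriter sw = new StringWriter(sb);

      using (JsonWriter jsonWriter = new JsonTextWriter(sw))
      {
        jsonWriter.WriteStartArray();
        jsonWriter.WriteValue(1);
        jsonWriter.WriteRawValue("[2,3]"); // the writer inserts the ',' before the raw value
        jsonWriter.WriteEndArray();
      }

      Assert.AreEqual("[1,[2,3]]", sb.ToString());
    }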
Example #8
		private void StreamToClient(long id, SubscriptionActions subscriptions, Stream stream)
		{
			var sentDocuments = false;

			using (var streamWriter = new StreamWriter(stream))
			using (var writer = new JsonTextWriter(streamWriter))
			{
				var options = subscriptions.GetBatchOptions(id);

				writer.WriteStartObject();
				writer.WritePropertyName("Results");
				writer.WriteStartArray();

				using (var cts = new CancellationTokenSource())
				using (var timeout = cts.TimeoutAfter(DatabasesLandlord.SystemConfiguration.DatabaseOperationTimeout))
				{
                    Etag lastProcessedDocEtag = null;

					var batchSize = 0;
					var batchDocCount = 0;
					var hasMoreDocs = false;

					var config = subscriptions.GetSubscriptionConfig(id);
					var startEtag = config.AckEtag;
					var criteria = config.Criteria;

                    Action<JsonDocument> addDocument = doc =>
                    {
                        timeout.Delay();

                        if (options.MaxSize.HasValue && batchSize >= options.MaxSize)
                            return;

                        if (batchDocCount >= options.MaxDocCount)
                            return;

                        lastProcessedDocEtag = doc.Etag;

                        if (doc.Key.StartsWith("Raven/", StringComparison.InvariantCultureIgnoreCase))
                            return;

                        if (MatchCriteria(criteria, doc) == false)
                            return;

                        doc.ToJson().WriteTo(writer);
                        writer.WriteRaw(Environment.NewLine);

                        batchSize += doc.SerializedSizeOnDisk;
                        batchDocCount++;
                    };

                    int nextStart = 0;

					do
					{
						Database.TransactionalStorage.Batch(accessor =>
						{
							// we may be sending a LOT of documents to the user, and most 
							// of them aren't going to be relevant for other ops, so we are going to skip
							// the cache for that, to avoid filling it up very quickly
							using (DocumentCacher.SkipSettingDocumentsInDocumentCache())
							{    
                                if (!string.IsNullOrWhiteSpace(criteria.KeyStartsWith))
                                {
                                    Database.Documents.GetDocumentsWithIdStartingWith(criteria.KeyStartsWith, options.MaxDocCount - batchDocCount, startEtag, cts.Token, addDocument);
                                }
                                else
                                {
                                    Database.Documents.GetDocuments(-1, options.MaxDocCount - batchDocCount, startEtag, cts.Token, addDocument);
                                }
							}

							if (lastProcessedDocEtag == null)
								hasMoreDocs = false;
							else
							{
								var lastDocEtag = accessor.Staleness.GetMostRecentDocumentEtag();
								hasMoreDocs = EtagUtil.IsGreaterThan(lastDocEtag, lastProcessedDocEtag);

								startEtag = lastProcessedDocEtag;
							}
						});
					} while (hasMoreDocs && batchDocCount < options.MaxDocCount && (options.MaxSize.HasValue == false || batchSize < options.MaxSize));

					writer.WriteEndArray();

					if (batchDocCount > 0)
					{
						writer.WritePropertyName("LastProcessedEtag");
						writer.WriteValue(lastProcessedDocEtag.ToString());

						sentDocuments = true;
					}

					writer.WriteEndObject();
					writer.Flush();
				}
			}

			if (sentDocuments)
				subscriptions.UpdateBatchSentTime(id);
		}
Example #9
    public void WriteRawInStart()
    {
      StringBuilder sb = new StringBuilder();
      StringWriter sw = new StringWriter(sb);

      using (JsonWriter jsonWriter = new JsonTextWriter(sw))
      {
        jsonWriter.Formatting = Formatting.Indented;
        jsonWriter.FloatFormatHandling = FloatFormatHandling.Symbol;

        jsonWriter.WriteRaw("[1,2,3,4,5]");
        jsonWriter.WriteWhitespace("  ");
        jsonWriter.WriteStartArray();
        jsonWriter.WriteValue(double.NaN);
        jsonWriter.WriteEndArray();
      }

      string expected = @"[1,2,3,4,5]  [
  NaN
]";
      string result = sb.ToString();

      Assert.AreEqual(expected, result);
    }
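Example #9 only gets the bare NaN token because it sets FloatFormatHandling to Symbol. On current Json.NET versions the default is FloatFormatHandling.String, which writes non-finite numbers as quoted strings, as the sketch below assumes (not one of the scraped tests):

    public void WriteNaNWithDefaultFloatFormatHandling()
    {
      StringBuilder sb = new StringBuilder();
      StringWriter sw = new StringWriter(sb);

      using (JsonWriter jsonWriter = new JsonTextWriter(sw))
      {
        // FloatFormatHandling defaults to String: NaN and Infinity become JSON strings.
        jsonWriter.WriteStartArray();
        jsonWriter.WriteValue(double.NaN);
        jsonWriter.WriteEndArray();
      }

      Assert.AreEqual(@"[""NaN""]", sb.ToString());
    }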