private void StreamToClient(Stream stream, int pageSize, Etag etag, OrderedPartCollection<AbstractFileReadTrigger> readTriggers)
        {
            using (var cts = new CancellationTokenSource())
            using (var timeout = cts.TimeoutAfter(FileSystemsLandlord.SystemConfiguration.DatabaseOperationTimeout))
            using (var writer = new JsonTextWriter(new StreamWriter(stream)))
            {
                writer.WriteStartObject();
                writer.WritePropertyName("Results");
                writer.WriteStartArray();

                Storage.Batch(accessor =>
                {
                    var files = accessor.GetFilesAfter(etag, pageSize);
                    foreach (var file in files)
                    {
                        if (readTriggers.CanReadFile(file.FullPath, file.Metadata, ReadOperation.Load) == false)
                            continue;

                        timeout.Delay();
                        var doc = RavenJObject.FromObject(file);
                        doc.WriteTo(writer);

                        writer.WriteRaw(Environment.NewLine);
                    }
                });

                writer.WriteEndArray();
                writer.WriteEndObject();
                writer.Flush();
            }
        }
        private void StreamToClient(Stream stream, int pageSize, Etag etag)
        {
            using (var cts = new CancellationTokenSource())
			using (var timeout = cts.TimeoutAfter(FileSystemsLandlord.SystemConfiguration.DatabaseOperationTimeout))
			using (var writer = new JsonTextWriter(new StreamWriter(stream)))
			{
			    writer.WriteStartObject();
			    writer.WritePropertyName("Results");
			    writer.WriteStartArray();

                Storage.Batch(accessor =>
                {
                    var files = accessor.GetFilesAfter(etag, pageSize);
                    foreach (var file in files)
                    {
                        timeout.Delay();
                        var doc = RavenJObject.FromObject(file);
                        doc.WriteTo(writer);

                        writer.WriteRaw(Environment.NewLine);
                    }
                });

                writer.WriteEndArray();
                writer.WriteEndObject();
                writer.Flush();
			}
        }
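// Every snippet in this collection calls cts.TimeoutAfter(...) and gets back a disposable
// handle with a Delay() method. That extension is not shown anywhere in the listing, so here
// is a minimal sketch of the assumed contract. It is an illustration, not the RavenDB
// implementation: the handle cancels the token source if Delay() is not called again before
// the duration elapses, and Delay() pushes the deadline back to signal progress.
using System;
using System.Threading;

public static class CancellationTokenSourceTimeoutExtensions
{
    public static CancellationTimeoutHandle TimeoutAfter(this CancellationTokenSource cts, TimeSpan duration)
    {
        return new CancellationTimeoutHandle(cts, duration);
    }
}

public sealed class CancellationTimeoutHandle : IDisposable
{
    private readonly Timer timer;
    private readonly TimeSpan duration;

    public CancellationTimeoutHandle(CancellationTokenSource cts, TimeSpan duration)
    {
        this.duration = duration;
        // Cancel the token source once the deadline passes without a Delay() call.
        timer = new Timer(_ => cts.Cancel(), null, duration, Timeout.InfiniteTimeSpan);
    }

    // Called from tight loops (see the timeout.Delay() calls above) to reset the deadline.
    public void Delay()
    {
        timer.Change(duration, Timeout.InfiniteTimeSpan);
    }

    public void Dispose()
    {
        timer.Dispose();
    }
}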
Example #3
		public override void Respond(IHttpContext context)
		{
			using (var cts = new CancellationTokenSource())
			{
                var timeout = cts.TimeoutAfter(Settings.DatabaseOperationTimeout);
				var databaseBulkOperations = new DatabaseBulkOperations(Database, GetRequestTransaction(context), cts.Token, timeout);
				switch (context.Request.HttpMethod)
				{
					case "POST":
						Batch(context);
						break;
					case "DELETE":
						OnBulkOperation(context, databaseBulkOperations.DeleteByIndex);
						break;
					case "PATCH":
						var patchRequestJson = context.ReadJsonArray();
						var patchRequests = patchRequestJson.Cast<RavenJObject>().Select(PatchRequest.FromJson).ToArray();
						OnBulkOperation(context, (index, query, allowStale) =>
							databaseBulkOperations.UpdateByIndex(index, query, patchRequests, allowStale));
						break;
					case "EVAL":
						var advPatchRequestJson = context.ReadJsonObject<RavenJObject>();
						var advPatch = ScriptedPatchRequest.FromJson(advPatchRequestJson);
						OnBulkOperation(context, (index, query, allowStale) =>
							databaseBulkOperations.UpdateByIndex(index, query, advPatch, allowStale));
						break;
				}
			}
		}
		public async Task<HttpResponseMessage> BulkPost()
		{
		    using (var cts = new CancellationTokenSource())
            using (cts.TimeoutAfter(DatabasesLandlord.SystemConfiguration.DatabaseOperationTimeout))
            {
                var jsonCommandArray = await ReadJsonArrayAsync();

				cts.Token.ThrowIfCancellationRequested();

                var transactionInformation = GetRequestTransaction();
                var commands =
                    (from RavenJObject jsonCommand in jsonCommandArray select CommandDataFactory.CreateCommand(jsonCommand, transactionInformation)).ToArray();

                Log.Debug(
                    () =>
                    {
                        if (commands.Length > 15) // this is probably an import method, we will log only minimal information, to avoid filling up the log
                        {
                            return "\tExecuted "
                                   + string.Join(
                                       ", ", commands.GroupBy(x => x.Method).Select(x => string.Format("{0:#,#;;0} {1} operations", x.Count(), x.Key)));
                        }

                        var sb = new StringBuilder();
                        foreach (var commandData in commands)
                        {
                            sb.AppendFormat("\t{0} {1}{2}", commandData.Method, commandData.Key, Environment.NewLine);
                        }
                        return sb.ToString();
                    });

                var batchResult = Database.Batch(commands, cts.Token);
                return GetMessageWithObject(batchResult);
            }
		}
		public HttpResponseMessage BulkDelete(string id)
		{
            using (var cts = new CancellationTokenSource())
            using (var timeout = cts.TimeoutAfter(DatabasesLandlord.SystemConfiguration.DatabaseOperationTimeout))
            {
                var databaseBulkOperations = new DatabaseBulkOperations(Database, GetRequestTransaction(), cts.Token, timeout);
                return OnBulkOperation(databaseBulkOperations.DeleteByIndex, id);
            }
		}
        public async Task<HttpResponseMessage> BulkPost()
        {
            using (var cts = new CancellationTokenSource())
            using (cts.TimeoutAfter(DatabasesLandlord.SystemConfiguration.DatabaseOperationTimeout))
            {
                RavenJArray jsonCommandArray;

                try
                {
                    jsonCommandArray = await ReadJsonArrayAsync();
                }
                catch (InvalidOperationException e)
                {
                    Log.DebugException("Failed to deserialize document batch request.", e);
                    return GetMessageWithObject(new
                    {
                        Message = "Could not understand json, please check its validity."
                    }, (HttpStatusCode)422); // HTTP 422 - Unprocessable Entity
                }
                catch (InvalidDataException e)
                {
                    Log.DebugException("Failed to deserialize document batch request.", e);
                    return GetMessageWithObject(new
                    {
                        e.Message
                    }, (HttpStatusCode)422); // HTTP 422 - Unprocessable Entity
                }

                cts.Token.ThrowIfCancellationRequested();

                var transactionInformation = GetRequestTransaction();
                var commands =
                    (from RavenJObject jsonCommand in jsonCommandArray select CommandDataFactory.CreateCommand(jsonCommand, transactionInformation)).ToArray();

                Log.Debug(
                    () =>
                    {
                        if (commands.Length > 15) // this is probably an import method, we will log only minimal information, to avoid filling up the log
                        {
                            return "\tExecuted "
                                   + string.Join(
                                       ", ", commands.GroupBy(x => x.Method).Select(x => string.Format("{0:#,#;;0} {1} operations", x.Count(), x.Key)));
                        }

                        var sb = new StringBuilder();
                        foreach (var commandData in commands)
                        {
                            sb.AppendFormat("\t{0} {1}{2}", commandData.Method, commandData.Key, Environment.NewLine);
                        }
                        return sb.ToString();
                    });

                var batchResult = Database.Batch(commands, cts.Token);
                return GetMessageWithObject(batchResult);
            }
        }
		public async Task<HttpResponseMessage> BulkPatch(string id)
		{
            using (var cts = new CancellationTokenSource())
            using (var timeout = cts.TimeoutAfter(DatabasesLandlord.SystemConfiguration.DatabaseOperationTimeout))
            {
                var databaseBulkOperations = new DatabaseBulkOperations(Database, GetRequestTransaction(), cts.Token, timeout);
                var patchRequestJson = await ReadJsonArrayAsync();
                var patchRequests = patchRequestJson.Cast<RavenJObject>().Select(PatchRequest.FromJson).ToArray();
                return OnBulkOperation((index, query, allowStale) => databaseBulkOperations.UpdateByIndex(index, query, patchRequests, allowStale), id);
            }
		}
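// Hedged usage sketch for the bulk handlers above: BulkPost consumes a JSON array of
// commands. The command shape ({ Method, Key, Document, Metadata }) and the bulk_docs
// path follow the classic RavenDB HTTP convention, but treat both as assumptions rather
// than a verified contract for every server version; client.BaseAddress is assumed to
// point at the server root.
using System.Net.Http;
using System.Text;
using System.Threading.Tasks;

public static class BulkDocsClientSketch
{
    public static async Task PutTwoDocumentsAsync(HttpClient client)
    {
        const string body = @"[
            { ""Method"": ""PUT"", ""Key"": ""users/1"", ""Document"": { ""Name"": ""Ayende"" }, ""Metadata"": {} },
            { ""Method"": ""PUT"", ""Key"": ""users/2"", ""Document"": { ""Name"": ""Oren"" },   ""Metadata"": {} }
        ]";

        using (var content = new StringContent(body, Encoding.UTF8, "application/json"))
        using (var response = await client.PostAsync("databases/Example/bulk_docs", content))
        {
            // A 422 here means the server could not parse the command array (see the
            // InvalidOperationException / InvalidDataException handlers above).
            response.EnsureSuccessStatusCode();
        }
    }
}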
Example #8
		public override void Respond(IHttpContext context)
		{
			switch (context.Request.HttpMethod)
			{
				case "GET":
					long documentsCount = 0;
					Etag lastDocEtag = Etag.Empty;
					Database.TransactionalStorage.Batch(accessor =>
					{
						lastDocEtag = accessor.Staleness.GetMostRecentDocumentEtag();
						documentsCount = accessor.Documents.GetDocumentsCount();
					});

					lastDocEtag = lastDocEtag.HashWith(BitConverter.GetBytes(documentsCount));
					if (context.MatchEtag(lastDocEtag))
					{
						context.SetStatusToNotModified();
					}
					else
					{
						context.WriteHeaders(new RavenJObject(), lastDocEtag);

						using (var cts = new CancellationTokenSource())
						{
                            cts.TimeoutAfter(Settings.DatabaseOperationTimeout);

							var startsWith = context.Request.QueryString["startsWith"];
							if (string.IsNullOrEmpty(startsWith))
								context.WriteJson(Database.GetDocuments(context.GetStart(), context.GetPageSize(Database.Configuration.MaxPageSize), context.GetEtagFromQueryString(), cts.Token));
							else
								context.WriteJson(Database.GetDocumentsWithIdStartingWith(
									startsWith,
									context.Request.QueryString["matches"],
									context.Request.QueryString["exclude"],
									context.GetStart(),
									context.GetPageSize(Database.Configuration.MaxPageSize), 
									cts.Token));
						}
					}
					break;
				case "POST":
					var json = context.ReadJson();
					var id = Database.Put(null, Etag.Empty, json,
					                      context.Request.Headers.FilterHeaders(),
					                      GetRequestTransaction(context));
					context.SetStatusToCreated("/docs/" + Uri.EscapeUriString(id.Key));
					context.WriteJson(id);
					break;
			}
		}
		public HttpResponseMessage DocsGet()
		{
		    using (var cts = new CancellationTokenSource())
		    using (cts.TimeoutAfter(DatabasesLandlord.SystemConfiguration.DatabaseOperationTimeout))
		    {
		        long documentsCount = 0;
		        var lastDocEtag = Etag.Empty;
		        Database.TransactionalStorage.Batch(
		            accessor =>
		            {
		                lastDocEtag = accessor.Staleness.GetMostRecentDocumentEtag();
		                documentsCount = accessor.Documents.GetDocumentsCount();
		            });

		        lastDocEtag = lastDocEtag.HashWith(BitConverter.GetBytes(documentsCount));
		        if (MatchEtag(lastDocEtag)) return GetEmptyMessage(HttpStatusCode.NotModified);

		        var startsWith = GetQueryStringValue("startsWith");
		        HttpResponseMessage msg;
		        int nextPageStart = GetNextPageStart();
			    if (string.IsNullOrEmpty(startsWith))
			    {
				    var results = Database.Documents.GetDocuments(GetStart(), GetPageSize(Database.Configuration.MaxPageSize), 
						GetEtagFromQueryString(), cts.Token);
				    msg = GetMessageWithObject(results);
			    }
			    else
			    {
				    var transformer = GetQueryStringValue("transformer");
				    var transformerParameters = this.ExtractTransformerParameters();

				    msg =
					    GetMessageWithObject(
						    Database.Documents.GetDocumentsWithIdStartingWith(
							    startsWith,
							    GetQueryStringValue("matches"),
							    GetQueryStringValue("exclude"),
							    GetStart(),
							    GetPageSize(Database.Configuration.MaxPageSize),
							    cts.Token,
							    ref nextPageStart, transformer, transformerParameters,
								skipAfter: GetQueryStringValue("skipAfter")));
			    }

			    WriteHeaders(new RavenJObject { { Constants.NextPageStart, nextPageStart } }, lastDocEtag, msg);

		        return msg;
		    }
		}
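// DocsGet above folds the document count into the last-document etag so the cache
// validator changes on deletes as well as writes. A minimal sketch of the If-None-Match
// comparison it relies on, assuming etags are compared as opaque quoted strings
// (MatchEtag's real implementation is not shown in these snippets):
using System.Linq;
using System.Net.Http.Headers;

public static class EtagMatchSketch
{
    public static bool MatchEtag(HttpRequestHeaders headers, string currentEtag)
    {
        // Entity tags on the wire are quoted, e.g. If-None-Match: "01000000-...-0005"
        var quoted = "\"" + currentEtag + "\"";
        return headers.IfNoneMatch.Any(tag => tag.Tag == quoted);
    }
}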
Example #10
		public override void Respond(IHttpContext context)
		{
			using (context.Response.Streaming())
			{
				context.Response.ContentType = "application/json; charset=utf-8";

				var match = urlMatcher.Match(context.GetRequestUrl());
				var index = match.Groups[1].Value;

				var query = context.GetIndexQueryFromHttpContext(int.MaxValue);
				if (string.IsNullOrEmpty(context.Request.QueryString["pageSize"]))
					query.PageSize = int.MaxValue;
				var isHeadRequest = context.Request.HttpMethod == "HEAD";
				if (isHeadRequest)
					query.PageSize = 0;

				using (var writer = GetOutputWriter(context))
				{
					// we may be sending a LOT of documents to the user, and most 
					// of them aren't going to be relevant for other ops, so we are going to skip
					// the cache for that, to avoid filling it up very quickly
					using (DocumentCacher.SkipSettingDocumentsInDocumentCache())
					using (var cts = new CancellationTokenSource())
                    using (var timeout = cts.TimeoutAfter(Settings.DatabaseOperationTimeout))
					{
						Database.Query(index, query, cts.Token, information =>
						{
							context.Response.AddHeader("Raven-Result-Etag", information.ResultEtag.ToString());
							context.Response.AddHeader("Raven-Index-Etag", information.IndexEtag.ToString());
							context.Response.AddHeader("Raven-Is-Stale", information.IsStable ? "true" : "false");
							context.Response.AddHeader("Raven-Index", information.Index);
							context.Response.AddHeader("Raven-Total-Results", information.TotalResults.ToString(CultureInfo.InvariantCulture));
							context.Response.AddHeader("Raven-Index-Timestamp",
													   information.IndexTimestamp.ToString(Default.DateTimeFormatsToWrite,
																						   CultureInfo.InvariantCulture));

							if (isHeadRequest)
								return;
							writer.WriteHeader();
						}, o =>
						{
							timeout.Delay();
                            Database.WorkContext.UpdateFoundWork();
							writer.Write(o);
						});
					}
				}
			}
		}
Example #11
        private void StreamToClient(Stream stream, string startsWith, int start, int pageSize, Etag etag, string matches, int nextPageStart, string skipAfter)
        {
            var bufferStream = new BufferedStream(stream, 1024 * 64);
            using (var cts = new CancellationTokenSource())
            using (var timeout = cts.TimeoutAfter(DatabasesLandlord.SystemConfiguration.DatabaseOperationTimeout))
            using (var writer = new JsonTextWriter(new StreamWriter(bufferStream)))
            {
                writer.WriteStartObject();
                writer.WritePropertyName("Results");
                writer.WriteStartArray();

                Action<JsonDocument> addDocument = doc =>
                {
                    timeout.Delay();
                    doc.ToJson().WriteTo(writer);
                    writer.WriteRaw(Environment.NewLine);
                };

                Database.TransactionalStorage.Batch(accessor =>
                {
                    // we may be sending a LOT of documents to the user, and most 
                    // of them aren't going to be relevant for other ops, so we are going to skip
                    // the cache for that, to avoid filling it up very quickly
                    using (DocumentCacher.SkipSettingDocumentsInDocumentCache())
                    {
                        if (string.IsNullOrEmpty(startsWith))
                        {
                            Database.Documents.GetDocuments(start, pageSize, etag, cts.Token, addDocument);
                        }
                        else
                        {
                            var nextPageStartInternal = nextPageStart;

                            Database.Documents.GetDocumentsWithIdStartingWith(startsWith, matches, null, start, pageSize, cts.Token, ref nextPageStartInternal, addDocument, skipAfter: skipAfter);

                            nextPageStart = nextPageStartInternal;
                        }
                    }
                });

                writer.WriteEndArray();
                writer.WritePropertyName("NextPageStart");
                writer.WriteValue(nextPageStart);
                writer.WriteEndObject();
                writer.Flush();
                bufferStream.Flush();
            }
        }
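// The payload streamed above is one JSON object whose "Results" array is written
// incrementally, a document per line, followed by "NextPageStart". A hedged client-side
// sketch of consuming it with Json.NET's streaming reader, without buffering the whole body:
using System;
using System.IO;
using Newtonsoft.Json;
using Newtonsoft.Json.Linq;

public static class DocStreamReaderSketch
{
    public static void ReadResults(Stream responseStream, Action<JObject> onDocument)
    {
        using (var reader = new JsonTextReader(new StreamReader(responseStream)))
        {
            while (reader.Read())
            {
                // Depth 2 start-objects are the documents inside the "Results" array;
                // the "NextPageStart" value sits at depth 1 and is skipped here.
                if (reader.TokenType == JsonToken.StartObject && reader.Depth == 2)
                    onDocument(JObject.Load(reader));
            }
        }
    }
}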
        public HttpResponseMessage BulkDelete(string id)
        {
            // we don't use using because execution is async
            var cts = new CancellationTokenSource();
            var timeout = cts.TimeoutAfter(DatabasesLandlord.SystemConfiguration.DatabaseOperationTimeout);

            var indexDefinition = Database.IndexDefinitionStorage.GetIndexDefinition(id);
            if (indexDefinition == null)
                throw new IndexDoesNotExistsException(string.Format("Index '{0}' does not exist.", id));

            if (indexDefinition.IsMapReduce)
                throw new InvalidOperationException("Cannot execute DeleteByIndex operation on Map-Reduce indexes.");

            var databaseBulkOperations = new DatabaseBulkOperations(Database, GetRequestTransaction(), cts, timeout);
            return OnBulkOperation(databaseBulkOperations.DeleteByIndex, id, cts, timeout);
        }
Example #13
		public HttpResponseMessage IndexGet(string id)
		{
            using (var cts = new CancellationTokenSource())
            using (cts.TimeoutAfter(DatabasesLandlord.SystemConfiguration.DatabaseOperationTimeout))
            {
                var index = id;
                if (string.IsNullOrEmpty(GetQueryStringValue("definition")) == false) 
                    return GetIndexDefinition(index);

                if (string.IsNullOrEmpty(GetQueryStringValue("source")) == false) 
                    return GetIndexSource(index);

                if (string.IsNullOrEmpty(GetQueryStringValue("debug")) == false) 
                    return DebugIndex(index);

                if (string.IsNullOrEmpty(GetQueryStringValue("explain")) == false) 
                    return GetExplanation(index);

                return GetIndexQueryResult(index, cts.Token);
            }
		}
Example #14
        private void StreamToClient(Stream stream, ExportOptions options, Lazy<NameValueCollection> headers, IPrincipal user)
        {
            var old = CurrentOperationContext.Headers.Value;
            var oldUser = CurrentOperationContext.User.Value;
            try
            {
                CurrentOperationContext.Headers.Value = headers;
                CurrentOperationContext.User.Value = user;

                Database.TransactionalStorage.Batch(accessor =>
                {
                    var bufferStream = new BufferedStream(stream, 1024 * 64);

                    using (var cts = new CancellationTokenSource())
                    using (var timeout = cts.TimeoutAfter(DatabasesLandlord.SystemConfiguration.DatabaseOperationTimeout))
                    using (var streamWriter = new StreamWriter(bufferStream))
                    using (var writer = new JsonTextWriter(streamWriter))
                    {
                        writer.WriteStartObject();
                        writer.WritePropertyName("Results");
                        writer.WriteStartArray();

                        var exporter = new SmugglerExporter(Database, options);

                        exporter.Export(item => WriteToStream(writer, item, timeout), cts.Token);

                        writer.WriteEndArray();
                        writer.WriteEndObject();
                        writer.Flush();
                        bufferStream.Flush();
                    }
                });
            }
            finally
            {
                CurrentOperationContext.Headers.Value = old;
                CurrentOperationContext.User.Value = oldUser;
            }
        }
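// The save/restore of CurrentOperationContext above exists because the headers and user
// are ambient, thread-bound state, and the storage Batch callback runs with whatever the
// thread last held. A generic sketch of the same pattern as a disposable scope
// (illustrative; the RavenDB types are not reproduced here):
using System;
using System.Threading;

public sealed class AmbientScope<T> : IDisposable
{
    private readonly ThreadLocal<T> slot;
    private readonly T previous;

    public AmbientScope(ThreadLocal<T> slot, T value)
    {
        this.slot = slot;
        previous = slot.Value;   // remember the old ambient value
        slot.Value = value;      // install the new one for the duration of the scope
    }

    public void Dispose()
    {
        slot.Value = previous;   // restore, mirroring the finally block above
    }
}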
Example #15
		public HttpResponseMessage IndexGet(string id)
		{
            using (var cts = new CancellationTokenSource())
            using (cts.TimeoutAfter(DatabasesLandlord.SystemConfiguration.DatabaseOperationTimeout))
            {
                var index = id;
                if (string.IsNullOrEmpty(GetQueryStringValue("definition")) == false) 
                    return GetIndexDefinition(index);

                if (string.IsNullOrEmpty(GetQueryStringValue("source")) == false) 
                    return GetIndexSource(index);

                if (string.IsNullOrEmpty(GetQueryStringValue("debug")) == false) 
                    return DebugIndex(index);

                if (string.IsNullOrEmpty(GetQueryStringValue("explain")) == false) 
                    return GetExplanation(index);

                try
                {
                    return GetIndexQueryResult(index, cts.Token);
                }
                catch (OperationCanceledException e)
                {
                    throw new TimeoutException(string.Format("The query did not produce results in {0}", DatabasesLandlord.SystemConfiguration.DatabaseOperationTimeout), e);
                }
            }
		}
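// The catch above maps the cooperative cancellation raised by the timeout token onto a
// TimeoutException, so callers can tell "the query ran too long" apart from other
// cancellations. A hedged generic sketch of the same mapping (names are illustrative,
// not RavenDB APIs):
using System;
using System.Threading;

public static class TimeoutMappingSketch
{
    public static T RunWithTimeout<T>(Func<CancellationToken, T> operation, TimeSpan timeout)
    {
        // CancellationTokenSource(TimeSpan) cancels itself when the interval elapses.
        using (var cts = new CancellationTokenSource(timeout))
        {
            try
            {
                return operation(cts.Token);
            }
            catch (OperationCanceledException e)
            {
                throw new TimeoutException(
                    string.Format("The operation did not produce results in {0}", timeout), e);
            }
        }
    }
}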
        public HttpResponseMessage StreamQueryGet(string id)
		{
			var cts = new CancellationTokenSource();
			var timeout = cts.TimeoutAfter(DatabasesLandlord.SystemConfiguration.DatabaseOperationTimeout);
			var msg = GetEmptyMessage();

			var index = id;
			var query = GetIndexQuery(int.MaxValue);
			if (string.IsNullOrEmpty(GetQueryStringValue("pageSize"))) query.PageSize = int.MaxValue;
			var isHeadRequest = InnerRequest.Method == HttpMethod.Head;
			if (isHeadRequest) query.PageSize = 0;

			var accessor = Database.TransactionalStorage.CreateAccessor(); //accessor will be disposed in the StreamQueryContent.SerializeToStreamAsync!

			try
			{
				var queryOp = new QueryActions.DatabaseQueryOperation(Database, index, query, accessor, cts);
				queryOp.Init();
				msg.Content = new StreamQueryContent(InnerRequest, queryOp, accessor, timeout,
					mediaType => msg.Content.Headers.ContentType = new MediaTypeHeaderValue(mediaType) {CharSet = "utf-8"});

				msg.Headers.Add("Raven-Result-Etag", queryOp.Header.ResultEtag.ToString());
				msg.Headers.Add("Raven-Index-Etag", queryOp.Header.IndexEtag.ToString());
				msg.Headers.Add("Raven-Is-Stale", queryOp.Header.IsStale ? "true" : "false");
				msg.Headers.Add("Raven-Index", queryOp.Header.Index);
				msg.Headers.Add("Raven-Total-Results", queryOp.Header.TotalResults.ToString(CultureInfo.InvariantCulture));
				msg.Headers.Add(
					"Raven-Index-Timestamp", queryOp.Header.IndexTimestamp.ToString(Default.DateTimeFormatsToWrite, CultureInfo.InvariantCulture));


				if (IsCsvDownloadRequest(InnerRequest))
				{
					msg.Content.Headers.Add("Content-Disposition", "attachment; filename=export.csv");
				}
			}
			catch (Exception)
			{
				accessor.Dispose();
				throw;
			}

			return msg;
		}
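// StreamQueryGet above deliberately avoids `using`: the accessor and timeout must outlive
// the action method because the body is written later, when the framework serializes the
// content. A hedged sketch of an HttpContent that takes ownership and disposes those
// resources only after streaming finishes (StreamQueryContent itself is not shown in
// these snippets):
using System;
using System.IO;
using System.Net;
using System.Net.Http;
using System.Threading.Tasks;

public sealed class OwningStreamContent : HttpContent
{
    private readonly Action<Stream> writeBody;
    private readonly IDisposable[] owned;

    public OwningStreamContent(Action<Stream> writeBody, params IDisposable[] owned)
    {
        this.writeBody = writeBody;
        this.owned = owned;
    }

    protected override Task SerializeToStreamAsync(Stream stream, TransportContext context)
    {
        return Task.Run(() =>
        {
            try
            {
                writeBody(stream);
            }
            finally
            {
                // Dispose the accessor/timeout only after the body has been written.
                foreach (var d in owned)
                    d.Dispose();
            }
        });
    }

    protected override bool TryComputeLength(out long length)
    {
        length = -1; // chunked transfer; length is unknown up front
        return false;
    }
}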
		private void StreamToClient(long id, SubscriptionActions subscriptions, Stream stream)
		{
			var sentDocuments = false;

			using (var streamWriter = new StreamWriter(stream))
			using (var writer = new JsonTextWriter(streamWriter))
			{
				var options = subscriptions.GetBatchOptions(id);

				writer.WriteStartObject();
				writer.WritePropertyName("Results");
				writer.WriteStartArray();

				using (var cts = new CancellationTokenSource())
				using (var timeout = cts.TimeoutAfter(DatabasesLandlord.SystemConfiguration.DatabaseOperationTimeout))
				{
                    Etag lastProcessedDocEtag = null;

					var batchSize = 0;
					var batchDocCount = 0;
					var hasMoreDocs = false;

					var config = subscriptions.GetSubscriptionConfig(id);
					var startEtag = config.AckEtag;
					var criteria = config.Criteria;

                    Action<JsonDocument> addDocument = doc =>
                    {
                        timeout.Delay();

                        if (options.MaxSize.HasValue && batchSize >= options.MaxSize)
                            return;

                        if (batchDocCount >= options.MaxDocCount)
                            return;

                        lastProcessedDocEtag = doc.Etag;

                        if (doc.Key.StartsWith("Raven/", StringComparison.InvariantCultureIgnoreCase))
                            return;

                        if (MatchCriteria(criteria, doc) == false)
                            return;

                        doc.ToJson().WriteTo(writer);
                        writer.WriteRaw(Environment.NewLine);

                        batchSize += doc.SerializedSizeOnDisk;
                        batchDocCount++;
                    };

					do
					{
						Database.TransactionalStorage.Batch(accessor =>
						{
							// we may be sending a LOT of documents to the user, and most 
							// of them aren't going to be relevant for other ops, so we are going to skip
							// the cache for that, to avoid filling it up very quickly
							using (DocumentCacher.SkipSettingDocumentsInDocumentCache())
							{    
                                if (!string.IsNullOrWhiteSpace(criteria.KeyStartsWith))
                                {
                                    Database.Documents.GetDocumentsWithIdStartingWith(criteria.KeyStartsWith, options.MaxDocCount - batchDocCount, startEtag, cts.Token, addDocument);
                                }
                                else
                                {
                                    Database.Documents.GetDocuments(-1, options.MaxDocCount - batchDocCount, startEtag, cts.Token, addDocument);
                                }
							}

							if (lastProcessedDocEtag == null)
								hasMoreDocs = false;
							else
							{
								var lastDocEtag = accessor.Staleness.GetMostRecentDocumentEtag();
								hasMoreDocs = EtagUtil.IsGreaterThan(lastDocEtag, lastProcessedDocEtag);

								startEtag = lastProcessedDocEtag;
							}
						});
					} while (hasMoreDocs && batchDocCount < options.MaxDocCount && (options.MaxSize.HasValue == false || batchSize < options.MaxSize));

					writer.WriteEndArray();

					if (batchDocCount > 0)
					{
						writer.WritePropertyName("LastProcessedEtag");
						writer.WriteValue(lastProcessedDocEtag.ToString());

						sentDocuments = true;
					}

					writer.WriteEndObject();
					writer.Flush();
				}
			}

			if (sentDocuments)
				subscriptions.UpdateBatchSentTime(id);
		}
        private void StreamToClient(long id, SubscriptionActions subscriptions, Stream stream)
        {
            var sentDocuments = false;

            var bufferStream = new BufferedStream(stream, 1024 * 64);

            var lastBatchSentTime = Stopwatch.StartNew();
            using (var writer = new JsonTextWriter(new StreamWriter(bufferStream)))
            {
                var options = subscriptions.GetBatchOptions(id);

                writer.WriteStartObject();
                writer.WritePropertyName("Results");
                writer.WriteStartArray();

                using (var cts = new CancellationTokenSource())
                using (var timeout = cts.TimeoutAfter(DatabasesLandlord.SystemConfiguration.DatabaseOperationTimeout))
                {
                    Etag lastProcessedDocEtag = null;

                    var batchSize = 0;
                    var batchDocCount = 0;
                    var processedDocumentsCount = 0;
                    var hasMoreDocs = false;
                    var config = subscriptions.GetSubscriptionConfig(id);
                    var startEtag =  config.AckEtag;
                    var criteria = config.Criteria;

                    bool isPrefixCriteria = !string.IsNullOrWhiteSpace(criteria.KeyStartsWith);

                    Func<JsonDocument, bool> addDocument = doc =>
                    {
                        timeout.Delay();
                        if (doc == null)
                        {
                            // we only have this heartbeat when the streaming has gone on for a long time
                            // and we haven't sent anything to the user in a while (because of filtering, skipping, etc).
                            writer.WriteRaw(Environment.NewLine);
                            writer.Flush();
                            if (lastBatchSentTime.ElapsedMilliseconds > 30000)
                                return false;

                            return true;
                        }

                        processedDocumentsCount++;

                        // We can't continue because we have already maxed out the batch size in bytes.
                        if (options.MaxSize.HasValue && batchSize >= options.MaxSize)
                            return false;

                        // We can't continue because we have already maxed out the number of documents to send.
                        if (batchDocCount >= options.MaxDocCount)
                            return false;

                        // We can continue because we are ignoring system documents.
                        if (doc.Key.StartsWith("Raven/", StringComparison.InvariantCultureIgnoreCase))
                            return true;

                        // We can continue because we are ignoring the document as it doesn't fit the criteria.
                        if (MatchCriteria(criteria, doc) == false)
                            return true;

                        doc.ToJson().WriteTo(writer);
                        writer.WriteRaw(Environment.NewLine);

                        batchSize += doc.SerializedSizeOnDisk;
                        batchDocCount++;

                        return true; // We get the next document
                    };

                    var retries = 0;
                    do
                    {
                        var lastProcessedDocumentsCount = processedDocumentsCount;

                        Database.TransactionalStorage.Batch(accessor =>
                        {
                            // we may be sending a LOT of documents to the user, and most 
                            // of them aren't going to be relevant for other ops, so we are going to skip
                            // the cache for that, to avoid filling it up very quickly
                            using (DocumentCacher.SkipSetAndGetDocumentsInDocumentCache())
                            {
                                if (isPrefixCriteria)
                                {
                                    // If we don't get any document back from GetDocumentsWithIdStartingWith, we may be facing a long run of uninteresting documents and hitting a timeout.
                                    lastProcessedDocEtag = Database.Documents.GetDocumentsWithIdStartingWith(criteria.KeyStartsWith, options.MaxDocCount - batchDocCount, startEtag, cts.Token, addDocument);

                                    hasMoreDocs = false;
                                }
                                else
                                {
                                    // It doesn't matter if we match the criteria or not, the document has been already processed.
                                    lastProcessedDocEtag = Database.Documents.GetDocuments(-1, options.MaxDocCount - batchDocCount, startEtag, cts.Token, addDocument);

                                    // If we don't get any document from GetDocuments it may be a signal that something is wrong.
                                    if (lastProcessedDocEtag == null)
                                    {
                                        hasMoreDocs = false;
                                    }
                                    else
                                    {
                                        var lastDocEtag = accessor.Staleness.GetMostRecentDocumentEtag();
                                        hasMoreDocs = EtagUtil.IsGreaterThan(lastDocEtag, lastProcessedDocEtag);

                                        startEtag = lastProcessedDocEtag;
                                    }

                                    retries = lastProcessedDocumentsCount == batchDocCount ? retries : 0;
                                }
                            }							
                        });

                        if (lastBatchSentTime.ElapsedMilliseconds >= 30000)
                        {
                            if (batchDocCount == 0)
                                log.Warn("Subscription filtered out all possible documents for {0:#,#;;0} seconds in a row, stopping operation", lastBatchSentTime.Elapsed.TotalSeconds);
                            break;
                        }

                        if (lastProcessedDocumentsCount == processedDocumentsCount)
                        {
                            if (retries == 3)
                            {
                                log.Warn("Subscription processing did not end up replicating any documents for 3 times in a row, stopping operation", retries);
                            }
                            else
                            {
                                log.Warn("Subscription processing did not end up replicating any documents, due to possible storage error, retry number: {0}", retries);
                            }

                            retries++;
                        }
                    } while (retries < 3 && hasMoreDocs && batchDocCount < options.MaxDocCount && (options.MaxSize.HasValue == false || batchSize < options.MaxSize));

                    writer.WriteEndArray();

                    if (batchDocCount > 0 || processedDocumentsCount > 0 || isPrefixCriteria)
                    {
                        writer.WritePropertyName("LastProcessedEtag");
                        writer.WriteValue(lastProcessedDocEtag.ToString());

                        sentDocuments = true;
                    }

                    writer.WriteEndObject();
                    writer.Flush();

                    bufferStream.Flush();
                }
            }

            if (sentDocuments)
                subscriptions.UpdateBatchSentTime(id);
        }
        public async Task<HttpResponseMessage> BulkEval(string id)
        {
            RavenJObject advPatchRequestJson;

            try
            {
                advPatchRequestJson = await ReadJsonObjectAsync<RavenJObject>();
            }
            catch (InvalidOperationException e)
            {
                Log.DebugException("Failed to deserialize document batch request.", e);
                return GetMessageWithObject(new
                {
                    Message = "Could not understand json, please check its validity."
                }, (HttpStatusCode)422); // HTTP 422 - Unprocessable Entity
            }
            catch (InvalidDataException e)
            {
                Log.DebugException("Failed to deserialize document batch request.", e);
                return GetMessageWithObject(new
                {
                    e.Message
                }, (HttpStatusCode)422); // HTTP 422 - Unprocessable Entity
            }

            // we don't use using because execution is async
            var cts = new CancellationTokenSource();
            var timeout = cts.TimeoutAfter(DatabasesLandlord.SystemConfiguration.DatabaseOperationTimeout);

            var databaseBulkOperations = new DatabaseBulkOperations(Database, GetRequestTransaction(), cts, timeout);

            var advPatch = ScriptedPatchRequest.FromJson(advPatchRequestJson);
            return OnBulkOperation((index, query, options) => databaseBulkOperations.UpdateByIndex(index, query, advPatch, options), id, timeout);
        }
Example #20
		public override void Respond(IHttpContext context)
		{
			using (context.Response.Streaming())
			{
				context.Response.ContentType = "application/json; charset=utf-8";

				using (var cts = new CancellationTokenSource())
				using (var writer = new JsonTextWriter(new StreamWriter(context.Response.OutputStream)))
                using (var timeout = cts.TimeoutAfter(Settings.DatabaseOperationTimeout))
				{
					writer.WriteStartObject();
					writer.WritePropertyName("Results");
					writer.WriteStartArray();

					Database.TransactionalStorage.Batch(accessor =>
					{
						var startsWith = context.Request.QueryString["startsWith"];
						int pageSize = context.GetPageSize(int.MaxValue);
						if (string.IsNullOrEmpty(context.Request.QueryString["pageSize"]))
							pageSize = int.MaxValue;

						// we may be sending a LOT of documents to the user, and most 
						// of them aren't going to be relevant for other ops, so we are going to skip
						// the cache for that, to avoid filling it up very quickly
						using (DocumentCacher.SkipSettingDocumentsInDocumentCache())
						{
							if (string.IsNullOrEmpty(startsWith))
							{
								Database.GetDocuments(context.GetStart(), pageSize, context.GetEtagFromQueryString(),
									cts.Token,
									doc =>
									{
										timeout.Delay();
										doc.WriteTo(writer);
									});
							}
							else
							{
								Database.GetDocumentsWithIdStartingWith(
									startsWith,
									context.Request.QueryString["matches"],
									context.Request.QueryString["exclude"],
									context.GetStart(),
									pageSize,
									cts.Token,
									doc =>
									{
										timeout.Delay();
                                        Database.WorkContext.UpdateFoundWork();
										doc.WriteTo(writer);
									});
							}
						}
					});

					writer.WriteEndArray();
					writer.WriteEndObject();
					writer.Flush();
				}
			}
		}
        public async Task<HttpResponseMessage> BulkEval(string id)
        {
            // we don't use using because execution is async
            var cts = new CancellationTokenSource();
            var timeout = cts.TimeoutAfter(DatabasesLandlord.SystemConfiguration.DatabaseOperationTimeout);

            var databaseBulkOperations = new DatabaseBulkOperations(Database, GetRequestTransaction(), cts, timeout);
            var advPatchRequestJson = await ReadJsonObjectAsync<RavenJObject>();
            var advPatch = ScriptedPatchRequest.FromJson(advPatchRequestJson);
            return OnBulkOperation((index, query, allowStale) => databaseBulkOperations.UpdateByIndex(index, query, advPatch, allowStale), id, cts, timeout);
        }
        public bool TimerCallback()
        {
            if (Database.Disposed)
            {
                return false;
            }

            if (Monitor.TryEnter(locker) == false)
                return false;

            try
            {
                DateTime currentTime = SystemTime.UtcNow;
                string nowAsStr = currentTime.GetDefaultRavenFormat();
                logger.Debug("Trying to find expired documents to delete");
                var query = "Expiry:[* TO " + nowAsStr + "]";

                var list = new List<string>();
                int start = 0;
                while (true)
                {
                    const int pageSize = 1024;

                    QueryResultWithIncludes queryResult;
                    using (var cts = new CancellationTokenSource())
                    using (Database.DisableAllTriggersForCurrentThread())
                    using (cts.TimeoutAfter(TimeSpan.FromMinutes(5)))
                    {
                        queryResult = Database.Queries.Query(RavenDocumentsByExpirationDate, new IndexQuery
                        {
                            Start = start,
                            PageSize = pageSize,
                            Cutoff = currentTime,
                            Query = query,
                            FieldsToFetch = new[] { "__document_id" }
                        }, cts.Token);
                    }

                    if (queryResult.Results.Count == 0)
                        break;

                    list.AddRange(queryResult.Results.Select(result => result.Value<string>("__document_id")).Where(x => string.IsNullOrEmpty(x) == false));

                    if (queryResult.Results.Count < pageSize)
                        break;

                    start += pageSize;

                    if (Database.Disposed)
                        return false;
                }

                if (list.Count == 0)
                    return true;

                logger.Debug(
                    () => string.Format("Deleting {0} expired documents: [{1}]", list.Count, string.Join(", ", list)));

                foreach (var id in list)
                {
                    Database.Documents.Delete(id, null, null);

                    if (Database.Disposed)
                        return false;
                }
            }
            catch (Exception e)
            {
                logger.ErrorException("Error when trying to find expired documents", e);
            }
            finally
            {
                Monitor.Exit(locker);
            }
            return true;
        }
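// The expiry sweep above queries a precomputed index with a Lucene range over an "Expiry"
// field. A hedged sketch of such an index definition, modeled on the RavenDB expiration
// bundle convention of storing the deadline in Raven-Expiration-Date metadata (the exact
// index behind the RavenDocumentsByExpirationDate constant is not shown here):
using Raven.Abstractions.Indexing;

public static class ExpirationIndexSketch
{
    public static IndexDefinition Build()
    {
        return new IndexDefinition
        {
            // Only documents carrying an expiration date enter the index,
            // so the "Expiry:[* TO now]" range query above stays small.
            Map = @"from doc in docs
                    let expiry = doc[""@metadata""][""Raven-Expiration-Date""]
                    where expiry != null
                    select new { Expiry = expiry }"
        };
    }
}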
        public async Task<HttpResponseMessage> BulkInsertPost()
        {
            if (string.IsNullOrEmpty(GetQueryStringValue("no-op")) == false)
            {
                // this is a no-op request which is there just to force the client HTTP layer to handle the authentication
                // only used for legacy clients
                return GetEmptyMessage();
            }
            if ("generate-single-use-auth-token".Equals(GetQueryStringValue("op"), StringComparison.InvariantCultureIgnoreCase))
            {
                // using windows auth with anonymous access = none sometimes generates a 401 even though we made two requests
                // instead of relying on windows auth, which requires request buffering, we generate a one-time token and return it.
                // we KNOW that the user has access to this db for writing, since they got here, so there is no issue in generating
                // a single use token for them.

                var authorizer = (MixedModeRequestAuthorizer)Configuration.Properties[typeof(MixedModeRequestAuthorizer)];

                var token = authorizer.GenerateSingleUseAuthToken(DatabaseName, User);
                return GetMessageWithObject(new
                {
                    Token = token
                });
            }

            if (HttpContext.Current != null)
                HttpContext.Current.Server.ScriptTimeout = 60 * 60 * 6; // six hours should do it, I think.

            var options = new BulkInsertOptions
            {
                OverwriteExisting = GetOverwriteExisting(),
                CheckReferencesInIndexes = GetCheckReferencesInIndexes(),
				SkipOverwriteIfUnchanged = GetSkipOverwriteIfUnchanged()
            };

            var operationId = ExtractOperationId();
            var sp = Stopwatch.StartNew();

            var status = new BulkInsertStatus();
            status.IsTimedOut = false;

            var documents = 0;
            var mre = new ManualResetEventSlim(false);
            var tre = new CancellationTokenSource();
            
            var inputStream = await InnerRequest.Content.ReadAsStreamAsync().ConfigureAwait(false);
            var currentDatabase = Database;
            var timeout = tre.TimeoutAfter(currentDatabase.Configuration.BulkImportBatchTimeout);
            var user = CurrentOperationContext.User.Value;
            var headers = CurrentOperationContext.Headers.Value;
            Exception error = null;
            var task = Task.Factory.StartNew(() =>
            {
                try
                {
                    CurrentOperationContext.User.Value = user;
                    CurrentOperationContext.Headers.Value = headers;
                    currentDatabase.Documents.BulkInsert(options, YieldBatches(timeout, inputStream, mre, batchSize => documents += batchSize), operationId, tre.Token, timeout);
                }
				catch (InvalidDataException e)
				{
					status.Faulted = true;
					status.State = RavenJObject.FromObject(new { Error = "Could not understand json.", InnerError = e.SimplifyException().Message });
					status.IsSerializationError = true;
					error = e;
				}
				catch (OperationCanceledException)
                {
                    // happens on timeout
                    currentDatabase.Notifications.RaiseNotifications(new BulkInsertChangeNotification { OperationId = operationId, Message = "Operation cancelled, likely because of a batch timeout", Type = DocumentChangeTypes.BulkInsertError });
                    status.IsTimedOut = true;
                    status.Faulted = true;
                }
                catch (Exception e)
                {
                    status.Faulted = true;
                    status.State = RavenJObject.FromObject(new { Error = e.SimplifyException().Message });
                    error = e;
                }
                finally
                {
                    status.Completed = true;
                    status.Documents = documents;
	                CurrentOperationContext.User.Value = null;
	                CurrentOperationContext.Headers.Value = null;

					timeout.Dispose();
                }
			}, tre.Token);

            long id;
            Database.Tasks.AddTask(task, status, new TaskActions.PendingTaskDescription
                                                 {
                                                     StartTime = SystemTime.UtcNow,
                                                     TaskType = TaskActions.PendingTaskType.BulkInsert,
                                                     Payload = operationId.ToString()
                                                 }, out id, tre);

            await task;

            if (error != null)
            {
				var httpStatusCode = status.IsSerializationError ? (HttpStatusCode)422 : HttpStatusCode.InternalServerError;
	            return GetMessageWithObject(new
                {
                    error.Message,
                    Error = error.ToString()
				}, httpStatusCode);
            }
            if (status.IsTimedOut)
                throw new TimeoutException("Bulk insert operation did not receive new data for longer than the configured threshold");

            sp.Stop();

            AddRequestTraceInfo(log => log.AppendFormat("\tBulk inserted received {0:#,#;;0} documents in {1}, task #: {2}", documents, sp.Elapsed, id));

            return GetMessageWithObject(new
            {
                OperationId = id
            });
        }
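// Hedged client-side sketch of the single-use token handshake handled above: the client
// completes the auth negotiation on a small request, then attaches the returned token to
// the large, unbuffered bulk-insert request. The endpoint path and the
// "Single-Use-Auth-Token" header name follow RavenDB convention but are assumptions here;
// verify them against your server version. client.BaseAddress is assumed to point at the
// server root.
using System.Net.Http;
using System.Threading.Tasks;
using Newtonsoft.Json.Linq;

public static class BulkInsertAuthSketch
{
    public static async Task<HttpResponseMessage> BulkInsertAsync(HttpClient client, HttpContent batchBody)
    {
        // Step 1: windows auth (which needs request buffering) happens on this tiny request.
        var tokenResponse = await client.PostAsync(
            "databases/Example/bulkInsert?op=generate-single-use-auth-token",
            new StringContent(string.Empty));
        var token = JObject.Parse(await tokenResponse.Content.ReadAsStringAsync())["Token"].Value<string>();

        // Step 2: the big streaming request authenticates with the one-time token instead.
        var request = new HttpRequestMessage(HttpMethod.Post, "databases/Example/bulkInsert")
        {
            Content = batchBody
        };
        request.Headers.Add("Single-Use-Auth-Token", token);
        return await client.SendAsync(request, HttpCompletionOption.ResponseHeadersRead);
    }
}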
Example #24
        private void GetIndexQueryResult(IHttpContext context, string index)
        {
            Etag indexEtag;
            QueryResultWithIncludes queryResult;
	        using (var cts = new CancellationTokenSource())
	        {
                cts.TimeoutAfter(Settings.DatabaseOperationTimeout);
		        queryResult = ExecuteQuery(context, index, cts.Token, out indexEtag);
	        }
	        if (queryResult == null)
                return;

            var includes = context.Request.QueryString.GetValues("include") ?? new string[0];
            var loadedIds = new HashSet<string>(
                queryResult.Results
                    .Where(x => x["@metadata"] != null)
                    .Select(x => x["@metadata"].Value<string>("@id"))
                    .Where(x => x != null)
                );
            var command = new AddIncludesCommand(Database, GetRequestTransaction(context),
                                                 (etag, doc) => queryResult.Includes.Add(doc), includes, loadedIds);
            foreach (var result in queryResult.Results)
            {
                command.Execute(result);
            }
            command.AlsoInclude(queryResult.IdsToInclude);

            context.WriteETag(indexEtag);
            if (queryResult.NonAuthoritativeInformation)
                context.SetStatusToNonAuthoritativeInformation();

            context.WriteJson(queryResult);
        }
        private void TimerCallback(object state)
        {
            if (executing)
                return;

            executing = true;
            try
            {
                var currentTime = SystemTime.UtcNow;
                var currentExpiryThresholdTime = currentTime.AddHours(-Settings.HoursToKeepMessagesBeforeExpiring);
                var expiryThresholdAsStr = currentExpiryThresholdTime.ToString(Default.DateTimeFormatsToWrite, CultureInfo.InvariantCulture);
                logger.Debug("Trying to find expired documents to delete...");
                var query = "(Status:3 OR Status:4) AND ProcessedAt:[* TO " + expiryThresholdAsStr + "]";

                var list = new List<string>();
                var start = 0;
                while (true)
                {
                    const int pageSize = 1024;

                    QueryResultWithIncludes queryResult;
                    using (var cts = new CancellationTokenSource())
                    using (Database.DisableAllTriggersForCurrentThread())
                    {
                        cts.TimeoutAfter(TimeSpan.FromMinutes(5));
                        queryResult = Database.Query(indexName, new IndexQuery
                        {
                            Start = start,
                            PageSize = pageSize,
                            Cutoff = currentTime,
                            Query = query,
                            FieldsToFetch = new[] { "__document_id" }
                        } , cts.Token);
                    }

                    if (queryResult.Results.Count == 0)
                        break;

                    list.AddRange(queryResult.Results.Select(result => result.Value<string>("__document_id")).Where(x => string.IsNullOrEmpty(x) == false));

                    if (queryResult.Results.Count < pageSize)
                        break;

                    start += pageSize;
                }

                if (list.Count == 0)
                {
                    logger.Debug("No expired documents found");
                    return;
                }

                logger.Debug(() => string.Format("Deleting {0} expired documents: [{1}]", list.Count, string.Join(", ", list)));

                foreach (var id in list)
                {
                    Database.Delete(id, null, null);
                }
            }
            catch (Exception e)
            {
                logger.ErrorException("Error when trying to find expired documents", e);
            }
            finally
            {
                executing = false;
            }

        }
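// The bool `executing` flag above is a best-effort reentrancy guard: two timer callbacks
// racing on it could both observe false and proceed. A hedged sketch of a race-free
// variant of the same guard using Interlocked (the same intent as the Monitor.TryEnter
// approach used by the other TimerCallback in this collection):
using System.Threading;

public sealed class NonReentrantTimerCallback
{
    private int executing; // 0 = idle, 1 = running

    public void TimerCallback(object state)
    {
        // Atomically claim the slot; lose the race and we simply skip this tick.
        if (Interlocked.CompareExchange(ref executing, 1, 0) != 0)
            return;

        try
        {
            // ... do the expiry sweep ...
        }
        finally
        {
            Interlocked.Exchange(ref executing, 0);
        }
    }
}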