Example #1
		public override void Respond(IHttpContext context)
		{
			if (string.IsNullOrEmpty(context.Request.QueryString["no-op"]) == false)
			{
				// this is a no-op request which is there just to force the client HTTP layer to handle the authentication
				// only used for legacy clients
				return; 
			}
			if ("generate-single-use-auth-token".Equals(context.Request.QueryString["op"], StringComparison.InvariantCultureIgnoreCase))
			{
				// using windows auth with anonymous access = none sometimes generates a 401 even though we made two requests.
				// instead of relying on windows auth, which requires request buffering, we generate a one-time token and return it.
				// we KNOW that the user has write access to this db, since they got here, so there is no issue in generating
				// a single-use token for them.
				var token = server.RequestAuthorizer.GenerateSingleUseAuthToken(Database, context.User);
				context.WriteJson(new
				{
					Token = token
				});
				return;
			}

			if (HttpContext.Current != null)
			{
				HttpContext.Current.Server.ScriptTimeout = 60*60*6; // six hours should do it, I think.
			}
			var options = new BulkInsertOptions
			{
				CheckForUpdates = context.GetCheckForUpdates(),
				CheckReferencesInIndexes = context.GetCheckReferencesInIndexes()
			};

			var operationId = ExtractOperationId(context);
			var sp = Stopwatch.StartNew();

			var status = new BulkInsertStatus();

			int documents = 0;
			var mre = new ManualResetEventSlim(false);

			var currentDatabase = Database;
			var task = Task.Factory.StartNew(() =>
			{
				currentDatabase.BulkInsert(options, YieldBatches(context, mre, batchSize => documents += batchSize), operationId);
				status.Documents = documents;
				status.Completed = true;
			});

			long id;
			Database.AddTask(task, status, out id);

			mre.Wait(Database.WorkContext.CancellationToken);

			context.Log(log => log.Debug("\tBulk insert received {0:#,#;;0} documents in {1}, task #: {2}", documents, sp.Elapsed, id));

			context.WriteJson(new
			{
				OperationId = id
			});
		}
Example #2
		public ChunkedRemoteBulkInsertOperation(BulkInsertOptions options, AsyncServerClient client, IDatabaseChanges changes, int chunkSize)
		{
			this.options = options;
			this.client = client;
			this.changes = changes;
			this.chunkSize = chunkSize;
		}
Example #3
		public override void Respond(IHttpContext context)
		{
			if (string.IsNullOrEmpty(context.Request.QueryString["no-op"]) == false)
			{
				// this is a no-op request which is there just to force the client HTTP layer
				// to handle the authentication
				return; 
			}

			var options = new BulkInsertOptions
			{
				CheckForUpdates = context.GetCheckForUpdates(),
				CheckReferencesInIndexes = context.GetCheckReferencesInIndexes()
			};

			var sp = Stopwatch.StartNew();

			var documents = Database.BulkInsert(options, YieldBatches(context));

			context.Log(log => log.Debug("\tBulk inserted {0:#,#;;0} documents in {1}", documents, sp.Elapsed));

			context.WriteJson(new
			{
				Documents = documents
			});
		}
Example #4
 public ChunkedRemoteBulkInsertOperation(BulkInsertOptions options, AsyncServerClient client, IDatabaseChanges changes)
 {
     this.options = options;
     this.client = client;
     this.changes = changes;			
     currentChunkSize = 0;
     current = GetBulkInsertOperation();
 }
Example #5
		public ChunkedRemoteBulkInsertOperation(BulkInsertOptions options, AsyncServerClient client, IDatabaseChanges changes, int chunkSize, long? documentSizeInChunkLimit = null)
		{
			this.options = options;
			this.client = client;
			this.changes = changes;
			this.chunkSize = chunkSize;
			this.documentSizeInChunkLimit = documentSizeInChunkLimit;
			documentSizeInChunk = 0;
		}
Example #6
		/// <summary>
		/// Creates a new instance of this class
		/// </summary>
		public EmbeddedBulkInsertOperation(DocumentDatabase database, BulkInsertOptions options)
		{
			this.options = options;
			queue = new BlockingCollection<JsonDocument>(options.BatchSize * 8);
			doBulkInsert = Task.Factory.StartNew(() =>
			{
				database.BulkInsert(options, YieldDocuments());
			});
		}
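YieldDocuments is not shown in this listing; a minimal sketch of the consumer side it implies, assuming the queue is completed (queue.CompleteAdding()) when the operation is disposed:

		private IEnumerable<IEnumerable<JsonDocument>> YieldDocuments()
		{
			var batch = new List<JsonDocument>(options.BatchSize);
			// GetConsumingEnumerable blocks until items arrive and ends once CompleteAdding is called
			foreach (var document in queue.GetConsumingEnumerable())
			{
				batch.Add(document);
				if (batch.Count < options.BatchSize)
					continue;
				yield return batch;
				batch = new List<JsonDocument>(options.BatchSize);
			}
			if (batch.Count > 0)
				yield return batch; // flush the final, partially filled batch
		}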
Example #7
		public ChunkedRemoteBulkInsertOperation(BulkInsertOptions options, AsyncServerClient client, IDatabaseChanges changes, int chunkSize, long? documentSizeInChunkLimit = null)
		{
			this.options = options;
			this.client = client;
			this.changes = changes;
			this.chunkSize = chunkSize;
			this.documentSizeInChunkLimit = documentSizeInChunkLimit;
			documentSizeInChunk = 0;
			if(documentSizeInChunkLimit.HasValue)
				Console.WriteLine("Limit of document size in chunk = " + documentSizeInChunkLimit.Value);
		}
Example #8
		public BulkInsertOperation(string database, IDocumentStore documentStore, DocumentSessionListeners listeners, BulkInsertOptions options)
		{
			this.documentStore = documentStore;
			databaseCommands = database == null
				                   ? documentStore.DatabaseCommands.ForSystemDatabase()
				                   : documentStore.DatabaseCommands.ForDatabase(database);

			generateEntityIdOnTheClient = new GenerateEntityIdOnTheClient(documentStore, entity => documentStore.Conventions.GenerateDocumentKey(database, databaseCommands, entity));
			operation = databaseCommands.GetBulkInsertOperation(options);
			entityToJson = new EntityToJson(documentStore, listeners);
		}
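Application code does not call this constructor directly; it goes through IDocumentStore.BulkInsert. A hedged usage sketch (the User class is a placeholder, not part of this listing):

		using (var bulkInsert = documentStore.BulkInsert())
		{
			for (var i = 0; i < 100 * 1000; i++)
				bulkInsert.Store(new User { Name = "user " + i }); // documents are batched and streamed, not sent one request at a time
		}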
Example #9
 public ShardedBulkInsertOperation(string database, ShardedDocumentStore shardedDocumentStore, BulkInsertOptions options)
 {
     this.database = database;
     this.shardedDocumentStore = shardedDocumentStore;
     this.options = options;
     shards = shardedDocumentStore.ShardStrategy.Shards;
     Bulks = new Dictionary<string, BulkInsertOperation>();
     generateEntityIdOnTheClient = new GenerateEntityIdOnTheClient(shardedDocumentStore.Conventions,
         entity => AsyncHelpers.RunSync(() => shardedDocumentStore.Conventions.GenerateDocumentKeyAsync(database, DatabaseCommands, entity)));
     shardResolutionStrategy = shardedDocumentStore.ShardStrategy.ShardResolutionStrategy;
     shardStrategy = this.shardedDocumentStore.ShardStrategy;
 }
Example #10
		public override void Respond(IHttpContext context)
		{
			if (string.IsNullOrEmpty(context.Request.QueryString["no-op"]) == false)
			{
				// this is a no-op request which is there just to force the client HTTP layer
				// to handle the authentication
				return; 
			}
			if (HttpContext.Current != null)
			{
				HttpContext.Current.Server.ScriptTimeout = 60*60*6; // six hours should do it, I think.
			}
			var options = new BulkInsertOptions
			{
				CheckForUpdates = context.GetCheckForUpdates(),
				CheckReferencesInIndexes = context.GetCheckReferencesInIndexes()
			};

			var sp = Stopwatch.StartNew();

			var status = new RavenJObject
			{
				{"Documents", 0},
				{"Completed", false}
			};

			int documents = 0;
			var mre = new ManualResetEventSlim(false);

			var currentDatabase = Database;
			var task = Task.Factory.StartNew(() =>
			{
				documents = currentDatabase.BulkInsert(options, YieldBatches(context, mre));
				status["Documents"] = documents;
				status["Completed"] = true;
			});

			long id;
			Database.AddTask(task, status, out id);

			mre.Wait(Database.WorkContext.CancellationToken);

			context.Log(log => log.Debug("\tBulk insert received {0:#,#;;0} documents in {1}, task #: {2}", documents, sp.Elapsed, id));

			context.WriteJson(new
			{
				OperationId = id
			});
		}
Example #11
		public BulkInsertOperation(string database, IDocumentStore documentStore, DocumentSessionListeners listeners, BulkInsertOptions options, IDatabaseChanges changes)
		{
			this.documentStore = documentStore;

			database = database ?? MultiDatabase.GetDatabaseName(documentStore.Url);

			// Fitzchak: this should never be null because of the line above; please refactor.
			DatabaseCommands = database == null
				? documentStore.AsyncDatabaseCommands.ForSystemDatabase()
				: documentStore.AsyncDatabaseCommands.ForDatabase(database);

			generateEntityIdOnTheClient = new GenerateEntityIdOnTheClient(documentStore.Conventions, entity => documentStore.Conventions.GenerateDocumentKeyAsync(database, DatabaseCommands, entity).ResultUnwrap());
			Operation = GetBulkInsertOperation(options, DatabaseCommands, changes);
			entityToJson = new EntityToJson(documentStore, listeners);
		}
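The changes argument lets observers follow the operation as it runs; a hedged sketch of subscribing to bulk-insert notifications, assuming the IDatabaseChanges.ForBulkInsert API of the same client version (operationId stands for the operation's Guid):

		changes.ForBulkInsert(operationId)
			.Subscribe(notification =>
			{
				if (notification.Type == DocumentChangeTypes.BulkInsertError)
					Console.WriteLine("bulk insert error on {0}: {1}", notification.Id, notification.Message);
			});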
Example #12
		public RemoteBulkInsertOperation(BulkInsertOptions options, ServerClient client)
		{
			this.client = client;
			items = new BlockingCollection<RavenJObject>(options.BatchSize*8);
			string requestUrl = "/bulkInsert?";
			if (options.CheckForUpdates)
				requestUrl += "checkForUpdates=true";
			if (options.CheckReferencesInIndexes)
				requestUrl += "&checkReferencesInIndexes=true";

			// this will force the HTTP layer to authenticate, meaning that our next request won't have to
			HttpJsonRequest req = client.CreateRequest("POST", requestUrl + "&no-op=for-auth-only",
			                                           disableRequestCompression: true);
			req.ExecuteRequest();


			httpJsonRequest = client.CreateRequest("POST", requestUrl, disableRequestCompression: true);
			// the request may take a long time to process, so we need to set a large timeout value
			httpJsonRequest.Timeout = TimeSpan.FromHours(6); 
			nextTask = httpJsonRequest.GetRawRequestStream()
			                          .ContinueWith(task =>
			                          {
				                          Stream requestStream = task.Result;
				                          while (true)
				                          {
					                          var batch = new List<RavenJObject>();
					                          RavenJObject item;
					                          while (items.TryTake(out item, 200))
					                          {
					                          if (item == null) // null marker: flush what we have and stop
						                          {
							                          FlushBatch(requestStream, batch);
							                          return;
						                          }
						                          batch.Add(item);
						                          if (batch.Count >= options.BatchSize)
							                          break;
					                          }
					                          FlushBatch(requestStream, batch);
				                          }
			                          });
		}
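The loop above drains a producer that is not shown; a hedged sketch of that side, with member names as assumptions rather than quotes from this listing:

		public void Write(RavenJObject data)
		{
			items.Add(data); // blocks once the BatchSize * 8 buffer is full, throttling the producer to the flush rate
		}

		public void Dispose()
		{
			items.Add(null); // the end-of-stream marker the consumer loop checks for
			nextTask.Wait(); // let the final FlushBatch reach the server before returning
		}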
Example #13
        public ActionResult Index()
        {
            var sw = Stopwatch.StartNew();
            var data = new string('a', 2000);
            var options = new BulkInsertOptions() { CheckForUpdates = true, BatchSize = 2048};

            using (var bulkInsert = MvcApplication.DocumentStore.BulkInsert(options: options))
            {
                for (int i = 0; i < Quantity; i++)
                {
                    bulkInsert.Store(new Document { Data = data });
                }
            }

            sw.Stop();

            var result = string.Format("Initialized {0:n0} documents in {1:n2} minutes. {2} per second",
                                        Quantity,
                                        sw.Elapsed.TotalMinutes,
                                        Math.Round(Quantity / sw.Elapsed.TotalSeconds));
            return Content(result);
        }
Example #14
        protected virtual void BeginBulkInsert(Action<BulkInsertOperation> bulkInsertAction)
        {
            if (bulkInsertAction == null)
            {
                return;
            }

            var options = new Raven.Abstractions.Data.BulkInsertOptions()
            {
                OverwriteExisting = true
            };

            using (var bulkInsert = _documentSession.Advanced.DocumentStore.BulkInsert(null, options))
            {
                bulkInsertAction(bulkInsert);

                if (Specific.BulkInsertOptions.IsWaitForLastTaskToFinish)
                {
                    bulkInsert.WaitForLastTaskToFinish().Wait();
                }
            }
        }
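A hedged usage sketch of the hook above (the Customer class is a placeholder):

        BeginBulkInsert(bulkInsert =>
        {
            for (var i = 1; i <= 500; i++)
                bulkInsert.Store(new Customer { Name = "customer " + i });
        });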
Example #15
		public RemoteBulkInsertOperation(BulkInsertOptions options, ServerClient client)
		{
			this.options = options;
			this.client = client;
			items = new BlockingCollection<RavenJObject>(options.BatchSize*8);
			string requestUrl = "/bulkInsert?";
			if (options.CheckForUpdates)
				requestUrl += "checkForUpdates=true";
			if (options.CheckReferencesInIndexes)
				requestUrl += "&checkReferencesInIndexes=true";

			var expect100Continue = client.Expect100Continue();

			// this will force the HTTP layer to authenticate, meaning that our next request won't have to
			HttpJsonRequest req = client.CreateRequest("POST", requestUrl + "&op=generate-single-use-auth-token",
														disableRequestCompression: true);
			var token = req.ReadResponseJson();


			httpJsonRequest = client.CreateRequest("POST", requestUrl, disableRequestCompression: true);
			// the request may take a long time to process, so we need to set a large timeout value
			httpJsonRequest.PrepareForLongRequest();
			httpJsonRequest.AddOperationHeader("Single-Use-Auth-Token", token.Value<string>("Token"));
			nextTask = httpJsonRequest.GetRawRequestStream()
			                          .ContinueWith(task =>
			                          {
				                          try
				                          {
					                          expect100Continue.Dispose();
				                          }
				                          catch (Exception)
				                          {
					                          // best effort: failure to dispose the Expect100Continue scope must not fail the insert
				                          }
										  WriteQueueToServer(task);
			                          });
		}
Example #16
		public ShardedBulkInsertOperation ShardedBulkInsert(string database = null, ShardedDocumentStore store = null, BulkInsertOptions options = null)
		{
			return new ShardedBulkInsertOperation(database, this, options ?? new BulkInsertOptions());
		}
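A hedged usage sketch, assuming ShardedBulkInsertOperation exposes the same Store/Dispose surface as BulkInsertOperation (shardedStore stands for an initialized ShardedDocumentStore; Profile is a placeholder class):

		using (var bulkInsert = shardedStore.ShardedBulkInsert())
		{
			bulkInsert.Store(new Profile { Name = "example" }); // routed to a shard by the ShardResolutionStrategy
		}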
Example #17
 public BulkInsertOperation BulkInsert(string database = null, BulkInsertOptions options = null)
 {
     return Inner.BulkInsert(database, options);
 }
Example #18
		public abstract BulkInsertOperation BulkInsert(string database = null, BulkInsertOptions options = null);
Example #19
 public BulkInsertOperation BulkInsert(string database = null, BulkInsertOptions options = null)
 {
     return server.DocumentStore.BulkInsert(database, options);
 }
Example #20
        public void cache_leftovers_after_documents_bulk_insert(string requestedStorage)
        {
            using (var store = NewRemoteDocumentStore(true, requestedStorage: requestedStorage))
            {
                store.DatabaseCommands.Admin.StopIndexing();
                Assert.Equal(0, GetCachedItemsCount(store));

                var bulkOptions = new BulkInsertOptions
                {
                    OverwriteExisting = true
                };
                using (var bulk = store.BulkInsert(options: bulkOptions))
                {
                    bulk.Store(new Order { CompanyId = "companies/1" });
                    bulk.Store(new Order { CompanyId = "companies/1" });
                }
                // not in cache until we load the documents
                Assert.Equal(0, GetCachedItemsCount(store));

                using (var session = store.OpenSession())
                {
                    session.Load<Order>("orders/1");
                    Assert.Equal(1, GetCachedItemsCount(store));

                    session.Load<Order>("orders/2");
                    Assert.Equal(2, GetCachedItemsCount(store));
                }

                using (var session = store.OpenSession())
                {
                    var order = session.Load<Order>("orders/1");
                    order.CompanyId = "companies/2";
                    using (var bulk = store.BulkInsert(options: bulkOptions))
                        bulk.Store(order);

                    Assert.Equal(1, GetCachedItemsCount(store));
                }

                using (var session = store.OpenSession())
                {
                    session.Load<Order>("orders/1");
                    Assert.Equal(2, GetCachedItemsCount(store));
                }

                using (var session = store.OpenSession())
                {
                    var order = session.Load<Order>("orders/2");
                    order.CompanyId = "companies/2";
                    using (var bulk = store.BulkInsert(options: bulkOptions))
                        bulk.Store(order);

                    Assert.Equal(1, GetCachedItemsCount(store));
                }

                using (var session = store.OpenSession())
                {
                    session.Load<Order>("orders/2");
                    Assert.Equal(2, GetCachedItemsCount(store));
                }
            }
        }
Example #21
		public int BulkInsert(BulkInsertOptions options, IEnumerable<IEnumerable<JsonDocument>> docBatches)
		{
			var documents = 0;
			TransactionalStorage.Batch(accessor =>
			{
				RaiseNotifications(new DocumentChangeNotification
				{
					Type = DocumentChangeTypes.BulkInsertStarted
				});
				foreach (var docs in docBatches)
				{
					WorkContext.CancellationToken.ThrowIfCancellationRequested();
					lock (putSerialLock)
					{
						var inserts = 0;
						var batch = 0;
						var keys = new HashSet<string>(StringComparer.OrdinalIgnoreCase);
						foreach (var doc in docs)
						{
							if (options.CheckReferencesInIndexes)
								keys.Add(doc.Key);
							documents++;
							batch++;
							AssertPutOperationNotVetoed(doc.Key, doc.Metadata, doc.DataAsJson, null);
							foreach (var trigger in PutTriggers)
							{
								trigger.Value.OnPut(doc.Key, doc.DataAsJson, doc.Metadata, null);
							}
							var result = accessor.Documents.InsertDocument(doc.Key, doc.DataAsJson, doc.Metadata, options.CheckForUpdates);
							if (result.Updated == false)
								inserts++;
							foreach (var trigger in PutTriggers)
							{
								trigger.Value.AfterPut(doc.Key, doc.DataAsJson, doc.Metadata, result.Etag, null);
							}
						}
						if (options.CheckReferencesInIndexes)
						{
							foreach (var key in keys)
							{
								CheckReferenceBecauseOfDocumentUpdate(key, accessor);
							}
						}
						accessor.Documents.IncrementDocumentCount(inserts);
						accessor.General.PulseTransaction();
						workContext.ShouldNotifyAboutWork(() => "BulkInsert batch of " + batch + " docs");
						workContext.NotifyAboutWork(); // forcing notification so we would start indexing right away
					}
				}
				RaiseNotifications(new DocumentChangeNotification
				{
					Type = DocumentChangeTypes.BulkInsertEnded
				});
				if (documents == 0)
					return;
				workContext.ShouldNotifyAboutWork(() => "BulkInsert of " + documents + " docs");
			});
			return documents;
		}
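A minimal sketch of driving this embedded API directly (database stands for a running DocumentDatabase; the document content is a placeholder):

		var docs = new[]
		{
			new JsonDocument
			{
				Key = "items/1",
				DataAsJson = RavenJObject.FromObject(new { Name = "example" }),
				Metadata = new RavenJObject { { "Raven-Entity-Name", new RavenJValue("Items") } }
			}
		};
		var inserted = database.BulkInsert(new BulkInsertOptions(), new[] { docs }); // one outer batch containing one document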
Example #22
		public override BulkInsertOperation BulkInsert(string database = null, BulkInsertOptions options = null)
		{
			return new BulkInsertOperation(database, this, Listeners, options ?? new BulkInsertOptions(), Changes(database));
		}
Example #23
		public int BulkInsert(BulkInsertOptions options, IEnumerable<IEnumerable<JsonDocument>> docBatches)
		{
			var documents = 0;
			TransactionalStorage.Batch(accessor =>
			{
				RaiseNotifications(new DocumentChangeNotification
				{
					Type = DocumentChangeTypes.BulkInsertStarted
				}, null);
				foreach (var docs in docBatches)
				{
					WorkContext.CancellationToken.ThrowIfCancellationRequested();
					lock (putSerialLock)
					{
						var inserts = 0;
						var batch = 0;
						var keys = new HashSet<string>(StringComparer.InvariantCultureIgnoreCase);
						foreach (var doc in docs)
						{
							RemoveReservedProperties(doc.DataAsJson);
							RemoveMetadataReservedProperties(doc.Metadata);
		
							if (options.CheckReferencesInIndexes)
								keys.Add(doc.Key);
							documents++;
							batch++;
							AssertPutOperationNotVetoed(doc.Key, doc.Metadata, doc.DataAsJson, null);
							foreach (var trigger in PutTriggers)
							{
								trigger.Value.OnPut(doc.Key, doc.DataAsJson, doc.Metadata, null);
							}
							var result = accessor.Documents.InsertDocument(doc.Key, doc.DataAsJson, doc.Metadata, options.CheckForUpdates);
							if (result.Updated == false)
								inserts++;

							doc.Metadata.EnsureSnapshot("Metadata was written to the database, cannot modify the document after it was written (changes won't show up in the db). Did you forget to call CreateSnapshot() to get a clean copy?");
							doc.DataAsJson.EnsureSnapshot("Document was written to the database, cannot modify the document after it was written (changes won't show up in the db). Did you forget to call CreateSnapshot() to get a clean copy?");

							foreach (var trigger in PutTriggers)
							{
								trigger.Value.AfterPut(doc.Key, doc.DataAsJson, doc.Metadata, result.Etag, null);
							}
						}
						if (options.CheckReferencesInIndexes)
						{
							foreach (var key in keys)
							{
								CheckReferenceBecauseOfDocumentUpdate(key, accessor);
							}
						}
						accessor.Documents.IncrementDocumentCount(inserts);
						accessor.General.PulseTransaction();
						workContext.ShouldNotifyAboutWork(() => "BulkInsert batch of " + batch + " docs");
						workContext.NotifyAboutWork(); // forcing notification so we would start indexing right away
					}
				}
				RaiseNotifications(new DocumentChangeNotification
				{
					Type = DocumentChangeTypes.BulkInsertEnded
				}, null);
				if (documents == 0)
					return;
				workContext.ShouldNotifyAboutWork(() => "BulkInsert of " + documents + " docs");
			});
			return documents;
		}
Example #24
        private static void StoreTrialData(string databaseName, int numberOfTrials)
        {
            var watch = new Stopwatch();

            LogMessage("----------------------------------------------------------------------------------------------------------");
            LogMessage(string.Format("StoreTrialData({0}, {1})", databaseName, numberOfTrials));

            LogMessage(string.Format("Generating {0} records for insertion", numberOfTrials));
            watch.Start();
            var trialBatch = new List<TrialData>();
            var random = new Random();
            for (var i = 1; i <= numberOfTrials; i++)
            {
                var parameters = new List<Parameter> {new Parameter { Name = "GBP.TotalReturnIndex", TrialNumber = 1, Value = random.NextDouble() },
                                                      new Parameter { Name = "GBP.TotalReturnIndex", TrialNumber = 2, Value = random.NextDouble() },
                                                      new Parameter { Name = "GBP.TotalReturnIndex", TrialNumber = 3, Value = random.NextDouble() }};
                var trialData = new TrialData { StressNumber = i, Parameters = parameters.ToArray() };

                trialBatch.Add(trialData);
            }
            watch.Stop();
            LogMessage(string.Format("Data generated in {0}:{1}:{2}.{3}", watch.Elapsed.Hours, watch.Elapsed.Minutes, watch.Elapsed.Seconds, watch.Elapsed.Milliseconds));

            LogMessage(string.Format("Connecting to RavenDB instance: {0}", serverUrl));
            IDocumentStore documentStore = new DocumentStore { Url = serverUrl };

            LogMessage("Initializing document store");
            documentStore.Initialize();

            watch.Reset();
            watch.Start();
            LogMessage("Starting bulk insert");
            var bulkInsertOptions = new BulkInsertOptions
            {
                BatchSize = 10000,
                CheckForUpdates = false,
                CheckReferencesInIndexes = false
            };
            using (var bulkInsert = documentStore.BulkInsert(databaseName, bulkInsertOptions))
            {
                foreach (var item in trialBatch)
                    bulkInsert.Store(item);
            }

            watch.Stop();
            LogMessage(string.Format("Inserted {0} records in {1}:{2}:{3}.{4}", numberOfTrials, watch.Elapsed.Hours, watch.Elapsed.Minutes, watch.Elapsed.Seconds, watch.Elapsed.Milliseconds));
            LogMessage("----------------------------------------------------------------------------------------------------------");
        }
Example #25
 protected virtual ILowLevelBulkInsertOperation GetBulkInsertOperation(BulkInsertOptions options, IAsyncDatabaseCommands commands, IDatabaseChanges changes)
 {
     return commands.GetBulkInsertOperation(options, changes);
 }
Example #26
        public int BulkInsert(BulkInsertOptions options, IEnumerable<IEnumerable<JsonDocument>> docBatches, Guid operationId, CancellationToken token)
        {
            var documents = 0;
            TransactionalStorage.Batch(accessor =>
            {
                Database.Notifications.RaiseNotifications(new BulkInsertChangeNotification
                {
                    OperationId = operationId,
                    Type = DocumentChangeTypes.BulkInsertStarted
                });
                using (var cts = CancellationTokenSource.CreateLinkedTokenSource(token, WorkContext.CancellationToken))
                foreach (var docs in docBatches)
                {
                    cts.Token.ThrowIfCancellationRequested();

                    using (Database.DocumentLock.Lock())
                    {
                        var inserts = 0;
                        var batch = 0;
                        var keys = new HashSet<string>(StringComparer.OrdinalIgnoreCase);
                        var collectionsAndEtags = new Dictionary<string, Etag>(StringComparer.OrdinalIgnoreCase);

                        var docsToInsert = docs.ToArray();

                        foreach (var doc in docsToInsert)
                        {
                            try
                            {
                                if (string.IsNullOrEmpty(doc.Key))
                                    throw new InvalidOperationException("Cannot bulk insert a document without a key");

                                RemoveReservedProperties(doc.DataAsJson);
                                RemoveMetadataReservedProperties(doc.Metadata);

                                if (options.CheckReferencesInIndexes)
                                    keys.Add(doc.Key);
                                documents++;
                                batch++;
                                AssertPutOperationNotVetoed(doc.Key, doc.Metadata, doc.DataAsJson, null);

                                if (options.OverwriteExisting && options.SkipOverwriteIfUnchanged)
                                {
                                    var existingDoc = accessor.Documents.DocumentByKey(doc.Key);

                                    if (IsTheSameDocument(doc, existingDoc))
                                        continue;
                                }

                                foreach (var trigger in Database.PutTriggers)
                                {
                                    trigger.Value.OnPut(doc.Key, doc.DataAsJson, doc.Metadata, null);
                                }

                                var result = accessor.Documents.InsertDocument(doc.Key, doc.DataAsJson, doc.Metadata, options.OverwriteExisting);
                                if (result.Updated == false)
                                    inserts++;

                                doc.Etag = result.Etag;

                                doc.Metadata.EnsureSnapshot(
                                "Metadata was written to the database, cannot modify the document after it was written (changes won't show up in the db). Did you forget to call CreateSnapshot() to get a clean copy?");
                                doc.DataAsJson.EnsureSnapshot(
                                "Document was written to the database, cannot modify the document after it was written (changes won't show up in the db). Did you forget to call CreateSnapshot() to get a clean copy?");

                                var entityName = doc.Metadata.Value<string>(Constants.RavenEntityName);

                                Etag highestEtagInCollection;
                                if (string.IsNullOrEmpty(entityName) == false && (collectionsAndEtags.TryGetValue(entityName, out highestEtagInCollection) == false ||
                                    result.Etag.CompareTo(highestEtagInCollection) > 0))
                                {
                                    collectionsAndEtags[entityName] = result.Etag;
                                }

                                foreach (var trigger in Database.PutTriggers)
                                {
                                    trigger.Value.AfterPut(doc.Key, doc.DataAsJson, doc.Metadata, result.Etag, null);
                                }
                            }
                            catch (Exception e)
                            {
                                Database.Notifications.RaiseNotifications(new BulkInsertChangeNotification
                                {
                                    OperationId = operationId,
                                    Message = e.Message,
                                    Etag = doc.Etag,
                                    Id = doc.Key,
                                    Type = DocumentChangeTypes.BulkInsertError
                                });

                                throw;
                            }
                        }

                        if (options.CheckReferencesInIndexes)
                        {
                            foreach (var key in keys)
                            {
                                Database.Indexes.CheckReferenceBecauseOfDocumentUpdate(key, accessor);
                            }
                        }

                        accessor.Documents.IncrementDocumentCount(inserts);
                        accessor.General.PulseTransaction();

                        foreach (var collectionEtagPair in collectionsAndEtags)
                        {
                            Database.LastCollectionEtags.Update(collectionEtagPair.Key, collectionEtagPair.Value);
                        }

                        WorkContext.ShouldNotifyAboutWork(() => "BulkInsert batch of " + batch + " docs");
                        WorkContext.NotifyAboutWork(); // forcing notification so we would start indexing right away
                        WorkContext.UpdateFoundWork();
                    }
                }

                Database.Notifications.RaiseNotifications(new BulkInsertChangeNotification
                {
                    OperationId = operationId,
                    Type = DocumentChangeTypes.BulkInsertEnded
                });
                if (documents == 0)
                    return;
                WorkContext.ShouldNotifyAboutWork(() => "BulkInsert of " + documents + " docs");
            });
            return documents;
        }
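IsTheSameDocument is referenced above but not included in this listing; a plausible sketch that compares document data and non-system metadata (an assumption, not the shipped implementation):

        private static bool IsTheSameDocument(JsonDocument doc, JsonDocument existingDoc)
        {
            if (existingDoc == null)
                return false;
            if (RavenJToken.DeepEquals(doc.DataAsJson, existingDoc.DataAsJson) == false)
                return false;

            // compare metadata minus the system-maintained keys, which differ on every write
            var newMetadata = (RavenJObject)doc.Metadata.CloneToken();
            var oldMetadata = (RavenJObject)existingDoc.Metadata.CloneToken();
            newMetadata.Remove("@etag");
            oldMetadata.Remove("@etag");
            newMetadata.Remove("Last-Modified");
            oldMetadata.Remove("Last-Modified");
            return RavenJToken.DeepEquals(newMetadata, oldMetadata);
        }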
Example #27
        public int BulkInsert(BulkInsertOptions options, IEnumerable<IEnumerable<JsonDocument>> docBatches, Guid operationId)
        {
            var documents = 0;
            TransactionalStorage.Batch(accessor =>
            {
                Database.Notifications.RaiseNotifications(new BulkInsertChangeNotification
                {
                    OperationId = operationId,
                    Type = DocumentChangeTypes.BulkInsertStarted
                });
                foreach (var docs in docBatches)
                {
                    WorkContext.CancellationToken.ThrowIfCancellationRequested();

                    using (Database.DocumentLock.Lock())
                    {
                        var inserts = 0;
                        var batch = 0;
                        var keys = new HashSet<string>(StringComparer.OrdinalIgnoreCase);

                        var docsToInsert = docs.ToArray();

                        foreach (var doc in docsToInsert)
                        {
                            try
                            {
                                RemoveReservedProperties(doc.DataAsJson);
                                RemoveMetadataReservedProperties(doc.Metadata);

                                if (options.CheckReferencesInIndexes)
                                    keys.Add(doc.Key);
                                documents++;
                                batch++;
                                AssertPutOperationNotVetoed(doc.Key, doc.Metadata, doc.DataAsJson, null);
                                foreach (var trigger in Database.PutTriggers)
                                {
                                    trigger.Value.OnPut(doc.Key, doc.DataAsJson, doc.Metadata, null);
                                }
                                var result = accessor.Documents.InsertDocument(doc.Key, doc.DataAsJson, doc.Metadata, options.OverwriteExisting);
                                if (result.Updated == false)
                                    inserts++;

                                doc.Etag = result.Etag;

                                doc.Metadata.EnsureSnapshot(
                                "Metadata was written to the database, cannot modify the document after it was written (changes won't show up in the db). Did you forget to call CreateSnapshot() to get a clean copy?");
                                doc.DataAsJson.EnsureSnapshot(
                                "Document was written to the database, cannot modify the document after it was written (changes won't show up in the db). Did you forget to call CreateSnapshot() to get a clean copy?");


                                foreach (var trigger in Database.PutTriggers)
                                {
                                    trigger.Value.AfterPut(doc.Key, doc.DataAsJson, doc.Metadata, result.Etag, null);
                                }
                            }
                            catch (Exception e)
                            {
                                Database.Notifications.RaiseNotifications(new BulkInsertChangeNotification
                                {
                                    OperationId = operationId,
                                    Message = e.Message,
                                    Etag = doc.Etag,
                                    Id = doc.Key,
                                    Type = DocumentChangeTypes.BulkInsertError
                                });

                                throw;
                            }
                        }

                        if (options.CheckReferencesInIndexes)
                        {
                            foreach (var key in keys)
                            {
                                Database.Indexes.CheckReferenceBecauseOfDocumentUpdate(key, accessor);
                            }
                        }

                        accessor.Documents.IncrementDocumentCount(inserts);
                        accessor.General.PulseTransaction();

                        WorkContext.ShouldNotifyAboutWork(() => "BulkInsert batch of " + batch + " docs");
                        WorkContext.NotifyAboutWork(); // forcing notification so we would start indexing right away
                    }
                }

                Database.Notifications.RaiseNotifications(new BulkInsertChangeNotification
                {
                    OperationId = operationId,
                    Type = DocumentChangeTypes.BulkInsertEnded
                });
                if (documents == 0)
                    return;
                WorkContext.ShouldNotifyAboutWork(() => "BulkInsert of " + documents + " docs");
            });
            return documents;
        }
Example #28
        public async Task<HttpResponseMessage> BulkInsertPost()
        {
            if (string.IsNullOrEmpty(GetQueryStringValue("no-op")) == false)
            {
                // this is a no-op request which is there just to force the client HTTP layer to handle the authentication
                // only used for legacy clients
                return GetEmptyMessage();
            }
            if ("generate-single-use-auth-token".Equals(GetQueryStringValue("op"), StringComparison.InvariantCultureIgnoreCase))
            {
                // using windows auth with anonymous access = none sometimes generates a 401 even though we made two requests.
                // instead of relying on windows auth, which requires request buffering, we generate a one-time token and return it.
                // we KNOW that the user has write access to this db, since they got here, so there is no issue in generating
                // a single-use token for them.

                var authorizer = (MixedModeRequestAuthorizer)Configuration.Properties[typeof(MixedModeRequestAuthorizer)];

                var token = authorizer.GenerateSingleUseAuthToken(DatabaseName, User);
                return GetMessageWithObject(new
                {
                    Token = token
                });
            }

            if (HttpContext.Current != null)
                HttpContext.Current.Server.ScriptTimeout = 60 * 60 * 6; // six hours should do it, I think.

            var options = new BulkInsertOptions
            {
                OverwriteExisting = GetOverwriteExisting(),
                CheckReferencesInIndexes = GetCheckReferencesInIndexes(),
                SkipOverwriteIfUnchanged = GetSkipOverwriteIfUnchanged()
            };

            var operationId = ExtractOperationId();
            var sp = Stopwatch.StartNew();

            var status = new BulkInsertStatus();
            status.IsTimedOut = false;

            var documents = 0;
            var mre = new ManualResetEventSlim(false);
            var tre = new CancellationTokenSource();
            
            var inputStream = await InnerRequest.Content.ReadAsStreamAsync().ConfigureAwait(false);
            var currentDatabase = Database;
            var timeout = tre.TimeoutAfter(currentDatabase.Configuration.BulkImportBatchTimeout);
            var user = CurrentOperationContext.User.Value;
            var headers = CurrentOperationContext.Headers.Value;
            Exception error = null;
            var task = Task.Factory.StartNew(() =>
            {
                try
                {
                    CurrentOperationContext.User.Value = user;
                    CurrentOperationContext.Headers.Value = headers;
                    currentDatabase.Documents.BulkInsert(options, YieldBatches(timeout, inputStream, mre, batchSize => documents += batchSize), operationId, tre.Token, timeout);
                }
                catch (InvalidDataException e)
                {
                    status.Faulted = true;
                    status.State = RavenJObject.FromObject(new { Error = "Could not understand json.", InnerError = e.SimplifyException().Message });
                    status.IsSerializationError = true;
                    error = e;
                }
                catch (OperationCanceledException)
                {
                    // happens on timeout
                    currentDatabase.Notifications.RaiseNotifications(new BulkInsertChangeNotification { OperationId = operationId, Message = "Operation cancelled, likely because of a batch timeout", Type = DocumentChangeTypes.BulkInsertError });
                    status.IsTimedOut = true;
                    status.Faulted = true;
                }
                catch (Exception e)
                {
                    status.Faulted = true;
                    status.State = RavenJObject.FromObject(new { Error = e.SimplifyException().Message });
                    error = e;
                }
                finally
                {
                    status.Completed = true;
                    status.Documents = documents;
                    CurrentOperationContext.User.Value = null;
                    CurrentOperationContext.Headers.Value = null;

                    timeout.Dispose();
                }
            }, tre.Token);

            long id;
            Database.Tasks.AddTask(task, status, new TaskActions.PendingTaskDescription
                                                 {
                                                     StartTime = SystemTime.UtcNow,
                                                     TaskType = TaskActions.PendingTaskType.BulkInsert,
                                                     Payload = operationId.ToString()
                                                 }, out id, tre);

            await task;

            if (error != null)
            {
                var httpStatusCode = status.IsSerializationError ? (HttpStatusCode)422 : HttpStatusCode.InternalServerError;
                return GetMessageWithObject(new
                {
                    error.Message,
                    Error = error.ToString()
                }, httpStatusCode);
            }
            if (status.IsTimedOut)
                throw new TimeoutException("Bulk insert operation did not receive new data for longer than the configured threshold");

            sp.Stop();

            AddRequestTraceInfo(log => log.AppendFormat("\tBulk insert received {0:#,#;;0} documents in {1}, task #: {2}", documents, sp.Elapsed, id));

            return GetMessageWithObject(new
            {
                OperationId = id
            });
        }
Example #29
		protected override ILowLevelBulkInsertOperation GetBulkInsertOperation(BulkInsertOptions options, IAsyncDatabaseCommands commands, IDatabaseChanges changes)
		{
			return null; // ugly code: unused here, the chunked ctor below assigns its own operation
		}
Example #30
		public ChunkedBulkInsertOperation(string database, IDocumentStore documentStore, DocumentSessionListeners listeners, BulkInsertOptions options, IDatabaseChanges changes, int chunkSize)
			: base(database, documentStore, listeners, options, changes)
		{
			Operation = new ChunkedRemoteBulkInsertOperation(options, (AsyncServerClient)DatabaseCommands, changes, chunkSize);
		}
Example #31
		public override BulkInsertOperation BulkInsert(string database = null, BulkInsertOptions options = null)
		{
			throw new NotSupportedException("Cannot use BulkInsert with a sharded store; use ShardedBulkInsert instead");
		}