/// <summary>
/// Handles a bulk-insert HTTP request. Three modes, in order:
/// a legacy "no-op" authentication probe (answered empty), an on-demand
/// single-use auth token ("op=generate-single-use-auth-token"), and the
/// actual bulk insert, which runs as a tracked background task whose
/// operation id is returned to the caller.
/// </summary>
/// <param name="context">The HTTP request/response context.</param>
public override void Respond(IHttpContext context)
{
	if (string.IsNullOrEmpty(context.Request.QueryString["no-op"]) == false)
	{
		// this is a no-op request which is there just to force the client HTTP layer to handle the authentication
		// only used for legacy clients
		return;
	}

	// Ordinal comparison: the op name is a fixed ASCII identifier, not linguistic text (CA1309).
	if ("generate-single-use-auth-token".Equals(context.Request.QueryString["op"], StringComparison.OrdinalIgnoreCase))
	{
		// using windows auth with anonymous access = none sometimes generate a 401 even though we made two requests
		// instead of relying on windows auth, which require request buffering, we generate a one time token and return it.
		// we KNOW that the user have access to this db for writing, since they got here, so there is no issue in generating
		// a single use token for them.
		var token = server.RequestAuthorizer.GenerateSingleUseAuthToken(Database, context.User);
		context.WriteJson(new { Token = token });
		return;
	}

	if (HttpContext.Current != null)
	{
		HttpContext.Current.Server.ScriptTimeout = 60 * 60 * 6; // six hours should do it, I think.
	}

	var options = new BulkInsertOptions
	{
		CheckForUpdates = context.GetCheckForUpdates(),
		CheckReferencesInIndexes = context.GetCheckReferencesInIndexes()
	};

	var operationId = ExtractOperationId(context);
	var sp = Stopwatch.StartNew();
	var status = new BulkInsertStatus();

	int documents = 0;
	var mre = new ManualResetEventSlim(false);

	// Capture the database reference so the background task keeps targeting
	// the database that received this request.
	var currentDatabase = Database;
	var task = Task.Factory.StartNew(() =>
	{
		// The batch callback runs on the bulk-insert task thread while this
		// request thread may read `documents` after mre is signaled, so the
		// increment must be atomic (plain += is a read-modify-write race).
		currentDatabase.BulkInsert(options,
			YieldBatches(context, mre, batchSize => Interlocked.Add(ref documents, batchSize)),
			operationId);
		status.Documents = documents;
		status.Completed = true;
	});

	long id;
	Database.AddTask(task, status, out id);

	// Block until the event is signaled (presumably by YieldBatches once it
	// starts consuming the request stream — confirm) or until the database
	// work context is cancelled.
	mre.Wait(Database.WorkContext.CancellationToken);

	context.Log(log => log.Debug("\tBulk inserted received {0:#,#;;0} documents in {1}, task #: {2}", documents, sp.Elapsed, id));
	context.WriteJson(new { OperationId = id });
}
/// <summary>
/// Runs a bulk insert synchronously on the request thread and reports the
/// number of documents written. A request carrying the "no-op" query
/// parameter is answered empty; it exists only so the client HTTP layer can
/// complete authentication.
/// </summary>
/// <param name="context">The HTTP request/response context.</param>
public override void Respond(IHttpContext context)
{
	if (!string.IsNullOrEmpty(context.Request.QueryString["no-op"]))
	{
		// this is a no-op request which is there just to force the client HTTP layer
		// to handle the authentication
		return;
	}

	var bulkInsertOptions = new BulkInsertOptions
	{
		CheckForUpdates = context.GetCheckForUpdates(),
		CheckReferencesInIndexes = context.GetCheckReferencesInIndexes()
	};

	var stopwatch = Stopwatch.StartNew();
	var insertedCount = Database.BulkInsert(bulkInsertOptions, YieldBatches(context));

	context.Log(log => log.Debug("\tBulk inserted {0:#,#;;0} documents in {1}", insertedCount, stopwatch.Elapsed));
	context.WriteJson(new { Documents = insertedCount });
}
/// <summary>
/// Starts a bulk insert as a tracked background task and returns the task's
/// operation id. Progress is published through a RavenJObject status object
/// registered alongside the task. A request carrying the "no-op" query
/// parameter is answered empty; it exists only so the client HTTP layer can
/// complete authentication.
/// </summary>
/// <param name="context">The HTTP request/response context.</param>
public override void Respond(IHttpContext context)
{
	if (!string.IsNullOrEmpty(context.Request.QueryString["no-op"]))
	{
		// this is a no-op request which is there just to force the client HTTP layer
		// to handle the authentication
		return;
	}

	if (HttpContext.Current != null)
	{
		HttpContext.Current.Server.ScriptTimeout = 60 * 60 * 6; // six hours should do it, I think.
	}

	var bulkInsertOptions = new BulkInsertOptions
	{
		CheckForUpdates = context.GetCheckForUpdates(),
		CheckReferencesInIndexes = context.GetCheckReferencesInIndexes()
	};

	var stopwatch = Stopwatch.StartNew();
	var status = new RavenJObject
	{
		{ "Documents", 0 },
		{ "Completed", false }
	};

	int documents = 0;
	var waitHandle = new ManualResetEventSlim(false);

	// Capture the database reference so the background task keeps targeting
	// the database that received this request.
	var database = Database;
	var task = Task.Factory.StartNew(() =>
	{
		documents = database.BulkInsert(bulkInsertOptions, YieldBatches(context, waitHandle));
		status["Documents"] = documents;
		status["Completed"] = true;
	});

	long id;
	Database.AddTask(task, status, out id);

	// Block until the event is signaled (presumably by YieldBatches once it
	// starts consuming the request stream — confirm) or until the database
	// work context is cancelled.
	waitHandle.Wait(Database.WorkContext.CancellationToken);

	context.Log(log => log.Debug("\tBulk inserted received {0:#,#;;0} documents in {1}, task #: {2}", documents, stopwatch.Elapsed, id));
	context.WriteJson(new { OperationId = id });
}
/// <summary>
/// Starts a bulk insert as a tracked background task and returns the task's
/// operation id. Progress is published through a RavenJObject status object
/// registered alongside the task.
/// </summary>
/// <param name="context">The HTTP request/response context.</param>
public override void Respond(IHttpContext context)
{
	if (string.IsNullOrEmpty(context.Request.QueryString["no-op"]) == false)
	{
		// this is a no-op request which is there just to force the client HTTP layer
		// to handle the authentication
		return;
	}

	if (HttpContext.Current != null)
	{
		// Extend the ASP.NET request timeout so a long-running upload is not cut off.
		HttpContext.Current.Server.ScriptTimeout = 60*60*6; // six hours should do it, I think.
	}

	var options = new BulkInsertOptions
	{
		CheckForUpdates = context.GetCheckForUpdates(),
		CheckReferencesInIndexes = context.GetCheckReferencesInIndexes()
	};

	var sp = Stopwatch.StartNew();

	// Mutable status object shared with the task registry so observers can poll progress.
	var status = new RavenJObject
	{
		{"Documents", 0},
		{"Completed", false}
	};

	int documents = 0;
	var mre = new ManualResetEventSlim(false);

	// Capture the database reference so the background task keeps using the
	// database that received this request.
	var currentDatbase = Database;
	var task = Task.Factory.StartNew(() =>
	{
		documents = currentDatbase.BulkInsert(options, YieldBatches(context, mre));
		status["Documents"] = documents;
		status["Completed"] = true;
	});

	long id;
	Database.AddTask(task, status, out id);

	// Block until the event is signaled (presumably by YieldBatches once it
	// starts consuming the request stream — confirm) or until the database
	// work context is cancelled. NOTE(review): `documents` may still be being
	// written by the task when the log line below reads it.
	mre.Wait(Database.WorkContext.CancellationToken);

	context.Log(log => log.Debug("\tBulk inserted received {0:#,#;;0} documents in {1}, task #: {2}", documents, sp.Elapsed, id));
	context.WriteJson(new { OperationId = id });
}
/// <summary>
/// Entry point for bulk-insert requests. Answers legacy "no-op" auth probes,
/// issues a single-use auth token when "op=generate-single-use-auth-token" is
/// requested, and otherwise starts the bulk insert as a tracked background
/// task whose operation id is returned to the caller.
/// </summary>
/// <param name="context">The HTTP request/response context.</param>
public override void Respond(IHttpContext context)
{
	if (string.IsNullOrEmpty(context.Request.QueryString["no-op"]) == false)
	{
		// this is a no-op request which is there just to force the client HTTP layer to handle the authentication
		// only used for legacy clients
		return;
	}

	if ("generate-single-use-auth-token".Equals(context.Request.QueryString["op"], StringComparison.InvariantCultureIgnoreCase))
	{
		// using windows auth with anonymous access = none sometimes generate a 401 even though we made two requests
		// instead of relying on windows auth, which require request buffering, we generate a one time token and return it.
		// we KNOW that the user have access to this db for writing, since they got here, so there is no issue in generating
		// a single use token for them.
		var token = server.RequestAuthorizer.GenerateSingleUseAuthToken(Database, context.User);
		context.WriteJson(new { Token = token });
		return;
	}

	if (HttpContext.Current != null)
	{
		HttpContext.Current.Server.ScriptTimeout = 60 * 60 * 6; // six hours should do it, I think.
	}

	var bulkInsertOptions = new BulkInsertOptions
	{
		CheckForUpdates = context.GetCheckForUpdates(),
		CheckReferencesInIndexes = context.GetCheckReferencesInIndexes()
	};

	var operationId = ExtractOperationId(context);
	var stopwatch = Stopwatch.StartNew();
	var status = new BulkInsertStatus();

	int documents = 0;
	var signal = new ManualResetEventSlim(false);

	// Capture the database reference so the background task keeps targeting
	// the database that received this request.
	var database = Database;
	var task = Task.Factory.StartNew(() =>
	{
		database.BulkInsert(bulkInsertOptions,
			YieldBatches(context, signal, batchSize => documents += batchSize),
			operationId);
		status.Documents = documents;
		status.Completed = true;
	});

	long id;
	Database.AddTask(task, status, out id);

	// Block until the event is signaled (presumably by YieldBatches once it
	// starts consuming the request stream — confirm) or until the database
	// work context is cancelled.
	signal.Wait(Database.WorkContext.CancellationToken);

	context.Log(log => log.Debug("\tBulk inserted received {0:#,#;;0} documents in {1}, task #: {2}", documents, stopwatch.Elapsed, id));
	context.WriteJson(new { OperationId = id });
}