/// <summary>
/// Captures the serializable parts of a stored procedure response
/// (result sets, output parameters and return value).
/// </summary>
/// <param name="spResponse">The stored procedure response to copy from; must not be null.</param>
/// <exception cref="ArgumentNullException">Thrown when <paramref name="spResponse"/> is null.</exception>
public SerializableResponseData(StoredProcedureResponse spResponse)
{
    if (spResponse == null)
    {
        // Fix: nameof instead of a hard-coded parameter-name string (refactor-safe).
        throw new ArgumentNullException(nameof(spResponse));
    }

    ResultSets = spResponse.ResultSets;
    OutputParameters = spResponse.OutputParameters;
    ReturnValue = spResponse.ReturnValue;
}
/// <summary>
/// Verifies that a stored procedure can receive its parameters as a raw JSON
/// array stream and that the concatenated result comes back in the response body.
/// </summary>
public async Task ExecuteTestWithMultipleStreamParameters()
{
    // Register a sproc that echoes the concatenation of its three parameters.
    string sprocId = Guid.NewGuid().ToString();
    string sprocBody = @"function(param1, param2, param3) { var context = getContext(); var response = context.getResponse(); response.setBody(param1+param2+param3); }";

    StoredProcedureResponse storedProcedureResponse = await this.scripts.CreateStoredProcedureAsync(new StoredProcedureProperties(sprocId, sprocBody));
    Assert.AreEqual(HttpStatusCode.Created, storedProcedureResponse.StatusCode);
    StoredProcedureTests.ValidateStoredProcedureSettings(sprocId, sprocBody, storedProcedureResponse);

    // Insert a document so the target partition exists before executing against it.
    string testPartitionId = Guid.NewGuid().ToString();
    var payload = new { id = testPartitionId, user = testPartitionId };
    ItemResponse<dynamic> createItemResponse = await this.container.CreateItemAsync<dynamic>(payload);
    Assert.AreEqual(HttpStatusCode.Created, createItemResponse.StatusCode);

    // Fix: dispose the payload stream and the response message instead of leaking them.
    using (MemoryStream streamPayload = new MemoryStream(Encoding.UTF8.GetBytes(@"[""one"",""two"",""three""]")))
    using (ResponseMessage response = await this.scripts.ExecuteStoredProcedureStreamAsync(
        storedProcedureId: sprocId,
        streamPayload: streamPayload,
        partitionKey: new Cosmos.PartitionKey(testPartitionId),
        requestOptions: null,
        cancellationToken: default(CancellationToken)))
    using (StreamReader reader = new StreamReader(response.Content))
    {
        string text = await reader.ReadToEndAsync();
        Assert.AreEqual(@"""onetwothree""", text);
    }

    StoredProcedureResponse deleteResponse = await this.scripts.DeleteStoredProcedureAsync(sprocId);
    Assert.AreEqual(HttpStatusCode.NoContent, deleteResponse.StatusCode);
}
/// <summary>
/// HTTP PUT endpoint ("account/{id}"): updates an account document through the
/// UpdateAccountDetails stored procedure, then refreshes the account summary and
/// the account listing via two further stored procedures, and returns the
/// updated account.
/// </summary>
/// <param name="request">The incoming HTTP request; the body is the account JSON.</param>
/// <param name="client">DocumentDB client bound to the Rodrap50/Financials collection.</param>
/// <param name="logger">Function logger.</param>
/// <returns>200 OK with the updated account.</returns>
public static async Task <IActionResult> UpdateAccountDetails(
    [HttpTrigger(AuthorizationLevel.Function, "put", Route = "account/{id}")] HttpRequest request,
    [CosmosDB(
         databaseName: "Rodrap50",
         collectionName: "Financials",
         ConnectionStringSetting = "CosmosDBConnection")] DocumentClient client,
    ILogger logger
    )
{
    logger.LogInformation("C# HTTP CreateAccount trigger function processed a request.");

    // Deserialize the incoming account payload.
    string requestBody = await new StreamReader(request.Body).ReadToEndAsync();
    var account = JsonConvert.DeserializeObject <Account>(requestBody);

    // 1. Update the account document (partition "account"); the sproc returns the stored account.
    StoredProcedureResponse <Account> sprocResponse = await client.ExecuteStoredProcedureAsync <Account>(
        "/dbs/Rodrap50/colls/Financials/sprocs/UpdateAccountDetails/",
        new RequestOptions {
        PartitionKey = new PartitionKey("account")
    },
        account);

    account = sprocResponse.Response;

    // 2. Refresh the account summary with the updated account.
    // NOTE(review): the summary and listing sproc results are never inspected —
    // presumably fire-and-forget updates where failures surface as exceptions; confirm.
    AccountsRequest summaryRequest = new AccountsRequest();

    summaryRequest.accounts = new Account[] { account };
    StoredProcedureResponse <AccountsResponse> sprocResponse2 = await client.ExecuteStoredProcedureAsync <AccountsResponse>(
        "/dbs/Rodrap50/colls/Financials/sprocs/UpdateAccountSummary/",
        new RequestOptions {
        PartitionKey = new PartitionKey("accountsummary")
    },
        summaryRequest);

    // 3. For "soft" accounts the listing is driven by the linked general account
    // document; otherwise the updated account itself is used.
    Account generalAccount = null;

    if (account.SoftAccount)
    {
        var docUri = UriFactory.CreateDocumentUri("Rodrap50", "Financials", account.GeneralAccountId);
        generalAccount = await client.ReadDocumentAsync <Account>(docUri);
    }
    else
    {
        generalAccount = account;
    }

    StoredProcedureResponse <ListingsResponse> sprocResponse3 = await client.ExecuteStoredProcedureAsync <ListingsResponse>(
        "/dbs/Rodrap50/colls/Financials/sprocs/UpdateAccountListing/",
        new RequestOptions {
        PartitionKey = new PartitionKey("accountsummary")
    },
        generalAccount);

    return(new OkObjectResult(account));
}
/// <summary>
/// Creates a trivial stored procedure and verifies the contract of the
/// response object: status code, request charge, and the settings exposed
/// through the implicit conversion (id, resource id, etag, last-modified).
/// </summary>
public async Task SprocContractTest()
{
    string id = Guid.NewGuid().ToString();
    string body = "function() { { var x = 42; } }";

    StoredProcedureResponse createResponse = await this.scripts.CreateStoredProcedureAsync(new StoredProcedureProperties(id, body));
    Assert.AreEqual(HttpStatusCode.Created, createResponse.StatusCode);
    Assert.IsTrue(createResponse.RequestCharge > 0);

    // The response converts implicitly to its settings object.
    StoredProcedureProperties settings = createResponse;
    Assert.AreEqual(id, settings.Id);
    Assert.IsNotNull(settings.ResourceId);
    Assert.IsNotNull(settings.ETag);
    Assert.IsTrue(settings.LastModified.HasValue);

    DateTime unixEpoch = new DateTime(1970, 1, 1, 0, 0, 0, 0, DateTimeKind.Utc);
    Assert.IsTrue(settings.LastModified.Value > unixEpoch, settings.LastModified.Value.ToString());
}
/// <summary>
/// Creates several stored procedures under a client configured with a faulty
/// custom serializer, then pages through the query iterator and verifies that
/// every created sproc id is read back — proving script operations bypass the
/// custom serializer.
/// </summary>
public async Task IteratorTest()
{
    using (CosmosClient cosmosClient = TestCommon.CreateCosmosClient(new CosmosClientOptions() { Serializer = new FaultySerializer() }))
    {
        // Should not use the custom serializer for these operations
        Scripts scripts = cosmosClient.GetContainer(this.database.Id, this.container.Id).Scripts;

        string body = "function() { { var x = 42; } }";
        int expectedCount = 3;
        string[] createdIds = new string[expectedCount];
        for (int index = 0; index < expectedCount; index++)
        {
            string id = Guid.NewGuid().ToString();
            createdIds[index] = id;
            StoredProcedureResponse createResponse = await scripts.CreateStoredProcedureAsync(new StoredProcedureProperties(id, body));
            Assert.AreEqual(HttpStatusCode.Created, createResponse.StatusCode);
        }

        // Drain the iterator page by page, collecting every sproc id we can read back.
        List<string> readIds = new List<string>();
        FeedIterator<StoredProcedureProperties> iterator = scripts.GetStoredProcedureQueryIterator<StoredProcedureProperties>();
        while (iterator.HasMoreResults)
        {
            FeedResponse<StoredProcedureProperties> page = await iterator.ReadNextAsync();
            foreach (StoredProcedureProperties properties in page)
            {
                readIds.Add(properties.Id);
            }
        }

        CollectionAssert.AreEquivalent(createdIds, readIds);
    }
}
/// <summary>
/// Verifies that array-like parameter payloads whose declared "length" is
/// absurdly large are rejected by the service with 400 Bad Request instead of
/// being applied to the sproc.
/// </summary>
public async Task ExecuteTestWithNonAppliableArraylikeParameters()
{
    string sprocId = Guid.NewGuid().ToString();
    string sprocBody = @"function() { var context = getContext(); var response = context.getResponse(); response.setBody(true); }";

    StoredProcedureResponse storedProcedureResponse = await this.scripts.CreateStoredProcedureAsync(new StoredProcedureProperties(sprocId, sprocBody));
    Assert.AreEqual(HttpStatusCode.Created, storedProcedureResponse.StatusCode);
    StoredProcedureTests.ValidateStoredProcedureSettings(sprocId, sprocBody, storedProcedureResponse);

    // Insert a document so the target partition exists.
    string testPartitionId = Guid.NewGuid().ToString();
    var payload = new { id = testPartitionId, user = testPartitionId };
    ItemResponse<dynamic> createItemResponse = await this.container.CreateItemAsync<dynamic>(payload);
    Assert.AreEqual(HttpStatusCode.Created, createItemResponse.StatusCode);

    MemoryStream[] streamPayloads = new MemoryStream[]
    {
        new MemoryStream(Encoding.UTF8.GetBytes(@"{""0"":""onetwothree"", ""length"": 600000}")), // 600000 parameters -> "out of stack space"
        new MemoryStream(Encoding.UTF8.GetBytes(@"{""0"":""onetwothree"", ""length"": 1e9}")), // 1 billion parameters -> exceeds javascript .apply maximum
    };

    foreach (MemoryStream streamPayload in streamPayloads)
    {
        // Fix: dispose each payload stream and its response instead of leaking them.
        using (streamPayload)
        using (ResponseMessage response = await this.scripts.ExecuteStoredProcedureStreamAsync(
            storedProcedureId: sprocId,
            streamPayload: streamPayload,
            partitionKey: new Cosmos.PartitionKey(testPartitionId),
            requestOptions: null,
            cancellationToken: default(CancellationToken)))
        {
            Assert.AreEqual(HttpStatusCode.BadRequest, response.StatusCode);
        }
    }

    StoredProcedureResponse deleteResponse = await this.scripts.DeleteStoredProcedureAsync(sprocId);
    Assert.AreEqual(HttpStatusCode.NoContent, deleteResponse.StatusCode);
}
/// <summary>
/// Creates a document through the "addProductSproc" stored procedure, running
/// the pre-trigger that stamps the creation date.
/// </summary>
/// <param name="newDocument">The document to create.</param>
/// <param name="partitionKeyValue">Partition key value the document belongs to.</param>
/// <returns>True when the stored procedure responds with HTTP 200 (OK); otherwise false.</returns>
public async Task <bool> CreateDocumentSprocAsync(T newDocument, string partitionKeyValue)
{
    var client = DocumentDBClientConfig.GetClientInstance;
    var storedProcedureId = "addProductSproc";
    var requestOptions = new RequestOptions
    {
        PartitionKey = new PartitionKey(partitionKeyValue),
        // Pre-trigger runs server-side before the sproc inserts the document.
        PreTriggerInclude = new List <string> { "preCreateProductIncludeDateCreated" }
    };

    var storedProcedureUri = UriFactory.CreateStoredProcedureUri(_databaseId, _collectionId, storedProcedureId);
    StoredProcedureResponse <string> storedProcedureResponse =
        await client.ExecuteStoredProcedureAsync <string>(storedProcedureUri, requestOptions, newDocument);

    // Idiom fix: return the comparison directly instead of if/else with boolean literals.
    return(storedProcedureResponse.StatusCode == System.Net.HttpStatusCode.OK);
}
/// <summary>
/// Reads the script body from StoredProcedures/{spName}.js and registers it as
/// a stored procedure on the container, then prints the created resource info.
/// </summary>
/// <param name="spName">Name of the script file (without extension) and the sproc id.</param>
public async Task CreateStoredProcedure(string spName)
{
    string scriptBody = File.ReadAllText($"StoredProcedures/{spName}.js");

    StoredProcedureProperties properties = new StoredProcedureProperties
    {
        Id = spName,
        Body = scriptBody
    };

    StoredProcedureResponse response = await _container.Scripts.CreateStoredProcedureAsync(properties);
    var created = response.Resource;

    string message = "Create StoredProcedure";
    Printer.PrintLine(message: message);
    WriteLine($"SP Created, Id: {created.Id}, \n\tSelfLink: {created.SelfLink}");
    Printer.PrintLine(noOfTimes: (101 + message.Length));
}
/// <summary>
/// Bulk import using a stored procedure.
/// </summary>
/// <param name="client">Document client used for all service calls.</param>
/// <param name="collection">Target collection for the import.</param>
/// <param name="inputDirectory">Directory scanned for input files.</param>
/// <param name="inputFileMask">File mask to match (defaults to *.json).</param>
/// <returns>A task that completes when all matched files (capped at maxFiles) are imported.</returns>
public static async Task RunBulkImport(
    DocumentClient client, DocumentCollection collection,
    string inputDirectory, string inputFileMask = "*.json")
{
    int maxFiles = 2000;
    int maxScriptSize = 50000;

    // 1. Get the files.
    // Fix: removed the unused DirectoryInfo/FileInfo[] locals the original created.
    string[] fileNames = Directory.GetFiles(inputDirectory, inputFileMask);

    int currentCount = 0;
    int fileCount = maxFiles != 0 ? Math.Min(maxFiles, fileNames.Length) : fileNames.Length;

    // 2. (Re)create the BulkImport sproc from the local script file.
    string body = File.ReadAllText(@".\JS\BulkImport.js");
    StoredProcedure sproc = new StoredProcedure
    {
        Id   = "BulkImport",
        Body = body
    };

    await TryDeleteStoredProcedure(client, collection, sproc.Id);
    sproc = await ExecuteWithRetries <ResourceResponse <StoredProcedure> >(client, () => client.CreateStoredProcedureAsync(collection.SelfLink, sproc));

    // 3. Send batches until every file is imported. The sproc reports how many
    // documents it actually inserted; a partial batch resumes from that offset.
    while (currentCount < fileCount)
    {
        string argsJson = CreateBulkInsertScriptArguments(fileNames, currentCount, fileCount, maxScriptSize);
        var args = new dynamic[] { JsonConvert.DeserializeObject <dynamic>(argsJson) };

        StoredProcedureResponse <int> scriptResult = await ExecuteWithRetries <StoredProcedureResponse <int> >(client, () => client.ExecuteStoredProcedureAsync <int>(sproc.SelfLink, args));

        int currentlyInserted = scriptResult.Response;
        currentCount += currentlyInserted;
    }
}
/// <summary>
/// Bulk-inserts the given documents by invoking the "bulkImport.js" stored
/// procedure in batches until every document (up to maxFiles) has been sent.
/// </summary>
/// <param name="documents">Documents to insert.</param>
/// <returns>The total number of documents the stored procedure reported as inserted.</returns>
internal async Task <int> InvokeBulkInsertSproc(List <DocumentDBHash> documents)
{
    int maxFiles = 2000, maxScriptSize = 50000;
    int currentCount = 0;
    int fileCount = maxFiles != 0 ? Math.Min(maxFiles, documents.Count) : documents.Count;

    // NOTE(review): the sproc id here includes the ".js" extension — verify this
    // matches the id the stored procedure was actually registered under.
    Uri sproc = UriFactory.CreateStoredProcedureUri(_databaseId, _collectionId, "bulkImport.js");

    // 4. Create a batch of docs (MAX is limited by request size (2M) and to script for execution.
    // We send batches of documents to create to script.
    // Each batch size is determined by MaxScriptSize.
    // MaxScriptSize should be so that:
    // -- it fits into one request (MAX reqest size is 16Kb).
    // -- it doesn't cause the script to time out.
    // -- it is possible to experiment with MaxScriptSize to get best perf given number of throttles, etc.
    while (currentCount < fileCount)
    {
        // 5. Create args for current batch.
        //    Note that we could send a string with serialized JSON and JSON.parse it on the script side,
        //    but that would cause script to run longer. Since script has timeout, unload the script as much
        //    as we can and do the parsing by client and framework. The script will get JavaScript objects.
        string argsJson = CreateBulkInsertScriptArguments(documents, currentCount, fileCount, maxScriptSize);
        var args = new dynamic[] { JsonConvert.DeserializeObject <dynamic>(argsJson) };

        // 6. execute the batch.
        StoredProcedureResponse <int> scriptResult = await _client.ExecuteStoredProcedureAsync <int>(
            sproc,
            //new RequestOptions { PartitionKey = new PartitionKey("mmmmm") },
            args);

        // 7. Prepare for next batch. The loop resumes from the reported offset;
        // NOTE(review): if the sproc ever reports 0 inserted repeatedly, this loop
        // would spin forever — confirm the sproc always makes progress.
        int currentlyInserted = scriptResult.Response;
        currentCount += currentlyInserted;
    }

    return(currentCount);
}
/// <summary>
/// Verifies that a stored procedure receives multiple typed parameters and
/// returns their concatenation as a string resource.
/// </summary>
public async Task ExecuteTestWithMultipleParameters()
{
    // Register a sproc that concatenates its three parameters.
    string id = Guid.NewGuid().ToString();
    string body = @"function(param1, param2, param3) { var context = getContext(); var response = context.getResponse(); response.setBody(param1+param2+param3); }";
    StoredProcedureResponse createResponse = await this.scripts.CreateStoredProcedureAsync(new StoredProcedureProperties(id, body));
    Assert.AreEqual(HttpStatusCode.Created, createResponse.StatusCode);
    StoredProcedureTests.ValidateStoredProcedureSettings(id, body, createResponse);

    // Seed the partition with one document before executing against it.
    string partitionId = Guid.NewGuid().ToString();
    var document = new { id = partitionId, user = partitionId };
    ItemResponse <dynamic> insertResponse = await this.container.CreateItemAsync <dynamic>(document);
    Assert.AreEqual(HttpStatusCode.Created, insertResponse.StatusCode);

    StoredProcedureExecuteResponse <string> executeResponse = await this.scripts.ExecuteStoredProcedureAsync <string>(
        storedProcedureId : id,
        partitionKey : new Cosmos.PartitionKey(partitionId),
        parameters : new dynamic[] { "one", "two", "three" },
        requestOptions : null,
        cancellationToken : default(CancellationToken));
    Assert.AreEqual(HttpStatusCode.OK, executeResponse.StatusCode);

    string result = executeResponse.Resource;
    Assert.IsNotNull(result);
    Assert.AreEqual("onetwothree", result);

    StoredProcedureResponse deleteResponse = await this.scripts.DeleteStoredProcedureAsync(id);
    Assert.AreEqual(HttpStatusCode.NoContent, deleteResponse.StatusCode);
}
/// <summary>
/// Exercises the full create / read / replace / delete cycle for a stored
/// procedure, checking status codes and positive request charges at each step.
/// </summary>
public async Task CRUDTest()
{
    string sprocId = Guid.NewGuid().ToString();
    string sprocBody = "function() { { var x = 42; } }";

    // Create.
    StoredProcedureResponse createResponse = await this.scripts.CreateStoredProcedureAsync(new StoredProcedureProperties(sprocId, sprocBody));
    Assert.IsTrue(createResponse.RequestCharge > 0);
    Assert.AreEqual(HttpStatusCode.Created, createResponse.StatusCode);
    StoredProcedureTests.ValidateStoredProcedureSettings(sprocId, sprocBody, createResponse);

    // Read.
    StoredProcedureResponse readResponse = await this.scripts.ReadStoredProcedureAsync(sprocId);
    Assert.IsTrue(readResponse.RequestCharge > 0);
    Assert.AreEqual(HttpStatusCode.OK, readResponse.StatusCode);
    StoredProcedureTests.ValidateStoredProcedureSettings(sprocId, sprocBody, readResponse);

    // Replace with a body that takes a parameter.
    string updatedBody = @"function(name) { var context = getContext(); var response = context.getResponse(); response.setBody(""hello there "" + name); }";
    StoredProcedureResponse replaceResponse = await this.scripts.ReplaceStoredProcedureAsync(new StoredProcedureProperties(sprocId, updatedBody));
    StoredProcedureTests.ValidateStoredProcedureSettings(sprocId, updatedBody, replaceResponse);
    Assert.IsTrue(replaceResponse.RequestCharge > 0);
    Assert.AreEqual(HttpStatusCode.OK, replaceResponse.StatusCode);
    StoredProcedureTests.ValidateStoredProcedureSettings(sprocId, updatedBody, replaceResponse);

    // Delete.
    StoredProcedureResponse deleteResponse = await this.scripts.DeleteStoredProcedureAsync(sprocId);
    Assert.IsTrue(deleteResponse.RequestCharge > 0);
    Assert.AreEqual(HttpStatusCode.NoContent, deleteResponse.StatusCode);
}
/// <summary>
/// Entry point: bulk-uploads 25,000 generated Person documents through the
/// bulkUpload stored procedure, then bulk-deletes them via bulkDelete until the
/// sproc reports nothing left to continue with.
/// </summary>
public static async Task Main(string[] args)
{
    using (DocumentClient client = new DocumentClient(_endpointUri, _primaryKey))
    {
        await client.OpenAsync();

        Uri sprocLinkUpload = UriFactory.CreateStoredProcedureUri("FinancialDatabase", "InvestorCollection", "bulkUpload");

        // Generate 25,000 fake people, all under the "contosofinancial" partition.
        List <Person> people = new Faker <Person>()
                               .RuleFor(p => p.firstName, f => f.Name.FirstName())
                               .RuleFor(p => p.lastName, f => f.Name.LastName())
                               .RuleFor(p => p.company, f => "contosofinancial")
                               .Generate(25000);

        // Upload in batches: the sproc returns how many documents it inserted and
        // the loop resumes from that offset until everyone has been uploaded.
        int pointer = 0;
        while (pointer < people.Count)
        {
            RequestOptions options = new RequestOptions {
                PartitionKey = new PartitionKey("contosofinancial")
            };
            StoredProcedureResponse <int> result = await client.ExecuteStoredProcedureAsync <int>(sprocLinkUpload, options, people.Skip(pointer));
            pointer += result.Response;
            await Console.Out.WriteLineAsync($"{pointer} Total Documents\t{result.Response} Documents Uploaded in this Iteration");
        }

        Uri sprocLinkDelete = UriFactory.CreateStoredProcedureUri("FinancialDatabase", "InvestorCollection", "bulkDelete");

        // Delete in batches; Continuation signals whether another pass is needed.
        bool resume = true;
        do
        {
            RequestOptions options = new RequestOptions {
                PartitionKey = new PartitionKey("contosofinancial")
            };
            string query = "SELECT * FROM investors i WHERE i.company = 'contosofinancial'";
            StoredProcedureResponse <DeleteStatus> result = await client.ExecuteStoredProcedureAsync <DeleteStatus>(sprocLinkDelete, options, query);
            await Console.Out.WriteLineAsync($"Batch Delete Completed.\tDeleted: {result.Response.Deleted}\tContinue: {result.Response.Continuation}");
            resume = result.Response.Continuation;
        }while(resume);
    }
}
/// <summary>
/// Inserts 5000 generated documents through the spBulkInsert stored procedure,
/// re-sending the remainder until the sproc reports everything was written.
/// </summary>
private static async Task Execute_spBulkInsert(IDocumentClient client)
{
    Console.WriteLine();
    Console.WriteLine("Execute spBulkInsert");

    const int total = 5000;
    List <dynamic> docs = new List <dynamic>();
    for (int i = 1; i <= total; i++)
    {
        docs.Add(new { name = $"Bulk inserted doc {i}", address = new { postalCode = "12345" } });
    }

    Uri uri = UriFactory.CreateStoredProcedureUri("mydb", "mystore", "spBulkInsert");
    RequestOptions options = new RequestOptions { PartitionKey = new PartitionKey("12345") };

    int totalInserted = 0;
    while (totalInserted < total)
    {
        StoredProcedureResponse <int> result = await client.ExecuteStoredProcedureAsync <int>(uri, options, docs);
        int inserted = result.Response;
        totalInserted += inserted;
        int remaining = total - totalInserted;
        Console.WriteLine($"Inserted {inserted} documents ({totalInserted} total, {remaining} remaining)");

        // Drop the documents the sproc managed to insert; resend the rest.
        docs = docs.GetRange(inserted, docs.Count - inserted);
    }
}
// </RunDemoAsync>
/// <summary>
/// Runs a simple script which just does a server side query
/// </summary>
// <RunSimpleScript>
private static async Task RunSimpleScript(Container container)
{
    // 1. Register the stored procedure, replacing any stale copy first.
    string scriptFilePath = @"js\SimpleScript.js";
    string scriptId = Path.GetFileNameWithoutExtension(scriptFilePath);
    await TryDeleteStoredProcedure(container, scriptId);

    Scripts cosmosScripts = container.Scripts;
    StoredProcedureResponse createdSproc = await cosmosScripts.CreateStoredProcedureAsync(
        new StoredProcedureProperties(scriptId, File.ReadAllText(scriptFilePath)));

    // 2. Create a document for the script to operate on.
    SampleDocument sampleDoc = new SampleDocument
    {
        Id = Guid.NewGuid().ToString(),
        LastName = "Estel",
        Headquarters = "Russia",
        Locations = new Location[] { new Location { Country = "Russia", City = "Novosibirsk" } },
        Income = 50000
    };
    ItemResponse <SampleDocument> createdItem = await container.CreateItemAsync(sampleDoc, new PartitionKey(sampleDoc.LastName));

    // 3. Run the script. Pass "Hello, " as parameter.
    // The script will take the 1st document and echo: Hello, <document as json>.
    StoredProcedureExecuteResponse <string> executeResponse = await container.Scripts.ExecuteStoredProcedureAsync <string>(
        scriptId,
        new PartitionKey(sampleDoc.LastName),
        new dynamic[] { "Hello" });
    Console.WriteLine("Result from script: {0}\r\n", executeResponse.Resource);

    // Clean up the document created above.
    await container.DeleteItemAsync <SampleDocument>(sampleDoc.Id, new PartitionKey(sampleDoc.LastName));
}
// </RunBulkImport>
/// <summary>
/// Get documents ordered by some doc property. This is done using OrderBy stored procedure.
/// </summary>
// <RunOrderBy>
private static async Task RunOrderBy(Container container)
{
    // 1. Create or get the stored procedure.
    string body = File.ReadAllText(@"js\OrderBy.js");
    string scriptId = "OrderBy";

    await TryDeleteStoredProcedure(container, scriptId);

    Scripts cosmosScripts = container.Scripts;
    StoredProcedureResponse sproc = await cosmosScripts.CreateStoredProcedureAsync(new StoredProcedureProperties(scriptId, body));

    // 2. Prepare to run stored procedure.
    string orderByFieldName = "FamilyId";
    string filterQuery = string.Format(CultureInfo.InvariantCulture, "SELECT r.FamilyId FROM root r WHERE r.{0} > 10", orderByFieldName);
    // Note: in order to do a range query (> 10) on this field, the collection must have a range index set for this path (see ReadOrCreateCollection).

    // The sproc pages its output: a non-null continuation means more batches remain.
    int?continuationToken = null;
    int batchCount = 0;

    do
    {
        // 3. Run the stored procedure.
        StoredProcedureExecuteResponse <OrderByResult> response = await cosmosScripts.ExecuteStoredProcedureAsync <OrderByResult>(
            scriptId,
            new PartitionKey("Andersen"),
            new dynamic[] { filterQuery, orderByFieldName, continuationToken });

        // 4. Process stored procedure response.
        continuationToken = response.Resource.Continuation;
        Console.WriteLine("Printing documents filtered/ordered by '{0}' and ordered by '{1}', batch #{2}:", filterQuery, orderByFieldName, batchCount++);
        foreach (dynamic doc in response.Resource.Result)
        {
            Console.WriteLine(doc.ToString());
        }
    } while (continuationToken != null);
    // 5. To take care of big response, loop until Response.continuation token is null (see OrderBy.js for details).
}
/// <summary>
/// Adds merchants to the collection in batches via the AddMerchants stored procedure.
/// </summary>
/// <param name="merchants">Merchants to insert; must be non-null and non-empty.</param>
/// <returns>The number of merchants the stored procedure reported as added.</returns>
/// <exception cref="ArgumentNullException">Thrown when <paramref name="merchants"/> is null.</exception>
/// <exception cref="ArgumentException">Thrown when <paramref name="merchants"/> is empty.</exception>
public async Task <int> AddMerchantsInBatchAsync(IList <Merchant> merchants)
{
    if (merchants == null)
    {
        throw new ArgumentNullException(nameof(merchants));
    }

    if (!merchants.Any())
    {
        throw new ArgumentException("Merchants list is empty", nameof(merchants));
    }

    StoredProcedure storedProcedure = await ExecuteWithRetries(() => CreateOrGetStoredProcAsync(Collection, "AddMerchants", "AddMerchants.js")).ConfigureAwait(false);

    int currentCount = 0;
    int maxMerchantsPerBatch = Math.Min(500, merchants.Count);

    Log.Info($"Total merchants to add in a batch to the db is {maxMerchantsPerBatch}");

    Stopwatch sw = Stopwatch.StartNew();

    // The sproc reports how many merchants it inserted; resume from that offset.
    while (currentCount < merchants.Count)
    {
        string argsJson = CreatePayloadForAddMerchant(merchants, currentCount, maxMerchantsPerBatch);
        var args = new dynamic[] { JsonConvert.DeserializeObject <dynamic>(argsJson) };

        StoredProcedureResponse <int> scriptResult = await ExecuteWithRetries(() => Client.ExecuteStoredProcedureAsync <int>(storedProcedure.SelfLink, args));

        int currentlyInserted = scriptResult.Response;
        currentCount += currentlyInserted;
        Log.Info($"Added {currentlyInserted} new merchants to db");
    }

    long timeTaken = sw.ElapsedMilliseconds;

    // Consistency/idiom fix: use the IList Count property (O(1)), matching the
    // rest of the method, instead of the LINQ Count() extension.
    Log.Info($"Total time taken to add {merchants.Count} merchants is {timeTaken}");
    return(currentCount);
}
/// <summary>
/// Executes the named stored procedure and deserializes its string payload into
/// a list of documents. Returns an empty list when nothing matched; logs and
/// rethrows any failure.
/// </summary>
public async Task <IEnumerable <T> > CallStoredProcedure(string procName, params dynamic[] parameters)
{
    try
    {
        Uri uri = UriFactory.CreateStoredProcedureUri(_databaseId, _collectionId, procName);
        StoredProcedureResponse <string> response = await _client.ExecuteStoredProcedureAsync <string>(uri, parameters);

        // The sproc signals "no results" with a sentinel message rather than an empty payload.
        if (response == null || response.Response == "No documents matching query were found.")
        {
            return(new List <T>());
        }

        return(JsonConvert.DeserializeObject <List <T> >(response.Response));
    }
    catch (Exception e)
    {
        Console.WriteLine(e);
        throw;
    }
}
/// <summary>
/// Repeatedly invokes spBulkDelete with the given query until the sproc reports
/// no more matching documents, returning the total number deleted.
/// </summary>
private static async Task <int> Execute_spBulkDelete(IDocumentClient client, string sql)
{
    Uri uri = UriFactory.CreateStoredProcedureUri("mydb", "mystore", "spBulkDelete");
    RequestOptions options = new RequestOptions { PartitionKey = new PartitionKey("12345") };

    int totalDeleted = 0;
    bool continuationFlag = true;
    while (continuationFlag)
    {
        StoredProcedureResponse <BulkDeleteResponse> result = await client.ExecuteStoredProcedureAsync <BulkDeleteResponse>(uri, options, sql);
        BulkDeleteResponse batch = result.Response;

        continuationFlag = batch.ContinuationFlag;
        int deleted = batch.Count;
        totalDeleted += deleted;
        Console.WriteLine($"Deleted {deleted} documents ({totalDeleted} total, more: {continuationFlag})");
    }

    return(totalDeleted);
}
/// <summary>
/// Bulk-updates the given entities by tagging and timestamping them, serializing
/// the batch, and handing it to the named stored procedure.
/// </summary>
/// <typeparam name="T">Entity type; its name is used as the document type tag.</typeparam>
/// <param name="entities">Entities to update.</param>
/// <param name="storedProcedureName">Stored procedure that performs the bulk update.</param>
/// <returns>The HTTP status code returned by the stored procedure execution.</returns>
/// <exception cref="ArgumentException">Thrown when an entity's existing document type differs from <typeparamref name="T"/>.</exception>
public async Task <HttpStatusCode> BulkUpdateAsync <T>(IEnumerable <T> entities, string storedProcedureName) where T : IIdentifiable
{
    string documentType = GetDocumentType <T>();
    IList <DocumentEntity <T> > documents = new List <DocumentEntity <T> >();

    foreach (T entity in entities)
    {
        DocumentEntity <T> doc = new DocumentEntity <T>(entity);

        // Refuse to silently change an existing document's type tag.
        if (doc.DocumentType != null && doc.DocumentType != documentType)
        {
            throw new ArgumentException($"Cannot change {entity.Id} from {doc.DocumentType} to {typeof(T).Name}");
        }

        doc.DocumentType = documentType;
        // NOTE(review): local time is stored here; if documents are compared across
        // time zones, DateTimeOffset.UtcNow may be more appropriate — confirm first.
        doc.UpdatedAt = DateTimeOffset.Now;
        documents.Add(doc);
    }

    // Fix: the original caught Exception and rethrew `new Exception(ex.Message)`,
    // discarding the exception type and stack trace. Let failures propagate intact;
    // callers catching Exception still catch every original exception type.
    string documentsAsJson = JsonConvert.SerializeObject(documents);
    dynamic[] args = new dynamic[] { JsonConvert.DeserializeObject <dynamic>(documentsAsJson) };
    Uri link = UriFactory.CreateStoredProcedureUri(_databaseName, _collectionName, storedProcedureName);

    StoredProcedureResponse <string> result = await _documentClient.ExecuteStoredProcedureAsync <string> (link, args);
    return(result.StatusCode);
}
/// <summary>
/// Builds a Razor rendering context from a stored procedure response (the model)
/// and the razor-related parameters (template, encoding, language).
/// </summary>
/// <param name="spResponse">Stored procedure response that becomes the Razor model.</param>
/// <param name="razorParameters">Razor parameter dictionary; must contain the template.</param>
/// <exception cref="ArgumentNullException">Thrown when either argument is null or the template parameter is missing.</exception>
public RazorContext(StoredProcedureResponse spResponse, IDictionary <string, object> razorParameters)
{
    // Fix: nameof instead of hard-coded parameter-name strings (refactor-safe).
    if (spResponse == null)
    {
        throw new ArgumentNullException(nameof(spResponse));
    }

    if (razorParameters == null)
    {
        throw new ArgumentNullException(nameof(razorParameters));
    }

    _RazorTemplate = TryGetParameter(razorParameters, DbWebApiOptions.QueryStringContract.RazorTemplateParameterName);

    if (string.IsNullOrEmpty(_RazorTemplate))
    {
        throw new ArgumentNullException(DbWebApiOptions.QueryStringContract.RazorTemplateParameterName);
    }

    // Fall back to configured defaults when encoding/language are absent or unparsable.
    string encoding = TryGetParameter(razorParameters, DbWebApiOptions.QueryStringContract.RazorEncodingParameterName);

    if (string.IsNullOrEmpty(encoding) || !Enum.TryParse(encoding, true, out _RazorEncoding))
    {
        _RazorEncoding = DbWebApiOptions.DefaultRazorEncoding;
    }

    string language = TryGetParameter(razorParameters, DbWebApiOptions.QueryStringContract.RazorLanguageParameterName);

    if (string.IsNullOrEmpty(language) || !Enum.TryParse(language, true, out _RazorLanguage))
    {
        _RazorLanguage = DbWebApiOptions.DefaultRazorLanguage;
    }

    _Model = new SerializableResponseData(spResponse);

    ResolveRazorTemplate();
}
/// <summary>
/// Bulk execute a DbApi with an IList&lt;IDictionary&lt;string, object&gt;&gt; (a collection of input parameters collections).
/// </summary>
/// <typeparam name="T">IDictionary&lt;string, object&gt;</typeparam>
/// <param name="apiController">Your ApiController to invoke this extension method</param>
/// <param name="sp">Specifies the fully qualified name of database stored procedure or function</param>
/// <param name="bulkParameterSets">Specifies a collection of required parameter dictionary for every call in the bulk execution</param>
/// <returns>A complete HttpResponseMessage contains an array of every result data returned by the database</returns>
public static HttpResponseMessage BulkExecuteDbApi <T>(this ApiController apiController, string sp, IList <T> bulkParameterSets) where T : IDictionary <string, object>
{
    // An empty batch is a client error.
    if (bulkParameterSets == null || bulkParameterSets.Count == 0)
    {
        return(apiController.Request.CreateResponse(HttpStatusCode.BadRequest));
    }

    try
    {
        // Razor/pseudo formatters cannot render a bulk (array) result — reject early.
        var negotiationResult = apiController.Request.Negotiate();

        if (negotiationResult != null)
        {
            if (negotiationResult.Formatter is PseudoMediaTypeFormatter || negotiationResult.Formatter is RazorMediaTypeFormatter)
            {
                return(apiController.Request.CreateResponse(HttpStatusCode.UnsupportedMediaType));
            }
        }

        using (DalCenter dbContext = new DalCenter(apiController.Request.GetQueryStringDictionary()))
        {
            // Execute the same sproc once per parameter set, collecting every response.
            StoredProcedureResponse[] spResponses = new StoredProcedureResponse[bulkParameterSets.Count];

            for (int i = 0; i < spResponses.Length; i++)
            {
                spResponses[i] = dbContext.ExecuteDbApi(sp, bulkParameterSets[i]);
            }

            return(apiController.Request.CreateResponse(HttpStatusCode.OK, spResponses.AsQueryable()));
        }
    }
    finally
    {
        // Cache cleanup runs on both success and failure paths.
        CleanupCache(apiController.Request.RequestUri);
    }
}
/// <summary>
/// Inserts or updates a retrospective item by calling the matching stored
/// procedure; returns the persisted entity with success state, or a failed
/// result carrying the exception when the call throws.
/// </summary>
public async Task <EntityCommandResult <RetrospectiveItem> > ExecuteAsync(CreateOrUpdateRetrospectiveItemCommand source)
{
    bool isNewItem = string.IsNullOrEmpty(source.RetrospectiveItemId);
    string storedProcName = isNewItem ? "InsertRetrospectiveItem" : "UpdateRetrospectiveItem";

    var retroItem = new RetrospectiveItem
    {
        Votes = source.Votes,
        Description = source.Description,
        // New items get a freshly generated id; updates keep the existing one.
        Id = isNewItem ? Guid.NewGuid().ToString() : source.RetrospectiveItemId
    };

    try
    {
        StoredProcedureResponse <dynamic> procresult = await repository.ExecuteStoredProc(storedProcName, new dynamic[] { source.RetrospectiveId, source.CategoryId, retroItem });

        return(new EntityCommandResult <RetrospectiveItem>
        {
            Response = procresult,
            Entity = retroItem,
            IsSuccess = true
        });
    }
    catch (Exception ex)
    {
        return(new EntityCommandResult <RetrospectiveItem> { IsSuccess = false, Response = ex });
    }
}
/// <summary>
/// Builds a Razor rendering context from a stored procedure response (the model)
/// and the razor-related parameters (template, encoding, language).
/// </summary>
/// <param name="spResponse">Stored procedure response that becomes the Razor model.</param>
/// <param name="razorParameters">Razor parameter dictionary; must contain the template.</param>
/// <exception cref="ArgumentNullException">Thrown when either argument is null or the template parameter is missing.</exception>
public RazorContext(StoredProcedureResponse spResponse, IDictionary<string, object> razorParameters)
{
    // Fix: validate spResponse up front (previously it was only checked deep inside
    // SerializableResponseData, after the parameter work below) and use nameof
    // instead of hard-coded name strings.
    if (spResponse == null)
        throw new ArgumentNullException(nameof(spResponse));

    if (razorParameters == null)
        throw new ArgumentNullException(nameof(razorParameters));

    _RazorTemplate = TryGetParameter(razorParameters, DbWebApiOptions.QueryStringContract.RazorTemplateParameterName);

    if (string.IsNullOrEmpty(_RazorTemplate))
        throw new ArgumentNullException(DbWebApiOptions.QueryStringContract.RazorTemplateParameterName);

    // Fall back to configured defaults when encoding/language are absent or unparsable.
    string encoding = TryGetParameter(razorParameters, DbWebApiOptions.QueryStringContract.RazorEncodingParameterName);

    if (string.IsNullOrEmpty(encoding) || !Enum.TryParse(encoding, true, out _RazorEncoding))
        _RazorEncoding = DbWebApiOptions.DefaultRazorEncoding;

    string language = TryGetParameter(razorParameters, DbWebApiOptions.QueryStringContract.RazorLanguageParameterName);

    if (string.IsNullOrEmpty(language) || !Enum.TryParse(language, true, out _RazorLanguage))
        _RazorLanguage = DbWebApiOptions.DefaultRazorLanguage;

    _Model = new SerializableResponseData(spResponse);

    ResolveRazorTemplate();
}
/// <summary>
/// Ensures the conflict-test database and container exist — creating the container
/// with the conflict-resolution policy requested by the generator — then initializes
/// and warms up a container reference for every replica region.
/// </summary>
/// <param name="conflict">Generator carrying the database/container ids, partition key path, resolution type and per-region clients.</param>
public async Task InitializeConflicts(ConflictGenerator conflict)
{
    //Use West US 2 region to test if containers have been created and create them if needed.
    ReplicaRegion replicaRegion = conflict.replicaRegions.Find(s => s.region == "West US 2");

    if (replicaRegion.container == null)
    {
        //Create the containers
        try
        {
            replicaRegion.container = replicaRegion.client.GetContainer(conflict.databaseId, conflict.containerId);
            await replicaRegion.container.ReadContainerAsync(); //ReadContainer to see if it is created
        }
        catch
        {
            // ReadContainerAsync threw, so the container does not exist yet: create it.
            DatabaseResponse dbResponse = await replicaRegion.client.CreateDatabaseIfNotExistsAsync(conflict.databaseId);
            Database database = dbResponse.Database;
            ContainerResponse cResponse;

            //Create containers with different conflict resolution policies
            switch (conflict.conflictResolutionType)
            {
                case ConflictResolutionType.LastWriterWins:
                    // Server resolves conflicts automatically using the /userDefinedId path.
                    cResponse = await database.CreateContainerIfNotExistsAsync(
                        new ContainerProperties(conflict.containerId, conflict.partitionKeyPath)
                        {
                            ConflictResolutionPolicy = new ConflictResolutionPolicy()
                            {
                                Mode = ConflictResolutionMode.LastWriterWins,
                                ResolutionPath = "/userDefinedId"
                            }
                        }, 400);
                    break;

                case ConflictResolutionType.MergeProcedure:
                    // Custom resolution via the stored procedure registered just below.
                    string scriptId = "MergeProcedure";
                    cResponse = await database.CreateContainerIfNotExistsAsync(
                        new ContainerProperties(conflict.containerId, conflict.partitionKeyPath)
                        {
                            ConflictResolutionPolicy = new ConflictResolutionPolicy()
                            {
                                Mode = ConflictResolutionMode.Custom,
                                ResolutionProcedure = $"dbs/{conflict.databaseId}/colls/{conflict.containerId}/sprocs/{scriptId}"
                            }
                        }, 400);

                    //Conflict Merge Procedure
                    string body = File.ReadAllText(@"spConflictUDP.js");
                    StoredProcedureResponse sproc = await cResponse.Container.Scripts.CreateStoredProcedureAsync(new StoredProcedureProperties(scriptId, body));
                    break;

                case ConflictResolutionType.None:
                    // Custom mode with no resolution procedure registered.
                    // NOTE(review): presumably conflicts then surface via the conflict feed — confirm.
                    cResponse = await database.CreateContainerIfNotExistsAsync(
                        new ContainerProperties(conflict.containerId, conflict.partitionKeyPath)
                        {
                            ConflictResolutionPolicy = new ConflictResolutionPolicy() { Mode = ConflictResolutionMode.Custom }
                        }, 400);
                    break;

                default:
                    throw new ArgumentOutOfRangeException();
            }

            replicaRegion.container = cResponse.Container;
        }
    }

    //Initialize and warm up all regional container references
    foreach (ReplicaRegion region in conflict.replicaRegions)
    {
        region.container = region.client.GetContainer(conflict.databaseId, conflict.containerId);

        //Verify container has replicated
        await Helpers.VerifyContainerReplicated(region.container);
        await WarmUp(region.container);
    }
}
/// <summary>
/// Verifies that a DocumentClient configured with custom JsonSerializerSettings (an
/// ObjectStringJsonConverter for SerializedObject) round-trips through stored-procedure
/// execution: a document is bulk-imported via a sproc, read back, and the custom-serialized
/// member is asserted to have survived.
/// </summary>
public void TestStoredProcedure()
{
    // Create a document client with a customer json serializer settings
    JsonSerializerSettings serializerSettings = new JsonSerializerSettings();
    serializerSettings.Converters.Add(new ObjectStringJsonConverter<SerializedObject>(_ => _.Name, _ => SerializedObject.Parse(_)));
    ConnectionPolicy connectionPolicy = new ConnectionPolicy { ConnectionMode = ConnectionMode.Gateway };
    ConsistencyLevel defaultConsistencyLevel = ConsistencyLevel.Session;
    DocumentClient client = this.CreateDocumentClient(
        this.hostUri,
        this.masterKey,
        serializerSettings,
        connectionPolicy,
        defaultConsistencyLevel);

    // Create a simple stored procedure.
    // The body is a verbatim string literal: its text is sent to the service as-is.
    string scriptId = "bulkImportScript";
    StoredProcedure sproc = new StoredProcedure
    {
        Id = scriptId,
        Body = @"
function bulkImport(docs) {
    var collection = getContext().getCollection();
    var collectionLink = collection.getSelfLink();

    // The count of imported docs, also used as current doc index.
    var count = 0;

    // Validate input.
    if (!docs) throw new Error(""The array is undefined or null."");

    var docsLength = docs.length;
    if (docsLength == 0) {
        getContext().getResponse().setBody(0);
    }

    // Call the CRUD API to create a document.
    tryCreate(docs[count], callback);

    // Note that there are 2 exit conditions:
    // 1) The createDocument request was not accepted.
    //    In this case the callback will not be called, we just call setBody and we are done.
    // 2) The callback was called docs.length times.
    //    In this case all documents were created and we don't need to call tryCreate anymore. Just call setBody and we are done.
    function tryCreate(doc, callback) {
        // If you are sure that every document will contain its own (unique) id field then
        // disable the option to auto generate ids.
        // by leaving this on, the entire document is parsed to check if there is an id field or not
        // by disabling this, parsing of the document is skipped because you're telling DocumentDB
        // that you are providing your own ids.
        // depending on the size of your documents making this change can have a significant
        // improvement on document creation.
        var options = {
            disableAutomaticIdGeneration: true
        };

        var isAccepted = collection.createDocument(collectionLink, doc, options, callback);

        // If the request was accepted, callback will be called.
        // Otherwise report current count back to the client,
        // which will call the script again with remaining set of docs.
        // This condition will happen when this stored procedure has been running too long
        // and is about to get cancelled by the server. This will allow the calling client
        // to resume this batch from the point we got to before isAccepted was set to false
        if (!isAccepted) getContext().getResponse().setBody(count);
    }

    // This is called when collection.createDocument is done and the document has been persisted.
    function callback(err, doc, options) {
        if (err) throw err;

        // One more document has been inserted, increment the count.
        count++;

        if (count >= docsLength) {
            // If we have created all documents, we are done. Just set the response.
            getContext().getResponse().setBody(count);
        } else {
            // Create next document.
            tryCreate(docs[count], callback);
        }
    }
}
"
    };

    // NOTE(review): .Result blocks synchronously on async calls (wraps exceptions in
    // AggregateException and risks deadlock on a sync-context framework) — this sync test
    // method presumably tolerates that; confirm before converting to async Task.
    sproc = client.CreateStoredProcedureAsync(this.collectionUri, sproc).Result.Resource;

    MyObject doc = new MyObject(1);
    // The sproc expects a single argument that is itself an array of documents.
    dynamic[] args = new dynamic[] { new dynamic[] { doc } };

    RequestOptions requestOptions = this.ApplyRequestOptions(new RequestOptions { PartitionKey = new PartitionKey("value") }, serializerSettings);

    StoredProcedureResponse<int> scriptResult = client.ExecuteStoredProcedureAsync<int>(
        sproc.SelfLink,
        requestOptions,
        args).Result;

    // Read the imported document back and verify the custom-converted member round-tripped.
    Uri docUri = UriFactory.CreateDocumentUri(this.databaseName, this.collectionName, doc.id);
    MyObject readDoc = client.ReadDocumentAsync<MyObject>(docUri, requestOptions).Result.Document;
    Assert.IsNotNull(readDoc.SerializedObject);
    Assert.AreEqual(doc.SerializedObject.Name, readDoc.SerializedObject.Name);
}
public ResponseRoot(StoredProcedureResponse content) { _Content = content; }
public ResponseRoot() { _Content = null; }
/// <summary>
/// Verifies the change feed processor delivers all five oversized (~1.5 MB) documents, in order,
/// even though all of them cannot fit into a single maximum-size (4 MB) response — exercising the
/// page-size-reduction/retry path with continuation tokens.
/// </summary>
public async Task TestReducePageSizeScenario()
{
    int partitionKey = 0;

    // Create some docs to make sure that one separate response is returned for 1st execute of query before retries.
    // These are to make sure continuation token is passed along during retries.
    string sprocId = "createTwoDocs";
    string sprocBody = @"function(startIndex) { for (var i = 0; i < 2; ++i) __.createDocument( __.getSelfLink(), { id: 'doc' + (i + startIndex).toString(), value: 'y'.repeat(1500000), pk:0 }, err => { if (err) throw err;} );}";

    Scripts scripts = this.Container.Scripts;
    // Result intentionally discarded — only the sproc's existence matters (was an unused local).
    await scripts.CreateStoredProcedureAsync(new StoredProcedureProperties(sprocId, sprocBody));

    // Dispose the event when done: ManualResetEvent wraps an OS wait handle.
    using (ManualResetEvent allDocsProcessed = new ManualResetEvent(false))
    {
        int processedDocCount = 0;
        string accumulator = string.Empty;
        ChangeFeedProcessor processor = this.Container
            .GetChangeFeedProcessorBuilder("test", (IReadOnlyCollection<dynamic> docs, CancellationToken token) =>
            {
                // Count property, not the LINQ Count() extension — no enumeration needed (CA1829).
                processedDocCount += docs.Count;
                foreach (dynamic doc in docs)
                {
                    accumulator += doc.id.ToString() + ".";
                }

                if (processedDocCount == 5)
                {
                    allDocsProcessed.Set();
                }

                return Task.CompletedTask;
            })
            .WithStartFromBeginning()
            .WithInstanceName("random")
            .WithMaxItems(6)
            .WithLeaseContainer(this.LeaseContainer).Build();

        // Generate the payload: docs 0 and 1 created by the sproc in one transaction.
        await scripts.ExecuteStoredProcedureAsync<object>(
            sprocId,
            new PartitionKey(partitionKey),
            new dynamic[] { 0 });

        // Create 3 docs each 1.5MB. All 3 do not fit into MAX_RESPONSE_SIZE (4 MB). 2nd and 3rd are in same transaction.
        string content = string.Format("{{\"id\": \"doc2\", \"value\": \"{0}\", \"pk\": 0}}", new string('x', 1500000));
        await this.Container.CreateItemAsync(JsonConvert.DeserializeObject<dynamic>(content), new PartitionKey(partitionKey));
        await scripts.ExecuteStoredProcedureAsync<object>(sprocId, new PartitionKey(partitionKey), new dynamic[] { 3 });

        await processor.StartAsync();

        // Letting processor initialize and pickup changes
        bool isStartOk = allDocsProcessed.WaitOne(10 * BaseChangeFeedClientHelper.ChangeFeedSetupTime);
        await processor.StopAsync();
        Assert.IsTrue(isStartOk, "Timed out waiting for docs to process");
        Assert.AreEqual("doc0.doc1.doc2.doc3.doc4.", accumulator);
    }
}
private async void AssertStoredProcedures(IDocumentRepository <Retrospective> context) { StoredProcedureResponse <dynamic> response = await context.ExecuteStoredProc("HelloWorld", null); Assert.IsNotNull(response.Response, "Hello, World"); }
private void ValidateStoredProcedureSettings(StoredProcedureProperties storedProcedureSettings, StoredProcedureResponse cosmosResponse) { StoredProcedureProperties settings = cosmosResponse.Resource; Assert.AreEqual(storedProcedureSettings.Body, settings.Body, "Stored Procedure functions do not match"); Assert.AreEqual(storedProcedureSettings.Id, settings.Id, "Stored Procedure id do not match"); Assert.IsTrue(cosmosResponse.RequestCharge > 0); Assert.IsNotNull(cosmosResponse.MaxResourceQuota); Assert.IsNotNull(cosmosResponse.CurrentResourceQuotaUsage); }
/// <summary>
/// Bulk-imports the JSON files under <c>.\Data\</c> into the collection by repeatedly invoking
/// the BulkImport stored procedure with size-limited batches, then counts the documents in the
/// collection to validate and reports the elapsed time.
/// </summary>
/// <param name="client">Client used for all service calls.</param>
/// <param name="collectionLink">Self-link of the target collection.</param>
private static async Task RunBulkImport(DocumentClient client, string collectionLink)
{
    string inputDirectory = @".\Data\";
    string inputFileMask = "*.json";
    int maxFiles = 2000;
    int maxScriptSize = 50000;

    // 1. Get the files. (Removed unused DirectoryInfo/FileInfo[] locals — never read.)
    string[] fileNames = Directory.GetFiles(inputDirectory, inputFileMask);

    // 2. Prepare for import: cap the number of files at maxFiles (0 means "no cap").
    int currentCount = 0;
    int fileCount = maxFiles != 0 ? Math.Min(maxFiles, fileNames.Length) : fileNames.Length;

    // 3. Create stored procedure for this script (recreate to pick up script changes).
    string body = File.ReadAllText(@".\JS\BulkImport.js");
    StoredProcedure sproc = new StoredProcedure
    {
        Id = "BulkImport",
        Body = body
    };

    await TryDeleteStoredProcedure(client, collectionLink, sproc.Id);
    sproc = await client.CreateStoredProcedureAsync(collectionLink, sproc);

    Stopwatch sp = new Stopwatch();
    sp.Start();

    // 4. Create a batch of docs (MAX is limited by request size (2M) and to script for execution.
    //    We send batches of documents to create to script.
    //    Each batch size is determined by MaxScriptSize.
    //    MaxScriptSize should be so that:
    //    -- it fits into one request (MAX reqest size is 16Kb).
    //    -- it doesn't cause the script to time out.
    //    -- it is possible to experiment with MaxScriptSize to get best perf given number of throttles, etc.
    while (currentCount < fileCount)
    {
        // 5. Create args for current batch.
        //    Note that we could send a string with serialized JSON and JSON.parse it on the script side,
        //    but that would cause script to run longer. Since script has timeout, unload the script as much
        //    as we can and do the parsing by client and framework. The script will get JavaScript objects.
        string argsJson = CreateBulkInsertScriptArguments(fileNames, currentCount, fileCount, maxScriptSize);
        var args = new dynamic[] { JsonConvert.DeserializeObject<dynamic>(argsJson) };

        // 6. Execute the batch.
        StoredProcedureResponse<int> scriptResult = await client.ExecuteStoredProcedureAsync<int>(
            sproc.SelfLink,
            new RequestOptions(),
            args);

        // 7. Prepare for next batch: the sproc reports how many docs it actually inserted
        //    before it had to yield, so the next call resumes from there.
        int currentlyInserted = scriptResult.Response;
        currentCount += currentlyInserted;
    }

    // 8. Validate: page through the document feed and count everything.
    int numDocs = 0;
    string continuation = string.Empty;
    do
    {
        // Read document feed and count the number of documents.
        FeedResponse<dynamic> response = await client.ReadDocumentFeedAsync(
            collectionLink,
            new FeedOptions { RequestContinuation = continuation });
        numDocs += response.Count;

        // Get the continuation so that we know when to stop.
        continuation = response.ResponseContinuation;
    } while (!string.IsNullOrEmpty(continuation));

    // BUG FIX: sp.Elapsed.Milliseconds is only the milliseconds COMPONENT (0-999) of the
    // elapsed TimeSpan; ElapsedMilliseconds is the total elapsed time in milliseconds.
    Console.WriteLine("Found {0} documents in the collection inserted in {1}ms\r\n", numDocs, sp.ElapsedMilliseconds);
}
private StoredProcedureResponse <T> ProcessResponse <T>(StoredProcedureResponse <T> response) { _fhirRequestContextAccessor.FhirRequestContext.UpdateFhirRequestContext(response); return(response); }
private StoredProcedureResponse <T> ProcessResponse <T>(StoredProcedureResponse <T> response) { _fhirRequestContextAccessor.FhirRequestContext.UpdateResponseHeaders(response.SessionToken, response.RequestCharge); return(response); }