/// <summary>
/// Read items from the container using the item feed iterator, one page at a time.
/// </summary>
private static async Task ItemFeed(CosmosContainer container)
{
    List<Family> families = new List<Family>();

    // SQL
    FeedIterator<Family> setIterator = container.GetItemsIterator<Family>(maxItemCount: 1);
    while (setIterator.HasMoreResults)
    {
        int count = 0;
        foreach (Family item in await setIterator.FetchNextSetAsync())
        {
            // Count the items in this page so the assertion below actually checks the page size.
            count++;
            Assert("Should only return 1 result at a time.", count <= 1);
            families.Add(item);
        }
    }

    Assert("Expected two families", families.Count == 2);
}
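// NOTE: Assert is referenced above but not defined in this section. The following is a minimal
// sketch of what such a helper could look like (assumes `using System;`); the sample's actual
// helper may differ, e.g. in the exception type it throws.
private static void Assert(string message, bool condition)
{
    if (!condition)
    {
        throw new ApplicationException(message);
    }
}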
/// <summary>
/// Import many documents using a stored procedure.
/// </summary>
private static async Task RunBulkImport(CosmosContainer container)
{
    string inputDirectory = @".\Data\";
    string inputFileMask = "*.json";
    int maxFiles = 2000;
    int maxScriptSize = 50000;

    // 1. Get the files.
    string[] fileNames = Directory.GetFiles(inputDirectory, inputFileMask);
    DirectoryInfo di = new DirectoryInfo(inputDirectory);
    FileInfo[] fileInfos = di.GetFiles(inputFileMask);

    // 2. Prepare for import.
    int currentCount = 0;
    int fileCount = maxFiles != 0 ? Math.Min(maxFiles, fileNames.Length) : fileNames.Length;

    // 3. Create the stored procedure for this script.
    string scriptId = "BulkImport";
    string body = File.ReadAllText(@".\JS\BulkImport.js");

    await TryDeleteStoredProcedure(container, scriptId);
    CosmosScripts cosmosScripts = container.GetScripts();
    StoredProcedureResponse sproc = await cosmosScripts.CreateStoredProcedureAsync(new CosmosStoredProcedureSettings(scriptId, body));

    // 4. Create batches of documents (the maximum is limited by the request size, 2 MB) and send them to the script for execution.
    //    Each batch size is determined by maxScriptSize, which should be chosen so that:
    //    -- the batch fits into one request;
    //    -- the script doesn't time out.
    //    It is worth experimenting with maxScriptSize to get the best performance given the number of throttles, etc.
    while (currentCount < fileCount)
    {
        // 5. Create the arguments for the current batch.
        //    Note that we could send a string with serialized JSON and JSON.parse it on the script side,
        //    but that would make the script run longer. Since the script has a timeout, offload as much work
        //    from the script as possible and do the parsing on the client and framework side; the script
        //    will receive JavaScript objects.
        string argsJson = CreateBulkInsertScriptArguments(fileNames, currentCount, fileCount, maxScriptSize);
        dynamic[] args = new dynamic[] { JsonConvert.DeserializeObject<dynamic>(argsJson) };

        // 6. Execute the batch.
        StoredProcedureExecuteResponse<int> scriptResult = await cosmosScripts.ExecuteStoredProcedureAsync<dynamic, int>(
            new PartitionKey("Andersen"),
            scriptId,
            args);

        // 7. Prepare for the next batch.
        int currentlyInserted = scriptResult.Resource;
        currentCount += currentlyInserted;
    }

    // 8. Validate: count the documents now in the container.
    int numDocs = 0;
    FeedIterator<dynamic> setIterator = container.GetItemsIterator<dynamic>();
    while (setIterator.HasMoreResults)
    {
        FeedResponse<dynamic> response = await setIterator.FetchNextSetAsync();
        numDocs += response.Count();
    }

    Console.WriteLine("Found {0} documents in the collection. There were originally {1} files in the Data directory\r\n", numDocs, fileCount);
}
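// NOTE: CreateBulkInsertScriptArguments is used in step 5 above but not defined in this section.
// The following is a minimal sketch, assuming the batch is passed to the stored procedure as a single
// JSON array string built from the document files (requires `using System.IO;` and `using System.Text;`).
// The name and the exact batching rule follow the call site; the sample's real implementation may differ.
private static string CreateBulkInsertScriptArguments(string[] docFileNames, int currentIndex, int maxCount, int maxScriptSize)
{
    StringBuilder jsonDocumentArray = new StringBuilder();

    if (currentIndex >= maxCount)
    {
        return string.Empty;
    }

    // Always include at least one document in the batch.
    jsonDocumentArray.Append("[");
    jsonDocumentArray.Append(File.ReadAllText(docFileNames[currentIndex]));

    int i = 1;

    // Keep appending documents while the accumulated payload stays under maxScriptSize
    // and there are files left to import.
    while ((currentIndex + i) < maxCount && jsonDocumentArray.Length < maxScriptSize)
    {
        jsonDocumentArray.Append(", ");
        jsonDocumentArray.Append(File.ReadAllText(docFileNames[currentIndex + i]));
        i++;
    }

    jsonDocumentArray.Append("]");
    return jsonDocumentArray.ToString();
}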