/// <summary>
/// End-to-end smoke test for partitioned collections: creates a database (optionally with a
/// shared 50k RU offer), then for each collection exercises create/read/replace/delete plus
/// SQL query and read-feed paths over collections, documents, stored procedures, triggers and
/// UDFs, and finally deletes everything it created.
/// </summary>
/// <param name="client">Client used for every service operation.</param>
/// <param name="sharedOffer">When true, the database is created with an explicit offer.</param>
/// <param name="sharedThroughputCollections">When true, collections are created without their own offer and share the database offer; requires <paramref name="sharedOffer"/>.</param>
/// <param name="numberOfCollections">Number of collections to create and exercise.</param>
internal static async Task PartitionedCollectionSmokeTest(DocumentClient client, bool sharedOffer = false, bool sharedThroughputCollections = false, int numberOfCollections = 1)
{
    if (!sharedOffer && sharedThroughputCollections)
    {
        throw new ArgumentException("Shared throughput collections are not supported without shared offer");
    }

    string uniqDatabaseName = string.Format("SmokeTest_{0}", Guid.NewGuid().ToString("N"));
    RequestOptions options = new RequestOptions { OfferThroughput = 50000 };
    Database database = sharedOffer
        ? await client.CreateDatabaseAsync(new Database { Id = uniqDatabaseName }, options)
        : await client.CreateDatabaseAsync(new Database { Id = uniqDatabaseName });
    Assert.AreEqual(database.AltLink, ClientTestsUtils.GenerateAltLink(uniqDatabaseName));

    // Smoke-read the database back by self link (the result was previously bound to an
    // unused local; the read itself is what matters and it is re-verified below).
    await client.ReadDatabaseAsync(database.SelfLink);

    List<dynamic> results = await ClientTestsUtils.SqlQueryDatabases(client, string.Format(@"select r._rid from root r where r.id = ""{0}""", uniqDatabaseName), 10);
    Assert.AreEqual(1, results.Count, "Should have queried and found 1 document");
    Assert.AreEqual(database.ResourceId, ((QueryResult)results[0]).ResourceId);
    Assert.IsTrue((await ClientTestsUtils.ReadFeedDatabases(client)).Any((db) => db.Id == uniqDatabaseName));

    results = await ClientTestsUtils.SqlQueryDatabases(client, string.Format(@"select r._rid, r.id from root r where r.id = ""{0}""", uniqDatabaseName), 10);
    Assert.AreEqual(1, results.Count, "Should have queried and found 1 database");
    Assert.AreEqual(database.ResourceId, ((QueryResult)results[0]).ResourceId);
    Assert.AreEqual(database.ResourceId, (await client.ReadDatabaseAsync(database.SelfLink)).Resource.ResourceId);
    Assert.AreEqual(((Database)results[0]).AltLink, ClientTestsUtils.GenerateAltLink(uniqDatabaseName));

    // Idiom: generic List<DocumentCollection> instead of the non-generic ArrayList.
    List<DocumentCollection> testCollections = new List<DocumentCollection>();
    for (int i = 0; i < numberOfCollections; i++)
    {
        string uniqCollectionName = "SmokeTestCollection" + Guid.NewGuid().ToString("N");
        PartitionKeyDefinition partitionKeyDefinition = new PartitionKeyDefinition { Paths = new System.Collections.ObjectModel.Collection<string> { "/id" }, Kind = PartitionKind.Hash };

        // Shared-throughput collections must not carry their own offer.
        DocumentCollection collection;
        if (sharedThroughputCollections)
        {
            collection = await TestCommon.CreateCollectionAsync(client, database.SelfLink, new DocumentCollection { Id = uniqCollectionName, PartitionKey = partitionKeyDefinition });
        }
        else
        {
            collection = await TestCommon.CreateCollectionAsync(client, database.SelfLink, new DocumentCollection { Id = uniqCollectionName, PartitionKey = partitionKeyDefinition }, options);
        }
        Assert.AreEqual(collection.AltLink, ClientTestsUtils.GenerateAltLink(uniqDatabaseName, uniqCollectionName, typeof(DocumentCollection)));

        results = await SqlQueryCollections(client, database.SelfLink, string.Format(@"select r._rid from root r where r.id = ""{0}""", uniqCollectionName), 10); // query through database link
        Assert.AreEqual(1, results.Count, "Should have queried and found 1 collection");
        Assert.AreEqual(collection.ResourceId, ((QueryResult)results[0]).ResourceId);
        results = await SqlQueryCollections(client, database.CollectionsLink, string.Format(@"select r._rid, r.id from root r where r.id = ""{0}""", uniqCollectionName), 10); // query through CollectionsLink
        Assert.AreEqual(1, results.Count, "Should have queried and found 1 collection");
        Assert.AreEqual(collection.ResourceId, ((QueryResult)results[0]).ResourceId);
        Assert.AreEqual(1, (await ReadFeedCollections(client, database.SelfLink)).Count(item => item.Id == uniqCollectionName)); // read through database link
        Assert.AreEqual(1, (await ReadFeedCollections(client, database.CollectionsLink)).Count(item => item.Id == uniqCollectionName)); // read through CollectionsLink
        Assert.AreEqual(collection.ResourceId, (await client.ReadDocumentCollectionAsync(collection.SelfLink)).Resource.ResourceId);
        Assert.AreEqual(((DocumentCollection)results[0]).AltLink, ClientTestsUtils.GenerateAltLink(uniqDatabaseName, uniqCollectionName, typeof(DocumentCollection)));
        testCollections.Add(collection);

        // Build a document with simple, array, complex and list properties.
        string uniqDocumentName = "SmokeTestDocument" + Guid.NewGuid().ToString("N");
        LinqGeneralBaselineTests.Book myDocument = new LinqGeneralBaselineTests.Book();
        myDocument.Id = uniqDocumentName;
        myDocument.Title = "My Book"; //Simple Property.
        myDocument.Languages = new LinqGeneralBaselineTests.Language[] { new LinqGeneralBaselineTests.Language { Name = "English", Copyright = "London Publication" }, new LinqGeneralBaselineTests.Language { Name = "French", Copyright = "Paris Publication" } }; //Array Property
        myDocument.Author = new LinqGeneralBaselineTests.Author { Name = "Don", Location = "France" }; //Complex Property
        myDocument.Price = 9.99;
        myDocument.Editions = new List<LinqGeneralBaselineTests.Edition>() { new LinqGeneralBaselineTests.Edition() { Name = "First", Year = 2001 }, new LinqGeneralBaselineTests.Edition() { Name = "Second", Year = 2005 } };

        Document document = await client.CreateDocumentAsync(collection.SelfLink, myDocument);
        Assert.AreEqual(document.AltLink, ClientTestsUtils.GenerateAltLink(uniqDatabaseName, uniqCollectionName, uniqDocumentName, typeof(Document)));
        results = await SqlQueryDocuments(client, collection.SelfLink, string.Format(@"select r._rid from root r where r.id = ""{0}""", uniqDocumentName), 10); // query through collection link
        Assert.AreEqual(1, results.Count, "Should have queried and found 1 document");
        Assert.AreEqual(document.ResourceId, ((QueryResult)results[0]).ResourceId);
        results = await SqlQueryDocuments(client, collection.DocumentsLink, string.Format(@"select r._rid from root r where r.id = ""{0}""", uniqDocumentName), 10); // query through DocumentsLink
        Assert.AreEqual(1, results.Count, "Should have queried and found 1 document");
        Assert.AreEqual(document.ResourceId, ((QueryResult)results[0]).ResourceId);
        Assert.AreEqual(1, (await ReadFeedDocuments(client, collection.SelfLink)).Count(item => item.Id == uniqDocumentName)); // read through collection link
        Assert.AreEqual(1, (await ReadFeedDocuments(client, collection.DocumentsLink)).Count(item => item.Id == uniqDocumentName)); // read through DocumentsLink

        if (client.QueryCompatibilityMode != QueryCompatibilityMode.SqlQuery)
        {
            //Test query with parameters
            results = await SqlQueryDocuments(client, collection.SelfLink, new SqlQuerySpec { QueryText = @"select r._rid from root r where r.id = @id", Parameters = new SqlParameterCollection() { new SqlParameter("@id", uniqDocumentName) } }, 10); // query through collection link
            Assert.AreEqual(1, results.Count, "Should have queried and found 1 document");
            Assert.AreEqual(document.ResourceId, ((QueryResult)results[0]).ResourceId);
        }

        // Replace the document via its alt link, then re-query scoped to its partition.
        // (A RequestOptions local with the same partition key was previously built here
        // but never used — removed.)
        FeedOptions docReplaceFeedOptions = new FeedOptions { EnableCrossPartitionQuery = true, PartitionKey = new PartitionKey(document.Id) };
        myDocument.Title = "My_Book_v2";
        document = await client.ReplaceDocumentAsync(document.AltLink, myDocument);
        results = await SqlQueryDocuments(client, collection.SelfLink, string.Format(@"select r._rid from root r where r.id = ""{0}""", uniqDocumentName), 10, docReplaceFeedOptions);
        Assert.AreEqual(1, results.Count, "Should have queried and found 1 document");
        Assert.AreEqual(document.ResourceId, ((QueryResult)results[0]).ResourceId);
        results = await SqlQueryDocuments(client, collection.SelfLink, string.Format(@"SELECT r.id, r._rid FROM root r WHERE r.id=""{0}""", uniqDocumentName), 10); // query through collection
        Assert.AreEqual(1, results.Count, "Should have queried and found 1 document");
        results = await SqlQueryDocuments(client, collection.DocumentsLink, string.Format(@"SELECT r.id, r._rid FROM root r WHERE r.id=""{0}""", uniqDocumentName), 10); // query through DocumentsLink
        Assert.AreEqual(1, results.Count, "Should have queried and found 1 document");
        Assert.AreEqual(((Document)results[0]).AltLink, ClientTestsUtils.GenerateAltLink(uniqDatabaseName, uniqCollectionName, uniqDocumentName, typeof(Document)));

        // No Range Index on ts - override with scan
        FeedOptions queryFeedOptions1 = new FeedOptions() { EnableScanInQuery = true, EnableCrossPartitionQuery = true };
        results = await SqlQueryDocuments(client, collection.SelfLink, @"SELECT r.name FROM root r WHERE r.Price>0", 10, queryFeedOptions1);
        Assert.AreEqual(1, results.Count, "Should have queried and found 1 document");
        FeedOptions queryFeedOptions2 = new FeedOptions() { EmitVerboseTracesInQuery = true, EnableCrossPartitionQuery = true };
        results = await SqlQueryDocuments(client, collection.SelfLink, @"SELECT r.name FROM root r WHERE r.Price=9.99", 10, queryFeedOptions2);
        Assert.AreEqual(1, results.Count, "Should have queried and found 1 document");
        FeedOptions queryFeedOptions3 = new FeedOptions() { EmitVerboseTracesInQuery = false, EnableCrossPartitionQuery = true };
        results = await SqlQueryDocuments(client, collection.SelfLink, @"SELECT r.name FROM root r WHERE r.Price=9.99", 10, queryFeedOptions3);
        Assert.AreEqual(1, results.Count, "Should have queried and found 1 document");

        // Stored procedure: create, query, read-feed, replace, read.
        string uniqStoredProcedureName = "SmokeTestStoredProcedure" + Guid.NewGuid().ToString();
        StoredProcedure storedProcedure = await client.CreateStoredProcedureAsync(collection.SelfLink, new StoredProcedure { Id = uniqStoredProcedureName, Body = "function f() {var x = 10;}" });
        results = await SqlQueryStoredProcedures(client, collection.SelfLink, string.Format(@"SELECT r.id, r._rid FROM root r WHERE r.id = ""{0}""", uniqStoredProcedureName), 10); // query through collection link
        Assert.AreEqual(1, results.Count, "Should have queried and found 1 storedProcedure");
        Assert.AreEqual(storedProcedure.ResourceId, ((QueryResult)results[0]).ResourceId);
        results = await SqlQueryStoredProcedures(client, collection.StoredProceduresLink, string.Format(@"SELECT r.id, r._rid FROM root r WHERE r.id = ""{0}""", uniqStoredProcedureName), 10); // query through StoredProceduresLink
        Assert.AreEqual(1, results.Count, "Should have queried and found 1 storedProcedure");
        Assert.AreEqual(storedProcedure.ResourceId, ((QueryResult)results[0]).ResourceId);
        Assert.AreEqual(1, (await ReadFeedStoredProcedures(client, collection.SelfLink)).Count(item => item.Id == uniqStoredProcedureName)); // read through collection link
        Assert.AreEqual(1, (await ReadFeedStoredProcedures(client, collection.StoredProceduresLink)).Count(item => item.Id == uniqStoredProcedureName)); // read through StoredProceduresLink
        storedProcedure.Body = "function f() {var x= 20;}";
        storedProcedure = await client.ReplaceStoredProcedureAsync(storedProcedure);
        results = await SqlQueryStoredProcedures(client, collection.StoredProceduresLink, string.Format(@"SELECT r.id, r._rid FROM root r WHERE r.id=""{0}""", storedProcedure.Id), 10); // query through StoredProceduresLink
        Assert.AreEqual(1, results.Count, "Should have queried and found 1 storedProcedure");
        Assert.AreEqual(storedProcedure.ResourceId, ((QueryResult)results[0]).ResourceId);
        Assert.AreEqual(storedProcedure.ResourceId, (await client.ReadStoredProcedureAsync(storedProcedure.SelfLink)).Resource.ResourceId);
        // (A duplicated, consecutive assert on results.Count was removed here.)

        // Trigger: create, query, read-feed, replace, read.
        string uniqTriggerName = "SmokeTestTrigger" + Guid.NewGuid().ToString("N");
        Trigger trigger = await client.CreateTriggerAsync(collection.SelfLink, new Trigger { Id = uniqTriggerName, Body = "function f() {var x = 10;}", TriggerOperation = TriggerOperation.All, TriggerType = TriggerType.Pre });
        results = await SqlQueryTriggers(client, collection.SelfLink, string.Format(@"select r._rid from root r where r.id = ""{0}""", uniqTriggerName), 10); // query through collection link
        Assert.AreEqual(1, results.Count, "Should have queried and found 1 trigger");
        Assert.AreEqual(trigger.ResourceId, ((QueryResult)results[0]).ResourceId);
        results = await SqlQueryTriggers(client, collection.TriggersLink, string.Format(@"select r._rid from root r where r.id = ""{0}""", uniqTriggerName), 10); // query through TriggersLink
        Assert.AreEqual(1, results.Count, "Should have queried and found 1 trigger");
        Assert.AreEqual(trigger.ResourceId, ((QueryResult)results[0]).ResourceId);
        Assert.AreEqual(1, (await ReadFeedTriggers(client, collection.SelfLink)).Count(item => item.Id == uniqTriggerName)); // read through collection link
        Assert.AreEqual(1, (await ReadFeedTriggers(client, collection.TriggersLink)).Count(item => item.Id == uniqTriggerName)); // read through TriggersLink
        trigger.Body = "function f() {var x = 10;}";
        trigger = await client.ReplaceTriggerAsync(trigger);
        results = await SqlQueryTriggers(client, collection.SelfLink, string.Format(@"select r._rid from root r where r.id = ""{0}""", uniqTriggerName), 10);
        Assert.AreEqual(1, results.Count, "Should have queried and found 1 trigger");
        Assert.AreEqual(trigger.ResourceId, ((QueryResult)results[0]).ResourceId);
        Assert.AreEqual(trigger.ResourceId, (await client.ReadTriggerAsync(trigger.SelfLink)).Resource.ResourceId);
        results = await SqlQueryTriggers(client, collection.SelfLink, string.Format(@"SELECT r.id, r._rid FROM root r WHERE r.id=""{0}""", uniqTriggerName), 10); // query through collection link
        Assert.AreEqual(1, results.Count, "Should have queried and found 1 trigger");
        results = await SqlQueryTriggers(client, collection.TriggersLink, string.Format(@"SELECT r.id, r._rid FROM root r WHERE r.id=""{0}""", uniqTriggerName), 10); // query through TriggersLink
        Assert.AreEqual(1, results.Count, "Should have queried and found 1 trigger");

        // UDF: create, query, read-feed, replace, read.
        string uniqUserDefinedFunctionName = "SmokeTestUserDefinedFunction" + Guid.NewGuid().ToString("N");
        UserDefinedFunction userDefinedFunction = await client.CreateUserDefinedFunctionAsync(collection.SelfLink, new UserDefinedFunction { Id = uniqUserDefinedFunctionName, Body = "function (){ var x = 10;}" });
        results = await SqlQueryUserDefinedFunctions(client, collection.SelfLink, string.Format(@"select r._rid from root r where r.id = ""{0}""", uniqUserDefinedFunctionName), 10); // query through collection link
        Assert.AreEqual(1, results.Count, "Should have queried and found 1 userDefinedFunction");
        Assert.AreEqual(userDefinedFunction.ResourceId, ((QueryResult)results[0]).ResourceId);
        results = await SqlQueryUserDefinedFunctions(client, collection.UserDefinedFunctionsLink, string.Format(@"select r._rid from root r where r.id = ""{0}""", uniqUserDefinedFunctionName), 10); // query through UserDefinedFunctionsLink
        Assert.AreEqual(1, results.Count, "Should have queried and found 1 userDefinedFunction");
        Assert.AreEqual(userDefinedFunction.ResourceId, ((QueryResult)results[0]).ResourceId);
        Assert.AreEqual(1, (await ReadFeedUserDefinedFunctions(client, collection.SelfLink)).Count(item => item.Id == uniqUserDefinedFunctionName)); // read through collection link
        Assert.AreEqual(1, (await ReadFeedUserDefinedFunctions(client, collection.UserDefinedFunctionsLink)).Count(item => item.Id == uniqUserDefinedFunctionName)); // read through UserDefinedFunctionsLink
        userDefinedFunction.Body = "function (){ var x = 10;}";
        userDefinedFunction = await client.ReplaceUserDefinedFunctionAsync(userDefinedFunction);
        results = await SqlQueryUserDefinedFunctions(client, collection.SelfLink, string.Format(@"select r._rid from root r where r.id = ""{0}""", uniqUserDefinedFunctionName), 10);
        Assert.AreEqual(1, results.Count, "Should have queried and found 1 userDefinedFunction");
        Assert.AreEqual(userDefinedFunction.ResourceId, ((QueryResult)results[0]).ResourceId);
        Assert.AreEqual(userDefinedFunction.ResourceId, (await client.ReadUserDefinedFunctionAsync(userDefinedFunction.SelfLink)).Resource.ResourceId);
        results = await SqlQueryUserDefinedFunctions(client, collection.SelfLink, string.Format(@"SELECT r.id, r._rid FROM root r WHERE r.id=""{0}""", uniqUserDefinedFunctionName), 10); // query through collection link
        Assert.AreEqual(1, results.Count, "Should have queried and found 1 userDefinedFunction");
        results = await SqlQueryUserDefinedFunctions(client, collection.UserDefinedFunctionsLink, string.Format(@"SELECT r.id, r._rid FROM root r WHERE r.id=""{0}""", uniqUserDefinedFunctionName), 10); // query through UserDefinedFunctionsLink
        Assert.AreEqual(1, results.Count, "Should have queried and found 1 userDefinedFunction");

        //Test select array
        IDocumentQuery<dynamic> queryArray = client.CreateDocumentQuery(collection.SelfLink, "SELECT VALUE [1, 2, 3, 4]").AsDocumentQuery();
        // BUGFIX: was queryArray.ExecuteNextAsync().Result — blocking on async inside an
        // async method risks deadlock/thread-pool starvation; await instead.
        JArray result = (await queryArray.ExecuteNextAsync()).FirstOrDefault();
        // BUGFIX: expected value goes first in Assert.AreEqual, and the JToken is converted
        // explicitly so the comparison is int-to-int rather than JValue.Equals(object).
        Assert.AreEqual(1, (int)result[0]);
        Assert.AreEqual(2, (int)result[1]);
        Assert.AreEqual(3, (int)result[2]);
        Assert.AreEqual(4, (int)result[3]);

        // Partitioned delete requires the partition key (path is /id).
        RequestOptions requestOptions = new RequestOptions { PartitionKey = new PartitionKey(document.Id) };
        await client.DeleteDocumentAsync(document.SelfLink, requestOptions);
    }

    // Cleanup: remove every collection we created, then the database.
    foreach (DocumentCollection collection in testCollections)
    {
        await client.DeleteDocumentCollectionAsync(collection.SelfLink);
    }
    await client.DeleteDatabaseAsync(database.SelfLink);
}
/// <summary>
/// Demo code taken from https://docs.microsoft.com/en-us/azure/cosmos-db/create-graph-dotnet and https://github.com/Azure-Samples/azure-cosmos-db-graph-dotnet-getting-started
/// </summary>
public async Task RunDemo()
{
    // Ensure the database exists (the create result was previously stored in an unused
    // local — the call is kept, the dead variable is gone).
    await _documentClient.CreateDatabaseIfNotExistsAsync(new Database { Id = DatabaseId });
    var demoCollection = await _documentClient.CreateDocumentCollectionIfNotExistsAsync(
        UriFactory.CreateDatabaseUri(DatabaseId),
        new DocumentCollection { Id = DemoCollectionId },
        new RequestOptions { OfferThroughput = 1000 });

    // Azure Cosmos DB supports the Gremlin API for working with Graphs. Gremlin is a functional programming language composed of steps.
    // Here, we run a series of Gremlin queries to show how you can add vertices, edges, modify properties, perform queries and traversals
    // For additional details, see https://aka.ms/gremlin for the complete list of supported Gremlin operators
    var gremlinQueries = new Dictionary<string, string>
    {
        { "Cleanup", "g.V().drop()" },
        { "AddVertex 1", "g.addV('person').property('id', 'thomas').property('firstName', 'Thomas').property('age', 44)" },
        { "AddVertex 2", "g.addV('person').property('id', 'mary').property('firstName', 'Mary').property('lastName', 'Andersen').property('age', 39)" },
        { "AddVertex 3", "g.addV('person').property('id', 'ben').property('firstName', 'Ben').property('lastName', 'Miller')" },
        { "AddVertex 4", "g.addV('person').property('id', 'robin').property('firstName', 'Robin').property('lastName', 'Wakefield')" },
        { "AddEdge 1", "g.V('thomas').addE('knows').to(g.V('mary'))" },
        { "AddEdge 2", "g.V('thomas').addE('knows').to(g.V('ben'))" },
        { "AddEdge 3", "g.V('ben').addE('knows').to(g.V('robin'))" },
        { "UpdateVertex", "g.V('thomas').property('age', 44)" },
        { "CountVertices", "g.V().count()" },
        { "Filter Range", "g.V().hasLabel('person').has('age', gt(40))" },
        { "Project", "g.V().hasLabel('person').values('firstName')" },
        { "Sort", "g.V().hasLabel('person').order().by('firstName', decr)" },
        { "Traverse", "g.V('thomas').out('knows').hasLabel('person')" },
        { "Traverse 2x", "g.V('thomas').out('knows').hasLabel('person').out('knows').hasLabel('person')" },
        { "Loop", "g.V('thomas').repeat(out()).until(has('id', 'robin')).path()" },
        { "DropEdge", "g.V('thomas').outE('knows').where(inV().has('id', 'mary')).drop()" },
        { "CountEdges", "g.E().count()" },
        { "DropVertex", "g.V('thomas').drop()" },
    };

    foreach (KeyValuePair<string, string> gremlinQuery in gremlinQueries)
    {
        Console.WriteLine($"Running {gremlinQuery.Key}: {gremlinQuery.Value}");

        // The CreateGremlinQuery method extensions allow you to execute Gremlin queries and iterate results asynchronously
        IDocumentQuery<dynamic> query = _documentClient.CreateGremlinQuery<dynamic>(demoCollection, gremlinQuery.Value);
        while (query.HasMoreResults)
        {
            foreach (dynamic result in await query.ExecuteNextAsync())
            {
                Console.WriteLine($"\t {JsonConvert.SerializeObject(result)}");
            }
        }

        Console.WriteLine();
    }

    // Data is returned in GraphSON format, which can be deserialized into a strongly-typed vertex, edge or property class
    // The following snippet shows how to do this
    var gremlin = gremlinQueries["AddVertex 1"];
    Console.WriteLine($"Running Add Vertex with deserialization: {gremlin}");

    // Reuse the query string fetched above instead of indexing the dictionary a second time.
    IDocumentQuery<Vertex> insertVertex = _documentClient.CreateGremlinQuery<Vertex>(demoCollection, gremlin);
    while (insertVertex.HasMoreResults)
    {
        foreach (Vertex vertex in await insertVertex.ExecuteNextAsync<Vertex>())
        {
            // Since Gremlin is designed for multi-valued properties, the format returns an array. Here we just read
            // the first value
            string name = (string)vertex.GetVertexProperties("firstName").First().Value;
            Console.WriteLine($"\t Id:{vertex.Id}, Name: {name}");
        }
    }

    Console.WriteLine();
    Console.WriteLine("Done. Press any key to exit...");
    Console.ReadLine();
}
/// <summary>
/// Executes the next page of <paramref name="query"/> and returns its first element,
/// or the default value of <typeparamref name="T"/> when that page is empty.
/// </summary>
public static async Task<T> FirstOrDefaultAsync<T>(this IDocumentQuery<T> query)
{
    return (await query.ExecuteNextAsync<T>()).FirstOrDefault();
}
/// <summary>
/// Gremlin graph demo: provisions the "graphdb" database and "Persons" graph, seeds employee
/// vertices and manager/friend edges, runs every query in the dictionary while printing the
/// raw results, then re-runs selected queries through the <c>runQuery</c> helper.
/// </summary>
/// <param name="client">Document client used for provisioning and Gremlin execution.</param>
public async Task RunAsync(DocumentClient client)
{
    // Ensure the database exists (its create result was previously bound to an unused
    // local — the call is kept, the dead variable is removed).
    await client.CreateDatabaseIfNotExistsAsync(new Database { Id = "graphdb" });
    DocumentCollection graph = await client.CreateDocumentCollectionIfNotExistsAsync(
        UriFactory.CreateDatabaseUri("graphdb"),
        new DocumentCollection { Id = "Persons" },
        new RequestOptions { OfferThroughput = 1000 });

    Dictionary<string, string> gremlinQueries = new Dictionary<string, string>
    {
        { "Cleanup", "g.V().drop()" },
        { "AddVertex 1", "g.addV('employee').property('id', 'u001').property('firstName', 'John').property('age', 44)" },
        { "AddVertex 2", "g.addV('employee').property('id', 'u002').property('firstName', 'Mary').property('age', 37)" },
        { "AddVertex 3", "g.addV('employee').property('id', 'u003').property('firstName', 'Christie').property('age', 30)" },
        { "AddVertex 4", "g.addV('employee').property('id', 'u004').property('firstName', 'Bob').property('age', 35)" },
        { "AddVertex 5", "g.addV('employee').property('id', 'u005').property('firstName', 'Susan').property('age', 31)" },
        { "AddVertex 6", "g.addV('employee').property('id', 'u006').property('firstName', 'Emily').property('age', 29)" },
        { "AddEdge 1", "g.V('u002').addE('manager').to(g.V('u001'))" },
        { "AddEdge 2", "g.V('u005').addE('manager').to(g.V('u001'))" },
        { "AddEdge 3", "g.V('u004').addE('manager').to(g.V('u002'))" },
        { "AddEdge 4", "g.V('u005').addE('friend').to(g.V('u006'))" },
        { "AddEdge 5", "g.V('u005').addE('friend').to(g.V('u003'))" },
        { "AddEdge 6", "g.V('u006').addE('friend').to(g.V('u003'))" },
        { "AddEdge 7", "g.V('u006').addE('manager').to(g.V('u004'))" },
        { "ReturnVertex", "g.V().hasLabel('employee').has('age', gt(40))" },
        { "AndOr", "g.V().hasLabel('employee').and(has('age', gt(35)), has('age', lt(40)))" },
        { "Transversal", "g.V('u002').out('manager').hasLabel('employee')" },
        { "outE/inV", "g.V('u002').outE('manager').inV().hasLabel('employee')" },
        { "CountVertices", "g.V().count()" },
        { "Filter Range", "g.V().hasLabel('employee').and(has('age', gt(35)), has('age', lt(40)))" },
    };

    // Run every query once, dumping each page of results as JSON.
    foreach (KeyValuePair<string, string> gremlinQuery in gremlinQueries)
    {
        Console.WriteLine($"Running {gremlinQuery.Key}: {gremlinQuery.Value}");
        IDocumentQuery<dynamic> query = client.CreateGremlinQuery<dynamic>(graph, gremlinQuery.Value);
        while (query.HasMoreResults)
        {
            foreach (dynamic result in await query.ExecuteNextAsync())
            {
                Console.WriteLine($"\t {JsonConvert.SerializeObject(result)}");
            }
        }
        Console.WriteLine();
    }

    //ReturnVertex operation
    await runQuery(client, graph, gremlinQueries, "ReturnVertex");
    //AND/OR operation
    await runQuery(client, graph, gremlinQueries, "AndOr");
    //Transversal operation
    await runQuery(client, graph, gremlinQueries, "Transversal");
    //outE/inV operation
    await runQuery(client, graph, gremlinQueries, "outE/inV");
    //Filter Range operation
    await runQuery(client, graph, gremlinQueries, "Filter Range");

    Console.WriteLine("Done. Press any key to exit...");
    Console.ReadLine();
}
/// <summary>
/// Azure Function (POST /api/LeaderBoard): looks up the calling associate in the LeaderBoard
/// collection and returns a <c>LeaderboardDetails</c> page of peers in the same role, scoped
/// to either territory or market mode, with an optional (min 3 characters) name search.
/// Responses: 400 on invalid mode/search, 204 when the caller is unknown, 200 otherwise.
/// </summary>
public static async System.Threading.Tasks.Task<IActionResult> RunAsync(
    [HttpTrigger(AuthorizationLevel.Function, "post", Route = "LeaderBoard")] HttpRequestMessage req,
    [CosmosDB(
        databaseName: "contesthub",
        collectionName: "LeaderBoard",
        ConnectionStringSetting = "contesthub_DOCUMENTDB"
    )] DocumentClient client,
    TraceWriter log)
{
    // Parse body. (A commented-out 2nd-level JWT auth block used to live here; restore it
    // from source history if per-user authorization is reinstated.)
    LeaderBoardPostBody data = await req.Content.ReadAsAsync<LeaderBoardPostBody>();

    log.Info($"Processed request for {data.id} in LeaderBoard, settings: limit: {data.limit}, isTerritory: {data.isTerritory}, isMarketOp: {data.isMarketOp}," +
        $"searchText: {data.searchText}");

    Uri collectionUri = UriFactory.CreateDocumentCollectionUri("contesthub", "LeaderBoard");

    // Prepare id filter.
    string id = data.id.ToUpper();
    var territory = Convert.ToBoolean(data.isTerritory.ToLower());
    var isMarketOp = Convert.ToBoolean(data.isMarketOp.ToLower());

    // Exactly one of territory / market-op mode must be selected.
    if (!isMarketOp ^ territory)
    {
        return new BadRequestResult();
    }

    // Check search string length if it's a search query.
    if (!string.IsNullOrEmpty(data.searchText) && data.searchText.Length < 3)
    {
        return new BadRequestResult();
    }

    // Document ids are suffixed by mode.
    id += territory ? "_T" : "_M";

    // Get the top-level associate record for the caller.
    IDocumentQuery<rawAssociate> query = client.CreateDocumentQuery<rawAssociate>(collectionUri, new FeedOptions { EnableCrossPartitionQuery = true })
        .Where(d => d.id.ToUpper() == id)
        .Where(d => d.isTerritory.ToLower() == data.isTerritory.ToLower())
        .Where(d => d.isMarket.ToLower() == data.isMarketOp.ToLower())
        .AsDocumentQuery();
    var results = await query.ExecuteNextAsync<rawAssociate>();
    rawAssociate item = results.FirstOrDefault();
    if (item == null)
    {
        return new NoContentResult();
    }

    // Client sends a 1-based page number; 0 also means the first page.
    int offset = (data.offset == 0) ? 0 : data.offset - 1;

    List<rawAssociate> associateList = new List<rawAssociate>();
    int fullCount = 0;
    if (string.IsNullOrEmpty(data.searchText))
    {
        // No search: page server-side on the precomputed sequence number.
        if (isMarketOp)
        {
            associateList = client.CreateDocumentQuery<rawAssociate>(collectionUri, new FeedOptions { MaxItemCount = data.limit, EnableCrossPartitionQuery = true })
                .Where(d => d.role.ToLower() == item.role.ToLower())
                .Where(d => d.isTerritory.ToLower() == data.isTerritory.ToLower())
                .Where(d => d.isMarket.ToLower() == data.isMarketOp.ToLower())
                .Where(d => d.market.ToLower() == item.market.ToLower())
                .Where(d => d.seqRoleTerritoryMarket > offset * data.limit)
                .OrderBy(d => d.seqRoleTerritoryMarket)
                .Take(data.limit)
                .ToList();
            // BUGFIX: three of the four Count() queries ran without
            // EnableCrossPartitionQuery, which fails against a partitioned collection;
            // made all four consistent with the one that already set it.
            fullCount = client.CreateDocumentQuery<rawAssociate>(collectionUri, new FeedOptions { EnableCrossPartitionQuery = true })
                .Where(d => d.role.ToLower() == item.role.ToLower())
                .Where(d => d.isTerritory.ToLower() == data.isTerritory.ToLower())
                .Where(d => d.isMarket.ToLower() == data.isMarketOp.ToLower())
                .Where(d => d.market.ToLower() == item.market.ToLower())
                .Count();
        }
        else
        {
            // Territory mode.
            associateList = client.CreateDocumentQuery<rawAssociate>(collectionUri, new FeedOptions { MaxItemCount = data.limit, EnableCrossPartitionQuery = true })
                .Where(d => d.role.ToLower() == item.role.ToLower())
                .Where(d => d.isTerritory.ToLower() == data.isTerritory.ToLower())
                .Where(d => d.isMarket.ToLower() == data.isMarketOp.ToLower())
                .Where(d => d.territoryMarketOp.ToLower() == item.territoryMarketOp.ToLower())
                .Where(d => d.seqRoleTerritoryMarket > offset * data.limit)
                .OrderBy(d => d.seqRoleTerritoryMarket)
                .Take(data.limit)
                .ToList();
            fullCount = client.CreateDocumentQuery<rawAssociate>(collectionUri, new FeedOptions { EnableCrossPartitionQuery = true })
                .Where(d => d.role.ToLower() == item.role.ToLower())
                .Where(d => d.isTerritory.ToLower() == data.isTerritory.ToLower())
                .Where(d => d.isMarket.ToLower() == data.isMarketOp.ToLower())
                .Where(d => d.territoryMarketOp.ToLower() == item.territoryMarketOp.ToLower())
                .Count();
        }
    }
    else
    {
        // Search: fetch all name matches ordered by rank, page in memory below.
        if (isMarketOp)
        {
            associateList = client.CreateDocumentQuery<rawAssociate>(collectionUri, new FeedOptions { MaxItemCount = data.limit, EnableCrossPartitionQuery = true })
                .Where(d => d.role.ToLower() == item.role.ToLower())
                .Where(d => d.isTerritory.ToLower() == data.isTerritory.ToLower())
                .Where(d => d.isMarket.ToLower() == data.isMarketOp.ToLower())
                .Where(d => d.market.ToLower() == item.market.ToLower())
                .Where(d => d.name.ToLower().Contains(data.searchText.ToLower()))
                .OrderBy(d => d.currentRank)
                .ToList();
            fullCount = client.CreateDocumentQuery<rawAssociate>(collectionUri, new FeedOptions { EnableCrossPartitionQuery = true })
                .Where(d => d.role.ToLower() == item.role.ToLower())
                .Where(d => d.isTerritory.ToLower() == data.isTerritory.ToLower())
                .Where(d => d.isMarket.ToLower() == data.isMarketOp.ToLower())
                .Where(d => d.market.ToLower() == item.market.ToLower())
                .Where(d => d.name.ToLower().Contains(data.searchText.ToLower()))
                .Count();
        }
        else
        {
            associateList = client.CreateDocumentQuery<rawAssociate>(collectionUri, new FeedOptions { MaxItemCount = data.limit, EnableCrossPartitionQuery = true })
                .Where(d => d.role.ToLower() == item.role.ToLower())
                .Where(d => d.isTerritory.ToLower() == data.isTerritory.ToLower())
                .Where(d => d.isMarket.ToLower() == data.isMarketOp.ToLower())
                .Where(d => d.territoryMarketOp.ToLower() == item.territoryMarketOp.ToLower())
                .Where(d => d.name.ToLower().Contains(data.searchText.ToLower()))
                .OrderBy(d => d.currentRank)
                .ToList();
            fullCount = client.CreateDocumentQuery<rawAssociate>(collectionUri, new FeedOptions { EnableCrossPartitionQuery = true })
                .Where(d => d.role.ToLower() == item.role.ToLower())
                .Where(d => d.isTerritory.ToLower() == data.isTerritory.ToLower())
                .Where(d => d.isMarket.ToLower() == data.isMarketOp.ToLower())
                .Where(d => d.territoryMarketOp.ToLower() == item.territoryMarketOp.ToLower())
                .Where(d => d.name.ToLower().Contains(data.searchText.ToLower()))
                .Count();
        }
    }

    // Offset in-memory for search queries (search results are not paged server-side).
    if (!string.IsNullOrEmpty(data.searchText))
    {
        // BUGFIX: the old arithmetic produced a negative count whenever the requested page
        // started beyond the end of the list (lower >= Count gave upper = Count - lower < 0),
        // making GetRange throw. Clamp the window instead; out-of-range pages return empty.
        int lower = Math.Min(offset * data.limit, associateList.Count);
        int upper = Math.Min(data.limit, associateList.Count - lower);
        associateList = associateList.GetRange(lower, upper);
    }

    LeaderboardDetails leaderboardDetails = new LeaderboardDetails(item, associateList, fullCount);
    return new OkObjectResult(leaderboardDetails);
}
/// <summary>
/// HTTP-triggered function that marks every notification document belonging to the
/// requesting user as "bell reset" (sets the "isBellReset" property to the string "true").
/// Returns 200 OK when at least one document was updated, 204 No Content otherwise.
/// </summary>
public static async System.Threading.Tasks.Task <IActionResult> RunAsync(
    [HttpTrigger(AuthorizationLevel.Function, "post", Route = "notificationcountreset")] HttpRequestMessage req,
    [CosmosDB(
        databaseName: "contesthub",
        collectionName: "NotificationLanding",
        ConnectionStringSetting = "contesthub_DOCUMENTDB"
    )] DocumentClient client,
    TraceWriter log)
{
    PostBodyNotificationCountReset data = await req.Content.ReadAsAsync <PostBodyNotificationCountReset>();

    // NOTE(review): a commented-out 2nd-level JWT auth block (upn claim vs. data.userId)
    // used to live here; it was dead code and has been removed.

    log.Info($"Processed request for {data.userId} in NotificationCountReset");

    Uri collectionUri = UriFactory.CreateDocumentCollectionUri("contesthub", "NotificationLanding");

    // Server-side case-insensitive match on the owner's logon id.
    IDocumentQuery <NotificationElement> notificationQuery =
        client.CreateDocumentQuery <NotificationElement>(collectionUri, new FeedOptions { EnableCrossPartitionQuery = true })
            .Where(d => d.logonid.ToUpper() == data.userId.ToUpper())
            .AsDocumentQuery();

    bool anyUpdated = false;
    while (notificationQuery.HasMoreResults)
    {
        foreach (Document doc in await notificationQuery.ExecuteNextAsync())
        {
            // Flag is stored as the string "true" (existing document schema).
            doc.SetPropertyValue("isBellReset", "true");
            await client.ReplaceDocumentAsync(doc.SelfLink, doc);
            anyUpdated = true;
        }
    }

    return anyUpdated ? (IActionResult)new OkResult() : new NoContentResult();
}
/// <summary>
/// First verifies that every expected document is present in the collection (via a
/// select-all read), then executes each query in <paramref name="queries"/> and
/// validates its results with <c>this.Validate</c>. Failed queries are optionally
/// retried after a two-minute pause.
/// </summary>
/// <param name="client">Client used to run the queries.</param>
/// <param name="collectionLink">Link of the collection under test.</param>
/// <param name="queries">Queries to execute and verify.</param>
/// <param name="pageSize">MaxItemCount for each query page.</param>
/// <param name="retries">Number of retry rounds for failed queries.</param>
/// <param name="allowScan">Whether to enable scan-in-query.</param>
/// <returns>0 when all queries validate, -1 otherwise.</returns>
internal virtual async Task <int> QueryAndVerifyDocuments(DocumentClient client, string collectionLink, IEnumerable <Query> queries, int pageSize = 1000, int retries = 0, bool allowScan = false)
{
    // First we make sure that all the queries are inserted
    {
        List <dynamic> queriedDocuments = new List <dynamic>();
        IDocumentQuery <Document> selectAllQuery = client.CreateDocumentQuery(collectionLink, feedOptions: new FeedOptions { MaxItemCount = pageSize, EnableScanInQuery = allowScan, EnableCrossPartitionQuery = true }).AsDocumentQuery();
        while (selectAllQuery.HasMoreResults)
        {
            DocumentFeedResponse <dynamic> queryResultsPage = await selectAllQuery.ExecuteNextAsync();
            System.Diagnostics.Trace.TraceInformation("ReadFeed continuation token: {0}, SessionToken: {1}", queryResultsPage.ResponseContinuation, queryResultsPage.SessionToken);
            queriedDocuments.AddRange(queryResultsPage);
        }

        // Build the expected document set from the same template used at insert time.
        List <dynamic> expected = new List <dynamic>(this.documents.Count());
        for (int i = 0; i < this.documents.Count(); ++i)
        {
            expected.Add(JsonConvert.DeserializeObject(String.Format(CultureInfo.InvariantCulture, DocumentFormat, i + 1, String.Empty)));
        }

        // Compare by numeric id; queried pages can arrive in any order.
        queriedDocuments.Sort((doc1, doc2) => int.Parse(doc1.id).CompareTo(int.Parse(doc2.id)));
        IEnumerable <dynamic> expectedIds = expected.Select(doc => doc.id.ToString());
        IEnumerable <dynamic> actualIds = queriedDocuments.Select(doc => doc.id.ToString());
        if (!expectedIds.SequenceEqual(actualIds))
        {
            System.Diagnostics.Trace.TraceInformation("Failed to insert all the documents, queried documents are:" + Environment.NewLine + String.Join(Environment.NewLine, queriedDocuments));
            return(-1);
        }
        System.Diagnostics.Trace.TraceInformation("All the documents are inserted");
    }

    // Query and verify
    TimeSpan totalQueryLatencyAllPages = TimeSpan.FromSeconds(0);
    uint numberOfQueries = 0;
    List <Query> failedQueries = new List <Query>();
    List <Query> query_list = queries as List <Query> ?? queries.ToList();
    foreach (Query query in query_list)
    {
        List <string> queriedDocuments = new List <string>();
        List <string> activityIDsAllQueryPages = new List <string>();

        // Periodic progress / latency report.
        if (numberOfQueries > 0 && numberOfQueries % 100 == 0)
        {
            System.Diagnostics.Trace.TraceInformation(DateTime.Now.ToString("HH:mm:ss.ffff") + @": Executing query {0} of {1}", numberOfQueries + 1, query_list.Count());
            System.Diagnostics.Trace.TraceInformation(@"    Query latency per query (avg ms) {0} after {1} queries", totalQueryLatencyAllPages.TotalMilliseconds / numberOfQueries, numberOfQueries);
        }

        IDocumentQuery <dynamic> docQuery = client.CreateDocumentQuery(collectionLink, query.ToString(), feedOptions: new FeedOptions { MaxItemCount = pageSize, EnableScanInQuery = allowScan, EnableCrossPartitionQuery = true }).AsDocumentQuery();
        while (docQuery.HasMoreResults)
        {
            DateTime startTime = DateTime.Now;
            DocumentFeedResponse <dynamic> queryResultsPage = await QueryWithRetry(docQuery, query.ToString());
            activityIDsAllQueryPages.Add(queryResultsPage.ActivityId);
            totalQueryLatencyAllPages += DateTime.Now - startTime;
            foreach (JObject result in queryResultsPage)
            {
                queriedDocuments.Add(result.ToString(Formatting.None));
            }
        }
        numberOfQueries++;

        bool valid;
        IEnumerable <string> expected = this.Validate(queriedDocuments, query, out valid);
        if (!valid)
        {
            System.Diagnostics.Trace.TraceInformation(
                DateTime.Now.ToString("HH:mm:ss.ffff") + @": Query {0} did not retrieve expected documents, query all pages activitiIDs: ({1})" + Environment.NewLine +
                "Expected:" + Environment.NewLine + "{2}" + Environment.NewLine +
                "Actual:" + Environment.NewLine + "{3}" + Environment.NewLine,
                query.ToString(), String.Join(",", activityIDsAllQueryPages), String.Join(",", expected), String.Join(",", queriedDocuments));
            failedQueries.Add(query);
        }
    }

    if (failedQueries.Count == 0)
    {
        System.Diagnostics.Trace.TraceInformation(@"*** TEST PASSED ***");
        return(0);
    }
    else
    {
        System.Diagnostics.Trace.TraceInformation(@"*** TEST FAILED with seed {0}***", this.seed);
        int result = -1;
        // In case of a failure, retry only failed queries after sleeping for couple of minutes.
        if (retries > 0)
        {
            System.Diagnostics.Trace.TraceInformation(@"*** Retrying Failed queries, {0} retries left ***", --retries);
            // FIX: previously Task.Delay(...).Wait() blocked a thread-pool thread inside
            // an async method; await the delay instead.
            await Task.Delay(120 * 1000);
            result = await this.QueryAndVerifyDocuments(client, collectionLink, failedQueries, pageSize, retries, allowScan);
        }
        return(result);
    }
}
/// <summary>
/// Lab entry point. Earlier exercises (Tasks 2-4: projections and JOINs; Exercise 4:
/// pagination; Exercise 5: cross-partition queries and continuation tokens) were left
/// here as large commented-out blocks and have been removed for readability.
/// Only the final exercise below is active: a cross-partition query printed page by page.
/// </summary>
public static async Task Main(string[] args)
{
    using (DocumentClient client = new DocumentClient(_endpointUri, _primaryKey))
    {
        await client.OpenAsync();
        Uri collectionUri = UriFactory.CreateDocumentCollectionUri(_databaseId, _collectionId);
        FeedOptions feedOptions = new FeedOptions
        {
            EnableCrossPartitionQuery = true
        };

        // Earlier points used thresholds 14000 / 14950 / 14996; point 29 uses 14998.
        string queryText = "SELECT * FROM students s WHERE s.financialData.tuitionBalance > 14998";

        IDocumentQuery <Student> studentQuery = client
            .CreateDocumentQuery <Student>(collectionUri, queryText, feedOptions)
            .AsDocumentQuery();

        int pageNumber = 0;
        while (studentQuery.HasMoreResults)
        {
            await Console.Out.WriteLineAsync($"---Page #{++pageNumber:0000}---");
            foreach (Student result in await studentQuery.ExecuteNextAsync())
            {
                await Console.Out.WriteLineAsync($"Enrollment: {result.enrollmentYear}\tBalance: {result.financialData.tuitionBalance}\t{result.studentAlias}@consoto.edu");
            }
        }
    }
}
/// <summary>
/// HTTP-triggered count function. Counts documents in the Items collection, optionally
/// filtered to a single customer (single-partition query when the "customer" query-string
/// parameter is supplied, fan-out cross-partition query otherwise). Responds with the
/// count and the total request charge in RUs.
/// </summary>
public static async Task <IActionResult> Run(
    [HttpTrigger(AuthorizationLevel.Anonymous, "get", Route = null)] HttpRequest req,
    ILogger log,
    ExecutionContext context)
{
    try
    {
        EnsureClient(context);

        string customer = req.Query["customer"];
        SqlQuerySpec querySpec = null;
        IDocumentQuery <dynamic> query = null;
        if (!String.IsNullOrWhiteSpace(customer))
        {
            // FIX: the customer value comes from the request query string; it was
            // previously concatenated into the SQL text (SQL injection). Use a
            // parameterized SqlQuerySpec instead. Equality is equivalent to the old
            // single-value IN ('...') filter.
            querySpec = new SqlQuerySpec(
                "SELECT VALUE COUNT(1) FROM Items i WHERE i.customer = @customer",
                new SqlParameterCollection { new SqlParameter("@customer", customer) });
            query = cosmosDbClient.CreateDocumentQuery(
                collectionUri,
                querySpec,
                new FeedOptions()
                {
                    EnableCrossPartitionQuery = false,
                    PartitionKey = new PartitionKey(customer),
                    PopulateQueryMetrics = true,
                    MaxItemCount = 50,
                    MaxDegreeOfParallelism = 0,
                    MaxBufferedItemCount = 100
                }).AsDocumentQuery();
        }
        else
        {
            querySpec = new SqlQuerySpec("SELECT VALUE COUNT(1) FROM Items i");
            query = cosmosDbClient.CreateDocumentQuery(
                collectionUri,
                querySpec,
                new FeedOptions()
                {
                    EnableCrossPartitionQuery = true,
                    PartitionKey = null,
                    PopulateQueryMetrics = true,
                    MaxItemCount = 50,
                    MaxDegreeOfParallelism = 0,
                    MaxBufferedItemCount = 100
                }).AsDocumentQuery();
        }

        double totalRUs = 0;
        long count = 0;
        while (query.HasMoreResults)
        {
            FeedResponse <dynamic> feedResponse = await query.ExecuteNextAsync();
            // Use the injected logger rather than Console.WriteLine so output reaches
            // the Functions log stream.
            log.LogInformation("Request charge: {RequestCharge}", feedResponse.RequestCharge);
            totalRUs += feedResponse.RequestCharge;

            IReadOnlyDictionary <string, QueryMetrics> partitionIdToQueryMetrics = feedResponse.QueryMetrics;
            foreach (KeyValuePair <string, QueryMetrics> kvp in partitionIdToQueryMetrics)
            {
                log.LogInformation("{PartitionId}: {QueryMetrics}", kvp.Key, kvp.Value);
            }

            // A COUNT query yields one scalar per partition page; sum them all.
            IEnumerator <dynamic> docEnumerator = feedResponse.GetEnumerator();
            while (docEnumerator.MoveNext())
            {
                count += (long)docEnumerator.Current;
            }
        }

        var responsePayload = new ResponseContract
        {
            Count = count,
            TotalRUs = totalRUs,
        };
        log.LogInformation("Count: {0}, Total RUs: {1}", count, totalRUs);
        return(new OkObjectResult(JsonConvert.SerializeObject(responsePayload)));
    }
    catch (Exception error)
    {
        log.LogError(error, "Count query failed");
        // NOTE(review): returning error.ToString() exposes stack traces to callers;
        // kept for backward compatibility, consider a generic message in production.
        return(new ObjectResult(error.ToString()) { StatusCode = 500 });
    }
}
/// <summary>
/// HTTP-triggered function returning a manager's team-performance list for a given
/// role/period, sorted by the requested column and paged by offset/limit.
/// Returns 204 No Content when no matching document exists or the sort order is invalid.
/// </summary>
public static async System.Threading.Tasks.Task <IActionResult> RunAsync(
    [HttpTrigger(AuthorizationLevel.Function, "post", Route = "TeamPerformance")] HttpRequestMessage req,
    [CosmosDB(
        databaseName: "contesthub",
        collectionName: "TeamPerformance",
        ConnectionStringSetting = "contesthub_DOCUMENTDB"
    )] DocumentClient client,
    TraceWriter log)
{
    PostBody data = await req.Content.ReadAsAsync <PostBody>();

    // NOTE(review): a commented-out 2nd-level JWT auth block used to live here (dead code, removed).

    log.Info($"Processed request for {data.id} in Profile, settings: sortOrder: {data.sortOrder}, sortBy: {data.sortBy}, limit: {data.limit}," +
             $"role: {data.role}, period: {data.period}");

    // Query data
    Uri collectionUri = UriFactory.CreateDocumentCollectionUri("contesthub", "TeamPerformance");

    // Document id encodes user, role and period (WTD/QTD/YTD).
    string id = $"{data.id}_{data.role}_{data.period}";
    IDocumentQuery <TeamPerformance> query = client.CreateDocumentQuery <TeamPerformance>(collectionUri, new FeedOptions { EnableCrossPartitionQuery = true })
        .Where(d => d.id.ToUpper() == id.ToUpper()).AsDocumentQuery();

    // At most one matching document is expected; take the first.
    TeamPerformance item = null;
    while (query.HasMoreResults)
    {
        foreach (TeamPerformance result in await query.ExecuteNextAsync())
        {
            item = result;
            break;
        }
    }

    if (item == null)
    {
        return(new NoContentResult());
    }

    // Pick the sort key for the requested column. This replaces five near-identical
    // if/else blocks; null selector means "unknown column -> keep stored order".
    // (Also removes a redundant double Convert.ToDouble in the Accounts/NoPay branches.)
    Func <TeamPerformanceElement, IComparable> keySelector = null;
    if (data.sortBy == SORTBY.AP)
    {
        keySelector = elem => Convert.ToDouble(elem.ap.Replace("$", ""));
    }
    else if (data.sortBy == SORTBY.Firstname)
    {
        keySelector = elem => elem.firstName;
    }
    else if (data.sortBy == SORTBY.Lastname)
    {
        keySelector = elem => elem.lastName;
    }
    else if (data.sortBy == SORTBY.Accounts)
    {
        keySelector = elem => Convert.ToDouble(elem.accounts);
    }
    else if (data.sortBy == SORTBY.NoPay)
    {
        keySelector = elem => Convert.ToDouble(elem.noPay.Replace("%", ""));
    }

    List <TeamPerformanceElement> sorted;
    if (keySelector == null)
    {
        // default only
        sorted = item.teamPerformanceDetails.teamPerformanceList;
    }
    else if (data.sortOrder == ORDER.Descending)
    {
        sorted = item.teamPerformanceDetails.teamPerformanceList.OrderByDescending(keySelector).ToList();
    }
    else if (data.sortOrder == ORDER.Ascending)
    {
        sorted = item.teamPerformanceDetails.teamPerformanceList.OrderBy(keySelector).ToList();
    }
    else
    {
        log.Warning("Order is not specified correctly");
        return(new NoContentResult());
    }

    // limit feature: capture the total before replacing the list with the requested page.
    int totalCount = item.teamPerformanceDetails.teamPerformanceList.Count();
    item.teamPerformanceDetails.teamPerformanceList = sorted.Skip(((data.offset == 0 ? 1 : data.offset) - 1) * data.limit).Take(data.limit).ToList();

    // Build as per specifications
    TeamPerformanceNoId finalRes = new TeamPerformanceNoId(item, totalCount);

    // Return
    return(new OkObjectResult(finalRes));
}
/// <inheritdoc/>
public async Task <List <Instance> > GetInstancesInStateOfInstanceOwner(int instanceOwnerPartyId, string instanceState)
{
    List <Instance> instances = new List <Instance>();
    string instanceOwnerPartyIdString = instanceOwnerPartyId.ToString();

    // Single-partition query: the owner's party id is the partition key.
    FeedOptions feedOptions = new FeedOptions
    {
        PartitionKey = new PartitionKey(instanceOwnerPartyIdString)
    };

    if (_settings.CollectMetrics)
    {
        feedOptions.PopulateQueryMetrics = true;
    }

    IQueryable <Instance> filter;

    if (instanceState.Equals("active"))
    {
        // Active = visible now, not (soft/hard) deleted, not archived.
        filter = _client.CreateDocumentQuery <Instance>(_collectionUri, feedOptions)
                 .Where(i => i.InstanceOwner.PartyId == instanceOwnerPartyIdString)
                 .Where(i => (!i.VisibleAfter.HasValue || i.VisibleAfter <= DateTime.UtcNow))
                 .Where(i => !i.Status.SoftDeleted.HasValue)
                 .Where(i => !i.Status.HardDeleted.HasValue)
                 .Where(i => !i.Status.Archived.HasValue);
    }
    else if (instanceState.Equals("deleted"))
    {
        filter = _client.CreateDocumentQuery <Instance>(_collectionUri, feedOptions)
                 .Where(i => i.InstanceOwner.PartyId == instanceOwnerPartyIdString)
                 .Where(i => i.Status.SoftDeleted.HasValue)
                 .Where(i => !i.Status.HardDeleted.HasValue);
    }
    else if (instanceState.Equals("archived"))
    {
        filter = _client.CreateDocumentQuery <Instance>(_collectionUri, feedOptions)
                 .Where(i => i.InstanceOwner.PartyId == instanceOwnerPartyIdString)
                 .Where(i => i.Status.Archived.HasValue)
                 .Where(i => !i.Status.SoftDeleted.HasValue)
                 .Where(i => !i.Status.HardDeleted.HasValue);
    }
    else
    {
        // empty list
        return(instances);
    }

    IDocumentQuery <Instance> query = filter.AsDocumentQuery();

    // FIX: the previous version called ExecuteNextAsync exactly once and therefore
    // returned only the first page of results. Drain every page.
    while (query.HasMoreResults)
    {
        FeedResponse <Instance> feedResponse = await query.ExecuteNextAsync <Instance>();

        if (_settings.CollectMetrics)
        {
            _logger.LogError($"Metrics retrieving {instanceState} instances for {instanceOwnerPartyId}: {JsonConvert.SerializeObject(feedResponse.QueryMetrics)}");
        }

        instances.AddRange(feedResponse);
    }

    await PostProcess(instances);

    return(instances);
}
// Starts the change-feed polling loop on a background task. Idempotent: returns
// immediately if already running. The loop reads change-feed pages from a single
// partition key range ("0"), forwards batches to SignalR clients, and records
// App Insights telemetry (events, metrics, dependencies, operations) throughout.
// NOTE(review): PartitionKeyRangeId is hard-coded to "0" — only the first physical
// partition's changes are observed; confirm the collection is single-partition.
public Task StartAsync(CancellationToken cancellation)
{
    // Already running: nothing to do.
    if (this.isRunning)
    {
        return(Task.CompletedTask);
    }

    // Lazily create the DocumentClient and collection link on first start.
    if (this.documentClient == null)
    {
        this.telemetryClient.TrackEvent($"Creating DocumentClient...");
        this.documentClient = new DocumentClient(new Uri(this.cosmosDbConfiguration.Endpoint), this.cosmosDbConfiguration.MasterKey, ChangeFeedReader.BuildConnectionPolicy(this.cosmosDbConfiguration));
        this.collectionLink = UriFactory.CreateDocumentCollectionUri(this.cosmosDbConfiguration.DatabaseName, this.cosmosDbConfiguration.CollectionName);
        this.telemetryClient.TrackEvent($"DocumentClient ready.");
    }

    this.isRunning = true;

    // Delay between polls when the feed has no changes; configurable, with a default.
    TimeSpan feedPollDelay = TimeSpan.FromMilliseconds(this.cosmosDbConfiguration.PollingInterval.HasValue ? this.cosmosDbConfiguration.PollingInterval.Value : ChangeFeedReader.DefaultPollingIntervalInMilliseconds);

    return(Task.Run(async() =>
    {
        this.telemetryClient.TrackEvent($"ChangeFeedReader running.");

        ChangeFeedOptions options = new ChangeFeedOptions
        {
            MaxItemCount = -1,            // -1 = let the service pick the page size
            PartitionKeyRangeId = "0",
        };

        if (this.cosmosDbConfiguration.MaxItemCount.HasValue)
        {
            options.MaxItemCount = this.cosmosDbConfiguration.MaxItemCount.Value;
        }

        while (this.isRunning)
        {
            // New query per outer iteration; continuation is carried in `options`.
            IDocumentQuery <Document> query = this.documentClient.CreateDocumentChangeFeedQuery(this.collectionLink, options);
            do
            {
                // Capture any DocumentClientException so the classification logic below
                // runs outside the catch block.
                ExceptionDispatchInfo exceptionDispatchInfo = null;
                FeedResponse <Document> readChangesResponse = null;
                var operation = this.telemetryClient.StartOperation(new RequestTelemetry() { Name = "ChangeFeedReader.ReadFeed" });
                DateTimeOffset feedDependencyStartTime = DateTimeOffset.UtcNow;
                try
                {
                    readChangesResponse = await query.ExecuteNextAsync <Document>();
                    this.telemetryClient.TrackMetric(new MetricTelemetry("CosmosDB.ChangeFeed.RU", readChangesResponse.RequestCharge));
                    this.telemetryClient.TrackDependency("CosmosDB.ChangeFeed", "ExecuteNextAsync", feedDependencyStartTime, DateTimeOffset.UtcNow.Subtract(feedDependencyStartTime), true);
                    // Persist the continuation token so the next query resumes here.
                    options.RequestContinuation = readChangesResponse.ResponseContinuation;
                }
                catch (DocumentClientException ex)
                {
                    exceptionDispatchInfo = ExceptionDispatchInfo.Capture(ex);
                    this.telemetryClient.TrackDependency("CosmosDB.ChangeFeed", "ExecuteNextAsync", feedDependencyStartTime, DateTimeOffset.UtcNow.Subtract(feedDependencyStartTime), false);
                }

                if (exceptionDispatchInfo != null)
                {
                    DocumentClientException dcex = (DocumentClientException)exceptionDispatchInfo.SourceException;
                    if ((HttpStatusCode)dcex.StatusCode == HttpStatusCode.NotFound && (SubStatusCode)ChangeFeedReader.GetSubStatusCode(dcex) != SubStatusCode.ReadSessionNotAvailable)
                    {
                        // Most likely, the database or collection was removed while we were enumerating.
                        // Fatal: stop the reader.
                        this.telemetryClient.TrackException(dcex);
                        this.telemetryClient.StopOperation(operation);
                        this.isRunning = false;
                        break;
                    }
                    else if ((HttpStatusCode)dcex.StatusCode == HttpStatusCode.Gone)
                    {
                        // Partition moved/split; logged only, the loop retries.
                        SubStatusCode subStatusCode = (SubStatusCode)ChangeFeedReader.GetSubStatusCode(dcex);
                        this.telemetryClient.TrackException(dcex);
                    }
                    else if ((int)dcex.StatusCode == 429 || (HttpStatusCode)dcex.StatusCode == HttpStatusCode.ServiceUnavailable)
                    {
                        // Throttled or transiently unavailable: retriable, RetryAfter honored below.
                        this.telemetryClient.TrackEvent($"Retriable exception: {dcex.Message}");
                    }
                    else if (dcex.Message.Contains("Reduce page size and try again."))
                    {
                        this.telemetryClient.TrackEvent($"Page size error while reading the feed.");
                        // Temporary workaround to compare exception message, until server provides better way of handling this case.
                        // Halve MaxItemCount down to a floor of 1.
                        if (!options.MaxItemCount.HasValue)
                        {
                            options.MaxItemCount = DefaultMaxItemCount;
                        }
                        else if (options.MaxItemCount <= 1)
                        {
                            this.telemetryClient.TrackEvent($"Cannot reduce maxItemCount further as it's already at {options.MaxItemCount}.");
                            this.telemetryClient.TrackException(new Exception("Cannot reduce maxItemCount"));
                        }
                        else
                        {
                            options.MaxItemCount /= 2;
                            this.telemetryClient.TrackEvent($"Reducing maxItemCount, new value: {options.MaxItemCount}.");
                        }
                    }
                    else
                    {
                        this.telemetryClient.TrackException(dcex);
                    }

                    // Honor the service-supplied back-off before retrying.
                    if (dcex.RetryAfter != TimeSpan.Zero)
                    {
                        this.telemetryClient.TrackTrace($"Exception requires retryAfter, sleeping {dcex.RetryAfter.TotalMilliseconds} ms.");
                        await Task.Delay(dcex.RetryAfter, cancellation);
                    }
                }

                if (readChangesResponse != null)
                {
                    var results = readChangesResponse.ToList();
                    if (results.Count > 0)
                    {
                        var lsn = results.First().GetPropertyValue <long>("_lsn");
                        this.telemetryClient.TrackTrace($"Detected {results.Count} documents. First _lsn {lsn}");
                        DateTimeOffset signalRDependencyStartTime = DateTimeOffset.UtcNow;
                        try
                        {
                            // Project each changed document to its "items" payload plus its
                            // log sequence number, and broadcast to all SignalR clients.
                            var response = results.Select((d) => new
                            {
                                items = d.GetPropertyValue <List <Pixel> >("items"),
                                _lsn = d.GetPropertyValue <long>("_lsn"),
                            });
                            await this.signalRHubContext.Clients.All.SendAsync("Changes", JsonConvert.SerializeObject(response));
                            this.telemetryClient.TrackDependency("SignalR", "SendAsync", signalRDependencyStartTime, DateTimeOffset.UtcNow.Subtract(signalRDependencyStartTime), true);
                        }
                        catch (Exception ex)
                        {
                            // Broadcast failures are logged but do not stop the reader.
                            this.telemetryClient.TrackException(ex);
                            this.telemetryClient.TrackDependency("SignalR", "SendAsync", signalRDependencyStartTime, DateTimeOffset.UtcNow.Subtract(signalRDependencyStartTime), false);
                        }
                        this.telemetryClient.StopOperation(operation);
                    }
                    else
                    {
                        // Empty page: back off before polling again.
                        this.telemetryClient.TrackTrace($"No changes, sleeping {feedPollDelay.TotalMilliseconds} ms.");
                        this.telemetryClient.StopOperation(operation);
                        await Task.Delay(feedPollDelay, cancellation);
                    }
                }
                else
                {
                    this.telemetryClient.StopOperation(operation);
                }
            }while (query.HasMoreResults && this.isRunning);
        }
    }));
}
/// <summary>
/// HTTP-triggered demo function for a Cosmos DB Gremlin (graph) collection: ensures the
/// database/collection exist, adds two "user" vertices, runs the sample Gremlin queries,
/// and logs the results.
/// </summary>
public static async Task RunAsync(
    [HttpTrigger(AuthorizationLevel.Anonymous, "get", "post", Route = null)] HttpRequestMessage req,
    TraceWriter log)
{
    log.Info($"C# function processing: NortonGraphHttpTrigger2");
    string documentsEndpoint = ConfigurationManager.AppSettings["documentsendpoint"];
    string graphDbKey = ConfigurationManager.AppSettings["graphdbkey"];
    string databaseName = ConfigurationManager.AppSettings["database"];
    string documentCollection = ConfigurationManager.AppSettings["collection"];
    log.Info($" documentsEndpoint:{documentsEndpoint}");
    // SECURITY FIX: the account master key (graphDbKey) was previously written to the
    // log stream; never log credentials.
    log.Info($" databaseName:{databaseName}");
    log.Info($" documentCollection:{documentCollection}");

    // FIX: DocumentClient is IDisposable and was never disposed; wrap it in a using.
    // (Creating a client per invocation is still costly — consider a static instance.)
    using (var client = new DocumentClient(new Uri(documentsEndpoint), graphDbKey))
    {
        Database database = await client.CreateDatabaseIfNotExistsAsync(new Database { Id = databaseName });
        DocumentCollection graph = await client.CreateDocumentCollectionIfNotExistsAsync(
            UriFactory.CreateDatabaseUri(databaseName),
            new DocumentCollection { Id = documentCollection },
            new RequestOptions { OfferThroughput = 1000 });

        // Sample vertex payloads: one loosely-typed dictionary, one strongly-typed User.
        var data = new Dictionary <string, dynamic>()
        {
            { "id", Guid.NewGuid() },
            { "array", new List <string> { "blah" } },
            { "firstName", "Justin" },
            { "lastName", "Bieber" },
            { "male", true },
            { "age", 32 },
        };
        var data2 = new User
        {
            id = Guid.NewGuid(),
            array = new List <string> { "blah2" },
            firstName = "Larry",
            lastName = "Gowan",
            male = true,
            age = 32
        };
        var query1 = QueryAddVertex("user", data);
        var query2 = QueryAddVertex("user", data2);

        foreach (KeyValuePair <string, string> gremlinQuery in GremlinQueries)
        {
            log.Info($"Running {gremlinQuery.Key}: {gremlinQuery.Value}");

            // The CreateGremlinQuery method extensions allow you to execute Gremlin queries and iterate
            // results asychronously
            IDocumentQuery <dynamic> query = client.CreateGremlinQuery <dynamic>(graph, gremlinQuery.Value);
            while (query.HasMoreResults)
            {
                foreach (dynamic result in await query.ExecuteNextAsync())
                {
                    log.Info($"\t {JsonConvert.SerializeObject(result)}");
                }
            }
        }

        // Data is returned in GraphSON format, which be deserialized into a strongly-typed vertex, edge or property class
        // The following snippet shows how to do this
        string gremlin = GremlinQueries["AddVertex 1"];
        log.Info($"Running Add Vertex with deserialization: {gremlin}");

        IDocumentQuery <Vertex> insertVertex = client.CreateGremlinQuery <Vertex>(graph, GremlinQueries["AddVertex 1"]);
        while (insertVertex.HasMoreResults)
        {
            foreach (Vertex vertex in await insertVertex.ExecuteNextAsync <Vertex>())
            {
                // Since Gremlin is designed for multi-valued properties, the format returns an array. Here we just read
                // the first value
                string name = (string)vertex.GetVertexProperties("firstName").First().Value;
                log.Info($"\t Id:{vertex.Id}, Name: {name}");
            }
        }

        var resp = await client.CreateGremlinQuery <Vertex>(graph, query1).ExecuteNextAsync();
        log.Info($"C# function processed: {resp.ActivityId}");
        var resp2 = await client.CreateGremlinQuery <Vertex>(graph, query2).ExecuteNextAsync();
        log.Info($"C# function processed: {resp2.ActivityId}");
    }

    log.Info($"C# function processed: NortonGraphHttpTrigger2");
}
// Disposes the current chunk cursor (via TrashCan) and starts downloading the next
// page of results from the underlying document query.
// NOTE(review): chunkDownloadTask is not awaited here — presumably observed elsewhere; confirm.
private void RequestNextChunk()
{
    TrashCan.Throw(ref chunkCursor);
    chunkDownloadTask = documentQuery.ExecuteNextAsync<TIn>();
}
/// <summary>
/// Fetches the next page of <paramref name="docQuery"/> and packages the documents
/// together with the response continuation token.
/// </summary>
/// <param name="docQuery">query to advance by one page</param>
/// <param name="ct">cancellation token for the page fetch</param>
public static async Task<IContinuationEnumerable<T>> ListContinuationAsync<T>(this IDocumentQuery<T> docQuery, CancellationToken ct)
{
    FeedResponse<T> page = await docQuery.ExecuteNextAsync<T>(ct);
    string continuation = page.ResponseContinuation;
    return new ContinuationEnumerable<T>(page, continuation);
}
/// <summary>
/// HTTP-triggered function that issues a new queue token: assigns the next token
/// number, estimates waiting time from the number of unserved tokens of the same
/// transaction type, and persists the new token document.
/// </summary>
/// <param name="request">POST body containing the <see cref="Token"/> to create</param>
/// <param name="client">Cosmos DB client bound via the function attribute</param>
/// <param name="log">function logger</param>
/// <returns>200 with the created token</returns>
public static async Task<IActionResult> Run(
    [HttpTrigger(AuthorizationLevel.Function, "post", Route = null)] HttpRequestMessage request,
    [CosmosDB(ConnectionStringSetting = "DBConnectionString")] IDocumentClient client,
    ILogger log)
{
    log.LogInformation("GetTokenFunction - Invoked");
    Token newToken = JsonConvert.DeserializeObject<Token>(await request.Content.ReadAsStringAsync());
    newToken.CreatedDate = DateTime.Now;
    newToken.Status = TokenStatusEnume.InQueue;
    Uri tokenCollectUri = UriFactory.CreateDocumentCollectionUri("TokenManagerDB", "Token");
    var options = new FeedOptions { MaxItemCount = 1, EnableCrossPartitionQuery = true };
    // Highest existing token number; one item is enough given the descending order.
    // NOTE(review): TokenNo is a string, so this ordering is lexicographic — confirm it
    // stays correct once numbers exceed one digit.
    IDocumentQuery<Token> queryRes = client.CreateDocumentQuery<Token>(tokenCollectUri, options)
        .OrderByDescending(token => token.TokenNo)
        .AsDocumentQuery();
    options.MaxItemCount = null;
    // All not-yet-served tokens of the same transaction type.
    IDocumentQuery<Token> querActiveToken = client.CreateDocumentQuery<Token>(tokenCollectUri, options)
        .Where(x => x.Status != TokenStatusEnume.Served && x.TransactionType == newToken.TransactionType)
        .AsDocumentQuery();
    // Fix: await instead of blocking on .Result inside an async method (deadlock /
    // thread-pool starvation risk), and drain every page — the original read only the
    // first page, undercounting the queue.
    int count = 0;
    while (querActiveToken.HasMoreResults)
    {
        FeedResponse<Token> page = await querActiveToken.ExecuteNextAsync<Token>();
        count += page.Count;
    }
    // Bank transactions are estimated at 5 minutes each, all others at 25.
    if (newToken.TransactionType == TransactionTypeEnume.BankTransaction)
    {
        newToken.InitialEstimatedWaitingTime = 5 * count;
    }
    else
    {
        newToken.InitialEstimatedWaitingTime = 25 * count;
    }
    newToken.CurrentEstimatedWaitingTime = newToken.InitialEstimatedWaitingTime;
    if (queryRes.HasMoreResults)
    {
        // Fix: awaited (was .Result).
        var data = await queryRes.ExecuteNextAsync();
        int lastTokenNo = 0;
        if (data.Any())
        {
            var lastToken = (Token)data.First();
            lastTokenNo = int.Parse(lastToken.TokenNo);
        }
        newToken.TokenNo = (lastTokenNo + 1).ToString();
        await client.CreateDocumentAsync(tokenCollectUri, newToken);
    }
    log.LogInformation("GetTokenFunction - Completed");
    return new OkObjectResult(newToken);
}
/// <summary>
/// ThreadPool callback to process each partition: drains the change feed for one
/// partition key range, uploading each changed document to the destination collection
/// in batches of up to 100 concurrent tasks, checkpointing after each page.
/// </summary>
/// <param name="pkRangeId">id of the PartitionKeyRange this worker drains</param>
public void CallbackProcessEachPartition(string pkRangeId)
{
    // Resume from the last saved checkpoint for this range, if one exists.
    string continuation = null;
    Checkpoints.TryGetValue(pkRangeId, out continuation);
    IDocumentQuery<Document> query = SourceClient.CreateDocumentChangeFeedQuery(
        Constants.SourceCollectionUri,
        new ChangeFeedOptions
        {
            PartitionKeyRangeId = pkRangeId,
            StartFromBeginning = true,
            RequestContinuation = continuation,
            MaxItemCount = -1 // let the service pick the page size
        });
    int numOfDocsUploaded = 0;
    while (query.HasMoreResults)
    {
        // Synchronous .Result is tolerated here: this runs on its own ThreadPool callback,
        // not on a sync-context thread.
        FeedResponse<Document> readChangesResponse = query.ExecuteNextAsync<Document>().Result;
        List<Task<bool>> taskList = new List<Task<bool>>();
        numOfDocsUploaded = 0;
        foreach (Document changedDocument in readChangesResponse)
        {
            Task<bool> pTask = UploadToDestCollectionAsync(changedDocument);
            taskList.Add(pTask);
            // Wait for fixed number of tasks before creating new tasks
            if (taskList.Count == 100)
            {
                Task.WaitAll(taskList.ToArray());
                // Console.WriteLine("ThreadId: {0} Clearing the 100 tasks", Thread.CurrentThread.ManagedThreadId);
                Console.Write(".");
                taskList.Clear();
            }
            // Console.WriteLine("\t Debug: Read document {0} from the change feed.", changedDocument.ToString());
            numOfDocsUploaded++;
        }
        // Flush the remaining (<100) uploads for this page.
        Task.WaitAll(taskList.ToArray());
        Console.WriteLine("ThreadId: {0} Number of documents uploaded: {1}", Thread.CurrentThread.ManagedThreadId, numOfDocsUploaded);
        // Persist progress only after the whole page uploaded, so a restart resumes safely.
        Checkpoints[pkRangeId] = readChangesResponse.ResponseContinuation;
    }
    // If this is the last thread to complete, set the event so that the main thread can continue
    if (Interlocked.Decrement(ref NumOfPartitions) == 0)
    {
        resetEvent.Set();
    }
}
/// <summary>
/// HTTP-triggered function returning the given user's VIP contest entries for the year
/// before the most recently closed production year.
/// </summary>
/// <param name="req">incoming HTTP request</param>
/// <param name="client">Cosmos DB client bound to the VIP collection</param>
/// <param name="id">logon id from the route</param>
/// <param name="log">function trace writer</param>
/// <returns>200 with a <c>VIPResponseRC</c>, or 204 when nothing matches</returns>
public static async System.Threading.Tasks.Task<IActionResult> RunAsync(
    [HttpTrigger(AuthorizationLevel.Function, "get", Route = "viprecentlyclosed/{id}")] HttpRequestMessage req,
    [CosmosDB(
        databaseName: "contesthub",
        collectionName: "VIP",
        ConnectionStringSetting = "contesthub_DOCUMENTDB"
    )] DocumentClient client,
    string id,
    TraceWriter log)
{
    // Fix: removed a large block of commented-out JWT validation code (dead code).
    log.Info($"Processed request for {id} in VIP");
    // All VIP rows for this user (case-insensitive logon id match).
    Uri collectionUri = UriFactory.CreateDocumentCollectionUri("contesthub", "VIP");
    var query = client.CreateDocumentQuery<VIPElementRC>(collectionUri, new FeedOptions { EnableCrossPartitionQuery = true, MaxItemCount = Int32.MaxValue })
        .Where(d => d.logonid.ToUpper() == id.ToUpper());
    List<VIPElementRC> vipElementList = query.ToList();
    vipElementList.ForEach(x => x.endDate = "Ended " + x.endDate);
    // The production calendar tells us which year most recently closed.
    Uri c2 = UriFactory.CreateDocumentCollectionUri("contesthub", "ProductionCalendar");
    IDocumentQuery<ProductionCalendar> dateQuery = client.CreateDocumentQuery<ProductionCalendar>(c2, new FeedOptions { EnableCrossPartitionQuery = true }).AsDocumentQuery();
    var dates = await dateQuery.ExecuteNextAsync<ProductionCalendar>();
    ProductionCalendar calendar = dates.FirstOrDefault();
    if (calendar == null)
    {
        // Fix: the original dereferenced FirstOrDefault() unconditionally, which threw
        // NullReferenceException when the calendar collection was empty.
        log.Info("No ProductionCalendar document found");
        return new NoContentResult();
    }
    var year = Convert.ToInt32(calendar.closedProdYear) - 1;
    // Keep only entries whose weekAndYear suffix (last 4 chars) matches the target year.
    vipElementList = vipElementList.Where(e => Convert.ToInt32(e.weekAndYear.Substring(e.weekAndYear.Length - 4, 4)) == year).ToList();
    if (vipElementList.Count == 0)
    {
        return new NoContentResult();
    }
    else
    {
        VIPResponseRC vipResponse = new VIPResponseRC(vipElementList);
        return new OkObjectResult(vipResponse);
    }
}
/// <summary>
/// Executes a collection query against the currently selected database and renders the
/// results as a JSON array in the result browser, updating continuation/paging state.
/// </summary>
/// <param name="resource">the query text (boxed string)</param>
/// <param name="requestOptions">null when invoked from the "next page" button</param>
/// <remarks>async void is kept for the caller's fire-and-forget contract; every
/// exception is caught and surfaced in the browser pane.</remarks>
async void QueryDocumentCollectionsAsync(object resource, RequestOptions requestOptions)
{
    try
    {
        var queryText = resource as string; // text is the querytext.
        IDocumentQuery<dynamic> q = null;
        var feedOptions = Program.GetMain().GetFeedOptions();
        if (requestOptions == null)
        {
            // requestOptions = null means it is from the next page. We only attempt to continue using the RequestContinuation for next page button
            if (!string.IsNullOrEmpty(_currentContinuation) && string.IsNullOrEmpty(feedOptions.RequestContinuation))
            {
                feedOptions.RequestContinuation = _currentContinuation;
            }
        }
        q = _client.CreateDocumentCollectionQuery((Tag as Database).GetLink(_client), queryText, feedOptions).AsDocumentQuery();
        var sw = Stopwatch.StartNew();
        FeedResponse<dynamic> r;
        using (PerfStatus.Start("QueryDocument"))
        {
            r = await q.ExecuteNextAsync();
        }
        sw.Stop();
        _currentContinuation = r.ResponseContinuation;
        _currentQueryCommandContext.HasContinuation = !string.IsNullOrEmpty(_currentContinuation);
        _currentQueryCommandContext.QueryStarted = true;
        // Set the result window status text.
        // Fix: the original if (r.Count > 1)/else produced the identical message in both
        // branches; collapsed to a single format call.
        string text = string.Format(CultureInfo.InvariantCulture, "Returned {0} collections in {1} ms.", r.Count, sw.ElapsedMilliseconds);
        if (r.ResponseContinuation != null)
        {
            text += " (more results might be available)";
        }
        // Fix: the original appended the closing ']' only inside the loop, so an empty
        // result set produced the malformed string "[".
        var jsonarray = "[";
        var index = 0;
        foreach (var d in r)
        {
            index++;
            // currently Query.ToString() has Formatting.Indented, but the public release doesn't have yet.
            jsonarray += d.ToString();
            if (index != r.Count)
            {
                jsonarray += ",\r\n";
            }
        }
        jsonarray += "]";
        Program.GetMain().SetResultInBrowser(jsonarray, text, true, r.ResponseHeaders);
        Program.GetMain().SetNextPageVisibility(_currentQueryCommandContext);
    }
    catch (AggregateException e)
    {
        Program.GetMain().SetResultInBrowser(null, e.InnerException.ToString(), true);
    }
    catch (Exception e)
    {
        Program.GetMain().SetResultInBrowser(null, e.ToString(), true);
    }
}
/// <summary>
/// Rebuilds the "graphdb"/"Persons" graph from scratch: drops all vertices, then seeds
/// the Adam/Eve family vertices, edges, and property updates, returning one log line
/// per executed query (plus each query's serialized results).
/// </summary>
public async Task<IEnumerable<string>> buildDB()
{
    var results = new List<string>();
    Microsoft.Azure.Documents.Database database = await client.CreateDatabaseIfNotExistsAsync(new Database { Id = "graphdb" });
    DocumentCollection graph = await client.CreateDocumentCollectionIfNotExistsAsync(
        UriFactory.CreateDatabaseUri("graphdb"),
        new DocumentCollection { Id = "Persons" },
        new RequestOptions { OfferThroughput = 400 });
    // Ordered seed script: cleanup first, then vertices, then edges, then property updates.
    // Order matters — edges reference vertices created earlier.
    Dictionary<string, string> gremlinQueries = new Dictionary<string, string>
    {
        { "Cleanup", "g.V().drop()" },
        { "AddAdam", "g.addV('person').property('id', 'Adam').property('Name', 'Adam')" },
        { "AddEve", "g.addV('person').property('id', 'Eve').property('Name', 'Eve')" },
        { "AddCain", "g.addV('person').property('id', 'Cain').property('Name', 'Cain')" },
        { "AddAbel", "g.addV('person').property('id', 'Abel').property('Name', 'Abel')" },
        { "AddSeth", "g.addV('person').property('id', 'Seth').property('Name', 'Seth')" },
        { "AddEnosh", "g.addV('person').property('id', 'Enosh').property('Name', 'Enosh')" },
        { "AddAdam+Eve", "g.V('Adam').addE('Married').to(g.V('Eve'))" },
        { "AddCain->Abel", "g.V('Cain').addE('Brother').to(g.V('Abel'))" },
        { "AddCain->Seth", "g.V('Cain').addE('Brother').to(g.V('Seth'))" },
        { "AddAbel->Seth", "g.V('Abel').addE('Brother').to(g.V('Seth'))" },
        { "AddEve->Abel", "g.V('Eve').addE('Mother').to(g.V('Abel'))" },
        { "AddEve->Cain", "g.V('Eve').addE('Mother').to(g.V('Cain'))" },
        { "AddEve->Seth", "g.V('Eve').addE('Mother').to(g.V('Seth'))" },
        { "AddAdam->Abel", "g.V('Adam').addE('Father').to(g.V('Abel'))" },
        { "AddAdam->Cain", "g.V('Adam').addE('Father').to(g.V('Cain'))" },
        { "AddAdam->Seth", "g.V('Adam').addE('Father').to(g.V('Seth'))" },
        { "AddSeth->Enosh", "g.V('Seth').addE('Father').to(g.V('Enosh'))" },
        { "UpdateAbel1", "g.V('Abel').property('Profession', 'Shepherd')" },
        { "UpdateCain1", "g.V('Cain').property('Profession', 'Farmer')" },
        { "UpdateAdam", "g.V('Adam').property('Max Age', '930').property('Born', '4026 BCE').property('Died', '3096 BCE').property('Name Means', 'Earthling Man; Mankind; Humankind; from a root meaning \"red\"')" },
        { "UpdateAdamFatherAge", "g.V('Adam').outE('Father').as('e').inV().has('Name', 'Seth').select('e').property('Age', '130')" }
    };
    foreach (KeyValuePair<string, string> gremlinQuery in gremlinQueries)
    {
        results.Add($"Running {gremlinQuery.Key}: {gremlinQuery.Value}");
        // The CreateGremlinQuery method extensions allow you to execute Gremlin queries and iterate
        // results asychronously
        IDocumentQuery<dynamic> query = client.CreateGremlinQuery<dynamic>(graph, gremlinQuery.Value);
        while (query.HasMoreResults)
        {
            foreach (dynamic result in await query.ExecuteNextAsync())
            {
                results.Add($"\t {JsonConvert.SerializeObject(result)}");
            }
        }
        // Blank separator line between queries in the returned log.
        results.Add("");
    }
    return (results);
}
/// <summary>
/// Called when this host acquires a lease on a partition: spins up a worker task that
/// opens the observer, drains the partition's change feed, delivers batches to the
/// observer, checkpoints after each successful delivery, and releases the lease on error.
/// </summary>
/// <param name="lease">the acquired lease; must have an owner set</param>
async Task IPartitionObserver<DocumentServiceLease>.OnPartitionAcquiredAsync(DocumentServiceLease lease)
{
    Debug.Assert(lease != null && !string.IsNullOrEmpty(lease.Owner), "lease");
    TraceLog.Informational(string.Format("Host '{0}' partition {1}: acquired!", this.HostName, lease.PartitionId));
#if DEBUG
    Interlocked.Increment(ref this.partitionCount);
#endif
    IChangeFeedObserver observer = this.observerFactory.CreateObserver();
    ChangeFeedObserverContext context = new ChangeFeedObserverContext { PartitionKeyRangeId = lease.PartitionId };
    CancellationTokenSource cancellation = new CancellationTokenSource();
    // Create ChangeFeedOptions to use for this worker.
    ChangeFeedOptions options = new ChangeFeedOptions
    {
        MaxItemCount = this.changeFeedOptions.MaxItemCount,
        PartitionKeyRangeId = this.changeFeedOptions.PartitionKeyRangeId,
        SessionToken = this.changeFeedOptions.SessionToken,
        StartFromBeginning = this.changeFeedOptions.StartFromBeginning,
        RequestContinuation = this.changeFeedOptions.RequestContinuation
    };
    // The awaited StartNew returns the inner async lambda's Task; the worker itself keeps
    // running and is tracked in partitionKeyRangeIdToWorkerMap below.
    var workerTask = await Task.Factory.StartNew(async () =>
    {
        ChangeFeedObserverCloseReason? closeReason = null;
        try
        {
            try
            {
                await observer.OpenAsync(context);
            }
            catch (Exception ex)
            {
                TraceLog.Error(string.Format("IChangeFeedObserver.OpenAsync exception: {0}", ex));
                closeReason = ChangeFeedObserverCloseReason.ObserverError;
                throw;
            }
            // Resume from the lease's continuation token when one is recorded.
            options.PartitionKeyRangeId = lease.PartitionId;
            if (!string.IsNullOrEmpty(lease.ContinuationToken))
            {
                options.RequestContinuation = lease.ContinuationToken;
            }
            IDocumentQuery<Document> query = this.documentClient.CreateDocumentChangeFeedQuery(this.collectionSelfLink, options);
            TraceLog.Verbose(string.Format("Worker start: partition '{0}', continuation '{1}'", lease.PartitionId, lease.ContinuationToken));
            try
            {
                while (this.isShutdown == 0)
                {
                    do
                    {
                        // Only NotFound / TooManyRequests / ServiceUnavailable are handled
                        // here; anything else propagates out of the worker.
                        DocumentClientException dcex = null;
                        FeedResponse<Document> response = null;
                        try
                        {
                            response = await query.ExecuteNextAsync<Document>();
                        }
                        catch (DocumentClientException ex)
                        {
                            if (StatusCode.NotFound != (StatusCode)ex.StatusCode && StatusCode.TooManyRequests != (StatusCode)ex.StatusCode && StatusCode.ServiceUnavailable != (StatusCode)ex.StatusCode)
                            {
                                throw;
                            }
                            dcex = ex;
                        }
                        if (dcex != null)
                        {
                            const int ReadSessionNotAvailable = 1002;
                            if (StatusCode.NotFound == (StatusCode)dcex.StatusCode && GetSubStatusCode(dcex) != ReadSessionNotAvailable)
                            {
                                // Most likely, the database or collection was removed while we were enumerating.
                                // Shut down. The user will need to start over.
                                // Note: this has to be a new task, can't await for shutdown here, as shudown awaits for all worker tasks.
                                await Task.Factory.StartNew(() => this.StopAsync(ChangeFeedObserverCloseReason.ResourceGone));
                                break;
                            }
                            else
                            {
                                // Retriable: back off for RetryAfter (or the configured poll delay).
                                Debug.Assert(StatusCode.TooManyRequests == (StatusCode)dcex.StatusCode || StatusCode.ServiceUnavailable == (StatusCode)dcex.StatusCode);
                                TraceLog.Warning(string.Format("Partition {0}: retriable exception : {1}", context.PartitionKeyRangeId, dcex.Message));
                                await Task.Delay(dcex.RetryAfter != TimeSpan.Zero ? dcex.RetryAfter : this.options.FeedPollDelay, cancellation.Token);
                            }
                        }
                        if (response != null)
                        {
                            if (response.Count > 0)
                            {
                                List<Document> docs = new List<Document>();
                                docs.AddRange(response);
                                try
                                {
                                    context.FeedResponse = response;
                                    await observer.ProcessChangesAsync(context, docs);
                                }
                                catch (Exception ex)
                                {
                                    TraceLog.Error(string.Format("IChangeFeedObserver.ProcessChangesAsync exception: {0}", ex));
                                    closeReason = ChangeFeedObserverCloseReason.ObserverError;
                                    throw;
                                }
                                finally
                                {
                                    context.FeedResponse = null;
                                }
                                // Checkpoint after every successful delivery to the client.
                                lease = await CheckpointAsync(lease, response.ResponseContinuation, context);
                            }
                            else if (string.IsNullOrEmpty(lease.ContinuationToken))
                            {
                                // Checkpoint if we've never done that for this lease.
                                lease = await CheckpointAsync(lease, response.ResponseContinuation, context);
                            }
                        }
                    } while (query.HasMoreResults && this.isShutdown == 0);
                    // Feed drained: wait before polling the partition again.
                    if (this.isShutdown == 0)
                    {
                        await Task.Delay(this.options.FeedPollDelay, cancellation.Token);
                    }
                } // Outer while (this.isShutdown == 0) loop.
                closeReason = ChangeFeedObserverCloseReason.Shutdown;
            }
            catch (TaskCanceledException)
            {
                Debug.Assert(cancellation.IsCancellationRequested, "cancellation.IsCancellationRequested");
                TraceLog.Informational(string.Format("Cancel signal received for partition {0} worker!", context.PartitionKeyRangeId));
            }
        }
        catch (LeaseLostException)
        {
            closeReason = ChangeFeedObserverCloseReason.LeaseLost;
        }
        catch (Exception ex)
        {
            TraceLog.Error(string.Format("Partition {0} exception: {1}", context.PartitionKeyRangeId, ex));
            if (!closeReason.HasValue)
            {
                closeReason = ChangeFeedObserverCloseReason.Unknown;
            }
        }
        if (closeReason.HasValue)
        {
            TraceLog.Informational(string.Format("Releasing lease for partition {0} due to an error, reason: {1}!", context.PartitionKeyRangeId, closeReason.Value));
            // Note: this has to be a new task, because OnPartitionReleasedAsync awaits for worker task.
            await Task.Factory.StartNew(async () => await this.partitionManager.TryReleasePartitionAsync(context.PartitionKeyRangeId, true, closeReason.Value));
        }
        TraceLog.Informational(string.Format("Partition {0}: worker finished!", context.PartitionKeyRangeId));
    });
    // Register the worker so shutdown/release paths can find and await it.
    var newWorkerData = new WorkerData(workerTask, observer, context, cancellation);
    this.partitionKeyRangeIdToWorkerMap.AddOrUpdate(context.PartitionKeyRangeId, newWorkerData, (string id, WorkerData d) => { return (newWorkerData); });
}
/// <summary>
/// Returns the vertex for <paramref name="info"/> along with its outgoing and incoming
/// edges, extracting spouse ("Married") and family relations (Father/Mother/Brother/
/// Sister) into dedicated response fields.
/// </summary>
/// <param name="info">vertex id to look up</param>
public async Task<IHttpActionResult> GetInfo(string info)
{
    DocumentCollection graph = await client.CreateDocumentCollectionIfNotExistsAsync(
        UriFactory.CreateDatabaseUri("graphdb"),
        new DocumentCollection { Id = "Persons" },
        new RequestOptions { OfferThroughput = 400 });
    // NOTE(review): 'info' is interpolated directly into the Gremlin string — injection
    // risk if it can contain quotes; confirm it is validated upstream.
    string grem = $"g.V('{info}')";
    IDocumentQuery<dynamic> query = client.CreateGremlinQuery<dynamic>(graph, grem);
    var vert = new List<dynamic>();   // the vertex itself
    var eMe = new List<dynamic>();    // outgoing edges (this vertex is the source)
    var eOther = new List<dynamic>(); // incoming edges (this vertex is the target)
    while (query.HasMoreResults)
    {
        foreach (dynamic result in await query.ExecuteNextAsync())
        {
            vert.Add(result);
        }
    }
    // Outgoing edges: project each edge JObject to its (outV, label, inV) triple.
    grem = $"g.V('{info}').outE()";
    query = client.CreateGremlinQuery<dynamic>(graph, grem);
    while (query.HasMoreResults)
    {
        string inV = "", label = "", outV = "";
        foreach (dynamic result in await query.ExecuteNextAsync())
        {
            foreach (KeyValuePair<string, JToken> child in (result as JObject))
            {
                if (child.Key == "inV")
                {
                    inV = (child.Value as JToken).Value<string>();
                }
                else if (child.Key == "label")
                {
                    label = (child.Value as JToken).Value<string>();
                }
                else if (child.Key == "outV")
                {
                    outV = (child.Value as JToken).Value<string>();
                }
            }
            eMe.Add(new { outV, label, inV });
        }
    }
    // Incoming edges: same projection but emitted in (inV, label, outV) order.
    grem = $"g.V('{info}').inE()";
    query = client.CreateGremlinQuery<dynamic>(graph, grem);
    while (query.HasMoreResults)
    {
        string inV = "", label = "", outV = "";
        foreach (dynamic result in await query.ExecuteNextAsync())
        {
            foreach (KeyValuePair<string, JToken> child in (result as JObject))
            {
                if (child.Key == "inV")
                {
                    inV = (child.Value as JToken).Value<string>();
                }
                else if (child.Key == "label")
                {
                    label = (child.Value as JToken).Value<string>();
                }
                else if (child.Key == "outV")
                {
                    outV = (child.Value as JToken).Value<string>();
                }
            }
            eOther.Add(new { inV, label, outV });
        }
    }
    // Pull relationship edges out of the raw edge lists into friendlier fields;
    // matched edges are removed so they don't appear twice in the response.
    string marriedTo = "";
    List<dynamic> family = new List<dynamic>();
    if (eMe.Any(z => z.label == "Married"))
    {
        marriedTo = eMe.First(z => z.label == "Married").inV;
        eMe.Remove(eMe.First(z => z.label == "Married"));
    }
    else if (eOther.Any(z => z.label == "Married"))
    {
        marriedTo = eOther.First(z => z.label == "Married").outV;
        eOther.Remove(eOther.First(z => z.label == "Married"));
    }
    // An outgoing Father/Mother edge means this vertex is the parent → the other end is a child.
    if (eMe.Any(z => z.label == "Father"))
    {
        family.AddRange(eMe.Where(z => z.label == "Father").Select(z => new { type = "Child", name = (string)z.inV }).ToList());
        eMe.RemoveAll((z) => { return (z.label == "Father"); });
    }
    else if (eMe.Any(z => z.label == "Mother"))
    {
        family.AddRange(eMe.Where(z => z.label == "Mother").Select(z => new { type = "Child", name = (string)z.inV }).ToList());
        eMe.RemoveAll((z) => { return (z.label == "Mother"); });
    }
    // Incoming Father/Mother edges identify this vertex's parents.
    if (eOther.Any(z => z.label == "Father"))
    {
        family.AddRange(eOther.Where(z => z.label == "Father").Select(z => new { type = "Father", name = (string)z.outV }).ToList());
        eOther.RemoveAll((z) => { return (z.label == "Father"); });
    }
    if (eOther.Any(z => z.label == "Mother"))
    {
        family.AddRange(eOther.Where(z => z.label == "Mother").Select(z => new { type = "Mother", name = (string)z.outV }).ToList());
        eOther.RemoveAll((z) => { return (z.label == "Mother"); });
    }
    // Sibling edges may point either way; collect both directions.
    if (eMe.Any(z => z.label == "Sister"))
    {
        family.AddRange(eMe.Where(z => z.label == "Sister").Select(z => new { type = "Sister", name = (string)z.outV }).ToList());
        eMe.RemoveAll((z) => { return (z.label == "Sister"); });
    }
    else if (eMe.Any(z => z.label == "Brother"))
    {
        family.AddRange(eMe.Where(z => z.label == "Brother").Select(z => new { type = "Brother", name = (string)z.outV }).ToList());
        eMe.RemoveAll((z) => { return (z.label == "Brother"); });
    }
    if (eOther.Any(z => z.label == "Sister"))
    {
        family.AddRange(eOther.Where(z => z.label == "Sister").Select(z => new { type = "Sister", name = (string)z.inV }).ToList());
        eOther.RemoveAll((z) => { return (z.label == "Sister"); });
    }
    else if (eOther.Any(z => z.label == "Brother"))
    {
        family.AddRange(eOther.Where(z => z.label == "Brother").Select(z => new { type = "Brother", name = (string)z.inV }).ToList());
        eOther.RemoveAll((z) => { return (z.label == "Brother"); });
    }
    return (Ok(new { vert, eMe, eOther, marriedTo, family }));
}
/// <summary>
/// Fetches the next page of <paramref name="query"/> without resuming on the captured
/// synchronization context.
/// </summary>
internal static async Task<FeedResponse<dynamic>> QuerySingleDocumentAsync(IDocumentQuery<dynamic> query)
{
    FeedResponse<dynamic> page = await query.ExecuteNextAsync<dynamic>().ConfigureAwait(false);
    return page;
}
/// <inheritdoc/>
public async Task<InstanceQueryResponse> GetInstancesFromQuery(
    Dictionary<string, StringValues> queryParams,
    string continuationToken,
    int size)
{
    InstanceQueryResponse queryResponse = new InstanceQueryResponse
    {
        Count = 0,
        Instances = new List<Instance>()
    };
    // Keep paging until `size` instances are collected or the feed is exhausted.
    while (queryResponse.Count < size)
    {
        FeedOptions feedOptions = new FeedOptions
        {
            EnableCrossPartitionQuery = true,
            MaxItemCount = size - queryResponse.Count, // only request what is still missing
            ResponseContinuationTokenLimitInKb = 7
        };
        if (!string.IsNullOrEmpty(continuationToken))
        {
            feedOptions.RequestContinuation = continuationToken;
        }
        IQueryable<Instance> queryBuilder = Client.CreateDocumentQuery<Instance>(CollectionUri, feedOptions);
        try
        {
            queryBuilder = BuildQueryFromParameters(queryParams, queryBuilder);
        }
        catch (Exception e)
        {
            // Invalid query parameters: return the reason to the caller instead of throwing.
            queryResponse.Exception = e.Message;
            return (queryResponse);
        }
        try
        {
            IDocumentQuery<Instance> documentQuery = queryBuilder.AsDocumentQuery();
            FeedResponse<Instance> feedResponse = await documentQuery.ExecuteNextAsync<Instance>();
            if (feedResponse.Count == 0 && !documentQuery.HasMoreResults)
            {
                // Exhausted: empty token signals the caller there is nothing more.
                queryResponse.ContinuationToken = string.Empty;
                break;
            }
            List<Instance> instances = feedResponse.ToList();
            await PostProcess(instances);
            queryResponse.Instances.AddRange(instances);
            queryResponse.Count += instances.Count;
            if (string.IsNullOrEmpty(feedResponse.ResponseContinuation))
            {
                queryResponse.ContinuationToken = string.Empty;
                break;
            }
            // Remember where to resume, both for the caller and for the next loop iteration.
            queryResponse.ContinuationToken = feedResponse.ResponseContinuation;
            continuationToken = feedResponse.ResponseContinuation;
        }
        catch (Exception e)
        {
            _logger.LogError(e, "Exception querying CosmosDB for instances");
            queryResponse.Exception = e.Message;
            break;
        }
    }
    return (queryResponse);
}
/// <summary>
/// HTTP-triggered function that adds a program to the caller's active program list
/// and creates the corresponding program-detail document.
/// </summary>
/// <param name="req">POST body: a <c>UserProgramDetailCreateModel</c></param>
/// <param name="userProgsCient">client bound to the UserPrograms collection</param>
/// <param name="userProgDetsOut">output collector for UserProgramDetails documents</param>
/// <param name="log">function logger</param>
/// <returns>200 with the updated documents, 409 if already started, 400 on failure</returns>
public static async Task<IActionResult> InitializeUserProgram(
    [HttpTrigger(AuthorizationLevel.Anonymous, "post", Route = "progress/program")] HttpRequest req,
    [CosmosDB(
        databaseName: "UserProgress",
        collectionName: "UserPrograms",
        ConnectionStringSetting = "AzureWebJobsStorage")] DocumentClient userProgsCient,
    [CosmosDB(
        databaseName: "UserProgress",
        collectionName: "UserProgramDetails",
        ConnectionStringSetting = "AzureWebJobsStorage")] IAsyncCollector<UserProgramDetail> userProgDetsOut,
    ILogger log)
{
    List<Document> output = new List<Document>();
    log.LogInformation("Initializa a new user program");
    // Get the program details
    string requestBody = await new StreamReader(req.Body).ReadToEndAsync();
    var inputProgDetails = JsonConvert.DeserializeObject<UserProgramDetailCreateModel>(requestBody);
    // Get the active program for this user
    Uri collectionUri = UriFactory.CreateDocumentCollectionUri("UserProgress", "UserPrograms");
    IDocumentQuery<UserProgram> query = userProgsCient.CreateDocumentQuery<UserProgram>(collectionUri)
        .Where(p => p.Username.Equals(inputProgDetails.Username))
        .Where(p => p.Status.Equals("active"))
        .AsDocumentQuery();
    try
    {
        while (query.HasMoreResults)
        {
            foreach (Document doc in await query.ExecuteNextAsync())
            {
                UserProgram userProgram = (dynamic)doc;
                // If the program has already been started let the caller know and don't continue
                foreach (var program in userProgram.Programs)
                {
                    if (program.Id == inputProgDetails.ProgId)
                    {
                        return new ConflictObjectResult("Program already started");
                    }
                }
                // If the program hasn't started then you're here, update the active program list
                userProgram.Programs.Add(new Program() { Id = inputProgDetails.ProgId, Name = inputProgDetails.ProgName });
                output.Add(await userProgsCient.ReplaceDocumentAsync(doc.SelfLink, userProgram));
                // Map the create model to the database required model
                var progDetail = new UserProgramDetail()
                {
                    Username = inputProgDetails.Username,
                    ProgId = inputProgDetails.ProgId,
                    ProgName = inputProgDetails.ProgName,
                    TotalDays = inputProgDetails.TotalDays,
                    Exersices = inputProgDetails.Exersices
                };
                // Create the framework in the program details container
                await userProgDetsOut.AddAsync(progDetail);
            }
        }
    }
    catch (Exception e)
    {
        // Fix: the exception was silently swallowed (unused variable 'e'); log it so
        // failures are diagnosable before returning 400.
        log.LogError(e, "Failed to initialize user program");
        return new BadRequestResult();
    }
    return new OkObjectResult(output);
}
/// <summary>
/// Called when this host acquires a lease on a partition: spins up a worker that opens
/// the observer, drains the partition's change feed, delivers batches to the observer,
/// checkpoints on a frequency condition, handles partition splits/gone resources, and
/// releases the lease when the worker ends with an error reason.
/// </summary>
/// <param name="lease">the acquired lease; must have an owner set</param>
async Task IPartitionObserver<DocumentServiceLease>.OnPartitionAcquiredAsync(DocumentServiceLease lease)
{
    Debug.Assert(lease != null && !string.IsNullOrEmpty(lease.Owner), "lease");
    TraceLog.Informational(string.Format("Host '{0}' partition {1}: acquired!", this.HostName, lease.PartitionId));
#if DEBUG
    Interlocked.Increment(ref this.partitionCount);
#endif
    IChangeFeedObserver observer = this.observerFactory.CreateObserver();
    ChangeFeedObserverContext context = new ChangeFeedObserverContext { PartitionKeyRangeId = lease.PartitionId };
    CancellationTokenSource cancellation = new CancellationTokenSource();
    // Create ChangeFeedOptions to use for this worker.
    ChangeFeedOptions options = new ChangeFeedOptions
    {
        MaxItemCount = this.changeFeedOptions.MaxItemCount,
        PartitionKeyRangeId = this.changeFeedOptions.PartitionKeyRangeId,
        SessionToken = this.changeFeedOptions.SessionToken,
        StartFromBeginning = this.changeFeedOptions.StartFromBeginning,
        RequestContinuation = this.changeFeedOptions.RequestContinuation
    };
    // The awaited StartNew returns the inner async lambda's Task; the worker itself keeps
    // running and is tracked in partitionKeyRangeIdToWorkerMap below.
    var workerTask = await Task.Factory.StartNew(async () =>
    {
        ChangeFeedObserverCloseReason? closeReason = null;
        try
        {
            try
            {
                await observer.OpenAsync(context);
            }
            catch (Exception ex)
            {
                TraceLog.Error(string.Format("IChangeFeedObserver.OpenAsync exception: {0}", ex));
                closeReason = ChangeFeedObserverCloseReason.ObserverError;
                throw;
            }
            // Resume from the lease's continuation token when one is recorded.
            options.PartitionKeyRangeId = lease.PartitionId;
            if (!string.IsNullOrEmpty(lease.ContinuationToken))
            {
                options.RequestContinuation = lease.ContinuationToken;
            }
            // Per-partition counters driving the checkpoint frequency decision.
            CheckpointStats checkpointStats = null;
            if (!this.statsSinceLastCheckpoint.TryGetValue(lease.PartitionId, out checkpointStats) || checkpointStats == null)
            {
                // It could be that the lease was created by different host and we picked it up.
                checkpointStats = this.statsSinceLastCheckpoint.AddOrUpdate(
                    lease.PartitionId,
                    new CheckpointStats(),
                    (partitionId, existingStats) => existingStats);
                Trace.TraceWarning(string.Format("Added stats for partition '{0}' for which the lease was picked up after the host was started.", lease.PartitionId));
            }
            IDocumentQuery<Document> query = this.documentClient.CreateDocumentChangeFeedQuery(this.collectionSelfLink, options);
            TraceLog.Verbose(string.Format("Worker start: partition '{0}', continuation '{1}'", lease.PartitionId, lease.ContinuationToken));
            // Tracks the last continuation actually received, needed for split handling.
            string lastContinuation = options.RequestContinuation;
            try
            {
                while (this.isShutdown == 0)
                {
                    do
                    {
                        // DocumentClientExceptions are captured (not handled inline) so the
                        // original stack trace can be rethrown via ExceptionDispatchInfo.
                        ExceptionDispatchInfo exceptionDispatchInfo = null;
                        FeedResponse<Document> response = null;
                        try
                        {
                            response = await query.ExecuteNextAsync<Document>();
                            lastContinuation = response.ResponseContinuation;
                        }
                        catch (DocumentClientException ex)
                        {
                            exceptionDispatchInfo = ExceptionDispatchInfo.Capture(ex);
                        }
                        if (exceptionDispatchInfo != null)
                        {
                            DocumentClientException dcex = (DocumentClientException)exceptionDispatchInfo.SourceException;
                            if (StatusCode.NotFound == (StatusCode)dcex.StatusCode && SubStatusCode.ReadSessionNotAvailable != (SubStatusCode)GetSubStatusCode(dcex))
                            {
                                // Most likely, the database or collection was removed while we were enumerating.
                                // Shut down. The user will need to start over.
                                // Note: this has to be a new task, can't await for shutdown here, as shudown awaits for all worker tasks.
                                TraceLog.Error(string.Format("Partition {0}: resource gone (subStatus={1}). Aborting.", context.PartitionKeyRangeId, GetSubStatusCode(dcex)));
                                await Task.Factory.StartNew(() => this.StopAsync(ChangeFeedObserverCloseReason.ResourceGone));
                                break;
                            }
                            else if (StatusCode.Gone == (StatusCode)dcex.StatusCode)
                            {
                                SubStatusCode subStatusCode = (SubStatusCode)GetSubStatusCode(dcex);
                                if (SubStatusCode.PartitionKeyRangeGone == subStatusCode)
                                {
                                    // The range was split into children; hand off to split handling.
                                    bool isSuccess = await HandleSplitAsync(context.PartitionKeyRangeId, lastContinuation, lease.Id);
                                    if (!isSuccess)
                                    {
                                        TraceLog.Error(string.Format("Partition {0}: HandleSplit failed! Aborting.", context.PartitionKeyRangeId));
                                        await Task.Factory.StartNew(() => this.StopAsync(ChangeFeedObserverCloseReason.ResourceGone));
                                        break;
                                    }
                                    // Throw LeaseLostException so that we take the lease down.
                                    throw new LeaseLostException(lease, exceptionDispatchInfo.SourceException, true);
                                }
                                else if (SubStatusCode.Splitting == subStatusCode)
                                {
                                    TraceLog.Warning(string.Format("Partition {0} is splitting. Will retry to read changes until split finishes. {1}", context.PartitionKeyRangeId, dcex.Message));
                                }
                                else
                                {
                                    exceptionDispatchInfo.Throw();
                                }
                            }
                            else if (StatusCode.TooManyRequests == (StatusCode)dcex.StatusCode || StatusCode.ServiceUnavailable == (StatusCode)dcex.StatusCode)
                            {
                                TraceLog.Warning(string.Format("Partition {0}: retriable exception : {1}", context.PartitionKeyRangeId, dcex.Message));
                            }
                            else
                            {
                                exceptionDispatchInfo.Throw();
                            }
                            // Back off for RetryAfter (or the configured poll delay) before retrying.
                            await Task.Delay(dcex.RetryAfter != TimeSpan.Zero ? dcex.RetryAfter : this.options.FeedPollDelay, cancellation.Token);
                        }
                        if (response != null)
                        {
                            if (response.Count > 0)
                            {
                                List<Document> docs = new List<Document>();
                                docs.AddRange(response);
                                try
                                {
                                    context.FeedResponse = response;
                                    await observer.ProcessChangesAsync(context, docs);
                                }
                                catch (Exception ex)
                                {
                                    TraceLog.Error(string.Format("IChangeFeedObserver.ProcessChangesAsync exception: {0}", ex));
                                    closeReason = ChangeFeedObserverCloseReason.ObserverError;
                                    throw;
                                }
                                finally
                                {
                                    context.FeedResponse = null;
                                }
                            }
                            // Checkpoint only when the frequency condition is met (unlike the
                            // checkpoint-every-batch variant of this worker).
                            checkpointStats.ProcessedDocCount += (uint)response.Count;
                            if (IsCheckpointNeeded(lease, checkpointStats))
                            {
                                lease = await CheckpointAsync(lease, response.ResponseContinuation, context);
                                checkpointStats.Reset();
                            }
                            else if (response.Count > 0)
                            {
                                TraceLog.Informational(string.Format("Checkpoint: not checkpointing for partition {0}, {1} docs, new continuation '{2}' as frequency condition is not met", lease.PartitionId, response.Count, response.ResponseContinuation));
                            }
                        }
                    } while (query.HasMoreResults && this.isShutdown == 0);
                    // Feed drained: wait before polling the partition again.
                    if (this.isShutdown == 0)
                    {
                        await Task.Delay(this.options.FeedPollDelay, cancellation.Token);
                    }
                } // Outer while (this.isShutdown == 0) loop.
                closeReason = ChangeFeedObserverCloseReason.Shutdown;
            }
            catch (TaskCanceledException)
            {
                Debug.Assert(cancellation.IsCancellationRequested, "cancellation.IsCancellationRequested");
                TraceLog.Informational(string.Format("Cancel signal received for partition {0} worker!", context.PartitionKeyRangeId));
            }
        }
        catch (LeaseLostException ex)
        {
            closeReason = ex.IsGone ? ChangeFeedObserverCloseReason.LeaseGone : ChangeFeedObserverCloseReason.LeaseLost;
        }
        catch (Exception ex)
        {
            TraceLog.Error(string.Format("Partition {0} exception: {1}", context.PartitionKeyRangeId, ex));
            if (!closeReason.HasValue)
            {
                closeReason = ChangeFeedObserverCloseReason.Unknown;
            }
        }
        if (closeReason.HasValue)
        {
            TraceLog.Informational(string.Format("Releasing lease for partition {0} due to an error, reason: {1}!", context.PartitionKeyRangeId, closeReason.Value));
            // Note: this has to be a new task, because OnPartitionReleasedAsync awaits for worker task.
            await Task.Factory.StartNew(async () => await this.partitionManager.TryReleasePartitionAsync(context.PartitionKeyRangeId, true, closeReason.Value));
        }
        TraceLog.Informational(string.Format("Partition {0}: worker finished!", context.PartitionKeyRangeId));
    });
    // Register the worker so shutdown/release paths can find and await it.
    var newWorkerData = new WorkerData(workerTask, observer, context, cancellation);
    this.partitionKeyRangeIdToWorkerMap.AddOrUpdate(context.PartitionKeyRangeId, newWorkerData, (string id, WorkerData d) => { return (newWorkerData); });
}
/// <summary>
/// Test helper: repeatedly (up to 5 attempts) queries for resources whose
/// <paramref name="queryProperty"/> equals <paramref name="queryPropertyValue"/>,
/// asserting on the results once any are returned; fails if none appear.
/// </summary>
internal static void ValidateQuery<T>(DocumentClient client, string collectionLink, string queryProperty, string queryPropertyValue, int expectedCount, INameValueCollection headers = null) where T : Resource, new()
{
    if (headers != null)
    {
        headers = new DictionaryNameValueCollection(headers); // dont mess with the input headers
    }
    else
    {
        headers = new DictionaryNameValueCollection();
    }
    int maxTries = 5;
    const int minIndexInterval = 5000; // 5 seconds
    while (maxTries-- > 0)
    {
        DocumentFeedResponse<dynamic> resourceFeed = null;
        IDocumentQuery<dynamic> queryService = null;
        string queryString = @"select * from root r where r." + queryProperty + @"=""" + queryPropertyValue + @"""";
        // Pick the query entry point matching the requested resource type.
        if (typeof(T) == typeof(Database))
        {
            queryService = client.CreateDatabaseQuery(queryString).AsDocumentQuery();
        }
        else if (typeof(T) == typeof(DocumentCollection))
        {
            queryService = client.CreateDocumentCollectionQuery(collectionLink, queryString).AsDocumentQuery();
        }
        else if (typeof(T) == typeof(Document))
        {
            queryService = client.CreateDocumentQuery(collectionLink, queryString).AsDocumentQuery();
        }
        else
        {
            Assert.Fail("Unexpected type");
        }
        while (queryService.HasMoreResults)
        {
            resourceFeed = queryService.ExecuteNextAsync().Result;
            if (resourceFeed.Count > 0)
            {
                Assert.IsNotNull(resourceFeed, "Query result is null");
                Assert.AreNotEqual(0, resourceFeed.Count, "Query result is invalid");
                foreach (T resource in resourceFeed)
                {
                    if (queryProperty.Equals("name", StringComparison.CurrentCultureIgnoreCase))
                    {
                        Assert.AreEqual(resource.Id, queryPropertyValue, "Result contain invalid result");
                    }
                }
                return;
            }
        }
        // Fix: the original called Task.Delay(minIndexInterval) and discarded the Task, so
        // the retry loop never actually paused; wait synchronously (this method is sync).
        Task.Delay(minIndexInterval).Wait();
    }
    Assert.Fail("Query did not return result after max tries");
}
/// <summary>
/// HTTP-triggered function (GET route "user") that looks up the caller's record in the
/// Cosmos DB users collection by the authenticated principal's display name.
/// </summary>
/// <param name="req">Incoming HTTP request; the principal is read from its HttpContext.</param>
/// <param name="client">Cosmos DB client bound by the CosmosDB attribute.</param>
/// <param name="log">Function logger.</param>
/// <returns>
/// 200 with the matching user (or null when none is found); 500 when more than one user
/// shares the same user name.
/// </returns>
public static async Task <IActionResult> Run(
    [HttpTrigger(AuthorizationLevel.Anonymous, "get", Route = "user")] HttpRequest req,
    [CosmosDB(databaseName: Constants.DATABASE_NAME, collectionName: Constants.USERS_COLLECTION_NAME, ConnectionStringSetting = Constants.CONNECTION_STRING)] DocumentClient client,
    ILogger log)
{
    log.LogInformation("C# HTTP trigger function processed a request.");

    Uri collectionUri = UriFactory.CreateDocumentCollectionUri(Constants.DATABASE_NAME, Constants.USERS_COLLECTION_NAME);

    //Also works : req.Headers.TryGetValue("X-MS-CLIENT-PRINCIPAL-NAME", out var principalName) = Isabelle Riverain
    //req.Headers.TryGetValue("X-MS-CLIENT-PRINCIPAL-ID", out var principalId) = AccountInfo.LocalAccountId
    string searchValue = null;
    var userReq = req.HttpContext.User;

    if (userReq == null)
    {
        log.LogInformation($"User from context is null");
    }
    else
    {
        searchValue = userReq.GetDisplayName();
        log.LogInformation($"searchedValue is {searchValue}");
    }

    // BUG FIX: removed the unused local `string name = req.Query["name"];` — the query
    // string value was read but never used anywhere in this function.

    // Cross-partition query: the collection is not partitioned on UserName.
    var option = new FeedOptions { EnableCrossPartitionQuery = true };
    IDocumentQuery <DatabaseUser> query = client.CreateDocumentQuery <DatabaseUser>(collectionUri, option)
        .Where(p => p.UserName == searchValue)
        .AsDocumentQuery();

    List <DatabaseUser> result = new List <DatabaseUser>();
    while (query.HasMoreResults)
    {
        foreach (DatabaseUser user in await query.ExecuteNextAsync())
        {
            result.Add(user);
        }
    }

    // UserName is expected to be unique; more than one match is a data error.
    if (result.Count > 1)
    {
        log.LogError($"Found several users with userName {searchValue}");
        return(new StatusCodeResult(StatusCodes.Status500InternalServerError));
    }

    ReturnedUser returnUser;
    if (result.Count == 0)
    {
        // No match: the OK response carries a null body by design.
        returnUser = null;
    }
    else
    {
        returnUser = new ReturnedUser { Id = result[0].Id, Login = result[0].Login, Score = result[0].Score };
    }

    return(new OkObjectResult(returnUser));
}
/// <summary>
/// Executes the next page of the supplied document query and returns the resulting feed.
/// Thin passthrough to <see cref="IDocumentQuery{T}"/>'s ExecuteNextAsync.
/// </summary>
/// <param name="query">Query whose next result page is fetched.</param>
/// <returns>The task produced by executing the next page of the query.</returns>
internal static Task <FeedResponse <dynamic> > QuerySingleDocumentAsync(IDocumentQuery <dynamic> query)
    => query.ExecuteNextAsync <dynamic>();
/// <summary>
/// Demo driver: reads Cosmos DB (Gremlin) settings from appsettings.json, ensures the
/// database and graph collection exist, inserts two "user" vertices (one from a
/// dictionary, one from a typed object), runs every query in <c>GremlinQueries</c>, and
/// finally re-runs "AddVertex 1" deserializing the result into a strongly-typed Vertex.
/// </summary>
/// <param name="args">Unused command-line arguments.</param>
static async Task MainAsync(string[] args)
{
    var builder = new ConfigurationBuilder()
        .SetBasePath(Directory.GetCurrentDirectory())
        .AddJsonFile("appsettings.json", optional: true, reloadOnChange: true);
    IConfigurationRoot configuration = builder.Build();

    string documentsEndpoint = configuration["azure:documentsendpoint"];
    string graphDbKey = configuration["azure:graphdbkey"];
    string databaseName = configuration["azure:database"];
    string documentCollection = configuration["azure:collection"];

    Console.WriteLine($"documentsEndpoint:{documentsEndpoint}");
    Console.WriteLine($"graphDbKey:{graphDbKey}");
    Console.WriteLine($"databaseName:{databaseName}");
    Console.WriteLine($"documentCollection:{documentCollection}");
    Console.WriteLine("------------------------------------");

    // BUG FIX: DocumentClient is IDisposable (owns connections); the original never
    // disposed it. The using block guarantees cleanup on exit or exception.
    using (var client = new DocumentClient(new Uri(documentsEndpoint), graphDbKey))
    {
        Database database = await client.CreateDatabaseIfNotExistsAsync(new Database { Id = databaseName });

        DocumentCollection graph = await client.CreateDocumentCollectionIfNotExistsAsync(
            UriFactory.CreateDatabaseUri(databaseName),
            new DocumentCollection { Id = documentCollection },
            new RequestOptions { OfferThroughput = 1000 });

        // Two equivalent payloads: a loosely-typed dictionary and a typed User object.
        var data = new Dictionary <string, dynamic>()
        {
            { "id", Guid.NewGuid() },
            { "array", new List <string> { "blah" } },
            { "firstName", "Justin" },
            { "lastName", "Bieber" },
            { "male", true },
            { "age", 32 },
        };
        var data2 = new User
        {
            id = Guid.NewGuid(),
            array = new List <string> { "blah2" },
            firstName = "Larry",
            lastName = "Gowan",
            male = true,
            age = 32
        };

        var query1 = QueryAddVertex("user", data);
        var query2 = QueryAddVertex("user", data2);

        var resp = await client.CreateGremlinQuery <Vertex>(graph, query1).ExecuteNextAsync();
        Console.WriteLine($"C# function processed: {resp.ActivityId}");
        var resp2 = await client.CreateGremlinQuery <Vertex>(graph, query2).ExecuteNextAsync();
        Console.WriteLine($"C# function processed: {resp2.ActivityId}");

        foreach (KeyValuePair <string, string> gremlinQuery in GremlinQueries)
        {
            Console.WriteLine($"Running {gremlinQuery.Key}: {gremlinQuery.Value}");

            // The CreateGremlinQuery method extensions allow you to execute Gremlin queries and iterate
            // results asychronously
            IDocumentQuery <dynamic> query = client.CreateGremlinQuery <dynamic>(graph, gremlinQuery.Value);
            while (query.HasMoreResults)
            {
                foreach (dynamic result in await query.ExecuteNextAsync())
                {
                    Console.WriteLine($"\t {JsonConvert.SerializeObject(result)}");
                }
            }
        }

        // Data is returned in GraphSON format, which can be deserialized into a strongly-typed vertex, edge or property class
        // The following snippet shows how to do this
        string gremlin = GremlinQueries["AddVertex 1"];
        Console.WriteLine($"Running Add Vertex with deserialization: {gremlin}");

        IDocumentQuery <Vertex> insertVertex = client.CreateGremlinQuery <Vertex>(graph, GremlinQueries["AddVertex 1"]);
        while (insertVertex.HasMoreResults)
        {
            foreach (Vertex vertex in await insertVertex.ExecuteNextAsync <Vertex>())
            {
                // Since Gremlin is designed for multi-valued properties, the format returns an array. Here we just read
                // the first value
                string name = (string)vertex.GetVertexProperties("firstName").First().Value;
                Console.WriteLine($"\t Id:{vertex.Id}, Name: {name}");
            }
        }

        Console.WriteLine($"C# function processed: NortonGraphHttpTrigger2");
    }
}