/// <summary>
/// The default behavior is for DocumentDB to index every attribute in every document automatically.
/// There are times when a document contains large amounts of information, in deeply nested structures
/// that you know you will never search on. In extreme cases like this, you can exclude paths from the
/// index to save on storage cost, improve write performance and also improve read performance because the index is smaller.
///
/// This method demonstrates how to set IndexingPolicy.ExcludedPaths
/// </summary>
private static async Task ExcludePathsFromIndex()
{
    string containerId = $"{Program.containerId}-ExcludePathsFromIndex";
    Console.WriteLine("\n3. Exclude specified paths from document index");

    ContainerProperties containerProperties = new ContainerProperties(containerId, Program.partitionKey);
    containerProperties.IndexingPolicy.IncludedPaths.Add(new IncludedPath { Path = "/*" }); // Special mandatory path of "/*" required to denote include entire tree
    containerProperties.IndexingPolicy.ExcludedPaths.Add(new ExcludedPath { Path = "/metaData/*" }); // exclude metaData node, and anything under it
    containerProperties.IndexingPolicy.ExcludedPaths.Add(new ExcludedPath { Path = "/subDoc/nonSearchable/*" }); // exclude ONLY a part of subDoc
    containerProperties.IndexingPolicy.ExcludedPaths.Add(new ExcludedPath { Path = "/\"excludedNode\"/*" }); // exclude excludedNode node, and anything under it

    // The effect of the above IndexingPolicy is that only id, partitionKey, foo, and /subDoc/searchable are indexed

    ContainerResponse response = await Program.database.CreateContainerAsync(containerProperties);
    Console.WriteLine("Container {0} created with index policy \n{1}", containerId, JsonConvert.SerializeObject(response.Resource.IndexingPolicy));
    Container container = (Container)response;

    try
    {
        int numDocs = 250;
        Console.WriteLine("Creating {0} documents", numDocs);
        for (int docIndex = 0; docIndex < numDocs; docIndex++)
        {
            dynamic dyn = new
            {
                id = "doc" + docIndex,
                partitionKey = "doc" + docIndex,
                foo = "bar" + docIndex,
                metaData = "meta" + docIndex,
                subDoc = new { searchable = "searchable" + docIndex, nonSearchable = "value" + docIndex },
                excludedNode = new { subExcluded = "something" + docIndex, subExcludedNode = new { someProperty = "value" + docIndex } }
            };

            ItemResponse<dynamic> created = await container.CreateItemAsync<dynamic>(dyn, new PartitionKey("doc" + docIndex));
            Console.WriteLine("Creating document with id {0}", created.Resource.id);
        }

        // Querying on any of the excluded paths (metaData, /subDoc/nonSearchable, or anything under excludedNode)
        // will be expensive since those queries cannot use the index and are instead served from a scan automatically.
        int queryDocId = numDocs / 2;

        QueryStats queryStats = await Program.GetQueryResult(container, string.Format(CultureInfo.InvariantCulture, "SELECT * FROM root r WHERE r.metaData='meta{0}'", queryDocId));
        Console.WriteLine("Query on metaData returned {0} results", queryStats.Count);
        Console.WriteLine("Query on metaData consumed {0} RUs", queryStats.RequestCharge);

        queryStats = await Program.GetQueryResult(container, string.Format(CultureInfo.InvariantCulture, "SELECT * FROM root r WHERE r.subDoc.nonSearchable='value{0}'", queryDocId));
        Console.WriteLine("Query on /subDoc/nonSearchable returned {0} results", queryStats.Count);
        Console.WriteLine("Query on /subDoc/nonSearchable consumed {0} RUs", queryStats.RequestCharge);

        queryStats = await Program.GetQueryResult(container, string.Format(CultureInfo.InvariantCulture, "SELECT * FROM root r WHERE r.excludedNode.subExcludedNode.someProperty='value{0}'", queryDocId));
        Console.WriteLine("Query on /excludedNode/subExcludedNode/someProperty returned {0} results", queryStats.Count);
        Console.WriteLine("Query on /excludedNode/subExcludedNode/someProperty cost {0} RUs", queryStats.RequestCharge);

        // Querying on foo, or even /subDoc/searchable, consumes fewer RUs because those paths were not excluded
        queryStats = await Program.GetQueryResult(container, string.Format(CultureInfo.InvariantCulture, "SELECT * FROM root r WHERE r.foo='bar{0}'", queryDocId));
        Console.WriteLine("Query on /foo returned {0} results", queryStats.Count);
        Console.WriteLine("Query on /foo cost {0} RUs", queryStats.RequestCharge);

        queryStats = await Program.GetQueryResult(container, string.Format(CultureInfo.InvariantCulture, "SELECT * FROM root r WHERE r.subDoc.searchable='searchable{0}'", queryDocId));
        Console.WriteLine("Query on /subDoc/searchable returned {0} results", queryStats.Count);
        Console.WriteLine("Query on /subDoc/searchable cost {0} RUs", queryStats.RequestCharge);
    }
    finally
    {
        // Cleanup
        await container.DeleteContainerAsync();
    }
}
/// <summary>
/// Surfaces the statistics captured for the most recently executed query
/// through the caller-supplied out parameter.
/// </summary>
internal void WriteStatisticsTo(out QueryStats stats) => stats = lastQueryStats;
/// <summary>
/// Copies this instance's <c>TotalResults</c> value onto <paramref name="target"/>.
/// </summary>
public void CopyTo(QueryStats target) => target.TotalResults = TotalResults;
/// <summary>
/// Add up the correct matchings (type/text) counts for predicted entities and their children,
/// remove any matching labeled entity from false negatives,
/// and populate false positive predicted entities.
/// </summary>
private void AddUpCorrectTypeAndTextEntitiesCountsRecursively(
    IReadOnlyList<Entity> labeledEntities,
    Entity predictedEntity,
    QueryStats queryStats,
    Dictionary<string, Entity> falseNegativeEntities,
    string labeledEntityPrefix = "",
    string predictedEntityPrefix = "")
{
    // get or create entity stats
    string predictedEntityFullName = GetEntityFullName(predictedEntityPrefix, predictedEntity.Name);
    var predictedEntityEvalObj = GetOrAddEntityStatObject(predictedEntityFullName);

    // A boolean to keep track if the guessed entity matches with any labeled entity
    var isFalsePositive = true;
    var setChildrenAsFalse = true;

    foreach (var labeledEntity in labeledEntities ?? Enumerable.Empty<Entity>())
    {
        var labeledEntityFullName = GetEntityFullName(labeledEntityPrefix, labeledEntity.Name);

        // Filtering on entity type/name
        if (predictedEntityFullName == labeledEntityFullName)
        {
            // If predicted parent type matched a labeled one then some children might be true positives,
            // so we don't need to set children as false positives since they're going to be handled in the recursive call
            setChildrenAsFalse = false;

            // Check MUC Type correctness by validating intersection
            var locationsOverlap = LocationsOverlap(
                labeledEntity.StartPosition,
                labeledEntity.EndPosition,
                predictedEntity.StartPosition,
                predictedEntity.EndPosition);
            if (locationsOverlap)
            {
                predictedEntityEvalObj.CorrectTypeCount++;
            }

            // Check MUC Text correctness by validating exact location match
            var locationsMatchExactly = LocationsMatchExactly(
                labeledEntity.StartPosition,
                labeledEntity.EndPosition,
                predictedEntity.StartPosition,
                predictedEntity.EndPosition);
            if (locationsMatchExactly)
            {
                predictedEntityEvalObj.CorrectTextCount++;

                // Guessed entity matches a labeled one then it is a true positive
                // and not a false positive
                isFalsePositive = false;

                // If labeled entity matches exactly with a guessed entity then remove the labeled one from false negatives
                falseNegativeEntities.Remove(GetUniqueEntityKey(labeledEntityFullName, labeledEntity.StartPosition, labeledEntity.EndPosition));
            }

            // Since this level matches, we need to also check if lower levels match and account for them recursively
            if (predictedEntity.Children != null)
            {
                foreach (var childPredictedEntity in predictedEntity.Children)
                {
                    AddUpCorrectTypeAndTextEntitiesCountsRecursively(
                        labeledEntity.Children,
                        childPredictedEntity,
                        queryStats,
                        falseNegativeEntities,
                        labeledEntityFullName,
                        predictedEntityFullName);
                }
            }
        }
    }

    // If guessed entity didn't match exactly with any labeled entity then it is a false positive,
    // and if it didn't match type with any labeled one then all of its children are also false positives
    if (isFalsePositive)
    {
        AddFalsePositivesRecursively(
            predictedEntity,
            queryStats,
            setChildrenAsFalse,
            predictedEntityPrefix);
    }
}
/// <summary>
/// Fluent extension that captures the provider's query statistics into
/// <paramref name="stats"/> and returns the query unchanged for further chaining.
/// </summary>
public static IQueryable<T> Statistics<T>(this IQueryable<T> query, out QueryStats stats) where T : class
{
    // The provider is expected to be our own QueryProvider; the cast is intentional.
    var provider = (QueryProvider)query.Provider;
    provider.WriteStatisticsTo(out stats);
    return query;
}
/// <summary>
/// Runs the query currently in the query text box against the connected database,
/// then shows execution statistics and the per-statement execution plan grid.
/// Shows a message box and bails out early on validation failure, user cancel, or error.
/// </summary>
private void btnRunQuery_Click(object sender, EventArgs e)
{
    if (string.IsNullOrWhiteSpace(_serverName))
    {
        MessageBox.Show("Please connect to a database.");
        return;
    }

    if (string.IsNullOrWhiteSpace(txtQuery.Text))
    {
        MessageBox.Show("Please enter a query to run.");
        return;
    }

    try
    {
        Cursor.Current = Cursors.WaitCursor;
        this.SuspendLayout();

        ResetStatsDisplay();

        _results = null;
        if (!TryRunQuery())
        {
            // Canceled or failed; the helper has already informed the user.
            return;
        }

        DisplayResults();
    }
    finally
    {
        this.ResumeLayout();
        Cursor.Current = Cursors.Default;
    }
}

// Resets all statistic labels to "0" and clears/re-initializes the statement grid.
private void ResetStatsDisplay()
{
    lblExecutionTime.Text = "0";
    lblLogicalReads.Text = "0";
    lblPhysicalReads.Text = "0";
    lblPlanCompileTime.Text = "0";
    lblScanCount.Text = "0";
    lblWorkerTime.Text = "0";
    dgvStatements.DataSource = null;
    dgvStatements.Rows.Clear();
    SetupGrid();
}

// Executes the query behind a wait dialog; fills _results. Returns false when the
// user canceled or an exception occurred (the user is notified in both cases).
private bool TryRunQuery()
{
    try
    {
        AnalyzeQuery analyzeQuery = _isTrusted
            ? new AnalyzeQuery(_serverName, _databaseName)
            : new AnalyzeQuery(_serverName, _databaseName, _userName, _password);

        var queryText = txtQuery.Text;
        var clean = chkClean.Checked;
        CancellationTokenSource cancelToken = new CancellationTokenSource();
        WaitDialog.ShowDialog(this, () => analyzeQuery.GetQueryStats(queryText, clean, cancelToken), "Run Query", "Running query ...", cancelToken, out _results);
        if (_results == null)
        {
            // they canceled the query
            MessageBox.Show("Operation canceled by user.");
            return false;
        }

        _results.query_text = txtQuery.Text;
        return true;
    }
    catch (Exception ex)
    {
        MessageBox.Show(BuildExceptionMessage(ex));
        return false;
    }
}

// Flattens an exception chain into a readable message, including SQL line numbers
// when an inner exception is a SqlException.
private static string BuildExceptionMessage(Exception ex)
{
    StringBuilder exMsg = new StringBuilder();
    exMsg.AppendLine($"Exception running query: {ex.Message} \r\n");
    while (ex.InnerException != null)
    {
        var sqlEx = ex.InnerException as SqlException;
        if (sqlEx == null)
        {
            exMsg.AppendLine($"Message: {ex.InnerException.Message} \r\n");
        }
        else
        {
            exMsg.AppendLine($"Line: {sqlEx.LineNumber}, Message: {ex.InnerException.Message} \r\n");
        }

        ex = ex.InnerException;
    }

    return exMsg.ToString();
}

// Pushes _results into the statistic labels and the statement grid, and enables
// the save/compare buttons accordingly.
private void DisplayResults()
{
    lblExecutionTime.Text = Convert.ToString(_results.execution_time);
    lblLogicalReads.Text = Convert.ToString(_results.logical_reads);
    lblPhysicalReads.Text = Convert.ToString(_results.physical_reads);
    lblPlanCompileTime.Text = Convert.ToString(_results.parse_and_compile_elapsed);
    lblScanCount.Text = Convert.ToString(_results.scan_count);
    lblWorkerTime.Text = Convert.ToString(_results.worker_time);
    dgvStatements.DataSource = new SortableBindingList<Statement>(_results.execution_plan_statements);
    btnCloseDialog.Enabled = btnSaveBaseLine.Enabled = btnSaveToZip.Enabled = true;
    // Only allow comparison when a baseline exists and is not the result we just produced.
    btnCompareToBaseLine.Enabled = _baseLineResults != null && _baseLineResults.StatsId != _results.StatsId;
}
/// <summary>
/// Evaluate trained application performance against labeled data using fscore for classes and MUC Evaluation for entities
/// </summary>
/// <param name="testData">List of TestingExample each containing the labeled and predicted data</param>
/// <param name="verbose">Outputs extra metrics (text/type fscores) for Entities</param>
/// <param name="entities">List of all entity models in the application</param>
/// <param name="classes">List of all classification models in the application</param>
/// <returns>Aggregated per-model and per-query statistics for the batch test</returns>
public BatchTestResponse EvaluateModel(
    IReadOnlyList<TestingExample> testData,
    bool verbose = false,
    IReadOnlyList<Model> entities = null,
    IReadOnlyList<Model> classes = null)
{
    ValidateInput(testData);

    // Initialize the evaluation service with the application models
    var evaluationService = new EvaluationService(entities, classes);

    foreach (var testCase in testData)
    {
        // classification model stats aggregation
        evaluationService.AggregateClassificationStats(
            new HashSet<string>(testCase.LabeledData.Classification),
            new HashSet<string>(testCase.PredictedData.Classification));

        // Prepare query stats
        var queryStats = new QueryStats
        {
            QueryText = testCase.Text,
            LabeledClassNames = testCase.LabeledData.Classification,
            PredictedClassNames = testCase.PredictedData.Classification
        };

        // Populate false entities and aggregate entity MUC model stats
        evaluationService.PopulateQueryAndEntityStats(testCase.LabeledData.Entities, testCase.PredictedData.Entities, queryStats);
    }

    // Calculate precision, recall and fScore for classification models
    var classificationModelsStats = evaluationService.ClassificationStats.Values
        .Select(matrix => new ModelStats
        {
            ModelName = matrix.ModelName,
            ModelType = matrix.ModelType,
            Precision = matrix.CalculatePrecision(),
            Recall = matrix.CalculateRecall(),
            FScore = matrix.CalculateFScore(),
            EntityTextFScore = null,
            EntityTypeFScore = null
        })
        .ToList();

    // Calculate precision, recall and fScore for entity models
    // (the text/type fscore breakdown is only computed when verbose was requested)
    var entityModelsStats = evaluationService.EntityStats.Values
        .Select(evaluation => new ModelStats
        {
            ModelName = evaluation.ModelName,
            ModelType = evaluation.ModelType,
            Precision = evaluation.CalculatePrecision(),
            Recall = evaluation.CalculateRecall(),
            FScore = evaluation.CalculateFScore(),
            EntityTextFScore = verbose ? evaluation.CalculateTextFScore() : (double?)null,
            EntityTypeFScore = verbose ? evaluation.CalculateTypeFScore() : (double?)null
        })
        .ToList();

    return new BatchTestResponse
    {
        ClassificationModelsStats = classificationModelsStats,
        EntityModelsStats = entityModelsStats,
        QueryStats = evaluationService.QueryStats
    };
}
/// <summary>
/// Forwards the query to the underlying store unchanged, surfacing the
/// query statistics to the caller via <paramref name="stats"/>.
/// </summary>
public IEnumerable<TProjection> Query<TProjection>(
    DocumentTable table,
    out QueryStats stats,
    string @select = "",
    string @where = "",
    int skip = 0,
    int take = 0,
    string @orderby = "",
    object parameters = null) =>
    store.Query<TProjection>(table, out stats, @select, @where, skip, take, @orderby, parameters);
/// <summary>
/// Wraps a search job's result stream, exposing its statistics alongside
/// an enumerator that pages results from the job location.
/// </summary>
internal ResultEnumerable(HttpClient client, Uri searchJobLocation, QueryStats qs)
{
    Stats = qs;
    this.enumerator = new ResultEnumerator<T>(client, searchJobLocation, qs);
}
// Integration-style test: executes a Cypher query inside a transaction with WithQueryStats
// enabled against a mocked REST harness, and verifies that OperationCompleted fires with a
// non-null QueryStats payload.
// NOTE(review): the URLs below contain "http://*****:*****@" credential-scrub placeholders;
// the surrounding string literals appear truncated/garbled by that redaction (e.g. the
// 'commit' value runs straight into the next request body). Restore the original mock
// JSON/harness setup from source control before relying on this test.
public async Task ReturnsQueryStats_WhenInTransaction() { var response = MockResponse.Json(201, @"{ 'results': [ { 'columns': [ 'id(n)' ], 'data': [ { 'row': [ 4 ], 'meta': [ null ] } ], 'stats': { 'contains_updates': false, 'nodes_created': 0, 'nodes_deleted': 0, 'properties_set': 0, 'relationships_created': 0, 'relationship_deleted': 0, 'labels_added': 0, 'labels_removed': 0, 'indexes_added': 0, 'indexes_removed': 0, 'constraints_added': 0, 'constraints_removed': 0, 'contains_system_updates': false, 'system_updates': 0 } } ], 'errors': [], 'commit': 'http://*****:*****@"{ 'statements': [ { 'statement': 'MATCH (n)\r\nRETURN id(n)', 'resultDataContents': [], 'parameters': {}, 'includeStats': true } ] }"); var rollbackTransactionRequest = MockRequest.Delete($"/db/{database}/tx/1"); using (var testHarness = new RestTestHarness(false, "http://*****:*****@"{'results':[], 'errors':[] }") } }) { var graphClient = await testHarness.CreateAndConnectTransactionalGraphClient(RestTestHarness.Neo4jVersion.Neo40); var completedRaised = false; QueryStats stats = null; graphClient.OperationCompleted += (o, e) => { stats = e.QueryStats; completedRaised = true; }; using (var tx = graphClient.BeginTransaction()) { var query = graphClient.Cypher.WithQueryStats.Match("(n)").Return(n => n.Id()); query.Query.IncludeQueryStats.Should().BeTrue(); await query.ExecuteWithoutResultsAsync(); completedRaised.Should().BeTrue(); stats.Should().NotBeNull(); } } }
/// <inheritdoc />
async Task<IEnumerable<TResult>> IRawGraphClient.ExecuteGetCypherResultsAsync<TResult>(CypherQuery query)
{
    if (Driver == null)
    {
        throw new InvalidOperationException("Can't execute cypher unless you have connected to the server.");
    }

    var context = ExecutionContext.Begin(this);
    List<TResult> results;
    Bookmark lastBookmark = null;
    QueryStats stats = null;

    // Consuming the cursor yields the server-side counters; only done when the caller
    // asked for stats, since ConsumeAsync exhausts the cursor.
    async Task<QueryStats> GetQueryStats(IResultCursor resultCursor)
    {
        if (!query.IncludeQueryStats)
        {
            return null;
        }

        var summary = await resultCursor.ConsumeAsync().ConfigureAwait(false);
        stats = new QueryStats(summary.Counters);
        return stats;
    }

    try
    {
        if (InTransaction)
        {
            // Route through the ambient transaction rather than opening a new session.
            context.Database = Transaction.Database;
            var result = await transactionManager.EnqueueCypherRequest($"The query was: {query.QueryText}", this, query).ConfigureAwait(false);
            results = ParseResults<TResult>(await result.StatementResult.ToListAsync().ConfigureAwait(false), query);
            if (query.IncludeQueryStats)
            {
                var summary = await result.StatementResult.ConsumeAsync().ConfigureAwait(false);
                stats = new QueryStats(summary.Counters);
            }
        }
        else
        {
            var session = Driver.AsyncSession(ServerVersion, query.Database, query.IsWrite, query.Bookmarks);
            try
            {
                // Records must be materialized inside the transaction function: the cursor
                // is only valid while the driver transaction is open.
                async Task<List<IRecord>> Records(IAsyncTransaction asyncTransaction)
                {
                    var cursor = await asyncTransaction.RunAsync(query, this).ConfigureAwait(false);
                    var output = await cursor.ToListAsync().ConfigureAwait(false);
                    stats = await GetQueryStats(cursor).ConfigureAwait(false);
                    return output;
                }

                var result = query.IsWrite
                    ? await session.WriteTransactionAsync(async s => await Records(s)).ConfigureAwait(false)
                    : await session.ReadTransactionAsync(async s => await Records(s)).ConfigureAwait(false);

                results = ParseResults<TResult>(result, query);
                lastBookmark = session.LastBookmark;
            }
            finally
            {
                // Always release the session — previously it leaked when ParseResults
                // (or the transaction itself) threw, because CloseAsync only ran on success.
                await session.CloseAsync().ConfigureAwait(false);
            }
        }
    }
    catch (AggregateException aggregateException)
    {
        // Report the unwrapped inner exception when there is exactly one.
        context.Complete(query, lastBookmark, aggregateException.TryUnwrap(out var unwrappedException) ? unwrappedException : aggregateException);
        throw;
    }
    catch (Exception e)
    {
        context.Complete(query, lastBookmark, e);
        throw;
    }

    context.Complete(query, lastBookmark, results.Count, stats);
    return results;
}
/// <summary>
/// Raises the owner's OperationCompleted event with the elapsed time captured
/// by this context's stopwatch and the supplied outcome details.
/// </summary>
public void Complete(string queryText, Bookmark lastBookmark, int resultsCount = -1, Exception exception = null, NameValueCollection customHeaders = null, int? maxExecutionTime = null, string identifier = null, IEnumerable<Bookmark> bookmarks = null, QueryStats stats = null)
{
    owner.OnOperationCompleted(new OperationCompletedEventArgs
    {
        QueryText = queryText,
        LastBookmark = lastBookmark,
        ResourcesReturned = resultsCount,
        TimeTaken = stopwatch.Elapsed,
        Exception = exception,
        CustomHeaders = customHeaders,
        MaxExecutionTime = maxExecutionTime,
        Identifier = identifier,
        BookmarksUsed = bookmarks,
        QueryStats = stats
    });
}
/// <summary>
/// Completes the operation for the given query, forwarding its headers,
/// identifier and bookmarks to the main overload.
/// </summary>
public void Complete(CypherQuery query, Bookmark lastBookmark, int resultsCount, QueryStats stats)
{
    // Only materialize the debug query text when someone is actually listening for it.
    var queryText = owner.OperationCompleted != null ? query.DebugQueryText : string.Empty;
    Complete(queryText, lastBookmark, resultsCount, null, query.CustomHeaders, identifier: query.Identifier, bookmarks: query.Bookmarks, stats: stats);
}
/// <summary>
/// Completes the operation for the given query when no result count is available
/// (reported as 0), forwarding the query's metadata to the main overload.
/// </summary>
public void Complete(CypherQuery query, Bookmark lastBookmark, QueryStats queryStats)
{
    // Only materialize the debug query text when someone is actually listening for it.
    var queryText = owner.OperationCompleted != null ? query.DebugQueryText : string.Empty;

    // Forward the query's custom headers, for consistency with the sibling
    // Complete(CypherQuery, Bookmark, int, QueryStats) overload which already did.
    Complete(queryText, lastBookmark, 0, null, query.CustomHeaders, identifier: query.Identifier, bookmarks: query.Bookmarks, stats: queryStats);
}
/// <summary>
/// Copies this instance's <c>TotalResults</c> value onto <paramref name="target"/>.
/// </summary>
public void CopyTo(QueryStats target)
{
    target.TotalResults = this.TotalResults;
}