/// <summary>
/// Returns the metadata for every document cataloged in the given schema.
/// </summary>
/// <param name="session">Session used to open the read transaction.</param>
/// <param name="schema">Virtual path of the schema to enumerate.</param>
/// <returns>A snapshot list of the schema's document catalog entries.</returns>
public List<PersistDocumentMeta> EnumerateCatalog(Session session, string schema)
{
    try
    {
        using (var txRef = core.Transactions.Begin(session))
        {
            var schemaMeta = core.Schemas.VirtualPathToMeta(txRef.Transaction, schema, LockOperation.Read);
            if (schemaMeta == null || schemaMeta.Exists == false)
            {
                throw new LeafSQLSchemaDoesNotExistException(schema);
            }

            var catalogFilePath = Path.Combine(schemaMeta.DiskPath, Constants.DocumentCatalogFile);
            var documentCatalog = core.IO.GetJson<PersistDocumentCatalog>(txRef.Transaction, catalogFilePath, LockOperation.Read);

            //Copy the catalog entries so the caller gets a stable snapshot.
            var documentMetaList = new List<PersistDocumentMeta>(documentCatalog.Collection);

            txRef.Commit();
            return documentMetaList;
        }
    }
    catch (Exception ex)
    {
        core.Log.Write(String.Format("Failed to get catalog for process {0}.", session.ProcessId), ex);
        throw;
    }
}
/// <summary>
/// Inserts all documents in a schema into a single index in the schema,
/// rebuilding the index's page catalog from scratch using a pool of worker threads.
/// </summary>
/// <param name="transaction">Transaction that scopes all IO performed by the rebuild.</param>
/// <param name="schemaMeta">Schema whose documents are re-indexed.</param>
/// <param name="indexMeta">Index whose page catalog is rebuilt.</param>
private void RebuildIndex(Transaction transaction, PersistSchema schemaMeta, PersistIndex indexMeta)
{
    try
    {
        var filePath = Path.Combine(schemaMeta.DiskPath, Constants.DocumentCatalogFile);
        var documentCatalog = core.IO.GetJson<PersistDocumentCatalog>(transaction, filePath, LockOperation.Read);

        //Clear out the existing index pages.
        core.IO.PutPBuf(transaction, indexMeta.DiskPath, new PersistIndexPageCatalog());
        var indexPageCatalog = core.IO.GetPBuf<PersistIndexPageCatalog>(transaction, indexMeta.DiskPath, LockOperation.Write);

        //BUGFIX: a leftover debug line ("state.TargetThreadCount = 8;") previously overrode
        // the computed thread count immediately after it was assigned; it has been removed.
        var state = new RebuildIndexItemThreadProc_ParallelState()
        {
            TargetThreadCount = Environment.ProcessorCount * 2,
        };

        var param = new RebuildIndexItemThreadProc_Params()
        {
            DocumentCatalog = documentCatalog,
            State = state,
            IndexMeta = indexMeta,
            IndexPageCatalog = indexPageCatalog,
            SchemaMeta = schemaMeta,
            Transaction = transaction
        };

        for (int i = 0; i < state.TargetThreadCount; i++)
        {
            new Thread(RebuildIndexItemThreadProc).Start(param);
            //Wait until the worker signals it has captured the shared parameters before starting the next one.
            param.Initialized.WaitOne(Timeout.Infinite);
        }

        //Spin-wait for the workers to drain the document catalog.
        while (state.IsComplete == false)
        {
            Thread.Sleep(1);
        }

        if (state.Success == false)
        {
            throw new LeafSQLExceptionBase($"Failed to build index: {state.Exception}");
        }

        core.IO.PutPBuf(transaction, indexMeta.DiskPath, indexPageCatalog);
    }
    catch (Exception ex)
    {
        core.Log.Write(String.Format("Failed to rebuild single index for process {0}.", transaction.ProcessId), ex);
        throw;
    }
}
/// <summary>
/// Loads the index catalog for a schema and stamps the on-disk paths onto the
/// catalog and each contained index before returning it.
/// </summary>
/// <param name="transaction">Transaction that scopes the catalog read.</param>
/// <param name="schemaMeta">Schema whose index catalog is loaded.</param>
/// <param name="intendedOperation">Lock intent used when acquiring the catalog file.</param>
private PersistIndexCatalog GetIndexCatalog(Transaction transaction, PersistSchema schemaMeta, LockOperation intendedOperation)
{
    var catalogPath = Path.Combine(schemaMeta.DiskPath, Constants.IndexCatalogFile);

    var catalog = core.IO.GetJson<PersistIndexCatalog>(transaction, catalogPath, intendedOperation);
    catalog.DiskPath = catalogPath;

    //Disk paths are not persisted with the catalog; derive them for each index on load.
    foreach (var indexMeta in catalog.Collection)
    {
        indexMeta.DiskPath = Path.Combine(schemaMeta.DiskPath, MakeIndexFileName(indexMeta.Name));
    }

    return catalog;
}
/// <summary>
/// Removes a document from a single index, persisting the page catalog only if
/// the document was actually present.
/// </summary>
/// <param name="transaction">Transaction that scopes the index IO.</param>
/// <param name="schemaMeta">Schema that owns the index (currently unused; kept for signature parity with the insert path).</param>
/// <param name="indexMeta">Index to remove the document from.</param>
/// <param name="documentId">ID of the document to remove.</param>
private void DeleteDocumentFromIndex(Transaction transaction, PersistSchema schemaMeta, PersistIndex indexMeta, Guid documentId)
{
    try
    {
        var persistIndexPageCatalog = core.IO.GetPBuf<PersistIndexPageCatalog>(transaction, indexMeta.DiskPath, LockOperation.Write);

        //Only write the catalog back when the document was found in a leaf; otherwise the file is unchanged.
        if (RemoveDocumentFromLeaves(ref persistIndexPageCatalog.Leaves, documentId))
        {
            core.IO.PutPBuf(transaction, indexMeta.DiskPath, persistIndexPageCatalog);
        }
    }
    catch (Exception ex)
    {
        //BUGFIX: the log message previously said "upsert" for a delete operation.
        core.Log.Write(String.Format("Index document delete failed for process {0}.", transaction.ProcessId), ex);
        throw;
    }
}
/// <summary>
/// Removes a document from every index defined on the schema.
/// </summary>
/// <param name="transaction">Transaction that scopes the catalog and index IO.</param>
/// <param name="schemaMeta">Schema whose indexes are updated.</param>
/// <param name="documentId">ID of the document to remove.</param>
public void DeleteDocumentFromIndexes(Transaction transaction, PersistSchema schemaMeta, Guid documentId)
{
    try
    {
        var indexCatalog = GetIndexCatalog(transaction, schemaMeta, LockOperation.Read);

        //Loop though each index in the schema.
        foreach (var indexMeta in indexCatalog.Collection)
        {
            DeleteDocumentFromIndex(transaction, schemaMeta, indexMeta, documentId);
        }
    }
    catch (Exception ex)
    {
        //BUGFIX: the log message previously said "upsert" for a delete operation.
        core.Log.Write(String.Format("Multi-index delete failed for process {0}.", transaction.ProcessId), ex);
        throw;
    }
}
/// <summary>
/// Returns a list of condition groups and an associated list of supporting indexes
/// to satisfy each group (recursing into nested child conditions).
/// </summary>
/// <param name="transaction">Transaction that scopes catalog IO.</param>
/// <param name="schemaMeta">Schema whose indexes are considered.</param>
/// <param name="conditions">Condition tree to cover with indexes.</param>
public List<IndexSelections> SelectIndexes(Transaction transaction, PersistSchema schemaMeta, Conditions conditions)
{
    var results = new List<IndexSelections>();

    //Resolve indexes for this group's root conditions first.
    var rootSelection = new IndexSelections(conditions);
    SelectIndexes(transaction, schemaMeta, conditions.Root, ref rootSelection);
    results.Add(rootSelection);

    //Then recurse into each nested child group and fold its selections into the result.
    if (conditions.Children?.Count > 0)
    {
        foreach (var childConditions in conditions.Children)
        {
            var childSelections = SelectIndexes(transaction, schemaMeta, childConditions);
            if (childSelections?.Count > 0)
            {
                results.AddRange(childSelections);
            }
        }
    }

    return results;
}
/// <summary>
/// Inserts an index entry for a single document into a single index using a long lived index page catalog.
/// </summary>
/// <param name="transaction">Transaction that scopes the index IO.</param>
/// <param name="schemaMeta">Schema that owns the index (used for the duplicate-key error text).</param>
/// <param name="indexMeta">Index receiving the entry.</param>
/// <param name="document">Document whose key values are indexed.</param>
/// <param name="indexPageCatalog">Long-lived page catalog shared across inserts; may be null when called via the single-document overload.</param>
/// <param name="flushPageCatalog">When true, the modified page catalog is written back to disk immediately.</param>
private void InsertDocumentIntoIndex(Transaction transaction, PersistSchema schemaMeta, PersistIndex indexMeta, PersistDocument document, PersistIndexPageCatalog indexPageCatalog, bool flushPageCatalog)
{
    try
    {
        var searchTokens = GetIndexSearchTokens(transaction, indexMeta, document);

        var findResult = FindKeyPage(transaction, indexMeta, searchTokens, indexPageCatalog);

        //If we found a full match for all supplied key values - add the document to the leaf collection.
        if (findResult.IsFullMatch)
        {
            if (findResult.Leaf.DocumentIDs == null)
            {
                findResult.Leaf.DocumentIDs = new HashSet<Guid>();
            }

            //BUGFIX: the unique check previously used (Count > 1), which let one duplicate slip in
            // before failing. A violation occurs as soon as the leaf holds any document other than
            // the one being (re-)inserted.
            if (indexMeta.IsUnique && findResult.Leaf.DocumentIDs.Count > 0
                && findResult.Leaf.DocumentIDs.Contains(document.Id) == false)
            {
                string exceptionText = string.Format("Duplicate key violation occurred for index [{0}]/[{1}]. Values: {{{2}}}",
                    schemaMeta.VirtualPath, indexMeta.Name, string.Join(",", searchTokens));
                throw new DokdexDuplicateKeyViolation(exceptionText);
            }

            findResult.Leaf.DocumentIDs.Add(document.Id);

            if (flushPageCatalog)
            {
                core.IO.PutPBuf(transaction, indexMeta.DiskPath, findResult.Catalog);
            }
        }
        else
        {
            //If we didn't find a full match for all supplied key values,
            // then create the tree and add the document to the lowest leaf.
            //Note that we are going to start creating the leaf level at the findResult.ExtentLevel.
            // This is because we may have a partial match and don't need to create the full tree.

            //BUGFIX: lock on the catalog actually being mutated — indexPageCatalog is null when
            // called via the single-document overload, and locking null would throw.
            lock (indexPageCatalog ?? findResult.Catalog)
            {
                for (int i = findResult.ExtentLevel; i < searchTokens.Count; i++)
                {
                    findResult.Leaf = findResult.Leaves.AddNewleaf(searchTokens[i]);
                    findResult.Leaves = findResult.Leaf.Leaves;
                }

                if (findResult.Leaf.DocumentIDs == null)
                {
                    findResult.Leaf.DocumentIDs = new HashSet<Guid>();
                }

                findResult.Leaf.DocumentIDs.Add(document.Id);
            }

            if (flushPageCatalog)
            {
                core.IO.PutPBuf(transaction, indexMeta.DiskPath, findResult.Catalog);
            }
        }
    }
    catch (Exception ex)
    {
        core.Log.Write(String.Format("Index document insert failed for process {0}.", transaction.ProcessId), ex);
        throw;
    }
}
/// <summary>
/// Inserts an index entry for a single document into a single index, flushing
/// the index page catalog to disk immediately.
/// </summary>
/// <param name="transaction">Transaction that scopes the index IO.</param>
/// <param name="schemaMeta">Schema that owns the index.</param>
/// <param name="indexMeta">Index receiving the entry.</param>
/// <param name="document">Document whose key values are indexed.</param>
private void InsertDocumentIntoIndex(Transaction transaction, PersistSchema schemaMeta, PersistIndex indexMeta, PersistDocument document)
    => InsertDocumentIntoIndex(transaction, schemaMeta, indexMeta, document, null, true);
/// <summary>
/// Selects the single best index (most matched keys, fewest attributes) to satisfy a flat
/// set of conditions, recording any condition keys no index could cover as unhandled.
/// </summary>
/// <param name="transaction">Transaction that scopes catalog IO.</param>
/// <param name="schemaMeta">Schema whose index catalog is consulted.</param>
/// <param name="conditions">Conditions to cover with an index.</param>
/// <returns>The chosen index selection(s) plus the list of unhandled keys.</returns>
public IndexSelections SelectIndexes(Transaction transaction, PersistSchema schemaMeta, Conditions conditions)
{
    try
    {
        //BUGFIX: the method body previously sat AFTER an empty "try { } catch" block,
        // so failures were never logged; the body now runs inside the try.
        IndexKeyMatches indexKeyMatches = new IndexKeyMatches(conditions);

        var indexCatalog = GetIndexCatalog(transaction, schemaMeta, LockOperation.Read);

        IndexSelections indexSelections = new IndexSelections();

        //Loop though each index in the schema. An index is only usable for a contiguous
        // leading run of its attributes, so stop at the first attribute with no unhandled key.
        List<PotentialIndex> potentialIndexs = new List<PotentialIndex>();
        foreach (var indexMeta in indexCatalog.Collection)
        {
            List<string> handledKeyNames = new List<string>();

            for (int i = 0; i < indexMeta.Attributes.Count; i++)
            {
                if (indexKeyMatches.Find(o => o.Key == indexMeta.Attributes[i].Name.ToLower() && o.Handled == false) != null)
                {
                    handledKeyNames.Add(indexMeta.Attributes[i].Name.ToLower());
                }
                else
                {
                    break;
                }
            }

            if (handledKeyNames.Count > 0)
            {
                potentialIndexs.Add(new PotentialIndex(indexMeta, handledKeyNames));
            }
        }

        //Grab the index that matches the most of our supplied keys but also has the least attributes.
        var firstIndex = (from o in potentialIndexs where o.Tried == false select o)
            .OrderByDescending(s => s.HandledKeyNames.Count)
            .ThenBy(t => t.Index.Attributes.Count).FirstOrDefault();

        if (firstIndex != null)
        {
            //Mark the keys covered by the chosen index as handled so they are not reported as unhandled below.
            var handledKeys = (from o in indexKeyMatches where firstIndex.HandledKeyNames.Contains(o.Key) select o).ToList();
            foreach (var handledKey in handledKeys)
            {
                handledKey.Handled = true;
            }

            firstIndex.Tried = true;

            indexSelections.Add(new IndexSelection(firstIndex.Index, firstIndex.HandledKeyNames));
        }

        //Anything still unhandled must be evaluated by document-level filtering downstream.
        indexSelections.UnhandledKeys.AddRange((from o in indexKeyMatches where o.Handled == false select o.Key).ToList());

        return indexSelections;
    }
    catch (Exception ex)
    {
        core.Log.Write(String.Format("Failed to select indexes for process {0}.", transaction.ProcessId), ex);
        throw;
    }
}
/// <summary>
/// Executes a query against a schema: selects candidate indexes for the supplied conditions and
/// either performs a full schema scan (no usable index) or an index-assisted lookup, projecting
/// the requested fields — or document metadata plus content when the select list is "*".
/// </summary>
/// <param name="transaction">Transaction that scopes all IO performed by the query.</param>
/// <param name="schemaMeta">Schema whose documents are searched.</param>
/// <param name="conditions">Filter conditions; lower-cased in place before evaluation.</param>
/// <param name="rowLimit">Row cap; zero or negative means unlimited.</param>
/// <param name="fieldList">Fields to project; a single "*" entry selects metadata + content.</param>
private QueryResult FindDocuments(Transaction transaction, PersistSchema schemaMeta, Conditions conditions, int rowLimit, List<string> fieldList)
{
    QueryResult results = new QueryResult();

    try
    {
        conditions.MakeLowerCase();

        //A lone "*" means "all fields"; a null field list triggers the metadata+content projection below.
        if (fieldList.Count == 1 && fieldList[0] == "*")
        {
            fieldList = null;
        }

        if (fieldList?.Count() > 0)
        {
            foreach (var field in fieldList)
            {
                results.Columns.Add(new QueryColumn(field));
            }
        }
        else
        {
            //"Modfied" (sic) matches the misspelled PersistDocument.Modfied property read below.
            results.Columns.Add(new QueryColumn("Id"));
            results.Columns.Add(new QueryColumn("Created"));
            results.Columns.Add(new QueryColumn("Modfied"));
            results.Columns.Add(new QueryColumn("Content"));
        }

        bool hasFieldList = fieldList != null && fieldList.Count > 0;

        var indexSelectionGroups = core.Indexes.SelectIndexes(transaction, schemaMeta, conditions);

        string documentCatalogDiskPath = Path.Combine(schemaMeta.DiskPath, Constants.DocumentCatalogFile);

        if (indexSelectionGroups.Count == 0) //Full schema scan. Ouch!
        {
            var documentCatalog = core.IO.GetJson<PersistDocumentCatalog>(transaction, documentCatalogDiskPath, LockOperation.Read);

            foreach (var documentMeta in documentCatalog.Collection)
            {
                string documentDiskPath = Path.Combine(schemaMeta.DiskPath, Helpers.GetDocumentModFilePath(documentMeta.Id));

                PersistDocument persistDocument = core.IO.GetJson<PersistDocument>(transaction, documentDiskPath, LockOperation.Read);
                JObject jsonContent = JObject.Parse(persistDocument.Content);

                if (conditions.IsMatch(jsonContent))
                {
                    QueryRow rowValues = new QueryRow();

                    //NOTE(review): ">" lets one row beyond rowLimit be returned before the break;
                    // ">=" would enforce the limit exactly — confirm intended semantics.
                    if (rowLimit > 0 && results.Rows.Count > rowLimit)
                    {
                        break;
                    }

                    if (hasFieldList)
                    {
                        if (jsonContent == null) //Defensive re-parse; jsonContent is always set above.
                        {
                            jsonContent = JObject.Parse(persistDocument.Content);
                        }

                        foreach (string fieldName in fieldList)
                        {
                            if (fieldName == "#RID")
                            {
                                //#RID is a virtual column: the document's own ID rather than a content attribute.
                                rowValues.Add(persistDocument.Id.ToString());
                            }
                            else
                            {
                                JToken fieldToken = null;
                                if (jsonContent.TryGetValue(fieldName, StringComparison.CurrentCultureIgnoreCase, out fieldToken))
                                {
                                    rowValues.Add(fieldToken.ToString());
                                }
                                else
                                {
                                    rowValues.Add(string.Empty); //Missing attributes project as empty strings.
                                }
                            }
                        }
                    }
                    else
                    {
                        //If no fields / "*" was specified as the select list, just return the content of each document and some metadata.
                        rowValues.Add(persistDocument.Id.ToString());
                        rowValues.Add(persistDocument.Created.ToString());
                        rowValues.Add(persistDocument.Modfied.ToString());
                        rowValues.Add(persistDocument.Content);
                    }

                    results.Rows.Add(rowValues);
                }
            }
        }
        else //Indexed search!
        {
            HashSet<Guid> intersectedDocumentIds = new HashSet<Guid>();

            foreach (var indexSelectionGroup in indexSelectionGroups)
            {
                //NOTE(review): the group-type branches below are empty and foundDocuments is never
                // merged into intersectedDocumentIds, so the indexed path currently yields zero rows.
                // This appears to be work-in-progress — confirm before relying on indexed queries.
                if (indexSelectionGroup.ConditionGroupType == ConditionType.None)
                {
                }
                else if (indexSelectionGroup.ConditionGroupType == ConditionType.And)
                {
                }
                else if (indexSelectionGroup.ConditionGroupType == ConditionType.Or)
                {
                }

                foreach (var indexSelection in indexSelectionGroup)
                {
                    var indexPageCatalog = core.IO.GetPBuf<PersistIndexPageCatalog>(transaction, indexSelection.Index.DiskPath, LockOperation.Read);

                    //var targetedIndexConditions = (from o in indexSelectionGroup.Conditions
                    //    .Where(o => indexSelection.HandledKeyNames.Contains(o.Key)) select o).ToList();

                    //Going to have to loop though all of the nested conditions.
                    var foundDocuments = core.Indexes.MatchDocuments(indexPageCatalog, indexSelection);
                }
            }

            //Now that we have eliminated all but the document IDs that we care about, all we
            // have to do is open each of them up and pull the content requested in the field list.
            if (intersectedDocumentIds.Count > 0)
            {
                var documentCatalog = core.IO.GetJson<PersistDocumentCatalog>(transaction, documentCatalogDiskPath, LockOperation.Read);

                foreach (var intersectedDocumentId in intersectedDocumentIds)
                {
                    var documentMeta = documentCatalog.GetById(intersectedDocumentId);

                    string documentDiskPath = Path.Combine(schemaMeta.DiskPath, Helpers.GetDocumentModFilePath(documentMeta.Id));

                    PersistDocument persistDocument = core.IO.GetJson<PersistDocument>(transaction, documentDiskPath, LockOperation.Read);
                    JObject jsonContent = JObject.Parse(persistDocument.Content);

                    QueryRow rowValues = new QueryRow();

                    //NOTE(review): same rowLimit off-by-one as the full-scan path above.
                    if (rowLimit > 0 && results.Rows.Count > rowLimit)
                    {
                        break;
                    }

                    bool fullAttributeMatch = true;

                    foreach (var indexSelectionGroup in indexSelectionGroups)
                    {
                        //If we have any conditions that were not indexed, open the remainder
                        // of the documents and do additional document-level filtering.
                        if (indexSelectionGroup.UnhandledKeys?.Count > 0)
                        {
                            var unhandledConditions = (from o in indexSelectionGroup.Conditions
                                                       where indexSelectionGroup.UnhandledKeys.Contains(o.Key)
                                                       select o).ToList();

                            foreach (Condition condition in unhandledConditions)
                            {
                                JToken jToken = null;
                                if (jsonContent.TryGetValue(condition.Key, StringComparison.CurrentCultureIgnoreCase, out jToken))
                                {
                                    if (condition.IsMatch(jToken.ToString().ToLower()) == false)
                                    {
                                        fullAttributeMatch = false;
                                        break;
                                    }
                                }
                            }
                        }
                    }

                    if (fullAttributeMatch)
                    {
                        if (hasFieldList)
                        {
                            if (jsonContent == null) //Defensive re-parse; jsonContent is always set above.
                            {
                                jsonContent = JObject.Parse(persistDocument.Content);
                            }

                            foreach (string fieldName in fieldList)
                            {
                                if (fieldName == "#RID")
                                {
                                    rowValues.Add(persistDocument.Id.ToString());
                                }
                                else
                                {
                                    JToken fieldToken = null;
                                    if (jsonContent.TryGetValue(fieldName, StringComparison.CurrentCultureIgnoreCase, out fieldToken))
                                    {
                                        rowValues.Add(fieldToken.ToString());
                                    }
                                    else
                                    {
                                        rowValues.Add(string.Empty);
                                    }
                                }
                            }
                        }
                        else
                        {
                            //If no fields / "*" was specified as the select list, just return the content of each document and some metadata.
                            rowValues.Add(persistDocument.Id.ToString());
                            rowValues.Add(persistDocument.Created.ToString());
                            rowValues.Add(persistDocument.Modfied.ToString());
                            rowValues.Add(persistDocument.Content);
                        }

                        results.Rows.Add(rowValues);
                    }
                }
            }
        }

        return(results);
    }
    catch (Exception ex)
    {
        //NOTE(review): wrapping with only ex.Message discards the original exception type and
        // stack trace; consider passing ex as an inner exception if the ctor allows it.
        throw new LeafSQLExecutionException(ex.Message);
    }
}
/// <summary>
/// Experimental/diagnostic variant of FindDocuments: it only exercises index selection for the
/// supplied conditions and prints the number of unhandled keys. It produces no query results yet.
/// A large block of commented-out legacy implementation (pre-transaction query engine) was removed.
/// </summary>
/// <param name="transaction">Transaction that scopes catalog IO.</param>
/// <param name="schemaMeta">Schema whose indexes are considered.</param>
/// <param name="conditions">Filter conditions; lower-cased in place.</param>
/// <param name="rowLimit">Unused in this diagnostic variant.</param>
/// <param name="fieldList">Fields requested; mutated to prepend the virtual "#RID" column.</param>
private void /*QueryResult*/ FindDocuments(Transaction transaction, PersistSchema schemaMeta, Conditions conditions, int rowLimit, List<string> fieldList)
{
    try
    {
        conditions.MakeLowerCase();

        //A lone "*" means "all fields"; represent that with a null field list.
        if (fieldList.Count == 1)
        {
            if (fieldList[0] == "*")
            {
                fieldList = null;
            }
        }

        if (fieldList != null && fieldList.Count() > 0)
        {
            fieldList.Insert(0, "#RID");
        }

        var indexSelections = core.Indexes.SelectIndexes(transaction, schemaMeta, conditions);

        Console.WriteLine(indexSelections.UnhandledKeys.Count());

        return;
    }
    catch (Exception ex)
    {
        //BUGFIX: the exception was previously swallowed silently; at minimum record the failure.
        // The original no-throw contract is preserved for callers.
        core.Log.Write(String.Format("Failed to find documents for process {0}.", transaction.ProcessId), ex);
    }
}
/// <summary>
/// Finds the best set of indexes to satisfy a flat list of conditions and records the selections
/// (plus any condition keys no index could cover) into the supplied IndexSelections.
/// </summary>
/// <param name="transaction">Transaction that scopes catalog IO.</param>
/// <param name="schemaMeta">Schema whose index catalog is consulted.</param>
/// <param name="conditions">Flat condition list to cover with indexes.</param>
/// <param name="indexSelection">Receives the chosen indexes and the unhandled keys.</param>
private void SelectIndexes(Transaction transaction, PersistSchema schemaMeta, List<Condition> conditions, ref IndexSelections indexSelection)
{
    try
    {
        var indexCatalog = GetIndexCatalog(transaction, schemaMeta, LockOperation.Read);

        List<PotentialIndex> potentialIndexs = new List<PotentialIndex>();

        var indexConditions = new IndexConditions(conditions);

        //Loop though each index in the schema and create a list of all indexes which could potentially be used to match the conditions.
        foreach (var indexMeta in indexCatalog.Collection)
        {
            var indexHandledCondition = new List<IndexHandledCondition>();

            //Walk the index attributes in order: an index is only usable for a contiguous leading
            // run of attributes, so stop at the first attribute with no unhandled condition.
            for (int i = 0; i < indexMeta.Attributes.Count; i++)
            {
                var indexConditonMatches = indexConditions.FindAll(o => o.Key == indexMeta.Attributes[i].Name.ToLower() && o.Handled == false);

                if (indexConditonMatches.Count > 0)
                {
                    foreach (var indexConditonMatche in indexConditonMatches)
                    {
                        //Record the attribute ordinal (i) alongside the matched condition.
                        indexHandledCondition.Add(new IndexHandledCondition(indexConditonMatche, i));
                    }
                }
                else
                {
                    break;
                }
            }

            if (indexHandledCondition.Count > 0)
            {
                potentialIndexs.Add(new PotentialIndex(indexMeta, indexHandledCondition));
            }
        }

        //Group the indexes by their first attribute.
        var distinctFirstAttributes = potentialIndexs.Select(o => o.Index.Attributes[0].Name).Distinct();

        foreach (var distinctFirstAttribute in distinctFirstAttributes)
        {
            //Find all indexes with the same first attribute:
            var indexGroup = potentialIndexs.Where(o => o.FirstAttributeName == distinctFirstAttribute);

            //For the group of indexes, find the one index that handles the most keys but also has the fewest attributes.
            //firstIndexInGroup cannot be null here: every distinct first attribute came from at least one potential index.
            var firstIndexInGroup = (from o in indexGroup select o)
                .OrderByDescending(s => s.IndexHandledConditions.Count)
                .ThenBy(t => t.Index.Attributes.Count).FirstOrDefault();

            foreach (var indexHandledCondition in firstIndexInGroup.IndexHandledConditions)
            {
                //Mark the keys which are handled by this index as "handled".
                var handledKeys = (from o in indexConditions where o.Id == indexHandledCondition.Id select o).ToList();
                foreach (var handledKey in handledKeys)
                {
                    handledKey.Handled = true;
                }
            }

            indexSelection.Add(new IndexSelection(firstIndexInGroup.Index, firstIndexInGroup.IndexHandledConditions));
        }

        //Anything still unhandled must be satisfied by document-level filtering downstream.
        indexSelection.UnhandledKeys.AddRange((from o in indexConditions where o.Handled == false select o.Key).ToList());
    }
    catch (Exception ex)
    {
        core.Log.Write(String.Format("Failed to select indexes for process {0}.", transaction.ProcessId), ex);
        throw;
    }
}