/// <summary>
/// Inserts an index entry for a single document into a single index using a long lived index page catalog.
/// </summary>
/// <param name="transaction">The transaction this insert is part of (used for locking and logging).</param>
/// <param name="schemaMeta">Schema that owns the index; used for the duplicate-key error text.</param>
/// <param name="indexMeta">The index the document is being inserted into.</param>
/// <param name="document">The document whose indexed attribute values are being recorded.</param>
/// <param name="indexPageCatalog">Optional long-lived page catalog; when null, FindKeyPage resolves the catalog itself.</param>
/// <param name="flushPageCatalog">When true, the modified page catalog is written back to disk.</param>
/// <exception cref="DokdexDuplicateKeyViolation">Thrown when a unique index already contains a different document at the supplied key.</exception>
private void InsertDocumentIntoIndex(Transaction transaction, PersistSchema schemaMeta, PersistIndex indexMeta,
    PersistDocument document, PersistIndexPageCatalog indexPageCatalog, bool flushPageCatalog)
{
    try
    {
        var searchTokens = GetIndexSearchTokens(transaction, indexMeta, document);
        var findResult = FindKeyPage(transaction, indexMeta, searchTokens, indexPageCatalog);

        if (findResult.IsFullMatch)
        {
            //We found a full match for all supplied key values - add the document to the leaf collection.
            if (findResult.Leaf.DocumentIDs == null)
            {
                findResult.Leaf.DocumentIDs = new HashSet<Guid>();
            }

            //FIX: a unique index is violated as soon as ANY other document already occupies this key.
            //The original check (Count > 1) only fired after a duplicate had already been stored, letting
            //the first duplicate through silently. Re-inserting the same document is not a violation.
            if (indexMeta.IsUnique && findResult.Leaf.DocumentIDs.Count > 0
                && findResult.Leaf.DocumentIDs.Contains(document.Id) == false)
            {
                string exceptionText = string.Format("Duplicate key violation occurred for index [{0}]/[{1}]. Values: {{{2}}}",
                    schemaMeta.VirtualPath, indexMeta.Name, string.Join(",", searchTokens));
                throw new DokdexDuplicateKeyViolation(exceptionText);
            }

            findResult.Leaf.DocumentIDs.Add(document.Id);
        }
        else
        {
            //If we didn't find a full match for all supplied key values,
            //  then create the tree and add the document to the lowest leaf.
            //Note that we are going to start creating the leaf level at the findResult.ExtentLevel.
            //  This is because we may have a partial match and don't need to create the full tree.

            //FIX: indexPageCatalog is null when this is called through the short overload; lock(null)
            //throws ArgumentNullException. Fall back to the catalog FindKeyPage resolved for us.
            //NOTE(review): assumes findResult.Catalog is always non-null after FindKeyPage — confirm.
            lock ((object)indexPageCatalog ?? findResult.Catalog)
            {
                for (int i = findResult.ExtentLevel; i < searchTokens.Count; i++)
                {
                    findResult.Leaf = findResult.Leaves.AddNewleaf(searchTokens[i]);
                    findResult.Leaves = findResult.Leaf.Leaves;
                }

                if (findResult.Leaf.DocumentIDs == null)
                {
                    findResult.Leaf.DocumentIDs = new HashSet<Guid>();
                }

                findResult.Leaf.DocumentIDs.Add(document.Id);
            }
        }

        //Both branches flush identically, so the write-back is hoisted here (same execution order as before).
        if (flushPageCatalog)
        {
            core.IO.PutPBuf(transaction, indexMeta.DiskPath, findResult.Catalog);
        }
    }
    catch (Exception ex)
    {
        core.Log.Write(String.Format("Index document insert failed for process {0}.", transaction.ProcessId), ex);
        throw;
    }
}
/// <summary>
/// Inserts an index entry for a single document into each index in the schema.
/// </summary>
/// <param name="transaction">The transaction this insert is part of (used for locking and logging).</param>
/// <param name="schemaMeta">Schema whose index catalog is enumerated.</param>
/// <param name="document">The document to insert into every index in the schema.</param>
public void InsertDocumentIntoIndexes(Transaction transaction, PersistSchema schemaMeta, PersistDocument document)
{
    try
    {
        //Acquire the schema's index catalog under a read lock, then fan the insert out to every index it contains.
        var catalog = GetIndexCatalog(transaction, schemaMeta, LockOperation.Read);

        foreach (var index in catalog.Collection)
        {
            InsertDocumentIntoIndex(transaction, schemaMeta, index, document);
        }
    }
    catch (Exception ex)
    {
        core.Log.Write(String.Format("Multi-index insert failed for process {0}.", transaction.ProcessId), ex);
        throw;
    }
}
/// <summary>
/// Inserts an index entry for a single document into a single index.
/// Convenience overload: lets FindKeyPage resolve the page catalog (null) and always flushes it to disk (true).
/// </summary>
/// <param name="transaction">The transaction this insert is part of.</param>
/// <param name="schemaMeta">Schema that owns the index.</param>
/// <param name="indexMeta">The index the document is being inserted into.</param>
/// <param name="document">The document whose indexed attribute values are being recorded.</param>
private void InsertDocumentIntoIndex(Transaction transaction, PersistSchema schemaMeta, PersistIndex indexMeta, PersistDocument document)
    => InsertDocumentIntoIndex(transaction, schemaMeta, indexMeta, document, null, true);
/// <summary>
/// Builds the list of key values for a document against an index: for each indexed attribute, the
/// matching value is pulled from the document's JSON content. Attributes not present in the document
/// are skipped (no placeholder is added), so the result may be shorter than the attribute list.
/// </summary>
/// <param name="transaction">The transaction this lookup is part of (used for logging only).</param>
/// <param name="indexMeta">Index whose attribute names are extracted.</param>
/// <param name="document">Document whose Content is parsed as JSON.</param>
/// <returns>The extracted attribute values, in index attribute order.</returns>
private List<string> GetIndexSearchTokens(Transaction transaction, PersistIndex indexMeta, PersistDocument document)
{
    try
    {
        var result = new List<string>();

        //FIX: parse the document content once. The original re-parsed the full JSON body on every
        //loop iteration (once per indexed attribute) for no behavioral benefit.
        var jsonContent = JObject.Parse(document.Content);

        foreach (var indexAttribute in indexMeta.Attributes)
        {
            //Attribute name match is case-insensitive and culture-sensitive (unchanged).
            //NOTE(review): OrdinalIgnoreCase is the usual choice for identifier lookups — confirm
            //CurrentCultureIgnoreCase is intentional before changing it.
            if (jsonContent.TryGetValue(indexAttribute.Name, StringComparison.CurrentCultureIgnoreCase, out JToken jToken))
            {
                result.Add(jToken.ToString());
            }
        }

        return result;
    }
    catch (Exception ex)
    {
        core.Log.Write(String.Format("Failed to build index search tokens for process {0}.", transaction.ProcessId), ex);
        throw;
    }
}