/// <summary>
/// Collects the currently stored searchable word sets for a batch of documents.
/// </summary>
/// <param name="tran">Active transaction used for read-only table access.</param>
/// <param name="tableName">Name of the text-search table group.</param>
/// <param name="documentIDs">External document identifiers to look up.</param>
/// <returns>
/// Map from external document ID to its set of searchable words.
/// Documents that do not exist are simply omitted from the result.
/// </returns>
public Dictionary<byte[], HashSet<string>> GetDocumentsSearchables(Transaction tran, string tableName, HashSet<byte[]> documentIDs)
{
    // Open the external-to-internal id table (1) and the searchables table (3) for reading.
    var tables = new ITS()
    {
        e2i = tran.SelectTable<byte>(tableName, 1, 0),
        srch = tran.SelectTable<byte>(tableName, 3, 0),
    };
    tables.e2i.ValuesLazyLoadingIsOn = false;
    tables.srch.ValuesLazyLoadingIsOn = false;

    var result = new Dictionary<byte[], HashSet<string>>();

    foreach (var externalId in documentIDs)
    {
        var mapping = tables.e2i.Select<byte[], int>(externalId);
        if (!mapping.Exists)
            continue; // unknown document — skip

        // Internal id + suffix byte 0 addresses the document's current searchables entry.
        var srchKey = mapping.Value.To_4_bytes_array_BigEndian().Concat(new byte[] { 0 });
        byte[] rawSearchables = tables.srch.Select<byte[], byte[]>(srchKey, true).Value;

        // Helper always returns an instantiated set, never null.
        result[externalId] = GetSearchablesFromByteArray_AsHashSet(rawSearchables);
    }

    return result;
}
/// <summary>
/// Parses a C-CDA XML file into a <c>PatientClinicalInformation</c> model: patient
/// demographics, encompassing encounter, authoring clinic, documentationOf performers,
/// and the coded structured-body sections (allergies, problems, medications, ...).
/// </summary>
/// <param name="path">Path of the CCDA file to load.</param>
/// <param name="AsString">Not used by this method; kept for signature compatibility — TODO confirm with callers.</param>
/// <returns>The populated patient clinical information model.</returns>
public PatientClinicalInformation ParseCCDAFile(string path, bool AsString)
{
    ParseAddress prsadd = new ParseAddress();
    HL7SDK.Cda.ClinicalDocument doc = new HL7SDK.Cda.ClinicalDocument();

    // FIX: the StreamReader was never disposed, leaking the file handle on every call.
    using (StreamReader sr = new StreamReader(path))
    {
        doc.Load(sr);
    }

    // Patient clinical summary information
    ptInformation = new PatientClinicalInformation();
    // Patient demographic information
    ptDemographic = new PatientDemographicDetail();

    if (doc.RecordTarget != null)
    {
        // Patient address
        IADCollection ptaddess = doc.RecordTarget[0].PatientRole.Addr;
        AddressModel add = prsadd.FillAddress(ptaddess);
        ptDemographic.Street = add.street;
        ptDemographic.City = add.city;
        ptDemographic.State = add.state;
        ptDemographic.Country = add.country;
        ptDemographic.Zip = Convert.ToString(add.pinCode);

        // Patient name. NOTE(review): FillName's result was unused in the original;
        // the call is kept in case it has side effects inside ParseAddress — confirm.
        NameModel ptname = prsadd.FillName(doc.RecordTarget[0].PatientRole.Patient.Name);
        ptDemographic.FirstName = doc.RecordTarget[0].PatientRole.Patient.Name[0].FindENGiven();
        ptDemographic.LastName = doc.RecordTarget[0].PatientRole.Patient.Name[0].FindENFamily();

        // Patient date of birth
        ITS ts = doc.RecordTarget[0].PatientRole.Patient.BirthTime;
        ptDemographic.DateofBirth = ts.AsDateTime.ToString();

        // Patient gender. NOTE(review): any code other than "M" (including unknown) maps
        // to FEMALE — preserved as-is; confirm this is the intended fallback.
        ptDemographic.gender = doc.RecordTarget[0].PatientRole.Patient.AdministrativeGenderCode.Code == "M" ? "MALE" : "FEMALE";

        // Patient SSN: prefer the id under the SSN OID root; best-effort fallback to the
        // first id when absent (original behavior preserved).
        try
        {
            ptDemographic.SSN = doc.RecordTarget[0].PatientRole.Id.Where(k => k.Root == "2.16.840.1.113883.4.1").FirstOrDefault().Extension.ToString();
        }
        catch (Exception)
        {
            ptDemographic.SSN = doc.RecordTarget[0].PatientRole.Id.FirstOrDefault().Extension.ToString();
        }

        // Patient phone number
        ptDemographic.ContactNo = Convert.ToString(doc.RecordTarget[0].PatientRole.Telecom[0].Value);
        // Patient race
        ptDemographic.Race = doc.RecordTarget[0].PatientRole.Patient.RaceCode.DisplayName.ToString();

        // Patient language
        if (doc.RecordTarget[0].PatientRole.Patient.LanguageCommunication.Count > 0)
        {
            ptDemographic.PreferredLanguage = doc.RecordTarget[0].PatientRole.Patient.LanguageCommunication[0].LanguageCode.Code.ToString();
            ptDemographic.LanguageCode = doc.RecordTarget[0].PatientRole.Patient.LanguageCommunication[0].LanguageCode.Code.ToString();
        }

        // Patient ethnicity
        ptDemographic.Ethnicity = doc.RecordTarget[0].PatientRole.Patient.EthnicGroupCode.DisplayName;
        ptInformation.ptDemographicDetail = ptDemographic;
    }

    // ComponentOf: the encompassing encounter
    IEncompassingEncounter componentOf = doc.ComponentOf.EncompassingEncounter;
    ptInformation.EncounterNoteDate = componentOf.EffectiveTime.Low != null
        ? componentOf.EffectiveTime.Low.Value != null ? componentOf.EffectiveTime.Low.AsDateTime.ToString() : null
        : null;
    ptInformation.EncounterCode = componentOf.Code.Code == null ? null : componentOf.Code.Code.ToString();
    ptInformation.EncounterDescription = componentOf.Code.DisplayName == null ? null : componentOf.Code.DisplayName.ToString();
    if (componentOf.ResponsibleParty.AssignedEntity.AssignedPerson.Name.Count > 0)
    {
        // Performer name (staff / clinician)
        ptInformation.EncounterStaffName = componentOf.ResponsibleParty.AssignedEntity.AssignedPerson.Name[0].FindENFamily();
    }

    // Clinic / provider detail
    ptClinicInformation = new ClinicInformation();
    if (doc.Author != null)
    {
        // Clinic / provider address
        IADCollection ptaddess = doc.Author[0].AssignedAuthor.Addr;
        AddressModel add = prsadd.FillAddress(ptaddess);
        ptClinicInformation.ClinicCity = add.city;
        ptClinicInformation.ClinicState = add.state;
        ptClinicInformation.ClinicStreeet = add.street;
        ptClinicInformation.ClinicCountry = add.country;
        ptClinicInformation.ClinicZip = add.pinCode;

        // Clinic / provider phone number
        ptClinicInformation.ClinicPhoneNumber = doc.Author[0].AssignedAuthor.Telecom[0].Value;

        // Clinic / provider name — best effort: absent AsPerson/Name simply leaves it null.
        try
        {
            ptClinicInformation.ClinicName = doc.Author[0].AssignedAuthor.AsPerson.Name[0].FindENGiven();
        }
        catch (Exception)
        {
            // Intentionally ignored: clinic name is optional in the source document.
        }

        ptInformation.ptClinicInformation = ptClinicInformation;
    }

    // DocumentationOf: one entry per service event, taking the first performer's address/name.
    if (doc.DocumentationOf != null)
    {
        documentationOfInfo = new List<DocumentationOfList>();
        for (int i = 0; i < doc.DocumentationOf.Count; i++)
        {
            DocumentationOfList docof = new DocumentationOfList();
            IADCollection ptaddess = doc.DocumentationOf[i].ServiceEvent.Performer[0].AssignedEntity.Addr;
            AddressModel add = prsadd.FillAddress(ptaddess);
            docof.address = add.street;
            docof.city = add.city;
            docof.state = add.state;
            docof.pinCode = add.pinCode;
            if (doc.DocumentationOf[i].ServiceEvent.Performer[0].AssignedEntity.AssignedPerson.Name.Count > 0)
            {
                docof.staffName = doc.DocumentationOf[i].ServiceEvent.Performer[0].AssignedEntity.AssignedPerson.Name[0].FindENFamily();
            }
            documentationOfInfo.Add(docof);
        }
        ptInformation.documentationOfInfo = documentationOfInfo;
    }

    // Structured body: dispatch every section by its LOINC code to the matching filler.
    if (doc.Component.AsStructuredBody.Component != null)
    {
        IComponent3Collection item = doc.Component.AsStructuredBody.Component;
        IEnumerable<ISection> sections = item.Select(s => s.Section);
        // FIX: removed two deferred LINQ locals (section texts/items) that were never
        // enumerated and therefore had no effect.

        foreach (ISection funcststus in sections)
        {
            string snomdcode = funcststus.Code.Code;
            componentSections = new GetComponents();
            dataCollection = new ComponentDataCollection();
            switch (snomdcode)
            {
                case "48765-2": // Allergies
                    ptAllergies = componentSections.FillAllergies(funcststus.Entry);
                    break;
                case "11450-4": // Problems
                    ptProblemes = componentSections.FillProblems(funcststus.Entry);
                    break;
                case "29762-2": // Social history
                    ptSocialHistory = componentSections.FillSocialHistory(funcststus.Entry);
                    break;
                case "8716-3": // Vital signs
                    ptVitalSigns = componentSections.FillVitalSigns(funcststus.Entry);
                    break;
                case "10160-0": // Medications
                    ptMedication = componentSections.FillMedication(funcststus.Entry);
                    break;
                case "46240-8": // Encounters
                    ptEncounters = componentSections.FillEncounters(funcststus.Entry, ptInformation.EncounterStaffName);
                    break;
                case "30954-2": // Lab results
                    ptLabResults = componentSections.FillLabResults(funcststus.Entry);
                    break;
                case "46239-0": // Reason for visit
                    ptReason = componentSections.FillReason(funcststus);
                    break;
                case "11369-6": // Immunizations
                    ptImmunization = componentSections.FillImmunization(funcststus.Entry);
                    break;
                case "18776-5": // Plan of care
                    ptPlanOfCare = componentSections.FillPlanOfCare(funcststus.Entry);
                    break;
                case "42349-1": // Reason for transfer
                    reasonforTransfer = componentSections.FillReasonForTransfer(funcststus);
                    break;
                case "47519-4": // Procedures
                    ptProcedure = componentSections.FillProcedure(funcststus.Entry);
                    break;
                case "47420-5": // Functional status
                    ptFunctionalStatus = componentSections.FillFunctionalStatus(funcststus.Entry);
                    break;
            }
        }

        // Encapsulate all parsed sections into the single result model.
        ptInformation.ptAllergies = ptAllergies;
        ptInformation.ptProblemes = ptProblemes;
        ptInformation.ptVitalSigns = ptVitalSigns;
        ptInformation.ptSocialHistory = ptSocialHistory;
        ptInformation.ptMedication = ptMedication;
        ptInformation.ptEncounters = ptEncounters;
        ptInformation.ptLabResults = ptLabResults;
        ptInformation.ptReason = ptReason;
        ptInformation.ptImmunization = ptImmunization;
        ptInformation.ptPlanOfCare = ptPlanOfCare;
        ptInformation.reasonforTransfer = reasonforTransfer;
        ptInformation.ptProcedure = ptProcedure;
        ptInformation.ptFunctionalStatus = ptFunctionalStatus;
    }

    return ptInformation;
}
/// <summary>
/// Flushes pending document changes into the inverted text index for every supplied table:
/// computes per-document word diffs, promotes the "new" searchables (key suffix 1) to
/// "current" (key suffix 0), and updates per-word document bitmaps stored in blocks.
/// itbls and transaction must be supplied, to make it working from outside.
/// </summary>
/// <param name="itran">Transaction used for all reads/writes.</param>
/// <param name="xitbls">Table name -> ITS state holding the tables and changed document ids.</param>
internal void DoIndexing(Transaction itran, Dictionary<string, ITS> xitbls)
{
    // Timestamp taken before indexing starts; stored per table as the "last indexed" marker at the end.
    byte[] btUdtStart = DateTime.UtcNow.Ticks.To_8_bytes_array_BigEndian();
    ITS its = null;
    byte[] kA = null; // NOTE(review): assigned below but never read afterwards — looks dead; confirm
    byte[] kZ = null; // NOTE(review): same as kA
    byte[] newSrch = null;
    byte[] oldSrch = null;
    Row<string, byte[]> rWord = null;
    //Dictionary<string, WordInDocs> wds = new Dictionary<string, WordInDocs>();
    WordInDocs wd = null;
    uint iterBlockId = 0;  // id of the block currently materialized in 'block'
    int iterBlockLen = 0;  // stored byte length of that block (for reserve sizing)
    int blockSize = 0;
    byte[] btBlock = null;
    // numberInBlock -> serialized per-word bitmap, for the currently loaded block
    Dictionary<uint, byte[]> block = new Dictionary<uint, byte[]>();
    byte[] btWah = null;
    byte[] tmp = null;
    byte[] val = null;
    WABI wah = null;

    foreach (var tbl in xitbls)
    {
        its = tbl.Value;
        if (its.srch == null) //Can be instantiated in insert procedure, depending how we use indexer
        {
            its.srch = itran.InsertTable<byte>(tbl.Key, 3, 0);
            its.srch.ValuesLazyLoadingIsOn = false;
        }

        //Are instantiated only hear
        its.blocks = itran.InsertTable<byte>(tbl.Key, 10, 0);
        its.words = itran.InsertTable<byte>(tbl.Key, 20, 0);
        // Persisted cursor: current block id (key 11) and next word slot inside it (key 12).
        its.currentBlock = itran.Select<int, uint>(tbl.Key, 11).Value;
        its.numberInBlock = itran.Select<int, uint>(tbl.Key, 12).Value;
        its.blocks.ValuesLazyLoadingIsOn = false;
        its.words.ValuesLazyLoadingIsOn = false;

        if (its.currentBlock == 0)
        {
            // First run for this table: start at block 1, slot 0.
            its.numberInBlock = 0;
            its.currentBlock = 1;
        }

        //Getting latest indexing time for that table
        var litRow = itran.Select<byte, byte[]>(tbl.Key, 4);
        byte[] lastIndexed = DateTime.MinValue.Ticks.To_8_bytes_array_BigEndian();
        if (litRow.Exists)
        {
            lastIndexed = litRow.Value;
        }
        kA = lastIndexed.Concat(int.MinValue.To_4_bytes_array_BigEndian());
        kZ = DateTime.MaxValue.Ticks.To_8_bytes_array_BigEndian().Concat(int.MaxValue.To_4_bytes_array_BigEndian());

        //Key is word, Value.Item1 is documents list from which this word must be removed, Value.Item2 is documents List where word must be added
        Dictionary<string, Tuple<HashSet<int>, HashSet<int>, WordInDocs>> ds =
            new Dictionary<string, Tuple<HashSet<int>, HashSet<int>, WordInDocs>>();
        Tuple<HashSet<int>, HashSet<int>, WordInDocs> tpl = null;

        //Dictionary<string, byte[]> tmpWrds = new Dictionary<string, byte[]>(StringComparison.Ordinal);
        // Batched word-definition inserts (word -> blockId+numberInBlock), flushed when it grows too big.
        var tmpWrds = new SortedDictionary<string, byte[]>(StringComparer.Ordinal);

        // Ensures 'ds'/'tpl' hold an entry for 'word': loads an existing word definition or
        // allocates a new (blockId, numberInBlock) slot, advancing the block cursor.
        Action<string> createNew = (word) =>
        {
            if (!tmpWrds.ContainsKey(word))
            {
                rWord = its.words.Select<string, byte[]>(word, true);
                wd = new WordInDocs();
                if (rWord.Exists)
                {
                    // Known word: value layout is 4 bytes blockId + 4 bytes numberInBlock, big-endian.
                    wd.BlockId = rWord.Value.Substring(0, 4).To_UInt32_BigEndian();
                    wd.NumberInBlock = rWord.Value.Substring(4, 4).To_UInt32_BigEndian();
                }
                else
                {
                    // New word: take the next slot, rolling over into a fresh block when full.
                    its.numberInBlock++;
                    if (its.numberInBlock > itran._transactionUnit.TransactionsCoordinator._engine.Configuration.TextSearchConfig.QuantityOfWordsInBlock) //Quantity of words (WAHs) in block
                    {
                        its.currentBlock++;
                        its.numberInBlock = 1;
                    }
                    wd.BlockId = its.currentBlock;
                    wd.NumberInBlock = its.numberInBlock;
                    //Inserting new definition
                    // its.words.Insert<string, byte[]>(word, wd.BlockId.To_4_bytes_array_BigEndian().Concat(wd.NumberInBlock.To_4_bytes_array_BigEndian()));
                    if (tmpWrds.Count < 100000)
                    {
                        tmpWrds[word] = wd.BlockId.To_4_bytes_array_BigEndian().Concat(wd.NumberInBlock.To_4_bytes_array_BigEndian());
                    }
                    else
                    {
                        // Batch limit reached: flush accumulated definitions to the words table.
                        // its.words.Insert<string, byte[]>(word, wd.BlockId.To_4_bytes_array_BigEndian().Concat(wd.NumberInBlock.To_4_bytes_array_BigEndian()));
                        foreach (var tmpwrd in tmpWrds)
                        {
                            its.words.Insert<string, byte[]>(tmpwrd.Key, tmpwrd.Value);
                        }
                        tmpWrds.Clear();
                    }
                }
                tpl = new Tuple<HashSet<int>, HashSet<int>, WordInDocs>(new HashSet<int>(), new HashSet<int>(), wd);
                ds[word] = tpl;
            }
        };

        //List<byte[]> docs2Change = new List<byte[]>();
        // Internal doc id (4 bytes BE) -> its new searchables; promoted to "current" after the diff pass.
        Dictionary<byte[], byte[]> docs2Change = new Dictionary<byte[], byte[]>();
        Tuple<HashSet<string>, HashSet<string>> diff;

        //foreach (var docId in its.ChangedDocIds)
        foreach (var docId in its.ChangedDocIds.OrderBy(r => r))
        {
            //diff will return list of words to be removed and list of words to be added
            // Suffix 0 = current searchables, suffix 1 = newly inserted searchables.
            oldSrch = its.srch.Select<byte[], byte[]>(docId.To_4_bytes_array_BigEndian().Concat(new byte[] { 0 })).Value;
            newSrch = its.srch.Select<byte[], byte[]>(docId.To_4_bytes_array_BigEndian().Concat(new byte[] { 1 })).Value;

            diff = WordsDiff(
                oldSrch, //Current searchables
                newSrch  //new
                );

            //diff = WordsDiff(
            //    its.srch.Select<byte[], byte[]>(docId.To_4_bytes_array_BigEndian().Concat(new byte[] { 0 }), true).Value, //Current searchables
            //    newSrch //new
            //    );

            //Copying new searchables to current searchables
            docs2Change.Add(docId.To_4_bytes_array_BigEndian(), newSrch);
            //its.srch.ChangeKey<byte[]>(docId.To_4_bytes_array_BigEndian().Concat(new byte[] { 1 }), docId.To_4_bytes_array_BigEndian().Concat(new byte[] { 0 }));

            //To be removed
            foreach (var word in diff.Item1)
            {
                if (!ds.TryGetValue(word, out tpl))
                {
                    createNew(word); // also sets 'tpl' via the closure
                }
                tpl.Item1.Add(docId);
            }
            //To be added
            foreach (var word in diff.Item2)
            {
                if (!ds.TryGetValue(word, out tpl))
                {
                    createNew(word); // also sets 'tpl' via the closure
                }
                tpl.Item2.Add(docId);
            }
        }//eo foreach new searchables, end of document iteration

        // Promote "new" (suffix 1) entries into "current" (suffix 0), in deterministic key order.
        foreach (var d2c in docs2Change.OrderBy(r => r.Key.ToBytesString()))
        {
            its.srch.RemoveKey<byte[]>(d2c.Key.Concat(new byte[] { 1 }));
            its.srch.Insert<byte[], byte[]>(d2c.Key.Concat(new byte[] { 0 }), d2c.Value);
            // its.srch.ChangeKey<byte[]>(d2c.Concat(new byte[] { 1 }), d2c.Concat(new byte[] { 0 }));
        }

        //foreach (var eeel in its.srch.SelectForward<byte[], byte[]>(false).Take(50))
        //    Console.WriteLine(eeel.Key.ToBytesString());

        // Flush any word definitions still buffered in the batch dictionary.
        foreach (var tmpwrd in tmpWrds)
        {
            its.words.Insert<string, byte[]>(tmpwrd.Key, tmpwrd.Value);
        }
        tmpWrds.Clear();

        #region "S1"
        //Inserting WAH blocks
        //Going through the list of collected words order by blockID, fill blocks and save them
        block.Clear();
        iterBlockId = 0;

        foreach (var wd1 in ds.OrderBy(r => r.Value.Item3.BlockId))
        {
            //reading block if it's not loaded
            if (wd1.Value.Item3.BlockId != iterBlockId)
            {
                if (iterBlockId > 0)
                {
                    //We must save current datablock
                    if (block.Count() > 0)
                    {
                        btBlock = block.Encode_DICT_PROTO_UINT_BYTEARRAY(Compression.eCompressionMethod.Gzip);
                        // Stored layout: 4-byte big-endian payload length + payload, padded per the
                        // reserve strategy below to limit future reallocations.
                        if ((btBlock.Length + 4) < itran._transactionUnit.TransactionsCoordinator._engine.Configuration.TextSearchConfig.MinimalBlockReservInBytes) //Minimal reserv
                        {
                            tmp = new byte[itran._transactionUnit.TransactionsCoordinator._engine.Configuration.TextSearchConfig.MinimalBlockReservInBytes];
                            tmp.CopyInside(0, btBlock.Length.To_4_bytes_array_BigEndian());
                            tmp.CopyInside(4, btBlock);
                        }
                        else if ((btBlock.Length + 4) > iterBlockLen)
                        {
                            //Doubling reserve
                            tmp = new byte[btBlock.Length * 2];
                            tmp.CopyInside(0, btBlock.Length.To_4_bytes_array_BigEndian());
                            tmp.CopyInside(4, btBlock);
                        }
                        else
                        {
                            //Filling existing space
                            tmp = new byte[btBlock.Length + 4];
                            tmp.CopyInside(0, btBlock.Length.To_4_bytes_array_BigEndian());
                            tmp.CopyInside(4, btBlock);
                        }
                        //Saving into DB
                        its.blocks.Insert<uint, byte[]>(iterBlockId, tmp);
                    }
                    block.Clear();
                }

                // Load (or initialize empty) the block this word belongs to.
                val = its.blocks.Select<uint, byte[]>(wd1.Value.Item3.BlockId).Value;
                iterBlockId = wd1.Value.Item3.BlockId;
                iterBlockLen = val == null ? 0 : val.Length;
                if (val != null)
                {
                    blockSize = val.Substring(0, 4).To_Int32_BigEndian();
                    if (blockSize > 0)
                    {
                        btBlock = val.Substring(4, blockSize);
                        block.Clear();
                        btBlock.Decode_DICT_PROTO_UINT_BYTEARRAY(block, Compression.eCompressionMethod.Gzip);
                    }
                    else
                    {
                        block.Clear();
                    }
                }
                else
                {
                    block.Clear();
                }
            }

            //Getting from Block
            if (block.TryGetValue((uint)wd1.Value.Item3.NumberInBlock, out btWah))
            {
                wah = new WABI(btWah);
            }
            else
            {
                wah = new WABI(null);
            }

            //Adding documents
            foreach (var dId in wd1.Value.Item2)
            {
                wah.Add(dId, true);
            }
            //Removing documents
            foreach (var dId in wd1.Value.Item1)
            {
                wah.Add(dId, false);
            }

            block[wd1.Value.Item3.NumberInBlock] = wah.GetCompressedByteArray();
        }//eo foreach wds

        //Saving last element
        //saving current block
        if (block.Count() > 0)
        {
            //!!!!!!!!!!! Remake it for smoothing storage
            btBlock = block.Encode_DICT_PROTO_UINT_BYTEARRAY(Compression.eCompressionMethod.Gzip);
            if ((btBlock.Length + 4) < itran._transactionUnit.TransactionsCoordinator._engine.Configuration.TextSearchConfig.MinimalBlockReservInBytes) //Minimal reserve
            {
                tmp = new byte[itran._transactionUnit.TransactionsCoordinator._engine.Configuration.TextSearchConfig.MinimalBlockReservInBytes];
                tmp.CopyInside(0, btBlock.Length.To_4_bytes_array_BigEndian());
                tmp.CopyInside(4, btBlock);
            }
            else if ((btBlock.Length + 4) > iterBlockLen)
            {
                //Doubling reserve
                tmp = new byte[btBlock.Length * 2];
                tmp.CopyInside(0, btBlock.Length.To_4_bytes_array_BigEndian());
                tmp.CopyInside(4, btBlock);
            }
            else
            {
                //Filling existing space
                tmp = new byte[btBlock.Length + 4];
                tmp.CopyInside(0, btBlock.Length.To_4_bytes_array_BigEndian());
                tmp.CopyInside(4, btBlock);
            }
            //Saving into DB
            its.blocks.Insert<uint, byte[]>(iterBlockId, tmp);
        }
        block.Clear();
        #endregion

        // Persist the advanced block cursor for the next indexing run.
        itran.Insert<int, uint>(tbl.Key, 11, its.currentBlock);
        itran.Insert<int, uint>(tbl.Key, 12, its.numberInBlock);

        //Setting last indexing time
        itran.Insert<byte, byte[]>(tbl.Key, 4, btUdtStart);
    }//eo foreach tablesToIndex
}
/// <summary>
/// Registers a document's text for full-text search: parses the supplied text into words,
/// maps the external document id to an internal int id, and stores the flattened word list
/// under key suffix 1 ("new searchables") to be picked up by the indexer.
/// </summary>
/// <param name="tran">Active transaction.</param>
/// <param name="tableName">Name of the text-search table group.</param>
/// <param name="documentId">External document identifier.</param>
/// <param name="containsWords">Text whose words are searchable by prefix ("contains").</param>
/// <param name="fullMatchWords">Text whose words are searchable by full match only.</param>
/// <param name="deferredIndexing">If true, the doc id is queued in defferedDocIds instead of ChangedDocIds.</param>
/// <param name="containsMinimalLength">Minimal word length for the "contains" logic.</param>
/// <param name="iMode">Insert (replace), Append (add words) or Remove (remove words).</param>
public void InsertDocumentText(Transaction tran, string tableName, byte[] documentId, string containsWords, string fullMatchWords, bool deferredIndexing, int containsMinimalLength, eInsertMode iMode)
{
    //tran._transactionUnit.TransactionsCoordinator._engine.Configuration.
    if (String.IsNullOrEmpty(tableName) || documentId == null)
    {
        return;
    }
    // Append/Remove with no words supplied is a no-op.
    if ((iMode == eInsertMode.Append || iMode == eInsertMode.Remove) && (String.IsNullOrEmpty(containsWords) && String.IsNullOrEmpty(fullMatchWords)))
    {
        return;
    }

    //tran._transactionUnit.TransactionsCoordinator._engine.Configuration.TextSearchConfig.QuantityOfWordsInBlock
    SortedDictionary<string, WordDefinition> pST = this.GetWordsDefinitionFromText(containsWords, fullMatchWords, containsMinimalLength,
        tran._transactionUnit.TransactionsCoordinator._engine.Configuration.TextSearchConfig.MaximalWordSize);

    //flattened searchables
    StringBuilder sbPs = new StringBuilder();

    //Registering all tables for text-search in current transaction
    ITS its = null;
    if (!itbls.TryGetValue(tableName, out its))
    {
        its = new ITS()
        {
            e2i = tran.InsertTable<byte>(tableName, 1, 0),  // external id -> internal id
            i2e = tran.InsertTable<byte>(tableName, 2, 0),  // internal id -> external id
            srch = tran.InsertTable<byte>(tableName, 3, 0), // internal id + suffix -> searchables
        };
        its.e2i.ValuesLazyLoadingIsOn = false;
        its.i2e.ValuesLazyLoadingIsOn = false;
        its.srch.ValuesLazyLoadingIsOn = false;
        itbls.Add(tableName, its);
    }

    //Internal document ID
    int iId = 0;

    //Searching document by externalID
    var r1 = its.e2i.Select<byte[], int>(documentId);
    if (r1.Exists) //DOCUMENT EXISTS
    {
        iId = r1.Value;
        //Getting old searchables for this document (key suffix 0 = current searchables)
        byte[] oldSrch = its.srch.Select<byte[], byte[]>(iId.To_4_bytes_array_BigEndian().Concat(new byte[] { 0 }), true).Value;
        HashSet<string> oldSearchables = GetSearchablesFromByteArray_AsHashSet(oldSrch); //always instantiated hashset

        switch (iMode)
        {
            case eInsertMode.Insert:
                //Comparing: identical word sets mean nothing changed.
                if (oldSearchables.Intersect(pST.Keys).Count() == oldSearchables.Count && oldSearchables.Count == pST.Keys.Count)
                {
                    return; //Going out, nothing to insert
                }
                foreach (var ps1i in pST)
                {
                    sbPs.Append(ps1i.Key);
                    sbPs.Append(" ");
                }
                break;
            case eInsertMode.Append:
            case eInsertMode.Remove:
                // Append of an identical word set is also a no-op.
                if ((iMode == eInsertMode.Append) && oldSearchables.Intersect(pST.Keys).Count() == oldSearchables.Count && oldSearchables.Count == pST.Keys.Count)
                {
                    return; //Going out, nothing to insert
                }
                // Merge (Append) or subtract (Remove) the new words into the old set.
                foreach (var ew in pST.Keys)
                {
                    if (iMode == eInsertMode.Append)
                    {
                        oldSearchables.Add(ew);
                    }
                    else
                    {
                        oldSearchables.Remove(ew);
                    }
                }
                foreach (var el in oldSearchables)
                {
                    sbPs.Append(el);
                    sbPs.Append(" ");
                }
                break;
        }
    }
    else
    {
        //DOCUMENT NEW
        if (pST.Count < 1)
        {
            return; //Going out, nothing to insert
        }
        //Document is new: Append behaves like Insert, Remove is meaningless.
        if (iMode == eInsertMode.Append)
        {
            iMode = eInsertMode.Insert;
        }
        else if (iMode == eInsertMode.Remove)
        {
            return; //Going out
        }
        // Allocate the next internal id and store both id mappings.
        iId = its.i2e.Max<int, byte[]>().Key;
        iId++;
        its.e2i.Insert<byte[], int>(documentId, iId);
        its.i2e.Insert<int, byte[]>(iId, documentId);
        foreach (var ps1i in pST)
        {
            sbPs.Append(ps1i.Key);
            sbPs.Append(" ");
        }
    }

    this.InsertWasPerformed = true;

    //Inserting into affected table: immediate indexing queue or deferred per-table set.
    if (!deferredIndexing)
    {
        its.ChangedDocIds.Add(iId);
    }
    else
    {
        if (!defferedDocIds.ContainsKey(tableName))
        {
            defferedDocIds[tableName] = new HashSet<uint>();
        }
        defferedDocIds[tableName].Add((uint)iId);
    }

    //Inserting searchables to be indexed (key suffix 1 = pending/new searchables)
    its.srch.Insert<byte[], byte[]>(iId.To_4_bytes_array_BigEndian().Concat(new byte[] { 1 }), GetByteArrayFromSearchbles(sbPs.ToString()));
}
/// <summary>
/// Returns, for each document in <paramref name="documentIDs"/> that exists in the index,
/// the set of words currently stored as its searchables.
/// </summary>
/// <param name="tran">Active transaction used for table reads.</param>
/// <param name="tableName">Name of the text-search table group.</param>
/// <param name="documentIDs">External document identifiers to resolve.</param>
/// <returns>External document ID -> set of searchable words (unknown documents omitted).</returns>
public Dictionary<byte[], HashSet<string>> GetDocumentsSearchables(Transaction tran, string tableName, HashSet<byte[]> documentIDs)
{
    ITS lookup = new ITS();
    lookup.e2i = tran.SelectTable<byte>(tableName, 1, 0);   // external id -> internal id
    lookup.srch = tran.SelectTable<byte>(tableName, 3, 0);  // internal id + suffix -> searchables blob
    lookup.e2i.ValuesLazyLoadingIsOn = false;
    lookup.srch.ValuesLazyLoadingIsOn = false;

    Dictionary<byte[], HashSet<string>> found = new Dictionary<byte[], HashSet<string>>();

    foreach (var docId in documentIDs)
    {
        var internalRow = lookup.e2i.Select<byte[], int>(docId);
        if (internalRow.Exists)
        {
            // Suffix byte 0 addresses the document's current searchables entry.
            byte[] blob = lookup.srch.Select<byte[], byte[]>(
                internalRow.Value.To_4_bytes_array_BigEndian().Concat(new byte[] { 0 }), true).Value;

            // Helper never returns null, so the entry is always a usable set.
            found[docId] = GetSearchablesFromByteArray_AsHashSet(blob);
        }
    }

    return found;
}
/// <summary>
/// Registers a document's text for full-text search: parses the supplied text into words,
/// maps the external document id to an internal int id, and stores the flattened word list
/// under key suffix 1 ("new searchables") to be picked up by the indexer.
/// </summary>
/// <param name="tran">Active transaction.</param>
/// <param name="tableName">Name of the text-search table group.</param>
/// <param name="documentId">External document identifier.</param>
/// <param name="containsWords">Text whose words are searchable by prefix ("contains").</param>
/// <param name="fullMatchWords">Text whose words are searchable by full match only.</param>
/// <param name="deferredIndexing">If true, the doc id is queued in defferedDocIds instead of ChangedDocIds.</param>
/// <param name="containsMinimalLength">Minimal word length for the "contains" logic.</param>
/// <param name="iMode">Insert (replace), Append (add words) or Remove (remove words).</param>
public void InsertDocumentText(Transaction tran, string tableName, byte[] documentId, string containsWords, string fullMatchWords, bool deferredIndexing, int containsMinimalLength, eInsertMode iMode)
{
    //tran._transactionUnit.TransactionsCoordinator._engine.Configuration.
    if (String.IsNullOrEmpty(tableName) || documentId == null)
        return;
    // Append/Remove with no words supplied is a no-op.
    if ((iMode == eInsertMode.Append || iMode == eInsertMode.Remove) && (String.IsNullOrEmpty(containsWords) && String.IsNullOrEmpty(fullMatchWords)))
        return;

    SortedDictionary<string, WordDefinition> pST = this.GetWordsDefinitionFromText(containsWords, fullMatchWords, containsMinimalLength);

    //flattened searchables
    StringBuilder sbPs = new StringBuilder();

    //Registering all tables for text-search in current transaction
    ITS its = null;
    if (!itbls.TryGetValue(tableName, out its))
    {
        its = new ITS()
        {
            e2i = tran.InsertTable<byte>(tableName, 1, 0),  // external id -> internal id
            i2e = tran.InsertTable<byte>(tableName, 2, 0),  // internal id -> external id
            srch = tran.InsertTable<byte>(tableName, 3, 0), // internal id + suffix -> searchables
        };
        its.e2i.ValuesLazyLoadingIsOn = false;
        its.i2e.ValuesLazyLoadingIsOn = false;
        its.srch.ValuesLazyLoadingIsOn = false;
        itbls.Add(tableName, its);
    }

    //Internal document ID
    int iId = 0;

    //Searching document by externalID
    var r1 = its.e2i.Select<byte[], int>(documentId);
    if (r1.Exists) //DOCUMENT EXISTS
    {
        iId = r1.Value;
        //Getting old searchables for this document (key suffix 0 = current searchables)
        byte[] oldSrch = its.srch.Select<byte[], byte[]>(iId.To_4_bytes_array_BigEndian().Concat(new byte[] { 0 }), true).Value;
        HashSet<string> oldSearchables = GetSearchablesFromByteArray_AsHashSet(oldSrch); //always instantiated hashset

        switch (iMode)
        {
            case eInsertMode.Insert:
                //Comparing: identical word sets mean nothing changed.
                if (oldSearchables.Intersect(pST.Keys).Count() == oldSearchables.Count && oldSearchables.Count == pST.Keys.Count)
                    return; //Going out, nothing to insert

                foreach (var ps1i in pST)
                {
                    sbPs.Append(ps1i.Key);
                    sbPs.Append(" ");
                }
                break;
            case eInsertMode.Append:
            case eInsertMode.Remove:
                // Append of an identical word set is also a no-op.
                if ((iMode == eInsertMode.Append) && oldSearchables.Intersect(pST.Keys).Count() == oldSearchables.Count && oldSearchables.Count == pST.Keys.Count)
                    return; //Going out, nothing to insert

                // Merge (Append) or subtract (Remove) the new words into the old set.
                foreach (var ew in pST.Keys)
                {
                    if (iMode == eInsertMode.Append)
                        oldSearchables.Add(ew);
                    else
                        oldSearchables.Remove(ew);
                }

                foreach (var el in oldSearchables)
                {
                    sbPs.Append(el);
                    sbPs.Append(" ");
                }
                break;
        }
    }
    else
    {
        //DOCUMENT NEW
        if (pST.Count < 1)
            return; //Going out, nothing to insert

        //Document is new: Append behaves like Insert, Remove is meaningless.
        if (iMode == eInsertMode.Append)
            iMode = eInsertMode.Insert;
        else if (iMode == eInsertMode.Remove)
            return; //Going out

        // Allocate the next internal id and store both id mappings.
        iId = its.i2e.Max<int, byte[]>().Key;
        iId++;
        its.e2i.Insert<byte[], int>(documentId, iId);
        its.i2e.Insert<int, byte[]>(iId, documentId);

        foreach (var ps1i in pST)
        {
            sbPs.Append(ps1i.Key);
            sbPs.Append(" ");
        }
    }

    this.InsertWasPerformed = true;

    //Inserting into affected table: immediate indexing queue or deferred per-table set.
    if (!deferredIndexing)
        its.ChangedDocIds.Add(iId);
    else
    {
        if (!defferedDocIds.ContainsKey(tableName))
            defferedDocIds[tableName] = new HashSet<uint>();
        defferedDocIds[tableName].Add((uint)iId);
    }

    //Inserting searchables to be indexed (key suffix 1 = pending/new searchables)
    its.srch.Insert<byte[], byte[]>(iId.To_4_bytes_array_BigEndian().Concat(new byte[] { 1 }), GetByteArrayFromSearchbles(sbPs.ToString()));
}