private int GetRank()
{
    WriteDiagnostics();

    int rank = -1;
    if (KeywordList.Contains("1") && KeywordList.Contains("best"))
    {
        rank = 0;
    }
    else if (KeywordList.Contains("1"))
    {
        rank = 1;
    }
    else if (KeywordList.Contains("2"))
    {
        rank = 2;
    }
    else if (KeywordList.Contains("3"))
    {
        rank = 3;
    }
    else
    {
        // No rank keyword found: fall back to rank 2.
        rank = 2;
    }
    return rank;
}
private void SetRank(int rank)
{
    // Rebuilding the keyword list this way is necessary; otherwise other recently
    // selected List View items get promoted as well. There may be a shared reference
    // to the keyword list somewhere; the exact cause is unclear, but copying it here
    // is required.
    _keywordList = new List<string>(_keywordList);

    List<string> rankWords = new List<string>(4);
    rankWords.AddRange(new string[] { "best", "1", "2", "3" });
    RemoveKeywords(rankWords);

    switch (rank)
    {
        case 0:
            KeywordList.AddRange(new string[] { "1", "best" });
            break;
        case 1:
            KeywordList.Add("1");
            break;
        case 2:
            KeywordList.Add("2");
            break;
        case 3:
            KeywordList.Add("3");
            break;
    }
    KeywordList.Sort();
}
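The symptom described in that comment is consistent with several items holding the same List<string> instance. A minimal standalone sketch of why copying before mutating keeps the edit local; the Item type and demo program are hypothetical and not part of the original code:

using System;
using System.Collections.Generic;

// Hypothetical item type; several instances were handed the same list.
class Item
{
    public List<string> Keywords;
}

static class SharedListDemo
{
    static void Main()
    {
        var shared = new List<string> { "2" };
        var a = new Item { Keywords = shared };
        var b = new Item { Keywords = shared };

        // Mutating through 'a' without copying also changes what 'b' sees.
        a.Keywords.Add("best");
        Console.WriteLine(b.Keywords.Count);   // 2: 'b' was "promoted" too

        // Copying first, as SetRank does, isolates the change to 'a'.
        a.Keywords = new List<string>(a.Keywords);
        a.Keywords.Add("1");
        Console.WriteLine(b.Keywords.Count);   // still 2
    }
}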
private void SetRank(int rank)
{
    ArrayList rankWords = new ArrayList(4);
    rankWords.AddRange(new string[] { "best", "1", "2", "3" });
    RemoveKeywords(rankWords);

    switch (rank)
    {
        case 0:
            KeywordList.AddRange(new string[] { "1", "best" });
            break;
        case 1:
            KeywordList.Add("1");
            break;
        case 2:
            KeywordList.Add("2");
            break;
        case 3:
            KeywordList.Add("3");
            break;
    }
    KeywordList.Sort();
}
public void RemoveKeywords(ArrayList keywords)
{
    foreach (string keyword in keywords)
    {
        if (KeywordList.Contains(keyword))
        {
            KeywordList.Remove(keyword);
        }
    }
}
private FWKeyword ApplyKeywordFilter(String currentNameFilter)
{
    FWKeyword myKeywordList = Conn.MasterData.ClassFactory.NewFWKeyword(new Keyword(), null);
    myKeywordList.Id = currentNameFilter;
    List<FWKeyword> filteredKeywordList = KeywordList.Find(currentNameFilter);
    myKeywordList.Children = new FWKeywordChildren(null, filteredKeywordList);
    return myKeywordList;
}
public void AddKeywords(ArrayList keywords)
{
    foreach (string keyword in keywords)
    {
        if (!KeywordList.Contains(keyword))
        {
            KeywordList.Add(keyword);
        }
    }
    KeywordList.Sort();
}
public override void Visit(FullTextQueryMatchOp q)
{
    q.wno = kwdList.Count;
    KeywordList list = new KeywordList(q.word);
#if USE_GENERICS
    list.list = impl.inverseIndex[q.word];
#else
    list.list = (InverseList)impl.inverseIndex[q.word];
#endif
    kwdList.Add(list);
}
public string this[KeywordList keyword]
{
    get
    {
        Keyword[] found = values.Where(kw => kw.key == keyword).ToArray();
        if (found.Length > 0)
        {
            return found[0].value;
        }
        else
        {
            throw new System.ArgumentOutOfRangeException();
        }
    }
}
public void Load()
{
    Debug.Log("CERN archive loading from path: " + jsonPath);

    if (loadImages)
    {
        images = LoadJson<ImageList>(jsonPath + "/files.json");
        images.PrepareData();
        Debug.Log("CERN Images loaded: " + images.items.Length);
    }
    if (loadRecords)
    {
        records = LoadJson<RecordList>(jsonPath + "/records.json");
        records.PrepareData();
        Debug.Log("CERN Records loaded: " + records.items.Length);
    }
    if (loadAuthors)
    {
        authors = LoadJson<AuthorList>(jsonPath + "/authors.json");
        authors.PrepareData();
        Debug.Log("CERN Authors loaded: " + authors.items.Length);
    }
    if (loadKeywords)
    {
        keywords = LoadJson<KeywordList>(jsonPath + "/keywords.json");
        keywords.PrepareData();
        Debug.Log("CERN Keywords loaded: " + keywords.items.Length);
    }
    if (loadSubjects)
    {
        subjects = LoadJson<SubjectList>(jsonPath + "/subjects.json");
        subjects.PrepareData();
        Debug.Log("CERN Subjects loaded: " + subjects.items.Length);
    }
    if (loadCopyrightHolders)
    {
        copyrightHolders = LoadJson<CopyrightHolderList>(jsonPath + "/copyright.json");
        copyrightHolders.PrepareData();
        Debug.Log("CERN CopyrightHolders loaded: " + copyrightHolders.items.Length);
    }
    if (loadCollections)
    {
        collections = LoadJson<CollectionList>(jsonPath + "/collections.json");
        collections.PrepareData();
        Debug.Log("CERN Collections loaded: " + collections.items.Length);
    }

    dataLoaded = true;
}
public ResponseResult<KeywordResponse> GetKeywordList([FromBody] KeywordListRequest model)
{
    try
    {
        // Use the cached access token if available; otherwise fetch a new one and cache it.
        string accessTokenStr = _redisHandler.GetAccessToken();
        if (string.IsNullOrEmpty(accessTokenStr))
        {
            accessTokenStr = _weChatServiceHandler.GetAccessToken().access_token;
            _redisHandler.SaveAccessToken(accessTokenStr);
        }

        KeywordList keywordList = _weChatServiceHandler.GetKeywordList(accessTokenStr, model.Id);
        if (keywordList.errcode == 0)
        {
            return new ResponseResult<KeywordResponse>()
            {
                ErrCode = 0,
                ErrMsg = "success",
                Data = new KeywordResponse()
                {
                    id = keywordList.id,
                    title = keywordList.title,
                    keyword_list = keywordList.keyword_list
                }
            };
        }
        else
        {
            logger.Error(keywordList.errmsg);
            return new ResponseResult<KeywordResponse>()
            {
                ErrCode = 1001,
                ErrMsg = keywordList.errmsg,
                Data = null
            };
        }
    }
    catch (Exception ex)
    {
        logger.Error(ex);
        return new ResponseResult<KeywordResponse>()
        {
            ErrCode = 1003,
            ErrMsg = ex.Message,
            Data = null
        };
    }
}
internal virtual int calculateEstimation(FullTextQuery query, int nResults)
{
    switch (query.op)
    {
    case FullTextQuery.Operator.And:
    case FullTextQuery.Operator.Near:
    {
        // Conjunction: the result set is bounded by the rarer operand.
        int left = calculateEstimation(((FullTextQueryBinaryOp)query).left, nResults);
        int right = calculateEstimation(((FullTextQueryBinaryOp)query).right, nResults);
        return left < right ? left : right;
    }

    case FullTextQuery.Operator.Or:
    {
        // Disjunction: bounded below by the more common operand.
        int left = calculateEstimation(((FullTextQueryBinaryOp)query).left, nResults);
        int right = calculateEstimation(((FullTextQueryBinaryOp)query).right, nResults);
        return left > right ? left : right;
    }

    case FullTextQuery.Operator.Match:
    case FullTextQuery.Operator.StrictMatch:
    {
        KeywordList kwd = kwds[((FullTextQueryMatchOp)query).wno];
        if (kwd.currDoc == 0)
        {
            return 0;
        }
        else
        {
            // Linearly extrapolate from how far the scan has advanced through the
            // keyword's document range to reach nResults matches, capped at the list size.
            int curr = kwd.currDoc;
            int first = kwd.list.FirstKey;
            int last = kwd.list.LastKey;
            int estimation = nResults * (last - first + 1) / (curr - first + 1);
            if (estimation > kwd.list.Count)
            {
                estimation = kwd.list.Count;
            }
            return estimation;
        }
    }

    case FullTextQuery.Operator.Not:
        return impl.documents.Count;
    }
    return 0;
}
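A worked instance of that extrapolation, with illustrative numbers that are not from the source:

// Inverse list spans docs 1..1000; the scan is at doc 100 after collecting 10 hits.
int nResults = 10, first = 1, last = 1000, curr = 100;
int estimation = nResults * (last - first + 1) / (curr - first + 1);  // 10 * 1000 / 100 = 100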
public static KeywordList Deserialize(byte[] buffer)
{
    if (buffer == null)
    {
        throw new ArgumentNullException(nameof(buffer));
    }

    // Keywords are serialized sequentially as length-prefixed strings: a one-byte
    // length followed by that many bytes, repeated until the end of the buffer.
    KeywordList keywords = new KeywordList();
    using (BinaryReader reader = new BinaryReader(new MemoryStream(buffer)))
    {
        while (reader.BaseStream.Position < reader.BaseStream.Length)
        {
            byte length = reader.ReadByte();
            byte[] keyword = reader.ReadBytes(length);
            keywords.Add(keyword);
        }
    }
    return keywords;
}
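The matching writer is not shown in this excerpt. A minimal sketch of the inverse operation, assuming the keywords are available as byte[] entries and that each keyword fits in 255 bytes; the KeywordListSerializer name is hypothetical:

using System;
using System.Collections.Generic;
using System.IO;

// Hypothetical counterpart to Deserialize: writes each keyword as a one-byte
// length followed by the keyword bytes.
public static class KeywordListSerializer
{
    public static byte[] Serialize(IEnumerable<byte[]> keywords)
    {
        using (var stream = new MemoryStream())
        using (var writer = new BinaryWriter(stream))
        {
            foreach (byte[] keyword in keywords)
            {
                // A single length byte limits each keyword to 255 bytes (assumption).
                if (keyword.Length > byte.MaxValue)
                {
                    throw new ArgumentException("Keyword exceeds 255 bytes.");
                }
                writer.Write((byte)keyword.Length);
                writer.Write(keyword);
            }
            writer.Flush();
            return stream.ToArray();
        }
    }
}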
internal virtual double calculateNearness()
{
    KeywordList[] kwds = this.kwds;
    int nKwds = kwds.Length;
    if (nKwds < 2)
    {
        return 0;
    }

    for (int i = 0; i < nKwds; i++)
    {
        if (kwds[i].occ == null)
        {
            int j = kwds[i].sameAs;
            if (j >= 0 && kwds[j].occ != null)
            {
                kwds[i].occ = kwds[j].occ;
            }
            else
            {
                return 0;
            }
        }
        kwds[i].occPos = 0;
    }

    double maxNearness = 0;
    int swapPenalty = impl.helper.WordSwapPenalty;
    while (true)
    {
        int minPos = int.MaxValue;
        double nearness = 0;
        KeywordList first = null;
        KeywordList prev = null;
        for (int i = 0; i < nKwds; i++)
        {
            KeywordList curr = kwds[i];
            if (curr.occPos < curr.occ.Length)
            {
                if (prev != null)
                {
                    int offset = curr.occ[curr.occPos] - prev.occ[prev.occPos];
                    if (offset < 0)
                    {
                        // Words occur in reverse order: apply the swap penalty.
                        offset = (-offset - curr.kwdLen) * swapPenalty;
                    }
                    else
                    {
                        offset -= prev.kwdLen;
                    }
                    if (offset <= 2)
                    {
                        offset = 1;
                    }
                    nearness += 1 / System.Math.Sqrt(offset);
                }
                if (curr.occ[curr.occPos] < minPos)
                {
                    minPos = curr.occ[curr.occPos];
                    first = curr;
                }
                prev = curr;
            }
        }
        if (first == null)
        {
            break;
        }
        // Advance the keyword with the smallest current position and keep the best score seen.
        first.occPos += 1;
        if (nearness > maxNearness)
        {
            maxNearness = nearness;
        }
    }
    return maxNearness;
}
public static string GetLocalizedString(KeywordList keyword)
{
    return Instance.currentData[keyword];
}
public static void UnicodeSpeechChat3(NetState state, PacketReader pvSrc)
{
    Mobile from = state.Mobile;

    MessageType type = (MessageType)pvSrc.ReadByte();
    int hue = pvSrc.ReadInt16();
    pvSrc.ReadInt16(); // font
    string lang = pvSrc.ReadString(4);
    string text;

    bool isEncoded = (type & MessageType.Encoded) != 0;
    int[] keywords;

    if (isEncoded)
    {
        int value = pvSrc.ReadInt16();
        int count = (value & 0xFFF0) >> 4;
        int hold = value & 0xF;

        if (count < 0 || count > 50)
        {
            return;
        }

        KeywordList keyList = c_KeywordList;

        // Speech keyword IDs are packed as 12-bit values; unpack them two at a time.
        for (int i = 0; i < count; ++i)
        {
            int speechID;
            if ((i & 1) == 0)
            {
                hold <<= 8;
                hold |= pvSrc.ReadByte();
                speechID = hold;
                hold = 0;
            }
            else
            {
                value = pvSrc.ReadInt16();
                speechID = (value & 0xFFF0) >> 4;
                hold = value & 0xF;
            }

            if (!keyList.Contains(speechID))
            {
                keyList.Add(speechID);
            }
        }

        text = pvSrc.ReadUTF8StringSafe();
        keywords = keyList.ToArray();
    }
    else
    {
        text = pvSrc.ReadUnicodeStringSafe();
        keywords = c_EmptyInts;
    }

    text = text.Trim();
    if (text.Length <= 0 || text.Length > 128)
    {
        return;
    }

    type &= ~MessageType.Encoded;
    if (!Enum.IsDefined(typeof(MessageType), type))
    {
        type = MessageType.Regular;
    }

    from.Language = lang;

    Channel c = Channel.GetByType(typeof(Guild));
    if (RUOVersion.GuildChat(type) && c != null)
    {
        if (c.CanChat(from, true))
        {
            c.OnChat(from, text);
        }
    }
    else
    {
        from.DoSpeech(text, keywords, type, Utility.ClipDyedHue(hue));
    }
}
internal virtual int intersect(int doc, FullTextQuery query)
{
    int left, right;

    switch (query.op)
    {
    case FullTextQuery.Operator.And:
    case FullTextQuery.Operator.Near:
        // Advance both operands until they agree on the same document.
        do
        {
            left = intersect(doc, ((FullTextQueryBinaryOp)query).left);
            if (left == int.MaxValue)
            {
                return left;
            }
            doc = intersect(left, ((FullTextQueryBinaryOp)query).right);
        } while (left != doc && doc != int.MaxValue);
        return doc;

    case FullTextQuery.Operator.Or:
        left = intersect(doc, ((FullTextQueryBinaryOp)query).left);
        right = intersect(doc, ((FullTextQueryBinaryOp)query).right);
        return left < right ? left : right;

    case FullTextQuery.Operator.Match:
    case FullTextQuery.Operator.StrictMatch:
    {
        KeywordList kwd = kwds[((FullTextQueryMatchOp)query).wno];
        if (kwd.currDoc >= doc)
        {
            return kwd.currDoc;
        }
        IDictionaryEnumerator iterator = kwd.iterator;
        if (iterator != null)
        {
            if (iterator.MoveNext())
            {
                DictionaryEntry entry = iterator.Entry;
                int nextDoc = (int)entry.Key;
                if (nextDoc >= doc)
                {
                    kwd.currEntry = entry;
                    kwd.currDoc = nextDoc;
                    return nextDoc;
                }
            }
            else
            {
                kwd.currDoc = 0;
                return int.MaxValue;
            }
        }
        if (kwd.list != null)
        {
            // (Re)position the inverse-list enumerator at the requested document.
            kwd.iterator = iterator = kwd.list.GetDictionaryEnumerator(doc);
            if (iterator.MoveNext())
            {
                DictionaryEntry entry = iterator.Entry;
                doc = (int)entry.Key;
                kwd.currEntry = entry;
                kwd.currDoc = doc;
                return doc;
            }
        }
        kwd.currDoc = 0;
        return int.MaxValue;
    }

    case FullTextQuery.Operator.Not:
    {
        int nextDoc = intersect(doc, ((FullTextQueryUnaryOp)query).opd);
        if (nextDoc == doc)
        {
            doc += 1;
        }
        return doc;
    }

    default:
        return doc;
    }
}
public abstract void CreateKeyword(out Int32 id, String keyword, KeywordList keywordList);
public static void CreateKeyword(out Int32 id, String keyword, KeywordList keywordList)
{
    Provider.CreateKeyword(out id, keyword, keywordList);
}
public override void CreateKeywordList(out int id, string name, Project project)
{
    using (var tran = new TransactionScope(_connName))
    {
        var ds = DSKeywordList.Create(_connName);
        var ce = new KeywordList();
        if (name != null)
        {
            ce.Name = name;
        }
        if (project != null)
        {
            ce.Project = project;
        }
        ds.Insert(ce);
        tran.Commit();
        id = ce.Id;
    }
}
public override void CreateKeyword(out int id, string keyword, KeywordList keywordList)
{
    using (var tran = new TransactionScope(_connName))
    {
        var ds = DSKeyword.Create(_connName);
        var ce = new Keyword();
        if (keyword != null)
        {
            ce.Keyword = keyword;
        }
        if (keywordList != null)
        {
            ce.KeywordList = keywordList;
        }
        ds.Insert(ce);
        tran.Commit();
        id = ce.Id;
    }
}
internal virtual double evaluate(int doc, FullTextQuery query)
{
    double left, right;

    switch (query.op)
    {
    case FullTextQuery.Operator.Near:
    case FullTextQuery.Operator.And:
        left = evaluate(doc, ((FullTextQueryBinaryOp)query).left);
        right = evaluate(doc, ((FullTextQueryBinaryOp)query).right);
        nOccurrences = 0;
        return left < 0 || right < 0 ? -1 : left + right;

    case FullTextQuery.Operator.Or:
        left = evaluate(doc, ((FullTextQueryBinaryOp)query).left);
        right = evaluate(doc, ((FullTextQueryBinaryOp)query).right);
        return left > right ? left : right;

    case FullTextQuery.Operator.Match:
    case FullTextQuery.Operator.StrictMatch:
    {
        KeywordList kwd = kwds[((FullTextQueryMatchOp)query).wno];
        if (kwd.currDoc != doc)
        {
            return -1;
        }
        DocumentOccurrences d = (DocumentOccurrences)kwd.currEntry.Value;
        int[] occ = d.occurrences;
        kwd.occ = occ;
        int frequency = occ.Length;

        if (query.op == FullTextQuery.Operator.StrictMatch)
        {
            if (nOccurrences == 0)
            {
                // First keyword of the strict match: remember its positions.
                nOccurrences = frequency;
                if (occurrences == null || occurrences.Length < frequency)
                {
                    occurrences = new int[frequency];
                }
                for (int i = 0; i < frequency; i++)
                {
                    occurrences[i] = occ[i] & OCC_POSITION_MASK;
                }
            }
            else
            {
                // Subsequent keyword: keep only positions that continue the phrase.
                int nPairs = 0;
                int[] dst = occurrences;
                int occ1 = dst[0];
                int occ2 = occ[0] & OCC_POSITION_MASK;
                int i = 0, j = 0;
                int offs = kwd.kwdOffset;
                while (true)
                {
                    if (occ1 + offs <= occ2)
                    {
                        if (occ1 + offs + 1 >= occ2)
                        {
                            dst[nPairs++] = occ2;
                        }
                        if (++j == nOccurrences)
                        {
                            break;
                        }
                        occ1 = dst[j];
                    }
                    else
                    {
                        if (++i == frequency)
                        {
                            break;
                        }
                        occ2 = occ[i] & OCC_POSITION_MASK;
                    }
                }
                nOccurrences = nPairs;
                if (nPairs == 0)
                {
                    return -1;
                }
            }
        }
        return calculateKwdRank(kwd.list, d, occ);
    }

    case FullTextQuery.Operator.Not:
    {
        double rank = evaluate(doc, ((FullTextQueryUnaryOp)query).opd);
        return (rank >= 0) ? -1 : 0;
    }

    default:
        return -1;
    }
}