/// <summary>
/// Small smoke-test for HashMap: inserts the pairs (1,2)..(4,5), probes a
/// missing key, then enumerates all entries.
/// </summary>
static void Main(string[] args)
{
    var map = new HashMap<int, int>();

    // Insert (k, k + 1) for k = 1..4 — same pairs as the original demo.
    for (var key = 1; key <= 4; key++)
    {
        map.Add(key, key + 1);
    }

    // 22 was never inserted; exercises the negative lookup path.
    Console.WriteLine(map.Contains(22));

    foreach (var entry in map)
    {
        Console.WriteLine($"{entry.Key} {entry.Value}");
    }
}
/// <summary>
/// Loads person.dic: each tab-separated line is (name, index, frequency) and the
/// frequencies are accumulated into <c>_personNatureAttrs</c> keyed by name.
/// </summary>
private void Init1()
{
    TextReader br = null;
    try
    {
        _personNatureAttrs = new HashMap<string, PersonNatureAttr>();
        br = MyStaticValue.GetPersonReader();
        string temp;
        while ((temp = br.ReadLine()) != null)
        {
            var strs = temp.Split('\t');
            // NOTE(review): assumes the HashMap indexer returns null for a missing
            // key (Java-style get semantics) — confirm against the HashMap type.
            var pna = _personNatureAttrs[strs[0]];
            if (pna == null)
            {
                pna = new PersonNatureAttr();
            }
            pna.AddFreq(int.Parse(strs[1]), int.Parse(strs[2]));
            // NOTE(review): re-adds under an existing key when the name repeats;
            // assumes Add has put/overwrite semantics rather than throwing.
            _personNatureAttrs.Add(strs[0], pna);
        }
    }
    finally
    {
        // Always release the dictionary reader, even on a parse failure.
        if (br != null) br.Close();
    }
}
/// <summary>
/// Console phonebook: reads "name - number" entries until the user types
/// "search", then answers lookup queries until a blank line is entered.
/// </summary>
public static void Main()
{
    Console.WriteLine("Enter contacts (name - number) or search existing contacts (type 'search'):");

    var phonebook = new HashMap<string, string>();

    // Entry phase: stop when the sentinel "search" is typed.
    for (var line = Console.ReadLine(); line != "search"; line = Console.ReadLine())
    {
        var parts = line
            .Split(new[] { '-' }, StringSplitOptions.RemoveEmptyEntries)
            .Select(p => p.Trim())
            .ToArray();

        // First writer wins: an existing name is never overwritten.
        if (!phonebook.ContainsKey(parts[0]))
        {
            phonebook.Add(parts[0], parts[1]);
        }
    }

    // Query phase: collect search terms until a blank line.
    var queries = new List<string>();
    for (var line = Console.ReadLine(); line != string.Empty; line = Console.ReadLine())
    {
        queries.Add(line);
    }

    foreach (var query in queries)
    {
        if (phonebook.ContainsKey(query))
        {
            Console.WriteLine("{0} -> {1}", query, phonebook[query]);
        }
        else
        {
            Console.WriteLine("Contact {0} does not exist.", query);
        }
    }
}
// company_freq
/// <summary>
/// Loads the company-frequency dictionary: each tab-separated line is
/// (name, freq1, freq2) and is stored in <c>_cnMap</c> as a two-element array.
/// </summary>
private static void Init()
{
    TextReader br = null;
    try
    {
        _cnMap = new HashMap<string, int[]>();
        br = MyStaticValue.GetCompanReader();
        string temp;
        while ((temp = br.ReadLine()) != null)
        {
            var strs = temp.Split('\t');
            // cna[0]/cna[1] hold the two frequency columns for this company name.
            var cna = new int[2];
            cna[0] = int.Parse(strs[1]);
            cna[1] = int.Parse(strs[2]);
            _cnMap.Add(strs[0], cna);
        }
    }
    finally
    {
        // Always release the dictionary reader, even on a parse failure.
        if (br != null) br.Close();
    }
}
/// <summary>
/// Returns a new parser state with <paramref name="name"/> bound to
/// <paramref name="value"/>; the current state is left untouched.
/// </summary>
public ParserState AddLocal(string name, ValueToken value)
{
    var updatedLocals = Locals.Add(name, value);
    return new ParserState(Clusters, updatedLocals);
}
/// <summary>
/// Registers <paramref name="n"/> in the node table, keyed by its id.
/// </summary>
/// <param name="n">Node to register; its id must not already be present.</param>
protected void AddNode(Node n)
{
    var id = n.Id;
    // Double registration indicates a caller bug.
    Debug.Assert(!HasNode(id));
    Nodes.Add(id, n);
}
/// <summary>
/// Creates a token type and registers it in the static index-to-type lookup table.
/// </summary>
/// <param name="s">Token text/name, forwarded to the base type.</param>
/// <param name="index">Numeric token index, forwarded to the base type.</param>
public HaskellTokenType(string s, int index) : base(s, index)
{
    // NOTE(review): assumes every index is constructed at most once; a duplicate
    // would make Add fail if IntToTokenType has dictionary Add semantics.
    IntToTokenType.Add(index, this);
}
/// <summary>
/// Reflects over every DLL under an OpenSim bin directory and caches the
/// assemblies, script-event names, script-module classes and script-API types
/// used by the rest of this reflector.
/// </summary>
/// <param name="openSimBinDirectory">Root directory scanned recursively for *.dll files.</param>
public OpenSimLibraryReflectedTypeData(string openSimBinDirectory)
{
    _openSimBinDirectory = openSimBinDirectory;
    // Load every DLL found; first occurrence of a file name wins.
    foreach (var assemblyPath in Directory.EnumerateFiles(openSimBinDirectory, "*.dll", SearchOption.AllDirectories))
    {
        try
        {
            string assemblyName = Path.GetFileName(assemblyPath);
            if (assemblyName != null && !_allOpenSimAssemblies.ContainsKey(assemblyName))
            {
                _allOpenSimAssemblies.Add(assemblyName, Assembly.LoadFrom(assemblyPath));
            }
        }
        catch
        {
            // Deliberate best-effort: unloadable DLLs are silently skipped.
            //this is sparta
        }
    }
    // These four assemblies must exist; the indexer will throw otherwise.
    ScriptApiAssembly = _allOpenSimAssemblies["OpenSim.Region.ScriptEngine.Shared.dll"];
    ScriptRuntimeAssembly = _allOpenSimAssemblies["OpenSim.Region.ScriptEngine.Shared.Api.Runtime.dll"];
    RegionFrameworkAssembly = _allOpenSimAssemblies["OpenSim.Region.Framework.dll"];
    OpenMetaverseTypesAssembly = _allOpenSimAssemblies["OpenMetaverseTypes.dll"];
    ScriptModuleFunctionAttribute = RegionFrameworkAssembly.GetType("OpenSim.Region.Framework.Interfaces.ScriptInvocationAttribute");
    ScriptModuleConstantAttribute = RegionFrameworkAssembly.GetType("OpenSim.Region.Framework.Interfaces.ScriptConstantAttribute");
    // Resolve dependencies from the loaded set while we reflect; removed at the end.
    AppDomain.CurrentDomain.AssemblyResolve += _currentDomainOnAssemblyResolve;
    // Script event names come from the Executor's nested 'scriptEvents' enum,
    // excluding the 'None' member.
    _eventNames = new HashedSet<string>(
        ScriptRuntimeAssembly.GetType("OpenSim.Region.ScriptEngine.Shared.ScriptBase.Executor")
            .GetNestedType("scriptEvents")
            .GetMembers(BindingFlags.Public | BindingFlags.Static)
            .Where(x => x.Name != "None")
            .Select(x => x.Name)
    );
    // Collect INonSharedRegionModule implementors that expose script constants
    // or script invocations via attributes.
    foreach (var assembly in AllOpenSimAssemblies.Values)
    {
        const BindingFlags bindingFlags =
            BindingFlags.Instance | BindingFlags.Public | BindingFlags.NonPublic | BindingFlags.Static;
        try
        {
            var types = assembly.GetTypes();
            var interfaces = types.Where(x => x.GetInterfaces().Any(y => y.Name == "INonSharedRegionModule"));
            _scriptModuleClasses.AddRange(
                interfaces.Where(t =>
                    t.GetFields(bindingFlags).Cast<MemberInfo>().Concat(t.GetMethods(bindingFlags))
                        .Any(h => h.GetCustomAttributes(true).Any(
                            x =>
                            {
                                // Attribute types are matched by simple name because they
                                // live in assemblies we only know at runtime.
                                var n = x.GetType().Name;
                                return (n == "ScriptConstantAttribute" || n == "ScriptInvocationAttribute");
                            }))).ToList()
            );
        }
        catch (ReflectionTypeLoadException e)
        {
            // Some assemblies reference types we cannot load; log and continue.
            Log.WriteLineWithHeader("[OpenSimLibraryReflector ASSEMBLY LOAD EXCEPTION]",
                string.Join(Environment.NewLine, e.LoaderExceptions.Select(x => x.Message)));
        }
    }
    FunctionContainingInterfaces = ScriptApiAssembly.GetTypes()
        .Where(x => x.IsInterface && x.Namespace == "OpenSim.Region.ScriptEngine.Shared.Api.Interfaces")
        .ToList();
    ScriptBaseClass = ScriptRuntimeAssembly.GetType("OpenSim.Region.ScriptEngine.Shared.ScriptBase.ScriptBaseClass");
    ScriptConstantContainerClasses = new GenericArray<Type> { ScriptBaseClass };
    AppDomain.CurrentDomain.AssemblyResolve -= _currentDomainOnAssemblyResolve;
}
/// <summary>
/// Builds the library signature(s) for the named function from the OSD data.
/// Returns an empty array when the function is unknown; otherwise a single
/// signature with return type, deprecation flag, documentation and parameters.
/// </summary>
/// <param name="name">Function name to look up in <c>_data</c>.</param>
public IReadOnlyGenericArray<LSLLibraryFunctionSignature> LSLFunctionOverloads(string name)
{
    var map = GetFunctionOSDMap(_data, name);
    if (map == null)
    {
        // Unknown function: empty result, never null.
        return (new GenericArray<LSLLibraryFunctionSignature>());
    }
    LSLType returnType = LSLType.Void;
    if (map.ContainsKey("return"))
    {
        var value = map["return"].AsString();
        // "void" maps to the LSLType.Void default set above.
        if (value != "void")
        {
            returnType = LSLTypeTools.FromLSLTypeName(value);
        }
    }
    LSLLibraryFunctionSignature func = new LSLLibraryFunctionSignature(returnType, name)
    {
        DocumentationString = DocumentFunction(_data, name)
    };
    func.Subsets.SetSubsets(_subsets);
    func.Deprecated = map.ContainsKey("deprecated");
    // No declared arguments: return the zero-parameter signature as-is.
    if (!map.ContainsKey("arguments"))
    {
        return new GenericArray<LSLLibraryFunctionSignature> { func };
    }
    var args = map["arguments"] as OSDArray;
    if (args == null)
    {
        // Malformed arguments entry: treat as zero parameters.
        return new GenericArray<LSLLibraryFunctionSignature> { func };
    }
    // Tracks how many times each parameter name has been seen so duplicates
    // can be renamed ("x", "x_1", "x_2", ...).
    var paramNameDuplicates = new HashMap<string, int>();
    foreach (var arg in args.Cast<OSDMap>())
    {
        var argName = arg.Keys.First();
        var paramDetails = (OSDMap)arg[argName];
        if (paramNameDuplicates.ContainsKey(argName))
        {
            //rename duplicates with a trailing number
            int curValue = paramNameDuplicates[argName];
            int newValue = curValue + 1;
            paramNameDuplicates[argName] = newValue;
            argName = argName + "_" + newValue;
        }
        else
        {
            paramNameDuplicates.Add(argName, 0);
        }
        func.AddParameter(new LSLParameterSignature(
            LSLTypeTools.FromLSLTypeName(paramDetails["type"].AsString()),
            argName, false));
    }
    return (new GenericArray<LSLLibraryFunctionSignature> { func });
}
/// <summary>
/// Adds a new property with a <c>string</c> value.
/// </summary>
/// <param name="propName">The name of the property.</param>
/// <param name="propValue">The value of the property.</param>
public void Add(String propName, string propValue) => _properties.Add(propName, propValue);
/// <summary>
/// Reserves <paramref name="name"/> as a known state; its definition is
/// registered as null until it is filled in later.
/// </summary>
public void PreDefineState(string name) => _definedStates.Add(name, null);
/// <summary>
/// Initializes the Outlook/MAPI layer: detects the Outlook version, logs on to
/// a MAPI session, opens the address book, registers every message store, and
/// remembers the default store plus its Deleted Items folder.
/// </summary>
/// <exception cref="Exception">MAPI logon failed.</exception>
/// <exception cref="ApplicationException">No default message store was found.</exception>
public static void Initialize()
{
    try
    {
        _outlookVersion = GetOutlookVersionFromRegistry();
    }
    catch (Exception ex)
    {
        // Version detection is non-fatal: report and continue with version 0.
        Core.AddExceptionReportData("\nError getting Outlook version from registry: " + ex.Message);
        Trace.WriteLine("Error getting Outlook version from registry: " + ex.Message);
        _outlookVersion = 0;
    }
    ReportOutlookAddins();
    ReportOutlookExtensions();
    _eSession = new EMAPISession(0);
    _eSession.CheckDependencies();
    try
    {
        if (!_eSession.Initialize(IsPickLogonProfile(), _libManager))
        {
            throw new Exception("MAPI logon failed");
        }
    }
    catch (COMException exception)
    {
        // Logon failure is fatal; report it and rethrow with context.
        _tracer.TraceException(exception);
        Core.ReportBackgroundException(exception);
        throw new Exception("MAPI logon failed: " + exception.Message);
    }
    _addressBook = _eSession.OpenAddrBook();
    IEMsgStores stores = _eSession.GetMsgStores();
    if (stores != null)
    {
        using (stores)
        {
            int count = stores.GetCount();
            Trace.WriteLine("*********************************************************");
            Trace.WriteLine("* " + count + " MAPI stores detected");
            for (int i = 0; i < count; ++i)
            {
                IEMsgStore store = null;
                try
                {
                    store = stores.GetMsgStore(i);
                }
                catch (EMAPILib.ProblemWhenOpenStorage ex)
                {
                    // A broken store is skipped; the rest are still registered.
                    Trace.WriteLine("* " + i + "th store caused problem while getting the IEMsgStore resource");
                    ProblemWithOpeningStorage(ex);
                }
                if (store == null)
                {
                    continue;
                }
                string storeID = stores.GetStorageID(i);
                _msgStores.Add(storeID, store);
                Trace.WriteLine("* " + i + "th store has StoreID [" + storeID + "]");
                if (Settings.UseOutlookListeners)
                {
                    try
                    {
                        // Subscribe for change notifications on this store.
                        MAPIListenerStub mapiListener = new MAPIListenerStub(new MAPIListener(storeID));
                        store.Advise(mapiListener);
                    }
                    catch (Exception exception)
                    {
                        // Listener failure is non-fatal; the store still works without it.
                        _tracer.TraceException(exception);
                        //SetLastException( exception );
                    }
                }
                if (stores.IsDefaultStore(i))
                {
                    Trace.WriteLine("* " + i + "th store is a default store");
                    _defaultMsgStore = store;
                    // Resolve the Deleted Items folder of the default store.
                    string delEntryID = _defaultMsgStore.GetBinProp(MAPIConst.PR_IPM_WASTEBASKET_ENTRYID);
                    _deletedFolderIDs = new PairIDs(delEntryID, storeID);
                }
            }
            Trace.WriteLine("*********************************************************");
        }
    }
    if (_defaultMsgStore == null)
    {
        throw new ApplicationException("There is no default storage");
    }
}
/// <summary>
/// Records <paramref name="token"/> as the best token seen for
/// <paramref name="state"/>.
/// </summary>
/// <param name="token">The best token.</param>
/// <param name="state">The search state it belongs to.</param>
protected void SetBestToken(Token token, ISearchState state) => BestTokenMap.Add(state, token);
/// <summary>
/// Atomically adds a new item to the map.
/// If the key already exists then the Fail handler is called with the unaltered map
/// and the value already set for the key; it is expected to return a new map.
/// </summary>
/// <remarks>Null is not allowed for a Key or a Value.</remarks>
/// <param name="key">Key</param>
/// <param name="value">Value</param>
/// <param name="Fail">Delegate invoked on failure with the unaltered map
/// and the value already set for the key.</param>
/// <exception cref="ArgumentNullException">Thrown when the key or value are null.</exception>
/// <returns>New map with the item added.</returns>
public static HashMap<K, V> TryAdd<K, V>(this HashMap<K, V> self, K key, V value, Func<HashMap<K, V>, K, V, HashMap<K, V>> Fail)
{
    if (self.ContainsKey(key))
    {
        return Fail(self, key, value);
    }
    return self.Add(key, value);
}
/// <summary>
/// DFA minimization: partitions the DFA states into equivalence groups.
/// Starts from the accepting/non-accepting split, then repeatedly refines
/// groups whose members transition (on some input symbol) into different
/// groups, restarting the refinement whenever a group is split.
/// </summary>
/// <param name="setMasterDfa">All DFA states.</param>
/// <param name="setInputSymbol">The input alphabet.</param>
/// <returns>An ArrayList of Hashset groups forming the final partition.</returns>
private ArrayList PartitionDfaGroups(Hashset setMasterDfa, Hashset setInputSymbol)
{
    ArrayList arrGroup = new ArrayList();
    // Maps target-group -> states landing there; rebuilt per (group, symbol).
    HashMap hashMap = new HashMap();
    // Sentinel key for states with no transition on the current symbol.
    Hashset emptySet = new Hashset();
    Hashset acceptingSet = new Hashset();
    Hashset nonAcceptingSet = new Hashset();
    // Initial partition: accepting vs non-accepting states.
    foreach (object objState in setMasterDfa)
    {
        TransitionState state = (TransitionState)objState;
        if (state.AcceptingState == true)
        {
            acceptingSet.AddElement(state);
        }
        else
        {
            nonAcceptingSet.AddElement(state);
        }
    }
    if (nonAcceptingSet.GetCardinality() > 0)
    {
        arrGroup.Add(nonAcceptingSet);
    }
    arrGroup.Add(acceptingSet);
    IEnumerator iterInput = setInputSymbol.GetEnumerator();
    iterInput.Reset();
    while (iterInput.MoveNext())
    {
        string sInputSymbol = iterInput.Current.ToString();
        int nPartionIndex = 0;
        while (nPartionIndex < arrGroup.Count)
        {
            Hashset setToBePartitioned = (Hashset)arrGroup[nPartionIndex];
            nPartionIndex++;
            // Singleton or empty groups cannot be split further.
            if (setToBePartitioned.IsEmpty() || setToBePartitioned.GetCardinality() == 1)
            {
                continue;
            }
            // Bucket each state in the group by the group its transition lands in.
            foreach (object objState in setToBePartitioned)
            {
                TransitionState state = (TransitionState)objState;
                TransitionState[] arrState = state.GetTransitions(sInputSymbol.ToString());
                if (arrState != null)
                {
                    // DFA invariant: exactly one transition per symbol.
                    Debug.Assert(arrState.Length == 1);
                    TransitionState stateTransionTo = arrState[0];
                    Hashset setFound = FindGroup(arrGroup, stateTransionTo);
                    hashMap.Add(setFound, state);
                }
                else
                {
                    hashMap.Add(emptySet, state);
                }
            }
            // More than one bucket means the group must be split; replace it with
            // the buckets and restart refinement from the first symbol.
            if (hashMap.Count > 1)
            {
                arrGroup.Remove(setToBePartitioned);
                foreach (DictionaryEntry de in hashMap)
                {
                    Hashset setValue = (Hashset)de.Value;
                    arrGroup.Add(setValue);
                }
                nPartionIndex = 0;
                iterInput.Reset();
            }
            hashMap.Clear();
        }
    }
    return (arrGroup);
}
/// <summary>
/// Walks the trie of candidate new words, collecting into
/// <paramref name="hm"/> the score of every active terminal word (status 2 or 3)
/// whose nature matches <paramref name="nature"/> (or any nature when null).
/// Recurses into branches that are not pure leaves (status != 3).
/// </summary>
/// <param name="smartForest">Current trie node; null or leafless nodes are ignored.</param>
/// <param name="hm">Accumulator mapping word name to score.</param>
/// <param name="nature">Nature filter, or null for no filtering.</param>
private void ValueResult(SmartForest<NewWord> smartForest, HashMap<string, double> hm, Nature nature)
{
    if (smartForest == null || smartForest.Branches == null)
    {
        return;
    }

    foreach (var branch in smartForest.Branches)
    {
        var word = branch.Param;
        var status = branch.Status;

        // Status 2 and 3 both mark a complete word: collect it when it is
        // active and passes the nature filter.
        if (status == 3 || status == 2)
        {
            if (word.IsActive && (nature == null || word.Nature.Equals(nature)))
            {
                hm.Add(word.Name, word.Score);
            }
        }

        // Status 3 is a terminal leaf; everything else may have children.
        if (status != 3)
        {
            ValueResult(branch, hm, nature);
        }
    }
}
/// <summary>
/// Executes a label print job described by the JSON payload <paramref name="e"/>
/// (template path, printer name, copies, link flag and label content), prints it
/// via CodeSoft 9, and returns a JSON response with the outcome.
/// </summary>
/// <param name="e">JSON request: labelJsonData, printName, tempatePath, copys, linkFlag.</param>
/// <param name="errorCode">Set to 0 on success, -1 on failure.</param>
/// <param name="errorMessage">Human-readable outcome message.</param>
/// <returns>Serialized JSON map with errorMessage, errorCode and key.</returns>
public static string ProcPrintJob(string e, ref int errorCode, ref String errorMessage)
{
    LabelPrint print = new LabelPrint();
    HashMap map = JsonConvert.DeserializeObject<HashMap>(e);
    String LabelContent = map["labelJsonData"].ToString();
    String printer = map.GetValue<String>("printName");
    // A globally configured printer (settable via the web interface) overrides
    // the one requested in the payload.
    if (Setting.globalPrinter != null && Setting.globalPrinter != "")
    {
        printer = Setting.globalPrinter;
    }
    String tempatePath = map.GetValue<String>("tempatePath");
    int copys = map.GetValue<int>("copys");
    try
    {
        PrintX.LeanMES.Plugin.LabelPrint.PrintHlper.Copys = copys;
    }
    catch (Exception err)
    {
        // Invalid copy count: fall back to a single copy.
        Console.WriteLine(err.Message);
        copys = 1;
    }
    int linkFlag = 0;
    try
    {
        // BUG FIX: the original called GetValue but discarded the result, so
        // linkFlag always stayed 0 regardless of the payload.
        linkFlag = map.GetValue<int>("linkFlag");
    }
    catch (Exception err)
    {
        // Missing/invalid link flag defaults to 1 (treated as linked).
        Console.WriteLine(err.Message);
        linkFlag = 1;
    }
    Tools.PubMessage("是否连板--------" + linkFlag);
    if (linkFlag < 1)
    {
        // Normalize negative values: anything below 1 means "not linked".
        linkFlag = 0;
    }
    if (copys < 1)
    {
        // Print count must be at least 1.
        PrintX.LeanMES.Plugin.LabelPrint.PrintHlper.Copys = 1;
        copys = 1;
    }
    Tools.PubMessage("打印份数--------" + copys);
    string snString = "";
    List<LabelData> list = new List<LabelData>();
    List<LabelInfo> list2 = new List<LabelInfo>();
    list = Deserialization.JSONStringToList<LabelData>(LabelContent);
    try
    {
        // Validates the label payload by re-deserializing each entry in parallel.
        Parallel.For(0, list.Count, i =>
        {
            list2 = Deserialization.JSONStringToList<LabelInfo>(JsonConvert.SerializeObject(list[i].LabelContent));
            for (int j = 0; j < list2.Count; j++)
            {
                snString = list2[j].Value;
            }
        });
    }
    catch (Exception err)
    {
        errorMessage = "解析打印内容的json失败,请校验格式是否正确,您传递的格式为:" + e + "触发的异常信息为:\n" + err.Message;
        errorCode = -1;
    }
    try
    {
        Tools.PubMessage("准备打印--------");
        Tools.PubMessage("打印模板--------" + tempatePath);
        Tools.PubMessage("打印机--------" + printer);
        print.PrintLabelUseCodeSoft9(tempatePath, LabelContent, copys, "cn", printer, ref errorCode, ref errorMessage, linkFlag);
    }
    catch (Exception er)
    {
        Tools.PubMessage("发生异常咯--------" + er.Message);
        errorMessage = er.Message;
        errorCode = -1;
    }
    // Reset the global copy count so the next job starts from a clean state.
    PrintX.LeanMES.Plugin.LabelPrint.PrintHlper.Copys = 1;
    // Assemble the JSON response.
    HashMap responseBean = new HashMap();
    if (errorCode == 0)
    {
        errorMessage = "打印成功";
    }
    responseBean.Add("errorMessage", errorMessage);
    responseBean.Add("errorCode", errorCode);
    responseBean.Add("key", "PrintJob");
    return (JsonConvert.SerializeObject(responseBean));
}
/// <summary>
/// Parses a multipart/form-data request body: splits it at the boundary given in
/// <paramref name="contentTypeStr"/> into parts, collects simple (no content-type)
/// parts into a parameter map, and builds an HttpServletRequestImpl from both.
/// </summary>
/// <param name="requestHeader">Headers of the surrounding HTTP request.</param>
/// <param name="input">Raw request body stream.</param>
/// <param name="contentTypeStr">Content-Type header value carrying the boundary.</param>
/// <param name="contentLength">Total body length in bytes.</param>
/// <param name="resp">Response object to associate with the request.</param>
/// <param name="webApp">Owning web application.</param>
public static HttpServletRequestImpl parse(Map<string, string> requestHeader, Stream input, string contentTypeStr, int contentLength, HttpServletResponseImpl resp, WebApplication webApp)
{
    ContentType contentType = Util.parseContentType(contentTypeStr);
    string boundary = "--" + contentType.getAttribute("BOUNDARY");
    List<Part> partList = new List<Part>();
    int length = contentLength;
    // Skip the preamble up to (and including) the first boundary.
    length = readToBoundary(input, boundary, length, null);
    HashMap<string, byte[][]> parameterMap = new HashMap<string, byte[][]>();
    int cnt = 0;
    for (;;)
    {
        // Read this part's headers up to the blank separator line.
        Map<string, string> headerMap = new HashMap<string, string>();
        string line;
        while ((line = Util.readLine(input)) != null)
        {
            length -= line.Length + 2; // +2 accounts for the CR+LF terminator
            if (line == "")
            {
                break;
            }
            Util.parseHeader(headerMap, line);
        }
        ContentType cd = Util.parseContentType(headerMap.get("CONTENT-DISPOSITION"));
        string quotedName = cd.getAttribute("NAME");
        // BUG FIX: strip BOTH surrounding quotes. The original used
        // Substring(1, Length - 1) — a Java substring(begin, end) port slip —
        // which kept the closing quote in the field name.
        string name = quotedName.Substring(1, quotedName.Length - 2);
        string ct = headerMap.get("CONTENT-TYPE");
        byte[][] dataOut = new byte[1][];
        length = readToBoundary(input, "\r\n" + boundary, length, dataOut);
        PartImpl part = new PartImpl(ct, headerMap, dataOut[0], name);
        partList.Add(part);
        // Parts without a content type are plain form fields.
        if (ct == null)
        {
            byte[][] array = parameterMap.get(name);
            if (array == null)
            {
                parameterMap.Add(name, new byte[][] { dataOut[0] });
            }
            else
            {
                // Append the new value to the existing ones for this field name.
                byte[][] newArray = new byte[array.Length + 1][];
                Array.Copy(array, 0, newArray, 0, array.Length);
                newArray[array.Length] = dataOut[0];
                if (parameterMap.ContainsKey(name))
                {
                    // When a submitted parameter name repeats, append a counter to
                    // the key to avoid a duplicate-key exception.
                    // NOTE(review): the combined array is stored under the new
                    // counter-suffixed key, not the original — confirm intended.
                    cnt++;
                    parameterMap.Add(name + cnt.ToString(), newArray);
                }
                else
                {
                    parameterMap.Add(name, newArray);
                }
            }
        }
        if (length == 0)
        {
            break;
        }
    }
    HttpServletRequestImpl req = new HttpServletRequestImpl(requestHeader, parameterMap, partList, resp, webApp);
    return (req);
}
/// <summary>
/// Builds a randomized Lucene index for grouping/count tests: generates random
/// group/count/content values, indexes them (optionally with doc values), and
/// records the expected group-to-count mapping per search term for later
/// verification.
/// </summary>
/// <returns>An IndexContext bundling the directory, reader, doc-values type,
/// expected counts and the distinct content strings.</returns>
private IndexContext CreateIndexContext()
{
    Random random = Random();
    FieldInfo.DocValuesType_e[] dvTypes = new FieldInfo.DocValuesType_e[] {
        FieldInfo.DocValuesType_e.BINARY,
        FieldInfo.DocValuesType_e.SORTED
    };
    Directory dir = NewDirectory();
    RandomIndexWriter w = new RandomIndexWriter(
        random,
        dir,
        NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)).SetMergePolicy(NewLogMergePolicy())
    );
    // The legacy Lucene3x codec cannot write doc values.
    bool canUseDV = !"Lucene3x".Equals(w.w.Config.Codec.Name, StringComparison.Ordinal);
    FieldInfo.DocValuesType_e? dvType = canUseDV ? dvTypes[random.nextInt(dvTypes.Length)] : (FieldInfo.DocValuesType_e?)null;
    int numDocs = 86 + random.nextInt(1087) * RANDOM_MULTIPLIER;
    string[] groupValues = new string[numDocs / 5];
    string[] countValues = new string[numDocs / 10];
    for (int i = 0; i < groupValues.Length; i++)
    {
        groupValues[i] = GenerateRandomNonEmptyString();
    }
    for (int i = 0; i < countValues.Length; i++)
    {
        countValues[i] = GenerateRandomNonEmptyString();
    }
    List<string> contentStrings = new List<string>();
    // Expected result: content term -> (group value -> set of count values).
    IDictionary<string, IDictionary<string, ISet<string>>> searchTermToGroupCounts =
        new HashMap<string, IDictionary<string, ISet<string>>>();
    for (int i = 1; i <= numDocs; i++)
    {
        // Occasionally emit null group/count values to exercise the null paths.
        string groupValue = random.nextInt(23) == 14 ? null : groupValues[random.nextInt(groupValues.Length)];
        string countValue = random.nextInt(21) == 13 ? null : countValues[random.nextInt(countValues.Length)];
        string content = "random" + random.nextInt(numDocs / 20);
        IDictionary<string, ISet<string>> groupToCounts;
        if (!searchTermToGroupCounts.TryGetValue(content, out groupToCounts))
        {
            // Groups sort always DOCID asc...
            searchTermToGroupCounts.Add(content, groupToCounts = new LinkedHashMap<string, ISet<string>>());
            contentStrings.Add(content);
        }
        ISet<string> countsVals;
        if (!groupToCounts.TryGetValue(groupValue, out countsVals))
        {
            groupToCounts.Add(groupValue, countsVals = new HashSet<string>());
        }
        countsVals.Add(countValue);
        Document doc = new Document();
        doc.Add(new StringField("id", string.Format(CultureInfo.InvariantCulture, "{0:D9}", i), Field.Store.YES));
        if (groupValue != null)
        {
            AddField(doc, groupField, groupValue, dvType);
        }
        if (countValue != null)
        {
            AddField(doc, countField, countValue, dvType);
        }
        doc.Add(new TextField("content", content, Field.Store.YES));
        w.AddDocument(doc);
    }
    DirectoryReader reader = w.Reader;
    if (VERBOSE)
    {
        // Dump the whole index for debugging when verbose mode is on.
        for (int docID = 0; docID < reader.MaxDoc; docID++)
        {
            Document doc = reader.Document(docID);
            Console.WriteLine("docID=" + docID + " id=" + doc.Get("id") + " content=" + doc.Get("content") + " author=" + doc.Get("author") + " publisher=" + doc.Get("publisher"));
        }
    }
    w.Dispose();
    return (new IndexContext(dir, reader, dvType, searchTermToGroupCounts, contentStrings.ToArray(/*new String[contentStrings.size()]*/)));
}
/// <summary>
/// Nancy module wiring three routes: a UTF-8 print endpoint, a base64 print
/// endpoint, and a GET route that sets the global printer.
/// </summary>
public PrintModule()
{
    // Route decoding the body as UTF-8 (URL-encoded payload).
    Post["/CQRS/PrintLabel/"] = parameter =>
    {
        String errorMessage = "";
        int errorCode = -1;
        try
        {
            String bodyData = HttpUtility.UrlDecode(GetBodyRaw(), Encoding.UTF8);
            Form1.ProcPrintJob(bodyData, ref errorCode, ref errorMessage);
            if (errorCode == 0)
            {
                errorMessage = "打印成功";
            }
            // NOTE(review): on success this route returns an empty JSON object
            // and the error map below is only reached on exception — confirm
            // this asymmetric contract is intended.
            return ("{}");
        }
        catch (Exception err)
        {
            errorCode = -1;
            errorMessage = err.Message;
        }
        HashMap map = new HashMap();
        map.Add("errorCode", errorCode);
        map.Add("errorMessage", errorMessage);
        String responseStr = JsonConvert.SerializeObject(map);
        return (responseStr);
    };
    // Route decoding the body as (possibly repeatedly) base64-encoded JSON.
    Post["/"] = parameter =>
    {
        Tools.PubMessage("收到打印请求");
        int errorCode = -1;
        String errorMessage = "";
        try
        {
            String bodyData = GetBodyRaw();
            string orgStr = "";
            // Decode base64 layers until the result looks like a JSON object.
            // NOTE(review): a payload that never decodes to {...} will throw or
            // loop here; relies on the catch below as the escape hatch.
            while (!(orgStr.StartsWith("{") && orgStr.EndsWith("}")))
            {
                byte[] outputb = Convert.FromBase64String(bodyData);
                orgStr = Encoding.Default.GetString(outputb);
                bodyData = orgStr;
            }
            Form1.ProcPrintJob(orgStr, ref errorCode, ref errorMessage);
            if (errorCode == 0)
            {
                errorMessage = "打印成功";
            }
        }
        catch (Exception err)
        {
            errorCode = -1;
            errorMessage = err.Message;
        }
        HashMap map = new HashMap();
        map.Add("errorCode", errorCode);
        map.Add("errorMessage", errorMessage);
        String responseStr = JsonConvert.SerializeObject(map);
        Tools.PubMessage(errorMessage);
        return (responseStr);
    };
    // Sets the process-wide printer override used by ProcPrintJob.
    Get["/SetPrinter/{printer}/"] = dynamicPara =>
    {
        Setting.Setting.globalPrinter = dynamicPara.printer;
        return ("<HTML><head></Head><body><h1>你已经将当前打印机设置为" + dynamicPara.printer + "</h1></body></HTML>");
    };
}
/// <summary>
/// Registers <paramref name="lslFunctionSignature"/> in the function-definition
/// table, keyed by its name.
/// </summary>
public void PreDefineFunction(LSLPreDefinedFunctionSignature lslFunctionSignature) =>
    _functionDefinitions.Add(lslFunctionSignature.Name, lslFunctionSignature);
/// <summary>
/// Registers <paramref name="sub"/> as the subscription for
/// <paramref name="pid"/>, replacing any existing one for the same path.
/// </summary>
public Unit AddSubscription(ProcessId pid, IDisposable sub)
{
    // At most one subscription per process path: drop the old one first.
    RemoveSubscription(pid);
    var path = pid.Path;
    subs = subs.Add(path, sub);
    return unit;
}
/// <summary>
/// Rewrites this fuzzy-like query into a BooleanQuery: gathers fuzzy term
/// variants per source term/field, then ORs them together (single variants
/// directly, multiple variants inside a coord-disabled sub-query). The result
/// is cached in <c>rewrittenQuery</c>.
/// </summary>
public override Query Rewrite(IndexReader reader)
{
    // Rewrite only once; reuse the cached result afterwards.
    if (rewrittenQuery != null)
    {
        return (rewrittenQuery);
    }
    //load up the list of possible terms
    foreach (FieldVals f in fieldVals)
    {
        AddTerms(reader, f);
    }
    //clear the list of fields
    fieldVals.Clear();
    BooleanQuery bq = new BooleanQuery();
    //create BooleanQueries to hold the variants for each token/field pair and ensure it
    // has no coord factor
    //Step 1: sort the termqueries by term/field
    HashMap<Term, List<ScoreTerm>> variantQueries = new HashMap<Term, List<ScoreTerm>>();
    int size = q.Size();
    for (int i = 0; i < size; i++)
    {
        ScoreTerm st = q.Pop();
        // NOTE(review): assumes the HashMap indexer returns null for a missing
        // key (Java-style get semantics).
        var l = variantQueries[st.fuzziedSourceTerm];
        if (l == null)
        {
            l = new List<ScoreTerm>();
            variantQueries.Add(st.fuzziedSourceTerm, l);
        }
        l.Add(st);
    }
    //Step 2: Organize the sorted termqueries into zero-coord scoring boolean queries
    foreach (var variants in variantQueries.Values)
    {
        if (variants.Count == 1)
        {
            //optimize where only one selected variant
            ScoreTerm st = variants[0];
            TermQuery tq = new FuzzyTermQuery(st.Term, ignoreTF);
            tq.Boost = st.Score; // set the boost to a mix of IDF and score
            bq.Add(tq, Occur.SHOULD);
        }
        else
        {
            BooleanQuery termVariants = new BooleanQuery(true); //disable coord and IDF for these term variants
            foreach (ScoreTerm st in variants)
            {
                TermQuery tq = new FuzzyTermQuery(st.Term, ignoreTF); // found a match
                tq.Boost = st.Score; // set the boost using the ScoreTerm's score
                termVariants.Add(tq, Occur.SHOULD); // add to query
            }
            bq.Add(termVariants, Occur.SHOULD); // add to query
        }
    }
    //TODO possible alternative step 3 - organize above booleans into a new layer of field-based
    // booleans with a minimum-should-match of NumFields-1?
    bq.Boost = Boost;
    this.rewrittenQuery = bq;
    return (bq);
}
/// <summary>
/// End-to-end Lucene demo: clears the existing index folder, re-indexes all
/// SQL records with a StandardAnalyzer, then runs an interactive multi-field
/// boosted search with date sorting and prints the matches.
/// </summary>
static void Main(string[] args)
{
    // Get the indexes files from the "lucene_Index" folder
    string[] filePaths = System.IO.Directory.GetFiles(_luceneDir);
    // Delete all the indexes from "lucene_Index" folder
    foreach (string filePath in filePaths)
    {
        File.Delete(filePath);
    }
    #region Indexing
    //Create Directory for Indexes
    //There are 2 options, FS or RAM
    //Step 1: Declare Index Store
    //Now we need Analyzer
    //An Analyzer builds TokenStreams, which analyze text. It thus represents a policy for extracting index terms from text.
    //In general, any analyzer in Lucene is tokenizer + stemmer + stop-words filter.
    //Tokenizer splits your text into chunks-For example, for phrase "I am very happy" it will produce list ["i", "am", "very", "happy"]
    // stemmer:-piece of code responsible for "normalizing" words to their common form (horses => horse, indexing => index, etc)
    //Stop words are the most frequent and almost useless words
    Analyzer analyzer = new StandardAnalyzer(Lucene.Net.Util.Version.LUCENE_30);
    //Need an Index Writer to write the output of Analysis to Index
    IndexWriter writer = new IndexWriter(_directory, analyzer, IndexWriter.MaxFieldLength.UNLIMITED);
    //Provide Documents/Build Documents
    //Add Documents to the Index.
    SQLRecords allRecords = new SQLRecords();
    List<lucene1> records = allRecords.GetAllRecords();
    foreach (lucene1 item in records)
    {
        Document doc = new Document();
        // Id is stored but not searchable; Title/ShortDescription are analyzed.
        doc.Add(new Field("Id", item.Id.ToString(), Field.Store.YES, Field.Index.NO));
        doc.Add(new Field("Title", item.Title.ToLower(), Field.Store.YES, Field.Index.ANALYZED));
        doc.Add(new Field("ShortDescription", item.ShortDescription.ToLower(), Field.Store.YES, Field.Index.ANALYZED));
        // doc.Add(new Field("PageBody", item.PageBody.ToLower(), Field.Store.YES, Field.Index.ANALYZED));
        doc.Add(new Field("Date", item.DateCrated.ToString(), Field.Store.YES, Field.Index.NOT_ANALYZED));
        writer.AddDocument(doc);
    }
    //Run the IndexWriter
    writer.Optimize();
    writer.Commit();
    writer.Dispose();
    //Index is ready for searching
    #endregion
    #region Searching
    // Create the Query.
    String[] fields = new String[] { "Title", "ShortDescription", "PageBody", "DateCreated" };
    // Boosting/Scoring: is the ability to assign higher importance to specific words in a query
    //Document level boosting - while indexing - by calling document.setBoost() before a document is added to the index.
    //Document's Field level boosting - while indexing - by calling field.setBoost() before adding a field to the document (and before adding the document to the index).
    //Query level boosting - during search, by setting a boost on a query clause, calling Query.setBoost().
    //Query level boosting used here...
    HashMap<String, float> boosts = new HashMap<String, float>();
    boosts.Add("Title", 15);
    boosts.Add("ShortDescription", 10);
    boosts.Add("PageBody", 5);
    MultiFieldQueryParser parser = new MultiFieldQueryParser(
        Lucene.Net.Util.Version.LUCENE_30,
        fields,
        analyzer,
        boosts
    );
    //QueryParser parser = new QueryParser(Lucene.Net.Util.Version.LUCENE_30, "Title", analyzer);
    Console.WriteLine("Please Enter the string to be searched :\n");
    string searchItem = Console.ReadLine();
    Query query = parser.Parse(searchItem);
    //
    // Pass the Query to the IndexSearcher.
    //
    IndexSearcher searcher = new IndexSearcher(_directory, readOnly: true);
    TopDocs hits = searcher.Search(query, 200);
    // Re-run the search sorted by date (descending), filtered to the same query.
    Sort sort = new Sort(new SortField("DateCreated", SortField.LONG, true));
    var filter = new QueryWrapperFilter(query);
    TopDocs results = searcher.Search(query, filter, 1000, sort);
    int result = results.ScoreDocs.Length;
    Console.WriteLine("Found {0} results", result);
    foreach (var item in results.ScoreDocs)
    {
        Document document = searcher.Doc(item.Doc);
        foreach (var i in document.fields_ForNUnit)
        {
            // Print only the fields whose stored value contains the search text.
            if (i.StringValue.Contains(searchItem))
            {
                Console.WriteLine("ID: {0}", document.Get("Id"));
                Console.WriteLine("Text found: {0}" + Environment.NewLine, i.StringValue);
                Console.WriteLine("Date Created: {0}" + Environment.NewLine, document.Get("DateCreated"));
            }
        }
    }
    Console.WriteLine("Press ENTER to quit...");
    Console.ReadLine();
    #endregion
}
/// <summary>
/// Verifies that values added to the map are retrievable by their keys.
/// </summary>
public void AddTest()
{
    string[] expectedKeys = { "test1", "test2", "test3" };
    string[] expectedValues = { "test3", "test4", "test5" };

    // Insert each key/value pair from the parallel arrays.
    for (int i = 0; i < expectedKeys.Length; i++)
    {
        hashMap.Add(expectedKeys[i], expectedValues[i]);
    }

    // Every key must yield the value it was inserted with.
    for (int i = 0; i < expectedKeys.Length; i++)
    {
        Assert.AreEqual(expectedValues[i], hashMap.GetValue(expectedKeys[i]));
    }
}
/// <summary>
/// Smoke-tests the full HashMap surface: Add, Remove, the indexer,
/// TryGetValue, ContainsKey and enumeration.
/// </summary>
public void HashMapAllFunctions()
{
    HashMap<int, int> hashMap = new HashMap<int, int>();
    hashMap.Add(1, 5);
    hashMap.Add(19, 17);
    hashMap.Add(7, 14);
    hashMap.Add(46, 18);
    hashMap.Add(6, 13);
    hashMap.Add(13, 49);
    hashMap.Add(84, 82);
    hashMap.Add(92, 98);
    hashMap.Add(31, 92);
    hashMap.Add(57, 13);
    hashMap.Add(78, 78);
    hashMap.Remove(7);
    hashMap[3] = 2; //if key doesn't exist, add a k/v pair
    int value;
    Assert.IsTrue(hashMap.TryGetValue(19, out value));
    // Key 7 was removed above.
    Assert.IsFalse(hashMap.ContainsKey(7));
    // NOTE(review): asserts bucket 13 is occupied — this couples the test to
    // the internal bucket layout ('pairs') and the hash function.
    Assert.IsFalse(hashMap.pairs[13] == null);
    for (int i = 0; i < hashMap.pairs.Length; i++)
    {
        if (hashMap.pairs[i] == null)
        {
            continue;
        }
        // Enumerates the whole map once per non-empty bucket; only checks that
        // enumeration does not throw.
        foreach (var pair in hashMap)
        {
        }
    }
}
} // prepare

/// <summary>
/// Deploys the web project: deploys all media servers (optionally redeploying
/// everything), copies the stage tree into containers, deletes containers no
/// longer staged, synchronizes the stage and container trees, and flushes the
/// relevant caches.
/// </summary>
/// <returns>Summary info with the number of copied containers.</returns>
protected override String DoIt()
{
    _apps_host = new VConnection();
    CacheHandler thisHandler = new CacheHandler
    //(CacheHandler.ConvertJNPURLToCacheURL(GetCtx().GetContext("java.naming.provider.url")), log, GetCtx(), Get_Trx());
    (CacheHandler.ConvertJNPURLToCacheURL((String)_apps_host.Apps_host), log, GetCtx(), Get_Trx());
    log.Info("CM_WebProject_ID=" + _CM_WebProject_ID);
    _project = new MWebProject(GetCtx(), _CM_WebProject_ID, Get_Trx());
    if (_project.Get_ID() != _CM_WebProject_ID)
    {
        throw new Exception("@NotFound@ @CM_WebProject_ID@ " + _CM_WebProject_ID);
    }
    log.Log(Level.INFO, "Starting media deployment");
    // Deploy Media
    MMediaServer[] mserver = MMediaServer.GetMediaServer(_project);
    for (int i = 0; i < mserver.Length; i++)
    {
        // BUG FIX: the original logged mserver.ToString() (the array), which
        // prints the array type instead of the server being deployed.
        log.Log(Level.INFO, "Media Server deployment started on: " + mserver[i].ToString());
        if (_isRedeploy)
        {
            mserver[i].ReDeployAll();
        }
        mserver[i].Deploy();
        log.Log(Level.INFO, "Media Server deployment finished on: " + mserver[i].ToString());
    }
    // Stage: index all stages by their id for the tree copy below.
    MCStage[] stages = MCStage.GetStages(_project);
    for (int i = 0; i < stages.Length; i++)
    {
        _map.Add(Utility.Util.GetValueOfInt(stages[i].GetCM_CStage_ID()), stages[i]);
    }
    // Copy Stage Tree
    MTree treeS = new MTree(GetCtx(), _project.GetAD_TreeCMS_ID(), false, false, Get_Trx());
    VTreeNode root = treeS.GetRootNode();
    CopyStage(root, "/", _isRedeploy);
    // Delete Inactive Containers (those not visited during the stage copy).
    MContainer[] containers = MContainer.GetContainers(_project);
    for (int i = 0; i < containers.Length; i++)
    {
        MContainer container = containers[i];
        if (!_idList.Contains(Utility.Util.GetValueOfInt(container.GetCM_Container_ID())))
        {
            String name = container.GetName();
            if (container.Delete(true))
            {
                log.Fine("Deleted: " + name);
            }
            else // e.g. was referenced
            {
                log.Warning("Failed Delete: " + name);
                AddLog(0, null, null, "@Error@ @Delete@: " + name);
            }
        }
        // Remove Container from cache
        // NOTE(review): this runs for EVERY container, not just deleted ones —
        // confirm whether a full cache flush per container is intended.
        thisHandler.CleanContainer(container.Get_ID());
    } // Delete Inactive
    // Sync Stage & Container Tree: mirror parent and sequence from CMS to CMC.
    MTreeNodeCMS[] nodesCMS = MTreeNodeCMS.GetTree(GetCtx(), _project.GetAD_TreeCMS_ID(), Get_Trx());
    MTreeNodeCMC[] nodesCMC = MTreeNodeCMC.GetTree(GetCtx(), _project.GetAD_TreeCMC_ID(), Get_Trx());
    for (int s = 0; s < nodesCMS.Length; s++)
    {
        MTreeNodeCMS nodeCMS = nodesCMS[s];
        int Node_ID = nodeCMS.GetNode_ID();
        for (int c = 0; c < nodesCMC.Length; c++)
        {
            MTreeNodeCMC nodeCMC = nodesCMC[c];
            if (nodeCMC.GetNode_ID() == Node_ID)
            {
                //if (nodeCMS.getParent_ID()!=0)
                nodeCMC.setParent_ID(nodeCMS.GetParent_ID());
                nodeCMC.SetSeqNo(nodeCMS.GetSeqNo());
                nodeCMC.Save();
                break;
            }
        }
    } // for all stage nodes
    // Clean ContainerTree Cache
    thisHandler.CleanContainerTree(_CM_WebProject_ID);
    return ("@Copied@ @CM_Container_ID@ #" + _idList.Count);
} // doIt