static MomentUtil() { MOMENT_TYPES = new HashMap<String, String>(9); MOMENT_TYPES.Put("AddActivity", "https://developers.google.com/+/plugins/snippet/examples/thing"); MOMENT_TYPES.Put("BuyActivity", "https://developers.google.com/+/plugins/snippet/examples/a-book"); MOMENT_TYPES.Put("CheckInActivity", "https://developers.google.com/+/plugins/snippet/examples/place"); MOMENT_TYPES.Put("CommentActivity", "https://developers.google.com/+/plugins/snippet/examples/blog-entry"); MOMENT_TYPES.Put("CreateActivity", "https://developers.google.com/+/plugins/snippet/examples/photo"); MOMENT_TYPES.Put("ListenActivity", "https://developers.google.com/+/plugins/snippet/examples/song"); MOMENT_TYPES.Put("ReserveActivity", "https://developers.google.com/+/plugins/snippet/examples/restaurant"); MOMENT_TYPES.Put("ReviewActivity", "https://developers.google.com/+/plugins/snippet/examples/widget"); MOMENT_LIST = new ArrayList<String>(MomentUtil.MOMENT_TYPES.KeySet()); Collections.Sort(MOMENT_LIST); VISIBLE_ACTIVITIES = MOMENT_TYPES.KeySet().ToArray(new String[0]); int count = VISIBLE_ACTIVITIES.Length; for (int i = 0; i < count; i++) { VISIBLE_ACTIVITIES[i] = "http://schemas.google.com/" + VISIBLE_ACTIVITIES[i]; } }
/// <summary> /// person.dic /// </summary> private void Init1() { TextReader br = null; try { _personNatureAttrs = new HashMap<string, PersonNatureAttr>(); br = MyStaticValue.GetPersonReader(); string temp; while ((temp = br.ReadLine()) != null) { var strs = temp.Split('\t'); var pna = _personNatureAttrs[strs[0]]; if (pna == null) { pna = new PersonNatureAttr(); } pna.AddFreq(int.Parse(strs[1]), int.Parse(strs[2])); _personNatureAttrs.Add(strs[0], pna); } } finally { if (br != null) br.Close(); } }
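// A sketch of the tab-separated layout Init1() above expects from person.dic, inferred from the split on '\t':
// a key in the first column followed by two integers that are passed to PersonNatureAttr.AddFreq.
// The sample rows below are illustrative only.
//
//   张<TAB>11<TAB>1502
//   王<TAB>7<TAB>984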
private List<Keyword> ComputeArticleTfidf(string content, int titleLength) { var tm = new HashMap<string, Keyword>(); var parse = NlpAnalysis.Parse(content); foreach (var term in parse) { var weight = getWeight(term, content.Length, titleLength); if (weight == 0) continue; var keyword = tm[term.Name]; if (keyword == null) { keyword = new Keyword(term.Name, term.Nature.allFrequency, weight); tm[term.Name] = keyword; } else { keyword.UpdateWeight(1); } } var treeSet = new SortedSet<Keyword>(tm.Values); var arrayList = new List<Keyword>(treeSet); if (treeSet.Count <= _keywordAmount) { return arrayList; } return arrayList.Take(_keywordAmount).ToList(); }
public static Map<string, string> Parse(string[] args) { string[] arr = new string[args.Length]; args.CopyTo(arr, 0); args = arr; Map<string, string> options = new HashMap<string, string>(); for (int i = 0; i < args.Length; i++) { if (args[i][0] == '-' || args[i][0] == '/') //This start with - or / { args[i] = args[i].Substring(1); if (i + 1 >= args.Length || args[i + 1][0] == '-' || args[i + 1][0] == '/') //Next start with - (or last arg) { options.Put(args[i], "null"); } else { options.Put(args[i], args[i + 1]); i++; } } } return options; }
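// A minimal usage sketch for Parse above; the switch names are illustrative only. A switch that is
// not followed by a value (or is the last argument) is stored with the literal string "null".
Map<string, string> options = Parse(new[] { "-host", "example.com", "/verbose", "-port", "8080" });
// expected contents: host = "example.com", verbose = "null", port = "8080"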
private IDictionary<string, Document> GenerateIndexDocuments(int ndocs) { IDictionary<string, Document> docs = new HashMap<string, Document>(); for (int i = 0; i < ndocs; i++) { Field field = new TextField(FIELD_NAME, "field_" + i, Field.Store.YES); Field payload = new StoredField(PAYLOAD_FIELD_NAME, new BytesRef("payload_" + i)); Field weight1 = new NumericDocValuesField(WEIGHT_FIELD_NAME_1, 10 + i); Field weight2 = new NumericDocValuesField(WEIGHT_FIELD_NAME_2, 20 + i); Field weight3 = new NumericDocValuesField(WEIGHT_FIELD_NAME_3, 30 + i); Field contexts = new StoredField(CONTEXTS_FIELD_NAME, new BytesRef("ctx_" + i + "_0")); Document doc = new Document(); doc.Add(field); doc.Add(payload); doc.Add(weight1); doc.Add(weight2); doc.Add(weight3); doc.Add(contexts); for (int j = 1; j < AtLeast(3); j++) { contexts.BytesValue = new BytesRef("ctx_" + i + "_" + j); doc.Add(contexts); } docs.Put(field.StringValue, doc); } return docs; }
public virtual void DoRandom(int iter, bool ignoreCase) { CharArrayMap<int?> map = new CharArrayMap<int?>(TEST_VERSION_CURRENT, 1, ignoreCase); HashMap<string, int?> hmap = new HashMap<string, int?>(); char[] key; for (int i = 0; i < iter; i++) { int len = Random().Next(5); key = new char[len]; for (int j = 0; j < key.Length; j++) { key[j] = (char)Random().Next(127); } string keyStr = new string(key); string hmapKey = ignoreCase ? keyStr.ToLower() : keyStr; int val = Random().Next(); object o1 = map.Put(key, val); object o2 = hmap.Put(hmapKey, val); assertEquals(o1, o2); // add it again with the string method assertEquals(val, map.Put(keyStr, val)); assertEquals(val, map.Get(key, 0, key.Length)); assertEquals(val, map.Get(key)); assertEquals(val, map.Get(keyStr)); assertEquals(hmap.Count, map.size()); } }
public void walkDomFragment(Node node, AbstractBehavior parentBehavior = null) { investigateNode(node, parentBehavior); //free this for GC when we are done, it has references to a lot of DOM elements this.extensionsToBeApplied = null; }
private HashMap getChartExamples() { HashMap examples = new HashMap(); ArrayList examplesSet = new ArrayList(); examplesSet.Add(new ListViewGettingStartedFragment()); examples.Put("Binding", examplesSet); examplesSet = new ArrayList(); examplesSet.Add(new ListViewReorderFragment()); examplesSet.Add(new ListViewSwipeToExecuteFragment()); examplesSet.Add(new ListViewSwipeToRefreshFragment()); examplesSet.Add(new ListViewItemAnimationFragment()); examplesSet.Add(new ListViewManualLoadOnDemandFragment()); examplesSet.Add(new ListViewDataAutomaticLoadOnDemandFragment()); examplesSet.Add(new ListViewDataOperationsFragment()); examplesSet.Add(new ListViewLayoutsFragment()); examples.Put("Features", examplesSet); return examples; }
public static void Main() { Console.Write("Please, enter some text: "); string text = Console.ReadLine(); var chars = text.AsEnumerable(); var charCounts = new HashMap<char, int>(); foreach (var character in chars) { if (charCounts.ContainsKey(character)) { charCounts[character]++; } else { charCounts[character] = 1; } } var sortedChars = charCounts.Keys.OrderBy(k => k).ToList(); foreach (var character in sortedChars) { Console.WriteLine("{0}: {1} time(s)", character, charCounts[character]); } }
private HashMap getListViewExamples() { HashMap examples = new HashMap(); ArrayList examplesSet = new ArrayList(); examplesSet.Add(new ListViewGettingStartedFragment()); examplesSet.Add(new ListViewLayoutsFragment()); examplesSet.Add(new ListViewDeckOfCardsFragment()); examplesSet.Add(new ListViewSlideFragment()); examplesSet.Add(new ListViewWrapFragment()); examplesSet.Add(new ListViewItemAnimationFragment()); examplesSet.Add(new ListViewDataOperationsFragment()); examples.Put("Features", examplesSet); examplesSet = new ArrayList(); examplesSet.Add(new ListViewReorderFragment()); examplesSet.Add(new ListViewSwipeToExecuteFragment()); examplesSet.Add(new ListViewSwipeToRefreshFragment()); examplesSet.Add(new ListViewManualLoadOnDemandFragment()); examplesSet.Add(new ListViewSelectionFragment()); examplesSet.Add(new ListViewDataAutomaticLoadOnDemandFragment()); examplesSet.Add (new ListViewStickyHeadersFragment ()); examples.Put("Behaviors", examplesSet); return examples; }
private void addItem(IList<IMap<String, Object>> data, String title, Intent intent) { HashMap<String, Object> temp = new HashMap<String, Object>(); temp.Put(TITLE_KEY, title); temp.Put(INTENT_KEY, intent); data.Add(temp); }
public HashMap<StyleExtensionMapEntry> getExtensionsForFragment(HtmlElement element) { var hashmap = new HashMap<StyleExtensionMapEntry>(); //We need to loop over all of the relevant entries in the map that define some behavior var allEntries = map.getAllRandoriSelectorEntries(); for ( var i=0; i<allEntries.length; i++) { JsArray<HtmlElement> implementingNodes = findChildNodesForCompoundSelector(element, allEntries[i]); //For each of those entries, we need to see if we have any elements in this DOM fragment that implement any of those classes for ( var j=0; j<implementingNodes.length; j++) { var implementingElement = implementingNodes[ j ]; var value = hashmap.get( implementingElement ); if ( value == null ) { //Get the needed entry var extensionEntry = map.getExtensionEntry(allEntries[i]); //give us a copy so we can screw with it at will hashmap.put(implementingElement, extensionEntry.clone()); } else { //We already have data for this node, so we need to merge the new data into the existing one var extensionEntry = map.getExtensionEntry(allEntries[i]); extensionEntry.mergeTo( (StyleExtensionMapEntry)value ); } } } //return the hashmap which can be queried and applied to the Dom return hashmap; }
public override void Flush(IDictionary<DocFieldConsumerPerThread, ICollection<DocFieldConsumerPerField>> threadsAndFields, SegmentWriteState state) { var childThreadsAndFields = new HashMap<InvertedDocConsumerPerThread, ICollection<InvertedDocConsumerPerField>>(); var endChildThreadsAndFields = new HashMap<InvertedDocEndConsumerPerThread, ICollection<InvertedDocEndConsumerPerField>>(); foreach (var entry in threadsAndFields) { var perThread = (DocInverterPerThread) entry.Key; ICollection<InvertedDocConsumerPerField> childFields = new HashSet<InvertedDocConsumerPerField>(); ICollection<InvertedDocEndConsumerPerField> endChildFields = new HashSet<InvertedDocEndConsumerPerField>(); foreach(DocFieldConsumerPerField field in entry.Value) { var perField = (DocInverterPerField)field; childFields.Add(perField.consumer); endChildFields.Add(perField.endConsumer); } childThreadsAndFields[perThread.consumer] = childFields; endChildThreadsAndFields[perThread.endConsumer] = endChildFields; } consumer.Flush(childThreadsAndFields, state); endConsumer.Flush(endChildThreadsAndFields, state); }
/// <summary> /// Creates the event for main activity. /// Sets layout to main, references the main page button, /// and signIn/signOut button clicks. /// </summary> /// <param name="bundle">Bundle.</param> protected override void OnCreate (Bundle bundle) { //For testing purposes //Allows us to use a self signed server certificate ServicePointManager.ServerCertificateValidationCallback += (sender, certificate, chain, sslPolicyErrors) => true; //End base.OnCreate (bundle); references = new HashMap (); // Set our view from the "main" layout resource SetContentView (Resource.Layout.Main); //References for Main Menu Items mButtonSignUp = FindViewById<Button> (Resource.Id.SignUpButton); mButtonSignIn = FindViewById<Button> (Resource.Id.SignInButton); //Click Events //Sign Up Click mButtonSignUp.Click += mButtonSignUp_Click; //Sign in Click opens mButtonSignIn.Click += MButtonSignIn_Click; msgText = FindViewById<TextView> (Resource.Id.msgText); //Check to see if the app can use GCM if (IsPlayServicesAvailable ()) { var intent = new Intent (this, typeof(RegistrationIntentService)); StartService (intent); } }
public static Map<RandomVariable, RandomVariable> getUmbrellaWorld_Xt_to_Xtm1_Map() { Map<RandomVariable, RandomVariable> tToTm1StateVarMap = new HashMap<RandomVariable, RandomVariable>(); tToTm1StateVarMap.put(ExampleRV.RAIN_t_RV, ExampleRV.RAIN_tm1_RV); return tToTm1StateVarMap; }
static void Main(string[] args) { var listOfIntegers = new LinkedList<int> {0, 1, 2, 3, 4, 5, 6, 7}; listOfIntegers.Remove(2); foreach (var entry in listOfIntegers) { Console.WriteLine(entry); } var hashMap = new HashMap<String, Int32>(); hashMap.Put("mike", 4); hashMap.Put("chris", 7); Console.WriteLine("KEY: 'mike'\t VALUE:{0}", hashMap.GetValue("mike")); foreach (var keyValuePair in hashMap) { Console.WriteLine(keyValuePair); } var tree = new Tree<int> {4, 6, 9, 2}; foreach (var value in tree) { Console.Write("{0}\t", value); } Console.Read(); }
public override void Flush(IDictionary<DocFieldConsumerPerThread, ICollection<DocFieldConsumerPerField>> threadsAndFields, SegmentWriteState state) { var oneThreadsAndFields = new HashMap<DocFieldConsumerPerThread, ICollection<DocFieldConsumerPerField>>(); var twoThreadsAndFields = new HashMap<DocFieldConsumerPerThread, ICollection<DocFieldConsumerPerField>>(); foreach(var entry in threadsAndFields) { DocFieldConsumersPerThread perThread = (DocFieldConsumersPerThread) entry.Key; ICollection<DocFieldConsumerPerField> fields = entry.Value; IEnumerator<DocFieldConsumerPerField> fieldsIt = fields.GetEnumerator(); ICollection<DocFieldConsumerPerField> oneFields = new HashSet<DocFieldConsumerPerField>(); ICollection<DocFieldConsumerPerField> twoFields = new HashSet<DocFieldConsumerPerField>(); while (fieldsIt.MoveNext()) { DocFieldConsumersPerField perField = (DocFieldConsumersPerField) fieldsIt.Current; oneFields.Add(perField.one); twoFields.Add(perField.two); } oneThreadsAndFields[perThread.one] = oneFields; twoThreadsAndFields[perThread.two] = twoFields; } one.Flush(oneThreadsAndFields, state); two.Flush(twoThreadsAndFields, state); }
private HashMap getChartExamples() { HashMap chartExamples = new HashMap(); ArrayList result = new ArrayList(); result.Add(new AreaSeriesFragment()); result.Add(new LineSeriesFragment()); result.Add(new CandleStickSeriesFragment()); result.Add(new DoughnutSeriesFragment()); result.Add(new HorizontalBarSeriesFragment()); result.Add(new IndicatorSeriesFragment()); result.Add(new OhlcSeriesFragment()); result.Add(new PieSeriesFragment()); result.Add(new ScatterBubbleSeriesFragment()); result.Add(new ScatterPointSeriesFragment()); result.Add(new SplineAreaSeriesFragment()); result.Add(new SplineSeriesFragment()); result.Add(new StackAreaSeriesFragment()); result.Add(new StackBarSeriesFragment()); result.Add(new StackBarSeriesFragment()); result.Add(new StackSplineAreaSeriesFragment()); result.Add(new VerticalBarSeriesFragment()); chartExamples.Put("Series", result); result = new ArrayList(); result.Add(new ChartLegendFragment()); result.Add(new GridFeatureFragment()); result.Add(new PalettesFragment()); chartExamples.Put("Features", result); result = new ArrayList(); result.Add(new PanAndZoomFragment()); result.Add(new SelectionBehaviorFragment()); result.Add(new TooltipBehaviorFragment()); result.Add(new TrackballBehaviorFragment()); chartExamples.Put("Behaviors", result); result = new ArrayList(); result.Add(new DateTimeContinuousAxisFragment()); result.Add(new MultipleAxesFragment()); chartExamples.Put("Axes", result); result = new ArrayList(); result.Add(new GridLineAnnotationFragment()); result.Add(new PlotBandAnnotationFragment()); chartExamples.Put("Annotations", result); return chartExamples; }
public void walkDomChildren(Node parentNode, AbstractBehavior parentBehavior = null) { //The fact that we have two entry point into here walkChildren and walkDomFragment continues to screw us walkChildren(parentNode, parentBehavior); //free this for GC when we are done, it has references to a lot of DOM elements this.extensionsToBeApplied = null; }
protected internal Manifest(java.io.InputStream isJ, bool readChunks) { //throws IOException { if (readChunks) { chunks = new HashMap<String, Chunk>(); } read(isJ); }
static void Main(string[] args) { RezeptModel rezeptBasiskuchen = new RezeptModel(); rezeptBasiskuchen.SetName("Basiskuchen"); rezeptBasiskuchen.SetZutatMehl(500); rezeptBasiskuchen.SetZutatZucker(100); rezeptBasiskuchen.SetZutatButter(100); rezeptBasiskuchen.SetZutatEier(4); RezeptModel rezeptZweiterkuchen = new RezeptModel(); rezeptZweiterkuchen.SetName("Zweiter Kuchen"); rezeptZweiterkuchen.SetZutatMehl(520); rezeptZweiterkuchen.SetZutatZucker(120); rezeptZweiterkuchen.SetZutatButter(120); rezeptZweiterkuchen.SetZutatEier(4); RezeptModel rezeptDritterkuchen = new RezeptModel(); rezeptDritterkuchen.SetName("Dritter Kuchen"); rezeptDritterkuchen.SetZutatMehl(250); rezeptDritterkuchen.SetZutatZucker(55); rezeptDritterkuchen.SetZutatButter(90); rezeptDritterkuchen.SetZutatEier(2); RezeptModel rezeptVierterkuchen = new RezeptModel(); rezeptVierterkuchen.SetName("Vierter Kuchen"); rezeptVierterkuchen.SetZutatMehl(400); rezeptVierterkuchen.SetZutatZucker(120); rezeptVierterkuchen.SetZutatButter(130); rezeptVierterkuchen.SetZutatEier(3); RezeptModel vorhandeneZutaten = new RezeptModel(); vorhandeneZutaten.SetZutatMehl(400); vorhandeneZutaten.SetZutatZucker(120); vorhandeneZutaten.SetZutatButter(1130); vorhandeneZutaten.SetZutatEier(3); VergleichRezept(vorhandeneZutaten, rezeptBasiskuchen); VergleichRezept(vorhandeneZutaten, rezeptZweiterkuchen); VergleichRezept(vorhandeneZutaten, rezeptDritterkuchen); VergleichRezept(vorhandeneZutaten, rezeptVierterkuchen); // Create a HashMap with three key/value pairs. HashMap hm = new HashMap(); hm.put("One", "1"); hm.put("Two", "2a"); hm.put("Two", "2b"); hm.put("Three", "3"); // Iterate over the HashMap to see what we just put in. Set set = hm.entrySet(); Iterator setIter = set.iterator(); while (setIter.hasNext()) { System.out.println(setIter.next()); } Console.ReadKey(); }
public QuizListenerService () { var temp = new HashMap (4); temp.Put (0, Resource.Drawable.ic_choice_a); temp.Put (1, Resource.Drawable.ic_choice_b); temp.Put (2, Resource.Drawable.ic_choice_c); temp.Put (3, Resource.Drawable.ic_choice_d); question_num_to_drawable_id = temp; }
public static Map<string, string> GetFilesAsStringMap(string[] files) { HashMap<string, string> map = new HashMap<string, string>(); foreach (string file in files) { map.Put(file, GetFileAsString(file)); } return map; }
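// A minimal usage sketch for GetFilesAsStringMap above; the file names are illustrative only, and
// the Get accessor on the returned Map is an assumption (only Put appears in the method itself).
Map<string, string> contents = GetFilesAsStringMap(new[] { "notes.txt", "todo.txt" });
string notes = contents.Get("notes.txt"); // full text of notes.txt, keyed by the path passed in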
/// <summary> /// Only constructor for the class. No default available. /// </summary> /// <param name="symbol">The group symbol for which the corresponding subscription /// was created.</param> public MamdaMultiParticipantManager(string symbol) { mNotifiedConsolidatedCreate = false; mConsolidatedListeners = new ArrayList(); mParticipants = new HashMap(); mHandlers = new ArrayList(); mSymbol = symbol; mIsPrimaryParticipant = new NullableBool(true); }
private HashMap getSideDrawerExamples(){ HashMap sideDrawerExamples = new HashMap(); ArrayList result = new ArrayList(); result.Add (new DrawerInitialSetupFragment()); sideDrawerExamples.Put ("Init", result); return sideDrawerExamples; }
static DateTimeFormatting() { var conv = new HashMap<string, string>(); conv.Put("d", "d"); conv.Put("dd", "dd"); conv.Put("ddd", "E"); conv.Put("dddd", "EEEE"); conv.Put("f", "S"); conv.Put("ff", "SS"); conv.Put("fff", "SSS"); conv.Put("ffff", "SSSS"); conv.Put("fffff", "SSSSS"); conv.Put("ffffff", "SSSSSS"); conv.Put("fffffff", "SSSSSSS"); conv.Put("F", "S"); conv.Put("FF", "SS"); conv.Put("FFF", "SSS"); conv.Put("FFFF", "SSSS"); conv.Put("FFFFF", "SSSSS"); conv.Put("FFFFFF", "SSSSSS"); conv.Put("FFFFFFF", "SSSSSSS"); conv.Put("g", "G"); conv.Put("gg", "GG"); conv.Put("h", "K"); conv.Put("hh", "KK"); conv.Put("H", "H"); conv.Put("HH", "HH"); // K is handled in Code. //conv.Put("K", "X"); conv.Put("m", "m"); conv.Put("mm", "mm"); conv.Put("M", "M"); conv.Put("MM", "MM"); conv.Put("MMM", "MMM"); conv.Put("MMMM", "MMMM"); conv.Put("s", "s"); conv.Put("ss", "ss"); conv.Put("t", "a"); conv.Put("tt", "aa"); conv.Put("y", "y"); conv.Put("yy", "yy"); conv.Put("yyy", "yyy"); conv.Put("yyyy", "yyyy"); conv.Put("yyyyy", "yyyyy"); conv.Put("yyyyy", "yyyyy"); conv.Put("yyyyyy", "yyyyyy"); conv.Put("yyyyyyy", "yyyyyyy"); conv.Put("z", "Z"); conv.Put("zz", "ZZ"); conv.Put("zzz", "ZZZ"); //conv.Put("%", ""); Conversion = conv; }
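// Sample entries from the Conversion table built above, which appears to map .NET custom
// date/time format tokens to java.text.SimpleDateFormat-style tokens (the code that walks a
// format string and consumes this table is not shown here):
//
//   "dddd" -> "EEEE"   full day-of-week name
//   "tt"   -> "aa"     AM/PM designator
//   "fff"  -> "SSS"    fractional seconds, three digits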
private HashMap getCalendarExamples() { HashMap calendarExamples = new HashMap(); ArrayList result = new ArrayList(); result.Add (new DrawerInitialSetupFragment()); calendarExamples.Put ("Init", result); return calendarExamples; }
public static bool test() { var map1 = new HashMap<string, Float> { { "K1.0", 1f }, { "K2.0", 2f }, { "K3.0", 3f }}; var map2 = Query.asIterable(new[] { 1f, 2f, 3f }).toMap(p => "K" + p); int i = 0; foreach (var k in map2.keySet()) { if (!map1[k].equals(map2.get(k))) { return false; } i++; } return map2.size() == 3 && i == 3; }
public string putQualified(String name) { if (_qualifiedMap == null) { _qualifiedMap = new HashMap <String, String>(); } int i = name.lastIndexOf('.'); string shortName; if (i > 0) { shortName = name.substring(i + 1); } else { shortName = name; } putQualified(shortName, name); return(shortName); }
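// Worked example for putQualified above: the method strips everything up to the last '.' and,
// presumably via the two-argument overload (not shown), records shortName -> fully qualified name
// in _qualifiedMap before returning the short name.
//
//   putQualified("java.util.HashMap")  returns "HashMap"  (maps "HashMap" -> "java.util.HashMap")
//   putQualified("HashMap")            returns "HashMap"  (maps "HashMap" -> "HashMap")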
public void Configurable_PropSheetFromConfigurableInstance() { const string testString = "test"; var props = new HashMap <String, Object>(); props.Put(PROP_ASTRING, testString); props.Put(PROP_DATA_PROC, new DummyProcessor()); var tc = ConfigurationManager.GetInstance <TestConfigurable>(typeof(TestConfigurable), props); // now create a property sheet in order to modify the configurable var propSheet = new PropertySheet(tc, null, new RawPropertyData("tt", tc.GetType().Name), new ConfigurationManager()); propSheet.SetComponent(PROP_DATA_PROC, "tt", new AnotherDummyProcessor()); tc.NewProperties(propSheet); // test whether old props were preserved and new ones were applied // FIXME: Its by design not possible to preserve the old properties without have a CM // probably we should remove the possibility to let the user create PropertySheet instances. // Assert.assertTrue(tc.myString.equals(testString)); // Assert.assertTrue(tc.gamma == testDouble); Assert.IsTrue(tc.dataProc != null && tc.dataProc is AnotherDummyProcessor); }
private void SaveButton_Click(object sender, EventArgs e) { if (InputValid(LocET.Text, AgeET.Text, LVLET.Text, c)) { //add to firebase Group group = new Group(AgeET.Text, LVLET.Text, CompRB.Checked, LocET.Text, TimeButton.Text); HashMap map = new HashMap(); map.Put("Location", group.Location); map.Put("Level", group.geoupLevel); map.Put("Age", group.age); map.Put("Comp", group.competetive); map.Put("Time", group.time); DocumentReference docref = database.Collection("Users").Document(admin1.email).Collection("Groups").Document(group.Location + " " + group.time + " " + group.age); docref.Set(map); HashMap map2 = new HashMap(); Toasty.Config.Instance .TintIcon(true) .SetToastTypeface(Typeface.CreateFromAsset(Assets, "Katanf.ttf")); Toasty.Success(this, "Group Added Successfully", 5, true).Show(); GrouD.Dismiss(); d.Dismiss(); this.Recreate(); } }

private _ANSWER solve2(string s) { var map = new HashMap <char, int>(); foreach (var x in s) { map[x]++; } var chk = "ZWXSGVFOTN"; var val = new string[] { "ZERO", "TWO", "SIX", "SEVEN", "EIGHT", "FIVE", "FOUR", "ONE", "THREE", "NINE" }; var otaku = new int[] { 0, 2, 6, 7, 8, 5, 4, 1, 3, 9 }; var ans = new List <int>(); for (int i = 0; i < 10; i++) { var k = 1; if (val[i] == "NINE") { k = 2; } var a = map[chk[i]]; for (int j = 0; j < a / k; j++) { foreach (var x in val[i]) { map[x]--; } ans.Add(otaku[i]); } } ans.Sort(); return(ans.AsJoinedString("")); }
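// Note on the ordering in solve2 above: the check string "ZWXSGVFOTN" visits digits whose spelled-out
// names contain a letter that is unique among the words still unaccounted for. Z occurs only in ZERO,
// W only in TWO, X only in SIX; S then identifies SEVEN once SIX is removed, G identifies EIGHT,
// V identifies FIVE after SEVEN, F identifies FOUR after FIVE, O identifies ONE after ZERO/TWO/FOUR,
// T identifies THREE after TWO/EIGHT, and the remaining N's belong to NINE (k = 2 because NINE
// contains two N's).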
private void InitializeMultiFieldQueryParser(bool PreProcess) { String[] fields = new String[] { SEDocument.TITLE_FN, SEDocument.ABSTRACT_FN }; if (PreProcess) { HashMap <string, float> boosts = new HashMap <string, float>(); boosts.Add(SEDocument.TITLE_FN, (float)10); boosts.Add(SEDocument.ABSTRACT_FN, (float)5); _MultiFieldQueryParser = new MultiFieldQueryParser( Lucene.Net.Util.Version.LUCENE_30, fields, _Analyzer , boosts ); } else { _MultiFieldQueryParser = new MultiFieldQueryParser( Lucene.Net.Util.Version.LUCENE_30, fields, _Analyzer); } _MultiFieldQueryParser.DefaultOperator = MultiFieldQueryParser.OR_OPERATOR; }
private void investigateNode(Node node, AbstractBehavior parentBehavior) { if (node.nodeType == Node.ELEMENT_NODE) { if (extensionsToBeApplied == null) { //We build our extension cache from the first element we find entryElement = node.As <HtmlElement>(); extensionsToBeApplied = styleExtensionManager.getExtensionsForFragment(entryElement); } //Just an optimization, need to create constants for all of these things.... removed as kendo uses script nodes for templates node.nodeName == "SCRIPT" || if (node.nodeName == "META") { return; } if (node.nodeName == "LINK") { investigateLinkElement(node.As <HtmlLinkElement>()); } else { investigateDomElement(node.As <HtmlElement>(), parentBehavior); } } else if (node.nodeType == Node.TEXT_NODE) { //This is a text node, check to see if it needs internationalization localizationProvider.investigateTextNode(node); } else { walkChildren(node, parentBehavior); } }
public static int test() { var map1 = new HashMap <Float, string> { { 1f, "V1" }, { 2f, "V2" }, { 3f, "V3" } }; var map2 = Query.empty <string>().toMap(p => 0f); map2.putAll(map1); int i = 0; foreach (var e in map2.entrySet()) { if (!map1[e.Key].equals(e.Value)) { return(1); } i++; } if (i != 3) { return(2); } return(0); }
private static HashMap importSymbols(string text) { File file = new File(text); if (!file.exists() || !file.isFile()) { return(null); } FileInputStream fileInputStream = new FileInputStream(text); DataInputStream dataInputStream = new DataInputStream(fileInputStream); BufferedReader bufferedReader = new BufferedReader(new InputStreamReader(dataInputStream)); HashMap hashMap = new HashMap(); string text2; while ((text2 = bufferedReader.readLine()) != null) { string[] array = String.instancehelper_split(text2, "\\t"); string text3 = array[0]; Integer integer = Integer.valueOf(Integer.parseInt(array[1])); hashMap.put(text3, integer); } bufferedReader.close(); return(hashMap); }
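// A sketch of the symbol file layout importSymbols above parses, inferred from the split on "\t":
// one "name<TAB>integer" pair per line, loaded into the returned HashMap as symbol -> Integer.
// The names and values below are illustrative only.
//
//   START<TAB>0
//   LOOP<TAB>16
//   END<TAB>42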
private void ProcessTerms(System.String[] queryTerms) { if (queryTerms != null) { System.Array.Sort(queryTerms); IDictionary <string, int> tmpSet = new HashMap <string, int>(queryTerms.Length); //filter out duplicates IList <string> tmpList = new List <string>(queryTerms.Length); IList <int> tmpFreqs = new List <int>(queryTerms.Length); int j = 0; for (int i = 0; i < queryTerms.Length; i++) { var term = queryTerms[i]; var position = tmpSet[term]; if (!tmpSet.ContainsKey(term)) // if temp_position == null { tmpSet[term] = j++; tmpList.Add(term); tmpFreqs.Add(1); } else { int integer = tmpFreqs[position]; tmpFreqs[position] = (integer + 1); } } terms = tmpList.ToArray(); //termFreqs = (int[])tmpFreqs.toArray(termFreqs); termFreqs = new int[tmpFreqs.Count]; int i2 = 0; foreach (int integer in tmpFreqs) { termFreqs[i2++] = integer; } } }
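// Worked example for ProcessTerms above: for queryTerms = { "b", "a", "a", "c" } the array is first
// sorted to { "a", "a", "b", "c" }; the HashMap assigns each distinct term its insertion index, so
// terms ends up as { "a", "b", "c" } and termFreqs as { 2, 1, 1 }.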
protected virtual void Dispose(bool disposing) { if (isDisposed) { return; } if (disposing) { // Clear the hard refs; then, the only remaining refs to // all values we were storing are weak (unless somewhere // else is still using them) and so GC may reclaim them: hardRefs = null; // Take care of the current thread right now; others will be // taken care of via the WeakReferences. if (t != null) { t.Remove(); } t = null; } isDisposed = true; }
private void DeleteFile_Click(object sender, RoutedEventArgs e) { // to delete we just mark for not to be reincluded and do a round of saving PDDocumentCatalog catalog = pd.getDocumentCatalog(); PDDocumentNameDictionary names = catalog.getNames(); PDEmbeddedFilesNameTreeNode embeddedFiles = names.getEmbeddedFiles(); Map embeddedFileNames = embeddedFiles.getNames(); Dictionary <String, PDComplexFileSpecification> embeddedFileNamesNet = embeddedFileNames.ToDictionary <String, PDComplexFileSpecification>(); Map TomsNewMap = new HashMap(); // Attach all the existing files foreach (KeyValuePair <String, PDComplexFileSpecification> entry in embeddedFileNamesNet) { if (selectedFile == entry.Key) { // } else { TomsNewMap.put(entry.Key, entry.Value); } } PDEmbeddedFilesNameTreeNode TomsEmbeddedFiles = new PDEmbeddedFilesNameTreeNode(); TomsEmbeddedFiles.setNames(TomsNewMap); names.setEmbeddedFiles(TomsEmbeddedFiles); catalog.setNames(names); pd.save(pathToCurrentPDF); // reload the PDF LoadPDFAndLookForAttachments(pathToCurrentPDF); ButtonsStackPanel.Visibility = Visibility.Hidden; }
/** * Set the ini value for the given scope. */ protected void set(HashMap <String, Value> map, int scope, Value value) { if (scope == PHP_INI_USER && !(_scope == PHP_INI_USER || _scope == PHP_INI_ALL)) { // do nothing } else if (_type == Type.BOOLEAN) { map.put(_name, toBooleanValue(value)); } /* up to modules to interpret values * http://bugs.caucho.com/view.php?id=2095 * else if (_type == Type.LONG) * map.put(_name, toLongValue(value)); */ else { map.put(_name, value); } }
protected HashMap <String, int?> GetOrCreateRelationMemberNameToIndexMap(Type entityType) { if (typeToMemberNameToIndexMap == null) { typeToMemberNameToIndexMap = new HashMap <Type, HashMap <String, int?> >(); } HashMap <String, int?> memberNameToIndexMap = typeToMemberNameToIndexMap.Get(entityType); if (memberNameToIndexMap != null) { return(memberNameToIndexMap); } IEntityMetaData metaData = entityMetaDataProvider.GetMetaData(entityType); RelationMember[] relationMembers = metaData.RelationMembers; memberNameToIndexMap = HashMap <String, int?> .Create(relationMembers.Length); for (int a = relationMembers.Length; a-- > 0;) { memberNameToIndexMap.Put(relationMembers[a].Name, a); } typeToMemberNameToIndexMap.Put(entityType, memberNameToIndexMap); return(memberNameToIndexMap); }
/** * /// Sets the best token Map. * * /// @param bestTokenMap the new best token Map */ protected void SetBestTokenMap(HashMap <ISearchState, Token> bestTokenMap) { BestTokenMap = bestTokenMap; }
public SubmissionProfile(IDataReference @ref, String method, String action, String mediatype, HashMap<String, String> attributeMap)
/** Returns Pair(list of invalid document terms, Map of document term -> document) */ private KeyValuePair <List <string>, IDictionary <string, Document> > GenerateIndexDocuments(int ndocs, bool requiresPayload, bool requiresContexts) { IDictionary <string, Document> docs = new HashMap <string, Document>(); List <string> invalidDocTerms = new List <string>(); for (int i = 0; i < ndocs; i++) { Document doc = new Document(); bool invalidDoc = false; Field field = null; // usually have valid term field in document if (Usually()) { field = new TextField(FIELD_NAME, "field_" + i, Field.Store.YES); doc.Add(field); } else { invalidDoc = true; } // even if payload is not required usually have it if (requiresPayload || Usually()) { // usually have valid payload field in document if (Usually()) { Field payload = new StoredField(PAYLOAD_FIELD_NAME, new BytesRef("payload_" + i)); doc.Add(payload); } else if (requiresPayload) { invalidDoc = true; } } if (requiresContexts || Usually()) { if (Usually()) { for (int j = 0; j < AtLeast(2); j++) { doc.Add(new StoredField(CONTEXT_FIELD_NAME, new BytesRef("context_" + i + "_" + j))); } } // we should allow entries without context } // usually have valid weight field in document if (Usually()) { Field weight = (Rarely()) ? (Field) new StoredField(WEIGHT_FIELD_NAME, 100d + i) : (Field) new NumericDocValuesField(WEIGHT_FIELD_NAME, 100 + i); doc.Add(weight); } string term = null; if (invalidDoc) { term = (field != null) ? field.GetStringValue() : "invalid_" + i; invalidDocTerms.Add(term); } else { term = field.GetStringValue(); } docs.Put(term, doc); } return(new KeyValuePair <List <string>, IDictionary <string, Document> >(invalidDocTerms, docs)); }
public override void Run(object sender, System.EventArgs e) { DF3DApplication app = DF3DApplication.Application; if (app == null || app.Current3DMapControl == null) { return; } try { Map3DCommandManager.Push(this); RenderControlEditServices.Instance().StopGeometryEdit(true); app.Current3DMapControl.PauseRendering(false); System.Collections.Generic.IList <System.Collections.Generic.KeyValuePair <int, string> > list = new System.Collections.Generic.List <System.Collections.Generic.KeyValuePair <int, string> >(); HashMap featureClassInfoMap = SelectCollection.Instance().FeatureClassInfoMap; DF3DFeatureClass featureClassInfo = null; System.Collections.Generic.IList <int> list2 = new System.Collections.Generic.List <int>(); System.Collections.IEnumerator enumerator = featureClassInfoMap.Keys.GetEnumerator(); try { if (enumerator.MoveNext()) { DF3DFeatureClass featureClassInfo2 = (DF3DFeatureClass)enumerator.Current; featureClassInfo = featureClassInfo2; ResultSetInfo resultSetInfo = featureClassInfoMap[featureClassInfo2] as ResultSetInfo; foreach (DataRow dataRow in resultSetInfo.ResultSetTable.Rows) { int num = int.Parse(dataRow[featureClassInfo.GetFeatureClass().FidFieldName].ToString()); string value = num.ToString(); System.Collections.Generic.KeyValuePair <int, string> item = new System.Collections.Generic.KeyValuePair <int, string>(num, value); list.Add(item); list2.Add(num); } } } finally { System.IDisposable disposable = enumerator as System.IDisposable; if (disposable != null) { disposable.Dispose(); } } if (featureClassInfo != null) { using (MergeDlg mergeDlg = new MergeDlg(list)) { if (mergeDlg.ShowDialog() == System.Windows.Forms.DialogResult.OK) { if (System.Windows.Forms.DialogResult.No != XtraMessageBox.Show("模型合并不支持撤销操作,是否继续?", "提示", System.Windows.Forms.MessageBoxButtons.YesNo, System.Windows.Forms.MessageBoxIcon.Exclamation)) { int fid = mergeDlg.Fid; using (new WaitDialogForm("", "正在进行模型合并,请稍后...")) { IFeatureClass featureClass = featureClassInfo.GetFeatureClass(); string geometryFieldName = featureClassInfo.GetFeatureLayer().GeometryFieldName; IModelPoint model = this.GetModel(featureClass, geometryFieldName, fid); IResourceManager resourceManager = CommonUtils.Instance().GetCurrentFeatureDataset() as IResourceManager; if (resourceManager != null) { if (!this.MergeModels(featureClass, geometryFieldName, list2.ToArray <int>(), resourceManager, ref model)) { XtraMessageBox.Show("模型合并失败!"); } else { if (list2.Remove(fid)) { featureClass.Delete(new QueryFilterClass { IdsFilter = list2.ToArray <int>() }); CommonUtils.Instance().Delete(featureClassInfo, list2.ToArray <int>()); app.Current3DMapControl.FeatureManager.DeleteFeatures(featureClass, list2.ToArray <int>()); } app.Current3DMapControl.RefreshModel(CommonUtils.Instance().GetCurrentFeatureDataset(), model.ModelName); IFdeCursor fdeCursor = featureClass.Update(new QueryFilterClass { IdsFilter = new int[] { fid } }); IRowBuffer rowBuffer = fdeCursor.NextRow(); if (rowBuffer != null) { int num2 = rowBuffer.FieldIndex(geometryFieldName); if (num2 != -1) { rowBuffer.SetValue(num2, model); } fdeCursor.UpdateRow(rowBuffer); System.Runtime.InteropServices.Marshal.ReleaseComObject(fdeCursor); app.Current3DMapControl.FeatureManager.EditFeature(featureClass, rowBuffer); } //System.Runtime.InteropServices.Marshal.ReleaseComObject(featureClass); } } } } } } } } catch (System.Runtime.InteropServices.COMException ex) { XtraMessageBox.Show(ex.Message); } catch (System.UnauthorizedAccessException var_23_389) { XtraMessageBox.Show("拒绝访问"); } 
catch (System.Exception ex2) { XtraMessageBox.Show(ex2.Message); } finally { app.Current3DMapControl.ResumeRendering(); } }
public void BeforeClass() { ANALYZER = new MockAnalyzer(Random()); qp = new StandardQueryParser(ANALYZER); HashMap <String, /*Number*/ object> randomNumberMap = new HashMap <string, object>(); /*SimpleDateFormat*/ string dateFormat; long randomDate; bool dateFormatSanityCheckPass; int count = 0; do { if (count > 100) { fail("This test has problems to find a sane random DateFormat/NumberFormat. Stopped trying after 100 iterations."); } dateFormatSanityCheckPass = true; LOCALE = randomLocale(Random()); TIMEZONE = randomTimeZone(Random()); DATE_STYLE = randomDateStyle(Random()); TIME_STYLE = randomDateStyle(Random()); //// assumes localized date pattern will have at least year, month, day, //// hour, minute //dateFormat = (SimpleDateFormat)DateFormat.getDateTimeInstance( // DATE_STYLE, TIME_STYLE, LOCALE); //// not all date patterns includes era, full year, timezone and second, //// so we add them here //dateFormat.applyPattern(dateFormat.toPattern() + " G s Z yyyy"); //dateFormat.setTimeZone(TIMEZONE); DATE_FORMAT = new NumberDateFormat(DATE_STYLE, TIME_STYLE, LOCALE) { TimeZone = TIMEZONE }; dateFormat = DATE_FORMAT.GetDateFormat(); do { randomDate = Random().nextLong(); // prune date value so it doesn't pass in insane values to some // calendars. randomDate = randomDate % 3400000000000L; // truncate to second randomDate = (randomDate / 1000L) * 1000L; // only positive values randomDate = Math.Abs(randomDate); } while (randomDate == 0L); dateFormatSanityCheckPass &= checkDateFormatSanity(dateFormat, randomDate); dateFormatSanityCheckPass &= checkDateFormatSanity(dateFormat, 0); dateFormatSanityCheckPass &= checkDateFormatSanity(dateFormat, -randomDate); count++; } while (!dateFormatSanityCheckPass); //NUMBER_FORMAT = NumberFormat.getNumberInstance(LOCALE); //NUMBER_FORMAT.setMaximumFractionDigits((Random().nextInt() & 20) + 1); //NUMBER_FORMAT.setMinimumFractionDigits((Random().nextInt() & 20) + 1); //NUMBER_FORMAT.setMaximumIntegerDigits((Random().nextInt() & 20) + 1); //NUMBER_FORMAT.setMinimumIntegerDigits((Random().nextInt() & 20) + 1); NUMBER_FORMAT = new NumberFormat(LOCALE); double randomDouble; long randomLong; int randomInt; float randomFloat; while ((randomLong = Convert.ToInt64(NormalizeNumber(Math.Abs(Random().nextLong())) )) == 0L) { ; } while ((randomDouble = Convert.ToDouble(NormalizeNumber(Math.Abs(Random().NextDouble())) )) == 0.0) { ; } while ((randomFloat = Convert.ToSingle(NormalizeNumber(Math.Abs(Random().nextFloat())) )) == 0.0f) { ; } while ((randomInt = Convert.ToInt32(NormalizeNumber(Math.Abs(Random().nextInt())))) == 0) { ; } randomNumberMap.Put(FieldType.NumericType.LONG.ToString(), randomLong); randomNumberMap.Put(FieldType.NumericType.INT.ToString(), randomInt); randomNumberMap.Put(FieldType.NumericType.FLOAT.ToString(), randomFloat); randomNumberMap.Put(FieldType.NumericType.DOUBLE.ToString(), randomDouble); randomNumberMap.Put(DATE_FIELD_NAME, randomDate); RANDOM_NUMBER_MAP = Collections.UnmodifiableMap(randomNumberMap); directory = NewDirectory(); RandomIndexWriter writer = new RandomIndexWriter(Random(), directory, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random())) .SetMaxBufferedDocs(TestUtil.NextInt(Random(), 50, 1000)) .SetMergePolicy(NewLogMergePolicy())); Document doc = new Document(); HashMap <String, NumericConfig> numericConfigMap = new HashMap <String, NumericConfig>(); HashMap <String, Field> numericFieldMap = new HashMap <String, Field>(); qp.NumericConfigMap = (numericConfigMap); foreach (FieldType.NumericType type in 
Enum.GetValues(typeof(FieldType.NumericType))) { numericConfigMap.Put(type.ToString(), new NumericConfig(PRECISION_STEP, NUMBER_FORMAT, type)); FieldType ft2 = new FieldType(IntField.TYPE_NOT_STORED); ft2.NumericTypeValue = (type); ft2.Stored = (true); ft2.NumericPrecisionStep = (PRECISION_STEP); ft2.Freeze(); Field field; switch (type) { case FieldType.NumericType.INT: field = new IntField(type.ToString(), 0, ft2); break; case FieldType.NumericType.FLOAT: field = new FloatField(type.ToString(), 0.0f, ft2); break; case FieldType.NumericType.LONG: field = new LongField(type.ToString(), 0L, ft2); break; case FieldType.NumericType.DOUBLE: field = new DoubleField(type.ToString(), 0.0, ft2); break; default: fail(); field = null; break; } numericFieldMap.Put(type.ToString(), field); doc.Add(field); } numericConfigMap.Put(DATE_FIELD_NAME, new NumericConfig(PRECISION_STEP, DATE_FORMAT, FieldType.NumericType.LONG)); FieldType ft = new FieldType(LongField.TYPE_NOT_STORED); ft.Stored = (true); ft.NumericPrecisionStep = (PRECISION_STEP); LongField dateField = new LongField(DATE_FIELD_NAME, 0L, ft); numericFieldMap.Put(DATE_FIELD_NAME, dateField); doc.Add(dateField); foreach (NumberType numberType in Enum.GetValues(typeof(NumberType))) { setFieldValues(numberType, numericFieldMap); if (VERBOSE) { Console.WriteLine("Indexing document: " + doc); } writer.AddDocument(doc); } reader = writer.Reader; searcher = NewSearcher(reader); writer.Dispose(); }
public void recThread() { Init_accessToken(); HttpsUtil http_utils_test = new HttpsUtil(); Map paramQueryDeviceData = new HashMap(); paramQueryDeviceData.put("pageNo", "0"); paramQueryDeviceData.put("appId", appId); paramQueryDeviceData.put("pageSize", "2048"); Map header = new HashMap(); header.put("Authorization", "Bearer " + accessToken_str); header.put("app_key", appId); while (true) { StreamClosedHttpResponse rec_http = null; dynamic json = null; try { rec_http = http_utils_test.doGetWithParasGetStatusLine(urlQueryDevices, paramQueryDeviceData, header); json = Newtonsoft.Json.Linq.JToken.Parse(rec_http.getContent()) as dynamic; } catch (Exception e) { LOG.Error("error", e);//////////////////////////////////////////////LOG记录 System.Threading.Thread.Sleep(3000); continue; } //int a = json.devices[0].services[0].data.DiZhiMa; int len_devices = 0; try { len_devices = json.totalCount;//获取本应用中共有几个节点 } catch (Exception e) { LOG.Error("error", e); ///////////////////////////////////////////LOG记录 LOG.Info("http content : " + rec_http.getContent()); ////////////////////////////////将电信平台的返回值也记录下来 Init_accessToken(); http_utils_test = new HttpsUtil(); paramQueryDeviceData = new HashMap(); paramQueryDeviceData.put("pageNo", "0"); paramQueryDeviceData.put("appId", appId); paramQueryDeviceData.put("pageSize", "2048"); header = new HashMap(); header.put("Authorization", "Bearer " + accessToken_str); header.put("app_key", appId); continue; } //返回一个需要更新的节点的列表 //遍历所有设备信息,如果发现有新的信息,就触发更新事件 for (int i = 0; i < len_devices; i++) { //判断时间新旧 DateTime temp_datetime = DateTime.ParseExact(json.devices[i].services[0].eventTime.ToString(), "yyyyMMddTHHmmssZ", null); //考虑到之后肯能多次使用这个节点的时间,所以先将其提取出来 if (DateTime.Compare(temp_datetime, DateTime_NewestData) > 0) { LOG.Info("receive new date : " + json.devices[i].services[0].data.DiZhiMa.ToString()); //如果需要更新,先将数据整理成所需格式,string数组 string[] temp_byte_array = new string[14]; temp_byte_array[0] = json.devices[i].services[0].data.DiZhiMa.ToString(); temp_byte_array[1] = json.devices[i].services[0].data.GongNengMa.ToString(); temp_byte_array[2] = json.devices[i].services[0].data.ZiJieShu.ToString(); temp_byte_array[3] = json.devices[i].services[0].data.JiQiMa.ToString(); temp_byte_array[4] = json.devices[i].services[0].data.QiTiLeiXing.ToString(); temp_byte_array[5] = json.devices[i].services[0].data.DanWei.ToString(); temp_byte_array[6] = json.devices[i].services[0].data.TangCeQiZhuangTai.ToString(); temp_byte_array[7] = json.devices[i].services[0].data.QiTiNongDu.ToString(); temp_byte_array[8] = json.devices[i].services[0].data.DiXian.ToString(); temp_byte_array[9] = json.devices[i].services[0].data.GaoXian.ToString(); temp_byte_array[10] = json.devices[i].services[0].data.DianLiang.ToString(); temp_byte_array[11] = json.devices[i].services[0].data.WenDuZhengShu.ToString(); temp_byte_array[12] = json.devices[i].services[0].data.WenDuXiaoShu.ToString(); temp_byte_array[13] = temp_datetime.ToString("yyyy-MM-dd HH:mm:ss"); //将整理后的数据传入rev_New中 rev_New(temp_byte_array); //触发事件之后,比较当前设备是否比暂存区的时间要新,如果新,就更新 if (DateTime.Compare(temp_datetime, temp_DateTime_NewestData) > 0) { temp_DateTime_NewestData = temp_datetime;//保证temp_DateTime_NewestData是当前所有节点时间和DateTime_NewestData中最新的时间 } } } //所有节点检测并更新完成之后,用暂存版的datetime更新DateTime_NewestData DateTime_NewestData = temp_DateTime_NewestData; System.Threading.Thread.Sleep(1500);//连续发送请求可能引起程序错误 } }
/// <summary> /// 创建控件 /// </summary> /// <param name="node">节点</param> /// <param name="type">类型</param> /// <returns>控件</returns> public virtual FCView createControl(XmlNode node, String type) { if (type == "band") { return(new FCGridBand()); } else if (type == "bandcolumn") { return(new FCBandedFCGridColumn()); } else if (type == "bandedgrid") { return(new FCBandedGrid()); } else if (type == "button") { return(new FCButton()); } else if (type == "calendar") { return(new FCCalendar()); } else if (type == "chart") { return(new FCChart()); } else if (type == "checkbox") { return(new FCCheckBox()); } else if (type == "column" || type == "th") { return(new FCGridColumn()); } else if (type == "combobox" || type == "select") { return(new FCComboBox()); } else if (type == "datetimepicker") { return(new FCDateTimePicker()); } else if (type == "div") { HashMap <String, String> attributes = getAttributes(node); if (attributes.containsKey("type")) { String inputType = attributes.get("type"); if (inputType == "groupbox") { return(new FCGroupBox()); } else if (inputType == "layout") { return(new FCLayoutDiv()); } else if (inputType == "splitlayout") { return(new FCSplitLayoutDiv()); } else if (inputType == "tabcontrol") { return(new FCTabControl()); } else if (inputType == "tabpage") { return(new FCTabPage()); } else if (inputType == "tablelayout") { return(new FCTableLayoutDiv()); } else if (inputType == "usercontrol") { return(createUserControl(node)); } } return(new FCDiv()); } else if (type == "grid" || type == "table") { return(new FCGrid()); } else if (type == "groupbox") { return(new FCGroupBox()); } else if (type == "input") { HashMap <String, String> attributes = getAttributes(node); if (attributes.containsKey("type")) { String inputType = attributes.get("type"); if (inputType == "button") { return(new FCButton()); } else if (inputType == "checkbox") { return(new FCCheckBox()); } else if (inputType == "datetime") { return(new FCDateTimePicker()); } else if (inputType == "radio") { return(new FCRadioButton()); } else if (inputType == "range") { return(new FCSpin()); } else if (inputType == "text") { return(new FCTextBox()); } else if (inputType == "usercontrol") { return(createUserControl(node)); } } attributes.clear(); } else if (type == "label") { return(new FCLabel()); } else if (type == "layoutdiv") { return(new FCLayoutDiv()); } else if (type == "linklabel" || type == "a") { return(new FCLinkLabel()); } else if (type == "menu") { return(new FCMenu()); } else if (type == "splitlayoutdiv") { return(new FCSplitLayoutDiv()); } else if (type == "radiobutton") { return(new FCRadioButton()); } else if (type == "spin") { return(new FCSpin()); } else if (type == "tabcontrol") { return(new FCTabControl()); } else if (type == "tablelayoutdiv") { return(new FCTableLayoutDiv()); } else if (type == "textbox") { return(new FCTextBox()); } else if (type == "tree") { return(new FCTree()); } else if (type == "usercontrol") { return(createUserControl(node)); } else if (type == "window") { return(new FCWindow()); } return(null); }
// FST is pruned private void VerifyPruned(int inputMode, FST <T> fst, int prune1, int prune2) { if (LuceneTestCase.VERBOSE) { Console.WriteLine("TEST: now verify pruned " + pairs.Count + " terms; outputs=" + outputs); foreach (InputOutput <T> pair in pairs) { Console.WriteLine(" " + InputToString(inputMode, pair.Input) + ": " + outputs.OutputToString(pair.Output)); } } // To validate the FST, we brute-force compute all prefixes // in the terms, matched to their "common" outputs, prune that // set according to the prune thresholds, then assert the FST // matches that same set. // NOTE: Crazy RAM intensive!! //System.out.println("TEST: tally prefixes"); // build all prefixes IDictionary <Int32sRef, CountMinOutput <T> > prefixes = new HashMap <Int32sRef, CountMinOutput <T> >(); Int32sRef scratch = new Int32sRef(10); foreach (InputOutput <T> pair in pairs) { scratch.CopyInt32s(pair.Input); for (int idx = 0; idx <= pair.Input.Length; idx++) { scratch.Length = idx; CountMinOutput <T> cmo = prefixes.ContainsKey(scratch) ? prefixes[scratch] : null; if (cmo == null) { cmo = new CountMinOutput <T>(); cmo.Count = 1; cmo.Output = pair.Output; prefixes[Int32sRef.DeepCopyOf(scratch)] = cmo; } else { cmo.Count++; T output1 = cmo.Output; if (output1.Equals(outputs.NoOutput)) { output1 = outputs.NoOutput; } T output2 = pair.Output; if (output2.Equals(outputs.NoOutput)) { output2 = outputs.NoOutput; } cmo.Output = outputs.Common(output1, output2); } if (idx == pair.Input.Length) { cmo.IsFinal = true; cmo.FinalOutput = cmo.Output; } } } if (LuceneTestCase.VERBOSE) { Console.WriteLine("TEST: now prune"); } // prune 'em // LUCENENET NOTE: Altered this a bit to go in reverse rather than use an enumerator since // in .NET you cannot delete records while enumerating forward through a dictionary. for (int i = prefixes.Count - 1; i >= 0; i--) { KeyValuePair <Int32sRef, CountMinOutput <T> > ent = prefixes.ElementAt(i); Int32sRef prefix = ent.Key; CountMinOutput <T> cmo = ent.Value; if (LuceneTestCase.VERBOSE) { Console.WriteLine(" term prefix=" + InputToString(inputMode, prefix, false) + " count=" + cmo.Count + " isLeaf=" + cmo.IsLeaf + " output=" + outputs.OutputToString(cmo.Output) + " isFinal=" + cmo.IsFinal); } bool keep; if (prune1 > 0) { keep = cmo.Count >= prune1; } else { Debug.Assert(prune2 > 0); if (prune2 > 1 && cmo.Count >= prune2) { keep = true; } else if (prefix.Length > 0) { // consult our parent scratch.Length = prefix.Length - 1; Array.Copy(prefix.Int32s, prefix.Offset, scratch.Int32s, 0, scratch.Length); CountMinOutput <T> cmo2 = prefixes.ContainsKey(scratch) ? prefixes[scratch] : null; //System.out.println(" parent count = " + (cmo2 == null ? -1 : cmo2.count)); keep = cmo2 != null && ((prune2 > 1 && cmo2.Count >= prune2) || (prune2 == 1 && (cmo2.Count >= 2 || prefix.Length <= 1))); } else if (cmo.Count >= prune2) { keep = true; } else { keep = false; } } if (!keep) { prefixes.Remove(prefix); //System.out.println(" remove"); } else { // clear isLeaf for all ancestors //System.out.println(" keep"); scratch.CopyInt32s(prefix); scratch.Length--; while (scratch.Length >= 0) { CountMinOutput <T> cmo2 = prefixes.ContainsKey(scratch) ? 
prefixes[scratch] : null; if (cmo2 != null) { //System.out.println(" clear isLeaf " + inputToString(inputMode, scratch)); cmo2.IsLeaf = false; } scratch.Length--; } } } if (LuceneTestCase.VERBOSE) { Console.WriteLine("TEST: after prune"); foreach (KeyValuePair <Int32sRef, CountMinOutput <T> > ent in prefixes) { Console.WriteLine(" " + InputToString(inputMode, ent.Key, false) + ": isLeaf=" + ent.Value.IsLeaf + " isFinal=" + ent.Value.IsFinal); if (ent.Value.IsFinal) { Console.WriteLine(" finalOutput=" + outputs.OutputToString(ent.Value.FinalOutput)); } } } if (prefixes.Count <= 1) { Assert.IsNull(fst); return; } Assert.IsNotNull(fst); // make sure FST only enums valid prefixes if (LuceneTestCase.VERBOSE) { Console.WriteLine("TEST: check pruned enum"); } Int32sRefFSTEnum <T> fstEnum = new Int32sRefFSTEnum <T>(fst); Int32sRefFSTEnum.InputOutput <T> current; while ((current = fstEnum.Next()) != null) { if (LuceneTestCase.VERBOSE) { Console.WriteLine(" fstEnum.next prefix=" + InputToString(inputMode, current.Input, false) + " output=" + outputs.OutputToString(current.Output)); } CountMinOutput <T> cmo = prefixes.ContainsKey(current.Input) ? prefixes[current.Input] : null; Assert.IsNotNull(cmo); Assert.IsTrue(cmo.IsLeaf || cmo.IsFinal); //if (cmo.isFinal && !cmo.isLeaf) { if (cmo.IsFinal) { Assert.AreEqual(cmo.FinalOutput, current.Output); } else { Assert.AreEqual(cmo.Output, current.Output); } } // make sure all non-pruned prefixes are present in the FST if (LuceneTestCase.VERBOSE) { Console.WriteLine("TEST: verify all prefixes"); } int[] stopNode = new int[1]; foreach (KeyValuePair <Int32sRef, CountMinOutput <T> > ent in prefixes) { if (ent.Key.Length > 0) { CountMinOutput <T> cmo = ent.Value; T output = Run(fst, ent.Key, stopNode); if (LuceneTestCase.VERBOSE) { Console.WriteLine("TEST: verify prefix=" + InputToString(inputMode, ent.Key, false) + " output=" + outputs.OutputToString(cmo.Output)); } // if (cmo.isFinal && !cmo.isLeaf) { if (cmo.IsFinal) { Assert.AreEqual(cmo.FinalOutput, output); } else { Assert.AreEqual(cmo.Output, output); } Assert.AreEqual(ent.Key.Length, stopNode[0]); } } }
private void RotatingModel(double AxisX, double AxisY, double AxisZ, double Angle) { DF3DApplication app = DF3DApplication.Application; if (app == null || app.Current3DMapControl == null) { return; } this.bFinished = false; this.modifyRowbufferMap = SelectCollection.Instance().Clone(this.beginRowbufferMap); if (this.modifyRowbufferMap != null) { foreach (DF3DFeatureClass featureClassInfo in this.modifyRowbufferMap.Keys) { IFeatureClass featureClass = featureClassInfo.GetFeatureClass(); string facName = featureClassInfo.GetFacilityClassName(); IRowBufferCollection rowBufferCollection = this.modifyRowbufferMap[featureClassInfo] as IRowBufferCollection; for (int i = 0; i < rowBufferCollection.Count; i++) { IRowBuffer rowBuffer = rowBufferCollection.Get(i); if (rowBuffer != null) { int num = rowBuffer.FieldIndex(featureClassInfo.GetFeatureLayer().GeometryFieldName); if (num != -1) { IGeometry geometry = rowBuffer.GetValue(num) as IGeometry; if (geometry != null) { ITransform transform = geometry as ITransform; if (geometry.HasZ()) { transform.Rotate3D(AxisX, AxisY, AxisZ, this.centerX, this.centerY, this.centerZ, Angle); } else { if (!geometry.HasZ() && AxisZ > 0.0) { transform.Rotate2D(this.centerX, this.centerY, Angle); } } rowBuffer.SetValue(num, transform); } } #region 管线设施 if (facName == "PipeLine" || facName == "PipeNode" || facName == "PipeBuild" || facName == "PipeBuild1") { int num3 = rowBuffer.FieldIndex("Shape"); if (num3 != -1) { IGeometry geometry = rowBuffer.GetValue(num3) as IGeometry; if (geometry != null) { ITransform transform = geometry as ITransform; if (geometry != null && transform != null) { if (geometry.HasZ()) { transform.Rotate3D(AxisX, AxisY, AxisZ, this.centerX, this.centerY, this.centerZ, Angle); } else { transform.Rotate2D(this.centerX, this.centerY, Angle); } rowBuffer.SetValue(num3, transform); } } } int num4 = rowBuffer.FieldIndex("FootPrint"); if (num4 != -1) { IGeometry geometry = rowBuffer.GetValue(num4) as IGeometry; if (geometry != null) { ITransform transform = geometry as ITransform; if (geometry != null && transform != null) { if (geometry.HasZ()) { transform.Rotate3D(AxisX, AxisY, AxisZ, this.centerX, this.centerY, this.centerZ, Angle); } else { transform.Rotate2D(this.centerX, this.centerY, Angle); } rowBuffer.SetValue(num4, transform); } } } } #endregion } } app.Current3DMapControl.FeatureManager.EditFeatures(featureClass, rowBufferCollection); //System.Runtime.InteropServices.Marshal.ReleaseComObject(featureClass); } } }
public static int Sum <EqK, K>(this HashMap <EqK, K, int> self) where EqK : struct, Eq <K> => self.Values.Sum();
public static int Count <EqK, K, V>(this HashMap <EqK, K, V> self) where EqK : struct, Eq <K> => self.Count;
public ICUDResult CreateCUDResult(MergeHandle mergeHandle) { ILinkedMap <Type, ICUDResultExtension> typeToCudResultExtension = extensions.GetExtensions(); foreach (Entry <Type, ICUDResultExtension> entry in typeToCudResultExtension) { entry.Value.Extend(mergeHandle); } IdentityLinkedMap <Object, IList <IUpdateItem> > objToModDict = mergeHandle.objToModDict; IdentityHashSet <Object> objToDeleteSet = mergeHandle.objToDeleteSet; HashMap <Type, IPrimitiveUpdateItem[]> entityTypeToFullPuis = new HashMap <Type, IPrimitiveUpdateItem[]>(); HashMap <Type, IRelationUpdateItem[]> entityTypeToFullRuis = new HashMap <Type, IRelationUpdateItem[]>(); List <IChangeContainer> allChanges = new List <IChangeContainer>(objToModDict.Count); List <Object> originalRefs = new List <Object>(objToModDict.Count); foreach (Object objToDelete in objToDeleteSet) { IObjRef ori = OriHelper.GetCreateObjRef(objToDelete, mergeHandle); DeleteContainer deleteContainer = new DeleteContainer(); deleteContainer.Reference = ori; allChanges.Add(deleteContainer); originalRefs.Add(objToDelete); } IEntityMetaDataProvider entityMetaDataProvider = this.EntityMetaDataProvider; foreach (Entry <Object, IList <IUpdateItem> > entry in objToModDict) { Object obj = entry.Key; IList <IUpdateItem> modItems = entry.Value; IEntityMetaData metaData = entityMetaDataProvider.GetMetaData(obj.GetType()); IPrimitiveUpdateItem[] fullPuis = GetEnsureFullPUIs(metaData, entityTypeToFullPuis); IRelationUpdateItem[] fullRuis = GetEnsureFullRUIs(metaData, entityTypeToFullRuis); int puiCount = 0, ruiCount = 0; for (int a = modItems.Count; a-- > 0;) { IUpdateItem modItem = modItems[a]; Member member = metaData.GetMemberByName(modItem.MemberName); if (modItem is IRelationUpdateItem) { fullRuis[metaData.GetIndexByRelation(member)] = (IRelationUpdateItem)modItem; ruiCount++; } else { fullPuis[metaData.GetIndexByPrimitive(member)] = (IPrimitiveUpdateItem)modItem; puiCount++; } } IRelationUpdateItem[] ruis = CompactRUIs(fullRuis, ruiCount); IPrimitiveUpdateItem[] puis = CompactPUIs(fullPuis, puiCount); IObjRef ori = OriHelper.GetCreateObjRef(obj, mergeHandle); originalRefs.Add(obj); if (ori is IDirectObjRef) { CreateContainer createContainer = new CreateContainer(); ((IDirectObjRef)ori).CreateContainerIndex = allChanges.Count; createContainer.Reference = ori; createContainer.Primitives = puis; createContainer.Relations = ruis; allChanges.Add(createContainer); } else { UpdateContainer updateContainer = new UpdateContainer(); updateContainer.Reference = ori; updateContainer.Primitives = puis; updateContainer.Relations = ruis; allChanges.Add(updateContainer); } } return(new CUDResult(allChanges, originalRefs)); }
public override void handlePOSTRequest(HttpProcessor p, StreamReader inputData) { //response payload to be written back to the client HashMap responseObj = new HashMap(); HashMap responseData = new HashMap(); Console.WriteLine("POST request: {0}", p.http_url); string data = inputData.ReadToEnd(); //raw data POSTed by the client, UTF-8 encoded String bodyData = HttpUtility.UrlDecode(data, Encoding.UTF8); //the decoded request body int errorCode = -1; String errorMessage = ""; String serviceType = ""; try { HashMap requestObj = JsonConvert.DeserializeObject <HashMap>(bodyData); if (requestObj == null) { errorCode = -1; throw new Exception("解析到了空的HashMap对象"); } if (!requestObj.ContainsKey("key")) { errorCode = -1; throw new Exception("未能找到注册的服务类型KEY"); } serviceType = requestObj["key"].ToString(); if (String.IsNullOrEmpty(serviceType)) { errorCode = -1; throw new Exception("传递了空的服务类型"); } switch (serviceType) { case "printJob": PrintJob job = new PrintJob(); job.ProcessPrintJob(bodyData, ref errorCode, ref errorMessage); break; case "printerList": List <String> list = Printer.GetLocalPrinters(); responseData.Add("printerList", list); errorCode = 0; break; default: errorCode = -1; throw new Exception("传递了未知的服务类型"); } responseObj.Add("errorCode", errorCode); responseObj.Add("errorMessage", errorMessage); responseObj.Add("key", serviceType); responseData.Add("queryString", data); responseObj.Add("data", responseData); String responseText = JsonConvert.SerializeObject(responseObj); p.writeSuccess(); p.outputStream.WriteLine(responseText); } catch (Exception e) { errorCode = -1; errorMessage = e.Message; Console.WriteLine("SKTHttpEngine处理请求失败,原因为" + e.Message); } }
private void SpatialQuery() { DF3DApplication app = DF3DApplication.Application; if (app == null || app.Current3DMapControl == null) { return; } try { HashMap hashMap = new HashMap(); IRowBuffer buffer = null; IFdeCursor cursor = null; ISpatialFilter filter = null; IGeometry geo2D = this._drawTool.GetGeo(); if (geo2D != null && geo2D.GeometryType == gviGeometryType.gviGeometryPolygon) { IPolygon polygon = geo2D as IPolygon; if (polygon != null) { DF3DFeatureClass featureClassInfo = CommonUtils.Instance().CurEditLayer; if (featureClassInfo != null) { IFeatureClass featureClass = featureClassInfo.GetFeatureClass(); if (featureClass != null) { string typeName = featureClassInfo.GetFacilityClassName(); if (typeName == "PipeLine" || typeName == "PipeNode" || typeName == "PipeBuild" || typeName == "PipeBuild1") { filter = new SpatialFilterClass { GeometryField = "Shape", SpatialRel = gviSpatialRel.gviSpatialRelIntersects, Geometry = polygon.Clone2(gviVertexAttribute.gviVertexAttributeNone) }; } else { filter = new SpatialFilterClass { Geometry = polygon, GeometryField = "Geometry", SpatialRel = gviSpatialRel.gviSpatialRelEnvelope }; } filter.SubFields = featureClass.FidFieldName; cursor = featureClass.Search(filter, true); while ((buffer = cursor.NextRow()) != null) { int featureId = int.Parse(buffer.GetValue(0).ToString()); if (hashMap.Contains(featureClassInfo)) { System.Collections.Generic.List <int> list = hashMap[featureClassInfo] as System.Collections.Generic.List <int>; if (!list.Contains(featureId)) { list.Add(featureId); } } else { System.Collections.Generic.List <int> list2 = new System.Collections.Generic.List <int>(); if (!list2.Contains(featureId)) { list2.Add(featureId); } hashMap[featureClassInfo] = list2; } } } } } SelectCollection.Instance().UpdateSelection(hashMap); RenderControlEditServices.Instance().SetEditorPosition(SelectCollection.Instance().FcRowBuffersMap); this.Clear(); } if (buffer != null) { System.Runtime.InteropServices.Marshal.ReleaseComObject(buffer); } if (cursor != null) { System.Runtime.InteropServices.Marshal.ReleaseComObject(cursor); } if (filter != null) { System.Runtime.InteropServices.Marshal.ReleaseComObject(filter); } } catch (Exception ex) { LoggingService.Error(ex.Message); } }
/// <summary>
/// Creates a grid row.
/// </summary>
/// <param name="node">node</param>
/// <param name="control">control</param>
protected virtual void createGridRow(XmlNode node, FCView control)
{
    FCGrid grid = control as FCGrid;
    FCGridRow row = new FCGridRow();
    grid.addRow(row);
    setAttributesBefore(node, row);
    // cells
    int col = 0;
    foreach (XmlNode node3 in node.ChildNodes)
    {
        String subNodeName = node3.Name.ToLower();
        String subNodeValue = node3.InnerText;
        if (subNodeName == "cell" || subNodeName == "td")
        {
            String cellType = "string";
            HashMap<String, String> attributes = getAttributes(node3);
            if (attributes.containsKey("type"))
            {
                cellType = attributes.get("type");
            }
            attributes.clear();
            FCGridCell cell = null;
            if (cellType == "bool") { cell = new FCGridBoolCell(); }
            else if (cellType == "button") { cell = new FCGridButtonCell(); }
            else if (cellType == "checkbox") { cell = new FCGridCheckBoxCell(); }
            else if (cellType == "combobox") { cell = new FCGridComboBoxCell(); }
            else if (cellType == "double") { cell = new FCGridDoubleCell(); }
            else if (cellType == "float") { cell = new FCGridFloatCell(); }
            else if (cellType == "string") { cell = new FCGridStringCell(); }
            else if (cellType == "int") { cell = new FCGridIntCell(); }
            else if (cellType == "long") { cell = new FCGridLongCell(); }
            else if (cellType == "textbox") { cell = new FCGridTextBoxCell(); }
            else
            {
                // Fall back to a string cell so an unrecognized type does not leave cell null.
                cell = new FCGridStringCell();
            }
            row.addCell(col, cell);
            setAttributesBefore(node3, cell);
            cell.setString(subNodeValue);
            setAttributesAfter(node3, cell);
            col++;
        }
    }
    setAttributesAfter(node, row);
}
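// A sketch (not from the original source) of the kind of XML fragment createGridRow consumes:
// each child element named "cell" or "td" becomes one FCGridCell, and its optional "type"
// attribute picks the cell class (bool, button, checkbox, combobox, double, float, string,
// int, long, textbox). The outer element name ("tr") is an illustrative assumption; only the
// child names and the "type" attribute come from the parser above.
using System.Xml;

static class GridRowXmlSketch
{
    public static XmlNode BuildSampleRowNode()
    {
        var doc = new XmlDocument();
        doc.LoadXml(
            "<tr>" +
            "  <td type=\"int\">42</td>" +
            "  <td type=\"double\">3.14</td>" +
            "  <td>plain text defaults to a string cell</td>" +
            "  <cell type=\"checkbox\">true</cell>" +
            "</tr>");
        // The document element would be passed to createGridRow(node, grid) by the calling code.
        return doc.DocumentElement;
    }
}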
protected override List <IMember> BuildMemberList() { List <IMember> list = new List <IMember>(); if (_target == null) { return(list); } if (Context == null) { return(base.BuildMemberList()); } IEntityMetaDataProvider entityMetaDataProvider = Context.GetService <IEntityMetaDataProvider>(); Type type = _target.GetType(); IEntityMetaData metaData = entityMetaDataProvider.GetMetaData(type, true); if (metaData == null) { return(base.BuildMemberList()); } HashSet <String> suppressedPropertyNames = new HashSet <String>(); foreach (RelationMember member in metaData.RelationMembers) { suppressedPropertyNames.Add(ValueHolderIEC.GetObjRefsFieldName(member.Name)); suppressedPropertyNames.Add(ValueHolderIEC.GetInitializedFieldName(member.Name)); } HashMap <String, RelationMember> nameToRelationMap = new HashMap <String, RelationMember>(); foreach (RelationMember member in metaData.RelationMembers) { nameToRelationMap.Put(member.Name + ValueHolderIEC.GetNoInitSuffix(), member); } ITypeInfoItem[] members = Context.GetService <ITypeInfoProvider>().GetTypeInfo(type).Members; foreach (ITypeInfoItem member in members) { if (!member.CanRead) { continue; } DebuggerBrowsableAttribute att = member.GetAnnotation <DebuggerBrowsableAttribute>(); if (att != null && att.State == DebuggerBrowsableState.Never) { continue; } String propertyName = member.Name; if (suppressedPropertyNames.Contains(propertyName)) { continue; } RelationMember relMember = nameToRelationMap.Get(propertyName); Object value = null; if (relMember != null) { propertyName = relMember.Name; int relationIndex = metaData.GetIndexByRelationName(propertyName); ValueHolderState state = ((IObjRefContainer)_target).Get__State(relationIndex); if (!ValueHolderState.INIT.Equals(state)) { IObjRef[] objRefs = ((IObjRefContainer)_target).Get__ObjRefs(relationIndex); if (objRefs == null) { list.Add(new LazyUnknownMember(propertyName, state, member.RealType)); } else { list.Add(new LazyMember(propertyName, state, objRefs, member.RealType)); } continue; } } if (value == null) { try { value = member.GetValue(_target); } catch (Exception ex) { value = ex; } } list.Add(new FHPMember(propertyName, value, member.RealType)); } return(list.OrderBy(m => m.Name).ToList()); }
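// A small illustration (not from the original source): BuildMemberList skips any readable
// member carrying [DebuggerBrowsable(DebuggerBrowsableState.Never)], so framework-internal
// properties can be hidden from the debugger member list like this. The entity type below
// is hypothetical.
using System.Diagnostics;

public class SampleEntity
{
    public string Name { get; set; }               // shown in the member list

    [DebuggerBrowsable(DebuggerBrowsableState.Never)]
    public object InternalCache { get; set; }      // filtered out by the attribute check above
}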
/// <summary>
/// Compiles the grammar into a sentence HMM. A GrammarJob is created for the
/// initial grammar node and added to the GrammarJob queue. While there are
/// jobs left on the grammar job queue, a job is removed from the queue and
/// the associated grammar node is expanded and attached to the tails.
/// GrammarJobs for the successors are added to the grammar job queue.
/// </summary>
/// <returns></returns>
protected HashSet<SentenceHMMState> CompileGrammar()
{
    InitialGrammarState = Grammar.InitialNode;

    NodeStateMap = new HashMap<GrammarNode, GState>();
    // create in declaration section (22.12.2014)
    ArcPool = new Cache<SentenceHMMStateArc>();

    var gstateList = new List<GState>();
    TimerPool.GetTimer(this, "Compile").Start();

    // Get the nodes from the grammar and create states for them.
    // Add the non-empty gstates to the gstate list.
    TimerPool.GetTimer(this, "Create States").Start();
    foreach (var grammarNode in Grammar.GrammarNodes)
    {
        var gstate = CreateGState(grammarNode);
        gstateList.Add(gstate);
    }
    TimerPool.GetTimer(this, "Create States").Stop();
    AddStartingPath(); // ensures an initial path to the start state

    // Prep all the gstates by gathering all of the contexts up;
    // this allows each gstate to know about its surrounding contexts.
    TimerPool.GetTimer(this, "Collect Contexts").Start();
    foreach (var gstate in gstateList)
    {
        gstate.CollectContexts();
    }
    TimerPool.GetTimer(this, "Collect Contexts").Stop();

    // Now that all gstates know all about their contexts, we can expand them fully.
    TimerPool.GetTimer(this, "Expand States").Start();
    foreach (var gstate in gstateList)
    {
        gstate.Expand();
    }
    TimerPool.GetTimer(this, "Expand States").Stop();

    // Now that all states are expanded fully, we can connect all the states up.
    TimerPool.GetTimer(this, "Connect Nodes").Start();
    foreach (var gstate in gstateList)
    {
        gstate.Connect();
    }
    TimerPool.GetTimer(this, "Connect Nodes").Stop();

    var initialState = FindStartingState();

    // Add an out-of-grammar branch if configured to do so.
    if (AddOutOfGrammarBranch)
    {
        var phoneLoop = new CIPhoneLoop(PhoneLoopAcousticModel, LogPhoneInsertionProbability);
        var firstBranchState = (SentenceHMMState)phoneLoop.GetSearchGraph().InitialState;
        initialState.Connect(GetArc(firstBranchState, LogOne, LogOutOfGrammarBranchProbability));
    }

    _searchGraph = new FlatSearchGraph(initialState);
    TimerPool.GetTimer(this, "Compile").Stop();

    // Now that we are all done, dump out some interesting information about the process.
    if (_dumpGStates)
    {
        foreach (var grammarNode in Grammar.GrammarNodes)
        {
            var gstate = GetGState(grammarNode);
            gstate.DumpInfo();
        }
    }

    NodeStateMap = null;
    ArcPool = null;
    return SentenceHMMState.CollectStates(initialState);
}
public static int test() { var map = new HashMap<string, Object>(); map.put("str1", new Object()); map.put("str2", new Object()); var map2 = new HashMap<string, Object>(); foreach (var e in map.entrySet()) { map2.put(e.getKey(), e.getValue()); } return map2.size(); }
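// For comparison, a sketch (not from the original source) of the same copy written against the
// standard Dictionary<TKey, TValue> instead of the Java-style HashMap wrapper used above.
using System.Collections.Generic;

static class CopySketch
{
    public static int Test()
    {
        var map = new Dictionary<string, object>
        {
            ["str1"] = new object(),
            ["str2"] = new object()
        };

        // Copy every entry into a second dictionary, mirroring the entrySet() loop above.
        var map2 = new Dictionary<string, object>(map.Count);
        foreach (KeyValuePair<string, object> e in map)
        {
            map2[e.Key] = e.Value;
        }
        return map2.Count; // 2
    }
}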