/// <summary>
/// Builds a per-day summary table that combines thermostat cycle statistics with
/// outside min/max temperatures over the given date range.
/// </summary>
/// <param name="locationId">Location used to look up outside weather conditions.</param>
/// <param name="thermostatId">Thermostat whose cycle summary is loaded.</param>
/// <param name="startDate">Inclusive start of the reporting range.</param>
/// <param name="endDate">Inclusive end of the reporting range.</param>
/// <param name="timezoneDifference">Timezone offset forwarded to both summary loaders.</param>
/// <returns>
/// One row per day that has cycle data: LogDate, OutsideMin, OutsideMax, plus
/// CycleCount/TotalSeconds/AverageSeconds columns for each distinct cycle type.
/// Weather-only days (no cycle data) are intentionally omitted.
/// </returns>
public static DataTable LoadFullSummary(int locationId, int thermostatId, DateTime startDate, DateTime endDate, int timezoneDifference)
{
    DataTable cycles = LoadSummary(thermostatId, startDate, endDate, timezoneDifference);
    DataTable weather = OutsideConditions.LoadSummary(locationId, startDate, endDate, timezoneDifference);

    DataTable result = new DataTable();
    result.Columns.Add("LogDate", typeof(DateTime));
    result.Columns.Add("OutsideMin", typeof(int));
    result.Columns.Add("OutsideMax", typeof(int));

    // Distinct cycle types drive the dynamic column set (three columns per type).
    System.Collections.Generic.HashSet<string> cycleTypes = new System.Collections.Generic.HashSet<string>();
    foreach (DataRow row in cycles.Rows)
    {
        cycleTypes.Add(Convert.ToString(row["cycle_type"]));
    }
    foreach (string cycleType in cycleTypes)
    {
        result.Columns.Add(cycleType + "_CycleCount", typeof(int));
        result.Columns.Add(cycleType + "_TotalSeconds", typeof(double));
        result.Columns.Add(cycleType + "_AverageSeconds", typeof(double));
    }

    // Maps a log date to its row index in 'result' so every cycle type for the
    // same day lands in one shared row.
    System.Collections.Generic.Dictionary<DateTime, int> rowIndexByDate = new System.Collections.Generic.Dictionary<DateTime, int>();
    foreach (DataRow row in cycles.Rows)
    {
        string cycleType = Convert.ToString(row["cycle_type"]);
        int cycleCount = Convert.ToInt32(row["cycle_count"]);
        int totalSeconds = Convert.ToInt32(row["total_seconds"]);
        // Guard against a zero cycle count so the average never becomes Infinity/NaN.
        double averageSeconds = cycleCount == 0 ? 0.0 : (double)totalSeconds / cycleCount;
        DateTime logDate = Convert.ToDateTime(row["log_date"]);

        int rowIndex;
        DataRow resultRow;
        if (rowIndexByDate.TryGetValue(logDate, out rowIndex))
        {
            resultRow = result.Rows[rowIndex];
        }
        else
        {
            resultRow = result.NewRow();
            resultRow["LogDate"] = logDate;
            result.Rows.Add(resultRow);
            rowIndexByDate.Add(logDate, result.Rows.Count - 1);
        }
        resultRow[cycleType + "_CycleCount"] = cycleCount;
        resultRow[cycleType + "_TotalSeconds"] = totalSeconds;
        resultRow[cycleType + "_AverageSeconds"] = averageSeconds;
    }

    // Attach outside min/max temperatures to days that already have cycle data.
    foreach (DataRow row in weather.Rows)
    {
        DateTime logDate = Convert.ToDateTime(row["log_date"]);
        int rowIndex;
        if (rowIndexByDate.TryGetValue(logDate, out rowIndex))
        {
            DataRow resultRow = result.Rows[rowIndex];
            resultRow["OutsideMin"] = Convert.ToInt32(row["MinDegrees"]);
            resultRow["OutsideMax"] = Convert.ToInt32(row["MaxDegrees"]);
        }
    }
    return result;
}
/// <summary>
/// Builds the CMS display table for the given devices. (The original summary was
/// mojibake-encoded Chinese, roughly "set CMS display" — reconstructed from the code; confirm.)
/// </summary>
/// <param name="devNames">Devices fetched from the remote host; may be null or empty.</param>
/// <param name="maxSegId">Segment id used to compute each device's distance.</param>
/// <param name="megType">Message type forwarded into the CMS category query.</param>
/// <returns>
/// Hashtable keyed by device name; each value is a two-element List&lt;object&gt; holding
/// the getPriority() result followed by the selected category row.
/// </returns>
protected override System.Collections.Hashtable setDisplay(RemoteInterface.HC.FetchDeviceData[] devNames, int maxSegId, MegType megType)
{
    System.Collections.Hashtable displayht = new System.Collections.Hashtable();
    List<object> outputs = new List<object>();
    if (devNames == null || devNames.Length == 0) return displayht;
    foreach (RemoteInterface.HC.FetchDeviceData devName in devNames)
    {
        int distance = getDeviceDistance(devName.SegId, maxSegId);
        // NOTE(review): these assignments mutate instance state on every iteration, so
        // after the loop they hold the values of the LAST device — confirm downstream
        // readers expect that.
        DevStartMile = devName.Mileage;
        DevLineID = devName.LineId;
        CMSDevName = devName.DevName;
        outputs = (List<object>)com.select(DBConnect.DataType.CmsCategory, Command.GetSelectCmd.getCMSCategory(Convert.ToInt32(DevRange["RULEID"]), (int)secType, devType.ToString(), distance, devName.DevName, megType.ToString(), ht["INC_LINEID"].ToString().Trim(), devName.Location, devName.LineId));
        foreach (object obj in outputs)
        {
            List<object> output = new List<object>();
            output.AddRange(new object[] { getPriority(), obj });
            // First category for a device wins, except that a "L" location entry
            // overrides an earlier entry for the same device.
            if (!displayht.Contains(devName.DevName))
                displayht.Add(devName.DevName, output);
            else if (devName.Location == "L")
            {
                displayht[devName.DevName] = output;
            }
        }
    }
    return displayht;
}
/// <summary>
/// Verifies that StopAnalyzer removes every configured stop word: no token surviving
/// the stream may appear in the stop-word set.
/// </summary>
public virtual void TestStopList()
{
    // Words the analyzer must filter out of the token stream.
    System.Collections.Hashtable stopWordsSet = new System.Collections.Hashtable();
    stopWordsSet.Add("good", "good");
    stopWordsSet.Add("test", "test");
    stopWordsSet.Add("analyzer", "analyzer");
    // StopAnalyzer takes a string array; initialize it directly (replaces the old
    // element-by-element copy with its "{{Aroush}}" TODO comments).
    System.String[] arrStopWordsSet = new System.String[] { "good", "test", "analyzer" };
    StopAnalyzer newStop = new StopAnalyzer(arrStopWordsSet);
    System.IO.StringReader reader = new System.IO.StringReader("This is a good test of the english stop analyzer");
    TokenStream stream = newStop.TokenStream("test", reader);
    Assert.IsNotNull(stream);
    Token token = null;
    try
    {
        while ((token = stream.Next()) != null)
        {
            System.String text = token.TermText();
            Assert.IsFalse(stopWordsSet.Contains(text));
        }
    }
    catch (System.IO.IOException)
    {
        // Assert.Fail reports the failure explicitly instead of Assert.IsTrue(false).
        Assert.Fail("unexpected IOException while reading the token stream");
    }
}
/// <summary>
/// Verifies FileSwitchDirectory routing: files whose extensions are in the switch set
/// (fdt/fdx) go to the primary directory, everything else to the secondary directory.
/// </summary>
public virtual void TestBasic()
{
    // Only stored-fields files (fdt/fdx) are routed to the primary directory.
    System.Collections.Hashtable storedFieldExtensions = new System.Collections.Hashtable();
    SupportClass.CollectionsHelper.AddIfNotContains(storedFieldExtensions, "fdt");
    SupportClass.CollectionsHelper.AddIfNotContains(storedFieldExtensions, "fdx");

    Directory primaryDir = new MockRAMDirectory();
    RAMDirectory secondaryDir = new MockRAMDirectory();
    FileSwitchDirectory switchDir = new FileSwitchDirectory(storedFieldExtensions, primaryDir, secondaryDir, true);

    IndexWriter writer = new IndexWriter(switchDir, new WhitespaceAnalyzer(), IndexWriter.MaxFieldLength.LIMITED);
    writer.SetUseCompoundFile(false);
    TestIndexWriterReader.CreateIndexNoClose(true, "ram", writer);
    IndexReader reader = writer.GetReader();
    Assert.AreEqual(100, reader.MaxDoc());
    writer.Commit();

    // we should see only fdx,fdt files here
    System.String[] files = primaryDir.ListAll();
    Assert.IsTrue(files.Length > 0);
    foreach (System.String file in files)
    {
        Assert.IsTrue(storedFieldExtensions.Contains(FileSwitchDirectory.GetExtension(file)));
    }

    // we should not see fdx,fdt files here
    files = secondaryDir.ListAll();
    Assert.IsTrue(files.Length > 0);
    foreach (System.String file in files)
    {
        Assert.IsFalse(storedFieldExtensions.Contains(FileSwitchDirectory.GetExtension(file)));
    }

    reader.Close();
    writer.Close();

    // The switch directory lists everything and never yields null entries.
    files = switchDir.ListAll();
    foreach (System.String file in files)
    {
        Assert.IsNotNull(file);
    }
    switchDir.Close();
}
/// <summary>
/// Builds a text string from interface fields and type names, then returns its first
/// character prepended to a whitespace-normalized copy. (Appears to be generated
/// conversion-test code exercising name collisions — confirm.)
/// </summary>
public override string InterfaceMethod1(string _string)
{
    string text = Test.Integration.Interface_Fields.Default + Test.Integration.Interface_Fields._params.ToString();
    text = text.Substring(1).Trim() + typeof(Test.Integration.Interface[]) + typeof(Class1) + this.GetType();
    string _lock = "";
    Test.Integration.InterfaceInnerClass anonymousClass = new AnonymousClassInterface_InterfaceInnerClass1(text, this, _lock);
    System.Collections.IDictionary map = new System.Collections.Hashtable();
    // NOTE(review): 'map' is freshly created and empty here, so this branch can never be
    // entered; it appears to exist only to exercise these constructs — confirm intent.
    if (map.Contains(text) && ExistSimilarFieldAndMethod())
    {
        // NOTE(review): locking on a string is unsafe in general (interning can share
        // the lock object across unrelated code) — left as-is.
        lock (text)
        {
            // NOTE(review): 'it.Current' is read without a prior MoveNext(); this would
            // throw if the branch were ever reached.
            System.Collections.IEnumerator it = new System.Collections.ArrayList(map.Keys).GetEnumerator();
            System.Collections.DictionaryEntry entry = (System.Collections.DictionaryEntry)it.Current;
            object key = entry.Key;
        }
    }
    // char + string concatenation: first character of 'text' prepended to the remainder
    // with every whitespace run member replaced by a single space.
    return text[0] + System.Text.RegularExpressions.Regex.Replace(text, "\\s", " ");
}
/// <summary>
/// Verifies that StopAnalyzer (constructed directly from a stop-word Hashtable)
/// removes every stop word and leaves default position increments of 1.
/// </summary>
public virtual void TestStopList()
{
    System.Collections.Hashtable stopWordsSet = new System.Collections.Hashtable();
    stopWordsSet.Add("good", "good");
    stopWordsSet.Add("test", "test");
    stopWordsSet.Add("analyzer", "analyzer");

    StopAnalyzer analyzer = new StopAnalyzer(stopWordsSet);
    TokenStream stream = analyzer.TokenStream("test", new System.IO.StringReader("This is a good test of the english stop analyzer"));
    Assert.IsNotNull(stream);

    for (Token token = stream.Next(); token != null; token = stream.Next())
    {
        // No stop word may survive filtering.
        Assert.IsFalse(stopWordsSet.Contains(token.TermText()));
        // by default stop tokenizer does not apply increments.
        Assert.AreEqual(1, token.GetPositionIncrement());
    }
}
public static float EXPLAIN_SCORE_TOLERANCE_DELTA = 0.00025f; // {{See: LUCENENET-288}} Intentional diversion from Java Lucene per above comment

/// <summary> Tests that all documents up to maxDoc which are *not* in the
/// expected result set have an explanation indicating no match
/// (ie: Explanation value of 0.0f).
/// </summary>
public static void CheckNoMatchExplanations(Query q, System.String defaultFieldName, Searcher searcher, int[] results)
{
    System.String description = q.ToString(defaultFieldName);

    // Documents expected to match; these are exempt from the no-match check.
    System.Collections.Hashtable expectedDocs = new System.Collections.Hashtable();
    foreach (int result in results)
    {
        SupportClass.CollectionsHelper.AddIfNotContains(expectedDocs, (System.Int32) result);
    }

    int maxDoc = searcher.MaxDoc();
    for (int doc = 0; doc < maxDoc; doc++)
    {
        if (expectedDocs.Contains((System.Int32) doc))
            continue;

        Explanation exp = searcher.Explain(q, doc);
        Assert.IsNotNull(exp, "Explanation of [[" + description + "]] for #" + doc + " is null");
        Assert.AreEqual(0.0f, exp.GetValue(), 0.0f, "Explanation of [[" + description + "]] for #" + doc + " doesn't indicate non-match: " + exp.ToString());
    }
}
/// <summary>
/// Reconciles the published task list with the freshly collected results:
/// removes tasks no longer reported and inserts newly reported errors,
/// capping the list at roughly 1000 entries.
/// </summary>
public void Finish()
{
    // Now merge the lists.
    // Snapshot the current list so we can enumerate it while mutating 'this.list',
    // and index it for O(1) duplicate checks below.
    System.Collections.Hashtable existing = new System.Collections.Hashtable();
    IList<Task> copy = new List<Task>();
    for (int i = 0, n = list.Count; i < n; i++)
    {
        Task t = list[i];
        copy.Add(t);
        existing[t] = t;
    }
    // Remove tasks that are no longer reported.
    // NOTE(review): 'unique' is a field not visible in this chunk — presumably the set
    // of tasks seen in the current run; confirm.
    foreach (Task t in copy)
    {
        if (!unique.Contains(t))
        {
            this.list.Remove(t);
        }
    }
    // Insert any new tasks that have appeared up to a maximum of 1000.
    for (int i = 0, n = errors.Count; i < n; i++)
    {
        Task t = errors[i];
        if (!existing.Contains(t))
        {
            // NOTE(review): inserting at index 'i' assumes list.Count >= i at this point;
            // if the list is shorter than the error index this throws
            // ArgumentOutOfRangeException — verify against callers.
            this.list.Insert(i, t);
        }
        // don't let the error list get too long.
        if (list.Count > 1000)
        {
            break;
        }
    }
}
/// <summary>
/// Applies the formats described in <paramref name="htFormat"/> to the cell range
/// [startRow,startCol]..[endRow,endCol]. (Original summary: "设置格式" = "set format".)
/// Recognized keys: NumberFormat, BoldFont, SetBorders, Left/Right/Bottomt/TopThickBorder,
/// FontName, FontSize, FontColor, RowHeight, ColumnWidth.
/// </summary>
public void SetFormat(int startRow, int startCol, int endRow, int endCol, System.Collections.Hashtable htFormat)
{
    // NOTE(review): 'm_objOpt' is never used in this method — confirm it can be removed.
    object m_objOpt = System.Reflection.Missing.Value;
    excel.Range range = Sheet.get_Range(ToExcelCellIndex(startRow, startCol), ToExcelCellIndex(endRow, endCol));
    if (htFormat.Contains("NumberFormat"))
    {
        range.NumberFormatLocal = htFormat["NumberFormat"];
    }
    if (htFormat.Contains("BoldFont"))
    {
        range.Font.Bold = htFormat["BoldFont"];
    }
    if (htFormat.Contains("SetBorders"))
    {
        // "粗" = thick, "细" = thin, anything else = medium.
        if (htFormat["SetBorders"].Equals("粗"))
        {
            range.Borders.Weight = excel.XlBorderWeight.xlThick;
        }
        else if (htFormat["SetBorders"].Equals("细"))
        {
            range.Borders.Weight = excel.XlBorderWeight.xlThin;
        }
        else
        {
            range.Borders.Weight = excel.XlBorderWeight.xlMedium;
        }
    }
    // NOTE(review): ContainsValue("加粗") ("bold") checks for the value anywhere in the
    // table, not under the specific key being tested — confirm this is intended.
    if (htFormat.Contains("LeftThickBorder") && htFormat.ContainsValue("加粗"))
    {
        range.Borders[excel.XlBordersIndex.xlEdgeLeft].Weight = excel.XlBorderWeight.xlMedium;
    }
    if (htFormat.Contains("RightThickBorder") && htFormat.ContainsValue("加粗"))
    {
        range.Borders[excel.XlBordersIndex.xlEdgeRight].Weight = excel.XlBorderWeight.xlMedium;
    }
    // NOTE(review): "BottomtThickBorder" looks like a typo of "BottomThickBorder", but
    // callers may rely on the misspelled key — do not rename without checking call sites.
    if (htFormat.Contains("BottomtThickBorder") && htFormat.ContainsValue("加粗"))
    {
        range.Borders[excel.XlBordersIndex.xlEdgeBottom].Weight = excel.XlBorderWeight.xlMedium;
    }
    if (htFormat.Contains("TopThickBorder") && htFormat.ContainsValue("加粗"))
    {
        range.Borders[excel.XlBordersIndex.xlEdgeTop].Weight = excel.XlBorderWeight.xlMedium;
    }
    if (htFormat.Contains("FontName"))
    {
        range.Font.Name = htFormat["FontName"];
    }
    if (htFormat.Contains("FontSize"))
    {
        range.Font.Size = htFormat["FontSize"];
    }
    if (htFormat.Contains("FontColor"))
    {
        range.Font.Color = htFormat["FontColor"];
    }
    if (htFormat.Contains("RowHeight"))
    {
        range.RowHeight = htFormat["RowHeight"];
    }
    if (htFormat.Contains("ColumnWidth"))
    {
        range.ColumnWidth = htFormat["ColumnWidth"];
    }
}
/// <summary>
/// Aggregates the Frequency column of m_dtReport by Type and returns a summary
/// string of the most-used types (at most ten).
/// </summary>
/// <returns>"Top10 Types Used " followed by comma-separated "type: frequency" pairs.</returns>
private string Top10Types()
{
    // Accumulate the total frequency per type.
    System.Collections.Hashtable ht = new System.Collections.Hashtable();
    foreach (DataRow dr in m_dtReport.Select("", "Frequency DESC", DataViewRowState.CurrentRows))
    {
        string type = (string)dr["Type"];
        if (!ht.Contains(type))
            ht[type] = 0;
        ht[type] = (int)ht[type] + (int)dr["Frequency"];
    }

    // Sort value indices by frequency using the project's Comparer over the table.
    System.Collections.ArrayList indices = range(ht.Values.Count);
    indices.Sort(new Comparer(ht));
    System.Collections.ArrayList keys = new System.Collections.ArrayList(ht.Keys);

    // Take at most ten entries; the previous fixed "i < 10" loop threw
    // ArgumentOutOfRangeException when fewer than ten distinct types existed.
    int limit = System.Math.Min(10, keys.Count);
    string[] output = new string[limit];
    for (int i = 0; i < limit; i++)
    {
        string key = (string)keys[(int)indices[i]];
        output[i] = key + ": " + ht[key];
    }
    return "Top10 Types Used " + string.Join(",", output);
}
/// <summary>
/// Dispatches one EMF+ record to its handler: Object records update the object table
/// (replacing any entry with the same id); drawing records are converted to page items
/// using whatever is currently in the object table. Unhandled record types are ignored.
/// </summary>
internal void ProcessRecord(int flags, EmfPlusRecordType recordType, byte[] RecordData)
{
    switch (recordType)
    {
        case EmfPlusRecordType.Header:
            break;
        case EmfPlusRecordType.SetPageTransform:
            // NOTE(review): the parsed transform is currently discarded — confirm intent.
            EMFSetPageTransform P = EMFSetPageTransform.getTransform(flags, RecordData);
            break;
        case EmfPlusRecordType.Object:
            EMFRecordObject O = EMFRecordObject.getObject(flags, RecordData);
            if (O != null)
            {
                // Replace an existing object with the same id, otherwise register it.
                if (ObjectTable.Contains(O.ObjectID))
                {
                    ObjectTable[O.ObjectID] = O;
                }
                else
                {
                    ObjectTable.Add(O.ObjectID, O);
                }
            }
            break;
        case EmfPlusRecordType.DrawLines:
            // After each instruction we must do something, as the object table is
            // constantly being changed... and we need to use what is currently in the table!
            DrawLines DL = new DrawLines(X, Y, Width, Height, ObjectTable);
            PageItems.AddRange(DL.Process(flags, RecordData));
            break;
        case EmfPlusRecordType.DrawString:
            DrawString DS = new DrawString(X, Y, Width, Height, ObjectTable);
            PageItems.AddRange(DS.Process(flags, RecordData));
            break;
        case EmfPlusRecordType.FillRects:
            FillRects FR = new FillRects(X, Y, Width, Height, ObjectTable);
            PageItems.AddRange(FR.Process(flags, RecordData));
            break;
        case EmfPlusRecordType.DrawRects:
            DrawRects DR = new DrawRects(X, Y, Width, Height, ObjectTable);
            PageItems.AddRange(DR.Process(flags, RecordData));
            break;
        case EmfPlusRecordType.FillPolygon:
            FillPolygon FPo = new FillPolygon(X, Y, Width, Height, ObjectTable);
            PageItems.AddRange(FPo.Process(flags, RecordData));
            break;
        case EmfPlusRecordType.DrawEllipse:
            DrawEllipse DE = new DrawEllipse(X, Y, Width, Height, ObjectTable);
            PageItems.AddRange(DE.Process(flags, RecordData));
            break;
        case EmfPlusRecordType.FillEllipse:
            FillEllipse FE = new FillEllipse(X, Y, Width, Height, ObjectTable);
            PageItems.AddRange(FE.Process(flags, RecordData));
            break;
        case EmfPlusRecordType.FillPie:
            FillPie FP = new FillPie(X, Y, Width, Height, ObjectTable);
            PageItems.AddRange(FP.Process(flags, RecordData));
            break;
        case EmfPlusRecordType.DrawPie:
            DrawPie DP = new DrawPie(X, Y, Width, Height, ObjectTable);
            PageItems.AddRange(DP.Process(flags, RecordData));
            break;
        case EmfPlusRecordType.DrawCurve:
            DrawCurve DC = new DrawCurve(X, Y, Width, Height, ObjectTable);
            PageItems.AddRange(DC.Process(flags, RecordData));
            break;
        case EmfPlusRecordType.Comment:
            Comment CM = new Comment(X, Y, Width, Height, ObjectTable);
            PageItems.AddRange(CM.Process(flags, RecordData));
            break;
        default:
            break;
    }
}
/// <summary>Expert: called when re-writing queries under MultiSearcher.
///
/// Creates a single query suitable for use by all subsearchers (in 1-1
/// correspondence with queries) — an optimization of the OR of all queries.
/// Handles the common cases of equal queries and overlapping SHOULD clauses of
/// coord-disabled boolean queries (as generated by MultiTermQuery.rewrite() and
/// RangeQuery.rewrite()). Be careful overriding this method, as queries[0]
/// determines which method will be called and is not necessarily of the same
/// type as the other queries.
/// </summary>
public virtual Query Combine(Query[] queries)
{
    // Collect each distinct sub-query exactly once.
    System.Collections.Hashtable uniques = new System.Collections.Hashtable();
    foreach (Query query in queries)
    {
        BooleanClause[] clauses = null;
        // A query can be flattened only when it is a coord-disabled BooleanQuery
        // composed entirely of SHOULD clauses.
        bool splittable = (query is BooleanQuery);
        if (splittable)
        {
            BooleanQuery booleanQuery = (BooleanQuery) query;
            splittable = booleanQuery.IsCoordDisabled();
            clauses = booleanQuery.GetClauses();
            for (int j = 0; splittable && j < clauses.Length; j++)
            {
                splittable = (clauses[j].GetOccur() == BooleanClause.Occur.SHOULD);
            }
        }
        if (splittable)
        {
            foreach (BooleanClause clause in clauses)
            {
                Query sub = clause.GetQuery();
                if (!uniques.Contains(sub))
                {
                    uniques.Add(sub, sub);
                }
            }
        }
        else if (!uniques.Contains(query))
        {
            uniques.Add(query, query);
        }
    }
    // optimization: if we have just one query, just return it
    if (uniques.Count == 1)
    {
        System.Collections.IDictionaryEnumerator single = uniques.GetEnumerator();
        single.MoveNext();
        return single.Value as Query;
    }
    // Otherwise OR all the distinct queries together (coord disabled).
    BooleanQuery result = new BooleanQuery(true);
    System.Collections.IDictionaryEnumerator it = uniques.GetEnumerator();
    while (it.MoveNext())
    {
        result.Add((Query) it.Value, BooleanClause.Occur.SHOULD);
    }
    return result;
}
/// <summary>
/// Builds the CSLS (changeable speed-limit sign) display table: determines a target
/// speed per device from rule/section tables and incident position, and packages the
/// value into a DataSet-backed output per device name.
/// (Several original comments were mojibake-encoded Chinese; translations below are
/// reconstructed from the code — confirm.)
/// </summary>
protected override System.Collections.Hashtable setDisplay(RemoteInterface.HC.FetchDeviceData[] devNames, int maxSegId, MegType megType)
{
    System.Collections.Hashtable displayht = new System.Collections.Hashtable();
    if (devNames == null || devNames.Length == 0) return displayht;
    RemoteInterface.HC.I_HC_FWIS hobj = EasyClient.getHost();
    com.select(DBConnect.DataType.CSLS, Command.GetSelectCmd.getSectionSpeed(ht["ORIGINAL_INC_LOCATION"].ToString()));
    // Column selector: for GEN categories it is derived from severity, otherwise
    // from the incident's congestion level.
    int ColNum;
    if (type == CategoryType.GEN)
    {
        switch (degree)
        {
            case Degree.L: ColNum = 1; break;
            case Degree.M: ColNum = 2; break;
            case Degree.H: ColNum = 3; break;
            case Degree.S: ColNum = 4; break;
            default: ColNum = 1; break;
        }
    }
    else
    {
        ColNum = Convert.ToInt32(ht["INC_CONGESTION"]);
    }
    // Look up the suggested speed for the active rule; -1 means "no display".
    string cmd = string.Format("Select SuggesSpeeD{0} From {1}.{2} csls,{1}.{3} rule where AlarmClass = {4} and csls.RuleID = rule.RuleID and rule.RUNING = 'Y';", ColNum, RSPGlobal.GlobaSchema, DBConnect.DB2TableName.tblRSPCSLSSpeed, DBConnect.DB2TableName.tblRSPRule, ht["INC_NAME"]);
    System.Data.DataTable DT = com.Select(cmd);
    if (DT != null && DT.Rows.Count > 0)
    {
        maxSpeed = Convert.ToInt32(DT.Rows[0][0]);
        if (maxSpeed == -1)
        {
            return displayht;
        }
    }
    // Load the upstream speed-step parameters matching the suggested speed.
    cmd = string.Format("Select * from {0}.{1} csls,{0}.{2} rule where csls.RuleID = rule.ruleid and rule.runing = 'Y' and SuggesSpeed = {3};", RSPGlobal.GlobaSchema, DBConnect.DB2TableName.tblIIPCSLSParam, DBConnect.DB2TableName.tblRSPRule, maxSpeed);
    DT = com.Select(cmd);
    if (DT.Rows.Count < 1)
    {
        throw new Exception("not Mapping CSLS Speed");
    }
    System.Data.DataRow dr = DT.Rows[0];
    // Upstream speed steps (index 0 nearest the incident) and distance range in meters.
    List<int> SpeedList = new List<int>();
    SpeedList.Add(Convert.ToInt32(dr[UPStream1]));
    SpeedList.Add(Convert.ToInt32(dr[UPStream2]));
    SpeedList.Add(Convert.ToInt32(dr[UPStream3]));
    SpeedList.Add(Convert.ToInt32(dr[UPStream4]));
    SpeedList.Add(Convert.ToInt32(dr[UPStream5]));
    range = Convert.ToInt32(dr[RangerLimit]) * 1000;
    int i = 0;
    int SetSpped = 90;
    int SectionMaxSpeed = 90;
    // Section speed cap, taken from the first device's section.
    if (devNames.Length > 0)
    {
        cmd = string.Format("Select sec.MaxSpeed From {0}.{1} sec, {0}.{2} cfg where sec.sectionid = cfg.sectionID and cfg.DeviceName = '{3}';", RSPGlobal.GlobaSchema, DBConnect.DB2TableName.tblGroupSection, DBConnect.DB2TableName.tblDeviceConfig, devNames[0].DevName);
        DT = com.Select(cmd);
        if (DT.Rows.Count > 0)
        {
            SectionMaxSpeed = (int)DT.Rows[0][0];
        }
    }
    for (int j = 0; j < devNames.Length; j++)
    {
        if (maxSegId != 99 && (int)ht["INC_NAME"] == 31 && ((devNames[j].Mileage > Convert.ToInt32(ht["FROM_MILEPOST1"]) && devNames[j].Mileage < Convert.ToInt32(ht["TO_MILEPOST1"])) || (devNames[j].Mileage < Convert.ToInt32(ht["FROM_MILEPOST1"]) && devNames[j].Mileage > Convert.ToInt32(ht["TO_MILEPOST1"]))))
        {
            // Original comment was mojibake — 255 appears to be a sentinel (likely
            // "road closed/blocked") for devices inside the incident span; confirm.
            SetSpped = 255;
        }
        else if (devNames[j].Location == "D") // original mojibake comment; "D" seems to mark a special location class (downstream?) — confirm
        {
            // "D" devices display their own section's configured maximum speed.
            cmd = string.Format("Select sec.MaxSpeed From {0}.{1} sec, {0}.{2} cfg where sec.sectionid = cfg.sectionID and cfg.DeviceName = '{3}';", RSPGlobal.GlobaSchema, DBConnect.DB2TableName.tblGroupSection, DBConnect.DB2TableName.tblDeviceConfig, devNames[j].DevName);
            DT = com.Select(cmd);
            if (DT.Rows.Count > 0)
            {
                SetSpped = (int)DT.Rows[0][0];
            }
        }
        else
        {
            // Stop once a device falls outside the configured distance range of the
            // incident (first device) or of the previous non-"D" device.
            if (maxSegId != -99 && ((j == 0 && (Math.Abs(devNames[j].Mileage - Convert.ToInt32(ht["FROM_MILEPOST1"])) > range && Math.Abs(devNames[j].Mileage - Convert.ToInt32(ht["TO_MILEPOST1"])) > range)) || (j > 0 && devNames[j - 1].Location != "D" && Math.Abs(devNames[j].Mileage - devNames[j - 1].Mileage) > range))) break;
            if (maxSegId == -99) // original mojibake comment; appears to mean "use the lowest speed step" — confirm
            {
                SetSpped = SpeedList[0];
            }
            else
            {
                // Walk the speed steps outward; -1 or exhaustion ends the walk. The
                // section cap always wins over the step value.
                if (i >= SpeedList.Count || SpeedList[i].Equals(-1))
                {
                    break;
                }
                else
                {
                    if (SectionMaxSpeed < SpeedList[i]) SetSpped = SectionMaxSpeed;
                    else SetSpped = SpeedList[i];
                }
            }
            i++;
        }
        // Package the chosen speed into the host DataSet; the display value is the
        // speed divided by 10.
        System.Data.DataSet DS = hobj.getSendDs("CSLS", "set_speed");
        DS.Tables[0].Rows[0]["speed"] = SetSpped;
        DS.AcceptChanges();
        List<object> output = new List<object>();
        output.AddRange(new object[] { SetSpped / 10, new RemoteInterface.HC.CSLSOutputData(DS) });
        if (!displayht.Contains(devNames[j].DevName))
        {
            displayht.Add(devNames[j].DevName, output);
        }
    }
    // (review) A large block of commented-out legacy logic ("#region OLD Function")
    // was removed here; see version control history if the old behavior is needed.
    return displayht;
}
/// <summary>
/// Validates the fields of the configured layers against the land-use database
/// standard tables (LR_DicLayer / LR_DicField): reports extra fields present in a
/// layer, missing standard fields, and type/length mismatches.
/// </summary>
/// <param name="checkResult">Receives the accumulated field errors on success.</param>
/// <returns>false when the dictionary tables cannot be read or an exception occurs; true otherwise.</returns>
public override bool Check(ref List <Error> checkResult)
{
    IFeatureWorkspace ipFtWS = (IFeatureWorkspace)m_BaseWorkspace;
    try
    {
        List <Error> pResult = new List <Error>();
        string strAlias = m_pFieldPara.strAlias;
        List <string> listLayer = m_pFieldPara.m_LyrFldMap;
        // Layer names to check: the part of each mapping entry before the '&' separator.
        System.Collections.Hashtable hashtable = new System.Collections.Hashtable();
        for (int i = 0; i < listLayer.Count; i++)
        {
            string strTemp = listLayer[i];
            int nIndex = strTemp.IndexOf('&');
            if (nIndex < 0)
            {
                continue;
            }
            string str = strTemp.Substring(0, nIndex);
            if (!hashtable.Contains(str))
            {
                hashtable.Add(str, "");
            }
        }
        DataTable dtLayer = new DataTable();
        string strSQL = "select AttrTableName,LayerName,LayerID from LR_DicLayer";
        dtLayer = Hy.Common.Utility.Data.AdoDbHelper.GetDataTable(SysDbHelper.GetSysDbConnection(), strSQL);
        if (dtLayer == null)
        {
            return(false);
        }
        foreach (DataRow drLayer in dtLayer.Rows)
        {
            if (drLayer != null)
            {
                string strLayerCode = drLayer["AttrTableName"].ToString();
                string strLayerName = drLayer["LayerName"].ToString();
                int nLayerID = Convert.ToInt32(drLayer["LayerID"]);
                // Only layers selected in the parameter mapping are checked.
                if (!hashtable.Contains(strLayerName))
                {
                    continue;
                }
                IFeatureClass pFtCls = null;
                try
                {
                    pFtCls = ipFtWS.OpenFeatureClass(strLayerCode);
                }
                catch
                {
                    // Layer not present in the workspace — skip it (best-effort).
                    continue;
                }
                IFields pFields = pFtCls.Fields;
                if (pFields == null)
                {
                    continue;
                }
                int lFieldCount = pFields.FieldCount;
                IField pField;
                DataTable dtFields = new DataTable();
                string strSQLFields = "select * from LR_DicField where LayerID = " + nLayerID + "";
                dtFields = Hy.Common.Utility.Data.AdoDbHelper.GetDataTable(SysDbHelper.GetSysDbConnection(), strSQLFields);
                if (dtFields == null)
                {
                    // No standard field definition exists for this layer at all.
                    FieldError LRFieldErrorInfo = new FieldError();
                    LRFieldErrorInfo.DefectLevel = this.DefectLevel;
                    LRFieldErrorInfo.strAttrTabName = strLayerName;
                    LRFieldErrorInfo.strFieldName = null;
                    LRFieldErrorInfo.m_strRuleInstID = this.m_InstanceID;
                    LRFieldErrorInfo.strErrorMsg = string.Format("{0}层对应的属性字段,在《土地利用现状数据库标准》中不存在", strLayerName);
                    pResult.Add(LRFieldErrorInfo);
                    continue;
                }
                // Check whether the layer contains extra fields not in the standard
                // (original: 检查图层中是否存在多余字段). The annotation layer ("注记") is skipped.
                for (int i = 0; i < lFieldCount; i++)
                {
                    if (strLayerName == "注记")
                    {
                        break;
                    }
                    pField = pFields.get_Field(i);
                    if (pField.Name.ToUpper().Contains("OBJECTID") || pField.Name.ToLower().Contains("shape"))
                    {
                        continue;
                    }
                    int k = 0;
                    int nFieldCount = dtFields.Rows.Count;
                    for (k = 0; k < nFieldCount; k++)
                    {
                        DataRow drField = dtFields.Rows[k];
                        string strStdName = drField["FieldName"].ToString();
                        string strStdCode = drField["FieldCode"].ToString();
                        if (strStdCode.Trim().Equals("objectid", StringComparison.OrdinalIgnoreCase) || strStdCode.Trim().Equals("object id", StringComparison.OrdinalIgnoreCase) || strStdCode.Trim().Equals("shape", StringComparison.OrdinalIgnoreCase))
                        {
                            continue;
                        }
                        if (pField.Name.Trim().Equals(strStdCode.Trim(), StringComparison.OrdinalIgnoreCase))
                        {
                            break;
                        }
                    }
                    if (k == nFieldCount)
                    {
                        // Field exists in the layer but not in the standard; fields whose
                        // alias contains "本软件" ("this software") are tool-internal and tolerated.
                        if (!pField.AliasName.Contains("本软件"))
                        {
                            FieldError LRFieldErrorInfo2 = new FieldError();
                            LRFieldErrorInfo2.DefectLevel = this.DefectLevel;
                            LRFieldErrorInfo2.strAttrTabName = strLayerName;
                            LRFieldErrorInfo2.strFieldName = pField.Name;
                            LRFieldErrorInfo2.m_strRuleInstID = this.m_InstanceID;
                            LRFieldErrorInfo2.strErrorMsg = string.Format(Helper.ErrMsgFormat.ERR_410100001_1, strLayerName, pField.Name);
                            pResult.Add(LRFieldErrorInfo2);
                        }
                    }
                }
                // Check that every standard field exists in the layer and that the layer's
                // fields match the standard (original: 检查标准中的字段在图层中是否存在,已经图层的字段是否和标准相符合).
                // Second for-loop iteration controller, add by wangxiang 20111201
                // (original: 二次for循环迭代控制器).
                int flag = 0;
                foreach (DataRow drField in dtFields.Rows)
                {
                    if (drField != null)
                    {
                        string strStdName = drField["FieldName"].ToString();
                        string strStdCode = drField["FieldCode"].ToString();
                        if (strStdCode.Trim().Equals("objectid", StringComparison.OrdinalIgnoreCase) || strStdCode.Trim().Equals("object id", StringComparison.OrdinalIgnoreCase) || strStdCode.Trim().Equals("shape", StringComparison.OrdinalIgnoreCase))
                        {
                            continue;
                        }
                        int nStdType = Convert.ToInt32(drField["FieldType"]);
                        string strStdFldType = Hy.Common.Utility.Data.AdoDbHelper.GetFieldTypeName(nStdType);
                        FieldError FieldErrInfo1 = new FieldError();
                        FieldErrInfo1.DefectLevel = this.DefectLevel;
                        FieldErrInfo1.strAttrTabName = strLayerName;
                        FieldErrInfo1.strFieldName = "" + strStdCode + "(" + strStdName + ")";
                        FieldErrInfo1.strStdFieldType = strStdFldType;
                        FieldErrInfo1.m_strRuleInstID = this.m_InstanceID;
                        int i = 0;
                        for (i = 0; i < lFieldCount && flag < lFieldCount; i++)
                        {
                            pField = pFields.get_Field(i);
                            if (pField.Name.Trim() == strStdCode.Trim())
                            {
                                flag++;
                                esriFieldType pType = pField.Type;
                                if (nStdType == 3)
                                {
                                    nStdType = 4;
                                }
                                esriFieldType pDTType = TopoHelper.en_GetEsriFieldByEnum(nStdType);
                                if (pType == pDTType)
                                {
                                    if (pType != esriFieldType.esriFieldTypeString)
                                    {
                                        break;
                                    }
                                    if (pField.Length != Convert.ToInt32(drField["Length"])) // field length incorrect (original: 字段长度不正确)
                                    {
                                        // NOTE(review): both branches of this if/else build
                                        // the identical message — the JBNTBHTB special case
                                        // currently has no effect; confirm intent.
                                        if (strLayerCode.Equals("JBNTBHTB", StringComparison.OrdinalIgnoreCase) && pField.Name.Trim().Equals("jbnttbbh", StringComparison.OrdinalIgnoreCase))
                                        {
                                            FieldErrInfo1.strErrorMsg = string.Format(Helper.ErrMsgFormat.ERR_410100001_4, strLayerName, pField.Name, pField.Length, Convert.ToInt32(drField["Length"]));
                                        }
                                        else
                                        {
                                            FieldErrInfo1.strErrorMsg = string.Format(Helper.ErrMsgFormat.ERR_410100001_4, strLayerName, pField.Name, pField.Length, Convert.ToInt32(drField["Length"]));
                                        }
                                        pResult.Add(FieldErrInfo1);
                                        break;
                                    }
                                    break;
                                }
                                else
                                {
                                    if (pDTType != esriFieldType.esriFieldTypeBlob)
                                    {
                                        FieldErrInfo1.strErrorMsg = string.Format(Helper.ErrMsgFormat.ERR_410100001_3, strLayerName, pField.Name, TopoHelper.en_GetFieldTypebyEsriField(pType), TopoHelper.en_GetFieldTypebyEsriField(pDTType));
                                        pResult.Add(FieldErrInfo1);
                                    }
                                    break;
                                }
                            }
                        }
                        if (i == lFieldCount)
                        {
                            // Standard field missing from the layer; "fz" (auxiliary) fields are optional.
                            if (drField["FieldOption"].ToString().Trim() != "fz")
                            {
                                FieldErrInfo1.strErrorMsg = string.Format(Helper.ErrMsgFormat.ERR_410100001_2, strLayerName, drField["FieldName"].ToString());
                                pResult.Add(FieldErrInfo1);
                            }
                        }
                    }
                }
                if (pFtCls != null)
                {
                    // Release the COM-backed feature class to avoid leaking RCWs.
                    Marshal.ReleaseComObject(pFtCls);
                    pFtCls = null;
                }
            }
        }
        checkResult = pResult;
    }
    catch (Exception ex)
    {
        SendMessage(enumMessageType.Exception, ex.ToString());
        return(false);
    }
    return(true);
}
/// <summary>
/// Indexes a single payload-bearing document and verifies that the span-near query
/// "a k" (slop 0, ordered) yields exactly the two expected payloads.
/// </summary>
public virtual void TestShrinkToAfterShortestMatch3()
{
    RAMDirectory directory = new RAMDirectory();
    IndexWriter writer = new IndexWriter(directory, new TestPayloadAnalyzer(this), IndexWriter.MaxFieldLength.LIMITED);
    Document doc = new Document();
    doc.Add(new Field("content", new System.IO.StreamReader(new System.IO.MemoryStream(System.Text.Encoding.ASCII.GetBytes("j k a l f k k p a t a k l k t a")))));
    writer.AddDocument(doc);
    writer.Close();

    IndexSearcher searcher = new IndexSearcher(directory);
    SpanQuery[] termSpans = new SpanQuery[]
    {
        new SpanTermQuery(new Term("content", "a")),
        new SpanTermQuery(new Term("content", "k"))
    };
    SpanNearQuery nearQuery = new SpanNearQuery(termSpans, 0, true);
    Spans spans = nearQuery.GetSpans(searcher.GetIndexReader());
    TopDocs topDocs = searcher.Search(nearQuery, 1);

    // Collect the distinct payload strings produced by all matching spans.
    System.Collections.Hashtable payloadSet = new System.Collections.Hashtable();
    for (int i = 0; i < topDocs.ScoreDocs.Length; i++)
    {
        while (spans.Next())
        {
            foreach (byte[] payload in spans.GetPayload())
            {
                Support.CollectionsHelper.AddIfNotContains(payloadSet, new System.String(System.Text.UTF8Encoding.UTF8.GetChars(payload)));
            }
        }
    }
    Assert.AreEqual(2, payloadSet.Count);
    if (DEBUG)
    {
        System.Collections.IEnumerator pit = payloadSet.GetEnumerator();
        while (pit.MoveNext())
        {
            System.Console.Out.WriteLine("match:" + pit.Current);
        }
    }
    Assert.IsTrue(payloadSet.Contains("a:Noise:10"));
    Assert.IsTrue(payloadSet.Contains("k:Noise:11"));
}
/// <summary> Simple similarity query generator.
/// Takes every unique word in <paramref name="body"/> and forms a boolean query where
/// all words are optional (SHOULD). After you get this you'll use it to query your
/// {@link IndexSearcher} for similar docs. The only caveat is that the first hit
/// returned <b>should be</b> your source document — you'll need to ignore that.
///
/// <p>Example:
/// <code>
/// Query q = FormSimilarQuery("I use Lucene to search fast. Fast searchers are good", new StandardAnalyzer(), "contents", null);
/// </code>
/// yields, in string form, <code>'(i use lucene to search fast searchers are good')</code>.</p>
///
/// <p>The philosophy behind this method is "two documents are similar if they share
/// lots of words". Behind the scenes, Lucene's scoring will rank two documents as more
/// similar when the shared words are uncommon.</p>
///
/// <p>This method is fail-safe: if a long 'body' causes
/// {@link BooleanQuery#add BooleanQuery.add()} to throw
/// {@link org.apache.lucene.search.BooleanQuery.TooManyClauses BooleanQuery.TooManyClauses},
/// the query built so far is returned.</p>
/// </summary>
/// <param name="body">the body of the document you want to find similar documents to</param>
/// <param name="a">the analyzer to use to parse the body</param>
/// <param name="field">the field you want to search on, probably something like "contents" or "body"</param>
/// <param name="stop">optional set of stop words to ignore</param>
/// <returns>a query with all unique words in 'body'</returns>
/// <throws>IOException this can't happen...</throws>
public static Query FormSimilarQuery(System.String body, Analyzer a, System.String field, System.Collections.Hashtable stop)
{
    TokenStream ts = a.TokenStream(field, new System.IO.StringReader(body));
    Lucene.Net.Analysis.Token t;
    BooleanQuery tmp = new BooleanQuery();
    System.Collections.Hashtable already = new System.Collections.Hashtable(); // ignore dups
    while ((t = ts.Next()) != null)
    {
        System.String word = t.TermText();
        // ignore optional stop words
        if (stop != null && stop.Contains(word))
            continue;
        // ignore duplicates
        if (already.Contains(word))
            continue;
        already.Add(word, word);
        // add to query
        TermQuery tq = new TermQuery(new Term(field, word));
        try
        {
            tmp.Add(tq, BooleanClause.Occur.SHOULD);
        }
        catch (BooleanQuery.TooManyClauses)
        {
            // fail-safe: return what we have so far rather than failing outright
            break;
        }
    }
    return tmp;
}
/// <summary>
/// Attempts to acquire this file lock. Returns false if the lock is already
/// held (either by this instance, or by another thread in this process via
/// LOCK_HELD, or by another process holding the OS-level file lock).
/// Throws IOException when the lock directory cannot be created or a regular
/// file exists where the directory is expected.
/// </summary>
public override bool Obtain()
{
    lock (this)
    {
        if (IsLocked())
        {
            // Our instance is already locked:
            return (false);
        }
        // Ensure that lockDir exists and is a directory.
        bool tmpBool;
        if (System.IO.File.Exists(lockDir.FullName))
        {
            tmpBool = true;
        }
        else
        {
            tmpBool = System.IO.Directory.Exists(lockDir.FullName);
        }
        if (!tmpBool)
        {
            try
            {
                System.IO.Directory.CreateDirectory(lockDir.FullName);
            }
            catch
            {
                throw new System.IO.IOException("Cannot create directory: " + lockDir.FullName);
            }
        }
        else
        {
            try
            {
                System.IO.Directory.Exists(lockDir.FullName);
            }
            catch
            {
                throw new System.IO.IOException("Found regular file where directory expected: " + lockDir.FullName);
            }
        }
        System.String canonicalPath = path.FullName;
        bool markedHeld = false;
        try
        {
            // Make sure nobody else in-process has this lock held
            // already, and, mark it held if not:
            lock (LOCK_HELD)
            {
                if (LOCK_HELD.Contains(canonicalPath))
                {
                    // Someone else in this JVM already has the lock:
                    return (false);
                }
                else
                {
                    // This "reserves" the fact that we are the one
                    // thread trying to obtain this lock, so we own
                    // the only instance of a channel against this
                    // file:
                    LOCK_HELD.Add(canonicalPath, canonicalPath);
                    markedHeld = true;
                }
            }
            try
            {
                f = new System.IO.FileStream(path.FullName, System.IO.FileMode.OpenOrCreate, System.IO.FileAccess.ReadWrite);
            }
            catch (System.IO.IOException e)
            {
                // On Windows, we can get intermittant "Access
                // Denied" here. So, we treat this as failure to
                // acquire the lock, but, store the reason in case
                // there is in fact a real error case.
                failureReason = e;
                f = null;
            }
            if (f != null)
            {
                try
                {
                    channel = f; // f.getChannel(); // {{Aroush-2.1}}
                    lock_Renamed = false;
                    try
                    {
                        // Take an OS-level lock over the whole file.
                        channel.Lock(0, channel.Length);
                        lock_Renamed = true;
                    }
                    catch (System.IO.IOException e)
                    {
                        // At least on OS X, we will sometimes get an
                        // intermittant "Permission Denied" IOException,
                        // which seems to simply mean "you failed to get
                        // the lock". But other IOExceptions could be
                        // "permanent" (eg, locking is not supported via
                        // the filesystem). So, we record the failure
                        // reason here; the timeout obtain (usually the
                        // one calling us) will use this as "root cause"
                        // if it fails to get the lock.
                        failureReason = e;
                    }
                    finally
                    {
                        // If we failed to lock, release the channel.
                        if (lock_Renamed == false)
                        {
                            try
                            {
                                channel.Close();
                            }
                            finally
                            {
                                channel = null;
                            }
                        }
                    }
                }
                finally
                {
                    // If the channel was never established, close the stream too.
                    if (channel == null)
                    {
                        try
                        {
                            f.Close();
                        }
                        finally
                        {
                            f = null;
                        }
                    }
                }
            }
        }
        finally
        {
            // If we reserved the in-process slot but failed to actually
            // acquire the lock, undo the reservation so others may try.
            if (markedHeld && !IsLocked())
            {
                lock (LOCK_HELD)
                {
                    if (LOCK_HELD.Contains(canonicalPath))
                    {
                        LOCK_HELD.Remove(canonicalPath);
                    }
                }
            }
        }
        return (IsLocked());
    }
}
/// <summary> /// /// </summary> /// <param name="bc"></param> /// <returns></returns> public static object GetMostDamager(BaseCreature bc) { try { double damageByFactioners = 0.0; double damageBySameFaction = 0.0; double damageByOthers = 0.0; foreach (DamageEntry de in bc.DamageEntries) { if (de.HasExpired) { continue; } if (de.Damager is PlayerMobile) { if (((PlayerMobile)de.Damager).IsRealFactioner) { if (((PlayerMobile)de.Damager).IOBAlignment != bc.IOBAlignment) { damageByFactioners += de.DamageGiven; } else { damageBySameFaction += de.DamageGiven; } } else { damageByOthers += de.DamageGiven; } } else if (de.Damager is BaseCreature) { if (((BaseCreature)de.Damager).IOBAlignment != IOBAlignment.None) { damageByFactioners += de.DamageGiven; } else { damageByOthers += de.DamageGiven; } } else { //*shrugs* } } if (KinSystemSettings.PointsEnabled && damageByFactioners > damageByOthers) { System.Collections.Hashtable ht_Groups = new System.Collections.Hashtable(); //need to find out who to assign the points to foreach (AggressorInfo ai in bc.Aggressors) { if (ai.Attacker is PlayerMobile) { int totalPoints = 0; foreach (DamageEntry de in bc.DamageEntries) { if (!de.HasExpired && de.Damager == ai.Attacker) { totalPoints += de.DamageGiven; } } if (ai.Attacker.Party == null) { ht_Groups.Add(ai.Attacker, totalPoints); } else { if (ht_Groups.Contains(ai.Attacker.Party)) { int prev = (int)ht_Groups[ai.Attacker.Party]; ht_Groups[ai.Attacker.Party] = prev + totalPoints; } else { ht_Groups.Add(ai.Attacker.Party, totalPoints); } } } } object winner = null; //decide who gets the points foreach (object key in ht_Groups.Keys) { if (winner == null) { winner = key; } else { int winTotal = (int)ht_Groups[winner]; int thisTotal = (int)ht_Groups[key]; if (thisTotal > winTotal) { winner = key; } } } return(winner); } } catch (Exception e) { Scripts.Commands.LogHelper.LogException(e); } //we get here if we aren't set to use points return(null); }
/// <summary>This constructor is only used for {@link #Reopen()}.
/// Rebuilds the reader against the new SegmentInfos, reusing (and incRef'ing)
/// any old SegmentReaders whose segments are unchanged, and copying unchanged
/// norms from the old norms cache where possible.
/// </summary>
internal DirectoryReader(Directory directory, SegmentInfos infos, SegmentReader[] oldReaders, int[] oldStarts, System.Collections.IDictionary oldNormsCache, bool readOnly, bool doClone, int termInfosIndexDivisor)
{
    this.directory = directory;
    this.readOnly = readOnly;
    this.segmentInfos = infos;
    this.termInfosIndexDivisor = termInfosIndexDivisor;
    if (!readOnly)
    {
        // We assume that this segments_N was previously
        // properly sync'd:
        SupportClass.CollectionsHelper.AddAllIfNotContains(synced, infos.Files(directory, true));
    }

    // we put the old SegmentReaders in a map, that allows us
    // to lookup a reader using its segment name
    System.Collections.IDictionary segmentReaders = new System.Collections.Hashtable();

    if (oldReaders != null)
    {
        // create a Map SegmentName->SegmentReader (the value stored is the
        // index into oldReaders, boxed as Int32)
        for (int i = 0; i < oldReaders.Length; i++)
        {
            segmentReaders[oldReaders[i].GetSegmentName()] = (System.Int32) i;
        }
    }

    SegmentReader[] newReaders = new SegmentReader[infos.Count];

    // remember which readers are shared between the old and the re-opened
    // DirectoryReader - we have to incRef those readers
    bool[] readerShared = new bool[infos.Count];

    for (int i = infos.Count - 1; i >= 0; i--)
    {
        // find SegmentReader for this segment
        if (!segmentReaders.Contains(infos.Info(i).name))
        {
            // this is a new segment, no old SegmentReader can be reused
            newReaders[i] = null;
        }
        else
        {
            System.Int32 oldReaderIndex = (System.Int32) segmentReaders[infos.Info(i).name];
            // there is an old reader for this segment - we'll try to reopen it
            newReaders[i] = oldReaders[oldReaderIndex];
        }

        bool success = false;
        try
        {
            SegmentReader newReader;
            if (newReaders[i] == null || infos.Info(i).GetUseCompoundFile() != newReaders[i].GetSegmentInfo().GetUseCompoundFile())
            {
                // We should never see a totally new segment during cloning
                System.Diagnostics.Debug.Assert(!doClone);

                // this is a new reader; in case we hit an exception we can close it safely
                newReader = SegmentReader.Get(readOnly, infos.Info(i), termInfosIndexDivisor);
            }
            else
            {
                newReader = newReaders[i].ReopenSegment(infos.Info(i), doClone, readOnly);
            }
            if (newReader == newReaders[i])
            {
                // this reader will be shared between the old and the new one,
                // so we must incRef it
                readerShared[i] = true;
                newReader.IncRef();
            }
            else
            {
                readerShared[i] = false;
                newReaders[i] = newReader;
            }
            success = true;
        }
        finally
        {
            if (!success)
            {
                // NOTE: this deliberately reuses the outer loop variable 'i' to
                // walk forward over the readers already created (higher indices,
                // since the main loop runs backwards) and release them.
                for (i++; i < infos.Count; i++)
                {
                    if (newReaders[i] != null)
                    {
                        try
                        {
                            if (!readerShared[i])
                            {
                                // this is a new subReader that is not used by the old one,
                                // we can close it
                                newReaders[i].Close();
                            }
                            else
                            {
                                // this subReader is also used by the old reader, so instead
                                // closing we must decRef it
                                newReaders[i].DecRef();
                            }
                        }
                        catch (System.IO.IOException ignore)
                        {
                            // keep going - we want to clean up as much as possible
                        }
                    }
                }
            }
        }
    }

    // initialize the readers to calculate maxDoc before we try to reuse the old normsCache
    Initialize(newReaders);

    // try to copy unchanged norms from the old normsCache to the new one
    if (oldNormsCache != null)
    {
        System.Collections.IEnumerator it = new System.Collections.Hashtable(oldNormsCache).GetEnumerator();
        while (it.MoveNext())
        {
            System.Collections.DictionaryEntry entry = (System.Collections.DictionaryEntry) it.Current;
            System.String field = (System.String) entry.Key;
            if (!HasNorms(field))
            {
                continue;
            }
            byte[] oldBytes = (byte[]) entry.Value;

            byte[] bytes = new byte[MaxDoc()];

            for (int i = 0; i < subReaders.Length; i++)
            {
                bool exist = segmentReaders.Contains(subReaders[i].GetSegmentName());
                int oldReaderIndex = -1;
                if (exist)
                {
                    oldReaderIndex = (System.Int32) segmentReaders[subReaders[i].GetSegmentName()];
                }

                // this SegmentReader was not re-opened, we can copy all of its norms
                if (exist && (oldReaders[oldReaderIndex] == subReaders[i] || oldReaders[oldReaderIndex].norms[field] == subReaders[i].norms[field]))
                {
                    // we don't have to synchronize here: either this constructor is called from a SegmentReader,
                    // in which case no old norms cache is present, or it is called from MultiReader.reopen(),
                    // which is synchronized
                    Array.Copy(oldBytes, oldStarts[oldReaderIndex], bytes, starts[i], starts[i + 1] - starts[i]);
                }
                else
                {
                    // re-read the norms for this re-opened segment
                    subReaders[i].Norms(field, bytes, starts[i]);
                }
            }

            normsCache[field] = bytes; // update cache
        }
    }
}
// todo - this is a pretty specialized helper function, hoist up to client code? public virtual String getLocalizedTextString(CultureInfo locale, Object obj) { String id = obj.GetType().FullName.Replace("$", "."); System.Collections.IDictionary parameters = new System.Collections.Hashtable(); Type c = obj.GetType(); while (c != typeof(Object)) { FieldInfo[] fields = c.GetFields(BindingFlags.Instance | BindingFlags.NonPublic | BindingFlags.Public | BindingFlags.DeclaredOnly | BindingFlags.Static); foreach (FieldInfo f in fields) { if (!f.IsPublic || f.IsStatic) { continue; } try { parameters[f.Name] = f.GetValue(obj); } catch (Exception) { } } c = c.BaseType; } String s = null; if ((parameters.Contains("id") && parameters["id"] != null)) { String subid = parameters["id"].ToString(); if (subid.Length > 0) { // fixme - Formalize? s = getLocalizedTextString(locale, id + "." + subid, parameters); } } if (s == null) { s = getLocalizedTextString(locale, id, parameters); } if (s == null) { s = id; if (parameters != null) { System.Text.StringBuilder sb = new System.Text.StringBuilder(); foreach (System.Collections.DictionaryEntry e in parameters) { if (sb.Length > 0) { sb.Append(", "); } sb.Append(e.Key.ToString()); if (e.Value != null) { sb.Append("='" + e.Value.ToString() + "'"); } } s += "[" + sb.ToString() + "]"; } return s; } return s; }
public void Sort() { hashListNode = new System.Collections.Hashtable(); //把listNode的内容放入DataTable中,方便排序 dtListNode = new DataTable(); dtListNode.Columns.Add("ACLID"); dtListNode.Columns.Add("FName"); dtListNode.Columns.Add("SeqNo"); for (int i = 0; i < listNode.Count; i++) { TreeNodeItem item = listNode[i]; DataRow dr = dtListNode.NewRow(); dr["ACLID"] = item.Aclid; dr["FNAME"] = item.FName; dr["SeqNo"] = item.SeqNo; dtListNode.Rows.Add(dr); if (!hashListNode.Contains(item.Aclid)) { hashListNode.Add(item.Aclid, item); } } }
public virtual void TestNoDupCommitFileNames() { Directory dir = new MockRAMDirectory(); IndexWriter writer = new IndexWriter(dir, new StandardAnalyzer(Util.Version.LUCENE_CURRENT), IndexWriter.MaxFieldLength.LIMITED); writer.SetMaxBufferedDocs(2); writer.AddDocument(CreateDocument("a")); writer.AddDocument(CreateDocument("a")); writer.AddDocument(CreateDocument("a")); writer.Close(); var commits = IndexReader.ListCommits(dir); var it = commits.GetEnumerator(); while (it.MoveNext()) { IndexCommit commit = it.Current; System.Collections.Generic.ICollection<string> files = commit.FileNames; System.Collections.Hashtable seen = new System.Collections.Hashtable(); System.Collections.IEnumerator it2 = files.GetEnumerator(); while (it2.MoveNext()) { System.String fileName = (System.String) it2.Current; Assert.IsTrue(!seen.Contains(fileName), "file " + fileName + " was duplicated"); seen.Add(fileName, fileName); } } dir.Close(); }
private static System.Collections.Hashtable DifFiles(System.String[] files1, System.String[] files2) { System.Collections.Hashtable set1 = new System.Collections.Hashtable(); System.Collections.Hashtable set2 = new System.Collections.Hashtable(); System.Collections.Hashtable extra = new System.Collections.Hashtable(); for (int x = 0; x < files1.Length; x++) { Support.CollectionsHelper.AddIfNotContains(set1, files1[x]); } for (int x = 0; x < files2.Length; x++) { Support.CollectionsHelper.AddIfNotContains(set2, files2[x]); } System.Collections.IEnumerator i1 = set1.GetEnumerator(); while (i1.MoveNext()) { System.Object o = i1.Current; if (!set2.Contains(o)) { Support.CollectionsHelper.AddIfNotContains(extra, o); } } System.Collections.IEnumerator i2 = set2.GetEnumerator(); while (i2.MoveNext()) { System.Object o = i2.Current; if (!set1.Contains(o)) { Support.CollectionsHelper.AddIfNotContains(extra, o); } } return extra; }
/// <summary>
/// Internal function to set up the cached data for the fitting procedure.
/// Builds the name->index maps for all and for varying parameters, caches the
/// constant/varying parameter values, and for every fit element computes its
/// valid rows, scratch arrays, dependent-variables-in-use list and its
/// parameter mapping. Finally caches the dependent values and (optionally)
/// the weights.
/// </summary>
/// <param name="paraSet">The set of parameters (the information which parameters are fixed is mainly used here).</param>
private void CalculateCachedData(ParameterSet paraSet)
{
    // Preparation: Store the parameter names by name and index, and store
    // all parameter values in _constantParameters
    System.Collections.Hashtable paraNames = new System.Collections.Hashtable();        // parameter name -> index in paraSet
    System.Collections.Hashtable varyingParaNames = new System.Collections.Hashtable(); // parameter name -> index among varying parameters

    _constantParameters = new double[paraSet.Count];
    int numberOfVaryingParameters = 0;
    for (int i = 0; i < paraSet.Count; ++i)
    {
        paraNames.Add(paraSet[i].Name, i);
        _constantParameters[i] = paraSet[i].Parameter;
        if (paraSet[i].Vary)
            ++numberOfVaryingParameters;
    }
    // Second pass: collect only the varying parameters, in order.
    _cachedVaryingParameters = new double[numberOfVaryingParameters];
    for (int i = 0, k = 0; i < paraSet.Count; ++i)
    {
        if (paraSet[i].Vary)
        {
            varyingParaNames.Add(paraSet[i].Name, k);
            _cachedVaryingParameters[k++] = paraSet[i].Parameter;
        }
    }

    _cachedNumberOfData = 0;
    _cachedFitElementInfo = new CachedFitElementInfo[_fitEnsemble.Count];
    for (int i = 0; i < _fitEnsemble.Count; i++)
    {
        CachedFitElementInfo info = new CachedFitElementInfo();
        _cachedFitElementInfo[i] = info;
        FitElement fitEle = _fitEnsemble[i];

        info.ValidRows = fitEle.CalculateValidNumericRows();

        // Scratch arrays sized to this element's variable/parameter counts.
        info.Xs = new double[fitEle.NumberOfIndependentVariables];
        info.Parameters = new double[fitEle.NumberOfParameters];
        info.Ys = new double[fitEle.NumberOfDependentVariables];

        // Calculate the number of used variables
        int numVariablesUsed = 0;
        for (int j = 0; j < fitEle.NumberOfDependentVariables; ++j)
        {
            if (fitEle.DependentVariables(j) != null)
                ++numVariablesUsed;
        }
        info.DependentVariablesInUse = new int[numVariablesUsed];
        for (int j = 0, used = 0; j < fitEle.NumberOfDependentVariables; ++j)
        {
            if (fitEle.DependentVariables(j) != null)
                info.DependentVariablesInUse[used++] = j;
        }

        // calculate the total number of data points
        _cachedNumberOfData += numVariablesUsed * info.ValidRows.Count;

        // now create the parameter mapping
        info.ParameterMapping = new int[fitEle.NumberOfParameters];
        for (int j = 0; j < info.ParameterMapping.Length; ++j)
        {
            if (!paraNames.Contains(fitEle.ParameterName(j)))
                throw new ArgumentException(string.Format("ParameterSet does not contain parameter {0}, which is used by function[{1}]", fitEle.ParameterName(j), i));
            int idx = (int)paraNames[fitEle.ParameterName(j)];
            if (paraSet[idx].Vary)
            {
                // varying parameter: non-negative index into _cachedVaryingParameters
                info.ParameterMapping[j] = (int)varyingParaNames[fitEle.ParameterName(j)];
            }
            else
            {
                // fixed parameter: encoded as negative value (-idx - 1), i.e.
                // an index into _constantParameters shifted to stay below zero
                info.ParameterMapping[j] = -idx - 1;
            }
        }
    }

    _cachedDependentValues = new double[_cachedNumberOfData];
    GetDependentValues(_cachedDependentValues);

    if (this.HasToUseWeights())
    {
        _cachedWeights = new double[_cachedNumberOfData];
        GetWeights(_cachedWeights);
    }
    else
        _cachedWeights = null;
}
/// <summary>
/// Validates the field schema of each configured layer against the dictionary
/// tables LR_DicLayer / LR_DicField: reports (1) layer fields missing from the
/// dictionary, (2) dictionary fields missing from the layer, and (3) type or
/// string-length mismatches. All findings are collected into checkResult.
/// </summary>
/// <param name="checkResult">Receives the list of detected FieldError items.</param>
/// <returns>true when the check ran to completion; false when a dictionary table
/// could not be loaded or an exception occurred.</returns>
public override bool Check(ref List<Error> checkResult)
{
    IFeatureWorkspace ipFtWS = (IFeatureWorkspace)m_BaseWorkspace;
    try
    {
        List<Error> pResult = new List<Error>();
        string strAlias = m_pFieldPara.strAlias;
        List<string> listLayer = m_pFieldPara.m_LyrFldMap;

        // Collect the layer names configured for this rule (the text before '&'
        // in each "layer&field" mapping entry).
        System.Collections.Hashtable hashtable = new System.Collections.Hashtable();
        for (int i = 0; i < listLayer.Count; i++)
        {
            string strTemp = listLayer[i];
            int nIndex = strTemp.IndexOf('&');
            if (nIndex < 0)
            {
                continue;
            }
            string str = strTemp.Substring(0, nIndex);
            if (!hashtable.Contains(str))
            {
                hashtable.Add(str, "");
            }
        }

        // Load the layer dictionary.
        DataTable dtLayer = new DataTable();
        string strSQL = "select AttrTableName,LayerName,LayerID from LR_DicLayer";
        dtLayer = Hy.Common.Utility.Data.AdoDbHelper.GetDataTable(SysDbHelper.GetSysDbConnection(), strSQL);
        if (dtLayer == null)
        {
            return false;
        }
        foreach (DataRow drLayer in dtLayer.Rows)
        {
            if (drLayer != null)
            {
                string strLayerCode = drLayer["AttrTableName"].ToString();
                string strLayerName = drLayer["LayerName"].ToString();
                int nLayerID = Convert.ToInt32(drLayer["LayerID"]);
                if (!hashtable.Contains(strLayerName))
                {
                    continue; // layer not selected for this rule
                }
                IFeatureClass pFtCls = null;
                try
                {
                    pFtCls = ipFtWS.OpenFeatureClass(strLayerCode);
                }
                catch
                {
                    continue; // feature class not present in the workspace
                }
                IFields pFields = pFtCls.Fields;
                if (pFields == null)
                {
                    continue;
                }
                int lFieldCount = pFields.FieldCount;
                IField pField;

                // Load the field dictionary for this layer.
                DataTable dtFields = new DataTable();
                string strSQLFields = "select * from LR_DicField where LayerID = " + nLayerID + "";
                dtFields = Hy.Common.Utility.Data.AdoDbHelper.GetDataTable(SysDbHelper.GetSysDbConnection(), strSQLFields);
                if (dtFields == null)
                {
                    // No field dictionary entries for this layer: report and move on.
                    FieldError LRFieldErrorInfo = new FieldError();
                    LRFieldErrorInfo.DefectLevel = this.DefectLevel;
                    LRFieldErrorInfo.strAttrTabName = strLayerName;
                    LRFieldErrorInfo.strFieldName = null;
                    LRFieldErrorInfo.m_strRuleInstID = this.m_InstanceID;
                    LRFieldErrorInfo.strErrorMsg = string.Format("{0}���Ӧ�������ֶΣ��ڡ�����������״���ݿ�����в�����", strLayerName);
                    pResult.Add(LRFieldErrorInfo);
                    continue;
                }

                // Pass 1: check whether the layer contains fields that are not in the dictionary.
                for (int i = 0; i < lFieldCount; i++)
                {
                    if (strLayerName == "ע��")
                    {
                        break; // annotation layer is exempt from this pass
                    }
                    pField = pFields.get_Field(i);
                    if (pField.Name.ToUpper().Contains("OBJECTID") || pField.Name.ToLower().Contains("shape"))
                    {
                        continue; // skip geodatabase system fields
                    }
                    int k = 0;
                    int nFieldCount = dtFields.Rows.Count;
                    for (k = 0; k < nFieldCount; k++)
                    {
                        DataRow drField = dtFields.Rows[k];
                        string strStdName = drField["FieldName"].ToString();
                        string strStdCode = drField["FieldCode"].ToString();
                        if (strStdCode.Trim().Equals("objectid", StringComparison.OrdinalIgnoreCase) || strStdCode.Trim().Equals("object id", StringComparison.OrdinalIgnoreCase) || strStdCode.Trim().Equals("shape", StringComparison.OrdinalIgnoreCase))
                        {
                            continue;
                        }
                        if (pField.Name.Trim().Equals(strStdCode.Trim(), StringComparison.OrdinalIgnoreCase))
                        {
                            break; // field found in the dictionary
                        }
                    }
                    if (k == nFieldCount)
                    {
                        // Field not found in the dictionary; the alias-based exemption below
                        // skips a special class of fields (alias contains "�����").
                        if (!pField.AliasName.Contains("�����"))
                        {
                            FieldError LRFieldErrorInfo2 = new FieldError();
                            LRFieldErrorInfo2.DefectLevel = this.DefectLevel;
                            LRFieldErrorInfo2.strAttrTabName = strLayerName;
                            LRFieldErrorInfo2.strFieldName = pField.Name;
                            LRFieldErrorInfo2.m_strRuleInstID = this.m_InstanceID;
                            LRFieldErrorInfo2.strErrorMsg = string.Format(Helper.ErrMsgFormat.ERR_410100001_1, strLayerName, pField.Name);
                            pResult.Add(LRFieldErrorInfo2);
                        }
                    }
                }

                // Pass 2: check whether each dictionary field exists in the layer
                // and whether its type and length match the dictionary.
                // (restructured loop -- add by wangxiang 20111201)
                int flag = 0;
                foreach (DataRow drField in dtFields.Rows)
                {
                    if (drField != null)
                    {
                        string strStdName = drField["FieldName"].ToString();
                        string strStdCode = drField["FieldCode"].ToString();
                        if (strStdCode.Trim().Equals("objectid", StringComparison.OrdinalIgnoreCase) || strStdCode.Trim().Equals("object id", StringComparison.OrdinalIgnoreCase) || strStdCode.Trim().Equals("shape", StringComparison.OrdinalIgnoreCase))
                        {
                            continue; // skip system fields
                        }
                        int nStdType = Convert.ToInt32(drField["FieldType"]);
                        string strStdFldType = Hy.Common.Utility.Data.AdoDbHelper.GetFieldTypeName(nStdType);
                        FieldError FieldErrInfo1 = new FieldError();
                        FieldErrInfo1.DefectLevel = this.DefectLevel;
                        FieldErrInfo1.strAttrTabName = strLayerName;
                        FieldErrInfo1.strFieldName = "" + strStdCode + "(" + strStdName + ")";
                        FieldErrInfo1.strStdFieldType = strStdFldType;
                        FieldErrInfo1.m_strRuleInstID = this.m_InstanceID;
                        int i = 0;
                        // 'flag' counts matched fields; once all layer fields are matched
                        // the inner scan can stop early.
                        for (i = 0; i < lFieldCount && flag < lFieldCount; i++)
                        {
                            pField = pFields.get_Field(i);
                            if (pField.Name.Trim() == strStdCode.Trim())
                            {
                                flag++;
                                esriFieldType pType = pField.Type;
                                if (nStdType == 3)
                                {
                                    nStdType = 4; // type code 3 is treated as 4 for comparison
                                }
                                esriFieldType pDTType = TopoHelper.en_GetEsriFieldByEnum(nStdType);
                                if (pType == pDTType)
                                {
                                    if (pType != esriFieldType.esriFieldTypeString)
                                    {
                                        break; // non-string types: type match is sufficient
                                    }
                                    if (pField.Length != Convert.ToInt32(drField["Length"])) // field length incorrect
                                    {
                                        // NOTE(review): both branches build the identical message;
                                        // the JBNTBHTB special case looks like a leftover. Confirm intent.
                                        if (strLayerCode.Equals("JBNTBHTB", StringComparison.OrdinalIgnoreCase) && pField.Name.Trim().Equals("jbnttbbh", StringComparison.OrdinalIgnoreCase))
                                        {
                                            FieldErrInfo1.strErrorMsg = string.Format(Helper.ErrMsgFormat.ERR_410100001_4, strLayerName, pField.Name, pField.Length, Convert.ToInt32(drField["Length"]));
                                        }
                                        else
                                        {
                                            FieldErrInfo1.strErrorMsg = string.Format(Helper.ErrMsgFormat.ERR_410100001_4, strLayerName, pField.Name, pField.Length, Convert.ToInt32(drField["Length"]));
                                        }
                                        pResult.Add(FieldErrInfo1);
                                        break;
                                    }
                                    break;
                                }
                                else
                                {
                                    // Type mismatch (blob dictionary types are not reported).
                                    if (pDTType != esriFieldType.esriFieldTypeBlob)
                                    {
                                        FieldErrInfo1.strErrorMsg = string.Format(Helper.ErrMsgFormat.ERR_410100001_3, strLayerName, pField.Name, TopoHelper.en_GetFieldTypebyEsriField(pType), TopoHelper.en_GetFieldTypebyEsriField(pDTType));
                                        pResult.Add(FieldErrInfo1);
                                    }
                                    break;
                                }
                            }
                        }
                        if (i == lFieldCount)
                        {
                            // Dictionary field was not found in the layer; "fz" (optional)
                            // fields are not reported.
                            if (drField["FieldOption"].ToString().Trim() != "fz")
                            {
                                FieldErrInfo1.strErrorMsg = string.Format(Helper.ErrMsgFormat.ERR_410100001_2, strLayerName, drField["FieldName"].ToString());
                                pResult.Add(FieldErrInfo1);
                            }
                        }
                    }
                }
                // Release the COM feature class reference.
                if (pFtCls != null)
                {
                    Marshal.ReleaseComObject(pFtCls);
                    pFtCls = null;
                }
            }
        }
        checkResult = pResult;
    }
    catch (Exception ex)
    {
        SendMessage(enumMessageType.Exception, ex.ToString());
        return false;
    }
    return true;
}
/// <summary> Simple similarity query generator.
/// Takes every unique word in <paramref name="body"/> and forms a boolean query
/// where all words are optional (SHOULD clauses). After you get this you'll use
/// it to query your {@link IndexSearcher} for similar docs. The only caveat is
/// that the first hit returned <b>should be</b> your source document - you'll
/// need to ignore that.
/// <p>
/// The philosophy behind this method is "two documents are similar if they share
/// lots of words". Behind the scenes, Lucene's scoring will tend to rank two
/// documents as more similar if they share more uncommon words.
/// <p>
/// This method is fail-safe: if a long 'body' is passed in and
/// {@link BooleanQuery#add BooleanQuery.add()} throws
/// {@link org.apache.lucene.search.BooleanQuery.TooManyClauses BooleanQuery.TooManyClauses},
/// the query as built so far is returned.
/// </summary>
/// <param name="body">the body of the document you want to find similar documents to</param>
/// <param name="a">the analyzer to use to parse the body</param>
/// <param name="field">the field you want to search on, probably something like "contents" or "body"</param>
/// <param name="stop">optional set of stop words to ignore (may be null)</param>
/// <returns> a query with all unique words in 'body' as optional clauses</returns>
/// <throws> IOException this can't happen... </throws>
public static Query FormSimilarQuery(System.String body, Analyzer a, System.String field, System.Collections.Hashtable stop)
{
    TokenStream stream = a.TokenStream(field, new System.IO.StringReader(body));
    TermAttribute termAtt = (TermAttribute)stream.AddAttribute(typeof(TermAttribute));
    BooleanQuery query = new BooleanQuery();
    // tracks words already added, so each unique word appears once
    System.Collections.Hashtable seen = new System.Collections.Hashtable();
    while (stream.IncrementToken())
    {
        String word = termAtt.Term();
        // skip optional stop words
        if (stop != null && stop.Contains(word))
            continue;
        // skip words we have already added
        if (seen.Contains(word))
            continue;
        seen.Add(word, word);
        try
        {
            query.Add(new TermQuery(new Term(field, word)), BooleanClause.Occur.SHOULD);
        }
        catch (BooleanQuery.TooManyClauses)
        {
            // fail-safe: the query built so far is still useful
            break;
        }
    }
    return query;
}
/// <summary> /// ��o��ܤ��e�˪O��k /// </summary> /// <param name="type">�ƥ����</param> /// <returns>��ܤ��e</returns> protected virtual System.Collections.Hashtable getDisplayContent(CategoryType type) { int maxSegId = 0; string devsMeg = ""; int mile_s = Convert.ToInt32(ht["FROM_MILEPOST1"]); int mile_e = Convert.ToInt32(ht["TO_MILEPOST1"]); string lineid = Convert.ToString(ht["INC_LINEID"]).Trim(); string direction = Convert.ToString(ht["INC_DIRECTION"]).Trim(); switch (type) { case CategoryType.GEN: //�@��ƥ� { //�d�� //System.Collections.Hashtable DevHTIn; //if (mile_s != mile_e) //{ // DevHTIn = new System.Collections.Hashtable(); // if (devType != DeviceType.RMS && devType != DeviceType.LCS) // { // DevHTIn = setGENDisplay(getInDeviceName(ref maxSegId, ref devsMeg, mile_s, mile_e, lineid), -99); // } //} if (direction.Length > 1) { #region ���V RemoteInterface.HC.FetchDeviceData[] fetchDevs = getOutDeviceName(ref maxSegId, ref devsMeg, mile_s, lineid, direction[0].ToString()); System.Collections.Hashtable DevHT1 = setGENDisplay(fetchDevs, maxSegId, direction[0].ToString()); if (DevHT1 == null) DevHT1 = new System.Collections.Hashtable(); fetchDevs = getOutDeviceName(ref maxSegId, ref devsMeg, mile_s, lineid, direction[1].ToString()); System.Collections.Hashtable DevHT2 = setGENDisplay(fetchDevs, maxSegId, direction[1].ToString()); if (DevHT2 != null) { foreach (System.Collections.DictionaryEntry Dev in DevHT2) { if (!DevHT1.Contains(Dev.Key)) DevHT1.Add(Dev.Key, Dev.Value); } } if (mile_s != mile_e )//&& (int)ht["INC_NAME"] == 31) //�I�u�d�� { RemoteInterface.HC.I_HC_FWIS hobj = EasyClient.getHost(); if (devType != DeviceType.LCS)//(devType != DeviceType.RMS && devType != DeviceType.LCS) { fetchDevs = hobj.Fetch(new string[] { devType.ToString() }, lineid, direction[0].ToString(), mile_s, mile_e); foreach (RemoteInterface.HC.FetchDeviceData dev in fetchDevs) { dev.SegId = -99; } DevHT2 = setGENDisplay(fetchDevs,maxSegId,direction[0].ToString()); foreach 
(System.Collections.DictionaryEntry Dev in DevHT2) { if (!DevHT1.Contains(Dev.Key)) { DevHT1.Add(Dev.Key, Dev.Value); } } fetchDevs = hobj.Fetch(new string[] { devType.ToString() }, lineid, direction[1].ToString(), mile_s, mile_e); foreach (RemoteInterface.HC.FetchDeviceData dev in fetchDevs) { dev.SegId = -99; } DevHT2 = setGENDisplay(fetchDevs,maxSegId,direction[1].ToString()); foreach (System.Collections.DictionaryEntry Dev in DevHT2) { if (!DevHT1.Contains(Dev.Key)) { DevHT1.Add(Dev.Key, Dev.Value); } } if (devType == DeviceType.CMS || devType == DeviceType.RGS) { DevHT2 = setGENDisplay(GetRangeBrachCMS(lineid, mile_s, mile_e), maxSegId, direction); foreach (System.Collections.DictionaryEntry Dev in DevHT2) { if (!DevHT1.Contains(Dev.Key)) { DevHT1.Add(Dev.Key, Dev.Value); } } } } } //if (DevHTIn != null) //{ // foreach (System.Collections.DictionaryEntry Dev in DevHTIn) // { // if (!DevHT1.Contains(Dev.Key)) // DevHT1.Add(Dev.Key, Dev.Value); // } //} return DevHT1; #endregion } else { RemoteInterface.HC.FetchDeviceData[] fetchDevs = getOutDeviceName(ref maxSegId, ref devsMeg, mile_s, lineid, direction); System.Collections.Hashtable DevHT1 = setGENDisplay(fetchDevs, maxSegId, direction); if (mile_s != mile_e )//&& (int)ht["INC_NAME"] == 31) //�I�u�d�� { RemoteInterface.HC.I_HC_FWIS hobj = EasyClient.getHost(); //if (devType != DeviceType.LCS)//(devType != DeviceType.RMS && devType != DeviceType.LCS) //{ System.Collections.Hashtable DevHT2 = new System.Collections.Hashtable(); fetchDevs = hobj.Fetch(new string[] { devType.ToString() }, lineid, direction, mile_s, mile_e); if (devType == DeviceType.LCS) //LCS ���X�ʲ��I�G�D�᭱�]�� { RemoteInterface.HC.FetchDeviceData[] tmpfetchDevs = getOutDeviceName(ref maxSegId, ref devsMeg, mile_e, lineid, direction); if (tmpfetchDevs != null) { fetchDevs = tmpfetchDevs; } //int tmpSegId = maxSegId; //string tmpDevsMeg = devsMeg; //RemoteInterface.HC.FetchDeviceData[] fetchDevs2 = getOutDeviceName(ref tmpSegId, ref tmpDevsMeg, 
mile_e, lineid, direction); //if (fetchDevs2 != null) //{ // System.Collections.Generic.Dictionary<string, RemoteInterface.HC.FetchDeviceData> tmpDict = new Dictionary<string, RemoteInterface.HC.FetchDeviceData>(fetchDevs.Length + fetchDevs2.Length); // for (int i = 0; i < fetchDevs.Length; i++) // { // if (!tmpDict.ContainsKey(fetchDevs[i].DevName)) // { // tmpDict.Add(fetchDevs[i].DevName, fetchDevs[i]); // } // } // for (int i = 0; i < fetchDevs2.Length; i++) // { // if (!tmpDict.ContainsKey(fetchDevs2[i].DevName)) // { // tmpDict.Add(fetchDevs2[i].DevName, fetchDevs2[i]); // } // } // fetchDevs = new RemoteInterface.HC.FetchDeviceData[tmpDict.Count]; // int j = 0; // foreach (System.Collections.Generic.KeyValuePair<string, RemoteInterface.HC.FetchDeviceData> tmpDev in tmpDict) // { // fetchDevs[j++] = tmpDev.Value; // } //} } foreach (RemoteInterface.HC.FetchDeviceData dev in fetchDevs) { dev.SegId = -99; } DevHT2 = setGENDisplay(fetchDevs, maxSegId, direction); if (DevHT1 == null) DevHT1 = new System.Collections.Hashtable(); foreach (System.Collections.DictionaryEntry Dev in DevHT2) { if (!DevHT1.Contains(Dev.Key)) { DevHT1.Add(Dev.Key, Dev.Value); } } if (devType == DeviceType.CMS || devType == DeviceType.RGS) { DevHT2 = setGENDisplay(GetRangeBrachCMS(lineid, mile_s, mile_e), maxSegId, direction); foreach (System.Collections.DictionaryEntry Dev in DevHT2) { if (!DevHT1.Contains(Dev.Key)) { DevHT1.Add(Dev.Key, Dev.Value); } } } //} } return DevHT1; } } case CategoryType.OBS: //�ö�ƥ� { RemoteInterface.HC.FetchDeviceData[] fetchDevs; if ((int)ht["INC_NAME"] == 133 && devType != DeviceType.CCTV) //�J�f�`�D�ö� { if (devType == DeviceType.CMS) { string where = string.Format("mileage > {0} and mileage < {1}", mile_s - 500, mile_s + 500); string cmd = string.Format("Select mileage,DivisionType,DivisionID From {0}.{1} where LineID = '{2}' and DivisionType in ('I','C') and {3} fetch first 1 rows only; " , RSPGlobal.GlobaSchema, DBConnect.DB2TableName.tblGroupDivision, 
lineid,where); System.Data.DataTable DT = com.Select(cmd); if (DT.Rows.Count == 0 )// { return setOBSDisplay(new RemoteInterface.HC.FetchDeviceData[0], maxSegId); } else if ((string)DT.Rows[0][1] != "I")//�t�Υ�y�D { string DivisionID = (string)DT.Rows[0][2]; cmd = string.Format("select LineId2,mileage2,direction2 from {0}.VWCLOVERLEAF where DivisionType = 'C' " + "and LineID1 = '{1}' and Mileage1 = {2} and Direction1 = '{3}';", RSPGlobal.GlobaSchema, lineid, DT.Rows[0][0], direction); DT = com.Select(cmd); if (DT.Rows.Count == 0) { return setOBSDisplay(new RemoteInterface.HC.FetchDeviceData[0], maxSegId); } else { System.Collections.Hashtable DevHT = new System.Collections.Hashtable(); foreach (System.Data.DataRow dr in DT.Rows) { fetchDevs = getOutDeviceName(ref maxSegId, ref devsMeg, (int)dr[1], (string)dr[0], (string)dr[2]); System.Collections.Hashtable tmpDevHT = setOBSDisplay(fetchDevs, maxSegId); foreach (System.Collections.DictionaryEntry Dev in tmpDevHT) { if (!DevHT.Contains(Dev.Key)) { DevHT.Add(Dev.Key, Dev.Value); } } } return DevHT; } } else//��y�D { fetchDevs = LoadLCMS(lineid, direction, mile_s); return setOBSDisplay(fetchDevs, maxSegId); } } } fetchDevs = getOutDeviceName(ref maxSegId, ref devsMeg, mile_s, lineid, direction); if ((devType == DeviceType.CMS || devType == DeviceType.RMS || devType == DeviceType.CCTV) && mile_s != mile_e) { RemoteInterface.HC.I_HC_FWIS hobj = EasyClient.getHost(); RemoteInterface.HC.FetchDeviceData[] fetchDevsIn = hobj.Fetch(new string[] { devType.ToString() }, lineid, direction, mile_s, mile_e); System.Data.DataTable DT = RSPGlobal.GetDeviceDT(); for (int i = 0; i < fetchDevsIn.Length; i++) { System.Data.DataRow dr = DT.Rows.Find(fetchDevsIn[i].DevName); if (dr != null) { fetchDevsIn[i].Location = dr[RSPGlobal.Location].ToString(); fetchDevsIn[i].SegId = -99; } } List<RemoteInterface.HC.FetchDeviceData> fetchList = new List<RemoteInterface.HC.FetchDeviceData>(fetchDevs.Length + fetchDevsIn.Length); 
fetchList.AddRange(fetchDevs); fetchList.AddRange(fetchDevsIn); return setOBSDisplay(fetchList.ToArray(), maxSegId); } return setOBSDisplay(fetchDevs, maxSegId); } case CategoryType.WEA: //�ѭԨƥ� { //RemoteInterface.HC.FetchDeviceData[] fetchDevs = getDeviceName(ref maxSegId); //return setWEADisplay(fetchDevs, maxSegId); System.Collections.Hashtable devHT = new System.Collections.Hashtable(); System.Collections.Hashtable displayHT = new System.Collections.Hashtable(); //�d�� if (devType != DeviceType.RMS && devType != DeviceType.LCS) { devHT = setWEADisplay(getInDeviceName(ref maxSegId, ref devsMeg, mile_s, mile_e, lineid), -99); if (devHT != null) { foreach (System.Collections.DictionaryEntry de in devHT)//-99���d�� { displayHT.Add(de.Key, de.Value); } } } string dir1 = string.Empty; string dir2 = string.Empty; if (ht["LINE_DIRECTION"].ToString() == "NS") { dir1 = "N"; dir2 = "S"; } else { dir1 = "W"; dir2 = "E"; } //devHT = setWEADisplay(getOutDeviceName(ref maxSegId, ref devsMeg, mile_s > mile_e ? mile_e : mile_s, lineid, dir1), maxSegId); devHT = setWEADisplay(getOutDeviceName(ref maxSegId, ref devsMeg, mile_s < mile_e ? mile_e : mile_s, lineid, dir1), maxSegId); // 8/31�L�� if (devHT != null) { foreach (System.Collections.DictionaryEntry de in devHT) { if (!displayHT.Contains(de.Key)) displayHT.Add(de.Key, de.Value); } } //devHT = setWEADisplay(getOutDeviceName(ref maxSegId, ref devsMeg, mile_s > mile_e ? mile_s : mile_e, lineid, dir2), maxSegId); devHT = setWEADisplay(getOutDeviceName(ref maxSegId, ref devsMeg, mile_s < mile_e ? 
mile_s : mile_e, lineid, dir2), maxSegId); // 8/31�L�� if (devHT != null) { foreach (System.Collections.DictionaryEntry de in devHT) { if (!displayHT.Contains(de.Key)) displayHT.Add(de.Key, de.Value); } } return displayHT; } case CategoryType.TUN: //�G�D���q�ƥ� { RemoteInterface.HC.FetchDeviceData[] fetchDevs = getOutDeviceName(ref maxSegId, ref devsMeg, mile_s, lineid, direction); return setTUNDisplay(fetchDevs, maxSegId); } case CategoryType.RES: //�ި�ƥ� { RemoteInterface.HC.FetchDeviceData[] fetchDevs = getOutDeviceName(ref maxSegId, ref devsMeg, mile_s, lineid, direction); return setRESDisplay(fetchDevs, maxSegId); } case CategoryType.OTH: //��L�ƥ� { RemoteInterface.HC.FetchDeviceData[] fetchDevs = getOutDeviceName(ref maxSegId, ref devsMeg, mile_s, lineid, direction); return setOTHDisplay(fetchDevs, maxSegId); } case CategoryType.RMS: //�`�D���� { if (devType == DeviceType.CMS) { string cmd = string.Format("Select cfg.DeviceName,cfg.Mile_M,section.MaxSpeed,section.MinSpeed From {0}.{1} cfg ,{0}.{4} section " + " where cfg.LineID = '{2}' and cfg.device_Type = 'CMS' and cfg.Location = 'L' " + " and cfg.mile_m > ({3} - 500) and cfg.mile_m < ({3} + 500) and cfg.sectionID = section.sectionID;" , RSPGlobal.GlobaSchema, DBConnect.DB2TableName.tblDeviceConfig, lineid, mile_s, DBConnect.DB2TableName.tblGroupSection); System.Data.DataTable DT = com.Select(cmd); RemoteInterface.HC.FetchDeviceData[] fetchDevs = new RemoteInterface.HC.FetchDeviceData[DT.Rows.Count]; int j = 0; foreach (System.Data.DataRow dr in DT.Rows) { fetchDevs[j] = new RemoteInterface.HC.FetchDeviceData((string)dr[0], 0, lineid, direction, (int)dr[1], (int)dr[2], (int)dr[3], "RMS"); fetchDevs[j].Location = "L"; j++; } return setRESDisplay(fetchDevs, maxSegId); } else { return setRESDisplay(new RemoteInterface.HC.FetchDeviceData[0], 0); } } case CategoryType.RGS: { RemoteInterface.HC.FetchDeviceData[] fetchDevs = getOutDeviceName(ref maxSegId, ref devsMeg, mile_s, lineid, direction); return 
setRESDisplay(fetchDevs, maxSegId); } case CategoryType.TUNFire: { RemoteInterface.HC.FetchDeviceData[] fetchDevs1, fetchDevs2; List<RemoteInterface.HC.FetchDeviceData> fetchDevs = new List<RemoteInterface.HC.FetchDeviceData>(); switch (direction) { //case "N": //case "S": // fetchDevs1 = getOutDeviceName(ref maxSegId, ref devsMeg, mile_s, lineid, "N"); // fetchDevs2 = getOutDeviceName(ref maxSegId, ref devsMeg, mile_s, lineid, "S"); // break; default: fetchDevs1 = getOutDeviceName(ref maxSegId, ref devsMeg, mile_s, lineid, "W"); fetchDevs2 = getOutDeviceName(ref maxSegId, ref devsMeg, mile_s, lineid, "E"); break; } if (fetchDevs1 != null) fetchDevs.AddRange(fetchDevs1); if (fetchDevs2 != null) fetchDevs.AddRange(fetchDevs2); return setTUNDisplay(fetchDevs.ToArray(), maxSegId); } case CategoryType.PARK: { List<RemoteInterface.HC.FetchDeviceData> DevList = new List<RemoteInterface.HC.FetchDeviceData>(); DevList.AddRange(getOutDeviceName(ref maxSegId, ref devsMeg, mile_s, lineid, "N")); DevList.AddRange(getOutDeviceName(ref maxSegId, ref devsMeg, mile_s, lineid, "S")); return setOTHDisplay(DevList.ToArray(), maxSegId); } default: return null; } }
/// <summary>
/// Collects CMS/RGS devices that sit on the branch (cloverleaf) lines of every
/// 'C'-type division found on <paramref name="Line"/> within [mile_s, mile_e],
/// then filters the result down to the current <c>devType</c>.
/// Returns an empty array when no NORMAL device range is configured.
/// </summary>
/// <param name="Line">Line id whose divisions are scanned.</param>
/// <param name="mile_s">One end of the mileage range (order-insensitive).</param>
/// <param name="mile_e">Other end of the mileage range.</param>
private RemoteInterface.HC.FetchDeviceData[] GetRangeBrachCMS(string Line, int mile_s, int mile_e)
{
    // A NORMAL range of 0 means "fetch nothing" — bail out early.
    if ((int)DevRange["NORMAL"] == 0)
    {
        return new RemoteInterface.HC.FetchDeviceData[0];
    }
    // Normalize so mile_s <= mile_e.
    if (mile_s > mile_e)
    {
        int k = mile_e;
        mile_e = mile_s;
        mile_s = k;
    }
    RemoteInterface.HC.I_HC_FWIS hobj = EasyClient.getHost();
    System.Data.DataTable DivDT = RSPGlobal.GetDivisionDT();
    System.Data.DataTable CloverleafDT = RSPGlobal.GetCloverleafDT();
    // Keyed by DevName so the same device is never collected twice.
    System.Collections.Hashtable tmpHT = new System.Collections.Hashtable();
    //List<System.Data.DataRow> DRList = new List<System.Data.DataRow>();
    foreach (System.Data.DataRow dr in DivDT.Rows)
    {
        // NOTE(review): columns accessed by ordinal — dr[1] appears to be
        // DivisionType, dr[2] mileage, dr[3] LineID (matches the SELECT used
        // elsewhere against tblGroupDivision); confirm against GetDivisionDT().
        if ((string)dr[1] == "C" && (string)dr[3] == Line && (int)dr[2] >= mile_s && (int)dr[2] <= mile_e)
        {
            int k = 0;
            foreach (System.Data.DataRow CloverDR in CloverleafDT.Rows)
            {
                // Match cloverleaf rows anchored at this division's mileage.
                if ((string)CloverDR[0] == Line && (int)CloverDR[1] == (int)dr[2])
                {
                    k++;
                    // Fetch CMS/RGS devices along the branch line starting at
                    // the branch-side mileage, up to NORMAL-1 segments out.
                    RemoteInterface.HC.FetchDeviceData[] fetDevs = hobj.Fetch(new string[] { "CMS", "RGS" }, (string)CloverDR[2], (string)CloverDR[4], (int)CloverDR[3], (int)DevRange["NORMAL"] - 1, 0, false);
                    foreach (RemoteInterface.HC.FetchDeviceData dev in fetDevs)
                    {
                        if (!tmpHT.Contains(dev.DevName))
                        {
                            tmpHT.Add(dev.DevName, dev);
                        }
                        // Stop once the device at the far edge of the range is reached.
                        if (dev.SegId == (int)DevRange["NORMAL"] - 1) break;
                    }
                    // NOTE(review): assumes a cloverleaf has at most two branch
                    // rows per anchor mileage — confirm against VWCLOVERLEAF.
                    if (k == 2)
                    {
                        break;
                    }
                }
            }
        }
    }
    // Keep only devices of the type this instance is displaying.
    List<RemoteInterface.HC.FetchDeviceData> DevList = new List<RemoteInterface.HC.FetchDeviceData>(tmpHT.Count);
    foreach (System.Collections.DictionaryEntry de in tmpHT)
    {
        if (((RemoteInterface.HC.FetchDeviceData)de.Value).DeviceType == devType.ToString())
        {
            DevList.Add((RemoteInterface.HC.FetchDeviceData)de.Value);
        }
    }
    return DevList.ToArray();
}
/// <summary>
/// Builds the page chrome on every request: when the session says the user is
/// logged in, shows the menu panel, fills the header literals (date, user,
/// branch) and rebuilds the navigation menu from the session's page list;
/// otherwise hides the menu entirely.
/// </summary>
protected void Page_Load(object sender, EventArgs e)
{
    if (Session[MtbBillCollection.Global.Definitions.SessionVariable.Value.IsLoggedIn] != null
        && Session[MtbBillCollection.Global.Definitions.SessionVariable.Value.IsLoggedIn].Equals("True"))
    {
        // Turn on the menu control panel
        pnlMenuItems.Visible = true;

        // BUG FIX: Int32.TryParse sets its out argument to 0 on failure, which
        // silently discarded the Issuer default (and .ToString() on a missing
        // session value threw NRE). Fall back to Issuer explicitly.
        int userType = MtbBillCollection.Global.Definitions.UserType.Value.Issuer;
        object userTypeRaw = Session[MtbBillCollection.Global.Definitions.SessionVariable.Value.UserTypeId];
        if (userTypeRaw == null || !Int32.TryParse(userTypeRaw.ToString(), out userType))
        {
            userType = MtbBillCollection.Global.Definitions.UserType.Value.Issuer;
        }

        // Top info part
        LitCollDate.Text = DateTime.Now.Date.ToString("dd/MMM/yyyy");
        LitUserName.Text = Session[MtbBillCollection.Global.Definitions.SessionVariable.Value.FullName].ToString();
        LitBranchName.Text = " @ " + Session[MtbBillCollection.Global.Definitions.SessionVariable.Value.BranchName].ToString();

        // Rebuild the menu from the pages this user may see; the URL for each
        // page name comes from the application-wide menu table. Guard hs != null
        // so a missing application entry does not throw.
        string[] pageList = (string[])Session[MtbBillCollection.Global.Definitions.SessionVariable.Value.PageList];
        System.Collections.Hashtable hs = (System.Collections.Hashtable)Application[MtbBillCollection.Global.Definitions.SessionVariable.Value.MenuList];
        if (pageList != null && hs != null)
        {
            Menu1.Items.Clear();
            foreach (string pageName in pageList)
            {
                if (hs.Contains(pageName))
                {
                    MenuItem menuItem = new MenuItem();
                    menuItem.Text = pageName;
                    menuItem.NavigateUrl = hs[pageName].ToString();
                    Menu1.Items.Add(menuItem);
                }
            }
        }

        LitLogMenu.Text = "<a href='Logout.aspx' style=' color:GREEN' >Logout</a>";

        // The per-role hard-coded menu literals (SuperAdmin / Manager / Issuer /
        // Reviewer branches) were retired in favor of the session-driven menu
        // above; userType is still resolved so role-specific items can be
        // reinstated later without re-deriving it.
        if (userType == MtbBillCollection.Global.Definitions.UserType.Value.SuperAdmin)
        {
            // (retired static menu for SuperAdmin)
        }
        else if (userType == MtbBillCollection.Global.Definitions.UserType.Value.Manager)
        {
            // (retired static menu for Manager)
        }
        else if (userType == MtbBillCollection.Global.Definitions.UserType.Value.Issuer)
        {
            // (retired static menu for Issuer)
        }
        else if (userType == MtbBillCollection.Global.Definitions.UserType.Value.Reviewer)
        {
            // (retired static menu for Reviewer)
        }
    }
    else
    {
        // Not logged in (or session expired): hide the menu entirely.
        pnlMenuItems.Visible = false;
        LitLogMenu.Text = "";
    }
}
/// <summary>
/// Parses a Holodeck resource log into an array of <c>Holodeck.Resource</c>
/// records. Each record is a run of "Tag: value" lines (Name, Func, Proc,
/// Time, Retv, Errc, Exce). Duplicate names are collapsed and their hit
/// counts accumulated.
/// </summary>
/// <param name="file">Open reader over the log; consumed to the end.</param>
/// <returns>The de-duplicated resources parsed so far (best effort).</returns>
protected virtual Holodeck.Resource[] ReadResources (System.IO.StreamReader file)
{
    const string Name = "Name: ";
    const string Func = "Func: ";
    const string Proc = "Proc: ";
    const string Time = "Time: ";
    const string Retv = "Retv: ";
    const string Errc = "Errc: ";
    const string Exce = "Exce: ";

    Holodeck.Resource resource;
    System.Collections.Hashtable resources = new System.Collections.Hashtable (10);
    string stream = file.ReadToEnd ();

    try
    {
        int begIndex = 0;
        int endIndex = 0;
        while (true)
        {
            // BUG FIX: the loop previously ran until Substring/IndexOf threw and
            // the exception was swallowed (exception-driven termination). A
            // missing "Name: " tag now ends parsing explicitly.
            begIndex = stream.IndexOf (Name, endIndex);
            if (begIndex < 0)
                break;
            endIndex = stream.IndexOf ("\n", begIndex);
            if (endIndex < 0)
                break;
            begIndex += Name.Length;
            resource.Name = stream.Substring (begIndex, endIndex - begIndex - 1);

            begIndex = stream.IndexOf (Func, endIndex);
            endIndex = stream.IndexOf ("\n", begIndex);
            begIndex += Func.Length;
            resource.LastFunction = stream.Substring (begIndex, endIndex - begIndex - 1);

            begIndex = stream.IndexOf (Proc, endIndex);
            endIndex = stream.IndexOf ("\n", begIndex);
            begIndex += Proc.Length;
            resource.processID = Int64.Parse (stream.Substring (begIndex, endIndex - begIndex - 1));

            begIndex = stream.IndexOf (Time, endIndex);
            endIndex = stream.IndexOf ("\n", begIndex);
            begIndex += Time.Length;
            resource.TimeStamp = stream.Substring (begIndex, endIndex - begIndex - 1);

            begIndex = stream.IndexOf (Retv, endIndex);
            endIndex = stream.IndexOf ("\n", begIndex);
            begIndex += Retv.Length;
            resource.ReturnValue = stream.Substring (begIndex, endIndex - begIndex - 1);

            begIndex = stream.IndexOf (Errc, endIndex);
            endIndex = stream.IndexOf ("\n", begIndex);
            begIndex += Errc.Length;
            resource.ErrorCode = stream.Substring (begIndex, endIndex - begIndex - 1);

            // Advance past the "Exce: " line. Resource has no field for the
            // exception text (the original assignment was commented out), so
            // the value is skipped but the cursor still moves to keep parsing
            // aligned with the record layout.
            begIndex = stream.IndexOf (Exce, endIndex);
            endIndex = stream.IndexOf ("\n", begIndex);

            resource.NumHits = 1;
            resource.threadID = 0;

            // Collapse duplicates by name, accumulating the hit count.
            if (resources.Contains (resource.Name))
            {
                Holodeck.Resource oldResource = (Holodeck.Resource) resources[resource.Name];
                resource.NumHits += oldResource.NumHits;
            }
            resources[resource.Name] = resource;
        }
    }
    catch (Exception)
    {
        // A truncated or malformed record (bad tag, unparsable Proc value)
        // aborts parsing; everything read so far is still returned, which
        // preserves the original best-effort behavior.
    }

    Holodeck.Resource[] result = new Holodeck.Resource[resources.Count];
    resources.Values.CopyTo (result, 0);
    return result;
}
/// <summary>
/// Verifies that every word registered in the stop set is filtered out of the
/// token stream and that, by default, stopped words do not add position gaps.
/// </summary>
public virtual void TestStopList()
{
    // Build the stop set (key == value, the shape StopAnalyzer expects).
    System.Collections.Hashtable stopSet = new System.Collections.Hashtable();
    foreach (System.String stopWord in new System.String[] { "good", "test", "analyzer" })
    {
        stopSet.Add(stopWord, stopWord);
    }

    StopAnalyzer analyzer = new StopAnalyzer(stopSet);
    System.IO.StringReader input = new System.IO.StringReader("This is a good test of the english stop analyzer");
    TokenStream ts = analyzer.TokenStream("test", input);
    Assert.IsNotNull(ts);

    TermAttribute term = (TermAttribute) ts.GetAttribute(typeof(TermAttribute));
    PositionIncrementAttribute posIncr = (PositionIncrementAttribute) ts.AddAttribute(typeof(PositionIncrementAttribute));

    while (ts.IncrementToken())
    {
        System.String text = term.Term();
        Assert.IsFalse(stopSet.Contains(text));
        // By default the stop tokenizer does not apply position increments.
        Assert.AreEqual(1, posIncr.GetPositionIncrement());
    }
}
/// <summary>
/// Verifies that with position increments globally enabled, each run of
/// removed stop words widens the position increment of the following token.
/// </summary>
public virtual void TestStopListPositions()
{
    // Remember the global default so the test leaves no side effects behind.
    bool savedDefault = StopFilter.GetEnablePositionIncrementsDefault();
    StopFilter.SetEnablePositionIncrementsDefault(true);
    try
    {
        // Build the stop set (key == value, the shape StopAnalyzer expects).
        System.Collections.Hashtable stopSet = new System.Collections.Hashtable();
        foreach (System.String stopWord in new System.String[] { "good", "test", "analyzer" })
        {
            stopSet.Add(stopWord, stopWord);
        }

        StopAnalyzer analyzer = new StopAnalyzer(stopSet);
        System.IO.StringReader input = new System.IO.StringReader("This is a good test of the english stop analyzer with positions");
        // One expected increment per surviving token; gaps mark removed words.
        int[] expectedIncrements = new int[] { 1, 1, 1, 3, 1, 1, 1, 2, 1 };

        TokenStream ts = analyzer.TokenStream("test", input);
        Assert.IsNotNull(ts);

        int i = 0;
        for (Token token = ts.Next(); token != null; token = ts.Next())
        {
            System.String text = token.TermText();
            Assert.IsFalse(stopSet.Contains(text));
            Assert.AreEqual(expectedIncrements[i++], token.GetPositionIncrement());
        }
    }
    finally
    {
        // Restore the global default even if an assertion fails.
        StopFilter.SetEnablePositionIncrementsDefault(savedDefault);
    }
}
/// <summary>Does the actual (time-consuming) work of the merge,
/// but without holding synchronized lock on IndexWriter
/// instance
/// </summary>
/// <param name="merge">The merge spec: source segments, target info, readers.</param>
/// <returns>The merged document count, or 0 when the merge was aborted.</returns>
private int MergeMiddle(MergePolicy.OneMerge merge)
{
    merge.CheckAborted(directory);

    System.String mergedName = merge.info.name;

    SegmentMerger merger = null;

    int mergedDocCount = 0;

    SegmentInfos sourceSegments = merge.segments;
    int numSegments = sourceSegments.Count;

    if (infoStream != null)
        Message("merging " + merge.SegString(directory));

    merger = new SegmentMerger(this, mergedName, merge);

    merge.readers = new SegmentReader[numSegments];
    merge.readersClone = new SegmentReader[numSegments];

    bool mergeDocStores = false;

    // Doc-store segment names shared by the source segments; consulted below
    // to decide whether the in-flight doc store must be flushed first.
    System.Collections.Hashtable dss = new System.Collections.Hashtable();

    // This is try/finally to make sure merger's readers are
    // closed:
    bool success = false;

    try
    {
        int totDocCount = 0;

        for (int i = 0; i < numSegments; i++)
        {
            SegmentInfo info = sourceSegments.Info(i);

            // Hold onto the "live" reader; we will use this to
            // commit merged deletes
            SegmentReader reader = merge.readers[i] = readerPool.Get(info, merge.mergeDocStores, MERGE_READ_BUFFER_SIZE, - 1);

            // We clone the segment readers because other
            // deletes may come in while we're merging so we
            // need readers that will not change
            SegmentReader clone = merge.readersClone[i] = (SegmentReader) reader.Clone(true);
            merger.Add(clone);

            // Deletions present in any source segment force the doc stores
            // to be merged rather than shared.
            if (clone.HasDeletions())
            {
                mergeDocStores = true;
            }

            if (info.GetDocStoreOffset() != - 1)
            {
                dss[info.GetDocStoreSegment()] = info.GetDocStoreSegment();
            }

            totDocCount += clone.NumDocs();
        }

        if (infoStream != null)
        {
            Message("merge: total " + totDocCount + " docs");
        }

        merge.CheckAborted(directory);

        // If deletions have arrived and it has now become
        // necessary to merge doc stores, go and open them:
        if (mergeDocStores && !merge.mergeDocStores)
        {
            merge.mergeDocStores = true;
            lock (this)
            {
                // Flush first if the writer's current doc store is one of the
                // stores the source segments share.
                String key = docWriter.GetDocStoreSegment();
                if (key != null && dss.Contains(key))
                {
                    if (infoStream != null)
                        Message("now flush at mergeMiddle");
                    DoFlush(true, false);
                }
            }

            for (int i = 0; i < numSegments; i++)
            {
                merge.readersClone[i].OpenDocStores();
            }

            // Clear DSS
            lock (this)
            {
                merge.info.SetDocStore(- 1, null, false);
            }
        }

        // This is where all the work happens:
        mergedDocCount = merge.info.docCount = merger.Merge(merge.mergeDocStores);

        System.Diagnostics.Debug.Assert(mergedDocCount == totDocCount);

        // TODO: in the non-realtime case, we may want to only
        // keep deletes (it's costly to open entire reader
        // when we just need deletes)

        SegmentReader mergedReader = readerPool.Get(merge.info, false, BufferedIndexInput.BUFFER_SIZE, - 1);
        try
        {
            if (poolReaders && mergedSegmentWarmer != null)
            {
                mergedSegmentWarmer.Warm(mergedReader);
            }
            if (!CommitMerge(merge, merger, mergedDocCount, mergedReader))
                // commitMerge will return false if this merge was aborted
                return 0;
        }
        finally
        {
            lock (this)
            {
                readerPool.Release(mergedReader);
            }
        }

        success = true;
    }
    finally
    {
        lock (this)
        {
            if (!success)
            {
                // Suppress any new exceptions so we throw the
                // original cause
                for (int i = 0; i < numSegments; i++)
                {
                    if (merge.readers[i] != null)
                    {
                        try
                        {
                            readerPool.Release(merge.readers[i], true);
                        }
                        catch (System.Exception t)
                        {
                        }
                    }

                    if (merge.readersClone[i] != null)
                    {
                        try
                        {
                            merge.readersClone[i].Close();
                        }
                        catch (System.Exception t)
                        {
                        }
                        // This was a private clone and we had the only reference
                        System.Diagnostics.Debug.Assert(merge.readersClone[i].GetRefCount() == 0);
                    }
                }
            }
            else
            {
                // Success path: release/close the readers normally.
                for (int i = 0; i < numSegments; i++)
                {
                    if (merge.readers[i] != null)
                    {
                        readerPool.Release(merge.readers[i], true);
                    }

                    if (merge.readersClone[i] != null)
                    {
                        merge.readersClone[i].Close();
                        // This was a private clone and we had the only reference
                        System.Diagnostics.Debug.Assert(merge.readersClone[i].GetRefCount() == 0);
                    }
                }
            }
        }
    }

    // Must checkpoint before decrefing so any newly
    // referenced files in the new merge.info are incref'd
    // first:
    lock (this)
    {
        deleter.Checkpoint(segmentInfos, false);
    }
    DecrefMergeSegments(merge);

    if (merge.useCompoundFile)
    {
        // Maybe force a sync here to allow reclaiming of the
        // disk space used by the segments we just merged:
        if (autoCommit && DoCommitBeforeMergeCFS(merge))
        {
            long size;
            lock (this)
            {
                size = merge.info.SizeInBytes();
            }
            Commit(size);
        }

        success = false;
        System.String compoundFileName = mergedName + "." + IndexFileNames.COMPOUND_FILE_EXTENSION;

        try
        {
            merger.CreateCompoundFile(compoundFileName);
            success = true;
        }
        catch (System.IO.IOException ioe)
        {
            lock (this)
            {
                if (merge.IsAborted())
                {
                    // This can happen if rollback or close(false)
                    // is called -- fall through to logic below to
                    // remove the partially created CFS:
                    success = true;
                }
                else
                    HandleMergeException(ioe, merge);
            }
        }
        catch (System.Exception t)
        {
            HandleMergeException(t, merge);
        }
        finally
        {
            if (!success)
            {
                if (infoStream != null)
                    Message("hit exception creating compound file during merge");
                lock (this)
                {
                    deleter.DeleteFile(compoundFileName);
                }
            }
        }

        if (merge.IsAborted())
        {
            if (infoStream != null)
                Message("abort merge after building CFS");
            deleter.DeleteFile(compoundFileName);
            return 0;
        }

        lock (this)
        {
            if (segmentInfos.IndexOf(merge.info) == - 1 || merge.IsAborted())
            {
                // Our segment (committed in non-compound
                // format) got merged away while we were
                // building the compound format.
                deleter.DeleteFile(compoundFileName);
            }
            else
            {
                merge.info.SetUseCompoundFile(true);
                Checkpoint();
            }
        }
    }

    // Force a sync after commiting the merge. Once this
    // sync completes then all index files referenced by the
    // current segmentInfos are on stable storage so if the
    // OS/machine crashes, or power cord is yanked, the
    // index will be intact. Note that this is just one
    // (somewhat arbitrary) policy; we could try other
    // policies like only sync if it's been > X minutes or
    // more than Y bytes have been written, etc.
    if (autoCommit)
    {
        long size;
        lock (this)
        {
            size = merge.info.SizeInBytes();
        }
        Commit(size);
    }

    return mergedDocCount;
}
/// <summary> Perform synonym expansion on a query: each analyzed query word
/// is OR'ed together with every unique synonym found for it in the synonym
/// index.
/// </summary>
/// <param name="query">the user's query text</param>
/// <param name="syns">searcher over the synonym index</param>
/// <param name="a">analyzer used to tokenize the query</param>
/// <param name="field">field the term queries are generated against</param>
/// <param name="boost">boost applied to synonym terms; ignored unless &gt; 0</param>
public static Query Expand(System.String query, Searcher syns, Analyzer a, System.String field, float boost)
{
    System.Collections.Hashtable seen = new System.Collections.Hashtable(); // avoid dups
    System.Collections.IList queryWords = new System.Collections.ArrayList(); // preserves word order

    // [1] Parse query into separate words so that when we expand we can avoid dups
    TokenStream ts = a.TokenStream(field, new System.IO.StringReader(query));
    Lucene.Net.Analysis.Token token;
    while ((token = ts.Next()) != null)
    {
        System.String word = token.TermText();
        if (seen.Contains(word) == false)
        {
            seen.Add(word, word);
            queryWords.Add(word);
        }
    }

    // [2] form query
    BooleanQuery expanded = new BooleanQuery();
    foreach (System.String word in queryWords)
    {
        // [2a] add top-level words in
        expanded.Add(new TermQuery(new Term(field, word)), BooleanClause.Occur.SHOULD);

        // [2b] add in unique synonyms
        Hits hits = syns.Search(new TermQuery(new Term(Syns2Index.F_WORD, word)));
        for (int i = 0; i < hits.Length(); i++)
        {
            foreach (System.String syn in hits.Doc(i).GetValues(Syns2Index.F_SYN))
            {
                if (seen.Contains(syn) == false)
                {
                    seen.Add(syn, syn);
                    TermQuery synQuery = new TermQuery(new Term(field, syn));
                    if (boost > 0) // else keep normal 1.0
                        synQuery.SetBoost(boost);
                    expanded.Add(synQuery, BooleanClause.Occur.SHOULD);
                }
            }
        }
    }
    return expanded;
}
/// <summary>
/// Updates the whole form for the selected country/category: when the field's
/// value is numeric, scales the 3D flag bars across all countries and shows a
/// 10-row ranked window around the selected country; otherwise shows the
/// value as body text and hides the numeric widgets.
/// </summary>
/// <param name="country">Country whose data is displayed.</param>
/// <param name="category">Index into Headers selecting the data field.</param>
void ChangeForm(string country, int category)
{
    m_countryNameLabel.Text = country;
    m_currentCategoryLabel.Text = Headers[category];
    System.Collections.Hashtable fieldHash = (System.Collections.Hashtable)CountryHash[country];
    object value = fieldHash[Headers[category]];
    if (value is double)
    {
        // Lazily sort this field's data the first time it is viewed.
        if (!SortedFieldData.Contains(Headers[category]))
        {
            SortField(Headers[category]);
        }
        List<KeyDataPair> sortedDataList = (List<KeyDataPair>)SortedFieldData[Headers[category]];

        // Single pass: find this country's rank plus the field's min/max.
        // NOTE(review): -1 is used as the "unset" sentinel for min/max, which
        // misbehaves if the data can legitimately be negative — confirm the
        // data ranges are non-negative.
        int index = -1;
        double minValue = -1;
        double maxValue = -1;
        for (int i = 0; i < sortedDataList.Count; i++)
        {
            if (sortedDataList[i].Key == country)
            {
                index = i;
            }
            if (minValue == -1 || sortedDataList[i].Data < minValue)
            {
                minValue = sortedDataList[i].Data;
            }
            if (maxValue == -1 || sortedDataList[i].Data > maxValue)
            {
                maxValue = sortedDataList[i].Data;
            }
        }

        // Scale every flag's 3D bar to this field; switch off bars for flags
        // with no data in the sorted list.
        for (int i = 0; i < m_wavingFlagsList.ChildObjects.Count; i++)
        {
            if (m_wavingFlagsList.ChildObjects[i] is WavingFlagLayer)
            {
                WavingFlagLayer wavingFlag = (WavingFlagLayer)m_wavingFlagsList.ChildObjects[i];
                wavingFlag.Bar3D.UseScaling = true;
                wavingFlag.Bar3D.ScalarMinimum = minValue;
                wavingFlag.Bar3D.ScalarMaximum = maxValue;
                bool foundScalar = false;
                for (int j = 0; j < sortedDataList.Count; j++)
                {
                    if (sortedDataList[j].Key == wavingFlag.Name)
                    {
                        wavingFlag.Bar3D.ScalarValue = sortedDataList[j].Data;
                        foundScalar = true;
                        break;
                    }
                }
                if (!foundScalar)
                {
                    wavingFlag.Bar3D.IsOn = false;
                }
                else
                {
                    wavingFlag.Bar3D.IsOn = true;
                }
            }
        }

        m_currentBodyText.Text = "";

        // Show a 10-row window of the ranking roughly centered on this
        // country, clamped to the end of the list.
        int startIndex = index - (index % 2 == 0 ? 5 : 4);
        if (startIndex > sortedDataList.Count - 10)
        {
            startIndex = sortedDataList.Count - 10;
        }
        int counter = 0;
        // NOTE(review): when sortedDataList has fewer than 10 entries this
        // loop can run startIndex past the end of the list — verify the data
        // always has >= 10 rows, or the indexer below will throw.
        while (counter < 10)
        {
            if (startIndex < 0)
            {
                // Window starts before the list; skip forward without
                // consuming a display slot.
                startIndex++;
                continue;
            }
            m_scrollbars[counter].Visible = true;
            m_scrollbars[counter].Minimum = minValue;
            m_scrollbars[counter].Maximum = maxValue;
            m_scrollbars[counter].Value = sortedDataList[startIndex].Data;
            m_listLabels[counter].Visible = true;
            m_listLabels[counter].Text = string.Format("{0}. {1}\n", startIndex + 1, sortedDataList[startIndex].Key);
            counter++;
            startIndex++;
        }
    }
    else
    {
        // Non-numeric field: hide the ranking widgets, turn off all bars and
        // show the raw text instead.
        for (int i = 0; i < m_scrollbars.Length; i++)
        {
            m_scrollbars[i].Visible = false;
        }
        for (int i = 0; i < m_listLabels.Length; i++)
        {
            m_listLabels[i].Visible = false;
        }
        for (int i = 0; i < m_wavingFlagsList.ChildObjects.Count; i++)
        {
            if (m_wavingFlagsList.ChildObjects[i] is WavingFlagLayer)
            {
                WavingFlagLayer wavingFlag = (WavingFlagLayer)m_wavingFlagsList.ChildObjects[i];
                wavingFlag.Bar3D.IsOn = false;
            }
        }
        m_currentBodyText.Text = (string)value;
    }
}