/// <summary>
/// Handles one line of the RESULTS section: either the END RESULTS terminator
/// (which advances to the pre-cluster-results state) or an expected-position
/// entry for a single variable.
/// </summary>
private ESection ProcessResults(int lineNumber, string currentLine, ESection currentSection)
{
    // End-of-section marker: move on to the (optional) cluster results.
    if (currentLine.StartsWith(TestFileStrings.EndResults, StringComparison.OrdinalIgnoreCase))
    {
        return ESection.PreClusterResults;
    }

    // The expected-position line format depends on dimensionality.
    Match match = this.isTwoDimensional
            ? TestFileStrings.ParseExpected2D.Match(currentLine)
            : TestFileStrings.ParseExpected1D.Match(currentLine);
    if (!match.Success)
    {
        Validate.Fail(string.Format("Unparsable RESULT line {0}: {1}", lineNumber, currentLine));
        return currentSection;
    }

    var variableDef = this.VariableDefs[int.Parse(match.Groups["var"].ToString())];
    if (this.isTwoDimensional)
    {
        variableDef.SetExpected(
                double.Parse(match.Groups["posX"].ToString()),
                double.Parse(match.Groups["posY"].ToString()));
    }
    else
    {
        variableDef.SetExpected(double.Parse(match.Groups["pos"].ToString()));
    }
    return currentSection;
}
/// <summary>
/// Handles a line between sections, looking for the start of a CLUSTER block or
/// one of the CONSTRAINTS blocks (X, Y, or unsuffixed). Unrecognized lines leave
/// the section unchanged.
/// </summary>
private ESection ProcessClusterOrConstraints(string currentLine, ESection currentSection, ref EClusterState currentClusterState)
{
    const StringComparison cmp = StringComparison.OrdinalIgnoreCase;

    if (currentLine.StartsWith(TestFileStrings.BeginCluster, cmp))
    {
        // Start a fresh cluster definition; its fields arrive in fixed order starting with the ID.
        currentClusterState = EClusterState.Id;
        this.currentClusterDef = new ClusterDef(this.MinClusterSizeX, this.MinClusterSizeY);
        return ESection.Cluster;
    }

    // NOTE: the X/Y-specific headers are tested before the bare BeginConstraints
    // header — presumably the bare header is a prefix of the suffixed ones, so do
    // not reorder these checks.
    if (currentLine.StartsWith(TestFileStrings.BeginConstraintsX, cmp))
    {
        this.currentConstraintDefs = this.ConstraintDefsX;
        return ESection.Constraints;
    }
    if (currentLine.StartsWith(TestFileStrings.BeginConstraintsY, cmp))
    {
        this.currentConstraintDefs = this.ConstraintsDefY;
        return ESection.Constraints;
    }
    if (currentLine.StartsWith(TestFileStrings.BeginConstraints, cmp))
    {
        // The unsuffixed header targets the X (horizontal) constraint list.
        this.currentConstraintDefs = this.ConstraintDefsX;
        return ESection.Constraints;
    }
    return currentSection;
}
/// <summary>
/// Handles one line of the NEIGHBOURS section: either the END NEIGHBOURS
/// terminator or a single left/right/weight neighbour-pair definition.
/// </summary>
private ESection ProcessNeighbours(int lineNumber, string currentLine, ESection currentSection)
{
    if (currentLine.StartsWith(TestFileStrings.EndNeighbours, StringComparison.OrdinalIgnoreCase))
    {
        // Leaving the section; no constraint list is active any more.
        this.currentConstraintDefs = null;
        return ESection.PreResults;
    }

    Match match = TestFileStrings.ParseNeighbour.Match(currentLine);
    if (!match.Success)
    {
        Validate.Fail(string.Format("Unparsable NEIGHBOUR line {0}: {1}", lineNumber, currentLine));
        return currentSection;
    }

    var left = this.VariableDefs[int.Parse(match.Groups["left"].ToString())];
    var right = this.VariableDefs[int.Parse(match.Groups["right"].ToString())];
    double weight = double.Parse(match.Groups["weight"].ToString());
    this.NeighborDefs.Add(new NeighborDef(left, right, weight));
    return currentSection;
}
// Exercises rime (traditional Chinese rhyme-category) lookups for polyphonic and
// single-reading characters across three rime tables.
// NOTE(review): GetAllByChar is called on "ESection" here, which elsewhere in this
// file is a file-parsing section enum — presumably a different type with the same
// name from another project; confirm the intended receiver.
public void TestRimes()
{
    // '樂' is polyphonic, so it belongs to several rime categories in each table.
    var rimes = ESection.GetAllByChar('樂');
    CollectionAssert.AreEquivalent(new char[] { '效', '鐸', '覺' }, rimes.Select(er => er.Title));
    CollectionAssert.AreEquivalent(new char[] { '效', '覺', '藥' }, Rime106.GetAllByChar('樂').Select(r => r.Title) );

    // Lookup by rime title, then membership and title round-trip.
    var rime = Rime106.GetByTitle('咸');
    Assert.True(rime.Contains('鹹'));
    Assert.AreEqual('咸', rime.Title);

    // A league groups the same rime across the four tones.
    var league = rime.League;
    Assert.AreEqual('豏', league.Rising.Value.Title);
    Assert.AreEqual('陷', league.Going.Value.Title);
    Assert.AreEqual('洽', league.Entering.Value.Title);

    // '有' has no entering-tone peer; the going-tone peer is '宥'.
    rime = Rime106.GetByTitle('有');
    Assert.False(rime.Peer(Tone.Entering).HasValue);
    Assert.AreEqual('宥', rime.Peer(Tone.Going).Value.Title);

    // Rime19 lookups return (volume, ordinal) positions.
    var r19s = Rime19.GetAllByChar('蛙');
    CollectionAssert.AreEquivalent(new int[] { 5, 10 }, r19s.Select(r => r.Ordinal));
    Assert.True(r19s.All(r => r.Volume == 1));

    r19s = Rime19.GetAllByChar('會');
    Assert.AreEqual(1, r19s.Count());
    Assert.AreEqual(3, r19s.First().Ordinal);
    Assert.AreEqual(3, r19s.First().Volume);
    Assert.True(r19s.First().Contains('會'));
}
/// <summary>
/// Builds one page of book-list results for either a tag query or a section query.
/// Section queries dispatch on the section type and on a few well-known section
/// codes; unmatched combinations return null.
/// NOTE(review): every repository call blocks on .Result — deadlock risk on a
/// sync-context host; consider making this method async.
/// </summary>
public ModelPager <RBookList> GetBookListPager(QBookList query)
{
    ModelPager <RBookList> result = new ModelPager <RBookList>();
    result.pageIndex = query.pageIndex;
    result.pageSize = query.pageSize;
    // Out-style counter filled by the repository with the total row count.
    RefAsync <int> totalNumber = new RefAsync <int>();
    if (query.QueryType == QBookList_Type.Tag)
    {
        result.datas = _bookDb.GetBookListByTag(query.pageIndex, query.pageSize, query.Code, totalNumber).Result;
        result.totalCount = totalNumber;
        return(result);
    }
    else if (query.QueryType == QBookList_Type.Section)
    {
        ESection section = _sectionRepository.GetByKey(query.Code).Result;
        if (section != null)
        {
            if (section.SectionType == SectionType.Column)
            {
                // Ordinary column sections use the generic section/time query.
                result.datas = _bookDb.GetBookListBySection_ST(query.pageIndex, query.pageSize, query.Code, totalNumber).Result;
                result.totalCount = totalNumber;
                return(result);
            }
            else
            {
                // Non-column sections: only a few well-known codes are supported.
                if (section.Code == WebSectionCode.NewExpress)
                {
                    result.datas = _bookDb.GetBookListBySection_DT(query.pageIndex, query.pageSize, query.Code, totalNumber).Result;
                    result.totalCount = totalNumber;
                    return(result);
                }
                else if (section.Code == WebSectionCode.ResDownLoad)
                {
                    result.datas = _bookDb.GetBookListBySection_Resource(query.pageIndex, query.pageSize, totalNumber).Result;
                    result.totalCount = totalNumber;
                    return(result);
                }
                else if (section.Code == WebSectionCode.HighScore)
                {
                    // NOTE(review): "HighScroe" is a typo in the repository method name — fix at the declaration.
                    result.datas = _bookDb.GetBookListBySection_HighScroe(query.pageIndex, query.pageSize, totalNumber, defaultTop: query.HighScoreTop).Result;
                    // Cap the reported total at the configured top-N for this section.
                    if (totalNumber > query.HighScoreTop)
                    {
                        totalNumber = new RefAsync <int>(query.HighScoreTop);
                    }
                    result.totalCount = totalNumber;
                    return(result);
                }
            }
        }
    }
    // Unknown query type, missing section, or unsupported section code.
    // NOTE(review): callers must handle null — confirm this is intentional rather than an empty pager.
    return(null);
    // return _bookDb.GetSimpleBookPager(pageIndex, pageSize, bookSection).Result;
}
/// <summary>
/// Handles one line of the VARIABLES section: END VARIABLES advances to the
/// cluster/constraint sections; otherwise the line must parse as a 1D or 2D
/// variable definition (1D lines may carry an optional scale factor).
/// </summary>
private ESection ProcessVariables(int lineNumber, string currentLine, ESection currentSection)
{
    if (currentLine.StartsWith(TestFileStrings.EndVariables, StringComparison.OrdinalIgnoreCase))
    {
        currentSection = ESection.PreClusterOrConstraints;
        return(currentSection);
    }
    Match m;
    if (this.isTwoDimensional)
    {
        m = TestFileStrings.ParseVariable2D.Match(currentLine);
        if (m.Success)
        {
            this.VariableDefs.Add(new VariableDef(
                    uint.Parse(m.Groups["ord"].ToString()),
                    double.Parse(m.Groups["posX"].ToString()),
                    double.Parse(m.Groups["posY"].ToString()),
                    double.Parse(m.Groups["sizeX"].ToString()),
                    double.Parse(m.Groups["sizeY"].ToString()),
                    double.Parse(m.Groups["weightX"].ToString()),
                    double.Parse(m.Groups["weightY"].ToString())));
        }
    }
    else
    {
        m = TestFileStrings.ParseVariable1D.Match(currentLine);
        double scale = 1.0;
        if (!m.Success)
        {
            // Fall back to the 1D variant that carries an explicit scale factor.
            m = TestFileStrings.ParseVariable1DScale.Match(currentLine);
            scale = m.Success ? double.Parse(m.Groups["scale"].ToString()) : scale;
        }
        if (m.Success)
        {
            var varDef = new VariableDef(
                    uint.Parse(m.Groups["ord"].ToString()),
                    double.Parse(m.Groups["pos"].ToString()),
                    double.Parse(m.Groups["size"].ToString()),
                    double.Parse(m.Groups["weight"].ToString())) { ScaleX = scale };
            this.VariableDefs.Add(varDef);
        }
    }
    if (!m.Success)
    {
        Validate.Fail(string.Format("Unparsable VARIABLE line {0}: {1}", lineNumber, currentLine));
    }
    // Verify the variables in the file are sorted. This makes it easier for the results
    // reading to be in sync.
    // NOTE(review): this indexes the last element, so it assumes Validate.Fail throws;
    // otherwise an unparsable first line would index an empty list — confirm.
    Validate.AreEqual(this.VariableDefs[this.VariableDefs.Count - 1].Ordinal, (uint)(this.VariableDefs.Count - 1), "Out of order VARIABLE ordinal");
    return(currentSection);
}
/// <summary>
/// Between RESULTS and CLUSTER RESULTS: switches to the ClusterResults section
/// when its begin marker appears; any other line leaves the section unchanged.
/// </summary>
private static ESection ProcessPreClusterResults(string currentLine, ESection currentSection)
{
    bool beginsClusterResults = currentLine.StartsWith(
            TestFileStrings.BeginClusterResults, StringComparison.OrdinalIgnoreCase);
    return beginsClusterResults ? ESection.ClusterResults : currentSection;
}
/// <summary>
/// Initializes the DouBan crawl plan from a list of section/tag URL groups.
/// If no plan exists yet, builds plan rows (one per tag URL) plus section/tag
/// link rows and writes both inside a single transaction; otherwise loads and
/// returns the existing plan.
/// </summary>
public List <EPlan_FromDouBanTagUrls> InitPlanFromTagUrl(List <Secction_Tag> tagLists)
{
    List <EPlan_FromDouBanTagUrls> result = new List <EPlan_FromDouBanTagUrls>();
    if (!_PlanFTURepository.IsExistPlan())
    {
        NLogUtil.InfoTxt("豆瓣计划写入到数据库");
        if (tagLists != null && tagLists.Count > 0)
        {
            List <EDataSection> dsList = new List <EDataSection>();
            // List<EPlan_FromDouBanTagUrls> planList = new List<EPlan_FromDouBanTagUrls>();
            var allSection = _SectionDb.AllSection();
            foreach (Secction_Tag st in tagLists)
            {
                ESection section = null;
                // NOTE(review): a lookup miss is handled by swallowing the exception;
                // if allSection supports TryGetValue, prefer that over catch-all.
                try
                {
                    section = allSection[st.sectionName];
                }
                catch
                {
                    section = null;
                }
                foreach (var tag in st.TagList)
                {
                    result.Add(PlanFTURepository.NewModelInstance(tag.Name, tag.Url));
                    // Only tags whose section resolved get a section/tag link row.
                    if (section != null)
                    {
                        dsList.Add(DataSectionRepository.newModelInstance(section.Code, tag.Code));
                    }
                }
            }
            // Write link rows and plan rows atomically.
            // NOTE(review): CoverPlans is not awaited inside the async lambda —
            // confirm it is synchronous, otherwise it escapes the transaction.
            var rAll = _Db.Ado.UseTran(async() =>
            {
                await _DataSectionDb.CoverNewSectionCodeAsync(dsList);
                _PlanFTURepository.CoverPlans(GenCodeHelper.Plan_FromDouBanTagUrls, result);
            });
            if (rAll.IsSuccess)
            {
                NLogUtil.InfoTxt("【成功】豆瓣计划已在数据库初始化");
            }
            else
            {
                NLogUtil.ErrorTxt($"【失败】豆瓣计划:{rAll.ErrorMessage}");
            }
        }
    }
    else
    {
        NLogUtil.InfoTxt("DouBan Plan 已经存在数据库中!");
        // The DouBan tag list can only fetch entries within the first 1000.
        result = _PlanFTURepository.QueryPlan(GenCodeHelper.Plan_FromDouBanTagUrls);
    }
    return(result);
}
/// <summary>
/// Handles one line of the CLUSTER RESULTS section: END CLUSTER RESULTS finishes
/// the file (ESection.Done); otherwise the line is a cluster's result rectangle.
/// Result ordinals must arrive in ascending order; lastClusterDefIndex scans
/// forward through ClusterDefs to find the matching cluster.
/// </summary>
private ESection ProcessClusterResults(int lineNumber, string currentLine, ESection currentSection)
{
    if (currentLine.StartsWith(TestFileStrings.EndClusterResults, StringComparison.OrdinalIgnoreCase))
    {
        currentSection = ESection.Done;
        return(currentSection);
    }
    Match m = TestFileStrings.ParseClusterResult.Match(currentLine);
    if (m.Success)
    {
        int ord = int.Parse(m.Groups["ord"].ToString());
        // Root-level clusters aren't in the cluster list, which includes cluster 0. So we'll
        // just walk forward from the last cluster index. If this is the first time, then we've
        // initialized m_idxLastClusterDef to < 0.
        if (this.lastClusterDefIndex < 0)
        {
            this.lastClusterDefIndex = 0;
        }
        else
        {
            // Verify the cluster results in the file are sorted. This makes it easier for the results
            // reading to be in sync.
            Validate.IsTrue(this.ClusterDefs[this.lastClusterDefIndex].ClusterId < ord, "Out of order CLUSTER RESULT ordinal");
            ++this.lastClusterDefIndex;
        }
        // Scan forward for the cluster with this ordinal; running off the end means the
        // result line references a cluster that was never defined.
        for (;; ++this.lastClusterDefIndex)
        {
            if (this.lastClusterDefIndex >= this.ClusterDefs.Count)
            {
                Validate.Fail(string.Format("Ordinal not in Cluster List at CLUSTER RESULT line {0}: {1}", lineNumber, currentLine));
            }
            if (this.ClusterDefs[this.lastClusterDefIndex].ClusterId == ord)
            {
                break;
            }
        }
        // Record the final left/right/top/bottom border positions for this cluster.
        this.ClusterDefs[this.lastClusterDefIndex].SetResultPositions(
                double.Parse(m.Groups["lpos"].ToString()),
                double.Parse(m.Groups["rpos"].ToString()),
                double.Parse(m.Groups["tpos"].ToString()),
                double.Parse(m.Groups["bpos"].ToString()));
    }
    else
    {
        Validate.Fail(string.Format("Unparsable CLUSTER RESULT line {0}: {1}", lineNumber, currentLine));
    }
    return(currentSection);
}
/// <summary>
/// Between sections: begins the NEIGHBOURS or RESULTS section when the matching
/// marker appears; any other line leaves the section unchanged.
/// </summary>
private static ESection ProcessNeighboursOrResults(string currentLine, ESection currentSection)
{
    const StringComparison cmp = StringComparison.OrdinalIgnoreCase;
    if (currentLine.StartsWith(TestFileStrings.BeginNeighbours, cmp))
    {
        return ESection.Neighbours;
    }
    return currentLine.StartsWith(TestFileStrings.BeginResults, cmp)
            ? ESection.Results
            : currentSection;
}
/// <summary>
/// Builds one ESectionTag link row per tag, all pointing at the given section,
/// and bulk-inserts them. Returns whatever AddRange reports (row count).
/// </summary>
public int AddRangeTagToSection(ESection section, List <ETag> tagList)
{
    var links = new List <ESectionTag>();
    foreach (ETag tag in tagList)
    {
        var link = new ESectionTag
        {
            SectionCode = section.Code,
            //SectionName = section.Title,
            TagCode = tag.Code,
            // TagName = tag.Name,
        };
        links.Add(link);
    }
    return base.AddRange(links);
}
/// <summary>
/// Replaces the tag set linked to the given section: deletes the existing
/// section/tag rows and re-inserts one row per tag, inside a single transaction.
/// </summary>
/// <param name="section">Section whose tag links are being rewritten.</param>
/// <param name="tagList">New complete set of tags for the section.</param>
/// <returns>true when the transaction committed (failure throws instead).</returns>
/// <exception cref="Exception">Thrown with the transaction's error message when the transaction fails.</exception>
public bool SaveSectionTag(ESection section, List <ETag> tagList)
{
    var result = _adminDb.Db.Ado.UseTran(() =>
    {
        // NOTE(review): DeleteRangeByExp appears to be async; blocking with Wait()
        // inside a synchronous transaction lambda risks deadlock on a sync-context
        // host — consider an async transaction overload.
        _adminDb.DeleteRangeByExp(s => s.SectionCode == section.Code).Wait();
        _adminDb.AddRangeTagToSection(section, tagList);
    });

    if (!result.IsSuccess)
    {
        throw new Exception(result.ErrorMessage);
    }
    return result.IsSuccess;
}
/// <summary>
/// Handles one line of a CONSTRAINTS section: END CONSTRAINTS advances to the
/// pre-neighbours state; constraint lines are parsed into the currently-selected
/// X or Y list, but only when the file has no clusters (see TODOclust).
/// </summary>
private ESection ProcessConstraints(int lineNumber, string currentLine, ESection currentSection)
{
    if (currentLine.StartsWith(TestFileStrings.EndConstraints, StringComparison.OrdinalIgnoreCase))
    {
        this.currentConstraintDefs = null;
        return ESection.PreNeighboursOrResults;
    }

    // TODOclust: if we have clusters, then we get Lnn/Rnn (TEST_MSAGL) or blank (RELEASE)
    // which we can't read. Currently we don't use these constraints programmatically;
    // eventually I want to be able to test them for changes, but for right now they're
    // just useful as a windiffable comparison after regeneration.
    if (0 != this.ClusterDefs.Count)
    {
        return currentSection;
    }

    Match match = TestFileStrings.ParseConstraint.Match(currentLine);
    if (!match.Success)
    {
        Validate.Fail(string.Format("Unparsable CONSTRAINT line {0}: {1}", lineNumber, currentLine));
        return currentSection;
    }

    // A non-empty "eq" group marks an equality constraint.
    bool isEquality = match.Groups["eq"].Length > 0;
    this.currentConstraintDefs.Add(new ConstraintDef(
            this.VariableDefs[int.Parse(match.Groups["left"].ToString())],
            this.VariableDefs[int.Parse(match.Groups["right"].ToString())],
            double.Parse(match.Groups["gap"].ToString()),
            isEquality));
    if (isEquality)
    {
        this.HasEqualityConstraints = true;
    }
    return currentSection;
}
// Some variables are instantiated after we see the section header.
// ReSharper disable PossibleNullReferenceException
/// <summary>
/// Loads a test definition file: clears any previously-loaded variables and
/// constraints, then reads the file line by line, dispatching each line to the
/// handler for the current section. Blank lines and //-comments are skipped;
/// parsing stops at ESection.Done. Fails validation if no variables were read.
/// </summary>
public void Load(string strFullName)
{
    this.VariableDefs.Clear();
    this.ConstraintDefsX.Clear();
    this.ConstraintsDefY.Clear();
    using (var sr = new StreamReader(strFullName))
    {
        string currentLine;
        ESection currentSection = ESection.PreVariables;
        EClusterState currentClusterState = EClusterState.Id;
        int lineNumber = 0;
        while ((currentLine = sr.ReadLine()) != null)
        {
            ++lineNumber;
            // Skip blanks and comment lines.
            if (string.IsNullOrEmpty(currentLine) || currentLine.StartsWith("//", StringComparison.OrdinalIgnoreCase))
            {
                continue;
            }
            if (ESection.Done == currentSection)
            {
                break;
            }
            // Each handler returns the (possibly advanced) section for the next line.
            switch (currentSection)
            {
                case ESection.PreVariables:
                    // Some stuff gets in at the top before variables.
                    currentSection = ProcessPreVariables(lineNumber, currentLine, currentSection);
                    break;
                case ESection.Variables:
                    currentSection = ProcessVariables(lineNumber, currentLine, currentSection);
                    break;
                case ESection.PreClusterOrConstraints:
                    currentSection = ProcessClusterOrConstraints(currentLine, currentSection, ref currentClusterState);
                    break;
                case ESection.Cluster:
                    currentSection = ProcessCluster(lineNumber, currentLine, currentSection, ref currentClusterState);
                    break;
                case ESection.Constraints:
                    currentSection = ProcessConstraints(lineNumber, currentLine, currentSection);
                    break;
                case ESection.PreNeighboursOrResults:
                    currentSection = ProcessNeighboursOrResults(currentLine, currentSection);
                    break;
                case ESection.Neighbours:
                    currentSection = ProcessNeighbours(lineNumber, currentLine, currentSection);
                    break;
                case ESection.PreResults:
                    currentSection = ProcessPreResults(currentLine, currentSection);
                    break;
                case ESection.Results:
                    currentSection = ProcessResults(lineNumber, currentLine, currentSection);
                    break;
                case ESection.PreClusterResults:
                    currentSection = ProcessPreClusterResults(currentLine, currentSection);
                    break;
                case ESection.ClusterResults:
                    currentSection = ProcessClusterResults(lineNumber, currentLine, currentSection);
                    break;
                default:
                    Validate.Fail("Unknown section");
                    break;
            }
        } // endwhile sr.ReadLine
    } // end using sr
    if (0 == this.VariableDefs.Count)
    {
        Validate.Fail("No VARIABLEs found in file");
    }
}
/// <summary>
/// Handles header lines that appear before the BEGIN VARIABLES marker: seed,
/// weight, scale, padding, minimum cluster size, margin, unsatisfiable-constraint
/// count, and the goal-function value(s). Each recognized line stores its value
/// and returns; BEGIN VARIABLES switches sections; anything else fails validation.
/// </summary>
private ESection ProcessPreVariables(int lineNumber, string currentLine, ESection currentSection)
{
    Match m = TestFileStrings.ParseSeed.Match(currentLine);
    if (m.Success)
    {
        string strArg = m.Groups["seed"].ToString();
        System.Globalization.NumberStyles style = System.Globalization.NumberStyles.Integer;
        if (strArg.StartsWith("0x", StringComparison.OrdinalIgnoreCase))
        {
            // For some reason the 0x prefix is not allowed for hex strings.
            strArg = strArg.Substring(2);
            style = System.Globalization.NumberStyles.HexNumber;
        }
        this.Seed = int.Parse(strArg, style);
        return currentSection;
    }
    m = TestFileStrings.ParseWeight.Match(currentLine);
    if (m.Success)
    {
        this.Weight = double.Parse(m.Groups["weight"].ToString());
        return currentSection;
    }
    // Scale is optional.
    m = TestFileStrings.ParseScale.Match(currentLine);
    if (m.Success)
    {
        this.Scale = double.Parse(m.Groups["scale"].ToString());
        return currentSection;
    }
    m = TestFileStrings.ParsePadding.Match(currentLine);
    if (m.Success)
    {
        this.PaddingX = double.Parse(m.Groups["X"].ToString());
        this.PaddingY = double.Parse(m.Groups["Y"].ToString());
        return currentSection;
    }
    // Currently not actually used; the individual clusters record the random values
    // based upon this.
    m = TestFileStrings.ParseMinClusterSize.Match(currentLine);
    if (m.Success)
    {
        this.MinClusterSizeX = double.Parse(m.Groups["X"].ToString());
        this.MinClusterSizeY = double.Parse(m.Groups["Y"].ToString());
        return currentSection;
    }
    m = TestFileStrings.ParseMargin.Match(currentLine);
    if (m.Success)
    {
        this.Margin = int.Parse(m.Groups["margin"].ToString());
        return currentSection;
    }
    m = TestFileStrings.ParseUnsatisfiableConstraints.Match(currentLine);
    if (m.Success)
    {
        this.UnsatisfiableConstraintCount = int.Parse(m.Groups["count"].ToString());
        return currentSection;
    }
    // The goal-line format depends on dimensionality; 2D carries both goalx and goaly.
    m = this.isTwoDimensional ?
            TestFileStrings.ParseGoal2D.Match(currentLine) : TestFileStrings.ParseGoal1D.Match(currentLine);
    if (m.Success)
    {
        this.GoalX = double.Parse(m.Groups["goalx"].ToString());
        if (this.isTwoDimensional)
        {
            this.GoalY = double.Parse(m.Groups["goaly"].ToString());
        }
        return currentSection;
    }
    // BEGIN VARIABLES ends the header block.
    if (currentLine.StartsWith(TestFileStrings.BeginVariables, StringComparison.OrdinalIgnoreCase))
    {
        currentSection = ESection.Variables;
        return currentSection;
    }
    Validate.Fail(string.Format("Unknown header line {0}: {1}", lineNumber, currentLine));
    // NOTE(review): presumably Validate.Fail throws, so this is unreachable — confirm.
    return currentSection;
}
/// <summary>
/// Handles one line of the VARIABLES section. END VARIABLES advances to the
/// cluster/constraint sections; any other line must parse as a variable
/// definition — 2D (position/size/weight per axis) or 1D with optional scale.
/// </summary>
private ESection ProcessVariables(int lineNumber, string currentLine, ESection currentSection)
{
    if (currentLine.StartsWith(TestFileStrings.EndVariables, StringComparison.OrdinalIgnoreCase))
    {
        currentSection = ESection.PreClusterOrConstraints;
        return currentSection;
    }
    Match m;
    if (this.isTwoDimensional)
    {
        m = TestFileStrings.ParseVariable2D.Match(currentLine);
        if (m.Success)
        {
            this.VariableDefs.Add(new VariableDef(
                    uint.Parse(m.Groups["ord"].ToString()),
                    double.Parse(m.Groups["posX"].ToString()),
                    double.Parse(m.Groups["posY"].ToString()),
                    double.Parse(m.Groups["sizeX"].ToString()),
                    double.Parse(m.Groups["sizeY"].ToString()),
                    double.Parse(m.Groups["weightX"].ToString()),
                    double.Parse(m.Groups["weightY"].ToString())));
        }
    }
    else
    {
        m = TestFileStrings.ParseVariable1D.Match(currentLine);
        double scale = 1.0;
        if (!m.Success)
        {
            // Retry with the 1D-plus-scale variant; scale stays 1.0 if that fails too.
            m = TestFileStrings.ParseVariable1DScale.Match(currentLine);
            scale = m.Success ? double.Parse(m.Groups["scale"].ToString()) : scale;
        }
        if (m.Success)
        {
            var varDef = new VariableDef(
                    uint.Parse(m.Groups["ord"].ToString()),
                    double.Parse(m.Groups["pos"].ToString()),
                    double.Parse(m.Groups["size"].ToString()),
                    double.Parse(m.Groups["weight"].ToString())) { ScaleX = scale };
            this.VariableDefs.Add(varDef);
        }
    }
    if (!m.Success)
    {
        Validate.Fail(string.Format("Unparsable VARIABLE line {0}: {1}", lineNumber, currentLine));
    }
    // Verify the variables in the file are sorted. This makes it easier for the results
    // reading to be in sync.
    // NOTE(review): indexes the last element, so it relies on Validate.Fail throwing —
    // an unparsable first line would otherwise index an empty list; confirm.
    Validate.AreEqual(this.VariableDefs[this.VariableDefs.Count - 1].Ordinal, (uint)(this.VariableDefs.Count - 1), "Out of order VARIABLE ordinal");
    return currentSection;
}
/// <summary>
/// Handles a line between sections, looking for BEGIN CLUSTER or one of the
/// BEGIN CONSTRAINTS markers (X, Y, or unsuffixed). Selects the matching target
/// constraint list; unrecognized lines leave the section unchanged.
/// </summary>
private ESection ProcessClusterOrConstraints(string currentLine, ESection currentSection, ref EClusterState currentClusterState)
{
    if (currentLine.StartsWith(TestFileStrings.BeginCluster, StringComparison.OrdinalIgnoreCase))
    {
        currentSection = ESection.Cluster;
        // A cluster block's fields arrive in fixed order, starting with its ID.
        currentClusterState = EClusterState.Id;
        this.currentClusterDef = new ClusterDef(this.MinClusterSizeX, this.MinClusterSizeY);
        return currentSection;
    }
    // NOTE(review): the X/Y-specific headers are tested before the bare
    // BeginConstraints header — presumably the bare header is a prefix of the
    // suffixed ones, so the order of these checks matters; do not reorder.
    if (currentLine.StartsWith(TestFileStrings.BeginConstraintsX, StringComparison.OrdinalIgnoreCase))
    {
        currentSection = ESection.Constraints;
        this.currentConstraintDefs = this.ConstraintDefsX;
        return currentSection;
    }
    if (currentLine.StartsWith(TestFileStrings.BeginConstraintsY, StringComparison.OrdinalIgnoreCase))
    {
        currentSection = ESection.Constraints;
        this.currentConstraintDefs = this.ConstraintsDefY;
        return currentSection;
    }
    if (currentLine.StartsWith(TestFileStrings.BeginConstraints, StringComparison.OrdinalIgnoreCase))
    {
        currentSection = ESection.Constraints;
        // The unsuffixed header targets the X (horizontal) list.
        this.currentConstraintDefs = this.ConstraintDefsX;
        return currentSection;
    }
    return currentSection;
}
/// <summary>
/// Handles one line of a CLUSTER block. The block is a fixed-order state machine
/// (Id, Parent, Left/Right/Top/Bottom borders, then variable-membership lines)
/// tracked by currentClusterState; END CLUSTER is only legal once the Variable
/// state has been reached.
/// </summary>
private ESection ProcessCluster(int lineNumber, string currentLine, ESection currentSection, ref EClusterState currentClusterState)
{
    if (currentLine.StartsWith(TestFileStrings.EndCluster, StringComparison.OrdinalIgnoreCase))
    {
        if (EClusterState.Variable != currentClusterState)
        {
            // END CLUSTER arrived before all of the fixed-order fields were read.
            Validate.Fail(string.Format("Unexpected END CLUSTER line {0}: {1}", lineNumber, currentLine));
        }
        currentSection = ESection.PreClusterOrConstraints;
        this.ClusterDefs.Add(this.currentClusterDef);
        this.currentClusterDef = null;
        return currentSection;
    }
    if (EClusterState.Id == currentClusterState)
    {
        Match m = TestFileStrings.ParseClusterId.Match(currentLine);
        if (m.Success)
        {
            // Verify the Clusters in the file are sorted on ID. This makes it easier for the results
            // reading to be in sync, as we'll index ClusterDefs by [Parent - 1].
            var id = int.Parse(m.Groups["id"].ToString());
            Validate.IsTrue(this.currentClusterDef.ClusterId == id, "Out of order CLUSTER id");
        }
        else
        {
            Validate.Fail(string.Format("Unparsable CLUSTER ID line {0}: {1}", lineNumber, currentLine));
        }
        currentClusterState = EClusterState.Parent;
    }
    else if (EClusterState.Parent == currentClusterState)
    {
        Match m = TestFileStrings.ParseClusterParent.Match(currentLine);
        if (m.Success)
        {
            int parentId = int.Parse(m.Groups["parent"].ToString());
            // Cluster IDs are 1-based because we use 0 for the "root cluster".
            if (0 != parentId)
            {
                ClusterDef clusParent = this.ClusterDefs[parentId - 1];
                Validate.AreEqual(clusParent.ClusterId, parentId, "clusParent.ClusterId mismatch with idParent");
                clusParent.AddClusterDef(this.currentClusterDef);
            }
        }
        else
        {
            Validate.Fail(string.Format("Unparsable CLUSTER Parent line {0}: {1}", lineNumber, currentLine));
        }
        currentClusterState = EClusterState.LeftBorder;
    }
    else if (EClusterState.LeftBorder == currentClusterState)
    {
        // Older files didn't have MinSize.
        Match m = TestFileStrings.ParseClusterMinSize.Match(currentLine);
        if (m.Success)
        {
            this.currentClusterDef.MinimumSizeX = double.Parse(m.Groups["X"].ToString());
            this.currentClusterDef.MinimumSizeY = double.Parse(m.Groups["Y"].ToString());
            // Stay in LeftBorder state; the actual border line is still expected.
            return currentSection;
        }
        if (0 == string.Compare("NewHierarchy", currentLine, StringComparison.OrdinalIgnoreCase))
        {
            // NewHierarchy is optional.
            this.currentClusterDef.IsNewHierarchy = true;
            return currentSection;
        }
        this.currentClusterDef.LeftBorderInfo = ParseBorderInfo("Left", currentLine, lineNumber);
        currentClusterState = EClusterState.RightBorder;
    }
    else if (EClusterState.RightBorder == currentClusterState)
    {
        this.currentClusterDef.RightBorderInfo = ParseBorderInfo("Right", currentLine, lineNumber);
        currentClusterState = EClusterState.TopBorder;
    }
    else if (EClusterState.TopBorder == currentClusterState)
    {
        this.currentClusterDef.TopBorderInfo = ParseBorderInfo("Top", currentLine, lineNumber);
        currentClusterState = EClusterState.BottomBorder;
    }
    else if (EClusterState.BottomBorder == currentClusterState)
    {
        this.currentClusterDef.BottomBorderInfo = ParseBorderInfo("Bottom", currentLine, lineNumber);
        currentClusterState = EClusterState.Variable;
    }
    else if (EClusterState.Variable == currentClusterState)
    {
        // Any number of variable-membership lines may follow, until END CLUSTER.
        Match m = TestFileStrings.ParseClusterVariable.Match(currentLine);
        if (m.Success)
        {
            int variableId = int.Parse(m.Groups["var"].ToString());
            this.currentClusterDef.AddVariableDef(this.VariableDefs[variableId]);
        }
        else
        {
            Validate.Fail(string.Format("Unparsable CLUSTER Variable line {0}: {1}", lineNumber, currentLine));
        }
    }
    return currentSection;
}
/// <summary>
/// Handles one line of a CONSTRAINTS section; constraint lines are only parsed
/// when the file has no clusters (see TODOclust below). The active target list
/// (X or Y) was selected by ProcessClusterOrConstraints.
/// </summary>
private ESection ProcessConstraints(int lineNumber, string currentLine, ESection currentSection)
{
    if (currentLine.StartsWith(TestFileStrings.EndConstraints, StringComparison.OrdinalIgnoreCase))
    {
        currentSection = ESection.PreNeighboursOrResults;
        this.currentConstraintDefs = null;
        return currentSection;
    }
    // TODOclust: if we have clusters, then we get Lnn/Rnn (DEBUG) or blank (RELEASE) which
    // we can't read. Currently we don't use these constraints programmatically; eventually
    // I want to be able to test them for changes, but for right now they're just useful as
    // a windiffable comparison after regeneration.
    if (0 == this.ClusterDefs.Count)
    {
        Match m = TestFileStrings.ParseConstraint.Match(currentLine);
        if (m.Success)
        {
            // A non-empty "eq" group marks an equality constraint.
            bool isEquality = m.Groups["eq"].Length > 0;
            this.currentConstraintDefs.Add(new ConstraintDef(
                    this.VariableDefs[int.Parse(m.Groups["left"].ToString())],
                    this.VariableDefs[int.Parse(m.Groups["right"].ToString())],
                    double.Parse(m.Groups["gap"].ToString()),
                    isEquality));
            if (isEquality)
            {
                this.HasEqualityConstraints = true;
            }
        }
        else
        {
            Validate.Fail(string.Format("Unparsable CONSTRAINT line {0}: {1}", lineNumber, currentLine));
        }
    }
    return currentSection;
}
/// <summary>
/// Between sections: switches to NEIGHBOURS or RESULTS when the corresponding
/// begin marker appears; any other line leaves the section unchanged.
/// </summary>
private static ESection ProcessNeighboursOrResults(string currentLine, ESection currentSection)
{
    if (currentLine.StartsWith(TestFileStrings.BeginNeighbours, StringComparison.OrdinalIgnoreCase))
    {
        currentSection = ESection.Neighbours;
    }
    else if (currentLine.StartsWith(TestFileStrings.BeginResults, StringComparison.OrdinalIgnoreCase))
    {
        currentSection = ESection.Results;
    }
    return currentSection;
}
/// <summary>
/// Handles one line of a CLUSTER block, driving the fixed-order state machine
/// (Id, Parent, Left/Right/Top/Bottom borders, then variable-membership lines)
/// through currentClusterState. END CLUSTER is only legal once the Variable
/// state has been reached.
/// </summary>
private ESection ProcessCluster(int lineNumber, string currentLine, ESection currentSection, ref EClusterState currentClusterState)
{
    if (currentLine.StartsWith(TestFileStrings.EndCluster, StringComparison.OrdinalIgnoreCase))
    {
        if (EClusterState.Variable != currentClusterState)
        {
            // END CLUSTER arrived before all of the fixed-order fields were read.
            Validate.Fail(string.Format("Unexpected END CLUSTER line {0}: {1}", lineNumber, currentLine));
        }
        currentSection = ESection.PreClusterOrConstraints;
        this.ClusterDefs.Add(this.currentClusterDef);
        this.currentClusterDef = null;
        return(currentSection);
    }
    if (EClusterState.Id == currentClusterState)
    {
        Match m = TestFileStrings.ParseClusterId.Match(currentLine);
        if (m.Success)
        {
            // Verify the Clusters in the file are sorted on ID. This makes it easier for the results
            // reading to be in sync, as we'll index ClusterDefs by [Parent - 1].
            var id = int.Parse(m.Groups["id"].ToString());
            Validate.IsTrue(this.currentClusterDef.ClusterId == id, "Out of order CLUSTER id");
        }
        else
        {
            Validate.Fail(string.Format("Unparsable CLUSTER ID line {0}: {1}", lineNumber, currentLine));
        }
        currentClusterState = EClusterState.Parent;
    }
    else if (EClusterState.Parent == currentClusterState)
    {
        Match m = TestFileStrings.ParseClusterParent.Match(currentLine);
        if (m.Success)
        {
            int parentId = int.Parse(m.Groups["parent"].ToString());
            // Cluster IDs are 1-based because we use 0 for the "root cluster".
            if (0 != parentId)
            {
                ClusterDef clusParent = this.ClusterDefs[parentId - 1];
                Validate.AreEqual(clusParent.ClusterId, parentId, "clusParent.ClusterId mismatch with idParent");
                clusParent.AddClusterDef(this.currentClusterDef);
            }
        }
        else
        {
            Validate.Fail(string.Format("Unparsable CLUSTER Parent line {0}: {1}", lineNumber, currentLine));
        }
        currentClusterState = EClusterState.LeftBorder;
    }
    else if (EClusterState.LeftBorder == currentClusterState)
    {
        // Older files didn't have MinSize.
        Match m = TestFileStrings.ParseClusterMinSize.Match(currentLine);
        if (m.Success)
        {
            this.currentClusterDef.MinimumSizeX = double.Parse(m.Groups["X"].ToString());
            this.currentClusterDef.MinimumSizeY = double.Parse(m.Groups["Y"].ToString());
            // Stay in LeftBorder state; the actual border line is still expected.
            return(currentSection);
        }
        if (0 == string.Compare("NewHierarchy", currentLine, StringComparison.OrdinalIgnoreCase))
        {
            // NewHierarchy is optional.
            this.currentClusterDef.IsNewHierarchy = true;
            return(currentSection);
        }
        this.currentClusterDef.LeftBorderInfo = ParseBorderInfo("Left", currentLine, lineNumber);
        currentClusterState = EClusterState.RightBorder;
    }
    else if (EClusterState.RightBorder == currentClusterState)
    {
        this.currentClusterDef.RightBorderInfo = ParseBorderInfo("Right", currentLine, lineNumber);
        currentClusterState = EClusterState.TopBorder;
    }
    else if (EClusterState.TopBorder == currentClusterState)
    {
        this.currentClusterDef.TopBorderInfo = ParseBorderInfo("Top", currentLine, lineNumber);
        currentClusterState = EClusterState.BottomBorder;
    }
    else if (EClusterState.BottomBorder == currentClusterState)
    {
        this.currentClusterDef.BottomBorderInfo = ParseBorderInfo("Bottom", currentLine, lineNumber);
        currentClusterState = EClusterState.Variable;
    }
    else if (EClusterState.Variable == currentClusterState)
    {
        // Any number of variable-membership lines may follow, until END CLUSTER.
        Match m = TestFileStrings.ParseClusterVariable.Match(currentLine);
        if (m.Success)
        {
            int variableId = int.Parse(m.Groups["var"].ToString());
            this.currentClusterDef.AddVariableDef(this.VariableDefs[variableId]);
        }
        else
        {
            Validate.Fail(string.Format("Unparsable CLUSTER Variable line {0}: {1}", lineNumber, currentLine));
        }
    }
    return(currentSection);
}
/// <summary>
/// Handles one line of the NEIGHBOURS section: END NEIGHBOURS advances to the
/// pre-results state; any other line must parse as a left/right/weight
/// neighbour pair, which is appended to NeighborDefs.
/// </summary>
private ESection ProcessNeighbours(int lineNumber, string currentLine, ESection currentSection)
{
    if (currentLine.StartsWith(TestFileStrings.EndNeighbours, StringComparison.OrdinalIgnoreCase))
    {
        currentSection = ESection.PreResults;
        // Leaving the section; no constraint list is active any more.
        this.currentConstraintDefs = null;
        return currentSection;
    }
    Match m = TestFileStrings.ParseNeighbour.Match(currentLine);
    if (m.Success)
    {
        this.NeighborDefs.Add(new NeighborDef(
                this.VariableDefs[int.Parse(m.Groups["left"].ToString())],
                this.VariableDefs[int.Parse(m.Groups["right"].ToString())],
                double.Parse(m.Groups["weight"].ToString())));
    }
    else
    {
        Validate.Fail(string.Format("Unparsable NEIGHBOUR line {0}: {1}", lineNumber, currentLine));
    }
    return currentSection;
}
/// <summary>
/// Handles one line of the RESULTS section: END RESULTS advances to the
/// pre-cluster-results state; any other line must parse as an expected-position
/// entry (1D or 2D, depending on dimensionality) for a single variable.
/// </summary>
private ESection ProcessResults(int lineNumber, string currentLine, ESection currentSection)
{
    if (currentLine.StartsWith(TestFileStrings.EndResults, StringComparison.OrdinalIgnoreCase))
    {
        currentSection = ESection.PreClusterResults;
        return currentSection;
    }
    Match m;
    if (this.isTwoDimensional)
    {
        m = TestFileStrings.ParseExpected2D.Match(currentLine);
        if (m.Success)
        {
            this.VariableDefs[int.Parse(m.Groups["var"].ToString())].SetExpected(
                    double.Parse(m.Groups["posX"].ToString()),
                    double.Parse(m.Groups["posY"].ToString()));
        }
    }
    else
    {
        m = TestFileStrings.ParseExpected1D.Match(currentLine);
        if (m.Success)
        {
            this.VariableDefs[int.Parse(m.Groups["var"].ToString())].SetExpected(
                    double.Parse(m.Groups["pos"].ToString()));
        }
    }
    if (!m.Success)
    {
        Validate.Fail(string.Format("Unparsable RESULT line {0}: {1}", lineNumber, currentLine));
    }
    return currentSection;
}
/// <summary>
/// Handles header lines before the BEGIN VARIABLES marker: seed, weight, scale,
/// padding, minimum cluster size, margin, unsatisfiable-constraint count, and
/// goal value(s). Each recognized line stores its value and returns; BEGIN
/// VARIABLES switches sections; anything else fails validation.
/// </summary>
private ESection ProcessPreVariables(int lineNumber, string currentLine, ESection currentSection)
{
    Match m = TestFileStrings.ParseSeed.Match(currentLine);
    if (m.Success)
    {
        string strArg = m.Groups["seed"].ToString();
        System.Globalization.NumberStyles style = System.Globalization.NumberStyles.Integer;
        if (strArg.StartsWith("0x", StringComparison.OrdinalIgnoreCase))
        {
            // For some reason the 0x prefix is not allowed for hex strings.
            strArg = strArg.Substring(2);
            style = System.Globalization.NumberStyles.HexNumber;
        }
        this.Seed = int.Parse(strArg, style);
        return(currentSection);
    }
    m = TestFileStrings.ParseWeight.Match(currentLine);
    if (m.Success)
    {
        this.Weight = double.Parse(m.Groups["weight"].ToString());
        return(currentSection);
    }
    // Scale is optional.
    m = TestFileStrings.ParseScale.Match(currentLine);
    if (m.Success)
    {
        this.Scale = double.Parse(m.Groups["scale"].ToString());
        return(currentSection);
    }
    m = TestFileStrings.ParsePadding.Match(currentLine);
    if (m.Success)
    {
        this.PaddingX = double.Parse(m.Groups["X"].ToString());
        this.PaddingY = double.Parse(m.Groups["Y"].ToString());
        return(currentSection);
    }
    // Currently not actually used; the individual clusters record the random values
    // based upon this.
    m = TestFileStrings.ParseMinClusterSize.Match(currentLine);
    if (m.Success)
    {
        this.MinClusterSizeX = double.Parse(m.Groups["X"].ToString());
        this.MinClusterSizeY = double.Parse(m.Groups["Y"].ToString());
        return(currentSection);
    }
    m = TestFileStrings.ParseMargin.Match(currentLine);
    if (m.Success)
    {
        this.Margin = int.Parse(m.Groups["margin"].ToString());
        return(currentSection);
    }
    m = TestFileStrings.ParseUnsatisfiableConstraints.Match(currentLine);
    if (m.Success)
    {
        this.UnsatisfiableConstraintCount = int.Parse(m.Groups["count"].ToString());
        return(currentSection);
    }
    // The goal-line format depends on dimensionality; 2D carries both goalx and goaly.
    m = this.isTwoDimensional ?
            TestFileStrings.ParseGoal2D.Match(currentLine) : TestFileStrings.ParseGoal1D.Match(currentLine);
    if (m.Success)
    {
        this.GoalX = double.Parse(m.Groups["goalx"].ToString());
        if (this.isTwoDimensional)
        {
            this.GoalY = double.Parse(m.Groups["goaly"].ToString());
        }
        return(currentSection);
    }
    // BEGIN VARIABLES ends the header block.
    if (currentLine.StartsWith(TestFileStrings.BeginVariables, StringComparison.OrdinalIgnoreCase))
    {
        currentSection = ESection.Variables;
        return(currentSection);
    }
    Validate.Fail(string.Format("Unknown header line {0}: {1}", lineNumber, currentLine));
    // NOTE(review): presumably Validate.Fail throws, so this is unreachable — confirm.
    return(currentSection);
}
/// <summary>
/// Waits for the BEGIN CLUSTER RESULTS marker; any other line is ignored and the
/// current section is kept.
/// </summary>
private static ESection ProcessPreClusterResults(string currentLine, ESection currentSection)
{
    if (!currentLine.StartsWith(TestFileStrings.BeginClusterResults, StringComparison.OrdinalIgnoreCase))
    {
        return currentSection;
    }
    return ESection.ClusterResults;
}
/// <summary>
/// Handles one line of the CLUSTER RESULTS section: END CLUSTER RESULTS finishes
/// parsing (ESection.Done); otherwise the line is a cluster's result rectangle.
/// Result ordinals must arrive in ascending order; lastClusterDefIndex scans
/// forward through ClusterDefs to locate the matching cluster.
/// </summary>
private ESection ProcessClusterResults(int lineNumber, string currentLine, ESection currentSection)
{
    if (currentLine.StartsWith(TestFileStrings.EndClusterResults, StringComparison.OrdinalIgnoreCase))
    {
        currentSection = ESection.Done;
        return currentSection;
    }
    Match m = TestFileStrings.ParseClusterResult.Match(currentLine);
    if (m.Success)
    {
        int ord = int.Parse(m.Groups["ord"].ToString());
        // Root-level clusters aren't in the cluster list, which includes cluster 0. So we'll
        // just walk forward from the last cluster index. If this is the first time, then we've
        // initialized m_idxLastClusterDef to < 0.
        if (this.lastClusterDefIndex < 0)
        {
            this.lastClusterDefIndex = 0;
        }
        else
        {
            // Verify the cluster results in the file are sorted. This makes it easier for the results
            // reading to be in sync.
            Validate.IsTrue(this.ClusterDefs[this.lastClusterDefIndex].ClusterId < ord, "Out of order CLUSTER RESULT ordinal");
            ++this.lastClusterDefIndex;
        }
        // Scan forward for the cluster with this ordinal; running off the end means the
        // result line references a cluster that was never defined.
        for (;; ++this.lastClusterDefIndex)
        {
            if (this.lastClusterDefIndex >= this.ClusterDefs.Count)
            {
                Validate.Fail(string.Format("Ordinal not in Cluster List at CLUSTER RESULT line {0}: {1}", lineNumber, currentLine));
            }
            if (this.ClusterDefs[this.lastClusterDefIndex].ClusterId == ord)
            {
                break;
            }
        }
        // Record the final left/right/top/bottom border positions for this cluster.
        this.ClusterDefs[this.lastClusterDefIndex].SetResultPositions(
                double.Parse(m.Groups["lpos"].ToString()),
                double.Parse(m.Groups["rpos"].ToString()),
                double.Parse(m.Groups["tpos"].ToString()),
                double.Parse(m.Groups["bpos"].ToString()));
    }
    else
    {
        Validate.Fail(string.Format("Unparsable CLUSTER RESULT line {0}: {1}", lineNumber, currentLine));
    }
    return currentSection;
}