public void BBCodes_Parse_Tests()
{
    // Tokenize a BBCode sample with whitespace preserved and decimal
    // recognition off, strip the [b]/[url] tags and verify both the
    // extracted URL and the remaining plain text.
    StringTokenizer tokenizer = new StringTokenizer("alpha beta 123 456.57, x [b]bold text[/b] qq \r\n" + " [url=http://test.com/~user/index.html]url text[/url]");
    tokenizer.IgnoreWhiteSpace = false;
    tokenizer.RecognizeDecimals = false;

    string plainText = "";
    Token token = tokenizer.Next();
    while (token.Kind != TokenKind.EOF) {
        bool isTagStart = (token.Kind == TokenKind.Symbol && token.Value == "[");
        if (!isTagStart) {
            // Ordinary content: accumulate it verbatim.
            plainText += token.Value;
        } else {
            token = tokenizer.Next();
            // A closing tag carries a leading '/' — skip over it.
            if (token.Kind == TokenKind.Symbol && token.Value == "/") {
                token = tokenizer.Next();
            }
            if (token.Kind != TokenKind.Word) {
                throw new Exception("not tag");
            }
            string tagName = token.Value;
            token = tokenizer.Next();
            // Only the [url=...] form carries a value to collect.
            if (tagName == "url" && token.Kind == TokenKind.Symbol && token.Value == "=") {
                token = tokenizer.Next();
                string url = "";
                while (true) {
                    url += token.Value;
                    token = tokenizer.Next();
                    if (token.Kind == TokenKind.Symbol && token.Value == "]") {
                        break;
                    }
                }
                Assert.AreEqual("http://test.com/~user/index.html", url);
            }
            // Every tag must be terminated by ']'.
            if (token.Kind != TokenKind.Symbol || token.Value != "]") {
                throw new Exception("not bracket");
            }
        }
        token = tokenizer.Next();
    }
    Assert.AreEqual("alpha beta 123 456.57, x bold text qq \r\n url text", plainText);
}
public void Test_Common()
{
    // Exercises the basic StringTokenizer contract: null-argument guard,
    // plain words/numbers/symbols, whitespace handling, quoted strings with
    // embedded line breaks, line counting, and the hex/bin/ident modes.
    Assert.Throws(typeof(ArgumentNullException), () => { new StringTokenizer(null); });

    // --- Pass 1: whitespace kept, decimals off, hex on ---
    StringTokenizer strTok = new StringTokenizer("alpha beta 123 456.57, x 0x123F");
    Assert.IsNotNull(strTok);
    strTok.RecognizeHex = true;
    strTok.IgnoreWhiteSpace = false;
    Assert.IsFalse(strTok.IgnoreWhiteSpace);
    // Round-trip the SymbolChars property (getter and setter).
    char[] symChars = strTok.SymbolChars;
    strTok.SymbolChars = symChars;
    strTok.RecognizeDecimals = false;
    Assert.IsFalse(strTok.RecognizeDecimals);
    Token tok = strTok.Next();
    Assert.AreEqual(TokenKind.Word, tok.Kind);
    Assert.AreEqual("alpha", tok.Value);
    // Position points just past the consumed token.
    Assert.AreEqual(5, strTok.Position);
    Assert.AreEqual(TokenKind.WhiteSpace, strTok.Next().Kind);
    tok = strTok.Next();
    Assert.AreEqual(TokenKind.Word, tok.Kind);
    Assert.AreEqual("beta", tok.Value);
    Assert.AreEqual(TokenKind.WhiteSpace, strTok.Next().Kind);
    tok = strTok.Next();
    Assert.AreEqual(TokenKind.Number, tok.Kind);
    Assert.AreEqual("123", tok.Value);
    Assert.AreEqual(TokenKind.WhiteSpace, strTok.Next().Kind);
    // With RecognizeDecimals off, "456.57" splits into number/dot/number.
    tok = strTok.Next();
    Assert.AreEqual(TokenKind.Number, tok.Kind);
    Assert.AreEqual("456", tok.Value);
    tok = strTok.Next();
    Assert.AreEqual(TokenKind.Symbol, tok.Kind);
    Assert.AreEqual(".", tok.Value);
    tok = strTok.Next();
    Assert.AreEqual(TokenKind.Number, tok.Kind);
    Assert.AreEqual("57", tok.Value);
    tok = strTok.Next();
    Assert.AreEqual(TokenKind.Symbol, tok.Kind);
    Assert.AreEqual(",", tok.Value);
    Assert.AreEqual(TokenKind.WhiteSpace, strTok.Next().Kind);
    tok = strTok.Next();
    Assert.AreEqual(TokenKind.Word, tok.Kind);
    Assert.AreEqual("x", tok.Value);
    Assert.AreEqual(TokenKind.WhiteSpace, strTok.Next().Kind);
    tok = strTok.Next();
    Assert.AreEqual(TokenKind.HexNumber, tok.Kind);
    Assert.AreEqual(true, strTok.RequireToken(TokenKind.HexNumber));
    Assert.AreEqual("0x123F", tok.Value);

    // --- Pass 2: whitespace ignored, decimals on, EOL line counting,
    //     quoted strings (including one spanning line breaks) ---
    strTok = new StringTokenizer("alpha beta 123 456.57, x; \r\n \r \n \"test quote\" \"test \r \n quote2\"");
    Assert.IsNotNull(strTok);
    strTok.IgnoreWhiteSpace = true;
    Assert.IsTrue(strTok.IgnoreWhiteSpace);
    strTok.RecognizeDecimals = true;
    Assert.IsTrue(strTok.RecognizeDecimals);
    tok = strTok.Next();
    Assert.AreEqual(TokenKind.Word, tok.Kind);
    Assert.AreEqual("alpha", tok.Value);
    tok = strTok.Next();
    Assert.AreEqual(TokenKind.Word, tok.Kind);
    Assert.AreEqual("beta", tok.Value);
    tok = strTok.Next();
    Assert.AreEqual(TokenKind.Number, tok.Kind);
    Assert.AreEqual("123", tok.Value);
    // With RecognizeDecimals on, "456.57" is a single number token.
    tok = strTok.Next();
    Assert.AreEqual(TokenKind.Number, tok.Kind);
    Assert.AreEqual("456.57", tok.Value);
    tok = strTok.Next();
    Assert.AreEqual(TokenKind.Symbol, tok.Kind);
    Assert.AreEqual(",", tok.Value);
    tok = strTok.Next();
    Assert.AreEqual(TokenKind.Word, tok.Kind);
    Assert.AreEqual("x", tok.Value);
    tok = strTok.Next();
    Assert.AreEqual(TokenKind.Symbol, tok.Kind);
    Assert.AreEqual(";", tok.Value);
    Assert.AreEqual(1, tok.Line);
    // "\r\n", bare "\r" and bare "\n" each yield one EOL and bump the line.
    tok = strTok.Next();
    Assert.AreEqual(TokenKind.EOL, tok.Kind);
    Assert.AreEqual(1, tok.Line);
    tok = strTok.Next();
    Assert.AreEqual(TokenKind.EOL, tok.Kind);
    Assert.AreEqual(2, tok.Line);
    tok = strTok.Next();
    Assert.AreEqual(TokenKind.EOL, tok.Kind);
    Assert.AreEqual(3, tok.Line);
    tok = strTok.Next();
    Assert.AreEqual(TokenKind.QuotedString, tok.Kind);
    Assert.AreEqual("\"test quote\"", tok.Value);
    Assert.AreEqual(4, tok.Line);
    // A quoted string may span line breaks; the raw content is preserved.
    tok = strTok.Next();
    Assert.AreEqual(TokenKind.QuotedString, tok.Kind);
    Assert.AreEqual("\"test \r \n quote2\"", tok.Value);

    // --- Pass 3: hex, binary and identifier recognition ---
    strTok = new StringTokenizer("alpha 0x601 0b11000000001 alg_123");
    Assert.IsNotNull(strTok);
    strTok.IgnoreWhiteSpace = true;
    strTok.RecognizeHex = true;
    Assert.IsTrue(strTok.RecognizeHex);
    strTok.RecognizeBin = true;
    Assert.IsTrue(strTok.RecognizeBin);
    strTok.RecognizeIdents = true;
    Assert.IsTrue(strTok.RecognizeIdents);
    tok = strTok.Next();
    Assert.AreEqual(TokenKind.Word, tok.Kind);
    Assert.AreEqual("alpha", tok.Value);
    tok = strTok.Next();
    Assert.AreEqual(TokenKind.HexNumber, tok.Kind);
    Assert.AreEqual("0x601", tok.Value);
    tok = strTok.Next();
    Assert.AreEqual(TokenKind.BinNumber, tok.Kind);
    Assert.AreEqual("0b11000000001", tok.Value);
    // Word containing an underscore/digits is an Ident when RecognizeIdents is on.
    tok = strTok.Next();
    Assert.AreEqual(TokenKind.Ident, tok.Kind);
    Assert.AreEqual("alg_123", tok.Value);
}
public List <SearchToken> ToTokens(string searchText)
{
    // Converts raw search text into a flat list of strongly-typed
    // SearchToken instances; unrecognized token kinds are skipped.
    var reader = new StringTokenizer(searchText);
    var results = new List <SearchToken>();
    Token current;
    do {
        current = reader.Next();
        switch (current.Kind) {
            case TokenKind.NotOperator:
                // The operand being negated is the token that follows.
                current = reader.Next();
                results.Add(new AntiStringToken { Value = current.Value });
                break;

            case TokenKind.AttributeOperator:
                // Drop the leading operator character.
                results.Add(new SearchAttributeToken { Value = current.Value.Substring(1) });
                break;

            case TokenKind.KeyValueOperator: {
                Token operand = reader.Next();
                // Strip off the trailing colon (:) to get the bare key name.
                string keyText = current.Value.Remove(current.Value.Length - 1).ToLower();
                TSearchKeyOperator parsedKey;
                if (!Enum.TryParse(keyText, true, out parsedKey)) {
                    // Build the list of valid keys, excluding the zero/none member.
                    var validKeys = ((TSearchKeyOperator[])Enum.GetValues(typeof(TSearchKeyOperator))).Where(k => Convert.ToInt32(k) != 0);
                    var allowedKeyCsv = string.Join(", ", validKeys);
                    throw ArtemisException.Create($"{keyText} is not a recognised search key. Valid values are: {allowedKeyCsv}");
                }
                var pairToken = new SearchKeyValuePairToken <TSearchKeyOperator> { Key = parsedKey, Value = operand.Value };
                // Allow transformation of search input values.
                OnSearchTokenKeyValuePair?.Invoke(operand, pairToken);
                results.Add(pairToken);
                break;
            }

            case TokenKind.Number:
                results.Add(new SearchNumberToken { Value = current.Value });
                break;

            case TokenKind.Word:
            case TokenKind.QuotedString:
                results.Add(new SearchStringToken { Value = current.Value });
                break;
        }
    } while (current.Kind != TokenKind.EOF);
    return results;
}
/// <summary>
/// Parses BBCode-marked text into the given chunk list. Supported tags:
/// [color=...], [size=+/-N], [b], [i], [s], [u], [url=...]. Plain text,
/// unknown tags and malformed tag fragments are emitted as text chunks;
/// EOL tokens start a new (empty) chunk line.
/// </summary>
/// <param name="chunksList">Destination list; cleared before parsing.</param>
/// <param name="text">Source text; null/empty yields a single empty chunk.</param>
public void ParseText(List <BBTextChunk> chunksList, string text)
{
    fChunks = chunksList;
    fChunks.Clear();
    float lastFontSize = fDefaultFontSize;
    BBTextChunk lastChunk = null;
    // Stack of (previous, new) font sizes so nested [size] tags unwind correctly.
    Stack <SizeChange> stackSizes = new Stack <SizeChange>();
    //lastChunk = SetChunkFontSize(0, lastChunk, fDefaultFontSize);
    if (string.IsNullOrEmpty(text)) {
        // Emit one empty chunk so the output is never a zero-chunk list.
        text = EMPTY_CHUNK;
        lastChunk = SetChunkText(0, lastChunk, text);
        return;
    }
    StringTokenizer strTok = new StringTokenizer(text);
    strTok.IgnoreWhiteSpace = false;
    strTok.RecognizeDecimals = false;
    strTok.RecognizeQuotedStrings = false;
    Token tok = strTok.Next();
    while (tok.Kind != TokenKind.EOF) {
        if (tok.Kind == TokenKind.Symbol && tok.Value == "[") {
            // 'temp' accumulates raw tag characters so that a malformed tag
            // can be re-emitted as literal text.
            string temp = tok.Value;
            tok = strTok.Next();
            bool closedTag;
            // closed tag ("[/...")
            if (tok.Kind == TokenKind.Symbol && tok.Value == "/") {
                closedTag = true;
                temp += tok.Value;
                tok = strTok.Next();
            } else {
                closedTag = false;
            }
            if (tok.Kind != TokenKind.Word) {
                // not tag: emit the consumed characters as plain text
                lastChunk = SetChunkText(tok.Line, lastChunk, temp + tok.Value);
            } else {
                string tag = tok.Value;
                //bool skipTag = false;
                if (tag == "color") {
                    // [color="{red|#ff0000}"][/color]
                    IColor color = fTextColor;
                    if (!closedTag) {
                        tok = strTok.Next();
                        if (tok.Kind == TokenKind.Symbol && tok.Value == "=") {
                            tok = strTok.Next();
                            if (tok.Kind == TokenKind.Word) {
                                color = fGfxProvider.CreateColor(tok.Value);
                                lastChunk = SetChunkColor(tok.Line, lastChunk, color);
                            }
                        }
                    } else {
                        // TODO: colorStack — closing tag always restores the
                        // default color, not the enclosing [color]'s value.
                        color = fTextColor;
                        lastChunk = SetChunkColor(tok.Line, lastChunk, color);
                    }
                } else if (tag == "size") {
                    // [size={+/-x}][/size]
                    if (!closedTag) {
                        tok = strTok.Next();
                        if (tok.Kind == TokenKind.Symbol && tok.Value == "=") {
                            tok = strTok.Next();
                            // Optional sign; factor 0 means a bare (ignored) delta.
                            int factor = 0;
                            if (tok.Kind == TokenKind.Symbol) {
                                if (tok.Value == "+") {
                                    factor = +1;
                                } else if (tok.Value == "-") {
                                    factor = -1;
                                }
                                tok = strTok.Next();
                            }
                            if (tok.Kind == TokenKind.Number) {
                                float newSize = lastFontSize + factor * ConvertHelper.ParseInt(tok.Value, 0);
                                stackSizes.Push(new SizeChange(lastFontSize, newSize));
                                lastChunk = SetChunkFontSize(tok.Line, lastChunk, newSize);
                                lastFontSize = newSize;
                            }
                        }
                    } else {
                        // Closing [/size]: restore the size saved at the open tag.
                        if (stackSizes.Count > 0) {
                            SizeChange sizeChange = stackSizes.Pop();
                            lastChunk = SetChunkFontSize(tok.Line, lastChunk, sizeChange.PrevSize);
                            lastFontSize = sizeChange.PrevSize;
                        }
                    }
                } else if (tag == "b") {
                    // [b][/b]
                    lastChunk = SetChunkFontStyle(tok.Line, lastChunk, ExtFontStyle.Bold, !closedTag);
                } else if (tag == "i") {
                    // [i][/i]
                    lastChunk = SetChunkFontStyle(tok.Line, lastChunk, ExtFontStyle.Italic, !closedTag);
                } else if (tag == "s") {
                    // [s][/s]
                    lastChunk = SetChunkFontStyle(tok.Line, lastChunk, ExtFontStyle.Strikeout, !closedTag);
                } else if (tag == "u") {
                    // [u][/u]
                    lastChunk = SetChunkFontStyle(tok.Line, lastChunk, ExtFontStyle.Underline, !closedTag);
                } else if (tag == "url") {
                    // bad implementation
                    // [url][/url] and [url=...][/url], but now only [url=...][/url]
                    string url = "";
                    tok = strTok.Next();
                    if (tok.Kind == TokenKind.Symbol && tok.Value == "=") {
                        tok = strTok.Next();
                        // Collect everything up to the closing ']' as the URL.
                        do {
                            url += tok.Value;
                            tok = strTok.Next();
                        } while (tok.Kind != TokenKind.Symbol || tok.Value != "]");
                    } else {
                        // value-less [url] form not handled
                    }
                    // Links render underlined, in the link color while open.
                    lastChunk = SetChunkFontStyle(tok.Line, lastChunk, ExtFontStyle.Underline, !closedTag);
                    IColor color = (closedTag) ? fTextColor : fLinkColor;
                    lastChunk = SetChunkColor(tok.Line, lastChunk, color);
                    if (!closedTag) {
                        lastChunk.URL = url;
                    }
                } else {
                    // not tag: unknown tag name emitted as plain text
                    lastChunk = SetChunkText(tok.Line, lastChunk, temp + tok.Value);
                }
                if (tok.Kind != TokenKind.Symbol || tok.Value != "]") {
                    // Possible syntax error?
                    // NOTE(review): the token returned here is discarded, so the
                    // skipped token is lost rather than re-processed — confirm intended.
                    strTok.Next();
                }
            }
        } else if (tok.Kind == TokenKind.EOL) {
            // Line break: close the current line with an empty chunk and
            // start a fresh chunk chain.
            lastChunk = SetChunkText(tok.Line, null, EMPTY_CHUNK);
            lastChunk = null;
        } else {
            lastChunk = SetChunkText(tok.Line, lastChunk, tok.Value);
        }
        tok = strTok.Next();
    }
    // eof: terminate with an empty chunk on a new line
    lastChunk = SetChunkText(tok.Line + 1, null, EMPTY_CHUNK);
}
/// <summary>
/// Reads the whole Verilog source from <paramref name="sr"/>, scans it for
/// "module" headers and line-leading "dff*" instantiations, logs a summary,
/// and delegates writing of the shadow-chain file to WriteShadowFile.
/// </summary>
/// <returns>true when WriteShadowFile reports success.</returns>
private bool WriteShadowDffs(StreamWriter sw, StreamReader sr)
{
    string fileText = sr.ReadToEnd();
    StringTokenizer tok = new StringTokenizer(fileText);
    tok.IgnoreWhiteSpace = true;

    int totalDFFs = 0;
    List <DFFModule> dffs = new List <DFFModule>();
    List <Token> bkmrks = new List <Token>();

    while (true) {
        Token currToken = tok.Next();
        if (currToken.Value == "module") {
            bkmrks = DFFModule.ParseModuleHeader(tok);
        }
        if (currToken.Kind == TokenKind.EOF) {
            break;
        }
        if (currToken.Kind == TokenKind.EOL) {
            // Skip consecutive blank lines; the first token of a line
            // decides what gets parsed.
            while (currToken.Kind == TokenKind.EOL) {
                currToken = tok.Next();
            }
            string currWord = currToken.Value;
            if (currWord == "module") {
                bkmrks = DFFModule.ParseModuleHeader(tok);
            }
            // Ordinal comparison: module-type names are identifiers, not
            // culture-sensitive text.
            if (currWord.StartsWith("dff", StringComparison.Ordinal)) {
                DFFModule dff = DFFModule.ParseDFF(tok);
                totalDFFs += dff.numDFFs;
                dffs.Add(dff);
                progressBar1.PerformStep();
            }
        }
    }

    // Build the report once and append it in a single UI update, instead of
    // concatenating onto logBox.Text repeatedly (each += re-copies the whole
    // textbox contents and redraws the control).
    var log = new System.Text.StringBuilder();
    log.Append("Total DFFs Detected: ").Append(dffs.Count).Append("\r\n");
    log.Append("Total DFF bits detected: ").Append(totalDFFs).Append("\r\n");
    log.Append("DFF Output Names:").Append("\r\n");
    foreach (DFFModule dff in dffs) {
        log.Append("\t").Append(dff.numDFFs).Append(": ").Append(dff.qName).Append("\r\n");
        progressBar1.PerformStep();
    }
    logBox.Text += log.ToString();

    return WriteShadowFile(sw, fileText, bkmrks, dffs, totalDFFs);
}
/// <summary>
/// Re-indents the relevance expression in the editor: each '(' opens an
/// indent level, ')' closes one, and the keywords then/else/and/or/whose/if
/// get dedicated line-placement rules. The justElse* flags compensate for
/// "else" blocks written without their own parentheses.
/// </summary>
private void buttonIndent_Click(object sender, EventArgs e)
{
    syntaxEditBES.SyntaxPaint.DrawColumnsIndent = true;
    StringTokenizer tok = new StringTokenizer(syntaxEditBES.Text);
    tok.IgnoreWhiteSpace = true;
    tok.SymbolChars = new char[] { '(', ')' };
    syntaxEditBES.Text = "";

    int sp = 0;                              // current indent depth
    String space = "";                       // indent string rebuilt per token
    String firstIndent = "\t";               // base indent applied to most lines
    String oneTab = "\t";
    Boolean crlf = false;                    // a pending line break is owed
    Boolean newline = true;                  // next plain token starts a line
    Boolean justElse = false;                // inside an "else" (paren form)
    Boolean justElse2 = false;               // second ')' after that "else"
    Boolean justElseNoParen = false;         // "else" written without parens
    String parsedResults = "";
    Boolean previousTokenIsSymbol = false;   // last token was Unknown (e.g. '>'/'=')
    Token token;
    do {
        token = tok.Next();
        // ================================= EOL ===========================================
        if (token.Kind == TokenKind.EOL) {
            // nothing, kill the EOLs — layout is fully regenerated below
        }
        // ================================= ( ===========================================
        else if (token.Kind == TokenKind.Symbol && token.Value == "(") {
            if (crlf == true) {
                parsedResults = parsedResults + "\r\n";
            }
            space = "";
            for (int i = 0; i < sp; i++) {
                space = space + oneTab;
            }
            space = firstIndent + space;
            parsedResults = parsedResults + space + token.Value + "\r\n";
            crlf = false;
            newline = true;
            justElseNoParen = false;
            sp++;
        }
        // ================================= ) ===========================================
        else if (token.Kind == TokenKind.Symbol && token.Value == ")") {
            // A ')' that closes an "else" block must drop two levels once.
            if (justElse == true && justElse2 == true) {
                sp--;
                sp--;
                justElse = false;
                justElse2 = false;
            } else {
                sp--;
            }
            if (justElse == true && justElse2 == false) {
                justElse2 = true;
            }
            if (justElseNoParen == true) {
                sp--;
                justElseNoParen = false;
                justElse = false;
                justElse2 = false;
            }
            if (crlf == true) {
                parsedResults = parsedResults + "\r\n";
            }
            space = "";
            for (int i = 0; i < sp; i++) {
                space = space + oneTab;
            }
            space = firstIndent + space;
            parsedResults = parsedResults + space + token.Value + "\r\n";
            crlf = false;
            newline = true;
        }
        // ================================= then/else ==========================================
        else if (token.Kind == TokenKind.Word && (token.Value.ToLower() == "then" || token.Value.ToLower() == "else")) {
            if (justElseNoParen == true) {
                sp--;
                justElseNoParen = false;
                justElse = false;
                justElse2 = false;
            }
            if (token.Value.ToLower() == "else") {
                justElse = true;
                justElseNoParen = true;
            }
            if (crlf == true) {
                parsedResults = parsedResults + "\r\n";
            }
            // Keyword sits one level out, body goes back in.
            sp--;
            if (sp < 0) { sp = 0; }
            space = "";
            for (int i = 0; i < sp; i++) {
                space = space + oneTab;
            }
            space = firstIndent + space;
            parsedResults = parsedResults + space + token.Value + "\r\n";
            sp++;
            crlf = false;
            newline = true;
        }
        // ================================= and/or ===========================================
        else if (token.Kind == TokenKind.Word && (token.Value.ToLower() == "and" || token.Value.ToLower() == "or")) {
            if (crlf == true) {
                parsedResults = parsedResults + "\r\n";
            }
            // Connectives keep the current depth and take no base indent.
            space = "";
            for (int i = 0; i < sp; i++) {
                space = space + oneTab;
            }
            parsedResults = parsedResults + space + token.Value + "\r\n";
            crlf = false;
            newline = true;
        }
        // ================================= whose ===========================================
        else if (token.Kind == TokenKind.Word && (token.Value.ToLower() == "whose")) {
            if (crlf == true) {
                parsedResults = parsedResults + "\r\n";
            }
            if (sp < 0) { sp = 0; }
            space = "";
            for (int i = 0; i < sp; i++) {
                space = space + oneTab;
            }
            space = firstIndent + space;
            // No trailing newline: "whose" stays on the same line as what follows.
            parsedResults = parsedResults + space + token.Value;
            crlf = true;
        }
        // ================================= if ===========================================
        else if (token.Kind == TokenKind.Word && (token.Value.ToLower() == "if")) {
            if (sp < 0) { sp = 0; }
            space = "";
            for (int i = 0; i < sp; i++) {
                space = space + oneTab;
            }
            space = firstIndent + space;
            parsedResults = parsedResults + space + token.Value + "\r\n";
            sp++;
            crlf = false;
            justElse = false;
            justElse2 = false;
        }
        // ================================= others ===========================================
        else {
            space = "";
            if (newline) {
                for (int i = 0; i < sp; i++) {
                    space = space + oneTab;
                }
                space = firstIndent + space;
            }
            // Glue Unknown-kind operator fragments (e.g. '>' '=') together
            // with no intervening space.
            if ((token.Kind == TokenKind.Unknown) && previousTokenIsSymbol) {
                parsedResults = parsedResults.TrimEnd() + space + token.Value;
            } else {
                parsedResults = parsedResults + space + token.Value + " ";
            }
            crlf = true;
            newline = false;
            previousTokenIsSymbol = (token.Kind == TokenKind.Unknown);
        }
    } while (token.Kind != TokenKind.EOF);

    syntaxEditBES.Text = parsedResults;
    buttonIndentLW.Enabled = true;
    buttonIndent.Enabled = false;
    buttonFlatten.Enabled = true;
}
/// <summary>
/// Interprets a dotted navigation string (e.g. "Catalog.Schema.Object.Col")
/// typed by the user and builds a BindingSource filter that narrows the
/// code-completion list to matching database objects. Switches the current
/// database when the string names one.
/// </summary>
/// <param name="jumpString">The raw jump text from the UI.</param>
public void JumpTo_Old(string jumpString)
{
    string dbName = _defaultDatabase;
    label1.Text = jumpString;
    StringTokenizer tok = new StringTokenizer(jumpString);
    IList <Token> tokens = new List <Token>();
    Token token = null;
    // Keep only words and '.' separators; everything else is noise.
    do {
        token = tok.Next();
        if (token.Kind == TokenKind.EOL ||
            token.Kind == TokenKind.EOF ||
            token.Kind == TokenKind.Unknown ||
            (token.Kind == TokenKind.Symbol && token.Value != ".")) {
            continue;
        }
        tokens.Add(token);
    } while (token.Kind != TokenKind.EOF);

    // By default we do not want params and cols in list.
    // NOTE(review): token values are concatenated into the filter expression
    // unescaped — a value containing a quote would break the filter. Confirm
    // the input is constrained upstream.
    string filterStr = "( Name Like'%%') AND ( Type <> 'Param' AND Type <> 'Col' )";
    if (tokens.Count > 0) {
        Token lastToken = tokens[tokens.Count - 1];
        //i.e: Catalog.Schema.DBObject.Colu
        if (lastToken.Kind == TokenKind.Word) {
            // The user is part-way through typing a name.
            if (tokens.Count - 3 >= 0) {
                Token parentToken = tokens[tokens.Count - 3];
                // Check if parent token is a user.
                if (parentToken.Value == ".") {
                    // Empty schema slot ("db..name") — assume dbo.
                    // We must try to find the database
                    if (tokens.Count - 4 >= 0) {
                        Token dbToken = tokens[tokens.Count - 4];
                        dbName = dbToken.Value.ToLower();
                        filterStr = "( Schema = '" + "dbo" + "' ) " + " AND ( Catalog = '" + dbToken.Value + "' ) " + " AND ( Name Like '" + lastToken.Value + "%' ) " + " AND ( Type <> 'Param' AND Type <> 'Col' ) ";
                    }
                    // No need to find the database
                    else {
                        filterStr = "( Schema = '" + "dbo" + "') " + " AND ( Name Like '" + lastToken.Value + "%' ) " + " AND ( Type <> 'Param' AND Type <> 'Col' ) ";
                    }
                } else if (IsValidUser(parentToken.Value)) {
                    // We must try to find the database
                    if (tokens.Count - 5 >= 0) {
                        Token dbToken = tokens[tokens.Count - 5];
                        dbName = dbToken.Value.ToLower();
                        filterStr = "( Schema = '" + parentToken.Value + "' ) " + " AND ( Catalog = '" + dbToken.Value + "' ) " + " AND ( Name Like '" + lastToken.Value + "%' ) " + " AND ( Type <> 'Param' AND Type <> 'Col' ) ";
                    }
                    // No need to find the database
                    else {
                        filterStr = "( Schema = '" + parentToken.Value + "') " + " AND ( Name Like '" + lastToken.Value + "%' ) " + " AND ( Type <> 'Param' AND Type <> 'Col' ) ";
                    }
                }
                //Check if parent token is a database
                else if (IsValidDatabase(parentToken.Value)) {
                    dbName = parentToken.Value.ToLower();
                    filterStr = "( Type = 'Usr') " + " AND ( Name Like '" + lastToken.Value + "%' )";
                } else {
                    filterStr = "( ParentName = '" + parentToken.Value + "') " + " AND ( Name Like '" + lastToken.Value + "%' ) ";
                }
            }
            // Parent token does not exist
            else {
                filterStr = "( Name Like '" + lastToken.Value + "%' ) " + " AND ( Type <> 'Param' AND Type <> 'Col' ) ";
            }
        } else if (lastToken.Value == ".") {
            // The user just typed a separator — list the parent's children.
            if (tokens.Count - 2 >= 0) {
                Token parentToken = tokens[tokens.Count - 2];
                // Check if parent token is a user.
                if (parentToken.Value == "." || IsValidUser(parentToken.Value)) {
                    // We must try to find the database
                    if (tokens.Count - 4 >= 0 && parentToken.Value != ".") {
                        Token dbToken = tokens[tokens.Count - 4];
                        dbName = dbToken.Value.ToLower();
                        filterStr = "( Schema = '" + parentToken.Value + "' ) " + " AND ( Catalog = '" + dbToken.Value + "' ) " + " AND ( Type <> 'Param' AND Type <> 'Col' ) ";
                    } else if (tokens.Count - 3 >= 0 && parentToken.Value == ".") {
                        Token dbToken = tokens[tokens.Count - 3];
                        dbName = dbToken.Value.ToLower();
                        filterStr = "( Schema = '" + "dbo" + "' ) " + " AND ( Catalog = '" + dbToken.Value + "' ) " + " AND ( Type <> 'Param' AND Type <> 'Col' ) ";
                    }
                    // No need to find the database
                    else {
                        filterStr = "( Schema = '" + parentToken.Value + "') " + " AND ( Type <> 'Param' AND Type <> 'Col' ) ";
                    }
                }
                //Check if parent token is a database
                else if (IsValidDatabase(parentToken.Value)) {
                    dbName = parentToken.Value.ToLower();
                    filterStr = "( Type = 'Usr')";
                } else {
                    filterStr = "ParentName = '" + parentToken.Value + "'";
                }
            }
        }
    }
    // Switching databases invalidates the cached completion list.
    if (_currentDatabase != dbName) {
        _currentDatabase = dbName;
        PopulateCodeCompletionList();
    }
    if (_bsObjects.DataSource == null || _tblObjects == null) {
        this.Hide();
        return;
    }
    _bsObjects.Filter = filterStr;
    _bsObjects.Sort = "Order ASC, Name ASC";
    if (_bsObjects.Count > 0) {
        _bsObjects.Position = 0;
    }
}
/// <summary>
/// Parses a spouse line of the form: "М/Ж[N]{" (extData)"}{": " | " - "}name".
/// Returns null when the line does not match the expected layout.
/// </summary>
public static SpouseLineRet ParseSpouseLine(string str)
{
    var tokenizer = new StringTokenizer(str);
    tokenizer.IgnoreWhiteSpace = false;
    tokenizer.RecognizeDecimals = false;

    Token tok = tokenizer.Next();

    // Mandatory spouse marker: "М" (husband) or "Ж" (wife).
    if (tok.Kind != TokenKind.Word || (tok.Value != "М" && tok.Value != "Ж")) {
        return null;
    }
    string spouseSign = tok.Value;
    tok = tokenizer.Next();

    // Optional marriage number immediately after the marker; defaults to 1.
    int marriageNumber = 1;
    if (tok.Kind == TokenKind.Number) {
        marriageNumber = int.Parse(tok.Value);
        tok = tokenizer.Next();
    }

    if (tok.Kind == TokenKind.WhiteSpace && tok.Value == " ") {
        tok = tokenizer.Next();
    }

    // Optional parenthesized extra data, copied verbatim including parens.
    string extraData = "";
    if (tok.Kind == TokenKind.Symbol && tok.Value == "(") {
        extraData += tok.Value;
        while (true) {
            tok = tokenizer.Next();
            if (tok.Kind == TokenKind.EOL || tok.Kind == TokenKind.EOF) {
                // Unterminated group — reject the whole line.
                return null;
            }
            extraData += tok.Value;
            if (tok.Kind == TokenKind.Symbol && tok.Value == ")") {
                break;
            }
        }
        tok = tokenizer.Next();
    }

    if (tok.Kind == TokenKind.WhiteSpace && tok.Value == " ") {
        tok = tokenizer.Next();
    }

    // Mandatory ':' or '-' separator; the name follows from Position onward.
    if (tok.Kind != TokenKind.Symbol || (tok.Value != ":" && tok.Value != "-")) {
        return null;
    }
    tokenizer.Next();

    return new SpouseLineRet(spouseSign, marriageNumber, extraData, tokenizer.Position);
}
/// <summary>
/// Parses one module instantiation inside <paramref name="parent"/>:
/// an optional '#(...)' parameter list, the instance name, then the port
/// list up to the terminating ';'. Returns the created instance with the
/// token just before ';' (normally the closing ')') recorded.
/// </summary>
public VerilogModuleInstance ParseModuleInstance(VerilogModule parent, StringTokenizer tknzr, Token possibleParamList, VerilogModule modInstType)
{
    string instName;
    // NOTE(review): paramListBegin is never advanced past '#(' — callers
    // receive the token preceding '#'; confirm that is the intended anchor.
    Token paramListBegin = possibleParamList;
    bool paramExists = false;
    bool paramsNamed = false;

    string word = tknzr.Next().Value;
    if (word == "#") {
        // Run through parameter lists until parens all closed.
        // NOTE(review): parenPairs starts at 1 before any '(' is consumed,
        // unlike the variant in ParseModuleDeclaration which first checks
        // for '(' — verify both behave the same on '#(' input.
        paramExists = true;
        int parenPairs = 1;
        while (parenPairs > 0) {
            word = tknzr.Next().Value;
            if (word.Contains(".")) {
                paramsNamed = true; // ".name(value)" parameter binding style
            }
            if (word == "(") {
                parenPairs++;
            } else if (word == ")") {
                parenPairs--;
            }
        }
        instName = tknzr.Next().Value;
    } else {
        instName = word;
    }

    // Scan the port list up to the terminating ';', remembering the two
    // preceding tokens; twoPrevTok ends on the token before ')'/the ')'.
    Token currTok = tknzr.Next();
    Token prevTok = currTok;
    Token twoPrevTok = currTok;
    while (currTok.Value != ";") {
        twoPrevTok = prevTok;   // At ')'
        prevTok = currTok;      // At ';'
        currTok = tknzr.Next(); // After ';'
    }

    VerilogModuleInstance vModInst = modInstType.Instantiate(parent, instName, twoPrevTok);
    vModInst.ParameterList = paramListBegin;
    vModInst.Parameterized = paramExists;
    vModInst.ParametersNamed = paramsNamed;
    return vModInst;
}
/// <summary>
/// Handles a Verilog precompiler directive at the current token. Returns
/// false immediately when currTok is not '`'. Supports `ifdef (parse or
/// skip to matching `else/`endif using defineSet), `else (skip to matching
/// `endif), `endif, and `define (adds the symbol to defineSet). prevTok and
/// currTok are advanced past the consumed directive via the ref parameters.
/// </summary>
private Boolean RunPrecompiler(StringTokenizer tknzr, ref Token prevTok, ref Token currTok)
{
    if (currTok.Value != "`") {
        return(false);
    }
    // PRECOMPILER DIRECTIVE FOUND
    prevTok = currTok;
    currTok = tknzr.Next();
    if (currTok.Value == "ifdef") {
        prevTok = currTok;
        currTok = tknzr.Next();
        if (defineSet.Contains(currTok.Value)) {
            //PARSE: If it is defined, parse the ifdef
            // Push "symbol@line:column" so the matching branch can be traced.
            ifdefStack.Push(currTok.Value + "@" + currTok.Line + ":" + currTok.Column);
        } else {
            //IGNORE: Seek to end or else
            // ifdefCount tracks nested `ifdefs so we stop only at OUR
            // matching `else/`endif.
            int ifdefCount = 0;
            while (true) {
                if (currTok.Kind == TokenKind.EOF) {
                    break;
                }
                prevTok = currTok;
                currTok = tknzr.Next();
                if (currTok.Value == "ifdef") {
                    ifdefCount++;
                } else if (currTok.Value == "else" && ifdefCount == 0) {
                    break;
                } else if (currTok.Value == "endif") {
                    if (ifdefCount > 0) {
                        ifdefCount--;
                    } else {
                        break;
                    }
                }
            }
            // Stopped at our `else: its branch is active, record it.
            if (currTok.Value == "else") {
                ifdefStack.Push(currTok.Value + "@" + currTok.Line + ":" + currTok.Column);
            }
        }
    } else if (currTok.Value == "else") {
        // IGNORE: the `ifdef branch was taken, so skip this `else body
        // up to the matching `endif (nesting-aware).
        int ifdefCount = 0;
        while (true) {
            if (currTok.Kind == TokenKind.EOF) {
                break;
            }
            prevTok = currTok;
            currTok = tknzr.Next();
            if (currTok.Value == "ifdef") {
                ifdefCount++;
            } else if (currTok.Value == "endif") {
                if (ifdefCount > 0) {
                    ifdefCount--;
                } else {
                    break;
                }
            }
        }
        //ifdefStack.Pop();
    } else if (currTok.Value == "endif") {
        // PARSE: nothing to do — fall through and consume the token.
        //ifdefStack.Pop();
    } else if (currTok.Value == "define") {
        // Record the defined symbol for later `ifdef checks.
        prevTok = currTok;
        currTok = tknzr.Next();
        defineSet.Add(currTok.Value);
    }
    // Advance past the directive so the caller resumes on fresh input.
    prevTok = currTok;
    currTok = tknzr.Next();
    return(true);
}
/// <summary>
/// Parses one DFF instantiation: an optional '#'-parameter (the flop width,
/// either '#(N)' or the bare '#N' form), the instance name, then the named
/// port list until the '.clk', '.q' and '.din' connections are all found
/// (or the statement ends at ';'). Finally skips to the terminating ';'.
/// </summary>
private DffInstance ParseDffInstance(StringTokenizer tknzr, Token possibleParameter, DFFModule dffType)
{
    DffInstance dff = new DffInstance(dffType);
    Token currToken = tknzr.Next();
    dff.ParameterBegin = possibleParameter;
    dff.ParametersExist = false;
    dff.ParametersNamed = false;
    dff.ParamsInParens = false;
    if (currToken.Value == "#") {
        dff.ParametersExist = true;
        dff.ParameterBegin = currToken;
        currToken = tknzr.Next();
        if (currToken.Value == "(") {
            // '#(value)' form.
            dff.ParamsInParens = true;
            dff.ParameterBegin = currToken;
            Token tempParamList;
            string paramValue = tknzr.ParseToCloseParenthesis(out tempParamList);
            dff.ParameterList = tempParamList;
            dff.ParameterEnd = tempParamList;
            try {
                dff.Size = Convert.ToInt32(paramValue);
            } catch (FormatException) {
                // Non-numeric parameter value: classify it instead.
                if (paramValue.StartsWith("`")) {
                    // TODO: Deal with `DEFINE'd values
                    dff.Size = 1;
                } else if (paramValue.StartsWith(".")) {
                    dff.ParametersNamed = true;
                } else {
                    // Symbolic parameter (e.g. an identifier) — keep it raw.
                    dff.ParametersExist = true;
                    dff.IsParameterized = true;
                    dff.Parameter = paramValue;
                    //throw new InvalidDataException("Wah. I don't get it. '" + currToken.Value + "' isn't a number.");
                }
            }
        } else {
            dff.ParameterEnd = currToken;
            try {
                dff.Size = Convert.ToInt32(currToken.Value); // In case they use the weird '#12' notation instead of '#(12)'
            } catch (FormatException) {
                if (currToken.Value == "`") {
                    // TODO: Deal with `DEFINE'd values
                    dff.Size = 1;
                } else {
                    throw new InvalidDataException("Wah. I don't get it. '" + currToken.Value + "' isn't a number.");
                }
            }
        }
        //tknzr.Next();
        currToken = tknzr.Next();
        dff.InstanceName = currToken.Value;
    } else {
        // No '#' parameter: a single-bit flop; the token is the instance name.
        dff.Size = 1;
        dff.InstanceName = currToken.Value;
    }
    // Advance to the opening '(' of the port list.
    while (currToken.Value != "(") {
        currToken = tknzr.Next();
    }
    //currToken = tknzr.Next();
    dff.PortList = currToken;
    // Collect the three mandatory named ports; bail out early on ';'.
    while (dff.ClockPort == null || dff.DPort == null || dff.QPort == null) {
        currToken = tknzr.Next();
        string word = currToken.Value;
        if (word == ";") {
            break;
        }
        if (word == ".") {
            currToken = tknzr.Next();
            switch (currToken.Value) {
                case "clk": {
                    tknzr.Next();
                    Token tempToken;
                    dff.ClockPort = tknzr.ParseToCloseParenthesis(out tempToken);
                    break;
                }
                case "q": {
                    tknzr.Next();
                    Token tempToken;
                    dff.QPort = tknzr.ParseToCloseParenthesis(out tempToken);
                    break;
                }
                case "din": {
                    tknzr.Next();
                    Token tempToken;
                    dff.DPort = tknzr.ParseToCloseParenthesis(out tempToken);
                    break;
                }
            }
        }
    }
    // Skip any remaining ports up to the end of the statement.
    while (currToken.Value != ";" && currToken.Kind != TokenKind.EOF) {
        currToken = tknzr.Next();
    }
    return(dff);
}
/// <summary>
/// Parses one Verilog module body from <paramref name="tknzr"/> (positioned
/// just after the "module" keyword): records parameters, DFF instances,
/// known module instances, and "possible" instances (an unknown word at the
/// start of a line) until "endmodule" or EOF. Precompiler directives are
/// delegated to RunPrecompiler.
/// </summary>
private VerilogModule ParseModuleDeclaration(StringTokenizer tknzr, VerilogFile vFile)
{
    Token prevTok = tknzr.Next();
    Token twoPrevTok = prevTok;
    Token currTok = tknzr.Next();
    // prevTok here is the module name token.
    VerilogModule vMod = new VerilogModule(vFile.FileName, prevTok.Value);
    bool headerParsed = false;

    while (currTok.Value != "endmodule" && currTok.Kind != TokenKind.EOF) {
        // Only a token at the start of a line can begin a declaration.
        if (prevTok.Kind == TokenKind.EOL) {
            if (!RunPrecompiler(tknzr, ref prevTok, ref currTok)) {
                if (currTok.Value == "parameter") {
                    // PARAMETER FOUND
                    ParseParameter(tknzr, vMod.Parameters);
                } else if (this.project.IsDff(currTok.Value)) {
                    // DFF INSTANCE FOUND
                    DffInstance dffInst = ParseDffInstance(tknzr, currTok, project.GetDffType(currTok.Value));
                    if (dffInst == null) {
                        throw new InvalidDataException("DFF Library was unable to instantiate from type retrieved from project.");
                    }
                    vMod.AddDffInstance(dffInst);
                } else if (this.project.IsModule(currTok.Value)) {
                    // MODULE INSTANCE FOUND
                    VerilogModuleInstance vModInst = ParseModuleInstance(vMod, tknzr, currTok, project.GetModule(currTok.Value));
                    if (vModInst == null) {
                        throw new InvalidDataException("Error instantiating module from type retrieved from project.");
                    }
                    vMod.AddModuleInstance(vModInst);
                } else if (headerParsed && !this.project.IsKeyword(currTok.Value) && currTok.Kind == TokenKind.Word) {
                    // POSSIBLE MODULE, NOT YET PARSED
                    /* OPTIMIZATION TODO: Change tokenizer to ignore everything
                     * between certain keywords and ';' (e.g. "assign x = ...;")
                     * in case of weird indenting; minimizes false Possibles. */
                    // Deep copy so the main scan position stays untouched.
                    StringTokenizer tempTknzr = new StringTokenizer(tknzr);
                    Token nameTok = tempTknzr.Next();
                    bool paramExist = false;
                    bool paramNamed = false;
                    Token paramList = null;
                    if (nameTok.Value == "#") {
                        // Run through parameter lists until parens all closed.
                        paramExist = true;
                        paramList = tempTknzr.Next(); // token after '#'
                        if (paramList.Value == "(") {
                            int parenPairs = 1;
                            while (parenPairs > 0) {
                                nameTok = tempTknzr.Next();
                                if (nameTok.Value.Contains(".")) {
                                    paramNamed = true; // ".name(value)" binding style
                                }
                                if (nameTok.Value == "(") {
                                    parenPairs++;
                                } else if (nameTok.Value == ")") {
                                    parenPairs--;
                                }
                            }
                        }
                        nameTok = tempTknzr.Next();
                    } else {
                        paramList = currTok;
                    }
                    // Run through the in/out list to the end of the
                    // instantiation; tempTwoPrevTok ends at the ')'.
                    Token tempCurrTok = tempTknzr.Next();
                    Token tempPrevTok = tempCurrTok;
                    Token tempTwoPrevTok = tempCurrTok;
                    while (tempCurrTok.Value != ";") {
                        tempTwoPrevTok = tempPrevTok;   // At ')'
                        tempPrevTok = tempCurrTok;      // At ';'
                        tempCurrTok = tempTknzr.Next(); // After ';'
                    }
                    vMod.AddPossibleInstance(currTok, nameTok.Value, tempTwoPrevTok, paramExist, paramNamed, paramList);
                }
            }
        }
        twoPrevTok = prevTok;
        prevTok = currTok;
        currTok = tknzr.Next();
        // The first ';' ends the module header / port list.
        if (!headerParsed && currTok.Value == ";") {
            vMod.InOutListEnd = twoPrevTok;
            vMod.PostHeader = tknzr.Next();
            twoPrevTok = prevTok;
            // NOTE(review): currTok is ";" here, so this ternary never
            // reassigns prevTok — confirm whether ')' was the intent.
            prevTok = (currTok.Value == ")") ? currTok : prevTok;
            currTok = vMod.PostHeader;
            headerParsed = true;
        }
    }
    vMod.PrevEndModule = prevTok;
    return vMod;
}
/// <summary>
/// Loads a GEDCOM tree from a stream, line by line. Each line is split into a
/// level number, an optional @XRef@ pointer, a tag name and a value; level-0
/// lines start new records and deeper levels attach tags to the current
/// record/tag chain. Progress is reported through fTree.OnProgress, if assigned.
/// </summary>
/// <param name="fileStream">Underlying stream; used only for Length/Position to compute progress.</param>
/// <param name="reader">Reader over the same stream in the source encoding.</param>
private void LoadFromStream(Stream fileStream, StreamReader reader)
{
    fTree.State = GEDCOMState.osLoading;
    try {
        ProgressEventHandler progressHandler = fTree.OnProgress;

        fSourceEncoding = DEFAULT_ENCODING;
        fEncodingState = EncodingState.esUnchecked;

        long fileSize = fileStream.Length;
        int progress = 0;

        GEDCOMCustomRecord curRecord = null;
        GEDCOMTag curTag = null;

        int lineNum = 0;
        while (reader.Peek() != -1) {
            lineNum++;
            string str = reader.ReadLine();
            str = GEDCOMUtils.TrimLeft(str);
            if (str.Length == 0) {
                continue;
            }

            if (!ConvertHelper.IsDigit(str[0])) {
                // Line does not start with a level number — attempt FTB-specific repair.
                FixFTBLine(curRecord, curTag, lineNum, str);
            } else {
                int tagLevel;
                string tagXRef = "", tagName, tagValue = "";

                try {
                    var strTok = new StringTokenizer(str);
                    strTok.RecognizeDecimals = false;
                    strTok.IgnoreWhiteSpace = false;
                    strTok.RecognizeIdents = true;

                    var token = strTok.Next(); // already trimmed
                    if (token.Kind != TokenKind.Number) {
                        // syntax error
                        throw new EGEDCOMException(string.Format("The string {0} doesn't start with a valid number", str));
                    }
                    tagLevel = (int)token.ValObj;

                    token = strTok.Next();
                    if (token.Kind != TokenKind.WhiteSpace) {
                        // syntax error
                    }

                    token = strTok.Next();
                    if (token.Kind == TokenKind.Symbol && token.Value[0] == '@') {
                        // Accumulate the XRef pointer between the two '@' delimiters.
                        token = strTok.Next();
                        // NOTE(review): with '&&' this loop stops at the FIRST symbol token, '@' or
                        // not — an XRef containing other symbol characters would be cut short; verify.
                        while (token.Kind != TokenKind.Symbol && token.Value[0] != '@') {
                            tagXRef += token.Value;
                            token = strTok.Next();
                        }
                        // FIXME: check for errors
                        //throw new EGEDCOMException(string.Format("The string {0} contains an unterminated XRef pointer", str));
                        //throw new EGEDCOMException(string.Format("The string {0} is expected to start with an XRef pointer", str));

                        token = strTok.Next();
                        strTok.SkipWhiteSpaces();
                    }

                    token = strTok.CurrentToken;
                    if (token.Kind != TokenKind.Word && token.Kind != TokenKind.Ident) {
                        // syntax error
                    }
                    tagName = token.Value.ToUpperInvariant();

                    token = strTok.Next();
                    if (token.Kind == TokenKind.WhiteSpace) {
                        // Everything after the first space following the tag is its value.
                        tagValue = strTok.GetRest();
                    }
                } catch (EGEDCOMException ex) {
                    throw new EGEDCOMException("Syntax error in line " + Convert.ToString(lineNum) + ".\r" + ex.Message);
                }

                // convert codepages
                if (!string.IsNullOrEmpty(tagValue) && fEncodingState == EncodingState.esChanged) {
                    tagValue = ConvertStr(fSourceEncoding, tagValue);
                }

                if (tagLevel == 0) {
                    if (curRecord == fTree.Header && fEncodingState == EncodingState.esUnchecked) {
                        // beginning recognition of the first non-header record:
                        // check for additional versions of the code page
                        DefineEncoding(reader);
                    }

                    // Dispatch on the record tag to create the matching record type.
                    if (tagName == "INDI") {
                        curRecord = fTree.AddRecord(new GEDCOMIndividualRecord(fTree, fTree, "", ""));
                    } else if (tagName == "FAM") {
                        curRecord = fTree.AddRecord(new GEDCOMFamilyRecord(fTree, fTree, "", ""));
                    } else if (tagName == "OBJE") {
                        curRecord = fTree.AddRecord(new GEDCOMMultimediaRecord(fTree, fTree, "", ""));
                    } else if (tagName == "NOTE") {
                        curRecord = fTree.AddRecord(new GEDCOMNoteRecord(fTree, fTree, "", tagValue));
                    } else if (tagName == "REPO") {
                        curRecord = fTree.AddRecord(new GEDCOMRepositoryRecord(fTree, fTree, "", ""));
                    } else if (tagName == "SOUR") {
                        curRecord = fTree.AddRecord(new GEDCOMSourceRecord(fTree, fTree, "", ""));
                    } else if (tagName == "SUBN") {
                        curRecord = fTree.AddRecord(new GEDCOMSubmissionRecord(fTree, fTree, "", ""));
                    } else if (tagName == "SUBM") {
                        curRecord = fTree.AddRecord(new GEDCOMSubmitterRecord(fTree, fTree, "", ""));
                    } else if (tagName == "_GROUP") {
                        curRecord = fTree.AddRecord(new GEDCOMGroupRecord(fTree, fTree, "", ""));
                    } else if (tagName == "_RESEARCH") {
                        curRecord = fTree.AddRecord(new GEDCOMResearchRecord(fTree, fTree, "", ""));
                    } else if (tagName == "_TASK") {
                        curRecord = fTree.AddRecord(new GEDCOMTaskRecord(fTree, fTree, "", ""));
                    } else if (tagName == "_COMM") {
                        curRecord = fTree.AddRecord(new GEDCOMCommunicationRecord(fTree, fTree, "", ""));
                    } else if (tagName == "_LOC") {
                        curRecord = fTree.AddRecord(new GEDCOMLocationRecord(fTree, fTree, "", ""));
                    } else if (tagName == "HEAD") {
                        curRecord = fTree.Header;
                    } else if (tagName == "TRLR") {
                        // Trailer record: stop reading.
                        break;
                    } else {
                        curRecord = null;
                    }

                    if (curRecord != null && tagXRef != "") {
                        curRecord.XRef = tagXRef;
                    }
                    curTag = null;
                } else {
                    if (curRecord != null) {
                        if (curTag == null || tagLevel == 1) {
                            curTag = curRecord.AddTag(tagName, tagValue, null);
                        } else {
                            // Climb up the tag tree until the new level fits under the current tag.
                            while (tagLevel <= curTag.Level) {
                                curTag = (curTag.Parent as GEDCOMTag);
                            }
                            curTag = curTag.AddTag(tagName, tagValue, null);
                        }
                    }
                }
            }

            if (progressHandler != null) {
                int newProgress = (int)Math.Min(100, (fileStream.Position * 100.0f) / fileSize);
                if (progress != newProgress) {
                    progress = newProgress;
                    progressHandler(fTree, progress);
                }
            }
        }
    } finally {
        fTree.State = GEDCOMState.osReady;
    }
}
/// <summary>
/// Checks whether <paramref name="str"/> is a valid person line in Konovalov's
/// numbering format, e.g. "11-21/1 (test+2, test).", and extracts the identifier.
/// </summary>
/// <param name="str">Source line to examine.</param>
/// <returns>The personal identifier, or null if the line is not a valid Konovalov line.</returns>
public static string IsPersonLine_Konovalov(string str)
{
    // "11-21/1 (test+2, test)."
    StringTokenizer tokenizer = new StringTokenizer(str);
    tokenizer.RecognizeDecimals = false;

    // Leading person number is mandatory.
    Token tok = tokenizer.Next();
    if (tok.Kind != TokenKind.Number) {
        return null;
    }
    string id = tok.Value;

    tok = tokenizer.Next();

    // Optional "-<parent number>" part, possibly followed by "/<marriage number>".
    if (tok.Kind == TokenKind.Symbol && tok.Value == "-") {
        id += tok.Value;
        tok = tokenizer.Next();
        if (tok.Kind != TokenKind.Number) {
            return null;
        }
        id += tok.Value;

        tok = tokenizer.Next();
        if (tok.Kind == TokenKind.Symbol && tok.Value == "/") {
            id += tok.Value;
            tok = tokenizer.Next();
            bool validMarriage = tok.Kind == TokenKind.Number
                || (tok.Kind == TokenKind.Symbol && tok.Value == "?");
            if (!validMarriage) {
                return null;
            }
            id += tok.Value;
            tok = tokenizer.Next();
        }
    }

    // Optional single space before the comment part.
    if (tok.Kind == TokenKind.WhiteSpace && tok.Value == " ") {
        id += tok.Value;
        tok = tokenizer.Next();
    }

    // Optional "(...)" comment; must close before end of line/input.
    if (tok.Kind == TokenKind.Symbol && tok.Value == "(") {
        id += tok.Value;
        do {
            tok = tokenizer.Next();
            if (tok.Kind == TokenKind.EOL || tok.Kind == TokenKind.EOF) {
                return null;
            }
            id += tok.Value;
        } while (tok.Kind != TokenKind.Symbol || tok.Value != ")");
        tok = tokenizer.Next();
    }

    // Terminating period is mandatory.
    if (tok.Kind != TokenKind.Symbol || tok.Value != ".") {
        return null;
    }
    id += tok.Value;

    return id;
}
/// <summary>
/// Parses a GEDCOM date string into this date's fields: approximation prefix,
/// calendar escape ("@#D...@"), day, month, year with optional "/NN" modifier
/// and "B.C." suffix. Supports both the GEDCOM-standard "dd MON yyyy" form and
/// the dotted "dd.mm.yyyy" system form.
/// </summary>
/// <param name="strValue">Raw GEDCOM date value, e.g. "ABT 12 JAN 1900/01".</param>
/// <returns>The unconsumed remainder of the string after the parsed date.</returns>
public override string ParseString(string strValue)
{
    GEDCOMFormat format = GEDCOMProvider.GetGEDCOMFormat(Owner);

    // Reset all fields to their defaults before parsing.
    fApproximated = GEDCOMApproximated.daExact;
    fCalendar = GEDCOMCalendar.dcGregorian;
    fYear = UNKNOWN_YEAR;
    fYearBC = false;
    fYearModifier = "";
    fMonth = "";
    fDay = 0;

    if (!string.IsNullOrEmpty(strValue)) {
        if (format == GEDCOMFormat.gf_Ahnenblatt) {
            strValue = PrepareAhnenblattDate(strValue);
        }

        var strTok = new StringTokenizer(strValue);
        strTok.IgnoreWhiteSpace = false;
        strTok.Next();
        strTok.SkipWhiteSpaces();

        // extract approximated
        var token = strTok.CurrentToken;
        if (token.Kind == TokenKind.Word) {
            string su = token.Value.ToUpperInvariant();
            int idx = SysUtils.IndexOf(GEDCOMDateApproximatedArray, su);
            if (idx >= 0) {
                fApproximated = (GEDCOMApproximated)idx;
                strTok.Next();
                strTok.SkipWhiteSpaces();
            }
        }

        // extract escape
        token = strTok.CurrentToken;
        if (token.Kind == TokenKind.Symbol && token.Value[0] == '@') {
            // Accumulate the whole "@#D...@" escape up to the closing '@'.
            var escapeStr = token.Value;
            do {
                token = strTok.Next();
                escapeStr += token.Value;
            } while (token.Kind != TokenKind.Symbol || token.Value[0] != '@');
            // FIXME: check for errors

            int idx = SysUtils.IndexOf(GEDCOMDateEscapeArray, escapeStr);
            if (idx >= 0) {
                fCalendar = (GEDCOMCalendar)idx;
            }

            strTok.Next();
            strTok.SkipWhiteSpaces();
        }

        // extract day
        token = strTok.CurrentToken;
        if (token.Kind == TokenKind.Number && token.Value.Length <= 2) {
            fDay = (byte)(int)token.ValObj;
            token = strTok.Next();
        }

        // extract delimiter: a space means GEDCOM-standard form, a dot means system form
        if (token.Kind == TokenKind.WhiteSpace && token.Value[0] == ' ') {
            fDateFormat = GEDCOMDateFormat.dfGEDCOMStd;
            token = strTok.Next();
        } else if (token.Kind == TokenKind.Symbol && token.Value[0] == '.') {
            fDateFormat = GEDCOMDateFormat.dfSystem;
            token = strTok.Next();
        }

        // extract month
        string[] monthes = GetMonthNames(fCalendar);
        if (token.Kind == TokenKind.Word) {
            string mth = token.Value;
            int idx = SysUtils.IndexOf(monthes, mth);
            if (idx >= 0) {
                fMonth = mth;
            }
            token = strTok.Next();
        } else if (fDateFormat == GEDCOMDateFormat.dfSystem && token.Kind == TokenKind.Number) {
            // NOTE(review): a numeric month outside 1..12 would index out of range
            // here — verify input is validated upstream.
            int idx = (int)token.ValObj;
            fMonth = monthes[idx - 1];
            token = strTok.Next();
        }

        // extract delimiter
        if (fDateFormat == GEDCOMDateFormat.dfSystem) {
            if (token.Kind == TokenKind.Symbol && token.Value[0] == '.') {
                token = strTok.Next();
            }
        } else {
            if (token.Kind == TokenKind.WhiteSpace && token.Value[0] == ' ') {
                token = strTok.Next();
            }
        }

        // extract year
        if (token.Kind == TokenKind.Number) {
            fYear = (short)(int)token.ValObj;
            token = strTok.Next();

            // extract year modifier ("/NN", e.g. dual years)
            if (token.Kind == TokenKind.Symbol && token.Value[0] == '/') {
                token = strTok.Next();
                if (token.Kind != TokenKind.Number) {
                    // error
                }
                fYearModifier = token.Value;
                token = strTok.Next();
            }

            // extract bc/ad: expects the four tokens "B" "." "C" "."
            if (token.Kind == TokenKind.Word && token.Value[0] == 'B') {
                token = strTok.Next();
                if (token.Kind != TokenKind.Symbol || token.Value[0] != '.') {
                    // error
                }
                token = strTok.Next();
                if (token.Kind != TokenKind.Word || token.Value[0] != 'C') {
                    // error
                }
                token = strTok.Next();
                if (token.Kind != TokenKind.Symbol || token.Value[0] != '.') {
                    // error
                }
                strTok.Next();
                fYearBC = true;
            }
        }

        strValue = strTok.GetRest();
    }

    DateChanged();
    return(strValue);
}
/// <summary>
/// Parses a person line in Konovalov's numbering format, e.g.
/// "11-21/1 (test+2, test).", into its components: person id, parent id,
/// marriage number and the parenthesized extra data.
/// </summary>
/// <param name="str">Source line to parse.</param>
/// <returns>The parsed components plus the tokenizer position, or null if the line is invalid.</returns>
public static PersonLineRet ParsePersonLine_Konovalov(string str)
{
    // "11-21/1 (test+2, test)."
    StringTokenizer tokenizer = new StringTokenizer(str);
    tokenizer.RecognizeDecimals = false;

    string parentId = "";
    string marNum = "";
    string extData = "";

    // Leading person number is mandatory.
    Token tok = tokenizer.Next();
    if (tok.Kind != TokenKind.Number) {
        return null;
    }
    string persId = tok.Value;

    tok = tokenizer.Next();

    // Optional "-<parent number>" part, possibly followed by "/<marriage number>".
    if (tok.Kind == TokenKind.Symbol && tok.Value == "-") {
        tok = tokenizer.Next();
        if (tok.Kind != TokenKind.Number) {
            return null;
        }
        parentId = tok.Value;

        tok = tokenizer.Next();
        if (tok.Kind == TokenKind.Symbol && tok.Value == "/") {
            tok = tokenizer.Next();
            bool validMarriage = tok.Kind == TokenKind.Number
                || (tok.Kind == TokenKind.Symbol && tok.Value == "?");
            if (!validMarriage) {
                return null;
            }
            marNum = tok.Value;
            tok = tokenizer.Next();
        }
    }

    // Optional single space before the comment part.
    if (tok.Kind == TokenKind.WhiteSpace && tok.Value == " ") {
        tok = tokenizer.Next();
    }

    // Optional "(...)" comment, captured (with parentheses) into extData;
    // it must close before end of line/input.
    if (tok.Kind == TokenKind.Symbol && tok.Value == "(") {
        extData += tok.Value;
        do {
            tok = tokenizer.Next();
            if (tok.Kind == TokenKind.EOL || tok.Kind == TokenKind.EOF) {
                return null;
            }
            extData += tok.Value;
        } while (tok.Kind != TokenKind.Symbol || tok.Value != ")");
        tok = tokenizer.Next();
    }

    // Terminating period is mandatory.
    if (tok.Kind != TokenKind.Symbol || tok.Value != ".") {
        return null;
    }

    int pos = tokenizer.Position;
    return new PersonLineRet(persId, parentId, marNum, extData, pos);
}
/// <summary>
/// Flattens the expression in the editor onto a single line: drops line breaks,
/// collapses whitespace to single spaces, glues "(" to the following token and
/// ")" to the preceding one, and joins consecutive Unknown tokens (e.g. the
/// '>' and '=' of ">=") without a separating space.
/// </summary>
private void buttonFlatten_Click(object sender, EventArgs e)
{
    syntaxEditBES.SyntaxPaint.DrawColumnsIndent = false;

    StringTokenizer tok = new StringTokenizer(syntaxEditBES.Text);
    tok.IgnoreWhiteSpace = true;
    tok.SymbolChars = new char[] { '(', ')' };

    syntaxEditBES.Text = "";

    string parsedResults = "";
    bool previousTokenIsSymbol = false; // true right after an Unknown token (June 22nd 2010 ">=" fix)
    Token token;
    do {
        token = tok.Next();

        // Compare token kinds directly instead of via Kind.ToString() — same
        // behavior, but type-safe and not dependent on enum member spelling.
        if (token.Kind == TokenKind.EOL) {
            // Drop line breaks entirely.
        } else if (token.Kind == TokenKind.Symbol && token.Value == "(") {
            // No trailing space after an opening paren.
            parsedResults = parsedResults + token.Value;
        } else if (token.Kind == TokenKind.Symbol && token.Value == ")") {
            // Glue the closing paren to the previous token.
            parsedResults = parsedResults.TrimEnd(' ') + token.Value + " ";
        } else {
            // Join a run of Unknown tokens without a space so ">=" stays ">=".
            if (token.Kind == TokenKind.Unknown && previousTokenIsSymbol) {
                parsedResults = parsedResults.TrimEnd() + token.Value;
            } else {
                parsedResults = parsedResults + token.Value + " ";
            }
            previousTokenIsSymbol = (token.Kind == TokenKind.Unknown);
        }
    } while (token.Kind != TokenKind.EOF);

    syntaxEditBES.Text = parsedResults;
    buttonIndentLW.Enabled = true;
    buttonIndent.Enabled = true;
    buttonFlatten.Enabled = false;
}
/// <summary>
/// Filters the database-object browser according to a dotted jump string
/// ("db.schema.object.column" style). The count of kept tokens (identifiers and
/// '.' separators) selects which filter pattern is applied to the _bsObjects
/// binding source; invalid input gets the unmatchable filter "( Type = '@@@')".
/// </summary>
/// <param name="jumpString">User-typed navigation string, e.g. "master.dbo.MyTable.".</param>
private void JumpToDbObject(string jumpString)
{
    _bsObjects.Filter = String.Empty;
    _bsObjects.Sort = String.Empty;
    _bsObjects.DataSource = _tblObjects;

    string filterStr = String.Empty;
    string dbName = _defaultDatabase;
    // NOTE(review): schemaName and isInvalidFilter are assigned throughout but never
    // read in this method as shown — possibly leftovers; verify before removing.
    string schemaName = "???";
    bool isInvalidFilter = false;

    // Tokenize the jump string, keeping only non-symbol tokens and '.' separators.
    StringTokenizer tok = new StringTokenizer(jumpString);
    IList <Token> tokens = new List <Token>();
    Token token = null;
    do {
        token = tok.Next();
        if (token.Kind == TokenKind.EOL ||
            token.Kind == TokenKind.EOF ||
            token.Kind == TokenKind.Unknown ||
            (token.Kind == TokenKind.Symbol && token.Value != ".")
            ) {
            continue;
        }
        tokens.Add(token);
    } while (token.Kind != TokenKind.EOF);

    // NOTE(review): all filters below splice user input into DataView filter
    // expressions by concatenation; a value containing a quote breaks the filter.
    if (tokens.Count == 0) {
        filterStr = " ( Type <> 'Param' AND Type <> 'Col' AND Type <> 'Usr')";
        // + " AND ( Catalog = '" + _defaultDatabase + "' OR Catalog = '') "
        // + " AND ( ( Schema = 'dbo') OR (Schema = '') )";
        schemaName = "dbo";
    }
    // 1- Object1
    else if (tokens.Count == 1) {
        filterStr = " ( Type <> 'Param' AND Type <> 'Col' ) "
                    + " AND ( Name Like '" + tokens[0].Value + "%') ";
        //+ " AND ( Catalog = '" + _defaultDatabase + "')"
        //+ " AND ( Schema = 'dbo')";
        schemaName = "dbo";
    }
    // 2- Object1.
    else if (tokens.Count == 2) {
        Token lastToken = tokens[1];
        if (lastToken.Value == ".") {
            Token parentToken = tokens[0];
            if (IsValidDatabase(parentToken.Value)) {
                // "db." — list that database's users.
                dbName = parentToken.Value;
                filterStr = "( Type = 'Usr' )";
            } else if (IsValidUser(parentToken.Value)) {
                // "schema." — list that schema's objects in the default database.
                dbName = _defaultDatabase;
                filterStr = "( Catalog = '" + _defaultDatabase + "' OR Catalog = '') "
                            + " AND ( Schema = '" + parentToken.Value + "' OR Schema = '') "
                            + " AND ( Type <> 'Param' AND Type <> 'Col' AND Type <> 'Usr' AND Type <> 'Db') ";
                schemaName = parentToken.Value;
            } else {
                // "object." — list its parameters/columns.
                dbName = _defaultDatabase;
                // NOTE(review): "( Schema = 'dbo' ) OR ( Schema = '')" is not wrapped in
                // parentheses against the surrounding ANDs, so the OR binds the whole
                // left-hand expression — verify intent (other branches parenthesize it).
                filterStr = "ParentName = '" + parentToken.Value + "' "
                            + " AND ( Catalog = '" + _defaultDatabase + "' OR Catalog = '') "
                            + " AND ( Schema = 'dbo' ) OR ( Schema = '')"
                            + " AND ( Type = 'Param' OR Type = 'Col' ) ";
                schemaName = "dbo";
            }
        } else {
            dbName = _defaultDatabase;
            // Invalid filter
            filterStr = " ( Type = '@@@')";
            isInvalidFilter = true;
        }
    }
    // 3- Object1.Object2, Object1..
    else if (tokens.Count == 3) {
        Token lastToken = tokens[2];
        // 3.1 - Object1..
        if (lastToken.Value == ".") {
            Token parentToken = tokens[0];
            if (IsValidDatabase(parentToken.Value)) {
                dbName = parentToken.Value;
                filterStr = "( Catalog = '" + dbName + "' OR Catalog = '') "
                            + " AND ( Schema = 'dbo' OR Schema = '') "
                            + " AND ( Type <> 'Param' AND Type <> 'Col' AND Type <> 'Db' AND Type <> 'Usr') ";
                schemaName = "dbo";
            } else {
                dbName = _defaultDatabase;
                // Invalid filter
                filterStr = " ( Type = '@@@')";
                isInvalidFilter = true;
            }
        }
        // 3.2 - Object1.Object2
        else {
            Token parentToken = tokens[0]; // Object1
            if (IsValidDatabase(parentToken.Value)) {
                dbName = parentToken.Value;
                filterStr = "Name Like '" + tokens[2].Value + "%' "
                            + " AND ( Type = 'Usr')";
                schemaName = "dbo";
            } else if (IsValidUser(parentToken.Value)) {
                dbName = _defaultDatabase;
                filterStr = "( Catalog = '" + dbName + "' OR Catalog = '') "
                            + " AND ( Schema = '" + parentToken.Value + "' OR Schema = '') "
                            + " AND ( Type <> 'Param' AND Type <> 'Col' AND Type <> 'Db' AND Type <> 'Usr') "
                            + " AND ( Name Like '" + tokens[2].Value + "%') ";
                schemaName = parentToken.Value;
            } else {
                dbName = _defaultDatabase;
                filterStr = "ParentName = '" + parentToken.Value + "' "
                            + " AND ( Catalog = '" + dbName + "' OR Catalog = '') "
                            + " AND ( Schema = 'dbo' OR Schema = '') "
                            + " AND ( Type = 'Param' OR Type = 'Col') "
                            + " AND ( Name Like '" + tokens[2].Value + "%') ";
                schemaName = "dbo";
            }
        }
    }
    // 4- Object1.Object2.
    else if (tokens.Count == 4) {
        if (tokens[3].Value == ".") //.
        {
            if (IsValidDatabase(tokens[0].Value) && IsValidUser(tokens[2].Value)) // Object1 and Object2
            {
                dbName = tokens[0].Value;
                filterStr = "( Catalog = '" + dbName + "' OR Catalog = '') "
                            + " AND ( Schema = '" + tokens[2].Value + "' OR Schema = '')"
                            + " AND ( Type <> 'Param' AND Type <> 'Col' AND Type <> 'Usr' AND Type <> 'Db' ) ";
                schemaName = tokens[2].Value;
            } else if (IsValidUser(tokens[0].Value)) {
                dbName = _defaultDatabase;
                filterStr = "( Catalog = '" + dbName + "' OR Catalog = '') "
                            + " AND ( Schema = '" + tokens[0].Value + "' OR Schema = '')"
                            + " AND ( Type = 'Param' OR Type = 'Col')"
                            + " AND ( ParentName = '" + tokens[2].Value + "' ) ";
                schemaName = tokens[2].Value;
            } else {
                dbName = _defaultDatabase;
                // Invalid filter
                filterStr = " ( Type = '@@@')";
                isInvalidFilter = true;
            }
        } else {
            dbName = _defaultDatabase;
            // Invalid filter
            filterStr = " ( Type = '@@@')";
            isInvalidFilter = true;
        }
    }
    // 5- Object1.Object2.Object3
    else if (tokens.Count == 5) {
        if (IsValidDatabase(tokens[0].Value) && IsValidUser(tokens[2].Value)) // Object1 and Object2
        {
            dbName = tokens[0].Value;
            filterStr = "( Catalog = '" + dbName + "' OR Catalog = '') "
                        + " AND ( Schema = '" + tokens[2].Value + "' OR Schema = '')"
                        + " AND ( Type <> 'Param' AND Type <> 'Col' AND Type <> 'Usr' AND Type <> 'Db' ) "
                        + " AND ( Name Like '" + tokens[4].Value + "%') ";
            schemaName = tokens[2].Value;
        } else {
            dbName = _defaultDatabase;
            // Invalid filter
            filterStr = " ( Type = '@@@')";
            isInvalidFilter = true;
        }
    }
    // 6- Object1.Object2.Object3.
    else if (tokens.Count == 6) {
        if (tokens[5].Value == ".") {
            if (IsValidDatabase(tokens[0].Value) && IsValidUser(tokens[2].Value)) // Object1 and Object2
            {
                dbName = tokens[0].Value;
                filterStr = "( Catalog = '" + dbName + "' OR Catalog = '') "
                            + " AND ( Schema = '" + tokens[2].Value + "' OR Schema = '')"
                            + " AND ( Type = 'Param' OR Type = 'Col')"
                            + " AND ( ParentName = '" + tokens[4].Value + "') ";
                schemaName = tokens[2].Value;
            } else {
                dbName = _defaultDatabase;
                // Invalid filter
                filterStr = " ( Type = '@@@')";
                isInvalidFilter = true;
            }
        } else {
            dbName = _defaultDatabase;
            // Invalid filter
            filterStr = " ( Type = '@@@')";
            isInvalidFilter = true;
        }
    }
    // 7- Object1.Object2.Object3.Object4
    else if (tokens.Count == 7) {
        if (IsValidDatabase(tokens[0].Value) && IsValidUser(tokens[2].Value)) // Object1 and Object2
        {
            dbName = tokens[0].Value;
            filterStr = "( Catalog = '" + dbName + "' OR Catalog = '') "
                        + " AND ( Schema = '" + tokens[2].Value + "' OR Schema = '')"
                        + " AND ( Type = 'Param' OR Type = 'Col')"
                        + " AND ( ParentName = '" + tokens[4].Value + "') "
                        + " AND ( Name Like '" + tokens[6].Value + "%') ";
        } else {
            dbName = _defaultDatabase;
            // Invalid filter
            filterStr = " ( Type = '@@@')";
            isInvalidFilter = true;
        }
    } else {
        dbName = _defaultDatabase;
        // Invalid filter
        filterStr = " ( Type = '@@@')";
        isInvalidFilter = true;
    }

    // Switch the code-completion context when the target database changed.
    if (_currentDatabase != dbName) {
        _currentDatabase = dbName;
        PopulateCodeCompletionList();
    }

    if (_bsObjects.DataSource == null || _tblObjects == null) {
        this.Hide();
        return;
    }

    _bsObjects.Filter = filterStr;
    _bsObjects.Sort = "Order ASC, Name ASC";
    if (_bsObjects.Count > 0) {
        _bsObjects.Position = 0;
    }
}
/// <summary>
/// Builds an HTML snippet of <paramref name="text"/> with occurrences of the
/// query words highlighted, keeping up to 10 words of context around each hit
/// and eliding everything else with "....". (Name kept as-is — "highlistString"
/// is the established public identifier.)
/// </summary>
/// <param name="text">Source text to scan.</param>
/// <param name="queryText">Query whose processed words should be highlighted.</param>
/// <returns>The highlighted snippet, or the first 100 characters of the text if nothing matched.</returns>
public String highlistString(String text, String queryText)
{
    List <String> queryWords = TextProcessor.processText(queryText).Split(' ').ToList();
    StringTokenizer tokenizer = new StringTokenizer(text);
    tokenizer.IgnoreWhiteSpace = false;

    String ret = "";
    const int numToKeep = 10;                       // context window size, in words
    int addMore = 0;                                // words still to emit after the last hit
    List <String> prevWords = new List <string>();  // trailing context buffered before a hit

    while (true) {
        Token token = tokenizer.Next();
        if (token.Kind == TokenKind.EOF) {
            break;
        }

        // Process the token text once per iteration (was computed twice).
        String processed = TextProcessor.processText(token.Value);
        if (processed.Length > 0 && queryWords.Contains(processed)) {
            // Flush the buffered context preceding this hit, separated by "....".
            if (prevWords.Count > 0) {
                ret += "....";
                foreach (String word in prevWords) {
                    ret += word + " ";
                }
            }
            prevWords = new List <string>();
            ret += method.highlight(token.Value);
            addMore = numToKeep;
        } else {
            // Skip whitespace/newlines entirely.
            if (token.Kind == TokenKind.EOL || token.Kind == TokenKind.WhiteSpace) {
                continue;
            }
            // Not currently emitting trailing context: buffer as leading context.
            if (addMore == 0) {
                prevWords.Add(token.Value);
            }
            if (prevWords.Count > numToKeep) {
                prevWords.RemoveAt(0);
            }
            // Still emitting trailing context after a hit.
            if (addMore > 0) {
                ret += " " + token.Value;
                if (addMore == 1) {
                    ret += ".....<br>";
                }
                addMore--;
            }
        }
    }

    // No hits at all: fall back to a plain 100-character preview.
    if (ret.Length == 0) {
        ret = text.Substring(0, Math.Min(text.Length, 100)) + "..";
    }
    return(ret);
}