/// <summary>
/// Parses an elseif tag: the keyword followed by a parenthesized condition.
/// </summary>
/// <param name="parser">TemplateParser</param>
/// <param name="tc">Token collection</param>
/// <returns>the parsed <see cref="ElseifTag"/>, or null when the tokens do not start an elseif tag</returns>
/// <exception cref="Exception.ParseException">Thrown when the parentheses are malformed.</exception>
public Tag Parse(TemplateParser parser, TokenCollection tc)
{
    if (tc != null && parser != null && tc.Count > 3 && Common.Utility.IsEqual(tc.First.Text, Field.KEY_ELSEIF))
    {
        if (tc[1].TokenKind == TokenKind.LeftParentheses && tc.Last.TokenKind == TokenKind.RightParentheses)
        {
            ElseifTag tag = new ElseifTag();
            TokenCollection coll = new TokenCollection();
            // Tokens between the parentheses form the condition expression.
            coll.Add(tc, 2, tc.Count - 2);
            tag.Test = parser.Read(coll);
            return tag;
        }
        else
        {
            // BUGFIX: the message previously said "if" although this parser handles elseif.
            throw new Exception.ParseException(String.Concat("syntax error near elseif:", tc), tc.First.BeginLine, tc.First.BeginColumn);
        }
    }
    return null;
}
/// <summary>
/// Determines whether the operator token at <paramref name="index"/> is a Power
/// operator whose exponent (the token at index - 1) is a whole-number constant,
/// making the power eligible for optimization.
/// </summary>
/// <param name="tokens">The token stream being inspected.</param>
/// <param name="index">Index of the candidate operator; must be at least 1 and within the collection.</param>
/// <param name="power">Receives the whole-number exponent on success; otherwise 0.</param>
/// <returns>true when the exponent is a constant whole number; otherwise false.</returns>
/// <exception cref="IndexOutOfRangeException">Thrown when index is outside [1, Count - 1].</exception>
private static bool CanOptimizeWholePower(TokenCollection tokens, int index, out int power)
{
    if (index < 1 || index > tokens.Count - 1)
    {
        throw new IndexOutOfRangeException();
    }
    power = 0;
    var op = tokens[index] as OperatorToken;
    if (op == null || op.Operator != Operator.Power)
    {
        // Not a power operator at all.
        return false;
    }
    var powerValue = tokens[index - 1] as ValueToken;
    if (powerValue == null)
    {
        // Not a constant, can't optimize
        return false;
    }
    // Make sure the power is actually whole
    // NOTE(review): double.Epsilon is the smallest positive double, so this is
    // effectively an exact-equality test — confirm a larger tolerance isn't intended.
    if (Math.Abs(powerValue.Value - Math.Truncate(powerValue.Value)) <= double.Epsilon)
    {
        power = (int)Math.Truncate(powerValue.Value);
        return true;
    }
    return false;
}
/// <summary>
/// Compiles a postfix token stream into a DynamicMethod with the signature
/// double Expression(double[] args, Calculator calc) and wraps it in a
/// <see cref="CompiledExpression"/>.
/// </summary>
/// <param name="tokens">Postfix-ordered tokens; each token emits its own IL via EmitIl.</param>
/// <param name="originalInfix">The original infix text, stored on the result.</param>
/// <param name="optimizeGraphingArg">Forwarded to the compiler state; presumably toggles a graphing-argument optimization — confirm.</param>
/// <returns>The compiled expression; flagged constant when every token is an operator, value, or negation.</returns>
/// <exception cref="FormatException">Thrown when the final IL stack height is not exactly 1.</exception>
private static CompiledExpression CompilePostfix(TokenCollection tokens, string originalInfix, bool optimizeGraphingArg)
{
    // The following is used for outputting to a physical assembly and testing with peverify.exe
    /*
    AssemblyName an = new AssemblyName("ExpressionOutput");
    AppDomain ad = AppDomain.CurrentDomain;
    AssemblyBuilder ab = ad.DefineDynamicAssembly(an, AssemblyBuilderAccess.Save);
    ModuleBuilder mb = ab.DefineDynamicModule(an.Name, "ExpressionOutput.dll");
    TypeBuilder tb = mb.DefineType("ExpressionOutput", TypeAttributes.Public | TypeAttributes.Class);
    MethodBuilder meth = tb.DefineMethod("ExpressionOutput", MethodAttributes.Public | MethodAttributes.Static, typeof(double), new[] { typeof(double[]), typeof(Calculator) });
    */
    // Signature: double Expression(double[] args, Calculator calc)
    var expression = new DynamicMethod("Expression", typeof(double), new[] { typeof(double[]), typeof(Calculator) }, typeof(Calculator));
    ILGenerator il = expression.GetILGenerator();
    var state = new CompilerState(optimizeGraphingArg);
    // Rewrite eligible constant whole-number powers before emission.
    OptimizeWholePowers(tokens);
    bool isConstant = true;
    if (tokens.Count == 0)
    {
        // An empty expression compiles to the constant 0.
        il.Emit(OpCodes.Ldc_R8, 0D);
    }
    else
    {
        foreach (Token t in tokens)
        {
            if (!(t is OperatorToken || t is ValueToken || t is NegateToken))
            {
                // If the token is anything other than an operator or value, it is not a constant
                isConstant = false;
            }
            t.EmitIl(il, state);
        }
        // A well-formed postfix expression leaves exactly one value on the stack.
        if (state.StackHeight != 1)
        {
            throw new FormatException("A disproportionate number of operators and values are present.");
        }
    }
    // Return the value
    il.Emit(OpCodes.Ret);
    /*
    tb.CreateType();
    ab.Save("ExpressionOutput.dll");
    */
    return new CompiledExpression(expression, state, originalInfix, isConstant);
}
/// <summary>
/// Creates a token and links it behind the last token already present in
/// <paramref name="tokens"/>, maintaining the prev/next chain.
/// </summary>
public Token(TokenCollection tokens)
{
    if (tokens == null || tokens.Count < 1)
    {
        return;
    }
    Token last = tokens[tokens.Count - 1];
    prevToken = last;
    last.nextToken = this;
}
public void TypedAndLiteralAreMergedByTypedLiteral()
{
    var collection = new TokenCollection();

    collection.Add(TokenIdentity.Typed(-1, typeof(string)));
    Assert.AreEqual(1, collection.Count);

    collection.Add(TokenIdentity.Literal(-1, "foo"));
    Assert.AreEqual(2, collection.Count);

    // Adding the combined typed-literal token merges the two separate entries.
    collection.Add(TokenIdentity.TypedLiteral(-1, "foo", typeof(string)));
    Assert.AreEqual(1, collection.Count);
}
/// <summary>
/// Initializes an instance of the <see cref="Interpreter"/> class.
/// </summary>
/// <param name="tokens">The tokens to interpret.</param>
public Interpreter(TokenCollection tokens)
{
    _tokens = tokens;
    _structures = new List<string>();
    _typeDefinitions = new Dictionary<string, string>();
    Errors = new List<Error>();
    Init();
}
/// <summary>
/// Tokenizes the given reader and returns the resulting token collection.
/// </summary>
public TokenCollection Lex(LexReader src)
{
    _src = src;
    _stack = new Stack<TokenId>();
    _tokens = new TokenCollection();
    // Prime the lexer with the first character before scanning.
    _curChar = src.Read();
    Lex();
    return _tokens;
}
/// <summary>
/// Parses an end tag: a single token equal to the end keyword.
/// </summary>
/// <param name="parser">TemplateParser</param>
/// <param name="tc">Token collection</param>
/// <returns>an <see cref="EndTag"/>, or null when the tokens do not match</returns>
public Tag Parse(TemplateParser parser, TokenCollection tc)
{
    bool isEndTag = tc != null
        && tc.Count == 1
        && Common.Utility.IsEqual(tc.First.Text, Field.KEY_END);
    return isEndTag ? new EndTag() : null;
}
/// <summary>
/// Parses a variable tag: a single text-data token whose text is the variable name.
/// </summary>
/// <param name="parser">TemplateParser</param>
/// <param name="tc">Token collection</param>
/// <returns>the variable tag, or null when the tokens are not a lone text-data token</returns>
public Tag Parse(TemplateParser parser, TokenCollection tc)
{
    bool isVariable = tc != null
        && tc.Count == 1
        && tc.First.TokenKind == TokenKind.TextData;
    if (!isVariable)
    {
        return null;
    }
    VariableTag tag = new VariableTag();
    tag.Name = tc.First.Text;
    return tag;
}
/// <summary>
/// Parses a boolean literal tag: a single token whose text is "true" or "false".
/// </summary>
/// <param name="parser">TemplateParser</param>
/// <param name="tc">Token collection</param>
/// <returns>the boolean tag, or null when the tokens do not match</returns>
public Tag Parse(TemplateParser parser, TokenCollection tc)
{
    // BUGFIX: guard against a null collection — the sibling tag parsers all
    // check tc != null before dereferencing it; this one did not.
    if (tc != null && tc.Count == 1 && (tc.First.Text == "true" || tc.First.Text == "false"))
    {
        BooleanTag tag = new BooleanTag();
        tag.Value = tc.First.Text == "true";
        return tag;
    }
    return null;
}
/// <summary>
/// Parses an else tag: a single token equal to the else keyword.
/// </summary>
/// <param name="parser">TemplateParser</param>
/// <param name="tc">Token collection</param>
/// <returns>an <see cref="ElseTag"/>, or null when the tokens do not match</returns>
public Tag Parse(TemplateParser parser, TokenCollection tc)
{
    if (tc == null || parser == null || tc.Count != 1)
    {
        return null;
    }
    return Common.ParserHelpers.IsEqual(tc.First.Text, Field.KEY_ELSE) ? new ElseTag() : null;
}
/// <summary>
/// Parses an if tag. The leading if-condition is stored as the first ElseifTag
/// child; later elseif/else tags open further branches, and every other tag is
/// attached to the most recently opened branch until the end tag closes the block.
/// </summary>
/// <param name="parser">TemplateParser</param>
/// <param name="tc">Token collection</param>
/// <returns>the parsed tag, or null when the tokens do not start an if tag</returns>
/// <exception cref="Exception.ParseException">Thrown on malformed parentheses or a missing end tag.</exception>
public Tag Parse(TemplateParser parser, TokenCollection tc)
{
    if (tc != null && parser != null && tc.Count > 3 && Common.Utility.IsEqual(tc.First.Text, Field.KEY_IF))
    {
        if (tc[1].TokenKind == TokenKind.LeftParentheses && tc.Last.TokenKind == TokenKind.RightParentheses)
        {
            IfTag tag = new IfTag();
            // The if branch itself is modeled as an ElseifTag child.
            ElseifTag t = new ElseifTag();
            TokenCollection coll = new TokenCollection();
            // Tokens between the parentheses form the condition expression.
            coll.Add(tc, 2, tc.Count - 2);
            t.Test = parser.Read(coll);
            t.FirstToken = coll.First;
            //t.LastToken = coll.Last;
            tag.AddChild(t);
            // Consume tags until the matching end tag closes the if block.
            while (parser.MoveNext())
            {
                if (parser.Current is EndTag)
                {
                    tag.AddChild(parser.Current);
                    return tag;
                }
                else if (parser.Current is ElseifTag || parser.Current is ElseTag)
                {
                    // Start a new branch.
                    tag.AddChild(parser.Current);
                }
                else
                {
                    // Body content belongs to the most recently opened branch.
                    tag.Children[tag.Children.Count - 1].AddChild(parser.Current);
                }
            }
            throw new Exception.ParseException(String.Concat("if is not properly closed by a end tag:", tc), tc.First.BeginLine, tc.First.BeginColumn);
        }
        else
        {
            throw new Exception.ParseException(String.Concat("syntax error near if:", tc), tc.First.BeginLine, tc.First.BeginColumn);
        }
    }
    return null;
}
/// <summary>
/// Parses a quoted string tag: exactly three tokens — start quote, content, end quote.
/// </summary>
/// <param name="parser">TemplateParser</param>
/// <param name="tc">Token collection</param>
/// <returns>the string tag, or null when the tokens do not match</returns>
public Tag Parse(TemplateParser parser, TokenCollection tc)
{
    bool isString = tc != null
        && tc.Count == 3
        && tc.First.TokenKind == TokenKind.StringStart
        && tc[1].TokenKind == TokenKind.String
        && tc.Last.TokenKind == TokenKind.StringEnd;
    if (!isString)
    {
        return null;
    }
    StringTag tag = new StringTag();
    tag.Value = tc[1].Text;
    return tag;
}
public void CanRecieveTokens()
{
    uint white = 2, black = 1, blue = 0, green = 6, red = 1, yellow = 2;

    // Expected totals: the board holds 7 of each color plus 5 yellow before the add.
    var tokensToReturnToBoard = new TokenCollection(white, black, blue, green, red, yellow);
    var expectedTokens = new TokenCollection(7 + white, 7 + black, 7 + blue, 7 + green, 7 + red, 5 + yellow);

    _sut.AddTokensToBoard(tokensToReturnToBoard);

    Assert.AreEqual(expectedTokens, _sut.BoardTokens);
}
//=====================================================================
/// <summary>
/// Constructor
/// </summary>
/// <param name="fileItem">The project file item to edit</param>
public TokenEditorWindow(FileItem fileItem)
{
    InitializeComponent();

    // Route status bar messages to the main form's status bar label.
    sbStatusBarText.InstanceStatusBar = MainForm.Host.StatusBarTextLabel;

    // Apply the user's editor preferences and use XML highlighting for token markup.
    editor.TextEditorProperties.Font = Settings.Default.TextEditorFont;
    editor.TextEditorProperties.ShowLineNumbers = Settings.Default.ShowLineNumbers;
    editor.SetHighlighting("XML");

    // Load the token file and track list changes.
    tokens = new TokenCollection(fileItem);
    tokens.Load();
    tokens.ListChanged += new ListChangedEventHandler(tokens_ListChanged);

    // Show the file name in the title and the full path in the tooltip.
    this.Text = Path.GetFileName(fileItem.FullPath);
    this.ToolTipText = fileItem.FullPath;

    this.LoadTokens();
}
/// <summary>
/// Builds a TokenCollection from the given strings and sanity-checks its counts.
/// </summary>
protected static TokenCollection Create(string[] pTokens)
{
    TokenCollection result = new TokenCollection();
    foreach (string token in pTokens)
    {
        result.Add(token);
    }

    // Sum counts every addition; Count tracks distinct tokens only.
    Assert.AreEqual(pTokens.Length, result.Sum);
    Assert.AreEqual(pTokens.Distinct().Count(), result.Count);
    foreach (string token in pTokens)
    {
        Assert.AreNotEqual(0, result.get(token));
    }
    return (result);
}
/// <summary>
/// Parses a load tag: the load keyword followed by a parenthesized path expression.
/// </summary>
/// <param name="parser">TemplateParser</param>
/// <param name="tc">Token collection</param>
/// <returns>the load tag, or null when the tokens do not match</returns>
public ITag Parse(TemplateParser parser, TokenCollection tc)
{
    // BUGFIX: tc.First was dereferenced before the tc != null check ran,
    // so a null collection threw NullReferenceException instead of returning null.
    if (tc != null && parser != null && tc.Count > 2
        && Utility.IsEqual(tc.First.Text, Field.KEY_LOAD)
        && tc[1].TokenKind == TokenKind.LeftParentheses
        && tc.Last.TokenKind == TokenKind.RightParentheses)
    {
        LoadTag tag = new LoadTag();
        // Tokens between the parentheses form the path expression.
        tag.Path = parser.Read(new TokenCollection(tc, 2, tc.Count - 2));
        return (tag);
    }
    return (null);
}
/// <summary>
/// Splits a formula string into tokens: pads parentheses and operators with
/// spaces, swaps '.' for ',' as the decimal separator, then splits on whitespace.
/// </summary>
public TokenCollection ConstructTokens(string exp)
{
    if (string.IsNullOrEmpty(exp))
    {
        logger.Error("Значение формулы было неопределенным");
        return null;
    }

    string prepared = exp.Replace("(", " ( ").Replace(")", " ) ").Replace(".", ",");

    // Surround every known operator with spaces so the split isolates it.
    foreach (var op in Operator.GetAll)
    {
        prepared = prepared.Replace(op.Name, $" {op.Name} ");
    }
    string[] stringTokens = prepared.Split(new[] { " " }, StringSplitOptions.RemoveEmptyEntries);

    Tokens = new TokenCollection();
    Tokens.AddMany(stringTokens);
    return Tokens;
}
/// <summary>
/// Tokenizes the text left of the caret and returns the next-to-last token,
/// presumably the one just before the caret position — confirm.
/// </summary>
private static CategorizedToken GetLeftToken(TextViewCaret caret)
{
    TextDocument document = caret.TextDocument;
    if (document == null)
    {
        return null;
    }

    TokenCollection tokens = CodeRush.Language.GetTokens(caret.LeftText);
    if (tokens == null || tokens.Count <= 1)
    {
        return null;
    }
    return tokens[tokens.Count - 2] as CategorizedToken;
}
/// <summary>
/// Converts each token to a syntax-highlight token and applies the result to
/// the editor document inside a character update batch.
/// </summary>
private void HighlightSyntax(TokenCollection tokens)
{
    if (tokens == null || tokens.Count == 0)
    {
        return;
    }

    Document document = editor.Document;
    CharacterProperties cp = document.BeginUpdateCharacters(0, 1);
    var highlightTokens = new List<SyntaxHighlightToken>(tokens.Count);
    foreach (Token token in tokens)
    {
        HighlightCategorizedToken((CategorizedToken)token, highlightTokens);
    }
    document.ApplySyntaxHighlight(highlightTokens);
    document.EndUpdateCharacters(cp);
}
/// <summary>
/// Parses a load tag: the load keyword followed by a parenthesized path expression.
/// </summary>
/// <param name="parser">TemplateParser</param>
/// <param name="tc">Token collection</param>
/// <returns>the load tag, or null when the tokens do not match</returns>
public Tag Parse(TemplateParser parser, TokenCollection tc)
{
    // BUGFIX: tc.First was dereferenced before the tc != null check ran,
    // so a null collection threw NullReferenceException instead of returning null.
    if (tc != null && parser != null && tc.Count > 2
        && Common.Utility.IsEqual(tc.First.Text, Field.KEY_LOAD)
        && tc[1].TokenKind == TokenKind.LeftParentheses
        && tc.Last.TokenKind == TokenKind.RightParentheses)
    {
        LoadTag tag = new LoadTag();
        // Tokens between the parentheses form the path expression.
        tag.Path = parser.Read(new TokenCollection(tc, 2, tc.Count - 2));
        return tag;
    }
    return null;
}
/// <summary>
/// Parses an incoming URI string and sets the instance variables
/// of this object.
/// </summary>
/// <param name="targetUri">Takes an incoming Uri string and parses all the properties of it</param>
/// <returns>Throws a query exception when it finds something wrong with the input, otherwise returns a baseuri.</returns>
protected override Uri ParseUri(Uri targetUri)
{
    base.ParseUri(targetUri);
    if (targetUri != null)
    {
        // Split the query string into name=value tokens.
        char[] delimiters = { '?', '&' };
        TokenCollection tokens = new TokenCollection(targetUri.Query, delimiters);
        foreach (String token in tokens)
        {
            if (token.Length > 0)
            {
                // Split each token on the first '=' only, so values may contain '='.
                char[] otherDelimiters = { '=' };
                String[] parameters = token.Split(otherDelimiters, 2);
                switch (parameters[0])
                {
                    case "sq":
                        // Structured query.
                        this.SpreadsheetQuery = parameters[1];
                        break;
                    case "orderby":
                        // Either "position" or "column:<name>"; anything else is invalid.
                        if (parameters[1].Equals("position"))
                        {
                            OrderByPosition = true;
                        }
                        else if (parameters[1].StartsWith("column:"))
                        {
                            OrderByColumn = parameters[1].Substring(("column:").Length);
                        }
                        else
                        {
                            throw new ClientQueryException();
                        }
                        break;
                    case "reverse":
                        this.Reverse = bool.Parse(parameters[1]);
                        break;
                }
            }
        }
    }
    return (this.Uri);
}
/// <summary>
/// Walks a JSON document and produces a token for every property whose value is
/// a string, integer, boolean, or null, keyed by the reader's path.
/// </summary>
/// <param name="json">The JSON text to tokenize.</param>
/// <returns>The extracted tokens; whatever was collected so far when parsing fails.</returns>
private TokenCollection JsonToTokens(string json)
{
    TokenCollection tokens = new TokenCollection();
    try
    {
        JsonTextReader reader = new JsonTextReader(new StringReader(json));
        while (reader.Read())
        {
            if (reader.Value != null && reader.TokenType == Newtonsoft.Json.JsonToken.PropertyName)
            {
                string name = reader.Value.ToString() ?? "";
                // The path locates the property within the document (e.g. "a.b[0].c").
                string path = reader.Path;
                // Console.WriteLine(path);
                // Advance to the property's value token.
                reader.Read();
                if (reader.TokenType == Newtonsoft.Json.JsonToken.String)
                {
                    string value = reader.Value.ToString() ?? "";
                    // Strings may also be interpretable as other types.
                    Types supported = Types.String | TypeGuesser.GuessTypes(value);
                    tokens.Add(new Tokens.JsonToken(name, value, path, supported));
                }
                else if (reader.TokenType == Newtonsoft.Json.JsonToken.Integer)
                {
                    tokens.Add(new Tokens.JsonToken(name, reader.Value.ToString() ?? "", path, Types.Integer));
                }
                else if (reader.TokenType == Newtonsoft.Json.JsonToken.Boolean)
                {
                    tokens.Add(new Tokens.JsonToken(name, reader.Value.ToString() ?? "", path, Types.Boolean));
                }
                else if (reader.TokenType == Newtonsoft.Json.JsonToken.Null)
                {
                    // TODO: Re-Evaluate handling of null values.
                    // Null values are recorded with an empty string and Boolean type.
                    tokens.Add(new Tokens.JsonToken(name, "", path, Types.Boolean));
                }
            }
        }
    }
    catch
    {
        // Best-effort: malformed JSON is logged and the partial result returned.
        Console.WriteLine("JSON parsing failure.");
    }
    return (tokens);
}
/// <summary>
/// Tries each registered tag parser in order and returns the first tag produced,
/// with its FirstToken/LastToken range fixed up from the token collection.
/// </summary>
/// <param name="parser">TemplateParser</param>
/// <param name="tc">Token collection</param>
/// <returns>the parsed tag, or null when no parser recognizes the tokens</returns>
public static Tag Parse(TemplateParser parser, TokenCollection tc)
{
    Tag t;
    for (Int32 i = 0; i < collection.Count; i++)
    {
        t = collection[i].Parse(parser, tc);
        if (t != null)
        {
            t.FirstToken = tc.First;
            // Derive LastToken from the last child when possible; fall back to
            // tc.Last when there are no children, the child carries no token, or
            // the collection extends past the child's last token. Note the middle
            // condition assigns t.LastToken as a side effect before the comparison.
            if (t.Children.Count == 0 || (t.LastToken = t.Children[t.Children.Count - 1].LastToken ?? t.Children[t.Children.Count - 1].FirstToken) == null || tc.Last.CompareTo(t.LastToken) > 0)
            {
                t.LastToken = tc.Last;
            }
            return t;
        }
    }
    return null;
}
public void CookieTokenizer_SimpleCookieResponse_Parsed()
{
    // Arrange: a response carrying one Set-Cookie header.
    var response = new Response(System.Net.HttpStatusCode.OK, "");
    response.Headers.Add("Set-Cookie", new List<string> { "test_name=test_value; HttpOnly" });

    // Act
    TokenCollection tokens = new CookieTokenizer().ExtractTokens(response);

    // Assert: exactly one token with the expected name/value pair.
    Assert.Equal(1, tokens.Count());
    IToken token = tokens.GetByName("test_name")[0];
    Assert.Equal("test_name", token.Name);
    Assert.Equal("test_value", token.Value);
}
/// <summary>
/// Parses a numeric literal tag: an integer when the text has no decimal point,
/// otherwise a double.
/// </summary>
/// <param name="parser">TemplateParser</param>
/// <param name="tc">Token collection</param>
/// <returns>the number tag, or null when the tokens are not a single number</returns>
public Tag Parse(TemplateParser parser, TokenCollection tc)
{
    // BUGFIX: guard against a null collection — the sibling tag parsers all
    // check tc != null before dereferencing it; this one did not.
    if (tc != null && tc.Count == 1 && tc.First.TokenKind == TokenKind.Number)
    {
        NumberTag tag = new NumberTag();
        if (tc.First.Text.IndexOf('.') == -1)
        {
            tag.Value = Int32.Parse(tc.First.Text);
        }
        else
        {
            tag.Value = Double.Parse(tc.First.Text);
        }
        return (tag);
    }
    return (null);
}
/// <summary>
/// Parses a numeric literal tag: an integer when the text has no decimal point,
/// otherwise a double.
/// </summary>
/// <param name="parser">TemplateParser</param>
/// <param name="tc">Token collection</param>
/// <returns>the number tag, or null when the tokens are not a single number</returns>
public Tag Parse(TemplateParser parser, TokenCollection tc)
{
    // BUGFIX: guard against a null collection — the sibling tag parsers all
    // check tc != null before dereferencing it; this one did not.
    if (tc != null && tc.Count == 1 && tc.First.TokenKind == TokenKind.Number)
    {
        NumberTag tag = new NumberTag();
        if (tc.First.Text.IndexOf('.') == -1)
        {
            tag.Value = Int32.Parse(tc.First.Text);
        }
        else
        {
            tag.Value = Double.Parse(tc.First.Text);
        }
        return tag;
    }
    return null;
}
/// <summary>
/// Reads primitive tokens from the code section of the given AX data until the
/// whole section (Header.CodeSize bytes) has been consumed.
/// </summary>
/// <param name="data">Loaded AX data; must have been started (reader attached).</param>
/// <returns>The stream of decoded primitive tokens.</returns>
/// <exception cref="InvalidOperationException">Thrown when the data has not been started.</exception>
internal TokenCollection Analyze(AxData data)
{
    if (!data.IsStarted)
    {
        throw new InvalidOperationException();
    }
    TokenCollection stream = new TokenCollection();
    BinaryReader reader = data.Reader;
    long sizeOfCode = data.Header.CodeSize;
    long startOfCode = data.StartOfCode;
    // tokenOffset tracks how much of the code section has been consumed;
    // presumably advanced inside ReadPrimitive — confirm.
    tokenOffset = 0;
    reader.BaseStream.Seek(startOfCode, SeekOrigin.Begin);
    while (tokenOffset < sizeOfCode)
    {
        PrimitiveToken code = ReadPrimitive(reader, data);
        if (code != null)
        {
            stream.Add(code);
        }
    }
#if DEBUG
    // Debug-only lint: warn about global variables that are used once or never.
    // (The warning strings are Japanese: "used only once" / "never used".)
    List<int> variablesCount = null;
    variablesCount = subAnalyzeVariables(stream);
    for (int i = 0; i < variablesCount.Count; i++)
    {
        if (variablesCount[i] == 1)
        {
            string errMsg = GlobalVariablePrimitive.ToString(i);
            errMsg += ":この変数は一度しか使われていません";
            global::KttK.HspDecompiler.HspConsole.Warning(errMsg);
        }
        if (variablesCount[i] == 0)
        {
            string errMsg = GlobalVariablePrimitive.ToString(i);
            errMsg += ":この変数は使われていません";
            global::KttK.HspDecompiler.HspConsole.Warning(errMsg);
        }
    }
#endif
    return (stream);
}
/// <summary>
/// Tries each registered tag parser in order and returns the first tag produced,
/// with its FirstToken/LastToken range fixed up from the token collection.
/// </summary>
/// <param name="parser">TemplateParser</param>
/// <param name="tc">Token collection</param>
/// <returns>the parsed tag, or null when no parser recognizes the tokens</returns>
public static Tag Parse(TemplateParser parser, TokenCollection tc)
{
    Tag t;
    for (Int32 i = 0; i < collection.Count; i++)
    {
        t = collection[i].Parse(parser, tc);
        if (t != null)
        {
            t.FirstToken = tc.First;
            // Derive LastToken from the last child when possible; fall back to
            // tc.Last when there are no children, the child carries no token, or
            // the collection extends past the child's last token. Note the middle
            // condition assigns t.LastToken as a side effect before the comparison.
            if (t.Children.Count == 0 || (t.LastToken = t.Children[t.Children.Count - 1].LastToken ?? t.Children[t.Children.Count - 1].FirstToken) == null || tc.Last.CompareTo(t.LastToken) > 0)
            {
                t.LastToken = tc.Last;
            }
            return (t);
        }
    }
    return (null);
}
/// <summary>
/// Extracts cookie tokens from the request's Cookie header(s).
/// </summary>
/// <param name="request">The request to inspect.</param>
/// <returns>One token per name=value cookie pair; empty when there is no Cookie header.</returns>
public TokenCollection ExtractTokens(Request request)
{
    TokenCollection tokens = new TokenCollection();
    if (request.Headers.ContainsKey("Cookie"))
    {
        foreach (string cookieHeader in request.Headers["Cookie"])
        {
            string[] cookies = cookieHeader.Split(';');
            foreach (string cookieString in cookies)
            {
                // BUGFIX: split on the first '=' only so values containing '='
                // (e.g. base64) are kept intact, and skip malformed segments
                // instead of throwing IndexOutOfRangeException.
                string[] vals = cookieString.Split('=', 2);
                if (vals.Length == 2)
                {
                    // Trim the name: segments after "; " carry a leading space.
                    tokens.Add(new CookieToken(vals[0].Trim(), vals[1], TypeGuesser.GuessTypes(vals[1])));
                }
            }
        }
    }
    return (tokens);
}
public void Calcuate_WithCorrectListOfFormulaTest()
{
    //Arrange
    TokenCollection tokens = new TokenCollection();
    var formulas = new List<ServerFormula>
    {
        new ServerFormula { Name = "a", Value = 13 }
    };

    //Act: tokenize "2 + a * 10", build the RPN form, then evaluate with a = 13.
    tokens.AddMany("2 + a * 10".Split());
    tokens.CreateRPN();
    var result = tokens.Calculate(formulas);

    //Assert: 2 + 13 * 10 = 132
    Assert.AreEqual(132, result, 0.01);
}
public void BearerToken_RequestHeaders_TokenReplaced()
{
    // Arrange: a request whose Authorization header carries a bearer token.
    var initializeCart = new Request(new Uri(@"http://localhost/api/BasketItems/"), HttpMethod.Post);
    initializeCart.Content = "{\"ProductId\":24,\"BasketId\":\"20\",\"quantity\":1}";
    initializeCart.Headers.Add("Authorization", new List<string> { $"Bearer {BearerExample}" });

    // Act: extract the bearer token and substitute a new value into the request.
    TokenCollection tokens = new BearerTokenizer().ExtractTokens(initializeCart);
    List<IToken> match = tokens.GetByName("BearerToken");
    Assert.Single(match);
    match[0].ReplaceValue(initializeCart, "testresult");

    // Assert: the header now carries the replacement value.
    Assert.Equal("Bearer testresult", initializeCart.Headers["Authorization"][0]);
}
/// <summary>
/// Wraps a token stream representing a JSON object; the surrounding object
/// braces are removed before parsing. An object-array document must be handled
/// by JsonObjectArray instead.
/// </summary>
/// <param name="json">The token stream to parse.</param>
/// <exception cref="Exception">Thrown for object-array input or an unrecognized outer structure.</exception>
public Json(TokenCollection json)
{
    tokens = json;
    if (tokens[0].Kind == SyntaxKind.OBA && tokens[tokens.Count - 1].Kind == SyntaxKind.CBA)
    {
        // Object-array documents have a dedicated type.
        throw new Exception("Use JsonObjectArray(string json).");
    }
    else if (tokens[0].Kind == SyntaxKind.OB && tokens[tokens.Count - 1].Kind == SyntaxKind.CB)
    {
        // Strip the outer braces (remove the last token first so index 0 stays valid).
        tokens.RemoveAt(tokens.Count - 1);
        tokens.RemoveAt(0);
    }
    else
    {
        throw new Exception("Unknown Json format.");
    }
    Parse();
}
/// <summary>
/// Parses a quoted string tag. An empty string ("") consists of only the start
/// and end tokens; otherwise the middle token holds the content.
/// </summary>
/// <param name="parser">TemplateParser</param>
/// <param name="tc">Token collection</param>
/// <returns>the string tag, or null when the tokens do not match</returns>
public ITag Parse(TemplateParser parser, TokenCollection tc)
{
    if (tc == null
        || tc.Count <= 1
        || tc.First.TokenKind != TokenKind.StringStart
        || tc.Last.TokenKind != TokenKind.StringEnd)
    {
        return (null);
    }

    StringTag tag = new StringTag();
    bool hasContent = tc.Count == 3 && tc[1].TokenKind == TokenKind.String;
    tag.Value = hasContent ? tc[1].Text : "";
    return (tag);
}
/// <summary>
/// Extracts URL-decoded form-field tokens from a request whose Content-Type is
/// application/x-www-form-urlencoded.
/// </summary>
/// <param name="request">The request to inspect.</param>
/// <returns>One token per name=value pair; empty for other content types.</returns>
public TokenCollection ExtractTokens(Request request)
{
    TokenCollection tokens = new TokenCollection();
    if (request.Headers.ContainsKey("Content-Type")
        && request.Headers["Content-Type"].Count >= 1
        && request.Headers["Content-Type"][0] == "application/x-www-form-urlencoded")
    {
        string[] pairs = request.Content.Split("&");
        foreach (string pair in pairs)
        {
            if (pair.Length >= 3)
            {
                string[] data = pair.Split('=', 2);
                // BUGFIX: a segment without '=' (e.g. "abc") produced a single
                // element and indexing data[1] threw; skip such segments.
                if (data.Length == 2)
                {
                    HtmlFormToken token = new HtmlFormToken(HttpUtility.UrlDecode(data[0]), HttpUtility.UrlDecode(data[1]), TypeGuesser.GuessTypes(HttpUtility.UrlDecode(data[1])));
                    tokens.Add(token);
                }
            }
        }
    }
    return (tokens);
}
public void Request_ConstantSubstitution_ExpectedValue()
{
    // Arrange: a login request whose JSON body contains an email field.
    var request = new Request(new Uri(@"http://localhost/rest/user/login/"), HttpMethod.Get);
    request.Content = "{ \"email\":\"[email protected]\",\"password\":\"123456\"}";
    IRequestTokenizer tokenizer = new JsonTokenizer();

    TokenCollection tokens = tokenizer.ExtractTokens(request);
    List<IToken> email = tokens.GetByName("email");
    Assert.Single(email);

    // Act: substitute a constant value for the email token.
    ISubstitution substitution = new SubstituteConstant(email[0], "*****@*****.**");
    substitution.MakeSubstitution(null, request);

    // Assert: re-tokenizing the request shows the substituted value.
    tokens = tokenizer.ExtractTokens(request);
    email = tokens.GetByName("email");
    Assert.Equal("*****@*****.**", email[0].Value);
}
/// <summary>
/// Extracts path-template tokens from URL segments of the form {name:value}.
/// </summary>
/// <param name="request">The request whose URL path is scanned.</param>
/// <returns>One token per templated segment, keyed by segment index.</returns>
public TokenCollection ExtractTokens(Request request)
{
    TokenCollection tokens = new TokenCollection();
    string[] segments = request.Url.AbsolutePath.Trim('/').Split('/');
    for (int i = 0; i < segments.Length; ++i)
    {
        string segment = HttpUtility.UrlDecode(segments[i]);
        if (segment.Length >= 3 && segment[0] == '{' && segment[segment.Length - 1] == '}')
        {
            string trimmed = segment.Trim('{', '}');
            string[] parsed = trimmed.Split(':');
            // BUGFIX: "{name}" without a ':' yields one element and parsed[1]
            // threw IndexOutOfRangeException; treat the value as empty instead.
            string value = parsed.Length > 1 ? parsed[1] : string.Empty;
            // TODO: Parse supported types.
            tokens.Add(new PathToken(i, parsed[0], value, Types.Integer | Types.String));
        }
    }
    return (tokens);
}
/// <summary>
/// Allows for setting the collapsed text for the specified <see cref="OutliningNode"/>.
/// </summary>
/// <param name="node">The <see cref="OutliningNode"/> that is requesting collapsed text.</param>
public override void SetOutliningNodeCollapsedText(OutliningNode node)
{
    TokenCollection tokens = node.Document.Tokens;
    int tokenIndex = tokens.IndexOf(node.StartOffset);
    switch (tokens[tokenIndex].Key)
    {
        case "MultiLineCommentStartToken":
            node.CollapsedText = "/**/";
            break;
        case "RegionStartToken":
        {
            // Accumulate the region title up to the end-of-comment token.
            // Uses a StringBuilder instead of the previous O(n^2) string concatenation.
            var collapsedText = new System.Text.StringBuilder();
            while (++tokenIndex < tokens.Count)
            {
                if (tokens[tokenIndex].Key == "CommentStringEndToken")
                    break;
                collapsedText.Append(tokens.Document.GetTokenText(tokens[tokenIndex]));
            }
            node.CollapsedText = collapsedText.ToString().Trim();
            break;
        }
    }
}
/// <summary>
/// Evaluates a TokenCollection representing a mathematical
/// expression and returns the result.
/// </summary>
/// <param name="tokens">The TokenCollection to be evaluated.</param>
/// <returns>The result of the expression.</returns>
public decimal Evaluate(TokenCollection tokens)
{
    var stack = new Stack<decimal>();
    try
    {
        // Standard postfix evaluation: values push, operators pop their operands.
        foreach (Token token in tokens.AsPostfix())
        {
            if (token is ValueToken value)
            {
                stack.Push(value.Evaluate());
            }
            else if (token is UnaryOperatorToken unaryOp)
            {
                stack.Push(unaryOp.Evaluate(stack.Pop()));
            }
            else if (token is BinaryOperatorToken binaryOp)
            {
                decimal rightValue = stack.Pop();
                decimal leftValue = stack.Pop();
                stack.Push(binaryOp.Evaluate(leftValue, rightValue));
            }
            else
            {
                throw new InvalidExpressionException($"Token '{token}' cannot be evaluated");
            }
        }
        return (stack.Pop());
    }
    catch (InvalidOperationException ex)
    {
        // An empty stack pop means the expression was malformed.
        throw new InvalidExpressionException(ex);
    }
}
/// <summary>
/// Parses a numeric literal tag. Integer text is boxed as int when it fits,
/// otherwise as long; text with a decimal point is parsed as double.
/// </summary>
/// <param name="parser">TemplateParser</param>
/// <param name="tc">Token collection</param>
/// <returns>the number tag, or null when the tokens are not a single number</returns>
public ITag Parse(TemplateParser parser, TokenCollection tc)
{
    if (tc != null && tc.Count == 1 && tc.First.TokenKind == TokenKind.Number)
    {
        NumberTag tag = new NumberTag();
        if (tc.First.Text.IndexOf('.') == -1)
        {
            // Parse once as long and narrow by value. The previous length-based
            // heuristic parsed 9-character input twice and its length == 9
            // branch could never choose long (every 9-digit number fits an int).
            long value = long.Parse(tc.First.Text);
            if (value >= int.MinValue && value <= int.MaxValue)
            {
                tag.Value = (int)value;
            }
            else
            {
                tag.Value = value;
            }
        }
        else
        {
            tag.Value = Double.Parse(tc.First.Text);
        }
        return (tag);
    }
    return (null);
}
public void Test_Subtract_Rule()
{
    // Arrange: three of one token, two of a different token.
    TokenCollection mice = new TokenCollection();
    mice.Add("mouse");
    mice.Add("mouse");
    mice.Add("mouse");

    TokenCollection houses = new TokenCollection();
    houses.Add("house");
    houses.Add("house");

    // Act/Assert: subtracting tokens that are not present must throw.
    try
    {
        TokenCollection.Subtract(houses, mice);
        Assert.Fail();
    }
    catch (ArgumentException)
    {
    }
}
// internal List<string> Decode(string axPath)
#if AllowDecryption
// internal List<string> DecodeAndDecrypt(BinaryReader reader,int fileSize)
#endif
/// <summary>
/// Decodes a compiled HSP AX binary back into source lines: reads the header
/// and preprocessor data, lexes the code section into tokens, runs syntactic
/// analysis, and renders each visible logical line with its indentation.
/// (Console progress messages are Japanese: header / preprocessor / lexical /
/// syntactic analysis, then output generation.)
/// </summary>
/// <param name="reader">Reader positioned on the AX data.</param>
/// <returns>The decompiled source lines.</returns>
/// <exception cref="HspDecoderException">Wraps any unexpected SystemException raised during decoding.</exception>
public override List<string> Decode(BinaryReader reader)
{
    AxData data = new AxData();
    LexicalAnalyzer lex = null;
    TokenCollection stream = null;
    SyntacticAnalyzer synt = null;
    List<LogicalLine> lines = null;
    List<string> stringLines = new List<string>();
    try
    {
        // Stage 1: header.
        global::KttK.HspDecompiler.HspConsole.Write("ヘッダー解析中...");
        data.LoadStart(reader, dictionary);
        data.ReadHeader();
        // Stage 2: preprocessor data.
        global::KttK.HspDecompiler.HspConsole.Write("プリプロセッサ解析中...");
        data.ReadPreprocessor(dictionary);
        // Stage 3: lexical analysis of the code section.
        global::KttK.HspDecompiler.HspConsole.Write("字句解析中...");
        lex = new LexicalAnalyzer(dictionary);
        stream = lex.Analyze(data);
        data.LoadEnd();
        // Stage 4: syntactic analysis into logical lines.
        global::KttK.HspDecompiler.HspConsole.Write("構文解析中...");
        synt = new SyntacticAnalyzer();
        lines = synt.Analyze(stream, data);
        // Stage 5: render visible lines with tab indentation.
        global::KttK.HspDecompiler.HspConsole.Write("出力ファイル作成中...");
        foreach (LogicalLine line in lines)
        {
            if (line.Visible)
            {
                string str = new string('\t', line.TabCount);
                stringLines.Add(str + line.ToString());
            }
        }
    }
    catch (SystemException e)
    {
        throw new HspDecoderException("AxData", "想定外のエラー", e);
    }
    return (stringLines);
}
/// <inheritdoc />
/// <summary>
/// Builds the parse delegate for elseif/elif tags: the keyword followed by a
/// parenthesized condition read from the token slice tc[2, -1].
/// </summary>
public override Func<TemplateParser, TokenCollection, ITag> BuildParseMethod()
{
    return ((parser, tc) =>
    {
        if (tc != null && parser != null && tc.Count > 3
            && (Utility.IsEqual(tc.First.Text, Field.KEY_ELSEIF) || Utility.IsEqual(tc.First.Text, Field.KEY_ELIF))
            && tc[1].TokenKind == TokenKind.LeftParentheses
            && tc.Last.TokenKind == TokenKind.RightParentheses)
        {
            var tag = new ElseifTag();
            // CLEANUP: removed an unused TokenCollection local that was
            // allocated and then never referenced.
            tag.Condition = parser.Read(tc[2, -1]);
            return tag;
        }
        return null;
    });
}
/////////////////////////////////////////////////////////////////////////////
//////////////////////////////////////////////////////////////////////
/// <summary>tests the tokenizer collection</summary>
//////////////////////////////////////////////////////////////////////
[Test]
public void TestTokenCollection()
{
    Tracing.TraceMsg("Entering TestTokenCollection");

    String toTest = "Test=Test?other=whatever\nTest2=Line2?other=whatishere";
    // Plain tokenizer: splits on every '\n' and '=' occurrence.
    TokenCollection tokens = new TokenCollection(toTest, new char[] { '\n', '=' });
    // "Smart" tokenizer: splits on '=' at most twice per line, keeping the remainder intact.
    TokenCollection tokenSmart = new TokenCollection(toTest, '=', true, 2);

    int iTokens = 0;
    foreach (string token in tokens)
    {
        // tokens should have 5 tokens, as the = signs split into 5
        iTokens++;
        if (iTokens == 1)
        {
            Assert.IsTrue(token.Equals("Test"), "The first token should be Test, but it is: " + token);
        }
        if (iTokens == 4)
        {
            Assert.IsTrue(token.Equals("Test2"), "The fourth token should be Test2 but it is: " + token);
        }
    }

    iTokens = 0;
    foreach (string token in tokenSmart)
    {
        // tokens should have 5 tokens, as the = signs split into 5
        iTokens++;
        if (iTokens == 1)
        {
            Assert.IsTrue(token.Equals("Test"), "The first smart token should be Test, but it is: " + token);
        }
        if (iTokens == 4)
        {
            // The smart tokenizer keeps everything after the second '=' intact.
            Assert.IsTrue(token.Equals("Line2?other=whatishere"), "The fourth smart token should be whatishere, but it is: " + token);
        }
    }
}
/// <summary>
/// Parses a foreach tag of the form foreach(name in source). Child tags are
/// consumed from the parser until the closing end tag.
/// </summary>
/// <param name="parser">TemplateParser</param>
/// <param name="tc">Token collection</param>
/// <returns>the parsed tag, or null when the tokens do not start a foreach tag</returns>
/// <exception cref="Exception.ParseException">Thrown on malformed syntax or a missing end tag.</exception>
public Tag Parse(TemplateParser parser, TokenCollection tc)
{
    if (tc != null && parser != null && tc.Count > 0 && Common.ParserHelpers.IsEqual(Field.KEY_FOREACH, tc.First.Text))
    {
        // Expected shape: foreach ( <name> in <source...> )
        if (tc.Count > 5 && tc[1].TokenKind == TokenKind.LeftParentheses && tc[2].TokenKind == TokenKind.TextData && Common.ParserHelpers.IsEqual(tc[3].Text, Field.KEY_IN) && tc.Last.TokenKind == TokenKind.RightParentheses)
        {
            ForeachTag tag = new ForeachTag();
            tag.Name = tc[2].Text;
            TokenCollection coll = new TokenCollection();
            // Tokens after "in" up to the closing parenthesis form the source expression.
            coll.Add(tc, 4, tc.Count - 2);
            tag.Source = parser.Read(coll);
            // Consume children (the end tag included) until the block closes.
            while (parser.MoveNext())
            {
                tag.Children.Add(parser.Current);
                if (parser.Current is EndTag)
                {
                    return tag;
                }
            }
            throw new Exception.ParseException(String.Concat("foreach is not properly closed by a end tag:", tc), tc.First.BeginLine, tc.First.BeginColumn);
        }
        else
        {
            throw new Exception.ParseException(String.Concat("syntax error near foreach:", tc), tc.First.BeginLine, tc.First.BeginColumn);
        }
    }
    return null;
}
/// <summary>
/// This converts the token entries to a token file and adds it to
/// the project.
/// </summary>
private void CreateTokenFile()
{
    XmlReader xr = converter.Reader;
    StreamWriter sw = null;
    // The token file takes the project's base name with a .tokens extension.
    string tokenFile = Path.Combine(converter.ProjectFolder, Path.GetFileNameWithoutExtension(converter.Project.Filename) + ".tokens");

    // Create an empty token file
    try
    {
        sw = File.CreateText(tokenFile);
        sw.WriteLine("<content/>");
    }
    finally
    {
        if (sw != null)
        {
            sw.Close();
        }
    }

    // Add the new file to the project, then copy every <token> element from
    // the source reader into the collection and persist it.
    FileItem fileItem = converter.Project.AddFileToProject(tokenFile, tokenFile);
    TokenCollection tokens = new TokenCollection(fileItem);
    while (!xr.EOF && xr.NodeType != XmlNodeType.EndElement)
    {
        if (xr.NodeType == XmlNodeType.Element && xr.Name == "token")
        {
            tokens.Add(new Token(xr.GetAttribute("name"), xr.GetAttribute("value")));
        }
        xr.Read();
    }
    tokens.Save();
}
/// <summary>
/// Returns a valid access token, reusing the cached one while it has at least
/// 30 seconds of lifetime left; otherwise refreshes via the refresh token or
/// an interactive login dialog, serialized through a semaphore.
/// </summary>
/// <param name="cancellationToken">Cancels the wait, the refresh, or the dialog.</param>
/// <returns>The access token string.</returns>
public async Task<string> GetAccessTokenAsync(CancellationToken cancellationToken)
{
    // BUGFIX: the comparison was inverted — the cached token was returned only
    // when it had ALREADY expired (expiration < now - 30s). A token is reusable
    // while its expiration lies at least 30 seconds in the future.
    // NOTE(review): uses local time (DateTime.Now); confirm AccessTokenExpiration
    // is local too, otherwise switch both to UTC.
    if (tokens != null && tokens.AccessTokenExpiration > DateTime.Now + TimeSpan.FromSeconds(30))
    {
        return (tokens.AccessToken);
    }
    await semaphoreSlim.WaitAsync(cancellationToken);
    try
    {
        // try again to handle cases when 2 requests missed
        if (tokens != null && tokens.AccessTokenExpiration > DateTime.Now + TimeSpan.FromSeconds(30))
        {
            return (tokens.AccessToken);
        }
        var vm = loginViewModelFactory();
        if (await vm.RefreshTokenIsAvailableAsync())
        {
            tokens = await vm.LoginUsingRefreshTokenAsync(cancellationToken);
        }
        else
        {
            if (await dialogService.ShowDialogAsync(vm, cancellationToken) != true)
            {
                // NOTE(review): NotImplementedException on user cancel looks like a
                // placeholder — consider OperationCanceledException. Left unchanged
                // to preserve the exception type callers may already catch.
                throw new NotImplementedException();
            }
            tokens = vm.Tokens;
        }
        return (tokens.AccessToken);
    }
    finally
    {
        semaphoreSlim.Release();
    }
}
/// <summary>
/// Parses the token stream into a CompilationUnit by reading statements until
/// the end-of-file token is reached.
/// </summary>
/// <param name="tokens">The tokens to parse.</param>
/// <returns>The populated compilation unit.</returns>
public CompilationUnit Parse(TokenCollection tokens)
{
    _tokens = tokens;
    _index = 0;
    CompilationUnit compilationUnit = new CompilationUnit();
    // BUGFIX: removed "catch (Exception ex) { throw ex; }", which reset the
    // stack trace while adding nothing; exceptions now propagate unmodified.
    Advance();
    while (_curtok != _eof)
    {
        compilationUnit.Statements.Add(ParseStatement());
    }
    return compilationUnit;
}
/// <summary>
/// Builds an expression tree from the token chain: each token either starts a
/// new expression or is fed to the current one, and completed expressions are
/// added to the tree.
/// </summary>
/// <param name="tokens">The tokens to parse; null is ignored.</param>
/// <exception cref="Exception">Wraps any failure with the line number of the token being processed.</exception>
public void Parse(TokenCollection tokens)
{
    if (tokens == null)
    {
        return;
    }
    Token token = tokens[0];
    try
    {
        Expression tree = new Expression();
        Expression curtExp = null;
        while (token != null)
        {
            if (curtExp == null)
            {
                curtExp = tree.CreateInstance(token);
            }
            else
            {
                curtExp.Process(token);
            }
            // Follow the chain from the last token the expression consumed.
            token = curtExp.LastToken.NextToken;
            if (curtExp.IsComplete)
            {
                tree.Add(curtExp);
                curtExp = null;
            }
        }
    }
    catch (Exception e)
    {
        // BUGFIX: preserve the original exception as InnerException (it was
        // discarded before) and guard against token being null inside the
        // handler, which itself threw NullReferenceException.
        string line = token != null ? token.LineNumber.ToString() : "unknown";
        throw new Exception(e.Message + "\r\nAt line " + line, e);
    }
}
//=====================================================================
/// <summary>
/// Load a token file for editing
/// </summary>
/// <param name="tokenFile">The token file to load</param>
/// <param name="selectedToken">The token ID to select by default or null if no selection</param>
public void LoadTokenFile(string tokenFile, string selectedToken)
{
    if (tokenFile == null)
        throw new ArgumentNullException("tokenFile", "A token filename must be specified");

    tokens = new TokenCollection(tokenFile);
    tokens.Load();
    tokens.ListChanged += new ListChangedEventHandler(tokens_ListChanged);

    if (tokens.Count != 0)
    {
        // Select the requested token, falling back to the first entry.
        var match = selectedToken == null ? null :
            tokens.Find(t => t.TokenName == selectedToken).FirstOrDefault();

        if (match != null)
            match.IsSelected = true;
        else
            tokens[0].IsSelected = true;
    }

    lbTokens.ItemsSource = tokens;
    this.tokens_ListChanged(this, new ListChangedEventArgs(ListChangedType.Reset, -1));
}
/// <summary>
/// Interleaves raw code text with CSS-classed token text, producing the merged
/// formatted output line by line.
/// </summary>
static string GetFormattedCode(string code, TokenCollection tokens)
{
    var line = new CodeLine();
    var lines = new List<CodeLine>();
    int position = 0;
    foreach (CategorizedToken token in tokens)
    {
        // Emit the plain text between the previous token and this one, then the token itself.
        AppendCode(lines, ref line, code.Substring(position, token.StartPosition - position), null);
        AppendCode(lines, ref line, token.Value, CssClasses[token.Category].GetClassName(token.Language));
        position = token.EndPosition;
    }
    // Trailing text after the last token.
    AppendCode(lines, ref line, code.Substring(position), null);
    lines.Add(line);
    return MergeCodeLines(lines);
}
/// <summary>
/// Parses a declaration at the current position, collecting tokens up to
/// (but not including) the terminating ';' or '='.
/// </summary>
private TokenCollection ParseDeclarationTokens()
{
    /*
        void (*pt) (int par1, int par2);
        <------------------------>
    */
    var declaration = new TokenCollection();

    // The terminator itself is left for the caller to consume.
    while (Current.Type != TokenId.Semicolon && Current.Type != TokenId.Equals)
    {
        declaration.AddLast(Current);
        Next();
    }

    return declaration;
}
/// <summary>
/// This loads the tree view with table of contents file entries from the project
/// </summary>
/// <remarks>Token information is also loaded here and passed on to the converter.</remarks>
private void LoadTableOfContentsInfo()
{
    List<ITableOfContents> tocFiles;
    TopicCollection contentLayout;
    TokenCollection tokens;

    // Reset all UI state before (re)loading
    tvContent.ItemsSource = null;
    tableOfContents = null;
    lblCurrentProject.Text = null;
    browserHistory.Clear();
    historyLocation = -1;

    if(currentProject == null)
    {
        lblCurrentProject.Text = "None - Select a help file builder project in the Solution Explorer";
        return;
    }

    // Make sure the base path is set for imported code blocks
    this.SetImportedCodeBasePath();

    // Get content from open file editors
    var args = new FileContentNeededEventArgs(FileContentNeededEvent, this);
    base.RaiseEvent(args);

    lblCurrentProject.Text = currentProject.Filename;

    // NOTE(review): the history was already cleared above; this repeat is
    // harmless but redundant.
    browserHistory.Clear();
    historyLocation = -1;
    tableOfContents = new TocEntryCollection();

    // Each load step below catches its own exceptions and surfaces them as an
    // ERROR entry in the TOC, so one bad file does not abort the whole load.
    try
    {
        converter.MediaFiles.Clear();

        // Get the image files.  This information is used to resolve media link elements in the
        // topic files.
        foreach(var file in currentProject.ImagesReferences)
            converter.MediaFiles[file.Id] = new KeyValuePair<string, string>(file.FullPath, file.AlternateText);
    }
    catch(Exception ex)
    {
        tableOfContents.Add(new TocEntry(currentProject)
        {
            Title = "ERROR: Unable to load media info: " + ex.Message
        });
    }

    try
    {
        converter.Tokens.Clear();

        // Get the token files.  This information is used to resolve token elements in the topic files.
        foreach(var file in currentProject.ContentFiles(BuildAction.Tokens).OrderBy(f => f.LinkPath))
        {
            // If open in an editor, use the edited values
            if(!args.TokenFiles.TryGetValue(file.FullPath, out tokens))
            {
                tokens = new TokenCollection(file.FullPath);
                tokens.Load();
            }

            // Store the tokens as XElements so that they can be parsed inline with the topic
            foreach(var t in tokens)
                converter.Tokens.Add(t.TokenName, XElement.Parse("<token>" + t.TokenValue + "</token>"));
        }
    }
    catch(Exception ex)
    {
        tableOfContents.Add(new TocEntry(currentProject)
        {
            Title = "ERROR: Unable to load token info: " + ex.Message
        });
    }

    try
    {
        converter.TopicTitles.Clear();

        // Load the content layout files.  Site maps are ignored as we don't support rendering them.
        tocFiles = new List<ITableOfContents>();

        foreach(var contentFile in currentProject.ContentFiles(BuildAction.ContentLayout))
        {
            // If open in an editor, use the edited values
            if(!args.ContentLayoutFiles.TryGetValue(contentFile.FullPath, out contentLayout))
            {
                contentLayout = new TopicCollection(contentFile);
                contentLayout.Load();
            }

            tocFiles.Add(contentLayout);
        }

        // Order the layout files by their sort-order property, then by filename
        tocFiles.Sort((x, y) =>
        {
            ContentFile fx = x.ContentLayoutFile, fy = y.ContentLayoutFile;

            if(fx.SortOrder < fy.SortOrder)
                return -1;

            if(fx.SortOrder > fy.SortOrder)
                return 1;

            return String.Compare(fx.Filename, fy.Filename, StringComparison.OrdinalIgnoreCase);
        });

        // Create the merged TOC.  For the purpose of adding links, we'll include everything even topics
        // marked as invisible.
        foreach(ITableOfContents file in tocFiles)
            file.GenerateTableOfContents(tableOfContents, true);

        // Pass the topic IDs and titles on to the converter for use in hyperlinks
        foreach(var t in tableOfContents.All())
            if(!String.IsNullOrEmpty(t.Id))
                converter.TopicTitles[t.Id] = t.LinkText;
    }
    catch(Exception ex)
    {
        tableOfContents.Add(new TocEntry(currentProject)
        {
            Title = "ERROR: Unable to load TOC info: " + ex.Message
        });
    }

    // Select the first entry by default
    if(tableOfContents.Count != 0)
    {
        foreach(var t in tableOfContents.All())
            t.IsSelected = false;

        tableOfContents[0].IsSelected = true;
    }

    tvContent.ItemsSource = tableOfContents;
}
/// <summary>
/// Converts the categorized tokens into syntax-highlight tokens and applies
/// them to the editor's document in a single batched update.
/// </summary>
void HighlightSyntax(TokenCollection tokens)
{
    // Nothing to colorize
    if (tokens == null || tokens.Count == 0)
        return;

    var document = _editor.Control.Document;

    // Batch the character updates so the control repaints once
    CharacterProperties cp = document.BeginUpdateCharacters(0, 1);

    var highlights = new List<SyntaxHighlightToken>(tokens.Count);

    foreach (Token t in tokens)
        HighlightCategorizedToken((CategorizedToken)t, highlights);

    document.ApplySyntaxHighlight(highlights);
    document.EndUpdateCharacters(cp);
}
/// <summary>
/// Lexes the given reader into a token collection.
/// NOTE(review): stub — currently always returns an empty collection and
/// consumes nothing from <paramref name="src"/>.
/// </summary>
public TokenCollection Lex(LexReader src)
{
    return new TokenCollection();
}
/// <summary>
/// Rebuilds the cached highlight properties from the current color scheme,
/// then applies syntax highlighting for the given tokens to the editor's
/// document.
/// </summary>
private void HighlightSyntax(TokenCollection tokens)
{
    // Refresh the per-category character properties from the color scheme
    commentProperties = new SyntaxHighlightProperties { ForeColor = syntaxColors.CommentColor };
    keywordProperties = new SyntaxHighlightProperties { ForeColor = syntaxColors.KeywordColor };
    stringProperties = new SyntaxHighlightProperties { ForeColor = syntaxColors.StringColor };
    xmlCommentProperties = new SyntaxHighlightProperties { ForeColor = syntaxColors.XmlCommentColor };
    textProperties = new SyntaxHighlightProperties { ForeColor = syntaxColors.TextColor };

    // Nothing to highlight
    if (tokens == null || tokens.Count == 0)
    {
        return;
    }

    Document document = syntaxEditor.Document;
    //CharacterProperties cp = document.BeginUpdateCharacters(0, 1);

    var highlightTokens = new List<SyntaxHighlightToken>(tokens.Count);

    foreach (Token token in tokens)
    {
        HighlightCategorizedToken((CategorizedToken)token, highlightTokens);
    }

    document.ApplySyntaxHighlight(highlightTokens);
    //document.EndUpdateCharacters(cp);
}
//=====================================================================

/// <summary>
/// This loads the tree view with token file entries from the project
/// </summary>
/// <returns>The cached entity reference list; built on first call and
/// returned as-is on subsequent calls.</returns>
private List<EntityReference> LoadTokenInfo()
{
    // Non-null while the current file's entry is being built; reset to null
    // once the file loads cleanly.  The catch block uses this to tell whether
    // the failure happened before or after the file entry was added.
    EntityReference tokenFileEntity = null;
    TokenCollection tokenColl;

    // Already loaded — return the cached list
    if(tokens != null)
        return tokens;

    tokens = new List<EntityReference>();

    // Get content from open file editors
    var args = new FileContentNeededEventArgs(FileContentNeededEvent, this);
    base.RaiseEvent(args);

    // One entry per token file; failures are reported as error entries rather
    // than aborting the whole load.
    foreach(var tokenFile in currentProject.ContentFiles(BuildAction.Tokens).OrderBy(f => f.LinkPath))
        try
        {
            if(File.Exists(tokenFile.FullPath))
            {
                tokenFileEntity = new EntityReference
                {
                    EntityType = EntityType.File,
                    Id = tokenFile.FullPath,
                    Label = Path.GetFileName(tokenFile.FullPath),
                    ToolTip = tokenFile.FullPath
                };

                tokens.Add(tokenFileEntity);

                // If open in an editor, use the edited values
                if(!args.TokenFiles.TryGetValue(tokenFile.FullPath, out tokenColl))
                {
                    tokenColl = new TokenCollection(tokenFile.FullPath);
                    tokenColl.Load();
                }

                // One sub-entity per token in the file
                foreach(Token t in tokenColl)
                    tokenFileEntity.SubEntities.Add(new EntityReference
                    {
                        EntityType = EntityType.Token,
                        Id = t.TokenName,
                        Label = t.TokenName,
                        ToolTip = t.TokenName,
                        Tag = t
                    });
            }

            tokenFileEntity = null;
        }
        catch(Exception ex)
        {
            // If the file entry was never created, include the filename in the
            // error; otherwise the entry above already identifies the file.
            if(tokenFileEntity == null)
                tokens.Add(new EntityReference
                {
                    EntityType = EntityType.File,
                    Label = "Unable to load file '" + tokenFile.FullPath + "'. Reason: " + ex.Message,
                    ToolTip = "Error"
                });
            else
                tokens.Add(new EntityReference
                {
                    EntityType = EntityType.File,
                    Label = "Unable to load file: " + ex.Message,
                    ToolTip = "Error"
                });
        }

    // Select (and expand, if it has tokens) the first file entry
    if(tokens.Count != 0)
    {
        tokens[0].IsSelected = true;

        if(tokens[0].SubEntities.Count != 0)
            tokens[0].IsExpanded = true;
    }

    return tokens;
}
/// <summary>
/// Plays up to <paramref name="count"/> tokens of the given type from this
/// player's token piles, raising the TokenPlaying/TokenPlayed events around
/// the play.
/// </summary>
/// <param name="game">The game in which the tokens are played.</param>
/// <param name="token">The token type; a key into TokenPiles.</param>
/// <param name="count">Requested number of tokens; capped at the pile size.</param>
/// <exception cref="Exception">Thrown when the current phase or player mode
/// does not allow playing tokens.</exception>
public void PlayTokens(Game game, Type token, int count)
{
    // Unknown pile or non-positive request — silently a no-op
    if (!this.TokenPiles.ContainsKey(token) || count <= 0)
        return;

    if (Phase == PhaseEnum.Buy || Phase == PhaseEnum.Cleanup || Phase == PhaseEnum.Endgame ||
        PlayerMode == PlayerMode.Waiting || PlayerMode == PlayerMode.Choosing || PlayerMode == PlayerMode.Playing)
        throw new Exception("Can't play tokens right now!");

    // Playing a token during the Action phase advances the turn to BuyTreasure
    if (Phase == PhaseEnum.Action)
        Phase = PhaseEnum.BuyTreasure;

    // Never play more tokens than the pile holds
    int finalCount = count > this.TokenPiles[token].Count ? this.TokenPiles[token].Count : count;

    TokenCollection tokens = new TokenCollection(this.TokenPiles[token].Take(finalCount));

    // Notify listeners before the play takes effect
    if (TokenPlaying != null)
    {
        TokenPlayingEventArgs tpgea = new TokenPlayingEventArgs(this, tokens);
        TokenPlaying(this, tpgea);
    }

    // NOTE(review): Play is invoked once on the first token with the full
    // count — presumably the token type's effect scales with the count;
    // confirm against the Token.Play implementation.
    this.TokenPiles[token].First().Play(this, finalCount);
    this.RemoveTokens(tokens);

    // Notify listeners after the play took effect
    if (TokenPlayed != null)
    {
        TokenPlayedEventArgs tpgea = new TokenPlayedEventArgs(this, tokens);
        TokenPlayed(this, tpgea);
    }

    // With no treasures in hand and no playable tokens left, skip straight to Buy
    if (this.Phase == PhaseEnum.BuyTreasure && this.Hand[Category.Treasure].Count == 0 && !this.TokenPiles.IsAnyPlayable)
        this.Phase = PhaseEnum.Buy;
}
/// <summary>
/// Parses a tag from the token stream.  The tokens are split on top-level
/// parentheses, dots and operators into sub-collections; each sub-collection
/// is read as a child tag and the pieces are folded into a single tag (an
/// ExpressionTag when more than one part remains).
/// </summary>
/// <param name="parser">TemplateParser</param>
/// <param name="tc">Token collection</param>
/// <returns>The parsed tag, or null when the collection cannot be handled here.</returns>
public Tag Parse(JinianNet.JNTemplate.Parser.TemplateParser parser, TokenCollection tc)
{
    if (tc != null && parser != null && tc.Count > 2)
    {
        Int32 start, end, pos;
        start = end = pos = 0;
        Boolean isFunc = false;
        // Separator tokens ('.'/operator) in order; null marks a queued sub-collection
        List<Token> data = new List<Token>();
        // Sub-collections to be read as child tags, in order
        Queue<TokenCollection> queue = new Queue<TokenCollection>();

        for (Int32 i = 0; i < tc.Count; i++)
        {
            end = i;
            if (tc[i].TokenKind == TokenKind.LeftParentheses)
            {
                if (pos == 0)
                {
                    // A '(' immediately after text data marks a function call,
                    // so the name is kept together with its argument list
                    if (i > 0 && tc[i - 1].TokenKind == TokenKind.TextData)
                    {
                        isFunc = true;
                    }
                }
                pos++;
            }
            else if (tc[i].TokenKind == TokenKind.RightParentheses)
            {
                if (pos > 0)
                {
                    pos--;
                }
                else
                {
                    // FIX: report the offending token from tc, not data.
                    // `data` only holds the separator tokens collected so far,
                    // so data[i] was out of range (or null) at this point and
                    // the error report itself crashed instead of raising the
                    // intended ParseException.
                    throw new Exception.ParseException(String.Concat("syntax error near ):", tc), tc[i].BeginLine, tc[i].BeginColumn);
                }
                if (pos == 0)
                {
                    // Outermost ')' closed: queue the span as one sub-collection.
                    // For plain groups the parentheses are stripped; for function
                    // calls they are kept.
                    TokenCollection coll = new TokenCollection();
                    if (!isFunc)
                    {
                        coll.Add(tc, start + 1, end - 1);
                    }
                    else
                    {
                        coll.Add(tc, start, end);
                    }
                    queue.Enqueue(coll);
                    data.Add(null);
                    start = i + 1;
                    //tag.AddChild(parser.Read(coll));
                }
            }
            else if (pos == 0 && (tc[i].TokenKind == TokenKind.Dot || tc[i].TokenKind == TokenKind.Operator))
            {
                // Top-level separator: flush the pending span, then record the separator
                if (end > start)
                {
                    TokenCollection coll = new TokenCollection();
                    coll.Add(tc, start, end - 1);
                    queue.Enqueue(coll);
                    data.Add(null);
                }
                start = i + 1;
                data.Add(tc[i]);
            }

            // Flush whatever remains at the end of the stream
            if (i == tc.Count - 1 && end >= start)
            {
                if (start == 0 && end == i)
                {
                    // The whole collection was a single undivided span — nothing
                    // this parser can decompose
                    throw new Exception.ParseException(String.Concat("Unexpected tag:", tc), tc[0].BeginLine, tc[0].BeginColumn);
                }
                TokenCollection coll = new TokenCollection();
                coll.Add(tc, start, end);
                queue.Enqueue(coll);
                data.Add(null);
                start = i + 1;
            }
        }

        // Second pass: turn the queued sub-collections and separators into tags
        List<Tag> tags = new List<Tag>();
        for (Int32 i = 0; i < data.Count; i++)
        {
            if (data[i] == null)
            {
                tags.Add(parser.Read(queue.Dequeue()));
            }
            else if (data[i].TokenKind == TokenKind.Dot)
            {
                // '.' must sit between two sub-collections
                if (tags.Count == 0 || i == data.Count - 1 || data[i + 1] != null)
                {
                    throw new Exception.ParseException(String.Concat("syntax error near .:", tc), data[i].BeginLine, data[i].BeginColumn);
                }
                if (tags[tags.Count - 1] is ReferenceTag)
                {
                    // Extend the existing member-access chain
                    tags[tags.Count - 1].AddChild(parser.Read(queue.Dequeue()));
                }
                else
                {
                    // Wrap the previous tag and the member into a reference chain
                    ReferenceTag t = new ReferenceTag();
                    t.AddChild(tags[tags.Count - 1]);
                    t.AddChild(parser.Read(queue.Dequeue()));
                    tags[tags.Count - 1] = t;
                }
                // The following null slot was consumed by the Dequeue above
                i++;
            }
            else if (data[i].TokenKind == TokenKind.Operator)
            {
                tags.Add(new TextTag());
                tags[tags.Count - 1].FirstToken = data[i];
            }
        }

        if (tags.Count == 1)
        {
            return tags[0];
        }
        if (tags.Count > 1)
        {
            // Multiple parts: fold them into a single expression tag
            ExpressionTag t = new ExpressionTag();
            for (Int32 i = 0; i < tags.Count; i++)
            {
                t.AddChild(tags[i]);
            }
            tags.Clear();
            return t;
        }
    }
    return null;
}