public LexerDefinitions(Mq4Lexer mq4Lexer)
{
    _mq4Lexer = mq4Lexer;
    Definitions = new List<LexDefinition>();
    Definitions.AddRange(new[]
    {
        new LexDefinition(new MatchMultiLineCommentText(_mq4Lexer), TokenKind.Comment),
        new LexDefinition(new MatchStartText("//"), TokenKind.Comment),
        new LexDefinition(new MatchStartText("#property"), TokenKind.SharpProperty),
        new LexDefinition(new MatchStartText("#define"), TokenKind.SharpDefine),
        new LexDefinition(new MatchStartText("#import"), TokenKind.SharpImport),
        new LexDefinition(new MatchStartText("#include"), TokenKind.SharpInclude),
        new LexDefinition(@"\s", TokenKind.Space),
        // Thanks to http://www.regular-expressions.info/floatingpoint.html
        new LexDefinition(@"[-+]?\d*\.\d+([eE][-+]?\d+)?", TokenKind.Float),
        new LexDefinition(new HexMatcher(), TokenKind.Int),
        new LexDefinition(@"[-+]?\d+", TokenKind.Int),
        new LexDefinition(new UnicodeIdentifierMatcher(), TokenKind.Identifier),
        new LexDefinition(new MatchQuotedString(), TokenKind.QuotedString),
        new LexDefinition(new MatchChar(), TokenKind.Char),
        new LexDefinition(new MatchExactTextInList(new[]
        {
            "+", "-", "*", "/", "%", "<=", ">=", "<", ">", "!=", "!", "&&", "||", "&", "|", "^", "=="
        }), TokenKind.Operator),
    });
}
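// The order of the definitions above matters: the lexer takes the first matcher
// that succeeds, which is why the float pattern precedes the int pattern and
// "<=" precedes "<". A minimal, self-contained sketch of that first-match-wins
// idea using the same numeric patterns; the method below is illustrative only
// and not part of the project (requires: using System.Text.RegularExpressions;).
public static string ClassifyNumberDemo(string text)
{
    // Patterns mirror the Float/Int definitions above; order is significant.
    if (Regex.IsMatch(text, @"^[-+]?\d*\.\d+([eE][-+]?\d+)?")) return "Float";
    if (Regex.IsMatch(text, @"^[-+]?\d+")) return "Int";
    return "Unknown";
}
// ClassifyNumberDemo("3.14e-2") returns "Float"; with the two checks swapped,
// the Int pattern would win by matching only the leading "3".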
public void ProblemFile()
{
    // Smoke test: tokenize a specific local .mq4 file that is known to be problematic.
    var fileName = @"C:\ProgramData\MetaTrader ECN - FXOpen\experts\MySmartAdviser.mq4";
    var lexer = new Mq4Lexer();
    lexer.BuildTextTokens(fileName.CodeFromFile());
}
public void TestParseGetMargin()
{
    var lexer = new Mq4Lexer();
    var resultTokens = lexer.BuildTextTokens(ScriptUtils.GetMarginFunctionWithComplexComments);
    var parser = new Mq4Parser();
    var astTree = parser.Parse(resultTokens);
    Assert.AreEqual(1, astTree.Children.Count);
}
public void TestParseIf()
{
    var lexer = new Mq4Lexer();
    var resultTokens = lexer.BuildTextTokens(ScriptUtils.CodeWithIfs);
    var parser = new Mq4Parser();
    var astTree = parser.Parse(resultTokens);
    Assert.AreEqual(1, astTree.Children.Count);
}
public void TestMethod1()
{
    var lexer = new Mq4Lexer();
    var resultTokens = lexer.BuildTextTokens(ScriptUtils.ComplexCode);
    var parser = new Mq4Parser();
    var astTree = parser.Parse(resultTokens);
    Assert.AreEqual(8, astTree.Children.Count);
}
public static void GenerateCode()
{
    var lexer = new Mq4Lexer();
    var resultTokens = lexer.BuildTextTokens(ScriptUtils.ComplexCode);
    var parser = new Mq4Parser();
    var astTree = parser.Parse(resultTokens);
    var semantic = new SemanticAnalysis(astTree);
    semantic.Perform();
    var codeGenerator = new CsCodeGenerator();
    codeGenerator.GenerateCodeForNode(astTree.Children[0]);
}
public static void ParseData(ParseNode node)
{
    // Re-lex the text that follows "#define" so the directive can be rewritten
    // as a typed constant declaration.
    var tokenData = node.Children.First();
    var defineText = tokenData.Content.Remove(0, "#define ".Length);
    var lexer = new Mq4Lexer();
    var defineTokens = lexer.BuildTextTokens(defineText);
    var nodes = defineTokens.Select(token => token.BuildTerminalNode()).ToList();
    var states = new CleanupAstNodeStates(nodes);

    // Infer the C# type name from the kind of the defined value.
    var definedConstantType = states.MappedNodes[1].Token;
    string typeName;
    switch (definedConstantType)
    {
        case TokenKind.Int:
        case TokenKind.Float:
            typeName = definedConstantType.NameOfType();
            break;
        case TokenKind.QuotedString:
            typeName = "string";
            break;
        case TokenKind.Operator:
            // The value starts with an operator (e.g. a sign); take the type from the token that follows it.
            typeName = states.MappedNodes[2].Token.NameOfType();
            break;
        case TokenKind.Identifier:
            // The value refers to a previously defined constant; reuse its type.
            typeName = ComputeConstTypeFromPreviousConstants(node, states.MappedNodes[1].Content);
            break;
        default:
            throw new InvalidDataException("Type not supported");
    }

    var insertTokenType = new ParseNode(TokenKind.TypeName, typeName);
    var buildTerminalToken = insertTokenType.BuildTerminalNode();
    var variableToken = states[0];
    var valueToken = states[1];
    var valueToken2 = definedConstantType == TokenKind.Operator ? states.MappedNodes[2] : null;

    // Rebuild the node as "<type> <name> = <value>;".
    node.Children.Clear();
    node.Add(buildTerminalToken);
    node.AddTerminalToken(new TokenData(0, 0, TokenKind.Space, " "));
    node.Add(variableToken);
    node.AddTerminalToken(new TokenData(0, 0, TokenKind.Assign, "="));
    node.Add(valueToken);
    if (definedConstantType == TokenKind.Operator)
    {
        node.Add(valueToken2);
    }
    var semicolon = new ParseNode(TokenKind.SemiColon, ";");
    node.Rule = RuleKind.DeclareConstant;
    node.Children.Add(semicolon);
}
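// A minimal usage sketch for the #define rewrite above. It assumes this code
// lives in the same class as ParseData, that the directive text shown is valid
// input, and that the parsed directive lands at astTree.Children[0]; the
// rendered output in the trailing comment is illustrative, not taken from the
// project's tests.
public void DefineRewriteSketch()
{
    var lexer = new Mq4Lexer();
    var parser = new Mq4Parser();
    var astTree = parser.Parse(lexer.BuildTextTokens("#define SLIPPAGE 3"));
    ParseData(astTree.Children[0]); // assumes the directive node is the first child
    // The node is now a RuleKind.DeclareConstant and renders roughly as: int SLIPPAGE = 3;
}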
public static void ParseData(ParseNode node)
{
    // Re-lex the text that follows "#property" so the directive can be rewritten
    // as a C# assignment.
    var tokenData = node.Children.First();
    var defineText = tokenData.Content.Remove(0, "#property ".Length);
    var lexer = new Mq4Lexer();
    var defineTokens = lexer.BuildTextTokens(defineText);
    var nodes = defineTokens.Select(token => token.BuildTerminalNode()).ToList();
    var states = new CleanupAstNodeStates(nodes);

    // Look up a matching attribute type ("<name>Attribute" or "<name>") in the
    // Mql2Fdk.Attributes assembly to decide the declared type; fall back to
    // inferring the type from the property statement itself.
    var assemblyOfAttributes = typeof(indicator_color1Attribute).Assembly;
    var fullTypeName = string.Format("Mql2Fdk.Attributes.{0}Attribute", states[0].Content);
    var fullTypeNameReduced = string.Format("Mql2Fdk.Attributes.{0}", states[0].Content);
    var getTypeofAttribute = assemblyOfAttributes.GetType(fullTypeName)
        ?? assemblyOfAttributes.GetType(fullTypeNameReduced);

    bool needsQuotes = false;
    var typeName = getTypeofAttribute != null
        ? ComputeTypenameFromReflection(getTypeofAttribute, states, ref needsQuotes)
        : ComputeTypeFromStatements(states, ref needsQuotes);

    var variableToken = states.MappedNodes[0];
    // Properties without an explicit value default to 1.
    var valueToken = states.Count != 1
        ? states.MappedNodes[1]
        : new TokenData(0, 0, TokenKind.Int, "1").BuildTerminalNode();
    if (needsQuotes)
    {
        var finalTokenData = valueToken.GetTokenData();
        finalTokenData.Token = TokenKind.QuotedString;
        finalTokenData.Content = string.Format("\"{0}\"", finalTokenData.Content);
    }

    // Rebuild the node as "<type> <name> = <value>;".
    node.Children.Clear();
    var insertTokenType = new TokenData(0, 0, TokenKind.TypeName, typeName);
    var buildTerminalToken = insertTokenType.BuildTerminalNode();
    node.Add(buildTerminalToken);
    node.AddTerminalToken(new TokenData(0, 0, TokenKind.Space, " "));
    node.Add(variableToken);
    node.AddTerminalToken(new TokenData(0, 0, TokenKind.Assign, "="));
    node.Add(valueToken);
    var semicolon = new TokenData(0, 0, TokenKind.SemiColon, ";").BuildTerminalNode();
    node.Children.Add(semicolon);
}
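// An isolated sketch of the reflection lookup used above: given a property name
// such as "indicator_color1", probe the attributes assembly for either
// "<name>Attribute" or "<name>". The type, namespace, and GetType calls come
// from the code above; the helper method itself is hypothetical
// (requires: using System;).
public static Type ResolvePropertyAttribute(string propertyName)
{
    var assembly = typeof(indicator_color1Attribute).Assembly;
    return assembly.GetType(string.Format("Mql2Fdk.Attributes.{0}Attribute", propertyName))
        ?? assembly.GetType(string.Format("Mql2Fdk.Attributes.{0}", propertyName));
}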
public void ScanAllTokensInLibs()
{
    var pathToSearch = @"C:\ProgramData\MetaTrader ECN - FXOpen\";
    var lexer = new Mq4Lexer();
    var allFiles = Directory.GetFiles(pathToSearch, "*.mq4", SearchOption.AllDirectories);
    foreach (var file in allFiles)
    {
        try
        {
            lexer.BuildFileTokens(file);
        }
        catch (Exception exception)
        {
            // Report which file failed rather than a generic message.
            Assert.Fail(string.Format("File could not be tokenized: {0} ({1})", file, exception.Message));
        }
    }
}
public static void TestFileLexing(string fileToTest)
{
    var lexer = new Mq4Lexer();
    var fileText = fileToTest.CodeFromExtras(Encoding.Default);
    var resultParse = lexer.BuildTextTokens(fileText);
    Assert.AreNotEqual(0, resultParse.Count);
}
public Mq4Translator()
{
    _lexer = new Mq4Lexer();
    _parser = new Mq4Parser();
    _codeGenerator = new CsCodeGenerator();
}
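// A sketch of how the three collaborators might be composed. The Translate
// method below is an assumption, not the project's actual API; it only uses
// calls that appear elsewhere in this listing (BuildTextTokens, Parse,
// SemanticAnalysis.Perform, GenerateCodeForNode).
public void Translate(string mq4Code)
{
    var tokens = _lexer.BuildTextTokens(mq4Code);
    var astTree = _parser.Parse(tokens);
    var semantic = new SemanticAnalysis(astTree);
    semantic.Perform();
    for (var i = 0; i < astTree.Children.Count; i++)
    {
        _codeGenerator.GenerateCodeForNode(astTree.Children[i]);
    }
}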
public void TestMethod1()
{
    var lexer = new Mq4Lexer();
    var resultTokens = lexer.BuildTextTokens("SimpleTicks.mq4".CodeFromExtras(Encoding.Default));
    // Lexing the sample script should produce at least one token.
    Assert.AreNotEqual(0, resultTokens.Count);
}
public MatchMultiLineCommentText(Mq4Lexer mq4Lexer)
{
    _mq4Lexer = mq4Lexer;
}