/// <summary>
/// Creates a <see cref="SyntaxTreeBuilder"/> preconfigured with the datalist grammar.
/// </summary>
/// <returns>A fresh builder with <see cref="DatalistGrammer"/> registered.</returns>
public static SyntaxTreeBuilder CreateDatalistTreeBuilder()
{
    var datalistBuilder = new SyntaxTreeBuilder();
    var grammar = new DatalistGrammer();
    datalistBuilder.RegisterGrammer(grammar);
    return datalistBuilder;
}
/// <summary>
/// Creates a <see cref="SyntaxTreeBuilder"/> preconfigured for infrigistics input:
/// string, numeric and boolean literal grammars plus the infrigistics grammar itself.
/// </summary>
/// <returns>A fresh builder with all four grammars registered.</returns>
public static SyntaxTreeBuilder CreateInfrigistsTreeBuilder()
{
    var infrigisticsBuilder = new SyntaxTreeBuilder();
    // Registration order preserved: literal grammars first, composite grammar last.
    infrigisticsBuilder.RegisterGrammer(new StringLiteralGrammer());
    infrigisticsBuilder.RegisterGrammer(new InfrigisticNumericLiteralGrammer());
    infrigisticsBuilder.RegisterGrammer(new BooleanLiteralGrammer());
    infrigisticsBuilder.RegisterGrammer(new InfrigisticsGrammer());
    return infrigisticsBuilder;
}
/// <summary>
/// Parses one iteration of a "many/optional" grammar node. Keeps handing control to the
/// child while it matches and makes progress; otherwise returns the parent's continuation
/// after this node. The no-progress check (same node, same token index, same line)
/// prevents infinite loops on children that can derive the empty string.
/// </summary>
/// <param name="pSyntaxTreeBuilder">Builder holding the token scanner and parse state.</param>
/// <returns>The next <see cref="ParseNode_Base"/> to parse.</returns>
public override ParseNode_Base Parse(SyntaxTreeBuilder pSyntaxTreeBuilder) {
    var ifNode = node as ParseNode_ManyOpt_If;
    if (ifNode != null) {
        // Conditional child: exit to the parent as soon as the predicate no longer matches.
        if (!ifNode.Matches(pSyntaxTreeBuilder)) {
            return(parent.NextAfterChild(this, pSyntaxTreeBuilder));
        }
        // Remember the scanner position so a parse step that consumed nothing can be detected.
        var tokenIndex = pSyntaxTreeBuilder.TokenScanner.CurrentTokenIndex;
        var line = pSyntaxTreeBuilder.TokenScanner.CurrentLine;
        var nextNode = node.Parse(pSyntaxTreeBuilder);
        // Progress was made (moved to a different node, or the scanner advanced) - keep going.
        if (nextNode != this || pSyntaxTreeBuilder.TokenScanner.CurrentTokenIndex != tokenIndex || pSyntaxTreeBuilder.TokenScanner.CurrentLine != line) {
            return(nextNode);
        }
        //Debug.Log("Exiting Many " + this + " in goal: " + pSyntaxTreeBuilder.CurrentParseTreeNode);
    } else {
        // Plain child: only try it while the lookahead token is in its FIRST set.
        if (!FirstSet.Contains(pSyntaxTreeBuilder.TokenScanner.Current.tokenId)) {
            return(parent.NextAfterChild(this, pSyntaxTreeBuilder));
        }
        var tokenIndex = pSyntaxTreeBuilder.TokenScanner.CurrentTokenIndex;
        var line = pSyntaxTreeBuilder.TokenScanner.CurrentLine;
        var nextNode = node.Parse(pSyntaxTreeBuilder);
        // Same no-progress guard as the branch above, written with inverted logic.
        if (!(nextNode == this && pSyntaxTreeBuilder.TokenScanner.CurrentTokenIndex == tokenIndex && pSyntaxTreeBuilder.TokenScanner.CurrentLine == line)) {
            // throw new Exception("Infinite loop!!! while parsing " + pSyntaxTreeBuilder.Current + " on line " + pSyntaxTreeBuilder.CurrentLine());
            return(nextNode);
        }
    }
    // No progress was made - leave the loop and continue after this node in the parent.
    return(parent.NextAfterChild(this, pSyntaxTreeBuilder));
}
/// <summary>
/// Desugars an extended-syntax property read accessor into a regular function member:
/// gives the accessor procedure the property's type as its return type, copies the
/// property's index parameters (if any) into its formal parameter list, and adds it to
/// the class members. For static properties the generated method is marked static.
/// </summary>
/// <param name="read_accessor">The read accessor whose <c>pr</c> holds the procedure definition.</param>
/// <param name="simple_property">The property being desugared.</param>
/// <param name="members">Class member list the generated procedure is added to.</param>
private void ExtendedReadAccessorDesugaring(read_accessor_name read_accessor, simple_property simple_property, class_members members) {
    var read_accessor_procedure = read_accessor.pr as procedure_definition;
    var func_header = (read_accessor_procedure.proc_header as function_header);
    func_header.return_type = simple_property.property_type;
    //extended indexed property
    if (simple_property.parameter_list != null) {
        // Flatten grouped declarations ("a, b: T") into parallel name/type lists.
        var func_header_params = new List <ident>();
        var func_header_types = new List <type_definition>();
        foreach (var param in simple_property.parameter_list.parameters) {
            foreach (var ident in param.names.idents) {
                func_header_params.Add(ident);
                func_header_types.Add(param.type);
            }
        }
        func_header.parameters = SyntaxTreeBuilder.BuildFormalParameters(func_header_params, func_header_types);
    }
    members.Add(read_accessor_procedure);
    if (simple_property.attr == definition_attribute.Static) {
        // Mirror the property's 'static' on the generated accessor method.
        read_accessor_procedure.proc_header.class_keyword = true;
        var proc_attr = new procedure_attribute(proc_attribute.attr_static);
        proc_attr.source_context = read_accessor_procedure.proc_header.source_context;
        read_accessor_procedure.proc_header.proc_attributes = new procedure_attributes_list(proc_attr);
    }
}
/// <summary>
/// Runs the declaration-collecting and syntax-analysis passes over a test EXPRESS file
/// and asserts that no parsing errors were reported.
/// </summary>
/// <param name="fileName">Test case file name (without the .exp extension).</param>
public void DeclarationCollectingPass_ShouldPassTestCasesWithoutErrors(string fileName)
{
    // NOTE(review): 'parser' is never used below - presumably constructed for side effects; confirm.
    var parser = new ExpressParser();
    using (var stream = File.Open($@"..\..\..\..\test\{fileName}.exp", FileMode.Open, FileAccess.Read))
    {
        var reportedErrors = new List<ParsingError>();
        var tokenArray = Tokenizer.Tokenize(new StreamReader(stream, Encoding.ASCII)).ToArray();
        var declarationCollector = new DeclarationPass(tokenArray, reportedErrors);
        var syntaxAnalyzer = new SyntaxAnalysisPass(tokenArray, reportedErrors);
        var treeBuilder = new SyntaxTreeBuilder();

        declarationCollector.Run(treeBuilder);
        syntaxAnalyzer.Run(treeBuilder);

        // Echo every error so failures are diagnosable from the test output.
        foreach (var reported in reportedErrors)
        {
            Console.WriteLine(reported.ToString());
        }
        Assert.AreEqual(0, reportedErrors.Count);
    }
}
/// <summary>
/// Compiles a Lua source string into a closure: wires the raw and Lua tokenizers
/// together, parses into a syntax tree, then runs code generation against the
/// shared environment.
/// </summary>
/// <param name="code">Lua source text.</param>
/// <returns>The compiled closure.</returns>
private static LClosure Compile(string code)
{
    var sourceReader = new StringReader(code);
    var rawTokenizer = new LuaRawTokenizer();
    var tokenizer = new LuaTokenizer();
    rawTokenizer.Reset(sourceReader);
    tokenizer.Reset(rawTokenizer);

    var luaParser = new LuaParser();
    var treeBuilder = new SyntaxTreeBuilder();
    treeBuilder.Start();
    // The parser expects the tokenizer to already be positioned on the first token.
    tokenizer.EnsureMoveNext();
    luaParser.Reset(tokenizer, treeBuilder);
    luaParser.Parse();

    var syntaxTree = treeBuilder.Finish();
    var generator = new CodeGenerator();
    return generator.Compile(syntaxTree, _fastLuaEnv);
}
/// <summary>
/// Walks up from this parse node collecting into <paramref name="tokenSet"/> every token
/// that could validly appear at the current position (code-completion support). Stops at
/// the first node that cannot derive the empty string.
/// </summary>
/// <param name="tokenSet">Receives the union of the FIRST sets encountered.</param>
/// <param name="pSyntaxTreeBuilder">Current builder; cloned so the walk cannot disturb real parse state.</param>
/// <param name="identifierId">Token id representing an identifier.</param>
/// <returns>True when the completion position declares a name (e.g. a constant declarator).</returns>
public bool CollectCompletions(TokenSet tokenSet, SyntaxTreeBuilder pSyntaxTreeBuilder, int identifierId) {
    var clone = pSyntaxTreeBuilder.Clone();
    var hasName = false;
    var current = this;
    while (current != null && current.parent != null) {
        tokenSet.Add(current.FirstSet);
        if (current.FirstSet.Contains(identifierId)) {
            // An identifier fits here - find the grammar rule it belongs to so
            // contextual keywords and declarator names can be told apart.
            ParseNode_Rule currentRule = null;
            var currentId = current as ParseNode_Id;
            if (currentId == null) {
                var rule = current.parent;
                while (rule != null && !(rule is ParseNode_Rule)) {
                    rule = rule.parent;
                }
                currentId = rule != null ? rule.parent as ParseNode_Id : null;
                currentRule = rule as ParseNode_Rule;
            }
            if (currentId != null) {
                var peerAsRule = currentId.LinkedTarget as ParseNode_Rule;
                if (peerAsRule != null && peerAsRule.contextualKeyword) {
                    Debug.Log(currentId.Name);
                } else if (currentRule != null && currentRule.contextualKeyword) {
                    Debug.Log("currentRule " + currentRule.NtName);
                } else {
                    var id = currentId.Name;
                    // Positions that declare a new name rather than reference an existing one.
                    if (Array.IndexOf(new[] { "constantDeclarators", "constantDeclarator", }, id) >= 0) {
                        hasName = true;
                    }
                }
            }
        }
        // If this node cannot match the empty string, nothing further up can start here.
        if (!current.FirstSet.ContainsEmpty()) {
            break;
        }
        current = current.parent.NextAfterChild(current, clone);
    }
    tokenSet.RemoveEmpty();
    return(hasName);
}
/// <summary>
/// Recursively records, for every scope nested under <paramref name="scope"/>, how a
/// captured variable must be referenced from inside lambdas: each scope gets a
/// substitution mapping the variable to a dot-chain through the generated closure-class
/// fields. Also lazily creates the field (and its "field := self" initialization node)
/// that links a closure class back to the capturing upper class.
/// </summary>
/// <param name="upperScopeWhereVarsAreCapturedClass">Generated class that owns the captured variable.</param>
/// <param name="scope">Scope subtree to process.</param>
/// <param name="varName">Name of the captured variable.</param>
/// <param name="syntaxTreeNodeWithVarDeclaration">Declaration site used to key substitutions.</param>
/// <param name="substDotNode">Dot-expression that reaches the variable from the current scope.</param>
/// <param name="nestedLambda">True when already inside a lambda (changes how the dot-chain is rebuilt).</param>
private void AddReferencesToIdentInLambda(type_declaration upperScopeWhereVarsAreCapturedClass, CapturedVariablesTreeNode scope, string varName, syntax_tree_node syntaxTreeNodeWithVarDeclaration, dot_node substDotNode, bool nestedLambda) {
    for (var i = 0; i < scope.ChildNodes.Count; i++) {
        if (!(scope.ChildNodes[i] is CapturedVariablesTreeNodeLambdaScope)) {
            var substKey = new SubstitutionKey(varName, syntaxTreeNodeWithVarDeclaration, scope.ChildNodes[i].CorrespondingSyntaxTreeNode);
            if (_capturedVarsClassDefs.ContainsKey(scope.ChildNodes[i].ScopeIndex)) {
                var cl = _capturedVarsClassDefs[scope.ChildNodes[i].ScopeIndex];
                if (cl.AssignNodeForUpperClassFieldInitialization == null) {
                    // First time this closure class needs its upper class: add a field typed as
                    // the upper class and remember the "field := self" initialization assignment.
                    var fieldType = SyntaxTreeBuilder.BuildSimpleType(upperScopeWhereVarsAreCapturedClass.type_name.name);
                    var field = SyntaxTreeBuilder.BuildClassFieldsSection(new List <ident> { new ident(cl.GeneratedUpperClassFieldName) }, new List <type_definition> { fieldType });
                    var clClass = (class_definition)cl.ClassDeclaration.type_def;
                    clClass.body.Add(field);
                    cl.AssignNodeForUpperClassFieldInitialization = new assign(new dot_node(new ident(cl.GeneratedSubstitutingFieldName), new ident(cl.GeneratedUpperClassFieldName)), new ident(compiler_string_consts.self_word));
                }
            }
            if (!_substitutions.ContainsKey(substKey)) {
                _substitutions.Add(substKey, substDotNode);
            }
            AddReferencesToIdentInLambda(upperScopeWhereVarsAreCapturedClass, scope.ChildNodes[i], varName, syntaxTreeNodeWithVarDeclaration, substDotNode, nestedLambda);
        } else {
            var scopeAsLambda = scope.ChildNodes[i] as CapturedVariablesTreeNodeLambdaScope;
            if (scopeAsLambda.ScopeIndexOfClassWhereLambdaWillBeAddedAsMethod.HasValue) {
                dot_node substDotNode1;
                if (!nestedLambda) {
                    // Rebuild the dot-chain so it starts at the generated upper-class field,
                    // dropping the chain's original root identifier.
                    var parts = new Stack <ident>();
                    var dn = substDotNode;
                    parts.Push((ident)dn.right);
                    while (!(dn.left is ident && dn.right is ident)) {
                        dn = (dot_node)dn.left;
                        parts.Push((ident)dn.right);
                    }
                    substDotNode1 = new dot_node(new ident(_capturedVarsClassDefs[scopeAsLambda.ScopeIndexOfClassWhereLambdaWillBeAddedAsMethod.Value].GeneratedUpperClassFieldName), parts.Pop());
                    while (parts.Count > 0) {
                        substDotNode1 = new dot_node(substDotNode1, parts.Pop());
                    }
                } else {
                    // Inside a nested lambda the chain's root identifier is kept as well.
                    var parts = new Stack <ident>();
                    var dn = substDotNode;
                    parts.Push((ident)dn.right);
                    while (!(dn.left is ident && dn.right is ident)) {
                        dn = (dot_node)dn.left;
                        parts.Push((ident)dn.right);
                    }
                    parts.Push((ident)dn.left);
                    substDotNode1 = new dot_node(new ident(_capturedVarsClassDefs[scopeAsLambda.ScopeIndexOfClassWhereLambdaWillBeAddedAsMethod.Value].GeneratedUpperClassFieldName), parts.Pop());
                    while (parts.Count > 0) {
                        substDotNode1 = new dot_node(substDotNode1, parts.Pop());
                    }
                }
                // NOTE(review): the key and the non-lambda recursion below use ChildNodes[0]
                // even though the loop iterates over i; a later revision of this method
                // (duplicated elsewhere in this file) changed the final else-branch to
                // ChildNodes[i] (fix #1988) - confirm which behavior is intended here.
                var substKey = new SubstitutionKey(varName, syntaxTreeNodeWithVarDeclaration, scope.ChildNodes[0].CorrespondingSyntaxTreeNode);
                if (!_substitutions.ContainsKey(substKey)) {
                    _substitutions.Add(substKey, substDotNode1);
                }
                AddReferencesToIdentInLambda(_capturedVarsClassDefs[scopeAsLambda.ScopeIndexOfClassWhereLambdaWillBeAddedAsMethod.Value].ClassDeclaration, scopeAsLambda.ChildNodes[0], varName, syntaxTreeNodeWithVarDeclaration, substDotNode1, true);
            } else {
                AddReferencesToIdentInLambda(upperScopeWhereVarsAreCapturedClass, scope.ChildNodes[0], varName, syntaxTreeNodeWithVarDeclaration, substDotNode, nestedLambda);
            }
        }
    }
}
/*public var_def_statement NewVarOrIdentifier(ident identifier, named_type_reference fptype, LexLocation loc)
 * {
 *     var n_t_r = fptype;
 *     var vds = new var_def_statement();
 *     vds.vars = new ident_list();
 *     vds.vars.idents.Add(identifier);
 *     vds.vars_type = n_t_r;
 *     vds.source_context = loc;
 *     return vds;
 * }*/
/* The function became short and is no longer needed:
 * public statement_list NewLambdaBody(expression expr_l1, LexLocation loc)
 * {
 *     var sl = new statement_list();
 *     sl.expr_lambda_body = true;
 *     var id = new ident("result");
 *     var op = new op_type_node(Operators.Assignment);
 *     var ass = new assign(id, expr_l1, op.type);
 *     parsertools.create_source_context(ass, id, expr_l1); // clumsy function - if at least one sc is null it returns null
 *     if (ass.source_context == null)
 *         if (expr_l1.source_context != null)
 *             ass.source_context = expr_l1.source_context;
 *         else if (id.source_context != null)
 *             ass.source_context = id.source_context;
 *     sl.subnodes.Add(ass);
 *     sl.source_context = loc;
 *     var sl = new statement_list(new assign("result",expr_l1,loc),loc);
 *
 *     return sl;
 * }*/
/// <summary>
/// Builds a short-form parameterless read function for a class property: method name
/// taken from <paramref name="id"/>, a <c>no_type</c> placeholder result type, and the
/// given expression as the body.
/// </summary>
/// <param name="ex">Expression forming the function result.</param>
/// <param name="id">Property/method name.</param>
/// <param name="sc">Source context to attach to the generated name.</param>
/// <returns>The generated procedure definition.</returns>
public procedure_definition CreateAndAddToClassReadFunc(expression ex, ident id, SourceContext sc) {
    var fd = SyntaxTreeBuilder.BuildShortFuncDefinition(new formal_parameters(), null, new method_name(id.name, sc), new no_type(), ex, sc);
    return(fd);
}
/// <summary>
/// Splits an activity field into data-list regions and extracts the recordset and
/// scalar names referenced in each region; finally validates the whole field and,
/// on failure, keeps only names that literally appear as "[[name]]".
/// </summary>
/// <param name="activityField">The activity field.</param>
/// <returns>The list of referenced data-list item names.</returns>
public IList <string> FormatDsfActivityField(string activityField) {
    //2013.06.10: Ashley Lewis for bug 9306 - handle the case of miss-matched region braces
    IList <string> result = new List <string>();
    var regions = DataListCleaningUtils.SplitIntoRegionsForFindMissing(activityField);
    foreach (var region in regions) {
        // Sashen: 09-10-2012 : Using the new parser
        var intellisenseParser = new SyntaxTreeBuilder();
        Node[] nodes = intellisenseParser.Build(region);
        // No point in continuing ;)
        if (nodes == null) {
            return(result);
        }
        if (intellisenseParser.EventLog.HasEventLogs) {
            // The new parser logged problems - fall back to the studio language parser.
            IDev2StudioDataLanguageParser languageParser = DataListFactory.CreateStudioLanguageParser();
            try {
                result = languageParser.ParseForActivityDataItems(region);
            } catch (Dev2DataLanguageParseError) {
                return(new List <string>());
            } catch (NullReferenceException) {
                return(new List <string>());
            }
        }
        var allNodes = new List <Node>();
        if (nodes.Any() && !intellisenseParser.EventLog.HasEventLogs) {
            nodes[0].CollectNodes(allNodes);
            // ReSharper disable once ForCanBeConvertedToForeach
            for (int i = 0; i < allNodes.Count; i++) {
                if (allNodes[i] is DatalistRecordSetNode) {
                    var refNode = allNodes[i] as DatalistRecordSetNode;
                    string nodeName = refNode.GetRepresentationForEvaluation();
                    // Strip the surrounding "[[" and "]]" markers.
                    nodeName = nodeName.Substring(2, nodeName.Length - 4);
                    result.Add(nodeName);
                } else if (allNodes[i] is DatalistReferenceNode) {
                    var refNode = allNodes[i] as DatalistReferenceNode;
                    string nodeName = refNode.GetRepresentationForEvaluation();
                    nodeName = nodeName.Substring(2, nodeName.Length - 4);
                    result.Add(nodeName);
                }
            }
        }
    }
    try {
        FsInteropFunctions.ParseLanguageExpressionWithoutUpdate(activityField);
    } catch (Exception) {
        // Whole-field parse failed: only trust names that literally occur as [[name]].
        return(result.Where(lang => activityField.Contains("[[" + lang + "]]")).ToList());
    }
    return(result);
}
/// <summary>
/// Scans the builder's token stream for this grammar node; each node type supplies
/// its own implementation. NOTE(review): appears to be a lookahead pass that does
/// not build tree nodes - confirm against the concrete overrides.
/// </summary>
/// <param name="pSyntaxTreeBuilder">Builder holding the token scanner state.</param>
/// <returns>True when the scan succeeds.</returns>
public abstract bool Scan(SyntaxTreeBuilder pSyntaxTreeBuilder);
/// <summary>
/// Checks whether the scanner's current token is in this node's FIRST set.
/// </summary>
/// <param name="pSyntaxTreeBuilder">Builder holding the token scanner.</param>
/// <returns>True when the current token can start this node.</returns>
public virtual bool Matches(SyntaxTreeBuilder pSyntaxTreeBuilder)
{
    var currentToken = pSyntaxTreeBuilder.TokenScanner.Current;
    return FirstSet.Contains(currentToken);
}
/// <summary>
/// Registering a grammar from a grammar group must not throw.
/// NOTE(review): despite the name, only one grammar is registered - confirm intent.
/// </summary>
public void AbstractSyntaxTreeBuilder_RegisterMultipleFromGrammerGroup_Test()
{
    var treeBuilder = new SyntaxTreeBuilder();
    treeBuilder.RegisterGrammer(new CSharpNumericLiteralGrammer());
}
/// <summary>
/// Rewrites a yield-containing function into two generated classes: a non-generic
/// helper implementing IEnumerator/IEnumerable (whose MoveNext gets the original
/// body) and a generic wrapper implementing the generic interfaces. The function's
/// own body is replaced with code that constructs the enumerator object, copies the
/// parameters into its fields and returns it.
/// </summary>
/// <param name="pd">The function definition containing yields; its body is rewritten in place.</param>
/// <param name="fields">Captured variables that become public fields of the generated class.</param>
/// <returns>The two generated type declarations (helper first, generic wrapper second).</returns>
type_declarations GenClassesForYield(procedure_definition pd, IEnumerable <var_def_statement> fields) {
    var fh = (pd.proc_header as function_header);
    if (fh == null) {
        throw new SyntaxError("Only functions can contain yields", "", pd.proc_header.source_context, pd.proc_header);
    }
    var seqt = fh.return_type as sequence_type;
    if (seqt == null) {
        throw new SyntaxError("Functions with yields must return sequences", "", fh.return_type.source_context, fh.return_type);
    }
    // Now generate a class in place of the function
    // Captured variables
    var cm = class_members.Public;
    foreach (var m in fields) {
        cm.Add(m);
    }
    // Function parameters
    List <ident> lid = new List <ident>();
    var pars = fh.parameters;
    if (pars != null) {
        foreach (var ps in pars.params_list) {
            if (ps.param_kind != parametr_kind.none) {
                throw new SyntaxError("Parameters of functions with yields must not have 'var', 'const' or 'params' modifier", "", pars.source_context, pars);
            }
            if (ps.inital_value != null) {
                throw new SyntaxError("Parameters of functions with yields must not have initial values", "", pars.source_context, pars);
            }
            var_def_statement vds = new var_def_statement(ps.idents, ps.vars_type);
            cm.Add(vds); // make every function parameter a field of the generated class
            lid.AddRange(vds.vars.idents);
        }
    }
    var stels = seqt.elements_type;
    // System fields and methods implementing the IEnumerable interface
    cm.Add(new var_def_statement(Consts.State, "integer"),
        new var_def_statement(Consts.Current, stels),
        procedure_definition.EmptyDefaultConstructor,
        new procedure_definition("Reset"),
        new procedure_definition("MoveNext", "boolean", pd.proc_body),
        new procedure_definition("get_Current", "object", new assign("Result", Consts.Current)),
        new procedure_definition("GetEnumerator", "System.Collections.IEnumerator", new assign("Result", "Self"))
        );
    var className = newClassName();
    var classNameHelper = className + "Helper";
    var interfaces = new named_type_reference_list("System.Collections.IEnumerator", "System.Collections.IEnumerable");
    var td = new type_declaration(classNameHelper, SyntaxTreeBuilder.BuildClassDefinition(interfaces, cm));
    // Rewrite the procedure body: construct the enumerator, copy parameters into it, return it.
    var stl = new statement_list(new var_statement("res", new new_expr(className)));
    stl.AddMany(lid.Select(id => new assign(new dot_node("res", id), id)));
    stl.Add(new assign("Result", "res"));
    pd.proc_body = new block(stl);
    // The second (generic) class
    var tpl = new template_param_list(stels);
    var IEnumeratorT = new template_type_reference("System.Collections.Generic.IEnumerator", tpl);
    var cm1 = class_members.Public.Add(procedure_definition.EmptyDefaultConstructor,
        new procedure_definition(new function_header("get_Current", stels), new assign("Result", Consts.Current)),
        new procedure_definition(new function_header("GetEnumerator", IEnumeratorT), new assign("Result", "Self")),
        new procedure_definition("Dispose")
        );
    var interfaces1 = new named_type_reference_list(classNameHelper);
    var IEnumerableT = new template_type_reference("System.Collections.Generic.IEnumerable", tpl);
    interfaces1.Add(IEnumerableT).Add(IEnumeratorT);
    var td1 = new type_declaration(className, SyntaxTreeBuilder.BuildClassDefinition(interfaces1, cm1));
    var cct = new type_declarations(td);
    cct.Add(td1);
    return(cct);
}
/// <summary>
/// Delegates scanning to the linked target node; trivially succeeds once the
/// builder has stopped scanning.
/// </summary>
/// <param name="pSyntaxTreeBuilder">Builder holding scanner state and the KeepScanning flag.</param>
/// <returns>True when scanning is over or the linked target scans successfully.</returns>
public override bool Scan(SyntaxTreeBuilder pSyntaxTreeBuilder)
{
    if (!pSyntaxTreeBuilder.KeepScanning)
    {
        return true;
    }
    return LinkedTarget.Scan(pSyntaxTreeBuilder);
}
/// <summary>
/// Matches only when the current token is in this node's FIRST set and the
/// node's predicate also holds.
/// </summary>
/// <param name="pSyntaxTreeBuilder">Builder holding the token scanner.</param>
/// <returns>True when both the FIRST-set check and the predicate succeed.</returns>
public override bool Matches(SyntaxTreeBuilder pSyntaxTreeBuilder)
{
    if (!FirstSet.Contains(pSyntaxTreeBuilder.TokenScanner.Current))
    {
        return false;
    }
    return CheckPredicate(pSyntaxTreeBuilder);
}
/// <summary>
/// Asks the parent for the node that follows this one; the grammar root
/// (no parent) has no successor.
/// </summary>
/// <param name="child">The child just completed (unused at this level).</param>
/// <param name="pSyntaxTreeBuilder">Builder with the current parse state.</param>
/// <returns>The next node to parse, or null at the root.</returns>
public virtual ParseNode_Base NextAfterChild(ParseNode_Base child, SyntaxTreeBuilder pSyntaxTreeBuilder)
{
    if (parent == null)
    {
        return null;
    }
    return parent.NextAfterChild(this, pSyntaxTreeBuilder);
}
/// <summary>
/// Translates a typeclass instance declaration into a regular class declaration:
/// builds the parent list (the typeclass template reference plus its "I"-interface),
/// carries over template arguments and where-restrictions derived from parent
/// typeclasses, marks every procedure member 'override', attaches the typeclass
/// marker attributes, then replaces the original declaration and re-visits the result.
/// </summary>
/// <param name="instanceDeclaration">Declaration to translate.</param>
/// <returns>True when the declaration was an instance definition and was translated.</returns>
bool VisitInstanceDeclaration(type_declaration instanceDeclaration) {
    var instanceDefinition = instanceDeclaration.type_def as instance_definition;
    if (instanceDefinition == null) {
        return(false);
    }
    var instanceName = instanceDeclaration.type_name as typeclass_restriction;
    // If it is instance of derived typelass than it should have template parameters
    var templateArgs = new ident_list();
    where_definition_list whereSection = null;
    var originalTypeclass = instancesAndRestrictedFunctions.typeclasses[instanceName.name].type_def as typeclass_definition;
    var typeclassParents = originalTypeclass.additional_restrictions;
    if (typeclassParents != null && typeclassParents.Count > 0) {
        // Each parent typeclass contributes one template parameter, constrained
        // in the where-section by the corresponding generated interface.
        whereSection = new where_definition_list();
        for (int i = 0; i < typeclassParents.Count; i++) {
            ident template_name;
            if (typeclassParents[i] is typeclass_reference tr) {
                string name = tr.names[0].name;
                template_name = TypeclassRestrctionToTemplateName(name, tr.restriction_args);
                whereSection.Add(GetWhereRestriction(TypeclassReferenceToInterfaceName(name, instanceName.restriction_args), template_name));
            } else {
                throw new NotImplementedException("Should be syntactic error");
            }
            templateArgs.Add(template_name);
        }
    }
    List <type_definition> templateLists = instanceName.restriction_args.params_list.Concat(templateArgs.idents.Select(x => new named_type_reference(x.name)).OfType <type_definition>()).ToList();
    var parents = new named_type_reference_list(new List <named_type_reference> {
        new template_type_reference(instanceName.name, new template_param_list(templateLists)),
        new template_type_reference("I" + instanceName.name, instanceName.restriction_args)
    });
    var instanceDefTranslated = SyntaxTreeBuilder.BuildClassDefinition(parents, null, instanceDefinition.body.class_def_blocks.ToArray());
    instanceDefTranslated.template_args = templateArgs;
    instanceDefTranslated.where_section = whereSection;
    instanceDefTranslated.source_context = instanceDefinition.source_context;
    for (int i = 0; i < instanceDefTranslated.body.class_def_blocks.Count; i++) {
        var cm = instanceDefTranslated.body.class_def_blocks[i].members;
        for (int j = 0; j < cm.Count; j++) {
            procedure_header header = null;
            // FIX: the original wrote "cm[j] = ph;" here (a no-op self-assignment),
            // leaving 'header' null and crashing with a NullReferenceException below
            // whenever a member was a bare procedure_header.
            if (cm[j] is procedure_header ph) {
                header = ph;
            } else if (cm[j] is procedure_definition pd) {
                header = pd.proc_header;
            }
            // Guard: members that are neither headers nor definitions carry no header to mark.
            if (header != null) {
                header.proc_attributes.Add(new procedure_attribute("override", proc_attribute.attr_override));
                ConvertOperatorNameIdent(header);
            }
        }
    }
    /*
     * {
     *     // Add constructor
     *     var cm = instanceDefTranslated.body.class_def_blocks[0];
     *     var def = new procedure_definition(
     *         new constructor(),
     *         new statement_list(new empty_statement()));
     *     def.proc_body.Parent = def;
     *     def.proc_header.proc_attributes = new procedure_attributes_list();
     *
     *     cm.Add(def);
     * }
     */
    var typeName = new ident(CreateInstanceName(instanceName.restriction_args as typeclass_param_list, instanceName.name), instanceName.source_context);
    var instanceDeclTranslated = new type_declaration(typeName, instanceDefTranslated, instanceDeclaration.source_context);
    instanceDeclTranslated.attributes = instanceDeclaration.attributes;
    // Tag the generated class so later passes can recognize the instance and its typeclass.
    AddAttribute(instanceDeclTranslated, "__TypeclassInstanceAttribute", new expression_list(new string_const(TypeclassRestrictionToString(instanceName))));
    AddAttribute(instanceDeclTranslated, "__TypeclassAttribute", new expression_list(new string_const(TypeclassRestrictionToString((originalTypeclass.Parent as type_declaration).type_name as typeclass_restriction))));
    Replace(instanceDeclaration, instanceDeclTranslated);
    visit(instanceDeclTranslated);
    return(true);
}
/// <summary>
/// Registering the infrigistics grammar must not throw.
/// NOTE(review): the name suggests a duplicate registration, but only one call is
/// made - confirm whether a second RegisterGrammer call was intended.
/// </summary>
public void AbstractSyntaxTreeBuilder_RegisterDuplicate_Test()
{
    var treeBuilder = new SyntaxTreeBuilder();
    treeBuilder.RegisterGrammer(new InfrigisticsGrammer());
}
/// <summary>
/// Parses this grammar node against the builder's current token and returns the
/// next node to continue parsing with (or a parent continuation when this node
/// is finished).
/// </summary>
/// <param name="pSyntaxTreeBuilder">Builder holding the token scanner and parse state.</param>
/// <returns>The next <see cref="ParseNode_Base"/> to parse.</returns>
public abstract ParseNode_Base Parse(SyntaxTreeBuilder pSyntaxTreeBuilder);
/// <summary>
/// Creates the helper with a fresh <see cref="SyntaxTreeBuilder"/> in <c>Builder</c>.
/// </summary>
public SyntaxTreeBuilderHelper() { Builder = new SyntaxTreeBuilder(); }
/// <summary>
/// Forwards the match decision to the wrapped child node.
/// </summary>
/// <param name="pSyntaxTreeBuilder">Builder holding the token scanner.</param>
/// <returns>Whatever the child's Matches returns.</returns>
public override bool Matches(SyntaxTreeBuilder pSyntaxTreeBuilder)
{
    var childMatches = node.Matches(pSyntaxTreeBuilder);
    return childMatches;
}
/// <summary>
/// Starts parsing with this node's first child.
/// </summary>
/// <param name="pSyntaxTreeBuilder">Builder holding the token scanner and parse state.</param>
/// <returns>The node returned by the first child's Parse.</returns>
public override ParseNode_Base Parse(SyntaxTreeBuilder pSyntaxTreeBuilder)
{
    var firstChild = nodes[0];
    return firstChild.Parse(pSyntaxTreeBuilder);
}
/// <summary>
/// Appends (or reuses) a rule node as the next child of this syntax-tree node.
/// Supports incremental re-parsing: when the next existing child matches this rule at
/// the scanner's current position it is kept - possibly skipping its entire intact
/// subtree - otherwise the stale child is disposed and a new node takes its place.
/// </summary>
/// <param name="rule">Grammar rule the child represents.</param>
/// <param name="pSyntaxTreeBuilder">Builder holding the token scanner position.</param>
/// <param name="skipParsing">Set to true when an intact subtree was reused and parsing can jump past it.</param>
/// <returns>The child node to continue parsing in.</returns>
public SyntaxTreeNode_Rule AddNode(ParseNode_Id rule, SyntaxTreeBuilder pSyntaxTreeBuilder, out bool skipParsing) {
    skipParsing = false;
    bool removedReusable = false;
    if (NumValidNodes < nodes.Count) {
        var reusable = nodes[NumValidNodes] as SyntaxTreeNode_Rule;
        if (reusable != null) {
            var firstLeaf = reusable.GetFirstLeaf(false);
            if (reusable.ParseNode != rule) {
                // Different rule - drop the old node unless it starts on a later line
                // (it may still be reusable for a later sibling).
                if (firstLeaf == null || firstLeaf.token == null || firstLeaf.Line <= pSyntaxTreeBuilder.TokenScanner.CurrentLine) {
                    reusable.Dispose();
                    removedReusable = true;
                }
            } else {
                if (firstLeaf != null && firstLeaf.token != null && firstLeaf.Line > pSyntaxTreeBuilder.TokenScanner.CurrentLine) {
                    // Ignore this node for now
                } else if (firstLeaf == null || firstLeaf.token != null && firstLeaf.m_sSyntaxError != null) {
                    // Leafless node or one whose first token carried a syntax error - rebuild it.
                    reusable.Dispose();
                    removedReusable = true;
                } else if (firstLeaf.token == pSyntaxTreeBuilder.TokenScanner.Current) {
                    // Same rule starting at the same token - try to reuse the whole subtree.
                    var lastLeaf = reusable.GetLastLeaf();
                    if (lastLeaf != null && !reusable.HasErrors()) {
                        if (lastLeaf.token != null) {
                            ((SyntaxTreeBuilder_CSharp)pSyntaxTreeBuilder).MoveAfterLeaf(lastLeaf);
                            skipParsing = true;
                            ++NumValidNodes;
                            return(pSyntaxTreeBuilder.SyntaxRule_Cur);
                        }
                    } else {
                        reusable.Dispose();
                        removedReusable = true;
                    }
                } else if (reusable.NumValidNodes == 0) {
                    // Empty placeholder node - revive it in place.
                    ++NumValidNodes;
                    reusable.m_sSyntaxError = null;
                    reusable.m_bMissing = false;
                    return(reusable);
                } else if (pSyntaxTreeBuilder.TokenScanner.Current != null && (firstLeaf.token == null || firstLeaf.Line <= pSyntaxTreeBuilder.TokenScanner.CurrentLine)) {
                    reusable.Dispose();
                    if (firstLeaf.token == null || firstLeaf.Line == pSyntaxTreeBuilder.TokenScanner.CurrentLine) {
                        removedReusable = true;
                    } else {
                        // Node started on an earlier line - remove it, shift the siblings
                        // left, and retry against the next existing child.
                        nodes.RemoveAt(NumValidNodes);
                        for (var i = NumValidNodes; i < nodes.Count; ++i) {
                            --nodes[i].m_iChildIndex;
                        }
                        return(AddNode(rule, pSyntaxTreeBuilder, out skipParsing));
                    }
                }
            }
        }
    }
    var node = new SyntaxTreeNode_Rule(rule) { Parent = this, m_iChildIndex = NumValidNodes };
    if (NumValidNodes == nodes.Count) {
        nodes.Add(node);
        ++NumValidNodes;
    } else {
        if (removedReusable) {
            nodes[NumValidNodes] = node;
        } else {
            nodes.Insert(NumValidNodes, node);
        }
        ++NumValidNodes;
    }
    // Re-number following siblings if the insertion shifted their positions.
    if (NumValidNodes < nodes.Count && nodes[NumValidNodes].m_iChildIndex != NumValidNodes) {
        for (var i = NumValidNodes; i < nodes.Count; ++i) {
            nodes[i].m_iChildIndex = i;
        }
    }
    return(node);
}
/// <summary>
/// Scanning is not supported for this node type; always throws.
/// </summary>
/// <exception cref="InvalidOperationException">Always thrown.</exception>
public override bool Scan(SyntaxTreeBuilder pSyntaxTreeBuilder) { throw new InvalidOperationException(); }
/// <summary>
/// Parsing is not supported for this node type; always throws.
/// </summary>
/// <exception cref="InvalidOperationException">Always thrown.</exception>
public override ParseNode_Base Parse(SyntaxTreeBuilder pSyntaxTreeBuilder) { throw new InvalidOperationException(); }
/// <summary>
/// Recursively records, for every scope nested under <paramref name="scope"/>, how a
/// captured variable must be referenced from inside lambdas: each scope gets a
/// substitution mapping the variable to a dot-chain through the generated closure-class
/// fields. Also lazily creates the field (and its "field := self" initialization node)
/// that links a closure class back to the capturing upper class. This revision includes
/// fix #1988 (recursion over ChildNodes[i] in the final branch).
/// </summary>
/// <param name="upperScopeWhereVarsAreCapturedClass">Generated class that owns the captured variable.</param>
/// <param name="scope">Scope subtree to process.</param>
/// <param name="varName">Name of the captured variable.</param>
/// <param name="syntaxTreeNodeWithVarDeclaration">Declaration site used to key substitutions.</param>
/// <param name="substDotNode">Dot-expression that reaches the variable from the current scope.</param>
/// <param name="nestedLambda">True when already inside a lambda (changes how the dot-chain is rebuilt).</param>
private void AddReferencesToIdentInLambda(type_declaration upperScopeWhereVarsAreCapturedClass, CapturedVariablesTreeNode scope, string varName, syntax_tree_node syntaxTreeNodeWithVarDeclaration, dot_node substDotNode, bool nestedLambda) {
    /*#if DEBUG
     * var pp = scope.ToString().IndexOf("TreeNode");
     * var ss = scope.ToString().Remove(0, pp + 8).Replace("Scope"," ");
     * var cn = "";
     * if (scope.ChildNodes.Count>0)
     *     cn = "Childs: "+ scope.ChildNodes.Aggregate("",(s, x) => s + x.ScopeIndex.ToString() + " ");
     * System.IO.File.AppendAllText("d:\\w.txt", "AddR enter: " + ss + scope.ScopeIndex + " " + cn + "" +scope.CorrespondingSyntaxTreeNode + "\n");
     #endif*/
    for (var i = 0; i < scope.ChildNodes.Count; i++) {
        if (!(scope.ChildNodes[i] is CapturedVariablesTreeNodeLambdaScope)) {
            var substKey = new SubstitutionKey(varName, syntaxTreeNodeWithVarDeclaration, scope.ChildNodes[i].CorrespondingSyntaxTreeNode);
            if (_capturedVarsClassDefs.ContainsKey(scope.ChildNodes[i].ScopeIndex)) {
                var cl = _capturedVarsClassDefs[scope.ChildNodes[i].ScopeIndex];
                if (cl.AssignNodeForUpperClassFieldInitialization == null) {
                    // First time this closure class needs its upper class: add a field typed as
                    // the upper class and remember the "field := self" initialization assignment.
                    var fieldType = SyntaxTreeBuilder.BuildSimpleType(upperScopeWhereVarsAreCapturedClass.type_name.name);
                    var field = SyntaxTreeBuilder.BuildClassFieldsSection(new List <ident> { new ident(cl.GeneratedUpperClassFieldName) }, new List <type_definition> { fieldType });
                    var clClass = (class_definition)cl.ClassDeclaration.type_def;
                    clClass.body.Add(field);
                    cl.AssignNodeForUpperClassFieldInitialization = new assign(new dot_node(new ident(cl.GeneratedSubstitutingFieldName), new ident(cl.GeneratedUpperClassFieldName)), new ident(compiler_string_consts.self_word));
                }
            }
            if (!_substitutions.ContainsKey(substKey)) {
                /*#if DEBUG
                 * System.IO.File.AppendAllText("d:\\w.txt", "1 substitutions.Add: " + substKey + " " + substDotNode + "\n");
                 #endif*/
                _substitutions.Add(substKey, substDotNode);
            }
            AddReferencesToIdentInLambda(upperScopeWhereVarsAreCapturedClass, scope.ChildNodes[i], varName, syntaxTreeNodeWithVarDeclaration, substDotNode, nestedLambda);
        } else {
            var scopeAsLambda = scope.ChildNodes[i] as CapturedVariablesTreeNodeLambdaScope;
            if (scopeAsLambda.ScopeIndexOfClassWhereLambdaWillBeAddedAsMethod.HasValue) {
                dot_node substDotNode1;
                if (!nestedLambda) {
                    // Rebuild the dot-chain so it starts at the generated upper-class field,
                    // dropping the chain's original root identifier.
                    var parts = new Stack <ident>();
                    var dn = substDotNode;
                    parts.Push((ident)dn.right);
                    while (!(dn.left is ident && dn.right is ident)) {
                        dn = (dot_node)dn.left;
                        parts.Push((ident)dn.right);
                    }
                    substDotNode1 = new dot_node(new ident(_capturedVarsClassDefs[scopeAsLambda.ScopeIndexOfClassWhereLambdaWillBeAddedAsMethod.Value].GeneratedUpperClassFieldName), parts.Pop());
                    while (parts.Count > 0) {
                        substDotNode1 = new dot_node(substDotNode1, parts.Pop());
                    }
                } else {
                    // Inside a nested lambda the chain's root identifier is kept as well.
                    var parts = new Stack <ident>();
                    var dn = substDotNode;
                    parts.Push((ident)dn.right);
                    while (!(dn.left is ident && dn.right is ident)) {
                        dn = (dot_node)dn.left;
                        parts.Push((ident)dn.right);
                    }
                    parts.Push((ident)dn.left);
                    substDotNode1 = new dot_node(new ident(_capturedVarsClassDefs[scopeAsLambda.ScopeIndexOfClassWhereLambdaWillBeAddedAsMethod.Value].GeneratedUpperClassFieldName), parts.Pop());
                    while (parts.Count > 0) {
                        substDotNode1 = new dot_node(substDotNode1, parts.Pop());
                    }
                }
                // NOTE(review): the substitution key still uses scope.ChildNodes[0] while the
                // loop iterates over i - confirm whether fix #1988 should apply here as well.
                var substKey = new SubstitutionKey(varName, syntaxTreeNodeWithVarDeclaration, scope.ChildNodes[0].CorrespondingSyntaxTreeNode);
                if (!_substitutions.ContainsKey(substKey)) {
                    /*#if DEBUG
                     * System.IO.File.AppendAllText("d:\\w.txt", "2 substitutions.Add: " + substKey + " " + substDotNode + "\n");
                     #endif*/
                    _substitutions.Add(substKey, substDotNode1);
                }
                AddReferencesToIdentInLambda(_capturedVarsClassDefs[scopeAsLambda.ScopeIndexOfClassWhereLambdaWillBeAddedAsMethod.Value].ClassDeclaration, scopeAsLambda.ChildNodes[0], varName, syntaxTreeNodeWithVarDeclaration, substDotNode1, true);
            } else {
                // SSM 25.06.19 fix #1988 - replaced the erroneous scope.ChildNodes[0] with scope.ChildNodes[i]
                AddReferencesToIdentInLambda(upperScopeWhereVarsAreCapturedClass, scope.ChildNodes[i], varName, syntaxTreeNodeWithVarDeclaration, substDotNode, nestedLambda);
            }
        }
    }
    /*#if DEBUG
     * System.IO.File.AppendAllText("d:\\w.txt", "AddR exit: " + scope.ScopeIndex+"\n");
     #endif*/
}
/// <summary>
/// Performs one parsing step: advances grammar nodes until a token is consumed or an
/// error is reported, then runs error recovery - either skipping the offending token
/// (marking it with a syntax error) or inserting a missing token - so parsing can resume.
/// </summary>
/// <param name="pSyntaxTreeBuilder">Builder holding scanner, current/error parse state.</param>
/// <returns>False when parsing is finished (no current node, or an error at EOF).</returns>
public bool ParseStep(SyntaxTreeBuilder pSyntaxTreeBuilder) {
    if (pSyntaxTreeBuilder.ParseNode_Cur == null) {
        return(false);
    }
    var token = pSyntaxTreeBuilder.TokenScanner.Current;
    if (pSyntaxTreeBuilder.ErrorMessage == null) {
        // Step grammar nodes until the scanner consumes a token or an error appears.
        while (pSyntaxTreeBuilder.ParseNode_Cur != null) {
            pSyntaxTreeBuilder.ParseNode_Cur = pSyntaxTreeBuilder.ParseNode_Cur.Parse(pSyntaxTreeBuilder);
            if (pSyntaxTreeBuilder.ErrorMessage != null || token != pSyntaxTreeBuilder.TokenScanner.Current) {
                break;
            }
        }
        if (pSyntaxTreeBuilder.ErrorMessage == null && token != pSyntaxTreeBuilder.TokenScanner.Current) {
            // Successful step - remember the last good position for later recovery.
            pSyntaxTreeBuilder.SyntaxRule_Err = pSyntaxTreeBuilder.SyntaxRule_Cur;
            pSyntaxTreeBuilder.ParseNode_Err = pSyntaxTreeBuilder.ParseNode_Cur;
        }
    }
    if (pSyntaxTreeBuilder.ErrorMessage != null) {
        if (token.tokenKind == LexerToken.Kind.EOF) {
            return(false);
        }
        var missingParseTreeNode = pSyntaxTreeBuilder.SyntaxRule_Cur;
        var missingParseNode = pSyntaxTreeBuilder.ParseNode_Cur;
        // Roll back to the last known-good rule/node and drop trailing leafless children.
        pSyntaxTreeBuilder.SyntaxRule_Cur = pSyntaxTreeBuilder.SyntaxRule_Err;
        pSyntaxTreeBuilder.ParseNode_Cur = pSyntaxTreeBuilder.ParseNode_Err;
        if (pSyntaxTreeBuilder.SyntaxRule_Cur != null) {
            var cpt = pSyntaxTreeBuilder.SyntaxRule_Cur;
            for (var i = cpt.NumValidNodes; i > 0 && !cpt.ChildAt(--i).HasLeafs();) {
                cpt.InvalidateFrom(i);
            }
        }
        if (pSyntaxTreeBuilder.ParseNode_Cur != null) {
            int numSkipped;
            pSyntaxTreeBuilder.ParseNode_Cur = pSyntaxTreeBuilder.ParseNode_Cur.Recover(pSyntaxTreeBuilder, out numSkipped);
        }
        if (pSyntaxTreeBuilder.ParseNode_Cur == null) {
            // Recovery failed: attach a syntax error to the offending token and skip it.
            if (token.m_kLinkedLeaf != null) {
                token.m_kLinkedLeaf.ReparseToken();
            }
            new SyntaxTreeNode_Leaf(pSyntaxTreeBuilder.TokenScanner);
            if (cachedErrorParseNode == pSyntaxTreeBuilder.ParseNode_Err) {
                // Same error position as last time - reuse the cached message.
                token.m_kLinkedLeaf.m_sSyntaxError = cachedErrorMessage;
            } else {
                token.m_kLinkedLeaf.m_sSyntaxError = "Unexpected token! Expected " + pSyntaxTreeBuilder.ParseNode_Err.FirstSet.ToString(this);
                cachedErrorMessage = token.m_kLinkedLeaf.m_sSyntaxError;
                cachedErrorParseNode = pSyntaxTreeBuilder.ParseNode_Err;
            }
            pSyntaxTreeBuilder.ParseNode_Cur = pSyntaxTreeBuilder.ParseNode_Err;
            pSyntaxTreeBuilder.SyntaxRule_Cur = pSyntaxTreeBuilder.SyntaxRule_Err;
            if (!pSyntaxTreeBuilder.TokenScanner.MoveNext()) {
                return(false);
            }
            pSyntaxTreeBuilder.ErrorMessage = null;
        } else {
            // Recovery found a continuation: insert the missing token and resume after it.
            if (missingParseNode != null && missingParseTreeNode != null) {
                pSyntaxTreeBuilder.SyntaxRule_Cur = missingParseTreeNode;
                pSyntaxTreeBuilder.ParseNode_Cur = missingParseNode;
            }
            pSyntaxTreeBuilder.InsertMissingToken(pSyntaxTreeBuilder.ErrorMessage ?? ("Expected " + missingParseNode.FirstSet.ToString(this)));
            if (missingParseNode != null && missingParseTreeNode != null) {
                pSyntaxTreeBuilder.ErrorMessage = null;
                pSyntaxTreeBuilder.ErrorToken = null;
                pSyntaxTreeBuilder.SyntaxRule_Cur = missingParseTreeNode;
                pSyntaxTreeBuilder.ParseNode_Cur = missingParseNode;
                pSyntaxTreeBuilder.ParseNode_Cur = missingParseNode.parent.NextAfterChild(missingParseNode, pSyntaxTreeBuilder);
            }
            pSyntaxTreeBuilder.ErrorMessage = null;
            pSyntaxTreeBuilder.ErrorToken = null;
        }
    }
    return(true);
}
/// <summary>
/// Splits an activity field into data-list regions and extracts the recordset and
/// scalar names referenced in each region.
/// </summary>
/// <param name="activityField">The activity field.</param>
/// <returns>The list of referenced data-list item names.</returns>
public IList<string> FormatDsfActivityField(string activityField)
{
    //2013.06.10: Ashley Lewis for bug 9306 - handle the case of miss-matched region braces
    IList<string> result = new List<string>();
    var regions = DataListCleaningUtils.SplitIntoRegionsForFindMissing(activityField);
    foreach(var region in regions)
    {
        // Sashen: 09-10-2012 : Using the new parser
        var intellisenseParser = new SyntaxTreeBuilder();
        Node[] nodes = intellisenseParser.Build(region);
        // No point in continuing ;)
        if(nodes == null)
        {
            return result;
        }
        if(intellisenseParser.EventLog.HasEventLogs)
        {
            // The new parser logged problems - fall back to the studio language parser.
            IDev2StudioDataLanguageParser languageParser = DataListFactory.CreateStudioLanguageParser();
            try
            {
                result = languageParser.ParseForActivityDataItems(region);
            }
            catch(Dev2DataLanguageParseError)
            {
                return new List<string>();
            }
            catch(NullReferenceException)
            {
                return new List<string>();
            }
        }
        var allNodes = new List<Node>();
        if(nodes.Any() && !(intellisenseParser.EventLog.HasEventLogs))
        {
            nodes[0].CollectNodes(allNodes);
            // ReSharper disable once ForCanBeConvertedToForeach
            for(int i = 0; i < allNodes.Count; i++)
            {
                if(allNodes[i] is DatalistRecordSetNode)
                {
                    var refNode = allNodes[i] as DatalistRecordSetNode;
                    string nodeName = refNode.GetRepresentationForEvaluation();
                    // Strip the surrounding "[[" and "]]" markers.
                    nodeName = nodeName.Substring(2, nodeName.Length - 4);
                    result.Add(nodeName);
                }
                else if(allNodes[i] is DatalistReferenceNode)
                {
                    var refNode = allNodes[i] as DatalistReferenceNode;
                    string nodeName = refNode.GetRepresentationForEvaluation();
                    nodeName = nodeName.Substring(2, nodeName.Length - 4);
                    result.Add(nodeName);
                }
            }
        }
    }
    return result;
}
/// <summary>
/// Processes one captured variable of <paramref name="scope"/>: for every lambda that
/// references it, records in _substitutions the dot_node chain that will replace reads of
/// the variable (hops through generated scope-class fields), records the lambda-body
/// reference in _lambdaIdReferences, creates generated scope classes in
/// _capturedVarsClassDefs on demand, and raises the scope index of the class the lambda
/// will be added to as a method. Presumably part of lambda-capture lowering — confirm.
/// </summary>
/// <param name="scope">Scope node in which the captured variable is declared.</param>
/// <param name="symbolInfo">The captured symbol together with the lambdas referencing it.</param>
private void VisitCapturedVar(CapturedVariablesTreeNode scope, CapturedVariablesTreeNode.CapturedSymbolInfo symbolInfo)
{
    var varName = ((IVAriableDefinitionNode)symbolInfo.SymbolInfo.sym_info).name.ToLower();
    // "self" captured inside a class scope gets no trailing field access appended below.
    var isSelfWordInClass = scope is CapturedVariablesTreeNodeClassScope && varName == compiler_string_consts.self_word;
    // Deepest referencing lambdas first (descending scope index).
    foreach (var referencingLambda in symbolInfo.ReferencingLambdas.OrderByDescending(rl => rl.ScopeIndex))
    {
        if (scope != referencingLambda.ParentNode)
        {
            // The lambda sits in a scope nested below the declaring one: collect the
            // intermediate scopes bottom-up. Bail out if another lambda scope lies on the
            // way (that lambda owns the chain from there) or the declaring scope is never reached.
            var upperScopesStack = new Stack<CapturedVariablesTreeNode>();
            var crawlUpScope = referencingLambda.ParentNode;
            var anotherLambdaIsOnTheWay = crawlUpScope is CapturedVariablesTreeNodeLambdaScope;
            while (crawlUpScope != null && crawlUpScope != scope && !anotherLambdaIsOnTheWay)
            {
                upperScopesStack.Push(crawlUpScope);
                crawlUpScope = crawlUpScope.ParentNode;
                anotherLambdaIsOnTheWay = crawlUpScope is CapturedVariablesTreeNodeLambdaScope;
            }
            if (anotherLambdaIsOnTheWay || crawlUpScope == null)
            {
                continue;
            }
            var upperScopeWhereVarsAreCaptured = scope;
            var upperScopeWhereVarsAreCapturedClass = _capturedVarsClassDefs[upperScopeWhereVarsAreCaptured.ScopeIndex].ClassDeclaration;
            // Substitution valid directly in the declaring scope: <generated field>.<var>.
            var substKey = new SubstitutionKey(varName, symbolInfo.SyntaxTreeNodeWithVarDeclaration, scope.CorrespondingSyntaxTreeNode);
            if (!_substitutions.ContainsKey(substKey))
            {
                _substitutions.Add(substKey,
                    new dot_node(
                        new ident(_capturedVarsClassDefs[upperScopeWhereVarsAreCaptured.ScopeIndex].GeneratedSubstitutingFieldName),
                        new ident(varName)));
            }
            // Walk back down the collected intermediate scopes, linking each scope that has
            // captures of its own into the chain of generated classes.
            while (upperScopesStack.Count != 0)
            {
                var foundScopeWhereVarsWereCaptured = false;
                while (upperScopesStack.Count != 0 && !foundScopeWhereVarsWereCaptured)
                {
                    if (upperScopesStack.Peek().VariablesDefinedInScope.Exists(var => var.ReferencingLambdas.Count > 0))
                    {
                        foundScopeWhereVarsWereCaptured = true;
                    }
                    else
                    {
                        // Scope captured nothing itself: register a substitution that reaches
                        // through the upper generated class instead.
                        var curScope = upperScopesStack.Pop();
                        if (upperScopeWhereVarsAreCaptured == scope && upperScopeWhereVarsAreCaptured is CapturedVariablesTreeNodeClassScope)
                        {
                            continue;
                        }
                        substKey = new SubstitutionKey(varName, symbolInfo.SyntaxTreeNodeWithVarDeclaration, curScope.CorrespondingSyntaxTreeNode);
                        dot_node dotnode;
                        if (upperScopeWhereVarsAreCaptured != scope)
                        {
                            dotnode = new dot_node(
                                new ident(_capturedVarsClassDefs[upperScopeWhereVarsAreCaptured.ScopeIndex].GeneratedSubstitutingFieldName),
                                new ident(_capturedVarsClassDefs[upperScopeWhereVarsAreCaptured.ScopeIndex].GeneratedUpperClassFieldName));
                            // Append one "<upper class field>" hop per initialized scope class
                            // between the capture scope and the declaring scope.
                            var nodeForDotNodeCalc = upperScopeWhereVarsAreCaptured.ParentNode;
                            while (nodeForDotNodeCalc != scope)
                            {
                                if (_capturedVarsClassDefs.ContainsKey(nodeForDotNodeCalc.ScopeIndex) && _capturedVarsClassDefs[nodeForDotNodeCalc.ScopeIndex].AssignNodeForUpperClassFieldInitialization != null)
                                {
                                    dotnode = new dot_node(dotnode,
                                        new ident(_capturedVarsClassDefs[nodeForDotNodeCalc.ScopeIndex].GeneratedUpperClassFieldName));
                                }
                                nodeForDotNodeCalc = nodeForDotNodeCalc.ParentNode;
                            }
                            if (!isSelfWordInClass)
                            {
                                dotnode = new dot_node(dotnode, new ident(varName));
                            }
                        }
                        else
                        {
                            dotnode = new dot_node(
                                new ident(_capturedVarsClassDefs[upperScopeWhereVarsAreCaptured.ScopeIndex].GeneratedSubstitutingFieldName),
                                new ident(varName));
                        }
                        if (!_substitutions.ContainsKey(substKey))
                        {
                            _substitutions.Add(substKey, dotnode);
                        }
                    }
                }
                if (foundScopeWhereVarsWereCaptured)
                {
                    // A nested scope with captures of its own: ensure it has a generated class
                    // and give that class a field pointing at the upper scope's class instance.
                    var nextNodeWhereVarsAreCaptured = upperScopesStack.Pop();
                    if (!_capturedVarsClassDefs.ContainsKey(nextNodeWhereVarsAreCaptured.ScopeIndex))
                    {
                        var classDef = SyntaxTreeBuilder.BuildClassDefinition();
                        var typeDeclaration = new type_declaration(GeneratedClassName, classDef);
                        _capturedVarsClassDefs.Add(nextNodeWhereVarsAreCaptured.ScopeIndex,
                            new ScopeClassDefinition(nextNodeWhereVarsAreCaptured.CorrespondingSyntaxTreeNode, typeDeclaration, nextNodeWhereVarsAreCaptured));
                    }
                    var nextNodeWhereVarsAreCapturedClass = (class_definition)_capturedVarsClassDefs[nextNodeWhereVarsAreCaptured.ScopeIndex].ClassDeclaration.type_def;
                    if (_capturedVarsClassDefs[nextNodeWhereVarsAreCaptured.ScopeIndex].AssignNodeForUpperClassFieldInitialization == null)
                    {
                        // First time this scope class is linked upward: add the field and remember
                        // the assignment that will initialize it.
                        var fieldType = SyntaxTreeBuilder.BuildSimpleType(upperScopeWhereVarsAreCapturedClass.type_name.name);
                        var field = SyntaxTreeBuilder.BuildClassFieldsSection(
                            new List<ident>
                            {
                                new ident(_capturedVarsClassDefs[nextNodeWhereVarsAreCaptured.ScopeIndex].GeneratedUpperClassFieldName)
                            },
                            new List<type_definition> { fieldType });
                        nextNodeWhereVarsAreCapturedClass.body.Add(field);
                        _capturedVarsClassDefs[nextNodeWhereVarsAreCaptured.ScopeIndex].AssignNodeForUpperClassFieldInitialization =
                            new assign(
                                new dot_node(
                                    new ident(_capturedVarsClassDefs[nextNodeWhereVarsAreCaptured.ScopeIndex].GeneratedSubstitutingFieldName),
                                    new ident(_capturedVarsClassDefs[nextNodeWhereVarsAreCaptured.ScopeIndex].GeneratedUpperClassFieldName)),
                                new ident(_capturedVarsClassDefs[upperScopeWhereVarsAreCaptured.ScopeIndex].GeneratedSubstitutingFieldName));
                    }
                    substKey = new SubstitutionKey(varName, symbolInfo.SyntaxTreeNodeWithVarDeclaration, nextNodeWhereVarsAreCaptured.CorrespondingSyntaxTreeNode);
                    var dot = new dot_node(
                        new ident(_capturedVarsClassDefs[nextNodeWhereVarsAreCaptured.ScopeIndex].GeneratedSubstitutingFieldName),
                        new ident(_capturedVarsClassDefs[nextNodeWhereVarsAreCaptured.ScopeIndex].GeneratedUpperClassFieldName));
                    var nodeForDotNodeCalculation = nextNodeWhereVarsAreCaptured.ParentNode;
                    while (nodeForDotNodeCalculation != scope)
                    {
                        if (_capturedVarsClassDefs.ContainsKey(nodeForDotNodeCalculation.ScopeIndex) && _capturedVarsClassDefs[nodeForDotNodeCalculation.ScopeIndex].AssignNodeForUpperClassFieldInitialization != null)
                        {
                            dot = new dot_node(dot,
                                new ident(_capturedVarsClassDefs[nodeForDotNodeCalculation.ScopeIndex].GeneratedUpperClassFieldName));
                        }
                        nodeForDotNodeCalculation = nodeForDotNodeCalculation.ParentNode;
                    }
                    if (!isSelfWordInClass)
                    {
                        dot = new dot_node(dot, new ident(varName));
                    }
                    if (!_substitutions.ContainsKey(substKey))
                    {
                        _substitutions.Add(substKey, dot);
                    }
                    // Continue the walk with this scope as the new upper capture scope.
                    upperScopeWhereVarsAreCaptured = nextNodeWhereVarsAreCaptured;
                    upperScopeWhereVarsAreCapturedClass = _capturedVarsClassDefs[upperScopeWhereVarsAreCaptured.ScopeIndex].ClassDeclaration;
                }
            }
            if (!(upperScopeWhereVarsAreCaptured == scope && upperScopeWhereVarsAreCaptured is CapturedVariablesTreeNodeClassScope))
            {
                if (upperScopeWhereVarsAreCaptured != scope)
                {
                    // Access path as seen from inside the lambda body: self.<upper field>...<var>.
                    var dotnode1 = new dot_node(
                        new ident(compiler_string_consts.self_word),
                        new ident(_capturedVarsClassDefs[upperScopeWhereVarsAreCaptured.ScopeIndex].GeneratedUpperClassFieldName));
                    // NOTE(review): this inner check repeats the enclosing condition with no
                    // reassignment in between, so it is always true here.
                    if (upperScopeWhereVarsAreCaptured != scope)
                    {
                        var nodeForDotNodeCalc = upperScopeWhereVarsAreCaptured.ParentNode;
                        while (nodeForDotNodeCalc != scope)
                        {
                            if (_capturedVarsClassDefs.ContainsKey(nodeForDotNodeCalc.ScopeIndex) && _capturedVarsClassDefs[nodeForDotNodeCalc.ScopeIndex].AssignNodeForUpperClassFieldInitialization != null)
                            {
                                dotnode1 = new dot_node(dotnode1,
                                    new ident(_capturedVarsClassDefs[nodeForDotNodeCalc.ScopeIndex].GeneratedUpperClassFieldName));
                            }
                            nodeForDotNodeCalc = nodeForDotNodeCalc.ParentNode;
                        }
                    }
                    if (!isSelfWordInClass)
                    {
                        // Non-public class members are reached through their mapped public property name.
                        var classScope = scope as CapturedVariablesTreeNodeClassScope;
                        if (classScope != null)
                        {
                            Tuple<string, class_field, semantic_node> publicProperty;
                            if (classScope.NonPublicMembersNamesMapping.TryGetValue(varName, out publicProperty))
                            {
                                dotnode1 = new dot_node(dotnode1, new ident(publicProperty.Item1));
                            }
                            else
                            {
                                dotnode1 = new dot_node(dotnode1, new ident(varName));
                            }
                        }
                        else
                        {
                            dotnode1 = new dot_node(dotnode1, new ident(varName));
                        }
                    }
                    _lambdaIdReferences.Add(new LambdaReferencesSubstitutionInfo
                    {
                        LambdaScope = referencingLambda,
                        VarName = varName,
                        SyntaxTreeNodeWithVarDeclaration = symbolInfo.SyntaxTreeNodeWithVarDeclaration,
                        DotNode = dotnode1
                    });
                }
                else
                {
                    var dotnode1 = new dot_node(
                        new ident(compiler_string_consts.self_word),
                        new ident(varName));
                    _lambdaIdReferences.Add(new LambdaReferencesSubstitutionInfo
                    {
                        LambdaScope = referencingLambda,
                        VarName = varName,
                        SyntaxTreeNodeWithVarDeclaration = symbolInfo.SyntaxTreeNodeWithVarDeclaration,
                        DotNode = dotnode1
                    });
                }
            }
            // The lambda is attached to the class with the greatest scope index seen so far.
            if (!referencingLambda.ScopeIndexOfClassWhereLambdaWillBeAddedAsMethod.HasValue || upperScopeWhereVarsAreCaptured.ScopeIndex > referencingLambda.ScopeIndexOfClassWhereLambdaWillBeAddedAsMethod)
            {
                referencingLambda.ScopeIndexOfClassWhereLambdaWillBeAddedAsMethod = upperScopeWhereVarsAreCaptured.ScopeIndex;
            }
        }
        else
        {
            // The lambda is declared directly in the variable's scope.
            if (!_capturedVarsClassDefs.ContainsKey(scope.ScopeIndex))
            {
                var classDef = SyntaxTreeBuilder.BuildClassDefinition();
                var typeDeclaration = new type_declaration(GeneratedClassName, classDef);
                _capturedVarsClassDefs.Add(scope.ScopeIndex,
                    new ScopeClassDefinition(scope.CorrespondingSyntaxTreeNode, typeDeclaration, scope));
            }
            var substKey = new SubstitutionKey(varName, symbolInfo.SyntaxTreeNodeWithVarDeclaration, scope.CorrespondingSyntaxTreeNode);
            if (!_substitutions.ContainsKey(substKey))
            {
                if (!isSelfWordInClass)
                {
                    string propertyName = null;
                    var classScope = scope as CapturedVariablesTreeNodeClassScope;
                    if (classScope != null)
                    {
                        Tuple<string, class_field, semantic_node> publicProperty;
                        if (classScope.NonPublicMembersNamesMapping.TryGetValue(varName, out publicProperty))
                        {
                            propertyName = publicProperty.Item1;
                        }
                    }
                    _substitutions.Add(substKey,
                        new dot_node(
                            new ident(_capturedVarsClassDefs[scope.ScopeIndex].GeneratedSubstitutingFieldName),
                            new ident(propertyName ?? varName)));
                }
            }
            var dotnode1 = new dot_node(
                new ident(compiler_string_consts.self_word),
                new ident(varName));
            _lambdaIdReferences.Add(new LambdaReferencesSubstitutionInfo
            {
                LambdaScope = referencingLambda,
                VarName = varName,
                SyntaxTreeNodeWithVarDeclaration = symbolInfo.SyntaxTreeNodeWithVarDeclaration,
                DotNode = dotnode1
            });
            if (!referencingLambda.ScopeIndexOfClassWhereLambdaWillBeAddedAsMethod.HasValue || scope.ScopeIndex > referencingLambda.ScopeIndexOfClassWhereLambdaWillBeAddedAsMethod)
            {
                referencingLambda.ScopeIndexOfClassWhereLambdaWillBeAddedAsMethod = scope.ScopeIndex;
            }
        }
        if (!_lambdasToBeAddedAsMethods.Contains(referencingLambda))
        {
            _lambdasToBeAddedAsMethods.Add(referencingLambda);
        }
    }
}
/// <summary>
/// Translates a typeclass declaration into two ordinary declarations: an interface
/// ("I" + typeclass name) carrying the member signatures, and an abstract class
/// implementing it. Derived typeclass restrictions become template parameters with
/// where-clauses, and forwarding methods are generated for the derived typeclasses'
/// members. The original declaration is replaced and both new ones are visited.
/// </summary>
/// <param name="typeclassDeclaration">The declaration to translate.</param>
/// <returns>false when the declaration is not a typeclass definition; true otherwise.</returns>
bool VisitTypeclassDeclaration(type_declaration typeclassDeclaration)
{
    var typeclassDefinition = typeclassDeclaration.type_def as typeclass_definition;
    if (typeclassDefinition == null)
    {
        return(false);
    }
    var typeclassName = typeclassDeclaration.type_name as typeclass_restriction;
    // TODO: typeclassDefinition.additional_restrictions - translate to usual classes

    // Creating interface
    // Get members for typeclass interface
    var interfaceMembers = new List<class_members>();
    foreach (var cm in typeclassDefinition.body.class_def_blocks)
    {
        var cmNew = (class_members)cm.Clone();
        for (int i = 0; i < cmNew.members.Count; i++)
        {
            var member = cmNew.members[i];
            if (member is function_header || member is procedure_header)
            {
                cmNew.members[i] = member;
            }
            else if (member is procedure_definition procDef)
            {
                // The interface keeps only the header of a full definition.
                cmNew.members[i] = procDef.proc_header;
            }
            AddAttribute(cmNew.members[i], "__TypeclassMemberAttribute");
            if (cmNew.members[i] is procedure_header ph)
            {
                ConvertOperatorNameIdent(ph);
            }
        }
        interfaceMembers.Add(cmNew);
    }
    // Derived typeclass restrictions become inherited interfaces.
    var interfaceInheritance = (named_type_reference_list)typeclassDefinition.additional_restrictions?.Clone();
    if (interfaceInheritance != null)
    {
        interfaceInheritance.source_context = null;
        for (int i = 0; i < interfaceInheritance.types.Count; i++)
        {
            if (interfaceInheritance.types[i] is typeclass_reference tr)
            {
                interfaceInheritance.types[i] = TypeclassReferenceToInterfaceName(tr.names[0].name, tr.restriction_args);
            }
            else
            {
                throw new NotImplementedException("Syntactic Error");
            }
        }
    }
    var typeclassInterfaceDef = SyntaxTreeBuilder.BuildClassDefinition(interfaceInheritance, null, interfaceMembers.ToArray());
    typeclassInterfaceDef.keyword = class_keyword.Interface;
    var typeclassInterfaceName = new template_type_name("I" + typeclassName.name, RestrictionsToIdentList(typeclassName.restriction_args));
    var typeclassInterfaceDecl = new type_declaration(typeclassInterfaceName, typeclassInterfaceDef);
    typeclassInterfaceDecl.attributes = typeclassDeclaration.attributes;
    AddAttribute(
        typeclassInterfaceDecl,
        "__TypeclassAttribute",
        new expression_list(new string_const(TypeclassRestrictionToString(typeclassName))));

    // Creating class
    var typeclassDefTranslated = SyntaxTreeBuilder.BuildClassDefinition(
        new named_type_reference_list(new template_type_reference(typeclassInterfaceName.name, typeclassName.restriction_args)),
        null,
        typeclassDefinition.body.class_def_blocks.ToArray());
    typeclassDefTranslated.attribute = class_attribute.Abstract;
    // Headers-only members become abstract; full definitions become virtual.
    for (int i = 0; i < typeclassDefTranslated.body.class_def_blocks.Count; i++)
    {
        var cm = typeclassDefTranslated.body.class_def_blocks[i].members;
        for (int j = 0; j < cm.Count; j++)
        {
            procedure_header header = null;
            if (cm[j] is procedure_header ph)
            {
                header = ph;
                header.proc_attributes.Add(new procedure_attribute("abstract", proc_attribute.attr_abstract));
            }
            else if (cm[j] is procedure_definition pd)
            {
                header = pd.proc_header;
                header.proc_attributes.Add(new procedure_attribute("virtual", proc_attribute.attr_virtual));
            }
            // NOTE(review): header stays null when cm[j] is neither a header nor a
            // definition; confirm ConvertOperatorNameIdent tolerates a null argument.
            ConvertOperatorNameIdent(header);
        }
    }
    /*
     * {
     *     // Add constructor
     *     var cm = typeclassDefTranslated.body.class_def_blocks[0];
     *     var def = new procedure_definition(
     *         new constructor(),
     *         new statement_list(new empty_statement()));
     *     def.proc_body.Parent = def;
     *     def.proc_header.proc_attributes = new procedure_attributes_list();
     *
     *     cm.Add(def);
     * }
     */
    // Add template parameters for typeclass class(derived typeclasses)
    ident_list templates = RestrictionsToIdentList(typeclassName.restriction_args);
    if (typeclassDefinition.additional_restrictions != null)
    {
        for (int i = 0; i < typeclassDefinition.additional_restrictions.types.Count; i++)
        {
            string name;
            string templateName;
            if (typeclassDefinition.additional_restrictions.types[i] is typeclass_reference tr)
            {
                name = tr.names[0].name;
                templateName = TypeclassRestrctionToTemplateName(name, tr.restriction_args).name;
            }
            else
            {
                throw new NotImplementedException("SyntaxError");
            }
            // Add template parameter
            templates.Add(templateName);
            // Add where restriction
            if (typeclassDefTranslated.where_section == null)
            {
                typeclassDefTranslated.where_section = new where_definition_list();
            }
            typeclassDefTranslated.where_section.Add(GetWhereRestriction(interfaceInheritance.types[i], templateName));
            // Add methods from derived typeclasses
            var body = (instancesAndRestrictedFunctions.typeclasses[name].type_def as typeclass_definition).body;
            foreach (var cdb in body.class_def_blocks)
            {
                var cdbNew = new class_members(cdb.access_mod == null ? access_modifer.none : cdb.access_mod.access_level);
                foreach (var member in cdb.members)
                {
                    procedure_header memberHeaderNew;
                    if (member is procedure_header || member is function_header)
                    {
                        memberHeaderNew = (procedure_header)member.Clone();
                        memberHeaderNew.source_context = null;
                    }
                    else if (member is procedure_definition procDefinition)
                    {
                        memberHeaderNew = (procedure_header)procDefinition.proc_header.Clone();
                        memberHeaderNew.Parent = null;
                        memberHeaderNew.source_context = null;
                    }
                    else
                    {
                        continue;
                    }
                    // Generated body forwards the call to the "<template>Instance" singleton.
                    var variableName = templateName + "Instance";
                    var parameters = memberHeaderNew.parameters.params_list.Aggregate(new expression_list(), (x, y) => new expression_list(x.expressions.Concat(y.idents.idents).ToList()));
                    expression methodCall = null;
                    if (memberHeaderNew.name.meth_name is operator_name_ident oni)
                    {
                        ConvertOperatorNameIdent(memberHeaderNew);
                        Debug.Assert(parameters.expressions.Count == 2, "Parameters count for operation should be equal to 2");
                        //methodCall = new bin_expr(parameters.expressions[0], parameters.expressions[1], oni.operator_type);
                    }
                    var callName = new dot_node(variableName, memberHeaderNew.name.meth_name.name);
                    methodCall = new method_call(callName, parameters);
                    statement exec = null;
                    if (memberHeaderNew is function_header)
                    {
                        // Functions assign the forwarded call to Result.
                        exec = new assign("Result", methodCall);
                    }
                    else if (memberHeaderNew is procedure_header)
                    {
                        exec = new procedure_call(methodCall as method_call);
                    }
                    var procDef = new procedure_definition(
                        memberHeaderNew,
                        new statement_list(GetInstanceSingletonVarStatement(templateName), exec));
                    cdbNew.Add(procDef);
                }
                typeclassDefTranslated.body.class_def_blocks.Add(cdbNew);
            }
        }
    }
    var typeclassNameTanslated = new template_type_name(typeclassName.name, templates, typeclassName.source_context);
    var typeclassDeclTranslated = new type_declaration(typeclassNameTanslated, typeclassDefTranslated, typeclassDeclaration.source_context);
    typeclassDeclTranslated.attributes = typeclassDeclaration.attributes;
    AddAttribute(
        typeclassDeclTranslated,
        "__TypeclassAttribute",
        new expression_list(new string_const(TypeclassRestrictionToString(typeclassName))));
    Replace(typeclassDeclaration, typeclassDeclTranslated);
    // The interface must precede the class that implements it.
    UpperNodeAs<type_declarations>().InsertBefore(typeclassDeclTranslated, typeclassInterfaceDecl);
    visit(typeclassInterfaceDecl);
    visit(typeclassDeclTranslated);
    return(true);
}
/// <summary>
/// Recursively walks the captured-variables tree. For every scope with variables captured
/// by lambdas it registers (on demand) the generated scope class in _capturedVarsClassDefs,
/// adds fields for the captured locals (plus a constructor for procedure scopes), and
/// delegates per-variable substitution bookkeeping to VisitCapturedVar. After the children
/// are processed, it records which captured variables need their references rewritten in
/// each child scope.
/// </summary>
/// <param name="currentNode">The scope node to process; its children are visited recursively.</param>
private void VisitTreeAndBuildClassDefinitions(CapturedVariablesTreeNode currentNode)
{
    var variablesFromThisScopeWhichWereCaptured = currentNode
        .VariablesDefinedInScope
        .Where(var => var.ReferencingLambdas.Count > 0)
        .ToList();
    if (variablesFromThisScopeWhichWereCaptured.Count > 0)
    {
        if (currentNode is CapturedVariablesTreeNodeClassScope)
        {
            // Class scopes reuse the class itself: registered under its own name with
            // "self" as the substituting field; no extra fields are generated.
            if (!_capturedVarsClassDefs.ContainsKey(currentNode.ScopeIndex))
            {
                var classDef = SyntaxTreeBuilder.BuildClassDefinition();
                var typeDeclaration = new type_declaration(((CapturedVariablesTreeNodeClassScope)currentNode).ClassName, classDef);
                _capturedVarsClassDefs.Add(currentNode.ScopeIndex, new ScopeClassDefinition(currentNode.CorrespondingSyntaxTreeNode, typeDeclaration, currentNode, compiler_string_consts.self_word));
            }
        }
        else
        {
            if (!_capturedVarsClassDefs.ContainsKey(currentNode.ScopeIndex))
            {
                var classDef = SyntaxTreeBuilder.BuildClassDefinition();
                var typeDeclaration = new type_declaration(GeneratedClassName, classDef);
                _capturedVarsClassDefs.Add(currentNode.ScopeIndex, new ScopeClassDefinition(currentNode.CorrespondingSyntaxTreeNode, typeDeclaration, currentNode));
            }
            // One generated field per captured local, typed from its semantic type.
            var vars = variablesFromThisScopeWhichWereCaptured
                .Select(field => field.SymbolInfo.sym_info as IVAriableDefinitionNode)
                .ToList();
            var fieldNames = vars
                .Select(var => new ident(var.name))
                .ToList();
            var fieldTypes = vars
                .Select(var => SyntaxTreeBuilder.BuildSemanticType(var.type))
                .ToList();
            var classFields = SyntaxTreeBuilder.BuildClassFieldsSection(fieldNames, fieldTypes);
            ((class_definition)_capturedVarsClassDefs[currentNode.ScopeIndex].ClassDeclaration.type_def).body.Add(classFields);
            if (currentNode is CapturedVariablesTreeNodeProcedureScope)
            {
                // Procedure scopes also get a constructor taking "_<name>" parameters.
                var constructorSection = SyntaxTreeBuilder.BuildSimpleConstructorSection(fieldNames,
                    fieldNames.Select(id => new ident("_" + id.name)).ToList(),
                    fieldTypes);
                ((class_definition)_capturedVarsClassDefs[currentNode.ScopeIndex].ClassDeclaration.type_def).body.Add(constructorSection);
            }
        }
        foreach (var capturedVar in variablesFromThisScopeWhichWereCaptured)
        {
            VisitCapturedVar(currentNode, capturedVar);
        }
    }
    foreach (CapturedVariablesTreeNode childNode in currentNode.ChildNodes)
    {
        VisitTreeAndBuildClassDefinitions(childNode);
    }
    if (variablesFromThisScopeWhichWereCaptured.Count > 0)
    {
        // Schedule reference rewriting in every child scope for each captured variable
        // that has a usable variable-definition node.
        var vars = variablesFromThisScopeWhichWereCaptured
            .Select(x => new
            {
                IVarDefinitionNode = x.SymbolInfo.sym_info as IVAriableDefinitionNode,
                VarDeclNode = x.SyntaxTreeNodeWithVarDeclaration
            })
            .Where(x => x.IVarDefinitionNode != null)
            .ToList();
        foreach (var var in vars)
        {
            foreach (CapturedVariablesTreeNode childNode in currentNode.ChildNodes)
            {
                _rewriteReferencesForNodesThatAreChildNodesToThoseThatContainCapturedVariableInfo.Add(
                    new RewriteReferencesForNodesThatAreChildNodesToThoseThatContainCapturedVariableInfo
                    {
                        TreeNode = childNode,
                        Varname = var.IVarDefinitionNode.name,
                        NodeWithVarDecl = var.VarDeclNode
                    });
            }
        }
    }
}
/// <summary>
/// Evaluates a calculate expression against the datalist identified by
/// <paramref name="curDLID"/>: datalist references, recordset fields, the range
/// operator (":") and the iteration operator are resolved, then the substituted
/// text is handed to InternalEval. Iteration re-evaluates the tree once per record.
/// </summary>
/// <param name="expressionTO">Carries the expression text to evaluate.</param>
/// <param name="curDLID">The ID of the datalist to evaluate against.</param>
/// <param name="errors">All errors accumulated during evaluation.</param>
/// <returns>The evaluated result, or string.Empty when the parser logged events.</returns>
public string EvaluateFunction(IEvaluationFunction expressionTO, Guid curDLID, out ErrorResultTO errors)
{
    string expression = expressionTO.Function;
    IDataListCompiler compiler = DataListFactory.CreateDataListCompiler();
    SyntaxTreeBuilder builder = new SyntaxTreeBuilder();
    ErrorResultTO allErrors = new ErrorResultTO();

    // Travis.Frisinger : 31.01.2013 - Hi-jack this and evaluate all our internal
    IBinaryDataListEntry bde = compiler.Evaluate(curDLID, enActionType.CalculateSubstitution, expression, false, out errors);
    allErrors.MergeErrors(errors);
    if (bde != null)
    {
        expression = bde.FetchScalar().TheValue;
        // Strip quoting introduced by the substitution step.
        if (expression.StartsWith("\""))
        {
            expression = expression.Replace("\"", "").Trim();
        }
    }

    Node[] nodes = builder.Build(expression);
    string result = string.Empty;

    if (builder.EventLog.HasEventLogs)
    {
        // Parse failed: report the logged events as errors instead of evaluating.
        IEnumerable<string> err = EvaluateEventLogs(expression);
        foreach (string e in err)
        {
            allErrors.AddError(e);
        }
    }
    else
    {
        List<Node> allNodes = new List<Node>();
        nodes[0].CollectNodes(allNodes);

        IterationNodeValueSource valueSource = null;
        bool startedIteration = false;
        bool isIteration = false;
        bool pendingIterationRecordSet = false;
        int maxRecords = -1;
        int currentRecord = 0;

        do
        {
            if (startedIteration)
            {
                // Clear cached values so each iteration pass re-evaluates every node.
                foreach (Node t in allNodes)
                {
                    t.EvaluatedValue = null;
                }
            }
            // Walk the nodes from last to first so inner nodes are evaluated before
            // the nodes that consume them.
            for (int i = allNodes.Count - 1; i >= 0; i--)
            {
                if (allNodes[i] is IterationNode)
                {
                    IterationNode refNode = allNodes[i] as IterationNode;
                    if (valueSource == null)
                    {
                        // Record indices are 1-based.
                        valueSource = new IterationNodeValueSource(1);
                    }
                    refNode.ValueSource = valueSource;
                    // The next recordset node encountered determines the record count.
                    pendingIterationRecordSet = true;
                    isIteration = true;
                }
                else if (allNodes[i] is DatalistRecordSetNode)
                {
                    DatalistRecordSetNode refNode = allNodes[i] as DatalistRecordSetNode;
                    if (refNode.Parameter != null)
                    {
                        if ((refNode.Parameter.Items != null && refNode.Parameter.Items.Length != 0) || refNode.Parameter.Statement != null)
                        {
                            refNode.Parameter.EvaluatedValue = InternalEval(refNode.Parameter.GetEvaluatedValue());
                        }
                    }
                    // this way we fetch the correct field with the data...
                    IBinaryDataListEntry e = compiler.Evaluate(curDLID, enActionType.User, refNode.GetRepresentationForEvaluation(), false, out errors);
                    allErrors.MergeErrors(errors);
                    string error;
                    refNode.EvaluatedValue = e.TryFetchLastIndexedRecordsetUpsertPayload(out error).TheValue;
                    allErrors.AddError(error);
                    if (pendingIterationRecordSet)
                    {
                        pendingIterationRecordSet = false;
                        if (refNode.NestedIdentifier != null)
                        {
                            allErrors.AddError("An error occurred while parsing { " + expression + " } Iteration operator can not be used with nested recordset identifiers.");
                            break;
                        }
                        // Extract the recordset name between "[[" and "(".
                        string evaluateRecordLeft = refNode.GetRepresentationForEvaluation();
                        evaluateRecordLeft = evaluateRecordLeft.Substring(2, evaluateRecordLeft.IndexOf('(') - 2);
                        int totalRecords = 0;
                        IBinaryDataList bdl = compiler.FetchBinaryDataList(curDLID, out errors);
                        IBinaryDataListEntry entry;
                        if (bdl.TryGetEntry(evaluateRecordLeft, out entry, out error))
                        {
                            totalRecords = entry.FetchLastRecordsetIndex();
                        }
                        allErrors.AddError(error);
                        maxRecords = Math.Max(totalRecords, maxRecords);
                    }
                }
                else if (allNodes[i] is DatalistReferenceNode)
                {
                    DatalistReferenceNode refNode = allNodes[i] as DatalistReferenceNode;
                    IBinaryDataListEntry entry = compiler.Evaluate(curDLID, enActionType.User, refNode.GetRepresentationForEvaluation(), false, out errors);
                    allErrors.MergeErrors(errors);
                    if (entry.IsRecordset)
                    {
                        string error;
                        refNode.EvaluatedValue = entry.TryFetchLastIndexedRecordsetUpsertPayload(out error).TheValue;
                        double testParse;
                        // Non-numeric values are quoted so the downstream eval treats them as text.
                        if (!Double.TryParse(refNode.EvaluatedValue, out testParse))
                        {
                            refNode.EvaluatedValue = String.Concat("\"", refNode.EvaluatedValue, "\""); //Bug 6438
                        }
                    }
                    else
                    {
                        refNode.EvaluatedValue = entry.FetchScalar().TheValue;
                        double testParse;
                        if (!Double.TryParse(refNode.EvaluatedValue, out testParse))
                        {
                            refNode.EvaluatedValue = String.Concat("\"", refNode.EvaluatedValue, "\""); //Bug 6438
                        }
                    }
                }
                else if (allNodes[i] is BinaryOperatorNode && allNodes[i].Identifier.Start.Definition == TokenKind.Colon)
                {
                    // Range operator: both operands must be fields of the same recordset.
                    BinaryOperatorNode biNode = (BinaryOperatorNode)allNodes[i];
                    if (!(biNode.Left is DatalistRecordSetFieldNode))
                    {
                        allErrors.AddError("An error occurred while parsing { " + expression + " } Range operator can only be used with record set fields.");
                        break;
                    }
                    if (!(biNode.Right is DatalistRecordSetFieldNode))
                    {
                        allErrors.AddError("An error occurred while parsing { " + expression + " } Range operator can only be used with record set fields.");
                        break;
                    }
                    DatalistRecordSetFieldNode fieldLeft = (DatalistRecordSetFieldNode)biNode.Left;
                    DatalistRecordSetFieldNode fieldRight = (DatalistRecordSetFieldNode)biNode.Right;
                    string evaluateFieldLeft = (fieldLeft.Field != null) ? fieldLeft.Field.GetEvaluatedValue() : fieldLeft.Identifier.Content;
                    string evaluateFieldRight = (fieldRight.Field != null) ? fieldRight.Field.GetEvaluatedValue() : fieldRight.Identifier.Content;
                    if (!String.Equals(evaluateFieldLeft, evaluateFieldRight, StringComparison.Ordinal))
                    {
                        allErrors.AddError("An error occurred while parsing { " + expression + " } Range operator must be used with the same record set fields.");
                        break;
                    }
                    // Recordset names are extracted between "[[" and "(".
                    string evaluateRecordLeft = fieldLeft.RecordSet.GetRepresentationForEvaluation();
                    evaluateRecordLeft = evaluateRecordLeft.Substring(2, evaluateRecordLeft.IndexOf('(') - 2);
                    string evaluateRecordRight = fieldRight.RecordSet.GetRepresentationForEvaluation();
                    evaluateRecordRight = evaluateRecordRight.Substring(2, evaluateRecordRight.IndexOf('(') - 2);
                    if (!String.Equals(evaluateRecordLeft, evaluateRecordRight, StringComparison.Ordinal))
                    {
                        allErrors.AddError("An error occurred while parsing { " + expression + " } Range operator must be used with the same record sets.");
                        break;
                    }
                    int totalRecords = 0;
                    IBinaryDataList bdl = compiler.FetchBinaryDataList(curDLID, out errors);
                    string error;
                    IBinaryDataListEntry entry;
                    if (bdl.TryGetEntry(evaluateRecordLeft, out entry, out error))
                    {
                        totalRecords = entry.FetchLastRecordsetIndex();
                    }
                    // Strip the surrounding parentheses from the index parameters;
                    // "()" (length 2) means no explicit index.
                    string rawParamLeft = fieldLeft.RecordSet.Parameter.GetEvaluatedValue();
                    rawParamLeft = rawParamLeft.Length == 2 ? "" : rawParamLeft.Substring(1, rawParamLeft.Length - 2);
                    string rawParamRight = fieldRight.RecordSet.Parameter.GetEvaluatedValue();
                    rawParamRight = rawParamRight.Length == 2 ? "" : rawParamRight.Substring(1, rawParamRight.Length - 2);
                    int startIndex;
                    int endIndex;
                    if (!String.IsNullOrEmpty(rawParamLeft))
                    {
                        if (!Int32.TryParse(rawParamLeft, out startIndex) || startIndex <= 0)
                        {
                            allErrors.AddError("An error occurred while parsing { " + expression + " } Recordset index must be a positive whole number that is greater than zero.");
                            break;
                        }
                    }
                    else
                    {
                        startIndex = 1;
                    }
                    if (!String.IsNullOrEmpty(rawParamRight))
                    {
                        if (!Int32.TryParse(rawParamRight, out endIndex) || endIndex <= 0)
                        {
                            allErrors.AddError("An error occurred while parsing { " + expression + " } Recordset index must be a positive whole number that is greater than zero.");
                            break;
                        }
                        if (endIndex > totalRecords)
                        {
                            allErrors.AddError("An error occurred while parsing { " + expression + " } Recordset end index must be a positive whole number that is less than the number of entries in the recordset.");
                            break;
                        }
                    }
                    else
                    {
                        endIndex = totalRecords;
                    }
                    // Make the end bound exclusive for the loop below.
                    endIndex++;
                    // Build a comma-separated list of the column values across the range.
                    StringBuilder rangeBuilder = new StringBuilder();
                    for (int k = startIndex; k < endIndex; k++)
                    {
                        if (k != startIndex)
                        {
                            rangeBuilder.Append("," + entry.TryFetchRecordsetColumnAtIndex(evaluateFieldLeft, k, out error).TheValue);
                            allErrors.AddError(error);
                        }
                        else
                        {
                            rangeBuilder.Append(entry.TryFetchRecordsetColumnAtIndex(evaluateFieldLeft, k, out error).TheValue);
                            allErrors.AddError(error);
                        }
                    }
                    allNodes[i].EvaluatedValue = rangeBuilder.ToString();
                }
            }
            string evaluatedValue = nodes[0].GetEvaluatedValue();
            result = InternalEval(evaluatedValue);
            if (startedIteration)
            {
                currentRecord = valueSource.Index++;
            }
            // First pass of an iteration: switch into iterating mode for subsequent passes.
            if (isIteration && !startedIteration)
            {
                startedIteration = true;
                currentRecord = valueSource.Index++;
            }
        } while (startedIteration && currentRecord < maxRecords);
    }
    errors = allErrors;
    return(result);
}