/// <summary>
/// Breadth-first search over the fuzzy automaton computing the aggregated pertinence
/// (membership degree) of reaching <paramref name="to"/> from <paramref name="from"/>
/// when consuming symbol <paramref name="c"/>.
/// </summary>
/// <param name="from">Start state of the search (initial path pertinence is 1).</param>
/// <param name="to">Target state whose incoming transitions are scored.</param>
/// <param name="c">Symbol evaluated against each candidate transition's rule.</param>
/// <param name="norm">T-norm combining pertinences along a single path.</param>
/// <param name="conorm">T-conorm aggregating alternative paths into the result.</param>
/// <returns>The aggregated pertinence, or 0.0 if <paramref name="to"/> is unreachable.</returns>
public double SearchPertinence(State<T> from, State<T> to, T c, NormAbstract norm, ConormAbstract conorm)
{
    double pertinenceMax = 0.0;
    var queue = new Queue<Tuple<State<T>, double>>();
    queue.Enqueue(Tuple.Create<State<T>, double>(from, 1));
    var visits = new List<State<T>>();
    while (queue.Count > 0)
    {
        Tuple<State<T>, double> tupleActual = queue.Dequeue();
        State<T> actual = tupleActual.Item1;
        // NOTE(review): states are marked visited on dequeue, so a state may be
        // enqueued more than once before being visited — confirm the conorm is
        // idempotent enough (e.g. max) for duplicates to be harmless.
        visits.Add(actual);
        // Transitions landing directly on the target contribute to the result.
        var trans = actual.Transitions.Where(t => t.To == to).ToList();
        if (trans.Count > 0) // .Count property, not LINQ Count(), on a materialized list
        {
            pertinenceMax = conorm.Calculate(
                conorm.Calculate(trans, t => norm.Calculate(tupleActual.Item2, t.Rule.Math(c))),
                pertinenceMax);
        }
        // Expand unvisited neighbors reachable through the empty symbol, carrying
        // the accumulated path pertinence combined through the norm.
        foreach (var transition in actual.Transitions)
        {
            if (!visits.Contains(transition.To) && transition.Rule.Math(SymbolEmpty) > 0)
            {
                queue.Enqueue(Tuple.Create<State<T>, double>(
                    transition.To,
                    norm.Calculate(transition.Rule.Pertinence, tupleActual.Item2)));
            }
        }
    }
    return pertinenceMax;
}
/// <summary>
/// Enumerates every complete path of the graph starting at the root.
/// </summary>
/// <param name="norm">T-norm handed to each path for cost accumulation.</param>
/// <returns>All root-to-leaf paths found by the recursive overload.</returns>
public List<GraphPath<T, U>> AllPaths(NormAbstract norm)
{
    var collected = new List<GraphPath<T, U>>();
    AllPaths(collected, this.Root, null, 1, norm);
    return collected;
}
/// <summary>
/// Creates an empty path with a neutral cost of 1 and the default
/// equality tolerance of 0.001.
/// </summary>
/// <param name="norm">T-norm used when accumulating node costs on this path.</param>
public GraphPath(NormAbstract norm)
{
    this.Norm = norm;
    this.MinEquality = 0.001;
    Cost = 1;
    Nodes = new List<Node<T, U>>();
    Id = id++; // sequential instance id taken from the shared counter
}
/// <summary>
/// Builds a fuzzy recognizer from a fuzzy regular expression and immediately
/// compiles it into its automaton.
/// </summary>
/// <param name="regexFuzzy">The fuzzy regular expression to compile.</param>
/// <param name="norm">T-norm used by the recognizer.</param>
/// <param name="conorm">T-conorm used by the recognizer.</param>
/// <param name="valueDefaultTransitions">Default pertinence assigned to transitions.</param>
public RecognitionFuzzy(string regexFuzzy, NormAbstract norm, ConormAbstract conorm, double valueDefaultTransitions)
{
    this.Norm = norm;
    this.Conorm = conorm;
    this.RegexFuzzy = regexFuzzy;
    ValueDefaultTransitions = valueDefaultTransitions;
    // Eager compilation: the automaton is ready as soon as construction finishes.
    RegexToAutoma();
}
/// <summary>
/// Base initialization for lexical analyzers: stores the fuzzy operators and token
/// rules, resets the id counters, and caches the unitary (single-symbol) rules.
/// </summary>
/// <param name="rules">Token recognition rules available to the analyzer.</param>
/// <param name="norm">T-norm used during recognition.</param>
/// <param name="conorm">T-conorm used during recognition.</param>
public AbstractLexicalAnalysis(List<RecognitionToken> rules, NormAbstract norm, ConormAbstract conorm)
{
    Norm = norm;
    Conorm = conorm;
    Rules = rules;
    IdsToken = 0;
    IdsNode = 0;
    // Pre-filter the unitary rules; they are matched separately.
    RulesUnitys = rules.Where(r => r.Unitary).ToList();
}
/// <summary>
/// Initializes the shared lexical-analyzer state: fuzzy operators, the full rule
/// set, zeroed id counters, and the cached subset of single-character rules.
/// </summary>
/// <param name="rules">Token recognition rules available to the analyzer.</param>
/// <param name="norm">T-norm used during recognition.</param>
/// <param name="conorm">T-conorm used during recognition.</param>
public AbstractLexicalAnalysis(List<RecognitionToken> rules, NormAbstract norm, ConormAbstract conorm)
{
    this.Norm = norm;
    this.Conorm = conorm;
    IdsToken = 0;
    IdsNode = 0;
    Rules = rules;
    // Unitary rules (single symbol) get their own fast-path collection.
    RulesUnitys = rules.Where(r => r.Unitary).ToList();
}
/// <summary>
/// Wires the compiler with its grammar, the variable names per production, and
/// the per-construct code-generation callbacks.
/// </summary>
/// <remarks>
/// NOTE(review): the <paramref name="recs"/> token list is not stored or used by
/// this constructor — confirm whether it is still needed by callers.
/// </remarks>
public CompilerFuzzy(List<RecognitionToken> recs, Grammar grammar, Dictionary<int, List<string>> namesVars, Dictionary<string, Func<Container, string>> dicCompileExample, NormAbstract norm, ConormAbstract conorm)
{
    this.Norm = norm;
    this.Conorm = conorm;
    this.Grammar = grammar;
    this.NameVars = namesVars;
    this.DicCompile = dicCompileExample;
}
/// <summary>
/// Stores the compiler's collaborators: fuzzy operators, the grammar, variable
/// names per production id, and the code-generation callback table.
/// </summary>
/// <remarks>
/// NOTE(review): <paramref name="recs"/> is accepted but never used here —
/// verify against callers before removing.
/// </remarks>
public CompilerFuzzy(List<RecognitionToken> recs, Grammar grammar, Dictionary<int, List<string>> namesVars, Dictionary<string, Func<Container, string>> dicCompileExample, NormAbstract norm, ConormAbstract conorm)
{
    this.Grammar = grammar;
    this.NameVars = namesVars;
    this.DicCompile = dicCompileExample;
    this.Norm = norm;
    this.Conorm = conorm;
}
/// <summary>
/// Defines a token kind: its compiled fuzzy-regex recognizer, whether it is a
/// single-character (unitary) token, and the color used to highlight it.
/// </summary>
/// <param name="id">Token id; also seeds the fallback color selection.</param>
/// <param name="name">Display name of the token.</param>
/// <param name="fregex">Fuzzy regular expression; may be null or empty.</param>
/// <param name="hexColor">Optional HTML color; blank selects a deterministic known color.</param>
/// <param name="norm">T-norm forwarded to the recognizer.</param>
/// <param name="conorm">T-conorm forwarded to the recognizer.</param>
public RecognitionToken(int id, string name, string fregex, string hexColor, NormAbstract norm, ConormAbstract conorm)
{
    this.Norm = norm;
    this.Conorm = conorm;
    Id = id;
    Name = name;
    // 0.8 is the default transition pertinence for the compiled fuzzy regex.
    RegexFuzzy = new RecognitionFuzzy(fregex, norm, conorm, 0.8);
    // Unitary: a non-empty expression of exactly one character.
    Unitary = !String.IsNullOrEmpty(fregex) && fregex.Length == 1;
    if (!string.IsNullOrWhiteSpace(hexColor))
    {
        this.Color = System.Drawing.ColorTranslator.FromHtml(hexColor);
    }
    else
    {
        // No explicit color: derive a deterministic KnownColor from the token id.
        System.Array colorsArray = Enum.GetValues(typeof(KnownColor));
        this.Color = Color.FromKnownColor((KnownColor)colorsArray.GetValue((id * 10) % colorsArray.Length));
    }
}
/// <summary>
/// Builds an LR(1) parser for <paramref name="grammar"/>: creates an augmented
/// grammar with a fresh start symbol S' (id 100) producing the original start
/// variable, builds the item-set automaton over the combined variable+terminal
/// alphabet, then derives the parse table.
/// </summary>
/// <remarks>
/// Order matters: GrammarLine must be fully populated (rules, terminals,
/// variables, empty-variables, the new S' start rule) before CreateAutoma(),
/// and the automaton must exist before CreateTable(). GrammarLine shares the
/// same Rule/Symbol instances as the input grammar (AddRange copies references).
/// </remarks>
/// <param name="grammar">The source grammar to parse.</param>
/// <param name="norm">T-norm forwarded to the base analyzer.</param>
/// <param name="conorm">T-conorm forwarded to the base analyzer.</param>
public SyntacticAnalysisLR1(GrammarFuzzy.Grammar grammar, NormAbstract norm, ConormAbstract conorm) : base(grammar, norm, conorm) { CacheFirst = new Hashtable(); Table = new Dictionary<State<Symbol>, Dictionary<Symbol, List<Operation>>>(); //Rules = new List<RuleProductionState>(); List<Symbol> alphabet = new List<Symbol>(); alphabet.AddRange(grammar.Variables); alphabet.AddRange(grammar.Terminals); this.Grammar = grammar; GrammarLine = new GrammarFuzzy.Grammar(); GrammarLine.Rules.AddRange(grammar.Rules); GrammarLine.Terminals.AddRange(grammar.Terminals); GrammarLine.Variables.AddRange(grammar.Variables); GrammarLine.VariablesEmpty.AddRange(grammar.VariablesEmpty); SymbolInitialLine = new Symbol(100, "S'", false); GrammarLine.Variables.Add(SymbolInitialLine); GrammarLine.AddRule(SymbolInitialLine, grammar.VariableStart); GrammarLine.VariableStart = SymbolInitialLine; Automa = new Automa<Symbol>(alphabet, Symbol.EmptySymbol); SymbolInitial = grammar.VariableStart; CreateAutoma(); CreateTable(); }
/// <summary>
/// Constructs an LR(1) parser: augments the grammar with start symbol S'
/// (id 100, S' -> original start), builds the state automaton over the full
/// variable+terminal alphabet, and fills the ACTION/GOTO table.
/// </summary>
/// <remarks>
/// Statement order is significant — the augmented GrammarLine (including the
/// S' rule and new start variable) must be complete before CreateAutoma(), and
/// CreateTable() consumes the finished automaton. Note that GrammarLine's
/// collections hold references to the original grammar's rules and symbols.
/// </remarks>
/// <param name="grammar">The source grammar to parse.</param>
/// <param name="norm">T-norm forwarded to the base analyzer.</param>
/// <param name="conorm">T-conorm forwarded to the base analyzer.</param>
public SyntacticAnalysisLR1(GrammarFuzzy.Grammar grammar, NormAbstract norm, ConormAbstract conorm) : base(grammar, norm, conorm) { CacheFirst = new Hashtable(); Table = new Dictionary <State <Symbol>, Dictionary <Symbol, List <Operation> > >(); //Rules = new List<RuleProductionState>(); List <Symbol> alphabet = new List <Symbol>(); alphabet.AddRange(grammar.Variables); alphabet.AddRange(grammar.Terminals); this.Grammar = grammar; GrammarLine = new GrammarFuzzy.Grammar(); GrammarLine.Rules.AddRange(grammar.Rules); GrammarLine.Terminals.AddRange(grammar.Terminals); GrammarLine.Variables.AddRange(grammar.Variables); GrammarLine.VariablesEmpty.AddRange(grammar.VariablesEmpty); SymbolInitialLine = new Symbol(100, "S'", false); GrammarLine.Variables.Add(SymbolInitialLine); GrammarLine.AddRule(SymbolInitialLine, grammar.VariableStart); GrammarLine.VariableStart = SymbolInitialLine; Automa = new Automa <Symbol>(alphabet, Symbol.EmptySymbol); SymbolInitial = grammar.VariableStart; CreateAutoma(); CreateTable(); }
/// <summary>
/// Lexical analyzer variant used for tokenization; all initialization is
/// delegated to the shared base class.
/// </summary>
/// <param name="rules">Token recognition rules.</param>
/// <param name="norm">T-norm used during recognition.</param>
/// <param name="conorm">T-conorm used during recognition.</param>
public TokenizerLexicalAnalysis(List<RecognitionToken> rules, NormAbstract norm, ConormAbstract conorm)
    : base(rules, norm, conorm)
{
}
/// <summary>
/// Depth-first enumeration of every root-to-leaf path. At each branch point the
/// first edge continues on the current path instance while every other edge
/// continues on a fresh copy of the path as it was before branching.
/// </summary>
/// <param name="returns">Accumulator receiving each completed (leaf-terminated) path.</param>
/// <param name="node">Node appended to the current path at this step.</param>
/// <param name="path">Path built so far; null on the initial call.</param>
/// <param name="cost">Cost of the edge that led to <paramref name="node"/>.</param>
/// <param name="norm">T-norm handed to a newly created path.</param>
public void AllPaths(List<GraphPath<T, U>> returns, Node<T, U> node, GraphPath<T, U> path, double cost, NormAbstract norm)
{
    if (path == null)
    {
        path = new GraphPath<T, U>(norm);
    }
    path.AddNode(node, cost);
    if (node.Edges.Count == 0)
    {
        // Leaf reached: record the finished path (skip duplicates).
        if (!returns.Contains(path))
        {
            returns.Add(path);
        }
        return;
    }
    // Snapshot before branching so sibling edges do not see each other's nodes.
    var snapshot = path.Copy();
    for (int i = 0; i < node.Edges.Count; i++)
    {
        var edge = node.Edges[i];
        var branch = (i == 0) ? path : snapshot.Copy();
        AllPaths(returns, edge.Destiny, branch, edge.Cost, norm);
    }
}
/// <summary>
/// Full lexical analyzer variant; behavior comes entirely from the shared
/// base-class initialization.
/// </summary>
/// <param name="rules">Token recognition rules.</param>
/// <param name="norm">T-norm used during recognition.</param>
/// <param name="conorm">T-conorm used during recognition.</param>
public FullLexicalAnalysis(List<RecognitionToken> rules, NormAbstract norm, ConormAbstract conorm)
    : base(rules, norm, conorm)
{
}
/// <summary>
/// Base state shared by all syntactic analyzers: the grammar being parsed and
/// the fuzzy operators used to combine pertinences.
/// </summary>
/// <param name="grammar">The grammar this analyzer operates on.</param>
/// <param name="norm">T-norm used by the analyzer.</param>
/// <param name="conorm">T-conorm used by the analyzer.</param>
public SyntacticAnalysisAbstract(Grammar grammar, NormAbstract norm, ConormAbstract conorm)
{
    this.Grammar = grammar;
    Norm = norm;
    Conorm = conorm;
}