// Trains the network from example SPARQL queries: each token (split into QName
// fragments) becomes a training Example whose input is the query text preceding it.
// TODO: use the ParseQuery method here somehow as they are much the same
public override void TrainFromQueries(params string[] queries)
{
    InParseMode = false; // disable parse-time behaviour while accumulating training data
    var examples = new List <Example>();
    foreach (var query in queries)
    {
        var tokeniser = new SparqlTokeniser(ParsingTextReader.Create(new StringReader(query)), SparqlQuerySyntax.Sparql_1_1);
        var token = tokeniser.GetNextToken(); // first token is consumed and discarded before the loop
        PredicateNet currentPrefix = null; // NOTE(review): assigned below but never read afterwards
        while (token != null && !(token is EOFToken))
        {
            token = tokeniser.GetNextToken();
            if (token is PrefixToken)
            {
                // PREFIX declaration: register a PredicateNet mapping prefix -> ontology URI.
                var prefix = token.Value;
                // NOTE(review): assumes the token following a PrefixToken holds a valid absolute URI — confirm
                var ontology = new Uri(tokeniser.GetNextToken().Value);
                currentPrefix = new PredicateNet(prefix, ontology, this.PrefixLoader);
                Add(currentPrefix, true);
            }
            else
            {
                var w = token.Value.ToLower();
                // QNames such as "foo:bar" split into two fragments; only the first keeps its ':'.
                var words = w.Split(':');
                var wordSuffix = words.Length > 1 ? ":" : "";
                var offset = 0;
                foreach (var word in words)
                {
                    var fullWord = word + wordSuffix;
                    var node = Node(fullWord); // presumably registers fullWord as a node as a side effect — verify
                    wordSuffix = ""; // subsequent fragments carry no ':' suffix
                    if (query.Trim().Length > token.StartPosition)
                    {
                        // Everything before this token becomes the input used to predict fullWord.
                        // NOTE(review): StartPosition appears to be treated as 1-based here; this
                        // Substring can throw if (StartPosition - 1) + offset exceeds the query
                        // length or goes negative — TODO confirm against the tokeniser's contract.
                        var partQuery = query.Substring(0, (token.StartPosition - 1) + offset).Trim();
                        var inputNode = Node(partQuery);
                        examples.Add(new Example(inputNode, fullWord));
                        offset = fullWord.Length; // shift so the second fragment's input includes the first
                    }
                }
            }
        }
    }
    Train(examples.ToArray());
    InParseMode = true; // restore parse mode once training is complete
}
/// <summary>
/// Tokenises <paramref name="query"/> and rebuilds the <c>nodes</c> list with one
/// <c>TokenNode</c> per SPARQL token. PREFIX directives additionally register a
/// <c>PredicateNet</c>; QNames additionally get a "link" edge to their plain node.
/// On any failure the exception is recorded in <c>CurrentError</c> and parsing stops.
/// </summary>
/// <param name="query">The SPARQL query text to parse.</param>
private void ParseQuery(string query)
{
    nodes.Clear();
    var lexer = new SparqlTokeniser(ParsingTextReader.Create(new StringReader(query)), SparqlQuerySyntax.Sparql_1_1);
    var current = lexer.GetNextToken(); // consume the initial token before entering the loop
    while (current != null && !(current is EOFToken))
    {
        try
        {
            current = lexer.GetNextToken();
            if (current is PrefixDirectiveToken)
            {
                // PREFIX directive: next token is the prefix name, the one after is the ontology URI.
                current = lexer.GetNextToken();
                var prefixName = current.Value;
                var ontologyUri = new Uri(lexer.GetNextToken().Value);
                var prefixNet = new PredicateNet(prefixName, ontologyUri, this.PrefixLoader);
                Add(prefixNet, true);
                var prefixLink = new TokenNode(new Uri(R(current.Value.ToLower())), current);
                nodes.Add(prefixLink);
            }
            else if (current is QNameToken)
            {
                // QName: create a namespaced node and link it back to the plain-form node.
                var qnameNode = new TokenNode(new Uri(R(current.Value.ToLower().Replace(":", "_namespace"))), current);
                var plainNode = Node(R(current.Value.ToLower()));
                nodes.Add(qnameNode);
                Add(qnameNode, "link", plainNode);
            }
            else
            {
                // Any other token becomes a plain TokenNode.
                nodes.Add(new TokenNode(new Uri(R(current.Value.ToLower())), current));
            }
        }
        catch (Exception ex)
        {
            // Record the failure for the caller and abandon the parse.
            CurrentError = ex;
            return;
        }
    }
}
/// <summary>
/// Trains the network from example SPARQL queries. Variables are namespaced per
/// query ("1.?x", "2.?x", ...) so distinct queries don't share variable nodes.
/// Output tokens (those containing <c>Prefix</c>) become NodeExamples paired with
/// the preceding token's node; "?v a &lt;type&gt;"-style triples add an "a" edge
/// from the variable node to the type node.
/// </summary>
/// <param name="queries">Example SPARQL queries to learn from.</param>
public override void TrainFromQueries(params string[] queries)
{
    InParseMode = false; // disable parse-time behaviour while accumulating training data
    var examples = new List <NodeExample>();
    var queryId = 0;
    foreach (var query in queries)
    {
        queryId++; // per-query namespace for variable tokens
        var tokeniser = new SparqlTokeniser(ParsingTextReader.Create(new StringReader(query)), SparqlQuerySyntax.Sparql_1_1);
        var token = tokeniser.GetNextToken(); // first token is consumed and discarded before the loop
        IToken beforeLastToken = null;
        IToken lastToken = null;
        while (token != null && !(token is EOFToken))
        {
            token = tokeniser.GetNextToken();
            var tokenValue = token is VariableToken ? queryId + "." + token.Value : token.Value;
            var node = Node(tokenValue);
            node.UseEdgesAsInterface = false;
            // Pair each output node with the node of the token that preceded it.
            // BUGFIX: guard lastToken against null — previously an output token
            // appearing as the very first token of a query threw NullReferenceException.
            if (lastToken != null && tokenValue.Contains(Prefix) && Outputs.Contains(node))
            {
                var last = Node(queryId + "." + lastToken.Value);
                examples.Add(new NodeExample(last, node));
            }
            // "?v a <type>" pattern: link the variable to its type via an "a" edge.
            // (lastToken is non-null here: beforeLastToken is only ever set from a
            // previously-assigned lastToken.)
            // NOTE(review): Contains("a") matches ANY token containing the letter 'a',
            // not just the SPARQL keyword "a" — looks over-broad; confirm intent
            // before tightening to an exact comparison.
            if (beforeLastToken is VariableToken && (lastToken.Value.Contains("a") || lastToken.Value.Contains(Prefix)))
            {
                var n = Node(queryId + "." + beforeLastToken.Value);
                n.AddEdge(Node("a"), node);
            }
            beforeLastToken = lastToken;
            lastToken = token;
        }
    }
    Train(examples.ToArray());
    InParseMode = true; // restore parse mode once training is complete
}
/// <summary>
/// Builds bigram-style training examples from the supplied SPARQL queries:
/// each token's node is paired with the node of the token that precedes it,
/// then the whole set is handed to <c>Train</c>.
/// </summary>
/// <param name="queries">Example SPARQL queries to learn from.</param>
public virtual void TrainFromQueries(params string[] queries)
{
    InParseMode = false; // switch off parse mode while accumulating examples
    var examples = new List <NodeExample>();
    foreach (var query in queries)
    {
        var lexer = new SparqlTokeniser(ParsingTextReader.Create(new StringReader(query)), SparqlQuerySyntax.Sparql_1_1);
        var previous = Node(""); // sentinel: the first real token is preceded by the empty word
        var current = lexer.GetNextToken(); // consume the initial token before entering the loop
        while (current != null && !(current is EOFToken))
        {
            current = lexer.GetNextToken();
            var wordNode = Node(current.Value.ToLower());
            examples.Add(new NodeExample(Node(previous), wordNode));
            previous = wordNode;
        }
    }
    Train(examples.ToArray());
    InParseMode = true; // restore parse mode once training is complete
}