// Resets this node to an empty state: no term, no clause, no successor, depth zero.
public void Clear()
{
    term = null;
    nextClause = null;
    nextNode = null;
    level = 0;
}
// Posts a new BaseTerm to the server, dispatches the result action, and shows a
// success or failure notification.
// Fix: the HTTP status of the response was previously ignored, so a server-side
// failure (non-2xx) would still try to deserialize the error body and then
// dispatch the success notification. Non-success responses are now reported as
// failures, consistent with the exception path.
public async Task HandleAddDataAction(BaseTermsAddAction baseTermAction, IDispatcher dispatcher)
{
    var returnData = new BaseTerm();
    var returnCode = HttpStatusCode.OK;

    try
    {
        _httpClient.DefaultRequestHeaders.Authorization =
            new AuthenticationHeaderValue("Token", baseTermAction.Token);

        var response = await _httpClient.PostAsJsonAsync(
            requestUri: $"{Const.BaseTerms}", baseTermAction.BaseTerm);

        if (response.IsSuccessStatusCode)
        {
            returnData = await response.Content.ReadFromJsonAsync<BaseTerm>();
        }
        else
        {
            // surface the server's failure instead of treating it as success
            dispatcher.Dispatch(new NotificationAction(
                response.ReasonPhrase ?? response.StatusCode.ToString(), SnackbarColor.Danger));
            returnCode = HttpStatusCode.BadRequest;
        }
    }
    catch (Exception e)
    {
        dispatcher.Dispatch(new NotificationAction(e.Message, SnackbarColor.Danger));
        returnCode = HttpStatusCode.BadRequest;
    }

    dispatcher.Dispatch(
        new BaseTermsAddResultAction(returnData ?? new BaseTerm(), httpStatusCode: returnCode));

    if (returnCode != HttpStatusCode.BadRequest)
    {
        dispatcher.Dispatch(
            new NotificationAction(baseTermAction.BaseTermAddedMessage, SnackbarColor.Success));
    }
}
// Emits a named JSON object ("attrName": { ... }) into the text buffer.
static void DoJsonObject0(JsonTextBuffer avb, string attrName, BaseTerm t, bool first)
{
    avb.AppendPossibleCommaAndNewLine(first); // is this entire {}-list the first element?
    avb.EmitOpenBracket(attrName, '{');
    DoJsonObject(avb, t, true);               // emit the object's pairs
    avb.EmitCloseBracket('}');
}
// Adds a clause to the predicate table, at the front (asserta == true) or at
// the end (assertz) of the predicate's clause list. The assertion may be a
// plain head or Head :- Body. Errors are raised for non-callable heads,
// predefined predicates/operators and cacheable predicates.
public void Assert(BaseTerm assertion, bool asserta)
{
    BaseTerm head;
    TermNode body = null;
    PredicateDescr pd;

    if (assertion.HasFunctor(PrologParser.IMPLIES))
    {
        // assertion has the form Head :- Body
        head = assertion.Arg(0);
        body = assertion.Arg(1).ToGoalList();
    }
    else
    {
        head = assertion;
    }

    if (!head.IsCallable)
    {
        IO.Error("Illegal predicate head '{0}'", head.ToString());
    }

    string key = head.Key;

    if ((predefineds.Contains(key)) || (head.Precedence >= 1000))
    {
        IO.Error("assert/1 cannot be applied to predefined predicate or operator '{0}'", assertion.Index);
    }

    predTable.TryGetValue(key, out pd);
    ClauseNode newC = new ClauseNode(head, body);

    if (pd == null) // first head
    {
        SetClauseList(head.FunctorToString, head.Arity, newC);
        ResolveIndices();
    }
    else if (pd.IsCacheable)
    {
        IO.Error("assert/1 cannot be applied to cached predicate '{0}'", assertion.Index);
    }
    else if (asserta) // at beginning
    {
        newC.NextClause = pd.ClauseList; // pd.ClauseList may be null
        SetClauseList(head.FunctorToString, head.Arity, newC);
#if arg1index
        pd.CreateFirstArgIndex(); // re-create
#endif
    }
    else // at end
    {
        pd.AppendToClauseList(newC);
#if arg1index
        pd.CreateFirstArgIndex(); // re-create
#endif
    }

    InvalidateCrossRef();
}
// Wraps a term as an operand token for operator-precedence processing.
public OperandToken(BaseTerm term)
{
    this.term = term;
    type = TT.Term;
    role = type;   // an operand's role coincides with its type
    prevOd = null; // no preceding operand yet
}
// Emits one JSON value, dispatching on the term's shape: {..} object,
// wrapped alt-list object, proper list (array) or scalar. Anything else
// is not a valid JSON term.
static void DoJsonValue(JsonTextBuffer avb, string attrName, BaseTerm t, bool first)
{
    string functor = t.FunctorToString;

    if (functor == PrologParser.CURL)
    {
        DoJsonObject0(avb, attrName, t, first); // is this the first object of a list?
    }
    else if (t is AltListTerm) // if {...} has been declared a list with wrap( '{', |, '}')
    {
        DoJsonObject0(avb, attrName, (AltListTerm)t, first);
    }
    else if (t.IsProperList)
    {
        DoJsonArray0(avb, attrName, (ListTerm)t, first); // JSON array
    }
    else if (t.IsNumber || t.Arity == 0)
    {
        avb.EmitAttrValuePair(attrName, t.ToString(), first); // scalar value
    }
    else
    {
        IO.Error("Not a JSON-term:\r\n{0}", t);
    }
}
// LL(1) parser procedure: parses a JSON object { "name": value, ... } into t.
private void JsonObject(TerminalSet _TS, out BaseTerm t)
{
    BaseTerm e;
    List <BaseTerm> listItems = new List <BaseTerm>();
    GetSymbol(new TerminalSet(terminalCount, LCuBracket), true, true); // consume '{'
    GetSymbol(new TerminalSet(terminalCount, StringLiteral, RCuBracket), false, true);

    if (symbol.TerminalId == StringLiteral) // non-empty object
    {
        while (true)
        {
            JsonPair(new TerminalSet(terminalCount, Comma, RCuBracket), out e);
            listItems.Add(e);
            GetSymbol(new TerminalSet(terminalCount, Comma, RCuBracket), false, true);

            if (symbol.TerminalId == Comma)
            {
                symbol.SetProcessed(); // consume ',' and continue with the next pair
            }
            else
            {
                break;
            }
        }
    }

    GetSymbol(new TerminalSet(terminalCount, RCuBracket), true, true); // consume '}'
    t = JsonTerm.FromArray(listItems.ToArray());
}
// Implements the working-directory builtin: with no argument, restore the
// configured default; with a variable argument, unify it with the current
// working directory; otherwise set the working directory to the given name.
public static bool SetWorkingDirectory(BaseTerm term, VarStack varStack)
{
    if (term.Arity == 0)
    {
        // no argument: reset to the value from the configuration (may be null)
        workingDirectory = GetConfigSetting("WorkingDirectory", null);
        IO.Message("Working directory set to '{0}'", WorkingDirectory);
        return(true);
    }

    BaseTerm t0 = term.Arg(0);

    if (t0.IsVar)
    {
        // variable argument: report the current directory instead of setting it
        t0.Unify(new StringTerm(workingDirectory), varStack);
        return(true);
    }

    string wd = Utils.DirectoryNameFromTerm(t0);

    if (wd == null)
    {
        IO.Error("Illegal name '{0}' for working directory", t0.FunctorToString);
        return(false);
    }

    workingDirectory = wd;
    IO.Message("Working directory set to '{0}'", WorkingDirectory);
    return(true);
}
// Unifies this range element with t. Only another ListPatternElem can match;
// argument slots that are null on both sides count as equal.
public override bool Unify(BaseTerm t, VarStack varStack)
{
    NextUnifyCount();

    if (!((t = t.ChainEnd()) is ListPatternElem))
    {
        return(false); // should never occur
    }
#if old
    if (isNegSearch != ((ListPatternElem)t).isNegSearch)
    {
        return(false);
    }
#endif
    for (int i = 0; i < arity; i++)
    {
        // both-null (unused slot) matches; otherwise the args must unify
        if (!((args[i] == null && t.Args[i] == null) || args[i].Unify(t.Args[i], varStack)))
        {
            return(false);
        }
    }

    return(true);
}
// A lowBound..hiBound range term; an enumerator over the range is prepared eagerly.
public IntRangeTerm(BaseTerm lowBound, BaseTerm hiBound)
  : base("..", lowBound, hiBound)
{
    this.hiBound = hiBound;
    this.lowBound = lowBound;
    iEnum = GetEnumerator();
}
// Unifies a list pattern with t: a variable binds to the pattern; another
// pattern of equal arity unifies element-wise; a proper list is searched by
// the pattern-matching machinery (UnifyTailEx).
public override bool Unify(BaseTerm t, VarStack varStack)
{
    if ((t = t.ChainEnd()) is Variable) // t not unified
    {
        ((Variable)t).Bind(this);
        varStack.Push(t); // record the binding so it can be undone on backtracking
        return(true);
    }

    if (t is ListPatternTerm && arity == t.Arity) // two ListPatternTerms match if their rangeTerms match
    {
        for (int i = 0; i < arity; i++)
        {
            if (!args[i].Unify(t.Args[i], varStack))
            {
                return(false);
            }
        }

        return(true);
    }

    if (t is ListTerm)
    {
        pattern = args;                  // pattern is searched ...
        target = ((ListTerm)t).ToList(); // ... in target
        int ip = 0;
        int it = 0;
        return(UnifyTailEx(ip, it, varStack));
    }

    return(false);
}
// Evaluates this term for is/2. A ValueTerm evaluates to itself; a proper
// list is evaluated member-wise (and marked so it is not re-evaluated);
// everything else is delegated to Apply().
public BaseTerm Eval() // evaluate the term
{
    BaseTerm t = ChainEnd();

    if (!t.IsEvaluatable)
    {
        IO.Error("{0} cannot be evaluated by is/2", t);
    }

    if (t is ValueTerm)
    {
        return(t); // a ValueTerm stands for itself
    }

    if (t.IsProperList && !((ListTerm)t).IsEvaluated) // evaluate all members recursively
    {
        ListTerm result = ListTerm.EMPTYLIST;
        List <BaseTerm> tl = ((ListTerm)t).ToList();

        // cons evaluated members back-to-front so the original order is kept
        for (int i = tl.Count - 1; i >= 0; i--)
        {
            result = new ListTerm(tl [i].Eval(), result);
        }

        result.IsEvaluated = true; // avoid re-evaluating this list later

        return(result);
    }

    return(t.Apply());
}
// Copy constructor: duplicates the bounds of an existing range term and
// prepares a fresh enumerator.
public IntRangeTerm(IntRangeTerm that) // for copying only
  : base("..", that.lowBound, that.hiBound)
{
    lowBound = that.lowBound;
    hiBound = that.hiBound;
    iEnum = GetEnumerator();
}
// LL(1) parser procedure: parses a JSON array [ v1, v2, ... ] into
// t = array(List).
private void JsonArray(TerminalSet _TS, out BaseTerm t)
{
    BaseTerm e;
    List <BaseTerm> listItems = new List <BaseTerm>();
    GetSymbol(new TerminalSet(terminalCount, LSqBracket), true, true); // consume '['
    GetSymbol(new TerminalSet(terminalCount, IntLiteral, RealLiteral, StringLiteral, LSqBracket, RSqBracket, LCuBracket, TrueSym, FalseSym, NullSym), false, true);

    if (symbol.IsMemberOf(IntLiteral, RealLiteral, StringLiteral, LSqBracket, LCuBracket, TrueSym, FalseSym, NullSym)) // non-empty array
    {
        while (true)
        {
            JsonValue(new TerminalSet(terminalCount, Comma, RSqBracket), out e);
            listItems.Add(e);
            GetSymbol(new TerminalSet(terminalCount, Comma, RSqBracket), false, true);

            if (symbol.TerminalId == Comma)
            {
                symbol.SetProcessed(); // consume ',' and continue with the next value
            }
            else
            {
                break;
            }
        }
    }

    GetSymbol(new TerminalSet(terminalCount, RSqBracket), true, true); // consume ']'
    t = new CompoundTerm("array", ListTerm.ListFromArray(listItems.ToArray(), BaseTerm.EMPTYLIST));
}
// LL(1) parser procedure: parses a JSON object { "name": value, ... } into t.
void JsonObject(TerminalSet _TS, out BaseTerm t)
{
#if LL1_tracing
    ReportParserProcEntry("JsonObject");
#endif
    BaseTerm e;
    List <BaseTerm> listItems = new List <BaseTerm> ();
    GetSymbol(new TerminalSet(terminalCount, LCuBracket), true, true); // consume '{'
    GetSymbol(new TerminalSet(terminalCount, StringLiteral, RCuBracket), false, true);

    if (symbol.Terminal == StringLiteral) // non-empty object
    {
        while (true)
        {
            JsonPair(new TerminalSet(terminalCount, Comma, RCuBracket), out e);
            listItems.Add(e);
            GetSymbol(new TerminalSet(terminalCount, Comma, RCuBracket), false, true);

            if (symbol.Terminal == Comma)
            {
                symbol.SetProcessed(); // consume ',' and continue with the next pair
            }
            else
            {
                break;
            }
        }
    }

    GetSymbol(new TerminalSet(terminalCount, RCuBracket), true, true); // consume '}'
    t = JsonTerm.FromArray(listItems.ToArray());
#if LL1_tracing
    ReportParserProcExit("JsonObject");
#endif
}
// Writes every name:value pair of JSON object t as XML content.
static void DoJsonObject(XmlTextWriter xwr, string [] attributes, BaseTerm t, ref bool contentWritten)
{
    foreach (BaseTerm pair in t.Args)
    {
        DoJsonPair(xwr, attributes, pair, ref contentWritten);
    }
}
// MinRemMatchLen and MaxRemMatchLen are calculated on the fly during the matching process
// TODO: eventually use this constructor (translated from Dutch original)
// NOTE: keep CopyEx in mind (translated from Dutch original)
public ListPatternElem(DownRepFactor downRepFactor, DisjunctiveSearchTerm altSearchTerms, AcrossRepFactor acrossRepFactor)
  : base("RANGE") // used by CopyEx
{
    this.downRepFactor = downRepFactor;
    this.altSearchTerms = altSearchTerms;
    this.acrossRepFactor = acrossRepFactor;

    // argument layout: [0] min length, [1] max length, [2] range bind var,
    // [3] alternatives bind var, [TARGETOFFSET..] the search alternatives
    int n = TARGETOFFSET + (altSearchTerms == null ? 1 : altSearchTerms.Count);
    args = new BaseTerm [n];
    args [0] = HasAcrossRepFactor ? acrossRepFactor.minLenTerm : DecimalTerm.ZERO;
    args [1] = HasAcrossRepFactor ? acrossRepFactor.maxLenTerm : DecimalTerm.ZERO;
    args [2] = HasAcrossRepFactor ? acrossRepFactor.bindVar : null;
    args [3] = HasAltSearchTerms ? altSearchTerms.bindVar : null;
    int i = TARGETOFFSET;

    if (altSearchTerms != null)
    {
        foreach (BaseTerm t in altSearchTerms.alternatives)
        {
            args [i++] = t;
        }
    }

    isNegSearch = (altSearchTerms == null) ? false : altSearchTerms.isNegSearch;
}
// Approximate unification of a complex number with a numeric term; both
// components are compared with a small tolerance rather than exact equality.
public override bool Unify(BaseTerm t, VarStack varStack)
{
    if (t is Variable)
    {
        return(t.Unify(this, varStack)); // let the variable bind to this term
    }

    NextUnifyCount();
    const double eps = 1.0e-6; // arbitrary, cosmetic

    if (t is DecimalTerm)
    {
        // a real number matches only if our imaginary part is (nearly) zero
        DecimalTerm dt = (DecimalTerm)t;

        return(Math.Abs(im) < eps && Math.Abs(re - dt.ValueD) < eps);
    }

    if (t is ComplexTerm)
    {
        ComplexTerm ct = (ComplexTerm)t;

        return(Math.Abs(re - ct.Re) < eps && Math.Abs(im - ct.Im) < eps);
    }

    return(false);
}
// Orders a complex number against another number: identical components are
// equal; otherwise magnitudes are compared. Against a real number the real
// parts are compared when this value is purely real, the magnitude otherwise.
// Other operand types are an error.
public override int CompareTo(BaseTerm t)
{
    if (t is ComplexTerm)
    {
        ComplexTerm c = (ComplexTerm)t;

        if (re == c.re && im == c.im)
        {
            return(0);
        }

        return(magnitude.CompareTo(c.magnitude)); // compare |this| and |c|
    }

    if (t is DecimalTerm)
    {
        DecimalTerm d = (DecimalTerm)t;

        return(im == 0 ? re.CompareTo(d.ValueD) : magnitude.CompareTo(d.ValueD));
    }

    IO.Error("Relational operator cannot be applied to '{0}' and '{1}'", this, t);

    return(0);
}
// 'xmldocument$'(['xmldecl$'([version=1.0,encoding=ISO-8859-1])], ...]), [])
// Extracts the encoding from the xmldecl attribute list of an XML document
// term; defEnc is returned whenever no encoding attribute can be found.
static Encoding GetEncodingFromTerm(BaseTerm t, Encoding defEnc)
{
    if (t.Arity == 0 || (t = t.Arg(0)).FunctorToString != XMLDECL)
    {
        return(defEnc);
    }

    if (!((t = t.Arg(0)) is ListTerm)) // attributes list, find encoding
    {
        return(defEnc);
    }

    foreach (BaseTerm b in (ListTerm)t)
    {
        // each attribute must be a name=value pair
        if (!(b is OperatorTerm) || !((OperatorTerm)b).HasBinaryOperator("="))
        {
            return(defEnc);
        }

        OperatorTerm ot = (OperatorTerm)b;

        if (ot.Arg(0).FunctorToString == "encoding")
        {
            return(GetEncodingFromString(ot.Arg(1).FunctorToString));
        }
    }

    return(defEnc);
}
// Removes predicate functor/arity from the predicate table. Predefined
// predicates cannot be abolished; returns false when the predicate is unknown.
public bool Abolish(string functor, int arity)
{
    string key = BaseTerm.MakeKey(functor, arity);

    if (predefineds.Contains(key))
    {
        IO.Error("abolish of predefined predicate '{0}/{1}' not allowed", functor, arity);
    }

    PredicateDescr pd = this[key];

    if (pd == null)
    {
        return(false); // unknown predicate
    }

    predTable.Remove(key);
#if arg1index
    pd.DestroyFirstArgIndex(); // rebuilt by ResolveIndices()
#endif
    ResolveIndices();

    return(true);
}
// Builds a RANGE element directly from its parts. Argument layout:
// [0] min length, [1] max length, [2] range bind var, [3] alternatives
// bind var, [TARGETOFFSET..] the search alternatives.
// NOTE(review): isNegSearch and hasDownRepFactor are unused unless the
// 'old' symbol is defined — confirm this is intentional.
public ListPatternElem(BaseTerm minLenTerm, BaseTerm maxLenTerm, BaseTerm rangeBindVar, BaseTerm altListVar, List <SearchTerm> altSearchTerms, bool isNegSearch, bool hasDownRepFactor)
{
    int n = TARGETOFFSET + (altSearchTerms == null ? 1 : altSearchTerms.Count);
    args = new BaseTerm[n];
    args[0] = minLenTerm;
    args[1] = maxLenTerm;
    args[2] = rangeBindVar;
    args[3] = altListVar;
    int i = TARGETOFFSET;

    if (altSearchTerms != null)
    {
        foreach (SearchTerm t in altSearchTerms)
        {
            args[i++] = t.term;
        }
    }
#if old
    this.isNegSearch = isNegSearch;
#endif
}
// Looks up the action to be taken when predicate f/a turns out to be
// undefined; yields the type's default value when no entry exists.
public UndefAction ActionWhenUndefined(string f, int a)
{
    UndefAction u;
    string key = BaseTerm.MakeKey(f, a);
    actionWhenUndefined.TryGetValue(key, out u);

    return(u);
}
// A list with user-defined brackets; the functor encodes the bracket pair.
public AltListTerm(string leftBracket, string rightBracket, BaseTerm t0, BaseTerm t1)
  : base(t0.ChainEnd(), t1.ChainEnd())
{
    this.leftBracket = leftBracket;
    this.rightBracket = rightBracket;
    functor = leftBracket + ".." + rightBracket;
    isAltList = true;
}
// Emits every pair of JSON object t; only the very first pair may omit the
// leading comma, so the flag is cleared after the first iteration.
static void DoJsonObject(JsonTextBuffer avb, BaseTerm t, bool first)
{
    foreach (BaseTerm pair in t.Args)
    {
        DoJsonPair(avb, pair, first);
        first = false;
    }
}
// Converts the token sequence collected so far into a single prefix term.
public void ConstructPrefixTerm(out BaseTerm term)
{
    CheckTokenPair(SeqEndToken); // force a check on the last token added
    InfixToPrefix();
    term = PrefixToTerm();
}
// Converts a JSON term to XML. If fileNameOrXmlString is null on entry, the
// XML is returned through it as a flat string; otherwise it names the output
// file that receives indented XML.
// Fix: in the string-output branch the XmlTextWriter was never flushed or
// closed before sw.ToString() was taken, so buffered output could be lost;
// the writer is now flushed before reading and closed in both branches.
public static void JsonToXml(BaseTerm jsonTerm, string root, ref string fileNameOrXmlString, string[] attributes)
{
    // convert the JSON term to an equivalent term in which JSON-term
    // functors are replaced by names that will appear as tag names
    XmlTextWriter xwr = null;
    StringWriter sw = new StringWriter();
    bool contentWritten = false;

    try
    {
        if (fileNameOrXmlString == null) // return flat XmlString
        {
            xwr = new XmlTextWriter(sw);
            xwr.Formatting = Formatting.None;
        }
        else // write to file
        {
            xwr = new XmlTextWriter(fileNameOrXmlString, Encoding.UTF8);
            xwr.Formatting = Formatting.Indented;
            xwr.Indentation = 2;
            xwr.IndentChar = ' '; // default
        }

        xwr.WriteStartElement(root);

        if (jsonTerm.FunctorToString == PrologParser.CURL)
        {
            DoJsonObject(xwr, attributes, jsonTerm, ref contentWritten);
        }
        else if (jsonTerm is AltListTerm) // if {...} has been declared a list with wrap( '{', |, '}')
        {
            DoJsonObject(xwr, attributes, (AltListTerm)jsonTerm, ref contentWritten);
        }
        else if (jsonTerm.IsProperList)
        {
            DoJsonArray(xwr, attributes, null, ((ListTerm)jsonTerm), ref contentWritten);
        }
        else
        {
            IO.Error("Unable to convert term to XML:\r\n{0}", jsonTerm);
        }

        xwr.WriteEndElement();
    }
    finally
    {
        if (fileNameOrXmlString == null)
        {
            if (xwr != null)
            {
                xwr.Flush(); // push buffered XML into sw before reading it
            }

            fileNameOrXmlString = sw.ToString();
        }

        if (xwr != null)
        {
            xwr.Close(); // previously skipped in the string case
        }
    }
}
// A JSON pair must be name:value, i.e. functor ":" with exactly two arguments;
// the name becomes the attribute name, the second argument the value.
static void DoJsonPair(JsonTextBuffer avb, BaseTerm t, bool first)
{
    bool isPair = t.FunctorToString == ":" && t.Arity == 2;

    if (!isPair)
    {
        IO.Error("Not a JSON-term:\r\n{0}", t);
    }

    DoJsonValue(avb, t.Arg(0).FunctorToString, t.Arg(1), first);
}
// Converts an atom or string term to a full directory name; returns null
// for any other term type.
public static string DirectoryNameFromTerm(BaseTerm t)
{
    return(t.IsAtomOrString ? GetFullDirectoryName(t.FunctorToString.Dequoted()) : null);
}
// try to bind the variable associated with the list of search alternatives
// (t1|t2|...|tn) to the target term found matching one of these alternatives
bool TryBindingAltListVarToMatch(BaseTerm AltListVar, BaseTerm searchTerm, VarStack varStack)
{
    // no variable to bind counts as trivially successful
    return(AltListVar == null || AltListVar.Unify(searchTerm, varStack));
}
// Copy constructor: deep-copies the alternatives and the binding variable.
public DisjunctiveSearchTerm (DisjunctiveSearchTerm t)
{
    int count = t.Count;
    alternatives = new BaseTerm [count];

    for (int k = 0; k < count; k++)
    {
        alternatives [k] = t.alternatives [k].Copy ();
    }

    bindVar = (Variable)(t.bindVar.Copy ());
    isNegSearch = t.isNegSearch;
}
// Settings dialog for communication options; fills the control-character
// selectors after the designer-generated initialization.
// NOTE(review): the 'settings' parameter is not used in this constructor —
// presumably consumed elsewhere; confirm.
public SettingsForm(BaseTerm.CommBaseTermSettings settings)
{
    //
    // Required for Windows Form Designer support
    //
    InitializeComponent();
    // populate the XON/XOFF/BRK combo boxes with ASCII characters
    FillASCII(comboBoxXon);
    FillASCII(comboBoxXoff);
    FillASCII(comboBoxBRK);
}
// Stores t at the flat-array position addressed by the subscript vector.
public void SetEntry(int [] subscripts, BaseTerm t) => baseArray [CalculateOffset (subscripts)] = t;
// Repetition factor for a 'down' search; all state lives in the base class.
public DownRepFactor (Variable bindVar, BaseTerm minLenTerm, BaseTerm maxLenTerm)
  : base (bindVar, minLenTerm, maxLenTerm)
{
}
// Appends t to RangeList, keeping a tail pointer so each append is O(1).
// Nothing is built when rangeSpecVar is null, since the list would never be bound.
void AppendToRangeList (ref ListTerm RangeList, BaseTerm rangeSpecVar, BaseTerm t, ref BaseTerm tail) // append t to RangeList
{
    if (rangeSpecVar == null)
        return; // no point in constructing a range list if it won't be bound later

    if (RangeList == null)
    {
        // first element: start the list and point the tail at it
        RangeList = new ListTerm (t);
        tail = RangeList;
    }
    else
    {
        // extend at the tail and advance the tail pointer
        tail.SetArg (1, new ListTerm (t));
        tail = tail.Arg (1);
    }
}
// Node holding term, with nextNode as the continuation of the goal list.
public TermNode(BaseTerm term, TermNode nextNode)
{
    this.nextNode = nextNode;
    this.term = term;
}
// Node holding term together with the descriptor of the predicate it resolves to.
public TermNode(BaseTerm term, PredicateDescr predDescr)
{
    this.term = term;
    this.predDescr = predDescr;
}
// process an element's content, i.e. a term list: each member is written as
// text, a nested element, CDATA, comment or processing instruction.
static void ContentTermToXml(XmlTextWriter xwr, BaseTerm list)
{
    while (!list.IsEmptyList) // traverse ...
    {
        BaseTerm e = list.Arg (0);
        string type = e.FunctorToString;
#if !fullyTagged
        if (e is StringTerm)
            xwr.WriteString (((StringTerm)e).Value);
        else if (e is DecimalTerm)
            xwr.WriteString (((DecimalTerm)e).FunctorToString);
        else if (e is CompoundTerm)
            ElementTermToXml (xwr, e);
        else
#endif
        // remaining cases are dispatched on the functor name
        switch (type)
        {
            case XMLDECL:
                xwr.WriteStartDocument (true);
                break;
#if fullyTagged
            case ELEMENT:
                if (!ElementTermToXml (xwr, e)) return false;
                break;
            case TEXT:
                xwr.WriteString (e.Arg (0).FunctorToString);
                break;
#endif
            case CDATA:
                xwr.WriteCData (e.Arg (0).FunctorToString);
                break;
            case COMMENT:
                xwr.WriteComment (e.Arg (0).FunctorToString);
                break;
            case INSTRUCTIONS:
                xwr.WriteProcessingInstruction (e.Arg (0).FunctorToString, e.Arg (1).ToString ());
                break;
            default:
                IO.Error ("ContentTermToXml -- unhandled type: {0} ({1})", e.GetType ().Name, type);
                break;
        }

        list = list.Arg (1); // move on to the tail of the list
    }
}
// Unifies this range element with t. Only another ListPatternElem can match;
// argument slots that are null on both sides count as equal.
public override bool Unify (BaseTerm t, VarStack varStack)
{
    NextUnifyCount ();

    if (!((t = t.ChainEnd ()) is ListPatternElem))
        return false; // should never occur
#if old
    if (isNegSearch != ((ListPatternElem)t).isNegSearch)
        return false;
#endif
    // both-null (unused slot) matches; otherwise the args must unify
    for (int i = 0; i < arity; i++)
        if (!((args [i] == null && t.Args [i] == null) || args [i].Unify (t.Args [i], varStack)))
            return false;

    return true;
}
// Builds a disjunctive search term (t1|t2|...|tn) from its alternatives.
public DisjunctiveSearchTerm (BaseTerm bindVar, bool isNegSearch, List<BaseTerm> alternatives)
{
    this.isNegSearch = isNegSearch;
    this.bindVar = (Variable)bindVar; // cast enforces that only a Variable is accepted
    this.alternatives = alternatives.ToArray ();
}
List<BaseTerm> target; // the target list in which the pattern is searched

// A list pattern built with the parser's list-pattern brackets; the array a
// holds the pattern's elements as the term's arguments.
public ListPatternTerm (BaseTerm [] a)
  : base (PrologParser.LISTPATOPEN, PrologParser.LISTPATCLOSE, a)
{
}
// Writes t to the current output: strings are quoted unless dequote is set,
// atoms are dequoted when requested, anything else uses ToString().
void Write(BaseTerm t, bool dequote)
{
    string s;

    if (t.IsString)
        s = dequote ? t.FunctorToString : '"' + t.FunctorToString + '"';
    else if (t.IsAtom)
        s = dequote ? t.FunctorToString.Dequoted ("'") : t.FunctorToString;
    else
        s = t.ToString ();

    BaseWriteCurrentOutput (s);
}
// Common state for repetition factors: the variable to bind and the min/max
// length terms of the range.
protected BaseRepFactor (BaseTerm bindVar, BaseTerm minLenTerm, BaseTerm maxLenTerm)
{
    this.minLenTerm = minLenTerm;
    this.maxLenTerm = maxLenTerm;
    this.bindVar = (Variable)bindVar; // cast enforces that only a Variable is accepted
}
// A single search term, optionally carrying a 'down' repetition factor.
public SearchTerm (DownRepFactor downRepFactor, BaseTerm term)
{
    this.term = term;
    this.downRepFactor = downRepFactor;
}
static BaseTerm ToTermEx(Node root) { BaseTerm [] args = new BaseTerm [3]; #if fullyTagged args [0] = new AtomTerm (root.TagName.ToAtom ()); #else string tagName = root.TagName.ToAtom (); #endif args [1] = ListTerm.EMPTYLIST; Decimal d; foreach (KeyValuePair<string, string> kv in root.Attributes) // XML Declaration { BaseTerm pair; if (Decimal.TryParse (kv.Value, styleAllowDecPnt, Utils.CIC, out d)) pair = new OperatorTerm (EqualOpDescr, new AtomTerm (kv.Key), new DecimalTerm (d)); else pair = new OperatorTerm (EqualOpDescr, new AtomTerm (kv.Key), new StringTerm (kv.Value)); args [1] = new ListTerm (pair, args [1]); } args [2] = ListTerm.EMPTYLIST; if (root.ChildNodes.Count > 0) { foreach (Node n in root.ChildNodes) { BaseTerm e; e = null; switch (n.type) { case XmlNodeType.Element: e = ToTermEx (n); break; case XmlNodeType.Comment: e = new CompoundTerm (COMMENT, new StringTerm (n.text.Trim ().EscapeDoubleQuotes ())); break; case XmlNodeType.Text: if (Decimal.TryParse (n.text, styleAllowDecPnt, Utils.CIC, out d)) #if fullyTagged e = new CompoundTerm (TEXT, new DecimalTerm (d)); else e = new CompoundTerm (TEXT, new StringTerm (n.text.Trim ().EscapeDoubleQuotes ())); #else e = new DecimalTerm (d); else e = new StringTerm (n.text.Trim ().EscapeDoubleQuotes ()); #endif break; case XmlNodeType.CDATA: e = new CompoundTerm (CDATA, new StringTerm (n.text.Trim ().EscapeDoubleQuotes ())); break; case XmlNodeType.ProcessingInstruction: e = new CompoundTerm ("processing_instruction", new AtomTerm (n.name.ToAtom ()), new StringTerm (n.text.Trim ().EscapeDoubleQuotes ())); break; case XmlNodeType.SignificantWhitespace: case XmlNodeType.Whitespace: break;
// Builds a RANGE element directly from its parts. Argument layout:
// [0] min length, [1] max length, [2] range bind var, [3] alternatives
// bind var, [TARGETOFFSET..] the search alternatives.
// NOTE(review): isNegSearch and hasDownRepFactor are unused unless the
// 'old' symbol is defined — confirm this is intentional.
public ListPatternElem (BaseTerm minLenTerm, BaseTerm maxLenTerm, BaseTerm rangeBindVar, BaseTerm altListVar, List<SearchTerm> altSearchTerms, bool isNegSearch, bool hasDownRepFactor)
{
    int n = TARGETOFFSET + (altSearchTerms == null ? 1 : altSearchTerms.Count);
    args = new BaseTerm [n];
    args [0] = minLenTerm;
    args [1] = maxLenTerm;
    args [2] = rangeBindVar;
    args [3] = altListVar;
    int i = TARGETOFFSET;

    if (altSearchTerms != null)
        foreach (SearchTerm t in altSearchTerms)
            args [i++] = t.term;
#if old
    this.isNegSearch = isNegSearch;
#endif
}
// 'xmldocument$'(['xmldecl$'([version=1.0,encoding=ISO-8859-1])], ...]), [])
// Extracts the encoding from the xmldecl attribute list of an XML document
// term; defEnc is returned whenever no encoding attribute can be found.
static Encoding GetEncodingFromTerm(BaseTerm t, Encoding defEnc)
{
    if (t.Arity == 0 || (t = t.Arg (0)).FunctorToString != XMLDECL)
        return defEnc;

    if (!((t = t.Arg (0)) is ListTerm)) // attributes list, find encoding
        return defEnc;

    foreach (BaseTerm b in (ListTerm)t)
    {
        // each attribute must be a name=value pair
        if (!(b is OperatorTerm) || !((OperatorTerm)b).HasBinaryOperator ("="))
            return defEnc;

        OperatorTerm ot = (OperatorTerm)b;

        if (ot.Arg (0).FunctorToString == "encoding")
            return GetEncodingFromString (ot.Arg (1).FunctorToString);
    }

    return defEnc;
}
// used by CopyEx: rebuilds a RANGE element from already-copied arguments.
// NOTE(review): downRepFactor and isNegSearch are accepted but not stored
// outside the inactive '#if old' section — confirm this is intentional.
public ListPatternElem (BaseTerm [] a, DownRepFactor downRepFactor, bool isNegSearch)
  : base ("RANGE", a)
{
#if old
    this.isNegSearch = isNegSearch;
#endif
}
// process an element( <tag>, <attributes>, <content>)
// Writes element term e as XML: start tag, attribute list, content, end tag.
// An xmldecl term emits the XML declaration instead. With the fullyTagged
// symbol the tag name is the first argument, otherwise it is the functor.
static void ElementTermToXml(XmlTextWriter xwr, BaseTerm e)
{
#if fullyTagged
    int ft = 1;
#else
    int ft = 0; // argument offset: the tag name lives in the functor
#endif
    // open tag
    if (e.Arity == 1 + ft && e.FunctorToString == XMLDECL)
    {
        xwr.WriteStartDocument ();
    }
    else if (e.Arity == 2 + ft)
    {
#if fullyTagged
        xwr.WriteStartElement (e.Arg (0).ToString ().Dequoted ());
#else
        xwr.WriteStartElement (e.FunctorToString.Dequoted ());
#endif
        // attributes
        BaseTerm le = e.Arg (ft); // list with attribute-value pairs

        while (!le.IsEmptyList)
        {
            BaseTerm av = le.Arg (0); // BaseTerm: attr = value
            xwr.WriteAttributeString (av.Arg (0).FunctorToString.Dequoted (), av.Arg (1).FunctorToString);
            le = le.Arg (1);
        }

        // content
        ContentTermToXml (xwr, e.Arg (1 + ft));
        xwr.WriteEndElement ();
    }
    else
        IO.Error ("Unexpected element encountered:\r\n{0}", e);
}
// MinRemMatchLen and MaxRemMatchLen are calculated on the fly during the matching process
// TODO: eventually use this constructor (translated from Dutch original)
// NOTE: keep CopyEx in mind (translated from Dutch original)
public ListPatternElem (DownRepFactor downRepFactor, DisjunctiveSearchTerm altSearchTerms, AcrossRepFactor acrossRepFactor)
  : base ("RANGE") // used by CopyEx
{
    this.downRepFactor = downRepFactor;
    this.altSearchTerms = altSearchTerms;
    this.acrossRepFactor = acrossRepFactor;

    // argument layout: [0] min length, [1] max length, [2] range bind var,
    // [3] alternatives bind var, [TARGETOFFSET..] the search alternatives
    int n = TARGETOFFSET + (altSearchTerms == null ? 1 : altSearchTerms.Count);
    args = new BaseTerm [n];
    args [0] = HasAcrossRepFactor ? acrossRepFactor.minLenTerm : DecimalTerm.ZERO;
    args [1] = HasAcrossRepFactor ? acrossRepFactor.maxLenTerm : DecimalTerm.ZERO;
    args [2] = HasAcrossRepFactor ? acrossRepFactor.bindVar : null;
    args [3] = HasAltSearchTerms ? altSearchTerms.bindVar : null;
    int i = TARGETOFFSET;

    if (altSearchTerms != null)
        foreach (BaseTerm t in altSearchTerms.alternatives)
            args [i++] = t;

    isNegSearch = (altSearchTerms == null) ? false : altSearchTerms.isNegSearch;
}
// Conversion of an XML-structure (in a string or in a file) to a Prolog BaseTerm
// s is either a file name (inFile == true) or the XML text itself; settings is
// an optional list of which only encoding(...) is currently recognized.
public static BaseTerm XmlToTerm(BaseTerm settings, string s, bool inFile)
{
    XmlTextReader xrd = null;
    StreamReader sr = null;
    Encoding encoding = GetEncodingFromString ("UTF-8"); // default encoding
    Node result;
    string settingValue = null;

    // get settings -- currently, only 'encoding' is recognized
    if (settings != null)
        foreach (BaseTerm setting in (ListTerm)settings) // traverse ...
        {
            string settingName = setting.FunctorToString;

            if (setting.Arity == 1)
                settingValue = setting.Arg (0).FunctorToString;
            else
                IO.Error ("Illegal setting in xml_term/3: '{0}'", setting);

            switch (settingName)
            {
                // Expected string or file encoding. Superseded by explicit encoding attribute setting found in xml
                case "encoding":
                    encoding = GetEncodingFromString (settingValue); // default is UTF-8
                    break;
                default:
                    IO.Error ("Unknown setting in xml_term/3: '{0}'", setting);
                    break;
            }
        }

    try
    {
        if (inFile)
        {
            sr = new StreamReader (s, encoding);
            xrd = new XmlTextReader (sr);
        }
        else
            xrd = new XmlTextReader (new StringReader (s));

        //xrd.ProhibitDtd = true; // new in the .NET Framework version 2.0
        xrd.Namespaces = false;
        result = new Node ();
        result.TagName = "<root>";
        result.type = XmlNodeType.Element;
        result.ToNode (xrd, 0); // first, create an intermediate representation (a Node) containing the XML structure
    }
    catch (Exception e)
    {
        // wrap any reader/parse failure with context about the input source
        string source = inFile ? string.Format (" file '{0}'", s) : null;

        throw new ApplicationException (
          string.Format ("Error in XML input{0}. Message was:\r\n{1}", source, e.Message));
    }
    finally
    {
        // release the reader(s) even when parsing failed
        if (sr != null) sr.Close ();
        if (xrd != null) xrd.Close ();
    }

    return result.ToTerm (); // Convert the Node to a Prolog BaseTerm
}
// Node for a term resolved against predDescr at the given nesting level.
public TermNode(BaseTerm term, PredicateDescr predDescr, int level)
{
    this.predDescr = predDescr;
    this.level = level;
    this.term = term;
}
// A clause (head t plus optional body) stored in a predicate's clause list;
// all behavior is inherited from TermNode.
public ClauseNode(BaseTerm t, TermNode body)
  : base(t, body)
{
}
// Appends t at the end of this goal-list chain, or makes it the first term
// when the chain is still empty.
public void Append(BaseTerm t)
{
    if (term == null) // empty term
    {
        term = t;

        return;
    }

    // walk to the final node of the chain
    TermNode last = this;

    while (last.nextNode != null)
    {
        last = last.nextNode;
    }

    last.nextNode = new TermNode (t, (PredicateDescr)null);
}
// Unifies a list pattern with t: a variable binds to the pattern; another
// pattern of equal arity unifies element-wise; a proper list is searched by
// the pattern-matching machinery (UnifyTailEx).
public override bool Unify (BaseTerm t, VarStack varStack)
{
    if ((t = t.ChainEnd ()) is Variable) // t not unified
    {
        ((Variable)t).Bind (this);
        varStack.Push (t); // record the binding so it can be undone on backtracking

        return true;
    }

    if (t is ListPatternTerm && arity == t.Arity) // two ListPatternTerms match if their rangeTerms match
    {
        for (int i = 0; i < arity; i++)
            if (!args [i].Unify (t.Args [i], varStack))
                return false;

        return true;
    }

    if (t is ListTerm)
    {
        pattern = args;                   // pattern is searched ...
        target = ((ListTerm)t).ToList (); // ... in target
        int ip = 0;
        int it = 0;

        return UnifyTailEx (ip, it, varStack);
    }

    return false;
}
bool succeeds; // indicates whether the cached fact results in a failure or in a success
#endregion Fields

#region Constructors
// A cached clause: the clause itself plus the remembered outcome of calling it.
public CachedClauseNode(BaseTerm t, TermNode body, bool succeeds)
  : base(t, body)
{
    this.succeeds = succeeds;
}
// Repetition factor for an 'across' search; all state lives in the base class.
public AcrossRepFactor (BaseTerm bindVar, BaseTerm minLenTerm, BaseTerm maxLenTerm)
  : base (bindVar, minLenTerm, maxLenTerm)
{
}
// Tries to match target element t (at position k) against one alternative
// searchTerm of range element e. For a negative search, any match aborts the
// alternatives loop; otherwise a match also requires the bind variables and
// the rest of the pattern to succeed. Returns how the loop should proceed.
private AltLoopStatus TryOneAlternative (int ip, VarStack varStack, ListPatternElem e, int k, int marker, ListTerm RangeList, int i, BaseTerm t, ref bool negSearchSucceeded, BaseTerm searchTerm)
{
    bool unified = searchTerm.Unify (t, varStack);

    if (e.IsNegSearch) // none of the terms in the inner loop may match. ~(a | b | c) = ~a & ~b & ~c
    {
        if (unified) // ... no point in investigating the other alternatives if one does
        {
            negSearchSucceeded = false;

            return AltLoopStatus.Break; // don't try the other alternatives
        }

        return AltLoopStatus.TryNextDown; // none of the downrange matches may lead to a success
    }
    else
    {
        if (unified && // we found a match. Unify, and
            TryBindingAltListVarToMatch (e.AltListBindVar, t, varStack) && // bind the AltListBindVar to the match
            TryBindingRangeRelatedVars (e, i, RangeList, varStack)) // bind the range to the range variables
        {
            if (ip == pattern.Length - 1) // this was the last pattern element
            {
                if (k == target.Count - 1) // both pattern and target exhausted
                    return AltLoopStatus.MatchFound;
            }
            else if (UnifyTailEx (ip + 1, k + 1, varStack)) // now deal with the rest
                return AltLoopStatus.MatchFound;
        }

        // if we arrive here, it was not possible to ...
        // (1) ... unify the range's SearchTerm with the target element, or
        // (2) ... unify the range variable with the range list, or
        // (3) ... successfully process the rest of the pattern and target
        // Now unbind and try matching with the next target element
        BaseTerm.UnbindToMarker (varStack, marker);

        return AltLoopStatus.TryNextDown; // try the next downrange match
    }
}
// try to bind the variable associated with the list of search alternatives
// (t1|t2|...|tn) to the target term found matching one of these alternatives
bool TryBindingAltListVarToMatch (BaseTerm AltListVar, BaseTerm searchTerm, VarStack varStack)
{
    // no variable to bind counts as trivially successful
    return AltListVar == null || AltListVar.Unify (searchTerm, varStack);
}