internal void InitSpecialRuleRef(Backend backend, ParseElementCollection parent)
{
    Rule rule = null;

    switch (_type)
    {
        case SpecialRuleRefType.Null:
            parent.AddArc(backend.EpsilonTransition(1f));
            break;

        case SpecialRuleRefType.Void:
            rule = backend.FindRule("VOID");
            if (rule == null)
            {
                rule = backend.CreateRule("VOID", (SPCFGRULEATTRIBUTES)0);
                ((IElement)rule).PostParse((IElement)parent);
            }
            parent.AddArc(backend.RuleTransition(rule, parent._rule, 1f));
            break;

        case SpecialRuleRefType.Garbage:
        {
            OneOf oneOf = new OneOf(parent._rule, backend);
            oneOf.AddArc(backend.RuleTransition(CfgGrammar.SPRULETRANS_WILDCARD, parent._rule, 0.5f));
            oneOf.AddArc(backend.EpsilonTransition(0.5f));
            ((IElement)oneOf).PostParse((IElement)parent);
            break;
        }
    }
}
internal override void AddArc(Arc start, Arc end)
{
    // Remove leading/trailing epsilon transitions that carry no properties.
    start = ParseElementCollection.TrimStart(start, _backend);
    end = ParseElementCollection.TrimEnd(end, _backend);

    State endStartState = end.Start;
    State startEndState = start.End;

    // If the first arc is a property-less epsilon, merge its end state into the OneOf start state.
    if ((start.IsEpsilonTransition & start.IsPropertylessTransition) && startEndState != null && startEndState.InArcs.IsEmpty)
    {
        start.End = null;
        _backend.MoveOutputTransitionsAndDeleteState(startEndState, _startState);
    }
    else
    {
        start.Start = _startState;
    }

    // If the last arc is a property-less epsilon, merge its start state into the OneOf end state.
    if ((end.IsEpsilonTransition & end.IsPropertylessTransition) && endStartState != null && endStartState.OutArcs.IsEmpty)
    {
        end.Start = null;
        _backend.MoveInputTransitionsAndDeleteState(endStartState, _endState);
    }
    else
    {
        end.End = _endState;
    }
}
void IElement.PostParse(IElement parentElement)
{
    ParseElementCollection parseElementCollection = (ParseElementCollection)parentElement;

    _propInfo._ulId = (uint)parseElementCollection._rule._iSerialize2;
    parseElementCollection.AddSementicPropertyTag(_propInfo);
}
/// <summary>
/// Add transition corresponding to Special or Uri.
/// </summary>
internal RuleRef(ParseElementCollection parent, Backend backend, Uri uri, List<Rule> undefRules, string semanticKey, string initParameters)
    : base(parent._rule)
{
    string id = uri.OriginalString;
    Rule ruleRef = null;
    int posPound = id.IndexOf('#');

    // Get the initial state for the RuleRef.
    if (posPound == 0)
    {
        // Internal RuleRef. Get InitialState of RuleRef.
        // GetRuleRef() may temporarily create a Rule placeholder for later resolution.
        ruleRef = GetRuleRef(backend, id.Substring(1), undefRules);
    }
    else
    {
        // External RuleRef. Build URL:GrammarUri#RuleName
        StringBuilder sbExternalRuleUri = new("URL:");

        // Add the parameters to initialize a rule
        if (!string.IsNullOrEmpty(initParameters))
        {
            // Look for the # and insert the parameters
            sbExternalRuleUri.Append(posPound > 0 ? id.Substring(0, posPound) : id);
            sbExternalRuleUri.Append('>');
            sbExternalRuleUri.Append(initParameters);
            if (posPound > 0)
            {
                sbExternalRuleUri.Append(id.Substring(posPound));
            }
        }
        else
        {
            sbExternalRuleUri.Append(id);
        }

        // Get InitialState of external RuleRef.
        string sExternalRuleUri = sbExternalRuleUri.ToString();
        ruleRef = backend.FindRule(sExternalRuleUri);
        if (ruleRef == null)
        {
            ruleRef = backend.CreateRule(sExternalRuleUri, SPCFGRULEATTRIBUTES.SPRAF_Import);
        }
    }

    Arc rulerefArc = backend.RuleTransition(ruleRef, _rule, 1.0f);

#pragma warning disable 0618
    if (!string.IsNullOrEmpty(semanticKey))
    {
        CfgGrammar.CfgProperty propertyInfo = new();
        propertyInfo._pszName = "SemanticKey";
        propertyInfo._comValue = semanticKey;
        propertyInfo._comType = VarEnum.VT_EMPTY;
        backend.AddPropertyTag(rulerefArc, rulerefArc, propertyInfo);
    }
#pragma warning restore 0618

    parent.AddArc(rulerefArc);
}
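// Illustrative sketch (not part of the compiler): shows the external-ruleref lookup key that
// the constructor above assembles from a URI and optional initialization parameters. The
// sample grammar URI, rule name, and parameter string below are hypothetical.
using System;
using System.Text;

internal static class ExternalRuleRefKeySample
{
    internal static string BuildKey(string id, string initParameters)
    {
        int posPound = id.IndexOf('#');
        StringBuilder sb = new StringBuilder("URL:");
        if (!string.IsNullOrEmpty(initParameters))
        {
            // Insert ">params" between the grammar URI and the "#rulename" fragment.
            sb.Append(posPound > 0 ? id.Substring(0, posPound) : id);
            sb.Append('>');
            sb.Append(initParameters);
            if (posPound > 0)
            {
                sb.Append(id.Substring(posPound));
            }
        }
        else
        {
            sb.Append(id);
        }
        return sb.ToString();
    }

    internal static void Main()
    {
        // Prints "URL:dates.grxml>format=long#day"
        Console.WriteLine(BuildKey("dates.grxml#day", "format=long"));
        // Prints "URL:dates.grxml#day"
        Console.WriteLine(BuildKey("dates.grxml#day", null));
    }
}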
void IElement.PostParse(IElement parentElement)
{
    ParseElementCollection parent = (ParseElementCollection)parentElement;

    _propInfo._ulId = (uint)parent._rule._iSerialize2;

    // Attach the semantic properties on the parent element.
    parent.AddSementicPropertyTag(_propInfo);
}
void IElement.PostParse(IElement parentElement)
{
    // A one-of element with no alternatives is invalid.
    if (_startArc.End.OutArcs.IsEmpty)
    {
        XmlParser.ThrowSrgsException(SRID.EmptyOneOf);
    }

    // Remove extraneous epsilon transitions at both ends before attaching to the parent.
    _startArc = ParseElementCollection.TrimStart(_startArc, _backend);
    _endArc = ParseElementCollection.TrimEnd(_endArc, _backend);

    PostParse((ParseElementCollection)parentElement);
}
void ISemanticTag.Content(IElement parentElement, string sTag, int iLine)
{
    sTag = sTag.Trim(Helpers._achTrimChars);
    if (!string.IsNullOrEmpty(sTag))
    {
        _propInfo._ulId = (uint)iLine;
        _propInfo._comValue = sTag;

        ParseElementCollection parseElementCollection = (ParseElementCollection)parentElement;
        parseElementCollection.AddSemanticInterpretationTag(_propInfo);
    }
}
public Subset(ParseElementCollection parent, Backend backend, string text, MatchMode mode)
    : base(parent._rule)
{
    char[] achTrimChars = Helpers._achTrimChars;
    foreach (char c in achTrimChars)
    {
        if (c != ' ' && text.IndexOf(c) >= 0)
        {
            text = text.Replace(c, ' ');
        }
    }

    parent.AddArc(backend.SubsetTransition(text, mode));
}
internal RuleRef(ParseElementCollection parent, Backend backend, Uri uri, List<Rule> undefRules, string semanticKey, string initParameters)
    : base(parent._rule)
{
    string originalString = uri.OriginalString;
    Rule rule = null;
    int num = originalString.IndexOf('#');

    if (num == 0)
    {
        rule = GetRuleRef(backend, originalString.Substring(1), undefRules);
    }
    else
    {
        StringBuilder stringBuilder = new StringBuilder("URL:");
        if (!string.IsNullOrEmpty(initParameters))
        {
            stringBuilder.Append((num > 0) ? originalString.Substring(0, num) : originalString);
            stringBuilder.Append('>');
            stringBuilder.Append(initParameters);
            if (num > 0)
            {
                stringBuilder.Append(originalString.Substring(num));
            }
        }
        else
        {
            stringBuilder.Append(originalString);
        }

        string text = stringBuilder.ToString();
        rule = backend.FindRule(text);
        if (rule == null)
        {
            rule = backend.CreateRule(text, SPCFGRULEATTRIBUTES.SPRAF_Import);
        }
    }

    Arc arc = backend.RuleTransition(rule, _rule, 1f);
    if (!string.IsNullOrEmpty(semanticKey))
    {
        backend.AddPropertyTag(arc, arc, new CfgGrammar.CfgProperty
        {
            _pszName = "SemanticKey",
            _comValue = semanticKey,
            _comType = VarEnum.VT_EMPTY
        });
    }

    parent.AddArc(arc);
}
/// <summary>
/// Process the 'subset' element.
/// </summary>
public Subset(ParseElementCollection parent, Backend backend, string text, MatchMode mode)
    : base(parent._rule)
{
    // Replace tab, cr, lf with spaces
    foreach (char ch in Helpers._achTrimChars)
    {
        if (ch == ' ')
        {
            continue;
        }
        if (text.IndexOf(ch) >= 0)
        {
            text = text.Replace(ch, ' ');
        }
    }

    // Add transition to the new state with normalized token.
    parent.AddArc(backend.SubsetTransition(text, mode));
}
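// Illustrative sketch (not part of the compiler): the public System.Speech surface that
// eventually reaches the Subset constructor above. The rule id, phrase text, and matching
// mode below are arbitrary samples; whitespace in the phrase (including tabs/newlines from
// XML content) is collapsed to single spaces by the compiler as shown above.
using System.Speech.Recognition.SrgsGrammar;

internal static class SubsetSample
{
    internal static SrgsDocument BuildGrammar()
    {
        SrgsRule rule = new SrgsRule("city");
        rule.Add(new SrgsSubset("new york city", SubsetMatchingMode.OrderedSubset));

        SrgsDocument doc = new SrgsDocument();
        doc.Rules.Add(rule);
        doc.Root = rule;
        return doc;
    }
}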
void ISemanticTag.Content(IElement parentElement, string sTag, int iLine)
{
    // Return if the tag content is empty
    sTag = sTag.Trim(Helpers._achTrimChars);
    if (string.IsNullOrEmpty(sTag))
    {
        return;
    }

    // Build semantic properties to attach to epsilon transition.
    // <tag>script</tag>
    _propInfo._ulId = (uint)iLine;
    _propInfo._comValue = sTag;

    ParseElementCollection parent = (ParseElementCollection)parentElement;

    // Attach the semantic properties on the parent element.
    parent.AddSemanticInterpretationTag(_propInfo);
}
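// Illustrative sketch (not part of the compiler): the public element whose script content is
// what ISemanticTag.Content receives above. The rule id, word, and script are arbitrary
// samples; the trimmed script becomes the semantic-interpretation property and its source
// line number is stored as the property id (_ulId).
using System.Speech.Recognition.SrgsGrammar;

internal static class SemanticTagSample
{
    internal static SrgsRule BuildRule()
    {
        SrgsRule rule = new SrgsRule("city");
        rule.Add(new SrgsItem("seattle"));
        rule.Add(new SrgsSemanticInterpretationTag("out.city = \"Seattle\";"));
        return rule;
    }
}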
/// <summary>
/// Adds the transition(s) for a special rule reference (NULL, VOID, or GARBAGE).
/// For each type of special rule we make a rule with a numeric id and return a reference to it.
/// </summary>
internal void InitSpecialRuleRef(Backend backend, ParseElementCollection parent)
{
    Rule rule = null;

    // Create a transition corresponding to Special or Uri
    switch (_type)
    {
        case SpecialRuleRefType.Null:
            parent.AddArc(backend.EpsilonTransition(1.0f));
            break;

        case SpecialRuleRefType.Void:
            rule = backend.FindRule(szSpecialVoid);
            if (rule == null)
            {
                rule = backend.CreateRule(szSpecialVoid, 0); // Rule with no transitions is a void rule.
                ((IRule)rule).PostParse(parent);
            }
            parent.AddArc(backend.RuleTransition(rule, parent._rule, 1.0f));
            break;

        case SpecialRuleRefType.Garbage:
        {
            // Garbage transition is optional whereas Wildcard is not. So we need an additional epsilon transition.
            OneOf oneOf = new(parent._rule, backend);

            // Add the garbage transition
            oneOf.AddArc(backend.RuleTransition(CfgGrammar.SPRULETRANS_WILDCARD, parent._rule, 0.5f));

            // Add a parallel epsilon path
            oneOf.AddArc(backend.EpsilonTransition(0.5f));

            ((IOneOf)oneOf).PostParse(parent);
            break;
        }

        default:
            System.Diagnostics.Debug.Assert(false, "Unknown special ruleref type");
            break;
    }
}
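// Illustrative sketch (not part of the compiler): the public SrgsRuleRef fields that map to
// the three SpecialRuleRefType cases handled above. The rule id and word are arbitrary samples.
using System.Speech.Recognition.SrgsGrammar;

internal static class SpecialRuleRefSample
{
    internal static SrgsDocument BuildGrammar()
    {
        SrgsRule rule = new SrgsRule("command");
        rule.Add(new SrgsItem("call"));

        // GARBAGE matches arbitrary speech; it compiles to the optional wildcard path above.
        // NULL compiles to a plain epsilon transition; VOID to a rule that can never be spoken.
        rule.Add(SrgsRuleRef.Garbage);

        SrgsDocument doc = new SrgsDocument();
        doc.Rules.Add(rule);
        doc.Root = rule;
        return doc;
    }
}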
private void ParseToken(ParseElementCollection parent, string sToken, string pronunciation, string display, float reqConfidence)
{
    int requiredConfidence = parent?._confidence ?? 0;

    sToken = Backend.NormalizeTokenWhiteSpace(sToken);
    if (string.IsNullOrEmpty(sToken))
    {
        return;
    }

    parent._confidence = 0;
    if (reqConfidence < 0f || reqConfidence.Equals(0.5f))
    {
        parent._confidence = 0;
    }
    else if ((double)reqConfidence < 0.5)
    {
        parent._confidence = -1;
    }
    else
    {
        parent._confidence = 1;
    }

    if (pronunciation != null || display != null)
    {
        string text = EscapeToken(sToken);
        string text2 = (display == null) ? text : EscapeToken(display);

        if (pronunciation != null)
        {
            OneOf oneOf = (pronunciation.IndexOf(';') >= 0) ? new OneOf(parent._rule, _backend) : null;
            int num = 0;
            int num2 = 0;
            while (num < pronunciation.Length)
            {
                num2 = pronunciation.IndexOf(';', num);
                if (num2 == -1)
                {
                    num2 = pronunciation.Length;
                }

                string text3 = pronunciation.Substring(num, num2 - num);
                string text4 = null;
                switch (_backend.Alphabet)
                {
                    case AlphabetType.Sapi:
                        text4 = PhonemeConverter.ConvertPronToId(text3, _grammar.Backend.LangId);
                        break;
                    case AlphabetType.Ipa:
                        text4 = text3;
                        PhonemeConverter.ValidateUpsIds(text4);
                        break;
                    case AlphabetType.Ups:
                        text4 = PhonemeConverter.UpsConverter.ConvertPronToId(text3);
                        break;
                }

                string sWord = string.Format(CultureInfo.InvariantCulture, "/{0}/{1}/{2};", new object[3] { text2, text, text4 });
                if (oneOf != null)
                {
                    oneOf.AddArc(_backend.WordTransition(sWord, 1f, requiredConfidence));
                }
                else
                {
                    parent.AddArc(_backend.WordTransition(sWord, 1f, requiredConfidence));
                }

                num = num2 + 1;
            }

            ((IElement)oneOf)?.PostParse((IElement)parent);
        }
        else
        {
            string sWord2 = string.Format(CultureInfo.InvariantCulture, "/{0}/{1};", new object[2] { text2, text });
            parent.AddArc(_backend.WordTransition(sWord2, 1f, requiredConfidence));
        }
    }
    else
    {
        parent.AddArc(_backend.WordTransition(sToken, 1f, requiredConfidence));
    }
}
// Disable parameter validation check

/// <summary>
/// Add transition representing the normalized token.
///
/// White Space Normalization - Trim leading/trailing white spaces.
///                             Collapse white space sequences to a single ' '.
/// Restrictions - Normalized token cannot be empty.
///                Normalized token cannot contain double-quote.
///
/// If (Parent == Token) And (Parent.SAPIPron.Length > 0) Then
///     Escape normalized token.  "/" -> "\/", "\" -> "\\"
///     Build /D/L/P; form from the escaped token and SAPIPron.
///
/// SAPIPron may be a semi-colon delimited list of pronunciations.
/// In this case, a transition for each of the pronunciations will be added.
///
/// AddTransition(NormalizedToken, Parent.EndState, NewState)
/// Parent.EndState = NewState
/// </summary>
private void ParseToken(ParseElementCollection parent, string sToken, string pronunciation, string display, float reqConfidence)
{
    int requiredConfidence = (parent != null) ? parent._confidence : CfgGrammar.SP_NORMAL_CONFIDENCE;

    // Performs white space normalization in place
    sToken = Backend.NormalizeTokenWhiteSpace(sToken);
    if (string.IsNullOrEmpty(sToken))
    {
        return;
    }

    // "sapi:reqconf" Attribute
    parent._confidence = CfgGrammar.SP_NORMAL_CONFIDENCE;  // Default to normal
    if (reqConfidence < 0 || reqConfidence.Equals(0.5f))
    {
        parent._confidence = CfgGrammar.SP_NORMAL_CONFIDENCE;  // Default to normal
    }
    else if (reqConfidence < 0.5)
    {
        parent._confidence = CfgGrammar.SP_LOW_CONFIDENCE;
    }
    else
    {
        parent._confidence = CfgGrammar.SP_HIGH_CONFIDENCE;
    }

    // If SAPIPron is specified, use /D/L/P; as the transition text, for each of the pronunciations.
    if (pronunciation != null || display != null)
    {
        // Escape normalized token. "/" -> "\/", "\" -> "\\"
        string sEscapedToken = EscapeToken(sToken);
        string sDisplayToken = display == null ? sEscapedToken : EscapeToken(display);

        if (pronunciation != null)
        {
            // The pronunciation may be a semi-colon delimited list; use a OneOf to hold the alternates.
            OneOf oneOf = pronunciation.IndexOf(';') >= 0 ? new OneOf(parent._rule, _backend) : null;

            for (int iCurPron = 0, iDeliminator = 0; iCurPron < pronunciation.Length; iCurPron = iDeliminator + 1)
            {
                // Find the semi-colon delimiter (or the end of the string).
                iDeliminator = pronunciation.IndexOf(';', iCurPron);
                if (iDeliminator == -1)
                {
                    iDeliminator = pronunciation.Length;
                }

                string pron = pronunciation.Substring(iCurPron, iDeliminator - iCurPron);
                string sSubPron = null;
                switch (_backend.Alphabet)
                {
                    case AlphabetType.Sapi:
                        sSubPron = PhonemeConverter.ConvertPronToId(pron, _grammar.Backend.LangId);
                        break;
                    case AlphabetType.Ipa:
                        sSubPron = pron;
                        PhonemeConverter.ValidateUpsIds(sSubPron);
                        break;
                    case AlphabetType.Ups:
                        sSubPron = PhonemeConverter.UpsConverter.ConvertPronToId(pron);
                        break;
                }

                // Build /D/L/P; form for this pronunciation.
                string sDLP = string.Format(CultureInfo.InvariantCulture, "/{0}/{1}/{2};", sDisplayToken, sEscapedToken, sSubPron);

                // Add /D/L/P; transition to the new state.
                if (oneOf != null)
                {
                    oneOf.AddArc(_backend.WordTransition(sDLP, 1.0f, requiredConfidence));
                }
                else
                {
                    parent.AddArc(_backend.WordTransition(sDLP, 1.0f, requiredConfidence));
                }
            }

            if (oneOf != null)
            {
                ((IOneOf)oneOf).PostParse(parent);
            }
        }
        else
        {
            // Build /D/L; form for this pronunciation.
            string sDLP = string.Format(CultureInfo.InvariantCulture, "/{0}/{1};", sDisplayToken, sEscapedToken);

            // Add /D/L; transition to the new state.
            parent.AddArc(_backend.WordTransition(sDLP, 1.0f, requiredConfidence));
        }
    }
    else
    {
        // Add transition to the new state with normalized token.
        parent.AddArc(_backend.WordTransition(sToken, 1.0f, requiredConfidence));
    }
}
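// Illustrative sketch (not part of the compiler): the display/lexical/pronunciation
// ("/D/L/P;") transition text that ParseToken builds. The token, display form, and
// SAPI-style phone strings below are arbitrary samples.
using System;
using System.Globalization;

internal static class WordTransitionTextSample
{
    internal static void Main()
    {
        string lexical = "read";
        string display = "READ";
        string[] pronunciations = { "r iy d", "r eh d" };  // a semi-colon list split into alternates

        // With pronunciations: one /D/L/P; entry per alternate, e.g. "/READ/read/r iy d;"
        foreach (string pron in pronunciations)
        {
            Console.WriteLine(string.Format(CultureInfo.InvariantCulture, "/{0}/{1}/{2};", display, lexical, pron));
        }

        // Display form only, no pronunciation: "/READ/read;"
        Console.WriteLine(string.Format(CultureInfo.InvariantCulture, "/{0}/{1};", display, lexical));
    }
}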
void IElement.PostParse(IElement parentElement)
{
    // An item with a variable repeat count whose only content is a semantic tag is invalid.
    if (_maxRepeat != _minRepeat && _startArc != null && _startArc == _endArc && _endArc.IsEpsilonTransition && !_endArc.IsPropertylessTransition)
    {
        XmlParser.ThrowSrgsException(SRID.InvalidTagInAnEmptyItem);
    }

    if (_startArc == null || _maxRepeat == 0)
    {
        // Empty item, or a repeat count of zero: replace the content with a single epsilon transition.
        if (_maxRepeat == 0 && _startArc != null && _startArc.End != null)
        {
            State end = _startArc.End;
            _startArc.End = null;
            _backend.DeleteSubGraph(end);
        }
        _startArc = (_endArc = _backend.EpsilonTransition(_repeatProbability));
    }
    else if (_minRepeat != 1 || _maxRepeat != 1)
    {
        // Wrap the item so the repeat probability applies to the first iteration.
        _startArc = InsertState(_startArc, _repeatProbability, Position.Before);
        State end2 = _startArc.End;

        if (_maxRepeat == int.MaxValue && _minRepeat == 1)
        {
            // "1-" repeat: add a looping epsilon back around the single copy.
            _endArc = InsertState(_endArc, 1f, Position.After);
            AddEpsilonTransition(_endArc.Start, end2, 1f - _repeatProbability);
        }
        else
        {
            // Clone the item subgraph once per additional repetition (capped at 255 copies).
            State srcFromState = end2;
            for (uint num = 1u; num < _maxRepeat && num < 255; num++)
            {
                State state = _backend.CreateNewState(_endArc.Start.Rule);
                State state2 = _backend.CloneSubGraph(srcFromState, _endArc.Start, state);
                _endArc.End = state;
                _endArc = state2.OutArcs.First;

                if (_maxRepeat == int.MaxValue)
                {
                    if (num == _minRepeat - 1)
                    {
                        // Unbounded maximum: once the minimum count is reached, loop back for further repetitions.
                        _endArc = InsertState(_endArc, 1f, Position.After);
                        AddEpsilonTransition(_endArc.Start, state, 1f - _repeatProbability);
                        break;
                    }
                }
                else if (num <= _maxRepeat - _minRepeat)
                {
                    // Add an epsilon path so the optional repetitions beyond the minimum can be bypassed.
                    AddEpsilonTransition(end2, state, 1f - _repeatProbability);
                }

                srcFromState = state;
            }
        }

        if (_minRepeat == 0 && (_startArc != _endArc || !_startArc.IsEpsilonTransition))
        {
            // Optional item: add an epsilon path that bypasses the whole content.
            if (!_endArc.IsEpsilonTransition || _endArc.SemanticTagCount > 0)
            {
                _endArc = InsertState(_endArc, 1f, Position.After);
            }
            AddEpsilonTransition(end2, _endArc.Start, 1f - _repeatProbability);
        }

        _startArc = ParseElementCollection.TrimStart(_startArc, _backend);
    }

    PostParse((ParseElementCollection)parentElement);
}
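// Illustrative sketch (not part of the compiler): the public repeat settings that the
// PostParse logic above expands into cloned subgraphs plus epsilon skip/loop arcs.
// The rule id and words are arbitrary samples.
using System.Speech.Recognition.SrgsGrammar;

internal static class RepeatItemSample
{
    internal static SrgsDocument BuildGrammar()
    {
        SrgsRule rule = new SrgsRule("praise");

        // minRepeat=1, maxRepeat=4: the item subgraph is cloned for the extra repetitions,
        // with epsilon arcs so the optional copies can be bypassed.
        rule.Add(new SrgsItem(1, 4, "very"));

        // minRepeat=0, maxRepeat=1: compiled as an optional item (epsilon bypass path).
        rule.Add(new SrgsItem(0, 1, "nice"));

        rule.Add(new SrgsItem("job"));

        SrgsDocument doc = new SrgsDocument();
        doc.Rules.Add(rule);
        doc.Root = rule;
        return doc;
    }
}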