/// <summary>
/// Builds a rule from its raw CFG attributes: wraps them in a fresh
/// <c>CfgRule</c> and forwards to the shared <c>Init</c> path.
/// </summary>
internal Rule(Backend backend, string name, int offsetName, SPCFGRULEATTRIBUTES attributes, int id, int iSerialize, GrammarOptions SemanticFormat, ref int cImportedRules)
    : base(backend, null)
{
    // A rule acts as its own rule context.
    _rule = this;
    CfgRule cfgRule = new CfgRule(id, offsetName, attributes);
    Init(name, cfgRule, iSerialize, SemanticFormat, ref cImportedRules);
}
// Deep-copies the semantic tags attached to 'arc' onto this arc.
//
// Start tags are cloned and recorded in 'endArcs' (source tag -> clone) so that
// when the matching end tag is encountered later — possibly on a different arc —
// the same clone instance is reused and the mapping is removed. Every clone is
// also appended to the grammar-wide '_tags' list.
//
// When a destination backend 'be' is supplied, the tag's name (and its value,
// when one is present and the prop variant type is VT_EMPTY — presumably meaning
// the value is a string stored in the symbol table; TODO confirm) are re-interned
// into the destination backend's symbol table.
internal void CloneTags(Arc arc, List<Tag> _tags, Dictionary<Tag, Tag> endArcs, Backend be)
{
    if (arc._startTags != null)
    {
        // Lazily create the destination start-tag collection.
        if (_startTags == null)
        {
            _startTags = new Collection<Tag>();
        }
        foreach (Tag startTag in arc._startTags)
        {
            Tag tag = new Tag(startTag);
            _tags.Add(tag);
            _startTags.Add(tag);
            // Remember the clone so the corresponding end tag resolves to the same instance.
            endArcs.Add(startTag, tag);
            if (be != null)
            {
                int idWord;
                // Re-intern the tag name from the source backend's symbol table into 'be'.
                tag._cfgTag._nameOffset = be.Symbols.Add(startTag._be.Symbols.FromOffset(startTag._cfgTag._nameOffset), out idWord);
                if (startTag._cfgTag._valueOffset != 0 && startTag._cfgTag.PropVariantType == VarEnum.VT_EMPTY)
                {
                    // Same re-interning for the string-valued payload.
                    tag._cfgTag._valueOffset = be.Symbols.Add(startTag._be.Symbols.FromOffset(startTag._cfgTag._valueOffset), out idWord);
                }
            }
        }
    }
    if (arc._endTags != null)
    {
        if (_endTags == null)
        {
            _endTags = new Collection<Tag>();
        }
        foreach (Tag endTag in arc._endTags)
        {
            // Reuse the clone created when the start tag was processed, then drop the
            // mapping so the dictionary only holds tags whose end has not been seen yet.
            // NOTE(review): the indexer throws if the start tag was never cloned — this
            // relies on start tags always being processed before their end tags.
            Tag item = endArcs[endTag];
            _endTags.Add(item);
            endArcs.Remove(endTag);
        }
    }
}
/// <summary>
/// Root grammar parse element; records the backend and custom-grammar
/// context used while compiling the grammar.
/// </summary>
internal GrammarElement(Backend backend, CustomGrammar cg)
    : base(null)
{
    _backend = backend;
    _cg = cg;
}
/// <summary>
/// Builds a rule from an already-constructed <c>CfgRule</c> and forwards
/// to the shared <c>Init</c> path.
/// </summary>
internal Rule(Backend backend, string name, CfgRule cfgRule, int iSerialize, GrammarOptions SemanticFormat, ref int cImportedRules)
    : base(backend, null)
{
    // A rule acts as its own rule context.
    _rule = this;
    Init(name, cfgRule, iSerialize, SemanticFormat, ref cImportedRules);
}
// Clones the state/arc sub-graph reachable from 'rule._firstState' into this
// backend, resolving rule references across two source backends:
//   - 'org'   : the original grammar
//   - 'extra' : the grammar holding replacement "dynamic" rules
// 'fromOrg' selects which backend the current rule came from. 'srcToDestHash'
// maps already-cloned source states to their destination copies and is shared
// across the recursive calls so each state is cloned exactly once.
internal void CloneSubGraph(Rule rule, Backend org, Backend extra, Dictionary<State, State> srcToDestHash, bool fromOrg)
{
    Backend backend = fromOrg ? org : extra;
    // Work list of source states whose outgoing arcs still need cloning (BFS-style).
    List<State> list = new List<State>();
    // Maps a source start tag to its clone so matching end tags reuse the same instance.
    Dictionary<Tag, Tag> endArcs = new Dictionary<Tag, Tag>();
    CloneState(rule._firstState, list, srcToDestHash);
    while (list.Count > 0)
    {
        State state = list[0];
        list.RemoveAt(0);
        State start = srcToDestHash[state];
        foreach (Arc outArc in state.OutArcs)
        {
            State end = outArc.End;
            State end2 = null;
            if (end != null)
            {
                // Clone the target state on first sight, then look up its copy.
                if (!srcToDestHash.ContainsKey(end))
                {
                    CloneState(end, list, srcToDestHash);
                }
                end2 = srcToDestHash[end];
            }
            int idWord = outArc.WordId;
            if (backend != null && outArc.WordId > 0)
            {
                // Re-intern the word into this backend's word table; 'idWord' becomes
                // the destination word id.
                _words.Add(backend.Words[outArc.WordId], out idWord);
            }
            Arc arc2 = new Arc(outArc, start, end2, idWord);
            arc2.CloneTags(outArc, _tags, endArcs, this);
            if (outArc.RuleRef != null)
            {
                string text;
                if (outArc.RuleRef.Name.IndexOf("URL:DYNAMIC#", StringComparison.Ordinal) == 0)
                {
                    // Dynamic rule: strip the "URL:DYNAMIC#" prefix (12 chars) and, when
                    // walking the original grammar, pull the replacement body from 'extra'.
                    text = outArc.RuleRef.Name.Substring(12);
                    if (fromOrg && FindInRules(text) == null)
                    {
                        Rule rule2 = extra.FindInRules(text);
                        if (rule2 == null)
                        {
                            XmlParser.ThrowSrgsException(SRID.DynamicRuleNotFound, text);
                        }
                        CloneSubGraph(rule2, org, extra, srcToDestHash, false);
                    }
                }
                else if (outArc.RuleRef.Name.IndexOf("URL:STATIC#", StringComparison.Ordinal) == 0)
                {
                    // Static rule referenced from the replacement grammar: strip the
                    // "URL:STATIC#" prefix (11 chars) and pull the body from 'org'.
                    text = outArc.RuleRef.Name.Substring(11);
                    if (!fromOrg && FindInRules(text) == null)
                    {
                        Rule rule3 = org.FindInRules(text);
                        if (rule3 == null)
                        {
                            XmlParser.ThrowSrgsException(SRID.DynamicRuleNotFound, text);
                        }
                        CloneSubGraph(rule3, org, extra, srcToDestHash, true);
                    }
                }
                else
                {
                    text = outArc.RuleRef.Name;
                    // NOTE(review): 'rule4' is assigned but never read — looks like dead
                    // code (possibly a decompilation artifact); confirm FindInRules has no
                    // side effects before removing.
                    Rule rule4 = org.FindInRules(text);
                    if (!fromOrg)
                    {
                        CloneSubGraph(outArc.RuleRef, org, extra, srcToDestHash, true);
                    }
                }
                // Bind the cloned arc to the destination rule; if the rule was not cloned
                // yet, cloning its first state registers it and yields the destination rule.
                Rule rule5 = FindInRules(text);
                if (rule5 == null)
                {
                    rule5 = CloneState(outArc.RuleRef._firstState, list, srcToDestHash);
                }
                arc2.RuleRef = rule5;
            }
            arc2.ConnectStates();
        }
    }
}
/// <summary>
/// Parses an SRGS grammar via <paramref name="srgsParser"/> and compiles it.
/// When <paramref name="fOutputCfg"/> is false, the binary CFG is packed as a
/// resource into <paramref name="cfgResources"/> (a
/// <c>List&lt;CustomGrammar.CfgResource&gt;</c>) and generated assembly source is
/// appended to <paramref name="innerCode"/>. When true, the compiled CFG is
/// written to <paramref name="stream"/> (or to <paramref name="filename"/> if one
/// is given). Returns the <c>CustomGrammar</c> in both cases.
/// </summary>
/// <param name="culture">Receives the grammar's culture, derived from the backend LCID.</param>
private static object CompileStream(int iCfg, ISrgsParser srgsParser, string srgsPath, string filename, Stream stream, bool fOutputCfg, StringBuilder innerCode, object cfgResources, out CultureInfo culture, string[] referencedAssemblies, string keyFile)
{
    Backend backend = new Backend();
    CustomGrammar customGrammar = new CustomGrammar();
    // Wire the compiler factory into the parser. (The original code also stored the
    // factory in an unused local via a redundant cast — removed as dead code.)
    srgsParser.ElementFactory = new SrgsElementCompilerFactory(backend, customGrammar);
    srgsParser.Parse();
    backend.Optimize();
    // LCID 21514 (es-US) is not constructible from the raw LCID on all platforms,
    // so it is special-cased by name.
    culture = ((backend.LangId == 21514) ? new CultureInfo("es-us") : new CultureInfo(backend.LangId));
    // Resolve relative code-behind paths against the directory of the SRGS file.
    if (customGrammar._codebehind.Count > 0 && !string.IsNullOrEmpty(srgsPath))
    {
        for (int i = 0; i < customGrammar._codebehind.Count; i++)
        {
            if (!File.Exists(customGrammar._codebehind[i]))
            {
                customGrammar._codebehind[i] = srgsPath + "\\" + customGrammar._codebehind[i];
            }
        }
    }
    if (referencedAssemblies != null)
    {
        foreach (string item in referencedAssemblies)
        {
            customGrammar._assemblyReferences.Add(item);
        }
    }
    customGrammar._keyFile = keyFile;
    backend.ScriptRefs = customGrammar._scriptRefs;
    if (!fOutputCfg)
    {
        // Assembly output: embed the serialized CFG as a numbered resource and emit
        // the generated source for this grammar.
        CustomGrammar.CfgResource cfgResource = new CustomGrammar.CfgResource();
        cfgResource.data = BuildCfg(backend).ToArray();
        cfgResource.name = iCfg.ToString(CultureInfo.InvariantCulture) + ".CFG";
        ((List<CustomGrammar.CfgResource>)cfgResources).Add(cfgResource);
        innerCode.Append(customGrammar.CreateAssembly(iCfg, filename, culture));
        return customGrammar;
    }
    // CFG output: script references require actual script code to compile against.
    if (customGrammar._scriptRefs.Count > 0 && !customGrammar.HasScript)
    {
        XmlParser.ThrowSrgsException(SRID.NoScriptsForRules);
    }
    CreateAssembly(backend, customGrammar);
    if (!string.IsNullOrEmpty(filename))
    {
        // A filename overrides the caller-supplied stream.
        stream = new FileStream(filename, FileMode.Create, FileAccess.Write);
    }
    try
    {
        using (StreamMarshaler streamBuffer = new StreamMarshaler(stream))
        {
            backend.Commit(streamBuffer);
            return customGrammar;
        }
    }
    finally
    {
        // Only close streams this method opened; caller-owned streams stay open.
        if (!string.IsNullOrEmpty(filename))
        {
            stream.Close();
        }
    }
}
/// <summary>
/// Parse element for a semantic property tag, scoped to the enclosing rule.
/// The constructor itself has nothing to initialize; the backend parameter is
/// accepted for signature parity with the other parse elements.
/// </summary>
internal PropertyTag(ParseElement parent, Backend backend)
    : base(parent._rule)
{
    // Intentionally empty.
}
/// <summary>
/// Factory producing compiler-side SRGS elements; keeps the backend and
/// custom-grammar context and pre-builds the root grammar element.
/// </summary>
internal SrgsElementCompilerFactory(Backend backend, CustomGrammar cg)
{
    _cg = cg;
    _backend = backend;
    // The root grammar element shares the same backend/custom-grammar context.
    _grammar = new GrammarElement(backend, cg);
}
// Adds word transition(s) for a single token to 'parent'.
//
// The required-confidence for THIS token is read from 'parent._confidence'
// BEFORE it is overwritten below — i.e. the confidence requested by the
// preceding element applies to this token, and 'reqConfidence' sets the
// requirement for the NEXT one.
//
// Pronunciations ("pron1;pron2;...") produce one transition per alternative,
// wrapped in a OneOf when more than one ';'-separated form is present. Each
// transition word is encoded as "/display/token/pron;" (or "/display/token;"
// when only a display form is given).
private void ParseToken(ParseElementCollection parent, string sToken, string pronunciation, string display, float reqConfidence)
{
    // NOTE(review): the '?.' here tolerates a null parent, but 'parent._confidence = 0'
    // below would then throw — presumably parent is never null; confirm against callers.
    int requiredConfidence = parent?._confidence ?? 0;
    sToken = Backend.NormalizeTokenWhiteSpace(sToken);
    if (string.IsNullOrEmpty(sToken))
    {
        return;
    }
    // Map the requested confidence to the ternary flag used internally:
    //   negative or exactly 0.5 -> 0 (default), below 0.5 -> -1 (low), else -> 1 (high).
    // NOTE(review): this first assignment is a dead store — every branch below assigns.
    parent._confidence = 0;
    if (reqConfidence < 0f || reqConfidence.Equals(0.5f))
    {
        parent._confidence = 0;
    }
    else if ((double)reqConfidence < 0.5)
    {
        parent._confidence = -1;
    }
    else
    {
        parent._confidence = 1;
    }
    if (pronunciation != null || display != null)
    {
        string text = EscapeToken(sToken);
        string text2 = (display == null) ? text : EscapeToken(display);
        if (pronunciation != null)
        {
            // Multiple ';'-separated pronunciations become alternatives under a OneOf.
            OneOf oneOf = (pronunciation.IndexOf(';') >= 0) ? new OneOf(parent._rule, _backend) : null;
            int num = 0;
            int num2 = 0;
            while (num < pronunciation.Length)
            {
                // Slice out the next pronunciation alternative [num, num2).
                num2 = pronunciation.IndexOf(';', num);
                if (num2 == -1)
                {
                    num2 = pronunciation.Length;
                }
                string text3 = pronunciation.Substring(num, num2 - num);
                string text4 = null;
                // Convert the pronunciation to phoneme ids in the backend's alphabet.
                switch (_backend.Alphabet)
                {
                case AlphabetType.Sapi:
                    text4 = PhonemeConverter.ConvertPronToId(text3, _grammar.Backend.LangId);
                    break;
                case AlphabetType.Ipa:
                    // IPA is stored as-is, but validated.
                    text4 = text3;
                    PhonemeConverter.ValidateUpsIds(text4);
                    break;
                case AlphabetType.Ups:
                    text4 = PhonemeConverter.UpsConverter.ConvertPronToId(text3);
                    break;
                }
                string sWord = string.Format(CultureInfo.InvariantCulture, "/{0}/{1}/{2};", new object[3] { text2, text, text4 });
                if (oneOf != null)
                {
                    oneOf.AddArc(_backend.WordTransition(sWord, 1f, requiredConfidence));
                }
                else
                {
                    parent.AddArc(_backend.WordTransition(sWord, 1f, requiredConfidence));
                }
                num = num2 + 1;
            }
            // Attach the OneOf (if any) to the parent element.
            ((IElement)oneOf)?.PostParse((IElement)parent);
        }
        else
        {
            // Display-only form: "/display/token;".
            string sWord2 = string.Format(CultureInfo.InvariantCulture, "/{0}/{1};", new object[2] { text2, text });
            parent.AddArc(_backend.WordTransition(sWord2, 1f, requiredConfidence));
        }
    }
    else
    {
        // Plain token, no display or pronunciation override.
        parent.AddArc(_backend.WordTransition(sToken, 1f, requiredConfidence));
    }
}