/// <summary>
/// Resolves the lookup values for a field from the current data record:
/// a direct concept-id binding wins, then a matching mapper, then the
/// field's default concept.
/// </summary>
/// <returns>The resolved lookup values; empty when the concept key is null.</returns>
private static List<LookupValue> GetValues(IVocabulary vocabulary, Field field, IDataRecord reader, IEnumerable<Mapper> m)
{
    // Fields bound directly to a concept-id column bypass the mapper lookup entirely.
    if (!string.IsNullOrEmpty(field.ConceptId))
    {
        var conceptId = reader.GetInt(field.ConceptId);
        return new List<LookupValue> { new LookupValue { ConceptId = conceptId } };
    }

    var mapper = FindMapper(m, reader);
    if (mapper == null)
    {
        // No mapper matched this record: fall back to the field's default concept.
        // (Original code had a stray "} ;" after this block — removed.)
        return new List<LookupValue> { new LookupValue { ConceptId = field.DefaultConceptId } };
    }

    var conceptKey = reader.GetString(field.Key);

    // A missing key yields no lookup values rather than the default concept.
    return conceptKey == null
        ? new List<LookupValue>()
        : mapper.Map(vocabulary, field.Key, conceptKey, reader.GetDateTime(field.EventDate));
}
/// <summary>View model for a vocabulary entry; tapping it starts a training session.</summary>
public VocabularyViewModel(IVocabulary vocabulary, IPractiseInitializer practiseInitializer)
{
    // Keep the injected collaborators for later use.
    _vocabulary = vocabulary;
    _practiseInitializer = practiseInitializer;

    // The tap argument is irrelevant: any tap starts a session.
    TappedCommand = new Command(_ => StartTrainingSession());
}
/// <summary>
/// Renders the token stream (optionally only default-channel tokens) as text,
/// appends an EOF marker, and dumps the result for the given source file.
/// </summary>
public void DumpTokens(IList<IToken> tokens, Language language, TextFile sourceFile)
{
    IVocabulary vocabulary = language.CreateAntlrLexer().Vocabulary;
    var output = new StringBuilder();

    foreach (IToken token in tokens)
    {
        // Channel 0 is the default channel; skip hidden tokens when requested.
        if (OnlyCommonTokens && token.Channel != 0)
        {
            continue;
        }

        output.Append(RenderToken(token, false, vocabulary));
        if (EachTokenOnNewLine)
        {
            output.AppendLine();
        }
        else
        {
            output.Append(" ");
        }
    }

    output.Append("EOF");
    Dump(output.ToString(), sourceFile, true);
}
/// <summary>
/// Creates a new Engine object with the specified vocabulary.
/// </summary>
/// <param name="vocabulary">The vocabulary to load in this instance.</param>
/// <exception cref="ArgumentNullException"><paramref name="vocabulary"/> is null.</exception>
public Engine(IVocabulary vocabulary)
{
    if (vocabulary == null)
    {
        // nameof keeps the parameter name correct under rename refactorings
        // (original passed the string literal "vocabulary").
        throw new ArgumentNullException(nameof(vocabulary));
    }

    _vocabulary = vocabulary;
    _vars = new VarStore();
    _subs = new SubStore();
}
/// <summary>
/// Dumps the token stream of an ANTLR parse tree as rendered text terminated by EOF.
/// </summary>
/// <exception cref="ArgumentException">The tree is not an <see cref="AntlrParseTree"/>.</exception>
public override void DumpTokens(ParseTree parseTree)
{
    // Bug fix: the original used an unchecked "as" cast, so a non-ANTLR tree
    // produced a NullReferenceException at .Tokens. Fail fast with a clear message.
    if (!(parseTree is AntlrParseTree antlrParseTree))
    {
        throw new ArgumentException("parseTree must be an AntlrParseTree.", nameof(parseTree));
    }

    IVocabulary vocabulary = ((AntlrParser)parseTree.SourceLanguage.CreateParser()).Lexer.Vocabulary;
    var resultString = new StringBuilder();
    foreach (IToken token in antlrParseTree.Tokens)
    {
        // Channel 0 is the default channel; honor the hidden-token filter.
        if (!OnlyCommonTokens || token.Channel == 0)
        {
            resultString.Append(RenderToken(token, false, vocabulary));
            if (EachTokenOnNewLine)
            {
                resultString.AppendLine();
            }
            else
            {
                resultString.Append(" ");
            }
        }
    }
    resultString.Append("EOF");
    Dump(resultString.ToString(), parseTree.SourceCodeFile, true);
}
/// <summary>Query facade over the search engine; all collaborators are injected.</summary>
public Query(SearchEngine searchEngine, IVocabulary vocabulary, ITokinizer tokinizer, IDocumentStorage documentStorage)
{
    // Plain constructor injection — no validation, matching the sibling factories.
    _documentStorage = documentStorage;
    _tokinizer = tokinizer;
    _vocabulary = vocabulary;
    _searchEngine = searchEngine;
}
/// <summary>Attaches this instance to a vocabulary, but only once: an
/// already-assigned vocabulary is never overwritten.</summary>
public void JoinToVocabulary(IVocabulary vocabulary)
{
    if (Vocabulary != null)
    {
        return; // already joined — keep the first vocabulary
    }

    Vocabulary = vocabulary;
}
/// <summary>Factory wiring for the search subsystem; dependencies are injected.</summary>
public SearchFactory(IBlobRepository blobRepository, IVocabulary vocabulary, IDocumentStorage documentStorage, ITokinizer tokinizer)
{
    // Store the collaborators; creation work happens in the factory methods.
    _tokinizer = tokinizer;
    _documentStorage = documentStorage;
    _vocabulary = vocabulary;
    _blobRepository = blobRepository;
}
/// <summary>Captures everything needed to pretty-print a DFA: the automaton,
/// the vocabulary for token names, and optional rule names / ATN context.</summary>
public DFASerializer(DFA dfa, IVocabulary vocabulary, string[] ruleNames, ATN atn)
{
    this.atn = atn;
    this.ruleNames = ruleNames;
    this.vocabulary = vocabulary;
    this.dfa = dfa;
}
/// <summary>
/// Renders a token list as text (one token per line or space-separated),
/// terminated by "EOF".
/// </summary>
/// <param name="tokenValueDisplayMode">How token values are rendered.</param>
/// <param name="onlyDefaultChannel">When true, hidden-channel tokens are skipped.</param>
/// <param name="eachTokenOnNewLine">Newline vs. space separator between tokens.</param>
public static string GetTokensString(this IList<IToken> tokens, IVocabulary vocabulary,
    TokenValueDisplayMode tokenValueDisplayMode = TokenValueDisplayMode.Show,
    bool onlyDefaultChannel = false, bool eachTokenOnNewLine = true)
{
    var resultString = new StringBuilder();
    foreach (var token in tokens)
    {
        if (!onlyDefaultChannel || token.Channel == 0)
        {
            // Bug fix: honor the caller-supplied display mode. The original
            // hard-coded TokenValueDisplayMode.Show, silently ignoring the parameter.
            resultString.Append(RenderToken(token, tokenValueDisplayMode, false, vocabulary));
            if (eachTokenOnNewLine)
            {
                resultString.AppendLine();
            }
            else
            {
                resultString.Append(" ");
            }
        }
    }
    resultString.Append("EOF");
    return resultString.ToString();
}
// Builds a runtime parser interpreter over an arbitrary ATN (no generated
// parser class required), pre-allocating one DFA per decision point.
public ParserInterpreter(string grammarFileName, IVocabulary vocabulary, IEnumerable<string> ruleNames, ATN atn, ITokenStream input)
    : base(input)
{
    this._grammarFileName = grammarFileName;
    this._atn = atn;
    this._ruleNames = ruleNames.ToArray();
    this.vocabulary = vocabulary;

    // identify the ATN states where pushNewRecursionContext must be called
    this.pushRecursionContextStates = new BitSet(atn.states.Count);
    foreach (ATNState state in atn.states)
    {
        if (!(state is StarLoopEntryState))
        {
            continue;
        }
        // only left-recursive rule loop entries need the recursion-context push
        if (((StarLoopEntryState)state).isPrecedenceDecision)
        {
            this.pushRecursionContextStates.Set(state.stateNumber);
        }
    }

    //init decision DFA
    int numberofDecisions = atn.NumberOfDecisions;
    this._decisionToDFA = new Dfa.DFA[numberofDecisions];
    for (int i = 0; i < numberofDecisions; i++)
    {
        DecisionState decisionState = atn.GetDecisionState(i);
        _decisionToDFA[i] = new Dfa.DFA(decisionState, i);
    }

    // get atn simulator that knows how to do predictions
    Interpreter = new ParserATNSimulator(this, atn, _decisionToDFA, null);
}
// Runtime parser interpreter variant that also materializes the deprecated
// tokenNames array from the vocabulary for backward compatibility.
public ParserInterpreter(string grammarFileName, IVocabulary vocabulary, IEnumerable<string> ruleNames, ATN atn, ITokenStream input)
    : base(input)
{
    this.grammarFileName = grammarFileName;
    this.atn = atn;
#pragma warning disable 612 // Type or member is obsolete
    // Legacy API support: derive the obsolete tokenNames array from the vocabulary.
    this.tokenNames = new string[atn.maxTokenType];
    for (int i = 0; i < tokenNames.Length; i++)
    {
        tokenNames[i] = vocabulary.GetDisplayName(i);
    }
#pragma warning restore 612 // Type or member is obsolete
    this.ruleNames = ruleNames.ToArray();
    this.vocabulary = vocabulary;

    // identify the ATN states where pushNewRecursionContext() must be called
    this.pushRecursionContextStates = new BitSet(atn.states.Count);
    foreach (ATNState state in atn.states)
    {
        if (!(state is StarLoopEntryState))
        {
            continue;
        }
        // only left-recursive rule decisions require the recursion-context push
        if (((StarLoopEntryState)state).precedenceRuleDecision)
        {
            this.pushRecursionContextStates.Set(state.stateNumber);
        }
    }

    // get atn simulator that knows how to do predictions
    Interpreter = new ParserATNSimulator(this, atn);
}
/// <summary>Annotated serializer constructor: DFA and vocabulary are required;
/// rule names and ATN are optional context for richer output.</summary>
public DFASerializer([NotNull] DFA dfa, [NotNull] IVocabulary vocabulary, [Nullable] string[] ruleNames, [Nullable] ATN atn)
{
    this.atn = atn;
    this.ruleNames = ruleNames;
    this.vocabulary = vocabulary;
    this.dfa = dfa;
}
/// <summary>Bulk-inserts every word of the vocabulary with its occurrence count.</summary>
public void Insert(IVocabulary vocabulary)
{
    foreach (var entry in vocabulary)
    {
        Insert(entry.Word, entry.Occurs);
    }
}
// On first show: ask the user for a file, losslessly recompress it as an image,
// and report the size before/after. An ANTLR tree dump follows but is currently
// short-circuited by a return (see review notes below).
private void Form1_Shown(object sender, EventArgs e)
{
    // NOTE(review): cancelling the dialog re-opens it forever — confirm that an
    // infinite retry loop is intended here.
    OpenFileDialog ofd = new OpenFileDialog();
    DialogResult result;
    do
    {
        result = ofd.ShowDialog();
    } while (result != DialogResult.OK || ofd.FileName == null);

    string fileName = ofd.FileName;

    // Recompress the selected file in place and show size before/after.
    FileInfo snakewareLogo = new FileInfo(fileName);
    MessageBox.Show("Bytes before: " + snakewareLogo.Length);
    ImageOptimizer optimizer = new ImageOptimizer();
    optimizer.LosslessCompress(snakewareLogo);
    snakewareLogo.Refresh();
    MessageBox.Show("Bytes after: " + snakewareLogo.Length);

    //Compressor
    // NOTE(review): everything below this return is unreachable — the ANTLR
    // parse-tree dump appears to be temporarily disabled; confirm before removing.
    return;

    AntlrFileStream inputStream = new AntlrFileStream(fileName);
    IParseTree tree = null;
    IVocabulary names = null;
    string typeS = fileName.Substring(fileName.LastIndexOf('.') + 1);
    switch (typeS.ToUpper())
    {
        case "CSS":
            tree = new CssParser(new CommonTokenStream(new CssLexer(inputStream))).stylesheet();
            names = CssParser.DefaultVocabulary;
            break;
        case "HTML":
            tree = new HtmlParser(new CommonTokenStream(new HtmlLexer(inputStream))).htmlDocument();
            names = HtmlParser.DefaultVocabulary;
            break;
        case "JS":
            tree = new JsParser(new CommonTokenStream(new JsLexer(inputStream))).program();
            names = JsParser.DefaultVocabulary;
            break;
    }

    var node = treeView.Nodes.Add(ToText(tree, names));
    RecursFn(tree, node, (i) => ToText(i, names));

    /*var d = new CommonTokenStream(new HtmlLexer(inputStream));
     * names = HtmlParser.DefaultVocabulary;
     * d.Fill();
     * foreach(var item in d.GetTokens()) {
     *   treeView.Nodes.Add(names.GetDisplayName(item.Type) + " --- " + item.ToString());
     * }*/
}
/// <summary>
/// Constructs a VocabularyCommands object.
/// </summary>
/// <param name="vocabulary">The IVocabulary object whose methods are decorated
/// with ConsoleCommandAttribute; they become the command table.</param>
public VocabularyCommands(IVocabulary vocabulary)
{
    this.vocabulary = vocabulary;

    // Command names are case-insensitive by design.
    commands = new Dictionary<string, CommandInfo>(StringComparer.OrdinalIgnoreCase);

    // Populate the table from the vocabulary's attributed methods.
    BuildCommands();
}
/// <summary>
/// Creates a new Engine object with the specified vocabulary.
/// </summary>
/// <param name="vocabulary">The vocabulary to load in this instance.</param>
/// <exception cref="ArgumentNullException"><paramref name="vocabulary"/> is null.</exception>
public Engine(IVocabulary vocabulary)
{
    if (vocabulary == null)
    {
        // nameof is refactor-safe, unlike the original "vocabulary" string literal.
        throw new ArgumentNullException(nameof(vocabulary));
    }

    _vocabulary = vocabulary;
}
/// <summary>Renders a parse-tree node as a label: terminals as
/// "text" - DISPLAY_NAME, inner nodes as their CLR type name.</summary>
private string ToText(IParseTree tree, IVocabulary names)
{
    if (tree is TerminalNodeImpl token)
    {
        var display = names.GetDisplayName(token.Symbol.Type);
        return "\"" + token.GetText() + "\" - " + display;
    }

    return tree.GetType().Name;
}
/// <summary>
/// Ctor.
/// </summary>
/// <param name="vocabulary">The vocabulary (dictionary) this command operates on.</param>
/// <exception cref="ArgumentNullException">The argument is null.</exception>
public GetCommand(IVocabulary vocabulary)
{
    if (vocabulary == null)
    {
        throw new ArgumentNullException(nameof(vocabulary));
    }

    _vocabulary = vocabulary;
}
// Releases every per-chunk buffer: each collection is cleared (dropping its
// items) and the reference itself is then nulled so the backing storage can be
// reclaimed between chunks. Callers must not use this instance afterwards
// without re-initialization.
public void Reset()
{
    //ChunkData.Clean();
    Vocabulary = null;
    ObservationPeriodsRaw.Clear();
    ObservationPeriodsRaw = null;
    PersonRecords.Clear();
    PersonRecords = null;
    DeathRecords.Clear();
    DeathRecords = null;
    CohortRecords.Clear();
    CohortRecords = null;
    PayerPlanPeriodsRaw.Clear();
    PayerPlanPeriodsRaw = null;
    ConditionOccurrencesRaw.Clear();
    ConditionOccurrencesRaw = null;
    DrugExposuresRaw.Clear();
    DrugExposuresRaw = null;
    ProcedureOccurrencesRaw.Clear();
    ProcedureOccurrencesRaw = null;
    ObservationsRaw.Clear();
    ObservationsRaw = null;
    MeasurementsRaw.Clear();
    MeasurementsRaw = null;
    VisitOccurrencesRaw.Clear();
    VisitOccurrencesRaw = null;
    VisitDetailsRaw.Clear();
    VisitDetailsRaw = null;
    VisitCostsRaw.Clear();
    VisitCostsRaw = null;
    DeviceExposureRaw.Clear();
    DeviceExposureRaw = null;
    DeviceCostRaw.Clear();
    DeviceCostRaw = null;
    DrugForEra.Clear();
    DrugForEra = null;
    ConditionForEra.Clear();
    ConditionForEra = null;
    NoteRecords.Clear();
    NoteRecords = null;
}
// Wires the analyzer to an input stream and the token tables of the vocabulary,
// then registers a synthetic "eof" token one slot past the last named token.
public LexicalAnalyzer(StreamReader sr, IVocabulary vocabulary)
{
    stream = sr;
    this.Tokens = vocabulary.Tokens;
    this.TokensName = vocabulary.Names;
    this.TokensActions = vocabulary.Actions;
    // eof takes the first unused index; assumes Names is keyed 0..Count-1 — TODO confirm
    eofIndex = TokensName.Count;
    TokensName[eofIndex] = "eof";
}
/// <summary>Serializes this DFA using the given vocabulary for token display
/// names; a DFA with no start state renders as the empty string.</summary>
public virtual string ToString([NotNull] IVocabulary vocabulary)
{
    if (s0.Get() == null)
    {
        return string.Empty;
    }

    return new DFASerializer(this, vocabulary).ToString();
}
/// <summary>Parser interpreter that additionally keeps a reference to the
/// source <c>Grammar</c> so interpretation can consult rule metadata.</summary>
public GrammarParserInterpreter(Grammar g, string grammarFileName, IVocabulary vocabulary, ICollection<string> ruleNames, ATN atn, ITokenStream input)
    : base(grammarFileName, vocabulary, ruleNames, atn, input)
{
    // Everything else is handled by the base interpreter.
    this.g = g;
}
/// <summary>Builds an interpreter over the grammar's ATN, retaining the grammar
/// itself for later lookups; base wiring is delegated entirely.</summary>
public GrammarParserInterpreter(Grammar g, string grammarFileName, IVocabulary vocabulary, ICollection<string> ruleNames, ATN atn, ITokenStream input)
    : base(grammarFileName, vocabulary, ruleNames, atn, input)
{
    this.g = g;
}
/// <summary>Serializes this DFA with rule names and the owning ATN for richer
/// output; an uninitialized DFA (no start state) renders as empty.</summary>
public virtual string ToString([NotNull] IVocabulary vocabulary, [Nullable] string[] ruleNames)
{
    if (s0.Get() == null)
    {
        return string.Empty;
    }

    var serializer = new DFASerializer(this, vocabulary, ruleNames, atnStartState.atn);
    return serializer.ToString();
}
// Maps each record's concept id to pregnancy concepts and buckets the matches
// into `events` by concept category. Observation/Measurement records must also
// match the concept's expected data value (when one is configured) and
// contribute their numeric value. Each match gets a fresh sequential event id.
private void Fill(IVocabulary vocab, IEnumerable<IEntity> records, Dictionary<string, List<PregnancyConcept>> events)
{
    foreach (var e in records)
    {
        foreach (var pc in vocab.LookupPregnancyConcept(e.ConceptId))
        {
            if (pc == null)
            {
                continue;
            }

            pc.Entity = e;
            if (e.GeEntityType() == EntityType.Observation)
            {
                var o = (Observation)e;
                // A configured DataValue acts as a filter against the record's value.
                if (!string.IsNullOrEmpty(pc.DataValue) && pc.DataValue != o.ValueAsString)
                {
                    continue;
                }
                pc.ValueAsNumber = o.ValueAsNumber;
            }
            else if (e.GeEntityType() == EntityType.Measurement)
            {
                var m = (Measurement)e;
                // Note: measurements filter on ValueSourceValue (not ValueAsString).
                if (!string.IsNullOrEmpty(pc.DataValue) && pc.DataValue != m.ValueSourceValue)
                {
                    continue;
                }
                pc.ValueAsNumber = m.ValueAsNumber;
            }

            if (!events.ContainsKey(pc.Category))
            {
                events.Add(pc.Category, new List<PregnancyConcept>());
            }

            // Store a copy (not pc itself) carrying the assigned event id.
            events[pc.Category].Add(new PregnancyConcept
            {
                EventId = _eventId,
                Category = pc.Category,
                ConceptId = pc.ConceptId,
                DataValue = pc.DataValue,
                Entity = e,
                GestValue = pc.GestValue,
                ValueAsNumber = pc.ValueAsNumber
            });
            _eventId++;
        }
    }
}
/// <summary>Creates the vocabulary lookup once per build, recording start/end
/// timestamps; a lookup created by a previous run is not rebuilt.</summary>
private void CreateLookup(IVocabulary vocabulary)
{
    // Skip the work when a previous build already produced the lookup.
    if (Settings.Current.Building.BuildingState.LookupCreated)
    {
        return;
    }

    UpdateDate("CreateLookupStart");
    _builderController.CreateLookup(vocabulary);
    UpdateDate("CreateLookupEnd");
}
/// <summary>Builds a practise session over the vocabulary and hands it to a
/// moderator whose progress callbacks are relayed back to this presenter.</summary>
public void StartPractise(IVocabulary vocabulary)
{
    var session = new Practise(_shuffleAlgorithm, vocabulary);
    var moderator = new Moderator(session);

    moderator.QuestionUpdated = () => QuestionUpdated(moderator);
    moderator.PractiseCompleted = () => PractiseCompleted(moderator);

    moderator.StartPractise();
}
/// <summary>
/// A copy constructor that creates a new parser interpreter by reusing
/// the fields of a previous interpreter.
/// </summary>
/// <param name="old">The interpreter to copy</param>
/// <since>4.5</since>
public ParserInterpreter(Antlr4.Runtime.ParserInterpreter old)
    : base(((ITokenStream)old.InputStream))
{
    // Shallow-copy the old interpreter's configuration; only the ATN simulator
    // is rebuilt so this instance gets independent prediction state.
    this.grammarFileName = old.grammarFileName;
    this.atn = old.atn;
    this.pushRecursionContextStates = old.pushRecursionContextStates;
    this.tokenNames = old.tokenNames;
    this.ruleNames = old.ruleNames;
    this.vocabulary = old.vocabulary;
    Interpreter = new ParserATNSimulator(this, atn);
}
/// <summary>Serializes this DFA via <c>DFASerializer</c>; an empty DFA
/// (no start state) renders as the empty string.</summary>
public String ToString(IVocabulary vocabulary)
{
    if (s0 == null)
    {
        return "";
    }

    var serializer = new DFASerializer(this, vocabulary);
    return serializer.ToString();
}
/// <summary>Prints the vocabulary header (name, description), both of its
/// languages, and then every word it contains.</summary>
protected void Dump(IVocabulary voc)
{
    Console.Out.WriteLine(voc.Name);
    Console.Out.WriteLine(voc.Description);

    Dump(voc.TargetLanguage);
    Dump(voc.NativeLanguage);

    foreach (IWord entry in voc.Words)
    {
        Dump(entry);
    }
}
/// <summary>Lazily loads the vocabulary: a second call is a no-op, and an
/// empty path yields a blank vocabulary instead of reading a directory.</summary>
private void LoadVocab(string path, NsfwFilter filter)
{
    // Already loaded — nothing to do.
    if (_vocabulary != null)
    {
        return;
    }

    _vocabulary = String.IsNullOrEmpty(path)
        ? new Vocabulary(null)
        : Processus.Vocabulary.FromDirectory(path, filter);
}
/// <summary>
/// Resolves a source value to lookup values: through the configured lookup
/// table when one is set, otherwise through the field's static concept mapping.
/// </summary>
/// <param name="key">Field key used for the static mapping fallback.</param>
/// <param name="source">Source value to resolve.</param>
/// <param name="eventDate">Date the lookup applies to.</param>
public List<LookupValue> Map(IVocabulary vocabulary, string key, string source, DateTime eventDate)
{
    // Two superseded caseSensitive overloads were left commented out above this
    // method — removed as dead code.
    if (!string.IsNullOrEmpty(Lookup))
    {
        return vocabulary.Lookup(source, Lookup, eventDate);
    }

    return new List<LookupValue> { new LookupValue { ConceptId = _fields[key] } };
}
// Aggregates pregnancy-related events into PregnancyEvents, keyed by concept
// category. Non-drug domains are collected through the per-domain Fill overload;
// drug exposures are first matched through the "PregnancyDrug" lookup and only
// mapped drugs are recorded.
public void Fill(IVocabulary vocab, ConditionOccurrence[] conditionOccurrences, ProcedureOccurrence[] procedureOccurrences, Observation[] observations, Measurement[] measurements, DrugExposure[] drugExposures)
{
    var events = new Dictionary<string, List<PregnancyConcept>>();
    Fill(vocab, conditionOccurrences, events);
    Fill(vocab, procedureOccurrences, events);
    Fill(vocab, observations, events);
    Fill(vocab, measurements, events);

    foreach (var e in GetNonDrug(events))
    {
        if (!PregnancyEvents.ContainsKey(e.Category))
        {
            PregnancyEvents.Add(e.Category, new List<Event>());
        }
        PregnancyEvents[e.Category].Add(e);
    }

    foreach (var de in drugExposures)
    {
        // Only drugs that resolve through the PregnancyDrug lookup participate.
        var res = vocab.Lookup(de.ConceptId.ToString(), "PregnancyDrug", DateTime.MinValue);
        if (res.Count == 0 || !res[0].ConceptId.HasValue)
        {
            continue;
        }

        foreach (var pc in vocab.LookupPregnancyConcept(res[0].ConceptId.Value))
        {
            if (pc == null)
            {
                continue;
            }
            if (!PregnancyEvents.ContainsKey(pc.Category))
            {
                PregnancyEvents.Add(pc.Category, new List<Event>());
            }
            // Drug events carry the exposure start date; GestValue is not derived here.
            PregnancyEvents[pc.Category].Add(new Event
            {
                EventId = _eventId,
                Category = pc.Category,
                Date = de.StartDate,
                PersonId = de.PersonId,
                GestValue = null
            });
            _eventId++;
        }
    }
}
// Parses template lines into nodes.
// NOTE(review): the result of vocabulary.Contains(line) is discarded and
// nodeList is never populated, so this always returns an empty read-only list —
// looks unfinished; confirm intended behavior before relying on it.
public IEnumerable<Node> Parse(IEnumerable<string> templateLines, IVocabulary vocabulary)
{
    var nodeList = new List<Node>();
    // Strip trailing CR/tab/space so vocabulary matching sees clean lines.
    var trimmedLines = templateLines
        .Select(x => x.TrimEnd('\r', '\t', ' '));

    foreach (var line in trimmedLines)
    {
        vocabulary.Contains(line);
    }

    return nodeList.AsReadOnly();
}
/// <summary>Runtime lexer driven directly by a lexer ATN — no generated lexer
/// class required.</summary>
/// <exception cref="ArgumentException">The ATN is not a lexer ATN.</exception>
public LexerInterpreter(string grammarFileName, IVocabulary vocabulary, IEnumerable<string> ruleNames, IEnumerable<string> modeNames, ATN atn, ICharStream input)
    : base(input)
{
    // Only lexer ATNs carry the tokenizing rules this interpreter needs.
    if (atn.grammarType != ATNType.Lexer)
    {
        throw new ArgumentException("The ATN must be a lexer ATN.");
    }

    this.atn = atn;
    this.grammarFileName = grammarFileName;
    this.vocabulary = vocabulary;
    this.ruleNames = ruleNames.ToArray();
    this.modeNames = modeNames.ToArray();
    this.Interpreter = new LexerATNSimulator(this, atn);
}
/// <summary>
/// Builds a display-name table for token types 0..length-1, preferring the
/// literal name ('if') over the symbolic name (IF) over a placeholder.
/// </summary>
private static string[] GenerateTokenNames(IVocabulary vocabulary, int length)
{
    string[] tokenNames = new string[length];
    for (int i = 0; i < tokenNames.Length; i++)
    {
        // Null-coalescing chain replaces the original pair of null checks.
        tokenNames[i] = vocabulary.GetLiteralName(i)
            ?? vocabulary.GetSymbolicName(i)
            ?? "<INVALID>";
    }

    return tokenNames;
}
// Lexer interpreter variant that pre-allocates one DFA per decision and shares
// a context cache across them.
public LexerInterpreter(string grammarFileName, IVocabulary vocabulary, IEnumerable<string> ruleNames, IEnumerable<string> modeNames, ATN atn, ICharStream input)
    : base(input)
{
    // Guard: this interpreter can only drive a lexer ATN.
    if (atn.grammarType != ATNType.Lexer)
    {
        throw new ArgumentException("The ATN must be a lexer ATN.");
    }
    this.grammarFileName = grammarFileName;
    this.atn = atn;
    this.ruleNames = ruleNames.ToArray();
    this.modeNames = modeNames.ToArray();
    this.vocabulary = vocabulary;
    // One DFA per lexer decision point, indexed by decision number.
    this.decisionToDFA = new DFA[atn.NumberOfDecisions];
    for (int i = 0; i < decisionToDFA.Length; i++)
    {
        decisionToDFA[i] = new DFA(atn.GetDecisionState(i), i);
    }
    this.Interpreter = new LexerATNSimulator(this, atn, decisionToDFA, sharedContextCache);
}
// Lexer interpreter variant that also materializes the deprecated tokenNames
// array from the vocabulary for backward compatibility.
public LexerInterpreter(string grammarFileName, IVocabulary vocabulary, IEnumerable<string> ruleNames, IEnumerable<string> modeNames, ATN atn, ICharStream input)
    : base(input)
{
    // Guard: this interpreter can only drive a lexer ATN.
    if (atn.grammarType != ATNType.Lexer)
    {
        throw new ArgumentException("The ATN must be a lexer ATN.");
    }
    this.grammarFileName = grammarFileName;
    this.atn = atn;
#pragma warning disable 612 // 'fieldName' is obsolete
    // Legacy API support: derive the obsolete tokenNames array from the vocabulary.
    this.tokenNames = new string[atn.maxTokenType];
    for (int i = 0; i < tokenNames.Length; i++)
    {
        tokenNames[i] = vocabulary.GetDisplayName(i);
    }
#pragma warning restore 612
    this.ruleNames = ruleNames.ToArray();
    this.modeNames = modeNames.ToArray();
    this.vocabulary = vocabulary;
    this._interp = new LexerATNSimulator(this, atn);
}
/// <summary>Copies every vocabulary category into the Categories table,
/// numbering rows from 1 and logging progress to the console.</summary>
private static void ConvertCategories(IVocabulary voc, SqlCeConnection conn)
{
    using (SqlCeCommand insert = conn.CreateCommand())
    {
        insert.CommandText = "INSERT INTO Categories (Position, Category) VALUES (@Position, @Category)";
        insert.Parameters.Add(new SqlCeParameter("Position", SqlDbType.SmallInt));
        insert.Parameters.Add(new SqlCeParameter("Category", SqlDbType.NVarChar));
        insert.Prepare();

        int rowNumber = 0;
        foreach (String category in voc.Categories)
        {
            Console.Write("category [{0}]...", category);
            insert.Parameters["Position"].Value = ++rowNumber;
            insert.Parameters["Category"].Value = category;
            insert.ExecuteNonQuery();
            Console.WriteLine("ok");
        }
    }
}
// Runtime parser interpreter over an arbitrary ATN; unlike the sibling variant
// it neither builds the legacy tokenNames array nor pre-allocates decision DFAs.
public ParserInterpreter(string grammarFileName, IVocabulary vocabulary, IEnumerable<string> ruleNames, ATN atn, ITokenStream input)
    : base(input)
{
    this._grammarFileName = grammarFileName;
    this._atn = atn;
    this._ruleNames = ruleNames.ToArray();
    this.vocabulary = vocabulary;

    // identify the ATN states where pushNewRecursionContext must be called
    this.pushRecursionContextStates = new BitSet(atn.states.Count);
    foreach (ATNState state in atn.states)
    {
        if (!(state is StarLoopEntryState))
        {
            continue;
        }
        // only left-recursive rule decisions require the recursion-context push
        if (((StarLoopEntryState)state).precedenceRuleDecision)
        {
            this.pushRecursionContextStates.Set(state.stateNumber);
        }
    }

    // get atn simulator that knows how to do predictions
    Interpreter = new ParserATNSimulator(this, atn);
}
/// <summary>Splits the template into non-empty lines (LF-separated) and
/// delegates to the line-based Parse overload.</summary>
public IEnumerable<Node> Parse(string templateContent, IVocabulary vocabulary)
{
    var separators = new[] { "\n" };
    var templateLines = templateContent.Split(separators, StringSplitOptions.RemoveEmptyEntries);

    return Parse(templateLines, vocabulary);
}
/// <summary>Copies every word type of the vocabulary into the Types table,
/// numbering rows from 1 and logging progress to the console.</summary>
private static void ConvertTypes(IVocabulary voc, SqlCeConnection conn)
{
    using (SqlCeCommand insert = conn.CreateCommand())
    {
        insert.CommandText = "INSERT INTO Types (Position, Type) VALUES (@Position, @Type)";
        insert.Parameters.Add(new SqlCeParameter("Position", SqlDbType.SmallInt));
        insert.Parameters.Add(new SqlCeParameter("Type", SqlDbType.NVarChar));
        insert.Prepare();

        int rowNumber = 0;
        foreach (String type in voc.WordTypes)
        {
            Console.Write("type [{0}]...", type);
            insert.Parameters["Position"].Value = ++rowNumber;
            insert.Parameters["Type"].Value = type;
            insert.ExecuteNonQuery();
            Console.WriteLine("ok");
        }
    }
}
/// <summary>Entry point for converting one vocabulary into the CE database;
/// delegates to the full vocabulary conversion routine.</summary>
private static void Convert(IVocabulary voc, SqlCeConnection conn)
{
    ConvertVocabulary(voc, conn);
}
// Validates vocabulary properties during editing: hooks change notifications
// on the vocabulary itself and every sub-object it depends on, then runs an
// initial validation pass so the state is correct before any edit.
protected internal EditVocabularyPropertiesValidator(IVocabulary vocabulary)
{
    Debug.Assert(vocabulary != null);
    this.vocabulary = vocabulary;

    Hook(vocabulary);
    Hook(vocabulary.WordTypes);
    Hook(vocabulary.Categories);
    Hook(vocabulary.NativeLanguage);
    Hook(vocabulary.TargetLanguage);

    Validate();
}
/// <summary>Serializes this DFA with rule names and the owning ATN; a DFA
/// whose start state was never set renders as the empty string.</summary>
public virtual string ToString(IVocabulary vocabulary, string[] ruleNames)
{
    if (s0.Get() == null)
    {
        return string.Empty;
    }

    return new DFASerializer(this, vocabulary, ruleNames, atnStartState.atn).ToString();
}
/// <summary>Renders one token type for display: EOF and EPSILON have no
/// vocabulary entry, so they are rendered symbolically; everything else uses
/// the vocabulary's display name.</summary>
protected internal virtual string ElementName(IVocabulary vocabulary, int a)
{
    if (a == TokenConstants.EOF)
    {
        return "<EOF>";
    }

    if (a == TokenConstants.EPSILON)
    {
        return "<EPSILON>";
    }

    return vocabulary.GetDisplayName(a);
}
// Inserts every word (with its resolved type id and prefix) into the Words
// table, then converts each word's meanings in a second pass over the same set.
private static void ConvertWords(IVocabulary voc, SqlCeConnection conn)
{
    using (SqlCeCommand cmd = conn.CreateCommand())
    {
        cmd.CommandText = "INSERT INTO Words (TypeId, Prefix, Word) VALUES (@TypeId, @Prefix, @Word)";
        cmd.Parameters.Add(new SqlCeParameter("TypeId", SqlDbType.UniqueIdentifier));
        cmd.Parameters.Add(new SqlCeParameter("Prefix", SqlDbType.NVarChar));
        cmd.Parameters.Add(new SqlCeParameter("Word", SqlDbType.NVarChar));
        cmd.Prepare();
        foreach (IWord word in voc.Words)
        {
            Console.Write("word [{0}]...", word.Text);
            // Words whose type cannot be resolved are skipped (logged as "bad type").
            Guid type_id = GetTypeId(word.Type, conn);
            if (type_id != Guid.Empty)
            {
                cmd.Parameters["TypeId"].Value = type_id;
                cmd.Parameters["Prefix"].Value = word.Prefix;
                cmd.Parameters["Word"].Value = word.Data;
                cmd.ExecuteNonQuery();
                Console.WriteLine("ok");
            }
            else
            {
                Console.WriteLine("bad type");
            }
        }
    }
    // Second pass: meanings reference words, so they are converted afterwards.
    foreach (IWord word in voc.Words)
    {
        ConvertMeanings(word, conn);
    }
}
/// <summary>
/// A copy constructor that creates a new parser interpreter by reusing
/// the fields of a previous interpreter.
/// </summary>
/// <param name="old">The interpreter to copy</param>
/// <since>4.5</since>
public ParserInterpreter(Antlr4.Runtime.ParserInterpreter old)
    : base(((ITokenStream)old.InputStream))
{
    // latch and only override once; error might trigger infinite loop
    // Shallow-copy the old interpreter's configuration; only the ATN simulator
    // is rebuilt so this instance gets independent prediction state.
    this.grammarFileName = old.grammarFileName;
    this.atn = old.atn;
    this.pushRecursionContextStates = old.pushRecursionContextStates;
#pragma warning disable 612 // Type or member is obsolete
    // tokenNames is deprecated but still copied for backward compatibility.
    this.tokenNames = old.tokenNames;
#pragma warning restore 612 // Type or member is obsolete
    this.ruleNames = old.ruleNames;
    this.vocabulary = old.vocabulary;
    Interpreter = new ParserATNSimulator(this, atn);
}
// Runtime parser interpreter that also materializes the deprecated tokenNames
// array from the vocabulary for backward compatibility.
public ParserInterpreter(string grammarFileName, IVocabulary vocabulary, IEnumerable<string> ruleNames, ATN atn, ITokenStream input)
    : base(input)
{
    this.grammarFileName = grammarFileName;
    this.atn = atn;
#pragma warning disable 612 // Type or member is obsolete
    // Legacy API support: derive the obsolete tokenNames array from the vocabulary.
    this.tokenNames = new string[atn.maxTokenType];
    for (int i = 0; i < tokenNames.Length; i++)
    {
        tokenNames[i] = vocabulary.GetDisplayName(i);
    }
#pragma warning restore 612 // Type or member is obsolete
    this.ruleNames = ruleNames.ToArray();
    this.vocabulary = vocabulary;

    // identify the ATN states where pushNewRecursionContext() must be called
    this.pushRecursionContextStates = new BitSet(atn.states.Count);
    foreach (ATNState state in atn.states)
    {
        if (!(state is StarLoopEntryState))
        {
            continue;
        }
        // only left-recursive rule decisions require the recursion-context push
        if (((StarLoopEntryState)state).precedenceRuleDecision)
        {
            this.pushRecursionContextStates.Set(state.stateNumber);
        }
    }

    // get atn simulator that knows how to do predictions
    Interpreter = new ParserATNSimulator(this, atn);
}
// Renders this interval set as "{a, b, c}" using the vocabulary's display
// names. Ranges are expanded element-by-element; braces are only emitted when
// the set holds more than one element, and an empty/null set renders as "{}".
public virtual string ToString(IVocabulary vocabulary)
{
    StringBuilder buf = new StringBuilder();
    if (this.intervals == null || this.intervals.Count == 0)
    {
        return "{}";
    }
    if (this.Count > 1)
    {
        buf.Append("{");
    }
    bool first = true;
    foreach (Interval I in intervals)
    {
        if (!first)
            buf.Append(", ");
        first = false;
        int a = I.a;
        int b = I.b;
        if (a == b)
        {
            // Degenerate interval: a single token.
            buf.Append(ElementName(vocabulary, a));
        }
        else
        {
            // Expand the inclusive range [a, b] one token at a time.
            for (int i = a; i <= b; i++)
            {
                if (i > a)
                {
                    buf.Append(", ");
                }
                buf.Append(ElementName(vocabulary, i));
            }
        }
    }
    if (this.Count > 1)
    {
        buf.Append("}");
    }
    return buf.ToString();
}
/// <summary>Serializes this DFA; an empty DFA (no start state) renders as "".</summary>
public String ToString(IVocabulary vocabulary)
{
    return s0 == null
        ? ""
        : new DFASerializer(this, vocabulary).ToString();
}
// Inserts the vocabulary header row (name, description, both language locales),
// then converts its types, categories and words in dependency order.
private static void ConvertVocabulary(IVocabulary voc, SqlCeConnection conn)
{
    Console.Write("vocabulary [{0}]...", voc.Name);
    using (SqlCeCommand cmd = conn.CreateCommand())
    {
        cmd.CommandText = "INSERT INTO Vocabularies (Name, Description, TargetLang, NativeLang) VALUES (@Name, @Description, @TargetLang, @NativeLang)";
        cmd.Parameters.Add(new SqlCeParameter("Name", voc.Name));
        cmd.Parameters.Add(new SqlCeParameter("Description", voc.Description));
        cmd.Parameters.Add(new SqlCeParameter("TargetLang", voc.TargetLanguage.InputLocale.Name));
        cmd.Parameters.Add(new SqlCeParameter("NativeLang", voc.NativeLanguage.InputLocale.Name));
        cmd.Prepare();
        cmd.ExecuteNonQuery();
    }
    Console.WriteLine("ok");
    // Types must exist before words (words reference type ids).
    ConvertTypes(voc, conn);
    ConvertCategories(voc, conn);
    ConvertWords(voc, conn);
}
// Convenience overload: serialize without rule names or an ATN.
public DFASerializer(DFA dfa, IVocabulary vocabulary)
    : this(dfa, vocabulary, null, null)
{
}
/// <summary>Serializes this DFA; a DFA whose start state was never set
/// renders as the empty string.</summary>
public virtual string ToString(IVocabulary vocabulary)
{
    if (s0.Get() == null)
    {
        return string.Empty;
    }

    var serializer = new DFASerializer(this, vocabulary);
    return serializer.ToString();
}