/// <summary>
/// Preloads the complete lexicon into this cache by adding every <see cref="ITermLemma"/>
/// from the context, logging progress at roughly 5% intervals.
/// </summary>
/// <param name="loger">The log builder to report progress to.</param>
/// <param name="context">The semantic lexicon context providing <c>TermLemmas</c>.</param>
/// <remarks>
/// No-op if <c>isLexiconPreloaded</c> is already set. On completion it disables further
/// lexicon queries (<c>doAllowLexiconQuery = false</c>) and marks the lexicon preloaded.
/// Assumes <c>lemmaCount</c> was set beforehand (see <c>prepare</c>) — TODO confirm callers.
/// </remarks>
public void preloadLexicon(ILogBuilder loger, semanticLexiconContext context)
{
    loger.log("Preloading Semantic Lexicon");

    if (isLexiconPreloaded)
    {
        loger.log("Semantic Lexicon is already loaded");
        return;
    }

    double ratio = 0;
    int i = 0;

    // Report every ~5% of lemmas. Clamp the interval to at least 1: with the old
    // code, lemmaCount < 20 yielded an interval of 0, so the countdown went straight
    // to -1 and the "== 0" check never fired, silently disabling progress logging.
    int repInterval = lemmaCount / 20;
    if (repInterval < 1) repInterval = 1;
    int repIndex = repInterval;

    foreach (ITermLemma lemma in context.TermLemmas)
    {
        Add(lemma);
        i++;
        repIndex--;
        if (repIndex == 0)
        {
            repIndex = repInterval;
            ratio = (double)i / (double)lemmaCount;
            loger.log("Loaded [" + i + "] lemmas (" + ratio.ToString("P2") + ")");
        }
    }

    doAllowLexiconQuery = false;
    isLexiconPreloaded = true;
    loger.log("Semantic Lexicon preload done");
}
/// <summary>
/// Prepares this manager instance: pokes settings, builds the lexicon constructor,
/// verifies source files, opens the console, and connects the BrightStarDB-backed
/// semantic lexicon context.
/// </summary>
/// <exception cref="aceGeneralException">Thrown when any configured lexicon source file is missing.</exception>
/// <remarks>
/// Idempotent via the <c>isReady</c> guard. NOTE(review): the BrightStar REST endpoint
/// is hard-coded to localhost:8090 — presumably a development default; verify for deployment.
/// </remarks>
public override void prepare()
{
    // already prepared — nothing to do
    if (isReady)
    {
        return;
    }

    imbLanguageFrameworkManager.log.log("Semantic lexicon manager prepare start");

    // Poke() presumably forces lazy settings to load/initialize — TODO confirm semantics
    constructionSettings.Poke();
    settings.Poke();

    if (constructor == null)
    {
        constructor = new lexiconConstructor(constructionSettings);
    }

    // verify all configured source files exist; log each missing one before failing
    var missing = settings.sourceFiles.checkMissingFiles();
    foreach (string mfile in missing)
    {
        imbLanguageFrameworkManager.log.log("Source file missing: " + mfile);
    }

    if (missing.Any())
    {
        // abort preparation: the lexicon cannot be built without its source files
        aceGeneralException axe = new aceGeneralException("Lexicon source files missing", null, missing, "Lexicon source files missing");
        throw axe;
    }

    consoleSettings.Poke();
    console = new lexiconConsole(consoleSettings.defaultSession, consoleSettings);

    // configure the BrightStarDB provider, then open the lexicon store over REST
    semanticLexiconExtensions.SetBrightStarDB();

    //String conString = "type=rest;storesdirectory=G:\\BrightStarDB\\;storename=lex";
    string conString = @"type=rest;endpoint=http://localhost:8090/brightstar;storename=lex";
    lexiconContext = new semanticLexiconContext(conString);

    imbLanguageFrameworkManager.log.log("Semantic lexicon manager prepare finished");
}
/// <summary>
/// Loads the on-disk cache files: the negative-query list and the encoding twins,
/// rebuilding the twins from the lexicon context when the loaded set is empty.
/// </summary>
/// <param name="loger">The log builder to report progress to.</param>
/// <param name="context">The semantic lexicon context, used only if a twins rebuild is needed.</param>
protected void LoadCacheFiles(ILogBuilder loger, semanticLexiconContext context)
{
    // load (or create) the file of queries known to have no lexicon hit
    failedQueries = new fileunit(folder.pathFor("lexiconCache_negatives.txt"), true);
    loger.log("Negative queries loaded");

    // seed the temporary cache with the negative queries — flags presumably mark
    // them as failed/negative entries; TODO confirm AddTemp's bool parameters
    AddTemp(failedQueries.contentLines, loger, true, true);

    loger.log("Loading encoding twins");
    twins.Load(twinsSavePath, loger);
    loger.log("Encoding twins loaded");

    // empty twins file (first run or wiped cache) — regenerate from the lexicon
    if (twins.Count == 0)
    {
        rebuildEncodedTwins(loger, context);
    }

    failedQueries.Save();
}
/// <summary>
/// Prepares the lexicon cache: counts lexical resources in the context, loads the
/// cache files, reports status, and optionally preloads the complete lexicon.
/// </summary>
/// <param name="loger">The log builder to report progress to; also attached as console output.</param>
/// <param name="context">The semantic lexicon context to count and cache from.</param>
/// <param name="preload">If set to <c>true</c>, calls <c>preloadLexicon</c> after cache files are loaded.</param>
public void prepare(ILogBuilder loger, semanticLexiconContext context, bool preload = false)
{
    // route this logger to the console under the "LexCache" channel
    aceLog.consoleControl.setAsOutput(loger as IConsoleControl, "LexCache");

    lexiconContext = context;

    // counts drive the progress-report intervals used by preload/rebuild
    loger.log("Counting lexic resources...");
    lemmaCount = context.TermLemmas.Count();
    instanceCount = context.TermInstances.Count();
    conceptCount = context.Concepts.Count();

    twinsSavePath = folder.pathFor(FILENAME_TWINS);

    loger.log("Loading cache files");
    LoadCacheFiles(loger, context);

    AppendStatus(loger);

    if (preload)
    {
        preloadLexicon(loger, context);
    }
}
/// <summary>
/// Rebuilds the lower-encoding twins table: clears it, then registers every lemma
/// and instance name that contains non-DOS characters, logging progress at roughly
/// 5% intervals, and saves the result.
/// </summary>
/// <param name="loger">The log builder to report progress to.</param>
/// <param name="context">The semantic lexicon context providing <c>TermLemmas</c>.</param>
/// <remarks>
/// Assumes <c>lemmaCount</c> reflects the size of <c>context.TermLemmas</c>
/// (set in <c>prepare</c>) — TODO confirm callers.
/// </remarks>
public void rebuildEncodedTwins(ILogBuilder loger, semanticLexiconContext context)
{
    twins.Clear(true);

    loger.log("Rebuilding Semantic Lexicon lower encoding twins");

    double ratio = 0;
    int i = 0;

    // Report every ~5% of lemmas. Clamp the interval to at least 1: with the old
    // code, lemmaCount < 20 yielded an interval of 0, so the countdown went straight
    // to -1 and the "== 0" check never fired, silently disabling progress logging.
    int repInterval = lemmaCount / 20;
    if (repInterval < 1) repInterval = 1;
    int repIndex = repInterval;

    foreach (ITermLemma lemma in context.TermLemmas)
    {
        // only names with non-DOS characters need a twin encoding
        if (lemma.name.isNonDosChars())
        {
            twins.SetWord(lemma.name);
        }

        foreach (ITermInstance instance in lemma.instances)
        {
            if (instance.name.isNonDosChars())
            {
                twins.SetWord(instance.name);
            }
        }

        i++;
        repIndex--;
        if (repIndex == 0)
        {
            repIndex = repInterval;
            ratio = (double)i / (double)lemmaCount;
            loger.log("Recoded [" + i + "] lemmas (" + ratio.ToString("P2") + ")");
        }
    }

    twins.Save();

    // fixed copy-paste: the closing message previously duplicated the opening one
    loger.log("Rebuilding Semantic Lexicon lower encoding twins - finished");
}