// Wires up the front-end pipeline: chains each DataProcessor to its
// predecessor in list order, recording the head in 'first' and the tail
// in 'last', then runs the front end's own initialize() hook.
private void init()
{
    this.timer = TimerPool.getTimer(this, "Frontend");
    this.last = null;
    for (Iterator it = this.frontEndList.iterator(); it.hasNext(); )
    {
        DataProcessor proc = (DataProcessor)it.next();
        if (!FrontEnd.assertionsDisabled && proc == null)
        {
            throw new AssertionError();
        }
        if (this.last != null)
        {
            // Link this stage to the previous one in the chain.
            proc.setPredecessor(this.last);
        }
        if (this.first == null)
        {
            this.first = proc;
        }
        this.last = proc;
    }
    this.initialize();
}
/* (non-Javadoc)
 * @see edu.cmu.sphinx.decoder.search.SearchManager#allocate()
 */
//@Override
public override void allocate()
{
    // Register the decoder-wide statistics this search manager reports.
    totalTokensScored = StatisticsVariable.getStatisticsVariable("totalTokensScored");
    tokensPerSecond = StatisticsVariable.getStatisticsVariable("tokensScoredPerSecond");
    curTokensScored = StatisticsVariable.getStatisticsVariable("curTokensScored");
    tokensCreated = StatisticsVariable.getStatisticsVariable("tokensCreated");
    viterbiPruned = StatisticsVariable.getStatisticsVariable("viterbiPruned");
    beamPruned = StatisticsVariable.getStatisticsVariable("beamPruned");

    // Bring up the three collaborators; any I/O failure aborts allocation.
    try
    {
        linguist.allocate();
        pruner.allocate();
        scorer.allocate();
    }
    catch (IOException ioe)
    {
        throw new SystemException("Allocation of search manager resources failed", ioe);
    }

    // Timers for the three phases of each decoding frame.
    scoreTimer = TimerPool.getTimer(this, "Score");
    pruneTimer = TimerPool.getTimer(this, "Prune");
    growTimer = TimerPool.getTimer(this, "Grow");
}
/// <summary>
/// Allocates the dictionary, resets the grammar, and builds the grammar
/// graph, timing the load under the shared "grammarLoad" timer.
/// </summary>
public virtual void allocate()
{
    this.dictionary.allocate();
    this.newGrammar();
    sphinx.util.Timer loadTimer = TimerPool.getTimer(this, "grammarLoad");
    loadTimer.start();
    this.initialNode = this.createGrammar();
    loadTimer.stop();
}
/// <summary>
/// Runs the gap-insertion detector over the reference transcript and the
/// hypothesis file, timing the pass under the shared
/// "GapInsertionDetector" timer.
/// </summary>
/// <returns>the number of gap-insertion errors detected</returns>
private int detectGapInsertionErrors()
{
    util.Timer gapTimer = TimerPool.getTimer(this, "GapInsertionDetector");
    gapTimer.start();
    GapInsertionDetector detector = new GapInsertionDetector(
        this.dataSource.getTranscriptFile(),
        this.hypothesisFile,
        this.showGapInsertions);
    int errorCount = detector.detect();
    gapTimer.stop();
    return errorCount;
}
// Benchmarks the linguist: for numRuns runs of numFrames frames each, it
// expands every state in the current beam, shuffles the successors with a
// fixed-seed Random, and truncates the beam to maxBeam states. Per-frame
// expansion is timed under "frameTimer", the whole benchmark under
// "totalTimer"; accumulated statistics are printed at the end.
public virtual void timeLinguist(int numRuns, int numFrames, int maxBeam) {
    // Fixed seed (1000) so every benchmark run shuffles identically.
    java.util.Random random = new java.util.Random((long)((ulong)1000));
    sphinx.util.Timer timer = TimerPool.getTimer(this, "frameTimer");
    sphinx.util.Timer timer2 = TimerPool.getTimer(this, "totalTimer");
    [email protected](new StringBuilder().append("TestLinguist: runs ").append(numRuns).append(" frames ").append(numFrames).append(" beam ").append(maxBeam).toString());
    timer2.start();
    for (int i = 0; i < numRuns; i++) {
        // Frame number handed to expandState; NOTE(review): never
        // incremented in this loop — confirm that is intentional.
        int num = 0;
        // The active beam; seeded with the search graph's initial state.
        object obj = new ArrayList();
        ((ArrayList)obj).add(this.linguist.getSearchGraph().getInitialState());
        this.linguist.startRecognition();
        for (int j = 0; j < numFrames; j++) {
            object obj2 = obj;
            // New successor list, sized generously to avoid regrowth.
            obj = new ArrayList(maxBeam * 10);
            timer.start();
            // Decompiler artifact: checked downcast of the previous beam
            // to List (Java checkcast rendered as 'as' + throw).
            object obj3 = obj2;
            List list;
            if (obj3 != null) {
                if ((list = (obj3 as List)) == null) {
                    throw new IncompatibleClassChangeError();
                }
            } else {
                list = null;
            }
            // Expand every state from the previous frame into the new beam.
            Iterator iterator = list.iterator();
            while (iterator.hasNext()) {
                SearchState searchState = (SearchState)iterator.next();
                this.expandState(num, (ArrayList)obj, searchState);
            }
            timer.stop();
            // Randomize, then truncate to the beam width.
            Collections.shuffle((ArrayList)obj, random);
            if (((ArrayList)obj).size() > maxBeam) {
                obj = ((ArrayList)obj).subList(0, maxBeam);
            }
        }
        this.linguist.stopRecognition();
    }
    timer2.stop();
    // Dump the statistics accumulated by expandState.
    [email protected](new StringBuilder().append(" MaxSuccessors : ").append(this.maxSuccessors).toString());
    [email protected](new StringBuilder().append(" TotalStates : ").append(this.totalStates).toString());
    [email protected](new StringBuilder().append(" TotalEmitting : ").append(this.totalEmittingStates).toString());
    [email protected](new StringBuilder().append(" NonEmitting : ").append(this.totalNonEmittingStates).toString());
    [email protected](new StringBuilder().append(" Final States : ").append(this.totalFinalStates).toString());
}
/// <summary>
/// Allocates the dynamic flat linguist (DFLAT): brings up the acoustic
/// model and grammar, builds the HMM pool and node-lookup maps, and
/// compiles the grammar under the shared "compileGrammar" timer.
/// </summary>
public override void allocate()
{
    this.logger.info("Allocating DFLAT");
    this.allocateAcousticModel();
    this.grammar.allocate();
    this.hmmPool = new HMMPool(this.acousticModel, this.logger, this.unitManager);
    this.nodeToNextUnitArrayMap = new HashMap();
    this.nodeToUnitSetMap = new HashMap();
    sphinx.util.Timer compileTimer = TimerPool.getTimer(this, "compileGrammar");
    compileTimer.start();
    this.compileGrammar();
    compileTimer.stop();
    this.logger.info("Done allocating DFLAT");
}
// Loads a trie-based binary n-gram language model from 'location',
// choosing a stream- or file-based BinaryLoader depending on the URL
// protocol, and populates counts, quantization tables, unigrams, the
// n-gram trie, the word list, and the probability cache.
public override void allocate() {
    TimerPool.getTimer(this, "Load LM").start();
    this.logger.info(new StringBuilder().append("Loading n-gram language model from: ").append(this.location).toString());
    if (this.ngramLogFile != null) {
        this.logFile = new PrintWriter(new FileOutputStream(this.ngramLogFile));
    }
    BinaryLoader binaryLoader;
    if (this.location.getProtocol() != null) {
        if (!String.instancehelper_equals(this.location.getProtocol(), "file")) {
            // Non-file URL: load through the URL directly and skip the
            // file-path constructors below (decompiler goto/label).
            binaryLoader = new BinaryLoader(this.location);
            goto IL_EC;
        }
    }
    try {
        binaryLoader = new BinaryLoader(new File(this.location.toURI()));
    } catch (System.Exception) {
        // URI conversion failed (e.g. illegal characters in the URL);
        // fall back to the raw path form.
        binaryLoader = new BinaryLoader(new File(this.location.getPath()));
    }
    IL_EC:
    binaryLoader.verifyHeader();
    this.counts = binaryLoader.readCounts();
    // Clamp maxDepth to the n-gram order actually present in the file.
    if (this.maxDepth <= 0 || this.maxDepth > this.counts.Length) {
        this.maxDepth = this.counts.Length;
    }
    // Quantization tables exist only for orders above unigram.
    if (this.maxDepth > 1) {
        this.quant = binaryLoader.readQuant(this.maxDepth);
    }
    this.unigrams = binaryLoader.readUnigrams(this.counts[0]);
    if (this.maxDepth > 1) {
        // Higher-order n-grams live in a quantized trie read straight
        // into the trie's backing byte array.
        this.trie = new NgramTrie(this.counts, this.quant.getProbBoSize(), this.quant.getProbSize());
        binaryLoader.readTrieByteArr(this.trie.getMem());
    }
    this.words = binaryLoader.readWords(this.counts[0]);
    this.buildUnigramIDMap();
    this.ngramProbCache = new LRUCache(this.ngramCacheSize);
    binaryLoader.close();
    TimerPool.getTimer(this, "Load LM").stop();
}
/// <summary>
/// Allocates this search manager: fetches the per-phase timers and the
/// shared token statistics, then brings up the linguist, pruner, and
/// scorer. I/O failures during allocation surface as RuntimeException.
/// </summary>
public override void allocate()
{
    // Timers for the three phases of each decoding frame.
    this.scoreTimer = TimerPool.getTimer(this, "Score");
    this.pruneTimer = TimerPool.getTimer(this, "Prune");
    this.growTimer = TimerPool.getTimer(this, "Grow");

    // Decoder-wide token statistics.
    this.totalTokensScored = StatisticsVariable.getStatisticsVariable("totalTokensScored");
    this.curTokensScored = StatisticsVariable.getStatisticsVariable("curTokensScored");
    this.tokensCreated = StatisticsVariable.getStatisticsVariable("tokensCreated");

    try
    {
        this.linguist.allocate();
        this.pruner.allocate();
        this.scorer.allocate();
    }
    catch (IOException ioe)
    {
        throw new RuntimeException("Allocation of search manager resources failed", ioe);
    }
}
/// <summary>
/// Loads the word and filler dictionaries (plus any addenda URLs), and
/// sets up a G2P decoder when a g2p model file is configured. Does
/// nothing if the dictionary is already allocated.
/// </summary>
public override void allocate()
{
    if (this.allocated)
    {
        return;
    }
    // NOTE(review): 'allocated' is not set to true in this method —
    // presumably maintained elsewhere; confirm against the full class.
    this.dictionary = new HashMap();
    this.wordDictionary = new HashMap();
    this.fillerWords = new HashSet();
    sphinx.util.Timer loadTimer = TimerPool.getTimer(this, "Load Dictionary");
    loadTimer.start();
    this.logger.info(new StringBuilder().append("Loading dictionary from: ").append(this.wordDictionaryFile).toString());
    this.loadDictionary(this.wordDictionaryFile.openStream(), false);
    this.loadCustomDictionaries(this.addendaUrlList);
    this.logger.info(new StringBuilder().append("Loading filler dictionary from: ").append(this.fillerDictionaryFile).toString());
    this.loadDictionary(this.fillerDictionaryFile.openStream(), true);
    if (this.g2pModelFile != null && !String.instancehelper_equals(this.g2pModelFile.getPath(), ""))
    {
        this.g2pDecoder = new G2PConverter(this.g2pModelFile);
    }
    loadTimer.stop();
}
/* /// (non-Javadoc)
 * /// @see edu.cmu.sphinx.decoder.search.SearchManager#allocate()
 */
override public void allocate()
{
    // Timers for the score/prune/grow phases of each decoding frame.
    scoreTimer = TimerPool.getTimer(this, "Score");
    pruneTimer = TimerPool.getTimer(this, "Prune");
    growTimer = TimerPool.getTimer(this, "Grow");

    // Decoder-wide token statistics.
    totalTokensScored = StatisticsVariable.getStatisticsVariable("totalTokensScored");
    curTokensScored = StatisticsVariable.getStatisticsVariable("curTokensScored");
    tokensCreated = StatisticsVariable.getStatisticsVariable("tokensCreated");

    // Bring up the collaborators; wrap I/O failures for the caller.
    try
    {
        linguist.allocate();
        pruner.allocate();
        scorer.allocate();
    }
    catch (IOException ioe)
    {
        throw new SystemException("Allocation of search manager resources failed", ioe);
    }
}
// Micro-benchmark: performs one million HMM lookups, cycling through the
// static unit-id and position tables, and reports how many lookups
// returned null. Timed under the shared "hmmPoolBenchmark" timer.
internal virtual void benchmark() {
    int num = 0; // count of lookups that returned null
    [email protected]("benchmarking ...");
    TimerPool.getTimer(this, "hmmPoolBenchmark").start();
    for (int i = 0; i < 1000000; i++) {
        // Decompiler artifact: the '!= -1' guards are the translator's
        // rendering of the Java 'i % array.length' remainder; they must
        // stay as-is to preserve the compiled semantics.
        int[] array = HMMPool.ids;
        int num2 = i;
        int num3 = HMMPool.ids.Length;
        int unitID = array[(num3 != -1) ? (num2 % num3) : 0];
        HMMPosition[] array2 = HMMPool.pos;
        int num4 = i;
        int num5 = HMMPool.pos.Length;
        HMMPosition position = array2[(num5 != -1) ? (num4 % num5) : 0];
        if (this.getHMM(unitID, position) == null) {
            num++;
        }
    }
    TimerPool.getTimer(this, "hmmPoolBenchmark").stop();
    [email protected](new StringBuilder().append("null count ").append(num).toString());
}
/// <summary>
/// Loads the acoustic model files exactly once; subsequent calls are
/// no-ops. Timed under the shared "Load AM" timer.
/// </summary>
public virtual void load()
{
    if (this.loaded)
    {
        return;
    }
    TimerPool.getTimer(this, "Load AM").start();
    this.hmmManager = new HMMManager();
    this.contextIndependentUnits = new LinkedHashMap();
    // No mean/variance transformations by default.
    this.meanTransformationMatrixPool = null;
    this.meanTransformationVectorPool = null;
    this.varianceTransformationMatrixPool = null;
    this.varianceTransformationVectorPool = null;
    this.transformMatrix = (float[][])null;
    try
    {
        this.loadModelFiles();
    }
    catch (URISyntaxException badUri)
    {
        throw new RuntimeException(badUri);
    }
    this.loaded = true;
    TimerPool.getTimer(this, "Load AM").stop();
}
// Loads a large binary n-gram language model: chooses a stream- or
// file-based loader by URL protocol, reads the unigrams, allocates the
// per-order n-gram buffer/probability/backoff/segment tables, builds the
// unigram-ID map, and optionally reads or computes "smear" information.
public override void allocate() {
    TimerPool.getTimer(this, "Load LM").start();
    this.logger.info(new StringBuilder().append("Loading n-gram language model from: ").append(this.location).toString());
    if (this.ngramLogFile != null) {
        this.logFile = new PrintWriter(new FileOutputStream(this.ngramLogFile));
    }
    if (this.location.getProtocol() != null) {
        if (!String.instancehelper_equals(this.location.getProtocol(), "file")) {
            // Non-file URL: stream the model and skip the file-path
            // constructors below (decompiler goto/label).
            this.loader = new BinaryStreamLoader(this.location, this.format, this.applyLanguageWeightAndWip, this.languageWeight, this.wip, this.unigramWeight);
            goto IL_15B;
        }
    }
    try {
        this.loader = new BinaryLoader(new File(this.location.toURI()), this.format, this.applyLanguageWeightAndWip, this.languageWeight, this.wip, this.unigramWeight);
    } catch (System.Exception) {
        // URI conversion failed; fall back to the raw path form.
        this.loader = new BinaryLoader(new File(this.location.getPath()), this.format, this.applyLanguageWeightAndWip, this.languageWeight, this.wip, this.unigramWeight);
    }
    IL_15B:
    this.unigramIDMap = new HashMap();
    this.unigrams = this.loader.getUnigrams();
    // One slot per n-gram order (index i-1 holds order-i data).
    this.loadedNGramBuffers = new Map[this.loader.getMaxDepth()];
    this.ngramProbTable = new float[this.loader.getMaxDepth()][];
    this.ngramBackoffTable = new float[this.loader.getMaxDepth()][];
    this.ngramSegmentTable = new int[this.loader.getMaxDepth()][];
    for (int i = 1; i <= this.loader.getMaxDepth(); i++) {
        this.loadedNGramBuffers[i - 1] = new HashMap();
        // Probabilities exist for bigrams and up; backoff weights and
        // segment tables only for trigrams and up.
        if (i >= 2) {
            this.ngramProbTable[i - 1] = this.loader.getNGramProbabilities(i);
        }
        if (i > 2) {
            this.ngramBackoffTable[i - 1] = this.loader.getNGramBackoffWeights(i);
            this.ngramSegmentTable[i - 1] = this.loader.getNGramSegments(i);
        }
    }
    this.ngramProbCache = new LRUCache(this.ngramCacheSize);
    // Use the dictionary-aware ID map when a dictionary is configured.
    if (this.dictionary != null) {
        this.buildUnigramIDMap(this.dictionary);
    } else {
        this.buildUnigramIDMap();
    }
    this.loadedBigramBuffers = new NGramBuffer[this.unigrams.Length];
    // Clamp maxDepth to the order actually present in the model.
    if (this.maxDepth <= 0 || this.maxDepth > this.loader.getMaxDepth()) {
        this.maxDepth = this.loader.getMaxDepth();
    }
    // Log the n-gram counts per order.
    for (int i = 1; i <= this.loader.getMaxDepth(); i++) {
        this.logger.info(new StringBuilder().append(Integer.toString(i)).append("-grams: ").append(this.loader.getNumberNGrams(i)).toString());
    }
    if (this.fullSmear) {
        // Try to read precomputed smear info; on failure, compute it.
        // NOTE(review): "... Writing" is printed but no write call is
        // visible here — possibly dropped by the decompiler; verify.
        [email protected]("Full Smear");
        try {
            [email protected]("... Reading ...");
            this.readSmearInfo("smear.dat");
            [email protected]("... Done ");
        } catch (IOException ex2) {
            [email protected](new StringBuilder().append("... ").append(ex2).toString());
            [email protected]("... Calculating");
            this.buildSmearInfo();
            [email protected]("... Writing");
            [email protected]("... Done");
        }
    }
    TimerPool.getTimer(this, "Load LM").stop();
}
/// <summary>
/// Builds a fully connected (bigram) grammar graph from the language
/// model vocabulary: one grammar node per word, the sentence-end node
/// marked final, and every non-final node linked to every node with the
/// bigram probability from the language model.
/// </summary>
/// <returns>the grammar node for the sentence-start word</returns>
protected internal override GrammarNode createGrammar()
{
    this.languageModel.allocate();
    TimerPool.getTimer(this, "LMGrammar.create").start();
    GrammarNode startNode = null;
    if (this.languageModel.getMaxDepth() > 2)
    {
        [email protected]("Warning: LMGrammar limited to bigrams");
    }
    // Phase 1: one grammar node per vocabulary word.
    ArrayList nodes = new ArrayList();
    Iterator wordIter = this.languageModel.getVocabulary().iterator();
    while (wordIter.hasNext())
    {
        string spelling = (string)wordIter.next();
        GrammarNode node = this.createGrammarNode(spelling);
        if (node != null && !node.isEmpty())
        {
            if (node.getWord().equals(this.getDictionary().getSentenceStartWord()))
            {
                startNode = node;
            }
            else if (node.getWord().equals(this.getDictionary().getSentenceEndWord()))
            {
                node.setFinalNode(true);
            }
            nodes.add(node);
        }
    }
    if (startNode == null)
    {
        throw new Error("No sentence start found in language model");
    }
    // Phase 2: fully connect — every non-final node gets an arc to every
    // node, weighted by the bigram probability P(to | from).
    Iterator fromIter = nodes.iterator();
    while (fromIter.hasNext())
    {
        GrammarNode fromNode = (GrammarNode)fromIter.next();
        if (fromNode.isFinalNode())
        {
            continue;
        }
        Iterator toIter = nodes.iterator();
        while (toIter.hasNext())
        {
            GrammarNode toNode = (GrammarNode)toIter.next();
            string fromSpelling = fromNode.getWord().getSpelling();
            string toSpelling = toNode.getWord().getSpelling();
            Word[] pair = new Word[]
            {
                this.getDictionary().getWord(fromSpelling),
                this.getDictionary().getWord(toSpelling)
            };
            float logProb = this.languageModel.getProbability(new WordSequence(pair));
            fromNode.add(toNode, logProb);
        }
    }
    TimerPool.getTimer(this, "LMGrammar.create").stop();
    this.languageModel.deallocate();
    return startNode;
}
/// <summary>Returns the shared timer used to measure alignment time.</summary>
private util.Timer getAlignTimer()
{
    return TimerPool.getTimer(this, "Align");
}
// Compiles the grammar into a sentence HMM search graph in four timed
// phases: create a GState per grammar node, collect contexts, expand each
// GState into HMM states, and connect them. Optionally attaches an
// out-of-grammar phone-loop branch, then releases the temporary maps.
protected internal virtual Collection compileGrammar() {
    this.initialGrammarState = this.grammar.getInitialNode();
    this.nodeStateMap = new HashMap();
    this.arcPool = new Cache();
    ArrayList arrayList = new ArrayList();
    TimerPool.getTimer(this, "Compile").start();
    // Phase 1: one GState per grammar node.
    TimerPool.getTimer(this, "Create States").start();
    Iterator iterator = this.grammar.getGrammarNodes().iterator();
    while (iterator.hasNext()) {
        GrammarNode grammarNode = (GrammarNode)iterator.next();
        FlatLinguist.GState gstate = this.createGState(grammarNode);
        arrayList.add(gstate);
    }
    TimerPool.getTimer(this, "Create States").stop();
    this.addStartingPath();
    // Phase 2: gather left/right contexts for every GState.
    TimerPool.getTimer(this, "Collect Contexts").start();
    iterator = arrayList.iterator();
    while (iterator.hasNext()) {
        FlatLinguist.GState gstate2 = (FlatLinguist.GState)iterator.next();
        gstate2.collectContexts();
    }
    TimerPool.getTimer(this, "Collect Contexts").stop();
    // Phase 3: expand each GState into its sentence HMM states.
    TimerPool.getTimer(this, "Expand States").start();
    iterator = arrayList.iterator();
    while (iterator.hasNext()) {
        FlatLinguist.GState gstate2 = (FlatLinguist.GState)iterator.next();
        gstate2.expand();
    }
    TimerPool.getTimer(this, "Expand States").stop();
    // Phase 4: connect the expanded states across GStates.
    TimerPool.getTimer(this, "Connect Nodes").start();
    iterator = arrayList.iterator();
    while (iterator.hasNext()) {
        FlatLinguist.GState gstate2 = (FlatLinguist.GState)iterator.next();
        gstate2.connect();
    }
    TimerPool.getTimer(this, "Connect Nodes").stop();
    SentenceHMMState sentenceHMMState = this.findStartingState();
    // Optional out-of-grammar branch: a CI phone loop hung off the start
    // state with the configured branch probability.
    if (this.addOutOfGrammarBranch) {
        CIPhoneLoop ciphoneLoop = new CIPhoneLoop(this.phoneLoopAcousticModel, this.logPhoneInsertionProbability);
        SentenceHMMState nextState = (SentenceHMMState)ciphoneLoop.getSearchGraph().getInitialState();
        sentenceHMMState.connect(this.getArc(nextState, 0f, this.logOutOfGrammarBranchProbability));
    }
    this.searchGraph = new FlatLinguist.FlatSearchGraph(this, sentenceHMMState);
    TimerPool.getTimer(this, "Compile").stop();
    if (this.dumpGStates) {
        Iterator iterator2 = this.grammar.getGrammarNodes().iterator();
        while (iterator2.hasNext()) {
            GrammarNode node = (GrammarNode)iterator2.next();
            FlatLinguist.GState gstate3 = this.getGState(node);
            gstate3.dumpInfo();
        }
    }
    // Free the compile-time scratch structures.
    this.nodeStateMap = null;
    this.arcPool = null;
    return(SentenceHMMState.collectStates(sentenceHMMState));
}