/// <summary>
/// Advances the parse to the top node: scores the build decision that starts the TOP
/// constituent and the check decision that it is complete, adds both log probabilities
/// to the parse score, and labels the parse as the TOP node.
/// </summary>
private void AdvanceTop(Parse inputParse)
{
	// score the build decision that starts the TOP constituent
	mBuildModel.Evaluate(mBuildContextGenerator.GetContext(inputParse.GetChildren(), 0), mBuildProbabilities);
	inputParse.AddProbability(System.Math.Log(mBuildProbabilities[mTopStartIndex]));

	// score the check decision that the TOP constituent is complete
	mCheckModel.Evaluate(mCheckContextGenerator.GetContext(inputParse.GetChildren(), TopNode, 0, 0), mCheckProbabilities);
	inputParse.AddProbability(System.Math.Log(mCheckProbabilities[mCompleteIndex]));

	inputParse.Type = TopNode;
}
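// Illustrative sketch (not part of the original source): the score accumulated by
// AddProbability above is a sum of log probabilities, which is equivalent to multiplying
// the individual decision probabilities while avoiding floating-point underflow.
// CombineInLogSpaceSketch is a hypothetical, self-contained example of that accumulation.
private static double CombineInLogSpaceSketch(params double[] decisionProbabilities)
{
	double logScore = 0.0;
	foreach (double probability in decisionProbabilities)
	{
		// log(p1) + log(p2) + ... + log(pn) == log(p1 * p2 * ... * pn)
		logScore += System.Math.Log(probability);
	}
	return logScore;
}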
/// <summary>
/// Walks the flat sequence of chunks and adds the training events for the current event
/// type: build events (start/continue a constituent of the parent's type) and check events
/// (complete/incomplete constituent). When a constituent is complete, its children are
/// reduced to the parent node and scanning continues from the reduced position.
/// </summary>
private void AddParseEvents(List<SharpEntropy.TrainingEvent> events, Parse[] chunks)
{
	int currentChunk = 0;
	while (currentChunk < chunks.Length)
	{
		Parse chunkParse = chunks[currentChunk];
		Parse parentParse = chunkParse.Parent;
		if (parentParse != null)
		{
			string type = parentParse.Type;

			// build outcome: start a new constituent of this type, or continue the current one
			string outcome;
			if (IsFirstChild(chunkParse, parentParse))
			{
				outcome = MaximumEntropyParser.StartPrefix + type;
			}
			else
			{
				outcome = MaximumEntropyParser.ContinuePrefix + type;
			}
			chunkParse.Label = outcome;
			if (mEventType == EventType.Build)
			{
				events.Add(new SharpEntropy.TrainingEvent(outcome, mBuildContextGenerator.GetContext(chunks, currentChunk)));
			}

			// scan left to find the first sibling in the window with the same parent;
			// start + 1 is the index of that first sibling
			int start = currentChunk - 1;
			while (start >= 0 && chunks[start].Parent == parentParse)
			{
				start--;
			}

			if (IsLastChild(chunkParse, parentParse))
			{
				if (mEventType == EventType.Check)
				{
					events.Add(new SharpEntropy.TrainingEvent(MaximumEntropyParser.CompleteOutcome, mCheckContextGenerator.GetContext(chunks, type, start + 1, currentChunk)));
				}

				// perform reduce: replace the children [reduceStart, reduceEnd] with the parent node
				int reduceStart = currentChunk;
				int reduceEnd = currentChunk;
				while (reduceStart >= 0 && chunks[reduceStart].Parent == parentParse)
				{
					reduceStart--;
				}
				reduceStart++;

				if (type != MaximumEntropyParser.TopNode)
				{
					Parse[] reducedChunks = new Parse[chunks.Length - (reduceEnd - reduceStart + 1) + 1]; //total - num_removed + 1 (for new node)

					//insert nodes before reduction
					for (int reductionIndex = 0, reductionCount = reduceStart; reductionIndex < reductionCount; reductionIndex++)
					{
						reducedChunks[reductionIndex] = chunks[reductionIndex];
					}

					//insert reduced node
					reducedChunks[reduceStart] = parentParse;

					//insert nodes after reduction
					int currentReductionIndex = reduceStart + 1;
					for (int afterReductionIndex = reduceEnd + 1; afterReductionIndex < chunks.Length; afterReductionIndex++)
					{
						reducedChunks[currentReductionIndex] = chunks[afterReductionIndex];
						currentReductionIndex++;
					}

					chunks = reducedChunks;
					currentChunk = reduceStart - 1; //currentChunk will be incremented at end of loop
				}
				else
				{
					// the TOP node has been completed; nothing is left to reduce
					chunks = new Parse[0];
				}
			}
			else
			{
				if (mEventType == EventType.Check)
				{
					events.Add(new SharpEntropy.TrainingEvent(MaximumEntropyParser.IncompleteOutcome, mCheckContextGenerator.GetContext(chunks, type, start + 1, currentChunk)));
				}
			}
		}
		currentChunk++;
	}
}
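// Illustrative sketch (not part of the original source): the "reduce" step in AddParseEvents
// replaces the contiguous run of sibling chunks [reduceStart, reduceEnd] with their single
// parent node. ReduceSketch is a hypothetical, generic version of that array rewrite, shown
// with strings instead of Parse nodes; for example,
// ReduceSketch(new[] { "NP", "VBD", "NP", "." }, 1, 2, "VP") yields { "NP", "VP", "." }.
private static string[] ReduceSketch(string[] chunks, int reduceStart, int reduceEnd, string parent)
{
	// total - number of removed chunks + 1 slot for the new parent node
	string[] reduced = new string[chunks.Length - (reduceEnd - reduceStart + 1) + 1];
	for (int index = 0; index < reduceStart; index++)
	{
		reduced[index] = chunks[index];      // nodes before the reduction
	}
	reduced[reduceStart] = parent;           // the reduced (parent) node
	int target = reduceStart + 1;
	for (int index = reduceEnd + 1; index < chunks.Length; index++)
	{
		reduced[target++] = chunks[index];   // nodes after the reduction
	}
	return reduced;
}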