// P(f_i | c_i) under the Bernoulli event model: each feature is a binary
// trial, so add-delta smoothing spans 2 outcomes — the word occurring in a
// document of the class, and the word NOT occurring.
protected override double Calculate_Prob_f_c(int c_i, int f_i)
{
    double observed = count_f_c[f_i, c_i];   // docs of class c_i containing f_i
    double classTotal = count_c[c_i];        // docs of class c_i overall
    return SmoothingHelper.GetAddDeltaProbability(observed, classTotal, ConditionalProbabilityDelta, 2);
}
// Methods
// Trains the Bernoulli model: after the base pass fills prob_f_c, this caches
// log10(1 - P(f|c)) per feature/class, the per-class sum of those terms (the
// log-mass of a document containing no vocabulary words at all), and the
// smoothed log-probability of an out-of-vocabulary word per class.
protected override void Train()
{
    base.Train();

    logNegativeVocabulary = new double[NoOfClasses];
    logProbOOV_c = new double[NoOfClasses];
    logNegProb_f_c = new double[NoOfFeatures, NoOfClasses];

    for (int c_i = 0; c_i < NoOfClasses; c_i++)
    {
        for (int f_i = 0; f_i < NoOfFeatures; f_i++)
        {
            // Log-probability of feature f_i being ABSENT in class c_i.
            double logAbsent = Math.Log10(1 - prob_f_c[f_i, c_i]);
            logNegProb_f_c[f_i, c_i] = logAbsent;
            logNegativeVocabulary[c_i] += logAbsent;
        }

        // Zero observed count -> pure smoothing mass over the 2 Bernoulli outcomes.
        logProbOOV_c[c_i] = Math.Log10(SmoothingHelper.GetAddDeltaProbability(0, count_c[c_i], ConditionalProbabilityDelta, 2));
    }
}
// Methods
// Trains the multinomial model: accumulates per-class word-mass totals
// (countW_c) and the corpus-wide total (countW) from the training vectors,
// then runs the base training pass and caches the smoothed log-probability
// of an out-of-vocabulary word for each class.
protected override void Train()
{
    countW_c = new double[NoOfClasses];
    logProbOOV_c = new double[NoOfClasses];
    countW = 0; // reset so a repeated Train() call does not accumulate stale totals

    for (int v_i = 0; v_i < TrainingVectors.Count; v_i++)
    {
        int gold = TrainingVectors[v_i].Headers[Gold_i]; // gold-standard class of this vector
        for (int f_i = 0; f_i < TrainingVectors[v_i].Features.Length; f_i++)
        {
            double featureValue = TrainingVectors[v_i].Features[f_i];
            countW_c[gold] += featureValue;
            // BUG FIX: the original did `countW += countW_c[gold]`, adding the
            // RUNNING class total on every feature, which inflates countW with
            // repeated prefix sums. The corpus total must grow by the feature
            // value itself so that countW == sum over classes of countW_c[c],
            // which Calculate_Prob_c relies on as the smoothing denominator.
            countW += featureValue;
        }
    }

    base.Train();

    for (int c_i = 0; c_i < NoOfClasses; c_i++)
    {
        // Zero observed count -> pure add-delta smoothing mass over the vocabulary.
        logProbOOV_c[c_i] = Math.Log10(SmoothingHelper.GetAddDeltaProbability(0, countW_c[c_i], ConditionalProbabilityDelta, NoOfFeatures));
    }
}
// Class prior P(c_i): the fraction of all observed word mass belonging to
// class c_i, add-delta smoothed across the NoOfClasses possible classes.
protected override double Calculate_Prob_c(int c_i) =>
    SmoothingHelper.GetAddDeltaProbability(countW_c[c_i], countW, ClassPriorDelta, NoOfClasses);
// P(f_i | c_i) under the multinomial event model: occurrences of word f_i in
// class c_i over the class's total word mass, add-delta smoothed across the
// NoOfFeatures-word vocabulary.
protected override double Calculate_Prob_f_c(int c_i, int f_i) =>
    SmoothingHelper.GetAddDeltaProbability(count_f_c[f_i, c_i], countW_c[c_i], ConditionalProbabilityDelta, NoOfFeatures);