/// <summary>
/// Computes the negative log-likelihood (<c>value</c>) and its gradient
/// (<c>derivative</c>) for a binary logistic model with label noise on the
/// negative class: an observed 0 label is assumed correct only with
/// probability <c>probCorrect</c>, so its likelihood mixes both classes.
/// Only binary features without per-datum weights are supported.
/// </summary>
/// <param name="x">Current weight vector, indexed by feature id.</param>
/// <exception cref="NotSupportedException">
/// If real-valued features (<c>dataValues</c>) or per-datum weights
/// (<c>dataweights</c>) are configured.
/// </exception>
protected internal override void Calculate(double[] x)
{
    // Guard clauses: this objective does not handle real-valued features or
    // weighted data. (Previously threw a bare Exception from inside the loop.)
    if (dataValues != null)
    {
        throw new NotSupportedException("Real-valued features are not supported by this objective.");
    }
    if (dataweights != null)
    {
        throw new NotSupportedException("Per-datum weights are not supported by this objective.");
    }
    value = 0.0;
    Arrays.Fill(derivative, 0.0);
    for (int d = 0; d < data.Length; d++)
    {
        int[] features = data[d];
        // Linear score for this datum: sum of weights of its active features.
        double sum = 0;
        foreach (int feature in features)
        {
            sum += x[feature];
        }
        double derivativeIncrement;
        if (labels[d] == 1)
        {
            // Positive example: standard logistic loss -log(sigma(sum)).
            double g = 1 / (1 + Math.Exp(-sum));
            value -= Math.Log(g);
            derivativeIncrement = g - 1;
        }
        else
        {
            // Observed negative example under label noise: correct with
            // probability probCorrect (likelihood 1-g), flipped with
            // probability 1-probCorrect (likelihood g).
            double g = 1 / (1 + Math.Exp(-sum));
            double e = (1 - probCorrect) * g + probCorrect * (1 - g);
            value -= Math.Log(e);
            derivativeIncrement = -(g * (1 - g) * (1 - 2 * probCorrect)) / e;
        }
        // Binary features: each active feature gets the same gradient update.
        foreach (int feature in features)
        {
            derivative[feature] += derivativeIncrement;
        }
    }
    // Regularization: prior.Compute also accumulates into derivative.
    value += prior.Compute(x, derivative);
}
/// <summary>
/// Computes the negative conditional log-likelihood (<c>value</c>) and its
/// gradient (<c>derivative</c>) for binary logistic regression over binary
/// features, with optional per-datum weights. Real-valued feature data is
/// delegated to <c>CalculateRVF</c>.
/// </summary>
/// <param name="x">Current weight vector, indexed by feature id.</param>
protected internal override void Calculate(double[] x)
{
    if (dataValues != null)
    {
        CalculateRVF(x);
        return;
    }
    value = 0.0;
    Arrays.Fill(derivative, 0.0);
    for (int d = 0; d < data.Length; d++)
    {
        int[] activeFeatures = data[d];
        // Margin for this datum: sum of weights over its active features.
        double margin = 0;
        foreach (int f in activeFeatures)
        {
            margin += x[f];
        }
        // For label 0 the loss term is log(1 + e^margin) with positive
        // gradient sigma(margin); for label 1 it is log(1 + e^-margin) with
        // gradient -(1 - sigma(margin)). Select via the label's sign.
        bool isNegativeExample = labels[d] == 0;
        double expSum = Math.Exp(isNegativeExample ? margin : -margin);
        double derivativeIncrement = (isNegativeExample ? 1.0 : -1.0) / (1.0 + (1.0 / expSum));
        double lossTerm = Math.Log(1.0 + expSum);
        if (dataweights != null)
        {
            lossTerm *= dataweights[d];
            derivativeIncrement *= dataweights[d];
        }
        value += lossTerm;
        foreach (int f in activeFeatures)
        {
            derivative[f] += derivativeIncrement;
        }
    }
    // Regularization: prior.Compute also accumulates into derivative.
    value += prior.Compute(x, derivative);
}
/// <summary>
/// Computes the negative log-likelihood (<c>value</c>) and gradient
/// (<c>derivative</c>) for a multi-class model whose observed labels are
/// linked to true labels through a confusion matrix:
/// P(observed | datum) = sum_t confusionMatrix[observed][t] * P(t | datum).
/// </summary>
/// <param name="x">Weight vector laid out by <c>IndexOf(feature, class)</c>.</param>
protected internal override void Calculate(double[] x)
{
    // Lazily allocate the gradient buffer on first use; otherwise clear it.
    if (derivative == null)
    {
        derivative = new double[x.Length];
    }
    else
    {
        Arrays.Fill(derivative, 0.0);
    }
    value = 0.0;
    double[] classScores = new double[numClasses];
    double[] modelProbs = new double[numClasses];
    double[] posteriorProbs = new double[numClasses];
    for (int d = 0; d < data.Length; d++)
    {
        int[] features = data[d];
        int observed = labels[d];
        // Raw class scores: sum of weights for each (feature, class) pair.
        Arrays.Fill(classScores, 0.0);
        for (int c = 0; c < numClasses; c++)
        {
            foreach (int f in features)
            {
                classScores[c] += x[IndexOf(f, c)];
            }
        }
        double logZ = ArrayMath.LogSum(classScores);
        // Scores reweighted (in log space) by the probability of seeing the
        // observed label given each candidate true label.
        double[] noisyScores = new double[numClasses];
        for (int t = 0; t < numClasses; t++)
        {
            noisyScores[t] = System.Math.Log(confusionMatrix[observed][t]) + classScores[t];
        }
        double noisyLogZ = ArrayMath.LogSum(noisyScores);
        for (int c = 0; c < numClasses; c++)
        {
            modelProbs[c] = System.Math.Exp(classScores[c] - logZ);
            posteriorProbs[c] = System.Math.Exp(noisyScores[c] - noisyLogZ);
            // Gradient = model expectation minus posterior expectation over
            // the true label, applied to every active feature of this datum.
            foreach (int f in features)
            {
                derivative[IndexOf(f, c)] += modelProbs[c] - posteriorProbs[c];
            }
        }
        // Likelihood of the observed (possibly corrupted) label.
        double observedLikelihood = 0.0;
        for (int c = 0; c < numClasses; c++)
        {
            observedLikelihood += confusionMatrix[observed][c] * System.Math.Exp(classScores[c] - logZ);
        }
        value -= System.Math.Log(observedLikelihood);
    }
    // Regularization: prior.Compute also accumulates into derivative.
    value += prior.Compute(x, derivative);
}
/// <summary>
/// Computes a convex combination of two objectives:
/// value = f * objFunc(x) + (1 - f) * biasedObjFunc(x), with
/// f = <c>convexComboFrac</c>; the gradient is the matching combination of
/// the two gradients, plus an optional prior term.
/// </summary>
/// <param name="x">Current weight vector.</param>
protected internal override void Calculate(double[] x)
{
    // Hoisted: DomainDimension() is loop-invariant but was previously
    // re-evaluated on every loop iteration.
    int dimension = DomainDimension();
    if (derivative == null)
    {
        derivative = new double[dimension];
    }
    value = convexComboFrac * objFunc.ValueAt(x) + (1.0 - convexComboFrac) * biasedObjFunc.ValueAt(x);
    double[] baseGrad = objFunc.DerivativeAt(x);
    double[] biasedGrad = biasedObjFunc.DerivativeAt(x);
    for (int i = 0; i < dimension; i++)
    {
        derivative[i] = convexComboFrac * baseGrad[i] + (1.0 - convexComboFrac) * biasedGrad[i];
    }
    // Unlike the other objectives in this file, the prior is optional here.
    if (prior != null)
    {
        value += prior.Compute(x, derivative);
    }
}
/// <summary>
/// Evaluates only the prior term for the given weight matrix. A throwaway
/// gradient buffer is passed to <c>prior.Compute</c> and discarded; just the
/// prior's value is returned.
/// </summary>
/// <param name="weights">Weight matrix, flattened before evaluation.</param>
/// <param name="prior">Prior whose value is computed at the flattened weights.</param>
public static double GetValue(double[][] weights, LogPrior prior)
{
    double[] flatWeights = Flatten(weights);
    double[] ignoredGradient = new double[flatWeights.Length];
    return prior.Compute(flatWeights, ignoredGradient);
}