/// <summary>
/// Write instruction operands into bytecode stream.
/// </summary>
/// <param name="writer">Bytecode writer.</param>
public override void WriteOperands(WordWriter writer)
{
    // Operand order is fixed by the instruction's bytecode layout and must
    // match GetWordCount(): condition, true target, false target, weights.
    Condition.Write(writer);
    TrueLabel.Write(writer);
    FalseLabel.Write(writer);
    Branchweights.Write(writer);
}
/// <summary>
/// Calculate number of words to fit complete instruction bytecode.
/// </summary>
/// <returns>Number of words in instruction bytecode.</returns>
public override uint GetWordCount()
{
    // Sum of the word counts of the four operands, in the order they are
    // written by WriteOperands.
    return Condition.GetWordCount()
        + TrueLabel.GetWordCount()
        + FalseLabel.GetWordCount()
        + Branchweights.GetWordCount();
}
/// <summary>
/// Updates the accuracy using the current results.
/// Computes the confusion matrix, accuracy, mean negative log probability
/// (clamped), average recall, worker/gold agreement, and — for binary
/// labels — the ROC curve and binary confusion matrix.
/// </summary>
protected virtual void UpdateAccuracy()
{
    // Clamp for the negative log probability of the gold label (NLPD).
    double nlpdThreshold = -Math.Log(0.001);
    int labelCount = TrueLabel.First(kvp => kvp.Value != null).Value.Dimension;
    var confusionMatrix = Util.ArrayInit(labelCount, labelCount, (i, j) => 0.0);
    int correct = 0;
    double logProb = 0.0;
    int goldX = 0;

    // Only for binary labels
    if (Mapping.LabelCount == 2)
    {
        trueBinaryLabel = new List<double>();
        probTrueBinaryLabel = new List<double>();
    }

    foreach (var kvp in GoldLabels)
    {
        if (kvp.Value == null)
        {
            continue;
        }

        // We have a gold label
        goldX++;

        Discrete trueLabel = null;
        if (TrueLabel.ContainsKey(kvp.Key))
        {
            trueLabel = TrueLabel[kvp.Key];
        }

        if (trueLabel == null)
        {
            // No inferred label: fall back to a uniform distribution so the
            // item still contributes to every metric.
            trueLabel = Discrete.Uniform(Mapping.LabelCount);
        }

        var probs = trueLabel.GetProbs();
        double max = probs.Max();
        // All labels tied for the highest probability; break ties at random.
        var predictedLabels = probs.Select((p, i) => new { prob = p, idx = i }).Where(a => a.prob == max).Select(a => a.idx).ToArray();
        int predictedLabel = predictedLabels.Length == 1 ? predictedLabels[0] : predictedLabels[Rand.Int(predictedLabels.Length)];
        this.PredictedLabel[kvp.Key] = predictedLabel;

        int goldLabel = kvp.Value.Value;
        confusionMatrix[goldLabel, predictedLabel] = confusionMatrix[goldLabel, predictedLabel] + 1.0;

        // BUG FIX: 'correct' was never incremented, so Accuracy was always 0.
        // The sibling UpdateAccuracy implementations count matches this way.
        if (goldLabel == predictedLabel)
        {
            correct++;
        }

        var nlp = -trueLabel.GetLogProb(goldLabel);
        if (nlp > nlpdThreshold)
        {
            nlp = nlpdThreshold;
        }
        logProb += nlp;

        if (trueBinaryLabel != null)
        {
            trueBinaryLabel.Add(goldLabel);
            probTrueBinaryLabel.Add(probs[goldLabel]);
        }
    }

    Accuracy = correct / (double)goldX;
    NegativeLogProb = logProb / goldX;
    ModelConfusionMatrix = confusionMatrix;

    // Average recall: mean over gold classes of per-class recall.
    double sumRec = 0;
    for (int i = 0; i < labelCount; i++)
    {
        double classSum = 0;
        for (int j = 0; j < labelCount; j++)
        {
            classSum += confusionMatrix[i, j];
        }
        sumRec += confusionMatrix[i, i] / classSum;
    }
    AvgRecall = sumRec / labelCount;

    // WorkerLabelAccuracy: Perc. agreement between worker label and gold label
    int sumAcc = 0;
    var LabelSet = Mapping.DataWithGold;
    int numLabels = LabelSet.Count();
    foreach (var datum in LabelSet)
    {
        sumAcc += datum.WorkerLabel == datum.GoldLabel ? 1 : 0;
    }
    WorkerLabelAccuracy = sumAcc / (double)numLabels;

    if (trueBinaryLabel != null && trueBinaryLabel.Count > 0)
    {
        RocCurve = new ReceiverOperatingCharacteristic(trueBinaryLabel.ToArray(), probTrueBinaryLabel.ToArray());
        RocCurve.Compute(0.001);
        BinaryConfusionMatrix = new ReceiverOperatingCharacteristic.ConfusionMatrix((int)confusionMatrix[1, 1], (int)confusionMatrix[0, 0], (int)confusionMatrix[0, 1], (int)confusionMatrix[1, 0]);
    }
}
/// <summary>
/// Updates the accuracy using the current results.
/// </summary>
private void UpdateAccuracy()
{
    // Clamp for the negative log probability of the gold label (NLPD).
    double nlpdThreshold = -Math.Log(0.001);
    int labelCount = TrueLabel.First(kvp => kvp.Value != null).Value.Dimension;
    var confMat = Util.ArrayInit(labelCount, labelCount, (row, col) => 0.0);
    int numCorrect = 0;
    double totalNlpd = 0.0;
    int numGold = 0;

    foreach (var entry in GoldLabels)
    {
        if (entry.Value == null)
        {
            continue;
        }

        // We have a gold label
        numGold++;

        var inferred = TrueLabel[entry.Key];
        if (inferred == null)
        {
            continue; // No inferred label
        }

        var probs = inferred.GetProbs();
        var best = probs.Max();
        // Indices tied for the highest probability; break ties at random.
        var candidates = probs
            .Select((p, i) => new { prob = p, idx = i })
            .Where(a => a.prob == best)
            .Select(a => a.idx)
            .ToArray();
        int predicted = candidates.Length == 1
            ? candidates[0]
            : candidates[Rand.Int(candidates.Length)];

        int gold = entry.Value.Value;
        confMat[gold, predicted] += 1.0;
        if (gold == predicted)
        {
            numCorrect++;
        }

        var nlpd = -inferred.GetLogProb(gold);
        if (nlpd > nlpdThreshold)
        {
            nlpd = nlpdThreshold;
        }
        totalNlpd += nlpd;
    }

    Accuracy = numCorrect / (double)numGold;
    NegativeLogProb = totalNlpd / (double)numGold;
    ModelConfusionMatrix = confMat;

    // Compute average recall
    double recallSum = 0;
    for (int row = 0; row < labelCount; row++)
    {
        double rowTotal = 0;
        for (int col = 0; col < labelCount; col++)
        {
            rowTotal += confMat[row, col];
        }
        recallSum += confMat[row, row] / rowTotal;
    }
    AvgRecall = recallSum / labelCount;
}
/// <summary>
/// Updates the accuracy using the current results.
/// </summary>
private void UpdateAccuracy()
{
    // Clamp for the negative log probability of the gold label (NLPD).
    double nlpdThreshold = -Math.Log(0.001);
    int labelCount = TrueLabel.First(kvp => kvp.Value != null).Value.Dimension;
    var confMat = Util.ArrayInit(labelCount, labelCount, (i, j) => 0.0);
    int correctCount = 0;
    double nlpdSum = 0.0;
    int goldCount = 0;

    foreach (var pair in GoldLabels)
    {
        if (pair.Value == null)
        {
            continue;
        }

        // We have a gold label
        goldCount++;

        var posterior = TrueLabel[pair.Key];
        if (posterior == null)
        {
            continue; // No inferred label
        }

        var probs = posterior.GetProbs();
        var top = probs.Max();
        // Indices tied for the highest probability; break ties at random.
        var tied = probs
            .Select((p, i) => new { prob = p, idx = i })
            .Where(a => a.prob == top)
            .Select(a => a.idx)
            .ToArray();
        int predicted = tied.Length == 1 ? tied[0] : tied[Rand.Int(tied.Length)];

        int gold = pair.Value.Value;
        confMat[gold, predicted] += 1.0;
        if (gold == predicted)
        {
            correctCount++;
        }

        var nlpd = -posterior.GetLogProb(gold);
        if (nlpd > nlpdThreshold)
        {
            nlpd = nlpdThreshold;
        }
        nlpdSum += nlpd;
    }

    if (goldCount == 0)
    {
        Console.WriteLine($"Accuracy and recall are NaN because no gold labels were provided.");
    }

    Accuracy = correctCount / (double)goldCount;
    NegativeLogProb = nlpdSum / goldCount;
    ModelConfusionMatrix = confMat;

    // Compute average recall over the gold classes that actually occur,
    // so empty classes do not contribute NaN rows.
    double recallSum = 0;
    int observedLabels = 0;
    for (int gold = 0; gold < labelCount; gold++)
    {
        double rowTotal = 0;
        for (int predicted = 0; predicted < labelCount; predicted++)
        {
            rowTotal += confMat[gold, predicted];
        }
        if (rowTotal > 0)
        {
            observedLabels++;
            recallSum += confMat[gold, gold] / rowTotal;
        }
    }
    AvgRecall = recallSum / observedLabels;
}