/// <summary>
/// Forwards the inputs and calculates the loss.
/// </summary>
/// <param name="preds">The predicted result.</param>
/// <param name="labels">The true result.</param>
/// <returns>The per-sample loss.</returns>
public override SuperArray Forward(SuperArray preds, SuperArray labels)
{
    preds /= Ops.Sum(preds, 1);
    preds = Ops.Clip(preds, Ops.EPSILON, 1 - Ops.EPSILON);
    return Ops.Sum(-1 * labels * Ops.Log(preds), 1);
}
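// Note: a sketch of the categorical cross-entropy the Forward above appears to compute,
// assuming each row of preds holds the class scores for one sample (normalised to sum to 1,
// then clipped to [EPSILON, 1 - EPSILON]):
//
//   L_i = -sum_j labels_ij * log(preds_ij)
//
// Worked example (one sample): preds = [0.7, 0.2, 0.1], labels = [1, 0, 0]
//   L = -log(0.7) ≈ 0.357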
/// <summary>
/// Calculates the metric with predicted and true values.
/// </summary>
/// <param name="preds">The predicted value.</param>
/// <param name="labels">The true value.</param>
/// <returns>The element-wise match between the rounded predictions and the labels.</returns>
public override SuperArray Calc(SuperArray preds, SuperArray labels)
{
    preds = Ops.Clip(preds, 0, 1);
    var r = Ops.EqualTo(Ops.Round(preds.Ravel()), labels.Ravel());
    return r;
}
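// Note: this reads like a binary-accuracy style metric, assuming labels are 0/1 and preds are
// probabilities: predictions are clipped to [0, 1], rounded to the nearest class, and compared
// element-wise against the flattened labels.
//
// Worked example: preds = [0.3, 0.8], labels = [0, 1]
//   round(preds) = [0, 1]  ->  matches = [1, 1]  (mean = 1.0)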
/// <summary>
/// Forwards the inputs and calculates the loss.
/// </summary>
/// <param name="preds">The predicted result.</param>
/// <param name="labels">The true result.</param>
/// <returns>The per-sample loss.</returns>
public override SuperArray Forward(SuperArray preds, SuperArray labels)
{
    var first_log = Ops.Log(Ops.Clip(preds, Ops.EPSILON, float.MaxValue) + 1);
    var second_log = Ops.Log(Ops.Clip(labels, Ops.EPSILON, float.MaxValue) + 1);
    return Ops.Mean(Ops.Square(first_log - second_log), 1);
}
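// Note: a sketch of the mean squared logarithmic error the Forward above appears to compute,
// assuming the mean runs across axis 1 (the features of each sample):
//
//   L_i = mean_j (log(preds_ij + 1) - log(labels_ij + 1))^2
//
// Worked example (single value): preds = 2.0, labels = 1.0
//   (log(3) - log(2))^2 ≈ (1.0986 - 0.6931)^2 ≈ 0.164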
/// <summary>
/// Backpropagation method to calculate the gradient of the loss function.
/// </summary>
/// <param name="preds">The predicted result.</param>
/// <param name="labels">The true result.</param>
/// <returns>The gradient of the loss with respect to the predictions.</returns>
public override SuperArray Backward(SuperArray preds, SuperArray labels)
{
    var y_true = Ops.Clip(labels, Ops.EPSILON, 1);
    var y_pred = Ops.Clip(preds, Ops.EPSILON, 1);
    return Ops.Maximum(-1 * (y_true / y_pred), 0);
}
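// Note: for the KL term labels * log(labels / preds), the analytic derivative with respect to
// preds is -labels / preds, which is negative whenever both inputs are positive; the
// Maximum(..., 0) clamp above therefore flattens that value to zero, so it may be worth
// checking whether the clamp is intended.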
/// <summary>
/// Forwards the inputs and calculates the loss.
/// </summary>
/// <param name="preds">The predicted result.</param>
/// <param name="labels">The true result.</param>
/// <returns>The per-sample loss.</returns>
public override SuperArray Forward(SuperArray preds, SuperArray labels)
{
    var y_true = Ops.Clip(labels, Ops.EPSILON, 1);
    var y_pred = Ops.Clip(preds, Ops.EPSILON, 1);
    return Ops.Sum(y_true * Ops.Log(y_true / y_pred), 1);
}
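// Note: a sketch of the Kullback-Leibler divergence the Forward above appears to compute,
// with both distributions clipped to [EPSILON, 1]:
//
//   L_i = sum_j labels_ij * log(labels_ij / preds_ij)
//
// Worked example (one sample): labels = [0.5, 0.5], preds = [0.9, 0.1]
//   0.5 * log(0.5 / 0.9) + 0.5 * log(0.5 / 0.1) ≈ -0.294 + 0.805 ≈ 0.511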
/// <summary>
/// Invokes the constraint.
/// </summary>
/// <param name="w">The weight SuperArray.</param>
/// <returns>The constrained weight SuperArray.</returns>
internal override SuperArray Call(SuperArray w)
{
    SuperArray norms = Ops.Sqrt(Ops.Sum(Ops.Square(w), Axis));
    var desired = Ops.Clip(norms, 0, MaxValue);
    return w * (desired / (Ops.EPSILON + norms));
}
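// Note: this looks like the usual max-norm weight constraint, assuming Axis selects the
// dimension along which each weight vector is measured: the L2 norm is computed per vector,
// clipped to at most MaxValue, and the weights are rescaled by desired / (EPSILON + norm).
//
// Worked example: a weight column with norm 5 and MaxValue = 2 is scaled by roughly 2/5 = 0.4.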
/// <summary>
/// Backpropagation method to calculate the gradient of the loss function.
/// </summary>
/// <param name="preds">The predicted result.</param>
/// <param name="labels">The true result.</param>
/// <returns>The gradient of the loss with respect to the predictions.</returns>
public override SuperArray Backward(SuperArray preds, SuperArray labels)
{
    float norm = 2f / preds.Shape[0];
    var first_log = Ops.Log(Ops.Clip(preds, Ops.EPSILON, float.MaxValue) + 1);
    var second_log = Ops.Log(Ops.Clip(labels, Ops.EPSILON, float.MaxValue) + 1);
    return norm * (first_log - second_log) / (Ops.Clip(preds, Ops.EPSILON, float.MaxValue) + 1);
}
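// Note: a sketch of the gradient the Backward above appears to compute. For the MSLE term
// (log(p + 1) - log(y + 1))^2, the derivative with respect to p is
//
//   dL/dp = 2 * (log(p + 1) - log(y + 1)) / (p + 1)
//
// and the factor norm = 2 / preds.Shape[0] folds the 2 together with an averaging term.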
/// <summary>
/// Backpropagation method to calculate the gradient of the loss function.
/// </summary>
/// <param name="preds">The predicted result.</param>
/// <param name="labels">The true result.</param>
/// <returns>The gradient of the loss with respect to the predictions.</returns>
public override SuperArray Backward(SuperArray preds, SuperArray labels)
{
    SuperArray output = preds;
    if (!FromLogit)
    {
        output = Ops.Clip(preds, Ops.EPSILON, 1f - Ops.EPSILON);
    }

    return (output - labels) / (output * (1 - output));
}
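// Note: for binary cross-entropy L = -(y * log(p) + (1 - y) * log(1 - p)), the derivative with
// respect to the probability p is (p - y) / (p * (1 - p)), which is what the expression above
// returns once preds have been clipped away from 0 and 1.
//
// Worked example: p = 0.9, y = 1  ->  (0.9 - 1) / (0.9 * 0.1) ≈ -1.11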
/// <summary>
/// Invokes the constraint.
/// </summary>
/// <param name="w">The weight SuperArray.</param>
/// <returns>The constrained weight SuperArray.</returns>
internal override SuperArray Call(SuperArray w)
{
    SuperArray norms = Ops.Sqrt(Ops.Sum(Ops.Square(w), Axis));
    var desired = Rate * Ops.Clip(norms, MinValue, MaxValue) + (1 - Rate) * norms;
    return w * (desired / (Ops.EPSILON + norms));
}
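// Note: this looks like a min-max-norm constraint: each weight vector's L2 norm is pulled
// toward the interval [MinValue, MaxValue], with Rate interpolating between the clipped norm
// (Rate = 1) and the original norm (Rate = 0).
//
// Worked example: norm = 5, MinValue = 0, MaxValue = 2, Rate = 0.5
//   desired = 0.5 * 2 + 0.5 * 5 = 3.5  ->  scale ≈ 3.5 / 5 = 0.7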
/// <summary>
/// Forwards the inputs and calculates the loss.
/// </summary>
/// <param name="preds">The predicted result.</param>
/// <param name="labels">The true result.</param>
/// <returns>The element-wise loss.</returns>
public override SuperArray Forward(SuperArray preds, SuperArray labels)
{
    SuperArray output = preds;
    if (!FromLogit)
    {
        output = Ops.Clip(output, Ops.EPSILON, 1f - Ops.EPSILON);
        output = Ops.Log(output / (1 - output));
    }

    output = Ops.Sigmoid(output);
    return labels * Ops.Neg(Ops.Log(output)) + (1 - labels) * Ops.Neg(Ops.Log(1 - output));
}
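// Note: a sketch of the binary cross-entropy the Forward above appears to compute. When
// FromLogit is false, the clipped probabilities are converted to logits and passed back
// through the sigmoid, so in either case output ends up as a probability p and the loss is
//
//   L = -(labels * log(p) + (1 - labels) * log(1 - p))
//
// Worked example: p = 0.9, labels = 1  ->  L = -log(0.9) ≈ 0.105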
/// <summary>
/// Backpropagation method to calculate the gradient of the loss function.
/// </summary>
/// <param name="preds">The predicted result.</param>
/// <param name="labels">The true result.</param>
/// <returns>The gradient of the loss with respect to the predictions.</returns>
public override SuperArray Backward(SuperArray preds, SuperArray labels)
{
    preds = Ops.Clip(preds, Ops.EPSILON, 1 - Ops.EPSILON);
    return (preds - labels) / preds;
}
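// Note: with preds clipped to [EPSILON, 1 - EPSILON], the gradient returned above is
// (preds - labels) / preds element-wise; for a one-hot label this equals 1 - 1/preds on the
// true class (negative whenever preds < 1) and 1 on every other class.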
/// <summary>
/// Backpropagation method to calculate the gradient of the loss function.
/// </summary>
/// <param name="preds">The predicted result.</param>
/// <param name="labels">The true result.</param>
/// <returns>The gradient of the loss with respect to the predictions.</returns>
public override SuperArray Backward(SuperArray preds, SuperArray labels)
{
    var diff = (preds - labels) / Ops.Clip(Ops.Abs(labels) * Ops.Abs(labels - preds), Ops.EPSILON, float.MaxValue);
    return 100 * diff / preds.Shape[0];
}
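// Note: a sketch of the gradient the Backward above appears to compute. For the MAPE term
// 100 * |p - y| / |y|, the derivative with respect to p is 100 * sign(p - y) / |y|, and
// (p - y) / (|y| * |y - p|) equals sign(p - y) / |y| wherever p != y; the clip keeps the
// denominator away from zero and the division by preds.Shape[0] accounts for averaging.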
/// <summary>
/// Forwards the inputs and calculates the loss.
/// </summary>
/// <param name="preds">The predicted result.</param>
/// <param name="labels">The true result.</param>
/// <returns>The per-sample loss.</returns>
public override SuperArray Forward(SuperArray preds, SuperArray labels)
{
    var diff = Ops.Abs(preds - labels) / Ops.Clip(Ops.Abs(labels), Ops.EPSILON, float.MaxValue);
    return 100 * Ops.Mean(diff, 1).Reshape(1, -1);
}
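// Note: a sketch of the mean absolute percentage error the Forward above appears to compute,
// assuming the mean runs across axis 1:
//
//   L_i = 100 * mean_j |preds_ij - labels_ij| / |labels_ij|
//
// Worked example: labels = 200, preds = 150  ->  100 * 50 / 200 = 25 (percent)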