/// <summary>
/// Initialize with l2 weight.
/// </summary>
/// <param name="l2">The l2 weight value.</param>
/// <returns>A <see cref="Regulizers"/> instance with L2 regularization enabled.</returns>
public static Regulizers RegL2(double l2 = 0.01)
{
    // Only the L2 penalty is enabled; gradient clipping settings keep their defaults.
    Regulizers result = new Regulizers()
    {
        IsL2 = true,
        L2 = l2
    };

    return result;
}
/// <summary>
/// Initialize with l1 weight.
/// </summary>
/// <param name="l1">The l1 weight value.</param>
/// <returns>A <see cref="Regulizers"/> instance with L1 regularization enabled.</returns>
public static Regulizers RegL1(double l1 = 0.01)
{
    // Only the L1 penalty is enabled; all other settings keep their defaults.
    Regulizers result = new Regulizers()
    {
        IsL1 = true,
        L1 = l1
    };

    return result;
}
/// <summary>
/// Initialize with l1 and l2 weight.
/// </summary>
/// <param name="l1">The l1 weight value.</param>
/// <param name="l2">The l2 weight value.</param>
/// <returns>A <see cref="Regulizers"/> instance with both L1 and L2 regularization enabled.</returns>
public static Regulizers L1L2(double l1 = 0.01, double l2 = 0.01)
{
    // Both penalties are active at the same time with their respective weights.
    var combined = new Regulizers()
    {
        IsL1 = true,
        IsL2 = true,
        L1 = l1,
        L2 = l2
    };

    return combined;
}
/// <summary>
/// Initialize with l2 weight.
/// </summary>
/// <param name="l2">The l2 weight value.</param>
/// <param name="gradientClippingWithTruncation">if set to <c>true</c> [gradient clipping with truncation].</param>
/// <param name="gradientClippingThresholdPerSample">The gradient clipping threshold per sample.</param>
/// <returns>
/// A <see cref="Regulizers"/> instance with L2 regularization and the given gradient clipping settings.
/// </returns>
public static Regulizers RegL2(double l2 = 0.01, bool gradientClippingWithTruncation = false, double? gradientClippingThresholdPerSample = null)
{
    // L2 penalty combined with optional gradient clipping configuration.
    var reg = new Regulizers()
    {
        IsL2 = true,
        L2 = l2
    };

    reg.GradientClippingWithTruncation = gradientClippingWithTruncation;
    reg.GradientClippingThresholdPerSample = gradientClippingThresholdPerSample;

    return reg;
}
/// <summary>
/// Configures the model for training.
/// </summary>
/// <param name="optimizer">The optimizer function name used for training the model.</param>
/// <param name="loss">The function name with which the training loss will be minimized.</param>
/// <param name="metric">The metric name to be evaluated by the model during training and testing.</param>
/// <param name="regulizer">The regulizer instance to apply penalty on layers parameters.</param>
public void Compile(string optimizer, string loss, string metric, Regulizers regulizer = null)
{
    CompileModel();

    // Resolve the optimizer by name and register its learner for this model.
    var baseOptimizer = new BaseOptimizer(optimizer);
    learners.Add(baseOptimizer.GetDefault(modelOut, regulizer));

    lossName = loss;
    lossFunc = Losses.Get(loss, labelVariable, modelOut);

    // When no metric name is supplied, fall back to reporting the loss itself.
    bool hasMetric = !string.IsNullOrWhiteSpace(metric);
    metricName = hasMetric ? metric : loss;
    metricFunc = hasMetric ? Metrics.Get(metric, labelVariable, modelOut) : lossFunc;
}