/// <summary>
/// Configures the model for training.
/// </summary>
/// <param name="optimizer">The optimizer instance used for training the model.</param>
/// <param name="loss">The name of the loss function to be minimized during training.</param>
/// <param name="metric">The name of the metric to be evaluated by the model during training and testing.</param>
/// <param name="regulizer">The regularizer instance used to apply a penalty on the layer parameters.</param>
public void Compile(BaseOptimizer optimizer, string loss, string metric = "", Regulizers regulizer = null)
{
    CompileModel();

    learners.Add(optimizer.Get(modelOut, regulizer));
    lossName = loss;
    lossFunc = Losses.Get(loss, labelVariable, modelOut);
    if (!string.IsNullOrWhiteSpace(metric))
    {
        metricName = metric;
        metricFunc = Metrics.Get(metric, labelVariable, modelOut);
    }
    else
    {
        metricName = loss;
        metricFunc = lossFunc;
    }
}
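For reference, a typical call to this overload might look like the sketch below. The Sequential model, Dense layers, and SGD constructor are assumptions used purely for illustration; only the Compile signature itself comes from the listing above.

//Hypothetical usage sketch: Sequential, Dense and SGD are assumed helper types,
//shown only to illustrate how this Compile overload is invoked.
var model = new Sequential();
model.Add(new Dense(64, "relu"));
model.Add(new Dense(1, "sigmoid"));

//An optimizer instance plus loss/metric names; the regulizer argument is optional.
model.Compile(new SGD(0.01f), "binary_crossentropy", "accuracy");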
public void Compile(OptimizerType optimizer, LossType loss, MetricType metric)
{
    OptimizerFn = BaseOptimizer.Get(optimizer);
    LossFn = BaseLoss.Get(loss);
    MetricFn = BaseMetric.Get(metric);
}
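This enum-based overload trades the string names of the first overload for strongly typed selectors, so an invalid optimizer, loss, or metric fails at compile time rather than at run time. A minimal call might look like the sketch below; the specific enum member names are assumptions.

//Hypothetical call: the enum member names are assumptions for illustration.
model.Compile(OptimizerType.SGD, LossType.BinaryCrossEntropy, MetricType.BinaryAccuracy);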
static void Main(string[] args)
{
    Operations K = new Operations();

    //Load array to the tensor
    NDArray x = new NDArray(3, 3);
    x.Load(2, 4, 6, 1, 3, 5, 2, 3, 5);
    x.Print("Load X Values");

    NDArray y = new NDArray(3, 1);
    y.Load(1, 0, 1);
    y.Print("Load Y Values");

    //Create two layers, one with 6 neurons and another with 1
    FullyConnected fc1 = new FullyConnected(3, 6, "relu");
    FullyConnected fc2 = new FullyConnected(6, 1, "sigmoid");

    //Connect the layers by passing the output of one layer to the next
    fc1.Forward(x);
    fc2.Forward(fc1.Output);
    var preds = fc2.Output;
    preds.Print("Predictions");

    //Calculate the binary cross entropy cost between the predicted and expected values
    BaseCost cost = new BinaryCrossEntropy();
    var costValues = cost.Forward(preds, y);
    costValues.Print("BCE Cost");

    //Calculate the binary accuracy metric for the predicted vs expected values
    BaseMetric metric = new BinaryAccuacy();
    var metricValues = metric.Calculate(preds, y);
    metricValues.Print("Acc Metric");

    //Backpropagate the cost gradient through the layers
    var grad = cost.Backward(preds, y);
    fc2.Backward(grad);
    fc1.Backward(fc2.InputGrad);

    Console.WriteLine("Param value for FC1 before ADAM optimization");
    fc1.PrintParams(printGrads: false);

    //Initialise the ADAM optimizer with the default learning rate of 0.01
    BaseOptimizer optimizer = BaseOptimizer.Get("adam");

    //Change the value of the learning rate to see the jump in weight changes.
    //optimizer.LearningRate = 0.1;

    //Apply the optimizer to the first layer for the first iteration
    optimizer.Update(1, fc1);
    Console.WriteLine("Param value for FC1 after ADAM optimization");
    fc1.PrintParams(printGrads: false);

    Console.WriteLine("Param value for FC2 before ADAM optimization");
    fc2.PrintParams(printGrads: false);

    //Apply the optimizer to the second layer for the first iteration
    optimizer.Update(1, fc2);
    Console.WriteLine("Param value for FC2 after ADAM optimization");
    fc2.PrintParams(printGrads: false);

    Console.ReadLine();
}
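The walkthrough above runs a single forward pass, backward pass, and parameter update by hand. A full training run simply repeats those steps; the loop below is a sketch assembled from the same calls, with the epoch count of 100 being an arbitrary assumption.

//Sketch of a training loop built from the calls used in the walkthrough above;
//the epoch count is an arbitrary assumption.
for (int iteration = 1; iteration <= 100; iteration++)
{
    //Forward pass through both layers
    fc1.Forward(x);
    fc2.Forward(fc1.Output);
    var output = fc2.Output;

    //Compute the cost and backpropagate its gradient
    var iterCost = cost.Forward(output, y);
    var iterGrad = cost.Backward(output, y);
    fc2.Backward(iterGrad);
    fc1.Backward(fc2.InputGrad);

    //Update both layers, passing the current iteration number to the optimizer
    optimizer.Update(iteration, fc1);
    optimizer.Update(iteration, fc2);
}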