/// <summary>
/// Accumulates the gradient w.r.t. the layer's biases into <c>DeltaB</c>.
/// </summary>
/// <param name="dvalues">Upstream gradient, one entry per bias.</param>
/// <param name="l">Layer whose biases and regularization settings are read.</param>
/// <param name="divisor">Scale factor for the incoming gradient (e.g. batch size); defaults to 1.</param>
public void DBiases(double[] dvalues, Layer l, double divisor = 1)
{
    // Accumulate the (scaled) upstream gradient.
    for (int j = 0; j < DeltaB.Length; j++)
    {
        DeltaB[j] += dvalues[j] / divisor;
    }

    // L1 penalty: subgradient of |b| is sign(b) (>= 0 maps to +1 here, matching the original).
    if (l.L1Regularization > 0)
    {
        Parallel.For(0, l.Biases.GetLength(0), i =>
        {
            DeltaB[i] += l.Biases[i] >= 0 ? l.L1Regularization : -l.L1Regularization;
        });
    }

    // BUG FIX: the L2 term was previously nested inside the L1 > 0 check, so an
    // L2-only configuration applied no penalty at all. Apply it independently.
    // (Property name "L2Regularizattion" is declared elsewhere; kept as-is.)
    if (l.L2Regularizattion > 0)
    {
        Parallel.For(0, l.Biases.GetLength(0), i =>
        {
            // d/db of L2 * b^2 is 2 * L2 * b.
            DeltaB[i] += 2 * l.L2Regularizattion * l.Biases[i];
        });
    }
}
/// <summary>
/// Folds the accumulated gradients into the layer's momentum buffers, asks the
/// layer to apply them, then clears the accumulators for the next pass.
/// NaN gradients are skipped so one bad sample cannot poison the momentum state.
/// </summary>
/// <param name="layer">Layer whose momentum buffers are updated and whose <c>Update()</c> is invoked.</param>
/// <param name="adjustment">Learning-rate-like scale applied to each gradient.</param>
public void Update(Layer layer, double adjustment)
{
    // Bias momentum: skip NaN entries instead of writing them through.
    Parallel.For(0, DeltaB.Length, i =>
    {
        if (!double.IsNaN(DeltaB[i]))
        {
            layer.BMomentum[i] -= adjustment * DeltaB[i];
        }
    });

    // Weight momentum. Parallelize over rows only; the original nested
    // Parallel.For oversubscribes the thread pool — a sequential inner
    // loop does the same work with far less scheduling overhead.
    Parallel.For(0, layer.Weights.GetLength(0), i =>
    {
        int cols = layer.Weights.GetLength(1);
        for (int j = 0; j < cols; j++)
        {
            if (!double.IsNaN(DeltaW[i, j]))
            {
                layer.WMomentum[i, j] -= adjustment * DeltaW[i, j];
            }
        }
    });

    layer.Update();
    Reset();
}
/// <summary>
/// Zeroes the gradient accumulators by reallocating them at their current
/// dimensions, ready for the next accumulation pass.
/// </summary>
public void Reset()
{
    int biasCount = DeltaB.Length;
    int rows = DeltaW.GetLength(0);
    int cols = DeltaW.GetLength(1);

    DeltaB = new double[biasCount];
    DeltaW = new double[rows, cols];
}