/// <summary>
/// Backpropagates through an elementwise multiply node.
/// Product rule: each operand's gradient is dY scaled by the other operand's
/// cached forward value (Yl, Yr), accumulated via broadcast-aware add.
/// </summary>
/// <param name="dY">Upstream gradient flowing into this node.</param>
public override void Backward(NDarray<double> dY)
{
    var dLeft = ND.Mul(dY, Yr);
    var dRight = ND.Mul(Yl, dY);
    left.Grad = ND.AddBCleft(left.Grad, dLeft);
    right.Grad = ND.AddBCleft(right.Grad, dRight);
    // Recurse into both operands with their accumulated gradients.
    left.Backward(left.Grad);
    right.Backward(right.Grad);
}
/// <summary>
/// Backpropagates through a matrix-product node (Y = Yl · Yr).
/// Standard matmul gradients: dLeft = dY · Yrᵀ, dRight = Ylᵀ · dY,
/// accumulated via broadcast-aware add before recursing.
/// </summary>
/// <param name="dY">Upstream gradient flowing into this node.</param>
public override void Backward(NDarray<double> dY)
{
    var gradLeft = ND.Dot(dY, Yr.T);
    var gradRight = ND.Dot(Yl.T, dY);
    left.Grad = ND.AddBCleft(left.Grad, gradLeft);
    right.Grad = ND.AddBCleft(right.Grad, gradRight);
    // Recurse into both operands with their accumulated gradients.
    left.Backward(left.Grad);
    right.Backward(right.Grad);
}
/// <summary>
/// Fully-connected layer stacked on an upstream layer: output = input · W + b.
/// </summary>
/// <param name="layer">Upstream layer whose output feeds this one.</param>
/// <param name="outNodes">Number of output units.</param>
public DenseLayer(Layer layer, int outNodes)
{
    this.layer = layer;
    InputNodes = layer.OutputNodes;
    OutputNodes = outNodes;
    // Weights drawn uniformly from [-bound, bound] with bound = 2/sqrt(fan_in).
    // NOTE(review): resembles He/Xavier-style fan-in scaling — confirm intended scheme.
    double bound = 2.0 / Math.Sqrt(InputNodes);
    weights = new Variable("weights", ND.Uniform(-bound, bound, InputNodes, OutputNodes));
    biases = new Variable("biases", ND.Zeros<double>(1, OutputNodes));
    // Graph node for this layer: (upstream · weights) + biases.
    Function = new AddFunc(new DotFunc(layer.Function, weights), biases);
}
/// <summary>
/// Forward pass of an elementwise-add node: evaluate both operands, then sum.
/// Also lazily allocates zeroed gradient buffers shaped like each operand's output.
/// </summary>
public override void Forward()
{
    left.Forward();
    right.Forward();
    Y = ND.Add(left.Y, right.Y);

    // First forward pass: create gradient storage matching the forward outputs.
    if (left.Grad == null)
    {
        left.Grad = new NDarray<double>(left.Y.Shape);
    }
    if (right.Grad == null)
    {
        right.Grad = new NDarray<double>(right.Y.Shape);
    }
}
/// <summary>
/// Forward pass of a matrix-product node: evaluate both operands, cache their
/// outputs (Yl, Yr) for use in Backward, and compute Y = Yl · Yr.
/// Also lazily allocates zeroed gradient buffers shaped like each operand's output.
/// </summary>
public override void Forward()
{
    left.Forward();
    right.Forward();

    // Cache forward values; Backward needs them for the matmul gradient.
    Yl = left.Y;
    Yr = right.Y;
    Y = ND.Dot(Yl, Yr);

    // First forward pass: create gradient storage matching the forward outputs.
    if (left.Grad == null)
    {
        left.Grad = new NDarray<double>(left.Y.Shape);
    }
    if (right.Grad == null)
    {
        right.Grad = new NDarray<double>(right.Y.Shape);
    }
}
/// <summary>
/// XOR demo: trains a 2-4-4-1 MLP (tanh hidden layers, sigmoid output) with
/// plain SGD through the autograd graph, printing the loss every 100 epochs
/// and the final predictions.
/// </summary>
public static void Main(string[] args)
{
    Console.WriteLine("Hello World! AutoGradient MultiLayers Neurals Network");
    Utils.random = new Random(123); // fixed seed for reproducible weight init

    // XOR truth table: 4 samples × 2 inputs, 4 × 1 targets.
    double[] inputs = { 0, 0, 0, 1, 1, 0, 1, 1 };
    double[] targets = { 0, 1, 1, 0 };
    var X = ND.CreateNDarray(inputs, 4, 2);
    var Y = ND.CreateNDarray(targets, 4, 1);

    var net = new Chain(inNodes: 2)
        .AddDenseLayer(outNodes: 4)
        .AddTanhActivation()
        .AddDenseLayer(outNodes: 4)
        .AddTanhActivation()
        .AddDenseLayer(outNodes: 1)
        .AddSigmoidActivation();

    const int epochs = 1000;
    const int displayEpoch = 100;
    var sw = Stopwatch.StartNew();
    // Inclusive upper bound so the final epoch (k == epochs) is also reported.
    for (int k = 0; k <= epochs; ++k)
    {
        net.Forward(X);
        var loss = net.Loss(Y);
        if (k % displayEpoch == 0)
        {
            Console.WriteLine($"Epochs:{k,5}/{epochs} loss:{loss:0.000000}");
        }
        net.Backward(Y);
        net.UpdateWeightsSGD(0.1);
        net.ResetGradient();
    }
    Console.WriteLine($"Time:{sw.ElapsedMilliseconds,6} ms");

    Console.WriteLine();
    Console.WriteLine("Prediction");
    Console.WriteLine(net.Prediction(X));
}
/// <summary>
/// Backpropagates through an activation node via the chain rule: dX = f'(Y) ⊙ dY.
/// NOTE(review): `grad` appears to be the activation derivative expressed in terms
/// of the forward output Y (e.g. 1 - y² for tanh) — confirm against its definition.
/// Grad is overwritten, not accumulated — presumably a single-consumer node; verify.
/// </summary>
/// <param name="dY">Upstream gradient flowing into this node.</param>
public override void Backward(NDarray<double> dY)
{
    var dAct = Y.ApplyFunc(grad); // derivative evaluated at the forward output
    Grad = ND.Mul(dAct, dY);
    function.Backward(Grad);
}
/// <summary>
/// Source layer of a chain: node count in equals node count out, and the graph
/// root is a placeholder Variable (presumably re-bound to real input data by the
/// forward pass — verify against the caller).
/// </summary>
/// <param name="inNodes">Number of input features.</param>
public InputLayer(int inNodes)
{
    // Same assignment order as the original chained form (OutputNodes first).
    OutputNodes = inNodes;
    InputNodes = inNodes;
    Function = new Variable("inputs", ND.Zeros<double>(1)); // 1-element zero placeholder
}
/// <summary>
/// Gradient of the loss w.r.t. the predictions: ∂L/∂p = p − y
/// (matches the half-MSE loss, whose ½ factor cancels the square's 2).
/// </summary>
/// <param name="y">Target values.</param>
/// <param name="p">Predicted values.</param>
public NDarray<double> Grad(NDarray<double> y, NDarray<double> p)
{
    return ND.Sub(p, y);
}
/// <summary>
/// Half mean-squared error: mean over all elements of ½·(p − y)².
/// </summary>
/// <param name="y">Target values.</param>
/// <param name="p">Predicted values.</param>
public double Loss(NDarray<double> y, NDarray<double> p)
{
    var halfSquaredError = ND.Sq(ND.Sub(p, y)).ApplyFunc(e => e * 0.5);
    return halfSquaredError.Data.Average();
}
/// <summary>
/// Vanilla SGD update: Y ← Y − lr·Grad.
/// </summary>
/// <param name="lr">Learning rate (step size).</param>
public void UpdateSGD(double lr)
{
    var step = Grad.ApplyFunc(g => g * lr);
    Y = ND.Sub(Y, step);
}
/// <summary>
/// Accumulates the incoming gradient (transposed) into Grad and propagates downstream.
/// NOTE(review): the transpose of dY suggests the caller delivers the gradient in a
/// transposed layout relative to this node's storage — confirm against the matching
/// Forward before changing anything here.
/// </summary>
/// <param name="dY">Upstream gradient flowing into this node.</param>
public override void Backward(NDarray<double> dY)
{
    var incoming = dY.T;
    Grad = ND.Add(Grad, incoming); // accumulate across calls
    function.Backward(Grad);
}
/// <summary>
/// Permutes this array's axes according to <paramref name="table"/>
/// (delegates to <c>ND.Transpose</c>).
/// </summary>
/// <param name="table">Axis permutation.</param>
public NDarray<U> Transpose(params int[] table)
{
    return ND.Transpose(this, table);
}
/// <summary>
/// Returns this array reinterpreted with the given <paramref name="shape"/>
/// (delegates to <c>ND.Reshape</c>).
/// </summary>
/// <param name="shape">Target dimensions.</param>
public NDarray<U> Reshape(params int[] shape)
{
    return ND.Reshape(this, shape);
}