/// <summary>
/// Sample: repeatedly evaluates t = t + 1 on the graph and prints the
/// current value of t after each step, until a console key is pressed.
/// </summary>
public static void Example3()
{
    var cns = new ConvNetSharp<float>();
    BuilderInstance<float>.Volume = new VolumeBuilder();

    // Build the graph: a trainable variable t, and an op that assigns t + 1 back into t.
    var t = cns.Variable(0.0f, "t", true);
    var fun = cns.Assign(t, t + 1.0f);

    using (var session = new Session<float>())
    {
        // Seed the graph's placeholders before the first run.
        session.InitializePlaceHolders(fun, new Dictionary<string, Volume<float>> { { "t", 1.0f } });

        // Run at least once, then keep stepping until the user presses a key
        // (same post-condition check as the original do/while).
        for (;;)
        {
            session.Run(fun, null);
            var current = t.Result.Get(0);
            Console.WriteLine(current);

            if (Console.KeyAvailable)
            {
                break;
            }
        }
    }
}
/// <summary>
/// Creates a softmax cross-entropy op from a softmax output op and an
/// expected-output op; both are registered as parents of this op.
/// </summary>
/// <param name="graph">The graph this op belongs to.</param>
/// <param name="softmax">Op producing the softmax output.</param>
/// <param name="y">Op producing the expected (target) output.</param>
public SoftmaxCrossEntropy(ConvNetSharp<T> graph, Op<T> softmax, Op<T> y) : base(graph)
{
    // Qualify member calls with 'this.' for consistency with sibling ops
    // (e.g. Assign uses this.AddParent, AdamOptimizer uses this._field).
    this.AddParent(softmax);
    this.AddParent(y);

    // Pre-allocate a single-element (1x1x1x1) volume to hold the scalar result.
    this.Result = BuilderInstance<T>.Volume.SameAs(new Shape(1, 1, 1, 1));
}
/// <summary>
/// Creates an Adam optimizer with the given hyper-parameters.
/// </summary>
/// <param name="graph">The graph this optimizer belongs to.</param>
/// <param name="learningRate">Step size used by the update rule.</param>
/// <param name="beta1">Exponential decay rate for the first-moment estimate.</param>
/// <param name="beta2">Exponential decay rate for the second-moment estimate.</param>
/// <param name="epsilon">Small constant added for numerical stability.</param>
public AdamOptimizer(ConvNetSharp<T> graph, T learningRate, T beta1, T beta2, T epsilon) : base(graph)
{
    // Store the hyper-parameters.
    this._lr = learningRate;
    this._beta1 = beta1;
    this._beta2 = beta2;
    this._epsilon = epsilon;

    // Single-element volume holding the effective learning rate.
    this._learningRate = BuilderInstance<T>.Volume.SameAs(new Shape(1));
}
/// <summary>
/// Creates an assignment op that stores the value produced by <paramref name="op"/>
/// into the variable <paramref name="valueOp"/>.
/// </summary>
/// <param name="graph">The graph this op belongs to.</param>
/// <param name="valueOp">Assignment target; must be a <see cref="Variable{T}"/>.</param>
/// <param name="op">Op producing the value to assign.</param>
/// <exception cref="ArgumentException">Thrown when <paramref name="valueOp"/> is not a variable.</exception>
public Assign(ConvNetSharp<T> graph, Op<T> valueOp, Op<T> op) : base(graph)
{
    // Guard clause: only variables can be assigned to.
    var target = valueOp as Variable<T>;
    if (target == null)
    {
        throw new ArgumentException("Assigned Op should be a Variable", nameof(valueOp));
    }

    // Register both the target and the value producer as parents.
    this.AddParent(valueOp);
    this.AddParent(op);
}
/// <summary>
/// Gradient check for the Sqrt op: builds sqrt(x) over a placeholder and
/// verifies its numeric vs. analytic gradients at a random positive location
/// (sqrt is only real-valued / differentiable for positive inputs).
/// </summary>
public void SqrtGradientCheck()
{
    var cns = new ConvNetSharp<T>();

    // Graph creation: fun = sqrt(x).
    var x = cns.PlaceHolder("x");
    var fun = cns.Sqrt(x);

    var shape = new Shape(2, 2, 3, 4);

    // NOTE(review): "posisitveOnly" looks misspelled, but it is a named argument and
    // must match the parameter name on RandomUtilities.RandomDoubleArray (declared
    // elsewhere) — confirm and fix both together, not here.
    var location = this.NewVolume(RandomUtilities.RandomDoubleArray(shape.TotalLength, posisitveOnly: true), shape);

    this.GradientCheck(cns, fun, location);
}
/// <summary>
/// Gradient check for the Sigmoid op: builds sigmoid(x) over a placeholder and
/// compares numeric vs. analytic gradients at a random location, using a 1e-3
/// tolerance (looser than the default used by the sqrt check).
/// </summary>
public void SigmoidGradientCheck()
{
    var cns = new ConvNetSharp<T>();

    // Graph creation: graphOutput = sigmoid(input).
    var input = cns.PlaceHolder("x");
    var graphOutput = cns.Sigmoid(input);

    var inputShape = new Shape(2, 2, 3, 4);
    var randomValues = RandomUtilities.RandomDoubleArray(inputShape.TotalLength);
    var evaluationPoint = this.NewVolume(randomValues, inputShape);

    this.GradientCheck(cns, graphOutput, evaluationPoint, 1e-3);
}