/// <summary>
///     Builds an element-wise multiplication op over two parent operations.
/// </summary>
/// <param name="graph">Owning computation graph.</param>
/// <param name="left">Left operand.</param>
/// <param name="right">Right operand.</param>
public Mult(ConvNetSharp<T> graph, Op<T> left, Op<T> right) : base(graph)
{
    this.AddParent(left);
    this.AddParent(right);
}
/// <summary>
///     Builds a fully connected (dense) layer: a 1x1 convolution with
///     <paramref name="neuronCount"/> filters over <paramref name="x"/>.
/// </summary>
/// <param name="graph">Owning computation graph.</param>
/// <param name="x">Input operation.</param>
/// <param name="neuronCount">Number of output neurons (filter count).</param>
public Dense(ConvNetSharp<T> graph, Op<T> x, int neuronCount)
    : base(graph, x, 1, 1, neuronCount)
{
}
/// <summary>
///     Builds an activation op applying <paramref name="type"/> to <paramref name="x"/>.
/// </summary>
/// <param name="graph">Owning computation graph.</param>
/// <param name="x">Input operation.</param>
/// <param name="type">Activation function to apply.</param>
public Activation(ConvNetSharp<T> graph, Op<T> x, ActivationType type) : base(graph)
{
    this.AddParent(x);
    this.Type = type;
}
/// <summary>
///     Builds the gradient op for LeakyReLU.
/// </summary>
/// <param name="graph">Owning computation graph.</param>
/// <param name="y">Forward LeakyReLU output.</param>
/// <param name="derivate">Incoming gradient from downstream.</param>
/// <param name="alpha">Negative-slope coefficient of the LeakyReLU.</param>
public LeakyReluGradient(ConvNetSharp<T> graph, Op<T> y, Op<T> derivate, T alpha) : base(graph)
{
    this.Alpha = alpha;
    this.AddParent(y);
    this.AddParent(derivate);
}
/// <summary>
///     Builds a Sum op whose output shape is supplied by another op.
/// </summary>
/// <param name="graph">Owning computation graph.</param>
/// <param name="x">Input operation to be summed.</param>
/// <param name="shape">Operation providing the target output shape.</param>
public Sum(ConvNetSharp<T> graph, Op<T> x, Op<T> shape) : base(graph)
{
    this.AddParent(x);
    this.AddParent(shape);
}
/// <summary>
///     Builds a softmax op over <paramref name="x"/>.
///     NOTE(review): unlike sibling ops, no graph is passed to a base
///     constructor here — confirm against the enclosing class.
/// </summary>
/// <param name="x">Input operation.</param>
public Softmax(Op<T> x)
{
    this.AddParent(x);
}
/// <summary>
///     Builds an element-wise negation op over <paramref name="x"/>.
/// </summary>
/// <param name="graph">Owning computation graph.</param>
/// <param name="x">Input operation.</param>
public Negate(ConvNetSharp<T> graph, Op<T> x) : base(graph)
{
    this.AddParent(x);
}
/// <summary>
///     Builds a Sum op with a fixed, statically known output shape.
/// </summary>
/// <param name="graph">Owning computation graph.</param>
/// <param name="x">Input operation to be summed.</param>
/// <param name="shape">Fixed output shape of the sum.</param>
public Sum(ConvNetSharp<T> graph, Op<T> x, Shape shape) : base(graph)
{
    this.AddParent(x);
    this.OutputShape = shape;
}
/// <summary>
///     Builds a Shape op that evaluates the shape of <paramref name="x"/>'s output.
/// </summary>
/// <param name="x">Operation whose output shape is reported.</param>
public Shape(Op<T> x)
{
    this._builder = BuilderInstance<T>.Create(); // we want to remain on host
    // BUGFIX: in the collapsed single-line form, the line comment above
    // swallowed "AddParent(x); }", so the parent was never registered and
    // the brace was commented out. Restored as executable code:
    this.AddParent(x);
}
/// <summary>
///     Detaches <paramref name="parent"/> from this op, removing the link
///     in both directions (parent list here, child list on the parent).
/// </summary>
/// <param name="parent">The parent operation to unlink.</param>
public void RemoveParent(Op<T> parent)
{
    // Sever both sides of the edge so neither node still references the other.
    parent.Children.Remove(this);
    this.Parents.Remove(parent);
}
/// <summary>
///     Builds the gradient op for an activation function.
/// </summary>
/// <param name="graph">Owning computation graph.</param>
/// <param name="input">Forward-pass input of the activation.</param>
/// <param name="output">Forward-pass output of the activation.</param>
/// <param name="outputGradient">Incoming gradient from downstream.</param>
/// <param name="type">Activation function whose gradient is computed.</param>
public ActivationGradient(ConvNetSharp<T> graph, Op<T> input, Op<T> output, Op<T> outputGradient, ActivationType type)
    : base(graph)
{
    this.AddParent(input);
    this.AddParent(output);
    this.AddParent(outputGradient);
    this.Type = type;
}
/// <summary>
///     Builds the op computing the gradient of a convolution's filters.
/// </summary>
/// <param name="graph">Owning computation graph.</param>
/// <param name="convolution">Forward convolution whose filters are differentiated.</param>
/// <param name="derivate">Incoming gradient from downstream.</param>
public ConvolutionFilterGradient(ConvNetSharp<T> graph, Convolution<T> convolution, Op<T> derivate)
    : base(graph)
{
    // Kept both as a typed field (for direct access) and as a parent (for graph traversal).
    this._convolution = convolution;
    this.AddParent(convolution);
    this.AddParent(derivate);
}
/// <summary>
///     Builds a power op (u raised to v) over two parent operations.
/// </summary>
/// <param name="graph">Owning computation graph.</param>
/// <param name="u">Base operand.</param>
/// <param name="v">Exponent operand.</param>
public Power(ConvNetSharp<T> graph, Op<T> u, Op<T> v) : base(graph)
{
    this.AddParent(u);
    this.AddParent(v);
}