/// <summary>
/// Runs the forward ReLU activation kernel for <paramref name="layer"/> on the GPU,
/// reading the layer's inputs and writing its outputs.
/// </summary>
/// <param name="layer">The ReLU layer to evaluate.</param>
public void CalculateRelu(ReluLayer layer)
{
    var launchDims = ComputeBlocksTreads(layer.Outputs.Length);

    Gpu.Launch(launchDims.Item1, launchDims.Item2).CalculateReluGPU(
        layer.Inputs.GPUArray,
        layer.Outputs.GPUArray,
        layer.Outputs.Length);
}
/// <summary>
/// Runs the backward ReLU kernel for <paramref name="layer"/> on the GPU,
/// writing the layer's input gradients from its gradients and outputs.
/// </summary>
/// <param name="layer">The ReLU layer to back-propagate through.</param>
public void BackPropagateRelu(ReluLayer layer)
{
    var launchDims = ComputeBlocksTreads(layer.Inputs.Length);

    Gpu.Launch(launchDims.Item1, launchDims.Item2).BackPropagateReluGPU(
        layer.Inputs.Length,
        layer.InputGradients.GPUArray,
        layer.Gradients.GPUArray,
        layer.Outputs.GPUArray);
}
/// <summary>
/// Appends a ReLU activation layer after the current last layer of the network.
/// </summary>
/// <param name="id">Optional identifier for the new layer.</param>
/// <returns>The newly created <see cref="ReluLayer"/>.</returns>
/// <exception cref="InvalidOperationException">Thrown when the network has no layers yet.</exception>
public ReluLayer AddReluLayer(string id = "")
{
    // NOTE: the file previously contained two identical definitions of this
    // method (duplicate signature, compile error CS0111); this is the single
    // merged implementation.
    //
    // LastOrDefault (not Last): Last() itself throws on an empty collection,
    // which made the original null check unreachable dead code.
    var lastLayer = Layers.LastOrDefault();
    if (lastLayer == null)
    {
        throw new InvalidOperationException("There must be one or more layers in the network");
    }

    var reluLayer = new ReluLayer(_gpuModule, lastLayer, id: id);
    Layers.Add(reluLayer);
    return reluLayer;
}