public void SoftmaxLayer_BackwardGradient()
{
    // Exhaustively compare analytic and numeric gradients for the softmax layer.
    var softmaxLayer = new SoftmaxLayer();
    var gradientChecker = new GradientChecker(1e-2f, 1e-3f);
    gradientChecker.CheckExhaustive(softmaxLayer, bottom, top);
}
public void BnllLayer_BackwardGradient()
{
    // Element-wise gradient check for the BNLL activation layer.
    var bnllLayer = new BnllLayer();
    var gradientChecker = new GradientChecker(1e-2f, 1e-3f);
    gradientChecker.CheckEltwise(bnllLayer, bottom, top);
}
public void ReluLayer_BackwardGradient()
{
    // Element-wise gradient check for ReLU; the checker is seeded (1701) with a
    // small perturbation distribution (mean 0, spread 0.01) for reproducibility.
    var reluLayer = new ReluLayer();
    var gradientChecker = new GradientChecker(1e-2f, 1e-3f, 1701, 0.0d, 0.01f);
    gradientChecker.CheckEltwise(reluLayer, bottom, top);
}
public void PowerLayer_Backward(double power, double scale, double shift)
{
    var powerLayer = new PowerLayer(new PowerLayerConfiguration(power, scale, shift));

    // For non-integer/fractional powers the function (scale*x + shift)^power is
    // only well-defined when scale*x + shift >= 0, i.e. x >= -shift/scale.
    // Reflect any out-of-range inputs back into the valid domain in place.
    bool domainRestricted = power != 0 && power != 1 && power != 2;
    if (domainRestricted)
    {
        var minValue = -shift / scale;
        using (var bottomCpu = bottom.OnCpu())
        {
            var data = bottomCpu.Data;
            for (int i = 0; i < bottom.Count; i++)
            {
                if (data[i] < minValue)
                {
                    // Mirror the value about minValue (same form as the original
                    // to keep floating-point results bit-identical).
                    data[i] = minValue + (minValue - data[i]);
                }
            }
        }
    }

    var gradientChecker = new GradientChecker(1e-2f, 1e-2f, 1701, 0.0d, 0.01f);
    gradientChecker.CheckEltwise(powerLayer, bottom, top);
}
public void EuclideanLossLayer_BackwardGradient()
{
    // Loss layers are set up explicitly so the top blob exists before checking.
    var lossLayer = new EuclideanLossLayer();
    lossLayer.Setup(bottom, top);

    // Tight step (1e-6) but loose threshold (4e-1): numeric differencing of a
    // quadratic loss is comparatively noisy.
    var gradientChecker = new GradientChecker(1e-6f, 4e-1f);
    gradientChecker.CheckSingle(lossLayer, bottom, top, -1, -1, -1);
}
public void TanhLayer_BackwardGradient()
{
    // Element-wise gradient check for tanh with a seeded (1701) perturbation.
    var tanhLayer = new TanhLayer();
    var gradientChecker = new GradientChecker(1e-2f, 1e-3f, 1701, 0.0d, 0.01f);
    gradientChecker.CheckEltwise(tanhLayer, bottom, top);
}
public void SoftmaxLayer_BackwardGradient()
{
    var softmaxLayer = new SoftmaxLayer();
    softmaxLayer.Setup(bottom, labels);

    var gradientChecker = new GradientChecker(1e-2f, 1e-2f);
    // Check only the data blob (bottom index 0); the labels carry no gradient.
    gradientChecker.CheckSingle(softmaxLayer, bottom, labels, 0, -1, -1);
}
public void ReluLayer_BackwardGradientWithLeakyUnits()
{
    // Same check as the plain ReLU test, but with a leaky negative slope of 0.01.
    var leakyRelu = new ReluLayer(new ReluLayerConfiguration(0.01f));
    var gradientChecker = new GradientChecker(1e-2f, 1e-2f, 1701, 0.0d, 0.01f);
    gradientChecker.CheckEltwise(leakyRelu, bottom, top);
}
public void DropoutLayer_BackwardGradientTrainPhase()
{
    // Dropout is only stochastic in the training phase; force it here.
    // NOTE(review): this mutates global state and is not restored afterwards —
    // presumably reset elsewhere in the fixture; confirm.
    Context.Instance.Phase = PhaseType.Train;

    var dropoutLayer = new DropoutLayer();
    var gradientChecker = new GradientChecker(1e-2f, 1e-3f);
    gradientChecker.CheckEltwise(dropoutLayer, bottom, top);
}
public void AveragePoolingLayer_BackwardGradient( Size kernel, Size stride, Size padding )
{
    // A constant-valued input makes the average-pool gradient uniform.
    new ConstantFiller(2.0d).Fill(bottom);

    var poolingLayer = new AveragePoolingLayer(kernel, stride, padding);
    var gradientChecker = new GradientChecker(1e-2f, 1e-2f);
    gradientChecker.CheckExhaustive(poolingLayer, bottom, top);
}
public void InnerProductLayer_BackwardGradient(int output, FillerConfiguration weightsFiller, FillerConfiguration biasFiller)
{
    // A null bias filler configures the layer to run without a bias term.
    var configuration = new InnerProductLayerConfiguration(
        output, biasFiller != null, weightsFiller, biasFiller);
    var innerProduct = new InnerProductLayer(configuration);

    var gradientChecker = new GradientChecker(1e-4f, 1e-3f);
    gradientChecker.CheckExhaustive(innerProduct, bottom, top);
}
public void DropoutLayer_BackwardGradientTestPhase()
{
    // In the test phase dropout behaves deterministically; force it here.
    // NOTE(review): global phase state is mutated and not restored — presumably
    // reset elsewhere in the fixture; confirm.
    Context.Instance.Phase = PhaseType.Test;

    var dropoutLayer = new DropoutLayer();
    var gradientChecker = new GradientChecker(1e-2f, 1e-3f);
    gradientChecker.CheckEltwise(dropoutLayer, bottom, top);
}
public void AveragePoolingLayer_BackwardGradient(Size kernel, Size stride, Size padding)
{
    // Fill the input with a constant so every pooled window averages to the
    // same value, giving a uniform, easily-verified gradient.
    var constantFiller = new ConstantFiller(2.0d);
    constantFiller.Fill(bottom);

    var checker = new GradientChecker(1e-2f, 1e-2f);
    checker.CheckExhaustive(new AveragePoolingLayer(kernel, stride, padding), bottom, top);
}
/// <summary>
/// Gradient check for the power layer, computing (scale*x + shift)^power.
/// For powers other than 0, 1 and 2 the function is only defined when
/// scale*x + shift >= 0, i.e. x >= -shift/scale, so out-of-range bottom
/// values are first mirrored about that lower bound in place
/// (x := minValue + (minValue - x)) before the element-wise check runs.
/// The checker is seeded (1701) with a mean-0, 0.01-spread perturbation
/// for reproducibility.
/// NOTE(review): assumes scale != 0 when the mirroring branch runs — a zero
/// scale would divide by zero computing minValue; confirm callers never pass 0.
/// </summary>
public void PowerLayer_Backward(double power, double scale, double shift) { var config = new PowerLayerConfiguration(power, scale, shift); var layer = new PowerLayer(config); if ( power != 0 && power != 1 && power != 2 ) { var minValue = -shift / scale; using (var bottomCpu = bottom.OnCpu()) { var bottomData = bottomCpu.Data; for (int i = 0; i < bottom.Count; i++) { if (bottomData[i] < minValue) bottomData[i] = minValue + (minValue - bottomData[i]); } } } var checker = new GradientChecker(1e-2f, 1e-2f, 1701, 0.0d, 0.01f); checker.CheckEltwise(layer, bottom, top); }