public void ForwardBackwardTest()
{
    Shape shape = new Shape(new int[] { 2 });
    SigmoidLayer layer = new SigmoidLayer(shape);
    Session session = new Session();

    Tensor source = new Tensor(null, shape);
    source.Set(new float[] { 2, -3 });

    // forward pass: y = sigmoid(x)
    Tensor x = source.Clone() as Tensor;
    Tensor y = layer.Forward(session, new[] { x })[0];

    float[] expected = source.Weights
        .Take(source.Length)
        .Select(w => SigmoidLayerTest.activation(w))
        .ToArray();
    Helpers.AreArraysEqual(x.Length, expected, y.Weights);

    // unroll the graph and verify the backward pass: dx = sigmoid'(y) * dy
    float[] dy = Enumerable.Range(1, x.Length).Select(w => (float)w).ToArray();
    y.SetGradient(dy);
    session.Unroll();

    Helpers.AreArraysEqual(
        expected.Length,
        expected.Zip(dy, (w, dw) => SigmoidLayerTest.derivative(w) * dw).ToArray(),
        x.Gradient);
}
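// The test above calls SigmoidLayerTest.activation and SigmoidLayerTest.derivative, which
// are not shown in this snippet. A minimal sketch of what these helpers might look like,
// assuming the standard sigmoid; note that derivative takes the already-activated value
// y = sigmoid(x), since the test applies it to the forward outputs held in `expected`.
private static float activation(float x)
{
    // sigmoid(x) = 1 / (1 + e^-x)
    return 1.0f / (1.0f + (float)Math.Exp(-x));
}

private static float derivative(float y)
{
    // for y = sigmoid(x), dy/dx = y * (1 - y)
    return y * (1.0f - y);
}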
public void SigmoidLayer_Forward()
{
    var layer = new SigmoidLayer();
    layer.Setup(bottom, top);
    layer.Forward(bottom, top);

    Assert.Equal(bottom.Count, top.Count);

    using (var topCpu = top.OnCpu())
    using (var bottomCpu = bottom.OnCpu())
    {
        int count = bottom.Count;
        for (int i = 0; i < count; i++)
        {
            // each output must match sigmoid(x) = 1 / (1 + e^-x)
            Assert.True(MathHelpers.Equality(topCpu.DataAt(i), 1.0d / (1.0d + Math.Exp(-bottomCpu.DataAt(i)))));

            // check that we squashed the value between 0 and 1
            Assert.True(topCpu.DataAt(i) >= 0.0d);
            Assert.True(topCpu.DataAt(i) <= 1.0d);
        }
    }
}
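// Standalone sanity check (not part of the test suite above): the analytic derivative used in
// ForwardBackwardTest, y * (1 - y), can be cross-checked against a central finite difference of
// the sigmoid itself. This sketch is framework-free C# and assumes only System plus the
// activation/derivative helpers sketched earlier.
private static void FiniteDifferenceSigmoidCheck()
{
    const float eps = 1e-3f;
    float[] inputs = { 2.0f, -3.0f };   // same inputs as ForwardBackwardTest

    foreach (float x in inputs)
    {
        float y = activation(x);
        float analytic = derivative(y);                                               // y * (1 - y)
        float numeric = (activation(x + eps) - activation(x - eps)) / (2.0f * eps);   // central difference

        if (Math.Abs(analytic - numeric) > 1e-3f)
        {
            throw new Exception($"Sigmoid gradient mismatch at x={x}: {analytic} vs {numeric}");
        }
    }
}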