public void TestSIN()
{
    var activation = new ActivationSIN();
    Assert.IsTrue(activation.HasDerivative);

    var clone = activation.Clone();
    Assert.IsInstanceOfType(clone, typeof(ActivationSIN));

    double[] input = { 0.0, Math.PI / 4, Math.PI / 2 };
    activation.ActivationFunction(input, 0, 3); // it's actually Sin(2x)
    Assert.AreEqual(0.0, input[0], 0.01);
    Assert.AreEqual(1.0, input[1], 0.01);
    Assert.AreEqual(0.0, input[2], 0.01);

    // test the derivative at the same three points
    input[0] = activation.DerivativeFunction(0, input[0]);
    input[1] = activation.DerivativeFunction(Math.PI / 4, input[1]);
    input[2] = activation.DerivativeFunction(Math.PI / 2, input[2]);
    Assert.AreEqual(1.0, input[0], 0.01);
    Assert.AreEqual(0.0, input[1], 0.01);
    Assert.AreEqual(-1.0, input[2], 0.01);
}
public void TestSIN()
{
    var activation = new ActivationSIN();
    Assert.IsTrue(activation.HasDerivative);

    var clone = (ActivationSIN) activation.Clone();
    Assert.IsNotNull(clone);

    double[] input = { 0.0 };
    activation.ActivationFunction(input, 0, 1);
    Assert.AreEqual(0.0, input[0], 0.1);

    // test the derivative; HasDerivative is true, so this call must not throw
    input[0] = activation.DerivativeFunction(input[0], input[0]);
    Assert.AreEqual(1.0, input[0], 0.1);
}
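// For reference, a minimal standalone sketch of the math the tests above assume.
// This is inferred from the assertions, not taken from Encog's actual ActivationSIN
// source: the activation computes Sin(2x), and the reported derivative is Cos(2x)
// (which matches the asserted values 1, 0, -1 at x = 0, PI/4, PI/2).
static class SinActivationSketch
{
    // Applies f(x) = sin(2x) in place over the range [start, start + size).
    public static void Activate(double[] x, int start, int size)
    {
        for (int i = start; i < start + size; i++)
        {
            x[i] = Math.Sin(2.0 * x[i]);
        }
    }

    // Derivative with respect to the pre-activation value b: f'(b) = cos(2b).
    // The second argument mirrors the (b, a) signature used in the tests; it is
    // unused here because the derivative depends only on b.
    public static double Derivative(double b, double a)
    {
        return Math.Cos(2.0 * b);
    }
}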
void AddLayers(List<LayerConfig> gen)
{
    foreach (var g in gen)
    {
        // Map the numeric activation type to the corresponding activation function.
        IActivationFunction act;
        switch (g.ActivationType)
        {
            case 0: act = new ActivationBiPolar(); break;
            case 1: act = new ActivationBipolarSteepenedSigmoid(); break;
            case 2: act = new ActivationClippedLinear(); break;
            case 3: act = new ActivationCompetitive(); break;
            case 4: act = new ActivationElliott(); break;
            case 5: act = new ActivationElliottSymmetric(); break;
            case 6: act = new ActivationGaussian(); break;
            case 7: act = new ActivationLinear(); break;
            case 8: act = new ActivationLOG(); break;
            case 9: act = new ActivationRamp(); break;
            case 10: act = new ActivationRamp(); break; // note: duplicates case 9
            case 11: act = new ActivationSigmoid(); break;
            case 12: act = new ActivationSIN(); break;
            case 13: act = new ActivationSoftMax(); break;
            case 14: act = new ActivationSteepenedSigmoid(); break;
            case 15: act = new ActivationStep(); break;
            case 16: act = new ActivationTANH(); break;
            default: act = new ActivationSoftMax(); break;
        }
        network.AddLayer(new BasicLayer(act, g.hasBias, g.neurons));
    }
}
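// A hypothetical usage sketch for AddLayers. The LayerConfig member names
// (ActivationType, hasBias, neurons) come from the method above; their being
// publicly settable, and the finalize/reset calls on the network, are
// assumptions based on Encog's BasicNetwork API rather than this code base.
void BuildExampleNetwork()
{
    var layers = new List<LayerConfig>
    {
        new LayerConfig { ActivationType = 7, hasBias = true, neurons = 4 },   // 7 = linear input layer
        new LayerConfig { ActivationType = 11, hasBias = true, neurons = 6 },  // 11 = sigmoid hidden layer
        new LayerConfig { ActivationType = 13, hasBias = false, neurons = 3 }, // 13 = softmax output layer
    };
    AddLayers(layers);
    network.Structure.FinalizeStructure();
    network.Reset();
}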