public void TestRamp()
{
    var activation = new ActivationRamp(2, -2, 3, 1);
    Assert.IsTrue(activation.HasDerivative);

    var clone = activation.Clone();
    Assert.IsInstanceOfType(clone, typeof(ActivationRamp));

    // Clone should have the same parameters.
    CollectionAssert.AreEqual(activation.Params, ((ActivationRamp)clone).Params);

    double[] input = { -3, -2, 0, 2, 3 };
    activation.ActivationFunction(input, 0, 5);
    Assert.AreEqual(1.0, input[0], EncogFramework.DefaultDoubleEqual);
    Assert.AreEqual(1.0, input[1], EncogFramework.DefaultDoubleEqual);
    Assert.AreEqual(2.0, input[2], EncogFramework.DefaultDoubleEqual);
    Assert.AreEqual(3.0, input[3], EncogFramework.DefaultDoubleEqual);
    Assert.AreEqual(3.0, input[4], EncogFramework.DefaultDoubleEqual);

    // Expected derivative: the ramp slope (0.5) inside the thresholds, zero outside.
    input[0] = activation.DerivativeFunction(-3, input[0]);
    input[2] = activation.DerivativeFunction(0, input[2]);
    input[4] = activation.DerivativeFunction(3, input[4]);
    Assert.AreEqual(0.0, input[0], EncogFramework.DefaultDoubleEqual);
    Assert.AreEqual(0.5, input[2], EncogFramework.DefaultDoubleEqual);
    Assert.AreEqual(0.0, input[4], EncogFramework.DefaultDoubleEqual);
}
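// A minimal sketch (not part of the Encog API) of the arithmetic behind the expected
// values above, assuming ActivationRamp(2, -2, 3, 1) maps to thresholdHigh = 2,
// thresholdLow = -2, high = 3, low = 1: inside the thresholds the output is linear with
// slope (high - low) / (thresholdHigh - thresholdLow) = (3 - 1) / (2 - (-2)) = 0.5,
// and outside them it is clamped. The helper name RampValue is hypothetical.
static double RampValue(double x, double thresholdHigh, double thresholdLow, double high, double low)
{
    if (x < thresholdLow) return low;    // below the ramp: clamp to low (x = -3 -> 1)
    if (x > thresholdHigh) return high;  // above the ramp: clamp to high (x = 3 -> 3)
    double slope = (high - low) / (thresholdHigh - thresholdLow);
    return low + slope * (x - thresholdLow); // x = 0 -> 1 + 0.5 * (0 - (-2)) = 2
}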
void AddLayers(List<LayerConfig> gen)
{
    foreach (var g in gen)
    {
        // Map the numeric activation code to an Encog activation function.
        // (The original also pre-assigned ActivationBiPolar in a redundant
        // if-block before this switch; case 0 already covers it.)
        IActivationFunction act;
        switch (g.ActivationType)
        {
            case 0:  act = new ActivationBiPolar(); break;
            case 1:  act = new ActivationBipolarSteepenedSigmoid(); break;
            case 2:  act = new ActivationClippedLinear(); break;
            case 3:  act = new ActivationCompetitive(); break;
            case 4:  act = new ActivationElliott(); break;
            case 5:  act = new ActivationElliottSymmetric(); break;
            case 6:  act = new ActivationGaussian(); break;
            case 7:  act = new ActivationLinear(); break;
            case 8:  act = new ActivationLOG(); break;
            case 9:  act = new ActivationRamp(); break;
            case 10: act = new ActivationRamp(); break; // note: duplicates case 9 in the original mapping
            case 11: act = new ActivationSigmoid(); break;
            case 12: act = new ActivationSIN(); break;
            case 13: act = new ActivationSoftMax(); break;
            case 14: act = new ActivationSteepenedSigmoid(); break;
            case 15: act = new ActivationStep(); break;
            case 16: act = new ActivationTANH(); break;
            default: act = new ActivationSoftMax(); break;
        }
        network.AddLayer(new BasicLayer(act, g.hasBias, g.neurons));
    }
}
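// Usage sketch, under the assumption that LayerConfig exposes the ActivationType,
// hasBias, and neurons members referenced above and that network is a BasicNetwork
// field; the activation-type codes follow the switch mapping (7 = ActivationLinear,
// 11 = ActivationSigmoid). FinalizeStructure and Reset are standard Encog calls,
// but the method name BuildExampleNetwork and the layer sizes are hypothetical.
void BuildExampleNetwork()
{
    network = new BasicNetwork();
    AddLayers(new List<LayerConfig>
    {
        new LayerConfig { ActivationType = 7,  hasBias = true,  neurons = 2 }, // linear input layer
        new LayerConfig { ActivationType = 11, hasBias = true,  neurons = 4 }, // sigmoid hidden layer
        new LayerConfig { ActivationType = 11, hasBias = false, neurons = 1 }  // sigmoid output layer
    });
    network.Structure.FinalizeStructure(); // Encog requires this before training or computing
    network.Reset();                       // randomize the initial weights
}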