Example #1
0
        public void ForwardBackwardTest()
        {
            // Arrange: a sigmoid layer over a 1-D shape with two elements.
            Shape        shape = new Shape(new int[] { 2 });
            SigmoidLayer layer = new SigmoidLayer(shape);
            Session session = new Session();

            Tensor source = new Tensor(null, shape);
            source.Set(new float[] { 2, -3 });

            // Forward on a clone so the source tensor stays untouched.
            Tensor input = source.Clone() as Tensor;
            Tensor output = layer.Forward(session, new[] { input })[0];

            // Forward check: element-wise sigmoid of the source weights.
            float[] expected = source.Weights
                .Take(source.Length)
                .Select(w => SigmoidLayerTest.activation(w))
                .ToArray();
            Helpers.AreArraysEqual(input.Length, expected, output.Weights);

            // Backward: seed the output gradient with 1..N, then unroll the graph.
            float[] outputGradient = Enumerable.Range(1, input.Length).Select(w => (float)w).ToArray();
            output.SetGradient(outputGradient);
            session.Unroll();

            // Backward check: dx = sigmoid'(y) * dy per element.
            Helpers.AreArraysEqual(
                expected.Length,
                expected.Zip(outputGradient, (w, dw) => SigmoidLayerTest.derivative(w) * dw).ToArray(),
                input.Gradient);
        }
Example #2
0
        public void CloneTest()
        {
            // A clone must serialize to exactly the same JSON as its source.
            var original = new SigmoidLayer(new Shape(new int[] { 2 }));
            var copy = original.Clone() as SigmoidLayer;

            string expectedJson = JsonConvert.SerializeObject(original);
            string actualJson = JsonConvert.SerializeObject(copy);
            Assert.AreEqual(expectedJson, actualJson);
        }
        // Round-trips a SigmoidLayer through binary serialization and verifies that
        // all input/output dimensions survive the trip.
        // NOTE(review): BinaryFormatter is obsolete and insecure (removed in .NET 9);
        // consider migrating this test to a supported serializer when the layer does.
        public void SerializationTest()
        {
            // Create a SigmoidLayer
            var layer = new SigmoidLayer();

            // 10x10x3 input geometry; the layer derives its output geometry from this.
            layer.Init(10, 10, 3);

            SigmoidLayer deserialized;

            using (var ms = new MemoryStream())
            {
                // Serialize
                IFormatter formatter = new BinaryFormatter();
                formatter.Serialize(ms, layer);

                // Deserialize
                // Rewind before reading the stream back.
                ms.Position  = 0;
                deserialized = formatter.Deserialize(ms) as SigmoidLayer;
            }

            // Every dimension must be preserved by the round trip.
            Assert.AreEqual(layer.InputDepth, deserialized.InputDepth);
            Assert.AreEqual(layer.InputHeight, deserialized.InputHeight);
            Assert.AreEqual(layer.InputWidth, deserialized.InputWidth);
            Assert.AreEqual(layer.OutputDepth, deserialized.OutputDepth);
            Assert.AreEqual(layer.OutputHeight, deserialized.OutputHeight);
            Assert.AreEqual(layer.OutputWidth, deserialized.OutputWidth);
        }
        public void SigmoidLayer_BackwardGradient()
        {
            // Numerically verify the element-wise sigmoid backward pass.
            var layer = new SigmoidLayer();
            var checker = new GradientChecker(1e-2f, 1e-3f, 1701, 0.0d, 0.01f);

            checker.CheckEltwise(layer, bottom, top);
        }
Example #5
0
        public void CanUseBigSigmoidLayer()
        {
            const int inputSize = 1000;
            const int outputSize = 2000;

            // Identity weights on the first 1000 rows; the remaining 1000 rows stay zero.
            double[,] weights = new double[outputSize, inputSize];
            double[] input = new double[inputSize];
            for (int i = 0; i < inputSize; i++)
            {
                weights[i, i] = 1.0;
                input[i] = (double)i;
            }

            NetworkVector inputvector = new NetworkVector(input);
            Layer layer = new SigmoidLayer(weights);

            layer.Run(inputvector);
            double[] result = layer.Output.ToArray();

            // First half: sigmoid(i). Second half: sigmoid(0), because those rows are all zero.
            double sigmoidOfZero = sigmoid(0.0);
            for (int i = 0; i < inputSize; i++)
            {
                int j = i + inputSize;
                Assert.AreEqual(sigmoid((double)i), result[i], "Failed for i = " + i);
                Assert.AreEqual(sigmoidOfZero, result[j], "Failed for j = " + j);
            }
        }
Example #6
0
        public void CopyConstructorTest1()
        {
            // The copy constructor must yield a layer serializing identically to the source.
            var original = new SigmoidLayer(new Shape(new int[] { 2 }));
            var copy = new SigmoidLayer(original);

            string expectedJson = JsonConvert.SerializeObject(original);
            string actualJson = JsonConvert.SerializeObject(copy);
            Assert.AreEqual(expectedJson, actualJson);
        }
Example #7
0
        /// <summary>
        /// Builds a new network from the bottom half of a saved base network,
        /// topped with a fresh zero-initialized 10-neuron sigmoid output layer.
        /// </summary>
        /// <param name="PBaseNetName">Path/name of the saved base network. Must not be null.</param>
        /// <returns>The assembled <see cref="Network"/>.</returns>
        /// <exception cref="ArgumentNullException">Thrown when <paramref name="PBaseNetName"/> is null.</exception>
        private Network CreateNetwork(string PBaseNetName)
        {
            if (PBaseNetName == null)
            {
                // BUG FIX: was `throw new Exception("AAARGGHH!")` — a bare Exception with a
                // joke message. A specific argument exception is catchable and self-describing.
                throw new ArgumentNullException(nameof(PBaseNetName));
            }

            Network tempacnet    = Network.Load(PBaseNetName);
            int     numlayersnew = tempacnet.NumLayers / 2 + 1;

            ILayer[]     layers     = new ILayer[numlayersnew];
            double[][][] weights    = new double[numlayersnew][][];
            double[]     learnrates = new double[numlayersnew];

            // Copy the bottom half of the base network verbatim.
            for (int i = 0; i < numlayersnew; i++)
            {
                layers[i]     = tempacnet.Layers[i];
                weights[i]    = tempacnet.Weights[i];
                learnrates[i] = tempacnet.Learnrates[i];
            }

            // Replace the top layer with a fresh 10-neuron sigmoid layer.
            layers[numlayersnew - 1]  = new SigmoidLayer(10, 0.001);
            weights[numlayersnew - 1] = new double[10][];

            // Hoisted: the fan-in of the new top layer is loop-invariant.
            int numneurons = layers[numlayersnew - 2].NumNeurons;
            for (int i = 0; i < 10; i++)
            {
                // `new double[n]` is already all zeros in C#; no explicit fill needed.
                weights[numlayersnew - 1][i] = new double[numneurons];
            }
            learnrates[numlayersnew - 1] = 0.001;

            return(new Network(tempacnet.InputLayer, layers, weights, null, learnrates));
        }
Example #8
0
        // Trains a 3-layer (linear -> sigmoid -> sigmoid) backpropagation network on the
        // given datasets and fills datasets.ForecastedData with one forecast per input row.
        // Raises start/epoch/finish component events around the run.
        public override void Train(IForecastingDataSets datasets)
        {
            OnStartRunning(new ComponentRunEventArgs(datasets));
            // NOTE(review): assumes mParameter is an AnnModelParameter — a different
            // runtime type makes `para` null and the next line throws. TODO confirm.
            AnnModelParameter para = mParameter as AnnModelParameter;

            // Input width follows the first input vector's length.
            LinearLayer inputLayer = new LinearLayer(datasets.InputData[0].Length);

            // Single hidden layer (size from parameters) and a scalar output.
            SigmoidLayer hiddenLayer = new SigmoidLayer(para.HiddenNeuronsCount[0]);
            SigmoidLayer outputLayer = new SigmoidLayer(1);

            // Connector constructors wire the layers as a side effect; weights start
            // as small uniform random values in [0, 0.3).
            new BackpropagationConnector(inputLayer, hiddenLayer).Initializer  = new RandomFunction(0d, 0.3d);
            new BackpropagationConnector(hiddenLayer, outputLayer).Initializer = new RandomFunction(0d, 0.3d);
            network = new BackpropagationNetwork(inputLayer, outputLayer);
            network.SetLearningRate(para.LearningRate);
            network.JitterEpoch      = para.JitterEpoch;
            network.JitterNoiseLimit = para.JitterNoiseLimit;
            network.EndEpochEvent   += new TrainingEpochEventHandler(
                delegate(object senderNetwork, TrainingEpochEventArgs args)
            {
                // TODO: trainning error needs to be calculated
                OnRunningEpoch(new AnnModelRunEpochEventArgs(args.TrainingIteration + 1, 0));
            });

            network.Learn(ForecastingDataSets.ConvertToTrainingSet(datasets), para.Iterations);

            // One single-element forecast row per input row.
            datasets.ForecastedData = new double[datasets.InputData.Length][];
            for (int i = 0; i < datasets.InputData.Length; i++)
            {
                datasets.ForecastedData[i]    = new double[1];
                datasets.ForecastedData[i][0] = Forecast(datasets.InputData[i]);
            }
            OnFinishRunning(new ComponentRunEventArgs(datasets));
        }
Example #9
0
        public void ComputeTwiceGradientShouldYieldTheSameResult()
        {
            const int width = 20;
            const int height = 20;
            const int depth = 2;

            var layer = new SigmoidLayer <double>();
            layer.Init(width, height, depth);

            // Forward pass over a random input volume.
            var input = BuilderInstance <double> .Volume.Random(new Shape(width, height, depth));
            var output = layer.DoForward(input, true);

            // All-ones gradient flowing back from the output.
            var outputGradient = BuilderInstance <double> .Volume.SameAs(new double[output.Shape.TotalLength].Populate(1.0), output.Shape);

            // Running the backward pass twice with the same gradient must be idempotent
            // with respect to the stored input-activation gradients.
            layer.Backward(outputGradient);
            var firstRun = ((Volume <double>)layer.InputActivationGradients.Clone()).ToArray();

            layer.Backward(outputGradient);
            var secondRun = ((Volume <double>)layer.InputActivationGradients.Clone()).ToArray();

            Assert.IsTrue(firstRun.SequenceEqual(secondRun));
        }
Example #10
0
        public void Test_Sigmoid_Execute()
        {
            sigmoid = new SigmoidLayer();

            // 2x3 single-channel, single-batch input with mixed signs.
            Data2D data = new Data2D(2, 3, 1, 1);
            data[0, 0, 0, 0] = 4;
            data[0, 1, 0, 0] = 2;
            data[0, 2, 0, 0] = -2;
            data[1, 0, 0, 0] = 3;
            data[1, 1, 0, 0] = -1;
            data[1, 2, 0, 0] = -3;

            sigmoid.SetInput(data);
            sigmoid.Execute();

            Data2D output = sigmoid.GetOutput() as Data2D;

            // Every output element must equal the sigmoid of its input element.
            double[,] inputs = { { 4.0, 2.0, -2.0 }, { 3.0, -1.0, -3.0 } };
            for (int h = 0; h < 2; h++)
            {
                for (int w = 0; w < 3; w++)
                {
                    Assert.AreEqual(output[h, w, 0, 0], SigmoidFunc(inputs[h, w]), 1e-8);
                }
            }
        }
Example #11
0
        /// <summary>
        /// Appends a new <see cref="SigmoidLayer{T}"/> after <paramref name="layer"/>
        /// and returns it so calls can be chained fluently.
        /// </summary>
        public static SigmoidLayer<T> Sigmoid<T>(this LayerBase<T> layer) where T : struct, IEquatable<T>, IFormattable
        {
            var sigmoid = new SigmoidLayer<T>();
            // BUG FIX: was `sigmoid.AcceptParent(sigmoid)` — the layer was parented to
            // itself and the extension receiver was ignored, so the new layer was never
            // attached to the graph. Parent it to the receiver instead (cf. the sibling
            // overloads that call layer.ConnectTo(sigmoid)).
            sigmoid.AcceptParent(layer);

            return sigmoid;
        }
Example #12
0
        /// <summary>Builds the default neural network: 23 linear inputs feeding 100 sigmoid outputs.</summary>
        public virtual NeuronDotNet.Core.Network getNeural()
        {
            var input = new LinearLayer(23);
            var output = new SigmoidLayer(100);

            return new BackpropagationNetwork(input, output);
        }
Example #13
0
    void CreateNewNetwork()
    {
        // Topology: input -> hidden1 -> hidden2 -> output.
        LinearLayer inputLayer = new LinearLayer(neurons);
        SigmoidLayer hiddenLayer = new SigmoidLayer(hidden1Neurons);
        SigmoidLayer hiddenLayer2 = new SigmoidLayer(hidden2Neurons);
        LinearLayer outputLayer = new LinearLayer(outputNum);

        // Each connector gets its own small-uniform random initializer.
        var conn1 = new BackpropagationConnector(inputLayer, hiddenLayer)
        {
            Initializer = new RandomFunction(0d, 0.001d)
        };
        var conn3 = new BackpropagationConnector(hiddenLayer, hiddenLayer2)
        {
            Initializer = new RandomFunction(0d, 0.001d)
        };
        var conn2 = new BackpropagationConnector(hiddenLayer2, outputLayer)
        {
            Initializer = new RandomFunction(0d, 0.001d)
        };

        conn1.Initialize();
        conn2.Initialize();
        conn3.Initialize();

        neuralNetwork = new BackpropagationNetwork(inputLayer, outputLayer);
        neuralNetwork.SetLearningRate(learningRate);

        neuralNetwork.Initialize();
    }
Example #14
0
        // Assembles a backpropagation network: linear input -> configured hidden layers
        // -> sigmoid output, chaining a connector between each consecutive pair.
        // trainInVectorDimension / trainOutVectorDimension size the input/output layers.
        public BackpropagationNetwork network(int trainInVectorDimension, int trainOutVectorDimension)
        {
            this.hiddenLayerList = HiddenLayerList();

            ActivationLayer inputLayer  = new LinearLayer(trainInVectorDimension);
            ActivationLayer outputLayer = new SigmoidLayer(trainOutVectorDimension);

            // Connector constructors register themselves with the layers as a side
            // effect; the bpc0 local is only reused, its value is never read.
            BackpropagationConnector bpc0 = new BackpropagationConnector(inputLayer, this.hiddenLayerList[0]);

            for (int i = 1; i < this.hiddenLayerList.Count; i++)
            {
                bpc0 = new BackpropagationConnector(this.hiddenLayerList[i - 1], this.hiddenLayerList[i]);
            }
            bpc0 = new BackpropagationConnector(this.hiddenLayerList[this.hiddenLayerList.Count - 1], outputLayer);

            BackpropagationNetwork network = new BackpropagationNetwork(inputLayer, outputLayer);

            /*ActivationLayer inputLayer = hiddenLayerList[0];
             * ActivationLayer outputLayer = hiddenLayerList[hiddenLayerList.Count - 1];
             *
             * if(hiddenLayerList.Count != 2)
             * {
             *  BackpropagationConnector bpc0 = new BackpropagationConnector(inputLayer, this.hiddenLayerList[1]);
             *  for (int i = 2; i < this.hiddenLayerList.Count - 1; i++)
             *  {
             *      bpc0 = new BackpropagationConnector(this.hiddenLayerList[i - 1], this.hiddenLayerList[i]);
             *  }
             *  bpc0 = new BackpropagationConnector(this.hiddenLayerList[this.hiddenLayerList.Count - 2], outputLayer);
             * }
             *
             * BackpropagationNetwork network = new BackpropagationNetwork(inputLayer, outputLayer);*/
            network.SetLearningRate(this.learningRate);

            return(network);
        }
Example #15
0
        /// <summary>Appends a sigmoid activation layer after <paramref name="layer"/> and returns it.</summary>
        public static SigmoidLayer <T> Sigmoid <T>(this LayerBase <T> layer) where T : struct, IEquatable <T>, IFormattable
        {
            var sigmoid = new SigmoidLayer <T>();
            layer.ConnectTo(sigmoid);
            return sigmoid;
        }
Example #16
0
        /// <summary>Appends a sigmoid activation layer after <paramref name="layer"/> and returns it.</summary>
        public static SigmoidLayer Sigmoid(this LayerBase layer)
        {
            var sigmoid = new SigmoidLayer();
            layer.ConnectTo(sigmoid);
            return sigmoid;
        }
Example #17
0
        public void ArchitectureConstructorTest1()
        {
            // The layer must adopt the given shape and record its architecture string.
            var expectedShape = new Shape(new int[] { 2 });
            var layer = new SigmoidLayer(expectedShape, "SIG", null);

            CollectionAssert.AreEqual(expectedShape.Axes, layer.OutputShape.Axes);
            Assert.AreEqual("SIG", layer.Architecture);
        }
Example #18
0
        public void SigmoidLayer_BackwardGradient()
        {
            // Gradient-check the sigmoid's backward pass element-wise against
            // numeric differentiation (step 1e-2, tolerance 1e-3, fixed seed 1701).
            var layer = new SigmoidLayer();
            var checker = new GradientChecker(1e-2f, 1e-3f, 1701, 0.0d, 0.01f);
            checker.CheckEltwise(layer, bottom, top);
        }
Example #19
0
        public void SerializeTest()
        {
            // JSON round-trip must be lossless: serialize, deserialize, re-serialize.
            var original = new SigmoidLayer(new Shape(new int[] { 2 }));
            string firstPass = JsonConvert.SerializeObject(original);

            var roundTripped = JsonConvert.DeserializeObject <SigmoidLayer>(firstPass);
            string secondPass = JsonConvert.SerializeObject(roundTripped);

            Assert.AreEqual(firstPass, secondPass);
        }
Example #20
0
        public void CanMakeSigmoidLayer()
        {
            // A 2x2 weight matrix is enough to construct the layer.
            var weights = new double[, ] { { 1, 2 }, { 3, 4 } };

            Layer layer = new SigmoidLayer(weights);

            Assert.IsNotNull(layer);
        }
Example #21
0
 private void Form1_OnLoad(object sender, EventArgs e)
 {
     // Build a 35-3-5 feed-forward network on form load.
     inputTier = new LinearLayer(35);
     hiddenTier = new SigmoidLayer(3);
     outputTier = new SigmoidLayer(5);

     // Connectors register themselves with the layers on construction;
     // the references themselves are not needed afterwards.
     _ = new BackpropagationConnector(inputTier, hiddenTier);
     _ = new BackpropagationConnector(hiddenTier, outputTier);

     neuralNetwork = new BackpropagationNetwork(inputTier, outputTier);
     neuralNetwork.Initialize();
 }
 private Network CreateNetwork()
 {
     // 784 -> 300 -> 10 sigmoid stack, uniform 0.001 learning rate throughout.
     const double rate = 0.001;
     ILayer[] layers =
     {
         new SigmoidLayer(300, rate),
         new SigmoidLayer(10, rate),
     };
     double[] learnrates = { rate, rate };
     return new Network(new SigmoidLayer(784, rate), layers, learnrates);
 }
 private Network CreateNetwork()
 {
     // autoencoderoutputs -> 300 -> 10 sigmoid stack, uniform 0.005 learning rate.
     const double rate = 0.005;
     ILayer[] layers =
     {
         new SigmoidLayer(300, rate),
         new SigmoidLayer(10, rate),
     };
     double[] learnrates = { rate, rate };
     return new Network(new SigmoidLayer(autoencoderoutputs, rate), layers, learnrates);
 }
        public void SigmoidLayer_Setup()
        {
            // Setup must shape the top blob exactly like the bottom blob.
            var layer = new SigmoidLayer();

            layer.Setup(bottom, top);

            Assert.Equal(bottom.Num, top.Num);
            Assert.Equal(bottom.Channels, top.Channels);
            Assert.Equal(bottom.Height, top.Height);
            Assert.Equal(bottom.Width, top.Width);
        }
        public void GradientWrtInputCheck()
        {
            // 20x20x2 input volume.
            const int width = 20, height = 20, depth = 2;

            var layer = new SigmoidLayer();

            // Numerically verify the gradient with respect to the input.
            GradientCheckTools.GradientCheck(layer, width, height, depth);
        }
        public void GradientWrtInputCheck()
        {
            // Gradient w.r.t. input over a 20x20x2 volume, checked numerically.
            const int width = 20;
            const int height = 20;
            const int depth = 2;

            var sigmoid = new SigmoidLayer();
            GradientCheckTools.GradientCheck(sigmoid, width, height, depth);
        }
Example #27
0
        public void SigmoidLayer_Setup()
        {
            // After setup, the top blob must mirror the bottom blob's dimensions.
            var layer = new SigmoidLayer();
            layer.Setup(bottom, top);

            foreach (var (expected, actual) in new[]
            {
                (bottom.Num, top.Num),
                (bottom.Channels, top.Channels),
                (bottom.Height, top.Height),
                (bottom.Width, top.Width),
            })
            {
                Assert.Equal(expected, actual);
            }
        }
Example #28
0
        private void WFAnnRecognition_Load(object sender, EventArgs e)
        {
            lstLog.Items.Insert(0, "Initialize ANN model");

            // 35-3-5 topology: input features, small sigmoid hidden layer, 5-class output.
            inputLayer = new LinearLayer(35);
            hiddenLayer = new SigmoidLayer(3);
            outputLayer = new SigmoidLayer(5);

            // Connectors wire the layers together as a constructor side effect.
            var connector = new BackpropagationConnector(inputLayer, hiddenLayer);
            var connector2 = new BackpropagationConnector(hiddenLayer, outputLayer);

            network = new BackpropagationNetwork(inputLayer, outputLayer);
            network.Initialize();
        }
Example #29
0
 public Imagine(int w, int h)
 {
     // Cache dimensions; three channels (RGB) per pixel.
     W = w;
     H = h;
     Inputs = 3 * w * h;

     // Autoencoder-style net: pixels in -> sigmoid bottleneck -> pixels out,
     // fully connected in both stages.
     iLay = new LinearLayer(Inputs);
     hLay = new SigmoidLayer(neuronCount);
     oLay = new SigmoidLayer(3 * w * h);
     c1 = new BackpropagationConnector(iLay, hLay, ConnectionMode.Complete);
     c2 = new BackpropagationConnector(hLay, oLay, ConnectionMode.Complete);

     net = new BackpropagationNetwork(iLay, oLay);
     net.SetLearningRate(learningRate);
 }
Example #30
0
        public void GradientWrtInputCheck()
        {
            // 20x20x2 input, batched three at a time; 1e-6 numeric tolerance.
            const int width = 20, height = 20, depth = 2;
            const int batchSize = 3;

            var layer = new SigmoidLayer <double>();

            GradientCheckTools.GradientCheck(layer, width, height, depth, batchSize, 1e-6);
        }
        private void Form1_Load(object sender, EventArgs e)
        {
            // Ask the user for the hidden-layer size (prompt text is Turkish UI copy).
            int hiddenNeuronCount = Convert.ToInt32(Microsoft.VisualBasic.Interaction.InputBox("Gizli Katman Sayısını Giriniz", "Bilgi Girişi", "Örn: 3", 0, 0));

            // 35 -> N -> 5 topology (field names are Turkish: input/hidden/output layer).
            giriskatmanı = new LinearLayer(35);
            gizlikatman = new SigmoidLayer(hiddenNeuronCount);
            cikiskatmani = new SigmoidLayer(5);

            // Connector constructors wire the layers together as a side effect.
            var giris_gizli_baglanti = new BackpropagationConnector(giriskatmanı, gizlikatman);
            var gizli_cikis_baglanti = new BackpropagationConnector(gizlikatman, cikiskatmani);

            ag = new BackpropagationNetwork(giriskatmanı, cikiskatmani);
            ag.Initialize();
        }
        /// <summary>
        /// Builds a network with one sigmoid layer per configured size in
        /// <c>layersizes</c>, all using a uniform 0.01 learning rate, behind a
        /// 784-unit sigmoid input stage.
        /// </summary>
        private Network InitPreTrainedNet()
        {
            int numlayers = layersizes.GetLength(0);

            ILayer[] layers = new ILayer[numlayers];
            // BUG FIX: learnrates was hard-coded to `new double[4]`, which throws
            // IndexOutOfRangeException when layersizes has more than 4 entries and
            // produces a mismatched-length array when it has fewer. Size it to match
            // the layer count.
            double[] learnrates = new double[numlayers];
            for (int i = 0; i < numlayers; i++)
            {
                layers[i]     = new SigmoidLayer(layersizes[i], 0.01);
                learnrates[i] = 0.01;
            }
            return(new Network(new SigmoidLayer(784, 0.01), layers, learnrates));
        }
Example #33
0
 private Network InitPreTrainedNet()
 {
     // Single pre-trained hidden stage: 625 sigmoid inputs -> 100 sigmoid units,
     // with a uniform 0.001 learning rate.
     const double rate = 0.001;
     ILayer[] layers = { new SigmoidLayer(100, rate) };
     double[] learnrates = { rate };
     return new Network(new SigmoidLayer(625, rate), layers, learnrates);
 }
        public void SigmoidLayer_Forward()
        {
            var layer = new SigmoidLayer();
            layer.Setup(bottom, top);
            layer.Forward(bottom, top);

            Assert.Equal(bottom.Count, top.Count);

            // Pull both blobs back to the CPU and verify each activation.
            using (var cpuTop = top.OnCpu())
            using (var cpuBottom = bottom.OnCpu())
            {
                for (int i = 0; i < bottom.Count; i++)
                {
                    double expected = 1.0d / (1.0d + Math.Exp(-cpuBottom.DataAt(i)));
                    Assert.True(MathHelpers.Equality(cpuTop.DataAt(i), expected));

                    // Sigmoid output is always squashed into [0, 1].
                    Assert.True(cpuTop.DataAt(i) >= 0.0d);
                    Assert.True(cpuTop.DataAt(i) <= 1.0d);
                }
            }
        }