Example #1
        private void AddLayer_Click(object sender, RoutedEventArgs e)
        {
            if (isLayersBaseCreated == false) // first create the layers array; reject an invalid layer count
            {
                if (!Int32.TryParse(NumberOfLayersBox.Text, out int numberOfLayers) || numberOfLayers <= 0)
                {
                    MessageBox.Show("Incorrect number of layers.");
                    return;
                }

                NumberOfLayersBox.IsEnabled = false;
                layers = new LayerCharacteristic[numberOfLayers];
                isLayersBaseCreated       = true;
                MainWindow.NumberOfLayers = numberOfLayers;
            }
            if (currentLayer < layers.Length)
            {
                // describe the current layer: neuron count plus the activation function picked in the combo box
                int    neurons  = Convert.ToInt32(CurrentLayerNeuronsBox.Text);
                string selected = ActivationFunctionComboBox.SelectionBoxItem.ToString();

                switch (selected)
                {
                    case "SigmoidUnipolar":
                        layers[currentLayer] = new LayerCharacteristic(neurons, new SigmoidUnipolarFunction());
                        break;
                    case "SigmoidBipolar":
                        layers[currentLayer] = new LayerCharacteristic(neurons, new SigmoidBipolarFunction());
                        break;
                    case "IdentityFunction":
                        layers[currentLayer] = new LayerCharacteristic(neurons, new IdentityFunction());
                        break;
                }

                CurrentLayerNumber.Text = (currentLayer + 1).ToString();
                currentLayer++;
            }
            else
            {
                MessageBox.Show("All layers already have a description.");
            }
        }
Example #2
        public void OutputWithoutBiasTest()
        {
            // network under test: 1 input, a 2-neuron unipolar-sigmoid hidden layer and a 1-neuron identity output layer, no bias
            var layers = new LayerCharacteristic[2]
            {
                new LayerCharacteristic(2, new SigmoidUnipolarFunction()),
                new LayerCharacteristic(1, new IdentityFunction())
            };
            var net   = new NeuralNetwork(1, layers, false);
            var input = Vector<double>.Build.Dense(1);

            input[0] = 2;
            // set the weights by hand so the expected output can be computed analytically
            net.Layers[0].Weights[0, 0] = 1.5;
            net.Layers[0].Weights[0, 1] = 1 / 4.0;
            net.Layers[1].Weights[0, 0] = 1 / 4.0;
            net.Layers[1].Weights[1, 0] = 2;
            var output = net.CalculateOutput(input);

            Assert.AreEqual(1.483, Math.Round(output[0], 3));
        }
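
The expected value 1.483 in the test above can be reproduced by hand. The following is a minimal sketch of that calculation, assuming the unipolar sigmoid is the standard logistic function 1/(1 + e^-x) and that CalculateOutput simply feeds each layer's output into the next; both assumptions are inferred from the test, not stated in it.

using System;

class OutputHandCheck
{
    // Assumed to match SigmoidUnipolarFunction: the standard logistic function.
    static double Sigmoid(double x) => 1.0 / (1.0 + Math.Exp(-x));

    static void Main()
    {
        double input = 2;

        // Hidden layer: two unipolar-sigmoid neurons with the weights set in the test.
        double h1 = Sigmoid(input * 1.5);        // sigma(3.0) ≈ 0.9526
        double h2 = Sigmoid(input * (1 / 4.0));  // sigma(0.5) ≈ 0.6225

        // Output layer: one identity neuron combining the hidden outputs.
        double y = h1 * (1 / 4.0) + h2 * 2;      // ≈ 1.4831

        Console.WriteLine(Math.Round(y, 3));     // 1.483, matching the assertion
    }
}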
Example #3
        static void Main(string[] args)
        {
            // input files with the approximation training and test sets
            string learningFileName = @"C:\Users\Jakub\Desktop\approximation_train_1.txt";
            string testFileName     = @"C:\Users\Jakub\Desktop\approximation_test.txt";

            // network: 1 input, a 2-neuron unipolar-sigmoid hidden layer and a 1-neuron identity output layer
            LayerCharacteristic[] layers = new LayerCharacteristic[2];
            layers[0] = new LayerCharacteristic(2, new SigmoidUnipolarFunction());
            layers[1] = new LayerCharacteristic(1, new IdentityFunction());
            var network = new NeuralNetwork(1, layers);

            // data provider for the approximation problem, reading the training and test files defined above
            ILearningProvider dataProvider = new LearningApproximationDataProvider(learningFileName, testFileName, 1, 1, true);

            //dataProvider.LearnSet[0] = new Datum(Vector<double>.Build.Dense(2, 1), Vector<double>.Build.Dense(2, 1));
            //dataProvider.LearnSet[1] = new Datum(Vector<double>.Build.Dense(2, 1), Vector<double>.Build.Dense(2, 0));
            // online (per-sample) trainer: mean-square error cost and back-propagation with an adaptive learning rate
            // (the trainer is only configured here; training is not started in this snippet)
            var trainer = new OnlineTrainer(new MeanSquareErrorCalculator(), dataProvider, new BackPropagationAlgorithm(new LearningRateHandler(0.01, 0.7, 1.05, 1.04), 0.2, 1.05));

            network.ConsoleDisplay();
            Console.ReadLine();
            #region old demo
            //LayerCharacteristic[] layers = new LayerCharacteristic[2];
            //layers[0] = new LayerCharacteristic(2, new SigmoidUnipolarFunction());
            //layers[1] = new LayerCharacteristic(2, new IdentityFunction());
            //var network = new NeuralNetwork(1, layers);
            //var input = Vector<double>.Build.Dense(2);
            //input[0] = 1;
            //input[1] = 2;
            //foreach (var layer in network.Layers)
            //{
            //    Console.Write("\nNext layer:");
            //    foreach (var row in layer.Weights.ToRowArrays())
            //    {
            //        Console.WriteLine("\nrow: ");
            //        foreach (var cell in row)
            //        {
            //            Console.Write($"{cell} | ");
            //        }

            //    }
            //}
            //Console.WriteLine($"\nCalculated first: {network.CalculateOutput(input)[0]}");
            //Console.WriteLine($"\nCalculated seond: {network.CalculateOutput(input)[1]}");
            //var r = network.CalculateOutput(input);
            //Console.ReadLine();
            #endregion
            #region multiply demo
            //var m = Matrix<double>.Build.Random(2,1);
            //var v = Vector<double>.Build.Dense(2,2);
            //Console.WriteLine($"vector: {v[0]} {v[1]}");
            //Console.Write("\nMatrix:");
            //foreach (var row in m.ToRowArrays())
            //{
            //    Console.WriteLine("\nrow: ");
            //    foreach (var cell in row)
            //    {
            //        Console.Write($"{cell} | ");
            //    }

            //}
            //var res = v * m;
            //Console.WriteLine($"res: {res.ToString()}");

            //Console.ReadLine();
            #endregion
        }
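
Taken together, the examples only ever construct LayerCharacteristic from a neuron count and an activation function object, so it appears to be a small value type pairing the two. The sketch below is just an inference from that usage; the IActivationFunction interface name and the property names are hypothetical, not taken from the project.

public class LayerCharacteristic
{
    // Members reconstructed from the constructor calls in the examples above (assumed, not verified).
    public int NumberOfNeurons { get; }                    // e.g. 2 for the hidden layer, 1 for the output layer
    public IActivationFunction ActivationFunction { get; } // hypothetical interface implemented by SigmoidUnipolarFunction etc.

    public LayerCharacteristic(int numberOfNeurons, IActivationFunction activationFunction)
    {
        NumberOfNeurons    = numberOfNeurons;
        ActivationFunction = activationFunction;
    }
}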