static void Real()
        {
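            // Builds a multilayer perceptron with five sigmoid hidden layers over data filled in by
            // InitializeTrainingData, trains it with an FCTrainer and evaluates it via network.Test.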
            var inputLayer  = new InputLayer3D(1, 1, 1);
            var outputLayer = new OutputLayer(1)
            {
                ActivationFunction = new ConstOutputArrayFunction()
            };
            var dataProvider = new FunctionProvider();

            InitializeTrainingData(dataProvider.TrainData);
            var perceptron1 = new PerceptronLayer(10, 2)
            {
                ActivationFunction = new SigmoidFunction()
            };
            var perceptron2 = new PerceptronLayer(10, 10)
            {
                ActivationFunction = new SigmoidFunction()
            };
            var perceptron3 = new PerceptronLayer(8, 10)
            {
                ActivationFunction = new SigmoidFunction()
            };
            var perceptron4 = new PerceptronLayer(6, 8)
            {
                ActivationFunction = new SigmoidFunction()
            };
            var perceptron5 = new PerceptronLayer(2, 6)
            {
                ActivationFunction = new SigmoidFunction()
            };

            MultiLayerPerceptron network = new MultiLayerPerceptron
            {
                InputLayer   = inputLayer,
                OutputLayer  = outputLayer,
                DataProvider = dataProvider
            };

            network.HiddenLayers.Add(perceptron1);
            network.HiddenLayers.Add(perceptron2);
            network.HiddenLayers.Add(perceptron3);
            network.HiddenLayers.Add(perceptron4);
            network.HiddenLayers.Add(perceptron5);

            var trainer = new FCTrainer(network, 10, 1, dataProvider);

            trainer.Train(1);
            var error = network.Test(1);
        }
        static void OneTrainingData()
        {
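            // Minimal example: two hand-initialized sigmoid layers trained on two fixed samples.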
            var inputLayer  = new InputLayer3D(1, 1, 3);
            var outputLayer = new OutputLayer(2)
            {
                ActivationFunction = new ConstOutputArrayFunction()
            };
            var weight1 = new List <Array3D> {
                new Array3D(0.1, 0.3), new Array3D(0.3, 0.1)
            };
            var weight2 = new List <Array3D> {
                new Array3D(0.4, 0.5), new Array3D(0.3, 0.5)
            };
            var perceptron1 = new PerceptronLayer(weight1)
            {
                ActivationFunction = new SigmoidFunction()
            };
            var perceptron2 = new PerceptronLayer(weight2)
            {
                ActivationFunction = new SigmoidFunction()
            };
            var dataProvider = new FunctionProvider
            {
                TrainData =
                {
                    new TrainingData <Array3D, Array3D> {
                        Input = new Array3D(0.3, 0.4, 0.5), Expected = new Array3D(0.2, 0.6)
                    },
                    new TrainingData <Array3D, Array3D> {
                        Input = new Array3D(0.2, 0.4, 0.7), Expected = new Array3D(0.1, 0.8)
                    }
                }
            };

            var network = new MultiLayerPerceptron
            {
                InputLayer   = inputLayer,
                OutputLayer  = outputLayer,
                DataProvider = dataProvider
            };

            network.HiddenLayers.Add(perceptron1);
            network.HiddenLayers.Add(perceptron2);

            var trainer = new FCTrainer(network, 2, 1, dataProvider);

            trainer.Train(100);
        }
Example no. 3
        public static Layer ToPercLayer(this PerceptronLayer layer)
        {
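            // Rebuilds the jagged weight matrix from the ';'-separated string persisted with the layer
            // and wraps it in a Layer instance.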
            var dbWeights = layer.Weights.Weights.Split(';');
            var weights   = new double[layer.Weights.Height][];

            for (int i = 0; i < layer.Weights.Height; ++i)
            {
                weights[i] = new double[layer.Weights.Width];
                for (int j = 0; j < layer.Weights.Width; ++j)
                {
                    weights[i][j] = double.Parse(dbWeights[i * layer.Weights.Width + j]);
                }
            }

            return(new Layer(layer.NeuronsCount, weights));
        }
        public void Initialize_Test(int neurals, int weights, double between)
        {
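            // Checks that more than 90% of the randomly initialized weights fall inside [-between, between).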
            var layer = new PerceptronLayer(neurals, weights);
            var fit   = 0;

            foreach (var perceptron in layer.Neurals)
            {
                foreach (double weight in perceptron.Weights)
                {
                    if (-between <= weight && weight < between)
                    {
                        fit++;
                    }
                }
            }

            Assert.Greater(fit, neurals * weights * 0.9);
        }
        public void TestMethod1()
        {
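            // Runs one forward pass and one training step on a 4-neuron layer fed with a 2-element input.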
            PerceptronLayer layer = new PerceptronLayer(4, 2);
            var             error = new Array(4, true)
            {
                [0] = 0.2,
                [1] = 0.3,
                [2] = 0.4,
                [3] = 0.5
            };
            var input = new Array(2, true)
            {
                [0] = 0.5,
                [1] = 2
            };

            var actual = layer.FormOutput(input);
            var result = layer.Train(error, input, actual);
        }
Example no. 6
        private InputLayer InicializarModeloVisualizer()
        {
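            // Builds the visualizer model for the Iris network: four inputs plus a bias, a hidden layer
            // sized and activated from the UI controls, and three output perceptrons
            // (Setosa, Versicolor, Virginica).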
            var inputLayer = new InputLayer("Input");

            var sepalLengthInput = new Input("Sepal Length");
            var sepalWidthInput  = new Input("Sepal Width");
            var petalLengthInput = new Input("Petal Length");
            var petalWidthInput  = new Input("Petal Width");

            inputLayer.AddNode(sepalLengthInput);
            inputLayer.AddNode(sepalWidthInput);
            inputLayer.AddNode(petalLengthInput);
            inputLayer.AddNode(petalWidthInput);
            inputLayer.Bias = new Bias("Bias Input")
            {
                OutputValue = 1.0
            };

            var capaOculta = new PerceptronLayer("Oculta");

            var cantNeuronasOcultas = (int)spinNeuronasOculta.Value;
            var funcionActivacion   = (cboFuncionActivacionOculta.SelectedItem as EnumInfo <ActivationType>).Valor.Map();

            for (int i = 0; i < cantNeuronasOcultas; i++)
            {
                var neurona = new Perceptron("oculta" + i)
                {
                    ActivationFunction = funcionActivacion
                };

                capaOculta.AddNode(neurona);
            }

            capaOculta.Bias = new Bias("Bias Oculta")
            {
                OutputValue = 1.0
            };

            inputLayer.Connect(capaOculta);

            funcionActivacion = (cboFuncionActivacionSalida.SelectedItem as EnumInfo <ActivationType>).Valor.Map();

            var setosaOutput = new Perceptron("Setosa")
            {
                ActivationFunction = funcionActivacion
            };
            var versicolorOutput = new Perceptron("Versicolor")
            {
                ActivationFunction = funcionActivacion
            };
            var virginicaOutput = new Perceptron("Virginica")
            {
                ActivationFunction = funcionActivacion
            };

            var capaSalida = new PerceptronLayer("Salida");

            capaSalida.AddNode(setosaOutput);
            capaSalida.AddNode(versicolorOutput);
            capaSalida.AddNode(virginicaOutput);

            capaOculta.Connect(capaSalida);

            return(inputLayer);
        }
        private Guid ProcessSave(CNN network, string name, NetworkContext context)
        {
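            // Serializes the CNN layers/kernels and the fully connected perceptron layers/weights,
            // links them to a new NetworkModel and persists everything through the NetworkContext.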
            var cnnModel = new CnnModel
            {
                CnnModelId = Guid.NewGuid()
            };
            var layers     = new List <CnnLayer>();
            var cnnWeights = new List <CnnWeights>();

            for (int i = 0; i < network.LayersCount; i++)
            {
                if (network.Layers[i] is ConvolutionalLayer)
                {
                    var cnnLayer = network.Layers[i] as ConvolutionalLayer;
                    var layer    = new CnnLayer
                    {
                        CnnLayerId         = Guid.NewGuid(),
                        PositionIn         = i,
                        KernelHeight       = cnnLayer.KernelSize,
                        KernelWidth        = cnnLayer.KernelSize,
                        KernelsCount       = cnnLayer.KernelsCount,
                        FeatureMapsCountIn = cnnLayer.KernelDepth,
                        LayerType          = (byte)LayerType.CovolutionalLayer,
                        Model = cnnModel
                    };

                    var weights = new CnnWeights
                    {
                        CnnWeightsId = Guid.NewGuid(),
                        Layer        = layer,
                        LayerId      = layer.CnnLayerId
                    };

                    var builder = new StringBuilder();
                    for (int j = 0; j < cnnLayer.KernelsCount; ++j)
                    {
                        for (int k = 0; k < cnnLayer.KernelDepth; ++k)
                        {
                            for (int a = 0; a < cnnLayer.Kernels[j][k].Length; ++a)
                            {
                                for (int b = 0; b < cnnLayer.Kernels[j][k][a].Length; ++b)
                                {
                                    builder.Append(cnnLayer.Kernels[j][k][a][b] + ";");
                                }
                            }
                        }
                    }

                    weights.Weights = builder.ToString();
                    cnnWeights.Add(weights);

                    layer.Weights = weights;

                    layers.Add(layer);
                }
                else if (network.Layers[i] is PollingLayer)
                {
                    var cnnLayer = network.Layers[i] as PollingLayer;
                    var layer    = new CnnLayer
                    {
                        CnnLayerId   = Guid.NewGuid(),
                        PositionIn   = i,
                        KernelHeight = cnnLayer.KernelSize,
                        KernelWidth  = cnnLayer.KernelSize,
                        LayerType    = (byte)LayerType.PoolingLayer,
                        Model        = cnnModel
                    };

                    layers.Add(layer);
                }
                else
                {
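                    // Any layer that is neither convolutional nor pooling is serialized as a ReLU layer.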
                    var cnnLayer = network.Layers[i] as ReLuLayer;
                    var layer    = new CnnLayer
                    {
                        CnnLayerId   = Guid.NewGuid(),
                        PositionIn   = i,
                        KernelHeight = cnnLayer.KernelSize,
                        KernelWidth  = cnnLayer.KernelSize,
                        LayerType    = (byte)LayerType.ReluLayer,
                        Model        = cnnModel
                    };

                    layers.Add(layer);
                }
            }

            var l = new CnnLayer
            {
                CnnLayerId   = Guid.NewGuid(),
                KernelHeight = network.FlattenLayer.KernelSize,
                KernelWidth  = network.FlattenLayer.KernelSize,
                LayerType    = (byte)LayerType.FlattenLayer,
                Model        = cnnModel
            };

            layers.Add(l);

            cnnModel.Layers = layers;

            var perceptronModel = new PerceptronModel
            {
                PerceptronModelId = Guid.NewGuid()
            };
            var percLayers  = new List <PerceptronLayer>();
            var percWeights = new List <PerceptronWeights>();

            for (var i = 0; i < network.Perceptron.LayersCount; ++i)
            {
                var layer = network.Perceptron.Layers[i];

                var perLayer = new PerceptronLayer
                {
                    PerceptronLayerId = Guid.NewGuid(),
                    NeuronsCount      = layer.NeuronsCount,
                    PositionIn        = i,
                    Perceptron        = perceptronModel
                };

                var weights = new PerceptronWeights
                {
                    PerceptronWeightsId = perLayer.PerceptronLayerId,
                    Height = layer.WeightRowsCount,
                    Width  = layer.WeightColumnsCount
                };

                var builder = new StringBuilder();
                for (int a = 0; a < layer.Weights.Length; ++a)
                {
                    for (int b = 0; b < layer.Weights[a].Length; ++b)
                    {
                        builder.Append(layer.Weights[a][b] + ";");
                    }
                }

                weights.Weights = builder.ToString();
                percWeights.Add(weights);

                percLayers.Add(perLayer);
            }

            perceptronModel.Layers = percLayers;

            //save
            var networkModel = new NetworkModel
            {
                NetworkModelId = Guid.NewGuid(),
                Perceptron     = perceptronModel,
                PerceptronId   = perceptronModel.PerceptronModelId,
                Cnn            = cnnModel,
                CnnId          = cnnModel.CnnModelId,
                Name           = name
            };

            cnnModel.NetworkModel   = networkModel;
            cnnModel.NetworkModelId = networkModel.NetworkModelId;

            perceptronModel.NetworkModel   = networkModel;
            perceptronModel.NetworkModelId = networkModel.NetworkModelId;

            context.NetworkModels.Add(networkModel);

            context.CnnLayers.AddRange(layers);
            context.CnnWeightsSet.AddRange(cnnWeights);
            context.CnnModels.Add(cnnModel);

            context.PerceptronLayers.AddRange(percLayers);
            context.PerceptronWeights.AddRange(percWeights);
            context.PerceptronModels.Add(perceptronModel);


            context.SaveChanges();

            return(networkModel.NetworkModelId);
        }
        static void Xor()
        {
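            // Trains a 2-5-1 tanh network on the XOR truth table using mini-batch trainers
            // (batch size 4) driven by an FCTrainer.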
            const int batchSize = 4;
            const int epochSize = 16;

            var inputLayer  = new InputLayer3D(1, 1, 1);
            var outputLayer = new OutputLayer(1)
            {
                ActivationFunction = new ConstOutputArrayFunction()
            };
            var dataProvider = new FunctionProvider
            {
                TrainData =
                {
                    new TrainingData <Array3D, Array3D>
                    {
                        Input = new Array3D(0, 0), Expected = new Array3D(0.0)
                    },
                    new TrainingData <Array3D, Array3D>
                    {
                        Input = new Array3D(0, 1), Expected = new Array3D(1.0)
                    },
                    new TrainingData <Array3D, Array3D>
                    {
                        Input = new Array3D(1, 0), Expected = new Array3D(1.0)
                    },
                    new TrainingData <Array3D, Array3D>
                    {
                        Input = new Array3D(1, 1), Expected = new Array3D(0.0)
                    }
                },
                TestData = { new TrainingData <Array3D, Array3D>
                             {
                                 Input = new Array3D(0, 0), Expected = new Array3D(0)
                             },
                             new TrainingData <Array3D, Array3D>
                             {
                                 Input = new Array3D(0, 1), Expected = new Array3D(1)
                             },
                             new TrainingData <Array3D, Array3D>
                             {
                                 Input = new Array3D(1, 0), Expected = new Array3D(1)
                             },
                             new TrainingData <Array3D, Array3D>
                             {
                                 Input = new Array3D(1, 1), Expected = new Array3D(0)
                             } },
                IsQueue = false
            };
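            // Alternative data providers and seed weights kept for experimentation; not used in the run below.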
            var oneData = new FunctionProvider {
                TrainData =
                {
                    new TrainingData <Array3D, Array3D>
                    {
                        Input = new Array3D(0, 0), Expected = new Array3D(0.0)
                    },
                    new TrainingData <Array3D, Array3D>
                    {
                        Input = new Array3D(1, 1), Expected = new Array3D(0.0)
                    }
                }, IsQueue = false
            };
            var function = new FunctionProvider(x => Math.Pow(x, 2));

            var weight1 = new List <Array3D> {
                new Array3D(0.1, 0.3), new Array3D(0.3, 0.1)
            };
            var weight2 = new List <Array3D> {
                new Array3D(0.4, 0.5)
            };
            var perceptron1 = new PerceptronLayer(5, 2)
            {
                ActivationFunction = new TanhActivationFunction()
            };

            perceptron1.Trainer = new MiniBatchPerceptronTrainer(perceptron1.Neurals, false)
            {
                BatchSize = batchSize, ActivationFunction = new TanhActivationFunction(), LearningRate = 0.1, Momentum = 0.1
            };
            var perceptron2 = new PerceptronLayer(1, 5)
            {
                ActivationFunction = new TanhActivationFunction()
            };

            perceptron2.Trainer = new MiniBatchPerceptronTrainer(perceptron2.Neurals, true)
            {
                BatchSize = batchSize, ActivationFunction = new TanhActivationFunction(), LearningRate = 0.1, Momentum = 0.1
            };

            var network = new MultiLayerPerceptron
            {
                InputLayer   = inputLayer,
                OutputLayer  = outputLayer,
                DataProvider = dataProvider
            };

            network.HiddenLayers.Add(perceptron1);
            network.HiddenLayers.Add(perceptron2);

            var trainer = new FCTrainer(network, epochSize, batchSize, dataProvider);

            trainer.Train(200);
        }
Example no. 9
 private void AddPerceptronLayerText(PerceptronLayer layer, StringBuilder builder)
 {
     AddLayerText(layer, builder);
     builder.AppendLine("Previous layer: " + layer.Previous.Id);
 }
        public void TestNetwork()
        {
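            // Computes one forward and one backward pass of a 3-2-3 sigmoid network by hand,
            // then compares the hand-derived weight updates against the library's result.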
            const int layer0 = 3, layer1 = 2, layer2 = 3;

            #region 1 Iteration

            double[] input   = { 0.8, 0.76, 0.54 };
            double[] weight1 = { 0.35, 0.46, 0.51 };
            double[] weight2 = { 0.4, 0.87, 0.36 };

            double[] sum1 = { 0, 0 };

            for (var i = 0; i < input.Length; i++)
            {
                sum1[0] += input[i] * weight1[i];
                sum1[1] += input[i] * weight2[i];
            }
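            // Expanent is presumably the logistic (sigmoid) activation: the deltas below use output * (1 - output).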

            double[] output1 = { Expanent(sum1[0]), Expanent(sum1[1]) };

            #endregion

            #region 2 Iteration

            double[] weight3 = { 0.5, 0.14 };
            double[] weight4 = { 0.76, 0.86 };
            double[] weight5 = { 0.95, 0.12 };

            double[] sum2 = { 0, 0, 0 };

            for (var i = 0; i < output1.Length; i++)
            {
                sum2[0] += output1[i] * weight3[i];
                sum2[1] += output1[i] * weight4[i];
                sum2[2] += output1[i] * weight5[i];
            }

            double[] output2 = { Expanent(sum2[0]), Expanent(sum2[1]), Expanent(sum2[2]) };

            #endregion

            #region Train
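            // Output-layer delta rule: error = output - expected, delta = error * output * (1 - output),
            // newWeight = weight - learningRate * previousOutput * delta.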

            double[] expected = { 0.87, 1, 0.32 };
            double[] error    = new double[layer2];
            for (var i = 0; i < error.Length; i++)
            {
                error[i] = output2[i] - expected[i];
            }

            double[] weightDelta = new double[layer2];
            for (var i = 0; i < layer2; i++)
            {
                weightDelta[i] = error[i] * output2[i] * (1 - output2[i]);
            }

            var newWeight3 = new double[layer1];
            var newWeight4 = new double[layer1];
            var newWeight5 = new double[layer1];

            const double learningRate = 0.1;
            for (var i = 0; i < layer1; i++)
            {
                newWeight3[i] = weight3[i] - output1[i] * weightDelta[0] * learningRate;
                newWeight4[i] = weight4[i] - output1[i] * weightDelta[1] * learningRate;
                newWeight5[i] = weight5[i] - output1[i] * weightDelta[2] * learningRate;
            }

            #region 2 Iteration

            double[] error2 = new double[layer1];
            for (var i = 0; i < layer1; i++)
            {
                error2[i] += weight3[i] * weightDelta[0];
                error2[i] += weight4[i] * weightDelta[1];
                error2[i] += weight5[i] * weightDelta[2];
            }

            double[] weightDelta2 = new double[layer1];
            for (var i = 0; i < layer1; i++)
            {
                weightDelta2[i] = error2[i] * output1[i] * (1 - output1[i]);
            }

            var newWeight1 = new double[layer0];
            var newWeight2 = new double[layer0];

            for (var i = 0; i < layer0; i++)
            {
                newWeight1[i] = weight1[i] - input[i] * weightDelta2[0] * learningRate;
                newWeight2[i] = weight2[i] - input[i] * weightDelta2[1] * learningRate;
            }


            #endregion

            #endregion

            #region Compare

            var inputLayer  = new InputLayer3D(1, 1, 3);
            var outputLayer = new OutputLayer(3)
            {
                ActivationFunction = new ConstArrayFunction()
            };

            var weightList1 = new List <List <double> > {
                new List <double>(weight1), new List <double>(weight2)
            };
            var hiddenLayer1 = new PerceptronLayer(weightList1);

            var weightList2 = new List <List <double> >
            {
                new List <double>(weight3),
                new List <double>(weight4),
                new List <double>(weight5)
            };
            var hiddenLayer2 = new PerceptronLayer(weightList2);

            var trainingData = new TrainingData <Array3D, Array3D>
            {
                Input    = new Array3D(input[0], input[1], input[2]),
                Expected = new Array3D(expected[0], expected[1], expected[2])
            };
            var trainingData2 = new TrainingData <Array3D, Array3D>
            {
                Input    = new Array3D(0.15, 0.44, 0.83),
                Expected = new Array3D(0.1, 0.24, 0.18)
            };

            var network = new MultiLayerPerceptron
            {
                InputLayer   = inputLayer,
                OutputLayer  = outputLayer,
                HiddenLayers = { hiddenLayer1, hiddenLayer2 },
                DataProvider = new FunctionProvider {
                    TrainData = { trainingData, trainingData2 }, IsQueue = true
                }
            };
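            // Compare against the hand-computed updates; a single training pass over trainingData is
            // presumably triggered by the network/data provider before these assertions.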

            for (var i = 0; i < 3; i++)
            {
                Assert.AreEqual(network.HiddenLayers[0].Neurals[0].Weights[i], newWeight1[i], 0.001);
                Assert.AreEqual(network.HiddenLayers[0].Neurals[1].Weights[i], newWeight2[i], 0.001);
            }

            #endregion
        }
Example no. 11
        private void InitializeNetwork()
        {
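            // Builds a 1-10-1 tanh network trained against the x => x^2 FunctionProvider; the XOR-style
            // providers declared below are alternatives that are not wired into the network here.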
            var inputLayer  = new InputLayer3D(1, 1, 1);
            var outputLayer = new OutputLayer(1)
            {
                ActivationFunction = new ConstOutputArrayFunction()
            };
            var dataProvider = new FunctionProvider
            {
                TrainData =
                {
                    new TrainingData <Array3D, Array3D>
                    {
                        Input = new Array3D(0, 0), Expected = new Array3D(0.0)
                    },
                    new TrainingData <Array3D, Array3D>
                    {
                        Input = new Array3D(0, 1), Expected = new Array3D(1.0)
                    },
                    new TrainingData <Array3D, Array3D>
                    {
                        Input = new Array3D(1, 0), Expected = new Array3D(1.0)
                    },
                    new TrainingData <Array3D, Array3D>
                    {
                        Input = new Array3D(1, 1), Expected = new Array3D(0.0)
                    }
                },
                TestData = { new TrainingData <Array3D, Array3D>
                             {
                                 Input = new Array3D(0, 0), Expected = new Array3D(0)
                             },
                             new TrainingData <Array3D, Array3D>
                             {
                                 Input = new Array3D(0, 1), Expected = new Array3D(1)
                             },
                             new TrainingData <Array3D, Array3D>
                             {
                                 Input = new Array3D(1, 0), Expected = new Array3D(1)
                             },
                             new TrainingData <Array3D, Array3D>
                             {
                                 Input = new Array3D(1, 1), Expected = new Array3D(0)
                             } },
                IsQueue = false
            };
            var oneData = new FunctionProvider
            {
                TrainData =
                {
                    new TrainingData <Array3D, Array3D>
                    {
                        Input = new Array3D(0, 0), Expected = new Array3D(0.0)
                    },
                    new TrainingData <Array3D, Array3D>
                    {
                        Input = new Array3D(1, 1), Expected = new Array3D(0.0)
                    }
                },
                IsQueue = false
            };
            var function = new FunctionProvider(x => Math.Pow(x, 2));

            var perceptron1 = new PerceptronLayer(10, 1)
            {
                ActivationFunction = new TanhActivationFunction()
            };

            perceptron1.Trainer = new MiniBatchPerceptronTrainer(perceptron1.Neurals, false)
            {
                BatchSize = BatchSize, ActivationFunction = new TanhActivationFunction(), LearningRate = 0.1, Momentum = 0.1
            };
            var perceptron2 = new PerceptronLayer(1, 10)
            {
                ActivationFunction = new TanhActivationFunction()
            };

            perceptron2.Trainer = new MiniBatchPerceptronTrainer(perceptron2.Neurals, true)
            {
                BatchSize = BatchSize, ActivationFunction = new TanhActivationFunction(), LearningRate = 0.1, Momentum = 0.1
            };

            Network = new MultiLayerPerceptron
            {
                InputLayer   = inputLayer,
                OutputLayer  = outputLayer,
                DataProvider = function
            };
            Network.HiddenLayers.Add(perceptron1);
            Network.HiddenLayers.Add(perceptron2);

            Trainer = new FCTrainer(Network, EpochSize, BatchSize, function);
        }
        public void Calculate()
        {
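            // Same hand-computed 3-2-3 forward/backward pass as TestNetwork(), but here the library
            // layers are exercised directly through FormOutput/Train before comparing weights.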
            const int layer0 = 3, layer1 = 2, layer2 = 3;

            #region 1 Iteration

            double[] input   = { 0.8, 0.76, 0.54 };
            double[] weight1 = { 0.35, 0.46, 0.51 };
            double[] weight2 = { 0.4, 0.87, 0.36 };

            double[] sum1 = { 0, 0 };

            for (var i = 0; i < input.Length; i++)
            {
                sum1[0] += input[i] * weight1[i];
                sum1[1] += input[i] * weight2[i];
            }

            double[] output1 = { Expanent(sum1[0]), Expanent(sum1[1]) };

            #endregion

            #region 2 Iteration

            double[] weight3 = { 0.5, 0.14 };
            double[] weight4 = { 0.76, 0.86 };
            double[] weight5 = { 0.95, 0.12 };

            double[] sum2 = { 0, 0, 0 };

            for (var i = 0; i < output1.Length; i++)
            {
                sum2[0] += output1[i] * weight3[i];
                sum2[1] += output1[i] * weight4[i];
                sum2[2] += output1[i] * weight5[i];
            }

            double[] output2 = { Expanent(sum2[0]), Expanent(sum2[1]), Expanent(sum2[2]) };

            #endregion

            #region Train

            double[] expected = { 0.87, 1, 0.32 };
            double[] error    = new double[layer2];
            for (var i = 0; i < error.Length; i++)
            {
                error[i] = output2[i] - expected[i];
            }

            double[] weightDelta = new double[layer2];
            for (var i = 0; i < layer2; i++)
            {
                weightDelta[i] = error[i] * output2[i] * (1 - output2[i]);
            }

            var newWeight3 = new double[layer1];
            var newWeight4 = new double[layer1];
            var newWeight5 = new double[layer1];

            const double learningRate = 0.1;
            for (var i = 0; i < layer1; i++)
            {
                newWeight3[i] = weight3[i] - output1[i] * weightDelta[0] * learningRate;
                newWeight4[i] = weight4[i] - output1[i] * weightDelta[1] * learningRate;
                newWeight5[i] = weight5[i] - output1[i] * weightDelta[2] * learningRate;
            }

            #region 2 Iteration

            double[] error2 = new double[layer1];
            for (var i = 0; i < layer1; i++)
            {
                error2[i] += weight3[i] * weightDelta[0];
                error2[i] += weight4[i] * weightDelta[1];
                error2[i] += weight5[i] * weightDelta[2];
            }

            double[] weightDelta2 = new double[layer1];
            for (var i = 0; i < layer1; i++)
            {
                weightDelta2[i] = error2[i] * output1[i] * (1 - output1[i]);
            }

            var newWeight1 = new double[layer0];
            var newWeight2 = new double[layer0];

            for (var i = 0; i < layer0; i++)
            {
                newWeight1[i] = weight1[i] - input[i] * weightDelta2[0] * learningRate;
                newWeight2[i] = weight2[i] - input[i] * weightDelta2[1] * learningRate;
            }


            #endregion

            #endregion

            #region Compare

            var weightList1 = new List <List <double> > {
                new List <double>(weight1), new List <double>(weight2)
            };
            var perceptronLayer1 = new PerceptronLayer(weightList1);

            var weightList2 = new List <List <double> >
            {
                new List <double>(weight3),
                new List <double>(weight4),
                new List <double>(weight5)
            };
            var perceptronLayer2 = new PerceptronLayer(weightList2);

            var inputArray = new Array(input);

            var result1 = (Array)perceptronLayer1.FormOutput(inputArray);
            var result2 = (Array)perceptronLayer2.FormOutput(result1);

            var errorResult1 = new Array(error);
            var errorResult2 = (Array)perceptronLayer2.Train(errorResult1, result1, result2);
            var errorResult3 = (Array)perceptronLayer1.Train(errorResult2, inputArray, result1);

            for (var i = 0; i < 3; i++)
            {
                Assert.AreEqual(perceptronLayer1.Neurals[0].Weights[i], newWeight1[i], 0.001);
                Assert.AreEqual(perceptronLayer1.Neurals[1].Weights[i], newWeight2[i], 0.001);
            }

            #endregion
        }
Example no. 13
        private void btnStart_Click(object sender, EventArgs e)
        {
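            // Builds a small demo network (4 inputs plus bias, 3 hidden perceptrons, 2 outputs) with
            // randomized edge weights and hands it to NeuralNetworkVisualizerControl1 for display.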
            _input = new InputLayer("Input")
            {
                Bias = new Bias("bias")
                {
                    OutputValue = 1.234
                }
            };

            _input.AddNode(new Input("e1")
            {
                OutputValue = 0.255
            });
            _input.AddNode(new Input("e2")
            {
                OutputValue = 0.455
            });
            _input.AddNode(new Input("e3")
            {
                OutputValue = -0.78967656
            });
            _input.AddNode(new Input("e4")
            {
                OutputValue = 0.0
            });
            //_input.AddNode(new Input("e5") { OutputValue = 0.255 });
            //_input.AddNode(new Input("e6") { OutputValue = 0.455 });
            //_input.AddNode(new Input("e7") { OutputValue = -0.78967656 });
            //_input.AddNode(new Input("e8") { OutputValue = 0.011 });
            //_input.AddNode(new Input("e9") { OutputValue = 0.2255 });
            //_input.AddNode(new Input("e10") { OutputValue = 43.455 });
            //_input.AddNode(new Input("e11") { OutputValue = -11.67656 });
            //_input.AddNode(new Input("e12") { OutputValue = -1.001 });

            var hidden = new PerceptronLayer("Hidden");

            hidden.AddNode(new Perceptron("o1")
            {
                ActivationFunction = ActivationFunction.BinaryStep, OutputValue = 2.364, SumValue = 2.364
            });
            hidden.AddNode(new Perceptron("o2")
            {
                ActivationFunction = ActivationFunction.LeakyRelu, OutputValue = -0.552, SumValue = 55.44
            });
            hidden.AddNode(new Perceptron("o4")
            {
                ActivationFunction = ActivationFunction.Relu, OutputValue = 1.324, SumValue = 4.34
            });
            //hidden.AddNode(new Perceptron("o3") { ActivationFunction = ActivationFunction.Linear, OutputValue = 0.0, SumValue = 19.22 });
            //hidden.AddNode(new Perceptron("o5") { ActivationFunction = ActivationFunction.Sigmoid, OutputValue = -0.12, SumValue = 25.224 });
            //hidden.AddNode(new Perceptron("o6") { ActivationFunction = ActivationFunction.Tanh, OutputValue = 10.3, SumValue = 1.222 });

            _input.Connect(hidden);

            var output = new PerceptronLayer("Output");

            output.AddNode(new Perceptron("s1")
            {
                ActivationFunction = ActivationFunction.Softmax, OutputValue = 0.567656, SumValue = 0.454
            });
            output.AddNode(new Perceptron("s2")
            {
                ActivationFunction = ActivationFunction.Sigmoid, OutputValue = 0.176545, SumValue = 0.54
            });
            //output.AddNode(new Perceptron("s3") { ActivationFunction = ActivationFunction.Softmax, OutputValue = 0.9545, SumValue = 0.133 });
            //output.AddNode(new Perceptron("s4") { ActivationFunction = ActivationFunction.Softmax, OutputValue = 0.145, SumValue = 0.88 });

            hidden.Connect(output);

            var aleatorio = new Random(2);

            foreach (var p in hidden.Nodes)
            {
                foreach (var edge in p.Edges)
                {
                    int sign = aleatorio.Next(-1, 2);
                    edge.Weight = aleatorio.NextDouble() * sign;
                }
            }

            foreach (var p in output.Nodes)
            {
                foreach (var edge in p.Edges)
                {
                    int sign = aleatorio.Next(-1, 2); // -1, 0 or 1, matching the hidden-layer initialization
                    edge.Weight = aleatorio.NextDouble() * sign;
                }
            }

            NeuralNetworkVisualizerControl1.InputLayer = _input;

            btnChangeValue.Enabled = btnAddBias.Enabled = btnClear.Enabled = trackZoom.Enabled = cboQuality.Enabled = true;
        }