Code Example #1
File: UnitTest1.cs Project: tengge1/neuron-dotnet-cn
        public void TestMethod1()
        {
            // Create the input, hidden, and output layers
            var inputLayer  = new LinearLayer(1);
            var hiddenLayer = new LinearLayer(5);
            var outputLayer = new LinearLayer(1);

            // Create the connections between the layers
            new BackpropagationConnector(inputLayer, hiddenLayer, ConnectionMode.Complete);
            new BackpropagationConnector(hiddenLayer, outputLayer, ConnectionMode.Complete);

            // Create the neural network
            var network = new BackpropagationNetwork(inputLayer, outputLayer);

            //network.SetLearningRate(new LinearFunction(0.1, 0.6));
            network.Initialize();

            // Train
            var ran = new Random();

            for (var i = 0; i < 100; i++)
            {
                var inputVector    = new double[] { i };
                var outputVector   = new double[] { Math.PI * i };
                var trainingSample = new TrainingSample(inputVector, outputVector);
                network.Learn(trainingSample, i, 100);
            }

            // Predict
            var testInput  = new double[] { 1 };
            var testOutput = network.Run(testInput);

            Console.WriteLine(testOutput[0]);
        }
Code Example #2
File: CrowNetUP.cs Project: substage/Crow
        public BackpropagationNetwork network(int trainInVectorDimension, int trainOutVectorDimension)
        {
            this.hiddenLayerList = HiddenLayerList();

            ActivationLayer inputLayer  = new LinearLayer(trainInVectorDimension);
            ActivationLayer outputLayer = new SigmoidLayer(trainOutVectorDimension);

            BackpropagationConnector bpc0 = new BackpropagationConnector(inputLayer, this.hiddenLayerList[0]);

            for (int i = 1; i < this.hiddenLayerList.Count; i++)
            {
                bpc0 = new BackpropagationConnector(this.hiddenLayerList[i - 1], this.hiddenLayerList[i]);
            }
            bpc0 = new BackpropagationConnector(this.hiddenLayerList[this.hiddenLayerList.Count - 1], outputLayer);

            BackpropagationNetwork network = new BackpropagationNetwork(inputLayer, outputLayer);

            /*ActivationLayer inputLayer = hiddenLayerList[0];
             * ActivationLayer outputLayer = hiddenLayerList[hiddenLayerList.Count - 1];
             *
             * if(hiddenLayerList.Count != 2)
             * {
             *  BackpropagationConnector bpc0 = new BackpropagationConnector(inputLayer, this.hiddenLayerList[1]);
             *  for (int i = 2; i < this.hiddenLayerList.Count - 1; i++)
             *  {
             *      bpc0 = new BackpropagationConnector(this.hiddenLayerList[i - 1], this.hiddenLayerList[i]);
             *  }
             *  bpc0 = new BackpropagationConnector(this.hiddenLayerList[this.hiddenLayerList.Count - 2], outputLayer);
             * }
             *
             * BackpropagationNetwork network = new BackpropagationNetwork(inputLayer, outputLayer);*/
            network.SetLearningRate(this.learningRate);

            return(network);
        }
Code Example #3
File: GAN.cs Project: pedroam14/Neural-Link
    private void CreateDNet()
    {
        ConvolutionLayer  conv0       = new ConvolutionLayer(inputDimension, filterSize: 3, filterCount: 32, zeroPadding: true);
        ActivationLayer   activation0 = new ActivationLayer(new Relu(leaky: true));
        MaxPooling2DLayer pool0       = new MaxPooling2DLayer();
        ConvolutionLayer  conv1       = new ConvolutionLayer(inputDimension, filterSize: 3, filterCount: 32, zeroPadding: true);
        ActivationLayer   activation1 = new ActivationLayer(new Relu(leaky: true));
        MaxPooling2DLayer pool1       = new MaxPooling2DLayer();
        FlattenLayer      flatten     = new FlattenLayer();
        LinearLayer       linear0     = new LinearLayer(numNeurons: 128);
        ActivationLayer   activation2 = new ActivationLayer(new Relu(leaky: true));
        LinearLayer       linear1     = new LinearLayer(numNeurons: 1);
        ActivationLayer   activation3 = new ActivationLayer(new Sigmoid());

        dNet.Add(conv0);
        dNet.Add(activation0);
        dNet.Add(pool0);
        dNet.Add(conv1);
        dNet.Add(activation1);
        dNet.Add(pool1);
        dNet.Add(flatten);
        dNet.Add(linear0);
        dNet.Add(activation2);
        dNet.Add(linear1);
        dNet.Add(activation3);
        dNet.Compile(new BinaryCrossEntropy(), new Adam(0.001d));
    }
Code Example #4
        public override void Train(IForecastingDataSets datasets)
        {
            OnStartRunning(new ComponentRunEventArgs(datasets));
            AnnModelParameter para = mParameter as AnnModelParameter;

            LinearLayer inputLayer = new LinearLayer(datasets.InputData[0].Length);

            SigmoidLayer hiddenLayer = new SigmoidLayer(para.HiddenNeuronsCount[0]);
            SigmoidLayer outputLayer = new SigmoidLayer(1);

            new BackpropagationConnector(inputLayer, hiddenLayer).Initializer  = new RandomFunction(0d, 0.3d);
            new BackpropagationConnector(hiddenLayer, outputLayer).Initializer = new RandomFunction(0d, 0.3d);
            network = new BackpropagationNetwork(inputLayer, outputLayer);
            network.SetLearningRate(para.LearningRate);
            network.JitterEpoch      = para.JitterEpoch;
            network.JitterNoiseLimit = para.JitterNoiseLimit;
            network.EndEpochEvent   += new TrainingEpochEventHandler(
                delegate(object senderNetwork, TrainingEpochEventArgs args)
            {
                // TODO: training error needs to be calculated
                OnRunningEpoch(new AnnModelRunEpochEventArgs(args.TrainingIteration + 1, 0));
            });

            network.Learn(ForecastingDataSets.ConvertToTrainingSet(datasets), para.Iterations);

            datasets.ForecastedData = new double[datasets.InputData.Length][];
            for (int i = 0; i < datasets.InputData.Length; i++)
            {
                datasets.ForecastedData[i]    = new double[1];
                datasets.ForecastedData[i][0] = Forecast(datasets.InputData[i]);
            }
            OnFinishRunning(new ComponentRunEventArgs(datasets));
        }
Code Example #5
        public void LabTest1()
        {
            var inputLayer  = new LinearLayer(5);
            var hiddenLayer = new TanhLayer(neuronCount);
            var outputLayer = new TanhLayer(2);

            new BackpropagationConnector(inputLayer, hiddenLayer);
            new BackpropagationConnector(hiddenLayer, outputLayer);
            _xorNetwork = new BackpropagationNetwork(inputLayer, outputLayer);
            _xorNetwork.SetLearningRate(learningRate);

            var trainingSet = new TrainingSet(5, 2);

            trainingSet.Add(new TrainingSample(new double[] { 0, 0, 0, 0, 0 }, new double[] { 0, 0 }));
            trainingSet.Add(new TrainingSample(new double[] { 0, 0, 0, 1, 0 }, new double[] { 3, 3 }));
            trainingSet.Add(new TrainingSample(new double[] { 0, 0, 1, 0, 0 }, new double[] { 2, 2 }));
            trainingSet.Add(new TrainingSample(new double[] { 0, 0, 1, 1, 0 }, new double[] { 2, 3 }));
            trainingSet.Add(new TrainingSample(new double[] { 0, 1, 0, 0, 0 }, new double[] { 1, 1 }));
            trainingSet.Add(new TrainingSample(new double[] { 0, 1, 0, 1, 0 }, new double[] { 1, 3 }));
            trainingSet.Add(new TrainingSample(new double[] { 0, 1, 1, 0, 0 }, new double[] { 1, 2 }));
            trainingSet.Add(new TrainingSample(new double[] { 0, 1, 1, 1, 0 }, new double[] { 1, 3 }));
            trainingSet.Add(new TrainingSample(new double[] { 22, 1, 1, 1, 22 }, new double[] { 1, 3 }));

            _errorList = new double[cycles];

            //_xorNetwork.EndEpochEvent += EndEpochEvent;
            _xorNetwork.Learn(trainingSet, cycles);

            var result = _xorNetwork.Run(new double[] { 0, 0, 1, 1, 0 });
        }
Code Example #6
    void CreateNewNetwork()
    {
        LinearLayer  inputLayer   = new LinearLayer(neurons);
        SigmoidLayer hiddenLayer  = new SigmoidLayer(hidden1Neurons);
        SigmoidLayer hiddenLayer2 = new SigmoidLayer(hidden2Neurons);


        LinearLayer outputLayer = new LinearLayer(outputNum);


        BackpropagationConnector conn1 = new BackpropagationConnector(inputLayer, hiddenLayer);

        conn1.Initializer = new RandomFunction(0d, 0.001d);
        BackpropagationConnector conn3 = new BackpropagationConnector(hiddenLayer, hiddenLayer2);

        conn3.Initializer = new RandomFunction(0d, 0.001d);
        BackpropagationConnector conn2 = new BackpropagationConnector(hiddenLayer2, outputLayer);

        conn2.Initializer = new RandomFunction(0d, 0.001d);

        conn1.Initialize();
        conn2.Initialize();
        conn3.Initialize();


        neuralNetwork = new BackpropagationNetwork(inputLayer, outputLayer);
        neuralNetwork.SetLearningRate(learningRate);

        neuralNetwork.Initialize();
    }
Code Example #7
        //Get neural network
        public virtual NeuronDotNet.Core.Network getNeural()
        {
            LinearLayer  inputLayer  = new LinearLayer(23);
            SigmoidLayer outputLayer = new SigmoidLayer(100);

            return(new BackpropagationNetwork(inputLayer, outputLayer));
        }
Code Example #8
File: GpuLayersTests.cs Project: olegtarasov/Retia
        public void CanComputeForwardLinear()
        {
            var dataSet = new TestDataSet <float>(3, 4, 5, 10);

            var linLayer = new LinearLayer <float>(dataSet.InputSize, dataSet.TargetSize);

            TestGpuLayer(linLayer, dataSet);
        }
Code Example #9
        public CrowNetBPP()
        {
            LinearLayer inputNull  = new LinearLayer(1);
            LinearLayer outputNull = new LinearLayer(1);
            BackpropagationConnector nullConnector = new BackpropagationConnector(inputNull, outputNull);

            this.network = new BackpropagationNetwork(inputNull, outputNull);
        }
Code Example #10
        public void CanGradientCheckLinearLayer()
        {
            var layer = new LinearLayer <double>(5, 3)
            {
                ErrorFunction = new MeanSquareError <double>()
            };

            TestLayer(layer);
        }
Code Example #11
File: Form1.cs Project: aydosenes/letter-recognition
 private void Form1_OnLoad(object sender, EventArgs e)
 {
     inputTier     = new LinearLayer(35);
     hiddenTier    = new SigmoidLayer(3);
     outputTier    = new SigmoidLayer(5);
     _             = new BackpropagationConnector(inputTier, hiddenTier);
     _             = new BackpropagationConnector(hiddenTier, outputTier);
     neuralNetwork = new BackpropagationNetwork(inputTier, outputTier);
     neuralNetwork.Initialize();
 }
Code Example #12
        private void WFAnnRecognition_Load(object sender, EventArgs e)
        {
            lstLog.Items.Insert(0, "Initialize ANN model");
            inputLayer  = new LinearLayer(35);
            hiddenLayer = new SigmoidLayer(3);
            outputLayer = new SigmoidLayer(5);
            BackpropagationConnector connector  = new BackpropagationConnector(inputLayer, hiddenLayer);
            BackpropagationConnector connector2 = new BackpropagationConnector(hiddenLayer, outputLayer);

            network = new BackpropagationNetwork(inputLayer, outputLayer);
            network.Initialize();
        }
Code Example #13
        public void CanChainWeightCombinerWithSoftMax()
        {
            NetworkComponent layer = new LinearLayer(new double[, ] {
                { 1 }
            });
            NetworkComponent smu = new SoftMaxUnit(1);

            NetworkComponentChain smlayer = new NetworkComponentChain();

            smlayer.Add(layer);
            smlayer.Add(smu);
        }
Code Example #14
 public Imagine(int w, int h)
 {
     W      = w;
     H      = h;
     Inputs = W * H * 3;
     iLay   = new LinearLayer(Inputs);
     hLay   = new SigmoidLayer(neuronCount);
     oLay   = new SigmoidLayer(w * h * 3);
     c1     = new BackpropagationConnector(iLay, hLay, ConnectionMode.Complete);
     c2     = new BackpropagationConnector(hLay, oLay, ConnectionMode.Complete);
     net    = new BackpropagationNetwork(iLay, oLay);
     net.SetLearningRate(learningRate);
 }
Code Example #15
        private void Form1_Load(object sender, EventArgs e)
        {
            // The Turkish prompt reads "Enter the number of hidden layers" (example input: 3); the value is used as the hidden layer's neuron count.
            int gizlikatmansayisi = Convert.ToInt32(Microsoft.VisualBasic.Interaction.InputBox("Gizli Katman Sayısını Giriniz", "Bilgi Girişi", "Örn: 3", 0, 0));

            giriskatmanı = new LinearLayer(35);
            gizlikatman  = new SigmoidLayer(gizlikatmansayisi);
            cikiskatmani = new SigmoidLayer(5);
            BackpropagationConnector giris_gizli_baglanti = new BackpropagationConnector(giriskatmanı, gizlikatman);
            BackpropagationConnector gizli_cikis_baglanti = new BackpropagationConnector(gizlikatman, cikiskatmani);

            ag = new BackpropagationNetwork(giriskatmanı, cikiskatmani);
            ag.Initialize();
        }
Code Example #16
File: Brain.cs Project: tiagosomda/NeuralGames
    private int CreateNeuralNetwork(int input, int output, int[] hidden)
    {
        LinearLayer  inputLayer  = new LinearLayer(input);
        SigmoidLayer outputLayer = new SigmoidLayer(output);

        // minimum size
        if (hidden == null)
        {
            hidden = new int[] { input + 1, input + 1 };
        }

        var hiddenLayers = new SigmoidLayer[hidden.Length];

        // plus two because of the input and the output layers
        var connectors = new BackpropagationConnector[hidden.Length + 2];

        // create the hidden layers
        for (int k = 0; k < hidden.Length; k++)
        {
            hiddenLayers[k] = new SigmoidLayer(hidden[k]);
        }

        // back propagation from first hidden layer to input
        connectors[0] = new BackpropagationConnector(inputLayer, hiddenLayers[0]);

        // back propagation between the hidden layers
        for (int k = 1; k < hidden.Length; k++)
        {
            connectors[k] = new BackpropagationConnector(hiddenLayers[k - 1], hiddenLayers[k]);
        }
        // back propagation from output to last hidden layer
        connectors[hidden.Length] = new BackpropagationConnector(hiddenLayers[hidden.Length - 1], outputLayer);

        // The network
        neuralNetwork = new BackpropagationNetwork(inputLayer, outputLayer);

        #region retrieve network weight count
        int netWeightCount = 0;

        foreach (BackpropagationConnector connector in neuralNetwork.Connectors)
        {
            foreach (BackpropagationSynapse synapse in connector.Synapses)
            {
                netWeightCount += 2;
            }
        }
        #endregion

        return(netWeightCount);
    }
Code Example #17
        public void LinearLayerHasRightRun()
        {
            double[,] weights = new double[, ] {
                { 1, 0, 1 }, { 1, 1, 0 }
            };
            NetworkVector inputvector = new NetworkVector(new double[] { 1, 2, 3 });
            Layer         layer       = new LinearLayer(weights);

            layer.Run(inputvector);
            double[] result         = layer.Output.ToArray();
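            // The expected output is the weight matrix applied to the input:
            // row {1, 0, 1} · {1, 2, 3} = 4 and row {1, 1, 0} · {1, 2, 3} = 3.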
            double[] expectedResult = new double[] { 4, 3 };
            Assert.AreEqual(expectedResult[0], result[0]);
            Assert.AreEqual(expectedResult[1], result[1]);
        }
Code Example #18
        private void button8_Click(object sender, EventArgs e)
        {
            LinearLayer  inputLayer  = new LinearLayer(Convert.ToInt32(textBox3.Text));
            SigmoidLayer hiddenLayer = new SigmoidLayer(Convert.ToInt32(textBox4.Text));
            SigmoidLayer outputLayer = new SigmoidLayer(Convert.ToInt32(textBox5.Text));


            BackpropagationConnector conn1 = new BackpropagationConnector(inputLayer, hiddenLayer);
            BackpropagationConnector conn2 = new BackpropagationConnector(hiddenLayer, outputLayer);

            network = new BackpropagationNetwork(inputLayer, outputLayer);
            network.Initialize();

            MessageBox.Show("Rete generata con successo."); // Italian: "Network generated successfully."
        }
Code Example #19
        public void BackpropagateRunsWithZeroLayerInput()
        {
            double[,] weights = new double[, ] {
                { 1 }
            };
            NetworkVector outputgradient = new NetworkVector(new double[] { 1 });
            Layer         layer          = new LinearLayer(weights);

            layer.BackPropagate(outputgradient);
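            // With weights {{1}} and output gradient (1), the expected input gradient W^T·g below is also (1);
            // the test mainly checks that backpropagation runs even though no forward pass was made.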

            double[] inputGradientCheck  = new double[] { 1 };
            double[] inputGradientValues = layer.InputGradient.ToArray();
            for (int i = 0; i < layer.NumberOfInputs; i++)
            {
                Assert.AreEqual(inputGradientCheck[i], inputGradientValues[i]);
            }
        }
Code Example #20
        //Initialize agent's parameters
        public override void agent_init(char type, bool policy, string agentName, int inputCount)
        {
            //Initialize neural net
            LinearLayer  inputLayer  = new LinearLayer(inputCount + 1);
            SigmoidLayer hiddenLayer = new SigmoidLayer(150);
            LinearLayer  outputLayer = new LinearLayer(1);

            new BackpropagationConnector(inputLayer, hiddenLayer).Initializer  = new RandomFunction(-0.5, 0.5);
            new BackpropagationConnector(hiddenLayer, outputLayer).Initializer = new RandomFunction(-0.5, 0.5);

            this.network = new BackpropagationNetwork(inputLayer, outputLayer);

            this.network.SetLearningRate(0.2);
            this.network.Initialize();


            #region Initialize_parameters

            this.name = agentName;
            this.id   = Int32.Parse(agentName.Last().ToString());

            this.agentType    = type;
            this.policyFrozen = policy;

            if (policy)
            {
                this.epsilon = 0;
                this.alpha   = 0;
            }
            else
            {
                this.epsilon = 0.5;
                this.alpha   = 0.2;
            }

            this.gamma = 0.95;
            this.lamda = 0.8;

            currentEpoch = 1;

            initParams();

            #endregion Initialize_parameters
        }
Code Example #21
File: App.cs Project: olegtarasov/Retia
        public void TestGpuLayers()
        {
            var dataSet = new TestDataSet <float>(3, 4, 5, 10);

            Console.WriteLine("Testing softmax forward");
            var softmaxLayer = new SoftMaxLayer <float>(dataSet.InputSize);

            TestLayerForward(softmaxLayer, dataSet, dataSet.InputSize);

            Console.WriteLine("Testing linear forward");
            var linLayer = new LinearLayer <float>(dataSet.InputSize, dataSet.TargetSize, new RandomMatrixInitializer <float>());

            TestLayerForward(linLayer, dataSet);

            Console.WriteLine("Testing GRU forward");
            var gruLayer = new GruLayer <float>(dataSet.InputSize, dataSet.TargetSize, new ProportionalRandomMatrixInitializer <float>(), new ProportionalRandomMatrixInitializer <float>(), new RandomMatrixInitializer <float>());

            TestLayerForward(gruLayer, dataSet);
        }
Code Example #22
    public Model()
    {
        float[,] aX = LoadCsv("dataX.csv");
        float[,] aY = LoadCsv("dataY.csv");
        _dataX      = new TFTensor(aX);
        _dataY      = new TFTensor(aY);

        _session = new TFSession();
        _graph   = _session.Graph;
        _input   = _graph.Placeholder(TFDataType.Float);
        _output  = _graph.Placeholder(TFDataType.Float);
        _y_out   = new LinearLayer(_graph, _input, (int)_dataX.Shape[0], 1);
        _cost    = _graph.ReduceMean(_graph.SigmoidCrossEntropyWithLogits(_y_out.Result, _output));
        _gradientDescentOptimizer = new GradientDescentOptimizer(_graph, _cost, _y_out.W, _y_out.b);
        _gradientDescentOptimizer.ApplyGradientDescent(_graph);
        var runner = _session.GetRunner();

        runner.AddTarget(_y_out.InitB.Operation);
        runner.Run();
    }
Code Example #23
        public void BackPropagateIsCorrect()
        {
            double[,] weights = new double[, ] {
                { 1, 2 }, { 3, 5 }
            };
            Layer layer = new LinearLayer(weights);

            NetworkVector layerinput = new NetworkVector(new double[] { 1, -1 });

            layer.Run(layerinput);

            NetworkVector outputgradient = new NetworkVector(new double[] { 7, 11 });

            layer.BackPropagate(outputgradient);
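            // The expected values below are one full gradient step with input x = (1, -1) and output gradient g = (7, 11):
            // the weights become W - g·x^T, the biases (initially zero) become -g, and the input gradient is W^T·g = (40, 69).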

            double[,] weightsCheck = new double[, ] {
                { -6, 9 }, { -8, 16 }
            };
            LayerState state = layer.State;

            for (int i = 0; i < layer.NumberOfInputs; i++)
            {
                for (int j = 0; j < layer.NumberOfInputs; j++)
                {
                    Assert.AreEqual(weightsCheck[i, j], state.Weights[i, j], string.Format("Failed for (i, j) = ({0}, {1}", i, j));
                }
            }

            double[] biasesCheck = new double[] { -7, -11 };
            for (int i = 0; i < layer.NumberOfInputs; i++)
            {
                Assert.AreEqual(biasesCheck[i], layer.State.Biases[i]);
            }

            double[] inputGradientCheck  = new double[] { 40, 69 };
            double[] inputGradientValues = layer.InputGradient.ToArray();
            for (int i = 0; i < layer.NumberOfInputs; i++)
            {
                Assert.AreEqual(inputGradientCheck[i], inputGradientValues[i], string.Format("Failure for input {0}", i));
            }
        }
Code Example #24
        /// <summary>
        /// This constructor creates a default network to work with.
        /// </summary>
        /// <param name="aoe2Directory">Directory of your age of empires game.</param>
        /// <param name="aiScript">Name of your ai script that you want to generate.</param>
        public AiTrainingModule(string aoe2Directory, string aiScript)
        {
            _aoe2Directory = aoe2Directory;
            _aiScript      = aiScript;

            _numberOfInitialCycles    = 100000;
            _numberOfContinuousCycles = 10000;
            _numberOfNeuronRefreshes  = 0;

            // Keep track of random number of neurons here.
            int numberOfInputNeurons  = 10;
            int numberOfHiddenNeurons = 10;
            int numberOfOutputNeurons = 8;

            double learningRate = 0.25;

            _errorList = new LinkedList <double>();

            LinearLayer  inputLayer  = new LinearLayer(numberOfInputNeurons);
            SigmoidLayer hiddenLayer = new SigmoidLayer(numberOfHiddenNeurons);
            SigmoidLayer outputLayer = new SigmoidLayer(numberOfOutputNeurons);

            // Wow, the hidden layer really is hidden. What I think these connectors do is
            // insert themselves as part of the various layers. This really hides the hidden
            // layer from the network, as only the connectors then modify the hidden layer;
            // in other words, that is how it gets "trained".
            var conn1 = new BackpropagationConnector(inputLayer, hiddenLayer);
            var conn2 = new BackpropagationConnector(hiddenLayer, outputLayer);

            _nueralNetwork = new BackpropagationNetwork(inputLayer, outputLayer);
            _nueralNetwork.SetLearningRate(learningRate);
            _nueralNetwork.EndEpochEvent += BackgroundTasks; // hehe call back methods.

            // Needs to make initial configuration of AI.

            // If this module is being instantiated for the first time, create a comprehensive
            // knowledge base/ network so it can continue where it last left off. Tweak the
            // query to filter outliers.
            _rawMgxStats = StreamUtilities.GetAiDataSet();
            _nueralNetwork.Learn(CompileTrainingSet(), _numberOfInitialCycles);
        }
Code Example #25
        public void Test1()
        {
            LinearLayer layer = new LinearLayer(2, 2);

            // Bias
            layer.Matrix[0, 0] = 100.0f;
            // Weights
            layer.Matrix[1, 0] = 1.0f;
            layer.Matrix[2, 0] = 7.0f;

            // Bias
            layer.Matrix[0, 1] = 50.0f;
            // Weights
            layer.Matrix[1, 1] = 1.0f;
            layer.Matrix[2, 1] = 0.0f;

            Matrix input  = new Matrix(1.0f, 1.0f, 1.0f);
            Matrix output = layer.Evaluate(input);
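            // Each output is the input (1, 1, 1) dotted with one column of the matrix:
            // 100 + 1 + 7 = 108 and 50 + 1 + 0 = 51.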

            Assert.Equal(new Matrix(108.0f, 51.0f), output);
        }
Code Example #26
File: CrowNetUP.cs Project: substage/Crow
        public List <ActivationLayer> HiddenLayerList()
        {
            List <ActivationLayer> ActivationLayerList = new List <ActivationLayer>();

            for (int i = 0; i < neuronCount.Count; i++)
            {
                if (neuronCount[i] < 1)
                {
                    neuronCount[i] = 1;
                }

                if (layerStructure[i] == 0)
                {
                    SigmoidLayer currenthiddenLayer = new SigmoidLayer(neuronCount[i]);
                    ActivationLayerList.Add(currenthiddenLayer);
                }
                else if (layerStructure[i] == 1)
                {
                    LinearLayer currenthiddenLayer = new LinearLayer(neuronCount[i]);
                    ActivationLayerList.Add(currenthiddenLayer);
                }
                else if (layerStructure[i] == 2)
                {
                    LogarithmLayer currenthiddenLayer = new LogarithmLayer(neuronCount[i]);
                    ActivationLayerList.Add(currenthiddenLayer);
                }
                else if (layerStructure[i] == 3)
                {
                    SineLayer currenthiddenLayer = new SineLayer(neuronCount[i]);
                    ActivationLayerList.Add(currenthiddenLayer);
                }
                else if (layerStructure[i] == 4)
                {
                    TanhLayer currenthiddenLayer = new TanhLayer(neuronCount[i]);
                    ActivationLayerList.Add(currenthiddenLayer);
                }
                else
                {
                    return(new List <ActivationLayer>());
                }
            }

            return(ActivationLayerList);
        }
Code Example #27
        public void BackpropagateRunsTwoByThree()
        {
            double[,] weights = new double[, ] {
                { 1, 2, 3 }, { 2, 3, 4 }
            };
            Layer layer = new LinearLayer(weights);

            NetworkVector layerinput = new NetworkVector(new double[] { 1, 0, -1 });

            layer.Run(layerinput);

            NetworkVector outputgradient = new NetworkVector(new double[] { 1, 1 });

            layer.BackPropagate(outputgradient);
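            // The expected input gradient is W^T·g: with g = (1, 1), each component is a column sum
            // of the weights, i.e. 1 + 2 = 3, 2 + 3 = 5, 3 + 4 = 7.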

            double[] inputGradientCheck  = new double[] { 3, 5, 7 };
            double[] inputGradientValues = layer.InputGradient.ToArray();
            for (int i = 0; i < layer.NumberOfInputs; i++)
            {
                Assert.AreEqual(inputGradientCheck[i], inputGradientValues[i], string.Format("Failure for input {0}", i));
            }
        }
Code Example #28
File: Trainer.cs Project: JkLiebana/SceneCreator
    void CreateNewNetwork()
    {
        Debug.Log("Creating new network...");

        LinearLayer  inputLayer   = new LinearLayer(neurons);
        SigmoidLayer hiddenLayer  = new SigmoidLayer(hidden1Neurons);
        SigmoidLayer hiddenLayer2 = new SigmoidLayer(hidden2Neurons);

        LinearLayer outputLayer = new LinearLayer(outputNum);


        BackpropagationConnector conn1 = new BackpropagationConnector(inputLayer, hiddenLayer);

        conn1.Initializer = new RandomFunction(0d, 0.00001d);
        BackpropagationConnector conn3 = new BackpropagationConnector(hiddenLayer, hiddenLayer2);

        conn3.Initializer = new RandomFunction(0d, 0.00001d);
        BackpropagationConnector conn2 = new BackpropagationConnector(hiddenLayer2, outputLayer);

        conn2.Initializer = new RandomFunction(0d, 0.00001d);

        conn1.Initialize();
        conn2.Initialize();
        conn3.Initialize();


        if (NetworkManager.Instance._neuralNetwork != null)
        {
            Debug.Log("A network already exists... new network will overwrite it");
        }
        Debug.Log("Created.");

        NetworkManager.Instance._neuralNetwork = new BackpropagationNetwork(inputLayer, outputLayer);
        NetworkManager.Instance._neuralNetwork.SetLearningRate(learningRate);
        NetworkManager.Instance.setNeuralNetwork(NetworkManager.Instance._neuralNetwork);

        NetworkManager.Instance.InitializeNetwork(neurons);
    }
Code Example #29
        public void CanUseBigLinearLayer()
        {
            double[,] weights = new double[2000, 1000];
            double[] input = new double[1000];

            for (int i = 0; i < 1000; i++)
            {
                weights[i, i] = 1.0;
                input[i]      = (double)i;
            }

            NetworkVector inputvector = new NetworkVector(input);
            Layer         layer       = new LinearLayer(weights);

            layer.Run(inputvector);
            double[] result = layer.Output.ToArray();
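            // Only the top 1000x1000 block of the weights was populated (as an identity), so the first
            // 1000 outputs should echo the input, while the remaining 1000 outputs, whose rows are all zero, stay at 0.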

            for (int i = 0, j = 1000; i < 1000; i++, j++)
            {
                Assert.AreEqual((double)i, result[i], "Failed for i = " + i);
                Assert.AreEqual(0.0, result[j], "Failed for j = " + j);
            }
        }
Code Example #30
File: MainForm.cs Project: lanicon/waveletstudio
        private void Start(object sender, EventArgs e)
        {
            CleanseGraph();
            EnableControls(false);
            curve.Color = enabledColor;

            if (!int.TryParse(txtCycles.Text, out cycles))
            {
                cycles = 10000;
            }
            if (!double.TryParse(txtLearningRate.Text, out learningRate))
            {
                learningRate = 0.25d;
            }
            if (!int.TryParse(txtNeuronCount.Text, out neuronCount))
            {
                neuronCount = 10;
            }

            if (cycles <= 0)
            {
                cycles = 10000;
            }
            if (learningRate < 0 || learningRate > 1)
            {
                learningRate = 0.25d;
            }
            if (neuronCount <= 0)
            {
                neuronCount = 10;
            }

            txtCycles.Text       = cycles.ToString();
            txtLearningRate.Text = learningRate.ToString();
            txtNeuronCount.Text  = neuronCount.ToString();

            LinearLayer  inputLayer  = new LinearLayer(1);
            SigmoidLayer hiddenLayer = new SigmoidLayer(neuronCount);
            SigmoidLayer outputLayer = new SigmoidLayer(1);

            new BackpropagationConnector(inputLayer, hiddenLayer).Initializer  = new RandomFunction(0d, 0.3d);
            new BackpropagationConnector(hiddenLayer, outputLayer).Initializer = new RandomFunction(0d, 0.3d);
            network = new BackpropagationNetwork(inputLayer, outputLayer);
            network.SetLearningRate(learningRate);

            TrainingSet trainingSet = new TrainingSet(1, 1);
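            // Each picked curve point is expanded into a small cluster of samples (a ±0.05 window in 0.01 steps),
            // all sharing that point's Y value, so the network learns a smoothed version of the drawn curve.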

            for (int i = 0; i < curve.Points.Count; i++)
            {
                double xVal = curve.Points[i].X;
                for (double input = xVal - 0.05; input < xVal + 0.06; input += 0.01)
                {
                    trainingSet.Add(new TrainingSample(new double[] { input }, new double[] { curve.Points[i].Y }));
                }
            }

            network.EndEpochEvent += new TrainingEpochEventHandler(
                delegate(object senderNetwork, TrainingEpochEventArgs args)
            {
                trainingProgressBar.Value = (int)(args.TrainingIteration * 100d / cycles);
                Application.DoEvents();
            });
            network.Learn(trainingSet, cycles);
            StopLearning(this, EventArgs.Empty);
        }