예제 #1
0
파일: CrowNetUP.cs 프로젝트: substage/Crow
        /// <summary>
        /// Builds a fully connected backpropagation network: a linear input layer,
        /// the previously configured hidden layers, and a sigmoid output layer.
        /// </summary>
        /// <param name="trainInVectorDimension">Number of input neurons (training input vector size).</param>
        /// <param name="trainOutVectorDimension">Number of output neurons (training output vector size).</param>
        /// <returns>The assembled network with this instance's learning rate applied.</returns>
        public BackpropagationNetwork network(int trainInVectorDimension, int trainOutVectorDimension)
        {
            this.hiddenLayerList = HiddenLayerList();

            ActivationLayer inputLayer  = new LinearLayer(trainInVectorDimension);
            ActivationLayer outputLayer = new SigmoidLayer(trainOutVectorDimension);

            // A BackpropagationConnector registers itself with the layers it joins,
            // so constructing it is sufficient; the instances need not be retained.
            // (Previously each was assigned to a single repeatedly-overwritten local.)
            new BackpropagationConnector(inputLayer, this.hiddenLayerList[0]);
            for (int i = 1; i < this.hiddenLayerList.Count; i++)
            {
                new BackpropagationConnector(this.hiddenLayerList[i - 1], this.hiddenLayerList[i]);
            }
            new BackpropagationConnector(this.hiddenLayerList[this.hiddenLayerList.Count - 1], outputLayer);

            BackpropagationNetwork network = new BackpropagationNetwork(inputLayer, outputLayer);
            network.SetLearningRate(this.learningRate);

            return network;
        }
예제 #2
0
        /// <summary>
        /// Trains a tiny all-linear 1-5-1 network to approximate y = PI * x
        /// and prints a single prediction to the console.
        /// </summary>
        public void TestMethod1()
        {
            // Build the input, hidden and output layers.
            var input  = new LinearLayer(1);
            var hidden = new LinearLayer(5);
            var output = new LinearLayer(1);

            // Wire the layers together; connectors register themselves.
            new BackpropagationConnector(input, hidden, ConnectionMode.Complete);
            new BackpropagationConnector(hidden, output, ConnectionMode.Complete);

            // Assemble the network.
            var network = new BackpropagationNetwork(input, output);

            //network.SetLearningRate(new LinearFunction(0.1, 0.6));
            network.Initialize();

            // Train on samples of y = PI * x for x = 0..99.
            var ran = new Random();

            for (var i = 0; i < 100; i++)
            {
                var sample = new TrainingSample(new double[] { i },
                                                new double[] { Math.PI * i });
                network.Learn(sample, i, 100);
            }

            // Predict for x = 1 and print the result.
            var prediction = network.Run(new double[] { 1 });

            Console.WriteLine(prediction[0]);
        }
예제 #3
0
        /// <summary>
        /// Builds a 5-input / 2-output network (tanh hidden and output layers),
        /// trains it on a small hand-written sample set and runs one prediction.
        /// Uses the neuronCount, learningRate, cycles, _errorList and _xorNetwork
        /// members declared elsewhere in this class.
        /// </summary>
        public void LabTest1()
        {
            var inputLayer  = new LinearLayer(5);
            var hiddenLayer = new TanhLayer(neuronCount);
            var outputLayer = new TanhLayer(2);

            // Connectors register themselves with the layers; references not kept.
            new BackpropagationConnector(inputLayer, hiddenLayer);
            new BackpropagationConnector(hiddenLayer, outputLayer);
            _xorNetwork = new BackpropagationNetwork(inputLayer, outputLayer);
            _xorNetwork.SetLearningRate(learningRate);

            var trainingSet = new TrainingSet(5, 2);

            // NOTE(review): targets of 2/3 (and the 22s in the last sample) lie far
            // outside the tanh output range of (-1, 1) — confirm this is intended.
            trainingSet.Add(new TrainingSample(new double[] { 0, 0, 0, 0, 0 }, new double[] { 0, 0 }));
            trainingSet.Add(new TrainingSample(new double[] { 0, 0, 0, 1, 0 }, new double[] { 3, 3 }));
            trainingSet.Add(new TrainingSample(new double[] { 0, 0, 1, 0, 0 }, new double[] { 2, 2 }));
            trainingSet.Add(new TrainingSample(new double[] { 0, 0, 1, 1, 0 }, new double[] { 2, 3 }));
            trainingSet.Add(new TrainingSample(new double[] { 0, 1, 0, 0, 0 }, new double[] { 1, 1 }));
            trainingSet.Add(new TrainingSample(new double[] { 0, 1, 0, 1, 0 }, new double[] { 1, 3 }));
            trainingSet.Add(new TrainingSample(new double[] { 0, 1, 1, 0, 0 }, new double[] { 1, 2 }));
            trainingSet.Add(new TrainingSample(new double[] { 0, 1, 1, 1, 0 }, new double[] { 1, 3 }));
            trainingSet.Add(new TrainingSample(new double[] { 22, 1, 1, 1, 22 }, new double[] { 1, 3 }));

            // One slot per training cycle for recording epoch error.
            _errorList = new double[cycles];

            //_xorNetwork.EndEpochEvent += EndEpochEvent;
            _xorNetwork.Learn(trainingSet, cycles);

            // Single prediction; result is unused — presumably inspected in a debugger.
            var result = _xorNetwork.Run(new double[] { 0, 0, 1, 1, 0 });
        }
            /// <summary>
            /// Constructs an Adam-based trainer bound to the given network.
            /// </summary>
            /// <param name="TargetNetwork">The network whose weights will be trained.</param>
            /// <param name="TrainingDataset">Input/output data sets used during training.</param>
            /// <param name="LearningRate">Initial learning rate; defaults to 0.1.</param>
            public AdamTrainer(BackpropagationNetwork TargetNetwork, IOMetaDataSet <double[]> TrainingDataset, double LearningRate = 0.1)
            {
                adamDecent = new AdamDecent(LearningRate);

                this.TargetNetwork   = TargetNetwork;
                this.TrainingDataset = TrainingDataset;
            }
예제 #5
0
        /// <summary>
        /// Trains the forecasting ANN: builds an input-hidden-output network from
        /// the model parameters, learns the converted datasets, then writes a
        /// one-value forecast for every input row into datasets.ForecastedData.
        /// </summary>
        /// <param name="datasets">Forecasting data; InputData supplies the rows to train on and forecast.</param>
        public override void Train(IForecastingDataSets datasets)
        {
            OnStartRunning(new ComponentRunEventArgs(datasets));
            // NOTE(review): if mParameter is not an AnnModelParameter this yields
            // null and the dereferences below throw — confirm callers set the type.
            AnnModelParameter para = mParameter as AnnModelParameter;

            // Input layer width follows the first input row's dimension.
            LinearLayer inputLayer = new LinearLayer(datasets.InputData[0].Length);

            // Single hidden layer; only HiddenNeuronsCount[0] is used here.
            SigmoidLayer hiddenLayer = new SigmoidLayer(para.HiddenNeuronsCount[0]);
            SigmoidLayer outputLayer = new SigmoidLayer(1);

            // Connectors attach themselves; weights start as small random values.
            new BackpropagationConnector(inputLayer, hiddenLayer).Initializer  = new RandomFunction(0d, 0.3d);
            new BackpropagationConnector(hiddenLayer, outputLayer).Initializer = new RandomFunction(0d, 0.3d);
            network = new BackpropagationNetwork(inputLayer, outputLayer);
            network.SetLearningRate(para.LearningRate);
            network.JitterEpoch      = para.JitterEpoch;
            network.JitterNoiseLimit = para.JitterNoiseLimit;
            // Report epoch progress to listeners; the error argument is a placeholder.
            network.EndEpochEvent   += new TrainingEpochEventHandler(
                delegate(object senderNetwork, TrainingEpochEventArgs args)
            {
                // TODO: trainning error needs to be calculated
                OnRunningEpoch(new AnnModelRunEpochEventArgs(args.TrainingIteration + 1, 0));
            });

            network.Learn(ForecastingDataSets.ConvertToTrainingSet(datasets), para.Iterations);

            // Forecast every input row; each forecast is a single value.
            datasets.ForecastedData = new double[datasets.InputData.Length][];
            for (int i = 0; i < datasets.InputData.Length; i++)
            {
                datasets.ForecastedData[i]    = new double[1];
                datasets.ForecastedData[i][0] = Forecast(datasets.InputData[i]);
            }
            OnFinishRunning(new ComponentRunEventArgs(datasets));
        }
예제 #6
0
    /// <summary>
    /// Builds a four-layer network (linear input, two sigmoid hidden layers,
    /// linear output), seeds every connector with tiny random weights, then
    /// creates and initializes the backpropagation network.
    /// </summary>
    void CreateNewNetwork()
    {
        var inputLayer   = new LinearLayer(neurons);
        var hiddenLayer  = new SigmoidLayer(hidden1Neurons);
        var hiddenLayer2 = new SigmoidLayer(hidden2Neurons);
        var outputLayer  = new LinearLayer(outputNum);

        // Connect consecutive layers; each starts from near-zero random weights.
        var inToH1  = new BackpropagationConnector(inputLayer, hiddenLayer);
        inToH1.Initializer = new RandomFunction(0d, 0.001d);

        var h1ToH2  = new BackpropagationConnector(hiddenLayer, hiddenLayer2);
        h1ToH2.Initializer = new RandomFunction(0d, 0.001d);

        var h2ToOut = new BackpropagationConnector(hiddenLayer2, outputLayer);
        h2ToOut.Initializer = new RandomFunction(0d, 0.001d);

        inToH1.Initialize();
        h2ToOut.Initialize();
        h1ToH2.Initialize();

        neuralNetwork = new BackpropagationNetwork(inputLayer, outputLayer);
        neuralNetwork.SetLearningRate(learningRate);
        neuralNetwork.Initialize();
    }
        /// <summary>
        /// Verifies that AddOutboundConnection rejects a null connection argument.
        /// NOTE(review): no assert here — presumably an [ExpectedException]-style
        /// attribute above this method (outside this view) checks the throw.
        /// </summary>
        public void AddOutboundConnection_ValidatesArgs()
        {
            // Setup
            var network = new BackpropagationNetwork(new Mock<IErrorCalculator>().Object);

            // Execute/Verify
            network.AddOutboundConnection(null);
        }
예제 #8
0
        /// <summary>
        /// Grasshopper solve routine: trains the supplied CrowNet backpropagation
        /// definition on the given input/output vector trees (unless a network was
        /// just loaded from disk), then outputs the MSE and the trained network.
        /// </summary>
        /// <param name="DA">Data access object for component inputs/outputs.</param>
        protected override void SolveInstance(IGH_DataAccess DA)
        {
            CrowNetBP net = new CrowNetBP();

            if (!networkLoaded)
            {
                int cycles = 1000;

                GH_Structure <GH_Number> tiv = new GH_Structure <GH_Number>();
                GH_Structure <GH_Number> tov = new GH_Structure <GH_Number>();

                DA.GetData(0, ref cycles);
                DA.GetData(1, ref net);
                DA.GetDataTree(2, out tiv);
                DA.GetDataTree(3, out tov);

                double[][] trainInVectors  = Utils.GHTreeToMultidimensionalArray(tiv);
                double[][] trainOutVectors = Utils.GHTreeToMultidimensionalArray(tov);

                int trainVectorCount = trainInVectors.Length;
                if (trainVectorCount != trainOutVectors.Length)
                {
                    AddRuntimeMessage(GH_RuntimeMessageLevel.Error, "Please supply an equal amount of input and output training vectors!");
                    // BUG FIX: previously fell through and trained on mismatched sets.
                    return;
                }
                if (trainVectorCount == 0)
                {
                    // Guard: indexing [0] below would throw on empty input trees.
                    AddRuntimeMessage(GH_RuntimeMessageLevel.Error, "Please supply at least one training vector!");
                    return;
                }

                int trainInVectorDimension  = trainInVectors[0].Length;
                int trainOutVectorDimension = trainOutVectors[0].Length;

                BackpropagationNetwork network = net.network(trainInVectorDimension, trainOutVectorDimension);

                // Assemble the training set from the paired vectors.
                TrainingSet trainingSet = new TrainingSet(trainInVectorDimension, trainOutVectorDimension);

                for (int i = 0; i < trainVectorCount; i++)
                {
                    trainingSet.Add(new TrainingSample(trainInVectors[i], trainOutVectors[i]));
                }

                // Train for the requested number of cycles.
                network.Learn(trainingSet, cycles);
                this.Network = network;
            }
            if (this.Network != null)
            {
                DA.SetData(0, this.Network.MeanSquaredError.ToString("0.0000000000"));

                CrowNetBPP nn = new CrowNetBPP(this.Network);
                nn.hiddenLayerList = net.hiddenLayerList;
                nn.layerStructure  = net.layerStructure;
                nn.neuronCount     = net.neuronCount;
                DA.SetData(1, nn);
            }

            networkLoaded = false;
        }
예제 #9
0
 /// <summary>
 /// Builds the 35-3-5 recognition network (linear input, sigmoid hidden and
 /// output tiers) and initializes it when the form loads.
 /// </summary>
 private void Form1_OnLoad(object sender, EventArgs e)
 {
     inputTier  = new LinearLayer(35);
     hiddenTier = new SigmoidLayer(3);
     outputTier = new SigmoidLayer(5);

     // Connectors wire themselves into the layers; the references are not needed.
     new BackpropagationConnector(inputTier, hiddenTier);
     new BackpropagationConnector(hiddenTier, outputTier);

     neuralNetwork = new BackpropagationNetwork(inputTier, outputTier);
     neuralNetwork.Initialize();
 }
예제 #10
0
        /// <summary>
        /// Sets up the 35-3-5 ANN model when the form loads and logs the step.
        /// </summary>
        private void WFAnnRecognition_Load(object sender, EventArgs e)
        {
            lstLog.Items.Insert(0, "Initialize ANN model");

            inputLayer  = new LinearLayer(35);
            hiddenLayer = new SigmoidLayer(3);
            outputLayer = new SigmoidLayer(5);

            // The connectors attach themselves to the layers; no references kept.
            _ = new BackpropagationConnector(inputLayer, hiddenLayer);
            _ = new BackpropagationConnector(hiddenLayer, outputLayer);

            network = new BackpropagationNetwork(inputLayer, outputLayer);
            network.Initialize();
        }
예제 #11
0
        /// <summary>
        /// Handles the Calculate menu click: builds a 2-hidden-1 network from the
        /// UI-selected layer types, trains it on the 17 data rows, then fills the
        /// grid with predictions and absolute/relative errors.
        /// </summary>
        /// <param name="sender"></param>
        /// <param name="e"></param>
        private void tsmiCalculate_Click(object sender, EventArgs e)
        {
            // Create the input, hidden and output layers from the UI selections.
            ActivationLayer inputLayer  = GetLayer(cboInputLayerType.SelectedItem.ToString(), 2);
            ActivationLayer hiddenLayer = GetLayer(cboHiddenLayerType.SelectedItem.ToString(), int.Parse(txtHiddenLayerCount.Text));
            ActivationLayer outputLayer = GetLayer(cboOutputLayerType.SelectedItem.ToString(), 1);

            // Connect the layers with small random initial weights.
            new BackpropagationConnector(inputLayer, hiddenLayer, ConnectionMode.Complete).Initializer  = new RandomFunction(0, 0.3);
            new BackpropagationConnector(hiddenLayer, outputLayer, ConnectionMode.Complete).Initializer = new RandomFunction(0, 0.3);

            // Assemble the network and apply the user-entered learning rates.
            var network = new BackpropagationNetwork(inputLayer, outputLayer);

            network.SetLearningRate(double.Parse(txtInitialLearningRate.Text), double.Parse(txtFinalLearningRate.Text));

            // Build the training set from the 17 data rows.
            var trainingSet = new TrainingSet(2, 1);

            for (var i = 0; i < 17; i++)
            {
                var x1 = data[i, 0];
                var x2 = data[i, 1];
                var y  = data[i, 2];

                var inputVector    = new double[] { x1, x2 };
                var outputVector   = new double[] { y };
                var trainingSample = new TrainingSample(inputVector, outputVector);
                trainingSet.Add(trainingSample);
            }
            // BUG FIX: a hard-coded SetLearningRate(0.3, 0.1) here used to silently
            // override the rates the user typed into the text boxes; removed.
            network.Learn(trainingSet, int.Parse(txtTrainingEpochs.Text));
            network.StopLearning();

            // Predict each row and report absolute/relative error in the grid.
            for (var i = 0; i < 17; i++)
            {
                var x1 = data[i, 0];
                var x2 = data[i, 1];
                var y  = data[i, 2];

                var testInput  = new double[] { x1, x2 };
                var testOutput = network.Run(testInput)[0];

                var absolute = testOutput - y;
                // NOTE(review): relative error divides by the prediction; a
                // prediction of exactly 0 yields Infinity/NaN in the grid.
                var relative = Math.Abs((testOutput - y) / testOutput);

                dgvData.Rows[i].Cells[3].Value = testOutput.ToString("f3");
                dgvData.Rows[i].Cells[4].Value = absolute.ToString("f3");
                dgvData.Rows[i].Cells[5].Value = (relative * 100).ToString("f1") + "%";
            }
        }
        /// <summary>
        /// Form load: asks the user for the hidden layer size via an input box,
        /// then builds and initializes a 35-N-5 network. Fields use Turkish names
        /// (giris = input, gizli = hidden, cikis = output, ag = network).
        /// </summary>
        private void Form1_Load(object sender, EventArgs e)
        {
            // Prompt (Turkish): "Enter the hidden layer size". UI strings kept as-is.
            int gizlikatmansayisi = Convert.ToInt32(Microsoft.VisualBasic.Interaction.InputBox("Gizli Katman Sayısını Giriniz", "Bilgi Girişi", "Örn: 3", 0, 0));

            giriskatmanı = new LinearLayer(35);
            gizlikatman  = new SigmoidLayer(gizlikatmansayisi);
            cikiskatmani = new SigmoidLayer(5);
            // Connectors register themselves with the layers they join.
            BackpropagationConnector giris_gizli_baglanti = new BackpropagationConnector(giriskatmanı, gizlikatman);
            BackpropagationConnector gizli_cikis_baglanti = new BackpropagationConnector(gizlikatman, cikiskatmani);

            ag = new BackpropagationNetwork(giriskatmanı, cikiskatmani);
            ag.Initialize();
        }
예제 #13
0
 /// <summary>
 /// Builds an image network: w*h*3 inputs (3 channels per pixel) through one
 /// sigmoid hidden layer back to w*h*3 sigmoid outputs.
 /// Uses the neuronCount and learningRate members declared elsewhere.
 /// </summary>
 /// <param name="w">Image width in pixels.</param>
 /// <param name="h">Image height in pixels.</param>
 public Imagine(int w, int h)
 {
     W      = w;
     H      = h;
     Inputs = W * H * 3; // RGB: three values per pixel
     iLay   = new LinearLayer(Inputs);
     hLay   = new SigmoidLayer(neuronCount);
     oLay   = new SigmoidLayer(w * h * 3);
     c1     = new BackpropagationConnector(iLay, hLay, ConnectionMode.Complete);
     c2     = new BackpropagationConnector(hLay, oLay, ConnectionMode.Complete);
     net    = new BackpropagationNetwork(iLay, oLay);
     net.SetLearningRate(learningRate);
 }
예제 #14
0
    /// <summary>
    /// Builds a backpropagation network with the given topology and returns the
    /// number of trainable value slots counted over all synapses.
    /// </summary>
    /// <param name="input">Input layer size.</param>
    /// <param name="output">Output layer size.</param>
    /// <param name="hidden">Hidden layer sizes; null selects two layers of input + 1 neurons.</param>
    /// <returns>Count of weight slots (two per synapse).</returns>
    private int CreateNeuralNetwork(int input, int output, int[] hidden)
    {
        LinearLayer  inputLayer  = new LinearLayer(input);
        SigmoidLayer outputLayer = new SigmoidLayer(output);

        // minimum size
        if (hidden == null)
        {
            hidden = new int[] { input + 1, input + 1 };
        }

        var hiddenLayers = new SigmoidLayer[hidden.Length];

        // One connector per layer boundary: input->h0, between hiddens, last->output.
        // (Previously sized hidden.Length + 2, one more than is ever used.)
        var connectors = new BackpropagationConnector[hidden.Length + 1];

        // create the hidden layers
        for (int k = 0; k < hidden.Length; k++)
        {
            hiddenLayers[k] = new SigmoidLayer(hidden[k]);
        }

        // back propagation from first hidden layer to input
        connectors[0] = new BackpropagationConnector(inputLayer, hiddenLayers[0]);

        // back propagation between the hidden layers
        for (int k = 1; k < hidden.Length; k++)
        {
            connectors[k] = new BackpropagationConnector(hiddenLayers[k - 1], hiddenLayers[k]);
        }

        // back propagation from output to last hidden layer.
        // BUG FIX: this was stored at index hidden.Length - 1, overwriting the
        // reference to the last hidden-to-hidden connector.
        connectors[hidden.Length] = new BackpropagationConnector(hiddenLayers[hidden.Length - 1], outputLayer);

        // The network
        neuralNetwork = new BackpropagationNetwork(inputLayer, outputLayer);

        #region retrieve network weight count
        int netWeightCount = 0;

        foreach (BackpropagationConnector connector in neuralNetwork.Connectors)
        {
            foreach (BackpropagationSynapse synapse in connector.Synapses)
            {
                // Two slots per synapse — presumably weight + source-neuron bias
                // (a SetBias counterpart appears commented out elsewhere); confirm.
                netWeightCount += 2;
            }
        }
        #endregion

        return(netWeightCount);
    }
예제 #15
0
        /// <summary>
        /// Loads a flat weight vector into the network, one entry per synapse,
        /// walking connectors and synapses in the network's iteration order.
        /// </summary>
        /// <param name="aNetwork">Network whose synapse weights are overwritten.</param>
        /// <param name="weights">Flat weight array; must hold at least one value per synapse.</param>
        public static void setNetworkWeights(BackpropagationNetwork aNetwork, double[] weights)
        {
            int cursor = 0;

            foreach (BackpropagationConnector connector in aNetwork.Connectors)
            {
                foreach (BackpropagationSynapse synapse in connector.Synapses)
                {
                    synapse.Weight = weights[cursor];
                    cursor++;
                    // synapse.SourceNeuron.SetBias(weights[cursor++]);
                }
            }
        }
예제 #16
0
        /// <summary>
        /// Builds the network from three layer-type codes (resolved by createLayer)
        /// and allocates the sample input/output buffers.
        /// </summary>
        /// <param name="fLayer">Layer type code for the input layer.</param>
        /// <param name="sLayer">Layer type code for the hidden layer.</param>
        /// <param name="tLayer">Layer type code for the output layer.</param>
        public void initNet(int fLayer, int sLayer, int tLayer)
        {
            ActivationLayer inputLayer  = createLayer(fLayer, inputNeurons);
            ActivationLayer hiddenLayer = createLayer(sLayer, hiddenNeurons);
            ActivationLayer outputLayer = createLayer(tLayer, outputNeurons);

            //var hiddenLayer = new LinearLayer(hiddenNeurons);
            //var outputLayer = new LinearLayer(outputNeurons);
            // Connectors register themselves; weights start as small random values.
            new BackpropagationConnector(inputLayer, hiddenLayer).Initializer  = new RandomFunction(0d, 0.3d);
            new BackpropagationConnector(hiddenLayer, outputLayer).Initializer = new RandomFunction(0d, 0.3d);
            network = new BackpropagationNetwork(inputLayer, outputLayer);
            network.SetLearningRate(learningRate);
            // NOTE(review): SampleInput is sized inputNeurons + outputNeurons, not
            // inputNeurons — confirm it deliberately holds both vectors combined.
            SampleInput  = new double[inputNeurons + outputNeurons];
            SampleOutput = new double[outputNeurons];
        }
                /// <summary>
                /// Sequentially updates the network's layers starting from the given
                /// layer index, computing each layer's neurons in parallel. The layers
                /// themselves must be processed in order, since each reads the
                /// previous layer's outputs.
                /// </summary>
                /// <param name="backpropagation">Network to update.</param>
                /// <param name="StartLayer">Index of the first layer to update.</param>
                public static void SequentialUpdateMultitherading(this BackpropagationNetwork backpropagation, int StartLayer)
                {
                    for (int layer = StartLayer; layer < backpropagation.Neurons.Count; layer++)
                    {
                        int neuronCount = backpropagation.Neurons[layer].Length;
                        // Layer 0 feeds from the input neurons; later layers feed from the previous layer.
                        IEnumerable <Neuron> prevLayer = (layer == 0) ? (backpropagation.InputNeurons) : (backpropagation.Neurons[layer - 1]);

                        Parallel.For(0, neuronCount, delegate(int i)
                        {
                            Neuron neuron = backpropagation.Neurons[layer][i];

                            // Feed the previous layer's outputs forward into this neuron.
                            neuron.Update(prevLayer.Select((Neuron n) => (n.OutputValue)).ToArray(), null, double.NaN);
                        });
                    }
                }
예제 #18
0
        /// <summary>
        /// Creates a fresh input/hidden/output network sized from the three text
        /// boxes, initializes it and confirms with a message box.
        /// </summary>
        private void button8_Click(object sender, EventArgs e)
        {
            var inputLayer  = new LinearLayer(Convert.ToInt32(textBox3.Text));
            var hiddenLayer = new SigmoidLayer(Convert.ToInt32(textBox4.Text));
            var outputLayer = new SigmoidLayer(Convert.ToInt32(textBox5.Text));

            // Connectors register themselves with the layers they join.
            new BackpropagationConnector(inputLayer, hiddenLayer);
            new BackpropagationConnector(hiddenLayer, outputLayer);

            network = new BackpropagationNetwork(inputLayer, outputLayer);
            network.Initialize();

            MessageBox.Show("Rete generata con successo.");
        }
예제 #19
0
        /// <summary>
        /// Builds and trains the bomb-disarming network from every constructible
        /// non-mine bomb type: one input (beeps level) mapped to the three
        /// disarming-stage codes.
        /// NOTE(review): the method is async but contains no await, so it runs
        /// synchronously on the caller's thread for all 100000 cycles (CS1998) —
        /// confirm this is acceptable for the calling context.
        /// </summary>
        async Task LearnNetworkAsync()
        {
            _network = new BackpropagationNetwork(_inputLayer, _outputLayer);
            _network.Initialize();
            var trainingSet = new TrainingSet(1, 3);

            // One sample per non-mine bomb type that the fabric can actually create.
            foreach (var b in from bomb in Enum.GetValues(typeof(BombTypes)).Cast <BombTypes>() where bomb != BombTypes.Mine select BombFabric.CreateBomb(bomb) into b where b != null select b)
            {
                trainingSet.Add(new TrainingSample(new double[] { b.BeepsLevel },
                                                   new double[] {
                    (int)b.FirstStageDisarming,
                    (int)b.SecondStageDisarming,
                    (int)b.ThirdStageDisarming
                }));
            }
            _network.Learn(trainingSet, 100000);
        }
                /// <summary>
                /// Feeds an input vector into the network's input neurons (in
                /// parallel) and then propagates forward through every layer.
                /// </summary>
                /// <param name="backpropagation">Network to update.</param>
                /// <param name="Input">One value per input neuron.</param>
                /// <exception cref="ArgumentException">Input length differs from the input neuron count.</exception>
                public static void UpdatePositiveMultitherading(this BackpropagationNetwork backpropagation, double[] Input)
                {
                    // Check parameters
                    if (Input.Length != backpropagation.InputNeurons.Length)
                    {
                        throw new ArgumentException("Invalid input array, its size is different from the number of input neurons.", nameof(Input));
                    }


                    // Update input neurons
                    Parallel.For(0, backpropagation.InputNeurons.Length, delegate(int i)
                    {
                        backpropagation.InputNeurons[i].Update(Input[i]);
                    });

                    // Sequential update neurons (layer by layer, from layer 0)
                    backpropagation.SequentialUpdateMultitherading(0);
                }
예제 #21
0
 /// <summary>
 /// Stops training (if a network exists), plots the approximated function by
 /// sampling the network over [0, 10) in 0.05 steps, then discards the
 /// network and re-enables the UI controls.
 /// </summary>
 private void StopLearning(object sender, EventArgs e)
 {
     if (network != null)
     {
         network.StopLearning();
         LineItem lineItem = new LineItem("Approximated Function");
         for (double xVal = 0; xVal < 10; xVal += 0.05d)
         {
             lineItem.AddPoint(xVal, network.Run(new double[] { xVal })[0]);
         }
         lineItem.Symbol.Type = SymbolType.None;
         lineItem.Color       = Color.DarkOrchid;
         functionGraph.GraphPane.CurveList.Add(lineItem);
         functionGraph.Refresh();
         // The curve is removed immediately after the refresh, so it is painted
         // once only — presumably to keep it off subsequent redraws; confirm.
         functionGraph.GraphPane.CurveList.Remove(lineItem);
     }
     network = null;
     EnableControls(true);
 }
예제 #22
0
        /// <summary>
        /// Prompts for a .cnw file and deserializes it into this.Network; on
        /// success, marks the network as loaded and expires the solution so the
        /// component recomputes. Failures surface as a runtime error message.
        /// SECURITY NOTE(review): BinaryFormatter deserialization of an arbitrary
        /// user-chosen file is unsafe (can execute attacker-controlled code) and
        /// the API is obsolete/removed in modern .NET — consider a safer format.
        /// </summary>
        private void load_btn_Clicked(object sender, EventArgs e)
        {
            OpenFileDialog openFileDialog = new OpenFileDialog();

            openFileDialog.Filter           = "CNW files (*.cnw)|*.cnw";
            openFileDialog.InitialDirectory = ".";
            if (openFileDialog.ShowDialog() == DialogResult.OK)
            {
                try
                {
                    System.IO.Stream stream = System.IO.File.Open(openFileDialog.FileName, System.IO.FileMode.Open);

                    BinaryFormatter bFormatter = new BinaryFormatter();
                    // The as-cast yields null (not an exception) if the payload
                    // is some other serialized type.
                    this.Network = bFormatter.Deserialize(stream) as BackpropagationNetwork;
                    stream.Close();
                    networkLoaded = true;
                    ExpireSolution(true);
                }
                catch (Exception ex) { AddRuntimeMessage(GH_RuntimeMessageLevel.Error, "Couldn't load the crow net:\n" + ex.Message); }
            }
        }
예제 #23
0
 /// <summary>
 /// Stores the given network as this instance's neural network.
 /// </summary>
 /// <param name="network">Trained backpropagation network to use.</param>
 public void setNeuralNetwork(BackpropagationNetwork network) => _neuralNetwork = network;
예제 #24
0
        /// <summary>
        /// Starts training a 1-N-1 function-approximation network from the UI:
        /// reads/validates cycles, learning rate and neuron count (writing the
        /// sanitized values back to the text boxes), builds the network, derives
        /// training samples from the drawn curve points, trains with a progress
        /// bar, then hands off to StopLearning to plot the result.
        /// </summary>
        private void Start(object sender, EventArgs e)
        {
            CleanseGraph();
            EnableControls(false);
            curve.Color = enabledColor;

            // Parse UI inputs, substituting defaults for unparsable text.
            if (!int.TryParse(txtCycles.Text, out cycles))
            {
                cycles = 10000;
            }
            if (!double.TryParse(txtLearningRate.Text, out learningRate))
            {
                learningRate = 0.25d;
            }
            if (!int.TryParse(txtNeuronCount.Text, out neuronCount))
            {
                neuronCount = 10;
            }

            // Clamp out-of-range values back to the defaults.
            if (cycles <= 0)
            {
                cycles = 10000;
            }
            if (learningRate < 0 || learningRate > 1)
            {
                learningRate = 0.25d;
            }
            if (neuronCount <= 0)
            {
                neuronCount = 10;
            }

            // Reflect the sanitized values back into the UI.
            txtCycles.Text       = cycles.ToString();
            txtLearningRate.Text = learningRate.ToString();
            txtNeuronCount.Text  = neuronCount.ToString();

            LinearLayer  inputLayer  = new LinearLayer(1);
            SigmoidLayer hiddenLayer = new SigmoidLayer(neuronCount);
            SigmoidLayer outputLayer = new SigmoidLayer(1);

            // Connectors attach themselves; weights start as small random values.
            new BackpropagationConnector(inputLayer, hiddenLayer).Initializer  = new RandomFunction(0d, 0.3d);
            new BackpropagationConnector(hiddenLayer, outputLayer).Initializer = new RandomFunction(0d, 0.3d);
            network = new BackpropagationNetwork(inputLayer, outputLayer);
            network.SetLearningRate(learningRate);

            TrainingSet trainingSet = new TrainingSet(1, 1);

            // Each drawn point contributes ~11 samples in a +/-0.05 x-window,
            // all mapped to that point's y value.
            for (int i = 0; i < curve.Points.Count; i++)
            {
                double xVal = curve.Points[i].X;
                for (double input = xVal - 0.05; input < xVal + 0.06; input += 0.01)
                {
                    trainingSet.Add(new TrainingSample(new double[] { input }, new double[] { curve.Points[i].Y }));
                }
            }

            // Drive the progress bar from epoch events; DoEvents keeps the UI alive
            // because training runs on this (UI) thread.
            network.EndEpochEvent += new TrainingEpochEventHandler(
                delegate(object senderNetwork, TrainingEpochEventArgs args)
            {
                trainingProgressBar.Value = (int)(args.TrainingIteration * 100d / cycles);
                Application.DoEvents();
            });
            network.Learn(trainingSet, cycles);
            StopLearning(this, EventArgs.Empty);
        }
예제 #25
0
        /// <summary>
        /// Constructs a training procedure for standard backpropagation: trains a
        /// 2-3-1 network on the XOR truth table and writes the four predictions to
        /// the console. More advanced techniques are used as seen in the example.
        /// </summary>
        /// <param name="writer">Unused; kept for interface compatibility.</param>
        public TestingNdn(StreamWriter writer)
        {
            //We might make a gui for this later.
            int    numberOfNeurons = 3;
            double learningRate    = 0.5;
            int    numberOfCycles  = 10000;

            // Mean squared error per training epoch, kept for later plotting.
            double[] errorList = new double[numberOfCycles];

            LinearLayer  inputLayer  = new LinearLayer(2);
            SigmoidLayer hiddenLayer = new SigmoidLayer(numberOfNeurons);
            SigmoidLayer outputLayer = new SigmoidLayer(1);

            // This layer is a event handler that fires when the output is generated, hence backpropagation.
            BackpropagationConnector conn1 = new BackpropagationConnector(inputLayer, hiddenLayer);
            BackpropagationConnector conn2 = new BackpropagationConnector(hiddenLayer, outputLayer);

            BackpropagationNetwork network = new BackpropagationNetwork(inputLayer, outputLayer);

            network.SetLearningRate(learningRate);

            // XOR truth table.
            TrainingSet trainingSet = new TrainingSet(2, 1);

            trainingSet.Add(new TrainingSample(new double[2] {
                0, 0
            }, new double[1] {
                0
            }));
            trainingSet.Add(new TrainingSample(new double[2] {
                0, 1
            }, new double[1] {
                1
            }));
            trainingSet.Add(new TrainingSample(new double[2] {
                1, 0
            }, new double[1] {
                1
            }));
            trainingSet.Add(new TrainingSample(new double[2] {
                1, 1
            }, new double[1] {
                0
            }));

            double max = 0;

            // create an anonymous function to capture the error value of each iteration, and report back the percent of completion.
            network.EndEpochEvent +=
                delegate(object networkInput, TrainingEpochEventArgs args)
            {
                errorList[args.TrainingIteration] = network.MeanSquaredError;
                max             = Math.Max(max, network.MeanSquaredError);
                PercentComplete = args.TrainingIteration * 100 / numberOfCycles;
            };

            network.Learn(trainingSet, numberOfCycles);

            // (Removed dead code: an empty TrainingSample, an unused indices array,
            // an empty loop over errorList, and an unused GetOutput() result.)

            Console.WriteLine("final output");

            double[] r1 = new double[] { 0, 0 };
            double[] r2 = new double[] { 0, 1 };
            double[] r3 = new double[] { 1, 0 };
            double[] r4 = new double[] { 1, 1 };

            Console.WriteLine(" 0 0 => " + network.Run(r1)[0]);
            Console.WriteLine(" 0 1 => " + network.Run(r2)[0]);
            Console.WriteLine(" 1 0 => " + network.Run(r3)[0]);
            Console.WriteLine(" 1 1 => " + network.Run(r4)[0]);
        }
예제 #26
0
 /// <summary>
 /// Creates a deserializer bound to the network that will receive the data.
 /// </summary>
 /// <param name="TargetNetwork">Network to populate during deserialization.</param>
 public BackpropagationNetworkDeserializer(BackpropagationNetwork TargetNetwork) => targetNetwork = TargetNetwork;
예제 #27
0
        /// <summary>
        /// Writes a training set to a CSV file: a header row of
        /// Input1..N, Output1..M, Forecast1..M, then one row per sample with its
        /// inputs, expected outputs, and the network's forecast for those inputs.
        /// </summary>
        /// <param name="ts">Training set to export.</param>
        /// <param name="path">Destination CSV file path (overwritten).</param>
        /// <param name="network">Trained network used to produce the forecast columns.</param>
        public static void SaveTrainingSet(TrainingSet ts, string path, BackpropagationNetwork network)
        {
            StringBuilder sb = new StringBuilder();

            using (StreamWriter sw = new StreamWriter(path))
            {
                double[] forecastVector;

                // Write the header row
                for (int index = 1; index <= ts.InputVectorLength; index++)
                {
                    sb.Append("Input" + index.ToString() + ",");
                }
                for (int index = 1; index <= ts.OutputVectorLength; index++)
                {
                    sb.Append("Output" + index.ToString() + ",");
                }
                // Last forecast column is written separately to avoid a trailing comma.
                for (int index = 1; index <= ts.OutputVectorLength - 1; index++)
                {
                    sb.Append("Forecast" + index.ToString() + ",");
                }
                sb.Append("Forecast" + ts.OutputVectorLength.ToString());
                sw.WriteLine(sb.ToString());

                // Iterate over the samples
                TrainingSample tsamp;
                for (int row = 0; row < ts.TrainingSampleCount; row++)
                {
                    tsamp = ts[row];
                    sb.Clear();

                    // Append the inputs
                    for (int index = 0; index < ts.InputVectorLength; index++)
                    {
                        sb.Append(tsamp.InputVector[index].ToString("0.0000") + ",");
                    }
                    // Append the expected outputs
                    for (int index = 0; index < ts.OutputVectorLength; index++)
                    {
                        sb.Append(tsamp.OutputVector[index].ToString("0.0000") + ",");
                    }

                    //
                    // Get and append the forecast
                    //

                    // Un-normalized input (deliberately so — see the note below)
                    forecastVector = network.Run(tsamp.InputVector);
                    // Normalized input - NO (intentionally disabled)
                    //forecastVector = network.Run(tsamp.NormalizedInputVector);

                    for (int index = 0; index < ts.OutputVectorLength - 1; index++)
                    {
                        sb.Append(forecastVector[index].ToString("0.0000") + ",");
                    }
                    sb.Append(forecastVector[ts.OutputVectorLength - 1].ToString("0.0000"));

                    // Emit the row
                    sw.WriteLine(sb.ToString());
                }
            }
        }
        public void CalculateError_DistributesSignals()
        {
            //// SETUP
            const double ErrorSignal1 = -0.5d;
            const double ErrorSignal2 = 1.5d;
            const double DigitizedErrorSignal1 = -1.0d;
            const double DigitizedErrorSignal2 = 1.0d;
            const double NetworkErrorSignal1 = -1.0d;
            const double NetworkErrorSignal2 = 1.0d;
            var expectedNetworkErrors = new[] { NetworkErrorSignal1, NetworkErrorSignal2 };

            // Two inbound and two outbound mock connections.
            var inbound1 = new Mock<ISupervisedLearnerConnection>();
            var inbound2 = new Mock<ISupervisedLearnerConnection>();
            var outbound1 = new Mock<ISupervisedLearnerConnection>();
            var outbound2 = new Mock<ISupervisedLearnerConnection>();

            // Both outbounds report their respective error signals.
            outbound1.SetupGet(m => m.ErrorSignal).Returns(ErrorSignal1);
            outbound2.SetupGet(m => m.ErrorSignal).Returns(ErrorSignal2);
            outbound1.SetupGet(m => m.IsReportingError).Returns(true);
            outbound2.SetupGet(m => m.IsReportingError).Returns(true);

            // Two input nodes and five output nodes.
            var inputNode1 = new Mock<ISupervisedLearnerNode>();
            var inputNode2 = new Mock<ISupervisedLearnerNode>();
            var outputNodes = new[]
            {
                new Mock<ISupervisedLearnerNode>(),
                new Mock<ISupervisedLearnerNode>(),
                new Mock<ISupervisedLearnerNode>(),
                new Mock<ISupervisedLearnerNode>(),
                new Mock<ISupervisedLearnerNode>(),
            };

            // Every node exposes scalar input/output sizes and canned values.
            inputNode1.SetupGet(m => m.InputSize).Returns(1);
            inputNode2.SetupGet(m => m.InputSize).Returns(1);
            inputNode1.SetupGet(m => m.CachedErrors).Returns(new[] { NetworkErrorSignal1 });
            inputNode2.SetupGet(m => m.CachedErrors).Returns(new[] { NetworkErrorSignal2 });
            foreach (var outputNode in outputNodes)
            {
                outputNode.SetupGet(m => m.OutputSize).Returns(1);
                outputNode.SetupGet(m => m.CachedOutputs).Returns(new[] { 0.0d });
            }

            // Assemble the test object.
            var network = new BackpropagationNetwork(new Mock<IErrorCalculator>().Object);

            network.AddInboundConnection(inbound1.Object);
            network.AddInboundConnection(inbound2.Object);
            network.AddOutboundConnection(outbound1.Object);
            network.AddOutboundConnection(outbound2.Object);

            network.AddInputNode(inputNode1.Object);
            network.AddInputNode(inputNode2.Object);
            foreach (var outputNode in outputNodes)
            {
                network.AddOutputNode(outputNode.Object);
            }

            // EXECUTION
            network.Fire(new[] { 0.0d });
            network.CalculateError(ErrorSignal1);

            // VERIFICATION: the IsReporting flags were each consulted once...
            outbound1.Verify(m => m.IsReportingError, Times.Exactly(1));
            outbound2.Verify(m => m.IsReportingError, Times.Exactly(1));

            // ...and the calculation activities occurred.
            inbound1.Verify(m => m.ReportError(NetworkErrorSignal1), Times.Once());
            inbound2.Verify(m => m.ReportError(NetworkErrorSignal2), Times.Once());
            outbound1.VerifyGet(m => m.ErrorSignal, Times.Once());
            outbound1.Verify(m => m.ClearReportingFlag(), Times.Once());
            outbound2.VerifyGet(m => m.ErrorSignal, Times.Once());
            outbound2.Verify(m => m.ClearReportingFlag(), Times.Once());
            inputNode1.VerifyGet(m => m.CachedErrors, Times.AtLeastOnce());
            inputNode2.VerifyGet(m => m.CachedErrors, Times.AtLeastOnce());

            // The first three output nodes receive the negative digitized signal,
            // the remaining two the positive one.
            for (int i = 0; i < outputNodes.Length; i++)
            {
                double expectedSignal = i < 3 ? DigitizedErrorSignal1 : DigitizedErrorSignal2;
                outputNodes[i].Verify(m => m.CalculateError(expectedSignal), Times.Once());
            }

            Assert.AreEqual(2, network.CachedErrors.Length);
            Assert.AreEqual(expectedNetworkErrors, network.CachedErrors);
        }
예제 #29
0
        private void buttonOK_Click(object sender, EventArgs e)
        {
            // Builds a new backpropagation network from the form's settings, wraps it
            // in a project together with the parsed training/cross-validation data,
            // and saves the project to the chosen folder.
            this.Cursor = Cursors.WaitCursor;
            try
            {
                //
                // Parse the data files.
                //
                int inputCount  = 0;
                int outputCount = 1;
                if (textInputCount.Text.Length > 0)
                {
                    inputCount = int.Parse(textInputCount.Text);
                }
                if (textOutputCount.Text.Length > 0)
                {
                    outputCount = int.Parse(textOutputCount.Text);
                }
                TrainingSet trainingSet   = DataFile.CsvFileToTrainingSet(textTrainingSet.Text, ref inputCount, ref outputCount);
                TrainingSet crossvalidSet = null;
                if (textCvSet.Text.Length > 0)
                {
                    crossvalidSet = DataFile.CsvFileToTrainingSet(textCvSet.Text, ref inputCount, ref outputCount);
                }

                //
                // Create the new network.
                //

                // The input layer is always linear, sized to the input count.
                LinearLayer inputLayer = new LinearLayer(inputCount);

                // Create the hidden layer(s); the second hidden layer is optional.
                ActivationLayer hiddenLayer1 = null;
                ActivationLayer hiddenLayer2 = null;
                ActivationLayer outputLayer  = null;
                if (comboActFunction1.SelectedIndex < 0)
                {
                    MessageBox.Show("请选择激活函数!"); return;
                }
                switch ((HiddenLayerType)comboActFunction1.SelectedItem)
                {
                case HiddenLayerType.Linear: hiddenLayer1 = new LinearLayer(int.Parse(textNeuronCount1.Text)); break;

                case HiddenLayerType.Logarithmic: hiddenLayer1 = new LogarithmLayer(int.Parse(textNeuronCount1.Text)); break;

                case HiddenLayerType.Sigmoid: hiddenLayer1 = new SigmoidLayer(int.Parse(textNeuronCount1.Text)); break;

                case HiddenLayerType.Sine: hiddenLayer1 = new SineLayer(int.Parse(textNeuronCount1.Text)); break;

                case HiddenLayerType.Tanh: hiddenLayer1 = new TanhLayer(int.Parse(textNeuronCount1.Text)); break;
                }
                if (textNeuronCount2.Text.Length > 0 && int.Parse(textNeuronCount2.Text) > 0)
                {
                    switch ((HiddenLayerType)comboActFunction2.SelectedItem)
                    {
                    case HiddenLayerType.Linear: hiddenLayer2 = new LinearLayer(int.Parse(textNeuronCount2.Text)); break;

                    case HiddenLayerType.Logarithmic: hiddenLayer2 = new LogarithmLayer(int.Parse(textNeuronCount2.Text)); break;

                    case HiddenLayerType.Sigmoid: hiddenLayer2 = new SigmoidLayer(int.Parse(textNeuronCount2.Text)); break;

                    case HiddenLayerType.Sine: hiddenLayer2 = new SineLayer(int.Parse(textNeuronCount2.Text)); break;

                    case HiddenLayerType.Tanh: hiddenLayer2 = new TanhLayer(int.Parse(textNeuronCount2.Text)); break;
                    }
                }

                if (comboOutputFunction.SelectedIndex < 0)
                {
                    MessageBox.Show("请选择输出函数!"); return;
                }
                switch ((HiddenLayerType)comboOutputFunction.SelectedItem)
                {
                case HiddenLayerType.Linear: outputLayer = new LinearLayer(outputCount); break;

                case HiddenLayerType.Logarithmic: outputLayer = new LogarithmLayer(outputCount); break;

                case HiddenLayerType.Sigmoid: outputLayer = new SigmoidLayer(outputCount); break;

                case HiddenLayerType.Sine: outputLayer = new SineLayer(outputCount); break;

                case HiddenLayerType.Tanh: outputLayer = new TanhLayer(outputCount); break;
                }

                // Connect the layers; hidden layer 2 is optional.  The connector
                // constructors register themselves with the layers as a side effect.
                new BackpropagationConnector(inputLayer, hiddenLayer1);
                if (hiddenLayer2 != null)
                {
                    new BackpropagationConnector(hiddenLayer1, hiddenLayer2);
                    new BackpropagationConnector(hiddenLayer2, outputLayer);
                }
                else
                {
                    new BackpropagationConnector(hiddenLayer1, outputLayer);
                }
                BackpropagationNetwork backpropNetwork = new BackpropagationNetwork(inputLayer, outputLayer);


                //
                // Configure the learning and exit parameters.
                //

                double startLearningRate = double.Parse(textStartLearningRate.Text);
                double?finalLearningRate = null;
                if (textFinalLearningRate.Text.Length > 0)
                {
                    finalLearningRate = double.Parse(textFinalLearningRate.Text);
                }

                // Use a learning-rate function if one was selected.
                LearningRateFunction?lrf = null;
                if (comboLRFunction.SelectedIndex > 0)
                {
                    lrf = (LearningRateFunction)comboLRFunction.SelectedItem;
                    // BUG FIX: the final learning rate is optional; the previous code
                    // dereferenced finalLearningRate.Value unconditionally here, which
                    // threw InvalidOperationException when the field was left empty.
                    // Fall back to the start rate, yielding a constant-rate function.
                    backpropNetwork.SetLearningRate(
                        LearningRateFactory.GetLearningRateFunction(lrf.Value, startLearningRate, finalLearningRate ?? startLearningRate));
                }
                else if (finalLearningRate.HasValue)
                {
                    // Otherwise a plain learning rate, possibly with a start and end point.
                    backpropNetwork.SetLearningRate(startLearningRate, finalLearningRate.Value);
                }
                else
                {
                    backpropNetwork.SetLearningRate(startLearningRate);
                }

                // Set momentum on every connector, if given.
                double?momentum = null;
                if (textMomentum.Text.Length > 0)
                {
                    momentum = double.Parse(textMomentum.Text);
                    foreach (ILayer layer in backpropNetwork.Layers)
                    {
                        foreach (BackpropagationConnector conn in layer.SourceConnectors)
                        {
                            conn.Momentum = momentum.Value;
                        }
                        foreach (BackpropagationConnector conn in layer.TargetConnectors)
                        {
                            conn.Momentum = momentum.Value;
                        }
                    }
                }


                //
                // Create and save the new project.
                //
                int tmpInt;
                NewProject         = new NnProject();
                NewProject.Network = backpropNetwork;
                // Make sure weights are initialized for the new network; by default a
                // new training session will not initialize them.
                NewProject.Network.Initialize();
                NewProject.ProjectName        = textProjectName.Text.Trim();
                NewProject.SaveFolder         = textSaveFolder.Text;
                NewProject.TrainingSet        = trainingSet;
                NewProject.CrossValidationSet = crossvalidSet;
                NewProject.LearningParameters = new NnProject.NnLearningParameters();
                NewProject.LearningParameters.InitialLearningRate  = startLearningRate;
                NewProject.LearningParameters.FinalLearningRate    = finalLearningRate;
                NewProject.LearningParameters.LearningRateFunction = lrf;
                NewProject.LearningParameters.Momentum             = momentum;
                if (int.TryParse(textTrainingCycles.Text, out tmpInt))
                {
                    NewProject.LearningParameters.MaxTrainingCycles = tmpInt;
                }

                NnProject.Save(NewProject, textSaveFolder.Text);

                this.Close();
            }
            catch (Exception ex)
            {
                // Cursor reset happens in finally; no need to duplicate it here.
                MessageBox.Show("Error creating network - " + ex.Message, "Error", MessageBoxButtons.OK, MessageBoxIcon.Error);
            }
            finally
            {
                // Always restore the cursor, including the early-return validation paths.
                this.Cursor = Cursors.Default;
            }
        }
예제 #30
0
        private void Train(object sender, EventArgs e)
        {
            // Trains one small network per unordered pair of letters (one-vs-one
            // classification) and serializes each trained network to disk.
            // btnTrain.Enabled = false;

            int cycles = 200;
            // if (!int.TryParse(txtCycles.Text, out cycles)) { cycles = 200; }
            // txtCycles.Text = cycles.ToString();

            int currentCombination = 0;

            //int totalCombinations = Alphabet.LetterCount * (Alphabet.LetterCount - 1) / 2;

            for (int i = 0; i < Alphabet.LetterCount; i++)
            {
                for (int j = i + 1; j < Alphabet.LetterCount; j++)
                {
                    // 400 inputs (20x20 pixel vector) -> 4 sigmoid hidden -> 2 outputs,
                    // one per letter of the pair.
                    ActivationLayer inputLayer  = new LinearLayer(400);
                    ActivationLayer hiddenLayer = new SigmoidLayer(4);
                    ActivationLayer outputLayer = new SigmoidLayer(2);
                    new BackpropagationConnector(inputLayer, hiddenLayer);
                    new BackpropagationConnector(hiddenLayer, outputLayer);
                    BackpropagationNetwork network = new BackpropagationNetwork(inputLayer, outputLayer);

                    // Samples of letter i map to {1,0}; samples of letter j to {0,1}.
                    TrainingSet trainingSet = new TrainingSet(400, 2);
                    Alphabet    ithLetter   = Alphabet.GetLetter(i);
                    Alphabet    jthLetter   = Alphabet.GetLetter(j);
                    foreach (Letter instance in ithLetter.Instances)
                    {
                        trainingSet.Add(new TrainingSample(instance.GetEquivalentVector(20, 20), new double[] { 1d, 0d }));
                    }
                    foreach (Letter instance in jthLetter.Instances)
                    {
                        trainingSet.Add(new TrainingSample(instance.GetEquivalentVector(20, 20), new double[] { 0d, 1d }));
                    }

                    //progressTraining.Value = 100 * currentCombination / totalCombinations;

                    // Keep the UI responsive during the long-running training loop.
                    Application.DoEvents();

                    bool correct = false;

                    int currentCycles = 35;
                    int count         = trainingSet.TrainingSampleCount;

                    // Retrain with a doubled cycle count until every sample is separated
                    // with a margin of at least 0.4, or the cycle budget is exhausted.
                    // FIX: use short-circuit `&&` instead of the bitwise `&` on booleans.
                    while (correct == false && currentCycles <= cycles)
                    {
                        network.Initialize();
                        network.Learn(trainingSet, currentCycles);
                        correct = true;
                        for (int sampleIndex = 0; sampleIndex < count; sampleIndex++)
                        {
                            double[] op = network.Run(trainingSet[sampleIndex].InputVector);
                            if (((trainingSet[sampleIndex].OutputVector[0] > trainingSet[sampleIndex].OutputVector[1]) && op[0] - op[1] < 0.4) || ((trainingSet[sampleIndex].OutputVector[0] < trainingSet[sampleIndex].OutputVector[1]) && op[1] - op[0] < 0.4))
                            {
                                correct = false;
                                // Oversample the misclassified example for the next round;
                                // `count` was captured before the loop, so the growing set
                                // does not change this iteration's bounds.
                                trainingSet.Add(trainingSet[sampleIndex]);
                            }
                        }
                        currentCycles *= 2;
                    }

                    //lstLog.Items.Add(cboAplhabet.Items[i] + " & " + cboAplhabet.Items[j] + " = " + network.MeanSquaredError.ToString("0.0000"));
                    // lstLog.TopIndex = lstLog.Items.Count - (int)(lstLog.Height / lstLog.ItemHeight);
                    try
                    {
                        // SECURITY NOTE(review): BinaryFormatter is insecure against
                        // untrusted data and is removed in .NET 9.  These files are only
                        // read back by this application, but migrating to a safe
                        // serializer is recommended; changing it here would break the
                        // existing .ndn file format.
                        using (Stream stream = File.Open(Application.StartupPath + @"\Networks\" + i.ToString("00") + j.ToString("00") + ".ndn", FileMode.Create))
                        {
                            IFormatter formatter = new BinaryFormatter();
                            formatter.Serialize(stream, network);
                        }
                    }
                    catch (Exception)
                    {
                        MessageBox.Show("Failed to save trained neural networks", "Critical Error", MessageBoxButtons.OK, MessageBoxIcon.Error);
                        return;
                    }
                    currentCombination++;
                }
            }
            //  progressTraining.Value = 0;
            //  btnTrain.Enabled = false;
        }
예제 #31
0
        static void Main(string[] args)
        {
            bool finished = false;

            // Network topology: 25 linear inputs -> 100 sigmoid hidden -> 4 sigmoid outputs.
            LinearLayer  inputLayer  = new LinearLayer(25);
            SigmoidLayer hiddenLayer = new SigmoidLayer(100);
            SigmoidLayer outputLayer = new SigmoidLayer(4);

            // The connector constructors wire the layers together as a side effect.
            new BackpropagationConnector(inputLayer, hiddenLayer);
            new BackpropagationConnector(hiddenLayer, outputLayer);

            network = new BackpropagationNetwork(inputLayer, outputLayer);
            network.Initialize();

            labyrinth maze = new labyrinth();
            player    hero = new player();

            maze.Update(maze);

            Console.WriteLine(maze.GetPlayerPosition(maze.Laby));

            // Arrow-key movement loop.  NOTE(review): `finished` is never assigned,
            // so the loop only ends when the process is terminated.
            do
            {
                string pressedKey = Console.ReadKey(true).Key.ToString();

                // Translate the arrow key into a board offset and a move code.
                int rowOffset = 0;
                int colOffset = 0;
                string moveCode = null;
                switch (pressedKey)
                {
                case "RightArrow": colOffset = 1; moveCode = "R"; break;

                case "LeftArrow": colOffset = -1; moveCode = "L"; break;

                case "UpArrow": rowOffset = -1; moveCode = "U"; break;

                case "DownArrow": rowOffset = 1; moveCode = "D"; break;
                }

                // Only move when the target cell is not a wall.
                if (moveCode != null &&
                    maze.Laby[hero.getPlayerPosition().Item1 + rowOffset, hero.getPlayerPosition().Item2 + colOffset] != maze.Wall)
                {
                    maze.Laby = maze.SetPosition(hero.getPlayerPosition(), hero.move(moveCode), maze.Laby);
                    maze.Update(maze);
                }
            } while (!finished);

            Console.Read();
        }
        public void ApplyWeightAdjustments_PropogatesThroughNetwork()
        {
            //// SETUP
            const double ErrorSignal = -2.3d;
            const float Momentum = 0.9f;
            const float LearningRate = 0.1f;

            // Two inbound and two outbound mock connections; both outbounds report errors.
            var inbounds = new[] { new Mock<ISupervisedLearnerConnection>(), new Mock<ISupervisedLearnerConnection>() };
            var outbounds = new[] { new Mock<ISupervisedLearnerConnection>(), new Mock<ISupervisedLearnerConnection>() };
            foreach (var outbound in outbounds)
            {
                outbound.SetupGet(m => m.IsReportingError).Returns(true);
            }

            // Two input and two output nodes with scalar sizes and canned values.
            var inputNodes = new[] { new Mock<ISupervisedLearnerNode>(), new Mock<ISupervisedLearnerNode>() };
            var outputNodes = new[] { new Mock<ISupervisedLearnerNode>(), new Mock<ISupervisedLearnerNode>() };
            foreach (var inputNode in inputNodes)
            {
                inputNode.SetupGet(m => m.InputSize).Returns(1);
                inputNode.SetupGet(m => m.CachedErrors).Returns(new[] { ErrorSignal });
            }
            foreach (var outputNode in outputNodes)
            {
                outputNode.SetupGet(m => m.OutputSize).Returns(1);
                outputNode.SetupGet(m => m.CachedOutputs).Returns(new[] { 0.0d });
            }

            // Assemble the test object.
            var network = new BackpropagationNetwork(new Mock<IErrorCalculator>().Object);
            foreach (var inbound in inbounds)
            {
                network.AddInboundConnection(inbound.Object);
            }
            foreach (var outbound in outbounds)
            {
                network.AddOutboundConnection(outbound.Object);
            }
            foreach (var inputNode in inputNodes)
            {
                network.AddInputNode(inputNode.Object);
            }
            foreach (var outputNode in outputNodes)
            {
                network.AddOutputNode(outputNode.Object);
            }

            network.Fire(new[] { 0.0d });
            network.CalculateError(ErrorSignal);

            // EXECUTION
            network.ApplyWeightAdjustments(LearningRate, Momentum);

            // VERIFICATION: adjustments propagate to the input nodes and outbound connections.
            foreach (var inputNode in inputNodes)
            {
                inputNode.Verify(m => m.ApplyWeightAdjustments(LearningRate, Momentum), Times.Once());
            }
            foreach (var outbound in outbounds)
            {
                outbound.Verify(m => m.ApplyWeightAdjustments(LearningRate, Momentum), Times.Once());
            }
        }
예제 #33
0
        private void RefreshScreenForCurrentNetWork()
        {
            // Pushes the current project's learning parameters and network summary
            // into the form's controls, including training and cross-validation errors.
            var learning = currentProject.LearningParameters;

            txtCycles.Text = learning.MaxTrainingCycles.ToString();
            txtInitialLearningRate.Text = learning.InitialLearningRate.ToString();
            txtFinalLearningRate.Text = learning.FinalLearningRate.HasValue
                ? learning.FinalLearningRate.ToString()
                : "";

            if (learning.LearningRateFunction.HasValue)
            {
                // Combo index 0 means "no function", so function values are offset by one.
                comboLRFunction.SelectedIndex = (int)learning.LearningRateFunction + 1;
            }

            textMomentum.Text = learning.Momentum.HasValue ? learning.Momentum.ToString() : "";

            // Describe each layer: its activation type (namespace stripped) and neuron count.
            BackpropagationNetwork bn = currentProject.Network;
            string networkDesc = "";
            int layerNo = 1;
            foreach (ILayer layer in bn.Layers)
            {
                string layerType = layer.ToString().Replace("NeuronDotNet.Core.Backpropagation.", "");
                networkDesc += "Layer " + layerNo.ToString() + ": {" + layerType + "," + layer.NeuronCount.ToString() + "}" + System.Environment.NewLine;
                layerNo++;
            }
            networkDesc += System.Environment.NewLine + "TrainingSampleCount: " + currentProject.TrainingSet.TrainingSampleCount.ToString();
            networkDescription.Text = networkDesc;

            lblTrainErrorVal.Text = bn.MeanSquaredError.ToString("0.000000");

            if (currentProject.CrossValidationSet != null)
            {
                double cvError = GetCrossValidationError();
                lblSumSqErrorCV.Text = cvError.ToString("0.000000");

                // Percent difference between training and CV error; left blank when the
                // training error is zero (the division yields infinity).
                double percentError = Math.Abs(100 * (currentProject.Network.MeanSquaredError - cvError) / currentProject.Network.MeanSquaredError);
                lblCvPercError.Text = double.IsInfinity(percentError) ? "" : percentError.ToString("0.00") + " %";

                lblSumSqErrorCV.Enabled = true;
                lblSumSqErrorCV.BackColor = lblTrainErrorVal.BackColor;
            }
            else
            {
                lblSumSqErrorCV.Text = "";
                lblCvPercError.Text = "";
                lblSumSqErrorCV.Enabled = false;
                lblSumSqErrorCV.BackColor = lblCVErrLabel.BackColor;
            }
        }
        /// <summary>
        /// Verifies that Train runs the requested number of epochs over both samples,
        /// propagates errors back to the input nodes, applies weight adjustments once
        /// per epoch, and returns the error calculator's final value.
        /// </summary>
        public void Train_TrainsTheNetwork()
        {
            //// SETUP

            const int NumEpochs = 3;
            const float LearningRate = 0.1f;
            const float Momentum = 0.9f;
            const double FinalError = 99.99d;

            // Two training samples, each with two components (one per input/output node).
            // overallErrors = ideal - actual output, per sample and per component.
            var inputs = new[] { new[] { 1.0d, 2.0d }, new[] { 3.0d, 4.0d } };
            var ideals = new[] { new[] { 5.0d, 6.0d }, new[] { 7.0d, 8.0d } };
            var outputs = new[] { new[] { 3.5d, 5.5d }, new[] { 7.5d, 9.5d } };
            var overallErrors = new[]
                {
                    new[] { ideals[0][0] - outputs[0][0], ideals[0][1] - outputs[0][1] },
                    new[] { ideals[1][0] - outputs[1][0], ideals[1][1] - outputs[1][1] }
                };
            var finalErrors = new[] { new[] { 1.5d, 2.5d }, new[] { 3.5d, 4.5d } };

            // The error calculator reports a fixed final error regardless of input.
            var mockErrorCalc = new Mock<IErrorCalculator>();
            mockErrorCalc.Setup(mock => mock.Calculate()).Returns(FinalError);

            // Create 2 input nodes and 2 output nodes.
            var mockInputNode1 = new Mock<ISupervisedLearnerNode>();
            var mockInputNode2 = new Mock<ISupervisedLearnerNode>();
            var mockOutputNode1 = new Mock<ISupervisedLearnerNode>();
            var mockOutputNode2 = new Mock<ISupervisedLearnerNode>();

            // Program nodes to report input/output sizes (one scalar each).
            mockInputNode1.SetupGet(mock => mock.InputSize).Returns(1);
            mockInputNode2.SetupGet(mock => mock.InputSize).Returns(1);
            mockOutputNode1.SetupGet(mock => mock.OutputSize).Returns(1);
            mockOutputNode2.SetupGet(mock => mock.OutputSize).Returns(1);

            // Simulate the forward pass: firing input node k with sample s's k-th value
            // makes output node k expose the canned output for that sample.
            mockInputNode1.Setup(mock => mock.Fire(new[] { inputs[0][0] })).Callback(() => mockOutputNode1.SetupGet(mock => mock.CachedOutputs).Returns(new[] { outputs[0][0] }));
            mockInputNode2.Setup(mock => mock.Fire(new[] { inputs[0][1] })).Callback(() => mockOutputNode2.SetupGet(mock => mock.CachedOutputs).Returns(new[] { outputs[0][1] }));
            mockInputNode1.Setup(mock => mock.Fire(new[] { inputs[1][0] })).Callback(() => mockOutputNode1.SetupGet(mock => mock.CachedOutputs).Returns(new[] { outputs[1][0] }));
            mockInputNode2.Setup(mock => mock.Fire(new[] { inputs[1][1] })).Callback(() => mockOutputNode2.SetupGet(mock => mock.CachedOutputs).Returns(new[] { outputs[1][1] }));

            // Simulate the backward pass: when an output node is asked to calculate the
            // overall error for a sample, the paired input node exposes the corresponding
            // final (back-propagated) error.
            mockOutputNode1.Setup(mock => mock.CalculateError(overallErrors[0][0])).Callback(
                () => mockInputNode1.SetupGet(mock => mock.CachedErrors).Returns(new[] { finalErrors[0][0] }));
            mockOutputNode1.Setup(mock => mock.CalculateError(overallErrors[1][0])).Callback(
                () => mockInputNode1.SetupGet(mock => mock.CachedErrors).Returns(new[] { finalErrors[1][0] }));
            mockOutputNode2.Setup(mock => mock.CalculateError(overallErrors[0][1])).Callback(
                () => mockInputNode2.SetupGet(mock => mock.CachedErrors).Returns(new[] { finalErrors[0][1] }));
            mockOutputNode2.Setup(mock => mock.CalculateError(overallErrors[1][1])).Callback(
                () => mockInputNode2.SetupGet(mock => mock.CachedErrors).Returns(new[] { finalErrors[1][1] }));

            // Create the network
            var network = new BackpropagationNetwork(mockErrorCalc.Object);
            network.AddInputNode(mockInputNode1.Object);
            network.AddInputNode(mockInputNode2.Object);
            network.AddOutputNode(mockOutputNode1.Object);
            network.AddOutputNode(mockOutputNode2.Object);

            // Execute
            var actualError = network.Train(NumEpochs, LearningRate, Momentum, inputs, ideals);

            //// Verify

            // Use of error calculator: reset once per epoch, each sample's error folded
            // in once per epoch, and one final Calculate() call.
            mockErrorCalc.Verify(mock => mock.Reset(), Times.Exactly(NumEpochs));
            mockErrorCalc.Verify(mock => mock.AddToErrorCalc(overallErrors[0]), Times.Exactly(NumEpochs));
            mockErrorCalc.Verify(mock => mock.AddToErrorCalc(overallErrors[1]), Times.Exactly(NumEpochs));
            mockErrorCalc.Verify(mock => mock.Calculate(), Times.Once());

            // Use of input nodes: each sample component fired once per epoch.
            mockInputNode1.Verify(mock => mock.Fire(new[] { inputs[0][0] }), Times.Exactly(NumEpochs));
            mockInputNode1.Verify(mock => mock.Fire(new[] { inputs[1][0] }), Times.Exactly(NumEpochs));
            mockInputNode2.Verify(mock => mock.Fire(new[] { inputs[0][1] }), Times.Exactly(NumEpochs));
            mockInputNode2.Verify(mock => mock.Fire(new[] { inputs[1][1] }), Times.Exactly(NumEpochs));

            // Cached errors are cleared once per sample per epoch.
            mockInputNode1.Verify(mock => mock.ClearCachedErrors(), Times.Exactly(NumEpochs * inputs.Length));
            mockInputNode2.Verify(mock => mock.ClearCachedErrors(), Times.Exactly(NumEpochs * inputs.Length));

            // Weight adjustments applied once per epoch.
            mockInputNode1.Verify(mock => mock.ApplyWeightAdjustments(LearningRate, Momentum), Times.Exactly(NumEpochs));
            mockInputNode2.Verify(mock => mock.ApplyWeightAdjustments(LearningRate, Momentum), Times.Exactly(NumEpochs));

            // Use of output nodes
            mockOutputNode1.VerifyGet(mock => mock.CachedOutputs, Times.AtLeast(NumEpochs));
            mockOutputNode2.VerifyGet(mock => mock.CachedOutputs, Times.AtLeast(NumEpochs));

            // Each per-sample overall error handed to the matching output node once per epoch.
            mockOutputNode1.Verify(mock => mock.CalculateError(overallErrors[0][0]), Times.Exactly(NumEpochs));
            mockOutputNode1.Verify(mock => mock.CalculateError(overallErrors[1][0]), Times.Exactly(NumEpochs));
            mockOutputNode2.Verify(mock => mock.CalculateError(overallErrors[0][1]), Times.Exactly(NumEpochs));
            mockOutputNode2.Verify(mock => mock.CalculateError(overallErrors[1][1]), Times.Exactly(NumEpochs));

            // Verify Error
            Assert.AreEqual(FinalError, actualError);
        }
        public void CalculateError_DoesNotCalculateUntilAllConnectionsReport()
        {
            // NOTE(review): no [TestMethod]/[Test]/[Fact] attribute is visible on this
            // method, so the test runner may never discover it — confirm against the
            // framework used by the rest of this fixture and add the attribute if it
            // is genuinely missing.

            //// ARRANGE

            // Inbound connections that must remain untouched while any outbound
            // connection has not yet reported its error.
            var inboundA = new Mock<ISupervisedLearnerConnection>();
            var inboundB = new Mock<ISupervisedLearnerConnection>();

            // One outbound connection has reported an error; the other has not,
            // which should suppress all error-calculation work in the network.
            var outboundReported = new Mock<ISupervisedLearnerConnection>();
            outboundReported.SetupGet(c => c.IsReportingError).Returns(true);

            var outboundPending = new Mock<ISupervisedLearnerConnection>();
            outboundPending.SetupGet(c => c.IsReportingError).Returns(false);

            // Input/output nodes whose members must likewise stay untouched.
            var inputA = new Mock<ISupervisedLearnerNode>();
            var inputB = new Mock<ISupervisedLearnerNode>();
            var outputA = new Mock<ISupervisedLearnerNode>();
            var outputB = new Mock<ISupervisedLearnerNode>();

            // Wire everything into the object under test.
            var network = new BackpropagationNetwork(new Mock<IErrorCalculator>().Object);
            network.AddInboundConnection(inboundA.Object);
            network.AddInboundConnection(inboundB.Object);
            network.AddOutboundConnection(outboundReported.Object);
            network.AddOutboundConnection(outboundPending.Object);
            network.AddInputNode(inputA.Object);
            network.AddInputNode(inputB.Object);
            network.AddOutputNode(outputA.Object);
            network.AddOutputNode(outputB.Object);

            //// ACT
            const double ErrorSignal = -2.3d;
            network.CalculateError(ErrorSignal);

            //// ASSERT

            // Each outbound connection's reporting flag was consulted exactly once...
            outboundReported.Verify(c => c.IsReportingError, Times.Exactly(1));
            outboundPending.Verify(c => c.IsReportingError, Times.Exactly(1));

            // ...and, because one connection has not reported yet, no error was
            // propagated, no flags were cleared, and no node was asked to calculate.
            inboundA.Verify(c => c.ReportError(It.IsAny<double>()), Times.Never());
            inboundB.Verify(c => c.ReportError(It.IsAny<double>()), Times.Never());
            outboundReported.Verify(c => c.ClearReportingFlag(), Times.Never());
            outboundPending.Verify(c => c.ClearReportingFlag(), Times.Never());
            inputA.VerifyGet(n => n.CachedErrors, Times.Never());
            inputB.VerifyGet(n => n.CachedErrors, Times.Never());
            outputA.Verify(n => n.CalculateError(It.IsAny<double>()), Times.Never());
            outputB.Verify(n => n.CalculateError(It.IsAny<double>()), Times.Never());
        }