Example #1
        public void LabTest1()
        {
            var inputLayer  = new LinearLayer(5);
            var hiddenLayer = new TanhLayer(neuronCount);
            var outputLayer = new TanhLayer(2);

            new BackpropagationConnector(inputLayer, hiddenLayer);
            new BackpropagationConnector(hiddenLayer, outputLayer);
            _xorNetwork = new BackpropagationNetwork(inputLayer, outputLayer);
            _xorNetwork.SetLearningRate(learningRate);

            var trainingSet = new TrainingSet(5, 2);

            trainingSet.Add(new TrainingSample(new double[] { 0, 0, 0, 0, 0 }, new double[] { 0, 0 }));
            trainingSet.Add(new TrainingSample(new double[] { 0, 0, 0, 1, 0 }, new double[] { 3, 3 }));
            trainingSet.Add(new TrainingSample(new double[] { 0, 0, 1, 0, 0 }, new double[] { 2, 2 }));
            trainingSet.Add(new TrainingSample(new double[] { 0, 0, 1, 1, 0 }, new double[] { 2, 3 }));
            trainingSet.Add(new TrainingSample(new double[] { 0, 1, 0, 0, 0 }, new double[] { 1, 1 }));
            trainingSet.Add(new TrainingSample(new double[] { 0, 1, 0, 1, 0 }, new double[] { 1, 3 }));
            trainingSet.Add(new TrainingSample(new double[] { 0, 1, 1, 0, 0 }, new double[] { 1, 2 }));
            trainingSet.Add(new TrainingSample(new double[] { 0, 1, 1, 1, 0 }, new double[] { 1, 3 }));
            trainingSet.Add(new TrainingSample(new double[] { 22, 1, 1, 1, 22 }, new double[] { 1, 3 }));

            _errorList = new double[cycles];

            //_xorNetwork.EndEpochEvent += EndEpochEvent;
            _xorNetwork.Learn(trainingSet, cycles);

            var result = _xorNetwork.Run(new double[] { 0, 0, 1, 1, 0 });
        }
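Example #1 relies on several members defined elsewhere in the test class (neuronCount, learningRate, cycles, _xorNetwork, _errorList). A minimal sketch of what those declarations could look like, with purely illustrative values, is:

        // Hypothetical field declarations assumed by LabTest1; all values are assumptions.
        private BackpropagationNetwork _xorNetwork;
        private double[] _errorList;
        private const int neuronCount = 3;         // assumed hidden-layer size
        private const double learningRate = 0.25;  // assumed learning rate
        private const int cycles = 10000;          // assumed number of training epochs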
Example #2
        public Image ImagineImage(Image from)
        {
            int il = 0;

            //Random r = new Random(seed);
            double[] iv = new double[from.W * from.H * 3];
            for (int y = 0; y < from.H; y++)
            {
                for (int x = 0; x < from.W; x++)
                {
                    iv[il] = GV(from.Dat[il++]);
                    iv[il] = GV(from.Dat[il++]);
                    iv[il] = GV(from.Dat[il++]);
                }
            }
            Image ni = new Image(W, H);

            double[] ov = net.Run(iv);
            for (int y = 0; y < H; y++)
            {
                for (int x = 0; x < W; x++)
                {
                    int l = (y * W * 3) + (x * 3);
                    System.Drawing.Color col = System.Drawing.Color.FromArgb(255, TB(ov[l]), TB(ov[l + 1]), TB(ov[l + 2]));
                    ni.BP.SetPixel(x, y, col);
                }
            }
            return(ni);
        }
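ImagineImage depends on two helpers, GV and TB, that the example does not show. A plausible sketch, assuming the image channels are stored as 0-255 values and the network works on inputs in [0, 1], is:

        // Hypothetical helpers assumed by ImagineImage; the scaling scheme is an assumption.
        private static double GV(double channel)
        {
            return channel / 255.0;                                // scale a colour channel to [0, 1]
        }

        private static int TB(double value)
        {
            return (int)Math.Max(0, Math.Min(255, value * 255.0)); // clamp back to a 0-255 channel
        }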
Example #3
    public double[] ProcessSensorData(double[] input)
    {
        if (input == null)
        {
            return(null);
        }

        return(neuralNetwork.Run(input));
    }
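ProcessSensorData only wraps neuralNetwork.Run; the network itself is built elsewhere. A minimal sketch of how that field could be constructed, following the same NeuronDotNet pattern as the other examples (the layer sizes and learning rate here are assumptions), is:

    // Hypothetical construction of the neuralNetwork field; layer sizes and learning rate are assumptions.
    var inputLayer  = new LinearLayer(10);
    var hiddenLayer = new SigmoidLayer(6);
    var outputLayer = new SigmoidLayer(2);

    new BackpropagationConnector(inputLayer, hiddenLayer);
    new BackpropagationConnector(hiddenLayer, outputLayer);

    neuralNetwork = new BackpropagationNetwork(inputLayer, outputLayer);
    neuralNetwork.SetLearningRate(0.25);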
Example #4
        /// <summary>
        /// Handles the Calculate button click.
        /// </summary>
        /// <param name="sender"></param>
        /// <param name="e"></param>
        private void tsmiCalculate_Click(object sender, EventArgs e)
        {
            // Create the input, hidden, and output layers
            ActivationLayer inputLayer  = GetLayer(cboInputLayerType.SelectedItem.ToString(), 2);
            ActivationLayer hiddenLayer = GetLayer(cboHiddenLayerType.SelectedItem.ToString(), int.Parse(txtHiddenLayerCount.Text));
            ActivationLayer outputLayer = GetLayer(cboOutputLayerType.SelectedItem.ToString(), 1);

            // Create the connections between the layers
            new BackpropagationConnector(inputLayer, hiddenLayer, ConnectionMode.Complete).Initializer  = new RandomFunction(0, 0.3);
            new BackpropagationConnector(hiddenLayer, outputLayer, ConnectionMode.Complete).Initializer = new RandomFunction(0, 0.3);

            // Create the neural network
            var network = new BackpropagationNetwork(inputLayer, outputLayer);

            network.SetLearningRate(double.Parse(txtInitialLearningRate.Text), double.Parse(txtFinalLearningRate.Text));

            // Build the training set and train
            var trainingSet = new TrainingSet(2, 1);

            for (var i = 0; i < 17; i++)
            {
                var x1 = data[i, 0];
                var x2 = data[i, 1];
                var y  = data[i, 2];

                var inputVector    = new double[] { x1, x2 };
                var outputVector   = new double[] { y };
                var trainingSample = new TrainingSample(inputVector, outputVector);
                trainingSet.Add(trainingSample);
            }
            network.SetLearningRate(0.3, 0.1);
            network.Learn(trainingSet, int.Parse(txtTrainingEpochs.Text));
            network.StopLearning();

            // Run predictions
            for (var i = 0; i < 17; i++)
            {
                var x1 = data[i, 0];
                var x2 = data[i, 1];
                var y  = data[i, 2];

                var testInput  = new double[] { x1, x2 };
                var testOutput = network.Run(testInput)[0];

                var absolute = testOutput - y;
                var relative = Math.Abs((testOutput - y) / testOutput);

                dgvData.Rows[i].Cells[3].Value = testOutput.ToString("f3");
                dgvData.Rows[i].Cells[4].Value = absolute.ToString("f3");
                dgvData.Rows[i].Cells[5].Value = (relative * 100).ToString("f1") + "%";
            }
        }
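The GetLayer helper and the 17x3 data array used in Example #4 are defined elsewhere on the form. A hypothetical version of GetLayer, mapping the combo-box text to one of the NeuronDotNet layer types that appear in these examples, might look like:

        // Hypothetical GetLayer helper; the recognized type names are assumptions.
        private ActivationLayer GetLayer(string layerType, int neuronCount)
        {
            switch (layerType)
            {
            case "LinearLayer":
                return new LinearLayer(neuronCount);

            case "TanhLayer":
                return new TanhLayer(neuronCount);

            default:
                return new SigmoidLayer(neuronCount);
            }
        }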
Example #5
        private void button38_Click(object sender, EventArgs e)
        {
            double[] girismatris = gelengiris();
            double[] cikis       = ag.Run(girismatris);
            int      i           = 1;

            foreach (double item in cikis)
            {
                panel1.Controls["label_cikis_" + (i)].Text = item.ToString();


                i++;
            }
        }
Example #6
    void CheckImage(Texture2D image, int index)
    {
        double[] pxs    = new double[neurons];
        Color[]  pixels = image.GetPixels();
        for (int i = 0; i < image.width * image.height; i++)
        {
            pxs[i] = pixels[i].grayscale;
        }

        double[] output = neuralNetwork.Run(pxs);
        int      maxVal = MaxValue(output);

        if (maxVal == index)
        {
            perforCounter++;
        }
    }
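MaxValue is not shown in Example #6; given that its result is compared with the expected class index, it is presumably an argmax over the output vector. A hedged sketch:

    // Hypothetical argmax helper assumed by CheckImage.
    int MaxValue(double[] values)
    {
        int best = 0;
        for (int i = 1; i < values.Length; i++)
        {
            if (values[i] > values[best])
            {
                best = i;
            }
        }
        return best;
    }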
Example #7
        public void testing()
        {
            double[] dati = new double[10];

            while (_shouldStop2 == false)
            {
                if (dati[0] != chart1.Series["Delta"].Points[19].YValues[0])
                {
                    dati[0] = chart1.Series["Delta"].Points[19].YValues[0];
                    dati[1] = chart2.Series["Theta"].Points[19].YValues[0];
                    dati[2] = chart3.Series["Low Alpha"].Points[19].YValues[0];
                    dati[3] = chart3.Series["High Alpha"].Points[19].YValues[0];
                    dati[4] = chart4.Series["Low Beta"].Points[19].YValues[0];
                    dati[5] = chart4.Series["High Beta"].Points[19].YValues[0];
                    dati[6] = chart5.Series["Low Gamma"].Points[19].YValues[0];
                    dati[7] = chart5.Series["High Gamma"].Points[19].YValues[0];
                    dati[8] = progressBar1.Value;
                    dati[9] = progressBar2.Value;

                    double[] output = network.Run(dati);
                    // label17.Invoke((MethodInvoker)(() => label17.Text = "" + output[0]));
                }
            }
        }
Example #8
        private void Train(object sender, EventArgs e)
        {
            // btnTrain.Enabled = false;

            int cycles = 200;
            // if (!int.TryParse(txtCycles.Text, out cycles)) { cycles = 200; }
            // txtCycles.Text = cycles.ToString();

            int currentCombination = 0;

            //int totalCombinations = Alphabet.LetterCount * (Alphabet.LetterCount - 1) / 2;

            for (int i = 0; i < Alphabet.LetterCount; i++)
            {
                for (int j = i + 1; j < Alphabet.LetterCount; j++)
                {
                    ActivationLayer inputLayer  = new LinearLayer(400);
                    ActivationLayer hiddenLayer = new SigmoidLayer(4);
                    ActivationLayer outputLayer = new SigmoidLayer(2);
                    new BackpropagationConnector(inputLayer, hiddenLayer);
                    new BackpropagationConnector(hiddenLayer, outputLayer);
                    BackpropagationNetwork network = new BackpropagationNetwork(inputLayer, outputLayer);

                    TrainingSet trainingSet = new TrainingSet(400, 2);
                    Alphabet    ithLetter   = Alphabet.GetLetter(i);
                    Alphabet    jthLetter   = Alphabet.GetLetter(j);
                    foreach (Letter instance in ithLetter.Instances)
                    {
                        trainingSet.Add(new TrainingSample(instance.GetEquivalentVector(20, 20), new double[] { 1d, 0d }));
                    }
                    foreach (Letter instance in jthLetter.Instances)
                    {
                        trainingSet.Add(new TrainingSample(instance.GetEquivalentVector(20, 20), new double[] { 0d, 1d }));
                    }

                    //progressTraining.Value = 100 * currentCombination / totalCombinations;

                    Application.DoEvents();

                    bool correct = false;

                    int currentCycles = 35;
                    int count         = trainingSet.TrainingSampleCount;

                    while (correct == false & currentCycles <= cycles)
                    {
                        network.Initialize();
                        network.Learn(trainingSet, currentCycles);
                        correct = true;
                        for (int sampleIndex = 0; sampleIndex < count; sampleIndex++)
                        {
                            double[] op = network.Run(trainingSet[sampleIndex].InputVector);
                            if (((trainingSet[sampleIndex].OutputVector[0] > trainingSet[sampleIndex].OutputVector[1]) && op[0] - op[1] < 0.4) || ((trainingSet[sampleIndex].OutputVector[0] < trainingSet[sampleIndex].OutputVector[1]) && op[1] - op[0] < 0.4))
                            {
                                correct = false;
                                trainingSet.Add(trainingSet[sampleIndex]);
                            }
                        }
                        currentCycles *= 2;
                    }

                    //lstLog.Items.Add(cboAplhabet.Items[i] + " & " + cboAplhabet.Items[j] + " = " + network.MeanSquaredError.ToString("0.0000"));
                    // lstLog.TopIndex = lstLog.Items.Count - (int)(lstLog.Height / lstLog.ItemHeight);
                    try
                    {
                        using (Stream stream = File.Open(Application.StartupPath + @"\Networks\" + i.ToString("00") + j.ToString("00") + ".ndn", FileMode.Create))
                        {
                            IFormatter formatter = new BinaryFormatter();
                            formatter.Serialize(stream, network);
                        }
                    }
                    catch (Exception)
                    {
                        MessageBox.Show("Failed to save trained neural networks", "Critical Error", MessageBoxButtons.OK, MessageBoxIcon.Error);
                        return;
                    }
                    currentCombination++;
                }
            }
            //  progressTraining.Value = 0;
            //  btnTrain.Enabled = false;
        }
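Example #8 only saves the trained networks. To use one later it would be loaded back with the same BinaryFormatter; a sketch of that reverse step, with an assumed file name, is:

            // Hypothetical loading of a previously saved network; the file name is an assumption.
            BackpropagationNetwork loaded;
            using (Stream stream = File.Open(Application.StartupPath + @"\Networks\0001.ndn", FileMode.Open))
            {
                IFormatter formatter = new BinaryFormatter();
                loaded = (BackpropagationNetwork)formatter.Deserialize(stream);
            }
            // loaded.Run(...) can then classify a 20x20 letter vector, as in the training loop above.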
Example #9
 public double[] estimate(double[] input)
 {
     return(network.Run(input));
 }
Example #10
        // Get Results button
        private void btnGetResult_Click(object sender, EventArgs e)
        {
            if (listBox1.Items.Count > 0)
            {
                listBox1.Items.Clear();
                txtMatrix.Clear();
            }
            // 7x5 matrix
            double[][] dizi = new double[7][];
            {
                dizi[0] = new double[] { 0, 0, 0, 0, 0 };
                dizi[1] = new double[] { 0, 0, 0, 0, 0 };
                dizi[2] = new double[] { 0, 0, 0, 0, 0 };
                dizi[3] = new double[] { 0, 0, 0, 0, 0 };
                dizi[4] = new double[] { 0, 0, 0, 0, 0 };
                dizi[5] = new double[] { 0, 0, 0, 0, 0 };
                dizi[6] = new double[] { 0, 0, 0, 0, 0 };
            };
            // Build the 0/1 matrix from the selected grid buttons
            foreach (Control buttons in pixelContainer.Controls)
            {
                Button button = buttons as Button;

                int i = Convert.ToInt32(button.Tag.ToString());
                if (buttons.Text == "X" && buttons.BackColor == Color.Green)
                {
                    if (i <= 4)
                    {
                        dizi[0][i] = 1;
                    }
                    else if (i <= 9)
                    {
                        dizi[1][i % 5] = 1;
                    }
                    else if (i <= 14)
                    {
                        dizi[2][i % 5] = 1;
                    }
                    else if (i <= 19)
                    {
                        dizi[3][i % 5] = 1;
                    }
                    else if (i <= 24)
                    {
                        dizi[4][i % 5] = 1;
                    }
                    else if (i <= 29)
                    {
                        dizi[5][i % 5] = 1;
                    }
                    else if (i <= 34)
                    {
                        dizi[6][i % 5] = 1;
                    }
                }
                i++;
            }
            double[] outputResult = new double[35];
            // Copy the 0/1 matrix into a flat input array
            int b = 0;

            for (int i = 0; i < 7; i++)
            {
                for (int j = 0; j < 5; j++)
                {
                    outputResult[b] = dizi[i][j];
                    b++;
                }
            }

            double[] input = outputResult;
            // Get the output from the neural network
            double[] output = neuralNetwork.Run(input);

            // Write the results to the list box
            for (int j = 1; j < output.Length + 1; j++)
            {
                switch (j)
                {
                case 1:
                    listBox1.Items.Add($"A : {output[j - 1]:%.####} \n");
                    break;

                case 2:
                    listBox1.Items.Add($"B : {output[j - 1]:%.####} \n");
                    break;

                case 3:
                    listBox1.Items.Add($"C : {output[j - 1]:%.####} \n");
                    break;

                case 4:
                    listBox1.Items.Add($"D : {output[j - 1]:%.####} \n");
                    break;

                case 5:
                    listBox1.Items.Add($"E : {output[j - 1]:%.####} \n");
                    listBox1.Items.Add("MSE: " + neuralNetwork.MeanSquaredError.ToString());
                    break;

                default:
                    Console.WriteLine("Gecersiz.");
                    break;
                }
            }

            // Print the 0/1 matrix to the screen
            for (int k = 0; k < input.Length; k++)
            {
                txtMatrix.Text += (input[k].ToString() + "    ");
                if ((k + 1) % 5 == 0)
                {
                    txtMatrix.Text += "\n";
                }
            }
            txtMatrix.SelectAll();
            txtMatrix.SelectionAlignment = HorizontalAlignment.Center;
            txtMatrix.DeselectAll();
        }
Example #11
        /// <summary>
        /// Created this to test the custom neuron network with binary inputs.
        /// </summary>
        /// <param name="writer"></param>
        public static void Test(
            string file,
            int numberOfInputNeurons,
            int numberOfHiddenNeurons,
            int numberOfOutputNeurons,
            int numberOfCycles  = 50000,
            double learningRate = 0.25)
        {
            TrainingSample sample = new TrainingSample(
                new double[] { },
                new double[] { });

            //We might make a gui for this later.

            double[] errorList = new double[numberOfCycles];

            int totalNumberOfNeurons = numberOfInputNeurons + numberOfOutputNeurons;

            LinearLayer  inputLayer  = new LinearLayer(numberOfInputNeurons);
            SigmoidLayer hiddenLayer = new SigmoidLayer(numberOfHiddenNeurons);
            SigmoidLayer outputLayer = new SigmoidLayer(numberOfOutputNeurons);

            // Connect the layers; the connectors carry the error signal backwards during training.
            BackpropagationConnector conn1 = new BackpropagationConnector(inputLayer, hiddenLayer);
            BackpropagationConnector conn2 = new BackpropagationConnector(hiddenLayer, outputLayer);

            BackpropagationNetwork network = new BackpropagationNetwork(inputLayer, outputLayer);

            network.SetLearningRate(learningRate);

            TrainingSet trainingSet = new TrainingSet(10, 8);

            // Remember the default console input before redirecting it to the file reader.
            var inDefault = Console.In;

            using (StreamReader reader = new StreamReader(file))
            {
                Console.SetIn(reader);
                String line = "";
                //trainingSet.Add(new TrainingSample(new double[] { 0, 0, 0, 0, 1 }, new double[1] { 1 }));
                while ((line = reader.ReadLine()) != null)
                {
                    String[] array       = line.Split(',');
                    double[] inputArray  = new double[10];
                    double[] outputArray = new double[8];

                    for (int i = 0; i < 10; i++)
                    {
                        inputArray[i] = Convert.ToDouble(array[i]);
                    }

                    for (int i = 0; i < 8; i++)
                    {
                        outputArray[i] = Convert.ToDouble(array[i + 11]);
                    }

                    trainingSet.Add(new TrainingSample(inputArray, outputArray));
                }
            }

            double max = 0;

            // create an anonymous function to capture the error value of each iteration, and report back the percent of completion.
            network.EndEpochEvent +=
                delegate(object networkInput, TrainingEpochEventArgs args)
            {
                errorList[args.TrainingIteration] = network.MeanSquaredError;
                max = Math.Max(max, network.MeanSquaredError);
                // PercentComplete = args.TrainingIteration * 100 / numberOfCycles;
            };

            network.Learn(trainingSet, numberOfCycles);

            // indices and errorList could be used to plot the learning curve
            // (errorList[i] holds the mean squared error after cycle i).
            double[] indices = new double[numberOfCycles];

            // print out the error list for scientific evaluation.
            StreamUtilities.DumpData("dumpErrorValues.txt", errorList);

            double[] outputResult = network.OutputLayer.GetOutput();


            outputResult = network.Run(new double[] { 0.47, 0.41, 0.12, 0.05, 0.1, 0.5, 0.1, 0.1, 0.05, 0.1 });

            foreach (var d in outputResult)
            {
                Console.WriteLine("output: " + d);
            }

            // Console.WriteLine("final output");
        }
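The Test method reads a comma-separated file whose first ten fields (indices 0-9) are the inputs and whose fields at indices 11-18 are the expected outputs. A hypothetical call, with an assumed file path, is:

        // Hypothetical invocation of Test; the file path and hyperparameters are assumptions.
        Test("binaryTrainingData.csv",
             numberOfInputNeurons:  10,
             numberOfHiddenNeurons: 6,
             numberOfOutputNeurons: 8,
             numberOfCycles:        50000,
             learningRate:          0.25);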