Example 1
        // Begin the training of ANN_MLP
        public void Train()
        {
            int           trainSampleCount = TrainSamples - 1;
            Matrix <Byte> varType          = new Matrix <byte>(trainData.Cols + 1, 1);

            varType.SetValue((byte)Emgu.CV.ML.MlEnum.VarType.Numerical);              //the data is numerical
            varType[trainData.Cols, 0] = (byte)Emgu.CV.ML.MlEnum.VarType.Categorical; //the response is categorical

            using (Matrix <int> layerSize = new Matrix <int>(new int[] { InputLayers, 40, 20, 20, 1 }))
                using (Mat layerSizeMat = layerSize.Mat)

                    using (TrainData td = new TrainData(trainData, Emgu.CV.ML.MlEnum.DataLayoutType.RowSample, trainClasses, null, null, null, varType))
                    {
                        nnet.SetLayerSizes(layerSizeMat);
                        nnet.SetActivationFunction(ANN_MLP.AnnMlpActivationFunction.SigmoidSym, 0.6, 1);
                        nnet.TermCriteria = new MCvTermCriteria(1000, 1.0e-8);
                        nnet.SetTrainMethod(ANN_MLP.AnnMlpTrainMethod.Backprop, 0.1, 0);
                        try
                        {
                            nnet.Train(td, (int)Emgu.CV.ML.MlEnum.AnnMlpTrainingFlag.Default);
                            Console.WriteLine("Training Completed Successfully....");
                        }
                        catch (Exception e)
                        {
                            Console.WriteLine("Training Error:" + e.Message);
                        }
                    }
        }
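The method above uses several members that are not shown; a minimal sketch of what they could look like, reusing the names from the method body (the sizes are purely illustrative):

        // Hypothetical member declarations assumed by Train() above.
        private ANN_MLP nnet = new ANN_MLP();
        private int TrainSamples = 100;                                    // number of training rows
        private int InputLayers = 10;                                      // number of input features
        private Matrix<float> trainData = new Matrix<float>(100, 10);      // TrainSamples x InputLayers
        private Matrix<float> trainClasses = new Matrix<float>(100, 1);    // one categorical response per row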
Example 2
        public Matrix <float> testingMLP(Matrix <float> testData, string modelName, int hiddenLayers = 2, ANN_MLP.AnnMlpActivationFunction activationType = ANN_MLP.AnnMlpActivationFunction.SigmoidSym)
        {
            Matrix <float> finalResult = null;

            layerSize = new Matrix <int>(new int[] { testData.Cols, hiddenLayers, 1 });
            try
            {
                using (ANN_MLP network1 = new ANN_MLP()) // Testing trained data
                {
                    network1.SetActivationFunction(activationType);
                    network1.SetLayerSizes(layerSize);

                    network1.Read(new FileStorage(modelName + ".xml", FileStorage.Mode.Read).GetFirstTopLevelNode()); // Load trained ANN weights

                    IInputArray  Sample_test = testData;
                    IOutputArray Result      = new Matrix <float>(1, 1);

                    network1.Predict(Sample_test, Result); //Start Network prediction

                    finalResult = (Matrix <float>)Result;
                    return(finalResult);
                }
            }
            catch (Exception ee)
            {
                return(finalResult);
            }
        }
Example 3
        private void button1_Click(object sender, EventArgs e)
        {

            Matrix<int> layerSize = new Matrix<int>(new int[] { 2, 2, 1 });
            ANN_MLP nnPtr = new ANN_MLP();
            nnPtr.SetLayerSizes(layerSize);
            nnPtr.SetActivationFunction(ANN_MLP.AnnMlpActivationFunction.SigmoidSym);
            nnPtr.SetTrainMethod(ANN_MLP.AnnMlpTrainMethod.Backprop, 0.01, 0.01);

            if (!nnPtr.Train(samples, Emgu.CV.ML.MlEnum.DataLayoutType.RowSample, responses))
                return;
        }
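`samples` and `responses` are not defined in this snippet; a minimal sketch, assuming they are row-sample matrices that fit the { 2, 2, 1 } layer sizes above (the XOR-style values are purely illustrative):

            // Hypothetical training data for the handler above: four row-samples with two
            // features each, and one response per row.
            Matrix<float> samples = new Matrix<float>(new float[,] {
                { 0, 0 }, { 0, 1 }, { 1, 0 }, { 1, 1 }
            });
            Matrix<float> responses = new Matrix<float>(new float[,] {
                { 0 }, { 1 }, { 1 }, { 0 }
            });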
Example 4
        private void annTraining()
        {
            string       finalOutput = "";
            int          features    = 16;
            int          classes     = 26;
            Matrix <int> layers      = new Matrix <int>(6, 1);

            layers[0, 0] = features;
            layers[1, 0] = classes * 16;
            layers[2, 0] = classes * 8;
            layers[3, 0] = classes * 4;
            layers[4, 0] = classes * 2;
            layers[5, 0] = classes;

            FileStorage fileStorageRead = new FileStorage(@"ANN_Model.xml", FileStorage.Mode.Read);

            ann.Read(fileStorageRead.GetRoot(0));
            ann.SetLayerSizes(layers);
            ann.SetActivationFunction(ANN_MLP.AnnMlpActivationFunction.SigmoidSym, 0, 0);
            ann.SetTrainMethod(ANN_MLP.AnnMlpTrainMethod.Backprop, 0, 0);
            ann.Train(allFeatureOfSample, DataLayoutType.RowSample, annAllResponse);

            FileStorage fileStorageWrite = new FileStorage(@"ANN_Model.xml", FileStorage.Mode.Write);

            ann.Write(fileStorageWrite);

            Matrix <float> testSample = new Matrix <float>(1, 16);

            for (int q = 0; q < 16; q++)
            {
                testSample[0, q] = allFeatureOfSample[12, q];
            }
            float real = ann.Predict(testSample);

            finalOutput += labelArray[(int)real];
            label5.Text  = finalOutput.ToString();
            SpeechSynthesizer reader1 = new SpeechSynthesizer();


            if (label5.Text != " ")
            {
                reader1.Dispose();
                reader1 = new SpeechSynthesizer();
                reader1.SpeakAsync(finalOutput.ToString());
            }
            else
            {
                MessageBox.Show("No Text Present!");
            }

            System.IO.File.WriteAllText(@"ANNResult.txt", real.ToString());
        }
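As in Example 1, the members used here are declared elsewhere; a minimal sketch, reusing the names from the method body with purely illustrative sizes:

        // Hypothetical member declarations assumed by annTraining() above.
        private ANN_MLP ann = new ANN_MLP();
        private Matrix<float> allFeatureOfSample = new Matrix<float>(16, 16);   // 16 samples x 16 features
        private Matrix<float> annAllResponse = new Matrix<float>(16, 26);       // one row per sample, 26 classes
        private string[] labelArray = new string[26];                           // class labels, e.g. "A".."Z"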
Example 5
        public Form2()
        {
            InitializeComponent();
            // Initialize
            bp = new ANN_MLP();
            Matrix<int> layerSizes = new Matrix<int>(new int[] { 2, 2, 2, 2, 1 });
            bp.SetLayerSizes(layerSizes);
            bp.SetActivationFunction(ANN_MLP.AnnMlpActivationFunction.Gaussian, 0, 0);
            bp.TermCriteria = new MCvTermCriteria(10, 1.0e-8);
            //bp.BackpropWeightScale = 0.1;
            //bp.BackpropMomentumScale = 0.1;
            bp.SetTrainMethod(ANN_MLP.AnnMlpTrainMethod.Backprop, 0, 0);
            // Train
            float[,] labels = new float[,] {
            { 0 }, { 1 }, { 0 }, { 1 }
            };
            Matrix<float> labelsMats = new Matrix<float>(labels);
            //Matrix<float> labelsMats = new Matrix<float>(count, 1);
            //Matrix<float> labelsMats1 = labelsMats.GetRows(0, count >> 1, 1);
            //labelsMats1.SetValue(1);
            //Matrix<float> labelsMats2 = labelsMats.GetRows(count >> 1, count, 1);
            //labelsMats2.SetValue(0);
            float[,] trainingData = new float[,] {
            { 1, 2 }, { 51, 52 }, { 111, 112 }, { 211, 212 }
            };
            for (int i = 0; i < trainingData.GetLength(0); i++) // normalize
            {
                for (int j = 0; j < trainingData.GetLength(1); j++)
                {
                    trainingData[i, j] /= 512;
                }
            }
            Matrix<float> trainingDataMat = new Matrix<float>(trainingData);
            //Matrix<float> trainingDataMat = new Matrix<float>(count, 2);
            //Matrix<float> trainingDataMat1 = trainingDataMat.GetRows(0, count >> 1, 1);
            //trainingDataMat1.SetRandNormal(new MCvScalar(200 / 512f), new MCvScalar(50 / 512f));
            //Matrix<float> trainingDataMat2 = trainingDataMat.GetRows(count >> 1, count, 1);
            //trainingDataMat2.SetRandNormal(new MCvScalar(300 / 512f), new MCvScalar(50 / 512f));

            TrainData tmpTrainData = new TrainData(trainingDataMat, Emgu.CV.ML.MlEnum.DataLayoutType.RowSample, labelsMats);
            bp.Train(tmpTrainData, (int)Emgu.CV.ML.MlEnum.AnnMlpTrainingFlag.Default);
//#if !NETFX_CORE
//                String fileName = Path.Combine(Application.StartupPath, "ann_mlp_model.xml");
//                bp.Save(fileName);
//                if (File.Exists(fileName))
//                    File.Delete(fileName);
//#endif
        }
Example 6
        private void CreateBP()
        {
            bp = new ANN_MLP();
            Matrix <int> layerSizes = new Matrix <int>(new int[] {
                bpWidth * bpHeight,
                bpWidth * bpHeight + 100, bpRectangleCount * 4 + 50,
                //20, 20, 20, 20, 20, 20, 20, 20, 20, 20,
                //20, 20, 20, 20, 20, 20, 20, 20, 20, 20,
                bpRectangleCount * 4
            });

            bp.SetLayerSizes(layerSizes);
            bp.SetActivationFunction(ANN_MLP.AnnMlpActivationFunction.Gaussian, 1, 1);
            //bp.SetActivationFunction(ANN_MLP.AnnMlpActivationFunction.Gaussian, 0, 0);
            bp.TermCriteria = new MCvTermCriteria(1000, 1.0e-8);
            //bp.BackpropWeightScale = 0.1;
            //bp.BackpropMomentumScale = 0.1;
            bp.SetTrainMethod(ANN_MLP.AnnMlpTrainMethod.Backprop, 0.1, 0.1);
            //bp.SetTrainMethod(ANN_MLP.AnnMlpTrainMethod.Backprop, 0, 0);
        }
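CreateBP() only builds the network; a minimal sketch of how the resulting `bp` could then be trained, following the TrainData pattern used by the other examples (`TrainBP`, `sampleCount`, `samples` and `responses` are hypothetical names):

        private void TrainBP(int sampleCount)
        {
            // Row-sample matrices sized to match the layer sizes set in CreateBP():
            // bpWidth * bpHeight inputs and bpRectangleCount * 4 outputs per sample.
            Matrix<float> samples   = new Matrix<float>(sampleCount, bpWidth * bpHeight);
            Matrix<float> responses = new Matrix<float>(sampleCount, bpRectangleCount * 4);
            // ... fill samples and responses here ...

            using (TrainData td = new TrainData(samples, Emgu.CV.ML.MlEnum.DataLayoutType.RowSample, responses))
            {
                bp.Train(td, (int)Emgu.CV.ML.MlEnum.AnnMlpTrainingFlag.Default);
            }
        }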
Example 7
        public bool trainingMLP(Matrix <float> inputData, Matrix <float> outputData, string modelName, int iteration = 1000, double learningRate = 0.01, int hiddenLayers = 2, ANN_MLP.AnnMlpActivationFunction activationType = ANN_MLP.AnnMlpActivationFunction.SigmoidSym, double backpropWeightScale = 0.1, double backpropMomentumScale = 0.2)
        {
            try
            {
                layerSize = new Matrix <int>(new int[] { inputData.Cols, hiddenLayers, 1 }); // Integer vector specifying the number of neurons in each layer, including the input and output layers: the first element is the size of the input layer, the last element the size of the output layer.

                IInputArray sample_in = inputData;
                IInputArray response  = outputData;



                //===========================================================
                using (ANN_MLP network = new ANN_MLP())
                {
                    network.SetActivationFunction(activationType);
                    network.SetLayerSizes(layerSize);
                    network.TermCriteria = new MCvTermCriteria(iteration, learningRate); // Termination criteria: maximum iterations and epsilon
                    network.SetTrainMethod(ANN_MLP.AnnMlpTrainMethod.Backprop);
                    network.BackpropWeightScale   = backpropWeightScale;
                    network.BackpropMomentumScale = backpropMomentumScale;

                    //network.Save("tmp.xml"); // Save temp weights to file for correction before training

                    ActivationFunctionHardFix(network);                                                                  // Fix min max values
                    network.Read(new FileStorage("tmp.xml", FileStorage.Mode.Read).GetFirstTopLevelNode());              // Read Fixed values for training
                    TrainData training = new TrainData(sample_in, Emgu.CV.ML.MlEnum.DataLayoutType.RowSample, response); // Creating training data

                    network.Train(training);                                                                             // Start Training
                    network.Save(modelName + ".xml");
                }
                return(true);
            }
            catch (Exception ee)
            {
                return(false);
            }
        }
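A minimal usage sketch (hypothetical data and model name) pairing trainingMLP above with testingMLP from Example 2: train, then reload the saved model and predict a single sample:

            // Hypothetical data: 100 row-samples, 8 features and one response each.
            Matrix<float> inputs  = new Matrix<float>(100, 8);
            Matrix<float> outputs = new Matrix<float>(100, 1);
            // ... fill inputs and outputs here ...

            if (trainingMLP(inputs, outputs, "myModel"))                  // saves myModel.xml
            {
                Matrix<float> testRow = inputs.GetRows(0, 1, 1);          // first sample as a 1 x 8 row
                Matrix<float> result  = testingMLP(testRow, "myModel");   // loads myModel.xml and predicts
                if (result != null)
                {
                    Console.WriteLine(result[0, 0]);
                }
            }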
Example 8
        public void TestANN_MLP()
        {
            int trainSampleCount = 100;

            #region Generate the training data and classes

            Matrix <float> trainData    = new Matrix <float>(trainSampleCount, 2);
            Matrix <float> trainClasses = new Matrix <float>(trainSampleCount, 1);

            Image <Bgr, Byte> img = new Image <Bgr, byte>(500, 500);

            Matrix <float> sample     = new Matrix <float>(1, 2);
            Matrix <float> prediction = new Matrix <float>(1, 1);

            Matrix <float> trainData1 = trainData.GetRows(0, trainSampleCount >> 1, 1);
            trainData1.SetRandNormal(new MCvScalar(200), new MCvScalar(50));
            Matrix <float> trainData2 = trainData.GetRows(trainSampleCount >> 1, trainSampleCount, 1);
            trainData2.SetRandNormal(new MCvScalar(300), new MCvScalar(50));

            Matrix <float> trainClasses1 = trainClasses.GetRows(0, trainSampleCount >> 1, 1);
            trainClasses1.SetValue(1);
            Matrix <float> trainClasses2 = trainClasses.GetRows(trainSampleCount >> 1, trainSampleCount, 1);
            trainClasses2.SetValue(2);

            #endregion

            using (Matrix <int> layerSize = new Matrix <int>(new int[] { 2, 5, 1 }))
                using (Mat layerSizeMat = layerSize.Mat)

                    using (TrainData td = new TrainData(trainData, MlEnum.DataLayoutType.RowSample, trainClasses))
                        using (ANN_MLP network = new ANN_MLP())
                        {
                            network.SetLayerSizes(layerSizeMat);
                            network.SetActivationFunction(ANN_MLP.AnnMlpActivationFunction.SigmoidSym, 0, 0);
                            network.TermCriteria = new MCvTermCriteria(10, 1.0e-8);
                            network.SetTrainMethod(ANN_MLP.AnnMlpTrainMethod.Backprop, 0.1, 0.1);
                            network.Train(td, (int)Emgu.CV.ML.MlEnum.AnnMlpTrainingFlag.Default);

#if !NETFX_CORE
                            String fileName = Path.Combine(Path.GetTempPath(), "ann_mlp_model.xml");
                            network.Save(fileName);
                            if (File.Exists(fileName))
                            {
                                File.Delete(fileName);
                            }
#endif

                            for (int i = 0; i < img.Height; i++)
                            {
                                for (int j = 0; j < img.Width; j++)
                                {
                                    sample.Data[0, 0] = j;
                                    sample.Data[0, 1] = i;
                                    network.Predict(sample, prediction);

                                    // estimate the network's response for this sample
                                    float response = prediction.Data[0, 0];

                                    // highlight the pixel depending on the accuracy (or confidence)
                                    img[i, j] = response < 1.5 ? new Bgr(90, 0, 0) : new Bgr(0, 90, 0);
                                }
                            }
                        }

            // display the original training samples
            for (int i = 0; i < (trainSampleCount >> 1); i++)
            {
                PointF p1 = new PointF(trainData1[i, 0], trainData1[i, 1]);
                img.Draw(new CircleF(p1, 2), new Bgr(255, 100, 100), -1);
                PointF p2 = new PointF((int)trainData2[i, 0], (int)trainData2[i, 1]);
                img.Draw(new CircleF(p2, 2), new Bgr(100, 255, 100), -1);
            }

            //Emgu.CV.UI.ImageViewer.Show(img);
        }
Example 9
        private async void moduleFeatureExtraction(int first, int last)
        {
            string fghfh = "";

            double[,] RawData = new double[16, 3780];
            int mid  = (first + last) / 2;
            int low  = mid - 8;
            int high = mid + 8;

            for (int i = 0; i < 16; i++)
            {
                for (int j = 0; j < 26; j++)
                {
                    if (j == adasas)
                    {
                        response[i, j] = 1;
                    }
                    else
                    {
                        response[i, j] = 0;
                    }
                }
            }
            adasas++;
            if (low < first)
            {
                low++;
            }
            if (high > last)
            {
                high--;
            }
            int length = high - low;

            for (int k = (low); k < (high); k++)
            {
                string            frameName             = "gesture//" + k + ".jpeg";
                Image <Bgr, byte> featurExtractionInput = new Image <Bgr, byte>(frameName);
                //pictureBox3.Image = featurExtractionInput.Bitmap;
                //label4.Text = k.ToString();
                await Task.Delay(1000 / Convert.ToInt32(2));

                float[] desc = new float[3780];
                desc = GetVector(featurExtractionInput);

                int i = k - (low);
                for (int j = 0; j < 3780; j++)
                {
                    double val = Convert.ToDouble(desc[j]);
                    RawData.SetValue(val, i, j);
                }

                if (k == (high - 1))
                {
                    Matrix <Double> DataMatrix   = new Matrix <Double>(RawData);
                    Matrix <Double> Mean         = new Matrix <Double>(1, 3780);
                    Matrix <Double> EigenValues  = new Matrix <Double>(1, 3780);
                    Matrix <Double> EigenVectors = new Matrix <Double>(3780, 3780);
                    CvInvoke.PCACompute(DataMatrix, Mean, EigenVectors, 16);
                    Matrix <Double> result = new Matrix <Double>(16, 16);
                    CvInvoke.PCAProject(DataMatrix, Mean, EigenVectors, result);


                    String        filePath = @"test.xml";
                    StringBuilder sb       = new StringBuilder();
                    (new XmlSerializer(typeof(Matrix <double>))).Serialize(new StringWriter(sb), result);
                    XmlDocument xDoc = new XmlDocument();
                    xDoc.LoadXml(sb.ToString());

                    System.IO.File.WriteAllText(filePath, sb.ToString());
                    Matrix <double> matrix = (Matrix <double>)(new XmlSerializer(typeof(Matrix <double>))).Deserialize(new XmlNodeReader(xDoc));

                    string djf = null;
                    djf  = System.IO.File.ReadAllText(@"g.txt");
                    djf += Environment.NewLine;
                    djf += Environment.NewLine;
                    for (int p = 0; p < 16; p++)
                    {
                        for (int q = 0; q < 16; q++)
                        {
                            djf += p + " , " + q + "  " + matrix[p, q].ToString() + "    ";
                        }
                        djf += Environment.NewLine;
                    }
                    Matrix <float> masjhdb   = result.Convert <float>();
                    TrainData      trainData = new TrainData(masjhdb, DataLayoutType.RowSample, response);
                    int            features  = 16;
                    int            classes   = 26;
                    Matrix <int>   layers    = new Matrix <int>(6, 1);
                    layers[0, 0] = features;
                    layers[1, 0] = classes * 16;
                    layers[2, 0] = classes * 8;
                    layers[3, 0] = classes * 4;
                    layers[4, 0] = classes * 2;
                    layers[5, 0] = classes;
                    ANN_MLP     ann             = new ANN_MLP();
                    FileStorage fileStorageRead = new FileStorage(@"abc.xml", FileStorage.Mode.Read);
                    ann.Read(fileStorageRead.GetRoot(0));
                    ann.SetLayerSizes(layers);
                    ann.SetActivationFunction(ANN_MLP.AnnMlpActivationFunction.SigmoidSym, 0, 0);
                    ann.SetTrainMethod(ANN_MLP.AnnMlpTrainMethod.Backprop, 0, 0);
                    ann.Train(masjhdb, DataLayoutType.RowSample, response);
                    FileStorage fileStorageWrite = new FileStorage(@"abc.xml", FileStorage.Mode.Write);
                    ann.Write(fileStorageWrite);
                    Matrix <float> hehe = new Matrix <float>(1, 16);
                    for (int q = 0; q < 16; q++)
                    {
                        hehe[0, q] = masjhdb[11, q];
                    }
                    float real = ann.Predict(hehe);

                    fghfh += array[(int)real];
                    SpeechSynthesizer reader = new SpeechSynthesizer();

                    if (richTextBox1.Text != " ")
                    {
                        reader.Dispose();
                        reader = new SpeechSynthesizer();
                        reader.SpeakAsync(fghfh.ToString());
                    }
                    else
                    {
                        MessageBox.Show("No Text Present!");
                    }
                    richTextBox1.Text = fghfh.ToString();
                    System.IO.File.WriteAllText(@"g.txt", real.ToString());
                }
            }
        }
Example 10
        private void Btn_ann2_Click(object sender, EventArgs e)
        {
            this.prepareData();

            if (trainNegData?.Count == 0 || trainActData?.Count == 0)
            {
                MessageBox.Show("训练数据不能为空");
                return;
            }
            int            trainSampleCount = trainActData.Count + trainNegData.Count;
            int            colCount         = width * height;
            Matrix <float> trainData        = new Matrix <float>(trainSampleCount, colCount);
            Matrix <float> trainClasses     = new Matrix <float>(trainSampleCount, 1);

            Matrix <float> sample     = new Matrix <float>(1, colCount);
            Matrix <float> prediction = new Matrix <float>(1, 1);

            // Prepare the filled-in (positive) samples
            var actCount = trainActData.Count;

            //Matrix<float> trainActDataMatr = new Matrix<float>(actCount, width * height);
            //Matrix<float> trainActClassesMatr = new Matrix<float>(actCount, 1);

            for (int i = 0; i < actCount; i++)
            {
                var colData   = trainActData[i];
                var colCount1 = colData.Count;
                for (int j = 0; j < colCount1; j++)
                {
                    trainData.Data[i, j] = trainActData[i][j];
                }

                trainClasses.Data[i, 0] = 1;
                //trainClasses.Data[i, 1] = 0;
            }

            // Prepare the unfilled (negative) samples
            var negCount = trainNegData.Count;

            //Matrix<float> trainNegDataMatr = new Matrix<float>(negCount, width * height);
            //Matrix<float> trainNegClassesMatr = new Matrix<float>(negCount, 1);
            for (int i = 0; i < negCount; i++)
            {
                var colData   = trainNegData[i];
                var colCount1 = colData.Count;
                for (int j = 0; j < colCount1; j++)
                {
                    trainData.Data[i + actCount, j] = trainNegData[i][j];
                }

                trainClasses.Data[i + actCount, 0] = 0;
                //trainClasses.Data[i + actCount, 1] = 1;
            }

            // Train
            using (Matrix <int> layerSize = new Matrix <int>(new int[] { 286, 10, 10, 1 }))
                using (Mat layerSizeMat = layerSize.Mat)
                    using (TrainData td = new TrainData(trainData, Emgu.CV.ML.MlEnum.DataLayoutType.RowSample, trainClasses))
                        using (ANN_MLP network = new ANN_MLP())
                        {
                            network.SetLayerSizes(layerSizeMat);
                            network.SetActivationFunction(ANN_MLP.AnnMlpActivationFunction.SigmoidSym, 0, 0);
                            network.TermCriteria = new MCvTermCriteria(10, 1.0e-8);
                            network.SetTrainMethod(ANN_MLP.AnnMlpTrainMethod.Backprop, 0.01, 0.01);
                            network.Train(td, (int)Emgu.CV.ML.MlEnum.AnnMlpTrainingFlag.Default);


                            //String fileName = "ann_mlp_model.xml"; //Path.Combine(Path.GetTempPath(), "ann_mlp_model.xml");
                            network.Save(annFileName);
                            //if (File.Exists(fileName))
                            //    File.Delete(fileName);

                            // Test
                            // 1. Test the filled-in samples
                            var testActCount  = testActData.Count;
                            var rightActCount = 0; // number of correctly recognized filled-in samples
                            for (int i = 0; i < testActCount; i++)
                            {
                                var testData = testActData[i];
                                for (int j = 0; j < testData.Count; j++)
                                {
                                    sample[0, j] = testData[j];
                                }
                                network.Predict(sample, prediction);
                                float response = prediction.Data[0, 0];
                                if (response > 0.5)
                                {
                                    rightActCount++;
                                    Console.WriteLine($"该数据是涂答的,正确识别{response}");
                                }
                                else
                                {
                                    Console.WriteLine($"该数据是涂答的,错误识别{response}");
                                }
                            }

                            // 2. Test the unfilled samples
                            var testNegCount  = testNegData.Count;
                            var rightNegCount = 0; // number of correctly recognized unfilled samples
                            for (int i = 0; i < testNegCount; i++)
                            {
                                var testData = testNegData[i];
                                for (int j = 0; j < testData.Count; j++)
                                {
                                    sample[0, j] = testData[j];
                                }
                                network.Predict(sample, prediction);
                                float response = prediction.Data[0, 0];
                                if (response <= 0.5)
                                {
                                    rightNegCount++;
                                    Console.WriteLine($"该数据是未涂答的,正确识别{response}");
                                }
                                else
                                {
                                    Console.WriteLine($"该数据是未涂答的,错误识别{response}");
                                }
                            }
                            MessageBox.Show("训练完毕,并测试");
                        }
        }
Example 11
        private void Btn_CNN2_Click(object sender, EventArgs e)
        {
            var positiveData = GetPositiveData();
            var negativeData = GetNegativeData();

            if (positiveData?.Count == 0 || negativeData?.Count == 0)
            {
                MessageBox.Show("训练数据不能为空");
                return;
            }
            int trainSampleCount = positiveData.Count + negativeData.Count;

            Matrix <float> trainData    = new Matrix <float>(trainSampleCount, 2);
            Matrix <float> trainClasses = new Matrix <float>(trainSampleCount, 1);

            Matrix <float> sample     = new Matrix <float>(1, 2);
            Matrix <float> prediction = new Matrix <float>(1, 1);


            for (int i = 0; i < positiveData.Count; i++)
            {
                var item = positiveData[i];
                trainData.Data[i, 0] = item.Percent;
                trainData.Data[i, 1] = item.Avg;

                trainClasses.Data[i, 0] = 1;
            }
            for (int i = 0; i < negativeData.Count; i++)
            {
                var item = negativeData[i];
                int row  = positiveData.Count + i;
                trainData.Data[row, 0] = item.Percent;
                trainData.Data[row, 1] = item.Avg;

                trainClasses.Data[row, 0] = 0;
            }

            Image <Bgr, Byte> img = new Image <Bgr, byte>(765, 300);

            using (Matrix <int> layerSize = new Matrix <int>(new int[] { 2, 5, 1 }))
                using (Mat layerSizeMat = layerSize.Mat)
                    using (TrainData td = new TrainData(trainData, Emgu.CV.ML.MlEnum.DataLayoutType.RowSample, trainClasses))
                        using (ANN_MLP network = new ANN_MLP())
                        {
                            network.SetLayerSizes(layerSizeMat);
                            network.SetActivationFunction(ANN_MLP.AnnMlpActivationFunction.SigmoidSym, 0, 0);
                            network.TermCriteria = new MCvTermCriteria(10, 1.0e-8);
                            network.SetTrainMethod(ANN_MLP.AnnMlpTrainMethod.Backprop, 0.1, 0.1);
                            network.Train(td, (int)Emgu.CV.ML.MlEnum.AnnMlpTrainingFlag.Default);


                            //String fileName = "ann_mlp_model.xml"; //Path.Combine(Path.GetTempPath(), "ann_mlp_model.xml");
                            network.Save(annFileName);
                            //if (File.Exists(fileName))
                            //    File.Delete(fileName);

                            // Plot the decision regions

                            for (int i = 0; i < img.Height; i++)
                            {
                                for (int j = 0; j < img.Width; j++)
                                {
                                    sample.Data[0, 0] = i * 1.0f / (100 * 3);
                                    sample.Data[0, 1] = 255 - j * 1.0f / 3.0f;
                                    network.Predict(sample, prediction);

                                    // estimate the network's response for this sample
                                    float response = prediction.Data[0, 0];

                                    // highlight the pixel depending on the accuracy (or confidence)
                                    img[i, j] = response < 0.5 ? new Bgr(90, 0, 0) : new Bgr(0, 90, 0);
                                }
                            }
                        }

            // display the original training samples
            for (int i = 0; i < positiveData.Count; i++)
            {
                var    d  = positiveData[i];
                PointF p1 = new PointF((255 - d.Avg) * 3, d.Percent * 300);
                img.Draw(new CircleF(p1, 2), new Bgr(255, 100, 100), -1);
            }
            for (int i = 0; i < negativeData.Count; i++)
            {
                var    d  = negativeData[i];
                PointF p1 = new PointF((255 - d.Avg) * 3, d.Percent * 300);
                img.Draw(new CircleF(p1, 2), new Bgr(100, 255, 100), -1);
            }
            this.ib_result.Image = img;
            MessageBox.Show("训练完毕");
        }
Example 12
        private void button1_Click(object sender, EventArgs e)
        {
            int trainSampleCount = 100;

            #region Generate the training data and classes
            Matrix <float> trainData    = new Matrix <float>(trainSampleCount, 2);
            Matrix <float> trainClasses = new Matrix <float>(trainSampleCount, 1);

            Image <Bgr, Byte> img = new Image <Bgr, byte>(500, 500);

            Matrix <float> sample     = new Matrix <float>(1, 2);
            Matrix <float> prediction = new Matrix <float>(1, 1);

            Matrix <float> trainData1 = trainData.GetRows(0, trainSampleCount >> 1, 1);
            trainData1.SetRandNormal(new MCvScalar(200), new MCvScalar(50));
            Matrix <float> trainData2 = trainData.GetRows(trainSampleCount >> 1, trainSampleCount, 1);
            trainData2.SetRandNormal(new MCvScalar(300), new MCvScalar(50));

            Matrix <float> trainClasses1 = trainClasses.GetRows(0, trainSampleCount >> 1, 1);
            trainClasses1.SetValue(1);
            Matrix <float> trainClasses2 = trainClasses.GetRows(trainSampleCount >> 1, trainSampleCount, 1);
            trainClasses2.SetValue(2);
            #endregion

            using (Matrix <int> layerSize = new Matrix <int>(new int[] { 2, 10, 2 }))
                using (Mat layerSizeMat = layerSize.Mat)

                    using (TrainData td = new TrainData(trainData, Emgu.CV.ML.MlEnum.DataLayoutType.RowSample, trainClasses))
                        using (ANN_MLP network = new ANN_MLP())
                        {
                            network.SetLayerSizes(layerSizeMat);
                            network.SetActivationFunction(ANN_MLP.AnnMlpActivationFunction.SigmoidSym);
                            network.TermCriteria = new MCvTermCriteria(10000, 1.0e-8);
                            network.SetTrainMethod(ANN_MLP.AnnMlpTrainMethod.Backprop, 0.1, 0.1);
                            network.Train(td);
                            network.Save("temp.txt");
                            for (int i = 0; i < img.Height; i++)
                            {
                                for (int j = 0; j < img.Width; j++)
                                {
                                    sample.Data[0, 0] = j;
                                    sample.Data[0, 1] = i;
                                    network.Predict(sample, prediction);

                                    // estimate the network's response for this sample
                                    float response = prediction.Data[0, 0];

                                    // highlight the pixel depending on the accuracy (or confidence)
                                    if (response < 1.5)
                                    {
                                        img[i, j] = new Bgr(90, 0, 0);
                                    }
                                    else
                                    {
                                        img[i, j] = new Bgr(0, 90, 0);
                                    }
                                }
                            }
                        }

            // display the original training samples
            for (int i = 0; i < (trainSampleCount >> 1); i++)
            {
                PointF p1 = new PointF(trainData1[i, 0], trainData1[i, 1]);
                img.Draw(new CircleF(p1, 2), new Bgr(255, 100, 100), -1);
                PointF p2 = new PointF((int)trainData2[i, 0], (int)trainData2[i, 1]);
                img.Draw(new CircleF(p2, 2), new Bgr(100, 255, 100), -1);
            }
            pictureBox1.Image = img.ToBitmap();
            // Emgu.CV.UI.ImageViewer.Show(img);
        }