Example #1
 // Saving the Trained Model
 public void SaveModel(string ModelFileName)
 {
     #if !NETFX_CORE
     if (File.Exists(ModelFileName))     // if model already exists delete model
     {
         File.Delete(ModelFileName);
     }
     nnet.Save(ModelFileName);     // save the model
     #endif
 }
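Example #1 only covers saving. A minimal loading counterpart is sketched below, using the FileStorage/Read pattern that Example #3 uses; the LoadModel name is hypothetical and nnet is assumed to be the same ANN_MLP field:

 // Loading a previously saved model (sketch only)
 public void LoadModel(string ModelFileName)
 {
     #if !NETFX_CORE
     if (File.Exists(ModelFileName))
     {
         // same FileStorage/Read pattern as in Example #3
         nnet.Read(new FileStorage(ModelFileName, FileStorage.Mode.Read).GetFirstTopLevelNode());
     }
     #endif
 }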
Example #2
        private void ActivationFunctionHardFix(ANN_MLP network)
        {
            string tmpFile = "tmp.xml";

            network.Save(tmpFile); // Save current ANN network weights values
            StreamReader reader        = new StreamReader(tmpFile);
            string       configContent = reader.ReadToEnd();

            reader.Close();

            configContent = configContent.Replace("<min_val>0.", "<min_val>0"); // declaration of min max values 0..1
            configContent = configContent.Replace("<max_val>0.", "<max_val>1");
            configContent = configContent.Replace("<min_val1>0.", "<min_val1>0");
            configContent = configContent.Replace("<max_val1>0.", "<max_val1>1");

            StreamWriter writer = new StreamWriter(tmpFile, false);

            writer.Write(configContent);
            writer.Close();
        }
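ActivationFunctionHardFix only patches the values on disk in tmp.xml; the corrected file still has to be read back into the network before training, as Example #3 below does. The relevant pair of calls, shown as a sketch:

            ActivationFunctionHardFix(network);   // saves the network to tmp.xml and rewrites the min/max values
            network.Read(new FileStorage("tmp.xml", FileStorage.Mode.Read).GetFirstTopLevelNode());   // reload the corrected values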
Example #3
        public bool trainingMLP(Matrix <float> inputData, Matrix <float> outputData, string modelName, int iteration = 1000, double learningRate = 0.01, int hiddenLayers = 2, ANN_MLP.AnnMlpActivationFunction activationType = ANN_MLP.AnnMlpActivationFunction.SigmoidSym, double backpropWeightScale = 0.1, double backpropMomentumScale = 0.2)
        {
            try
            {
                layerSize = new Matrix <int>(new int[] { inputData.Cols, hiddenLayers, 1 }); // neurons per layer, including input and output layers: the first element is the input layer size, the last element is the output layer size

                IInputArray sample_in = inputData;
                IInputArray response  = outputData;



                //===========================================================
                using (ANN_MLP network = new ANN_MLP())
                {
                    network.SetActivationFunction(activationType);
                    network.SetLayerSizes(layerSize);
                    network.TermCriteria = new MCvTermCriteria(iteration, learningRate); // number of iterations for training
                    network.SetTrainMethod(ANN_MLP.AnnMlpTrainMethod.Backprop);
                    network.BackpropWeightScale   = backpropWeightScale;
                    network.BackpropMomentumScale = backpropMomentumScale;

                    //network.Save("tmp.xml"); // Save temp weights to file for correction before training

                    ActivationFunctionHardFix(network);                                                                  // Fix min max values
                    network.Read(new FileStorage("tmp.xml", FileStorage.Mode.Read).GetFirstTopLevelNode());              // Read Fixed values for training
                    TrainData training = new TrainData(sample_in, Emgu.CV.ML.MlEnum.DataLayoutType.RowSample, response); // Creating training data

                    network.Train(training);                                                                             // Start Training
                    network.Save(modelName + ".xml");
                }
                return true;
            }
            catch (Exception)
            {
                return false;
            }
        }
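A minimal call sketch for trainingMLP, using hypothetical data sizes (three samples with four features each and one target value per sample):

            Matrix<float> inputs  = new Matrix<float>(3, 4);           // one row per training sample
            Matrix<float> targets = new Matrix<float>(3, 1);           // one expected output per sample
            // ... fill inputs and targets with training values ...
            bool trained = trainingMLP(inputs, targets, "myModel");    // writes myModel.xml on success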
Example #4
        public void TestANN_MLP()
        {
            int trainSampleCount = 100;

            #region Generate the training data and classes

            Matrix <float> trainData    = new Matrix <float>(trainSampleCount, 2);
            Matrix <float> trainClasses = new Matrix <float>(trainSampleCount, 1);

            Image <Bgr, Byte> img = new Image <Bgr, byte>(500, 500);

            Matrix <float> sample     = new Matrix <float>(1, 2);
            Matrix <float> prediction = new Matrix <float>(1, 1);

            Matrix <float> trainData1 = trainData.GetRows(0, trainSampleCount >> 1, 1);
            trainData1.SetRandNormal(new MCvScalar(200), new MCvScalar(50));
            Matrix <float> trainData2 = trainData.GetRows(trainSampleCount >> 1, trainSampleCount, 1);
            trainData2.SetRandNormal(new MCvScalar(300), new MCvScalar(50));

            Matrix <float> trainClasses1 = trainClasses.GetRows(0, trainSampleCount >> 1, 1);
            trainClasses1.SetValue(1);
            Matrix <float> trainClasses2 = trainClasses.GetRows(trainSampleCount >> 1, trainSampleCount, 1);
            trainClasses2.SetValue(2);

            #endregion

            using (Matrix <int> layerSize = new Matrix <int>(new int[] { 2, 5, 1 }))
                using (Mat layerSizeMat = layerSize.Mat)

                    using (TrainData td = new TrainData(trainData, MlEnum.DataLayoutType.RowSample, trainClasses))
                        using (ANN_MLP network = new ANN_MLP())
                        {
                            network.SetLayerSizes(layerSizeMat);
                            network.SetActivationFunction(ANN_MLP.AnnMlpActivationFunction.SigmoidSym, 0, 0);
                            network.TermCriteria = new MCvTermCriteria(10, 1.0e-8);
                            network.SetTrainMethod(ANN_MLP.AnnMlpTrainMethod.Backprop, 0.1, 0.1);
                            network.Train(td, (int)Emgu.CV.ML.MlEnum.AnnMlpTrainingFlag.Default);

#if !NETFX_CORE
                            String fileName = Path.Combine(Path.GetTempPath(), "ann_mlp_model.xml");
                            network.Save(fileName);
                            if (File.Exists(fileName))
                            {
                                File.Delete(fileName);
                            }
#endif

                            for (int i = 0; i < img.Height; i++)
                            {
                                for (int j = 0; j < img.Width; j++)
                                {
                                    sample.Data[0, 0] = j;
                                    sample.Data[0, 1] = i;
                                    network.Predict(sample, prediction);

                                    // estimates the response and get the neighbors' labels
                                    float response = prediction.Data[0, 0];

                                    // highlight the pixel depending on the accuracy (or confidence)
                                    img[i, j] = response < 1.5 ? new Bgr(90, 0, 0) : new Bgr(0, 90, 0);
                                }
                            }
                        }

            // display the original training samples
            for (int i = 0; i < (trainSampleCount >> 1); i++)
            {
                PointF p1 = new PointF(trainData1[i, 0], trainData1[i, 1]);
                img.Draw(new CircleF(p1, 2), new Bgr(255, 100, 100), -1);
                PointF p2 = new PointF((int)trainData2[i, 0], (int)trainData2[i, 1]);
                img.Draw(new CircleF(p2, 2), new Bgr(100, 255, 100), -1);
            }

            //Emgu.CV.UI.ImageViewer.Show(img);
        }
Example #5
        public void TestANN_MLP()
        {
            int trainSampleCount = 100;

            #region Generate the training data and classes
            Matrix<float> trainData = new Matrix<float>(trainSampleCount, 2);
            Matrix<float> trainClasses = new Matrix<float>(trainSampleCount, 1);

            Image<Bgr, Byte> img = new Image<Bgr, byte>(500, 500);

            Matrix<float> sample = new Matrix<float>(1, 2);
            Matrix<float> prediction = new Matrix<float>(1, 1);

            Matrix<float> trainData1 = trainData.GetRows(0, trainSampleCount >> 1, 1);
            trainData1.SetRandNormal(new MCvScalar(200), new MCvScalar(50));
            Matrix<float> trainData2 = trainData.GetRows(trainSampleCount >> 1, trainSampleCount, 1);
            trainData2.SetRandNormal(new MCvScalar(300), new MCvScalar(50));

            Matrix<float> trainClasses1 = trainClasses.GetRows(0, trainSampleCount >> 1, 1);
            trainClasses1.SetValue(1);
            Matrix<float> trainClasses2 = trainClasses.GetRows(trainSampleCount >> 1, trainSampleCount, 1);
            trainClasses2.SetValue(2);
            #endregion

            Matrix<int> layerSize = new Matrix<int>(new int[] { 2, 5, 1 });

            MCvANN_MLP_TrainParams parameters = new MCvANN_MLP_TrainParams();
            parameters.term_crit = new MCvTermCriteria(10, 1.0e-8);
            parameters.train_method = Emgu.CV.ML.MlEnum.ANN_MLP_TRAIN_METHOD.BACKPROP;
            parameters.bp_dw_scale = 0.1;
            parameters.bp_moment_scale = 0.1;

            using (ANN_MLP network = new ANN_MLP(layerSize, Emgu.CV.ML.MlEnum.ANN_MLP_ACTIVATION_FUNCTION.SIGMOID_SYM, 1.0, 1.0))
            {
                network.Train(trainData, trainClasses, null, null, parameters, Emgu.CV.ML.MlEnum.ANN_MLP_TRAINING_FLAG.DEFAULT);
                network.Save("ann_mlp_model.xml");

                for (int i = 0; i < img.Height; i++)
                {
                    for (int j = 0; j < img.Width; j++)
                    {
                        sample.Data[0, 0] = j;
                        sample.Data[0, 1] = i;
                        network.Predict(sample, prediction);

                        // estimates the response and get the neighbors' labels
                        float response = prediction.Data[0, 0];

                        // highlight the pixel depending on the accuracy (or confidence)
                        img[i, j] = response < 1.5 ? new Bgr(90, 0, 0) : new Bgr(0, 90, 0);
                    }
                }
            }

            // display the original training samples
            for (int i = 0; i < (trainSampleCount >> 1); i++)
            {
                PointF p1 = new PointF(trainData1[i, 0], trainData1[i, 1]);
                img.Draw(new CircleF(p1, 2), new Bgr(255, 100, 100), -1);
                PointF p2 = new PointF((int)trainData2[i, 0], (int)trainData2[i, 1]);
                img.Draw(new CircleF(p2, 2), new Bgr(100, 255, 100), -1);
            }
        }
Example #6
        private void Btn_ann2_Click(object sender, EventArgs e)
        {
            this.prepareData();

            if (trainNegData?.Count == 0 || trainActData?.Count == 0)
            {
                MessageBox.Show("训练数据不能为空");
                return;
            }
            int            trainSampleCount = trainActData.Count + trainNegData.Count;
            int            colCount         = width * height;
            Matrix <float> trainData        = new Matrix <float>(trainSampleCount, colCount);
            Matrix <float> trainClasses     = new Matrix <float>(trainSampleCount, 1);

            Matrix <float> sample     = new Matrix <float>(1, colCount);
            Matrix <float> prediction = new Matrix <float>(1, 1);

            // Prepare the positive (marked-answer) training data
            var actCount = trainActData.Count;

            //Matrix<float> trainActDataMatr = new Matrix<float>(actCount, width * height);
            //Matrix<float> trainActClassesMatr = new Matrix<float>(actCount, 1);

            for (int i = 0; i < actCount; i++)
            {
                var colData   = trainActData[i];
                var colCount1 = colData.Count;
                for (int j = 0; j < colCount1; j++)
                {
                    trainData.Data[i, j] = trainActData[i][j];
                }

                trainClasses.Data[i, 0] = 1;
                //trainClasses.Data[i, 1] = 0;
            }

            // Prepare the negative (unmarked-answer) training data
            var negCount = trainNegData.Count;

            //Matrix<float> trainNegDataMatr = new Matrix<float>(negCount, width * height);
            //Matrix<float> trainNegClassesMatr = new Matrix<float>(negCount, 1);
            for (int i = 0; i < negCount; i++)
            {
                var colData   = trainNegData[i];
                var colCount1 = colData.Count;
                for (int j = 0; j < colCount1; j++)
                {
                    trainData.Data[i + actCount, j] = trainNegData[i][j];
                }

                trainClasses.Data[i + actCount, 0] = 0;
                //trainClasses.Data[i + actCount, 1] = 1;
            }

            // Training
            using (Matrix <int> layerSize = new Matrix <int>(new int[] { 286, 10, 10, 1 }))
                using (Mat layerSizeMat = layerSize.Mat)
                    using (TrainData td = new TrainData(trainData, Emgu.CV.ML.MlEnum.DataLayoutType.RowSample, trainClasses))
                        using (ANN_MLP network = new ANN_MLP())
                        {
                            network.SetLayerSizes(layerSizeMat);
                            network.SetActivationFunction(ANN_MLP.AnnMlpActivationFunction.SigmoidSym, 0, 0);
                            network.TermCriteria = new MCvTermCriteria(10, 1.0e-8);
                            network.SetTrainMethod(ANN_MLP.AnnMlpTrainMethod.Backprop, 0.01, 0.01);
                            network.Train(td, (int)Emgu.CV.ML.MlEnum.AnnMlpTrainingFlag.Default);


                            //String fileName = "ann_mlp_model.xml"; //Path.Combine(Path.GetTempPath(), "ann_mlp_model.xml");
                            network.Save(annFileName);
                            //if (File.Exists(fileName))
                            //    File.Delete(fileName);

                            // Testing
                            // 1. Test the positive (marked) samples
                            var testActCount  = testActData.Count;
                            var rightActCount = 0; // number of correctly recognized positive (act) samples
                            for (int i = 0; i < testActCount; i++)
                            {
                                var testData = testActData[i];
                                for (int j = 0; j < testData.Count; j++)
                                {
                                    sample[0, j] = testData[j];
                                }
                                network.Predict(sample, prediction);
                                float response = prediction.Data[0, 0];
                                if (response > 0.5)
                                {
                                    rightActCount++;
                                    Console.WriteLine($"该数据是涂答的,正确识别{response}");
                                }
                                else
                                {
                                    Console.WriteLine($"该数据是涂答的,错误识别{response}");
                                }
                            }

                            // 2. Test the negative (unmarked) samples
                            var testNegCount  = testNegData.Count;
                            var rightNegCount = 0; // number of correctly recognized negative (neg) samples
                            for (int i = 0; i < testNegCount; i++)
                            {
                                var testData = testNegData[i];
                                for (int j = 0; j < testData.Count; j++)
                                {
                                    sample[0, j] = testData[j];
                                }
                                network.Predict(sample, prediction);
                                float response = prediction.Data[0, 0];
                                if (response <= 0.5)
                                {
                                    rightNegCount++;
                                    Console.WriteLine($"该数据是未涂答的,正确识别{response}");
                                }
                                else
                                {
                                    Console.WriteLine($"该数据是未涂答的,错误识别{response}");
                                }
                            }
                            MessageBox.Show("训练完毕,并测试");
                        }
        }
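The test loops above count correct detections but never report an overall rate. A small follow-up sketch (same variables, assumed still in scope inside the using block) that prints the accuracy:

            double accuracy = (rightActCount + rightNegCount) * 1.0 / (testActCount + testNegCount);
            Console.WriteLine($"Accuracy: {accuracy:P1} ({rightActCount}/{testActCount} marked, {rightNegCount}/{testNegCount} unmarked)");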
Example #7
        private void Btn_CNN2_Click(object sender, EventArgs e)
        {
            var positiveData = GetPositiveData();
            var negativeData = GetNegativeData();

            if (positiveData?.Count == 0 || negativeData?.Count == 0)
            {
                MessageBox.Show("训练数据不能为空");
                return;
            }
            int trainSampleCount = positiveData.Count + negativeData.Count;

            Matrix <float> trainData    = new Matrix <float>(trainSampleCount, 2);
            Matrix <float> trainClasses = new Matrix <float>(trainSampleCount, 1);

            Matrix <float> sample     = new Matrix <float>(1, 2);
            Matrix <float> prediction = new Matrix <float>(1, 1);


            for (int i = 0; i < positiveData.Count; i++)
            {
                var item = positiveData[i];
                trainData.Data[i, 0] = item.Percent;
                trainData.Data[i, 1] = item.Avg;

                trainClasses.Data[i, 0] = 1;
            }
            for (int i = 0; i < negativeData.Count; i++)
            {
                var item = negativeData[i];
                int row  = positiveData.Count + i;
                trainData.Data[row, 0] = item.Percent;
                trainData.Data[row, 1] = item.Avg;

                trainClasses.Data[row, 0] = 0;
            }

            Image <Bgr, Byte> img = new Image <Bgr, byte>(765, 300);

            using (Matrix <int> layerSize = new Matrix <int>(new int[] { 2, 5, 1 }))
                using (Mat layerSizeMat = layerSize.Mat)
                    using (TrainData td = new TrainData(trainData, Emgu.CV.ML.MlEnum.DataLayoutType.RowSample, trainClasses))
                        using (ANN_MLP network = new ANN_MLP())
                        {
                            network.SetLayerSizes(layerSizeMat);
                            network.SetActivationFunction(ANN_MLP.AnnMlpActivationFunction.SigmoidSym, 0, 0);
                            network.TermCriteria = new MCvTermCriteria(10, 1.0e-8);
                            network.SetTrainMethod(ANN_MLP.AnnMlpTrainMethod.Backprop, 0.1, 0.1);
                            network.Train(td, (int)Emgu.CV.ML.MlEnum.AnnMlpTrainingFlag.Default);


                            //String fileName = "ann_mlp_model.xml"; //Path.Combine(Path.GetTempPath(), "ann_mlp_model.xml");
                            network.Save(annFileName);
                            //if (File.Exists(fileName))
                            //    File.Delete(fileName);

                            // Draw the decision map

                            for (int i = 0; i < img.Height; i++)
                            {
                                for (int j = 0; j < img.Width; j++)
                                {
                                    sample.Data[0, 0] = i * 1.0f / (100 * 3);
                                    sample.Data[0, 1] = 255 - j * 1.0f / 3.0f;
                                    network.Predict(sample, prediction);

                                    // estimates the response and get the neighbors' labels
                                    float response = prediction.Data[0, 0];

                                    // highlight the pixel depending on the accuracy (or confidence)
                                    img[i, j] = response < 0.5 ? new Bgr(90, 0, 0) : new Bgr(0, 90, 0);
                                }
                            }
                        }

            // display the original training samples
            for (int i = 0; i < positiveData.Count; i++)
            {
                var    d  = positiveData[i];
                PointF p1 = new PointF((255 - d.Avg) * 3, d.Percent * 300);
                img.Draw(new CircleF(p1, 2), new Bgr(255, 100, 100), -1);
            }
            for (int i = 0; i < negativeData.Count; i++)
            {
                var    d  = negativeData[i];
                PointF p1 = new PointF((255 - d.Avg) * 3, d.Percent * 300);
                img.Draw(new CircleF(p1, 2), new Bgr(100, 255, 100), -1);
            }
            this.ib_result.Image = img;
            MessageBox.Show("训练完毕");
        }
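A usage sketch for classifying one new sample after training, with hypothetical Percent and Avg values and the same 0.5 threshold used above (network is assumed to still be in scope inside the using block):

            Matrix<float> oneSample     = new Matrix<float>(1, 2);
            Matrix<float> onePrediction = new Matrix<float>(1, 1);
            oneSample.Data[0, 0] = 0.35f;                              // hypothetical Percent value
            oneSample.Data[0, 1] = 120f;                               // hypothetical Avg value
            network.Predict(oneSample, onePrediction);
            bool isPositive = onePrediction.Data[0, 0] >= 0.5f;        // true => positive class (label 1)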
Example #8
File: Form1.cs Project: markxma1/XNeuron
        private void button1_Click(object sender, EventArgs e)
        {
            int trainSampleCount = 100;

            #region Generate the training data and classes
            Matrix <float> trainData    = new Matrix <float>(trainSampleCount, 2);
            Matrix <float> trainClasses = new Matrix <float>(trainSampleCount, 1);

            Image <Bgr, Byte> img = new Image <Bgr, byte>(500, 500);

            Matrix <float> sample     = new Matrix <float>(1, 2);
            Matrix <float> prediction = new Matrix <float>(1, 1);

            Matrix <float> trainData1 = trainData.GetRows(0, trainSampleCount >> 1, 1);
            trainData1.SetRandNormal(new MCvScalar(200), new MCvScalar(50));
            Matrix <float> trainData2 = trainData.GetRows(trainSampleCount >> 1, trainSampleCount, 1);
            trainData2.SetRandNormal(new MCvScalar(300), new MCvScalar(50));

            Matrix <float> trainClasses1 = trainClasses.GetRows(0, trainSampleCount >> 1, 1);
            trainClasses1.SetValue(1);
            Matrix <float> trainClasses2 = trainClasses.GetRows(trainSampleCount >> 1, trainSampleCount, 1);
            trainClasses2.SetValue(2);
            #endregion

            using (Matrix <int> layerSize = new Matrix <int>(new int[] { 2, 10, 2 }))
                using (Mat layerSizeMat = layerSize.Mat)

                    using (TrainData td = new TrainData(trainData, Emgu.CV.ML.MlEnum.DataLayoutType.RowSample, trainClasses))
                        using (ANN_MLP network = new ANN_MLP())
                        {
                            network.SetLayerSizes(layerSizeMat);
                            network.SetActivationFunction(ANN_MLP.AnnMlpActivationFunction.SigmoidSym);
                            network.TermCriteria = new MCvTermCriteria(10000, 1.0e-8);
                            network.SetTrainMethod(ANN_MLP.AnnMlpTrainMethod.Backprop, 0.1, 0.1);
                            network.Train(td);
                            network.Save("temp.txt");
                            for (int i = 0; i < img.Height; i++)
                            {
                                for (int j = 0; j < img.Width; j++)
                                {
                                    sample.Data[0, 0] = j;
                                    sample.Data[0, 1] = i;
                                    network.Predict(sample, prediction);

                                    // estimates the response and get the neighbors' labels
                                    float response = prediction.Data[0, 0];

                                    // highlight the pixel depending on the accuracy (or confidence)
                                    if (response < 1.5)
                                    {
                                        img[i, j] = new Bgr(90, 0, 0);
                                    }
                                    else
                                    {
                                        img[i, j] = new Bgr(0, 90, 0);
                                    }
                                }
                            }
                        }

            // display the original training samples
            for (int i = 0; i < (trainSampleCount >> 1); i++)
            {
                PointF p1 = new PointF(trainData1[i, 0], trainData1[i, 1]);
                img.Draw(new CircleF(p1, 2), new Bgr(255, 100, 100), -1);
                PointF p2 = new PointF((int)trainData2[i, 0], (int)trainData2[i, 1]);
                img.Draw(new CircleF(p2, 2), new Bgr(100, 255, 100), -1);
            }
            pictureBox1.Image = img.ToBitmap();
            // Emgu.CV.UI.ImageViewer.Show(img);
        }
Example #9
        public void TestANN_MLP()
        {
            int trainSampleCount = 100;

            #region Generate the training data and classes
            Matrix <float> trainData    = new Matrix <float>(trainSampleCount, 2);
            Matrix <float> trainClasses = new Matrix <float>(trainSampleCount, 1);

            Image <Bgr, Byte> img = new Image <Bgr, byte>(500, 500);

            Matrix <float> sample     = new Matrix <float>(1, 2);
            Matrix <float> prediction = new Matrix <float>(1, 1);

            Matrix <float> trainData1 = trainData.GetRows(0, trainSampleCount >> 1, 1);
            trainData1.SetRandNormal(new MCvScalar(200), new MCvScalar(50));
            Matrix <float> trainData2 = trainData.GetRows(trainSampleCount >> 1, trainSampleCount, 1);
            trainData2.SetRandNormal(new MCvScalar(300), new MCvScalar(50));

            Matrix <float> trainClasses1 = trainClasses.GetRows(0, trainSampleCount >> 1, 1);
            trainClasses1.SetValue(1);
            Matrix <float> trainClasses2 = trainClasses.GetRows(trainSampleCount >> 1, trainSampleCount, 1);
            trainClasses2.SetValue(2);
            #endregion

            Matrix <int> layerSize = new Matrix <int>(new int[] { 2, 5, 1 });

            MCvANN_MLP_TrainParams parameters = new MCvANN_MLP_TrainParams();
            parameters.term_crit       = new MCvTermCriteria(10, 1.0e-8);
            parameters.train_method    = Emgu.CV.ML.MlEnum.ANN_MLP_TRAIN_METHOD.BACKPROP;
            parameters.bp_dw_scale     = 0.1;
            parameters.bp_moment_scale = 0.1;

            using (ANN_MLP network = new ANN_MLP(layerSize, Emgu.CV.ML.MlEnum.ANN_MLP_ACTIVATION_FUNCTION.SIGMOID_SYM, 1.0, 1.0))
            {
                network.Train(trainData, trainClasses, null, null, parameters, Emgu.CV.ML.MlEnum.ANN_MLP_TRAINING_FLAG.DEFAULT);
                network.Save("ann_mlp_model.xml");

                for (int i = 0; i < img.Height; i++)
                {
                    for (int j = 0; j < img.Width; j++)
                    {
                        sample.Data[0, 0] = j;
                        sample.Data[0, 1] = i;
                        network.Predict(sample, prediction);

                        // estimates the response and get the neighbors' labels
                        float response = prediction.Data[0, 0];

                        // highlight the pixel depending on the accuracy (or confidence)
                        img[i, j] = response < 1.5 ? new Bgr(90, 0, 0) : new Bgr(0, 90, 0);
                    }
                }
            }

            // display the original training samples
            for (int i = 0; i < (trainSampleCount >> 1); i++)
            {
                PointF p1 = new PointF(trainData1[i, 0], trainData1[i, 1]);
                img.Draw(new CircleF(p1, 2), new Bgr(255, 100, 100), -1);
                PointF p2 = new PointF((int)trainData2[i, 0], (int)trainData2[i, 1]);
                img.Draw(new CircleF(p2, 2), new Bgr(100, 255, 100), -1);
            }
        }