Example #1
        public void TrainANNModel()
        {
            LoadImageGray();
            // the output layer must know the number of classes.
            //var numberOfClasses = imageBindingModel.ConvertAll(x => x.Label).Distinct().Count();
            var numberOfClasses = 1;
            var numberInput     = resolutionImage.Width * resolutionImage.Height;

            int            trainSampleCount = imageBindingModel.Count;
            Matrix <float> trainData        = new Matrix <float>(trainSampleCount, numberInput);
            Matrix <float> trainClasses     = new Matrix <float>(trainSampleCount, numberOfClasses);

            for (int i = 0; i < imageBindingModel.Count; i++)
            {
                for (int j = 0; j < numberInput; j++)
                {
                    trainData[i, j] = imageBindingModel[i].Image.Bytes[j];
                }
                // the class label only needs to be written once per sample
                trainClasses[i, 0] = (float)imageBindingModel[i].Id;
            }

            Matrix <int> layerSize = new Matrix <int>(new int[] { numberInput, numberInput + numberInput, numberOfClasses });

            MCvANN_MLP_TrainParams parameters = new MCvANN_MLP_TrainParams();

            parameters.term_crit       = new MCvTermCriteria(100, 1.0e-8);
            parameters.train_method    = Emgu.CV.ML.MlEnum.ANN_MLP_TRAIN_METHOD.BACKPROP;
            parameters.bp_dw_scale     = 0.1;
            parameters.bp_moment_scale = 0.1;

            using (ANN_MLP network = new ANN_MLP(layerSize, Emgu.CV.ML.MlEnum.ANN_MLP_ACTIVATION_FUNCTION.SIGMOID_SYM, 1.0, 1.0))
            {
                network.Train(trainData, trainClasses, null, null, parameters, Emgu.CV.ML.MlEnum.ANN_MLP_TRAINING_FLAG.DEFAULT);


                Matrix <float> sample     = new Matrix <float>(1, numberInput);
                Matrix <float> prediction = new Matrix <float>(1, numberOfClasses);

                int recog_true = 0;

                for (int i = 0; i < imageBindingModel.Count; i++)
                {
                    for (int j = 0; j < numberInput; j++)
                    {
                        sample[0, j] = trainData[i, j];
                    }

                    network.Predict(sample, prediction);
                    var response = prediction.Data[0, 0];

                    if (Math.Abs(response - imageBindingModel[i].Id) < 0.5)
                    {
                        recog_true++;
                    }


                    Console.WriteLine($"recognized: {response} -- target: {imageBindingModel[i].Id} -- result: {Math.Abs(response - imageBindingModel[i].Id) < 0.5}");
                }
            }
        }
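Note: Example #1 uses the Emgu CV 2.x training API (MCvANN_MLP_TrainParams and the ANN_MLP constructor that takes the layer sizes). On Emgu CV 3.x and later that API no longer exists; a minimal sketch of the equivalent setup, reusing the trainData/trainClasses matrices built above and the same illustrative layer sizes, would look like this:
        // Sketch only: Emgu CV 3.x+ equivalent of the Example #1 training setup.
        using (Matrix <int> layerSize = new Matrix <int>(new int[] { numberInput, 2 * numberInput, numberOfClasses }))
        using (TrainData td = new TrainData(trainData, Emgu.CV.ML.MlEnum.DataLayoutType.RowSample, trainClasses))
        using (ANN_MLP network = new ANN_MLP())
        {
            network.SetLayerSizes(layerSize);
            network.SetActivationFunction(ANN_MLP.AnnMlpActivationFunction.SigmoidSym, 0, 0);
            network.TermCriteria = new MCvTermCriteria(100, 1.0e-8);
            network.SetTrainMethod(ANN_MLP.AnnMlpTrainMethod.Backprop, 0.1, 0.1);
            network.Train(td, (int)Emgu.CV.ML.MlEnum.AnnMlpTrainingFlag.Default);
        }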
Example #2
        public Matrix <float> testingMLP(Matrix <float> testData, string modelName, int hiddenLayers = 2, ANN_MLP.AnnMlpActivationFunction activationType = ANN_MLP.AnnMlpActivationFunction.SigmoidSym)
        {
            Matrix <float> finalResult = null;

            layerSize = new Matrix <int>(new int[] { testData.Cols, hiddenLayers, 1 });
            try
            {
                using (ANN_MLP network1 = new ANN_MLP()) // test the trained network
                {
                    network1.SetActivationFunction(activationType);
                    network1.SetLayerSizes(layerSize);

                    network1.Read(new FileStorage(modelName + ".xml", FileStorage.Mode.Read).GetFirstTopLevelNode()); // load the trained ANN weights

                    IInputArray  Sample_test = testData;
                    IOutputArray Result      = new Matrix <float>(1, 1);

                    network1.Predict(Sample_test, Result); //Start Network prediction

                    finalResult = (Matrix <float>)Result;
                    return(finalResult);
                }
            }
            catch (Exception ee)
            {
                return(finalResult);
            }
        }
Example #3
        static MLearner()
        {
            if (!File.Exists(annFileName))
            {
                return;
            }

            network = new ANN_MLP();

            network.Load(annFileName);
        }
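Example #3 only loads a previously saved model into the static network field. As a hedged sketch, a prediction call on that field might look like the following (the two-feature sample is an assumption, mirroring the two-input examples further down):
        // Sketch only: predicting with the statically loaded network.
        Matrix <float> sample     = new Matrix <float>(1, 2);
        Matrix <float> prediction = new Matrix <float>(1, 1);
        sample[0, 0] = 0.5f;
        sample[0, 1] = 0.25f;
        network.Predict(sample, prediction);
        float response = prediction.Data[0, 0]; // raw network output; threshold it to pick a class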
Example #4
        private void button1_Click(object sender, EventArgs e)
        {

            Matrix<int> layerSize = new Matrix<int>(new int[] { 2, 2, 1 });
            ANN_MLP nnPtr = new ANN_MLP();
            nnPtr.SetLayerSizes(layerSize);
            nnPtr.SetActivationFunction(ANN_MLP.AnnMlpActivationFunction.SigmoidSym);
            nnPtr.SetTrainMethod(ANN_MLP.AnnMlpTrainMethod.Backprop, 0.01, 0.01);

            // samples and responses are assumed to be fields defined elsewhere
            if (!nnPtr.Train(samples, Emgu.CV.ML.MlEnum.DataLayoutType.RowSample, responses))
                return;
        }
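Example #4 assumes that samples and responses already exist. A hypothetical sketch of how such training data could be built for a two-input network (names and values are illustrative only, not part of the original snippet):
        // Hypothetical data for Example #4: four 2-feature rows and one target per row.
        Matrix <float> samples   = new Matrix <float>(new float[,] { { 0, 0 }, { 0, 1 }, { 1, 0 }, { 1, 1 } });
        Matrix <float> responses = new Matrix <float>(new float[,] { { 0 }, { 1 }, { 1 }, { 0 } });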
Example #5
        public Form2()
        {
            InitializeComponent();
            // initialization
            bp = new ANN_MLP();
            Matrix<int> layerSizes = new Matrix<int>(new int[] { 2, 2, 2, 2, 1 });
            bp.SetLayerSizes(layerSizes);
            bp.SetActivationFunction(ANN_MLP.AnnMlpActivationFunction.Gaussian, 0, 0);
            bp.TermCriteria = new MCvTermCriteria(10, 1.0e-8);
            //bp.BackpropWeightScale = 0.1;
            //bp.BackpropMomentumScale = 0.1;
            bp.SetTrainMethod(ANN_MLP.AnnMlpTrainMethod.Backprop, 0, 0);
            // training data
            float[,] labels = new float[,] {
            { 0 }, { 1 }, { 0 }, { 1 }
            };
            Matrix<float> labelsMats = new Matrix<float>(labels);
            //Matrix<float> labelsMats = new Matrix<float>(count, 1);
            //Matrix<float> labelsMats1 = labelsMats.GetRows(0, count >> 1, 1);
            //labelsMats1.SetValue(1);
            //Matrix<float> labelsMats2 = labelsMats.GetRows(count >> 1, count, 1);
            //labelsMats2.SetValue(0);
            float[,] trainingData = new float[,] {
            { 1, 2 }, { 51, 52 }, { 111, 112 }, { 211, 212 }
            };
            for (int i = 0; i < trainingData.GetLength(0); i++) // normalize
            {
                for (int j = 0; j < trainingData.GetLength(1); j++)
                {
                    trainingData[i, j] /= 512;
                }
            }
            Matrix<float> trainingDataMat = new Matrix<float>(trainingData);
            //Matrix<float> trainingDataMat = new Matrix<float>(count, 2);
            //Matrix<float> trainingDataMat1 = trainingDataMat.GetRows(0, count >> 1, 1);
            //trainingDataMat1.SetRandNormal(new MCvScalar(200 / 512f), new MCvScalar(50 / 512f));
            //Matrix<float> trainingDataMat2 = trainingDataMat.GetRows(count >> 1, count, 1);
            //trainingDataMat2.SetRandNormal(new MCvScalar(300 / 512f), new MCvScalar(50 / 512f));

            TrainData tmpTrainData = new TrainData(trainingDataMat, Emgu.CV.ML.MlEnum.DataLayoutType.RowSample, labelsMats);
            bp.Train(tmpTrainData, (int)Emgu.CV.ML.MlEnum.AnnMlpTrainingFlag.Default);
//#if !NETFX_CORE
//                String fileName = Path.Combine(Application.StartupPath, "ann_mlp_model.xml");
//                bp.Save(fileName);
//                if (File.Exists(fileName))
//                    File.Delete(fileName);
//#endif
        }
Example #6
        bool Save(string filename)
        {
            FileStorage fs = new FileStorage(filename, FileStorage.Mode.Write);

            if (!fs.IsOpened)
            {
                return(false);
            }

            ANN_MLP network = new ANN_MLP();

            network.Write(fs);
            fs.Write("values" + values);
            fs.ReleaseAndGetString();
            return(true);
        }
Example #7
        private void ActivationFunctionHardFix(ANN_MLP network)
        {
            string tmpFile = "tmp.xml";

            network.Save(tmpFile); // Save current ANN network weights values
            StreamReader reader        = new StreamReader(tmpFile);
            string       configContent = reader.ReadToEnd();

            reader.Close();

            configContent = configContent.Replace("<min_val>0.", "<min_val>0"); // hack: rewrite the stored min/max scaling values so they read as 0 and 1
            configContent = configContent.Replace("<max_val>0.", "<max_val>1");
            configContent = configContent.Replace("<min_val1>0.", "<min_val1>0");
            configContent = configContent.Replace("<max_val1>0.", "<max_val1>1");

            StreamWriter writer = new StreamWriter(tmpFile, false);

            writer.Write(configContent);
            writer.Close();
        }
Example #8
        private void CreateBP()
        {
            bp = new ANN_MLP();
            Matrix <int> layerSizes = new Matrix <int>(new int[] {
                bpWidth * bpHeight,
                bpWidth * bpHeight + 100, bpRectangleCount * 4 + 50,
                //20, 20, 20, 20, 20, 20, 20, 20, 20, 20,
                //20, 20, 20, 20, 20, 20, 20, 20, 20, 20,
                bpRectangleCount * 4
            });

            bp.SetLayerSizes(layerSizes);
            bp.SetActivationFunction(ANN_MLP.AnnMlpActivationFunction.Gaussian, 1, 1);
            //bp.SetActivationFunction(ANN_MLP.AnnMlpActivationFunction.Gaussian, 0, 0);
            bp.TermCriteria = new MCvTermCriteria(1000, 1.0e-8);
            //bp.BackpropWeightScale = 0.1;
            //bp.BackpropMomentumScale = 0.1;
            bp.SetTrainMethod(ANN_MLP.AnnMlpTrainMethod.Backprop, 0.1, 0.1);
            //bp.SetTrainMethod(ANN_MLP.AnnMlpTrainMethod.Backprop, 0, 0);
        }
Example #9
        bool Read(string filename)
        {
            FileStorage fs = new FileStorage(filename, FileStorage.Mode.Read);

            if (!fs.IsOpened)
            {
                return(false);
            }

            ANN_MLP network = new ANN_MLP();

            network.Read(fs.GetRoot());
            values.Clear();
            //for (var iter = fs["values"]..begin(); iter != fs["values"].end(); iter++)
            //{
            //    values.push_back(*iter);
            //}

            fs.ReleaseAndGetString();
            return(true);
        }
Example #10
        public void RunTest()
        {
            float[,] trainFeaturesData =
            {
                {   0,   0 },
                {   0, 100 },
                { 100,   0 },
                { 100, 100 },
            };
            using var trainFeatures = new Mat(4, 2, MatType.CV_32F, trainFeaturesData);

            float[] trainLabelsData = { 1, 0, 1, 0 };
            using var trainLabels = new Mat(4, 1, MatType.CV_32F, trainLabelsData);

            using var model = ANN_MLP.Create();
            model.SetActivationFunction(ANN_MLP.ActivationFunctions.SigmoidSym, 0.1, 0.1);
            model.SetTrainMethod(ANN_MLP.TrainingMethods.BackProp, 0.1, 0.1);
            //model.TermCriteria = new TermCriteria(CriteriaType.MaxIter | CriteriaType.Eps, 10000, 0.0001);

            using var layerSize = new Mat(3, 1, MatType.CV_32SC1);
            layerSize.Set <int>(0, 2);
            layerSize.Set <int>(1, 10);
            layerSize.Set <int>(2, 1);
            model.SetLayerSizes(layerSize);

            bool trainSuccess = model.Train(trainFeatures, SampleTypes.RowSample, trainLabels);

            Assert.True(trainSuccess);
            Assert.True(model.IsTrained());

            float[] testFeatureData = { 0, 0 };
            using var testFeature = new Mat(1, 2, MatType.CV_32F, testFeatureData);

            using var result = new Mat();
            var detectedClass = model.Predict(testFeature, result);

            // TODO
            //Assert.Equal(-1, detectedClass);
        }
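As a follow-up to the test above, the raw output of the single output neuron can also be read back from the result Mat; a short sketch, assuming OpenCvSharp's Mat.At accessor:
        // Sketch only: read the activation of the single output neuron for the test sample.
        float rawOutput = result.At<float>(0, 0);
        Console.WriteLine($"predicted index: {detectedClass}, raw output: {rawOutput}");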
Example #11
        private void Btn_reg_Click(object sender, EventArgs e)
        {
            using (Matrix <int> layerSize = new Matrix <int>(new int[] { 2, 5, 1 }))
                using (Mat layerSizeMat = layerSize.Mat)
                    using (ANN_MLP network = new ANN_MLP())
                    {
                        network.Load(annFileName);
                        //network.SetLayerSizes(layerSizeMat);
                        //network.SetActivationFunction(ANN_MLP.AnnMlpActivationFunction.SigmoidSym, 0, 0);
                        //network.TermCriteria = new MCvTermCriteria(10, 1.0e-8);
                        //network.SetTrainMethod(ANN_MLP.AnnMlpTrainMethod.Backprop, 0.1, 0.1);
                        float[,] testData = new float[1, 2] {
                            { float.Parse(this.txb_percent.Text), float.Parse(this.txb_avg.Text) }
                        };
                        Matrix <float> sample     = new Matrix <float>(testData);
                        Matrix <float> prediction = new Matrix <float>(1, 1);

                        network.Predict(sample, prediction);
                        float response = prediction.Data[0, 0];

                        MessageBox.Show($"Prediction result: {response}");
                    }
        }
Example #12
        public bool trainingMLP(Matrix <float> inputData, Matrix <float> outputData, string modelName, int iteration = 1000, double learningRate = 0.01, int hiddenLayers = 2, ANN_MLP.AnnMlpActivationFunction activationType = ANN_MLP.AnnMlpActivationFunction.SigmoidSym, double backpropWeightScale = 0.1, double backpropMomentumScale = 0.2)
        {
            try
            {
                layerSize = new Matrix <int>(new int[] { inputData.Cols, hiddenLayers, 1 }); // input layer = number of feature columns, 'hiddenLayers' neurons in the single hidden layer, one output neuron

                IInputArray sample_in = inputData;
                IInputArray response  = outputData;



                //===========================================================
                using (ANN_MLP network = new ANN_MLP())
                {
                    network.SetActivationFunction(activationType);
                    network.SetLayerSizes(layerSize);
                    network.TermCriteria = new MCvTermCriteria(iteration, learningRate); // termination criteria: max iterations plus epsilon (the learningRate argument is used as the epsilon here)
                    network.SetTrainMethod(ANN_MLP.AnnMlpTrainMethod.Backprop);
                    network.BackpropWeightScale   = backpropWeightScale;
                    network.BackpropMomentumScale = backpropMomentumScale;

                    //network.Save("tmp.xml"); // Save temp weights to file for correction before training

                    ActivationFunctionHardFix(network);                                                                  // Fix min max values
                    network.Read(new FileStorage("tmp.xml", FileStorage.Mode.Read).GetFirstTopLevelNode());              // Read Fixed values for training
                    TrainData training = new TrainData(sample_in, Emgu.CV.ML.MlEnum.DataLayoutType.RowSample, response); // Creating training data

                    network.Train(training);                                                                             // Start Training
                    network.Save(modelName + ".xml");
                }
                return(true);
            }
            catch (Exception ee)
            {
                return(false);
            }
        }
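A possible end-to-end use of the trainingMLP/testingMLP pair from Examples #2 and #12, assuming the caller lives in the same class; the matrix contents, model name, and hidden-layer width are hypothetical:
        // Hypothetical caller: train on three 4-feature rows, then predict one of them.
        Matrix <float> inputs  = new Matrix <float>(new float[,] { { 0.1f, 0.2f, 0.3f, 0.4f }, { 0.5f, 0.6f, 0.7f, 0.8f }, { 0.9f, 0.8f, 0.7f, 0.6f } });
        Matrix <float> targets = new Matrix <float>(new float[,] { { 0 }, { 1 }, { 1 } });

        if (trainingMLP(inputs, targets, "myModel", iteration: 500, hiddenLayers: 4))
        {
            Matrix <float> testRow = new Matrix <float>(new float[,] { { 0.1f, 0.2f, 0.3f, 0.4f } });
            Matrix <float> output  = testingMLP(testRow, "myModel", hiddenLayers: 4);
            Console.WriteLine($"prediction: {output[0, 0]}");
        }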
Example #13
        public void TestANN_MLP()
        {
            int trainSampleCount = 100;

            #region Generate the training data and classes

            Matrix <float> trainData    = new Matrix <float>(trainSampleCount, 2);
            Matrix <float> trainClasses = new Matrix <float>(trainSampleCount, 1);

            Image <Bgr, Byte> img = new Image <Bgr, byte>(500, 500);

            Matrix <float> sample     = new Matrix <float>(1, 2);
            Matrix <float> prediction = new Matrix <float>(1, 1);

            Matrix <float> trainData1 = trainData.GetRows(0, trainSampleCount >> 1, 1);
            trainData1.SetRandNormal(new MCvScalar(200), new MCvScalar(50));
            Matrix <float> trainData2 = trainData.GetRows(trainSampleCount >> 1, trainSampleCount, 1);
            trainData2.SetRandNormal(new MCvScalar(300), new MCvScalar(50));

            Matrix <float> trainClasses1 = trainClasses.GetRows(0, trainSampleCount >> 1, 1);
            trainClasses1.SetValue(1);
            Matrix <float> trainClasses2 = trainClasses.GetRows(trainSampleCount >> 1, trainSampleCount, 1);
            trainClasses2.SetValue(2);

            #endregion

            using (Matrix <int> layerSize = new Matrix <int>(new int[] { 2, 5, 1 }))
                using (Mat layerSizeMat = layerSize.Mat)

                    using (TrainData td = new TrainData(trainData, MlEnum.DataLayoutType.RowSample, trainClasses))
                        using (ANN_MLP network = new ANN_MLP())
                        {
                            network.SetLayerSizes(layerSizeMat);
                            network.SetActivationFunction(ANN_MLP.AnnMlpActivationFunction.SigmoidSym, 0, 0);
                            network.TermCriteria = new MCvTermCriteria(10, 1.0e-8);
                            network.SetTrainMethod(ANN_MLP.AnnMlpTrainMethod.Backprop, 0.1, 0.1);
                            network.Train(td, (int)Emgu.CV.ML.MlEnum.AnnMlpTrainingFlag.Default);

#if !NETFX_CORE
                            String fileName = Path.Combine(Path.GetTempPath(), "ann_mlp_model.xml");
                            network.Save(fileName);
                            if (File.Exists(fileName))
                            {
                                File.Delete(fileName);
                            }
#endif

                            for (int i = 0; i < img.Height; i++)
                            {
                                for (int j = 0; j < img.Width; j++)
                                {
                                    sample.Data[0, 0] = j;
                                    sample.Data[0, 1] = i;
                                    network.Predict(sample, prediction);

                                    // read the predicted response
                                    float response = prediction.Data[0, 0];

                                    // highlight the pixel depending on the accuracy (or confidence)
                                    img[i, j] = response < 1.5 ? new Bgr(90, 0, 0) : new Bgr(0, 90, 0);
                                }
                            }
                        }

            // display the original training samples
            for (int i = 0; i < (trainSampleCount >> 1); i++)
            {
                PointF p1 = new PointF(trainData1[i, 0], trainData1[i, 1]);
                img.Draw(new CircleF(p1, 2), new Bgr(255, 100, 100), -1);
                PointF p2 = new PointF((int)trainData2[i, 0], (int)trainData2[i, 1]);
                img.Draw(new CircleF(p2, 2), new Bgr(100, 255, 100), -1);
            }

            //Emgu.CV.UI.ImageViewer.Show(img);
        }
Example #14
    public void ANN()
    {
        int trainSampleCount = 100;

        #region Generate the training data and classes
        Matrix <float> trainData    = new Matrix <float>(trainSampleCount, 2);
        Matrix <float> trainClasses = new Matrix <float>(trainSampleCount, 1);

        Image <Bgr, Byte> img = new Image <Bgr, byte>(500, 500);

        Matrix <float> sample     = new Matrix <float>(1, 2);
        Matrix <float> prediction = new Matrix <float>(1, 1);

        Matrix <float> trainData1 = trainData.GetRows(0, trainSampleCount >> 1, 1);
        trainData1.SetRandNormal(new MCvScalar(200), new MCvScalar(50));
        Matrix <float> trainData2 = trainData.GetRows(trainSampleCount >> 1, trainSampleCount, 1);
        trainData2.SetRandNormal(new MCvScalar(300), new MCvScalar(50));

        Matrix <float> trainClasses1 = trainClasses.GetRows(0, trainSampleCount >> 1, 1);
        trainClasses1.SetValue(1);
        Matrix <float> trainClasses2 = trainClasses.GetRows(trainSampleCount >> 1, trainSampleCount, 1);
        trainClasses2.SetValue(2);
        #endregion

        Matrix <int> layerSize = new Matrix <int>(new int[] { 2, 5, 1 });

        MCvANN_MLP_TrainParams parameters = new MCvANN_MLP_TrainParams();
        parameters.term_crit       = new MCvTermCriteria(10, 1.0e-8);
        parameters.train_method    = Emgu.CV.ML.MlEnum.ANN_MLP_TRAIN_METHOD.BACKPROP;
        parameters.bp_dw_scale     = 0.1;
        parameters.bp_moment_scale = 0.1;

        using (ANN_MLP network = new ANN_MLP(layerSize, Emgu.CV.ML.MlEnum.ANN_MLP_ACTIVATION_FUNCTION.SIGMOID_SYM, 1.0, 1.0))
        {
            network.Train(trainData, trainClasses, null, null, parameters, Emgu.CV.ML.MlEnum.ANN_MLP_TRAINING_FLAG.DEFAULT);

            for (int i = 0; i < img.Height; i++)
            {
                for (int j = 0; j < img.Width; j++)
                {
                    sample.Data[0, 0] = j;
                    sample.Data[0, 1] = i;
                    network.Predict(sample, prediction);

                    // read the predicted response
                    float response = prediction.Data[0, 0];

                    // highlight the pixel depending on the accuracy (or confidence)
                    img[i, j] = response < 1.5 ? new Bgr(90, 0, 0) : new Bgr(0, 90, 0);
                }
            }
        }

        // display the original training samples
        for (int i = 0; i < (trainSampleCount >> 1); i++)
        {
            PointF p1 = new PointF(trainData1[i, 0], trainData1[i, 1]);
            img.Draw(new CircleF(p1, 2), new Bgr(255, 100, 100), -1);
            PointF p2 = new PointF((int)trainData2[i, 0], (int)trainData2[i, 1]);
            img.Draw(new CircleF(p2, 2), new Bgr(100, 255, 100), -1);
        }
        Emgu.CV.UI.ImageViewer.Show(img);
    }
Example #15
        private void button1_Click(object sender, EventArgs e)
        {
            int trainSampleCount = 100;

            #region Generate the training data and classes
            Matrix <float> trainData    = new Matrix <float>(trainSampleCount, 2);
            Matrix <float> trainClasses = new Matrix <float>(trainSampleCount, 1);

            Image <Bgr, Byte> img = new Image <Bgr, byte>(500, 500);

            Matrix <float> sample     = new Matrix <float>(1, 2);
            Matrix <float> prediction = new Matrix <float>(1, 1);

            Matrix <float> trainData1 = trainData.GetRows(0, trainSampleCount >> 1, 1);
            trainData1.SetRandNormal(new MCvScalar(200), new MCvScalar(50));
            Matrix <float> trainData2 = trainData.GetRows(trainSampleCount >> 1, trainSampleCount, 1);
            trainData2.SetRandNormal(new MCvScalar(300), new MCvScalar(50));

            Matrix <float> trainClasses1 = trainClasses.GetRows(0, trainSampleCount >> 1, 1);
            trainClasses1.SetValue(1);
            Matrix <float> trainClasses2 = trainClasses.GetRows(trainSampleCount >> 1, trainSampleCount, 1);
            trainClasses2.SetValue(2);
            #endregion

            using (Matrix <int> layerSize = new Matrix <int>(new int[] { 2, 10, 1 })) // the output layer must match the single response column
                using (Mat layerSizeMat = layerSize.Mat)

                    using (TrainData td = new TrainData(trainData, Emgu.CV.ML.MlEnum.DataLayoutType.RowSample, trainClasses))
                        using (ANN_MLP network = new ANN_MLP())
                        {
                            network.SetLayerSizes(layerSizeMat);
                            network.SetActivationFunction(ANN_MLP.AnnMlpActivationFunction.SigmoidSym);
                            network.TermCriteria = new MCvTermCriteria(10000, 1.0e-8);
                            network.SetTrainMethod(ANN_MLP.AnnMlpTrainMethod.Backprop, 0.1, 0.1);
                            network.Train(td);
                            network.Save("temp.txt");
                            for (int i = 0; i < img.Height; i++)
                            {
                                for (int j = 0; j < img.Width; j++)
                                {
                                    sample.Data[0, 0] = j;
                                    sample.Data[0, 1] = i;
                                    network.Predict(sample, prediction);

                                    // read the predicted response
                                    float response = prediction.Data[0, 0];

                                    // highlight the pixel depending on the accuracy (or confidence)
                                    if (response < 1.5)
                                    {
                                        img[i, j] = new Bgr(90, 0, 0);
                                    }
                                    else
                                    {
                                        img[i, j] = new Bgr(0, 90, 0);
                                    }
                                }
                            }
                        }

            // display the original training samples
            for (int i = 0; i < (trainSampleCount >> 1); i++)
            {
                PointF p1 = new PointF(trainData1[i, 0], trainData1[i, 1]);
                img.Draw(new CircleF(p1, 2), new Bgr(255, 100, 100), -1);
                PointF p2 = new PointF((int)trainData2[i, 0], (int)trainData2[i, 1]);
                img.Draw(new CircleF(p2, 2), new Bgr(100, 255, 100), -1);
            }
            pictureBox1.Image = img.ToBitmap();
            // Emgu.CV.UI.ImageViewer.Show(img);
        }
Example #16
        private async void moduleFeatureExtraction(int first, int last)
        {
            string fghfh = "";

            double[,] RawData = new double[16, 3780];
            int mid  = (first + last) / 2;
            int low  = mid - 8;
            int high = mid + 8;

            for (int i = 0; i < 16; i++)
            {
                for (int j = 0; j < 26; j++)
                {
                    // one-hot encode the current class index (adasas)
                    response[i, j] = (j == adasas) ? 1 : 0;
                }
            }
            adasas++;
            if (low < first)
            {
                low++;
            }
            if (high > last)
            {
                high--; // assumption: the original incremented 'low' here, but shrinking 'high' is what keeps the window inside [first, last]
            }
            int length = high - low;

            for (int k = (low); k < (high); k++)
            {
                string            frameName             = "gesture//" + k + ".jpeg";
                Image <Bgr, byte> featurExtractionInput = new Image <Bgr, byte>(frameName);
                //pictureBox3.Image = featurExtractionInput.Bitmap;
                //label4.Text = k.ToString();
                await Task.Delay(1000 / Convert.ToInt32(2));

                float[] desc = new float[3780];
                desc = GetVector(featurExtractionInput);

                int i = k - (low);
                for (int j = 0; j < 3780; j++)
                {
                    double val = Convert.ToDouble(desc[j]);
                    RawData.SetValue(val, i, j);
                }

                if (k == (high - 1))
                {
                    Matrix <Double> DataMatrix   = new Matrix <Double>(RawData);
                    Matrix <Double> Mean         = new Matrix <Double>(1, 3780);
                    Matrix <Double> EigenValues  = new Matrix <Double>(1, 3780);
                    Matrix <Double> EigenVectors = new Matrix <Double>(3780, 3780);
                    CvInvoke.PCACompute(DataMatrix, Mean, EigenVectors, 16);
                    Matrix <Double> result = new Matrix <Double>(16, 16);
                    CvInvoke.PCAProject(DataMatrix, Mean, EigenVectors, result);


                    String        filePath = @"test.xml";
                    StringBuilder sb       = new StringBuilder();
                    (new XmlSerializer(typeof(Matrix <double>))).Serialize(new StringWriter(sb), result);
                    XmlDocument xDoc = new XmlDocument();
                    xDoc.LoadXml(sb.ToString());

                    System.IO.File.WriteAllText(filePath, sb.ToString());
                    Matrix <double> matrix = (Matrix <double>)(new XmlSerializer(typeof(Matrix <double>))).Deserialize(new XmlNodeReader(xDoc));

                    string djf = null;
                    djf  = System.IO.File.ReadAllText(@"g.txt");
                    djf += Environment.NewLine;
                    djf += Environment.NewLine;
                    for (int p = 0; p < 16; p++)
                    {
                        for (int q = 0; q < 16; q++)
                        {
                            djf += p + " , " + q + "  " + matrix[p, q].ToString() + "    ";
                        }
                        djf += Environment.NewLine;
                    }
                    Matrix <float> masjhdb   = result.Convert <float>();
                    TrainData      trainData = new TrainData(masjhdb, DataLayoutType.RowSample, response);
                    int            features  = 16;
                    int            classes   = 26;
                    Matrix <int>   layers    = new Matrix <int>(6, 1);
                    layers[0, 0] = features;
                    layers[1, 0] = classes * 16;
                    layers[2, 0] = classes * 8;
                    layers[3, 0] = classes * 4;
                    layers[4, 0] = classes * 2;
                    layers[5, 0] = classes;
                    ANN_MLP     ann             = new ANN_MLP();
                    FileStorage fileStorageRead = new FileStorage(@"abc.xml", FileStorage.Mode.Read);
                    ann.Read(fileStorageRead.GetRoot(0));
                    ann.SetLayerSizes(layers);
                    ann.SetActivationFunction(ANN_MLP.AnnMlpActivationFunction.SigmoidSym, 0, 0);
                    ann.SetTrainMethod(ANN_MLP.AnnMlpTrainMethod.Backprop, 0, 0);
                    ann.Train(masjhdb, DataLayoutType.RowSample, response);
                    FileStorage fileStorageWrite = new FileStorage(@"abc.xml", FileStorage.Mode.Write);
                    ann.Write(fileStorageWrite);
                    Matrix <float> hehe = new Matrix <float>(1, 16);
                    for (int q = 0; q < 16; q++)
                    {
                        hehe[0, q] = masjhdb[11, q];
                    }
                    float real = ann.Predict(hehe);

                    fghfh += array[(int)real];
                    SpeechSynthesizer reader = new SpeechSynthesizer();

                    if (richTextBox1.Text != " ")
                    {
                        reader.Dispose();
                        reader = new SpeechSynthesizer();
                        reader.SpeakAsync(fghfh.ToString());
                    }
                    else
                    {
                        MessageBox.Show("No Text Present!");
                    }
                    richTextBox1.Text = fghfh.ToString();
                    System.IO.File.WriteAllText(@"g.txt", real.ToString());
                }
            }
        }
Example #17
        public void TestANN_MLP()
        {
            int trainSampleCount = 100;

             #region Generate the training data and classes
             Matrix<float> trainData = new Matrix<float>(trainSampleCount, 2);
             Matrix<float> trainClasses = new Matrix<float>(trainSampleCount, 1);

             Image<Bgr, Byte> img = new Image<Bgr, byte>(500, 500);

             Matrix<float> sample = new Matrix<float>(1, 2);
             Matrix<float> prediction = new Matrix<float>(1, 1);

             Matrix<float> trainData1 = trainData.GetRows(0, trainSampleCount >> 1, 1);
             trainData1.SetRandNormal(new MCvScalar(200), new MCvScalar(50));
             Matrix<float> trainData2 = trainData.GetRows(trainSampleCount >> 1, trainSampleCount, 1);
             trainData2.SetRandNormal(new MCvScalar(300), new MCvScalar(50));

             Matrix<float> trainClasses1 = trainClasses.GetRows(0, trainSampleCount >> 1, 1);
             trainClasses1.SetValue(1);
             Matrix<float> trainClasses2 = trainClasses.GetRows(trainSampleCount >> 1, trainSampleCount, 1);
             trainClasses2.SetValue(2);
             #endregion

             Matrix<int> layerSize = new Matrix<int>(new int[] { 2, 5, 1 });

             MCvANN_MLP_TrainParams parameters = new MCvANN_MLP_TrainParams();
             parameters.term_crit = new MCvTermCriteria(10, 1.0e-8);
             parameters.train_method = Emgu.CV.ML.MlEnum.ANN_MLP_TRAIN_METHOD.BACKPROP;
             parameters.bp_dw_scale = 0.1;
             parameters.bp_moment_scale = 0.1;

             using (ANN_MLP network = new ANN_MLP(layerSize, Emgu.CV.ML.MlEnum.ANN_MLP_ACTIVATION_FUNCTION.SIGMOID_SYM, 1.0, 1.0))
             {
            network.Train(trainData, trainClasses, null, null, parameters, Emgu.CV.ML.MlEnum.ANN_MLP_TRAINING_FLAG.DEFAULT);
            network.Save("ann_mlp_model.xml");

            for (int i = 0; i < img.Height; i++)
            {
               for (int j = 0; j < img.Width; j++)
               {
                  sample.Data[0, 0] = j;
                  sample.Data[0, 1] = i;
                  network.Predict(sample, prediction);

                  // read the predicted response
                  float response = prediction.Data[0,0];

                  // highlight the pixel depending on the accuracy (or confidence)
                  img[i, j] = response < 1.5 ? new Bgr(90, 0, 0) : new Bgr(0, 90, 0);
               }
            }
             }

             // display the original training samples
             for (int i = 0; i < (trainSampleCount >> 1); i++)
             {
            PointF p1 = new PointF(trainData1[i, 0], trainData1[i, 1]);
            img.Draw(new CircleF(p1, 2), new Bgr(255, 100, 100), -1);
            PointF p2 = new PointF((int)trainData2[i, 0], (int)trainData2[i, 1]);
            img.Draw(new CircleF(p2, 2), new Bgr(100, 255, 100), -1);
             }
        }
Example #18
 public MLP()
 {
     nnet = new ANN_MLP();
 }
Example #19
        private void Btn_CNN2_Click(object sender, EventArgs e)
        {
            var positiveData = GetPositiveData();
            var negativeData = GetNegativeData();

            if (positiveData == null || positiveData.Count == 0 || negativeData == null || negativeData.Count == 0)
            {
                MessageBox.Show("Training data cannot be empty");
                return;
            }
            int trainSampleCount = positiveData.Count + negativeData.Count;

            Matrix <float> trainData    = new Matrix <float>(trainSampleCount, 2);
            Matrix <float> trainClasses = new Matrix <float>(trainSampleCount, 1);

            Matrix <float> sample     = new Matrix <float>(1, 2);
            Matrix <float> prediction = new Matrix <float>(1, 1);


            for (int i = 0; i < positiveData.Count; i++)
            {
                var item = positiveData[i];
                trainData.Data[i, 0] = item.Percent;
                trainData.Data[i, 1] = item.Avg;

                trainClasses.Data[i, 0] = 1;
            }
            for (int i = 0; i < negativeData.Count; i++)
            {
                var item = negativeData[i];
                int row  = positiveData.Count + i;
                trainData.Data[row, 0] = item.Percent;
                trainData.Data[row, 1] = item.Avg;

                trainClasses.Data[row, 0] = 0;
            }

            Image <Bgr, Byte> img = new Image <Bgr, byte>(765, 300);

            using (Matrix <int> layerSize = new Matrix <int>(new int[] { 2, 5, 1 }))
                using (Mat layerSizeMat = layerSize.Mat)
                    using (TrainData td = new TrainData(trainData, Emgu.CV.ML.MlEnum.DataLayoutType.RowSample, trainClasses))
                        using (ANN_MLP network = new ANN_MLP())
                        {
                            network.SetLayerSizes(layerSizeMat);
                            network.SetActivationFunction(ANN_MLP.AnnMlpActivationFunction.SigmoidSym, 0, 0);
                            network.TermCriteria = new MCvTermCriteria(10, 1.0e-8);
                            network.SetTrainMethod(ANN_MLP.AnnMlpTrainMethod.Backprop, 0.1, 0.1);
                            network.Train(td, (int)Emgu.CV.ML.MlEnum.AnnMlpTrainingFlag.Default);


                            //String fileName = "ann_mlp_model.xml"; //Path.Combine(Path.GetTempPath(), "ann_mlp_model.xml");
                            network.Save(annFileName);
                            //if (File.Exists(fileName))
                            //    File.Delete(fileName);

                            // draw the decision map

                            for (int i = 0; i < img.Height; i++)
                            {
                                for (int j = 0; j < img.Width; j++)
                                {
                                    sample.Data[0, 0] = i * 1.0f / (100 * 3);
                                    sample.Data[0, 1] = 255 - j * 1.0f / 3.0f;
                                    network.Predict(sample, prediction);

                                    // read the predicted response
                                    float response = prediction.Data[0, 0];

                                    // highlight the pixel depending on the accuracy (or confidence)
                                    img[i, j] = response < 0.5 ? new Bgr(90, 0, 0) : new Bgr(0, 90, 0);
                                }
                            }
                        }

            // display the original training samples
            for (int i = 0; i < positiveData.Count; i++)
            {
                var    d  = positiveData[i];
                PointF p1 = new PointF((255 - d.Avg) * 3, d.Percent * 300);
                img.Draw(new CircleF(p1, 2), new Bgr(255, 100, 100), -1);
            }
            for (int i = 0; i < negativeData.Count; i++)
            {
                var    d  = negativeData[i];
                PointF p1 = new PointF((255 - d.Avg) * 3, d.Percent * 300);
                img.Draw(new CircleF(p1, 2), new Bgr(100, 255, 100), -1);
            }
            this.ib_result.Image = img;
            MessageBox.Show("Training complete");
        }
Example #20
        private void Btn_ann2_Click(object sender, EventArgs e)
        {
            this.prepareData();

            if (trainNegData == null || trainNegData.Count == 0 || trainActData == null || trainActData.Count == 0)
            {
                MessageBox.Show("Training data cannot be empty");
                return;
            }
            int            trainSampleCount = trainActData.Count + trainNegData.Count;
            int            colCount         = width * height;
            Matrix <float> trainData        = new Matrix <float>(trainSampleCount, colCount);
            Matrix <float> trainClasses     = new Matrix <float>(trainSampleCount, 1);

            Matrix <float> sample     = new Matrix <float>(1, colCount);
            Matrix <float> prediction = new Matrix <float>(1, 1);

            // prepare the positive (filled-in) samples
            var actCount = trainActData.Count;

            //Matrix<float> trainActDataMatr = new Matrix<float>(actCount, width * height);
            //Matrix<float> trainActClassesMatr = new Matrix<float>(actCount, 1);

            for (int i = 0; i < actCount; i++)
            {
                var colData   = trainActData[i];
                var colCount1 = colData.Count;
                for (int j = 0; j < colCount1; j++)
                {
                    trainData.Data[i, j] = trainActData[i][j];
                }

                trainClasses.Data[i, 0] = 1;
                //trainClasses.Data[i, 1] = 0;
            }

            // prepare the negative (blank) samples
            var negCount = trainNegData.Count;

            //Matrix<float> trainNegDataMatr = new Matrix<float>(negCount, width * height);
            //Matrix<float> trainNegClassesMatr = new Matrix<float>(negCount, 1);
            for (int i = 0; i < negCount; i++)
            {
                var colData   = trainNegData[i];
                var colCount1 = colData.Count;
                for (int j = 0; j < colCount1; j++)
                {
                    trainData.Data[i + actCount, j] = trainNegData[i][j];
                }

                trainClasses.Data[i + actCount, 0] = 0;
                //trainClasses.Data[i + actCount, 1] = 1;
            }

            // training
            using (Matrix <int> layerSize = new Matrix <int>(new int[] { 286, 10, 10, 1 }))
                using (Mat layerSizeMat = layerSize.Mat)
                    using (TrainData td = new TrainData(trainData, Emgu.CV.ML.MlEnum.DataLayoutType.RowSample, trainClasses))
                        using (ANN_MLP network = new ANN_MLP())
                        {
                            network.SetLayerSizes(layerSizeMat);
                            network.SetActivationFunction(ANN_MLP.AnnMlpActivationFunction.SigmoidSym, 0, 0);
                            network.TermCriteria = new MCvTermCriteria(10, 1.0e-8);
                            network.SetTrainMethod(ANN_MLP.AnnMlpTrainMethod.Backprop, 0.01, 0.01);
                            network.Train(td, (int)Emgu.CV.ML.MlEnum.AnnMlpTrainingFlag.Default);


                            //String fileName = "ann_mlp_model.xml"; //Path.Combine(Path.GetTempPath(), "ann_mlp_model.xml");
                            network.Save(annFileName);
                            //if (File.Exists(fileName))
                            //    File.Delete(fileName);

                            // testing
                            // 1. test the positive samples
                            var testActCount  = testActData.Count;
                            var rightActCount = 0; // number of correctly recognized positive samples
                            for (int i = 0; i < testActCount; i++)
                            {
                                var testData = testActData[i];
                                for (int j = 0; j < testData.Count; j++)
                                {
                                    sample[0, j] = testData[j];
                                }
                                network.Predict(sample, prediction);
                                float response = prediction.Data[0, 0];
                                if (response > 0.5)
                                {
                                    rightActCount++;
                                    Console.WriteLine($"Filled-in sample, correctly recognized: {response}");
                                }
                                else
                                {
                                    Console.WriteLine($"Filled-in sample, misrecognized: {response}");
                                }
                            }

                            // 2. test the negative samples
                            var testNegCount  = testNegData.Count;
                            var rightNegCount = 0; // number of correctly recognized negative samples
                            for (int i = 0; i < testNegCount; i++)
                            {
                                var testData = testNegData[i];
                                for (int j = 0; j < testData.Count; j++)
                                {
                                    sample[0, j] = testData[j];
                                }
                                network.Predict(sample, prediction);
                                float response = prediction.Data[0, 0];
                                if (response <= 0.5)
                                {
                                    rightNegCount++;
                                    Console.WriteLine($"Blank sample, correctly recognized: {response}");
                                }
                                else
                                {
                                    Console.WriteLine($"Blank sample, misrecognized: {response}");
                                }
                            }
                            MessageBox.Show("Training complete; testing finished");
                        }
        }
Example #21
        private void btn_ANNReg_Click(object sender, EventArgs e)
        {
            var regPath = txbregPath.Text;

            if (string.IsNullOrEmpty(regPath))
            {
                MessageBox.Show("The folder to recognize cannot be empty");
                return;
            }
            var isAct = ckbIsAct.Checked;

            var files    = Directory.GetFiles(regPath);
            var testData = new List <List <float> >();

            for (int i = 0; i < files.Length; i++)
            {
                var path = files[i];
                Image <Gray, byte> img = new Image <Gray, byte>(path);
                testData.Add(getImgData(img));
            }


            using (ANN_MLP network = new ANN_MLP())
            {
                network.Load(annFileName);

                int            colCount   = width * height;
                Matrix <float> sample     = new Matrix <float>(1, colCount);
                Matrix <float> prediction = new Matrix <float>(1, 1);

                // 1. run the test data
                var testCount  = testData.Count;
                var rightCount = 0; // number of correctly recognized samples
                for (int i = 0; i < testCount; i++)
                {
                    var testColData = testData[i];
                    for (int j = 0; j < testColData.Count; j++)
                    {
                        sample[0, j] = testColData[j];
                    }
                    network.Predict(sample, prediction);
                    float response = prediction.Data[0, 0];

                    if (isAct && response > 0.5)
                    {
                        rightCount++;
                        Console.WriteLine($"Filled-in sample, correctly recognized: {response}");
                    }
                    else if (isAct && response <= 0.5)
                    {
                        Console.WriteLine($"Filled-in sample, misrecognized: {response}");
                        File.Copy(files[i], Path.Combine(actRegErrorDir, Path.GetFileName(files[i])), true);
                    }
                    else if (!isAct && response <= 0.5)
                    {
                        rightCount++;
                        Console.WriteLine($"Blank sample, correctly recognized: {response}");
                    }
                    else if (!isAct && response > 0.5)
                    {
                        Console.WriteLine($"Blank sample, misrecognized: {response}");
                        File.Copy(files[i], Path.Combine(negRegErrorDir, Path.GetFileName(files[i])), true);
                    }
                    else
                    {
                        Console.WriteLine("Unknown recognition result");
                    }
                }

                var result = $"Tested: {testCount}, correct: {rightCount}, accuracy: {rightCount * 1.0 / testCount}";
                Console.WriteLine(result);
                MessageBox.Show(result);
            }
        }