Example #1
        public Support_Vector_Machine(ReadImages RM)
        {
            for (int g = 0; g < 5; ++g)
            {
                mm[g] = new List <Point>();
            }
            this.Clases  = RM.classes;
            this.DAtaset = RM;
            trainData    = new Matrix <float>(RM.TrainingSamples.Count, RM.TrainingSamples[0].Feature.Descriptor.Length);
            trainClasses = new Matrix <float>(RM.TrainingSamples.Count, 1);
            sample       = new Matrix <float>(1, RM.TrainingSamples[0].Feature.Descriptor.Length);
            string[] all_classes = RM.classes.ToArray();
            // Load the training samples and their class labels into the matrices.
            for (int u = 0; u < RM.TrainingSamples.Count; u++)
            {
                for (int p2 = 0; p2 < RM.TrainingSamples[0].Feature.Descriptor.Length; ++p2)
                {
                    trainData[u, p2] = RM.TrainingSamples[u].Feature.Descriptor[p2];
                }

                trainClasses[u, 0] = Map(RM.TrainingSamples[u].Lable[0]) + 1;
            }

            model        = new SVM();
            p            = new SVMParams();
            p.KernelType = Emgu.CV.ML.MlEnum.SVM_KERNEL_TYPE.LINEAR;
            p.SVMType    = Emgu.CV.ML.MlEnum.SVM_TYPE.C_SVC;
            p.C          = 1;
            p.TermCrit   = new MCvTermCriteria(100, 0.00001);
            //bool trained = model.Train(trainData, trainClasses, null, null, p);
            bool trained = model.TrainAuto(trainData, trainClasses, null, null, p.MCvSVMParams, 5);
        }
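        // A minimal classification sketch, not part of the original source: it assumes the
        // fields set up in the constructor above (model, sample, Clases) and the same
        // Map(label) + 1 class encoding used when trainClasses was filled.
        public string ClassifySketch(float[] descriptor)
        {
            for (int i = 0; i < descriptor.Length; i++)
            {
                sample[0, i] = descriptor[i];
            }
            float response = model.Predict(sample);        // encoded class label
            int index = (int)Math.Round(response) - 1;     // undo the +1 offset used in training
            string[] names = Clases.ToArray();             // assumes Clases holds the class names
            return (index >= 0 && index < names.Length) ? names[index] : "unknown";
        }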
        void TrainSVM(int times, ref float accuracy)
        {
            using (SVM model = new SVM())
            {
                SVMParams p = new SVMParams();
                p.KernelType = Emgu.CV.ML.MlEnum.SVM_KERNEL_TYPE.RBF;
                p.SVMType    = Emgu.CV.ML.MlEnum.SVM_TYPE.C_SVC;
                p.C          = 1;
                p.Gamma      = 0.1;
                p.TermCrit   = new MCvTermCriteria(50, 0.00001);
                bool trained = model.TrainAuto(trainData, trainClasses, null, null, p.MCvSVMParams, 5);

                Matrix <float> test = new Matrix <float>(1, featureNum);

                float count = 0;
                for (int i = 0; i < trainSampleCount; i++)
                {
                    for (int j = 0; j < featureNum; j++)
                    {
                        test.Data[0, j] = trainData.Data[i, j];
                    }
                    float response = model.Predict(test);
                    float classSam = trainClasses[i, 0];

                    // Count a hit whenever the prediction and the label agree on
                    // "class 10" versus "not class 10".
                    if ((response == 10) == (classSam == 10))
                    {
                        count++;
                    }
                }

                float acc = count / trainSampleCount;

                chart1.Series["Series1"].ChartType = SeriesChartType.Spline;
                chart1.Series["Series1"].Points.AddXY(times, acc);

                if (acc > accuracy)
                {
                    accuracy = acc;
                    model.Save("SVM_NOTE.txt");
                }

                Application.DoEvents();
            }
        }
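        // A usage sketch, not in the original source: TrainSVM takes the run index and a running
        // best accuracy by reference, so it can be called in a loop to chart one accuracy point
        // per run and keep the best model saved as "SVM_NOTE.txt". The method name and the use
        // of MessageBox here are assumptions.
        void TrainRepeatedlySketch(int runs)
        {
            float bestAccuracy = 0;
            for (int run = 0; run < runs; run++)
            {
                TrainSVM(run, ref bestAccuracy);   // adds one point to Series1 per call
            }
            MessageBox.Show("Best training accuracy: " + bestAccuracy);
        }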
        public void StartSVM(float[] data)
        {
            timeSeriGenerator = new TimeSeriGenerator <float>();
            MyTimeSeri <float> myTimeSeri = timeSeriGenerator.generateWithThisData(data, 6);

            Matrix <float> trainData    = new Matrix <float>(myTimeSeri.inputs);
            Matrix <float> trainClasses = new Matrix <float>(myTimeSeri.targets);

            svmModel = new SVM();
            SVMParams p = new SVMParams();

            p.KernelType = Emgu.CV.ML.MlEnum.SVM_KERNEL_TYPE.POLY;
            p.SVMType    = Emgu.CV.ML.MlEnum.SVM_TYPE.EPS_SVR; // for regression
            p.C          = 1;
            p.TermCrit   = new MCvTermCriteria(100, 0.00001);
            p.Gamma      = 1;
            p.Degree     = 1;
            p.P          = 1;
            p.Nu         = 0.1;

            bool trained = svmModel.TrainAuto(trainData, trainClasses, null, null, p.MCvSVMParams, 10);
        }
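        // A one-step forecasting sketch, not in the original source: it assumes the second
        // argument of generateWithThisData(data, 6) above is the input-window length, so the
        // six most recent observations form one prediction sample for the EPS_SVR model.
        public float ForecastNextSketch(float[] data)
        {
            Matrix<float> input = new Matrix<float>(1, 6);
            for (int i = 0; i < 6; i++)
            {
                input[0, i] = data[data.Length - 6 + i];   // last six observations as the lag window
            }
            return svmModel.Predict(input);
        }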
Example #4
        private Image <Bgr, Byte> svm()
        {
            Stopwatch timer = new Stopwatch();

            timer.Start();
            int trainSampleCount = 150;
            int sigma            = 60;

            #region Generate the training data and classes

            Matrix <float> trainData    = new Matrix <float>(trainSampleCount, 2);
            Matrix <float> trainClasses = new Matrix <float>(trainSampleCount, 1);

            Image <Bgr, Byte> img = new Image <Bgr, byte>(500, 500);

            Matrix <float> sample = new Matrix <float>(1, 2);

            Matrix <float> trainData1 = trainData.GetRows(0, trainSampleCount / 3, 1);
            trainData1.GetCols(0, 1).SetRandNormal(new MCvScalar(100), new MCvScalar(sigma));
            trainData1.GetCols(1, 2).SetRandNormal(new MCvScalar(300), new MCvScalar(sigma));

            Matrix <float> trainData2 = trainData.GetRows(trainSampleCount / 3, 2 * trainSampleCount / 3, 1);
            trainData2.SetRandNormal(new MCvScalar(400), new MCvScalar(sigma));

            Matrix <float> trainData3 = trainData.GetRows(2 * trainSampleCount / 3, trainSampleCount, 1);
            trainData3.GetCols(0, 1).SetRandNormal(new MCvScalar(300), new MCvScalar(sigma));
            trainData3.GetCols(1, 2).SetRandNormal(new MCvScalar(100), new MCvScalar(sigma));

            Matrix <float> trainClasses1 = trainClasses.GetRows(0, trainSampleCount / 3, 1);
            trainClasses1.SetValue(1);
            Matrix <float> trainClasses2 = trainClasses.GetRows(trainSampleCount / 3, 2 * trainSampleCount / 3, 1);
            trainClasses2.SetValue(2);
            Matrix <float> trainClasses3 = trainClasses.GetRows(2 * trainSampleCount / 3, trainSampleCount, 1);
            trainClasses3.SetValue(3);

            #endregion

            timer.Stop();
            MessageBox.Show("生成" + timer.ElapsedMilliseconds + "ms");
            timer.Reset();
            timer.Start();

            using (SVM model = new SVM()) {
                SVMParams p = new SVMParams();
                p.KernelType = Emgu.CV.ML.MlEnum.SVM_KERNEL_TYPE.LINEAR;
                p.SVMType    = Emgu.CV.ML.MlEnum.SVM_TYPE.C_SVC;
                p.C          = 1;
                p.TermCrit   = new MCvTermCriteria(100, 0.00001);

                //model.Load(@"D:\Play Data\训练数据");
                //bool trained = model.Train(trainData, trainClasses, null, null, p);
                bool trained = model.TrainAuto(trainData, trainClasses, null, null, p.MCvSVMParams, 5);
                timer.Stop();
                MessageBox.Show("训练" + timer.ElapsedMilliseconds + "ms");
                timer.Reset();
                timer.Start();

                for (int i = 0; i < img.Height; i++)
                {
                    for (int j = 0; j < img.Width; j++)
                    {
                        sample.Data[0, 0] = j;
                        sample.Data[0, 1] = i;

                        // Prediction and pixel coloring are left commented out here, so the
                        // timing below measures only sample construction; re-enable these
                        // lines to paint the decision regions.
                        //float response = model.Predict(sample);

                        //img[i, j] =
                        //   response == 1 ? new Bgr(90, 0, 0) :
                        //   response == 2 ? new Bgr(0, 90, 0) :
                        //   new Bgr(0, 0, 90);
                    }
                }
                //model.Save(@"D:\Play Data\训练数据");

                timer.Stop();
                MessageBox.Show("染色" + timer.ElapsedMilliseconds + "ms");
                timer.Reset();
                timer.Start();
                int c = model.GetSupportVectorCount();
                for (int i = 0; i < c; i++)
                {
                    float[] v  = model.GetSupportVector(i);
                    PointF  p1 = new PointF(v[0], v[1]);
                    img.Draw(new CircleF(p1, 4), new Bgr(128, 128, 128), 2);
                }
                timer.Stop();
                MessageBox.Show("画圈" + timer.ElapsedMilliseconds + "ms");
                timer.Reset();
                timer.Start();
            }

            // display the original training samples
            for (int i = 0; i < (trainSampleCount / 3); i++)
            {
                PointF p1 = new PointF(trainData1[i, 0], trainData1[i, 1]);
                img.Draw(new CircleF(p1, 2.0f), new Bgr(255, 100, 100), -1);
                PointF p2 = new PointF(trainData2[i, 0], trainData2[i, 1]);
                img.Draw(new CircleF(p2, 2.0f), new Bgr(100, 255, 100), -1);
                PointF p3 = new PointF(trainData3[i, 0], trainData3[i, 1]);
                img.Draw(new CircleF(p3, 2.0f), new Bgr(100, 100, 255), -1);
            }
            timer.Stop();
            MessageBox.Show("标点" + timer.ElapsedMilliseconds + "ms");
            timer.Reset();
            timer.Start();

            return(img);
        }
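        // Hypothetical usage, not in the original source: render the image returned by svm()
        // in the form; pictureBox1 is an assumed WinForms control name. With prediction still
        // commented out above, the image shows the support-vector circles and training samples.
        private void ShowSvmDemoSketch()
        {
            Image<Bgr, Byte> result = svm();
            pictureBox1.Image = result.ToBitmap();
        }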
        public void Start()
        {
            try
            {
                svmWriter    = new StreamWriter("Complex_Hybrid_SVMOutput.txt");
                arimaLogger  = new StreamWriter("Complex_Hybrid_ArimaGALog.txt");
                hybridWriter = new StreamWriter(OUTPUT_FILE_NAME);

                #region Loading the training data and classes and test data and test classes

                TimeSeriGenerator <float> timeSeriGenerator = new TimeSeriGenerator <float>();
                int numInp = 0;
                this.Dispatcher.Invoke(new Action(() => numInp = Int32.Parse(NumberOfInpTextBox.Text)));
                timeSeriGenerator.load(numInp);
                Dispatcher.Invoke(new Action(() => ActivityProgressBar.IsIndeterminate = true));
                Dispatcher.Invoke(new Action(() => numberOfTests         = Int32.Parse(OptimumTestTextBox.Text)));
                Dispatcher.Invoke(new Action(() => numberOfForecastTests = Int32.Parse(ForecastTestTextBox.Text)));
                myCategorizedTimeSeri = timeSeriGenerator.generate(numberOfTests, numberOfForecastTests);

                #endregion


                #region creating and training the svm model

                double          minError       = 9999999;
                SVM_KERNEL_TYPE bestKernelType = SVM_KERNEL_TYPE.LINEAR;
                double          bestEps        = 0.1;

                SVMParams      p;
                Matrix <float> trainData    = new Matrix <float>(myCategorizedTimeSeri.TrainInputs);
                Matrix <float> trainClasses = new Matrix <float>(myCategorizedTimeSeri.TrainTargets);
                Matrix <float> testData     = new Matrix <float>(myCategorizedTimeSeri.TestInputs);
                Matrix <float> testClasses  = new Matrix <float>(myCategorizedTimeSeri.TestTargets);

                foreach (SVM_KERNEL_TYPE tp in Enum.GetValues(typeof(SVM_KERNEL_TYPE)))
                {
                    for (double eps = 0.1; eps >= 0.00001; eps *= 0.1)
                    {
                        using (SVM model = new SVM())
                        {
                            p            = new SVMParams();
                            p.KernelType = tp;
                            p.SVMType    = SVM_TYPE.EPS_SVR; // for regression
                            p.C          = 1;
                            p.TermCrit   = new MCvTermCriteria(100, eps);
                            p.Gamma      = 1;
                            p.Degree     = 1;
                            p.P          = 1;
                            p.Nu         = 0.1;

                            bool trained = model.TrainAuto(trainData, trainClasses, null, null, p.MCvSVMParams, 10);

                            double error = getSumError(model, testData, testClasses);
                            if (trained && minError > error)
                            {
                                minError = error;

                                bestEps        = eps;
                                bestKernelType = tp;
                            }
                        }
                    }
                }

                Matrix <float> trainDataWithGATest    = new Matrix <float>(myCategorizedTimeSeri.getTrainWithTestInputs());
                Matrix <float> trainClassesWithGATest = new Matrix <float>(myCategorizedTimeSeri.getTrainWithTestTargets());

                svmModel     = new SVM();
                p            = new SVMParams();
                p.KernelType = bestKernelType;
                p.SVMType    = Emgu.CV.ML.MlEnum.SVM_TYPE.EPS_SVR; // for regression
                p.C          = 1;
                p.TermCrit   = new MCvTermCriteria(100, bestEps);
                p.Gamma      = 1;
                p.Degree     = 1;
                p.P          = 1;
                p.Nu         = 0.1;

                bool _trained = svmModel.TrainAuto(trainDataWithGATest, trainClassesWithGATest, null, null,
                                                   p.MCvSVMParams, 10);

                List <float> Et = getResidual(trainDataWithGATest, trainClassesWithGATest);
                svmWriter.Flush();
                svmWriter.Close();

                int bestD = StartArima(Et.ToArray());

                List <float> Zt = new List <float>();
                float        mu = Et.Average();
                if (bestD == 0)
                {
                    for (int i = 0; i < Et.Count; i++)
                    {
                        Zt.Add(Et[i] - mu);
                    }
                }
                else if (bestD == 1)
                {
                    Zt.Add(0);
                    for (int i = 1; i < Et.Count; i++)
                    {
                        Zt.Add(Et[i] - Et[i - 1] - mu);
                    }
                }
                else //else if (bestD == 2)    << CHECK HERE >>
                {
                    Zt.Add(0);
                    Zt.Add(0);
                    for (int i = 2; i < Et.Count; i++)
                    {
                        Zt.Add(Et[i] - 2 * Et[i - 1] + Et[i - 2] - mu);
                    }
                }

                Pair <int> bestAB = CreateComplexHybridModel(Et.ToArray(), Zt.ToArray());
                MessageBox.Show(bestAB.First + " , " + bestAB.Second, "Best A , B Found", MessageBoxButton.OK,
                                MessageBoxImage.Asterisk);

                // now our complex hybrid model is created

                double minErr = SVMComplexModelForBestModel(bestAB.First, bestAB.Second, Et.ToArray(), Zt.ToArray());
                MessageBox.Show("MinError In Training =>  " + minErr);

                double mse          = 0;
                double errorPercent = 0;
                double sumTargets   = 0;

                List <float>   results = new List <float>();
                Matrix <float> testIn  = new Matrix <float>(myCategorizedTimeSeri.ForecastTestInputs);
                Queue <float>  EtQueue = new Queue <float>();
                Queue <float>  ZtQueue = new Queue <float>();
                for (int i = 0; i < bestAB.First; i++)
                {
                    EtQueue.Enqueue(Et[Et.Count - bestAB.First + i]);
                }
                for (int i = 0; i < bestAB.Second; i++)
                {
                    ZtQueue.Enqueue(Zt[Zt.Count - bestAB.Second + i]);
                }
                for (int i = 0; i < numberOfForecastTests; i++)
                {
                    float   Lt      = svmModel.Predict(testIn.GetRow(i));
                    float[] inpTest = new float[bestAB.First + bestAB.Second + 1];
                    float[] EQArray = EtQueue.ToArray();
                    float[] ZQArray = ZtQueue.ToArray();
                    int     l       = 0;
                    for (int j = 0; j < bestAB.First; j++, l++)
                    {
                        inpTest[l] = EQArray[j];
                    }
                    inpTest[l++] = Lt;
                    for (int j = 0; j < bestAB.Second; j++, l++)
                    {
                        inpTest[l] = ZQArray[j];
                    }
                    float result = svmModelHybrid.Predict(new Matrix <float>(inpTest));
                    results.Add(result);
                    hybridWriter.WriteLine(result);
                    float target = myCategorizedTimeSeri.TestTargets[i];

                    //mse += Math.Pow(target - result, 2);
                    //errorPercent += Math.Abs(target - result);
                    //sumTargets += Math.Abs(target);

                    // preparing for next use in this for loop
                    float resi = target - Lt;    // float resi = target - result;   << CHECK HERE IMPORTANT >>
                    Et.Add(resi);
                    EtQueue.Dequeue();
                    EtQueue.Enqueue(resi);
                    ZtQueue.Dequeue();
                    mu = Et.Average();
                    if (bestD == 0)
                    {
                        ZtQueue.Enqueue(EQArray[EQArray.Length - 1] - mu);
                    }
                    else if (bestD == 1)
                    {
                        ZtQueue.Enqueue(EQArray[EQArray.Length - 1] - EQArray[EQArray.Length - 2] - mu);
                    }
                    else //else if (bestD == 2)    << CHECK HERE >>
                    {
                        ZtQueue.Enqueue(EQArray[EQArray.Length - 1] - 2 * EQArray[EQArray.Length - 2] +
                                        EQArray[EQArray.Length - 3] - mu);
                    }
                }
                //mse /= numberOfForecastTests;
                //hybridWriter.WriteLine("\n\nMSE =>  " + mse);
                //errorPercent /= sumTargets;
                //hybridWriter.WriteLine("\n\nERROR% =>  " + errorPercent*100);

                double _mse          = MyErrorParameters.MSE(results.ToArray(), myCategorizedTimeSeri.ForecastTestTargets);
                double _errorPercent = MyErrorParameters.ERROR_Percent(results.ToArray(), myCategorizedTimeSeri.ForecastTestTargets);
                hybridWriter.WriteLine("\n\n\nMSE & ERROR% are =>\n\n{0} {1}", _mse, _errorPercent);

                hybridWriter.Flush();
                hybridWriter.Close();

                MessageBox.Show(
                    String.Format(
                        "Complex Hybrid Model Created File {0} For Output Successfully Now , Please Check It Out .",
                        OUTPUT_FILE_NAME), "Hybrid SVM Arima Done", MessageBoxButton.OK,
                    MessageBoxImage.Information);

                #endregion
            }
            catch (Exception ex)
            {
                MessageBox.Show(ex.Message + "\n\n" + ex.StackTrace, "ERROR", MessageBoxButton.OK, MessageBoxImage.Error);
            }
        }
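        // getSumError is called above but not listed here; this is only a plausible sketch of it
        // (an assumption, not the original helper): accumulate the absolute prediction error of
        // the model over every test row.
        private double getSumErrorSketch(SVM model, Matrix<float> testData, Matrix<float> testClasses)
        {
            double sum = 0;
            for (int i = 0; i < testData.Rows; i++)
            {
                float predicted = model.Predict(testData.GetRow(i));
                sum += Math.Abs(predicted - testClasses[i, 0]);
            }
            return sum;
        }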
        private double SVMComplexModelForBestModel(int y, int z, float[] Et, float[] Zt)
        {
            double error = -9999999;

            if (Math.Max(y, z) < myCategorizedTimeSeri.TrainInputs.GetLength(1))
            {
                return(error);
            }

            int numOfInp = y + z + 1;
            int rows     = Et.Length - Math.Max(y, z);

            float[,] inps = new float[rows, numOfInp];
            float[] targs = new float[rows];
            int     l     = 0;

            for (int i = 0; i < rows; i++)
            {
                if (y > z)
                {
                    for (int j = 0; j < y; j++)
                    {
                        inps[i, j] = Et[l + j];
                    }
                    inps[i, y] = svmModel.Predict(new Matrix <float>(myCategorizedTimeSeri.getInputsForTarget(l + y)));
                    for (int j = 0; j < z; j++)
                    {
                        inps[i, y + j + 1] = Zt[l + y - z + j];
                    }
                    targs[i] = myCategorizedTimeSeri.TimeSeri[l + y];
                }
                else
                {
                    for (int j = 0; j < y; j++)
                    {
                        inps[i, j] = Et[l + z - y + j];
                    }
                    inps[i, y] = svmModel.Predict(new Matrix <float>(myCategorizedTimeSeri.getInputsForTarget(l + z)));
                    for (int j = 0; j < z; j++)
                    {
                        inps[i, j + y + 1] = Zt[l + j];
                    }
                    targs[i] = myCategorizedTimeSeri.TimeSeri[l + z];
                }
                l++;
            }

            float[,] trainInputs = new float[rows - numberOfTests, numOfInp];
            float[] trainTargets = new float[rows - numberOfTests];
            float[,] testInputs = new float[numberOfTests, numOfInp];
            float[] testTargets = new float[numberOfTests];
            int     t           = 0;

            for (; t < rows - numberOfTests; t++)
            {
                for (int j = 0; j < numOfInp; j++)
                {
                    trainInputs[t, j] = inps[t, j];
                }
                trainTargets[t] = targs[t];
            }
            for (int i = 0; t < rows; i++, t++)
            {
                for (int j = 0; j < numOfInp; j++)
                {
                    testInputs[i, j] = inps[t, j];
                }
                testTargets[i] = targs[t];
            }


            double          minError       = 9999999;
            SVM_KERNEL_TYPE bestKernelType = SVM_KERNEL_TYPE.LINEAR;
            double          bestEps        = 0.1;
            SVMParams       p;
            Matrix <float>  trainData    = new Matrix <float>(trainInputs);
            Matrix <float>  trainClasses = new Matrix <float>(trainTargets);
            Matrix <float>  testData     = new Matrix <float>(testInputs);
            Matrix <float>  testClasses  = new Matrix <float>(testTargets);

            foreach (SVM_KERNEL_TYPE tp in Enum.GetValues(typeof(SVM_KERNEL_TYPE)))
            {
                for (double eps = 0.1; eps >= 0.00001; eps *= 0.1)
                {
                    using (SVM model = new SVM())
                    {
                        p            = new SVMParams();
                        p.KernelType = tp;
                        p.SVMType    = SVM_TYPE.EPS_SVR; // for regression
                        p.C          = 1;
                        p.TermCrit   = new MCvTermCriteria(100, eps);
                        p.Gamma      = 1;
                        p.Degree     = 1;
                        p.P          = 1;
                        p.Nu         = 0.1;

                        bool trained = model.TrainAuto(trainData, trainClasses, null, null, p.MCvSVMParams, 10);

                        double err = getSumError(model, testData, testClasses);
                        if (trained && minError > err)
                        {
                            minError = err;

                            bestEps        = eps;
                            bestKernelType = tp;
                        }
                    }
                }
            }

            svmModelHybrid = new SVM();
            p            = new SVMParams();
            p.KernelType = bestKernelType;
            p.SVMType    = Emgu.CV.ML.MlEnum.SVM_TYPE.EPS_SVR; // for regression
            p.C          = 1;
            p.TermCrit   = new MCvTermCriteria(100, bestEps);
            p.Gamma      = 1;
            p.Degree     = 1;
            p.P          = 1;
            p.Nu         = 0.1;


            bool _trained = svmModelHybrid.TrainAuto(trainData, trainClasses, null, null, p.MCvSVMParams, 10);

            error = -1 * getSumError(svmModelHybrid, testData, testClasses);

            return(error);
        }
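        // getResidual is called in Start() above but not listed; a hedged sketch of what it
        // plausibly does (an assumption): predict every training row with svmModel, log the
        // prediction to svmWriter (which is flushed right after the call), and return the
        // residual series target - prediction that is then fed into ARIMA as Et.
        private List<float> getResidualSketch(Matrix<float> data, Matrix<float> targets)
        {
            List<float> residuals = new List<float>();
            for (int i = 0; i < data.Rows; i++)
            {
                float predicted = svmModel.Predict(data.GetRow(i));
                svmWriter.WriteLine(predicted);
                residuals.Add(targets[i, 0] - predicted);
            }
            return residuals;
        }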
Example #7
        public void TestSVM()
        {
            int trainSampleCount = 150;
            int sigma            = 60;

            #region Generate the training data and classes

            Matrix<float> trainData    = new Matrix<float>(trainSampleCount, 2);
            Matrix<float> trainClasses = new Matrix<float>(trainSampleCount, 1);

            Image<Bgr, Byte> img = new Image<Bgr, byte>(500, 500);

            Matrix<float> sample = new Matrix<float>(1, 2);

            Matrix<float> trainData1 = trainData.GetRows(0, trainSampleCount / 3, 1);
            trainData1.GetCols(0, 1).SetRandNormal(new MCvScalar(100), new MCvScalar(sigma));
            trainData1.GetCols(1, 2).SetRandNormal(new MCvScalar(300), new MCvScalar(sigma));

            Matrix<float> trainData2 = trainData.GetRows(trainSampleCount / 3, 2 * trainSampleCount / 3, 1);
            trainData2.SetRandNormal(new MCvScalar(400), new MCvScalar(sigma));

            Matrix<float> trainData3 = trainData.GetRows(2 * trainSampleCount / 3, trainSampleCount, 1);
            trainData3.GetCols(0, 1).SetRandNormal(new MCvScalar(300), new MCvScalar(sigma));
            trainData3.GetCols(1, 2).SetRandNormal(new MCvScalar(100), new MCvScalar(sigma));

            Matrix<float> trainClasses1 = trainClasses.GetRows(0, trainSampleCount / 3, 1);
            trainClasses1.SetValue(1);
            Matrix<float> trainClasses2 = trainClasses.GetRows(trainSampleCount / 3, 2 * trainSampleCount / 3, 1);
            trainClasses2.SetValue(2);
            Matrix<float> trainClasses3 = trainClasses.GetRows(2 * trainSampleCount / 3, trainSampleCount, 1);
            trainClasses3.SetValue(3);

            #endregion

            using (SVM model = new SVM())
            {
                SVMParams p = new SVMParams();
                p.KernelType = Emgu.CV.ML.MlEnum.SVM_KERNEL_TYPE.LINEAR;
                p.SVMType    = Emgu.CV.ML.MlEnum.SVM_TYPE.C_SVC;
                p.C          = 1;
                p.TermCrit   = new MCvTermCriteria(100, 0.00001);

                //bool trained = model.Train(trainData, trainClasses, null, null, p);
                bool trained = model.TrainAuto(trainData, trainClasses, null, null, p.MCvSVMParams, 5);

                model.Save("svmModel.xml");

                for (int i = 0; i < img.Height; i++)
                {
                    for (int j = 0; j < img.Width; j++)
                    {
                        sample.Data[0, 0] = j;
                        sample.Data[0, 1] = i;

                        float response = model.Predict(sample);

                        img[i, j] =
                            response == 1 ? new Bgr(90, 0, 0) :
                            response == 2 ? new Bgr(0, 90, 0) :
                            new Bgr(0, 0, 90);
                    }
                }

                int c = model.GetSupportVectorCount();
                for (int i = 0; i < c; i++)
                {
                    float[] v  = model.GetSupportVector(i);
                    PointF  p1 = new PointF(v[0], v[1]);
                    img.Draw(new CircleF(p1, 4), new Bgr(128, 128, 128), 2);
                }
            }

            // display the original training samples
            for (int i = 0; i < (trainSampleCount / 3); i++)
            {
                PointF p1 = new PointF(trainData1[i, 0], trainData1[i, 1]);
                img.Draw(new CircleF(p1, 2.0f), new Bgr(255, 100, 100), -1);
                PointF p2 = new PointF(trainData2[i, 0], trainData2[i, 1]);
                img.Draw(new CircleF(p2, 2.0f), new Bgr(100, 255, 100), -1);
                PointF p3 = new PointF(trainData3[i, 0], trainData3[i, 1]);
                img.Draw(new CircleF(p3, 2.0f), new Bgr(100, 100, 255), -1);
            }
        }
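        // A hedged follow-up sketch, not in the original test: the model saved above as
        // "svmModel.xml" can be reloaded later and queried without retraining; the query
        // point chosen here is arbitrary.
        public void ReloadSvmSketch()
        {
            using (SVM reloaded = new SVM())
            using (Matrix<float> point = new Matrix<float>(1, 2))
            {
                reloaded.Load("svmModel.xml");
                point[0, 0] = 250;
                point[0, 1] = 250;
                float response = reloaded.Predict(point);
                Console.WriteLine("Predicted class: " + response);
            }
        }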
Example #8
        public void TestSVM()
        {
            int trainSampleCount = 150;
            int sigma            = 60;

            #region Generate the training data and classes

            Matrix <float> trainData    = new Matrix <float>(trainSampleCount, 2);
            Matrix <float> trainClasses = new Matrix <float>(trainSampleCount, 1);

            Image <Bgr, Byte> img = new Image <Bgr, byte>(500, 500);

            Matrix <float> sample = new Matrix <float>(1, 2);

            Matrix <float> trainData1 = trainData.GetRows(0, trainSampleCount / 3, 1);
            trainData1.GetCols(0, 1).SetRandNormal(new MCvScalar(100), new MCvScalar(sigma));
            trainData1.GetCols(1, 2).SetRandNormal(new MCvScalar(300), new MCvScalar(sigma));

            Matrix <float> trainData2 = trainData.GetRows(trainSampleCount / 3, 2 * trainSampleCount / 3, 1);
            trainData2.SetRandNormal(new MCvScalar(400), new MCvScalar(sigma));

            Matrix <float> trainData3 = trainData.GetRows(2 * trainSampleCount / 3, trainSampleCount, 1);
            trainData3.GetCols(0, 1).SetRandNormal(new MCvScalar(300), new MCvScalar(sigma));
            trainData3.GetCols(1, 2).SetRandNormal(new MCvScalar(100), new MCvScalar(sigma));

            Matrix <float> trainClasses1 = trainClasses.GetRows(0, trainSampleCount / 3, 1);
            trainClasses1.SetValue(1);
            Matrix <float> trainClasses2 = trainClasses.GetRows(trainSampleCount / 3, 2 * trainSampleCount / 3, 1);
            trainClasses2.SetValue(2);
            Matrix <float> trainClasses3 = trainClasses.GetRows(2 * trainSampleCount / 3, trainSampleCount, 1);
            trainClasses3.SetValue(3);

            #endregion

            using (SVM model = new SVM())
            {
                SVMParams p = new SVMParams();
                p.KernelType = Emgu.CV.ML.MlEnum.SVM_KERNEL_TYPE.LINEAR;
                p.SVMType    = Emgu.CV.ML.MlEnum.SVM_TYPE.C_SVC;
                p.C          = 1;
                p.TermCrit   = new MCvTermCriteria(100, 0.00001);

                //bool trained = model.Train(trainData, trainClasses, null, null, p);
                bool trained = model.TrainAuto(trainData, trainClasses, null, null, p.MCvSVMParams, 5);
#if !NETFX_CORE
                String fileName = Path.Combine(Path.GetTempPath(), "svmModel.xml");
                model.Save(fileName);
                if (File.Exists(fileName))
                {
                    File.Delete(fileName);
                }
#endif

                for (int i = 0; i < img.Height; i++)
                {
                    for (int j = 0; j < img.Width; j++)
                    {
                        sample.Data[0, 0] = j;
                        sample.Data[0, 1] = i;

                        float response = model.Predict(sample);

                        img[i, j] =
                            response == 1 ? new Bgr(90, 0, 0) :
                            response == 2 ? new Bgr(0, 90, 0) :
                            new Bgr(0, 0, 90);
                    }
                }

                int c = model.GetSupportVectorCount();
                for (int i = 0; i < c; i++)
                {
                    float[] v  = model.GetSupportVector(i);
                    PointF  p1 = new PointF(v[0], v[1]);
                    img.Draw(new CircleF(p1, 4), new Bgr(128, 128, 128), 2);
                }
            }

            // display the original training samples
            for (int i = 0; i < (trainSampleCount / 3); i++)
            {
                PointF p1 = new PointF(trainData1[i, 0], trainData1[i, 1]);
                img.Draw(new CircleF(p1, 2.0f), new Bgr(255, 100, 100), -1);
                PointF p2 = new PointF(trainData2[i, 0], trainData2[i, 1]);
                img.Draw(new CircleF(p2, 2.0f), new Bgr(100, 255, 100), -1);
                PointF p3 = new PointF(trainData3[i, 0], trainData3[i, 1]);
                img.Draw(new CircleF(p3, 2.0f), new Bgr(100, 100, 255), -1);
            }
        }
        void Start()
        {
            var Et = new List <double>();
            var Zt = new List <double>();
            var Lt = new List <double>();

            timeSeriGenerator = new TimeSeriGenerator <double>();
            arimaLogger       = new StreamWriter("Best_Hybrid_ArimaGALog.txt");
            int numInp = 0;

            this.Dispatcher.Invoke(new Action(() => numInp = Int32.Parse(NumberOfInpTextBox.Text)));
            timeSeriGenerator.load(numInp);
            Dispatcher.Invoke(new Action(() => ActivityProgressBar.IsIndeterminate = true));
            Dispatcher.Invoke(new Action(() => numberOfTests         = Int32.Parse(OptimumTestTextBox.Text)));
            Dispatcher.Invoke(new Action(() => numberOfForecastTests = Int32.Parse(ForecastTestTextBox.Text)));

            MyTimeSeriForBestHybrid <double> myTimeSeriForBestHybrid =
                timeSeriGenerator.generateForBestHybrid(numberOfForecastTests);


            //maxGAIteretionInArima = 1000;
            //var train = new double[timeSeriGenerator.TimeSeri.Length - 5];
            //var test = new double[5];
            //for (int i = 0; i < train.Length; i++)
            //{
            //    train[i] = timeSeriGenerator.TimeSeri[i];
            //}
            //for (int i = train.Length, j = 0; i < timeSeriGenerator.TimeSeri.Length; i++, j++)
            //{
            //    test[j] = timeSeriGenerator.TimeSeri[i];
            //}

            //ArimaGA aga=new ArimaGA();
            //aga.StartArima(train);

            //NumericalVariable timeSeriii = new NumericalVariable("timeSeriii", train);
            //arimaModel = new ArimaModel(timeSeriii, aga.bestP, aga.bestD, aga.bestQ);
            //arimaModel.Compute();

            //var fv2 = arimaModel.Forecast(numberOfForecastTests);
            //double ea2 = MyErrorParameters.ERROR_Percent(fv2.ToArray(), test);)



            for (int i = 0; i < myTimeSeriForBestHybrid.part2.Count; i++)
            {
                StartArima(myTimeSeriForBestHybrid.part1.ToArray());

                //// converting to double[]
                //double[] db = new double[myTimeSeriForBestHybrid.part1.Count];
                //for (int j = 0; j < db.Length; j++)
                //{
                //    db[j] = myTimeSeriForBestHybrid.part1.ToArray()[j];
                //}

                NumericalVariable timeSerii = new NumericalVariable("timeSerii", myTimeSeriForBestHybrid.part1.ToArray());
                arimaModel = new ArimaModel(timeSerii, myBestP, myBestD, myBestQ);
                arimaModel.Compute();

                var   res = arimaModel.Forecast(1);
                float lt  = (float)res[0];
                Lt.Add(lt);
                double target = myTimeSeriForBestHybrid.part2[i];
                double e      = lt - target;
                Et.Add(e);

                myTimeSeriForBestHybrid.part1.Add(target);
                double mu = myTimeSeriForBestHybrid.part1.Average();

                if (myBestD == 0)
                {
                    Zt.Add(myTimeSeriForBestHybrid.part1[myTimeSeriForBestHybrid.part1.Count - 1] - mu);
                }
                else if (myBestD == 1)
                {
                    Zt.Add(myTimeSeriForBestHybrid.part1[myTimeSeriForBestHybrid.part1.Count - 1] -
                           myTimeSeriForBestHybrid.part1[myTimeSeriForBestHybrid.part1.Count - 2] - mu);
                }
                else
                {
                    Zt.Add(myTimeSeriForBestHybrid.part1[myTimeSeriForBestHybrid.part1.Count - 1] -
                           2 * myTimeSeriForBestHybrid.part1[myTimeSeriForBestHybrid.part1.Count - 2] +
                           myTimeSeriForBestHybrid.part1[myTimeSeriForBestHybrid.part1.Count - 3] - mu);
                }
            }

            ArimaModel EtArimaModel = new ArimaGA().GetBestModel(Et.ToArray());
            ArimaModel ZtArimaModel = new ArimaGA().GetBestModel(Zt.ToArray());
            int        a            = 0;

            SVM svm = new SVM();


            //TimeSeriGenerator<double> gen = new TimeSeriGenerator<double>();
            //gen.NumberOfInputVariables = Int32.Parse(NumberOfInpTextBox.Text);
            //gen.TimeSeri = Et.ToArray();
            //var EtTimeSeries = gen.generate();

            //gen = new TimeSeriGenerator<double>();
            //gen.NumberOfInputVariables = Int32.Parse(NumberOfInpTextBox.Text);
            //gen.TimeSeri = Zt.ToArray();
            //var ZtTimeSeries = gen.generate();


            Pair <int> bestAB = CreateComplexHybridModel(Et.ToArray(), Lt.ToArray(), Zt.ToArray());
            double     minErr = SVMComplexModelForBestModel(bestAB.First, bestAB.Second, Et.ToArray(), Lt.ToArray(),
                                                            Zt.ToArray());

            MessageBox.Show(bestAB.First + " , " + bestAB.Second + "\nMinError In Training Is =>  " + minErr, "Now Best M & N Found", MessageBoxButton.OK,
                            MessageBoxImage.Asterisk);


            // --------------------------------- now our complex hybrid model is created -----------------------------------------------------------------

            double mse          = 0;
            double errorPercent = 0;
            double sumTargets   = 0;

            if (myTimeSeriForBestHybrid.part1.Count != timeSeriGenerator.TimeSeri.Length - numberOfForecastTests)
            {
                MessageBox.Show("Input For Arima Model Is Not Completed", "ERROR", MessageBoxButton.OK, MessageBoxImage.Error);
            }


            // << CHECK HERE >>  (for checking purposes only; comment this out later)

            var    forecastedVector = arimaModel.Forecast(numberOfForecastTests);
            double eoa = MyErrorParameters.ERROR_Percent(forecastedVector.ToArray(), myTimeSeriForBestHybrid.testCases.ToArray());

            MessageBox.Show("Error Of Arima Is =>  " + eoa, "Arima Error", MessageBoxButton.OK,
                            MessageBoxImage.Information);

            //maxGAIteretionInArima = 1000;
            //StartArima(myTimeSeriForBestHybrid.part1.ToArray());
            //double[] dbb = new double[myTimeSeriForBestHybrid.part1.Count];
            //for (int j = 0; j < dbb.Length; j++)
            //{
            //    dbb[j] = myTimeSeriForBestHybrid.part1.ToArray()[j];
            //}
            //NumericalVariable timeSeriTest = new NumericalVariable("timeSerii", dbb);
            //arimaModel = new ArimaModel(timeSeriTest, myBestP, myBestD, myBestQ);
            //arimaModel.Compute();

            StreamWriter  hybridWriter = new StreamWriter(OUTPUT_FILE_NAME);
            List <double> results      = new List <double>();

            //double errorOfArima = MyErrorParameters.ERROR_Percent(forcastedVector.ToArray(), myTimeSeriForBestHybrid.testCases.ToArray());
            //MessageBox.Show("Error Of Arima Is =>  " + errorOfArima, "Arima Error", MessageBoxButton.OK,
            //                MessageBoxImage.Information);


            // ---------------------------------------------------------------
            int numOfInp = bestAB.First + bestAB.Second + 1;
            int rows     = Et.Count - Math.Max(bestAB.First, bestAB.Second);

            float[,] inps = new float[rows, numOfInp];
            double[] targs = new double[rows];
            int      y     = bestAB.First;
            int      z     = bestAB.Second;
            int      ll    = 0;

            for (int o = 0; o < rows; o++)
            {
                if (y > z)
                {
                    for (int j = 0; j < y; j++)
                    {
                        inps[o, j] = (float)Et[ll + j];
                    }
                    inps[o, y] = (float)Lt[ll + y];
                    for (int j = 0; j < z; j++)
                    {
                        inps[o, y + j + 1] = (float)Zt[ll + y - z + j];
                    }
                    targs[o] = timeSeriGenerator.TimeSeri[ll + y];
                }
                else
                {
                    for (int j = 0; j < y; j++)
                    {
                        inps[o, j] = (float)Et[ll + z - y + j];
                    }
                    inps[o, y] = (float)Lt[ll + z];
                    for (int j = 0; j < z; j++)
                    {
                        inps[o, j + y + 1] = (float)Zt[ll + j];
                    }
                    targs[o] = timeSeriGenerator.TimeSeri[ll + z];
                }
                ll++;
            }

            float[,] trainInputs = new float[rows - numberOfTests, numOfInp];
            float[] trainTargets = new float[rows - numberOfTests];
            float[,] testInputs = new float[numberOfTests, numOfInp];
            float[] testTargets = new float[numberOfTests];
            int     t           = 0;

            for (; t < rows - numberOfTests; t++)
            {
                for (int j = 0; j < numOfInp; j++)
                {
                    trainInputs[t, j] = inps[t, j];
                }
                trainTargets[t] = (float)targs[t];
            }
            for (int o = 0; t < rows; o++, t++)
            {
                for (int j = 0; j < numOfInp; j++)
                {
                    testInputs[o, j] = inps[t, j];
                }
                testTargets[o] = (float)targs[t];
            }

            svmModelHybrid = new SVM();

            SVM_KERNEL_TYPE bestKernelType = SVM_KERNEL_TYPE.RBF;
            double          bestEps        = 0.001;
            SVMParams       p;
            Matrix <float>  trainData    = new Matrix <float>(trainInputs);
            Matrix <float>  trainClasses = new Matrix <float>(trainTargets);

            p            = new SVMParams();
            p.KernelType = bestKernelType;
            p.SVMType    = Emgu.CV.ML.MlEnum.SVM_TYPE.EPS_SVR; // for regression
            p.C          = 1;
            p.TermCrit   = new MCvTermCriteria(100, bestEps);
            p.Gamma      = 1;
            p.Degree     = 1;
            p.P          = 1;
            p.Nu         = 0.1;

            bool _trained = svmModelHybrid.TrainAuto(trainData, trainClasses, null, null, p.MCvSVMParams, 10);

            // ---------------------------------------------------------------


            for (int i = 0; i < numberOfForecastTests; i++)
            {
                float[,] inpTest = new float[bestAB.First + bestAB.Second + 1, 1];
                int l = 0;
                for (int j = 0; j < bestAB.First; j++, l++)
                {
                    inpTest[l, 0] = (float)Et[Et.Count - bestAB.First + j];
                }
                inpTest[l++, 0] = (float)forecastedVector[i];
                for (int j = 0; j < bestAB.Second; j++, l++)
                {
                    inpTest[l, 0] = (float)Zt[Zt.Count - bestAB.Second + j];
                }


                // << CHECK HERE >>



                float result = svmModelHybrid.Predict(new Matrix <float>(inpTest));
                results.Add(result);
                hybridWriter.WriteLine(result);
                double target = myTimeSeriForBestHybrid.testCases[i];


                // preparing for next use in this for loop
                double resi = target - (float)forecastedVector[i];    // float resi = target - result;   << CHECK HERE IMPORTANT >>
                Et.Add(resi);

                myTimeSeriForBestHybrid.part1.Add(target);
                double mu = myTimeSeriForBestHybrid.part1.Average();
                if (myBestD == 0)
                {
                    Zt.Add(myTimeSeriForBestHybrid.part1[myTimeSeriForBestHybrid.part1.Count - 1] - mu);
                }
                else if (myBestD == 1)
                {
                    Zt.Add(myTimeSeriForBestHybrid.part1[myTimeSeriForBestHybrid.part1.Count - 1] -
                           myTimeSeriForBestHybrid.part1[myTimeSeriForBestHybrid.part1.Count - 2] - mu);
                }
                else //else if (bestD == 2)    << CHECK HERE >>
                {
                    Zt.Add(myTimeSeriForBestHybrid.part1[myTimeSeriForBestHybrid.part1.Count - 1] -
                           2 * myTimeSeriForBestHybrid.part1[myTimeSeriForBestHybrid.part1.Count - 2] +
                           myTimeSeriForBestHybrid.part1[myTimeSeriForBestHybrid.part1.Count - 3] - mu);
                }
            }

            double _mse          = MyErrorParameters.MSE(results.ToArray(), myTimeSeriForBestHybrid.testCases.ToArray());
            double _errorPercent = MyErrorParameters.ERROR_Percent(results.ToArray(), myTimeSeriForBestHybrid.testCases.ToArray());

            hybridWriter.WriteLine("\n\n\nMSE & ERROR% are =>\n\n{0} {1}", _mse, _errorPercent);

            hybridWriter.Flush();
            hybridWriter.Close();

            MessageBox.Show(
                String.Format(
                    "Complex Hybrid Model Created File {0} For Output Successfully Now , Please Check It Out .",
                    OUTPUT_FILE_NAME), "Hybrid SVM Arima Done", MessageBoxButton.OK,
                MessageBoxImage.Information);
        }
        private double SVMComplexModel(int y, int z, double[] Et, double[] Lt, double[] Zt)
        {
            double error = -9999999;

            int numOfInp = y + z + 1;
            int rows     = Et.Length - Math.Max(y, z);

            float[,] inps = new float[rows, numOfInp];
            double[] targs = new double[rows];
            int      l     = 0;

            for (int i = 0; i < rows; i++)
            {
                if (y > z)
                {
                    for (int j = 0; j < y; j++)
                    {
                        inps[i, j] = (float)Et[l + j];
                    }
                    inps[i, y] = (float)Lt[l + y];
                    for (int j = 0; j < z; j++)
                    {
                        inps[i, y + j + 1] = (float)Zt[l + y - z + j];
                    }
                    targs[i] = timeSeriGenerator.TimeSeri[l + y];
                }
                else
                {
                    for (int j = 0; j < y; j++)
                    {
                        inps[i, j] = (float)Et[l + z - y + j];
                    }
                    inps[i, y] = (float)Lt[l + z];
                    for (int j = 0; j < z; j++)
                    {
                        inps[i, j + y + 1] = (float)Zt[l + j];
                    }
                    targs[i] = timeSeriGenerator.TimeSeri[l + z];
                }
                l++;
            }

            float[,] trainInputs = new float[rows - numberOfTests, numOfInp];
            float[] trainTargets = new float[rows - numberOfTests];
            float[,] testInputs = new float[numberOfTests, numOfInp];
            float[] testTargets = new float[numberOfTests];
            int     t           = 0;

            for (; t < rows - numberOfTests; t++)
            {
                for (int j = 0; j < numOfInp; j++)
                {
                    trainInputs[t, j] = inps[t, j];
                }
                trainTargets[t] = (float)targs[t];
            }
            for (int i = 0; t < rows; i++, t++)
            {
                for (int j = 0; j < numOfInp; j++)
                {
                    testInputs[i, j] = inps[t, j];
                }
                testTargets[i] = (float)targs[t];
            }


            double          minError       = 9999999;
            SVM_KERNEL_TYPE bestKernelType = SVM_KERNEL_TYPE.LINEAR;
            double          bestEps        = 0.1;
            SVMParams       p;
            Matrix <float>  trainData    = new Matrix <float>(trainInputs);
            Matrix <float>  trainClasses = new Matrix <float>(trainTargets);
            Matrix <float>  testData     = new Matrix <float>(testInputs);
            Matrix <float>  testClasses  = new Matrix <float>(testTargets);

            // UNCOMMENT THIS FOR A BETTER MODEL   << CHECK HERE >>
            //foreach (SVM_KERNEL_TYPE tp in Enum.GetValues(typeof(SVM_KERNEL_TYPE)))
            //{
            //    for (double eps = 0.1; eps >= 0.00001; eps *= 0.1)
            //    {
            //        using (SVM model = new SVM())
            //        {
            //            p = new SVMParams();
            //            p.KernelType = tp;
            //            p.SVMType = SVM_TYPE.EPS_SVR; // for regression
            //            p.C = 1;
            //            p.TermCrit = new MCvTermCriteria(100, eps);
            //            p.Gamma = 1;
            //            p.Degree = 1;
            //            p.P = 1;
            //            p.Nu = 0.1;

            //            bool trained = model.TrainAuto(trainData, trainClasses, null, null, p.MCvSVMParams, 10);

            //            float[] predictedVa = new float[testData.Rows];
            //            for (int i = 0; i < testData.Rows; i++)
            //            {
            //                predictedVa[i] = model.Predict(testData.GetRow(i));
            //            }
            //            double err = MyErrorParameters.ERROR_Percent(predictedVa, testTargets);

            //            if (trained && minError > err)
            //            {
            //                minError = err;

            //                bestEps = eps;
            //                bestKernelType = tp;
            //            }
            //        }
            //    }
            //}

            //var svmMod = new SVM();
            //p = new SVMParams();
            //p.KernelType = bestKernelType;
            //p.SVMType = Emgu.CV.ML.MlEnum.SVM_TYPE.EPS_SVR; // for regression
            //p.C = 1;
            //p.TermCrit = new MCvTermCriteria(100, bestEps);
            //p.Gamma = 1;
            //p.Degree = 1;
            //p.P = 1;
            //p.Nu = 0.1;
            // UNCOMMENT HERE

            // COMMENT HERE
            var svmMod = new SVM();

            p            = new SVMParams();
            p.KernelType = Emgu.CV.ML.MlEnum.SVM_KERNEL_TYPE.POLY;
            p.SVMType    = Emgu.CV.ML.MlEnum.SVM_TYPE.EPS_SVR; // for regression
            p.C          = 1;
            p.TermCrit   = new MCvTermCriteria(100, 0.00001);
            p.Gamma      = 1;
            p.Degree     = 1;
            p.P          = 1;
            p.Nu         = 0.1;
            // COMMENT HERE

            bool _trained = svmMod.TrainAuto(trainData, trainClasses, null, null, p.MCvSVMParams, 10);

            float[] predictedV = new float[testData.Rows];
            for (int i = 0; i < testData.Rows; i++)
            {
                predictedV[i] = svmMod.Predict(testData.GetRow(i));
            }
            error = -1 * MyErrorParameters.ERROR_Percent(predictedV, testTargets);

            return(error);
        }
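        // MyErrorParameters.MSE and ERROR_Percent are used throughout these examples but not
        // listed; this sketch (an assumption, not the original class) mirrors the commented-out
        // manual computation in the Complex_Hybrid Start() listing above: mean squared error, and
        // absolute error as a percentage of the absolute targets. The real class presumably also
        // has double[] overloads, since it is called with double arrays elsewhere.
        static class MyErrorParametersSketch
        {
            public static double MSE(float[] predicted, float[] targets)
            {
                double sum = 0;
                for (int i = 0; i < predicted.Length; i++)
                {
                    sum += Math.Pow(targets[i] - predicted[i], 2);
                }
                return sum / predicted.Length;
            }

            public static double ERROR_Percent(float[] predicted, float[] targets)
            {
                double absError = 0, absTargets = 0;
                for (int i = 0; i < predicted.Length; i++)
                {
                    absError   += Math.Abs(targets[i] - predicted[i]);
                    absTargets += Math.Abs(targets[i]);
                }
                return 100.0 * absError / absTargets;
            }
        }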
Example #11
        public void Start()
        {
            writer = new StreamWriter("SVM_Output.txt");

            #region Loading the training data and classes and test data and test classes

            Matrix <float> trainData    = null;
            Matrix <float> trainClasses = null;
            OpenFileDialog ofd          = new OpenFileDialog();
            ofd.Title            = "Please Open Your Train File :";
            ofd.InitialDirectory = Environment.CurrentDirectory;
            ofd.Filter           = "Text Files (*.txt)|*.txt|All Files (*.*)|*.*";
            if (ofd.ShowDialog().Value)
            {
                LoadData(ofd.FileName, ref trainData, ref trainClasses);
            }

            Matrix <float> testData    = null;
            Matrix <float> testClasses = null;
            ofd.Title = "Now Please Open Your Test File :";
            if (ofd.ShowDialog().Value)
            {
                LoadData(ofd.FileName, ref testData, ref testClasses);
            }

            #endregion


            #region creating and training the svm model

            double          minError       = 9999999;
            SVM_KERNEL_TYPE bestKernelType = SVM_KERNEL_TYPE.LINEAR;
            double          bestEps        = 0.1;

            foreach (Emgu.CV.ML.MlEnum.SVM_KERNEL_TYPE tp in Enum.GetValues(typeof(Emgu.CV.ML.MlEnum.SVM_KERNEL_TYPE)))
            {
                for (double eps = 0.1; eps >= 0.00001; eps *= 0.1)
                {
                    using (SVM model = new SVM())
                    {
                        SVMParams p = new SVMParams();
                        p.KernelType = tp;
                        p.SVMType    = Emgu.CV.ML.MlEnum.SVM_TYPE.EPS_SVR; // for regression
                        p.C          = 1;
                        p.TermCrit   = new MCvTermCriteria(100, eps);
                        p.Gamma      = 1;
                        p.Degree     = 1;
                        p.P          = 1;
                        p.Nu         = 0.1;

                        //these only work with rounded trainClasses data
                        //bool trained = model.Train(trainData, trainClasses, null, null, p);
                        bool trained = model.TrainAuto(trainData, trainClasses, null, null, p.MCvSVMParams, 10);


                        double error = getSumError(model, testData, testClasses, false);
                        if (trained && minError > error)
                        {
                            minError = error;

                            bestEps        = eps;
                            bestKernelType = tp;
                        }
                    }
                }
            }

            using (SVM model = new SVM())
            {
                SVMParams p = new SVMParams();
                p.KernelType = bestKernelType;
                p.SVMType    = Emgu.CV.ML.MlEnum.SVM_TYPE.EPS_SVR; // for regression
                p.C          = 1;
                p.TermCrit   = new MCvTermCriteria(100, bestEps);
                p.Gamma      = 1;
                p.Degree     = 1;
                p.P          = 1;
                p.Nu         = 0.1;

                //these only work with rounded trainClasses data
                //bool trained = model.Train(trainData, trainClasses, null, null, p);
                bool trained = model.TrainAuto(trainData, trainClasses, null, null, p.MCvSVMParams, 10);

                double error = getSumError(model, testData, testClasses, true);
            }

            writer.Flush();
            writer.Close();
            MessageBox.Show("Done", "SVM", MessageBoxButton.OK, MessageBoxImage.Information);

            #endregion
        }
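        // LoadData is called above but not listed; a hedged sketch of one plausible file format
        // (an assumption, not the original helper): each text line holds the input features
        // followed by the target value, separated by whitespace.
        private void LoadDataSketch(string fileName, ref Matrix<float> data, ref Matrix<float> classes)
        {
            string[] lines = File.ReadAllLines(fileName);
            char[] separators = { ' ', '\t' };
            int featureCount = lines[0].Split(separators, StringSplitOptions.RemoveEmptyEntries).Length - 1;

            data    = new Matrix<float>(lines.Length, featureCount);
            classes = new Matrix<float>(lines.Length, 1);

            for (int i = 0; i < lines.Length; i++)
            {
                string[] parts = lines[i].Split(separators, StringSplitOptions.RemoveEmptyEntries);
                for (int j = 0; j < featureCount; j++)
                {
                    data[i, j] = float.Parse(parts[j]);
                }
                classes[i, 0] = float.Parse(parts[featureCount]);   // last column is the target
            }
        }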
Example #12
        private void button2_Click_1(object sender, EventArgs e)
        {
            watch = Stopwatch.StartNew();
            makeDataSet();
            watch.Stop();
            txtFeatureTime.Text = "" + watch.ElapsedMilliseconds;
            progressBar1.Value  = 100;
            int trainingSampleCount = (int)(trainCount * 0.75);
            int testSampleCount     = (int)(trainCount * 0.25);

            watch = Stopwatch.StartNew();
            if (chkDtree.Checked)
            {
                Matrix <Byte> varType = new Matrix <byte>(data.Cols + 1, 1);
                varType.SetValue((byte)VAR_TYPE.NUMERICAL);
                Matrix <byte> sampleIdx = new Matrix <byte>(data.Rows, 1);
                using (Matrix <byte> sampleRows = sampleIdx.GetRows(0, trainingSampleCount, 1))
                    sampleRows.SetValue(255);
                IntPtr         priors = new IntPtr();
                MCvDTreeParams param  = new MCvDTreeParams();
                param.maxDepth           = 8;
                param.minSampleCount     = 10;
                param.regressionAccuracy = 0;
                param.useSurrogates      = true;
                param.maxCategories      = 15;
                param.cvFolds            = 2;
                param.use1seRule         = true;
                param.truncatePrunedTree = true;
                param.priors             = priors;
                bool success = dtree.Train(
                    data,
                    Emgu.CV.ML.MlEnum.DATA_LAYOUT_TYPE.ROW_SAMPLE,
                    response,
                    null,
                    null,
                    varType,
                    null,
                    param);
                if (!success)
                {
                    return;
                }
            }
            else
            {
                SVMParams param = new SVMParams();
                param.SVMType  = SVM_TYPE.C_SVC;
                param.TermCrit = new MCvTermCriteria(1000);
                param.C        = 4;
                svm.Train(data, response, null, null, param);
            }
            watch.Stop();
            txtLearnTime.Text = "" + watch.ElapsedMilliseconds;

            double         trainDataCorrectRatio = 0;
            double         testDataCorrectRatio  = 0;
            Matrix <float> output = new Matrix <float>(1, featureCount);

            for (int i = 0; i < data.Rows; i++)
            {
                using (Matrix <float> sample = data.GetRow(i))
                {
                    double r;
                    if (chkDtree.Checked)
                    {
                        r = dtree.Predict(sample, null, true).value;
                    }
                    else
                    {
                        r = svm.Predict(sample);
                    }
                    r = Math.Abs(Math.Round(r) - response[i, 0]);
                    // Correct prediction: tally it as a train or test hit depending on where the
                    // row falls; the rows are laid out in four equal blocks, each with its first
                    // 3/4 counted as training samples and its last 1/4 as test samples.
                    if (r < 1.0e-6)
                    {
                        if (i >= 0 && i < 3 * trainCount / 16)
                        {
                            trainDataCorrectRatio++;
                        }
                        else if (i >= 3 * trainCount / 16 && i < trainCount / 4)
                        {
                            testDataCorrectRatio++;
                        }
                        else if (i >= trainCount / 4 && i < 7 * trainCount / 16)
                        {
                            trainDataCorrectRatio++;
                        }
                        else if (i >= 7 * trainCount / 16 && i < trainCount / 2)
                        {
                            testDataCorrectRatio++;
                        }
                        else if (i >= trainCount / 2 && i < 11 * trainCount / 16)
                        {
                            trainDataCorrectRatio++;
                        }
                        else if (i >= 11 * trainCount / 16 && i < 3 * trainCount / 4)
                        {
                            testDataCorrectRatio++;
                        }
                        else if (i >= 3 * trainCount / 4 && i < 15 * trainCount / 16)
                        {
                            trainDataCorrectRatio++;
                        }
                        else if (i >= 15 * trainCount / 16 && i < trainCount)
                        {
                            testDataCorrectRatio++;
                        }
                    }
                }
            }
            trainDataCorrectRatio /= (trainCount * .75);
            testDataCorrectRatio  /= (trainCount * .25);
            txtTestRate.Text       = (Math.Round(testDataCorrectRatio, 4) * 100).ToString();
            txtTrainRate.Text      = (Math.Round(trainDataCorrectRatio, 4) * 100).ToString();
        }
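        // A hedged sketch, not in the original form: classify one new feature row with whichever
        // model was trained in the handler above, using the same Predict calls it already uses;
        // the caller is expected to build the row the same way makeDataSet() builds `data`.
        private double ClassifySampleSketch(Matrix<float> featureRow)
        {
            if (chkDtree.Checked)
            {
                return dtree.Predict(featureRow, null, true).value;
            }
            return svm.Predict(featureRow);
        }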
Example #13
        private void trainIndoorOutdoorToolStripMenuItem_Click(object sender, EventArgs e)
        {
            this.folderBrowserDialog.RootFolder = System.Environment.SpecialFolder.MyComputer;

            DialogResult dialogResult = this.folderBrowserDialog.ShowDialog();

            if (dialogResult == DialogResult.OK)
            {
                string root = folderBrowserDialog.SelectedPath;
                ColorHistogramInOutDoorFeature inoutdoorFeature = new ColorHistogramInOutDoorFeature();
                DirectoryInfo trainPosDir = new DirectoryInfo(root + @"\Train\pos");
                DirectoryInfo trainNegDir = new DirectoryInfo(root + @"\Train\neg");
                DirectoryInfo testPosDir  = new DirectoryInfo(root + @"\Test\pos");
                DirectoryInfo testNegDir  = new DirectoryInfo(root + @"\Test\neg");

                var posImagesOnDisk = FileHelper.GetImages(trainPosDir);
                var negImagesOnDisk = FileHelper.GetImages(trainNegDir);

                int trainingCount = posImagesOnDisk.Count + negImagesOnDisk.Count;

                int offset    = 0;
                int index     = 0;
                int dimension = 3 * 2 * 64;

                Matrix <float> trainClasses = new Matrix <float>(trainingCount, 1);

                float[,] flatDescriptors = new float[trainingCount, dimension];
                foreach (var image in posImagesOnDisk)
                {
                    using (Image <Bgr, Byte> tempImage = new Image <Bgr, Byte>(image.FullName))
                    {
                        inoutdoorFeature.Compute(tempImage);
                        var descriptor = inoutdoorFeature.Descriptors;
                        Buffer.BlockCopy(descriptor.ManagedArray, 0, flatDescriptors, offset, sizeof(float) * descriptor.ManagedArray.Length);
                        offset += sizeof(float) * descriptor.ManagedArray.Length;

                        trainClasses[index, 0] = 1.0f;
                        ++index;
                    }
                }

                foreach (var image in negImagesOnDisk)
                {
                    using (Image <Bgr, Byte> tempImage = new Image <Bgr, Byte>(image.FullName))
                    {
                        inoutdoorFeature.Compute(tempImage);
                        var descriptor = inoutdoorFeature.Descriptors;
                        Buffer.BlockCopy(descriptor.ManagedArray, 0, flatDescriptors, offset, sizeof(float) * descriptor.ManagedArray.Length);
                        offset += sizeof(float) * descriptor.ManagedArray.Length;

                        trainClasses[index, 0] = -1.0f;
                        ++index;
                    }
                }

                Matrix <float> trainData = new Matrix <float>(flatDescriptors);
                using (SVM model = new SVM())
                {
                    SVMParams p = new SVMParams();
                    p.KernelType = Emgu.CV.ML.MlEnum.SVM_KERNEL_TYPE.LINEAR;
                    p.SVMType    = Emgu.CV.ML.MlEnum.SVM_TYPE.C_SVC;
                    p.C          = 1;
                    p.TermCrit   = new MCvTermCriteria(1000, 0.01);

                    bool trained = model.TrainAuto(trainData, trainClasses, null, null, p.MCvSVMParams, 5);

                    model.Save(root + @"\inoutdoor.model");
                }
            }
        }
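        // A hedged classification sketch, not in the original handler: reload the model saved
        // above and label one image; the 1 / -1 convention mirrors the trainClasses values used
        // during training, and both parameters (modelPath, imagePath) are assumed.
        private bool IsIndoorOutdoorPositiveSketch(string modelPath, string imagePath)
        {
            ColorHistogramInOutDoorFeature feature = new ColorHistogramInOutDoorFeature();
            using (SVM model = new SVM())
            using (Image<Bgr, Byte> image = new Image<Bgr, Byte>(imagePath))
            {
                model.Load(modelPath);
                feature.Compute(image);
                var descriptor = feature.Descriptors;

                // Flatten the descriptor exactly the way the training handler does.
                float[] flat = new float[descriptor.ManagedArray.Length];
                Buffer.BlockCopy(descriptor.ManagedArray, 0, flat, 0, sizeof(float) * flat.Length);

                Matrix<float> sample = new Matrix<float>(1, flat.Length);
                for (int i = 0; i < flat.Length; i++)
                {
                    sample[0, i] = flat[i];
                }
                return model.Predict(sample) > 0;   // > 0 corresponds to the "pos" training label
            }
        }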