Example #1
private void loadToolStripMenuItem_Click(object sender, EventArgs e)
{
    // reload the trained visual vocabulary (.bow) and network (.net) for the
    // current vocabulary size, together with the region-of-interest mask
    imgClassifier = new BowImageClassifier();
    imgClassifier.Init(
        dataPath + String.Format(@"\train-{0}.bow", bowSize),
        dataPath + String.Format(@"\train-{0}.net", bowSize),
        mask
        );
    logger.logStr("Loaded");
}
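
The two-path Init overload used above presumably just deserializes the artifacts written during training (see Example #3). Below is a minimal sketch of what such an overload could look like, assuming Accord.IO.Serializer was used for both files and assuming bow, network and mask as internal field names; the real BowImageClassifier class is not shown in these examples.

    using System.Drawing;
    using Accord.IO;
    using Accord.Imaging;
    using Accord.Neuro;

    public class BowImageClassifier
    {
        // assumed fields; the actual class is not part of these examples
        private BagOfVisualWords bow;
        private ActivationNetwork network;
        private Bitmap mask;

        public void Init(string bowPath, string netPath, Bitmap mask)
        {
            // reload the visual vocabulary and the trained network saved with Serializer.Save
            this.bow     = Serializer.Load<BagOfVisualWords>(bowPath);
            this.network = Serializer.Load<ActivationNetwork>(netPath);
            this.mask    = mask;
        }
    }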
Example #2
        private bool Train(dynamic bow, SceneFeatureData scenefeatureData)
        {
            // restrict feature extraction to this scene feature's area via a binary mask
            Bitmap mask = Utils.CreateMaskBitmap(new Size(1280, 720), new Rectangle[] { scenefeatureData.feature.area });

            mask.Save(scenefeatureData.feature.name + "-mask.png");
            var trainData = scenefeatureData.trainData;

            int[]      labelIndexs = trainData.GetLabelIndexs();
            String[]   labels      = trainData.GetLabels();
            double[][] features    = trainData.GetFeature(bow, mask);
            int        numOutput   = trainData.GetNumOutput();
            var        function    = new SigmoidFunction();
            bool       flgFound    = false;
            int        count       = 0;

            // retrain from a fresh random initialization up to 100 times; each attempt
            // is abandoned once the error stops improving three checks in a row
            while ((flgFound == false) && (count < 100))
            {
                count++;
                var network = new ActivationNetwork(function, bow.NumberOfOutputs, 20, numOutput);
                new NguyenWidrow(network).Randomize();
                var teacher = new ParallelResilientBackpropagationLearning(network);

                BowImageClassifier trainImgClassifier = new BowImageClassifier();
                trainImgClassifier.Init(bow, network, mask);
                //create one target output vector per training sample
                double[][] outputs    = trainData.GetOutputs(numOutput);
                double     avgError   = 10000.0;
                double     prevError  = avgError;
                double     bestError  = avgError;
                int        errorCount = 0;
                // run 10 epochs at a time; stop once the error is tiny or after 3 checks without improvement
                while ((errorCount < 3) && (avgError > 0.00001))
                {
                    //Application.DoEvents();
                    double[] errors = new double[10];
                    for (int i = 0; i < 10; i++)
                    {
                        errors[i] = teacher.RunEpoch(features, outputs);
                    }
                    avgError = errors.Average();
                    if (prevError > avgError)
                    {
                        int trainError = trainImgClassifier.Evaluate(trainData);
                        //int testError = trainImgClassifier.Evaluate(testData);
                        //int testSetError = trainImgClassifier.Evaluate(testDataSet);
                        logger.logStr(String.Format("{0} {1} {2}", avgError, prevError, trainError));
                        prevError = avgError;
                        //save best error
                        if (bestError > avgError)
                        {
                            bestError = avgError;
                            //Accord.IO.Serializer.Save(network, dataPath + String.Format(@"\train-{0}.net", bow.NumberOfOutputs));
                        }
                        if (trainError /*+ testError + testSetError*/ == 0)
                        {
                            Accord.IO.Serializer.Save(network, path + @"\" + scenefeatureData.feature.name + String.Format(@"\train-{0}.net", bow.NumberOfOutputs));
                            logger.logStr("Done " + bestError + " " + trainError);
                            return true;
                        }
                    }
                    else
                    {
                        logger.logStr(String.Format("{0}", avgError));
                        prevError = 10000.0;
                        errorCount++;
                    }
                    //Application.DoEvents();
                }
                logger.logStr("Done " + bestError + " " + count);
            }
            return false;
        }
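
Utils.CreateMaskBitmap is not shown in these examples. A plausible sketch is below, assuming the usual convention that white pixels mark the regions to keep and black pixels are masked out; the signature is taken from the call site above, everything else is an assumption.

    using System.Drawing;

    public static class Utils
    {
        public static Bitmap CreateMaskBitmap(Size size, Rectangle[] areas)
        {
            var mask = new Bitmap(size.Width, size.Height);
            using (var g = Graphics.FromImage(mask))
            {
                g.Clear(Color.Black);                     // everything masked out by default
                foreach (var area in areas)
                {
                    g.FillRectangle(Brushes.White, area); // keep only the listed areas
                }
            }
            return mask;
        }
    }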
Example #3
        private void trainToolStripMenuItem_Click_1(object sender, EventArgs e)
        {
            // load an additional evaluation set from testDataPath and enable its caching flag
            ImageTrainDataSet testDataSet = FeatureDetector.GetAllTrainImageData(testDataPath, configure.trainFolders);
            testDataSet.flgCache = true;

            int[]    labelIndexs = trainData.GetLabelIndexs();
            String[] labels      = trainData.GetLabels();
            var      bow         = Accord.IO.Serializer.Load<BagOfVisualWords>(dataPath + String.Format(@"\train-{0}.bow", bowSize));

            double[][] features  = trainData.GetFeature(bow, mask);
            int        numOutput = trainData.GetNumOutput();
            var        function  = new SigmoidFunction();

            logger.logStr("Start Training");
            bool flgFound = false;
            int  count    = 0;

            // same restart strategy as Example #2, but training stops only when the
            // train, test and extra test-set errors are all zero
            while ((flgFound == false) && (count < 100))
            {
                count++;
                var network = new ActivationNetwork(function, bow.NumberOfOutputs, 20, numOutput);
                new NguyenWidrow(network).Randomize();
                var teacher = new ParallelResilientBackpropagationLearning(network);

                BowImageClassifier trainImgClassifier = new BowImageClassifier();
                trainImgClassifier.Init(bow, network, mask);
                //create one target output vector per training sample
                double[][] outputs    = trainData.GetOutputs(numOutput);
                double     avgError   = 10000.0;
                double     prevError  = avgError;
                double     bestError  = avgError;
                int        errorCount = 0;
                while ((errorCount < 3) && (avgError > 0.00001))
                {
                    //Application.DoEvents();
                    double[] errors = new double[10];
                    for (int i = 0; i < 10; i++)
                    {
                        errors[i] = teacher.RunEpoch(features, outputs);
                    }
                    avgError = errors.Average();
                    if (prevError > avgError)
                    {
                        int trainError   = trainImgClassifier.Evaluate(trainData);
                        int testError    = trainImgClassifier.Evaluate(testData);
                        int testSetError = trainImgClassifier.Evaluate(testDataSet);
                        logger.logStr(String.Format("{0} {1} {2} {3} {4} #{5}", avgError, prevError, trainError, testError, testSetError, errorCount));
                        prevError = avgError;
                        //save best error
                        if (bestError > avgError)
                        {
                            bestError = avgError;
                            Accord.IO.Serializer.Save(network, dataPath + String.Format(@"\train-{0}.net", bow.NumberOfOutputs));
                        }
                        if (trainError + testError + testSetError == 0)
                        {
                            flgFound = true;
                            Accord.IO.Serializer.Save(network, dataPath + String.Format(@"\train-{0}.net", bow.NumberOfOutputs));
                            break;
                        }
                    }
                    else
                    {
                        logger.logStr(String.Format("{0}", avgError));
                        prevError = 10000.0;
                        errorCount++;
                    }
                    Application.DoEvents();
                }
                logger.logStr("Done " + bestError + " " + count);
            }
        }
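
trainData.GetOutputs(numOutput) is also not part of these examples. Since teacher.RunEpoch(features, outputs) needs one target vector per sample and the network ends in sigmoid units, a one-hot encoding over the label indices is the natural shape; the sketch below assumes exactly that, with GetLabelIndexs taken from the call sites above and everything else an assumption.

    // hypothetical member of the train-data class (presumably ImageTrainDataSet) used above
    public double[][] GetOutputs(int numOutput)
    {
        int[] labelIndexs = GetLabelIndexs();      // one class index per training image
        var outputs = new double[labelIndexs.Length][];
        for (int i = 0; i < labelIndexs.Length; i++)
        {
            outputs[i] = new double[numOutput];    // all zeros...
            outputs[i][labelIndexs[i]] = 1.0;      // ...except at the true class
        }
        return outputs;
    }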