Example #1
        public int Evaluate(ImageTrainDataSet trainData)
        {
            double[][] features    = trainData.GetFeature(bow, mask);
            int[]      labelIndexs = trainData.GetLabelIndexs();
            String[]   labels      = trainData.GetLabels();
            int        errorCount  = 0;

            for (int i = 0; i < trainData.Count(); i++)
            {
                // Feed the pre-computed feature vector through the trained network.
                double[] answer = network.Compute(features[i]);

                // The predicted class is the output neuron with the highest activation.
                int expected = labelIndexs[i];
                int actual;
                answer.Max(out actual);
                if (actual != expected)
                {
                    errorCount++;
                }
            }
            return errorCount;
        }
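
Example #1 shows the evaluation helper on its own; the bow, network and mask fields it reads are the ones assigned through Init in Example #2. A minimal usage sketch, assuming a classifier wired up that way (variable names are illustrative only):

        // Hypothetical usage: count misclassified images on a data set.
        var classifier = new BowImageClassifier();
        classifier.Init(bow, network, mask);
        int misclassified = classifier.Evaluate(testData);
        logger.logStr(String.Format("{0} of {1} images misclassified", misclassified, testData.Count()));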
Example #2
        private void trainToolStripMenuItem_Click_1(object sender, EventArgs e)
        {
            // Load an extra evaluation set from disk and let it cache its extracted features.
            ImageTrainDataSet testDataSet = FeatureDetector.GetAllTrainImageData(testDataPath, configure.trainFolders);
            testDataSet.flgCache = true;

            int[]    labelIndexs = trainData.GetLabelIndexs();
            String[] labels      = trainData.GetLabels();

            // Load the pre-trained bag-of-visual-words vocabulary and turn every
            // training image into a fixed-length feature vector.
            var        bow       = Accord.IO.Serializer.Load<BagOfVisualWords>(dataPath + String.Format(@"\train-{0}.bow", bowSize));
            double[][] features  = trainData.GetFeature(bow, mask);
            int        numOutput = trainData.GetNumOutput();
            var        function  = new SigmoidFunction();

            logger.logStr("Start Training");
            bool flgFound = false;
            int  count    = 0;

            // Restart training with fresh random weights until a network classifies
            // all three data sets perfectly, or the restart limit is reached.
            while ((flgFound == false) && (count < 100))
            {
                count++;

                // New sigmoid network: BoW histogram inputs, 20 hidden neurons, one output per class.
                var network = new ActivationNetwork(function, bow.NumberOfOutputs, 20, numOutput);
                new NguyenWidrow(network).Randomize();
                var teacher = new ParallelResilientBackpropagationLearning(network);

                BowImageClassifier trainImgClassifier = new BowImageClassifier();
                trainImgClassifier.Init(bow, network, mask);
                // Create the target output vectors and reset the error-tracking state.
                double[][] outputs    = trainData.GetOutputs(numOutput);
                double     avgError   = 10000.0;
                double     prevError  = avgError;
                double     bestError  = avgError;
                int        errorCount = 0;
                // Train until the error fails to improve three times in a row
                // or the average epoch error becomes negligible.
                while ((errorCount < 3) && (avgError > 0.00001))
                {
                    // Run 10 epochs at a time and average their errors to smooth out noise.
                    double[] errors = new double[10];
                    for (int i = 0; i < 10; i++)
                    {
                        errors[i] = teacher.RunEpoch(features, outputs);
                    }
                    avgError = errors.Average();
                    if (prevError > avgError)
                    {
                        // Error improved: count misclassifications on the train, test and extra sets.
                        int trainError   = trainImgClassifier.Evaluate(trainData);
                        int testError    = trainImgClassifier.Evaluate(testData);
                        int testSetError = trainImgClassifier.Evaluate(testDataSet);
                        logger.logStr(String.Format("{0} {1} {2} {3} {4} #{5}", avgError, prevError, trainError, testError, testSetError, errorCount));
                        prevError = avgError;
                        // Persist the network whenever it improves on the best error seen so far.
                        if (bestError > avgError)
                        {
                            bestError = avgError;
                            Accord.IO.Serializer.Save(network, dataPath + String.Format(@"\train-{0}.net", bow.NumberOfOutputs));
                        }
                        // Every image in all three sets was classified correctly: keep this network and stop.
                        if (trainError + testError + testSetError == 0)
                        {
                            flgFound = true;
                            Accord.IO.Serializer.Save(network, dataPath + String.Format(@"\train-{0}.net", bow.NumberOfOutputs));
                            break;
                        }
                    }
                    else
                    {
                        // Error got worse: log it, reset the improvement baseline and count the stall.
                        logger.logStr(String.Format("{0}", avgError));
                        prevError = 10000.0;
                        errorCount++;
                    }
                    // Keep the UI responsive during the long-running training loop.
                    Application.DoEvents();
                }
                logger.logStr("Done " + bestError + " " + count);
            }
        }
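
Example #2 saves the best-performing network with Accord.IO.Serializer.Save next to the .bow vocabulary it loaded. A minimal sketch of how both files might be loaded back later for evaluation, assuming the same dataPath, bowSize and mask are available (the Load<ActivationNetwork> call mirrors the Save above and is an assumption, not code from the example):

        // Hypothetical reload: restore the saved vocabulary and network, then evaluate.
        var bow     = Accord.IO.Serializer.Load<BagOfVisualWords>(dataPath + String.Format(@"\train-{0}.bow", bowSize));
        var network = Accord.IO.Serializer.Load<ActivationNetwork>(dataPath + String.Format(@"\train-{0}.net", bow.NumberOfOutputs));

        var classifier = new BowImageClassifier();
        classifier.Init(bow, network, mask);
        logger.logStr("test set errors: " + classifier.Evaluate(testData));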