private void btnTrain_Click(object sender, EventArgs e)
{
    // One 36x36 block covering the entire 36x36 window (6x6 cells) -> a single
    // fixed-length HOG descriptor per image.
    // NOTE(review): assumes the sample images are 36x36 — confirm, otherwise
    // hog.Compute returns a multi-window descriptor.
    HOGDescriptor hog = new HOGDescriptor(new Size(36, 36), new Size(36, 36), new Size(6, 6), new Size(6, 6));

    // Positive samples occupy rows [0, fsPeg.Length) with label 1.
    fsPeg = Directory.GetFiles(txtPosPath.Text);
    AddHogSamples(hog, fsPeg, 0, 1);

    // Negative samples go AFTER the positives with label 0.
    // BUG FIX: the original restarted the row index at 0 and overwrote every
    // positive sample in DataMatrix/AttrMatrix.
    fsNeg = Directory.GetFiles(txtNegPath.Text);
    AddHogSamples(hog, fsNeg, fsPeg.Length, 0);

    // Train a linear C-SVC on the collected descriptors and persist it.
    using (Emgu.CV.ML.SVM vSVM = new Emgu.CV.ML.SVM())
    using (TrainData td = new TrainData(DataMatrix, Emgu.CV.ML.MlEnum.DataLayoutType.RowSample, AttrMatrix))
    {
        vSVM.Type = Emgu.CV.ML.SVM.SvmType.CSvc;
        vSVM.SetKernel(Emgu.CV.ML.SVM.SvmKernelType.Linear);
        vSVM.TermCriteria = new MCvTermCriteria(1000, 0.1);

        // BUG FIX: the original never called Train — it saved an untrained model.
        vSVM.Train(td);
        vSVM.Save(txtFileName.Text);
    }
}

// Computes one HOG descriptor per file and writes it into DataMatrix starting
// at row rowOffset; AttrMatrix receives the class label for each of those rows.
private void AddHogSamples(HOGDescriptor hog, string[] files, int rowOffset, int label)
{
    for (int i = 0; i < files.Length; i++)
    {
        // BUG FIX: Directory.GetFiles already returns full paths; the original
        // prepended the directory a second time, producing invalid file names.
        using (Image<Bgr, byte> vImage = new Image<Bgr, byte>(files[i]))
        using (Image<Gray, byte> vGray = vImage.Convert<Gray, byte>())
        {
            float[] fAttr = hog.Compute(vGray);
            int row = rowOffset + i;
            for (int j = 0; j < fAttr.Length; j++)
            {
                DataMatrix[row, j] = fAttr[j];
            }
            AttrMatrix[row, 0] = label;
        }
    }
}
public void TestSVM()
{
    // Trains a 3-class SVM on three synthetic Gaussian clusters, round-trips the
    // model through an XML file, and paints a 500x500 image with the decision
    // regions plus the original training points.
    int trainSampleCount = 150;
    int sigma = 60;

    #region Generate the training data and classes
    Matrix<float> trainData = new Matrix<float>(trainSampleCount, 2);
    Matrix<float> trainClasses = new Matrix<float>(trainSampleCount, 1);
    Image<Bgr, Byte> img = new Image<Bgr, byte>(500, 500);
    Matrix<float> sample = new Matrix<float>(1, 2);

    // Cluster 1: centered near (100, 300).
    Matrix<float> trainData1 = trainData.GetRows(0, trainSampleCount / 3, 1);
    trainData1.GetCols(0, 1).SetRandNormal(new MCvScalar(100), new MCvScalar(sigma));
    trainData1.GetCols(1, 2).SetRandNormal(new MCvScalar(300), new MCvScalar(sigma));

    // Cluster 2: centered near (400, 400).
    Matrix<float> trainData2 = trainData.GetRows(trainSampleCount / 3, 2 * trainSampleCount / 3, 1);
    trainData2.SetRandNormal(new MCvScalar(400), new MCvScalar(sigma));

    // Cluster 3: centered near (300, 100).
    Matrix<float> trainData3 = trainData.GetRows(2 * trainSampleCount / 3, trainSampleCount, 1);
    trainData3.GetCols(0, 1).SetRandNormal(new MCvScalar(300), new MCvScalar(sigma));
    trainData3.GetCols(1, 2).SetRandNormal(new MCvScalar(100), new MCvScalar(sigma));

    // Labels 1, 2, 3 for the three thirds of the sample set.
    Matrix<float> trainClasses1 = trainClasses.GetRows(0, trainSampleCount / 3, 1);
    trainClasses1.SetValue(1);
    Matrix<float> trainClasses2 = trainClasses.GetRows(trainSampleCount / 3, 2 * trainSampleCount / 3, 1);
    trainClasses2.SetValue(2);
    Matrix<float> trainClasses3 = trainClasses.GetRows(2 * trainSampleCount / 3, trainSampleCount, 1);
    trainClasses3.SetValue(3);
    #endregion

    using (SVM model = new SVM())
    using (Matrix<int> trainClassesInt = trainClasses.Convert<int>())
    using (TrainData td = new TrainData(trainData, MlEnum.DataLayoutType.RowSample, trainClassesInt))
    {
        // C-SVC with a histogram-intersection kernel; unused kernel parameters
        // are set to the values the original test used.
        model.Type = SVM.SvmType.CSvc;
        model.SetKernel(SVM.SvmKernelType.Inter);
        model.Degree = 0;
        model.Gamma = 1;
        model.Coef0 = 0;
        model.C = 1;
        model.Nu = 0;
        model.P = 0;
        model.TermCriteria = new MCvTermCriteria(100, 1.0e-6);
        model.Train(td);

#if !NETFX_CORE
        // Exercise Save/Read by round-tripping the model through an XML file.
        // FIX: the reloaded model and the FileStorage were never disposed.
        String fileName = "svmModel.xml";
        model.Save(fileName);
        using (SVM model2 = new SVM())
        using (FileStorage fs = new FileStorage(fileName, FileStorage.Mode.Read))
        {
            model2.Read(fs.GetFirstTopLevelNode());
        }
        if (File.Exists(fileName))
            File.Delete(fileName);
#endif

        // Color each pixel of the canvas by the class the SVM predicts for it.
        for (int i = 0; i < img.Height; i++)
        {
            for (int j = 0; j < img.Width; j++)
            {
                sample.Data[0, 0] = j;
                sample.Data[0, 1] = i;
                float response = model.Predict(sample);
                img[i, j] =
                    response == 1 ? new Bgr(90, 0, 0) :
                    response == 2 ? new Bgr(0, 90, 0) :
                    new Bgr(0, 0, 90);
            }
        }

        // Copy the support vectors into a pinned managed PointF array.
        // FIX: the GCHandle is now freed even if the copy throws; the unused
        // (and leaked) ToImage<Gray, float>() conversion was removed.
        Mat supportVectors = model.GetSupportVectors();
        //TODO: find out how to draw the support vectors
        PointF[] vectors = new PointF[supportVectors.Rows];
        GCHandle handler = GCHandle.Alloc(vectors, GCHandleType.Pinned);
        try
        {
            using (Mat vMat = new Mat(supportVectors.Rows, supportVectors.Cols, DepthType.Cv32F, 1,
                       handler.AddrOfPinnedObject(), supportVectors.Cols * 4))
            {
                supportVectors.CopyTo(vMat);
            }
        }
        finally
        {
            handler.Free();
        }
    }

    // Overlay the original training samples in their cluster colors.
    for (int i = 0; i < (trainSampleCount / 3); i++)
    {
        PointF p1 = new PointF(trainData1[i, 0], trainData1[i, 1]);
        img.Draw(new CircleF(p1, 2.0f), new Bgr(255, 100, 100), -1);
        PointF p2 = new PointF(trainData2[i, 0], trainData2[i, 1]);
        img.Draw(new CircleF(p2, 2.0f), new Bgr(100, 255, 100), -1);
        PointF p3 = new PointF(trainData3[i, 0], trainData3[i, 1]);
        img.Draw(new CircleF(p3, 2.0f), new Bgr(100, 100, 255), -1);
    }

    //Emgu.CV.UI.ImageViewer.Show(img);
}
public void Evaluate(IEvolutionState state, Individual ind, int subpop, int threadnum)
{
    // Fitness = 100 - mean 5-fold cross-validation accuracy of a polynomial
    // SVM trained on superpixel features of images transformed by the
    // individual's GP tree.
    if (ind.Evaluated)
        return;

    int foldSize = NumOfImagesPerCategory * NumOfClasses;

    // --- Feature extraction: one fold per category directory -----------------
    var features = new int[5, foldSize, NumOfFeatures];
    var labels = new int[5, foldSize];

    for (int i = 0; i < 5; i++)
    {
        int counter = 0;
        var categoryDir = CategoryDirs[i];
        var subcategoryDirs = Directory.EnumerateDirectories(categoryDir).ToArray();
        for (int j = 0; j < NumOfClasses; j++)
        {
            var images = Directory.GetFiles(subcategoryDirs[j]);
            for (int k = 0; k < NumOfImagesPerCategory; k++)
            {
                // Load into the per-thread working buffers, then release the file image.
                using (var tempImage = new Image<Gray, Byte>(images[k]))
                {
                    tempImage.CopyTo(currentImage[threadnum]);
                    tempImage.CopyTo(originalImage[threadnum]);
                }

                // Run the evolved tree; it transforms currentImage[threadnum].
                ((GPIndividual)ind).Trees[0].Child.Eval(state, threadnum, Input, Stack, (GPIndividual)ind, this);

                int[] imageFeatures = ImageTransformer.GetSquareSuperpixelFeatures(currentImage[threadnum], SuperpixelSize);
                for (int x = 0; x < imageFeatures.Length; x++)
                {
                    features[i, counter, x] = imageFeatures[x];
                }
                // FIX: hoisted out of the feature loop — the original rewrote
                // the same label NumOfFeatures times per image.
                labels[i, counter] = j + 1;
                counter++;
            }
        }
    }

    // --- 5-fold cross-validation ---------------------------------------------
    var confMatI = new double[10, 10];
    double accuracy = 0;

    for (int x = 0; x < 5; x++)
    {
        using (var trainData = new Matrix<float>(foldSize * 4, NumOfFeatures))
        using (var trainClasses = new Matrix<int>(foldSize * 4, 1))
        using (var testData = new Matrix<float>(foldSize, NumOfFeatures))
        using (var testClasses = new Matrix<int>(foldSize, 1))
        using (var predictions = new Matrix<float>(foldSize, 1))
        {
            // Fold x is the test set; the other four folds form the training set.
            int imageCount = 0;
            for (int i = 0; i < 5; i++)
            {
                if (i != x)
                {
                    for (int j = 0; j < foldSize; j++)
                    {
                        for (int k = 0; k < NumOfFeatures; k++)
                        {
                            trainData[imageCount, k] = features[i, j, k];
                        }
                        // FIX: hoisted out of the feature loop (was written
                        // NumOfFeatures times per row).
                        trainClasses[imageCount, 0] = labels[i, j];
                        imageCount++;
                    }
                }
                else
                {
                    for (int j = 0; j < foldSize; j++)
                    {
                        for (int k = 0; k < NumOfFeatures; k++)
                        {
                            testData[j, k] = features[i, j, k];
                        }
                        testClasses[j, 0] = labels[i, j];
                    }
                }
            }

            // Polynomial C-SVC; FIX: model and TrainData are now disposed.
            using (var model = new Emgu.CV.ML.SVM())
            using (var trainData2 = new TrainData(trainData, Emgu.CV.ML.MlEnum.DataLayoutType.RowSample, trainClasses))
            {
                model.Type = Emgu.CV.ML.SVM.SvmType.CSvc;
                model.SetKernel(Emgu.CV.ML.SVM.SvmKernelType.Poly);
                model.TermCriteria = new MCvTermCriteria(10000, 0.001);
                model.Degree = 3;
                model.Gamma = 0.001;
                model.Coef0 = 0;
                model.C = 1000;
                model.Nu = 0.5;
                model.P = 0.1;
                model.Train(trainData2);
                model.Predict(testData, predictions);
            }

            // Score this fold and accumulate the confusion matrix.
            // Labels are 1-based, hence the -1 when indexing confMatI.
            int correctPredictions = 0;
            for (int i = 0; i < predictions.Rows; i++)
            {
                var predictedLabel = (int)predictions[i, 0];
                var trueLabel = testClasses[i, 0];
                if (predictedLabel == trueLabel)
                {
                    correctPredictions++;
                }
                confMatI[predictedLabel - 1, trueLabel - 1]++;
            }

            // NOTE(review): 500 looks like it should be the per-class count
            // across folds — confirm it matches NumOfImagesPerCategory/NumOfClasses.
            for (int i = 0; i < 10; i++)
            {
                for (int j = 0; j < 10; j++)
                {
                    confMat[i, j] = (confMatI[i, j] / 500) * 100;
                }
            }

            if (correctPredictions > 0)
            {
                accuracy += 100 * ((double)correctPredictions / (double)predictions.Rows);
            }
        }
    }

    // Standardized fitness: 0 is perfect, 100 is worst (mean accuracy over 5 folds).
    var f = (KozaFitness)ind.Fitness;
    f.SetStandardizedFitness(state, (float)(100 - (accuracy / 5)));
    ind.Evaluated = true;

    // Debug dump of transformed features.
    // NOTE(review): hard-coded path and the 1000/64 bounds (vs. foldSize and
    // NumOfFeatures) look like leftover debug code — confirm and remove.
    // FIX: writer is now disposed even if a write throws.
    using (var transFeatures = new StreamWriter(@"F:\Gesty\testy\transFeatures.csv"))
    {
        for (int i1 = 0; i1 < 1000; i1++)
        {
            for (int i2 = 0; i2 < 5; i2++)
            {
                var line = new StringBuilder();
                line.Append(labels[i2, i1].ToString() + ',');
                for (int i3 = 0; i3 < 64; i3++)
                {
                    line.Append(features[i2, i1, i3].ToString() + ',');
                }
                transFeatures.WriteLine(line.ToString().Trim(','));
            }
        }
    }
}