/// <summary>
/// Trains (or loads) a multi-layer perceptron classifier on the letter-recognition
/// database and prints the recognition rate on the train and test partitions.
/// </summary>
/// <param name="dataFilename">Path to the sample database read by ReadNumClassData.</param>
/// <param name="filenameToSave">If non-null, the trained classifier is saved to this path.</param>
/// <param name="filenameToLoad">If non-null, a stored classifier is loaded instead of training;
/// all samples are then treated as test data.</param>
private void BuildMlpClassifier(string dataFilename, string filenameToSave, string filenameToLoad)
{
    const int ClassCount = 26;
    CvMat data = null;
    CvMat trainData = null;
    CvMat responses = null;
    CvMat mlpResponse = null;
    CvMat layerSizes = null;
    int nsamplesAll = 0, ntrainSamples = 0;
    double trainHr = 0, testHr = 0;
    CvANN_MLP mlp = new CvANN_MLP();

    // try/finally guarantees the native matrices and the network are released
    // even on the early returns and on exceptions (the original success-path-only
    // disposal leaked them, and trainData was never disposed at all).
    try
    {
        try
        {
            ReadNumClassData(dataFilename, 16, out data, out responses);
        }
        catch
        {
            Console.WriteLine("Could not read the database {0}", dataFilename);
            return;
        }
        Console.WriteLine("The database {0} is loaded.", dataFilename);

        nsamplesAll = data.Rows;
        // 80/20 train/test split.
        ntrainSamples = (int)(nsamplesAll * 0.8);

        // Create or load MLP classifier
        if (filenameToLoad != null)
        {
            // load classifier from the specified file; every sample becomes test data
            mlp.Load(filenameToLoad);
            ntrainSamples = 0;
            if (mlp.GetLayerCount() == 0)
            {
                Console.WriteLine("Could not read the classifier {0}", filenameToLoad);
                return;
            }
            Console.WriteLine("The classifier {0} is loaded.", filenameToLoad);
        }
        else
        {
            // MLP does not support categorical variables explicitly.
            // So, instead of the output class label, we train on a binary
            // one-hot vector of ClassCount components; at the prediction
            // stage the MLP therefore yields a vector of per-class
            // "probabilities".
            using (CvMat newResponses = new CvMat(ntrainSamples, ClassCount, MatrixType.F32C1))
            {
                // 1. unroll the responses into one-hot rows
                Console.WriteLine("Unrolling the responses...");
                unsafe
                {
                    for (int i = 0; i < ntrainSamples; i++)
                    {
                        // labels are stored as ASCII letter codes ('A'..'Z')
                        int clsLabel = Cv.Round(responses.DataArraySingle[i]) - 'A';
                        float* bitVec = (float*)(newResponses.DataByte + i * newResponses.Step);
                        for (int j = 0; j < ClassCount; j++)
                        {
                            bitVec[j] = 0.0f;
                        }
                        bitVec[clsLabel] = 1.0f;
                    }
                }
                Cv.GetRows(data, out trainData, 0, ntrainSamples);

                // 2. train classifier; topology: inputs -> 100 -> 100 -> ClassCount
                int[] layerSizesData = { data.Cols, 100, 100, ClassCount };
                layerSizes = new CvMat(1, layerSizesData.Length, MatrixType.S32C1, layerSizesData);
                mlp.Create(layerSizes);
                Console.Write("Training the classifier (may take a few minutes)...");
                mlp.Train(
                    trainData, newResponses, null, null,
                    new CvANN_MLP_TrainParams(new CvTermCriteria(300, 0.01), MLPTrainingMethod.RPROP, 0.01)
                );
            }
            Console.WriteLine();
        }

        mlpResponse = new CvMat(1, ClassCount, MatrixType.F32C1);

        // compute prediction error on train and test data
        for (int i = 0; i < nsamplesAll; i++)
        {
            CvMat sample;
            CvPoint minLoc, maxLoc;
            Cv.GetRow(data, out sample, i);
            mlp.Predict(sample, mlpResponse);
            // predicted class = index of the strongest output component
            mlpResponse.MinMaxLoc(out minLoc, out maxLoc, null);
            int bestClass = maxLoc.X + 'A';
            int r = (Math.Abs((double)bestClass - responses.DataArraySingle[i]) < float.Epsilon) ? 1 : 0;
            if (i < ntrainSamples)
            {
                trainHr += r;
            }
            else
            {
                testHr += r;
            }
        }

        // Guard the divisions: with a loaded classifier ntrainSamples is 0 and the
        // unguarded division printed NaN; an all-train split would do the same for test.
        testHr = (nsamplesAll > ntrainSamples) ? testHr / (nsamplesAll - ntrainSamples) : 1.0;
        trainHr = (ntrainSamples > 0) ? trainHr / ntrainSamples : 1.0;
        Console.WriteLine("Recognition rate: train = {0:F1}%, test = {1:F1}%",
            trainHr * 100.0, testHr * 100.0);

        // Save classifier to file if needed
        if (filenameToSave != null)
        {
            mlp.Save(filenameToSave);
        }

        Console.Read();
    }
    finally
    {
        if (mlpResponse != null)
        {
            mlpResponse.Dispose();
        }
        if (trainData != null)
        {
            trainData.Dispose();
        }
        if (data != null)
        {
            data.Dispose();
        }
        if (responses != null)
        {
            responses.Dispose();
        }
        if (layerSizes != null)
        {
            layerSizes.Dispose();
        }
        mlp.Dispose();
    }
}
/// <summary>
/// Builds an MLP classifier for the letter-recognition sample: either trains a new
/// network on the given database or restores a saved one, then reports the
/// recognition rate measured on the train and test portions of the data.
/// </summary>
/// <param name="dataFilename">Path of the sample database to read.</param>
/// <param name="filenameToSave">Optional path to store the trained classifier.</param>
/// <param name="filenameToLoad">Optional path of a stored classifier to reuse.</param>
private void BuildMlpClassifier(string dataFilename, string filenameToSave, string filenameToLoad)
{
    const int ClassCount = 26;
    CvMat samples = null;
    CvMat trainSamples = null;
    CvMat labels = null;
    CvMat outputVec = null;
    CvMat layerSizeMat = null;
    int totalCount = 0, trainCount = 0;
    double trainHits = 0, testHits = 0;
    CvANN_MLP network = new CvANN_MLP();

    try
    {
        ReadNumClassData(dataFilename, 16, out samples, out labels);
    }
    catch
    {
        Console.WriteLine("Could not read the database {0}", dataFilename);
        return;
    }
    Console.WriteLine("The database {0} is loaded.", dataFilename);

    totalCount = samples.Rows;
    // First 80% of the rows are used for training, the rest for testing.
    trainCount = (int)(totalCount * 0.8);

    // Either train a fresh network or restore one from disk.
    if (filenameToLoad == null)
    {
        // The MLP has no explicit notion of categorical outputs, so each class
        // label is expanded into a one-hot row of ClassCount components;
        // prediction then yields a vector of per-class "probabilities".
        using (CvMat oneHot = new CvMat(trainCount, ClassCount, MatrixType.F32C1))
        {
            // 1. unroll the responses
            Console.WriteLine("Unrolling the responses...");
            unsafe
            {
                for (int row = 0; row < trainCount; row++)
                {
                    int classIndex = Cv.Round(labels.DataArraySingle[row]) - 'A';
                    float* vec = (float*)(oneHot.DataByte + row * oneHot.Step);
                    for (int col = 0; col < ClassCount; col++)
                    {
                        vec[col] = 0.0f;
                    }
                    vec[classIndex] = 1.0f;
                }
            }
            Cv.GetRows(samples, out trainSamples, 0, trainCount);

            // 2. train classifier
            int[] topology = { samples.Cols, 100, 100, ClassCount };
            layerSizeMat = new CvMat(1, topology.Length, MatrixType.S32C1, topology);
            network.Create(layerSizeMat);
            Console.Write("Training the classifier (may take a few minutes)...");
            CvANN_MLP_TrainParams trainParams =
                new CvANN_MLP_TrainParams(new CvTermCriteria(300, 0.01), MLPTrainingMethod.RPROP, 0.01);
            network.Train(trainSamples, oneHot, null, null, trainParams);
        }
        Console.WriteLine();
    }
    else
    {
        // Restore the classifier; every sample then counts as test data.
        network.Load(filenameToLoad);
        trainCount = 0;
        if (network.GetLayerCount() == 0)
        {
            Console.WriteLine("Could not read the classifier {0}", filenameToLoad);
            return;
        }
        Console.WriteLine("The classifier {0} is loaded.", filenameToLoad);
    }

    outputVec = new CvMat(1, ClassCount, MatrixType.F32C1);

    // Score every sample; the predicted class is the strongest output component.
    for (int i = 0; i < totalCount; i++)
    {
        CvMat row;
        CvPoint minLoc, maxLoc;
        Cv.GetRow(samples, out row, i);
        network.Predict(row, outputVec);
        outputVec.MinMaxLoc(out minLoc, out maxLoc, null);
        int predicted = maxLoc.X + 'A';
        int hit;
        if (Math.Abs((double)predicted - labels.DataArraySingle[i]) < float.Epsilon)
        {
            hit = 1;
        }
        else
        {
            hit = 0;
        }
        if (i < trainCount)
        {
            trainHits += hit;
        }
        else
        {
            testHits += hit;
        }
    }

    testHits /= (double)(totalCount - trainCount);
    trainHits /= (double)trainCount;
    Console.WriteLine("Recognition rate: train = {0:F1}%, test = {1:F1}%",
        trainHits * 100.0, testHits * 100.0);

    // Persist the classifier if a target path was supplied.
    if (filenameToSave != null)
    {
        network.Save(filenameToSave);
    }

    Console.Read();

    outputVec.Dispose();
    samples.Dispose();
    labels.Dispose();
    if (layerSizeMat != null)
    {
        layerSizeMat.Dispose();
    }
    network.Dispose();
}