public void TestMatToFileStorage()
{
    //create a matrix m with random values
    Mat m = new Mat(120, 240, DepthType.Cv8U, 1);
    using (ScalarArray low = new ScalarArray(0))
    using (ScalarArray high = new ScalarArray(255))
        CvInvoke.Randu(m, low, high);

    //Convert the random matrix m to yml format, useful for matrices that hold values such as calibration or homography results.
    String mStr;
    using (FileStorage fs = new FileStorage(".yml", FileStorage.Mode.Write | FileStorage.Mode.Memory))
    {
        fs.Write(m, "m");
        mStr = fs.ReleaseAndGetString();
    }

    //Treat the Mat as image data and convert it to png format.
    using (VectorOfByte bytes = new VectorOfByte())
    {
        CvInvoke.Imencode(".png", m, bytes);
        byte[] rawData = bytes.ToArray();
    }
}
/// <summary>
/// Stores algorithm parameters in a file storage
/// </summary>
/// <param name="algorithm">The algorithm whose parameters will be written.</param>
/// <param name="storage">The file storage to write the parameters to.</param>
public static void Write(this IAlgorithm algorithm, FileStorage storage)
{
    CvInvoke.cveAlgorithmWrite(algorithm.AlgorithmPtr, storage);
}
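A minimal sketch of how this extension might be combined with an in-memory FileStorage (the same pattern the tests below use) to capture an algorithm's parameters as a YAML string; the helper name AlgorithmToYamlString is illustrative and not part of the library.

public static String AlgorithmToYamlString(IAlgorithm algorithm)
{
    //open an in-memory yml storage, write the algorithm parameters into it,
    //then release the storage and return the resulting document text
    using (FileStorage fs = new FileStorage(".yml", FileStorage.Mode.Write | FileStorage.Mode.Memory))
    {
        algorithm.Write(fs);
        return fs.ReleaseAndGetString();
    }
}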
public void TestKNearest()
{
    int K = 10;
    int trainSampleCount = 100;

    #region Generate the training data and classes
    Matrix<float> trainData = new Matrix<float>(trainSampleCount, 2);
    Matrix<float> trainClasses = new Matrix<float>(trainSampleCount, 1);

    Image<Bgr, Byte> img = new Image<Bgr, byte>(500, 500);

    Matrix<float> sample = new Matrix<float>(1, 2);

    Matrix<float> trainData1 = trainData.GetRows(0, trainSampleCount >> 1, 1);
    trainData1.SetRandNormal(new MCvScalar(200), new MCvScalar(50));
    Matrix<float> trainData2 = trainData.GetRows(trainSampleCount >> 1, trainSampleCount, 1);
    trainData2.SetRandNormal(new MCvScalar(300), new MCvScalar(50));

    Matrix<float> trainClasses1 = trainClasses.GetRows(0, trainSampleCount >> 1, 1);
    trainClasses1.SetValue(1);
    Matrix<float> trainClasses2 = trainClasses.GetRows(trainSampleCount >> 1, trainSampleCount, 1);
    trainClasses2.SetValue(2);
    #endregion

    Matrix<float> results, neighborResponses;
    results = new Matrix<float>(sample.Rows, 1);
    neighborResponses = new Matrix<float>(sample.Rows, K);
    //dist = new Matrix<float>(sample.Rows, K);

    using (KNearest knn = new KNearest())
    {
        knn.DefaultK = K;
        knn.IsClassifier = true;
        knn.Train(trainData, MlEnum.DataLayoutType.RowSample, trainClasses);
        //ParamDef[] defs = knn.GetParams();
        //TODO: find out when knn.save will be implemented
        //knn.Save("knn.xml");

        for (int i = 0; i < img.Height; i++)
        {
            for (int j = 0; j < img.Width; j++)
            {
                sample.Data[0, 0] = j;
                sample.Data[0, 1] = i;

                // estimate the response and get the neighbors' labels
                float response = knn.Predict(sample);
                //knn.FindNearest(sample, K, results, null, neighborResponses, null);

                int accuracy = 0;
                // compute the number of neighbors representing the majority
                // (note: neighborResponses is only populated if the FindNearest call above is used)
                for (int k = 0; k < K; k++)
                {
                    if (neighborResponses.Data[0, k] == response)
                        accuracy++;
                }
                // highlight the pixel depending on the accuracy (or confidence)
                img[i, j] =
                    response == 1
                        ? (accuracy > 5 ? new Bgr(90, 0, 0) : new Bgr(90, 40, 0))
                        : (accuracy > 5 ? new Bgr(0, 90, 0) : new Bgr(40, 90, 0));
            }
        }

        String knnModelStr;
        //save stat model to string
        using (FileStorage fs = new FileStorage(".yml", FileStorage.Mode.Write | FileStorage.Mode.Memory))
        {
            knn.Write(fs);
            knnModelStr = fs.ReleaseAndGetString();
        }

        //load stat model from string
        using (FileStorage fs = new FileStorage(knnModelStr, FileStorage.Mode.Read | FileStorage.Mode.Memory))
        {
            KNearest knn2 = new KNearest();
            knn2.Read(fs.GetRoot());
        }
    }

    // display the original training samples
    for (int i = 0; i < (trainSampleCount >> 1); i++)
    {
        PointF p1 = new PointF(trainData1[i, 0], trainData1[i, 1]);
        img.Draw(new CircleF(p1, 2.0f), new Bgr(255, 100, 100), -1);
        PointF p2 = new PointF(trainData2[i, 0], trainData2[i, 1]);
        img.Draw(new CircleF(p2, 2.0f), new Bgr(100, 255, 100), -1);
    }
    //Emgu.CV.UI.ImageViewer.Show(img);
}
public void TestSVM()
{
    int trainSampleCount = 150;
    int sigma = 60;

    #region Generate the training data and classes
    Matrix<float> trainData = new Matrix<float>(trainSampleCount, 2);
    Matrix<float> trainClasses = new Matrix<float>(trainSampleCount, 1);

    Image<Bgr, Byte> img = new Image<Bgr, byte>(500, 500);

    Matrix<float> sample = new Matrix<float>(1, 2);

    Matrix<float> trainData1 = trainData.GetRows(0, trainSampleCount / 3, 1);
    trainData1.GetCols(0, 1).SetRandNormal(new MCvScalar(100), new MCvScalar(sigma));
    trainData1.GetCols(1, 2).SetRandNormal(new MCvScalar(300), new MCvScalar(sigma));
    Matrix<float> trainData2 = trainData.GetRows(trainSampleCount / 3, 2 * trainSampleCount / 3, 1);
    trainData2.SetRandNormal(new MCvScalar(400), new MCvScalar(sigma));
    Matrix<float> trainData3 = trainData.GetRows(2 * trainSampleCount / 3, trainSampleCount, 1);
    trainData3.GetCols(0, 1).SetRandNormal(new MCvScalar(300), new MCvScalar(sigma));
    trainData3.GetCols(1, 2).SetRandNormal(new MCvScalar(100), new MCvScalar(sigma));

    Matrix<float> trainClasses1 = trainClasses.GetRows(0, trainSampleCount / 3, 1);
    trainClasses1.SetValue(1);
    Matrix<float> trainClasses2 = trainClasses.GetRows(trainSampleCount / 3, 2 * trainSampleCount / 3, 1);
    trainClasses2.SetValue(2);
    Matrix<float> trainClasses3 = trainClasses.GetRows(2 * trainSampleCount / 3, trainSampleCount, 1);
    trainClasses3.SetValue(3);
    #endregion

    //using (SVM.Params p = new SVM.Params(MlEnum.SvmType.CSvc, MlEnum.SvmKernelType.Linear, 0, 1, 0, 1, 0, 0, null, new MCvTermCriteria(100, 1.0e-6)))
    using (SVM model = new SVM())
    using (Matrix<int> trainClassesInt = trainClasses.Convert<int>())
    using (TrainData td = new TrainData(trainData, MlEnum.DataLayoutType.RowSample, trainClassesInt))
    {
        model.Type = SVM.SvmType.CSvc;
        model.SetKernel(SVM.SvmKernelType.Inter);
        model.Degree = 0;
        model.Gamma = 1;
        model.Coef0 = 0;
        model.C = 1;
        model.Nu = 0;
        model.P = 0;
        model.TermCriteria = new MCvTermCriteria(100, 1.0e-6);

        //bool trained = model.TrainAuto(td, 5);
        model.Train(td);

#if !NETFX_CORE
        String fileName = "svmModel.xml";
        //String fileName = Path.Combine(Path.GetTempPath(), "svmModel.xml");
        model.Save(fileName);

        SVM model2 = new SVM();
        FileStorage fs = new FileStorage(fileName, FileStorage.Mode.Read);
        model2.Read(fs.GetFirstTopLevelNode());

        if (File.Exists(fileName))
            File.Delete(fileName);
#endif

        for (int i = 0; i < img.Height; i++)
        {
            for (int j = 0; j < img.Width; j++)
            {
                sample.Data[0, 0] = j;
                sample.Data[0, 1] = i;

                float response = model.Predict(sample);

                img[i, j] =
                    response == 1 ? new Bgr(90, 0, 0) :
                    response == 2 ? new Bgr(0, 90, 0) :
                    new Bgr(0, 0, 90);
            }
        }

        Mat supportVectors = model.GetSupportVectors();
        //TODO: find out how to draw the support vectors
        Image<Gray, float> pts = supportVectors.ToImage<Gray, float>();
        PointF[] vectors = new PointF[supportVectors.Rows];
        GCHandle handler = GCHandle.Alloc(vectors, GCHandleType.Pinned);
        using (Mat vMat = new Mat(supportVectors.Rows, supportVectors.Cols, DepthType.Cv32F, 1, handler.AddrOfPinnedObject(), supportVectors.Cols * 4))
        {
            supportVectors.CopyTo(vMat);
        }
        handler.Free();

        /*
        int c = model.GetSupportVectorCount();
        for (int i = 0; i < c; i++)
        {
            float[] v = model.GetSupportVector(i);
            PointF p1 = new PointF(v[0], v[1]);
            img.Draw(new CircleF(p1, 4), new Bgr(128, 128, 128), 2);
        }*/
    }

    // display the original training samples
    for (int i = 0; i < (trainSampleCount / 3); i++)
    {
        PointF p1 = new PointF(trainData1[i, 0], trainData1[i, 1]);
        img.Draw(new CircleF(p1, 2.0f), new Bgr(255, 100, 100), -1);
        PointF p2 = new PointF(trainData2[i, 0], trainData2[i, 1]);
        img.Draw(new CircleF(p2, 2.0f), new Bgr(100, 255, 100), -1);
        PointF p3 = new PointF(trainData3[i, 0], trainData3[i, 1]);
        img.Draw(new CircleF(p3, 2.0f), new Bgr(100, 100, 255), -1);
    }
    //Emgu.CV.UI.ImageViewer.Show(img);
}
/// <summary>
/// Create a native cv::FileStorage object from the given source (a file name, or the document text when the Memory flag is used).
/// </summary>
static internal extern IntPtr cveFileStorageCreate(IntPtr source, FileStorage.Mode flags, IntPtr encoding);
public void TestFileStorage2()
{
    Mat m = new Mat(40, 30, DepthType.Cv8U, 3);
    using (ScalarArray lower = new ScalarArray(new MCvScalar(0, 0, 0)))
    using (ScalarArray higher = new ScalarArray(new MCvScalar(255, 255, 255)))
        CvInvoke.Randu(m, lower, higher);

    int intValue = 10;
    float floatValue = 213.993f;
    double doubleValue = 32.314;

    //write a Mat and a few primitive values into an in-memory xml document
    using (FileStorage fs = new FileStorage(".xml", FileStorage.Mode.Write | FileStorage.Mode.Memory))
    {
        fs.Write(m, "m");
        fs.Write(intValue, "int");
        fs.Write(floatValue, "float");
        fs.Write(doubleValue, "double");
        string s = fs.ReleaseAndGetString();

        //read the values back from the xml string and verify they round-trip
        using (FileStorage fs2 = new FileStorage(s, FileStorage.Mode.Read | FileStorage.Mode.Memory))
        {
            using (FileNode node = fs2.GetFirstTopLevelNode())
            {
                Mat m2 = new Mat();
                node.ReadMat(m2);
                EmguAssert.IsTrue(m.Equals(m2));
            }

            using (FileNode node = fs2.GetNode("m"))
            {
                Mat m2 = new Mat();
                node.ReadMat(m2);
                EmguAssert.IsTrue(m.Equals(m2));
            }

            using (FileNode node = fs2.GetNode("int"))
            {
                EmguAssert.IsTrue(intValue.Equals(node.ReadInt()));
            }

            using (FileNode node = fs2.GetNode("float"))
            {
                EmguAssert.IsTrue(floatValue.Equals(node.ReadFloat()));
            }

            using (FileNode node = fs2.GetNode("double"))
            {
                EmguAssert.IsTrue(doubleValue.Equals(node.ReadDouble()));
            }
        }
    }
}
public void TestFileStorage1()
{
    //open an existing cascade definition file for reading
    FileStorage fs = new FileStorage("haarcascade_eye.xml", FileStorage.Mode.Read);
}
/// <summary>
/// Create a Cuda cascade classifier using the specified file storage
/// </summary>
/// <param name="fs">The file storage to create the classifier from</param>
public CudaCascadeClassifier(FileStorage fs)
{
    _ptr = CudaInvoke.cudaCascadeClassifierCreateFromFileStorage(fs);
}
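A minimal usage sketch for this constructor, assuming a cascade definition such as haarcascade_eye.xml is available on disk (the file name is borrowed from the test above); it only shows building the classifier from an opened storage, not running detection.

//open the cascade definition and build the CUDA classifier from it
using (FileStorage fs = new FileStorage("haarcascade_eye.xml", FileStorage.Mode.Read))
using (CudaCascadeClassifier classifier = new CudaCascadeClassifier(fs))
{
    //the classifier is now ready for GPU-based detection
}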
/// <summary>
/// Stores algorithm parameters in a file storage
/// </summary>
/// <param name="algorithm">The algorithm whose parameters will be written.</param>
/// <param name="storage">The file storage to write the parameters to.</param>
/// <param name="name">The name of the node under which the parameters are stored.</param>
public static void Write(this IAlgorithm algorithm, FileStorage storage, String name)
{
    using (CvString csName = new CvString(name))
        CvInvoke.cveAlgorithmWrite2(algorithm.AlgorithmPtr, storage, csName);
}
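A sketch of why this named overload can be useful: several algorithms can share a single storage document, each written under its own top-level node. The helper name and the node names "detector" and "matcher" are illustrative only, not part of the library.

public static String TwoAlgorithmsToYamlString(IAlgorithm first, IAlgorithm second)
{
    //write both parameter sets into one in-memory yml document,
    //each under its own named node so they can be located individually later
    using (FileStorage fs = new FileStorage(".yml", FileStorage.Mode.Write | FileStorage.Mode.Memory))
    {
        first.Write(fs, "detector");
        second.Write(fs, "matcher");
        return fs.ReleaseAndGetString();
    }
}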