public SaveImage()
{
    // Encode one source image at several JPEG quality levels, then reload
    // each compressed file and display all of them until a key is pressed.
    int[] qualities = { 0, 25, 50, 75, 100 };

    using (IplImage source = new IplImage(Const.Image16bit, LoadMode.Color))
    {
        // JPEG quality test: one output file per quality setting.
        foreach (int quality in qualities)
        {
            source.SaveImage(string.Format("q{0:D3}.jpg", quality), new JpegEncodingParam(quality));
        }

        IplImage[] reloaded = new IplImage[qualities.Length];
        CvWindow[] windows = new CvWindow[qualities.Length];
        try
        {
            // Reload each compressed file and show it in its own window.
            for (int idx = 0; idx < qualities.Length; idx++)
            {
                reloaded[idx] = new IplImage(string.Format("q{0:D3}.jpg", qualities[idx]), LoadMode.Color);
                windows[idx] = new CvWindow("quality " + qualities[idx], reloaded[idx]);
            }
            Cv.WaitKey();
        }
        finally
        {
            // Equivalent of the original nested usings: dispose windows and images.
            for (int idx = qualities.Length - 1; idx >= 0; idx--)
            {
                if (windows[idx] != null) windows[idx].Dispose();
                if (reloaded[idx] != null) reloaded[idx].Dispose();
            }
        }
    }
}
static void Main(string[] args)
{
    // Show live webcam frames until a key is pressed, then save the last
    // frame to "result.bmp". The CreateCameraCapture argument is the camera
    // index (normally starting at 0).
    using (var capture = Cv.CreateCameraCapture(0))
    {
        IplImage frame = null;

        // Request a 320 x 240 capture size.
        double w = 320, h = 240;
        Cv.SetCaptureProperty(capture, CaptureProperty.FrameWidth, w);
        Cv.SetCaptureProperty(capture, CaptureProperty.FrameHeight, h);

        // Keep displaying webcam frames until any key is pressed.
        while (Cv.WaitKey(1) == -1)
        {
            // Grab one frame from the camera. QueryFrame returns null when no
            // frame is available (camera unplugged / stream ended) — the
            // original code would crash on ShowImage/SaveImage in that case.
            IplImage grabbed = Cv.QueryFrame(capture);
            if (grabbed == null)
            {
                break;
            }
            frame = grabbed;

            // Creates (or refreshes) the "Capture" window with the frame.
            Cv.ShowImage("Capture", frame);
        }

        // Besides bmp, jpeg and png output are also supported.
        // Only save if at least one frame was actually captured.
        if (frame != null)
        {
            frame.SaveImage("result.bmp");
        }

        // Tear down the "Capture" window once finished.
        Cv.DestroyWindow("Capture");
    }
}
public Edge()
{
    // Run three edge detectors (Sobel, Laplacian, Canny) over the same
    // grayscale input, show the results, and save each one as a PNG.
    using (IplImage source = new IplImage(Const.ImageLenna, LoadMode.Color))
    using (IplImage grayscale = new IplImage(source.Size, BitDepth.U8, 1))
    using (IplImage scratch16 = new IplImage(source.Size, BitDepth.S16, 1))
    using (IplImage sobelResult = new IplImage(source.Size, BitDepth.U8, 1))
    using (IplImage laplaceResult = new IplImage(source.Size, BitDepth.U8, 1))
    using (IplImage cannyResult = new IplImage(source.Size, BitDepth.U8, 1))
    {
        // OpenCV loads color images with BGR channel ordering.
        source.CvtColor(grayscale, ColorConversion.BgrToGray);

        // Sobel: first-order x-derivative into the signed 16-bit scratch
        // image, then scale the absolute values back to 8-bit.
        Cv.Sobel(grayscale, scratch16, 1, 0, ApertureSize.Size3);
        Cv.ConvertScaleAbs(scratch16, sobelResult);

        // Laplacian: same 16-bit scratch then 8-bit conversion.
        Cv.Laplace(grayscale, scratch16);
        Cv.ConvertScaleAbs(scratch16, laplaceResult);

        // Canny with hysteresis thresholds 50 / 200.
        Cv.Canny(grayscale, cannyResult, 50, 200, ApertureSize.Size3);

        using (new CvWindow("src", source))
        using (new CvWindow("sobel", sobelResult))
        using (new CvWindow("laplace", laplaceResult))
        using (new CvWindow("canny", cannyResult))
        {
            CvWindow.WaitKey();
        }

        sobelResult.SaveImage("sobel.png");
        laplaceResult.SaveImage("laplace.png");
        cannyResult.SaveImage("canny.png");
    }
}
public Inpaint()
{
    // cvInpaint demo:
    // paint a mask over unwanted regions of the image (e.g. text) with the
    // mouse, then remove those regions via the inpainting algorithm.
    Console.WriteLine(
        "Hot keys: \n" +
        "\tESC - quit the program\n" +
        "\tr - restore the original image\n" +
        "\ti or ENTER - run inpainting algorithm\n" +
        "\t\t(before running it, paint something on the image)\n" +
        "\ts - save the original image, mask image, original+mask image and inpainted image to desktop."
    );

    // Load the original image.
    using (IplImage img0 = new IplImage(Const.ImageFruits, LoadMode.AnyDepth | LoadMode.AnyColor))
    {
        // Allocate the drawing image and the (single-channel) mask.
        using (IplImage img = img0.Clone())
        using (IplImage inpaintMask = new IplImage(img0.Size, BitDepth.U8, 1))
        // Allocate the output image for Inpaint.
        using (IplImage inpainted = img0.Clone())
        {
            inpainted.Zero();
            inpaintMask.Zero();
            using (CvWindow wImage = new CvWindow("image", WindowMode.AutoSize, img))
            {
                // Mouse handling: dragging with the left button paints a white
                // stroke on both the visible image and the mask.
                CvPoint prevPt = new CvPoint(-1, -1);
                wImage.OnMouseCallback += delegate(MouseEvent ev, int x, int y, MouseEvent flags)
                {
                    if (ev == MouseEvent.LButtonUp || (flags & MouseEvent.FlagLButton) == 0)
                    {
                        // Button released (or not held): end the current stroke.
                        prevPt = new CvPoint(-1, -1);
                    }
                    else if (ev == MouseEvent.LButtonDown)
                    {
                        prevPt = new CvPoint(x, y);
                    }
                    else if (ev == MouseEvent.MouseMove && (flags & MouseEvent.FlagLButton) != 0)
                    {
                        CvPoint pt = new CvPoint(x, y);
                        if (prevPt.X < 0)
                        {
                            prevPt = pt;
                        }
                        // 5px anti-aliased white line segment on mask and image.
                        inpaintMask.Line(prevPt, pt, CvColor.White, 5, LineType.AntiAlias, 0);
                        img.Line(prevPt, pt, CvColor.White, 5, LineType.AntiAlias, 0);
                        prevPt = pt;
                        wImage.ShowImage(img);
                    }
                };

                // Key loop: blocks in WaitKey until a key is pressed.
                for (; ; )
                {
                    switch ((char)CvWindow.WaitKey(0))
                    {
                        case (char)27: // quit on ESC
                            CvWindow.DestroyAllWindows();
                            return;
                        case 'r': // restore the original image
                            inpaintMask.Zero();
                            img0.Copy(img);
                            wImage.ShowImage(img);
                            break;
                        case 'i': // run Inpaint
                        case '\r':
                            // NOTE(review): a new "inpainted image" window is created on
                            // every press and never disposed — confirm this is intended
                            // for a throwaway sample.
                            CvWindow wInpaint = new CvWindow("inpainted image", WindowMode.AutoSize);
                            img.Inpaint(inpaintMask, inpainted, 3, InpaintMethod.Telea);
                            wInpaint.ShowImage(inpainted);
                            break;
                        case 's': // save all four images to the desktop
                            string desktop = Environment.GetFolderPath(Environment.SpecialFolder.Desktop);
                            img0.SaveImage(Path.Combine(desktop, "original.png"));
                            inpaintMask.SaveImage(Path.Combine(desktop, "mask.png"));
                            img.SaveImage(Path.Combine(desktop, "original+mask.png"));
                            inpainted.SaveImage(Path.Combine(desktop, "inpainted.png"));
                            break;
                    }
                }
            }
        }
    }
}
static void Main(string[] args)
{
    // Stitch six grayscale camera views (cameras 0-5) into one 3x2 mosaic
    // using hand-tuned pixel offsets, save the full-size and 1/12-scale
    // results, and display the scaled version.
    //
    // NOTE(review): `x` (and the reuse of `k` across loops) is not declared in
    // this method — it appears to be a class-level field assumed to start at 0;
    // verify against the enclosing class. The loaded images are never disposed,
    // which is tolerable only because the process exits right after.
    double brightMinVal = 0, brightMaxVal = 0;
    CvPoint brightMinLoc, brightMaxLoc;
    IplImage image5, image4, image3, image2, image1, image0, totalImage, output;
    CvSize size, resize;

    // Load the six camera frames (same timestamp, different cameras).
    image5 = Cv.LoadImage("C:\\Summer14\\Top Layer\\Camera_5_bmp\\Oct-28-12-14-26_773008-000005-109990.bmp", LoadMode.GrayScale);
    image4 = Cv.LoadImage("C:\\Summer14\\Top Layer\\Camera_4_bmp\\Oct-28-12-14-26_773008-000004-109990.bmp", LoadMode.GrayScale);
    image3 = Cv.LoadImage("C:\\Summer14\\Top Layer\\Camera_3_bmp\\Oct-28-12-14-26_773008-000003-109990.bmp", LoadMode.GrayScale);
    image2 = Cv.LoadImage("C:\\Summer14\\Top Layer\\Camera_2_bmp\\Oct-28-12-14-26_773008-000002-109990.bmp", LoadMode.GrayScale);
    image1 = Cv.LoadImage("C:\\Summer14\\Top Layer\\Camera_1_bmp\\Oct-28-12-14-26_773008-000001-109990.bmp", LoadMode.GrayScale);
    image0 = Cv.LoadImage("C:\\Summer14\\Top Layer\\Camera_0_bmp\\Oct-28-12-14-26_773008-000000-109990.bmp", LoadMode.GrayScale);

    // Mosaic canvas: 3 views wide, 2 views tall.
    size = new CvSize(image1.Width * 3, image1.Height * 2);
    totalImage = new IplImage(size, image1.Depth, image1.NChannels);

    //****************MAKING IMAGE 5 BRIGHTER
    // Linear rescale so camera 5 matches the brightness of the others.
    Cv.MinMaxLoc(image5, out brightMinVal, out brightMaxVal, out brightMinLoc, out brightMaxLoc);
    double scale = 2.9;//255/85 or 2.9
    double shift = -brightMinVal * scale;
    Cv.ConvertScale(image5, image5, scale, shift);
    //***************END MAKING IMAGE 5 BRIGHTER

    //***************STITCHING THE 6 CAMERA VIEWS TOGETHER
    // image3 -> top-left, shifted 60px left to hide overlap.
    for (int i = 0; i < image3.Height; i++)// i: vertical
        for (int j = 60; j < image3.Width; j++)
            totalImage[i, j - 60] = image3[i, j];

    //******************************
    // image2 -> bottom-left, 74px up and 50px left of its nominal slot.
    for (int i = image2.Height - 74; i < totalImage.Height - 74; i++)
    {
        for (int j = 0; j < image2.Width - 50; j++)
        {
            totalImage[i - 74, j] = image2[x, j + 50];
        }
        x++;
    }
    x = 0;

    // image1 -> top-middle.
    for (int i = 0; i < image1.Height; i++)
    {
        for (int j = image3.Width - 60; j < (totalImage.Width - image3.Width) - 200; j++)//60,200
        {
            totalImage[i, j - 60] = image1[i, x];
            x++;
        }
        x = 0;
    }
    x = 0;
    int k = 0;

    //*******************************
    // image0 -> bottom-middle.
    for (int i = image1.Height - 110; i < totalImage.Height - 110; i++)//120
    {
        for (int j = image2.Width - 95; j < (totalImage.Width - image2.Width) - 300; j++)//95,300
        {
            totalImage[i - 74, j] = image0[k, x];
            x++;
        }
        x = 0;
        k++;
    }
    x = 0;
    k = 0;

    //*******************************
    // image5 (brightened above) -> top-right, cropping its top 70 rows.
    for (int i = 0; i < image5.Height - 70; i++)
    {
        for (int j = (image3.Width + image1.Width)-200; j < totalImage.Width-200; j++)//200,200
        {
            totalImage[i, j - 130] = image5[i + 70, x];
            x++;
        }
        x = 0;
    }
    x = 0;

    //********************************
    // image4 -> bottom-right. NOTE(review): `k` still holds 0 from the reset
    // above — confirm the row offset here is intentional.
    for (int i = image5.Height - 170; i < totalImage.Height - 170; i++)
    {
        for (int j = (image2.Width + image0.Width) - 300; j < totalImage.Width - 300; j++)//300
        {
            totalImage[i, j] = image4[k, x];
            x++;
        }
        x = 0;
        k++;
    }
    //******************END STITCHING THE 6 CAMERA VIEWS TOGETHER

    // Downscale by 12x for on-screen display.
    resize = new CvSize(totalImage.Width / 12, totalImage.Height / 12);
    output = new IplImage(resize, totalImage.Depth, totalImage.NChannels);
    totalImage.Resize(output);

    //**************IMPORTANT SAVE!!!!!!!!!
    output.SaveImage("C:\\OpenCvSharp\\FallProgram2LessOverlap\\NewMosaic.bmp");//Saves the scaled down version.
    totalImage.SaveImage("C:\\OpenCvSharp\\FallProgram2LessOverlap\\TrueSizedMosaic.bmp");//Saves the original size
    //*************END IMPORTANT SAVE!!!!!!!!

    Cv.NamedWindow("Output", WindowMode.AutoSize);
    Cv.ShowImage("Output", output);
    Cv.WaitKey(0);
}
unsafe ImageDetail[] SaveImage(Target[] targets)
{
    // Write every detected face image from each target's base frame to disk
    // and return an ImageDetail for each file written.
    var saved = new List<ImageDetail>();

    foreach (Target target in targets)
    {
        Frame baseFrame = target.BaseFrame;
        DateTime stamp = DateTime.FromBinary(baseFrame.timeStamp);

        for (int faceIndex = 0; faceIndex < target.FaceCount; ++faceIndex)
        {
            // FaceData points to an array of native IplImage pointers; wrap
            // the pointer without taking ownership so the native side keeps
            // responsibility for freeing the memory.
            IntPtr* facePtr = ((IntPtr*)(target.FaceData)) + faceIndex;
            IplImage face = new IplImage(*facePtr);
            face.IsEnabledDispose = false;

            string facePath = GetFacePath(baseFrame, stamp, faceIndex);
            face.SaveImage(facePath);
            saved.Add(ImageDetail.FromPath(facePath));
        }
    }

    return saved.ToArray();
}
private static void SaveFrame(Frame frame)
{
    // Wrap the frame's native image (without taking ownership of the native
    // memory) and save it under:
    //   OutputPath/<cameraID 2-digit>/<date-derived folder>/<frame file name>
    IplImage image = new IplImage(frame.image);
    image.IsEnabledDispose = false;

    string fileName = frame.GetFileName();
    DateTime captured = DateTime.FromBinary(frame.timeStamp);

    string cameraRoot = Path.Combine(
        Properties.Settings.Default.OutputPath,
        frame.cameraID.ToString("d2"));
    string destDir = ImageClassifier.BuildDestDirectory(
        cameraRoot, captured, Properties.Settings.Default.BigImageDirectoryName);

    if (!Directory.Exists(destDir))
    {
        Directory.CreateDirectory(destDir);
    }

    image.SaveImage(Path.Combine(destDir, fileName));
}
static void Main(string[] args)
{
    // Brute-force template matching over a batch of UAV images: for each
    // input file, rotate (0..15 deg in 1.5-deg steps) and scale (divisors
    // 1..9) a sub-template, run normalized cross-correlation matching, keep
    // the best-scoring match, log the result, and save the annotated image.
    //
    // NOTE(review): `fileReader`, `i`, `theBestMax`, `iHolder`, and the
    // various image variables (uavSourceImg, tempImg, graySource, grayTemp,
    // tempDestImg, theRotatedSubTemp, graySourceHolder, grayTempHolder,
    // resizeDestImg) are not declared here — presumably class-level fields;
    // verify their initial values against the enclosing class. Also note the
    // Stopwatch is started each outer iteration but never Reset, so logged
    // times are cumulative — confirm intended.
    Stopwatch timer = new Stopwatch();
    IplImage bestMatch = new IplImage();
    CvMat mapMatrix;
    CvPoint2D32f center;
    double[] maxArray = new double[8];
    int numberOfFiles = 50;//Can change the number of input files if I want
    double angle = 0.0, scale = 1.0, bestAngle = 0.0;
    mapMatrix = new CvMat(2, 3, MatrixType.F32C1);

    while (fileReader < numberOfFiles)//Number of files to read in folder
    {
        // Load the next UAV source image and the big reference template.
        uavSourceImg = new IplImage("C:\\OpenCvSharp\\SummerPractice13RotateAndScale4DataInfo2\\Testing Different UAV Inputs\\RenameFolder\\Kamien " + fileReader + ".bmp", LoadMode.AnyColor);
        tempImg = new IplImage("C:\\OpenCvSharp\\SummerPractice1\\SummerDatabase\\BigGoogleTemplate2.jpg", LoadMode.AnyColor);//Big template test
        CvRect drawRectangle = new CvRect(200, 200, 300, 300);
        timer.Start();

        while (angle < 15.0)//Angle change while loop. Can change if necessary
        {
            while (i < 10)//Scaling while loop. Can change this if necessary.
            {
                //***********************DECLARATION
                //tempImg = theBigTemplate.GetSubImage(drawRectangle);
                CvSize destSize;
                graySource = new IplImage(uavSourceImg.Size, BitDepth.U8, 1);
                grayTemp = new IplImage(tempImg.Size, BitDepth.U8, 1);
                tempDestImg = new IplImage(grayTemp.Size, BitDepth.U8, 1);
                double minValue, maxValue;
                CvPoint minLoc, maxLoc;
                //**********************END DECLARATIONS

                //**********************CONVERT TO GRAY
                uavSourceImg.CvtColor(graySource, ColorConversion.BgrToGray);
                tempImg.CvtColor(grayTemp, ColorConversion.BgrToGray);
                //**********************END CONVERT TO GRAY

                //**********************ROTATION
                // Rotate the grayscale template about its center by `angle`.
                center = new CvPoint2D32f(grayTemp.Width * 0.5, grayTemp.Height * 0.5);
                grayTemp.Copy(tempDestImg);
                Cv._2DRotationMatrix(center, angle, scale, out mapMatrix);
                Cv.WarpAffine(grayTemp, tempDestImg, mapMatrix, Interpolation.FillOutliers, Cv.ScalarAll(255));
                //**********************END ROTATION

                // Crop a 300x300 window from the rotated template.
                theRotatedSubTemp = tempDestImg.GetSubImage(drawRectangle);

                //**********************RESIZE PART
                CvSize size = new CvSize(graySource.Width / i,
                    graySource.Height / i);//Manipulate the source image size
                CvSize size2 = new CvSize(theRotatedSubTemp.Width / i,
                    theRotatedSubTemp.Height / i);//theRotatedSubTemp test
                graySourceHolder = new IplImage(size, BitDepth.U8, 1);//1 for grayholder ORIGINAL
                grayTempHolder = new IplImage(size2, BitDepth.U8, 1);
                graySource.Resize(graySourceHolder);//ORIGINAL(resize the grayscale source image before template matching)
                theRotatedSubTemp.Resize(grayTempHolder);//TEST theRotatedSubTemp
                //*********************END RESIZE PART

                //*********************TEMPLATE MATCHING PART
                // Result size is (W - w + 1) x (H - h + 1) as required by MatchTemplate.
                destSize = new CvSize(graySourceHolder.Width - grayTempHolder.Width + 1, graySourceHolder.Height - grayTempHolder.Height + 1);
                //TEST RESIZE BEFORE WITH RESIZED TEMPLATE
                resizeDestImg = new IplImage(destSize, BitDepth.F32, 1);
                graySourceHolder.MatchTemplate(grayTempHolder, resizeDestImg, MatchTemplateMethod.CCoeffNormed);
                resizeDestImg.MinMaxLoc(out minValue, out maxValue, out minLoc, out maxLoc);
                graySourceHolder.Rectangle(maxLoc.X, maxLoc.Y, maxLoc.X + grayTempHolder.Width, maxLoc.Y + grayTempHolder.Height, CvColor.Red, 3);//Testing resize before with resized template
                //********************END TEMPLATE MATCHING PART

                Console.WriteLine("Divided by {0}, there was a {1} percent match", i, maxValue);

                // Track the best correlation score seen so far with its
                // angle/scale parameters and annotated image.
                if (maxValue > theBestMax)
                {
                    theBestMax = maxValue;
                    bestMatch = graySourceHolder.Clone();
                    bestAngle = angle;
                    iHolder = i;
                }

                Cv.NamedWindow("Rotating Template", WindowMode.AutoSize);
                Cv.NamedWindow("Sub Template", WindowMode.AutoSize);
                Cv.ShowImage("Rotating Template", tempDestImg);
                Cv.ShowImage("Sub Template", theRotatedSubTemp);
                //Cv.WaitKey(0);
                Cv.WaitKey(1);
                i++;

                // Release the per-iteration image buffers.
                Cv.ReleaseData(graySourceHolder);
                Cv.ReleaseData(grayTempHolder);
                Cv.ReleaseData(tempDestImg);
                Cv.ReleaseData(graySource);
                Cv.ReleaseData(grayTemp);
                Cv.ReleaseData(resizeDestImg);
                Cv.ReleaseData(theRotatedSubTemp);//Added for big template test
            }//End 3rd Inner while loop

            angle += 1.5; //This changes the angle tilt of the template. Can change if necessary.
            i = 1;//This changes the scale divider. Can change if necessary.
            Console.WriteLine("***************************************SHIFTING TEMPLATE\n");
            //Cv.DestroyAllWindows();
        }//End 2nd Inner while loop

        //***************SHOWING RESULT INFO
        timer.Stop();
        //This section writes the results to a text file
        using (System.IO.StreamWriter file = new System.IO.StreamWriter(@"C:\\OpenCvSharp\\SummerPractice12RotateAndScale3DataInfo\\Data Info Folder\\Best Match Data.txt", true))
        {
            file.WriteLine("{0}\t{1}\t\t{2}\t1/{3}", timer.ElapsedMilliseconds, theBestMax.ToString("#.###"), bestAngle, iHolder);
        }
        Console.WriteLine("---------RESULTS");
        //*************END SHOWING RESULT INFO

        //*************IMPORTANT SAVES
        bestMatch.SaveImage("C:\\OpenCvSharp\\SummerPractice12RotateAndScale3DataInfo\\Data Info Folder\\Best Match" + fileReader.ToString() + ".jpg");
        Console.WriteLine("ITEM SAVED IN DATA INFO FOLDER!");
        //*************END IMPORTANT SAVES

        Cv.WaitKey(1);
        fileReader++;

        // Reset the search state for the next input file.
        theBestMax = double.MinValue;
        iHolder = int.MinValue;
        bestAngle = double.MinValue;
        angle = 0.0;
        i = 1;
        Cv.ReleaseData(bestMatch);//TEST

        if(fileReader < numberOfFiles)
            Console.WriteLine("Switching Input...");
    }//End while loop
}
private static string SaveImage(IplImage image, DateTime captureTime)
{
    // Save the image to the capture-time-derived path, creating the
    // destination directory when needed, and return the path written.
    var path = GetImagePath(captureTime);

    // GetDirectoryName returns null for root paths and "" for bare file
    // names; the original code would then pass that into CreateDirectory and
    // throw. CreateDirectory is itself a no-op for existing directories, so
    // the separate Exists check was also redundant (and racy).
    var dir = Path.GetDirectoryName(path);
    if (!string.IsNullOrEmpty(dir))
    {
        Directory.CreateDirectory(dir);
    }

    image.SaveImage(path);
    return path;
}
private void btnOk_Click(object sender, EventArgs e)
{
    // OK button handler: validate the selected face picture and the drawn
    // face rectangle, save the face image plus its normalized training
    // variants, and persist the person's details.
    //
    // NOTE(review): `drawRectangle`, `FileSavePath`, `faceFeatureImagePath`,
    // `Program.searcher` and `perinfo` are defined outside this method —
    // verify their semantics against the enclosing form/class. The message
    // strings are user-facing Chinese text ("please select a face picture",
    // "please locate the face", "added successfully") and are left untouched.
    if (this.picTargetPerson.Image == null)
    {
        MessageBox.Show("请选定一张人脸图片");
        return;
    }
    if (drawRectangle == Rectangle.Empty)
    {
        MessageBox.Show("请定位人脸");
        return;
    }

    // Generate a unique file name, keeping the original extension.
    String oldFileName = this.picTargetPerson.Image.Tag as string;
    String fileName = System.Guid.NewGuid().ToString().ToUpper() + System.IO.Path.GetExtension(oldFileName);

    // Save the selected face image.
    OpenCvSharp.IplImage iplFace = BitmapConverter.ToIplImage((Bitmap)this.picTargetPerson.Image);
    string savePath = Path.Combine(FileSavePath, fileName);
    iplFace.SaveImage(savePath);

    // Normalization: produce training-ready variants of the face crop.
    OpenCvSharp.CvRect rect = new OpenCvSharp.CvRect(
        this.drawRectangle.X, this.drawRectangle.Y, this.drawRectangle.Width, this.drawRectangle.Height);
    OpenCvSharp.IplImage[] normalizedImages = Program.searcher.NormalizeImageForTraining(iplFace, rect);
    for (int i = 0; i < normalizedImages.Length; ++i)
    {
        // <guid>_<index:0000>.jpg alongside the feature images.
        string normalizedFaceName = string.Format("{0}_{1:d4}.jpg",
            System.IO.Path.GetFileNameWithoutExtension(fileName), i);
        string fullPath = System.IO.Path.Combine(faceFeatureImagePath, normalizedFaceName);
        normalizedImages[i].SaveImage(fullPath);
    }

    // Collect the form fields into a PersonInfo record.
    string id = txtId.Text.ToString();
    string name = txtName.Text.ToString();
    string sex = rabMan.Checked ? "男" : "女";
    int age = 0;
    int.TryParse(txtAge.Text, out age);
    string card = txtCard.Text.ToString();
    PersonInfo info = new PersonInfo();
    info.ID = id;
    info.Name = name;
    info.Sex = sex;
    info.Age = age;
    info.CardId = card;
    info.FileName = fileName;
    info.Similarity = 0;
    perinfo.WriteInfo(info);
    MessageBox.Show("添加成功");

    // Release the normalized images now that they are on disk.
    Array.ForEach(normalizedImages, ipl => ipl.Dispose());
}
private void button1_Click_2(object sender, EventArgs e)
{
    // Test handler: find the first .jpg in D:\pictures in hall, round-trip it
    // through Bitmap -> IplImage -> Bitmap, show it in the PictureBox, save
    // the IplImage copy to d:\iplimg.jpg, and return (only one file is ever
    // processed because of the early return).
    //
    // NOTE(review): `img1` is never disposed, so Bitmap.FromFile keeps the
    // source file locked until GC; `ipl1` wraps `ipl`'s native pointer
    // (CvPtr) rather than copying — confirm neither is disposed while the
    // other is in use.
    string[] files = Directory.GetFiles(@"D:\pictures in hall");
    foreach (string file in files)
    {
        // Skip everything that is not a .jpg.
        string ext = Path.GetExtension(file);
        if (ext != ".jpg")
            continue;

        Bitmap img1 = (Bitmap)Bitmap.FromFile(file);
        IplImage ipl = BitmapConverter.ToIplImage(img1);
        IplImage ipl1 = new IplImage(ipl.CvPtr);
        Bitmap bmp = ipl1.ToBitmap();
        this.pictureFace.Image = bmp;
        ipl1.SaveImage(@"d:\iplimg.jpg");
        return;

        // byte[] data = File.ReadAllBytes(file);
        // Frame f = new Frame();
        // f.data = IntPtr.Zero;// Marshal.AllocCoTaskMem(data.Length);
        // //Marshal.Copy(data, 0, f.data, data.Length);
        // f.dataLength = 0;// data.Length;
        // f.image = IntPtr.Zero;
        // f.timeStamp = 0;
        // f.searchRect = IntPtr.Zero;
        // f.fileName = Marshal.StringToCoTaskMemAnsi(file);
        //
        // bool group = NativeMethods.PreProcessFrame(ref f);
    }
}