private void OnClick_ROI実行(object sender, EventArgs e)
{
    // Copies the user-specified ROI of 背景 onto a black canvas and displays it.
    if (背景 != null)
    {
        IplImage buff = 背景.Clone();
        buff.Zero();

        // FIX: parse the "x,y" corner text boxes once. The original re-ran
        // int.Parse in every loop-condition evaluation and on every Set2D call.
        string[] roi1 = textBox_roi1.Text.Split(',');
        string[] roi2 = textBox_roi2.Text.Split(',');
        int x1 = int.Parse(roi1[0]);
        int y1 = int.Parse(roi1[1]);
        int width = int.Parse(roi2[0]) - x1;
        int height = int.Parse(roi2[1]) - y1;

        CvPoint roiPoint = new CvPoint(x1, y1);
        CvSize roiSize = new CvSize(width, height);
        Cv.SetImageROI(背景, new CvRect(roiPoint, roiSize));

        // Overlay onto the black image pixel by pixel (Get2D is ROI-relative
        // while the ROI is set; Set2D targets absolute coordinates in buff).
        for (int x = 0; x < width; x++)
        {
            for (int y = 0; y < height; y++)
            {
                CvScalar cs = Cv.Get2D(背景, y, x);
                Cv.Set2D(buff, y1 + y, x1 + x, cs);
            }
        }
        label_roi_info.Text = 画像情報を取得(背景, roiSize);
        pictureBoxIpl1.ImageIpl = buff;
        Cv.ResetImageROI(背景);
        buff.Dispose();
    }
}
/// <summary>
/// Renders a guide overlay (center dot, the two radius circles, and the
/// zero-angle line) onto a scaled copy of src.
/// </summary>
/// <param name="src">Source image; may be null.</param>
/// <param name="guide">Receives the rendered guide image, or null on failure.</param>
/// <param name="originalImageWidth">Width of the output guide image.</param>
/// <returns>false when src is null or the fill flag is not set; true otherwise.</returns>
public bool DrawGuideline(IplImage src, out IplImage guide, int originalImageWidth)
{
    guide = null;
    if (src == null || !_isFill)
    {
        return(false);
    }
    // Guide keeps the input aspect ratio at the requested width.
    guide = new IplImage(originalImageWidth, (int)((double)originalImageWidth / _inputSize.Width * _inputSize.Height), BitDepth.U8, 3);
    IplImage srcTemp = src.Clone();
    // BUG FIX: Math.Min(int,int) / Math.Min(int,int) was integer division, so
    // the fractional part of the scale factor was lost before it reached the
    // double. Cast the numerator first so the ratio is computed in double.
    double ratio = (double)Math.Min(src.Width, src.Height) / Math.Min(guide.Width, guide.Height);
    // Markers drawn at source resolution, line widths scaled by the ratio.
    Cv.Circle(srcTemp, _center, ((int)ratio * 3), new CvScalar(0, 255, 0), -1);
    Cv.Circle(srcTemp, _center, (int)_r1, new CvScalar(255, 0, 0), ((int)ratio * 2));
    Cv.Circle(srcTemp, _center, (int)_r2, new CvScalar(0, 0, 255), ((int)ratio * 2));
    // Zero-angle guide line from the inner to the outer edge.
    CvPoint2D32f p1 = ConvertPolar(0, 0);
    CvPoint2D32f p2 = ConvertPolar(0, _outputSize.Height);
    Cv.Line(srcTemp, p1, p2, new CvScalar(0, 255, 0), ((int)ratio * 2));
    srcTemp.Resize(guide, Interpolation.Cubic);
    srcTemp.Dispose();
    return(true);
}
/// <summary>
/// Resizes Face to width x height and copies its pixel bytes into a float array.
/// </summary>
public static float[] ResizeIplTo(IplImage Face, int width, int height)
{
    IplImage smallerFace = new IplImage(new OpenCvSharp.CvSize(width, height), Face.Depth, Face.NChannels);
    Face.Resize(smallerFace, Interpolation.Linear);
    unsafe
    {
        // Raw pointer into the resized image's pixel buffer.
        byte * smallFaceData = smallerFace.ImageDataPtr;
        // NOTE(review): the array is sized width*height*channels*bytesPerPixel,
        // but the loop below only walks j over [0, Width) per row while
        // indexing with WidthStep (which already spans all channels plus row
        // padding) — so for multi-channel images only the first Width bytes of
        // each row are copied and the remainder stays 0. Looks unintended;
        // TODO confirm against callers (may only ever receive 1-channel input).
        float[] currentFace = new float[width * height * smallerFace.NChannels * BytesPerPixel(Face.Depth)];
        for (int i = 0; i < smallerFace.Height; i++)
        {
            for (int j = 0; j < smallerFace.Width; j++)
            {
                currentFace[i * smallerFace.WidthStep + j] = (float)smallFaceData[i * smallerFace.WidthStep + j];
            }
        }
        smallerFace.Dispose();
        return(currentFace);
    }
}
public void Dispose()
{
    // Nothing to release when the detector was never created.
    if (FindFace == null)
    {
        return;
    }
    FindFace.Dispose();
}
private void Form2_FormClosing(object sender, FormClosingEventArgs e)
{
    // BUG FIX: the original called Cv.ReleaseImage(scr_laptopcam) BEFORE the
    // null check (throws when the form closes with no image loaded) and then
    // Dispose()d the same image, releasing it a second time. A single
    // null-guarded Dispose() frees the native image.
    if (scr_laptopcam != null)
    {
        scr_laptopcam.Dispose();
        scr_laptopcam = null;
    }
}
private void P1B03_RORDER_DOC_FormClosing(object sender, FormClosingEventArgs e)
{
    // BUG FIX: the original called Cv.ReleaseImage(src) BEFORE the null check
    // (throws when the form closes with no image loaded) and then Dispose()d
    // the same image, releasing it a second time. A single null-guarded
    // Dispose() frees the native image.
    if (src != null)
    {
        src.Dispose();
        src = null;
    }
}
public MainWindow()
{
    InitializeComponent();

    // Delegate wiring for script execution requested by the ViewModel.
    DataContextChanged += (o, e) =>
    {
        ViewModel vm = DataContext as ViewModel;
        if (vm != null)
        {
            // Run a Python statement on the UI thread's embedded console.
            vm._ExecuteScript += (sender, arg) =>
            {
                Dispatcher.Invoke(new Action(() =>
                {
                    pythonConsole.Pad.Console.RunStatements(arg.cmd);
                }));
            };
            // Grab the current camera frame, draw the requested overlay, and
            // push the result into the WPF image control.
            vm._DrawCameraBitmap += (sender, arg) =>
            {
                Dispatcher.BeginInvoke(new Action(() =>
                {
                    IplImage img = vm.VisionControl.GetCameraImage();
                    DrawCameraViewEventArgs a = arg as DrawCameraViewEventArgs;
                    if (a._draw == 1)
                    {
                        // NOTE(review): CvRect is (x, y, width, height), so _x2/_y2
                        // are interpreted here as a size, not a second corner —
                        // confirm against the event producer.
                        CvRect rect = new CvRect(a._x1, a._y1, a._x2, a._y2);
                        img.DrawRect(rect, new CvScalar(255, 0, 0), 2);
                    }
                    else if (a._draw == 2)
                    {
                        // Crosshair centered at (_x1, _y1) with extents _x2 x _y2.
                        int x1 = a._x1 - a._x2 / 2;
                        int x2 = a._x1 + a._x2 / 2;
                        int y1 = a._y1 - a._y2 / 2;
                        int y2 = a._y1 + a._y2 / 2;
                        img.DrawLine(x1, a._y1, x2, a._y1, new CvScalar(255, 0, 0), 2);
                        img.DrawLine(a._x1, y1, a._x1, y2, new CvScalar(255, 0, 0), 2);
                    }
                    if (VM.CenterLine == true)
                    {
                        // Fixed center lines; assumes a 640x640 frame — TODO confirm.
                        img.DrawLine(0, 320, 640, 320, new CvScalar(255, 0, 0, 0), 2);
                        img.DrawLine(320, 0, 320, 640, new CvScalar(255, 0, 0, 0), 2);
                    }
                    WriteableBitmapConverter.ToWriteableBitmap(img, _col_wb);
                    cameraImage.Source = _col_wb;
                    img.Dispose();
                    //cameraImage.Source = vm.VisionControl.GetCameraBitmap();
                }));
            };
        }
    };
    pythonConsole.Pad.Host.ConsoleCreated += new PythonConsoleControl.ConsoleCreatedEventHandler(Host_ConsoleCreated);
}
private void 출고정보_QR읽기_FormClosing(object sender, FormClosingEventArgs e)
{
    // Camera cleanup.
    // BUG FIX: the original called Cv.ReleaseImage(src) BEFORE the null check
    // (throws when the form closes with no image loaded) and then Dispose()d
    // the same image, releasing it a second time. A single null-guarded
    // Dispose() frees the native image.
    if (src != null)
    {
        src.Dispose();
        src = null;
    }
}
private void Form1_FormClosing(object sender, FormClosingEventArgs e)
{
    // The image is dynamically allocated, so it must be released on close.
    // BUG FIX: the original called Cv.ReleaseImage(src) BEFORE the null check
    // (throws when the form closes with no image loaded) and then Dispose()d
    // the same image, releasing it a second time. A single null-guarded
    // Dispose() frees the native image.
    if (src != null)
    {
        src.Dispose();
        src = null;
    }
}
private void RecvThread()
{
    // Frame-pump loop: repeatedly fetches a frame, publishes a copy to _src
    // under the lock, and recomputes the input FPS roughly once per second.
    //using (var capture = new CvCapture("rtsp://*****:*****@10.2.14.51/video1"))
    {
        Stopwatch sw = new Stopwatch();
        int count = 0;
        sw.Start();
        while (true)
        {
            // Fetch the next frame image (file stand-in for the disabled RTSP capture).
            var image = Cv.LoadImage("test.png");
            //capture.GrabFrame();
            //var image = capture.RetrieveFrame();
            if (image == null)
            {
                break;
            }
            lock (lockobj)
            {
                // Dispose the previously published frame before replacing it.
                if (_src != null)
                {
                    _src.Dispose();
                }
                //_src = new IplImage(500,500, BitDepth.U8, 3);
                //image.Resize(_src);
                _src = image.Clone();
            }
            image.Dispose();
            count++;
            // Update the FPS estimate about once per second.
            if (sw.ElapsedMilliseconds >= 1000)
            {
                sw.Stop();
                _inputFPS = count * (1000.0 / sw.ElapsedMilliseconds);
                count = 0;
                sw.Restart();
            }
        }
    }
}
private void Form1_FormClosing(object sender, FormClosingEventArgs e)
{
    // Persist UI state before the form closes.
    ListSave();
    LogSave();
    // BUG FIX: the original called Cv.ReleaseImage(src) BEFORE the null check
    // (throws when the form closes with no image loaded) and then Dispose()d
    // the same image, releasing it a second time. A single null-guarded
    // Dispose() frees the native image memory.
    if (src != null)
    {
        src.Dispose();
        src = null;
    }
}
/// <summary>
/// Unwraps the source image into a panorama of the requested width using a
/// cached per-pixel polar mapping (presumably a fisheye/donut unwrap — TODO confirm).
/// </summary>
/// <param name="src">Source image; may be null.</param>
/// <param name="dst">Receives the unwrapped image, or null on failure.</param>
/// <param name="panoramaImageWidth">Width of the output panorama.</param>
/// <returns>false when src is null; true otherwise.</returns>
public bool Undistortion(IplImage src, out IplImage dst, int panoramaImageWidth)
{
    dst = null;
    if (src == null)
    {
        return(false);
    }
    // Output height follows the ring thickness (_r2 - _r1) relative to _r2,
    // clamped to at least 1 row.
    IplImage dstTemp = new IplImage(panoramaImageWidth, (int)Math.Max((panoramaImageWidth / 4 * (_r2 - _r1) / _r2), 1), BitDepth.U8, 3);
    bool isUpdate = false;
    // Invalidate the cached coordinate table when the geometry changed since
    // the last call (assignment inside the condition is intentional).
    if (isUpdate = (_inputSize != src.Size || _outputSize != dstTemp.Size))
    {
        _inputSize = src.Size;
        _outputSize = dstTemp.Size;
        _pointDictionary.Clear();
    }
    object lockobj = new object();
    ParallelOptions opt = new ParallelOptions() { MaxDegreeOfParallelism = Environment.ProcessorCount };
    // One destination row per parallel iteration; each worker owns its own
    // 1x1 sampling patch.
    Parallel.For(0, dstTemp.Height, opt, i =>
    {
        IplImage patch = new IplImage(1, 1, BitDepth.U8, 3);
        for (int x = 0; x < dstTemp.Width; ++x)
        {
            if (isUpdate || _isUpdate)
            {
                // The coordinate cache is shared across workers; writes are locked.
                lock (lockobj)
                {
                    _pointDictionary[i * dstTemp.Width + x] = ConvertPolar(x, i);
                }
            }
            // Sample the source at the mapped sub-pixel location and store it
            // mirrored (both axes flipped) into the destination.
            Cv.GetRectSubPix(src, patch, _pointDictionary[i * dstTemp.Width + x]);
            dstTemp.Set2D(dstTemp.Height - i - 1, dstTemp.Width - x - 1, patch.Get2D(0, 0));
        }
        patch.Dispose();
    });
    _isUpdate = false;
    dst = dstTemp.Clone();
    dstTemp.Dispose();
    return(true);
}
/// <summary>
/// Converts sourceImage (BGR) to a single-channel grayscale image stored in
/// this.image_gray.
/// </summary>
public void Image_Binary_Gray(IplImage sourceImage)
{
    this.image_gray = Cv.CreateImage(Cv.GetSize(sourceImage), BitDepth.U8, 1);
    // FIX: convert directly into image_gray. The original converted into a
    // temporary image and then Cv.Copy'd it over — an extra allocation and a
    // full-frame copy with no effect on the result.
    Cv.CvtColor(sourceImage, this.image_gray, ColorConversion.BgrToGray);
}
private void OnClick_検査対象(object sender, EventArgs e)
{
    // Lets the user pick the inspection-target image, loads it as grayscale,
    // and displays it (composited with the mask image when one is loaded).
    OpenFileDialog dialog = new OpenFileDialog()
    {
        Multiselect = false, // single selection only
        Filter =             // file-type filter
            "画像ファイル|*.bmp;*.gif;*.jpg;*.png|全てのファイル|*.*",
    };
    // Show the dialog.
    DialogResult result = dialog.ShowDialog();
    if (result == DialogResult.OK)
    {
        // Put the file name in the title bar.
        this.Text = dialog.SafeFileName;
        // Release the previously loaded images before replacing them.
        if (検査対象 != null)
        {
            検査対象.Dispose();
        }
        if (背景 != null)
        {
            背景.Dispose();
        }
        検査対象 = Cv.LoadImage(dialog.FileName, LoadMode.GrayScale);
        背景 = 検査対象.Clone();
        if (マスク画像 != null && 検査対象 != null)
        {
            // BUG FIX: the original assigned 合成画像 = 検査対象.Clone() and
            // immediately overwrote it with 画像合成(...), leaking the clone.
            // Also release the fresh clone held by 背景 before replacing it.
            背景.Dispose();
            合成画像 = 画像合成(検査対象, マスク画像);
            背景 = 合成画像;
        }
        //評価画面.Instance.Show();
        pictureBoxIpl1.ImageIpl = 背景;
        System.Diagnostics.Debug.WriteLine("検査対象画像の平均=" + 検査対象.Avg().Val0.ToString("f"));
    }
}
/// <summary>
/// Returns the captured image as an RGB bitmap.
/// </summary>
/// <returns>The shared WriteableBitmap holding the converted frame.</returns>
public WriteableBitmap GetCameraBitmap()
{
    // Convert the captured frame to RGB, then copy it into the cached bitmap.
    Cv2.CvtColor(_mat, _col_mat, ColorConversion.GrayToRgb);
    using (IplImage img = _col_mat.ToIplImage())
    {
        WriteableBitmapConverter.ToWriteableBitmap(img, _col_wb);
    }
    return _col_wb;
}
/// <summary>
/// Returns an image matching the requested size, depth, and channel count,
/// reusing <paramref name="output"/> when it already matches and otherwise
/// disposing it (if non-null) and allocating a fresh one.
/// </summary>
public static IplImage EnsureImageFormat(IplImage output, Size size, IplDepth depth, int channels)
{
    bool matches = output != null &&
                   output.Size == size &&
                   output.Depth == depth &&
                   output.Channels == channels;
    if (matches)
    {
        return output;
    }
    // Format mismatch (or no image yet): release the old buffer and reallocate.
    if (output != null)
    {
        output.Dispose();
    }
    return new IplImage(size, depth, channels);
}
//* MODE : DOT *//
/// <summary>
/// Builds a halftone-style "dot" rendering of srcImg: binarize, sample the
/// image on a grid, draw a white dot on a black canvas wherever the sampled
/// pixel is black, then smooth and shrink the result.
/// Note: srcImg itself is thresholded in place (pre-existing behavior).
/// </summary>
private IplImage mode_Dot(IplImage srcImg)
{
    IplImage srcImage = srcImg;
    srcImage.Threshold(srcImage, 150, 255, ThresholdType.Otsu);

    // Black canvas the dots are drawn onto.
    IplImage dotImage = Cv.CreateImage(Cv.GetSize(srcImage), (BitDepth)CvConst.IPL_DEPTH_8U, 1);
    Cv.Set(dotImage, new CvScalar(0, 0, 0));

    // Grid pitch scales with the shorter image side.
    int ratio;
    if (srcImg.Height > srcImg.Width)
    {
        ratio = srcImg.Width;
    }
    else
    {
        ratio = srcImg.Height;
    }

    // BUG FIX: ratio / 40 is 0 for images shorter than 40 px, which made the
    // sampling loops below never advance (infinite loop). Clamp the step to >= 1.
    int step = Math.Max(1, ratio / 40);

    // Draw a filled white circle wherever the binarized source pixel is black.
    double r;
    for (int Y = 0; Y < srcImg.Height; Y += step)
    {
        for (int X = 0; X < srcImg.Width; X += step)
        {
            r = Cv.GetReal2D(srcImg, Y, X);
            if (r == 0)
            {
                Cv.DrawCircle(dotImage, new CvPoint(X, Y), (int)(0.008 * ratio), Cv.RGB(255, 255, 255), Cv.FILLED);
            }
        }
    }

    // BUG FIX: the Gaussian smooth originally ran AFTER the downscale and its
    // result was immediately discarded by Dispose(). Smooth before resizing so
    // it actually contributes (anti-aliases the dots before the 1/10 shrink).
    dotImage.Smooth(dotImage, SmoothType.Gaussian);

    // Shrink the dot image to a tenth of its size.
    IplImage dst_dotImgae = new IplImage(dotImage.Width / 10, dotImage.Height / 10, dotImage.Depth, dotImage.NChannels);
    dotImage.Resize(dst_dotImgae);
    dotImage.Dispose();
    return(dst_dotImgae);
}
// Save the picture.
private void 저장ToolStripMenuItem_Click(object sender, EventArgs e)
{
    // Saves the currently displayed image back over the opened file as JPEG.
    string fullPathName = openFileDialog1.FileName;
    // BUG FIX: guard against src never having been loaded before disposing it.
    if (src != null)
    {
        src.Dispose();
    }
    // Overwrite: remove the existing file first, then write the new one.
    // (The original comment claimed this created a directory — it deletes a file.)
    if (System.IO.File.Exists(fullPathName))
    {
        System.IO.File.Delete(fullPathName);
    }
    pictureBoxIpl1.Image.Save(fullPathName, System.Drawing.Imaging.ImageFormat.Jpeg);
}
/// <summary>
/// Crops src to the bounding rectangle of the first contour found in its
/// (doubly) thresholded copy. Returns src unchanged when no contour is found.
/// </summary>
static IplImage GetCountours(IplImage src)
{
    CvMemStorage mem = Cv.CreateMemStorage(0);
    CvSeq <CvPoint> firstContour = null;
    // BUG FIX: the original reassigned dst to a second thresholded image,
    // leaking the first one. Threshold in two explicit steps and dispose both.
    IplImage firstPass = GetThresholdImage(src);
    IplImage dst = GetThresholdImage(firstPass);
    firstPass.Dispose();
    int count = dst.FindContours(mem, out firstContour);
    //src.DrawContours(firstContour, CvColor.Green, CvColor.Blue, 2);
    //src.DrawRect(firstContour.BoundingRect(), CvColor.Red);
    // BUG FIX: FindContours can find nothing, leaving firstContour null;
    // guard before dereferencing it.
    if (firstContour != null)
    {
        src = src.GetSubImage(firstContour.BoundingRect());
        firstContour.Dispose();
    }
    mem.Dispose();
    dst.Dispose();
    return(src);
}
/// <summary>
/// OpenCV image processing: template matching of tpl against src, storing the
/// match center and score (percent) into _visionPos.
/// </summary>
/// <param name="src">Image data</param>
/// <param name="tpl">Template data</param>
public void Cv_Execute(Mat src, IplImage tpl)
{
    CvPoint minPoint = new CvPoint();
    CvPoint maxPoint = new CvPoint();
    double min_val = 0.0, max_val = 0.0;
    IplImage img = src.ToIplImage();
    // Pattern matching: the result map is (W - w + 1) x (H - h + 1).
    CvMat result = new CvMat(img.Height - tpl.Height + 1, img.Width - tpl.Width + 1, MatrixType.F32C1);
    Cv.MatchTemplate(img, tpl, result, MatchTemplateMethod.CCoeffNormed);
    // Locate the best match.
    Cv.MinMaxLoc(result, out min_val, out max_val, out minPoint, out maxPoint);
    // BUG FIX: the result matrix was never disposed, leaking native memory on
    // every call.
    result.Dispose();
    img.Dispose();
    // Store the result: center of the matched region plus score in percent.
    _visionPos[0] = maxPoint.X + (tpl.Width / 2);
    _visionPos[1] = maxPoint.Y + (tpl.Height / 2);
    _visionPos[2] = 0;
    _visionPos[3] = 0;
    _visionPos[4] = (int)(max_val * 100.0);
}
private void CheckColor()
{
    // Background loop that samples the hue around each LED test point and
    // publishes the averaged value to the view-model.
    IplImage source = new IplImage(640, 360, BitDepth.U8, 3);
    const int TEST_FRAME = 36;
    var ListH = new List <int>();
    int side = (int)Math.Sqrt(TEST_FRAME);// length of one side of the sampling window
    try
    {
        // NOTE(review): Task.Run is not awaited, so the finally block below
        // disposes `source` as soon as this method returns — while the loop may
        // still be using it. `source` is also reassigned to
        // General.camLed.imageForTest, so what gets disposed is either the
        // initial placeholder or a camera-owned image. Ownership looks fragile;
        // confirm before touching the lifecycle.
        Task.Run(() =>
        {
            while (FlagCheckCol)
            {
                try
                {
                    // Acquire the next test image from the camera.
                    if (!General.camLed.GetPic())
                    {
                        continue;
                    }
                    source = General.camLed.imageForTest;
                    using (var hsv = new IplImage(640, 360, BitDepth.U8, 3)) // HSV conversion buffer
                    {
                        // Convert BGR to HSV.
                        Cv.CvtColor(source, hsv, ColorConversion.BgrToHsv);
                        OpenCvSharp.CPlusPlus.Mat mat = new OpenCvSharp.CPlusPlus.Mat(hsv, true);
                        TestpointForGetLum = GetTestPoint();
                        TestpointForGetLum.ForEach(l =>
                        {
                            // Average the non-zero hue values inside a
                            // side x side window centered on the test point.
                            ListH.Clear();
                            foreach (var i in Enumerable.Range(0, side))
                            {
                                foreach (var j in Enumerable.Range(0, side))
                                {
                                    var re = mat.At <OpenCvSharp.CPlusPlus.Vec3b>(l.Item1.Y - (side / 2) + i, l.Item1.X - (side / 2) + j);
                                    if (re[0] != 0)
                                    {
                                        ListH.Add(re[0]);
                                    }
                                }
                            }
                            string Hue = (ListH.Count != 0) ? ListH.Average().ToString("F0") : "0";
                            // Publish to the matching LED slot.
                            switch (l.Item2)
                            {
                                case SEG_NAME.LED1: State.VmLedPoint.LED1Hue = Hue; break;
                                case SEG_NAME.LED2: State.VmLedPoint.LED2Hue = Hue; break;
                                case SEG_NAME.LED3: State.VmLedPoint.LED3Hue = Hue; break;
                                case SEG_NAME.LED4: State.VmLedPoint.LED4Hue = Hue; break;
                            }
                        });
                    }
                }
                catch
                {
                    // Any failure stops the polling loop (deliberate best-effort).
                    FlagCheckCol = false;
                }
            }
        });
    }
    finally
    {
        source.Dispose();
    }
}
/// <summary>
/// Returns the vertices (4 points per square) of square-like contours found in
/// img, scanning each color plane at several threshold levels (plus a Canny
/// pass). Classic OpenCV "squares" sample ported to OpenCvSharp.
/// </summary>
static CvPoint[] FindSquares4(IplImage img, CvMemStorage storage)
{
    const int N = 11;
    // Width/height rounded down to even values (required by PyrDown/PyrUp).
    CvSize sz = new CvSize(img.Width & -2, img.Height & -2);
    IplImage timg = img.Clone(); // make a copy of input image
    IplImage gray = new IplImage(sz, BitDepth.U8, 1);
    IplImage pyr = new IplImage(sz.Width / 2, sz.Height / 2, BitDepth.U8, 3);
    // create empty sequence that will contain points -
    // 4 points per square (the square's vertices)
    CvSeq <CvPoint> squares = new CvSeq <CvPoint>(SeqType.Zero, CvSeq.SizeOf, storage);
    // select the maximum ROI in the image
    // with the width and height divisible by 2
    timg.ROI = new CvRect(0, 0, sz.Width, sz.Height);
    // down-scale and upscale the image to filter out the noise
    Cv.PyrDown(timg, pyr, CvFilter.Gaussian5x5);
    Cv.PyrUp(pyr, timg, CvFilter.Gaussian5x5);
    IplImage tgray = new IplImage(sz, BitDepth.U8, 1);
    // find squares in every color plane of the image
    for (int c = 0; c < 3; c++)
    {
        // extract the c-th color plane
        timg.COI = c + 1;
        Cv.Copy(timg, tgray, null);
        // try several threshold levels
        for (int l = 0; l < N; l++)
        {
            // hack: use Canny instead of zero threshold level.
            // Canny helps to catch squares with gradient shading
            if (l == 0)
            {
                // apply Canny. Take the upper threshold from slider
                // and set the lower to 0 (which forces edges merging)
                Cv.Canny(tgray, gray, 0, Thresh, ApertureSize.Size5);
                // dilate canny output to remove potential
                // holes between edge segments
                Cv.Dilate(gray, gray, null, 1);
            }
            else
            {
                // apply threshold if l!=0:
                // tgray(x,y) = gray(x,y) < (l+1)*255/N ? 255 : 0
                Cv.Threshold(tgray, gray, (l + 1) * 255.0 / N, 255, ThresholdType.Binary);
            }
            // find contours and store them all as a list
            CvSeq <CvPoint> contours;
            Cv.FindContours(gray, storage, out contours, CvContour.SizeOf, ContourRetrieval.List, ContourChain.ApproxSimple, new CvPoint(0, 0));
            // test each contour
            while (contours != null)
            {
                // approximate contour with accuracy proportional
                // to the contour perimeter
                CvSeq <CvPoint> result = Cv.ApproxPoly(contours, CvContour.SizeOf, storage, ApproxPolyMethod.DP, contours.ContourPerimeter() * 0.02, false);
                // square contours should have 4 vertices after approximation
                // relatively large area (to filter out noisy contours)
                // and be convex.
                // Note: absolute value of an area is used because
                // area may be positive or negative - in accordance with the
                // contour orientation
                if (result.Total == 4 && Math.Abs(result.ContourArea(CvSlice.WholeSeq)) > 1000 && result.CheckContourConvexity())
                {
                    double s = 0;
                    for (int i = 0; i < 5; i++)
                    {
                        // find minimum Angle between joint
                        // edges (maximum of cosine)
                        if (i >= 2)
                        {
                            double t = Math.Abs(Angle(result[i].Value, result[i - 2].Value, result[i - 1].Value));
                            s = s > t ? s : t;
                        }
                    }
                    // if cosines of all angles are small
                    // (all angles are ~90 degree) then write quandrange
                    // vertices to resultant sequence
                    if (s < 0.3)
                    {
                        for (int i = 0; i < 4; i++)
                        {
                            //Console.WriteLine(result[i]);
                            squares.Push(result[i].Value);
                        }
                    }
                }
                // take the next contour
                contours = contours.HNext;
            }
        }
    }
    // release all the temporary images
    gray.Dispose();
    pyr.Dispose();
    tgray.Dispose();
    timg.Dispose();
    return(squares.ToArray());
}
/// <summary>
/// Handles color and depth frame data arriving from the sensor: maps color
/// pixels to camera space, copies the color frame into the display bitmap,
/// and shows an HSV-thresholded view for object segmentation.
/// </summary>
/// <param name="sender">object sending the event</param>
/// <param name="e">event arguments</param>
private void Reader_MultiSourceFrameArrived(object sender, MultiSourceFrameArrivedEventArgs e)
{
    int depthWidth = 0;
    int depthHeight = 0;
    DepthFrame depthFrame = null;
    ColorFrame colorFrame = null;
    bool isBitmapLocked = false;
    MultiSourceFrame multiSourceFrame = e.FrameReference.AcquireFrame();
    // if the Frame has expired by the time we process this event, return.
    if (multiSourceFrame == null)
    {
        return;
    }
    try
    {
        depthFrame = multiSourceFrame.DepthFrameReference.AcquireFrame();
        colorFrame = multiSourceFrame.ColorFrameReference.AcquireFrame();
        // if any frame has expired by the time we process this event, return.
        // the "finally" statement will Dispose any that are not null.
        if ((depthFrame == null) || (colorFrame == null))
        {
            return;
        }
        // Process Depth
        FrameDescription depthFrameDescription = depthFrame.FrameDescription;
        depthWidth = depthFrameDescription.Width;
        depthHeight = depthFrameDescription.Height;
        // access the depth frame data directly via LockImageBuffer to avoid making a copy
        using (KinectBuffer depthFrameData = depthFrame.LockImageBuffer())
        {
            this.coordinateMapper.MapColorFrameToCameraSpaceUsingIntPtr(
                depthFrameData.UnderlyingBuffer,
                depthFrameData.Size,
                this.colorMappedToCameraPoints);
        }
        // we're done with the DepthFrame: release it early so the sensor can reuse it
        depthFrame.Dispose();
        depthFrame = null;
        // Process Color
        // lock the bitmap for writing
        this.bitmap.Lock();
        isBitmapLocked = true;
        colorFrame.CopyConvertedFrameDataToIntPtr(this.bitmap.BackBuffer, this.bitmapBackBufferSize, ColorImageFormat.Bgra);
        // we're done with the ColorFrame
        colorFrame.Dispose();
        colorFrame = null;
        this.bitmap.AddDirtyRect(new Int32Rect(0, 0, this.bitmap.PixelWidth, this.bitmap.PixelHeight));
        this.bitmap.Unlock();
        isBitmapLocked = false;
        // Segment Object
        // grab frame from Kinect and convert to HSV format
        IplImage imgOriginal = this.bitmap.ToIplImage();
        IplImage imgHsv = Cv.CreateImage(imgOriginal.Size, BitDepth.U8, 3);
        Cv.CvtColor(imgOriginal, imgHsv, ColorConversion.RgbToHsv);
        imgOriginal.Dispose();
        // set color thresholds and create IplImage to store thresholded frame
        CvScalar lower = new CvScalar(this.lowerH, this.lowerS, this.lowerV);
        CvScalar upper = new CvScalar(this.upperH, this.upperS, this.upperV);
        IplImage imgThreshed = Cv.CreateImage(imgHsv.Size, BitDepth.U8, 1);
        Cv.InRangeS(imgHsv, lower, upper, imgThreshed);
        // show image at half size
        CvSize size = new CvSize(imgThreshed.Width / 2, imgThreshed.Height / 2);
        IplImage imgResized = new IplImage(size, BitDepth.U8, 1);
        imgThreshed.Resize(imgResized, Interpolation.Linear);
        Cv.ShowImage("Thresholded View", imgResized);
        // clean up
        imgHsv.Dispose();
        imgThreshed.Dispose();
        imgResized.Dispose();
    }
    finally
    {
        // Release anything still held if we bailed out early or threw.
        if (isBitmapLocked)
        {
            this.bitmap.Unlock();
        }
        if (depthFrame != null)
        {
            depthFrame.Dispose();
        }
        if (colorFrame != null)
        {
            colorFrame.Dispose();
        }
    }
}
//* BUTTON_CONVERT *//
// Converts the currently opened image into an STL model according to the
// selected mode (DEFAULT / STAMP / DOT / RING), then lets the user save the
// generated file.
private void button_Convert_Click(object sender, EventArgs e)
{
    IplImage tempImgBox;
    Took3D.SET_Z = Int32.Parse(textBox_SetZ.Text);
    if (isImgOpen == true)
    {
        switch (currentMode)
        {
            // <<DEFAULT>>
            case Mode.DEFAULT:
                // Add a white outline border.
                tempImgBox = new IplImage(imgBox.Width + 10, imgBox.Height + 10, BitDepth.U8, 1);
                imgBox.CopyMakeBorder(tempImgBox, new CvPoint(5, 5), BorderType.Constant, CvScalar.ScalarAll(0xFF));
                // Modeling.
                Took3D.START(tempImgBox);
                if (radioButton_Polygon.Checked == true)
                {
                    bool checkBottomMode = Took3D.bottomPolygon();
                    if (checkBottomMode == false)
                    {
                        // Polygon bottom failed; fall back to background mode.
                        radioButton_Background.Checked = true;
                    }
                    Took3D.binarySTL_Bottom();
                }
                else if (radioButton_Background.Checked == true)
                {
                    Took3D.bottomBackground();
                    Took3D.binarySTL_BackBottom();
                }
                else
                {
                    Took3D.binarySTL();
                }
                break;

            // <<STAMP>>
            case Mode.STAMP:
                tempImgBox = mode_Stamp(imgBox);
                Took3D.START(tempImgBox);
                Took3D.stampModeling();
                Took3D.binarySTL_BackBottom();
                break;

            // <<DOT>>
            case Mode.DOT:
                // 1. Resize onto the dot canvas.
                tempImgBox = Dot_Resize(imgBox);
                // 2. Enlarge the image (keeps the dots circular).
                IplImage dot_tmp = new IplImage(tempImgBox.Width * 10, tempImgBox.Height * 10, tempImgBox.Depth, tempImgBox.NChannels);
                tempImgBox.Resize(dot_tmp, Interpolation.Cubic);
                tempImgBox.Dispose();
                // 3. Build the dot image.
                tempImgBox = mode_Dot(dot_tmp);
                dot_tmp.Dispose();
                // 4. Add a white outline border.
                dot_tmp = new IplImage(tempImgBox.Width + 4, tempImgBox.Height + 4, tempImgBox.Depth, tempImgBox.NChannels);
                tempImgBox.CopyMakeBorder(dot_tmp, new CvPoint(2, 2), BorderType.Constant, CvScalar.ScalarAll(0xFF));
                tempImgBox = dot_tmp;
                // 5. Start binarization.
                Took3D.START(tempImgBox);
                Took3D.binarySTL();
                break;

            // <<RING>>
            case Mode.RING:
                tempImgBox = mode_Ring(imgBox);
                Took3D.START(tempImgBox);
                Took3D.binarySTL();
                break;

            default:
                break;
        }
        pictureBox.ImageIpl = Took3D.resultImage;
        //Console.WriteLine("minX : {0} minY : {1} maxX : {2} maxY : {3}", Took3D.minX, Took3D.minY, Took3D.maxX, Took3D.maxY);
        // Let the user save the generated STL (written to a fixed temp path).
        saveFileDialog1.Filter = "STL File(*.stl)|*.stl";
        if (saveFileDialog1.ShowDialog() == DialogResult.OK)
        {
            FileInfo file = new FileInfo(@"C:\TookTemp\output");
            if (file.Exists)
            {
                File.Copy(@"C:\TookTemp\output", saveFileDialog1.FileName, true);
                MessageBox.Show("완료");
            }
        }
        //tempImgBox.Dispose();
    }
    else
    {
        MessageBox.Show("이미지를 선택해주세요!");
    }
}
//* MODE : STAMP *//
// Extracts the object from srcImg and centers it, aspect-preserved, on a
// 200x200 white "stamp" canvas.
private IplImage mode_Stamp(IplImage srcImg)
{
    // 1. Object extraction: crop to the bounding box found by checkSize.
    Took3D.checkSize(srcImg);
    int minX = Took3D.minX, minY = Took3D.minY;
    int maxX = Took3D.maxX - minX, maxY = Took3D.maxY - minY;
    srcImg.SetROI(new CvRect(minX, minY, maxX + 1, maxY + 1));
    IplImage src = new IplImage(maxX + 1, maxY + 1, srcImg.Depth, srcImg.NChannels);
    srcImg.Copy(src);
    // 2. Create the white stamp canvas.
    IplImage stampImg = new IplImage(200, 200, src.Depth, src.NChannels);
    stampImg.Set(CvScalar.ScalarAll(255));
    // 3. Fit the object into a 175x175 region, preserving aspect ratio.
    int roi_width = 175;
    int roi_height = 175;
    IplImage gr_hole;
    int setHeight = 0, setWidth = 0;
    if (src.Width > src.Height)
    {
        setWidth = roi_width;
        setHeight = (roi_width * src.Height) / src.Width;
        if (setHeight > roi_height)
        {
            // NOTE(review): this recompute uses the already-updated
            // setWidth/setHeight rather than the source dimensions; the same
            // pattern appears in Dot_Resize and mode_Ring — confirm intent.
            setHeight = roi_height;
            setWidth = (roi_height * setWidth) / setHeight;
        }
        gr_hole = new IplImage(setWidth, setHeight, src.Depth, src.NChannels);
    }
    else if (src.Width < src.Height)
    {
        setHeight = roi_height;
        setWidth = (roi_height * src.Width) / src.Height;
        if (setWidth > roi_width)
        {
            setWidth = roi_width;
            setHeight = (roi_width * setHeight) / setWidth;
        }
        gr_hole = new IplImage(setWidth, setHeight, src.Depth, src.NChannels);
    }
    else
    {
        // Square source.
        setHeight = roi_height;
        setWidth = (roi_height * src.Width) / src.Height;
        gr_hole = new IplImage(setWidth, setHeight, src.Depth, src.NChannels);
    }
    src.Resize(gr_hole, Interpolation.Cubic);
    // 4. Center the resized object on the canvas.
    int mid_X = (200 / 2) - (gr_hole.Width / 2);
    int mid_Y = (200 / 2) - (gr_hole.Height / 2);
    stampImg.SetROI(mid_X, mid_Y, gr_hole.Width, gr_hole.Height);
    // 5. Paste it in.
    gr_hole.Copy(stampImg);
    // 6. Cleanup.
    srcImg.ResetROI();
    stampImg.ResetROI();
    gr_hole.Dispose();
    src.Dispose();
    return(stampImg);
}
public void Dispose()
{
    // FIX: null-guard so disposing an instance whose image was never created
    // cannot throw, matching the guarded Dispose() implementations elsewhere
    // in this codebase.
    if (image != null)
    {
        image.Dispose();
    }
}
// Extracts the object from srcImg and centers it, aspect-preserved, on a
// 500x500 white canvas used as the dot-mode base image.
private IplImage Dot_Resize(IplImage srcImg)
{
    // 1. Object extraction: crop to the bounding box found by checkSize.
    Took3D.checkSize(srcImg);
    int minX = Took3D.minX, minY = Took3D.minY;
    int maxX = Took3D.maxX - minX, maxY = Took3D.maxY - minY;
    srcImg.SetROI(new CvRect(minX, minY, maxX, maxY));
    IplImage src = new IplImage(maxX, maxY, srcImg.Depth, srcImg.NChannels);
    srcImg.Copy(src);
    // 2. Create the white base canvas.
    IplImage dotBackImg = new IplImage(500, 500, src.Depth, src.NChannels);
    dotBackImg.Set(CvScalar.ScalarAll(255));
    // 3. Fit the object into a 470x470 region, preserving aspect ratio.
    int roi_width = 470;
    int roi_height = 470;
    IplImage temp;
    int setHeight = 0, setWidth = 0;
    if (src.Width > src.Height)
    {
        setWidth = roi_width;
        setHeight = (roi_width * src.Height) / src.Width;
        if (setHeight > roi_height)
        {
            // NOTE(review): recompute uses the already-updated values rather
            // than the source dimensions (same pattern as mode_Stamp /
            // mode_Ring) — confirm intent.
            setHeight = roi_height;
            setWidth = (roi_height * setWidth) / setHeight;
        }
        temp = new IplImage(setWidth, setHeight, src.Depth, src.NChannels);
    }
    else if (src.Width < src.Height)
    {
        setHeight = roi_height;
        setWidth = (roi_height * src.Width) / src.Height;
        if (setWidth > roi_width)
        {
            setWidth = roi_width;
            setHeight = (roi_width * setHeight) / setWidth;
        }
        temp = new IplImage(setWidth, setHeight, src.Depth, src.NChannels);
    }
    else
    {
        // Square source.
        setHeight = roi_height;
        setWidth = (roi_height * src.Width) / src.Height;
        temp = new IplImage(setWidth, setHeight, src.Depth, src.NChannels);
    }
    src.Resize(temp, Interpolation.Cubic);
    // 4. Center the resized object on the canvas.
    int mid_X = (500 / 2) - (temp.Width / 2);
    int mid_Y = (500 / 2) - (temp.Height / 2);
    dotBackImg.SetROI(mid_X, mid_Y, temp.Width, temp.Height);
    // 5. Paste it in.
    temp.Copy(dotBackImg);
    // 6. Cleanup.
    srcImg.ResetROI();
    dotBackImg.ResetROI();
    temp.Dispose();
    src.Dispose();
    return(dotBackImg);
}
//Dispose of all initiated objects
public void Dispose()
{
    // FIX: null-guard so disposing an instance that never produced an output
    // image cannot throw, matching the guarded Dispose() implementations
    // elsewhere in this codebase.
    if (output != null)
    {
        output.Dispose();
    }
}
private void FormCam_FormClosing(object sender, FormClosingEventArgs e)
{
    // BUG FIX: the original called Cv.ReleaseImage(src) and then src.Dispose()
    // with no null check at all — a NullReferenceException when the form
    // closes without an image, and a double release otherwise. A single
    // null-guarded Dispose() frees the native image.
    if (src != null)
    {
        src.Dispose();
        src = null;
    }
}
//* MODE : RING *//
// Composites the (negated) object from srcImg into the hole region of the
// bundled ring resource image and returns the composed ring.
private IplImage mode_Ring(IplImage srcImg)
{
    // 1. Load the ring resource and convert it to match srcImg's format.
    IplImage temp = Properties.Resources.gr.ToIplImage();
    IplImage gr = new IplImage(temp.Size, srcImg.Depth, srcImg.NChannels);
    temp.CvtColor(gr, ColorConversion.BgrToGray);
    temp.Dispose(); // release the resource copy
    // 2. Object extraction: crop to the bounding box found by checkSize.
    Took3D.checkSize(srcImg);
    int minX = Took3D.minX, minY = Took3D.minY;
    int maxX = Took3D.maxX - minX, maxY = Took3D.maxY - minY;
    srcImg.SetROI(new CvRect(minX, minY, maxX, maxY));
    IplImage src = new IplImage(maxX, maxY, srcImg.Depth, srcImg.NChannels);
    srcImg.Copy(src);
    // 3. Invert the extracted image.
    Took3D.negativeImg(src);
    // 4. Fit the object into the ring's hole region, preserving aspect ratio.
    IplImage gr_hole;
    int roi_X = 37;
    int roi_Y = 226;
    int roi_width = 204;
    int roi_height = 175;
    //int X_wid = 240;
    //int Y_hei = 379;
    int setHeight = 0, setWidth = 0;
    if (src.Width > src.Height)
    {
        setWidth = roi_width;
        setHeight = (roi_width * src.Height) / src.Width;
        if (setHeight > roi_height)
        {
            // NOTE(review): recompute uses the already-updated values rather
            // than the source dimensions (same pattern as mode_Stamp /
            // Dot_Resize) — confirm intent.
            setHeight = roi_height;
            setWidth = (roi_height * setWidth) / setHeight;
        }
        gr_hole = new IplImage(setWidth, setHeight, src.Depth, src.NChannels);
    }
    else if (src.Width < src.Height)
    {
        setHeight = roi_height;
        setWidth = (roi_height * src.Width) / src.Height;
        if (setWidth > roi_width)
        {
            setWidth = roi_width;
            setHeight = (roi_width * setHeight) / setWidth;
        }
        gr_hole = new IplImage(setWidth, setHeight, src.Depth, src.NChannels);
    }
    else
    {
        // Square source.
        setHeight = roi_height;
        setWidth = (roi_height * src.Width) / src.Height;
        gr_hole = new IplImage(setWidth, setHeight, src.Depth, src.NChannels);
    }
    src.Resize(gr_hole, Interpolation.Cubic);
    // 5. Horizontally center the object within the hole.
    int mid_X = (roi_width / 2) - (gr_hole.Width / 2);
    gr.SetROI(roi_X + mid_X, roi_Y, gr_hole.Width, gr_hole.Height);
    // 6. Composite.
    gr_hole.Copy(gr);
    // 7. Cleanup.
    gr.ResetROI();
    srcImg.ResetROI();
    gr_hole.Dispose();
    src.Dispose();
    return(gr);
}