/// <summary>
/// Loads an image from disk into the slot of the currently selected camera
/// (global.GetIns().CamSel, 0..3) and displays it on the matching viewer.
/// </summary>
/// <param name="sender">WPF event source (unused).</param>
/// <param name="e">WPF routed-event args (unused).</param>
private void Button_Click_Get_Image(object sender, RoutedEventArgs e)
{
    // In WPF, OpenFileDialog lives in the Microsoft.Win32 namespace.
    Microsoft.Win32.OpenFileDialog dialog = new Microsoft.Win32.OpenFileDialog();
    // FIX: filter patterns must not contain spaces inside the pattern list or
    // around the '|' separators, otherwise the dialog cannot match the files
    // (the original string had " *.jpg; " and " *.* " with embedded spaces).
    dialog.Filter = "图像文件(*.jpg;*.jpeg;*.bmp;*.png)|*.jpg;*.jpeg;*.bmp;*.png|所有文件|*.*";
    if (dialog.ShowDialog() != true)
    {
        return;
    }

    int sel = global.GetIns().CamSel;
    Console.WriteLine("第" + sel.ToString() + "相机被选择");
    try
    {
        // Out-of-range selections are ignored, matching the original
        // switch's empty "default" branch.
        if (sel < 0 || sel > 3)
        {
            return;
        }

        // Release the previous image before loading the new one (HALCON
        // objects hold unmanaged memory).
        Obj[sel].Dispose();
        HOperatorSet.ReadImage(out Obj[sel], dialog.FileName);

        // Each camera slot has its own display control; only the dispatch
        // differs per slot, so the dispose/read above is shared.
        switch (sel)
        {
            case 0: CameraADisp.RunHalcon(this.CamSetting.HalconID, Obj[0]); break;
            case 1: CameraBDisp.RunHalcon(this.CamSetting.HalconID, Obj[1]); break;
            case 2: CameraCDisp.RunHalcon(this.CamSetting.HalconID, Obj[2]); break;
            case 3: CameraDDisp.RunHalcon(this.CamSetting.HalconID, Obj[3]); break;
        }
    }
    catch (HalconException HDevExpDefaultException)
    {
        HTuple hv_exception = null;
        HDevExpDefaultException.ToHTuple(out hv_exception);
        // FIX: the exception was previously swallowed silently; at minimum
        // surface it on the console so failed loads are diagnosable.
        Console.WriteLine(HDevExpDefaultException.GetErrorMessage());
        return;
    }
}
/// <summary>
/// Maps a pixel position (hv_Row/hv_Col) expressed relative to the image
/// centre into scan coordinates using the zoom-corrected homography hv_uvHxy.
/// On HALCON failure, hv_iFlag receives the error info and the outputs stay
/// empty tuples.
/// </summary>
/// <param name="hv_width">Image width in pixels.</param>
/// <param name="hv_height">Image height in pixels.</param>
/// <param name="hv_Row">Pixel row of the point of interest.</param>
/// <param name="hv_Col">Pixel column of the point of interest.</param>
/// <param name="hv_uvHxy">2x3 homogeneous transform (pixel -> scan axes).</param>
/// <param name="hv_zoomFactor">Zoom applied to the calibration; divided out here.</param>
/// <param name="hv_icXScan">Resulting scan X coordinate.</param>
/// <param name="hv_icYScan">Resulting scan Y coordinate.</param>
/// <param name="hv_iFlag">"" on success, otherwise HALCON error info.</param>
public void Get_ICRegion_points(HTuple hv_width, HTuple hv_height, HTuple hv_Row,
    HTuple hv_Col, HTuple hv_uvHxy, HTuple hv_zoomFactor,
    out HTuple hv_icXScan, out HTuple hv_icYScan, out HTuple hv_iFlag)
{
    hv_iFlag = "";
    hv_icXScan = new HTuple();
    hv_icYScan = new HTuple();
    HTuple hv_uvHxyScaled = new HTuple();
    try
    {
        // Remove the zoom factor from the homography before projecting.
        HOperatorSet.HomMat2dScale(hv_uvHxy, 1 / hv_zoomFactor, 1 / hv_zoomFactor,
            0, 0, out hv_uvHxyScaled);

        // Offsets of the given pixel from the image centre.
        HTuple hv_dRow = ((hv_height - 1) / 2.0) - hv_Row;
        HTuple hv_dCol = ((hv_width - 1) / 2.0) - hv_Col;

        // Apply the 2x2 linear part of the 2x3 matrix
        // (elements 0,1 = first row; elements 3,4 = second row).
        hv_icXScan = (hv_uvHxyScaled.TupleSelect(0) * hv_dRow)
                   + (hv_uvHxyScaled.TupleSelect(1) * hv_dCol);
        hv_icYScan = (hv_uvHxyScaled.TupleSelect(3) * hv_dRow)
                   + (hv_uvHxyScaled.TupleSelect(4) * hv_dCol);
    }
    catch (HalconException HDevExpDefaultException)
    {
        HTuple hv_Exception = new HTuple();
        HDevExpDefaultException.ToHTuple(out hv_Exception);
        VisionMethonDll.VisionMethon.GetErrInfo(hv_Exception, out hv_iFlag);
    }
}
/// <summary>
/// 获取大芯片中心的扫描点位(IC的中心为相机视野中心)。
/// Orders the clipped map points into a serpentine (boustrophedon) scan
/// sequence: even rows are emitted left-to-right, odd rows right-to-left,
/// so the scanner never jumps back across a full row.
/// </summary>
/// <param name="hv_width">Image width in pixels (currently unused here).</param>
/// <param name="hv_height">Image height in pixels (currently unused here).</param>
/// <param name="hv_uvHxy">Pixel-to-scan homography (currently unused here).</param>
/// <param name="hv_zoomFactor">Calibration zoom factor (currently unused here).</param>
/// <param name="hv_mapRowCnt">Number of rows in the scan map.</param>
/// <param name="hv_mapColCnt">Number of columns in the scan map.</param>
/// <param name="hv_clipmapX">Row-major X coordinates of the map points.</param>
/// <param name="hv_clipmapY">Row-major Y coordinates of the map points.</param>
/// <param name="hv_clipmapRow">Map row indices (currently unused here).</param>
/// <param name="hv_clipmapCol">Map column indices (currently unused here).</param>
/// <param name="hv_snapX">Serpentine-ordered X coordinates.</param>
/// <param name="hv_snapY">Serpentine-ordered Y coordinates.</param>
/// <param name="hv_snapRow">Map row index of each emitted point.</param>
/// <param name="hv_snapCol">Map column index of each emitted point.</param>
/// <param name="hv_iFlag">"" on success, otherwise HALCON error info.</param>
public void get_scan_points(HTuple hv_width, HTuple hv_height, HTuple hv_uvHxy,
    HTuple hv_zoomFactor, HTuple hv_mapRowCnt, HTuple hv_mapColCnt,
    HTuple hv_clipmapX, HTuple hv_clipmapY, HTuple hv_clipmapRow, HTuple hv_clipmapCol,
    out HTuple hv_snapX, out HTuple hv_snapY, out HTuple hv_snapRow,
    out HTuple hv_snapCol, out HTuple hv_iFlag)
{
    hv_iFlag = "";
    hv_snapX = new HTuple();
    hv_snapY = new HTuple();
    hv_snapRow = new HTuple();
    hv_snapCol = new HTuple();
    try
    {
        // Sort the scan points into a serpentine ("弓"-shaped) order.
        HTuple hv_seqInd = new HTuple();
        HTuple hv_rowIndConst = new HTuple();
        HTuple hv_colIndSeq = new HTuple();
        HTuple hv__x = new HTuple();
        HTuple hv__y = new HTuple();
        HTuple hv_i = 0;
        for (hv_i = 0; hv_i < hv_mapRowCnt; hv_i++)
        {
            // Flat (row-major) indices of the points belonging to row hv_i.
            HOperatorSet.TupleGenSequence(hv_i * hv_mapColCnt,
                ((hv_i + 1) * hv_mapColCnt) - 1, 1, out hv_seqInd);
            // Row index repeated once per column, column indices 0..cols-1.
            HOperatorSet.TupleGenConst(hv_mapColCnt, hv_i, out hv_rowIndConst);
            HOperatorSet.TupleGenSequence(0, hv_mapColCnt - 1, 1, out hv_colIndSeq);

            if ((int)(new HTuple(((hv_i % 2)).TupleEqual(0))) != 0)
            {
                // Even row: keep the natural left-to-right order.
                hv__x = hv_clipmapX.TupleSelect(hv_seqInd);
                hv__y = hv_clipmapY.TupleSelect(hv_seqInd);
            }
            else
            {
                // Odd row: reverse, so the scan continues right-to-left.
                hv__x = ((hv_clipmapX.TupleSelect(hv_seqInd))).TupleInverse();
                hv__y = ((hv_clipmapY.TupleSelect(hv_seqInd))).TupleInverse();
                hv_colIndSeq = hv_colIndSeq.TupleInverse();
            }

            hv_snapX = hv_snapX.TupleConcat(hv__x);
            hv_snapY = hv_snapY.TupleConcat(hv__y);
            hv_snapRow = hv_snapRow.TupleConcat(hv_rowIndConst);
            hv_snapCol = hv_snapCol.TupleConcat(hv_colIndSeq);
        }
    }
    catch (HalconException HDevExpDefaultException)
    {
        HTuple hv_Exception = new HTuple();
        HDevExpDefaultException.ToHTuple(out hv_Exception);
        VisionMethonDll.VisionMethon.GetErrInfo(hv_Exception, out hv_iFlag);
    }
}
/// <summary>
/// Grabs a fresh image from each of the four cameras into Obj[0..3]; if a
/// HALCON error escapes the grab loop, falls back to loading fixed test
/// images from D:\img. Afterwards runs the axis check on all four displays.
/// </summary>
/// <param name="sender">WPF event source (unused).</param>
/// <param name="e">WPF routed-event args (unused).</param>
private void Button_Click_1(object sender, RoutedEventArgs e)
{
    try
    {
        for (int i = 0; i < 4; i++)
        {
            try
            {
                // Release the previous HALCON image before reusing the slot.
                Obj[i].Dispose();
                // NOTE(review): the camera selector passed here is the global
                // CamSel, not the loop index i — so all four slots appear to
                // be grabbed from the same camera. Verify against
                // GrabImage.Grab whether this should be `i` instead.
                GrabImage.Grab(out Obj[i], global.GetIns().CamSel);
            }
            catch (Exception ex)
            {
                // Best-effort: a failed grab skips this slot but keeps going.
                Console.WriteLine(ex.ToString());
                continue;
            }
        }
    }
    catch (HalconException HDevExpDefaultException)
    {
        // Fallback: load canned test images from disk.
        // NOTE(review): this branch looks unreachable — the inner loop
        // already catches Exception (a superset of HalconException) and
        // continues. Confirm whether the fallback is ever exercised.
        HOperatorSet.ReadImage(out Obj[0], @"D:\img\1.bmp");
        CameraADisp.RunHalcon(this.Cam1_Disp.HalconID, Obj[0]);
        HOperatorSet.ReadImage(out Obj[1], @"D:\img\2.bmp");
        // NOTE(review): Obj[1..3] are displayed via CameraADisp while the
        // axis check below uses CameraB/C/DDisp — possible copy-paste slip;
        // the distinct HalconIDs passed in may make it harmless. Confirm.
        CameraADisp.RunHalcon(this.Cam2_Disp.HalconID, Obj[1]);
        HOperatorSet.ReadImage(out Obj[2], @"D:\img\3.bmp");
        CameraADisp.RunHalcon(this.Cam3_Disp.HalconID, Obj[2]);
        HOperatorSet.ReadImage(out Obj[3], @"D:\img\4.bmp");
        CameraADisp.RunHalcon(this.Cam4_Disp.HalconID, Obj[3]);
        HTuple hv_exception = null;
        HDevExpDefaultException.ToHTuple(out hv_exception);
    }
    try
    {
        // Run the axis check for each camera image on its own display window.
        CameraADisp.check_axis(Obj[0], 0, ImageOperate.Track_Model, Cam1_Disp.HalconID);
        CameraBDisp.check_axis(Obj[1], 0, ImageOperate.Track_Model, Cam2_Disp.HalconID);
        CameraCDisp.check_axis(Obj[2], 0, ImageOperate.Track_Model, Cam3_Disp.HalconID);
        CameraDDisp.check_axis(Obj[3], 0, ImageOperate.Track_Model, Cam4_Disp.HalconID);
    }
    catch (HalconException HDevExpDefaultException)
    {
        Console.WriteLine(HDevExpDefaultException.ToString());
    }
}
/// <summary>
/// FFT-based surface-defect detection setup: reads an image, reduces it to a
/// fixed ROI, opens a HALCON window on the given control, builds a
/// difference-of-Gaussians band-pass filter in the RFT domain, and delegates
/// per-image processing to HandleOneImage.
/// </summary>
/// <param name="hWindowControl1">HALCON window control to render into.</param>
/// <param name="FilePath">Directory/prefix the image is read from.</param>
/// <param name="filename">NOTE(review): unused in this method — confirm whether it can be removed from the signature.</param>
/// <param name="index">Image name/suffix appended to FilePath for ReadImage.</param>
public void action(HWindowControl hWindowControl1, String FilePath, String filename, String index)
{
    // Reset the instance-level HALCON objects/tuples used below.
    initaVariables();
    try
    {
        //This program demonstrates how to detect small texture
        //defects on the surface of plastic items by using the fast
        //fourier transform (FFT).
        //First, we construct a suitable filter using Gaussian
        //filters. Then, the images and the filter are convolved
        //by using fast fourier transforms. Finally, the defects
        //are detected in the filtered images by using
        //morphology operators.
        //
        //Initializations
        dev_update_off();
        /*if (HDevWindowStack.IsOpen())
         * {
         * HOperatorSet.CloseWindow(HDevWindowStack.Pop());
         * } LEI */
        //read image from file
        ho_Image.Dispose();
        //1.read image — the full path is FilePath + index (e.g. "metal/tube_01").
        HOperatorSet.ReadImage(out ho_Image, FilePath + index);// "metal/tube_01");
        // ho_ROI_0.Dispose();
        // HOperatorSet.GenRectangle1(out ho_ROI_0, 3.33632, 300.556, 990.549, 1648.14);
        ho_ROI_0.Dispose();
        HOperatorSet.GetImageSize(ho_Image, out hov_Width, out hov_Height);
        // Fixed ROI for the "metal" setup; hard-coded 1200x4096 rectangle.
        // NOTE(review): hov_Width/hov_Height are read above but not used to
        // size this ROI — confirm the constants are intended.
        HOperatorSet.GenRectangle1(out ho_ROI_0, 0, 0, 1200, 4096);//metal
        //HOperatorSet.GenRectangle1(out ho_ROI_0, 0, 620.059, 1998.27, 3603.45);//tube的
        ho_ImageReduced.Dispose();
        // Restrict processing to the ROI, then crop to its bounding box so the
        // FFT operates on the smaller image.
        HOperatorSet.ReduceDomain(ho_Image, ho_ROI_0, out ho_ImageReduced);
        //get_domain (ImageReduced, Domain)
        ho_ImagePart.Dispose();
        HOperatorSet.CropDomain(ho_ImageReduced, out ho_ImagePart);
        // hv_Width/hv_Height now hold the cropped size; used for filter
        // construction and RFT speed optimization below.
        HOperatorSet.GetImageSize(ho_ImagePart, out hv_Width, out hv_Height);
        HOperatorSet.SetWindowAttr("background_color", "black");
        //HOperatorSet.OpenWindow(0, 0, hv_Width, hv_Height, 0, "visible", "", out hv_WindowHandle);
        //HDevWindowStack.Push(hv_WindowHandle);
        // Open the HALCON window inside the WinForms/WPF control and make it
        // the active window on the HDev window stack.
        HOperatorSet.OpenWindow(0, 0, hWindowControl1.Width, hWindowControl1.Height,
            hWindowControl1.HalconWindow, "", "", out hv_WindowHandle);
        HDevWindowStack.Push(hv_WindowHandle);
        if (HDevWindowStack.IsOpen())
        {
            HOperatorSet.DispObj(ho_ImagePart, HDevWindowStack.GetActive());
        }
        // The cropped image has been displayed; release it.
        ho_ImagePart.Dispose();
        hWindowControl1.Refresh();
        // Display defaults: font, margin-only drawing, 3px red outlines.
        set_display_font(hv_WindowHandle, 14, "mono", "true", "false");
        if (HDevWindowStack.IsOpen())
        {
            HOperatorSet.SetDraw(HDevWindowStack.GetActive(), "margin");
        }
        if (HDevWindowStack.IsOpen())
        {
            HOperatorSet.SetLineWidth(HDevWindowStack.GetActive(), 3);
        }
        if (HDevWindowStack.IsOpen())
        {
            HOperatorSet.SetColor(HDevWindowStack.GetActive(), "red");
        }
        //
        //Optimize the fft speed for the specific image size
        HOperatorSet.OptimizeRftSpeed(hv_Width, hv_Height, "standard");
        //
        //Construct a suitable band-pass filter as the difference of two
        //Gaussian filters (sigma 10 minus sigma 3) in the RFT domain.
        hv_Sigma1 = 10.0;
        hv_Sigma2 = 3.0;
        ho_GaussFilter1.Dispose();
        HOperatorSet.GenGaussFilter(out ho_GaussFilter1, hv_Sigma1, hv_Sigma1, 0.0,
            "none", "rft", hv_Width, hv_Height);
        ho_GaussFilter2.Dispose();
        HOperatorSet.GenGaussFilter(out ho_GaussFilter2, hv_Sigma2, hv_Sigma2, 0.0,
            "none", "rft", hv_Width, hv_Height);
        ho_Filter.Dispose();
        HOperatorSet.SubImage(ho_GaussFilter1, ho_GaussFilter2, out ho_Filter, 1, 0);
        //
        //Process the images iteratively
        //NOTE(review): the original HDevelop loop over hv_NumImages images is
        //commented out; only the single image named by `index` is processed,
        //and end_val34/step_val34 are left-over loop bounds. Confirm whether
        //the batch loop should be restored or this scaffolding removed.
        hv_NumImages = 5;
        HTuple end_val34 = hv_NumImages;
        HTuple step_val34 = 1;
        //for (hv_Index = 1; hv_Index.Continue(end_val34, step_val34); hv_Index = hv_Index.TupleAdd(step_val34))
        //{
        HandleOneImage(FilePath, index);
        //}
    }
    catch (HalconException HDevExpDefaultException)
    {
        // Log and clean up; the exception is intentionally not rethrown.
        System.Diagnostics.Debug.WriteLine(HDevExpDefaultException.GetErrorMessage());
        Console.WriteLine(HDevExpDefaultException.StackTrace);
        //System.Diagnostics.Debug.Assert(false, HDevExpDefaultException.GetErrorMessage());
        DisposeAllObj();
        // throw HDevExpDefaultException;
    }
    // Release all HALCON objects on the success path as well.
    DisposeAllObj();
}