/// <summary>
/// Merges the 3D object model(s) referenced by <paramref name="model"/> into one
/// point-based model ("points_surface" method) and wraps it in a <see cref="Points_3D"/>.
/// </summary>
/// <param name="model">Handle(s) of the HALCON 3D object model(s) to union.</param>
/// <returns>A <see cref="Points_3D"/> built from the unioned model.</returns>
public static Points_3D Union_object_model_3d(HTuple model)
{
    HTuple merged;
    HOperatorSet.UnionObjectModel3d(model, "points_surface", out merged);

    Points_3D result = new Points_3D(merged);
    // Release the temporary handle right away, per the file's convention of
    // freeing intermediate model HTuples promptly.
    // NOTE(review): assumes the Points_3D constructor does not retain `merged` — confirm.
    HOperatorSet.ClearObjectModel3d(merged);
    return result;
}
/** Coding note (original author): temporary model HTuples must be released promptly. **/
/// <summary>
/// Sub-samples a 3D object model and returns the result as a <see cref="Points_3D"/>.
/// </summary>
/// <param name="model">Handle of the HALCON 3D object model to sample.</param>
/// <param name="methord">Sampling method (converted to an HTuple via BaseMethord.Enum2Htuple).</param>
/// <param name="sampleDistance">Sampling distance passed to SampleObjectModel3d.</param>
/// <param name="para">Generic parameter name for the operator.</param>
/// <param name="minNumPoints">Value for the generic parameter (minimum point count).</param>
/// <returns>A <see cref="Points_3D"/> built from the sampled model.</returns>
private static Points_3D Sample_object_model_3d(HTuple model, SampleMethord methord = SampleMethord.accurate, double sampleDistance = 1, SampleParams para = SampleParams.min_num_points, int minNumPoints = 1)
{
    HTuple methodTuple = BaseMethord.Enum2Htuple(methord);
    HTuple paramTuple = BaseMethord.Enum2Htuple(para);

    HTuple sampled;
    HOperatorSet.SampleObjectModel3d(model, methodTuple, sampleDistance, paramTuple, minNumPoints, out sampled);

    Points_3D result = new Points_3D(sampled);
    // Free the intermediate sampled-model handle immediately (see note above).
    HOperatorSet.ClearObjectModel3d(sampled);
    return result;
}
/** Global registration — returns a single fused model. **/
/// <summary>
/// Globally registers the stored models against each other (pairing mode
/// "previous"), applies the refined transforms, and returns the union of the
/// transformed models as one <see cref="Points_3D"/>.
/// </summary>
/// <returns>The fused point cloud after global registration.</returns>
public Points_3D Register_object_model_3d_global()
{
    HTuple models = GetModels();
    HTuple initialPoses = Pose2Mat();

    HTuple refinedMats;
    HTuple scores; // registration scores are not used further here
    HOperatorSet.RegisterObjectModel3dGlobal(models, initialPoses, "previous", new HTuple(), Enum2Htuple(GenParamName.max_num_iterations), 1, out refinedMats, out scores);

    // Transform each model by its refined pose, then fuse them into one cloud.
    HTuple transformed = HalconWrapper.Affine_trans_object_model_3d(models, refinedMats);
    Points_3D fused = HalconWrapper.Union_object_model_3d(transformed);

    // Release the temporary transformed models.
    // NOTE(review): `models` from GetModels() is not cleared here — presumably
    // owned by this object; confirm ownership to rule out a handle leak.
    HOperatorSet.ClearObjectModel3d(transformed);
    return fused;
}
/** Release the point cloud. **/
/// <summary>
/// Frees the native HALCON 3D object model held in <c>Pointclouds</c>.
/// </summary>
public void Dispose()
{
    // Clear the point-cloud handle.
    // NOTE(review): no double-dispose guard — calling Dispose twice would clear
    // an already-cleared handle; confirm HALCON tolerates that.
    HOperatorSet.ClearObjectModel3d(Pointclouds);
}
// Acquisition worker thread for the KSJ GigE Vision camera.
// Configures the GVSP stream via raw register access, then loops grabbing frames
// and rendering them according to the payload type reported by the camera
// (0 = gray image, 1 = single profile, 2 = 3D range map). Runs until exitEvent
// is signalled.
//
// NOTE(review): several resources leak here — `ptrdata` (AllocHGlobal) is never
// freed, a new Pen is allocated per drawn segment and never disposed, each newly
// assigned preview Bitmap replaces the previous one without Dispose, and the PCD
// FileStream is not wrapped in using/try-finally (leaks if a write throws).
// NOTE(review): PictureBox_PREVIEWWND.Image and checkbox state are accessed from
// this worker thread without Invoke — confirm cross-thread UI access is intended.
private unsafe void GvspThread()
{
    int nRet = 0;
    uint uiGvspport = 0xcbbb; // GVSP destination UDP port to install on the camera
    uint uiGvspsize = 0;
    // Write the stream-channel port register (0xd00) and mirror the value into the SDK.
    // Register addresses/meanings per vendor SDK — TODO confirm against KSJ documentation.
    KSJGigeVisionApi.KSJGIGEVISION_WriteRegister(CamareIndex[m_nDeviceCurSel, 0], CamareIndex[m_nDeviceCurSel, 1], 0xd00, uiGvspport);
    KSJGigeVisionApi.KSJGIGEVISION_CameraSetGvspPort(CamareIndex[m_nDeviceCurSel, 0], CamareIndex[m_nDeviceCurSel, 1], (int)uiGvspport);
    // Read the packet size back from register 0xd04 and hand it to the SDK.
    KSJGigeVisionApi.KSJGIGEVISION_ReadRegister(CamareIndex[m_nDeviceCurSel, 0], CamareIndex[m_nDeviceCurSel, 1], 0xd04, ref uiGvspsize);
    KSJGigeVisionApi.KSJGIGEVISION_CameraSetGvspSize(CamareIndex[m_nDeviceCurSel, 0], CamareIndex[m_nDeviceCurSel, 1], (int)uiGvspsize);

    uint nColSize = 0;
    uint nRowSize = 0;
    uint nType = 0;
    // Image geometry and payload type (0x100 = columns, 0x104 = rows, 0xb000 = data type).
    KSJGigeVisionApi.KSJGIGEVISION_ReadRegister(CamareIndex[m_nDeviceCurSel, 0], CamareIndex[m_nDeviceCurSel, 1], 0x100, ref nColSize);
    KSJGigeVisionApi.KSJGIGEVISION_ReadRegister(CamareIndex[m_nDeviceCurSel, 0], CamareIndex[m_nDeviceCurSel, 1], 0x104, ref nRowSize);
    KSJGigeVisionApi.KSJGIGEVISION_ReadRegister(CamareIndex[m_nDeviceCurSel, 0], CamareIndex[m_nDeviceCurSel, 1], 0xb000, ref nType);

    // Frame buffer: sized for two float planes (Z + X, 4 bytes each) per pixel.
    byte[] pImageData = new byte[nColSize * nRowSize * 8];
    // Register 0x124 = 1 appears to start acquisition (set back to 0 on exit below).
    KSJGigeVisionApi.KSJGIGEVISION_WriteRegister(CamareIndex[m_nDeviceCurSel, 0], CamareIndex[m_nDeviceCurSel, 1], 0x124, 1);

    decimal fProfiley;
    fProfiley = YnumericUpDown.Value; // Y step per profile, taken from the UI
    // Z-map display range from the UI, rounded to 2 decimals.
    float fZmapLow = (float)(Math.Round(LownumericUpDown.Value * 100)) / 100;
    float fZmapHigh = (float)(Math.Round(HighnumericUpDown.Value * 100)) / 100;
    float unit = (fZmapHigh - fZmapLow) / 255; // Z quantization step for 8-bit preview

    int nWidth = (int)nColSize;
    int nHeight = (int)nRowSize;
    int nCount = 0;
    float[] profile;  // Z values per point
    float[] profilex; // X values per point
    profile = new float[nColSize * nRowSize];
    profilex = new float[nColSize * nRowSize];
    Graphics g;
    int i = 0, j;
    Point p1 = new Point(0, 0);
    Point p2 = new Point(0, 0);
    int Index = 0;
    byte[] idata = new byte[nColSize * nRowSize]; // 8-bit quantized Z preview
    Bitmap bitmap;
    float fy = (float)(Math.Round(YnumericUpDown.Value * 100)) / 100; // Y step as float
    // Per-axis coordinate planes handed to HALCON.
    float[] hPx = new float[nColSize * nHeight];
    float[] hPy = new float[nColSize * nHeight];
    float[] hPz = new float[nColSize * nHeight];
    HObject Hobjx = null, Hobjy = null, Hobjz = null;
    HTuple ObjectModel3DID = null;
    // Unmanaged staging buffer for GenImage1.
    // NOTE(review): never released with Marshal.FreeHGlobal — leaks once per thread start.
    IntPtr ptrdata = Marshal.AllocHGlobal(nWidth * nHeight * 4);

    // The four tuples below are only consumed by the commented-out
    // DispObjectModel3d call further down; kept for when display is re-enabled.
    HTuple camParam = 0;
    //CamParam := [0.01,0,7e-6,7e-6,352,288,710,576]
    camParam[0] = 0.01;
    camParam[1] = 0;
    camParam[2] = 7e-6;
    camParam[3] = 7e-6;
    camParam[4] = 352;
    camParam[5] = 288;
    camParam[6] = 710;
    camParam[7] = 576;
    HTuple Pose = 0;
    HOperatorSet.CreatePose(-14.1079, -27.3273, 207.606, 175.064, 1.5805, 269.28, "Rp+T", "gba", "point", out Pose);
    HTuple GenParamName = 0;
    //GenParamName := ['color','disp_pose','alpha','intensity']
    GenParamName[0] = "color";
    GenParamName[1] = "disp_pose";
    GenParamName[2] = "alpha";
    GenParamName[3] = "intensity";
    HTuple GenParamValue = 0;
    //GenParamValue := ['green','false',0.8,'none']
    GenParamValue[0] = "green";
    GenParamValue[1] = "false";
    GenParamValue[2] = 0.8;
    GenParamValue[3] = "none";

    while (true)
    {
        // Non-blocking exit check: stop acquisition (0x124 = 0) and leave the loop.
        if (exitEvent.WaitOne(0))
        {
            KSJGigeVisionApi.KSJGIGEVISION_WriteRegister(CamareIndex[m_nDeviceCurSel, 0], CamareIndex[m_nDeviceCurSel, 1], 0x124, 0);
            break;
        }
        nRet = KSJGigeVisionApi.KSJGIGEVISION_CameraGet3DGvspData(CamareIndex[m_nDeviceCurSel, 0], CamareIndex[m_nDeviceCurSel, 1], (int)nType, pImageData, ref nWidth, ref nHeight); // acquire one camera frame
        // NOTE(review): nWidth/nHeight are ref parameters and may be updated by the SDK,
        // but profile/profilex/idata/hP* buffers keep their original nColSize*nRowSize size — confirm sizes cannot grow.
        if (nRet == 1) // acquisition succeeded
        {
            if (nType == 0) //gray
            {
                bitmap = ToGrayBitmap(pImageData, nWidth, nHeight);
                this.PictureBox_PREVIEWWND.Image = bitmap;
            }
            else if (nType == 1)//profile
            {
                // Frame layout: first nWidth floats are Z, next nWidth floats are X.
                for (i = 0; i < nWidth; i++)
                {
                    profile[i] = BitConverter.ToSingle(pImageData, i * 4);
                }
                for (i = 0; i < nWidth; i++)
                {
                    profilex[i] = BitConverter.ToSingle(pImageData, (i + nWidth) * 4);
                }
                bitmap = new Bitmap(nWidth, 256);
                g = Graphics.FromImage(bitmap);
                g.Clear(Color.Black);
                for (i = 1; i < nWidth; ++i)// draw the profile polyline
                {
                    // -1000 marks invalid Z samples; X must be positive to plot.
                    if (profile[i - 1] != -1000 && profile[i] != -1000 && profilex[i - 1] > 0 && profilex[i] > 0)
                    {
                        // Scale X to a 1280px/50-unit span, Z to 256px/10-unit span (Y axis flipped).
                        p1.X = (int)(profilex[i - 1] * 1280 / 50);
                        p1.Y = (int)(256 - profile[i - 1] * 256 / 10);
                        p2.X = (int)(profilex[i] * 1280 / 50);
                        p2.Y = (int)(256 - profile[i] * 256 / 10);
                        g.DrawLine(new Pen(Color.White, 1), p1, p2); // NOTE(review): Pen not disposed; hoist one Pen out of the loop
                    }
                }
                this.PictureBox_PREVIEWWND.Image = bitmap;
                g.Dispose();
            }
            else if (nType == 2)//3d
            {
                // Frame layout: nCount floats of Z followed by nCount floats of X.
                nCount = nWidth * nHeight;
                for (i = 0; i < nCount; i++)
                {
                    profile[i] = BitConverter.ToSingle(pImageData, i * 4);
                }
                for (i = 0; i < nCount; i++)
                {
                    profilex[i] = BitConverter.ToSingle(pImageData, (i + nCount) * 4);
                }
                // Quantize Z into 8-bit gray for the preview; out-of-range pixels go black.
                Index = 0;
                for (i = 0; i < nHeight; ++i)
                {
                    for (j = 0; j < nWidth; ++j)
                    {
                        if (profile[Index] <= fZmapHigh && profile[Index] >= fZmapLow)
                        {
                            idata[Index] = (byte)((profile[Index] - fZmapLow) / unit + 0.5);
                        }
                        else
                        {
                            idata[Index] = 0;
                        }
                        ++Index;
                    }
                }
                bitmap = ToGrayBitmap(idata, nWidth, nHeight);
                this.PictureBox_PREVIEWWND.Image = bitmap;
                // Build X/Y/Z planes; invalid points (X == -1000) get a synthetic
                // X of 0.04*j and Z of 0. Y is row index times the UI step.
                for (i = 0; i < nHeight; ++i)
                {
                    for (j = 0; j < nWidth; ++j)
                    {
                        if (profilex[i * nWidth + j] == -1000)
                        {
                            profilex[i * nWidth + j] = 0.04f * j;
                            profile[i * nWidth + j] = 0;
                        }
                        hPx[i * nWidth + j] = profilex[i * nWidth + j];
                        hPz[i * nWidth + j] = profile[i * nWidth + j];
                        hPy[i * nWidth + j] = fy * i;
                    }
                }
                // One-shot raw dump of the coordinate planes when the checkbox is ticked.
                bool bCheck1 = checksavefile.Checked;
                if (bCheck1)
                {
                    int len = nHeight * nWidth;
                    WriteFile("X", hPx, len);
                    WriteFile("Y", hPy, len);
                    WriteFile("Z", hPz, len);
                    checksavefile.Checked = false; // auto-reset so it saves only once
                }
                // Stage each plane through the unmanaged buffer into a HALCON "real"
                // image, then assemble the 3D object model.
                Marshal.Copy(hPx, 0, ptrdata, nWidth * nHeight);
                HOperatorSet.GenImage1(out Hobjx, "real", nWidth, nHeight, ptrdata);
                Marshal.Copy(hPy, 0, ptrdata, nWidth * nHeight);
                HOperatorSet.GenImage1(out Hobjy, "real", nWidth, nHeight, ptrdata);
                Marshal.Copy(hPz, 0, ptrdata, nWidth * nHeight);
                HOperatorSet.GenImage1(out Hobjz, "real", nWidth, nHeight, ptrdata);
                HOperatorSet.XyzToObjectModel3d(Hobjx, Hobjy, Hobjz, out ObjectModel3DID);
                HOperatorSet.PrepareObjectModel3d(ObjectModel3DID, "segmentation", "true", "distance_to", "auto");
                //HOperatorSet.DispObjectModel3d(this.hWindowControl1.HalconWindow, ObjectModel3DID, camParam, Pose, GenParamName, GenParamValue);
                // The model is built and prepared but only displayed via the line above
                // when uncommented; release everything for this frame.
                Hobjx.Dispose();
                Hobjy.Dispose();
                Hobjz.Dispose();
                HOperatorSet.ClearObjectModel3d(ObjectModel3DID);
                // Export the frame as an ASCII PCD v0.7 file when requested.
                // NOTE(review): filename "test.pcd" is hard-coded and overwritten each time.
                bool bCheck = SavecheckBox.Checked;
                if (bCheck)
                {
                    FileStream fs = new FileStream("test.pcd", FileMode.Create);
                    string temp = "# .PCD v0.7 - Point Cloud Data file format\n";
                    byte[] data = System.Text.Encoding.Default.GetBytes(temp);
                    fs.Write(data, 0, data.Length);
                    temp = "VERSION 0.7\n";
                    data = System.Text.Encoding.Default.GetBytes(temp);
                    fs.Write(data, 0, data.Length);
                    temp = "FIELDS x y z\n";
                    data = System.Text.Encoding.Default.GetBytes(temp);
                    fs.Write(data, 0, data.Length);
                    temp = "SIZE 4 4 4\n";
                    data = System.Text.Encoding.Default.GetBytes(temp);
                    fs.Write(data, 0, data.Length);
                    temp = "TYPE F F F\n";
                    data = System.Text.Encoding.Default.GetBytes(temp);
                    fs.Write(data, 0, data.Length);
                    temp = "COUNT 1 1 1\n";
                    data = System.Text.Encoding.Default.GetBytes(temp);
                    fs.Write(data, 0, data.Length);
                    temp = string.Format("WIDTH {0}\n", nColSize);
                    data = System.Text.Encoding.Default.GetBytes(temp);
                    fs.Write(data, 0, data.Length);
                    temp = string.Format("HEIGHT {0}\n", nRowSize);
                    data = System.Text.Encoding.Default.GetBytes(temp);
                    fs.Write(data, 0, data.Length);
                    temp = "VIEWPOINT 0 0 0 1 0 0 0\n";
                    data = System.Text.Encoding.Default.GetBytes(temp);
                    fs.Write(data, 0, data.Length);
                    temp = string.Format("POINTS {0}\n", nCount);
                    data = System.Text.Encoding.Default.GetBytes(temp);
                    fs.Write(data, 0, data.Length);
                    temp = "DATA ascii\n";
                    data = System.Text.Encoding.Default.GetBytes(temp);
                    fs.Write(data, 0, data.Length);
                    // One "x y z" line per point; last line has no trailing newline.
                    // NOTE(review): Y uses integer division i/1280 with a hard-coded
                    // width — looks wrong for nWidth != 1280, confirm. Also, invalid
                    // X values were already overwritten with 0.04f*j above, so the
                    // -1000 "nan" branch here can presumably never fire — verify.
                    for (i = 0; i < nCount; i++)
                    {
                        if (i != nCount - 1)
                        {
                            if (profilex[i] == -1000)
                            {
                                temp = "nan nan nan\n";
                            }
                            else
                            {
                                temp = string.Format("{0:N5} {1:N5} {2:N5}\n", profilex[i], fProfiley * (i / 1280), profile[i]);
                            }
                        }
                        else
                        {
                            if (profilex[i] == -1000)
                            {
                                temp = "nan nan nan";
                            }
                            else
                            {
                                temp = string.Format("{0:N5} {1:N5} {2:N5}", profilex[i], fProfiley * (i / 1280), profile[i]);
                            }
                        }
                        data = System.Text.Encoding.Default.GetBytes(temp);
                        fs.Write(data, 0, data.Length);
                    }
                    // flush the buffer and close the stream
                    fs.Flush();
                    fs.Close();
                }
            }
        }
    }
}