// Connects to the UR robot and starts the periodic update coroutine.
// Order matters: the real-time client must be running before the primary
// (sync) interface is started.
void URConnect() {
    aRTClient.startRTClient(); // start the RTClient server (original comment: 启动RTClient)
    demoInstance.startPrimary(); // start synchronization over the primary interface (原文: 启动同步)
    // Superseded WinForms-timer / coroutine variants kept for reference:
    //timer1.Start();
    //timer2.Start();
    //StartCoroutine(timer1_Tick());
    StartCoroutine(timer2_Tick()); // Unity coroutine drives the periodic tick instead of a timer
}
/// <summary>
/// Starts the RT client and waits (without blocking the UI thread) until it
/// reports <c>Syncing</c>, then enables the next-step button.
/// </summary>
/// <param name="sender">Event source (unused).</param>
/// <param name="e">Event data (unused).</param>
/// <remarks>
/// <c>async void</c> is intentional: it is the accepted pattern for top-level
/// UI event handlers. The original implementation busy-waited with
/// <c>Thread.Sleep</c>, which froze the UI thread until sync was reached and
/// prevented the button-state changes from repainting.
/// </remarks>
private async void button2_Click(object sender, EventArgs e)
{
    // Disable up front to guard against re-entry while we await below.
    button2.Enabled = false;

    aRTClient.startRTClient();

    // Poll until the client is syncing; Task.Delay yields the UI thread.
    // NOTE(review): like the original, this loops forever if the client never
    // reaches Syncing — consider a timeout if that can happen in practice.
    while (aRTClient.Status != RTClientStatus.Syncing)
    {
        await System.Threading.Tasks.Task.Delay(10);
    }

    button3.Enabled = true;
}
/// <summary>
/// Creates the UR robot interfaces (dashboard, RTDE, RT client), waits for
/// both data channels to start synchronizing, and launches the background
/// thread that refreshes robot data.
/// </summary>
private void initURRobot()
{
    // All robot endpoints live on the same controller IP.
    const string robotIp = "192.168.100.11";

    aDashBoard = new DashBoard(robotIp);
    aRTClient = new RTClient(robotIp);

    // RTDE is used for data acquisition (原文: RTDE用于采集数据).
    aRTDE = new RTDE(System.Environment.CurrentDirectory + "\\RTDEConfig.xml", robotIp);
    aRTDE.startSync();
    // BUG FIX: the original used `if`, which slept at most once and then fell
    // through even when RTDE had not started synchronizing. Poll until it has,
    // mirroring the RTClient wait loop below.
    while (!aRTDE.isSynchronizing)
    {
        Thread.Sleep(10);
    }

    // RTClient is used for control (原文: RTClient用来控制).
    aRTClient.startRTClient();
    while (aRTClient.Status != RTClientStatus.Syncing)
    {
        Thread.Sleep(10);
    }

    // Background thread so a still-running refresh loop cannot keep the
    // process alive after the UI exits.
    Thread thread = new Thread(new ThreadStart(refreshRobotData));
    thread.IsBackground = true;
    thread.Start();
}
/// <summary>
/// Array of colors corresponding to each point of the point cloud.
/// NOTE(review): the original Japanese summaries appear shifted relative to the
/// commented-out fields below (this one sits over a Vector3 array) — verify
/// before reviving any of them.
/// </summary>
//System.Numerics.Vector3[] vertices;
/// <summary>
/// List of which indices in <c>vertices</c> to draw (all points are drawn,
/// but the list is required by the rendering procedure).
/// </summary>
//Color[] colors;
/// <summary>
/// Class for coordinate transformation (Color⇔Depth correspondence,
/// Depth→xyz, etc.).
/// </summary>
//int[] indices;
/// <summary>
/// Initializes a new instance of the MainWindow class: connects to the UR
/// robot, opens and configures the Azure Kinect, fills in the fixed tool→TCP
/// transform, and allocates the point-cloud vertex/color buffers.
/// </summary>
public MainWindow()
{
    //set the robotic arm
    // Dashboard, real-time client and primary interface all target the same
    // controller IP; the primary interface is opened on port 30001.
    aDashBoard = new DashBoard("169.254.213.1");
    aRTClient = new RTClient("169.254.213.1");
    primaryInt = new PrimaryInterface("169.254.213.1", 30001); //30001
    aRTClient.startRTClient(); //169.254.213.1 //192.168.1.128
    primaryInt.startPrimary();

    // Open the default device
    this.kinect = Device.Open();

    // Configure camera modes
    this.kinect.StartCameras(new DeviceConfiguration
    {
        ColorFormat = ImageFormat.ColorBGRA32,
        ColorResolution = ColorResolution.R1080p,
        DepthMode = DepthMode.NFOV_2x2Binned,
        SynchronizedImagesOnly = true
    }); //NFOV

    // Fixed 4x4 homogeneous transform from the tool flange to the TCP
    // (last column is the translation, last row is [0 0 0 1]).
    // NOTE(review): rows 0 and 1 differ only in the sign of element [.,0];
    // the rotation part is numerically close to orthonormal, so this looks
    // intentional — confirm against the calibration that produced these
    // constants.
    matrix_T_tool2tcp[0, 0] = -0.707;
    matrix_T_tool2tcp[0, 1] = -0.0739;
    matrix_T_tool2tcp[0, 2] = 0.7031;
    matrix_T_tool2tcp[0, 3] = 0.0566;
    matrix_T_tool2tcp[1, 0] = 0.7070;
    matrix_T_tool2tcp[1, 1] = -0.0739;
    matrix_T_tool2tcp[1, 2] = 0.7031;
    matrix_T_tool2tcp[1, 3] = 0.0566;
    matrix_T_tool2tcp[2, 0] = 0;
    matrix_T_tool2tcp[2, 1] = 0.9945;
    matrix_T_tool2tcp[2, 2] = 0.1045;
    matrix_T_tool2tcp[2, 3] = 0.1002;
    matrix_T_tool2tcp[3, 0] = 0;
    matrix_T_tool2tcp[3, 1] = 0;
    matrix_T_tool2tcp[3, 2] = 0;
    matrix_T_tool2tcp[3, 3] = 1;

    // Cache the calibration-derived transformation and image dimensions.
    this.transform = this.kinect.GetCalibration().CreateTransformation();
    this.colorWidth = this.kinect.GetCalibration().ColorCameraCalibration.ResolutionWidth;
    this.colorHeight = this.kinect.GetCalibration().ColorCameraCalibration.ResolutionHeight;
    this.depthWidth = this.kinect.GetCalibration().DepthCameraCalibration.ResolutionWidth;
    this.depthHeight = this.kinect.GetCalibration().DepthCameraCalibration.ResolutionHeight;

    this.bitmap = new WriteableBitmap(colorWidth, colorHeight, 96.0, 96.0, PixelFormats.Bgra32, null);

    // One vertex/color slot per depth pixel, for each of the five buffers.
    this.num = depthHeight * depthWidth;
    vertices_0_b1 = new Vector3[num];
    vertices_0_b2 = new Vector3[num];
    vertices_0_b3 = new Vector3[num];
    vertices_0_b4 = new Vector3[num];
    vertices_0_b5 = new Vector3[num];
    colors_0_b1 = new Color32[num];
    colors_0_b2 = new Color32[num];
    colors_0_b3 = new Color32[num];
    colors_0_b4 = new Color32[num];
    colors_0_b5 = new Color32[num];

    this.DataContext = this;
    this.InitializeComponent();
}