/// <summary>
/// Consumes one incoming SEV sample and publishes it to the view model:
/// stereo images, point cloud, GNSS fix/status, and stereo-system pose.
/// Also records the per-message processing time in the diagnostics model.
/// </summary>
/// <param name="data">Sample to process; may be null or only partially populated —
/// every section is optional and skipped when absent.</param>
/// <returns>true when processing completed; false when any exception occurred.</returns>
public override bool ProcessData(SEVData data)
{
    try
    {
        Stopwatch stopWatch = Stopwatch.StartNew();

        if (data != null)
        {
            // Images and point cloud are independently optional on each sample.
            if (data.LeftImage != null)
            {
                SEVDataModel.LeftImageWriteableBitmap = ImageConverter.ConvertToWrittableBitmap(data.LeftImage);
            }
            if (data.RightImage != null)
            {
                SEVDataModel.RightImageWriteableBitmap = ImageConverter.ConvertToWrittableBitmap(data.RightImage);
            }
            if (data.PointCloud != null)
            {
                SEVDataModel.PointGeometry3D = PointCloudConverter.ConvertToPointGeometry3D(data.PointCloud);
            }

            if (data.NavSatFix != null)
            {
                SEVDataModel.GNSSModel.Latitude = data.NavSatFix.Latitude;
                SEVDataModel.GNSSModel.Longitude = data.NavSatFix.Longitude;
                SEVDataModel.GNSSModel.Altitude = data.NavSatFix.Altitude;

                if (data.NavSatFix.Status != null)
                {
                    SEVDataModel.GNSSModel.Status = GnssStatusToText(data.NavSatFix.Status.Status);
                    SEVDataModel.GNSSModel.Service = GnssServiceToText(data.NavSatFix.Status.Service);
                }
            }

            // Cache the deep navigation once instead of re-walking Odometry.Pose.Pose
            // for every coordinate.
            var pose = data.Odometry?.Pose?.Pose;
            if (pose?.Position != null && pose?.Orientation != null)
            {
                SEVDataModel.StereoSystemPoseModel.X = pose.Position.X;
                SEVDataModel.StereoSystemPoseModel.Y = pose.Position.Y;
                SEVDataModel.StereoSystemPoseModel.Z = pose.Position.Z;

                double heading, attitude, bank;
                Geometry.QuaternionToEuler(
                    pose.Orientation.X,
                    pose.Orientation.Y,
                    pose.Orientation.Z,
                    pose.Orientation.W,
                    out heading, out attitude, out bank);
                SEVDataModel.StereoSystemPoseModel.Heading = Geometry.RadianToDegree(heading);
                SEVDataModel.StereoSystemPoseModel.Attitude = Geometry.RadianToDegree(attitude);
                SEVDataModel.StereoSystemPoseModel.Bank = Geometry.RadianToDegree(bank);
            }
        }

        stopWatch.Stop();
        // Elapsed.TotalMilliseconds is exactly Ticks / TicksPerMillisecond
        // (also removes the stray ';;' from the original line).
        SEVDataModel.DiagnosticsModel.MessageProcessingTimeMilliseconds = stopWatch.Elapsed.TotalMilliseconds;
        return true;
    }
    catch (Exception)
    {
        // Best-effort contract: any failure is reported to the caller as false.
        // NOTE(review): the exception is swallowed with no logging — consider
        // logging it before returning so failures are diagnosable.
        return false;
    }
}

/// <summary>
/// Maps a NavSatFix status code to its display text.
/// Values match ROS sensor_msgs/NavSatStatus (-1 = NO_FIX … 2 = GBAS_FIX);
/// anything else renders as "Other".
/// </summary>
private static string GnssStatusToText(int status)
{
    switch (status)
    {
        case -1: return "NO_FIX";
        case 0: return "FIX";
        case 1: return "SBAS_FIX";
        case 2: return "GBAS_FIX";
        default: return "Other";
    }
}

/// <summary>
/// Maps a NavSatFix service value to its display text.
/// Values match ROS sensor_msgs/NavSatStatus service constants
/// (1 = GPS, 2 = GLONASS, 4 = COMPASS, 8 = GALILEO); combined bitmask
/// values fall through to "Other", matching the original behavior.
/// </summary>
private static string GnssServiceToText(int service)
{
    switch (service)
    {
        case 1: return "GPS";
        case 2: return "GLONASS";
        case 4: return "COMPASS";
        case 8: return "GALILEO";
        default: return "Other";
    }
}
/// <summary>
/// Connect-button handler: wires up three long-running background tasks —
/// (1) a TCP "backend" server that serves the latest RGB frame bytes and accepts
/// detection results, (2) the Visionary sensor connection/acquisition start, and
/// (3) the receive-and-process loop that converts each depth frame, optionally
/// runs min-area-rect box detection, and updates the UI picture boxes.
/// </summary>
private void button_Connect_Click(object sender, EventArgs e)
{
    String host = textBox_IP.Text;
    int port = 2114; // Visionary data-stream port (fixed).
    VisionaryDataStream dataStream = new VisionaryDataStream(host, port);
    VisionaryControl control = new VisionaryControl(host);
    textBox_Index.Text = "0";

    //-------------------------------------------------------------------------
    // Backend TCP server task: listens on textBox_BackendPort, replies to
    // "REQUEST" with the raw RGBA frame buffer, parses anything else that is
    // not "EMPTY" as detection data.
    // NOTE(review): lambda is marked async but contains no await — it runs
    // synchronously on a thread-pool thread; blocking calls below rely on that.
    //-------------------------------------------------------------------------
    Task.Run(async() =>
    {
        // Buffer for reading data
        Byte[] bytes = new Byte[1024];
        try
        {
            // NOTE(review): Dns.Resolve is obsolete — Dns.GetHostEntry is the
            // modern equivalent; also, AddressList[0] may be IPv6 on some hosts.
            IPAddress ipAddress = Dns.Resolve("localhost").AddressList[0];
            TcpListener server = new TcpListener(ipAddress, Convert.ToInt32(textBox_BackendPort.Text));
            System.Threading.Thread.Sleep(1000);
            server.Start();
            //MessageBox.Show("Waiting for client to connect...");
            TcpClient client = server.AcceptTcpClient(); // blocks until the backend client connects
            NetworkStream nStream = client.GetStream();

            //network loop, send then wait for reply loop
            int i = -1;
            while (true)
            {
                Thread.Sleep(2);
                i = nStream.Read(bytes, 0, bytes.Length);
                if (i > 0)
                {
                    String data = null;
                    // Translate data bytes to a ASCII string.
                    data = System.Text.Encoding.ASCII.GetString(bytes, 0, i);
                    //MessageBox.Show("Received:" + data);
                    // Process the data sent by the client.
                    data = data.ToUpper(); // protocol comparison is case-insensitive via upper-casing
                    //nStream.Write(bytes, 0, bytes.Length);
                    if (data == "REQUEST\n")
                    {
                        //transmit 1 frame
                        //compile image from 640x512x3 -> 983040x1
                        try
                        {
                            //byte[] bStream = ImageToByte(bitmap_RGB);
                            // bitmap_arry is refreshed by the processing task below;
                            // best-effort send — failures (no frame yet, closed socket)
                            // are deliberately ignored.
                            nStream.Write(bitmap_arry, 0, bitmap_arry.Length);
                        }
                        catch { }
                    }
                    else if (data != "EMPTY\n" && checkBox_UseBackend.Checked == true)
                    {
                        try
                        {
                            //try parsing x/y/w/h if its not empty
                            parsedata(data);
                        }
                        catch { } // malformed payloads are silently dropped (best-effort)
                    }
                }
            }
        }
        catch (Exception e1)
        {
            MessageBox.Show("SocketException: " + e1.Message);
        }
    });

    //-------------------------------------------------------------------------
    // Sensor connection task: open data stream + CoLa-B control channel, then
    // start acquisition. The try/catch around Task.Run cannot observe exceptions
    // thrown inside the (un-awaited) task — NOTE(review): failures other than
    // the two checked ConnectAsync results go unreported.
    //-------------------------------------------------------------------------
    try
    {
        Task.Run(async() =>
        {
            if (!await dataStream.ConnectAsync())
            {
                // Data stream connection failed
                MessageBox.Show("error");
            }
            if (!await control.ConnectAsync())
            {
                // Data control (CoLaB) connection failed
            }
            await control.StartAcquisitionAsync();
        });
    }
    catch (Exception ex)
    {
        MessageBox.Show("ERROR:" + ex.ToString());
        throw;
    }

    // Give the connection task time to finish before the first GetNextFrameAsync.
    System.Threading.Thread.Sleep(1500);

    //-------------------------------------------------------------------------
    // Receiving & Image Processing thread
    //-------------------------------------------------------------------------
    Task.Run(async() =>
    {
        while (true)
        {
            //-------------------------------------------------------------------------
            // Read Data from Visionary Datastream
            //-------------------------------------------------------------------------
            VisionaryFrame frame = await dataStream.GetNextFrameAsync();
            //System.Threading.Thread.Sleep(1000);
            VisionarySDepthMapData depthMap = frame.GetData<VisionarySDepthMapData>();

            // Important: When converting multiple frames, make sure to re-use the same
            // converter as it will result in much better performance.
            // NOTE(review): despite the comment above, a new converter is allocated
            // every iteration — hoisting it outside the loop would follow the advice.
            PointCloudConverter converter = new PointCloudConverter();
            Vector3[] pointCloud = converter.Convert(depthMap);

            // Frame is 640 wide; index (row * 640 + col) picks the pixel at (320, 250),
            // i.e. roughly the image center. Its Z becomes the reference height.
            CenterH = pointCloud[250 * 640 + 320].Z;

            //read and set range of textboxs
            setTextboxRange();

            //Assign converted image
            bitmap = depthMap.ZMap.ToBitmap(Zmap_DR, Zmap_Offset);
            bitmap_RGB = depthMap.RgbaMap.ToBitmap();
            bitmap_arry = depthMap.RgbaMap.Data.ToArray(); // raw buffer served to the backend client
            ZMap_arry = depthMap.ZMap.Data.ToArray();

            // NOTE(review): direct UI updates (label1, pictureBox*, listBox_BoxList)
            // from this thread-pool task violate the WinForms cross-thread rule —
            // should be marshalled via Invoke/BeginInvoke.
            this.label1.Text = bitmap.GetPixel(320, 250).R.ToString();

            //-------------------------------------------------------------------------
            // Optional default image proccessing method (locate box)
            //-------------------------------------------------------------------------
            if (checkBox_MinAreaRect.Checked == true)
            {
                // Detection runs on the Z-map by default, or the RGB map when RGBAsZmap is set.
                Bitmap TempMap = bitmap;
                if (RGBAsZmap == true)
                {
                    TempMap = bitmap_RGB;
                }

                //init different images for different detection stages
                Image<Bgr, byte> a = new Image<Bgr, byte>(TempMap);
                Image<Gray, byte> b = new Image<Gray, byte>(a.Width, a.Height); //edge detection
                Image<Gray, byte> c = new Image<Gray, byte>(a.Width, a.Height); //find contour

                //set threshold
                int Blue_threshold = 50;  //0-255
                int Green_threshold = 50; //0-255
                int Red_threshold = 50;   //0-255
                if (RGBAsZmap == false)
                {
                    // Invert the Z-map then binarize so near objects become white blobs.
                    a = ~a;
                    a = a.ThresholdBinary(new Bgr(Blue_threshold, Green_threshold, Red_threshold), new Bgr(255, 255, 255));
                }

                //Set ROI
                a.ROI = new Rectangle(ROIx, ROIy, (int)(640 * ROIScale), (int)(512 * ROIScale));

                //Find edges
                int cannytherhold = 100;
                CvInvoke.Canny(a, b, cannytherhold / 2, cannytherhold, 3, false);

                //Enhance canny edges
                Mat struct_element = CvInvoke.GetStructuringElement(ElementShape.Rectangle, new Size(3, 3), new Point(-1, -1));
                CvInvoke.Dilate(b, b, struct_element, new Point(-1, -1), 1, BorderType.Default, new MCvScalar(255, 255, 255));

                //Find contours
                VectorOfVectorOfPoint con = new VectorOfVectorOfPoint();
                CvInvoke.FindContours(b, con, c, RetrType.List, ChainApproxMethod.ChainApproxNone);
                Point[][] con1 = con.ToArrayOfArray();
                PointF[][] con2 = Array.ConvertAll<Point[], PointF[]>(con1, new Converter<Point[], PointF[]>(PointToPointF));

                listBox_BoxList.Items.Clear();
                for (int i = 0; i < con.Size; i++)
                {
                    //Filter params: area, arc length, and squareness ratio
                    // (area / (perimeter/4)^2 == 1 for a perfect square).
                    double tempArea = CvInvoke.ContourArea(con[i], true);
                    double tempArc = CvInvoke.ArcLength(con[i], true);
                    double tempScale = tempArea / Math.Pow(tempArc / 4, 2);
                    if (tempArea >= MinPixelArea && tempScale > RatioFilter)
                    {
                        RotatedRect rrec = CvInvoke.MinAreaRect(con2[i]); //g

                        //-------------------------------------------------------------------------
                        // find box dimensions
                        //-------------------------------------------------------------------------
                        //find box height: background height minus point-cloud Z at the box
                        // center, in mm. Walks right up to 50 px if the center point is invalid
                        // (yields a negative height).
                        int boxHeight = -10000;
                        int tempX = 0; //X axis offset if point is NULL
                        int tempY = 0; //Y axis offset if point is NULL
                        while (boxHeight < 0 && tempX <= 50)
                        {
                            boxHeight = (int)Math.Round(((double)BackgroundH - (double)pointCloud[(ROIy + tempY + (int)rrec.Center.Y) * 640 + (ROIx + tempX + (int)rrec.Center.X)].Z) * (double)1000);
                            tempX++;
                        }
                        tempX--;//for angle correction

                        //apply sensor prespective angle correction: scale height by
                        // 1/cos(angle between sensor axis and box center).
                        if (AngleCorr == true)
                        {
                            double boxCenterOffset = Math.Sqrt(Math.Pow(pointCloud[(ROIy + (int)rrec.Center.Y) * 640 + (ROIx + tempX + (int)rrec.Center.X)].X, 2) + Math.Pow(pointCloud[(ROIy + (int)rrec.Center.Y) * 640 + (ROIx + tempX + (int)rrec.Center.X)].Y, 2));
                            double boxCenterAngle = Math.Atan(boxCenterOffset * 1.1 / BackgroundH);
                            double heightMulti = 1 / Math.Cos(boxCenterAngle);
                            boxHeight = (int)((double)boxHeight * heightMulti);
                        }

                        //find dimension of 1 pixel from avg of X/Y axis box plane:
                        // world-space distance across 30 px, converted to mm per pixel.
                        tempX = 0; //unused here
                        tempY = 0; //unused here
                        double PixelScaleX = -10000;
                        double PixelScaleY = -10000;
                        double PixelScale = -10000;
                        PixelScaleX = (double)(pointCloud[(int)(ROIy + rrec.Center.Y) * 640 + (int)(ROIx + rrec.Center.X) - (15 + tempX)].X - (double)pointCloud[(int)(ROIy + rrec.Center.Y) * 640 + (int)(ROIx + rrec.Center.X) + (15 + tempX)].X) * 1000.0 / (30.0 + (tempX + tempX));
                        PixelScaleY = (double)(pointCloud[(int)(ROIy + rrec.Center.Y - (15 + tempY)) * 640 + (int)(ROIx + rrec.Center.X)].Y - (double)pointCloud[(int)(ROIy + rrec.Center.Y + (15 + tempY)) * 640 + (int)(ROIx + rrec.Center.X)].Y) * 1000.0 / (30.0 + (tempY + tempY));
                        PixelScale = (PixelScaleX + PixelScaleY) / 2;
                        int boxWidth = (int)(rrec.Size.Width * PixelScale);
                        int boxLength = (int)(rrec.Size.Height * PixelScale);

                        //Rounding result to the nearest multiple of 'Rounding' (mm)
                        boxLength = (int)(Math.Round((double)boxLength / Rounding, MidpointRounding.AwayFromZero) * Rounding);
                        boxWidth = (int)(Math.Round((double)boxWidth / Rounding, MidpointRounding.AwayFromZero) * Rounding);
                        boxHeight = (int)(Math.Round((double)boxHeight / Rounding, MidpointRounding.AwayFromZero) * Rounding);
                        double boxArea = ((double)boxLength / 10) * ((double)boxWidth / 10) * ((double)boxHeight / 10);//cm

                        //add box to listbox
                        listBox_BoxList.Items.Add("Box (Length: " + boxLength + "mm, Width: " + boxWidth + "mm, Height: " + boxHeight + "mm, Vol:" + boxArea + "cm^3)");

                        // Draw the rotated rectangle outline on the display image.
                        PointF[] pointfs = rrec.GetVertices();
                        for (int j = 0; j < pointfs.Length; j++)
                        {
                            CvInvoke.Line(a, new Point((int)pointfs[j].X, (int)pointfs[j].Y), new Point((int)pointfs[(j + 1) % 4].X, (int)pointfs[(j + 1) % 4].Y), new MCvScalar(0, 255, 0, 255), 4);
                        }
                    }
                }

                //save box list (one-shot flag set elsewhere in the form)
                if (SaveQueueList == true)
                {
                    // NOTE(review): StreamWriter is not in a using block — an exception
                    // while writing leaks the handle. Also "hhmmss" is 12-hour format;
                    // "HHmmss" was likely intended — confirm before changing.
                    System.IO.StreamWriter SaveFile = new System.IO.StreamWriter(textBox_Savepath.Text + "/Output/Detection Result/" + DateTime.Now.ToString("yyyyMMdd_hhmmss") + ".txt");
                    foreach (var item in listBox_BoxList.Items)
                    {
                        SaveFile.WriteLine(item);
                    }
                    SaveFile.Close();
                    SaveQueueList = false;
                }

                //save pointcloud
                // NOTE(review): unlike SaveQueueList, SaveQueuePly is never reset here,
                // so the PLY is rewritten every frame while the flag stays set — confirm
                // whether that is intended.
                if (SaveQueuePly == true)
                {
                    await PointCloudPlyWriter.WriteFormatPLYAsync(textBox_Savepath.Text + "/Output/PointCloud/" + index.ToString() + ".ply", pointCloud, depthMap.RgbaMap, true);
                }

                /*
                 * //for displaying contours
                 * for (int i = 0; i < con.Size; i++)
                 * {
                 *     CvInvoke.DrawContours(d, con, i, new MCvScalar(255, 255, 0, 255), 2);
                 * }
                 */

                this.pictureBox2.Image = a.ToBitmap();
                this.pictureBox1.Image = bitmap_RGB;
            }
            else
            {
                // Detection disabled: just show the raw RGB and Z-map images.
                this.pictureBox2.Image = bitmap_RGB;
                this.pictureBox1.Image = bitmap;
            }

            // Blend RGB and Z-map into the mixed preview; best-effort — a bad
            // dynamic-range textbox value or size mismatch is silently ignored.
            try
            {
                bitmap_Mixed = mixedMap(bitmap_arry, ZMap_arry, Convert.ToUInt16(textBox_DynamicRange.Text));
                this.pictureBox_Mixed.Image = bitmap_Mixed;
            }
            catch { }
        }
    });
}