Example #1
0
        /// <summary>
        /// Builds one unit-length viewing ray per pixel, undistorted with the
        /// inverse Brown-Conrady radial terms K1/K2 of the given intrinsics.
        /// </summary>
        /// <param name="intrinsics">Camera intrinsics (principal point, focal lengths, radial coefficients).</param>
        /// <param name="width">Image width in pixels.</param>
        /// <param name="height">Image height in pixels.</param>
        /// <returns>A <see cref="Point3fImage"/> holding a normalized direction vector per pixel.</returns>
        private Point3fImage CalcDirections(InverseBrownConradyParams intrinsics, int width, int height)
        {
            Point3fImage rays = new Point3fImage(width, height);

            for (int v = 0; v < height; v++)
            {
                // Normalized image-plane y coordinate for this row (constant per row).
                float ny  = (intrinsics.Cy - v) / intrinsics.Fy;
                float ny2 = ny * ny;

                for (int u = 0; u < width; u++)
                {
                    float nx = (intrinsics.Cx - u) / intrinsics.Fx;

                    // Radial distortion correction factor: 1 + K1*r^2 + K2*r^4.
                    float rSq    = nx * nx + ny2;
                    float factor = 1 + intrinsics.K1 * rSq + intrinsics.K2 * (rSq * rSq);

                    // Undistorted direction; z is 1 on the normalized image plane,
                    // so the vector norm is sqrt(x^2 + y^2 + 1).
                    float ux      = nx * factor;
                    float uy      = ny * factor;
                    float invNorm = (float)(1 / Math.Sqrt(ux * ux + uy * uy + 1));

                    rays[v, u] = new Point3f(-ux * invNorm, -uy * invNorm, invNorm);
                }
            }
            return rays;
        }
Example #2
0
        /// <summary>
        /// Converts the polar scan (<c>_radii</c>) into a 1-row 3D point image,
        /// lazily building the per-sample direction table on first use.
        /// </summary>
        /// <returns>A <see cref="Point3fImage"/> of size (_radii.Length x 1) with points in the XZ plane (y = 0).</returns>
        private Point3fImage CalcPoint3DImage()
        {
            // Build the cos/sin lookup table once; it only depends on the scan geometry.
            if (null == _directions)
            {
                int numSteps = _radii.Length;
                _directions = new float[numSteps, 2];
                for (int i = 0; i < numSteps; ++i)
                {
                    // Angles appear to be given in 1/10000 degree (hence the 180 * 10000
                    // divisor) — samples are laid out in reverse angular order.
                    double angleRad = Math.PI * (double)(_startingAngle + ((numSteps - 1 - i) * _angularStepWidth)) / (180.0 * 10000.0);
                    _directions[i, 0] = (float)Math.Cos(angleRad);
                    _directions[i, 1] = (float)Math.Sin(angleRad);
                }
            }

            // Emit the scan as a single-row 3D image.
            Point3fImage result = new Point3fImage(_radii.Length, 1)
            {
                ChannelName = _channelName,
                FrameNumber = _scanCounter,
            };

            for (int i = 0; i < _radii.Length; ++i)
            {
                // Scale the unit direction by the measured (scaled) radius.
                float radius = _scalingFactor * (float)_radii[i];
                result[i] = new Point3f(x: (radius * _directions[i, 0]),
                                        y: 0.0f,
                                        z: (radius * _directions[i, 1]));
            }

            return result;
        }
Example #3
0
        //TODO: Metrilus.Util should implement the *-operator on FloatImage and Point3fImage in future. Then just replace this method.
        /// <summary>
        /// Scales each precomputed direction vector by the corresponding measured
        /// distance, producing a 3D coordinate image.
        /// </summary>
        /// <param name="distances">Per-pixel distance values; must match the size of <c>_directions</c>.</param>
        /// <returns>A <see cref="Point3fImage"/> of 3D coordinates.</returns>
        private Point3fImage GetScaledDistances(FloatImage distances)
        {
            Point3fImage result = new Point3fImage(distances.Width, distances.Height);

            int count = result.Length;
            for (int idx = 0; idx < count; idx++)
            {
                result[idx] = distances[idx] * _directions[idx];
            }
            return result;
        }
Example #4
0
        /// <summary>
        /// Background capture loop: pulls frames from the remote camera, shows the
        /// 2D channel in the picture box and renders the 3D channel as a triangle mesh.
        /// Runs until <see cref="System.ComponentModel.BackgroundWorker.CancellationPending"/> is set.
        /// </summary>
        private void backgroundWorkerGetFrames_DoWork(object sender, DoWorkEventArgs e)
        {
            while (!backgroundWorkerGetFrames.CancellationPending)
            {
                // capture a new frame
                try
                {
                    //cam.Invoke("StartStreams", null);
                    cam.Update();
                    //cam.Invoke("StopStreams", null);
                }
                catch (Exception ex)
                {
                    // Best-effort recovery: on any update failure, reconnect to the camera.
                    // GC.KeepAlive silences the unused-variable warning while keeping ex inspectable.
                    GC.KeepAlive(ex);
                    cam = new CameraClient("192.168.1.72", 8081, 8082, "MetriCam2.Cameras.Kinect2", "MetriCam2.Cameras.Kinect2");
                    cam.Connect();
                    //cam.Invoke("StopStreams", null);
                }

                Point3fImage p3Image = null;
                if (channel3DName == ChannelNames.Point3DImage)
                {
                    Point3fCameraImage image3D = (Point3fCameraImage)cam.CalcChannel(ChannelNames.Point3DImage);
                    p3Image = new Point3fImage(ref image3D);
                }
                else if (channel3DName == ChannelNames.Distance || channel3DName == ChannelNames.ZImage)
                {
                    FloatCameraImage image3D = (FloatCameraImage)cam.CalcChannel(channel3DName);
                    p3Image = new Point3fImage(new FloatImage(ref image3D), projectiveTransformation, channel3DName == ChannelNames.ZImage);
                }

                FloatImage ir     = ConvertToFloatImage(cam.CalcChannel(channel2DName).ToFloatCameraImage());
                Bitmap     bitmap = ir.ToBitmap();
                //secondBitmap = zImage.ToBitmap();
                // set the picturebox-bitmap in the main thread to avoid concurrency issues (a few helper methods required, easier/nicer solutions welcome).
                this.InvokeSetBmp(bitmap);

                // Guard against an unsupported 3D channel name: previously this fell
                // through with p3Image == null and crashed on p3Image.Width below.
                if (p3Image == null)
                {
                    continue;
                }

                // Mark every pixel whose coordinates are plausible (|X|,|Y| < 99 m, 0 < Z < 99 m).
                MaskImage mask = new MaskImage(p3Image.Width, p3Image.Height);
                ir = ir.Normalize();
                for (int y = 0; y < mask.Height; y++)
                {
                    for (int x = 0; x < mask.Width; x++)
                    {
                        Point3f p = p3Image[y, x];
                        // Fixed: the Y check used bitwise '&' instead of short-circuit '&&'.
                        if (p.X > -99f && p.X < 99f && p.Y > -99f && p.Y < 99f && p.Z > 0 && p.Z < 99f)
                        {
                            mask[y, x] = 0xff;
                        }
                    }
                }

                p3Image.EliminateFlyingPixels(5, 0.005f, 0.2f);
                p3Image.Mask = mask;

                // Create the render object once, then update it in place on later frames.
                TriangleIndexList til = new TriangleIndexList(p3Image, false, true);
                if (renderTil == null)
                {
                    renderTil = new Metri3D.Objects.RenderTriangleIndexList(til, Color.White);
                    panel3D.AddRenderObject(renderTil);
                }
                else
                {
                    renderTil.UpdateData(til, Color.White);
                }
                panel3D.Invalidate();
            }
        }
Example #5
0
        /// <summary>
        /// Background receive loop: fetches JSON frame packets until cancellation is
        /// requested. On the first frame it parses the camera intrinsics, the image
        /// geometry and the (time-stable) offsets of the two embedded images, then
        /// signals <c>_frameAvailable</c> for each received frame.
        /// After more than <c>NumFrameRetries</c> consecutive failures the loop records
        /// the error and shuts down.
        /// </summary>
        private void UpdateLoop()
        {
            int consecutiveFailCounter = 0;

            while (!_cancelUpdateThreadSource.Token.IsCancellationRequested)
            {
                try
                {
                    // Packet layout:
                    // 0 - CameraModel
                    // 1 - distance Image
                    // 2 - intensity Image
                    SyncCola();
                    uint jsonSize = ReceiveDataSize();
                    _backJsonData = ReceiveJsonString(jsonSize);
                    if (null == _directions)
                    {
                        // One-time setup from the first frame.
                        string json      = new string(_backJsonData);
                        var    frameData = JsonConvert.DeserializeObject <List <CameraObject> >(json);
                        _imageWidth  = frameData[1].Data.Data.Width;
                        _imageHeight = frameData[1].Data.Data.Height;
                        InverseBrownConradyParams intrinsics = ParseIntrinsics(frameData[0]);
                        _directions = CalcDirections(intrinsics, _imageWidth, _imageHeight);

                        //Determine the offsets of intensity and distance image, which are stable over time.
                        string needle          = "\"data\":\"";
                        int    start_first_img = json.IndexOf(needle, 0) + needle.Length;
                        int    end_first_img   = json.IndexOf("\"", start_first_img);
                        _distanceJsonOffset = start_first_img;
                        _distanceJsonSize   = end_first_img - start_first_img;
                        int start_second_img = json.IndexOf(needle, end_first_img) + needle.Length;
                        int end_second_img   = json.IndexOf("\"", start_second_img);
                        _intensityJsonOffset = start_second_img;
                        _intensityJsonSize   = end_second_img - start_second_img;

                        if ("uint16" != frameData[1].Data.Data.ImageType ||
                            "uint16" != frameData[2].Data.Data.ImageType)
                        {
                            string format = frameData[1].Data.Data.ImageType != "uint16" ? frameData[1].Data.Data.ImageType : frameData[2].Data.Data.ImageType;
                            string msg    = $"{Name}: Frame data has unexpected format: '{format}', expected: 'uint16'";
                            log.Error(msg);
                            throw new ImageAcquisitionFailedException(msg);
                        }

                        if ("little" != frameData[1].Data.Data.Pixels.endian ||
                            "little" != frameData[2].Data.Data.Pixels.endian)
                        {
                            string endian = frameData[1].Data.Data.Pixels.endian != "little" ? frameData[1].Data.Data.Pixels.endian : frameData[2].Data.Data.Pixels.endian;
                            string msg    = $"{Name}: Frame data has unexpected endian: '{endian}', expected: 'little'";
                            log.Error(msg);
                            throw new ImageAcquisitionFailedException(msg);
                        }
                    }
                    _frameAvailable.Set();

                    // Reset the counter only after a successful fetch. Previously this
                    // reset sat outside the try/catch at the end of the loop, so it also
                    // ran after a failed iteration and the NumFrameRetries threshold
                    // could never be reached.
                    consecutiveFailCounter = 0;
                }
                catch (Exception e)
                {
                    consecutiveFailCounter++;
                    if (consecutiveFailCounter > NumFrameRetries)
                    {
                        string msg = $"{Name}: Receive failed more than {NumFrameRetries} times in a row. Shutting down update loop.";
                        log.Error(msg);
                        log.Error(e.Message);
                        _updateThreadError     = msg;
                        _updateThreadException = e;
                        // Wake any waiter so it can observe the recorded error.
                        _frameAvailable.Set();
                        break;
                    }
                }
            }
        }