        // Lets the user pick a source and a target PNG, then writes the fisheye-undistorted image.
        private Task DoUndistortImage(object o)
        {
            return Task.Run(async () =>
            {
                System.Windows.Forms.OpenFileDialog openFileDialog = null;
                System.Windows.Forms.SaveFileDialog saveFileDialog = null;
                bool open = false;
                bool save = false;

                CameraViewModel cvm = null;

                // The file dialogs must be shown on the UI thread, hence the synchronous Send.
                Parent.SyncContext.Send(c =>
                {
                    cvm = Parent.CameraViewModel;
                    openFileDialog = new System.Windows.Forms.OpenFileDialog();
                    openFileDialog.Filter = "Image (*.png)|*.png";
                    open = openFileDialog.ShowDialog() == System.Windows.Forms.DialogResult.OK;

                    saveFileDialog = new System.Windows.Forms.SaveFileDialog();
                    saveFileDialog.Filter = "Image (*.png)|*.png";
                    save = saveFileDialog.ShowDialog() == System.Windows.Forms.DialogResult.OK;
                }, null);

                if (open && save)
                {
                    MetroDialogSettings settings = new MetroDialogSettings()
                    {
                        AnimateShow = false,
                        AnimateHide = false
                    };

                    var controller = await Parent.DialogCoordinator.ShowProgressAsync(Parent, "Please wait...", "Exporting undistorted image...", settings: settings);

                    controller.SetCancelable(false);

                    // Read the raw image as grayscale and undistort it with the fisheye model.
                    Mat rawImage = CvInvoke.Imread(openFileDialog.FileName, Emgu.CV.CvEnum.ImreadModes.Grayscale);
                    Mat rawImageUndist = new Mat();

                    Mat map1 = new Mat();
                    Mat map2 = new Mat();

                    // Note: the remap size is hard-coded to 512x512 and is expected to match the input image.
                    Fisheye.InitUndistorRectifyMap(cvm.OrginalCameraMatrix, cvm.DistortionCoefficients, Mat.Eye(3, 3, DepthType.Cv64F, 1), cvm.CenteredCameraMatrix, new System.Drawing.Size(512, 512), DepthType.Cv32F, map1, map2);

                    CvInvoke.Remap(rawImage, rawImageUndist, map1, map2, Inter.Linear, BorderType.Constant);

                    CvInvoke.Imwrite(saveFileDialog.FileName, rawImageUndist);

                    await controller.CloseAsync();
                }
            });
        }
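
A rough, self-contained sketch of the same Emgu CV fisheye pipeline used above (InitUndistorRectifyMap followed by Remap), stripped of the view-model and dialog plumbing. The intrinsics, the zero distortion coefficients and the file names are placeholder assumptions; the identity rotation and the reuse of K as the new camera matrix mirror the calls above rather than a recommended setup:

using Emgu.CV;
using Emgu.CV.CvEnum;

static class FisheyeUndistortSketch
{
    static void Main()
    {
        // Placeholder intrinsics; substitute calibrated values.
        var K = new Matrix<double>(3, 3);
        K[0, 0] = 350.0; K[1, 1] = 350.0;   // fx, fy (assumed)
        K[0, 2] = 256.0; K[1, 2] = 256.0;   // cx, cy (assumed)
        K[2, 2] = 1.0;

        var D = new Matrix<double>(1, 4);    // k1..k4, all zero for illustration

        Mat src = CvInvoke.Imread("raw.png", ImreadModes.Grayscale);
        Mat dst = new Mat();
        Mat map1 = new Mat();
        Mat map2 = new Mat();

        // Build the undistortion maps once, then remap the image.
        Fisheye.InitUndistorRectifyMap(K, D, Mat.Eye(3, 3, DepthType.Cv64F, 1), K,
                                       src.Size, DepthType.Cv32F, map1, map2);
        CvInvoke.Remap(src, dst, map1, map2, Inter.Linear, BorderType.Constant);
        CvInvoke.Imwrite("undistorted.png", dst);
    }
}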
Example #2
        // Undistorts the raw key-frame image and overlays each tracked point as a 5x5 patch whose hue encodes its depth.
        private void UpdateLastKeyFrame(KeyFrame keyFrame, Mat rawImage, double minDepth, double maxDepth)
        {
            Mat rawImageColor       = new Mat();
            Mat rawImageColorUndist = new Mat();

            CvInvoke.CvtColor(rawImage, rawImageColor, ColorConversion.Gray2Bgr);

            Mat map1 = new Mat();
            Mat map2 = new Mat();

            Fisheye.InitUndistorRectifyMap(_OrginalCameraMatrix, _DistortionCoefficients, Mat.Eye(3, 3, DepthType.Cv64F, 1), _CenteredCameraMatrix, new System.Drawing.Size(512, 512), DepthType.Cv32F, map1, map2);

            CvInvoke.Remap(rawImageColor, rawImageColorUndist, map1, map2, Inter.Linear, BorderType.Constant);

            Mat hsvImage = new Mat();

            CvInvoke.CvtColor(rawImageColorUndist, hsvImage, ColorConversion.Bgr2Hsv);

            Image<Emgu.CV.Structure.Hsv, byte> rawImageColorUndistImage = hsvImage.ToImage<Emgu.CV.Structure.Hsv, byte>();

            byte[,,] data = rawImageColorUndistImage.Data;

            foreach (Point point in keyFrame.Points)
            {
                int u = (int)Math.Round(point.U);
                int v = (int)Math.Round(point.V);

                // Hue: the point depth (1 / inverse depth) mapped linearly onto OpenCV's [0, 180] hue range,
                // clamped so that points closer than minDepth map to hue 0.
                double depth = 1.0 / point.InverseDepth - minDepth;
                byte   h     = (byte)Math.Round((depth < 0 ? 0 : depth) * 180 / (maxDepth - minDepth));

                // Paint a 5x5 patch around the point; points within 2 px of the image border would index out of bounds.
                for (int i = -2; i <= +2; i++)
                {
                    for (int j = -2; j <= +2; j++)
                    {
                        data[v + i, u + j, 0] = h;
                        data[v + i, u + j, 1] = (byte)Math.Round(255 / 100.0 * HSV_SATURATION_PERCENT);
                        data[v + i, u + j, 2] = (byte)Math.Round(255 / 100.0 * HSV_VALUE_PERCENT);
                    }
                }
            }
            CvInvoke.CvtColor(rawImageColorUndistImage.Mat, _LastKeyFrame, ColorConversion.Hsv2Bgr);
        }
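
        // Illustrative helper (not part of the original class): the depth-to-hue mapping used in
        // UpdateLastKeyFrame above, written out as a standalone function for readability. It assumes
        // minDepth < maxDepth and OpenCV's 8-bit hue range of [0, 180].
        private static byte DepthToHue(double inverseDepth, double minDepth, double maxDepth)
        {
            double depth = 1.0 / inverseDepth - minDepth;   // distance above the near limit
            if (depth < 0) depth = 0;                       // clamp points closer than minDepth
            return (byte)Math.Round(depth * 180 / (maxDepth - minDepth));
        }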
        // Re-reads the intrinsic calibration settings and, if they changed, renders a preview of how the current calibration warps an ideal ChArUco board.
        internal override void SettingsUpdated()
        {
            base.SettingsUpdated();

            bool changed = false;

            var intrinsics = Parent.SettingContainer.Settings.CalibrationSettings.IntrinsicCalibrationSettings;

            changed |= intrinsics.Fx != Fx;
            changed |= intrinsics.Fy != Fy;
            changed |= intrinsics.Cx != Cx;
            changed |= intrinsics.Cy != Cy;
            changed |= intrinsics.Alpha != Alpha;

            // The distortion coefficients changed if either list contains an element the other does not.
            var firstNotSecond = _DistCoeffs.Except(intrinsics.DistCoeffs).ToList();
            var secondNotFirst = intrinsics.DistCoeffs.Except(_DistCoeffs).ToList();

            changed |= firstNotSecond.Any() || secondNotFirst.Any();

            if (changed)
            {
                Fx    = intrinsics.Fx;
                Fy    = intrinsics.Fy;
                Cx    = intrinsics.Cx;
                Cy    = intrinsics.Cy;
                Alpha = intrinsics.Alpha;

                _DistCoeffs = intrinsics.DistCoeffs.ToList();

                if (_DistCoeffs.Count == 4)
                {
                    K1 = _DistCoeffs[0];
                    K2 = _DistCoeffs[1];
                    K3 = _DistCoeffs[2];
                    K4 = _DistCoeffs[3];
                }

                Mat board     = ChArUcoCalibration.DrawBoard(5, 5, 0.04f, 0.02f, new System.Drawing.Size(Parent.CameraViewModel.ImageWidth, Parent.CameraViewModel.ImageHeight), 10, PredefinedDictionaryName.Dict6X6_250);
                Mat boardDist = board.Clone();

                // Intrinsic camera matrix layout: [fx, fx*alpha, cx; 0, fy, cy; 0, 0, 1].
                Mat cameraMatrix = new Mat(3, 3, Emgu.CV.CvEnum.DepthType.Cv64F, 1);
                Mat distCoeffs   = new Mat(1, Parent.CameraViewModel.FishEyeCalibration ? 4 : _DistCoeffs.Count, Emgu.CV.CvEnum.DepthType.Cv64F, 1);

                cameraMatrix.SetValue(0, 0, Fx);
                cameraMatrix.SetValue(1, 1, Fy);
                cameraMatrix.SetValue(0, 1, Fx * Alpha);
                cameraMatrix.SetValue(0, 2, Cx);
                cameraMatrix.SetValue(1, 2, Cy);
                cameraMatrix.SetValue(2, 2, 1.0f);

                Mat newK = new Mat(3, 3, Emgu.CV.CvEnum.DepthType.Cv64F, 1);

                for (int i = 0; i < distCoeffs.Cols && (Parent.CameraViewModel.FishEyeCalibration ? i < 4 : true); i++)
                {
                    distCoeffs.SetValue(0, i, _DistCoeffs[i]);
                }

                if (Parent.CameraViewModel.FishEyeCalibration)
                {
                    // Fisheye model: estimate a new camera matrix, build the rectify maps and remap the board.
                    Fisheye.EstimateNewCameraMatrixForUndistorRectify(cameraMatrix, distCoeffs, new System.Drawing.Size(Parent.CameraViewModel.ImageWidth, Parent.CameraViewModel.ImageHeight), Mat.Eye(3, 3, Emgu.CV.CvEnum.DepthType.Cv64F, 1), newK, 0, new System.Drawing.Size(Parent.CameraViewModel.ImageWidth, Parent.CameraViewModel.ImageHeight), 0.3);
                    Mat map1 = new Mat();
                    Mat map2 = new Mat();
                    Fisheye.InitUndistorRectifyMap(cameraMatrix, distCoeffs, Mat.Eye(3, 3, Emgu.CV.CvEnum.DepthType.Cv64F, 1), newK, new System.Drawing.Size(Parent.CameraViewModel.ImageWidth, Parent.CameraViewModel.ImageHeight), Emgu.CV.CvEnum.DepthType.Cv32F, map1, map2);
                    CvInvoke.Remap(board, boardDist, map1, map2, Emgu.CV.CvEnum.Inter.Linear, Emgu.CV.CvEnum.BorderType.Constant);
                }
                else
                {
                    // Pinhole model: standard undistortion with the calibrated coefficients.
                    CvInvoke.Undistort(board, boardDist, cameraMatrix, distCoeffs);
                }
                Parent.SyncContext.Post(c =>
                {
                    ChAruCoBoard         = new CvImageContainer();
                    ChAruCoBoard.CvImage = boardDist;
                }, null);
            }
        }
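
The fisheye branch above passes the trailing arguments of EstimateNewCameraMatrixForUndistorRectify as bare values (0, size, 0.3). A short, self-contained sketch of the same call with those arguments spelled out; the intrinsics, coefficients and image size are assumed placeholder values, not values from the project:

using System.Drawing;
using Emgu.CV;
using Emgu.CV.CvEnum;

static class NewCameraMatrixSketch
{
    static void Main()
    {
        // Placeholder fisheye intrinsics and coefficients (assumed values).
        var K = new Matrix<double>(3, 3);
        K[0, 0] = 350.0; K[1, 1] = 350.0;
        K[0, 2] = 256.0; K[1, 2] = 256.0;
        K[2, 2] = 1.0;
        var D = new Matrix<double>(1, 4);

        var size = new Size(512, 512);                 // assumed image size
        Mat newK = new Mat(3, 3, DepthType.Cv64F, 1);

        // Trailing arguments, in order: balance, newSize, fovScale.
        // balance  = 0   -> pick the new focal length at the "crop to valid pixels" end of the range;
        // fovScale = 0.3 -> divisor for the new focal length, so values below 1 narrow the field of view.
        Fisheye.EstimateNewCameraMatrixForUndistorRectify(K, D, size,
            Mat.Eye(3, 3, DepthType.Cv64F, 1), newK, 0, size, 0.3);
    }
}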
Example #4
        // Handles a new camera frame: applies optional photometric corrections and undistortion, then publishes the image.
        public void Fired(IOProxy proxy, List<AbstractProxyEventData> eventData)
        {
            CameraEventData cameraEventData = (CameraEventData)eventData.FirstOrDefault(c => c is CameraEventData);

            if (cameraEventData != null)
            {
                Task.Factory.StartNew(() =>
                {
                    Mat mat = cameraEventData.Image;

                    bool undistort = false;

                    Parent.SyncContext.Send(o =>
                    {
                        undistort = Undistort;

                        if (GammaCorretion)
                        {
                            // Apply the calibrated photometric response curve as a 256-entry lookup table.
                            Mat result = new Mat(ImageWidth, ImageHeight, DepthType.Cv32F, 1);
                            int i      = 0;
                            Mat lut    = new Mat(1, 256, DepthType.Cv32F, 1);
                            foreach (double val in Parent.SettingContainer.Settings.CalibrationSettings.PhotometricCalibrationSettings.ResponseValues)
                            {
                                lut.SetValue(0, i++, (byte)val);
                            }
                            CvInvoke.LUT(mat, lut, result);
                            result.ConvertTo(mat, DepthType.Cv8U);
                        }

                        if (VignetteCorretion)
                        {
                            // Divide out the vignette image to flatten the radial brightness falloff.
                            Mat invVignette = new Mat(ImageWidth, ImageHeight, DepthType.Cv32F, 1);
                            Mat result      = new Mat(ImageWidth, ImageHeight, DepthType.Cv32F, 1);
                            CvInvoke.Divide(Mat.Ones(ImageWidth, ImageHeight, DepthType.Cv32F, 1), Parent.CalibrationViewModel.PhotometricCalibrationViewModel.Vignette.CvImage, invVignette, 255, DepthType.Cv32F);
                            CvInvoke.Multiply(mat, invVignette, result, 1, DepthType.Cv32F);
                            result.ConvertTo(mat, DepthType.Cv8U);
                        }
                    }, null);

                    if (undistort)
                    {
                        Mat matUndist = new Mat(ImageWidth, ImageHeight, DepthType.Cv8U, 1);

                        if (FishEyeCalibration)
                        {
                            // Fisheye model: build the rectify maps for this frame and remap.
                            Mat map1 = new Mat();
                            Mat map2 = new Mat();
                            Fisheye.InitUndistorRectifyMap(OrginalCameraMatrix, DistortionCoefficients, Mat.Eye(3, 3, DepthType.Cv64F, 1), CenteredCameraMatrix, new System.Drawing.Size(ImageWidth, ImageHeight), DepthType.Cv32F, map1, map2);
                            CvInvoke.Remap(mat, matUndist, map1, map2, Inter.Linear, BorderType.Constant);
                        }
                        else
                        {
                            CvInvoke.Undistort(mat, matUndist, CenteredCameraMatrix, DistortionCoefficients);
                        }
                        mat = matUndist;
                    }

                    _FPSCounter.CountFrame();
                    Parent.SyncContext.Post(o =>
                    {
                        ExposureTime = cameraEventData.ExposureTime;
                        Image        = new CvImageContainer();

                        Image.CvImage = mat;
                    }, null);
                });
            }
        }
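
The gamma branch above fills its lookup table from the project's calibrated response curve. As a rough, self-contained sketch of the same CvInvoke.LUT mechanism with a synthetic gamma curve instead (the gamma value and file names are assumptions):

using System;
using Emgu.CV;
using Emgu.CV.CvEnum;

static class GammaLutSketch
{
    static void Main()
    {
        double gamma = 0.5;                       // assumed value, purely illustrative

        // 1x256 8-bit lookup table: output = 255 * (input / 255)^gamma.
        var lut = new Matrix<byte>(1, 256);
        for (int i = 0; i < 256; i++)
        {
            lut[0, i] = (byte)Math.Round(255.0 * Math.Pow(i / 255.0, gamma));
        }

        Mat src = CvInvoke.Imread("frame.png", ImreadModes.Grayscale);
        Mat dst = new Mat();

        // Each source intensity is replaced by the corresponding table entry.
        CvInvoke.LUT(src, lut, dst);
        CvInvoke.Imwrite("frame_gamma.png", dst);
    }
}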