Code Example #1
        /// <summary> Gets the number of cameras available. Caution: the first time this function
        ///     is called, it opens each camera, so it should be called before starting any
        ///     camera. </summary>
        /// <returns> The number of cameras. </returns>
        public int GetNumCameras()
        {
            // Count cameras manually
            if (NumCameras != -1)
            {
                return(NumCameras);
            }
            NumCameras = 0;
            while (NumCameras < 100)
            {
                using (var vc = VideoCapture.FromCamera(NumCameras))
                {
                    if (vc.IsOpened())
                    {
                        ++NumCameras;
                    }
                    else
                    {
                        break;
                    }
                }
            }

            return(NumCameras);
        }
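
The probe-until-failure pattern above can also be packaged as a standalone helper. A minimal sketch (not from the original project; the class name and the 100-device cap are assumptions):

    using OpenCvSharp;

    public static class CameraEnumerator
    {
        // Counts consecutive camera indices that open successfully, starting at index 0.
        public static int CountCameras(int maxProbe = 100)
        {
            int count = 0;
            while (count < maxProbe)
            {
                using (var vc = VideoCapture.FromCamera(count))
                {
                    if (!vc.IsOpened())
                    {
                        break; // the first index that fails to open ends the scan
                    }
                }
                count++;
            }
            return count;
        }
    }

As in the examples above, every probe briefly opens a device, so the result is best computed once at startup and cached.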
Code Example #2
 /// <summary> Gets the number of cameras available. Caution: the first time this function
 ///     is called, it opens each camera, so it should be called before starting any
 ///     camera. </summary>
 /// <returns> The number of cameras. </returns>
 public int GetNumCameras()
 {
     // Select Source of Image
     // Count cameras manually
     if (_numCameras == -1)
     {
         _numCameras = 0;
         while (_numCameras < 100)
         {
             using (var vc = VideoCapture.FromCamera(_numCameras))
             {
                 if (vc.IsOpened())
                 {
                     ++_numCameras;
                 }
                 else
                 {
                     break;
                 }
             }
         }
     }
     //if (!string.IsNullOrWhiteSpace(camo))
     //{
     //    _numCameras = 2;
     //    return _numCameras;
     //}
     //else
     return(_numCameras);
 }
Code Example #3
        public FrameAnalyzer(MainWindow mWw, string filePath, Plane pictureBox, CancellationToken TT, bool camM = false)
        {
            mw    = mWw;
            token = TT;
            // Initialize all reference variables
            int a;

            if (int.TryParse(filePath, out a))
            {
                videoStream = VideoCapture.FromCamera(CaptureDevice.Any);
            }
            else
            {
                videoStream = VideoCapture.FromFile(filePath);
            }

            plane      = pictureBox;
            lastTime   = DateTime.Now;
            cameraMode = camM;

            logName = $@"{DateTime.Now.ToShortDateString()} {DateTime.Now.ToLongTimeString()}.txt".Replace(":", ".");

            WriteStateLogs("Начало работы", logName);
            WriteStateLogsInDB();             // Also record to the database
        }
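
The constructor above decides between a camera and a file source by checking whether filePath parses as an integer. That decision can be factored into a small helper; a sketch (OpenSource is a hypothetical name, not part of FrameAnalyzer):

    using OpenCvSharp;

    public static VideoCapture OpenSource(string pathOrIndex)
    {
        // Numeric strings are treated as camera indices, everything else as a file path or URL.
        return int.TryParse(pathOrIndex, out int index)
            ? VideoCapture.FromCamera(index)
            : VideoCapture.FromFile(pathOrIndex);
    }

Note that the original constructor parses the value but then opens CaptureDevice.Any regardless; the sketch passes the parsed index through instead.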
Code Example #4
File: Program.cs Project: WabisabiNeet/knn_t
 static void ProcFromCaptureDevice(int deviceid)
 {
     using (var source = VideoCapture.FromCamera(CaptureDevice.Any, deviceid))
     {
         ProcCapture(source);
     }
 }
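
ProcCapture itself is not part of this example. A minimal loop it could run on the opened capture, using only calls that appear elsewhere on this page (the body below is an assumption):

    using OpenCvSharp;

    static void ProcCapture(VideoCapture source)
    {
        using (var frame = new Mat())
        {
            // Read frames until the source stops delivering them or Esc is pressed.
            while (source.Read(frame) && !frame.Empty())
            {
                Cv2.ImShow("capture", frame);
                if (Cv2.WaitKey(1) == 27)
                {
                    break;
                }
            }
        }
        Cv2.DestroyAllWindows();
    }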
Code Example #5
        private void captureButton_Click(object sender, EventArgs e)
        {
            if (capture == null)
            {
                try
                {
                    capture = VideoCapture.FromCamera(0);
                }
                catch (NullReferenceException exception)
                {
                    MessageBox.Show(exception.Message);
                }
                catch (OpenCvSharpException exc)
                {
                    MessageBox.Show(
                        "Attention: You have to copy all the assemblies and native libraries from an official release of OpenCV to the directory of the demo." +
                        Environment.NewLine + Environment.NewLine + exc);
                }
            }

            if (capture != null)
            {
                if (Capturing)
                {
                    captureButton.Text = "Start Capturing";
                    Application.Idle  -= DoDecoding;
                }
                else
                {
                    captureButton.Text = "Stop Capturing";
                    Application.Idle  += DoDecoding;
                }
                Capturing = !Capturing;
            }
        }
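
DoDecoding is not shown in this example. A handler attached to Application.Idle typically grabs and displays one frame per idle tick; a sketch under that assumption (pictureBox1 and the omitted decoding step are placeholders):

    // requires: using OpenCvSharp; using OpenCvSharp.Extensions;
    private void DoDecoding(object sender, EventArgs e)
    {
        using (var frame = new Mat())
        {
            if (capture.Read(frame) && !frame.Empty())
            {
                // Dispose the previous Bitmap before replacing it to avoid leaking GDI handles.
                pictureBox1.Image?.Dispose();
                pictureBox1.Image = BitmapConverter.ToBitmap(frame);
            }
        }
    }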
Code Example #6
        private void Form1_Load(object sender, EventArgs e)
        {
            try
            {
                cap = VideoCapture.FromCamera(CaptureDevice.Any);
                cap.Open(0);
                cap.Read(frame);
                Form1_Resize(null, null);
                //Window window = new Window("capture");
                //window.ShowImage(frame);
                //Window window0 = new Window("edge0");
                //Window window1 = new Window("edge1");
                //Window window2 = new Window("edge2");
                //Window window3 = new Window("edge3");
                //Window window4 = new Window("edge4");
                //Mat edgeframe0 = new Mat();
                //Mat edgeframe1 = new Mat();
                //Mat edgeframe2 = new Mat();
                //Mat edgeframe3 = new Mat();
                //Mat edgeframe4 = new Mat();
                //Cv2.Canny(frame, edgeframe0, 10, 50);
                //Cv2.Canny(frame, edgeframe1, 10, 70);
                //Cv2.Canny(frame, edgeframe2, 10, 90);
                //Cv2.Canny(frame, edgeframe3, 10, 110);
                //Cv2.Canny(frame, edgeframe4, 10, 130);
                //window0.ShowImage(edgeframe0);
                //window1.ShowImage(edgeframe1);
                //window2.ShowImage(edgeframe2);
                //window3.ShowImage(edgeframe3);
                //window4.ShowImage(edgeframe4);
            }
            catch (Exception extdf)
            {
                Console.WriteLine(extdf.ToString());
            }
            //while (true)
            //{
            //    try
            //    {
            //        Mat convertframe = new Mat(100, 100, MatType.CV_8U);
            //        //Cv2.ImShow("Form1", frame);
            //        frame.ConvertTo(convertframe, MatType.CV_8U);
            //        Bitmap tempimage = BitmapConverter.ToBitmap(convertframe);
            //        pictureBox1.Image = tempimage;
            //        Cv2.WaitKey(10);
            //    }
            //    catch (Exception extdf)
            //    {
            //        Console.WriteLine(extdf.ToString());
            //    }
            //}
            //Console.WriteLine("READ");
            //cap.Read(frame);
            //OpenCvSharp.Size newsize = new OpenCvSharp.Size(pictureBox1.Width, pictureBox1.Height);
            //frame = frame.Resize(newsize);
            //Bitmap tempimage = BitmapConverter.ToBitmap(frame);

            //pictureBox1.Image = tempimage;
        }
Code Example #7
        /// <summary>
        ///     Opens the camera selection dialog
        /// </summary>
        private void CameraChanger_Click(object sender, RoutedEventArgs e)
        {
            if (Start.IsEnabled == false)
            {
                Stop_Click(new object(), new RoutedEventArgs());
            }

            if (cameraMode == StreamSrc.Video)
            {
                var f = new CameraSelectWindow().GetCam();
                if (f == "")
                {
                    return;
                }

                try
                {
                    VideoCapture v;
                    if (f == "USB")
                    {
                        cameraMode = StreamSrc.USB_cam;
                        v          = VideoCapture.FromCamera(CaptureDevice.Any);
                        filePath   = "0";
                        camM       = true;
                    }
                    else
                    {
                        cameraMode = StreamSrc.IP_cam;
                        filePath   = f;
                        v          = VideoCapture.FromFile(filePath);
                        camM       = true;
                    }

                    var r = new Mat();
                    v.Read(r);
                    myImage.Source = WriteableBitmapConverter.ToWriteableBitmap(r);
                    r.Dispose();
                    v.Dispose();

                    CameraChanger.Content    = "Режим камеры активирован";
                    CameraChanger.Background = Brushes.Green;

                    labelCurState.Content = "Получение потока с камеры";
                }
                catch
                {
                    MessageBox.Show("Камера недоступна");
                    cameraMode = StreamSrc.Video;
                }
            }
            else
            {
                labelCurState.Content    = "Чтение видео-файла";
                CameraChanger.Content    = "Выбор камеры";
                CameraChanger.Background = Brushes.LightGray;
                cameraMode = StreamSrc.Video;
            }
        }
Code Example #8
        private void Open_btn_Click(object sender, EventArgs e)
        {
            //Thread.Sleep(3000);
            timer2.Start();

            capture             = VideoCapture.FromCamera(CaptureDevice.DShow, 0);
            capture.FrameWidth  = frameWidth;
            capture.FrameHeight = frameHeight; // end
            capture.Open(1);                   // end
        }
Code Example #9
File: Program.cs Project: Kawaian/OpenPoseSharp
        static void Main(string[] args)
        {
            Console.WriteLine($"EnvPath: {Environment.CurrentDirectory}");
            var modelDir = Path.Combine(Path.GetDirectoryName(Path.GetDirectoryName(Path.GetDirectoryName(Path.GetDirectoryName(Path.GetDirectoryName(Environment.CurrentDirectory))))), "external", "openpose", "models") + Path.DirectorySeparatorChar;

            Console.WriteLine($"ModelPath: {modelDir}");

            using (var capture = VideoCapture.FromCamera(0))
                using (var e = new HandLandmarkDetector(modelDir, 256))
                {
                    e.Init();

                    int w    = (int)(capture.FrameWidth * 0.75);
                    var rect = new Rect(capture.FrameWidth / 2 - w / 2, capture.FrameHeight / 2 - w / 2, w, w);

                    while (true)
                    {
                        using (var frame = capture.RetrieveMat())
                        {
                            Cv2.Rectangle(frame, rect, Scalar.Red, 2);

                            var handrect = new HandLandmarkDetector.Rect(rect.X, rect.Y, rect.Width, rect.Height);
                            var result   = e.Detect(frame.CvPtr, handrect, handrect);

                            for (int i = 0; i < 2; i++)
                            {
                                var    marks = i == 0 ? result.LeftHand : result.RightHand;
                                double prePtY = 0, prePtX = 0;
                                foreach (var item in marks)
                                {
                                    double prob = item.Prob, ptX = item.X, ptY = item.Y;
                                    if (prob > 0.15)
                                    {
                                        var probColor = new Scalar(255 * prob, 0, (1 - prob) * 255);
                                        if (prePtY != 0)
                                        {
                                            Cv2.Line(frame, new Point(ptX, ptY), new Point(prePtX, prePtY), probColor, 3);
                                        }
                                        Cv2.Rectangle(frame, new Rect((int)ptX - 6, (int)ptY - 6, 12, 12), Scalar.Lime, -1);
                                        Cv2.Rectangle(frame, new Rect((int)ptX - 6, (int)ptY - 6, 12, 12), probColor, 2);
                                    }
                                    Cv2.PutText(frame, (i / 3).ToString(), new Point(ptX, ptY + 1), HersheyFonts.HersheyPlain, 1, Scalar.Black);
                                    Cv2.PutText(frame, (i / 3).ToString(), new Point(ptX, ptY), HersheyFonts.HersheyPlain, 1, Scalar.Cyan);
                                    prePtX = ptX;
                                    prePtY = ptY;
                                }
                            }

                            Cv2.ImShow("camera", frame);
                            Cv2.WaitKey(1);
                        }
                    }
                }
        }
Code Example #10
 public Form1()
 {
     // Initialize components
     InitializeComponent();
     // Acquire the camera feed
     capture = VideoCapture.FromCamera(0);
     // Check whether the camera device opened successfully
     //textBox1.Text = Convert.ToString(capture.IsOpened());
 }
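
The last comment mentions checking whether the device opened, but the check itself is commented out. A simple way to surface it right after FromCamera (a sketch; the message text is an assumption):

    // after: capture = VideoCapture.FromCamera(0);
    if (!capture.IsOpened())
    {
        MessageBox.Show("Could not open camera 0.");
    }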
Code Example #11
File: Mainform.cs Project: gwangmin68/GitHubSfashfa
        private void opencv_init()
        {
            opencv_viewer.Width  = constant.OPENCV_WIDTH;
            opencv_viewer.Height = constant.OPENCV_HEIGHT;
            opencv_viewer.Top    = constant.OPENCV_LOCATION_Y + 260;
            opencv_viewer.Left   = constant.OPENCV_LOCATION_X;

            capture             = VideoCapture.FromCamera(CaptureDevice.DShow, 0);
            capture.FrameWidth  = frameWidth;
            capture.FrameHeight = frameHeight;
            capture.Open(0);
        }
Code Example #12
        public CameraFeed()
        {
            InitializeComponent();

            strCamFeed   = "";
            uSelectedCam = -1;

            // WebCamera comboBox setting
            uLength = chkLiBoxCameras.Items.Count;
            for (int i = 0; i < uLength; i++)
            {
                VideoCapture cap;
                try
                {
                    cap = VideoCapture.FromCamera(i);
                    int uFrameWidth = (int)cap.Get(CaptureProperty.FrameWidth);
                    liEnabledCams.Add(i);

                    cap.Dispose();
                }
                catch (Exception) { /* camera not available at this index */ }
            }

            for (int i = 0; i < uLength; i++)
            {
                chkLiBoxCameras.SetItemCheckState(i, CheckState.Checked);
                if (!liEnabledCams.Contains(i))
                {
                    chkLiBoxCameras.SetItemCheckState(i, CheckState.Indeterminate);
                }
                else
                {
                    if (i == uSelectedCam)
                    {
                        chkLiBoxCameras.SetItemCheckState(i, CheckState.Checked);
                    }
                    else
                    {
                        chkLiBoxCameras.SetItemCheckState(i, CheckState.Unchecked);
                    }
                }
            }

            //... WebCamera comboBox setting
            for (int i = 0; i < liIPcamModels.Length; i++)
            {
                cmbBoxModel.Items.Insert(i, liIPcamModels[i]);
            }
        }
Code Example #13
        public CameraService(CameraConfig config, CancellationTokenSource cancelSource)
        {
            _config       = config;
            _cancelSource = cancelSource;

            _capture             = VideoCapture.FromCamera(config.Index);
            _capture.Fps         = config.Fps;
            _capture.FrameHeight = config.Height;
            _capture.FrameWidth  = config.Width;

            if (config.Render)
            {
                _window = new Window("SecurityCam");
            }
        }
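
The constructor only configures the capture. A read loop that honours the injected CancellationTokenSource might look like the sketch below (the Run method and its frame handling are assumptions, not part of the original service):

    public void Run()
    {
        using (var frame = new Mat())
        {
            while (!_cancelSource.IsCancellationRequested)
            {
                if (!_capture.Read(frame) || frame.Empty())
                {
                    continue; // no frame available yet
                }
                _window?.ShowImage(frame); // only set when config.Render created a window
                Cv2.WaitKey(1);
            }
        }
    }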
Code Example #14
        public static void DoSomeThing(DateTime? startTime, long count)
        {
            var cap = VideoCapture.FromCamera(CaptureDevice.Any);

            cap.Set(CaptureProperty.FrameWidth, 512);
            cap.Set(CaptureProperty.FrameHeight, 300);
            Mat mat = new Mat();

            //while ())
            {
                cap.Read(mat);
                File.WriteAllBytes(DateTime.Now.Ticks + ".png", mat.ToBytes());
            }
            cap.Release();
            cap.Dispose();
        }
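
Mat.ToBytes() encodes to PNG by default, so the File.WriteAllBytes call above produces a valid image file; Cv2.ImWrite does the same in one step without the intermediate byte array (a sketch of the equivalent call):

    cap.Read(mat);
    Cv2.ImWrite(DateTime.Now.Ticks + ".png", mat); // encode and write in a single call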
Code Example #15
File: Form1.cs Project: tmolmf/Opencvcs_init_test
        private void Form1_Load(object sender, EventArgs e)
        {
            try
            {
                cap = VideoCapture.FromCamera(CaptureDevice.Any);
                //cap = VideoCapture.FromFile("D:\\Downloads\\프리스틴.mp4");

                cap.Open(0);
                cap.Read(frame);
                facecascade = new CascadeClassifier("haarcascade_frontalface_default.xml");
            }
            catch (Exception extdf)
            {
                Console.WriteLine(extdf.ToString());
            }
        }
Code Example #16
        static void Main(string[] args)
        {
            var cap = VideoCapture.FromCamera(CaptureDevice.Any);

            cap.Set(CaptureProperty.FrameWidth, 1366);
            cap.Set(CaptureProperty.FrameHeight, 1366);
            Mat mat = new Mat();

            while (cap.Read(mat))
            {
                File.WriteAllBytes(@"D:\code\cap\" + DateTime.Now.Ticks + ".png", mat.ToBytes());
                Thread.Sleep(2000);
            }
            cap.Release();
            cap.Dispose();
        }
Code Example #17
        /// <summary>
        ///     Reconnects (to USB). On failure, falls back to camera selection (CameraChanger_Click)
        /// </summary>
        public void MakeReconnect()
        {
            var f = new CameraSelectWindow();

            f.ShowBusyBox();
            var ff = f.imShown;

            Task.Run(() =>
            {
                var success = false;
                while (ff)
                {
                    VideoCapture v;
                    if (cameraMode == StreamSrc.USB_cam)
                    {
                        v = VideoCapture.FromCamera(CaptureDevice.Any);
                    }
                    else
                    {
                        v = VideoCapture.FromFile(filePath);
                    }

                    var r = new Mat();
                    v.Read(r);
                    v.Dispose(); // release the capture before the next attempt

                    if (r.Empty())
                    {
                        Thread.Sleep(1000);
                    }
                    else
                    {
                        Application.Current.Dispatcher.BeginInvoke(new Action(() => Restart(f)));
                        success = true;
                        r.Dispose();
                        break;
                    }

                    r.Dispose();
                    Application.Current.Dispatcher.BeginInvoke(new Action(() => ff = f.imShown));
                }

                if (!success)
                {
                    Application.Current.Dispatcher.BeginInvoke(new Action(() => PerformAct(CameraChanger_Click)));
                }
            });
        }
Code Example #18
        public GateController(Robot.Components.Robot robot)
        {
            this.robot = robot;

            this.videoCapture = VideoCapture.FromCamera(0, VideoCaptureAPIs.ANY);
            this.frame        = new Mat();

            //this.robot.GetBody().GoToRoot();
            // this.robot.GetBody().GetFrontBodyPart().SetTargetHeight(100);
            // this.robot.GetBody().GetBackBodyPart().SetTargetHeight(100);

            var hardwareInterface = ServiceLocator.Get <IHardwareInterface>();

            hardwareInterface.remoteTimeoutEvent += OnRemoteTimeout;

            this.steering = new WheelsControl(this.robot.GetSteeringJoystick(), this.robot.GetThrustJoystick(), 255);
        }
Code Example #19
File: Form1.cs Project: saifulvonair/OpenCVSharf
        void doExecute(object param)
        {
            // Capture capture;

            VideoCapture capture = VideoCapture.FromCamera(0, VideoCaptureAPIs.ANY);

            UMat frame = new UMat();

            /*
             *
             * var src = new Mat("lenna.png", ImreadModes.Grayscale);
             * var dst = new Mat();
             *
             * Cv2.Canny(src, dst, 50, 200);
             * using (new Window("src image", src))
             * using (new Window("dst image", dst))
             * {
             *  Cv2.WaitKey();
             * }
             */


            for (; ;)
            {
                if (mStopCapture)
                {
                    break;
                }

                _ = capture.Read(frame);

                if (frame.Empty())
                {
                    //  cerr << "Can't capture frame: " << i << std::endl;
                    break;
                }

                Bitmap bitmap = BitmapConverter.ToBitmap(frame.GetMat(AccessFlag.MASK));

                // OpenCV Trace macro for NEXT named region in the same C++ scope
                // Previous "read" region will be marked complete on this line.
                // Use this to eliminate unnecessary curly braces.
                // process_frame(frame);

                updateView(bitmap);
            }
        }
Code Example #20
File: Form1.cs Project: drimyus/vehicle_live_cam
        // Load Camera
        ///<summary>
        /// input the camera feed and configure the video capture from the camera feed (url)
        ///</summary>
        private void LoadCamera()
        {
            camFeed.ShowDialog();
            strCapFeed = camFeed.getCamFeed();
            if (strCapFeed == "")
            {
                return;
            }
            else if (strCapFeed.Length == 1)
            {
                if (cap != null)
                {
                    cap.Dispose();
                }
                int uCamID;
                if (Int32.TryParse(strCapFeed, out uCamID))
                {
                    cap = VideoCapture.FromCamera(uCamID);
                }
            }
            else
            {
                if (cap != null)
                {
                    cap.Dispose();
                }
                cap = VideoCapture.FromFile(strCapFeed);
            }

            if (cap != null)
            {
                uFrameWidth  = (uint)cap.Get(CaptureProperty.FrameWidth);
                uFrameHeight = (uint)cap.Get(CaptureProperty.FrameHeight);
                uFps         = (uint)cap.Get(CaptureProperty.Fps);
                if ((uFps < 10) || (uFps > 60))
                {
                    uFps = 30;
                }

                strCapType = "Camera";
            }
        }
Code Example #21
        private bool InitWebCamera()
        {
            try
            {
                cap             = VideoCapture.FromCamera(CaptureDevice.Any, 0);
                cap.FrameWidth  = frameWidth;
                cap.FrameHeight = frameHeight;
                cap.Open(0);
                wb = new WriteableBitmap(cap.FrameWidth, cap.FrameHeight, 96, 96, PixelFormats.Bgr24, null);
                imagedata.Source = wb;

                return(true);
            }
            catch
            {
                return(false);
            }
        }
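
To push frames into the WriteableBitmap created above, the WPF converter used in Code Example #7 can also write into an existing bitmap, avoiding a new allocation per frame. A per-frame sketch (the method name, and the availability of the two-argument overload in your OpenCvSharp version, are assumptions):

    // requires the OpenCvSharp WPF extensions (WriteableBitmapConverter)
    private void UpdateFrame(Mat frame)
    {
        if (!frame.Empty())
        {
            // Copies the BGR pixels of the frame into the existing WriteableBitmap "wb".
            WriteableBitmapConverter.ToWriteableBitmap(frame, wb);
        }
    }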
Code Example #22
File: CameraService.cs Project: nojan1/MiniRover
        private VideoCapture CreateCapture(int? captureId)
        {
            if (!captureId.HasValue)
            {
                return(null);
            }

            var capture = VideoCapture.FromCamera(captureId.Value);

            capture.Set(CaptureProperty.FrameWidth, _cameraConfiguration.Width);
            capture.Set(CaptureProperty.FrameHeight, _cameraConfiguration.Height);

            var fourCC = Enum.Parse(typeof(FourCC), _cameraConfiguration.Format);

            capture.Set(CaptureProperty.FourCC, (int)fourCC);

            return(capture);
        }
Code Example #23
        public TrackingController(Robot.Components.Robot robot)
        {
            this.robot = robot;

            this.videoCapture = VideoCapture.FromCamera(0, VideoCaptureAPIs.ANY);
            this.videoCapture.Set(VideoCaptureProperties.BufferSize, 1);
            this.frame = new Mat();
            //this.robot.GetBody().GoToRoot();

            this.virtualWindow = new VirtualWindow.VirtualWindow(this.frame);
            //ServiceLocator.Get<VirtualWindowHost>().AddVirtualWindow(this.virtualWindow);

            this.robot.GetBody().GetFrontBodyPart().GetLegs()[0].GetWheel().SetSpeed((int)-40);
            this.robot.GetBody().GetFrontBodyPart().GetLegs()[1].GetWheel().SetSpeed((int)-40);
            this.robot.GetBody().GetBackBodyPart().GetLegs()[0].GetWheel().SetSpeed((int)-40);
            this.robot.GetBody().GetBackBodyPart().GetLegs()[1].GetWheel().SetSpeed((int)-40);

            Thread.Sleep(1700);

            this.robot.GetBody().GetFrontBodyPart().GetLegs()[0].GetWheel().SetSpeed((int)0);
            this.robot.GetBody().GetFrontBodyPart().GetLegs()[1].GetWheel().SetSpeed((int)0);
            this.robot.GetBody().GetBackBodyPart().GetLegs()[0].GetWheel().SetSpeed((int)0);
            this.robot.GetBody().GetBackBodyPart().GetLegs()[1].GetWheel().SetSpeed((int)0);
        }
Code Example #24
        // Dual-camera RGB + IR silent liveness detection (the SDK calls OpenCV internally and returns a FaceCallback)
        public bool rgb_ir_liveness_check_mat()
        {
            int faceNum   = 2;       // number of faces passed in
            int face_size = faceNum; // face count passed in; detected face count on return

            TrackFaceInfo[] track_info = new TrackFaceInfo[faceNum];
            for (int i = 0; i < faceNum; i++)
            {
                track_info[i]           = new TrackFaceInfo();
                track_info[i].landmarks = new int[144];
                track_info[i].headPose  = new float[3];
                track_info[i].face_id   = 0;
                track_info[i].score     = 0;
            }
            int    sizeTrack = Marshal.SizeOf(typeof(TrackFaceInfo));
            IntPtr ptT       = Marshal.AllocHGlobal(sizeTrack * faceNum);
            long   ir_time   = 0;
            // Index 0 is the USB camera number as enumerated by the PC; in this demo, 0 is the IR camera.
            // Enumeration may differ between cameras and machines.
            // Indices typically range from 0 to 10.
            int          device  = select_usb_device_id();
            VideoCapture camera1 = VideoCapture.FromCamera(device);

            if (!camera1.IsOpened())
            {
                Console.WriteLine("camera1 open error");
                return(false);
            }

            VideoCapture camera2 = VideoCapture.FromCamera(device + 1);

            if (!camera2.IsOpened())
            {
                Console.WriteLine("camera2 open error");
                return(false);
            }

            RotatedRect box;
            Mat         frame1     = new Mat();
            Mat         frame2     = new Mat();
            Mat         rgb_mat    = new Mat();
            Mat         ir_mat     = new Mat();
            var         window_ir  = new Window("ir_face");
            var         window_rgb = new Window("rgb_face");

            while (true)
            {
                camera1.Read(frame1);
                camera2.Read(frame2);
                if (!frame1.Empty() && !frame2.Empty())
                {
                    if (frame1.Size(0) > frame2.Size(0))
                    {
                        rgb_mat = frame1;
                        ir_mat  = frame2;
                    }
                    else
                    {
                        rgb_mat = frame2;
                        ir_mat  = frame1;
                    }
                    float rgb_score = 0;
                    float ir_score  = 0;

                    IntPtr ptr = rgb_ir_liveness_check_faceinfo(rgb_mat.CvPtr, ir_mat.CvPtr, ref rgb_score, ref ir_score, ref face_size, ref ir_time, ptT);
                    string res = Marshal.PtrToStringAnsi(ptr);
                    Console.WriteLine("res is:{0}", res);
                    string msg_ir = "ir score is:" + ir_score.ToString();
                    Cv2.PutText(ir_mat, msg_ir, new Point(20, 50), HersheyFonts.HersheyComplex, 1, new Scalar(255, 100, 0));
                    window_ir.ShowImage(ir_mat);
                    Cv2.WaitKey(1);
                    Console.WriteLine("{0}", msg_ir);

                    string msg_rgb = "rgb score is:" + rgb_score.ToString();
                    Cv2.PutText(rgb_mat, msg_rgb, new Point(20, 50), HersheyFonts.HersheyComplex, 1, new Scalar(255, 100, 0));
                    for (int index = 0; index < face_size; index++)
                    {
                        IntPtr ptrTrack = (IntPtr)(ptT.ToInt64() + sizeTrack * index);
                        track_info[index] = (TrackFaceInfo)Marshal.PtrToStructure(ptrTrack, typeof(TrackFaceInfo));
                        Console.WriteLine("face_id is {0}:", track_info[index].face_id);
                        Console.WriteLine("landmarks is:");
                        for (int k = 0; k < 1; k++)
                        {
                            Console.WriteLine("{0},{1},{2},{3},{4},{5},{6},{7},{8},{9},",
                                              track_info[index].landmarks[k], track_info[index].landmarks[k + 1],
                                              track_info[index].landmarks[k + 2], track_info[index].landmarks[k + 3],
                                              track_info[index].landmarks[k + 4], track_info[index].landmarks[k + 5],
                                              track_info[index].landmarks[k + 6], track_info[index].landmarks[k + 7],
                                              track_info[index].landmarks[k + 8], track_info[index].landmarks[k + 9]
                                              );
                        }

                        for (int k = 0; k < track_info[index].headPose.Length; k++)
                        {
                            Console.WriteLine("angle is:{0:f}", track_info[index].headPose[k]);
                        }
                        Console.WriteLine("score is:{0:f}", track_info[index].score);
                        // angle
                        Console.WriteLine("mAngle is:{0:f}", track_info[index].box.mAngle);
                        // face width
                        Console.WriteLine("mWidth is:{0:f}", track_info[index].box.mWidth);
                        // center point X, Y coordinates
                        Console.WriteLine("mCenter_x is:{0:f}", track_info[index].box.mCenter_x);
                        Console.WriteLine("mCenter_y is:{0:f}", track_info[index].box.mCenter_y);
                        // draw the face bounding box
                        FaceTrack track = new FaceTrack();
                        box = track.bounding_box(track_info[index].landmarks, track_info[index].landmarks.Length);
                        track.draw_rotated_box(ref rgb_mat, ref box, new Scalar(0, 255, 0));
                    }
                    window_rgb.ShowImage(rgb_mat);
                    Cv2.WaitKey(1);
                    Console.WriteLine("{0}", msg_rgb);
                }
            }
            Marshal.FreeHGlobal(ptT);
            rgb_mat.Release();
            ir_mat.Release();
            frame1.Release();
            frame2.Release();
            Cv2.DestroyWindow("ir_face");
            Cv2.DestroyWindow("rgb_face");
            return(true);
        }
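
Because the loop above runs forever, the cleanup after it (FreeHGlobal, Release, DestroyWindow) is unreachable. A common fix is to key the exit off the WaitKey call that is already in the loop (a sketch, not the SDK sample's own code):

    // inside the while loop, after window_rgb.ShowImage(rgb_mat):
    if (Cv2.WaitKey(1) == 27) // Esc leaves the loop so the cleanup below can run
    {
        break;
    }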
Code Example #25
        public static void Run(Options options)
        {
            //In order to playback video opencv_ffmpeg*.dll must be found.
            string includePath = Environment.Is64BitProcess ? @".\dll\x64" : @".\dll\x86";

            foreach (string file in Directory.EnumerateFiles(includePath, "*.dll"))
            {
                File.Copy(file, Path.GetFileName(file), true);
            }

            //define the upper and lower boundaries of the HSV pixel
            //intensities to be considered 'skin'
            var lower = new Scalar(0, 48, 80);
            var upper = new Scalar(20, 255, 255);

            //if a video path was not supplied, grab the reference
            //to the webcam
            //otherwise, load the video
            VideoCapture camera = string.IsNullOrEmpty(options.Video)
                ? VideoCapture.FromCamera(CaptureDevice.Any)
                : new VideoCapture(Path.GetFullPath(options.Video));

            if (!camera.IsOpened())
            {
                Console.WriteLine("Failed to initialize video");
                return;
            }
            using (camera)
            {
                //keep looping over the frames in the video
                while (true)
                {
                    //grab the current frame
                    using (var frame = new Mat())
                        using (var disposer = new Disposer())
                        {
                            bool grabbed = camera.Read(frame);
                            //if we are viewing a video and we did not grab a
                            //frame, then we have reached the end of the video
                            if (!grabbed || frame.Width == 0 || frame.Height == 0)
                            {
                                if (!string.IsNullOrEmpty(options.Video))
                                {
                                    break;
                                }
                                continue;
                            }

                            //resize the frame, convert it to the HSV color space,
                            //and determine the HSV pixel intensities that fall into
                            //the specified upper and lower boundaries
                            Mat resizedFrame = ImageUtil.Resize(frame, width: 400);
                            disposer.Add(resizedFrame);
                            Mat converted = resizedFrame.CvtColor(ColorConversionCodes.BGR2HSV);
                            disposer.Add(converted);
                            Mat skinMask = new Mat();
                            disposer.Add(skinMask);
                            Cv2.InRange(converted, lower, upper, skinMask);

                            //apply a series of erosions and dilations to the mask
                            //using an elliptical kernel
                            using (Mat kernel = Cv2.GetStructuringElement(MorphShapes.Ellipse, new Size(11, 11)))
                            {
                                skinMask = skinMask.Erode(kernel, iterations: 2);
                                disposer.Add(skinMask);
                                skinMask = skinMask.Dilate(kernel, iterations: 2);
                                disposer.Add(skinMask);
                            }

                            //blur the mask to help remove noise, then apply the
                            //mask to the frame
                            skinMask = skinMask.GaussianBlur(new Size(3, 3), 0);
                            disposer.Add(skinMask);
                            Mat skin = new Mat();
                            disposer.Add(skin);
                            Cv2.BitwiseAnd(resizedFrame, resizedFrame, skin, skinMask);

                            //show the skin in the image along with the mask
                            Cv2.ImShow("images", resizedFrame);
                            Cv2.ImShow("mask", skin);

                            //if the 'q' key is pressed, stop the loop
                            if ((Cv2.WaitKey(1) & 0xff) == 'q')
                            {
                                break;
                            }
                        }
                }
            }

            Cv2.DestroyAllWindows();
        }
Code Example #26
        static int Main(string[] args)
        {
            int    result    = 0;  // success flag: 0 = failure, 1 = success
            string resultOCR = ""; // Tesseract result
            //string numeric = ""; // filtered digits

            List <string> resultTesser = new List <string>();

            Mat mtSrc = new Mat();


            try
            {
                int.TryParse(ConfigurationManager.AppSettings["RunMaximum"], out int runMaximum);
                int.TryParse(ConfigurationManager.AppSettings["DetectWidth"], out int detectWidth);
                int.TryParse(ConfigurationManager.AppSettings["DetectHeight"], out int detectHeight);
                int.TryParse(ConfigurationManager.AppSettings["ThresholdValue"], out int thresholdValue);
                int.TryParse(ConfigurationManager.AppSettings["DeviceNumber"], out int deviceNumber);
                int.TryParse(ConfigurationManager.AppSettings["DeleteWidth"], out int delWidth);


                if (runMaximum < 1 || detectWidth < 1 || detectHeight < 1 || thresholdValue < 1 ||
                    string.IsNullOrEmpty(ConfigurationManager.AppSettings["PathResult"]) ||
                    string.IsNullOrEmpty(ConfigurationManager.AppSettings["PathSuccess"]) ||
                    string.IsNullOrEmpty(ConfigurationManager.AppSettings["PathFail"]) ||
                    string.IsNullOrEmpty(ConfigurationManager.AppSettings["PathException"])
                    )
                {
                    throw new ConfigurationErrorsException("config 파일 설정값 오류");
                }

                //Initialize the OpenCV capture
                VideoCapture capture = VideoCapture.FromCamera(CaptureDevice.Any, deviceNumber);

                DirectoryInfo diResult = new DirectoryInfo(ConfigurationManager.AppSettings["PathResult"]);
                if (diResult.Exists == false)
                {
                    diResult.Create();
                }

                //Delete existing files under the result folder
                foreach (var file in diResult.GetFiles())
                {
                    File.Delete(string.Format("{0}\\{1}", diResult.Name, file));
                }

                for (int i = 1; i <= runMaximum; i++)
                {
                    resultOCR = "";

                    //Load the captured image into a Mat
                    capture.Read(mtSrc);

                    //Device problem
                    if (mtSrc.Width < 3) //minimum 3x3
                    {
                        throw new Exception("ERROR_DEVICE");
                    }

                    //Region detection
                    MSER mser = MSER.Create();
                    OpenCvSharp.Point[][] contours;
                    OpenCvSharp.Rect[]    bboxes;
                    mser.DetectRegions(mtSrc, out contours, out bboxes); //Canny is of no use with DetectRegions

                    //Smoothing
                    Cv2.MedianBlur(mtSrc, mtSrc, 3);

                    //Color conversion
                    Cv2.CvtColor(mtSrc, mtSrc, ColorConversionCodes.BGR2GRAY);

                    //Filter the detection results
                    var filteredBboxes = bboxes.Where(
                        r =>
                        r.Width >= detectWidth - 3 &&
                        r.Width <= detectWidth + 5 &&
                        r.Height >= detectHeight - 5 &&
                        r.Height <= detectHeight + 5
                        );

                    if (isDebugMode)
                    {
                        Console.WriteLine(filteredBboxes.Count());
                    }

                    //var orderedBboxes = filteredBboxes.OrderBy(r => r.Width);

                    foreach (var rect in filteredBboxes)
                    {
                        resultOCR = "";

                        Rect rectTemp = rect;
                        rectTemp.X     = rect.X + delWidth;
                        rectTemp.Width = rect.Width - delWidth;

                        //Crop the rect region
                        Mat mtCrop = mtSrc[rectTemp];

                        if (isDebugMode)
                        {
                            using (new Window(mtCrop))
                            {
                                Window.WaitKey(0);
                                Window.DestroyAllWindows();
                            }
                        }

                        resultOCR = Recognize(mtCrop, thresholdValue, rectTemp);

                        //Retry (adjust the leading crop edge by -1)
                        if (resultOCR.Length < 6 || resultOCR.Contains("_"))
                        {
                            rectTemp.X     = rect.X + delWidth - 1;
                            rectTemp.Width = rect.Width - delWidth + 1;
                            mtCrop         = mtSrc[rectTemp];
                            resultOCR      = Recognize(mtCrop, thresholdValue, rectTemp);
                        }
                        //Third attempt (adjust the leading crop edge by +1)
                        if (resultOCR.Length < 6 || resultOCR.Contains("_"))
                        {
                            rectTemp.X     = rect.X + delWidth + 1;
                            rectTemp.Width = rect.Width - delWidth - 1;
                            mtCrop         = mtSrc[rectTemp];
                            resultOCR      = Recognize(mtCrop, thresholdValue, rectTemp);
                        }
                        //Fourth attempt (adjust the leading crop edge by -2)
                        if (resultOCR.Length < 6 || resultOCR.Contains("_"))
                        {
                            rectTemp.X     = rect.X + delWidth - 2;
                            rectTemp.Width = rect.Width - delWidth + 2;
                            mtCrop         = mtSrc[rectTemp];
                            resultOCR      = Recognize(mtCrop, thresholdValue, rectTemp);
                        }
                        //Fifth attempt (adjust the leading crop edge by +2)
                        if (resultOCR.Length < 6 || resultOCR.Contains("_"))
                        {
                            rectTemp.X     = rect.X + delWidth + 2;
                            rectTemp.Width = rect.Width - delWidth - 2;
                            mtCrop         = mtSrc[rectTemp];
                            resultOCR      = Recognize(mtCrop, thresholdValue, rectTemp);
                        }
                        if (resultOCR.Length == 6 && resultOCR.Contains("_") == false)
                        {
                            result = 1; //success

                            Console.WriteLine(string.Format("{0}\t({1})", resultOCR, i));
                            if (isDebugMode)
                            {
                                //Console.WriteLine(string.Format("width : {0} height : {1}", rect.Width, rect.Height));
                                //Cv2.ImShow("mtCrop", mtCrop);
                                //Cv2.WaitKey(0);
                                //Cv2.DestroyWindow("seg");
                            }
                            break;
                        }
                    } // foreach

                    if (result == 1)
                    {
                        break;
                    }

                    //if (numeric.Length == 0)
                    //{
                    //    foreach (var rect in bboxes)
                    //    {
                    //        Scalar color = Scalar.RandomColor();
                    //        mtSrc.Rectangle(rect, color);
                    //    }
                    //}

                    //Cv2.ImShow(filename, mtSrc);
                    //Cv2.ImShow("clone", mtSrc);
                    //Cv2.WaitKey(0);
                    //Thread.Sleep(300);
                } //for runMaximum


                if (result == 1)
                {
                    //result = 1; //success flag

                    //Save the image
                    mtSrc.SaveImage(string.Format("{0}\\{1}.png", diResult.Name, resultOCR));

                    //Copy the image to the success folder
                    DirectoryInfo diSuccess = new DirectoryInfo(ConfigurationManager.AppSettings["PathSuccess"]);
                    if (diSuccess.Exists == false)
                    {
                        diSuccess.Create();
                    }

                    string filename = resultOCR;
                    if (File.Exists(string.Format("{0}\\{1}.png", diSuccess.Name, filename)))
                    {
                        filename = resultOCR + "_" + DateTime.Now.ToString("yyMMddHHmmss");
                    }

                    File.Copy(string.Format("{0}\\{1}.png", diResult.Name, resultOCR), string.Format("{0}\\{1}.png", diSuccess.Name, filename));

                    //Console.WriteLine(numeric);
                }
                else
                {
                    //Save the failed image
                    DirectoryInfo diFail = new DirectoryInfo(ConfigurationManager.AppSettings["PathFail"]);
                    if (diFail.Exists == false)
                    {
                        diFail.Create();
                    }

                    mtSrc.SaveImage(string.Format("{0}\\{1}.png", diFail.Name, DateTime.Now.ToString("yyMMddHHmmss")));
                }
            }
            catch (Exception ex)
            {
                //Save the exception
                DirectoryInfo diEx = new DirectoryInfo(ConfigurationManager.AppSettings["PathException"]);

                if (diEx.Exists == false)
                {
                    diEx.Create();
                }

                File.WriteAllText(string.Format("{0}\\{1}.txt", diEx.Name, DateTime.Now.ToString("yyMMddHHmmss")), ex.ToString());

                //Send an alert to the person in charge
                if (isDebugMode)
                {
                    //Console.WriteLine(result);
                    Console.WriteLine(ex);
                }
            }
            finally
            {
            }

            //Console.ReadKey();

            return(result);
        }
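
The retries above only vary the leading crop offset (0, -1, +1, -2, +2). The same idea can be written as a loop over candidate offsets, which keeps the Recognize call in one place (a sketch, not the original program's structure):

    // Candidate adjustments to the left edge of the crop, in the order tried above.
    int[] offsets = { 0, -1, +1, -2, +2 };
    foreach (int off in offsets)
    {
        Rect rectTemp = rect;
        rectTemp.X     = rect.X + delWidth + off;
        rectTemp.Width = rect.Width - delWidth - off;

        Mat mtCrop = mtSrc[rectTemp];
        resultOCR  = Recognize(mtCrop, thresholdValue, rectTemp);

        if (resultOCR.Length == 6 && !resultOCR.Contains("_"))
        {
            break; // good read, stop retrying
        }
    }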
Code Example #27
        /// <summary>
        /// validate the camera connection
        /// read the first frame from the camera feed
        /// </summary>
        private void btnCheck_Click(object sender, EventArgs e)
        {
            if (radioWeb.Checked)
            {
                try
                {
                    VideoCapture cap;
                    cap = VideoCapture.FromCamera(uSelectedCam);
                    int uFrameWidth = (int)cap.Get(CaptureProperty.FrameWidth);

                    cap.Dispose();
                    if (uFrameWidth == 0)
                    {
                        MessageBox.Show("ERROR! Web Camera: " + strCamFeed);
                        strCamFeed = uSelectedCam.ToString();
                    }
                    else
                    {
                        MessageBox.Show("Success! Web Camera: " + uSelectedCam.ToString());
                        strCamFeed = uSelectedCam.ToString();
                    }
                }
                catch (Exception ex) {
                    MessageBox.Show("ERROR! Web Camera: " + uSelectedCam.ToString());
                    strCamFeed = "";
                }
            }
            if (radioIP.Checked)
            {
                try
                {
                    strCamFeed = get_IPcamUrl();

                    VideoCapture cap;
                    cap = VideoCapture.FromFile(strCamFeed);
                    int uFrameWidth = (int)cap.Get(CaptureProperty.FrameWidth);
                    cap.Dispose();

                    if (uFrameWidth == 0)
                    {
                        MessageBox.Show("Error! IP Camera:" + strCamFeed);
                        strCamFeed = "";
                    }
                    else
                    {
                        MessageBox.Show("Success! IP Camera: " + strCamFeed);
                    }
                }
                catch (Exception ex)
                {
                    MessageBox.Show("ERROR! IP Camera: " + strCamFeed);
                    strCamFeed = "";
                }
            }

            if (strCamFeed == "")
            {
                chkBoxConnectCheck.Checked = false;
                chkBoxConnectCheck.Text    = "Error !";
            }
            else
            {
                chkBoxConnectCheck.Checked = true;
                chkBoxConnectCheck.Text    = "Success !";
            }
        }
Code Example #28
File: CameraForm.cs Project: superowner/FaceRec
 protected virtual VideoCapture GetVideoCapture()
 {
     return(VideoCapture.FromCamera(0));
 }
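
Because GetVideoCapture is virtual, a derived form can substitute another source, for example a recorded clip when no camera is attached (a sketch; the file name is hypothetical):

    protected override VideoCapture GetVideoCapture()
    {
        // Use a pre-recorded video instead of camera 0.
        return VideoCapture.FromFile("test.mp4");
    }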
Code Example #29
    public IEnumerator IStartFaceTrack(int dev, RawImage img, FaceManager _faceManager, FaceCompare _faceCompare)
    {
        faceManager = _faceManager;
        faceCompare = _faceCompare;
        image       = new Mat();
        using (VideoCapture cap = VideoCapture.FromCamera(dev))
        {
            if (!cap.IsOpened())
            {
                Debug.LogError("open camera error");
                yield break;
            }
            // When the movie playback reaches end, Mat.data becomes NULL.
            while (true)
            {
                yield return(null);

                if (isCheck)
                {
                    RotatedRect box;
                    cap.Read(image); // same as cvQueryFrame
                    if (!image.Empty())
                    {
                        int             ilen       = 2;// number of faces passed in
                        TrackFaceInfo[] track_info = new TrackFaceInfo[ilen];
                        for (int i = 0; i < ilen; i++)
                        {
                            track_info[i]           = new TrackFaceInfo();
                            track_info[i].landmarks = new int[144];
                            track_info[i].headPose  = new float[3];
                            track_info[i].face_id   = 0;
                            track_info[i].score     = 0;
                        }
                        int    sizeTrack = Marshal.SizeOf(typeof(TrackFaceInfo));
                        IntPtr ptT       = Marshal.AllocHGlobal(sizeTrack * ilen);

                        /*  trackMat
                         *  Input: maxTrackObjNum - the maximum number of faces to detect; pass in the externally
                         *         allocated face count and allocate memory of the matching size. On return it holds
                         *         the maximum number of faces detected.
                         *  Return value: the minimum of the faces passed in and the faces detected, i.e. the faces actually returned.
                         ****/
                        int faceSize = ilen; //faces returned: the minimum of the allocated and detected face counts
                        int curSize  = ilen; //current face count: allocated count on input, actually detected count on output
                        faceSize = track_mat(ptT, image.CvPtr, ref curSize);
                        for (int index = 0; index < faceSize; index++)
                        {
                            IntPtr ptr = new IntPtr();
                            if (8 == IntPtr.Size)
                            {
                                ptr = (IntPtr)(ptT.ToInt64() + sizeTrack * index);
                            }
                            else if (4 == IntPtr.Size)
                            {
                                ptr = (IntPtr)(ptT.ToInt32() + sizeTrack * index);
                            }

                            track_info[index] = (TrackFaceInfo)Marshal.PtrToStructure(ptr, typeof(TrackFaceInfo));
                            trackFaceInfo     = track_info[index];
                            {
                                //face_info[index] = (FaceInfo)Marshal.PtrToStructure(info_ptr, typeof(FaceInfo));
                                //Debug.Log("in Liveness::usb_track face_id is {0}:" + track_info[index].face_id);
                                //Debug.Log("in Liveness::usb_track landmarks is:");
                                //for (int k = 0; k < 1; k++)
                                //{
                                //    Debug.Log(
                                //        track_info[index].landmarks[k + 0] + "," + track_info[index].landmarks[k + 1] + "," +
                                //        track_info[index].landmarks[k + 2] + "," + track_info[index].landmarks[k + 3] + "," +
                                //        track_info[index].landmarks[k + 4] + "," + track_info[index].landmarks[k + 5] + "," +
                                //        track_info[index].landmarks[k + 6] + "," + track_info[index].landmarks[k + 7] + "," +
                                //        track_info[index].landmarks[k + 8] + "," + track_info[index].landmarks[k + 9]
                                //        );
                                //}
                                //for (int k = 0; k < track_info[index].headPose.Length; k++)
                                //{
                                //    Debug.Log("in Liveness::usb_track angle is:" + track_info[index].headPose[k]);
                                //}
                                //Debug.Log("in Liveness::usb_track score is:" + track_info[index].score);
                                //// angle
                                //Debug.Log("in Liveness::usb_track mAngle is:" + track_info[index].box.mAngle);
                                //// face width
                                //Debug.Log("in Liveness::usb_track mWidth is:" + track_info[index].box.mWidth);
                                //// center point X, Y coordinates
                                //Debug.Log("in Liveness::usb_track mCenter_x is:" + track_info[index].box.mCenter_x);
                                //Debug.Log("in Liveness::usb_track mCenter_y is:" + track_info[index].box.mCenter_y);
                            }
                            // draw the face bounding box
                            box = bounding_box(track_info[index].landmarks, track_info[index].landmarks.Length);
                            draw_rotated_box(ref image, ref box, new Scalar(0, 255, 0));
                            xcenter = image.Width / 2;
                            ycenter = image.Height / 2;
                        }
                        Marshal.FreeHGlobal(ptT);
                        if (videoTexture == null)
                        {
                            videoTexture = new Texture2D(image.Width, image.Height);
                        }
                        videoTexture.LoadImage(image.ToBytes());
                        videoTexture.Apply();
                        img.texture = videoTexture;
                        //imgBytes = image.ToBytes();
                        Cv2.WaitKey(1);
                    }
                    else
                    {
                    }
                }
            }
            image.Release();
        }
    }
Code Example #30
        //C# test: real-time face tracking from a USB camera
        public void usb_csharp_track_face(int dev)
        {
            using (var window = new Window("face"))
                using (VideoCapture cap = VideoCapture.FromCamera(dev))
                {
                    if (!cap.IsOpened())
                    {
                        Console.WriteLine("open camera error");
                        return;
                    }
                    // Frame image buffer
                    Mat image = new Mat();
                    // When the movie playback reaches end, Mat.data becomes NULL.
                    while (true)
                    {
                        RotatedRect box;
                        cap.Read(image); // same as cvQueryFrame
                        if (!image.Empty())
                        {
                            int             ilen       = 2;// number of faces passed in
                            TrackFaceInfo[] track_info = new TrackFaceInfo[ilen];
                            for (int i = 0; i < ilen; i++)
                            {
                                track_info[i]           = new TrackFaceInfo();
                                track_info[i].landmarks = new int[144];
                                track_info[i].headPose  = new float[3];
                                track_info[i].face_id   = 0;
                                track_info[i].score     = 0;
                            }
                            int    sizeTrack = Marshal.SizeOf(typeof(TrackFaceInfo));
                            IntPtr ptT       = Marshal.AllocHGlobal(sizeTrack * ilen);
                            //Marshal.Copy(ptTrack, 0, ptT, ilen);
                            //                        FaceInfo[] face_info = new FaceInfo[ilen];
                            //                        face_info = new FaceInfo(0.0F,0.0F,0.0F,0.0F,0.0F);

                            //Cv2.ImWrite("usb_track_Cv2.jpg", image);

                            /*  trackMat
                             *  Input: maxTrackObjNum - the maximum number of faces to detect; pass in the externally
                             *         allocated face count and allocate memory of the matching size. On return it holds
                             *         the maximum number of faces detected.
                             *  Return value: the minimum of the faces passed in and the faces detected, i.e. the faces actually returned.
                             ****/
                            int faceSize = ilen; //faces returned: the minimum of the allocated and detected face counts
                            int curSize  = ilen; //current face count: allocated count on input, actually detected count on output
                            faceSize = track_mat(ptT, image.CvPtr, ref curSize);
                            for (int index = 0; index < faceSize; index++)
                            {
                                IntPtr ptr = new IntPtr();
                                if (8 == IntPtr.Size)
                                {
                                    ptr = (IntPtr)(ptT.ToInt64() + sizeTrack * index);
                                }
                                else if (4 == IntPtr.Size)
                                {
                                    ptr = (IntPtr)(ptT.ToInt32() + sizeTrack * index);
                                }

                                track_info[index] = (TrackFaceInfo)Marshal.PtrToStructure(ptr, typeof(TrackFaceInfo));
                                //face_info[index] = (FaceInfo)Marshal.PtrToStructure(info_ptr, typeof(FaceInfo));
                                Console.WriteLine("in Liveness::usb_track face_id is {0}:", track_info[index].face_id);
                                Console.WriteLine("in Liveness::usb_track landmarks is:");
                                for (int k = 0; k < 1; k++)
                                {
                                    Console.WriteLine("{0},{1},{2},{3},{4},{5},{6},{7},{8},{9},",
                                                      track_info[index].landmarks[k], track_info[index].landmarks[k + 1],
                                                      track_info[index].landmarks[k + 2], track_info[index].landmarks[k + 3],
                                                      track_info[index].landmarks[k + 4], track_info[index].landmarks[k + 5],
                                                      track_info[index].landmarks[k + 6], track_info[index].landmarks[k + 7],
                                                      track_info[index].landmarks[k + 8], track_info[index].landmarks[k + 9]
                                                      );
                                }

                                for (int k = 0; k < track_info[index].headPose.Length; k++)
                                {
                                    Console.WriteLine("in Liveness::usb_track angle is:{0:f}", track_info[index].headPose[k]);
                                }
                                Console.WriteLine("in Liveness::usb_track score is:{0:f}", track_info[index].score);
                                // angle
                                Console.WriteLine("in Liveness::usb_track mAngle is:{0:f}", track_info[index].box.mAngle);
                                // face width
                                Console.WriteLine("in Liveness::usb_track mWidth is:{0:f}", track_info[index].box.mWidth);
                                // center point X, Y coordinates
                                Console.WriteLine("in Liveness::usb_track mCenter_x is:{0:f}", track_info[index].box.mCenter_x);
                                Console.WriteLine("in Liveness::usb_track mCenter_y is:{0:f}", track_info[index].box.mCenter_y);
                                //// draw the face bounding box
                                box = bounding_box(track_info[index].landmarks, track_info[index].landmarks.Length);
                                draw_rotated_box(ref image, ref box, new Scalar(0, 255, 0));
                                // Detecting face attributes and quality in real time may cause the video to stutter; if necessary, consider skipping frames
                                // Get face attributes (by passing in the face info)
                                //IntPtr ptrAttr = FaceAttr.face_attr_by_face(image.CvPtr, ref track_info[index]);
                                //string buf = Marshal.PtrToStringAnsi(ptrAttr);
                                //Console.WriteLine("attr res is:" + buf);
                                //// Get face quality (by passing in the face info)
                                //IntPtr ptrQua = FaceQuality.face_quality_by_face(image.CvPtr, ref track_info[index]);
                                //buf = Marshal.PtrToStringAnsi(ptrQua);
                                //Console.WriteLine("quality res is:" + buf);
                                //// Extracting face features in real time may cause the video to stutter; if necessary, consider skipping frames
                                //float[] feature = new float[512];
                                //IntPtr ptrfea = new IntPtr();
                                //int count = FaceCompare.get_face_feature_by_face(image.CvPtr, ref track_info[index], ref ptrfea);
                                //// A return value of 512 means the feature vector was obtained
                                //if(ptrfea == IntPtr.Zero)
                                //{
                                //    Console.WriteLine("Get feature failed!");
                                //    continue;
                                //}
                                //if (count == 512)
                                //{
                                //    for (int i = 0; i < count; i++)
                                //    {
                                //        IntPtr floptr = new IntPtr();
                                //        if ( 8 == IntPtr.Size)
                                //        {
                                //            floptr = (IntPtr)(ptrfea.ToInt64() + i * count * Marshal.SizeOf(typeof(float)));
                                //        }
                                //        else if( 4 == IntPtr.Size)
                                //        {
                                //            floptr = (IntPtr)(ptrfea.ToInt32() + i * count * Marshal.SizeOf(typeof(float)));
                                //        }

                                //        feature[i] = (float)Marshal.PtrToStructure(floptr, typeof(float));
                                //        Console.WriteLine("feature {0} is: {1:e8}", i, feature[i]);
                                //    }
                                //    Console.WriteLine("feature count is:{0}", count);
                                //}
                            }
                            Marshal.FreeHGlobal(ptT);
                            window.ShowImage(image);
                            Cv2.WaitKey(1);
                            Console.WriteLine("mat not empty");
                        }
                        else
                        {
                            Console.WriteLine("mat is empty");
                        }
                    }
                    image.Release();
                }
        }