Read() public method

Grabs a frame from a camera or video file, decompresses it, and returns it. This method is just a combination of cvGrabFrame and cvRetrieveFrame in one call. The returned image should not be released or modified by the user.
public Read ( OpenCvSharp.Mat image ) : bool
image OpenCvSharp.Mat: the Mat that receives the grabbed frame
return bool: false when no frame could be read (e.g. end of stream)
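
A minimal usage sketch, assuming a placeholder file "video.mp4" (a device index such as 0 would open a camera instead). It shows the Grab/Retrieve combination described above: Read returns false once no more frames can be grabbed.

        using OpenCvSharp;

        class ReadExample
        {
            static void Main()
            {
                // open a video file; a device index (e.g. 0) opens a camera instead
                using (var capture = new VideoCapture("video.mp4"))
                using (var frame = new Mat())
                {
                    if (!capture.IsOpened())
                        return;

                    // Read() combines Grab() and Retrieve() in one call; it returns
                    // false and leaves the Mat empty once the stream is exhausted
                    while (capture.Read(frame) && !frame.Empty())
                    {
                        // process the frame here
                    }
                }
            }
        }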
Example 1
        public void Run()
        {
            var capture = new VideoCapture();
            capture.Open(-1);
            if (!capture.IsOpened())
                throw new Exception("capture initialization failed");
            // set the resolution after the device has been opened;
            // properties set before Open() have no effect
            capture.Set(CaptureProperty.FrameWidth, 640);
            capture.Set(CaptureProperty.FrameHeight, 480);

            var fs = FrameSource.CreateCameraSource(-1);
            var sr = SuperResolution.CreateBTVL1();
            sr.SetInput(fs);

            using (var normalWindow = new Window("normal"))
            using (var srWindow = new Window("super resolution"))
            {
                var normalFrame = new Mat();
                var srFrame = new Mat();
                while (true)
                {
                    capture.Read(normalFrame);
                    sr.NextFrame(srFrame);
                    if (normalFrame.Empty() || srFrame.Empty())
                        break;
                    normalWindow.ShowImage(normalFrame);
                    srWindow.ShowImage(srFrame);
                    Cv2.WaitKey(100);
                }
            }
        }
Example 2
        private void EnumSources()
        {
            int sourceId = 0;

            this.Properties.CameraSourceIds.Add(-1);
            while (true)
            {
                try
                {
                    using (Mat image = new Mat())
                        using (OpenCvSharp.VideoCapture videoCapture = new OpenCvSharp.VideoCapture(OpenCvSharp.CaptureDevice.Any, sourceId))
                        {
                            if (videoCapture.IsOpened() && videoCapture.Read(image))
                            {
                                this.Properties.CameraSourceIds.Add(sourceId++);
                            }
                            else
                            {
                                break;
                            }
                        }
                }
                catch
                {
                    break;
                }
            }
        }
Example 3
        /// <summary>
        /// Gets an image from the camera
        /// </summary>
        /// <param name="cameraIndex"></param>
        /// <returns></returns>
        public static Mat GetCameraImage(int cameraIndex = 0)
        {
            var frame = new Mat();
            using (var capture = new VideoCapture(cameraIndex))
                capture.Read(frame);

            return frame;
        }
Example 4
    public OpencvSource(string cam_or_url)
    {
        MAssert.Check(cam_or_url != string.Empty);

        // check if cam_or_url is number
        bool stream = false;

        for (int i = 0; i < cam_or_url.Length; ++i)
        {
            stream = stream ||
                     (cam_or_url[i] < '0') ||
                     (cam_or_url[i] > '9');
        }

        if (stream)
        {
            // open stream
            Console.WriteLine("opening stream '{0}'", cam_or_url);
            capturer = new OpenCvSharp.VideoCapture(cam_or_url);
        }
        else
        {
            // convert to integer
            int cam_id = Convert.ToInt32(cam_or_url, 10);
            MAssert.Check(cam_id >= 0, "wrong webcam id");

            // open webcam
            Console.WriteLine("opening webcam {0}", cam_id);
            capturer = new OpenCvSharp.VideoCapture(cam_id);
            MAssert.Check(capturer.IsOpened(), "webcam not opened");

            // set resolution
            capturer.Set(OpenCvSharp.CaptureProperty.FrameWidth, 1280);
            capturer.Set(OpenCvSharp.CaptureProperty.FrameHeight, 720);

            MAssert.Check(capturer.IsOpened(), "webcam not opened");
        }

        // the first few frames can be empty even if the camera is good,
        // so read and discard a few frames
        OpenCvSharp.Mat frame = new OpenCvSharp.Mat();
        for (int i = 0; i < 10; ++i)
        {
            capturer.Read(frame);
        }
        // check first two frames
        OpenCvSharp.Mat image1 = new OpenCvSharp.Mat(), image2 = new OpenCvSharp.Mat();
        capturer.Read(image1);
        capturer.Read(image2);
        Console.WriteLine("image1 size: {0}", image1.Size());
        Console.WriteLine("image2 size: {0}", image2.Size());

        MAssert.Check(
            !image1.Empty() &&
            !image2.Empty() &&
            image1.Size() == image2.Size() &&
            image1.Type() == OpenCvSharp.MatType.CV_8UC3 &&
            image2.Type() == OpenCvSharp.MatType.CV_8UC3,
            "error opening webcam or stream");
    }
Example 5
        private void timer1_Tick(object sender, EventArgs e)
        {
            if (cap == null || pause)
            {
                return;
            }
            if (forwTo)
            {
                var ofps = cap.Get(5); // 5 = CAP_PROP_FPS

                forwTo = false;
                var frm  = cap.Get(7); // 7 = CAP_PROP_FRAME_COUNT
                var secs = (frm / ofps) * 1000;

                cap.Set(0, forwPosPercetange * secs); // 0 = CAP_PROP_POS_MSEC
            }

            if (oneFrameStep && oneFrameStepDir == -1)
            {
                var pf = cap.Get(1); // 1 = CAP_PROP_POS_FRAMES
                cap.Set(1, Math.Max(0, pf - 2));
            }
            }
            if (oneFrameStep)
            {
                pause = true; oneFrameStep = false;
            }


            Mat mat = new Mat();

            if (!cap.Read(mat) || mat.Empty())
                return; // nothing to show (e.g. end of the video)
            pictureBox1.Image = BitmapConverter.ToBitmap(mat);
        }
Example 6
	// Use this for initialization
	private void Start () {						
		
		frame = new Mat ();
		cap = new VideoCapture (1);
		tex = new Texture2D (cap.FrameWidth, cap.FrameHeight);			 
		cap.Read (frame);

		dst = new Mat ();
		thresh = new Mat ();

		tex.LoadImage (frame.ToBytes (".png", new int[]{0}));
		go.GetComponent<Renderer> ().material.mainTexture = tex;
	}
Example 7
        static void Main(string[] args)
        {
            //size of tag is in meters and camera parameters are obtained from calibration
            float  tag_size = 0.1F;
            float  fx       = 1200F;
            float  fy       = 1400F;
            double px       = 817.143;
            double py       = 387.159;

            //array of floats to carry values of image points (x and y * 4 points)
            float[] ptsry = new float[8];

            //initialize video capture from camera
            var capt = new OpenCvSharp.VideoCapture();

            capt.Open(0);

            //window for displaying video
            Window window = new Window("capture");

            //main task; display video and find tags
            //construct the detector once instead of on every frame
            Apriltag ap = new Apriltag("canny", true, "tag16h5");
            using (Mat frame = new Mat())
            {
                //looping until a key is pressed
                while (true)
                {
                    //read from camera and show it
                    capt.Read(frame);
                    window.ShowImage(frame);

                    //detect tags and find how many and print the number
                    var current_tags = ap.detect(frame);
                    Console.WriteLine("Number of tags = " + current_tags.Count);
                    Console.WriteLine();

                    //sleep for 10 msec
                    System.Threading.Thread.Sleep(10);

                    //if a key is pressed, close the window and exit
                    if (Cv2.WaitKey(1) >= 0)
                    {
                        capt.Release();
                        Cv2.DestroyAllWindows();
                        break;
                    }
                }
            }
        }
Example 8
        protected override Mat ExecuteImpl(Mat inputImage)
        {
            lock (m_sync)
            {
                if (m_videoCapture != null)
                {
                    if (m_videoCapture.Read(m_image) && m_image.Width != 0 && m_image.Height != 0)
                    {
                        return(m_image.Clone());
                    }
                }
            }

            return(null);
        }
Example 9
        /// <inheritdoc/>
        public async Task StartAsync(int delay = 30)
        {
            OpenCvSharp.VideoCapture capture;

            switch (this.settings.VideoType)
            {
            case VideoType.Webcam:
                capture = new OpenCvSharp.VideoCapture(this.settings.WebcamIndex);
                break;

            case VideoType.Url:
                capture = new OpenCvSharp.VideoCapture(this.settings.Url);
                break;

            default:
                throw new ArgumentException("Invalid settings: video type for grabber not specified or wrong.");
            }

            using (capture) // ensure the capture is released when the loop ends
            using (Mat image = new Mat())
            {
                while (true)
                {
                    capture.Read(image);

                    if (image.Empty())
                    {
                        break;
                    }

                    int analysisWidth  = 320;
                    int analysisHeight = analysisWidth * image.Height / image.Width;
                    var analysisImage  = image.Clone().Resize(new Size(analysisWidth, analysisHeight));

                    using (var analysisStream = analysisImage.ToMemoryStream(".jpg", new ImageEncodingParam(ImwriteFlags.JpegQuality, 50)))
                        using (var displayStream = image.ToMemoryStream(".jpg", new ImageEncodingParam(ImwriteFlags.JpegQuality, 100)))
                        {
                            this.OnFrameGrabbed?.Invoke(
                                displayStream,
                                analysisStream,
                                "image/jpeg",
                                analysisImage.Width,
                                analysisImage.Height);
                        }

                    await Task.Delay(delay);
                }
            }
        }
Example 10
        public IActionResult Canny()
        {
            using (var capture = new OpenCvSharp.VideoCapture(CaptureDevice.Any, index: 0))
            {
                var fps = GetFps(capture);
                capture.Fps = fps;
                var interval = (int)(1000 / fps);

                using (Mat image = new Mat()) // Frame image buffer
                {
                    // Load the cascade and the target image once, outside the loop
                    var haarCascade = new CascadeClassifier("haarcascade_frontalface_alt2.xml");
                    var gray = new Mat("faces.png", ImreadModes.GrayScale);

                    // When the movie playback reaches end, Mat.data becomes NULL.
                    while (true)
                    {
                        if (!capture.Read(image) || image.Empty())
                        {
                            break;
                        }

                        // Detect faces
                        Rect[] faces = haarCascade.DetectMultiScale(gray, 1.08, 2, HaarDetectionType.ScaleImage, new Size(30, 30));



                        //byte[] cannyBytes = cannyImage.ToBytes(".png");
                        //string base64 = Convert.ToBase64String(cannyBytes);
                        //// ビュー変数に設定
                        //ViewBag.Base64Image = base64;

                        //window.ShowImage(image);
                        //Cv2.WaitKey(sleepTime);
                    }
                }
            }


            return(View());
        }
Example 11
        private void LoadVideoFrames()
        {
            if (openCvCapture == null)
            {
                openCvCapture = new OpenCvSharp.VideoCapture(0);
            }
            OpenCvSharp.Mat frame        = new OpenCvSharp.Mat();
            bool            readingVideo = true;
            int             cnt          = 0;

            while (readingVideo)
            {
                if (openCvCapture.Read(frame))
                {
                    cnt++;
                    frame.SaveImage($@"d:\junk\testCamImages\image{cnt}.png");
                    byte[]      imagearray = frame.ImEncode(".png");
                    BitmapImage bmi        = new BitmapImage();
                    using (MemoryStream ms = new MemoryStream(imagearray))
                    {
                        ms.Position = 0;
                        bmi.BeginInit();
                        bmi.CacheOption  = BitmapCacheOption.OnLoad;
                        bmi.StreamSource = ms;
                        bmi.EndInit();
                    }


                    //this.ImageSource.Dispatcher.BeginInvoke(System.Windows.Threading.DispatcherPriority.Normal,
                    //    new Action(() =>
                    //        {
                    //            ImageSource = bmi;
                    //        }));


                    //capture.
                    OpenCvSharp.Cv2.ImShow("video", frame);
                    int key = OpenCvSharp.Cv2.WaitKey(27); // wait 27 ms for a key press
                    if (key == 27) // ESC
                    {
                        readingVideo = false;
                    }
                }
            }
        }
Example 12
	// Use this for initialization
	private void Start () {						
		if (isVid) {
			frame = new Mat ();
			//gray = new Mat();
			cap = new VideoCapture (1);
			tex = new Texture2D (cap.FrameWidth, cap.FrameHeight);
			bkrnd_win_size = 20; //cap.FrameWidth / 5;
			cap.Read (frame);
		} else {
			frame = new Mat(Application.dataPath + "/profile_photo.png", ImreadModes.Color);
			tex = new Texture2D (frame.Width, frame.Height);
			bkrnd_win_size = 20;//frame.Width / 5;
		}
		frame_backproj = new Mat ();
		mask = new Mat ();
		tex.LoadImage (frame.ToBytes (".png", new int[]{0}));
		go.GetComponent<Renderer> ().material.mainTexture = tex;
		//myDetector = new CascadeClassifier ("C:/Users/admin/opencv/build/share/OpenCV/haarcascades/haarcascade_frontalface_default.xml");
		bkrnd_rect = new OpenCvSharp.Rect(1,1,bkrnd_win_size,bkrnd_win_size);

	}
Example 13
        public void Run()
        {
            // Opens MP4 file (ffmpeg is probably needed)
            var capture = new VideoCapture(FilePath.Movie.Bach);

            int sleepTime = (int)Math.Round(1000 / capture.Fps);

            using (var window = new Window("capture"))
            {
                // Frame image buffer
                Mat image = new Mat();

                // When the movie playback reaches end, Mat.data becomes NULL.
                while (true)
                {
                    capture.Read(image); // same as cvQueryFrame
                    if(image.Empty())
                        break;

                    window.ShowImage(image);
                    Cv2.WaitKey(sleepTime);
                } 
            }
        }
Example 14
        private void button1_Click(object sender, EventArgs e)
        {
            if (th != null)
            {
                button1.Text = "start";
                stop         = true;
                th           = null;
                return;
            }
            button1.Text = "stop";
            th           = new Thread(() =>
            {
                cap          = new OpenCvSharp.VideoCapture(path);
                Stopwatch sw = Stopwatch.StartNew();
                var session1 = new InferenceSession(netPath);

                var inputMeta = session1.InputMetadata;


                Mat mat     = new Mat();
                var nFrames = cap.Get(VideoCaptureProperties.FrameCount);
                cap.Read(mat);
                var sz = mat.Size();
                if (inputDims[2] == -1)
                {
                    sz.Height = mat.Height;
                    sz.Width  = mat.Width;
                }
                string key = $"{sz.Width}x{sz.Height}";
                if (!Processing.allPriorBoxes.ContainsKey(key))
                {
                    var pd = Decoders.PriorBoxes2(sz.Width, sz.Height);
                    Processing.allPriorBoxes.Add(key, pd);
                }
                var prior_data  = Processing.allPriorBoxes[key];
                var ofps        = cap.Get(VideoCaptureProperties.Fps);
                VideoWriter vid = null;
                if (checkBox1.Checked)
                {
                    vid = new VideoWriter("output.mp4", FourCC.XVID, ofps, mat.Size());
                }
                while (true)
                {
                    if (stop)
                    {
                        break;
                    }
                    var pf   = cap.Get(VideoCaptureProperties.PosFrames);
                    int perc = (int)Math.Round((pf / (float)nFrames) * 100);
                    progressBar1.Invoke(((Action)(() =>
                    {
                        label1.Text = $"{pf} / {nFrames}  {perc}%";
                        progressBar1.Value = perc;
                    })));
                    if (!cap.Read(mat))
                    {
                        break;
                    }
                    Mat orig = mat.Clone();
                    if (inputDims[2] == -1)
                    {
                        inputDims[2] = mat.Height;
                        inputDims[3] = mat.Width;
                    }

                    mat.ConvertTo(mat, MatType.CV_32F);
                    object param = mat;
                    foreach (var pitem in preps)
                    {
                        param = pitem.Process(param);
                    }

                    var inputData = param as float[];
                    var tensor    = new DenseTensor <float>(param as float[], inputDims);
                    var container = new List <NamedOnnxValue>();

                    container.Add(NamedOnnxValue.CreateFromTensor <float>(inputName, tensor));

                    float[] confd;
                    float[] locd;
                    using (var results = session1.Run(container))
                    {
                        var data = results.First().AsTensor <float>();
                        locd     = data.ToArray();
                        confd    = results.Skip(1).First().AsTensor <float>().ToArray();
                    }


                    Stopwatch sw2 = Stopwatch.StartNew();
                    var ret       = Processing.boxesDecode(orig.Size(), confd, locd, new System.Drawing.Size(sz.Width, sz.Height), prior_data, visTresh);
                    if (checkBox1.Checked)
                    {
                        var out1 = Processing.drawBoxes(orig, ret.Item1, ret.Item2, visTresh, ret.Item3);
                        vid.Write(out1);
                    }
                    sw2.Stop();
                }
                vid?.Release(); // vid is null when writing the output is disabled



                sw.Stop();
            });
            th.IsBackground = true;
            th.Start();
        }
Example 15
        static void Main(string[] args)
        {
            var faceengine = new FaceEngine(ASF_RegisterOrNot.ASF_REGISTER, 2);

            //faceengine.OnlineActivation("", "", "");
            FaceEngine.OnlineActivationFree("", "");
            //faceengine.OfflineActivation();


            faceengine.InitEngine(ASF_DetectMode.ASF_DETECT_MODE_IMAGE, ArcSoftFace_OrientPriority.ASF_OP_0_ONLY, 1, ASF_Mask.ASF_FACE_DETECT | ASF_Mask.ASF_FACERECOGNITION | ASF_Mask.ASF_AGE | ASF_Mask.ASF_LIVENESS);
            Console.WriteLine(faceengine.Version.BuildDate);
            Console.WriteLine(faceengine.Version.CopyRight);
            Console.WriteLine(faceengine.Version.Version);
            OpenCvSharp.VideoCapture videoCapture = new OpenCvSharp.VideoCapture();
            videoCapture.Open(0);

            var activeFile = FaceEngine.GetActiveFileInfo();


            ////Console.WriteLine(FaceEngine.GetActiveDeviceInfo());
            Stopwatch stopwatch = new Stopwatch();

            stopwatch.Restart();
            //faceengine.InitEngine(ASF_DetectMode.ASF_DETECT_MODE_IMAGE, ArcSoftFace_OrientPriority.ASF_OP_ALL_OUT, 9,
            //    ASF_Mask.ASF_AGE | ASF_Mask.ASF_FACE3DANGLE | ASF_Mask.ASF_FACELANDMARK | ASF_Mask.ASF_FACERECOGNITION | ASF_Mask.ASF_FACESHELTER | ASF_Mask.ASF_FACE_DETECT |
            //     ASF_Mask.ASF_GENDER | ASF_Mask.ASF_IMAGEQUALITY | ASF_Mask.ASF_IR_LIVENESS | ASF_Mask.ASF_LIVENESS | ASF_Mask.ASF_MASKDETECT | ASF_Mask.ASF_UPDATE_FACEDATA);
            //Console.WriteLine($"Engine initialization: {stopwatch.ElapsedMilliseconds}ms");
            Mat mat = new Mat();

            //Mat mat = new Mat(@"C:\Users\Jch\Desktop\2.jpg");
            while (true)
            {
                stopwatch.Restart();

                if (videoCapture.Read(mat))
                {
                    using (var img = mat.ToBitmap())
                        using (var imgInfo = ImageInfo.ReadBMP(img))
                        {
                            Console.WriteLine($"Image conversion: {stopwatch.ElapsedMilliseconds}ms");
                            stopwatch.Restart();
                            var detectResult = faceengine.DetectFacesEx(imgInfo);
                            Console.WriteLine($"Face detection: {stopwatch.ElapsedMilliseconds}ms");
                            if (detectResult != null)
                            {
                                foreach (var item in detectResult.FaceInfos)
                                {
                                    Console.WriteLine($"Age: {item.Age}");
                                    Console.WriteLine($"FaceID: {item.FaceID}");
                                    Console.WriteLine($"FaceOrient: {item.FaceOrient}");
                                    Console.WriteLine($"FaceShelter: {item.FaceShelter}");
                                    Console.WriteLine($"Gender: {item.Gender}");
                                    Console.WriteLine($"LeftEyeClosed: {item.LeftEyeClosed}");
                                    Console.WriteLine($"Liveness: {item.Liveness}");
                                    Console.WriteLine($"Mask: {item.Mask}");
                                    Console.WriteLine($"RightEyeClosed: {item.RightEyeClosed}");
                                    Console.WriteLine($"WearGlasses: {item.WearGlasses}");
                                    Console.WriteLine($"FaceRect: bottom->{item.FaceRect.bottom} left->{item.FaceRect.left} right->{item.FaceRect.right} top->{item.FaceRect.top}");
                                    Console.WriteLine($"FaceLandmark: x->{item.FaceLandmark.x} y->{item.FaceLandmark.y}");
                                    Console.WriteLine($"Face3DAngle: {item.Face3DAngle.roll} {item.Face3DAngle.yaw} {item.Face3DAngle.pitch} {item.Face3DAngle.status}");
                                    stopwatch.Restart();
                                    var feature = faceengine.FaceFeatureExtractEx(imgInfo, item);
                                    Console.WriteLine($"Feature extraction: {stopwatch.ElapsedMilliseconds}ms");
                                    if (feature != null)
                                    {
                                        Console.WriteLine($"feature: {feature.Size}");
                                        Console.WriteLine(faceengine.FaceFeatureCompare(feature.ASFFaceFeature, feature.ASFFaceFeature));
                                    }
                                    var score = faceengine.ImageQualityDetectEx(imgInfo, item);
                                    Console.WriteLine($"Face quality: {score}");
                                    Console.WriteLine("--------------------------------------------");
                                }
                            }
                        }
                }
            }

            Console.ReadLine();
        }
Example 16
        private void Init()
        {
            new Thread(() =>
            {
                var eye_casc       = new cv.CascadeClassifier("eye.xml");
                var left_eye_casc  = new cv.CascadeClassifier("left_eye.xml");
                var right_eye_casc = new cv.CascadeClassifier("right_eye.xml");
                var face_casc      = new cv.CascadeClassifier("fface_default.xml");

                cap = new cv.VideoCapture(0);

                while (true)
                {
                    if (released)
                    {
                        break;
                    }
                    var img = new cv.Mat();
                    cap.Read(img);
                    var gray  = img.CvtColor(cv.ColorConversionCodes.BGR2GRAY);
                    var gaus  = gray.AdaptiveThreshold(255, cv.AdaptiveThresholdTypes.GaussianC, cv.ThresholdTypes.Binary, 115, 1);
                    img       = gaus;
                    var faces = face_casc.DetectMultiScale(gray, 1.3, 5);
                    RenderTargetBitmap eyes_lay = null;

                    foreach (var face in faces)
                    {
                        var rect = new cv.Rect(face.Location, face.Size);
                        //img.Rectangle(rect, new cv.Scalar(255, 0, 0));

                        var sub_ing     = gray[rect];
                        var sub_ing_rgb = img[rect];

                        //left eye
                        var eyes  = eye_casc.DetectMultiScale(sub_ing, 1.3, 2);
                        int count = 0;
                        foreach (var eye in eyes)
                        {
                            count++;
                            if (count > 2)
                            {
                                count = 0;
                                break;
                            }
                            var rect_eye = new cv.Rect(eye.Location, eye.Size);

                            if (eye.X + eye.Width < face.Width / 2)
                            {
                                //sub_ing_rgb.Rectangle(rect_eye, new cv.Scalar(0, 255, 0));
                                Dispatcher.Invoke(() =>
                                {
                                    eyes_lay = DrawImg(cv.Extensions.BitmapSourceConverter.ToBitmapSource(img), eye.X + face.X, eye.Y + face.Y, eye.Width, eye.Height, eye_l, scale_w, scale_h);
                                });
                            }
                        }

                        //right eye
                        count = 0;
                        foreach (var eye in eyes)
                        {
                            count++;
                            if (count > 2)
                            {
                                break;
                            }
                            var rect_eye = new cv.Rect(eye.Location, eye.Size);

                            if (eye.X + eye.Width > face.Width / 2)
                            {
                                Dispatcher.Invoke(() =>
                                {
                                    if (eyes_lay != null)
                                    {
                                        eyes_lay = DrawImg(eyes_lay, eye.X + face.X, eye.Y + face.Y, eye.Width, eye.Height, eye_r, scale_w, scale_h);
                                    }
                                    else
                                    {
                                        eyes_lay = DrawImg(cv.Extensions.BitmapSourceConverter.ToBitmapSource(img), eye.X + face.X, eye.Y + face.Y, eye.Width, eye.Height, eye_r, scale_w, scale_h);
                                    }
                                });
                            }
                        }
                    }



                    Dispatcher.Invoke(() =>
                    {
                        if (eyes_lay != null)
                        {
                            OutImg.Source = eyes_lay;
                        }
                        else
                        {
                            OutImg.Source = cv.Extensions.BitmapSourceConverter.ToBitmapSource(img);
                        }
                    });
                    //Thread.Sleep(100);
                    GC.Collect();
                }
            })
            {
                IsBackground = true
            }.Start();
        }
Example 17
        /// <summary>
        /// Executes the image processing
        /// </summary>
        public ImageProcessValue Execute(SettingsObj obj)
        {
            try
            {
                // webcam capture
                var camera = new OpenCvSharp.VideoCapture(0)
                {
                    //// specify the resolution
                    FrameWidth  = 1920,
                    FrameHeight = 1080
                };

                using (camera)
                {
                    // storage for the camera intrinsic parameters
                    Mat mtx  = new Mat();
                    Mat dist = new Mat();

                    // read the yml file to get the calibration parameters
                    using (var fs = new FileStorage(obj.CalibratinFilePath, FileStorage.Mode.Read))
                    {
                        mtx  = fs["mtx"].ReadMat();
                        dist = fs["dist"].ReadMat();
                    }

                    var src = new Mat();

                    // read a captured image from the camera
                    camera.Read(src);

                    if (src.Empty())
                    {
                        return(null);
                    }

                    Mat calib = new Mat();
                    // undistort
                    Cv2.Undistort(src, calib, mtx, dist);

                    // image processing
                    var tmp = new Mat();
                    // convert to OpenCV channel order
                    Cv2.CvtColor(calib, tmp, OpenCvSharp.ColorConversionCodes.RGB2BGR);
                    // convert the BGR image to HSV
                    var hsv = new Mat();
                    Cv2.CvtColor(tmp, hsv, OpenCvSharp.ColorConversionCodes.BGR2HSV);
                    // range-threshold to a binary image with InRange -> used as a mask
                    var msk = new Mat();
                    Cv2.InRange(hsv, new Scalar(obj.HueMin, obj.SaturationMin, obj.ValueMin), new Scalar(obj.HueMax, obj.SaturationMax, obj.ValueMax), msk);

                    // mask the original with BitwiseAnd -> only the colors in the masked region remain
                    var msk_src = new Mat();
                    Cv2.BitwiseAnd(hsv, hsv, msk_src, msk);
                    var show_msk = new Mat();
                    // convert back to the original colors
                    Cv2.CvtColor(msk_src, show_msk, ColorConversionCodes.HSV2BGR);
                    // convert to grayscale
                    var gray = new Mat();
                    Cv2.CvtColor(show_msk, gray, ColorConversionCodes.BGR2GRAY);
                    // binarize
                    var th = new Mat();
                    Cv2.Threshold(gray, th, 130, 255, ThresholdTypes.Otsu);

                    // blobs and labeling
                    var label              = new Mat();
                    var stats              = new Mat();
                    var centroids          = new Mat();
                    ConnectedComponents cc = Cv2.ConnectedComponentsEx(th);

                    if (cc.LabelCount <= 1)
                    {
                        return(null);
                    }
                    // draw labels
                    //cc.RenderBlobs(show_msk);
                    // draw bounding boxes except background
                    foreach (var blob in cc.Blobs.Skip(1))
                    {
                        show_msk.Rectangle(blob.Rect, Scalar.Red);
                    }

                    // filter maximum blob
                    var maxBlob  = cc.GetLargestBlob();
                    var filtered = new Mat();
                    cc.FilterByBlob(show_msk, filtered, maxBlob);

                    // rectangle search
                    // find rectangles in the mask image
                    Point[][]        contours;
                    HierarchyIndex[] hierarchy;
                    Cv2.FindContours(th, out contours, out hierarchy, RetrievalModes.List, ContourApproximationModes.ApproxNone);
                    // do nothing if none were found
                    if (contours.Length == 0)
                    {
                        return(null);
                    }

                    // rotated bounding rectangles
                    foreach (var cont in contours)
                    {
                        var rect = Cv2.MinAreaRect(cont);
                        var box  = Cv2.BoxPoints(rect).Select(x => (Point)x);
                    }

                    Cv2.DrawContours(show_msk, contours, -1, Scalar.Yellow, 3);
                    //Cv2.ImShow("show_msk", show_msk);

                    // emit the image and the position on the image
                    var val = new ImageProcessValue();
                    val.CameraImage = OpenCvSharp.Extensions.BitmapConverter.ToBitmap(show_msk);
                    val.Blob        = maxBlob;

                    // call Dispose to prevent memory leaks
                    mtx.Dispose();
                    dist.Dispose();
                    calib.Dispose();
                    tmp.Dispose();
                    hsv.Dispose();
                    msk.Dispose();
                    msk_src.Dispose();
                    show_msk.Dispose();
                    gray.Dispose();
                    th.Dispose();
                    label.Dispose();
                    stats.Dispose();
                    centroids.Dispose();
                    filtered.Dispose();

                    return(val);
                }
            }
            catch (Exception)
            {
                throw; // rethrow, preserving the original stack trace
            }
        }
Example 18
    //var layerName = nets.GetLayerNames()[0];
    //prob.MinMaxLoc(out min, out max, out minLoc, out maxLoc);
    // Update is called once per frame
    void Update()
    {
        frame_count++;
        using (var frames = pipe.WaitForFrames())
            using (var depth = frames.DepthFrame)
            {
                print("The camera is pointing at an object " +
                      depth.GetDistance(depth.Width / 2, depth.Height / 2) + " meters away\t");
                rotate_character3(depth.GetDistance(depth.Width / 2, depth.Height / 2));
            }

        camera.Read(image);
        Cv2.ImShow("image", image);

        //var frames = pipe.WaitForFrames();



        //Cv2.CvtColor(image, image, ColorConversionCodes.BGR2RGB);
        //print("The camera is pointing at an object " +
        //       depth.GetDistance(depth.Width / 2, depth.Height / 2) + " meters away\t");
        //image=Cv2.ImRead("C:/Users/CheNik/Desktop/KakaoTalk_20191128_172510392.jpg");
        var image4      = image.Clone();
        var image5      = image.Clone();
        int frameWidth  = image.Width;
        int frameHeight = image.Height;

        importantImage = OpenCvSharp.Dnn.CvDnn.BlobFromImage(image, 1, new Size(300, 300), new Scalar(104, 117, 123), false, false);

        var nets = OpenCvSharp.Dnn.CvDnn.ReadNetFromCaffe("C:/Users/CheNik/Face/Assets/deploy.prototxt", "C:/Users/CheNik/Face/Assets/res10_300x300_ssd_iter_140000.caffemodel");
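        // NOTE: reloading the Caffe model on every Update() call is expensive;
        // ideally the net would be created once (e.g. in Start()) and cached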

        //var nets = OpenCvSharp.Dnn.CvDnn.ReadNetFromTensorflow("C:/Users/CheNik/Face/Assets/opencv_face_detector_uint8.pb","C:/Users/CheNik/Face/Assets/opencv_face_detector.pbtxt");
        nets.SetInput(importantImage, "data");
        var prob2 = nets.Forward();
        var p     = prob2.Reshape(1, prob2.Size(2));

        for (int i = 0; i < prob2.Size(2); i++)
        {
            var confidence = p.At <float>(i, 2);
            if (confidence > 0.7)
            {
                //get value what we need
                var idx    = (int)p.At <float>(i, 1);
                var x1     = (int)(image.Width * p.At <float>(i, 3));
                var y1     = (int)(image.Height * p.At <float>(i, 4));
                var x2     = (int)(image.Width * p.At <float>(i, 5));
                var y2     = (int)(image.Height * p.At <float>(i, 6));
                var width  = x2 - x1 + 1;
                var height = y2 - y1 + 1;
                //draw result
                OpenCvSharp.Rect  facerect     = new OpenCvSharp.Rect(x1, y1, width + 1, height + 1);
                OpenCvSharp.Point center_point = new OpenCvSharp.Point((int)(x1 + (width / 2)) + 1, (int)(y1 + (height / 2) + 1));
                int specified_position         = get_position(center_point);
                //print("specified_position = " + frame_count +" count = "+ specified_position);
                //float depth_center_point = depth.GetDistance(center_point.X, center_point.Y);
                rotate_Character(specified_position, center_point);
                Mat face           = image4.Clone().SubMat(facerect);
                var tensorimage    = CreateTensorFromImageFileAlt(face);
                var tensored_image = int_to_float_and_div(tensorimage);



                var runner1 = session1.GetRunner();
                var runner2 = session2.GetRunner();
                var runner3 = session3.GetRunner();
                var input1  = graph1["input_1"][0];
                var input2  = graph2["input_1"][0];
                var input3  = graph3["input_1"][0];
                var pred1   = graph1["pred_pose/mul_24"][0];
                var pred2   = graph2["pred_pose/mul_24"][0];
                var pred3   = graph3["pred_pose/mul_24"][0];
                runner1.AddInput(input1, tensored_image);
                runner2.AddInput(input2, tensored_image);
                runner3.AddInput(input3, tensored_image);
                runner1.Fetch(pred1);
                runner2.Fetch(pred2);
                runner3.Fetch(pred3);
                var      output1     = runner1.Run();
                var      output2     = runner2.Run();
                var      output3     = runner3.Run();
                TFTensor probresult1 = output1[0];
                TFTensor probresult2 = output2[0];
                TFTensor probresult3 = output3[0];
                float[,] result1 = return_value(probresult1);
                float[,] result2 = return_value(probresult2);
                float[,] result3 = return_value(probresult3);
                float[] result = new float[3];
                result[0] = result1[0, 0] + result2[0, 0] + result3[0, 0];
                result[1] = result1[0, 1] + result2[0, 1] + result3[0, 1];
                result[2] = result1[0, 2] + result2[0, 2] + result3[0, 2];
                //print("model1 result" + result1[0, 0] +" " +result1[0, 1] +" " +result1[0, 2]);
                //print("model2 result" + result2[0, 0] + " " + result2[0, 1] + " " + result2[0, 2]);
                //print("model3 result" + result3[0, 0] + " " + result3[0, 1] + " " + result3[0, 2]);
                //print(result[0]/3);
                //print(result[1]/3);
                //print(result[2]/3);
                float yaw   = result[0] / 3;
                float pitch = result[1] / 3;
                float roll  = result[2] / 3;
                image4 = draw_axis(image4, yaw, pitch, roll);

                /*if (yaw < -30)
                 *  v3.y =v3.y+ 30;
                 * if (yaw > 30)
                 *  v3.y = v3.y - 30;
                 * if(pitch)*/

                float yaw2   = yaw;
                float pitch2 = pitch - 5;
                float roll2  = roll + 2.4f;
                //yaw2 = rotate_Charactor(yaw2);
                //pitch2 = rotate_Charactor(pitch2);
                //roll2 = rotate_Charactor(roll2);
                print("yaw = " + yaw2 + " " + "pitch = " + pitch2 + " " + "roll = " + roll2);
                rotate_Character2(yaw2, pitch2, roll2, specified_position);
                anaglyph_apply(yaw2, pitch2, roll2, center_point);


                //pixxy.transform.rotation = Quaternion.Euler(-pitch2, yaw2, roll2);

                Cv2.Rectangle(image4, new Point(x1, y1), new Point(x2, y2), new Scalar(255, 0, 0), 3, LineTypes.Link8); // 8 is the 8-connected line type; passing it as "shift" would rescale the coordinates
            }
        }
        Cv2.ImShow("image4", image4);
    }
Example 19
        public void Start(int cameraNumber, int interval = 1000)
        {
            if (captureWorker.IsBusy)
                return;

            CameraNumber = cameraNumber;

            captureWorker.DoWork += (sender, e) =>
            {
                var bw = (BackgroundWorker)sender;
                using (var capture = new VideoCapture(CameraNumber))
                {
                    capture.FrameHeight = 640;
                    var image = new Mat();
                    while (true)
                    {
                        if (bw.CancellationPending)
                        {
                            e.Cancel = true;
                            return;
                        }

                        capture.Read(image);
                        if (image.Empty())
                            throw new Exception("Could not read an image from the camera.");

                        bw.ReportProgress(0, image);
                        Thread.Sleep(interval);
                    }
                }
            };
            captureWorker.RunWorkerAsync();
        }
Example 20
        public void Start(string fileName, int interval)
        {
            if (captureWorker.IsBusy)
                throw new InvalidOperationException("The Capture thread is already running."); //{ Data = { { "GetValue.Arguments.fileName", fileName } } };

            FileName = fileName;

            captureWorker.DoWork += (sender, e) =>
            {
                var bw = (BackgroundWorker)sender;
                using (var capture = new VideoCapture(FileName))
                {
                    var image = new Mat();
                    while (true)
                    {
                        if (bw.CancellationPending)
                        {
                            e.Cancel = true;
                            return;
                        }

                        capture.Read(image);
                        if (image.Empty()) // end of the video
                            return;

                        bw.ReportProgress(0, image);
                        Thread.Sleep(interval);
                    }
                }
            };
            captureWorker.RunWorkerAsync();
        }
Example 21
        public void Start(string fileName)
        {
            if (captureWorker.IsBusy)
                throw new InvalidOperationException("The Capture thread is already running.");

            if (string.IsNullOrWhiteSpace(fileName))
                throw new ArgumentNullException("fileName");

            FileName = fileName;

            captureWorker.DoWork += (sender, e) =>
            {
                var bw = (BackgroundWorker)sender;
                using (var capture = new VideoCapture(FileName))
                {
                    int interval = (int)(1000 / capture.Fps);
                    var image = new Mat();
                    while (true)
                    {
                        if (bw.CancellationPending)
                        {
                            e.Cancel = true;
                            return;
                        }

                        capture.Read(image);
                        if (image.Empty()) // end of the video
                            return;

                        bw.ReportProgress(0, image);
                        Thread.Sleep(interval);
                    }
                }
            };
            captureWorker.RunWorkerAsync();
        }
Example 22
 public void Run()
 {
     using (var capture = new VideoCapture(FilePath.Movie.Bach))
     using (var mog = BackgroundSubtractorMOG.Create())
     using (var windowSrc = new Window("src"))
     using (var windowDst = new Window("dst"))
     {
         var frame = new Mat();
         var fg = new Mat();
         while (true)
         {
             capture.Read(frame);
             if(frame.Empty())
                 break;
             mog.Run(frame, fg, 0.01);
             
             windowSrc.Image = frame;
             windowDst.Image = fg;
             Cv2.WaitKey(50);
         }
     }
 }
Example 23
        public void Run()
        {
            const string OutVideoFile = "out.avi";

            // Opens MP4 file (ffmpeg is probably needed)
            VideoCapture capture = new VideoCapture(FilePath.Movie.Bach);

            // Read movie frames and write them to VideoWriter 
            Size dsize = new Size(640, 480);
            using (VideoWriter writer = new VideoWriter(OutVideoFile, -1, capture.Fps, dsize))
            {
                Console.WriteLine("Converting each movie frames...");
                Mat frame = new Mat();
                while(true)
                {
                    // Read image
                    capture.Read(frame);
                    if(frame.Empty())
                        break;

                    Console.CursorLeft = 0;
                    Console.Write("{0} / {1}", capture.PosFrames, capture.FrameCount);

                    // grayscale -> canny -> resize
                    Mat gray = new Mat();
                    Mat canny = new Mat();
                    Mat dst = new Mat();
                    Cv2.CvtColor(frame, gray, ColorConversionCodes.BGR2GRAY);
                    Cv2.Canny(gray, canny, 100, 180);
                    Cv2.Resize(canny, dst, dsize, 0, 0, InterpolationFlags.Linear);
                    // Write mat to VideoWriter
                    writer.Write(dst);
                } 
                Console.WriteLine();
            }

            // Watch result movie
            using (VideoCapture capture2 = new VideoCapture(OutVideoFile))
            using (Window window = new Window("result"))
            {
                int sleepTime = (int)(1000 / capture2.Fps);

                Mat frame = new Mat();
                while (true)
                {
                    capture2.Read(frame);
                    if(frame.Empty())
                        break;

                    window.ShowImage(frame);
                    Cv2.WaitKey(sleepTime);
                }
            }
        }