Example #1
    void CheckCapture()
    {
        if (Time.time >= NextCaptureTime)
        {
            //print("Camera Render");

            SensorCamera.Render();

            NativeArray <byte> gpuData;
            while (AvailableGpuDataArrays.TryTake(out gpuData) && gpuData.Length != Width * Height * 4)
            {
                gpuData.Dispose();
            }
            if (!gpuData.IsCreated)
            {
                gpuData = new NativeArray <byte>(Width * Height * 4, Allocator.Persistent);
            }

            var capture = new CameraCapture()
            {
                GpuData     = gpuData,
                CaptureTime = Time.time,
            };
            capture.Request = AsyncGPUReadback.Request(SensorCamera.targetTexture, 0, TextureFormat.RGBA32);
            // TODO: Replace the AsyncGPUReadback.Request above with the following AsyncGPUReadback.RequestIntoNativeArray when we upgrade to Unity 2020.1.
            // See https://issuetracker.unity3d.com/issues/asyncgpureadback-dot-requestintonativearray-crashes-unity-when-trying-to-request-a-copy-to-the-same-nativearray-multiple-times
            // for the details of the bug in Unity.
            //capture.Request = AsyncGPUReadback.RequestIntoNativeArray(ref capture.GpuData, Distorted ? DistortedTexture : SensorCamera.targetTexture, 0, TextureFormat.RGBA32);
            CaptureQueue.Enqueue(capture);

            NextCaptureTime = Time.time + (1.0f / Frequency);
        }
    }
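The requests queued above complete asynchronously, so a separate step has to drain CaptureQueue and return the pooled buffers. Below is a minimal sketch of such a consumer; it assumes CaptureQueue is a Queue<CameraCapture>, AvailableGpuDataArrays is a ConcurrentBag<NativeArray<byte>>, and PublishImage is a hypothetical sink, none of which are defined in the example.

    // Hedged sketch (not from the source): drain completed readbacks in frame order
    // and recycle their NativeArrays into the pool used by CheckCapture().
    void ProcessCaptures()
    {
        while (CaptureQueue.Count > 0 && CaptureQueue.Peek().Request.done)
        {
            var capture = CaptureQueue.Dequeue();

            if (!capture.Request.hasError)
            {
                // With plain AsyncGPUReadback.Request the pixels live in the request,
                // so copy them into the pooled RGBA32 buffer before handing them on.
                capture.Request.GetData<byte>().CopyTo(capture.GpuData);
                PublishImage(capture.GpuData, capture.CaptureTime); // hypothetical sink
            }

            // Return the buffer so the next CheckCapture() call can reuse it.
            AvailableGpuDataArrays.Add(capture.GpuData);
        }
    }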
Example #2
    public override void OnInspectorGUI()
    {
        base.OnInspectorGUI();

        CameraCapture cameraCapture = target as CameraCapture;

        cameraCapture.ppiToDpiRatio = EditorGUILayout.FloatField(
            "DPI/PPI比", cameraCapture.ppiToDpiRatio);                       // "DPI/PPI比" = DPI/PPI ratio
        cameraCapture.swap      = EditorGUILayout.Toggle("幅と高さを入れ替える", cameraCapture.swap); // = swap width and height
        cameraCapture.paperSize = (PaperSize)EditorGUILayout.EnumPopup(
            "用紙サイズ", cameraCapture.paperSize);                          // "用紙サイズ" = paper size
        disabled = cameraCapture.paperSize != PaperSize.Custom;
        EditorGUI.BeginDisabledGroup(disabled);
        cameraCapture.paperWidth = EditorGUILayout.FloatField(
            "幅(mm)", cameraCapture.paperWidth);                             // "幅(mm)" = width (mm)
        cameraCapture.paperHeight = EditorGUILayout.FloatField(
            "高さ(mm)", cameraCapture.paperHeight);                          // "高さ(mm)" = height (mm)
        EditorGUI.EndDisabledGroup();
        cameraCapture.maximumPixels = EditorGUILayout.IntField(
            "最大ピクセル数", cameraCapture.maximumPixels);                  // "最大ピクセル数" = maximum pixels
        EditorGUILayout.LabelField("ステータス", cameraCapture.statusText);  // "ステータス" = status
        if (GUILayout.Button("キャプチャ"))                                   // "キャプチャ" = capture
        {
            cameraCapture.captureFlag = true;
        }
    }
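For context, an OnInspectorGUI override like this only runs when it lives in a custom Editor class registered for the component. A minimal sketch of that wiring is below; the class name CameraCaptureEditor and the disabled field are assumptions, not part of the example.

// Hedged sketch (assumed wiring, not from the source): the CustomEditor attribute
// routes the CameraCapture inspector through the OnInspectorGUI override above.
using UnityEditor;

[CustomEditor(typeof(CameraCapture))]
public class CameraCaptureEditor : Editor
{
    bool disabled;   // backs the EditorGUI.BeginDisabledGroup call

    public override void OnInspectorGUI()
    {
        // body as shown in Example #2
    }
}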
Example #3
        public override void ImageGrabbedHandler(object sender, EventArgs e)
        {
            var frame = new Mat();

            CameraCapture.Retrieve(frame);

            var grayFrame = new Mat();

            CvInvoke.CvtColor(frame, grayFrame, ColorConversion.Bgr2Gray);

            var smallGrayFrame = new Mat();

            CvInvoke.PyrDown(grayFrame, smallGrayFrame);

            var smoothedGrayFrame = new Mat();

            CvInvoke.PyrUp(smallGrayFrame, smoothedGrayFrame);

            var cannyFrame = new Mat();

            CvInvoke.Canny(smoothedGrayFrame, cannyFrame, 100, 60);

            imageBoxCaptured.Image     = frame;
            imageBoxGray.Image         = grayFrame;
            imageBoxSmoothedGray.Image = smoothedGrayFrame;
            imageBoxCanny.Image        = cannyFrame;

            NotifyStatus(string.Empty);
        }
Example #4
        private void btnCamera_Click(object sender, EventArgs e)
        {
            DeleteCapture();
            var hEvent = IntPtr.Zero;
            int index  = comboBoxCamera.SelectedIndex;

            camera = cameraDevice.Create(index, this.pictureBox1, hEvent);
            MethodInvoker invoked_method = (() =>
            {
                if (null != pictureBox1.Image)
                {
                    pictureBox1.Image.Dispose();
                }
                if (null != camera)
                {
                    var img = camera.SnapShot().ToBMP();
                    this.pictureBox1.Image = img;
                }
            });

            unsafe
            {
                camera.SampleEvent = (time, buf, size) =>
                {
                    BeginInvoke((MethodInvoker)invoked_method);
                };
            }
            camera.ResetSampleEvent();
            setSize(camera.Width, camera.Height);
            camera.Start();
        }
Example #5
        static void Main()
        {
            Console.WriteLine("Press ESC to stop playing");

            var reader = new CameraCapture(0); //capture from camera

            (reader as CameraCapture).FrameSize = new Size(640, 480);

            //reader = new FileCapture(Path.Combine(getResourceDir(), "Welcome.mp4")); //capture from video
            //reader = new ImageDirectoryCapture(Path.Combine(getResourceDir(), "Sequence"), "*.jpg");
            reader.Open();

            Bgr <byte>[,] frame = null;
            do
            {
                reader.ReadTo(ref frame);
                if (frame == null)
                {
                    break;
                }

                frame.Show(scaleForm: true);
            } while (!(Console.KeyAvailable && Console.ReadKey(true).Key == ConsoleKey.Escape));

            reader.Dispose();
        }
Example #6
    void CheckCapture()
    {
        if (Time.time >= NextCaptureTime)
        {
            SensorCamera.Render();

            NativeArray <byte> gpuData;
            while (AvailableGpuDataArrays.TryTake(out gpuData) && gpuData.Length != Width * Height * 4)
            {
                gpuData.Dispose();
            }
            if (!gpuData.IsCreated)
            {
                gpuData = new NativeArray <byte>(Width * Height * 4, Allocator.Persistent);
            }

            var capture = new CameraCapture()
            {
                GpuData     = gpuData,
                CaptureTime = Time.time,
            };

            capture.Request = UniversalAsyncGPUReadbackRequest.Request(SensorCamera.targetTexture);
            CaptureQueue.Enqueue(capture);

            NextCaptureTime = Time.time + (1.0f / Frequency);
        }
    }
Example #7
        public ViewModel(Home home)
        {
            m_home           = home;
            m_waitHandle     = new AutoResetEvent(false);
            cpu_done         = false;
            m_scan_cancel    = new CancellationTokenSource();
            m_arduinoControl = new ArduinoControl(m_waitHandle, m_scan_cancel);
            m_motors         = new Motor[Constants.NUMBER_OF_MOTORS];
            for (int i = 0; i < Constants.NUMBER_OF_MOTORS; ++i)
            {
                m_motors[i] = new Motor();
            }
            cameraCapture = new CameraCapture();
            /**********Added for barcode*********/
            m_barcode = new Barcode();
            /*********Added for ImageStitching*********/
            m_stitcher = new ImageStitching();

            m_collabrated           = false;
            m_cpu_scanned           = 0;
            m_y_axis_dividers_count = 0;
            m_progress = 0;
            m_cpu_done = false;
            UpdateLoggerPath();
            UsrSettings.PropertyChanged     += UpdateLoggerPathEventHandler;
            DevSettingsProp.SettingChanging += ValidateDevSettings;
            BindingOperations.EnableCollectionSynchronization(ErrorMessages, m_lock); //This is needed to update the collection
            BindingOperations.EnableCollectionSynchronization(InfoMessages, m_lock);
        }
Example #8
 // Use this for initialization
 void Start()
 {
     width    = System.Convert.ToInt32(rectT.rect.width);
     height   = System.Convert.ToInt32(rectT.rect.height);
     fileName = Path.Combine(Application.persistentDataPath, "test.png");
     instance = this;
 }
Example #9
 private bool CheckFrameAvailable(CameraCapture camera)
 {
     return camera != null &&
         camera.CurrentFrame != null &&
         (camera.State == CaptureStates.Running ||
         camera.State == CaptureStates.Paused);
 }
Example #10
        public override void ImageGrabbedHandler(object sender, EventArgs e)
        {
            using (var frame = new Mat())
            {
                CameraCapture.Retrieve(frame);
                var inputImage = frame.ToImage <Bgr, byte>();

                if (radTrackingApi.Checked)
                {
                    inputImage = DoTrackingApi(frame, inputImage);
                }
                else if (radCamshift.Checked)
                {
                    var output = DoCamShift(frame, inputImage);
                    imageBoxProcessed.Image = output.BackProjection;
                }

                if (!_imageBoxSelector.SeedingRectangle.IsEmpty)
                {
                    inputImage.Draw(_imageBoxSelector.SeedingRectangle, new Bgr(Color.Chartreuse));
                }

                imageBoxTracking.Image = inputImage;
            }
        }
Example #11
        public override void ImageGrabbedHandler(object sender, EventArgs e)
        {
            if (_transmitTask == null || _transmitTask.IsCompleted)
            {
                using (var matCaptured = new Mat())
                {
                    CameraCapture.Retrieve(matCaptured);
                    var bgrImage = matCaptured.ToImage <Bgr, byte>();
                    WriteText(bgrImage, 30, DateTime.Now.ToString("HH:mm:ss tt"));
                    imageBoxCaptured.Image = bgrImage;


                    IImageTransmitter transmitter = null;
                    if (radBsonImage.Checked)
                    {
                        transmitter = _imageTransmitter;
                    }

                    if (radBsonJpeg.Checked)
                    {
                        transmitter = _jpegTransmitter;
                    }

                    if (transmitter != null)
                    {
                        _transmitTask = transmitter.Transmit(bgrImage);
                    }
                }
            }
        }
Example #12
        void CheckCapture()
        {
            if (Time.time >= NextCaptureTime)
            {
                RenderCamera();

                NativeArray <byte> gpuData;
                while (AvailableGpuDataArrays.TryTake(out gpuData) && gpuData.Length != Width * Height * 4)
                {
                    gpuData.Dispose();
                }
                if (!gpuData.IsCreated)
                {
                    gpuData = new NativeArray <byte>(Width * Height * 4, Allocator.Persistent);
                }

                var capture = new CameraCapture()
                {
                    GpuData     = gpuData,
                    CaptureTime = SimulatorManager.Instance.CurrentTime,
                };
                capture.Request = AsyncGPUReadback.Request(renderTarget.ColorTexture, 0, TextureFormat.RGBA32);
                CaptureList.Add(capture);

                NextCaptureTime = Time.time + (1.0f / Frequency);
            }
        }
Example #13
 public override void ImageGrabbedHandler(object sender, EventArgs e)
 {
     using (var matCaptured = new Mat())
     {
         CameraCapture.Retrieve(matCaptured);
     }
 }
Example #14
    void Start()
    {
        camCap = FindObjectOfType <CameraCapture>();
        upload = FindObjectOfType <Upload>();

        if (camCap == null)
        {
            camCap = GameObject.FindObjectOfType <CameraCapture>();
        }

        this.camCap.CaptureVideoCompleted += onCaptureVideoCompleted;
        this.camCap.TakePhotoCompleted    += onTakePhotoCompleted;
        this.camCap.PickCompleted         += onChooseFile;
        this.camCap.Failed += onCancelled;

        courseCode    = ApplicationModel.courseCode;
        lectureNumber = ApplicationModel.lectureNumber;
        username      = ApplicationModel.username;

        Debug.Log("CourseCode: " + courseCode);
        Debug.Log("CourseFullName: " + lectureNumber);
        Debug.Log("Username: "******"Launching Camera");
        StartRecording();
    }
Example #15
 private void btnCamera_Click(object sender, EventArgs e)
 {
     DeleteCapture();
     var hEvent = IntPtr.Zero;
     int index = comboBoxCamera.SelectedIndex;
     camera = cameraDevice.Create(index, this.pictureBox1, hEvent);
     MethodInvoker invoked_method = (() =>
                         {
                             if (null != pictureBox1.Image) pictureBox1.Image.Dispose();
                             if (null != camera)
                             {
                                 var img = camera.SnapShot().ToBMP();
                                 this.pictureBox1.Image = img;
                             }
                         });
     unsafe
     {
         camera.SampleEvent = (time, buf, size) =>
         {
             BeginInvoke((MethodInvoker)invoked_method);
         };
     }
     camera.ResetSampleEvent();
     setSize(camera.Width, camera.Height);
     camera.Start();
 }
Example #16
        public bool processCameraImageRequest(string cameraName, HttpListenerRequest request, HttpListenerResponse response)
        {
            cameraName = cameraName.ToLower();
            if (!CameraCaptureManager.classedInstance.cameras.ContainsKey(cameraName))
            {
                response.StatusCode = 404;
                return(true);
            }

            CameraCapture camera = CameraCaptureManager.classedInstance.cameras[cameraName];

            //PluginLogger.debug("RENDERING SAVED CAMERA: "+ camera.cameraManagerName());
            if (camera.didRender)
            {
                response.ContentEncoding = Encoding.UTF8;
                response.ContentType     = "image/jpeg";
                response.WriteContent(camera.imageBytes);
                dataRates.SendDataToClient(camera.imageBytes.Length);
            }
            else
            {
                response.StatusCode = 503;
            }

            return(true);
        }
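A handler with this signature still needs something to route HTTP requests to it. The sketch below is a guess at a dispatcher built on HttpListener; the /camera/<name> URL scheme and the HandleContext method are assumptions, not part of the example.

        // Hedged sketch (assumed dispatcher, not from the source): route
        // /camera/<name> requests to processCameraImageRequest above.
        private void HandleContext(HttpListenerContext context)
        {
            string[] segments = context.Request.Url.AbsolutePath.Trim('/').Split('/');
            if (segments.Length == 2 && segments[0] == "camera")
            {
                if (processCameraImageRequest(segments[1], context.Request, context.Response))
                {
                    context.Response.Close();
                }
            }
        }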
Example #17
        private void OnCameraSettingsChanged()
        {
            _cameraLeft = _settingsTab.CameraLeft;
            _cameraRight = _settingsTab.CameraRight;

            _captureLeft.Camera = _cameraLeft;
            _captureRight.Camera = _cameraRight;
        }
Example #18
    void RenderToPanel(RawImage image, byte[] inputBytes)
    {
        Vector2   dims = CameraCapture.GetDimentions();
        Texture2D tex  = new Texture2D((int)(dims.x), (int)dims.y, TextureFormat.R8, true);

        tex.LoadImage(inputBytes);
        tex.Apply();
        image.texture = tex;
    }
Example #19
    public Texture2D Photo(int width, int height)
    {
        gameObject.SetActive(true);
        var tex = CameraCapture.Capture(MyCamera, width, height);

        DebugLog.log("キャプチャ");
        gameObject.SetActive(false);
        return(tex);
    }
Example #20
        static void Main(string[] args)
        {
            var inferingInTensorflow = new InferingInTensorflow();

            inferingInTensorflow.Run();

            var camera = new CameraCapture();

            camera.Run();
        }
Example #21
        public override void ImageGrabbedHandler(object sender, EventArgs e)
        {
            using (var matCaptured = new Mat())
            {
                var retrieveElapsed = Stopwatch.StartNew();
                CameraCapture.Retrieve(matCaptured);
                retrieveElapsed.Stop();

                if (_readyRectangle.IsEmpty)
                {
                    _detectorInput.ErodeDilateIterations = (int)spinDilateIterations.Value;
                    _detectorInput.Settings.Roi          = GetRegionOfInterestFromControls();
                    _detectorInput.Captured = matCaptured;

                    var output = _colorDetector.Process(_detectorInput);

                    DrawReticle(output.CapturedImage, output.CentralPoint.ToPoint(), Color.Aqua);

                    if (output.IsDetected)
                    {
                        var radius = 50;
                        var circle = new CircleF(output.CentralPoint, radius);
                        var color  = new Bgr(Color.Yellow);
                        output.CapturedImage.Draw(circle, color, 3);
                        var ballTextLocation = output.CentralPoint.ToPoint();
                        ballTextLocation.X += radius;
                        //  output.CapturedImage.Draw("ball", ballTextLocation, FontFace.HersheyPlain, 3, color);
                    }

                    if (checkBoxRoi.Checked)
                    {
                        output.CapturedImage.Draw(_detectorInput.Settings.Roi, Color.Green.ToBgr(), 3);
                    }

                    if (!_imageBoxSelector.SeedingRectangle.IsEmpty)
                    {
                        output.CapturedImage.Draw(_imageBoxSelector.SeedingRectangle, new Bgr(Color.Chartreuse));
                    }

                    imageBoxCaptured.Image = output.CapturedImage;
                    imageBoxFiltered.Image = output.ThresholdImage;

                    NotifyStatus(
                        "Retrieved frame in {0}, {1}"
                        , retrieveElapsed.Elapsed.ToHumanReadable(HumanReadableTimeSpanOptions.Abbreviated)
                        , output);
                }
                else
                {
                    DoAutoThresholding(matCaptured);
                }

                ResizeImageControls();
            }
        }
Example #22
 /// <summary>
 /// Camera properties
 /// </summary>
 public MainWindow()
 {
     InitializeComponent();
     camera             = new CameraCapture();
     cameraPTZ          = new CameraPTZ();
     cameraPTZ.Host     = "192.168.1.245";
     cameraPTZ.Port     = 80;
     cameraPTZ.UserName = "******";
     cameraPTZ.Password = "******";
     cameraPTZ.Args     = "/web/cgi-bin/hi3510/ptzctrl.cgi?-step=0&-act=[PTZ]";
 }
Example #23
    private void Start()
    {
        if (CamCap == null)
        {
            CamCap = GameObject.FindObjectOfType <CameraCapture> ();
        }

        this.CamCap.CaptureVideoCompleted += new CameraCapture.MediaDelegate(this.Completetd);
        this.CamCap.TakePhotoCompleted    += new CameraCapture.MediaDelegate(this.Completetd);
        this.CamCap.PickCompleted         += new CameraCapture.MediaDelegate(this.Completetd);
        this.CamCap.Failed += new CameraCapture.ErrorDelegate(this.ErrorInfo);
    }
Example #24
    void Start()
    {
        _cameraCapture = GetComponent <CameraCapture>();

        _gestureRecognizer = new GestureRecognizer();
        _gestureRecognizer.SetRecognizableGestures(GestureSettings.Tap);
        _gestureRecognizer.TappedEvent += (source, tapCount, ray) =>
        {
            TakePicture();
        };
        _gestureRecognizer.StartCapturingGestures();
    }
Example #25
    void Awake()
    {
        if (captureController == null)
        {
            captureController = FindObjectOfType <CameraCapture>();
        }

        if (image == null)
        {
            image = GetComponent <RawImage>();
        }
    }
Example #26
    // Update is called once per frame
    void Update()
    {
        time += Time.deltaTime;

        if (time >= 3.0f && (isEnd == false))
        {
            var tex = CameraCapture.Capture(Camera.main);
            meshRen.material.mainTexture = tex;
            //isEnd = true;
            time = 0.0f;
        }
    }
Example #27
        public void Run()
        {
            CameraCapture.Start();
            Started = true;

            var keyHandler = new KeyHandler();

            keyHandler.KeyEvent += keyHandler_KeyEvent;
            keyHandler.WaitForExit();

            Stop();
        }
Example #28
        private Image <Bgr, byte> PullImage()
        {
            Image <Bgr, byte> output;

            using (var matCaptured = new Mat())
            {
                CameraCapture.Retrieve(matCaptured);
                output = matCaptured.ToImage <Bgr, byte>();
                WriteText(output, 30, DateTime.Now.ToString(TimeFormat));
                imageBoxCaptured.Image = output;
            }
            return(output);
        }
Example #29
 private void ShowSettings()
 {
     if (ccForm != null)
     {
         ccForm.BringToFront();
     }
     else
     {
         ccForm       = new CameraCapture();
         ccForm.MainF = this;
         ccForm.Show();
     }
 }
Example #30
        public TaskViewModel()
        {
            lightStatus   = 1;
            DisplaySize   = 400;
            cameraCapture = new CameraCapture( );
            iotControl    = new IoTControl( );
            _task         = new CountdownTask( );
            _future       = DateTime.Now.AddMinutes(15);
            timerActive   = true;
            _token        = new CancellationTokenSource( );
#pragma warning disable VSTHRD101 // Avoid unsupported async delegates
            ButtonClick = new DelegateCommand(async() => await ResetTimerAsync());
#pragma warning restore VSTHRD101 // Avoid unsupported async delegates
            Task.Factory.StartNew(async() => await RunTimerAsync(_token.Token), TaskCreationOptions.LongRunning);
        }
Example #31
 public override void ImageGrabbedHandler(object sender, EventArgs e)
 {
     using (var matCaptured = new Mat())
     {
         CameraCapture.Retrieve(matCaptured);
         var input = new CascadeDetectorInput {
             Captured = matCaptured
         };
         var result = _detector.Process(input);
         if (result.IsDetected)
         {
             Log.Info(m => m("{0}", result));
         }
     }
 }
Example #32
    IEnumerator CaptureAll()
    {
        int    i   = 0;
        Camera cam = Camera.main;

        while (i < snapshotAmount)
        {
            Transform tr = PointCloudCoordinator.NextPoint();
            cam.transform.position = tr.position;
            cam.transform.rotation = tr.rotation;
            CameraCapture.Capture();
            i++;
            yield return(null);
        }
    }
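A coroutine like CaptureAll only runs once something starts it. A one-line driver is sketched below; the Start hook is an assumption, not shown in the example.

    // Hedged sketch (not from the source): kick off the capture sweep when the scene starts.
    void Start()
    {
        StartCoroutine(CaptureAll());
    }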
Example #33
        public override void ImageGrabbedHandler(object sender, EventArgs e)
        {
            if (_transmitTask == null || _transmitTask.IsCompleted)
            {
                using (var matCaptured = new Mat())
                {
                    CameraCapture.Retrieve(matCaptured);
                    var bgrImage = matCaptured.ToImage <Bgr, byte>();
                    WriteText(bgrImage, 30, DateTime.Now.ToString("HH:mm:ss tt"));
                    imageBoxCaptured.Image = bgrImage;

                    _transmitTask = _bsonPoster.Transmit(bgrImage);
                }
            }
        }
Example #34
 public void FreeCamera(CameraCapture camera)
 {
     _freeCameras.Add(camera);
     if (CameraFreed != null)
         CameraFreed(this, new CameraEventArgs()
         {
             Camera = camera,
             Available = true
         });
 }
Example #35
 private void DeleteCapture()
 {
     if (null != video)
     {
         video.Dispose();
         video = null;
     }
     if (null != camera)
     {
         camera.Dispose();
         camera = null;
     }
 }
Example #36
 private void OnCameraUnplugged(CameraCapture camera)
 {
     camera.TerminateAsync();
     if (CameraUnplugged != null)
         CameraUnplugged(this, new CameraEventArgs()
         {
             Camera = camera,
             Available = false
         });
 }
Example #37
 /// <summary>
 /// Called when the state of a camera capture changes.
 /// </summary>
 /// <param name="CameraState">New state of the camera capture.</param>
 /// <param name="CameraCapture">The camera capture that raised the event.</param>
 private void OnCameraStateChanged(CaptureInterface.Enumeration.CameraState CameraState, CameraCapture.Model.CameraCapture.CameraCapture CameraCapture)
 {
     if (Shared.SharedData.CameraCapture != null)
     {
         Shared.SharedData.CameraCapture.OnWPFGrayFrame -= OnWPFFrame;
         Shared.SharedData.CameraCapture.OnWPFColorFrame -= OnWPFFrame;
         Shared.SharedData.CameraCapture.OnWPFSmallGrayFrame -= OnWPFFrame;
         Shared.SharedData.CameraCapture.OnWPFSmoothedGrayFrame -= OnWPFFrame;
         switch (CameraState)
         {
             case global::CaptureInterface.Enumeration.CameraState.Open:
                 {
                     Shared.SharedData.CameraCapture.OnWPFGrayFrame += OnWPFFrame;
                 }
                 break;
         }
     }
 }
Example #38
        /// <summary>
        /// Called when a new camera image is received.
        /// The image has already been converted to an ImageSource so it can be
        /// presented in WPF.
        /// </summary>
        /// <param name="CameraImage">New camera image.</param>
        /// <param name="CameraCapture">The camera capture it came from.</param>
        private void OnWPFFrame(ImageSource CameraImage, CameraCapture.Model.CameraCapture.CameraCapture CameraCapture)
        {
            PresentationImage = CameraImage;

            try
            {
                if (PresentationImage != null)
                {
                    drawingCanvasView.Width = PresentationImage.Width;
                    drawingCanvasView.Height = PresentationImage.Height;
                }
            }
            catch (Exception Ex)
            {

            }
        }
Example #39
 /// <summary>
 /// Called when a new camera image is received.
 /// </summary>
 /// <param name="CameraImage">New camera image.</param>
 /// <param name="CameraCapture">The camera capture it came from.</param>
 private void OnGrayFrame(Image<Emgu.CV.Structure.Gray, byte> CameraImage, CameraCapture.Model.CameraCapture.CameraCapture CameraCapture)
 {
     if (contentSource != null)
     {
         contentSource.OnNewImage(CameraImage);
     }
 }
Example #40
        private void SaveFrame(CameraCapture camera)
        {
            if (CheckFrameAvailable(camera))
            {
                BitmapSource frame = camera.CurrentFrame;

                bool rightRunning = _captureRight.CaptureState == CaptureStates.Running;
                bool leftRunning = _captureLeft.CaptureState == CaptureStates.Running;

                if(rightRunning)
                    _captureRight.PauseAsync();
                if (leftRunning)
                    _captureLeft.PauseAsync();

                SaveFileDialog saveDialog = new SaveFileDialog();
                saveDialog.Filter = "PNG|*.png";
                bool? res = saveDialog.ShowDialog();
                if (res.Value == true)
                {
                    if (!saveDialog.FileName.EndsWith(".png"))
                        MessageBox.Show("Unsupported file format");
                    else
                    {
                        Stream imgFileStream = saveDialog.OpenFile();
                        try
                        {
                            BitmapEncoder encoder = new PngBitmapEncoder();
                            encoder.Frames.Add(BitmapFrame.Create(frame));
                            encoder.Save(imgFileStream);
                        }
                        catch (Exception exc)
                        {
                            MessageBox.Show("Failed to save data: " + exc.Message, "Error");
                        }
                        imgFileStream.Close();
                    }
                }

                if(rightRunning)
                    _captureRight.StartAsync();
                if(leftRunning)
                    _captureLeft.StartAsync();
            }
        }
Example #41
 public void ReserveCamera(CameraCapture camera)
 {
     _freeCameras.Remove(camera);
     if (CameraReserved != null)
         CameraReserved(this, new CameraEventArgs()
         {
             Camera = camera,
             Available = false
         });
 }
Example #42
 private void OnCameraPlugged(DsDevice device)
 {
     CameraCapture camera = new CameraCapture(device, WindowHandle);
     _camerasAll.Add(camera);
     _freeCameras.Add(camera);
     if (CameraPlugged != null)
         CameraPlugged(this, new CameraEventArgs()
         {
             Camera = camera,
             Available = true
         });
 }
Example #43
        /// <summary>
        /// Called when a new exception is received from a camera capture item.
        /// </summary>
        /// <param name="CameraException">Reference to the exception.</param>
        /// <param name="CameraCapture">The camera capture that raised it.</param>
        private void OnCameraException(Exception CameraException, CameraCapture.Model.CameraCapture.CameraCapture CameraCapture)
        {
            string presentationText = DateTime.Now.ToString() + " " + CameraException.Message + Environment.NewLine + "Camera: " + CameraCapture.HardwareCameraIndex;

            if (CameraException.InnerException != null)
            {
                presentationText += Environment.NewLine + "Inner exception: " + CameraException.InnerException.Message;
            }
            CallDispatcher.InvokeAsync((Action)delegate()
            {
                CurrentMessage = presentationText;
            });
        }
Example #44
 public CameraViewItem(CameraCapture cam)
 {
     Camera = cam;
 }
Example #45
        /// <summary>
        /// Called when the color depth of a camera capture changes.
        /// We use this to register for the frame event that matches the new depth.
        /// </summary>
        /// <param name="CaptureDepth">New depth of the camera capture.</param>
        private void OnCameraCaptureDepthChanged(CameraCapture.Model.CameraCapture.CaptureDepth CaptureDepth)
        {
            if (Shared.SharedData.CameraCapture != null)
            {
                Shared.SharedData.CameraCapture.OnWPFGrayFrame -= OnWPFFrame;
                Shared.SharedData.CameraCapture.OnWPFColorFrame -= OnWPFFrame;
                Shared.SharedData.CameraCapture.OnWPFSmallGrayFrame -= OnWPFFrame;
                Shared.SharedData.CameraCapture.OnWPFSmoothedGrayFrame -= OnWPFFrame;

                switch (CaptureDepth.ColorDepth)
                {
                    case CaptureInterface.Enumeration.ColorDepth.Color:
                        {
                            Shared.SharedData.CameraCapture.OnWPFColorFrame += OnWPFFrame;
                        }
                        break;
                    case CaptureInterface.Enumeration.ColorDepth.Gray:
                        {
                            Shared.SharedData.CameraCapture.OnWPFGrayFrame += OnWPFFrame;
                        }
                        break;
                    case CaptureInterface.Enumeration.ColorDepth.SmallGray:
                        {
                            Shared.SharedData.CameraCapture.OnWPFSmallGrayFrame += OnWPFFrame;
                        }
                        break;
                    case CaptureInterface.Enumeration.ColorDepth.SmoothedGray:
                        {
                            Shared.SharedData.CameraCapture.OnWPFSmoothedGrayFrame += OnWPFFrame;
                        }
                        break;
                    case CaptureInterface.Enumeration.ColorDepth.CannyGray:
                        {
                            Shared.SharedData.CameraCapture.OnWPFCannyGrayFrame += OnWPFFrame;
                        }
                        break;
                }
            }
        }