Example #1
0
        /// <summary>
        /// Shows each incoming camera frame in <c>pictureBox1</c>, stretched
        /// to fill the control.
        /// </summary>
        /// <param name="source">The camera that raised the event.</param>
        /// <param name="e">Event data exposing the captured frame.</param>
        private void Camera_OnFrameArrived(object source, FrameArrivedEventArgs e)
        {
            pictureBox1.BackgroundImageLayout = ImageLayout.Stretch;
            pictureBox1.Image = e.GetFrame();
        }
 /// <summary>
 /// Buffers one software bitmap for QR decoding: a frame is captured only
 /// when the decode semaphore has a free slot and no bitmap is pending.
 /// </summary>
 /// <param name="sender">The preview control that raised the event.</param>
 /// <param name="args">Event data exposing the captured software bitmap.</param>
 private void CameraPreviewControl_FrameArrived(CameraPreviewControl sender, FrameArrivedEventArgs args)
 {
     // Skip the frame while a decode is in flight.
     if (QR_CODE_IMAGE_SEMA.CurrentCount < 1)
     {
         return;
     }

     // Only buffer a new frame once the previous one has been consumed.
     if (qrCodeBitmap is null)
     {
         args.GetSoftwareBitmap(ref qrCodeBitmap);
     }
 }
Example #3
0
 /// <summary>
 /// Displays the latest camera frame in <c>picBox</c>.
 /// </summary>
 /// <param name="source">The camera that raised the event.</param>
 /// <param name="e">Event data exposing the captured frame.</param>
 private void mycamer_OnFrameArrived(object source, FrameArrivedEventArgs e)
 {
     try
     {
         Image img = e.GetFrame();
         picBox.Image = img;
     }
     catch (Exception ex)
     {
         // Best-effort: a dropped frame is not fatal, but the original empty
         // catch swallowed every error silently — surface it for diagnostics.
         System.Diagnostics.Debug.WriteLine($"Frame display failed: {ex.Message}");
     }
 }
        /// <summary>
        /// Copies the arrived frame's pixel buffer into the WriteableBitmap backing
        /// the current camera view, performing the copy on the UI dispatcher thread.
        /// </summary>
        /// <param name="sender">Event source (unused).</param>
        /// <param name="e">Event data exposing the shared frame handle.</param>
        public void FrameArrived(object sender, FrameArrivedEventArgs e)
        {
            if (!this.Enabled)
            {
                return;
            }

            byte[]          bytes;
            WriteableBitmap bitmap;

            // Clone the handle so the frame stays alive until the async UI
            // update below has finished with it.
            Handle <MemoryFrame> frameHandle = e.FrameHandle.Clone();

            switch (this.CameraMode)
            {
            case CameraMode.Color:
                bytes  = frameHandle.Item.BufferColor;
                bitmap = _colorBitmap.Bitmap;
                break;

            case CameraMode.Depth:
                bytes = frameHandle.Item.BufferDepthPreview;
                if (_use_DSAPI)
                {
                    bitmap = _depthPreviewBitmap.Bitmap;
                }
                else
                {
                    bitmap = _depthBitmap.Bitmap;
                }
                break;

            default:
                // Release the clone before bailing out; the original code
                // leaked the frame handle on this (supposedly unreachable) path.
                frameHandle.Dispose();
                throw new InvalidOperationException("Shouldn't get here");     // Avoid unassigned variable error
            }

            CameraPrimary.Dispatcher.InvokeAsync(() =>
            {
                try
                {
                    bitmap.Lock();

                    Int32Rect rect = new Int32Rect(0, 0, bitmap.PixelWidth, bitmap.PixelHeight);

                    // DSAPI depth preview is 3 bytes/pixel; all other modes are 4.
                    bitmap.WritePixels(rect, bytes, bitmap.PixelWidth * ((this.CameraMode == CameraMode.Depth && _use_DSAPI) ? 3:4), 0);

                    bitmap.AddDirtyRect(rect);
                    bitmap.Unlock();

                    EnsureImageSource(bitmap);
                }
                finally
                {
                    // Always release the cloned frame handle, even if the
                    // bitmap update throws.
                    frameHandle.Dispose();
                }
            });
        }
 /// <summary>
 /// Depending on color format, the frame is passed on or buffered for conversion.
 /// </summary>
 /// <param name="sender">Event source (unused).</param>
 /// <param name="e">Event data carrying the camera frame.</param>
 private void OnFrameArrived(object sender, FrameArrivedEventArgs e)
 {
     if (Format != ColorFormat.Grayscale)
     {
         // Non-grayscale frames need a conversion pass first; buffer at
         // most one frame at a time and drop the rest.
         if (!IsProcessingFrame)
         {
             IsProcessingFrame  = true;
             CurrentCameraFrame = e.Frame;
         }
         return;
     }

     // Grayscale frames can be forwarded directly.
     _frame = e.Frame;
     FPSUtils.VideoTick();
     FrameArrived?.Invoke(this, e);
 }
        /// <summary>
        /// Invoked if the NV12 to RGB conversion is complete and the data is ready to be read to the CPU.
        /// Publishes the converted frame to subscribers and re-opens the pipeline
        /// for the next incoming frame.
        /// </summary>
        /// <param name="request">Completed GPU readback request holding the converted pixel data.</param>
        private void OnCompleteReadback(AsyncGPUReadbackRequest request)
        {
            if (request.hasError)
            {
                // NOTE(review): IsProcessingFrame is not reset on this path, which
                // looks like it would stall further conversions — confirm intended.
                Debug.LogError("GPU readback error");
                return;
            }

            // Copy the readback buffer into the reusable _rgb Mat.
            MatUtils.copyToMat(request.GetData <uint>(), _rgb);
            Core.flip(_rgb, _rgb, 0); // image is flipped on x-axis
            CameraFrame           newFrame = new CameraFrame(_rgb, CurrentCameraFrame.Intrinsic, CurrentCameraFrame.Extrinsic, CurrentCameraFrame.Width, CurrentCameraFrame.Height, CurrentCameraFrame.FrameCount, Format);
            FrameArrivedEventArgs args     = new FrameArrivedEventArgs(newFrame);

            _frame = newFrame;
            FrameArrived?.Invoke(this, args);
            FPSUtils.VideoTick();
            NewFrameAvailable = true;
            // Re-arm OnFrameArrived so it may buffer the next frame for conversion.
            IsProcessingFrame = false;
        }
Example #7
0
        /// <summary>
        /// Renders the latest face-tracking results: draws bodies, face rectangles
        /// and resolved name labels onto the frame image, then updates the UI.
        /// Declared <c>async void</c>, which is acceptable for a top-level event
        /// handler; the original blocked the UI thread on <c>task.Result</c>.
        /// </summary>
        /// <param name="sender">Event source (unused).</param>
        /// <param name="e">Event data carrying the face location results.</param>
        private async void KftOnFrameArrived(object sender, FrameArrivedEventArgs e)
        {
            var faceLocations = e.FaceLocationResult;

            var bitmapTask = Task.Run(() => faceLocations.ImageBuffer.ToBitmap());

            var task = Task.Run(async() =>
            {
                int numFaces      = faceLocations.FaceRectangles.Length;
                var faceLabelTask = Task.Run(() =>
                {
                    // Resolve a display name for each tracked face; fall back to
                    // the raw face id when the database has no entry.
                    var labels = new string[numFaces];
                    for (int i = 0; i < numFaces; i++)
                    {
                        if (_kft.TrackedFaces.TryGetValue(faceLocations.TrackingIds[i], out var trackingStatus))
                        {
                            labels[i] = _kft.FaceDatabase[trackingStatus.TopTrackedCandidate.FaceId]?.Name ??
                                        $"ID: {trackingStatus.TopTrackedCandidate.FaceId}";
                        }
                    }

                    return(labels);
                });

                _renderer.Image = await bitmapTask;
                _renderer.DrawBodies(faceLocations.Bodies, _coordinateMapper);
                _renderer.DrawRectangles(faceLocations.FaceRectangles, faceLocations.TrackingIds);
                _renderer.DrawNames(await faceLabelTask, faceLocations.FaceRectangles.Select(r => new Point(r.Left, r.Bottom)).ToArray(), faceLocations.TrackingIds);
                return(_renderer.Image);
            });

            _fpsCounter.NewFrame();
            _lastFaceRects       = faceLocations.FaceRectangles;
            _lastTrackingIds     = faceLocations.TrackingIds;
            statusLabel.Text     = $"FPS: {_fpsCounter.CurrentFps:F2} (Mean {_fpsCounter.AverageFps:F2} Min {_fpsCounter.MinFps:F2} Max {_fpsCounter.MaxFps:F2}){Environment.NewLine}Frames: {_fpsCounter.TotalFrames}";
            // await instead of task.Result: blocking the UI thread here would
            // freeze the form until the render pipeline finishes.
            mainPictureBox.Image = await task;
        }
Example #8
0
        /// <summary>
        /// Shows each incoming camera frame in <c>pictureBoxCapture</c>.
        /// </summary>
        /// <param name="source">The camera that raised the event.</param>
        /// <param name="e">Event data exposing the captured frame.</param>
        private void Cam_OnFrameArrived(object source, FrameArrivedEventArgs e)
        {
            pictureBoxCapture.Image = e.GetFrame();
        }
        /// <summary>
        /// Per-frame capture pipeline: lazily starts calibration on the first frame,
        /// throttles captures by the shot's configured duration, clones the frame
        /// handle for later serialization, and advances to the next shot once the
        /// shot's maximum frame count has been collected.
        /// </summary>
        /// <param name="sender">Event source (unused).</param>
        /// <param name="ea">Event data exposing the shared frame handle.</param>
        public virtual void FrameArrived(object sender, FrameArrivedEventArgs ea)
        {
            // When the first frame arrive, start the calibration operation. This won't work
            // if we try to do it right after calling _sensor.Open().
            if (_calibration == null)
            {
                _calibration = Calibrator.CalibrateAsync(_reader);
                // set device config once
                //_reader.Device.SetDeviceConfig();
            }

            // No shot is being captured right now — nothing to do.
            if (_capturingShot == null)
            {
                return;
            }

            // In sweeping mode, only capture while a sweep is actually in progress.
            if (_mode == CaptureMode.Sweeping && !_sweeping)
            {
                return;
            }

            // Lock exposure/gain once so all captured frames are consistent.
            if (!_cameraConfigLocked)
            {
                LockCameraExposureAndGain();
                _cameraConfigLocked = true;
            }

            if (!_lastShotTaken.HasValue)
            {
                _lastShotTaken = DateTime.Now;
            }

            DateTime currentFrame = DateTime.Now;

            double durationFromLastShot = (currentFrame - _lastShotTaken.Value).TotalMilliseconds;


            // Throttle: skip frames until the configured shot duration has elapsed.
            // NOTE(review): an exact-zero elapsed time is deliberately let through
            // — presumably to capture the very first frame; confirm intent.
            if (durationFromLastShot != 0 && durationFromLastShot < _capturingShot.ShotDefinition.ShotDuration)
            {
                return;
            }
            // (1) Serialize frame data

            if (_frameCount >= _session.MaximumFrameCount)
            {
                Console.WriteLine(string.Format("Too many frames! Got {0} but we only have room for {1}", _frameCount + 1, _session.MaximumFrameCount));
            }

            // Clone keeps the frame alive until serialization releases the handle.
            _frameHandles.Add(ea.FrameHandle.Clone());

            // Increment whether we saved the data or not (this allows an improved error message)
            _frameCount += 1;

            if (FrameCaptured != null)
            {
                var outEvent = new Smithers.Sessions.SessionManagerEventArgs <TShot, TShotDefinition, TSavedItem>(_capturingShot, null, null);
                FrameCaptured(this, outEvent);
            }


            // Keep capturing until the shot reaches its maximum frame count;
            // sweeping mode also refreshes the throttle timestamp per frame.
            if (_mode == CaptureMode.Sweeping)
            {
                if (_capturingShot != null && _frameCount < _capturingShot.ShotDefinition.MaximumFrameCount)
                {
                    _lastShotTaken = DateTime.Now;
                    return;
                }
            }
            else if ((_mode != CaptureMode.Sweeping) && _frameCount < _capturingShot.ShotDefinition.MaximumFrameCount)
            {
                return;
            }

            if (_capturingShot != null)
            {
                MoveToNextShot();
            }
        }
Example #10
0
        /// <summary>
        /// Shows each incoming camera frame in <c>pictureBoxLoading</c>.
        /// </summary>
        /// <param name="source">The camera that raised the event.</param>
        /// <param name="e">Event data exposing the captured frame.</param>
        private void myCam_OnFrameArrived(object source, FrameArrivedEventArgs e)
        {
            pictureBoxLoading.Image = e.GetFrame();
        }
Example #11
0
 /// <summary>
 /// Displays the captured frame in the barcode preview box.
 /// </summary>
 /// <param name="source">The camera that raised the event.</param>
 /// <param name="e">Event data exposing the captured frame.</param>
 private void myCamera_OnFrameArrived(object source, FrameArrivedEventArgs e)
 {
     Image frame = e.GetFrame();
     BarcodeImageBox.Image = frame;
 }
Example #12
0
        //-------------------------------------------------------------------------------------------
        // Camera frame handler: shows each captured frame in pictureBox1.
        // (Handler name kept as-is — it is referenced by the event subscription.)
        private void camara_onFArameArrived(object source, FrameArrivedEventArgs e)
        {
            pictureBox1.Image = e.GetFrame();
        }
Example #13
0
        /// <summary>
        /// Displays the captured frame in <c>PXB_Picture</c>.
        /// </summary>
        /// <param name="source">The camera that raised the event.</param>
        /// <param name="e">Event data exposing the captured frame.</param>
        private void Camera_OnFrameArrived(object source, FrameArrivedEventArgs e)
        {
            PXB_Picture.Image = e.GetFrame();
        }
Example #14
0
 /// <summary>
 /// Stores the captured frame in the <c>image</c> field and shows it in
 /// <c>clientPicureBox</c>.
 /// </summary>
 /// <param name="source">The camera that raised the event.</param>
 /// <param name="e">Event data exposing the captured frame.</param>
 private void Camera_OnFrameArrived(object source, FrameArrivedEventArgs e)
 {
     // Keep the frame in the field so other members can reuse it.
     image = e.GetFrame();
     clientPicureBox.Image = image;
 }
Example #15
0
        /// <summary>
        /// Displays the captured frame in <c>pictureBox1</c>.
        /// </summary>
        /// <param name="source">The camera that raised the event.</param>
        /// <param name="e">Event data exposing the captured frame.</param>
        public void MyCamera_OnFrameArrived(object source, FrameArrivedEventArgs e)
        {
            Image frame = e.GetFrame();
            pictureBox1.Image = frame;
        }
Example #16
0
        /// <summary>
        /// Displays the captured frame in <c>picCamera</c>.
        /// </summary>
        /// <param name="source">The camera that raised the event.</param>
        /// <param name="e">Event data exposing the captured frame.</param>
        private void MyCamera_OnFrameArrived(object source, FrameArrivedEventArgs e)
        {
            picCamera.Image = e.GetFrame();
        }
 /// <summary>
 /// Intentionally empty frame handler: frames arrive but are not processed.
 /// NOTE(review): presumably a placeholder or a way to keep the camera
 /// pipeline running — confirm frames are meant to be dropped here.
 /// </summary>
 /// <param name="source">The camera that raised the event.</param>
 /// <param name="e">Event data for the ignored frame.</param>
 private void MyCamera_OnFrameArrived(object source, FrameArrivedEventArgs e)
 {
 }