private void OnNewSample(Frame frame)
    {
        try
        {
            if (frame.IsComposite)
            {
                using (var fs = FrameSet.FromFrame(frame))
                    using (var points = TryGetPoints(fs))
                    {
                        if (points != null)
                        {
                            q.Enqueue(points);
                        }
                    }
                return;
            }

            if (frame.Is(Extension.Points))
            {
                q.Enqueue(frame);
            }
        }
        catch (Exception e)
        {
            Debug.LogException(e);
        }
    }
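TryGetPoints is not shown in this example; a minimal sketch, assuming the librealsense C# bindings used throughout this page (the helper name and body are an assumption):

    // Hypothetical helper: return the XYZ points frame if the frameset contains one, otherwise null.
    private static Points TryGetPoints(FrameSet frameSet)
    {
        return frameSet.FirstOrDefault<Points>(Stream.Depth, Format.Xyz32f);
    }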
    void OnNewSample(Frame frame)
    {
        try
        {
            if (frame.IsComposite)
            {
                using (var fs = frame.As<FrameSet>())
                    using (var f = fs.FirstOrDefault(matcher))
                    {
                        if (f != null)
                        {
                            q.Enqueue(f);
                        }
                        return;
                    }
            }

            if (!matcher(frame))
            {
                return;
            }

            using (frame)
            {
                q.Enqueue(frame);
            }
        }
        catch (Exception e)
        {
            Debug.LogException(e);
            // throw;
        }
    }
Example #3
    private void OnNewSample(Frame frame)
    {
        if (q == null)
        {
            return;
        }
        try
        {
            if (frame.IsComposite)
            {
                using (var fs = frame.As<FrameSet>())
                    using (var points = fs.FirstOrDefault<Points>(Stream.Depth, Format.Xyz32f))
                    {
                        if (points != null)
                        {
                            q.Enqueue(points);
                        }
                    }
                return;
            }

            if (frame.Is(Extension.Points))
            {
                q.Enqueue(frame);
            }
        }
        catch (Exception e)
        {
            Debug.LogException(e);
        }
    }
Example #4
 void OnNewPointSample(Frame frame)
 {
     using (var pf = RetrievePointFrame(frame))
         if (pf != null)
         {
             _pointQueue.Enqueue(pf);
         }
 }
Example #5
 void OnNewColorSample(Frame frame)
 {
     using (var cf = RetrieveColorFrame(frame))
         if (cf != null)
         {
             _colorQueue.Enqueue(cf);
         }
 }
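RetrievePointFrame and RetrieveColorFrame are not shown in the two snippets above; a minimal sketch, assuming the librealsense C# bindings used elsewhere on this page (helper names and the color format are assumptions):

 Frame RetrievePointFrame(Frame frame)
 {
     // Only composite frames (framesets) can contain a points frame.
     if (!frame.IsComposite)
     {
         return null;
     }
     using (var fs = frame.As<FrameSet>())
     {
         return fs.FirstOrDefault<Points>(Stream.Depth, Format.Xyz32f);
     }
 }

 Frame RetrieveColorFrame(Frame frame)
 {
     // Pull the color frame out of the frameset, or return null if absent.
     if (!frame.IsComposite)
     {
         return null;
     }
     using (var fs = frame.As<FrameSet>())
     {
         return fs.FirstOrDefault(Stream.Color, Format.Rgb8);
     }
 }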
        private void Init()
        {
            using Context ctx = new Context();
            var devices = ctx.QueryDevices();

            Console.WriteLine($"Found {devices.Count} RealSense devices connected.");
            if (devices.Count == 0)
            {
                throw new Exception("No RealSense device detected!");
            }

            Device dev = devices[0];

            Console.WriteLine($"Using device 0: {dev.Info[CameraInfo.Name]}");
            Console.WriteLine("Device Sources:");

            foreach (Sensor sensor in dev.Sensors)
            {
                Console.WriteLine($"Sensor found: {sensor.Info[CameraInfo.Name]}");
            }
            var cfg = new Config();

            cfg.EnableStream(Stream.Depth);
            cfg.EnableStream(Stream.Color, Format.Bgr8);

            intelPipe = new Intel.RealSense.Pipeline();
            PipelineProfile profileIntelPipe = intelPipe.Start(cfg);
            var streamDepth = profileIntelPipe.GetStream<VideoStreamProfile>(Stream.Depth);

            sicsDepth = streamDepth.GetIntrinsics();
            Console.WriteLine($"Depth Stream: {sicsDepth.width}x{sicsDepth.height}");

            var streamRBG = profileIntelPipe.GetStream<VideoStreamProfile>(Stream.Color);

            sicsRBG = streamRBG.GetIntrinsics();
            Console.WriteLine($"RGB Stream: {sicsRBG.width}x{sicsRBG.height}");

            Task.Run(() =>
            {
                while (true)
                {
                    try
                    {
                        using FrameSet frames = intelPipe.WaitForFrames();
                        using Frame frDepth   = frames.FirstOrDefault(Stream.Depth);
                        qDepth.Enqueue(frDepth);
                        using Frame frRBG = frames.FirstOrDefault(Stream.Color);
                        qRBG.Enqueue(frRBG);
                    }
                    catch (Exception e)
                    {
                        Console.WriteLine(e.Message);
                    }
                }
            });
        }
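The background task above only fills qDepth and qRBG; a consumer can then block on either queue. A minimal sketch, assuming both fields are Intel.RealSense.FrameQueue instances:

            // Hypothetical consumer: wait for the next depth frame and sample its center pixel.
            using (Frame f = qDepth.WaitForFrame())
            using (DepthFrame depth = f.As<DepthFrame>())
            {
                float distance = depth.GetDistance(depth.Width / 2, depth.Height / 2);
                Console.WriteLine($"Distance at image center: {distance:F3} m");
            }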
 private void OnNewSample(Frame f)
 {
     if (f.IsComposite)
     {
         using (var fs = f.As<FrameSet>())
             using (var poseFrame = fs.FirstOrDefault(Stream.Pose, Format.SixDOF))
                 if (poseFrame != null)
                 {
                     q.Enqueue(poseFrame);
                 }
     }
     else
     {
         using (var p = f.Profile)
             if (p.Stream == Stream.Pose && p.Format == Format.SixDOF)
             {
                 q.Enqueue(f);
             }
     }
 }
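A consumer can then dequeue pose frames and read their 6DOF data; a minimal sketch, assuming q is an Intel.RealSense.FrameQueue and the wrapper's PoseFrame/Pose types (field names are assumptions):

     // Hypothetical consumer: wait for the next pose frame and print its translation.
     using (Frame f = q.WaitForFrame())
     using (PoseFrame pose = f.As<PoseFrame>())
     {
         var data = pose.PoseData;
         Console.WriteLine($"Position: {data.translation.x}, {data.translation.y}, {data.translation.z}");
     }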
    void OnNewSample(Frame frame)
    {
        try
        {
            if (frame.IsComposite)
            {
                using (var fs = FrameSet.FromFrame(frame))
                    // using (var f = fs[_stream, _format, _streamIndex])
                    using (var f = fs.FirstOrDefault(matcher))
                    {
                        if (f != null)
                        {
                            q.Enqueue(f);
                        }
                        return;
                    }
            }

            // using (var p = frame.Profile)
            // {
            //     if (p.Stream != _stream || p.Format != _format || p.Index != _streamIndex)
            //     {
            //         return;
            //     }
            // }
            if (!matcher(frame))
            {
                return;
            }

            using (frame)
            {
                q.Enqueue(frame);
            }
        }
        catch (Exception e)
        {
            Debug.LogException(e, this);
            // throw;
        }
    }
        internal void SaveFrame(byte[] frameBytes, int left, int top, int width, int height, string viewerID, string machineName, DateTimeOffset startTime)
        {
            var rcFrame = new RemoteControlFrame(frameBytes, left, top, width, height, viewerID, machineName, startTime);

            FrameQueue.Enqueue(rcFrame);

            lock (LockObject)
            {
                if (ProcessingTask?.IsCompleted ?? true)
                {
                    ProcessingTask = Task.Run(new Action(StartProcessing));
                }
            }
        }
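StartProcessing itself is not shown; a minimal sketch of the drain loop, assuming FrameQueue is a ConcurrentQueue<RemoteControlFrame> and a SendFrame sink (both assumptions):

        private void StartProcessing()
        {
            // Drain the queue until it is empty; SaveFrame restarts the task when new frames arrive.
            while (FrameQueue.TryDequeue(out RemoteControlFrame frame))
            {
                SendFrame(frame); // hypothetical consumer of the dequeued frame
            }
        }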
        /// <summary>
        /// Encodes and sends a packet with the Herkulex protocol
        /// </summary>
        /// <param name="port">Serial port to use</param>
        /// <param name="pID">Servo ID</param>
        /// <param name="CMD">Command ID</param>
        private void EncodeAndSendPacket(SerialPort port, byte pID, byte CMD)
        {
            byte[] packet = new byte[7];

            packet[0] = 0xFF;
            packet[1] = 0xFF;
            packet[2] = 7;
            packet[3] = pID;
            packet[4] = CMD;
            packet[5] = CommonMethods.CheckSum1(packet[2], packet[3], packet[4]);
            packet[6] = CommonMethods.CheckSum2(packet[5]);

            FrameQueue.Enqueue(packet);
            MessageEnqueuedEvent.Set();
            //port.Write(packet, 0, packet.Length);
        }
Example #11
 public void onNewDepthSample(Frame i_Frame)
 {
     if (m_Queue == null)
     {
         return;
     }
     try
     {
         if (i_Frame.IsComposite)
         {
             // A composite frame is a frameset, not a DepthFrame; extract the depth frame from it.
             using (var frameSet = i_Frame.As<FrameSet>())
                 using (var depthFrame = frameSet.DepthFrame)
                 {
                     if (depthFrame != null)
                     {
                         m_Queue.Enqueue(depthFrame);
                     }
                 }
             return;
         }

         if (i_Frame.Is(Extension.DepthFrame))
         {
             m_Queue.Enqueue(i_Frame);
         }
     }
     catch (System.Exception e)
     {
         System.Console.Error.WriteLine(e); // TODO: handle the exception properly
     }
 }
    private void OnFrames(FrameSet frames)
    {
        // Each FrameSet.DepthFrame / ColorFrame access returns a new frame that must be disposed.
        using (var depthFrame = frames.DepthFrame)
        {
            if (depthFrame == null)
            {
                Debug.Log("No depth frame in frameset, can't create point cloud");
                return;
            }

            if (!UpdateParticleParams(depthFrame.Width, depthFrame.Height))
            {
                Debug.Log("Unable to create point cloud");
                return;
            }

            using (var points = pc.Calculate(depthFrame))
            using (var colorFrame = frames.ColorFrame)
            {
                if (colorFrame != null && colorFrame.BitsPerPixel == 24)
                {
                    pc.MapTexture(colorFrame);
                    colorFrameWidth  = colorFrame.Width;
                    colorFrameHeight = colorFrame.Height;
                    var newSize = colorFrame.Stride * colorFrameHeight;
                    lock (l)
                    {
                        if (lastColorImage == null || lastColorImage.Length != newSize)
                        {
                            lastColorImage = new byte[newSize];
                        }

                        colorFrame.CopyTo(lastColorImage);
                    }
                }
                pointsQueue.Enqueue(points);
            }
        }
    }
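UpdateParticleParams is not shown; a minimal sketch, assuming the cloud is rendered through a Unity ParticleSystem with a particle buffer field (names are assumptions):

    private ParticleSystem.Particle[] particles;

    private bool UpdateParticleParams(int width, int height)
    {
        // Resize the particle buffer whenever the depth resolution changes.
        int count = width * height;
        if (count <= 0)
        {
            return false;
        }
        if (particles == null || particles.Length != count)
        {
            particles = new ParticleSystem.Particle[count];
        }
        return true;
    }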
        /// <summary>
        /// Encodes and sends a packet with the Herkulex protocol
        /// </summary>
        /// <param name="port">Serial port to use</param>
        /// <param name="pID">Servo ID</param>
        /// <param name="CMD">Command ID</param>
        /// <param name="dataToSend">Data</param>
        private void EncodeAndSendPacket(SerialPort port, byte pID, byte CMD, byte[] dataToSend)
        {
            byte packetSize = (byte)(7 + dataToSend.Length);

            byte[] packet = new byte[packetSize];

            packet[0] = 0xFF;
            packet[1] = 0xFF;
            packet[2] = packetSize;
            packet[3] = pID;
            packet[4] = CMD;
            packet[5] = CommonMethods.CheckSum1(packet[2], packet[3], packet[4], dataToSend);
            packet[6] = CommonMethods.CheckSum2(packet[5]);

            for (int i = 0; i < dataToSend.Length; i++)
            {
                packet[7 + i] = dataToSend[i];
            }

            FrameQueue.Enqueue(packet);
            //Console.WriteLine("inQueue : " + FrameQueue.Count);
            MessageEnqueuedEvent.Set();
            //port.Write(packet, 0, packet.Length);
        }
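The CheckSum1/CheckSum2 helpers are not shown. Per the Herkulex DRS-0101 datasheet, checksum 1 XORs the size, ID, command, and data bytes and masks with 0xFE, and checksum 2 is the masked bitwise complement of checksum 1; a sketch of CommonMethods under that assumption:

        public static byte CheckSum1(byte size, byte pID, byte cmd, byte[] data = null)
        {
            // XOR of size, ID, command, and every data byte, masked to keep bit 0 clear.
            byte cs = (byte)(size ^ pID ^ cmd);
            if (data != null)
            {
                foreach (byte b in data)
                {
                    cs ^= b;
                }
            }
            return (byte)(cs & 0xFE);
        }

        public static byte CheckSum2(byte checkSum1)
        {
            // Bitwise complement of checksum 1, under the same 0xFE mask.
            return (byte)(~checkSum1 & 0xFE);
        }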
        public bool ReadNextPacket()
        {
            var packetLoc    = packet;
            var pPacket      = &packetLoc;
            var pVideoStream = pFormatContext->streams[streamVideoIndex];
            //var pAudioStream = pFormatContext->streams[streamAudioIndex];
            //    var pAudioStream= pFormatContext->streams[streamAudioIndex];
            //   var pVideoCodecContext = pFormatContext->streams[streamIndex]->codec;

            //var readFullFrame = false;

            // Ignore the return value, which can randomly be -1 for MKVs on last frame without any other issues.
            // todo: send NULL packet at the end of the video or something
            var readFrameRet = ffmpeg.av_read_frame(pFormatContext, pPacket);

            packet = packetLoc;
            if (readFrameRet == ffmpeg.AVERROR_EOF)
            {
                ffmpeg.av_packet_unref(pPacket);
                return(false);
            }

            if (pPacket->stream_index == pVideoStream->index)
            {
                var sendPacketResult = ffmpeg.avcodec_send_packet(pVideoCodecContext, pPacket);
                if (sendPacketResult >= 0)
                {
                    while (ffmpeg.avcodec_receive_frame(pVideoCodecContext, pDecodedFrame) >= 0)
                    {
                        ffmpeg.sws_scale(pConvertContext, pDecodedFrame->data, pDecodedFrame->linesize, 0, Height,
                                         dstData, dstLinesize);

                        var rgbaData = new byte[videoDataSize];
                        Marshal.Copy((IntPtr)convertedFrameBuffer, rgbaData, 0, rgbaData.Length);
                        var timeStamp = (double)(pDecodedFrame->pts * pVideoStream->time_base.num) / pVideoStream->time_base.den;
                        FrameQueue.Enqueue(new MovieFrame
                        {
                            RgbaData  = rgbaData,
                            Timestamp = timeStamp
                        });
                    }
                    ffmpeg.av_frame_unref(pDecodedFrame);
                }
            }
            //else if (pPacket->stream_index == pAudioStream->index)
            //{
            //    var sendPacketResult = ffmpeg.avcodec_send_packet(pAudioCodecContext, pPacket);
            //    if (sendPacketResult >= 0)
            //    {
            //        while (ffmpeg.avcodec_receive_frame(pAudioCodecContext, pDecodedFrame) >= 0)
            //        {
            //            var format = (AVSampleFormat)pDecodedFrame->format;
            //            var planar = ffmpeg.av_sample_fmt_is_planar(format) != 0;
            //            var channels = ffmpeg.av_get_channel_layout_nb_channels(pDecodedFrame->channel_layout);
            //            var planes = planar ? channels : 1;
            //            var bps = ffmpeg.av_get_bytes_per_sample(format);
            //            var planeSize = bps * pDecodedFrame->nb_samples * (planar ? 1 : channels);
            //            var sampleRate = pDecodedFrame->sample_rate;
            //
            //            // todo: correctly process the planar case
            //            var sampleData = new byte[planeSize];
            //            Marshal.Copy((IntPtr)pDecodedFrame->extended_data[0], sampleData, 0, planeSize);
            //            var timeStamp = (double)(pDecodedFrame->pts * pAudioStream->time_base.num) / pAudioStream->time_base.den;
            //            AudioQueue.Enqueue(new MovieAudioFrame
            //            {
            //                SamplesData = sampleData,
            //                Timestamp = timeStamp,
            //                SampleRate = sampleRate,
            //                NumChannels = planar ? 1 : channels,
            //                BitsPerSample = bps
            //            });
            //        }
            //        ffmpeg.av_frame_unref(pDecodedFrame);
            //    }
            //}

            ffmpeg.av_packet_unref(pPacket);

            return(true);
        }
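A typical decode loop then alternates between reading packets and draining the queue; a minimal sketch, assuming the surrounding class (here called reader) exposes FrameQueue as a Queue<MovieFrame> and a Present callback (names are assumptions):

            // Hypothetical caller: decode until EOF, handing each frame to the renderer.
            while (reader.ReadNextPacket())
            {
                while (reader.FrameQueue.Count > 0)
                {
                    MovieFrame movieFrame = reader.FrameQueue.Dequeue();
                    Present(movieFrame.RgbaData, movieFrame.Timestamp);
                }
            }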
        private void StartWebCam(BackgroundWorker worker = null)
        {
            if (cap == null)
            {
                cap = new VideoCapture(0);
            }
            if (!cap.IsOpened())
            {
                return;
            }
            OpenCvSharp.Cv2.NamedWindow("Video", WindowMode.AutoSize);
            int       cnt      = 0;
            Mat       frame    = new Mat();
            EyePoints rightEye = new EyePoints(true);
            EyePoints leftEye  = new EyePoints(false);

            IsRunning = true;
            while (IsRunning)
            {
                bool result = cap.Read(frame);
                if (!result)
                {
                    worker.CancelAsync();
                    IsRunning = false;
                }
                if (frame != null && (frame.Rows * frame.Cols > 0))
                {
                    cnt++;
                    if (cnt % frameskip == 0)
                    {
                        FrameQueue.Enqueue(frame);
                        cnt = 0;
                    }
                }
                while (FrameQueue.Count > 0)
                {
                    Mat    que   = FrameQueue.Dequeue();
                    Rect[] faces = GetFaces(que, 1);
                    for (int i = 0; i < faces.Length; i++)
                    {
                        //GetFaceInRect(faces[i], que, i);
                        Scalar              eyecolor  = new Scalar(0, 0, 255);
                        Array2D <byte>      gray      = ConvertMatToDlib2DArray(que);
                        FullObjectDetection landmarks = predictor.Detect(gray, ConvertToDlib(faces[i]));
                        InitializeEyes(landmarks, leftEye, rightEye);
                        //DrawEye(que, landmarks, leftEye);
                        //DrawEye(que, landmarks, rightEye);
                        Rect leftboundingBox = BoundingBoxAroundEye(leftEye, 0);
                        DrawRect(que, leftboundingBox);
                        OpenCvSharp.Point centerOfLeftEye = DetectCenterOfEye(que, leftboundingBox);
                        centerOfLeftEye.X += leftboundingBox.X;

                        Rect rightboundingBox = BoundingBoxAroundEye(rightEye, 0);
                        DrawRect(que, rightboundingBox);
                        OpenCvSharp.Point centerOfRightEye = DetectCenterOfEye(que, rightboundingBox);
                        centerOfRightEye.X += rightboundingBox.X;

                        EyeDirection leftEyeDirection  = leftEye.GetEyePosition(centerOfLeftEye);
                        EyeDirection rightEyeDirection = rightEye.GetEyePosition(centerOfRightEye);

                        EyeDirection eyeDirection = EyeDirection.unknown;
                        if (leftEyeDirection == EyeDirection.center || rightEyeDirection == EyeDirection.center)
                        {
                            eyeDirection = EyeDirection.center;
                        }
                        else if (leftEyeDirection == EyeDirection.left)
                        {
                            eyeDirection = EyeDirection.left;
                        }
                        else if (rightEyeDirection == EyeDirection.right)
                        {
                            eyeDirection = EyeDirection.right;
                        }

                        OpenCvSharp.Point position = new OpenCvSharp.Point(50, 50);
                        Cv2.PutText(img: que, text: eyeDirection.ToDisplay(), org: position, fontFace: HersheyFonts.HersheySimplex, fontScale: 2, new Scalar(0, 0, 255));
                    }
                    //BitmapImage bmi = ConvertToBMI(frame, cnt, "D:/junk/TestCamImages");
                    if (worker != null)
                    {
                        //worker.ReportProgress(cnt, bmi);
                        try
                        {
                            OpenCvSharp.Cv2.ImShow("Video", que);
                            int key = Cv2.WaitKey(10);   // as in 10 milliseconds
                            if (key == 27)
                            {
                                worker.CancelAsync();
                                IsRunning = false;
                            }
                        }
                        catch (Exception ex)
                        {
                            string msg = ex.Message;
                        }
                    }
                    if (worker != null && worker.CancellationPending)
                    {
                        Cv2.DestroyWindow("Video");
                        break;
                    }
                }
            }
        }
Example #16
        void videoSource_NewFrame(object sender, AForge.Video.NewFrameEventArgs eventArgs)
        {
            webcamImage.Dispatcher.Invoke(delegate() {
                if (webcamImage.Width != eventArgs.Frame.Size.Width || webcamImage.Height != eventArgs.Frame.Size.Height)
                {
                    webcamImage.Width  = eventArgs.Frame.Size.Width;
                    webcamImage.Height = eventArgs.Frame.Size.Height;
                }
            });

            if (isRecording)
            {
                FrameQueue.Enqueue(new VideoFrame((Bitmap)eventArgs.Frame.Clone(), DateTime.Now - videoStart));
            }

            BitmapImage bi = new BitmapImage();

            bi.BeginInit();

            MemoryStream ms = new MemoryStream();

            eventArgs.Frame.Save(ms, ImageFormat.Bmp);
            ms.Seek(0, SeekOrigin.Begin);

            bi.StreamSource = ms;
            bi.EndInit();

            //Using the freeze function to avoid cross thread operations
            bi.Freeze();

            //Calling the UI thread using the Dispatcher to update the 'Image' WPF control
            webcamImage.Dispatcher.Invoke(delegate
            {
                webcamImage.Source = bi; /*webcamImage is the name of the 'Image' WPF control*/
                FrameCounter.Instance.Count();
                frame_counter++;

                fpsLabel.Content = fps + " FPS";

                resolutionLabel.Content = videoSource.VideoResolution.FrameSize.Width + "x" + videoSource.VideoResolution.FrameSize.Height + ", " + videoSource.VideoResolution.AverageFrameRate + " FPS";

                cameraIDLabel.Content = videoSource.Source;

                debugGrid.Width = MeasureString(videoSource.Source).Width + 5;

                framesLabel.Content = frame_counter + " frames decoded";

                if (isRecording)
                {
                    TimeSpan recordTimeElapsed = DateTime.Now - videoStart;

                    string time = "";

                    if (recordTimeElapsed.Hours > 0)
                    {
                        time += recordTimeElapsed.Hours + ":";
                    }

                    time += recordTimeElapsed.Minutes + ":";

                    if (recordTimeElapsed.Seconds < 10)
                    {
                        time += "0";
                    }

                    time += recordTimeElapsed.Seconds.ToString();

                    recordTimeLabel.Content = time;
                }
            });

            // Forcing a collection every frame is expensive; it is used here to reclaim the per-frame Bitmap allocations.
            GC.Collect();
        }
 /// <summary>
 /// Internal helper callback on remote video frame ready. Enqueues the newly-available video
 /// frame into the internal <see cref="VideoSource.FrameQueue"/> for later consumption by
 /// a video renderer.
 /// </summary>
 /// <param name="frame">The newly-available video frame from the remote peer</param>
 private void I420ARemoteVideoFrameReady(I420AVideoFrame frame)
 {
     // This does not need to enqueue work, because FrameQueue is thread-safe
     // and can be manipulated from any thread (does not access Unity objects).
     FrameQueue.Enqueue(frame);
 }
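On the consumer side, a renderer drains the same queue from the main thread; a minimal sketch, assuming FrameQueue is a VideoFrameQueue<I420AVideoFrameStorage> with a TryDequeue method and an UploadI420AToTexture helper (all names beyond FrameQueue are assumptions):

 private void Update()
 {
     // Hypothetical Unity consumer: take the most recent remote frame and upload it to a texture.
     if (FrameQueue.TryDequeue(out I420AVideoFrameStorage storage))
     {
         UploadI420AToTexture(storage); // hypothetical copy into a Texture2D
     }
 }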