/// <summary>
        /// Respond to capture event: queue a YUV copy of the frame and run any
        /// registered frame processors over both the source image and the buffer.
        /// This should return as fast as possible.
        /// </summary>
        /// <param name="sender">Event source (unused).</param>
        /// <param name="e">Carries the captured frame and its capture time.</param>
        public void HandleCapturedFrame(object sender, VideoDataEventArgs e)
        {
            if (e == null || e.Frame == null) return;

            TimedImage ti = new TimedImage(e.CaptureTime, width, height);

            try {
                if (ProcessorQueue != null)                       // process the source image
                {
                    foreach (var item in ProcessorQueue)
                    {
                        item.ProcessFrame(e.Frame, e.CaptureTime);
                    }
                }
            } catch (Exception ex) {
                // Processors must never kill the capture path, but don't hide failures either.
                Console.WriteLine("Frame processor failed (source image): " + ex.Message);
            }

            // Add and fill under the same lock so consumers (which also lock
            // WaitingFrames before dequeuing) never observe a half-filled frame.
            lock (WaitingFrames) {
                WaitingFrames.Add(ti);
                ResampleBuffer(e.Frame, ref ti.Luma, ref ti.Cr, ref ti.Cb, width, height);
            }

            try {
                if (ProcessorQueue != null)                       // process the YUV buffer
                {
                    foreach (var item in ProcessorQueue)
                    {
                        item.ProcessFrame(ti);
                    }
                }
            } catch (Exception ex) {
                Console.WriteLine("Frame processor failed (YUV buffer): " + ex.Message);
            }
        }
// Example #2
 /// <summary>
 /// Worker loop: pops the oldest buffered image, optionally renders it via the
 /// ImageLoaded callback and/or compresses it, then sleeps out the remainder of
 /// the frame interval. Runs until 'running' is cleared or the thread is
 /// interrupted (expected shutdown signal).
 /// </summary>
 void RenderRun()
 {
     try
     {
         Stopwatch sw = new Stopwatch();   // hoisted: no need to allocate one per iteration
         while (running)
         {
             sw.Reset();
             sw.Start();
             // NOTE(review): the original also tracked a 'withinFramesFactor' value
             // derived from ti.ShowTime, but it was never read — removed as dead code.
             lock (imgBuffer)
             {
                 if (imgBuffer.Count > 0)
                 {
                     TimedImage ti = imgBuffer.First();
                     BitmapImage src = ti.Image;
                     if (config.Render)
                     {
                         //logger.Trace("Render > {0}", ti);
                         if (ImageLoaded != null)
                         {
                             ImageLoaded(src);
                         }
                     }
                     if (config.Compress)
                     {
                         //logger.Trace("Compress > {0}", ti);
                         CompressImage(src, ti.Name);
                     }
                     imgBuffer.Remove(ti);
                 }
             }
             long elapsed = sw.ElapsedMilliseconds;
             // Sleep off the rest of the frame period so the loop paces at ~frameTime ms.
             int delay = (int)(frameTime - elapsed);
             if (delay > 1)
             {
                 System.Threading.Thread.Sleep(delay);
             }
         }
     }
     catch (System.Threading.ThreadInterruptedException)
     {
         // Expected: another thread interrupts the Sleep to stop this loop.
     }
     catch (Exception e)
     {
         MessageBox.Show("Render: " + e.Message);
         running = false;
     }
 }
        /// <summary>
        /// Load the closest matching frame by offset time.
        /// Fills the encoder-ready frame, with given time-code.
        /// WARNING: use this *OR* 'LoadToFrame', but not both!
        /// </summary>
        /// <param name="Frame">Encoder frame to fill. On any failure the plane
        /// pointers and VideoSize are zeroed so stale data is never re-sent.</param>
        /// <param name="OffsetSeconds">Target time-code; the loaded frame is stamped with this value.</param>
        public void SelectiveLoadFrame(ref MediaFrame Frame, double OffsetSeconds)
        {
            // This is meant to be used for big frame skips on static bars&tones.
            // will need to be called from a clocked reference, and will give frames
            // a time based on that clock.

            // You should call 'SelectiveDequeue' before updating the reference clock
            int idx = FirstFrameMatchingTime(OffsetSeconds);

            if (idx < 0)               // no frame available
            {
                Frame.Yplane    = IntPtr.Zero;
                Frame.Uplane    = IntPtr.Zero;
                Frame.Vplane    = IntPtr.Zero;
                Frame.VideoSize = 0;
                return;
            }

            try {
                TimedImage img = null;
                lock (WaitingFrames) {
                    // NOTE(review): 'idx' was computed outside this lock, so the list may
                    // have changed; an out-of-range access here lands in the catch below.
                    img = WaitingFrames[idx];
                }
                if (img == null || img.Luma == null || img.Cr == null || img.Cb == null)
                {
                    // Missing or incomplete frame: zero the planes rather than
                    // leaving stale pointers from a previous load in the caller's Frame.
                    Frame.Yplane    = IntPtr.Zero;
                    Frame.Uplane    = IntPtr.Zero;
                    Frame.Vplane    = IntPtr.Zero;
                    Frame.VideoSize = 0;
                    return;
                }
                Y = img.Luma;
                u = img.Cr;
                v = img.Cb;

                Frame.VideoSize       = (ulong)Y.Length;
                Frame.VideoSampleTime = OffsetSeconds;

                // Pin the managed planes so native encoder code can read them.
                // The caller must release via UnloadFrame as soon as possible.
                pinY         = GCHandle.Alloc(Y, GCHandleType.Pinned);
                Frame.Yplane = pinY.AddrOfPinnedObject();

                pinU         = GCHandle.Alloc(u, GCHandleType.Pinned);
                Frame.Uplane = pinU.AddrOfPinnedObject();

                pinV         = GCHandle.Alloc(v, GCHandleType.Pinned);
                Frame.Vplane = pinV.AddrOfPinnedObject();
            } catch {
                // Drop the bad frame data:
                UnloadFrame(ref Frame);                 // this can still be sent to the encoder, it should just mean a dropped frame
                Console.WriteLine("Lost a frame (no image)");
            }
        }
        /// <summary>
        /// Load the buffer into a MediaFrame for the encoder.
        /// Dequeues the oldest waiting frame (by Seconds) and pins its planes.
        /// IMPORTANT: You must call UnloadFrame after this method is called.
        /// For efficiency, unload as soon as possible.
        /// </summary>
        /// <param name="Frame">Encoder frame to fill. On an empty buffer or a
        /// bad frame the plane pointers and VideoSize are zeroed.</param>
        public void LoadToFrame(ref MediaFrame Frame)
        {
            try {
                TimedImage img = null;
                // Check-and-dequeue must be atomic: testing Count outside the lock
                // races with other consumers draining the list.
                lock (WaitingFrames) {
                    if (WaitingFrames.Count > 0)
                    {
                        WaitingFrames.RemoveAll(a => a == null);
                        WaitingFrames.Sort((a, b) => a.Seconds.CompareTo(b.Seconds));
                        if (WaitingFrames.Count > 0)     // RemoveAll may have emptied the list
                        {
                            img = WaitingFrames[0];
                            WaitingFrames.RemoveAt(0);
                        }
                    }
                }

                if (img == null)
                {
                    Frame.Yplane    = IntPtr.Zero;
                    Frame.Uplane    = IntPtr.Zero;
                    Frame.Vplane    = IntPtr.Zero;
                    Frame.VideoSize = 0;
                    Console.WriteLine("Frame buffer was empty (in ImageToYUV_Buffer.LoadToFrame())");
                    return;
                }

                if (img.Luma == null || img.Cr == null || img.Cb == null)
                {
                    // Incomplete frame: zero the planes rather than leaving
                    // stale pointers from a previous load in the caller's Frame.
                    Frame.Yplane    = IntPtr.Zero;
                    Frame.Uplane    = IntPtr.Zero;
                    Frame.Vplane    = IntPtr.Zero;
                    Frame.VideoSize = 0;
                    return;
                }

                Y = img.Luma;
                u = img.Cr;
                v = img.Cb;

                Frame.VideoSize       = (ulong)Y.Length;
                Frame.VideoSampleTime = img.Seconds;

                // Pin the managed planes so native encoder code can read them.
                // The caller releases these handles via UnloadFrame.
                pinY         = GCHandle.Alloc(Y, GCHandleType.Pinned);
                Frame.Yplane = pinY.AddrOfPinnedObject();

                pinU         = GCHandle.Alloc(u, GCHandleType.Pinned);
                Frame.Uplane = pinU.AddrOfPinnedObject();

                pinV         = GCHandle.Alloc(v, GCHandleType.Pinned);
                Frame.Vplane = pinV.AddrOfPinnedObject();
            } catch {
                // Drop the bad frame data:
                UnloadFrame(ref Frame);                 // this can still be sent to the encoder, it should just mean a dropped frame
                Console.WriteLine("Lost a frame (no image)");
            }
        }
 /// <summary>
 /// Scale the Y/Cr/Cb planes of Src into Dst at Dst's dimensions.
 /// The chroma planes are scaled to half the destination size in each
 /// dimension (presumably 4:2:0 subsampling — confirm against ScalePlane).
 /// </summary>
 private void RescaleBuffers(TimedImage Src, TimedImage Dst)
 {
     int chromaWidth = Dst.Width / 2;
     int chromaHeight = Dst.Height / 2;

     ScalePlane(Src.Luma, Dst.Luma, Src.Width, Src.Height, Dst.Width, Dst.Height, true);
     ScalePlane(Src.Cr, Dst.Cr, Src.Width, Src.Height, chromaWidth, chromaHeight, false);
     ScalePlane(Src.Cb, Dst.Cb, Src.Width, Src.Height, chromaWidth, chromaHeight, false);
 }
        /// <summary>
        /// Convert a captured YUV buffer into a scaled YUV buffer and queue it
        /// (time-ordered) on WaitingFrames for the encoder to pick up.
        /// No-op when no capture is available.
        /// </summary>
        public void RebufferCapturedFrames()
        {
            if (!WaitingCaptures.DataAvailable(this)) return;

            TimedImage captured = WaitingCaptures.Dequeue(this);
            if (captured == null) return;

            // Allocate the destination large enough for in-place scaling in
            // either direction (up or down).
            int allocWidth = Math.Max(width, captured.Width);
            int allocHeight = Math.Max(height, captured.Height);
            TimedImage scaled = new TimedImage(captured.Seconds, allocWidth, allocHeight);

            // Then mark the logical size as the target output size.
            scaled.Width = width;
            scaled.Height = height;

            RescaleBuffers(captured, scaled);

            lock (WaitingFrames)
            {
                WaitingFrames.Add(scaled);
                WaitingFrames.Sort((a, b) => a.Seconds.CompareTo(b.Seconds));
            }
        }
        /// <summary>
        /// Respond to capture event: queue a YUV copy of the frame and run any
        /// registered frame processors over both the source image and the buffer.
        /// This should return as fast as possible.
        /// </summary>
        /// <param name="sender">Event source (unused).</param>
        /// <param name="e">Carries the captured frame and its capture time.</param>
        public void HandleCapturedFrame(object sender, VideoDataEventArgs e)
        {
            if (e == null || e.Frame == null) return;

            TimedImage ti = new TimedImage(e.CaptureTime, width, height);

            try {
                if (ProcessorQueue != null) { // process the source image
                    foreach (var item in ProcessorQueue) {
                        item.ProcessFrame(e.Frame, e.CaptureTime);
                    }
                }
            } catch (Exception ex) {
                // Processors must never kill the capture path, but don't hide failures either.
                Console.WriteLine("Frame processor failed (source image): " + ex.Message);
            }

            // Add and fill under the same lock so consumers (which also lock
            // WaitingFrames before dequeuing) never observe a half-filled frame.
            lock (WaitingFrames) {
                WaitingFrames.Add(ti);
                ResampleBuffer(e.Frame, ref ti.Luma, ref ti.Cr, ref ti.Cb, width, height);
            }

            try {
                if (ProcessorQueue != null) { // process the YUV buffer
                    foreach (var item in ProcessorQueue) {
                        item.ProcessFrame(ti);
                    }
                }
            } catch (Exception ex) {
                Console.WriteLine("Frame processor failed (YUV buffer): " + ex.Message);
            }
        }