Example #1
        /// <summary>
        /// Renders the video image. This handler runs on a DispatcherTimer tick,
        /// continuously presenting the most recently decoded video frame while
        /// skipping frames that have already been rendered.
        /// </summary>
        /// <param name="sender">The sender.</param>
        /// <param name="e">The <see cref="EventArgs"/> instance containing the event data.</param>
        private void RenderVideoImage(object sender, EventArgs e)
        {
            MediaFramesExtractedDone.Wait(Constants.FrameExtractorWaitMs);
            var renderTime = RealtimeClock.PositionSeconds;

            try
            {
                var videoFrame = VideoFramesCache.GetFrame(renderTime, false);
                if (videoFrame == null || videoFrame == LastRenderedVideoFrame)
                {
                    return;
                }
                if (videoFrame.PictureBufferPtr != IntPtr.Zero)
                {
                    VideoRenderer.Lock();
                    Helper.NativeMethods.RtlMoveMemory(VideoRenderer.BackBuffer, videoFrame.PictureBufferPtr, videoFrame.PictureBufferLength);
                    VideoRenderer.AddDirtyRect(new Int32Rect(0, 0, VideoRenderer.PixelWidth, VideoRenderer.PixelHeight));
                    VideoRenderer.Unlock();
                    LastRenderedVideoFrame = videoFrame;
                }
            }
            finally
            {
                this.Position = renderTime;
            }
        }
Example #2
 public virtual VideoRenderer.I420Frame takeFrame(VideoRenderer.I420Frame source)
 {
     long desc = summarizeFrameDimensions(source);
     VideoRenderer.I420Frame dst = null;
     lock (availableFrames)
     {
         LinkedList<VideoRenderer.I420Frame> frames;
         if (!availableFrames.TryGetValue(desc, out frames))
         {
             frames = new LinkedList<VideoRenderer.I420Frame>();
             availableFrames[desc] = frames;
         }
         if (frames.Count > 0)
         {
             dst = frames.First.Value;
             frames.RemoveFirst();
         }
         else
         {
             dst = new VideoRenderer.I420Frame(source.Width, source.Height, source.YuvStrides.ToArray(), null);
         }
     }
     return dst;
 }
Example #3
 public void Dispose()
 {
     if (videoRenderer != null)
     {
         videoRenderer.Dispose();
         videoRenderer = null;
     }
 }
Example #4
 public void SetGameObject(GameObject go)
 {
     m_GameObject = go;
     if (m_GameObject != null)
     {
         videoRenderer = m_GameObject.GetComponent<VideoRenderer>();
     }
 }
Example #5
        public void Dispose()
        {
#if DRAW_SAMPLE_CONTENT
            if (videoRenderer != null)
            {
                videoRenderer.ReleaseDeviceDependentResources();
                videoRenderer.Dispose();
                videoRenderer = null;
            }
#endif
        }
Example #6
        public void SetHolographicSpace(HolographicSpace holographicSpace)
        {
            this.holographicSpace = holographicSpace;

            //
            // TODO: Add code here to initialize your content.
            //


            // Initialize the sample hologram.
            videoRenderer = new VideoRenderer(deviceResources);

            spatialInputHandler = new SpatialInputHandler();


            // Use the default SpatialLocator to track the motion of the device.
            locator = SpatialLocator.GetDefault();

            // Be able to respond to changes in the positional tracking state.
            locator.LocatabilityChanged += this.OnLocatabilityChanged;

            // Respond to camera added events by creating any resources that are specific
            // to that camera, such as the back buffer render target view.
            // When we add an event handler for CameraAdded, the API layer will avoid putting
            // the new camera in new HolographicFrames until we complete the deferral we created
            // for that handler, or return from the handler without creating a deferral. This
            // allows the app to take more than one frame to finish creating resources and
            // loading assets for the new holographic camera.
            // This function should be registered before the app creates any HolographicFrames.
            holographicSpace.CameraAdded += this.OnCameraAdded;

            // Respond to camera removed events by releasing resources that were created for that
            // camera.
            // When the app receives a CameraRemoved event, it releases all references to the back
            // buffer right away. This includes render target views, Direct2D target bitmaps, and so on.
            // The app must also ensure that the back buffer is not attached as a render target, as
            // shown in DeviceResources.ReleaseResourcesForBackBuffer.
            holographicSpace.CameraRemoved += this.OnCameraRemoved;

            // The simplest way to render world-locked holograms is to create a stationary reference frame
            // when the app is launched. This is roughly analogous to creating a "world" coordinate system
            // with the origin placed at the device's position as the app is launched.
            referenceFrame = locator.CreateStationaryFrameOfReferenceAtCurrentLocation();

            // Notes on spatial tracking APIs:
            // * Stationary reference frames are designed to provide a best-fit position relative to the
            //   overall space. Individual positions within that reference frame are allowed to drift slightly
            //   as the device learns more about the environment.
            // * When precise placement of individual holograms is required, a SpatialAnchor should be used to
            //   anchor the individual hologram to a position in the real world - for example, a point the user
            //   indicates to be of special interest. Anchor positions do not drift, but can be corrected; the
            //   anchor will use the corrected position starting in the next frame after the correction has
            //   occurred.
        }
Example #7
 public virtual void returnFrame(VideoRenderer.I420Frame frame)
 {
     long desc = summarizeFrameDimensions(frame);
     lock (availableFrames)
     {
         LinkedList<VideoRenderer.I420Frame> frames;
         if (!availableFrames.TryGetValue(desc, out frames))
         {
             throw new System.ArgumentException("Unexpected frame dimensions");
         }
         frames.AddFirst(frame);
     }
 }
Example #8
        private void TestEm()
        {
            int hr;

            ISeekingPassThru spt = new SeekingPassThru() as ISeekingPassThru;

            IBaseFilter ibf = new VideoRenderer() as IBaseFilter;

            IPin pPin = DsFindPin.ByDirection(ibf, PinDirection.Input, 0);

            hr = spt.Init(true, pPin);
            DsError.ThrowExceptionForHR(hr);
        }
Example #9
        private void Session_StreamReceived(object sender, Session.StreamEventArgs e)
        {
            Console.WriteLine("Session stream received");
            VideoRenderer renderer = new VideoRenderer();

            SubscriberGrid.Children.Add(renderer);
            UpdateGridSize(SubscriberGrid.Children.Count);
            Subscriber subscriber = new Subscriber(Context.Instance, e.Stream, renderer);

            SubscriberByStream.Add(e.Stream, subscriber);

            try
            {
                Session.Subscribe(subscriber);
            }
            catch (OpenTokException ex)
            {
                Console.WriteLine("OpenTokException " + ex.ToString());
            }
        }
Example #10
        /// <summary>
        /// Constructor for the Application object.
        /// </summary>
        public App()
        {
            // Global handler for uncaught exceptions.
            UnhandledException += Application_UnhandledException;

            // Standard XAML initialization
            InitializeComponent();

            // Phone-specific initialization
            InitializePhoneApplication();

            // Language display initialization
            InitializeLanguage();

            // Show graphics profiling information while debugging.
            if (Debugger.IsAttached)
            {
                // Display the current frame rate counters.
                Application.Current.Host.Settings.EnableFrameRateCounter = true;

                // Show the areas of the app that are being redrawn in each frame.
                //Application.Current.Host.Settings.EnableRedrawRegions = true;

                // Enable non-production analysis visualization mode,
                // which shows areas of a page that are handed off to GPU with a colored overlay.
                //Application.Current.Host.Settings.EnableCacheVisualization = true;

                // Prevent the screen from turning off while under the debugger by disabling
                // the application's idle detection.
                // Caution: use this in debug mode only. An application that disables
                // user idle detection will continue to run and consume battery power
                // when the user is not using the phone.
                PhoneApplicationService.Current.UserIdleDetectionMode = IdleDetectionMode.Disabled;
            }

            tester = new Mediastreamer2TesterNative();
            videoRenderer = new VideoRenderer();
            suite = null;
        }
Example #12
        public RenderVideo()
        {
            SetStyle(
                ControlStyles.Opaque | ControlStyles.UserPaint | ControlStyles.ResizeRedraw | ControlStyles.AllPaintingInWmPaint,
                true);

            var allocator = new AllocatorPresenter();

            _allocator            = allocator;
            _videoLayer           = new VideoRenderer(allocator);
            _osdLayer             = new OsdRenderer(allocator);
            _iconLayer            = new IconRenderer(allocator);
            _videoLayer.IsVisible = true;

            _allocator.Register(_videoLayer);
            _allocator.Register(_osdLayer);
            _allocator.Register(_iconLayer);

            _allocator.PresentCompleted += AllocatorPresenter_OnPresentCompleted;

            IsSyncSupported = true;
            VideoFilter     = VideoFilter.None;
            ScaleMode       = ScaleMode.FixedPixelSize;
        }
Example #13
 public void RenderFrame(VideoRenderer.I420Frame frame)
 {
     view.queueFrame(stream, frame);
 }
Example #14
 // Return a code summarizing the dimensions of |frame|.  Two frames that
 // return the same summary are guaranteed to be able to store each other's
 // contents.  Used like GetHashCode(), but we need all the bits of a long
 // to do a good job, and GetHashCode() returns an int, so we do this.
 private static long summarizeFrameDimensions(VideoRenderer.I420Frame frame)
 {
     long ret = frame.Width;
     ret = ret * MAX_DIMENSION + frame.Height;
     ret = ret * MAX_DIMENSION + frame.YuvStrides[0];
     ret = ret * MAX_DIMENSION + frame.YuvStrides[1];
     ret = ret * MAX_DIMENSION + frame.YuvStrides[2];
     return ret;
 }
Example #15
        /// <summary> Build the capture graph for the sample grabber. </summary>
        private void SetupGraph(DsDevice dev, int iWidth, int iHeight, short iBPP, Control hControl)
        {
            int hr;

            ISampleGrabber sampGrabber = null;
            IBaseFilter    capFilter   = null;
            IPin           pCaptureOut = null;
            IPin           pSampleIn   = null;
            IPin           pRenderIn   = null;

            // Get the graphbuilder object
            m_FilterGraph = new FilterGraph() as IFilterGraph2;

            try
            {
#if DEBUG
                m_rot = new DsROTEntry(m_FilterGraph);
#endif
                // add the video input device
                hr = m_FilterGraph.AddSourceFilterForMoniker(dev.Mon, null, dev.Name, out capFilter);
                DsError.ThrowExceptionForHR(hr);

                m_CamControl = (IAMCameraControl)capFilter;

                // Didn't find one.  Is there a preview pin?
                if (m_pinStill == null)
                {
                    m_pinStill = DsFindPin.ByCategory(capFilter, PinCategory.Preview, 0);
                }

                // Still haven't found one.  Need to put a splitter in so we have
                // one stream to capture the bitmap from, and one to display.  Ok, we
                // don't *have* to do it that way, but we are going to anyway.
                if (m_pinStill == null)
                {
                    IPin pRaw   = null;
                    IPin pSmart = null;

                    // There is no still pin
                    m_VidControl = null;

                    // Add a splitter
                    IBaseFilter iSmartTee = (IBaseFilter) new SmartTee();

                    try
                    {
                        hr = m_FilterGraph.AddFilter(iSmartTee, "SmartTee");
                        DsError.ThrowExceptionForHR(hr);

                        // Find the capture pin from the video device and the
                        // input pin for the splitter, and connect them
                        pRaw   = DsFindPin.ByCategory(capFilter, PinCategory.Capture, 0);
                        pSmart = DsFindPin.ByDirection(iSmartTee, PinDirection.Input, 0);

                        hr = m_FilterGraph.Connect(pRaw, pSmart);
                        DsError.ThrowExceptionForHR(hr);

                        // Now set the capture and still pins (from the splitter)
                        m_pinStill  = DsFindPin.ByName(iSmartTee, "Preview");
                        pCaptureOut = DsFindPin.ByName(iSmartTee, "Capture");

                        // If any of the default config items are set, perform the config
                        // on the actual video device (rather than the splitter)
                        if (iHeight + iWidth + iBPP > 0)
                        {
                            SetConfigParms(pRaw, iWidth, iHeight, iBPP);
                        }
                    }
                    finally
                    {
                        if (pRaw != null)
                        {
                            Marshal.ReleaseComObject(pRaw);
                        }
                        if (pSmart != null)
                        {
                            Marshal.ReleaseComObject(pSmart);
                        }
                        if (iSmartTee != null)
                        {
                            Marshal.ReleaseComObject(iSmartTee);
                        }
                    }
                }
                else
                {
                    // Get a control pointer (used in Click())
                    m_VidControl = capFilter as IAMVideoControl;

                    pCaptureOut = DsFindPin.ByCategory(capFilter, PinCategory.Capture, 0);

                    // If any of the default config items are set
                    if (iHeight + iWidth + iBPP > 0)
                    {
                        SetConfigParms(m_pinStill, iWidth, iHeight, iBPP);
                    }
                }

                // Get the SampleGrabber interface
                sampGrabber = new SampleGrabber() as ISampleGrabber;

                // Configure the sample grabber
                IBaseFilter baseGrabFlt = sampGrabber as IBaseFilter;
                ConfigureSampleGrabber(sampGrabber);
                pSampleIn = DsFindPin.ByDirection(baseGrabFlt, PinDirection.Input, 0);

                // Get the default video renderer
                // IBaseFilter pRenderer = new VideoRendererDefault() as IBaseFilter;
                IBaseFilter pRenderer = new VideoRenderer() as IBaseFilter;
                hr = m_FilterGraph.AddFilter(pRenderer, "Renderer");
                DsError.ThrowExceptionForHR(hr);

                pRenderIn = DsFindPin.ByDirection(pRenderer, PinDirection.Input, 0);

                // Add the sample grabber to the graph
                hr = m_FilterGraph.AddFilter(baseGrabFlt, "Ds.NET Grabber");
                DsError.ThrowExceptionForHR(hr);

                if (m_VidControl == null)
                {
                    // Smart Tee

                    // Connect the Still pin to the sample grabber
                    hr = m_FilterGraph.Connect(m_pinStill, pSampleIn);
                    DsError.ThrowExceptionForHR(hr);

                    // Connect the capture pin to the renderer
                    hr = m_FilterGraph.Connect(pCaptureOut, pRenderIn);
                    DsError.ThrowExceptionForHR(hr);
                }
                else
                {
                    // Still Image

                    // Connect the capture pin to the renderer
                    hr = m_FilterGraph.Connect(pCaptureOut, pRenderIn);
                    DsError.ThrowExceptionForHR(hr);

                    // Connect the Still pin to the sample grabber
                    hr = m_FilterGraph.Connect(m_pinStill, pSampleIn);
                    DsError.ThrowExceptionForHR(hr);
                }

                // Learn the video properties
                SaveSizeInfo(sampGrabber);
                ConfigVideoWindow(hControl);

                // Start the graph
                IMediaControl mediaCtrl = m_FilterGraph as IMediaControl;
                hr = mediaCtrl.Run();
                DsError.ThrowExceptionForHR(hr);
            }
            finally
            {
                if (sampGrabber != null)
                {
                    Marshal.ReleaseComObject(sampGrabber);
                    sampGrabber = null;
                }
                if (pCaptureOut != null)
                {
                    Marshal.ReleaseComObject(pCaptureOut);
                    pCaptureOut = null;
                }
                if (pRenderIn != null)
                {
                    Marshal.ReleaseComObject(pRenderIn);
                    pRenderIn = null;
                }
                if (pSampleIn != null)
                {
                    Marshal.ReleaseComObject(pSampleIn);
                    pSampleIn = null;
                }
            }
        }
Example #16
            public Participant(uint objectId, SktSkype skype)
                : base(objectId, skype)
            {
                gui = new Panel()
                {
                    Width = panelWidth,
                    Height = panelHeight,
                    BorderStyle = BorderStyle.FixedSingle,
                    BackColor = SystemColors.GradientInactiveCaption
                };

                pic = new VideoRenderer(skypeRef)
                {
                    Top = 5,
                    Left = 5,
                    Height = picHeight,
                    Width = piclWidth,
                    SizeMode = PictureBoxSizeMode.StretchImage
                };
                gui.Controls.Add(pic);

                nameLabel = new Label()
                {
                    Top = picHeight + 5,
                    Left = 10,
                    Height = 11,
                    Width = panelWidth - 20,
                    Text = "<n/a>",
                    TextAlign = ContentAlignment.TopCenter,
                    AutoSize = false
                };
                gui.Controls.Add(nameLabel);

                voiceVolume = new ProgressBar()
                {
                    Top = picHeight + 17,
                    Left = 7,
                    Height = 10,
                    Width = panelWidth - 20,
                    Value = 0,
                    Maximum = 10,
                };
                gui.Controls.Add(voiceVolume);
            }
Example #17
 /// <summary>
 /// Validate that |frame| can be managed by the pool. </summary>
 public static bool validateDimensions(VideoRenderer.I420Frame frame)
 {
     return frame.Width < MAX_DIMENSION &&
            frame.Height < MAX_DIMENSION &&
            frame.YuvStrides[0] < MAX_DIMENSION &&
            frame.YuvStrides[1] < MAX_DIMENSION &&
            frame.YuvStrides[2] < MAX_DIMENSION;
 }
Example #18
        public void RendersAndSendsFrameUsingD3D11()
        {
            bool isWindows = RuntimeInformation.IsOSPlatform(OSPlatform.Windows);
            bool hasNvEnc  = PeerConnection.SupportsHardwareTextureEncoding;

            if (isWindows && hasNvEnc)
            {
                PeerConnection.Configure(new GlobalOptions
                {
                    UseFakeDecoders  = true,
                    LogToDebugOutput = false,
                    MinimumLogLevel  = TraceLevel.Info
                });

                using (var sender = new ObservablePeerConnection(new PeerConnectionOptions()))
                    using (var receiver = new ObservablePeerConnection(new PeerConnectionOptions {
                        CanReceiveVideo = true
                    }))
                        using (var vt = new ObservableVideoTrack(sender, VideoEncoderOptions.OptimizedFor(320, 240, 10)))
                        {
                            using (var rnd = new VideoRenderer(vt, new RendererOptions {
                                VideoFrameQueueSize = 2
                            }))
                            {
                                // Wait until sender and receiver are connected,
                                // signaling is complete,
                                // and video track is added.

                                // TODO: When using tasks for this, this test hangs when disposing!

                                // ReSharper disable once InvokeAsExtensionMethod
                                //var ready = Observable.Zip(
                                //    receiver.ConnectionStateStream.FirstAsync(s => s == ConnectionState.Connected),
                                //    sender.ConnectionStateStream.FirstAsync(s => s == ConnectionState.Connected),
                                //    receiver.SignalingStateStream.FirstAsync(s => s == SignalingState.Stable),
                                //    sender.SignalingStateStream.FirstAsync(s => s == SignalingState.Stable),
                                //    receiver.RemoteTrackChangeStream.FirstAsync(
                                //        c => !string.IsNullOrEmpty(c.TransceiverMid) &
                                //             c.MediaKind == TrackMediaKind.Video &&
                                //             c.ChangeKind == TrackChangeKind.Changed),
                                //    (a, b, c, d, e) => true);
                                //// Wait until connected and video track is ready.
                                //var ev = new AutoResetEvent(false);
                                //ready.Subscribe(_ => ev.Set());

                                receiver.Connect(
                                    Observable.Never<DataMessage>(),
                                    sender.LocalSessionDescriptionStream,
                                    sender.LocalIceCandidateStream);

                                sender.Connect(
                                    Observable.Never<DataMessage>(),
                                    receiver.LocalSessionDescriptionStream,
                                    receiver.LocalIceCandidateStream);

                                sender.CreateOffer();

                                int remoteVideoFrameReceivedCount = 0;

                                receiver.RemoteVideoFrameReceived += (pc, frame) =>
                                {
                                    remoteVideoFrameReceivedCount += 1;
                                };

                                // The remote peer connection is not immediately ready to receive frames,
                                // so we keep sending until it succeeds.
                                // TODO: Figure out what webrtc event can be used for this.
                                while (remoteVideoFrameReceivedCount == 0)
                                {
                                    using (rnd.TakeNextFrameForSending())
                                    {
                                    }
                                }

                                // Continue sending until the video queue is empty
                                while (rnd.VideoFrameQueueCount > 0)
                                {
                                    using (rnd.TakeNextFrameForSending())
                                    {
                                    }
                                }
                            }

                            // The video renderer is now disposed while the video track is still encoding some textures
                            // This should not crash.
                            // We need to wait a while before disposing the video-track and peer-connection to check this.
                            Thread.Sleep(100);
                        }
            }
        }
Example #19
 public VideoEaser(VideoRenderer renderer) : base(renderer.MainRenderer)
 {
     VideoRenderer = renderer;
 }
Example #20
 // Upload the YUV planes from |frame| to |textures|.
 private void texImage2D(VideoRenderer.I420Frame frame, int[] textures)
 {
     for (int i = 0; i < 3; ++i)
     {
         ByteBuffer plane = frame.YuvPlanes[i];
         GLES20.GlActiveTexture(GLES20.GlTexture0 + i);
         GLES20.GlBindTexture(GLES20.GlTexture2d, textures[i]);
         int w = i == 0 ? frame.Width : frame.Width / 2;
         int h = i == 0 ? frame.Height : frame.Height / 2;
         abortUnless(w == frame.YuvStrides[i], frame.YuvStrides[i] + "!=" + w);
         GLES20.GlTexImage2D(GLES20.GlTexture2d, 0, GLES20.GlLuminance, w, h, 0, GLES20.GlLuminance, GLES20.GlUnsignedByte, plane);
     }
     checkNoGLES2Error();
 }