/// <summary>
/// Called by the VMR9 each time a new video frame must be drawn.
/// </summary>
/// <param name="dwUserID">Application-defined identifier for this VMR instance.</param>
/// <param name="lpPresInfo">Presentation details for the frame to draw.</param>
/// <returns>0 on success; otherwise a failure HRESULT.</returns>
public int PresentImage(IntPtr dwUserID, ref VMR9PresentationInfo lpPresInfo)
{
    lock (this)
    {
        try
        {
            // If a display change is in progress, the device would have to be
            // re-created on the adapter that now hosts the video window.
            if (NeedToHandleDisplayChange())
            {
                // NOTE: this piece of code is left as a user exercise.
                // The D3DDevice here needs to be switched
                // to the device that is using another adapter
            }

            return PresentHelper(lpPresInfo);
        }
        catch (DirectXException e)
        {
            return e.ErrorCode;
        }
        catch
        {
            return E_FAIL;
        }
    }
}
/// <summary>
/// Copies the VMR9-supplied surface into our private texture (when one exists)
/// and renders the scene, or renders directly from the cached texture map.
/// </summary>
/// <param name="lpPresInfo">Presentation info whose lpSurf holds the frame surface.</param>
/// <returns>0 on success; otherwise a failure HRESULT.</returns>
private int PresentHelper(VMR9PresentationInfo lpPresInfo)
{
    try
    {
        device.SetRenderTarget(0, renderTarget);

        if (privateTexture != null)
        {
            // The Surface wrapper takes ownership of the COM pointer and releases
            // it on Dispose, so balance that with an explicit AddRef first.
            Marshal.AddRef(lpPresInfo.lpSurf);
            using (Surface surface = new Surface(lpPresInfo.lpSurf))
            {
                // StretchRectangle also performs the pixel-format conversion.
                device.StretchRectangle(
                    surface,
                    new Rectangle(0, 0, surface.Description.Width, surface.Description.Height),
                    privateSurface,
                    new Rectangle(0, 0, privateSurface.Description.Width, privateSurface.Description.Height),
                    TextureFilter.None
                );
            }

            int hr = scene.DrawScene(device, privateTexture);
            if (hr < 0)
                return hr;
        }
        else
        {
            if (!textures.ContainsKey(lpPresInfo.lpSurf))
            {
                // BUG FIX: the original assigned E_FAIL here but then fell through
                // to device.Present() and returned 0, silently swallowing the
                // missing-texture error. Report the failure to the caller.
                return E_FAIL;
            }

            int hr = scene.DrawScene(device, textures[lpPresInfo.lpSurf] as Texture);
            if (hr < 0)
                return hr;
        }

        device.Present();
        return 0;
    }
    catch (DirectXException e)
    {
        return e.ErrorCode;
    }
    catch
    {
        return E_FAIL;
    }
}
/// <summary>
/// VMR9 presentation callback: draws the incoming frame, handling any
/// Direct3D failure by translating it into an HRESULT for the filter.
/// </summary>
/// <param name="dwUserID">Application-defined identifier for this VMR instance.</param>
/// <param name="lpPresInfo">Presentation details for the frame to draw.</param>
/// <returns>0 on success; otherwise a failure HRESULT.</returns>
public int PresentImage(IntPtr dwUserID, ref VMR9PresentationInfo lpPresInfo)
{
    lock (this)
    {
        int result;
        try
        {
            // If we are in the middle of a display change the device would need
            // to be switched to the adapter hosting the window.
            if (NeedToHandleDisplayChange())
            {
                // NOTE: this piece of code is left as a user exercise.
                // The D3DDevice here needs to be switched
                // to the device that is using another adapter
            }

            result = PresentHelper(lpPresInfo);
        }
        catch (DirectXException e)
        {
            result = e.ErrorCode;
        }
        catch
        {
            result = E_FAIL;
        }
        return result;
    }
}
/// <summary>
/// Redraws the scene using the newly presented video frame: copies the VMR9
/// surface into the private texture when one exists, otherwise renders from
/// the cached texture map.
/// </summary>
/// <param name="lpPresInfo">Presentation info whose lpSurf holds the frame surface.</param>
/// <returns>0 on success; otherwise a failure HRESULT.</returns>
private int PresentHelper(VMR9PresentationInfo lpPresInfo)
{
    try
    {
        OpenSebJ.eRender.device.SetRenderTarget(0, renderTarget);

        if (privateTexture != null)
        {
            // The Surface wrapper takes ownership of the COM pointer and releases
            // it on Dispose, so balance that with an explicit AddRef first.
            Marshal.AddRef(lpPresInfo.lpSurf);
            using (Surface surface = new Surface(lpPresInfo.lpSurf))
            {
                OpenSebJ.eRender.device.StretchRectangle(
                    surface,
                    new Rectangle(0, 0, surface.Description.Width, surface.Description.Height),
                    privateSurface,
                    new Rectangle(0, 0, privateSurface.Description.Width, privateSurface.Description.Height),
                    TextureFilter.None
                );
            }
            OpenSebJ.eRender.renderBackground(privateTexture);
        }
        else if (textures.ContainsKey(lpPresInfo.lpSurf))
        {
            OpenSebJ.eRender.renderBackground(textures[lpPresInfo.lpSurf] as Texture);
        }
        else
        {
            // BUG FIX: the original assigned E_FAIL here but then fell through to
            // device.Present() and returned 0, swallowing the error. (The dead
            // "if (hr < 0)" checks were also removed: hr was never assigned after
            // the DrawScene calls were commented out.)
            return E_FAIL;
        }

        OpenSebJ.eRender.device.Present();
        return 0;
    }
    catch (DirectXException e)
    {
        return e.ErrorCode;
    }
    catch
    {
        return E_FAIL;
    }
}
/// <summary>
/// Records the newly presented frame surface and raises NewFrameAvailable on
/// the UI thread, since presentation on screen may immediately follow.
/// </summary>
/// <param name="dwUserID">Application-defined identifier for this VMR instance.</param>
/// <param name="lpPresInfo">Presentation info whose lpSurf holds the frame surface.</param>
/// <returns>Always 0 (S_OK).</returns>
public int PresentImage(IntPtr dwUserID, ref VMR9PresentationInfo lpPresInfo)
{
    _currentFrameSurface = lpPresInfo.lpSurf;
    _frameChanged = true;

    // BUG FIX: snapshot the delegate so it cannot become null between the
    // null check and the invocation if a subscriber detaches concurrently.
    var handler = NewFrameAvailable;
    if (handler != null)
    {
        _imageConverter.Dispatcher.Invoke(() =>
        {
            // Re-read the event on the UI thread: the camera can be paused or
            // stopped before this delegate runs, in which case we must not raise.
            // The ?. form performs a single thread-safe read-and-invoke.
            NewFrameAvailable?.Invoke();
        });
    }
    return 0;
}
/// <summary>
/// The PresentImage method is called at precisely the moment this video frame should be presented.
/// </summary>
/// <param name="dwUserID">
/// An application-defined DWORD_PTR that uniquely identifies this instance of the VMR in scenarios when
/// multiple instances of the VMR are being used with a single instance of an allocator-presenter.
/// </param>
/// <param name="lpPresInfo">
/// Specifies a VMR9PresentationInfo structure that contains information about the video frame.
/// </param>
/// <returns>Returns an HRESULT</returns>
public int PresentImage(IntPtr dwUserID, ref VMR9PresentationInfo lpPresInfo)
{
    VMR9PresentationInfo info = lpPresInfo;

    try
    {
        int result = 0;
        lock (m_staticLock)
        {
            /* Test to see if our device was lost, if so fix it */
            TestRestoreLostDevice();

            if (m_privateSurface != null)
            {
                result = m_device.StretchRect(info.lpSurf, info.rcSrc, m_privateSurface, info.rcDst, 0);
            }

            if (result < 0)
                return result;
        }

        /* Notify our listeners we just got a new frame */
        InvokeNewAllocatorFrame();
        return 0;
    }
    catch (Exception)
    {
        // Any failure (including one raised while notifying listeners)
        // is reported to the VMR9 as a generic failure.
        return E_FAIL;
    }
}
/// <summary>
/// VMR9 presentation callback. When YUV mixing is active the frame must be
/// blitted into the private surface; StretchRectangle performs the pixel
/// format conversion along the way.
/// </summary>
/// <param name="dwUserID">Application-defined identifier for this VMR instance.</param>
/// <param name="lpPresInfo">Presentation details for the frame to draw.</param>
/// <returns>0 on success; otherwise a failure HRESULT.</returns>
public int PresentImage(IntPtr dwUserID, ref VMR9PresentationInfo lpPresInfo)
{
    lock (this)
    {
        try
        {
            if (needCopy)
            {
                // Same full-frame rectangle on both source and destination.
                Rectangle frameRect = new Rectangle(Point.Empty, videoSize);
                device.StretchRectangle(
                    videoSurface,
                    frameRect,
                    privateSurface,
                    frameRect,
                    TextureFilter.None
                );
            }
            // This presentation is a success
            return 0;
        }
        catch (DirectXException e)
        {
            // A Direct3D error occurred: report it back to the VMR9 filter.
            return e.ErrorCode;
        }
        catch
        {
            // Anything else maps to the generic failure HRESULT.
            return E_FAIL;
        }
    }
}
/// <summary>
/// VMR9 presentation callback: performs the frame copy when YUV mixing is
/// active, logging and translating any failure into an HRESULT.
/// </summary>
/// <param name="dwUserID">Application-defined identifier for this VMR instance.</param>
/// <param name="lpPresInfo">Presentation details for the frame to draw.</param>
/// <returns>0 on success; otherwise a failure HRESULT.</returns>
public int PresentImage(IntPtr dwUserID, ref VMR9PresentationInfo lpPresInfo)
{
    lock (this)
    {
        try
        {
            // A surface copy is only required when YUV mixing is activated.
            if (needCopy)
            {
                FrameRender();
            }
        }
        catch (DirectXException dex)
        {
            // A Direct3D error occurred: log it and notify the VMR9 filter.
            LogUtil.ExceptionLog.ErrorFormat("Caught DirectX Exception: {0}", dex.ToString());
            return dex.ErrorCode;
        }
        catch (Exception ex)
        {
            // Any other failure maps to the generic E_FAIL HRESULT.
            LogUtil.ExceptionLog.ErrorFormat("Caught Exception: {0}", ex.ToString());
            return E_FAIL;
        }

        // This presentation is a success.
        return 0;
    }
}