// IMFSourceReaderCallback methods
//-------------------------------------------------------------------
// OnReadSample
//
// Called when the IMFSourceReader::ReadSample method completes.
//-------------------------------------------------------------------
public int OnReadSample(int hrStatus, int dwStreamIndex, MF_SOURCE_READER_FLAG dwStreamFlags, long llTimestamp, IMFSample pSample)
{
    var hr = hrStatus;
    IMFMediaBuffer pBuffer = null;

    lock (LockSync)
    {
        try
        {
            if (pSample != null)
            {
                // Get the video frame buffer from the sample.
                if (Succeeded(hr))
                {
                    hr = pSample.GetBufferByIndex(0, out pBuffer);
                }

                // Hand the frame to the application.
                if (Succeeded(hr))
                {
                    hr = OnFrame(pSample, pBuffer, llTimestamp, snapFormat);
                    snapFormat = string.Empty;
                }
            }

            // Request the next frame.
            if (Succeeded(hr))
            {
                hr = PReader.ReadSample(
                    (int)MF_SOURCE_READER.FirstVideoStream,
                    0,
                    IntPtr.Zero,    // actual stream index
                    IntPtr.Zero,    // stream flags
                    IntPtr.Zero,    // time stamp
                    IntPtr.Zero     // sample
                    );
            }

            if (Failed(hr))
            {
                NotifyError(hr);
            }
        }
        finally
        {
            SafeRelease(pBuffer);
            SafeRelease(pSample);
        }
    }

    return hr;
}
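The callback above only fires if the source reader was created in asynchronous mode. The following is a minimal setup sketch, not part of the example itself: it assumes the MediaFoundation.NET MFExtern wrappers and reuses the PReader field and the Succeeded/Failed/NotifyError helpers from the example, while StartCapture and mediaSource are hypothetical names introduced here for illustration.

private void StartCapture(IMFMediaSource mediaSource)
{
    IMFAttributes attributes;
    IMFSourceReader reader = null;

    var hr = MFExtern.MFCreateAttributes(out attributes, 1);

    if (Succeeded(hr))
    {
        // Route completed reads to this object's OnReadSample.
        hr = attributes.SetUnknown(MFAttributesClsid.MF_SOURCE_READER_ASYNC_CALLBACK, this);
    }

    if (Succeeded(hr))
    {
        hr = MFExtern.MFCreateSourceReaderFromMediaSource(mediaSource, attributes, out reader);
    }

    if (Succeeded(hr))
    {
        PReader = reader;

        // Kick off the first read; OnReadSample requests every subsequent frame.
        hr = PReader.ReadSample(
            (int)MF_SOURCE_READER.FirstVideoStream,
            0,
            IntPtr.Zero, IntPtr.Zero, IntPtr.Zero, IntPtr.Zero);
    }

    if (Failed(hr))
    {
        NotifyError(hr);
    }
}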
// IMFSourceReaderCallback methods
//-------------------------------------------------------------------
// OnReadSample
//
// Called when the IMFSourceReader::ReadSample method completes.
//-------------------------------------------------------------------
public HResult OnReadSample(HResult hrStatus, int dwStreamIndex, MF_SOURCE_READER_FLAG dwStreamFlags, long llTimestamp, IMFSample pSample)
{
    HResult hr = hrStatus;
    IMFMediaBuffer pBuffer = null;

    lock (this)
    {
        try
        {
            if (Succeeded(hr))
            {
                if (pSample != null)
                {
                    // Get the video frame buffer from the sample.
                    hr = pSample.GetBufferByIndex(0, out pBuffer);

                    // Draw the frame.
                    if (Succeeded(hr))
                    {
                        hr = m_draw.DrawFrame(pBuffer);
                    }
                }
            }

            // Request the next frame.
            if (Succeeded(hr))
            {
                hr = m_pReader.ReadSample(
                    (int)MF_SOURCE_READER.FirstVideoStream,
                    0,
                    IntPtr.Zero,    // actual stream index
                    IntPtr.Zero,    // stream flags
                    IntPtr.Zero,    // time stamp
                    IntPtr.Zero     // sample
                    );
            }

            if (Failed(hr))
            {
                NotifyError(hr);
            }
        }
        finally
        {
            //SafeRelease(pBuffer);
            SafeRelease(pSample);
        }
    }

    return hr;
}
//-------------------------------------------------------------------
// Name: WriteSampleToFile
// Description: Output one media sample to the file.
//-------------------------------------------------------------------
void WriteSampleToFile(IMFSample pSample)
{
    int hr;
    long time;
    int cBufferCount;          // Number of buffers in the sample.
    IntPtr pData;
    int cbMaxLength;
    int cbData = 0;
    int cbWritten = 0;

    // Get the time stamp.
    hr = pSample.GetSampleTime(out time);
    MFError.ThrowExceptionForHR(hr);

    // If the time stamp is too early, just discard this sample.
    if (time < m_StartTime)
    {
        return;
    }

    // Note: If there is no time stamp on the sample, proceed anyway.

    // Find out how many buffers are in this sample.
    hr = pSample.GetBufferCount(out cBufferCount);
    MFError.ThrowExceptionForHR(hr);

    // Loop through all the buffers in the sample.
    for (int iBuffer = 0; iBuffer < cBufferCount; iBuffer++)
    {
        IMFMediaBuffer pBuffer = null;

        hr = pSample.GetBufferByIndex(iBuffer, out pBuffer);
        MFError.ThrowExceptionForHR(hr);

        try
        {
            // Lock the buffer and write the data to the file.
            hr = pBuffer.Lock(out pData, out cbMaxLength, out cbData);
            MFError.ThrowExceptionForHR(hr);

            hr = m_pByteStream.Write(pData, cbData, out cbWritten);
            MFError.ThrowExceptionForHR(hr);

            hr = pBuffer.Unlock();
            MFError.ThrowExceptionForHR(hr);

            // Update the running tally of bytes written.
            m_cbDataWritten += cbData;
        }
        finally
        {
            SafeRelease(pBuffer);
        }
    }
}
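When per-buffer handling is not needed, IMFSample::ConvertToContiguousBuffer can coalesce all of a sample's buffers into one and the data can be written with a single call. The sketch below is an illustrative alternative, not part of the original sample; it assumes the same m_pByteStream, m_cbDataWritten, and SafeRelease members used above, and the method name WriteSampleContiguous is introduced here.

void WriteSampleContiguous(IMFSample pSample)
{
    IMFMediaBuffer pBuffer = null;

    // Coalesce all buffers in the sample into one contiguous buffer.
    int hr = pSample.ConvertToContiguousBuffer(out pBuffer);
    MFError.ThrowExceptionForHR(hr);

    try
    {
        IntPtr pData;
        int cbMaxLength;
        int cbData;
        int cbWritten;

        hr = pBuffer.Lock(out pData, out cbMaxLength, out cbData);
        MFError.ThrowExceptionForHR(hr);
        try
        {
            // Write the whole payload in one call.
            hr = m_pByteStream.Write(pData, cbData, out cbWritten);
            MFError.ThrowExceptionForHR(hr);
        }
        finally
        {
            pBuffer.Unlock();
        }

        m_cbDataWritten += cbData;
    }
    finally
    {
        SafeRelease(pBuffer);
    }
}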
//-----------------------------------------------------------------------------
// PresentSample
//
// Presents a video frame.
//
// pSample:  Pointer to the sample that contains the surface to present. If
//           this parameter is NULL, the method paints a black rectangle.
// llTarget: Target presentation time.
//
// This method is called by the scheduler and/or the presenter.
//-----------------------------------------------------------------------------
public void PresentSample(IMFSample pSample, long llTarget)
{
    HResult hr;
    IMFMediaBuffer pBuffer = null;
    IDirect3DSurface9 pSurface = null;
    IDirect3DSwapChain9 pSwapChain = null;
    object o;

    try
    {
        if (pSample != null)
        {
            // Get the buffer from the sample.
            hr = pSample.GetBufferByIndex(0, out pBuffer);
            MFError.ThrowExceptionForHR(hr);

            // Get the surface from the buffer.
            hr = MFExtern.MFGetService(pBuffer, MFServices.MR_BUFFER_SERVICE, typeof(IDirect3DSurface9).GUID, out o);
            MFError.ThrowExceptionForHR(hr);
            pSurface = o as IDirect3DSurface9;
        }
        else if (m_pSurfaceRepaint != null)
        {
            // Redraw from the last surface.
            pSurface = m_pSurfaceRepaint;
        }

        if (pSurface != null)
        {
            // Get the swap chain from the surface.
            pSurface.GetContainer(typeof(IDirect3DSwapChain9).GUID, out o);
            pSwapChain = o as IDirect3DSwapChain9;

            // Present the swap chain.
            PresentSwapChain(pSwapChain, pSurface);

            // Store this pointer in case we need to repaint the surface.
            if (m_pSurfaceRepaint != pSurface)
            {
                SafeRelease(m_pSurfaceRepaint);
                m_pSurfaceRepaint = pSurface;
            }
        }
        else
        {
            // No surface. All we can do is paint a black rectangle.
            PaintFrameWithGDI();
        }
    }
    catch (Exception e)
    {
        hr = (HResult)Marshal.GetHRForException(e);

        if (hr == (HResult)D3DError.DeviceLost || hr == (HResult)D3DError.DeviceNotReset || hr == (HResult)D3DError.DeviceHung)
        {
            // We failed because the device was lost. Fill the destination rectangle.
            PaintFrameWithGDI();

            // Ignore. We need to reset or re-create the device, but this method
            // is probably being called from the scheduler thread, which is not the
            // same thread that created the device. The Reset(Ex) method must be
            // called from the thread that created the device.

            // The presenter will detect the state when it calls CheckDeviceState()
            // on the next sample.
        }
    }
    finally
    {
        SafeRelease(pSwapChain);
        pSwapChain = null;

        //SafeRelease(pSurface);
        pSurface = null;

        SafeRelease(pBuffer);
        pBuffer = null;
    }
}
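PaintFrameWithGDI is referenced above but not shown. A minimal sketch of such a fallback follows; it assumes a System.Drawing reference and a window-handle field on the presenter (here called m_hwnd), which is not part of the original listing.

private void PaintFrameWithGDI()
{
    // Fall back to GDI: fill the video window's client area with black when no
    // Direct3D surface is available or the device has been lost.
    using (Graphics g = Graphics.FromHwnd(m_hwnd))
    {
        g.Clear(Color.Black);
    }
}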
private void CaptureStillImages(MediaItem item)
{
    using (var releaser = new ComReleaser())
    {
        MF.CreateVideoDeviceSource(item.DeviceItem.SymLink, out IMFMediaSource source);
        releaser.Add(source);

        source.CreatePresentationDescriptor(out IMFPresentationDescriptor presDesc);
        releaser.Add(presDesc);

        presDesc.GetStreamDescriptorByIndex(item.DescIndex, out bool selected, out IMFStreamDescriptor strmDesc);
        releaser.Add(strmDesc);

        strmDesc.GetMediaTypeHandler(out IMFMediaTypeHandler handler);
        releaser.Add(handler);

        handler.GetMediaTypeByIndex(item.TypeIndex, out IMFMediaType type);
        handler.SetCurrentMediaType(type);

        MF.CreateSourceReaderFromMediaSource(source, out IMFSourceReader reader);
        if (reader == null)
        {
            return;
        }
        releaser.Add(reader);

        IMFTransform transform = null;
        MFTOutputDataBuffer[] outSamples = null;
        IMFSample outRgb24Sample = null;
        IMFMediaBuffer outRgb24Buffer = null;

        int rgbSize = item.Width * item.Height * 3;
        var needToConvert = item.SubType != MFMediaType.RGB24;
        if (needToConvert)
        {
            // The native format is not RGB24, so run each sample through the
            // Video Processor MFT to convert it.
            var processor = new VideoProcessorMFT();
            releaser.Add(processor);

            transform = (IMFTransform)processor;
            HR(transform.SetInputType(0, type, MFTSetTypeFlags.None));

            var rgbMediaType = MF.CreateMediaType();
            releaser.Add(rgbMediaType);
            HR(type.CopyAllItems(rgbMediaType));
            HR(rgbMediaType.SetGUID(MFAttributesClsid.MF_MT_SUBTYPE, MFMediaType.RGB24));
            HR(rgbMediaType.SetUINT32(MFAttributesClsid.MF_MT_DEFAULT_STRIDE, 3 * item.Width));
            HR(rgbMediaType.SetUINT32(MFAttributesClsid.MF_MT_SAMPLE_SIZE, rgbSize));
            HR(transform.SetOutputType(0, rgbMediaType, MFTSetTypeFlags.None));

            // Pre-allocate the sample and buffer that receive the converted frame.
            outSamples = new MFTOutputDataBuffer[1];
            outSamples[0] = new MFTOutputDataBuffer();

            outRgb24Sample = MF.CreateSample();
            releaser.Add(outRgb24Sample);
            outRgb24Buffer = MF.CreateMemoryBuffer(rgbSize);
            releaser.Add(outRgb24Buffer);
            outRgb24Sample.AddBuffer(outRgb24Buffer);
            outSamples[0].pSample = Marshal.GetIUnknownForObject(outRgb24Sample);
        }

        int frames = 0;

        while (true)
        {
            var hrRS = reader.ReadSample(
                (int)MF_SOURCE_READER.AnyStream,
                MF_SOURCE_READER_CONTROL_FLAG.None,
                out int streamIndex,
                out MF_SOURCE_READER_FLAG flags,
                out long timeStamp,
                out IMFSample sample
                );

            // Stop if the stream has ended.
            if ((flags & MF_SOURCE_READER_FLAG.EndOfStream) != 0)
            {
                break;
            }

            if (sample == null)
            {
                continue;
            }

            try
            {
                if (transform != null)
                {
                    // Feed the native-format sample in and drain the converted frame out.
                    transform.ProcessInput(0, sample, 0);

                    while (true)
                    {
                        var hrPO = transform.ProcessOutput(
                            MFTProcessOutputFlags.None,
                            1,
                            outSamples,
                            out ProcessOutputStatus status
                            );

                        if (hrPO.Succeeded())
                        {
                            ConsumeBuffer(outRgb24Buffer, item);
                            frames++;
                            // The sample is released in the finally block below.
                            return;
                        }

                        // Typically the MFT needs more input before it can produce output.
                        break;
                    }

                    continue;
                }

                // Already RGB24: consume the sample's buffer directly.
                sample.GetBufferByIndex(0, out IMFMediaBuffer buff);
                if (ConsumeBuffer(buff, item))
                {
                    frames++;
                }
                else
                {
                    return;
                }
            }
            finally
            {
                Marshal.ReleaseComObject(sample);
            }

            break;
        }
    }
}
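ConsumeBuffer is not part of the listing above. A minimal sketch of what such a helper might do follows; SaveFrame is a hypothetical sink introduced here for illustration, and returning false stops the capture loop after one frame, which matches how the caller treats the return value.

private bool ConsumeBuffer(IMFMediaBuffer buffer, MediaItem item)
{
    // Lock the RGB24 buffer and copy the raw pixels into managed memory.
    buffer.Lock(out IntPtr pData, out int maxLength, out int currentLength);
    try
    {
        var pixels = new byte[currentLength];
        Marshal.Copy(pData, pixels, 0, currentLength);

        // Hand the item.Width x item.Height RGB24 frame to the application
        // (save to disk, wrap it in a bitmap, etc.). SaveFrame is hypothetical.
        SaveFrame(pixels, item);

        // Returning true would keep the capture loop reading more frames.
        return false;
    }
    finally
    {
        buffer.Unlock();
    }
}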
/// <summary>
/// Called when a pending IMFSourceReader.ReadSample call completes
/// (assuming the SourceReader was given this class during setup via the
/// MFAttributesClsid.MF_SOURCE_READER_ASYNC_CALLBACK attribute).
/// The first ReadSample triggers it; after that it keeps itself going by
/// requesting the next sample at the end of each callback.
/// </summary>
/// <param name="hrStatus">The status code. If an error occurred while processing the next sample, this parameter contains the error code.</param>
/// <param name="streamIndex">The zero-based index of the stream that delivered the sample.</param>
/// <param name="streamFlags">A bitwise OR of zero or more flags from the MF_SOURCE_READER_FLAG enumeration.</param>
/// <param name="sampleTimeStamp">The time stamp of the sample, or the time of the stream event indicated in streamFlags, in 100-nanosecond units.</param>
/// <param name="mediaSample">A pointer to the IMFSample interface of a media sample. This parameter might be NULL.</param>
/// <returns>Returns an HRESULT value. Reputedly, the source reader ignores the return value.</returns>
public HResult OnReadSample(HResult hrStatus, int streamIndex, MF_SOURCE_READER_FLAG streamFlags, long sampleTimeStamp, IMFSample mediaSample)
{
    HResult hr = HResult.S_OK;

    if (streamIndex == 0)
    {
        Console.WriteLine(streamFlags);
    }

    try
    {
        lock (this)
        {
            // Only process the sample if the read itself succeeded. The sample can
            // be null (for example on the first callback after the ReadSample that
            // triggered this), in which case we just request the next one.
            if (Succeeded(hrStatus) && mediaSample != null)
            {
                try
                {
                    if (!HandlingImage)
                    {
                        HandlingImage = true;

                        // Copy the encoded frame data out of the sample's buffer.
                        mediaSample.GetBufferByIndex(0, out IMFMediaBuffer buffer);
                        buffer.Lock(out IntPtr bufPointer, out int maxLength, out int currLength);
                        byte[] managedArray = new byte[currLength];
                        Marshal.Copy(bufPointer, managedArray, 0, currLength);
                        buffer.Unlock();

                        // Decode the frame with SkiaSharp.
                        SKImage temp = SKImage.FromEncodedData(managedArray);

                        switch (streamIndex)
                        {
                            case 0:
                                OnFrame(this, new FrameReadyEventArgs { FrameBuffer = null, Image = temp });
                                break;
                            case 1:
                                OnMFB(this, new FrameReadyEventArgs { FrameBuffer = null, Image = temp });
                                break;
                        }

                        HandlingImage = false;
                    }
                }
                catch
                {
                    HandlingImage = false;
                }
            }

            if (streamIndex == 0)
            {
                // Read another sample from stream 0.
                hr = StreamReader.ReadSample(
                    0,
                    MediaFoundation.ReadWrite.MF_SOURCE_READER_CONTROL_FLAG.None,
                    IntPtr.Zero,
                    IntPtr.Zero,
                    IntPtr.Zero,
                    IntPtr.Zero
                    );

                // Drain stream 1.
                hr = StreamReader.ReadSample(
                    1,
                    MediaFoundation.ReadWrite.MF_SOURCE_READER_CONTROL_FLAG.Drain,
                    IntPtr.Zero,
                    IntPtr.Zero,
                    IntPtr.Zero,
                    IntPtr.Zero
                    );
            }
        }
    }
    catch
    {
        // Ignore; the sample is still released below.
    }
    finally
    {
        SafeRelease(mediaSample);
    }

    return hr;
}
public HResult Invoke(IMFAsyncResult pResult)
{
    object pUnkObject;
    IMFSample pSample = null;
    IMFMediaBuffer pBuffer = null;
    IMFDXGIBuffer pDXGIBuffer = null;

    // Get the IUnknown out of the IMFAsyncResult, if there is one.
    HResult hr = pResult.GetObject(out pUnkObject);

    if (Succeeded(hr))
    {
        pSample = pUnkObject as IMFSample;
    }

    if (pSample != null)
    {
        // Based on your implementation, there should only be one buffer attached
        // to one sample, so we can always grab the first buffer. You could add
        // some error checking here to make sure the sample's buffer count is 1.
        hr = pSample.GetBufferByIndex(0, out pBuffer);
    }

    if (Succeeded(hr))
    {
        // Query the IMFMediaBuffer to see if it implements IMFDXGIBuffer.
        pDXGIBuffer = pBuffer as IMFDXGIBuffer;
    }

    if (pDXGIBuffer != null)
    {
        // Got an IMFDXGIBuffer, so we can extract the internal ID3D11Texture2D
        // and make a new SharpDX.Texture2D wrapper.
        hr = pDXGIBuffer.GetResource(s_IID_ID3D11Texture2D, out pUnkObject);
    }

    if (Succeeded(hr))
    {
        // If we got here, pUnkObject is the native D3D11 Texture2D as a
        // System.Object, but it's unlikely you have an interface definition for
        // ID3D11Texture2D handy, so we can't just cast the object to the proper
        // interface.
        //
        // Happily, SharpDX supports wrapping a System.Object within
        // SharpDX.ComObject, which makes things pretty easy.
        SharpDX.ComObject comWrapper = new SharpDX.ComObject(pUnkObject);

        // If this doesn't work, or you're using something like SlimDX which
        // doesn't support object wrapping the same way, the code below is an
        // alternative.
        /*
        IntPtr pD3DTexture2D = Marshal.GetIUnknownForObject(pUnkObject);

        // Create your wrapper object here, like this for SharpDX
        SharpDX.ComObject comWrapper = new SharpDX.ComObject(pD3DTexture2D);

        // or like this for SlimDX
        SlimDX.Direct3D11.Texture2D.FromPointer(pD3DTexture2D);

        Marshal.Release(pD3DTexture2D);
        */

        // You might need to query comWrapper for a SharpDX.DXGI.Resource first,
        // then query that for the SharpDX.Direct3D11.Texture2D.
        SharpDX.Direct3D11.Texture2D texture = comWrapper.QueryInterface<SharpDX.Direct3D11.Texture2D>();
        if (texture != null)
        {
            // Now you can add "texture" back to the allocator's free list.
            ReturnFreeTexture(texture);
        }
    }

    // The method is declared to return an HResult, so propagate the last status.
    return hr;
}
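For context, Invoke receives the sample because some caller packed it into an IMFAsyncResult. A sketch of that hand-off is shown below; it assumes MediaFoundation.NET exposes MFCreateAsyncResult and MFInvokeCallback through MFExtern (verify the exact wrapper signatures in your interop version), and textureCallback is a hypothetical instance of the class that implements Invoke above.

// Sketch only: hand a sample to the callback above via an async result.
// Assumes MFExtern wrappers for MFCreateAsyncResult / MFInvokeCallback exist
// in your version of the interop.
IMFAsyncResult asyncResult;
HResult hr = MFExtern.MFCreateAsyncResult(pSample, textureCallback, null, out asyncResult);
if (hr == HResult.S_OK)
{
    // Queues the result on a Media Foundation work queue; the work-queue thread
    // then calls textureCallback.Invoke(asyncResult), and pResult.GetObject()
    // returns the sample that was packed in here.
    hr = MFExtern.MFInvokeCallback(asyncResult);
}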