public MF.Sample CreateSampleFromFrame(byte[] data)
{
    MF.MediaBuffer mediaBuffer = MF.MediaFactory.CreateMemoryBuffer(data.Length);

    // Write all contents to the MediaBuffer for Media Foundation
    int cbMaxLength = 0;
    int cbCurrentLength = 0;
    IntPtr mediaBufferPointer = mediaBuffer.Lock(out cbMaxLength, out cbCurrentLength);
    try
    {
        Marshal.Copy(data, 0, mediaBufferPointer, data.Length);
    }
    finally
    {
        mediaBuffer.Unlock();
        mediaBuffer.CurrentLength = data.Length;
    }

    // Create the sample (includes image and timing information)
    MF.Sample sample = MF.MediaFactory.CreateSample();
    sample.AddBuffer(mediaBuffer);
    return sample;
}
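// Sketch (not part of the original sample): the sample returned above still carries no timing
// information. One way to attach it before writing, mirroring the DrawFrameInternal method
// further below. The sinkWriter, streamIndex, frameRate and frameIndex members used here are
// assumed to exist on the surrounding class.
public void WriteFrame(byte[] data)
{
    MF.Sample sample = CreateSampleFromFrame(data);
    try
    {
        long frameDuration = 10 * 1000 * 1000 / frameRate;   // 100-nanosecond units
        sample.SampleTime = frameDuration * frameIndex;
        sample.SampleDuration = frameDuration;
        sinkWriter.WriteSample(streamIndex, sample);
        frameIndex++;
    }
    finally
    {
        GraphicsHelper.SafeDispose(ref sample);
    }
}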
public MF.Sample CreateSampleFromFrame(MemoryMappedTexture32bpp frame)
{
    MF.MediaBuffer mediaBuffer = MF.MediaFactory.CreateMemoryBuffer((int)frame.SizeInBytes);

    // Write all contents to the MediaBuffer for Media Foundation
    int cbMaxLength = 0;
    int cbCurrentLength = 0;
    IntPtr mediaBufferPointer = mediaBuffer.Lock(out cbMaxLength, out cbCurrentLength);
    try
    {
        if (FlipY)
        {
            // Copy the frame pixel by pixel, flipping it vertically
            unsafe
            {
                int* mediaBufferPointerNative = (int*)mediaBufferPointer.ToPointer();
                int* targetBufferPointerNative = (int*)frame.Pointer.ToPointer();
                for (int loopY = 0; loopY < videoPixelSize.Height; loopY++)
                {
                    for (int loopX = 0; loopX < videoPixelSize.Width; loopX++)
                    {
                        int actIndexTarget = loopX + (loopY * videoPixelSize.Width);
                        int actIndexSource = loopX + ((videoPixelSize.Height - (1 + loopY)) * videoPixelSize.Width);
                        mediaBufferPointerNative[actIndexTarget] = targetBufferPointerNative[actIndexSource];
                    }
                }
            }
        }
        else
        {
            // Copy the frame as-is
            unsafe
            {
                int* mediaBufferPointerNative = (int*)mediaBufferPointer.ToPointer();
                int* targetBufferPointerNative = (int*)frame.Pointer.ToPointer();
                for (int loopY = 0; loopY < videoPixelSize.Height; loopY++)
                {
                    for (int loopX = 0; loopX < videoPixelSize.Width; loopX++)
                    {
                        int actIndex = loopX + (loopY * videoPixelSize.Width);
                        mediaBufferPointerNative[actIndex] = targetBufferPointerNative[actIndex];
                    }
                }
            }
        }
    }
    finally
    {
        mediaBuffer.Unlock();
        mediaBuffer.CurrentLength = (int)frame.SizeInBytes;
    }

    // Create the sample (includes image and timing information)
    MF.Sample sample = MF.MediaFactory.CreateSample();
    sample.AddBuffer(mediaBuffer);
    return sample;
}
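// Sketch (not part of the original sample): the two per-pixel loops above can be collapsed
// into a single row-wise copy. The helper name CopyFrameRows is hypothetical; it assumes the
// same videoPixelSize and FlipY members as the method above, 32bpp pixels with no extra row
// padding, and Buffer.MemoryCopy (available since .NET Framework 4.6 / .NET Core).
private unsafe void CopyFrameRows(IntPtr mediaBufferPointer, MemoryMappedTexture32bpp frame)
{
    int rowBytes = videoPixelSize.Width * sizeof(int);   // 32bpp => 4 bytes per pixel
    byte* target = (byte*)mediaBufferPointer.ToPointer();
    byte* source = (byte*)frame.Pointer.ToPointer();
    for (int loopY = 0; loopY < videoPixelSize.Height; loopY++)
    {
        // When FlipY is set, read the source rows bottom-up
        int sourceY = FlipY ? (videoPixelSize.Height - 1 - loopY) : loopY;
        System.Buffer.MemoryCopy(
            source + (sourceY * rowBytes),
            target + (loopY * rowBytes),
            rowBytes, rowBytes);
    }
}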
/// <summary>
/// Reads the next frame and returns the corresponding buffer.
/// Null is returned if there was nothing to read.
/// </summary>
protected SeeingSharpMediaBuffer ReadFrameInternal()
{
    if (m_endReached) { return null; }

    MF.SourceReaderFlags readerFlags;
    int dummyStreamIndex;
    using (MF.Sample nextSample = m_sourceReader.ReadSample(
        MF.SourceReaderIndex.FirstVideoStream,
        MF.SourceReaderControlFlags.None,
        out dummyStreamIndex,
        out readerFlags,
        out m_currentPositionLong))
    {
        // Check for end-of-stream
        if (readerFlags == MF.SourceReaderFlags.Endofstream)
        {
            m_endReached = true;
            return null;
        }

        // No sample received
        if (nextSample == null) { return null; }

        // Reset end-reached flag (maybe the user called SetPosition again..)
        m_endReached = false;

        // Copy pixel data into target buffer
        if (nextSample.BufferCount > 0)
        {
            return new SeeingSharpMediaBuffer(nextSample.GetBufferByIndex(0));
        }
    }

    return null;
}
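// Sketch (not part of the original sample): a simple way to drain the remaining frames.
// It assumes SeeingSharpMediaBuffer owns the wrapped native buffer and implements IDisposable.
// Note that ReadFrameInternal also returns null when a single sample is missing, so a more
// robust loop could additionally check m_endReached before stopping.
protected void ReadAllRemainingFrames(Action<SeeingSharpMediaBuffer> handleFrame)
{
    SeeingSharpMediaBuffer frameBuffer;
    while ((frameBuffer = ReadFrameInternal()) != null)
    {
        using (frameBuffer)
        {
            handleFrame(frameBuffer);
        }
    }
}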
/// <summary>
/// Draws the given frame to the video.
/// </summary>
/// <param name="device">The device on which the given framebuffer is created.</param>
/// <param name="uploadedTexture">The texture which should be added to the video.</param>
protected override void DrawFrameInternal(EngineDevice device, MemoryMappedTexture32bpp uploadedTexture)
{
    // Cancel here if the given texture has an invalid size
    if (m_videoPixelSize != new Size2(uploadedTexture.Width, uploadedTexture.Height)) { return; }

    m_frameIndex++;

    MF.MediaBuffer mediaBuffer = MF.MediaFactory.CreateMemoryBuffer((int)uploadedTexture.SizeInBytes);
    try
    {
        // Write all contents to the MediaBuffer for Media Foundation
        int cbMaxLength = 0;
        int cbCurrentLength = 0;
        IntPtr mediaBufferPointer = mediaBuffer.Lock(out cbMaxLength, out cbCurrentLength);
        try
        {
            if (this.FlipY)
            {
                // Copy the frame pixel by pixel, flipping it vertically
                unsafe
                {
                    int* mediaBufferPointerNative = (int*)mediaBufferPointer.ToPointer();
                    int* targetBufferPointerNative = (int*)uploadedTexture.Pointer.ToPointer();
                    for (int loopY = 0; loopY < m_videoPixelSize.Height; loopY++)
                    {
                        for (int loopX = 0; loopX < m_videoPixelSize.Width; loopX++)
                        {
                            int actIndexTarget = loopX + (loopY * m_videoPixelSize.Width);
                            int actIndexSource = loopX + ((m_videoPixelSize.Height - (1 + loopY)) * m_videoPixelSize.Width);
                            mediaBufferPointerNative[actIndexTarget] = targetBufferPointerNative[actIndexSource];
                        }
                    }
                }
            }
            else
            {
                // Copy the frame as-is
                unsafe
                {
                    int* mediaBufferPointerNative = (int*)mediaBufferPointer.ToPointer();
                    int* targetBufferPointerNative = (int*)uploadedTexture.Pointer.ToPointer();
                    for (int loopY = 0; loopY < m_videoPixelSize.Height; loopY++)
                    {
                        for (int loopX = 0; loopX < m_videoPixelSize.Width; loopX++)
                        {
                            int actIndex = loopX + (loopY * m_videoPixelSize.Width);
                            mediaBufferPointerNative[actIndex] = targetBufferPointerNative[actIndex];
                        }
                    }
                }
            }
        }
        finally
        {
            mediaBuffer.Unlock();
        }
        mediaBuffer.CurrentLength = (int)uploadedTexture.SizeInBytes;

        // Create the sample (includes image and timing information)
        MF.Sample sample = MF.MediaFactory.CreateSample();
        try
        {
            sample.AddBuffer(mediaBuffer);
            long frameDuration = 10 * 1000 * 1000 / m_framerate;   // duration in 100-nanosecond units
            sample.SampleTime = frameDuration * m_frameIndex;
            sample.SampleDuration = frameDuration;
            m_sinkWriter.WriteSample(m_streamIndex, sample);
        }
        finally
        {
            GraphicsHelper.SafeDispose(ref sample);
        }
    }
    finally
    {
        GraphicsHelper.SafeDispose(ref mediaBuffer);
    }
}
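// Note (not part of the original sample): Media Foundation expresses sample times and durations
// in 100-nanosecond units, which is what the calculation above produces. For example, with
// m_framerate = 25:
long frameDurationAt25Fps = 10 * 1000 * 1000 / 25;        // 400,000 units = 40 ms per frame
long sampleTimeOfFrame10 = frameDurationAt25Fps * 10;     // 4,000,000 units = 0.4 s into the video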