/// <summary>
/// Copies the given 32bpp frame into a newly created Media Foundation memory buffer
/// and wraps it in a <see cref="MF.Sample"/>. When <c>FlipY</c> is set, rows are read
/// bottom-up so the resulting image is vertically mirrored.
/// </summary>
/// <param name="frame">Mapped 32bpp texture holding the pixel data; assumed to match
/// <c>videoPixelSize</c> (width * height * 4 bytes) — TODO confirm against caller.</param>
/// <returns>A sample containing one buffer with the frame's pixel data; the caller owns and must dispose it.</returns>
public MF.Sample CreateSampleFromFrame(MemoryMappedTexture32bpp frame)
{
    MF.MediaBuffer mediaBuffer = MF.MediaFactory.CreateMemoryBuffer((int)frame.SizeInBytes);
    try
    {
        // Write all contents to the MediaBuffer for media foundation
        int cbMaxLength = 0;
        int cbCurrentLength = 0;
        IntPtr mediaBufferPointer = mediaBuffer.Lock(out cbMaxLength, out cbCurrentLength);
        try
        {
            unsafe
            {
                int* targetPointer = (int*)mediaBufferPointer.ToPointer();
                int* sourcePointer = (int*)frame.Pointer.ToPointer();
                for (int loopY = 0; loopY < videoPixelSize.Height; loopY++)
                {
                    // FlipY: read source rows from the bottom up; otherwise copy straight through.
                    int sourceY = FlipY ? videoPixelSize.Height - (1 + loopY) : loopY;
                    for (int loopX = 0; loopX < videoPixelSize.Width; loopX++)
                    {
                        targetPointer[loopX + (loopY * videoPixelSize.Width)] =
                            sourcePointer[loopX + (sourceY * videoPixelSize.Width)];
                    }
                }
            }
        }
        finally
        {
            mediaBuffer.Unlock();
            mediaBuffer.CurrentLength = (int)frame.SizeInBytes;
        }

        // Create the sample (includes image and timing information)
        MF.Sample sample = MF.MediaFactory.CreateSample();
        sample.AddBuffer(mediaBuffer);
        return sample;
    }
    catch
    {
        // Do not leak the native buffer if locking, copying, or sample creation fails.
        mediaBuffer.Dispose();
        throw;
    }
}
/// <summary>
/// Writes one video frame together with the matching block of audio data.
/// Both samples are disposed before returning, even when writing fails.
/// </summary>
/// <param name="frame">Mapped 32bpp texture with the video frame; must be non-null and non-empty.</param>
/// <param name="audioFrame">Raw audio bytes for the same time span; must be non-null and non-empty.</param>
public void AddVideoAndAudioFrame(MemoryMappedTexture32bpp frame, byte[] audioFrame)
{
    Debug.Assert(frame != null && frame.SizeInBytes != 0 &&
                 audioFrame != null && audioFrame.Length != 0);

    var videoSample = CreateSampleFromFrame(frame);
    try
    {
        // Create the audio sample inside the try block so the video sample
        // is not leaked if audio sample creation throws.
        var audioSample = audioWriter.CreateSampleFromFrame(audioFrame);
        try
        {
            var samples = new Dictionary<int, Sample>
            {
                { StreamIndex, videoSample },
                { audioWriter.StreamIndex, audioSample },
            };
            WriteSamples(samples);
        }
        finally
        {
            audioSample.Dispose();
        }
    }
    finally
    {
        videoSample.Dispose();
    }
}
/// <summary>
/// Writes a single video frame to the output stream.
/// A write failure drops this frame (logged in debug builds) instead of aborting recording.
/// </summary>
/// <param name="frame">Mapped 32bpp texture with the frame data; must be non-null and non-empty.</param>
public void AddVideoFrame(MemoryMappedTexture32bpp frame)
{
    Debug.Assert(frame != null && frame.SizeInBytes != 0);

    // Create the sample (includes image and timing information)
    var videoSample = CreateSampleFromFrame(frame);
    try
    {
        var sampleByStream = new Dictionary<int, Sample>
        {
            { StreamIndex, videoSample },
        };
        WriteSamples(sampleByStream);
    }
    catch (SharpDXException e)
    {
        // Best-effort policy: swallow the write error so one bad frame
        // does not tear down the whole recording session.
        Debug.WriteLine(e.Message);
    }
    finally
    {
        videoSample.Dispose();
    }
}
/// <summary>
/// Copies the given 32bpp frame into a newly created Media Foundation memory buffer
/// and wraps it in a <see cref="MF.Sample"/>. When <c>FlipY</c> is set, rows are read
/// bottom-up so the resulting image is vertically mirrored.
/// </summary>
/// <param name="frame">Mapped 32bpp texture holding the pixel data; assumed to match
/// <c>videoPixelSize</c> (width * height * 4 bytes) — TODO confirm against caller.</param>
/// <returns>A sample containing one buffer with the frame's pixel data; the caller owns and must dispose it.</returns>
public MF.Sample CreateSampleFromFrame(MemoryMappedTexture32bpp frame)
{
    MF.MediaBuffer mediaBuffer = MF.MediaFactory.CreateMemoryBuffer((int)frame.SizeInBytes);
    try
    {
        // Write all contents to the MediaBuffer for media foundation
        int cbMaxLength = 0;
        int cbCurrentLength = 0;
        IntPtr mediaBufferPointer = mediaBuffer.Lock(out cbMaxLength, out cbCurrentLength);
        try
        {
            unsafe
            {
                int* targetPointer = (int*)mediaBufferPointer.ToPointer();
                int* sourcePointer = (int*)frame.Pointer.ToPointer();
                for (int loopY = 0; loopY < videoPixelSize.Height; loopY++)
                {
                    // FlipY: read source rows from the bottom up; otherwise copy straight through.
                    int sourceY = FlipY ? videoPixelSize.Height - (1 + loopY) : loopY;
                    for (int loopX = 0; loopX < videoPixelSize.Width; loopX++)
                    {
                        targetPointer[loopX + (loopY * videoPixelSize.Width)] =
                            sourcePointer[loopX + (sourceY * videoPixelSize.Width)];
                    }
                }
            }
        }
        finally
        {
            mediaBuffer.Unlock();
            mediaBuffer.CurrentLength = (int)frame.SizeInBytes;
        }

        // Create the sample (includes image and timing information)
        MF.Sample sample = MF.MediaFactory.CreateSample();
        sample.AddBuffer(mediaBuffer);
        return sample;
    }
    catch
    {
        // Do not leak the native buffer if locking, copying, or sample creation fails.
        mediaBuffer.Dispose();
        throw;
    }
}
/// <summary>
/// Writes a single video frame to the output stream.
/// A write failure drops this frame (logged in debug builds) instead of aborting recording.
/// </summary>
/// <param name="frame">Mapped 32bpp texture with the frame data; must be non-null and non-empty.</param>
public void AddVideoFrame(MemoryMappedTexture32bpp frame)
{
    Debug.Assert(frame != null && frame.SizeInBytes != 0);

    // Create the sample (includes image and timing information)
    var videoSample = CreateSampleFromFrame(frame);
    try
    {
        var sampleByStream = new Dictionary<int, Sample>
        {
            { StreamIndex, videoSample },
        };
        WriteSamples(sampleByStream);
    }
    catch (SharpDXException e)
    {
        // Best-effort policy: swallow the write error so one bad frame
        // does not tear down the whole recording session.
        Debug.WriteLine(e.Message);
    }
    finally
    {
        videoSample.Dispose();
    }
}
/// <summary>
/// Writes one video frame together with the matching block of audio data.
/// Both samples are disposed before returning, even when writing fails.
/// </summary>
/// <param name="frame">Mapped 32bpp texture with the video frame; must be non-null and non-empty.</param>
/// <param name="audioFrame">Raw audio bytes for the same time span; must be non-null and non-empty.</param>
public void AddVideoAndAudioFrame(MemoryMappedTexture32bpp frame, byte[] audioFrame)
{
    Debug.Assert(frame != null && frame.SizeInBytes != 0 &&
                 audioFrame != null && audioFrame.Length != 0);

    var videoSample = CreateSampleFromFrame(frame);
    try
    {
        // Create the audio sample inside the try block so the video sample
        // is not leaked if audio sample creation throws.
        var audioSample = audioWriter.CreateSampleFromFrame(audioFrame);
        try
        {
            var samples = new Dictionary<int, Sample>
            {
                { StreamIndex, videoSample },
                { audioWriter.StreamIndex, audioSample },
            };
            WriteSamples(samples);
        }
        finally
        {
            audioSample.Dispose();
        }
    }
    finally
    {
        videoSample.Dispose();
    }
}