// Token: 0x06000A59 RID: 2649 RVA: 0x0001E1A8 File Offset: 0x0001C3A8
private IMFSample ReadFromSource()
{
    // Read the next block of audio from the source provider.
    int bytesRead = this.sourceProvider.Read(this.sourceBuffer, 0, this.sourceBuffer.Length);
    if (bytesRead == 0)
    {
        return null;
    }

    // Copy the managed bytes into a Media Foundation memory buffer.
    IMFMediaBuffer mediaBuffer = MediaFoundationApi.CreateMemoryBuffer(bytesRead);
    IntPtr destination;
    int maxLength;
    int currentLength;
    mediaBuffer.Lock(out destination, out maxLength, out currentLength);
    Marshal.Copy(this.sourceBuffer, 0, destination, bytesRead);
    mediaBuffer.Unlock();
    mediaBuffer.SetCurrentLength(bytesRead);

    // Wrap the buffer in a sample and stamp it with time and duration.
    IMFSample sample = MediaFoundationApi.CreateSample();
    sample.AddBuffer(mediaBuffer);
    sample.SetSampleTime(this.inputPosition);
    long durationNs = MediaFoundationTransform.BytesToNsPosition(bytesRead, this.sourceProvider.WaveFormat);
    sample.SetSampleDuration(durationNs);
    this.inputPosition += durationNs;
    Marshal.ReleaseComObject(mediaBuffer);
    return sample;
}
// Token: 0x06000945 RID: 2373 RVA: 0x0001B0A0 File Offset: 0x000192A0
private long ConvertOneBuffer(IMFSinkWriter writer, int streamIndex, IWaveProvider inputProvider, long position, byte[] managedBuffer)
{
    long durationConverted = 0L;
    IMFMediaBuffer mediaBuffer = MediaFoundationApi.CreateMemoryBuffer(managedBuffer.Length);
    int maxLength;
    mediaBuffer.GetMaxLength(out maxLength);

    IMFSample sample = MediaFoundationApi.CreateSample();
    sample.AddBuffer(mediaBuffer);

    IntPtr destination;
    int currentLength;
    mediaBuffer.Lock(out destination, out maxLength, out currentLength);

    // Pull one buffer's worth of audio from the input provider.
    int bytesRead = inputProvider.Read(managedBuffer, 0, maxLength);
    if (bytesRead > 0)
    {
        durationConverted = MediaFoundationEncoder.BytesToNsPosition(bytesRead, inputProvider.WaveFormat);
        Marshal.Copy(managedBuffer, 0, destination, bytesRead);
        mediaBuffer.SetCurrentLength(bytesRead);
        mediaBuffer.Unlock();

        // Stamp the sample and hand it to the sink writer.
        sample.SetSampleTime(position);
        sample.SetSampleDuration(durationConverted);
        writer.WriteSample(streamIndex, sample);
    }
    else
    {
        mediaBuffer.Unlock();
    }

    Marshal.ReleaseComObject(sample);
    Marshal.ReleaseComObject(mediaBuffer);
    return durationConverted;
}
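// Both methods above stamp samples via BytesToNsPosition, which presumably maps a
// PCM byte count onto Media Foundation's 100-nanosecond time units using the
// format's average byte rate. A minimal sketch of that conversion; the helper
// name BytesToNs100 is illustrative, not an API from the code above:
private static long BytesToNs100(long byteCount, WaveFormat waveFormat)
{
    // 1 second = 10,000,000 units of 100 ns, so scale the byte count by
    // the ratio of 10,000,000 to bytes-per-second.
    return (10000000L * byteCount) / waveFormat.AverageBytesPerSecond;
}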
private long ConvertOneBuffer(IMFSinkWriter writer, int streamIndex, IWaveProvider inputProvider, long position, byte[] managedBuffer, int seconds, ref bool flag)
{
    long durationConverted = 0L;
    int maxLength;
    IMFMediaBuffer buffer = MediaFoundationApi.CreateMemoryBuffer(managedBuffer.Length);
    buffer.GetMaxLength(out maxLength);

    IMFSample sample = MediaFoundationApi.CreateSample();
    sample.AddBuffer(buffer);

    IntPtr ptr;
    int currentLength;
    buffer.Lock(out ptr, out maxLength, out currentLength);

    // Read one second of audio at a time. On the first call (flag == true),
    // read 'seconds' one-second blocks back to back; only the last block read
    // remains in managedBuffer and is written below, which effectively skips
    // the leading seconds of input.
    int oneLength = inputProvider.WaveFormat.AverageBytesPerSecond;
    int read = 0;
    if (flag)
    {
        for (int i = 0; i < seconds; i++)
        {
            read = inputProvider.Read(managedBuffer, 0, oneLength);
        }
        flag = false;
    }
    else
    {
        read = inputProvider.Read(managedBuffer, 0, oneLength);
    }

    if (read > 0)
    {
        durationConverted = BytesToNsPosition(read, inputProvider.WaveFormat);
        Marshal.Copy(managedBuffer, 0, ptr, read);
        buffer.SetCurrentLength(read);
        buffer.Unlock();
        sample.SetSampleTime(position);
        sample.SetSampleDuration(durationConverted);
        writer.WriteSample(streamIndex, sample);
        //writer.Flush(streamIndex);
    }
    else
    {
        buffer.Unlock();
    }

    Marshal.ReleaseComObject(sample);
    Marshal.ReleaseComObject(buffer);
    return durationConverted;
}
//todo: put the process work to background thread to speed up. public HResult ProcessSample(IMFSample videoSample) { HResult hr = HResult.S_OK; MFTOutputStatusFlags mftOutFlags; MFTOutputStreamInfo StreamInfo; if (videoSample == null) { return(hr); } pDecoderTransform.ProcessInput(0, videoSample, 0); pDecoderTransform.GetOutputStatus(out mftOutFlags); pDecoderTransform.GetOutputStreamInfo(0, out StreamInfo); while (true) { IMFMediaBuffer resultBuffer; //reset the cache buffer. MFExtern.MFCreateMemoryBuffer(StreamInfo.cbSize, out resultBuffer); _mftOutSample.RemoveAllBuffers(); _mftOutSample.AddBuffer(resultBuffer); ProcessOutputStatus outputStatus; var mftProcessOutput = pDecoderTransform.ProcessOutput(0, 1, _mftOutBufferContainer, out outputStatus); if (mftProcessOutput == HResult.MF_E_TRANSFORM_NEED_MORE_INPUT) { //continue provice input data. break; } else if (_mftOutBufferContainer[0].dwStatus == MFTOutputDataBufferFlags.Incomplete) { //todo: the decoded data include more than one samples,we need to receive all data items. } else { IMFMediaBuffer buffer; _mftOutSample.ConvertToContiguousBuffer(out buffer); invokeDecodeComplete(buffer, StreamInfo.cbSize); } } return(hr); }
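// The todo above is the usual pattern for decoders that can emit several samples
// per input: keep calling ProcessOutput and consuming each produced sample until
// the transform reports MF_E_TRANSFORM_NEED_MORE_INPUT. A minimal sketch under
// the same MediaFoundation.NET assumptions as the method above; it assumes the
// container's pSample entry already wraps outSample, and DrainAvailableOutput
// and the consume callback are illustrative names, not part of the code above:
private static void DrainAvailableOutput(
    IMFTransform decoder,
    MFTOutputDataBuffer[] container,
    IMFSample outSample,
    Action<IMFSample> consume)
{
    while (true)
    {
        ProcessOutputStatus status;
        HResult hr = decoder.ProcessOutput(MFTProcessOutputFlags.None, 1, container, out status);

        if (hr == HResult.MF_E_TRANSFORM_NEED_MORE_INPUT)
        {
            // No more output is available for the input supplied so far.
            break;
        }
        Marshal.ThrowExceptionForHR((int)hr);

        // Hand the decoded sample to the caller. If dwStatus carries the
        // Incomplete flag, the loop simply runs again and picks up the
        // next pending sample.
        consume(outSample);
    }
}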
private void DuplicateSample(IMFSample pInSample, out IMFSample pOutSample)
{
    MFError throwonhr;
    int flags;
    long lTime;

    throwonhr = MFExtern.MFCreateSample(out pOutSample);
    throwonhr = pInSample.CopyAllItems(pOutSample);

    HResult hr = pInSample.GetSampleDuration(out lTime);
    if (Succeeded(hr))
    {
        throwonhr = pOutSample.SetSampleDuration(lTime);
    }

    hr = pInSample.GetSampleTime(out lTime);
    if (Succeeded(hr))
    {
        throwonhr = pOutSample.SetSampleTime(lTime);
    }

    hr = pInSample.GetSampleFlags(out flags);
    if (Succeeded(hr))
    {
        throwonhr = pOutSample.SetSampleFlags(flags);
    }

    IMFMediaBuffer mb;
    throwonhr = MFExtern.MFCreateMemoryBuffer(m_imageHeightInPixels * m_imageWidthInPixels * 4, out mb);
    try
    {
        // Set the data size on the output buffer.
        throwonhr = mb.SetCurrentLength(m_cbImageSizeOutput);
        throwonhr = pOutSample.AddBuffer(mb);
    }
    finally
    {
        SafeRelease(mb);
    }
}
private int ReadFromTransform()
{
    MFT_OUTPUT_DATA_BUFFER[] outputDataBuffer = new MFT_OUTPUT_DATA_BUFFER[1];

    // Create a sample and memory buffer to receive the transform's output.
    IMFSample sample = MediaFoundationApi.CreateSample();
    IMFMediaBuffer outputMediaBuffer = MediaFoundationApi.CreateMemoryBuffer(this.outputBuffer.Length);
    sample.AddBuffer(outputMediaBuffer);
    sample.SetSampleTime(this.outputPosition);
    outputDataBuffer[0].pSample = sample;

    _MFT_PROCESS_OUTPUT_STATUS status;
    int hr = this.transform.ProcessOutput(_MFT_PROCESS_OUTPUT_FLAGS.None, 1, outputDataBuffer, out status);
    if (hr == -1072861838) // MF_E_TRANSFORM_NEED_MORE_INPUT (0xC00D6D72)
    {
        Marshal.ReleaseComObject(outputMediaBuffer);
        Marshal.ReleaseComObject(sample);
        return 0;
    }
    if (hr != 0)
    {
        Marshal.ThrowExceptionForHR(hr);
    }

    // Copy the converted audio out of the returned sample into the managed output buffer.
    IMFMediaBuffer contiguousBuffer;
    outputDataBuffer[0].pSample.ConvertToContiguousBuffer(out contiguousBuffer);
    IntPtr source;
    int maxLength;
    int currentLength;
    contiguousBuffer.Lock(out source, out maxLength, out currentLength);
    this.outputBuffer = BufferHelpers.Ensure(this.outputBuffer, currentLength);
    Marshal.Copy(source, this.outputBuffer, 0, currentLength);
    this.outputBufferOffset = 0;
    this.outputBufferCount = currentLength;
    contiguousBuffer.Unlock();

    this.outputPosition += MediaFoundationTransform.BytesToNsPosition(this.outputBufferCount, this.WaveFormat);
    Marshal.ReleaseComObject(outputMediaBuffer);
    sample.RemoveAllBuffers();
    Marshal.ReleaseComObject(sample);
    Marshal.ReleaseComObject(contiguousBuffer);
    return currentLength;
}
/// <summary>
/// Create a media buffer to be used as input or output for the resampler.
/// </summary>
/// <param name="bufferSize">Size of buffer to create.</param>
/// <param name="sample">Media Foundation sample created.</param>
/// <param name="buffer">Media buffer created.</param>
internal static void CreateResamplerBuffer(int bufferSize, out IMFSample sample, out IMFMediaBuffer buffer)
{
    sample = NativeMethods.MFCreateSample();
    buffer = NativeMethods.MFCreateMemoryBuffer(bufferSize);
    sample.AddBuffer(buffer);
}
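// A typical call site for the helper above: create the sample/buffer pair, copy
// PCM input into the locked buffer, and mark how many bytes are valid. A minimal
// sketch assuming the same NativeMethods wrappers, and that this IMFMediaBuffer
// exposes the standard Lock/Unlock/SetCurrentLength members seen in the other
// snippets here; CreateInputSample and its parameters are illustrative names:
internal static IMFSample CreateInputSample(byte[] pcm, int validBytes)
{
    IMFSample sample;
    IMFMediaBuffer buffer;
    CreateResamplerBuffer(pcm.Length, out sample, out buffer);

    IntPtr dest;
    int maxLength, currentLength;
    buffer.Lock(out dest, out maxLength, out currentLength);
    try
    {
        // Marshal is System.Runtime.InteropServices.Marshal.
        Marshal.Copy(pcm, 0, dest, validBytes);
    }
    finally
    {
        buffer.Unlock();
    }
    buffer.SetCurrentLength(validBytes);
    return sample;
}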
/// <summary>Generates the "tail" of the audio effect.</summary>
/// <param name="InputMessageNumber">Message number to use with OutputSample.</param>
/// <remarks>
/// Generates the "tail" of the audio effect. The tail is the portion
/// of the delay effect that is heard after the input stream ends.
///
/// To generate the tail, the client must drain the MFT by sending
/// the MFT_MESSAGE_COMMAND_DRAIN message and then call ProcessOutput
/// to get the tail samples.
/// </remarks>
private void ProcessEffectTail(int InputMessageNumber)
{
    IMFMediaBuffer pOutputBuffer = null;
    MFError throwonhr;

    IntPtr pbOutputData = IntPtr.Zero; // Pointer to the memory in the output buffer.
    int cbOutputLength = 0;            // Size of the output buffer.
    int cbBytesProcessed = 0;          // How much data we processed.
    IMFSample pOutSample = null;

    // Allocate an output buffer.
    throwonhr = MFExtern.MFCreateMemoryBuffer(m_cbTailSamples, out pOutputBuffer);

    try
    {
        throwonhr = MFExtern.MFCreateSample(out pOutSample);
        throwonhr = pOutSample.AddBuffer(pOutputBuffer);

        // Lock the output buffer.
        int cb;
        throwonhr = pOutputBuffer.Lock(out pbOutputData, out cbOutputLength, out cb);

        // Calculate how many audio samples we can process.
        cbBytesProcessed = Math.Min(m_cbTailSamples, cbOutputLength);

        // Round to the next lowest multiple of nBlockAlign.
        cbBytesProcessed -= (cbBytesProcessed % m_Alignment);

        // Fill the output buffer with silence, because we are also using it as the input buffer.
        FillBufferWithSilence(pbOutputData, cbBytesProcessed);

        // Process the data.
        ProcessAudio(pbOutputData, pbOutputData, cbBytesProcessed / m_Alignment);

        // Set the data length on the output buffer.
        throwonhr = pOutputBuffer.SetCurrentLength(cbBytesProcessed);

        if (m_rtTimestamp >= 0)
        {
            long hnsDuration = (cbBytesProcessed / m_AvgBytesPerSec) * UNITS;

            // Set the time stamp and duration on the output sample.
            throwonhr = pOutSample.SetSampleTime(m_rtTimestamp);
            throwonhr = pOutSample.SetSampleDuration(hnsDuration);
        }

        // Done.
        m_cbTailSamples = 0;
        OutputSample(pOutSample, InputMessageNumber);
    }
    catch
    {
        SafeRelease(pOutSample);
        throw;
    }
    finally
    {
        if (pbOutputData != IntPtr.Zero)
        {
            pOutputBuffer.Unlock();
        }
        SafeRelease(pOutputBuffer);
    }
}
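// The remarks above describe the client-side contract: send the drain command,
// then pull output until the MFT reports it needs more input. A minimal sketch
// assuming a MediaFoundation.NET IMFTransform and an MFTOutputDataBuffer[1]
// whose pSample/buffer are already allocated by the caller (DrainEffect and
// handleTailSample are illustrative names, not part of the effect's code):
private static void DrainEffect(IMFTransform mft, MFTOutputDataBuffer[] outBuffers, Action<IMFSample> handleTailSample)
{
    // Tell the MFT that no more input is coming for this stream.
    Marshal.ThrowExceptionForHR((int)mft.ProcessMessage(MFTMessageType.CommandDrain, IntPtr.Zero));

    while (true)
    {
        ProcessOutputStatus status;
        HResult hr = mft.ProcessOutput(MFTProcessOutputFlags.None, 1, outBuffers, out status);
        if (hr == HResult.MF_E_TRANSFORM_NEED_MORE_INPUT)
        {
            break; // Tail fully delivered.
        }
        Marshal.ThrowExceptionForHR((int)hr);

        // pSample is an IUnknown pointer in this interop layer, so convert it
        // back to an IMFSample before handing it to the callback.
        handleTailSample((IMFSample)Marshal.GetObjectForIUnknown(outBuffers[0].pSample));
    }
}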
private void CaptureStillImages(MediaItem item)
{
    using (var releaser = new ComReleaser())
    {
        // Build the capture pipeline: device source -> stream descriptor -> media type -> source reader.
        MF.CreateVideoDeviceSource(item.DeviceItem.SymLink, out IMFMediaSource source);
        releaser.Add(source);
        source.CreatePresentationDescriptor(out IMFPresentationDescriptor presDesc);
        releaser.Add(presDesc);
        presDesc.GetStreamDescriptorByIndex(item.DescIndex, out bool selected, out IMFStreamDescriptor strmDesc);
        releaser.Add(strmDesc);
        strmDesc.GetMediaTypeHandler(out IMFMediaTypeHandler handler);
        releaser.Add(handler);
        handler.GetMediaTypeByIndex(item.TypeIndex, out IMFMediaType type);
        handler.SetCurrentMediaType(type);

        MF.CreateSourceReaderFromMediaSource(source, out IMFSourceReader reader);
        if (reader == null)
        {
            return;
        }
        releaser.Add(reader);

        IMFTransform transform = null;
        MFTOutputDataBuffer[] outSamples = null;
        IMFSample outRgb24Sample = null;
        IMFMediaBuffer outRgb24Buffer = null;
        int rgbSize = item.Width * item.Height * 3;

        // If the stream's native format is not RGB24, set up a video processor MFT to convert it.
        var needToConvert = item.SubType != MFMediaType.RGB24;
        if (needToConvert)
        {
            var processor = new VideoProcessorMFT();
            releaser.Add(processor);
            transform = (IMFTransform)processor;

            HR(transform.SetInputType(0, type, MFTSetTypeFlags.None));

            var rgbMediaType = MF.CreateMediaType();
            releaser.Add(rgbMediaType);
            HR(type.CopyAllItems(rgbMediaType));
            HR(rgbMediaType.SetGUID(MFAttributesClsid.MF_MT_SUBTYPE, MFMediaType.RGB24));
            HR(rgbMediaType.SetUINT32(MFAttributesClsid.MF_MT_DEFAULT_STRIDE, 3 * item.Width));
            HR(rgbMediaType.SetUINT32(MFAttributesClsid.MF_MT_SAMPLE_SIZE, rgbSize));
            HR(transform.SetOutputType(0, rgbMediaType, MFTSetTypeFlags.None));

            outSamples = new MFTOutputDataBuffer[1];
            outSamples[0] = new MFTOutputDataBuffer();
            outRgb24Sample = MF.CreateSample();
            releaser.Add(outRgb24Sample);
            outRgb24Buffer = MF.CreateMemoryBuffer(rgbSize);
            releaser.Add(outRgb24Buffer);
            outRgb24Sample.AddBuffer(outRgb24Buffer);
            outSamples[0].pSample = Marshal.GetIUnknownForObject(outRgb24Sample);
        }

        while (true)
        {
            int frames = 0;
            var hrRS = reader.ReadSample(
                (int)MF_SOURCE_READER.AnyStream,
                MF_SOURCE_READER_CONTROL_FLAG.None,
                out int streamIndex,
                out MF_SOURCE_READER_FLAG flags,
                out long timeStamp,
                out IMFSample sample
            );

            if (sample != null)
            {
                try
                {
                    IMFSample rgbSample = sample;
                    if (transform != null)
                    {
                        // Push the captured frame through the converter and pull the RGB24 result.
                        transform.ProcessInput(0, sample, 0);
                        while (true)
                        {
                            var hrPO = transform.ProcessOutput(
                                MFTProcessOutputFlags.None,
                                1,
                                outSamples,
                                out ProcessOutputStatus status
                            );
                            if (hrPO.Succeeded())
                            {
                                ConsumeBuffer(outRgb24Buffer, item);
                                frames++;
                                // The finally block below releases the source sample.
                                return;
                                //break;
                            }
                            else
                            {
                                break;
                            }
                        }
                        //var hrPI = transform.ProcessInput(0, sample, 0);
                        continue;
                    }

                    // Already RGB24: consume the sample's first buffer directly.
                    rgbSample.GetBufferByIndex(0, out IMFMediaBuffer buff);
                    if (ConsumeBuffer(buff, item))
                    {
                        frames++;
                    }
                    else
                    {
                        return;
                    }
                }
                finally
                {
                    Marshal.ReleaseComObject(sample);
                }
                break;
            }
        }
    }
}