/// <summary>
/// Creates the H.264 decoder MFT, configures its compressed H.264 input type and
/// uncompressed IYUV output type, verifies the decoder will accept input data,
/// and sends the flush / begin-streaming / start-of-stream messages.
/// </summary>
private void CreateH264Decoder()
{
    HResult hr;

    // create H.264 decoder
    // NOTE(review): the COM class is named ResamplerMediaComObject — confirm its CLSID
    // actually resolves to the H.264 decoder MFT and not the audio resampler.
    var comobject = new ResamplerMediaComObject();
    decodertransform = (IMFTransform)comobject;

    // ---- Input media type: compressed H.264 video ----
    MFExtern.MFCreateMediaType(out decinputmediatype);
    // setup media type manualy
    IMFMediaType testdecinputmediatype, testdecoutputmediatype;
    decinputmediatype.SetGUID(MFAttributesClsid.MF_MT_MAJOR_TYPE, MFMediaType.Video);
    decinputmediatype.SetGUID(MFAttributesClsid.MF_MT_SUBTYPE, MFMediaType.H264);
    decinputmediatype.SetUINT32(MFAttributesClsid.MF_MT_INTERLACE_MODE, (int)MFVideoInterlaceMode.Progressive);
    MFExtern.MFSetAttributeSize(decinputmediatype, MFAttributesClsid.MF_MT_FRAME_SIZE, VIDEO_SAMPLE_WIDTH, VIDEO_SAMPLE_HEIGHT);
    // Y plane padded to a 16-line macroblock boundary, plus U and V at half height.
    uint fixedSampleSize = VIDEO_SAMPLE_WIDTH * (16 * ((VIDEO_SAMPLE_HEIGHT + 15) / 16)) + VIDEO_SAMPLE_WIDTH * (VIDEO_SAMPLE_HEIGHT / 2); //for Y, U and V
    decinputmediatype.SetUINT32(MFAttributesClsid.MF_MT_SAMPLE_SIZE, fixedSampleSize);
    decinputmediatype.SetUINT32(MFAttributesClsid.MF_MT_DEFAULT_STRIDE, VIDEO_SAMPLE_WIDTH);
    decinputmediatype.SetUINT32(MFAttributesClsid.MF_MT_FIXED_SIZE_SAMPLES, 1);
    decinputmediatype.SetUINT32(MFAttributesClsid.MF_MT_ALL_SAMPLES_INDEPENDENT, 1);
    MFExtern.MFSetAttributeRatio(decinputmediatype, MFAttributesClsid.MF_MT_PIXEL_ASPECT_RATIO, 1, 1);

    // NOTE(review): hr is assigned here and below but never checked — a failed
    // SetInputType/SetOutputType goes unnoticed; consider testing for failure.
    hr = decodertransform.SetInputType(0, decinputmediatype, 0);
    // Probe call; the returned type is intentionally unused (handy under a debugger).
    decodertransform.GetInputAvailableType(0, 0, out testdecinputmediatype);

    // ---- Output media type: uncompressed IYUV ----
    MFExtern.MFCreateMediaType(out decoutputmediatype);
    decoutputmediatype.SetGUID(MFAttributesClsid.MF_MT_MAJOR_TYPE, MFMediaType.Video);
    decoutputmediatype.SetGUID(MFAttributesClsid.MF_MT_SUBTYPE, MFMediaType.IYUV);
    MFExtern.MFSetAttributeSize(decoutputmediatype, MFAttributesClsid.MF_MT_FRAME_SIZE, VIDEO_SAMPLE_WIDTH, VIDEO_SAMPLE_HEIGHT);
    MFExtern.MFSetAttributeRatio(decoutputmediatype, MFAttributesClsid.MF_MT_FRAME_RATE, 30, 1);
    MFExtern.MFSetAttributeRatio(decoutputmediatype, MFAttributesClsid.MF_MT_PIXEL_ASPECT_RATIO, 1, 1);
    // Was the magic number 2; use the same enum as the input type (Progressive == 2).
    decoutputmediatype.SetUINT32(MFAttributesClsid.MF_MT_INTERLACE_MODE, (int)MFVideoInterlaceMode.Progressive);
    hr = decodertransform.SetOutputType(0, decoutputmediatype, 0);
    // Probe call; result intentionally unused.
    decodertransform.GetOutputAvailableType(0, 0, out testdecoutputmediatype);

    // The decoder must report that it can accept input before we start streaming.
    decodertransform.GetInputStatus(0, out mftStatus);
    if (mftStatus != MFTInputStatusFlags.AcceptData)
    {
        Debug.WriteLine("DECODER NOT ACCEPT INPUT DATA");
        return;
    }
    else
    {
        Debug.WriteLine("PROCESS INPUT DONE>>>> " + mftStatus);
    }

    // Standard MFT streaming handshake.
    decodertransform.ProcessMessage(MFTMessageType.CommandFlush, (IntPtr)null);
    decodertransform.ProcessMessage(MFTMessageType.NotifyBeginStreaming, (IntPtr)null);
    decodertransform.ProcessMessage(MFTMessageType.NotifyStartOfStream, (IntPtr)null);
}
protected override IMFTransform CreateTransform() { object obj = this.CreateResamplerComObject(); IMFTransform arg_1E_0 = (IMFTransform)obj; IMFMediaType iMFMediaType = MediaFoundationApi.CreateMediaTypeFromWaveFormat(this.sourceProvider.WaveFormat); arg_1E_0.SetInputType(0, iMFMediaType, _MFT_SET_TYPE_FLAGS.None); Marshal.ReleaseComObject(iMFMediaType); IMFMediaType iMFMediaType2 = MediaFoundationApi.CreateMediaTypeFromWaveFormat(this.outputWaveFormat); arg_1E_0.SetOutputType(0, iMFMediaType2, _MFT_SET_TYPE_FLAGS.None); Marshal.ReleaseComObject(iMFMediaType2); ((IWMResamplerProps)obj).SetHalfFilterLength(this.ResamplerQuality); return(arg_1E_0); }
/// <summary>
/// Configures the H.264 decoder MFT for the given stream: applies the caller's
/// media type as input, builds a matching YUY2 output type, starts streaming,
/// and pre-allocates the reusable output sample container.
/// </summary>
/// <param name="streamId">MFT stream identifier used for all subsequent calls.</param>
/// <param name="mediaType">Media type describing the incoming H.264 stream.</param>
public void initialize(int streamId, IMFMediaType mediaType)
{
    // Pull frame size and pixel aspect ratio from the caller's type so the
    // output type below can mirror them, then apply the input type.
    ThrowIfError(MFExtern.MFGetAttributeSize(mediaType, MFAttributesClsid.MF_MT_FRAME_SIZE, out _videoWitdh, out _videoHeight));
    ThrowIfError(MFExtern.MFGetAttributeRatio(mediaType, MFAttributesClsid.MF_MT_PIXEL_ASPECT_RATIO, out _videoRatioN, out _videoRatioD));
    ThrowIfError(pDecoderTransform.SetInputType(streamId, mediaType, MFTSetTypeFlags.None));

    // Build the uncompressed output type: YUY2, same dimensions and aspect ratio, 30 fps.
    IMFMediaType outputType;
    ThrowIfError(MFExtern.MFCreateMediaType(out outputType));
    ThrowIfError(outputType.SetGUID(MF_MT_MAJOR_TYPE, MFMediaType.Video));
    ThrowIfError(outputType.SetGUID(MF_MT_SUBTYPE, MFMediaType.YUY2));
    IMFAttributes outputAttributes = outputType as IMFAttributes;
    ThrowIfError(MFExtern.MFSetAttributeSize(outputAttributes, MF_MT_FRAME_SIZE, _videoWitdh, _videoHeight));
    ThrowIfError(MFExtern.MFSetAttributeRatio(outputAttributes, MF_MT_FRAME_RATE, 30, 1));
    ThrowIfError(MFExtern.MFSetAttributeRatio(outputAttributes, MF_MT_PIXEL_ASPECT_RATIO, _videoRatioN, _videoRatioD));
    // 2 presumably means progressive scan — TODO confirm against the interlace enum used elsewhere.
    ThrowIfError(outputAttributes.SetUINT32(MF_MT_INTERLACE_MODE, 2));
    ThrowIfError(pDecoderTransform.SetOutputType(streamId, outputType, MFTSetTypeFlags.None));

    // Refuse to continue unless the decoder reports it can take input.
    MFTInputStatusFlags inputStatus;
    ThrowIfError(pDecoderTransform.GetInputStatus(streamId, out inputStatus));
    if (MFTInputStatusFlags.AcceptData != inputStatus)
    {
        throw new Exception("H.264 decoder MFT is not accepting data.\n");
    }

    // Standard MFT streaming handshake.
    ThrowIfError(pDecoderTransform.ProcessMessage(MFTMessageType.CommandFlush, IntPtr.Zero));
    ThrowIfError(pDecoderTransform.ProcessMessage(MFTMessageType.NotifyBeginStreaming, IntPtr.Zero));
    ThrowIfError(pDecoderTransform.ProcessMessage(MFTMessageType.NotifyStartOfStream, IntPtr.Zero));

    // Pre-allocate the single reusable output sample and its buffer container.
    ThrowIfError(MFExtern.MFCreateSample(out _mftOutSample));
    _mftOutBufferContainer = new MFTOutputDataBuffer[1];
    //todo: set the stream id again when receive media stream later.
    _mftOutBufferContainer[0].dwStreamID = streamId;
    _mftOutBufferContainer[0].dwStatus = 0;
    _mftOutBufferContainer[0].pEvents = null;
    _mftOutBufferContainer[0].pSample = Marshal.GetIUnknownForObject(_mftOutSample);

    _videoStreamId = streamId;
    _streamMediaType = mediaType;
}
/// <summary> /// Create Media Foundation transform that resamples audio in specified input format /// into specified output format. /// </summary> /// <param name="inputFormat"> /// Wave format input to resampling operation. /// </param> /// <param name="outputFormat"> /// Wave format output from resampling operation. /// </param> /// <returns> /// Media transform object that will resample audio. /// </returns> internal static IMFTransform CreateResampler(WaveFormat inputFormat, WaveFormat outputFormat) { IMFTransform resampler = null; IMFMediaType inputType = null; IMFMediaType outputType = null; try { resampler = (IMFTransform) new CResamplerMediaObject(); inputType = CreateMediaType(inputFormat); resampler.SetInputType(0, inputType, 0); outputType = CreateMediaType(outputFormat); resampler.SetOutputType(0, outputType, 0); } finally { Marshal.ReleaseComObject(inputType); Marshal.ReleaseComObject(outputType); } return(resampler); }
/// <summary>
/// Captures a still frame from the capture device described by <paramref name="item"/>.
/// If the stream's native format is not RGB24, frames are routed through a Video
/// Processor MFT that converts them to RGB24 before <c>ConsumeBuffer</c> is called.
/// </summary>
/// <param name="item">Device, stream and media-type selection plus frame dimensions.</param>
private void CaptureStillImages(MediaItem item)
{
    using (var releaser = new ComReleaser())
    {
        // Build source -> stream descriptor -> media type chain for the requested device.
        MF.CreateVideoDeviceSource(item.DeviceItem.SymLink, out IMFMediaSource source);
        releaser.Add(source);
        source.CreatePresentationDescriptor(out IMFPresentationDescriptor presDesc);
        releaser.Add(presDesc);
        presDesc.GetStreamDescriptorByIndex(item.DescIndex, out bool selected, out IMFStreamDescriptor strmDesc);
        releaser.Add(strmDesc);
        strmDesc.GetMediaTypeHandler(out IMFMediaTypeHandler handler);
        releaser.Add(handler);
        handler.GetMediaTypeByIndex(item.TypeIndex, out IMFMediaType type);
        // BUG FIX: `type` was the only COM object here not tracked by the releaser.
        releaser.Add(type);
        handler.SetCurrentMediaType(type);
        MF.CreateSourceReaderFromMediaSource(source, out IMFSourceReader reader);
        if (reader == null)
        {
            return;
        }
        releaser.Add(reader);

        IMFTransform transform = null;
        MFTOutputDataBuffer[] outSamples = null;
        IMFSample outRgb24Sample = null;
        IMFMediaBuffer outRgb24Buffer = null;
        int rgbSize = item.Width * item.Height * 3;

        var needToConvert = item.SubType != MFMediaType.RGB24;
        if (needToConvert)
        {
            // Set up a Video Processor MFT that converts the native format to RGB24,
            // reusing a single pre-allocated output sample/buffer pair.
            var processor = new VideoProcessorMFT();
            releaser.Add(processor);
            transform = (IMFTransform)processor;
            HR(transform.SetInputType(0, type, MFTSetTypeFlags.None));
            var rgbMediaType = MF.CreateMediaType();
            releaser.Add(rgbMediaType);
            HR(type.CopyAllItems(rgbMediaType));
            HR(rgbMediaType.SetGUID(MFAttributesClsid.MF_MT_SUBTYPE, MFMediaType.RGB24));
            HR(rgbMediaType.SetUINT32(MFAttributesClsid.MF_MT_DEFAULT_STRIDE, 3 * item.Width));
            HR(rgbMediaType.SetUINT32(MFAttributesClsid.MF_MT_SAMPLE_SIZE, rgbSize));
            HR(transform.SetOutputType(0, rgbMediaType, MFTSetTypeFlags.None));
            outSamples = new MFTOutputDataBuffer[1];
            outSamples[0] = new MFTOutputDataBuffer();
            outRgb24Sample = MF.CreateSample();
            releaser.Add(outRgb24Sample);
            outRgb24Buffer = MF.CreateMemoryBuffer(rgbSize);
            releaser.Add(outRgb24Buffer);
            outRgb24Sample.AddBuffer(outRgb24Buffer);
            outSamples[0].pSample = Marshal.GetIUnknownForObject(outRgb24Sample);
        }

        while (true)
        {
            int frames = 0;
            var hrRS = reader.ReadSample(
                (int)MF_SOURCE_READER.AnyStream,
                MF_SOURCE_READER_CONTROL_FLAG.None,
                out int streamIndex,
                out MF_SOURCE_READER_FLAG flags,
                out long timeStamp,
                out IMFSample sample);

            // BUG FIX: the original ignored hrRS; a persistently failing read
            // (with sample == null) would spin this loop forever.
            if (!hrRS.Succeeded())
            {
                return;
            }
            // NOTE(review): a null sample with a successful read still loops —
            // confirm whether `flags` should be checked for end-of-stream here.

            if (sample != null)
            {
                try
                {
                    if (transform != null)
                    {
                        // Conversion path: push the sample in, drain converted output.
                        transform.ProcessInput(0, sample, 0);
                        while (true)
                        {
                            var hrPO = transform.ProcessOutput(
                                MFTProcessOutputFlags.None,
                                1,
                                outSamples,
                                out ProcessOutputStatus status);
                            if (hrPO.Succeeded())
                            {
                                ConsumeBuffer(outRgb24Buffer, item);
                                frames++;
                                // BUG FIX: the original called ReleaseComObject(sample)
                                // here and then again in the finally block on return,
                                // over-releasing the sample. The finally below now
                                // performs the single release.
                                return;
                            }
                            else
                            {
                                // MFT needs more input before it can produce output.
                                break;
                            }
                        }
                        continue;
                    }

                    // Already RGB24: hand the raw buffer straight to the consumer.
                    sample.GetBufferByIndex(0, out IMFMediaBuffer buff);
                    try
                    {
                        if (ConsumeBuffer(buff, item))
                        {
                            frames++;
                        }
                        else
                        {
                            return;
                        }
                    }
                    finally
                    {
                        // BUG FIX: the original never released the buffer from
                        // GetBufferByIndex, leaking a COM reference per frame.
                        Marshal.ReleaseComObject(buff);
                    }
                }
                finally
                {
                    Marshal.ReleaseComObject(sample);
                }
                break;
            }
        }
    }
}
internal void SetInputMediaType(uint streamIndex, AudioEncodingProperties encoding, bool bTestOnly) { IMFMediaType mediaType = MediaTypeFromProperties(encoding); _mft.SetInputType(streamIndex, mediaType, bTestOnly ? 1u : 0u); }