Code Example #1
        /// <summary>
        /// Starts rendering to the target.
        /// </summary>
        /// <param name="videoPixelSize">The pixel size of the video.</param>
        protected override void StartRenderingInternal(Size2 videoPixelSize)
        {
            m_outStreamNet = base.TargetFile.OpenOutputStream();
            m_outStream    = new MF.ByteStream(m_outStreamNet);

            // Pass dummy filename as described here:
            // https://social.msdn.microsoft.com/forums/windowsapps/en-us/49bffa74-4e84-4fd6-9d67-42e8385611b8/video-sinkwriter-in-metro-app
            m_sinkWriter = MF.MediaFactory.CreateSinkWriterFromURL(
                this.DummyFileName, m_outStream.NativePointer, null);
            m_videoPixelSize = videoPixelSize;

            CreateMediaTarget(m_sinkWriter, m_videoPixelSize, out m_streamIndex);

            // Configure input
            using (MF.MediaType mediaTypeIn = new MF.MediaType())
            {
                mediaTypeIn.Set<Guid>(MF.MediaTypeAttributeKeys.MajorType, MF.MediaTypeGuids.Video);
                mediaTypeIn.Set<Guid>(MF.MediaTypeAttributeKeys.Subtype, VIDEO_INPUT_FORMAT);
                mediaTypeIn.Set<int>(MF.MediaTypeAttributeKeys.InterlaceMode, (int)MF.VideoInterlaceMode.Progressive);
                mediaTypeIn.Set<long>(MF.MediaTypeAttributeKeys.FrameSize, MFHelper.GetMFEncodedIntsByValues(videoPixelSize.Width, videoPixelSize.Height));
                mediaTypeIn.Set<long>(MF.MediaTypeAttributeKeys.FrameRate, MFHelper.GetMFEncodedIntsByValues(m_framerate, 1));
                m_sinkWriter.SetInputMediaType(m_streamIndex, mediaTypeIn, null);
            }

            // Start writing the video file
            m_sinkWriter.BeginWriting();

            // Set initial frame index
            m_frameIndex = -1;
        }
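Note: MFHelper.GetMFEncodedIntsByValues is a project-specific helper that these excerpts never show. Judging from its use for FrameSize and FrameRate (and from the explicit packing in Code Example #5 below), it packs two 32-bit values into the single 64-bit layout that Media Foundation's MFSetAttribute2UINT32asUINT64 uses. A minimal sketch under that assumption:

        internal static class MFHelper
        {
            // Packs two 32-bit values into one UINT64 attribute value
            // (first value in the high 32 bits, second in the low 32 bits),
            // matching MFSetAttribute2UINT32asUINT64.
            public static long GetMFEncodedIntsByValues(int valueHigh, int valueLow)
            {
                return ((long)valueHigh << 32) | (uint)valueLow;
            }

            // Reverse operation (the return type is an assumption; the real
            // helper may use a custom pair type instead of a Tuple).
            public static System.Tuple<int, int> GetValuesByMFEncodedInts(long encoded)
            {
                return System.Tuple.Create((int)(encoded >> 32), (int)(encoded & 0xFFFFFFFF));
            }
        }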
Code Example #2
        /// <summary>
        ///     Creates a new instance of the <see cref="MediaFoundationEncoder"/> class.
        /// </summary>
        /// <param name="inputMediaType">Mediatype of the source to encode.</param>
        /// <param name="stream">Stream which will be used to store the encoded data.</param>
        /// <param name="targetMediaType">The format of the encoded data.</param>
        /// <param name="containerType">See container type. For a list of all available container types, see <see cref="TranscodeContainerTypes"/>.</param>
        public MediaFoundationEncoder(Stream stream, SharpDX.MediaFoundation.MediaType inputMediaType, MediaType targetMediaType,
                                      Guid containerType)
        {
            if (stream == null)
            {
                throw new ArgumentNullException("stream");
            }
            if (!stream.CanWrite)
            {
                throw new ArgumentException("Stream is not writeable.");
            }
            if (!stream.CanRead)
            {
                throw new ArgumentException("Stream is not readable.");
            }

            if (inputMediaType == null)
            {
                throw new ArgumentNullException("inputMediaType");
            }
            if (targetMediaType == null)
            {
                throw new ArgumentNullException("targetMediaType");
            }

            if (containerType == Guid.Empty)
            {
                throw new ArgumentException("Container type must not be Guid.Empty.", "containerType");
            }

            _targetMediaType = targetMediaType;

            SetTargetStream(stream, inputMediaType, targetMediaType, containerType);
        }
Code Example #3
 private void OnMediaTypeChanged(SharpDX.MediaFoundation.MediaType mediaType)
 {
     if (videoRenderer != null)
     {
         videoRenderer.SetMediaType(mediaType);
     }
 }
Code Example #4
        /// <summary>
        /// Encodes a file
        /// </summary>
        /// <param name="outputFile">Output filename (container type is deduced from the filename)</param>
        /// <param name="inputProvider">Input provider (should be PCM, some encoders will also allow IEEE float)</param>
        public void Encode(string outputFile, IWaveProvider inputProvider)
        {
            if (inputProvider.WaveFormat.Encoding != WaveFormatEncoding.Pcm && inputProvider.WaveFormat.Encoding != WaveFormatEncoding.IeeeFloat)
            {
                throw new ArgumentException("Encode input format must be PCM or IEEE float");
            }

            // possibly could use Marshaling to convert instead
            // given that input should be PCM or IEEE float, this should work just fine
            var sharpWf = SharpDX.Multimedia.WaveFormat.CreateCustomFormat(
                (SharpDX.Multimedia.WaveFormatEncoding)inputProvider.WaveFormat.Encoding,
                inputProvider.WaveFormat.SampleRate,
                inputProvider.WaveFormat.Channels,
                inputProvider.WaveFormat.AverageBytesPerSecond,
                inputProvider.WaveFormat.BlockAlign,
                inputProvider.WaveFormat.BitsPerSample);


            using var inputMediaType = new MediaType();
            var size = 18 + sharpWf.ExtraSize; // 18 = byte size of the base WAVEFORMATEX structure

            MediaFactory.InitMediaTypeFromWaveFormatEx(inputMediaType, new[] { sharpWf }, size);

            using var writer = CreateSinkWriter(outputFile);
            writer.AddStream(outputMediaType, out int streamIndex);

            // n.b. can get 0xC00D36B4 - MF_E_INVALIDMEDIATYPE here
            writer.SetInputMediaType(streamIndex, inputMediaType, null);

            PerformEncode(writer, streamIndex, inputProvider);
        }
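The 0xC00D36B4 note above refers to MF_E_INVALIDMEDIATYPE, which SharpDX surfaces as a SharpDXException; a hedged sketch of turning it into a clearer error (writer, streamIndex and inputMediaType as in the method above, ResultCode.InvalidMediaType as used in Code Example #5 below):

            try
            {
                writer.SetInputMediaType(streamIndex, inputMediaType, null);
            }
            catch (SharpDX.SharpDXException ex)
                when (ex.ResultCode == SharpDX.MediaFoundation.ResultCode.InvalidMediaType)
            {
                // MF_E_INVALIDMEDIATYPE: the encoder rejected this PCM/IEEE-float layout.
                throw new InvalidOperationException(
                    "The encoder rejected the input wave format.", ex);
            }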
Code Example #5
        /// <inheritdoc />
        /// <summary>
        ///   Starts the encoder after all properties have been initialized
        /// </summary>
        public override void Start()
        {
            MediaFactory.Startup(MediaFactory.Version, MFSTARTUP_NOSOCKET);

            using (var attrs = new MediaAttributes()) {
                attrs.Set(TranscodeAttributeKeys.TranscodeContainertype, this.containerType);
                attrs.Set(SinkWriterAttributeKeys.ReadwriteEnableHardwareTransforms, 1);
                attrs.Set(SinkWriterAttributeKeys.LowLatency, true);

                if (this.dxgiManager != null)
                {
                    attrs.Set(SinkWriterAttributeKeys.D3DManager, this.dxgiManager);
                }

                // create byte stream and sink writer
                this.byteStream = new ByteStream(DestinationStream);
                this.sinkWriter = MediaFactory.CreateSinkWriterFromURL(null, this.byteStream, attrs);

                // create output media type
                using (var outMediaType = new SharpDX.MediaFoundation.MediaType()) {
                    outMediaType.Set(MediaTypeAttributeKeys.MajorType, MediaTypeGuids.Video);
                    outMediaType.Set(MediaTypeAttributeKeys.Subtype, this.videoFormat);
                    outMediaType.Set(MediaTypeAttributeKeys.AvgBitrate, this.bitRate);
                    outMediaType.Set(MediaTypeAttributeKeys.InterlaceMode, (int)VideoInterlaceMode.Progressive);
                    outMediaType.Set(MediaTypeAttributeKeys.FrameSize,
                                     ((long)FrameSize.Width << 32) | (uint)FrameSize.Height);
                    outMediaType.Set(MediaTypeAttributeKeys.FrameRate, ((long)this.frameRate << 32) | 1);
                    outMediaType.Set(MediaTypeAttributeKeys.PixelAspectRatio, ((long)1 << 32) | 1); // 1:1; the cast matters - (1 << 32) on a 32-bit int evaluates to 1

                    this.sinkWriter.AddStream(outMediaType, out this.streamIdx);
                }

                // create input media type
                using (var inMediaType = new SharpDX.MediaFoundation.MediaType()) {
                    inMediaType.Set(MediaTypeAttributeKeys.MajorType, MediaTypeGuids.Video);

                    inMediaType.Set(MediaTypeAttributeKeys.InterlaceMode, (int)VideoInterlaceMode.Progressive);
                    inMediaType.Set(MediaTypeAttributeKeys.FrameSize,
                                    ((long)FrameSize.Width << 32) | (uint)FrameSize.Height);
                    inMediaType.Set(MediaTypeAttributeKeys.FrameRate, ((long)this.frameRate << 32) | 1);
                    inMediaType.Set(MediaTypeAttributeKeys.PixelAspectRatio, ((long)1 << 32) | 1); // 1:1; same cast fix as above

                    try {
                        // use NV12 YUV encoding
                        inMediaType.Set(MediaTypeAttributeKeys.Subtype, VideoFormatGuids.NV12);
                        this.sinkWriter.SetInputMediaType(this.streamIdx, inMediaType, null);
                    } catch (SharpDXException exception)
                        when(exception.ResultCode == SharpDX.MediaFoundation.ResultCode.InvalidMediaType)
                        {
                            // XXX: fall back to ARGB32
                            inMediaType.Set(MediaTypeAttributeKeys.Subtype, VideoFormatGuids.Argb32);
                            this.sinkWriter.SetInputMediaType(this.streamIdx, inMediaType, null);
                        }
                }

                this.sinkWriter.BeginWriting();
            }
        }
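MFSTARTUP_NOSOCKET is not declared in this excerpt; in mfapi.h it has the value 0x1 (start Media Foundation without the socket/network module), so the class presumably defines something like:

        // Value from mfapi.h; MFSTARTUP_NOSOCKET (0x1) skips initialization
        // of the network/socket plumbing during MFStartup.
        private const int MFSTARTUP_NOSOCKET = 0x1;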
Code Example #6
        private static string GetVideoFormat(SharpDX.MediaFoundation.MediaType mediaType)
        {
            // https://docs.microsoft.com/en-us/windows/desktop/medfound/video-subtype-guids
            var subTypeId     = mediaType.Get(MediaTypeAttributeKeys.Subtype);
            var fourccEncoded = BitConverter.ToInt32(subTypeId.ToByteArray(), 0);
            var fourcc        = new FourCC(fourccEncoded);

            return fourcc.ToString();
        }
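A hedged usage sketch for GetVideoFormat: query a source reader's current media type and print the subtype's FourCC (the sourceReader variable is assumed):

        using (var current = sourceReader.GetCurrentMediaType(SourceReaderIndex.FirstVideoStream))
        {
            // Prints e.g. "NV12" or "YUY2" for YUV formats.
            Console.WriteLine("Video format: " + GetVideoFormat(current));
        }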
Code Example #7
 /// <summary>
 /// Creates a new encoder that encodes to the specified output media type
 /// </summary>
 /// <param name="outputMediaType">Desired output media type</param>
 public SharpMediaFoundationEncoder(MediaType outputMediaType)
 {
     MediaManager.Startup();
     if (outputMediaType == null)
     {
         throw new ArgumentNullException("outputMediaType");
     }
     this.outputMediaType = outputMediaType;
 }
Code Example #8
        /// <summary>
        /// Initializes a new instance of the <see cref="MediaFoundationVideoReader"/> class.
        /// </summary>
        /// <param name="captureDevice">The capture device.</param>
        public MediaFoundationVideoReader(CaptureDeviceInfo captureDevice)
        {
            captureDevice.EnsureNotNullOrDisposed(nameof(captureDevice));

            try
            {
                // Create the source reader
                using (MF.MediaAttributes mediaAttributes = new MF.MediaAttributes(1))
                {
                    // We need the 'EnableVideoProcessing' attribute because of the RGB32 format
                    // see (lowest post): http://msdn.developer-works.com/article/11388495/How+to+use+SourceReader+(for+H.264+to+RGB+conversion)%3F
                    mediaAttributes.Set(MF.SourceReaderAttributeKeys.EnableVideoProcessing, 1);
                    mediaAttributes.Set(MF.SourceReaderAttributeKeys.DisableDxva, 1);
                    mediaAttributes.Set(MF.SourceReaderAttributeKeys.DisconnectMediasourceOnShutdown, 1);

                    // Create the MediaSource object by given capture device
                    using (MF.MediaSource mediaSource = captureDevice.CreateMediaSource())
                    {
                        // Create the source reader
                        m_sourceReader = new MF.SourceReader(mediaSource, mediaAttributes);
                    }
                }


                // Apply source configuration
                using (MF.MediaType mediaType = new MF.MediaType())
                {
                    mediaType.Set(MF.MediaTypeAttributeKeys.MajorType, MF.MediaTypeGuids.Video);
                    mediaType.Set(MF.MediaTypeAttributeKeys.Subtype, MF.VideoFormatGuids.Rgb32);
                    m_sourceReader.SetCurrentMediaType(
                        MF.SourceReaderIndex.FirstVideoStream,
                        mediaType);
                    m_sourceReader.SetStreamSelection(MF.SourceReaderIndex.FirstVideoStream, new SharpDX.Mathematics.Interop.RawBool(true));
                }
                // Read some information about the source
                using (MF.MediaType mediaType = m_sourceReader.GetCurrentMediaType(MF.SourceReaderIndex.FirstVideoStream))
                {
                    long frameSizeLong = mediaType.Get(MF.MediaTypeAttributeKeys.FrameSize);
                    m_frameSize = new Size2(MFHelper.GetValuesByMFEncodedInts(frameSizeLong));
                }

                // Get additional properties
                m_durationLong    = 0;
                m_characteristics = (MediaSourceCharacteristics_Internal)m_sourceReader.GetPresentationAttribute(
                    MF.SourceReaderIndex.MediaSource, MF.SourceReaderAttributeKeys.MediaSourceCharacteristics);
            }
            catch (Exception)
            {
                this.Dispose();
                throw;
            }
        }
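After this constructor runs, frames would typically be pulled from m_sourceReader with ReadSample; a hedged sketch of one read (call names are from SharpDX, the surrounding flow is an assumption):

            int actualStreamIndex;
            MF.SourceReaderFlags readerFlags;
            long timestamp;
            using (MF.Sample sample = m_sourceReader.ReadSample(
                MF.SourceReaderIndex.FirstVideoStream,
                MF.SourceReaderControlFlags.None,
                out actualStreamIndex, out readerFlags, out timestamp))
            {
                if (sample != null)
                {
                    using (MF.MediaBuffer buffer = sample.ConvertToContiguousBuffer())
                    {
                        int maxLength, currentLength;
                        IntPtr dataPtr = buffer.Lock(out maxLength, out currentLength);
                        // ... copy 'currentLength' bytes of RGB32 pixels from 'dataPtr' ...
                        buffer.Unlock();
                    }
                }
            }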
Code Example #9
 protected override void CreateMediaTarget(SinkWriter sinkWriter, Size2 videoPixelSize, out int streamIndex)
 {
     using (MF.MediaType mediaTypeOut = new MF.MediaType())
     {
         mediaTypeOut.Set<Guid>(MF.MediaTypeAttributeKeys.MajorType, MF.MediaTypeGuids.Video);
         mediaTypeOut.Set<Guid>(MF.MediaTypeAttributeKeys.Subtype, VIDEO_ENCODING_FORMAT);
         mediaTypeOut.Set<int>(MF.MediaTypeAttributeKeys.AvgBitrate, Bitrate);
         mediaTypeOut.Set<int>(MF.MediaTypeAttributeKeys.InterlaceMode, (int)MF.VideoInterlaceMode.Progressive);
         mediaTypeOut.Set<long>(MF.MediaTypeAttributeKeys.FrameSize, MFHelper.GetMFEncodedIntsByValues(videoPixelSize.Width, videoPixelSize.Height));
         mediaTypeOut.Set<long>(MF.MediaTypeAttributeKeys.FrameRate, MFHelper.GetMFEncodedIntsByValues(Framerate, 1));
         sinkWriter.AddStream(mediaTypeOut, out streamIndex);
     }
 }
Code Example #10
 /// <summary>
 /// Creates a media target.
 /// </summary>
 /// <param name="sinkWriter">The previously created SinkWriter.</param>
 /// <param name="videoPixelSize">The pixel size of the video.</param>
 /// <param name="streamIndex">The stream index for the new target.</param>
 protected override void CreateMediaTarget(MF.SinkWriter sinkWriter, Size2 videoPixelSize, out int streamIndex)
 {
     using (MF.MediaType mediaTypeOut = new MF.MediaType())
     {
         mediaTypeOut.Set<Guid>(MF.MediaTypeAttributeKeys.MajorType, MF.MediaTypeGuids.Video);
         mediaTypeOut.Set<Guid>(MF.MediaTypeAttributeKeys.Subtype, VIDEO_ENCODING_FORMAT);
         mediaTypeOut.Set<int>(MF.MediaTypeAttributeKeys.AvgBitrate, base.Bitrate * 1000);
         mediaTypeOut.Set<int>(MF.MediaTypeAttributeKeys.InterlaceMode, (int)MF.VideoInterlaceMode.Progressive);
         mediaTypeOut.Set<long>(MF.MediaTypeAttributeKeys.FrameSize, MFHelper.GetMFEncodedIntsByValues(videoPixelSize.Width, videoPixelSize.Height));
         mediaTypeOut.Set<long>(MF.MediaTypeAttributeKeys.FrameRate, MFHelper.GetMFEncodedIntsByValues(base.Framerate, 1));
         sinkWriter.AddStream(mediaTypeOut, out streamIndex);
     }
 }
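VIDEO_ENCODING_FORMAT is a class constant the excerpt does not show; for an MP4 target it is plausibly H.264 (an assumption, not confirmed by the source). Note also the unit difference between the two variants above: example #9 passes Bitrate through unchanged (bits per second), while this one multiplies base.Bitrate by 1000, i.e. it stores the bitrate in kbit/s.

 // Plausible declaration (assumption): H.264 is the usual MP4 video codec.
 private static readonly Guid VIDEO_ENCODING_FORMAT = MF.VideoFormatGuids.H264;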
Code Example #11
File: Transform.cs Project: QuantumDeveloper/SharpDX
        /// <summary>
        /// Gets an available media type for an output stream on this Media Foundation transform (MFT).
        /// </summary>
        /// <param name="dwOutputStreamID">Output stream identifier. To get the list of stream identifiers, call <strong><see cref="SharpDX.MediaFoundation.Transform.GetStreamIDs" /></strong>.</param>
        /// <param name="dwTypeIndex">Index of the media type to retrieve. Media types are indexed from zero and returned in approximate order of preference.</param>
        /// <param name="typeOut">Receives a pointer to the <strong><see cref="SharpDX.MediaFoundation.MediaType" /></strong> interface. The caller must release the interface.</param>
        /// <returns><c>true</c> if A media type for an output stream is available, <c>false</c> otherwise</returns>
        /// <msdn-id>ms703812</msdn-id>	
        /// <unmanaged>HRESULT IMFTransform::GetOutputAvailableType([In] unsigned int dwOutputStreamID,[In] unsigned int dwTypeIndex,[Out] IMFMediaType** ppType)</unmanaged>	
        /// <unmanaged-short>IMFTransform::GetOutputAvailableType</unmanaged-short>	
        public bool TryGetOutputAvailableType(int dwOutputStreamID, int dwTypeIndex, out MediaType typeOut)
        {
            bool mediaTypeAvailable = true;
            var result = GetOutputAvailableType(dwOutputStreamID, dwTypeIndex, out typeOut);

            //An object ran out of media types
            if (result == ResultCode.NoMoreTypes)
            {
                mediaTypeAvailable = false;
            }
            else
            {
                result.CheckError();
            }

            return mediaTypeAvailable;
        }
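A hedged usage sketch: walking every output type an MFT offers with this wrapper until it reports MF_E_NO_MORE_TYPES (the transform variable is assumed):

            int typeIndex = 0;
            MediaType availableType;
            while (transform.TryGetOutputAvailableType(0, typeIndex, out availableType))
            {
                using (availableType)
                {
                    Console.WriteLine("Output type #{0}: {1}",
                        typeIndex, availableType.Get(MediaTypeAttributeKeys.Subtype));
                }
                typeIndex++;
            }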
Code Example #12
        public MediaFoundationAudioWriter(MF.SinkWriter sinkWriter, ref WAVEFORMATEX waveFormat, int desiredBitRate = 192000)
        {
            var sharpWf = waveFormat.ToSharpDX();

            // Information on configuring an AAC media type can be found here:
            // http://msdn.microsoft.com/en-gb/library/windows/desktop/dd742785%28v=vs.85%29.aspx
            var outputMediaType = SelectMediaType(AudioFormat, sharpWf, desiredBitRate);

            if (outputMediaType == null)
            {
                throw new InvalidOperationException("No suitable encoders available");
            }

            var inputMediaType = new MF.MediaType();
            var size           = 18 + sharpWf.ExtraSize;

            sinkWriter.AddStream(outputMediaType, out streamIndex);

            MF.MediaFactory.InitMediaTypeFromWaveFormatEx(inputMediaType, new[] { sharpWf }, size);
            sinkWriter.SetInputMediaType(streamIndex, inputMediaType, null);
        }
Code Example #13
        public MediaFoundationVideoWriter(string filePath, Size2 videoPixelSize, Guid videoInputFormat, bool supportAudio = false)
        {
            bitrate   = 1500000;
            framerate = 15;

            if (!MFInitialized)
            {
                // Initialize MF library. MUST be called before any MF related operations.
                MF.MediaFactory.Startup(MF.MediaFactory.Version, 0);
            }

            sinkWriter = MF.MediaFactory.CreateSinkWriterFromURL(filePath, IntPtr.Zero, null);

            this.videoPixelSize = videoPixelSize;
            CreateMediaTarget(sinkWriter, videoPixelSize, out streamIndex);

            // Configure input
            using (MF.MediaType mediaTypeIn = new MF.MediaType())
            {
                mediaTypeIn.Set<Guid>(MF.MediaTypeAttributeKeys.MajorType, MF.MediaTypeGuids.Video);
                mediaTypeIn.Set<Guid>(MF.MediaTypeAttributeKeys.Subtype, videoInputFormat);
                mediaTypeIn.Set<int>(MF.MediaTypeAttributeKeys.InterlaceMode, (int)MF.VideoInterlaceMode.Progressive);
                mediaTypeIn.Set<long>(MF.MediaTypeAttributeKeys.FrameSize, MFHelper.GetMFEncodedIntsByValues(videoPixelSize.Width, videoPixelSize.Height));
                mediaTypeIn.Set<long>(MF.MediaTypeAttributeKeys.FrameRate, MFHelper.GetMFEncodedIntsByValues(framerate, 1));
                sinkWriter.SetInputMediaType(streamIndex, mediaTypeIn, null);
            }

            if (supportAudio)
            {
                // initialize audio writer
                var waveFormat = WAVEFORMATEX.DefaultPCM;
                audioWriter = new MP3AudioWriter(sinkWriter, ref waveFormat);
            }

            // Start writing the video file. MUST be called before write operations.
            sinkWriter.BeginWriting();

            // Set initial frame index
            frameIndex = -1;
        }
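A hedged usage sketch; the excerpt does not show the class's frame-writing or finalization members, so AddVideoFrame and Finish below are hypothetical stand-ins:

            // Hypothetical usage - 'AddVideoFrame' and 'Finish' stand in for the
            // real per-frame and finalization members, which this excerpt omits.
            var writer = new MediaFoundationVideoWriter(
                @"C:\temp\output.mp4", new Size2(1280, 720), MF.VideoFormatGuids.Rgb32);
            foreach (var frame in frames)   // 'frames' assumed: a sequence of source images
            {
                writer.AddVideoFrame(frame);
            }
            writer.Finish();                // would flush and finalize the sink writer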
Code Example #14
File: MediaFactory.cs Project: alexey-bez/SharpDX
 /// <summary>	
 /// <p><strong>Applies to: </strong>desktop apps only</p><p> </p><p>Creates an activation object for the sample grabber media sink.</p>	
 /// </summary>	
 /// <param name="iMFMediaTypeRef"><dd> <p> Pointer to the <strong><see cref="SharpDX.MediaFoundation.MediaType"/></strong> interface, defining the media type for the sample grabber's input stream. </p> </dd></param>	
 /// <param name="iMFSampleGrabberSinkCallbackRef"><dd> <p> Pointer to the <strong><see cref="SharpDX.MediaFoundation.SampleGrabberSinkCallback"/></strong> interface of a callback object. The caller must implement this interface. </p> </dd></param>	
 /// <param name="iActivateOut"><dd> <p> Receives a reference to the <strong><see cref="SharpDX.MediaFoundation.Activate"/></strong> interface. Use this interface to complete the creation of the sample grabber. The caller must release the interface. </p> </dd></param>	
 /// <returns><p>If this function succeeds, it returns <strong><see cref="SharpDX.Result.Ok"/></strong>. Otherwise, it returns an <strong><see cref="SharpDX.Result"/></strong> error code.</p></returns>	
 /// <remarks>	
 /// <p>To create the sample grabber sink, call <strong><see cref="SharpDX.MediaFoundation.Activate.ActivateObject"/></strong> on the reference received in the <em>ppIActivate</em> parameter.</p><p>Before calling <strong>ActivateObject</strong>, you can configure the sample grabber by setting any of the following attributes on the <em>ppIActivate</em> reference:</p><ul> <li> <see cref="SharpDX.MediaFoundation.SampleGrabberSinkAttributeKeys.IgnoreClock"/> </li> <li> <strong><see cref="SharpDX.MediaFoundation.SampleGrabberSinkAttributeKeys.SampleTimeOffset"/></strong> </li> </ul>	
 /// </remarks>	
 /// <include file='.\..\Documentation\CodeComments.xml' path="/comments/comment[@id='MFCreateSampleGrabberSinkActivate']/*"/>	
 /// <msdn-id>ms702068</msdn-id>	
 /// <unmanaged>HRESULT MFCreateSampleGrabberSinkActivate([In] IMFMediaType* pIMFMediaType,[In] IMFSampleGrabberSinkCallback* pIMFSampleGrabberSinkCallback,[Out] IMFActivate** ppIActivate)</unmanaged>	
 /// <unmanaged-short>MFCreateSampleGrabberSinkActivate</unmanaged-short>	
 public static void CreateSampleGrabberSinkActivate(MediaType mediaType, SampleGrabberSinkCallback callback, out Activate activate)
 {
     MediaFactory.CreateSampleGrabberSinkActivate(mediaType, SampleGrabberSinkCallbackShadow.ToIntPtr(callback), out activate);
 }
Code Example #15
File: Video.WMS.cs Project: KennethYap/MonoGame
        private void PlatformInitialize()
        {
            if (Topology != null)
                return;

            MediaManagerState.CheckStartup();

            MediaFactory.CreateTopology(out _topology);

            SharpDX.MediaFoundation.MediaSource mediaSource;
            {
                SourceResolver resolver;
                MediaFactory.CreateSourceResolver(out resolver);

                ObjectType otype;
                ComObject source;
                resolver.CreateObjectFromURL(FileName, (int)SourceResolverFlags.MediaSource, null, out otype,
                                                out source);
                mediaSource = source.QueryInterface<SharpDX.MediaFoundation.MediaSource>();
                resolver.Dispose();
                source.Dispose();
            }

            PresentationDescriptor presDesc;
            mediaSource.CreatePresentationDescriptor(out presDesc);

            for (var i = 0; i < presDesc.StreamDescriptorCount; i++)
            {
                Bool selected;
                StreamDescriptor desc;
                presDesc.GetStreamDescriptorByIndex(i, out selected, out desc);

                if (selected)
                {
                    TopologyNode sourceNode;
                    MediaFactory.CreateTopologyNode(TopologyType.SourceStreamNode, out sourceNode);

                    sourceNode.Set(TopologyNodeAttributeKeys.Source, mediaSource);
                    sourceNode.Set(TopologyNodeAttributeKeys.PresentationDescriptor, presDesc);
                    sourceNode.Set(TopologyNodeAttributeKeys.StreamDescriptor, desc);

                    TopologyNode outputNode;
                    MediaFactory.CreateTopologyNode(TopologyType.OutputNode, out outputNode);

                    var majorType = desc.MediaTypeHandler.MajorType;
                    if (majorType == MediaTypeGuids.Video)
                    {
                        Activate activate;

                        SampleGrabber = new VideoSampleGrabber();

                        _mediaType = new MediaType();

                        _mediaType.Set(MediaTypeAttributeKeys.MajorType, MediaTypeGuids.Video);

                        // Specify that we want the data to come in as RGB32.
                        _mediaType.Set(MediaTypeAttributeKeys.Subtype, new Guid("00000016-0000-0010-8000-00AA00389B71"));

                        MediaFactory.CreateSampleGrabberSinkActivate(_mediaType, SampleGrabber, out activate);
                        outputNode.Object = activate;
                    }

                    if (majorType == MediaTypeGuids.Audio)
                    {
                        Activate activate;
                        MediaFactory.CreateAudioRendererActivate(out activate);

                        outputNode.Object = activate;
                    }

                    _topology.AddNode(sourceNode);
                    _topology.AddNode(outputNode);
                    sourceNode.ConnectOutput(0, outputNode, 0);

                    sourceNode.Dispose();
                    outputNode.Dispose();
                }

                desc.Dispose();
            }

            presDesc.Dispose();
            mediaSource.Dispose();
        }
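The finished topology still has to be handed to a media session before anything plays; a hedged sketch of that step (the standard Media Foundation playback pattern, names from SharpDX):

            MediaSession session;
            MediaFactory.CreateMediaSession(null, out session);

            // Queue the topology; the session resolves it and raises
            // MESessionTopologyStatus events as it becomes ready.
            session.SetTopology(SessionSetTopologyFlags.Immediate, _topology);

            // An empty variant means "start from the current/default position".
            session.Start(null, new SharpDX.Win32.Variant());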
Code Example #16
File: AudioDecoder.cs Project: Ziriax/SharpDX
        private void Initialize(SourceReader reader)
        {
            // Invalidate selection for all streams
            reader.SetStreamSelection(SourceReaderIndex.AllStreams, false);

            // Select only audio stream
            reader.SetStreamSelection(SourceReaderIndex.FirstAudioStream, true);

            // Get the media type for the current stream.
            using (var mediaType = reader.GetNativeMediaType(SourceReaderIndex.FirstAudioStream, 0))
            {
                var majorType = mediaType.Get(MediaTypeAttributeKeys.MajorType);
                if (majorType != MediaTypeGuids.Audio)
                    throw new InvalidOperationException("Input stream doesn't contain an audio stream.");
            }

            // Set the type on the source reader to use PCM
            using (var partialType = new MediaType())
            {
                partialType.Set(MediaTypeAttributeKeys.MajorType, MediaTypeGuids.Audio);
                partialType.Set(MediaTypeAttributeKeys.Subtype, AudioFormatGuids.Pcm);
                reader.SetCurrentMediaType(SourceReaderIndex.FirstAudioStream, partialType);
            }

            // Retrieve back the real media type
            using (var realMediaType = reader.GetCurrentMediaType(SourceReaderIndex.FirstAudioStream))
            {
                int sizeRef;
                WaveFormat = realMediaType.ExtracttWaveFormat(out sizeRef); // 'ExtracttWaveFormat' (sic) is SharpDX's actual method name
            }

            Duration = new TimeSpan(reader.GetPresentationAttribute(SourceReaderIndex.MediaSource, PresentationDescriptionAttributeKeys.Duration));
        }
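Once configured this way, decoded PCM would be pulled from the reader in a ReadSample loop; a hedged sketch (call names from SharpDX, the loop itself is an assumption):

            int actualIndex;
            SourceReaderFlags flags;
            long timestamp;
            while (true)
            {
                using (var sample = reader.ReadSample(SourceReaderIndex.FirstAudioStream,
                    SourceReaderControlFlags.None, out actualIndex, out flags, out timestamp))
                {
                    if (sample == null || (flags & SourceReaderFlags.Endofstream) != 0)
                        break; // no more PCM data

                    using (var buffer = sample.ConvertToContiguousBuffer())
                    {
                        int maxLength, currentLength;
                        var dataPtr = buffer.Lock(out maxLength, out currentLength);
                        // ... consume 'currentLength' bytes of PCM at 'dataPtr' ...
                        buffer.Unlock();
                    }
                }
            }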
Code Example #17
        public MediaFoundationAudioWriter(MF.SinkWriter sinkWriter, ref WAVEFORMATEX waveFormat, int desiredBitRate = 192000)
        {
            var sharpWf = waveFormat.ToSharpDX();

            // Information on configuring an AAC media type can be found here:
            // http://msdn.microsoft.com/en-gb/library/windows/desktop/dd742785%28v=vs.85%29.aspx
            var outputMediaType = SelectMediaType(AudioFormat, sharpWf, desiredBitRate);
            if (outputMediaType == null) throw new InvalidOperationException("No suitable encoders available");

            var inputMediaType = new MF.MediaType();
            var size = 18 + sharpWf.ExtraSize;

            sinkWriter.AddStream(outputMediaType, out streamIndex);

            MF.MediaFactory.InitMediaTypeFromWaveFormatEx(inputMediaType, new[] { sharpWf }, size);
            sinkWriter.SetInputMediaType(streamIndex, inputMediaType, null);
        }
Code Example #18
        public MediaFoundationVideoWriter(string filePath, Size2 videoPixelSize, Guid videoInputFormat, bool supportAudio = false)
        {
            bitrate = 1500000;
            framerate = 15;

            if (!MFInitialized)
            {
                // Initialize MF library. MUST be called before any MF related operations.
                MF.MediaFactory.Startup(MF.MediaFactory.Version, 0);
            }

            sinkWriter = MF.MediaFactory.CreateSinkWriterFromURL(filePath, IntPtr.Zero, null);

            this.videoPixelSize = videoPixelSize;
            CreateMediaTarget(sinkWriter, videoPixelSize, out streamIndex);

            // Configure input
            using (MF.MediaType mediaTypeIn = new MF.MediaType())
            {
                mediaTypeIn.Set<Guid>(MF.MediaTypeAttributeKeys.MajorType, MF.MediaTypeGuids.Video);
                mediaTypeIn.Set<Guid>(MF.MediaTypeAttributeKeys.Subtype, videoInputFormat);
                mediaTypeIn.Set<int>(MF.MediaTypeAttributeKeys.InterlaceMode, (int)MF.VideoInterlaceMode.Progressive);
                mediaTypeIn.Set<long>(MF.MediaTypeAttributeKeys.FrameSize, MFHelper.GetMFEncodedIntsByValues(videoPixelSize.Width, videoPixelSize.Height));
                mediaTypeIn.Set<long>(MF.MediaTypeAttributeKeys.FrameRate, MFHelper.GetMFEncodedIntsByValues(framerate, 1));
                sinkWriter.SetInputMediaType(streamIndex, mediaTypeIn, null);
            }

            if (supportAudio)
            {
                // initialize audio writer
                var waveFormat = WAVEFORMATEX.DefaultPCM;
                audioWriter = new MP3AudioWriter(sinkWriter, ref waveFormat);
            }

            // Start writing the video file. MUST be called before write operations.
            sinkWriter.BeginWriting();

            // Set initial frame index
            frameIndex = -1;
        }
Code Example #19
File: SourceReader.cs Project: oeoen/SharpDX
 /// <summary>
 /// <p><strong>Applies to: </strong>desktop apps | Metro style apps</p><p>Sets the media type for a stream.</p><p>This media type defines the format that the Source Reader produces as output. It can differ from the native format provided by the media source. See Remarks for more information.</p>
 /// </summary>
 /// <param name="readerIndex">No documentation.</param>
 /// <param name="mediaTypeRef">No documentation.</param>
 /// <returns><p>The method returns an <strong><see cref="SharpDX.Result"/></strong>. Possible values include, but are not limited to, those in the following table.</p><table> <tr><th>Return code</th><th>Description</th></tr> <tr><td> <dl> <dt><strong><strong><see cref="SharpDX.Result.Ok"/></strong></strong></dt> </dl> </td><td> <p>The method succeeded.</p> </td></tr> <tr><td> <dl> <dt><strong><strong>MF_E_INVALIDMEDIATYPE</strong></strong></dt> </dl> </td><td> <p>At least one decoder was found for the native stream type, but the type specified by <em>pMediaType</em> was rejected.</p> </td></tr> <tr><td> <dl> <dt><strong><strong>MF_E_INVALIDREQUEST</strong></strong></dt> </dl> </td><td> <p>One or more sample requests are still pending.</p> </td></tr> <tr><td> <dl> <dt><strong><strong>MF_E_INVALIDSTREAMNUMBER</strong></strong></dt> </dl> </td><td> <p>The <em>dwStreamIndex</em> parameter is invalid.</p> </td></tr> <tr><td> <dl> <dt><strong><strong>MF_E_TOPO_CODEC_NOT_FOUND</strong></strong></dt> </dl> </td><td> <p>Could not find a decoder for the native stream type.</p> </td></tr> </table></returns>
 /// <remarks>
 /// <p>For each stream, you can set the media type to any of the following:</p><ul> <li>One of the native types offered by the media source. To enumerate the native types, call <strong><see cref="SharpDX.MediaFoundation.SourceReader.GetNativeMediaType"/></strong>.</li> <li>If the native media type is compressed, you can specify a corresponding uncompressed format. The Source Reader will search for a decoder that can decode from the native format to the specified uncompressed format.</li> </ul><p>The source reader does not support audio resampling. If you need to resample the audio, you can use the <strong>Audio Resampler DSP</strong>.</p><p>If you set the <see cref="SharpDX.MediaFoundation.SourceReaderAttributeKeys.EnableVideoProcessing"/> attribute to <strong>TRUE</strong> when you create the Source Reader, the Source Reader will convert YUV video to RGB-32. This conversion is not optimized for real-time video playback.</p><p>This interface is available on Windows Vista if Platform Update Supplement for Windows Vista is installed.</p>
 /// </remarks>
 /// <msdn-id>dd374667</msdn-id>
 /// <unmanaged>HRESULT IMFSourceReader::SetCurrentMediaType([In] unsigned int dwStreamIndex,[In] unsigned int* pdwReserved,[In] IMFMediaType* pMediaType)</unmanaged>
 /// <unmanaged-short>IMFSourceReader::SetCurrentMediaType</unmanaged-short>
 public void SetCurrentMediaType(int readerIndex, MediaType mediaTypeRef)
 {
     SetCurrentMediaType(readerIndex, IntPtr.Zero, mediaTypeRef);
 }
Code Example #20
 /// <summary>
 /// Creates a new encoder that encodes to the specified output media type
 /// </summary>
 /// <param name="outputMediaType">Desired output media type</param>
 public SharpMediaFoundationEncoder(MediaType outputMediaType)
 {
     MediaManager.Startup();
     this.outputMediaType = outputMediaType ?? throw new ArgumentNullException(nameof(outputMediaType));
 }
Code Example #21
File: MediaFactory.cs Project: Keldrim/SharpDX
 /// <summary>
 /// <p><strong>Applies to: </strong>desktop apps only</p><p> </p><p>Creates an activation object for the sample grabber media sink.</p>
 /// </summary>
 /// <param name="iMFMediaTypeRef"><dd> <p> Pointer to the <strong><see cref="SharpDX.MediaFoundation.MediaType"/></strong> interface, defining the media type for the sample grabber's input stream. </p> </dd></param>
 /// <param name="iMFSampleGrabberSinkCallbackRef"><dd> <p> Pointer to the <strong><see cref="SharpDX.MediaFoundation.SampleGrabberSinkCallback"/></strong> interface of a callback object. The caller must implement this interface. </p> </dd></param>
 /// <param name="iActivateOut"><dd> <p> Receives a reference to the <strong><see cref="SharpDX.MediaFoundation.Activate"/></strong> interface. Use this interface to complete the creation of the sample grabber. The caller must release the interface. </p> </dd></param>
 /// <returns><p>If this function succeeds, it returns <strong><see cref="SharpDX.Result.Ok"/></strong>. Otherwise, it returns an <strong><see cref="SharpDX.Result"/></strong> error code.</p></returns>
 /// <remarks>
 /// <p>To create the sample grabber sink, call <strong><see cref="SharpDX.MediaFoundation.Activate.ActivateObject"/></strong> on the reference received in the <em>ppIActivate</em> parameter.</p><p>Before calling <strong>ActivateObject</strong>, you can configure the sample grabber by setting any of the following attributes on the <em>ppIActivate</em> reference:</p><ul> <li> <see cref="SharpDX.MediaFoundation.SampleGrabberSinkAttributeKeys.IgnoreClock"/> </li> <li> <strong><see cref="SharpDX.MediaFoundation.SampleGrabberSinkAttributeKeys.SampleTimeOffset"/></strong> </li> </ul>
 /// </remarks>
 /// <include file='.\..\Documentation\CodeComments.xml' path="/comments/comment[@id='MFCreateSampleGrabberSinkActivate']/*"/>
 /// <msdn-id>ms702068</msdn-id>
 /// <unmanaged>HRESULT MFCreateSampleGrabberSinkActivate([In] IMFMediaType* pIMFMediaType,[In] IMFSampleGrabberSinkCallback* pIMFSampleGrabberSinkCallback,[Out] IMFActivate** ppIActivate)</unmanaged>
 /// <unmanaged-short>MFCreateSampleGrabberSinkActivate</unmanaged-short>
 public static void CreateSampleGrabberSinkActivate(MediaType mediaType, SampleGrabberSinkCallback callback, out Activate activate)
 {
     MediaFactory.CreateSampleGrabberSinkActivate(mediaType, SampleGrabberSinkCallbackShadow.ToIntPtr(callback), out activate);
 }
Code Example #22
File: SourceReader.cs Project: oeoen/SharpDX
 /// <summary>
 /// <p><strong>Applies to: </strong>desktop apps | Metro style apps</p><p>Sets the media type for a stream.</p><p>This media type defines the format that the Source Reader produces as output. It can differ from the native format provided by the media source. See Remarks for more information.</p>
 /// </summary>
 /// <param name="readerIndex">No documentation.</param>
 /// <param name="mediaTypeRef">No documentation.</param>
 /// <returns><p>The method returns an <strong><see cref="SharpDX.Result"/></strong>. Possible values include, but are not limited to, those in the following table.</p><table> <tr><th>Return code</th><th>Description</th></tr> <tr><td> <dl> <dt><strong><strong><see cref="SharpDX.Result.Ok"/></strong></strong></dt> </dl> </td><td> <p>The method succeeded.</p> </td></tr> <tr><td> <dl> <dt><strong><strong>MF_E_INVALIDMEDIATYPE</strong></strong></dt> </dl> </td><td> <p>At least one decoder was found for the native stream type, but the type specified by <em>pMediaType</em> was rejected.</p> </td></tr> <tr><td> <dl> <dt><strong><strong>MF_E_INVALIDREQUEST</strong></strong></dt> </dl> </td><td> <p>One or more sample requests are still pending.</p> </td></tr> <tr><td> <dl> <dt><strong><strong>MF_E_INVALIDSTREAMNUMBER</strong></strong></dt> </dl> </td><td> <p>The <em>dwStreamIndex</em> parameter is invalid.</p> </td></tr> <tr><td> <dl> <dt><strong><strong>MF_E_TOPO_CODEC_NOT_FOUND</strong></strong></dt> </dl> </td><td> <p>Could not find a decoder for the native stream type.</p> </td></tr> </table></returns>
 /// <remarks>
 /// <p>For each stream, you can set the media type to any of the following:</p><ul> <li>One of the native types offered by the media source. To enumerate the native types, call <strong><see cref="SharpDX.MediaFoundation.SourceReader.GetNativeMediaType"/></strong>.</li> <li>If the native media type is compressed, you can specify a corresponding uncompressed format. The Source Reader will search for a decoder that can decode from the native format to the specified uncompressed format.</li> </ul><p>The source reader does not support audio resampling. If you need to resample the audio, you can use the <strong>Audio Resampler DSP</strong>.</p><p>If you set the <see cref="SharpDX.MediaFoundation.SourceReaderAttributeKeys.EnableVideoProcessing"/> attribute to <strong>TRUE</strong> when you create the Source Reader, the Source Reader will convert YUV video to RGB-32. This conversion is not optimized for real-time video playback.</p><p>This interface is available on Windows Vista if Platform Update Supplement for Windows Vista is installed.</p>
 /// </remarks>
 /// <msdn-id>dd374667</msdn-id>
 /// <unmanaged>HRESULT IMFSourceReader::SetCurrentMediaType([In] unsigned int dwStreamIndex,[In] unsigned int* pdwReserved,[In] IMFMediaType* pMediaType)</unmanaged>
 /// <unmanaged-short>IMFSourceReader::SetCurrentMediaType</unmanaged-short>
 public void SetCurrentMediaType(SourceReaderIndex readerIndex, MediaType mediaTypeRef)
 {
     SetCurrentMediaType((int)readerIndex, IntPtr.Zero, mediaTypeRef);
 }
Code Example #23
        private void buttonSetup_Click(object sender, EventArgs e)
        {
            logger.Debug("buttonSetup_Click(...)");

            sourceVideoFile = textBox1.Text;

            videoForm = new VideoForm
            {
                BackColor = Color.Black,
                //ClientSize = new Size(sampleArgs.Width, sampleArgs.Height),
                StartPosition = FormStartPosition.CenterScreen,
            };


            videoForm.Visible = true;

            if (presentationClock != null)
            {
                presentationClock.Dispose();
                presentationClock = null;
            }

            MediaFactory.CreatePresentationClock(out presentationClock);

            PresentationTimeSource timeSource = null;

            try
            {
                MediaFactory.CreateSystemTimeSource(out timeSource);
                presentationClock.TimeSource = timeSource;
            }
            finally
            {
                timeSource?.Dispose();
            }


            videoRenderer = new MfVideoRendererEx();

            videoRenderer.Init(videoForm.Handle, presentationClock);



            int sinkRequestSample = 0;
            int count             = 0;

            fileSource = new VideoFileSource();

            fileSource.Setup(sourceVideoFile, videoRenderer.D3DDeviceManager);

            //videoRenderer.Prerolled += () =>
            //{
            //	presentationClock.Start(MfTool.SecToMfTicks(1));

            //};

            fileSource.SampleReady += (flags, sample) =>
            {
                var _flags = (SourceReaderFlags)flags;
                if (_flags == SourceReaderFlags.StreamTick)
                {
                }
                else if (_flags == SourceReaderFlags.Currentmediatypechanged)
                {
                    var newMediaType = fileSource.GetCurrentMediaType();
                    //var log = MfTool.LogMediaType(newMediaType);
                    //Console.WriteLine("================Currentmediatypechanged======================");
                    //Console.WriteLine(log);


                    videoRenderer?.SetMediaType(newMediaType);
                    newMediaType.Dispose();
                }

                if (sample != null)
                {
                    var presentationTime = presentationClock.Time;
                    var sampleTime       = sample.SampleTime;
                    var diff             = sampleTime - presentationTime;
                    if (diff > 0)
                    {
                        var delay = (int)(MfTool.MfTicksToSec(diff) * 1000);
                        Console.WriteLine("Delay " + delay);
                        Thread.Sleep(delay);
                    }

                    sample.SampleTime     = 0;
                    sample.SampleDuration = 0;

                    //videoRenderer.ProcessDxva2Sample(sample);
                    videoRenderer._ProcessDxva2Sample(sample);
                    //videoRenderer.ProcessSample(sample);
                    sample.Dispose();
                    count++;
                }
            };

            fileSource.SourceStopped += () =>
            {
                Console.WriteLine("fileSource.SourceStopped()");
                fileSource.Close();
            };


            var srcMediaType = fileSource.GetCurrentMediaType();
            var subType      = srcMediaType.Get(MediaTypeAttributeKeys.Subtype);
            var frameSize    = srcMediaType.Get(MediaTypeAttributeKeys.FrameSize);
            var frameRate    = srcMediaType.Get(MediaTypeAttributeKeys.FrameRate);

            using (var mediaType = new SharpDX.MediaFoundation.MediaType())
            {
                mediaType.Set(MediaTypeAttributeKeys.MajorType, MediaTypeGuids.Video);
                mediaType.Set(MediaTypeAttributeKeys.Subtype, VideoFormatGuids.NV12);
                mediaType.Set(MediaTypeAttributeKeys.FrameSize, frameSize);

                mediaType.Set(MediaTypeAttributeKeys.InterlaceMode, 2); // 2 = MFVideoInterlace_Progressive
                mediaType.Set(MediaTypeAttributeKeys.AllSamplesIndependent, 1);

                mediaType.Set(MediaTypeAttributeKeys.FrameRate, frameRate);

                videoRenderer.SetMediaType(mediaType);
            }


            videoRenderer.RequestSample += () =>
            {
                //if (presentationClock != null)
                //{
                //	presentationClock.GetState(0, out var clockState);
                //	if(clockState == ClockState.Running)
                //	{
                //		//fileSource.NextSample();
                //	}
                //}

                fileSource.NextSample();

                sinkRequestSample++;
            };

            videoRenderer.RendererStopped += () =>
            {
                videoRenderer.Close();

                GC.Collect();
            };

            videoRenderer.Resize(videoForm.ClientRectangle);


            videoForm.Paint += (o, a) =>
            {
                videoRenderer.Repaint();
            };

            videoForm.SizeChanged += (o, a) =>
            {
                var rect = videoForm.ClientRectangle;

                //Console.WriteLine(rect);
                videoRenderer.Resize(rect);
            };
        }
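MfTool.MfTicksToSec and MfTool.SecToMfTicks are helpers from the sample's own project. Media Foundation timestamps are 100-nanosecond ticks (10,000,000 per second), so plausible implementations look like this:

        internal static class MfTool
        {
            // MF time is counted in 100-ns ticks: 1 second = 10,000,000 ticks.
            private const double TicksPerSecond = 10000000.0;

            public static double MfTicksToSec(long ticks)
            {
                return ticks / TicksPerSecond;
            }

            public static long SecToMfTicks(double seconds)
            {
                return (long)(seconds * TicksPerSecond);
            }
        }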
Code Example #24
        /// <summary>
        /// Initializes a new instance of the <see cref="MediaFoundationVideoReader"/> class.
        /// </summary>
        /// <param name="videoSource">The source video file.</param>
        public MediaFoundationVideoReader(ResourceLink videoSource)
        {
            videoSource.EnsureNotNull(nameof(videoSource));

            try
            {
                m_videoSource = videoSource;

                // Create the source reader
                using (MF.MediaAttributes mediaAttributes = new MF.MediaAttributes(1))
                {
                    // We need the 'EnableVideoProcessing' attribute because of the RGB32 format
                    // see (lowest post): http://msdn.developer-works.com/article/11388495/How+to+use+SourceReader+(for+H.264+to+RGB+conversion)%3F
                    mediaAttributes.Set(MF.SourceReaderAttributeKeys.EnableVideoProcessing, 1);
                    mediaAttributes.Set(MF.SourceReaderAttributeKeys.DisableDxva, 1);

                    // Wrap the .net stream to a MF Bytestream
                    m_videoSourceStreamNet = m_videoSource.OpenInputStream();
                    m_videoSourceStream    = new MF.ByteStream(m_videoSourceStreamNet);
                    try
                    {
                        using (MF.MediaAttributes byteStreamAttributes = m_videoSourceStream.QueryInterface<MF.MediaAttributes>())
                        {
                            byteStreamAttributes.Set(MF.ByteStreamAttributeKeys.OriginName, "Dummy." + videoSource.FileExtension);
                        }
                    }
                    catch (SharpDXException)
                    {
                        // The interface MF.MediaAttributes is not available on some platforms
                        // (occurred during tests on Windows 7 without Platform Update)
                    }

                    // Create the sourcereader by custom native method (needed because of the ByteStream arg)
                    IntPtr         sourceReaderPointer = IntPtr.Zero;
                    SharpDX.Result sdxResult           = NativeMethods.MFCreateSourceReaderFromByteStream_Native(
                        m_videoSourceStream.NativePointer,
                        mediaAttributes.NativePointer,
                        out sourceReaderPointer);
                    sdxResult.CheckError();

                    m_sourceReader = new MF.SourceReader(sourceReaderPointer);
                }

                // Apply source configuration
                using (MF.MediaType mediaType = new MF.MediaType())
                {
                    mediaType.Set(MF.MediaTypeAttributeKeys.MajorType, MF.MediaTypeGuids.Video);
                    mediaType.Set(MF.MediaTypeAttributeKeys.Subtype, MF.VideoFormatGuids.Rgb32);
                    m_sourceReader.SetCurrentMediaType(
                        MF.SourceReaderIndex.FirstVideoStream,
                        mediaType);
                    m_sourceReader.SetStreamSelection(MF.SourceReaderIndex.FirstVideoStream, new SharpDX.Mathematics.Interop.RawBool(true));
                }

                // Read some information about the source
                using (MF.MediaType mediaType = m_sourceReader.GetCurrentMediaType(MF.SourceReaderIndex.FirstVideoStream))
                {
                    long frameSizeLong = mediaType.Get(MF.MediaTypeAttributeKeys.FrameSize);
                    m_frameSize = new Size2(MFHelper.GetValuesByMFEncodedInts(frameSizeLong));
                }

                // Get additional properties
                m_durationLong = m_sourceReader.GetPresentationAttribute(
                    MF.SourceReaderIndex.MediaSource, MF.PresentationDescriptionAttributeKeys.Duration);
                m_characteristics = (MediaSourceCharacteristics_Internal)m_sourceReader.GetPresentationAttribute(
                    MF.SourceReaderIndex.MediaSource, MF.SourceReaderAttributeKeys.MediaSourceCharacteristics);
            }
            catch (Exception)
            {
                this.Dispose();
                throw;
            }
        }
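NativeMethods.MFCreateSourceReaderFromByteStream_Native wraps the MFCreateSourceReaderFromByteStream export of mfreadwrite.dll; a plausible P/Invoke declaration (the project's exact marshalling may differ):

        internal static class NativeMethods
        {
            // HRESULT MFCreateSourceReaderFromByteStream(
            //     IMFByteStream* pByteStream, IMFAttributes* pAttributes,
            //     IMFSourceReader** ppSourceReader);
            [System.Runtime.InteropServices.DllImport("mfreadwrite.dll",
                EntryPoint = "MFCreateSourceReaderFromByteStream")]
            public static extern SharpDX.Result MFCreateSourceReaderFromByteStream_Native(
                IntPtr byteStream,
                IntPtr attributes,
                out IntPtr sourceReader);
        }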
Code Example #25
File: Video.cs Project: demiurghg/FusionEngine
		/// <summary>
		/// 
		/// </summary>
		private void PlatformInitialize( byte[] bytes, Stream stream, string url )
		{
			if (Topology != null) {
				return;
			}

			MediaFactory.CreateTopology(out _topology);

			SharpDX.MediaFoundation.MediaSource mediaSource;
			{
				SourceResolver resolver = new SourceResolver();
				
				ObjectType otype;
				ComObject source = null;

				if (url!=null) {
					source = resolver.CreateObjectFromURL(url, SourceResolverFlags.MediaSource, null, out otype);
				}

				if (stream!=null) {
					var bs = new ByteStream( stream );
					source = resolver.CreateObjectFromStream(bs, null, SourceResolverFlags.MediaSource, null, out otype);
				}

				if (bytes!=null) {
					var bs = new ByteStream( bytes );
					source = resolver.CreateObjectFromStream(bs, null, SourceResolverFlags.MediaSource|SourceResolverFlags.ContentDoesNotHaveToMatchExtensionOrMimeType, null, out otype);
				}

				if (source==null) {
					throw new ArgumentException("'stream' and 'url' are null!");
				}

				mediaSource = source.QueryInterface<SharpDX.MediaFoundation.MediaSource>();

				
				resolver.Dispose();
				source.Dispose();
			}


			PresentationDescriptor presDesc;
			mediaSource.CreatePresentationDescriptor(out presDesc);
			
			for (var i = 0; i < presDesc.StreamDescriptorCount; i++) {

				RawBool selected = false;
				StreamDescriptor desc;
				presDesc.GetStreamDescriptorByIndex(i, out selected, out desc);
				
				if (selected) {

					TopologyNode sourceNode;
					MediaFactory.CreateTopologyNode(TopologyType.SourceStreamNode, out sourceNode);

					sourceNode.Set(TopologyNodeAttributeKeys.Source, mediaSource);
					sourceNode.Set(TopologyNodeAttributeKeys.PresentationDescriptor, presDesc);
					sourceNode.Set(TopologyNodeAttributeKeys.StreamDescriptor, desc);
					

					TopologyNode outputNode;
					MediaFactory.CreateTopologyNode(TopologyType.OutputNode, out outputNode);

					var majorType = desc.MediaTypeHandler.MajorType;
					
					if (majorType == MediaTypeGuids.Video) {

						Activate activate;
						
						sampleGrabber = new VideoSampleGrabber();

						_mediaType = new MediaType();
						
						_mediaType.Set(MediaTypeAttributeKeys.MajorType, MediaTypeGuids.Video);

						// Specify that we want the data to come in as RGB32.
						_mediaType.Set(MediaTypeAttributeKeys.Subtype, new Guid("00000016-0000-0010-8000-00AA00389B71"));

						MediaFactory.CreateSampleGrabberSinkActivate(_mediaType, SampleGrabber, out activate);
						outputNode.Object = activate;


						long frameSize = desc.MediaTypeHandler.CurrentMediaType.Get<long>(MediaTypeAttributeKeys.FrameSize);

						Width	= (int)(frameSize >> 32);
						// Height lives in the low 32 bits; masking with 0x0000FFFF would truncate it to 16 bits.
						Height	= (int)(frameSize & 0xFFFFFFFF);
					}

					if (majorType == MediaTypeGuids.Audio)
					{
						Activate activate;
						MediaFactory.CreateAudioRendererActivate(out activate);

						outputNode.Object = activate;
					}

					_topology.AddNode(sourceNode);
					_topology.AddNode(outputNode);
					sourceNode.ConnectOutput(0, outputNode, 0);
					

					Duration = new TimeSpan(presDesc.Get<long>(PresentationDescriptionAttributeKeys.Duration));
					

					sourceNode.Dispose();
					outputNode.Dispose();
				}

				desc.Dispose();
			}

			presDesc.Dispose();
			mediaSource.Dispose();


			videoFrame = new DynamicTexture(Game.Instance.RenderSystem, Width, Height, typeof(ColorBGRA), false, false);
		}
Code Example #26
File: Video.WMS.cs Project: Annaero/FusionFramework
        private void PlatformInitialize()
        {
            if (Topology != null)
                return;

            //MediaManagerState.CheckStartup();

            MediaFactory.CreateTopology(out _topology);

            SharpDX.MediaFoundation.MediaSource mediaSource;
            {
                SourceResolver resolver = new SourceResolver();

                ObjectType otype;
                ComObject source = resolver.CreateObjectFromURL(FileName, SourceResolverFlags.MediaSource, null, out otype);
                mediaSource = source.QueryInterface<SharpDX.MediaFoundation.MediaSource>();

                resolver.Dispose();
                source.Dispose();
            }

            PresentationDescriptor presDesc;
            mediaSource.CreatePresentationDescriptor(out presDesc);

            for (var i = 0; i < presDesc.StreamDescriptorCount; i++)
            {
                RawBool selected = false;
                StreamDescriptor desc;
                presDesc.GetStreamDescriptorByIndex(i, out selected, out desc);

                if (selected)
                {
                    TopologyNode sourceNode;
                    MediaFactory.CreateTopologyNode(TopologyType.SourceStreamNode, out sourceNode);

                    sourceNode.Set(TopologyNodeAttributeKeys.Source, mediaSource);
                    sourceNode.Set(TopologyNodeAttributeKeys.PresentationDescriptor, presDesc);
                    sourceNode.Set(TopologyNodeAttributeKeys.StreamDescriptor, desc);

                    TopologyNode outputNode;
                    MediaFactory.CreateTopologyNode(TopologyType.OutputNode, out outputNode);

                    var majorType = desc.MediaTypeHandler.MajorType;

                    if (majorType == MediaTypeGuids.Video)
                    {
                        Activate activate;

                        SampleGrabber = new VideoSampleGrabber();

                        _mediaType = new MediaType();

                        _mediaType.Set(MediaTypeAttributeKeys.MajorType, MediaTypeGuids.Video);

                        // Specify that we want the data to come in as RGB32.
                        _mediaType.Set(MediaTypeAttributeKeys.Subtype, new Guid("00000016-0000-0010-8000-00AA00389B71"));

                        MediaFactory.CreateSampleGrabberSinkActivate(_mediaType, SampleGrabber, out activate);
                        outputNode.Object = activate;

                        long frameSize = desc.MediaTypeHandler.CurrentMediaType.Get<long>(MediaTypeAttributeKeys.FrameSize);

                        Width	= (int)(frameSize >> 32);
                        // Height lives in the low 32 bits; masking with 0x0000FFFF would truncate it to 16 bits.
                        Height	= (int)(frameSize & 0xFFFFFFFF);
                    }

                    if (majorType == MediaTypeGuids.Audio)
                    {
                        Activate activate;
                        MediaFactory.CreateAudioRendererActivate(out activate);

                        outputNode.Object = activate;
                    }

                    _topology.AddNode(sourceNode);
                    _topology.AddNode(outputNode);
                    sourceNode.ConnectOutput(0, outputNode, 0);

                    Duration = new TimeSpan(presDesc.Get<long>(PresentationDescriptionAttributeKeys.Duration));

                    sourceNode.Dispose();
                    outputNode.Dispose();
                }

                desc.Dispose();
            }

            presDesc.Dispose();
            mediaSource.Dispose();

            VideoFrame = new Texture2D(Game.Instance.GraphicsDevice, Width, Height, ColorFormat.Bgra8, false);
        }