Example #1
    ///////////////////////////////////////////////////////////////////////
    //  Name: CreateSourceStreamNode
    //  Description:  Creates a source-stream node for a stream.
    //
    //  pSource: Media source.
    //  pSourcePD: Presentation descriptor for the media source.
    //  pSourceSD: Stream descriptor for the stream.
    //  ppNode: Receives a pointer to the new node.
    //
    //  Pre-condition: The media source must already be created.
    /////////////////////////////////////////////////////////////////////////

    void CreateSourceStreamNode(
        IMFMediaSource pSource,
        IMFPresentationDescriptor pSourcePD,
        IMFStreamDescriptor pSourceSD,
        out IMFTopologyNode ppNode
        )
    {
        MFError throwonhr;

        // Create the source-stream node.
        throwonhr = MFExtern.MFCreateTopologyNode(MFTopologyType.SourcestreamNode, out ppNode);

        // Set attribute: Pointer to the media source.
        throwonhr = ppNode.SetUnknown(MFAttributesClsid.MF_TOPONODE_SOURCE, pSource);

        // Set attribute: Pointer to the presentation descriptor.
        throwonhr = ppNode.SetUnknown(MFAttributesClsid.MF_TOPONODE_PRESENTATION_DESCRIPTOR, pSourcePD);

        // Set attribute: Pointer to the stream descriptor.
        throwonhr = ppNode.SetUnknown(MFAttributesClsid.MF_TOPONODE_STREAM_DESCRIPTOR, pSourceSD);
    }
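
For orientation, here is a hedged usage sketch of how a helper like this is typically driven from the presentation descriptor; the pTopology, pSource and pSourcePD variables are assumptions standing in for objects the surrounding code would already hold.

    // Rough usage sketch (pTopology, pSource and pSourcePD assumed to exist).
    int cStreams;
    MFError throwonhr = pSourcePD.GetStreamDescriptorCount(out cStreams);

    for (int i = 0; i < cStreams; i++)
    {
        bool fSelected;
        IMFStreamDescriptor pSD;
        throwonhr = pSourcePD.GetStreamDescriptorByIndex(i, out fSelected, out pSD);

        if (fSelected)
        {
            IMFTopologyNode pSourceNode;
            CreateSourceStreamNode(pSource, pSourcePD, pSD, out pSourceNode);

            // Add the new source node to the topology.
            throwonhr = pTopology.AddNode(pSourceNode);
        }
    }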
Example #2
        // Returns the default stride; if the attribute is absent, attempts to compute it.
        public void GetDefaultStride(out int pnStride)
        {
            int  nStride = 0;
            bool bFailed = false;

            // First try to get it from the attribute.
            try
            {
                HResult hr = GetMediaType().GetUINT32(MFAttributesClsid.MF_MT_DEFAULT_STRIDE, out nStride);
                MFError.ThrowExceptionForHR(hr);
            }
            catch
            {
                bFailed = true;
            }
            if (bFailed)
            {
                // Attribute not set. See if we can calculate the default stride.
                Guid subtype;

                int width  = 0;
                int height = 0;

                // First we need the subtype.
                GetSubType(out subtype);

                // Now we need the image width and height.
                GetFrameDimensions(out width, out height);

                // Now compute the stride for a particular bitmap type
                FourCC  f  = new FourCC(subtype);
                HResult hr = MFExtern.MFGetStrideForBitmapInfoHeader(f.ToInt32(), width, out nStride);
                MFError.ThrowExceptionForHR(hr);

                // Set the attribute for later reference.
                SetDefaultStride(nStride);
            }

            pnStride = nStride;
        }
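
A hedged usage note: the value obtained here can be negative, which in Media Foundation indicates a bottom-up image layout, so callers generally look at the sign as well as the magnitude.

            // Rough usage sketch.
            int stride;
            GetDefaultStride(out stride);
            bool bottomUp = (stride < 0);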
Example #3
    ///////////////////////////////////////////////////////////////////////
    //  Name: CreateVideoBranch
    //  Description:
    //  Adds and connects the nodes downstream from a video source node.
    //
    //  pTopology:      Pointer to the topology.
    //  pSourceNode:    Pointer to the source node.
    //  hVideoWindow:   Handle to the video window.
    //  clsidTransform: CLSID of an effect transform.
    /////////////////////////////////////////////////////////////////////////

    void CreateVideoBranch(
        IMFTopology pTopology,
        IMFTopologyNode pSourceNode,
        IntPtr hVideoWindow,
        Guid clsidTransform   // GUID_NULL = No effect transform.
        )
    {
        TRACE("CreateVideoBranch");

        IMFTopologyNode pOutputNode       = null;
        IMFActivate     pRendererActivate = null;

        // Create a downstream node.
        HResult hr = MFExtern.MFCreateTopologyNode(MFTopologyType.OutputNode, out pOutputNode);

        MFError.ThrowExceptionForHR(hr);

        try
        {
            // Create an IMFActivate object for the video renderer.
            hr = MFExtern.MFCreateVideoRendererActivate(hVideoWindow, out pRendererActivate);
            MFError.ThrowExceptionForHR(hr);

            // Set the IActivate object on the output node.
            hr = pOutputNode.SetObject(pRendererActivate);
            MFError.ThrowExceptionForHR(hr);

            // Add the output node to the topology.
            hr = pTopology.AddNode(pOutputNode);
            MFError.ThrowExceptionForHR(hr);

            // Connect the source to the output.
            ConnectSourceToOutput(pTopology, pSourceNode, pOutputNode, clsidTransform);
        }
        finally
        {
            SafeRelease(pOutputNode);
            SafeRelease(pRendererActivate);
        }
    }
Example #4
        ////////////////////////////////////////////////////////////////////////////////////////
        //  Name: CPlayer::CreateMediaSource (Private)
        //  Description:
        //      Creates a media source from a URL.
        //  Parameters:
        //      sURL: [in] File URL
        //      ppMediaSource: [out] Receives the media source
        /////////////////////////////////////////////////////////////////////////////////////////

        private void CreateMediaSource(
            string sURL,
            out IMFMediaSource ppMediaSource)
        {
            Debug.WriteLine("CPlayer::CreateMediaSource");

            if (sURL == null)
            {
                throw new COMException("null pointer", (int)HResult.E_POINTER);
            }

            HResult           hr;
            IMFSourceResolver pSourceResolver;
            object            pSourceUnk;

            hr = MFExtern.MFCreateSourceResolver(out pSourceResolver);
            MFError.ThrowExceptionForHR(hr);

            try
            {
                // Use the source resolver to create the media source.
                MFObjectType ObjectType = MFObjectType.Invalid;

                hr = pSourceResolver.CreateObjectFromURL(
                    sURL,                          // URL of the source.
                    MFResolution.MediaSource,      // Create a source object.
                    null,                          // Optional property store.
                    out ObjectType,                // Receives the created object type.
                    out pSourceUnk                 // Receives a pointer to the media source.
                    );
                MFError.ThrowExceptionForHR(hr);

                // Get the IMFMediaSource interface from the media source.
                ppMediaSource = (IMFMediaSource)pSourceUnk;
            }
            finally
            {
                SafeRelease(pSourceResolver);
            }
        }
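
A short, hedged usage sketch follows; the file path is purely illustrative, and a real player would normally keep the source in a member field instead of releasing it right away.

            // Rough usage sketch (illustrative path only).
            IMFMediaSource pMediaSource;
            CreateMediaSource(@"C:\media\clip.wmv", out pMediaSource);
            try
            {
                // ... use pMediaSource to build a topology or a source reader ...
            }
            finally
            {
                pMediaSource.Shutdown();
                SafeRelease(pMediaSource);
            }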
Example #5
    public IMFSample CreateSampleAndAllocateTexture()
    {
        IMFSample        pSample;
        IMFTrackedSample pTrackedSample;
        HResult          hr;

        // Create the video sample. This function returns an IMFTrackedSample per MSDN
        hr = MFExtern.MFCreateVideoSampleFromSurface(null, out pSample);
        MFError.ThrowExceptionForHR(hr);
        // Query the IMFSample to see if it implements IMFTrackedSample
        pTrackedSample = pSample as IMFTrackedSample;
        if (pTrackedSample == null)
        {
            // Throw an exception if we didn't get an IMFTrackedSample
            // but this shouldn't happen in practice.
            throw new InvalidCastException("MFCreateVideoSampleFromSurface returned a sample that did not implement IMFTrackedSample");
        }

        // Use our own class to allocate a texture
        SharpDX.Direct3D11.Texture2D availableTexture = AllocateTexture();
        // Convert the texture's native ID3D11Texture2D pointer into
        // an IUnknown (represented as a System.Object)
        object texNativeObject = Marshal.GetObjectForIUnknown(availableTexture.NativePointer);

        // Create the media buffer from the texture
        IMFMediaBuffer p2DBuffer;

        hr = MFExtern.MFCreateDXGISurfaceBuffer(s_IID_ID3D11Texture2D, texNativeObject, 0, false, out p2DBuffer);
        // Release the object-as-IUnknown we created above
        COMBase.SafeRelease(texNativeObject);
        // If media buffer creation failed, throw an exception
        MFError.ThrowExceptionForHR(hr);
        // Set the owning instance of this class as the allocator
        // for IMFTrackedSample to notify when the sample is released
        hr = pTrackedSample.SetAllocator(this, null);
        MFError.ThrowExceptionForHR(hr);
        // Attach the created buffer to the sample
        hr = pSample.AddBuffer(p2DBuffer);
        MFError.ThrowExceptionForHR(hr);
        return pSample;
    }
Example #6
    public CPlayer(IntPtr hVideo, IntPtr hEvent)
    {
        TRACE("CPlayer::CPlayer");

        Debug.Assert(hVideo != IntPtr.Zero);
        Debug.Assert(hEvent != IntPtr.Zero);

        HResult hr;

        m_pSession                  = null;
        m_pSource                   = null;
        m_pVideoDisplay             = null;
        m_hwndVideo                 = hVideo;
        m_hwndEvent                 = hEvent;
        m_state                     = PlayerState.Ready;
        m_pContentProtectionManager = null;

        m_hCloseEvent = new AutoResetEvent(false);

        hr = MFExtern.MFStartup(0x10070, MFStartup.Full);
        MFError.ThrowExceptionForHR(hr);
    }
Example #7
    protected void CreateSourceStreamNode(
        IMFPresentationDescriptor pSourcePD,
        IMFStreamDescriptor pSourceSD,
        out IMFTopologyNode ppNode
        )
    {
        Debug.Assert(m_pSource != null);

        HResult         hr;
        IMFTopologyNode pNode = null;

        try
        {
            // Create the source-stream node.
            hr = MFExtern.MFCreateTopologyNode(MFTopologyType.SourcestreamNode, out pNode);
            MFError.ThrowExceptionForHR(hr);

            // Set attribute: Pointer to the media source.
            hr = pNode.SetUnknown(MFAttributesClsid.MF_TOPONODE_SOURCE, m_pSource);
            MFError.ThrowExceptionForHR(hr);

            // Set attribute: Pointer to the presentation descriptor.
            hr = pNode.SetUnknown(MFAttributesClsid.MF_TOPONODE_PRESENTATION_DESCRIPTOR, pSourcePD);
            MFError.ThrowExceptionForHR(hr);

            // Set attribute: Pointer to the stream descriptor.
            hr = pNode.SetUnknown(MFAttributesClsid.MF_TOPONODE_STREAM_DESCRIPTOR, pSourceSD);
            MFError.ThrowExceptionForHR(hr);

            // Return the IMFTopologyNode pointer to the caller.
            ppNode = pNode;
        }
        catch
        {
            // If we failed, release the pNode
            SafeRelease(pNode);
            throw;
        }
    }
Example #8
        static void Main(string[] args)
        {
            if (args.Length != 2)
            {
                Console.WriteLine("Please specify source directory and destination directory.");
                return;
            }

            // Start up media foundation.
            HResult hr;

            hr = MFExtern.MFStartup(0x10070, MFStartup.Full);
            MFError.ThrowExceptionForHR(hr);

            string sourceDirectory = args[0];
            string destDirectory   = args[1];

            ProcessFiles(sourceDirectory, destDirectory);

            // Shut down media foundation.
            MFExtern.MFShutdown();
        }
Example #9
        private static MFDevice[] GetDevices(Guid filterCategory)
        {
            IMFAttributes pAttributes = null;

            IMFActivate[] ppDevices = null;

            // Create an attribute store to specify the enumeration parameters.
            HResult hr = MFExtern.MFCreateAttributes(out pAttributes, 1);
            MFError.ThrowExceptionForHR(hr);

            //CLSID.MF_DEVSOURCE_ATTRIBUTE_SOURCE_TYPE_VIDCAP_GUID
            hr = pAttributes.SetGUID(MFAttributesClsid.MF_DEVSOURCE_ATTRIBUTE_SOURCE_TYPE, filterCategory);
            MFError.ThrowExceptionForHR(hr);

            int count;

            hr = MFExtern.MFEnumDeviceSources(pAttributes, out ppDevices, out count);
            MFError.ThrowExceptionForHR(hr);

            MFDevice[] devices = new MFDevice[count];

            for (int i = 0; i < count; i++)
            {
                int    ssize        = -1;
                string friendlyname = "";
                hr = ppDevices[i].GetAllocatedString(MFAttributesClsid.MF_DEVSOURCE_ATTRIBUTE_FRIENDLY_NAME, out friendlyname, out ssize);

                int    ssizesym = -1;
                string symlink;
                hr = ppDevices[i].GetAllocatedString(MFAttributesClsid.MF_DEVSOURCE_ATTRIBUTE_SOURCE_TYPE_VIDCAP_SYMBOLIC_LINK, out symlink, out ssizesym);
                //Use this attribute as input to the MFCreateDeviceSourceActivate function.

                devices[i]            = new MFDevice();
                devices[i].Name       = friendlyname;
                devices[i].DevicePath = symlink;
            }

            return(devices);
        }
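
A hedged usage sketch: enumerate the video capture devices and print their names; the capture-category GUID follows the hint in the commented line above, and MFDevice is this sample's own wrapper type.

            // Rough usage sketch.
            MFDevice[] cams = GetDevices(CLSID.MF_DEVSOURCE_ATTRIBUTE_SOURCE_TYPE_VIDCAP_GUID);
            foreach (MFDevice d in cams)
            {
                Console.WriteLine(d.Name + "  (" + d.DevicePath + ")");
            }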
Example #10
 protected void AddAudioInterface()
 {
     if (m_pSession == null || StreamingAudioRenderer == null)
     {
         return; // nothing to do without a session and a renderer
     }
     if (SimpleAudioVolume == null)
     {
         object vol = null;
         //MFExtern.MFGetService(m_pSession as object, MFServices.MR_POLICY_VOLUME_SERVICE, typeof(IMFSimpleAudioVolume).GUID, out vol);
         HResult hr = MFExtern.MFGetService(StreamingAudioRenderer, MFServices.MR_STREAM_VOLUME_SERVICE, typeof(IMFAudioStreamVolume).GUID, out vol);
         MFError.ThrowExceptionForHR(hr);
         if (vol == null)
         {
             throw new Exception("Could not GetService for MR_STREAM_VOLUME_SERVICE");
         }
         SimpleAudioVolume = vol as IMFAudioStreamVolume;
         if (SimpleAudioVolume == null)
         {
             throw new Exception("Could not QI for IMFAudioStreamVolume");
         }
     }
 }
Example #11
        /// <summary>
        /// Load an attribute store from a managed stream.
        /// </summary>
        /// <param name="attributes">A valid IMFAttributes instance.</param>
        /// <param name="stream">A managed stream where the attributes are read from.</param>
        /// <param name="options">One or more values of the MFAttributeSerializeOptions enumeration.</param>
        /// <returns>If this function succeeds, it returns S_OK. Otherwise, it returns an HResult error code that describes the error.</returns>
        public static HResult DeserializeAttributesFromStream(this IMFAttributes attributes, Stream stream, MFAttributeSerializeOptions options = MFAttributeSerializeOptions.None)
        {
            if (attributes == null)
            {
                throw new ArgumentNullException("attributes");
            }

            if (stream == null)
            {
                throw new ArgumentNullException("stream");
            }

            if (!stream.CanRead)
            {
                throw new ArgumentException("stream must be readable.", "stream");
            }

            using (ComCallableStream comStream = new ComCallableStream(stream, false))
            {
                return(MFExtern.MFDeserializeAttributesFromStream(attributes, options, comStream));
            }
        }
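
A hedged usage sketch for the extension method above; the file name is purely illustrative, and the attribute store is created empty before the stream is deserialized into it.

            // Rough usage sketch (illustrative file name).
            IMFAttributes attrs;
            HResult hr = MFExtern.MFCreateAttributes(out attrs, 0);
            MFError.ThrowExceptionForHR(hr);

            using (FileStream fs = File.OpenRead("attributes.bin"))
            {
                hr = attrs.DeserializeAttributesFromStream(fs);
                MFError.ThrowExceptionForHR(hr);
            }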
Example #12
        public Form1()
        {
            InitializeComponent();

            HResult hr = 0;

            // Set the default output file name
            tbOutputFile.Text = "capture.mp4";

            // Register for device notifications as capture devices get added or removed
            m_rdn = new RegisterDeviceNotifications(this.Handle, KSCATEGORY_CAPTURE);

            // Init MF
            hr = MFExtern.MFStartup(0x00020070, MFStartup.Full);

            // Enumerate the video capture devices.
            if (hr >= 0)
            {
                // Populate the device list
                hr = UpdateDeviceList();
            }

            if (hr >= 0)
            {
                // Enable/disable ui controls
                UpdateUI();

                // If there are no capture devices
                if (cbDeviceList.Items.Count == 0)
                {
                    MessageBox.Show(
                        this,
                        "Could not find any video capture devices.",
                        Application.ProductName,
                        MessageBoxButtons.OK
                        );
                }
            }
        }
Example #13
        /////////////////////////////////////////////////////////////////////
        // Name: CreateContentInfo
        //
        // Reads the ASF Header Object from a byte stream and returns a
        // pointer to the ASF content information object.
        //
        // pStream:       Pointer to the byte stream. The byte stream's
        //                current read position must be at the start of the
        //                ASF Header Object.
        // ppContentInfo: Receives a pointer to the ASF content information
        //                object.
        /////////////////////////////////////////////////////////////////////

        void CreateContentInfo(
            IMFByteStream pStream,
            out IMFASFContentInfo ppContentInfo
            )
        {
            HResult hr;
            long    cbHeader = 0;

            const int MIN_ASF_HEADER_SIZE = 30;

            IMFMediaBuffer pBuffer;

            // Create the ASF content information object.
            hr = MFExtern.MFCreateASFContentInfo(out ppContentInfo);
            MFError.ThrowExceptionForHR(hr);

            // Read the first 30 bytes to find the total header size.
            ReadDataIntoBuffer(pStream, MIN_ASF_HEADER_SIZE, out pBuffer);

            try
            {
                hr = ppContentInfo.GetHeaderSize(pBuffer, out cbHeader);
                MFError.ThrowExceptionForHR(hr);

                // Pass the first 30 bytes to the content information object.
                hr = ppContentInfo.ParseHeader(pBuffer, 0);
                MFError.ThrowExceptionForHR(hr);
            }
            finally
            {
                SafeRelease(pBuffer);
            }

            // Read the rest of the header and finish parsing the header.
            ReadDataIntoBuffer(pStream, (int)(cbHeader - MIN_ASF_HEADER_SIZE), out pBuffer);

            try
            {
                hr = ppContentInfo.ParseHeader(pBuffer, MIN_ASF_HEADER_SIZE);
                MFError.ThrowExceptionForHR(hr);
            }
            finally
            {
                // Release the second buffer as well.
                SafeRelease(pBuffer);
            }
        }
Example #14
        private static void WriteAudioFrame(long frameDuration, IMFSinkWriter writer, int audioStreamIndex, long rtStart, byte[] audioFrameBuffer)
        {
            Marshal.ThrowExceptionForHR((int)MFExtern.MFCreateSample(out IMFSample sample));

            try
            {
                Marshal.ThrowExceptionForHR((int)MFExtern.MFCreateMemoryBuffer(audioFrameBuffer.Length, out IMFMediaBuffer buffer));

                try
                {
                    Marshal.ThrowExceptionForHR((int)buffer.Lock(out IntPtr pData, out int maxLength, out int currentLength));

                    try
                    {
                        Marshal.Copy(audioFrameBuffer, 0, pData, audioFrameBuffer.Length);
                    }
                    finally
                    {
                        Marshal.ThrowExceptionForHR((int)buffer.Unlock());
                    }

                    Marshal.ThrowExceptionForHR((int)buffer.SetCurrentLength(audioFrameBuffer.Length));
                    Marshal.ThrowExceptionForHR((int)sample.AddBuffer(buffer));
                }
                finally
                {
                    Marshal.ReleaseComObject(buffer);
                }

                Marshal.ThrowExceptionForHR((int)sample.SetSampleTime(rtStart));
                Marshal.ThrowExceptionForHR((int)sample.SetSampleDuration(frameDuration));
                Marshal.ThrowExceptionForHR((int)writer.WriteSample(audioStreamIndex, sample));
            }
            finally
            {
                Marshal.ReleaseComObject(sample);
            }
        }
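
A hedged usage sketch: feed a sequence of equally sized PCM frames to the sink writer, advancing the timestamp by one frame duration per sample; the frames collection, frameDuration, writer and audioStreamIndex are assumed to come from the surrounding capture and encoding code.

            // Rough usage sketch.
            long rtStart = 0;
            foreach (byte[] frame in frames)
            {
                WriteAudioFrame(frameDuration, writer, audioStreamIndex, rtStart, frame);
                rtStart += frameDuration;
            }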
Example #15
        override protected void OnSetInputType()
        {
            MFError throwonhr;

            m_imageWidthInPixels  = 0;
            m_imageHeightInPixels = 0;
            m_cbImageSize         = 0;
            m_lInputStride        = 0;

            IMFMediaType pmt = InputType;

            // type can be null to clear
            if (pmt != null)
            {
                TraceAttributes(pmt);

                throwonhr = MFExtern.MFGetAttributeSize(pmt, MFAttributesClsid.MF_MT_FRAME_SIZE, out m_imageWidthInPixels, out m_imageHeightInPixels);

                throwonhr = pmt.GetUINT32(MFAttributesClsid.MF_MT_DEFAULT_STRIDE, out m_lInputStride);

                // Calculate the image size (not including padding)
                m_cbImageSize = m_imageHeightInPixels * m_lInputStride;

                // If the output type isn't set yet, we can pre-populate it,
                // since output is based on the input.  This can
                // save a (tiny) bit of time in negotiating types.

                OnSetOutputType();
            }
            else
            {
                // Since the input must be set before the output, nulling the
                // input must also clear the output.  Note that nulling the
                // input is only valid if we are not actively streaming.

                OutputType = null;
            }
        }
Example #16
        /////////////////////////////////////////////////////////////////////
        // Name: ReadDataIntoBuffer
        //
        // Reads data from a byte stream and returns a media buffer that
        // contains the data.
        //
        // pStream: Pointer to the byte stream
        // cbToRead: Number of bytes to read
        // ppBuffer: Receives a pointer to the buffer.
        /////////////////////////////////////////////////////////////////////

        void ReadDataIntoBuffer(
            IMFByteStream pStream,      // Pointer to the byte stream.
            int cbToRead,               // Number of bytes to read
            out IMFMediaBuffer ppBuffer // Receives a pointer to the buffer.
            )
        {
            HResult hr;
            IntPtr  pData;
            int     cbRead; // Actual amount of data read
            int     iMax, iCur;

            // Create the media buffer. This function allocates the memory.
            hr = MFExtern.MFCreateMemoryBuffer(cbToRead, out ppBuffer);
            MFError.ThrowExceptionForHR(hr);

            // Access the buffer.
            hr = ppBuffer.Lock(out pData, out iMax, out iCur);
            MFError.ThrowExceptionForHR(hr);

            try
            {
                // Read the data from the byte stream.
                hr = pStream.Read(pData, cbToRead, out cbRead);
                MFError.ThrowExceptionForHR(hr);
            }
            finally
            {
                hr = ppBuffer.Unlock();
                MFError.ThrowExceptionForHR(hr);
                pData = IntPtr.Zero;
            }

            // Update the size of the valid data.
            hr = ppBuffer.SetCurrentLength(cbRead);
            MFError.ThrowExceptionForHR(hr);
        }
Example #17
    ///////////////////////////////////////////////////////////////////////
    //  Name: CompleteEnable
    //  Description:  Completes the current action.
    //
    //  This method invokes the PMP session's callback.
    /////////////////////////////////////////////////////////////////////////

    public void CompleteEnable()
    {
        HResult hr;

        m_state = Enabler.Complete;

        // m_pCallback can be NULL if BeginEnableContent was not called.
        // This is the case when the application initiates the enable action, e.g.,
        // when MFCreatePMPMediaSession fails and returns an IMFActivate pointer.
        if (m_pCallback != null)
        {
            Debug.WriteLine(string.Format("ContentProtectionManager: Invoking the pipeline's callback. (status = 0x{0})", m_hrStatus));
            IMFAsyncResult pResult;

            hr = MFExtern.MFCreateAsyncResult(null, m_pCallback, m_punkState, out pResult);
            MFError.ThrowExceptionForHR(hr);

            hr = pResult.SetStatus(m_hrStatus);
            MFError.ThrowExceptionForHR(hr);

            hr = MFExtern.MFInvokeCallback(pResult);
            MFError.ThrowExceptionForHR(hr);
        }
    }
Example #18
        public HResult SetVideoType(IMFMediaType pType)
        {
            HResult hr = HResult.S_OK;
            Guid    subtype;
            MFRatio PAR = new MFRatio();

            // Find the video subtype.
            hr = pType.GetGUID(MFAttributesClsid.MF_MT_SUBTYPE, out subtype);
            if (Failed(hr))
            {
                goto done;
            }

            // Choose a conversion function.
            // (This also validates the format type.)

            hr = SetConversionFunction(subtype);
            if (Failed(hr))
            {
                goto done;
            }

            //
            // Get some video attributes.
            //

            // Get the frame size.
            hr = MFExtern.MFGetAttributeSize(pType, MFAttributesClsid.MF_MT_FRAME_SIZE, out m_width, out m_height);
            if (Failed(hr))
            {
                goto done;
            }

            // Get the image stride.
            hr = GetDefaultStride(pType, out m_lDefaultStride);
            if (Failed(hr))
            {
                goto done;
            }

            // Get the pixel aspect ratio. Default: Assume square pixels (1:1)
            hr = MFExtern.MFGetAttributeRatio(pType, MFAttributesClsid.MF_MT_PIXEL_ASPECT_RATIO, out PAR.Numerator, out PAR.Denominator);

            if (Succeeded(hr))
            {
                m_PixelAR = PAR;
            }
            else
            {
                m_PixelAR.Numerator = m_PixelAR.Denominator = 1;
            }

            FourCC f = new FourCC(subtype);

            m_format = (Format)f.ToInt32();

            // Create Direct3D swap chains.

            hr = CreateSwapChains();
            if (Failed(hr))
            {
                goto done;
            }

            // Update the destination rectangle for the correct
            // aspect ratio.

            UpdateDestinationRect();

done:
            if (Failed(hr))
            {
                m_format    = Format.Unknown;
                m_convertFn = null;
            }

            return(hr);
        }
Example #19
        /// <summary>
        ///  Starts the recording process and creates the sink writer. We do not
        ///  check whether the file name is viable or already exists; that is
        ///  assumed to have been done before this call.
        /// </summary>
        /// <param name="outputFileName">the output file name</param>
        /// <param name="incomingVideoMediaType">the incoming media type</param>
        /// <param name="wantTimebaseRebaseIn">if true we rebase all incoming sample
        /// times to zero from the point we started recording and send a copy of the
        /// sample to the sink writer instead of the input sample</param>
        /// <returns>zero on success, non-zero on failure</returns>
        public int StartRecording(string outputFileName, IMFMediaType incomingVideoMediaType, bool wantTimebaseRebaseIn)
        {
            HResult hr;

            LogMessage("MFTSampleGrabber_Sync, StartRecording called");
            // first stop any recordings now
            StopRecording();
            // check the output file name for sanity
            if ((outputFileName == null) || (outputFileName.Length == 0))
            {
                LogMessage("StartRecording (outputFileName==null)|| (outputFileName.Length==0)");
                return(100);
            }
            // check the media type for sanity
            if (incomingVideoMediaType == null)
            {
                LogMessage("StartRecording videoMediaType == null");
                return(150);
            }
            lock (sinkWriterLockObject)
            {
                // create the sink writer
                workingSinkWriter = OpenSinkWriter(outputFileName, true);
                if (workingSinkWriter == null)
                {
                    LogMessage("StartRecording failed to create sink writer");
                    return(200);
                }
                // now configure the SinkWriter. This sets up the sink writer so that it knows what format
                // the output data should be written in. The format we give the writer does not
                // need to be the same as the format it receives as input - however, to make life easier,
                // we will copy a lot of the settings from the incoming video media type.
                // create a new empty media type for us to populate
                hr = MFExtern.MFCreateMediaType(out IMFMediaType encoderType);
                if (hr != HResult.S_OK)
                {
                    // we failed
                    throw new Exception("Failed on call to MFCreateMediaType, retVal=" + hr.ToString());
                }
                // The major type defines the overall category of the media data. Major types include video, audio, script, and so on.
                hr = encoderType.SetGUID(MFAttributesClsid.MF_MT_MAJOR_TYPE, MFMediaType.Video);
                if (hr != HResult.S_OK)
                {
                    // we failed
                    throw new Exception("Failed setting the MF_MT_MAJOR_TYPE, retVal=" + hr.ToString());
                }
                // The subtype GUID defines a specific media format type within a major type. For example, within video,
                // the subtypes include MFMediaType.H264 (MP4), MFMediaType.WMV3 (WMV), MJPEG, and so on. Within audio, the
                // subtypes include PCM audio, Windows Media Audio 9, and so on.
                hr = encoderType.SetGUID(MFAttributesClsid.MF_MT_SUBTYPE, MEDIA_TYPETO_WRITE);
                if (hr != HResult.S_OK)
                {
                    // we failed
                    throw new Exception("Failed setting the MF_MT_SUBTYPE, retVal=" + hr.ToString());
                }
                // this is the approximate data rate of the video stream, in bits per second, for a
                // video media type. The choice here is somewhat arbitrary but seems to work well.
                hr = encoderType.SetUINT32(MFAttributesClsid.MF_MT_AVG_BITRATE, TARGET_BIT_RATE);
                if (hr != HResult.S_OK)
                {
                    // we failed
                    throw new Exception("Failed setting the MF_MT_AVG_BITRATE, retVal=" + hr.ToString());
                }
                // populate our new encoding type with the frame size of the videoType selected earlier
                hr = WMFUtils.CopyAttributeData(incomingVideoMediaType, encoderType, MFAttributesClsid.MF_MT_FRAME_SIZE);
                if (hr != HResult.S_OK)
                {
                    // we failed
                    throw new Exception("Failed copying the MF_MT_FRAME_SIZE, retVal=" + hr.ToString());
                }
                // populate our new encoding type with the frame rate of the video type selected earlier
                hr = WMFUtils.CopyAttributeData(incomingVideoMediaType, encoderType, MFAttributesClsid.MF_MT_FRAME_RATE);
                if (hr != HResult.S_OK)
                {
                    // we failed
                    throw new Exception("Failed copying the MF_MT_FRAME_RATE, retVal=" + hr.ToString());
                }
                // populate our new encoding type with the pixel aspect ratio of the video type selected earlier
                hr = WMFUtils.CopyAttributeData(incomingVideoMediaType, encoderType, MFAttributesClsid.MF_MT_PIXEL_ASPECT_RATIO);
                if (hr != HResult.S_OK)
                {
                    // we failed
                    throw new Exception("Failed copying the MF_MT_PIXEL_ASPECT_RATIO, retVal=" + hr.ToString());
                }
                // populate our new encoding type with the interlace mode of the video type selected earlier
                hr = WMFUtils.CopyAttributeData(incomingVideoMediaType, encoderType, MFAttributesClsid.MF_MT_INTERLACE_MODE);
                if (hr != HResult.S_OK)
                {
                    // we failed
                    throw new Exception("Failed copying the MF_MT_INTERLACE_MODE, retVal=" + hr.ToString());
                }
                // add a stream to the sink writer for the output Media type. The
                // incomingVideoMediaType specifies the format of the samples that will
                // be written to the file. Note that it does not necessarily need to
                // match the input format.
                hr = workingSinkWriter.AddStream(encoderType, out sinkWriterVideoStreamId);
                if (hr != HResult.S_OK)
                {
                    // we failed
                    throw new Exception("StartRecording Failed adding the output stream(v), retVal=" + hr.ToString());
                }
                // Windows 10, by default, provides an adequate set of codecs which the Sink Writer can
                // find to write out the MP4 file. This is not true on Windows 7.
                // If we are not on Windows 10 we register (locally) a codec
                // the Sink Writer can find and use. The ColorConvertDMO is supplied by
                // Microsoft; it is just not available to enumerate on Win7 etc.
                // Making it available locally does not require administrator privileges,
                // but only this process can see it and it disappears when the process
                // closes.
                OperatingSystem os        = Environment.OSVersion;
                int             versionID = ((os.Version.Major * 10) + os.Version.Minor);
                if (versionID < 62)
                {
                    Guid ColorConverterDMOGUID = new Guid("98230571-0087-4204-b020-3282538e57d3");
                    // Register the color converter DSP for this process, in the video
                    // processor category. This will enable the sink writer to enumerate
                    // the color converter when the sink writer attempts to match the
                    // media types.
                    hr = MFExtern.MFTRegisterLocalByCLSID(
                        ColorConverterDMOGUID,
                        MFTransformCategory.MFT_CATEGORY_VIDEO_PROCESSOR,
                        "",
                        MFT_EnumFlag.SyncMFT,
                        0,
                        null,
                        0,
                        null
                        );
                }
                // Set the input format for a stream on the sink writer. Note the use of the stream index here.
                // The input format does not have to match the target format that is written to the media sink.
                // If the formats do not match, this call attempts to load a transform
                // that can convert from the input format to the target format. If it cannot find one (and this is
                // not a sure thing), the call will fail.
                hr = workingSinkWriter.SetInputMediaType(sinkWriterVideoStreamId, incomingVideoMediaType, null);
                if (hr != HResult.S_OK)
                {
                    // we failed
                    throw new Exception("StartRecording Failed on calling SetInputMediaType(v) on the writer, retVal=" + hr.ToString());
                }
                // set this flag now
                wantTimebaseRebase = wantTimebaseRebaseIn;
                // now we initialize the sink writer for writing. We call this method after configuring the
                // input streams but before we send any data to the sink writer. The underlying media sink must
                // have at least one input stream and we know it does because we set it up above
                hr = workingSinkWriter.BeginWriting();
                if (hr != HResult.S_OK)
                {
                    // we failed
                    throw new Exception("StartRecording Failed on calling BeginWriting on the writer, retVal=" + hr.ToString());
                }
            }
            return(0);
        }
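
A hedged usage sketch: the output path is purely illustrative and currentVideoMediaType stands in for the media type negotiated elsewhere; the numeric return code is checked rather than an exception being expected.

            // Rough usage sketch (illustrative path; currentVideoMediaType assumed).
            int ret = StartRecording(@"C:\temp\capture.mp4", currentVideoMediaType, true);
            if (ret != 0)
            {
                LogMessage("StartRecording failed, code=" + ret.ToString());
            }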
Example #20
        /// <summary>
        /// Based on https://docs.microsoft.com/en-us/windows/desktop/medfound/shell-metadata-providers
        /// </summary>
        /// <param name="sender"></param>
        /// <param name="e"></param>
        private void infoToolStripMenuItem_Click(object sender, EventArgs e)
        {
            IMFSourceResolver pSourceResolver = null;
            IMFMediaSource    pSource         = null;
            object            pPropsObject    = null;
            var url = this.listView1.SelectedItems[0].Text;

            try
            {
                // Create the source resolver.
                HResult hr = MFExtern.MFCreateSourceResolver(out pSourceResolver);
                Validate(hr);
                if (pSourceResolver == null)
                {
                    throw new Exception("pSourceResolver is null");
                }
                // Get a pointer to the IMFMediaSource interface of the media source.
                hr = pSourceResolver.CreateObjectFromURL(url, MFResolution.MediaSource, null, out pSource);
                Validate(hr);
                if (pSource == null)
                {
                    throw new Exception("pSource is null");
                }

                hr = MFExtern.MFGetService(pSource, MFServices.MF_PROPERTY_HANDLER_SERVICE, typeof(IPropertyStore).GUID, out pPropsObject);
                Validate(hr);
                if (pPropsObject == null)
                {
                    throw new Exception("pPropsObject is null");
                }
                IPropertyStore pProps = pPropsObject as IPropertyStore;

                hr = pProps.GetCount(out int cProps);
                Validate(hr);

                var audioInfo = new AudioInfo();
                var videoInfo = new VideoInfo();

                for (int i = 0; i < cProps; ++i)
                {
                    var key = new MediaFoundation.Misc.PropertyKey();
                    hr = pProps.GetAt(i, key);
                    Validate(hr);

                    using (PropVariant pv = new PropVariant())
                    {
                        hr = pProps.GetValue(key, pv);
                        Validate(hr);
                        FillAudioProperty(audioInfo, key, pv);
                        FillVideoProperty(videoInfo, key, pv);
                    }
                }
                MessageBox.Show("Audio =\n" + audioInfo + ";\nVideo =\n" + videoInfo);
            }
            finally
            {
                if (pSource != null)
                {
                    Marshal.ReleaseComObject(pSource);
                }
                if (pSourceResolver != null)
                {
                    Marshal.ReleaseComObject(pSourceResolver);
                }
                if (pPropsObject != null)
                {
                    Marshal.ReleaseComObject(pPropsObject);
                }
            }
            // MessageBox.Show(url);
        }
Example #21
 /// <summary>
 /// Creates a new instance of the MFPlay player object.
 /// </summary>
 /// <param name="url">A string that contains the URL of a media file to open.</param>
 /// <param name="startPlayback">If true, playback starts automatically. If false, playback does not start until the application calls IMFMediaPlayer.Play.</param>
 /// <param name="creationOptions">Bitwise OR of zero of more flags from the MFP_CREATION_OPTIONS enumeration.</param>
 /// <param name="callback">An instance of the IMFPMediaPlayerCallback interface of a callback object, implemented by the application.</param>
 /// <param name="hWnd">A handle to a window where the video will appear. For audio-only playback, this parameter can be IntPtr.Zero.</param>
 /// <param name="mediaPlayer">Receives an instance of to the IMFPMediaPlayer interface.</param>
 /// <returns>The HResult returned by MFPCreateMediaPlayer.</returns>
 public static HResult CreateMediaPlayer(string url, bool startPlayback, MFP_CREATION_OPTIONS creationOptions, IMFPMediaPlayerCallback callback, IntPtr hWnd, out IMFPMediaPlayer mediaPlayer)
 {
     return(MFExtern.MFPCreateMediaPlayer(url, startPlayback, creationOptions, callback, hWnd, out mediaPlayer));
 }
Example #22
        protected void CreateOutputNode(
            IMFStreamDescriptor pSourceSD,
            out IMFTopologyNode ppNode
            )
        {
            IMFTopologyNode     pNode             = null;
            IMFMediaTypeHandler pHandler          = null;
            IMFActivate         pRendererActivate = null;

            Guid    guidMajorType = Guid.Empty;
            MFError throwonhr;

            // Get the stream ID.
            int streamID = 0;

            try
            {
                HResult hr;

                hr = pSourceSD.GetStreamIdentifier(out streamID); // Just for debugging, ignore any failures.
                if (MFError.Failed(hr))
                {
                    //TRACE("IMFStreamDescriptor::GetStreamIdentifier" + hr.ToString());
                }

                // Get the media type handler for the stream.
                throwonhr = pSourceSD.GetMediaTypeHandler(out pHandler);

                // Get the major media type.
                throwonhr = pHandler.GetMajorType(out guidMajorType);

                // Create a downstream node.
                throwonhr = MFExtern.MFCreateTopologyNode(MFTopologyType.OutputNode, out pNode);

                // Create an IMFActivate object for the renderer, based on the media type.
                if (MFMediaType.Audio == guidMajorType)
                {
                    // Create the audio renderer.
                    //TRACE(string.Format("Stream {0}: audio stream", streamID));
                    throwonhr = MFExtern.MFCreateAudioRendererActivate(out pRendererActivate);

                    // Set the IActivate object on the output node.
                    throwonhr = pNode.SetObject(pRendererActivate);
                }
                else if (MFMediaType.Video == guidMajorType)
                {
                    // Create the video renderer.
                    //TRACE(string.Format("Stream {0}: video stream", streamID));
                    //throwonhr = MFExtern.MFCreateVideoRendererActivate(m_hwndVideo, out pRendererActivate);

                    // IMFTopologyNode.GetObject returns the stored object as an IUnknown,
                    // so retrieve it as an object and cast to the renderer's IMFActivate.
                    object oRenderer;
                    throwonhr = mIMFTopologyNode.GetObject(out oRenderer);
                    pRendererActivate = (IMFActivate)oRenderer;

                    throwonhr = pNode.SetObject(pRendererActivate);
                }
                else
                {
                    //TRACE(string.Format("Stream {0}: Unknown format", streamID));
                    //throw new COMException("Unknown format", (int)HResult.E_FAIL);
                }


                // Return the IMFTopologyNode pointer to the caller.
                ppNode = pNode;
            }
            catch
            {
                // If we failed, release the pNode
                //SafeRelease(pNode);
                throw;
            }
            finally
            {
                // Clean up.
                //SafeRelease(pHandler);
                //SafeRelease(pRendererActivate);
            }
        }
Example #23
        override protected void OnSetInputType()
        {
            MFError throwonhr;

            m_imageWidthInPixels  = 0;
            m_imageHeightInPixels = 0;
            m_cbImageSize         = 0;
            m_lStrideIfContiguous = 0;

            IMFMediaType pmt = InputType;

            // type can be null to clear
            if (pmt != null)
            {
                throwonhr = MFExtern.MFGetAttributeSize(pmt, MFAttributesClsid.MF_MT_FRAME_SIZE, out m_imageWidthInPixels, out m_imageHeightInPixels);

                throwonhr = pmt.GetUINT32(MFAttributesClsid.MF_MT_DEFAULT_STRIDE, out m_lStrideIfContiguous);

                // Calculate the image size (not including padding)
                m_cbImageSize = m_imageHeightInPixels * m_lStrideIfContiguous;

                float fSize;

                // scale the font size in some portion to the video image
                fSize  = 9;
                fSize *= (m_imageWidthInPixels / 64.0f);

                if (m_fontOverlay != null)
                {
                    m_fontOverlay.Dispose();
                }

                m_fontOverlay = new Font("Times New Roman", fSize, System.Drawing.FontStyle.Bold,
                                         System.Drawing.GraphicsUnit.Point);

                // scale the font size in some portion to the video image
                fSize  = 5;
                fSize *= (m_imageWidthInPixels / 64.0f);

                if (m_transparentFont != null)
                {
                    m_transparentFont.Dispose();
                }

                m_transparentFont = new Font("Tahoma", fSize, System.Drawing.FontStyle.Bold, System.Drawing.GraphicsUnit.Point);

                // If the output type isn't set yet, we can pre-populate it,
                // since output must always exactly equal input.  This can
                // save a (tiny) bit of time in negotiating types.

                OnSetOutputType();
            }
            else
            {
                // Since the input must be set before the output, nulling the
                // input must also clear the output.  Note that nulling the
                // input is only valid if we are not actively streaming.

                OutputType = null;
            }
        }
Example #24
        //-------------------------------------------------------------------
        // SetDevice
        //
        // Set up preview for a specified video capture device.
        //-------------------------------------------------------------------

        public HResult SetDevice(MFDevice pDevice)
        {
            HResult hr = HResult.S_OK;

            IMFActivate    pActivate   = pDevice.Activator;
            IMFMediaSource pSource     = null;
            IMFAttributes  pAttributes = null;
            object         o           = null;

            lock (this)
            {
                try
                {
                    // Release the current device, if any.
                    hr = CloseDevice();

                    if (Succeeded(hr))
                    {
                        // Create the media source for the device.
                        hr = pActivate.ActivateObject(typeof(IMFMediaSource).GUID, out o);
                    }

                    if (Succeeded(hr))
                    {
                        pSource = (IMFMediaSource)o;
                    }

                    // Get Symbolic device link
                    m_pwszSymbolicLink = pDevice.SymbolicName;

                    //
                    // Create the source reader.
                    //

                    // Create an attribute store to hold initialization settings.

                    if (Succeeded(hr))
                    {
                        hr = MFExtern.MFCreateAttributes(out pAttributes, 2);
                    }

                    if (Succeeded(hr))
                    {
                        hr = pAttributes.SetUINT32(MFAttributesClsid.MF_READWRITE_DISABLE_CONVERTERS, 1);
                    }

                    if (Succeeded(hr))
                    {
                        hr = pAttributes.SetUnknown(MFAttributesClsid.MF_SOURCE_READER_ASYNC_CALLBACK, this);
                    }

                    IMFSourceReader pRead = null;
                    if (Succeeded(hr))
                    {
                        hr = MFExtern.MFCreateSourceReaderFromMediaSource(pSource, pAttributes, out pRead);
                    }

                    if (Succeeded(hr))
                    {
                        m_pReader = (IMFSourceReaderAsync)pRead;
                    }

                    if (Succeeded(hr))
                    {
                        // Try to find a suitable output type.
                        for (int i = 0; ; i++)
                        {
                            IMFMediaType pType;
                            hr = m_pReader.GetNativeMediaType((int)MF_SOURCE_READER.FirstVideoStream, i, out pType);
                            if (Failed(hr))
                            {
                                break;
                            }

                            try
                            {
                                hr = TryMediaType(pType);
                                if (Succeeded(hr))
                                {
                                    // Found an output type.
                                    break;
                                }
                            }
                            finally
                            {
                                SafeRelease(pType);
                            }
                        }
                    }

                    if (Succeeded(hr))
                    {
                        // Ask for the first sample.
                        hr = m_pReader.ReadSample((int)MF_SOURCE_READER.FirstVideoStream, 0, IntPtr.Zero, IntPtr.Zero, IntPtr.Zero, IntPtr.Zero);
                    }

                    if (Failed(hr))
                    {
                        if (pSource != null)
                        {
                            pSource.Shutdown();

                            // NOTE: The source reader shuts down the media source
                            // by default, but we might not have gotten that far.
                        }
                        CloseDevice();
                    }
                }
                finally
                {
                    SafeRelease(pSource);
                    SafeRelease(pAttributes);
                }
            }

            return(hr);
        }
Example #25
        /// <summary>Processes a block of audio data.</summary>
        /// <param name="pbDest">Destination buffer.</param>
        /// <param name="pbInputData">Buffer that contains the input data.</param>
        /// <param name="dwQuanta">Number of audio samples to process.</param>
        /// <returns></returns>
        unsafe private void ProcessAudio(IntPtr pbDest, IntPtr pbInputData, int dwQuanta)
        {
            int nWet = 0;  // Wet portion of wet/dry mix
            int sample = 0, channel = 0, cChannels = 0;

            cChannels = m_NumChannels;

            // Get the wet/dry mix.
            nWet = MFExtern.MFGetAttributeUINT32(Attributes, MF_AUDIODELAY_WET_DRY_MIX, DEFAULT_WET_DRY_MIX);

            // Clip the value to [0...100]
            nWet = Math.Min(nWet, 100);

            if (m_BitsPerSample == 8)
            {
                for (sample = 0; sample < dwQuanta; sample++)
                {
                    for (channel = 0; channel < cChannels; channel++)
                    {
                        // 8-bit sound is 0..255 with 128 == silence

                        // Get the input sample and normalize to -128 .. 127
                        int i = ((byte *)pbInputData)[sample * cChannels + channel] - 128;

                        // Get the delay sample and normalize to -128 .. 127
                        int delay = ((byte *)m_pbDelayPtr)[0] - 128;

                        ((byte *)m_pbDelayPtr)[0] = (byte)(i + 128);
                        IncrementDelayPtr(sizeof(byte));

                        i = (i * (100 - nWet)) / 100 + (delay * nWet) / 100;

                        // Truncate
                        if (i > 127)
                        {
                            i = 127;
                        }
                        else if (i < -128)
                        {
                            i = -128;
                        }

                        ((byte *)pbDest)[sample * cChannels + channel] = (byte)(i + 128);
                    }
                }
            }
            else  // 16-bit
            {
                for (sample = 0; sample < dwQuanta; sample++)
                {
                    for (channel = 0; channel < cChannels; channel++)
                    {
                        int i = ((short *)pbInputData)[sample * cChannels + channel];

                        int delay = ((short *)m_pbDelayPtr)[0];

                        ((short *)m_pbDelayPtr)[0] = (short)(i);
                        IncrementDelayPtr(sizeof(short));

                        i = (i * (100 - nWet)) / 100 + (delay * nWet) / 100;

                        // Truncate
                        if (i > 32767)
                        {
                            i = 32767;
                        }
                        else if (i < -32768)
                        {
                            i = -32768;
                        }

                        ((short *)pbDest)[sample * cChannels + channel] = (short)i;
                    }
                }
            }
        }
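
A quick worked example of the wet/dry mix above: with nWet = 25, a dry sample of 100 and a delayed sample of -40 combine as (100 * 75) / 100 + (-40 * 25) / 100 = 75 - 10 = 65, so one quarter of the delayed signal is blended into the output.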
Example #26
        /// <summary>Generates the "tail" of the audio effect.</summary>
        /// <param name="InputMessageNumber">Message number to use with OutputSample.</param>
        /// <remarks>
        /// Generates the "tail" of the audio effect. The tail is the portion
        /// of the delay effect that is heard after the input stream ends.
        ///
        /// To generate the tail, the client must drain the MFT by sending
        /// the MFT_MESSAGE_COMMAND_DRAIN message and then call ProcessOutput
        /// to get the tail samples.
        /// </remarks>
        private void ProcessEffectTail(int InputMessageNumber)
        {
            IMFMediaBuffer pOutputBuffer = null;

            MFError throwonhr;
            IntPtr  pbOutputData     = IntPtr.Zero; // Pointer to the memory in the output buffer.
            int     cbOutputLength   = 0;           // Size of the output buffer.
            int     cbBytesProcessed = 0;           // How much data we processed.

            IMFSample pOutSample = null;

            // Allocate an output buffer.
            throwonhr = MFExtern.MFCreateMemoryBuffer(m_cbTailSamples, out pOutputBuffer);

            try
            {
                throwonhr = MFExtern.MFCreateSample(out pOutSample);
                throwonhr = pOutSample.AddBuffer(pOutputBuffer);

                // Lock the output buffer.
                int cb;
                throwonhr = pOutputBuffer.Lock(out pbOutputData, out cbOutputLength, out cb);

                // Calculate how many audio samples we can process.
                cbBytesProcessed = Math.Min(m_cbTailSamples, cbOutputLength);

                // Round to the next lowest multiple of nBlockAlign.
                cbBytesProcessed -= (cbBytesProcessed % m_Alignment);

                // Fill the output buffer with silence, because we are also using it as the input buffer.
                FillBufferWithSilence(pbOutputData, cbBytesProcessed);

                // Process the data.
                ProcessAudio(pbOutputData, pbOutputData, cbBytesProcessed / m_Alignment);

                // Set the data length on the output buffer.
                throwonhr = pOutputBuffer.SetCurrentLength(cbBytesProcessed);

                if (m_rtTimestamp >= 0)
                {
                    long hnsDuration = (cbBytesProcessed / m_AvgBytesPerSec) * UNITS;

                    // Set the time stamp and duration on the output sample.
                    throwonhr = pOutSample.SetSampleTime(m_rtTimestamp);
                    throwonhr = pOutSample.SetSampleDuration(hnsDuration);
                }

                // Done.
                m_cbTailSamples = 0;

                OutputSample(pOutSample, InputMessageNumber);
            }
            catch
            {
                SafeRelease(pOutSample);
                throw;
            }
            finally
            {
                if (pbOutputData != IntPtr.Zero)
                {
                    pOutputBuffer.Unlock();
                }
                SafeRelease(pOutputBuffer);
            }
        }
Example #27
        public void DoSplit()
        {
            HResult hr;
            bool    bHasVideo = false;

            IMFByteStream     pStream      = null;
            IMFASFContentInfo pContentInfo = null;
            IMFASFSplitter    pSplitter    = null;

            Console.WriteLine(string.Format("Opening {0}.", m_sFileName));

            try
            {
                // Start the Media Foundation platform.
                hr = MFExtern.MFStartup(0x10070, MFStartup.Full);
                MFError.ThrowExceptionForHR(hr);

                // Open the file.
                OpenFile(m_sFileName, out pStream);

                // Read the ASF header.
                CreateContentInfo(pStream, out pContentInfo);

                // Create the ASF splitter.
                CreateASFSplitter(pContentInfo, out pSplitter);

                // Select the first video stream.
                SelectVideoStream(pContentInfo, pSplitter, out bHasVideo);

                // Parse the ASF file.
                if (bHasVideo)
                {
                    DisplayKeyFrames(pStream, pSplitter);
                }
                else
                {
                    Console.WriteLine("No video stream.");
                }
            }
            catch (Exception e)
            {
                hr = (HResult)Marshal.GetHRForException(e);
                string s = MFError.GetErrorText(hr);

                if (s == null)
                {
                    s = e.Message;
                }
                else
                {
                    s = string.Format("{0} ({1})", s, e.Message);
                }

                Console.WriteLine(string.Format("Exception 0x{0:x}: {1}", hr, s));
            }
            finally
            {
                // Clean up.
                SafeRelease(pSplitter);
                SafeRelease(pContentInfo);
                SafeRelease(pStream);
            }

            // Shut down the Media Foundation platform.
            hr = MFExtern.MFShutdown();
            MFError.ThrowExceptionForHR(hr);
        }
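        // OpenFile is called above but is not shown in this excerpt. A minimal
        // sketch of what it might look like, assuming it simply wraps
        // MFExtern.MFCreateFile to produce a read-only byte stream; the real
        // helper may differ.
        private static void OpenFile(string sFileName, out IMFByteStream ppStream)
        {
            // Open the file for reading and fail if it does not exist.
            HResult hr = MFExtern.MFCreateFile(
                MFFileAccessMode.Read,
                MFFileOpenMode.FailIfNotExist,
                MFFileFlags.None,
                sFileName,
                out ppStream);
            MFError.ThrowExceptionForHR(hr);
        }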
Example #28
        /// +=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=
        /// <summary>
        /// Constructor
        /// </summary>
        /// <history>
        ///    01 Nov 18  Cynic - Originally Written
        /// </history>
        public frmMain()
        {
            bool    retBOOL = false;
            HResult hr      = 0;

            if (DesignMode == false)
            {
                // set the current directory to the exe directory. We do this because
                // people can start the app from a shortcut, and if the start-in directory
                // is not right the log file can end up in strange places
                Directory.SetCurrentDirectory(Application.StartupPath);

                // set up the Singleton g_Logger instance. Simply referencing it, as in
                // the test below, creates it.
                if (g_Logger == null)
                {
                    // it did not initialize; nothing will work, so say so now in a generic way
                    MessageBox.Show("Logger Class Failed to Initialize. Nothing will work well.");
                    return;
                }
                // record this in the logger for everybody's use
                g_Logger.ApplicationMainForm   = this;
                g_Logger.DefaultDialogBoxTitle = APPLICATION_NAME;
                try
                {
                    // set the icon for this form and for all subsequent forms
                    g_Logger.AppIcon = new Icon(GetType(), "App.ico");
                    this.Icon        = new Icon(GetType(), "App.ico");
                }
                catch (Exception)
                {
                }

                // Register the global error handler as soon as we can in Main
                // to make sure that we catch as many exceptions as possible.
                // This is a last resort; all exceptions should really be trapped
                // and handled by the code.
                OISGlobalExceptions ex1 = new OISGlobalExceptions();
                Application.ThreadException += new ThreadExceptionEventHandler(ex1.OnThreadException);

                // set the culture so our numbers convert consistently
                System.Threading.Thread.CurrentThread.CurrentCulture = g_Logger.GetDefaultCulture();
            }

            InitializeComponent();

            if (DesignMode == false)
            {
                // set up our logging
                retBOOL = g_Logger.InitLogging(DEFAULTLOGDIR, APPLICATION_NAME, false);
                if (retBOOL == false)
                {
                    // it did not work; no log will be written, so say so now in a generic way
                    MessageBox.Show("The log file could not be created. No log file will be recorded.");
                }
                // pump out the header
                g_Logger.EmitStandardLogfileheader(APPLICATION_NAME);
                LogMessage("");
                LogMessage("Version: " + APPLICATION_VERSION);
                LogMessage("");

                // we always have to initialize MF. The 0x00020070 here is the MF_VERSION value
                // used by the MF.Net samples (SDK version 0x0002, API version 0x0070, i.e. Windows 7 and later)
                hr = MFExtern.MFStartup(0x00020070, MFStartup.Full);
                if (hr != 0)
                {
                    LogMessage("Constructor: call to MFExtern.MFStartup returned " + hr.ToString());
                }

                // some initial configuration
                this.textBoxVideoFileNameAndPath.Text         = INITIALFILE;
                ctlTantaEVRFilePlayer1.VideoFileAndPathToPlay = INITIALFILE;

                // set our heartbeat going
                LaunchHeartBeat();
            }
        }
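        // The MFStartup call above is presumably balanced by an MFShutdown when the
        // form closes. A minimal sketch, assuming a FormClosing handler is used; the
        // real form may perform this cleanup elsewhere.
        private void frmMain_FormClosing(object sender, FormClosingEventArgs e)
        {
            // Shut down the Media Foundation platform to balance MFStartup.
            HResult hr = MFExtern.MFShutdown();
            if (hr != 0)
            {
                LogMessage("FormClosing: call to MFExtern.MFShutdown returned " + hr.ToString());
            }
        }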
Example #29
        //-----------------------------------------------------------------------------
        // PresentSample
        //
        // Presents a video frame.
        //
        // pSample:  Pointer to the sample that contains the surface to present. If
        //           this parameter is NULL, the method paints a black rectangle.
        // llTarget: Target presentation time.
        //
        // This method is called by the scheduler and/or the presenter.
        //-----------------------------------------------------------------------------

        public void PresentSample(IMFSample pSample, long llTarget)
        {
            HResult             hr;
            IMFMediaBuffer      pBuffer    = null;
            IDirect3DSurface9   pSurface   = null;
            IDirect3DSwapChain9 pSwapChain = null;
            object o;

            try
            {
                if (pSample != null)
                {
                    // Get the buffer from the sample.
                    hr = pSample.GetBufferByIndex(0, out pBuffer);
                    MFError.ThrowExceptionForHR(hr);

                    // Get the surface from the buffer.
                    hr = MFExtern.MFGetService(pBuffer, MFServices.MR_BUFFER_SERVICE, typeof(IDirect3DSurface9).GUID, out o);
                    MFError.ThrowExceptionForHR(hr);
                    pSurface = o as IDirect3DSurface9;
                }
                else if (m_pSurfaceRepaint != null)
                {
                    // Redraw from the last surface.
                    pSurface = m_pSurfaceRepaint;
                }

                if (pSurface != null)
                {
                    // Get the swap chain from the surface.
                    pSurface.GetContainer(typeof(IDirect3DSwapChain9).GUID, out o);
                    pSwapChain = o as IDirect3DSwapChain9;

                    // Present the swap chain.
                    PresentSwapChain(pSwapChain, pSurface);

                    // Store this pointer in case we need to repaint the surface.
                    if (m_pSurfaceRepaint != pSurface)
                    {
                        SafeRelease(m_pSurfaceRepaint);
                        m_pSurfaceRepaint = pSurface;
                    }
                }
                else
                {
                    // No surface. All we can do is paint a black rectangle.
                    PaintFrameWithGDI();
                }
            }
            catch (Exception e)
            {
                hr = (HResult)Marshal.GetHRForException(e);
                if (hr == (HResult)D3DError.DeviceLost || hr == (HResult)D3DError.DeviceNotReset || hr == (HResult)D3DError.DeviceHung)
                {
                    // We failed because the device was lost. Fill the destination rectangle.
                    PaintFrameWithGDI();

                    // Ignore. We need to reset or re-create the device, but this method
                    // is probably being called from the scheduler thread, which is not the
                    // same thread that created the device. The Reset(Ex) method must be
                    // called from the thread that created the device.

                    // The presenter will detect the state when it calls CheckDeviceState()
                    // on the next sample.
                }
                else
                {
                    // Any other failure is unexpected; let it propagate to the caller.
                    throw;
                }
            }
            finally
            {
                SafeRelease(pSwapChain); pSwapChain = null;
                // pSurface is deliberately not released here; it is retained as m_pSurfaceRepaint.
                //SafeRelease(pSurface); pSurface = null;
                SafeRelease(pBuffer); pBuffer = null;
            }
        }
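        // PaintFrameWithGDI is called above but is not shown in this excerpt. A
        // minimal sketch, assuming the presenter keeps the video window handle in
        // a field (m_hwnd here is a hypothetical name) and simply fills the client
        // area with black via System.Drawing; the real helper may differ.
        private void PaintFrameWithGDI()
        {
            using (Graphics g = Graphics.FromHwnd(m_hwnd))
            {
                // No Direct3D surface is available (or the device is lost), so
                // paint the whole visible client area black with GDI+.
                g.FillRectangle(Brushes.Black, g.VisibleClipBounds);
            }
        }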
Example #30
        /// +=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=
        /// <summary>
        /// Displays the transforms for the currently selected category. Since each
        /// transform can belong to more than one sub-category, duplicates are merged
        /// and the sub-category flags are combined on a single entry.
        /// </summary>
        /// <history>
        ///    01 Nov 18  Cynic - Started
        /// </history>
        private void DisplayTransformsForCurrentCategory()
        {
            int numResults;

            IMFActivate[] activatorArray;
            List <TantaMFTCapabilityContainer> transformList = new List <TantaMFTCapabilityContainer>();
            HResult hr;

            try
            {
                // clear what we have now
                listViewAvailableTransforms.Clear();
                // reset this
                listViewAvailableTransforms.ListViewItemSorter = null;

                // get the currently selected major category
                TantaGuidNamePair currentCategory = (TantaGuidNamePair)comboBoxTransformCategories.SelectedItem;
                if (currentCategory == null)
                {
                    return;
                }

                // we have multiple sub-categories. These are set by specific flags on the MFTEnumEx call. We iterate
                // through each flag and get the matching transforms. If we already have a transform we just set the
                // flag on the existing one to show it is in multiple sub-categories.

                foreach (MFT_EnumFlag flagVal in Enum.GetValues(typeof(MFT_EnumFlag)))
                {
                    // we do not need this one
                    if (flagVal == MFT_EnumFlag.None)
                    {
                        continue;
                    }
                    // The documentation states that there is no way to enumerate just local MFTs and nothing else.
                    // Setting Flags equal to MFT_ENUM_FLAG_LOCALMFT is equivalent to including the MFT_ENUM_FLAG_SYNCMFT
                    // flag, which messes us up. This also appears to be true for the FieldOfUse and transcode-only
                    // flags, so we do not include them.
                    if (flagVal == MFT_EnumFlag.LocalMFT)
                    {
                        continue;
                    }
                    if (flagVal == MFT_EnumFlag.FieldOfUse)
                    {
                        continue;
                    }
                    if (flagVal == MFT_EnumFlag.TranscodeOnly)
                    {
                        continue;
                    }
                    // some of the higher flags are just for sorting the return results
                    if (flagVal >= MFT_EnumFlag.All)
                    {
                        break;
                    }

                    hr = MFExtern.MFTEnumEx(currentCategory.GuidValue, flagVal, null, null, out activatorArray, out numResults);
                    if (hr != HResult.S_OK)
                    {
                        throw new Exception("DisplayTransformsForCurrentCategory, call to MFExtern.MFTEnumEx failed. HR=" + hr.ToString());
                    }

                    // now loop through the returned activators
                    for (int i = 0; i < numResults; i++)
                    {
                        // extract the friendlyName and symbolicLinkName
                        Guid   outGuid      = TantaWMFUtils.GetGuidForKeyFromActivator(activatorArray[i], MFAttributesClsid.MFT_TRANSFORM_CLSID_Attribute);
                        string friendlyName = TantaWMFUtils.GetStringForKeyFromActivator(activatorArray[i], MFAttributesClsid.MFT_FRIENDLY_NAME_Attribute);

                        // create a new TantaMFTCapabilityContainer for it
                        TantaMFTCapabilityContainer workingMFTContainer = new TantaMFTCapabilityContainer(friendlyName, outGuid, currentCategory);
                        // do we have this in our list yet
                        int index = transformList.FindIndex(x => x.TransformGuidValue == workingMFTContainer.TransformGuidValue);
                        if (index >= 0)
                        {
                            // yes, it does contain this transform, just record the new sub-category
                            transformList[index].EnumFlags |= flagVal;
                        }
                        else
                        {
                            // no, it does not contain this transform yet, set the sub-category
                            workingMFTContainer.EnumFlags = flagVal;
                            // and add it
                            transformList.Add(workingMFTContainer);

                            if ((activatorArray[i] is IMFAttributes) == true)
                            {
                                StringBuilder outSb = null;
                                List <string> attributesToIgnore = new List <string>();
                                attributesToIgnore.Add("MFT_FRIENDLY_NAME_Attribute");
                                attributesToIgnore.Add("MFT_TRANSFORM_CLSID_Attribute");
                                attributesToIgnore.Add("MF_TRANSFORM_FLAGS_Attribute");
                                attributesToIgnore.Add("MF_TRANSFORM_CATEGORY_Attribute");
                                hr = TantaWMFUtils.EnumerateAllAttributeNamesAsText((activatorArray[i] as IMFAttributes), attributesToIgnore, 100, out outSb);
                                // outSb is only gathered for inspection; it is not used further in this excerpt
                            }
                        }

                        // clean up our activator
                        Marshal.ReleaseComObject(activatorArray[i]);
                    }
                }

                // now display the transforms
                foreach (TantaMFTCapabilityContainer mftCapability in transformList)
                {
                    ListViewItem lvi = new ListViewItem(new[] { mftCapability.TransformFriendlyName, mftCapability.IsSyncMFT, mftCapability.IsAsyncMFT, mftCapability.IsHardware, /* mftCapability.IsFieldOfUse, mftCapability.IsLocalMFT, mftCapability.IsTranscodeOnly, */ mftCapability.TransformGuidValueAsString });
                    lvi.Tag = mftCapability;
                    listViewAvailableTransforms.Items.Add(lvi);
                }

                listViewAvailableTransforms.Columns.Add("Name", 250);
                listViewAvailableTransforms.Columns.Add("IsSyncMFT", 70);
                listViewAvailableTransforms.Columns.Add("IsAsyncMFT", 90);
                listViewAvailableTransforms.Columns.Add("IsHardware", 90);
                //  listViewAvailableTransforms.Columns.Add("IsFieldOfUse", 90);
                //  listViewAvailableTransforms.Columns.Add("IsLocalMFT", 90);
                //  listViewAvailableTransforms.Columns.Add("IsTranscodeOnly", 90);
                listViewAvailableTransforms.Columns.Add("Guid", 200);
            }
            finally
            {
            }
        }
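        // The TantaWMFUtils helpers used above are not shown in this excerpt. A
        // minimal sketch of what they might look like (the real Tanta helpers may
        // differ): IMFActivate inherits IMFAttributes, so the friendly name and the
        // transform CLSID can be read straight off the activator.
        private static string GetStringForKeyFromActivator(IMFActivate activator, Guid attributeKey)
        {
            string outText;
            int length;
            HResult hr = activator.GetAllocatedString(attributeKey, out outText, out length);
            if (hr != HResult.S_OK)
            {
                return null;
            }
            return outText;
        }

        private static Guid GetGuidForKeyFromActivator(IMFActivate activator, Guid attributeKey)
        {
            Guid outGuid;
            HResult hr = activator.GetGUID(attributeKey, out outGuid);
            if (hr != HResult.S_OK)
            {
                return Guid.Empty;
            }
            return outGuid;
        }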