Example #1
        /// +=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=
        /// <summary>
        /// Gets the media subtype of an IMFMediaType as a text string
        ///
        /// Adapted from
        /// https://msdn.microsoft.com/en-us/library/windows/desktop/ee663602(v=vs.85).aspx
        /// </summary>
        /// <returns>S_OK for success, nz for fail</returns>
        /// <param name="mediaTypeObj">the media type object</param>
        /// <param name="outSb">The output string</param>
        /// <history>
        ///    01 Nov 18  Cynic - Started
        /// </history>
        public static HResult GetMediaSubTypeAsText(IMFMediaType mediaTypeObj, out StringBuilder outSb)
        {
            Guid    subType;
            HResult hr;

            // we always return something here
            outSb = new StringBuilder();

            // sanity check
            if (mediaTypeObj == null)
            {
                return(HResult.E_FAIL);
            }

            // MF_MT_SUBTYPE
            // Subtype GUID which describes the basic media type, we return this as human readable text
            hr = mediaTypeObj.GetGUID(MFAttributesClsid.MF_MT_SUBTYPE, out subType);
            if (hr == HResult.S_OK)
            {
                // only report success
                outSb.Append("MF_MT_SUBTYPE=" + TantaWMFUtils.ConvertGuidToName(subType));
            }

            return(HResult.S_OK);
        }
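A minimal usage sketch for the helper above (not from the original project): it assumes Media Foundation has already been started with MFStartup, that the MF.Net namespaces used throughout these examples are referenced, and that GetMediaSubTypeAsText lives on a hypothetical helper class named MediaTypeHelpers.

using System;
using System.Text;
using System.Runtime.InteropServices;
using MediaFoundation;
using MediaFoundation.Misc;

public static class MediaTypeHelpersDemo
{
    // Builds a throwaway media type, stamps a subtype on it and prints the
    // human readable name via the helper above. Assumes MFStartup has been called.
    public static void PrintSubTypeDemo()
    {
        IMFMediaType mediaType;
        HResult hr = MFExtern.MFCreateMediaType(out mediaType);
        if (hr != HResult.S_OK) throw new Exception("MFCreateMediaType failed, hr=" + hr.ToString());

        try
        {
            // give the type something to report - a video major type and an NV12 subtype
            mediaType.SetGUID(MFAttributesClsid.MF_MT_MAJOR_TYPE, MFMediaType.Video);
            mediaType.SetGUID(MFAttributesClsid.MF_MT_SUBTYPE, MFMediaType.NV12);

            StringBuilder sb;
            hr = MediaTypeHelpers.GetMediaSubTypeAsText(mediaType, out sb);
            if (hr == HResult.S_OK) Console.WriteLine(sb.ToString());
        }
        finally
        {
            // media types are COM objects and should always be released
            Marshal.ReleaseComObject(mediaType);
        }
    }
}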
Example #2
        /// +=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=
        /// <summary>
        /// Gets the major media type of an IMFMediaType as a text string
        ///
        /// Adapted from
        /// https://msdn.microsoft.com/en-us/library/windows/desktop/ee663602(v=vs.85).aspx
        /// </summary>
        /// <returns>S_OK for success, nz for fail</returns>
        /// <param name="mediaTypeObj">the media type object</param>
        /// <param name="outSb">The output string</param>
        /// <history>
        ///    01 Nov 18  Cynic - Started
        /// </history>
        public static HResult GetMediaMajorTypeAsText(IMFMediaType mediaTypeObj, out StringBuilder outSb)
        {
            Guid    majorType;
            HResult hr;

            // we always return something here
            outSb = new StringBuilder();

            // sanity check
            if (mediaTypeObj == null)
            {
                return(HResult.E_FAIL);
            }

            // MF_MT_MAJOR_TYPE
            // Major type GUID, we return this as human readable text
            hr = mediaTypeObj.GetMajorType(out majorType);
            if (hr == HResult.S_OK)
            {
                // only report success
                outSb.Append("MF_MT_MAJOR_TYPE=" + TantaWMFUtils.ConvertGuidToName(majorType));
            }

            return(HResult.S_OK);
        }
Example #3
        /// +=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=
        /// <summary>
        /// Gets a list of all attributes contained in a media type and displays
        /// them as human readable names. More or less just for practice
        ///
        /// Adapted from
        /// https://msdn.microsoft.com/en-us/library/windows/desktop/ee663602(v=vs.85).aspx
        /// </summary>
        /// <returns>S_OK for success, nz for fail</returns>
        /// <param name="mediaTypeObj">the media type object</param>
        /// <param name="maxAttributes">the maximum number of attributes</param>
        /// <param name="outSb">The output string</param>
        /// <param name="ignoreMajorType">if true we ignore the major type attribute</param>
        /// <param name="ignoreSubType">if true we ignore the sub type attribute</param>
        /// <history>
        ///    01 Nov 18  Cynic - Started
        /// </history>
        public static HResult EnumerateAllAttributeNamesInMediaTypeAsText(IMFMediaType mediaTypeObj, bool ignoreMajorType, bool ignoreSubType, int maxAttributes, out StringBuilder outSb)
        {
            // we always return something here
            outSb = new StringBuilder();

            // sanity check
            if (mediaTypeObj == null)
            {
                return(HResult.E_FAIL);
            }
            if ((mediaTypeObj is IMFAttributes) == false)
            {
                return(HResult.E_FAIL);
            }

            // set up to ignore
            List <string> attributesToIgnore = new List <string>();

            if (ignoreMajorType == true)
            {
                attributesToIgnore.Add("MF_MT_MAJOR_TYPE");
            }
            if (ignoreSubType == true)
            {
                attributesToIgnore.Add("MF_MT_SUBTYPE");
            }

            // just call the generic TantaWMFUtils Attribute Enumerator
            return(TantaWMFUtils.EnumerateAllAttributeNamesAsText((mediaTypeObj as IMFAttributes), attributesToIgnore, maxAttributes, out outSb));
        }
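A short usage sketch (an assumption, not taken from the original source): it takes an already configured IMFMediaType, for example one returned by IMFSourceReader.GetCurrentMediaType, and dumps its remaining attributes while skipping the major and sub types, which the two helpers above already report. The class names MediaTypeHelpers and AttributeDumpDemo are hypothetical.

using System;
using System.Text;
using MediaFoundation;
using MediaFoundation.Misc;

public static class AttributeDumpDemo
{
    // Dumps everything except the major type and subtype of an already
    // configured media type obtained elsewhere.
    public static void DumpAttributes(IMFMediaType mediaType)
    {
        StringBuilder attrSb;
        HResult hr = MediaTypeHelpers.EnumerateAllAttributeNamesInMediaTypeAsText(
            mediaType,
            true,    // ignoreMajorType - reported separately by GetMediaMajorTypeAsText
            true,    // ignoreSubType   - reported separately by GetMediaSubTypeAsText
            100,     // maxAttributes   - generous upper bound on the attribute walk
            out attrSb);
        if (hr == HResult.S_OK) Console.WriteLine(attrSb.ToString());
    }
}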
Example #4
        /// +=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=
        /// <summary>
        /// Write the text on the output buffer
        /// </summary>
        /// <param name="outputMediaBuffer">Output buffer</param>
        /// <history>
        ///    01 Nov 18  Cynic - Ported In
        /// </history>
        private void WriteTextOnBuffer(IMFMediaBuffer outputMediaBuffer)
        {
            IntPtr destRawDataPtr = IntPtr.Zero;            // Destination buffer.
            int    destStride     = 0;                      // Destination stride.
            bool   destIs2D       = false;

            try
            {
                // Lock the output buffer. Use the IMF2DBuffer interface
                // (if available) as it is faster
                if ((outputMediaBuffer is IMF2DBuffer) == false)
                {
                    // not an IMF2DBuffer - get the raw data from the IMFMediaBuffer
                    int maxLen     = 0;
                    int currentLen = 0;
                    TantaWMFUtils.LockIMFMediaBufferAndGetRawData(outputMediaBuffer, out destRawDataPtr, out maxLen, out currentLen);
                    // the stride is always this. The Lock function does not return it
                    destStride = m_lStrideIfContiguous;
                }
                else
                {
                    // we are an IMF2DBuffer, we get the stride here as well
                    TantaWMFUtils.LockIMF2DBufferAndGetRawData((outputMediaBuffer as IMF2DBuffer), out destRawDataPtr, out destStride);
                    destIs2D = true;
                }

                // count this now. We only use this to write it on the screen
                m_FrameCount++;

                // We could eventually offer the ability to write on other formats depending on the
                // current media type. We have this hardcoded to ARGB for now
                WriteImageOfTypeARGB(destRawDataPtr,
                                     destStride,
                                     m_imageWidthInPixels,
                                     m_imageHeightInPixels);

                // Set the data size on the output buffer. It probably is already there
                // since the output buffer is the input buffer
                HResult hr = outputMediaBuffer.SetCurrentLength(m_cbImageSize);
                if (hr != HResult.S_OK)
                {
                    throw new Exception("WriteTextOnBuffer call to outputMediaBuffer.SetCurrentLength failed. Err=" + hr.ToString());
                }
            }
            finally
            {
                // we MUST unlock
                if (destIs2D == false)
                {
                    TantaWMFUtils.UnLockIMFMediaBuffer(outputMediaBuffer);
                }
                else
                {
                    TantaWMFUtils.UnLockIMF2DBuffer((outputMediaBuffer as IMF2DBuffer));
                }
            }
        }
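The lock/unlock pattern above is worth isolating. The sketch below is a guess at what the Tanta lock helpers wrap, built directly on the stock MF.Net interfaces (IMFMediaBuffer.Lock/Unlock and IMF2DBuffer.Lock2D/Unlock2D); it is an illustration, not the project's actual helper code. The caller supplies the stride to assume when the buffer is contiguous, since the plain Lock call does not report one.

using System;
using MediaFoundation;
using MediaFoundation.Misc;

public static class BufferLockDemo
{
    // A rough sketch of the locking pattern used above. Prefers IMF2DBuffer when the
    // buffer supports it, otherwise falls back to the plain IMFMediaBuffer lock.
    public static void WithLockedBuffer(IMFMediaBuffer buffer, int strideIfContiguous, Action<IntPtr, int> body)
    {
        IMF2DBuffer buffer2D = buffer as IMF2DBuffer;

        if (buffer2D != null)
        {
            // 2D path - Lock2D also reports the pitch (stride)
            IntPtr rawData;
            int stride;
            HResult hr = buffer2D.Lock2D(out rawData, out stride);
            if (hr != HResult.S_OK) throw new Exception("Lock2D failed, hr=" + hr.ToString());
            try { body(rawData, stride); }
            finally { buffer2D.Unlock2D(); }
        }
        else
        {
            // contiguous path - the stride is not reported, the caller must know it
            IntPtr rawData;
            int maxLen;
            int currentLen;
            HResult hr = buffer.Lock(out rawData, out maxLen, out currentLen);
            if (hr != HResult.S_OK) throw new Exception("Lock failed, hr=" + hr.ToString());
            try { body(rawData, strideIfContiguous); }
            finally { buffer.Unlock(); }
        }
    }
}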
Example #5
        /// +=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=
        /// <summary>
        /// The MFT defines a list of available media types for each input stream
        /// and orders them by preference. This method enumerates the available
        /// media types for an input stream.
        ///
        /// Many clients will just "try it on" with their preferred media type
        /// and if/when that gets rejected will start enumerating the types the
        /// transform prefers in order to see if they have one in common
        ///
        /// An override of the virtual version in TantaMFTBase_Sync.
        /// </summary>
        /// <param name="dwTypeIndex">The (zero-based) index of the type.</param>
        /// <param name="pInputType">The input type supported by the MFT.</param>
        /// <returns>S_Ok unless error.</returns>
        /// <history>
        ///    01 Nov 18  Cynic - Ported In
        /// </history>
        protected override HResult OnEnumInputTypes(int dwTypeIndex, out IMFMediaType pInputType)
        {
            // MF.Net Sample comments...
            // I'd like to skip implementing this, but while some clients
            // don't require it (PlaybackFX), some do (MEPlayer/IMFMediaEngine).
            // Although frame counting should be able to run against any type,
            // we must at a minimum provide a major type.

            return(TantaWMFUtils.CreatePartialMediaType(dwTypeIndex, MFMediaType.Video, m_MediaSubtypes, out pInputType));
        }
Example #6
        /// +=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=
        /// <summary>
        /// Called when the sample grabber sink processes a sample. We can use this
        /// to do what we want with the sample
        /// </summary>
        /// <param name="guidMajorMediaType">the media type</param>
        /// <param name="sampleFlags">the sample flags</param>
        /// <param name="sampleSize">the sample size</param>
        /// <param name="sampleDuration">the sample duration</param>
        /// <param name="sampleTimeStamp">the sample time</param>
        /// <param name="sampleBuffer">the sample buffer</param>
        /// <param name="sampleAttributes">the attributes for the sample</param>
        /// <history>
        ///    01 Nov 18  Cynic - Ported in
        /// </history>
        public HResult OnProcessSampleEx(Guid guidMajorMediaType, int sampleFlags, long sampleTimeStamp, long sampleDuration, IntPtr sampleBuffer, int sampleSize, IMFAttributes sampleAttributes)
        {
            IMFSample outputSample = null;
            HResult   hr;

            try
            {
                if (sinkWriter == null)
                {
                    string errMsg = "OnProcessSample, Error sinkWriter==null";
                    SampleGrabberAsyncCallBackError(this, errMsg, null);
                    return(HResult.E_FAIL);
                }

                // we have all the information we need to create a new output sample
                outputSample = TantaWMFUtils.CreateMediaSampleFromIntPtr(sampleFlags, sampleTimeStamp, sampleDuration, sampleBuffer, sampleSize, sampleAttributes);
                if (outputSample == null)
                {
                    string errMsg = "OnProcessSample, Error on call to CreateMediaSampleFromBuffer outputSample == null";
                    SampleGrabberAsyncCallBackError(this, errMsg, null);
                    return(HResult.E_FAIL);
                }

                lock (sinkWriter)
                {
                    // write the sample out
                    hr = sinkWriter.WriteSample(sinkWriterMediaStreamId, outputSample);
                    if (Failed(hr))
                    {
                        string errMsg = "OnProcessSample, Error on WriteSample =" + hr.ToString();
                        SampleGrabberAsyncCallBackError(this, errMsg, null);
                        return(hr);
                    }

                    // if you do not set MF_SINK_WRITER_DISABLE_THROTTLING on the sink writer, the
                    // calls below can sometimes be useful to avoid the sink writer stalling for a bit
                    // while it waits for one stream or the other. In fact it is a bit disconcerting
                    // that we do not seem to get any of these in the sample grabber.

                    //    sinkWriter.SendStreamTick(sinkWriterMediaStreamId, sampleTimeStamp);
                    //    sinkWriter.NotifyEndOfSegment(sinkWriterMediaStreamId);
                }
            }
            finally
            {
                if (outputSample != null)
                {
                    Marshal.ReleaseComObject(outputSample);
                    outputSample = null;
                }
            }

            return(HResult.S_OK);
        }
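CreateMediaSampleFromIntPtr is a Tanta helper whose implementation is not shown here. The sketch below is an assumption about roughly what such a helper has to do using stock MF.Net calls (MFCreateSample, MFCreateMemoryBuffer); it omits the sampleAttributes copy and skips COM cleanup on error paths for brevity. It would sit in a class with the MediaFoundation, MediaFoundation.Misc and System.Runtime.InteropServices usings.

        private static IMFSample CreateSampleFromPointerSketch(int sampleFlags, long sampleTimeStamp, long sampleDuration, IntPtr sampleBuffer, int sampleSize)
        {
            IMFSample sample = null;
            IMFMediaBuffer mediaBuffer = null;

            HResult hr = MFExtern.MFCreateSample(out sample);
            if (hr != HResult.S_OK) return null;

            hr = MFExtern.MFCreateMemoryBuffer(sampleSize, out mediaBuffer);
            if (hr != HResult.S_OK) return null;

            // copy the raw bytes across via a managed staging array
            IntPtr destPtr;
            int maxLen;
            int curLen;
            hr = mediaBuffer.Lock(out destPtr, out maxLen, out curLen);
            if (hr != HResult.S_OK) return null;
            try
            {
                byte[] staging = new byte[sampleSize];
                Marshal.Copy(sampleBuffer, staging, 0, sampleSize);
                Marshal.Copy(staging, 0, destPtr, sampleSize);
            }
            finally
            {
                mediaBuffer.Unlock();
            }
            mediaBuffer.SetCurrentLength(sampleSize);

            // attach the buffer and the timing/flag information to the sample
            sample.AddBuffer(mediaBuffer);
            sample.SetSampleTime(sampleTimeStamp);
            sample.SetSampleDuration(sampleDuration);
            sample.SetSampleFlags(sampleFlags);

            return sample;
        }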
Example #7
        /// +=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=
        /// <summary>
        ///  Get the buffers and sizes to be modified, then pass them
        ///  to the appropriate Update_* routine.
        /// </summary>
        /// <param name="inputMediaBuffer">the mediaBuffer</param>
        /// <history>
        ///    01 Nov 18  Cynic - Ported over
        /// </history>
        private void DoWork(IMFMediaBuffer inputMediaBuffer)
        {
            IntPtr srcRawDataPtr = IntPtr.Zero;                     // Source buffer.
            int    srcStride;                                       // Source stride.
            bool   srcIs2D = false;

            try
            {
                // Lock the input buffer. Use the IMF2DBuffer interface
                // (if available) as it is faster
                if ((inputMediaBuffer is IMF2DBuffer) == false)
                {
                    // not an IMF2DBuffer - get the raw data from the IMFMediaBuffer
                    int maxLen     = 0;
                    int currentLen = 0;
                    TantaWMFUtils.LockIMFMediaBufferAndGetRawData(inputMediaBuffer, out srcRawDataPtr, out maxLen, out currentLen);
                    // the stride is always this. The Lock function does not return it
                    srcStride = m_lStrideIfContiguous;
                }
                else
                {
                    // we are an IMF2DBuffer, we get the stride here as well
                    TantaWMFUtils.LockIMF2DBufferAndGetRawData((inputMediaBuffer as IMF2DBuffer), out srcRawDataPtr, out srcStride);
                    srcIs2D = true;
                }
                // Invoke the image transform function.
                if (TransformImageFunction != null)
                {
                    TransformImageFunction(srcRawDataPtr, srcStride,
                                           m_imageWidthInPixels, m_imageHeightInPixels);
                }
                else
                {
                    throw new COMException("Transform type not set", (int)HResult.E_UNEXPECTED);
                }

                // Set the data size on the output buffer.
                MFError throwonhr = inputMediaBuffer.SetCurrentLength(m_cbImageSize);
            }
            finally
            {
                if (srcIs2D == false)
                {
                    TantaWMFUtils.UnLockIMFMediaBuffer(inputMediaBuffer);
                }
                else
                {
                    TantaWMFUtils.UnLockIMF2DBuffer((inputMediaBuffer as IMF2DBuffer));
                }
            }
        }
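For context, here is one possible target for the TransformImageFunction delegate invoked above, assuming RGB32 data and the (pointer, stride, width, height) signature implied by the call site. This is an illustrative sketch, not code from the project.

using System;
using System.Runtime.InteropServices;

public static class TransformSketch
{
    // Inverts every pixel in place, walking the image one scanline at a time.
    public static void InvertRgb32(IntPtr rawData, int stride, int widthInPixels, int heightInPixels)
    {
        for (int y = 0; y < heightInPixels; y++)
        {
            // stride may be larger than width * 4 (and can be negative for bottom-up
            // images, in which case rawData points at the first visible scanline)
            IntPtr scanline = IntPtr.Add(rawData, y * stride);
            for (int x = 0; x < widthInPixels; x++)
            {
                int offset = x * 4;
                int pixel = Marshal.ReadInt32(scanline, offset);
                // invert the colour channels, leave the alpha/padding byte alone
                Marshal.WriteInt32(scanline, offset, pixel ^ 0x00FFFFFF);
            }
        }
    }
}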
Example #8
        // ########################################################################
        // ##### TantaMFTBase_Sync Overrides, all child classes must implement these
        // ########################################################################

        #region Overrides

        /// +=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=
        /// <summary>
        /// Returns a value indicating if the proposed input type is acceptable to
        /// this MFT.
        ///
        /// An override of the abstract version in TantaMFTBase_Sync.
        /// </summary>
        /// <param name="pmt">The type to check.  Should never be null.</param>
        /// <returns>S_Ok if the type is valid or MF_E_INVALIDTYPE.</returns>
        /// <history>
        ///    01 Nov 18  Cynic - Ported In
        /// </history>
        override protected HResult OnCheckInputType(IMFMediaType pmt)
        {
            HResult hr;

            // We accept any input type as long as the output type
            // has not been set yet
            if (OutputType == null)
            {
                hr = HResult.S_OK;
            }
            else
            {
                // Otherwise, proposed input must be identical to the output.
                hr = TantaWMFUtils.IsMediaTypeIdentical(pmt, OutputType);
            }

            return(hr);
        }
Example #9
        // ########################################################################
        // ##### TantaMFTBase_Sync Overrides, all child classes must implement these
        // ########################################################################

        #region Overrides

        /// +=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=
        /// <summary>
        /// Returns a value indicating if the proposed input type is acceptable to
        /// this MFT.
        ///
        /// An override of the abstract version in TantaMFTBase_Sync.
        /// </summary>
        /// <param name="pmt">The type to check.  Should never be null.</param>
        /// <returns>S_Ok if the type is valid or MF_E_INVALIDTYPE.</returns>
        /// <history>
        ///    01 Nov 18  Cynic - Ported In
        /// </history>
        override protected HResult OnCheckInputType(IMFMediaType pmt)
        {
            HResult hr;

            // We assume the input type will get checked first
            if (OutputType == null)
            {
                // we do not have an output type, check that the proposed
                // input type is acceptable
                hr = OnCheckMediaType(pmt);
            }
            else
            {
                // we have an output type
                hr = TantaWMFUtils.IsMediaTypeIdentical(pmt, OutputType);
            }

            return(hr);
        }
Example #10
        /// +=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=
        /// <summary>
        /// Called when the sample grabber sink processes a sample. We can use this
        /// to do what we want with the media data
        /// </summary>
        /// <param name="guidMajorMediaType">the media type</param>
        /// <param name="sampleFlags">the sample flags</param>
        /// <param name="sampleSize">the sample size</param>
        /// <param name="sampleDuration">the sample duration</param>
        /// <param name="sampleTimeStamp">the sample time</param>
        /// <param name="sampleBuffer">the sample buffer</param>
        /// <param name="sampleAttributes">the attributes for the sample</param>
        /// <history>
        ///    01 Nov 18  Cynic - Ported in
        /// </history>
        public HResult OnProcessSampleEx(Guid guidMajorMediaType, int sampleFlags, long sampleTimeStamp, long sampleDuration, IntPtr sampleBuffer, int sampleSize, IMFAttributes sampleAttributes)
        {
            IMFSample outputSample = null;
            HResult   hr;

            try
            {
                if (sinkWriter == null)
                {
                    string errMsg = "OnProcessSample, Error sinkWriter==null";
                    SampleGrabberAsyncCallBackError(this, errMsg, null);
                    return(HResult.E_FAIL);
                }

                // we have all the information we need to create a new output sample
                outputSample = TantaWMFUtils.CreateMediaSampleFromIntPtr(sampleFlags, sampleTimeStamp, sampleDuration, sampleBuffer, sampleSize, null);
                if (outputSample == null)
                {
                    string errMsg = "OnProcessSample, Error on call to CreateMediaSampleFromBuffer outputSample == null";
                    SampleGrabberAsyncCallBackError(this, errMsg, null);
                    return(HResult.E_FAIL);
                }

                // write the sample out
                hr = sinkWriter.WriteSample(sinkWriterMediaStreamId, outputSample);
                if (Failed(hr))
                {
                    string errMsg = "OnProcessSample, Error on WriteSample =" + hr.ToString();
                    SampleGrabberAsyncCallBackError(this, errMsg, null);
                    return(hr);
                }
            }
            finally
            {
                if (outputSample != null)
                {
                    Marshal.ReleaseComObject(outputSample);
                    outputSample = null;
                }
            }

            return(HResult.S_OK);
        }
Example #11
        /// +=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=
        /// <summary>
        /// Validates a media type for this transform. Since both input and output types must be
        /// the same, they both call this routine.
        /// </summary>
        /// <param name="pmt">The media type to validate.</param>
        /// <returns>S_Ok or MF_E_INVALIDTYPE.</returns>
        /// <history>
        ///    01 Nov 18  Cynic - Ported over
        /// </history>
        private HResult OnCheckMediaType(IMFMediaType pmt)
        {
            int     interlace;
            HResult hr = HResult.S_OK;

            // see if the media type is one of our list of acceptable subtypes
            hr = TantaWMFUtils.CheckMediaType(pmt, MFMediaType.Video, m_MediaSubtypes);
            if (hr != HResult.S_OK)
            {
                throw new Exception("OnCheckMediaType call to TantaWMFUtils.CheckMediaType failed. Err=" + hr.ToString());
            }

            // Video must be progressive frames. Set this now
            m_MightBeInterlaced = false;

            // get the interlace mode
            hr = pmt.GetUINT32(MFAttributesClsid.MF_MT_INTERLACE_MODE, out interlace);
            if (hr != HResult.S_OK)
            {
                throw new Exception("OnCheckMediaType call to getting the interlace mode failed. Err=" + hr.ToString());
            }
            // set it now
            MFVideoInterlaceMode im = (MFVideoInterlaceMode)interlace;

            // Mostly we only accept Progressive.
            if (im == MFVideoInterlaceMode.Progressive)
            {
                return(HResult.S_OK);
            }
            // If the type MIGHT be interlaced, we'll accept it.
            if (im == MFVideoInterlaceMode.MixedInterlaceOrProgressive)
            {
                // But we will check to see if any samples actually
                // are interlaced, and reject them.
                m_MightBeInterlaced = true;

                return(HResult.S_OK);
            }
            // not a valid option
            return(HResult.MF_E_INVALIDTYPE);
        }
Example #12
        /// +=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=
        /// <summary>
        /// Validates a media type for this transform. Since both input and output types must be
        /// the same, they both call this routine.
        /// </summary>
        /// <param name="pmt">The media type to validate.</param>
        /// <returns>S_Ok or MF_E_INVALIDTYPE.</returns>
        /// <history>
        ///    01 Nov 18  Cynic - Ported over
        /// </history>
        private HResult OnCheckMediaType(IMFMediaType pmt)
        {
            HResult hr;

            // Check the Major and Subtype
            hr = TantaWMFUtils.CheckMediaType(pmt, MFMediaType.Video, m_MediaSubtypes);
            if (Succeeded(hr))
            {
                int interlace;

                // Video must be progressive frames.
                m_MightBeInterlaced = false;

                MFError throwonhr = pmt.GetUINT32(MFAttributesClsid.MF_MT_INTERLACE_MODE, out interlace);

                MFVideoInterlaceMode im = (MFVideoInterlaceMode)interlace;

                // Mostly we only accept Progressive.
                if (im != MFVideoInterlaceMode.Progressive)
                {
                    // If the type MIGHT be interlaced, we'll accept it.
                    if (im != MFVideoInterlaceMode.MixedInterlaceOrProgressive)
                    {
                        hr = HResult.MF_E_INVALIDTYPE;
                    }
                    else
                    {
                        // But we will check to see if any samples actually
                        // are interlaced, and reject them.
                        m_MightBeInterlaced = true;
                    }
                }
            }

            return(hr);
        }
Example #13
        /// +=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=
        /// <summary>
        /// Displays the video devices on the system. Expects the MF system to have
        /// been started.
        /// </summary>
        /// <history>
        ///    01 Nov 18  Cynic - Started
        /// </history>
        public void DisplayVideoCaptureDevices()
        {
            StringBuilder sb = new StringBuilder();

            // Query MF for the devices. We can also use MF_DEVSOURCE_ATTRIBUTE_SOURCE_TYPE_AUDCAP_GUID
            // here to see the audio capture devices
            List <TantaMFDevice> vcDevices = TantaWMFUtils.GetDevicesByCategory(MFAttributesClsid.MF_DEVSOURCE_ATTRIBUTE_SOURCE_TYPE, CLSID.MF_DEVSOURCE_ATTRIBUTE_SOURCE_TYPE_VIDCAP_GUID);

            if (vcDevices == null)
            {
                return;
            }

            foreach (TantaMFDevice mfDevice in vcDevices)
            {
                sb.Append("FriendlyName:" + mfDevice.FriendlyName);
                sb.Append("\r\n");
                sb.Append("Symbolic Name:" + mfDevice.SymbolicName);
                sb.Append("\r\n");
                sb.Append("\r\n");
            }
            // add all known devices
            comboBoxCaptureDevices.DataSource = vcDevices;
        }
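GetDevicesByCategory is a Tanta helper; the sketch below shows the standard Media Foundation device enumeration it presumably wraps, built on stock MF.Net calls. The attribute GUID names are the ones used elsewhere in these examples plus MF_DEVSOURCE_ATTRIBUTE_FRIENDLY_NAME; treat the detail as an assumption rather than the project's actual implementation. Like Example #13 it expects MF to have been started.

using System;
using System.Runtime.InteropServices;
using MediaFoundation;
using MediaFoundation.Misc;

public static class DeviceEnumSketch
{
    // Lists the friendly names of the video capture devices on the system.
    public static void ListVideoCaptureFriendlyNames()
    {
        IMFAttributes attributes;
        IMFActivate[] devices;
        int deviceCount;

        HResult hr = MFExtern.MFCreateAttributes(out attributes, 1);
        if (hr != HResult.S_OK) throw new Exception("MFCreateAttributes failed, hr=" + hr.ToString());

        try
        {
            // ask only for video capture devices
            hr = attributes.SetGUID(
                MFAttributesClsid.MF_DEVSOURCE_ATTRIBUTE_SOURCE_TYPE,
                CLSID.MF_DEVSOURCE_ATTRIBUTE_SOURCE_TYPE_VIDCAP_GUID);
            if (hr != HResult.S_OK) throw new Exception("SetGUID failed, hr=" + hr.ToString());

            hr = MFExtern.MFEnumDeviceSources(attributes, out devices, out deviceCount);
            if (hr != HResult.S_OK) throw new Exception("MFEnumDeviceSources failed, hr=" + hr.ToString());

            for (int i = 0; i < deviceCount; i++)
            {
                string friendlyName;
                int length;
                hr = devices[i].GetAllocatedString(
                    MFAttributesClsid.MF_DEVSOURCE_ATTRIBUTE_FRIENDLY_NAME,
                    out friendlyName, out length);
                if (hr == HResult.S_OK) Console.WriteLine(friendlyName);
                Marshal.ReleaseComObject(devices[i]);
            }
        }
        finally
        {
            Marshal.ReleaseComObject(attributes);
        }
    }
}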
Example #14
 /// +=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=
 /// <summary>
 /// The MFT defines a list of available media types for each input stream
 /// and orders them by preference. This method enumerates the available
 /// media types for an input stream.
 ///
 /// Many clients will just "try it on" with their preferred media type
 /// and if/when that gets rejected will start enumerating the types the
 /// transform prefers in order to see if they have one in common
 ///
 /// An override of the virtual version in TantaMFTBase_Async.
 /// </summary>
 /// <param name="dwTypeIndex">The (zero-based) index of the type.</param>
 /// <param name="pInputType">The input type supported by the MFT.</param>
 /// <returns>S_Ok unless error.</returns>
 /// <history>
 ///    01 Nov 18  Cynic - Ported In
 /// </history>
 override protected HResult OnEnumInputTypes(int dwTypeIndex, out IMFMediaType pInputType)
 {
     return(TantaWMFUtils.CreatePartialMediaType(dwTypeIndex, MFMediaType.Video, m_MediaSubtypes, out pInputType));
 }
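CreatePartialMediaType is a Tanta helper; a sketch of roughly what it has to do is shown below, as an assumption for illustration only. It returns MF_E_NO_MORE_TYPES once dwTypeIndex runs off the end of the subtype preference list, which is how the enumeration above terminates. It would live in a class with the usual MF.Net usings.

private static HResult CreatePartialTypeSketch(int dwTypeIndex, Guid majorType, Guid[] subTypes, out IMFMediaType partialType)
{
    partialType = null;

    // past the end of the preference list - tell the caller there are no more types
    if (dwTypeIndex < 0 || dwTypeIndex >= subTypes.Length)
    {
        return HResult.MF_E_NO_MORE_TYPES;
    }

    HResult hr = MFExtern.MFCreateMediaType(out partialType);
    if (hr != HResult.S_OK) return hr;

    // a partial type carries only the major type and the requested subtype
    hr = partialType.SetGUID(MFAttributesClsid.MF_MT_MAJOR_TYPE, majorType);
    if (hr != HResult.S_OK) return hr;

    return partialType.SetGUID(MFAttributesClsid.MF_MT_SUBTYPE, subTypes[dwTypeIndex]);
}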
Example #15
File: frmMain.cs Project: yudigh/Tanta
        /// +=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=
        /// <summary>
        /// Does everything to copy a file. Opens the Source Reader and Sink Writer
        /// configures the streams and then, because a Synchronous version
        /// of the Source Reader is used, we sit in a loop and perform the copy.
        ///
        /// Any errors here simply throw an exception and must be trapped elsewhere
        ///
        /// Note that because this code is intended for demo purposes, it has been
        /// kept very simple and linear. Most of the things that could have been
        /// refactored in a common procedure (audio and video streams) are simply
        /// written out in duplicate in order to make it obvious what is going on.
        /// </summary>
        /// <param name="sourceFileName">the source file name</param>
        /// <param name="outputFileName">the name of the output file</param>
        /// <history>
        ///    01 Nov 18  Cynic - Originally Written
        /// </history>
        public void CopyFile(string sourceFileName, string outputFileName)
        {
            HResult hr;
            int     sinkWriterOutputVideoStreamId = -1;
            int     videoSamplesProcessed         = 0;
            bool    videoStreamIsAtEOS            = false;
            int     sourceReaderVideoStreamId     = -1;

            int  sinkWriterOutputAudioStreamId = -1;
            int  audioSamplesProcessed         = 0;
            bool audioStreamIsAtEOS            = false;
            int  sourceReaderAudioStreamId     = -1;

            // not keen on endless loops. This is the maximum number
            // of streams we will check in the source reader.
            const int MAX_SOURCEREADER_STREAMS = 100;

            // create the SourceReader
            sourceReader = TantaWMFUtils.CreateSourceReaderSyncFromFile(sourceFileName, DEFAULT_ALLOW_HARDWARE_TRANSFORMS);
            if (sourceReader == null)
            {
                // we failed
                throw new Exception("CopyFile: Failed to create SourceReader, Nothing will work.");
            }
            // create the SinkWriter
            sinkWriter = TantaWMFUtils.CreateSinkWriterFromFile(outputFileName, DEFAULT_ALLOW_HARDWARE_TRANSFORMS);
            if (sinkWriter == null)
            {
                // we failed
                throw new Exception("CopyFile: Failed to create Sink Writer, Nothing will work.");
            }

            // find the first audio and video stream and identify the default Media Type
            // they are using. We could look into the streams and enumerate all of the
            // types on offer and choose one from the list - but for a copy operation
            // the default will be quite suitable.

            sourceReaderNativeVideoMediaType = null;
            sourceReaderNativeAudioMediaType = null;
            for (int streamIndex = 0; streamIndex < MAX_SOURCEREADER_STREAMS; streamIndex++)
            {
                IMFMediaType workingType   = null;
                Guid         guidMajorType = Guid.Empty;

                // if we have found both the video and audio types (and their stream ids) leave now
                if ((sourceReaderNativeVideoMediaType != null) && (sourceReaderNativeAudioMediaType != null))
                {
                    break;
                }

                hr = sourceReader.GetNativeMediaType(streamIndex, 0, out workingType);
                if (hr == HResult.MF_E_NO_MORE_TYPES)
                {
                    break;
                }
                if (hr == HResult.MF_E_INVALIDSTREAMNUMBER)
                {
                    break;
                }
                if (hr != HResult.S_OK)
                {
                    // we failed
                    throw new Exception("CopyFile: failed on call to GetNativeMediaType, retVal=" + hr.ToString());
                }
                if (workingType == null)
                {
                    // we failed
                    throw new Exception("CopyFile: failed on call to GetNativeMediaType, workingType == null");
                }

                // what major type does this stream have?
                hr = workingType.GetMajorType(out guidMajorType);
                if (hr != HResult.S_OK)
                {
                    throw new Exception("CopyFile:  call to workingType.GetMajorType failed. Err=" + hr.ToString());
                }
                if (guidMajorType == Guid.Empty)
                {
                    throw new Exception("CopyFile:  call to workingType.GetMajorType failed. guidMajorType == Guid.Empty");
                }

                // test for video or audio (there can be others)
                if ((guidMajorType == MFMediaType.Video) && (sourceReaderNativeVideoMediaType == null))
                {
                    // this stream represents a video type
                    sourceReaderNativeVideoMediaType = workingType;
                    sourceReaderVideoStreamId        = streamIndex;
                    // the sourceReaderNativeVideoMediaType will be released elsewhere
                    continue;
                }
                else if ((guidMajorType == MFMediaType.Audio) && (sourceReaderNativeAudioMediaType == null))
                {
                    // this stream represents an audio type
                    sourceReaderNativeAudioMediaType = workingType;
                    sourceReaderAudioStreamId        = streamIndex;
                    // the sourceReaderNativeAudioMediaType will be released elsewhere
                    continue;
                }

                // if we get here release the type - we do not use it
                if (workingType != null)
                {
                    Marshal.ReleaseComObject(workingType);
                    workingType = null;
                }
            }

            // at this point we expect to have a native video media type or a native audio
            // media type (or both). If we have neither we cannot carry on
            if ((sourceReaderNativeVideoMediaType == null) && (sourceReaderNativeAudioMediaType == null))
            {
                // we failed
                throw new Exception("CopyFile: failed on call to GetNativeMediaType, sourceReaderNativeVideoMediaType == null");
            }

            // if we have a video stream in the source file we now set it up
            if (sourceReaderNativeVideoMediaType != null)
            {
                // set the media type on the reader - this is the media type it will output
                hr = sourceReader.SetCurrentMediaType(sourceReaderVideoStreamId, null, sourceReaderNativeVideoMediaType);
                if (hr != HResult.S_OK)
                {
                    // we failed
                    throw new Exception("CopyFile: failed on call to SetCurrentMediaType(v), retVal=" + hr.ToString());
                }
                // add a stream to the sink writer. The mediaType specifies the format of the samples that will be written
                // to the file. Note that it does not necessarily need to match the input format. To set the input format
                // use SetInputMediaType. In this case it does match because we copied the video encoder information directly
                // out of the video source type
                hr = sinkWriter.AddStream(sourceReaderNativeVideoMediaType, out sinkWriterOutputVideoStreamId);
                if (hr != HResult.S_OK)
                {
                    // we failed
                    throw new Exception("CopyFile: Failed adding the output stream(v), retVal=" + hr.ToString());
                }
                // Set the input format for a stream on the sink writer. Note the use of the stream index here
                // The input format does not have to match the target format that is written to the media sink
                // If the formats do not match, this call attempts to load a transform
                // that can convert from the input format to the target format. If it cannot find one, and this is not
                // a sure thing, it will throw an exception.
                hr = sinkWriter.SetInputMediaType(sinkWriterOutputVideoStreamId, sourceReaderNativeVideoMediaType, null);
                if (hr != HResult.S_OK)
                {
                    // we failed
                    throw new Exception("CopyFile: Failed on calling SetInputMediaType(v) on the writer, retVal=" + hr.ToString());
                }
            }

            // if we have an audio stream in the source file we now set it up
            if (sourceReaderNativeAudioMediaType != null)
            {
                // set the media type on the reader - this is the media type it will output
                hr = sourceReader.SetCurrentMediaType(sourceReaderAudioStreamId, null, sourceReaderNativeAudioMediaType);
                if (hr != HResult.S_OK)
                {
                    // we failed
                    throw new Exception("CopyFile: failed on call to SetCurrentMediaType(a), retVal=" + hr.ToString());
                }
                // add a stream to the sink writer. The mediaType specifies the format of the samples that will be written
                // to the file. Note that it does not necessarily need to match the input format. To set the input format
                // use SetInputMediaType. In this case it does match because we copied the audio encoder information directly
                // out of the audio source type
                hr = sinkWriter.AddStream(sourceReaderNativeAudioMediaType, out sinkWriterOutputAudioStreamId);
                if (hr != HResult.S_OK)
                {
                    // we failed
                    throw new Exception("CopyFile: Failed adding the output stream(a), retVal=" + hr.ToString());
                }
                // Set the input format for a stream on the sink writer. Note the use of the stream index here
                // The input format does not have to match the target format that is written to the media sink
                // If the formats do not match, this call attempts to load a transform
                // that can convert from the input format to the target format. If it cannot find one, and this is not
                // a sure thing, it will throw an exception.
                hr = sinkWriter.SetInputMediaType(sinkWriterOutputAudioStreamId, sourceReaderNativeAudioMediaType, null);
                if (hr != HResult.S_OK)
                {
                    // we failed
                    throw new Exception("CopyFile: Failed on calling SetInputMediaType(a) on the writer, retVal=" + hr.ToString());
                }
            }

            // begin writing on the sink writer
            hr = sinkWriter.BeginWriting();
            if (hr != HResult.S_OK)
            {
                // we failed
                throw new Exception("CopyFile: failed on call to BeginWriting, retVal=" + hr.ToString());
            }

            // we sit in a loop here and get the sample from the source reader and write it out
            // to the sink writer. An EOS (end of stream) value in the flags will signal the end.
            while (true)
            {
                int actualStreamIndex;
                MF_SOURCE_READER_FLAG actualStreamFlags;
                long      timeStamp          = 0;
                IMFSample workingMediaSample = null;

                // Request the next sample from the media source. Note that this could be
                // any type of media sample (video, audio, subtitles etc). We do not know
                // until we look at the stream ID. We saved the stream ID earlier when
                // we obtained the media types and so we can branch based on that.
                hr = sourceReader.ReadSample(
                    TantaWMFUtils.MF_SOURCE_READER_ANY_STREAM,
                    0,
                    out actualStreamIndex,
                    out actualStreamFlags,
                    out timeStamp,
                    out workingMediaSample
                    );
                if (hr != HResult.S_OK)
                {
                    // we failed
                    throw new Exception("CopyFile: Failed on calling the ReadSample on the reader, retVal=" + hr.ToString());
                }

                // the sample may be null if either end of stream or a stream tick is returned
                if (workingMediaSample == null)
                {
                    // just ignore, the flags will have the information we need.
                }
                else
                {
                    // the sample is not null
                    if (actualStreamIndex == sourceReaderAudioStreamId)
                    {
                        // audio data
                        // ensure discontinuity is set for the first sample in each stream
                        if (audioSamplesProcessed == 0)
                        {
                            // audio data
                            hr = workingMediaSample.SetUINT32(MFAttributesClsid.MFSampleExtension_Discontinuity, 1);
                            if (hr != HResult.S_OK)
                            {
                                // we failed
                                throw new Exception("CopyFile: Failed on calling SetUINT32 on the sample, retVal=" + hr.ToString());
                            }
                            // remember this - we only do it once
                            audioSamplesProcessed++;
                        }
                        hr = sinkWriter.WriteSample(sinkWriterOutputAudioStreamId, workingMediaSample);
                        if (hr != HResult.S_OK)
                        {
                            // we failed
                            throw new Exception("CopyFile: Failed on calling the WriteSample on the writer, retVal=" + hr.ToString());
                        }
                    }
                    else if (actualStreamIndex == sourceReaderVideoStreamId)
                    {
                        // video data
                        // ensure discontinuity is set for the first sample in each stream
                        if (videoSamplesProcessed == 0)
                        {
                            // video data
                            hr = workingMediaSample.SetUINT32(MFAttributesClsid.MFSampleExtension_Discontinuity, 1);
                            if (hr != HResult.S_OK)
                            {
                                // we failed
                                throw new Exception("CopyFile: Failed on calling SetUINT32 on the sample, retVal=" + hr.ToString());
                            }
                            // remember this - we only do it once
                            videoSamplesProcessed++;
                        }
                        hr = sinkWriter.WriteSample(sinkWriterOutputVideoStreamId, workingMediaSample);
                        if (hr != HResult.S_OK)
                        {
                            // we failed
                            throw new Exception("CopyFile: Failed on calling the WriteSample on the writer, retVal=" + hr.ToString());
                        }
                    }

                    // release the sample
                    if (workingMediaSample != null)
                    {
                        Marshal.ReleaseComObject(workingMediaSample);
                        workingMediaSample = null;
                    }
                }

                // do we have a stream tick event?
                if ((actualStreamFlags & MF_SOURCE_READER_FLAG.StreamTick) != 0)
                {
                    if (actualStreamIndex == sourceReaderVideoStreamId)
                    {
                        // video stream
                        hr = sinkWriter.SendStreamTick(sinkWriterOutputVideoStreamId, timeStamp);
                    }
                    else if (actualStreamIndex == sourceReaderAudioStreamId)
                    {
                        // audio stream
                        hr = sinkWriter.SendStreamTick(sinkWriterOutputAudioStreamId, timeStamp);
                    }
                    else
                    {
                    }
                }

                // is this stream at End Of Stream?
                if ((actualStreamFlags & MF_SOURCE_READER_FLAG.EndOfStream) != 0)
                {
                    // We have an EOS - but is it on the video or audio channel?
                    // we have to get it on both
                    if (actualStreamIndex == sourceReaderVideoStreamId)
                    {
                        // video stream
                        // have we seen this before?
                        if (videoStreamIsAtEOS == false)
                        {
                            hr = sinkWriter.NotifyEndOfSegment(sinkWriterOutputVideoStreamId);
                            if (hr != HResult.S_OK)
                            {
                                // we failed
                                throw new Exception("CopyFile: Failed on calling the NotifyEndOfSegment on video stream, retVal=" + hr.ToString());
                            }
                            videoStreamIsAtEOS = true;
                        }
                    }
                    else if (actualStreamIndex == sourceReaderAudioStreamId)
                    {
                        // audio stream
                        // have we seen this before?
                        if (audioStreamIsAtEOS == false)
                        {
                            hr = sinkWriter.NotifyEndOfSegment(sinkWriterOutputAudioStreamId);
                            if (hr != HResult.S_OK)
                            {
                                // we failed
                                throw new Exception("CopyFile: Failed on calling the NotifyEndOfSegment on audio stream, retVal=" + hr.ToString());
                            }
                            audioStreamIsAtEOS = true;
                        }
                        // audio stream
                    }
                    else
                    {
                    }

                    // our exit condition depends on which streams are in use
                    if ((sourceReaderNativeVideoMediaType != null) && (sourceReaderNativeAudioMediaType != null))
                    {
                        // if both streams are at EOS we can leave
                        if ((videoStreamIsAtEOS == true) && (audioStreamIsAtEOS == true))
                        {
                            break;
                        }
                    }
                    else if (sourceReaderNativeVideoMediaType != null)
                    {
                        // only video is active, if the video stream is EOS we can leave
                        if (videoStreamIsAtEOS == true)
                        {
                            break;
                        }
                    }
                    else if (sourceReaderNativeAudioMediaType != null)
                    {
                        // only audio is active, if the audio stream is EOS we can leave
                        if (audioStreamIsAtEOS == true)
                        {
                            break;
                        }
                    }
                }
            } // bottom of endless while loop

            hr = sinkWriter.Finalize_();
            if (hr != HResult.S_OK)
            {
                // we failed
                throw new Exception("Failed on call tosinkWriter.Finalize(), retVal=" + hr.ToString());
            }
        }
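For orientation, here is the read/write pump at the heart of CopyFile reduced to a single-stream skeleton. This is a simplification for illustration, not a replacement for the method above: it assumes the reader and writer streams are already configured and BeginWriting has been called, and it omits the discontinuity flag, stream ticks and end-of-segment notifications that the full method handles.

        private void PumpOneStream(IMFSourceReader sourceReader, IMFSinkWriter sinkWriter, int readerStreamId, int sinkStreamId)
        {
            HResult hr;
            while (true)
            {
                int actualStreamIndex;
                MF_SOURCE_READER_FLAG actualStreamFlags;
                long timeStamp;
                IMFSample sample;

                hr = sourceReader.ReadSample(readerStreamId, 0,
                    out actualStreamIndex, out actualStreamFlags, out timeStamp, out sample);
                if (hr != HResult.S_OK) throw new Exception("ReadSample failed, hr=" + hr.ToString());

                if (sample != null)
                {
                    try
                    {
                        hr = sinkWriter.WriteSample(sinkStreamId, sample);
                        if (hr != HResult.S_OK) throw new Exception("WriteSample failed, hr=" + hr.ToString());
                    }
                    finally
                    {
                        Marshal.ReleaseComObject(sample);
                    }
                }

                // a null sample plus the EndOfStream flag signals the end of the input
                if ((actualStreamFlags & MF_SOURCE_READER_FLAG.EndOfStream) != 0) break;
            }

            // flush and close the output file
            hr = sinkWriter.Finalize_();
            if (hr != HResult.S_OK) throw new Exception("Finalize_ failed, hr=" + hr.ToString());
        }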
Example #16
File: frmMain.cs Project: yudigh/Tanta
        /// +=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=
        /// <summary>
        /// Starts capture of the data to a file.
        ///
        /// Because this code is intended for demo purposes and in the interests of
        /// reducing complexity it is extremely linear and step-by-step. Doubtless
        /// there is much refactoring that could be done.
        ///
        /// </summary>
        /// <history>
        ///    01 Nov 18  Cynic - Started
        /// </history>
        private void CaptureToFile()
        {
            HResult       hr;
            IMFMediaType  videoType     = null;
            IMFMediaType  encoderType   = null;
            TantaMFDevice currentDevice = null;

            try
            {
                // get the current video device.
                currentDevice = ctlTantaVideoPicker1.CurrentDevice;
                if (currentDevice == null)
                {
                    MessageBox.Show("No current video device. Are there any video devices on this system?");
                    return;
                }

                // check our output filename is correct and usable
                if ((textBoxCaptureFileNameAndPath == null) || (textBoxCaptureFileNameAndPath.Text.Length == 0))
                {
                    MessageBox.Show("No Capture Filename and path. Cannot continue.");
                    return;
                }
                string pwszFileName = textBoxCaptureFileNameAndPath.Text;
                // check the path is rooted
                if (Path.IsPathRooted(pwszFileName) == false)
                {
                    MessageBox.Show("No Capture Filename and path is not rooted. A full directory and path is required. Cannot continue.");
                    return;
                }

                // create a new call back handler. This, once we get it all wired up, will act
                // as a pump to move the data from the source to the sink
                workingSourceReaderCallBackHandler = new TantaSourceReaderCallbackHandler();

                // the following code will create a SourceReader which is tied to a camera on the system,
                // a SinkWriter which is tied to a file output and will hook up the two. Because we are using
                // a SourceReader and SinkWriter we do not have the usual Topology or Pipeline. The SourceReader
                // and SinkWriter are connected directly (input to output) in the code below and transfer their
                // data via the callback handler. The callback handler also requests the next sample from
                // the SourceReader when it has written the data to the sink. Note however it is possible
                // that the SourceReader can automatically bring in a Transform for format conversion. This
                // is done internally and you never deal with it - other than perhaps making it available
                // to the process if it is not globally available.

                // create the source reader
                workingSourceReader = TantaWMFUtils.CreateSourceReaderAsyncFromDevice(currentDevice, workingSourceReaderCallBackHandler);
                if (workingSourceReader == null)
                {
                    MessageBox.Show("CreateSourceReaderAsyncFromDevice did not return a media source. Cannot continue.");
                    return;
                }

                // open up the sink Writer
                workingSinkWriter = OpenSinkWriter(pwszFileName);
                if (workingSinkWriter == null)
                {
                    MessageBox.Show("OpenSinkWriter workingSinkWriter == null. Cannot continue.");
                    return;
                }

                // now set the source and the sink in the callback handler. It needs to know these
                // in order to operate
                workingSourceReaderCallBackHandler.SourceReader = workingSourceReader;
                workingSourceReaderCallBackHandler.SinkWriter   = workingSinkWriter;
                workingSourceReaderCallBackHandler.InitForFirstSample();
                workingSourceReaderCallBackHandler.SourceReaderAsyncCallBackError = HandleSourceReaderAsyncCallBackErrors;

                // now we configure the video source. It will probably offer a lot of different types
                // this example offers two modes: one mode where you choose the format and mode and
                // effectively just say "Use this one". The other uses the general case where we
                // present a list of reasonable types we can accept and then let it auto
                // configure itself from one of those. Of course if it autoconfigures itself we
                // don't know which one it has chosen. This is why you will later see the video
                // source being interrogated after the configuration so we know which one we hit.

                if (radioButtonUseSpecified.Checked == true)
                {
                    // we saved the video format container here - this is just the last one that came in
                    if ((radioButtonUseSpecified.Tag == null) || ((radioButtonUseSpecified.Tag is TantaMFVideoFormatContainer) == false))
                    {
                        MessageBox.Show("No source video device and format selected. Cannot continue.");
                        return;
                    }
                    // get the container
                    TantaMFVideoFormatContainer videoFormatCont = (radioButtonUseSpecified.Tag as TantaMFVideoFormatContainer);
                    // configure the Source Reader to use this format
                    hr = TantaWMFUtils.ConfigureSourceReaderWithVideoFormat(workingSourceReader, videoFormatCont);
                    if (hr != HResult.S_OK)
                    {
                        // we failed
                        MessageBox.Show("Failed on call to ConfigureSourceAsyncReaderWithVideoFormat (a), retVal=" + hr.ToString());
                        return;
                    }
                }
                else
                {
                    // prepare a list of subtypes we are prepared to accept from the video source
                    // device. These will be tested in order - the first match will be used.
                    List <Guid> subTypes = new List <Guid>();
                    subTypes.Add(MFMediaType.NV12);
                    subTypes.Add(MFMediaType.YUY2);
                    subTypes.Add(MFMediaType.UYVY);
                    subTypes.Add(MFMediaType.RGB32);
                    subTypes.Add(MFMediaType.RGB24);
                    subTypes.Add(MFMediaType.IYUV);

                    // make sure the default Media Type is one of the above video formats
                    hr = TantaWMFUtils.ConfigureSourceReaderWithVideoFormat(workingSourceReader, subTypes, false);
                    if (hr != HResult.S_OK)
                    {
                        // we failed
                        MessageBox.Show("Failed on call to ConfigureSourceAsyncReaderWithVideoFormat (b), retVal=" + hr.ToString());
                        return;
                    }
                }

                // if we get here we know the source reader now has a configured format but we might not
                // know which one it is. So we ask it. It will return a video type
                // we will use this later on to configure our sink writer. Note, we have to properly dispose
                // of the videoType object after we use it.
                hr = workingSourceReader.GetCurrentMediaType(TantaWMFUtils.MF_SOURCE_READER_FIRST_VIDEO_STREAM, out videoType);
                if (hr != HResult.S_OK)
                {
                    // we failed
                    throw new Exception("Failed on call to GetCurrentMediaType, retVal=" + hr.ToString());
                }

                // now we configure the encoder. This sets up the sink writer so that it knows what format
                // the output data should be written in. The format we give the writer does not
                // need to be the same as the format it outputs to disk - however to make life easier for ourselves
                // we will copy a lot of the settings from the videoType retrieved above

                // create a new empty media type for us to populate
                hr = MFExtern.MFCreateMediaType(out encoderType);
                if (hr != HResult.S_OK)
                {
                    // we failed
                    throw new Exception("Failed on call to MFCreateMediaType, retVal=" + hr.ToString());
                }

                // The major type defines the overall category of the media data. Major types include video, audio, script & etc.
                hr = encoderType.SetGUID(MFAttributesClsid.MF_MT_MAJOR_TYPE, MFMediaType.Video);
                if (hr != HResult.S_OK)
                {
                    // we failed
                    throw new Exception("Failed setting the MF_MT_MAJOR_TYPE, retVal=" + hr.ToString());
                }

                // The subtype GUID defines a specific media format type within a major type. For example, within video,
                // the subtypes include MFMediaType.H264 (MP4), MFMediaType.WMV3 (WMV), MJPEG & etc. Within audio, the
                // subtypes include PCM audio, Windows Media Audio 9, & etc.
                hr = encoderType.SetGUID(MFAttributesClsid.MF_MT_SUBTYPE, MEDIA_TYPETO_WRITE);
                if (hr != HResult.S_OK)
                {
                    // we failed
                    throw new Exception("Failed setting the MF_MT_SUBTYPE, retVal=" + hr.ToString());
                }

                // this is the approximate data rate of the video stream, in bits per second, for a video media type
                // in the MF.Net sample code this is 240000 but I found 2000000 to be much better. I am not sure,
                // at this time, how this value is derived or what the tradeoffs are.
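                // (a rough rule of thumb - my assumption, not something taken from the MF documentation -
                // is bitRate = width * height * framesPerSec * bitsPerPixelOfCompressedOutput;
                // for 640 x 480 at 30fps and about 0.2 compressed bits per pixel that works out to
                // roughly 1.8 Mbit/s, which is in the same ballpark as the 2000000 used here)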
                hr = encoderType.SetUINT32(MFAttributesClsid.MF_MT_AVG_BITRATE, TARGET_BIT_RATE);
                if (hr != HResult.S_OK)
                {
                    // we failed
                    throw new Exception("Failed setting the MF_MT_AVG_BITRATE, retVal=" + hr.ToString());
                }

                // populate our new encoding type with the frame size of the videoType selected earlier
                hr = TantaWMFUtils.CopyAttributeData(videoType, encoderType, MFAttributesClsid.MF_MT_FRAME_SIZE);
                if (hr != HResult.S_OK)
                {
                    // we failed
                    throw new Exception("Failed copying the MF_MT_FRAME_SIZE, retVal=" + hr.ToString());
                }

                // populate our new encoding type with the frame rate of the video type selected earlier
                hr = TantaWMFUtils.CopyAttributeData(videoType, encoderType, MFAttributesClsid.MF_MT_FRAME_RATE);
                if (hr != HResult.S_OK)
                {
                    // we failed
                    throw new Exception("Failed copying the MF_MT_FRAME_RATE, retVal=" + hr.ToString());
                }

                // populate our new encoding type with the pixel aspect ratio of the video type selected earlier
                hr = TantaWMFUtils.CopyAttributeData(videoType, encoderType, MFAttributesClsid.MF_MT_PIXEL_ASPECT_RATIO);
                if (hr != HResult.S_OK)
                {
                    // we failed
                    throw new Exception("Failed copying the MF_MT_PIXEL_ASPECT_RATIO, retVal=" + hr.ToString());
                }

                // populate our new encoding type with the interlace mode of the video type selected earlier
                hr = TantaWMFUtils.CopyAttributeData(videoType, encoderType, MFAttributesClsid.MF_MT_INTERLACE_MODE);
                if (hr != HResult.S_OK)
                {
                    // we failed
                    throw new Exception("Failed copying the MF_MT_INTERLACE_MODE, retVal=" + hr.ToString());
                }

                // add a stream to the sink writer. The encoderType specifies the format of the samples that will be written
                // to the file. Note that it does not necessarily need to match the input format. To set the input format
                // use SetInputMediaType.
                int sink_stream = 0;
                hr = workingSinkWriter.AddStream(encoderType, out sink_stream);
                if (hr != HResult.S_OK)
                {
                    // we failed
                    throw new Exception("Failed adding the output stream, retVal=" + hr.ToString());
                }

                // Windows 10, by default, provides an adequate set of codecs which the Sink Writer can
                // find to write out the MP4 file. This is not true on Windows 7.

                // If we are not on Windows 10 we register (locally) a codec
                // the Sink Writer can find and use. The ColorConvertDMO is supplied by
                // Microsoft; it is just not available to enumerate on Win7 etc.
                // Making it available locally does not require administrator privileges,
                // but only this process can see it and it disappears when the process
                // closes.
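                // For reference: Environment.OSVersion reports Windows 7 as 6.1 (versionID 61),
                // Windows 8 as 6.2 (62) and Windows 10 as 10.0 (100), so the check below really
                // means "older than Windows 8" rather than "not Windows 10". Note also that on
                // .NET Framework, without an application manifest declaring support for the newer
                // versions, OSVersion can report 6.2 even on Windows 8.1/10 - harmless here, since
                // that simply skips a registration those systems do not need.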
                OperatingSystem os        = Environment.OSVersion;
                int             versionID = ((os.Version.Major * 10) + os.Version.Minor);
                if (versionID < 62)
                {
                    Guid ColorConvertDMOGUID = new Guid("98230571-0087-4204-b020-3282538e57d3");

                    // Register the color converter DSP for this process, in the video
                    // processor category. This will enable the sink writer to enumerate
                    // the color converter when the sink writer attempts to match the
                    // media types.
                    hr = MFExtern.MFTRegisterLocalByCLSID(
                        ColorConvertDMOGUID,
                        MFTransformCategory.MFT_CATEGORY_VIDEO_PROCESSOR,
                        "",
                        MFT_EnumFlag.SyncMFT,
                        0,
                        null,
                        0,
                        null
                        );
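                    // note: the HRESULT from MFTRegisterLocalByCLSID is not checked here; if the
                    // registration were to fail the problem would surface later, when
                    // SetInputMediaType cannot find a converter between the input and output types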
                }

                // Set the input format for a stream on the sink writer. Note the use of the stream index here.
                // The input format does not have to match the target format that is written to the media sink.
                // If the formats do not match, this call attempts to load a transform
                // that can convert from the input format to the target format. If it cannot find one (and this is not
                // a sure thing), the call fails and we throw an exception.
                hr = workingSinkWriter.SetInputMediaType(sink_stream, videoType, null);
                if (hr != HResult.S_OK)
                {
                    // we failed
                    throw new Exception("Failed on calling SetInputMediaType on the writer, retVal=" + hr.ToString());
                }

                // now we initialize the sink writer for writing. We call this method after configuring the
                // input streams but before we send any data to the sink writer. The underlying media sink must
                // have at least one input stream and we know it does because we set it up earlier
                hr = workingSinkWriter.BeginWriting();
                if (hr != HResult.S_OK)
                {
                    // we failed
                    throw new Exception("Failed on calling BeginWriting on the writer, retVal=" + hr.ToString());
                }

                // Request the first video frame from the media source. The TantaSourceReaderCallbackHandler
                // set up earlier will be invoked and it will continue requesting and processing video
                // frames after that.
                hr = workingSourceReader.ReadSample(
                    TantaWMFUtils.MF_SOURCE_READER_FIRST_VIDEO_STREAM,
                    0,
                    IntPtr.Zero,
                    IntPtr.Zero,
                    IntPtr.Zero,
                    IntPtr.Zero
                    );
                if (hr != HResult.S_OK)
                {
                    // we failed
                    throw new Exception("Failed on calling the first ReadSample on the reader, retVal=" + hr.ToString());
                }

                // we are ready to start, flag this
                buttonStartStopCapture.Text = STOP_CAPTURE;
                // disable our screen controls
                SetEnableStateOnScreenControls(false);
            }
            finally
            {
                // setting this to null will cause it to be cleaned up
                currentDevice = null;

                // close and release
                if (videoType != null)
                {
                    Marshal.ReleaseComObject(videoType);
                    videoType = null;
                }

                if (encoderType != null)
                {
                    Marshal.ReleaseComObject(encoderType);
                    encoderType = null;
                }
            }
        }
Example #17
        /// +=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=
        /// <summary>
        /// Displays the video formats for the currently selected video device. This
        /// is more complicated than it looks. We have to open the video source, convert
        /// that to a Media Source and then interrogate that source to find a list
        /// of video formats.
        /// </summary>
        /// <history>
        ///    01 Nov 18  Cynic - Started
        /// </history>
        private void DisplayVideoFormatsForCurrentCaptureDevice()
        {
            IMFSourceReaderAsync tmpSourceReader = null;
            List <TantaMFVideoFormatContainer> formatList;
            HResult hr;

            try
            {
                // clear what we have now
                listViewSupportedFormats.Clear();
                // reset this
                listViewSupportedFormats.ListViewItemSorter = null;

                // get the currently selected device
                TantaMFDevice currentDevice = (TantaMFDevice)comboBoxCaptureDevices.SelectedItem;
                if (currentDevice == null)
                {
                    return;
                }

                // open up the media source
                tmpSourceReader = TantaWMFUtils.CreateSourceReaderAsyncFromDevice(currentDevice, null);
                if (tmpSourceReader == null)
                {
                    throw new Exception("DisplayVideoFormatsForCurrentCaptureDevice, CreateSourceReaderAsyncFromDevice did not return a media source. Cannot continue.");
                }

                // now get a list of all supported formats from the video device
                hr = TantaMediaTypeInfo.GetSupportedVideoFormatsFromSourceReaderInFormatContainers(currentDevice, tmpSourceReader, 100, out formatList);
                if (hr != HResult.S_OK)
                {
                    throw new Exception("DisplayVideoFormatsForCurrentCaptureDevice, GetSupportedVideoFormatsFromSourceReaderInFormatContainers failed. HR=" + hr.ToString());
                }
                if (formatList == null)
                {
                    throw new Exception("DisplayVideoFormatsForCurrentCaptureDevice, GetSupportedVideoFormatsFromSourceReaderInFormatContainers did not return a format list. Cannot continue.");
                }

                // now display the formats
                foreach (TantaMFVideoFormatContainer videoFormat in formatList)
                {
                    ListViewItem lvi = new ListViewItem(new[] { videoFormat.SubTypeAsString, videoFormat.FrameSizeAsString, videoFormat.FrameRateAsString, videoFormat.FrameRateMaxAsString, videoFormat.AllAttributes });
                    lvi.Tag = videoFormat;
                    listViewSupportedFormats.Items.Add(lvi);
                }

                listViewSupportedFormats.Columns.Add("Type", 70);
                listViewSupportedFormats.Columns.Add("FrameSize WxH", 100);
                listViewSupportedFormats.Columns.Add("FrameRate f/s", 100);
                listViewSupportedFormats.Columns.Add("FrameRateMax f/s", 100);
                listViewSupportedFormats.Columns.Add("All Attributes", 2500);
            }
            finally
            {
                if (tmpSourceReader != null)
                {
                    // close and release
                    Marshal.ReleaseComObject(tmpSourceReader);
                    tmpSourceReader = null;
                }
            }
        }
Example #18
        /// +=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=
        /// <summary>
        /// Displays the transforms for the currently selected category. Since each
        /// transform can appear in more than one sub-category we enumerate with each
        /// MFT_EnumFlag in turn and merge the results.
        /// </summary>
        /// <history>
        ///    01 Nov 18  Cynic - Started
        /// </history>
        private void DisplayTransformsForCurrentCategory()
        {
            int numResults;

            IMFActivate[] activatorArray;
            List <TantaMFTCapabilityContainer> transformList = new List <TantaMFTCapabilityContainer>();
            HResult hr;

            try
            {
                // clear what we have now
                listViewAvailableTransforms.Clear();
                // reset this
                listViewAvailableTransforms.ListViewItemSorter = null;

                // get the currently selected major category
                TantaGuidNamePair currentCategory = (TantaGuidNamePair)comboBoxTransformCategories.SelectedItem;
                if (currentCategory == null)
                {
                    return;
                }

                // we have multiple sub-categories. These are set by specific flags on the MFTEnumEx call. We iterate
                // through each flag and get the matching transforms. If we already have it we just set the flag on
                // the existing one to show it is in multiple sub-categories

                foreach (MFT_EnumFlag flagVal in Enum.GetValues(typeof(MFT_EnumFlag)))
                {
                    // we do not need this one
                    if (flagVal == MFT_EnumFlag.None)
                    {
                        continue;
                    }
                    // The documentation states that there is no way to enumerate just local MFTs and nothing else.
                    // Setting Flags equal to MFT_ENUM_FLAG_LOCALMFT is equivalent to including the MFT_ENUM_FLAG_SYNCMFT flag
                    // which messes us up. This also appears to be true for the FieldOfUse and transcode only flags so we
                    // do not include them
                    if (flagVal == MFT_EnumFlag.LocalMFT)
                    {
                        continue;
                    }
                    if (flagVal == MFT_EnumFlag.FieldOfUse)
                    {
                        continue;
                    }
                    if (flagVal == MFT_EnumFlag.TranscodeOnly)
                    {
                        continue;
                    }
                    // some of the higher flags are just for sorting the return results
                    if (flagVal >= MFT_EnumFlag.All)
                    {
                        break;
                    }

                    hr = MFExtern.MFTEnumEx(currentCategory.GuidValue, flagVal, null, null, out activatorArray, out numResults);
                    if (hr != HResult.S_OK)
                    {
                        throw new Exception("DisplayTransformsForCurrentCategory, call to MFExtern.MFTEnumEx failed. HR=" + hr.ToString());
                    }

                    // now loop through the returned activators
                    for (int i = 0; i < numResults; i++)
                    {
                        // extract the friendlyName and symbolicLinkName
                        Guid   outGuid      = TantaWMFUtils.GetGuidForKeyFromActivator(activatorArray[i], MFAttributesClsid.MFT_TRANSFORM_CLSID_Attribute);
                        string friendlyName = TantaWMFUtils.GetStringForKeyFromActivator(activatorArray[i], MFAttributesClsid.MFT_FRIENDLY_NAME_Attribute);

                        // create a new TantaMFTCapabilityContainer for it
                        TantaMFTCapabilityContainer workingMFTContainer = new TantaMFTCapabilityContainer(friendlyName, outGuid, currentCategory);
                        // do we have this in our list yet
                        int index = transformList.FindIndex(x => x.TransformGuidValue == workingMFTContainer.TransformGuidValue);
                        if (index >= 0)
                        {
                            // yes, it does contain this transform, just record the new sub-category
                            transformList[index].EnumFlags |= flagVal;
                        }
                        else
                        {
                            // no, it does not contain this transform yet, set the sub-category
                            workingMFTContainer.EnumFlags = flagVal;
                            // and add it
                            transformList.Add(workingMFTContainer);

                            if ((activatorArray[i] is IMFAttributes) == true)
                            {
                                StringBuilder outSb = null;
                                List <string> attributesToIgnore = new List <string>();
                                attributesToIgnore.Add("MFT_FRIENDLY_NAME_Attribute");
                                attributesToIgnore.Add("MFT_TRANSFORM_CLSID_Attribute");
                                attributesToIgnore.Add("MF_TRANSFORM_FLAGS_Attribute");
                                attributesToIgnore.Add("MF_TRANSFORM_CATEGORY_Attribute");
                                hr = TantaWMFUtils.EnumerateAllAttributeNamesAsText((activatorArray[i] as IMFAttributes), attributesToIgnore, 100, out outSb);
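                                // note: outSb is not used further here - the call above appears to be
                                // there just to exercise the attribute enumeration helper while we
                                // have the activator in hand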
                            }
                        }

                        // clean up our activator
                        Marshal.ReleaseComObject(activatorArray[i]);
                    }
                }

                // now display the transforms
                foreach (TantaMFTCapabilityContainer mftCapability in transformList)
                {
                    ListViewItem lvi = new ListViewItem(new[] { mftCapability.TransformFriendlyName, mftCapability.IsSyncMFT, mftCapability.IsAsyncMFT, mftCapability.IsHardware, /* mftCapability.IsFieldOfUse, mftCapability.IsLocalMFT, mftCapability.IsTranscodeOnly, */ mftCapability.TransformGuidValueAsString });
                    lvi.Tag = mftCapability;
                    listViewAvailableTransforms.Items.Add(lvi);
                }

                listViewAvailableTransforms.Columns.Add("Name", 250);
                listViewAvailableTransforms.Columns.Add("IsSyncMFT", 70);
                listViewAvailableTransforms.Columns.Add("IsAsyncMFT", 90);
                listViewAvailableTransforms.Columns.Add("IsHardware", 90);
                //  listViewAvailableTransforms.Columns.Add("IsFieldOfUse", 90);
                //  listViewAvailableTransforms.Columns.Add("IsLocalMFT", 90);
                //  listViewAvailableTransforms.Columns.Add("IsTranscodeOnly", 90);
                listViewAvailableTransforms.Columns.Add("Guid", 200);
            }
            finally
            {
            }
        }
Example #19
        /// +=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=
        /// <summary>
        ///  Starts the process of recording and creates the sink writer. We do not
        ///  check to see if the filename is viable or already exists. This is
        ///  assumed to have been done before this call.
        /// </summary>
        /// <param name="outputFileName">the output file name</param>
        /// <param name="incomingVideoMediaType">the incoming media type</param>
        /// <param name="wantTimebaseRebaseIn">if true we rebase all incoming sample
        /// times to zero from the point we started recording and send a copy of the
        /// sample to the sink writer instead of the input sample</param>
        /// <returns>zero for success, nonzero for fail</returns>
        /// <history>
        ///    01 Nov 18  Cynic - Started
        /// </history>
        public int StartRecording(string outputFileName, IMFMediaType incomingVideoMediaType, bool wantTimebaseRebaseIn)
        {
            HResult      hr;
            IMFMediaType encoderType = null;

            LogMessage("MFTTantaSampleGrabber_Sync, StartRecording called");

            // first stop any recordings now
            StopRecording();

            // check the output file name for sanity
            if ((outputFileName == null) || (outputFileName.Length == 0))
            {
                LogMessage("StartRecording (outputFileName==null)|| (outputFileName.Length==0)");
                return(100);
            }

            // check the media type for sanity
            if (incomingVideoMediaType == null)
            {
                LogMessage("StartRecording videoMediaType == null");
                return(150);
            }

            lock (sinkWriterLockObject)
            {
                // create the sink writer
                workingSinkWriter = OpenSinkWriter(outputFileName, true);
                if (workingSinkWriter == null)
                {
                    LogMessage("StartRecording failed to create sink writer");
                    return(200);
                }

                // now configure the SinkWriter. This sets up the sink writer so that it knows what format
                // the output data should be written in. The format we give the writer does not
                // need to be the same as the format it receives as input - however to make life easier for ourselves
                // we will copy a lot of the settings from the incomingVideoMediaType passed in above

                // create a new empty media type for us to populate
                hr = MFExtern.MFCreateMediaType(out encoderType);
                if (hr != HResult.S_OK)
                {
                    // we failed
                    throw new Exception("Failed on call to MFCreateMediaType, retVal=" + hr.ToString());
                }

                // The major type defines the overall category of the media data. Major types include video, audio, script & etc.
                hr = encoderType.SetGUID(MFAttributesClsid.MF_MT_MAJOR_TYPE, MFMediaType.Video);
                if (hr != HResult.S_OK)
                {
                    // we failed
                    throw new Exception("Failed setting the MF_MT_MAJOR_TYPE, retVal=" + hr.ToString());
                }

                // The subtype GUID defines a specific media format type within a major type. For example, within video,
                // the subtypes include MFMediaType.H264 (MP4), MFMediaType.WMV3 (WMV), MJPEG & etc. Within audio, the
                // subtypes include PCM audio, Windows Media Audio 9, & etc.
                hr = encoderType.SetGUID(MFAttributesClsid.MF_MT_SUBTYPE, MEDIA_TYPETO_WRITE);
                if (hr != HResult.S_OK)
                {
                    // we failed
                    throw new Exception("Failed setting the MF_MT_SUBTYPE, retVal=" + hr.ToString());
                }

                // this is the approximate data rate of the video stream, in bits per second, for a
                // video media type. The choice here is somewhat arbitrary but seems to work well.
                hr = encoderType.SetUINT32(MFAttributesClsid.MF_MT_AVG_BITRATE, TARGET_BIT_RATE);
                if (hr != HResult.S_OK)
                {
                    // we failed
                    throw new Exception("Failed setting the MF_MT_AVG_BITRATE, retVal=" + hr.ToString());
                }

                // populate our new encoding type with the frame size of the incoming video media type
                hr = TantaWMFUtils.CopyAttributeData(incomingVideoMediaType, encoderType, MFAttributesClsid.MF_MT_FRAME_SIZE);
                if (hr != HResult.S_OK)
                {
                    // we failed
                    throw new Exception("Failed copying the MF_MT_FRAME_SIZE, retVal=" + hr.ToString());
                }

                // populate our new encoding type with the frame rate of the video type selected earlier
                hr = TantaWMFUtils.CopyAttributeData(incomingVideoMediaType, encoderType, MFAttributesClsid.MF_MT_FRAME_RATE);
                if (hr != HResult.S_OK)
                {
                    // we failed
                    throw new Exception("Failed copying the MF_MT_FRAME_RATE, retVal=" + hr.ToString());
                }

                // populate our new encoding type with the pixel aspect ratio of the video type selected earlier
                hr = TantaWMFUtils.CopyAttributeData(incomingVideoMediaType, encoderType, MFAttributesClsid.MF_MT_PIXEL_ASPECT_RATIO);
                if (hr != HResult.S_OK)
                {
                    // we failed
                    throw new Exception("Failed copying the MF_MT_PIXEL_ASPECT_RATIO, retVal=" + hr.ToString());
                }

                // populate our new encoding type with the interlace mode of the video type selected earlier
                hr = TantaWMFUtils.CopyAttributeData(incomingVideoMediaType, encoderType, MFAttributesClsid.MF_MT_INTERLACE_MODE);
                if (hr != HResult.S_OK)
                {
                    // we failed
                    throw new Exception("Failed copying the MF_MT_INTERLACE_MODE, retVal=" + hr.ToString());
                }

                // add a stream to the sink writer for the output media type. The
                // encoderType specifies the format of the samples that will
                // be written to the file. Note that it does not necessarily need to
                // match the input format.
                hr = workingSinkWriter.AddStream(encoderType, out sinkWriterVideoStreamId);
                if (hr != HResult.S_OK)
                {
                    // we failed
                    throw new Exception("StartRecording Failed adding the output stream(v), retVal=" + hr.ToString());
                }

                // Windows 10, by default, provides an adequate set of codecs which the Sink Writer can
                // find to write out the MP4 file. This is not true on Windows 7.

                // If we are not on Windows 10 we register (locally) a codec
                // the Sink Writer can find and use. The ColorConvertDMO is supplied by
                // Microsoft; it is just not available to enumerate on Win7 etc.
                // Making it available locally does not require administrator privileges,
                // but only this process can see it and it disappears when the process
                // closes.
                OperatingSystem os        = Environment.OSVersion;
                int             versionID = ((os.Version.Major * 10) + os.Version.Minor);
                if (versionID < 62)
                {
                    Guid ColorConverterDMOGUID = new Guid("98230571-0087-4204-b020-3282538e57d3");

                    // Register the color converter DSP for this process, in the video
                    // processor category. This will enable the sink writer to enumerate
                    // the color converter when the sink writer attempts to match the
                    // media types.
                    hr = MFExtern.MFTRegisterLocalByCLSID(
                        ColorConverterDMOGUID,
                        MFTransformCategory.MFT_CATEGORY_VIDEO_PROCESSOR,
                        "",
                        MFT_EnumFlag.SyncMFT,
                        0,
                        null,
                        0,
                        null
                        );
                }

                // Set the input format for a stream on the sink writer. Note the use of the stream index here
                // The input format does not have to match the target format that is written to the media sink
                // If the formats do not match, this call attempts to load a transform
                // that can convert from the input format to the target format. If it cannot find one (and this is not
                // a sure thing), the call fails and we throw an exception.
                hr = workingSinkWriter.SetInputMediaType(sinkWriterVideoStreamId, incomingVideoMediaType, null);
                if (hr != HResult.S_OK)
                {
                    // we failed
                    throw new Exception("StartRecording Failed on calling SetInputMediaType(v) on the writer, retVal=" + hr.ToString());
                }

                // set this flag now
                wantTimebaseRebase = wantTimebaseRebaseIn;

                // now we initialize the sink writer for writing. We call this method after configuring the
                // input streams but before we send any data to the sink writer. The underlying media sink must
                // have at least one input stream and we know it does because we set it up above
                hr = workingSinkWriter.BeginWriting();
                if (hr != HResult.S_OK)
                {
                    // we failed
                    throw new Exception("StartRecording Failed on calling BeginWriting on the writer, retVal=" + hr.ToString());
                }
            }

            return(0);
        }
Example #20
        /// +=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=
        /// <summary>
        /// Gets a list of all supported video formats from a media type
        /// as a nice displayable bit of text. outSb will never be null but it
        /// can be empty.
        ///
        /// Adapted from
        /// https://msdn.microsoft.com/en-us/library/windows/desktop/ee663602(v=vs.85).aspx
        /// </summary>
        /// <returns>S_OK for success, nz for fail</returns>
        /// <param name="mediaTypeObj">the media type object</param>
        /// <param name="outSb">The output string</param>
        /// <history>
        ///    01 Nov 18  Cynic - Started
        /// </history>
        public static HResult GetSupportedFormatsFromMediaTypeAsText(IMFMediaType mediaTypeObj, out StringBuilder outSb)
        {
            Guid majorType;
            Guid subType;
            int  attributeCount;
            int  frameSizeWidth;
            int  frameSizeHeight;
            int  frameRate;
            int  frameRateDenominator;
            int  frameRateMin;
            int  frameRateMinDenominator;
            int  frameRateMax;
            int  frameRateMaxDenominator;

            // we always return something here
            outSb = new StringBuilder();

            // sanity check
            if (mediaTypeObj == null)
            {
                return(HResult.E_FAIL);
            }

            // Retrieves the number of attributes that are set on this object.
            HResult hr = mediaTypeObj.GetCount(out attributeCount);

            if (hr != HResult.S_OK)
            {
                // if we failed here, bail out
                outSb.Append("failed getting attributeCount, retVal=" + hr.ToString());
                outSb.Append("\r\n");
                return(HResult.E_FAIL);
            }
            // put in this line now
            //   outSb.Append("attributeCount=" + attributeCount.ToString()+", ");

            // MF_MT_MAJOR_TYPE
            // Major type GUID, we return this as human readable text
            hr = mediaTypeObj.GetMajorType(out majorType);
            if (hr == HResult.S_OK)
            {
                // only report success
                outSb.Append("MF_MT_MAJOR_TYPE=" + TantaWMFUtils.ConvertGuidToName(majorType) + ", ");
            }

            // MF_MT_SUBTYPE
            // Subtype GUID which describes the basic media type, we return this as human readable text
            hr = mediaTypeObj.GetGUID(MFAttributesClsid.MF_MT_SUBTYPE, out subType);
            if (hr == HResult.S_OK)
            {
                // only report success
                outSb.Append("MF_MT_SUBTYPE=" + TantaWMFUtils.ConvertGuidToName(subType) + ", ");
            }

            // MF_MT_FRAME_SIZE
            // the Width and height of a video frame, in pixels
            hr = MFExtern.MFGetAttributeSize(mediaTypeObj, MFAttributesClsid.MF_MT_FRAME_SIZE, out frameSizeWidth, out frameSizeHeight);
            if (hr == HResult.S_OK)
            {
                // only report success
                outSb.Append("MF_MT_FRAME_SIZE (W,H)=(" + frameSizeWidth.ToString() + "," + frameSizeHeight.ToString() + "), ");
            }

            // MF_MT_FRAME_RATE
            // The frame rate is expressed as a ratio. The upper 32 bits of the attribute value contain the numerator and the lower 32 bits contain the denominator.
            // For example, if the frame rate is 30 frames per second (fps), the ratio is 30 / 1. If the frame rate is 29.97 fps, the ratio is 30,000 / 1001.
            // we report this back to the user as a decimal
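            // (for reference, MFGetAttributeRatio is - as far as I can tell - equivalent to reading
            // the attribute as a UINT64 and splitting it, roughly:
            //     long packed;
            //     mediaTypeObj.GetUINT64(MFAttributesClsid.MF_MT_FRAME_RATE, out packed);
            //     int num = (int)(packed >> 32);
            //     int den = (int)(packed & 0xFFFFFFFF);
            // the helper just saves us doing the bit twiddling ourselves)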
            hr = MFExtern.MFGetAttributeRatio(mediaTypeObj, MFAttributesClsid.MF_MT_FRAME_RATE, out frameRate, out frameRateDenominator);
            if (hr == HResult.S_OK)
            {
                // only report success
                if (frameRateDenominator <= 0)
                {
                    outSb.Append("MF_MT_FRAME_RATE (frames/s)=(undefined),");
                }
                else
                {
                    outSb.Append("MF_MT_FRAME_RATE=" + ((decimal)frameRate / (decimal)frameRateDenominator).ToString() + "f/s, ");
                }
            }

            // MF_MT_FRAME_RATE_RANGE_MIN
            // The frame rate is expressed as a ratio. The upper 32 bits of the attribute value contain the numerator and the lower 32 bits contain the denominator.
            // For example, if the frame rate is 30 frames per second (fps), the ratio is 30 / 1. If the frame rate is 29.97 fps, the ratio is 30,000 / 1001.
            // we report this back to the user as a decimal
            hr = MFExtern.MFGetAttributeRatio(mediaTypeObj, MFAttributesClsid.MF_MT_FRAME_RATE_RANGE_MIN, out frameRateMin, out frameRateMinDenominator);
            if (hr == HResult.S_OK)
            {
                // only report success
                if (frameRateMinDenominator <= 0)
                {
                    outSb.Append("MF_MT_FRAME_RATE_RANGE_MIN (frames/s)=(undefined),");
                }
                else
                {
                    outSb.Append("MF_MT_FRAME_RATE_RANGE_MIN=" + ((decimal)frameRateMin / (decimal)frameRateMinDenominator).ToString() + "f/s, ");
                }
            }

            // MF_MT_FRAME_RATE_RANGE_MAX
            // The frame rate is expressed as a ratio. The upper 32 bits of the attribute value contain the numerator and the lower 32 bits contain the denominator.
            // For example, if the frame rate is 30 frames per second (fps), the ratio is 30 / 1. If the frame rate is 29.97 fps, the ratio is 30,000 / 1001.
            // we report this back to the user as a decimal
            hr = MFExtern.MFGetAttributeRatio(mediaTypeObj, MFAttributesClsid.MF_MT_FRAME_RATE_RANGE_MAX, out frameRateMax, out frameRateMaxDenominator);
            if (hr == HResult.S_OK)
            {
                // only report success
                if (frameRateMaxDenominator <= 0)
                {
                    outSb.Append("MF_MT_FRAME_RATE_RANGE_MAX (frames/s)=(undefined),");
                }
                else
                {
                    outSb.Append("MF_MT_FRAME_RATE_RANGE_MAX=" + ((decimal)frameRateMax / (decimal)frameRateMaxDenominator).ToString() + "f/s, ");
                }
            }

            // enumerate all of the possible Attributes so we can see which ones are present that we did not report on
            StringBuilder allAttrs = new StringBuilder();

            hr = EnumerateAllAttributeNamesInMediaTypeAsText(mediaTypeObj, attributeCount, out allAttrs);
            if (hr == HResult.S_OK)
            {
                outSb.Append("\r\n");
                outSb.Append("         AllAttrs=" + allAttrs.ToString());
            }

            return(HResult.S_OK);
        }
        /// +=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=
        /// <summary>
        /// Gets the playback rate capabilities for the session
        /// </summary>
        /// <param name="mediaSession">the media session. If not null, we will
        /// populate this object from the session</param>
        /// <history>
        ///    01 Nov 18  Cynic - Started
        /// </history>
        public bool AcquirePlayBackRates(IMFMediaSession mediaSession)
        {
            bool  outBool;
            bool  wantThinned;
            float supportedRate;

            Reset();

            if (mediaSession == null)
            {
                return(false);
            }

            try
            {
                // first we acquire the thinned rates
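                // ("thinned" playback means the source is allowed to drop samples and deliver
                // only a subset - typically just the key frames - which usually allows much
                // higher maximum rates than non-thinned playback)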
                wantThinned = true;

                outBool = TantaWMFUtils.GetFastestRate(mediaSession, MFRateDirection.Forward, wantThinned, out supportedRate);
                if (outBool == true)
                {
                    fastestForwardSpeedThinned = Math.Abs(supportedRate);
                }
                outBool = TantaWMFUtils.GetSlowestRate(mediaSession, MFRateDirection.Forward, wantThinned, out supportedRate);
                if (outBool == true)
                {
                    slowestForwardSpeedThinned = Math.Abs(supportedRate);
                }

                // now test for the reverse being possible with this thinning mode
                outBool = TantaWMFUtils.IsRewindSupported(mediaSession, wantThinned);
                if (outBool == true)
                {
                    reverseSpeedIsSupportedThinned = true;
                    outBool = TantaWMFUtils.GetFastestRate(mediaSession, MFRateDirection.Reverse, wantThinned, out supportedRate);
                    if (outBool == true)
                    {
                        fastestReverseSpeedThinned = Math.Abs(supportedRate);
                    }
                    outBool = TantaWMFUtils.GetSlowestRate(mediaSession, MFRateDirection.Reverse, wantThinned, out supportedRate);
                    if (outBool == true)
                    {
                        slowestReverseSpeedThinned = Math.Abs(supportedRate);
                    }
                }

                // next we acquire the non-thinned rates
                wantThinned = false;

                outBool = TantaWMFUtils.GetFastestRate(mediaSession, MFRateDirection.Forward, wantThinned, out supportedRate);
                if (outBool == true)
                {
                    fastestForwardSpeedNonThinned = Math.Abs(supportedRate);
                }
                outBool = TantaWMFUtils.GetSlowestRate(mediaSession, MFRateDirection.Forward, wantThinned, out supportedRate);
                if (outBool == true)
                {
                    slowestForwardSpeedNonThinned = Math.Abs(supportedRate);
                }

                // now test for the reverse being possible with this thinning mode
                outBool = TantaWMFUtils.IsRewindSupported(mediaSession, wantThinned);
                if (outBool == true)
                {
                    reverseSpeedIsSupportedNonThinned = true;
                    outBool = TantaWMFUtils.GetFastestRate(mediaSession, MFRateDirection.Reverse, wantThinned, out supportedRate);
                    if (outBool == true)
                    {
                        fastestReverseSpeedNonThinned = Math.Abs(supportedRate);
                    }
                    outBool = TantaWMFUtils.GetSlowestRate(mediaSession, MFRateDirection.Reverse, wantThinned, out supportedRate);
                    if (outBool == true)
                    {
                        slowestReverseSpeedNonThinned = Math.Abs(supportedRate);
                    }
                }

                capabilityRequestSuccessful = true;
            }
            catch
            {
                capabilityRequestSuccessful = false;
            }
            return(CapabilityRequestSuccessful);
        }
Example #22
        /// +=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=
        /// <summary>
        /// This is the routine that performs the transform. Unless the sinkWriter object
        /// is set all we do is pass the sample on. If the sink writer object is set
        /// we give the sample to it for writing. There are two modes - one where we just
        /// give the sinkwriter the input sample and the other where we clone the input
        /// sample and rebase the timestamps.
        ///
        /// An override of the abstract version in TantaMFTBase_Sync.
        /// </summary>
        /// <param name="pOutputSamples">The structure to populate with output values.</param>
        /// <returns>S_Ok unless error.</returns>
        /// <history>
        ///    01 Nov 18  Cynic - Originally written
        /// </history>
        protected override HResult OnProcessOutput(ref MFTOutputDataBuffer outputSampleDataStruct)
        {
            HResult        hr = HResult.S_OK;
            IMFMediaBuffer inputMediaBuffer = null;
            IMFSample      sinkWriterSample = null;
            IMFAttributes  sampleAttributes = null;
            long           sampleDuration   = 0;
            int            sampleSize       = 0;
            long           sampleTimeStamp  = 0;
            int            sampleFlags      = 0;

            // in this MFT we are processing in place, the input sample is the output sample, the media buffer of the
            // input sample is the media buffer of the output sample. That's for the pipeline. If a sink writer exists
            // we also write the sample data out to the sink writer. This provides the effect of displaying on the
            // screen and simultaneously recording.

            // There are two ways the sink writer can be given the media sample data. It can just be given the
            // input sample directly or a copy of the sample can be made and that copy given to the sink writer.

            // There is also an additional complication - the sample has a timestamp and video cameras tend
            // to just use the current date and time as a timestamp. There are several reports that MP4 files
            // need to have their first frame starting at zero and then every subsequent frame adjusted to that
            // new base time. Certainly the Microsoft supplied example code (and see the
            // TantaCaptureToFileViaReaderWriter also) takes great care to do this. This requirement does not
            // seem to exist - my tests indicate it is not necessary to start from 0 in the mp4 file. Maybe the
            // Sink Writer has been improved and now does this automatically. For demonstration purposes
            // the timebase-rebase functionality has been included and choosing that mode copies the sample
            // and resets the time. If the user does not rebase the time we simply send the input sample
            // to the Sink Writer as-is.
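            // For reference: Media Foundation sample times and durations are in 100-nanosecond
            // units, so a 30fps camera delivers samples roughly 333,333 units apart, and a camera
            // that stamps frames with the wall clock produces an enormous first timestamp. Rebasing
            // simply subtracts that first value so the recorded stream starts at 0.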

            try
            {
                // Set status flags.
                outputSampleDataStruct.dwStatus = MFTOutputDataBufferFlags.None;

                // The output sample is the input sample. We get a new IUnknown for the Input
                // sample since we are going to release it below. The client will release this
                // new IUnknown
                outputSampleDataStruct.pSample = Marshal.GetIUnknownForObject(InputSample);

                // are we recording?
                if (workingSinkWriter != null)
                {
                    // we do everything in a lock
                    lock (sinkWriterLockObject)
                    {
                        // are we in timebase rebase mode?
                        if (wantTimebaseRebase == false)
                        {
                            // we are not. Just give the input sample to the Sink Writer which will
                            // write it out.
                            hr = workingSinkWriter.WriteSample(sinkWriterVideoStreamId, InputSample);
                            if (hr != HResult.S_OK)
                            {
                                throw new Exception("OnProcessOutput call to WriteSample(a) failed. Err=" + hr.ToString());
                            }
                        }
                        else
                        {
                            // the timebase rebase option has been chosen. We need to create a copy of the input sample
                            // so we can adjust the time on it.

                            // Get the data buffer from the input sample. If the sample contains more than one buffer,
                            // this method copies the data from the original buffers into a new buffer, and replaces
                            // the original buffer list with the new buffer. The new buffer is returned in the inputMediaBuffer parameter.
                            // If the sample contains a single buffer, this method returns a pointer to the original buffer.
                            // In typical use, most samples do not contain multiple buffers.
                            hr = InputSample.ConvertToContiguousBuffer(out inputMediaBuffer);
                            if (hr != HResult.S_OK)
                            {
                                throw new Exception("OnProcessOutput call to InputSample.ConvertToContiguousBuffer failed. Err=" + hr.ToString());
                            }

                            // get some other things from the input sample
                            hr = InputSample.GetSampleDuration(out sampleDuration);
                            if (hr != HResult.S_OK)
                            {
                                throw new Exception("OnProcessOutput call to InputSample.GetSampleDuration failed. Err=" + hr.ToString());
                            }
                            hr = InputSample.GetTotalLength(out sampleSize);
                            if (hr != HResult.S_OK)
                            {
                                throw new Exception("OnProcessOutput call to InputSample.GetTotalLength failed. Err=" + hr.ToString());
                            }
                            hr = InputSample.GetSampleTime(out sampleTimeStamp);
                            if (hr != HResult.S_OK)
                            {
                                throw new Exception("OnProcessOutput call to InputSample.GetSampleTime failed. Err=" + hr.ToString());
                            }

                            // get the attributes from the input sample
                            if (InputSample is IMFAttributes)
                            {
                                sampleAttributes = (InputSample as IMFAttributes);
                            }
                            else
                            {
                                sampleAttributes = null;
                            }

                            // we have all the information we need to create a new output sample
                            sinkWriterSample = TantaWMFUtils.CreateMediaSampleFromBuffer(sampleFlags, sampleTimeStamp, sampleDuration, inputMediaBuffer, sampleSize, sampleAttributes);
                            if (sinkWriterSample == null)
                            {
                                throw new Exception("OnProcessOutput, Error on call to CreateMediaSampleFromBuffer sinkWriterSample == null");
                            }

                            // we have a sample; is it the first one we have seen since recording started?
                            if (isFirstSample)
                            {
                                // yes it is, record its timestamp as our base time
                                firstSampleBaseTime = sampleTimeStamp;
                                isFirstSample       = false;
                            }

                            // rebase the time stamp
                            sampleTimeStamp -= firstSampleBaseTime;

                            hr = sinkWriterSample.SetSampleTime(sampleTimeStamp);
                            if (hr != HResult.S_OK)
                            {
                                throw new Exception("OnProcessOutput call to InputSample.SetSampleTime failed. Err=" + hr.ToString());
                            }

                            // write the sample out
                            hr = workingSinkWriter.WriteSample(sinkWriterVideoStreamId, sinkWriterSample);
                            if (hr != HResult.S_OK)
                            {
                                throw new Exception("OnProcessOutput call to WriteSample(b) failed. Err=" + hr.ToString());
                            }
                        }
                    }
                }
            }
            finally
            {
                // clean up
                if (inputMediaBuffer != null)
                {
                    Marshal.ReleaseComObject(inputMediaBuffer);
                    inputMediaBuffer = null;
                }

                if (sinkWriterSample != null)
                {
                    Marshal.ReleaseComObject(sinkWriterSample);
                    sinkWriterSample = null;
                }

                // Release the current input sample so we can get another one.
                // the act of setting it to null releases it because the property
                // is coded that way
                InputSample = null;
            }

            return(HResult.S_OK);
        }
Example #23
        /// +=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=
        /// <summary>
        /// Sets the transform information panel on the control
        /// </summary>
        /// <param name="transformToDisplay">the transform to display</param>
        /// <history>
        ///    01 Nov 18  Cynic - Started
        /// </history>
        private void SetTransformInfoPanel(TantaMFTCapabilityContainer transformToDisplay)
        {
            List <IMFMediaType> outputTypes = null;
            List <IMFMediaType> inputTypes  = null;
            StringBuilder       displaySb   = new StringBuilder();
            StringBuilder       outputSb    = new StringBuilder();
            StringBuilder       inputSb     = new StringBuilder();
            StringBuilder       headerSb    = new StringBuilder();
            StringBuilder       outSb;
            HResult             hr;

            // clear it down
            ClearTransformInfoPanel();

            if (transformToDisplay == null)
            {
                return;
            }

            // set up our header information
            headerSb.Append(transformToDisplay.TransformFriendlyName);
            headerSb.Append("\r\n");
            headerSb.Append(transformToDisplay.TransformGuidValueAsString);
            headerSb.Append("\r\n");
            if (transformToDisplay.IsAsyncMFT == "x")
            {
                headerSb.Append("IsAsyncMFT" + ", ");
            }
            if (transformToDisplay.IsSyncMFT == "x")
            {
                headerSb.Append("IsSyncMFT" + ", ");
            }
            if (transformToDisplay.IsFieldOfUse == "x")
            {
                headerSb.Append("IsFieldOfUse" + ", ");
            }

            // we do not include these, the enum function does not give us this
            // if (transformToDisplay.IsHardware == "x") headerSb.Append("IsHardware" + ", ");
            // if (transformToDisplay.IsLocalMFT == "x") headerSb.Append("IsLocalMFT" + ", ");
            // if (transformToDisplay.IsTranscodeOnly == "x") headerSb.Append("IsTranscodeOnly" + ", ");
            headerSb.Append("\r\n");

            try
            {
                // populate the RichText box with the input media type capabilities
                inputTypes = TantaWMFUtils.GetInputMediaTypesFromTransformByGuid(transformToDisplay.TransformGuidValue, false);

                // do we have any input types?
                if ((inputTypes != null) && (inputTypes.Count != 0))
                {
                    // go through the types
                    foreach (IMFMediaType mediaType in inputTypes)
                    {
                        // the major media type
                        hr = TantaMediaTypeInfo.GetMediaMajorTypeAsText(mediaType, out outSb);
                        if (hr != HResult.S_OK)
                        {
                            continue;
                        }
                        if (outSb == null)
                        {
                            continue;
                        }
                        inputSb.Append(outSb);
                        inputSb.Append("\r\n");

                        // the sub media type
                        hr = TantaMediaTypeInfo.GetMediaSubTypeAsText(mediaType, out outSb);
                        if (hr != HResult.S_OK)
                        {
                            continue;
                        }
                        if (outSb == null)
                        {
                            continue;
                        }
                        inputSb.Append(outSb);
                        inputSb.Append("\r\n");

                        // enumerate all of the possible Attributes so we can see which ones are present that we did not report on
                        StringBuilder allAttrs = new StringBuilder();
                        hr = TantaMediaTypeInfo.EnumerateAllAttributeNamesInMediaTypeAsText(mediaType, true, true, TantaWMFUtils.MAX_TYPES_TESTED_PER_TRANSFORM, out allAttrs);
                        if (hr != HResult.S_OK)
                        {
                            continue;
                        }
                        char[] charsToTrim = { ',', '.', ' ' };
                        inputSb.Append("OtherAttrs=" + allAttrs.ToString().TrimEnd(charsToTrim));
                        inputSb.Append("\r\n");

                        inputSb.Append("\r\n");
                    }
                }
            }
            finally
            {
                // release the list of media type objects
                if ((inputTypes != null) && (inputTypes.Count != 0))
                {
                    foreach (IMFMediaType mediaType in inputTypes)
                    {
                        Marshal.ReleaseComObject(mediaType);
                    }
                }
            }

            try
            {
                // populate the RichText box with the output media type capabilities
                outputTypes = TantaWMFUtils.GetOutputMediaTypesFromTransformByGuid(transformToDisplay.TransformGuidValue, false);

                // do we have any output types?
                if ((outputTypes != null) && (outputTypes.Count != 0))
                {
                    // go through the types
                    foreach (IMFMediaType mediaType in outputTypes)
                    {
                        // the major media type
                        hr = TantaMediaTypeInfo.GetMediaMajorTypeAsText(mediaType, out outSb);
                        if (hr != HResult.S_OK)
                        {
                            continue;
                        }
                        if (outSb == null)
                        {
                            continue;
                        }
                        outputSb.Append(outSb);
                        outputSb.Append("\r\n");

                        // the sub media type
                        hr = TantaMediaTypeInfo.GetMediaSubTypeAsText(mediaType, out outSb);
                        if (hr != HResult.S_OK)
                        {
                            continue;
                        }
                        if (outSb == null)
                        {
                            continue;
                        }
                        outputSb.Append(outSb);
                        outputSb.Append("\r\n");

                        // enumerate all of the possible Attributes so we can see which ones are present that we did not report on
                        StringBuilder allAttrs = new StringBuilder();
                        hr = TantaMediaTypeInfo.EnumerateAllAttributeNamesInMediaTypeAsText(mediaType, true, true, TantaWMFUtils.MAX_TYPES_TESTED_PER_TRANSFORM, out allAttrs);
                        if (hr != HResult.S_OK)
                        {
                            continue;
                        }
                        char[] charsToTrim = { ',', '.', ' ' };
                        outputSb.Append("OtherAttrs=" + allAttrs.ToString().TrimEnd(charsToTrim));
                        outputSb.Append("\r\n");

                        outputSb.Append("\r\n");
                    }
                }
            }
            finally
            {
                // release the list of media type objects
                if ((outputTypes != null) && (outputTypes.Count != 0))
                {
                    foreach (IMFMediaType mediaType in outputTypes)
                    {
                        Marshal.ReleaseComObject(mediaType);
                    }
                }
            }

            // display what we have
            displaySb.Append(headerSb);
            displaySb.Append("\r\n");
            displaySb.Append("\r\n");

            displaySb.Append("####\r\n");
            displaySb.Append("#### INPUT TYPES\r\n");
            displaySb.Append("####\r\n");
            displaySb.Append("\r\n");
            if (inputSb.Length > 0)
            {
                displaySb.Append(inputSb);
            }
            else
            {
                displaySb.Append("<not known>");
                displaySb.Append("\r\n");
            }
            displaySb.Append("\r\n");

            displaySb.Append("####\r\n");
            displaySb.Append("#### OUTPUT TYPES\r\n");
            displaySb.Append("####\r\n");
            displaySb.Append("\r\n");
            if (outputSb.Length > 0)
            {
                displaySb.Append(outputSb);
            }
            else
            {
                displaySb.Append("<not known>");
                displaySb.Append("\r\n");
            }
            displaySb.Append("\r\n");

            richTextBoxtTransformDetails.Text = displaySb.ToString();
        }
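
Both finally blocks above repeat the same loop to release a list of IMFMediaType objects. A minimal sketch of a helper that could factor that pattern out (the name ReleaseMediaTypeList is hypothetical and not part of TantaWMFUtils; it assumes the same usings as the surrounding code):

        // a minimal sketch - the helper name is hypothetical, not part of the Tanta library.
        // Assumes the caller owns the IMFMediaType objects held in the list.
        private static void ReleaseMediaTypeList(List<IMFMediaType> typeList)
        {
            // nothing to do for a null or empty list
            if (typeList == null) return;
            foreach (IMFMediaType mediaType in typeList)
            {
                if (mediaType != null) Marshal.ReleaseComObject(mediaType);
            }
            typeList.Clear();
        }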
Example #24
        /// +=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=
        /// <summary>
        /// Displays the video formats for the currently selected video device. This
        /// is more complicated than it looks. We have to open the video source, convert
        /// that to a Media Source and then interrogate that source to find a list
        /// of video formats.
        ///
        /// NOTE: this function will throw exceptions - caller must trap them
        /// </summary>
        /// <history>
        ///    01 Nov 18  Cynic - Started
        /// </history>
        private void DisplayVideoFormatsForCurrentCaptureDevice()
        {
            IMFPresentationDescriptor sourcePresentationDescriptor = null;
            int  sourceStreamCount = 0;
            bool streamIsSelected  = false;
            IMFStreamDescriptor videoStreamDescriptor = null;
            IMFMediaTypeHandler typeHandler           = null;
            int mediaTypeCount = 0;

            List <TantaMFVideoFormatContainer> formatList = new List <TantaMFVideoFormatContainer>();
            HResult        hr;
            IMFMediaSource mediaSource = null;

            try
            {
                // clear what we have now
                listViewSupportedFormats.Clear();
                // reset this
                listViewSupportedFormats.ListViewItemSorter = null;

                // get the currently selected device
                TantaMFDevice currentDevice = (TantaMFDevice)comboBoxCaptureDevices.SelectedItem;
                if (currentDevice == null)
                {
                    throw new Exception("DisplayVideoFormatsForCurrentCaptureDevice currentDevice == null");
                }

                // use the device symbolic name to create the media source for the video device. Media sources are objects that generate media data.
                // For example, the data might come from a video file, a network stream, or a hardware device, such as a camera. Each
                // media source contains one or more streams, and each stream delivers data of one type, such as audio or video.
                mediaSource = TantaWMFUtils.GetMediaSourceFromTantaDevice(currentDevice);
                if (mediaSource == null)
                {
                    throw new Exception("DisplayVideoFormatsForCurrentCaptureDevice call to mediaSource == null");
                }

                // A presentation is a set of related media streams that share a common presentation time.
                // we don't need that functionality in this app but we do need the presentation descriptor
                // to find the stream descriptors; these will give us the media types on offer
                hr = mediaSource.CreatePresentationDescriptor(out sourcePresentationDescriptor);
                if (hr != HResult.S_OK)
                {
                    throw new Exception("DisplayVideoFormatsForCurrentCaptureDevice call to mediaSource.CreatePresentationDescriptor failed. Err=" + hr.ToString());
                }
                if (sourcePresentationDescriptor == null)
                {
                    throw new Exception("DisplayVideoFormatsForCurrentCaptureDevice call to mediaSource.CreatePresentationDescriptor failed. sourcePresentationDescriptor == null");
                }

                // Now we get the number of stream descriptors in the presentation.
                // A presentation descriptor contains a list of one or more
                // stream descriptors.
                hr = sourcePresentationDescriptor.GetStreamDescriptorCount(out sourceStreamCount);
                if (hr != HResult.S_OK)
                {
                    throw new Exception("DisplayVideoFormatsForCurrentCaptureDevice call to sourcePresentationDescriptor.GetStreamDescriptorCount failed. Err=" + hr.ToString());
                }
                if (sourceStreamCount == 0)
                {
                    throw new Exception("DisplayVideoFormatsForCurrentCaptureDevice call to sourcePresentationDescriptor.GetStreamDescriptorCount failed. sourceStreamCount == 0");
                }

                // look for the video stream
                for (int i = 0; i < sourceStreamCount; i++)
                {
                    // we require the major type to be video
                    Guid guidMajorType = TantaWMFUtils.GetMajorMediaTypeFromPresentationDescriptor(sourcePresentationDescriptor, i);
                    if (guidMajorType != MFMediaType.Video)
                    {
                        continue;
                    }

                    // we also require the stream to be enabled
                    hr = sourcePresentationDescriptor.GetStreamDescriptorByIndex(i, out streamIsSelected, out videoStreamDescriptor);
                    if (hr != HResult.S_OK)
                    {
                        throw new Exception("DisplayVideoFormatsForCurrentCaptureDevice call to sourcePresentationDescriptor.GetStreamDescriptorByIndex(v) failed. Err=" + hr.ToString());
                    }
                    if (videoStreamDescriptor == null)
                    {
                        throw new Exception("DisplayVideoFormatsForCurrentCaptureDevice call to sourcePresentationDescriptor.GetStreamDescriptorByIndex(v) failed. videoStreamDescriptor == null");
                    }
                    // if the stream is not selected (enabled) look for the next
                    if (streamIsSelected == false)
                    {
                        Marshal.ReleaseComObject(videoStreamDescriptor);
                        videoStreamDescriptor = null;
                        continue;
                    }

                    // Get the media type handler for the stream. The IMFMediaTypeHandler
                    // interface is a standard way of looking at the media types on a stream
                    hr = videoStreamDescriptor.GetMediaTypeHandler(out typeHandler);
                    if (hr != HResult.S_OK)
                    {
                        throw new Exception("call to videoStreamDescriptor.GetMediaTypeHandler failed. Err=" + hr.ToString());
                    }
                    if (typeHandler == null)
                    {
                        throw new Exception("call to videoStreamDescriptor.GetMediaTypeHandler failed. typeHandler == null");
                    }
                    // Now we get the number of media types in the stream descriptor.
                    hr = typeHandler.GetMediaTypeCount(out mediaTypeCount);
                    if (hr != HResult.S_OK)
                    {
                        throw new Exception("DisplayVideoFormatsForCurrentCaptureDevice call to typeHandler.GetMediaTypeCount failed. Err=" + hr.ToString());
                    }
                    if (mediaTypeCount == 0)
                    {
                        throw new Exception("DisplayVideoFormatsForCurrentCaptureDevice call to typeHandler.GetMediaTypeCount failed. mediaTypeCount == 0");
                    }

                    // now loop through each media type
                    for (int mediaTypeId = 0; mediaTypeId < mediaTypeCount; mediaTypeId++)
                    {
                        // Now we have the handler, get the media type.
                        IMFMediaType workingMediaType = null;
                        hr = typeHandler.GetMediaTypeByIndex(mediaTypeId, out workingMediaType);
                        if (hr != HResult.S_OK)
                        {
                            throw new Exception("GetMediaTypeFromStreamDescriptorById call to typeHandler.GetMediaTypeByIndex failed. Err=" + hr.ToString());
                        }
                        if (workingMediaType == null)
                        {
                            throw new Exception("GetMediaTypeFromStreamDescriptorById call to typeHandler.GetMediaTypeByIndex failed. workingMediaType == null");
                        }
                        TantaMFVideoFormatContainer tmpContainer = TantaMediaTypeInfo.GetVideoFormatContainerFromMediaTypeObject(workingMediaType, currentDevice);
                        if (tmpContainer == null)
                        {
                            // we failed
                            throw new Exception("GetSupportedVideoFormatsFromSourceReaderInFormatContainers failed on call to GetVideoFormatContainerFromMediaTypeObject");
                        }
                        // now add it
                        formatList.Add(tmpContainer);
                        Marshal.ReleaseComObject(workingMediaType);
                        workingMediaType = null;
                    }

                    // NOTE: we only do the first enabled video stream we find.
                    // it is possible to have more but our control
                    // cannot cope with that
                    break;
                }

                // now display the formats
                foreach (TantaMFVideoFormatContainer videoFormat in formatList)
                {
                    ListViewItem lvi = new ListViewItem(new[] { videoFormat.SubTypeAsString, videoFormat.FrameSizeAsString, videoFormat.FrameRateAsString, videoFormat.FrameRateMaxAsString, videoFormat.AllAttributes });
                    lvi.Tag = videoFormat;
                    listViewSupportedFormats.Items.Add(lvi);
                }

                listViewSupportedFormats.Columns.Add("Type", 70);
                listViewSupportedFormats.Columns.Add("FrameSize WxH", 100);
                listViewSupportedFormats.Columns.Add("FrameRate f/s", 100);
                listViewSupportedFormats.Columns.Add("FrameRateMax f/s", 100);
                listViewSupportedFormats.Columns.Add("All Attributes", 2500);
            }
            finally
            {
                // close and release
                if (mediaSource != null)
                {
                    Marshal.ReleaseComObject(mediaSource);
                    mediaSource = null;
                }
                if (sourcePresentationDescriptor != null)
                {
                    Marshal.ReleaseComObject(sourcePresentationDescriptor);
                    sourcePresentationDescriptor = null;
                }
                if (videoStreamDescriptor != null)
                {
                    Marshal.ReleaseComObject(videoStreamDescriptor);
                    videoStreamDescriptor = null;
                }
                if (typeHandler != null)
                {
                    Marshal.ReleaseComObject(typeHandler);
                    typeHandler = null;
                }
            }
        }
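
The summary above notes that DisplayVideoFormatsForCurrentCaptureDevice throws rather than returning an HResult, so the caller must trap exceptions. A minimal sketch of such a caller, assuming a SelectedIndexChanged handler on the comboBoxCaptureDevices control and the OISMessageBox helper used elsewhere in this code (the handler name itself is an assumption):

        private void comboBoxCaptureDevices_SelectedIndexChanged(object sender, EventArgs e)
        {
            try
            {
                // rebuild the supported-formats list for the newly chosen device
                DisplayVideoFormatsForCurrentCaptureDevice();
            }
            catch (Exception ex)
            {
                // the display routine throws on any failure - report and carry on
                OISMessageBox("Error displaying video formats: " + ex.Message);
            }
        }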
Example #25
        /// +=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=
        /// <summary>
        /// Prepares the media session and topology and opens the media source
        /// and media sink.
        ///
        /// Once the session and topology are setup, a MESessionTopologySet event
        /// will be triggered in the callback handler. After that the events there
        /// trigger other events and everything rolls along automatically.
        /// </summary>
        /// <param name="sourceFileName">the source file name</param>
        /// <param name="outputFileName">the name of the output file</param>
        /// <history>
        ///    01 Nov 18  Cynic - Originally Written
        /// </history>
        public void PrepareSessionAndTopology(string sourceFileName, string outputFileName)
        {
            HResult                   hr;
            IMFSourceResolver         pSourceResolver = null;
            IMFTopology               pTopology       = null;
            IMFPresentationDescriptor sourcePresentationDescriptor = null;
            int sourceStreamCount = 0;
            IMFStreamDescriptor audioStreamDescriptor = null;
            bool            streamIsSelected          = false;
            IMFTopologyNode sourceAudioNode           = null;
            IMFTopologyNode outputSinkNode            = null;
            IMFMediaType    currentAudioMediaType     = null;
            int             audioStreamIndex          = -1;

            LogMessage("PrepareSessionAndTopology ");

            // we sanity check the filenames here - the existence of the path and whether the output
            // file already exists should have been checked before this call
            if ((sourceFileName == null) || (sourceFileName.Length == 0))
            {
                throw new Exception("PrepareSessionAndTopology: source file name is invalid. Cannot continue.");
            }

            if ((outputFileName == null) || (outputFileName.Length == 0))
            {
                throw new Exception("PrepareSessionAndTopology: output file name is invalid. Cannot continue.");
            }

            try
            {
                // reset everything
                CloseAllMediaDevices();

                // Create the media session.
                hr = MFExtern.MFCreateMediaSession(null, out mediaSession);
                if (hr != HResult.S_OK)
                {
                    throw new Exception("PrepareSessionAndTopology call to MFExtern.MFCreateMediaSession failed. Err=" + hr.ToString());
                }
                if (mediaSession == null)
                {
                    throw new Exception("PrepareSessionAndTopology call to MFExtern.MFCreateMediaSession failed. mediaSession == null");
                }

                // set up our media session call back handler.
                mediaSessionAsyncCallbackHandler = new TantaAsyncCallbackHandler();
                mediaSessionAsyncCallbackHandler.Initialize();
                mediaSessionAsyncCallbackHandler.MediaSession = mediaSession;
                mediaSessionAsyncCallbackHandler.MediaSessionAsyncCallBackError = HandleMediaSessionAsyncCallBackErrors;
                mediaSessionAsyncCallbackHandler.MediaSessionAsyncCallBackEvent = HandleMediaSessionAsyncCallBackEvent;

                // Register the callback handler with the session and tell it that events can
                // start. This does not actually trigger an event it just lets the media session
                // know that it can now send them if it wishes to do so.
                hr = mediaSession.BeginGetEvent(mediaSessionAsyncCallbackHandler, null);
                if (hr != HResult.S_OK)
                {
                    throw new Exception("PrepareSessionAndTopology call to mediaSession.BeginGetEvent failed. Err=" + hr.ToString());
                }

                // Create a new topology.  A topology describes a collection of media sources, sinks, and transforms that are
                // connected in a certain order. These objects are represented within the topology by topology nodes,
                // which expose the IMFTopologyNode interface. A topology describes the path of multimedia data through these nodes.
                hr = MFExtern.MFCreateTopology(out pTopology);
                if (hr != HResult.S_OK)
                {
                    throw new Exception("PrepareSessionAndTopology call to MFExtern.MFCreateTopology failed. Err=" + hr.ToString());
                }
                if (pTopology == null)
                {
                    throw new Exception("PrepareSessionAndTopology call to MFExtern.MFCreateTopology failed. pTopology == null");
                }

                // ####
                // #### we now create the media source, this is an audio file
                // ####

                // use the file name to create the media source for the audio device. Media sources are objects that generate media data.
                // For example, the data might come from a video file, a network stream, or a hardware device, such as a camera. Each
                // media source contains one or more streams, and each stream delivers data of one type, such as audio or video.
                mediaSource = TantaWMFUtils.GetMediaSourceFromFile(sourceFileName);
                if (mediaSource == null)
                {
                    throw new Exception("PrepareSessionAndTopology call to mediaSource == null");
                }

                // A presentation is a set of related media streams that share a common presentation time.  We now get a copy of the media
                // source's presentation descriptor. Applications can use the presentation descriptor to select streams
                // and to get information about the source content.
                hr = mediaSource.CreatePresentationDescriptor(out sourcePresentationDescriptor);
                if (hr != HResult.S_OK)
                {
                    throw new Exception("PrepareSessionAndTopology call to mediaSource.CreatePresentationDescriptor failed. Err=" + hr.ToString());
                }
                if (sourcePresentationDescriptor == null)
                {
                    throw new Exception("PrepareSessionAndTopology call to mediaSource.CreatePresentationDescriptor failed. sourcePresentationDescriptor == null");
                }

                // Now we get the number of stream descriptors in the presentation. A presentation descriptor contains a list of one or more
                // stream descriptors. These describe the streams in the presentation. Streams can be either selected or deselected. Only the
                // selected streams produce data. Deselected streams are not active and do not produce any data.
                hr = sourcePresentationDescriptor.GetStreamDescriptorCount(out sourceStreamCount);
                if (hr != HResult.S_OK)
                {
                    throw new Exception("PrepareSessionAndTopology call to sourcePresentationDescriptor.GetStreamDescriptorCount failed. Err=" + hr.ToString());
                }
                if (sourceStreamCount == 0)
                {
                    throw new Exception("PrepareSessionAndTopology call to sourcePresentationDescriptor.GetStreamDescriptorCount failed. sourceStreamCount == 0");
                }

                // Look at each stream, there can be more than one stream here
                // Usually only one is enabled. This app uses the first "selected"
                // stream we come to which has the appropriate media type
                for (int i = 0; i < sourceStreamCount; i++)
                {
                    // we require the major type to be audio
                    Guid guidMajorType = TantaWMFUtils.GetMajorMediaTypeFromPresentationDescriptor(sourcePresentationDescriptor, i);
                    if (guidMajorType != MFMediaType.Audio)
                    {
                        continue;
                    }

                    // we also require the stream to be enabled
                    hr = sourcePresentationDescriptor.GetStreamDescriptorByIndex(i, out streamIsSelected, out audioStreamDescriptor);
                    if (hr != HResult.S_OK)
                    {
                        throw new Exception("PrepareSessionAndTopology call to sourcePresentationDescriptor.GetStreamDescriptorByIndex failed. Err=" + hr.ToString());
                    }
                    if (audioStreamDescriptor == null)
                    {
                        throw new Exception("PrepareSessionAndTopology call to sourcePresentationDescriptor.GetStreamDescriptorByIndex failed. audioStreamDescriptor == null");
                    }
                    // if the stream is selected, leave now; we will release the audioStreamDescriptor later
                    if (streamIsSelected == true)
                    {
                        audioStreamIndex = i;  // record this
                        break;
                    }

                    // release the one we are not using
                    if (audioStreamDescriptor != null)
                    {
                        Marshal.ReleaseComObject(audioStreamDescriptor);
                        audioStreamDescriptor = null;
                    }
                    audioStreamIndex = -1;
                }

                // by the time we get here we should have an audioStreamDescriptor; if
                // we do not, then we cannot proceed
                if (audioStreamDescriptor == null)
                {
                    throw new Exception("PrepareSessionAndTopology call to sourcePresentationDescriptor.GetStreamDescriptorByIndex failed. audioStreamDescriptor == null");
                }
                if (audioStreamIndex < 0)
                {
                    throw new Exception("PrepareSessionAndTopology call to sourcePresentationDescriptor.GetStreamDescriptorByIndex failed. audioStreamIndex < 0");
                }

                // ####
                // #### we now create the media sink, we need the type from the stream to do
                // #### this which is why we wait until now to set it up
                // ####

                currentAudioMediaType = TantaWMFUtils.GetCurrentMediaTypeFromStreamDescriptor(audioStreamDescriptor);
                if (currentAudioMediaType == null)
                {
                    throw new Exception("PrepareSessionAndTopology call to currentAudioMediaType == null");
                }

                mediaSink = OpenMediaFileSink(outputFileName);
                if (mediaSink == null)
                {
                    throw new Exception("PrepareSessionAndTopology call to mediaSink == null");
                }

                // ####
                // #### we now make up a topology branch for the audio stream
                // ####

                // Create a source node for this stream.
                sourceAudioNode = TantaWMFUtils.CreateSourceNodeForStream(mediaSource, sourcePresentationDescriptor, audioStreamDescriptor);
                if (sourceAudioNode == null)
                {
                    throw new Exception("PrepareSessionAndTopology call to CreateSourceNodeForStream failed. pSourceNode == null");
                }

                // Create the output node - this is a file sink in this case.
                outputSinkNode = TantaWMFUtils.CreateSinkNodeForStream(mediaSink);
                if (outputSinkNode == null)
                {
                    throw new Exception("PrepareSessionAndTopology call to CreateOutputNodeForStream failed. outputSinkNode == null");
                }

                // Add the nodes to the topology. First the source
                hr = pTopology.AddNode(sourceAudioNode);
                if (hr != HResult.S_OK)
                {
                    throw new Exception("PrepareSessionAndTopology call to pTopology.AddNode(sourceAudioNode) failed. Err=" + hr.ToString());
                }

                // then add the output
                hr = pTopology.AddNode(outputSinkNode);
                if (hr != HResult.S_OK)
                {
                    throw new Exception("PrepareSessionAndTopology call to pTopology.AddNode(outputSinkNode) failed. Err=" + hr.ToString());
                }

                // Connect the output stream from the source node to the input stream of the output node. The parameters are:
                //    dwOutputIndex  -  Zero-based index of the output stream on this node.
                //    *pDownstreamNode  -  Pointer to the IMFTopologyNode interface of the node to connect to.
                //    dwInputIndexOnDownstreamNode  -  Zero-based index of the input stream on the other node.
                hr = sourceAudioNode.ConnectOutput(0, outputSinkNode, 0);
                if (hr != HResult.S_OK)
                {
                    throw new Exception("PrepareSessionAndTopology call to  pSourceNode.ConnectOutput failed. Err=" + hr.ToString());
                }

                // Set the topology on the media session.
                // If SetTopology succeeds, the media session will queue an
                // MESessionTopologySet event.
                hr = mediaSession.SetTopology(0, pTopology);
                MFError.ThrowExceptionForHR(hr);

                // Release the topology
                if (pTopology != null)
                {
                    Marshal.ReleaseComObject(pTopology);
                }
            }
            catch (Exception ex)
            {
                LogMessage("Error: " + ex.Message);
                OISMessageBox(ex.Message);
            }
            finally
            {
                // Clean up
                if (pSourceResolver != null)
                {
                    Marshal.ReleaseComObject(pSourceResolver);
                }
                if (sourcePresentationDescriptor != null)
                {
                    Marshal.ReleaseComObject(sourcePresentationDescriptor);
                }
                if (audioStreamDescriptor != null)
                {
                    Marshal.ReleaseComObject(audioStreamDescriptor);
                }
                if (sourceAudioNode != null)
                {
                    Marshal.ReleaseComObject(sourceAudioNode);
                }
                if (outputSinkNode != null)
                {
                    Marshal.ReleaseComObject(outputSinkNode);
                }
                if (currentAudioMediaType != null)
                {
                    Marshal.ReleaseComObject(currentAudioMediaType);
                }
            }
        }
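
The comments in PrepareSessionAndTopology assume that the path and file-existence checks have already been made. A minimal sketch of a caller doing that pre-flight work first (the button handler and text box names are assumptions; File.Exists needs System.IO):

        private void buttonStartConversion_Click(object sender, EventArgs e)
        {
            // the control names here are hypothetical
            string sourceFileName = textBoxSourceFile.Text;
            string outputFileName = textBoxOutputFile.Text;

            // the source must exist and the output must not - PrepareSessionAndTopology
            // expects these checks to have been made before it is called
            if (File.Exists(sourceFileName) == false)
            {
                OISMessageBox("The source file does not exist: " + sourceFileName);
                return;
            }
            if (File.Exists(outputFileName) == true)
            {
                OISMessageBox("The output file already exists: " + outputFileName);
                return;
            }

            PrepareSessionAndTopology(sourceFileName, outputFileName);
        }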
Example #26
        /// +=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=
        /// <summary>
        /// Given the input and output media buffers, do the transform.
        /// </summary>
        /// <param name="inputMediaBuffer">Input buffer</param>
        /// <param name="outputMediaBuffer">Output buffer</param>
        /// <history>
        ///    01 Nov 18  Cynic - Ported In
        /// </history>
        private void ConvertMediaBufferToGrayscale(IMFMediaBuffer inputMediaBuffer, IMFMediaBuffer outputMediaBuffer)
        {
            IntPtr destRawDataPtr = IntPtr.Zero;            // Destination buffer.
            int    destStride     = 0;                      // Destination stride.
            bool   destIs2D       = false;

            IntPtr srcRawDataPtr = IntPtr.Zero;                     // Source buffer.
            int    srcStride;                                       // Source stride.
            bool   srcIs2D = false;

            if (TransformImageFunction == null)
            {
                throw new COMException("Transform type not set", (int)HResult.E_UNEXPECTED);
            }

            try
            {
                // Lock the output buffer. Use the IMF2DBuffer interface
                // (if available) as it is faster
                if ((outputMediaBuffer is IMF2DBuffer) == false)
                {
                    // not an IMF2DBuffer - get the raw data from the IMFMediaBuffer
                    int maxLen     = 0;
                    int currentLen = 0;
                    TantaWMFUtils.LockIMFMediaBufferAndGetRawData(outputMediaBuffer, out destRawDataPtr, out maxLen, out currentLen);
                    // the stride is always this. The Lock function does not return it
                    destStride = m_lStrideIfContiguous;
                }
                else
                {
                    // we are an IMF2DBuffer, we get the stride here as well
                    TantaWMFUtils.LockIMF2DBufferAndGetRawData((outputMediaBuffer as IMF2DBuffer), out destRawDataPtr, out destStride);
                    destIs2D = true;
                }

                // Lock the input buffer. Use the IMF2DBuffer interface
                // (if available) as it is faster
                if ((inputMediaBuffer is IMF2DBuffer) == false)
                {
                    // not an IMF2DBuffer - get the raw data from the IMFMediaBuffer
                    int maxLen     = 0;
                    int currentLen = 0;
                    TantaWMFUtils.LockIMFMediaBufferAndGetRawData(inputMediaBuffer, out srcRawDataPtr, out maxLen, out currentLen);
                    // the stride is always this. The Lock function does not return it
                    srcStride = m_lStrideIfContiguous;
                }
                else
                {
                    // we are an IMF2DBuffer, we get the stride here as well
                    TantaWMFUtils.LockIMF2DBufferAndGetRawData((inputMediaBuffer as IMF2DBuffer), out srcRawDataPtr, out srcStride);
                    srcIs2D = true;
                }

                // Invoke the image transform function.
                TransformImageFunction(destRawDataPtr,
                                       destStride,
                                       srcRawDataPtr,
                                       srcStride,
                                       m_imageWidthInPixels,
                                       m_imageHeightInPixels);

                // Set the data size on the output buffer.
                HResult hr = outputMediaBuffer.SetCurrentLength(m_cbImageSize);
                if (hr != HResult.S_OK)
                {
                    throw new Exception("ConvertMediaBufferToGrayscale call to outputMediaBuffer.SetCurrentLength failed. Err=" + hr.ToString());
                }
            }
            finally
            {
                // we MUST unlock
                if (destIs2D == false)
                {
                    TantaWMFUtils.UnLockIMFMediaBuffer(outputMediaBuffer);
                }
                else
                {
                    TantaWMFUtils.UnLockIMF2DBuffer((outputMediaBuffer as IMF2DBuffer));
                }
                if (srcIs2D == false)
                {
                    TantaWMFUtils.UnLockIMFMediaBuffer(inputMediaBuffer);
                }
                else
                {
                    TantaWMFUtils.UnLockIMF2DBuffer((inputMediaBuffer as IMF2DBuffer));
                }
            }
        }
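
ConvertMediaBufferToGrayscale only locks the buffers and hands the raw pointers to TransformImageFunction. A minimal sketch of what such a delegate could look like for YUY2 frames, assuming the parameter order used in the call above (destination pointer, destination stride, source pointer, source stride, width, height), positive strides, and the usual YUY2 packing with luma on even bytes and chroma on odd bytes; it keeps the luma and forces the chroma to the neutral value 0x80, which renders as grayscale:

        private static void TransformImage_YUY2ToGraySketch(IntPtr pDest, int lDestStride, IntPtr pSrc, int lSrcStride, int dwWidthInPixels, int dwHeightInPixels)
        {
            // YUY2 packs two pixels into four bytes (Y0 U Y1 V), so a row is width * 2 bytes
            int rowBytes = dwWidthInPixels * 2;
            byte[] row = new byte[rowBytes];

            for (int y = 0; y < dwHeightInPixels; y++)
            {
                // copy one source row into a managed buffer
                Marshal.Copy(new IntPtr(pSrc.ToInt64() + ((long)y * lSrcStride)), row, 0, rowBytes);

                // even bytes are luma (keep them), odd bytes are chroma (neutralize to gray)
                for (int x = 1; x < rowBytes; x += 2) row[x] = 0x80;

                // write the converted row out to the destination buffer
                Marshal.Copy(row, 0, new IntPtr(pDest.ToInt64() + ((long)y * lDestStride)), rowBytes);
            }
        }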