Code Example #1
File: GraphBuilderBDA.cs Project: dgis/CodeTV
        protected static VideoInfoHeader2 GetVideoInfoHeader2H264PinFormat()
        {
            //VideoInfoHeader2 hdr = new VideoInfoHeader2();
            //hdr.SrcRect = new DsRect();
            //hdr.SrcRect.left = 0;		//0x00, 0x00, 0x00, 0x00,  //00  .hdr.rcSource.left              = 0x00000000
            //hdr.SrcRect.top = 0;			//0x00, 0x00, 0x00, 0x00,  //04  .hdr.rcSource.top               = 0x00000000
            //hdr.SrcRect.right = 0;		//0xD0, 0x02, 0x00, 0x00,  //08  .hdr.rcSource.right             = 0x000002d0 //720
            //hdr.SrcRect.bottom = 0;		//0x40, 0x02, 0x00, 0x00,  //0c  .hdr.rcSource.bottom            = 0x00000240 //576
            //hdr.TargetRect = new DsRect();
            //hdr.TargetRect.left = 0;		//0x00, 0x00, 0x00, 0x00,  //10  .hdr.rcTarget.left              = 0x00000000
            //hdr.TargetRect.top = 0;		//0x00, 0x00, 0x00, 0x00,  //14  .hdr.rcTarget.top               = 0x00000000
            //hdr.TargetRect.right = 0;	//0xD0, 0x02, 0x00, 0x00,  //18  .hdr.rcTarget.right             = 0x000002d0 //720
            //hdr.TargetRect.bottom = 0;	//0x40, 0x02, 0x00, 0x00,  //1c  .hdr.rcTarget.bottom            = 0x00000240// 576
            //hdr.BitRate = 0x003d0900;	//0x00, 0x09, 0x3D, 0x00,  //20  .hdr.dwBitRate                  = 0x003d0900
            //hdr.BitErrorRate = 0;		//0x00, 0x00, 0x00, 0x00,  //24  .hdr.dwBitErrorRate             = 0x00000000

            //////0x051736=333667-> 10000000/333667 = 29.97fps
            //////0x061A80=400000-> 10000000/400000 = 25fps
            //hdr.AvgTimePerFrame = 400000;				//0x80, 0x1A, 0x06, 0x00, 0x00, 0x00, 0x00, 0x00, //28  .hdr.AvgTimePerFrame            = 0x0000000000061A80 -> 10000000/400000 = 25fps
            //hdr.InterlaceFlags = AMInterlace.None;		//0x00, 0x00, 0x00, 0x00,                         //2c  .hdr.dwInterlaceFlags           = 0x00000000
            ////hdr.InterlaceFlags = AMInterlace.IsInterlaced | AMInterlace.OneFieldPerSample | AMInterlace.DisplayModeBobOnly;		//0x00, 0x00, 0x00, 0x00,                         //2c  .hdr.dwInterlaceFlags           = 0x00000000
            ////hdr.InterlaceFlags = AMInterlace.IsInterlaced | AMInterlace.DisplayModeBobOnly;		//0x00, 0x00, 0x00, 0x00,                         //2c  .hdr.dwInterlaceFlags           = 0x00000000
            ////hdr.InterlaceFlags = AMInterlace.IsInterlaced | AMInterlace.FieldPatBothRegular | AMInterlace.DisplayModeWeaveOnly;		//0x00, 0x00, 0x00, 0x00,                         //2c  .hdr.dwInterlaceFlags           = 0x00000000
            ////hdr.InterlaceFlags = AMInterlace.IsInterlaced | AMInterlace.DisplayModeBobOrWeave;		//0x00, 0x00, 0x00, 0x00,                         //2c  .hdr.dwInterlaceFlags           = 0x00000000
            //hdr.CopyProtectFlags = AMCopyProtect.None;	//0x00, 0x00, 0x00, 0x00,                         //30  .hdr.dwCopyProtectFlags         = 0x00000000
            //hdr.PictAspectRatioX = 0;// 4;					//0x04, 0x00, 0x00, 0x00,                         //34  .hdr.dwPictAspectRatioX         = 0x00000004
            //hdr.PictAspectRatioY = 0;// 3;					//0x03, 0x00, 0x00, 0x00,                         //38  .hdr.dwPictAspectRatioY         = 0x00000003
            //hdr.ControlFlags = AMControl.None;			//0x00, 0x00, 0x00, 0x00,                         //3c  .hdr.dwReserved1                = 0x00000000
            //hdr.Reserved2 = 0;							//0x00, 0x00, 0x00, 0x00,                         //40  .hdr.dwReserved2                = 0x00000000
            //hdr.BmiHeader = new BitmapInfoHeader();
            //hdr.BmiHeader.Size = 0x00000028;				//0x28, 0x00, 0x00, 0x00,  //44  .hdr.bmiHeader.biSize           = 0x00000028
            //hdr.BmiHeader.Width = 1920; // 720;					//0xD0, 0x02, 0x00, 0x00,  //48  .hdr.bmiHeader.biWidth          = 0x000002d0 //720
            //hdr.BmiHeader.Height = 1080; // 576;					//0x40, 0x02, 0x00, 0x00,  //4c  .hdr.bmiHeader.biHeight         = 0x00000240 //576
            //hdr.BmiHeader.Planes = 0; // 1 ?					//0x00, 0x00,              //50  .hdr.bmiHeader.biPlanes         = 0x0000
            //hdr.BmiHeader.BitCount = 0;					//0x00, 0x00,              //54  .hdr.bmiHeader.biBitCount       = 0x0000
            //hdr.BmiHeader.Compression = 0;				//0x00, 0x00, 0x00, 0x00,  //58  .hdr.bmiHeader.biCompression    = 0x00000000
            //hdr.BmiHeader.ImageSize = 0;					//0x00, 0x00, 0x00, 0x00,  //5c  .hdr.bmiHeader.biSizeImage      = 0x00000000
            //hdr.BmiHeader.XPelsPerMeter = 0x000007d0;	//0xD0, 0x07, 0x00, 0x00,  //60  .hdr.bmiHeader.biXPelsPerMeter  = 0x000007d0
            //hdr.BmiHeader.YPelsPerMeter = 0x0000cf27;	//0x27, 0xCF, 0x00, 0x00,  //64  .hdr.bmiHeader.biYPelsPerMeter  = 0x0000cf27
            //hdr.BmiHeader.ClrUsed = 0;					//0x00, 0x00, 0x00, 0x00,  //68  .hdr.bmiHeader.biClrUsed        = 0x00000000
            //hdr.BmiHeader.ClrImportant = 0;				//0x00, 0x00, 0x00, 0x00,  //6c  .hdr.bmiHeader.biClrImportant   = 0x00000000

            VideoInfoHeader2 hdr = new VideoInfoHeader2();
            hdr.BmiHeader = new BitmapInfoHeader();
            hdr.BmiHeader.Size = 0x28; // 40 bytes = sizeof(BITMAPINFOHEADER)	//0x28, 0x00, 0x00, 0x00,  //44  .hdr.bmiHeader.biSize           = 0x00000028
            hdr.BmiHeader.Width = 1920; // 720;
            hdr.BmiHeader.Height = 1080; // 576;
            hdr.PictAspectRatioX = 0;
            hdr.PictAspectRatioY = 0;
            hdr.BmiHeader.Planes = 0;
            hdr.BmiHeader.BitCount = 24;
            hdr.BmiHeader.Compression = 0; //new MediaFoundation.Misc.FourCC("H264").ToInt32();
            return hdr;
        }
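
For context, a header like the one above would typically be wrapped in an AMMediaType before being offered on the pin. The sketch below shows that step under stated assumptions: the H.264 subtype GUID is built from the FOURCC 'H264' because not every DirectShowLib build exposes it by name, and the caller is assumed to release the media type with DsUtils.FreeAMMediaType.

        protected static AMMediaType BuildH264PinMediaType()
        {
            // Hypothetical helper (not part of the original file): wraps the
            // VideoInfoHeader2 built above in an AMMediaType.
            Guid mediaSubTypeH264 = new Guid(0x34363248, 0x0000, 0x0010,
                0x80, 0x00, 0x00, 0xAA, 0x00, 0x38, 0x9B, 0x71); // FOURCC "H264"

            VideoInfoHeader2 hdr = GetVideoInfoHeader2H264PinFormat();
            AMMediaType mt = new AMMediaType();
            mt.majorType = MediaType.Video;
            mt.subType = mediaSubTypeH264;
            mt.formatType = FormatType.VideoInfo2;
            mt.formatSize = Marshal.SizeOf(hdr);
            mt.formatPtr = Marshal.AllocCoTaskMem(mt.formatSize);
            Marshal.StructureToPtr(hdr, mt.formatPtr, false);
            return mt; // caller frees with DsUtils.FreeAMMediaType(mt)
        }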
Code Example #2
File: Overlay.cs Project: OmerMor/DirectShowLib-FORK
        /// <summary>
        /// This method calls the GSSF and specifies the media type to use.  This
        /// overridable method builds a generic AMMediaType based on the arguments
        /// passed to the constructor.
        /// </summary>
        /// <param name="psc">Interface pointer to the GSSF</param>
        public override void SetMediaType(IGenericSampleConfig2 psc)
        {
            VideoInfoHeader2 vih2 = new VideoInfoHeader2();

            vih2.BmiHeader = new BitmapInfoHeader();
            vih2.SrcRect = new DsRect(0, 0, m_Width, m_Height);
            vih2.TargetRect = new DsRect(0, 0, m_Width, m_Height);

            // Build a BitmapInfoHeader struct using the parameters from the file
            vih2.BmiHeader.Size = Marshal.SizeOf(typeof(BitmapInfoHeader));
            vih2.BmiHeader.Width = m_Width;
            vih2.BmiHeader.Height = m_Height;
            vih2.BmiHeader.Planes = 1;
            vih2.BmiHeader.BitCount = m_bpp;
            vih2.BmiHeader.Compression = (int)m_SubType;
            vih2.BmiHeader.ImageSize = ((vih2.BmiHeader.BitCount * vih2.BmiHeader.Width) / 8) * vih2.BmiHeader.Height;
            vih2.BmiHeader.XPelsPerMeter = 0;
            vih2.BmiHeader.YPelsPerMeter = 0;
            vih2.BmiHeader.ClrUsed = 0;
            vih2.BmiHeader.ClrImportant = 0;
            vih2.BmiHeader.Height *= -1; // The bitmap is a top-down DIB

            vih2.BitRate = (int)(vih2.BmiHeader.ImageSize * 8 * m_Fps);
            vih2.BitErrorRate = 0;
            vih2.AvgTimePerFrame = UNIT / m_Fps;
            vih2.InterlaceFlags = 0;
            vih2.CopyProtectFlags = 0;
            vih2.PictAspectRatioX = 4;
            vih2.PictAspectRatioY = 3;
            vih2.ControlFlags = 0;
            vih2.Reserved2 = 0;

            // Free any previous media type
            if (m_pmt != null)
            {
                DsUtils.FreeAMMediaType(m_pmt);
            }

            m_pmt = new AMMediaType();
            m_pmt.majorType = MediaType.Video;
            m_pmt.fixedSizeSamples = true;
            m_pmt.temporalCompression = false;
            m_pmt.formatType = FormatType.VideoInfo2;
            m_pmt.sampleSize = vih2.BmiHeader.ImageSize;

            int iStride;

            if ((int)m_SubType == 3) // 3 == BI_BITFIELDS
            {
                Debug.Assert(vih2.BmiHeader.BitCount == 32); // 16bit uses a slightly different format

                m_pmt.subType = MediaSubType.ARGB32; // Can't use the compression type to compute the subtype

                m_pmt.formatSize = Marshal.SizeOf(vih2) + (3 * (m_bpp / 8)); // Make room for the bitfields
                m_pmt.formatPtr = Marshal.AllocCoTaskMem(m_pmt.formatSize);

                if (m_bpp != 16)
                {
                    vih2.BmiHeader.Size += 3 * sizeof(int);

                    // The three DWORD colour masks live immediately after the
                    // VideoInfoHeader2 block; written at offset 0 they would be
                    // overwritten by the StructureToPtr call below.
                    int cbVih2 = Marshal.SizeOf(vih2);
                    Marshal.WriteInt32(m_pmt.formatPtr, cbVih2, 0xff); // Red
                    Marshal.WriteInt32(m_pmt.formatPtr, cbVih2 + 4, 0xff00); // Green
                    Marshal.WriteInt32(m_pmt.formatPtr, cbVih2 + 8, 0xff0000); // Blue
                }
                else
                {
                    // Todo - 555, 565
                }

                iStride = (m_Width * m_bpp) / 8;
            }
            else
            {
                // Calculate the stride from the compression type
                MFExtern.MFGetStrideForBitmapInfoHeader(vih2.BmiHeader.Compression, vih2.BmiHeader.Width, out iStride);

                m_pmt.subType = (Guid)m_SubType;
                m_pmt.formatSize = Marshal.SizeOf(vih2);
                m_pmt.formatPtr = Marshal.AllocCoTaskMem(m_pmt.formatSize);
            }

            m_pmt.sampleSize = iStride * m_Height;
            Marshal.StructureToPtr(vih2, m_pmt.formatPtr, false);

            int hr = psc.SetPinMediaType(m_pmt);
            DsError.ThrowExceptionForHR(hr);
        }
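
A note on the stride arithmetic above: (m_Width * m_bpp) / 8 only equals the true stride when each row happens to end on a DWORD boundary, which always holds at 32 bpp but not for every width at 16 or 24 bpp. A minimal sketch of the general uncompressed-DIB case, assuming the usual DWORD-aligned row layout GDI expects:

        private static int GetDibStride(int width, int bpp)
        {
            // Round each row up to a multiple of 4 bytes (DWORD alignment).
            // At 32 bpp this reduces to width * 4.
            return ((width * bpp + 31) / 32) * 4;
        }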
Code Example #3
File: VMR9.cs Project: doskabouter/MediaPortal-1
    public void SetDeinterlaceMode()
    {
      if (!GUIGraphicsContext.IsEvr)
      {
        if (!_isVmr9Initialized)
        {
          return;
        }
        Log.Debug("VMR9: SetDeinterlaceMode()");
        IVMRDeinterlaceControl9 deinterlace = (IVMRDeinterlaceControl9)_vmr9Filter;
        IPin InPin = null;
        int hr = _vmr9Filter.FindPin("VMR Input0", out InPin);
        if (hr != 0 || InPin == null)
        {
          Log.Error("VMR9: failed finding InPin {0:X}", hr);
          return;
        }
        AMMediaType mediatype = new AMMediaType();
        InPin.ConnectionMediaType(mediatype);
        //Start by getting the media type of the video stream.
        //Only VideoInfoHeader2 formats can be interlaced.
        if (mediatype.formatType == FormatType.VideoInfo2)
        {
          Log.Debug("VMR9: SetDeinterlaceMode - FormatType = VideoInfo2");
          int numModes = 0;
          VideoInfoHeader2 VideoHeader2 = new VideoInfoHeader2();
          Marshal.PtrToStructure(mediatype.formatPtr, VideoHeader2);
          VMR9VideoDesc VideoDesc = new VMR9VideoDesc();
          // If the FormatType is VideoInfo2, check the dwInterlaceFlags field for the AMInterlace.IsInterlaced flag.
          //The presence of this flag indicates the video is interlaced.
          if ((VideoHeader2.InterlaceFlags & AMInterlace.IsInterlaced) != 0)
          {
            Log.Debug("VMR9: SetDeinterlaceMode - Interlaced frame detected");
            //Fill in the VMR9VideoDesc structure with a description of the video stream.
            VideoDesc.dwSize = Marshal.SizeOf(VideoDesc); // dwSize: Set this field to sizeof(VMR9VideoDesc).
            VideoDesc.dwSampleWidth = VideoHeader2.BmiHeader.Width; // dwSampleWidth: Set this field to pBMI->biWidth. 
            VideoDesc.dwSampleHeight = Math.Abs(VideoHeader2.BmiHeader.Height);
            // dwSampleHeight: Set this field to abs(pBMI->biHeight). 
            //SampleFormat: This field describes the interlace characteristics of the media type.
            //Check the dwInterlaceFlags field in the VIDEOINFOHEADER2 structure, and set SampleFormat equal to the equivalent VMR9_SampleFormat flag.
            // The checks below are chained with else so that a later test does
            // not overwrite the SampleFormat chosen by an earlier one.
            if ((VideoHeader2.InterlaceFlags & AMInterlace.OneFieldPerSample) != 0)
            {
              if ((VideoHeader2.InterlaceFlags & AMInterlace.Field1First) != 0)
              {
                VideoDesc.SampleFormat = VMR9SampleFormat.FieldSingleEven;
              }
              else
              {
                VideoDesc.SampleFormat = VMR9SampleFormat.FieldSingleOdd;
              }
            }
            else if ((VideoHeader2.InterlaceFlags & AMInterlace.Field1First) != 0)
            {
              VideoDesc.SampleFormat = VMR9SampleFormat.FieldInterleavedEvenFirst;
            }
            else
            {
              VideoDesc.SampleFormat = VMR9SampleFormat.FieldInterleavedOddFirst;
            }
            //InputSampleFreq: This field gives the input frequency, which can be calculated from the AvgTimePerFrame field in the VIDEOINFOHEADER2 structure.
            //In the general case, set dwNumerator to 10000000, and set dwDenominator to AvgTimePerFrame. 
            VideoDesc.InputSampleFreq.dwNumerator = 10000000;
            VideoDesc.InputSampleFreq.dwDenominator = (int)VideoHeader2.AvgTimePerFrame;
            //OutputFrameFreq: This field gives the output frequency, which can be calculated from the InputSampleFreq value and the interleaving characteristics of the input stream:
            //Set OutputFrameFreq.dwDenominator equal to InputSampleFreq.dwDenominator.
            //If the input video is interleaved, set OutputFrameFreq.dwNumerator to 2 x InputSampleFreq.dwNumerator. (After deinterlacing, the frame rate is doubled.)
            //Otherwise, set the value to InputSampleFreq.dwNumerator.
            VideoDesc.OutputFrameFreq.dwNumerator = 2 * 10000000;
            VideoDesc.OutputFrameFreq.dwDenominator = (int)VideoHeader2.AvgTimePerFrame;
            VideoDesc.dwFourCC = VideoHeader2.BmiHeader.Compression; //dwFourCC: Set this field to pBMI->biCompression.
            //Pass the structure to the IVMRDeinterlaceControl9::GetNumberOfDeinterlaceModes method.
            //Call the method twice. The first call returns the number of deinterlace modes the hardware supports for the specified format.
            hr = deinterlace.GetNumberOfDeinterlaceModes(ref VideoDesc, ref numModes, null);
            if (hr == 0 && numModes != 0)
            {
              Guid[] modes = new Guid[numModes];
              {
                //Allocate an array of GUIDs of this size, and call the method again, passing in the address of the array.
                //The second call fills the array with GUIDs. Each GUID identifies one deinterlacing mode. 
                hr = deinterlace.GetNumberOfDeinterlaceModes(ref VideoDesc, ref numModes, modes);
                for (int i = 0; i < numModes; i++)
                {
                  //To get the capabilities of a particular mode, call the IVMRDeinterlaceControl9::GetDeinterlaceModeCaps method.
                  //Pass in the same VMR9VideoDesc structure, along with one of the GUIDs from the array.
                  //The method fills a VMR9DeinterlaceCaps structure with the mode capabilities. 
                  VMR9DeinterlaceCaps caps = new VMR9DeinterlaceCaps();
                  caps.dwSize = Marshal.SizeOf(typeof (VMR9DeinterlaceCaps));
                  hr = deinterlace.GetDeinterlaceModeCaps(modes[i], ref VideoDesc, ref caps);
                  if (hr == 0)
                  {
                    Log.Debug("VMR9: AvailableDeinterlaceMode - {0}: {1}", i, modes[i]);
                    switch (caps.DeinterlaceTechnology)
                    {
                        //The algorithm is unknown or proprietary
                      case VMR9DeinterlaceTech.Unknown:
                        {
                          Log.Info("VMR9: Unknown H/W de-interlace mode");
                          break;
                        }
                        //The algorithm creates each missing line by repeating the line above it or below it.
                        //This method creates jagged artifacts and is not recommended.
                      case VMR9DeinterlaceTech.BOBLineReplicate:
                        {
                          Log.Info("VMR9: BOB Line Replicate capable");
                          break;
                        }
                        //The algorithm creates the missing lines by vertically stretching each video field by a factor of two.
                        //For example, it might average two lines or use a (-1, 9, 9, -1)/16 filter across four lines.
                        //Slight vertical adjustments are made to ensure that the resulting image does not "bob" up and down
                      case VMR9DeinterlaceTech.BOBVerticalStretch:
                        {
                          Log.Info("VMR9: BOB Vertical Stretch capable");
                          verticalStretch = modes[i].ToString();
                          break;
                        }
                        //The algorithm uses median filtering to recreate the pixels in the missing lines.
                      case VMR9DeinterlaceTech.MedianFiltering:
                        {
                          Log.Info("VMR9: Median Filtering capable");
                          medianFiltering = modes[i].ToString();
                          break;
                        }
                        //The algorithm uses an edge filter to create the missing lines.
                        //In this process, spatial directional filters are applied to determine the orientation of edges in the picture content.
                        //Missing pixels are created by filtering along (rather than across) the detected edges.
                      case VMR9DeinterlaceTech.EdgeFiltering:
                        {
                          Log.Info("VMR9: Edge Filtering capable");
                          break;
                        }
                        //The algorithm uses spatial or temporal interpolation, switching between the two on a field-by-field basis, depending on the amount of motion.
                      case VMR9DeinterlaceTech.FieldAdaptive:
                        {
                          Log.Info("VMR9: Field Adaptive capable");
                          break;
                        }
                        //The algorithm uses spatial or temporal interpolation, switching between the two on a pixel-by-pixel basis, depending on the amount of motion.
                      case VMR9DeinterlaceTech.PixelAdaptive:
                        {
                          Log.Info("VMR9: Pixel Adaptive capable");
                          pixelAdaptive = modes[i].ToString();
                          break;
                        }
                        //The algorithm identifies objects within a sequence of video fields.
                        //Before it recreates the missing pixels, it aligns the movement axes of the individual objects in the scene to make them parallel with the time axis.
                      case VMR9DeinterlaceTech.MotionVectorSteered:
                        {
                          Log.Info("VMR9: Motion Vector Steered capable");
                          break;
                        }
                    }
                  }
                }
              }
              //Set the MP preferred h/w de-interlace modes in order of quality
              //pixel adaptive, then median filtering & finally vertical stretch
              if (pixelAdaptive != "")
              {
                Guid DeinterlaceMode = new Guid(pixelAdaptive);
                Log.Debug("VMR9: trying pixel adaptive");
                hr = deinterlace.SetDeinterlaceMode(0, DeinterlaceMode);
                if (hr != 0)
                {
                  Log.Error("VMR9: pixel adaptive failed!");
                }
                else
                {
                  Log.Info("VMR9: setting pixel adaptive succeeded");
                  medianFiltering = "";
                  verticalStretch = "";
                }
              }
              if (medianFiltering != "")
              {
                Guid DeinterlaceMode = new Guid(medianFiltering);
                Log.Debug("VMR9: trying median filtering");
                hr = deinterlace.SetDeinterlaceMode(0, DeinterlaceMode);
                if (hr != 0)
                {
                  Log.Error("VMR9: median filtering failed!");
                }
                else
                {
                  Log.Info("VMR9: setting median filtering succeeded");
                  verticalStretch = "";
                }
              }
              if (verticalStretch != "")
              {
                Guid DeinterlaceMode = new Guid(verticalStretch);
                Log.Debug("VMR9: trying vertical stretch");
                hr = deinterlace.SetDeinterlaceMode(0, DeinterlaceMode);
                if (hr != 0)
                {
                  Log.Error("VMR9: Cannot set H/W de-interlace mode - using VMR9 fallback");
                }
                else
                {
                  Log.Info("VMR9: setting vertical stretch succeeded");
                }
              }
            }
            else
            {
              Log.Info("VMR9: No H/W de-interlaced modes supported, using fallback preference");
            }
          }
          else
          {
            Log.Info("VMR9: progressive mode detected - no need to de-interlace");
          }
        }
          //If the format type is VideoInfo, it must be a progressive frame.
        else
        {
          Log.Info("VMR9: no need to de-interlace this video source");
        }
        DsUtils.FreeAMMediaType(mediatype);
        //release the VMR9 pin
        hr = DirectShowUtil.ReleaseComObject(InPin);

        InPin = null;
        mediatype = null;
      }
    }
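
The frequency fields above are easy to get backwards because AvgTimePerFrame is a duration in 100-nanosecond units, not a rate. A small conversion sketch, using the reference values quoted in the comments of Code Example #1 (400000 -> 25 fps, 333667 -> ~29.97 fps):

    // Sketch: convert AvgTimePerFrame (100 ns units) to frames per second.
    private static double AvgTimePerFrameToFps(long avgTimePerFrame)
    {
      return avgTimePerFrame > 0 ? 10000000.0 / avgTimePerFrame : 0.0;
    }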
Code Example #4
        DSStreamResultCodes InitWithVideoFile(WTVStreamingVideoRequest strq)
        {
            UsingSBEFilter = false;  // Not using stream buffer

            // Init variables
            IPin[] pin = new IPin[1];
            string dPin = string.Empty;
            string sName = string.Empty;
            string dName = string.Empty;
            string sPin = string.Empty;
            FileInfo fiInputFile = new FileInfo(strq.FileName);
            string txtOutputFNPath = fiInputFile.FullName + ".wmv";
            if (
                (fiInputFile.Extension.ToLowerInvariant().Equals(".wtv")) ||
                (fiInputFile.Extension.ToLowerInvariant().Equals(".dvr-ms"))
               ) return DSStreamResultCodes.ErrorInvalidFileType;

            int hr = 0;
            try
            {
                // Get the graphbuilder interface
                SendDebugMessage("Creating Graph Object", 0);
                IGraphBuilder graphbuilder = (IGraphBuilder)currentFilterGraph;

                // Create an ASF writer filter
                SendDebugMessage("Creating ASF Writer", 0);
                WMAsfWriter asf_filter = new WMAsfWriter();
                dc.Add(asf_filter); // CHECK FOR ERRORS
                currentOutputFilter = (IBaseFilter)asf_filter;  // class variable
                // Add the ASF filter to the graph
                hr = graphbuilder.AddFilter((IBaseFilter)asf_filter, "WM Asf Writer");
                DsError.ThrowExceptionForHR(hr);

                // Set the filename
                SendDebugMessage("Setting filename", 0);
                IFileSinkFilter sinkFilter = (IFileSinkFilter)asf_filter;
                string destPathFN = fiInputFile.FullName + ".wmv";
                hr = sinkFilter.SetFileName(destPathFN, null);
                DsError.ThrowExceptionForHR(hr);

                // Handy to have an ACM Wrapper filter hanging around for AVI files with MP3 audio
                SendDebugMessage("Adding ACM Wrapper", 0);
                IBaseFilter ACMFilter =  FilterDefinition.AddToFilterGraph(FilterDefinitions.Other.ACMWrapperFilter, ref graphbuilder);
                dc.Add(ACMFilter);

                // Render file - then build graph
                SendDebugMessage("Rendering file", 0);
                graphbuilder.RenderFile(fiInputFile.FullName, null);
                SendDebugMessage("Saving graph", 0);
                FilterGraphTools.SaveGraphFile(graphbuilder, "C:\\ProgramData\\RemotePotato\\lastfiltergraph.grf");

                // Are both our ASF pins connected?
                IPin ASFVidInputPin = FilterGraphTools.FindPinByMediaType((IBaseFilter)asf_filter, PinDirection.Input, MediaType.Video, MediaSubType.Null);
                IPin ASFAudInputPin = FilterGraphTools.FindPinByMediaType((IBaseFilter)asf_filter, PinDirection.Input, MediaType.Audio, MediaSubType.Null);

                // Run graph [can use this also to get media type => see, e.g. dvrmstowmvhd by Babgvant]
                SendDebugMessage("Run graph for testing purposes", 0);
                IMediaControl tempControl = (IMediaControl)graphbuilder;
                IMediaEvent tempEvent = (IMediaEvent)graphbuilder;
                DsError.ThrowExceptionForHR(tempControl.Pause());
                EventCode pEventCode;
                hr = tempEvent.WaitForCompletion(1000, out pEventCode);

                // Get media type from vid input pin for ASF writer
                AMMediaType pmt = new AMMediaType();
                hr = ASFVidInputPin.ConnectionMediaType(pmt);

                FrameSize SourceFrameSize = null;
                if (pmt.formatType == FormatType.VideoInfo2)
                {
                    // Now graph has been run and stopped we can get the video width and height from the output pin of the main video decoder
                    VideoInfoHeader2 pvih2 = new VideoInfoHeader2();
                    Marshal.PtrToStructure(pmt.formatPtr, pvih2);
                    SourceFrameSize = new FrameSize(pvih2.BmiHeader.Width, pvih2.BmiHeader.Height);
                }
                else if (pmt.formatType == FormatType.VideoInfo)  //{05589f80-c356-11ce-bf01-00aa0055595a}
                {
                    VideoInfoHeader pvih = new VideoInfoHeader();
                    Marshal.PtrToStructure(pmt.formatPtr, pvih);
                    SourceFrameSize = new FrameSize(pvih.BmiHeader.Width, pvih.BmiHeader.Height);
                }
                else
                    SourceFrameSize = new FrameSize(200, 200); // SQUARE

                // Stop graph if necessary
                FilterState pFS;
                hr = tempControl.GetState(1000, out pFS);
                if (pFS != FilterState.Stopped)
                    DsError.ThrowExceptionForHR(tempControl.Stop());
                // Free up media type
                DsUtils.FreeAMMediaType(pmt); pmt = null;

                // (re)Configure the ASF writer with the selected WM Profile
                ConfigureASFWriter(asf_filter, strq, SourceFrameSize);

                // Release pins
                SendDebugMessage("Releasing COM objects (pins)", 0);
                // source
                Marshal.ReleaseComObject(ASFVidInputPin); ASFVidInputPin = null;
                Marshal.ReleaseComObject(ASFAudInputPin); ASFAudInputPin = null;
            }
            catch (Exception ex)
            {
                SendDebugMessageWithException(ex.Message, ex);
                return DSStreamResultCodes.ErrorExceptionOccurred;
            }

            return DSStreamResultCodes.OK;
        }
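
The VideoInfo/VideoInfo2 probing in the middle of this method reappears almost verbatim in the next example; if the two methods live in the same class, it could be factored into a helper along these lines. This is a sketch only: FrameSize is the project's own type, assumed here to take a width and a height.

        static FrameSize FrameSizeFromMediaType(AMMediaType pmt, FrameSize fallback)
        {
            // Read the bitmap dimensions out of whichever format block the
            // connection negotiated; fall back to a caller-supplied default.
            if (pmt.formatType == FormatType.VideoInfo2)
            {
                VideoInfoHeader2 vih2 = new VideoInfoHeader2();
                Marshal.PtrToStructure(pmt.formatPtr, vih2);
                return new FrameSize(vih2.BmiHeader.Width, vih2.BmiHeader.Height);
            }
            if (pmt.formatType == FormatType.VideoInfo)
            {
                VideoInfoHeader vih = new VideoInfoHeader();
                Marshal.PtrToStructure(pmt.formatPtr, vih);
                return new FrameSize(vih.BmiHeader.Width, vih.BmiHeader.Height);
            }
            return fallback;
        }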
Code Example #5
        DSStreamResultCodes InitWithStreamBufferFile(WTVStreamingVideoRequest strq)
        {
            // Init variables
            //IPin[] pin = new IPin[1];
            IBaseFilter DecFilterAudio = null;
            IBaseFilter DecFilterVideo = null;
            IBaseFilter MainAudioDecoder = null;
            IBaseFilter MainVideoDecoder = null;
            string dPin = string.Empty;
            string sName = string.Empty;
            string dName = string.Empty;
            string sPin = string.Empty;
            FileInfo fiInputFile = new FileInfo(strq.FileName);
            string txtOutputFNPath = fiInputFile.FullName + ".wmv";
            if (
                (!fiInputFile.Extension.ToLowerInvariant().Equals(".wtv")) &&
                (!fiInputFile.Extension.ToLowerInvariant().Equals(".dvr-ms"))
               ) return DSStreamResultCodes.ErrorInvalidFileType;

            int hr = 0;
            try
            {
                // Get the graphbuilder interface
                SendDebugMessage("Creating Graph Object",0);
                IGraphBuilder graphbuilder = (IGraphBuilder)currentFilterGraph;

                // Add the DVRMS/WTV file / filter to the graph
                SendDebugMessage("Add SBE Source Filter", 0);

                hr = graphbuilder.AddSourceFilter(fiInputFile.FullName, "SBE Filter", out currentSBEfilter); // class variable
                DsError.ThrowExceptionForHR(hr);
                dc.Add(currentSBEfilter);

                // Get the SBE audio and video out pins
                IPin SBEVidOutPin, SBEAudOutPin;
                SBEAudOutPin = FilterGraphTools.FindPinByMediaType(currentSBEfilter, PinDirection.Output, MediaType.Audio, MediaSubType.Null);
                SBEVidOutPin = FilterGraphTools.FindPinByMediaType(currentSBEfilter, PinDirection.Output, MediaType.Video, MediaSubType.Null);

                // Set up two decrypt filters according to file extension (assume audio and video both present )
                if (fiInputFile.Extension.ToLowerInvariant().Equals(".dvr-ms"))
                {
                    // Add DVR-MS decrypt filters
                    SendDebugMessage("Add DVRMS (bda) decryption", 0);
                    DecFilterAudio = (IBaseFilter)new DTFilter();  // THESE ARE FOR DVR-MS (BDA DTFilters)
                    DecFilterVideo = (IBaseFilter)new DTFilter();
                    graphbuilder.AddFilter(DecFilterAudio, "Decrypt / Tag");
                    graphbuilder.AddFilter(DecFilterVideo, "Decrypt / Tag 0001");
                }
                else  // Add WTV decrypt filters
                {
                    SendDebugMessage("Add WTV (pbda) decryption", 0);
                    DecFilterAudio = FilterDefinition.AddToFilterGraph(FilterDefinitions.Decrypt.DTFilterPBDA, ref graphbuilder);
                    DecFilterVideo = FilterDefinition.AddToFilterGraph(FilterDefinitions.Decrypt.DTFilterPBDA, ref graphbuilder, "PBDA DTFilter 0001");

                }
                dc.Add(DecFilterAudio);
                dc.Add(DecFilterVideo);

                // Make the first link in the graph: SBE => Decrypts
                SendDebugMessage("Connect SBE => Decrypt filters", 0);
                IPin DecVideoInPin = DsFindPin.ByDirection(DecFilterVideo, PinDirection.Input, 0);
                FilterGraphTools.ConnectFilters(graphbuilder, SBEVidOutPin, DecVideoInPin, false);
                IPin DecAudioInPin = DsFindPin.ByDirection(DecFilterAudio, PinDirection.Input, 0);
                if (DecAudioInPin == null)
                    SendDebugMessage("WARNING: No Audio Input to decrypt filter.");
                else
                    FilterGraphTools.ConnectFilters(graphbuilder, SBEAudOutPin, DecAudioInPin, false);

                // Get Dec Audio Out pin
                IPin DecAudioOutPin = DsFindPin.ByDirection(DecFilterAudio, PinDirection.Output, 0);

                // Examine Dec Audio out for audio format
                SendDebugMessage("Examining source audio", 0);
                AMMediaType AudioMediaType = null;
                getPinMediaType(DecAudioOutPin, MediaType.Audio, Guid.Empty, Guid.Empty, ref AudioMediaType);
                SendDebugMessage("Audio media subtype: " + AudioMediaType.subType.ToString());
                SendDebugMessage("Examining Audio StreamInfo");
                StreamInfo si = FileInformation.GetStreamInfo(AudioMediaType);
                bool AudioIsAC3 = (si.SimpleType == "AC-3");
                if (AudioIsAC3)
                    SendDebugMessage("Audio type is AC3");
                else
                    SendDebugMessage("Audio type is not AC3");
                si = null;
                DsUtils.FreeAMMediaType(AudioMediaType);

                // Add an appropriate audio decoder
                if (AudioIsAC3)
                {
                    if (!FilterGraphTools.IsThisComObjectInstalled(FilterDefinitions.Audio.AudioDecoderMPCHC.CLSID))
                    {
                        SendDebugMessage("Missing AC3 Audio Decoder, and AC3 audio detected.");
                        return DSStreamResultCodes.ErrorAC3CodecNotFound;
                    }
                    else
                    {
                        MainAudioDecoder = FilterDefinition.AddToFilterGraph(FilterDefinitions.Audio.AudioDecoderMPCHC, ref graphbuilder);   //MainAudioDecoder = FatAttitude.WTVTranscoder.FilterDefinitions.Audio.AudioDecoderFFDShow.AddToFilterGraph(ref graph);
                        Guid tmpGuid; MainAudioDecoder.GetClassID(out tmpGuid);
                        SendDebugMessage("Main Audio decoder CLSID is " + tmpGuid.ToString());
                    }
                }
                else
                    MainAudioDecoder = FilterDefinition.AddToFilterGraph(FilterDefinitions.Audio.AudioDecoderMSDTV, ref graphbuilder);

                // Add a video decoder
                SendDebugMessage("Add DTV decoder", 0);
                MainVideoDecoder = FilterDefinition.AddToFilterGraph(FilterDefinitions.Video.VideoDecoderMSDTV, ref graphbuilder);
                dc.Add(MainAudioDecoder);
                dc.Add(MainVideoDecoder);

                //SetAudioDecoderOutputToPCMStereo(MainAudioDecoder);

                // Add a null renderer
                SendDebugMessage("Add null renderer", 0);
                NullRenderer MyNullRenderer = new NullRenderer();
                dc.Add(MyNullRenderer);
                hr = graphbuilder.AddFilter((IBaseFilter)MyNullRenderer, @"Null Renderer");
                DsError.ThrowExceptionForHR(hr);

                // Link up video through to null renderer
                SendDebugMessage("Connect video to null renderer", 0);
                // Make the second link:  Decrypts => DTV
                IPin DecVideoOutPin = DsFindPin.ByDirection(DecFilterVideo, PinDirection.Output, 0);
                IPin DTVVideoInPin = DsFindPin.ByName(MainVideoDecoder, @"Video Input");  // IPin DTVVideoInPin = DsFindPin.ByDirection(DTVVideoDecoder, PinDirection.Input, 0);  // first one should be video input?  //
                FilterGraphTools.ConnectFilters(graphbuilder, DecVideoOutPin, DTVVideoInPin, false);
                // 3. DTV => Null renderer
                IPin NullRInPin = DsFindPin.ByDirection((IBaseFilter)MyNullRenderer, PinDirection.Input, 0);
                IPin DTVVideoOutPin = FilterGraphTools.FindPinByMediaType(MainVideoDecoder, PinDirection.Output, MediaType.Video, MediaSubType.Null);
                FilterGraphTools.ConnectFilters(graphbuilder, DTVVideoOutPin, NullRInPin, false);
                Marshal.ReleaseComObject(NullRInPin); NullRInPin = null;

                // Run graph [can use this also to get media type => see, e.g. dvrmstowmvhd by Babgvant]
                SendDebugMessage("Run graph for testing purposes", 0);
                IMediaControl tempControl = (IMediaControl)graphbuilder;
                IMediaEvent tempEvent = (IMediaEvent)graphbuilder;
                DsError.ThrowExceptionForHR(tempControl.Pause());
                DsError.ThrowExceptionForHR(tempControl.Run());
                EventCode pEventCode;
                hr = tempEvent.WaitForCompletion(1000, out pEventCode);
                //DsError.ThrowExceptionForHR(hr);  // DO *NOT* DO THIS HERE!  THERE MAY WELL BE AN ERROR DUE TO EVENTS RAISED BY THE STREAM BUFFER ENGINE, THIS IS A DELIBERATE TEST RUN OF THE GRAPH
                // Stop graph if necessary
                FilterState pFS;
                hr = tempControl.GetState(1000, out pFS);
                if (pFS == FilterState.Running)
                    DsError.ThrowExceptionForHR(tempControl.Stop());

                // Remove null renderer
                hr = graphbuilder.RemoveFilter((IBaseFilter)MyNullRenderer);

                // Now graph has been run and stopped we can get the video width and height from the output pin of the main video decoder
                AMMediaType pmt = null;
                getPinMediaType(DTVVideoOutPin, MediaType.Video, MediaSubType.YUY2, Guid.Empty, ref pmt);
                FrameSize SourceFrameSize;
                if (pmt.formatType == FormatType.VideoInfo2)
                {
                    VideoInfoHeader2 pvih2 = new VideoInfoHeader2();
                    Marshal.PtrToStructure(pmt.formatPtr, pvih2);
                    int VideoWidth = pvih2.BmiHeader.Width;
                    int VideoHeight = pvih2.BmiHeader.Height;
                    SourceFrameSize = new FrameSize(VideoWidth, VideoHeight);
                }
                else
                    SourceFrameSize = new FrameSize(320, 240);

                // Free up
                DsUtils.FreeAMMediaType(pmt); pmt = null;

                // Link up audio
                // 2. Audio Decrypt -> Audio decoder
                IPin MainAudioInPin = DsFindPin.ByDirection(MainAudioDecoder, PinDirection.Input, 0);
                FilterGraphTools.ConnectFilters(graphbuilder, DecAudioOutPin, MainAudioInPin, false);

                // Add ASF Writer
                // Create an ASF writer filter
                SendDebugMessage("Creating ASF Writer", 0);
                WMAsfWriter asf_filter = new WMAsfWriter();
                dc.Add(asf_filter); // CHECK FOR ERRORS
                currentOutputFilter = (IBaseFilter)asf_filter;  // class variable
                // Add the ASF filter to the graph
                hr = graphbuilder.AddFilter((IBaseFilter)asf_filter, "WM Asf Writer");
                DsError.ThrowExceptionForHR(hr);

                // Set the filename
                IFileSinkFilter sinkFilter = (IFileSinkFilter)asf_filter;
                string destPathFN = fiInputFile.FullName + ".wmv";
                hr = sinkFilter.SetFileName(destPathFN, null);
                DsError.ThrowExceptionForHR(hr);

                // Make the final links:  DTV => writer
                SendDebugMessage("Linking audio/video through to decoder and writer", 0);
                IPin DTVAudioOutPin = DsFindPin.ByDirection(MainAudioDecoder, PinDirection.Output, 0);
                IPin ASFAudioInputPin = FilterGraphTools.FindPinByMediaType((IBaseFilter)asf_filter, PinDirection.Input, MediaType.Audio, MediaSubType.Null);
                IPin ASFVideoInputPin = FilterGraphTools.FindPinByMediaType((IBaseFilter)asf_filter, PinDirection.Input, MediaType.Video, MediaSubType.Null);
                FilterGraphTools.ConnectFilters(graphbuilder, DTVAudioOutPin, ASFAudioInputPin, false);
                if (ASFVideoInputPin != null)
                    FilterGraphTools.ConnectFilters(graphbuilder, DTVVideoOutPin, ASFVideoInputPin, false);

                // Configure ASFWriter
                ConfigureASFWriter(asf_filter, strq, SourceFrameSize);

                // Release pins
                SendDebugMessage("Releasing COM objects (pins)", 0);
                    // dec
                if (DecAudioInPin != null) Marshal.ReleaseComObject(DecAudioInPin); DecAudioInPin = null;
                Marshal.ReleaseComObject(DecVideoInPin); DecVideoInPin = null;
                Marshal.ReleaseComObject(DecVideoOutPin); DecVideoOutPin = null;
                Marshal.ReleaseComObject(DecAudioOutPin); DecAudioOutPin = null;
                    // dtv
                Marshal.ReleaseComObject(MainAudioInPin); MainAudioInPin = null;
                Marshal.ReleaseComObject(DTVVideoInPin); DTVVideoInPin = null;
                Marshal.ReleaseComObject(DTVVideoOutPin); DTVVideoOutPin = null;
                Marshal.ReleaseComObject(DTVAudioOutPin); DTVAudioOutPin = null;
                    // asf
                Marshal.ReleaseComObject(ASFAudioInputPin); ASFAudioInputPin = null;
                if (ASFVideoInputPin != null) Marshal.ReleaseComObject(ASFVideoInputPin); ASFVideoInputPin = null;
            }
            catch (Exception ex)
            {
                SendDebugMessageWithException(ex.Message, ex);
                return DSStreamResultCodes.ErrorExceptionOccurred;
            }

            return DSStreamResultCodes.OK;
        }
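
The long run of release-and-null statements at the end of this method (and the previous one) repeats the same pattern for every pin, and as the DecAudioInPin check earlier shows, some of those pins can legitimately be null. A small sketch that folds the null guard into the pattern:

        static void ReleasePin(ref IPin pin)
        {
            // Release-and-null in one place, tolerating pins that were never
            // found (e.g. a decrypt filter with no audio input pin).
            if (pin != null)
            {
                Marshal.ReleaseComObject(pin);
                pin = null;
            }
        }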
Code Example #6
File: Capture.cs Project: simongh/DirectShow.Capture
		/// <summary>
		///  Set the value of one member of the IAMStreamConfig format block.
		///  Helper function for several properties that expose
		///  video/audio settings from IAMStreamConfig.GetFormat().
		///  IAMStreamConfig.GetFormat() returns a AMMediaType struct.
		///  AMMediaType.formatPtr points to a format block structure.
		///  This format block structure may be one of several 
		///  types, the type being determined by AMMediaType.formatType.
		/// </summary>
		private object setStreamConfigSetting(IAMStreamConfig streamConfig, string fieldName, object newValue)
		{
			if (streamConfig == null)
				throw new NotSupportedException();
			assertStopped();
			derenderGraph();

			object returnValue = null;
			//IntPtr pmt = IntPtr.Zero;
			AMMediaType mediaType = new AMMediaType();

			try
			{
				// Get the current format info
				Marshal.ThrowExceptionForHR(streamConfig.GetFormat(out mediaType));
				//Marshal.PtrToStructure(pmt, mediaType);

				// The formatPtr member points to different structures
				// depending on the formatType
				object formatStruct;
				if (mediaType.formatType == FormatType.WaveEx)
					formatStruct = new WaveFormatEx();
				else if (mediaType.formatType == FormatType.VideoInfo)
					formatStruct = new VideoInfoHeader();
				else if (mediaType.formatType == FormatType.VideoInfo2)
					formatStruct = new VideoInfoHeader2();
				else
					throw new NotSupportedException("This device does not support a recognized format block.");

				// Retrieve the nested structure
				Marshal.PtrToStructure(mediaType.formatPtr, formatStruct);

				// Find the required field
				Type structType = formatStruct.GetType();
				System.Reflection.FieldInfo fieldInfo = structType.GetField(fieldName);
				if (fieldInfo == null)
					throw new NotSupportedException("Unable to find the member '" + fieldName + "' in the format block.");

				// Update the value of the field
				fieldInfo.SetValue(formatStruct, newValue);

				// PtrToStructure copies the data so we need to copy it back
				Marshal.StructureToPtr(formatStruct, mediaType.formatPtr, false);

				// Save the changes
				Marshal.ThrowExceptionForHR(streamConfig.SetFormat(mediaType));
			}
			finally
			{
				DsUtils.FreeAMMediaType(mediaType);
				//Marshal.FreeCoTaskMem(pmt);
			}
			renderGraph();
			startPreviewIfNeeded();

			return returnValue;
		}
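
As a usage sketch, the helper above can update any public field of the negotiated format block by name. Here videoStreamConfig is assumed to be the IAMStreamConfig this class already holds for its video stream, and the boxed value must match the field's type exactly (AvgTimePerFrame is a 64-bit count of 100 ns units):

		// Hypothetical call site: set the capture frame interval to 25 fps.
		setStreamConfigSetting(videoStreamConfig, "AvgTimePerFrame", 400000L);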