Example #1
        public NullRenderer(ITimeline timeline, IDESCombineCB audioCallback, IDESCombineCB videoCallback)
            : base(timeline)
        {
            RenderToNullRenderer(audioCallback, videoCallback);

            ChangeState(RendererState.Initialized);
        }
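As context for this and the other renderer constructors below (WavFileRenderer, WindowsMediaRenderer, AviFileRenderer): the graph is built in the constructor, and the caller then starts rendering. A minimal usage sketch, assuming the timeline has already been populated and that the base renderer exposes Render() and IDisposable (neither is shown in these snippets); BuildTimeline and SampleCounterCB are hypothetical helpers:

// Hedged usage sketch -- BuildTimeline(), Render() and the using/IDisposable pattern are assumptions.
ITimeline timeline = BuildTimeline();            // hypothetical helper that adds the groups, tracks and clips
IDESCombineCB audioCB = new SampleCounterCB();   // per-sample callbacks; a sketch of one follows Example #18
IDESCombineCB videoCB = new SampleCounterCB();

using (NullRenderer renderer = new NullRenderer(timeline, audioCB, videoCB))
{
    renderer.Render();                           // pulls every sample through the graph; only the callbacks observe them
}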
Example #2
        /// <summary>
        /// Constructor.
        /// </summary>
        /// <param name="pGroup">Timeline group info</param>
        /// <param name="pCallback">Client callback</param>
        /// <param name="pEventSink">Event sink to call on file complete</param>
        /// <param name="ec">Event code to send on file completion</param>
        public CallbackHandler(
            IGroup pGroup,
            IDESCombineCB pCallback,
            IMediaEventSink pEventSink,
            EventCode ec
            )
        {
            m_pCallback  = pCallback;
            m_Group      = pGroup;
            m_pEventSink = pEventSink;
            m_ec         = ec;

            m_iCurFrame = 0;
            m_iCurFile  = 0;
            // TODO: fix this or chuck it
            MediaFile mf = null; // m_Group.File(m_iCurFile);

            if (mf != null)
            {
                m_CurFileName = mf.FileName;
                m_iMaxFrame   = mf.LengthInFrames;
            }
            else
            {
                m_CurFileName = null;
                m_iMaxFrame   = int.MaxValue;
            }
        }
Example #3
    /// <summary>
    /// Constructor.
    /// </summary>
    /// <param name="pGroup">Timeline group info</param>
    /// <param name="pCallback">Client callback</param>
    /// <param name="pEventSink">Event sink to call on file complete</param>
    /// <param name="ec">Event code to send on file completion</param>
    public AVCallback(
        MediaGroup pGroup,
        IDESCombineCB pCallback,
        IMediaEventSink pEventSink,
        EventCode ec
        )
    {
      m_pCallback = pCallback;
      m_Group = pGroup;
      m_pEventSink = pEventSink;
      m_ec = ec;

      m_iCurFrame = 0;
      m_iCurFile = 0;
      MediaFile mf = m_Group.File(m_iCurFile);
      if (mf != null)
      {
        m_CurFileName = mf.FileName;
        m_iMaxFrame = mf.LengthInFrames;
      }
      else
      {
        m_CurFileName = null;
        m_iMaxFrame = int.MaxValue;
      }
    }
Example #4
        private void RenderToNullRenderer(IDESCombineCB audioCallback, IDESCombineCB videoCallback)
        {
            int hr;

            ICaptureGraphBuilder2 icgb = (ICaptureGraphBuilder2) new CaptureGraphBuilder2();

            try
            {
                hr = icgb.SetFiltergraph(_graph);
                DESError.ThrowExceptionForHR(hr);

                IBaseFilter audioDest = StandardFilters.RenderNull(_dc, _graph);
                IBaseFilter videoDest = StandardFilters.RenderNull(_dc, _graph);

                try
                {
                    RenderGroups(icgb, null, null, audioDest, videoDest, audioCallback, videoCallback);
                }
                finally
                {
                    if (audioDest != null)
                    {
                        Marshal.ReleaseComObject(audioDest);
                    }
                    if (videoDest != null)
                    {
                        Marshal.ReleaseComObject(videoDest);
                    }
                }
            }
            finally
            {
                Marshal.ReleaseComObject(icgb);
            }
        }
Example #5
        /// <summary>
        /// Constructor.
        /// </summary>
        /// <param name="pGroup">Timeline group info</param>
        /// <param name="pCallback">Client callback</param>
        /// <param name="pEventSink">Event sink to call on file complete</param>
        /// <param name="ec">Event code to send on file completion</param>
        public AVCallback(
            MediaGroup pGroup,
            IDESCombineCB pCallback,
            IMediaEventSink pEventSink,
            EventCode ec
            )
        {
            m_pCallback  = pCallback;
            m_Group      = pGroup;
            m_pEventSink = pEventSink;
            m_ec         = ec;

            m_iCurFrame = 0;
            m_iCurFile  = 0;
            MediaFile mf = m_Group.File(m_iCurFile);

            if (mf != null)
            {
                m_CurFileName = mf.FileName;
                m_iMaxFrame   = mf.LengthInFrames;
            }
            else
            {
                m_CurFileName = null;
                m_iMaxFrame   = int.MaxValue;
            }
        }
Example #6
        /// <summary>
        /// Constructor.
        /// </summary>
        /// <param name="pDESGroup">Timeline DESGroup info</param>
        /// <param name="pCallback">Client callback</param>
        /// <param name="pEventSink">Event sink to call on file complete</param>
        /// <param name="ec">Event code to send on file completion</param>
        /// <param name="myDS">Marquee layers; used to size the per-layer offset arrays</param>
        public DESCallback(
            DESGroup pDESGroup,
            IDESCombineCB pCallback,
            IMediaEventSink pEventSink,
            EventCode ec,
            List <Layer> myDS
            )
        {
            m_pCallback  = pCallback;
            m_DESGroup   = pDESGroup;
            m_pEventSink = pEventSink;
            m_ec         = ec;
            MarqueeDS    = myDS;
            m_iCurFrame  = 0;
            m_iCurFile   = 0;
            MediaFile mf = m_DESGroup.File(m_iCurFile);

            if (mf != null)
            {
                m_CurFileName = mf.FileName;
                m_iMaxFrame   = mf.LengthInFrames;
            }
            else
            {
                m_CurFileName = null;
                m_iMaxFrame   = int.MaxValue;
            }
            OffsetArrX = new float[MarqueeDS.Count];
            OffsetArrY = new float[MarqueeDS.Count];
        }
Example #7
        public void RenderToWavDest(
            string outputFile,
            IBaseFilter audioCompressor,
            AMMediaType mediaType,
            IDESCombineCB audioCallback)
        {
            if (audioCompressor != null)
            {
                _dc.Add(audioCompressor);
            }

            int hr;

            if (_firstAudioGroup == null)
            {
                throw new SplicerException("No audio stream to render");
            }

            if (outputFile == null)
            {
                throw new SplicerException("Output file name cannot be null");
            }

            // Contains useful routines for creating the graph
            ICaptureGraphBuilder2 icgb = (ICaptureGraphBuilder2) new CaptureGraphBuilder2();

            _dc.Add(icgb);

            try
            {
                hr = icgb.SetFiltergraph(_graph);
                DESError.ThrowExceptionForHR(hr);

                IBaseFilter wavDestFilter = StandardFilters.RenderWavDest(_dc, _graph);
                IBaseFilter fileSink      = StandardFilters.RenderFileDestination(_dc, _graph, outputFile);

                try
                {
                    RenderGroups(icgb, audioCompressor, null, wavDestFilter, audioCallback, null);

                    FilterGraphTools.ConnectFilters(_graph, wavDestFilter, fileSink, true);

                    // if supplied, apply the media type to the filter
                    if (mediaType != null)
                    {
                        FilterGraphTools.SetFilterFormat(mediaType, audioCompressor);
                    }
                }
                finally
                {
                    Marshal.ReleaseComObject(wavDestFilter);
                    Marshal.ReleaseComObject(fileSink);
                }
            }
            finally
            {
                Marshal.ReleaseComObject(icgb);
            }
        }
Example #8
        public WavFileRenderer(ITimeline timeline, string outputFile, IBaseFilter audioCompressor, AMMediaType mediaType,
                               IDESCombineCB audioCallback)
            : base(timeline)
        {
            RenderToWavDest(outputFile, audioCompressor, mediaType, audioCallback);

            ChangeState(RendererState.Initialized);
        }
Example #9
        public WindowsMediaRenderer(ITimeline timeline, string file, string profileData, IDESCombineCB pVideoCallback,
                                    IDESCombineCB pAudioCallback)
            : base(timeline)
        {
            RenderToAsfWriter(file, profileData, pVideoCallback, pAudioCallback);

            ChangeState(RendererState.Initialized);
        }
Example #10
        public AviFileRenderer(ITimeline timeline, string outputFile, IBaseFilter videoCompressor,
                               IBaseFilter audioCompressor, IDESCombineCB pVideoCallback,
                               IDESCombineCB pAudioCallback)
            : base(timeline)
        {
            RenderToAVI(outputFile, videoCompressor, audioCompressor, pVideoCallback, pAudioCallback);

            ChangeState(RendererState.Initialized);
        }
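The same pattern applies when writing an AVI file. Both compressor arguments may be null, since RenderGroups (Example #24) only adds non-null compressors to the graph. A hedged sketch; the output path, timeline and Render() call are illustrative assumptions:

// Illustrative only -- the path, timeline and Render() call are assumptions.
using (AviFileRenderer renderer = new AviFileRenderer(
           timeline,
           @"C:\temp\output.avi",
           null,        // video compressor (optional; configure it yourself if supplied)
           null,        // audio compressor (optional)
           null,        // per-frame video callback (optional)
           null))       // per-sample audio callback (optional)
{
    renderer.Render();
}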
Example #11
        private void RenderToAVI(
            string sOutputFile,
            IBaseFilter ibfVideoCompressor,
            IBaseFilter ibfAudioCompressor,
            IDESCombineCB pVideoCallback,
            IDESCombineCB pAudioCallback)
        {
            if (_firstVideoGroup == null)
            {
                throw new SplicerException("Can not render to AVI when no video group exists");
            }

            int hr;

            if (sOutputFile == null)
            {
                throw new SplicerException("Output file name cannot be null");
            }

            // Contains useful routines for creating the graph
            ICaptureGraphBuilder2 icgb = (ICaptureGraphBuilder2) new CaptureGraphBuilder2();

            try
            {
                hr = icgb.SetFiltergraph(_graph);
                DESError.ThrowExceptionForHR(hr);

                // Create the file writer
                IBaseFilter pMux = StandardFilters.RenderAviDest(_dc, icgb, sOutputFile);

                try
                {
                    RenderGroups(icgb, ibfAudioCompressor, ibfVideoCompressor, pMux, pAudioCallback, pVideoCallback);
                }
                finally
                {
                    Marshal.ReleaseComObject(pMux);
                }
            }
            finally
            {
                Marshal.ReleaseComObject(icgb);
            }
        }
Example #12
        protected void RenderToAsfWriter(
            string file,
            string profileData,
            IDESCombineCB pVideoCallback,
            IDESCombineCB pAudioCallback)
        {
            int hr;

            if (file == null)
            {
                throw new SplicerException("Output file name cannot be null");
            }

            // Contains useful routines for creating the graph
            ICaptureGraphBuilder2 icgb = (ICaptureGraphBuilder2) new CaptureGraphBuilder2();

            try
            {
                hr = icgb.SetFiltergraph(_graph);
                DESError.ThrowExceptionForHR(hr);

                IBaseFilter pMux = StandardFilters.RenderAsfWriterWithProfile(_dc, _graph, profileData, file);

                ValidateAsfWriterIsSuitable(pMux);

                _dc.Add(pMux);

                try
                {
                    RenderGroups(icgb, null, null, pMux, pAudioCallback, pVideoCallback);
                }
                finally
                {
                    Marshal.ReleaseComObject(pMux);
                }
            }
            finally
            {
                Marshal.ReleaseComObject(icgb);
            }
        }
Example #13
 protected void RenderGroups(ICaptureGraphBuilder2 icgb, IBaseFilter audioCompressor, IBaseFilter videoCompressor,
                             IBaseFilter pMux, IDESCombineCB pAudioCallback, IDESCombineCB pVideoCallback)
 {
     RenderGroups(icgb, audioCompressor, videoCompressor, pMux, pMux, pAudioCallback, pVideoCallback);
 }
Example #14
 /// <summary>
 /// Render the timeline's audio to the default audio output (video is not rendered).
 /// </summary>
 /// <param name="pAudioCallback">Callback routine to be called for each audio sample, or null for no callback</param>
 public void RenderAudioToOutput(IDESCombineCB pAudioCallback)
 {
   RenderToWindow(IntPtr.Zero, null, pAudioCallback, false, true);
 }
Example #15
 public WindowRenderer(ITimeline timeline, IntPtr hWnd, IDESCombineCB pVideoCallback,
                       IDESCombineCB pAudioCallback)
     : base(timeline)
 {
     RenderToWindow(hWnd, pVideoCallback, pAudioCallback);
 }
Example #16
        public WavFileRenderer(ITimeline timeline, string outputFile, AudioFormat format, IDESCombineCB audioCallback)
            : base(timeline)
        {
            AudioCompressor compressor = null;

            try
            {
                compressor = AudioCompressorFactory.Create(format);

                _dc.Add(compressor.Filter);

                RenderToWavDest(outputFile, compressor.Filter, compressor.MediaType, audioCallback);

                ChangeState(RendererState.Initialized);
            }
            finally
            {
                if ((compressor != null) && (compressor.MediaType != null))
                {
                    DsUtils.FreeAMMediaType(compressor.MediaType);
                }
            }
        }
Example #17
    /// <summary>
    /// Render the timeline's audio to a file.
    /// </summary>
    /// <param name="pAudioCallback">Callback routine to be called for each audio sample, or null for no callback</param>
    public void RenderAudioToFile(IDESCombineCB pAudioCallback)
    {
    }
Example #18
        /// <summary>
        /// Configure the graph to output the results to a video window.
        /// </summary>
        /// <remarks>
        /// The callback routines are invoked once for each sample.  This allows for additional processing to
        /// be performed on the video or audio buffers.
        /// </remarks>
        /// <param name="hWnd">Window handle to render to, or IntPtr.Zero to render to its own window</param>
        /// <param name="pVideoCallback">Callback routine to be called for each video frame or null for no callback</param>
        /// <param name="pAudioCallback">Callback routine to be called for each audio frame or null for no callback</param>
        /// <param name="video">True to render the video group</param>
        /// <param name="audio">True to render the audio group</param>
        private void RenderToWindow(IntPtr hWnd, IDESCombineCB pVideoCallback, IDESCombineCB pAudioCallback, bool video, bool audio)
        {
            int            hr;
            IPin           pPin;
            IVideoWindow   pVidWindow;
            IAMTimelineObj pGroup;

            // Perform initialization common to all render routines
            RenderCommon();

            // Contains useful routines for creating the graph
            ICaptureGraphBuilder2 icgb = (ICaptureGraphBuilder2) new CaptureGraphBuilder2();

            try
            {
                hr = icgb.SetFiltergraph(m_pGraph);
                DESError.ThrowExceptionForHR(hr);

                int NumGroups;
                hr = m_pTimeline.GetGroupCount(out NumGroups);
                DESError.ThrowExceptionForHR(hr);

                // Walk the groups.  For DESCombine, there is one group that
                // contains all the video, and a second group for the audio.
                for (int i = 0; i < NumGroups; i++)
                {
                    hr = m_pTimeline.GetGroup(out pGroup, i);
                    DESError.ThrowExceptionForHR(hr);

                    try
                    {
                        // Inform the graph we will be previewing (rather than writing to disk)
                        IAMTimelineGroup pTLGroup = (IAMTimelineGroup)pGroup;
                        hr = pTLGroup.SetPreviewMode(true);
                        DESError.ThrowExceptionForHR(hr);
                    }
                    finally
                    {
                        // Release the group
                        Marshal.ReleaseComObject(pGroup);
                    }

                    // Get the IPin for the current group
                    hr = m_pRenderEngine.GetGroupOutputPin(i, out pPin);
                    DESError.ThrowExceptionForHR(hr);

                    try
                    {
                        // If this is the video pin
                        if (video && IsVideo(pPin))
                        {
                            // Get a video renderer
                            IBaseFilter ibfVideoRenderer = (IBaseFilter) new VideoRenderer();

                            try
                            {
                                // Create a sample grabber, add it to the graph and connect it all up
                                AVCallback mcb = new AVCallback(m_Video, pVideoCallback, (IMediaEventSink)m_pGraph, EC_VideoFileComplete);
                                RenderWindowHelper(icgb, mcb, "Video", pPin, ibfVideoRenderer);
                            }
                            finally
                            {
                                Marshal.ReleaseComObject(ibfVideoRenderer);
                            }
                        }
                        else if (audio)
                        {
                            // Get an audio renderer
                            IBaseFilter ibfAudioRenderer = (IBaseFilter) new AudioRender();

                            try
                            {
                                // Create a sample grabber, add it to the graph and connect it all up
                                AVCallback mcb = new AVCallback(m_Audio, pAudioCallback, (IMediaEventSink)m_pGraph, EC_AudioFileComplete);
                                RenderWindowHelper(icgb, mcb, "Audio", pPin, ibfAudioRenderer);
                            }
                            finally
                            {
                                Marshal.ReleaseComObject(ibfAudioRenderer);
                            }
                        }
                    }
                    finally
                    {
                        Marshal.ReleaseComObject(pPin);
                    }
                }

                if (video)
                {
                    // Configure the video window
                    pVidWindow = (IVideoWindow)m_pGraph;

                    // If a window handle was supplied, use it
                    if (hWnd != IntPtr.Zero)
                    {
                        hr = pVidWindow.put_Owner(hWnd);
                        DESError.ThrowExceptionForHR(hr);
                    }
                    else
                    {
                        // Use our own window

                        hr = pVidWindow.put_Caption("Video Rendering Window");
                        DESError.ThrowExceptionForHR(hr);

                        // since no user interaction is allowed, remove
                        // system menu and maximize/minimize buttons
                        WindowStyle lStyle = 0;
                        hr = pVidWindow.get_WindowStyle(out lStyle);
                        DESError.ThrowExceptionForHR(hr);

                        lStyle &= ~(WindowStyle.MinimizeBox | WindowStyle.MaximizeBox | WindowStyle.SysMenu);
                        hr      = pVidWindow.put_WindowStyle(lStyle);
                        DESError.ThrowExceptionForHR(hr);
                    }
                }
            }
            finally
            {
                Marshal.ReleaseComObject(icgb);
            }
        }
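The remarks above state that the callback is invoked once per sample so the buffer can be inspected or modified. A minimal IDESCombineCB implementation sketch; the BufferCB signature is an assumption taken from the DirectShowLib DESCombine sample, so verify it against the interface definition in your copy of the code:

// Minimal callback sketch.  The BufferCB signature is assumed, not taken from the snippets above.
public class SampleCounterCB : IDESCombineCB
{
    private int m_Count;

    public int Count
    {
        get { return m_Count; }
    }

    // Called once per media sample; return 0 (S_OK) to keep the graph running.
    public int BufferCB(string sFilename, double SampleTime, IntPtr pBuffer, int BufferLen)
    {
        m_Count++;          // count samples; pBuffer could also be inspected or modified in place
        return 0;
    }
}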
Example #19
        /// <summary>
        /// Configure the graph to output the results to an AVI file.
        /// </summary>
        /// <param name="sOutputFile">File name for output (must not be null)</param>
        /// <param name="ibfVideoCompressor">IBaseFilter of a video compressor to use (or null for none).
        /// Note that <b><i>no</i></b> configuration of this compressor is done by this method.  It merely adds it
        /// to the graph in the appropriate place.  Also, the pointer is not invalidated, so any configuration
        /// of the compressor that needs to be done after being added to the graph can still be done.</param>
        /// <param name="ibfAudioCompressor">IBaseFilter of an audio compressor to use (or null for none).
        /// Note that <b><i>no</i></b> configuration of this compressor is done by this method.  It merely adds it
        /// to the graph in the appropriate place.  Also, the pointer is not invalidated, so any configuration
        /// of the compressor that needs to be done after being added to the graph can still be done.</param>
        /// <param name="pVideoCallback">Callback routine to be called for each video frame or null for no callback</param>
        /// <param name="pAudioCallback">Callback routine to be called for each audio frame or null for no callback</param>
        /// <remarks>
        /// The callback routines are invoked once for each sample.  This allows for additional processing to
        /// be performed on the video or audio buffers.
        /// </remarks>
        public void RenderToAVI(
            string sOutputFile,
            IBaseFilter ibfVideoCompressor,
            IBaseFilter ibfAudioCompressor,
            IDESCombineCB pVideoCallback,
            IDESCombineCB pAudioCallback)
        {
            int  hr;
            IPin pPin;

            if (sOutputFile == null)
            {
                throw new Exception("Output file name cannot be null");
            }

            // Perform initialization common to all render routines
            RenderCommon();

            // Contains useful routines for creating the graph
            ICaptureGraphBuilder2 icgb = (ICaptureGraphBuilder2) new CaptureGraphBuilder2();

            try
            {
                hr = icgb.SetFiltergraph(m_pGraph);
                DESError.ThrowExceptionForHR(hr);

                // Create the file writer
                IBaseFilter     pMux;
                IFileSinkFilter pFilter;
                hr = icgb.SetOutputFileName(MediaSubType.Avi, sOutputFile, out pMux, out pFilter);
                DESError.ThrowExceptionForHR(hr);

                // We don't need this, so let it go
                Marshal.ReleaseComObject(pFilter);

                try
                {
                    int NumGroups;
                    hr = m_pTimeline.GetGroupCount(out NumGroups);
                    DESError.ThrowExceptionForHR(hr);

                    // Walk the groups.  For this class, there is one group that
                    // contains all the video, and a second group for the audio.
                    for (int i = 0; i < NumGroups; i++)
                    {
                        IAMTimelineObj pGroup;

                        hr = m_pTimeline.GetGroup(out pGroup, i);
                        DESError.ThrowExceptionForHR(hr);

                        try
                        {
                            // Inform the graph we will be writing to disk (rather than previewing)
                            IAMTimelineGroup pTLGroup = (IAMTimelineGroup)pGroup;
                            hr = pTLGroup.SetPreviewMode(false);
                            DESError.ThrowExceptionForHR(hr);
                        }
                        finally
                        {
                            Marshal.ReleaseComObject(pGroup);
                        }

                        // Get the IPin for the current group
                        hr = m_pRenderEngine.GetGroupOutputPin(i, out pPin);
                        DESError.ThrowExceptionForHR(hr);

                        try
                        {
                            if (IsVideo(pPin))
                            {
                                // Create a sample grabber, add it to the graph and connect it all up
                                AVCallback mcb = new AVCallback(m_Video, pVideoCallback, (IMediaEventSink)m_pGraph, EC_VideoFileComplete);
                                RenderHelper(icgb, mcb, "Video", pPin, ibfVideoCompressor, pMux);
                            }
                            else
                            {
                                // Create a sample grabber, add it to the graph and connect it all up
                                AVCallback mcb = new AVCallback(m_Audio, pAudioCallback, (IMediaEventSink)m_pGraph, EC_AudioFileComplete);
                                RenderHelper(icgb, mcb, "Audio", pPin, ibfAudioCompressor, pMux);
                            }
                        }
                        finally
                        {
                            Marshal.ReleaseComObject(pPin);
                        }
                    }
                }
                finally
                {
                    Marshal.ReleaseComObject(pMux);
                }
            }
            finally
            {
                Marshal.ReleaseComObject(icgb);
            }
        }
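Calling the DESCombine-style RenderToAVI above works the same way, except it is an instance method on the combiner class rather than a renderer constructor. A hedged sketch, where combiner stands in for an already-configured instance of the declaring class and the graph is started elsewhere:

// 'combiner' is a hypothetical, already-configured instance of the class that declares RenderToAVI above.
IDESCombineCB videoCB = new SampleCounterCB();   // see the sketch following Example #18
combiner.RenderToAVI(
    @"C:\temp\output.avi",
    null,        // no video compressor (null for none, per the documentation above)
    null,        // no audio compressor
    videoCB,     // invoked once per video sample
    null);       // no audio callback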
Example #20
 /// <summary>
 /// Render the timeline's audio to a file.
 /// </summary>
 /// <param name="pAudioCallback">Callback routine to be called for each audio sample, or null for no callback</param>
 public void RenderAudioToFile(IDESCombineCB pAudioCallback)
 {
 }
Example #21
 /// <summary>
 /// Render the timeline's audio to the default audio output (video is not rendered).
 /// </summary>
 /// <param name="pAudioCallback">Callback routine to be called for each audio sample, or null for no callback</param>
 public void RenderAudioToOutput(IDESCombineCB pAudioCallback)
 {
     RenderToWindow(IntPtr.Zero, null, pAudioCallback, false, true);
 }
Example #22
    /// <summary>
    /// Configure the graph to output the results to a video window.
    /// </summary>
    /// <remarks>
    /// The callback routines are invoked once for each sample.  This allows for additional processing to
    /// be performed on the video or audio buffers.
    /// </remarks>
    /// <param name="hWnd">Window handle to render to, or IntPtr.Zero to render to its own window</param>
    /// <param name="pVideoCallback">Callback routine to be called for each video frame or null for no callback</param>
    /// <param name="pAudioCallback">Callback routine to be called for each audio frame or null for no callback</param>
    /// <param name="video">True to render the video group</param>
    /// <param name="audio">True to render the audio group</param>
    private void RenderToWindow(IntPtr hWnd, IDESCombineCB pVideoCallback, IDESCombineCB pAudioCallback, bool video, bool audio)
    {
      int hr;
      IPin pPin;
      IVideoWindow pVidWindow;
      IAMTimelineObj pGroup;

      // Perform initialization common to all render routines
      RenderCommon();

      // Contains useful routines for creating the graph
      ICaptureGraphBuilder2 icgb = (ICaptureGraphBuilder2)new CaptureGraphBuilder2();

      try
      {
        hr = icgb.SetFiltergraph(m_pGraph);
        DESError.ThrowExceptionForHR(hr);

        int NumGroups;
        hr = m_pTimeline.GetGroupCount(out NumGroups);
        DESError.ThrowExceptionForHR(hr);

        // Walk the groups.  For DESCombine, there is one group that 
        // contains all the video, and a second group for the audio.
        for (int i = 0; i < NumGroups; i++)
        {
          hr = m_pTimeline.GetGroup(out pGroup, i);
          DESError.ThrowExceptionForHR(hr);

          try
          {
            // Inform the graph we will be previewing (rather than writing to disk)
            IAMTimelineGroup pTLGroup = (IAMTimelineGroup)pGroup;
            hr = pTLGroup.SetPreviewMode(true);
            DESError.ThrowExceptionForHR(hr);
          }
          finally
          {
            // Release the group
            Marshal.ReleaseComObject(pGroup);
          }

          // Get the IPin for the current group
          hr = m_pRenderEngine.GetGroupOutputPin(i, out pPin);
          DESError.ThrowExceptionForHR(hr);

          try
          {
            // If this is the video pin
            if (video && IsVideo(pPin))
            {
              // Get a video renderer
              IBaseFilter ibfVideoRenderer = (IBaseFilter)new VideoRenderer();

              try
              {
                // Create a sample grabber, add it to the graph and connect it all up
                AVCallback mcb = new AVCallback(m_Video, pVideoCallback, (IMediaEventSink)m_pGraph, EC_VideoFileComplete);
                RenderWindowHelper(icgb, mcb, "Video", pPin, ibfVideoRenderer);
              }
              finally
              {
                Marshal.ReleaseComObject(ibfVideoRenderer);
              }
            }
            else if (audio)
            {
              // Get an audio renderer
              IBaseFilter ibfAudioRenderer = (IBaseFilter)new AudioRender();

              try
              {
                // Create a sample grabber, add it to the graph and connect it all up
                AVCallback mcb = new AVCallback(m_Audio, pAudioCallback, (IMediaEventSink)m_pGraph, EC_AudioFileComplete);
                RenderWindowHelper(icgb, mcb, "Audio", pPin, ibfAudioRenderer);
              }
              finally
              {
                Marshal.ReleaseComObject(ibfAudioRenderer);
              }
            }
          }
          finally
          {
            Marshal.ReleaseComObject(pPin);
          }
        }

        if (video)
        {

          // Configure the video window
          pVidWindow = (IVideoWindow)m_pGraph;

          // If a window handle was supplied, use it
          if (hWnd != IntPtr.Zero)
          {
            hr = pVidWindow.put_Owner(hWnd);
            DESError.ThrowExceptionForHR(hr);
          }
          else
          {
            // Use our own window

            hr = pVidWindow.put_Caption("Video Rendering Window");
            DESError.ThrowExceptionForHR(hr);

            // since no user interaction is allowed, remove
            // system menu and maximize/minimize buttons
            WindowStyle lStyle = 0;
            hr = pVidWindow.get_WindowStyle(out lStyle);
            DESError.ThrowExceptionForHR(hr);

            lStyle &= ~(WindowStyle.MinimizeBox | WindowStyle.MaximizeBox | WindowStyle.SysMenu);
            hr = pVidWindow.put_WindowStyle(lStyle);
            DESError.ThrowExceptionForHR(hr);
          }
        }
      }
      finally
      {
        Marshal.ReleaseComObject(icgb);
      }
    }
Example #23
    /// <summary>
    /// Configure the graph to output the results to an AVI file.
    /// </summary>
    /// <param name="sOutputFile">File name for output (must not be null)</param>
    /// <param name="ibfVideoCompressor">IBaseFilter of a video compressor to use (or null for none).  
    /// Note that <b><i>no</i></b> configuration of this compressor is done by this method.  It merely adds it
    /// to the graph in the appropriate place.  Also, the pointer is not invalidated, so any configuration
    /// of the compressor that needs to be done after being added to the graph can still be done.</param>
    /// <param name="ibfAudioCompressor">IBaseFilter of an audio compressor to use (or null for none).
    /// Note that <b><i>no</i></b> configuration of this compressor is done by this method.  It merely adds it
    /// to the graph in the appropriate place.  Also, the pointer is not invalidated, so any configuration
    /// of the compressor that needs to be done after being added to the graph can still be done.</param>
    /// <param name="pVideoCallback">Callback routine to be called for each video frame or null for no callback</param>
    /// <param name="pAudioCallback">Callback routine to be called for each audio frame or null for no callback</param>
    /// <remarks>
    /// The callback routines are invoked once for each sample.  This allows for additional processing to
    /// be performed on the video or audio buffers.
    /// </remarks>
    public void RenderToAVI(
        string sOutputFile,
        IBaseFilter ibfVideoCompressor,
        IBaseFilter ibfAudioCompressor,
        IDESCombineCB pVideoCallback,
        IDESCombineCB pAudioCallback)
    {
      int hr;
      IPin pPin;

      if (sOutputFile == null)
      {
        throw new Exception("Output file name cannot be null");
      }

      // Perform initialization common to all render routines
      RenderCommon();

      // Contains useful routines for creating the graph
      ICaptureGraphBuilder2 icgb = (ICaptureGraphBuilder2)new CaptureGraphBuilder2();

      try
      {
        hr = icgb.SetFiltergraph(m_pGraph);
        DESError.ThrowExceptionForHR(hr);

        // Create the file writer
        IBaseFilter pMux;
        IFileSinkFilter pFilter;
        hr = icgb.SetOutputFileName(MediaSubType.Avi, sOutputFile, out pMux, out pFilter);
        DESError.ThrowExceptionForHR(hr);

        // We don't need this, so let it go
        Marshal.ReleaseComObject(pFilter);

        try
        {
          int NumGroups;
          hr = m_pTimeline.GetGroupCount(out NumGroups);
          DESError.ThrowExceptionForHR(hr);

          // Walk the groups.  For this class, there is one group that 
          // contains all the video, and a second group for the audio.
          for (int i = 0; i < NumGroups; i++)
          {
            IAMTimelineObj pGroup;

            hr = m_pTimeline.GetGroup(out pGroup, i);
            DESError.ThrowExceptionForHR(hr);

            try
            {
              // Inform the graph we will be writing to disk (rather than previewing)
              IAMTimelineGroup pTLGroup = (IAMTimelineGroup)pGroup;
              hr = pTLGroup.SetPreviewMode(false);
              DESError.ThrowExceptionForHR(hr);
            }
            finally
            {
              Marshal.ReleaseComObject(pGroup);
            }

            // Get the IPin for the current group
            hr = m_pRenderEngine.GetGroupOutputPin(i, out pPin);
            DESError.ThrowExceptionForHR(hr);

            try
            {
              if (IsVideo(pPin))
              {
                // Create a sample grabber, add it to the graph and connect it all up
                AVCallback mcb = new AVCallback(m_Video, pVideoCallback, (IMediaEventSink)m_pGraph, EC_VideoFileComplete);
                RenderHelper(icgb, mcb, "Video", pPin, ibfVideoCompressor, pMux);
              }
              else
              {
                // Create a sample grabber, add it to the graph and connect it all up
                AVCallback mcb = new AVCallback(m_Audio, pAudioCallback, (IMediaEventSink)m_pGraph, EC_AudioFileComplete);
                RenderHelper(icgb, mcb, "Audio", pPin, ibfAudioCompressor, pMux);
              }
            }
            finally
            {
              Marshal.ReleaseComObject(pPin);
            }
          }
        }
        finally
        {
          Marshal.ReleaseComObject(pMux);
        }
      }
      finally
      {
        Marshal.ReleaseComObject(icgb);
      }
    }
Example #24
        protected void RenderGroups(ICaptureGraphBuilder2 icgb, IBaseFilter audioCompressor, IBaseFilter videoCompressor,
                                    IBaseFilter audioDest, IBaseFilter videoDest, IDESCombineCB pAudioCallback,
                                    IDESCombineCB pVideoCallback)
        {
            int hr = 0;

            if (audioCompressor != null)
            {
                _dc.Add(audioCompressor);
            }
            if (videoCompressor != null)
            {
                _dc.Add(videoCompressor);
            }
            if (audioDest != null)
            {
                _dc.Add(audioDest);
            }
            if ((videoDest != null) && (audioDest != videoDest))
            {
                _dc.Add(videoDest);
            }

            IAMTimeline desTimeline = _timeline.DesTimeline;

            int NumGroups;

            hr = desTimeline.GetGroupCount(out NumGroups);
            DESError.ThrowExceptionForHR(hr);

            // Walk the groups.  For this class, there is one group that
            // contains all the video, and a second group for the audio.
            for (int i = (NumGroups - 1); i >= 0; i--)
            {
                IAMTimelineObj pGroup;

                hr = desTimeline.GetGroup(out pGroup, i);
                DESError.ThrowExceptionForHR(hr);

                try
                {
                    // Inform the graph we will be writing to disk (rather than previewing)
                    IAMTimelineGroup pTLGroup = (IAMTimelineGroup)pGroup;
                    hr = pTLGroup.SetPreviewMode(false);
                    DESError.ThrowExceptionForHR(hr);
                }
                finally
                {
                    Marshal.ReleaseComObject(pGroup);
                }

                IPin pPin;

                // Get the IPin for the current group
                hr = _renderEngine.GetGroupOutputPin(i, out pPin);
                _dc.Add(pPin);
                DESError.ThrowExceptionForHR(hr);

                try
                {
                    if (PinUtils.IsVideo(pPin))
                    {
                        // Create a sample grabber, add it to the graph and connect it all up
                        CallbackHandler mcb =
                            new CallbackHandler(_firstVideoGroup, pVideoCallback, (IMediaEventSink)_graph,
                                                EC_VideoFileComplete);
                        RenderHelper(icgb, mcb, "Video", pPin, videoCompressor, videoDest);
                    }
                    else
                    {
                        // Create a sample grabber, add it to the graph and connect it all up
                        CallbackHandler mcb =
                            new CallbackHandler(_firstAudioGroup, pAudioCallback, (IMediaEventSink)_graph,
                                                EC_AudioFileComplete);
                        RenderHelper(icgb, mcb, "Audio", pPin, audioCompressor, audioDest);
                    }
                }
                finally
                {
                    Marshal.ReleaseComObject(pPin);
                }
            }
        }