Example #1
        public void RenderWmvWithImageWatermark()
        {
            string outputFile = "RenderWmvWithImageWatermark.wmv";

            using (Image waterMarkImage = Image.FromFile("..\\..\\corner_watermark.png"))
                using (ITimeline timeline = new DefaultTimeline())
                {
                    IGroup videoGroup = timeline.AddVideoGroup(32, 320, 240);
                    ITrack videoTrack = videoGroup.AddTrack();

                    IClip videoClip =
                        videoTrack.AddClip("..\\..\\transitions.wmv", GroupMediaType.Video, InsertPosition.Absolute, 0, 0, 2);

                    IGroup audioGroup = timeline.AddAudioGroup();
                    ITrack audioTrack = audioGroup.AddTrack();
                    audioTrack.AddClip("..\\..\\testinput.mp3", GroupMediaType.Audio, InsertPosition.Absolute, 0, 0, 2);

                    ICallbackParticipant[] videoParticipants =
                        new ICallbackParticipant[]
                    { new ImageWatermarkParticipant(32, 320, 240, true, waterMarkImage, new Point(200, 0)) };

                    using (
                        WindowsMediaRenderer renderer =
                            new WindowsMediaRenderer(timeline, outputFile, WindowsMediaProfiles.HighQualityVideo,
                                                     videoParticipants, null))
                    {
                        renderer.Render();
                    }
                }
        }
Example #2
        private void RenderToNullRenderer(ICallbackParticipant[] audioParticipants,
                                          ICallbackParticipant[] videoParticipants)
        {
            int hr;

            var graphBuilder = (ICaptureGraphBuilder2) new CaptureGraphBuilder2();

            try
            {
                hr = graphBuilder.SetFiltergraph(Graph);
                DESError.ThrowExceptionForHR(hr);

                IBaseFilter audioDest = StandardFilters.RenderNull(Cleanup, Graph);
                IBaseFilter videoDest = StandardFilters.RenderNull(Cleanup, Graph);

                try
                {
                    RenderGroups(graphBuilder, null, null, audioDest, videoDest, audioParticipants, videoParticipants);
                }
                finally
                {
                    if (audioDest != null) Marshal.ReleaseComObject(audioDest);
                    if (videoDest != null) Marshal.ReleaseComObject(videoDest);
                }

                DisableClock();
            }
            finally
            {
                Marshal.ReleaseComObject(graphBuilder);
            }
        }
Example #3
        public void RenderWmvWithSampleTimeWatermark()
        {
            string outputFile = "RenderWmvWithSampleTimeWatermark.wmv";

            using (ITimeline timeline = new DefaultTimeline())
            {
                IGroup videoGroup = timeline.AddVideoGroup(32, 320, 240);
                ITrack videoTrack = videoGroup.AddTrack();

                IClip videoClip =
                    videoTrack.AddClip("transitions.wmv", GroupMediaType.Video, InsertPosition.Absolute, 0, 0, 2);

                IGroup audioGroup = timeline.AddAudioGroup();
                ITrack audioTrack = audioGroup.AddTrack();
                audioTrack.AddClip("testinput.mp3", GroupMediaType.Audio, InsertPosition.Absolute, 0, 0, 2);

                ICallbackParticipant[] videoParticipants =
                    new ICallbackParticipant[] {new SampleTimeWatermarkParticipant(32, 320, 240, true)};

                using (
                    WindowsMediaRenderer renderer =
                        new WindowsMediaRenderer(timeline, outputFile, WindowsMediaProfiles.HighQualityVideo,
                                                 videoParticipants, null))
                {
                    renderer.Render();
                }
            }
        }
Example #4
        public WavFileRenderer(ITimeline timeline, string outputFile, IBaseFilter audioCompressor, AMMediaType mediaType,
                               ICallbackParticipant[] audioParticipants)
            : base(timeline)
        {
            RenderToWavDest(outputFile, audioCompressor, mediaType, audioParticipants);

            ChangeState(RendererState.Initialized);
        }
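A minimal usage sketch for the constructor above (the file name is a placeholder, and the timeline is assumed to already contain an audio group, as in Example #1). Passing null for the compressor, media type and participants relies on RenderToWavDest accepting nulls for all three; Render() is assumed to come from the shared renderer base class, as used in Example #1:

        // Hypothetical usage: write the timeline's audio group to a .wav file
        // with no audio compressor, no explicit media type and no callbacks.
        using (var renderer = new WavFileRenderer(timeline, "output.wav", null, null, null))
        {
            renderer.Render();
        }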
Example #5
        public NullRenderer(ITimeline timeline, ICallbackParticipant[] audioParticipants,
                            ICallbackParticipant[] videoParticipants)
            : base(timeline)
        {
            RenderToNullRenderer(audioParticipants, videoParticipants);

            ChangeState(RendererState.Initialized);
        }
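A minimal usage sketch for the constructor above. A NullRenderer discards the rendered output, so it is mainly useful when only the participant callbacks need to observe the samples; here videoParticipant is a hypothetical ICallbackParticipant created elsewhere, and Render() is assumed to come from the shared renderer base class, as in Example #1:

        // Hypothetical usage: drive the timeline through null sinks so that
        // only the supplied participants see the video samples.
        ICallbackParticipant[] videoParticipants = { videoParticipant };
        using (var renderer = new NullRenderer(timeline, null, videoParticipants))
        {
            renderer.Render();
        }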
Example #6
        public AviFileRenderer(ITimeline timeline, string outputFile, IBaseFilter videoCompressor,
                               IBaseFilter audioCompressor, ICallbackParticipant[] videoParticipants,
                               ICallbackParticipant[] audioParticipants)
            : base(timeline)
        {
            RenderToAVI(outputFile, videoCompressor, audioCompressor, videoParticipants, audioParticipants);

            ChangeState(RendererState.Initialized);
        }
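A minimal usage sketch for the constructor above (placeholder file name; the timeline must contain a video group, otherwise RenderToAVI throws). Null compressors and null participant arrays are accepted, and Render() is assumed to come from the shared renderer base class, as in Example #1:

        // Hypothetical usage: render the timeline to an AVI file without
        // video/audio compressors and without per-sample callbacks.
        using (var renderer = new AviFileRenderer(timeline, "output.avi", null, null, null, null))
        {
            renderer.Render();
        }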
Example #7
        private void RenderToWavDest(
            string outputFile,
            IBaseFilter audioCompressor,
            AMMediaType mediaType,
            ICallbackParticipant[] audioParticipants)
        {
            if (audioCompressor != null) Cleanup.Add(audioCompressor);

            int hr;

            if (FirstAudioGroup == null)
            {
                throw new SplicerException(Resources.ErrorNoAudioStreamToRender);
            }

            if (outputFile == null)
            {
                throw new SplicerException(Resources.ErrorInvalidOutputFileName);
            }

            // Contains useful routines for creating the graph
            var graphBuilder = (ICaptureGraphBuilder2) new CaptureGraphBuilder2();
            Cleanup.Add(graphBuilder);

            try
            {
                hr = graphBuilder.SetFiltergraph(Graph);
                DESError.ThrowExceptionForHR(hr);

                IBaseFilter wavDestFilter = StandardFilters.RenderWavDestination(Cleanup, Graph);
                IBaseFilter fileSink = StandardFilters.RenderFileDestination(Cleanup, Graph, outputFile);

                try
                {
                    RenderGroups(graphBuilder, audioCompressor, null, wavDestFilter, audioParticipants, null);

                    FilterGraphTools.ConnectFilters(Graph, wavDestFilter, fileSink, true);

                    // if supplied, apply the media type to the filter
                    if (mediaType != null)
                    {
                        FilterGraphTools.SetFilterFormat(mediaType, audioCompressor);
                    }

                    DisableClock();
                }
                finally
                {
                    if (wavDestFilter != null) Marshal.ReleaseComObject(wavDestFilter);
                    if (fileSink != null) Marshal.ReleaseComObject(fileSink);
                }
            }
            finally
            {
                Marshal.ReleaseComObject(graphBuilder);
            }
        }
Example #8
        public WindowsMediaRenderer(ITimeline timeline, string file, string profileData,
                                    ICallbackParticipant[] videoParticipants,
                                    ICallbackParticipant[] audioParticipants)
            : base(timeline)
        {
            RenderToAsfWriter(file, profileData, videoParticipants, audioParticipants);

            ChangeState(RendererState.Initialized);
        }
Example #9
        private void RenderToAsfWriter(
            string file,
            string profileData,
            ICallbackParticipant[] videoParticipants,
            ICallbackParticipant[] audioParticipants)
        {
            int hr;

            if (file == null)
            {
                throw new SplicerException(Resources.ErrorInvalidOutputFileName);
            }

            // Contains useful routines for creating the graph
            var graphBuilder = (ICaptureGraphBuilder2) new CaptureGraphBuilder2();

            try
            {
                hr = graphBuilder.SetFiltergraph(Graph);
                DESError.ThrowExceptionForHR(hr);

                IBaseFilter pMux = StandardFilters.RenderAsfWriterWithProfile(Cleanup, Graph, profileData, file);

                ValidateAsfWriterIsSuitable(pMux);

                Cleanup.Add(pMux);

                try
                {
                    RenderGroups(graphBuilder, null, null, pMux, audioParticipants, videoParticipants);
                }
                finally
                {
                    Marshal.ReleaseComObject(pMux);
                }

                DisableClock();
            }
            finally
            {
                Marshal.ReleaseComObject(graphBuilder);
            }
        }
Example #10
        /// <summary>
        /// Renders to AVI.
        /// </summary>
        /// <param name="outputFile">The output file.</param>
        /// <param name="videoCompressor">The video compressor.</param>
        /// <param name="audioCompressor">The audio compressor.</param>
        /// <param name="videoParticipants">The video participants.</param>
        /// <param name="audioParticipants">The audio participants.</param>
        private void RenderToAVI(
            string outputFile,
            IBaseFilter videoCompressor,
            IBaseFilter audioCompressor,
            ICallbackParticipant[] videoParticipants,
            ICallbackParticipant[] audioParticipants)
        {
            if (string.IsNullOrEmpty(outputFile)) throw new ArgumentNullException("outputFile");
            if (FirstVideoGroup == null)
                throw new SplicerException(Resources.ErrorCanNotRenderAviWhenNoVideoGroupExists);

            int hr;

            // Contains useful routines for creating the graph
            var graphBuilder = (ICaptureGraphBuilder2) new CaptureGraphBuilder2();

            try
            {
                hr = graphBuilder.SetFiltergraph(Graph);
                DESError.ThrowExceptionForHR(hr);

                // Create the file writer
                IBaseFilter multiplexer = StandardFilters.RenderAviDestination(Cleanup, graphBuilder, outputFile);

                try
                {
                    RenderGroups(graphBuilder, audioCompressor, videoCompressor, multiplexer, audioParticipants,
                                 videoParticipants);
                }
                finally
                {
                    Marshal.ReleaseComObject(multiplexer);
                }

                DisableClock();
            }
            finally
            {
                Marshal.ReleaseComObject(graphBuilder);
            }
        }
Example #11
        public WavFileRenderer(ITimeline timeline, string outputFile, AudioFormat format,
                               ICallbackParticipant[] audioParticipants)
            : base(timeline)
        {
            AudioCompressor compressor = null;

            try
            {
                compressor = AudioCompressorFactory.Create(format);

                Cleanup.Add(compressor.Filter);

                RenderToWavDest(outputFile, compressor.Filter, compressor.MediaType, audioParticipants);

                ChangeState(RendererState.Initialized);
            }
            finally
            {
                if ((compressor != null) && (compressor.MediaType != null))
                {
                    DsUtils.FreeAMMediaType(compressor.MediaType);
                }
            }
        }
Example #12
        /// <summary>
        /// Configure the graph to output the results to a video window.
        /// </summary>
        /// <remarks>
        /// The callback routines are invoked once for each sample.  This allows for additional processing to
        /// be performed on the video or audio buffers.
        /// </remarks>
        /// <param name="windowHandle">Window handle to render to, or IntPtr.Zero to render to its own window</param>
        /// <param name="videoParticipants">Callback routine to be called for each video frame or null for no callback</param>
        /// <param name="audioParticipants">Callback routine to be called for each audio frame or null for no callback</param>
        private void RenderToWindow(IntPtr windowHandle, ICallbackParticipant[] videoParticipants,
                                    ICallbackParticipant[] audioParticipants)
        {
            int hr;
            IPin pin;
            IVideoWindow videoWindow;
            IAMTimelineObj group;
            IAMTimeline desTimeline = Timeline.DesTimeline;

            // Contains useful routines for creating the graph
            var graphBuilder = (ICaptureGraphBuilder2) new CaptureGraphBuilder2();

            try
            {
                hr = graphBuilder.SetFiltergraph(Graph);
                DESError.ThrowExceptionForHR(hr);

                int numGroups;
                hr = desTimeline.GetGroupCount(out numGroups);
                DESError.ThrowExceptionForHR(hr);

                // Walk the groups.  For DESCombine, there is one group that
                // contains all the video, and a second group for the audio.
                for (int i = 0; i < numGroups; i++)
                {
                    hr = desTimeline.GetGroup(out group, i);
                    DESError.ThrowExceptionForHR(hr);

                    try
                    {
                        // Inform the graph we will be previewing (rather than writing to disk)
                        var pTLGroup = (IAMTimelineGroup) group;
                        hr = pTLGroup.SetPreviewMode(true);
                        DESError.ThrowExceptionForHR(hr);
                    }
                    finally
                    {
                        // Release the group
                        Marshal.ReleaseComObject(group);
                    }

                    // Get the IPin for the current group
                    hr = RenderEngine.GetGroupOutputPin(i, out pin);
                    DESError.ThrowExceptionForHR(hr);

                    try
                    {
                        // If this is the video pin
                        if (FilterGraphTools.IsVideo(pin))
                        {
                            // Get a video renderer
                            var ibfVideoRenderer = (IBaseFilter) new VideoRenderer();

                            try
                            {
                                // Create a sample grabber, add it to the graph and connect it all up
                                var mcb =
                                    new CallbackHandler(videoParticipants);
                                RenderWindowHelper(graphBuilder, mcb, "Video", pin, ibfVideoRenderer);
                            }
                            finally
                            {
                                Marshal.ReleaseComObject(ibfVideoRenderer);
                            }
                        }
                        else
                        {
                            // Get an audio renderer
                            var ibfAudioRenderer = (IBaseFilter) new AudioRender();

                            try
                            {
                                // Create a sample grabber, add it to the graph and connect it all up
                                var mcb =
                                    new CallbackHandler(audioParticipants);
                                RenderWindowHelper(graphBuilder, mcb, "Audio", pin, ibfAudioRenderer);
                            }
                            finally
                            {
                                Marshal.ReleaseComObject(ibfAudioRenderer);
                            }
                        }
                    }
                    finally
                    {
                        Marshal.ReleaseComObject(pin);
                    }
                }

                // Configure the video window
                videoWindow = (IVideoWindow) Graph;

                // If a window handle was supplied, use it
                if (windowHandle != IntPtr.Zero)
                {
                    hr = videoWindow.put_Owner(windowHandle);
                    DESError.ThrowExceptionForHR(hr);
                }
                else
                {
                    // Use our own window

                    hr = videoWindow.put_Caption(Resources.DefaultVideoRenderingWindowCaption);
                    DESError.ThrowExceptionForHR(hr);

                    // since no user interaction is allowed, remove
                    // system menu and maximize/minimize buttons
                    WindowStyle lStyle = 0;
                    hr = videoWindow.get_WindowStyle(out lStyle);
                    DESError.ThrowExceptionForHR(hr);

                    lStyle &= ~(WindowStyle.MinimizeBox | WindowStyle.MaximizeBox | WindowStyle.SysMenu);
                    hr = videoWindow.put_WindowStyle(lStyle);
                    DESError.ThrowExceptionForHR(hr);
                }
            }
            finally
            {
                Marshal.ReleaseComObject(graphBuilder);
            }
        }
Example #13
        public WindowRenderer(ITimeline timeline, IntPtr windowHandle, ICallbackParticipant[] videoParticipants,
                              ICallbackParticipant[] audioParticipants)
            : base(timeline)
        {
            RenderToWindow(windowHandle, videoParticipants, audioParticipants);
        }
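A minimal usage sketch for the constructor above: passing IntPtr.Zero lets RenderToWindow create its own preview window, and null participant arrays mean no per-sample callbacks. Render() is assumed to come from the shared renderer base class, as in Example #1:

        // Hypothetical usage: preview the timeline in a window owned by the
        // renderer itself, with no callbacks on the video or audio samples.
        using (var renderer = new WindowRenderer(timeline, IntPtr.Zero, null, null))
        {
            renderer.Render();
        }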
Example #14
        protected void RenderGroups(ICaptureGraphBuilder2 graphBuilder, IBaseFilter audioCompressor,
                                    IBaseFilter videoCompressor,
                                    IBaseFilter audioDestination, IBaseFilter videoDestination,
                                    ICallbackParticipant[] audioParticipants,
                                    ICallbackParticipant[] videoParticipants)
        {
            int hr = 0;

            if (audioCompressor != null) _cleanup.Add(audioCompressor);
            if (videoCompressor != null) _cleanup.Add(videoCompressor);
            if (audioDestination != null) _cleanup.Add(audioDestination);
            if ((videoDestination != null) && (audioDestination != videoDestination)) _cleanup.Add(videoDestination);

            IAMTimeline desTimeline = _timeline.DesTimeline;

            int groupCount;
            hr = desTimeline.GetGroupCount(out groupCount);
            DESError.ThrowExceptionForHR(hr);

            // Walk the groups.  For this class, there is one group that
            // contains all the video, and a second group for the audio.
            for (int i = (groupCount - 1); i >= 0; i--)
            {
                IAMTimelineObj group;

                hr = desTimeline.GetGroup(out group, i);
                DESError.ThrowExceptionForHR(hr);

                try
                {
                    // Inform the graph we will be writing to disk (rather than previewing)
                    var timelineGroup = (IAMTimelineGroup) group;
                    hr = timelineGroup.SetPreviewMode(false);
                    DESError.ThrowExceptionForHR(hr);
                }
                finally
                {
                    Marshal.ReleaseComObject(group);
                }

                IPin pPin;

                // Get the IPin for the current group
                hr = _renderEngine.GetGroupOutputPin(i, out pPin);
                _cleanup.Add(pPin);
                DESError.ThrowExceptionForHR(hr);

                try
                {
                    if (FilterGraphTools.IsVideo(pPin))
                    {
                        // Create a sample grabber, add it to the graph and connect it all up
                        var mcb =
                            new CallbackHandler(videoParticipants);
                        RenderHelper(graphBuilder, mcb, "Video", pPin, videoCompressor, videoDestination);
                    }
                    else
                    {
                        // Create a sample grabber, add it to the graph and connect it all up
                        var mcb =
                            new CallbackHandler(audioParticipants);
                        RenderHelper(graphBuilder, mcb, "Audio", pPin, audioCompressor, audioDestination);
                    }
                }
                finally
                {
                    Marshal.ReleaseComObject(pPin);
                }
            }
        }
Example #15
        protected void RenderGroups(ICaptureGraphBuilder2 graphBuilder, IBaseFilter audioCompressor,
                                    IBaseFilter videoCompressor,
                                    IBaseFilter multiplexer, ICallbackParticipant[] audioParticipants,
                                    ICallbackParticipant[] videoParticipants)
        {
            RenderGroups(graphBuilder, audioCompressor, videoCompressor, multiplexer, multiplexer, audioParticipants,
                         videoParticipants);
        }