Example #1
        public void CanRenderAudioVideoAndImages()
        {
            using (ITimeline timeline = new DefaultTimeline())
            {
                IGroup audioGroup = timeline.AddAudioGroup();
                ITrack audioTrack = audioGroup.AddTrack();
                audioTrack.AddClip("..\\..\\testinput.mp3", GroupMediaType.Audio, InsertPosition.Absolute, 0, 0, 2);

                IGroup videoGroup = timeline.AddVideoGroup(24, 160, 100);
                ITrack videoTrack = videoGroup.AddTrack();
                videoTrack.AddClip("..\\..\\transitions.wmv", GroupMediaType.Video, InsertPosition.Relative, 0, 0, 1);
                videoTrack.AddClip("..\\..\\image1.jpg", GroupMediaType.Image, InsertPosition.Relative, 0, 0, 1);

                using (var renderer = new NullRenderer(timeline))
                {
                    ExecuteRenderer(renderer,
                                    @"<timeline framerate=""30.0000000"">
	<group type=""audio"" framerate=""30.0000000"" previewmode=""0"">
		<track>
			<clip start=""0"" stop=""2"" src=""..\..\testinput.mp3"" mstart=""0"" />
		</track>
	</group>
	<group type=""video"" bitdepth=""24"" width=""160"" height=""100"" framerate=""30.0000000"" previewmode=""0"">
		<track>
			<clip start=""0"" stop=""1"" src=""..\..\transitions.wmv"" mstart=""0"" />
			<clip start=""1"" stop=""2"" src=""..\..\image1.jpg"" />
		</track>
	</group>
</timeline>");
                }
            }
        }
Example #2
 protected void PrepareToExecute(ITimeline timeline, string expectedXml)
 {
     using (var renderer = new NullRenderer(timeline))
     {
         PrepareToExecute(renderer, expectedXml);
     }
 }
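A minimal sketch of the renderer-based overload this helper delegates to is shown below. It assumes the renderer can serialize the DES timeline it built to XML (the ToXml() call is an assumption; substitute whatever your Splicer build exposes) and uses the same NUnit-style assertions as the other examples.
 protected void PrepareToExecute(IRenderer renderer, string expectedXml)
 {
     // Assumption: the renderer exposes an XML serialization of the timeline it built.
     string actualXml = renderer.ToXml();

     // Compare against the expected timeline definition before any rendering takes place.
     Assert.AreEqual(expectedXml, actualXml);
 }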
Example #3
        public void RenderAudioAndVideo()
        {
            // create the timeline
            using (ITimeline timeline = new DefaultTimeline())
            {
                IGroup videoGroup = timeline.AddVideoGroup(24, 320, 240);
                ITrack videoTrack = videoGroup.AddTrack();
                videoTrack.AddClip("..\\..\\transitions.wmv", GroupMediaType.Video, InsertPosition.Relative, 0, 0, 2);

                IGroup audioGroup = timeline.AddAudioGroup();
                ITrack audioTrack = audioGroup.AddTrack();
                audioTrack.AddClip("..\\..\\testinput.mp3", GroupMediaType.Audio, InsertPosition.Relative, 0, 0, 2);

                // render the timeline
                using (var renderer = new NullRenderer(timeline))
                {
                    ExecuteRenderer(renderer,
                                    @"<timeline framerate=""30.0000000"">
	<group type=""video"" bitdepth=""24"" framerate=""30.0000000"" previewmode=""0"">
		<track>
			<clip start=""0"" stop=""2"" src=""..\..\transitions.wmv"" mstart=""0"" />
		</track>
	</group>
	<group type=""audio"" framerate=""30.0000000"" previewmode=""0"">
		<track>
			<clip start=""0"" stop=""2"" src=""..\..\testinput.mp3"" mstart=""0"" />
		</track>
	</group>
</timeline>");
                }
            }
        }
Example #4
        public void AddAndRemoveHandler()
        {
            bool eventTriggered = false;

            using (ITimeline timeline = new DefaultTimeline())
            {
                IGroup audioGroup = timeline.AddAudioGroup();
                ITrack track      = audioGroup.AddTrack();
                track.AddClip("testinput.mp3", GroupMediaType.Audio, InsertPosition.Absoloute, 0, 0, -1);

                using (NullRenderer renderer = new NullRenderer(timeline))
                {
                    EventHandler handler = new EventHandler(delegate
                    {
                        eventTriggered = true;
                    });

                    renderer.RenderCompleted += handler;
                    renderer.RenderCompleted -= handler;

                    renderer.BeginRender(null, null);
                    renderer.Cancel();

                    Assert.IsFalse(eventTriggered);
                }
            }
        }
Example #5
        private SampleGrabber BuildGrabber(string filterNamePrefix, IPin srcOutputPin,
                                           Guid majorType, Guid subType, Guid formatType, SampleGrabberCallback.BufferCBEventHandler callback)
        {
            // Create Filter
            SampleGrabber sampleGrabber = CreateSampleGrabber(majorType, subType, formatType, callback);
            NullRenderer  nullRenderer  = CreateNullRenderer();

            // Add Filter
            GraphBuilder.AddFilter(sampleGrabber as IBaseFilter, filterNamePrefix + " Sample Grabber");
            GraphBuilder.AddFilter(nullRenderer as IBaseFilter, filterNamePrefix + " Null Renderer");

            // Connect srcOutput -> grabberInput, grabberOutput -> rendererInput
            IPin grabberIn  = Util.FindInputPin(sampleGrabber as IBaseFilter);
            IPin grabberOut = Util.FindOutputPin(sampleGrabber as IBaseFilter);
            IPin rendererIn = Util.FindInputPin(nullRenderer as IBaseFilter);

            GraphBuilder.Connect(srcOutputPin, grabberIn);
            GraphBuilder.Connect(grabberOut, rendererIn);
            Util.FreePin(rendererIn);
            Util.FreePin(grabberOut);
            Util.FreePin(grabberIn);

            Marshal.ReleaseComObject(nullRenderer);
            return(sampleGrabber);
        }
Example #6
        public void InitDevice(DsDevice device, int iWidth, int iHeight)
        {
            int    hr;
            object camDevice;
            Guid   iid = typeof(IBaseFilter).GUID;

            device.Mon.BindToObject(null, null, ref iid, out camDevice);
            IBaseFilter camFilter = camDevice as IBaseFilter;

            m_CameraControl = camFilter as IAMCameraControl;
            m_VideoControl  = camFilter as IAMVideoProcAmp;
            ISampleGrabber sampGrabber = null;

            graphBuilder = (IGraphBuilder) new FilterGraph();

            //Create the Capture Graph Builder
            ICaptureGraphBuilder2 captureGraphBuilder = null;

            captureGraphBuilder = (ICaptureGraphBuilder2) new CaptureGraphBuilder2();

            // Attach the filter graph to the capture graph
            hr = captureGraphBuilder.SetFiltergraph(this.graphBuilder);
            DsError.ThrowExceptionForHR(hr);

            //Add the Video input device to the graph
            hr = graphBuilder.AddFilter(camFilter, "WebCam" + deviceNumber);
            DsError.ThrowExceptionForHR(hr);

            // Configure the sample grabber
            sampGrabber = new SampleGrabber() as ISampleGrabber;
            ConfigureSampleGrabber(sampGrabber);
            IBaseFilter sampGrabberBF = sampGrabber as IBaseFilter;

            //Add the Video compressor filter to the graph
            hr = graphBuilder.AddFilter(sampGrabberBF, "SampleGrabber" + deviceNumber);
            DsError.ThrowExceptionForHR(hr);

            IBaseFilter nullRender = new NullRenderer() as IBaseFilter;

            graphBuilder.AddFilter(nullRender, "NullRenderer" + deviceNumber);
            InitResolution(captureGraphBuilder, camFilter, iWidth, iHeight);

            hr = captureGraphBuilder.RenderStream(PinCategory.Capture, MediaType.Video, camDevice, sampGrabberBF, nullRender);
            DsError.ThrowExceptionForHR(hr);


            SaveSizeInfo(sampGrabber);

            Marshal.ReleaseComObject(sampGrabber);
            Marshal.ReleaseComObject(captureGraphBuilder);
        }
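InitDevice above (like several of the later examples) relies on a ConfigureSampleGrabber helper that is not part of this listing. A minimal sketch of a typical video implementation follows, assuming DirectShowLib; m_grabberCallback is an illustrative name for an ISampleGrabberCB field held by the class.
        private void ConfigureSampleGrabber(ISampleGrabber sampGrabber)
        {
            // Request 24-bit RGB video frames from the grabber.
            AMMediaType media = new AMMediaType();
            media.majorType  = MediaType.Video;
            media.subType    = MediaSubType.RGB24;
            media.formatType = FormatType.VideoInfo;

            int hr = sampGrabber.SetMediaType(media);
            DsError.ThrowExceptionForHR(hr);
            DsUtils.FreeAMMediaType(media);

            // Deliver each frame via BufferCB (method index 1) on the callback
            // object instead of buffering samples inside the filter.
            hr = sampGrabber.SetBufferSamples(false);
            DsError.ThrowExceptionForHR(hr);

            hr = sampGrabber.SetCallback(m_grabberCallback, 1); // m_grabberCallback: assumed ISampleGrabberCB field
            DsError.ThrowExceptionForHR(hr);
        }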
Example #7
        public void CancelBeforeStart()
        {
            using (ITimeline timeline = new DefaultTimeline())
            {
                IGroup audioGroup = timeline.AddAudioGroup();
                ITrack track      = audioGroup.AddTrack();
                track.AddClip("..\\..\\testinput.mp3", GroupMediaType.Audio, InsertPosition.Absolute, 0, 0, -1);

                using (var renderer = new NullRenderer(timeline))
                {
                    renderer.Cancel();
                }
            }
        }
Example #8
        public void CancelBeforeStart()
        {
            using (ITimeline timeline = new DefaultTimeline())
            {
                IGroup audioGroup = timeline.AddAudioGroup();
                ITrack track = audioGroup.AddTrack();
                track.AddClip("testinput.mp3", GroupMediaType.Audio, InsertPosition.Absolute, 0, 0, -1);

                using (var renderer = new NullRenderer(timeline))
                {
                    renderer.Cancel();
                }
            }
        }
Example #9
 public FrameGrabber(DsDevice camDevice)
 {
     IFilterGraph2 filterGraph;
     ICaptureGraphBuilder2 graphBuilder;
     IBaseFilter camBase, nullRenderer;
     ISampleGrabber sampleGrabber;
     filterGraph = new FilterGraph() as IFilterGraph2;
     mediaCtrl = filterGraph as IMediaControl;
     graphBuilder = new CaptureGraphBuilder2() as ICaptureGraphBuilder2;
     HRCheck(graphBuilder.SetFiltergraph(filterGraph));
     // Add camera
     HRCheck(filterGraph.AddSourceFilterForMoniker(
         camDevice.Mon, null, camDevice.Name, out camBase));
     // Add sample grabber
     sampleGrabber = new SampleGrabber() as ISampleGrabber;
     var mType = new AMMediaType()
     {
         majorType = MediaType.Video,
         subType = MediaSubType.RGB24,
         formatType = FormatType.VideoInfo
     };
     HRCheck(sampleGrabber.SetMediaType(mType));
     DsUtils.FreeAMMediaType(mType);
     HRCheck(sampleGrabber.SetCallback(this, 1));
     HRCheck(filterGraph.AddFilter(sampleGrabber as IBaseFilter, "CamGrabber"));
     // Add null renderer
     nullRenderer = new NullRenderer() as IBaseFilter;
     HRCheck(filterGraph.AddFilter(nullRenderer, "Null renderer"));
     // Render the webcam through the grabber and the renderer
     HRCheck(graphBuilder.RenderStream(PinCategory.Capture, MediaType.Video,
         camBase, sampleGrabber as IBaseFilter, nullRenderer));
     // Get resulting picture size
     mType = new AMMediaType();
     HRCheck(sampleGrabber.GetConnectedMediaType(mType));
     if (mType.formatType != FormatType.VideoInfo || mType.formatPtr == IntPtr.Zero)
     {
         throw new NotSupportedException("Unknown grabber media format");
     }
     var videoInfoHeader = Marshal.PtrToStructure(mType.formatPtr,
         typeof(VideoInfoHeader)) as VideoInfoHeader;
     width = videoInfoHeader.BmiHeader.Width;
     height = videoInfoHeader.BmiHeader.Height;
     Console.WriteLine("{0} x {1}", width, height); 
     stride = width * (videoInfoHeader.BmiHeader.BitCount / 8);
     DsUtils.FreeAMMediaType(mType);
     HRCheck(mediaCtrl.Run());
 }
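Because the constructor passes this to SetCallback with method index 1, FrameGrabber must implement ISampleGrabberCB and receives every frame through BufferCB. A minimal sketch of that part of the class is shown below; what happens to the copied frame is up to the consumer.
 // Only called if method index 0 had been requested; unused here.
 public int SampleCB(double sampleTime, IMediaSample pSample)
 {
     Marshal.ReleaseComObject(pSample);
     return 0;
 }

 // Called by the sample grabber for every frame (method index 1).
 public int BufferCB(double sampleTime, IntPtr pBuffer, int bufferLen)
 {
     // Copy the RGB24 frame out of DirectShow's buffer before returning;
     // width, height and stride were captured in the constructor above.
     byte[] frame = new byte[bufferLen];
     Marshal.Copy(pBuffer, frame, 0, bufferLen);
     // ... hand "frame" to whatever consumes the images ...
     return 0;
 }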
Example #10
        public void RenderToCompletion()
        {
            bool eventTriggered = false;

            using (ITimeline timeline = new DefaultTimeline())
            {
                IGroup audioGroup = timeline.AddAudioGroup();
                ITrack track      = audioGroup.AddTrack();
                track.AddClip("..\\..\\testinput.mp3", GroupMediaType.Audio, InsertPosition.Absolute, 0, 0, 1);

                using (var renderer = new NullRenderer(timeline))
                {
                    renderer.RenderCompleted += delegate { eventTriggered = true; };

                    renderer.Render();

                    Assert.AreEqual(RendererState.GraphCompleted, renderer.State);
                    Assert.IsTrue(eventTriggered);
                }
            }
        }
Example #11
        public void WriteSomeImages()
        {
            using (var timeline = new DefaultTimeline())
            {
                timeline.AddVideoGroup(24, 320, 240).AddTrack(); // we want 320x240 sized images
                timeline.AddVideo("..\\..\\transitions.wmv");

                var participant = new ImagesToDiskParticipant(24, 320, 240, Environment.CurrentDirectory, 1, 2, 3, 4, 5,
                                                              6, 7);

                using (var render = new NullRenderer(timeline, null, new ICallbackParticipant[] { participant }))
                {
                    render.Render();
                }

                for (int i = 0; i < 6; i++)
                {
                    Assert.IsTrue(File.Exists(string.Format("frame{0}.jpg", i)));
                }
            }
        }
Example #12
        public void UseInRender()
        {
            using (ITimeline timeline = new DefaultTimeline())
            {
                IGroup audioGroup = timeline.AddAudioGroup();
                ITrack rootTrack  = audioGroup.AddTrack();
                rootTrack.AddClip("testinput.wav", GroupMediaType.Audio, InsertPosition.Relative, 0, 0, 2);

                using (NullRenderer renderer = new NullRenderer(timeline, new ConsoleProgressCallback(), null))
                {
                    ExecuteRenderer(renderer,
                                    @"<timeline framerate=""30.0000000"">
    <group type=""audio"" framerate=""30.0000000"" previewmode=""0"">
        <track>
            <clip start=""0"" stop=""2"" src=""testinput.wav"" mstart=""0""/>
        </track>
    </group>
</timeline>");
                }
            }
        }
Example #13
        public void CancelRender()
        {
            bool eventTriggered = false;

            using (ITimeline timeline = new DefaultTimeline())
            {
                IGroup audioGroup = timeline.AddAudioGroup();
                ITrack track = audioGroup.AddTrack();
                track.AddClip("testinput.mp3", GroupMediaType.Audio, InsertPosition.Absolute, 0, 0, -1);

                using (var renderer = new NullRenderer(timeline))
                {
                    renderer.RenderCompleted += delegate { eventTriggered = true; };

                    renderer.BeginRender(null, null);
                    renderer.Cancel();

                    Assert.AreEqual(RendererState.Canceled, renderer.State);
                    Assert.IsTrue(eventTriggered);
                }
            }
        }
Example #14
        public void RenderAudio()
        {
            // create the timeline
            using (ITimeline timeline = new DefaultTimeline())
            {
                IGroup audioGroup = timeline.AddAudioGroup();
                ITrack rootTrack  = audioGroup.AddTrack();
                rootTrack.AddClip("..\\..\\testinput.wav", GroupMediaType.Audio, InsertPosition.Relative, 0, 0, 2);

                // render the timeline
                using (var renderer = new NullRenderer(timeline))
                {
                    ExecuteRenderer(renderer,
                                    @"<timeline framerate=""30.0000000"">
    <group type=""audio"" framerate=""30.0000000"" previewmode=""0"">
        <track>
            <clip start=""0"" stop=""2"" src=""..\..\testinput.wav"" mstart=""0""/>
        </track>
    </group>
</timeline>");
                }
            }
        }
Example #15
        public void UseInRender()
        {
            using (ITimeline timeline = new DefaultTimeline())
            {
                IGroup audioGroup = timeline.AddAudioGroup();
                ITrack rootTrack = audioGroup.AddTrack();
                rootTrack.AddClip("testinput.wav", GroupMediaType.Audio, InsertPosition.Relative, 0, 0, 2);

                using (
                    var renderer =
                        new NullRenderer(timeline, new ICallbackParticipant[] {new ConsoleProgressParticipant()}, null))
                {
                    ExecuteRenderer(renderer,
                                    @"<timeline framerate=""30.0000000"">
            <group type=""audio"" framerate=""30.0000000"" previewmode=""0"">
            <track>
            <clip start=""0"" stop=""2"" src=""testinput.wav"" mstart=""0""/>
            </track>
            </group>
            </timeline>");
                }
            }
        }
Example #16
        public void AddAndRemoveHandler()
        {
            bool eventTriggered = false;

            using (ITimeline timeline = new DefaultTimeline())
            {
                IGroup audioGroup = timeline.AddAudioGroup();
                ITrack track = audioGroup.AddTrack();
                track.AddClip("testinput.mp3", GroupMediaType.Audio, InsertPosition.Absolute, 0, 0, -1);

                using (var renderer = new NullRenderer(timeline))
                {
                    EventHandler handler = delegate { eventTriggered = true; };

                    renderer.RenderCompleted += handler;
                    renderer.RenderCompleted -= handler;

                    renderer.BeginRender(null, null);
                    renderer.Cancel();

                    Assert.IsFalse(eventTriggered);
                }
            }
        }
Example #17
        public void RenderAudio()
        {
            // create the timeline
            using (ITimeline timeline = new DefaultTimeline())
            {
                IGroup audioGroup = timeline.AddAudioGroup();
                ITrack rootTrack = audioGroup.AddTrack();
                rootTrack.AddClip("testinput.wav", GroupMediaType.Audio, InsertPosition.Relative, 0, 0, 2);

                // render the timeline
                using (var renderer = new NullRenderer(timeline))
                {
                    ExecuteRenderer(renderer,
                                    @"<timeline framerate=""30.0000000"">
            <group type=""audio"" framerate=""30.0000000"" previewmode=""0"">
            <track>
            <clip start=""0"" stop=""2"" src=""testinput.wav"" mstart=""0""/>
            </track>
            </group>
            </timeline>");
                }
            }
        }
Example #18
        /// <summary> build the capture graph for grabber. </summary>
        private void SetupGraph(DsDevice dev, int iSampleRate, int iChannels)
        {
            int hr;

            ISampleGrabber sampGrabber = null;
            IBaseFilter capFilter = null;
            ICaptureGraphBuilder2 capGraph = null;
            IBaseFilter baseGrabFlt = null;
            IBaseFilter nullrenderer = null;
            IMediaFilter mediaFilt = m_FilterGraph as IMediaFilter;

            // Get the graphbuilder object
            m_FilterGraph = (IFilterGraph2)new FilterGraph();
            m_mediaCtrl = m_FilterGraph as IMediaControl;
            try {
                // Get the ICaptureGraphBuilder2
                capGraph = (ICaptureGraphBuilder2)new CaptureGraphBuilder2();

                // Get the SampleGrabber interface
                sampGrabber = (ISampleGrabber)new SampleGrabber();

                // Start building the graph
                hr = capGraph.SetFiltergraph(m_FilterGraph);
                DsError.ThrowExceptionForHR(hr);

                // Add the audio device
                hr = m_FilterGraph.AddSourceFilterForMoniker(dev.Mon, null, "Audio input", out capFilter);
                DsError.ThrowExceptionForHR(hr);

                // If any of the default config items are set
                if (iSampleRate + iChannels > 0) {
                    SetConfigParms(capGraph, capFilter, iSampleRate, iChannels);
                }

                // The sample grabber was already created above; just get its IBaseFilter interface
                baseGrabFlt = sampGrabber as IBaseFilter;

                ConfigureSampleGrabber(sampGrabber);

                // Add the frame grabber to the graph
                hr = m_FilterGraph.AddFilter(baseGrabFlt, "Ds.NET Grabber");
                DsError.ThrowExceptionForHR(hr);

                // ---------------------------------
                // Connect the file filter to the sample grabber

                // Hopefully this will be the audio pin; we could check by reading its media type
                IPin iPinOut = DsFindPin.ByDirection(capFilter, PinDirection.Output, 0);

                // Get the input pin from the sample grabber
                IPin iPinIn = DsFindPin.ByDirection(baseGrabFlt, PinDirection.Input, 0);

                hr = m_FilterGraph.Connect(iPinOut, iPinIn);
                DsError.ThrowExceptionForHR(hr);

                // Add the null renderer to the graph
                nullrenderer = new NullRenderer() as IBaseFilter;
                hr = m_FilterGraph.AddFilter(nullrenderer, "Null renderer");
                DsError.ThrowExceptionForHR(hr);

                // ---------------------------------
                // Connect the sample grabber to the null renderer
                iPinOut = DsFindPin.ByDirection(baseGrabFlt, PinDirection.Output, 0);
                iPinIn = DsFindPin.ByDirection(nullrenderer, PinDirection.Input, 0);

                hr = m_FilterGraph.Connect(iPinOut, iPinIn);
                DsError.ThrowExceptionForHR(hr);

                // Read and cache the resulting settings
                SaveSizeInfo(sampGrabber);
            } finally {
                if (capFilter != null) {
                    Marshal.ReleaseComObject(capFilter);
                    capFilter = null;
                }
                if (sampGrabber != null) {
                    Marshal.ReleaseComObject(sampGrabber);
                    sampGrabber = null;
                }
                if (capGraph != null) {
                    Marshal.ReleaseComObject(capGraph);
                    capGraph = null;
                }
            }
        }
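SetupGraph above only builds the audio graph; nothing starts it. A minimal sketch of the companion start/stop methods, using the m_mediaCtrl field that SetupGraph assigns:
        /// <summary> Start delivering audio samples to the grabber. </summary>
        public void Start()
        {
            int hr = m_mediaCtrl.Run();
            DsError.ThrowExceptionForHR(hr);
        }

        /// <summary> Stop the running graph. </summary>
        public void Stop()
        {
            int hr = m_mediaCtrl.Stop();
            DsError.ThrowExceptionForHR(hr);
        }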
Example #19
        /// <summary>
        /// Builds a capture graph that routes the device's video through a sample grabber
        /// (for the supplied callback) into a null renderer.
        /// </summary>
        /// <param name="device">Video capture device to use</param>
        /// <param name="iFrameRate">Requested frame rate</param>
        /// <param name="iWidth">Requested frame width</param>
        /// <param name="iHeight">Requested frame height</param>
        /// <param name="grabberCallback">Callback that receives grabbed samples</param>
        /// <param name="subType">Requested media subtype (for example RGB24)</param>
        /// <returns>The last DirectShow HRESULT</returns>
        public int CaptureVideo(DsDevice device, int iFrameRate, int iWidth, int iHeight, ISampleGrabberCB grabberCallback, Guid subType)
        {
            int         hr           = 0;
            IBaseFilter sourceFilter = null;
            IBaseFilter renderFilter = null;

            try
            {
                // Get DirectShow interfaces
                hr = OpenInterfaces();

                // Attach the filter graph to the capture graph
                hr = this.captureGraphBuilder.SetFiltergraph(this.graphBuilder);
                DsError.ThrowExceptionForHR(hr);

                // Use the system device enumerator and class enumerator to find
                // a video capture/preview device, such as a desktop USB video camera.
                sourceFilter = SelectCaptureDevice(device);
                // Add Capture filter to graph.
                hr = this.graphBuilder.AddFilter(sourceFilter, "DirectShowCam");
                DsError.ThrowExceptionForHR(hr);

                // Configure preview settings.
                SetConfigParams(this.captureGraphBuilder, sourceFilter, iFrameRate, iWidth, iHeight, subType);

                // Initialize SampleGrabber.
                sampleGrabber = new SampleGrabber() as ISampleGrabber;
                // Configure SampleGrabber. Add preview callback.
                ConfigureSampleGrabber(sampleGrabber, subType, grabberCallback);
                // Add SampleGrabber to graph.
                hr = this.graphBuilder.AddFilter(sampleGrabber as IBaseFilter, "Frame Callback");
                DsError.ThrowExceptionForHR(hr);

                // Add the Null Render to the filter graph
                renderFilter = new NullRenderer() as IBaseFilter;
                hr           = this.graphBuilder.AddFilter(renderFilter, "NullRenderer");

                // Render the preview
                hr = this.captureGraphBuilder.RenderStream(PinCategory.Preview, MediaType.Video, sourceFilter, (sampleGrabber as IBaseFilter), renderFilter);
                //hr = this.captureGraphBuilder.RenderStream( PinCategory.Preview, MediaType.Video, sourceFilter, (sampleGrabber as IBaseFilter), null );
                DsError.ThrowExceptionForHR(hr);

                SaveSizeInfo(sampleGrabber);

                // Add our graph to the running object table, which will allow
                // the GraphEdit application to "spy" on our graph
                rot = new DsROTEntry(this.graphBuilder);

                // Start previewing video data
                //hr = this.mediaControl.Run();
                //DsError.ThrowExceptionForHR( hr );
            }
            catch
            {
                ////MessageBox.Show( "An unrecoverable error has occurred." );
            }
            finally
            {
                if (sourceFilter != null)
                {
                    Marshal.ReleaseComObject(sourceFilter);
                    sourceFilter = null;
                }

                if (sampleGrabber != null)
                {
                    Marshal.ReleaseComObject(sampleGrabber);
                    sampleGrabber = null;
                }
            }

            return(hr);
        }
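A hypothetical usage sketch for CaptureVideo follows; MyGrabberCallback and the capture field (an instance of the class above) are placeholder names, and DsDevice.GetDevicesOfCat is the usual DirectShowLib way to enumerate video capture devices.
        public void StartFirstCamera()
        {
            DsDevice[] devices = DsDevice.GetDevicesOfCat(FilterCategory.VideoInputDevice);
            if (devices.Length > 0)
            {
                ISampleGrabberCB callback = new MyGrabberCallback(); // placeholder ISampleGrabberCB implementation
                int hr = capture.CaptureVideo(devices[0], 30, 640, 480, callback, MediaSubType.RGB24);
                DsError.ThrowExceptionForHR(hr);
            }
        }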
Example #20
        public override void BuildGraph()
        {
            try
            {
                useWPF = Settings.UseWPF;

                int hr = 0;

                this.graphBuilder = (IFilterGraph2) new FilterGraph();
                rot = new DsROTEntry(this.graphBuilder);

                string extension = Path.GetExtension(this.fileName).ToLower();

                if (extension == ".ts")
                {
                    TsFileSource fileSource = new TsFileSource();
                    this.filterSource = fileSource as IBaseFilter;
                    if (this.filterSource != null)
                    {
                        this.graphBuilder.AddFilter(this.filterSource, "TsFileSource");

                        IFileSourceFilter interFaceFile = (IFileSourceFilter)fileSource;
                        interFaceFile.Load(this.fileName, null);

                        ITSFileSource tsFileSource = fileSource as ITSFileSource;
                        ushort        audioPid     = 0;
                        tsFileSource.GetAudioPid(ref audioPid);

                        ushort videoPid = 0;
                        tsFileSource.GetVideoPid(ref videoPid);

                        byte[] videoPidTypeByteBuffer = new byte[16];
                        tsFileSource.GetVideoPidType(videoPidTypeByteBuffer);

                        int posCharZero = 0;
                        for (; posCharZero < videoPidTypeByteBuffer.Length; posCharZero++)
                        {
                            if (videoPidTypeByteBuffer[posCharZero] == 0)
                            {
                                break;
                            }
                        }

                        char[] videoPidTypeCharBuffer = new char[posCharZero];
                        Array.Copy(videoPidTypeByteBuffer, 0, videoPidTypeCharBuffer, 0, posCharZero);
                        string videoPidType = new string(videoPidTypeCharBuffer);
                        // "MPEG 2", "H.264"


                        AddMPEG2DemuxFilter();

                        //IMpeg2Demultiplexer mpeg2Demultiplexer = this.mpeg2Demux as IMpeg2Demultiplexer;

                        ////Log.WriteFile(Log.LogType.Log, false, "DVBGraphBDA: create mpg4 video pin");
                        //AMMediaType mediaMPG4 = new AMMediaType();
                        //mediaMPG4.majorType = MediaType.Video;
                        //mediaMPG4.subType = new Guid(0x8d2d71cb, 0x243f, 0x45e3, 0xb2, 0xd8, 0x5f, 0xd7, 0x96, 0x7e, 0xc0, 0x9b);
                        //mediaMPG4.sampleSize = 0;
                        //mediaMPG4.temporalCompression = false;
                        //mediaMPG4.fixedSizeSamples = false;
                        //mediaMPG4.unkPtr = IntPtr.Zero;
                        //mediaMPG4.formatType = FormatType.Mpeg2Video;
                        //mediaMPG4.formatSize = Mpeg2ProgramVideo.GetLength(0);
                        //mediaMPG4.formatPtr = System.Runtime.InteropServices.Marshal.AllocCoTaskMem(mediaMPG4.formatSize);
                        //System.Runtime.InteropServices.Marshal.Copy(Mpeg2ProgramVideo, 0, mediaMPG4.formatPtr, mediaMPG4.formatSize);

                        //int hr = mpeg2Demultiplexer.CreateOutputPin(mediaMPG4, "MPG4", out this.pinDemuxerVideoMPEG4);
                        //if (this.pinDemuxerVideoMPEG4 != null)
                        //{
                        //    IMPEG2PIDMap mpeg2PIDMap = this.pinDemuxerVideoMPEG4 as IMPEG2PIDMap;
                        //    if (mpeg2PIDMap != null)
                        //        hr = mpeg2PIDMap.MapPID(1, new int[] { 0x00a2 }, MediaSampleContent.ElementaryStream);
                        //    Marshal.ReleaseComObject(this.pinDemuxerVideoMPEG4);
                        //}
                        ////if (hr < 0 || this.pinDemuxerVideoMPEG4 == null)
                        ////{

                        ////    _lastError = String.Format("failed to add mpg4 video pin");
                        ////    Log.WriteFile(Log.LogType.Log, true, "DVBGraphBDA:FAILED to create MPG4 pin:0x{0:X}", hr);
                        ////}


                        //DsDevice[] tunDevices = DeviceEnumerator.GetH264Devices();
                        //if (tunDevices.Length > 0)
                        //{
                        //    IBaseFilter elecardMPEG4VideoDecoder;
                        //    hr = this.graphBuilder.AddSourceFilterForMoniker(tunDevices[0].Mon, null, tunDevices[0].Name, out elecardMPEG4VideoDecoder);
                        //    DsError.ThrowExceptionForHR(hr);
                        //}

                        AddRenderers();
                        if (!useWPF)
                        {
                            ConfigureVMR9InWindowlessMode(2);
                        }

                        //IVMRMixerControl9 vmrMixerControl9 = this.videoRenderer as IVMRMixerControl9;
                        //vmrMixerControl9.SetZOrder(0, 1);



                        //// Connect the MPEG-2 Demux output pin for the "BDA MPEG2 Transport Information Filter"
                        //IPin pinOut = DsFindPin.ByDirection(this.filterSource, PinDirection.Output, 0);
                        //if (pinOut != null)
                        //{
                        //    hr = this.graphBuilder.Render(pinOut);
                        //    //DsError.ThrowExceptionForHR(hr);
                        //    // In fact the last pin don't render since i havn't added the BDA MPE Filter...
                        //    Marshal.ReleaseComObject(pinOut);
                        //}

                        //ConnectFilters();
                        //IPin pinOut = DsFindPin.ByDirection(this.mpeg2Demux, PinDirection.Output, 0);
                        //if (pinOut != null)
                        //{
                        //    hr = this.graphBuilder.Render(pinOut);
                        //    //DsError.ThrowExceptionForHR(hr);
                        //    // In fact the last pin don't render since i havn't added the BDA MPE Filter...
                        //    Marshal.ReleaseComObject(pinOut);
                        //}

                        //pinOut = DsFindPin.ByDirection(this.mpeg2Demux, PinDirection.Output, 1);
                        //if (pinOut != null)
                        //{
                        //    hr = this.graphBuilder.Render(pinOut);
                        //    //DsError.ThrowExceptionForHR(hr);
                        //    // In fact the last pin don't render since i havn't added the BDA MPE Filter...
                        //    Marshal.ReleaseComObject(pinOut);
                        //}

                        IPin pinOut = DsFindPin.ByDirection(this.filterSource, PinDirection.Output, 0);
                        if (pinOut != null)
                        {
                            hr = this.graphBuilder.Render(pinOut);
                            //DsError.ThrowExceptionForHR(hr);
                            // In fact the last pin doesn't render since I haven't added the BDA MPE Filter...
                            Marshal.ReleaseComObject(pinOut);
                        }

                        AddAndConnectNullRendererForWPF();

                        this.hostingControl.CurrentGraphBuilder = this;

                        OnGraphStarted();

                        return;
                    }
                }



                AddRenderers();
                if (!useWPF)
                {
                    ConfigureVMR9InWindowlessMode();
                }

                this.graphBuilder.RenderFile(this.fileName, null);

                //AddAndConnectNullRendererForWPF();
                if (useWPF)
                {
                    // In order to keep the audio/video in sync, we need the NullRenderer
                    IBaseFilter nullRenderer = new NullRenderer() as IBaseFilter;
                    hr = graphBuilder.AddFilter(nullRenderer, "NullRenderer");
                    ThrowExceptionForHR("Adding the NullRenderer: ", hr);

                    IPin pinOutFromFilterOut = DsFindPin.ByDirection(this.videoRenderer, PinDirection.Output, 0);
                    if (pinOutFromFilterOut != null)
                    {
                        UnRender(pinOutFromFilterOut);
                        try
                        {
                            IPin pinInFromFilterOut = DsFindPin.ByDirection(nullRenderer, PinDirection.Input, 0);
                            if (pinInFromFilterOut != null)
                            {
                                try
                                {
                                    hr = this.graphBuilder.Connect(pinOutFromFilterOut, pinInFromFilterOut);
                                }
                                finally
                                {
                                    Marshal.ReleaseComObject(pinInFromFilterOut);
                                }
                            }
                        }
                        finally
                        {
                            Marshal.ReleaseComObject(pinOutFromFilterOut);
                        }
                    }

                    //IPin pinOutFromFilterOut = DsFindPin.ByDirection(this.videoRenderer, PinDirection.Output, 0);
                    //if (pinOutFromFilterOut != null)
                    //{
                    //    hr = this.graphBuilder.Render(pinOutFromFilterOut);
                    //    Marshal.ReleaseComObject(pinOutFromFilterOut);
                    //}

                    WpfUpdateVideoSize();                     //WPF
                }

                this.hostingControl.CurrentGraphBuilder = this;

                OnGraphStarted();
            }
            catch (Exception)
            {
                Decompose();
                throw; // rethrow without losing the original stack trace
            }
        }
Example #21
        DSStreamResultCodes InitWithStreamBufferFile(WTVStreamingVideoRequest strq)
        {
            // Init variables
            //IPin[] pin = new IPin[1];
            IBaseFilter DecFilterAudio   = null;
            IBaseFilter DecFilterVideo   = null;
            IBaseFilter MainAudioDecoder = null;
            IBaseFilter MainVideoDecoder = null;
            string      dPin             = string.Empty;
            string      sName            = string.Empty;
            string      dName            = string.Empty;
            string      sPin             = string.Empty;
            FileInfo    fiInputFile      = new FileInfo(strq.FileName);
            string      txtOutputFNPath  = fiInputFile.FullName + ".wmv";

            if (
                (!fiInputFile.Extension.ToLowerInvariant().Equals(".wtv")) &&
                (!fiInputFile.Extension.ToLowerInvariant().Equals(".dvr-ms"))
                )
            {
                return(DSStreamResultCodes.ErrorInvalidFileType);
            }

            int hr = 0;

            try
            {
                // Get the graphbuilder interface
                SendDebugMessage("Creating Graph Object", 0);
                IGraphBuilder graphbuilder = (IGraphBuilder)currentFilterGraph;

                // Add the DVRMS/WTV file / filter to the graph
                SendDebugMessage("Add SBE Source Filter", 0);

                hr = graphbuilder.AddSourceFilter(fiInputFile.FullName, "SBE Filter", out currentSBEfilter); // class variable
                DsError.ThrowExceptionForHR(hr);
                dc.Add(currentSBEfilter);

                // Get the SBE audio and video out pins
                IPin SBEVidOutPin, SBEAudOutPin;
                SBEAudOutPin = FilterGraphTools.FindPinByMediaType(currentSBEfilter, PinDirection.Output, MediaType.Audio, MediaSubType.Null);
                SBEVidOutPin = FilterGraphTools.FindPinByMediaType(currentSBEfilter, PinDirection.Output, MediaType.Video, MediaSubType.Null);

                // Set up two decrypt filters according to file extension (assume audio and video both present )
                if (fiInputFile.Extension.ToLowerInvariant().Equals(".dvr-ms"))
                {
                    // Add DVR-MS decrypt filters
                    SendDebugMessage("Add DVRMS (bda) decryption", 0);
                    DecFilterAudio = (IBaseFilter) new DTFilter();  // THESE ARE FOR DVR-MS (BDA DTFilters)
                    DecFilterVideo = (IBaseFilter) new DTFilter();
                    graphbuilder.AddFilter(DecFilterAudio, "Decrypt / Tag");
                    graphbuilder.AddFilter(DecFilterVideo, "Decrypt / Tag 0001");
                }
                else  // Add WTV decrypt filters
                {
                    SendDebugMessage("Add WTV (pbda) decryption", 0);
                    DecFilterAudio = FilterDefinition.AddToFilterGraph(FilterDefinitions.Decrypt.DTFilterPBDA, ref graphbuilder);
                    DecFilterVideo = FilterDefinition.AddToFilterGraph(FilterDefinitions.Decrypt.DTFilterPBDA, ref graphbuilder, "PBDA DTFilter 0001");
                }
                dc.Add(DecFilterAudio);
                dc.Add(DecFilterVideo);

                // Make the first link in the graph: SBE => Decrypts
                SendDebugMessage("Connect SBE => Decrypt filters", 0);
                IPin DecVideoInPin = DsFindPin.ByDirection(DecFilterVideo, PinDirection.Input, 0);
                FilterGraphTools.ConnectFilters(graphbuilder, SBEVidOutPin, DecVideoInPin, false);
                IPin DecAudioInPin = DsFindPin.ByDirection(DecFilterAudio, PinDirection.Input, 0);
                if (DecAudioInPin == null)
                {
                    SendDebugMessage("WARNING: No Audio Input to decrypt filter.");
                }
                else
                {
                    FilterGraphTools.ConnectFilters(graphbuilder, SBEAudOutPin, DecAudioInPin, false);
                }

                // Get Dec Audio Out pin
                IPin DecAudioOutPin = DsFindPin.ByDirection(DecFilterAudio, PinDirection.Output, 0);

                // Examine Dec Audio out for audio format
                SendDebugMessage("Examining source audio", 0);
                AMMediaType AudioMediaType = null;
                getPinMediaType(DecAudioOutPin, MediaType.Audio, Guid.Empty, Guid.Empty, ref AudioMediaType);
                SendDebugMessage("Audio media subtype: " + AudioMediaType.subType.ToString());
                SendDebugMessage("Examining Audio StreamInfo");
                StreamInfo si         = FileInformation.GetStreamInfo(AudioMediaType);
                bool       AudioIsAC3 = (si.SimpleType == "AC-3");
                if (AudioIsAC3)
                {
                    SendDebugMessage("Audio type is AC3");
                }
                else
                {
                    SendDebugMessage("Audio type is not AC3");
                }
                si = null;
                DsUtils.FreeAMMediaType(AudioMediaType);

                // Add an appropriate audio decoder
                if (AudioIsAC3)
                {
                    if (!FilterGraphTools.IsThisComObjectInstalled(FilterDefinitions.Audio.AudioDecoderMPCHC.CLSID))
                    {
                        SendDebugMessage("Missing AC3 Audio Decoder, and AC3 audio detected.");
                        return(DSStreamResultCodes.ErrorAC3CodecNotFound);
                    }
                    else
                    {
                        MainAudioDecoder = FilterDefinition.AddToFilterGraph(FilterDefinitions.Audio.AudioDecoderMPCHC, ref graphbuilder);   //MainAudioDecoder = FatAttitude.WTVTranscoder.FilterDefinitions.Audio.AudioDecoderFFDShow.AddToFilterGraph(ref graph);
                        Guid tmpGuid; MainAudioDecoder.GetClassID(out tmpGuid);
                        SendDebugMessage("Main Audio decoder CLSID is " + tmpGuid.ToString());
                    }
                }
                else
                {
                    MainAudioDecoder = FilterDefinition.AddToFilterGraph(FilterDefinitions.Audio.AudioDecoderMSDTV, ref graphbuilder);
                }

                // Add a video decoder
                SendDebugMessage("Add DTV decoder", 0);
                MainVideoDecoder = FilterDefinition.AddToFilterGraph(FilterDefinitions.Video.VideoDecoderMSDTV, ref graphbuilder);
                dc.Add(MainAudioDecoder);
                dc.Add(MainVideoDecoder);

                //SetAudioDecoderOutputToPCMStereo(MainAudioDecoder);

                // Add a null renderer
                SendDebugMessage("Add null renderer", 0);
                NullRenderer MyNullRenderer = new NullRenderer();
                dc.Add(MyNullRenderer);
                hr = graphbuilder.AddFilter((IBaseFilter)MyNullRenderer, @"Null Renderer");
                DsError.ThrowExceptionForHR(hr);

                // Link up video through to null renderer
                SendDebugMessage("Connect video to null renderer", 0);
                // Make the second link:  Decrypts => DTV
                IPin DecVideoOutPin = DsFindPin.ByDirection(DecFilterVideo, PinDirection.Output, 0);
                IPin DTVVideoInPin  = DsFindPin.ByName(MainVideoDecoder, @"Video Input"); // IPin DTVVideoInPin = DsFindPin.ByDirection(DTVVideoDecoder, PinDirection.Input, 0);  // first one should be video input?  //
                FilterGraphTools.ConnectFilters(graphbuilder, DecVideoOutPin, DTVVideoInPin, false);
                // 3. DTV => Null renderer
                IPin NullRInPin     = DsFindPin.ByDirection((IBaseFilter)MyNullRenderer, PinDirection.Input, 0);
                IPin DTVVideoOutPin = FilterGraphTools.FindPinByMediaType(MainVideoDecoder, PinDirection.Output, MediaType.Video, MediaSubType.Null);
                FilterGraphTools.ConnectFilters(graphbuilder, DTVVideoOutPin, NullRInPin, false);
                Marshal.ReleaseComObject(NullRInPin); NullRInPin = null;

                // Run graph [can use this also to get media type => see, e.g. dvrmstowmvhd by Babgvant]
                SendDebugMessage("Run graph for testing purposes", 0);
                IMediaControl tempControl = (IMediaControl)graphbuilder;
                IMediaEvent   tempEvent   = (IMediaEvent)graphbuilder;
                DsError.ThrowExceptionForHR(tempControl.Pause());
                DsError.ThrowExceptionForHR(tempControl.Run());
                EventCode pEventCode;
                hr = tempEvent.WaitForCompletion(1000, out pEventCode);
                //DsError.ThrowExceptionForHR(hr);  // DO *NOT* DO THIS HERE!  THERE MAY WELL BE AN ERROR DUE TO EVENTS RAISED BY THE STREAM BUFFER ENGINE, THIS IS A DELIBERATE TEST RUN OF THE GRAPH
                // Stop graph if necessary
                FilterState pFS;
                hr = tempControl.GetState(1000, out pFS);
                if (pFS == FilterState.Running)
                {
                    DsError.ThrowExceptionForHR(tempControl.Stop());
                }

                // Remove null renderer
                hr = graphbuilder.RemoveFilter((IBaseFilter)MyNullRenderer);

                // Now graph has been run and stopped we can get the video width and height from the output pin of the main video decoder
                AMMediaType pmt = null;
                getPinMediaType(DTVVideoOutPin, MediaType.Video, MediaSubType.YUY2, Guid.Empty, ref pmt);
                FrameSize SourceFrameSize;
                if (pmt.formatType == FormatType.VideoInfo2)
                {
                    VideoInfoHeader2 pvih2 = new VideoInfoHeader2();
                    Marshal.PtrToStructure(pmt.formatPtr, pvih2);
                    int VideoWidth  = pvih2.BmiHeader.Width;
                    int VideoHeight = pvih2.BmiHeader.Height;
                    SourceFrameSize = new FrameSize(VideoWidth, VideoHeight);
                }
                else
                {
                    SourceFrameSize = new FrameSize(320, 240);
                }

                // Free up
                DsUtils.FreeAMMediaType(pmt); pmt = null;

                // Link up audio
                // 2. Audio Decrypt -> Audio decoder
                IPin MainAudioInPin = DsFindPin.ByDirection(MainAudioDecoder, PinDirection.Input, 0);
                FilterGraphTools.ConnectFilters(graphbuilder, DecAudioOutPin, MainAudioInPin, false);

                // Add ASF Writer
                // Create an ASF writer filter
                SendDebugMessage("Creating ASF Writer", 0);
                WMAsfWriter asf_filter = new WMAsfWriter();
                dc.Add(asf_filter);                            // CHECK FOR ERRORS
                currentOutputFilter = (IBaseFilter)asf_filter; // class variable
                // Add the ASF filter to the graph
                hr = graphbuilder.AddFilter((IBaseFilter)asf_filter, "WM Asf Writer");
                DsError.ThrowExceptionForHR(hr);

                // Set the filename
                IFileSinkFilter sinkFilter = (IFileSinkFilter)asf_filter;
                string          destPathFN = fiInputFile.FullName + ".wmv";
                hr = sinkFilter.SetFileName(destPathFN, null);
                DsError.ThrowExceptionForHR(hr);

                // Make the final links:  DTV => writer
                SendDebugMessage("Linking audio/video through to decoder and writer", 0);
                IPin DTVAudioOutPin   = DsFindPin.ByDirection(MainAudioDecoder, PinDirection.Output, 0);
                IPin ASFAudioInputPin = FilterGraphTools.FindPinByMediaType((IBaseFilter)asf_filter, PinDirection.Input, MediaType.Audio, MediaSubType.Null);
                IPin ASFVideoInputPin = FilterGraphTools.FindPinByMediaType((IBaseFilter)asf_filter, PinDirection.Input, MediaType.Video, MediaSubType.Null);
                FilterGraphTools.ConnectFilters(graphbuilder, DTVAudioOutPin, ASFAudioInputPin, false);
                if (ASFVideoInputPin != null)
                {
                    FilterGraphTools.ConnectFilters(graphbuilder, DTVVideoOutPin, ASFVideoInputPin, false);
                }

                // Configure ASFWriter
                ConfigureASFWriter(asf_filter, strq, SourceFrameSize);

                // Release pins
                SendDebugMessage("Releasing COM objects (pins)", 0);
                // dec
                Marshal.ReleaseComObject(DecAudioInPin); DecAudioInPin   = null;
                Marshal.ReleaseComObject(DecVideoInPin); DecVideoInPin   = null;
                Marshal.ReleaseComObject(DecVideoOutPin); DecVideoOutPin = null;
                Marshal.ReleaseComObject(DecAudioOutPin); DecAudioOutPin = null;
                // dtv
                Marshal.ReleaseComObject(MainAudioInPin); MainAudioInPin = null;
                Marshal.ReleaseComObject(DTVVideoInPin); DTVVideoInPin   = null;
                Marshal.ReleaseComObject(DTVVideoOutPin); DTVVideoOutPin = null;
                Marshal.ReleaseComObject(DTVAudioOutPin); DTVAudioOutPin = null;
                // asf
                Marshal.ReleaseComObject(ASFAudioInputPin); ASFAudioInputPin = null;
                Marshal.ReleaseComObject(ASFVideoInputPin); ASFVideoInputPin = null;
            }
            catch (Exception ex)
            {
                SendDebugMessageWithException(ex.Message, ex);
                return(DSStreamResultCodes.ErrorExceptionOccurred);
            }

            return(DSStreamResultCodes.OK);
        }
Example #22
        public override void BuildGraph()
        {
            try
            {
                useWPF = Settings.UseWPF;

                int hr = 0;

                this.graphBuilder = (IFilterGraph2)new FilterGraph();
                rot = new DsROTEntry(this.graphBuilder);

                string extension = Path.GetExtension(this.fileName).ToLower();

                if (extension == ".ts")
                {
                    TsFileSource fileSource = new TsFileSource();
                    this.filterSource = fileSource as IBaseFilter;
                    if (this.filterSource != null)
                    {
                        this.graphBuilder.AddFilter(this.filterSource, "TsFileSource");

                        IFileSourceFilter interFaceFile = (IFileSourceFilter)fileSource;
                        interFaceFile.Load(this.fileName, null);

                        ITSFileSource tsFileSource = fileSource as ITSFileSource;
                        ushort audioPid = 0;
                        tsFileSource.GetAudioPid(ref audioPid);

                        ushort videoPid = 0;
                        tsFileSource.GetVideoPid(ref videoPid);

                        byte[] videoPidTypeByteBuffer = new byte[16];
                        tsFileSource.GetVideoPidType(videoPidTypeByteBuffer);

                        int posCharZero = 0;
                        for (; posCharZero < videoPidTypeByteBuffer.Length; posCharZero++) if (videoPidTypeByteBuffer[posCharZero] == 0) break;

                        char[] videoPidTypeCharBuffer = new char[posCharZero];
                        Array.Copy(videoPidTypeByteBuffer, 0, videoPidTypeCharBuffer, 0, posCharZero);
                        string videoPidType = new string(videoPidTypeCharBuffer);
                        // "MPEG 2", "H.264"

                        AddMPEG2DemuxFilter();

                        //IMpeg2Demultiplexer mpeg2Demultiplexer = this.mpeg2Demux as IMpeg2Demultiplexer;

                        ////Log.WriteFile(Log.LogType.Log, false, "DVBGraphBDA: create mpg4 video pin");
                        //AMMediaType mediaMPG4 = new AMMediaType();
                        //mediaMPG4.majorType = MediaType.Video;
                        //mediaMPG4.subType = new Guid(0x8d2d71cb, 0x243f, 0x45e3, 0xb2, 0xd8, 0x5f, 0xd7, 0x96, 0x7e, 0xc0, 0x9b);
                        //mediaMPG4.sampleSize = 0;
                        //mediaMPG4.temporalCompression = false;
                        //mediaMPG4.fixedSizeSamples = false;
                        //mediaMPG4.unkPtr = IntPtr.Zero;
                        //mediaMPG4.formatType = FormatType.Mpeg2Video;
                        //mediaMPG4.formatSize = Mpeg2ProgramVideo.GetLength(0);
                        //mediaMPG4.formatPtr = System.Runtime.InteropServices.Marshal.AllocCoTaskMem(mediaMPG4.formatSize);
                        //System.Runtime.InteropServices.Marshal.Copy(Mpeg2ProgramVideo, 0, mediaMPG4.formatPtr, mediaMPG4.formatSize);

                        //int hr = mpeg2Demultiplexer.CreateOutputPin(mediaMPG4, "MPG4", out this.pinDemuxerVideoMPEG4);
                        //if (this.pinDemuxerVideoMPEG4 != null)
                        //{
                        //    IMPEG2PIDMap mpeg2PIDMap = this.pinDemuxerVideoMPEG4 as IMPEG2PIDMap;
                        //    if (mpeg2PIDMap != null)
                        //        hr = mpeg2PIDMap.MapPID(1, new int[] { 0x00a2 }, MediaSampleContent.ElementaryStream);
                        //    Marshal.ReleaseComObject(this.pinDemuxerVideoMPEG4);
                        //}
                        ////if (hr < 0 || this.pinDemuxerVideoMPEG4 == null)
                        ////{

                        ////    _lastError = String.Format("failed to add mpg4 video pin");
                        ////    Log.WriteFile(Log.LogType.Log, true, "DVBGraphBDA:FAILED to create MPG4 pin:0x{0:X}", hr);
                        ////}

                        //DsDevice[] tunDevices = DeviceEnumerator.GetH264Devices();
                        //if (tunDevices.Length > 0)
                        //{
                        //    IBaseFilter elecardMPEG4VideoDecoder;
                        //    hr = this.graphBuilder.AddSourceFilterForMoniker(tunDevices[0].Mon, null, tunDevices[0].Name, out elecardMPEG4VideoDecoder);
                        //    DsError.ThrowExceptionForHR(hr);
                        //}

                        AddRenderers();
                        if (!useWPF)
                            ConfigureVMR9InWindowlessMode(2);

                        //IVMRMixerControl9 vmrMixerControl9 = this.videoRenderer as IVMRMixerControl9;
                        //vmrMixerControl9.SetZOrder(0, 1);

                        //// Connect the MPEG-2 Demux output pin for the "BDA MPEG2 Transport Information Filter"
                        //IPin pinOut = DsFindPin.ByDirection(this.filterSource, PinDirection.Output, 0);
                        //if (pinOut != null)
                        //{
                        //    hr = this.graphBuilder.Render(pinOut);
                        //    //DsError.ThrowExceptionForHR(hr);
                        //    // In fact the last pin don't render since i havn't added the BDA MPE Filter...
                        //    Marshal.ReleaseComObject(pinOut);
                        //}

                        //ConnectFilters();
                        //IPin pinOut = DsFindPin.ByDirection(this.mpeg2Demux, PinDirection.Output, 0);
                        //if (pinOut != null)
                        //{
                        //    hr = this.graphBuilder.Render(pinOut);
                        //    //DsError.ThrowExceptionForHR(hr);
                        //    // In fact the last pin don't render since i havn't added the BDA MPE Filter...
                        //    Marshal.ReleaseComObject(pinOut);
                        //}

                        //pinOut = DsFindPin.ByDirection(this.mpeg2Demux, PinDirection.Output, 1);
                        //if (pinOut != null)
                        //{
                        //    hr = this.graphBuilder.Render(pinOut);
                        //    //DsError.ThrowExceptionForHR(hr);
                        //    // In fact the last pin don't render since i havn't added the BDA MPE Filter...
                        //    Marshal.ReleaseComObject(pinOut);
                        //}

                        IPin pinOut = DsFindPin.ByDirection(this.filterSource, PinDirection.Output, 0);
                        if (pinOut != null)
                        {
                            hr = this.graphBuilder.Render(pinOut);
                            //DsError.ThrowExceptionForHR(hr);
                            // In fact the last pin doesn't render since I haven't added the BDA MPE Filter...
                            Marshal.ReleaseComObject(pinOut);
                        }

                        AddAndConnectNullRendererForWPF();

                        this.hostingControl.CurrentGraphBuilder = this;

                        OnGraphStarted();

                        return;
                    }
                }

                AddRenderers();
                if (!useWPF)
                    ConfigureVMR9InWindowlessMode();

                this.graphBuilder.RenderFile(this.fileName, null);

                //AddAndConnectNullRendererForWPF();
                if (useWPF)
                {
                    // In order to keep the audio/video in sync, we need the NullRenderer
                    IBaseFilter nullRenderer = new NullRenderer() as IBaseFilter;
                    hr = graphBuilder.AddFilter(nullRenderer, "NullRenderer");
                    ThrowExceptionForHR("Adding the NullRenderer: ", hr);

                    IPin pinOutFromFilterOut = DsFindPin.ByDirection(this.videoRenderer, PinDirection.Output, 0);
                    if (pinOutFromFilterOut != null)
                    {
                        UnRender(pinOutFromFilterOut);
                        try
                        {
                            IPin pinInFromFilterOut = DsFindPin.ByDirection(nullRenderer, PinDirection.Input, 0);
                            if (pinInFromFilterOut != null)
                            {
                                try
                                {
                                    hr = this.graphBuilder.Connect(pinOutFromFilterOut, pinInFromFilterOut);
                                }
                                finally
                                {
                                    Marshal.ReleaseComObject(pinInFromFilterOut);
                                }
                            }
                        }
                        finally
                        {
                            Marshal.ReleaseComObject(pinOutFromFilterOut);
                        }
                    }

                    //IPin pinOutFromFilterOut = DsFindPin.ByDirection(this.videoRenderer, PinDirection.Output, 0);
                    //if (pinOutFromFilterOut != null)
                    //{
                    //    hr = this.graphBuilder.Render(pinOutFromFilterOut);
                    //    Marshal.ReleaseComObject(pinOutFromFilterOut);
                    //}

                    WpfUpdateVideoSize(); //WPF
                }

                this.hostingControl.CurrentGraphBuilder = this;

                OnGraphStarted();
            }
            catch
            {
                Decompose();
                throw; // rethrow without resetting the stack trace
            }
        }
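The WPF branch above calls UnRender(pinOutFromFilterOut) before reconnecting the video renderer's output to the NullRenderer, but UnRender itself is not part of this listing. The sketch below is an assumption about what it does; it only breaks the connection and removes the immediate downstream filter, whereas the original may walk further down the chain.

        // Assumed helper: break the connection on pinOut and drop the filter it fed.
        private void UnRender(IPin pinOut)
        {
            IPin connectedPin;
            int hr = pinOut.ConnectedTo(out connectedPin);
            if (hr < 0 || connectedPin == null)
                return; // nothing is rendered from this pin

            try
            {
                PinInfo pinInfo;
                hr = connectedPin.QueryPinInfo(out pinInfo);
                DsError.ThrowExceptionForHR(hr);

                // Disconnect both ends, then remove the downstream filter from the graph.
                this.graphBuilder.Disconnect(pinOut);
                this.graphBuilder.Disconnect(connectedPin);
                if (pinInfo.filter != null)
                {
                    this.graphBuilder.RemoveFilter(pinInfo.filter);
                    Marshal.ReleaseComObject(pinInfo.filter);
                }
            }
            finally
            {
                Marshal.ReleaseComObject(connectedPin);
            }
        }
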
Example #23
0
        void ReleaseInterfaces()
        {
            //Marshal.ReleaseComObject(this.win); 
            //this.win = null;

            Marshal.ReleaseComObject(this.mediaControl); this.mediaControl = null;
            //Marshal.ReleaseComObject(this.mediaEventEx); this.mediaEventEx = null;
            Marshal.ReleaseComObject(this.graphBuilder); this.graphBuilder = null;
            Marshal.ReleaseComObject(this.captureGraphBuilder); this.captureGraphBuilder = null;
            Marshal.ReleaseComObject(this.sampleGrabber); this.sampleGrabber = null;
            Marshal.ReleaseComObject(this.nullRenderer); this.nullRenderer = null;

            // these filter references are casts of objects released above, so just clear them
            fsourceBaseFilter = null;
            grabberFilter = null;

        }
Example #24
0
        void AcquireInterfaces()
        {
            graphBuilder = (IGraphBuilder)new FilterGraph();
            captureGraphBuilder = (ICaptureGraphBuilder2)new CaptureGraphBuilder2();
            mediaControl = (IMediaControl)graphBuilder;
            //win = (IVideoWindow)graphBuilder;

            // Null Renderer
            nullRenderer = new NullRenderer();
            nullRendererFilter = (IBaseFilter)nullRenderer;

            // Sample grabber
            sampleGrabber = (ISampleGrabber)new SampleGrabber();
            grabberFilter = (IBaseFilter)sampleGrabber;

        }
Example #25
0
        /// <summary> Build the capture graph for grabber and renderer.
        /// (Control to show video in, Filename to play) </summary>
        private void SetupGraph(string FileName)
        {
            int hr;

            // Get the graphbuilder object
            m_FilterGraph = new FilterGraph() as IFilterGraph2;

            // Get a ICaptureGraphBuilder2 to help build the graph
            ICaptureGraphBuilder2 icgb2 = new CaptureGraphBuilder2() as ICaptureGraphBuilder2;

            try
            {
                // Link the ICaptureGraphBuilder2 to the IFilterGraph2
                hr = icgb2.SetFiltergraph(m_FilterGraph);
                DsError.ThrowExceptionForHR(hr);

                // Add the filters necessary to render the file.  This function will
                // work with a number of different file types.
                IBaseFilter sourceFilter = null;
                hr = m_FilterGraph.AddSourceFilter(FileName, FileName, out sourceFilter);
                DsError.ThrowExceptionForHR(hr);

                // Get the SampleGrabber interface
                m_sampGrabber = (ISampleGrabber)new SampleGrabber();
                IBaseFilter baseGrabFlt = (IBaseFilter)m_sampGrabber;

                // Configure the Sample Grabber
                ConfigureSampleGrabber(m_sampGrabber);

                // Add it to the filter
                hr = m_FilterGraph.AddFilter(baseGrabFlt, "Ds.NET Grabber");
                DsError.ThrowExceptionForHR(hr);

                // Add the null renderer to the graph
                IBaseFilter nullrenderer = new NullRenderer() as IBaseFilter;
                hr = m_FilterGraph.AddFilter(nullrenderer, "Null renderer");
                DsError.ThrowExceptionForHR(hr);

                // Connect the pieces together, use the default renderer
                hr = icgb2.RenderStream(null, null, sourceFilter, baseGrabFlt, nullrenderer);
                DsError.ThrowExceptionForHR(hr);

                // Now that the graph is built, read the dimensions of the bitmaps we'll be getting
                SaveSizeInfo(m_sampGrabber);

                // Grab some other interfaces
                m_mediaEvent = m_FilterGraph as IMediaEvent;
                m_mediaCtrl = m_FilterGraph as IMediaControl;
            }
            finally
            {
                if (icgb2 != null)
                {
                    Marshal.ReleaseComObject(icgb2);
                    icgb2 = null;
                }
            }
            #if DEBUG
            // Double check to make sure we aren't releasing something
            // important.
            GC.Collect();
            GC.WaitForPendingFinalizers();
            #endif
        }
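SetupGraph above relies on two helpers that this listing omits, ConfigureSampleGrabber and SaveSizeInfo. The DirectShow.NET samples this code follows implement them roughly as below; treat it as a sketch in which the RGB24/VideoInfo media type and the m_videoWidth, m_videoHeight and m_stride fields are assumptions rather than part of the original source, and SetCallback presumes the class implements ISampleGrabberCB.

        // Assumed fields used by SaveSizeInfo.
        private int m_videoWidth;
        private int m_videoHeight;
        private int m_stride;

        private void ConfigureSampleGrabber(ISampleGrabber sampGrabber)
        {
            // Ask the grabber for uncompressed 24-bit RGB video frames.
            AMMediaType media = new AMMediaType();
            media.majorType = MediaType.Video;
            media.subType = MediaSubType.RGB24;
            media.formatType = FormatType.VideoInfo;
            int hr = sampGrabber.SetMediaType(media);
            DsError.ThrowExceptionForHR(hr);
            DsUtils.FreeAMMediaType(media);

            // Deliver frames through ISampleGrabberCB.BufferCB (the second argument selects it).
            hr = sampGrabber.SetCallback(this, 1);
            DsError.ThrowExceptionForHR(hr);
        }

        private void SaveSizeInfo(ISampleGrabber sampGrabber)
        {
            // Read the media type actually negotiated on the grabber's input pin.
            AMMediaType media = new AMMediaType();
            int hr = sampGrabber.GetConnectedMediaType(media);
            DsError.ThrowExceptionForHR(hr);

            try
            {
                if (media.formatType != FormatType.VideoInfo || media.formatPtr == IntPtr.Zero)
                    throw new NotSupportedException("Unknown media format on the sample grabber");

                VideoInfoHeader videoInfo =
                    (VideoInfoHeader)Marshal.PtrToStructure(media.formatPtr, typeof(VideoInfoHeader));
                m_videoWidth = videoInfo.BmiHeader.Width;
                m_videoHeight = videoInfo.BmiHeader.Height;
                m_stride = m_videoWidth * (videoInfo.BmiHeader.BitCount / 8);
            }
            finally
            {
                DsUtils.FreeAMMediaType(media);
            }
        }
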
Example #26
0
        public void CanRenderAudioVideoAndImages()
        {
            using (ITimeline timeline = new DefaultTimeline())
            {
                IGroup audioGroup = timeline.AddAudioGroup();
                ITrack audioTrack = audioGroup.AddTrack();
                audioTrack.AddClip("testinput.mp3", GroupMediaType.Audio, InsertPosition.Absolute, 0, 0, 2);

                IGroup videoGroup = timeline.AddVideoGroup(24, 160, 100);
                ITrack videoTrack = videoGroup.AddTrack();
                videoTrack.AddClip("transitions.wmv", GroupMediaType.Video, InsertPosition.Relative, 0, 0, 1);
                videoTrack.AddClip("image1.jpg", GroupMediaType.Image, InsertPosition.Relative, 0, 0, 1);

                using (var renderer = new NullRenderer(timeline))
                {
                    ExecuteRenderer(renderer,
                                    @"<timeline framerate=""30.0000000"">
            <group type=""audio"" framerate=""30.0000000"" previewmode=""0"">
            <track>
            <clip start=""0"" stop=""2"" src=""testinput.mp3"" mstart=""0"" />
            </track>
            </group>
            <group type=""video"" bitdepth=""24"" width=""160"" height=""100"" framerate=""30.0000000"" previewmode=""0"">
            <track>
            <clip start=""0"" stop=""1"" src=""transitions.wmv"" mstart=""0"" />
            <clip start=""1"" stop=""2"" src=""image1.jpg"" />
            </track>
            </group>
            </timeline>");
                }
            }
        }
Example #27
0
        private void SetupSampleGrabber()
        {
            if (_graph == null)
                return;

            int hr;

            //Get directsound filter
            IBaseFilter directSoundFilter;
            hr = _graph.FindFilterByName(DEFAULT_AUDIO_RENDERER_NAME, out directSoundFilter);
            DsError.ThrowExceptionForHR(hr);

            IPin rendererPinIn = DsFindPin.ByConnectionStatus(directSoundFilter, PinConnectedStatus.Connected, 0);

            if (rendererPinIn != null)
            {
                IPin audioPinOut;
                hr = rendererPinIn.ConnectedTo(out audioPinOut);
                DsError.ThrowExceptionForHR(hr);

                if (audioPinOut != null)
                {
                    // Disconnect the audio decoder from the DirectSound renderer
                    hr = audioPinOut.Disconnect();
                    DsError.ThrowExceptionForHR(hr);

                    hr = _graph.RemoveFilter(directSoundFilter);
                    DsError.ThrowExceptionForHR(hr);

                    //Add Sample Grabber
                    ISampleGrabber sampleGrabber = new SampleGrabber() as ISampleGrabber;
                    hr = sampleGrabber.SetCallback(this, 1);
                    DsError.ThrowExceptionForHR(hr);

                    AMMediaType media;
                    media = new AMMediaType();
                    media.majorType = MediaType.Audio;
                    media.subType = MediaSubType.PCM;
                    media.formatType = FormatType.WaveEx;
                    hr = sampleGrabber.SetMediaType(media);
                    DsError.ThrowExceptionForHR(hr);

                    IPin sampleGrabberPinIn = DsFindPin.ByDirection((IBaseFilter)sampleGrabber, PinDirection.Input, 0);
                    IPin sampleGrabberPinOut = DsFindPin.ByDirection((IBaseFilter)sampleGrabber, PinDirection.Output, 0);
                    hr = _graph.AddFilter((IBaseFilter)sampleGrabber, "SampleGrabber");
                    DsError.ThrowExceptionForHR(hr);

                    PinInfo pinInfo;
                    hr = audioPinOut.QueryPinInfo(out pinInfo);
                    DsError.ThrowExceptionForHR(hr);

                    FilterInfo filterInfo;
                    hr = pinInfo.filter.QueryFilterInfo(out filterInfo);
                    DsError.ThrowExceptionForHR(hr);

                    hr = _graph.Connect(audioPinOut, sampleGrabberPinIn);
                    DsError.ThrowExceptionForHR(hr);

                    //Add null renderer
                    NullRenderer nullRenderer = new NullRenderer();
                    hr = _graph.AddFilter((IBaseFilter)nullRenderer, "NullRenderer");
                    DsError.ThrowExceptionForHR(hr);

                    IPin nullRendererPinIn = DsFindPin.ByDirection((IBaseFilter)nullRenderer, PinDirection.Input, 0);
                    hr = _graph.Connect(sampleGrabberPinOut, nullRendererPinIn);
                    DsError.ThrowExceptionForHR(hr);

                    _audioEngine.Setup(this.GetSampleGrabberFormat(sampleGrabber));
                }
            }
        }
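sampleGrabber.SetCallback(this, 1) above implies that the hosting class implements DirectShowLib's ISampleGrabberCB, with BufferCB (selected by the 1) receiving the decoded PCM buffers. A minimal sketch of that implementation follows; the hand-off to the audio engine is left as a commented placeholder because the real processing code is not part of this listing.

        // Sketch of the callback pair required by SetCallback(this, 1).
        // (The class declaration would also list : ISampleGrabberCB.)
        public int SampleCB(double sampleTime, IMediaSample pSample)
        {
            // Unused: buffer callbacks were requested, not sample callbacks.
            if (pSample != null)
                Marshal.ReleaseComObject(pSample);
            return 0;
        }

        public int BufferCB(double sampleTime, IntPtr pBuffer, int bufferLen)
        {
            if (pBuffer == IntPtr.Zero || bufferLen <= 0)
                return 0;

            // Copy the PCM data out of DirectShow's buffer before it is reused.
            byte[] pcm = new byte[bufferLen];
            Marshal.Copy(pBuffer, pcm, 0, bufferLen);

            // Hand the samples to the audio processing code (illustrative only).
            //_audioEngine.Process(pcm);
            return 0;
        }
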
Example #28
0
        public void RenderVideo()
        {
            // create the timeline
            using (ITimeline timeline = new DefaultTimeline())
            {
                IGroup videoGroup = timeline.AddVideoGroup(24, 320, 240);
                ITrack rootTrack = videoGroup.AddTrack();
                rootTrack.AddClip("transitions.wmv", GroupMediaType.Video, InsertPosition.Relative, 0, 0, 2);

                // render the timeline
                using (var renderer = new NullRenderer(timeline))
                {
                    ExecuteRenderer(renderer,
                                    @"<timeline framerate=""30.0000000"">
            <group type=""video"" bitdepth=""24"" framerate=""30.0000000"" previewmode=""0"">
            <track>
            <clip start=""0"" stop=""2"" src=""transitions.wmv"" mstart=""0""/>
            </track>
            </group>
            </timeline>");
                }
            }
        }
Example #29
0
        /// <summary> build the capture graph for grabber. </summary>
        private void SetupGraph(DsDevice dev, int iSampleRate, int iChannels)
        {
            int hr;

            ISampleGrabber        sampGrabber  = null;
            IBaseFilter           capFilter    = null;
            ICaptureGraphBuilder2 capGraph     = null;
            IBaseFilter           baseGrabFlt  = null;
            IBaseFilter           nullrenderer = null;
            IMediaFilter          mediaFilt    = null;

            // Get the graphbuilder object
            m_FilterGraph = (IFilterGraph2) new FilterGraph();
            m_mediaCtrl   = m_FilterGraph as IMediaControl;
            mediaFilt     = m_FilterGraph as IMediaFilter; // query this only after the graph object exists
            try {
                // Get the ICaptureGraphBuilder2
                capGraph = (ICaptureGraphBuilder2) new CaptureGraphBuilder2();

                // Start building the graph
                hr = capGraph.SetFiltergraph(m_FilterGraph);
                DsError.ThrowExceptionForHR(hr);

                // Add the audio device
                hr = m_FilterGraph.AddSourceFilterForMoniker(dev.Mon, null, "Audio input", out capFilter);
                DsError.ThrowExceptionForHR(hr);

                // If any of the default config items are set
                if (iSampleRate + iChannels > 0)
                {
                    SetConfigParms(capGraph, capFilter, iSampleRate, iChannels);
                }

                // Get the SampleGrabber interface
                sampGrabber = new SampleGrabber() as ISampleGrabber;
                baseGrabFlt = sampGrabber as IBaseFilter;

                ConfigureSampleGrabber(sampGrabber);

                // Add the frame grabber to the graph
                hr = m_FilterGraph.AddFilter(baseGrabFlt, "Ds.NET Grabber");
                DsError.ThrowExceptionForHR(hr);


                // ---------------------------------
                // Connect the file filter to the sample grabber

                // Hopefully this will be the audio pin; we could check by reading its media type
                IPin iPinOut = DsFindPin.ByDirection(capFilter, PinDirection.Output, 0);

                // Get the input pin from the sample grabber
                IPin iPinIn = DsFindPin.ByDirection(baseGrabFlt, PinDirection.Input, 0);

                hr = m_FilterGraph.Connect(iPinOut, iPinIn);
                DsError.ThrowExceptionForHR(hr);

                // Add the null renderer to the graph
                nullrenderer = new NullRenderer() as IBaseFilter;
                hr           = m_FilterGraph.AddFilter(nullrenderer, "Null renderer");
                DsError.ThrowExceptionForHR(hr);

                // ---------------------------------
                // Connect the sample grabber to the null renderer
                iPinOut = DsFindPin.ByDirection(baseGrabFlt, PinDirection.Output, 0);
                iPinIn  = DsFindPin.ByDirection(nullrenderer, PinDirection.Input, 0);

                hr = m_FilterGraph.Connect(iPinOut, iPinIn);
                DsError.ThrowExceptionForHR(hr);

                // Read and cache the resulting settings
                SaveSizeInfo(sampGrabber);
            } finally {
                if (capFilter != null)
                {
                    Marshal.ReleaseComObject(capFilter);
                    capFilter = null;
                }
                if (sampGrabber != null)
                {
                    Marshal.ReleaseComObject(sampGrabber);
                    sampGrabber = null;
                }
                if (capGraph != null)
                {
                    Marshal.ReleaseComObject(capGraph);
                    capGraph = null;
                }
                if (nullrenderer != null)
                {
                    Marshal.ReleaseComObject(nullrenderer);
                    nullrenderer = null;
                }
            }
        }
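SetConfigParms, called above whenever a sample rate or channel count is requested, is another helper the listing does not show. The usual pattern in the DirectShow.NET audio-capture samples, assumed here, is to query IAMStreamConfig on the capture pin and rewrite its WaveFormatEx block before the pin is connected.

        // Sketch: force the requested sample rate / channel count via IAMStreamConfig.
        private void SetConfigParms(ICaptureGraphBuilder2 capGraph, IBaseFilter capFilter, int iSampleRate, int iChannels)
        {
            object o;
            int hr = capGraph.FindInterface(PinCategory.Capture, MediaType.Audio, capFilter, typeof(IAMStreamConfig).GUID, out o);
            DsError.ThrowExceptionForHR(hr);
            IAMStreamConfig audioConfig = (IAMStreamConfig)o;

            AMMediaType media;
            hr = audioConfig.GetFormat(out media);
            DsError.ThrowExceptionForHR(hr);

            try
            {
                // Patch the WaveFormatEx structure behind the media type.
                WaveFormatEx wave = (WaveFormatEx)Marshal.PtrToStructure(media.formatPtr, typeof(WaveFormatEx));
                if (iSampleRate > 0) wave.nSamplesPerSec = iSampleRate;
                if (iChannels > 0) wave.nChannels = (short)iChannels;
                wave.nBlockAlign = (short)(wave.nChannels * (wave.wBitsPerSample / 8));
                wave.nAvgBytesPerSec = wave.nSamplesPerSec * wave.nBlockAlign;
                Marshal.StructureToPtr(wave, media.formatPtr, false);

                hr = audioConfig.SetFormat(media);
                DsError.ThrowExceptionForHR(hr);
            }
            finally
            {
                DsUtils.FreeAMMediaType(media);
                Marshal.ReleaseComObject(audioConfig);
            }
        }
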
Example #30
0
        private void SetupGraph(string FileName)
        {
            int hr;

            ISampleGrabber sampGrabber  = null;
            IBaseFilter    baseGrabFlt  = null;
            IBaseFilter    capFilter    = null;
            IBaseFilter    nullrenderer = null;

            // Get the graphbuilder object
            m_FilterGraph = new FilterGraph() as IFilterGraph2;
            m_mediaCtrl   = m_FilterGraph as IMediaControl;
            m_MediaEvent  = m_FilterGraph as IMediaEvent;

            IMediaFilter mediaFilt = m_FilterGraph as IMediaFilter;

            try {
                // Add the video source
                hr = m_FilterGraph.AddSourceFilter(FileName, "Ds.NET FileFilter", out capFilter);
                DsError.ThrowExceptionForHR(hr);

                // Get the SampleGrabber interface
                sampGrabber = new SampleGrabber() as ISampleGrabber;
                baseGrabFlt = sampGrabber as IBaseFilter;

                ConfigureSampleGrabber(sampGrabber);

                // Add the frame grabber to the graph
                hr = m_FilterGraph.AddFilter(baseGrabFlt, "Ds.NET Grabber");
                DsError.ThrowExceptionForHR(hr);

                // ---------------------------------
                // Connect the file filter to the sample grabber

                // Hopefully this will be the audio pin; we could check by reading its media type
                IPin iPinOut = DsFindPin.ByDirection(capFilter, PinDirection.Output, 0);

                // Get the input pin from the sample grabber
                IPin iPinIn = DsFindPin.ByDirection(baseGrabFlt, PinDirection.Input, 0);

                hr = m_FilterGraph.Connect(iPinOut, iPinIn);
                DsError.ThrowExceptionForHR(hr);

                // Add the null renderer to the graph
                nullrenderer = new NullRenderer() as IBaseFilter;
                hr           = m_FilterGraph.AddFilter(nullrenderer, "Null renderer");
                DsError.ThrowExceptionForHR(hr);

                // ---------------------------------
                // Connect the sample grabber to the null renderer

                iPinOut = DsFindPin.ByDirection(baseGrabFlt, PinDirection.Output, 0);
                iPinIn  = DsFindPin.ByDirection(nullrenderer, PinDirection.Input, 0);

                hr = m_FilterGraph.Connect(iPinOut, iPinIn);
                DsError.ThrowExceptionForHR(hr);

                // Turn off the clock.  This causes the frames to be sent
                // thru the graph as fast as possible
                hr = mediaFilt.SetSyncSource(null);
                DsError.ThrowExceptionForHR(hr);

                // Read and cache the image sizes
                SaveSizeInfo(sampGrabber);
            } finally {
                if (capFilter != null)
                {
                    Marshal.ReleaseComObject(capFilter);
                    capFilter = null;
                }
                if (sampGrabber != null)
                {
                    Marshal.ReleaseComObject(sampGrabber);
                    sampGrabber = null;
                }
                if (nullrenderer != null)
                {
                    Marshal.ReleaseComObject(nullrenderer);
                    nullrenderer = null;
                }
            }
        }
Example #31
0
        DSStreamResultCodes InitWithStreamBufferFile(WTVStreamingVideoRequest strq)
        {
            // Init variables
            //IPin[] pin = new IPin[1];
            IBaseFilter DecFilterAudio = null;
            IBaseFilter DecFilterVideo = null;
            IBaseFilter MainAudioDecoder = null;
            IBaseFilter MainVideoDecoder = null;
            string dPin = string.Empty;
            string sName = string.Empty;
            string dName = string.Empty;
            string sPin = string.Empty;
            FileInfo fiInputFile = new FileInfo(strq.FileName);
            string txtOutputFNPath = fiInputFile.FullName + ".wmv";
            if (
                (!fiInputFile.Extension.ToLowerInvariant().Equals(".wtv")) &&
                (!fiInputFile.Extension.ToLowerInvariant().Equals(".dvr-ms"))
               ) return DSStreamResultCodes.ErrorInvalidFileType;

            int hr = 0;
            try
            {
                // Get the graphbuilder interface
                SendDebugMessage("Creating Graph Object",0);
                IGraphBuilder graphbuilder = (IGraphBuilder)currentFilterGraph;

                // Add the DVRMS/WTV file / filter to the graph
                SendDebugMessage("Add SBE Source Filter", 0);

                hr = graphbuilder.AddSourceFilter(fiInputFile.FullName, "SBE Filter", out currentSBEfilter); // class variable
                DsError.ThrowExceptionForHR(hr);
                dc.Add(currentSBEfilter);

                // Get the SBE audio and video out pins
                IPin SBEVidOutPin, SBEAudOutPin;
                SBEAudOutPin = FilterGraphTools.FindPinByMediaType(currentSBEfilter, PinDirection.Output, MediaType.Audio, MediaSubType.Null);
                SBEVidOutPin = FilterGraphTools.FindPinByMediaType(currentSBEfilter, PinDirection.Output, MediaType.Video, MediaSubType.Null);

                // Set up two decrypt filters according to file extension (assume audio and video both present )
                if (fiInputFile.Extension.ToLowerInvariant().Equals(".dvr-ms"))
                {
                    // Add DVR-MS decrypt filters
                    SendDebugMessage("Add DVRMS (bda) decryption", 0);
                    DecFilterAudio = (IBaseFilter)new DTFilter();  // THESE ARE FOR DVR-MS (BDA DTFilters)
                    DecFilterVideo = (IBaseFilter)new DTFilter();
                    graphbuilder.AddFilter(DecFilterAudio, "Decrypt / Tag");
                    graphbuilder.AddFilter(DecFilterVideo, "Decrypt / Tag 0001");
                }
                else  // Add WTV decrypt filters
                {
                    SendDebugMessage("Add WTV (pbda) decryption", 0);
                    DecFilterAudio = FilterDefinition.AddToFilterGraph(FilterDefinitions.Decrypt.DTFilterPBDA, ref graphbuilder);
                    DecFilterVideo = FilterDefinition.AddToFilterGraph(FilterDefinitions.Decrypt.DTFilterPBDA, ref graphbuilder, "PBDA DTFilter 0001");

                }
                dc.Add(DecFilterAudio);
                dc.Add(DecFilterVideo);

                // Make the first link in the graph: SBE => Decrypts
                SendDebugMessage("Connect SBE => Decrypt filters", 0);
                IPin DecVideoInPin = DsFindPin.ByDirection(DecFilterVideo, PinDirection.Input, 0);
                FilterGraphTools.ConnectFilters(graphbuilder, SBEVidOutPin, DecVideoInPin, false);
                IPin DecAudioInPin = DsFindPin.ByDirection(DecFilterAudio, PinDirection.Input, 0);
                if (DecAudioInPin == null)
                    SendDebugMessage("WARNING: No Audio Input to decrypt filter.");
                else
                    FilterGraphTools.ConnectFilters(graphbuilder, SBEAudOutPin, DecAudioInPin, false);

                // Get Dec Audio Out pin
                IPin DecAudioOutPin = DsFindPin.ByDirection(DecFilterAudio, PinDirection.Output, 0);

                // Examine Dec Audio out for audio format
                SendDebugMessage("Examining source audio", 0);
                AMMediaType AudioMediaType = null;
                getPinMediaType(DecAudioOutPin, MediaType.Audio, Guid.Empty, Guid.Empty, ref AudioMediaType);
                SendDebugMessage("Audio media subtype: " + AudioMediaType.subType.ToString());
                SendDebugMessage("Examining Audio StreamInfo");
                StreamInfo si = FileInformation.GetStreamInfo(AudioMediaType);
                bool AudioIsAC3 = (si.SimpleType == "AC-3");
                if (AudioIsAC3)
                    SendDebugMessage("Audio type is AC3");
                else
                    SendDebugMessage("Audio type is not AC3");
                si = null;
                DsUtils.FreeAMMediaType(AudioMediaType);

                // Add an appropriate audio decoder
                if (AudioIsAC3)
                {
                    if (!FilterGraphTools.IsThisComObjectInstalled(FilterDefinitions.Audio.AudioDecoderMPCHC.CLSID))
                    {
                        SendDebugMessage("Missing AC3 Audio Decoder, and AC3 audio detected.");
                        return DSStreamResultCodes.ErrorAC3CodecNotFound;
                    }
                    else
                    {
                        MainAudioDecoder = FilterDefinition.AddToFilterGraph(FilterDefinitions.Audio.AudioDecoderMPCHC, ref graphbuilder);   //MainAudioDecoder = FatAttitude.WTVTranscoder.FilterDefinitions.Audio.AudioDecoderFFDShow.AddToFilterGraph(ref graph);
                        Guid tmpGuid; MainAudioDecoder.GetClassID(out tmpGuid);
                        SendDebugMessage("Main Audio decoder CLSID is " + tmpGuid.ToString());
                    }
                }
                else
                    MainAudioDecoder = FilterDefinition.AddToFilterGraph(FilterDefinitions.Audio.AudioDecoderMSDTV, ref graphbuilder);

                // Add a video decoder
                SendDebugMessage("Add DTV decoder", 0);
                MainVideoDecoder = FilterDefinition.AddToFilterGraph(FilterDefinitions.Video.VideoDecoderMSDTV, ref graphbuilder);
                dc.Add(MainAudioDecoder);
                dc.Add(MainVideoDecoder);

                //SetAudioDecoderOutputToPCMStereo(MainAudioDecoder);

                // Add a null renderer
                SendDebugMessage("Add null renderer", 0);
                NullRenderer MyNullRenderer = new NullRenderer();
                dc.Add(MyNullRenderer);
                hr = graphbuilder.AddFilter((IBaseFilter)MyNullRenderer, @"Null Renderer");
                DsError.ThrowExceptionForHR(hr);

                // Link up video through to null renderer
                SendDebugMessage("Connect video to null renderer", 0);
                // Make the second link:  Decrypts => DTV
                IPin DecVideoOutPin = DsFindPin.ByDirection(DecFilterVideo, PinDirection.Output, 0);
                IPin DTVVideoInPin = DsFindPin.ByName(MainVideoDecoder, @"Video Input");  // IPin DTVVideoInPin = DsFindPin.ByDirection(DTVVideoDecoder, PinDirection.Input, 0);  // first one should be video input?  //
                FilterGraphTools.ConnectFilters(graphbuilder, DecVideoOutPin, DTVVideoInPin, false);
                // 3. DTV => Null renderer
                IPin NullRInPin = DsFindPin.ByDirection((IBaseFilter)MyNullRenderer, PinDirection.Input, 0);
                IPin DTVVideoOutPin = FilterGraphTools.FindPinByMediaType(MainVideoDecoder, PinDirection.Output, MediaType.Video, MediaSubType.Null);
                FilterGraphTools.ConnectFilters(graphbuilder, DTVVideoOutPin, NullRInPin, false);
                Marshal.ReleaseComObject(NullRInPin); NullRInPin = null;

                // Run graph [can use this also to get media type => see, e.g. dvrmstowmvhd by Babgvant]
                SendDebugMessage("Run graph for testing purposes", 0);
                IMediaControl tempControl = (IMediaControl)graphbuilder;
                IMediaEvent tempEvent = (IMediaEvent)graphbuilder;
                DsError.ThrowExceptionForHR(tempControl.Pause());
                DsError.ThrowExceptionForHR(tempControl.Run());
                EventCode pEventCode;
                hr = tempEvent.WaitForCompletion(1000, out pEventCode);
                //DsError.ThrowExceptionForHR(hr);  // DO *NOT* DO THIS HERE!  THERE MAY WELL BE AN ERROR DUE TO EVENTS RAISED BY THE STREAM BUFFER ENGINE, THIS IS A DELIBERATE TEST RUN OF THE GRAPH
                // Stop graph if necessary
                FilterState pFS;
                hr = tempControl.GetState(1000, out pFS);
                if (pFS == FilterState.Running)
                    DsError.ThrowExceptionForHR(tempControl.Stop());

                // Remove null renderer
                hr = graphbuilder.RemoveFilter((IBaseFilter)MyNullRenderer);

                // Now graph has been run and stopped we can get the video width and height from the output pin of the main video decoder
                AMMediaType pmt = null;
                getPinMediaType(DTVVideoOutPin, MediaType.Video, MediaSubType.YUY2, Guid.Empty, ref pmt);
                FrameSize SourceFrameSize;
                if (pmt.formatType == FormatType.VideoInfo2)
                {
                    VideoInfoHeader2 pvih2 = new VideoInfoHeader2();
                    Marshal.PtrToStructure(pmt.formatPtr, pvih2);
                    int VideoWidth = pvih2.BmiHeader.Width;
                    int VideoHeight = pvih2.BmiHeader.Height;
                    SourceFrameSize = new FrameSize(VideoWidth, VideoHeight);
                }
                else
                    SourceFrameSize = new FrameSize(320, 240);

                // Free up
                DsUtils.FreeAMMediaType(pmt); pmt = null;

                // Link up audio
                // 2. Audio Decrypt -> Audio decoder
                IPin MainAudioInPin = DsFindPin.ByDirection(MainAudioDecoder, PinDirection.Input, 0);
                FilterGraphTools.ConnectFilters(graphbuilder, DecAudioOutPin, MainAudioInPin, false);

                // Add ASF Writer
                // Create an ASF writer filter
                SendDebugMessage("Creating ASF Writer", 0);
                WMAsfWriter asf_filter = new WMAsfWriter();
                dc.Add(asf_filter); // CHECK FOR ERRORS
                currentOutputFilter = (IBaseFilter)asf_filter;  // class variable
                // Add the ASF filter to the graph
                hr = graphbuilder.AddFilter((IBaseFilter)asf_filter, "WM Asf Writer");
                DsError.ThrowExceptionForHR(hr);

                // Set the filename
                IFileSinkFilter sinkFilter = (IFileSinkFilter)asf_filter;
                string destPathFN = fiInputFile.FullName + ".wmv";
                hr = sinkFilter.SetFileName(destPathFN, null);
                DsError.ThrowExceptionForHR(hr);

                // Make the final links:  DTV => writer
                SendDebugMessage("Linking audio/video through to decoder and writer", 0);
                IPin DTVAudioOutPin = DsFindPin.ByDirection(MainAudioDecoder, PinDirection.Output, 0);
                IPin ASFAudioInputPin = FilterGraphTools.FindPinByMediaType((IBaseFilter)asf_filter, PinDirection.Input, MediaType.Audio, MediaSubType.Null);
                IPin ASFVideoInputPin = FilterGraphTools.FindPinByMediaType((IBaseFilter)asf_filter, PinDirection.Input, MediaType.Video, MediaSubType.Null);
                FilterGraphTools.ConnectFilters(graphbuilder, DTVAudioOutPin, ASFAudioInputPin, false);
                if (ASFVideoInputPin != null)
                    FilterGraphTools.ConnectFilters(graphbuilder, DTVVideoOutPin, ASFVideoInputPin, false);

                // Configure ASFWriter
                ConfigureASFWriter(asf_filter, strq, SourceFrameSize);

                // Release pins
                SendDebugMessage("Releasing COM objects (pins)", 0);
                    // dec
                Marshal.ReleaseComObject(DecAudioInPin); DecAudioInPin = null;
                Marshal.ReleaseComObject(DecVideoInPin); DecVideoInPin = null;
                Marshal.ReleaseComObject(DecVideoOutPin); DecVideoOutPin = null;
                Marshal.ReleaseComObject(DecAudioOutPin); DecAudioOutPin = null;
                    // dtv
                Marshal.ReleaseComObject(MainAudioInPin); MainAudioInPin = null;
                Marshal.ReleaseComObject(DTVVideoInPin); DTVVideoInPin = null;
                Marshal.ReleaseComObject(DTVVideoOutPin); DTVVideoOutPin = null;
                Marshal.ReleaseComObject(DTVAudioOutPin); DTVAudioOutPin = null;
                    // asf
                Marshal.ReleaseComObject(ASFAudioInputPin); ASFAudioInputPin = null;
                Marshal.ReleaseComObject(ASFVideoInputPin); ASFVideoInputPin = null;
            }
            catch (Exception ex)
            {
                SendDebugMessageWithException(ex.Message, ex);
                return DSStreamResultCodes.ErrorExceptionOccurred;
            }

            return DSStreamResultCodes.OK;
        }
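getPinMediaType above is a project-local helper that is not included here. Judging from the call sites, it scans a pin's preferred media types for one whose major type matches, treats Guid.Empty as a wildcard for the sub and format types, and hands the match back for the caller to free with DsUtils.FreeAMMediaType. The following is a plausible sketch, not the original implementation.

        // Hypothetical implementation matching the getPinMediaType(...) calls above.
        private void getPinMediaType(IPin pin, Guid majorType, Guid subType, Guid formatType, ref AMMediaType foundType)
        {
            IEnumMediaTypes enumTypes;
            int hr = pin.EnumMediaTypes(out enumTypes);
            DsError.ThrowExceptionForHR(hr);

            try
            {
                AMMediaType[] types = new AMMediaType[1];
                while (enumTypes.Next(1, types, IntPtr.Zero) == 0)
                {
                    AMMediaType mt = types[0];
                    bool match = mt.majorType == majorType
                                 && (subType == Guid.Empty || mt.subType == subType)
                                 && (formatType == Guid.Empty || mt.formatType == formatType);
                    if (match)
                    {
                        foundType = mt; // ownership passes to the caller
                        return;
                    }
                    DsUtils.FreeAMMediaType(mt);
                }
            }
            finally
            {
                Marshal.ReleaseComObject(enumTypes);
            }
        }
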
Example #32
0
        private void SetupSampleGrabber()
        {
            if (_graph == null)
            {
                return;
            }

            int hr;

            //Get directsound filter
            IBaseFilter directSoundFilter;

            hr = _graph.FindFilterByName(DEFAULT_AUDIO_RENDERER_NAME, out directSoundFilter);
            DsError.ThrowExceptionForHR(hr);

            IPin rendererPinIn = DsFindPin.ByConnectionStatus(directSoundFilter, PinConnectedStatus.Connected, 0);

            if (rendererPinIn != null)
            {
                IPin audioPinOut;
                hr = rendererPinIn.ConnectedTo(out audioPinOut);
                DsError.ThrowExceptionForHR(hr);

                if (audioPinOut != null)
                {
                    // Disconnect the audio decoder from the DirectSound renderer
                    hr = audioPinOut.Disconnect();
                    DsError.ThrowExceptionForHR(hr);

                    hr = _graph.RemoveFilter(directSoundFilter);
                    DsError.ThrowExceptionForHR(hr);

                    //Add Sample Grabber
                    ISampleGrabber sampleGrabber = new SampleGrabber() as ISampleGrabber;
                    hr = sampleGrabber.SetCallback(this, 1);
                    DsError.ThrowExceptionForHR(hr);

                    AMMediaType media;
                    media            = new AMMediaType();
                    media.majorType  = MediaType.Audio;
                    media.subType    = MediaSubType.PCM;
                    media.formatType = FormatType.WaveEx;
                    hr = sampleGrabber.SetMediaType(media);
                    DsError.ThrowExceptionForHR(hr);

                    IPin sampleGrabberPinIn  = DsFindPin.ByDirection((IBaseFilter)sampleGrabber, PinDirection.Input, 0);
                    IPin sampleGrabberPinOut = DsFindPin.ByDirection((IBaseFilter)sampleGrabber, PinDirection.Output, 0);
                    hr = _graph.AddFilter((IBaseFilter)sampleGrabber, "SampleGrabber");
                    DsError.ThrowExceptionForHR(hr);

                    PinInfo pinInfo;
                    hr = audioPinOut.QueryPinInfo(out pinInfo);
                    DsError.ThrowExceptionForHR(hr);

                    FilterInfo filterInfo;
                    hr = pinInfo.filter.QueryFilterInfo(out filterInfo);
                    DsError.ThrowExceptionForHR(hr);

                    hr = _graph.Connect(audioPinOut, sampleGrabberPinIn);
                    DsError.ThrowExceptionForHR(hr);

                    //Add null renderer
                    NullRenderer nullRenderer = new NullRenderer();
                    hr = _graph.AddFilter((IBaseFilter)nullRenderer, "NullRenderer");
                    DsError.ThrowExceptionForHR(hr);

                    IPin nullRendererPinIn = DsFindPin.ByDirection((IBaseFilter)nullRenderer, PinDirection.Input, 0);
                    hr = _graph.Connect(sampleGrabberPinOut, nullRendererPinIn);
                    DsError.ThrowExceptionForHR(hr);

                    _audioEngine.Setup(this.GetSampleGrabberFormat(sampleGrabber));
                }
            }
        }
Example #33
0
        /// <summary> build the capture graph for grabber. </summary>
        private void SetupGraph(string FileName)
        {
            int hr;

            ISampleGrabber sampGrabber = null;
            IBaseFilter baseGrabFlt = null;
            IBaseFilter capFilter = null;
            IBaseFilter nullrenderer = null;

            // Get the graphbuilder object
            m_FilterGraph = new FilterGraph() as IFilterGraph2;
            m_mediaCtrl = m_FilterGraph as IMediaControl;
            m_MediaEvent = m_FilterGraph as IMediaEvent;

            IMediaFilter mediaFilt = m_FilterGraph as IMediaFilter;

            try
            {
            #if DEBUG
                m_rot = new DsROTEntry(m_FilterGraph);
            #endif

                // Add the video source
                hr = m_FilterGraph.AddSourceFilter(FileName, "Ds.NET FileFilter", out capFilter);
                DsError.ThrowExceptionForHR(hr);

                // Get the SampleGrabber interface
                sampGrabber = new SampleGrabber() as ISampleGrabber;
                baseGrabFlt = sampGrabber as IBaseFilter;

                ConfigureSampleGrabber(sampGrabber);

                // Add the frame grabber to the graph
                hr = m_FilterGraph.AddFilter(baseGrabFlt, "Ds.NET Grabber");
                DsError.ThrowExceptionForHR(hr);

                // ---------------------------------
                // Connect the file filter to the sample grabber

                // Hopefully this will be the video pin; we could check by reading its media type
                IPin iPinOut = DsFindPin.ByDirection(capFilter, PinDirection.Output, 0);

                // Get the input pin from the sample grabber
                IPin iPinIn = DsFindPin.ByDirection(baseGrabFlt, PinDirection.Input, 0);

                hr = m_FilterGraph.Connect(iPinOut, iPinIn);
                DsError.ThrowExceptionForHR(hr);

                // Add the null renderer to the graph
                nullrenderer = new NullRenderer() as IBaseFilter;
                hr = m_FilterGraph.AddFilter(nullrenderer, "Null renderer");
                DsError.ThrowExceptionForHR(hr);

                // ---------------------------------
                // Connect the sample grabber to the null renderer

                iPinOut = DsFindPin.ByDirection(baseGrabFlt, PinDirection.Output, 0);
                iPinIn = DsFindPin.ByDirection(nullrenderer, PinDirection.Input, 0);

                hr = m_FilterGraph.Connect(iPinOut, iPinIn);
                DsError.ThrowExceptionForHR(hr);

                // Turn off the clock.  This causes the frames to be sent
                // thru the graph as fast as possible
                hr = mediaFilt.SetSyncSource(null);
                DsError.ThrowExceptionForHR(hr);

                // Read and cache the image sizes
                SaveSizeInfo(sampGrabber);
            }
            finally
            {
                if (capFilter != null)
                {
                    Marshal.ReleaseComObject(capFilter);
                    capFilter = null;
                }
                if (sampGrabber != null)
                {
                    Marshal.ReleaseComObject(sampGrabber);
                    sampGrabber = null;
                }
                if (nullrenderer != null)
                {
                    Marshal.ReleaseComObject(nullrenderer);
                    nullrenderer = null;
                }
            }
        }
Example #34
0
        protected void AddAndConnectNullRendererForWPF()
        {
            if (useWPF)
            {
                // In order to keep audio and video in sync, we need the NullRenderer
                IBaseFilter nullRenderer = new NullRenderer() as IBaseFilter;
                int hr = graphBuilder.AddFilter(nullRenderer, "NullRenderer");
                ThrowExceptionForHR("Adding the NullRenderer: ", hr);

                IPin pinOutFromFilterOut = DsFindPin.ByDirection(this.videoRenderer, PinDirection.Output, 0);
                if (pinOutFromFilterOut != null)
                {
                    try
                    {
                        IPin pinInFromFilterOut = DsFindPin.ByDirection(nullRenderer, PinDirection.Input, 0);
                        if (pinInFromFilterOut != null)
                        {
                            try
                            {
                                hr = this.graphBuilder.Connect(pinOutFromFilterOut, pinInFromFilterOut);
                            }
                            finally
                            {
                                Marshal.ReleaseComObject(pinInFromFilterOut);
                            }
                        }
                    }
                    finally
                    {
                        Marshal.ReleaseComObject(pinOutFromFilterOut);
                    }
                }

                //IPin pinOutFromFilterOut = DsFindPin.ByDirection(this.videoRenderer, PinDirection.Output, 0);
                //if (pinOutFromFilterOut != null)
                //{
                //    hr = this.graphBuilder.Render(pinOutFromFilterOut);
                //    Marshal.ReleaseComObject(pinOutFromFilterOut);
                //}
            }
        }
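Both AddAndConnectNullRendererForWPF and the WPF branch earlier call a project-specific ThrowExceptionForHR(string, int) instead of DsError.ThrowExceptionForHR; presumably it only prepends a context message. A minimal assumed equivalent:

        // Assumed helper: wrap a failed HRESULT in a COMException that carries a context message.
        protected void ThrowExceptionForHR(string message, int hr)
        {
            if (hr < 0)
            {
                throw new COMException(message + DsError.GetErrorText(hr), hr);
            }
        }
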
Example #35
0
        public void InitDevice(DsDevice device, int iWidth, int iHeight)
        {
            int hr;
            object camDevice;
            Guid iid = typeof(IBaseFilter).GUID;
            device.Mon.BindToObject(null, null, ref iid, out camDevice);
            IBaseFilter camFilter = camDevice as IBaseFilter;
            m_CameraControl = camFilter as IAMCameraControl;
            m_VideoControl = camFilter as IAMVideoProcAmp;
            ISampleGrabber sampGrabber = null;

            graphBuilder = (IGraphBuilder)new FilterGraph();

            //Create the Capture Graph Builder
            ICaptureGraphBuilder2 captureGraphBuilder = null;
            captureGraphBuilder = (ICaptureGraphBuilder2)new CaptureGraphBuilder2();

            // Attach the filter graph to the capture graph
            hr = captureGraphBuilder.SetFiltergraph(this.graphBuilder);
            DsError.ThrowExceptionForHR(hr);

            //Add the Video input device to the graph
            hr = graphBuilder.AddFilter(camFilter, "WebCam" + deviceNumber);
            DsError.ThrowExceptionForHR(hr);

            // Configure the sample grabber
            sampGrabber = new SampleGrabber() as ISampleGrabber;
            ConfigureSampleGrabber(sampGrabber);
            IBaseFilter sampGrabberBF = sampGrabber as IBaseFilter;

            //Add the Video compressor filter to the graph
            hr = graphBuilder.AddFilter(sampGrabberBF, "SampleGrabber" + deviceNumber);
            DsError.ThrowExceptionForHR(hr);

            IBaseFilter nullRender = new NullRenderer() as IBaseFilter;
            graphBuilder.AddFilter(nullRender, "NullRenderer" + deviceNumber);
            InitResolution(captureGraphBuilder, camFilter, iWidth, iHeight);

            hr = captureGraphBuilder.RenderStream(PinCategory.Capture, MediaType.Video, camDevice, sampGrabberBF, nullRender);
            DsError.ThrowExceptionForHR(hr);

            SaveSizeInfo(sampGrabber);

            Marshal.ReleaseComObject(sampGrabber);
            Marshal.ReleaseComObject(captureGraphBuilder);
        }
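InitResolution, which InitDevice calls before rendering the stream, is also missing from this listing. A common way to implement it, assumed here, is to fetch the capture pin's current format through IAMStreamConfig and overwrite the frame size; real devices may reject sizes they do not support.

        // Sketch: request a specific capture resolution through IAMStreamConfig.
        private void InitResolution(ICaptureGraphBuilder2 captureGraphBuilder, IBaseFilter camFilter, int iWidth, int iHeight)
        {
            object o;
            int hr = captureGraphBuilder.FindInterface(PinCategory.Capture, MediaType.Video, camFilter, typeof(IAMStreamConfig).GUID, out o);
            DsError.ThrowExceptionForHR(hr);
            IAMStreamConfig videoConfig = (IAMStreamConfig)o;

            AMMediaType media;
            hr = videoConfig.GetFormat(out media);
            DsError.ThrowExceptionForHR(hr);

            try
            {
                // Overwrite the frame size in the VideoInfoHeader and push the format back.
                VideoInfoHeader videoInfo = (VideoInfoHeader)Marshal.PtrToStructure(media.formatPtr, typeof(VideoInfoHeader));
                videoInfo.BmiHeader.Width = iWidth;
                videoInfo.BmiHeader.Height = iHeight;
                Marshal.StructureToPtr(videoInfo, media.formatPtr, false);

                hr = videoConfig.SetFormat(media);
                DsError.ThrowExceptionForHR(hr);
            }
            finally
            {
                DsUtils.FreeAMMediaType(media);
                Marshal.ReleaseComObject(videoConfig);
            }
        }
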
Example #36
0
            public void Open(bool run = true)
            {
                DsDevice device = null;

                foreach (DsDevice d in DsDevice.GetDevicesOfCat(FilterCategory.VideoInputDevice))
                {
                    if (d.Name == this.Name)
                    {
                        device = d; break;
                    }
                }

                if (device == null)
                {
                    throw new NullReferenceException("DsDevice");
                }

                try
                {
                    // Create

                    this.captureGraphBuilder = new CaptureGraphBuilder2() as ICaptureGraphBuilder2;
                    if (this.captureGraphBuilder == null)
                    {
                        throw new NullReferenceException("ICaptureGraphBuilder2");
                    }

                    this.filterGraph = new FilterGraph() as IFilterGraph2;
                    if (this.filterGraph == null)
                    {
                        throw new NullReferenceException("IFilterGraph2");
                    }

                    this.mediaControl = this.filterGraph as IMediaControl;
                    if (this.mediaControl == null)
                    {
                        throw new NullReferenceException("IMediaControl");
                    }

                    // Filter Graph (Video Capture -> Sample Grabber -> Null Renderer)

                    int hr = this.captureGraphBuilder.SetFiltergraph(this.filterGraph);
                    if (hr < 0)
                    {
                        throw new COMException("ICaptureGraphBuilder2::SetFiltergraph", hr);
                    }

                    // Video Capture

                    hr = this.filterGraph.AddSourceFilterForMoniker(device.Mon, null, device.Name, out this.videoCapture);
                    if (hr < 0)
                    {
                        throw new COMException("IFilterGraph2::AddSourceFilterForMoniker", hr);
                    }

                    if (run)
                    {
                        hr = this.captureGraphBuilder.FindInterface(PinCategory.Capture, DirectShowLib.MediaType.Video, this.videoCapture, typeof(IAMStreamConfig).GUID, out object intrface);
                        if (hr < 0)
                        {
                            throw new COMException("ICaptureGraphBuilder2::FindInterface::IAMStreamConfig", hr);
                        }

                        IAMStreamConfig streamConfig = intrface as IAMStreamConfig;
                        if (streamConfig == null)
                        {
                            throw new NullReferenceException("IAMStreamConfig");
                        }

                        hr = streamConfig.GetFormat(out AMMediaType media);
                        if (hr < 0)
                        {
                            throw new COMException("IAMStreamConfig::GetFormat", hr);
                        }

                        if (this.MediaType == null)
                        {
                            this.MediaType = new VideoInfoHeader();
                            Marshal.PtrToStructure(media.formatPtr, this.MediaType);
                            DsUtils.FreeAMMediaType(media); media = null;
                        }
                        else
                        {
                            Marshal.StructureToPtr(this.MediaType, media.formatPtr, false);
                            hr = streamConfig.SetFormat(media);
                            DsUtils.FreeAMMediaType(media); media = null;
                            if (hr < 0)
                            {
                                throw new COMException("IAMStreamConfig::SetFormat", hr);
                            }
                        }

                        this.Width     = this.MediaType.BmiHeader.Width;
                        this.Height    = this.MediaType.BmiHeader.Height;
                        this.FrameRate = 10000000.0 / this.MediaType.AvgTimePerFrame;

                        // Sample Grabber

                        ISampleGrabber sampleGrabber = new SampleGrabber() as ISampleGrabber;
                        media            = new AMMediaType();
                        media.majorType  = DirectShowLib.MediaType.Video;
                        media.subType    = MediaSubType.RGB24;
                        media.formatType = FormatType.VideoInfo;
                        hr = sampleGrabber.SetMediaType(media);
                        DsUtils.FreeAMMediaType(media); media = null;
                        if (hr < 0)
                        {
                            throw new COMException("ISampleGrabber::SetMediaType", hr);
                        }

                        hr = sampleGrabber.SetCallback(this, 1);
                        if (hr < 0)
                        {
                            throw new COMException("ISampleGrabber::SetCallback", hr);
                        }

                        hr = this.filterGraph.AddFilter(sampleGrabber as IBaseFilter, "SampleGrabber");
                        if (hr < 0)
                        {
                            throw new COMException("IFilterGraph2::AddFilter::SampleGrabber", hr);
                        }

                        // Null Renderer

                        NullRenderer nullRenderer = new NullRenderer();
                        hr = this.filterGraph.AddFilter(nullRenderer as IBaseFilter, "NullRenderer");
                        if (hr < 0)
                        {
                            throw new COMException("IFilterGraph2::AddFilter::NullRenderer", hr);
                        }

                        hr = this.captureGraphBuilder.RenderStream(PinCategory.Capture, DirectShowLib.MediaType.Video, this.videoCapture, sampleGrabber as IBaseFilter, nullRenderer as IBaseFilter);
                        if (hr < 0)
                        {
                            throw new COMException("ICaptureGraphBuilder2::RenderStream", hr);
                        }

                        // ROT (Running Object Table) Entry

                        this.rotEntry = new DsROTEntry(this.filterGraph);

                        // Frames

                        this.frames = new LinkedList<Frame>();

                        for (int b = 0; b < this.Backtrace; b++)
                        {
                            this.frames.AddLast(new Frame());
                        }

                        // Run Filter Graph

                        hr = this.mediaControl.Run();
                        if (hr < 0)
                        {
                            throw new COMException("IMediaControl::Run", hr);
                        }
                    }
                }
                catch
                {
                    this.Close(); throw; // rethrow without resetting the stack trace
                }
            }
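The catch block above calls Close(), which the listing omits. Going only by what Open() creates, a plausible Close() stops the graph, drops the ROT entry and releases the COM references; the sketch below assumes those same field names.

            // Sketch of the matching Close(): undo everything Open() set up.
            public void Close()
            {
                if (this.mediaControl != null)
                {
                    this.mediaControl.Stop(); // safe even if the graph never ran
                }

                if (this.rotEntry != null)
                {
                    this.rotEntry.Dispose();
                    this.rotEntry = null;
                }

                if (this.videoCapture != null)
                {
                    Marshal.ReleaseComObject(this.videoCapture);
                    this.videoCapture = null;
                }

                if (this.captureGraphBuilder != null)
                {
                    Marshal.ReleaseComObject(this.captureGraphBuilder);
                    this.captureGraphBuilder = null;
                }

                this.mediaControl = null; // same underlying COM object as filterGraph

                if (this.filterGraph != null)
                {
                    Marshal.ReleaseComObject(this.filterGraph);
                    this.filterGraph = null;
                }
            }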