Example #1
        public static IBaseFilter RenderAsfWriterWithProfile(DisposalCleanup dc, IGraphBuilder graph, string profileData,
                                                             string outputFile)
        {
            if (dc == null) throw new ArgumentNullException("dc");
            if (graph == null) throw new ArgumentNullException("graph");
            if (string.IsNullOrEmpty(profileData)) throw new ArgumentNullException("profileData");
            if (string.IsNullOrEmpty(outputFile)) throw new ArgumentNullException("outputFile");

            int hr = 0;

            var asfWriterFilter = (IBaseFilter) new WMAsfWriter();
            dc.Add(asfWriterFilter);
            hr = graph.AddFilter(asfWriterFilter, Resources.DefaultAsfWriterName);
            DsError.ThrowExceptionForHR(hr);

            // Create an appropriate IWMProfile from the data
            IWMProfileManager profileManager = ProfileManager.CreateInstance();
            dc.Add(profileManager);

            IntPtr wmProfile = profileManager.LoadProfileByData(profileData);
            dc.Add(wmProfile);

            // Set the profile on the writer
            var configWriter = (IConfigAsfWriter2) asfWriterFilter;
            configWriter.ConfigureFilterUsingProfile(wmProfile);

            hr = ((IFileSinkFilter) asfWriterFilter).SetFileName(outputFile, null);
            DsError.ThrowExceptionForHR(hr);

            return asfWriterFilter;
        }
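A minimal usage sketch for the helper above (assumptions: DisposalCleanup is disposable, as its use with dc.Add suggests; the profile data is read from a .prx file; upstream filters are connected elsewhere; the paths are placeholders):

        using (var dc = new DisposalCleanup())
        {
            var graph = (IGraphBuilder)new FilterGraph();
            dc.Add(graph);

            // Load the Windows Media profile XML and create the configured ASF writer.
            string profileData = System.IO.File.ReadAllText("profile.prx");
            IBaseFilter asfWriter = RenderAsfWriterWithProfile(dc, graph, profileData, "output.wmv");

            // ... connect the capture/compressor output pins to the ASF writer, then run the graph ...
        }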
Example #2
        public static IBaseFilter CreateAudioCompressor(DisposalCleanup dc, IGraphBuilder graph, IPin outPin,
                                                        AudioFormat settings)
        {
            if (dc == null) throw new ArgumentNullException("dc");
            if (graph == null) throw new ArgumentNullException("graph");
            if (outPin == null) throw new ArgumentNullException("outPin");
            if (settings == null) throw new ArgumentNullException("settings");

            int hr = 0;

            using (AudioCompressor compressor = AudioCompressorFactory.Create(settings))
            {
                IBaseFilter compressorFilter = compressor.Filter;
                dc.Add(compressorFilter);

                hr = graph.AddFilter(compressorFilter, settings.AudioCompressor);
                DsError.ThrowExceptionForHR(hr);

                FilterGraphTools.ConnectFilters(graph, outPin, compressorFilter, true);

                // set the media type on the output pin of the compressor
                if (compressor.MediaType != null)
                {
                    FilterGraphTools.SetFilterFormat(compressor.MediaType, compressorFilter);
                }

                return compressorFilter;
            }
        }
Example #3
        private void GetInterface()
        {
            object o;
            int hr;

            m_pGraph = (IGraphBuilder)new FilterGraph();
            IBaseFilter pSource;
            hr = m_pGraph.AddSourceFilter(@"C:\SourceForge\mflib\Test\Media\AspectRatio4x3.wmv", null, out pSource);
            DsError.ThrowExceptionForHR(hr);
            IBaseFilter pEVR = (IBaseFilter)new EnhancedVideoRenderer();
            hr = m_pGraph.AddFilter(pEVR, "EVR");
            DsError.ThrowExceptionForHR(hr);

            ICaptureGraphBuilder2 cgb;
            cgb = (ICaptureGraphBuilder2)new CaptureGraphBuilder2();

            hr = cgb.SetFiltergraph(m_pGraph);
            DsError.ThrowExceptionForHR(hr);
            hr = cgb.RenderStream(null, MediaType.Video, pSource, null, pEVR);
            DsError.ThrowExceptionForHR(hr);

            IMFGetService gs = pEVR as IMFGetService;
            hr = gs.GetService(MFServices.MR_VIDEO_MIXER_SERVICE, typeof(IMFVideoProcessor).GUID, out o);
            MFError.ThrowExceptionForHR(hr);

            m_vp = o as IMFVideoProcessor;
        }
Example #4
        public static IBaseFilter AddFilterById(IGraphBuilder graph, Guid guid, string name)
        {
            Ensure.IsNotNull(Log, graph, "graph is null");

            IBaseFilter filter = null;

            try
            {
                var type = Type.GetTypeFromCLSID(guid);
                filter = (IBaseFilter)Activator.CreateInstance(type);

                var hr = graph.AddFilter(filter, name);
                DsError.ThrowExceptionForHR(hr);
            }
            catch (Exception ex)
            {
                if (filter != null)
                {
                    graph.RemoveFilter(filter);
                    Marshal.ReleaseComObject(filter);
                    filter = null;
                }

                Log.Fatal(string.Format("Filter {0} was not added to the graph: {1}", name, ex));
            }

            return filter;
        }
Example #5
        public static IBaseFilter AddFilterFromClsid(IGraphBuilder graphBuilder, Guid clsid, string name)
        {
            int hr = 0;
            IBaseFilter filter = null;

            if (graphBuilder == null)
                throw new ArgumentNullException("graphBuilder");

            try
            {
                Type type = Type.GetTypeFromCLSID(clsid);
                filter = (IBaseFilter)Activator.CreateInstance(type);

                hr = graphBuilder.AddFilter(filter, name);
                DsError.ThrowExceptionForHR(hr);
            }
            catch
            {
                if (filter != null)
                {
                    Marshal.ReleaseComObject(filter);
                    filter = null;
                }
            }

            return filter;
        }
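Either helper above is normally called with a known filter CLSID. A short hypothetical sketch, assuming an existing graphBuilder and that DirectShowLib is referenced so the CLSID can be taken from its SampleGrabber wrapper type:

        // Add a stock Sample Grabber to an existing graphBuilder.
        Guid clsid = typeof(SampleGrabber).GUID;   // any registered filter CLSID works here
        IBaseFilter grabber = AddFilterFromClsid(graphBuilder, clsid, "Sample Grabber");
        if (grabber == null)
            throw new InvalidOperationException("Sample Grabber could not be created or added");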
Example #6
        public MainForm()
        {
            InitializeComponent();
            graphbuilder = (IGraphBuilder)new FilterGraph();
            samplegrabber = (ISampleGrabber)new SampleGrabber();
            graphbuilder.AddFilter((IBaseFilter)samplegrabber, "samplegrabber");

            mt = new AMMediaType();
            mt.majorType = MediaType.Video;
            mt.subType = MediaSubType.RGB24;
            mt.formatType = FormatType.VideoInfo;
            samplegrabber.SetMediaType(mt);
            PrintSeconds();
        }
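A hedged follow-up to the constructor above: once the graph has been rendered, the grabber is typically configured for buffering or callbacks and its connected media type is read back (the same calls appear in Example #17 below):

        int hr = samplegrabber.SetBufferSamples(true);
        DsError.ThrowExceptionForHR(hr);
        hr = samplegrabber.SetOneShot(false);
        DsError.ThrowExceptionForHR(hr);

        // Pass an ISampleGrabberCB implementation instead of null to receive frames;
        // the second argument selects SampleCB (0) or BufferCB (1).
        hr = samplegrabber.SetCallback(null, 0);
        DsError.ThrowExceptionForHR(hr);

        var connected = new AMMediaType();
        hr = samplegrabber.GetConnectedMediaType(connected);
        DsError.ThrowExceptionForHR(hr);
        DsUtils.FreeAMMediaType(connected);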
Example #7
    public static void AddStreamSourceFilter(string sourceFilterName, IResourceAccessor resourceAccessor, IGraphBuilder graphBuilder)
    {
      IBaseFilter sourceFilter = null;
      try
      {
        if (sourceFilterName == Utils.FilterName)
        {
          var filterPath = FileUtils.BuildAssemblyRelativePath(@"MPUrlSourceSplitter\MPUrlSourceSplitter.ax");
          sourceFilter = FilterLoader.LoadFilterFromDll(filterPath, new Guid(Utils.FilterCLSID));
          if (sourceFilter != null)
          {
            graphBuilder.AddFilter(sourceFilter, Utils.FilterName);
          }
        }
        else
        {
          sourceFilter = FilterGraphTools.AddFilterByName(graphBuilder, FilterCategory.LegacyAmFilterCategory, sourceFilterName);
        }

        if (sourceFilter == null)
          throw new UPnPRendererExceptions(string.Format("Could not create instance of source filter: '{0}'", sourceFilterName));

        string url = resourceAccessor.ResourcePathName;

        var filterStateEx = sourceFilter as OnlineVideos.MPUrlSourceFilter.IFilterStateEx;
        if (filterStateEx != null)
          LoadAndWaitForMPUrlSourceFilter(url, filterStateEx);
        else
        {
          var fileSourceFilter = sourceFilter as IFileSourceFilter;
          if (fileSourceFilter != null)
            Marshal.ThrowExceptionForHR(fileSourceFilter.Load(resourceAccessor.ResourcePathName, null));
          else
            throw new UPnPRendererExceptions(string.Format("'{0}' does not implement IFileSourceFilter", sourceFilterName));
        }

        FilterGraphTools.RenderOutputPins(graphBuilder, sourceFilter);
      }
      finally
      {
        FilterGraphTools.TryRelease(ref sourceFilter);
      }
    }
Example #8
        public static IBaseFilter RenderFileDestination(DisposalCleanup dc, IGraphBuilder graph, string outputFile)
        {
            if (dc == null) throw new ArgumentNullException("dc");
            if (graph == null) throw new ArgumentNullException("graph");
            if (string.IsNullOrEmpty(outputFile)) throw new ArgumentNullException("outputFile");

            int hr = 0;

            var fileFilter = (IBaseFilter) new FileWriter();

            hr = ((IFileSinkFilter) fileFilter).SetFileName(outputFile, null);
            DsError.ThrowExceptionForHR(hr);

            hr = graph.AddFilter(fileFilter, Resources.DefaultFileDestinationName);
            DsError.ThrowExceptionForHR(hr);

            dc.Add(fileFilter);

            return fileFilter;
        }
Example #9
        protected CaptureGraph(FilterInfo fiSource)
        {
            try
            {
                // Fgm initialization
                fgm = new FilgraphManagerClass();
                iFG = (IFilterGraph)fgm;
                iGB = (IGraphBuilder)fgm;
                rotID = FilterGraph.AddToRot(iGB);

                // Create source filter and initialize it
                source = (SourceFilter)Filter.CreateFilter(fiSource);
                iGB.AddFilter(source.BaseFilter, source.FriendlyName);
                source.AddedToGraph(fgm);
            }
            catch(Exception)
            {
                Cleanup();
                throw;
            }
        }
Example #10
        public static IBaseFilter AddFilterToGraph(IGraphBuilder graphBuilder, string strFilterName, Guid clsid)
        {
            try
            {
                IBaseFilter NewFilter = null;
                foreach (Filter filter in Filters.LegacyFilters)
                {
                    if (String.Compare(filter.Name, strFilterName, true) == 0 && (clsid == Guid.Empty || filter.CLSID == clsid))
                    {
                        NewFilter = (IBaseFilter)Marshal.BindToMoniker(filter.MonikerString);

                        int hr = graphBuilder.AddFilter(NewFilter, strFilterName);
                        if (hr < 0)
                        {
                            //Log.Error("Failed: Unable to add filter: {0} to graph", strFilterName);
                            NewFilter = null;
                        }
                        else
                        {
                            //Log.Info("Added filter: {0} to graph", strFilterName);
                        }
                        break;
                    }
                }
                if (NewFilter == null)
                {
                    //Log.Error("Failed filter: {0} not found", strFilterName);
                }
                return NewFilter;
            }
            catch (Exception ex)
            {
                //Log.Error("Failed filter: {0} not found {1}", strFilterName, ex.Message);
                return null;
            }
        }
Example #11
        private void loadVideo(String videoPath)
        {
            videoFilepath = videoPath;
            videoFileName.Text = getDisplayVideoName();

            if (graph != null)
            {
                graph = null;
            }
            if (mediaControl != null)
            {
                // Stop media playback
                this.mediaControl.Stop();
                mediaControl = null;
            }

            if (videoWindow != null)
            {
                videoWindow.put_Owner(IntPtr.Zero);
                videoWindow = null;
            }

            if (mediaSeeking != null)
            {
                mediaSeeking = null;
            }
            if (basicAudio != null)
            {
                basicAudio = null;
            }
            GC.Collect();

            /* if (mediaPosition != null)
            {
                mediaPosition = null;
            }*/

            graph = (IGraphBuilder)new FilterGraph();
            mediaControl = (IMediaControl)graph;
            //mediaPosition = (IMediaPosition)graph;
            videoWindow = (IVideoWindow)graph;
            mediaSeeking = (IMediaSeeking)graph;
            basicAudio = (IBasicAudio)graph;

            AviSplitter spliter = new AviSplitter();
            graph.AddFilter((IBaseFilter)spliter, null);
            graph.RenderFile(videoPath, null);
            graph.SetDefaultSyncSource();

            /*
             * AMSeekingSeekingCapabilities cap = AMSeekingSeekingCapabilities.CanGetCurrentPos;
            if (mediaSeeking.CheckCapabilities(ref cap) > 0)
            {
                this.consoleErreur.AppendText("Impossible de recuperer la position de la frame");
            }
             * */

            videoWindow.put_Owner(videoPanel.Handle);
            videoWindow.put_MessageDrain(videoPanel.Handle);
            videoWindow.put_WindowStyle(WindowStyle.Child);
            videoWindow.put_WindowStyleEx(WindowStyleEx.ControlParent);
            videoWindow.put_Left(0);
            videoWindow.put_Top(0);
            videoWindow.put_Width(videoPanel.Width);
            videoWindow.put_Height(videoPanel.Height);

            //positionTrackbar.Enabled = true;
            speedTrackBar.Enabled = true;
            mediaSeeking.SetTimeFormat(TimeFormat.Frame);

            double rate;
            mediaSeeking.GetRate(out rate);
            rateText.Text = rate.ToString();
            speedTrackBar.Value = (int)(speedTrackBar.Maximum * rate / 2);

            trackBar1.Value = trackBar1.Maximum / 2;
            this.basicAudio.put_Volume(-5000 + 5000 * trackBar1.Value / trackBar1.Maximum);
            //mediaPosition.put_Rate(0.5);
            running = false;
            frameChanged = false;
        }
Example #12
 public bool Transcode(TranscodeInfo info, VideoFormat format, Quality quality, Standard standard)
 {
     try
     {
         if (!Supports(format))
         {
             return(false);
         }
         string ext = System.IO.Path.GetExtension(info.file);
         if (ext.ToLowerInvariant() != ".ts" && ext.ToLowerInvariant() != ".mpg")
         {
             Log.Info("TSReader2WMV: wrong file format");
             return(false);
         }
         Log.Info("TSReader2WMV: create graph");
         graphBuilder = (IGraphBuilder) new FilterGraph();
         _rotEntry    = new DsROTEntry((IFilterGraph)graphBuilder);
         Log.Info("TSReader2WMV: add filesource");
         TsReader reader = new TsReader();
         tsreaderSource = (IBaseFilter)reader;
         //ITSReader ireader = (ITSReader)reader;
         //ireader.SetTsReaderCallback(this);
         //ireader.SetRequestAudioChangeCallback(this);
         IBaseFilter filter = (IBaseFilter)tsreaderSource;
         graphBuilder.AddFilter(filter, "TSReader Source");
         IFileSourceFilter fileSource = (IFileSourceFilter)tsreaderSource;
         Log.Info("TSReader2WMV: load file:{0}", info.file);
         int hr = fileSource.Load(info.file, null);
         //add audio/video codecs
         string strVideoCodec     = "";
         string strH264VideoCodec = "";
         string strAudioCodec     = "";
         string strAACAudioCodec  = "";
         using (MediaPortal.Profile.Settings xmlreader = new MediaPortal.Profile.MPSettings())
         {
             strVideoCodec     = xmlreader.GetValueAsString("mytv", "videocodec", "");
             strAudioCodec     = xmlreader.GetValueAsString("mytv", "audiocodec", "");
             strAACAudioCodec  = xmlreader.GetValueAsString("mytv", "aacaudiocodec", "");
             strH264VideoCodec = xmlreader.GetValueAsString("mytv", "h264videocodec", "");
         }
         //Find the type of decoder required for the output video & audio pins on TSReader.
         Log.Info("TSReader2WMV: find tsreader compatible audio/video decoders");
         IPin pinOut0, pinOut1;
         IPin pinIn0, pinIn1;
         pinOut0 = DsFindPin.ByDirection((IBaseFilter)tsreaderSource, PinDirection.Output, 0); //audio
         pinOut1 = DsFindPin.ByDirection((IBaseFilter)tsreaderSource, PinDirection.Output, 1); //video
         if (pinOut0 == null || pinOut1 == null)
         {
             Log.Error("TSReader2WMV: FAILED: unable to get output pins of tsreader");
             Cleanup();
             return(false);
         }
         bool            usingAAC = false;
         IEnumMediaTypes enumMediaTypes;
         hr = pinOut0.EnumMediaTypes(out enumMediaTypes);
         while (true)
         {
             AMMediaType[] mediaTypes = new AMMediaType[1];
             int           typesFetched;
             hr = enumMediaTypes.Next(1, mediaTypes, out typesFetched);
             if (hr != 0 || typesFetched == 0)
             {
                 break;
             }
             if (mediaTypes[0].majorType == MediaType.Audio && mediaTypes[0].subType == MediaSubType.LATMAAC)
             {
                 Log.Info("TSReader2WMV: found LATM AAC audio out pin on tsreader");
                 usingAAC = true;
             }
         }
         bool usingH264 = false;
         hr = pinOut1.EnumMediaTypes(out enumMediaTypes);
         while (true)
         {
             AMMediaType[] mediaTypes = new AMMediaType[1];
             int           typesFetched;
             hr = enumMediaTypes.Next(1, mediaTypes, out typesFetched);
             if (hr != 0 || typesFetched == 0)
             {
                 break;
             }
             if (mediaTypes[0].majorType == MediaType.Video && mediaTypes[0].subType == AVC1)
             {
                 Log.Info("TSReader2WMV: found H.264 video out pin on tsreader");
                 usingH264 = true;
             }
         }
         //Add the type of decoder required for the output video & audio pins on TSReader.
         Log.Info("TSReader2WMV: add audio/video decoders to graph");
         if (usingH264 == false)
         {
             Log.Info("TSReader2WMV: add mpeg2 video decoder:{0}", strVideoCodec);
             VideoCodec = DirectShowUtil.AddFilterToGraph(graphBuilder, strVideoCodec);
             if (VideoCodec == null)
             {
                 Log.Error("TSReader2WMV: unable to add mpeg2 video decoder");
                 Cleanup();
                 return(false);
             }
         }
         else
         {
             Log.Info("TSReader2WMV: add h264 video codec:{0}", strH264VideoCodec);
             VideoCodec = DirectShowUtil.AddFilterToGraph(graphBuilder, strH264VideoCodec);
             if (VideoCodec == null)
             {
                 Log.Error("TSReader2WMV: FAILED:unable to add h264 video codec");
                 Cleanup();
                 return(false);
             }
         }
         if (usingAAC == false)
         {
             Log.Info("TSReader2WMV: add mpeg2 audio codec:{0}", strAudioCodec);
             AudioCodec = DirectShowUtil.AddFilterToGraph(graphBuilder, strAudioCodec);
             if (AudioCodec == null)
             {
                 Log.Error("TSReader2WMV: FAILED:unable to add mpeg2 audio codec");
                 Cleanup();
                 return(false);
             }
         }
         else
         {
             Log.Info("TSReader2WMV: add aac audio codec:{0}", strAACAudioCodec);
             AudioCodec = DirectShowUtil.AddFilterToGraph(graphBuilder, strAACAudioCodec);
             if (AudioCodec == null)
             {
                 Log.Error("TSReader2WMV: FAILED:unable to add aac audio codec");
                 Cleanup();
                 return(false);
             }
         }
         Log.Info("TSReader2WMV: connect tsreader->audio/video decoders");
         //connect output #0 (audio) of tsreader->audio decoder input pin 0
         //connect output #1 (video) of tsreader->video decoder input pin 0
         pinIn0 = DsFindPin.ByDirection(AudioCodec, PinDirection.Input, 0); //audio
         pinIn1 = DsFindPin.ByDirection(VideoCodec, PinDirection.Input, 0); //video
         if (pinIn0 == null || pinIn1 == null)
         {
             Log.Error("TSReader2WMV: FAILED: unable to get pins of video/audio codecs");
             Cleanup();
             return(false);
         }
         hr = graphBuilder.Connect(pinOut0, pinIn0);
         if (hr != 0)
         {
             Log.Error("TSReader2WMV: FAILED: unable to connect audio pins :0x{0:X}", hr);
             Cleanup();
             return(false);
         }
         hr = graphBuilder.Connect(pinOut1, pinIn1);
         if (hr != 0)
         {
             Log.Error("TSReader2WMV: FAILED: unable to connect video pins :0x{0:X}", hr);
             Cleanup();
             return(false);
         }
         string outputFilename = System.IO.Path.ChangeExtension(info.file, ".wmv");
         if (!AddWmAsfWriter(outputFilename, quality, standard))
         {
             return(false);
         }
         Log.Info("TSReader2WMV: start pre-run");
         mediaControl = graphBuilder as IMediaControl;
         mediaSeeking = tsreaderSource as IMediaSeeking;
         mediaEvt     = graphBuilder as IMediaEventEx;
         mediaPos     = graphBuilder as IMediaPosition;
         //get file duration
         long lTime = 5 * 60 * 60;
         lTime *= 10000000;
         long pStop = 0;
         hr = mediaSeeking.SetPositions(new DsLong(lTime), AMSeekingSeekingFlags.AbsolutePositioning, new DsLong(pStop),
                                        AMSeekingSeekingFlags.NoPositioning);
         if (hr == 0)
         {
             long lStreamPos;
             mediaSeeking.GetCurrentPosition(out lStreamPos); // stream position
             m_dDuration = lStreamPos;
             lTime       = 0;
             mediaSeeking.SetPositions(new DsLong(lTime), AMSeekingSeekingFlags.AbsolutePositioning, new DsLong(pStop),
                                       AMSeekingSeekingFlags.NoPositioning);
         }
         double duration = m_dDuration / 10000000d;
         Log.Info("TSReader2WMV: movie duration:{0}", Util.Utils.SecondsToHMSString((int)duration));
         hr = mediaControl.Run();
         if (hr != 0)
         {
             Log.Error("TSReader2WMV: FAILED: unable to start graph :0x{0:X}", hr);
             Cleanup();
             return(false);
         }
         int maxCount = 20;
         while (true)
         {
             long lCurrent;
             mediaSeeking.GetCurrentPosition(out lCurrent);
             double dpos = (double)lCurrent;
             dpos /= 10000000d;
             System.Threading.Thread.Sleep(100);
             if (dpos >= 2.0d)
             {
                 break;
             }
             maxCount--;
             if (maxCount <= 0)
             {
                 break;
             }
         }
         Log.Info("TSReader2WMV: pre-run done");
         Log.Info("TSReader2WMV: Get duration of movie");
         mediaControl.Stop();
         FilterState state;
         mediaControl.GetState(500, out state);
         GC.Collect();
         GC.Collect();
         GC.Collect();
         GC.WaitForPendingFinalizers();
         Log.Info("TSReader2WMV: reconnect mpeg2 video codec->ASF WM Writer");
         graphBuilder.RemoveFilter(fileWriterbase);
         if (!AddWmAsfWriter(outputFilename, quality, standard))
         {
             return(false);
         }
         Log.Info("TSReader2WMV: Start transcoding");
         hr = mediaControl.Run();
         if (hr != 0)
         {
             Log.Error("TSReader2WMV:FAILED:unable to start graph :0x{0:X}", hr);
             Cleanup();
             return(false);
         }
     }
     catch (Exception e)
     {
         // TODO: Handle exceptions.
         Log.Error("unable to transcode file:{0} message:{1}", info.file, e.Message);
         return(false);
     }
     return(true);
 }
Example #13
        /// <summary> build the capture graph. </summary>
        bool SetupGraph()
        {
            int             hr;
            IBaseFilter     mux  = null;
            IFileSinkFilter sink = null;

            try
            {
                hr = capGraph.SetFiltergraph(graphBuilder);
                if (hr < 0)
                {
                    Marshal.ThrowExceptionForHR(hr);
                }

                hr = graphBuilder.AddFilter(capFilter, "Ds.NET Video Capture Device");
                if (hr < 0)
                {
                    Marshal.ThrowExceptionForHR(hr);
                }

                //DsUtils.ShowCapPinDialog(capGraph, capFilter, this.Handle);

                Guid sub = MediaSubType.Avi;
                hr = capGraph.SetOutputFileName(ref sub, fileName, out mux, out sink);
                if (hr < 0)
                {
                    Marshal.ThrowExceptionForHR(hr);
                }

                Guid cat = PinCategory.Capture;
                Guid med = MediaType.Video;
                hr = capGraph.RenderStream(ref cat, ref med, capFilter, null, mux); // stream to file
                if (hr < 0)
                {
                    Marshal.ThrowExceptionForHR(hr);
                }

                cat = PinCategory.Preview;
                med = MediaType.Video;
                hr  = capGraph.RenderStream(ref cat, ref med, capFilter, null, null); // preview window
                if (hr < 0)
                {
                    Marshal.ThrowExceptionForHR(hr);
                }

                return(true);
            }
            catch (Exception ee)
            {
                MessageBox.Show(this, "Could not setup graph\r\n" + ee.Message, "DirectShow.NET", MessageBoxButtons.OK, MessageBoxIcon.Stop);
                return(false);
            }
            finally
            {
                if (mux != null)
                {
                    Marshal.ReleaseComObject(mux);
                }
                mux = null;
                if (sink != null)
                {
                    Marshal.ReleaseComObject(sink);
                }
                sink = null;
            }
        }
Example #14
        /// <summary>
        /// Creates a new VMR9 renderer and configures it with an allocator
        /// </summary>
        /// <returns>An initialized DirectShow VMR9 renderer</returns>
        private IBaseFilter CreateVideoMixingRenderer9(IGraphBuilder graph)
        {
            var vmr9 = new VideoMixingRenderer9() as IBaseFilter;

            var filterConfig = vmr9 as IVMRFilterConfig9;

            if (filterConfig == null)
                throw new Exception("Could not query filter configuration.");

            /* Configure the mixer for the number of input streams we may connect (four here) */
            int hr = filterConfig.SetNumberOfStreams(4);
            DsError.ThrowExceptionForHR(hr);

            /* Setting the renderer to "Renderless" mode
             * sounds counterproductive, but it's what we
             * need to do to set up a custom allocator */
            hr = filterConfig.SetRenderingMode(VMR9Mode.Renderless);
            DsError.ThrowExceptionForHR(hr);

            /* Query the allocator interface */
            var vmrSurfAllocNotify = vmr9 as IVMRSurfaceAllocatorNotify9;

            if (vmrSurfAllocNotify == null)
                throw new Exception("Could not query the VMR surface allocator.");

            /* We supply an hWnd so Direct3D can initialize */
            var allocator = new Vmr9Allocator(HwndHelper.Handle);

            /* We supply our custom allocator to the renderer */
            hr = vmrSurfAllocNotify.AdviseSurfaceAllocator(userId, allocator);
            DsError.ThrowExceptionForHR(hr);

            hr = allocator.AdviseNotify(vmrSurfAllocNotify);
            DsError.ThrowExceptionForHR(hr);

            RegisterCustomAllocator(allocator);

            hr = graph.AddFilter(vmr9, //"Renderer: 1");
                                 string.Format("Renderer: {0}",
                                 VideoRendererType.VideoMixingRenderer9));

            DsError.ThrowExceptionForHR(hr);

            return vmr9;
        }
Example #15
        /// <summary> create the used COM components and get the interfaces. </summary>
        protected bool GetInterfaces()
        {
            Vmr9 = null;
            if (IsRadio == false)
            {
                Vmr9 = new VMR9Util();

                // switch back to directx fullscreen mode
                Log.Info("RTSPPlayer: Enabling DX9 exclusive mode");
                GUIMessage msg = new GUIMessage(GUIMessage.MessageType.GUI_MSG_SWITCH_FULL_WINDOWED, 0, 0, 0, 1, 0, null);
                GUIWindowManager.SendMessage(msg);
            }
            //Type comtype = null;
            //object comobj = null;

            DsRect rect = new DsRect();

            rect.top    = 0;
            rect.bottom = GUIGraphicsContext.form.Height;
            rect.left   = 0;
            rect.right  = GUIGraphicsContext.form.Width;


            try
            {
                graphBuilder = (IGraphBuilder) new FilterGraph();

                Log.Info("RTSPPlayer: add source filter");
                if (IsRadio == false)
                {
                    Vmr9.AddVMR9(graphBuilder);
                    Vmr9.Enable(false);
                }

                _mpegDemux = (IBaseFilter) new MPEG2Demultiplexer();
                graphBuilder.AddFilter(_mpegDemux, "MPEG-2 Demultiplexer");

                _rtspSource = (IBaseFilter) new RtpSourceFilter();
                int hr = graphBuilder.AddFilter((IBaseFilter)_rtspSource, "RTSP Source Filter");
                if (hr != 0)
                {
                    Log.Error("RTSPPlayer:unable to add RTSP source filter:{0:X}", hr);
                    return(false);
                }

                // add preferred video & audio codecs
                Log.Info("RTSPPlayer: add video/audio codecs");
                string strVideoCodec               = "";
                string strAudioCodec               = "";
                string strAudiorenderer            = "";
                int    intFilters                  = 0;  // FlipGer: count custom filters
                string strFilters                  = ""; // FlipGer: collect custom filters
                string postProcessingFilterSection = "mytv";
                using (Settings xmlreader = new MPSettings())
                {
                    if (_mediaType == g_Player.MediaType.Video)
                    {
                        strVideoCodec               = xmlreader.GetValueAsString("movieplayer", "mpeg2videocodec", "");
                        strAudioCodec               = xmlreader.GetValueAsString("movieplayer", "mpeg2audiocodec", "");
                        strAudiorenderer            = xmlreader.GetValueAsString("movieplayer", "audiorenderer", "Default DirectSound Device");
                        postProcessingFilterSection = "movieplayer";
                    }
                    else
                    {
                        strVideoCodec               = xmlreader.GetValueAsString("mytv", "videocodec", "");
                        strAudioCodec               = xmlreader.GetValueAsString("mytv", "audiocodec", "");
                        strAudiorenderer            = xmlreader.GetValueAsString("mytv", "audiorenderer", "Default DirectSound Device");
                        postProcessingFilterSection = "mytv";
                    }
                    enableDvbSubtitles = xmlreader.GetValueAsBool("tvservice", "dvbsubtitles", false);
                    // FlipGer: load infos for custom filters
                    int intCount = 0;
                    while (xmlreader.GetValueAsString(postProcessingFilterSection, "filter" + intCount.ToString(), "undefined") !=
                           "undefined")
                    {
                        if (xmlreader.GetValueAsBool(postProcessingFilterSection, "usefilter" + intCount.ToString(), false))
                        {
                            strFilters +=
                                xmlreader.GetValueAsString(postProcessingFilterSection, "filter" + intCount.ToString(), "undefined") +
                                ";";
                            intFilters++;
                        }
                        intCount++;
                    }
                }
                string extension = Path.GetExtension(m_strCurrentFile).ToLower();
                if (IsRadio == false)
                {
                    if (strVideoCodec.Length > 0)
                    {
                        DirectShowUtil.AddFilterToGraph(graphBuilder, strVideoCodec);
                    }
                }
                if (strAudioCodec.Length > 0)
                {
                    DirectShowUtil.AddFilterToGraph(graphBuilder, strAudioCodec);
                }

                if (enableDvbSubtitles == true)
                {
                    try
                    {
                        _subtitleFilter = SubtitleRenderer.GetInstance().AddSubtitleFilter(graphBuilder);
                        SubtitleRenderer.GetInstance().SetPlayer(this);
                        dvbSubRenderer = SubtitleRenderer.GetInstance();
                    }
                    catch (Exception e)
                    {
                        Log.Error(e);
                    }
                }

                Log.Debug("Is subtitle fitler null? {0}", (_subtitleFilter == null));
                // FlipGer: add custom filters to graph
                string[] arrFilters = strFilters.Split(';');
                for (int i = 0; i < intFilters; i++)
                {
                    DirectShowUtil.AddFilterToGraph(graphBuilder, arrFilters[i]);
                }
                if (strAudiorenderer.Length > 0)
                {
                    audioRendererFilter = DirectShowUtil.AddAudioRendererToGraph(graphBuilder, strAudiorenderer, false);
                }

                Log.Info("RTSPPlayer: load:{0}", m_strCurrentFile);
                IFileSourceFilter interfaceFile = (IFileSourceFilter)_rtspSource;
                if (interfaceFile == null)
                {
                    Log.Error("RTSPPlayer:Failed to get IFileSourceFilter");
                    return(false);
                }

                //Log.Info("RTSPPlayer: open file:{0}",filename);
                hr = interfaceFile.Load(m_strCurrentFile, null);
                if (hr != 0)
                {
                    Log.Error("RTSPPlayer:Failed to open file:{0} :0x{1:x}", m_strCurrentFile, hr);
                    return(false);
                }

                #region connect rtspsource->demux

                Log.Info("RTSPPlayer:connect rtspsource->mpeg2 demux");
                IPin pinTsOut = DsFindPin.ByDirection((IBaseFilter)_rtspSource, PinDirection.Output, 0);
                if (pinTsOut == null)
                {
                    Log.Info("RTSPPlayer:failed to find output pin of tsfilesource");
                    return(false);
                }
                IPin pinDemuxIn = DsFindPin.ByDirection(_mpegDemux, PinDirection.Input, 0);
                if (pinDemuxIn == null)
                {
                    Log.Info("RTSPPlayer:failed to find output pin of tsfilesource");
                    return(false);
                }

                hr = graphBuilder.Connect(pinTsOut, pinDemuxIn);
                if (hr != 0)
                {
                    Log.Info("RTSPPlayer:failed to connect rtspsource->mpeg2 demux:{0:X}", hr);
                    return(false);
                }
                DirectShowUtil.ReleaseComObject(pinTsOut);
                DirectShowUtil.ReleaseComObject(pinDemuxIn);

                #endregion

                #region render demux output pins

                if (IsRadio)
                {
                    Log.Info("RTSPPlayer:render audio demux outputs");
                    IEnumPins enumPins;
                    _mpegDemux.EnumPins(out enumPins);
                    IPin[] pins    = new IPin[2];
                    int    fetched = 0;
                    while (enumPins.Next(1, pins, out fetched) == 0)
                    {
                        if (fetched != 1)
                        {
                            break;
                        }
                        PinDirection direction;
                        pins[0].QueryDirection(out direction);
                        if (direction == PinDirection.Input)
                        {
                            continue;
                        }
                        IEnumMediaTypes enumMediaTypes;
                        pins[0].EnumMediaTypes(out enumMediaTypes);
                        AMMediaType[] mediaTypes = new AMMediaType[20];
                        int           fetchedTypes;
                        enumMediaTypes.Next(20, mediaTypes, out fetchedTypes);
                        for (int i = 0; i < fetchedTypes; ++i)
                        {
                            if (mediaTypes[i].majorType == MediaType.Audio)
                            {
                                graphBuilder.Render(pins[0]);
                                break;
                            }
                        }
                    }
                }
                else
                {
                    Log.Info("RTSPPlayer:render audio/video demux outputs");
                    IEnumPins enumPins;
                    _mpegDemux.EnumPins(out enumPins);
                    IPin[] pins    = new IPin[2];
                    int    fetched = 0;
                    while (enumPins.Next(1, pins, out fetched) == 0)
                    {
                        if (fetched != 1)
                        {
                            break;
                        }
                        PinDirection direction;
                        pins[0].QueryDirection(out direction);
                        if (direction == PinDirection.Input)
                        {
                            continue;
                        }
                        graphBuilder.Render(pins[0]);
                    }
                }

                #endregion

                // Connect DVB subtitle filter pins in the graph
                if (_mpegDemux != null && enableDvbSubtitles == true)
                {
                    IMpeg2Demultiplexer demuxer = _mpegDemux as IMpeg2Demultiplexer;
                    hr = demuxer.CreateOutputPin(GetTSMedia(), "Pcr", out _pinPcr);

                    if (hr == 0)
                    {
                        Log.Info("RTSPPlayer:_pinPcr OK");

                        IPin pDemuxerPcr  = DsFindPin.ByName(_mpegDemux, "Pcr");
                        IPin pSubtitlePcr = DsFindPin.ByName(_subtitleFilter, "Pcr");
                        hr = graphBuilder.Connect(pDemuxerPcr, pSubtitlePcr);
                    }
                    else
                    {
                        Log.Info("RTSPPlayer:Failed to create _pinPcr in demuxer:{0:X}", hr);
                    }

                    hr = demuxer.CreateOutputPin(GetTSMedia(), "Subtitle", out _pinSubtitle);
                    if (hr == 0)
                    {
                        Log.Info("RTSPPlayer:_pinSubtitle OK");

                        IPin pDemuxerSubtitle = DsFindPin.ByName(_mpegDemux, "Subtitle");
                        IPin pSubtitle        = DsFindPin.ByName(_subtitleFilter, "In");
                        hr = graphBuilder.Connect(pDemuxerSubtitle, pSubtitle);
                    }
                    else
                    {
                        Log.Info("RTSPPlayer:Failed to create _pinSubtitle in demuxer:{0:X}", hr);
                    }

                    hr = demuxer.CreateOutputPin(GetTSMedia(), "PMT", out _pinPMT);
                    if (hr == 0)
                    {
                        Log.Info("RTSPPlayer:_pinPMT OK");

                        IPin pDemuxerSubtitle = DsFindPin.ByName(_mpegDemux, "PMT");
                        IPin pSubtitle        = DsFindPin.ByName(_subtitleFilter, "PMT");
                        hr = graphBuilder.Connect(pDemuxerSubtitle, pSubtitle);
                    }
                    else
                    {
                        Log.Info("RTSPPlayer:Failed to create _pinPMT in demuxer:{0:X}", hr);
                    }
                }


                if (IsRadio == false)
                {
                    if (!Vmr9.IsVMR9Connected)
                    {
                        //VMR9 is not supported, switch to overlay
                        Log.Info("RTSPPlayer: vmr9 not connected");
                        _mediaCtrl = null;
                        Cleanup();
                        return(false);
                    }
                    Vmr9.SetDeinterlaceMode();
                }

                _mediaCtrl    = (IMediaControl)graphBuilder;
                mediaEvt      = (IMediaEventEx)graphBuilder;
                _mediaSeeking = (IMediaSeeking)graphBuilder;
                mediaPos      = (IMediaPosition)graphBuilder;
                basicAudio    = graphBuilder as IBasicAudio;
                //DirectShowUtil.SetARMode(graphBuilder,AspectRatioMode.Stretched);
                DirectShowUtil.EnableDeInterlace(graphBuilder);
                if (Vmr9 != null)
                {
                    m_iVideoWidth  = Vmr9.VideoWidth;
                    m_iVideoHeight = Vmr9.VideoHeight;
                }
                if (audioRendererFilter != null)
                {
                    Log.Info("RTSPPlayer9:set reference clock");
                    IMediaFilter    mp    = graphBuilder as IMediaFilter;
                    IReferenceClock clock = audioRendererFilter as IReferenceClock;
                    hr = mp.SetSyncSource(null);
                    hr = mp.SetSyncSource(clock);
                    Log.Info("RTSPPlayer9:set reference clock:{0:X}", hr);
                }
                Log.Info("RTSPPlayer: graph build successfull");
                return(true);
            }
            catch (Exception ex)
            {
                Error.SetError("Unable to play movie", "Unable build graph for VMR9");
                Log.Error("RTSPPlayer:exception while creating DShow graph {0} {1}", ex.Message, ex.StackTrace);
                return(false);
            }
        }
Example #16
        /// <summary>
        /// Build a graph with sampleGrabber.  Render it, and get the media type.
        /// </summary>
        /// <param name="payload"></param>
        /// <returns></returns>
        public bool Build(PayloadType payload, RtpStream newStream)
        {
            this.stream = newStream;
            this.ssrc   = newStream.SSRC;

            //Required as of RC3:
            this.stream.IsUsingNextFrame = true;

            if ((ssrc == 0) || !((payload == PayloadType.dynamicVideo) || (payload == PayloadType.dynamicAudio)))
            {
                errorMsg = "Invalid inputs to build method.";
                return(false);
            }

            fgm = new FilgraphManagerClass();

            MSR.LST.MDShow.IBaseFilter bfSource = null;
            IGraphBuilder iGB = (IGraphBuilder)fgm;

            //if (false)
            //	rotnum = FilterGraph.AddToRot(iGB); //AddToRot(iGB);

            try
            {
                bfSource = RtpSourceClass.CreateInstance();
                ((MSR.LST.MDShow.Filters.IRtpSource)bfSource).Initialize(this.stream);
                iGB.AddFilter(bfSource, "RtpSource");
                MSR.LST.MDShow.IPin sourceOutput = Filter.GetPin(bfSource, _PinDirection.PINDIR_OUTPUT, Guid.Empty,
                                                                 Guid.Empty, false, 0);


                //Add SampleGrabber filter
                MSR.LST.MDShow.IBaseFilter bfGrabber = SampleGrabberClass.CreateInstance();
                iGB.AddFilter(bfGrabber, "Grabber");
                UW.CSE.MDShow.ISampleGrabber sgGrabber = (UW.CSE.MDShow.ISampleGrabber)bfGrabber;

                //Set mediatype
                UW.CSE.MDShow._AMMediaType mt = new UW.CSE.MDShow._AMMediaType();
                if (payload == PayloadType.dynamicVideo)
                {
                    mt.majortype = MediaType.MajorType.MEDIATYPE_Video;
                    //PRI2: RGB24 seems to work for all video?  We have used YUY2 in the past, but that won't work
                    // for screen streaming.  Probably could use more testing
                    //mt.subtype = MediaType.SubType.MEDIASUBTYPE_YUY2;
                    mt.subtype = MediaType.SubType.MEDIASUBTYPE_RGB24;
                }
                else
                {
                    mt.majortype = MediaType.MajorType.MEDIATYPE_Audio;
                    mt.subtype   = MediaType.SubType.MEDIASUBTYPE_PCM;                   //MEDIASUBTYPE_PCM;
                }

                sgGrabber.SetMediaType(ref mt);

                //Add samplegrabber callback
                //0 is sampleCB, 1 is bufferCB.  Only bufferCB is actually returning data so far.
                sgGrabber.SetCallback(callBack, 1);
                sgGrabber.SetOneShot(0);
                sgGrabber.SetBufferSamples(0);

                iGB.Render(sourceOutput);

                UW.CSE.MDShow._AMMediaType uwmt = new UW.CSE.MDShow._AMMediaType();
                sgGrabber.GetConnectedMediaType(ref uwmt);
                connectedMT = copy_AMMediaType(uwmt);
            }
            catch (Exception e)
            {
                errorMsg = e.Message;
                Debug.WriteLine("Exception while building graph: " + e.ToString());
                eventLog.WriteEntry("Exception while building graph: " + e.ToString(), EventLogEntryType.Error, 1001);
                return(false);
            }
            return(true);
        }
Example #17
        private bool SetupGraph()
        {
            int hr;

            try {
                hr = capGraph.SetFiltergraph(graphBuilder);
                if (hr < 0)
                {
                    Marshal.ThrowExceptionForHR(hr);
                }

                hr = graphBuilder.AddFilter(capFilter, "Ds.NET Video Capture Device");
                if (hr < 0)
                {
                    Marshal.ThrowExceptionForHR(hr);
                }

                //DsUtils.ShowCapPinDialog(capGraph, capFilter, this.Handle);

                AMMediaType media = new AMMediaType();
                media.majorType  = MediaType.Video;
                media.subType    = MediaSubType.RGB24;
                media.formatType = FormatType.VideoInfo;                // ???
                hr = sampGrabber.SetMediaType(media);
                if (hr < 0)
                {
                    Marshal.ThrowExceptionForHR(hr);
                }

                hr = graphBuilder.AddFilter(baseGrabFlt, "Ds.NET Grabber");
                if (hr < 0)
                {
                    Marshal.ThrowExceptionForHR(hr);
                }

                Guid cat = PinCategory.Preview;
                Guid med = MediaType.Video;
                hr = capGraph.RenderStream(ref cat, ref med, capFilter, null, null); // baseGrabFlt
                if (hr < 0)
                {
                    Marshal.ThrowExceptionForHR(hr);
                }

                cat = PinCategory.Capture;
                med = MediaType.Video;
                hr  = capGraph.RenderStream(ref cat, ref med, capFilter, null, baseGrabFlt); // baseGrabFlt
                if (hr < 0)
                {
                    Marshal.ThrowExceptionForHR(hr);
                }

                media = new AMMediaType();
                hr    = sampGrabber.GetConnectedMediaType(media);
                if (hr < 0)
                {
                    Marshal.ThrowExceptionForHR(hr);
                }
                if ((media.formatType != FormatType.VideoInfo) || (media.formatPtr == IntPtr.Zero))
                {
                    throw new NotSupportedException("Unknown Grabber Media Format");
                }

                videoInfoHeader = (VideoInfoHeader)Marshal.PtrToStructure(media.formatPtr, typeof(VideoInfoHeader));
                Marshal.FreeCoTaskMem(media.formatPtr); media.formatPtr = IntPtr.Zero;

                hr = sampGrabber.SetBufferSamples(false);
                if (hr == 0)
                {
                    hr = sampGrabber.SetOneShot(false);
                }
                if (hr == 0)
                {
                    hr = sampGrabber.SetCallback(null, 0);
                }
                if (hr < 0)
                {
                    Marshal.ThrowExceptionForHR(hr);
                }

                return(true);
            }
            catch (Exception ee) {
                return(false);
            }
        }
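SetupGraph above passes null to SetCallback, so no frames reach managed code. If frames are wanted, an ISampleGrabberCB implementation would be supplied instead; a minimal hypothetical sketch (class and parameter names are illustrative, interface as defined by DirectShowLib):

        class FrameGrabberCB : ISampleGrabberCB
        {
            // Used when SetCallback(cb, 0) is chosen; the sample is only valid during this call.
            public int SampleCB(double sampleTime, IMediaSample pSample)
            {
                Marshal.ReleaseComObject(pSample);
                return 0;
            }

            // Used when SetCallback(cb, 1) is chosen; copy the buffer before returning.
            public int BufferCB(double sampleTime, IntPtr pBuffer, int bufferLen)
            {
                var frame = new byte[bufferLen];
                Marshal.Copy(pBuffer, frame, 0, bufferLen);
                return 0;
            }
        }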
Example #18
        /// <summary>
        /// Creates an instance of the EVR
        /// </summary>
        private IBaseFilter CreateEnhancedVideoRenderer(IGraphBuilder graph, int streamCount)
        {
            EvrPresenter presenter;
            IBaseFilter  filter;

            lock (m_videoRendererInitLock)
            {
                var evr = new EnhancedVideoRenderer();
                filter = evr as IBaseFilter;

                int hr = graph.AddFilter(filter, string.Format("Renderer: {0}", VideoRendererType.EnhancedVideoRenderer));
                DsError.ThrowExceptionForHR(hr);

                /* QueryInterface for the IMFVideoRenderer */
                var videoRenderer = filter as IMFVideoRenderer;

                if (videoRenderer == null)
                {
                    throw new Exception("Could not QueryInterface for the IMFVideoRenderer");
                }

                /* Create a new EVR presenter */
                presenter = EvrPresenter.CreateNew();

                /* Initialize the EVR renderer with the custom video presenter */
                hr = videoRenderer.InitializeRenderer(null, presenter.VideoPresenter);
                DsError.ThrowExceptionForHR(hr);

                var presenterSettings = presenter.VideoPresenter as IEVRPresenterSettings;
                if (presenterSettings == null)
                {
                    throw new Exception("Could not QueryInterface for the IEVRPresenterSettings");
                }

                presenterSettings.SetBufferCount(3);

                /* Use our interop hWnd */
                IntPtr handle = GetDesktopWindow();//HwndHelper.Handle;

                /* QueryInterface the IMFVideoDisplayControl */
                var displayControl = presenter.VideoPresenter as IMFVideoDisplayControl;

                if (displayControl == null)
                {
                    throw new Exception("Could not QueryInterface the IMFVideoDisplayControl");
                }

                /* Configure the presenter with our hWnd */
                hr = displayControl.SetVideoWindow(handle);
                DsError.ThrowExceptionForHR(hr);

                var filterConfig = filter as IEVRFilterConfig;

                if (filterConfig != null)
                {
                    filterConfig.SetNumberOfStreams(streamCount);
                }
            }


            RegisterCustomAllocator(presenter);

            return(filter);
        }
Example #19
        public void Init(Hashtable config = null)
        {
            //m_FilterGraph = (IFilterGraph2)new FilterGraph();
            m_FilterGraph = (IGraphBuilder)Activator.CreateInstance(Type.GetTypeFromCLSID(Clsid.FilterGraph, true));

            // Get the ICaptureGraphBuilder2
            Guid clsid = Clsid.CaptureGraphBuilder2;
            Guid riid  = typeof(ICaptureGraphBuilder2).GUID;
            ICaptureGraphBuilder2 capGraph        = (ICaptureGraphBuilder2)DsBugWO.CreateDsInstance(ref clsid, ref riid);
            IBaseFilter           capVideoFilter  = null;
            IBaseFilter           capAudioFilter  = null;
            IBaseFilter           asfWriter       = null;
            IServiceProvider      serviceProvider = null;
            int    hr;
            object iwmWriter2;

            try
            {
                // Start building the graph
                hr = capGraph.SetFiltergraph(m_FilterGraph);
                Marshal.ThrowExceptionForHR(hr);

                // Add the video device to the graph
                if (videoDevChosen != null)
                {
                    capVideoFilter = GetCapFilter(ref videoDevChosen);
                    hr             = m_FilterGraph.AddFilter(capVideoFilter, "Video Capture Device");
                    Marshal.ThrowExceptionForHR(hr);
                }

                // Add the audio device to the graph
                if (audioDevChosen != null)
                {
                    capAudioFilter = GetCapFilter(ref audioDevChosen);
                    hr             = m_FilterGraph.AddFilter(capAudioFilter, "Audio Capture Device");
                    Marshal.ThrowExceptionForHR(hr);
                }
                // if low-quality output was requested (the "shitty" config flag)
                if (config.Contains("shitty"))
                {
                    InitAsfWriter(out asfWriter, true);
                }
                else
                {
                    InitAsfWriter(out asfWriter);
                }


                // Get the IWMWriterAdvanced2 interface
                serviceProvider = (IServiceProvider)asfWriter;
                Guid IID_IWMWriterAdvanced2 = new Guid("{962dc1ec-c046-4db8-9cc7-26ceae500817}");
                hr = serviceProvider.QueryService(IID_IWMWriterAdvanced2, IID_IWMWriterAdvanced2, out iwmWriter2);
                Marshal.ThrowExceptionForHR(hr);

                m_writerAdvanced2 = (IWMWriterAdvanced2)iwmWriter2;
                m_writerAdvanced2.SetLiveSource(true);

                if (config.ContainsKey("cap"))
                {
                    outputFilename = config["cap"] as string;
                    Console.WriteLine("[MODE] Capturing to a local file: {0}", outputFilename);
                }
                IFileSinkFilter cap = (IFileSinkFilter)asfWriter;
                cap.SetFileName(outputFilename, null);

                if (!config.ContainsKey("cap"))
                {
                    // remove the default file sink (writer to a file on disk); it is not needed here
                    IWMWriterSink uselessSink = null;
                    m_writerAdvanced2.GetSink(0, out uselessSink);
                    m_writerAdvanced2.RemoveSink(uselessSink);
                    if (uselessSink != null)
                    {
                        Marshal.ReleaseComObject(uselessSink);
                        uselessSink = null;
                    }
                }

                if (config.Contains("send"))
                {
                    string url = config["send"] as string;
                    Console.WriteLine("[MODE] Streaming to a remote server: {0}", url);
                    WriterNetworkSink sender = new WriterNetworkSink(url);
                    m_writerAdvanced2.AddSink(sender);
                }
                if (config.Contains("share"))
                {
                    int port = (int)config["share"];
                    WriterNetworkSink listener = new WriterNetworkSink(port);
                    Console.WriteLine("[MODE] Started listening on port {0}", port);
                    m_writerAdvanced2.AddSink(listener);
                }
                //Connecting VideoDev to asfWriter
                if (videoDevChosen != null)
                {
                    hr = capGraph.RenderStream(PinCategory.Capture, MediaType.Video, capVideoFilter, null, asfWriter);
                    //hr = capGraph.RenderStream(null, null, capVideoFilter, null, asfWriter);
                    Marshal.ThrowExceptionForHR(hr);
                }
                //Connecting AudioDev to asfWriter
                if (audioDevChosen != null)
                {
                    hr = capGraph.RenderStream(PinCategory.Capture, MediaType.Audio, capAudioFilter, null, asfWriter);
                    //hr = capGraph.RenderStream(null, null, capAudioFilter, null, asfWriter);
                    Marshal.ThrowExceptionForHR(hr);
                }
                m_mediaCtrl = m_FilterGraph as IMediaControl;
                //debug, dumps graph
                //DirectShowLib.Utils.FilterGraphTools.SaveGraphFile(m_FilterGraph, ".\\mygraph.grf");
            }
            finally
            {
                if (capVideoFilter != null)
                {
                    Marshal.ReleaseComObject(capVideoFilter);
                    capVideoFilter = null;
                }
                if (capAudioFilter != null)
                {
                    Marshal.ReleaseComObject(capAudioFilter);
                    capAudioFilter = null;
                }
                if (asfWriter != null)
                {
                    Marshal.ReleaseComObject(asfWriter);
                    asfWriter = null;
                }
                if (capGraph != null)
                {
                    Marshal.ReleaseComObject(capGraph);
                    capGraph = null;
                }
                if (serviceProvider != null)
                {
                    Marshal.ReleaseComObject(serviceProvider);
                    serviceProvider = null;
                }
            }
            Console.WriteLine("INIT done");
        }
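The init routine above recognizes three optional keys in its config dictionary: "cap" (write the ASF stream to a local file), "send" (push it to a remote server via WriterNetworkSink(url)) and "share" (listen on a local port via WriterNetworkSink(port)). A minimal sketch of how such a dictionary might be populated, assuming config is a Hashtable; the path, URL and port shown are purely illustrative:

        var config = new System.Collections.Hashtable();
        config["cap"]   = @"C:\temp\capture.asf";          // capture to a local file
        config["send"]  = "http://mediaserver:8080/push";  // stream to a remote server
        config["share"] = 8081;                             // listen on a port for pulling clients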
Ejemplo n.º 20
0
        // Thread entry point
        public void WorkerThread()
        {
            int  hr;
            Guid cat;
            Guid med;

            // grabber
            Grabber grabber = new Grabber(this);

            // objects
            object graphObj   = null;
            object grabberObj = null;

            // interfaces
            IGraphBuilder graphBuilder = null;

            DShowNET.ICaptureGraphBuilder2 captureGraphBuilder = null;
            IBaseFilter    videoDeviceFilter = null;
            IBaseFilter    grabberFilter     = null;
            ISampleGrabber sg = null;
            IMediaControl  mc = null;

            try
            {
                // Make a new filter graph
                graphObj     = Activator.CreateInstance(Type.GetTypeFromCLSID(DShowNET.Clsid.FilterGraph, true));
                graphBuilder = (IGraphBuilder)graphObj;

                // Get the Capture Graph Builder
                Guid clsid = DShowNET.Clsid.CaptureGraphBuilder2;
                Guid riid  = typeof(DShowNET.ICaptureGraphBuilder2).GUID;
                captureGraphBuilder = (DShowNET.ICaptureGraphBuilder2)DShowNET.DsBugWO.CreateDsInstance(ref clsid, ref riid);

                // Link the CaptureGraphBuilder to the filter graph
                hr = captureGraphBuilder.SetFiltergraph((DShowNET.IGraphBuilder)graphBuilder);
                if (hr < 0)
                {
                    Marshal.ThrowExceptionForHR(hr);
                }

                int rotCookie = 0;
                DShowNET.DsROT.AddGraphToRot(graphBuilder, out rotCookie);

                // Get the video device and add it to the filter graph
                if (deviceMoniker != null)
                {
                    videoDeviceFilter = (IBaseFilter)Marshal.BindToMoniker(deviceMoniker);
                    hr = graphBuilder.AddFilter(videoDeviceFilter,
                                                "Video Capture Device");
                    if (hr < 0)
                    {
                        Marshal.ThrowExceptionForHR(hr);
                    }
                }

                // create sample grabber, object and filter
                grabberObj    = Activator.CreateInstance(Type.GetTypeFromCLSID(Clsid.SampleGrabber, true));
                grabberFilter = (IBaseFilter)grabberObj;
                sg            = (ISampleGrabber)grabberObj;

                // add sample grabber filter to filter graph
                hr = graphBuilder.AddFilter(grabberFilter, "grabber");
                if (hr < 0)
                {
                    Marshal.ThrowExceptionForHR(hr);
                }

                // Try looking for a video device interleaved media type
                IBaseFilter testFilter = videoDeviceFilter;
                // grabberFilter (not supported)
                object o;
                cat = DShowNET.PinCategory.Capture;
                med = DShowNET.MediaType.Interleaved;
                Guid iid = typeof(DShowNET.IAMStreamConfig).GUID;
                hr = captureGraphBuilder.FindInterface(ref cat, ref med, (DShowNET.IBaseFilter)testFilter, ref iid, out o);

                if (hr != 0)
                {
                    // If not found, try looking for a video media type
                    med = MediaType.Video;
                    hr  = captureGraphBuilder.FindInterface(
                        ref cat, ref med, (DShowNET.IBaseFilter)testFilter, ref iid, out o);

                    if (hr != 0)
                    {
                        o = null;
                    }
                }

                // Set the video stream configuration to data member
                videoStreamConfig = o as DShowNET.IAMStreamConfig;
                o = null;

                //modifies the stream size and frame rate
                if (modifyStream)
                {
                    //set size of frame
                    BitmapInfoHeader bmiHeader;
                    bmiHeader        = (BitmapInfoHeader)getStreamConfigSetting(videoStreamConfig, "BmiHeader");
                    bmiHeader.Width  = streamSize.Width;
                    bmiHeader.Height = streamSize.Height;
                    setStreamConfigSetting(videoStreamConfig, "BmiHeader", bmiHeader);

                    //set frame rate (not supported on the cameras we have)

                    /*
                     * long avgTimePerFrame = (long)(10000000 / framerate);
                     * setStreamConfigSetting(videoStreamConfig, "AvgTimePerFrame", avgTimePerFrame);
                     */
                }

                // connect pins (Turns on the video device)
                if (graphBuilder.Connect((IPin)AForge.Video.DirectShow.Internals.Tools.GetOutPin((AForge.Video.DirectShow.Internals.IBaseFilter)videoDeviceFilter, 0), (IPin)AForge.Video.DirectShow.Internals.Tools.GetInPin((AForge.Video.DirectShow.Internals.IBaseFilter)grabberFilter, 0)) < 0)
                {
                    throw new ApplicationException("Failed connecting filters");
                }

                // Set the sample grabber media type settings
                AMMediaType mt = new AMMediaType();
                mt.majorType = MediaType.Video;
                mt.subType   = MediaSubType.RGB24;
                sg.SetMediaType(mt);

                // get media type and set sample grabber parameters
                if (sg.GetConnectedMediaType(mt) == 0)
                {
                    VideoInfoHeader vih = (VideoInfoHeader)Marshal.PtrToStructure(mt.formatPtr, typeof(VideoInfoHeader));
                    if (vih.BmiHeader.Compression != 0)
                    {
                        YUYV = true;
                        grabber.setYUYV(YUYV);
                    }
                    grabber.Width  = vih.BmiHeader.Width;
                    grabber.Height = vih.BmiHeader.Height;
                    //mt.Dispose();
                }

                // Set various sample grabber properties
                sg.SetBufferSamples(false);
                sg.SetOneShot(false);
                sg.SetCallback(grabber, 1);

                if (!preventFreezing)
                {
                    // render
                    graphBuilder.Render((IPin)AForge.Video.DirectShow.Internals.Tools.GetOutPin((AForge.Video.DirectShow.Internals.IBaseFilter)grabberFilter, 0));

                    // Do not show active (source) window
                    IVideoWindow win = (IVideoWindow)graphObj;
                    win.put_AutoShow(0);
                    win = null;
                }

                // get media control
                mc = (IMediaControl)graphBuilder;

                // run
                mc.Run();

                while (!stopEvent.WaitOne(0, true))
                {
                    Thread.Sleep(100);
                }
                mc.StopWhenReady();
            }
            // catch any exceptions
            catch (Exception e)
            {
                // provide information to clients
                if (VideoSourceError != null)
                {
                    VideoSourceError(this, new VideoSourceErrorEventArgs(e.Message));
                }
            }
            // finalization block
            finally
            {
                // release all objects
                mc                  = null;
                graphBuilder        = null;
                captureGraphBuilder = null;
                videoDeviceFilter   = null;
                grabberFilter       = null;
                sg                  = null;

                if (graphObj != null)
                {
                    Marshal.ReleaseComObject(graphObj);
                    graphObj = null;
                }
                if (grabberObj != null)
                {
                    Marshal.ReleaseComObject(grabberObj);
                    grabberObj = null;
                }
            }
        }
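The worker thread registers its Grabber with sg.SetCallback(grabber, 1), which selects the BufferCB callback. A minimal sketch of what such a callback class could look like, assuming the DirectShowLib declaration of ISampleGrabberCB (the real Grabber in this example also tracks a YUYV flag and reports frames back to its owner, which is omitted here):

        using System;
        using System.Runtime.InteropServices;
        using DirectShowLib;

        class MinimalGrabber : ISampleGrabberCB
        {
            public int Width  { get; set; }
            public int Height { get; set; }

            // Not used here: SetCallback(..., 1) routes samples to BufferCB instead.
            public int SampleCB(double sampleTime, IMediaSample pSample)
            {
                return 0;
            }

            // Called once per grabbed frame with a pointer to the raw pixel data.
            public int BufferCB(double sampleTime, IntPtr pBuffer, int bufferLen)
            {
                var frame = new byte[bufferLen];
                Marshal.Copy(pBuffer, frame, 0, bufferLen);
                // hand the copied frame to a consumer here
                return 0;
            }
        }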
Ejemplo n.º 21
0
        /// <summary>
        /// Reads a file in WMV format.
        /// </summary>
        /// <remarks>
        ///		RenderStream (NULL, MEDIATYPE_Video, source, videoGrabber, renderer)<br/>
        ///		RenderStream (NULL, MEDIATYPE_Audio, source, audioGrabber, renderer)<br/>
        ///		<pre>
        ///		 source          grabber          mux        renderer
        ///		+--------+     +---------+     +-------+     +-------+
        ///		|  audio 0 ----0  audio  0 --- 1       0 --- 0       |
        ///		|        |     +---------+     |       |     +-------+
        ///		|        |                     |       |
        ///		|        |     +---------+     |       |
        ///		|  video 1 --- 0  video  0 --- 0       |
        ///		+--------+     +---------+     |       |
        ///		                               2       |
        ///		                               +-------+
        ///		</pre>
        /// </remarks>
        public static void Sample31()
        {
            string __FUNCTION__ = MethodBase.GetCurrentMethod().Name;

            Console.WriteLine(__FUNCTION__);

            IGraphBuilder         graph         = null;
            ICaptureGraphBuilder2 builder       = null;
            IBaseFilter           videoSource   = null;
            IBaseFilter           videoGrabber  = null;
            IBaseFilter           audioGrabber  = null;
            IBaseFilter           videoRenderer = null;
            IBaseFilter           audioRenderer = null;
            var    videoGrabberCB = new CxSampleGrabberCB();
            var    audioGrabberCB = new CxSampleGrabberCB();
            string src_filename   = Path.Combine(TestFiles, "stopwatch_320x240.wmv");

            try
            {
                #region Create the graph builder:
                {
                    graph = (IGraphBuilder)Axi.CoCreateInstance(GUID.CLSID_FilterGraph);
                    if (graph == null)
                    {
                        throw new System.IO.IOException("Failed to create a GraphBuilder.");
                    }

                    builder = (ICaptureGraphBuilder2)Axi.CoCreateInstance(GUID.CLSID_CaptureGraphBuilder2);
                    if (builder == null)
                    {
                        throw new System.IO.IOException("Failed to create a CaptureGraphBuilder2.");
                    }
                    builder.SetFiltergraph(graph);
                }
                #endregion

                #region Video input: create the source filter.
                {
#if true
                    graph.AddSourceFilter(src_filename, "VideoSource", ref videoSource);
                    if (videoSource == null)
                    {
                        throw new System.IO.IOException("Failed to create a videoSource.");
                    }
#else
                    videoSource = (IBaseFilter)Axi.CoCreateInstance(GUID.CLSID_WMAsfReader);
                    if (videoSource == null)
                    {
                        throw new System.IO.IOException("Failed to create a videoSource.");
                    }
                    graph.AddFilter(videoSource, "VideoSource");

                    // Configure the file source filter.
                    var pConfig = (IFileSourceFilter)videoSource;
                    {
                        HRESULT hr = (HRESULT)pConfig.Load(src_filename, IntPtr.Zero);
                        if (hr < HRESULT.S_OK)
                        {
                            throw new System.IO.IOException("Failed to set the src_filename.");
                        }
                    }
#endif
                }
                #endregion

                #region Video capture: create the sample grabber.
                {
                    videoGrabber = (IBaseFilter)Axi.CoCreateInstance(GUID.CLSID_SampleGrabber);
                    if (videoGrabber == null)
                    {
                        throw new System.IO.IOException("Failed to create a videoGrabber.");
                    }
                    graph.AddFilter(videoGrabber, "videoGrabber");

                    // Configure the input format of the sample grabber filter.
                    // SetMediaType specifies the required media type.
                    //   http://msdn.microsoft.com/ja-jp/library/cc369546.aspx
                    // * Not every member of the AM_MEDIA_TYPE structure needs to be set.
                    // * By default, the sample grabber has no preferred media type.
                    // * To make sure the sample grabber connects to the correct filter, call this method before building the filter graph.
                    // majortype: http://msdn.microsoft.com/ja-jp/library/cc370108.aspx
                    // subtype  : http://msdn.microsoft.com/ja-jp/library/cc371040.aspx
                    {
                        var grabber = (ISampleGrabber)videoGrabber;

                        var mt = new AM_MEDIA_TYPE();
                        mt.majortype  = new Guid(GUID.MEDIATYPE_Video);
                        mt.subtype    = new Guid(GUID.MEDIASUBTYPE_RGB24);
                        mt.formattype = new Guid(GUID.FORMAT_VideoInfo);
                        grabber.SetMediaType(mt);
                        grabber.SetBufferSamples(false);                                        // Disable sample buffering.
                        grabber.SetOneShot(false);                                              // Disable one-shot mode.
                        //grabber.SetCallback(videoGrabberCB, 0);	// 0: call the SampleCB method.
                        grabber.SetCallback(videoGrabberCB, 1);                                 // 1: call the BufferCB method.
                    }
                }
                #endregion

                #region Audio capture: create the sample grabber.
                {
                    audioGrabber = (IBaseFilter)Axi.CoCreateInstance(GUID.CLSID_SampleGrabber);
                    if (audioGrabber == null)
                    {
                        throw new System.IO.IOException("Failed to create a audioGrabber.");
                    }
                    graph.AddFilter(audioGrabber, "audioGrabber");

                    // Configure the input format of the sample grabber filter.
                    // SetMediaType specifies the required media type.
                    //   http://msdn.microsoft.com/ja-jp/library/cc369546.aspx
                    // * Not every member of the AM_MEDIA_TYPE structure needs to be set.
                    // * By default, the sample grabber has no preferred media type.
                    // * To make sure the sample grabber connects to the correct filter, call this method before building the filter graph.
                    // majortype: http://msdn.microsoft.com/ja-jp/library/cc370108.aspx
                    // subtype  : http://msdn.microsoft.com/ja-jp/library/cc371040.aspx
                    {
                        var grabber = (ISampleGrabber)audioGrabber;

                        var mt = new AM_MEDIA_TYPE();
                        mt.majortype  = new Guid(GUID.MEDIATYPE_Audio);
                        mt.subtype    = new Guid(GUID.MEDIASUBTYPE_PCM);
                        mt.formattype = new Guid(GUID.FORMAT_WaveFormatEx);
                        grabber.SetMediaType(mt);
                        grabber.SetBufferSamples(false);                                        // Disable sample buffering.
                        grabber.SetOneShot(false);                                              // Disable one-shot mode.
                        //grabber.SetCallback(audioGrabberCB, 0);	// 0: call the SampleCB method.
                        grabber.SetCallback(audioGrabberCB, 1);                                 // 1: call the BufferCB method.
                    }
                }
                #endregion

                #region Video output: create the renderer.
                {
                    videoRenderer = (IBaseFilter)Axi.CoCreateInstance(GUID.CLSID_NullRenderer);
                    if (videoRenderer == null)
                    {
                        throw new System.IO.IOException("Failed to create a videoRenderer.");
                    }
                    graph.AddFilter(videoRenderer, "videoRenderer");
                }
                #endregion

                #region Audio output: create the renderer.
                {
                    audioRenderer = (IBaseFilter)Axi.CoCreateInstance(GUID.CLSID_NullRenderer);
                    if (audioRenderer == null)
                    {
                        throw new System.IO.IOException("Failed to create a audioRenderer.");
                    }
                    graph.AddFilter(audioRenderer, "audioRenderer");
                }
                #endregion

                #region Connect the filters:
                unsafe
                {
                    HRESULT hr;

                    // Connect the filters: (video input)
                    var mediatype_video = new Guid(GUID.MEDIATYPE_Video);
                    hr = (HRESULT)builder.RenderStream(IntPtr.Zero, new IntPtr(&mediatype_video), videoSource, videoGrabber, videoRenderer);
                    if (hr < HRESULT.S_OK)
                    {
                        throw new CxDSException(hr);
                    }

                    // Connect the filters: (audio input)
                    var mediatype_audio = new Guid(GUID.MEDIATYPE_Audio);
                    hr = (HRESULT)builder.RenderStream(IntPtr.Zero, new IntPtr(&mediatype_audio), videoSource, audioGrabber, audioRenderer);
                    if (hr < HRESULT.S_OK)
                    {
                        throw new CxDSException(hr);
                    }
                }
                #endregion

                #region DEBUG: save a GraphEdit file.

                /*
                 * Saves the current filter configuration to the specified file (GRF extension).
                 * The saved file can be inspected with graphedt.exe (included in the Windows SDK).
                 */
                try
                {
                    Axi.SaveGraphFile(graph, Path.GetFullPath(__FUNCTION__ + ".GRF"));
                }
                catch (System.Exception ex)
                {
                    Console.WriteLine(ex.StackTrace);
                }
                #endregion

                // ------------------------------

                #region Capture processing:
                {
                    var mediaControl = (IMediaControl)graph;
                    var mediaEvent   = (IMediaEvent)graph;
                    var mediaSeeking = (IMediaSeeking)graph;

                    // Get the video size.
                    var vih    = Axi.GetVideoInfo((ISampleGrabber)videoGrabber);
                    var images = new List <Bitmap>();

                    var watch = new Stopwatch();
                    watch.Start();

                    // Frame capture handler.
                    videoGrabberCB.Notify += delegate(object _sender, CxSampleGrabberEventArgs _e)
                    {
                        Console.WriteLine("{0}: SampleTime={1:F6}", images.Count, _e.SampleTime);
                        images.Add(_e.ToImage(vih));
                    };

                    // Start playback.
                    Console.WriteLine("Run ...");
                    {
                        HRESULT hr;
                        int     state;
                        hr = (HRESULT)mediaControl.Run();
                        hr = (HRESULT)mediaControl.GetState(1000, out state);
                    }
                    Console.WriteLine("Running ... {0:F3} msec", watch.Elapsed.TotalMilliseconds);

                    // Wait until playback completes.
                    {
                        HRESULT hr;
                        int     code;
                        hr = (HRESULT)mediaEvent.WaitForCompletion(-1, out code);
                        hr = (HRESULT)mediaControl.Stop();
                    }

                    // For verification:
                    Console.WriteLine("Save ... {0:F3} msec", watch.Elapsed.TotalMilliseconds);
                    {
                        string subdir = Path.Combine(Results, __FUNCTION__);
                        if (Directory.Exists(subdir) == false)
                        {
                            Directory.CreateDirectory(subdir);
                        }

                        for (int i = 0; i < images.Count; i++)
                        {
                            var filename = string.Format("image{0}.png", i);
                            images[i].Save(Path.Combine(subdir, filename));
                        }
                    }

                    Console.WriteLine("Completed. {0:F3} msec", watch.Elapsed.TotalMilliseconds);
                }
                #endregion
            }
            catch (System.Exception ex)
            {
                Console.WriteLine("{0}", ex.StackTrace);
            }
            finally
            {
                #region Release:
                if (videoSource != null)
                {
                    Marshal.ReleaseComObject(videoSource);
                }
                videoSource = null;

                if (videoGrabber != null)
                {
                    Marshal.ReleaseComObject(videoGrabber);
                }
                videoGrabber = null;

                if (audioGrabber != null)
                {
                    Marshal.ReleaseComObject(audioGrabber);
                }
                audioGrabber = null;

                if (videoRenderer != null)
                {
                    Marshal.ReleaseComObject(videoRenderer);
                }
                videoRenderer = null;

                if (audioRenderer != null)
                {
                    Marshal.ReleaseComObject(audioRenderer);
                }
                audioRenderer = null;

                if (builder != null)
                {
                    Marshal.ReleaseComObject(builder);
                }
                builder = null;

                if (graph != null)
                {
                    Marshal.ReleaseComObject(graph);
                }
                graph = null;
                #endregion
            }
        }
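For comparison, DirectShowLib-style builders expose RenderStream overloads that accept the media-type GUIDs directly, so the unsafe pointer wrapping above is not needed there. A hedged sketch of the same two connections under that assumption, using the filters built above:

        int hr = builder.RenderStream(null, MediaType.Video, videoSource, videoGrabber, videoRenderer);
        DsError.ThrowExceptionForHR(hr);

        hr = builder.RenderStream(null, MediaType.Audio, videoSource, audioGrabber, audioRenderer);
        DsError.ThrowExceptionForHR(hr);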
Ejemplo n.º 22
0
        /// <summary>
        /// Configures the DirectShow graph to play the selected video capture
        /// device with the selected parameters
        /// </summary>
        private void SetupGraph()
        {
            /* Clean up any messes left behind */
            FreeResources();

            try
            {
                /* Create a new graph */
                m_graph = (IGraphBuilder) new FilterGraphNoThread();

#if DEBUG
                m_rotEntry = new DsROTEntry(m_graph);
#endif

                /* Create a capture graph builder to help
                 * with rendering a capture graph */
                var captureGraph = (ICaptureGraphBuilder2) new CaptureGraphBuilder2();

                /* Set our filter graph to the capture graph */
                int hr = captureGraph.SetFiltergraph(m_graph);
                DsError.ThrowExceptionForHR(hr);

                /* Add our capture device source to the graph */
                if (m_videoCaptureSourceChanged)
                {
                    m_captureDevice = AddFilterByName(m_graph,
                                                      FilterCategory.VideoInputDevice,
                                                      VideoCaptureSource);

                    m_videoCaptureSourceChanged = false;
                }
                else if (m_videoCaptureDeviceChanged)
                {
                    m_captureDevice = AddFilterByDevicePath(m_graph,
                                                            FilterCategory.VideoInputDevice,
                                                            VideoCaptureDevice.DevicePath);

                    m_videoCaptureDeviceChanged = false;
                }

                /* If we have a null capture device, we have an issue */
                if (m_captureDevice == null)
                {
                    throw new Exception(string.Format("Capture device {0} not found or could not be created", VideoCaptureSource));
                }

                if (UseYuv && !EnableSampleGrabbing)
                {
                    /* Configure the video output pin with our parameters and if it fails
                     * then just use the default media subtype*/
                    if (!SetVideoCaptureParameters(captureGraph, m_captureDevice, MediaSubType.YUY2))
                    {
                        SetVideoCaptureParameters(captureGraph, m_captureDevice, Guid.Empty);
                    }
                }
                else
                {
                    /* Configure the video output pin with our parameters */
                    SetVideoCaptureParameters(captureGraph, m_captureDevice, Guid.Empty);
                }

                var rendererType = VideoRendererType.VideoMixingRenderer9;

                /* Creates a video renderer and register the allocator with the base class */
                m_renderer = CreateVideoRenderer(rendererType, m_graph, 1);

                if (rendererType == VideoRendererType.VideoMixingRenderer9)
                {
                    var mixer = m_renderer as IVMRMixerControl9;

                    if (mixer != null && !EnableSampleGrabbing && UseYuv)
                    {
                        VMR9MixerPrefs dwPrefs;
                        mixer.GetMixingPrefs(out dwPrefs);
                        dwPrefs &= ~VMR9MixerPrefs.RenderTargetMask;
                        dwPrefs |= VMR9MixerPrefs.RenderTargetYUV;
                        /* Prefer YUV */
                        mixer.SetMixingPrefs(dwPrefs);
                    }
                }

                if (EnableSampleGrabbing)
                {
                    m_sampleGrabber = (ISampleGrabber) new SampleGrabber();
                    SetupSampleGrabber(m_sampleGrabber);
                    hr = m_graph.AddFilter(m_sampleGrabber as IBaseFilter, "SampleGrabber");
                    DsError.ThrowExceptionForHR(hr);
                }

                var videoOutPin = DsFindPin.ByDirection(m_captureDevice, PinDirection.Output, 0);

                if (videoOutPin == null)
                {
                    throw new Exception("Could not query the video output pin on source filter");
                }

                /* Intelligently connect the pins in the graph to the renderer */
                hr = m_graph.Render(videoOutPin);

                Marshal.ReleaseComObject(videoOutPin);

                //hr = captureGraph.RenderStream(PinCategory.Capture,
                //                               MediaType.Video,
                //                               m_captureDevice,
                //                               null,
                //                               m_renderer);

                DsError.ThrowExceptionForHR(hr);

                /* Register the filter graph
                 * with the base classes */
                SetupFilterGraph(m_graph);

                /* Sets the NaturalVideoWidth/Height */
                SetNativePixelSizes(m_renderer);

                HasVideo = true;

                /* Make sure we Release() this COM reference */
                Marshal.ReleaseComObject(captureGraph);
            }
            catch (Exception ex)
            {
                /* Something went wrong while building the graph */
                FreeResources();
                InvokeMediaFailed(new MediaFailedEventArgs(ex.Message, ex));
            }

            /* Success */
            InvokeMediaOpened();
        }
Ejemplo n.º 23
0
        public static IBaseFilter RenderNull(DisposalCleanup dc, IGraphBuilder graph)
        {
            if (dc == null) throw new ArgumentNullException("dc");
            if (graph == null) throw new ArgumentNullException("graph");

            var filter = (IBaseFilter) new NullRenderer();
            dc.Add(filter);

            graph.AddFilter(filter, Resources.DefaultNullRendererName);

            return filter;
        }
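A hedged usage sketch for RenderNull: the null renderer is typically used to terminate a branch that is grabbed or analyzed but never displayed. This assumes DisposalCleanup is disposable and releases everything added to it:

        using (var dc = new DisposalCleanup())
        {
            var graph = (IGraphBuilder)new FilterGraph();
            dc.Add(graph);

            IBaseFilter nullRenderer = RenderNull(dc, graph);
            // ... connect the end of the grabber chain to nullRenderer, then run the graph ...
        }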
Ejemplo n.º 24
0
        private void BuildGraph()
        {
            int hr;

            try
            {
                lblTotalTime.Text = mvs.PlayTime.ToString();
                TimeSpan tt = TimeSpan.Parse(mvs.PlayTime);
                DateTime dt = new DateTime(tt.Ticks);
                lblTotalTime.Text = String.Format("{0:HH:mm:ss}", dt);

                if (mvs.LocalMedia[0].IsDVD)
                {
                    mediaToPlay = mvs.LocalMedia[0];
                    MediaState mediaState = mediaToPlay.State;
                    if (mediaState == MediaState.NotMounted)
                    {
                        MountResult result = mediaToPlay.Mount();
                    }



                    string videoPath = mediaToPlay.GetVideoPath();

                    if (videoPath != null)
                    {
                        FirstPlayDvd(videoPath);
                    }
                    else
                    {
                        FirstPlayDvd(mvs.LocalMedia[0].File.FullName);
                    }
                    // Add delegates for Windowless operations
                    AddHandlers();
                    MainForm_ResizeMove(null, null);
                }
                else
                {
                    _graphBuilder = (IFilterGraph2) new FilterGraph();
                    _rotEntry     = new DsROTEntry((IFilterGraph)_graphBuilder);
                    _mediaCtrl    = (IMediaControl)_graphBuilder;
                    _mediaSeek    = (IMediaSeeking)_graphBuilder;
                    _mediaPos     = (IMediaPosition)_graphBuilder;
                    _mediaStep    = (IVideoFrameStep)_graphBuilder;
                    _vmr9Filter   = (IBaseFilter) new VideoMixingRenderer9();
                    ConfigureVMR9InWindowlessMode();
                    AddHandlers();
                    MainForm_ResizeMove(null, null);
                    hr = _graphBuilder.AddFilter(_vmr9Filter, "Video Mixing Renderer 9");
                    AddPreferedCodecs(_graphBuilder);
                    DsError.ThrowExceptionForHR(hr);
                    hr = _graphBuilder.RenderFile(mvs.LocalMedia[0].File.FullName, null);
                    DsError.ThrowExceptionForHR(hr);
                }
            }
            catch (Exception e)
            {
                CloseDVDInterfaces();
                logger.ErrorException("An error occurred while building the graph: \r\n\r\n", e);
            }
        }
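Once the graph is built, the seeking interface acquired above can drive the UI. A small sketch, assuming the default time format (100 ns units, i.e. .NET ticks); lblCurrentTime is a hypothetical label, only lblTotalTime appears in the example:

        long duration, position;
        _mediaSeek.GetDuration(out duration);
        _mediaSeek.GetCurrentPosition(out position);
        lblCurrentTime.Text = TimeSpan.FromTicks(position).ToString(@"hh\:mm\:ss");
        lblTotalTime.Text   = TimeSpan.FromTicks(duration).ToString(@"hh\:mm\:ss");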
Ejemplo n.º 25
0
		// --------------------- Private Methods -----------------------
		
		/// <summary> 
		///  Create a new filter graph and add filters (devices, compressors, 
		///  misc), but leave the filters unconnected. Call renderGraph()
		///  to connect the filters.
		/// </summary>
		protected void createGraph()
		{
			Guid					cat;
			Guid					med;
			int						hr;

			// Ensure required properties are set
			if ( videoDevice == null && audioDevice == null )
				throw new ArgumentException( "The video and/or audio device has not been set. Please set one or both to valid capture devices.\n" );

			// Skip if we are already created
			if ( (int)graphState < (int)GraphState.Created )
			{
				// Garbage collect, ensure that previous filters are released
				GC.Collect();

				// Make a new filter graph
#if DSHOWNET
                // Make a new filter graph
                graphBuilder = (IGraphBuilder)Activator.CreateInstance(Type.GetTypeFromCLSID(Clsid.FilterGraph, true));

                // Get the Capture Graph Builder
                Guid clsid = Clsid.CaptureGraphBuilder2;
                Guid riid = typeof(ICaptureGraphBuilder2).GUID;
                captureGraphBuilder = (ICaptureGraphBuilder2)DsBugWO.CreateDsInstance(ref clsid, ref riid);
#else
				FilterGraph graph = new FilterGraph();
				graphBuilder = (IGraphBuilder)graph;

				// Get the Capture Graph Builder
				captureGraphBuilder = (ICaptureGraphBuilder2)new CaptureGraphBuilder2();
#endif

                // Link the CaptureGraphBuilder to the filter graph
                hr = captureGraphBuilder.SetFiltergraph(graphBuilder);
                if (hr < 0) Marshal.ThrowExceptionForHR(hr);

                // Add the graph to the Running Object Table so it can be
                // viewed with GraphEdit
#if DEBUG
#if DSHOWNET
				DsROT.AddGraphToRot(graphBuilder, out rotCookie);
#else
                rotCookie = new DsROTEntry(graphBuilder);
#endif
#endif

                // Get the video device and add it to the filter graph
				if ( VideoDevice != null )
				{
					videoDeviceFilter = (IBaseFilter) Marshal.BindToMoniker( VideoDevice.MonikerString );
					hr = graphBuilder.AddFilter( videoDeviceFilter, "Video Capture Device" );
					if( hr < 0 ) Marshal.ThrowExceptionForHR( hr );
				}

				// Get the audio device and add it to the filter graph
				if ( AudioDevice != null )
				{
					audioDeviceFilter = (IBaseFilter) Marshal.BindToMoniker( AudioDevice.MonikerString );
					hr = graphBuilder.AddFilter( audioDeviceFilter, "Audio Capture Device" );
					if( hr < 0 ) Marshal.ThrowExceptionForHR( hr );
				}

                // Get the video compressor and add it to the filter graph
				if ( VideoCompressor != null )
				{
					videoCompressorFilter = (IBaseFilter) Marshal.BindToMoniker( VideoCompressor.MonikerString ); 
					hr = graphBuilder.AddFilter( videoCompressorFilter, "Video Compressor" );
					if( hr < 0 ) Marshal.ThrowExceptionForHR( hr );
				}

				// Get the audio compressor and add it to the filter graph
				if ( AudioCompressor != null )
				{
					audioCompressorFilter = (IBaseFilter) Marshal.BindToMoniker( AudioCompressor.MonikerString ); 
					hr = graphBuilder.AddFilter( audioCompressorFilter, "Audio Compressor" );
					if( hr < 0 ) Marshal.ThrowExceptionForHR( hr );
				}

				// Retrieve the stream control interface for the video device
				// FindInterface will also add any required filters
				// (WDM devices in particular may need additional
				// upstream filters to function).

				// Try looking for an interleaved media type
				object o;
				cat = PinCategory.Capture;
				med = MediaType.Interleaved;
				Guid iid = typeof(IAMStreamConfig).GUID;
#if DSHOWNET
                hr = captureGraphBuilder.FindInterface(
                    ref cat, ref med, videoDeviceFilter, ref iid, out o);
#else
				hr = captureGraphBuilder.FindInterface(
					DsGuid.FromGuid(cat), DsGuid.FromGuid(med), videoDeviceFilter, DsGuid.FromGuid(iid), out o );
#endif

				if ( hr != 0 )
				{
					// If not found, try looking for a video media type
					med = MediaType.Video;
#if DSHOWNET
                    hr = captureGraphBuilder.FindInterface(
                        ref cat, ref med, videoDeviceFilter, ref iid, out o);
#else
					hr = captureGraphBuilder.FindInterface(
						DsGuid.FromGuid(cat), DsGuid.FromGuid(med), videoDeviceFilter, DsGuid.FromGuid(iid), out o );
#endif
				
					if ( hr != 0 )
						o = null;
				}
				videoStreamConfig = o as IAMStreamConfig;

// #if NEWCODE
				// Start of new Brian's Low code
				// Retrieve the stream control interface for the video device
				// FindInterface will also add any required filters
				// (WDM devices in particular may need additional
				// upstream filters to function).

				// Try looking for an interleaved media type
				o = null;
				cat = PinCategory.Preview;
				med = MediaType.Interleaved;
				iid = typeof(IAMStreamConfig).GUID;
#if DSHOWNET
				hr = captureGraphBuilder.FindInterface(
					ref cat, ref med, videoDeviceFilter, ref iid, out o);
#else
				hr = captureGraphBuilder.FindInterface(
					DsGuid.FromGuid(cat), DsGuid.FromGuid(med), videoDeviceFilter, DsGuid.FromGuid(iid), out o );
#endif

				if ( hr != 0 )
				{
					// If not found, try looking for a video media type
					med = MediaType.Video;
#if DSHOWNET
					hr = captureGraphBuilder.FindInterface(
						ref cat, ref med, videoDeviceFilter, ref iid, out o);
#else
					hr = captureGraphBuilder.FindInterface(
						DsGuid.FromGuid(cat), DsGuid.FromGuid(med), videoDeviceFilter, DsGuid.FromGuid(iid), out o );
#endif
				
					if ( hr != 0 )
						o = null;
				}
				this.previewStreamConfig = o as IAMStreamConfig;
				// End of new Brian's Low code

				if( (this.videoStreamConfig != null)||
					(this.previewStreamConfig != null) )
				{
					this.dxUtils = new DxUtils();
					bool result = this.dxUtils.InitDxUtils(this.videoDeviceFilter);

					if((!result)&&(!this.dxUtils.FindMediaData(this.videoStreamConfig)))
					{
						this.dxUtils.Dispose();
						this.dxUtils = null;
					}
				}
// #endif
				// Retrieve the stream control interface for the audio device
				o = null;
				cat = PinCategory.Capture;
				med = MediaType.Audio ;
				iid = typeof(IAMStreamConfig).GUID;
				if( (this.AudioViaPci)&&
					(audioDeviceFilter == null)&&(videoDeviceFilter != null) )
				{
                    hr = captureGraphBuilder.FindInterface(
#if DSHOWNET
						ref cat, ref med, videoDeviceFilter, ref iid, out o );
#else
                        DsGuid.FromGuid(cat), DsGuid.FromGuid(med), videoDeviceFilter, DsGuid.FromGuid(iid), out o);
#endif
				}
				else
				{
#if DSHOWNET
                    hr = captureGraphBuilder.FindInterface(
                        ref cat, ref med, audioDeviceFilter, ref iid, out o);
#else
                    hr = captureGraphBuilder.FindInterface(
	    				DsGuid.FromGuid(cat), DsGuid.FromGuid(med), audioDeviceFilter, DsGuid.FromGuid(iid), out o );
#endif
				}

				if (hr != 0)
					o = null;
				audioStreamConfig = o as IAMStreamConfig;

				// Retrieve the media control interface (for starting/stopping graph)
				mediaControl = (IMediaControl) graphBuilder;

				// Reload any video crossbars
				if ( videoSources != null ) videoSources.Dispose(); videoSources = null;

				// Reload any audio crossbars
				if ( audioSources != null ) audioSources.Dispose(); audioSources = null;
				
				// Reload any property pages exposed by filters
                this.PropertyPages = null;

				// Reload capabilities of video device
				videoCaps = null;
				previewCaps = null;

				// Reload capabilities of audio device
				audioCaps = null;

				// Retrieve TV Tuner if available
				o = null;
				cat = PinCategory.Capture;
				med = MediaType.Interleaved; 
				iid = typeof(IAMTVTuner).GUID;
#if DSHOWNET
                hr = captureGraphBuilder.FindInterface(
                    ref cat, ref med, videoDeviceFilter, ref iid, out o);
#else

				hr = captureGraphBuilder.FindInterface( 
					DsGuid.FromGuid(cat), DsGuid.FromGuid(med), videoDeviceFilter, DsGuid.FromGuid(iid), out o );
#endif
				if ( hr != 0 )
				{
					med = MediaType.Video ;
#if DSHOWNET
                    hr = captureGraphBuilder.FindInterface(
                        ref cat, ref med, videoDeviceFilter, ref iid, out o);
#else
					hr = captureGraphBuilder.FindInterface( 
						DsGuid.FromGuid(cat), DsGuid.FromGuid(med), videoDeviceFilter, DsGuid.FromGuid(iid), out o );
#endif
					if ( hr != 0 )
						o = null;
				}
				IAMTVTuner t = o as IAMTVTuner;
				if ( t != null )
				{
					tuner = new Tuner(t);
					// Do not forget to set proper country code (Netherlands is 31)
				}

				// No check on TV Audio needed, it will show up in the
				// PropertyPages when it is available
				// Code for finding the TV audio interface
				o = null;
				cat = PinCategory.Capture;
				med = MediaType.Interleaved;
				iid = typeof(IAMTVAudio).GUID;
				hr = captureGraphBuilder.FindInterface(
#if DSHOWNET
					ref cat, ref med, videoDeviceFilter, ref iid, out o);
#else
                    cat, med, videoDeviceFilter, iid, out o);
#endif
				if ( hr != 0 )
				{
					med = MediaType.Video;
#if DSHOWNET
					hr = captureGraphBuilder.FindInterface(
						ref cat, ref med, videoDeviceFilter, ref iid, out o);
#else
				hr = captureGraphBuilder.FindInterface(
					cat, med, videoDeviceFilter, iid, out o);
#endif
					if ( hr != 0 )
					{
						o = null;
					}
				}

				if((o != null)&&(tuner != null))
				{
					IAMTVAudio a = o as IAMTVAudio;
					TvAudio = a;
#if DEBUG
					Debug.WriteLine("FindInterface tuner.TvAudio");
#endif // DEBUG
				}

				/*
							// ----------- VMR 9 -------------------
							//## check out samples\inc\vmrutil.h :: RenderFileToVMR9

							IBaseFilter vmr = null;
							if ( ( VideoDevice != null ) && ( previewWindow != null ) )
							{
								vmr = (IBaseFilter) Activator.CreateInstance( Type.GetTypeFromCLSID( Clsid.VideoMixingRenderer9, true ) ); 
								hr = graphBuilder.AddFilter( vmr, "VMR" );
								if( hr < 0 ) Marshal.ThrowExceptionForHR( hr );

								IVMRFilterConfig9 vmrFilterConfig = (IVMRFilterConfig9) vmr;
								hr = vmrFilterConfig.SetRenderingMode( VMRMode9.Windowless );
								if( hr < 0 ) Marshal.ThrowExceptionForHR( hr );

								IVMRWindowlessControl9 vmrWindowsless = (IVMRWindowlessControl9) vmr;	
								hr = vmrWindowsless.SetVideoClippingWindow( previewWindow.Handle );
								if( hr < 0 ) Marshal.ThrowExceptionForHR( hr );
							}
							//------------------------------------------- 

							// ---------- SmartTee ---------------------

							IBaseFilter smartTeeFilter = (IBaseFilter) Activator.CreateInstance( Type.GetTypeFromCLSID( Clsid.SmartTee, true ) ); 
							hr = graphBuilder.AddFilter( smartTeeFilter, "Video Smart Tee" );
							if( hr < 0 ) Marshal.ThrowExceptionForHR( hr );

							// Video -> SmartTee
							cat = PinCategory.Capture;
							med = MediaType.Video;
							hr = captureGraphBuilder.RenderStream( DsGuid.FromGuid(cat), DsGuid.FromGuid(med), videoDeviceFilter, null, smartTeeFilter ); 
							if( hr < 0 ) Marshal.ThrowExceptionForHR( hr );

							// smarttee -> mux
							cat = PinCategory.Capture;
							med = MediaType.Video;
							hr = captureGraphBuilder.RenderStream( DsGuid.FromGuid(cat), DsGuid.FromGuid(med), smartTeeFilter, null, muxFilter ); 
							if( hr < 0 ) Marshal.ThrowExceptionForHR( hr );

							// smarttee -> vmr
							cat = PinCategory.Preview;
							med = MediaType.Video;
							hr = captureGraphBuilder.RenderStream( DsGuid.FromGuid(cat), DsGuid.FromGuid(med), smartTeeFilter, null, vmr ); 
							if( hr < 0 ) Marshal.ThrowExceptionForHR( hr );

							// -------------------------------------
				*/		
				// Update the state now that we are done
				graphState = GraphState.Created;
			}
		}
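The comment about the country code above refers to the tuner found via IAMTVTuner; a hedged sketch of what setting it would look like with that interface (31 is the code used for the Netherlands):

        if (t != null)
        {
            int hrCountry = t.put_CountryCode(31);   // 31 = Netherlands
            if (hrCountry < 0) Marshal.ThrowExceptionForHR(hrCountry);
        }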
Ejemplo n.º 26
0
        /// <summary> Create the required COM components and get the interfaces. </summary>
        protected virtual bool GetDVDInterfaces(string path)
        {
            int hr;
            //Type	            comtype = null;
            object comobj = null;

            _freeNavigator = true;
            _dvdInfo       = null;
            _dvdCtrl       = null;
            bool   useAC3Filter    = false;
            string dvdNavigator    = "";
            string aspectRatioMode = "";
            string displayMode     = "";

            _videoPref = DvdPreferredDisplayMode.DisplayContentDefault;
            using (MediaPortal.Profile.Settings xmlreader = new MPSettings())
            {
                dvdNavigator    = xmlreader.GetValueAsString("dvdplayer", "navigator", "DVD Navigator");
                aspectRatioMode = xmlreader.GetValueAsString("dvdplayer", "armode", "").ToLower();

                dvdNavigator = "dslibdvdnav";

                if (aspectRatioMode == "crop")
                {
                    arMode = AspectRatioMode.Crop;
                }
                if (aspectRatioMode == "letterbox")
                {
                    arMode = AspectRatioMode.LetterBox;
                }
                if (aspectRatioMode == "stretch")
                {
                    arMode = AspectRatioMode.Stretched;
                }
                //if ( aspectRatioMode == "stretch" ) arMode = AspectRatioMode.zoom14to9;
                if (aspectRatioMode == "follow stream")
                {
                    arMode = AspectRatioMode.StretchedAsPrimary;
                }
                useAC3Filter = xmlreader.GetValueAsBool("dvdplayer", "ac3", false);
                displayMode  = xmlreader.GetValueAsString("dvdplayer", "displaymode", "").ToLower();
                if (displayMode == "default")
                {
                    _videoPref = DvdPreferredDisplayMode.DisplayContentDefault;
                }
                if (displayMode == "16:9")
                {
                    _videoPref = DvdPreferredDisplayMode.Display16x9;
                }
                if (displayMode == "4:3 pan scan")
                {
                    _videoPref = DvdPreferredDisplayMode.Display4x3PanScanPreferred;
                }
                if (displayMode == "4:3 letterbox")
                {
                    _videoPref = DvdPreferredDisplayMode.Display4x3LetterBoxPreferred;
                }
            }
            try
            {
                _dvdGraph = (IDvdGraphBuilder) new DvdGraphBuilder();

                hr = _dvdGraph.GetFiltergraph(out _graphBuilder);
                if (hr < 0)
                {
                    Marshal.ThrowExceptionForHR(hr);
                }
                _rotEntry = new DsROTEntry((IFilterGraph)_graphBuilder);

                _vmr9Filter = (IBaseFilter) new VideoMixingRenderer9();
                IVMRFilterConfig9 config = _vmr9Filter as IVMRFilterConfig9;
                hr             = config.SetNumberOfStreams(1);
                hr             = config.SetRenderingMode(VMR9Mode.Windowless);
                windowlessCtrl = (IVMRWindowlessControl9)_vmr9Filter;
                windowlessCtrl.SetVideoClippingWindow(this.panVideoWin.Handle);


                //                config.SetRenderingPrefs(VMR9RenderPrefs.

                _graphBuilder.AddFilter(_vmr9Filter, "Video Mixing Renderer 9");

                //               _vmr7 = new VMR7Util();
                //               _vmr7.AddVMR7(_graphBuilder);

                try
                {
                    _dvdbasefilter = DirectShowUtil.AddFilterToGraph(_graphBuilder, dvdNavigator);
                    if (_dvdbasefilter != null)
                    {
                        IDvdControl2 cntl = (IDvdControl2)_dvdbasefilter;
                        if (cntl != null)
                        {
                            _dvdInfo = (IDvdInfo2)cntl;
                            _dvdCtrl = (IDvdControl2)cntl;
                            if (path != null)
                            {
                                if (path.Length != 0)
                                {
                                    cntl.SetDVDDirectory(path);
                                }
                            }
                            _dvdCtrl.SetOption(DvdOptionFlag.HMSFTimeCodeEvents, true); // use new HMSF timecode format
                            _dvdCtrl.SetOption(DvdOptionFlag.ResetOnStop, false);

                            AddPreferedCodecs(_graphBuilder);
                            DirectShowUtil.RenderOutputPins(_graphBuilder, _dvdbasefilter);


//                            _videoWin = _graphBuilder as IVideoWindow;
                            _freeNavigator = false;
                        }

                        //DirectShowUtil.ReleaseComObject( _dvdbasefilter); _dvdbasefilter = null;
                    }
                }
                catch (Exception ex)
                {
                    string strEx = ex.Message;
                }

                Guid riid;

                if (_dvdInfo == null)
                {
                    riid = typeof(IDvdInfo2).GUID;
                    hr   = _dvdGraph.GetDvdInterface(riid, out comobj);
                    if (hr < 0)
                    {
                        Marshal.ThrowExceptionForHR(hr);
                    }
                    _dvdInfo = (IDvdInfo2)comobj;
                    comobj   = null;
                }

                if (_dvdCtrl == null)
                {
                    riid = typeof(IDvdControl2).GUID;
                    hr   = _dvdGraph.GetDvdInterface(riid, out comobj);
                    if (hr < 0)
                    {
                        Marshal.ThrowExceptionForHR(hr);
                    }
                    _dvdCtrl = (IDvdControl2)comobj;
                    comobj   = null;
                }

                _mediaCtrl  = (IMediaControl)_graphBuilder;
                _mediaEvt   = (IMediaEventEx)_graphBuilder;
                _basicAudio = _graphBuilder as IBasicAudio;
                _mediaPos   = (IMediaPosition)_graphBuilder;
                _mediaSeek  = (IMediaSeeking)_graphBuilder;
                _mediaStep  = (IVideoFrameStep)_graphBuilder;
                _basicVideo = _graphBuilder as IBasicVideo2;
                _videoWin   = _graphBuilder as IVideoWindow;

                // disable Closed Captions!
                IBaseFilter baseFilter;
                _graphBuilder.FindFilterByName("Line 21 Decoder", out baseFilter);
                if (baseFilter == null)
                {
                    _graphBuilder.FindFilterByName("Line21 Decoder", out baseFilter);
                }
                if (baseFilter != null)
                {
                    _line21Decoder = (IAMLine21Decoder)baseFilter;
                    if (_line21Decoder != null)
                    {
                        AMLine21CCState state = AMLine21CCState.Off;
                        hr = _line21Decoder.SetServiceState(state);
                        if (hr == 0)
                        {
                            logger.Info("DVDPlayer:Closed Captions disabled");
                        }
                        else
                        {
                            logger.Info("DVDPlayer:failed 2 disable Closed Captions");
                        }
                    }
                }

                /*
                 *      // get video window
                 *      if (_videoWin==null)
                 *      {
                 *        riid = typeof( IVideoWindow ).GUID;
                 *        hr = _dvdGraph.GetDvdInterface( ref riid, out comobj );
                 *        if( hr < 0 )
                 *          Marshal.ThrowExceptionForHR( hr );
                 *        _videoWin = (IVideoWindow) comobj; comobj = null;
                 *      }
                 */
                // GetFrameStepInterface();

                DirectShowUtil.SetARMode(_graphBuilder, arMode);
                DirectShowUtil.EnableDeInterlace(_graphBuilder);
                //m_ovMgr = new OVTOOLLib.OvMgrClass();
                //m_ovMgr.SetGraph(_graphBuilder);

                return(true);
            }
            catch (Exception)
            {
                //MessageBox.Show( this, "Could not get interfaces\r\n" + ee.Message, "DVDPlayer.NET", MessageBoxButtons.OK, MessageBoxIcon.Stop );
                CloseDVDInterfaces();
                return(false);
            }
            finally
            {
                if (comobj != null)
                {
                    DirectShowUtil.ReleaseComObject(comobj);
                }
                comobj = null;
            }
        }
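A hedged usage sketch: once GetDVDInterfaces() returns true, playback is typically started by running the graph; jumping to a specific title through IDvdControl2 is optional, and the returned IDvdCmd is released if it is not awaited:

        int hr = _mediaCtrl.Run();
        DsError.ThrowExceptionForHR(hr);

        IDvdCmd cmd;
        hr = _dvdCtrl.PlayTitle(1, DvdCmdFlags.None, out cmd);
        DsError.ThrowExceptionForHR(hr);
        if (cmd != null)
        {
            Marshal.ReleaseComObject(cmd);
        }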
Ejemplo n.º 27
0
        protected CaptureGraph(FilterInfo fiSource)
        {
            try
            {
                // Fgm initialization
                fgm = new FilgraphManagerClass();
                iFG = (IFilterGraph)fgm;
                iGB = (IGraphBuilder)fgm;
                rotID = FilterGraph.AddToRot(iGB);

                // Create source filter and initialize it
                source = (SourceFilter)Filter.CreateFilter(fiSource);
                iGB.AddFilter(source.BaseFilter, source.FriendlyName);
                source.AddedToGraph(fgm);

                // Pass flags to the RtpRenderer filter from the config file.
                this.rtpRendererFlags = 0;
                string setting = ConfigurationManager.AppSettings[AppConfig.MDS_RtpRendererFlags];
                if (!String.IsNullOrEmpty(setting)) {
                    if (!byte.TryParse(setting,out rtpRendererFlags)) {
                        rtpRendererFlags = 0;
                    }
                }
            }
            catch(Exception)
            {
                Cleanup();
                throw;
            }
        }
Ejemplo n.º 28
0
        /// <summary>
        /// Creates a new Video Player. Automatically creates the required Texture2D on the specified GraphicsDevice.
        /// </summary>
        /// <param name="FileName">The video file to open</param>
        /// <param name="graphicsDevice">XNA Graphics Device</param>
        public VideoPlayer(string FileName, GraphicsDevice graphicsDevice)
        {
            try
            {
                // Set video state
                currentState = VideoState.Stopped;

                // Store Filename
                filename = FileName;

                // Open DirectShow Interfaces
                InitInterfaces();

                // Create a SampleGrabber Filter and add it to the FilterGraph
                SampleGrabber  sg            = new SampleGrabber();
                ISampleGrabber sampleGrabber = (ISampleGrabber)sg;
                DsError.ThrowExceptionForHR(gb.AddFilter((IBaseFilter)sg, "Grabber"));

                // Setup Media type info for the SampleGrabber
                AMMediaType mt = new AMMediaType();
                mt.majorType  = MEDIATYPE_Video;    // Video
                mt.subType    = MEDIASUBTYPE_RGB24; // RGB24
                mt.formatType = FORMAT_VideoInfo;   // VideoInfo
                DsError.ThrowExceptionForHR(sampleGrabber.SetMediaType(mt));

                // Construct the rest of the FilterGraph
                DsError.ThrowExceptionForHR(gb.RenderFile(filename, null));

                // Set SampleGrabber Properties
                DsError.ThrowExceptionForHR(sampleGrabber.SetBufferSamples(true));
                DsError.ThrowExceptionForHR(sampleGrabber.SetOneShot(false));
                DsError.ThrowExceptionForHR(sampleGrabber.SetCallback((ISampleGrabberCB)this, 1));

                // Hide Default Video Window
                IVideoWindow pVideoWindow = (IVideoWindow)gb;
                DsError.ThrowExceptionForHR(pVideoWindow.put_AutoShow(OABool.False));

                // Create AMMediaType to capture video information
                AMMediaType MediaType = new AMMediaType();
                DsError.ThrowExceptionForHR(sampleGrabber.GetConnectedMediaType(MediaType));
                VideoInfoHeader pVideoHeader = new VideoInfoHeader();
                Marshal.PtrToStructure(MediaType.formatPtr, pVideoHeader);

                // Store video information
                videoHeight     = pVideoHeader.BmiHeader.Height;
                videoWidth      = pVideoHeader.BmiHeader.Width;
                avgTimePerFrame = pVideoHeader.AvgTimePerFrame;
                bitRate         = pVideoHeader.BitRate;
                DsError.ThrowExceptionForHR(ms.GetDuration(out videoDuration));

                // Create byte arrays to hold video data
                videoFrameBytes = new byte[(videoHeight * videoWidth) * 4]; // RGBA format (4 bytes per pixel)
                bgrData         = new byte[(videoHeight * videoWidth) * 3]; // BGR24 format (3 bytes per pixel)

                // Create Output Frame Texture2D with the height and width of the video
                outputFrame = new Texture2D(graphicsDevice, videoWidth, videoHeight, 1, TextureUsage.None, SurfaceFormat.Color);
            }
            catch
            {
                throw new Exception("Unable to Load or Play the video file");
            }
        }
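        // [Sketch] The constructor above registers this object as the grabber callback with
        // SetCallback((ISampleGrabberCB)this, 1), so the class presumably implements BufferCB.
        // The actual implementation is not shown in this example; the following is only a hedged
        // sketch that reuses the bgrData / videoFrameBytes buffers declared above to expand
        // BGR24 frames into the RGBA layout the Texture2D expects (row flipping is ignored).
        public int BufferCB(double sampleTime, IntPtr pBuffer, int bufferLen)
        {
            // Copy the BGR24 frame out of the grabber's buffer.
            Marshal.Copy(pBuffer, bgrData, 0, Math.Min(bufferLen, bgrData.Length));

            // Expand BGR (3 bytes per pixel) to RGBA (4 bytes per pixel).
            for (int src = 0, dst = 0; src + 2 < bgrData.Length; src += 3, dst += 4)
            {
                videoFrameBytes[dst]     = bgrData[src + 2]; // R
                videoFrameBytes[dst + 1] = bgrData[src + 1]; // G
                videoFrameBytes[dst + 2] = bgrData[src];     // B
                videoFrameBytes[dst + 3] = 255;              // A
            }
            return 0;
        }

        public int SampleCB(double sampleTime, IMediaSample pSample)
        {
            return 0;
        }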
        public void InitDevice(DsDevice device, int iWidth, int iHeight)
        {
            int hr;
            object camDevice;
            Guid iid = typeof(IBaseFilter).GUID;
            device.Mon.BindToObject(null, null, ref iid, out camDevice);
            IBaseFilter camFilter = camDevice as IBaseFilter;
            m_CameraControl = camFilter as IAMCameraControl;
            m_VideoControl = camFilter as IAMVideoProcAmp;
            ISampleGrabber sampGrabber = null;

            graphBuilder = (IGraphBuilder)new FilterGraph();

            //Create the Capture Graph Builder
            ICaptureGraphBuilder2 captureGraphBuilder = null;
            captureGraphBuilder = (ICaptureGraphBuilder2)new CaptureGraphBuilder2();

            // Attach the filter graph to the capture graph
            hr = captureGraphBuilder.SetFiltergraph(this.graphBuilder);
            DsError.ThrowExceptionForHR(hr);

            //Add the Video input device to the graph
            hr = graphBuilder.AddFilter(camFilter, "WebCam" + deviceNumber);
            DsError.ThrowExceptionForHR(hr);

            // Configure the sample grabber
            sampGrabber = new SampleGrabber() as ISampleGrabber;
            ConfigureSampleGrabber(sampGrabber);
            IBaseFilter sampGrabberBF = sampGrabber as IBaseFilter;

            //Add the Video compressor filter to the graph
            hr = graphBuilder.AddFilter(sampGrabberBF, "SampleGrabber" + deviceNumber);
            DsError.ThrowExceptionForHR(hr);

            IBaseFilter nullRender = new NullRenderer() as IBaseFilter;
            hr = graphBuilder.AddFilter(nullRender, "NullRenderer" + deviceNumber);
            DsError.ThrowExceptionForHR(hr);
            InitResolution(captureGraphBuilder, camFilter, iWidth, iHeight);

            hr = captureGraphBuilder.RenderStream(PinCategory.Capture, MediaType.Video, camDevice, sampGrabberBF, nullRender);
            DsError.ThrowExceptionForHR(hr);

            SaveSizeInfo(sampGrabber);

            Marshal.ReleaseComObject(sampGrabber);
            Marshal.ReleaseComObject(captureGraphBuilder);
        }
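
ConfigureSampleGrabber and SaveSizeInfo above come from the surrounding class and are not reproduced here. A minimal ConfigureSampleGrabber sketch, assuming DirectShowLib types and that this class implements ISampleGrabberCB, would request RGB24 samples and register the buffer callback:

        // Hypothetical sketch of the ConfigureSampleGrabber helper used above.
        private void ConfigureSampleGrabber(ISampleGrabber sampGrabber)
        {
            AMMediaType media = new AMMediaType();
            media.majorType  = MediaType.Video;
            media.subType    = MediaSubType.RGB24;
            media.formatType = FormatType.VideoInfo;

            int hr = sampGrabber.SetMediaType(media);
            DsError.ThrowExceptionForHR(hr);
            DsUtils.FreeAMMediaType(media);

            // Deliver frames through BufferCB on this class (assumed to implement ISampleGrabberCB)
            hr = sampGrabber.SetCallback(this, 1);
            DsError.ThrowExceptionForHR(hr);
        }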
Ejemplo n.º 30
0
        /// <summary>
        /// Worker thread that captures the images
        /// </summary>
        private void RunWorker()
        {
            try
            {
                // Create the main graph
                _graph = Activator.CreateInstance(Type.GetTypeFromCLSID(FilterGraph)) as IGraphBuilder;

                // Create the webcam source
                _sourceObject = FilterInfo.CreateFilter(_monikerString);

                // Create the grabber
                _grabber = Activator.CreateInstance(Type.GetTypeFromCLSID(SampleGrabber)) as ISampleGrabber;
                _grabberObject = _grabber as IBaseFilter;

                // Add the source and grabber to the main graph
                _graph.AddFilter(_sourceObject, "source");
                _graph.AddFilter(_grabberObject, "grabber");

                using (AMMediaType mediaType = new AMMediaType())
                {
                    mediaType.MajorType = MediaTypes.Video;
                    mediaType.SubType = MediaSubTypes.RGB32;
                    _grabber.SetMediaType(mediaType);

                    if (_graph.Connect(_sourceObject.GetPin(PinDirection.Output, 0), _grabberObject.GetPin(PinDirection.Input, 0)) >= 0)
                    {
                        if (_grabber.GetConnectedMediaType(mediaType) == 0)
                        {
                            // During startup, this code can be too fast, so try at least 3 times
                            int retryCount = 0;
                            bool succeeded = false;
                            while ((retryCount < 3) && !succeeded)
                            {
                                // Try again
                                retryCount++;

                                try
                                {
                                    // Retrieve the grabber information
                                    VideoInfoHeader header = (VideoInfoHeader)Marshal.PtrToStructure(mediaType.FormatPtr, typeof(VideoInfoHeader));
                                    _capGrabber.Width = header.BmiHeader.Width;
                                    _capGrabber.Height = header.BmiHeader.Height;

                                    // Succeeded
                                    succeeded = true;
                                }
                                catch (Exception retryException)
                                {
                                    // Trace
                                    Trace.TraceInformation("Failed to retrieve the grabber information, tried {0} time(s)", retryCount);

                                    // Sleep
                                    Thread.Sleep(50);
                                }
                            }
                        }
                    }
                    _graph.Render(_grabberObject.GetPin(PinDirection.Output, 0));
                    _grabber.SetBufferSamples(false);
                    _grabber.SetOneShot(false);
                    _grabber.SetCallback(_capGrabber, 1);

                    // Get the video window
                    IVideoWindow wnd = (IVideoWindow)_graph;
                    wnd.put_AutoShow(false);
                    wnd = null;

                    // Create the control and run
                    _control = (IMediaControl)_graph;
                    _control.Run();

                    // Wait for the stop signal
                    while (!_stopSignal.WaitOne(0, true))
                    {
                        Thread.Sleep(10);
                    }

                    // Stop when ready
                    _control.StopWhenReady();
                }
            }
            catch (Exception ex)
            {
                // Trace
                Trace.WriteLine(ex);
            }
            finally
            {
                // Clean up
                Release();
            }
        }
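
The GetPin calls on the source and grabber filters above rely on an extension method that is not part of this snippet. One way to write it, assuming DirectShowLib-style pin interfaces (the interop this sample actually uses may differ slightly), is a top-level static class that enumerates the filter's pins:

    // Hypothetical sketch: return the n-th pin of a filter that flows in the requested direction.
    public static class FilterPinExtensions
    {
        public static IPin GetPin(this IBaseFilter filter, PinDirection direction, int index)
        {
            IEnumPins enumPins;
            IPin result = null;
            filter.EnumPins(out enumPins);

            IPin[] pins = new IPin[1];
            while (result == null && enumPins.Next(1, pins, IntPtr.Zero) == 0)
            {
                PinDirection pinDirection;
                pins[0].QueryDirection(out pinDirection);

                if (pinDirection == direction && index-- == 0)
                {
                    result = pins[0]; // caller is responsible for releasing this pin
                }
                else
                {
                    Marshal.ReleaseComObject(pins[0]);
                }
            }

            Marshal.ReleaseComObject(enumPins);
            return result;
        }
    }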
Ejemplo n.º 31
0
        /// <summary>
        /// Worker thread
        /// </summary>
        private void WorkerThread()
        {
            VideoSourceFinishedReasonType reasonToStop = VideoSourceFinishedReasonType.StoppedByUser;

            // grabber
            Grabber grabber = new Grabber(this);

            // objects
            object graphObject   = null;
            object grabberObject = null;

            // interfaces
            IGraphBuilder  graph         = null;
            IBaseFilter    sourceBase    = null;
            IBaseFilter    grabberBase   = null;
            ISampleGrabber sampleGrabber = null;
            IMediaControl  mediaControl  = null;
            IMediaEventEx  mediaEvent    = null;

            try
            {
                // get type for filter graph
                Type type = Type.GetTypeFromCLSID(Clsid.FilterGraph);
                if (type == null)
                {
                    throw new VideoSourceException("Failed creating filter graph");
                }

                // create filter graph
                graphObject = Activator.CreateInstance(type);
                graph       = (IGraphBuilder)graphObject;

                // create source device's object
                graph.AddSourceFilter(Source, "source", out sourceBase);
                if (sourceBase == null)
                {
                    throw new VideoSourceException("Failed creating source filter");
                }

                // get type for sample grabber
                type = Type.GetTypeFromCLSID(Clsid.SampleGrabber);
                if (type == null)
                {
                    throw new VideoSourceException("Failed creating sample grabber");
                }

                // create sample grabber
                grabberObject = Activator.CreateInstance(type);
                sampleGrabber = (ISampleGrabber)grabberObject;
                grabberBase   = (IBaseFilter)grabberObject;

                // add grabber filters to graph
                graph.AddFilter(grabberBase, "grabber");

                // set media type
                AMMediaType mediaType = new AMMediaType();
                mediaType.MajorType = MediaType.Video;
                mediaType.SubType   = MediaSubType.RGB24;
                sampleGrabber.SetMediaType(mediaType);

                // connect pins
                int pinToTry = 0;

                IPin inPin  = PinHelper.GetInPin(grabberBase, 0);
                IPin outPin = null;

                // find output pin acceptable by sample grabber
                while (true)
                {
                    outPin = PinHelper.GetOutPin(sourceBase, pinToTry);

                    if (outPin == null)
                    {
                        Marshal.ReleaseComObject(inPin);
                        throw new VideoSourceException("Cannot find acceptable output video pin in the given source");
                    }

                    if (graph.Connect(outPin, inPin) < 0)
                    {
                        Marshal.ReleaseComObject(outPin);
                        outPin = null;
                        pinToTry++;
                    }
                    else
                    {
                        break;
                    }
                }

                Marshal.ReleaseComObject(outPin);
                Marshal.ReleaseComObject(inPin);

                // get media type
                if (sampleGrabber.GetConnectedMediaType(mediaType) == 0)
                {
                    VideoInfoHeader vih = (VideoInfoHeader)Marshal.PtrToStructure(mediaType.FormatPtr, typeof(VideoInfoHeader));

                    grabber.Width  = vih.BmiHeader.Width;
                    grabber.Height = vih.BmiHeader.Height;
                    mediaType.Dispose();
                }

                // let's do rendering, if we don't need to prevent freezing
                if (!PreventFreezing)
                {
                    // render pin
                    graph.Render(PinHelper.GetOutPin(grabberBase, 0));

                    // configure video window
                    IVideoWindow window = (IVideoWindow)graphObject;
                    window.put_AutoShow(false);
                    window = null;
                }

                // configure sample grabber
                sampleGrabber.SetBufferSamples(false);
                sampleGrabber.SetOneShot(false);
                sampleGrabber.SetCallback(grabber, 1);

                // disable clock, if someone requested it
                if (!ReferenceClockEnabled)
                {
                    IMediaFilter mediaFilter = (IMediaFilter)graphObject;
                    mediaFilter.SetSyncSource(null);
                }

                // get media control
                mediaControl = (IMediaControl)graphObject;

                // get media events' interface
                mediaEvent = (IMediaEventEx)graphObject;
                IntPtr   p1, p2;
                DsEvCode code;

                // run
                mediaControl.Run();

                while (!stopEvent.WaitOne(0, false))
                {
                    Thread.Sleep(100);

                    if (mediaEvent != null)
                    {
                        if (mediaEvent.GetEvent(out code, out p1, out p2, 0) >= 0)
                        {
                            mediaEvent.FreeEventParams(code, p1, p2);

                            if (code == DsEvCode.Complete)
                            {
                                reasonToStop = VideoSourceFinishedReasonType.EndOfStreamReached;
                                break;
                            }
                        }
                    }
                }

                // stop
                mediaControl.Stop();
            }
            catch (Exception ex)
            {
                // provide information to clients
                if (VideoSourceException != null)
                {
                    VideoSourceException(this, new VideoSourceExceptionEventArgs(ex.Message));
                }
            }
            finally
            {
                // release all objects
                graph         = null;
                grabberBase   = null;
                sampleGrabber = null;
                mediaControl  = null;
                mediaEvent    = null;

                if (graphObject != null)
                {
                    Marshal.ReleaseComObject(graphObject);
                    graphObject = null;
                }
                if (sourceBase != null)
                {
                    Marshal.ReleaseComObject(sourceBase);
                    sourceBase = null;
                }
                if (grabberObject != null)
                {
                    Marshal.ReleaseComObject(grabberObject);
                    grabberObject = null;
                }
            }

            if (VideoSourceFinished != null)
            {
                VideoSourceFinished(this, new VideoSourceFinishedEventArgs(reasonToStop));
            }
        }
Ejemplo n.º 32
0
        private void CaptureVideo()
        {
            int retVal;

            graph   = (IGraphBuilder) new FilterGraph();
            capture = (ICaptureGraphBuilder2) new CaptureGraphBuilder();

            IMediaControl control = (IMediaControl)graph;
            IMediaEventEx eventEx = (IMediaEventEx)graph;

            retVal = capture.SetFiltergraph(graph);

            Dictionary <string, IMoniker> devices = EnumDevices(Clsid.VideoInputDeviceCategory);

            if (devices.Count == 0)
            {
                return;
            }

            IMoniker moniker = devices.First().Value;
            object   obj     = null;

            moniker.BindToObject(null, null, typeof(IBaseFilter).GUID, out obj);

            IBaseFilter baseFilter = (IBaseFilter)obj;

            retVal = graph.AddFilter(baseFilter, devices.First().Key);

            Guid        CLSID_SampleGrabber = new Guid("{C1F400A0-3F08-11D3-9F0B-006008039E37}");
            IBaseFilter grabber             = Activator.CreateInstance(Type.GetTypeFromCLSID(CLSID_SampleGrabber)) as IBaseFilter;

            var media = new AMMediaType();

            media.MajorType  = MediaType.Video;
            media.SubType    = MediaSubType.RGB24;
            media.FormatType = FormatType.VideoInfo;
            retVal           = ((ISampleGrabber)grabber).SetMediaType(media);

            object configObj;

            retVal = capture.FindInterface(PinCategory.Capture, MediaType.Video, baseFilter, typeof(IAMStreamConfig).GUID, out configObj);
            IAMStreamConfig config = (IAMStreamConfig)configObj;

            AMMediaType pmt;

            retVal = config.GetFormat(out pmt);

            var header = (VideoInfoHeader)Marshal.PtrToStructure(pmt.FormatPtr, typeof(VideoInfoHeader));
            var width  = header.BmiHeader.Width;
            var height = header.BmiHeader.Height;
            var stride = 4 * ((24 * width + 31) / 32); //width * (header.BmiHeader.BitCount / 8);

            callback = new SampleGrabberCallback()
            {
                Width = width, Height = height, Stride = stride
            };
            callback.callback = Image_OnPreview;
            retVal            = ((ISampleGrabber)grabber).SetCallback(callback, 0);

            retVal = graph.AddFilter(grabber, "SampleGrabber");

            IPin output  = GetPin(baseFilter, p => p.Name == "Capture");
            IPin input   = GetPin(grabber, p => p.Name == "Input");
            IPin preview = GetPin(grabber, p => p.Name == "Output");

            //retVal = graph.ConnectDirect(output, input, pmt);
            //retVal = graph.Connect(output, input);

            retVal = capture.RenderStream(PinCategory.Preview, MediaType.Video, baseFilter, grabber, null);


            //var wih = new WindowInteropHelper(this);
            var panel = FindName("PART_VideoPanel") as System.Windows.Forms.Panel;


            IVideoWindow window = (IVideoWindow)graph;

            retVal = window.put_Owner(panel.Handle);
            retVal = window.put_WindowStyle(WindowStyles.WS_CHILD | WindowStyles.WS_CLIPCHILDREN);
            retVal = window.SetWindowPosition(0, 0, (int)panel.ClientSize.Width, (int)panel.ClientSize.Height);
            retVal = window.put_MessageDrain(panel.Handle);
            retVal = window.put_Visible(-1); //OATRUE

            retVal = control.Run();
        }
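
EnumDevices above is a helper from the surrounding class. A compact sketch built on DirectShowLib's DsDevice (the original may enumerate ICreateDevEnum directly instead) maps each device's friendly name to its moniker:

        // Hypothetical sketch: enumerate devices of a category into name -> moniker pairs.
        private Dictionary<string, IMoniker> EnumDevices(Guid category)
        {
            var devices = new Dictionary<string, IMoniker>();

            foreach (DsDevice device in DsDevice.GetDevicesOfCat(category))
            {
                devices[device.Name] = device.Mon;
            }

            return devices;
        }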
Ejemplo n.º 33
0
        /// <summary>
        /// build the capture graph for grabber.
        /// </summary>
        private void SetupGraph()
        {
            int  hr;
            Guid cat;
            Guid med;

            try
            {
                hr = capGraph.SetFiltergraph(graphBuilder);
                if (hr < 0)
                {
                    Marshal.ThrowExceptionForHR(hr);
                }

                hr = graphBuilder.AddFilter(capFilter, "Ds.NET Video Capture Device");
                if (hr < 0)
                {
                    Marshal.ThrowExceptionForHR(hr);
                }

                AMMediaType media = new AMMediaType();
                media.majorType  = MediaType.Video;
                media.subType    = MediaSubType.RGB32;
                media.formatType = FormatType.VideoInfo;                // ???

                hr = sampGrabber.SetMediaType(media);
                if (hr < 0)
                {
                    Marshal.ThrowExceptionForHR(hr);
                }

                hr = graphBuilder.AddFilter(baseGrabFlt, "Ds.NET Grabber");
                if (hr < 0)
                {
                    Marshal.ThrowExceptionForHR(hr);
                }

                object o;
                cat = PinCategory.Capture;
                med = MediaType.Video;
                Guid iid = typeof(IAMStreamConfig).GUID;
                hr = capGraph.FindInterface(
                    ref cat, ref med, capFilter, ref iid, out o);

                videoStreamConfig = o as IAMStreamConfig;

                hr = sampGrabber.SetBufferSamples(false);
                if (hr == 0)
                {
                    hr = sampGrabber.SetOneShot(false);
                }
                if (hr == 0)
                {
                    hr = sampGrabber.SetCallback(null, 0);
                }
                if (hr < 0)
                {
                    Marshal.ThrowExceptionForHR(hr);
                }

                BitmapInfoHeader bmiHeader;
                bmiHeader        = (BitmapInfoHeader)getStreamConfigSetting(videoStreamConfig, "BmiHeader");
                bmiHeader.Width  = cameraWidth;
                bmiHeader.Height = cameraHeight;
                setStreamConfigSetting(videoStreamConfig, "BmiHeader", bmiHeader);

                bmiHeader = (BitmapInfoHeader)getStreamConfigSetting(videoStreamConfig, "BmiHeader");
                if (bmiHeader.Width != cameraWidth)
                {
                    throw new GoblinException("Could not change the resolution to " + cameraWidth + "x" +
                                              cameraHeight + ". The resolution has to be " + bmiHeader.Width + "x" +
                                              bmiHeader.Height);
                }

                cat = PinCategory.Preview;
                med = MediaType.Video;
                hr  = capGraph.RenderStream(ref cat, ref med, capFilter, null, baseGrabFlt); // baseGrabFlt
                if (hr < 0)
                {
                    Marshal.ThrowExceptionForHR(hr);
                }

                media = new AMMediaType();
                hr    = sampGrabber.GetConnectedMediaType(media);
                if (hr < 0)
                {
                    Marshal.ThrowExceptionForHR(hr);
                }
                if ((media.formatType != FormatType.VideoInfo) || (media.formatPtr == IntPtr.Zero))
                {
                    throw new NotSupportedException("Unknown Grabber Media Format");
                }

                videoInfoHeader = (VideoInfoHeader)Marshal.PtrToStructure(media.formatPtr, typeof(VideoInfoHeader));
                Marshal.FreeCoTaskMem(media.formatPtr); media.formatPtr = IntPtr.Zero;
            }
            catch (Exception ee)
            {
                throw new GoblinException("Could not setup graph\r\n" + ee.Message);
            }
        }
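
getStreamConfigSetting and setStreamConfigSetting above are not included in this snippet; they typically pull the current format with IAMStreamConfig.GetFormat, read or patch the VideoInfoHeader's BmiHeader, and push it back with SetFormat. A condensed sketch of the resolution-change case, assuming the same DirectShow.NET-style types used above, is:

        // Hypothetical sketch: change the capture resolution through IAMStreamConfig.
        private void SetCaptureResolution(IAMStreamConfig streamConfig, int width, int height)
        {
            AMMediaType media;
            int hr = streamConfig.GetFormat(out media);
            if (hr < 0)
            {
                Marshal.ThrowExceptionForHR(hr);
            }

            // Patch the BITMAPINFOHEADER embedded in the VIDEOINFOHEADER
            VideoInfoHeader vih = (VideoInfoHeader)Marshal.PtrToStructure(media.formatPtr, typeof(VideoInfoHeader));
            vih.BmiHeader.Width  = width;
            vih.BmiHeader.Height = height;
            Marshal.StructureToPtr(vih, media.formatPtr, false);

            hr = streamConfig.SetFormat(media);
            if (hr < 0)
            {
                Marshal.ThrowExceptionForHR(hr);
            }

            Marshal.FreeCoTaskMem(media.formatPtr); media.formatPtr = IntPtr.Zero;
        }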
Ejemplo n.º 34
0
        /// <summary>
        /// Configures the DirectShow graph to play the selected video capture
        /// device with the selected parameters
        /// </summary>
        private void SetupGraph()
        {
            /* Clean up any messes left behind */
            FreeResources();

            try
            {
                /* Create a new graph */
                m_graph = (IGraphBuilder)new FilterGraphNoThread();

            #if DEBUG
                m_rotEntry = new DsROTEntry(m_graph);
            #endif

                /* Create a capture graph builder to help
                 * with rendering a capture graph */
                var graphBuilder = (ICaptureGraphBuilder2)new CaptureGraphBuilder2();

                /* Set our filter graph to the capture graph */
                int hr = graphBuilder.SetFiltergraph(m_graph);
                DsError.ThrowExceptionForHR(hr);

                /* Add our capture device source to the graph */
                if (m_videoCaptureSourceChanged)
                {
                    m_captureDevice = AddFilterByName(m_graph,
                                                      FilterCategory.VideoInputDevice,
                                                      VideoCaptureSource);

                    m_videoCaptureSourceChanged = false;
                }
                else if (m_videoCaptureDeviceChanged)
                {
                    m_captureDevice = AddFilterByDevicePath(m_graph,
                                                            FilterCategory.VideoInputDevice,
                                                            VideoCaptureDevice.DevicePath);

                    m_videoCaptureDeviceChanged = false;
                }

                /* If we have a null capture device, we have an issue */
                if (m_captureDevice == null)
                    throw new Exception(string.Format("Capture device {0} not found or could not be created", VideoCaptureSource));

                if (UseYuv && !EnableSampleGrabbing)
                {
                    /* Configure the video output pin with our parameters and if it fails
                     * then just use the default media subtype*/
                    if (!SetVideoCaptureParameters(graphBuilder, m_captureDevice, MediaSubType.YUY2))
                        SetVideoCaptureParameters(graphBuilder, m_captureDevice, Guid.Empty);
                }
                else
                    /* Configure the video output pin with our parameters */
                    SetVideoCaptureParameters(graphBuilder, m_captureDevice, Guid.Empty);

                var rendererType = VideoRendererType.VideoMixingRenderer9;

                /* Creates a video renderer and register the allocator with the base class */
                m_renderer = CreateVideoRenderer(rendererType, m_graph, 1);

                if (rendererType == VideoRendererType.VideoMixingRenderer9)
                {
                    var mixer = m_renderer as IVMRMixerControl9;

                    if (mixer != null && !EnableSampleGrabbing && UseYuv)
                    {
                        VMR9MixerPrefs dwPrefs;
                        mixer.GetMixingPrefs(out dwPrefs);
                        dwPrefs &= ~VMR9MixerPrefs.RenderTargetMask;
                        dwPrefs |= VMR9MixerPrefs.RenderTargetYUV;
                        /* Prefer YUV */
                        mixer.SetMixingPrefs(dwPrefs);
                    }
                }

                if (EnableSampleGrabbing)
                {
                    m_sampleGrabber = (ISampleGrabber)new SampleGrabber();
                    SetupSampleGrabber(m_sampleGrabber);
                    hr = m_graph.AddFilter(m_sampleGrabber as IBaseFilter, "SampleGrabber");
                    DsError.ThrowExceptionForHR(hr);
                }

                IBaseFilter mux = null;
                IFileSinkFilter sink = null;
                if (!string.IsNullOrEmpty(this.m_fileName))
                {
                    hr = graphBuilder.SetOutputFileName(MediaSubType.Asf, this.m_fileName, out mux, out sink);
                    DsError.ThrowExceptionForHR(hr);

                    hr = graphBuilder.RenderStream(PinCategory.Capture, MediaType.Video, m_captureDevice, null, mux);
                    DsError.ThrowExceptionForHR(hr);

                    // use the first audio device
                    var audioDevices = DsDevice.GetDevicesOfCat(FilterCategory.AudioInputDevice);

                    if (audioDevices.Length > 0)
                    {
                        var audioDevice = AddFilterByDevicePath(m_graph,
                                                            FilterCategory.AudioInputDevice,
                                                            audioDevices[0].DevicePath);

                        hr = graphBuilder.RenderStream(PinCategory.Capture, MediaType.Audio, audioDevice, null, mux);
                        DsError.ThrowExceptionForHR(hr);
                    }
                }

                hr = graphBuilder.RenderStream(PinCategory.Preview,
                                               MediaType.Video,
                                               m_captureDevice,
                                               null,
                                               m_renderer);

                DsError.ThrowExceptionForHR(hr);

                /* Register the filter graph
                 * with the base classes */
                SetupFilterGraph(m_graph);

                /* Sets the NaturalVideoWidth/Height */
                SetNativePixelSizes(m_renderer);

                HasVideo = true;

                /* Make sure we Release() this COM reference */
                if (mux != null)
                {
                    Marshal.ReleaseComObject(mux);
                }
                if (sink != null)
                {
                    Marshal.ReleaseComObject(sink);
                }

                Marshal.ReleaseComObject(graphBuilder);
            }
            catch (Exception ex)
            {
                /* Something went wrong while building the graph */
                FreeResources();
                InvokeMediaFailed(new MediaFailedEventArgs(ex.Message, ex));
            }

            /* Success */
            InvokeMediaOpened();
        }
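
AddFilterByName and AddFilterByDevicePath above are helpers from the surrounding player class and are not reproduced here. A minimal AddFilterByName sketch, assuming DirectShowLib's DsDevice helper, binds the matching device moniker to an IBaseFilter and adds it to the graph:

        // Hypothetical sketch: find a device by friendly name in a category and add it to the graph.
        private static IBaseFilter AddFilterByName(IGraphBuilder graph, Guid category, string friendlyName)
        {
            foreach (DsDevice device in DsDevice.GetDevicesOfCat(category))
            {
                if (device.Name != friendlyName)
                    continue;

                object source;
                Guid iid = typeof(IBaseFilter).GUID;
                device.Mon.BindToObject(null, null, ref iid, out source);

                var filter = (IBaseFilter)source;
                int hr = graph.AddFilter(filter, friendlyName);
                DsError.ThrowExceptionForHR(hr);
                return filter;
            }

            return null;
        }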
Ejemplo n.º 35
0
        /// <summary>
        ///  Create a new filter graph and add filters (devices, compressors, misc),
        ///  but leave the filters unconnected. Call RenderGraph()
        ///  to connect the filters.
        /// </summary>
        void CreateGraph()
        {
            //Skip if already created
            if ((int)_actualGraphState < (int)GraphState.Created)
            {
                // Make a new filter graph
                _graphBuilder = (IGraphBuilder) new FilterGraph();

                // Get the Capture Graph Builder
                _captureGraphBuilder = (ICaptureGraphBuilder2) new CaptureGraphBuilder2();

                // Link the CaptureGraphBuilder to the filter graph
                var hr = _captureGraphBuilder.SetFiltergraph(_graphBuilder);
                if (hr < 0)
                {
                    Marshal.ThrowExceptionForHR(hr);
                }

                var comObj = new SampleGrabber();
                _sampGrabber = (ISampleGrabber)comObj;

                _baseGrabFlt = (IBaseFilter)_sampGrabber;

                var media = new AMMediaType();
                // Get the video device and add it to the filter graph
                if (VideoDevice != null)
                {
                    _videoDeviceFilter = (IBaseFilter)Marshal.BindToMoniker(VideoDevice.MonikerString);

                    hr = _graphBuilder.AddFilter(_videoDeviceFilter, "Video Capture Device");
                    if (hr < 0)
                    {
                        Marshal.ThrowExceptionForHR(hr);
                    }

                    media.majorType           = MediaType.Video;
                    media.subType             = MediaSubType.RGB32; //RGB24;
                    media.formatType          = FormatType.VideoInfo;
                    media.temporalCompression = true;               //New

                    hr = _sampGrabber.SetMediaType(media);

                    if (hr < 0)
                    {
                        Marshal.ThrowExceptionForHR(hr);
                    }

                    hr = _graphBuilder.AddFilter(_baseGrabFlt, "Grabber");
                    if (hr < 0)
                    {
                        Marshal.ThrowExceptionForHR(hr);
                    }
                }

                // Retrieve the stream control interface for the video device
                // FindInterface will also add any required filters
                // (WDM devices in particular may need additional
                // upstream filters to function).

                // Try looking for an interleaved media type
                var cat = PinCategory.Capture;
                var med = MediaType.Interleaved;
                var iid = typeof(IAMStreamConfig).GUID;
                hr = _captureGraphBuilder.FindInterface(cat, med, _videoDeviceFilter, iid, out var o);

                if (hr != 0)
                {
                    // If not found, try looking for a video media type
                    med = MediaType.Video;
                    hr  = _captureGraphBuilder.FindInterface(cat, med, _videoDeviceFilter, iid, out o);

                    if (hr != 0)
                    {
                        // ReSharper disable once RedundantAssignment
                        o = null;
                    }
                }

                //VideoStreamConfig = o as IAMStreamConfig;

                // Retrieve the media control interface (for starting/stopping the graph)
                _mediaControl = (IMediaControl)_graphBuilder;

                // Reload any video crossbars
                //if (videoSources != null) videoSources.Dispose(); videoSources = null;

                // The format block is only populated once the grabber pin is actually connected;
                // guard against an empty formatPtr so this does not fail before the graph is rendered.
                if (media.formatPtr != IntPtr.Zero)
                {
                    _videoInfoHeader = Marshal.PtrToStructure<VideoInfoHeader>(media.formatPtr);
                    Marshal.FreeCoTaskMem(media.formatPtr); media.formatPtr = IntPtr.Zero;
                }

                hr = _sampGrabber.SetBufferSamples(true);

                if (hr == 0)
                {
                    hr = _sampGrabber.SetOneShot(false);
                }

                if (hr == 0)
                {
                    hr = _sampGrabber.SetCallback(null, 0);
                }

                if (hr < 0)
                {
                    Marshal.ThrowExceptionForHR(hr);
                }
            }

            // Update the state now that we are done
            _actualGraphState = GraphState.Created;
        }
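
The summary above refers to RenderGraph(), which is not part of this snippet. A condensed sketch of that step, assuming the same fields and a GraphState.Rendered member (both assumptions), renders the capture stream through the sample grabber:

        // Hypothetical sketch of the RenderGraph step: connect capture device -> grabber.
        private void RenderGraph()
        {
            if ((int)_actualGraphState < (int)GraphState.Rendered)
            {
                var hr = _captureGraphBuilder.RenderStream(PinCategory.Capture, MediaType.Video,
                                                           _videoDeviceFilter, null, _baseGrabFlt);
                if (hr < 0)
                {
                    Marshal.ThrowExceptionForHR(hr);
                }

                _actualGraphState = GraphState.Rendered;
            }
        }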
Ejemplo n.º 36
0
        private void StartGraph()
        {
            int hr = 0;

            CloseGraph();

            string path = GetMoviePath();
            if (path == string.Empty)
                return;

            try
            {
                graph  = (IGraphBuilder)new FilterGraph();
                filter = (IBaseFilter)new VideoMixingRenderer9();

                IVMRFilterConfig9 filterConfig = (IVMRFilterConfig9)filter;

                hr = filterConfig.SetRenderingMode(VMR9Mode.Renderless);
                DsError.ThrowExceptionForHR(hr);

                hr = filterConfig.SetNumberOfStreams(2);
                DsError.ThrowExceptionForHR(hr);

                SetAllocatorPresenter();

                hr = graph.AddFilter(filter, "Video Mixing Renderer 9");
                DsError.ThrowExceptionForHR(hr);

                hr = graph.RenderFile(path, null);
                DsError.ThrowExceptionForHR(hr);

                mediaControl = (IMediaControl)graph;

                hr = mediaControl.Run();
                DsError.ThrowExceptionForHR(hr);
            }
            catch
            {
            }
        }
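
CloseGraph() and SetAllocatorPresenter() above belong to the surrounding class. A minimal CloseGraph sketch, assuming the mediaControl/filter/graph fields used in StartGraph, stops playback and releases the COM objects:

        // Hypothetical sketch: stop playback and release the graph built in StartGraph().
        private void CloseGraph()
        {
            if (mediaControl != null)
            {
                mediaControl.Stop();
                mediaControl = null;
            }

            if (filter != null)
            {
                Marshal.ReleaseComObject(filter);
                filter = null;
            }

            if (graph != null)
            {
                Marshal.ReleaseComObject(graph);
                graph = null;
            }
        }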
Ejemplo n.º 37
0
        private bool AddWmAsfWriter(string fileName, Quality quality, Standard standard)
        {
            //add asf file writer
            IPin pinOut0, pinOut1;
            IPin pinIn0, pinIn1;

            Log.Info("TSReader2WMV: add WM ASF Writer to graph");
            string monikerAsfWriter =
                @"@device:sw:{083863F1-70DE-11D0-BD40-00A0C911CE86}\{7C23220E-55BB-11D3-8B16-00C04FB6BD3D}";

            fileWriterbase = Marshal.BindToMoniker(monikerAsfWriter) as IBaseFilter;
            if (fileWriterbase == null)
            {
                Log.Error("TSReader2WMV:FAILED:Unable to create ASF WM Writer");
                Cleanup();
                return(false);
            }
            int hr = graphBuilder.AddFilter(fileWriterbase, "WM ASF Writer");

            if (hr != 0)
            {
                Log.Error("TSReader2WMV:FAILED:Add ASF WM Writer to filtergraph :0x{0:X}", hr);
                Cleanup();
                return(false);
            }
            IFileSinkFilter2 fileWriterFilter = fileWriterbase as IFileSinkFilter2;

            if (fileWriterFilter == null)
            {
                Log.Error("DVR2XVID:FAILED:Add unable to get IFileSinkFilter for filewriter");
                Cleanup();
                return(false);
            }
            hr = fileWriterFilter.SetFileName(fileName, null);
            hr = fileWriterFilter.SetMode(AMFileSinkFlags.OverWrite);
            Log.Info("TSReader2WMV: connect audio/video codecs outputs -> ASF WM Writer");
            //connect output #0 of videocodec->asf writer pin 1
            //connect output #0 of audiocodec->asf writer pin 0
            pinOut0 = DsFindPin.ByDirection((IBaseFilter)AudioCodec, PinDirection.Output, 0);
            pinOut1 = DsFindPin.ByDirection((IBaseFilter)VideoCodec, PinDirection.Output, 0);
            if (pinOut0 == null || pinOut1 == null)
            {
                Log.Error("TSReader2WMV:FAILED:unable to get outpins of video codec");
                Cleanup();
                return(false);
            }
            pinIn0 = DsFindPin.ByDirection(fileWriterbase, PinDirection.Input, 0);
            if (pinIn0 == null)
            {
                Log.Error("TSReader2WMV:FAILED:unable to get pins of asf wm writer");
                Cleanup();
                return(false);
            }
            hr = graphBuilder.Connect(pinOut0, pinIn0);
            if (hr != 0)
            {
                Log.Error("TSReader2WMV:FAILED:unable to connect audio pins :0x{0:X}", hr);
                Cleanup();
                return(false);
            }
            pinIn1 = DsFindPin.ByDirection(fileWriterbase, PinDirection.Input, 1);
            if (pinIn1 == null)
            {
                Log.Error("TSReader2WMV:FAILED:unable to get pins of asf wm writer");
                Cleanup();
                return(false);
            }
            hr = graphBuilder.Connect(pinOut1, pinIn1);
            if (hr != 0)
            {
                Log.Error("TSReader2WMV:FAILED:unable to connect video pins :0x{0:X}", hr);
                Cleanup();
                return(false);
            }
            IConfigAsfWriter   config          = fileWriterbase as IConfigAsfWriter;
            IWMProfileManager  profileManager  = null;
            IWMProfileManager2 profileManager2 = null;
            IWMProfile         profile         = null;

            hr = WMLib.WMCreateProfileManager(out profileManager);
            string strprofileType = "";

            switch (quality)
            {
            case Quality.HiDef:
                //hr = WMLib.WMCreateProfileManager(out profileManager);
                if (standard == Standard.Film)
                {
                    strprofileType = Config.GetFile(Config.Dir.Base, @"Profiles\MPHiDef-FILM.prx");
                }
                if (standard == Standard.NTSC)
                {
                    strprofileType = Config.GetFile(Config.Dir.Base, @"Profiles\MPHiDef-NTSC.prx");
                }
                if (standard == Standard.PAL)
                {
                    strprofileType = Config.GetFile(Config.Dir.Base, @"Profiles\MPHiDef-PAL.prx");
                }
                Log.Info("TSReader2WMV: set WMV HiDef quality profile {0}", strprofileType);
                break;

            case Quality.VeryHigh:
                if (standard == Standard.Film)
                {
                    strprofileType = Config.GetFile(Config.Dir.Base, @"Profiles\MPVeryHigh-FILM.prx");
                }
                if (standard == Standard.NTSC)
                {
                    strprofileType = Config.GetFile(Config.Dir.Base, @"Profiles\MPVeryHigh-NTSC.prx");
                }
                if (standard == Standard.PAL)
                {
                    strprofileType = Config.GetFile(Config.Dir.Base, @"Profiles\MPVeryHigh-PAL.prx");
                }
                Log.Info("TSReader2WMV: set WMV Very High quality profile {0}", strprofileType);
                break;

            case Quality.High:
                if (standard == Standard.Film)
                {
                    strprofileType = Config.GetFile(Config.Dir.Base, @"Profiles\MPHigh-FILM.prx");
                }
                if (standard == Standard.NTSC)
                {
                    strprofileType = Config.GetFile(Config.Dir.Base, @"Profiles\MPHigh-NTSC.prx");
                }
                if (standard == Standard.PAL)
                {
                    strprofileType = Config.GetFile(Config.Dir.Base, @"Profiles\MPHigh-PAL.prx");
                }
                Log.Info("TSReader2WMV: set WMV High quality profile {0}", strprofileType);
                break;

            case Quality.Medium:
                if (standard == Standard.Film)
                {
                    strprofileType = Config.GetFile(Config.Dir.Base, @"Profiles\MPMedium-FILM.prx");
                }
                if (standard == Standard.NTSC)
                {
                    strprofileType = Config.GetFile(Config.Dir.Base, @"Profiles\MPMedium-NTSC.prx");
                }
                if (standard == Standard.PAL)
                {
                    strprofileType = Config.GetFile(Config.Dir.Base, @"Profiles\MPMedium-PAL.prx");
                }
                Log.Info("TSReader2WMV: set WMV Medium quality profile {0}", strprofileType);
                break;

            case Quality.Low:
                if (standard == Standard.Film)
                {
                    strprofileType = Config.GetFile(Config.Dir.Base, @"Profiles\MPLow-FILM.prx");
                }
                if (standard == Standard.NTSC)
                {
                    strprofileType = Config.GetFile(Config.Dir.Base, @"Profiles\MPLow-NTSC.prx");
                }
                if (standard == Standard.PAL)
                {
                    strprofileType = Config.GetFile(Config.Dir.Base, @"Profiles\MPLow-PAL.prx");
                }
                Log.Info("TSReader2WMV: set WMV Low quality profile {0}", strprofileType);
                break;

            case Quality.Portable:
                if (standard == Standard.Film)
                {
                    strprofileType = Config.GetFile(Config.Dir.Base, @"Profiles\MPPortable-FILM.prx");
                }
                if (standard == Standard.NTSC)
                {
                    strprofileType = Config.GetFile(Config.Dir.Base, @"Profiles\MPPortable-NTSC.prx");
                }
                if (standard == Standard.PAL)
                {
                    strprofileType = Config.GetFile(Config.Dir.Base, @"Profiles\MPPortable-PAL.prx");
                }
                Log.Info("TSReader2WMV: set WMV Portable quality profile {0}", strprofileType);
                break;

            case Quality.Custom:
                //load custom profile
                string customBitrate = "";
                //Adjust the parameters to suit the custom settings the user has selected.
                switch (bitrate)
                {
                case 0:
                    customBitrate = "100Kbs";
                    break;

                case 1:
                    customBitrate = "256Kbs";
                    break;

                case 2:
                    customBitrate = "384Kbs";
                    break;

                case 3:
                    customBitrate = "768Kbs";
                    break;

                case 4:
                    customBitrate = "1536Kbs";
                    break;

                case 5:
                    customBitrate = "3072Kbs";
                    break;

                case 6:
                    customBitrate = "5376Kbs";
                    break;
                }
                Log.Info("TSReader2WMV: custom bitrate = {0}", customBitrate);
                //TODO: get fps values & frame size
                //TODO: adjust settings required
                //Call the SetCustomProfile method to load the custom profile, adjust its params from user settings & then save it.
                //SetCustomProfile(videoBitrate, audioBitrate, videoHeight, videoWidth, videoFps); //based on user inputs
                //We then reload it after as per other quality settings / profiles.
                strprofileType = Config.GetFile(Config.Dir.Base, @"Profiles\MPCustom.prx");
                Log.Info("TSReader2WMV: set WMV Custom quality profile {0}", strprofileType);
                break;
            }
            //Loads profile from the above quality selection.
            using (StreamReader prx = new StreamReader(strprofileType))
            {
                String profileContents = prx.ReadToEnd();
                profileManager2 = profileManager as IWMProfileManager2;
                hr = profileManager2.LoadProfileByData(profileContents, out profile);
            }


            if (hr != 0)
            {
                Log.Info("TSReader2WMV: get WMV profile - FAILED! {0}", hr);
                Cleanup();
                return(false);
            }
            Log.Info("TSReader2WMV: load profile - SUCCESS!");
            //configures the WM ASF Writer to the chosen profile
            hr = config.ConfigureFilterUsingProfile(profile);
            if (hr != 0)
            {
                Log.Info("TSReader2WMV: configure profile - FAILED! {0}", hr);
                Cleanup();
                return(false);
            }
            Log.Info("TSReader2WMV: configure profile - SUCCESS!");
            //TODO: Add DB recording information into WMV.

            //Release resources
            if (profile != null)
            {
                Marshal.ReleaseComObject(profile);
                profile = null;
            }
            if (profileManager != null)
            {
                Marshal.ReleaseComObject(profileManager);
                profileManager = null;
            }
            return(true);
        }
Ejemplo n.º 38
0
        private void SetupSampleGrabber()
        {
            if (_graph == null)
            {
                return;
            }

            int hr;

            //Get directsound filter
            IBaseFilter directSoundFilter;

            hr = _graph.FindFilterByName(DEFAULT_AUDIO_RENDERER_NAME, out directSoundFilter);
            DsError.ThrowExceptionForHR(hr);

            IPin rendererPinIn = DsFindPin.ByConnectionStatus(directSoundFilter, PinConnectedStatus.Connected, 0);

            if (rendererPinIn != null)
            {
                IPin audioPinOut;
                hr = rendererPinIn.ConnectedTo(out audioPinOut);
                DsError.ThrowExceptionForHR(hr);

                if (audioPinOut != null)
                {
                    //Disconnect the audio decoder from the DirectSound renderer
                    hr = audioPinOut.Disconnect();
                    DsError.ThrowExceptionForHR(hr);

                    hr = _graph.RemoveFilter(directSoundFilter);
                    DsError.ThrowExceptionForHR(hr);

                    //Add Sample Grabber
                    ISampleGrabber sampleGrabber = new SampleGrabber() as ISampleGrabber;
                    hr = sampleGrabber.SetCallback(this, 1);
                    DsError.ThrowExceptionForHR(hr);

                    AMMediaType media;
                    media            = new AMMediaType();
                    media.majorType  = MediaType.Audio;
                    media.subType    = MediaSubType.PCM;
                    media.formatType = FormatType.WaveEx;
                    hr = sampleGrabber.SetMediaType(media);
                    DsError.ThrowExceptionForHR(hr);

                    IPin sampleGrabberPinIn  = DsFindPin.ByDirection((IBaseFilter)sampleGrabber, PinDirection.Input, 0);
                    IPin sampleGrabberPinOut = DsFindPin.ByDirection((IBaseFilter)sampleGrabber, PinDirection.Output, 0);
                    hr = _graph.AddFilter((IBaseFilter)sampleGrabber, "SampleGrabber");
                    DsError.ThrowExceptionForHR(hr);

                    PinInfo pinInfo;
                    hr = audioPinOut.QueryPinInfo(out pinInfo);
                    DsError.ThrowExceptionForHR(hr);

                    FilterInfo filterInfo;
                    hr = pinInfo.filter.QueryFilterInfo(out filterInfo);
                    DsError.ThrowExceptionForHR(hr);

                    hr = _graph.Connect(audioPinOut, sampleGrabberPinIn);
                    DsError.ThrowExceptionForHR(hr);

                    //Add null renderer
                    NullRenderer nullRenderer = new NullRenderer();
                    hr = _graph.AddFilter((IBaseFilter)nullRenderer, "NullRenderer");
                    DsError.ThrowExceptionForHR(hr);

                    IPin nullRendererPinIn = DsFindPin.ByDirection((IBaseFilter)nullRenderer, PinDirection.Input, 0);
                    hr = _graph.Connect(sampleGrabberPinOut, nullRendererPinIn);
                    DsError.ThrowExceptionForHR(hr);

                    _audioEngine.Setup(this.GetSampleGrabberFormat(sampleGrabber));
                }
            }
        }
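
GetSampleGrabberFormat above is not shown. A sketch that matches the PCM/WaveEx media type negotiated above, assuming DirectShowLib's WaveFormatEx wrapper (the exact type the audio engine expects is an assumption), reads the connected media type and marshals its WaveFormatEx block:

        // Hypothetical sketch: read the WaveFormatEx the sample grabber actually connected with.
        private WaveFormatEx GetSampleGrabberFormat(ISampleGrabber sampleGrabber)
        {
            AMMediaType media = new AMMediaType();
            int hr = sampleGrabber.GetConnectedMediaType(media);
            DsError.ThrowExceptionForHR(hr);

            try
            {
                if (media.formatType != FormatType.WaveEx || media.formatPtr == IntPtr.Zero)
                {
                    throw new NotSupportedException("Unexpected sample grabber media format");
                }

                return (WaveFormatEx)Marshal.PtrToStructure(media.formatPtr, typeof(WaveFormatEx));
            }
            finally
            {
                DsUtils.FreeAMMediaType(media);
            }
        }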
Ejemplo n.º 39
0
        private void BuildGraph(DirectShowLib.DsDevice dsDevice)
        {
            int hr = 0;
            pGraph = new FilterGraph() as IFilterGraph2;

            //graph builder
            ICaptureGraphBuilder2 pBuilder = (ICaptureGraphBuilder2)new CaptureGraphBuilder2();

            try
            {
                hr = pBuilder.SetFiltergraph(pGraph);
                DsError.ThrowExceptionForHR(hr);

                // Add camera
                IBaseFilter camera;
                //hr = pGraph.FindFilterByName(dsDevice.Name, out camera);
                hr = ((IFilterGraph2)pGraph).AddSourceFilterForMoniker(dsDevice.Mon, null, dsDevice.Name, out camera);
                DsError.ThrowExceptionForHR(hr);

                hr = pGraph.AddFilter(camera, "camera");
                DsError.ThrowExceptionForHR(hr);

                // Set format for camera
                AMMediaType pmt = new AMMediaType();
                pmt.majorType = MediaType.Video;
                pmt.subType = MediaSubType.YUY2;
                pmt.formatType = FormatType.VideoInfo;
                pmt.fixedSizeSamples = true;
                pmt.formatSize = 88;
                pmt.sampleSize = 829440;
                pmt.temporalCompression = false;
                VideoInfoHeader format = new VideoInfoHeader();
                format.SrcRect = new DsRect();
                format.TargetRect = new DsRect();
                format.BitRate = 20736000;
                format.AvgTimePerFrame = 400000;
                format.BmiHeader = new BitmapInfoHeader();
                format.BmiHeader.Size = 40;
                format.BmiHeader.Width = 720;
                format.BmiHeader.Height = 576;
                format.BmiHeader.Planes = 1;
                format.BmiHeader.BitCount = 24;
                format.BmiHeader.Compression = 844715353;
                format.BmiHeader.ImageSize = 829440; // 720 x 576 x 2 bytes (YUY2), matches pmt.sampleSize
                pmt.formatPtr = Marshal.AllocCoTaskMem(Marshal.SizeOf(format));
                Marshal.StructureToPtr(format, pmt.formatPtr, false);
                hr = ((IAMStreamConfig)DsFindPin.ByCategory(camera, PinCategory.Capture, 0)).SetFormat(pmt);
                //hr = ((IAMStreamConfig)GetPin(pUSB20Camera, "Capture")).SetFormat(pmt);
                DsUtils.FreeAMMediaType(pmt);
                DsError.ThrowExceptionForHR(hr);

                IAMCrossbar crossBar = null;
                object dummy;
                hr = pBuilder.FindInterface(PinCategory.Capture, MediaType.Video, camera, typeof(IAMCrossbar).GUID, out dummy);
                if( hr >=0)
                {
                    crossBar = (IAMCrossbar)dummy;
                    int oPin, iPin;
                    int ovLink, ivLink;
                    ovLink = ivLink = 0;
                    crossBar.get_PinCounts(out oPin, out iPin);
                    int pIdxRel;
                    PhysicalConnectorType physicalConType;
                    for (int i = 0; i < iPin; i++)
                    {
                        crossBar.get_CrossbarPinInfo(true, i, out pIdxRel, out physicalConType);
                        if (physicalConType == PhysicalConnectorType.Video_Composite)
                            ivLink = i;
                    }
                    for (int i = 0; i < oPin; i++)
                    {
                        crossBar.get_CrossbarPinInfo(false, i, out pIdxRel, out physicalConType);
                        if (physicalConType == PhysicalConnectorType.Video_VideoDecoder)
                            ovLink = i;
                    }

                    try
                    {
                        crossBar.Route(ovLink, ivLink);
                    }
                    catch
                    {
                        throw new Exception("Failed to route the crossbar pins");
                    }
                }

                //add AVI Decompressor
                IBaseFilter pAVIDecompressor = (IBaseFilter)new AVIDec();
                hr = pGraph.AddFilter(pAVIDecompressor, "AVI Decompressor");
                DsError.ThrowExceptionForHR(hr);

                //add color space converter
                IBaseFilter pColorSpaceConverter = (IBaseFilter)new Colour();
                hr = pGraph.AddFilter(pColorSpaceConverter, "Color space converter");
                DsError.ThrowExceptionForHR(hr);

                // Connect camera and AVI Decomp
                hr = pGraph.ConnectDirect(DsFindPin.ByCategory(camera, PinCategory.Capture, 0), DsFindPin.ByName(pAVIDecompressor, "XForm In"), null);
                DsError.ThrowExceptionForHR(hr);

                // Connect AVI Decomp and color space converter
                hr = pGraph.ConnectDirect(DsFindPin.ByName(pAVIDecompressor, "XForm Out"), DsFindPin.ByName(pColorSpaceConverter, "Input"), null);
                DsError.ThrowExceptionForHR(hr);

                //add SampleGrabber
                //IBaseFilter pSampleGrabber = (IBaseFilter)Activator.CreateInstance(Type.GetTypeFromCLSID(CLSID_SampleGrabber));
                //hr = pGraph.AddFilter(pSampleGrabber, "SampleGrabber");
                IBaseFilter sampleGrabber = new SampleGrabber() as IBaseFilter;
                hr = pGraph.AddFilter(sampleGrabber, "Sample grabber");
                DsError.ThrowExceptionForHR(hr);

                // Configure the samplegrabber
                AMMediaType pSampleGrabber_pmt = new AMMediaType();
                pSampleGrabber_pmt.majorType = MediaType.Video;
                pSampleGrabber_pmt.subType = MediaSubType.ARGB32;
                pSampleGrabber_pmt.formatType = FormatType.VideoInfo;
                pSampleGrabber_pmt.fixedSizeSamples = true;
                pSampleGrabber_pmt.formatSize = 88;
                pSampleGrabber_pmt.sampleSize = 1658880;
                pSampleGrabber_pmt.temporalCompression = false;
                VideoInfoHeader pSampleGrabber_format = new VideoInfoHeader();
                pSampleGrabber_format.SrcRect = new DsRect();
                pSampleGrabber_format.SrcRect.right = 720;
                pSampleGrabber_format.SrcRect.bottom = 576;
                pSampleGrabber_format.TargetRect = new DsRect();
                pSampleGrabber_format.TargetRect.right = 720;
                pSampleGrabber_format.TargetRect.bottom = 576;
                pSampleGrabber_format.BitRate = 331776000;
                pSampleGrabber_format.AvgTimePerFrame = 400000;
                pSampleGrabber_format.BmiHeader = new BitmapInfoHeader();
                pSampleGrabber_format.BmiHeader.Size = 40;
                pSampleGrabber_format.BmiHeader.Width = 720;
                pSampleGrabber_format.BmiHeader.Height = 576;
                pSampleGrabber_format.BmiHeader.Planes = 1;
                pSampleGrabber_format.BmiHeader.BitCount = 32;
                pSampleGrabber_format.BmiHeader.ImageSize = 1658880;

                pSampleGrabber_pmt.formatPtr = Marshal.AllocCoTaskMem(Marshal.SizeOf(pSampleGrabber_format));
                Marshal.StructureToPtr(pSampleGrabber_format, pSampleGrabber_pmt.formatPtr, false);
                hr = ((ISampleGrabber)sampleGrabber).SetMediaType(pSampleGrabber_pmt);
                DsError.ThrowExceptionForHR(hr);

                //connect MJPG dec and SampleGrabber
                //hr = pGraph.ConnectDirect(GetPin(pMJPGDecompressor, "XForm Out"), GetPin(pSampleGrabber, "Input"), null);
                hr = pGraph.ConnectDirect(DsFindPin.ByName(pColorSpaceConverter, "XForm Out"), DsFindPin.ByName(sampleGrabber, "Input"), null);
                DsError.ThrowExceptionForHR(hr);

                //set callback
                hr = ((ISampleGrabber)sampleGrabber).SetCallback(this, 1);
                DsError.ThrowExceptionForHR(hr);
            }
            finally
            {
                // Clean this mess up!
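                // Suggested cleanup (not in the original snippet): at minimum, release the
                // capture graph builder COM object created above, e.g.:
                //     Marshal.ReleaseComObject(pBuilder);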
            }
        }
Ejemplo n.º 40
0
        /// <summary>
        /// Worker thread that captures the images
        /// </summary>
        private void RunWorker()
        {
            try
            {
                // Create the main graph
                m_igrphbldGraph = Activator.CreateInstance(Type.GetTypeFromCLSID(FilterGraph)) as IGraphBuilder;

                // Create the webcam source
                m_sourceObject = FilterInfo.CreateFilter(m_sMonikerString);

                // Create the grabber
                m_isplGrabber = Activator.CreateInstance(Type.GetTypeFromCLSID(SampleGrabber)) as ISampleGrabber;
                m_grabberObject = m_isplGrabber as IBaseFilter;

                // Add the source and grabber to the main graph
                m_igrphbldGraph.AddFilter(m_sourceObject, "source");
                m_igrphbldGraph.AddFilter(m_grabberObject, "grabber");

                using (AMMediaType mediaType = new AMMediaType())
                {
                    mediaType.MajorType = MediaTypes.Video;
                    mediaType.SubType = MediaSubTypes.RGB32;
                    m_isplGrabber.SetMediaType(mediaType);

                    if (m_igrphbldGraph.Connect(m_sourceObject.GetPin(PinDirection.Output, 0), m_grabberObject.GetPin(PinDirection.Input, 0)) >= 0)
                    {
                        if (m_isplGrabber.GetConnectedMediaType(mediaType) == 0)
                        {
                            // During startup, this code can be too fast, so try at least 3 times
                            int retryCount = 0;
                            bool succeeded = false;
                            while ((retryCount < 3) && !succeeded)
                            {
                                // Try again
                                retryCount++;

                                try
                                {
                                    // Retrieve the grabber information
                                    VideoInfoHeader header = (VideoInfoHeader)Marshal.PtrToStructure(mediaType.FormatPtr, typeof(VideoInfoHeader));
                                    m_grbrCapGrabber.Width = header.BmiHeader.Width;
                                    m_grbrCapGrabber.Height = header.BmiHeader.Height;

                                    // Succeeded
                                    succeeded = true;
                                }
                                catch (Exception retryException)
                                {
                                    // Trace
                                    Trace.TraceInformation("Failed to retrieve the grabber information, tried {0} time(s)", retryCount);

                                    // Sleep
                                    Thread.Sleep(50);
                                }
                            }
                        }
                    }
                    m_igrphbldGraph.Render(m_grabberObject.GetPin(PinDirection.Output, 0));
                    m_isplGrabber.SetBufferSamples(false);
                    m_isplGrabber.SetOneShot(false);
                    m_isplGrabber.SetCallback(m_grbrCapGrabber, 1);

                    // Get the video window
                    IVideoWindow wnd = (IVideoWindow)m_igrphbldGraph;
                    wnd.put_AutoShow(false);
                    wnd = null;

                    // Create the control and run
                    m_imedctrlControl = (IMediaControl)m_igrphbldGraph;
                    m_imedctrlControl.Run();

                    // Wait for the stop signal
                    while (!m_rstevStopSignal.WaitOne(0, true))
                    {
                        Thread.Sleep(10);
                    }

                    // Stop when ready
                    // _control.StopWhenReady();
                    m_imedctrlControl.Stop();

                    // Wait a bit... It apparently takes some time to stop IMediaControl
                    Thread.Sleep(1000);
                }
            }
            catch (Exception ex)
            {
                // Trace
                Trace.WriteLine(ex);
            }
            finally
            {
                // Clean up
                this.Release();
            }
        }
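
        The finally block above calls a Release() helper that is not shown. A minimal sketch of what such a cleanup could look like, assuming the field names used in this example (the real project may differ):

        private void Release()
        {
            try
            {
                // Stop the graph if it is still running
                if (m_imedctrlControl != null)
                {
                    m_imedctrlControl.Stop();
                    m_imedctrlControl = null;
                }
            }
            catch (Exception ex)
            {
                Trace.WriteLine(ex);
            }

            // Release the COM objects in reverse order of creation
            if (m_isplGrabber != null)
            {
                Marshal.ReleaseComObject(m_isplGrabber);
                m_isplGrabber = null;
                m_grabberObject = null;
            }
            if (m_sourceObject != null)
            {
                Marshal.ReleaseComObject(m_sourceObject);
                m_sourceObject = null;
            }
            if (m_igrphbldGraph != null)
            {
                Marshal.ReleaseComObject(m_igrphbldGraph);
                m_igrphbldGraph = null;
            }
        }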
Ejemplo n.º 41
0
        public void WorkerThread()
        {
            Grabber        pCallback = new Grabber(this);
            object         o         = null;
            object         ppvResult = null;
            object         obj4      = null;
            IGraphBuilder  builder   = null;
            IBaseFilter    pFilter   = null;
            IBaseFilter    filter2   = null;
            ISampleGrabber grabber2  = null;
            IMediaControl  control   = null;

            try
            {
                Type typeFromCLSID = Type.GetTypeFromCLSID(Clsid.FilterGraph);
                if (typeFromCLSID == null)
                {
                    throw new ApplicationException("Failed creating filter graph");
                }
                o       = Activator.CreateInstance(typeFromCLSID);
                builder = (IGraphBuilder)o;
                UCOMIBindCtx ppbc     = null;
                UCOMIMoniker ppmk     = null;
                int          pchEaten = 0;
                if (Win32.CreateBindCtx(0, out ppbc) == 0)
                {
                    if (Win32.MkParseDisplayName(ppbc, this.source, ref pchEaten, out ppmk) == 0)
                    {
                        Guid gUID = typeof(IBaseFilter).GUID;
                        ppmk.BindToObject(null, null, ref gUID, out ppvResult);
                        Marshal.ReleaseComObject(ppmk);
                        ppmk = null;
                    }
                    Marshal.ReleaseComObject(ppbc);
                    ppbc = null;
                }
                if (ppvResult == null)
                {
                    throw new ApplicationException("Failed creating device object for moniker");
                }
                pFilter       = (IBaseFilter)ppvResult;
                typeFromCLSID = Type.GetTypeFromCLSID(Clsid.SampleGrabber);
                if (typeFromCLSID == null)
                {
                    throw new ApplicationException("Failed creating sample grabber");
                }
                obj4     = Activator.CreateInstance(typeFromCLSID);
                grabber2 = (ISampleGrabber)obj4;
                filter2  = (IBaseFilter)obj4;
                builder.AddFilter(pFilter, "source");
                builder.AddFilter(filter2, "grabber");
                AMMediaType pmt = new AMMediaType {
                    majorType = MediaType.Video,
                    subType   = MediaSubType.RGB24
                };
                grabber2.SetMediaType(pmt);
                if (builder.Connect(DSTools.GetOutPin(pFilter, 0), DSTools.GetInPin(filter2, 0)) < 0)
                {
                    throw new ApplicationException("Failed connecting filters");
                }
                if (grabber2.GetConnectedMediaType(pmt) == 0)
                {
                    VideoInfoHeader header = (VideoInfoHeader)Marshal.PtrToStructure(pmt.formatPtr, typeof(VideoInfoHeader));
                    pCallback.Width  = header.BmiHeader.Width;
                    pCallback.Height = header.BmiHeader.Height;
                    pmt.Dispose();
                }
                builder.Render(DSTools.GetOutPin(filter2, 0));
                grabber2.SetBufferSamples(false);
                grabber2.SetOneShot(false);
                grabber2.SetCallback(pCallback, 1);
                ((IVideoWindow)o).put_AutoShow(false);
                control = (IMediaControl)o;
                control.Run();
                while (!this.stopEvent.WaitOne(0, true))
                {
                    Thread.Sleep(100);
                }
                control.StopWhenReady();
            }
            catch (Exception)
            {
            }
            finally
            {
                control  = null;
                builder  = null;
                pFilter  = null;
                filter2  = null;
                grabber2 = null;
                if (o != null)
                {
                    Marshal.ReleaseComObject(o);
                    o = null;
                }
                if (ppvResult != null)
                {
                    Marshal.ReleaseComObject(ppvResult);
                    ppvResult = null;
                }
                if (obj4 != null)
                {
                    Marshal.ReleaseComObject(obj4);
                    obj4 = null;
                }
            }
        }
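
        The example relies on a Win32 interop wrapper for CreateBindCtx and MkParseDisplayName that is not shown. A plausible sketch of those P/Invoke declarations, using the same legacy UCOM* interop interfaces as the code above (the real project may declare them differently; requires using System.Runtime.InteropServices):

        internal static class Win32
        {
            // ole32.dll exports used to turn a device moniker display name back into a moniker
            [DllImport("ole32.dll")]
            public static extern int CreateBindCtx(int reserved, out UCOMIBindCtx ppbc);

            [DllImport("ole32.dll", CharSet = CharSet.Unicode)]
            public static extern int MkParseDisplayName(UCOMIBindCtx pbc, string szUserName,
                                                        ref int pchEaten, out UCOMIMoniker ppmk);
        }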
Ejemplo n.º 42
0
        private void PlayMovieInWindow()
        {
            int hr = 0;
            this.graphBuilder = (IGraphBuilder)new FilterGraph();

            //Add the required renderer to the graph (Auto - graphBuilder picks the renderer itself)
            Settings.VRenderers renderer = Settings.VideoRenderer;
            if (renderer == Settings.VRenderers.Overlay)
            {
                IBaseFilter add_vr = (IBaseFilter)new VideoRenderer();
                hr = graphBuilder.AddFilter(add_vr, "Video Renderer");
                DsError.ThrowExceptionForHR(hr);
            }
            else if (renderer == Settings.VRenderers.VMR7)
            {
                IBaseFilter add_vmr = (IBaseFilter)new VideoMixingRenderer();
                hr = graphBuilder.AddFilter(add_vmr, "Video Renderer");
                DsError.ThrowExceptionForHR(hr);
            }
            else if (renderer == Settings.VRenderers.VMR9)
            {
                IBaseFilter add_vmr9 = (IBaseFilter)new VideoMixingRenderer9();
                hr = graphBuilder.AddFilter(add_vmr9, "Video Mixing Renderer 9");
                DsError.ThrowExceptionForHR(hr);
            }
            else if (renderer == Settings.VRenderers.EVR)
            {
                //Create a Win32 window, since a WPF surface cannot be used here
                VHost = new VideoHwndHost();
                VHost.RepaintRequired += new EventHandler(VHost_RepaintRequired);
                VHostElement.Visibility = Visibility.Visible;
                VHostElement.Child = VHost;
                VHandle = VHost.Handle;

                //Add and configure the EVR
                IBaseFilter add_evr = (IBaseFilter)new EnhancedVideoRenderer();
                hr = graphBuilder.AddFilter(add_evr, "Enhanced Video Renderer");
                DsError.ThrowExceptionForHR(hr);

                object obj;
                IMFGetService pGetService = null;
                pGetService = (IMFGetService)add_evr;
                hr = pGetService.GetService(MFServices.MR_VIDEO_RENDER_SERVICE, typeof(IMFVideoDisplayControl).GUID, out obj);
                MFError.ThrowExceptionForHR(hr);

                try
                {
                    EVRControl = (IMFVideoDisplayControl)obj;
                }
                catch
                {
                    Marshal.ReleaseComObject(obj);
                    throw;
                }

                //Set the video surface
                hr = EVRControl.SetVideoWindow(VHandle);
                MFError.ThrowExceptionForHR(hr);

                //Preserve the aspect ratio
                hr = EVRControl.SetAspectRatioMode(MFVideoAspectRatioMode.PreservePicture);
                MFError.ThrowExceptionForHR(hr);
            }

            // Have the graph builder construct the appropriate graph automatically
            hr = this.graphBuilder.RenderFile(this.filepath, null);
            DsError.ThrowExceptionForHR(hr);

            if (EVRControl == null)
            {
                //Find the renderer and ENABLE aspect-ratio preservation (the renderer fits the picture to the window size while keeping the aspect ratio)
                IsRendererARFixed = false;
                IBaseFilter filter = null;
                graphBuilder.FindFilterByName("Video Renderer", out filter);
                if (filter != null)
                {
                    IVMRAspectRatioControl vmr = filter as IVMRAspectRatioControl;
                    if (vmr != null)
                    {
                        DsError.ThrowExceptionForHR(vmr.SetAspectRatioMode(VMRAspectRatioMode.LetterBox));
                        IsRendererARFixed = true;
                    }
                }
                else
                {
                    graphBuilder.FindFilterByName("Video Mixing Renderer 9", out filter);
                    if (filter != null)
                    {
                        IVMRAspectRatioControl9 vmr9 = filter as IVMRAspectRatioControl9;
                        if (vmr9 != null)
                        {
                            DsError.ThrowExceptionForHR(vmr9.SetAspectRatioMode(VMRAspectRatioMode.LetterBox));
                            IsRendererARFixed = true;
                        }
                    }
                }
            }
            else
                IsRendererARFixed = true;

            this.mediaControl = (IMediaControl)this.graphBuilder;
            this.mediaEventEx = (IMediaEventEx)this.graphBuilder;
            this.mediaSeeking = (IMediaSeeking)this.graphBuilder;
            this.mediaPosition = (IMediaPosition)this.graphBuilder;
            this.videoWindow = (EVRControl == null) ? this.graphBuilder as IVideoWindow : null;
            this.basicVideo = (EVRControl == null) ? this.graphBuilder as IBasicVideo : null;
            this.basicAudio = this.graphBuilder as IBasicAudio;
            this.basicAudio.put_Volume(VolumeSet);
            this.CheckIsAudioOnly();
            if (!this.isAudioOnly)
            {
                if (videoWindow != null)
                {
                    hr = this.videoWindow.put_Owner(this.source.Handle);
                    DsError.ThrowExceptionForHR(hr);

                    hr = this.videoWindow.put_MessageDrain(this.source.Handle);
                    DsError.ThrowExceptionForHR(hr);

                    hr = this.videoWindow.put_WindowStyle(DirectShowLib.WindowStyle.Child | DirectShowLib.WindowStyle.ClipSiblings | DirectShowLib.WindowStyle.ClipChildren);
                    DsError.ThrowExceptionForHR(hr);
                }

                this.MoveVideoWindow();
            }
            else
            {
                if (VHost != null)
                {
                    VHost.Dispose();
                    VHost = null;
                    VHandle = IntPtr.Zero;
                    VHostElement.Child = null;
                    VHostElement.Visibility = Visibility.Collapsed;
                }
            }

            hr = this.mediaEventEx.SetNotifyWindow(this.source.Handle, WMGraphNotify, IntPtr.Zero);
            DsError.ThrowExceptionForHR(hr);

            //Restore the previous position
            if (mediaload == MediaLoad.Update && oldpos != TimeSpan.Zero)
            {
                if (NaturalDuration >= oldpos)
                {
                    hr = mediaPosition.put_CurrentPosition(oldpos.TotalSeconds);
                    DsError.ThrowExceptionForHR(hr);
                }
            }

            //Restore the previous PlayState
            if (mediaload == MediaLoad.Update && (oldplaystate == PlayState.Paused || oldplaystate == PlayState.Stopped))
            {
                hr = this.mediaControl.Pause();
                DsError.ThrowExceptionForHR(hr);
                this.currentState = PlayState.Paused;
                this.SetPlayIcon();
            }
            else
            {
                hr = this.mediaControl.Run();
                DsError.ThrowExceptionForHR(hr);
                this.currentState = PlayState.Running;
                this.SetPauseIcon();
            }

            AddFiltersToMenu();
        }
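
        SetNotifyWindow above posts WMGraphNotify to the window whenever graph events arrive; the handler itself is not shown. A minimal sketch (names assumed from the fields in the example) of how such a message could be serviced via IMediaEventEx:

        private void HandleGraphEvent()
        {
            if (this.mediaEventEx == null)
                return;

            EventCode evCode;
            IntPtr p1, p2;

            // Drain every queued event; FreeEventParams must be called for each one
            while (this.mediaEventEx.GetEvent(out evCode, out p1, out p2, 0) == 0)
            {
                this.mediaEventEx.FreeEventParams(evCode, p1, p2);

                if (evCode == EventCode.Complete)
                {
                    // Playback reached the end of the file
                    this.mediaControl.Stop();
                    this.currentState = PlayState.Stopped;
                }
            }
        }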
Ejemplo n.º 43
0
    /// <summary> Build the capture graph for the grabber. </summary>
    private void SetupGraph(string FileName)
    {
      int hr;

      // Get the graphbuilder object
      this.graphBuilder = new FilterGraph() as IGraphBuilder;
      this.mediaControl = this.graphBuilder as IMediaControl;
      this.mediaSeeking = this.graphBuilder as IMediaSeeking;
      this.mediaEvent = this.graphBuilder as IMediaEvent;

      try
      {
        // Get the SampleGrabber interface
        this.sampleGrabber = new SampleGrabber() as ISampleGrabber;
        this.sampleGrabberFilter = sampleGrabber as IBaseFilter;

        ConfigureSampleGrabber(sampleGrabber);

        // Add the frame grabber to the graph
        hr = graphBuilder.AddFilter(sampleGrabberFilter, "Ds.NET Sample Grabber");
        DsError.ThrowExceptionForHR(hr);

        IBaseFilter aviSplitter = new AviSplitter() as IBaseFilter;

        // Add the aviSplitter to the graph
        hr = graphBuilder.AddFilter(aviSplitter, "Splitter");
        DsError.ThrowExceptionForHR(hr);

        // Have the graph builder construct its appropriate graph automatically
        hr = this.graphBuilder.RenderFile(FileName, null);
        DsError.ThrowExceptionForHR(hr);

#if DEBUG
        m_rot = new DsROTEntry(graphBuilder);
#endif

        // Remove the video renderer filter
        IBaseFilter defaultVideoRenderer = null;
        graphBuilder.FindFilterByName("Video Renderer", out defaultVideoRenderer);
        graphBuilder.RemoveFilter(defaultVideoRenderer);

        // Disconnect anything that is connected
        // to the output of the sample grabber
        IPin iPinSampleGrabberOut = DsFindPin.ByDirection(sampleGrabberFilter, PinDirection.Output, 0);
        IPin iPinVideoIn;
        hr = iPinSampleGrabberOut.ConnectedTo(out iPinVideoIn);

        if (hr == 0)
        {
          // Disconnect the sample grabber output from the attached filters
          hr = iPinVideoIn.Disconnect();
          DsError.ThrowExceptionForHR(hr);

          hr = iPinSampleGrabberOut.Disconnect();
          DsError.ThrowExceptionForHR(hr);
        }
        else
        {
          // Try the other way round because the automatic renderer could not
          // build a graph that includes the sample grabber
          IPin iPinAVISplitterOut = DsFindPin.ByDirection(aviSplitter, PinDirection.Output, 0);
          IPin iPinAVISplitterIn;
          hr = iPinAVISplitterOut.ConnectedTo(out iPinAVISplitterIn);
          DsError.ThrowExceptionForHR(hr);

          hr = iPinAVISplitterOut.Disconnect();
          DsError.ThrowExceptionForHR(hr);

          hr = iPinAVISplitterIn.Disconnect();
          DsError.ThrowExceptionForHR(hr);

          // Connect the avi splitter output to sample grabber
          IPin iPinSampleGrabberIn = DsFindPin.ByDirection(sampleGrabberFilter, PinDirection.Input, 0);
          hr = graphBuilder.Connect(iPinAVISplitterOut, iPinSampleGrabberIn);
          DsError.ThrowExceptionForHR(hr);
        }

        // Add the null renderer to the graph
        nullrenderer = new NullRenderer() as IBaseFilter;
        hr = graphBuilder.AddFilter(nullrenderer, "Null renderer");
        DsError.ThrowExceptionForHR(hr);

        // Get the input pin of the null renderer
        IPin iPinNullRendererIn = DsFindPin.ByDirection(nullrenderer, PinDirection.Input, 0);

        // Connect the sample grabber to the null renderer
        hr = graphBuilder.Connect(iPinSampleGrabberOut, iPinNullRendererIn);
        DsError.ThrowExceptionForHR(hr);

        // Read and cache the image sizes
        SaveSizeInfo(sampleGrabber);

        this.GetFrameStepInterface();
      }
      finally
      {
      }
    }
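
    ConfigureSampleGrabber is referenced above but not shown. A minimal sketch of it, assuming the enclosing class implements ISampleGrabberCB and wants 24-bit RGB frames (the original may request a different format):

    private void ConfigureSampleGrabber(ISampleGrabber sampleGrabber)
    {
      AMMediaType media = new AMMediaType();
      media.majorType = MediaType.Video;
      media.subType = MediaSubType.RGB24;
      media.formatType = FormatType.VideoInfo;

      // Ask the grabber to accept only 24-bit RGB video
      int hr = sampleGrabber.SetMediaType(media);
      DsError.ThrowExceptionForHR(hr);
      DsUtils.FreeAMMediaType(media);

      // 1 = call ISampleGrabberCB.BufferCB for every sample delivered
      hr = sampleGrabber.SetCallback(this, 1);
      DsError.ThrowExceptionForHR(hr);
    }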
Ejemplo n.º 44
0
        /// <summary>
        /// <para>Encodes only the audio from the specified video file and returns it as a WAV file image.</para>
        /// </summary>
        public static void t変換(string fileName, out byte[] wavFileImage)
        {
            int hr = 0;

            IGraphBuilder graphBuilder = null;

            try
            {
                graphBuilder = (IGraphBuilder) new FilterGraph();

                #region [ Create and add the sample grabber for audio. ]
                //-----------------
                ISampleGrabber sampleGrabber = null;
                try
                {
                    sampleGrabber = (ISampleGrabber) new SampleGrabber();


                    // Set the sample grabber's media type.

                    var mediaType = new AMMediaType()
                    {
                        majorType  = MediaType.Audio,
                        subType    = MediaSubType.PCM,
                        formatType = FormatType.WaveEx,
                    };
                    try
                    {
                        hr = sampleGrabber.SetMediaType(mediaType);
                        DsError.ThrowExceptionForHR(hr);
                    }
                    finally
                    {
                        if (mediaType != null)
                        {
                            DsUtils.FreeAMMediaType(mediaType);
                        }
                    }


                    // Enable buffering on the sample grabber.

                    hr = sampleGrabber.SetBufferSamples(true);
                    DsError.ThrowExceptionForHR(hr);


                    // Add a callback to the sample grabber.

                    sampleGrabberProc = new CSampleGrabberCallBack();
                    hr = sampleGrabber.SetCallback(sampleGrabberProc, 1);                       // 1: invoke the callback's BufferCB() method.


                    // Add the sample grabber to the graph.

                    hr = graphBuilder.AddFilter((IBaseFilter)sampleGrabber, "SampleGrabber for Audio/PCM");
                    DsError.ThrowExceptionForHR(hr);
                }
                finally
                {
                    C共通.tCOMオブジェクトを解放する(ref sampleGrabber);
                }
                //-----------------
                #endregion

                var e = new DirectShowLib.DsROTEntry(graphBuilder);

                // Automatically build the graph from fileName.

                hr = graphBuilder.RenderFile(fileName, null);                   // IMediaControl.RenderFile() is not recommended
                DsError.ThrowExceptionForHR(hr);


                // Remove the video renderer.

                CDirectShow.tビデオレンダラをグラフから除去する(graphBuilder);                           // Run this before replacing the audio renderer with the Null renderer. (CDirectShow.tオーディオレンダラをNullレンダラに変えてフォーマットを取得する() plays the graph once, and the Active window would otherwise be shown at that point.)


                // Replace the audio renderer with a NullRenderer.

                WaveFormat wfx;
                byte[]     wfx拡張領域;
                CDirectShow.tオーディオレンダラをNullレンダラに変えてフォーマットを取得する(graphBuilder, out wfx, out wfx拡張領域);


                // Set the reference clock to NULL (run as fast as possible).

                IMediaFilter mediaFilter = graphBuilder as IMediaFilter;
                mediaFilter.SetSyncSource(null);
                mediaFilter = null;


                // Write the decoded data to a memory stream.

                sampleGrabberProc.MemoryStream = new MemoryStream();                    // CDirectShow.tオーディオレンダラをNullレンダラに変えてフォーマットを取得する() has already played the graph once, so clear the stream.
                var ms = sampleGrabberProc.MemoryStream;
                var bw = new BinaryWriter(ms);
                bw.Write(new byte[] { 0x52, 0x49, 0x46, 0x46 });                                                            // 'RIFF'
                bw.Write((UInt32)0);                                                                                        // file size - 8 [bytes]; unknown for now, overwritten later.
                bw.Write(new byte[] { 0x57, 0x41, 0x56, 0x45 });                                                            // 'WAVE'
                bw.Write(new byte[] { 0x66, 0x6D, 0x74, 0x20 });                                                            // 'fmt '
                bw.Write((UInt32)(16 + ((wfx拡張領域.Length > 0) ? (2 /*sizeof(WAVEFORMATEX.cbSize)*/ + wfx拡張領域.Length) : 0))); // size of the fmt chunk [bytes]
                bw.Write((UInt16)wfx.FormatTag);                                                                            // format ID (1 for linear PCM)
                bw.Write((UInt16)wfx.Channels);                                                                             // number of channels
                bw.Write((UInt32)wfx.SamplesPerSecond);                                                                     // sampling rate
                bw.Write((UInt32)wfx.AverageBytesPerSecond);                                                                // average data rate [bytes/s]
                bw.Write((UInt16)wfx.BlockAlignment);                                                                       // block alignment
                bw.Write((UInt16)wfx.BitsPerSample);                                                                        // bits per sample
                if (wfx拡張領域.Length > 0)
                {
                    bw.Write((UInt16)wfx拡張領域.Length);                                           // size of the extension area [bytes]
                    bw.Write(wfx拡張領域);                                                          // extension data
                }
                bw.Write(new byte[] { 0x64, 0x61, 0x74, 0x61 });                                // 'data'
                int nDATAチャンクサイズ位置 = (int)ms.Position;
                bw.Write((UInt32)0);                                                            // size of the data chunk [bytes]; unknown for now, overwritten later.

                #region [ Start playback and wait for it to finish. - While playing, PCM data accumulates in sampleGrabberProc.MemoryStream. ]
                //-----------------
                IMediaControl mediaControl = graphBuilder as IMediaControl;
                mediaControl.Run();                                                             // start playback

                IMediaEvent mediaEvent = graphBuilder as IMediaEvent;
                EventCode   eventCode;
                hr = mediaEvent.WaitForCompletion(-1, out eventCode);
                DsError.ThrowExceptionForHR(hr);
                if (eventCode != EventCode.Complete)
                {
                    throw new Exception("再生待ちに失敗しました。");
                }

                mediaControl.Stop();
                mediaEvent   = null;
                mediaControl = null;
                //-----------------
                #endregion

                bw.Seek(4, SeekOrigin.Begin);
                bw.Write((UInt32)ms.Length - 8);                                                        // file size - 8 [bytes]

                bw.Seek(nDATAチャンクサイズ位置, SeekOrigin.Begin);
                bw.Write((UInt32)ms.Length - (nDATAチャンクサイズ位置 + 4));                             // data chunk size [bytes]


                // Create the second output (the WAV file image).

                wavFileImage = ms.ToArray();


                // Cleanup.

                bw.Close();
                sampleGrabberProc.Dispose();                    // ms.Close()
            }
            finally
            {
                C共通.tCOMオブジェクトを解放する(ref graphBuilder);
            }
        }
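
        The CSampleGrabberCallBack used above is not shown. A rough sketch of it, assuming BufferCB simply appends each decoded PCM block to the MemoryStream that later becomes the 'data' chunk:

        class CSampleGrabberCallBack : ISampleGrabberCB, IDisposable
        {
            public MemoryStream MemoryStream = new MemoryStream();

            public int SampleCB(double sampleTime, IMediaSample pSample)
            {
                return 0;   // unused; SetCallback(..., 1) selected BufferCB
            }

            public int BufferCB(double sampleTime, IntPtr pBuffer, int bufferLen)
            {
                // Copy the unmanaged PCM buffer into the managed stream
                var buffer = new byte[bufferLen];
                Marshal.Copy(pBuffer, buffer, 0, bufferLen);
                this.MemoryStream.Write(buffer, 0, bufferLen);
                return 0;
            }

            public void Dispose()
            {
                this.MemoryStream.Close();
            }
        }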
Ejemplo n.º 45
0
        /// <summary>
        /// Builds the DVD DirectShow graph
        /// </summary>
        private void BuildGraph()
        {
            try
            {
                FreeResources();

                int hr;

                /* Create our new graph */
                m_graph = (IGraphBuilder)new FilterGraphNoThread();

            #if DEBUG
                m_rot = new DsROTEntry(m_graph);
            #endif
                /* We are going to use the VMR9 for now.  The EVR does not
                 * seem to work with the interactive menus yet.  It should
                 * play Dvds fine otherwise */
                var rendererType = VideoRendererType.VideoMixingRenderer9;

                /* Creates and initializes a new renderer ready to render to WPF */
                m_renderer = CreateVideoRenderer(rendererType, m_graph, 2);

                /* Do some VMR9 specific stuff */
                if (rendererType == VideoRendererType.VideoMixingRenderer9)
                {
                    var mixer = m_renderer as IVMRMixerControl9;

                    if(mixer != null)
                    {
                        VMR9MixerPrefs dwPrefs;
                        mixer.GetMixingPrefs(out dwPrefs);
                        dwPrefs &= ~VMR9MixerPrefs.RenderTargetMask;
                        dwPrefs |= VMR9MixerPrefs.RenderTargetYUV;

                        /* Enable this line to prefer YUV */
                        //hr = mixer.SetMixingPrefs(dwPrefs);
                    }
                }

                /* Create a new DVD Navigator. */
                var dvdNav = (IBaseFilter)new DVDNavigator();

                /* The DVDControl2 interface lets us control DVD features */
                m_dvdControl = dvdNav as IDvdControl2;

                if (m_dvdControl == null)
                    throw new Exception("Could not QueryInterface the IDvdControl2 interface");

                /* QueryInterface the DVDInfo2 */
                m_dvdInfo = dvdNav as IDvdInfo2;

                /* If a Dvd directory has been set then use it, if not, let DShow find the Dvd */
                if (!string.IsNullOrEmpty(DvdDirectory))
                {
                    hr = m_dvdControl.SetDVDDirectory(DvdDirectory);
                    DsError.ThrowExceptionForHR(hr);
                }

                /* This gives us the DVD time in Hours-Minutes-Seconds-Frame time format, and other options */
                hr = m_dvdControl.SetOption(DvdOptionFlag.HMSFTimeCodeEvents, true);
                DsError.ThrowExceptionForHR(hr);

                /* If the graph stops, resume at the same point */
                m_dvdControl.SetOption(DvdOptionFlag.ResetOnStop, false);

                hr = m_graph.AddFilter(dvdNav, "DVD Navigator");
                DsError.ThrowExceptionForHR(hr);

                IPin dvdVideoPin = null;
                IPin dvdAudioPin = null;
                IPin dvdSubPicturePin = null;

                IPin dvdNavPin;
                int i = 0;

                /* Loop all the output pins on the DVD Navigator, trying to find which pins are which.
                 * We could more easily find the pins by name, but this is more fun...and more flexible
                 * if we ever want to use a 3rd party DVD navigator that uses different pin names */
                while ((dvdNavPin = DsFindPin.ByDirection(dvdNav, PinDirection.Output, i)) != null)
                {
                    var mediaTypes = new AMMediaType[1];
                    IntPtr pFetched = IntPtr.Zero;

                    IEnumMediaTypes mediaTypeEnum;
                    dvdNavPin.EnumMediaTypes(out mediaTypeEnum);

                    /* Loop over each of the mediaTypes of each pin */
                    while (mediaTypeEnum.Next(1, mediaTypes, pFetched) == 0)
                    {
                        AMMediaType mediaType = mediaTypes[0];

                        /* This will be the video stream pin */
                        if (mediaType.subType == MediaSubType.Mpeg2Video)
                        {
                            /* Keep the ref and we'll work with it later */
                            dvdVideoPin = dvdNavPin;
                            break;
                        }

                        /* This will be the audio stream pin */
                        if (mediaType.subType == MediaSubType.DolbyAC3 ||
                           mediaType.subType == MediaSubType.Mpeg2Audio)
                        {
                            /* Keep the ref and we'll work with it later */
                            dvdAudioPin = dvdNavPin;
                            break;
                        }

                        /* This is the Dvd sub picture pin.  This generally
                         * shows overlays for Dvd menus and sometimes closed captions */
                        if (mediaType.subType == DVD_SUBPICTURE_TYPE)
                        {
                            /* Keep the ref and we'll work with it later */
                            dvdSubPicturePin = dvdNavPin;
                            break;
                        }
                    }

                    mediaTypeEnum.Reset();
                    Marshal.ReleaseComObject(mediaTypeEnum);
                    i++;
                }

                /* This is the windowed renderer.  This is *NEEDED* in order
                 * for interactive menus to work with the other VMR9 in renderless mode */
                var dummyRenderer = (IBaseFilter)new VideoMixingRenderer9();
                var dummyRendererConfig = (IVMRFilterConfig9)dummyRenderer;

                /* In order for this interactive menu trick to work, the VMR9
                 * must be set to Windowed.  We will make sure the window is hidden later on */
                hr = dummyRendererConfig.SetRenderingMode(VMR9Mode.Windowed);
                DsError.ThrowExceptionForHR(hr);

                hr = dummyRendererConfig.SetNumberOfStreams(1);
                DsError.ThrowExceptionForHR(hr);

                hr = m_graph.AddFilter(dummyRenderer, "Dummy Windowed");
                DsError.ThrowExceptionForHR(hr);

                if (dvdAudioPin != null)
                {
                    /* This should render out to the default audio device. We
                     * could modify this code here to output to any audio
                     * device, such as S/PDIF or another sound card */
                    hr = m_graph.Render(dvdAudioPin);
                    DsError.ThrowExceptionForHR(hr);
                }

                /* Get the first input pin on our dummy renderer */
                m_dummyRendererPin = DsFindPin.ByConnectionStatus(dummyRenderer, /* Filter to search */
                                                                  PinConnectedStatus.Unconnected,
                                                                  0);

                /* Get an available pin on our real renderer */
                IPin rendererPin = DsFindPin.ByConnectionStatus(m_renderer, /* Filter to search */
                                                                PinConnectedStatus.Unconnected,
                                                                0); /* Pin index */

                /* Connect the pin to the renderer */
                hr = m_graph.Connect(dvdVideoPin, rendererPin);
                DsError.ThrowExceptionForHR(hr);

                /* Get the next available pin on our real renderer */
                rendererPin = DsFindPin.ByConnectionStatus(m_renderer, /* Filter to search */
                                                           PinConnectedStatus.Unconnected,
                                                           0); /* Pin index */

                /* Render the sub picture, which will connect
                 * the DVD navigator to the codec, not the renderer */
                hr = m_graph.Render(dvdSubPicturePin);
                DsError.ThrowExceptionForHR(hr);

                /* These are the subtypes most likely to be our dvd subpicture */
                var preferedSubpictureTypes = new[]{MediaSubType.ARGB4444,
                                                    MediaSubType.AI44,
                                                    MediaSubType.AYUV,
                                                    MediaSubType.ARGB32};
                IPin dvdSubPicturePinOut = null;

                /* Find what should be the subpicture pin out */
                foreach (var guidType in preferedSubpictureTypes)
                {
                    dvdSubPicturePinOut = FindPinInGraphByMediaType(guidType, /* GUID of the media type being searched for */
                                                                    PinDirection.Output,
                                                                    m_graph); /* Our current graph */
                    if (dvdSubPicturePinOut != null)
                        break;
                }

                if (dvdSubPicturePinOut == null)
                    throw new Exception("Could not find the sub picture pin out");

                /* Here we connect the DVD sub picture pin to the video renderer.
                 * This enables the overlays on Dvd menus and some closed
                 * captions to be rendered. */
                hr = m_graph.Connect(dvdSubPicturePinOut, rendererPin);
                DsError.ThrowExceptionForHR(hr);

                /* Search for the Line21 out in the graph */
                IPin line21Out = FindPinInGraphByMediaType(MediaType.AuxLine21Data,
                                                           PinDirection.Output,
                                                           m_graph);
                if (line21Out == null)
                    throw new Exception("Could not find the Line21 pin out");

                /* We connect our line21 output to the dummy renderer;
                 * this is what ultimately makes interactive DVDs work with
                 * VMR9 in renderless mode (for WPF) */
                hr = m_graph.Connect(line21Out, m_dummyRendererPin);
                DsError.ThrowExceptionForHR(hr);

                /* This is the dummy renderers Win32 window. */
                m_dummyRenderWindow = dummyRenderer as IVideoWindow;

                if (m_dummyRenderWindow == null)
                    throw new Exception("Could not QueryInterface for IVideoWindow");

                ConfigureDummyWindow();

                /* Setup our base classes with this filter graph */
                SetupFilterGraph(m_graph);

                /* Sets the NaturalVideoWidth/Height */
                SetNativePixelSizes(m_renderer);
            }
            catch (Exception ex)
            {
                FreeResources();
                InvokeMediaFailed(new MediaFailedEventArgs(ex.Message, ex));
                return;
            }

            InvokeMediaOpened();
        }
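
        FindPinInGraphByMediaType is referenced above but not shown. A simplified sketch of such a helper, which walks every filter in the graph and returns the first pin of the requested direction whose media types mention the given GUID (COM release handling on the early-return path is trimmed for brevity):

        private static IPin FindPinInGraphByMediaType(Guid mediaType, PinDirection direction, IGraphBuilder graph)
        {
            IEnumFilters enumFilters;
            if (graph.EnumFilters(out enumFilters) != 0)
                return null;

            var filters = new IBaseFilter[1];
            while (enumFilters.Next(1, filters, IntPtr.Zero) == 0)
            {
                IEnumPins enumPins;
                filters[0].EnumPins(out enumPins);

                var pins = new IPin[1];
                while (enumPins.Next(1, pins, IntPtr.Zero) == 0)
                {
                    PinDirection pinDirection;
                    pins[0].QueryDirection(out pinDirection);

                    if (pinDirection == direction)
                    {
                        IEnumMediaTypes enumTypes;
                        pins[0].EnumMediaTypes(out enumTypes);

                        var types = new AMMediaType[1];
                        while (enumTypes.Next(1, types, IntPtr.Zero) == 0)
                        {
                            bool match = (types[0].majorType == mediaType || types[0].subType == mediaType);
                            DsUtils.FreeAMMediaType(types[0]);

                            if (match)
                                return pins[0];     // caller releases the pin
                        }
                    }

                    Marshal.ReleaseComObject(pins[0]);
                }

                Marshal.ReleaseComObject(filters[0]);
            }

            return null;
        }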
Ejemplo n.º 46
0
        /// <summary>
        /// Connects the file splitter and the video renderer, creating the video decoder in between.
        /// </summary>
        /// <param name="graph">The graph.</param>
        /// <param name="filtersConfig">The configuration for this file.</param>
        /// <param name="parserOutputVideoPin">The video output pin.</param>
        /// <param name="videoRendererInputPin">The renderer's input pin.</param>
        internal static void ConnectSplitterAndRendererWithDecoder(IGraphBuilder graph, ExtensionFiltersSource filtersConfig,
                                                                   IPin parserOutputVideoPin, IPin videoRendererInputPin)
        {
            FilterSource videoFilterSource = filtersConfig.VideoDecoder;

            switch (videoFilterSource.SourceType)
            {
            case FilterSourceTypeEnum.Auto:

                int hr = graph.Connect(parserOutputVideoPin, videoRendererInputPin);
                DsError.ThrowExceptionForHR(hr);

                break;

            case FilterSourceTypeEnum.External:
                if (new Guid(videoFilterSource.ExternalCLSID) == new Guid(CLSID_VIDEO_DECODER_DMO))
                {
                    // The DMO filter is handled differently
                    DMOWrapperFilter  dmoFilter = new DMOWrapperFilter();
                    IDMOWrapperFilter wrapper   = (IDMOWrapperFilter)dmoFilter;
                    hr = wrapper.Init(new Guid(CLSID_VIDEO_DECODER_DMO), DirectShowLib.DMO.DMOCategory.VideoDecoder);

                    DsError.ThrowExceptionForHR(hr);

                    if (dmoFilter is IBaseFilter decoderFilter)
                    {
                        hr = graph.AddFilter(decoderFilter, "WMVideo Decoder DMO");
                        DsError.ThrowExceptionForHR(hr);

                        IPin wmvDecoderInputPin = DsFindPin.ByDirection(decoderFilter, PinDirection.Input, 0);
                        hr = graph.ConnectDirect(parserOutputVideoPin, wmvDecoderInputPin, null);
                        DsError.ThrowExceptionForHR(hr);

                        IPin wmvDecoderOutputPin = DsFindPin.ByDirection(decoderFilter, PinDirection.Output, 0);
                        hr = graph.ConnectDirect(wmvDecoderOutputPin, videoRendererInputPin, null);
                        DsError.ThrowExceptionForHR(hr);

                        SafeRelease(wmvDecoderInputPin);
                        SafeRelease(wmvDecoderOutputPin);
                    }
                    else
                    {
                        wrapper = null;
                        SafeRelease(dmoFilter);
                        dmoFilter = null;
                    }
                }
                else
                {
                    Type        filterType     = null;
                    IBaseFilter externalFilter = null;

                    CreateFilter(videoFilterSource.ExternalCLSID, videoFilterSource.Name, ref filterType, ref externalFilter);

                    hr = graph.AddFilter(externalFilter, videoFilterSource.Name);
                    DsError.ThrowExceptionForHR(hr);

                    IPin externalDecoderInputPin = DsFindPin.ByDirection(externalFilter, PinDirection.Input, 0);
                    hr = graph.ConnectDirect(parserOutputVideoPin, externalDecoderInputPin, null);
                    DsError.ThrowExceptionForHR(hr);

                    IPin externalDecoderOutputPin = DsFindPin.ByDirection(externalFilter, PinDirection.Output, 0);
                    hr = graph.ConnectDirect(externalDecoderOutputPin, videoRendererInputPin, null);
                    DsError.ThrowExceptionForHR(hr);

                    SafeRelease(externalDecoderInputPin);
                    SafeRelease(externalDecoderOutputPin);
                }


                break;

            default:
                throw new ArgumentOutOfRangeException($"{nameof(videoFilterSource)}.{nameof(FilterSource.SourceType)}");
            }
        }
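
        SafeRelease and CreateFilter are helpers from the surrounding class. A likely shape for SafeRelease, which the pin handling above depends on (an assumption, not the project's actual code):

        private static void SafeRelease(object comObject)
        {
            // Release a COM reference only if we actually hold one
            if (comObject != null)
            {
                Marshal.ReleaseComObject(comObject);
            }
        }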
Ejemplo n.º 47
0
        private void BuildGraph()
        {
            int hr;
            try
            {
                lblTotalTime.Text = mvs.PlayTime.ToString();
                TimeSpan tt = TimeSpan.Parse(mvs.PlayTime);
                DateTime dt = new DateTime(tt.Ticks);
                lblTotalTime.Text = String.Format("{0:HH:mm:ss}", dt);

                if (mvs.LocalMedia[0].IsDVD)
                {

                    mediaToPlay = mvs.LocalMedia[0];
                    MediaState mediaState = mediaToPlay.State;
                    if (mediaState == MediaState.NotMounted)
                    {
                        MountResult result = mediaToPlay.Mount();
                    }

                    string videoPath = mediaToPlay.GetVideoPath();

                    if (videoPath != null)
                        FirstPlayDvd(videoPath);
                    else
                        FirstPlayDvd(mvs.LocalMedia[0].File.FullName);
                    // Add delegates for Windowless operations
                    AddHandlers();
                    MainForm_ResizeMove(null, null);

                }
                else
                {
                    _graphBuilder = (IFilterGraph2)new FilterGraph();
                    _rotEntry = new DsROTEntry((IFilterGraph)_graphBuilder);
                    _mediaCtrl = (IMediaControl)_graphBuilder;
                    _mediaSeek = (IMediaSeeking)_graphBuilder;
                    _mediaPos = (IMediaPosition)_graphBuilder;
                    _mediaStep = (IVideoFrameStep)_graphBuilder;
                    _vmr9Filter = (IBaseFilter)new VideoMixingRenderer9();
                    ConfigureVMR9InWindowlessMode();
                    AddHandlers();
                    MainForm_ResizeMove(null, null);
                    hr = _graphBuilder.AddFilter(_vmr9Filter, "Video Mixing Renderer 9");
                    DsError.ThrowExceptionForHR(hr);
                    AddPreferedCodecs(_graphBuilder);
                    hr = _graphBuilder.RenderFile(mvs.LocalMedia[0].File.FullName, null);
                    DsError.ThrowExceptionForHR(hr);
                }

            }
            catch (Exception e)
            {
                CloseDVDInterfaces();
                logger.ErrorException("An error occured during the graph building : \r\n\r\n",e);
            }
        }
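
        The example queries IVideoFrameStep from the graph (_mediaStep) but does not use it here. A small sketch of how it could be used to advance a paused graph one frame at a time (assumes DirectShowLib's IVideoFrameStep and the field name above):

        private void StepOneFrame()
        {
            if (_mediaStep != null)
            {
                // Step one frame on the default (video renderer) stream
                int hr = _mediaStep.Step(1, null);
                DsError.ThrowExceptionForHR(hr);
            }
        }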
Ejemplo n.º 48
0
        private void Button_Click(object sender, RoutedEventArgs e)
        {
            OpenFileDialog lopenFileDialog = new OpenFileDialog();

            lopenFileDialog.AddExtension = true;

            var lresult = lopenFileDialog.ShowDialog();

            if (lresult != true)
            {
                return;
            }

            IBaseFilter lDSoundRender = new DSoundRender() as IBaseFilter;

            m_pGraph.AddFilter(lDSoundRender, "Audio Renderer");


            int k = 0;

            IPin[] lAudioRendererPins = new IPin[1];

            IEnumPins ppEnum;

            k = lDSoundRender.EnumPins(out ppEnum);

            k = ppEnum.Next(1, lAudioRendererPins, IntPtr.Zero);

            var lCaptureManagerEVRMultiSinkFactory = CaptureManagerVideoRendererMultiSinkFactory.getInstance().getICaptureManagerEVRMultiSinkFactory();

            uint lMaxVideoRenderStreamCount = lCaptureManagerEVRMultiSinkFactory.getMaxVideoRenderStreamCount();

            if (lMaxVideoRenderStreamCount == 0)
            {
                return;
            }

            List <object> lOutputNodesList = new List <object>();

            lCaptureManagerEVRMultiSinkFactory.createOutputNodes(
                IntPtr.Zero,
                mEVRDisplay.Surface.texture,
                1,// lMaxVideoRenderStreamCount,
                out lOutputNodesList);

            if (lOutputNodesList.Count == 0)
            {
                return;
            }

            IBaseFilter lVideoMixingRenderer9 = (IBaseFilter)lOutputNodesList[0];

            var h = m_pGraph.AddFilter(lVideoMixingRenderer9, "lVideoMixingRenderer9");


            IPin[] lVideoRendererPin = new IPin[1];


            k = lVideoMixingRenderer9.EnumPins(out ppEnum);

            k = ppEnum.Next(1, lVideoRendererPin, IntPtr.Zero);


            IBaseFilter m_SourceFilter = null;

            m_pGraph.AddSourceFilter(lopenFileDialog.FileName, null, out m_SourceFilter);

            IEnumPins lEnumPins = null;

            m_SourceFilter.EnumPins(out lEnumPins);

            IPin[] lPins = new IPin[1];

            while (lEnumPins.Next(1, lPins, IntPtr.Zero) == 0)
            {
                IEnumMediaTypes lIEnumMediaTypes;

                lPins[0].EnumMediaTypes(out lIEnumMediaTypes);

                AMMediaType[] ppMediaTypes = new AMMediaType[1];

                while (lIEnumMediaTypes.Next(1, ppMediaTypes, IntPtr.Zero) == 0)
                {
                    var gh = ppMediaTypes[0].subType;

                    if (ppMediaTypes[0].majorType == DirectShowLib.MediaType.Video)
                    {
                        k = m_pGraph.Connect(lPins[0], lVideoRendererPin[0]);
                    }
                }

                foreach (var item in lPins)
                {
                    k = m_pGraph.Render(item);
                }
            }

            IMediaControl lIMediaControl = m_pGraph as IMediaControl;

            k = lIMediaControl.Run();
        }
Ejemplo n.º 49
0
    /// <summary> create the used COM components and get the interfaces. </summary>
    protected bool GetInterfaces()
    {
      VMR9Util.g_vmr9 = null;
      if (IsRadio == false)
      {
        Vmr9 = VMR9Util.g_vmr9 = new VMR9Util();

        // switch back to directx fullscreen mode
        Log.Info("RTSPPlayer: Enabling DX9 exclusive mode");
        GUIMessage msg = new GUIMessage(GUIMessage.MessageType.GUI_MSG_SWITCH_FULL_WINDOWED, 0, 0, 0, 1, 0, null);
        GUIWindowManager.SendMessage(msg);
      }
      //Type comtype = null;
      //object comobj = null;

      DsRect rect = new DsRect();
      rect.top = 0;
      rect.bottom = GUIGraphicsContext.form.Height;
      rect.left = 0;
      rect.right = GUIGraphicsContext.form.Width;


      try
      {
        graphBuilder = (IGraphBuilder)new FilterGraph();

        Log.Info("RTSPPlayer: add source filter");
        if (IsRadio == false)
        {
          bool AddVMR9 = VMR9Util.g_vmr9 != null && VMR9Util.g_vmr9.AddVMR9(graphBuilder);
          if (!AddVMR9)
          {
            Log.Error("RTSPPlayer:Failed to add VMR9 to graph");
            return false;
          }
          VMR9Util.g_vmr9.Enable(false);
        }

        _mpegDemux = (IBaseFilter)new MPEG2Demultiplexer();
        graphBuilder.AddFilter(_mpegDemux, "MPEG-2 Demultiplexer");

        _rtspSource = (IBaseFilter)new RtpSourceFilter();
        int hr = graphBuilder.AddFilter((IBaseFilter)_rtspSource, "RTSP Source Filter");
        if (hr != 0)
        {
          Log.Error("RTSPPlayer:unable to add RTSP source filter:{0:X}", hr);
          return false;
        }

        // add preferred video & audio codecs
        Log.Info("RTSPPlayer: add video/audio codecs");
        string strVideoCodec = "";
        string strAudioCodec = "";
        string strAudiorenderer = "";
        int intFilters = 0; // FlipGer: count custom filters
        string strFilters = ""; // FlipGer: collect custom filters
        string postProcessingFilterSection = "mytv";
        using (Settings xmlreader = new MPSettings())
        {
          if (_mediaType == g_Player.MediaType.Video)
          {
            strVideoCodec = xmlreader.GetValueAsString("movieplayer", "mpeg2videocodec", "");
            strAudioCodec = xmlreader.GetValueAsString("movieplayer", "mpeg2audiocodec", "");
            strAudiorenderer = xmlreader.GetValueAsString("movieplayer", "audiorenderer", "Default DirectSound Device");
            postProcessingFilterSection = "movieplayer";
          }
          else
          {
            strVideoCodec = xmlreader.GetValueAsString("mytv", "videocodec", "");
            strAudioCodec = xmlreader.GetValueAsString("mytv", "audiocodec", "");
            strAudiorenderer = xmlreader.GetValueAsString("mytv", "audiorenderer", "Default DirectSound Device");
            postProcessingFilterSection = "mytv";
          }
          enableDvbSubtitles = xmlreader.GetValueAsBool("tvservice", "dvbsubtitles", false);
          // FlipGer: load infos for custom filters
          int intCount = 0;
          while (xmlreader.GetValueAsString(postProcessingFilterSection, "filter" + intCount.ToString(), "undefined") !=
                 "undefined")
          {
            if (xmlreader.GetValueAsBool(postProcessingFilterSection, "usefilter" + intCount.ToString(), false))
            {
              strFilters +=
                xmlreader.GetValueAsString(postProcessingFilterSection, "filter" + intCount.ToString(), "undefined") +
                ";";
              intFilters++;
            }
            intCount++;
          }
        }
        string extension = Path.GetExtension(m_strCurrentFile).ToLowerInvariant();
        if (IsRadio == false)
        {
          if (strVideoCodec.Length > 0)
          {
            DirectShowUtil.AddFilterToGraph(graphBuilder, strVideoCodec);
          }
        }
        if (strAudioCodec.Length > 0)
        {
          DirectShowUtil.AddFilterToGraph(graphBuilder, strAudioCodec);
        }

        if (enableDvbSubtitles == true)
        {
          try
          {
            _subtitleFilter = SubtitleRenderer.GetInstance().AddSubtitleFilter(graphBuilder);
            SubtitleRenderer.GetInstance().SetPlayer(this);
            dvbSubRenderer = SubtitleRenderer.GetInstance();
          }
          catch (Exception e)
          {
            Log.Error(e);
          }
        }

        Log.Debug("Is subtitle fitler null? {0}", (_subtitleFilter == null));
        // FlipGer: add custom filters to graph
        string[] arrFilters = strFilters.Split(';');
        for (int i = 0; i < intFilters; i++)
        {
          DirectShowUtil.AddFilterToGraph(graphBuilder, arrFilters[i]);
        }
        if (strAudiorenderer.Length > 0)
        {
          audioRendererFilter = DirectShowUtil.AddAudioRendererToGraph(graphBuilder, strAudiorenderer, false);
        }

        Log.Info("RTSPPlayer: load:{0}", m_strCurrentFile);
        IFileSourceFilter interfaceFile = (IFileSourceFilter)_rtspSource;
        if (interfaceFile == null)
        {
          Log.Error("RTSPPlayer:Failed to get IFileSourceFilter");
          return false;
        }

        //Log.Info("RTSPPlayer: open file:{0}",filename);
        hr = interfaceFile.Load(m_strCurrentFile, null);
        if (hr != 0)
        {
          Log.Error("RTSPPlayer:Failed to open file:{0} :0x{1:x}", m_strCurrentFile, hr);
          return false;
        }

        #region connect rtspsource->demux

        Log.Info("RTSPPlayer:connect rtspsource->mpeg2 demux");
        IPin pinTsOut = DsFindPin.ByDirection((IBaseFilter)_rtspSource, PinDirection.Output, 0);
        if (pinTsOut == null)
        {
          Log.Info("RTSPPlayer:failed to find output pin of tsfilesource");
          return false;
        }
        IPin pinDemuxIn = DsFindPin.ByDirection(_mpegDemux, PinDirection.Input, 0);
        if (pinDemuxIn == null)
        {
          Log.Info("RTSPPlayer:failed to find output pin of tsfilesource");
          return false;
        }

        hr = graphBuilder.Connect(pinTsOut, pinDemuxIn);
        if (hr != 0)
        {
          Log.Info("RTSPPlayer:failed to connect rtspsource->mpeg2 demux:{0:X}", hr);
          return false;
        }
        DirectShowUtil.ReleaseComObject(pinTsOut);
        DirectShowUtil.ReleaseComObject(pinDemuxIn);

        #endregion

        #region render demux output pins

        if (IsRadio)
        {
          Log.Info("RTSPPlayer:render audio demux outputs");
          IEnumPins enumPins;
          _mpegDemux.EnumPins(out enumPins);
          IPin[] pins = new IPin[2];
          int fetched = 0;
          while (enumPins.Next(1, pins, out fetched) == 0)
          {
            if (fetched != 1)
            {
              break;
            }
            PinDirection direction;
            pins[0].QueryDirection(out direction);
            if (direction == PinDirection.Input)
            {
              continue;
            }
            IEnumMediaTypes enumMediaTypes;
            pins[0].EnumMediaTypes(out enumMediaTypes);
            AMMediaType[] mediaTypes = new AMMediaType[20];
            int fetchedTypes;
            enumMediaTypes.Next(20, mediaTypes, out fetchedTypes);
            for (int i = 0; i < fetchedTypes; ++i)
            {
              if (mediaTypes[i].majorType == MediaType.Audio)
              {
                graphBuilder.Render(pins[0]);
                break;
              }
            }
          }
        }
        else
        {
          Log.Info("RTSPPlayer:render audio/video demux outputs");
          IEnumPins enumPins;
          _mpegDemux.EnumPins(out enumPins);
          IPin[] pins = new IPin[2];
          int fetched = 0;
          while (enumPins.Next(1, pins, out fetched) == 0)
          {
            if (fetched != 1)
            {
              break;
            }
            PinDirection direction;
            pins[0].QueryDirection(out direction);
            if (direction == PinDirection.Input)
            {
              continue;
            }
            graphBuilder.Render(pins[0]);
          }
        }

        #endregion

        // Connect DVB subtitle filter pins in the graph
        if (_mpegDemux != null && enableDvbSubtitles == true)
        {
          IMpeg2Demultiplexer demuxer = _mpegDemux as IMpeg2Demultiplexer;
          hr = demuxer.CreateOutputPin(GetTSMedia(), "Pcr", out _pinPcr);

          if (hr == 0)
          {
            Log.Info("RTSPPlayer:_pinPcr OK");

            IPin pDemuxerPcr = DsFindPin.ByName(_mpegDemux, "Pcr");
            IPin pSubtitlePcr = DsFindPin.ByName(_subtitleFilter, "Pcr");
            hr = graphBuilder.Connect(pDemuxerPcr, pSubtitlePcr);
          }
          else
          {
            Log.Info("RTSPPlayer:Failed to create _pinPcr in demuxer:{0:X}", hr);
          }

          hr = demuxer.CreateOutputPin(GetTSMedia(), "Subtitle", out _pinSubtitle);
          if (hr == 0)
          {
            Log.Info("RTSPPlayer:_pinSubtitle OK");

            IPin pDemuxerSubtitle = DsFindPin.ByName(_mpegDemux, "Subtitle");
            IPin pSubtitle = DsFindPin.ByName(_subtitleFilter, "In");
            hr = graphBuilder.Connect(pDemuxerSubtitle, pSubtitle);
          }
          else
          {
            Log.Info("RTSPPlayer:Failed to create _pinSubtitle in demuxer:{0:X}", hr);
          }

          hr = demuxer.CreateOutputPin(GetTSMedia(), "PMT", out _pinPMT);
          if (hr == 0)
          {
            Log.Info("RTSPPlayer:_pinPMT OK");

            IPin pDemuxerSubtitle = DsFindPin.ByName(_mpegDemux, "PMT");
            IPin pSubtitle = DsFindPin.ByName(_subtitleFilter, "PMT");
            hr = graphBuilder.Connect(pDemuxerSubtitle, pSubtitle);
          }
          else
          {
            Log.Info("RTSPPlayer:Failed to create _pinPMT in demuxer:{0:X}", hr);
          }
        }


        if (IsRadio == false)
        {
          if (!VMR9Util.g_vmr9.IsVMR9Connected)
          {
            //VMR9 is not supported, switch to overlay
            Log.Info("RTSPPlayer: vmr9 not connected");
            _mediaCtrl = null;
            Cleanup();
            return false;
          }
          VMR9Util.g_vmr9.SetDeinterlaceMode();
        }

        _mediaCtrl = (IMediaControl)graphBuilder;
        mediaEvt = (IMediaEventEx)graphBuilder;
        _mediaSeeking = (IMediaSeeking)graphBuilder;
        mediaPos = (IMediaPosition)graphBuilder;
        basicAudio = graphBuilder as IBasicAudio;
        //DirectShowUtil.SetARMode(graphBuilder,AspectRatioMode.Stretched);
        DirectShowUtil.EnableDeInterlace(graphBuilder);
        if (VMR9Util.g_vmr9 != null)
        {
          m_iVideoWidth = VMR9Util.g_vmr9.VideoWidth;
          m_iVideoHeight = VMR9Util.g_vmr9.VideoHeight;
        }
        if (audioRendererFilter != null)
        {
          Log.Info("RTSPPlayer9:set reference clock");
          IMediaFilter mp = graphBuilder as IMediaFilter;
          IReferenceClock clock = audioRendererFilter as IReferenceClock;
          hr = mp.SetSyncSource(null);
          hr = mp.SetSyncSource(clock);
          Log.Info("RTSPPlayer9:set reference clock:{0:X}", hr);
        }
        Log.Info("RTSPPlayer: graph build successfull");
        return true;
      }
      catch (Exception ex)
      {
        Error.SetError("Unable to play movie", "Unable build graph for VMR9");
        Log.Error("RTSPPlayer:exception while creating DShow graph {0} {1}", ex.Message, ex.StackTrace);
        CloseInterfaces();
        return false;
      }
    }
Ejemplo n.º 50
0
        //public bool IsVMR9Connected

        #endregion

        #region public members

        /// <summary>
        /// Add VMR9 filter to graph and configure it
        /// </summary>
        /// <param name="graphBuilder"></param>
        public bool AddVMR9(IGraphBuilder graphBuilder)
        {
            if (!_useVmr9)
            {
                Log.Debug("VMR9: addvmr9 - vmr9 is deactivated");
                return(false);
            }
            if (_isVmr9Initialized)
            {
                Log.Debug("VMR9: addvmr9: vmr9 has already been initialized");
                return(false);
            }

            bool _useEvr = GUIGraphicsContext.IsEvr;

            if (_instanceCounter != 0)
            {
                Log.Error("VMR9: Multiple instances of VMR9 running!!!");
                throw new Exception("VMR9Helper: Multiple instances of VMR9 running!!!");
            }

            HResult hr;
            IntPtr  hMonitor = Manager.GetAdapterMonitor(GUIGraphicsContext.DX9Device.DeviceCaps.AdapterOrdinal);
            IntPtr  upDevice = DirectShowUtil.GetUnmanagedDevice(GUIGraphicsContext.DX9Device);

            _scene = new PlaneScene(this);
            _scene.Init();

            if (_useEvr)
            {
                EvrInit(_scene, (uint)upDevice.ToInt32(), ref _vmr9Filter, (uint)hMonitor.ToInt32());
                hr = new HResult(graphBuilder.AddFilter(_vmr9Filter, "Enhanced Video Renderer"));
                Log.Info("VMR9: added EVR Renderer to graph");
            }
            else
            {
                _vmr9Filter = (IBaseFilter) new VideoMixingRenderer9();

                Vmr9Init(_scene, (uint)upDevice.ToInt32(), _vmr9Filter, (uint)hMonitor.ToInt32());
                hr = new HResult(graphBuilder.AddFilter(_vmr9Filter, "Video Mixing Renderer 9"));
                Log.Info("VMR9: added Video Mixing Renderer 9 to graph");
            }

            if (_vmr9Filter == null)
            {
                Error.SetError("Unable to play movie", "Renderer could not be added");
                Log.Error("VMR9: Renderer not installed / cannot be used!");
                return(false);
            }

            if (hr != 0)
            {
                if (_useEvr)
                {
                    EvrDeinit();
                }
                else
                {
                    Vmr9Deinit();
                }
                _scene.Stop();
                _scene.Deinit();
                _scene = null;

                DirectShowUtil.ReleaseComObject(_vmr9Filter);
                _vmr9Filter = null;
                Error.SetError("Unable to play movie", "Unable to initialize Renderer");
                Log.Error("VMR9: Failed to add Renderer to filter graph");
                return(false);
            }

            _qualityInterface         = _vmr9Filter as IQualProp;
            _vmr9MixerBitmapInterface = _vmr9Filter as IVMRMixerBitmap9;
            _graphBuilderInterface    = graphBuilder;
            _instanceCounter++;
            _isVmr9Initialized = true;
            if (!_useEvr)
            {
                SetDeinterlacePrefs();

                IVMRMixerControl9 mixer = _vmr9Filter as IVMRMixerControl9;
                if (mixer != null)
                {
                    VMR9MixerPrefs dwPrefs;
                    mixer.GetMixingPrefs(out dwPrefs);
                    dwPrefs &= ~VMR9MixerPrefs.RenderTargetMask;

                    dwPrefs |= VMR9MixerPrefs.RenderTargetYUV;
                    // YUV saves graphics bandwidth  http://msdn2.microsoft.com/en-us/library/ms788177(VS.85).aspx
                    hr.Set(mixer.SetMixingPrefs(dwPrefs));
                    Log.Debug("VMR9: Enabled YUV mixing - " + hr.ToDXString());

                    using (Settings xmlreader = new MPSettings())
                    {
                        //Enable nonsquaremixing
                        if (xmlreader.GetValueAsBool("general", "nonsquare", true))
                        {
                            mixer.GetMixingPrefs(out dwPrefs);
                            dwPrefs |= VMR9MixerPrefs.NonSquareMixing;
                            hr.Set(mixer.SetMixingPrefs(dwPrefs));
                            Log.Debug("VMR9: Turning on nonsquare mixing - " + hr.ToDXString());
                        }

                        // Enable DecimateMask - this will effectively use only half of the input width & length
                        if (xmlreader.GetValueAsBool("general", "dx9decimatemask", false))
                        {
                            mixer.GetMixingPrefs(out dwPrefs);
                            dwPrefs &= ~VMR9MixerPrefs.DecimateMask;
                            dwPrefs |= VMR9MixerPrefs.DecimateOutput;
                            hr.Set(mixer.SetMixingPrefs(dwPrefs));
                            Log.Debug("VMR9: Enable decimate mask - " + hr.ToDXString());
                        }

                        // see  D3DTEXTUREFILTERTYPE Enumerated Type documents for further information
                        // MixerPref9_PointFiltering
                        // MixerPref9_BiLinearFiltering
                        // MixerPref9_AnisotropicFiltering
                        // MixerPref9_PyramidalQuadFiltering
                        // MixerPref9_GaussianQuadFiltering

                        mixer.GetMixingPrefs(out dwPrefs);
                        dwPrefs &= ~VMR9MixerPrefs.FilteringMask;
                        string filtermode9 = xmlreader.GetValueAsString("general", "dx9filteringmode", "Gaussian Quad Filtering");
                        if (filtermode9 == "Point Filtering")
                        {
                            dwPrefs |= VMR9MixerPrefs.PointFiltering;
                        }
                        else if (filtermode9 == "Bilinear Filtering")
                        {
                            dwPrefs |= VMR9MixerPrefs.BiLinearFiltering;
                        }
                        else if (filtermode9 == "Anisotropic Filtering")
                        {
                            dwPrefs |= VMR9MixerPrefs.AnisotropicFiltering;
                        }
                        else if (filtermode9 == "Pyrimidal Quad Filtering")
                        {
                            dwPrefs |= VMR9MixerPrefs.PyramidalQuadFiltering;
                        }
                        else
                        {
                            dwPrefs |= VMR9MixerPrefs.GaussianQuadFiltering;
                        }

                        hr.Set(mixer.SetMixingPrefs(dwPrefs));
                        Log.Debug("VRM9: Set filter mode - " + filtermode9 + " " + hr.ToDXString());
                    }
                }
            }
            _threadId = Thread.CurrentThread.ManagedThreadId;
            GUIGraphicsContext.Vmr9Active = true;
            g_vmr9 = this;
            Log.Debug("VMR9: Renderer successfully added");
            return(true);
        }
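The mixer configuration in AddVMR9 is a read-modify-write cycle on IVMRMixerControl9: read the current VMR9MixerPrefs, clear the mask for the setting being changed, OR in the new flag, and write the prefs back. Below is a minimal sketch of that cycle for the YUV render target, assuming the same DirectShowLib types; the helper name is illustrative.

        // Illustrative sketch: switch the VMR9 mixer to a YUV render target.
        // Read-modify-write on VMR9MixerPrefs, as done inside AddVMR9 above.
        public static int EnableYuvMixing(IBaseFilter vmr9Filter)
        {
            IVMRMixerControl9 mixer = vmr9Filter as IVMRMixerControl9;
            if (mixer == null)
            {
                return unchecked((int)0x80004002); // E_NOINTERFACE
            }
            VMR9MixerPrefs prefs;
            int hr = mixer.GetMixingPrefs(out prefs);
            if (hr < 0)
            {
                return hr;
            }
            prefs &= ~VMR9MixerPrefs.RenderTargetMask; // clear the render-target bits
            prefs |= VMR9MixerPrefs.RenderTargetYUV;   // select the YUV render target
            return mixer.SetMixingPrefs(prefs);
        }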
Ejemplo n.º 51
0
        /// <summary>
        /// Initialize the graph
        /// </summary>
        public void InitGraph()
        {
            if (theDevice == null)
                return;

            //Create the Graph
            graphBuilder = (IGraphBuilder) new FilterGraph();

            //Create the Capture Graph Builder
            ICaptureGraphBuilder2 captureGraphBuilder = null;
            captureGraphBuilder = (ICaptureGraphBuilder2) new CaptureGraphBuilder2();

            //Create the media control for controlling the graph
            mediaControl = (IMediaControl) this.graphBuilder;

            // Attach the filter graph to the capture graph
            int hr = captureGraphBuilder.SetFiltergraph(this.graphBuilder);
            DsError.ThrowExceptionForHR(hr);

            //Add the Video input device to the graph
            hr = graphBuilder.AddFilter(theDevice, "source filter");
            DsError.ThrowExceptionForHR(hr);

            //Add the Video compressor filter to the graph
            hr = graphBuilder.AddFilter(theCompressor, "compressor filter");
            DsError.ThrowExceptionForHR(hr);

            //Create the file writer part of the graph. SetOutputFileName does this for us, and returns the mux and sink
            IBaseFilter mux;
            IFileSinkFilter sink;
            hr = captureGraphBuilder.SetOutputFileName(MediaSubType.Avi, textBox1.Text, out mux, out sink);
            DsError.ThrowExceptionForHR(hr);

            //Render any preview pin of the device
            hr = captureGraphBuilder.RenderStream(PinCategory.Preview, MediaType.Video, theDevice, null, null);
            DsError.ThrowExceptionForHR(hr);

            //Connect the device and compressor to the mux to render the capture part of the graph
            hr = captureGraphBuilder.RenderStream(PinCategory.Capture, MediaType.Video, theDevice, theCompressor, mux);
            DsError.ThrowExceptionForHR(hr);

            #if DEBUG
            m_rot = new DsROTEntry(graphBuilder);
            #endif

            //get the video window from the graph
            IVideoWindow videoWindow = null;
            videoWindow = (IVideoWindow) graphBuilder;

            //Set the owner of the videoWindow to an IntPtr (the Handle of any control - could be a form / button etc.)
            hr = videoWindow.put_Owner(panel1.Handle);
            DsError.ThrowExceptionForHR(hr);

            //Set the style of the video window
            hr = videoWindow.put_WindowStyle(WindowStyle.Child | WindowStyle.ClipChildren);
            DsError.ThrowExceptionForHR(hr);

            // Position video window in client rect of main application window
            hr = videoWindow.SetWindowPosition(0,0, panel1.Width, panel1.Height);
            DsError.ThrowExceptionForHR(hr);

            // Make the video window visible
            hr = videoWindow.put_Visible(OABool.True);
            DsError.ThrowExceptionForHR(hr);

            Marshal.ReleaseComObject(mux);
            Marshal.ReleaseComObject(sink);
            Marshal.ReleaseComObject(captureGraphBuilder);
        }
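InitGraph only assembles the preview/capture graph; nothing runs until IMediaControl.Run is called, and the video window owner should be detached before the graph is released. A hedged sketch of the matching start/stop methods follows, assuming the same graphBuilder and mediaControl fields; the method names are illustrative.

            // Start the preview/capture graph built by InitGraph (illustrative sketch).
            private void StartCapture()
            {
                if (mediaControl == null)
                    return;
                int hr = mediaControl.Run();
                DsError.ThrowExceptionForHR(hr);
            }

            // Stop the graph and detach the video window owner before tearing the graph down,
            // so the panel is not left referencing a destroyed renderer window.
            private void StopCapture()
            {
                if (mediaControl != null)
                {
                    mediaControl.Stop();
                }
                IVideoWindow videoWindow = graphBuilder as IVideoWindow;
                if (videoWindow != null)
                {
                    videoWindow.put_Visible(OABool.False);
                    videoWindow.put_Owner(IntPtr.Zero);
                }
            }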
Ejemplo n.º 52
0
        DSStreamResultCodes InitWithStreamBufferFile(WTVStreamingVideoRequest strq)
        {
            // Init variables
            //IPin[] pin = new IPin[1];
            IBaseFilter DecFilterAudio   = null;
            IBaseFilter DecFilterVideo   = null;
            IBaseFilter MainAudioDecoder = null;
            IBaseFilter MainVideoDecoder = null;
            string      dPin             = string.Empty;
            string      sName            = string.Empty;
            string      dName            = string.Empty;
            string      sPin             = string.Empty;
            FileInfo    fiInputFile      = new FileInfo(strq.FileName);
            string      txtOutputFNPath  = fiInputFile.FullName + ".wmv";

            if (
                (!fiInputFile.Extension.ToLowerInvariant().Equals(".wtv")) &&
                (!fiInputFile.Extension.ToLowerInvariant().Equals(".dvr-ms"))
                )
            {
                return(DSStreamResultCodes.ErrorInvalidFileType);
            }

            int hr = 0;

            try
            {
                // Get the graphbuilder interface
                SendDebugMessage("Creating Graph Object", 0);
                IGraphBuilder graphbuilder = (IGraphBuilder)currentFilterGraph;

                // Add the DVRMS/WTV file / filter to the graph
                SendDebugMessage("Add SBE Source Filter", 0);

                hr = graphbuilder.AddSourceFilter(fiInputFile.FullName, "SBE Filter", out currentSBEfilter); // class variable
                DsError.ThrowExceptionForHR(hr);
                dc.Add(currentSBEfilter);

                // Get the SBE audio and video out pins
                IPin SBEVidOutPin, SBEAudOutPin;
                SBEAudOutPin = FilterGraphTools.FindPinByMediaType(currentSBEfilter, PinDirection.Output, MediaType.Audio, MediaSubType.Null);
                SBEVidOutPin = FilterGraphTools.FindPinByMediaType(currentSBEfilter, PinDirection.Output, MediaType.Video, MediaSubType.Null);

                // Set up two decrypt filters according to file extension (assume audio and video both present )
                if (fiInputFile.Extension.ToLowerInvariant().Equals(".dvr-ms"))
                {
                    // Add DVR-MS decrypt filters
                    SendDebugMessage("Add DVRMS (bda) decryption", 0);
                    DecFilterAudio = (IBaseFilter) new DTFilter();  // THESE ARE FOR DVR-MS (BDA DTFilters)
                    DecFilterVideo = (IBaseFilter) new DTFilter();
                    graphbuilder.AddFilter(DecFilterAudio, "Decrypt / Tag");
                    graphbuilder.AddFilter(DecFilterVideo, "Decrypt / Tag 0001");
                }
                else  // Add WTV decrypt filters
                {
                    SendDebugMessage("Add WTV (pbda) decryption", 0);
                    DecFilterAudio = FilterDefinition.AddToFilterGraph(FilterDefinitions.Decrypt.DTFilterPBDA, ref graphbuilder);
                    DecFilterVideo = FilterDefinition.AddToFilterGraph(FilterDefinitions.Decrypt.DTFilterPBDA, ref graphbuilder, "PBDA DTFilter 0001");
                }
                dc.Add(DecFilterAudio);
                dc.Add(DecFilterVideo);

                // Make the first link in the graph: SBE => Decrypts
                SendDebugMessage("Connect SBE => Decrypt filters", 0);
                IPin DecVideoInPin = DsFindPin.ByDirection(DecFilterVideo, PinDirection.Input, 0);
                FilterGraphTools.ConnectFilters(graphbuilder, SBEVidOutPin, DecVideoInPin, false);
                IPin DecAudioInPin = DsFindPin.ByDirection(DecFilterAudio, PinDirection.Input, 0);
                if (DecAudioInPin == null)
                {
                    SendDebugMessage("WARNING: No Audio Input to decrypt filter.");
                }
                else
                {
                    FilterGraphTools.ConnectFilters(graphbuilder, SBEAudOutPin, DecAudioInPin, false);
                }

                // Get Dec Audio Out pin
                IPin DecAudioOutPin = DsFindPin.ByDirection(DecFilterAudio, PinDirection.Output, 0);

                // Examine Dec Audio out for audio format
                SendDebugMessage("Examining source audio", 0);
                AMMediaType AudioMediaType = null;
                getPinMediaType(DecAudioOutPin, MediaType.Audio, Guid.Empty, Guid.Empty, ref AudioMediaType);
                SendDebugMessage("Audio media subtype: " + AudioMediaType.subType.ToString());
                SendDebugMessage("Examining Audio StreamInfo");
                StreamInfo si         = FileInformation.GetStreamInfo(AudioMediaType);
                bool       AudioIsAC3 = (si.SimpleType == "AC-3");
                if (AudioIsAC3)
                {
                    SendDebugMessage("Audio type is AC3");
                }
                else
                {
                    SendDebugMessage("Audio type is not AC3");
                }
                si = null;
                DsUtils.FreeAMMediaType(AudioMediaType);

                // Add an appropriate audio decoder
                if (AudioIsAC3)
                {
                    if (!FilterGraphTools.IsThisComObjectInstalled(FilterDefinitions.Audio.AudioDecoderMPCHC.CLSID))
                    {
                        SendDebugMessage("Missing AC3 Audio Decoder, and AC3 audio detected.");
                        return(DSStreamResultCodes.ErrorAC3CodecNotFound);
                    }
                    else
                    {
                        MainAudioDecoder = FilterDefinition.AddToFilterGraph(FilterDefinitions.Audio.AudioDecoderMPCHC, ref graphbuilder);   //MainAudioDecoder = FatAttitude.WTVTranscoder.FilterDefinitions.Audio.AudioDecoderFFDShow.AddToFilterGraph(ref graph);
                        Guid tmpGuid; MainAudioDecoder.GetClassID(out tmpGuid);
                        SendDebugMessage("Main Audio decoder CLSID is " + tmpGuid.ToString());
                    }
                }
                else
                {
                    MainAudioDecoder = FilterDefinition.AddToFilterGraph(FilterDefinitions.Audio.AudioDecoderMSDTV, ref graphbuilder);
                }

                // Add a video decoder
                SendDebugMessage("Add DTV decoder", 0);
                MainVideoDecoder = FilterDefinition.AddToFilterGraph(FilterDefinitions.Video.VideoDecoderMSDTV, ref graphbuilder);
                dc.Add(MainAudioDecoder);
                dc.Add(MainVideoDecoder);

                //SetAudioDecoderOutputToPCMStereo(MainAudioDecoder);

                // Add a null renderer
                SendDebugMessage("Add null renderer", 0);
                NullRenderer MyNullRenderer = new NullRenderer();
                dc.Add(MyNullRenderer);
                hr = graphbuilder.AddFilter((IBaseFilter)MyNullRenderer, @"Null Renderer");
                DsError.ThrowExceptionForHR(hr);

                // Link up video through to null renderer
                SendDebugMessage("Connect video to null renderer", 0);
                // Make the second link:  Decrypts => DTV
                IPin DecVideoOutPin = DsFindPin.ByDirection(DecFilterVideo, PinDirection.Output, 0);
                IPin DTVVideoInPin  = DsFindPin.ByName(MainVideoDecoder, @"Video Input"); // IPin DTVVideoInPin = DsFindPin.ByDirection(DTVVideoDecoder, PinDirection.Input, 0);  // first one should be video input?  //
                FilterGraphTools.ConnectFilters(graphbuilder, DecVideoOutPin, DTVVideoInPin, false);
                // 3. DTV => Null renderer
                IPin NullRInPin     = DsFindPin.ByDirection((IBaseFilter)MyNullRenderer, PinDirection.Input, 0);
                IPin DTVVideoOutPin = FilterGraphTools.FindPinByMediaType(MainVideoDecoder, PinDirection.Output, MediaType.Video, MediaSubType.Null);
                FilterGraphTools.ConnectFilters(graphbuilder, DTVVideoOutPin, NullRInPin, false);
                Marshal.ReleaseComObject(NullRInPin); NullRInPin = null;

                // Run graph [can use this also to get media type => see, e.g. dvrmstowmvhd by Babgvant]
                SendDebugMessage("Run graph for testing purposes", 0);
                IMediaControl tempControl = (IMediaControl)graphbuilder;
                IMediaEvent   tempEvent   = (IMediaEvent)graphbuilder;
                DsError.ThrowExceptionForHR(tempControl.Pause());
                DsError.ThrowExceptionForHR(tempControl.Run());
                EventCode pEventCode;
                hr = tempEvent.WaitForCompletion(1000, out pEventCode);
                //DsError.ThrowExceptionForHR(hr);  // DO *NOT* DO THIS HERE!  THERE MAY WELL BE AN ERROR DUE TO EVENTS RAISED BY THE STREAM BUFFER ENGINE, THIS IS A DELIBERATE TEST RUN OF THE GRAPH
                // Stop graph if necessary
                FilterState pFS;
                hr = tempControl.GetState(1000, out pFS);
                if (pFS == FilterState.Running)
                {
                    DsError.ThrowExceptionForHR(tempControl.Stop());
                }

                // Remove null renderer
                hr = graphbuilder.RemoveFilter((IBaseFilter)MyNullRenderer);

                // Now graph has been run and stopped we can get the video width and height from the output pin of the main video decoder
                AMMediaType pmt = null;
                getPinMediaType(DTVVideoOutPin, MediaType.Video, MediaSubType.YUY2, Guid.Empty, ref pmt);
                FrameSize SourceFrameSize;
                if (pmt.formatType == FormatType.VideoInfo2)
                {
                    VideoInfoHeader2 pvih2 = new VideoInfoHeader2();
                    Marshal.PtrToStructure(pmt.formatPtr, pvih2);
                    int VideoWidth  = pvih2.BmiHeader.Width;
                    int VideoHeight = pvih2.BmiHeader.Height;
                    SourceFrameSize = new FrameSize(VideoWidth, VideoHeight);
                }
                else
                {
                    SourceFrameSize = new FrameSize(320, 240);
                }

                // Free up
                DsUtils.FreeAMMediaType(pmt); pmt = null;

                // Link up audio
                // 2. Audio Decrypt -> Audio decoder
                IPin MainAudioInPin = DsFindPin.ByDirection(MainAudioDecoder, PinDirection.Input, 0);
                FilterGraphTools.ConnectFilters(graphbuilder, DecAudioOutPin, MainAudioInPin, false);

                // Add ASF Writer
                // Create an ASF writer filter
                SendDebugMessage("Creating ASF Writer", 0);
                WMAsfWriter asf_filter = new WMAsfWriter();
                dc.Add(asf_filter);                            // CHECK FOR ERRORS
                currentOutputFilter = (IBaseFilter)asf_filter; // class variable
                // Add the ASF filter to the graph
                hr = graphbuilder.AddFilter((IBaseFilter)asf_filter, "WM Asf Writer");
                DsError.ThrowExceptionForHR(hr);

                // Set the filename
                IFileSinkFilter sinkFilter = (IFileSinkFilter)asf_filter;
                string          destPathFN = fiInputFile.FullName + ".wmv";
                hr = sinkFilter.SetFileName(destPathFN, null);
                DsError.ThrowExceptionForHR(hr);

                // Make the final links:  DTV => writer
                SendDebugMessage("Linking audio/video through to decoder and writer", 0);
                IPin DTVAudioOutPin   = DsFindPin.ByDirection(MainAudioDecoder, PinDirection.Output, 0);
                IPin ASFAudioInputPin = FilterGraphTools.FindPinByMediaType((IBaseFilter)asf_filter, PinDirection.Input, MediaType.Audio, MediaSubType.Null);
                IPin ASFVideoInputPin = FilterGraphTools.FindPinByMediaType((IBaseFilter)asf_filter, PinDirection.Input, MediaType.Video, MediaSubType.Null);
                FilterGraphTools.ConnectFilters(graphbuilder, DTVAudioOutPin, ASFAudioInputPin, false);
                if (ASFVideoInputPin != null)
                {
                    FilterGraphTools.ConnectFilters(graphbuilder, DTVVideoOutPin, ASFVideoInputPin, false);
                }

                // Configure ASFWriter
                ConfigureASFWriter(asf_filter, strq, SourceFrameSize);

                // Release pins
                SendDebugMessage("Releasing COM objects (pins)", 0);
                // dec
                Marshal.ReleaseComObject(DecAudioInPin); DecAudioInPin   = null;
                Marshal.ReleaseComObject(DecVideoInPin); DecVideoInPin   = null;
                Marshal.ReleaseComObject(DecVideoOutPin); DecVideoOutPin = null;
                Marshal.ReleaseComObject(DecAudioOutPin); DecAudioOutPin = null;
                // dtv
                Marshal.ReleaseComObject(MainAudioInPin); MainAudioInPin = null;
                Marshal.ReleaseComObject(DTVVideoInPin); DTVVideoInPin   = null;
                Marshal.ReleaseComObject(DTVVideoOutPin); DTVVideoOutPin = null;
                Marshal.ReleaseComObject(DTVAudioOutPin); DTVAudioOutPin = null;
                // asf
                Marshal.ReleaseComObject(ASFAudioInputPin); ASFAudioInputPin = null;
                Marshal.ReleaseComObject(ASFVideoInputPin); ASFVideoInputPin = null;
            }
            catch (Exception ex)
            {
                SendDebugMessageWithException(ex.Message, ex);
                return(DSStreamResultCodes.ErrorExceptionOccurred);
            }

            return(DSStreamResultCodes.OK);
        }
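The temporary null-renderer pass in InitWithStreamBufferFile is a general probing technique: run the graph briefly so upstream filters negotiate their media types, then stop it again before the real sink is wired up. Below is a stripped-down sketch of only that probe step, assuming DirectShowLib; error handling is trimmed.

            // Illustrative sketch: briefly run a graph so media types get negotiated, then stop it.
            // The completion result is deliberately ignored (some sources, e.g. the Stream Buffer
            // Engine, raise events during this test run).
            static void ProbeRun(IGraphBuilder graph, int timeoutMs)
            {
                IMediaControl control = (IMediaControl)graph;
                IMediaEvent events = (IMediaEvent)graph;

                DsError.ThrowExceptionForHR(control.Pause());
                DsError.ThrowExceptionForHR(control.Run());

                EventCode code;
                events.WaitForCompletion(timeoutMs, out code);

                FilterState state;
                control.GetState(timeoutMs, out state);
                if (state == FilterState.Running)
                {
                    DsError.ThrowExceptionForHR(control.Stop());
                }
            }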
Ejemplo n.º 53
0
    protected bool GetInterfaces(string filename, int titleBD)
    {
      try
      {
        Log.Debug("BDPlayer: GetInterfaces()");

        _graphBuilder = (IGraphBuilder)new FilterGraph();
        _rotEntry = new DsROTEntry(_graphBuilder as IFilterGraph);

        filterConfig = GetFilterConfiguration();

        if (filterConfig.AudioRenderer.Length > 0)
        {
          _audioRendererFilter = DirectShowUtil.AddAudioRendererToGraph(_graphBuilder, filterConfig.AudioRenderer, true);
        }

        BDReader reader = new BDReader();
        _interfaceBDReader = reader as IBaseFilter;
        _ireader = reader as IBDReader;

        if (_interfaceBDReader == null || _ireader == null)
        {
          // todo: add exception
          return false;
        }

        // add the BD reader
        int hr = _graphBuilder.AddFilter(_interfaceBDReader, BD_READER_GRAPH_NAME);
        DsError.ThrowExceptionForHR(hr);

        Log.Debug("BDPlayer: Add BDReader to graph");

        IFileSourceFilter interfaceFile = (IFileSourceFilter)_interfaceBDReader;

        LoadSettings(_ireader);
        _ireader.SetD3DDevice(DirectShowUtil.GetUnmanagedDevice(GUIGraphicsContext.DX9Device));
        _ireader.SetBDReaderCallback(this);

        hr = interfaceFile.Load(filename, null);

        DsError.ThrowExceptionForHR(hr);

        Log.Debug("BDPlayer: BDReader loaded: {0}", filename);

        List<TitleInfo> titles = GetTitleInfoCollection(_ireader);

        while (true)
        {
          if (g_Player.ForcePlay && g_Player.SetResumeBDTitleState < g_Player.BdDefaultTitle)
          {
            if (titles.Count == 1)
            {
              _titleToPlay = 0;
              g_Player.SetResumeBDTitleState = g_Player.BdRemuxTitle;
            }
            else
            {
              _titleToPlay = g_Player.SetResumeBDTitleState;
            }
            _forceTitle = true;
            g_Player.ForcePlay = false;
          }
          else
          {
            if (titles.Count == 1)
            {
              // BD has only one title (remux one)
              _forceTitle = true;
              _titleToPlay = 0;
              g_Player.SetResumeBDTitleState = g_Player.BdRemuxTitle;

              if (g_Player.SetResumeBDTitleState == -1)
              {
                // user cancelled dialog
                titles.Dispose();
                g_Player.Stop();
                return false;
              }
            }
            else
            {
              _titleToPlay = SelectTitle(titles);
              g_Player.SetResumeBDTitleState = _titleToPlay;
              Log.Info("BDPlayer: BDReader _titleToPlay : {0}", _titleToPlay);
              if (_titleToPlay > -1)
              {
                // a specific title was selected
                _forceTitle = true;

                if (g_Player.SetResumeBDTitleState == -1)
                {
                  // user cancelled dialog
                  titles.Dispose();
                  g_Player.Stop();
                  return false;
                }
              }
              else
              {
                if (_titleToPlay == -1)
                {
                  // user cancelled dialog
                  g_Player.Stop();
                  titles.Dispose();
                  return false;
                }

                // user choose to display menu
                _forceTitle = false;
              }
            }
          }

          _ireader.ForceTitleBasedPlayback(_forceTitle, (uint)_titleToPlay);

          Log.Debug("BDPlayer: Starting BDReader");
          eventBuffer.Clear();
          hr = _ireader.Start();
          if (hr != 0)
          {

            if (!_forceTitle)
            {
              Log.Error("BDPlayer: Failed to start file:{0} :0x{1:x}", filename, hr);
              continue;
            }

            Log.Error("BDPlayer: Failed to start in title based mode file:{0} :0x{1:x}", filename, hr);
            titles.Dispose();
            return false;
          }
          else
          {
            Log.Info("BDPlayer: BDReader started");
          }

          break;
        }

        titles.Dispose();

        #region Filters

        Log.Info("BDPlayer: Adding filters");

        _vmr9 = new VMR9Util();
        _vmr9.AddVMR9(_graphBuilder);
        _vmr9.Enable(false);

        // Set VideoDecoder and VC1Override before adding filter in graph
        SetVideoDecoder();
        SetVC1Override();

        // Add preferred video filters
        UpdateFilters("Video");

        // Add preferred audio filters
        UpdateFilters("Audio");

        // Let the subtitle engine handle the proper filters
        try
        {
          SubtitleRenderer.GetInstance().AddSubtitleFilter(_graphBuilder);
        }
        catch (Exception e)
        {
          Log.Error(e);
        }
        
        #endregion

        #region PostProcessingEngine Detection

        IPostProcessingEngine postengine = PostProcessingEngine.GetInstance(true);
        if (!postengine.LoadPostProcessing(_graphBuilder))
        {
          PostProcessingEngine.engine = new PostProcessingEngine.DummyEngine();
        }

        #endregion

        #region render BDReader output pins

        Log.Info("BDPlayer: Render BDReader outputs");

        if (_interfaceBDReader != null)
        {
          DirectShowUtil.RenderGraphBuilderOutputPins(_graphBuilder, _interfaceBDReader);
        }
        
        //remove InternalScriptRenderer as it takes subtitle pin
        disableISR();

        //disable Closed Captions!
        disableCC();

        //RemoveAudioR();

        DirectShowUtil.RemoveUnusedFiltersFromGraph(_graphBuilder);

        #endregion

        _mediaCtrl = (IMediaControl)_graphBuilder;
        _mediaEvt = (IMediaEventEx)_graphBuilder;
        _mediaSeeking = (IMediaSeeking)_graphBuilder;

        try
        {
          SubtitleRenderer.GetInstance().SetPlayer(this);
          _dvbSubRenderer = SubtitleRenderer.GetInstance();
        }
        catch (Exception e)
        {
          Log.Error(e);
        }

        _subtitleStream = (Player.TSReaderPlayer.ISubtitleStream)_interfaceBDReader;
        if (_subtitleStream == null)
        {
          Log.Error("BDPlayer: Unable to get ISubtitleStream interface");
        }

        // if only dvb subs are enabled, pass null for ttxtDecoder
        _subSelector = new SubtitleSelector(_subtitleStream, _dvbSubRenderer, null);
        EnableSubtitle = _subtitlesEnabled;

        //Sync Audio Renderer
        SyncAudioRenderer();

        if (!_vmr9.IsVMR9Connected)
        {
          Log.Error("BDPlayer: Failed vmr9 not connected");
          return false;
        }
        _vmr9.SetDeinterlaceMode();
        return true;
      }
      catch (Exception ex)
      {
        Log.Error("BDPlayer: Exception while creating DShow graph {0}", ex.Message);
        return false;
      }
    }
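When a render step such as RenderGraphBuilderOutputPins does not behave as expected, it helps to dump the filters that actually ended up in the graph. The sketch below uses the standard IEnumFilters enumeration; it assumes the DirectShowLib IEnumFilters and FilterInfo definitions and is a diagnostic aid, not part of BDPlayer.

    // Illustrative diagnostic: log every filter currently in the graph.
    private static void DumpGraphFilters(IGraphBuilder graph)
    {
      IEnumFilters enumFilters;
      int hr = graph.EnumFilters(out enumFilters);
      DsError.ThrowExceptionForHR(hr);
      try
      {
        IBaseFilter[] filters = new IBaseFilter[1];
        while (enumFilters.Next(1, filters, IntPtr.Zero) == 0)
        {
          FilterInfo info;
          filters[0].QueryFilterInfo(out info);
          Log.Debug("BDPlayer: graph contains filter: {0}", info.achName);
          if (info.pGraph != null)
          {
            Marshal.ReleaseComObject(info.pGraph);
          }
          Marshal.ReleaseComObject(filters[0]);
        }
      }
      finally
      {
        Marshal.ReleaseComObject(enumFilters);
      }
    }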
Ejemplo n.º 54
0
        DSStreamResultCodes InitWithVideoFile(WTVStreamingVideoRequest strq)
        {
            UsingSBEFilter = false;  // Not using stream buffer

            // Init variables
            IPin[]   pin             = new IPin[1];
            string   dPin            = string.Empty;
            string   sName           = string.Empty;
            string   dName           = string.Empty;
            string   sPin            = string.Empty;
            FileInfo fiInputFile     = new FileInfo(strq.FileName);
            string   txtOutputFNPath = fiInputFile.FullName + ".wmv";

            if (
                (fiInputFile.Extension.ToLowerInvariant().Equals(".wtv")) ||
                (fiInputFile.Extension.ToLowerInvariant().Equals(".dvr-ms"))
                )
            {
                return(DSStreamResultCodes.ErrorInvalidFileType);
            }

            int hr = 0;

            try
            {
                // Get the graphbuilder interface
                SendDebugMessage("Creating Graph Object", 0);
                IGraphBuilder graphbuilder = (IGraphBuilder)currentFilterGraph;

                // Create an ASF writer filter
                SendDebugMessage("Creating ASF Writer", 0);
                WMAsfWriter asf_filter = new WMAsfWriter();
                dc.Add(asf_filter);                            // CHECK FOR ERRORS
                currentOutputFilter = (IBaseFilter)asf_filter; // class variable
                // Add the ASF filter to the graph
                hr = graphbuilder.AddFilter((IBaseFilter)asf_filter, "WM Asf Writer");
                DsError.ThrowExceptionForHR(hr);

                // Set the filename
                SendDebugMessage("Setting filename", 0);
                IFileSinkFilter sinkFilter = (IFileSinkFilter)asf_filter;
                string          destPathFN = fiInputFile.FullName + ".wmv";
                hr = sinkFilter.SetFileName(destPathFN, null);
                DsError.ThrowExceptionForHR(hr);

                // Handy to have an ACM Wrapper filter hanging around for AVI files with MP3 audio
                SendDebugMessage("Adding ACM Wrapper", 0);
                IBaseFilter ACMFilter = FilterDefinition.AddToFilterGraph(FilterDefinitions.Other.ACMWrapperFilter, ref graphbuilder);
                dc.Add(ACMFilter);

                // Render file - then build graph
                SendDebugMessage("Rendering file", 0);
                graphbuilder.RenderFile(fiInputFile.FullName, null);
                SendDebugMessage("Saving graph", 0);
                FilterGraphTools.SaveGraphFile(graphbuilder, "C:\\ProgramData\\RemotePotato\\lastfiltergraph.grf");

                // Are both our ASF pins connected?
                IPin ASFVidInputPin = FilterGraphTools.FindPinByMediaType((IBaseFilter)asf_filter, PinDirection.Input, MediaType.Video, MediaSubType.Null);
                IPin ASFAudInputPin = FilterGraphTools.FindPinByMediaType((IBaseFilter)asf_filter, PinDirection.Input, MediaType.Audio, MediaSubType.Null);

                // Run graph [can use this also to get media type => see, e.g. dvrmstowmvhd by Babgvant]
                SendDebugMessage("Run graph for testing purposes", 0);
                IMediaControl tempControl = (IMediaControl)graphbuilder;
                IMediaEvent   tempEvent   = (IMediaEvent)graphbuilder;
                DsError.ThrowExceptionForHR(tempControl.Pause());
                EventCode pEventCode;
                hr = tempEvent.WaitForCompletion(1000, out pEventCode);

                // Get media type from vid input pin for ASF writer
                AMMediaType pmt = new AMMediaType();
                hr = ASFVidInputPin.ConnectionMediaType(pmt);

                FrameSize SourceFrameSize = null;
                if (pmt.formatType == FormatType.VideoInfo2)
                {
                    // Now graph has been run and stopped we can get the video width and height from the output pin of the main video decoder
                    VideoInfoHeader2 pvih2 = new VideoInfoHeader2();
                    Marshal.PtrToStructure(pmt.formatPtr, pvih2);
                    SourceFrameSize = new FrameSize(pvih2.BmiHeader.Width, pvih2.BmiHeader.Height);
                }
                else if (pmt.formatType == FormatType.VideoInfo)  //{05589f80-c356-11ce-bf01-00aa0055595a}
                {
                    VideoInfoHeader pvih = new VideoInfoHeader();
                    Marshal.PtrToStructure(pmt.formatPtr, pvih);
                    SourceFrameSize = new FrameSize(pvih.BmiHeader.Width, pvih.BmiHeader.Height);
                }
                else
                {
                    SourceFrameSize = new FrameSize(200, 200); // SQUARE
                }
                // Stop graph if necessary
                FilterState pFS;
                hr = tempControl.GetState(1000, out pFS);
                if (pFS != FilterState.Stopped)
                {
                    DsError.ThrowExceptionForHR(tempControl.Stop());
                }
                // Free up media type
                DsUtils.FreeAMMediaType(pmt); pmt = null;

                // (re)Configure the ASF writer with the selected WM Profile
                ConfigureASFWriter(asf_filter, strq, SourceFrameSize);

                // Release pins
                SendDebugMessage("Releasing COM objects (pins)", 0);
                // source
                Marshal.ReleaseComObject(ASFVidInputPin); ASFVidInputPin = null;
                Marshal.ReleaseComObject(ASFAudInputPin); ASFAudInputPin = null;
            }
            catch (Exception ex)
            {
                SendDebugMessageWithException(ex.Message, ex);
                return(DSStreamResultCodes.ErrorExceptionOccurred);
            }

            return(DSStreamResultCodes.OK);
        }
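InitWithStreamBufferFile and InitWithVideoFile repeat the same media-type inspection: check formatType for VideoInfo2 or VideoInfo, marshal the header, and read BmiHeader. That logic can be factored into one helper; the sketch below assumes the DirectShowLib AMMediaType/VideoInfoHeader types used above and returns false instead of the arbitrary fallback sizes.

        // Illustrative sketch: read width/height from a connected pin's media type.
        // Returns false when the format block is neither VIDEOINFOHEADER nor VIDEOINFOHEADER2.
        static bool TryGetFrameSize(IPin connectedPin, out int width, out int height)
        {
            width = 0;
            height = 0;
            AMMediaType pmt = new AMMediaType();
            int hr = connectedPin.ConnectionMediaType(pmt);
            if (hr < 0)
            {
                return false;
            }
            try
            {
                if (pmt.formatType == FormatType.VideoInfo2)
                {
                    VideoInfoHeader2 vih2 = new VideoInfoHeader2();
                    Marshal.PtrToStructure(pmt.formatPtr, vih2);
                    width = vih2.BmiHeader.Width;
                    height = vih2.BmiHeader.Height;
                    return true;
                }
                if (pmt.formatType == FormatType.VideoInfo)
                {
                    VideoInfoHeader vih = new VideoInfoHeader();
                    Marshal.PtrToStructure(pmt.formatPtr, vih);
                    width = vih.BmiHeader.Width;
                    height = vih.BmiHeader.Height;
                    return true;
                }
                return false;
            }
            finally
            {
                DsUtils.FreeAMMediaType(pmt);
            }
        }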
Ejemplo n.º 55
0
        /// <summary>
        /// Creates an instance of the EVR
        /// </summary>
        private IBaseFilter CreateEnhancedVideoRenderer(IGraphBuilder graph)
        {
            var evr = new EnhancedVideoRenderer();
            var filter = evr as IBaseFilter;

            int hr = graph.AddFilter(filter, string.Format("Renderer: {0}", VideoRendererType.EnhancedVideoRenderer));
            DsError.ThrowExceptionForHR(hr);

            /* QueryInterface for the IMFVideoRenderer */
            var videoRenderer = filter as IMFVideoRenderer;

            if (videoRenderer == null)
                throw new Exception("Could not QueryInterface for the IMFVideoRenderer");

            /* Create a new EVR presenter */
            var presenter = EvrPresenter.CreateNew();

            /* Initialize the EVR renderer with the custom video presenter */
            hr = videoRenderer.InitializeRenderer(null, presenter.VideoPresenter);
            DsError.ThrowExceptionForHR(hr);

            var presenterSettings = presenter.VideoPresenter as IEVRPresenterSettings;
            if (presenterSettings == null)
                throw new Exception("Could not QueryInterface for the IEVRPresenterSettings");

            presenterSettings.SetBufferCount(4);

            /* Use our interop hWnd */
            IntPtr handle = HwndHelper.Handle;

            /* QueryInterface the IMFVideoDisplayControl */
            var displayControl = presenter.VideoPresenter as IMFVideoDisplayControl;

            if (displayControl == null)
                throw new Exception("Could not QueryInterface the IMFVideoDisplayControl");

            /* Configure the presenter with our hWnd */
            hr = displayControl.SetVideoWindow(handle);
            DsError.ThrowExceptionForHR(hr);

            var filterConfig = filter as IEVRFilterConfig;

            if(filterConfig != null)
                filterConfig.SetNumberOfStreams(4);

            RegisterCustomAllocator(presenter);

            return filter;
        }
Ejemplo n.º 56
0
        private bool AddCodecs(IGraphBuilder graphBuilder, TranscodeInfo info)
        {
            int hr;

            Log.Info("DVRMS2DIVX: add ffdshow (Divx) codec to graph");
            string monikerXVID = @"@device:sw:{33D9A760-90C8-11D0-BD43-00A0C911CE86}\ffdshow video encoder";

            divxCodec = Marshal.BindToMoniker(monikerXVID) as IBaseFilter;
            if (divxCodec == null)
            {
                Log.Error("DVRMS2DIVX:FAILED:Unable to create Divx MPEG-4 Codec");
                Cleanup();
                return(false);
            }

            hr = graphBuilder.AddFilter(divxCodec, "ffdshow video encoder");
            if (hr != 0)
            {
                Log.Error("DVRMS2DIVX:FAILED:Add DivX MPEG-4 Codec to filtergraph :0x{0:X}", hr);
                Cleanup();
                return(false);
            }


            Log.Info("DVRMS2DIVX: add MPEG3 codec to graph");
            string monikerMPEG3 = @"@device:cm:{33D9A761-90C8-11D0-BD43-00A0C911CE86}\85MPEG Layer-3";

            mp3Codec = Marshal.BindToMoniker(monikerMPEG3) as IBaseFilter;
            if (mp3Codec == null)
            {
                Log.Error("DVRMS2DIVX:FAILED:Unable to create MPEG Layer-3 Codec");
                Cleanup();
                return(false);
            }

            hr = graphBuilder.AddFilter(mp3Codec, "MPEG Layer-3");
            if (hr != 0)
            {
                Log.Error("DVRMS2DIVX:FAILED:Add MPEG Layer-3 to filtergraph :0x{0:X}", hr);
                Cleanup();
                return(false);
            }

            //add filewriter
            Log.Info("DVRMS2DIVX: add FileWriter to graph");
            string monikerFileWrite =
                @"@device:sw:{083863F1-70DE-11D0-BD40-00A0C911CE86}\{8596E5F0-0DA5-11D0-BD21-00A0C911CE86}";
            IBaseFilter fileWriterbase = Marshal.BindToMoniker(monikerFileWrite) as IBaseFilter;

            if (fileWriterbase == null)
            {
                Log.Error("DVRMS2DIVX:FAILED:Unable to create FileWriter");
                Cleanup();
                return(false);
            }


            fileWriterFilter = fileWriterbase as IFileSinkFilter2;
            if (fileWriterFilter == null)
            {
                Log.Error("DVRMS2DIVX:FAILED:Add unable to get IFileSinkFilter for filewriter");
                Cleanup();
                return(false);
            }

            hr = graphBuilder.AddFilter(fileWriterbase, "FileWriter");
            if (hr != 0)
            {
                Log.Error("DVRMS2DIVX:FAILED:Add FileWriter to filtergraph :0x{0:X}", hr);
                Cleanup();
                return(false);
            }


            //set output filename
            //AMMediaType mt = new AMMediaType();
            string outputFileName = System.IO.Path.ChangeExtension(info.file, ".avi");

            Log.Info("DVRMS2DIVX: set output file to :{0}", outputFileName);
            hr = fileWriterFilter.SetFileName(outputFileName, null);
            if (hr != 0)
            {
                Log.Error("DVRMS2DIVX:FAILED:unable to set filename for filewriter :0x{0:X}", hr);
                Cleanup();
                return(false);
            }

            // add avi muxer
            Log.Info("DVRMS2DIVX: add AVI Muxer to graph");
            string monikerAviMuxer =
                @"@device:sw:{083863F1-70DE-11D0-BD40-00A0C911CE86}\{E2510970-F137-11CE-8B67-00AA00A3F1A6}";

            aviMuxer = Marshal.BindToMoniker(monikerAviMuxer) as IBaseFilter;
            if (aviMuxer == null)
            {
                Log.Error("DVRMS2DIVX:FAILED:Unable to create AviMux");
                Cleanup();
                return(false);
            }


            hr = graphBuilder.AddFilter(aviMuxer, "AviMux");
            if (hr != 0)
            {
                Log.Error("DVRMS2DIVX:FAILED:Add AviMux to filtergraph :0x{0:X}", hr);
                Cleanup();
                return(false);
            }


            //connect output of mpeg2 codec to xvid codec
            Log.Info("DVRMS2DIVX: connect mpeg2 video codec->divx codec");
            IPin pinOut, pinIn;

            pinIn = DsFindPin.ByDirection(divxCodec, PinDirection.Input, 0);
            if (pinIn == null)
            {
                Log.Error("DVRMS2DIVX:FAILED:cannot get input pin of divx codec:0x{0:X}", hr);
                Cleanup();
                return(false);
            }
            pinOut = DsFindPin.ByDirection(Mpeg2VideoCodec, PinDirection.Output, 0);
            if (pinOut == null)
            {
                Log.Error("DVRMS2DIVX:FAILED:cannot get output pin of mpeg2 video codec :0x{0:X}", hr);
                Cleanup();
                return(false);
            }

            hr = graphBuilder.Connect(pinOut, pinIn);
            if (hr != 0)
            {
                Log.Error("DVRMS2DIVX:FAILED:unable to connect mpeg2 video codec->divx:0x{0:X}", hr);
                Cleanup();
                return(false);
            }

            //connect output of mpeg2 audio codec to mpeg3 codec
            Log.Info("DVRMS2DIVX: connect mpeg2 audio codec->mp3 codec");
            pinIn = DsFindPin.ByDirection(mp3Codec, PinDirection.Input, 0);
            if (pinIn == null)
            {
                Log.Error("DVRMS2DIVX:FAILED:cannot get input pin of mp3 codec:0x{0:X}", hr);
                Cleanup();
                return(false);
            }
            pinOut = DsFindPin.ByDirection(Mpeg2AudioCodec, PinDirection.Output, 0);
            if (pinOut == null)
            {
                Log.Error("DVRMS2DIVX:FAILED:cannot get output pin of mpeg2 audio codec :0x{0:X}", hr);
                Cleanup();
                return(false);
            }

            hr = graphBuilder.Connect(pinOut, pinIn);
            if (hr != 0)
            {
                Log.Error("DVRMS2DIVX:FAILED:unable to connect mpeg2 audio codec->mpeg3:0x{0:X}", hr);
                Cleanup();
                return(false);
            }


            //connect output of mpeg3 codec to pin#0 of avimux
            Log.Info("DVRMS2DIVX: connect mp3 codec->avimux");
            pinOut = DsFindPin.ByDirection(mp3Codec, PinDirection.Output, 0);
            if (pinOut == null)
            {
                Log.Error("DVRMS2DIVX:FAILED:cannot get input pin of mp3 codec:0x{0:X}", hr);
                Cleanup();
                return(false);
            }
            pinIn = DsFindPin.ByDirection(aviMuxer, PinDirection.Input, 0);
            if (pinIn == null)
            {
                Log.Error("DVRMS2DIVX:FAILED:cannot get output pin of mpeg2 audio codec :0x{0:X}", hr);
                Cleanup();
                return(false);
            }

            hr = graphBuilder.Connect(pinOut, pinIn);
            if (hr != 0)
            {
                Log.Error("DVRMS2DIVX:FAILED:unable to connect mpeg3 codec->avimux:0x{0:X}", hr);
                Cleanup();
                return(false);
            }

            //connect output of xvid codec to pin#1 of avimux
            Log.Info("DVRMS2DIVX: connect divx codec->avimux");
            pinOut = DsFindPin.ByDirection(divxCodec, PinDirection.Output, 0);
            if (pinOut == null)
            {
                Log.Error("DVRMS2DIVX:FAILED:cannot get input pin of mp3 codec:0x{0:X}", hr);
                Cleanup();
                return(false);
            }
            pinIn = DsFindPin.ByDirection(aviMuxer, PinDirection.Input, 1);
            if (pinIn == null)
            {
                Log.Error("DVRMS2DIVX:FAILED:cannot get output#1 pin of avimux :0x{0:X}", hr);
                Cleanup();
                return(false);
            }

            hr = graphBuilder.Connect(pinOut, pinIn);
            if (hr != 0)
            {
                Log.Error("DVRMS2DIVX:FAILED:unable to connect divx codec->avimux:0x{0:X}", hr);
                Cleanup();
                return(false);
            }


            //connect avi mux out->filewriter in
            Log.Info("DVRMS2DIVX: connect avimux->filewriter");
            pinOut = DsFindPin.ByDirection(aviMuxer, PinDirection.Output, 0);
            if (pinOut == null)
            {
                Log.Error("DVRMS2DIVX:FAILED:cannot get output pin of avimux:0x{0:X}", hr);
                Cleanup();
                return(false);
            }

            pinIn = DsFindPin.ByDirection(fileWriterbase, PinDirection.Input, 0);
            if (pinIn == null)
            {
                Log.Error("DVRMS2DIVX:FAILED:cannot get input pin of Filewriter :0x{0:X}", hr);
                Cleanup();
                return(false);
            }
            hr = graphBuilder.Connect(pinOut, pinIn);
            if (hr != 0)
            {
                Log.Error("DVRMS2DIVX:FAILED:connect muxer->filewriter :0x{0:X}", hr);
                Cleanup();
                return(false);
            }
            return(true);
        }
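AddCodecs repeats the same find-pin-and-connect sequence for every link in the chain. The repetition can be folded into one helper; the sketch below assumes DirectShowLib's DsFindPin and intelligent connect via IGraphBuilder.Connect, and the helper name is illustrative.

        // Illustrative sketch: connect output pin #outIndex of 'upstream' to input pin #inIndex
        // of 'downstream' via intelligent connect, releasing the pin interfaces afterwards.
        private static int ConnectPins(IGraphBuilder graphBuilder, IBaseFilter upstream, int outIndex,
                                       IBaseFilter downstream, int inIndex)
        {
            IPin pinOut = DsFindPin.ByDirection(upstream, PinDirection.Output, outIndex);
            IPin pinIn = DsFindPin.ByDirection(downstream, PinDirection.Input, inIndex);
            if (pinOut == null || pinIn == null)
            {
                if (pinOut != null) Marshal.ReleaseComObject(pinOut);
                if (pinIn != null) Marshal.ReleaseComObject(pinIn);
                return unchecked((int)0x80004005); // E_FAIL: requested pin not found
            }
            try
            {
                return graphBuilder.Connect(pinOut, pinIn);
            }
            finally
            {
                Marshal.ReleaseComObject(pinOut);
                Marshal.ReleaseComObject(pinIn);
            }
        }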
Ejemplo n.º 57
0
    public bool Transcode(TranscodeInfo info, MediaPortal.Core.Transcoding.VideoFormat format,
                          MediaPortal.Core.Transcoding.Quality quality, Standard standard)
    {
      if (!Supports(format)) return false;
      string ext = System.IO.Path.GetExtension(info.file);
      if (ext.ToLower() != ".dvr-ms" && ext.ToLower() != ".sbe") return false;

      //Type comtype = null;
      //object comobj = null;
      try
      {
        Log.Info("DVR2MPG: create graph");
        graphBuilder = (IGraphBuilder)new FilterGraph();

        _rotEntry = new DsROTEntry((IFilterGraph)graphBuilder);

        Log.Info("DVR2MPG: add streambuffersource");
        bufferSource = (IStreamBufferSource)new StreamBufferSource();


        IBaseFilter filter = (IBaseFilter)bufferSource;
        graphBuilder.AddFilter(filter, "SBE SOURCE");

        Log.Info("DVR2MPG: load file:{0}", info.file);
        IFileSourceFilter fileSource = (IFileSourceFilter)bufferSource;
        int hr = fileSource.Load(info.file, null);


        Log.Info("DVR2MPG: Add Cyberlink MPEG2 multiplexer to graph");
        string monikerPowerDvdMuxer =
          @"@device:sw:{083863F1-70DE-11D0-BD40-00A0C911CE86}\{7F2BBEAF-E11C-4D39-90E8-938FB5A86045}";
        powerDvdMuxer = Marshal.BindToMoniker(monikerPowerDvdMuxer) as IBaseFilter;
        if (powerDvdMuxer == null)
        {
          Log.Warn("DVR2MPG: FAILED:Unable to create Cyberlink MPEG Muxer (PowerDVD)");
          Cleanup();
          return false;
        }

        hr = graphBuilder.AddFilter(powerDvdMuxer, "PDR MPEG Muxer");
        if (hr != 0)
        {
          Log.Warn("DVR2MPG: FAILED:Add Cyberlink MPEG Muxer to filtergraph :0x{0:X}", hr);
          Cleanup();
          return false;
        }

        //add filewriter 
        Log.Info("DVR2MPG: Add FileWriter to graph");
        string monikerFileWrite =
          @"@device:sw:{083863F1-70DE-11D0-BD40-00A0C911CE86}\{3E8868CB-5FE8-402C-AA90-CB1AC6AE3240}";
        IBaseFilter fileWriterbase = Marshal.BindToMoniker(monikerFileWrite) as IBaseFilter;
        if (fileWriterbase == null)
        {
          Log.Warn("DVR2MPG: FAILED:Unable to create FileWriter");
          Cleanup();
          return false;
        }


        fileWriterFilter = fileWriterbase as IFileSinkFilter;
        if (fileWriterFilter == null)
        {
          Log.Warn("DVR2MPG: FAILED:Add unable to get IFileSinkFilter for filewriter");
          Cleanup();
          return false;
        }

        hr = graphBuilder.AddFilter(fileWriterbase, "FileWriter");
        if (hr != 0)
        {
          Log.Warn("DVR2MPG: FAILED:Add FileWriter to filtergraph :0x{0:X}", hr);
          Cleanup();
          return false;
        }


        //connect output #0 of streambuffer source->powerdvd audio in
        //connect output #1 of streambuffer source->powerdvd video in
        Log.Info("DVR2MPG: connect streambuffer->multiplexer");
        IPin pinOut0, pinOut1;
        IPin pinIn0, pinIn1;
        pinOut0 = DsFindPin.ByDirection((IBaseFilter)bufferSource, PinDirection.Output, 0);
        pinOut1 = DsFindPin.ByDirection((IBaseFilter)bufferSource, PinDirection.Output, 1);

        pinIn0 = DsFindPin.ByDirection(powerDvdMuxer, PinDirection.Input, 0);
        pinIn1 = DsFindPin.ByDirection(powerDvdMuxer, PinDirection.Input, 1);
        if (pinOut0 == null || pinOut1 == null || pinIn0 == null || pinIn1 == null)
        {
          Log.Warn("DVR2MPG: FAILED:unable to get pins of muxer&source");
          Cleanup();
          return false;
        }

        bool usingAc3 = false;
        AMMediaType amAudio = new AMMediaType();
        amAudio.majorType = MediaType.Audio;
        amAudio.subType = MediaSubType.Mpeg2Audio;
        hr = pinOut0.Connect(pinIn1, amAudio);
        if (hr != 0)
        {
          amAudio.subType = MediaSubType.DolbyAC3;
          hr = pinOut0.Connect(pinIn1, amAudio);
          usingAc3 = true;
        }
        if (hr != 0)
        {
          Log.Warn("DVR2MPG: FAILED: unable to connect audio pins: 0x{0:X}", hr);
          Cleanup();
          return false;
        }

        if (usingAc3)
          Log.Info("DVR2MPG: using AC3 audio");
        else
          Log.Info("DVR2MPG: using MPEG audio");

        AMMediaType amVideo = new AMMediaType();
        amVideo.majorType = MediaType.Video;
        amVideo.subType = MediaSubType.Mpeg2Video;
        hr = pinOut1.Connect(pinIn0, amVideo);
        if (hr != 0)
        {
          Log.Warn("DVR2MPG: FAILED: unable to connect video pins: 0x{0:X}", hr);
          Cleanup();
          return false;
        }


        //connect output of powerdvd muxer->input of filewriter
        Log.Info("DVR2MPG: connect multiplexer->filewriter");
        IPin pinOut, pinIn;
        pinOut = DsFindPin.ByDirection(powerDvdMuxer, PinDirection.Output, 0);
        if (pinOut == null)
        {
          Log.Warn("DVR2MPG: FAILED:cannot get output pin of Cyberlink MPEG muxer :0x{0:X}", hr);
          Cleanup();
          return false;
        }
        pinIn = DsFindPin.ByDirection(fileWriterbase, PinDirection.Input, 0);
        if (pinIn == null)
        {
          Log.Warn("DVR2MPG: FAILED:cannot get input pin of Filewriter :0x{0:X}", hr);
          Cleanup();
          return false;
        }
        AMMediaType mt = new AMMediaType();
        hr = pinOut.Connect(pinIn, mt);
        if (hr != 0)
        {
          Log.Warn("DVR2MPG: FAILED:connect muxer->filewriter :0x{0:X}", hr);
          Cleanup();
          return false;
        }

        //set output filename
        string outputFileName = System.IO.Path.ChangeExtension(info.file, ".mpg");
        Log.Info("DVR2MPG: set output file to :{0}", outputFileName);
        mt.majorType = MediaType.Stream;
        mt.subType = MediaSubTypeEx.MPEG2;

        hr = fileWriterFilter.SetFileName(outputFileName, mt);
        if (hr != 0)
        {
          Log.Warn("DVR2MPG: FAILED:unable to set filename for filewriter :0x{0:X}", hr);
          Cleanup();
          return false;
        }
        mediaControl = graphBuilder as IMediaControl;
        mediaSeeking = graphBuilder as IMediaSeeking;
        mediaEvt = graphBuilder as IMediaEventEx;
        Log.Info("DVR2MPG: start transcoding");
        hr = mediaControl.Run();
        if (hr != 0)
        {
          Log.Warn("DVR2MPG: FAILED:unable to start graph :0x{0:X}", hr);
          Cleanup();
          return false;
        }
      }
      catch (Exception ex)
      {
        Log.Error("DVR2MPG: Unable create graph: {0}", ex.Message);
        Cleanup();
        return false;
      }
      return true;
    }
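After mediaControl.Run() the transcode proceeds asynchronously; completion is normally detected through the graph's event interface, and progress can be estimated by polling IMediaSeeking. Here is a hedged sketch of such a monitor loop, assuming the interfaces already obtained in Transcode; the method is illustrative and not part of DVR2MPG.

      // Illustrative sketch: poll transcode progress until the graph signals completion.
      private void WaitForTranscodeToFinish(IMediaSeeking seeking, IMediaEvent events)
      {
        long duration;
        seeking.GetDuration(out duration);

        EventCode code;
        // WaitForCompletion returns E_ABORT while the timeout keeps expiring; S_OK means
        // the graph reported completion (or an abort event).
        while (events.WaitForCompletion(500, out code) != 0)
        {
          long current;
          seeking.GetCurrentPosition(out current);
          if (duration > 0)
          {
            Log.Info("DVR2MPG: transcoding progress {0}%", (current * 100) / duration);
          }
        }
        Log.Info("DVR2MPG: transcoding finished, event code {0}", code);
      }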
Ejemplo n.º 58
0
        public bool Transcode(TranscodeInfo info, MediaPortal.Core.Transcoding.VideoFormat format,
                              MediaPortal.Core.Transcoding.Quality quality, Standard standard)
        {
            if (!Supports(format))
            {
                return(false);
            }
            string ext = System.IO.Path.GetExtension(info.file);

            if (ext.ToLowerInvariant() != ".dvr-ms" && ext.ToLowerInvariant() != ".sbe")
            {
                Log.Info("DVRMS2DIVX: wrong file format");
                return(false);
            }

            //disable xvid status window while encoding

            /*  try
             *                        {
             *                                using (RegistryKey subkey = Registry.CurrentUser.OpenSubKey(@"Software\GNU\XviD", true))
             *                                {
             *                                        if (subkey != null)
             *                                        {
             *                                                Int32 uivalue = 0;
             *                                                subkey.SetValue("display_status", (Int32)uivalue);
             *                                                subkey.SetValue("debug", (Int32)uivalue);
             *                                                subkey.SetValue("bitrate", (Int32)bitrate);
             *
             *                                                uivalue = 1;
             *                                                subkey.SetValue("interlacing", (Int32)uivalue);
             *                                        }
             *                                }
             *                        }
             *                        catch (Exception ex)
             *                        {
             *                        }*/
            //Type comtype = null;
            //object comobj = null;
            try
            {
                graphBuilder = (IGraphBuilder) new FilterGraph();

                _rotEntry = new DsROTEntry((IFilterGraph)graphBuilder);

                Log.Info("DVRMS2DIVX: add filesource");
                bufferSource = (IStreamBufferSource) new StreamBufferSource();

                IBaseFilter filter = (IBaseFilter)bufferSource;
                graphBuilder.AddFilter(filter, "SBE SOURCE");
                IFileSourceFilter fileSource = (IFileSourceFilter)bufferSource;
                Log.Info("DVRMS2DIVX: load file:{0}", info.file);
                int hr = fileSource.Load(info.file, null);


                /*string strDemuxerMoniker = @"@device:sw:{083863F1-70DE-11D0-BD40-00A0C911CE86}\{AFB6C280-2C41-11D3-8A60-0000F81E0E4A}";
                 *
                 *                      mpegDemuxer = Marshal.BindToMoniker(strDemuxerMoniker) as IBaseFilter;
                 *                      if (mpegDemuxer == null)
                 *                      {
                 *                                      Log.Error("DVRMS2DIVX:FAILED:unable to add mpeg2 demuxer");
                 *                                      Cleanup();
                 *                                      return false;
                 *                      }
                 *                      hr = graphBuilder.AddFilter(mpegDemuxer, "MPEG-2 Demultiplexer");
                 *                      if (hr != 0)
                 *                      {
                 *                                      Log.Error("DVRMS2DIVX:FAILED:Add mpeg2 demuxer to filtergraph :0x{0:X}", hr);
                 *                                      Cleanup();
                 *                                      return false;
                 *                      }*/

                //add mpeg2 audio/video codecs
                string strVideoCodecMoniker =
                    @"@device:sw:{083863F1-70DE-11D0-BD40-00A0C911CE86}\{39F498AF-1A09-4275-B193-673B0BA3D478}";
                string strAudioCodec = "MPC - MPA Decoder Filter";
                Log.Info("DVRMS2DIVX: add MPV mpeg2 video decoder");
                Mpeg2VideoCodec = Marshal.BindToMoniker(strVideoCodecMoniker) as IBaseFilter;
                if (Mpeg2VideoCodec == null)
                {
                    Log.Error("DVRMS2DIVX:FAILED:unable to add MPV mpeg2 video decoder");
                    Cleanup();
                    return(false);
                }
                hr = graphBuilder.AddFilter(Mpeg2VideoCodec, "MPC - MPEG-2 Video Decoder (Gabest)");
                if (hr != 0)
                {
                    Log.Error("DVRMS2DIVX:FAILED:Add MPV mpeg2 video  to filtergraph :0x{0:X}", hr);
                    Cleanup();
                    return(false);
                }

                Log.Info("DVRMS2DIVX: add MPA mpeg2 audio codec:{0}", strAudioCodec);
                Mpeg2AudioCodec = DirectShowUtil.AddFilterToGraph(graphBuilder, strAudioCodec);
                if (Mpeg2AudioCodec == null)
                {
                    Log.Error("DVRMS2DIVX:FAILED:unable to add MPV mpeg2 audio codec");
                    Cleanup();
                    return(false);
                }

                //connect output #0 of streambuffer source -> mpeg2 audio decoder input
                //connect output #1 of streambuffer source -> mpeg2 video decoder input
                Log.Info("DVRMS2DIVX: connect streambuffer source->mpeg audio/video decoders");
                IPin pinOut0, pinOut1;
                IPin pinIn0, pinIn1;
                pinOut0 = DsFindPin.ByDirection((IBaseFilter)bufferSource, PinDirection.Output, 0); //audio
                pinOut1 = DsFindPin.ByDirection((IBaseFilter)bufferSource, PinDirection.Output, 1); //video
                if (pinOut0 == null || pinOut1 == null)
                {
                    Log.Error("DVRMS2DIVX:FAILED:unable to get pins of source");
                    Cleanup();
                    return(false);
                }

                pinIn0 = DsFindPin.ByDirection(Mpeg2VideoCodec, PinDirection.Input, 0); //video
                pinIn1 = DsFindPin.ByDirection(Mpeg2AudioCodec, PinDirection.Input, 0); //audio
                if (pinIn0 == null || pinIn1 == null)
                {
                    Log.Error("DVRMS2DIVX:FAILED:unable to get pins of mpeg2 video/audio codec");
                    Cleanup();
                    return(false);
                }

                hr = graphBuilder.Connect(pinOut0, pinIn1);
                if (hr != 0)
                {
                    Log.Error("DVRMS2DIVX:FAILED:unable to connect audio pins :0x{0:X}", hr);
                    Cleanup();
                    return(false);
                }


                hr = graphBuilder.Connect(pinOut1, pinIn0);
                if (hr != 0)
                {
                    Log.Error("DVRMS2DIVX:FAILED:unable to connect video pins :0x{0:X}", hr);
                    Cleanup();
                    return(false);
                }
                if (!AddCodecs(graphBuilder, info))
                {
                    return(false);
                }

                //				hr=(graphBuilder as IMediaFilter).SetSyncSource(null);
                //				if (hr!=0)
                //					Log.Error("DVRMS2DIVX:FAILED:to SetSyncSource :0x{0:X}",hr);
                mediaControl = graphBuilder as IMediaControl;
                mediaSeeking = bufferSource as IStreamBufferMediaSeeking;
                mediaEvt     = graphBuilder as IMediaEventEx;
                mediaPos     = graphBuilder as IMediaPosition;

                //get file duration: seek far past the end; the position reported back is clipped to the real duration
                Log.Info("DVRMS2DIVX: Get duration of movie");
                long lTime = 5 * 60 * 60; // 5 hours in seconds
                lTime *= 10000000;        // convert to 100-nanosecond units
                long pStop = 0;
                hr = mediaSeeking.SetPositions(new DsLong(lTime), AMSeekingSeekingFlags.AbsolutePositioning, new DsLong(pStop),
                                               AMSeekingSeekingFlags.NoPositioning);
                if (hr == 0)
                {
                    long lStreamPos;
                    mediaSeeking.GetCurrentPosition(out lStreamPos); // stream position
                    m_dDuration = lStreamPos;
                    lTime       = 0;
                    mediaSeeking.SetPositions(new DsLong(lTime), AMSeekingSeekingFlags.AbsolutePositioning, new DsLong(pStop),
                                              AMSeekingSeekingFlags.NoPositioning);
                }
                double duration = m_dDuration / 10000000d;
                Log.Info("DVRMS2DIVX: movie duration:{0}", MediaPortal.Util.Utils.SecondsToHMSString((int)duration));

                //				hr=(graphBuilder as IMediaFilter).SetSyncSource(null);
                //				if (hr!=0)
                //					Log.Error("DVRMS2DIVX:FAILED:to SetSyncSource :0x{0:X}",hr);
                hr = mediaControl.Run();
                if (hr != 0)
                {
                    Log.Error("DVRMS2DIVX:FAILED:unable to start graph :0x{0:X}", hr);
                    Cleanup();
                    return(false);
                }
                // let the graph run until roughly 2 seconds of stream time have been processed, or ~2 seconds of wall time elapse
                int maxCount = 20;
                while (true)
                {
                    long lCurrent;
                    mediaSeeking.GetCurrentPosition(out lCurrent);
                    double dpos = (double)lCurrent;
                    dpos /= 10000000d;
                    System.Threading.Thread.Sleep(100);
                    if (dpos >= 2.0d)
                    {
                        break;
                    }
                    maxCount--;
                    if (maxCount <= 0)
                    {
                        break;
                    }
                }

                mediaControl.Stop();
                FilterState state;
                mediaControl.GetState(500, out state);
                GC.Collect();
                GC.Collect();
                GC.Collect();
                GC.WaitForPendingFinalizers();
                // remove the muxer, compressors and file writer used during the warm-up run; fresh ones are added below
                graphBuilder.RemoveFilter(aviMuxer);
                graphBuilder.RemoveFilter(divxCodec);
                graphBuilder.RemoveFilter(mp3Codec);
                graphBuilder.RemoveFilter((IBaseFilter)fileWriterFilter);
                if (!AddCodecs(graphBuilder, info))
                {
                    return(false);
                }

                //				hr=(graphBuilder as IMediaFilter).SetSyncSource(null);
                //			if (hr!=0)
                //					Log.Error("DVRMS2DIVX:FAILED:to SetSyncSource :0x{0:X}",hr);

                Log.Info("DVRMS2DIVX: start transcoding");
                hr = mediaControl.Run();
                if (hr != 0)
                {
                    Log.Error("DVRMS2DIVX:FAILED:unable to start graph :0x{0:X}", hr);
                    Cleanup();
                    return(false);
                }
            }
            catch (Exception ex)
            {
                Log.Error("DVRMS2DIVX:Unable create graph: {0}", ex.Message);
                Cleanup();
                return(false);
            }
            return(true);
        }
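Note that the method above only starts the transcode graph: it returns as soon as IMediaControl.Run() succeeds, and completion has to be detected by the caller. As a minimal sketch of that last step, assuming the DirectShowLib interfaces already used above, a caller could block on the graph's IMediaEvent until it reports EC_Complete. The helper name WaitForTranscodeToFinish is hypothetical and not part of the original code.

using DirectShowLib;

static class TranscodeHelper
{
    // Hypothetical helper: block until the running transcode graph reports completion,
    // or until timeoutMs elapses. Assumes the graph was built and started as in the example above.
    public static bool WaitForTranscodeToFinish(IGraphBuilder graphBuilder, int timeoutMs)
    {
        var mediaEvent = (IMediaEvent)graphBuilder;

        EventCode code;
        int hr = mediaEvent.WaitForCompletion(timeoutMs, out code);
        if (hr == unchecked((int)0x80004004))   // E_ABORT: the timeout expired before the graph finished
            return false;
        DsError.ThrowExceptionForHR(hr);

        // EC_Complete is raised once every stream in the graph has reached end-of-stream.
        return code == EventCode.Complete;
    }
}

Polling mediaSeeking.GetCurrentPosition against the duration obtained earlier would give the same information; the event-based wait simply avoids the sleep loop.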
Ejemplo n.º 59
0
        /// <summary>
        /// Connect the player.
        /// </summary>
        /// <param name="filename">Path of the media file to open.</param>
        private void Player_Connect(string filename)
        {
            #region Create the graph builder:
            {
                Graph = (IGraphBuilder)Axi.CoCreateInstance(GUID.CLSID_FilterGraph);
                if (Graph == null)
                    throw new System.IO.IOException("Failed to create a GraphBuilder.");

                Builder = (ICaptureGraphBuilder2)Axi.CoCreateInstance(GUID.CLSID_CaptureGraphBuilder2);
                if (Builder == null)
                    throw new System.IO.IOException("Failed to create a GraphBuilder.");
                Builder.SetFiltergraph(Graph);
            }
            #endregion

            #region Video input: create the source filter.
            {
                Graph.AddSourceFilter(filename, "VideoSource", ref VideoSource);
                if (VideoSource == null)
                    throw new System.IO.IOException("Failed to create a VideoSource.");
            }
            #endregion

            #region Video capture: create the sample grabber.
            {
                VideoGrabber = (IBaseFilter)Axi.CoCreateInstance(GUID.CLSID_SampleGrabber);
                if (VideoGrabber == null)
                    throw new System.IO.IOException("Failed to create a VideoGrabber.");
                Graph.AddFilter(VideoGrabber, "VideoGrabber");

                // Configure the input format of the sample grabber filter.
                // SetMediaType specifies the media type the grabber requires.
                //   http://msdn.microsoft.com/ja-jp/library/cc369546.aspx
                // * Not every member of the AM_MEDIA_TYPE structure needs to be set.
                // * By default the sample grabber has no preferred media type.
                // * To make sure the sample grabber connects to the correct filter, call this method before building the filter graph.
                // majortype: http://msdn.microsoft.com/ja-jp/library/cc370108.aspx
                // subtype  : http://msdn.microsoft.com/ja-jp/library/cc371040.aspx
                {
                    var grabber = (ISampleGrabber)VideoGrabber;

                    var mt = new AM_MEDIA_TYPE();
                    mt.majortype = new Guid(GUID.MEDIATYPE_Video);
                    mt.subtype = new Guid(GUID.MEDIASUBTYPE_RGB24);
                    mt.formattype = new Guid(GUID.FORMAT_VideoInfo);
                    grabber.SetMediaType(mt);
                    grabber.SetBufferSamples(false);            // do not buffer (copy) samples.
                    grabber.SetOneShot(false);                  // do not stop after one sample (one-shot off).
                    //grabber.SetCallback(VideoGrabberCB, 0);   // 0: invoke the SampleCB method.
                    grabber.SetCallback(VideoGrabberCB, 1);     // 1: invoke the BufferCB method.
                }
            }
            #endregion

            #region Audio capture: create the sample grabber.
            {
                AudioGrabber = (IBaseFilter)Axi.CoCreateInstance(GUID.CLSID_SampleGrabber);
                if (AudioGrabber == null)
                    throw new System.IO.IOException("Failed to create a AudioGrabber.");
                Graph.AddFilter(AudioGrabber, "AudioGrabber");

                // Configure the input format of the sample grabber filter.
                // SetMediaType specifies the media type the grabber requires.
                //   http://msdn.microsoft.com/ja-jp/library/cc369546.aspx
                // * Not every member of the AM_MEDIA_TYPE structure needs to be set.
                // * By default the sample grabber has no preferred media type.
                // * To make sure the sample grabber connects to the correct filter, call this method before building the filter graph.
                // majortype: http://msdn.microsoft.com/ja-jp/library/cc370108.aspx
                // subtype  : http://msdn.microsoft.com/ja-jp/library/cc371040.aspx
                {
                    var grabber = (ISampleGrabber)AudioGrabber;

                    var mt = new AM_MEDIA_TYPE();
                    mt.majortype = new Guid(GUID.MEDIATYPE_Audio);
                    mt.subtype = new Guid(GUID.MEDIASUBTYPE_PCM);
                    mt.formattype = new Guid(GUID.FORMAT_WaveFormatEx);
                    grabber.SetMediaType(mt);
                    grabber.SetBufferSamples(false);            // do not buffer (copy) samples.
                    grabber.SetOneShot(false);                  // do not stop after one sample (one-shot off).
                    //grabber.SetCallback(AudioGrabberCB, 0);   // 0: invoke the SampleCB method.
                    grabber.SetCallback(AudioGrabberCB, 1);     // 1: invoke the BufferCB method.
                }
            }
            #endregion

            #region Video output: create the renderer.
            {
                VideoRenderer = (IBaseFilter)Axi.CoCreateInstance(GUID.CLSID_NullRenderer);
                if (VideoRenderer == null)
                    throw new System.IO.IOException("Failed to create a VideoRenderer.");
                Graph.AddFilter(VideoRenderer, "VideoRenderer");
            }
            #endregion

            #region Audio output: create the renderer.
            {
                AudioRenderer = (IBaseFilter)Axi.CoCreateInstance(GUID.CLSID_NullRenderer);
                if (AudioRenderer == null)
                    throw new System.IO.IOException("Failed to create a AudioRenderer.");
                Graph.AddFilter(AudioRenderer, "AudioRenderer");
            }
            #endregion

            #region Connect the filters:
            if (filename.EndsWith(".avi", StringComparison.InvariantCultureIgnoreCase))
            {
                #region Initialization for AVI files:
                unsafe
                {
                    HRESULT hr;

                    // Add the AVI splitter:
                    Splitter = (IBaseFilter)Axi.CoCreateInstance(GUID.CLSID_AviSplitter);
                    if (Splitter == null)
                        throw new System.IO.IOException("Failed to create a Splitter.");
                    Graph.AddFilter(Splitter, "Splitter");

                    // Connect the filters: (AVI splitter)
                    hr = (HRESULT)Builder.RenderStream(IntPtr.Zero, IntPtr.Zero, VideoSource, null, Splitter);
                    if (hr < HRESULT.S_OK)
                        throw new CxDSException(hr);

                    // Connect the filters: (video input)
                    var mediatype_video = new Guid(GUID.MEDIATYPE_Video);
                    hr = (HRESULT)Builder.RenderStream(IntPtr.Zero, new IntPtr(&mediatype_video), Splitter, VideoGrabber, VideoRenderer);
                    if (hr < HRESULT.S_OK)
                        throw new CxDSException(hr);

                    // Connect the filters: (audio input) - some files have no audio track.
                    try
                    {
                        var mediatype_audio = new Guid(GUID.MEDIATYPE_Audio);
                        hr = (HRESULT)Builder.RenderStream(IntPtr.Zero, new IntPtr(&mediatype_audio), Splitter, AudioGrabber, AudioRenderer);
                    }
                    catch (System.Exception ex)
                    {
                        Debug.WriteLine(ex.StackTrace);
                    }
                }
                #endregion
            }
            else if (
                filename.EndsWith(".asf", StringComparison.InvariantCultureIgnoreCase) ||
                filename.EndsWith(".wmv", StringComparison.InvariantCultureIgnoreCase))
            {
                #region Initialization for WMV files:
                unsafe
                {
                    HRESULT hr;

                    // Connect the filters: (video input)
                    var mediatype_video = new Guid(GUID.MEDIATYPE_Video);
                    hr = (HRESULT)Builder.RenderStream(IntPtr.Zero, new IntPtr(&mediatype_video), VideoSource, VideoGrabber, VideoRenderer);
                    if (hr < HRESULT.S_OK)
                        throw new CxDSException(hr);

                    // Connect the filters: (audio input)
                    var mediatype_audio = new Guid(GUID.MEDIATYPE_Audio);
                    hr = (HRESULT)Builder.RenderStream(IntPtr.Zero, new IntPtr(&mediatype_audio), VideoSource, AudioGrabber, AudioRenderer);
                    if (hr < HRESULT.S_OK)
                        throw new CxDSException(hr);
                }
                #endregion
            }
            #endregion

            // For synchronization: register the sample grabber event handler:
            VideoGrabberCB.Enable = true;
            VideoGrabberCB.Notify += VideoGrabberCB_Notify;
            VideoInfoHeader = Axi.GetVideoInfo((ISampleGrabber)VideoGrabber);
        }
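Because both grabbers are registered with SetCallback(..., 1), decoded frames arrive through the BufferCB method of the callback objects (VideoGrabberCB / AudioGrabberCB), whose implementation is not shown here. Below is a minimal sketch of such a sink written against DirectShowLib's ISampleGrabberCB purely for illustration; the example itself uses its own wrapper types, and the names RawFrameCallback and FrameArrived are made up.

using System;
using System.Runtime.InteropServices;
using DirectShowLib;

// Hypothetical callback sink: receives each decoded RGB24 frame from the sample grabber.
class RawFrameCallback : ISampleGrabberCB
{
    public event Action<double, byte[]> FrameArrived;    // (stream time in seconds, pixel data)

    // Used only when SetCallback(..., 0) selects SampleCB; not needed here.
    public int SampleCB(double sampleTime, IMediaSample pSample)
    {
        return 0;
    }

    // Used when SetCallback(..., 1) selects BufferCB, as in the example above.
    public int BufferCB(double sampleTime, IntPtr pBuffer, int bufferLen)
    {
        var pixels = new byte[bufferLen];
        Marshal.Copy(pBuffer, pixels, 0, bufferLen);      // copy out: the buffer is only valid during this call
        FrameArrived?.Invoke(sampleTime, pixels);
        return 0;
    }
}

The buffer passed to BufferCB is only valid for the duration of the call, which is why the sketch copies it before handing it to subscribers; the callback objects used above presumably do the same before raising their Notify event.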
Ejemplo n.º 60
0
        // Thread entry point
        public void WorkerThread()
        {
            // grabber
            Grabber grabber = new Grabber(this);

            // objects
            object graphObj   = null;
            object sourceObj  = null;
            object grabberObj = null;

            // interfaces
            IGraphBuilder  graph       = null;
            IBaseFilter    sourceBase  = null;
            IBaseFilter    grabberBase = null;
            ISampleGrabber sg          = null;
            IMediaControl  mc          = null;

            try
            {
                // Get type for filter graph
                Type srvType = Type.GetTypeFromCLSID(Clsid.FilterGraph);
                if (srvType == null)
                {
                    throw new ApplicationException("Failed creating filter graph");
                }

                // create filter graph
                graphObj = Activator.CreateInstance(srvType);
                graph    = (IGraphBuilder)graphObj;
                int n = 0;

                // create bind context
                if (Win32.CreateBindCtx(0, out UCOMIBindCtx bindCtx) == 0)
                {
                    // ----
                    // convert moniker's string to a moniker
                    if (Win32.MkParseDisplayName(bindCtx, source, ref n, out UCOMIMoniker moniker) == 0)
                    {
                        // get device base filter
                        Guid filterId = typeof(IBaseFilter).GUID;
                        moniker.BindToObject(null, null, ref filterId, out sourceObj);

                        Marshal.ReleaseComObject(moniker);
                        moniker = null;
                    }
                    Marshal.ReleaseComObject(bindCtx);
                    bindCtx = null;
                }
                // ----

                if (sourceObj == null)
                {
                    throw new ApplicationException("Failed creating device object for moniker");
                }

                sourceBase = (IBaseFilter)sourceObj;

                // Get type for sample grabber
                srvType = Type.GetTypeFromCLSID(Clsid.SampleGrabber);
                if (srvType == null)
                {
                    throw new ApplicationException("Failed creating sample grabber");
                }

                // create sample grabber
                grabberObj  = Activator.CreateInstance(srvType);
                sg          = (ISampleGrabber)grabberObj;
                grabberBase = (IBaseFilter)grabberObj;

                // add source filter to graph
                graph.AddFilter(sourceBase, "source");
                graph.AddFilter(grabberBase, "grabber");

                // set media type
                AMMediaType mt = new AMMediaType
                {
                    majorType = MediaType.Video,
                    subType   = MediaSubType.RGB24
                };

                sg.SetMediaType(mt);

                // connect pins
                if (graph.Connect(DSTools.GetOutPin(sourceBase, 0), DSTools.GetInPin(grabberBase, 0)) < 0)
                {
                    throw new ApplicationException("Failed connecting filters");
                }

                // get media type
                if (sg.GetConnectedMediaType(mt) == 0)
                {
                    VideoInfoHeader vih = (VideoInfoHeader)Marshal.PtrToStructure(mt.formatPtr, typeof(VideoInfoHeader));

                    System.Diagnostics.Debug.WriteLine("width = " + vih.BmiHeader.Width + ", height = " + vih.BmiHeader.Height);
                    grabber.Width  = vih.BmiHeader.Width;
                    grabber.Height = vih.BmiHeader.Height;
                    mt.Dispose();
                }

                // render
                graph.Render(DSTools.GetOutPin(grabberBase, 0));

                // configure how the sample grabber delivers frames
                sg.SetBufferSamples(false);
                sg.SetOneShot(false);
                sg.SetCallback(grabber, 1);

                // window
                IVideoWindow win = (IVideoWindow)graphObj;
                win.put_AutoShow(false);
                win = null;


                // get media control
                mc = (IMediaControl)graphObj;

                // run
                mc.Run();

                while (!stopEvent.WaitOne(0, true))
                {
                    Thread.Sleep(100);
                }
                mc.StopWhenReady();
            }
            // catch any exceptions
            catch (Exception e)
            {
                System.Diagnostics.Debug.WriteLine("----: " + e.Message);
            }
            // finalization block
            finally
            {
                // release all objects
                mc          = null;
                graph       = null;
                sourceBase  = null;
                grabberBase = null;
                sg          = null;

                if (graphObj != null)
                {
                    Marshal.ReleaseComObject(graphObj);
                    graphObj = null;
                }
                if (sourceObj != null)
                {
                    Marshal.ReleaseComObject(sourceObj);
                    sourceObj = null;
                }
                if (grabberObj != null)
                {
                    Marshal.ReleaseComObject(grabberObj);
                    grabberObj = null;
                }
            }
        }
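The grabber object created at the top of WorkerThread receives each frame through the callback registered with sg.SetCallback(grabber, 1); only its Width and Height properties appear in this snippet. The following is a hedged sketch of what the rest of such a Grabber class might look like, building a System.Drawing.Bitmap from the RGB24 buffer; the BufferCB signature and the OnNewFrame event are assumptions, not part of the original, and in the real code the class would implement the project's ISampleGrabberCB interop interface.

using System;
using System.Drawing;
using System.Drawing.Imaging;
using System.Runtime.InteropServices;

// Hedged sketch of the Grabber used above; only Width/Height appear in the original.
class Grabber
{
    public int Width  { get; set; }
    public int Height { get; set; }
    public event Action<Bitmap> OnNewFrame;

    // Called by the sample grabber for every decoded frame (SetCallback(..., 1) selects BufferCB).
    public int BufferCB(double sampleTime, IntPtr pBuffer, int bufferLen)
    {
        // DirectShow delivers RGB24 frames bottom-up, so copy the rows in reverse to get a top-down bitmap.
        var bmp = new Bitmap(Width, Height, PixelFormat.Format24bppRgb);
        BitmapData data = bmp.LockBits(new Rectangle(0, 0, Width, Height),
                                       ImageLockMode.WriteOnly, PixelFormat.Format24bppRgb);
        int srcStride = bufferLen / Height;                // bytes per source row (DWORD aligned)
        int copyBytes = Math.Min(srcStride, data.Stride);
        var row = new byte[copyBytes];
        for (int y = 0; y < Height; y++)
        {
            IntPtr srcRow = new IntPtr(pBuffer.ToInt64() + (long)(Height - 1 - y) * srcStride);
            IntPtr dstRow = new IntPtr(data.Scan0.ToInt64() + (long)y * data.Stride);
            Marshal.Copy(srcRow, row, 0, copyBytes);
            Marshal.Copy(row, 0, dstRow, copyBytes);
        }
        bmp.UnlockBits(data);

        OnNewFrame?.Invoke(bmp);
        return 0;
    }
}

The row-by-row copy is only there to flip the image; if a bottom-up bitmap is acceptable, a single Marshal.Copy of the whole buffer would do.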