Code Example #1
        private void CloseInterfaces()
        {
            int hr = 0;

            try
            {
                lock (this)
                {
                    // Relinquish ownership (IMPORTANT!) after hiding video window
                    hr = this.videoWindow.put_Visible(OABool.False);
                    DsError.ThrowExceptionForHR(hr);
                    hr = this.videoWindow.put_Owner(IntPtr.Zero);
                    DsError.ThrowExceptionForHR(hr);
                    if (this.mediaEventEx != null)
                    {
                        hr = this.mediaEventEx.SetNotifyWindow(IntPtr.Zero, 0, IntPtr.Zero);
                        DsError.ThrowExceptionForHR(hr);
                    }
                    // Zero the DirectShow interface references; the graph builder itself is released below
                    if (this.mediaEventEx != null)
                    {
                        this.mediaEventEx = null;
                    }
                    if (this.mediaSeeking != null)
                    {
                        this.mediaSeeking = null;
                    }
                    if (this.mediaPosition != null)
                    {
                        this.mediaPosition = null;
                    }
                    if (this.mediaControl != null)
                    {
                        this.mediaControl = null;
                    }
                    if (this.basicAudio != null)
                    {
                        this.basicAudio = null;
                    }
                    if (this.basicVideo != null)
                    {
                        this.basicVideo = null;
                    }
                    if (this.videoWindow != null)
                    {
                        this.videoWindow = null;
                    }
                    if (this.frameStep != null)
                    {
                        this.frameStep = null;
                    }
                    if (this.graphBuilder != null)
                    {
                        Marshal.ReleaseComObject(this.graphBuilder);
                    }
                    this.graphBuilder = null;
                    GC.Collect();
                }
            }
            catch
            {
                //Nothing sensible can be done if teardown fails, so swallow the exception
            }
        }
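
The repeated "check for null, then clear" teardown above can be factored into a small helper. A minimal sketch (the helper name is hypothetical, not part of the original class); note that only graphBuilder is passed to Marshal.ReleaseComObject here, because the other fields are interfaces queried from that same graph object:

        // Hypothetical helper: release a COM reference (if any) and clear the field.
        private static void ReleaseAndNull<T>(ref T comObject) where T : class
        {
            if (comObject != null)
            {
                Marshal.ReleaseComObject(comObject);
                comObject = null;
            }
        }

        // Example use inside CloseInterfaces():
        // ReleaseAndNull(ref this.graphBuilder);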
Code Example #2
        internal static void _DisplayPropertyPage(object filter_or_pin, IntPtr hwndOwner)
        {
            if (filter_or_pin == null)
            {
                return;
            }

            //Get the ISpecifyPropertyPages for the filter
            ISpecifyPropertyPages pProp = filter_or_pin as ISpecifyPropertyPages;
            int hr = 0;

            if (pProp == null)
            {
                //If the filter doesn't implement ISpecifyPropertyPages, try displaying IAMVfwCompressDialogs instead!
                IAMVfwCompressDialogs compressDialog = filter_or_pin as IAMVfwCompressDialogs;
                if (compressDialog != null)
                {
                    hr = compressDialog.ShowDialog(VfwCompressDialogs.Config, IntPtr.Zero);
                    DsError.ThrowExceptionForHR(hr);
                }
                return;
            }

            string caption = string.Empty;

            if (filter_or_pin is IBaseFilter)
            {
                //Get the name of the filter from the FilterInfo struct
                IBaseFilter as_filter = filter_or_pin as IBaseFilter;
                FilterInfo  filterInfo;
                hr = as_filter.QueryFilterInfo(out filterInfo);
                DsError.ThrowExceptionForHR(hr);

                caption = filterInfo.achName;

                if (filterInfo.pGraph != null)
                {
                    Marshal.ReleaseComObject(filterInfo.pGraph);
                }
            }
            else if (filter_or_pin is IPin)
            {
                //Get the name of the pin from the PinInfo struct
                IPin    as_pin = filter_or_pin as IPin;
                PinInfo pinInfo;
                hr = as_pin.QueryPinInfo(out pinInfo);
                DsError.ThrowExceptionForHR(hr);

                caption = pinInfo.name;

                if (pinInfo.filter != null)
                {
                    Marshal.ReleaseComObject(pinInfo.filter);
                }
            }


            // Get the property pages from the ISpecifyPropertyPages interface
            DsCAUUID caGUID;

            hr = pProp.GetPages(out caGUID);
            DsError.ThrowExceptionForHR(hr);

            // Create and display the OlePropertyFrame
            object oDevice = (object)filter_or_pin;

            hr = NativeMethodes.OleCreatePropertyFrame(hwndOwner, 0, 0, caption, 1, ref oDevice, caGUID.cElems, caGUID.pElems, 0, 0, IntPtr.Zero);
            DsError.ThrowExceptionForHR(hr);

            // Release COM objects
            Marshal.FreeCoTaskMem(caGUID.pElems);
            Marshal.ReleaseComObject(pProp);
        }
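
A hedged usage sketch for the method above: once a capture filter has been added to a graph (as in the later examples), its property page, or that of one of its pins, can be shown by passing the object and an owner window handle. The variable names below are illustrative only:

        // Illustrative call, assuming 'capFilter' is an IBaseFilter already in a graph
        // and 'this' is a Windows Forms control providing the owner handle.
        _DisplayPropertyPage(capFilter, this.Handle);

        // A pin's property page can be shown the same way:
        IPin outputPin = DsFindPin.ByDirection(capFilter, PinDirection.Output, 0);
        if (outputPin != null)
        {
            _DisplayPropertyPage(outputPin, this.Handle);
            Marshal.ReleaseComObject(outputPin);
        }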
Code Example #3
        /// <summary>
        /// Gets available resolutions (which are appropriate for us) for capture pin (PinCategory.Capture).
        /// </summary>
        /// <param name="captureFilter">Capture pin (PinCategory.Capture) for asking for resolution list.</param>
        private static ResolutionList GetResolutionsAvailable(IPin pinOutput)
        {
            int hr = 0;

            ResolutionList ResolutionsAvailable = new ResolutionList();

            //ResolutionsAvailable.Clear();

            // Media type (must be freed when we are done with it)
            AMMediaType media_type = null;

            //NOTE: pSCC is not used. All we need is media_type
            IntPtr pSCC = IntPtr.Zero;

            try
            {
                IAMStreamConfig videoStreamConfig = pinOutput as IAMStreamConfig;

                // -------------------------------------------------------------------------
                // We want the interface to expose all media types it supports and not only the last one set
                hr = videoStreamConfig.SetFormat(null);
                DsError.ThrowExceptionForHR(hr);

                int piCount = 0;
                int piSize  = 0;

                hr = videoStreamConfig.GetNumberOfCapabilities(out piCount, out piSize);
                DsError.ThrowExceptionForHR(hr);

                for (int i = 0; i < piCount; i++)
                {
                    // ---------------------------------------------------
                    pSCC = Marshal.AllocCoTaskMem(piSize);
                    hr = videoStreamConfig.GetStreamCaps(i, out media_type, pSCC);
                    DsError.ThrowExceptionForHR(hr);

                    // NOTE: we could use VideoStreamConfigCaps.InputSize or something like that to get resolution, but it's deprecated
                    //VideoStreamConfigCaps videoStreamConfigCaps = (VideoStreamConfigCaps)Marshal.PtrToStructure(pSCC, typeof(VideoStreamConfigCaps));
                    // ---------------------------------------------------

                    if (IsBitCountAppropriate(GetBitCountForMediaType(media_type)))
                    {
                        ResolutionsAvailable.AddIfNew(GetResolutionForMediaType(media_type));
                    }

                    FreeSCCMemory(ref pSCC);
                    FreeMediaType(ref media_type);
                }
            }
            finally
            {
                // clean up
                FreeSCCMemory(ref pSCC);
                FreeMediaType(ref media_type);
            }

            return(ResolutionsAvailable);
        }
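
The FreeMediaType and FreeSCCMemory helpers called above are not part of this listing. A minimal sketch of what they are assumed to do: free the AMMediaType through DsUtils and the caps buffer through the COM task allocator.

        // Assumed sketch, not taken from the original project.
        private static void FreeMediaType(ref AMMediaType mediaType)
        {
            if (mediaType != null)
            {
                DsUtils.FreeAMMediaType(mediaType);
                mediaType = null;
            }
        }

        private static void FreeSCCMemory(ref IntPtr pSCC)
        {
            if (pSCC != IntPtr.Zero)
            {
                Marshal.FreeCoTaskMem(pSCC);
                pSCC = IntPtr.Zero;
            }
        }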
Code Example #4
File: MediaDetector.cs Project: gitter-badger/GS
        /// <summary>
        /// Gets an image snapshot from the media file that was opened
        /// </summary>
        /// <param name="position">The media time position for the requested thumbnail</param>
        /// <returns>Returns a BitmapSource of the video position.</returns>
        public unsafe BitmapSource GetImage(TimeSpan position)
        {
            VerifyAccess();

            const int BITS_PER_PIXEL = 3;

            if (string.IsNullOrEmpty(m_filename))
            {
                throw new Exception("A media file must be successfully loaded first.");
            }

            if (!HasVideo)
            {
                throw new Exception("The media does not have a video stream");
            }

            double secondsPos = position.TotalSeconds;

            /* Our pointer to the bitmap's pixel buffer */
            IntPtr pBuffer = IntPtr.Zero;


            /* The WPF bitmap source we return */
            BitmapSource bmpSource = null;

            try
            {
                /* The size of our buffer */
                int bufferSize;

                /* Queries the size of the bitmap buffer first */
                int hr = m_mediaDet.GetBitmapBits(secondsPos,
                                                  out bufferSize,
                                                  IntPtr.Zero,
                                                  (int)VideoResolution.Width,
                                                  (int)VideoResolution.Height);
                if (hr == 0)
                {
                    /* Allocate some unmanaged memory, big enough for our bitmap bytes */
                    pBuffer = Marshal.AllocCoTaskMem(bufferSize);

                    /* Get the pixel buffer for the thumbnail */
                    hr = m_mediaDet.GetBitmapBits(secondsPos,
                                                  out bufferSize,
                                                  pBuffer,
                                                  (int)VideoResolution.Width,
                                                  (int)VideoResolution.Height);

                    DsError.ThrowExceptionForHR(hr);

                    /* The bitmap header exists in the buffer.  We 'cast' it out to read it */
                    var bitmapHeader = (BitmapInfoHeader)Marshal.PtrToStructure(pBuffer, typeof(BitmapInfoHeader));

                    /* We use a pointer so we can do some pointer
                     * arithmetic. This method should be compatible
                     * with 32/64bit processes */
                    var pBitmapData = (byte *)pBuffer.ToPointer();

                    pBitmapData += bitmapHeader.Size;

                    /* This will be the pointer to the bitmap pixels */
                    var bitmapData = new IntPtr(pBitmapData);

                    /* We create a GDI bitmap, so we can flip it before we
                     * load it into a WPF BitmapSource */
                    if (m_bitmap == null)
                    {
                        m_bitmap = new Bitmap(bitmapHeader.Width,
                                              bitmapHeader.Height,
                                              PixelFormat.Format24bppRgb);
                    }

                    /* The GDI bitmap's pixels are locked so we can grab a pointer to the pixel buffer */
                    BitmapData bmpData = m_bitmap.LockBits(new Rectangle(0, 0, bitmapHeader.Width, bitmapHeader.Height),
                                                           ImageLockMode.WriteOnly,
                                                           PixelFormat.Format24bppRgb);

                    /* Then we copy our pixel buffer to the GDI bitmap's pixel buffer */
                    CopyMemory(bmpData.Scan0, bitmapData, (int)VideoResolution.Width * (int)VideoResolution.Height * BITS_PER_PIXEL);

                    m_bitmap.UnlockBits(bmpData);

                    /* The bitmap is bottom up, so it needs to be flipped */
                    m_bitmap.RotateFlip(RotateFlipType.Rotate180FlipX);

                    /* Lock the GDI pixel buffer so we can load them into a WPF BitmapSource */
                    bmpData = m_bitmap.LockBits(new Rectangle(0, 0, bitmapHeader.Width, bitmapHeader.Height),
                                                ImageLockMode.ReadOnly,
                                                PixelFormat.Format24bppRgb);

                    bmpSource = BitmapSource.Create((int)VideoResolution.Width,
                                                    (int)VideoResolution.Height,
                                                    96,
                                                    96,
                                                    PixelFormats.Bgr24,
                                                    null,
                                                    bmpData.Scan0,
                                                    bmpData.Stride * (int)VideoResolution.Height,
                                                    bmpData.Stride);

                    m_bitmap.UnlockBits(bmpData);
                }
            }
            catch (Exception ex)
            {
                Debug.WriteLine("GetImage error: " + ex.Message);
            }
            finally
            {
                if (pBuffer != IntPtr.Zero)
                {
                    Marshal.FreeCoTaskMem(pBuffer);
                }
            }

            return(bmpSource);
        }
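
A short usage sketch: the returned BitmapSource can be persisted with a standard WPF encoder. The detector instance, position, and file name below are illustrative only:

        // Illustrative usage: grab a thumbnail ten seconds in and save it as PNG.
        BitmapSource thumb = detector.GetImage(TimeSpan.FromSeconds(10));
        if (thumb != null)
        {
            var encoder = new PngBitmapEncoder();
            encoder.Frames.Add(BitmapFrame.Create(thumb));
            using (var stream = File.Create("thumbnail.png"))
            {
                encoder.Save(stream);
            }
        }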
Code Example #5
        private void CMB_videosources_SelectedIndexChanged(object sender, EventArgs e)
        {
            if (MainV2.MONO)
            {
                return;
            }

            int                   hr;
            int                   count;
            int                   size;
            object                o;
            IBaseFilter           capFilter = null;
            ICaptureGraphBuilder2 capGraph  = null;
            AMMediaType           media     = null;
            VideoInfoHeader       v;
            VideoStreamConfigCaps c;
            List <GCSBitmapInfo>  modes = new List <GCSBitmapInfo>();

            // Get the ICaptureGraphBuilder2
            capGraph = (ICaptureGraphBuilder2) new CaptureGraphBuilder2();
            IFilterGraph2 m_FilterGraph = (IFilterGraph2) new FilterGraph();

            DsDevice[] capDevices;
            capDevices = DsDevice.GetDevicesOfCat(FilterCategory.VideoInputDevice);

            // Add the video device
            hr = m_FilterGraph.AddSourceFilterForMoniker(capDevices[CMB_videosources.SelectedIndex].Mon, null, "Video input", out capFilter);
            try
            {
                DsError.ThrowExceptionForHR(hr);
            }
            catch (Exception ex)
            {
                CustomMessageBox.Show("Can not add video source\n" + ex.ToString());
                return;
            }

            // Find the stream config interface
            hr = capGraph.FindInterface(PinCategory.Capture, MediaType.Video, capFilter, typeof(IAMStreamConfig).GUID, out o);
            DsError.ThrowExceptionForHR(hr);

            IAMStreamConfig videoStreamConfig = o as IAMStreamConfig;

            if (videoStreamConfig == null)
            {
                throw new Exception("Failed to get IAMStreamConfig");
            }

            hr = videoStreamConfig.GetNumberOfCapabilities(out count, out size);
            DsError.ThrowExceptionForHR(hr);
            IntPtr TaskMemPointer = Marshal.AllocCoTaskMem(size);

            for (int i = 0; i < count; i++)
            {
                hr = videoStreamConfig.GetStreamCaps(i, out media, TaskMemPointer);
                v  = (VideoInfoHeader)Marshal.PtrToStructure(media.formatPtr, typeof(VideoInfoHeader));
                c  = (VideoStreamConfigCaps)Marshal.PtrToStructure(TaskMemPointer, typeof(VideoStreamConfigCaps));
                modes.Add(new GCSBitmapInfo(v.BmiHeader.Width, v.BmiHeader.Height, c.MaxFrameInterval, c.VideoStandard.ToString(), media));
            }
            Marshal.FreeCoTaskMem(TaskMemPointer);
            DsUtils.FreeAMMediaType(media);

            CMB_videoresolutions.DataSource = modes;

            if (MainV2.getConfig("video_options") != "" && CMB_videosources.Text != "")
            {
                try
                {
                    CMB_videoresolutions.SelectedIndex = int.Parse(MainV2.getConfig("video_options"));
                }
                catch { } // ignore bad entries
            }
        }
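
When the user picks an entry from CMB_videoresolutions, the chosen format would normally be applied back to the capture pin with IAMStreamConfig.SetFormat before the graph is run. A hedged sketch, assuming GCSBitmapInfo exposes the AMMediaType stored in the loop above (the Media property name is an assumption):

            // Hedged sketch: apply the selected capture format.
            GCSBitmapInfo selected = CMB_videoresolutions.SelectedItem as GCSBitmapInfo;
            if (selected != null)
            {
                hr = videoStreamConfig.SetFormat(selected.Media); // 'Media' property name is assumed
                DsError.ThrowExceptionForHR(hr);
            }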
Code Example #6
        //void eventListener_RunWorkerCompleted(object sender, RunWorkerCompletedEventArgs e)
        //{
        //  this.CloseAudioFile();
        //}

        /// <summary>
        /// Called on a new thread to process events from the graph.  The thread
        /// exits when the graph finishes.  Cancelling is done here.
        /// </summary>
        private void EventWait()
        {
            // Returned when GetEvent is called but there are no events
            const int E_ABORT = unchecked ((int)0x80004004);

            int       hr;
            IntPtr    p1, p2;
            EventCode ec;
            EventCode exitCode = 0;

            IMediaEvent pEvent = (IMediaEvent)this.filterGraph;

            do
            {
                // Read the event
                for (
                    hr = pEvent.GetEvent(out ec, out p1, out p2, 100);
                    hr >= 0 && this.currentState == PlayState.Running;
                    hr = pEvent.GetEvent(out ec, out p1, out p2, 100)
                    )
                {
                    switch (ec)
                    {
                    // If all files finished playing
                    case EventCode.Complete:
                    case EventCode.ErrorAbort:
                    case EventCode.UserAbort:
                        ChangeState(PlayState.Exiting);
                        exitCode = ec;
                        break;
                    }

                    // Release any resources the message allocated
                    hr = pEvent.FreeEventParams(ec, p1, p2);
                    DsError.ThrowExceptionForHR(hr);
                }

                // If the error that exited the loop wasn't due to running out of events
                if (hr != E_ABORT)
                {
                    DsError.ThrowExceptionForHR(hr);
                }
            } while (this.currentState == PlayState.Running);

            // If the user cancelled
            if (this.currentState == PlayState.Cancelling)
            {
                // Stop the graph, send an appropriate exit code
                hr       = this.mediaControl.Stop();
                exitCode = EventCode.UserAbort;
            }


            // Send an event saying we are complete
            if (exitCode == EventCode.Complete && Completed != null)
            {
                CompletedArgs ca = new CompletedArgs(exitCode);
                Completed(this, ca);
            }
            //threadCompleted = true;
        } // Exit the thread
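
EventWait is meant to run on its own thread; a minimal sketch of starting it once the graph is running (mirroring the thread setup shown in a later example):

            // Sketch: run the event loop on a background thread.
            Thread eventThread = new Thread(this.EventWait);
            eventThread.Name = "Media Event Thread";
            eventThread.IsBackground = true;
            eventThread.Start();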
Code Example #7
 public void Pause()
 {
     DsError.ThrowExceptionForHR(control.Pause());
     DsError.ThrowExceptionForHR(control.GetState(1000, out state));
     bPause = true;
 }
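
A matching resume call is not shown in this listing; a hedged sketch using the same IMediaControl pattern (control, state, and bPause are the members used above):

 public void Play()
 {
     DsError.ThrowExceptionForHR(control.Run());
     DsError.ThrowExceptionForHR(control.GetState(1000, out state));
     bPause = false;
 }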
Code Example #8
File: DvdPlayer.cs Project: gitter-badger/GS
        /// <summary>
        /// Builds the DVD DirectShow graph
        /// </summary>
        private void BuildGraph()
        {
            try
            {
                FreeResources();

                int hr;

                /* Create our new graph */
                m_graph = (IGraphBuilder) new FilterGraphNoThread();

#if DEBUG
                m_rot = new DsROTEntry(m_graph);
#endif

                /* We are going to use the VMR9 for now.  The EVR does not
                 * seem to work with the interactive menus yet.  It should
                 * play Dvds fine otherwise */
                var rendererType = VideoRendererType.VideoMixingRenderer9;

                /* Creates and initializes a new renderer ready to render to WPF */
                m_renderer = CreateVideoRenderer(rendererType, m_graph, 2);

                /* Do some VMR9 specific stuff */
                if (rendererType == VideoRendererType.VideoMixingRenderer9)
                {
                    var mixer = m_renderer as IVMRMixerControl9;

                    if (mixer != null)
                    {
                        VMR9MixerPrefs dwPrefs;
                        mixer.GetMixingPrefs(out dwPrefs);
                        dwPrefs &= ~VMR9MixerPrefs.RenderTargetMask;
                        dwPrefs |= VMR9MixerPrefs.RenderTargetYUV;

                        /* Enable this line to prefer YUV */
                        //hr = mixer.SetMixingPrefs(dwPrefs);
                    }
                }

                /* Create a new DVD Navigator. */
                var dvdNav = (IBaseFilter) new DVDNavigator();

                /* The DVDControl2 interface lets us control DVD features */
                m_dvdControl = dvdNav as IDvdControl2;

                if (m_dvdControl == null)
                {
                    throw new Exception("Could not QueryInterface the IDvdControl2 interface");
                }

                /* QueryInterface the DVDInfo2 */
                m_dvdInfo = dvdNav as IDvdInfo2;

                /* If a Dvd directory has been set then use it, if not, let DShow find the Dvd */
                if (!string.IsNullOrEmpty(DvdDirectory))
                {
                    hr = m_dvdControl.SetDVDDirectory(DvdDirectory);
                    DsError.ThrowExceptionForHR(hr);
                }

                /* This gives us the DVD time in Hours-Minutes-Seconds-Frame time format, and other options */
                hr = m_dvdControl.SetOption(DvdOptionFlag.HMSFTimeCodeEvents, true);
                DsError.ThrowExceptionForHR(hr);

                /* If the graph stops, resume at the same point */
                m_dvdControl.SetOption(DvdOptionFlag.ResetOnStop, false);

                hr = m_graph.AddFilter(dvdNav, "DVD Navigator");
                DsError.ThrowExceptionForHR(hr);

                IPin dvdVideoPin      = null;
                IPin dvdAudioPin      = null;
                IPin dvdSubPicturePin = null;

                IPin dvdNavPin;
                int  i = 0;

                /* Loop all the output pins on the DVD Navigator, trying to find which pins are which.
                 * We could more easily find the pins by name, but this is more fun...and more flexible
                 * if we ever want to use a 3rd party DVD navigator that used different pin names */
                while ((dvdNavPin = DsFindPin.ByDirection(dvdNav, PinDirection.Output, i)) != null)
                {
                    var    mediaTypes = new AMMediaType[1];
                    IntPtr pFetched   = IntPtr.Zero;

                    IEnumMediaTypes mediaTypeEnum;
                    dvdNavPin.EnumMediaTypes(out mediaTypeEnum);

                    /* Loop over each of the mediaTypes of each pin */
                    while (mediaTypeEnum.Next(1, mediaTypes, pFetched) == 0)
                    {
                        AMMediaType mediaType = mediaTypes[0];

                        /* This will be the video stream pin */
                        if (mediaType.subType == MediaSubType.Mpeg2Video)
                        {
                            /* Keep the ref and we'll work with it later */
                            dvdVideoPin = dvdNavPin;
                            break;
                        }

                        /* This will be the audio stream pin */
                        if (mediaType.subType == MediaSubType.DolbyAC3 ||
                            mediaType.subType == MediaSubType.Mpeg2Audio)
                        {
                            /* Keep the ref and we'll work with it later */
                            dvdAudioPin = dvdNavPin;
                            break;
                        }

                        /* This is the Dvd sub picture pin.  This generally
                         * shows overlays for Dvd menus and sometimes closed captions */
                        if (mediaType.subType == DVD_SUBPICTURE_TYPE)
                        {
                            /* Keep the ref and we'll work with it later */
                            dvdSubPicturePin = dvdNavPin;
                            break;
                        }
                    }

                    mediaTypeEnum.Reset();
                    Marshal.ReleaseComObject(mediaTypeEnum);
                    i++;
                }

                /* This is the windowed renderer.  This is *NEEDED* in order
                 * for interactive menus to work with the other VMR9 in renderless mode */
                var dummyRenderer       = (IBaseFilter) new VideoMixingRenderer9();
                var dummyRendererConfig = (IVMRFilterConfig9)dummyRenderer;

                /* In order for this interactive menu trick to work, the VMR9
                 * must be set to Windowed.  We will make sure the window is hidden later on */
                hr = dummyRendererConfig.SetRenderingMode(VMR9Mode.Windowed);
                DsError.ThrowExceptionForHR(hr);

                hr = dummyRendererConfig.SetNumberOfStreams(1);
                DsError.ThrowExceptionForHR(hr);

                hr = m_graph.AddFilter(dummyRenderer, "Dummy Windowed");
                DsError.ThrowExceptionForHR(hr);

                if (dvdAudioPin != null)
                {
                    /* This should render out to the default audio device. We
                     * could modify this code here to go out any audio
                     * device, such as SPDIF or another sound card */
                    hr = m_graph.Render(dvdAudioPin);
                    DsError.ThrowExceptionForHR(hr);
                }

                /* Get the first input pin on our dummy renderer */
                m_dummyRendererPin = DsFindPin.ByConnectionStatus(dummyRenderer, /* Filter to search */
                                                                  PinConnectedStatus.Unconnected,
                                                                  0);

                /* Get an available pin on our real renderer */
                IPin rendererPin = DsFindPin.ByConnectionStatus(m_renderer, /* Filter to search */
                                                                PinConnectedStatus.Unconnected,
                                                                0);         /* Pin index */

                /* Connect the pin to the renderer */
                hr = m_graph.Connect(dvdVideoPin, rendererPin);
                DsError.ThrowExceptionForHR(hr);

                /* Get the next available pin on our real renderer */
                rendererPin = DsFindPin.ByConnectionStatus(m_renderer, /* Filter to search */
                                                           PinConnectedStatus.Unconnected,
                                                           0);         /* Pin index */

                /* Render the sub picture, which will connect
                 * the DVD navigator to the codec, not the renderer */
                hr = m_graph.Render(dvdSubPicturePin);
                DsError.ThrowExceptionForHR(hr);

                /* These are the subtypes most likely to be our dvd subpicture */
                var preferedSubpictureTypes = new[] { MediaSubType.ARGB4444,
                                                      MediaSubType.AI44,
                                                      MediaSubType.AYUV,
                                                      MediaSubType.ARGB32 };
                IPin dvdSubPicturePinOut = null;

                /* Find what should be the subpicture pin out */
                foreach (var guidType in preferedSubpictureTypes)
                {
                    dvdSubPicturePinOut = FindPinInGraphByMediaType(guidType, /* GUID of the media type being searched for */
                                                                    PinDirection.Output,
                                                                    m_graph); /* Our current graph */
                    if (dvdSubPicturePinOut != null)
                    {
                        break;
                    }
                }

                if (dvdSubPicturePinOut == null)
                {
                    throw new Exception("Could not find the sub picture pin out");
                }

                /* Here we connect the Dvd sub picture pin to the video renderer.
                 * This enables the overlays on Dvd menus and some closed
                 * captions to be rendered. */
                hr = m_graph.Connect(dvdSubPicturePinOut, rendererPin);
                DsError.ThrowExceptionForHR(hr);

                /* Search for the Line21 out in the graph */
                IPin line21Out = FindPinInGraphByMediaType(MediaType.AuxLine21Data,
                                                           PinDirection.Output,
                                                           m_graph);
                if (line21Out == null)
                {
                    throw new Exception("Could not find the Line21 pin out");
                }

                /* We connect our line21 output to the dummy renderer;
                 * this is what ultimately makes interactive DVDs work with
                 * the VMR9 in renderless mode (for WPF) */
                hr = m_graph.Connect(line21Out, m_dummyRendererPin);
                DsError.ThrowExceptionForHR(hr);

                /* This is the dummy renderers Win32 window. */
                m_dummyRenderWindow = dummyRenderer as IVideoWindow;

                if (m_dummyRenderWindow == null)
                {
                    throw new Exception("Could not QueryInterface for IVideoWindow");
                }

                ConfigureDummyWindow();

                /* Setup our base classes with this filter graph */
                SetupFilterGraph(m_graph);

                /* Sets the NaturalVideoWidth/Height */
                SetNativePixelSizes(m_renderer);
            }
            catch (Exception ex)
            {
                FreeResources();
                InvokeMediaFailed(new MediaFailedEventArgs(ex.Message, ex));
                return;
            }

            InvokeMediaOpened();
        }
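
ConfigureDummyWindow is not included in this listing. Since the windowed VMR9 exists only to make interactive menus work, the helper presumably just keeps its window from ever showing. A sketch under that assumption:

        // Assumed sketch of ConfigureDummyWindow: keep the windowed VMR9 invisible.
        private void ConfigureDummyWindow()
        {
            int hr = m_dummyRenderWindow.put_AutoShow(OABool.False);
            DsError.ThrowExceptionForHR(hr);

            hr = m_dummyRenderWindow.put_Visible(OABool.False);
            DsError.ThrowExceptionForHR(hr);
        }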
Code Example #9
        private void RenderSource()
        {
            int hr;

            IEnumPins enumPins;

            IPin[] pins = new IPin[1];

            hr = _netSrc.EnumPins(out enumPins);
            DsError.ThrowExceptionForHR(hr);
            try
            {
                while (enumPins.Next(pins.Length, pins, IntPtr.Zero) == 0)
                {
                    try
                    {
                        PinInfo pinInfo;
                        IPin    upstreamPin = pins[0];
                        hr = upstreamPin.QueryPinInfo(out pinInfo);
                        DsError.ThrowExceptionForHR(hr);
                        if (pinInfo.dir == PinDirection.Output)
                        {
                            IEnumMediaTypes enumMediaTypes;
                            hr = upstreamPin.EnumMediaTypes(out enumMediaTypes);
                            DsError.ThrowExceptionForHR(hr);
                            AMMediaType[] mediaTypes = new AMMediaType[1];
                            if (enumMediaTypes.Next(1, mediaTypes, IntPtr.Zero) == 0)
                            {
                                AMMediaType mediaType = mediaTypes[0];
                                if (mediaType.majorType == MediaType.Video)
                                {
                                    if ((mediaType.subType == new Guid("34363268-0000-0010-8000-00AA00389B71")) ||
                                        (mediaType.subType == new Guid("34363248-0000-0010-8000-00aa00389b71")) ||
                                        (mediaType.subType == new Guid("3436324c-0000-0010-8000-00aa00389b71")) ||
                                        (mediaType.subType == new Guid("31637661-5349-4d4f-4544-494154595045")) ||
                                        (mediaType.subType == new Guid("8d2d71cb-243f-45e3-b2d8-5fd7967ec09b")))
                                    {
                                        _decoderFilter = AddFilterByName(_graphBuilder, FilterCategory.LegacyAmFilterCategory, "LEAD H264 Decoder (3.0)");
                                        ConnectFilters(_graphBuilder, _netSrc, pinInfo.name, _infPinTee, "Input", true);
                                        ConnectFilters(_graphBuilder, _infPinTee, "Output1", _decoderFilter, "XForm In", true);
                                        ConnectFilters(_graphBuilder, _infPinTee, "Output2", _bridgeSink, "Input 1", true);
                                        ConnectFilters(_graphBuilder, _decoderFilter, "XForm Out", _videoCallbackFilter, "Input", true);
                                        ConnectFilters(_graphBuilder, _videoCallbackFilter, "Output", _videoRender, "VMR Input0", true);
                                    }
                                    else if (mediaType.subType == new Guid("4B324A4C-0000-0010-8000-00AA00389B71"))
                                    {
                                        _decoderFilter = AddFilterByName(_graphBuilder, FilterCategory.LegacyAmFilterCategory, "LEAD MJ2K Decoder (2.0)");
                                        ConnectFilters(_graphBuilder, _netSrc, pinInfo.name, _decoderFilter, "Input", true);
                                        ConnectFilters(_graphBuilder, _decoderFilter, "XForm Out", _videoRender, "VMR Input0", true);
                                    }
                                    else if (mediaType.subType == MediaSubType.MJPG)
                                    {
                                        _decoderFilter = AddFilterByName(_graphBuilder, FilterCategory.LegacyAmFilterCategory, "LEAD MCMP/MJPEG Decoder (2.0)");
                                        ConnectFilters(_graphBuilder, _decoderFilter, "XForm Out", _videoRender, "VMR Input0", true);
                                    }
                                    else
                                    {
                                        throw new Exception("Can't Render--Unsupported codec in stream");
                                    }
                                }
                                else
                                {
                                    //    throw new Exception("Can't Render--Unsupported type - audio? not supported");
                                }
                            }
                        }
                    }
                    finally
                    {
                        Marshal.ReleaseComObject(pins[0]);
                    }
                }
            }
            finally
            {
                Marshal.ReleaseComObject(enumPins);
            }
        }
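
The ConnectFilters helper used throughout RenderSource is not shown here. A minimal sketch, assuming it looks pins up by name on both filters and asks the graph to connect them (reading the final bool as "use intelligent connect" is an assumption):

        // Assumed sketch of the ConnectFilters helper used above.
        private static void ConnectFilters(IGraphBuilder graph,
                                           IBaseFilter source, string sourcePinName,
                                           IBaseFilter dest, string destPinName,
                                           bool useIntelligentConnect)
        {
            IPin pinOut = DsFindPin.ByName(source, sourcePinName);
            IPin pinIn  = DsFindPin.ByName(dest, destPinName);
            try
            {
                int hr = useIntelligentConnect
                             ? graph.Connect(pinOut, pinIn)            // may add intermediate filters
                             : graph.ConnectDirect(pinOut, pinIn, null);
                DsError.ThrowExceptionForHR(hr);
            }
            finally
            {
                if (pinOut != null) { Marshal.ReleaseComObject(pinOut); }
                if (pinIn  != null) { Marshal.ReleaseComObject(pinIn); }
            }
        }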
Code Example #10
        [HandleProcessCorruptedStateExceptions] // Some filters cause AccessViolations; NET 4 doesn't catch these without this flag see http://msdn.microsoft.com/en-us/magazine/dd419661.aspx#id0070035
        protected void RunGraph(IGraphBuilder graphBuilder, IBaseFilter seekableFilter)
        {
            bool shouldTerminateGraphLoop = false;

            // Get the media seeking interface to use for computing status and progress updates
            IMediaSeeking mediaSeeking = (IMediaSeeking)currentOutputFilter;

            if (!CanGetPositionAndDuration(mediaSeeking))
            {
                // Try to seek using the main graph
                mediaSeeking = (IMediaSeeking)currentFilterGraph;
                if (!CanGetPositionAndDuration(mediaSeeking))
                {
                    mediaSeeking = null;
                }
            }

            // Run the graph
            int               hr           = 0;
            IMediaControl     mediaControl = (IMediaControl)graphBuilder;
            IMediaEvent       mediaEvent   = (IMediaEvent)graphBuilder;
            EventCode         statusCode;
            DateTime          startingTime     = DateTime.Now;
            TerminationReason whyDidITerminate = TerminationReason.None;

            try
            {
                hr = mediaControl.Pause();
                hr = mediaControl.Run();
                DsError.ThrowExceptionForHR(hr);

                // Signal (if first time) that graph is running OK
                SignalGraphStartedEvent(true);

                bool anyClientsYet      = false;
                bool conversionComplete = false;
                while (!shouldTerminateGraphLoop) // continue until we're done/cancelled
                {
                    // Any commands?  (e.g. seek / cancel)
                    ProcessAnyCommands(ref shouldTerminateGraphLoop, ref whyDidITerminate);

                    // Check graph conversion progress
                    if (conversionComplete)
                    {
                        // stall to avoid 100% loop
                        hr = mediaEvent.WaitForCompletion(250, out statusCode);
                    }
                    else
                    {
                        conversionComplete = CheckGraphConversion(ref mediaSeeking);

                        if (conversionComplete)
                        {
                            if (ConversionProgressChanged != null)
                            {
                                ConversionProgressChanged(new object(), new ProgressChangedEventArgs(100)); // final progress update stating 100% done
                            }
                            if (ConversionCompleted != null)
                            {
                                ConversionCompleted(new object(), new EventArgs());
                            }
                        }
                    }

                    // Check number of clients
                    int numClients = NumberOfConnectedClients();
                    if ((numClients > 0) && (!anyClientsYet))
                    {
                        anyClientsYet = true;  // A client connected
                    }
                    else if ((numClients == 0))
                    {
                        if (anyClientsYet)
                        {
                            // There were clients, but All clients have disconnected
                            shouldTerminateGraphLoop = true;
                            whyDidITerminate         = TerminationReason.AllClientsDisconnected;
                        }
                        else
                        {
                            // There aren't any clients and never have been - timeout?
                            TimeSpan timeElapsed = DateTime.Now - startingTime;
                            if (timeElapsed.TotalSeconds > TIMEOUT_SECONDS)
                            {
                                shouldTerminateGraphLoop = true;
                                whyDidITerminate         = TerminationReason.NoClientsTimeout;
                            }
                        }
                    }
                }
            }
            catch (Exception ex)
            {
                SendDebugMessageWithException("Error running graph: ", ex);
                whyDidITerminate = TerminationReason.Error;

                SignalGraphStartedEvent(false);
            }
            finally
            {
                try
                {
                    // Raise 'done' event first, before any possible AccessExceptions
                    switch (whyDidITerminate)
                    {
                    case TerminationReason.AllClientsDisconnected:
                        if (Finished != null)
                        {
                            Finished(this, new ConversionEndedEventArgs(false, "All clients disconnected."));
                        }
                        break;

                    case TerminationReason.NoClientsTimeout:
                        if (Finished != null)
                        {
                            Finished(this, new ConversionEndedEventArgs(false, "No clients ever connected."));
                        }
                        break;

                    case TerminationReason.UserCancelled:
                        if (Finished != null)
                        {
                            Finished(this, new ConversionEndedEventArgs(false, "User cancelled."));
                        }
                        break;

                    case TerminationReason.Error:
                        if (Finished != null)
                        {
                            Finished(this, new ConversionEndedEventArgs(true, "Unspecified error."));
                        }
                        break;

                    default:
                        if (Finished != null)
                        {
                            Finished(this, new ConversionEndedEventArgs(false, "Finished but no additional info."));
                        }
                        break;
                    }



                    // Stop graph
                    FilterState   graphState;
                    IMediaControl mediaControl2 = (IMediaControl)graphBuilder;
                    mediaControl2.GetState(50, out graphState);
                    if (graphState == FilterState.Running)
                    {
                        mediaControl2.Pause();
                        mediaControl2.Stop();  // Throwing AccessViolationException: attempted to read or write protected memory  (probably a badly written filter somewhere)
                    }
                }
                catch (AccessViolationException)
                {
                    SendDebugMessage("Ignoring expected AViolationException", 0);
                }
                catch (Exception ex)
                {
                    SendDebugMessageWithException("Ignoring exception when closing graph: ", ex);
                }

                // Close sink - can take a loooong time
                CloseNetworkSink();

                /*//so do in a separate thread...
                 * Thread t = new Thread(CloseNetworkSink);
                 * t.Start();*/
            }
        }
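
CanGetPositionAndDuration is referenced above but not shown. A sketch of how such a check is assumed to work against IMediaSeeking:

        // Assumed sketch: does this IMediaSeeking yield usable duration/position values?
        private static bool CanGetPositionAndDuration(IMediaSeeking mediaSeeking)
        {
            if (mediaSeeking == null)
            {
                return false;
            }

            long duration;
            long position;

            int hr = mediaSeeking.GetDuration(out duration);
            if (hr != 0 || duration <= 0)
            {
                return false;
            }

            hr = mediaSeeking.GetCurrentPosition(out position);
            return hr == 0;
        }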
Code Example #11
 protected override void CreateGraphBuilder()
 {
     _dvdGraph = (IDvdGraphBuilder) new DvdGraphBuilder();
     DsError.ThrowExceptionForHR(_dvdGraph.GetFiltergraph(out _graphBuilder));
     _streamCount = 3; // Allow Video, CC, and Subtitle
 }
Code Example #12
File: VideoPlayer.cs Project: fabianhick/viana
        /// <summary>
        ///   The event wait.
        /// </summary>
        private void EventWait()
        {
            // Returned when GetEvent is called but there are no events
            const int E_ABORT = unchecked ((int)0x80004004);

            int       hr;
            IntPtr    p1, p2;
            EventCode ec;

            // Console.WriteLine("LoopStarted");
            do
            {
                // If we are shutting down
                if (this.shouldExitEventLoop)
                {
                    break;
                }

                // Make sure that we don't access the media event interface
                // after it has already been released.
                if (this.mediaEvent == null)
                {
                    return;
                }

                // Avoid contention for m_State
                lock (this)
                {
                    // If we are not shutting down
                    if (!this.shouldExitEventLoop)
                    {
                        // Read the event
                        for (hr = this.mediaEvent.GetEvent(out ec, out p1, out p2, 100);
                             hr >= 0;
                             hr = this.mediaEvent.GetEvent(out ec, out p1, out p2, 100))
                        {
                            // Console.WriteLine("InLoop");
                            //// Write the event name to the debug window
                            // Debug.WriteLine(ec.ToString());

                            // If the clip is finished playing
                            if (ec == EventCode.Complete)
                            {
                                // Call Stop() to set state
                                this.Stop();

                                // Throw event
                                if (this.FileComplete != null)
                                {
                                    this.FileComplete(this, EventArgs.Empty);
                                }
                            }
                            else if (ec == EventCode.StepComplete)
                            {
                                Console.WriteLine("StepComplete");
                                // Throw event
                                if (this.StepComplete != null)
                                {
                                    this.StepComplete(this, EventArgs.Empty);
                                }
                            }

                            // Release any resources the message allocated
                            hr = this.mediaEvent.FreeEventParams(ec, p1, p2);
                            DsError.ThrowExceptionForHR(hr);
                        }

                        // If the error that exited the loop wasn't due to running out of events
                        if (hr != E_ABORT)
                        {
                            DsError.ThrowExceptionForHR(hr);
                        }
                    }
                    else
                    {
                        // We are shutting down
                        break;
                    }
                }
            }while (true);

            // Console.WriteLine("LoopExited");
        }
Code Example #13
File: VideoPlayer.cs Project: fabianhick/viana
        /// <summary>
        ///   The build graph.
        /// </summary>
        /// <exception cref="ArgumentOutOfRangeException"></exception>
        private void BuildGraph()
        {
            if (this.VideoFilename == string.Empty)
            {
                return;
            }

            this.filterGraph = (IFilterGraph2) new FilterGraph();

            // #if DEBUG
            this.rotEntry = new DsROTEntry(this.filterGraph);

            // #endif

            // IFileSourceFilter urlSourceFilter = new URLReader() as IFileSourceFilter;
            // IBaseFilter sourceFilter = urlSourceFilter as IBaseFilter;

            // string fileURL = string.Concat(@"file:///", this.filename.Replace("\\","/"));
            // hr = urlSourceFilter.Load(fileURL, null);
            // DsError.ThrowExceptionForHR(hr);
            // this.filterGraph.AddFilter(sourceFilter, "URL Source");
            IBaseFilter sourceFilter;

            this.filterGraph.AddSourceFilter(this.VideoFilename, "File Source", out sourceFilter);

            // Create the SampleGrabber interface
            this.sampleGrabber = (ISampleGrabber) new SampleGrabber();
            var baseGrabFlt = (IBaseFilter)this.sampleGrabber;

            this.ConfigureSampleGrabber(this.sampleGrabber);

            // Add the frame grabber to the graph
            int hr = this.filterGraph.AddFilter(baseGrabFlt, "Ds.NET Grabber");

            if (hr != 0)
            {
                ErrorLogger.WriteLine(
                    "Error in m_graphBuilder.AddFilter(). Could not add filter. Message: " + DsError.GetErrorText(hr));
            }

            IPin sampleGrabberIn  = DsFindPin.ByDirection(baseGrabFlt, PinDirection.Input, 0);
            IPin sampleGrabberOut = DsFindPin.ByDirection(baseGrabFlt, PinDirection.Output, 0);
            IPin sourceOut;

            // Iterate through source output pins, to find video output pin to be connected to
            // the sample grabber
            int i = 0;

            do
            {
                sourceOut = DsFindPin.ByDirection(sourceFilter, PinDirection.Output, i);
                if (sourceOut == null)
                {
                    throw new ArgumentOutOfRangeException("Found no compatible video source output pin");
                }

                hr = this.filterGraph.Connect(sourceOut, sampleGrabberIn);
                i++;
            }while (hr < 0);

            DsError.ThrowExceptionForHR(hr);
            hr = this.filterGraph.Render(sampleGrabberOut);
            DsError.ThrowExceptionForHR(hr);

            //// Have the graph builder construct the appropriate graph automatically
            // hr = this.graphBuilder.RenderFile(filename, null);
            // DsError.ThrowExceptionForHR(hr);

            // QueryInterface for DirectShow interfaces
            this.mediaControl = (IMediaControl)this.filterGraph;

            // this.mediaEventEx = (IMediaEventEx)this.graphBuilder;
            this.mediaSeeking  = (IMediaSeeking)this.filterGraph;
            this.mediaPosition = (IMediaPosition)this.filterGraph;
            this.mediaEvent    = (IMediaEvent)this.filterGraph;

            //hr = this.mediaSeeking.IsFormatSupported(TimeFormat.Frame);
            //if (hr != 0)
            //{
            //  this.isFrameTimeCapable = false;
            //}
            //else
            //{
            //  this.isFrameTimeCapable = true;

            // string text = DsError.GetErrorText(hr);
            // hr = this.mediaSeeking.SetTimeFormat(TimeFormat.Frame);
            // text = DsError.GetErrorText(hr);
            //}

            // hr = this.mediaSeeking.GetTimeFormat(out this.timeFormat);
            // DsError.ThrowExceptionForHR(hr);

            // Query for video interfaces, which may not be relevant for audio files
            this.videoWindow = this.filterGraph as IVideoWindow;
            hr = this.videoWindow.put_AutoShow(OABool.False);
            DsError.ThrowExceptionForHR(hr);

            this.basicVideo = this.filterGraph as IBasicVideo;

            // Query for audio interfaces, which may not be relevant for video-only files
            this.basicAudio = this.filterGraph as IBasicAudio;

            //// Have the graph signal event via window callbacks for performance
            // hr = this.mediaEventEx.SetNotifyWindow(this.Handle, WMGraphNotify, IntPtr.Zero);
            // DsError.ThrowExceptionForHR(hr);

            // Get the event handle the graph will use to signal
            // when events occur
            IntPtr hEvent;

            hr = this.mediaEvent.GetEventHandle(out hEvent);
            DsError.ThrowExceptionForHR(hr);

            // Reset event loop exit flag
            this.shouldExitEventLoop = false;

            // Create a new thread to wait for events
            this.eventThread      = new Thread(this.EventWait);
            this.eventThread.Name = "Media Event Thread";
            this.eventThread.Start();

            this.GetFrameStepInterface();

            // Update the SampleGrabber.
            this.SaveSizeInfo(this.sampleGrabber);
        }
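
ConfigureSampleGrabber is not part of this listing. A typical setup for frame grabbing asks the SampleGrabber for RGB24 video and keeps a copy of each sample; a hedged sketch of such a configuration:

        // Hedged sketch of ConfigureSampleGrabber: request RGB24 frames and buffer samples.
        private void ConfigureSampleGrabber(ISampleGrabber sampleGrabber)
        {
            var mediaType = new AMMediaType
            {
                majorType  = MediaType.Video,
                subType    = MediaSubType.RGB24,
                formatType = FormatType.VideoInfo
            };

            int hr = sampleGrabber.SetMediaType(mediaType);
            DsError.ThrowExceptionForHR(hr);
            DsUtils.FreeAMMediaType(mediaType);

            // Keep a copy of every sample so frames can be read back later via GetCurrentBuffer.
            hr = sampleGrabber.SetBufferSamples(true);
            DsError.ThrowExceptionForHR(hr);
        }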
Code Example #14
        public static IEnumerator <AudioEncoder> EnumerateEncoders()
        {
            ICreateDevEnum deviceEnumerator = null;

            try
            {
                deviceEnumerator = (ICreateDevEnum) new CreateDevEnum();

                IEnumMoniker monikerEnum = null;

                try
                {
                    int hr =
                        deviceEnumerator.CreateClassEnumerator(FilterGraphTools.AudioCompressorCategoryClassId,
                                                               out monikerEnum, CDef.None);
                    DsError.ThrowExceptionForHR(hr);

                    var monikers = new IMoniker[1];

                    while (true)
                    {
                        try
                        {
                            hr = monikerEnum.Next(1, monikers, IntPtr.Zero);

                            DsError.ThrowExceptionForHR(hr);

                            object bag;

                            Guid id = FilterGraphTools.IPropertyBagInterfaceId;

                            if (monikers[0] == null)
                            {
                                break;
                            }

                            monikers[0].BindToStorage(null, null, ref id, out bag);

                            var propertyBag = (IPropertyBag)bag;

                            object variantName;
                            hr = propertyBag.Read(FriendlyNameParameter, out variantName, null);
                            DsError.ThrowExceptionForHR(hr);

                            // Release the property bag only after reading from it
                            Marshal.ReleaseComObject(propertyBag);

                            string friendlyName = Convert.ToString(variantName, CultureInfo.InvariantCulture);

                            Guid id2 = FilterGraphTools.IBaseFilterInterfaceId;

                            object filter;

                            monikers[0].BindToObject(null, null, ref id2, out filter);

                            if (filter != null)
                            {
                                yield return(new AudioEncoder(friendlyName, (IBaseFilter)filter));
                            }
                        }
                        finally
                        {
                            if (monikers[0] != null)
                            {
                                Marshal.ReleaseComObject(monikers[0]);
                            }
                        }
                    }
                }
                finally
                {
                    if (monikerEnum != null)
                    {
                        Marshal.ReleaseComObject(monikerEnum);
                    }
                }
            }
            finally
            {
                if (deviceEnumerator != null)
                {
                    Marshal.ReleaseComObject(deviceEnumerator);
                }
            }
        }
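
A brief consumption sketch for the iterator above. The AudioEncoder members used below (FriendlyName and Filter) are assumed names, not confirmed by this listing:

        // Illustrative usage; property names on AudioEncoder are assumptions.
        IEnumerator<AudioEncoder> encoders = EnumerateEncoders();
        while (encoders.MoveNext())
        {
            AudioEncoder encoder = encoders.Current;
            Console.WriteLine(encoder.FriendlyName);   // assumed property
            Marshal.ReleaseComObject(encoder.Filter);  // assumed property; release the filter when done
        }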
Code Example #15
File: Form1.cs Project: gmorkvenas/pimaker
        /// <summary>
        /// Initialize the graph
        /// </summary>
        public void InitGraph()
        {
            if (theDevice == null)
            {
                return;
            }

            //Create the Graph
            graphBuilder = (IGraphBuilder) new FilterGraph();

            //Create the Capture Graph Builder
            ICaptureGraphBuilder2 captureGraphBuilder = null;

            captureGraphBuilder = (ICaptureGraphBuilder2) new CaptureGraphBuilder2();

            //Create the media control for controlling the graph
            mediaControl = (IMediaControl)this.graphBuilder;

            // Attach the filter graph to the capture graph
            int hr = captureGraphBuilder.SetFiltergraph(this.graphBuilder);

            DsError.ThrowExceptionForHR(hr);

            //Add the Video input device to the graph
            hr = graphBuilder.AddFilter(theDevice, "source filter");
            DsError.ThrowExceptionForHR(hr);


            //Add the Video compressor filter to the graph
            hr = graphBuilder.AddFilter(theCompressor, "compressor filter");
            DsError.ThrowExceptionForHR(hr);

            //Create the file writer part of the graph. SetOutputFileName does this for us, and returns the mux and sink
            IBaseFilter     mux;
            IFileSinkFilter sink;

            hr = captureGraphBuilder.SetOutputFileName(MediaSubType.Avi, textBox1.Text, out mux, out sink);
            DsError.ThrowExceptionForHR(hr);


            //Render any preview pin of the device
            hr = captureGraphBuilder.RenderStream(PinCategory.Preview, MediaType.Video, theDevice, null, null);
            DsError.ThrowExceptionForHR(hr);

            //Connect the device and compressor to the mux to render the capture part of the graph
            hr = captureGraphBuilder.RenderStream(PinCategory.Capture, MediaType.Video, theDevice, theCompressor, mux);
            DsError.ThrowExceptionForHR(hr);

#if DEBUG
            m_rot = new DsROTEntry(graphBuilder);
#endif

            //get the video window from the graph
            IVideoWindow videoWindow = null;
            videoWindow = (IVideoWindow)graphBuilder;

            //Set the owner of the videoWindow to a window handle (the Handle of any control - could be a form / button etc.)
            hr = videoWindow.put_Owner(panel1.Handle);
            DsError.ThrowExceptionForHR(hr);

            //Set the style of the video window
            hr = videoWindow.put_WindowStyle(WindowStyle.Child | WindowStyle.ClipChildren);
            DsError.ThrowExceptionForHR(hr);

            // Position video window in client rect of main application window
            hr = videoWindow.SetWindowPosition(0, 0, panel1.Width, panel1.Height);
            DsError.ThrowExceptionForHR(hr);

            // Make the video window visible
            hr = videoWindow.put_Visible(OABool.True);
            DsError.ThrowExceptionForHR(hr);

            Marshal.ReleaseComObject(mux);
            Marshal.ReleaseComObject(sink);
            Marshal.ReleaseComObject(captureGraphBuilder);
        }
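
Once InitGraph() has succeeded, capture is started and stopped through the IMediaControl obtained above; a minimal sketch:

        // Sketch: start and stop the capture graph built in InitGraph().
        public void StartCapture()
        {
            int hr = mediaControl.Run();
            DsError.ThrowExceptionForHR(hr);
        }

        public void StopCapture()
        {
            int hr = mediaControl.Stop();
            DsError.ThrowExceptionForHR(hr);
        }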
Code example #16
        public void SetMediaItem(IResourceLocator locator, string mediaItemTitle)
        {
            // Free any previously opened resources
            FilterGraphTools.TryDispose(ref _resourceAccessor);
            FilterGraphTools.TryDispose(ref _rot);

            _state    = PlayerState.Active;
            _isPaused = true;
            try
            {
                _resourceLocator = locator;
                _mediaItemTitle  = mediaItemTitle;
                if (_resourceLocator.NativeResourcePath.IsNetworkResource)
                {
                    _resourceAccessor = _resourceLocator.CreateAccessor() as INetworkResourceAccessor;
                    if (_resourceAccessor == null)
                    {
                        throw new IllegalCallException("The VideoPlayer can only play network resources of type INetworkResourceAccessor");
                    }

                    ServiceRegistration.Get <ILogger>().Debug("{0}: Initializing for network media item '{1}'", PlayerTitle, SourcePathOrUrl);
                }
                else
                {
                    ILocalFsResourceAccessor lfsr;
                    if (!_resourceLocator.TryCreateLocalFsAccessor(out lfsr))
                    {
                        throw new IllegalCallException("The VideoPlayer can only play local file system resources");
                    }

                    _resourceAccessor = lfsr;
                    ServiceRegistration.Get <ILogger>().Debug("{0}: Initializing for media item '{1}'", PlayerTitle, SourcePathOrUrl);
                }

                // Create a DirectShow FilterGraph
                CreateGraphBuilder();

                // Add it to the ROT (Running Object Table) for debugging purposes; this allows the graph to be inspected from outside (e.g. with GraphEdit)
                _rot = new DsROTEntry(_graphBuilder);

                // Add a notification handler (see WndProc)
                _instancePtr = Marshal.AllocCoTaskMem(4);
                IMediaEventEx mee = _graphBuilder as IMediaEventEx;
                if (mee != null)
                {
                    mee.SetNotifyWindow(SkinContext.Form.Handle, WM_GRAPHNOTIFY, _instancePtr);
                }

                // Create the Allocator / Presenter object
                AddPresenter();

                ServiceRegistration.Get <ILogger>().Debug("{0}: Adding audio renderer", PlayerTitle);
                AddAudioRenderer();

                ServiceRegistration.Get <ILogger>().Debug("{0}: Adding preferred codecs", PlayerTitle);
                AddPreferredCodecs();

                ServiceRegistration.Get <ILogger>().Debug("{0}: Adding file source", PlayerTitle);
                AddFileSource();

                ServiceRegistration.Get <ILogger>().Debug("{0}: Run graph", PlayerTitle);

                // This needs to be done here, before we check whether the EVR pins are connected,
                // since this method gives players the chance to render the last bits of the graph
                OnBeforeGraphRunning();

                // Now run the graph; e.g. the DVD player needs a running graph before it can get information from the DVD filter.
                IMediaControl mc = (IMediaControl)_graphBuilder;
                int           hr = mc.Run();
                DsError.ThrowExceptionForHR(hr);

                _initialized = true;
                OnGraphRunning();
            }
            catch (Exception)
            {
                Shutdown();
                throw;
            }
        }
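
SetMediaItem registers SkinContext.Form as the notification window, but the WndProc side is not shown. Below is a minimal sketch of how such a handler could drain graph events with IMediaEventEx; the OnGraphNotify name is hypothetical, and the _graphBuilder field and WM_GRAPHNOTIFY constant are assumed from the code above.

        protected void OnGraphNotify()
        {
            IMediaEventEx mee = _graphBuilder as IMediaEventEx;
            if (mee == null)
            {
                return;
            }

            EventCode code;
            IntPtr p1, p2;

            // A timeout of 0 ms makes GetEvent non-blocking, so the loop
            // simply drains everything that is currently queued.
            while (mee.GetEvent(out code, out p1, out p2, 0) == 0)
            {
                try
                {
                    if (code == EventCode.Complete)
                    {
                        // Playback reached the end of the stream.
                    }
                }
                finally
                {
                    // The event parameters must always be freed.
                    mee.FreeEventParams(code, p1, p2);
                }
            }
        }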
Code example #17
        private void BuildGraph()
        {
            int hr = 0;

            graphBuilder = (IFilterGraph2) new FilterGraph();
            rot          = new DsROTEntry(graphBuilder);

            ICaptureGraphBuilder2 capBuilder = (ICaptureGraphBuilder2) new CaptureGraphBuilder2();

            capBuilder.SetFiltergraph(graphBuilder);

            // Get the BDA network provider specific for this given network type
            networkProvider = BDAUtils.GetNetworkProvider(networkType);

            hr = graphBuilder.AddFilter(networkProvider, "BDA Network Provider");
            DsError.ThrowExceptionForHR(hr);

            tuner = (ITuner)networkProvider;

            // Get a tuning space for this network type
            tuningSpace = BDAUtils.GetTuningSpace(networkType);

            hr = tuner.put_TuningSpace(tuningSpace);
            DsError.ThrowExceptionForHR(hr);

            // Create a tune request from this tuning space
            tuneRequest = BDAUtils.CreateTuneRequest(tuningSpace);

            // Is the tune request valid?
            hr = tuner.Validate(tuneRequest);
            if (hr == 0)
            {
                // Set it
                hr = tuner.put_TuneRequest(tuneRequest);
                DsError.ThrowExceptionForHR(hr);

                // Find a BDA tuner and a BDA capture filter that can connect to this network provider
                BDAUtils.AddBDATunerAndDemodulatorToGraph(graphBuilder, networkProvider, out bdaTuner, out bdaCapture);

                if ((bdaTuner != null) && (bdaCapture != null))
                {
                    // Create and add the mpeg2 demux
                    mpeg2Demux = (IBaseFilter) new MPEG2Demultiplexer();

                    hr = graphBuilder.AddFilter(mpeg2Demux, "MPEG2 Demultiplexer");
                    DsError.ThrowExceptionForHR(hr);

                    // connect it to the BDA Capture
                    hr = capBuilder.RenderStream(null, null, bdaCapture, null, mpeg2Demux);
                    DsError.ThrowExceptionForHR(hr);

                    // Add the two mpeg2 transport stream helper filters
                    BDAUtils.AddTransportStreamFiltersToGraph(graphBuilder, out bdaTIF, out bdaSecTab);

                    if ((bdaTIF != null) && (bdaSecTab != null))
                    {
                        // Render all the output pins of the demux (missing filters are added)
                        for (int i = 0; i < 5; i++)
                        {
                            IPin pin = DsFindPin.ByDirection(mpeg2Demux, PinDirection.Output, i);

                            hr = graphBuilder.Render(pin);
                            Marshal.ReleaseComObject(pin);
                        }
                    }
                }
            }

            // Currently only the DVB-T Network Provider supports this interface...
            freqMap = (IFrequencyMap)networkProvider;
        }
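
BuildGraph only submits the initial tune request created by BDAUtils. As a hedged sketch, re-tuning later could look roughly like the method below; it assumes the tuner and tuneRequest fields above and that the request already carries a locator. The TuneTo name and the kHz unit for the DVB-T carrier frequency are assumptions, not part of the original code.

        private void TuneTo(int carrierFrequencyKHz)
        {
            ILocator locator;
            int hr = tuneRequest.get_Locator(out locator);
            DsError.ThrowExceptionForHR(hr);

            if (locator == null)
            {
                return;
            }

            try
            {
                // Change the carrier frequency on the existing locator
                hr = locator.put_CarrierFrequency(carrierFrequencyKHz);
                DsError.ThrowExceptionForHR(hr);

                hr = tuneRequest.put_Locator(locator);
                DsError.ThrowExceptionForHR(hr);

                // Resubmit the modified request to the network provider
                hr = tuner.put_TuneRequest(tuneRequest);
                DsError.ThrowExceptionForHR(hr);
            }
            finally
            {
                Marshal.ReleaseComObject(locator);
            }
        }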
Code example #18
File: Form1.cs    Project: d3x0r/xperdex
        /// <summary>
        /// Displays a property page for a filter
        /// </summary>
        /// <param name="dev">The filter for which to display a property page</param>
        private void DisplayPropertyPage(IBaseFilter dev)
        {
            //Get the ISpecifyPropertyPages for the filter
            ISpecifyPropertyPages pProp = dev as ISpecifyPropertyPages;
            int hr = 0;

            if (pProp == null)
            {
                //If the filter doesn't implement ISpecifyPropertyPages, try displaying IAMVfwCompressDialogs instead!
                IAMVfwCompressDialogs compressDialog = dev as IAMVfwCompressDialogs;
                if (compressDialog != null)
                {
                    hr = compressDialog.ShowDialog(VfwCompressDialogs.Config, IntPtr.Zero);
                    DsError.ThrowExceptionForHR(hr);
                }
                return;
            }

            //Get the name of the filter from the FilterInfo struct
            FilterInfo filterInfo;

            hr = dev.QueryFilterInfo(out filterInfo);
            DsError.ThrowExceptionForHR(hr);

            // Get the property pages from the filter
            DsCAUUID caGUID;

            hr = pProp.GetPages(out caGUID);
            DsError.ThrowExceptionForHR(hr);

            // Check for property pages on the output pin
            IPin pPin = DsFindPin.ByDirection(dev, PinDirection.Output, 0);
            ISpecifyPropertyPages pProp2 = pPin as ISpecifyPropertyPages;

            if (pProp2 != null)
            {
                DsCAUUID caGUID2;
                hr = pProp2.GetPages(out caGUID2);
                DsError.ThrowExceptionForHR(hr);

                if (caGUID2.cElems > 0)
                {
                    int soGuid = Marshal.SizeOf(typeof(Guid));

                    // Create a new buffer to hold all the GUIDs
                    IntPtr p1 = Marshal.AllocCoTaskMem((caGUID.cElems + caGUID2.cElems) * soGuid);

                    // Copy over the pages from the Filter
                    for (int x = 0; x < caGUID.cElems * soGuid; x++)
                    {
                        Marshal.WriteByte(p1, x, Marshal.ReadByte(caGUID.pElems, x));
                    }

                    // Add the pages from the pin
                    for (int x = 0; x < caGUID2.cElems * soGuid; x++)
                    {
                        Marshal.WriteByte(p1, x + (caGUID.cElems * soGuid), Marshal.ReadByte(caGUID2.pElems, x));
                    }

                    // Release the old memory
                    Marshal.FreeCoTaskMem(caGUID.pElems);
                    Marshal.FreeCoTaskMem(caGUID2.pElems);

                    // Reset caGUID to include both
                    caGUID.pElems  = p1;
                    caGUID.cElems += caGUID2.cElems;
                }
            }

            // Create and display the OlePropertyFrame
            object oDevice = (object)dev;

            hr = OleCreatePropertyFrame(this.Handle, 0, 0, filterInfo.achName, 1, ref oDevice, caGUID.cElems, caGUID.pElems, 0, 0, IntPtr.Zero);
            DsError.ThrowExceptionForHR(hr);

            // Release COM objects
            Marshal.FreeCoTaskMem(caGUID.pElems);
            Marshal.ReleaseComObject(pProp);
            if (filterInfo.pGraph != null)
            {
                Marshal.ReleaseComObject(filterInfo.pGraph);
            }
        }
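
DisplayPropertyPage relies on a P/Invoke declaration for OleCreatePropertyFrame that is not shown in the excerpt. A declaration commonly used with DirectShow.NET samples looks like the following (the export lives in oleaut32.dll; older samples import the same function from olepro32.dll):

        [DllImport("oleaut32.dll", CharSet = CharSet.Unicode, ExactSpelling = true)]
        private static extern int OleCreatePropertyFrame(
            IntPtr hwndOwner,
            int x,
            int y,
            [MarshalAs(UnmanagedType.LPWStr)] string lpszCaption,
            int cObjects,
            [MarshalAs(UnmanagedType.IUnknown)] ref object ppUnk,
            int cPages,
            IntPtr pPageClsID,
            int lcid,
            int dwReserved,
            IntPtr pvReserved);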
Code example #19
File: DSRecord.cs    Project: zhjh-stack/ogama
        ///////////////////////////////////////////////////////////////////////////////
        // Inherited methods                                                         //
        ///////////////////////////////////////////////////////////////////////////////
        #region OVERRIDES
        #endregion //OVERRIDES

        ///////////////////////////////////////////////////////////////////////////////
        // Eventhandler                                                              //
        ///////////////////////////////////////////////////////////////////////////////
        #region EVENTS

        ///////////////////////////////////////////////////////////////////////////////
        // Eventhandler for UI, Menu, Buttons, Toolbars etc.                         //
        ///////////////////////////////////////////////////////////////////////////////
        #region WINDOWSEVENTHANDLER
        #endregion //WINDOWSEVENTHANDLER

        ///////////////////////////////////////////////////////////////////////////////
        // Eventhandler for Custom Defined Events                                    //
        ///////////////////////////////////////////////////////////////////////////////
        #region CUSTOMEVENTHANDLER
        #endregion //CUSTOMEVENTHANDLER

        #endregion //EVENTS

        ///////////////////////////////////////////////////////////////////////////////
        // Methods and Eventhandling for Background tasks                            //
        ///////////////////////////////////////////////////////////////////////////////
        #region BACKGROUNDWORKER
        #endregion //BACKGROUNDWORKER

        ///////////////////////////////////////////////////////////////////////////////
        // Methods for doing main class job                                          //
        ///////////////////////////////////////////////////////////////////////////////
        #region PRIVATEMETHODS

        /// <summary>
        /// Build the filter graph
        /// </summary>
        /// <returns>True if successful, otherwise false.</returns>
        private bool SetupGraph()
        {
            int hr;

            // Get the graphbuilder object
            this.filterGraph = new FilterGraph() as IFilterGraph2;

            // Get a ICaptureGraphBuilder2 to help build the graph
            ICaptureGraphBuilder2 captureGraph = (ICaptureGraphBuilder2) new CaptureGraphBuilder2();

            try
            {
                // Link the ICaptureGraphBuilder2 to the IFilterGraph2
                hr = captureGraph.SetFiltergraph(this.filterGraph);
                DsError.ThrowExceptionForHR(hr);

#if DEBUG
                // Allows you to view the graph with GraphEdit File/Connect
                this.dsRot = new DsROTEntry(this.filterGraph);
#endif

                // Our data source
                IBaseFilter bitmapSource = (IBaseFilter) new GenericSampleSourceFilter();

                try
                {
                    // Get the pin from the filter so we can configure it
                    IPin ipin = DsFindPin.ByDirection(bitmapSource, PinDirection.Output, 0);

                    try
                    {
                        // Configure the pin using the provided BitmapInfo
                        this.ConfigurePusher((IGenericSampleConfig)ipin);
                    }
                    catch (Exception ex)
                    {
                        ExceptionMethods.HandleException(ex);
                        return(false);
                    }
                    finally
                    {
                        Marshal.ReleaseComObject(ipin);
                    }

                    // Add the source filter to the graph
                    hr = this.filterGraph.AddFilter(bitmapSource, "GenericSampleSourceFilter");
                    Marshal.ThrowExceptionForHR(hr);

                    IBaseFilter smartTee = new SmartTee() as IBaseFilter;

                    // Add the smart tee filter to the graph
                    hr = this.filterGraph.AddFilter(smartTee, "Smart Tee");
                    Marshal.ThrowExceptionForHR(hr);

                    // Add the Video compressor filter to the graph
                    if (this.videoCompressor != null)
                    {
                        hr = this.filterGraph.AddFilter(this.videoCompressor, "video compressor filter");
                        DsError.ThrowExceptionForHR(hr);
                    }

                    // Create the file writer part of the graph.
                    // SetOutputFileName does this for us, and returns the mux and sink
                    IBaseFilter     mux;
                    IFileSinkFilter sink;
                    hr = captureGraph.SetOutputFileName(
                        MediaSubType.Avi,
                        this.videoExportProperties.OutputVideoProperties.Filename,
                        out mux,
                        out sink);
                    DsError.ThrowExceptionForHR(hr);

                    // Connect the bitmap source output to the smart tee
                    hr = captureGraph.RenderStream(
                        null,
                        null,
                        bitmapSource,
                        null,
                        smartTee);
                    DsError.ThrowExceptionForHR(hr);

                    // Render the smart tee's capture pin through the optional
                    // compressor into the file muxer
                    hr = captureGraph.RenderStream(
                        null,
                        null,
                        smartTee,
                        this.videoCompressor,
                        mux);
                    DsError.ThrowExceptionForHR(hr);

                    // Render the smart tee preview pin to the default video renderer
                    hr = captureGraph.RenderStream(
                        null,
                        null,
                        smartTee,
                        null,
                        null);
                    DsError.ThrowExceptionForHR(hr);

                    // Configure the Video Window
                    this.videoWindow = this.filterGraph as IVideoWindow;
                    this.ConfigureVideoWindow(this.videoWindow, this.previewWindow);
                }
                catch (Exception ex)
                {
                    ExceptionMethods.HandleException(ex);
                    return(false);
                }
                finally
                {
                    //Marshal.ReleaseComObject(bitmapSource);
                }

                // Grab some other interfaces
                this.mediaControl = this.filterGraph as IMediaControl;
            }
            catch (Exception ex)
            {
                ExceptionMethods.HandleException(ex);
                return(false);
            }
            finally
            {
                Marshal.ReleaseComObject(captureGraph);
            }

            return(true);
        }
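
SetupGraph delegates the preview setup to ConfigureVideoWindow, whose body is not included in the excerpt. A minimal sketch of what such a helper typically does with IVideoWindow is shown below; the parameter names are assumptions and the real OGAMA implementation may differ.

        private void ConfigureVideoWindow(IVideoWindow videoWindow, Control owner)
        {
            // Host the video inside the preview control
            int hr = videoWindow.put_Owner(owner.Handle);
            DsError.ThrowExceptionForHR(hr);

            hr = videoWindow.put_WindowStyle(WindowStyle.Child | WindowStyle.ClipChildren | WindowStyle.ClipSiblings);
            DsError.ThrowExceptionForHR(hr);

            // Fill the client area of the owner control
            hr = videoWindow.SetWindowPosition(0, 0, owner.ClientSize.Width, owner.ClientSize.Height);
            DsError.ThrowExceptionForHR(hr);

            hr = videoWindow.put_Visible(OABool.True);
            DsError.ThrowExceptionForHR(hr);
        }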
Code example #20
        void TestMenusAndButtons()
        {
            int       hr;
            IDvdCmd   ppCmd;
            DvdDomain dvdd;
            int       pulButtonsAvailable;
            int       pulCurrentButton;

            // Top menu
            hr = m_idc2.ShowMenu(DvdMenuId.Title, DvdCmdFlags.Flush | DvdCmdFlags.SendEvents, out ppCmd);
            DsError.ThrowExceptionForHR(hr);

            Thread.Sleep(100);
            Application.DoEvents();

            hr = m_idi2.GetCurrentDomain(out dvdd);
            DsError.ThrowExceptionForHR(hr);

            Debug.Assert(dvdd == DvdDomain.VideoManagerMenu, "TestMenusAndButtons");

            hr = m_idc2.SelectButton(2);
            DsError.ThrowExceptionForHR(hr);

            Thread.Sleep(100);
            Application.DoEvents();

            hr = m_idi2.GetCurrentButton(out pulButtonsAvailable, out pulCurrentButton);
            DsError.ThrowExceptionForHR(hr);

            Debug.Assert(pulCurrentButton == 2, "TestMenusAndButtons2");
            Debug.Assert(pulButtonsAvailable == 2, "TestMenusAndButtons2a");

            // Button 1 leads to a chapter menu with 2 items
            hr = m_idc2.SelectButton(1);
            DsError.ThrowExceptionForHR(hr);

            Thread.Sleep(100);
            Application.DoEvents();

            hr = m_idc2.ActivateButton();
            DsError.ThrowExceptionForHR(hr);

            Thread.Sleep(100);

            hr = m_idc2.ReturnFromSubmenu(DvdCmdFlags.Flush | DvdCmdFlags.SendEvents, out ppCmd);
            DsError.ThrowExceptionForHR(hr);

            Thread.Sleep(100);

            hr = m_idc2.SelectRelativeButton(DvdRelativeButton.Lower);
            DsError.ThrowExceptionForHR(hr);

            Thread.Sleep(100);

            hr = m_idi2.GetCurrentButton(out pulButtonsAvailable, out pulCurrentButton);
            DsError.ThrowExceptionForHR(hr);

            Debug.Assert(pulCurrentButton == 2, "TestMenusAndButtons2");

            hr = m_idc2.SelectAndActivateButton(1);
            DsError.ThrowExceptionForHR(hr);

            Thread.Sleep(100);

            hr = m_idc2.SelectButton(2);
            DsError.ThrowExceptionForHR(hr);

            Thread.Sleep(100);

            hr = m_idc2.SelectAtPosition(new Point(130, 130));
            DsError.ThrowExceptionForHR(hr);

            Thread.Sleep(100);

            hr = m_idi2.GetCurrentButton(out pulButtonsAvailable, out pulCurrentButton);
            DsError.ThrowExceptionForHR(hr);

            Debug.Assert(pulCurrentButton == 1, "TestMenusAndButtons2");

            hr = m_idc2.ReturnFromSubmenu(DvdCmdFlags.Flush | DvdCmdFlags.SendEvents, out ppCmd);
            DsError.ThrowExceptionForHR(hr);

            Thread.Sleep(100);

            hr = m_idc2.ActivateAtPosition(new Point(130, 130));
            DsError.ThrowExceptionForHR(hr);

            Thread.Sleep(100);
        }
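
The DVD tests above assume that m_idc2 (IDvdControl2) and m_idi2 (IDvdInfo2) have already been set up. A hedged sketch of how they are typically obtained from the DVD Navigator source filter follows; the BuildDvdGraph name is hypothetical, and the actual test fixture may build its graph differently.

        private IGraphBuilder m_graphBuilder;
        private IDvdControl2 m_idc2;
        private IDvdInfo2 m_idi2;

        private void BuildDvdGraph()
        {
            m_graphBuilder = (IGraphBuilder) new FilterGraph();

            // The DVD Navigator source filter implements both control interfaces
            IBaseFilter dvdNav = (IBaseFilter) new DVDNavigator();
            int hr = m_graphBuilder.AddFilter(dvdNav, "DVD Navigator");
            DsError.ThrowExceptionForHR(hr);

            m_idc2 = (IDvdControl2)dvdNav;
            m_idi2 = (IDvdInfo2)dvdNav;

            // The test fixture still has to render the navigator's output pins
            // (video, audio, subpicture) and run the graph before the tests
            // above can drive menus and playback.
        }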
Code example #21
 public void Seek(int timeInMs)
 {
     DsError.ThrowExceptionForHR(seeker.SetPositions(new DsLong(timeInMs * 10000L), AMSeekingSeekingFlags.AbsolutePositioning, null, AMSeekingSeekingFlags.NoPositioning));
     DsError.ThrowExceptionForHR(control.GetState(1000, out state));
 }
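
The Seek helper above assumes seeker and control fields that are not shown. Below is a minimal sketch of how they are typically obtained from a built graph; note that IMediaSeeking works in 100-nanosecond units, which is why Seek multiplies milliseconds by 10000 (using a long literal to avoid integer overflow for longer positions).

 private IMediaSeeking seeker;
 private IMediaControl control;
 private FilterState state;

 private void AttachToGraph(IGraphBuilder graphBuilder)
 {
     // Both interfaces are implemented by the filter graph manager itself
     seeker  = (IMediaSeeking)graphBuilder;
     control = (IMediaControl)graphBuilder;
 }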
Code example #22
        void TestPauseResume()
        {
            int                  hr;
            IDvdCmd              ppCmd;
            DvdDomain            dvdd;
            DvdPlaybackLocation2 pLocation, pLocation2;

            AllowPlay();
            hr = m_idc2.PlayChapterInTitle(1, 2, DvdCmdFlags.Flush | DvdCmdFlags.SendEvents, out ppCmd);
            DsError.ThrowExceptionForHR(hr);

            Thread.Sleep(500);

            hr = m_idc2.Pause(true);
            DsError.ThrowExceptionForHR(hr);

            Thread.Sleep(3000);

            hr = m_idi2.GetCurrentLocation(out pLocation);
            DsError.ThrowExceptionForHR(hr);

            Thread.Sleep(3000);

            hr = m_idi2.GetCurrentLocation(out pLocation2);
            DsError.ThrowExceptionForHR(hr);

            Debug.Assert(pLocation.TimeCode.bSeconds == pLocation2.TimeCode.bSeconds, "TestPauseResume");

            hr = m_idc2.Pause(false);
            DsError.ThrowExceptionForHR(hr);

            Thread.Sleep(500);

            hr = m_idi2.GetCurrentLocation(out pLocation2);
            DsError.ThrowExceptionForHR(hr);

            Debug.Assert(pLocation.TimeCode.bSeconds != pLocation2.TimeCode.bSeconds, "TestPauseResume2");

            hr = m_idc2.ShowMenu(DvdMenuId.Root, DvdCmdFlags.Flush | DvdCmdFlags.SendEvents, out ppCmd);
            DsError.ThrowExceptionForHR(hr);

            Thread.Sleep(500);

            hr = m_idi2.GetCurrentDomain(out dvdd);
            DsError.ThrowExceptionForHR(hr);

            Debug.Assert(dvdd == DvdDomain.VideoManagerMenu, "TestPauseResume3");

            hr = m_idc2.Resume(DvdCmdFlags.None, out ppCmd);
            DsError.ThrowExceptionForHR(hr);

            Thread.Sleep(500);

            hr = m_idi2.GetCurrentDomain(out dvdd);
            DsError.ThrowExceptionForHR(hr);

            Debug.Assert(dvdd == DvdDomain.Title, "TestPauseResume4");

            hr = m_idc2.StillOff();
            DsError.ThrowExceptionForHR(hr);

            hr = m_idc2.Stop();
            DsError.ThrowExceptionForHR(hr);
        }
Code example #23
        /// <summary>
        /// Opens the media by initializing the DirectShow graph
        //
        //                                          +----------------+          +----------------+       +-----------------------+
        //     +---------------------+              | LavVideo       |          | VobSub         |       | EVR+CustomPresenter   |
        //     | LavSplitterSource   |              |----------------|          |----------------|       |-----------------------|
        //     +---------------------+              |                |          |                |       |                       |
        //     |                     |              |                |          |                |       |    VIDEO              |
        //     |             video  +|->+---------+<-+ IN       OUT +->+------+<-+ VID_IN   OUT +-> +-+ <-+   RENDERER           |
        //     |                     |              +----------------+          |                |       |                       |
        //     |             audio  +|->+------+                                |                |       +-----------------------+
        //     |                     |         |    +----------------+      +-+<-+ TXT_IN        |
        //     |          subtitle  +|->+--+   |    | LavAudio       |      |   |                |
        //     +---------------------+    |   |    |----------------|      |   +----------------+       +-----------------------+
        //                                 |   |    |                |      |                            | DShow output device   |
        //                                 |   |    |                |     xxx                           |-----------------------|
        //                                 |   +--+<-+ IN       OUT +->+--x | x-----------------------+  |                       |
        //                                 |        +----------------+      |                            |    AUDIO              |
        //                                 |                                |                           <-+   RENDERER           |
        //                                 |                                |                            |                       |
        //                                 +--------------------------------+                            +-----------------------+
        //
        /// </summary>
//        protected virtual void OpenSource()
//        {
//            /* Make sure we clean up any remaining mess */
//            //if (m_graph != null) RemoveAllFilters(m_graph);
//            FreeResources();

//            if (m_sourceUri == null)
//                return;

//            string fileSource = m_sourceUri.OriginalString;

//            if (string.IsNullOrEmpty(fileSource))
//                return;

//            try
//            {
//                if (m_graph != null) Marshal.ReleaseComObject(m_graph);

//                /* Creates the GraphBuilder COM object */
//                m_graph = new FilterGraphNoThread() as IGraphBuilder;

//                if (m_graph == null)
//                    throw new Exception("Could not create a graph");

//                /* Add our prefered audio renderer */
//                var audioRenderer = InsertAudioRenderer(AudioRenderer);
//                if (_audioRenderer != null) Marshal.ReleaseComObject(_audioRenderer);
//                _audioRenderer = audioRenderer;

//                if ((System.Environment.OSVersion.Platform == PlatformID.Win32NT &&
//                (System.Environment.OSVersion.Version.Major == 5)))
//                    VideoRenderer = VideoRendererType.VideoMixingRenderer9;

//                IBaseFilter renderer = CreateVideoRenderer(VideoRenderer, m_graph, 2);
//                if (_renderer != null) Marshal.ReleaseComObject(_renderer);
//                _renderer = renderer;
//                //if (_renderer != null)
//                //    m_graph.AddFilter((IBaseFilter)_renderer, "Renderer");

//                var filterGraph = m_graph as IFilterGraph2;

//                if (filterGraph == null)
//                    throw new Exception("Could not QueryInterface for the IFilterGraph2");

//                ILAVAudioSettings lavAudioSettings;
//                ILAVAudioStatus lavStatus;
//                IBaseFilter audioDecoder = FilterProvider.GetAudioFilter(out lavAudioSettings, out lavStatus);
//                if (audioDecoder != null)
//                {
//                    if (_audio != null) Marshal.ReleaseComObject(_audio);
//                    _audio = audioDecoder;
//                    lavAudioSettings.SetRuntimeConfig(true);
//                    m_graph.AddFilter((IBaseFilter)_audio, "LavAudio");
//                }

//                ILAVSplitterSettings splitterSettings;
//                IFileSourceFilter splitter = FilterProvider.GetSplitterSource(out splitterSettings);
//                //IBaseFilter splitter = FilterProvider.GetSplitter(out splitterSettings);

//                if (splitter != null)
//                {
//                    splitter.Load(fileSource, null);
//                    if (_splitter != null) Marshal.ReleaseComObject(_splitter);
//                    _splitter = splitter;
//                    splitterSettings.SetRuntimeConfig(true);
//                    m_graph.AddFilter((IBaseFilter)splitter, "LavSplitter");
//                }

//                int hr = 0;


//                /* We will want to enum all the pins on the source filter */
//                IEnumPins pinEnum;

//                hr = ((IBaseFilter)splitter).EnumPins(out pinEnum);
//                DsError.ThrowExceptionForHR(hr);

//                IntPtr fetched = IntPtr.Zero;
//                IPin[] pins = { null };

//                /* Counter for how many pins successfully rendered */


//                if (VideoRenderer == VideoRendererType.VideoMixingRenderer9)
//                {
//                    var mixer = renderer as IVMRMixerControl9;

//                    if (mixer != null)
//                    {
//                        VMR9MixerPrefs dwPrefs;
//                        mixer.GetMixingPrefs(out dwPrefs);
//                        dwPrefs &= ~VMR9MixerPrefs.RenderTargetMask;
//                        dwPrefs |= VMR9MixerPrefs.RenderTargetRGB;
//                        //mixer.SetMixingPrefs(dwPrefs);
//                    }
//                }

//                // Test using FFDShow Video Decoder Filter
//                ILAVVideoSettings lavVideoSettings;
//                IBaseFilter lavVideo = FilterProvider.GetVideoFilter(out lavVideoSettings);
//                if (_video != null) Marshal.ReleaseComObject(_video);
//                _video = lavVideo;

//                IBaseFilter vobSub = FilterProvider.GetVobSubFilter();

//                if (vobSub != null)
//                {
//                    m_graph.AddFilter(vobSub, "VobSub");
//                    IDirectVobSub vss = vobSub as IDirectVobSub;
//                    if (_vobsub != null) Marshal.ReleaseComObject(_vobsub);
//                    _vobsub = vss;
//                    InitSubSettings();
//                }

//                if (lavVideo != null)
//                {
//                    lavVideoSettings.SetRuntimeConfig(true);
//                    m_graph.AddFilter(lavVideo, "LavVideo");
//                }

//                int ret;

//                IBaseFilter dcDsp = FilterProvider.GetDCDSPFilter();
//                if (dcDsp != null)
//                {
//                    _dspFilter = (IDCDSPFilterInterface)dcDsp;

//                    //hr = i.set_PCMDataBeforeMainDSP(true);
//                    hr = m_graph.AddFilter((IBaseFilter)dcDsp, "VobSub");

//                    ret = m_graph.Connect(DsFindPin.ByName((IBaseFilter)splitter, "Audio"), DsFindPin.ByDirection(audioDecoder, PinDirection.Input, 0));
//                    ret = m_graph.Connect(DsFindPin.ByDirection((IBaseFilter)audioDecoder, PinDirection.Output, 0), DsFindPin.ByDirection(_dspFilter, PinDirection.Input, 0));
//                    ret = m_graph.Connect(DsFindPin.ByDirection((IBaseFilter)_dspFilter, PinDirection.Output, 0), DsFindPin.ByDirection(audioRenderer, PinDirection.Input, 0));

//                    //bool d = false;
//                    //int delay = 0;
//                    //hr = i.get_EnableDelay(ref d);
//                    int cnt = 0;
//                    object intf = null;
//                    //hr = i.set_EnableDelay(true);
//                    //hr = i.set_Delay(0);
//                    hr = _dspFilter.set_AddFilter(0, TDCFilterType.ftEqualizer);
//                    hr = _dspFilter.get_FilterCount(ref cnt);
//                    hr = _dspFilter.get_FilterInterface(0, out intf);
//                    _equalizer = (IDCEqualizer)intf;
//                    hr = _dspFilter.set_AddFilter(0, TDCFilterType.ftDownMix);
//                    hr = _dspFilter.get_FilterInterface(0, out intf);
//                    _downmix = (IDCDownMix)intf;
//                    hr = _dspFilter.set_AddFilter(0, TDCFilterType.ftAmplify);
//                    hr = _dspFilter.get_FilterInterface(0, out intf);
//                    _amplify = (IDCAmplify)intf;

//                    _equalizer.set_Seperate(false);
//                }

//                bool subconnected = false;
//                ret = m_graph.Connect(DsFindPin.ByName((IBaseFilter)splitter, "Video"), DsFindPin.ByDirection(lavVideo, PinDirection.Input, 0));
//                ret = m_graph.Connect(DsFindPin.ByDirection((IBaseFilter)lavVideo, PinDirection.Output, 0), DsFindPin.ByDirection(vobSub, PinDirection.Input, 0));
//                if (ret == 0)
//                {
//                    int lc;
//                    ((IDirectVobSub)vobSub).get_LanguageCount(out lc);
//                    subconnected = (lc != 0);
//                    IPin pn = DsFindPin.ByName((IBaseFilter)splitter, "Subtitle");
//                    if (pn != null)
//                    {
//                        ret = m_graph.Connect(pn, DsFindPin.ByDirection(vobSub, PinDirection.Input, 1));
//                        ((IDirectVobSub)vobSub).get_LanguageCount(out lc);
//                        subconnected = (lc != 0);
//                    }
//                    ret = m_graph.Connect(DsFindPin.ByDirection(vobSub, PinDirection.Output, 0),
//                                          DsFindPin.ByDirection(renderer, PinDirection.Input, 0));
//                }
//                else
//                {
//                    ret = m_graph.Connect(DsFindPin.ByDirection(lavVideo, PinDirection.Output, 0),
//                                      DsFindPin.ByDirection(renderer, PinDirection.Input, 0));
//                }

//                /* Loop over each pin of the source filter */
//                while (pinEnum.Next(pins.Length, pins, fetched) == 0)
//                {
//                    IPin cTo;
//                    pins[0].ConnectedTo(out cTo);
//                    if (cTo == null)
//                    {
//                        // this should not happen if the filtegraph is manually connected in a good manner
//                        ret = filterGraph.RenderEx(pins[0], AMRenderExFlags.RenderToExistingRenderers, IntPtr.Zero);
//                    }
//                    else
//                    {
//                        Marshal.ReleaseComObject(cTo);
//                    }
//                    Marshal.ReleaseComObject(pins[0]);
//                }

//                if (lavVideoSettings != null)
//                {
//                    if (lavVideoSettings.CheckHWAccelSupport(LAVHWAccel.HWAccel_CUDA) != 0)
//                    {
//                        ret = lavVideoSettings.SetHWAccel(LAVHWAccel.HWAccel_CUDA);
//                    }
//                    else if (lavVideoSettings.CheckHWAccelSupport(LAVHWAccel.HWAccel_QuickSync) != 0)
//                    {
//                        ret = lavVideoSettings.SetHWAccel(LAVHWAccel.HWAccel_QuickSync);
//                    }
//                    else if (lavVideoSettings.CheckHWAccelSupport(LAVHWAccel.HWAccel_DXVA2Native) != 0)
//                    {
//                        ret = lavVideoSettings.SetHWAccel(LAVHWAccel.HWAccel_DXVA2Native);
//                    }
//                    else if (lavVideoSettings.CheckHWAccelSupport(LAVHWAccel.HWAccel_DXVA2) != 0)
//                    {
//                        ret = lavVideoSettings.SetHWAccel(LAVHWAccel.HWAccel_DXVA2);
//                    }
//                    else if (lavVideoSettings.CheckHWAccelSupport(LAVHWAccel.HWAccel_DXVA2CopyBack) != 0)
//                    {
//                        ret = lavVideoSettings.SetHWAccel(LAVHWAccel.HWAccel_DXVA2CopyBack);
//                    }
//                }

//                //hr = m_graph.RenderFile(fileSource, null);

//                Marshal.ReleaseComObject(pinEnum);

//                IAMStreamSelect selector = splitter as IAMStreamSelect;
//                int numstreams;
//                selector.Count(out numstreams);
//                AMMediaType mt;
//                AMStreamSelectInfoFlags fl;
//                SubtitleStreams.Clear();
//                VideoStreams.Clear();
//                AudioStreams.Clear();
//                for (int i = 0; i < numstreams; i++)
//                {
//                    int lcid;
//                    int group;
//                    string name;
//                    object o, o2;
//                    selector.Info(i, out mt, out fl, out lcid, out group, out name, out o, out o2);
//                    switch (group)
//                    {
//                        case 0:
//                            VideoStreams.Add(name);
//                            break;
//                        case 1:
//                            AudioStreams.Add(name);
//                            break;
//                        case 2:
//                            SubtitleStreams.Add(name);
//                            break;
//                    }

//                    if (o != null) Marshal.ReleaseComObject(o);
//                    if (o2 != null) Marshal.ReleaseComObject(o2);
//                }

//                OnPropertyChanged("SubtitleStreams");
//                OnPropertyChanged("VideoStreams");
//                OnPropertyChanged("AudioStreams");

//                //Marshal.ReleaseComObject(splitter);


//                /* Configure the graph in the base class */
//                SetupFilterGraph(m_graph);

//#if DEBUG
//                /* Adds the GB to the ROT so we can view
//* it in graphedit */
//                m_dsRotEntry = new DsROTEntry(m_graph);
//#endif

//                //if (_splitterSettings != null)
//                //{
//                // Marshal.ReleaseComObject(_splitterSettings);
//                // _splitterSettings = null;
//                //}
//                if (_splitterSettings != null) Marshal.ReleaseComObject(_splitterSettings);
//                _splitterSettings = (ILAVSplitterSettings)splitter;
//                //ret = _splitterSettings.SetRuntimeConfig(true);
//                //if (ret != 0)
//                // throw new Exception("Could not set SetRuntimeConfig to true");

//                //string sss = "*:*";

//                //LAVSubtitleMode mode = LAVSubtitleMode.LAVSubtitleMode_NoSubs;
//                //mode = _splitterSettings.GetSubtitleMode();
//                //if (mode != LAVSubtitleMode.LAVSubtitleMode_Default)
//                // throw new Exception("Could not set GetAdvancedSubtitleConfige");

//                //ret = _splitterSettings.SetSubtitleMode(LAVSubtitleMode.LAVSubtitleMode_Advanced);
//                //if (ret != 1)
//                // throw new Exception("Could not set SetAdvancedSubtitleConfige");

//                //ret = _splitterSettings.SetAdvancedSubtitleConfig(sss);
//                //if (ret != 1)
//                // throw new Exception("Could not set SetAdvancedSubtitleConfige");

//                //sss = "";
//                //ret = _splitterSettings.GetAdvancedSubtitleConfig(out sss);
//                //if (ret != 0)
//                // throw new Exception("Could not set GetAdvancedSubtitleConfige");

//                //IPin sub = DsFindPin.ByDirection((IBaseFilter)splitter, PinDirection.Output, 2);
//                //PinInfo pi;
//                //sub.QueryPinInfo(out pi);
//                SIZE a, b;
//                if ((_displayControl).GetNativeVideoSize(out a, out b) == 0)
//                {
//                    if (a.cx > 0 && a.cy > 0)
//                    {
//                        HasVideo = true;
//                        SetNativePixelSizes(a);
//                    }
//                }

//                if (!subconnected)
//                {
//                    InvokeNoSubtitleLoaded(new EventArgs());
//                }
//                else
//                {
//                    InitSubSettings();
//                }
//                /* Sets the NaturalVideoWidth/Height */
//                //SetNativePixelSizes(renderer);

//                //InvokeMediaFailed(new MediaFailedEventArgs(sss, null));
//            }
//            catch (Exception ex)
//            {
//                /* This exection will happen usually if the media does
//                * not exist or could not open due to not having the
//                * proper filters installed */
//                FreeResources();

//                /* Fire our failed event */
//                InvokeMediaFailed(new MediaFailedEventArgs(ex.Message, ex));
//            }
//            finally
//            {
//                string filters = string.Join(Environment.NewLine, EnumAllFilters(m_graph).ToArray());
//                System.Diagnostics.Debug.WriteLine(filters);
//            }
//            InvokeMediaOpened();
//        }
        protected virtual void OpenSource()
        {
            _eqEnabled = false;
            //if (m_graph != null)
            //{
            //    //RemoveAllFilters(m_graph);
            //    Marshal.ReleaseComObject(m_graph);
            //}

            /* Make sure we clean up any remaining mess */
            FreeResources();

            if (m_sourceUri == null)
            {
                return;
            }

            string fileSource = m_sourceUri.OriginalString;

            if (string.IsNullOrEmpty(fileSource))
            {
                return;
            }

            try
            {
                int hr = 0;

                /* Creates the GraphBuilder COM object */
                m_graph = new FilterGraphNoThread() as IGraphBuilder;

                if (_displayControl != null)
                {
                    Marshal.ReleaseComObject(_displayControl);
                    _displayControl = null;
                }

                if (_displayControlVMR != null)
                {
                    Marshal.ReleaseComObject(_displayControlVMR);
                    _displayControlVMR = null;
                }

                if (m_graph == null)
                {
                    throw new Exception("Could not create a graph");
                }

                var filterGraph = m_graph as IFilterGraph2;

                var flt = EnumAllFilters(m_graph).ToList();

                if (filterGraph == null)
                {
                    throw new Exception("Could not QueryInterface for the IFilterGraph2");
                }

                /* Add our preferred audio renderer */
                var audioRenderer = InsertAudioRenderer(AudioRenderer);
                if (audioRenderer != null)
                {
                    if (_audioRenderer != null)
                    {
                        Marshal.ReleaseComObject(_audioRenderer);
                    }
                    _audioRenderer = audioRenderer;
                }

                if ((System.Environment.OSVersion.Platform == PlatformID.Win32NT &&
                     (System.Environment.OSVersion.Version.Major == 5)))
                {
                    VideoRenderer = VideoRendererType.VideoMixingRenderer9;
                }

                if (_presenterSettings != null)
                {
                    Marshal.ReleaseComObject(_presenterSettings);
                }
                if (_renderer != null)
                {
                    Marshal.ReleaseComObject(_renderer);
                }

                IBaseFilter renderer = InsertVideoRenderer(VideoRenderer, m_graph, 1);
                _renderer = renderer;

                ILAVAudioSettings lavAudioSettings;
                ILAVAudioStatus   lavStatus;
                IBaseFilter       audioDecoder = FilterProvider.GetAudioFilter(out lavAudioSettings, out lavStatus);
                if (audioDecoder != null)
                {
                    if (_audio != null)
                    {
                        Marshal.ReleaseComObject(_audio);
                    }
                    _audio         = audioDecoder;
                    _audioStatus   = lavStatus;
                    _audioSettings = lavAudioSettings;

                    hr = (int)lavAudioSettings.SetRuntimeConfig(true);
                    hr = m_graph.AddFilter((IBaseFilter)audioDecoder, "LavAudio");
                    DsError.ThrowExceptionForHR(hr);
#if DEBUG
                    hr = (int)lavAudioSettings.SetTrayIcon(true);
#endif
                }

                ILAVSplitterSettings splitterSettings;
                IFileSourceFilter    splitter = FilterProvider.GetSplitterSource(out splitterSettings);

                if (splitter != null)
                {
                    if (_splitter != null)
                    {
                        Marshal.ReleaseComObject(_splitter);
                    }
                    _splitter = splitter;

                    _splitterSettings = (ILAVSplitterSettings)splitterSettings;

                    hr = splitterSettings.SetRuntimeConfig(true);
                    hr = splitter.Load(fileSource, null);
                    if (hr != 0)
                    {
                        throw new Exception("Playback of this file is not supported!");
                    }
                    hr = m_graph.AddFilter((IBaseFilter)splitter, "LavSplitter");
                    DsError.ThrowExceptionForHR(hr);
                }

                IEnumPins pinEnum;
                hr = ((IBaseFilter)splitter).EnumPins(out pinEnum);
                DsError.ThrowExceptionForHR(hr);

                IntPtr fetched = IntPtr.Zero;
                IPin[] pins    = { null };

                if (VideoRenderer == VideoRendererType.VideoMixingRenderer9)
                {
                    var mixer = _renderer as IVMRMixerControl9;

                    if (mixer != null)
                    {
                        VMR9MixerPrefs dwPrefs;
                        mixer.GetMixingPrefs(out dwPrefs);
                        dwPrefs &= ~VMR9MixerPrefs.RenderTargetMask;
                        dwPrefs |= VMR9MixerPrefs.RenderTargetRGB;
                        mixer.SetMixingPrefs(dwPrefs);
                    }
                }

                ILAVVideoSettings lavVideoSettings;
                IBaseFilter       lavVideo = FilterProvider.GetVideoFilter(out lavVideoSettings);

                if (lavVideo != null)
                {
                    if (_video != null)
                    {
                        Marshal.ReleaseComObject(_video);
                    }
                    _video = lavVideo;

                    if (lavVideoSettings != null)
                    {
                        _videoSettings = lavVideoSettings;

                        lavVideoSettings.SetRuntimeConfig(true);
                        hr = lavVideoSettings.SetHWAccel(LAVHWAccel.HWAccel_None);

                        // check for best acceleration available
                        //if (lavVideoSettings.CheckHWAccelSupport(LAVHWAccel.HWAccel_CUDA) != 0)
                        //{
                        //    hr = lavVideoSettings.SetHWAccel(LAVHWAccel.HWAccel_CUDA);
                        //    hr = lavVideoSettings.SetHWAccelResolutionFlags(LAVHWResFlag.SD | LAVHWResFlag.HD | LAVHWResFlag.UHD);
                        //}
                        //else if (lavVideoSettings.CheckHWAccelSupport(LAVHWAccel.HWAccel_QuickSync) != 0)
                        //{
                        //    hr = lavVideoSettings.SetHWAccel(LAVHWAccel.HWAccel_QuickSync);
                        //    hr = lavVideoSettings.SetHWAccelResolutionFlags(LAVHWResFlag.SD | LAVHWResFlag.HD | LAVHWResFlag.UHD);
                        //}
                        //else
                        if (lavVideoSettings.CheckHWAccelSupport(LAVHWAccel.HWAccel_DXVA2Native) != 0)
                        {
                            hr = lavVideoSettings.SetHWAccel(LAVHWAccel.HWAccel_DXVA2Native);
                            hr = lavVideoSettings.SetHWAccelResolutionFlags(LAVHWResFlag.SD | LAVHWResFlag.HD | LAVHWResFlag.UHD);
                        }
                        //else
                        //if (lavVideoSettings.CheckHWAccelSupport(LAVHWAccel.HWAccel_DXVA2CopyBack) != 0)
                        //{
                        //    hr = lavVideoSettings.SetHWAccel(LAVHWAccel.HWAccel_DXVA2CopyBack);
                        //    hr = lavVideoSettings.SetHWAccelResolutionFlags(LAVHWResFlag.SD | LAVHWResFlag.HD | LAVHWResFlag.UHD);
                        //}

#if DEBUG
                        hr = lavVideoSettings.SetTrayIcon(true);
#endif
                    }

                    hr = m_graph.AddFilter(_video, "LavVideo");
                    DsError.ThrowExceptionForHR(hr);
                }

                IBaseFilter vobSub = FilterProvider.GetVobSubFilter();

                if (vobSub != null)
                {
                    try
                    {
                        hr = m_graph.AddFilter(vobSub, "VobSub");
                        DsError.ThrowExceptionForHR(hr);
                        IDirectVobSub vss = vobSub as IDirectVobSub;
                        if (_vobsub != null)
                        {
                            Marshal.ReleaseComObject(_vobsub);
                        }
                        _vobsub = vss;
                        InitSubSettings();
                    }
                    catch { }
                }

                hr = m_graph.Connect(DsFindPin.ByName((IBaseFilter)splitter, "Audio"), DsFindPin.ByDirection(_audio, PinDirection.Input, 0));
                HasAudio = (hr == 0);

                IBaseFilter dcDsp = FilterProvider.GetDCDSPFilter();
                if (dcDsp != null)
                {
                    if (_dspFilter != null)
                    {
                        Marshal.ReleaseComObject(_dspFilter);
                    }
                    _dspFilter = (IDCDSPFilterInterface)dcDsp;

                    if (HasAudio)
                    {
                        hr = m_graph.AddFilter((IBaseFilter)_dspFilter, "AudioProcessor");
                        hr = _dspFilter.set_EnableBitrateConversionBeforeDSP(true);
                        hr = ((IDCDSPFilterVisualInterface)_dspFilter).set_VISafterDSP(true);
                        hr = m_graph.Connect(DsFindPin.ByDirection((IBaseFilter)_audio, PinDirection.Output, 0), DsFindPin.ByDirection(_dspFilter, PinDirection.Input, 0));
                        DsError.ThrowExceptionForHR(hr);
                        hr = m_graph.Connect(DsFindPin.ByDirection((IBaseFilter)_dspFilter, PinDirection.Output, 0), DsFindPin.ByDirection(_audioRenderer, PinDirection.Input, 0));

                        var cb = new AudioCallback(this);
                        hr = _dspFilter.set_CallBackPCM(cb);

                        object intf = null;
                        hr         = _dspFilter.set_AddFilter(0, TDCFilterType.ftEqualizer);
                        hr         = _dspFilter.get_FilterInterface(0, out intf);
                        _equalizer = (IDCEqualizer)intf;
                        _equalizer.set_Seperate(false);
                    }
                }
                else
                {
                    if (HasAudio)
                    {
                        hr = m_graph.Connect(DsFindPin.ByDirection((IBaseFilter)_audio, PinDirection.Output, 0), DsFindPin.ByDirection(_audioRenderer, PinDirection.Input, 0));
                    }
                }

                bool subconnected = false;

                hr = m_graph.Connect(DsFindPin.ByName((IBaseFilter)_splitter, "Video"), DsFindPin.ByDirection(_video, PinDirection.Input, 0));
                HasVideo = (hr == 0);

                if (HasVideo)
                {
                    hr = m_graph.Connect(DsFindPin.ByDirection((IBaseFilter)_video, PinDirection.Output, 0), DsFindPin.ByDirection(vobSub, PinDirection.Input, 0));
                    // Do not throw here: if the connection to VobSub fails, the
                    // else branch below connects the video decoder directly to the renderer.
                    if (hr == 0)
                    {
                        int lc;
                        ((IDirectVobSub)vobSub).get_LanguageCount(out lc);
                        subconnected = (lc != 0);
                        IPin pn = DsFindPin.ByName((IBaseFilter)splitter, "Subtitle");
                        if (pn != null)
                        {
                            hr = m_graph.Connect(pn, DsFindPin.ByDirection(vobSub, PinDirection.Input, 1));
                            ((IDirectVobSub)vobSub).get_LanguageCount(out lc);
                            subconnected = (lc != 0);
                        }
                        hr = m_graph.Connect(DsFindPin.ByDirection(vobSub, PinDirection.Output, 0),
                                             DsFindPin.ByDirection(_renderer, PinDirection.Input, 0));
                    }
                    else
                    {
                        if (_vobsub != null)
                        {
                            Marshal.ReleaseComObject(_vobsub);
                        }
                        _vobsub = null;
                        hr      = m_graph.Connect(DsFindPin.ByDirection(_video, PinDirection.Output, 0),
                                                  DsFindPin.ByDirection(_renderer, PinDirection.Input, 0));
                    }
                }

                /* Loop over each pin of the source filter */
                while (pinEnum.Next(pins.Length, pins, fetched) == 0)
                {
                    IPin cTo;
                    pins[0].ConnectedTo(out cTo);
                    if (cTo == null)
                    {
                        // This should not happen if the filter graph has been fully connected manually above
                        hr = filterGraph.RenderEx(pins[0], AMRenderExFlags.RenderToExistingRenderers, IntPtr.Zero);
                    }
                    else
                    {
                        Marshal.ReleaseComObject(cTo);
                    }
                    Marshal.ReleaseComObject(pins[0]);
                }

                Marshal.ReleaseComObject(pinEnum);

                var selector = splitter as IAMStreamSelect;
                int numstreams;
                selector.Count(out numstreams);
                AMMediaType             mt;
                AMStreamSelectInfoFlags fl;
                SubtitleStreams.Clear();
                VideoStreams.Clear();
                AudioStreams.Clear();
                for (int i = 0; i < numstreams; i++)
                {
                    int    lcid;
                    int    group;
                    string name;
                    object o, o2;
                    selector.Info(i, out mt, out fl, out lcid, out group, out name, out o, out o2);
                    switch (group)
                    {
                    case 0:
                        VideoStreams.Add(name);
                        break;

                    case 1:
                        AudioStreams.Add(name);
                        break;

                    case 2:
                        SubtitleStreams.Add(name);
                        break;
                    }

                    if (o != null)
                    {
                        Marshal.ReleaseComObject(o);
                    }
                    if (o2 != null)
                    {
                        Marshal.ReleaseComObject(o2);
                    }
                }

                OnPropertyChanged("SubtitleStreams");
                OnPropertyChanged("VideoStreams");
                OnPropertyChanged("AudioStreams");

                /* Configure the graph in the base class */
                SetupFilterGraph(m_graph);

#if DEBUG
                /* Adds the GB to the ROT so we can view
                 * it in graphedit */
                m_dsRotEntry = new DsROTEntry(m_graph);
#endif

                SIZE a, b;
                if (HasVideo && _displayControl != null && (_displayControl).GetNativeVideoSize(out a, out b) == 0)
                {
                    var sz = MediaPlayerBase.GetVideoSize(_renderer, PinDirection.Input, 0);
                    if (a.cx > 0 && a.cy > 0)
                    {
                        SetNativePixelSizes(a);
                    }
                }

                if (!subconnected)
                {
                    InvokeNoSubtitleLoaded(new EventArgs());
                }
                else
                {
                    InitSubSettings();
                }
            }
            catch (Exception ex)
            {
                /* This exception usually happens if the media does not exist
                 * or could not be opened because the proper filters are not
                 * installed */
                FreeResources();

                /* Fire our failed event */
                InvokeMediaFailed(new MediaFailedEventArgs(ex.Message, ex));
            }

            InvokeMediaOpened();
        }
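
OpenSource calls FreeResources at the start and in the error path, but that method is not part of the excerpt. A hedged sketch of a matching cleanup routine is shown below; the real player almost certainly does more (stopping the graph, detaching the PCM callback, releasing the presenter and splitter settings), so treat this only as an outline of the pattern.

        protected virtual void FreeResources()
        {
#if DEBUG
            if (m_dsRotEntry != null)
            {
                m_dsRotEntry.Dispose();
                m_dsRotEntry = null;
            }
#endif

            // Release the filter references roughly in reverse order of creation
            ReleaseAndNull(ref _equalizer);
            ReleaseAndNull(ref _dspFilter);
            ReleaseAndNull(ref _vobsub);
            ReleaseAndNull(ref _video);
            ReleaseAndNull(ref _audio);
            ReleaseAndNull(ref _audioRenderer);
            ReleaseAndNull(ref _splitter);
            ReleaseAndNull(ref _renderer);
            ReleaseAndNull(ref m_graph);
        }

        private static void ReleaseAndNull<T>(ref T comObject) where T : class
        {
            if (comObject != null)
            {
                Marshal.ReleaseComObject(comObject);
                comObject = null;
            }
        }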
Code example #24
        private void StartCapture()
        {
            int hr;

            ISampleGrabber        sampGrabber = null;
            IBaseFilter           capFilter   = null;
            ICaptureGraphBuilder2 capGraph    = null;

            if (System.IO.File.Exists(txtAviFileName.Text))
            {
                // Get the graphbuilder object
                m_FilterGraph = (IFilterGraph2) new FilterGraph();
                m_mediaCtrl   = m_FilterGraph as IMediaControl;

                // Get the ICaptureGraphBuilder2
                capGraph = (ICaptureGraphBuilder2) new CaptureGraphBuilder2();

                // Get the SampleGrabber interface
                sampGrabber = (ISampleGrabber) new SampleGrabber();

                // Start building the graph
                hr = capGraph.SetFiltergraph(m_FilterGraph);
                DsError.ThrowExceptionForHR(hr);

                // Add the video source
                hr = m_FilterGraph.AddSourceFilter(txtAviFileName.Text, "File Source (Async.)", out capFilter);
                DsError.ThrowExceptionForHR(hr);

                //add AVI Decompressor
                IBaseFilter pAVIDecompressor = (IBaseFilter) new AVIDec();
                hr = m_FilterGraph.AddFilter(pAVIDecompressor, "AVI Decompressor");
                DsError.ThrowExceptionForHR(hr);

                IBaseFilter ffdshow;
                try
                {
                    // Create Decoder filter COM object (ffdshow video decoder)
                    Type comtype = Type.GetTypeFromCLSID(new Guid("{04FE9017-F873-410E-871E-AB91661A4EF7}"));
                    if (comtype == null)
                    {
                        throw new NotSupportedException("Creating the ffdshow video decoder COM object failed.");
                    }
                    object comobj = Activator.CreateInstance(comtype);
                    ffdshow = (IBaseFilter)comobj; // an exception may be raised here, e.g. if ffdshow is not properly registered
                    comobj  = null;
                }
                catch { CustomMessageBox.Show("Please install/reinstall ffdshow"); return; }

                hr = m_FilterGraph.AddFilter(ffdshow, "ffdshow");
                DsError.ThrowExceptionForHR(hr);

                // Configure the sample grabber before adding it to the graph
                IBaseFilter baseGrabFlt = (IBaseFilter)sampGrabber;
                ConfigureSampleGrabber(sampGrabber);

                // Add the frame grabber to the graph
                hr = m_FilterGraph.AddFilter(baseGrabFlt, "Ds.NET Grabber");
                DsError.ThrowExceptionForHR(hr);


                IBaseFilter vidrender = (IBaseFilter) new VideoRenderer();
                hr = m_FilterGraph.AddFilter(vidrender, "Render");
                DsError.ThrowExceptionForHR(hr);


                IPin captpin = DsFindPin.ByDirection(capFilter, PinDirection.Output, 0);

                IPin ffdpinin = DsFindPin.ByName(ffdshow, "In");

                IPin ffdpinout = DsFindPin.ByName(ffdshow, "Out");

                IPin samppin = DsFindPin.ByName(baseGrabFlt, "Input");

                hr = m_FilterGraph.Connect(captpin, ffdpinin);
                DsError.ThrowExceptionForHR(hr);
                hr = m_FilterGraph.Connect(ffdpinout, samppin);
                DsError.ThrowExceptionForHR(hr);

                FileWriter      filewritter = new FileWriter();
                IFileSinkFilter filemux     = (IFileSinkFilter)filewritter;
                //filemux.SetFileName("test.avi",);

                //hr = capGraph.RenderStream(null, MediaType.Video, capFilter, null, vidrender);
                // DsError.ThrowExceptionForHR(hr);

                SaveSizeInfo(sampGrabber);

                // setup buffer
                if (m_handle == IntPtr.Zero)
                {
                    m_handle = Marshal.AllocCoTaskMem(m_stride * m_videoHeight);
                }

                // tell the callback to ignore new images
                m_PictureReady = new ManualResetEvent(false);
                m_bGotOne      = false;
                m_bRunning     = false;

                timer1 = new Thread(timer);
                timer1.IsBackground = true;
                timer1.Start();

                m_mediaextseek = m_FilterGraph as IAMExtendedSeeking;
                m_mediapos     = m_FilterGraph as IMediaPosition;
                m_mediaseek    = m_FilterGraph as IMediaSeeking;
                double length = 0;
                m_mediapos.get_Duration(out length);
                trackBar_mediapos.Minimum = 0;
                trackBar_mediapos.Maximum = (int)length;

                Start();
            }
            else
            {
                MessageBox.Show("File does not exist");
            }
        }
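ConfigureSampleGrabber is called above but its body is not shown in this example. A hypothetical sketch of such a helper, assuming the enclosing class implements ISampleGrabberCB and wants 24-bit RGB frames delivered through BufferCB:

        // Hypothetical sketch (not in the original source) of ConfigureSampleGrabber.
        private void ConfigureSampleGrabber(ISampleGrabber sampGrabber)
        {
            // Only accept 24-bit RGB video samples
            AMMediaType media = new AMMediaType();
            media.majorType  = MediaType.Video;
            media.subType    = MediaSubType.RGB24;
            media.formatType = FormatType.VideoInfo;

            int hr = sampGrabber.SetMediaType(media);
            DsUtils.FreeAMMediaType(media);
            DsError.ThrowExceptionForHR(hr);

            // Deliver each sample to ISampleGrabberCB.BufferCB (second argument = 1);
            // assumes `this` implements ISampleGrabberCB
            hr = sampGrabber.SetCallback(this, 1);
            DsError.ThrowExceptionForHR(hr);
        }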
Code Example #25
        /// <summary> build the capture graph for grabber. </summary>
        private void SetupGraph(DsDevice dev, int iWidth, int iHeight, short iBPP, Control hControl, long iframerate)
        {
            int hr;

            ISampleGrabber sampGrabber = null;
            IBaseFilter    capFilter   = null;
            IPin           pCaptureOut = null;
            IPin           pSampleIn   = null;
            IPin           pRenderIn   = null;

            // Get the graphbuilder object
            m_FilterGraph = new FilterGraph() as IFilterGraph2;

            try
            {
#if DEBUG
                m_rot = new DsROTEntry(m_FilterGraph);
#endif
                // add the video input device
                hr = m_FilterGraph.AddSourceFilterForMoniker(dev.Mon, null, dev.Name, out capFilter);
                DsError.ThrowExceptionForHR(hr);

                // Find the still pin
                m_pinStill = DsFindPin.ByCategory(capFilter, PinCategory.Still, 0);

                // Didn't find one.  Is there a preview pin?
                if (m_pinStill == null)
                {
                    m_pinStill = DsFindPin.ByCategory(capFilter, PinCategory.Preview, 0);
                }

                // Still haven't found one.  Need to put a splitter in so we have
                // one stream to capture the bitmap from, and one to display.  Ok, we
                // don't *have* to do it that way, but we are going to anyway.
                if (m_pinStill == null)
                {
                    IPin pRaw   = null;
                    IPin pSmart = null;

                    // There is no still pin
                    m_VidControl = null;

                    // Add a splitter
                    IBaseFilter iSmartTee = (IBaseFilter) new SmartTee();

                    try
                    {
                        hr = m_FilterGraph.AddFilter(iSmartTee, "SmartTee");
                        DsError.ThrowExceptionForHR(hr);

                        // Find the capture pin from the video device and the
                        // input pin for the splitter, and connect them
                        pRaw   = DsFindPin.ByCategory(capFilter, PinCategory.Capture, 0);
                        pSmart = DsFindPin.ByDirection(iSmartTee, PinDirection.Input, 0);

                        hr = m_FilterGraph.Connect(pRaw, pSmart);
                        DsError.ThrowExceptionForHR(hr);

                        // Now set the capture and still pins (from the splitter)
                        m_pinStill  = DsFindPin.ByName(iSmartTee, "Preview");
                        pCaptureOut = DsFindPin.ByName(iSmartTee, "Capture");

                        // If any of the default config items are set, perform the config
                        // on the actual video device (rather than the splitter)
                        if (iHeight + iWidth + iBPP + iframerate > 0)
                        {
                            SetConfigParms(pRaw, iWidth, iHeight, iBPP, iframerate);
                        }
                    }
                    finally
                    {
                        if (pRaw != null)
                        {
                            Marshal.ReleaseComObject(pRaw);
                        }
                        if (pSmart != null)
                        {
                            Marshal.ReleaseComObject(pSmart);
                        }
                        if (iSmartTee != null)
                        {
                            Marshal.ReleaseComObject(iSmartTee);
                        }
                    }
                }
                else
                {
                    // Get a control pointer (used in Click())
                    m_VidControl = capFilter as IAMVideoControl;

                    pCaptureOut = DsFindPin.ByCategory(capFilter, PinCategory.Capture, 0);

                    // If any of the default config items are set
                    if (iHeight + iWidth + iBPP + iframerate > 0)
                    {
                        SetConfigParms(m_pinStill, iWidth, iHeight, iBPP, iframerate);
                    }
                }

                // Get the SampleGrabber interface
                sampGrabber = new SampleGrabber() as ISampleGrabber;

                // Configure the sample grabber
                IBaseFilter baseGrabFlt = sampGrabber as IBaseFilter;
                ConfigureSampleGrabber(sampGrabber);
                pSampleIn = DsFindPin.ByDirection(baseGrabFlt, PinDirection.Input, 0);

                // Get the default video renderer
                IBaseFilter pRenderer = new VideoRendererDefault() as IBaseFilter;
                hr = m_FilterGraph.AddFilter(pRenderer, "Renderer");
                DsError.ThrowExceptionForHR(hr);

                pRenderIn = DsFindPin.ByDirection(pRenderer, PinDirection.Input, 0);

                // Add the sample grabber to the graph
                hr = m_FilterGraph.AddFilter(baseGrabFlt, "Ds.NET Grabber");
                DsError.ThrowExceptionForHR(hr);

                if (m_VidControl == null)
                {
                    // Connect the Still pin to the sample grabber
                    hr = m_FilterGraph.Connect(m_pinStill, pSampleIn);
                    DsError.ThrowExceptionForHR(hr);

                    // Connect the capture pin to the renderer
                    hr = m_FilterGraph.Connect(pCaptureOut, pRenderIn);
                    DsError.ThrowExceptionForHR(hr);
                }
                else
                {
                    // Connect the capture pin to the renderer
                    hr = m_FilterGraph.Connect(pCaptureOut, pRenderIn);
                    DsError.ThrowExceptionForHR(hr);

                    // Connect the Still pin to the sample grabber
                    hr = m_FilterGraph.Connect(m_pinStill, pSampleIn);
                    DsError.ThrowExceptionForHR(hr);
                }

                // Learn the video properties
                SaveSizeInfo(sampGrabber);
                ConfigVideoWindow(hControl);

                // Start the graph
                IMediaControl mediaCtrl = m_FilterGraph as IMediaControl;
                hr = mediaCtrl.Run();
                DsError.ThrowExceptionForHR(hr);
            }
            finally
            {
                if (sampGrabber != null)
                {
                    Marshal.ReleaseComObject(sampGrabber);
                    sampGrabber = null;
                }
                if (pCaptureOut != null)
                {
                    Marshal.ReleaseComObject(pCaptureOut);
                    pCaptureOut = null;
                }
                if (pRenderIn != null)
                {
                    Marshal.ReleaseComObject(pRenderIn);
                    pRenderIn = null;
                }
                if (pSampleIn != null)
                {
                    Marshal.ReleaseComObject(pSampleIn);
                    pSampleIn = null;
                }
            }
        }
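SaveSizeInfo is referenced above but not shown. A hypothetical sketch, assuming the m_videoWidth, m_videoHeight and m_stride fields used elsewhere in these examples and a simple width * bytes-per-pixel stride with no row padding:

        // Hypothetical sketch (not in the original source) of SaveSizeInfo: read the
        // negotiated format from the sample grabber and cache the frame dimensions.
        private void SaveSizeInfo(ISampleGrabber sampGrabber)
        {
            AMMediaType media = new AMMediaType();
            int hr = sampGrabber.GetConnectedMediaType(media);
            DsError.ThrowExceptionForHR(hr);

            try
            {
                if (media.formatType != FormatType.VideoInfo || media.formatPtr == IntPtr.Zero)
                {
                    throw new NotSupportedException("Unknown media format on the grabber pin");
                }

                // The format block is a VIDEOINFOHEADER for FormatType.VideoInfo
                VideoInfoHeader videoInfo = (VideoInfoHeader)Marshal.PtrToStructure(media.formatPtr, typeof(VideoInfoHeader));
                m_videoWidth  = videoInfo.BmiHeader.Width;
                m_videoHeight = videoInfo.BmiHeader.Height;
                m_stride      = m_videoWidth * (videoInfo.BmiHeader.BitCount / 8);
            }
            finally
            {
                DsUtils.FreeAMMediaType(media);
            }
        }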
Code Example #26
        /// <summary>
        /// Get the image from the Still pin.  The returned image can be turned into a bitmap with
        /// Bitmap b = new Bitmap(cam.Width, cam.Height, cam.Stride, PixelFormat.Format24bppRgb, m_ip);
        /// If the image is upside down, you can fix it with
        /// b.RotateFlip(RotateFlipType.RotateNoneFlipY);
        /// </summary>
        /// <returns>The grabbed frame as a Bitmap, or null if no frame was captured; m_ip receives a pointer to the underlying frame buffer, which the class frees on the next call.</returns>
        public Bitmap Grab(
            ref IntPtr m_ip,
            bool update_bitmap)
        {
            Bitmap grabbed_frame = null;

            if (!paused)
            {
                int hr;

                if (m_ipBuffer != IntPtr.Zero)
                {
                    try
                    {
                        Marshal.FreeCoTaskMem(m_ipBuffer);
                    }
                    catch
                    {
                    }
                    m_ipBuffer = IntPtr.Zero;
                }

                // get ready to wait for new image
                m_PictureReady.Reset();

                bool free_mem = false;
                try
                {
                    m_ipBuffer = Marshal.AllocCoTaskMem(Math.Abs(m_stride) * m_videoHeight);

                    m_WantOne = true;

                    // If we are using a still pin, ask for a picture
                    if (m_VidControl != null)
                    {
                        // Tell the camera to send an image
                        hr = m_VidControl.SetMode(m_pinStill, VideoControlFlags.Trigger);
                        DsError.ThrowExceptionForHR(hr);
                    }

                    // Start waiting
                    if (!m_PictureReady.WaitOne(500, false))
                    {
                        //throw new Exception("Timeout waiting to get picture");
                        //Console.WriteLine("Timeout");
                    }
                }
                catch
                {
                    free_mem = true;
                    //throw;
                }

                if (free_mem)
                {
                    try
                    {
                        if (m_ipBuffer != IntPtr.Zero)
                        {
                            Marshal.FreeCoTaskMem(m_ipBuffer);
                            m_ipBuffer = IntPtr.Zero;
                        }
                    }
                    catch
                    {
                    }
                }

                if (m_ipBuffer != IntPtr.Zero)
                {
                    if (lastFrame != null)
                    {
                        lastFrame.Dispose();
                    }

                    // store the last frame as a bitmap
                    if (update_bitmap)
                    {
                        if (bytes_per_pixel == 1)
                        {
                            grabbed_frame = new Bitmap(m_videoWidth, m_videoHeight, m_stride, PixelFormat.Format8bppIndexed, m_ipBuffer);
                        }
                        else
                        {
                            grabbed_frame = new Bitmap(m_videoWidth, m_videoHeight, m_stride, PixelFormat.Format24bppRgb, m_ipBuffer);
                        }

                        try
                        {
                            grabbed_frame.RotateFlip(RotateFlipType.RotateNoneFlipY);
                        }
                        catch
                        {
                        }
                        lastFrame = grabbed_frame;
                        m_ip      = m_ipBuffer;
                    }
                }
            }
            //m_ip = m_ipBuffer;

            return(grabbed_frame);
        }
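A hypothetical usage sketch of Grab(): request a single frame and save it to disk. It assumes `cam` is an instance of the capture class above; note that the class keeps ownership of both the returned Bitmap (stored as lastFrame) and the native buffer behind m_ip, which it frees on the next call.

            // Hypothetical usage (not in the original source)
            IntPtr ip = IntPtr.Zero;
            Bitmap frame = cam.Grab(ref ip, true);
            if (frame != null)
            {
                frame.Save("snapshot.png", System.Drawing.Imaging.ImageFormat.Png);
            }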
Code Example #27
        /// <summary>
        /// Sets the format on the source filter's capture pin.
        /// </summary>
        /// <param name="pinSourceCapture">Capture pin of the source filter.</param>
        /// <param name="resolution_desired">Desired resolution to set if the device supports it.</param>
        private static void SetSourceParams(IPin pinSourceCapture, Resolution resolution_desired)
        {
            int hr = 0;

            AMMediaType media_type_most_appropriate = null;
            AMMediaType media_type = null;

            //NOTE: pSCC is not used. All we need is media_type
            IntPtr pSCC = IntPtr.Zero;


            bool appropriate_media_type_found = false;

            try
            {
                IAMStreamConfig videoStreamConfig = pinSourceCapture as IAMStreamConfig;

                // -------------------------------------------------------------------------
                // We want the interface to expose all media types it supports and not only the last one set
                hr = videoStreamConfig.SetFormat(null);
                DsError.ThrowExceptionForHR(hr);

                int piCount = 0;
                int piSize  = 0;

                hr = videoStreamConfig.GetNumberOfCapabilities(out piCount, out piSize);
                DsError.ThrowExceptionForHR(hr);

                for (int i = 0; i < piCount; i++)
                {
                    // ---------------------------------------------------
                    pSCC = Marshal.AllocCoTaskMem(piSize);
                    videoStreamConfig.GetStreamCaps(i, out media_type, pSCC);
                    FreeSCCMemory(ref pSCC);

                    // NOTE: we could use VideoStreamConfigCaps.InputSize or something like that to get resolution, but it's deprecated
                    //VideoStreamConfigCaps videoStreamConfigCaps = (VideoStreamConfigCaps)Marshal.PtrToStructure(pSCC, typeof(VideoStreamConfigCaps));
                    // ---------------------------------------------------

                    bool bit_count_ok  = false;
                    bool sub_type_ok   = false;
                    bool resolution_ok = false;

                    AnalyzeMediaType(media_type, resolution_desired, out bit_count_ok, out sub_type_ok, out resolution_ok);

                    if (bit_count_ok && resolution_ok)
                    {
                        if (sub_type_ok)
                        {
                            hr = videoStreamConfig.SetFormat(media_type);
                            DsError.ThrowExceptionForHR(hr);

                            appropriate_media_type_found = true;
                            break;                             // stop search, we've found appropriate media type
                        }
                        else
                        {
                            // save as appropriate if no other found
                            if (media_type_most_appropriate == null)
                            {
                                media_type_most_appropriate = media_type;
                                media_type = null;                                 // don't free it here; media_type_most_appropriate now owns it
                            }
                        }
                    }

                    FreeMediaType(ref media_type);
                }

                if (!appropriate_media_type_found)
                {
                    // Found nothing exactly as we were asked

                    if (media_type_most_appropriate != null)
                    {
                        // set appropriate RGB format with different resolution
                        hr = videoStreamConfig.SetFormat(media_type_most_appropriate);
                        DsError.ThrowExceptionForHR(hr);
                    }
                    else
                    {
                        // throw: we didn't find anything matching what we were asked for
                        throw new Exception("Camera doesn't support media type with requested resolution and bits per pixel.");
                        //DsError.ThrowExceptionForHR(DsResults.E_InvalidMediaType);
                    }
                }
            }
            finally
            {
                // clean up
                FreeMediaType(ref media_type);
                FreeMediaType(ref media_type_most_appropriate);
                FreeSCCMemory(ref pSCC);
            }
        }
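FreeMediaType and FreeSCCMemory are referenced above but not shown. Hypothetical sketches of both cleanup helpers:

        // Hypothetical sketch (not in the original source): free an AMMediaType and
        // clear the reference so it cannot be freed twice.
        private static void FreeMediaType(ref AMMediaType mediaType)
        {
            if (mediaType != null)
            {
                DsUtils.FreeAMMediaType(mediaType);
                mediaType = null;
            }
        }

        // Hypothetical sketch (not in the original source): free the stream-caps
        // buffer allocated with Marshal.AllocCoTaskMem and zero the pointer.
        private static void FreeSCCMemory(ref IntPtr pSCC)
        {
            if (pSCC != IntPtr.Zero)
            {
                Marshal.FreeCoTaskMem(pSCC);
                pSCC = IntPtr.Zero;
            }
        }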
Code Example #28
File: Capture.cs  Project: OOP-03376400/Software
        /// <summary> build the capture graph for grabber. </summary>
        private void SetupGraph(DsDevice dev, AMMediaType media)
        {
            int hr;

            ISampleGrabber        sampGrabber = null;
            IBaseFilter           capFilter   = null;
            ICaptureGraphBuilder2 capGraph    = null;

            // Get the graphbuilder object
            m_FilterGraph = (IFilterGraph2) new FilterGraph();
            m_mediaCtrl   = m_FilterGraph as IMediaControl;
            try
            {
                // Get the ICaptureGraphBuilder2
                capGraph = (ICaptureGraphBuilder2) new CaptureGraphBuilder2();

                // Get the SampleGrabber interface
                sampGrabber = (ISampleGrabber) new SampleGrabber();

                // Start building the graph
                hr = capGraph.SetFiltergraph(m_FilterGraph);
                DsError.ThrowExceptionForHR(hr);

                // Add the video device
                hr = m_FilterGraph.AddSourceFilterForMoniker(dev.Mon, null, "Video input", out capFilter);
                DsError.ThrowExceptionForHR(hr);

                // add video crossbar
                // thanks to Andrew Fernie - this is to get tv tuner cards working
                IAMCrossbar crossbar = null;
                object      o;

                hr = capGraph.FindInterface(PinCategory.Capture, MediaType.Video, capFilter, typeof(IAMCrossbar).GUID, out o);
                if (hr >= 0)
                {
                    crossbar = (IAMCrossbar)o;
                    int oPin, iPin;
                    int ovLink, ivLink;
                    ovLink = ivLink = 0;

                    crossbar.get_PinCounts(out oPin, out iPin);
                    int pIdxRel;
                    PhysicalConnectorType tp;
                    for (int i = 0; i < iPin; i++)
                    {
                        crossbar.get_CrossbarPinInfo(true, i, out pIdxRel, out tp);
                        if (tp == PhysicalConnectorType.Video_Composite)
                        {
                            ivLink = i;
                        }
                    }

                    for (int i = 0; i < oPin; i++)
                    {
                        crossbar.get_CrossbarPinInfo(false, i, out pIdxRel, out tp);
                        if (tp == PhysicalConnectorType.Video_VideoDecoder)
                        {
                            ovLink = i;
                        }
                    }

                    try
                    {
                        crossbar.Route(ovLink, ivLink);
                        o = null;
                    }

                    catch
                    {
                        throw new Exception("Failed to get IAMCrossbar");
                    }
                }

                //add AVI Decompressor
                IBaseFilter pAVIDecompressor = (IBaseFilter) new AVIDec();
                hr = m_FilterGraph.AddFilter(pAVIDecompressor, "AVI Decompressor");
                DsError.ThrowExceptionForHR(hr);

                // Configure the sample grabber before adding it to the graph
                IBaseFilter baseGrabFlt = (IBaseFilter)sampGrabber;
                ConfigureSampleGrabber(sampGrabber);

                // Add the frame grabber to the graph
                hr = m_FilterGraph.AddFilter(baseGrabFlt, "Ds.NET Grabber");
                DsError.ThrowExceptionForHR(hr);

                SetConfigParms(capGraph, capFilter, media);

                hr = capGraph.RenderStream(PinCategory.Capture, MediaType.Video, capFilter, pAVIDecompressor, baseGrabFlt);
                if (hr < 0)
                {
                    hr = capGraph.RenderStream(PinCategory.Capture, MediaType.Video, capFilter, null, baseGrabFlt);
                }

                DsError.ThrowExceptionForHR(hr);

                SaveSizeInfo(sampGrabber);
            }
            finally
            {
                if (capFilter != null)
                {
                    Marshal.ReleaseComObject(capFilter);
                    capFilter = null;
                }
                if (sampGrabber != null)
                {
                    Marshal.ReleaseComObject(sampGrabber);
                    sampGrabber = null;
                }
                if (capGraph != null)
                {
                    Marshal.ReleaseComObject(capGraph);
                    capGraph = null;
                }
            }
        }
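SetConfigParms(capGraph, capFilter, media) is called above but not shown. A hypothetical sketch that applies the chosen media type to the capture pin through IAMStreamConfig, using the same FindInterface pattern as the crossbar lookup above:

        // Hypothetical sketch (not in the original source) of SetConfigParms.
        private void SetConfigParms(ICaptureGraphBuilder2 capGraph, IBaseFilter capFilter, AMMediaType media)
        {
            object o;
            int hr = capGraph.FindInterface(PinCategory.Capture, MediaType.Video, capFilter, typeof(IAMStreamConfig).GUID, out o);
            DsError.ThrowExceptionForHR(hr);

            IAMStreamConfig videoStreamConfig = o as IAMStreamConfig;
            if (videoStreamConfig != null)
            {
                // Ask the capture pin to use the requested format
                hr = videoStreamConfig.SetFormat(media);
                DsError.ThrowExceptionForHR(hr);

                Marshal.ReleaseComObject(videoStreamConfig);
            }
        }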
Code Example #29
        private void closeInterfaces()
        {
            int hr = 0;

            try
            {
                lock (this)
                {
                    //relinquish ownership (IMPORTANT!) after hiding video window
                    hr = this.VideoWindow.put_Visible(OABool.False);
                    DsError.ThrowExceptionForHR(hr);
                    hr = this.VideoWindow.put_Owner(IntPtr.Zero);
                    DsError.ThrowExceptionForHR(hr);

                    if (this.MediaEventEx != null)
                    {
                        hr = this.MediaEventEx.SetNotifyWindow(IntPtr.Zero, 0, IntPtr.Zero);
                        DsError.ThrowExceptionForHR(hr);
                    }
                    //release and zero DirectShow interfaces
                    if (this.MediaEventEx != null)
                    {
                        this.MediaEventEx = null;
                    }
                    if (this.MediaSeeking != null)
                    {
                        this.MediaSeeking = null;
                    }
                    if (this.MediaPosition != null)
                    {
                        this.MediaPosition = null;
                    }
                    if (this.MediaControl != null)
                    {
                        this.MediaControl = null;
                    }
                    if (this.BasicAudio != null)
                    {
                        this.BasicAudio = null;
                    }
                    if (this.BasicVideo != null)
                    {
                        this.BasicVideo = null;
                    }
                    if (this.VideoWindow != null)
                    {
                        this.VideoWindow = null;
                    }
                    if (this.VideoFrameStep != null)
                    {
                        this.VideoFrameStep = null;
                    }
                    if (this.GraphBuilder != null)
                    {
                        this.GraphBuilder = null;
                    }
                    GC.Collect();
                }
            }
            catch (Exception ex)
            {
                MessageBox.Show(ex.ToString());
            }
        }
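The method above only drops the managed references and then relies on GC.Collect(); a small helper like the following (an assumption, not part of the original code) could release each COM interface deterministically before nulling it:

        // Hypothetical helper (not in the original source): release a COM reference
        // explicitly instead of waiting for the garbage collector.
        private static void ReleaseCom<T>(ref T comObject) where T : class
        {
            if (comObject != null && Marshal.IsComObject(comObject))
            {
                Marshal.ReleaseComObject(comObject);
            }
            comObject = null;
        }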
Code Example #30
        /// <summary>
        /// Creates an instance of the EVR
        /// </summary>
        private IBaseFilter CreateEnhancedVideoRenderer(IGraphBuilder graph, int streamCount)
        {
            EvrPresenter presenter;
            IBaseFilter  filter;

            lock (m_videoRendererInitLock)
            {
                var evr = new EnhancedVideoRenderer();
                filter = evr as IBaseFilter;

                int hr = graph.AddFilter(filter, string.Format("Renderer: {0}", VideoRendererType.EnhancedVideoRenderer));
                DsError.ThrowExceptionForHR(hr);

                /* QueryInterface for the IMFVideoRenderer */
                var videoRenderer = filter as IMFVideoRenderer;

                if (videoRenderer == null)
                {
                    throw new WPFMediaKitException("Could not QueryInterface for the IMFVideoRenderer");
                }

                /* Create a new EVR presenter */
                presenter = EvrPresenter.CreateNew();

                /* Initialize the EVR renderer with the custom video presenter */
                hr = videoRenderer.InitializeRenderer(null, presenter.VideoPresenter);
                DsError.ThrowExceptionForHR(hr);

                var presenterSettings = presenter.VideoPresenter as IEVRPresenterSettings;
                if (presenterSettings == null)
                {
                    throw new WPFMediaKitException("Could not QueryInterface for the IEVRPresenterSettings");
                }

                presenterSettings.SetBufferCount(3);

                /* Use our interop hWnd */
                IntPtr handle = GetDesktopWindow();//HwndHelper.Handle;

                /* QueryInterface the IMFVideoDisplayControl */
                var displayControl = presenter.VideoPresenter as IMFVideoDisplayControl;

                if (displayControl == null)
                {
                    throw new WPFMediaKitException("Could not QueryInterface the IMFVideoDisplayControl");
                }

                /* Configure the presenter with our hWnd */
                hr = displayControl.SetVideoWindow(handle);
                DsError.ThrowExceptionForHR(hr);

                var filterConfig = filter as IEVRFilterConfig;

                if (filterConfig != null)
                {
                    filterConfig.SetNumberOfStreams(streamCount);
                }
            }


            RegisterCustomAllocator(presenter);

            return(filter);
        }
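A hypothetical usage sketch, assuming an existing IGraphBuilder field named m_graph and a single video stream:

            // Hypothetical usage (not in the original source): add the EVR to the graph
            // with one input stream; its pins are connected when the graph is rendered.
            IBaseFilter evrFilter = CreateEnhancedVideoRenderer(m_graph, 1);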