Example #1
        /// <summary>
        /// Test all IMediaSampleTest methods
        /// </summary>
        public void DoTests()
        {
            m_graphBuilder = BuildGraph(g_TestFile);

            // All the tests are called in ISampleGrabberCB.SampleCB, since
            // that's where we are when we get the IMediaSample2
            GetSample();

            // All done.  Release everything
            if (m_graphBuilder != null)
            {
                Marshal.ReleaseComObject(m_graphBuilder);
                m_graphBuilder = null;
            }

            if (m_MediaType != null)
            {
                DsUtils.FreeAMMediaType(m_MediaType);
                m_MediaType = null;
            }

            m_ims = null;
        }
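
The DoTests comment above refers to ISampleGrabberCB.SampleCB as the place where the IMediaSample2 arrives. A minimal sketch of what such a callback might look like (hypothetical; m_ims is assumed to be an IMediaSample2 field used by the tests):

        /// <summary>
        /// Sketch of the SampleCB mentioned above: cast the delivered IMediaSample
        /// to IMediaSample2 and keep it for the tests to exercise.
        /// </summary>
        public int SampleCB(double sampleTime, IMediaSample pSample)
        {
            IMediaSample2 sample2 = pSample as IMediaSample2;

            if (sample2 != null)
            {
                m_ims = sample2;   // assumed field; the IMediaSample2 tests run from here
            }

            return 0;
        }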
Example #2
        private void ConfigureSampleGrabber(ISampleGrabber sampGrabber)
        {
            AMMediaType media;
            int         hr;

            // Set the media type to Video/RGB32
            media            = new AMMediaType();
            media.majorType  = MediaType.Video;
            media.subType    = MediaSubType.RGB32;
            media.formatType = FormatType.VideoInfo;

            sampGrabber.SetBufferSamples(false);

            hr = sampGrabber.SetMediaType(media);

            DsError.ThrowExceptionForHR(hr);

            DsUtils.FreeAMMediaType(media);

            // Configure the sample grabber callback (second argument: 0 = SampleCB, 1 = BufferCB)
            hr = sampGrabber.SetCallback(this, 1);
            DsError.ThrowExceptionForHR(hr);
        }
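
SetCallback(this, 1) above assumes the enclosing class implements ISampleGrabberCB; the second argument selects which method the grabber invokes (0 = SampleCB, 1 = BufferCB). A minimal, stand-alone sketch of such a callback class (not taken from the example's project):

        // Minimal ISampleGrabberCB sketch: with SetCallback(sink, 1) the grabber
        // calls BufferCB for every delivered sample.
        class FrameSink : ISampleGrabberCB
        {
            public int SampleCB(double sampleTime, IMediaSample pSample)
            {
                // Not used when WhichMethodToCallback == 1.
                return 0;
            }

            public int BufferCB(double sampleTime, IntPtr pBuffer, int bufferLen)
            {
                // Inspect or copy the frame bytes here; the pointer is only
                // valid for the duration of the callback.
                return 0;
            }
        }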
Example #3
        public static void EnumMediaTypes(this IPin pPin, Action<AMMediaType> action)
        {
            IEnumMediaTypes pEnumTypes;

            var hr = pPin.EnumMediaTypes(out pEnumTypes);

            if (hr == DsHlp.S_OK)
            {
                IntPtr ptr;
                int    cFetched;

                if (pEnumTypes.Next(1, out ptr, out cFetched) == DsHlp.S_OK)
                {
                    AMMediaType mt = (AMMediaType)Marshal.PtrToStructure(ptr, typeof(AMMediaType));

                    action(mt);

                    DsUtils.FreeFormatBlock(ptr);
                    Marshal.FreeCoTaskMem(ptr);
                }
                Marshal.ReleaseComObject(pEnumTypes);
            }
        }
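
A hedged usage sketch for the extension above; the pin would come from DsFindPin or an IEnumPins walk. Note that, as written, the extension only inspects the first media type the pin offers and frees it after the callback returns, so the action should not free the type itself:

        // Hypothetical helper showing how the extension might be invoked.
        static void DumpFirstMediaType(IPin pin)
        {
            pin.EnumMediaTypes(mt =>
                Console.WriteLine("major={0} sub={1} format={2}",
                                  mt.majorType, mt.subType, mt.formatType));
        }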
Example #4
        /// <summary>
        /// Helper function to get a pin from a filter
        /// </summary>
        /// <param name="filter"></param>
        /// <param name="pinname"></param>
        /// <returns></returns>
        private IPin GetPin(IBaseFilter filter, string pinname)
        {
            IEnumPins epins;
            int       hr = filter.EnumPins(out epins);

            CheckHr(hr, "Can't enumerate pins");
            IntPtr fetched = Marshal.AllocCoTaskMem(4);
            var    pins    = new IPin[1];

            while (epins.Next(1, pins, fetched) == 0)
            {
                PinInfo pinfo;
                pins[0].QueryPinInfo(out pinfo);
                bool found = (pinfo.name == pinname);
                DsUtils.FreePinInfo(pinfo);
                if (found)
                {
                    return(pins[0]);
                }
            }
            CheckHr(-1, "Pin not found");
            return(null);
        }
        private void SaveSizeInfo(ISampleGrabber sampleGrabber)
        {
            int hr;

            // Get the media type from the SampleGrabber
            AMMediaType media = new AMMediaType();

            hr = sampleGrabber.GetConnectedMediaType(media);
            DsError.ThrowExceptionForHR(hr);

            if ((media.formatType != FormatType.VideoInfo) || (media.formatPtr == IntPtr.Zero))
            {
                throw new NotSupportedException("Unknown Grabber Media Format");
            }

            // Grab the size info
            VideoInfoHeader videoInfoHeader = (VideoInfoHeader)Marshal.PtrToStructure(media.formatPtr, typeof(VideoInfoHeader));

            _previewStride = _previewWidth * (videoInfoHeader.BmiHeader.BitCount / 8);

            DsUtils.FreeAMMediaType(media);
            media = null;
        }
Example #6
        private void SaveSizeInfo(ISampleGrabber sampGrabber)
        {
            int hr;

            AMMediaType media = new AMMediaType();

            hr = sampGrabber.GetConnectedMediaType(media);
            DsError.ThrowExceptionForHR(hr);

            if ((media.formatType != FormatType.VideoInfo) || (media.formatPtr == IntPtr.Zero))
            {
                throw new NotSupportedException("Unknown Grabber Media Format");
            }

            VideoInfoHeader videoInfoHeader = (VideoInfoHeader)Marshal.PtrToStructure(media.formatPtr, typeof(VideoInfoHeader));

            m_videoWidth  = videoInfoHeader.BmiHeader.Width;
            m_videoHeight = videoInfoHeader.BmiHeader.Height;
            m_stride      = m_videoWidth * (videoInfoHeader.BmiHeader.BitCount / 8);

            DsUtils.FreeAMMediaType(media);
            media = null;
        }
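
The width, height and stride saved above are what a BufferCB callback typically needs to interpret the raw frame pointer. A hedged sketch, assuming the fields from the method above and a 24-bit RGB connection:

        // Sketch only: wrap the callback buffer in a Bitmap using the saved size info.
        // The buffer belongs to DirectShow, so copy or save it before returning.
        public int BufferCB(double sampleTime, IntPtr pBuffer, int bufferLen)
        {
            using (var frame = new System.Drawing.Bitmap(
                       m_videoWidth, m_videoHeight, m_stride,
                       System.Drawing.Imaging.PixelFormat.Format24bppRgb, pBuffer))
            {
                frame.RotateFlip(System.Drawing.RotateFlipType.RotateNoneFlipY); // DIBs are bottom-up
                // hand the frame off here (e.g. clone or save it) before it goes out of scope
            }

            return 0;
        }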
Example #7
File: Capture.cs Project: zx901/ArduPlane
        // Set the frame rate and video size
        private void SetConfigParms(ICaptureGraphBuilder2 capGraph, IBaseFilter capFilter, AMMediaType media)
        {
            int    hr;
            object o;

            // Find the stream config interface
            hr = capGraph.FindInterface(
                PinCategory.Capture, MediaType.Video, capFilter, typeof(IAMStreamConfig).GUID, out o);

            IAMStreamConfig videoStreamConfig = o as IAMStreamConfig;

            if (videoStreamConfig == null)
            {
                throw new Exception("Failed to get IAMStreamConfig");
            }

            // Set the new format
            hr = videoStreamConfig.SetFormat(media);
            DsError.ThrowExceptionForHR(hr);

            DsUtils.FreeAMMediaType(media);
            media = null;
        }
        private void InitVideo()
        {
            int            hr;
            IAMTimelineObj pVideoGroupObj;

            // make the root group/composition
            hr = m_pTimeline.CreateEmptyNode(out pVideoGroupObj, TimelineMajorType.Group);
            DESError.ThrowExceptionForHR(hr);

            m_pVideoGroup = (IAMTimelineGroup)pVideoGroupObj;

            // all we set is the major type. The group will automatically use other defaults
            AMMediaType VideoGroupType = new AMMediaType();

            VideoGroupType.majorType = MediaType.Video;

            hr = m_pVideoGroup.SetMediaType(VideoGroupType);
            DESError.ThrowExceptionForHR(hr);
            DsUtils.FreeAMMediaType(VideoGroupType);

            // add the video group to the timeline
            hr = m_pTimeline.AddGroup(pVideoGroupObj);
            DESError.ThrowExceptionForHR(hr);

            IAMTimelineObj pTrack1Obj;

            hr = m_pTimeline.CreateEmptyNode(out pTrack1Obj, TimelineMajorType.Track);
            DESError.ThrowExceptionForHR(hr);

            // tell the composition about the track
            IAMTimelineComp pRootComp = (IAMTimelineComp)pVideoGroupObj;

            hr = pRootComp.VTrackInsBefore(pTrack1Obj, -1);
            DESError.ThrowExceptionForHR(hr);

            m_VideoTrack = (IAMTimelineTrack)pTrack1Obj;
        }
Example #9
        // Token: 0x0600037A RID: 890 RVA: 0x00014720 File Offset: 0x00012920
        internal VideoCapabilities(IAMStreamConfig videoStreamConfig)
        {
            AMMediaType ammediaType = null;
            IntPtr      intPtr      = IntPtr.Zero;

            try
            {
                int num2 = 0;
                int num3 = 0;
                int num = videoStreamConfig.GetNumberOfCapabilities(ref num2, ref num3);
                intPtr = Marshal.AllocCoTaskMem(Marshal.SizeOf(typeof(VideoStreamConfigCaps)));
                IntPtr ptr;
                num         = videoStreamConfig.GetStreamCaps(0, out ptr, intPtr);
                ammediaType = (AMMediaType)Marshal.PtrToStructure(ptr, typeof(AMMediaType));
                VideoStreamConfigCaps videoStreamConfigCaps = (VideoStreamConfigCaps)Marshal.PtrToStructure(intPtr, typeof(VideoStreamConfigCaps));
                this.InputSize             = videoStreamConfigCaps.InputSize;
                this.MinFrameSize          = videoStreamConfigCaps.MinOutputSize;
                this.MaxFrameSize          = videoStreamConfigCaps.MaxOutputSize;
                this.FrameSizeGranularityX = videoStreamConfigCaps.OutputGranularityX;
                this.FrameSizeGranularityY = videoStreamConfigCaps.OutputGranularityY;
                this.MinFrameRate          = 10000000.0 / (double)videoStreamConfigCaps.MaxFrameInterval;
                this.MaxFrameRate          = 10000000.0 / (double)videoStreamConfigCaps.MinFrameInterval;
            }
            finally
            {
                if (intPtr != IntPtr.Zero)
                {
                    Marshal.FreeCoTaskMem(intPtr);
                }
                intPtr = IntPtr.Zero;
                if (ammediaType != null)
                {
                    DsUtils.FreeAMMediaType(ammediaType);
                }
                ammediaType = null;
            }
        }
Example #10
        /// <summary>
        /// Saves the video properties of the SampleGrabber into member fields
        /// and creates a file mapping for the captured frames.
        /// </summary>
        /// <param name="sampGrabber">The <see cref="ISampleGrabber"/>
        /// from which to retrieve the sample information.</param>
        private void SaveSizeInfo(ISampleGrabber sampGrabber)
        {
            int hr;

            // Get the media type from the SampleGrabber
            var media = new AMMediaType();

            hr = sampGrabber.GetConnectedMediaType(media);

            //if (hr != 0)
            //{
            //    ErrorLogger.WriteLine("Could not SaveSizeInfo in Camera.Capture. Message: " + DsError.GetErrorText(hr));
            //}

            //if ((media.formatType != FormatType.VideoInfo) || (media.formatPtr == IntPtr.Zero))
            //{
            //    ErrorLogger.WriteLine("Error in Camera.Capture. Unknown Grabber Media Format");
            //}

            // Grab the size info
            var videoInfoHeader = (VideoInfoHeader)Marshal.PtrToStructure(media.formatPtr, typeof(VideoInfoHeader));

            width    = videoInfoHeader.BmiHeader.Width;
            height   = videoInfoHeader.BmiHeader.Height;
            stride   = width * (videoInfoHeader.BmiHeader.BitCount / 8);
            this.fps = (int)(10000000 / videoInfoHeader.AvgTimePerFrame);

            bufferLength = width * height * 3; // RGB24 = 3 bytes

            // create memory section and map for the OpenCV Image
            // (INVALID_HANDLE_VALUE => pagefile-backed section; 0x04 = PAGE_READWRITE; 0xF001F = FILE_MAP_ALL_ACCESS).
            section    = CreateFileMapping(new IntPtr(-1), IntPtr.Zero, 0x04, 0, (uint)bufferLength, null);
            map        = MapViewOfFile(section, 0xF001F, 0, 0, (uint)bufferLength);
            videoImage = new Image <Bgr, byte>(width, height, stride, map);

            DsUtils.FreeAMMediaType(media);
            media = null;
        }
Example #11
        public async Task UnFriend(string user)
        {
            if (Limiter.Limit(Context, TimeSpan.FromSeconds(1)))
            {
                if (DsUtils.IsMention(user))
                {
                    ulong discordId   = DsUtils.GetMentionId(user);
                    var   discordUser = await Garden.TheFriendTree.GetUserAsync(discordId);

                    var treeUser = Garden.Tree.GetUser(discordId);

                    var currentTreeUser = Garden.Tree.GetUser(Context.User.Id);

                    if (treeUser != null && treeUser.TreeId != currentTreeUser.TreeId && !discordUser.IsBot)
                    {
                        if (currentTreeUser.Friends.Contains(treeUser.TreeId))
                        {
                            treeUser.Friends.Remove(currentTreeUser.TreeId);
                            currentTreeUser.Friends.Remove(treeUser.TreeId);
                            await ReplyAsync($"You are no longer the friend of {DsUtils.GetDiscordUsername(discordUser.Id)}!");
                        }
                        else
                        {
                            await ReplyAsync($"You are not friends with the person.");
                        }
                    }
                    else
                    {
                        await ReplyAsync($"The target user is not valid in this context!");
                    }
                }
                else
                {
                    await ReplyAsync("Please tag a user!");
                }
            }
        }
Example #12
        private void TestCopy()
        {
            int hr;

            AMMediaType pmt1 = new AMMediaType();
            AMMediaType pmt2 = new AMMediaType();
            FilterGraph f    = new FilterGraph();
            IntPtr      ip   = Marshal.GetIUnknownForObject(f);

            pmt1.fixedSizeSamples = true;
            pmt1.formatPtr        = Marshal.AllocCoTaskMem(8);
            Marshal.WriteInt64(pmt1.formatPtr, long.MaxValue);
            pmt1.formatSize          = 8;
            pmt1.formatType          = FormatType.DvInfo;
            pmt1.majorType           = MediaType.AuxLine21Data;
            pmt1.sampleSize          = 65432;
            pmt1.subType             = MediaSubType.AIFF;
            pmt1.temporalCompression = true;
            pmt1.unkPtr = ip;

            hr = DMOUtils.MoCopyMediaType(pmt2, pmt1);

            Debug.Assert(hr == 0 &&
                         pmt2.fixedSizeSamples == true &&
                         pmt2.formatPtr != pmt1.formatPtr &&
                         Marshal.ReadInt64(pmt2.formatPtr) == long.MaxValue &&
                         pmt2.formatSize == 8 &&
                         pmt2.formatType == FormatType.DvInfo &&
                         pmt2.majorType == MediaType.AuxLine21Data &&
                         pmt2.sampleSize == 65432 &&
                         pmt2.subType == MediaSubType.AIFF &&
                         pmt2.temporalCompression == true &&
                         pmt2.unkPtr == ip, "MoCopyMediaType");

            DsUtils.FreeAMMediaType(pmt1);
            DsUtils.FreeAMMediaType(pmt2);
        }
Example #13
        private void StartTVCapture()
        {
            if (!DsUtils.IsCorrectDirectXVersion())
            {
                MessageBox.Show(this, "DirectX 8.1 NOT installed!", "DirectShow.NET", MessageBoxButtons.OK, MessageBoxIcon.Stop);
                this.Close(); return;
            }

            if (!DsDev.GetDevicesOfCat(FilterCategory.VideoInputDevice, out capDevices))
            {
                MessageBox.Show(this, "No video capture devices found!", "DirectShow.NET", MessageBoxButtons.OK, MessageBoxIcon.Stop);
                this.Close(); return;
            }

            DsDevice dev = null;

            if (capDevices.Count == 1)
            {
                dev = capDevices[0] as DsDevice;
            }
            else
            {
                DeviceSelector selector = new DeviceSelector(capDevices);
                selector.ShowDialog(this);
                dev = selector.SelectedDevice;
            }

            if (dev == null)
            {
                this.Close(); return;
            }

            if (!StartupVideo(dev.Mon))
            {
                this.Close();
            }
        }
Example #14
        protected virtual void InsertAudioFilter(IBaseFilter sourceFilter, string audioDecoder)
        {
            if (string.IsNullOrEmpty(audioDecoder))
            {
                return;
            }

            // Set Audio Codec
            // Remove Pin
            var  audioPinFrom = DirectShowLib.DsFindPin.ByName(sourceFilter, "Audio");
            IPin audioPinTo;

            if (audioPinFrom != null)
            {
                int hr = audioPinFrom.ConnectedTo(out audioPinTo);
                if (hr >= 0 && audioPinTo != null)
                {
                    PinInfo pInfo;
                    audioPinTo.QueryPinInfo(out pInfo);
                    FilterInfo fInfo;
                    pInfo.filter.QueryFilterInfo(out fInfo);

                    DirectShowUtil.DisconnectAllPins(m_graph, pInfo.filter);
                    m_graph.RemoveFilter(pInfo.filter);

                    DsUtils.FreePinInfo(pInfo);
                    Marshal.ReleaseComObject(fInfo.pGraph);
                    Marshal.ReleaseComObject(audioPinTo);
                    audioPinTo = null;
                }
                Marshal.ReleaseComObject(audioPinFrom);
                audioPinFrom = null;
            }

            DirectShowUtil.AddFilterToGraph(m_graph, audioDecoder, Guid.Empty);
        }
Example #15
File: BaseGraph.cs Project: ewin66/media
        protected virtual void Dispose(bool disposeManaged)
        {
            GC.SuppressFinalize(this);
            Process currentProc = Process.GetCurrentProcess();

            currentProc.PriorityClass = ProcessPriorityClass.Normal;

            _hostControl.Paint  -= new PaintEventHandler(PaintHandler);
            _hostControl.Resize -= new EventHandler(ResizeMoveHandler);
            _hostControl.Move   -= new EventHandler(ResizeMoveHandler);

            ForceReleaseComObject(_captureFilter);
            _captureFilter = null;
            ForceReleaseComObject(_fileWriter);
            _fileWriter = null;

            ForceReleaseComObject(_mediaControl);
            _mediaControl = null;
            ForceReleaseComObject(_graphBuilder);
            _graphBuilder = null;
            ForceReleaseComObject(_captureGraphBuilder);
            _captureGraphBuilder = null;

            ForceReleaseComObject(_videoRender);
            _videoRender = null;
            ForceReleaseComObject(_audioRender);
            _audioRender = null;
            ForceReleaseComObject(_audioVolumeFilter);
            _audioVolumeFilter          = null;
            _audioVolumeFilterInterface = null;

            ForceReleaseComObject(_vmrWindowlessControl);
            _vmrWindowlessControl = null;

            DsUtils.FreeAMMediaType(_emptyAMMediaType);
        }
Example #16
        static IPin GetPin(IBaseFilter pFilter, string pinname)
        {
            IEnumPins pEnum;
            IntPtr    pPin = Marshal.AllocCoTaskMem(4);

            int hr = pFilter.EnumPins(out pEnum);

            checkHR(hr, "Can't enumerate pins");

            IPin[] pins = new IPin[1];
            while (pEnum.Next(1, pins, pPin) == 0)
            {
                PinInfo pinfo;
                pins[0].QueryPinInfo(out pinfo);
                bool found = (pinname == pinfo.name);
                DsUtils.FreePinInfo(pinfo);
                if (found)
                {
                    return(pins[0]);
                }
            }
            checkHR(-1, "Pin not found");
            return(null);
        }
Example #17
        /// <summary>
        /// Inserts a group into a timeline, and assigns it the supplied media type.
        /// Will free the media type upon completion.
        /// </summary>
        /// <param name="timeline"></param>
        /// <param name="mediaType"></param>
        /// <returns></returns>
        internal static IAMTimelineGroup InsertGroup(IAMTimeline timeline, AMMediaType mediaType, string name)
        {
            try
            {
                int hr = 0;

                IAMTimelineObj groupObj;

                // make the root group/composition
                hr = timeline.CreateEmptyNode(out groupObj, TimelineMajorType.Group);
                DESError.ThrowExceptionForHR(hr);

                if (!string.IsNullOrEmpty(name))
                {
                    hr = groupObj.SetUserName(name);
                    DESError.ThrowExceptionForHR(hr);
                }

                IAMTimelineGroup group = (IAMTimelineGroup)groupObj;

                // Set the media type we just created
                hr = group.SetMediaType(mediaType);
                DESError.ThrowExceptionForHR(hr);


                // add the group to the timeline
                hr = timeline.AddGroup(groupObj);
                DESError.ThrowExceptionForHR(hr);

                return(group);
            }
            finally
            {
                DsUtils.FreeAMMediaType(mediaType);
            }
        }
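
A hedged sketch of how InsertGroup above might be driven, assuming a DES timeline created from the DirectShowLib AMTimeline CoClass. As in the other timeline examples here, only the major type is set, and InsertGroup frees the media type itself:

        // Sketch: create a timeline and add a video group through InsertGroup.
        internal static IAMTimelineGroup CreateVideoGroup(out IAMTimeline timeline)
        {
            timeline = (IAMTimeline)new AMTimeline();

            var groupType = new AMMediaType { majorType = MediaType.Video };

            return InsertGroup(timeline, groupType, "Video Group");
        }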
Example #18
        public WavFileRenderer(ITimeline timeline, string outputFile, AudioFormat format,
                               ICallbackParticipant[] audioParticipants)
            : base(timeline)
        {
            AudioCompressor compressor = null;

            try
            {
                compressor = AudioCompressorFactory.Create(format);

                Cleanup.Add(compressor.Filter);

                RenderToWavDest(outputFile, compressor.Filter, compressor.MediaType, audioParticipants);

                ChangeState(RendererState.Initialized);
            }
            finally
            {
                if ((compressor != null) && (compressor.MediaType != null))
                {
                    DsUtils.FreeAMMediaType(compressor.MediaType);
                }
            }
        }
Example #19
        //internal override Bitmap TakePicture(SynchronizedPictureBox pictureControl)
        //{
        //    if (m_callbackCompleted != null)
        //    {
        //        return null;
        //    }
        //    m_pictureControl = pictureControl;
        //    m_takePictureEnd = false;

        //    DsDevice cameraDevice = DsDevice.GetDevicesOfCat(FilterCategory.VideoInputDevice)[m_cameraDeviceIndex];

        //    IFilterGraph2 filterGraph = null;
        //    IBaseFilter cam = null; IPin camCapture = null;                                     // cam
        //    ISampleGrabber sg = null; IPin sgIn = null;                                         // samplegrabber

        //    try
        //    {
        //        // setup filterGraph & connect camera
        //        filterGraph = (IFilterGraph2)new FilterGraph();
        //        DsError.ThrowExceptionForHR(filterGraph.AddSourceFilterForMoniker(cameraDevice.Mon, null, cameraDevice.Name, out cam));

        //        // setup smarttee and connect so that cam(PinCategory.Capture)->st(PinDirection.Input)
        //        camCapture = DsFindPin.ByCategory(cam, PinCategory.Capture, 0);                 // output
        //        ConfStreamDimensions((IAMStreamConfig)camCapture);

        //        // connect Camera output to SampleGrabber input
        //        sg = (ISampleGrabber)new SampleGrabber();

        //        // configure
        //        AMMediaType media = new AMMediaType();
        //        try
        //        {
        //            media.majorType = MediaType.Video;
        //            media.subType = BPP2MediaSubtype(m_configuration.BPP);  // this will ask the samplegrabber to do conversions for us
        //            media.formatType = FormatType.VideoInfo;
        //            DsError.ThrowExceptionForHR(sg.SetMediaType(media));
        //        }
        //        finally
        //        {
        //            DsUtils.FreeAMMediaType(media);
        //            media = null;
        //        }

        //        DsError.ThrowExceptionForHR(sg.SetCallback(this, 1));                           // 1 = BufferCB
        //        DsError.ThrowExceptionForHR(filterGraph.AddFilter((IBaseFilter)sg, "SG"));
        //        sgIn = DsFindPin.ByDirection((IBaseFilter)sg, PinDirection.Input, 0);           // input
        //        DsError.ThrowExceptionForHR(filterGraph.Connect(camCapture, sgIn));
        //        GetSizeInfo(sg);

        //        // wait until timeout - or picture has been taken
        //        if (m_callbackCompleted == null)
        //        {
        //            m_callbackCompleted = new ManualResetEvent(false);

        //            // start filter
        //            DsError.ThrowExceptionForHR(((IMediaControl)filterGraph).Run());
        //            m_callbackState = 5;
        //            if (m_pictureControl != null)
        //            {
        //                m_callbackCompleted.WaitOne();
        //            }
        //            else
        //            {
        //                if (!m_callbackCompleted.WaitOne(15000, false))
        //                {
        //                    throw new Exception(); //"Timeout while waiting for Picture");
        //                }
        //            }
        //            return m_capturedBitmap;
        //        }
        //        else
        //        {
        //            return null;
        //        }
        //    }
        //    finally
        //    {
        //        // release allocated objects
        //        if (m_callbackCompleted != null)
        //        {
        //            m_callbackCompleted.Close();
        //            m_callbackCompleted = null;
        //        }
        //        if (sgIn != null)
        //        {
        //            Marshal.ReleaseComObject(sgIn);
        //            sgIn = null;
        //        }
        //        if (sg != null)
        //        {
        //            Marshal.ReleaseComObject(sg);
        //            sg = null;
        //        }
        //        if (camCapture != null)
        //        {
        //            Marshal.ReleaseComObject(camCapture);
        //            camCapture = null;
        //        }
        //        if (cam != null)
        //        {
        //            Marshal.ReleaseComObject(cam);
        //            cam = null;
        //        }
        //        if (filterGraph != null)
        //        {
        //            try
        //            {
        //                ((IMediaControl)filterGraph).Stop();
        //            }
        //            catch (Exception) { }
        //            Marshal.ReleaseComObject(filterGraph);
        //            filterGraph = null;
        //        }
        //        m_capturedBitmap = null;
        //        m_callbackCompleted = null;
        //    }
        //}

        #region TakePicture helpers
        private void GetSizeInfo(ISampleGrabber sampleGrabber)
        {
            // Get the media type from the SampleGrabber
            AMMediaType media = new AMMediaType();

            try
            {
                DsError.ThrowExceptionForHR(sampleGrabber.GetConnectedMediaType(media));
                if ((media.formatType != FormatType.VideoInfo) || (media.formatPtr == IntPtr.Zero))
                {
                    throw new NotSupportedException(); //"Unknown Grabber Media Format");
                }

                VideoInfoHeader v = (VideoInfoHeader)Marshal.PtrToStructure(media.formatPtr, typeof(VideoInfoHeader));
                m_configuration.Size = new Size(v.BmiHeader.Width, v.BmiHeader.Height);
                m_configuration.BPP  = v.BmiHeader.BitCount;
                //m_configuration.MediaSubtype = media.subType;
            }
            finally
            {
                DsUtils.FreeAMMediaType(media);
                media = null;
            }
        }
        private void SaveSizeInfo(ISampleGrabber sampGrabber)
        {
            int hr;

            // Get the media type from the SampleGrabber
            AMMediaType media = new AMMediaType();

            hr = sampGrabber.GetConnectedMediaType(media);
            DsError.ThrowExceptionForHR(hr);

            if ((media.formatType != FormatType.WaveEx) || (media.formatPtr == IntPtr.Zero))
            {
                throw new NotSupportedException("Unknown Grabber Audio Format");
            }

            WaveFormatEx infoHeader = (WaveFormatEx)Marshal.PtrToStructure(media.formatPtr, typeof(WaveFormatEx));

            m_Channels      = infoHeader.nChannels;
            m_SampleRate    = infoHeader.nSamplesPerSec;
            m_BitsPerSample = infoHeader.wBitsPerSample;

            DsUtils.FreeAMMediaType(media);
            media = null;
        }
Example #21
        private void ConfStreamDimensions(IAMStreamConfig streamConfig)
        {
            AMMediaType media = null;

            DsError.ThrowExceptionForHR(streamConfig.GetFormat(out media));

            try
            {
                VideoInfoHeader v = (VideoInfoHeader)Marshal.PtrToStructure(media.formatPtr, typeof(VideoInfoHeader));
                if (m_configuration.Size.Width > 0)
                {
                    v.BmiHeader.Width = m_configuration.Size.Width;
                }
                if (m_configuration.Size.Height > 0)
                {
                    v.BmiHeader.Height = m_configuration.Size.Height;
                }
                if (m_configuration.BPP > 0)
                {
                    v.BmiHeader.BitCount = m_configuration.BPP;
                }
                if (m_configuration.MediaSubtype != Guid.Empty)
                {
                    media.subType = m_configuration.MediaSubtype;
                }
                //v.AvgTimePerFrame = 10000000 / 30; // 30 fps. The camera may control FPS itself: in low lighting, exposure can increase and FPS decrease.

                Marshal.StructureToPtr(v, media.formatPtr, false);
                DsError.ThrowExceptionForHR(streamConfig.SetFormat(media));
            }
            finally
            {
                DsUtils.FreeAMMediaType(media);
                media = null;
            }
        }
Example #22
        public IPin GetPin(IBaseFilter filter, string pinname)
        {
            IEnumPins epins;

            if (filter != null)
            {
                int hr = filter.EnumPins(out epins);

                IntPtr fetched = Marshal.AllocCoTaskMem(4);
                IPin[] pins    = new IPin[1];
                while (epins.Next(1, pins, fetched) == 0)
                {
                    PinInfo pinfo;
                    pins[0].QueryPinInfo(out pinfo);
                    bool found = (pinfo.name == pinname);
                    DsUtils.FreePinInfo(pinfo);
                    if (found)
                    {
                        return(pins[0]);
                    }
                }
            }
            return(null);
        }
Example #23
        /// <summary> Read and store the properties </summary>
        public void SaveSizeInfo(ISampleGrabber sampGrabber, int Width, int Height)
        {
            int hr = 0;

            // Build a media type with the requested bit depth and dimensions
            AMMediaType media = DESHelper.GetVideoMediaType(DESConsts.BitCount, Width, Height);

            DsError.ThrowExceptionForHR(hr);

            if ((media.formatType != FormatType.VideoInfo) || (media.formatPtr == IntPtr.Zero))
            {
                throw new NotSupportedException("Unknown Grabber Media Format");
            }

            // Grab the size info
            VideoInfoHeader videoInfoHeader = (VideoInfoHeader)Marshal.PtrToStructure(media.formatPtr, typeof(VideoInfoHeader));

            m_videoWidth  = videoInfoHeader.BmiHeader.Width;
            m_videoHeight = videoInfoHeader.BmiHeader.Height;
            m_stride      = m_videoWidth * (videoInfoHeader.BmiHeader.BitCount / 8);

            DsUtils.FreeAMMediaType(media);
            media = null;
        }
        private void GetFormat(IBaseFilter filter)
        {
            var config = (IAMStreamConfig)GetOutputPin(filter);

            AMMediaType media;
            var         hr = config.GetFormat(out media);

            DsError.ThrowExceptionForHR(hr);

            if (currentFormat != null)
            {
                DsUtils.FreeAMMediaType(currentFormat);
            }
            currentFormat = media;

            var resolutionInfo = ResolutionInfo.Create(currentFormat);

            Settings.Default.Width  = resolutionInfo.Width;
            Settings.Default.Height = resolutionInfo.Height;
            Settings.Default.Bpp    = resolutionInfo.Bpp;
            Settings.Default.Save();

            OnFormatChanged(resolutionInfo);
        }
Example #25
        /// <summary>
        /// Creates the SampleGrabber filter
        /// </summary>
        void CreateSampleGrabber()
        {
            Type comType = Type.GetTypeFromCLSID(new Guid(SAMPLE_GRABBER));

            _sampleGrabber = (ISampleGrabber)Activator.CreateInstance(comType);

            AMMediaType mediaType = new AMMediaType
            {
                majorType  = MediaType.Video,
                subType    = MediaSubType.RGB32,
                formatType = FormatType.VideoInfo
            };

            _sampleGrabber.SetMediaType(mediaType);

            DsUtils.FreeAMMediaType(mediaType);

            int hr = _sampleGrabber.SetOneShot(true);

            DsError.ThrowExceptionForHR(hr);

            hr = _sampleGrabber.SetBufferSamples(true);
            DsError.ThrowExceptionForHR(hr);
        }
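
Because the grabber above is configured with SetOneShot(true) and SetBufferSamples(true), a single buffered frame can be read back with GetCurrentBuffer once the graph has delivered a sample. A hedged sketch using the _sampleGrabber field from the method above:

        // Sketch: copy the buffered RGB32 frame out of the sample grabber.
        private byte[] GrabCurrentFrame()
        {
            int bufferSize = 0;
            int hr = _sampleGrabber.GetCurrentBuffer(ref bufferSize, IntPtr.Zero); // query required size
            DsError.ThrowExceptionForHR(hr);

            IntPtr buffer = Marshal.AllocCoTaskMem(bufferSize);
            try
            {
                hr = _sampleGrabber.GetCurrentBuffer(ref bufferSize, buffer);
                DsError.ThrowExceptionForHR(hr);

                byte[] frame = new byte[bufferSize];
                Marshal.Copy(buffer, frame, 0, bufferSize);
                return frame;
            }
            finally
            {
                Marshal.FreeCoTaskMem(buffer);
            }
        }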
Example #26
        /// <summary>
        /// Retrieve capabilities of a video device
        /// </summary>
        /// <param name="videoStreamConfig">The video stream configuration.</param>
        internal VideoCapabilities(IAMStreamConfig videoStreamConfig)
        {
            if (videoStreamConfig == null)
            {
                throw new ArgumentNullException("videoStreamConfig");
            }

            AMMediaType           mediaType = null;
            VideoStreamConfigCaps caps      = null;
            IntPtr pCaps = IntPtr.Zero;

            try
            {
                // Ensure this device reports capabilities
                int c, size;
                int hr = videoStreamConfig.GetNumberOfCapabilities(out c, out size);
                if (hr != 0)
                {
                    Marshal.ThrowExceptionForHR(hr);
                }
                if (c <= 0)
                {
                    throw new NotSupportedException("This video device does not report capabilities.");
                }
                if (size > Marshal.SizeOf(typeof(VideoStreamConfigCaps)))
                {
                    throw new NotSupportedException("Unable to retrieve video device capabilities. This video device requires a larger VideoStreamConfigCaps structure.");
                }

                // Alloc memory for structure
                pCaps = Marshal.AllocCoTaskMem(Marshal.SizeOf(typeof(VideoStreamConfigCaps)));

                // Retrieve first (and hopefully only) capabilities struct
                hr = videoStreamConfig.GetStreamCaps(0, out mediaType, pCaps);
                if (hr != 0)
                {
                    Marshal.ThrowExceptionForHR(hr);
                }

                // Convert pointers to managed structures
                caps = (VideoStreamConfigCaps)Marshal.PtrToStructure(pCaps, typeof(VideoStreamConfigCaps));

                // Extract info
                InputSize             = caps.InputSize;
                MinFrameSize          = caps.MinOutputSize;
                MaxFrameSize          = caps.MaxOutputSize;
                FrameSizeGranularityX = caps.OutputGranularityX;
                FrameSizeGranularityY = caps.OutputGranularityY;
                MinFrameRate          = (double)10000000 / caps.MaxFrameInterval;
                MaxFrameRate          = (double)10000000 / caps.MinFrameInterval;
            }
            finally
            {
                if (pCaps != IntPtr.Zero)
                {
                    Marshal.FreeCoTaskMem(pCaps);
                }
                pCaps = IntPtr.Zero;
                if (mediaType != null)
                {
                    DsUtils.FreeAMMediaType(mediaType);
                }
                mediaType = null;
            }
        }
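
A hedged sketch of how this constructor is usually reached: locate IAMStreamConfig on the capture pin with FindInterface (the same call other examples on this page use) and pass it in. capGraph and capFilter are assumed to be an ICaptureGraphBuilder2 and a capture filter already added to the graph:

        // Sketch: query the capabilities of a capture filter already in the graph.
        static VideoCapabilities GetCapabilities(ICaptureGraphBuilder2 capGraph, IBaseFilter capFilter)
        {
            object o;
            int hr = capGraph.FindInterface(
                PinCategory.Capture, MediaType.Video, capFilter, typeof(IAMStreamConfig).GUID, out o);
            DsError.ThrowExceptionForHR(hr);

            return new VideoCapabilities((IAMStreamConfig)o);
        }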
Example #27
        /// <summary> build the capture graph for grabber. </summary>
        bool SetupGraph()
        {
            int hr;

            try
            {
                hr = capGraph.SetFiltergraph(graphBuilder);
                if (hr < 0)
                {
                    Marshal.ThrowExceptionForHR(hr);
                }

                hr = graphBuilder.AddFilter(capFilter, " Video Capture Device");
                if (hr < 0)
                {
                    Marshal.ThrowExceptionForHR(hr);
                }

                DsUtils.ShowCapPinDialog(capGraph, capFilter, this.Handle);

                AMMediaType media = new AMMediaType();
                media.majorType  = MediaType.Video;
                media.subType    = MediaSubType.RGB24;
                media.formatType = FormatType.VideoInfo;                // ???
                hr = sampGrabber.SetMediaType(media);
                if (hr < 0)
                {
                    Marshal.ThrowExceptionForHR(hr);
                }

                hr = graphBuilder.AddFilter(baseGrabFlt, " Grabber");
                if (hr < 0)
                {
                    Marshal.ThrowExceptionForHR(hr);
                }

                Guid cat = PinCategory.Preview;
                Guid med = MediaType.Video;
                hr = capGraph.RenderStream(ref cat, ref med, capFilter, null, null); // baseGrabFlt
                if (hr < 0)
                {
                    Marshal.ThrowExceptionForHR(hr);
                }

                cat = PinCategory.Capture;
                med = MediaType.Video;
                hr  = capGraph.RenderStream(ref cat, ref med, capFilter, null, baseGrabFlt); // baseGrabFlt
                if (hr < 0)
                {
                    Marshal.ThrowExceptionForHR(hr);
                }

                media = new AMMediaType();
                hr    = sampGrabber.GetConnectedMediaType(media);
                if (hr < 0)
                {
                    Marshal.ThrowExceptionForHR(hr);
                }
                if ((media.formatType != FormatType.VideoInfo) || (media.formatPtr == IntPtr.Zero))
                {
                    throw new NotSupportedException("Unknown Grabber Media Format");
                }

                videoInfoHeader = (VideoInfoHeader)Marshal.PtrToStructure(media.formatPtr, typeof(VideoInfoHeader));
                Marshal.FreeCoTaskMem(media.formatPtr); media.formatPtr = IntPtr.Zero;

                hr = sampGrabber.SetBufferSamples(false);
                if (hr == 0)
                {
                    hr = sampGrabber.SetOneShot(false);
                }
                if (hr == 0)
                {
                    hr = sampGrabber.SetCallback(null, 0);
                }
                if (hr < 0)
                {
                    Marshal.ThrowExceptionForHR(hr);
                }

                return(true);
            }
            catch (Exception ee)
            {
                MessageBox.Show(this, "Could not setup graph\r\n" + ee.Message, "Error", MessageBoxButtons.OK, MessageBoxIcon.Stop);

                return(false);
            }
        }
Example #28
        /// <summary>
        /// Build the capture graph
        /// </summary>
        public void CreateGraph(string Resolution, int Frames)
        {
            if (graphBuilder != null)
            {
                return;
            }
            graphBuilder        = (IFilterGraph2) new FilterGraph();                  // obtain the IFilterGraph2 interface object
            captureGraphBuilder = (ICaptureGraphBuilder2) new CaptureGraphBuilder2(); // obtain the ICaptureGraphBuilder2 interface object


            int hr = captureGraphBuilder.SetFiltergraph(this.graphBuilder);// attach the filter graph to the capture graph

            DsError.ThrowExceptionForHR(hr);

            // add the video input device to the graph
            hr = graphBuilder.AddFilter(theDevice, "source filter");
            DsError.ThrowExceptionForHR(hr);

            // add the video compressor filter to the graph
            if (theDeviceCompressor != null)
            {
                hr = graphBuilder.AddFilter(theDeviceCompressor, "devicecompressor filter");
                DsError.ThrowExceptionForHR(hr);
            }
            // add the audio input device to the graph
            if (theAudio != null)
            {
                hr = graphBuilder.AddFilter(theAudio, "audio filter");
                DsError.ThrowExceptionForHR(hr);
            }
            // add the audio compressor filter to the graph
            if (theAudioCompressor != null)
            {
                hr = graphBuilder.AddFilter(theAudioCompressor, "audiocompressor filter");
                DsError.ThrowExceptionForHR(hr);
            }
            mediaControl = (IMediaControl)this.graphBuilder;// obtain the IMediaControl interface object

            m_PictureReady = new ManualResetEvent(false);

            sampleGrabber = new SampleGrabber() as ISampleGrabber;                            // create the sample grabber interface
            ConfigureSampleGrabber(sampleGrabber);                                            // configure the SampleGrabber and add the preview callback
            hr = this.graphBuilder.AddFilter(sampleGrabber as IBaseFilter, "Frame Callback"); // add the SampleGrabber to the graph
            DsError.ThrowExceptionForHR(hr);


            // read the camera configuration
            AMMediaType mediaType = new AMMediaType();
            object      oVideoStreamConfig;// video stream configuration info

            hr = captureGraphBuilder.FindInterface(PinCategory.Capture, MediaType.Video, theDevice, typeof(IAMStreamConfig).GUID, out oVideoStreamConfig);
            if (!(oVideoStreamConfig is IAMStreamConfig videoStreamConfig))
            {
                throw new Exception("Failed to get IAMStreamConfig");
            }

            //test Failed
            //read the configuration stored in the camera
            //int iCount;
            //int iSize;
            //hr = videoStreamConfig.GetNumberOfCapabilities(out iCount, out iSize);
            //if (hr != 0)
            //    Marshal.ThrowExceptionForHR(hr);
            //if (iSize == Marshal.SizeOf(typeof(VideoStreamConfigCaps)))//?? sizeof
            //{
            //    IntPtr sccPtr = Marshal.AllocCoTaskMem(Marshal.SizeOf(typeof(VideoStreamConfigCaps)));
            //    for (int iFormat = 0; iFormat < iCount; iFormat++)
            //    {
            //        VideoStreamConfigCaps scc=new VideoStreamConfigCaps();
            //        IntPtr pmtConfigIntPtr;
            //        AMMediaType pmtConfig = new AMMediaType() ;
            //        hr = videoStreamConfig.GetStreamCaps(iFormat, out pmtConfigIntPtr, sccPtr);
            //        if (hr != 0)
            //            Marshal.ThrowExceptionForHR(hr);
            //        Marshal.PtrToStructure(pmtConfigIntPtr, pmtConfig);
            //        //read the configured values
            //        if (pmtConfig.majorType == MediaType.Video && pmtConfig.subType== MediaSubType.RGB24 && pmtConfig.formatType == FormatType.VideoInfo)
            //        {


            //        }
            //    }
            //}
            //test end


            hr = videoStreamConfig.GetFormat(out mediaType);
            if (hr != 0)
            {
                Marshal.ThrowExceptionForHR(hr);
            }



            DsError.ThrowExceptionForHR(hr);


            // The formatPtr member points to different structures
            // depending on the formatType
            object formatStruct;

            if (mediaType.formatType == FormatType.WaveEx)
            {
                formatStruct = new WaveFormatEx();
            }
            else if (mediaType.formatType == FormatType.VideoInfo)
            {
                formatStruct = new VideoInfoHeader();
            }
            else if (mediaType.formatType == FormatType.VideoInfo2)
            {
                formatStruct = new VideoInfoHeader2();
            }
            else
            {
                throw new NotSupportedException("This device does not support a recognized format block.");
            }

            // Retrieve the nested structure
            Marshal.PtrToStructure(mediaType.formatPtr, formatStruct);


            if (formatStruct is VideoInfoHeader)
            {
                VideoInfoHeader videoInfoHeader = formatStruct as VideoInfoHeader;
                // set the frame rate
                if (Frames > 0)
                {
                    videoInfoHeader.AvgTimePerFrame = 10000000 / Frames;
                }
                // set width and height
                if (!string.IsNullOrEmpty(Resolution) && Resolution.Split('*').Length > 1)
                {
                    videoInfoHeader.BmiHeader.Width  = Convert.ToInt32(Resolution.Split('*')[0]);
                    videoInfoHeader.BmiHeader.Height = Convert.ToInt32(Resolution.Split('*')[1]);
                }
                // copy the structure back into the media type
                Marshal.StructureToPtr(videoInfoHeader, mediaType.formatPtr, false);
            }
            else if (formatStruct is VideoInfoHeader2)
            {
                VideoInfoHeader2 videoInfoHeader = formatStruct as VideoInfoHeader2;
                // set the frame rate
                if (Frames > 0)
                {
                    videoInfoHeader.AvgTimePerFrame = 10000000 / Frames;
                }
                // set width and height
                if (!string.IsNullOrEmpty(Resolution) && Resolution.Split('*').Length > 1)
                {
                    videoInfoHeader.BmiHeader.Width  = Convert.ToInt32(Resolution.Split('*')[0]);
                    videoInfoHeader.BmiHeader.Height = Convert.ToInt32(Resolution.Split('*')[1]);
                }
                // copy the structure back into the media type
                Marshal.StructureToPtr(videoInfoHeader, mediaType.formatPtr, false);
            }


            //VideoInfoHeader videoInfoHeader = new VideoInfoHeader();
            //Marshal.PtrToStructure(mediaType.formatPtr, videoInfoHeader);


            //if (Frames > 0)
            //{
            //    videoInfoHeader.AvgTimePerFrame = 10000000 / Frames;
            //}
            //// set width and height
            //if (!string.IsNullOrEmpty(Resolution) && Resolution.Split('*').Length > 1)
            //{
            //    videoInfoHeader.BmiHeader.Width = Convert.ToInt32(Resolution.Split('*')[0]);
            //    videoInfoHeader.BmiHeader.Height = Convert.ToInt32(Resolution.Split('*')[1]);
            //}
            //// copy the media structure back
            //Marshal.StructureToPtr(videoInfoHeader, mediaType.formatPtr, false);
            // apply the new video format
            hr = videoStreamConfig.SetFormat(mediaType);
            DsError.ThrowExceptionForHR(hr);
            DsUtils.FreeAMMediaType(mediaType);
            mediaType = null;
        }
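
CreateGraph above only builds and configures the graph; capture is started and stopped through the IMediaControl obtained earlier. A minimal sketch using the mediaControl field from the method above:

        // Sketch: run and stop the configured capture graph.
        public void Start()
        {
            int hr = mediaControl.Run();
            DsError.ThrowExceptionForHR(hr);
        }

        public void Stop()
        {
            if (mediaControl != null)
            {
                mediaControl.Stop();
            }
        }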
Example #29
        public override int Initialize()
        {
            if (!File.Exists(filename))
            {
                return(-1);
            }

            try
            {
                int hr = 0;
                graphBuilder2   = (IFilterGraph2) new FilterGraph();
                lavSplitter     = new LAVSplitter() as IBaseFilter;
                lavVideoDecoder = new LAVVideoDecoder() as IBaseFilter;
                lavAudioDecoder = new LAVAudioDecoder() as IBaseFilter;
                var lavSplitterSource = lavSplitter as IFileSourceFilter;
                soundDevice   = new DirectSoundDevice() as IBaseFilter;
                videoRenderer = new VideoRenderer() as IBaseFilter;
                lavSplitterSource.Load(filename, null);
                hr = graphBuilder2.AddFilter(lavSplitter, "LAV Splitter");
                DsError.ThrowExceptionForHR(hr);
                hr = graphBuilder2.AddFilter(lavVideoDecoder, "LAV Video Decoder");
                DsError.ThrowExceptionForHR(hr);
                hr = graphBuilder2.AddFilter(lavAudioDecoder, "LAV Audio Decoder");
                DsError.ThrowExceptionForHR(hr);
                hr = graphBuilder2.AddFilter(soundDevice, "Default Direct Sound Device");
                DsError.ThrowExceptionForHR(hr);
                hr = graphBuilder2.AddFilter(videoRenderer, "Video Renderer");
                DsError.ThrowExceptionForHR(hr);
                var videoPin              = GetPin(lavSplitter, "Video");
                var audioPin              = GetPin(lavSplitter, "Audio");
                var videoDecoderInputPin  = GetPin(lavVideoDecoder, "Input");
                var videoDecoderOutputPin = GetPin(lavVideoDecoder, "Output");
                var audioDecoderInputPin  = GetPin(lavAudioDecoder, "Input");
                var audioDecoderOutputPin = GetPin(lavAudioDecoder, "Output");
                var soundInputPin         = GetPin(soundDevice, "Audio Input pin (rendered)");
                var videoRendererInputPin = GetPin(videoRenderer, "Input");
                hr = graphBuilder2.Connect(videoPin, videoDecoderInputPin);
                DsError.ThrowExceptionForHR(hr);
                hr = graphBuilder2.Connect(audioPin, audioDecoderInputPin);
                DsError.ThrowExceptionForHR(hr);
                hr = graphBuilder2.Connect(audioDecoderOutputPin, soundInputPin);
                DsError.ThrowExceptionForHR(hr);
                sampleGrabber = new SampleGrabber() as ISampleGrabber;
                var amMediaType = new AMMediaType
                {
                    majorType  = MediaType.Video,
                    subType    = MediaSubType.RGB32,
                    formatType = FormatType.VideoInfo
                };
                hr = sampleGrabber.SetMediaType(amMediaType);
                DsError.ThrowExceptionForHR(hr);
                DsUtils.FreeAMMediaType(amMediaType);
                hr = graphBuilder2.AddFilter((IBaseFilter)sampleGrabber, "SampleGrabber");
                DsError.ThrowExceptionForHR(hr);
                var sampleGrabberInputPin  = GetPin((IBaseFilter)sampleGrabber, "Input");
                var sampleGrabberOutputPin = GetPin((IBaseFilter)sampleGrabber, "Output");
                hr = graphBuilder2.Connect(videoDecoderOutputPin, sampleGrabberInputPin);
                DsError.ThrowExceptionForHR(hr);
                hr = graphBuilder2.Connect(sampleGrabberOutputPin, videoRendererInputPin);
                DsError.ThrowExceptionForHR(hr);
                base.Initialize();
                sampleGrabber.SetCallback(this, 1);
                sampleGrabber.SetBufferSamples(true);
                sampleGrabber.SetOneShot(false);
                var mediaType = new AMMediaType();
                videoPin.ConnectionMediaType(mediaType);
                var bitmapInfoHeader = (BitmapInfoHeader)mediaType;
                this.width  = bitmapInfoHeader.Width;
                this.height = bitmapInfoHeader.Height;
                this.maxu   = 1;
                this.maxv   = 1;
                textures    = new TextureBase[5];
                for (var i = 0; i < textures.Length; i++)
                {
                    textures[i] = TextureFactoryManager.Factory.Create(device, width, height, 1, false);
                }

                videoWindow = (IVideoWindow)graphBuilder2;

                hr = videoWindow.put_Visible((int)OABool.False);
                DsError.ThrowExceptionForHR(hr);
                hr = videoWindow.put_WindowState((int)WindowState.Hide);
                DsError.ThrowExceptionForHR(hr);
                hr = videoWindow.SetWindowPosition(-1000, -1000, 10, 10);
                DsError.ThrowExceptionForHR(hr);
                hr = videoWindow.put_AutoShow((int)OABool.False);
                DsError.ThrowExceptionForHR(hr);
                hr = videoWindow.put_Owner(MovieUtility.Window);
                DsError.ThrowExceptionForHR(hr);
            }
            catch (Exception e)
            {
                throw new Exception("Fatal Error in Movie Loading", e);
            }
            return(0);
        }
Example #30
        /// <summary>
        /// Opens the media by initializing the DirectShow graph
        /// </summary>
        protected virtual void OpenSource()
        {
            /* Make sure we clean up any remaining mess */
            FreeResources();

            if (m_sourceUri == null)
            {
                return;
            }

            string fileSource = m_sourceUri.OriginalString;

            if (string.IsNullOrEmpty(fileSource))
            {
                return;
            }

            try
            {
                /* Creates the GraphBuilder COM object */
                m_graph = new FilterGraphNoThread() as IGraphBuilder;

                if (m_graph == null)
                {
                    throw new Exception("Could not create a graph");
                }

                var filterGraph = m_graph as IFilterGraph2;

                if (filterGraph == null)
                {
                    throw new Exception("Could not QueryInterface for the IFilterGraph2");
                }

                IBaseFilter sourceFilter;
                int         hr;

                //var file = System.IO.File.CreateText(@"M:\DirectShowLog.txt");
                //filterGraph.SetLogFile((file.BaseStream as System.IO.FileStream).SafeFileHandle.DangerousGetHandle());


                // Set LAV Splitter
                LAVSplitterSource reader = new LAVSplitterSource();
                sourceFilter = reader as IBaseFilter;
                var objectWithSite = reader as IObjectWithSite;
                if (objectWithSite != null)
                {
                    objectWithSite.SetSite(this);
                }

                hr = m_graph.AddFilter(sourceFilter, SplitterSource);
                DsError.ThrowExceptionForHR(hr);


                IFileSourceFilter interfaceFile = (IFileSourceFilter)sourceFilter;
                hr = interfaceFile.Load(fileSource, null);
                DsError.ThrowExceptionForHR(hr);


                // Set Video Codec
                // Remove Pin
                var  videoPinFrom = DirectShowLib.DsFindPin.ByName(sourceFilter, "Video");
                IPin videoPinTo;
                if (videoPinFrom != null)
                {
                    hr = videoPinFrom.ConnectedTo(out videoPinTo);
                    if (hr >= 0 && videoPinTo != null)
                    {
                        PinInfo pInfo;
                        videoPinTo.QueryPinInfo(out pInfo);
                        FilterInfo fInfo;
                        pInfo.filter.QueryFilterInfo(out fInfo);

                        DirectShowUtil.DisconnectAllPins(m_graph, pInfo.filter);
                        m_graph.RemoveFilter(pInfo.filter);

                        DsUtils.FreePinInfo(pInfo);
                        Marshal.ReleaseComObject(fInfo.pGraph);
                        Marshal.ReleaseComObject(videoPinTo);
                        videoPinTo = null;
                    }
                    Marshal.ReleaseComObject(videoPinFrom);
                    videoPinFrom = null;
                }

                DirectShowUtil.AddFilterToGraph(m_graph, VideoDecoder, Guid.Empty);

                // Set Audio Codec
                // Remove Pin
                var  audioPinFrom = DirectShowLib.DsFindPin.ByName(sourceFilter, "Audio");
                IPin audioPinTo;
                if (audioPinFrom != null)
                {
                    hr = audioPinFrom.ConnectedTo(out audioPinTo);
                    if (hr >= 0 && audioPinTo != null)
                    {
                        PinInfo pInfo;
                        audioPinTo.QueryPinInfo(out pInfo);
                        FilterInfo fInfo;
                        pInfo.filter.QueryFilterInfo(out fInfo);

                        DirectShowUtil.DisconnectAllPins(m_graph, pInfo.filter);
                        m_graph.RemoveFilter(pInfo.filter);

                        DsUtils.FreePinInfo(pInfo);
                        Marshal.ReleaseComObject(fInfo.pGraph);
                        Marshal.ReleaseComObject(audioPinTo);
                        audioPinTo = null;
                    }
                    Marshal.ReleaseComObject(audioPinFrom);
                    audioPinFrom = null;
                }

                DirectShowUtil.AddFilterToGraph(m_graph, AudioDecoder, Guid.Empty);


                /* Add our preferred audio renderer */
                InsertAudioRenderer(AudioRenderer);

                IBaseFilter renderer = CreateVideoRenderer(VideoRenderer, m_graph, 2);



                /* We will want to enum all the pins on the source filter */
                IEnumPins pinEnum;

                hr = sourceFilter.EnumPins(out pinEnum);
                DsError.ThrowExceptionForHR(hr);

                IntPtr fetched = IntPtr.Zero;
                IPin[] pins    = { null };

                /* Counter for how many pins successfully rendered */
                int pinsRendered = 0;

                if (VideoRenderer == VideoRendererType.VideoMixingRenderer9)
                {
                    var mixer = renderer as IVMRMixerControl9;

                    if (mixer != null)
                    {
                        VMR9MixerPrefs dwPrefs;
                        mixer.GetMixingPrefs(out dwPrefs);
                        dwPrefs &= ~VMR9MixerPrefs.RenderTargetMask;
                        dwPrefs |= VMR9MixerPrefs.RenderTargetRGB;
                        //mixer.SetMixingPrefs(dwPrefs);
                    }
                }

                /* Test using FFDShow Video Decoder Filter
                 * var ffdshow = new FFDShow() as IBaseFilter;
                 *
                 * if (ffdshow != null)
                 *  m_graph.AddFilter(ffdshow, "ffdshow");
                 */


                /* Loop over each pin of the source filter */
                while (pinEnum.Next(pins.Length, pins, fetched) == 0)
                {
                    if (filterGraph.RenderEx(pins[0],
                                             AMRenderExFlags.RenderToExistingRenderers,
                                             IntPtr.Zero) >= 0)
                    {
                        pinsRendered++;
                    }


                    Marshal.ReleaseComObject(pins[0]);
                }


                Marshal.ReleaseComObject(pinEnum);
                Marshal.ReleaseComObject(sourceFilter);

                if (pinsRendered == 0)
                {
                    throw new Exception("Could not render any streams from the source Uri");
                }

#if DEBUG
                /* Adds the GB to the ROT so we can view
                 * it in graphedit */
                m_dsRotEntry = new DsROTEntry(m_graph);
#endif
                /* Configure the graph in the base class */
                SetupFilterGraph(m_graph);

                HasVideo = true;
                /* Sets the NaturalVideoWidth/Height */
                //SetNativePixelSizes(renderer);
            }
            catch (Exception ex)
            {
                /* This exception will usually happen if the media does
                 * not exist or could not open due to not having the
                 * proper filters installed */
                FreeResources();

                /* Fire our failed event */
                InvokeMediaFailed(new MediaFailedEventArgs(ex.Message, ex));
            }

            InvokeMediaOpened();
        }