/// <summary>
 ///  Releases the nested format block and any embedded
 ///  COM object held by an AMMediaType instance.
 /// </summary>
 public static void FreeWMMediaType(AMMediaType mediaType)
 {
     if (mediaType == null)
     {
         return;
     }

     // Free the format block that lives on the COM task allocator heap.
     if (mediaType.formatSize != 0)
     {
         Marshal.FreeCoTaskMem(mediaType.formatPtr);
         mediaType.formatPtr = IntPtr.Zero;
         mediaType.formatSize = 0;
     }

     // Drop the reference to the embedded COM interface, if one is set.
     if (mediaType.unkPtr != IntPtr.Zero)
     {
         Marshal.Release(mediaType.unkPtr);
         mediaType.unkPtr = IntPtr.Zero;
     }
 }
Esempio n. 2
0
        /// <summary>
        /// Set the Framerate, and video size
        /// </summary>
        /// <param name="capGraph">The <see cref="ICaptureGraphBuilder2"/> interface.</param>
        /// <param name="capFilter">The <see cref="IBaseFilter"/> of the capture device.</param>
        /// <param name="frameRate">The new framerate to be used; 0 or less keeps the device's current rate.</param>
        /// <param name="width">The new video width to be used; 0 or less keeps the current width.</param>
        /// <param name="height">The new video height to be used; 0 or less keeps the current height.</param>
        private void SetConfigParms(ICaptureGraphBuilder2 capGraph, IBaseFilter capFilter, int frameRate, int width,
                                    int height)
        {
            int         hr;
            object      o;
            AMMediaType media = null;

            // Find the stream config interface on the capture pin.
            hr = this.capGraph.FindInterface(PinCategory.Capture, MediaType.Video, capFilter,
                                             typeof(IAMStreamConfig).GUID, out o);

            videoControl      = capFilter as IAMVideoControl;
            videoStreamConfig = o as IAMStreamConfig;

            // Get the existing format block
            if (videoStreamConfig != null)
            {
                hr = videoStreamConfig.GetFormat(out media);
            }

            // BUGFIX: the original dereferenced 'media' unconditionally and
            // crashed with a NullReferenceException whenever the stream config
            // interface was missing or GetFormat failed.
            if (media == null || media.formatPtr == IntPtr.Zero)
            {
                return;
            }

            try
            {
                // copy out the videoinfoheader
                var v = new VideoInfoHeader();
                Marshal.PtrToStructure(media.formatPtr, v);

                // if overriding, set the requested values
                if (frameRate > 0)
                {
                    // AvgTimePerFrame is expressed in 100-nanosecond units.
                    v.AvgTimePerFrame = 10000000 / frameRate;
                    this.fps          = frameRate;
                }
                else
                {
                    this.fps = (int)(10000000 / v.AvgTimePerFrame);
                }

                if (width > 0)
                {
                    v.BmiHeader.Width = width;
                }

                if (height > 0)
                {
                    v.BmiHeader.Height = height;
                }

                // Copy the media structure back
                Marshal.StructureToPtr(v, media.formatPtr, true);

                // Set the new format. 'media' is non-null here, which implies
                // videoStreamConfig was non-null above, so no extra guard needed.
                hr = videoStreamConfig.SetFormat(media);
            }
            finally
            {
                // BUGFIX: always release the format block returned by GetFormat,
                // even if one of the Marshal calls above throws.
                DsUtils.FreeAMMediaType(media);
                media = null;
            }
        }
Esempio n. 3
0
        // Thread entry point: builds a DirectShow capture graph for the
        // selected device, attaches a sample grabber, runs the graph until
        // stopEvent is signalled, then tears the graph down.
        public void WorkerThread()
        {
            int hr;
            Guid cat;
            Guid med;

            // grabber callback object (receives sample buffers)
            Grabber grabber = new Grabber(this);

            // raw COM objects
            object graphObj = null;
            object grabberObj = null;

            // interfaces
            IGraphBuilder graphBuilder = null;
            ICaptureGraphBuilder2 captureGraphBuilder = null;
            IBaseFilter videoDeviceFilter = null;
            IBaseFilter grabberFilter = null;
            ISampleGrabber sg = null;
            IMediaControl mc = null;

            try
            {
                // Make a new filter graph
                graphObj = Activator.CreateInstance(
                Type.GetTypeFromCLSID(Clsid.FilterGraph, true));
                graphBuilder = (IGraphBuilder)graphObj;

                // Get the Capture Graph Builder
                Guid clsid = Clsid.CaptureGraphBuilder2;
                Guid riid = typeof(ICaptureGraphBuilder2).GUID;
                captureGraphBuilder = (ICaptureGraphBuilder2)
                TempFix.CreateDsInstance(ref clsid, ref riid);

                // Link the CaptureGraphBuilder to the filter graph
                hr = captureGraphBuilder.SetFiltergraph(graphBuilder);
                if (hr < 0) Marshal.ThrowExceptionForHR(hr);

                // Get the video device and add it to the filter graph
                if (source != null)
                {
                    videoDeviceFilter = (IBaseFilter)
                    Marshal.BindToMoniker(source);
                    hr = graphBuilder.AddFilter(videoDeviceFilter,
                    "Video Capture Device");
                    if (hr < 0) Marshal.ThrowExceptionForHR(hr);
                }

                // create sample grabber, object and filter
                grabberObj = Activator.CreateInstance(
                Type.GetTypeFromCLSID(Clsid.SampleGrabber, true));
                grabberFilter = (IBaseFilter)grabberObj;
                sg = (ISampleGrabber)grabberObj;

                // add sample grabber filter to filter graph
                hr = graphBuilder.AddFilter(grabberFilter, "grabber");
                if (hr < 0) Marshal.ThrowExceptionForHR(hr);

                // Try looking for an video device interleaved media type
                IBaseFilter testFilter = videoDeviceFilter;
                // grabberFilter (not supported)
                object o;
                cat = PinCategory.Capture;
                med = MediaType.Interleaved;
                Guid iid = typeof(IAMStreamConfig).GUID;
                hr = captureGraphBuilder.FindInterface(
                ref cat, ref med, testFilter, ref iid, out o);

                if (hr != 0)
                {
                    // If not found, try looking for a video media type
                    med = MediaType.Video;
                    hr = captureGraphBuilder.FindInterface(
                    ref cat, ref med, testFilter, ref iid, out o);

                    if (hr != 0)
                        o = null;
                }
                // Set the video stream configuration to data member
                videoStreamConfig = o as IAMStreamConfig;
                o = null;

                // Experimental testing: Try to set the Frame Size & Rate
                // Results: When enabled, the grabber video breaks up into
                // several duplicate frames (6 frames)
                bool bdebug = true;
                if (bdebug)
                {
                    BitmapInfoHeader bmiHeader;
                    bmiHeader = (BitmapInfoHeader)
                    getStreamConfigSetting(videoStreamConfig, "BmiHeader");
                    bmiHeader.Width = framesize.Width;
                    bmiHeader.Height = framesize.Height;
                    setStreamConfigSetting(videoStreamConfig,
                    "BmiHeader", bmiHeader);

                    long avgTimePerFrame = (long)(10000000 / framerate);
                    setStreamConfigSetting(videoStreamConfig,
                    "AvgTimePerFrame", avgTimePerFrame);
                }

                // connect pins (Turns on the video device)
                if (graphBuilder.Connect(DSTools.GetOutPin(
                videoDeviceFilter, 0),
                DSTools.GetInPin(grabberFilter, 0)) < 0)
                    throw new ApplicationException(
                    "Failed connecting filters");

                // Set the sample grabber media type settings
                AMMediaType mt = new AMMediaType();
                try
                {
                    mt.majorType = MediaType.Video;
                    mt.subType = MediaSubType.RGB24;
                    sg.SetMediaType(mt);

                    // get media type of the established connection
                    if (sg.GetConnectedMediaType(mt) == 0)
                    {
                        VideoInfoHeader vih = (VideoInfoHeader)Marshal.PtrToStructure(mt.formatPtr, typeof(VideoInfoHeader));
                        System.Diagnostics.Debug.WriteLine("width = " + vih.BmiHeader.Width + ", height = " + vih.BmiHeader.Height);
                        grabber.Width = vih.BmiHeader.Width;
                        grabber.Height = vih.BmiHeader.Height;
                    }
                }
                finally
                {
                    // BUGFIX: dispose unconditionally; the original disposed mt
                    // only when GetConnectedMediaType succeeded, leaking the
                    // media type otherwise.
                    mt.Dispose();
                }

                // render
                graphBuilder.Render(DSTools.GetOutPin(grabberFilter, 0));

                // Set various sample grabber properties
                sg.SetBufferSamples(false);
                sg.SetOneShot(false);
                sg.SetCallback(grabber, 1);

                // Do not show active (source) window
                IVideoWindow win = (IVideoWindow)graphObj;
                win.put_AutoShow(false);
                win = null;

                // get media control
                mc = (IMediaControl)graphObj;

                // run the graph until asked to stop
                mc.Run();

                while (!stopEvent.WaitOne(0, true))
                {
                    Thread.Sleep(100);
                }
                mc.StopWhenReady();
            }
            // Deliberate best-effort: a worker thread must not crash the
            // process, so failures are only logged.
            catch (Exception e)
            {
                System.Diagnostics.Debug.WriteLine("----: " + e.Message);
            }
            // finalization block
            finally
            {
                // clear interface references (they alias graphObj/grabberObj)
                mc = null;
                graphBuilder = null;
                captureGraphBuilder = null;
                videoDeviceFilter = null;
                grabberFilter = null;
                sg = null;

                // release the underlying COM objects exactly once
                if (graphObj != null)
                {
                    Marshal.ReleaseComObject(graphObj);
                    graphObj = null;
                }
                if (grabberObj != null)
                {
                    Marshal.ReleaseComObject(grabberObj);
                    grabberObj = null;
                }
            }
        }
Esempio n. 4
0
		/// <summary>
		/// Set video type for the specified pin interface
		/// </summary>
		/// <param name="streamConfig">Stream configuration interface of the pin whose format is changed.</param>
		/// <param name="newValue">Guid of the new media subtype (color space) to apply.</param>
		/// <returns>True when the current format was read and updated successfully; false on any failing HRESULT.</returns>
		public bool setMediaSubType(IAMStreamConfig streamConfig, Guid newValue)
		{
#if DSHOWNET
			// DShowNET build: GetFormat hands back a raw AM_MEDIA_TYPE pointer
			// which we must marshal into a managed copy and free ourselves.
			IntPtr pmt = IntPtr.Zero;
#endif
			AMMediaType mediaType = new AMMediaType();

			try 
			{
				// Get the current format info
#if DSHOWNET
				int hr = streamConfig.GetFormat(out pmt);
				if(hr < 0)
				{
					return false;
				}
				// Copy the unmanaged AM_MEDIA_TYPE into the managed wrapper.
				Marshal.PtrToStructure(pmt, mediaType);
#else
				int hr = streamConfig.GetFormat(out mediaType);
				if(hr < 0)
				{
					return false;
				}
#endif

				// Change the media subtype
				// Each enum value has a Guid associated with it
				// We store the Guid as a string in a LabelAttribute
				// applied to each enum value. See the ColorSpaceEnum.
				mediaType.subType = newValue;

				// Save the changes
				hr = streamConfig.SetFormat(mediaType);
				if(hr < 0)
				{
					return false;
				}		
			}
			finally
			{
				// Free the nested format block / embedded COM reference of the
				// managed media type...
				DsUtils.FreeAMMediaType(mediaType);
#if DSHOWNET
				// ...and the raw allocation returned by GetFormat (safe when
				// still IntPtr.Zero: FreeCoTaskMem ignores a null pointer).
				Marshal.FreeCoTaskMem(pmt);
#endif
			}
			return true;
		}
		/// <summary>
		/// Read the properties of the first bitmap to finish initializing the writer.
		/// </summary>
		/// <param name="mBitmap">First bitmap; supplies size and pixel format.</param>
		/// <param name="licenseURL">License acquisition URL (currently unused here; kept for interface compatibility).</param>
		/// <param name="licenseIssuerURL">License issuer URL (currently unused here; kept for interface compatibility).</param>
		private void Initialize(Bitmap mBitmap,string licenseURL,string licenseIssuerURL)
		{
			AMMediaType mtype = new AMMediaType();

			VideoInfoHeader videoInfoHeader = new VideoInfoHeader();

			// Create the VideoInfoHeader using info from the bitmap
			videoInfoHeader.BmiHeader.Size = Marshal.SizeOf(typeof(BitmapInfoHeader));
			videoInfoHeader.BmiHeader.Width = mBitmap.Width;
			videoInfoHeader.BmiHeader.Height = mBitmap.Height;
			videoInfoHeader.BmiHeader.Planes = 1;

			// Compression thru ClrImportant don't seem to be used. Init them anyway.
			videoInfoHeader.BmiHeader.Compression = 0;
			videoInfoHeader.BmiHeader.ImageSize = 0;
			videoInfoHeader.BmiHeader.XPelsPerMeter = 0;
			videoInfoHeader.BmiHeader.YPelsPerMeter = 0;
			videoInfoHeader.BmiHeader.ClrUsed = 0;
			videoInfoHeader.BmiHeader.ClrImportant = 0;

			// BUGFIX: the original referenced undefined identifiers
			// ('hBitmap', 'videoInfo') — use the actual parameter and the
			// single VideoInfoHeader instance consistently.
			switch(mBitmap.PixelFormat)
			{
			case PixelFormat.Format32bppRgb:
				mtype.subType = MediaSubType.RGB32;
				videoInfoHeader.BmiHeader.BitCount = 32;
				break;
			case PixelFormat.Format24bppRgb:
				mtype.subType = MediaSubType.RGB24;
				videoInfoHeader.BmiHeader.BitCount = 24;
				break;
			case PixelFormat.Format16bppRgb555:
				mtype.subType = MediaSubType.RGB555;
				videoInfoHeader.BmiHeader.BitCount = 16;
				break;
			default:
				throw new Exception("Unrecognized Pixelformat in bitmap");
			}

			videoInfoHeader.SrcRect = new Rectangle(0, 0, mBitmap.Width, mBitmap.Height);
			videoInfoHeader.TargetRect = videoInfoHeader.SrcRect;
			videoInfoHeader.BmiHeader.ImageSize = mBitmap.Width * mBitmap.Height * (videoInfoHeader.BmiHeader.BitCount / 8);
			videoInfoHeader.BitRate = videoInfoHeader.BmiHeader.ImageSize * mFrameRate;
			videoInfoHeader.BitErrorRate = 0;
			// AvgTimePerFrame is in 100-nanosecond units.
			videoInfoHeader.AvgTimePerFrame = 10000 * 1000 / mFrameRate;

			mtype.majorType = MediaType.Video;
			mtype.fixedSizeSamples = true;
			mtype.temporalCompression = false;
			mtype.sampleSize = videoInfoHeader.BmiHeader.ImageSize;
			mtype.formatType = FormatType.VideoInfo;
			mtype.unkPtr = IntPtr.Zero;
			mtype.formatSize = Marshal.SizeOf(typeof(VideoInfoHeader));

			// Pin the header so the media type can point at it while we
			// serialize the whole AMMediaType for the writer.
			GCHandle gHandle = GCHandle.Alloc(videoInfoHeader, GCHandleType.Pinned);
			byte[] bytes;

			try
			{
				// Set the inputprops using the structures
				mtype.formatPtr = gHandle.AddrOfPinnedObject();

				// BUGFIX: AMMediaType cannot be cast to byte[] as the original
				// attempted; marshal it into a temporary unmanaged buffer and
				// copy the raw bytes out instead.
				int size = Marshal.SizeOf(mtype);
				IntPtr buffer = Marshal.AllocCoTaskMem(size);
				try
				{
					Marshal.StructureToPtr(mtype, buffer, false);
					bytes = new byte[size];
					Marshal.Copy(buffer, bytes, 0, size);
				}
				finally
				{
					Marshal.FreeCoTaskMem(buffer);
				}
			}
			finally
			{
				// BUGFIX: original used undefined 'gHan' / 'mt' here.
				gHandle.Free();
				mtype.formatPtr = IntPtr.Zero;
			}

			mediaWriter.Write(bytes);
		}
Esempio n. 6
0
        /// <summary>
        /// Constructor
        /// </summary>
        /// <param name="mType">Media type of the new group</param>
        /// <param name="pTimeline">Timeline to use for the group</param>
        /// <param name="fps">FPS for the group</param>
        public Group(AMMediaType mType, IAMTimeline pTimeline, double fps)
        {
            m_Length = 0;
            m_Files = new ArrayList();
            m_FPS = fps;
            m_pTimeline = pTimeline;

            // Create the root group/composition node on the timeline.
            IAMTimelineObj groupObj;
            int hr = m_pTimeline.CreateEmptyNode(out groupObj, TimelineMajorType.Group);
            DESError.ThrowExceptionForHR(hr);

            try
            {
                // Apply the supplied media type to the new group, then free the
                // caller's copy (presumably SetMediaType keeps its own — confirm).
                IAMTimelineGroup group = (IAMTimelineGroup)groupObj;
                hr = group.SetMediaType(mType);
                DESError.ThrowExceptionForHR(hr);
                DsUtils.FreeAMMediaType(mType);

                // Register the group with the timeline.
                hr = m_pTimeline.AddGroup(groupObj);
                DESError.ThrowExceptionForHR(hr);

                // Create a single track and insert it into the composition.
                IAMTimelineObj trackObj;
                hr = m_pTimeline.CreateEmptyNode(out trackObj, TimelineMajorType.Track);
                DESError.ThrowExceptionForHR(hr);

                IAMTimelineComp rootComp = (IAMTimelineComp)groupObj;
                hr = rootComp.VTrackInsBefore(trackObj, -1);
                DESError.ThrowExceptionForHR(hr);

                // The track stays alive via m_Track; it is released in Dispose.
                m_Track = (IAMTimelineTrack)trackObj;
            }
            finally
            {
                Marshal.ReleaseComObject(groupObj);
            }
        }
Esempio n. 7
0
        /// <summary>
        /// Gets available resolutions (which are appropriate for us) for capture pin (PinCategory.Capture).
        /// </summary>
        /// <param name="pinOutput">Output pin (PinCategory.Capture) to query for its resolution list.</param>
        /// <returns>List of resolutions whose bit count is acceptable.</returns>
        private static ResolutionList GetResolutionsAvailable(IPin pinOutput)
        {
            int hr = 0;

            ResolutionList ResolutionsAvailable = new ResolutionList();

            // Media type (should be cleaned)
            AMMediaType media_type = null;

            //NOTE: pSCC is not used. All we need is media_type
            IntPtr pSCC = IntPtr.Zero;

            try
            {
                IAMStreamConfig videoStreamConfig = pinOutput as IAMStreamConfig;

                // BUGFIX: fail with a clear message instead of a
                // NullReferenceException when the pin lacks IAMStreamConfig.
                if (videoStreamConfig == null)
                {
                    throw new ArgumentException("Pin does not expose IAMStreamConfig.", "pinOutput");
                }

                // -------------------------------------------------------------------------
                // We want the interface to expose all media types it supports and not only the last one set
                hr = videoStreamConfig.SetFormat(null);
                DsError.ThrowExceptionForHR(hr);

                int piCount = 0;
                int piSize  = 0;

                hr = videoStreamConfig.GetNumberOfCapabilities(out piCount, out piSize);
                DsError.ThrowExceptionForHR(hr);

                for (int i = 0; i < piCount; i++)
                {
                    // ---------------------------------------------------
                    pSCC = Marshal.AllocCoTaskMem(piSize);
                    hr = videoStreamConfig.GetStreamCaps(i, out media_type, pSCC);
                    // BUGFIX: the HRESULT was previously ignored; a failed call
                    // would have examined an invalid media type.
                    DsError.ThrowExceptionForHR(hr);

                    // NOTE: we could use VideoStreamConfigCaps.InputSize or something like that to get resolution, but it's deprecated
                    // ---------------------------------------------------

                    if (IsBitCountAppropriate(GetBitCountForMediaType(media_type)))
                    {
                        ResolutionsAvailable.AddIfNew(GetResolutionForMediaType(media_type));
                    }

                    FreeSCCMemory(ref pSCC);
                    FreeMediaType(ref media_type);
                }
            }
            // NOTE: the original had a redundant 'catch { throw; }' — removed;
            // exceptions still propagate and the finally block still runs.
            finally
            {
                // clean up (the Free helpers are assumed to tolerate
                // already-freed/zeroed arguments, as the original relied on)
                FreeSCCMemory(ref pSCC);
                FreeMediaType(ref media_type);
            }

            return(ResolutionsAvailable);
        }
Esempio n. 8
0
        /// <summary>
        /// Enumerates the output pins of the network source filter and, based on
        /// the first media type of each video output pin, inserts the matching
        /// decoder filter and connects the rendering chain.
        /// </summary>
        private void RenderSource()
        {
            int hr;

            IEnumPins enumPins;

            IPin[] pins = new IPin[1];

            hr = _netSrc.EnumPins(out enumPins);
            DsError.ThrowExceptionForHR(hr);
            try
            {
                while (enumPins.Next(pins.Length, pins, IntPtr.Zero) == 0)
                {
                    try
                    {
                        PinInfo pinInfo;
                        IPin    upstreamPin = pins[0];
                        hr = upstreamPin.QueryPinInfo(out pinInfo);
                        DsError.ThrowExceptionForHR(hr);
                        try
                        {
                            if (pinInfo.dir == PinDirection.Output)
                            {
                                IEnumMediaTypes enumMediaTypes;
                                hr = upstreamPin.EnumMediaTypes(out enumMediaTypes);
                                DsError.ThrowExceptionForHR(hr);
                                try
                                {
                                    AMMediaType[] mediaTypes = new AMMediaType[1];
                                    if (enumMediaTypes.Next(1, mediaTypes, IntPtr.Zero) == 0)
                                    {
                                        AMMediaType mediaType = mediaTypes[0];
                                        try
                                        {
                                            if (mediaType.majorType == MediaType.Video)
                                            {
                                                // H.264 FourCC variants and LEAD-specific subtypes
                                                if ((mediaType.subType == new Guid("34363268-0000-0010-8000-00AA00389B71")) ||
                                                    (mediaType.subType == new Guid("34363248-0000-0010-8000-00aa00389b71")) ||
                                                    (mediaType.subType == new Guid("3436324c-0000-0010-8000-00aa00389b71")) ||
                                                    (mediaType.subType == new Guid("31637661-5349-4d4f-4544-494154595045")) ||
                                                    (mediaType.subType == new Guid("8d2d71cb-243f-45e3-b2d8-5fd7967ec09b")))
                                                {
                                                    _decoderFilter = AddFilterByName(_graphBuilder, FilterCategory.LegacyAmFilterCategory, "LEAD H264 Decoder (3.0)");
                                                    ConnectFilters(_graphBuilder, _netSrc, pinInfo.name, _infPinTee, "Input", true);
                                                    ConnectFilters(_graphBuilder, _infPinTee, "Output1", _decoderFilter, "XForm In", true);
                                                    ConnectFilters(_graphBuilder, _infPinTee, "Output2", _bridgeSink, "Input 1", true);
                                                    ConnectFilters(_graphBuilder, _decoderFilter, "XForm Out", _videoCallbackFilter, "Input", true);
                                                    ConnectFilters(_graphBuilder, _videoCallbackFilter, "Output", _videoRender, "VMR Input0", true);
                                                }
                                                else if (mediaType.subType == new Guid("4B324A4C-0000-0010-8000-00AA00389B71"))
                                                {
                                                    _decoderFilter = AddFilterByName(_graphBuilder, FilterCategory.LegacyAmFilterCategory, "LEAD MJ2K Decoder (2.0)");
                                                    ConnectFilters(_graphBuilder, _netSrc, pinInfo.name, _decoderFilter, "Input", true);
                                                    ConnectFilters(_graphBuilder, _decoderFilter, "XForm Out", _videoRender, "VMR Input0", true);
                                                }
                                                else if (mediaType.subType == MediaSubType.MJPG)
                                                {
                                                    // NOTE(review): unlike the other branches, the source pin is
                                                    // never connected to this decoder — looks incomplete; confirm.
                                                    _decoderFilter = AddFilterByName(_graphBuilder, FilterCategory.LegacyAmFilterCategory, "LEAD MCMP/MJPEG Decoder (2.0)");
                                                    ConnectFilters(_graphBuilder, _decoderFilter, "XForm Out", _videoRender, "VMR Input0", true);
                                                }
                                                else
                                                {
                                                    throw new Exception("Can't Render--Unsupported codec in stream");
                                                }
                                            }
                                            // non-video (e.g. audio) pins are intentionally ignored
                                        }
                                        finally
                                        {
                                            // BUGFIX: free the enumerated media type's format
                                            // block; it leaked in the original.
                                            DsUtils.FreeAMMediaType(mediaType);
                                        }
                                    }
                                }
                                finally
                                {
                                    // BUGFIX: release the media-type enumerator COM
                                    // object; it leaked in the original.
                                    Marshal.ReleaseComObject(enumMediaTypes);
                                }
                            }
                        }
                        finally
                        {
                            // BUGFIX: QueryPinInfo AddRefs the owning filter
                            // (per the DirectShow docs); release that reference.
                            if (pinInfo.filter != null)
                            {
                                Marshal.ReleaseComObject(pinInfo.filter);
                            }
                        }
                    }
                    finally
                    {
                        Marshal.ReleaseComObject(pins[0]);
                    }
                }
            }
            finally
            {
                Marshal.ReleaseComObject(enumPins);
            }
        }
Esempio n. 9
0
        /// <summary>
        /// Builds and starts the DirectShow capture graph for this camera:
        /// source filter, sample grabber, format selection, and media control.
        /// NOTE(review): 'async void' is kept for interface compatibility;
        /// exceptions thrown here cannot be observed by callers.
        /// </summary>
        private async void Start()
        {
            await Program.ComputeContext.SwitchTo();

            CapGraphBuilder2 = Activator.CreateInstance(Type.GetTypeFromCLSID(Clsid.CaptureGraphBuilder2)) as ICaptureGraphBuilder2;
            FilterGraph2 = Activator.CreateInstance(Type.GetTypeFromCLSID(Clsid.FilterGraph)) as IFilterGraph2;

            SampleGrabberBaseFilter = Activator.CreateInstance(Type.GetTypeFromCLSID(Clsid.SampleGrabber)) as IBaseFilter;
            SampleGrabber = SampleGrabberBaseFilter as ISampleGrabber;

            CapGraphBuilder2.SetFiltergraph(FilterGraph2 as IGraphBuilder);

            FilterInfo.CreateFilter(this.Uuid, out CaptureSourceBaseFilter);
            CaptureSourceBaseFilter.SetSyncSource(IntPtr.Zero);
            SampleGrabberBaseFilter.SetSyncSource(IntPtr.Zero);

            VideoProcAmp = CaptureSourceBaseFilter as IAMVideoProcAmp;
            CameraControl = CaptureSourceBaseFilter as IAMCameraControl;
            KsPropertySet = CaptureSourceBaseFilter as IKsPropertySet;

            VideoProcAmp.Set(VideoProcAmpProperty.ColorEnable, 1, VideoProcAmpFlags.Manual);
            KsPropertySet.SetExposure(TimeSpan.FromSeconds(1000 / 120));

            FilterGraph2.AddFilter(CaptureSourceBaseFilter, "source");
            FilterGraph2.AddFilter(SampleGrabberBaseFilter, "grabber");

            object streamConfigObj;
            CapGraphBuilder2.FindInterface(PinCategory.Capture, MediaType.Video, CaptureSourceBaseFilter, typeof(IAMStreamConfig).GUID, out streamConfigObj);
            IAMStreamConfig streamConfig = (IAMStreamConfig)streamConfigObj;

            VideoCaps = Pentacorn.Captures.DirectShow.VideoCapabilities.FromStreamConfig(streamConfig);

            // Pick the fastest format matching the requested frame size.
            var desiredFormat = VideoCaps.Where(vc => vc.FrameSize.Width == this.Width && vc.FrameSize.Height == this.Height)
                                                 .OrderByDescending(vc => vc.MaxFrameRate).First();
            streamConfig.SetFormat(desiredFormat.MediaType);

            var hr = SampleGrabber.SetMediaType(desiredFormat.MediaType);
            if (hr < 0)
                throw new Win32Exception(hr);

            SampleGrabber.SetBufferSamples(true);
            SampleGrabber.SetOneShot(false);
            SampleGrabber.SetCallback(this, 1);

            // BUGFIX: the original discarded RenderStream's HRESULT and then
            // tested the stale 'hr' from SetMediaType — a dead check.
            hr = CapGraphBuilder2.RenderStream(PinCategory.Capture, MediaType.Video, CaptureSourceBaseFilter, null, SampleGrabberBaseFilter);
            if (hr < 0)
                throw new Win32Exception(hr);

            AMMediaType mediaType = new AMMediaType();
            try
            {
                if (SampleGrabber.GetConnectedMediaType(mediaType) >= 0)
                {
                    VideoInfoHeader vih = (VideoInfoHeader)Marshal.PtrToStructure(mediaType.FormatPtr, typeof(VideoInfoHeader));

                    if (this.Width != vih.BmiHeader.Width)
                        throw new Exception("DirectShow capture width not what's requested.");
                    if (this.Height != vih.BmiHeader.Height)
                        // BUGFIX: this message wrongly said "width".
                        throw new Exception("DirectShow capture height not what's requested.");
                }
            }
            finally
            {
                // BUGFIX: dispose unconditionally; the original leaked the
                // media type when GetConnectedMediaType failed or a size
                // mismatch threw.
                mediaType.Dispose();
            }

            MediaControl = (IMediaControl)FilterGraph2;
            MediaControl.Run();
        }
Esempio n. 10
0
        /// <summary>
        /// Sets parameters for source capture pin.
        /// </summary>
        /// <param name="pinSourceCapture">Pin of source capture.</param>
        /// <param name="resolution_desired">Resolution to set if possible.</param>
        private static void SetSourceParams(IPin pinSourceCapture, Resolution resolution_desired)
        {
            int hr = 0;

            AMMediaType media_type_most_appropriate = null;
            AMMediaType media_type = null;

            //NOTE: pSCC is not used. All we need is media_type
            IntPtr pSCC = IntPtr.Zero;

            bool appropriate_media_type_found = false;

            try
            {
                IAMStreamConfig videoStreamConfig = pinSourceCapture as IAMStreamConfig;

                // BUGFIX: fail with a clear message instead of a
                // NullReferenceException when the pin lacks IAMStreamConfig.
                if (videoStreamConfig == null)
                {
                    throw new ArgumentException("Pin does not expose IAMStreamConfig.", "pinSourceCapture");
                }

                // -------------------------------------------------------------------------
                // We want the interface to expose all media types it supports and not only the last one set
                hr = videoStreamConfig.SetFormat(null);
                DsError.ThrowExceptionForHR(hr);

                int piCount = 0;
                int piSize  = 0;

                hr = videoStreamConfig.GetNumberOfCapabilities(out piCount, out piSize);
                DsError.ThrowExceptionForHR(hr);

                for (int i = 0; i < piCount; i++)
                {
                    // ---------------------------------------------------
                    pSCC = Marshal.AllocCoTaskMem(piSize);
                    hr = videoStreamConfig.GetStreamCaps(i, out media_type, pSCC);
                    FreeSCCMemory(ref pSCC);
                    // BUGFIX: the HRESULT of GetStreamCaps was ignored; a failed
                    // call would have analyzed an invalid media type.
                    DsError.ThrowExceptionForHR(hr);

                    // NOTE: we could use VideoStreamConfigCaps.InputSize or something like that to get resolution, but it's deprecated
                    // ---------------------------------------------------

                    bool bit_count_ok  = false;
                    bool sub_type_ok   = false;
                    bool resolution_ok = false;

                    AnalyzeMediaType(media_type, resolution_desired, out bit_count_ok, out sub_type_ok, out resolution_ok);

                    if (bit_count_ok && resolution_ok)
                    {
                        if (sub_type_ok)
                        {
                            hr = videoStreamConfig.SetFormat(media_type);
                            DsError.ThrowExceptionForHR(hr);

                            appropriate_media_type_found = true;
                            break;                             // stop search, we've found appropriate media type
                        }
                        else
                        {
                            // save as appropriate if no other found
                            if (media_type_most_appropriate == null)
                            {
                                media_type_most_appropriate = media_type;
                                media_type = null;                                 // ownership transferred; don't free it below
                            }
                        }
                    }

                    FreeMediaType(ref media_type);
                }

                if (!appropriate_media_type_found)
                {
                    // Found nothing exactly as we were asked

                    if (media_type_most_appropriate != null)
                    {
                        // set appropriate RGB format with different resolution
                        hr = videoStreamConfig.SetFormat(media_type_most_appropriate);
                        DsError.ThrowExceptionForHR(hr);
                    }
                    else
                    {
                        // throw. We didn't find exactly what we were asked to
                        throw new Exception("Camera doesn't support media type with requested resolution and bits per pixel.");
                    }
                }
            }
            // NOTE: the original had a redundant 'catch { throw; }' — removed;
            // exceptions still propagate and the finally block still runs.
            finally
            {
                // clean up
                FreeMediaType(ref media_type);
                FreeMediaType(ref media_type_most_appropriate);
                FreeSCCMemory(ref pSCC);
            }
        }
Esempio n. 11
0
 /// <summary>
 ///  Returns the currently loaded file name; reports
 ///  VFW_E_NOT_CONNECTED when no file has been set.
 /// </summary>
 public int GetCurFile(out string pszFileName, AMMediaType pmt)
 {
     pszFileName = _fileName;
     if (string.IsNullOrEmpty(pszFileName))
     {
         return VFW_E_NOT_CONNECTED;
     }
     return NOERROR;
 }
Esempio n. 12
0
        /// <summary>
        /// Adds the user's preferred decoder filters to the graph, chosen by
        /// inspecting the media types offered on the source/splitter output
        /// pins (H.264 vs MPEG-2 video, LATM-AAC vs MPEG-2 audio).
        /// Does nothing when the user enabled automatic graph building.
        /// </summary>
        /// <param name="graphBuilder">Graph to add the decoder filters to.</param>
        /// <param name="sourceFilter">Source/splitter filter whose output pins are inspected.</param>
        public static void AddPreferredFilters(IGraphBuilder graphBuilder, IBaseFilter sourceFilter)
        {
            using (Settings xmlreader = new MPSettings())
            {
                bool autodecodersettings = xmlreader.GetValueAsBool("movieplayer", "autodecodersettings", false);

                if (!autodecodersettings) // the user has not chosen automatic graph building by merits
                {
                    // bool vc1ICodec,vc1Codec,xvidCodec = false; - will come later
                    bool aacCodec  = false;
                    bool h264Codec = false;

                    // check the output pins of the splitter for known media types
                    IEnumPins pinEnum = null;
                    if (sourceFilter.EnumPins(out pinEnum) == 0)
                    {
                        int    fetched = 0;
                        IPin[] pins    = new IPin[1];
                        while (pinEnum.Next(1, pins, out fetched) == 0 && fetched > 0)
                        {
                            IPin         pin = pins[0];
                            PinDirection pinDirection;
                            if (pin.QueryDirection(out pinDirection) == 0 && pinDirection == PinDirection.Output)
                            {
                                IEnumMediaTypes enumMediaTypesVideo = null;
                                if (pin.EnumMediaTypes(out enumMediaTypesVideo) == 0)
                                {
                                    AMMediaType[] mediaTypes = new AMMediaType[1];
                                    int           typesFetched;
                                    while (enumMediaTypesVideo.Next(1, mediaTypes, out typesFetched) == 0 && typesFetched > 0)
                                    {
                                        if (mediaTypes[0].majorType == MediaType.Video &&
                                            (mediaTypes[0].subType == MediaSubType.H264 || mediaTypes[0].subType == MEDIASUBTYPE_AVC1))
                                        {
                                            Log.Instance.Info("found H264 video on output pin");
                                            h264Codec = true;
                                        }
                                        else if (mediaTypes[0].majorType == MediaType.Audio && mediaTypes[0].subType == MediaSubType.LATMAAC)
                                        {
                                            Log.Instance.Info("found AAC audio on output pin");
                                            aacCodec = true;
                                        }
                                        // Fix: each AMMediaType returned by IEnumMediaTypes.Next owns a
                                        // CoTaskMem format block that the caller must release, otherwise
                                        // it leaks once per enumerated type.
                                        DsUtils.FreeAMMediaType(mediaTypes[0]);
                                    }
                                    DirectShowUtil.ReleaseComObject(enumMediaTypesVideo);
                                }
                            }
                            DirectShowUtil.ReleaseComObject(pin);
                        }
                        DirectShowUtil.ReleaseComObject(pinEnum);
                    }

                    // add filters for found media types to the graph as configured in MP
                    if (h264Codec)
                    {
                        DirectShowUtil.ReleaseComObject(
                            DirectShowUtil.AddFilterToGraph(graphBuilder, xmlreader.GetValueAsString("movieplayer", "h264videocodec", "")));
                    }
                    else
                    {
                        DirectShowUtil.ReleaseComObject(
                            DirectShowUtil.AddFilterToGraph(graphBuilder, xmlreader.GetValueAsString("movieplayer", "mpeg2videocodec", "")));
                    }
                    if (aacCodec)
                    {
                        DirectShowUtil.ReleaseComObject(
                            DirectShowUtil.AddFilterToGraph(graphBuilder, xmlreader.GetValueAsString("movieplayer", "aacaudiocodec", "")));
                    }
                    else
                    {
                        DirectShowUtil.ReleaseComObject(
                            DirectShowUtil.AddFilterToGraph(graphBuilder, xmlreader.GetValueAsString("movieplayer", "mpeg2audiocodec", "")));
                    }
                }
            }
        }
Esempio n. 13
0
        /// <summary>
        /// Retrieve capabilities of a video device.
        /// Queries the capability pair (AM_MEDIA_TYPE + VIDEO_STREAM_CONFIG_CAPS)
        /// at the given index and extracts frame size, bit depth and frame-rate limits.
        /// </summary>
        /// <param name="videoStreamConfig">Stream config interface of the capture device.</param>
        /// <param name="index">Zero-based capability index.</param>
        internal VideoCapabilities(IAMStreamConfig videoStreamConfig, int index)
        {
            AMMediaType mediaType = null;
            IntPtr capsHandle = Marshal.AllocCoTaskMem(Marshal.SizeOf(typeof(VideoStreamConfigCaps)));

            try
            {
                // retrieve capabilities struct at the specified index
                int hr = videoStreamConfig.GetStreamCaps(index, out mediaType, capsHandle);

                if (hr != 0)
                {
                    Marshal.ThrowExceptionForHR(hr);
                }

                VideoStreamConfigCaps caps =
                    (VideoStreamConfigCaps)Marshal.PtrToStructure(capsHandle, typeof(VideoStreamConfigCaps));

                if (mediaType.formatType == FormatType.VideoInfo)
                {
                    VideoInfoHeader videoInfo = (VideoInfoHeader)Marshal.PtrToStructure(mediaType.formatPtr, typeof(VideoInfoHeader));

                    FrameSize        = new Size(videoInfo.BmiHeader.Width, videoInfo.BmiHeader.Height);
                    BitCount         = videoInfo.BmiHeader.BitCount;
                    // NOTE(review): assumes AvgTimePerFrame and MinFrameInterval are
                    // non-zero (a zero value would throw DivideByZeroException) - confirm.
                    AverageFrameRate = (int)(10000000 / videoInfo.AvgTimePerFrame);
                    MaximumFrameRate = (int)(10000000 / caps.MinFrameInterval);
                }
                else if (mediaType.formatType == FormatType.VideoInfo2)
                {
                    VideoInfoHeader2 videoInfo = (VideoInfoHeader2)Marshal.PtrToStructure(mediaType.formatPtr, typeof(VideoInfoHeader2));

                    FrameSize        = new Size(videoInfo.BmiHeader.Width, videoInfo.BmiHeader.Height);
                    BitCount         = videoInfo.BmiHeader.BitCount;
                    AverageFrameRate = (int)(10000000 / videoInfo.AvgTimePerFrame);
                    MaximumFrameRate = (int)(10000000 / caps.MinFrameInterval);
                }
                else
                {
                    throw new ApplicationException("Unsupported format found.");
                }

                // ignore 12 bpp formats for now, since it was noticed they cause issues on Windows 8
                // TODO: proper fix needs to be done so ICaptureGraphBuilder2::RenderStream() does not fail
                // on such formats
                if (BitCount <= 12)
                {
                    throw new ApplicationException("Unsupported format found.");
                }
            }
            finally
            {
                Marshal.FreeCoTaskMem(capsHandle);

                // Fix: GetStreamCaps allocates the media type's format block in
                // CoTaskMem (and may hold a COM reference); the original code only
                // freed a GCHandle wrapped around the managed object, which does
                // NOT release that native memory. Free it the same way
                // FreeWMMediaType does.
                if (mediaType != null)
                {
                    if (mediaType.formatSize != 0)
                    {
                        Marshal.FreeCoTaskMem(mediaType.formatPtr);
                        mediaType.formatSize = 0;
                        mediaType.formatPtr = IntPtr.Zero;
                    }
                    if (mediaType.unkPtr != IntPtr.Zero)
                    {
                        Marshal.Release(mediaType.unkPtr);
                        mediaType.unkPtr = IntPtr.Zero;
                    }
                }
            }
        }
Esempio n. 14
0
        /// <summary>
        /// Handles left-mouse-button release: finalizes a filter drag, shows
        /// media-type properties or completes a pin-to-pin connection, and
        /// closes a rubber-band selection rectangle.
        /// </summary>
        private void OnLButtonUp()
        {
            // A filter drag just ended: lock in the new coordinates of every
            // selected filter as the origin for the next drag.
            if (movingFilter != null)
                foreach (Filter f in graph.SelectedFilters)
                    f.movingStartCoords = f.Coords;
            movingFilter = null;

            if (connectingPin != null)
            {
                if (mousepos == movingStart) //just click on pin
                {
                    if (connectingPin.Connection != null)
                    {
                        // Connected pin: display the media type of the current
                        // connection in the property window.
                        // NOTE(review): mt's format block is never freed here -
                        // possible native leak; verify MediaTypeProps takes ownership.
                        AMMediaType mt = new AMMediaType();
                        connectingPin.IPin.ConnectionMediaType(mt);
                        MediaTypeProps mtp = MediaTypeProps.CreateMTProps(mt); //new MediaTypeProps(mt);
                        Program.mainform.propform.SetObject(mtp);
                    }
                    else
                    {
                        // Unconnected pin: enumerate all media types the pin
                        // offers and display them.
                        IEnumMediaTypes mtenum;
                        if (connectingPin.IPin.EnumMediaTypes(out mtenum) >= 0)
                        {
                            AMMediaType[] mts = new AMMediaType[1];
                            List<MediaTypeProps> mtypes = new List<MediaTypeProps>();
                            // Next() needs a native pointer to receive the fetched count.
                            IntPtr fetched = Marshal.AllocHGlobal(4);
                            while (mtenum.Next(1, mts, fetched) == 0)
                                mtypes.Add(MediaTypeProps.CreateMTProps(mts[0]));
                            Marshal.FreeHGlobal(fetched);
                            // NOTE(review): mtenum (COM enumerator) is never released -
                            // confirm whether this leaks a reference.
                            Program.mainform.propform.SetObject(mtypes.ToArray());
                        }
                    }
                }
                else
                {
                    // Mouse moved between press and release: attempt to connect
                    // the two pins, orienting them by pin direction.
                    if (otherPin != null)
                    {
                        Pin inpin, outpin;
                        if (connectingPin.Direction == PinDirection.Input)
                        {
                            inpin = connectingPin;
                            outpin = otherPin;
                        }
                        else
                        {
                            inpin = otherPin;
                            outpin = connectingPin;
                        }
                        graph.Connect(outpin, inpin, true);
                    }
                }
                Invalidate();
            }

            // Close a rubber-band selection: compute the normalized rectangle
            // between press and release points and select the filters inside it.
            if (selecting)
            {
                Rectangle rc = new Rectangle(Math.Min(mousepos.X, movingStart.X), Math.Min(mousepos.Y, movingStart.Y),
                    Math.Abs(mousepos.X - movingStart.X), Math.Abs(mousepos.Y - movingStart.Y));
                // Holding Shift extends the existing selection instead of replacing it.
                if (ModifierKeys != Keys.Shift)
                    graph.ClearFiltersSelection();
                graph.SelectSeveralFilters(rc);
                selecting = false;
                Invalidate();
            }
            connectingPin = null;
            otherPin = null;
        }
Esempio n. 15
0
            /// <summary>
            /// Releases the native resources owned by an AMMediaType: the
            /// embedded COM object (pUnk) and the CoTaskMem format block (pbFormat).
            /// </summary>
            /// <param name="media">The media type to free; null is ignored.</param>
            public static void FreeMediaType(AMMediaType media)
            {
                // Fix: guard against null so callers may pass a possibly-unset
                // media type (mirrors FreeWMMediaType's null check).
                if (media == null)
                {
                    return;
                }

                if (media.pUnk != IntPtr.Zero)
                {
                    Marshal.Release(media.pUnk);
                    media.pUnk = IntPtr.Zero;
                }

                if (media.cbFormat != 0)
                {
                    Marshal.FreeCoTaskMem(media.pbFormat);
                    media.cbFormat = 0;
                    media.pbFormat = IntPtr.Zero;
                }
            }
Esempio n. 16
0
        /// <summary>
        /// Fills the resolution combo box with the capture modes supported by
        /// the newly selected video source and restores any saved selection.
        /// </summary>
        /// <param name="sender">Event source (unused).</param>
        /// <param name="e">Event data (unused).</param>
        private void CMB_videosources_SelectedIndexChanged(object sender, EventArgs e)
        {
            // DirectShow is Windows-only; bail out when running under Mono.
            if (MainV2.MONO)
            {
                return;
            }

            int                   hr;
            int                   count;
            int                   size;
            object                o;
            IBaseFilter           capFilter = null;
            ICaptureGraphBuilder2 capGraph  = null;
            AMMediaType           media     = null;
            VideoInfoHeader       v;
            VideoStreamConfigCaps c;
            var                   modes = new List <GCSBitmapInfo>();

            // Get the ICaptureGraphBuilder2
            capGraph = (ICaptureGraphBuilder2) new CaptureGraphBuilder2();
            var m_FilterGraph = (IFilterGraph2) new FilterGraph();

            DsDevice[] capDevices;
            capDevices = DsDevice.GetDevicesOfCat(FilterCategory.VideoInputDevice);

            // Add the video device
            // NOTE(review): SelectedIndex is assumed to be a valid index into
            // capDevices - confirm the combo box is populated from this same list.
            hr = m_FilterGraph.AddSourceFilterForMoniker(capDevices[CMB_videosources.SelectedIndex].Mon, null,
                                                         "Video input", out capFilter);
            try
            {
                DsError.ThrowExceptionForHR(hr);
            }
            catch (Exception ex)
            {
                CustomMessageBox.Show("Can not add video source\n" + ex);
                return;
            }

            // Find the stream config interface
            hr = capGraph.FindInterface(PinCategory.Capture, MediaType.Video, capFilter, typeof(IAMStreamConfig).GUID,
                                        out o);
            DsError.ThrowExceptionForHR(hr);

            var videoStreamConfig = o as IAMStreamConfig;

            if (videoStreamConfig == null)
            {
                CustomMessageBox.Show("Failed to get IAMStreamConfig");
                return;
            }

            // Enumerate every capability entry the device exposes; 'size' is the
            // byte size of one VIDEO_STREAM_CONFIG_CAPS structure.
            hr = videoStreamConfig.GetNumberOfCapabilities(out count, out size);
            DsError.ThrowExceptionForHR(hr);
            var TaskMemPointer = Marshal.AllocCoTaskMem(size);

            for (var i = 0; i < count; i++)
            {
                var ptr = IntPtr.Zero;

                // NOTE(review): hr from GetStreamCaps is not checked, and each
                // iteration's media type is stored inside GCSBitmapInfo while only
                // the final one is freed below - earlier ones appear to leak, and
                // the last stored entry may reference freed memory; verify.
                hr = videoStreamConfig.GetStreamCaps(i, out media, TaskMemPointer);
                v  = (VideoInfoHeader)Marshal.PtrToStructure(media.formatPtr, typeof(VideoInfoHeader));
                c  = (VideoStreamConfigCaps)Marshal.PtrToStructure(TaskMemPointer, typeof(VideoStreamConfigCaps));
                modes.Add(new GCSBitmapInfo(v.BmiHeader.Width, v.BmiHeader.Height, c.MaxFrameInterval,
                                            c.VideoStandard.ToString(), media));
            }
            Marshal.FreeCoTaskMem(TaskMemPointer);
            DsUtils.FreeAMMediaType(media);

            CMB_videoresolutions.DataSource = modes;

            // Restore the previously saved resolution selection, if any.
            if (Settings.Instance["video_options"] != "" && CMB_videosources.Text != "")
            {
                try
                {
                    CMB_videoresolutions.SelectedIndex = Settings.Instance.GetInt32("video_options");
                }
                catch
                {
                } // ignore bad entries
            }
        }
Esempio n. 17
0
            /// <summary>
            /// Determines whether the given pin supports the specified media type.
            /// </summary>
            /// <param name="pin">Pin to test.</param>
            /// <param name="major">Major type to match.</param>
            /// <param name="sub">Sub type to match, or Guid.Empty to accept any sub type.</param>
            /// <returns>true when the pin offers a matching media type.</returns>
            public static bool IsPinSupportsMediaType(IPin pin, Guid major, Guid sub)
            {
                bool found = false;
                IEnumMediaTypes enumerator = null;
                try
                {
                    pin.EnumMediaTypes(out enumerator);
                    IntPtr pMedia = IntPtr.Zero;
                    AMMediaType media = new AMMediaType();

                    found = false;
                    enumerator.Reset();
                    while (!found && enumerator.Next(1, out pMedia, IntPtr.Zero) == S_OK)
                    {
                        if (pMedia == IntPtr.Zero) continue;
                        Marshal.PtrToStructure(pMedia, media);

                        // Fix: the sub type must EQUAL the requested one. The original
                        // used '!=', which matched every sub type EXCEPT the requested
                        // one - the inverse of the documented contract.
                        if (media.majorType == major && (sub == Guid.Empty || media.subType == sub))
                            found = true;

                        // Free the per-iteration format block and the enumerator's buffer.
                        Util.FreeMediaType(media);
                        Marshal.FreeCoTaskMem(pMedia);
                    }
                }
                finally
                {
                    if (enumerator != null)
                        Marshal.ReleaseComObject(enumerator);
                    enumerator = null;
                }
                return found;
            }
        // Called just after invoking the COM method.  The IntPtr is the same one that just got returned
        // from MarshalManagedToNative.  The return value is unused.
        //
        // Copies the unmanaged AM_MEDIA_TYPE at pNativeData into a cached managed
        // AMMediaType (m_mt). The native format block belongs to the callee, so a
        // private CoTaskMem copy is taken to give the managed instance ownership
        // of its own format memory.
        public object MarshalNativeToManaged(IntPtr pNativeData)
        {
            // Lazily create the managed instance we marshal into; it is reused
            // across calls.
            if (m_mt == null)
            {
                m_mt = new AMMediaType();
            }
            Marshal.PtrToStructure(pNativeData, m_mt);

            if (m_mt.formatSize > 0)
            {
                // Remember the callee-owned pointer, then replace it with our
                // own allocation and copy the format block across.
                IntPtr ip = m_mt.formatPtr;

                m_mt.formatPtr = Marshal.AllocCoTaskMem(m_mt.formatSize);
                CopyMemory(m_mt.formatPtr, ip, m_mt.formatSize);
            }

            return m_mt;
        }
Esempio n. 19
0
        /// <summary>
        /// Builds a DirectShow graph that demultiplexes several MPEG-2 programs
        /// and renders them as a mosaic (grid of tiles) through VMR9.
        /// </summary>
        /// <param name="tuningSpace">BDA tuning space used by the network provider.</param>
        /// <param name="programs">PMT list; index 0 is skipped, each following
        /// entry gets its own demux output pin and mosaic tile.</param>
        private void BuildMosaicGraph(ITuningSpace tuningSpace, ArrayList programs)
        {
            this.graphBuilder = (IFilterGraph2) new FilterGraph();
            rot = new DsROTEntry(this.graphBuilder);

            // Method names should be self explanatory
            AddNetworkProviderFilter(tuningSpace);
            AddMPEG2DemuxFilter();

            AddAndConnectBDABoardFilters();
            AddTransportStreamFiltersToGraph();
            AddRenderers();

            //unsafe
            //{
            // One shared copy of the MPEG-2 program video format block; every
            // created output pin's media type points at this same buffer.
            IntPtr formatPtr = Marshal.AllocHGlobal(g_Mpeg2ProgramVideo.Length);

            Marshal.Copy(g_Mpeg2ProgramVideo, 0, formatPtr, g_Mpeg2ProgramVideo.Length);

            IMpeg2Demultiplexer mpeg2Demultiplexer = this.mpeg2Demux as IMpeg2Demultiplexer;

            for (int p = 1; p < programs.Count; p++)
            {
                PSI.PSIPMT      pmt    = (PSI.PSIPMT)programs[p];
                PSI.PSIPMT.Data stream = (PSI.PSIPMT.Data)pmt.GetStreamByType(CodeTV.PSI.STREAM_TYPES.STREAMTYPE_13818_VIDEO);

                // Describe an MPEG-2 video elementary stream for the new pin.
                AMMediaType mediaType = new AMMediaType();
                mediaType.majorType           = MediaType.Video;
                mediaType.subType             = MediaSubType.Mpeg2Video;
                mediaType.fixedSizeSamples    = false;
                mediaType.temporalCompression = false;
                mediaType.sampleSize          = 0;
                mediaType.formatType          = FormatType.Mpeg2Video;
                mediaType.unkPtr = IntPtr.Zero;

                mediaType.formatSize = g_Mpeg2ProgramVideo.Length;
                mediaType.formatPtr  = formatPtr;

                //mediaType.formatType = FormatType.Mpeg2Video;
                //mediaType.formatSize = 0;
                //mediaType.formatPtr = IntPtr.Zero;

                // Create a demux output pin for this program and map its video PID.
                string pinName = "video" + p;
                IPin   outputPin;
                int    hr = mpeg2Demultiplexer.CreateOutputPin(mediaType, pinName, out outputPin);
                if (outputPin != null)
                {
                    IMPEG2PIDMap mpeg2PIDMap = outputPin as IMPEG2PIDMap;
                    if (mpeg2PIDMap != null)
                    {
                        hr = mpeg2PIDMap.MapPID(1, new int[] { stream.Pid }, MediaSampleContent.ElementaryStream);
                    }
                    Marshal.ReleaseComObject(outputPin);
                }
            }
            // Safe to free here: CreateOutputPin copies the media type, so the
            // demux no longer references this buffer.
            // NOTE(review): assumed from the copy semantics of CreateOutputPin - confirm.
            Marshal.FreeHGlobal(formatPtr);
            //}

            ConfigureVMR9InWindowlessMode(programs.Count);
            ConnectAllOutputFilters();

            // Lay the streams out in a numberColumn x numberRow grid with a small
            // padding gap around each tile (normalized 0..1 coordinates).
            int   numberColumn  = 4;
            int   numberRow     = 4;
            float widthPadding  = 0.01f;
            float heightPadding = 0.01f;

            float width  = (1.0f / numberColumn) - 2.0f * widthPadding;
            float height = (1.0f / numberRow) - 2.0f * heightPadding;

            IVMRMixerControl9 vmrMixerControl9 = this.videoRenderer as IVMRMixerControl9;

            for (int p = 1; p < programs.Count; p++)
            {
                // Convert the linear program index into (row, column) grid cells.
                int            column, row = Math.DivRem(p - 1, numberColumn, out column);
                NormalizedRect rect = new NormalizedRect();
                rect.left   = (float)column / (float)numberColumn + widthPadding;
                rect.top    = (float)row / (float)numberRow + heightPadding;
                rect.right  = rect.left + width;
                rect.bottom = rect.top + height;
                vmrMixerControl9.SetOutputRect(p, ref rect);
            }
        }
Esempio n. 20
0
        /// <summary>
        /// Writes every media type offered by the pin to the log, marking the
        /// entry that matches the pin's current connection.
        /// </summary>
        /// <param name="pin">Pin whose media types are logged.</param>
        private void logMediaTypes(IPin pin)
        {
            IEnumMediaTypes mediaTypes = null;
            AMMediaType[] mediaType = new AMMediaType[1];

            // Media type of the current connection (left empty when unconnected).
            AMMediaType connectedMediaType = new AMMediaType();
            reply = pin.ConnectionMediaType(connectedMediaType);

            reply = pin.EnumMediaTypes(out mediaTypes);
            if (reply != 0)
            {
                LogMessage("Media types cannot be determined at this time (not connected yet?)");
                DsUtils.FreeAMMediaType(connectedMediaType);
                return;
            }

            while (mediaTypes.Next(mediaType.Length, mediaType, IntPtr.Zero) == 0)
            {
                foreach (AMMediaType currentMediaType in mediaType)
                {
                    PinInfo pinInfo;
                    reply = pin.QueryPinInfo(out pinInfo);
                    DsError.ThrowExceptionForHR(reply);
                    // Fix: QueryPinInfo AddRefs the owning filter; release it to
                    // avoid leaking one COM reference per media type.
                    DsUtils.FreePinInfo(pinInfo);

                    string majorType = TranslateMediaMajorType(currentMediaType.majorType);
                    string subType = TranslateMediaSubType(currentMediaType.subType);

                    string connectedComment;

                    if (currentMediaType.majorType == connectedMediaType.majorType && currentMediaType.subType == connectedMediaType.subType)
                        connectedComment = "** Connected **";
                    else
                        connectedComment = string.Empty;

                    LogMessage("Media type: " +
                        majorType + " ; " +
                        subType + " " +
                        currentMediaType.fixedSizeSamples + " " +
                        currentMediaType.sampleSize + " " +
                        connectedComment);

                    // Fix: each enumerated AMMediaType owns a CoTaskMem format
                    // block that must be freed by the caller.
                    DsUtils.FreeAMMediaType(currentMediaType);
                }
            }

            // Fix: release the enumerator and the connection media type; both
            // leaked in the original implementation.
            System.Runtime.InteropServices.Marshal.ReleaseComObject(mediaTypes);
            DsUtils.FreeAMMediaType(connectedMediaType);
        }
Esempio n. 21
0
        /// <summary>
        /// Builds the DirectShow playback graph for an audio file: source filter,
        /// sample grabber (to receive PCM samples via callback), optional null
        /// renderer (to suppress audible output), and the media control/seek/event
        /// interfaces. Sets isValid on success; on failure tears the graph down
        /// and raises AudioSourceError.
        /// </summary>
        /// <param name="audioSubType">Audio sub type requested from the sample grabber.</param>
        private void CreateFilters(Guid audioSubType)
        {
            isValid = false;
            int r;

            // grabber
            grabberAudio = new GrabberAudio(this);

            // objects
            graphObject        = null;
            grabberObjectAudio = null;

            try {
                // get type for filter graph
                Type type = Type.GetTypeFromCLSID(Clsid.FilterGraph);
                if (type == null)
                {
                    throw new ApplicationException("Failed creating filter graph");
                }

                // create filter graph
                graphObject = Activator.CreateInstance(type);
                graph       = (IGraphBuilder)graphObject;

                // create source device's object
                r = graph.AddSourceFilter(fileName, "source", out sourceBase);
                if (sourceBase == null)
                {
                    throw new ApplicationException("Failed creating source filter");
                }

                // get type for sample grabber
                type = Type.GetTypeFromCLSID(Clsid.SampleGrabber);
                if (type == null)
                {
                    throw new ApplicationException("Failed creating sample grabber");
                }

                // create sample grabber
                grabberObjectAudio = Activator.CreateInstance(type);
                sampleGrabberAudio = (ISampleGrabber)grabberObjectAudio;
                grabberBaseAudio   = (IBaseFilter)grabberObjectAudio;

                // add grabber filters to graph
                r = graph.AddFilter(grabberBaseAudio, "grabberAudio");

                // set media type
                AMMediaType mediaType = new AMMediaType {
                    MajorType  = MediaType.Audio,
                    SubType    = audioSubType,
                    FormatType = FormatType.WaveEx
                };
                r = sampleGrabberAudio.SetMediaType(mediaType);

                // render pin
                // TODO: Improve this! We can't always assume that the second pin will always be the audio pin -- we need to find it.
                IPin sbPin = Tools.GetOutPin(sourceBase, 1);
                if (sbPin == null)
                {
                    sbPin = Tools.GetOutPin(sourceBase, 0);
                }
                r = graph.Render(sbPin);

                // Read the negotiated format off the grabber's output pin; an
                // unconnected pin means rendering failed.
                IPin        outPin = Tools.GetOutPin(grabberBaseAudio, 0);
                AMMediaType mt     = new AMMediaType();
                r = outPin.ConnectionMediaType(mt);
                if (!Tools.IsPinConnected(outPin))
                {
                    throw new ApplicationException("Failed obtaining media information");
                }

                // disable clock, if someone requested it
                if (!referenceClockEnabled)
                {
                    IMediaFilter mediaFilter = (IMediaFilter)graphObject;
                    r = mediaFilter.SetSyncSource(null);
                }

                // Extract the WAVEFORMATEX describing the connected audio stream.
                // NOTE(review): mt's format block does not appear to be freed - verify.
                wavFormat = new WaveFormatEx();
                Marshal.PtrToStructure(mt.FormatPtr, wavFormat);
                Marshal.ReleaseComObject(outPin);

                // configure sample grabber
                r = sampleGrabberAudio.SetBufferSamples(false);
                r = sampleGrabberAudio.SetOneShot(false);
                r = sampleGrabberAudio.SetCallback(grabberAudio, 1);

                if (useNullRenderer)
                {
                    // Get a list of all the filters connected to the sample grabber
                    List <Tools.FilterInfo2> filtersInfo2    = new List <Tools.FilterInfo2>();
                    Tools.FilterInfo2        testFilterInfo2 = Tools.GetNextFilter(grabberBaseAudio, PinDirection.Output, 0);
                    while (true)
                    {
                        filtersInfo2.Add(testFilterInfo2);
                        testFilterInfo2 = Tools.GetNextFilter(testFilterInfo2.Filter, PinDirection.Output, 0);
                        if (testFilterInfo2.Filter == null)
                        {
                            break;
                        }
                    }
                    // Remove the last filter, the audio renderer
                    r = graph.RemoveFilter(filtersInfo2[filtersInfo2.Count - 1].Filter);

                    // create null renderer
                    type = Type.GetTypeFromCLSID(Clsid.NullRenderer);
                    if (type == null)
                    {
                        throw new ApplicationException("Failed creating null renderer");
                    }

                    nullRendererObjectAudio = Activator.CreateInstance(type);
                    IBaseFilter nullRendererAudio = (IBaseFilter)nullRendererObjectAudio;

                    // add grabber filters to graph
                    r = graph.AddFilter(nullRendererAudio, "nullRenderer");

                    //outPin = Tools.GetOutPin(filtersInfo2[filtersInfo2.Count - 2].Filter, 0);
                    outPin = Tools.GetOutPin(grabberBaseAudio, 0);
                    IPin inPin = Tools.GetInPin(nullRendererAudio, 0);
                    if (graph.Connect(outPin, inPin) < 0)
                    {
                        throw new ApplicationException("Failed obtaining media audio information");
                    }
                    Marshal.ReleaseComObject(outPin);
                    Marshal.ReleaseComObject(inPin);
                }

                // configure video window
                IVideoWindow window = (IVideoWindow)graphObject;
                if (window != null)
                {
                    window.put_AutoShow(false);
                    window = null;
                }

                // get media control
                mediaControl = (IMediaControl)graphObject;

                // get media seek control
                mediaSeekControl = (IMediaSeeking)graphObject;
                mediaSeekControl.SetTimeFormat(TimeFormat.MediaTime);

                // get media events' interface
                mediaEvent = (IMediaEventEx)graphObject;

                // get media audio control
                basicAudio = (IBasicAudio)graphObject;

                isValid = true;
            } catch (Exception exception) {
                DestroyFilters();

                // provide information to clients
                AudioSourceError?.Invoke(this, new AudioSourceErrorEventArgs(exception.Message));
            }
        }
Esempio n. 22
0
            /// <summary>
            /// Reads the video format negotiated on the sample grabber and caches
            /// the frame dimensions and stride for later buffer handling.
            /// </summary>
            /// <param name="sampGrabber">Sample grabber to query for its connected media type.</param>
            private void SaveSizeInfo(ISampleGrabber sampGrabber)
            {
                AMMediaType media = new AMMediaType();

                // Ask the grabber for the media type of its current connection.
                int hr = sampGrabber.GetConnectedMediaType(media);
                DsError.ThrowExceptionForHR(hr);

                // Only a populated VIDEOINFOHEADER format block is supported.
                bool usableFormat = (media.formatType == FormatType.VideoInfo) && (media.formatPtr != IntPtr.Zero);
                if (!usableFormat)
                {
                    throw new NotSupportedException("Unknown Grabber Media Format");
                }

                // Extract the frame dimensions from the format block.
                videoInfoHeader = (VideoInfoHeader)Marshal.PtrToStructure(media.formatPtr, typeof(VideoInfoHeader));
                m_videoWidth = videoInfoHeader.BmiHeader.Width;
                m_videoHeight = videoInfoHeader.BmiHeader.Height;
                // NOTE(review): stride assumes rows are tightly packed (no padding) - confirm.
                m_stride = m_videoWidth * (videoInfoHeader.BmiHeader.BitCount / 8);

                // Release the format block owned by the returned media type.
                DsUtils.FreeAMMediaType(media);
                media = null;
            }
Esempio n. 23
0
        /// <summary>
        /// VMR9 compositor callback: copies the first video stream onto the render
        /// target, overlays the current stream time and a bouncing "spider" sprite.
        /// Only the stream on the first pin is used.
        /// </summary>
        /// <param name="pD3DDevice">Unmanaged Direct3D device pointer.</param>
        /// <param name="pddsRenderTarget">Unmanaged render-target surface pointer.</param>
        /// <param name="pmtRenderTarget">Media type of the render target (unused here).</param>
        /// <param name="rtStart">Sample start time in 100ns units.</param>
        /// <param name="rtEnd">Sample end time (unused here).</param>
        /// <param name="dwClrBkGnd">Background color (unused here).</param>
        /// <param name="pVideoStreamInfo">Per-stream surface info; only index 0 is read.</param>
        /// <param name="cStreams">Number of streams (unused here).</param>
        /// <returns>Always 0 (S_OK); exceptions are logged and swallowed.</returns>
        public int CompositeImage(IntPtr pD3DDevice, IntPtr pddsRenderTarget, AMMediaType pmtRenderTarget, long rtStart, long rtEnd, int dwClrBkGnd, VMR9VideoStreamInfo[] pVideoStreamInfo, int cStreams)
        {
            try
            {
                // Just in case the filter call CompositeImage before InitCompositionDevice (this sometime occure)
                if (unmanagedDevice != pD3DDevice)
                {
                    SetManagedDevice(pD3DDevice);
                }

                // Create a managed Direct3D surface (the Render Target) from the unmanaged pointer.
                // The constructor don't call IUnknown.AddRef but the "destructor" seem to call IUnknown.Release
                // Direct3D seem to be happier with that according to the DirectX log
                Marshal.AddRef(pddsRenderTarget);
                Surface            renderTarget     = new Surface(pddsRenderTarget);
                SurfaceDescription renderTargetDesc = renderTarget.Description;
                Rectangle          renderTargetRect = new Rectangle(0, 0, renderTargetDesc.Width, renderTargetDesc.Height);

                // Same thing for the first video surface
                // WARNING : This Compositor sample only use the video provided to the first pin.
                Marshal.AddRef(pVideoStreamInfo[0].pddsVideoSurface);
                Surface            surface     = new Surface(pVideoStreamInfo[0].pddsVideoSurface);
                SurfaceDescription surfaceDesc = surface.Description;
                Rectangle          surfaceRect = new Rectangle(0, 0, surfaceDesc.Width, surfaceDesc.Height);

                // Get the current time (to write it over the video later)
                TimeSpan timeStart = TimeSpan.FromTicks(rtStart);

                // Set the device's render target (this doesn't seem to be needed)
                device.SetRenderTarget(0, renderTarget);

                // Copy the whole video surface into the render target
                // it's a de facto surface cleaning...
                device.StretchRectangle(surface, surfaceRect, renderTarget, renderTargetRect, TextureFilter.None);

                // sprite's methods need to be called between device.BeginScene and device.EndScene
                device.BeginScene();

                // Init the sprite engine for AlphaBlending operations
                sprite.Begin(SpriteFlags.AlphaBlend);

                // Write the current video time (using the sprite)...
                d3dFont.DrawText(sprite, timeStart.ToString(), Point.Empty, Color.White);

                // Compute the spider moves: bounce horizontally off the left and
                // right edges of the render target.
                if (spiderPos.X == 0)
                {
                    spiderMove.X = +1;
                }
                if (spiderPos.X + spiderSize.Width > renderTargetDesc.Width)
                {
                    spiderMove.X = -1;
                }

                spiderPos.X += spiderMove.X;

                // ...and vertically off the top and bottom edges.
                if (spiderPos.Y == 0)
                {
                    spiderMove.Y = +1;
                }
                if (spiderPos.Y + spiderSize.Height > renderTargetDesc.Height)
                {
                    spiderMove.Y = -1;
                }

                spiderPos.Y += spiderMove.Y;

                // Draw the spider
                sprite.Draw2D(spiderTex, Rectangle.Empty, Rectangle.Empty, spiderPos, -1);

                // End the spite engine (drawings take place here)
                sprite.End();

                // End the sceen.
                device.EndScene();

                // No Present requiered because the rendering is on a render target...

                // Dispose the managed surface
                surface.Dispose();
                surface = null;

                // and the managed render target
                renderTarget.Dispose();
                renderTarget = null;
            }
            catch (Exception e)
            {
                // NOTE(review): exceptions are swallowed and 0 (success) is still
                // returned to the filter; also the surfaces are not disposed on
                // this path - verify this is intentional.
                Debug.WriteLine(e.ToString());
            }

            // return a success to the filter
            return(0);
        }
Esempio n. 24
0
        /// <summary>
        /// Populate the media type (MPEG-2 program stream) and push it onto
        /// the generic sample source filter.
        /// </summary>
        /// <param name="psc">IGenericSampleConfig onto which we set the media type</param>
        public override void SetMediaType(IGenericSampleConfig psc)
        {
            AMMediaType amt = new AMMediaType();
            amt.majorType = MediaType.Stream;
            amt.subType = MediaSubType.Mpeg2Program;
            amt.formatType = Guid.Empty;

            try
            {
                int hr = psc.SetMediaTypeEx(amt, BUFSIZE);
                DsError.ThrowExceptionForHR(hr);
            }
            finally
            {
                // FIX: free the media type even when SetMediaTypeEx fails;
                // previously the throw path skipped FreeAMMediaType.
                DsUtils.FreeAMMediaType(amt);
            }
        }
        /*
         * protected void InitAudioSampleGrabber()
         * {
         *  // Get the graph builder
         *  IGraphBuilder graphBuilder = (mediaControl as IGraphBuilder);
         *  if (graphBuilder == null)
         *      return;
         *
         *  try
         *  {
         *      // Build the sample grabber
         *      sampleGrabber = Activator.CreateInstance(Type.GetTypeFromCLSID(Filters.SampleGrabber, true))
         *          as ISampleGrabber;
         *
         *      if (sampleGrabber == null)
         *          return;
         *
         *      // Add it to the filter graph
         *      int hr = graphBuilder.AddFilter(sampleGrabber as IBaseFilter, "ProTONE_SampleGrabber");
         *      DsError.ThrowExceptionForHR(hr);
         *
         *      AMMediaType mtAudio = new AMMediaType();
         *      mtAudio.majorType = MediaType.Audio;
         *      mtAudio.subType = MediaSubType.PCM;
         *      mtAudio.formatPtr = IntPtr.Zero;
         *
         *      _actualAudioFormat = null;
         *
         *      hr = sampleGrabber.SetMediaType(mtAudio);
         *      DsError.ThrowExceptionForHR(hr);
         *
         *      hr = sampleGrabber.SetBufferSamples(true);
         *      DsError.ThrowExceptionForHR(hr);
         *
         *      hr = sampleGrabber.SetOneShot(false);
         *      DsError.ThrowExceptionForHR(hr);
         *
         *      hr = sampleGrabber.SetCallback(this, 1);
         *      DsError.ThrowExceptionForHR(hr);
         *
         *      sampleAnalyzerMustStop.Reset();
         *      sampleAnalyzerThread = new Thread(new ThreadStart(SampleAnalyzerLoop));
         *      sampleAnalyzerThread.Priority = ThreadPriority.Highest;
         *      sampleAnalyzerThread.Start();
         *  }
         *  catch(Exception ex)
         *  {
         *      Logger.LogException(ex);
         *  }
         *
         *  rotEntry = new DsROTEntry(graphBuilder as IFilterGraph);
         * }*/

        /// <summary>
        /// Inserts the PCM audio sample grabber between the ffdshow Audio Decoder
        /// and the sound device filter, configures it for continuous buffered
        /// grabbing, and starts the sample analyzer thread.
        /// Any failure is logged via Logger.LogException (best-effort setup).
        /// </summary>
        protected void InitAudioSampleGrabber_v2()
        {
            // Get the graph builder
            IGraphBuilder graphBuilder = (mediaControl as IGraphBuilder);

            if (graphBuilder == null)
            {
                return;
            }

            try
            {
                // Build the sample grabber
                sampleGrabber = Activator.CreateInstance(Type.GetTypeFromCLSID(Filters.SampleGrabber, true))
                                as ISampleGrabber;

                if (sampleGrabber == null)
                {
                    return;
                }

                // Add it to the filter graph
                int hr = graphBuilder.AddFilter(sampleGrabber as IBaseFilter, "ProTONE_SampleGrabber_v2");
                DsError.ThrowExceptionForHR(hr);

                IBaseFilter ffdAudioDecoder = null;

                IPin   ffdAudioDecoderOutput = null;
                IPin   soundDeviceInput      = null;
                IPin   sampleGrabberInput    = null;
                IPin   sampleGrabberOutput   = null;
                IntPtr pSoundDeviceInput     = IntPtr.Zero;

                // When using FFDShow, typically we'll find
                // a ffdshow Audio Decoder connected to the sound device filter
                //
                // i.e. [ffdshow Audio Decoder] --> [DirectSound Device]
                //
                // Our audio sample grabber supports only PCM sample input and output.
                // Its entire processing is based on this assumption.
                //
                // Thus need to insert the audio sample grabber between the ffdshow Audio Decoder and the sound device
                // because this is the only place where we can find PCM samples. The sound device only accepts PCM.
                //
                // So we need to turn this graph:
                //
                // .. -->[ffdshow Audio Decoder]-->[DirectSound Device]
                //
                // into this:
                //
                // .. -->[ffdshow Audio Decoder]-->[Sample grabber]-->[DirectSound Device]
                //
                // Actions to do to achieve the graph change:
                //
                // 1. Locate the ffdshow Audio Decoder in the graph
                // 2. Find its output pin and the pin that it's connected to
                // 3. Locate the input and output pins of sample grabber
                // 4. Disconnect the ffdshow Audio Decoder and its correspondent (sound device input pin)
                // 5. Connect the ffdshow Audio Decoder to sample grabber input
                // 6. Connect the sample grabber output to sound device input
                // that's all.

                // --------------
                // 1. Locate the ffdshow Audio Decoder in the graph
                hr = graphBuilder.FindFilterByName("ffdshow Audio Decoder", out ffdAudioDecoder);
                DsError.ThrowExceptionForHR(hr);

                // 2. Find its output pin and the pin that it's connected to
                hr = ffdAudioDecoder.FindPin("Out", out ffdAudioDecoderOutput);
                DsError.ThrowExceptionForHR(hr);

                hr = ffdAudioDecoderOutput.ConnectedTo(out pSoundDeviceInput);
                DsError.ThrowExceptionForHR(hr);

                soundDeviceInput = new DSPin(pSoundDeviceInput).Value;

                // 3. Locate the input and output pins of sample grabber
                hr = (sampleGrabber as IBaseFilter).FindPin("In", out sampleGrabberInput);
                DsError.ThrowExceptionForHR(hr);

                hr = (sampleGrabber as IBaseFilter).FindPin("Out", out sampleGrabberOutput);
                DsError.ThrowExceptionForHR(hr);

                // 4. Disconnect the ffdshow Audio Decoder and its correspondent (sound device input pin)
                hr = ffdAudioDecoderOutput.Disconnect();
                DsError.ThrowExceptionForHR(hr);

                hr = soundDeviceInput.Disconnect();
                DsError.ThrowExceptionForHR(hr);

                // 5. Connect the ffdshow Audio Decoder to sample grabber input
                hr = graphBuilder.Connect(ffdAudioDecoderOutput, sampleGrabberInput);
                DsError.ThrowExceptionForHR(hr);

                // 6. Connect the sample grabber output to sound device input
                hr = graphBuilder.Connect(sampleGrabberOutput, soundDeviceInput);
                DsError.ThrowExceptionForHR(hr);

                // Ask the grabber for PCM audio; no format block is attached
                // (formatPtr stays IntPtr.Zero) so nothing needs freeing here.
                AMMediaType mtAudio = new AMMediaType();
                mtAudio.majorType = MediaType.Audio;
                mtAudio.subType   = MediaSubType.PCM;
                mtAudio.formatPtr = IntPtr.Zero;

                _actualAudioFormat = null;

                // FIX: these HRESULTs were silently ignored; check them like
                // every other call in this method (and like the v1 code did).
                hr = sampleGrabber.SetMediaType(mtAudio);
                DsError.ThrowExceptionForHR(hr);

                hr = sampleGrabber.SetBufferSamples(true);
                DsError.ThrowExceptionForHR(hr);

                hr = sampleGrabber.SetOneShot(false);
                DsError.ThrowExceptionForHR(hr);

                hr = sampleGrabber.SetCallback(this, 1);
                DsError.ThrowExceptionForHR(hr);

                sampleAnalyzerMustStop.Reset();
                sampleAnalyzerThread          = new Thread(new ThreadStart(SampleAnalyzerLoop));
                sampleAnalyzerThread.Priority = ThreadPriority.Highest;
                sampleAnalyzerThread.Start();
            }
            catch (Exception ex)
            {
                Logger.LogException(ex);
            }

            // Register the graph in the Running Object Table (debugging aid);
            // done even if the setup above failed, matching the original flow.
            rotEntry = new DsROTEntry(graphBuilder as IFilterGraph);
        }
Esempio n. 26
0
		/// <summary>
		///  Retrieves the value of one member of the IAMStreamConfig format block.
		///  Helper function for several properties that expose
		///  video/audio settings from IAMStreamConfig.GetFormat().
		///  IAMStreamConfig.GetFormat() returns a AMMediaType struct.
		///  AMMediaType.formatPtr points to a format block structure.
		///  This format block structure may be one of several 
		///  types, the type being determined by AMMediaType.formatType.
		/// </summary>
		protected object getStreamConfigSetting( IAMStreamConfig streamConfig, string fieldName)
		{
			if ( streamConfig == null )
				throw new NotSupportedException();
			// The graph must be stopped, and derendered, before we can
			// safely query the stream configuration.
			assertStopped();

			derenderGraph();

			object returnValue = null;
#if DSHOWNET
			IntPtr pmt = IntPtr.Zero;
#endif
			AMMediaType mediaType = new AMMediaType();

			try 
			{
				// Get the current format info
#if DSHOWNET
                int hr = streamConfig.GetFormat(out pmt);
#else
				int hr = streamConfig.GetFormat(out mediaType);
#endif
				if ( hr != 0 )
					Marshal.ThrowExceptionForHR( hr );

#if DSHOWNET
				// The older DShowNET API returns a raw pointer;
				// copy it into the managed AMMediaType.
				Marshal.PtrToStructure( pmt, mediaType );
#endif

				// The formatPtr member points to different structures
				// depending on the formatType
				object formatStruct;
				if ( mediaType.formatType == FormatType.WaveEx )
					formatStruct = new WaveFormatEx();
				else if ( mediaType.formatType == FormatType.VideoInfo )
					formatStruct = new VideoInfoHeader();
				else if ( mediaType.formatType == FormatType.VideoInfo2 )
					formatStruct = new VideoInfoHeader2();
				else
					throw new NotSupportedException( "This device does not support a recognized format block." );

				// Retrieve the nested structure
				Marshal.PtrToStructure( mediaType.formatPtr, formatStruct );

				// Find the required field via reflection
				Type structType = formatStruct.GetType();
				FieldInfo fieldInfo = structType.GetField( fieldName );
				if ( fieldInfo == null )
					throw new NotSupportedException( "Unable to find the member '" + fieldName + "' in the format block." );

				// Extract the field's current value
				returnValue = fieldInfo.GetValue( formatStruct ); 
						
			}
			finally
			{
				// Free the format block and (DShowNET only) the raw media type memory.
				DsUtils.FreeAMMediaType( mediaType );
#if DSHOWNET
				Marshal.FreeCoTaskMem( pmt );
#endif
			}
			// Restore the graph to its pre-call state.
			renderGraph();
			startPreviewIfNeeded();

			return( returnValue );
		}
        /// <summary>
        /// Completes the audio sample grabber setup once the graph is connected:
        /// reads the negotiated audio format and derives the analysis window
        /// sizes (waveform / FFT / VU meter) and the maximum sample level,
        /// then signals sampleGrabberConfigured.
        /// Does nothing if the sample grabber is absent or not yet connected.
        /// </summary>
        protected void CompleteAudioSampleGrabberIntialization()
        {
            _actualAudioFormat = null;

            if (sampleGrabber == null)
            {
                return;
            }

            AMMediaType mtAudio = new AMMediaType();
            if (HRESULT.SUCCEEDED(sampleGrabber.GetConnectedMediaType(mtAudio)))
            {
                try
                {
                    // The grabber was configured for PCM, so the format block
                    // is expected to be a WAVEFORMATEX.
                    _actualAudioFormat = (WaveFormatEx)Marshal.PtrToStructure(mtAudio.formatPtr, typeof(WaveFormatEx));

                    // Window size = smallest power of two such that
                    // sampleRate / size <= factor.
                    const int WAVEFORM_WNDSIZEFACTOR = 128;
                    const int VU_WNDSIZEFACTOR       = 4096;
                    const int FFT_WNDSIZEFACTOR      = 16;

                    int freq =
                        (MediaRenderer.DefaultInstance.ActualAudioFormat == null) ? 44100 :
                        MediaRenderer.DefaultInstance.ActualAudioFormat.nSamplesPerSec;

                    try
                    {
                        int k1 = 0, k2 = 0, k3 = 0;

                        while (freq / (1 << k1) > WAVEFORM_WNDSIZEFACTOR)
                        {
                            k1++;
                        }
                        while (freq / (1 << k2) > FFT_WNDSIZEFACTOR)
                        {
                            k2++;
                        }
                        while (freq / (1 << k3) > VU_WNDSIZEFACTOR)
                        {
                            k3++;
                        }

                        _waveformWindowSize = (1 << k1);
                        _fftWindowSize      = (1 << k2);
                        _vuMeterWindowSize  = (1 << k3);

                        // Largest positive sample value for the negotiated bit depth.
                        _maxLevel =
                            (MediaRenderer.DefaultInstance.ActualAudioFormat != null) ?
                            (1 << (MediaRenderer.DefaultInstance.ActualAudioFormat.wBitsPerSample - 1)) - 1 :
                            short.MaxValue;
                    }
                    catch
                    {
                        // Fall back to sane defaults (16-bit audio).
                        _vuMeterWindowSize  = 64;
                        _waveformWindowSize = 512;
                        _fftWindowSize      = 4096;
                        _maxLevel           = short.MaxValue;
                    }
                    finally
                    {
                        _maxLogLevel = Math.Log(_maxLevel);
                    }

                    sampleGrabberConfigured.Set();
                }
                finally
                {
                    // FIX: free the format block allocated by GetConnectedMediaType
                    // (it is CoTaskMem-allocated); it was previously leaked on
                    // every call.
                    if (mtAudio.formatPtr != IntPtr.Zero)
                    {
                        Marshal.FreeCoTaskMem(mtAudio.formatPtr);
                        mtAudio.formatPtr = IntPtr.Zero;
                    }
                }
            }
        }
Esempio n. 28
0
        /// <summary>
        /// Sets the value of one member of the IAMStreamConfig format block
        /// (e.g. a VideoInfoHeader or WaveFormatEx field) and applies it via
        /// IAMStreamConfig.SetFormat.
        /// </summary>
        /// <param name="streamConfig">The stream configuration interface to modify.</param>
        /// <param name="fieldName">Name of the field inside the format block.</param>
        /// <param name="newValue">New value to store into that field.</param>
        /// <returns>Always null (kept for signature compatibility with getStreamConfigSetting).</returns>
        protected object setStreamConfigSetting(IAMStreamConfig streamConfig, string fieldName, object newValue)
        {
            if (streamConfig == null)
            {
                throw new NotSupportedException();
            }

            object returnValue = null;
            IntPtr pmt = IntPtr.Zero;
            AMMediaType mediaType = new AMMediaType();

            try
            {
                // Get the current format info
                int hr = streamConfig.GetFormat(out pmt);
                if (hr != 0)
                {
                    Marshal.ThrowExceptionForHR(hr);
                }
                Marshal.PtrToStructure(pmt, mediaType);

                // The formatPtr member points to different structures
                // depending on the formatType
                object formatStruct;
                if (mediaType.formatType == FormatType.WaveEx)
                {
                    formatStruct = new WaveFormatEx();
                }
                else if (mediaType.formatType == FormatType.VideoInfo)
                {
                    formatStruct = new VideoInfoHeader();
                }
                else if (mediaType.formatType == FormatType.VideoInfo2)
                {
                    formatStruct = new VideoInfoHeader2();
                }
                else
                {
                    throw new NotSupportedException("This device does not support a recognized format block.");
                }

                // Retrieve the nested structure
                Marshal.PtrToStructure(mediaType.formatPtr, formatStruct);

                // Find the required field via reflection
                Type structType = formatStruct.GetType();
                FieldInfo fieldInfo = structType.GetField(fieldName);
                if (fieldInfo == null)
                {
                    throw new NotSupportedException("Unable to find the member '" + fieldName + "' in the format block.");
                }

                // Update the value of the field
                fieldInfo.SetValue(formatStruct, newValue);

                // PtrToStructure copies the data so we need to copy it back
                Marshal.StructureToPtr(formatStruct, mediaType.formatPtr, false);

                // Save the changes
                hr = streamConfig.SetFormat(mediaType);
                if (hr != 0)
                {
                    Marshal.ThrowExceptionForHR(hr);
                }
            }
            finally
            {
                // FIX: the nested format block was leaked (the FreeAMMediaType
                // call had been commented out). SetFormat copies the media type,
                // so freeing it here is safe.
                if (mediaType.formatPtr != IntPtr.Zero)
                {
                    Marshal.FreeCoTaskMem(mediaType.formatPtr);
                    mediaType.formatPtr = IntPtr.Zero;
                }
                Marshal.FreeCoTaskMem(pmt);
            }

            return (returnValue);
        }
Esempio n. 29
0
 /// <summary>
 /// Handles the Cancel action: discards any media type the user may have
 /// selected and dismisses the dialog with a Cancel result.
 /// </summary>
 private void OnCancel(object sender, EventArgs e)
 {
     // No media type is carried out of a cancelled dialog.
     selected_mt = null;

     // Report cancellation to the caller, then dismiss the form.
     DialogResult = DialogResult.Cancel;
     Close();
 }
        /// <summary>
        /// Initializes the graph into which the capture data will be piped
        /// (capture device -> sample grabber -> default renderer).
        /// </summary>
        /// <param name="p_capDev">The device to be captured</param>
        private void buildGraph(DsDevice p_capDev)
        {
            int hr = 0; //For error checking

            // NOTE(review): the previous code nulled m_graph before reassigning,
            // which does not release the old graph's COM object; the reference
            // is simply replaced below either way.
            m_graph = (IGraphBuilder) new FilterGraph();
            IBaseFilter           captureFilter;                                                 //Filter for the captureDevice
            ICaptureGraphBuilder2 pBuilder = (ICaptureGraphBuilder2) new CaptureGraphBuilder2(); //Rendering portion

            //Add the graph to the builder, like adding canvas to the stand
            hr = pBuilder.SetFiltergraph(m_graph);
            DsError.ThrowExceptionForHR(hr);

            //Initialize captureFilter with the unique identifier from capDev and add it to the graph
            captureFilter = createFilterByDevice(p_capDev);
            hr            = m_graph.AddFilter(captureFilter, "CapFilter");
            DsError.ThrowExceptionForHR(hr);

            //Create a sample grabber and add it to the graph
            m_sampleGrabber = (IBaseFilter)Activator.CreateInstance(typeof(CamSampleGrabber));
            hr = m_graph.AddFilter(m_sampleGrabber, "SampleGrabber");
            DsError.ThrowExceptionForHR(hr);

            //Set the callback function for the sample grabber.  It will be CamCaptureGrabberCallBack.bufferCB()
            // this is because sampleCB only support single image getting.
            hr = ((CamSampleGrabber)m_sampleGrabber).SetCallback(new CamCaptureGrabberCallBack(), 1);
            DsError.ThrowExceptionForHR(hr);
            hr = ((ISampleGrabber)m_sampleGrabber).SetOneShot(false);
            DsError.ThrowExceptionForHR(hr);

            //Get pins
            // NOTE(review): capPin and samPin are never released; consider
            // Marshal.ReleaseComObject once the graph is built.
            IPin capPin = DsFindPin.ByCategory(captureFilter, PinCategory.Capture, 0);
            IPin samPin = DsFindPin.ByDirection(m_sampleGrabber, PinDirection.Input, 0);

            m_camControl = captureFilter as IAMCameraControl;

            //Query the capture pin's current media type and force video
            AMMediaType media = null;

            hr = getMedia(capPin, out media);
            DsError.ThrowExceptionForHR(hr);

            try
            {
                media.majorType = MediaType.Video;

                hr = ((IAMStreamConfig)capPin).SetFormat(media);
                DsError.ThrowExceptionForHR(hr);
            }
            finally
            {
                // FIX: free the media type even when SetFormat fails;
                // previously it leaked on the throw path.
                DsUtils.FreeAMMediaType(media);
            }

            //Connect capture device to the sample grabber
            hr = m_graph.Connect(capPin, samPin);
            DsError.ThrowExceptionForHR(hr);

            //Render video
            // For a filter with only an output filter (ie. m_sample) then the first two
            // parameters are null.  The 4 and 5 parameter could not be null, however the 4th
            // is an intermediate filter which i don't want and the 5th is the sink if not defined
            // will end up being a default filter.
            hr = pBuilder.RenderStream(null, null, m_sampleGrabber, null, null);
            DsError.ThrowExceptionForHR(hr);
        }
        /// <summary>
        /// Lets the user pick a media file, then builds and runs a playback
        /// graph: file source -> video mixing renderer (via the CaptureManager
        /// EVR multi-sink factory) plus a DirectSound audio renderer.
        /// NOTE(review): every HRESULT below is stored in k/h and never checked,
        /// and none of the COM objects are released when the handler exits.
        /// </summary>
        private void Button_Click(object sender, RoutedEventArgs e)
        {
            OpenFileDialog lopenFileDialog = new OpenFileDialog();

            lopenFileDialog.AddExtension = true;

            var lresult = lopenFileDialog.ShowDialog();

            // Bail out if the user cancelled the dialog.
            if (lresult != true)
            {
                return;
            }

            // Audio branch: add a DirectSound renderer and grab its input pin.
            IBaseFilter lDSoundRender = new DSoundRender() as IBaseFilter;

            m_pGraph.AddFilter(lDSoundRender, "Audio Renderer");


            int k = 0;

            IPin[] lAudioRendererPins = new IPin[1];

            IEnumPins ppEnum;

            k = lDSoundRender.EnumPins(out ppEnum);

            k = ppEnum.Next(1, lAudioRendererPins, IntPtr.Zero);

            var lCaptureManagerEVRMultiSinkFactory = CaptureManagerVideoRendererMultiSinkFactory.getInstance().getICaptureManagerEVRMultiSinkFactory();

            uint lMaxVideoRenderStreamCount = lCaptureManagerEVRMultiSinkFactory.getMaxVideoRenderStreamCount();

            if (lMaxVideoRenderStreamCount == 0)
            {
                return;
            }

            // Video branch: create one renderer output node bound to the
            // display surface texture.
            List <object> lOutputNodesList = new List <object>();

            lCaptureManagerEVRMultiSinkFactory.createOutputNodes(
                IntPtr.Zero,
                mEVRDisplay.Surface.texture,
                1,// lMaxVideoRenderStreamCount,
                out lOutputNodesList);

            if (lOutputNodesList.Count == 0)
            {
                return;
            }

            IBaseFilter lVideoMixingRenderer9 = (IBaseFilter)lOutputNodesList[0];

            var h = m_pGraph.AddFilter(lVideoMixingRenderer9, "lVideoMixingRenderer9");


            IPin[] lVideoRendererPin = new IPin[1];


            // Reuses ppEnum for the video renderer's pin enumeration.
            k = lVideoMixingRenderer9.EnumPins(out ppEnum);

            k = ppEnum.Next(1, lVideoRendererPin, IntPtr.Zero);


            // Source: let the graph pick an appropriate source filter for the file.
            IBaseFilter m_SourceFilter = null;

            m_pGraph.AddSourceFilter(lopenFileDialog.FileName, null, out m_SourceFilter);

            IEnumPins lEnumPins = null;

            m_SourceFilter.EnumPins(out lEnumPins);

            IPin[] lPins = new IPin[1];

            // For each source output pin: connect video-typed pins to the video
            // renderer, then render the pin (audio ends up on the sound renderer).
            while (lEnumPins.Next(1, lPins, IntPtr.Zero) == 0)
            {
                IEnumMediaTypes lIEnumMediaTypes;

                lPins[0].EnumMediaTypes(out lIEnumMediaTypes);

                AMMediaType[] ppMediaTypes = new AMMediaType[1];

                while (lIEnumMediaTypes.Next(1, ppMediaTypes, IntPtr.Zero) == 0)
                {
                    var gh = ppMediaTypes[0].subType;

                    // NOTE(review): ppMediaTypes[0] is never freed; Connect may
                    // be attempted repeatedly if the pin exposes several video types.
                    if (ppMediaTypes[0].majorType == DirectShowLib.MediaType.Video)
                    {
                        k = m_pGraph.Connect(lPins[0], lVideoRendererPin[0]);
                    }
                }

                // NOTE(review): lPins holds a single element (the current pin),
                // so this foreach renders just that pin each iteration.
                foreach (var item in lPins)
                {
                    k = m_pGraph.Render(item);
                }
            }

            IMediaControl lIMediaControl = m_pGraph as IMediaControl;

            k = lIMediaControl.Run();
        }
Esempio n. 32
0
        /// <summary>
        /// Thread entry point: builds a DirectShow playback graph
        /// (Windows Media source -> sample grabber -> default renderer),
        /// runs it, and pumps graph events until playback completes or
        /// stopEvent is signalled. Frames are delivered to the Grabber callback.
        /// </summary>
        public void WorkerThread()
        {
            bool failed = false;

            // grabber callback object that will receive the decoded frames
            Grabber grabber = new Grabber(this);

            // raw COM objects (kept separately so they can be released in finally)
            object graphObj   = null;
            object sourceObj  = null;
            object grabberObj = null;

            // interfaces
            IGraphBuilder     graph       = null;
            IBaseFilter       sourceBase  = null;
            IBaseFilter       grabberBase = null;
            ISampleGrabber    sg          = null;
            IFileSourceFilter fileSource  = null;
            IMediaControl     mc          = null;
            IMediaEventEx     mediaEvent  = null;

            int code, param1, param2;

            // Rebuild and rerun the graph until stop is requested or a build fails.
            while ((!failed) && (!stopEvent.WaitOne(0, true)))
            {
                try
                {
                    // Get type for filter graph
                    Type srvType = Type.GetTypeFromCLSID(Clsid.FilterGraph);
                    if (srvType == null)
                    {
                        throw new ApplicationException("Failed creating filter graph");
                    }

                    // create filter graph
                    graphObj = Activator.CreateInstance(srvType);
                    graph    = (IGraphBuilder)graphObj;

                    // Get type for windows media source filter
                    srvType = Type.GetTypeFromCLSID(Clsid.WindowsMediaSource);
                    if (srvType == null)
                    {
                        throw new ApplicationException("Failed creating WM source");
                    }

                    // create windows media source filter
                    sourceObj  = Activator.CreateInstance(srvType);
                    sourceBase = (IBaseFilter)sourceObj;

                    // Get type for sample grabber
                    srvType = Type.GetTypeFromCLSID(Clsid.SampleGrabber);
                    if (srvType == null)
                    {
                        throw new ApplicationException("Failed creating sample grabber");
                    }

                    // create sample grabber
                    grabberObj  = Activator.CreateInstance(srvType);
                    sg          = (ISampleGrabber)grabberObj;
                    grabberBase = (IBaseFilter)grabberObj;

                    // add source and grabber filters to the graph
                    graph.AddFilter(sourceBase, "source");
                    graph.AddFilter(grabberBase, "grabber");

                    // ask the grabber for RGB24 video samples
                    AMMediaType mt = new AMMediaType();
                    mt.majorType = MediaType.Video;
                    mt.subType   = MediaSubType.RGB24;
                    sg.SetMediaType(mt);

                    // load file
                    fileSource = (IFileSourceFilter)sourceObj;
                    fileSource.Load(this.source, null);

                    // connect pins
                    if (graph.Connect(DSTools.GetOutPin(sourceBase, 0), DSTools.GetInPin(grabberBase, 0)) < 0)
                    {
                        throw new ApplicationException("Failed connecting filters");
                    }

                    // read the negotiated media type to learn the frame dimensions
                    if (sg.GetConnectedMediaType(mt) == 0)
                    {
                        VideoInfoHeader vih = (VideoInfoHeader)Marshal.PtrToStructure(mt.formatPtr, typeof(VideoInfoHeader));

                        grabber.Width  = vih.BmiHeader.Width;
                        grabber.Height = vih.BmiHeader.Height;
                        mt.Dispose();
                    }

                    // render the grabber's output so the graph has a sink
                    graph.Render(DSTools.GetOutPin(grabberBase, 0));

                    // continuous callback delivery, no buffering, BufferCB (type 1)
                    sg.SetBufferSamples(false);
                    sg.SetOneShot(false);
                    sg.SetCallback(grabber, 1);

                    // suppress the default video window
                    IVideoWindow win = (IVideoWindow)graphObj;
                    win.put_AutoShow(false);
                    win = null;

                    // get events interface
                    mediaEvent = (IMediaEventEx)graphObj;

                    // get media control
                    mc = (IMediaControl)graphObj;

                    // run
                    mc.Run();

                    // event pump: poll until completion or stop request
                    while (!stopEvent.WaitOne(0, true))
                    {
                        Thread.Sleep(100);

                        // get an event (0 ms timeout: non-blocking poll)
                        if (mediaEvent.GetEvent(out code, out param1, out param2, 0) == 0)
                        {
                            // release params
                            mediaEvent.FreeEventParams(code, param1, param2);

                            // end of stream: leave the event pump
                            if (code == (int)EventCode.Complete)
                            {
                                System.Diagnostics.Debug.WriteLine("completed");
                                break;
                            }
                        }
                    }

                    mc.StopWhenReady();
                }
                // catch any exceptions
                catch (Exception e)
                {
                    System.Diagnostics.Debug.WriteLine("----: " + e.Message);
                    failed = true;
                }
                // finalization block: drop interface references first, then
                // release the underlying COM objects
                finally
                {
                    // release all objects
                    mediaEvent  = null;
                    mc          = null;
                    fileSource  = null;
                    graph       = null;
                    sourceBase  = null;
                    grabberBase = null;
                    sg          = null;

                    if (graphObj != null)
                    {
                        Marshal.ReleaseComObject(graphObj);
                        graphObj = null;
                    }
                    if (sourceObj != null)
                    {
                        Marshal.ReleaseComObject(sourceObj);
                        sourceObj = null;
                    }
                    if (grabberObj != null)
                    {
                        Marshal.ReleaseComObject(grabberObj);
                        grabberObj = null;
                    }
                }
            }
        }
Esempio n. 33
0
        /// <summary>
        /// Creates a new Video Player. Automatically creates the required Texture2D on the specified GraphicsDevice.
        /// The DirectShow graph is built asynchronously; callback is invoked on
        /// the UI thread once the player is ready.
        /// </summary>
        /// <param name="feel">Host application object used for UI dispatch and toasts.</param>
        /// <param name="FileName">The video file to open</param>
        /// <param name="graphicsDevice">XNA Graphics Device</param>
        /// <param name="callback">Invoked on the UI thread after successful initialization.</param>
        public XNAPlayer(Feel feel, string FileName, GraphicsDevice graphicsDevice, Action callback)
        {
            Utils.RunAsynchronously(() =>
            {
                try
                {
                    // Set video state
                    currentState = VideoState.Stopped;

                    // Store Filename
                    filename = FileName;

                    // Open DirectShow Interfaces
                    InitInterfaces();

                    // Create a SampleGrabber Filter and add it to the FilterGraph
                    SampleGrabber sg             = new SampleGrabber();
                    ISampleGrabber sampleGrabber = (ISampleGrabber)sg;
                    DsError.ThrowExceptionForHR(gb.AddFilter((IBaseFilter)sg, "Grabber"));

                    // Setup Media type info for the SampleGrabber
                    AMMediaType mt = new AMMediaType();
                    mt.majorType   = MEDIATYPE_Video;    // Video
                    mt.subType     = MEDIASUBTYPE_RGB24; // RGB24
                    mt.formatType  = FORMAT_VideoInfo;   // VideoInfo
                    DsError.ThrowExceptionForHR(sampleGrabber.SetMediaType(mt));

                    // Construct the rest of the FilterGraph
                    DsError.ThrowExceptionForHR(gb.RenderFile(filename, null));

                    // Set SampleGrabber Properties: buffer every sample, run
                    // continuously, deliver frames via BufferCB (callback type 1)
                    DsError.ThrowExceptionForHR(sampleGrabber.SetBufferSamples(true));
                    DsError.ThrowExceptionForHR(sampleGrabber.SetOneShot(false));
                    DsError.ThrowExceptionForHR(sampleGrabber.SetCallback((ISampleGrabberCB)this, 1));

                    // Hide Default Video Window
                    IVideoWindow pVideoWindow = (IVideoWindow)gb;
                    DsError.ThrowExceptionForHR(pVideoWindow.put_MessageDrain(IntPtr.Zero));
                    DsError.ThrowExceptionForHR(pVideoWindow.put_WindowState(WindowState.Hide));
                    DsError.ThrowExceptionForHR(pVideoWindow.put_AutoShow(OABool.False));

                    // Create AMMediaType to capture video information
                    // NOTE(review): neither mt nor MediaType is freed here;
                    // their format blocks leak (DsUtils.FreeAMMediaType).
                    AMMediaType MediaType = new AMMediaType();
                    DsError.ThrowExceptionForHR(sampleGrabber.GetConnectedMediaType(MediaType));
                    VideoInfoHeader pVideoHeader = new VideoInfoHeader();
                    Marshal.PtrToStructure(MediaType.formatPtr, pVideoHeader);

                    // Store video information
                    videoHeight     = pVideoHeader.BmiHeader.Height;
                    videoWidth      = pVideoHeader.BmiHeader.Width;
                    avgTimePerFrame = pVideoHeader.AvgTimePerFrame;
                    bitRate         = pVideoHeader.BitRate;
                    DsError.ThrowExceptionForHR(ms.GetDuration(out videoDuration));

                    // Create byte arrays to hold video data
                    videoFrameBytes = new byte[(videoHeight * videoWidth) * 4]; // RGBA format (4 bytes per pixel)
                    bgrData         = new byte[(videoHeight * videoWidth) * 4]; // BGR24 format (3 bytes per pixel + 1 for safety)

                    // Create Output Frame Texture2D with the height and width of the video
                    outputFrame = new Texture2D(graphicsDevice, videoWidth, videoHeight, 1, TextureUsage.None, SurfaceFormat.Color);

                    feel.RunOnUIThread(callback);
                }
                catch
                {
                    // NOTE(review): the exception is swallowed without any
                    // detail being logged; consider capturing it for diagnostics.
                    feel.ShowToast("Unable to Load or Play the video file");
                }
            }, () => { });
        }
Esempio n. 34
0
        /// <summary>
        /// Builds the DVD DirectShow graph: creates the filter graph, a VMR9
        /// renderer plus a hidden windowed "dummy" VMR9 (needed for interactive
        /// menus in renderless mode), adds the DVD Navigator, and wires up the
        /// video, audio, sub picture and Line21 streams.  On any failure all
        /// resources are freed and MediaFailed is raised instead of throwing.
        /// </summary>
        private void BuildGraph()
        {
            try
            {
                FreeResources();

                int hr;

                /* Create our new graph */
                m_graph = (IGraphBuilder) new FilterGraphNoThread();

#if DEBUG
                // Register in the Running Object Table so GraphEdit can attach.
                m_rot = new DsROTEntry(m_graph);
#endif

                /* We are going to use the VMR9 for now.  The EVR does not
                 * seem to work with the interactive menus yet.  It should
                 * play Dvds fine otherwise */
                var rendererType = VideoRendererType.VideoMixingRenderer9;

                /* Creates and initializes a new renderer ready to render to WPF */
                m_renderer = CreateVideoRenderer(rendererType, m_graph, 2);

                /* Do some VMR9 specific stuff */
                if (rendererType == VideoRendererType.VideoMixingRenderer9)
                {
                    var mixer = m_renderer as IVMRMixerControl9;

                    if (mixer != null)
                    {
                        VMR9MixerPrefs dwPrefs;
                        mixer.GetMixingPrefs(out dwPrefs);
                        dwPrefs &= ~VMR9MixerPrefs.RenderTargetMask;
                        dwPrefs |= VMR9MixerPrefs.RenderTargetYUV;

                        /* Enable this line to prefer YUV
                         * (currently computed but intentionally not applied) */
                        //hr = mixer.SetMixingPrefs(dwPrefs);
                    }
                }

                /* Create a new DVD Navigator. */
                var dvdNav = (IBaseFilter) new DVDNavigator();

                /* The DVDControl2 interface lets us control DVD features */
                m_dvdControl = dvdNav as IDvdControl2;

                if (m_dvdControl == null)
                {
                    throw new Exception("Could not QueryInterface the IDvdControl2 interface");
                }

                /* QueryInterface the DVDInfo2 */
                m_dvdInfo = dvdNav as IDvdInfo2;

                /* If a Dvd directory has been set then use it, if not, let DShow find the Dvd */
                if (!string.IsNullOrEmpty(DvdDirectory))
                {
                    hr = m_dvdControl.SetDVDDirectory(DvdDirectory);
                    DsError.ThrowExceptionForHR(hr);
                }

                /* This gives us the DVD time in Hours-Minutes-Seconds-Frame time format, and other options */
                hr = m_dvdControl.SetOption(DvdOptionFlag.HMSFTimeCodeEvents, true);
                DsError.ThrowExceptionForHR(hr);

                /* If the graph stops, resume at the same point */
                m_dvdControl.SetOption(DvdOptionFlag.ResetOnStop, false);

                hr = m_graph.AddFilter(dvdNav, "DVD Navigator");
                DsError.ThrowExceptionForHR(hr);

                IPin dvdVideoPin      = null;
                IPin dvdAudioPin      = null;
                IPin dvdSubPicturePin = null;

                IPin dvdNavPin;
                int  i = 0;

                /* Loop all the output pins on the DVD Navigator, trying to find which pins are which.
                 * We could more easily find the pins by name, but this is more fun...and more flexible
                 * if we ever want to use a 3rd party DVD navigator that used different pin names */
                while ((dvdNavPin = DsFindPin.ByDirection(dvdNav, PinDirection.Output, i)) != null)
                {
                    var    mediaTypes = new AMMediaType[1];
                    IntPtr pFetched   = IntPtr.Zero;

                    IEnumMediaTypes mediaTypeEnum;
                    dvdNavPin.EnumMediaTypes(out mediaTypeEnum);

                    /* Loop over each of the mediaTypes of each pin */
                    while (mediaTypeEnum.Next(1, mediaTypes, pFetched) == 0)
                    {
                        AMMediaType mediaType = mediaTypes[0];

                        /* This will be the video stream pin */
                        if (mediaType.subType == MediaSubType.Mpeg2Video)
                        {
                            /* Keep the ref and we'll work with it later */
                            dvdVideoPin = dvdNavPin;
                            break;
                        }

                        /* This will be the audio stream pin */
                        if (mediaType.subType == MediaSubType.DolbyAC3 ||
                            mediaType.subType == MediaSubType.Mpeg2Audio)
                        {
                            /* Keep the ref and we'll work with it later */
                            dvdAudioPin = dvdNavPin;
                            break;
                        }

                        /* This is the Dvd sub picture pin.  This generally
                         * shows overlays for Dvd menus and sometimes closed captions */
                        if (mediaType.subType == DVD_SUBPICTURE_TYPE)
                        {
                            /* Keep the ref and we'll work with it later */
                            dvdSubPicturePin = dvdNavPin;
                            break;
                        }
                    }

                    /* NOTE(review): the AMMediaType structures fetched by Next()
                     * above are never freed here; confirm whether the project's
                     * helper (e.g. DsUtils.FreeAMMediaType) should be called
                     * for each fetched entry to release its format block. */
                    mediaTypeEnum.Reset();
                    Marshal.ReleaseComObject(mediaTypeEnum);
                    i++;
                }

                /* This is the windowed renderer.  This is *NEEDED* in order
                 * for interactive menus to work with the other VMR9 in renderless mode */
                var dummyRenderer       = (IBaseFilter) new VideoMixingRenderer9();
                var dummyRendererConfig = (IVMRFilterConfig9)dummyRenderer;

                /* In order for this interactive menu trick to work, the VMR9
                 * must be set to Windowed.  We will make sure the window is hidden later on */
                hr = dummyRendererConfig.SetRenderingMode(VMR9Mode.Windowed);
                DsError.ThrowExceptionForHR(hr);

                hr = dummyRendererConfig.SetNumberOfStreams(1);
                DsError.ThrowExceptionForHR(hr);

                hr = m_graph.AddFilter(dummyRenderer, "Dummy Windowed");
                DsError.ThrowExceptionForHR(hr);

                if (dvdAudioPin != null)
                {
                    /* This should render out to the default audio device. We
                     * could modify this code here to go out any audio
                     * device, such as SPDIF or another sound card */
                    hr = m_graph.Render(dvdAudioPin);
                    DsError.ThrowExceptionForHR(hr);
                }

                /* Get the first input pin on our dummy renderer */
                m_dummyRendererPin = DsFindPin.ByConnectionStatus(dummyRenderer, /* Filter to search */
                                                                  PinConnectedStatus.Unconnected,
                                                                  0);

                /* Get an available pin on our real renderer */
                IPin rendererPin = DsFindPin.ByConnectionStatus(m_renderer, /* Filter to search */
                                                                PinConnectedStatus.Unconnected,
                                                                0);         /* Pin index */

                /* Connect the pin to the renderer.
                 * NOTE(review): dvdVideoPin may still be null here if no
                 * Mpeg2Video pin was found in the loop above — confirm
                 * Connect fails gracefully in that case. */
                hr = m_graph.Connect(dvdVideoPin, rendererPin);
                DsError.ThrowExceptionForHR(hr);

                /* Get the next available pin on our real renderer */
                rendererPin = DsFindPin.ByConnectionStatus(m_renderer, /* Filter to search */
                                                           PinConnectedStatus.Unconnected,
                                                           0);         /* Pin index */

                /* Render the sub picture, which will connect
                 * the DVD navigator to the codec, not the renderer */
                hr = m_graph.Render(dvdSubPicturePin);
                DsError.ThrowExceptionForHR(hr);

                /* These are the subtypes most likely to be our dvd subpicture */
                var preferedSubpictureTypes = new[] { MediaSubType.ARGB4444,
                                                      MediaSubType.AI44,
                                                      MediaSubType.AYUV,
                                                      MediaSubType.ARGB32 };
                IPin dvdSubPicturePinOut = null;

                /* Find what should be the subpicture pin out */
                foreach (var guidType in preferedSubpictureTypes)
                {
                    dvdSubPicturePinOut = FindPinInGraphByMediaType(guidType, /* GUID of the media type being searched for */
                                                                    PinDirection.Output,
                                                                    m_graph); /* Our current graph */
                    if (dvdSubPicturePinOut != null)
                    {
                        break;
                    }
                }

                if (dvdSubPicturePinOut == null)
                {
                    throw new Exception("Could not find the sub picture pin out");
                }

                /* Here we connect the Dvd sub picture pin to the video renderer.
                 * This enables the overlays on Dvd menus and some closed
                 * captions to be rendered. */
                hr = m_graph.Connect(dvdSubPicturePinOut, rendererPin);
                DsError.ThrowExceptionForHR(hr);

                /* Search for the Line21 out in the graph */
                IPin line21Out = FindPinInGraphByMediaType(MediaType.AuxLine21Data,
                                                           PinDirection.Output,
                                                           m_graph);
                if (line21Out == null)
                {
                    throw new Exception("Could not find the Line21 pin out");
                }

                /* We connect our line21Out out in to the dummy renderer
                 * this is what ultimately makes interactive DVDs work with
                 * VMR9 in renderless (for WPF) */
                hr = m_graph.Connect(line21Out, m_dummyRendererPin);
                DsError.ThrowExceptionForHR(hr);

                /* This is the dummy renderers Win32 window. */
                m_dummyRenderWindow = dummyRenderer as IVideoWindow;

                if (m_dummyRenderWindow == null)
                {
                    throw new Exception("Could not QueryInterface for IVideoWindow");
                }

                ConfigureDummyWindow();

                /* Setup our base classes with this filter graph */
                SetupFilterGraph(m_graph);

                /* Sets the NaturalVideoWidth/Height */
                SetNativePixelSizes(m_renderer);
            }
            catch (Exception ex)
            {
                // Any failure tears the partial graph down and is reported
                // through the MediaFailed event rather than rethrown.
                FreeResources();
                InvokeMediaFailed(new MediaFailedEventArgs(ex.Message, ex));
                return;
            }

            InvokeMediaOpened();
        }
Esempio n. 35
0
        /// <summary>
        /// Enumerates the VBR-capable Windows Media audio codec formats that
        /// match this encoder's configuration (m_vbr, m_subType and,
        /// optionally, the PCM channel/sample-rate/bit-depth constraints).
        /// </summary>
        /// <param name="pProfileManager2">An existing profile manager to use, or
        /// null to create (and release) a temporary one internally.</param>
        /// <returns>One <see cref="WMAFormatInfo"/> per matching codec format.</returns>
        internal IEnumerable<WMAFormatInfo> EnumerateFormatInfo(IWMProfileManager pProfileManager2)
        {
            IWMProfileManager pProfileManager = null;

            try
            {
                // Only create our own profile manager when the caller did not
                // supply one; ours is released in the finally block below.
                if (pProfileManager2 == null)
                {
                    WMUtils.WMCreateProfileManager(out pProfileManager);
                }
                var pCodecInfo3 = (pProfileManager2 ?? pProfileManager) as IWMCodecInfo3;
                int cCodecs;
                pCodecInfo3.GetCodecInfoCount(MediaType.Audio, out cCodecs);
                for (int iCodec = 0; iCodec < cCodecs; iCodec++)
                {
                    // First call sizes the name buffer, second call fills it.
                    int szCodecName = 0;
                    pCodecInfo3.GetCodecName(MediaType.Audio, iCodec, null, ref szCodecName);
                    var codecName = new StringBuilder(szCodecName);
                    pCodecInfo3.GetCodecName(MediaType.Audio, iCodec, codecName, ref szCodecName);

                    // Skip codecs that cannot do VBR at all.
                    var    attrDataType = new AttrDataType();
                    int    dwAttrSize   = 0;
                    byte[] pAttrValue   = new byte[4];
                    pCodecInfo3.GetCodecProp(MediaType.Audio, iCodec, Constants.g_wszIsVBRSupported, out attrDataType, pAttrValue, ref dwAttrSize);
                    if (pAttrValue[0] != 1)
                    {
                        continue;
                    }

                    // Configure the enumeration for single-pass VBR or CBR so
                    // GetCodecFormatCount below reports the right format set.
                    if (m_vbr)
                    {
                        pCodecInfo3.SetCodecEnumerationSetting(MediaType.Audio, iCodec, Constants.g_wszVBREnabled, AttrDataType.BOOL, new byte[] { 1, 0, 0, 0 }, 4);
                        pCodecInfo3.SetCodecEnumerationSetting(MediaType.Audio, iCodec, Constants.g_wszNumPasses, AttrDataType.DWORD, new byte[] { 1, 0, 0, 0 }, 4);
                    }
                    else
                    {
                        pCodecInfo3.SetCodecEnumerationSetting(MediaType.Audio, iCodec, Constants.g_wszVBREnabled, AttrDataType.BOOL, new byte[] { 0, 0, 0, 0 }, 4);
                    }

                    int cFormat;
                    pCodecInfo3.GetCodecFormatCount(MediaType.Audio, iCodec, out cFormat);
                    for (int iFormat = 0; iFormat < cFormat; iFormat++)
                    {
                        IWMStreamConfig pStreamConfig;
                        int             cchDesc = 1024;
                        StringBuilder   szDesc  = new StringBuilder(cchDesc);
                        pCodecInfo3.GetCodecFormatDesc(MediaType.Audio, iCodec, iFormat, out pStreamConfig, szDesc, ref cchDesc);

                        // Combined audio/video formats are not usable here.
                        if (szDesc.ToString().Contains("(A/V)"))
                        {
                            continue;
                        }
                        try
                        {
                            // Size-probe call with a null media type, then
                            // fetch the real structure.  formatSize is the
                            // variable-length tail beyond the fixed struct.
                            var         pProps     = pStreamConfig as IWMMediaProps;
                            int         cbType     = 0;
                            AMMediaType pMediaType = null;
                            pProps.GetMediaType(pMediaType, ref cbType);
                            pMediaType            = new AMMediaType();
                            pMediaType.formatSize = cbType - Marshal.SizeOf(typeof(AMMediaType));
                            pProps.GetMediaType(pMediaType, ref cbType);
                            try
                            {
                                if (pMediaType.majorType == MediaType.Audio && pMediaType.formatType == FormatType.WaveEx && pMediaType.subType == m_subType)
                                {
                                    var pcm = WaveFormatExtensible.FromMediaType(pMediaType).GetConfig();
                                    // Accept the format when no PCM constraint is set, or when
                                    // it matches channels/rate and meets the bit depth.
                                    if (PCM == null || (pcm.ChannelCount == PCM.ChannelCount && pcm.SampleRate == PCM.SampleRate && pcm.BitsPerSample >= PCM.BitsPerSample))
                                    {
                                        yield return new WMAFormatInfo()
                                        {
                                            codec      = iCodec,
                                            codecName  = codecName.ToString(),
                                            format     = iFormat,
                                            formatName = szDesc.ToString(),
                                            subType    = pMediaType.subType,
                                            pcm        = pcm
                                        };
                                    }
                                }
                            }
                            finally
                            {
                                WMUtils.FreeWMMediaType(pMediaType);
                            }
                        }
                        finally
                        {
                            Marshal.ReleaseComObject(pStreamConfig);
                        }
                    }
                }
            }
            finally
            {
                if (pProfileManager != null)
                {
                    Marshal.ReleaseComObject(pProfileManager);
                }
            }
        }
        /// <summary>
        /// ICustomMarshaler callback: allocates the unmanaged buffer that will
        /// receive an AMMediaType (fixed struct plus its variable-length
        /// format block) from the callee.
        /// </summary>
        /// <param name="managedObj">The AMMediaType being marshaled.</param>
        /// <returns>A CoTaskMem buffer large enough for the struct and its format data.</returns>
        public IntPtr MarshalManagedToNative(object managedObj)
        {
            // Cache the media type so the companion marshaler callbacks can
            // reference it later.
            m_mt = managedObj as AMMediaType;

            int cbTotal = Marshal.SizeOf(m_mt) + m_mt.formatSize;
            IntPtr pNative = Marshal.AllocCoTaskMem(cbTotal);

            // This marshaler is only used for output parameters, so the buffer
            // is deliberately left uninitialized: the callee overwrites it in
            // full, and copying the managed data over first would be wasted work.
            return pNative;
        }
Esempio n. 37
0
        /// <summary>
        /// Reads the properties of the first bitmap to finish initializing the
        /// writer: builds the VideoInfoHeader and AMMediaType from the bitmap's
        /// dimensions and pixel format, applies them to the input properties,
        /// and starts the writing session.
        /// </summary>
        /// <param name="hBitmap">First bitmap; its size and pixel format define the stream.</param>
        /// <exception cref="Exception">Thrown for pixel formats other than 32/24/16bpp RGB.</exception>
        private void Initialize(Bitmap hBitmap)
        {
            int hr;
            AMMediaType mediaType = new AMMediaType();
            VideoInfoHeader vih = new VideoInfoHeader();

            // Describe the bitmap layout in the BITMAPINFOHEADER.
            vih.BmiHeader.Size = Marshal.SizeOf(typeof(BitmapInfoHeader));
            vih.BmiHeader.Width = hBitmap.Width;
            vih.BmiHeader.Height = hBitmap.Height;
            vih.BmiHeader.Planes = 1;

            // Compression through ClrImportant don't seem to be used by the
            // writer; zero them explicitly anyway.
            vih.BmiHeader.Compression = 0;
            vih.BmiHeader.ImageSize = 0;
            vih.BmiHeader.XPelsPerMeter = 0;
            vih.BmiHeader.YPelsPerMeter = 0;
            vih.BmiHeader.ClrUsed = 0;
            vih.BmiHeader.ClrImportant = 0;

            // Map the GDI+ pixel format onto the matching media subtype and
            // bit depth; anything else is unsupported.
            if (hBitmap.PixelFormat == PixelFormat.Format32bppRgb)
            {
                mediaType.subType = MediaSubType.RGB32;
                vih.BmiHeader.BitCount = 32;
            }
            else if (hBitmap.PixelFormat == PixelFormat.Format24bppRgb)
            {
                mediaType.subType = MediaSubType.RGB24;
                vih.BmiHeader.BitCount = 24;
            }
            else if (hBitmap.PixelFormat == PixelFormat.Format16bppRgb555)
            {
                mediaType.subType = MediaSubType.RGB555;
                vih.BmiHeader.BitCount = 16;
            }
            else
            {
                throw new Exception("Unrecognized Pixelformat in bitmap");
            }

            vih.SrcRect = new Rectangle(0, 0, hBitmap.Width, hBitmap.Height);
            vih.TargetRect = vih.SrcRect;
            vih.BmiHeader.ImageSize = hBitmap.Width * hBitmap.Height * (vih.BmiHeader.BitCount / 8);
            vih.BitRate = vih.BmiHeader.ImageSize * m_iFrameRate;
            vih.BitErrorRate = 0;
            vih.AvgTimePerFrame = 10000 * 1000 / m_iFrameRate; // = 10,000,000 / fps

            // Fill in the media type that wraps the VideoInfoHeader.
            mediaType.majorType = MediaType.Video;
            mediaType.fixedSizeSamples = true;
            mediaType.temporalCompression = false;
            mediaType.sampleSize = vih.BmiHeader.ImageSize;
            mediaType.formatType = FormatType.VideoInfo;
            mediaType.unkPtr = IntPtr.Zero;
            mediaType.formatSize = Marshal.SizeOf(typeof(VideoInfoHeader));

            // Pin the VideoInfoHeader so its address can be handed to COM for
            // the duration of the SetMediaType call.
            GCHandle pinnedHeader = GCHandle.Alloc(vih, GCHandleType.Pinned);

            try
            {
                mediaType.formatPtr = pinnedHeader.AddrOfPinnedObject();

                hr = m_pInputProps.SetMediaType(mediaType);
                Marshal.ThrowExceptionForHR(hr);
            }
            finally
            {
                pinnedHeader.Free();
            }

            // Apply the configured input properties to the file writer.
            hr = m_pWMWriter.SetInputProps(m_dwVideoInput, m_pInputProps);
            Marshal.ThrowExceptionForHR(hr);

            // Configuration done — open the writing session.
            hr = m_pWMWriter.BeginWriting();
            Marshal.ThrowExceptionForHR(hr);

            m_Init = true;
        }
 // ICustomMarshaler callback.  It appears this routine is never called
 // by the runtime for this marshaler, but drop the cached media-type
 // reference anyway so it can be garbage collected.
 public void CleanUpManagedData(object ManagedObj)
 {
     m_mt = null;
 }
Esempio n. 39
0
        /// <summary>
        /// Worker thread: builds a DirectShow graph that reads the file named
        /// by _fileName through a sample grabber, runs it, and services
        /// play/pause/seek requests in a polling loop until the stream ends or
        /// _stopEvent is signalled.  Raises VideoSourceError on failure and
        /// PlayingFinished when done.
        /// </summary>
        ///
        private void WorkerThread()
        {
            ReasonToFinishPlaying reasonToStop = ReasonToFinishPlaying.StoppedByUser;

            // grabber (receives frame callbacks from the sample grabber filter)
            Grabber grabber = new Grabber(this);

            // objects
            object graphObject   = null;
            object grabberObject = null;

            // interfaces
            IGraphBuilder  graph         = null;
            IBaseFilter    sourceBase    = null;
            IBaseFilter    grabberBase   = null;
            ISampleGrabber sampleGrabber = null;
            IMediaControl  mediaControl  = null;

            IMediaEventEx mediaEvent   = null;
            IMediaSeeking mediaSeeking = null;

            try
            {
                // get type for filter graph
                Type type = Type.GetTypeFromCLSID(Clsid.FilterGraph);
                if (type == null)
                {
                    throw new ApplicationException("Failed creating filter graph");
                }

                // create filter graph
                graphObject = Activator.CreateInstance(type);
                graph       = (IGraphBuilder)graphObject;

                // create source device's object
                graph.AddSourceFilter(_fileName, "source", out sourceBase);
                if (sourceBase == null)
                {
                    throw new ApplicationException("Failed creating source filter");
                }

                // get type for sample grabber
                type = Type.GetTypeFromCLSID(Clsid.SampleGrabber);
                if (type == null)
                {
                    throw new ApplicationException("Failed creating sample grabber");
                }

                // create sample grabber
                grabberObject = Activator.CreateInstance(type);
                sampleGrabber = (ISampleGrabber)grabberObject;
                grabberBase   = (IBaseFilter)grabberObject;

                // add grabber filters to graph
                graph.AddFilter(grabberBase, "grabber");

                // set media type: request uncompressed RGB24 video from the grabber
                AMMediaType mediaType = new AMMediaType
                {
                    MajorType = MediaType.Video,
                    SubType   = MediaSubType.RGB24
                };
                sampleGrabber.SetMediaType(mediaType);

                // connect pins
                int pinToTry = 0;

                IPin inPin  = Tools.GetInPin(grabberBase, 0);
                IPin outPin = null;

                // find output pin acceptable by sample grabber: try each source
                // output pin in turn until Connect succeeds
                while (true)
                {
                    outPin = Tools.GetOutPin(sourceBase, pinToTry);

                    if (outPin == null)
                    {
                        Marshal.ReleaseComObject(inPin);
                        throw new ApplicationException("Did not find acceptable output video pin in the given source");
                    }

                    if (graph.Connect(outPin, inPin) < 0)
                    {
                        Marshal.ReleaseComObject(outPin);
                        outPin = null;
                        pinToTry++;
                    }
                    else
                    {
                        break;
                    }
                }

                Marshal.ReleaseComObject(outPin);
                Marshal.ReleaseComObject(inPin);

                // get media type of the connection to learn the frame dimensions
                if (sampleGrabber.GetConnectedMediaType(mediaType) == 0)
                {
                    VideoInfoHeader vih = (VideoInfoHeader)Marshal.PtrToStructure(mediaType.FormatPtr, typeof(VideoInfoHeader));

                    grabber.Width  = vih.BmiHeader.Width;
                    grabber.Height = vih.BmiHeader.Height;
                    // NOTE(review): mediaType is only disposed on this success
                    // path — confirm the failure path cannot leak its format block.
                    mediaType.Dispose();
                }

                // let's do rendering, if we don't need to prevent freezing
                if (!_preventFreezing)
                {
                    // render pin
                    // NOTE(review): the pin returned by GetOutPin here is not
                    // released — verify against the release pattern used above.
                    graph.Render(Tools.GetOutPin(grabberBase, 0));

                    // configure video window
                    IVideoWindow window = (IVideoWindow)graphObject;
                    window.put_AutoShow(false);
                    window = null;
                }

                // configure sample grabber
                sampleGrabber.SetBufferSamples(false);
                sampleGrabber.SetOneShot(false);
                sampleGrabber.SetCallback(grabber, 1);

                // disable clock, if someone requested it
                if (!_referenceClockEnabled)
                {
                    IMediaFilter mediaFilter = (IMediaFilter)graphObject;
                    mediaFilter.SetSyncSource(null);
                }

                // get media control
                mediaControl = (IMediaControl)graphObject;

                // get media events' interface
                mediaEvent = (IMediaEventEx)graphObject;

                // Get media seeking & check seeking capability
                mediaSeeking = (IMediaSeeking)graphObject;
                mediaSeeking.GetDuration(out _duration);
                _onVideoLoad(_duration);
                const SeekingCapabilities caps = SeekingCapabilities.CanSeekAbsolute | SeekingCapabilities.CanGetDuration;
                SeekingCapabilities       canSeekCap;
                int hr = mediaSeeking.GetCapabilities(out canSeekCap);
                if (hr < 0)
                {
                    throw new ApplicationException("Failed getting seeking capabilities");
                }
                // seeking is enabled only if BOTH required capabilities are present
                _isSeekEnabled = (canSeekCap & caps) == caps;

                // run the graph, then poll for control-flag changes and graph
                // events every 100 ms until stopped or the stream completes
                mediaControl.Run();
                IsPlaying = true;
                do
                {
                    // GetCurrentTime
                    if (_isGetCurrentTime)
                    {
                        mediaSeeking.GetCurrentPosition(out _currentGetTime);
                        _isGetCurrentTime = false;
                    }
                    if (IsSetPause)
                    {
                        mediaControl.Pause();
                        IsSetPause = false;
                        IsPlaying  = false;
                    }
                    if (IsSetPlay)
                    {
                        mediaControl.Run();
                        IsSetPlay = false;
                        IsPlaying = true;
                    }
                    // SetCurrentTime
                    if (_isSetCurrentTime)
                    {
                        long stop = 0;
                        mediaSeeking.SetPositions(ref _currentSetTime, SeekingFlags.AbsolutePositioning, ref stop,
                                                  SeekingFlags.NoPositioning);
                        _isSetCurrentTime = false;
                    }
                    IntPtr   p1;
                    IntPtr   p2;
                    DsEvCode code;
                    if (mediaEvent.GetEvent(out code, out p1, out p2, 0) >= 0)
                    {
                        // event parameters must always be freed, whatever the code
                        mediaEvent.FreeEventParams(code, p1, p2);

                        if (code == DsEvCode.Complete)
                        {
                            reasonToStop = ReasonToFinishPlaying.EndOfStreamReached;
                            break;
                        }
                    }
                } while (!_stopEvent.WaitOne(100, false));
                IsPlaying = false;
                mediaControl.Stop();
            }
            catch (Exception exception)
            {
                // provide information to clients
                if (VideoSourceError != null)
                {
                    VideoSourceError(this, new VideoSourceErrorEventArgs(exception.Message));
                }
            }
            finally
            {
                // release all objects: null the interface references first,
                // then release the underlying COM objects
                graph         = null;
                grabberBase   = null;
                sampleGrabber = null;
                mediaControl  = null;
                mediaEvent    = null;
                mediaSeeking  = null;

                if (graphObject != null)
                {
                    Marshal.ReleaseComObject(graphObject);
                    graphObject = null;
                }
                if (sourceBase != null)
                {
                    Marshal.ReleaseComObject(sourceBase);
                    sourceBase = null;
                }
                if (grabberObject != null)
                {
                    Marshal.ReleaseComObject(grabberObject);
                    grabberObject = null;
                }
            }

            if (PlayingFinished != null)
            {
                PlayingFinished(this, reasonToStop);
            }
        }
Esempio n. 40
0
        /// <summary>
        /// Determines whether a pin offers a media type matching the given
        /// major type and subtype.
        /// </summary>
        /// <param name="pin">The pin whose media types are enumerated.</param>
        /// <param name="mediaType">The major media type to look for.</param>
        /// <param name="mediaSubType">The media subtype to look for.</param>
        /// <returns>True if the pin exposes a matching media type; otherwise false.</returns>
        private static bool checkMediaTypes(IPin pin, Guid mediaType, Guid mediaSubType)
        {
            IEnumMediaTypes enumMediaTypes = null;
            AMMediaType[] mediaTypes = new AMMediaType[1];

            int reply = pin.EnumMediaTypes(out enumMediaTypes);
            DsError.ThrowExceptionForHR(reply);

            try
            {
                while (enumMediaTypes.Next(mediaTypes.Length, mediaTypes, IntPtr.Zero) == 0)
                {
                    foreach (AMMediaType currentMediaType in mediaTypes)
                    {
                        // NOTE(review): the AMMediaType instances returned by
                        // Next() are never freed here; confirm whether a helper
                        // such as DsUtils.FreeAMMediaType should release their
                        // format blocks.
                        if (currentMediaType != null &&
                            currentMediaType.majorType == mediaType &&
                            currentMediaType.subType == mediaSubType)
                        {
                            return (true);
                        }
                    }
                }

                return (false);
            }
            finally
            {
                // Release the enumerator on every exit path, including when a
                // COM call above throws (the original leaked it on exceptions).
                Marshal.ReleaseComObject(enumMediaTypes);
            }
        }
Esempio n. 41
0
        /// <summary> Build the capture graph for the grabber. </summary>
        /// <param name="dev">The capture device to add as the video source.</param>
        /// <param name="media">The capture format; SetConfigParms applies it and frees it.</param>
        private void SetupGraph(DsDevice dev, AMMediaType media)
        {
            int hr;

            ISampleGrabber        sampGrabber     = null;
            IBaseFilter           capFilter       = null;
            ICaptureGraphBuilder2 capGraph        = null;
            IBaseFilter           aviDecompressor = null;
            IAMCrossbar           crossbar        = null;

            // Get the graphbuilder object
            m_FilterGraph = (IFilterGraph2) new FilterGraph();
            m_mediaCtrl   = m_FilterGraph as IMediaControl;
            try
            {
                // Get the ICaptureGraphBuilder2
                capGraph = (ICaptureGraphBuilder2) new CaptureGraphBuilder2();

                // Get the SampleGrabber interface
                sampGrabber = (ISampleGrabber) new SampleGrabber();

                // Start building the graph
                hr = capGraph.SetFiltergraph(m_FilterGraph);
                DsError.ThrowExceptionForHR(hr);

                // Add the video device
                hr = m_FilterGraph.AddSourceFilterForMoniker(dev.Mon, null, "Video input", out capFilter);
                DsError.ThrowExceptionForHR(hr);

                // add video crossbar
                // thanks to Andrew Fernie - this is to get tv tuner cards working
                object o;

                hr = capGraph.FindInterface(PinCategory.Capture, MediaType.Video, capFilter, typeof(IAMCrossbar).GUID, out o);
                if (hr >= 0)
                {
                    crossbar = (IAMCrossbar)o;
                    int oPin, iPin;
                    int ovLink, ivLink;
                    ovLink = ivLink = 0;

                    crossbar.get_PinCounts(out oPin, out iPin);
                    int pIdxRel;
                    PhysicalConnectorType tp;

                    // Find the composite-video input pin index...
                    for (int i = 0; i < iPin; i++)
                    {
                        crossbar.get_CrossbarPinInfo(true, i, out pIdxRel, out tp);
                        if (tp == PhysicalConnectorType.Video_Composite)
                        {
                            ivLink = i;
                        }
                    }

                    // ...and the video-decoder output pin index.
                    for (int i = 0; i < oPin; i++)
                    {
                        crossbar.get_CrossbarPinInfo(false, i, out pIdxRel, out tp);
                        if (tp == PhysicalConnectorType.Video_VideoDecoder)
                        {
                            ovLink = i;
                        }
                    }

                    try
                    {
                        // Route the composite input to the decoder output.
                        crossbar.Route(ovLink, ivLink);
                        o = null;
                    }
                    catch
                    {
                        // NOTE(review): the crossbar itself was obtained above; what
                        // actually failed here is the Route call.  Message kept for
                        // backward compatibility.
                        throw new Exception("Failed to get IAMCrossbar");
                    }
                }

                // Add an AVI Decompressor so compressed capture formats can be rendered.
                aviDecompressor = (IBaseFilter) new AVIDec();
                hr = m_FilterGraph.AddFilter(aviDecompressor, "AVI Decompressor");
                DsError.ThrowExceptionForHR(hr);

                IBaseFilter baseGrabFlt = (IBaseFilter)sampGrabber;
                ConfigureSampleGrabber(sampGrabber);

                // Add the frame grabber to the graph
                hr = m_FilterGraph.AddFilter(baseGrabFlt, "Ds.NET Grabber");
                DsError.ThrowExceptionForHR(hr);

                SetConfigParms(capGraph, capFilter, media);

                // Prefer rendering through the decompressor; fall back to a direct
                // connection when that fails.
                hr = capGraph.RenderStream(PinCategory.Capture, MediaType.Video, capFilter, aviDecompressor, baseGrabFlt);
                if (hr < 0)
                {
                    hr = capGraph.RenderStream(PinCategory.Capture, MediaType.Video, capFilter, null, baseGrabFlt);
                }

                DsError.ThrowExceptionForHR(hr);

                SaveSizeInfo(sampGrabber);
            }
            finally
            {
                // Fix: the crossbar and AVI decompressor COM references were
                // previously never released (the graph keeps its own references).
                if (crossbar != null)
                {
                    Marshal.ReleaseComObject(crossbar);
                    crossbar = null;
                }
                if (aviDecompressor != null)
                {
                    Marshal.ReleaseComObject(aviDecompressor);
                    aviDecompressor = null;
                }
                if (capFilter != null)
                {
                    Marshal.ReleaseComObject(capFilter);
                    capFilter = null;
                }
                if (sampGrabber != null)
                {
                    Marshal.ReleaseComObject(sampGrabber);
                    sampGrabber = null;
                }
                if (capGraph != null)
                {
                    Marshal.ReleaseComObject(capGraph);
                    capGraph = null;
                }
            }
        }
Esempio n. 42
0
        /// <summary>
        /// Finds the index of the first pin (in the given direction) whose FIRST
        /// enumerated media type matches the given major type, and the given subtype
        /// (or any subtype when <paramref name="sType"/> is MediaSubType.Null).
        /// </summary>
        /// <param name="filter">Filter whose pins are searched.</param>
        /// <param name="direction">Pin direction to enumerate.</param>
        /// <param name="mType">Required major media type.</param>
        /// <param name="sType">Required subtype, or MediaSubType.Null to accept any.</param>
        /// <returns>The matching pin index; 0 when no pin matches (same as a match at index 0).</returns>
        public static int FindPinIndexByMediaType(IBaseFilter filter, PinDirection direction, Guid mType, Guid sType)
        {
            int hr;
            int index = 0;
            int pinIndex = 0;

            IPin tPin = DsFindPin.ByDirection(filter, direction, index);
            while (tPin != null)
            {
                IEnumMediaTypes emtDvr = null;
                AMMediaType[] amtDvr = new AMMediaType[1];
                bool matched = false;

                try
                {
                    tPin.EnumMediaTypes(out emtDvr);

                    // NOTE(review): only the pin's first media type is inspected;
                    // confirm that checking every enumerated type is not required.
                    hr = emtDvr.Next(1, amtDvr, IntPtr.Zero);
                    DsError.ThrowExceptionForHR(hr);

                    if (amtDvr[0] != null && amtDvr[0].majorType == mType && (amtDvr[0].subType == sType || sType == MediaSubType.Null))
                    {
                        pinIndex = index;
                        matched = true;
                    }
                }
                finally
                {
                    DsUtils.FreeAMMediaType(amtDvr[0]);
                    if (emtDvr != null)
                        Marshal.ReleaseComObject(emtDvr);

                    // Fix: release the pin here so it is also released when a match
                    // breaks out of the loop or when enumeration throws (previously
                    // the matched pin was leaked).
                    Marshal.ReleaseComObject(tPin);
                    tPin = null;
                }

                if (matched)
                    break;

                index++;
                tPin = DsFindPin.ByDirection(filter, direction, index);
            }

            return pinIndex;
        }
Esempio n. 43
0
        // Set the Framerate, and video size
        /// <summary>
        /// Applies the given media type to the capture pin's stream configuration.
        /// Takes ownership of <paramref name="media"/> and frees it.
        /// </summary>
        /// <param name="capGraph">Capture graph builder used to locate the pin interface.</param>
        /// <param name="capFilter">The capture source filter.</param>
        /// <param name="media">The format to apply; always freed before returning.</param>
        /// <exception cref="Exception">Thrown when IAMStreamConfig cannot be obtained.</exception>
        private void SetConfigParms(ICaptureGraphBuilder2 capGraph, IBaseFilter capFilter, AMMediaType media)
        {
            int    hr;
            object o;

            // Find the stream config interface
            hr = capGraph.FindInterface(
                PinCategory.Capture, MediaType.Video, capFilter, typeof(IAMStreamConfig).GUID, out o);

            IAMStreamConfig videoStreamConfig = o as IAMStreamConfig;

            if (videoStreamConfig == null)
            {
                throw new Exception("Failed to get IAMStreamConfig");
            }

            try
            {
                // Set the new format
                hr = videoStreamConfig.SetFormat(media);
                DsError.ThrowExceptionForHR(hr);
            }
            finally
            {
                // Fix: release the COM interface returned by FindInterface (it was
                // previously leaked) and free the media type even when SetFormat fails.
                Marshal.ReleaseComObject(videoStreamConfig);
                DsUtils.FreeAMMediaType(media);
                media = null;
            }
        }
Esempio n. 44
0
            /// <summary> Set the options on the sample grabber </summary>
            /// <param name="sampGrabber">Grabber to configure for RGB24 video with a BufferCB callback.</param>
            private void ConfigureSampleGrabber(ISampleGrabber sampGrabber)
            {
                AMMediaType media;
                int hr;

                // Set the media type to Video/RBG24
                media = new AMMediaType();
                media.majorType = MediaType.Video;
                media.subType = MediaSubType.RGB24;
                media.formatType = FormatType.VideoInfo;
                try
                {
                    hr = sampGrabber.SetMediaType(media);
                    DsError.ThrowExceptionForHR(hr);
                }
                finally
                {
                    // Fix: free the media type even when SetMediaType fails
                    // (previously it leaked on the exception path).
                    DsUtils.FreeAMMediaType(media);
                    media = null;
                }

                // Choose to call BufferCB instead of SampleCB
                hr = sampGrabber.SetCallback(this, 1);
                DsError.ThrowExceptionForHR(hr);
            }
Esempio n. 45
0
        /// <summary>
        /// Enumerates the stream capabilities of the configured camera's capture pin
        /// and returns the configurations whose bit depth passes BPPIsValid, sorted.
        /// </summary>
        /// <returns>The accepted webcam configurations, sorted via WebcamConfiguration's ordering.</returns>
        internal override WebcamConfiguration[] QueryFormats()
        {
            List<WebcamConfiguration> result = new List<WebcamConfiguration>();

            // Fix: removed the duplicated "cameraDevice = cameraDevice =" assignment.
            DsDevice cameraDevice = DsDevice.GetDevicesOfCat(FilterCategory.VideoInputDevice)[m_cameraDeviceIndex];
            IFilterGraph2 filterGraph = null;
            IBaseFilter   cam         = null;
            IPin          camOutPin   = null;

            try
            {
                filterGraph = (IFilterGraph2) new FilterGraph();
                DsError.ThrowExceptionForHR(filterGraph.AddSourceFilterForMoniker(cameraDevice.Mon, null, cameraDevice.Name, out cam));
                camOutPin = DsFindPin.ByCategory(cam, PinCategory.Capture, 0);

                if (camOutPin != null)
                {
                    IAMStreamConfig config = (IAMStreamConfig)camOutPin;

                    int piCount, piSize;
                    config.GetNumberOfCapabilities(out piCount, out piSize);

                    // Pinned scratch buffer that GetStreamCaps fills with the caps blob.
                    byte[]   temp       = new byte[piSize];
                    GCHandle tempHandle = GCHandle.Alloc(temp, GCHandleType.Pinned);
                    try
                    {
                        for (int x = 0; x < piCount; x++)
                        {
                            AMMediaType mediaType = null;
                            try
                            {
                                DsError.ThrowExceptionForHR(config.GetStreamCaps(x, out mediaType, tempHandle.AddrOfPinnedObject()));
                                VideoInfoHeader v = (VideoInfoHeader)Marshal.PtrToStructure(mediaType.formatPtr, typeof(VideoInfoHeader));

                                if (BPPIsValid(v.BmiHeader.BitCount))
                                {
                                    result.Add(new WebcamConfiguration(new Size(v.BmiHeader.Width, v.BmiHeader.Height), v.BmiHeader.BitCount, mediaType.subType));
                                }
                                else
                                {
                                    //System.Diagnostics.Debug.WriteLine("BPP " + v.BmiHeader.BitCount + " was not accepted!");
                                }
                            }
                            finally
                            {
                                if (mediaType != null)
                                {
                                    DsUtils.FreeAMMediaType(mediaType);
                                    mediaType = null;
                                }
                            }
                        }
                    }
                    finally
                    {
                        tempHandle.Free();
                    }
                }
            }
            finally
            {
                if (camOutPin != null)
                {
                    Marshal.ReleaseComObject(camOutPin);
                    camOutPin = null;
                }
                // Fix: the camera source filter itself was previously never released.
                if (cam != null)
                {
                    Marshal.ReleaseComObject(cam);
                    cam = null;
                }
                if (filterGraph != null)
                {
                    Marshal.ReleaseComObject(filterGraph);
                    filterGraph = null;
                }
            }

            result.Sort();
            return(result.ToArray());
        }
Esempio n. 46
0
        /// <summary>
        /// Builds a DirectShow capture graph for the given device moniker at a fixed
        /// 1600x1200 resolution, wires a sample grabber callback, verifies the
        /// connected format, and starts the graph running.
        /// </summary>
        /// <param name="name">Friendly device name.</param>
        /// <param name="deviceMoniker">Device moniker string used to create the source filter.</param>
        public DirectShowCapture(string name, string deviceMoniker)
        {
            this.Name = name;
            this.Uuid = deviceMoniker;
            this.Width = 1600;
            this.Height = 1200;

            captureGraphBuilder2 = Activator.CreateInstance(Type.GetTypeFromCLSID(Clsid.CaptureGraphBuilder2)) as ICaptureGraphBuilder2;
            filterGraph2 = Activator.CreateInstance(Type.GetTypeFromCLSID(Clsid.FilterGraph)) as IFilterGraph2;

            sampleGrabberBaseFilter = Activator.CreateInstance(Type.GetTypeFromCLSID(Clsid.SampleGrabber)) as IBaseFilter;
            sampleGrabber = sampleGrabberBaseFilter as ISampleGrabber;

            captureGraphBuilder2.SetFiltergraph(filterGraph2 as IGraphBuilder);

            FilterInfo.CreateFilter(this.Uuid, out captureSourceBaseFilter);
            // Run without a reference clock so samples are delivered unthrottled.
            captureSourceBaseFilter.SetSyncSource(IntPtr.Zero);
            sampleGrabberBaseFilter.SetSyncSource(IntPtr.Zero);

            videoProcAmp = captureSourceBaseFilter as IAMVideoProcAmp;
            cameraControl = captureSourceBaseFilter as IAMCameraControl;
            ksPropertySet = captureSourceBaseFilter as IKsPropertySet;

            videoProcAmp.Set(VideoProcAmpProperty.ColorEnable, 1, VideoProcAmpFlags.Manual);
            // NOTE(review): 1000 / 120 is integer division and yields 8 whole seconds,
            // not 1/120 s - confirm the intended exposure time before changing.
            ksPropertySet.SetExposure(TimeSpan.FromSeconds(1000 / 120));

            filterGraph2.AddFilter(captureSourceBaseFilter, "source");
            filterGraph2.AddFilter(sampleGrabberBaseFilter, "grabber");

            object streamConfigObj;
            captureGraphBuilder2.FindInterface(PinCategory.Capture, MediaType.Video, captureSourceBaseFilter, typeof(IAMStreamConfig).GUID, out streamConfigObj);
            IAMStreamConfig streamConfig = (IAMStreamConfig)streamConfigObj;

            videoCapabilities = Pentacorn.Vision.Captures.DirectShow.VideoCapabilities.FromStreamConfig(streamConfig);

            // Pick the fastest format that matches the requested frame size.
            var desiredFormat = videoCapabilities.Where(vc => vc.FrameSize.Width == this.Width && vc.FrameSize.Height == this.Height)
                                                 .OrderByDescending(vc => vc.MaxFrameRate).First();
            streamConfig.SetFormat(desiredFormat.MediaType);

            var hr = sampleGrabber.SetMediaType(desiredFormat.MediaType);
            if (hr < 0)
                throw new Win32Exception(hr);

            sampleGrabber.SetBufferSamples(true);
            sampleGrabber.SetOneShot(false);
            sampleGrabber.SetCallback(this, 1);

            // Fix: the RenderStream HRESULT was previously discarded, so the check
            // below re-tested the stale SetMediaType result instead.
            hr = captureGraphBuilder2.RenderStream(PinCategory.Capture, MediaType.Video, captureSourceBaseFilter, null, sampleGrabberBaseFilter);
            if (hr < 0)
                throw new Win32Exception(hr);

            AMMediaType mediaType = new AMMediaType();
            if (sampleGrabber.GetConnectedMediaType(mediaType) >= 0)
            {
                VideoInfoHeader vih = (VideoInfoHeader)Marshal.PtrToStructure(mediaType.FormatPtr, typeof(VideoInfoHeader));

                if (this.Width != vih.BmiHeader.Width)
                    throw new Exception("DirectShow capture width not what's requested.");
                // Fix: the height-mismatch message previously said "width".
                if (this.Height != vih.BmiHeader.Height)
                    throw new Exception("DirectShow capture height not what's requested.");
                mediaType.Dispose();
            }

            mediaControl = (IMediaControl)filterGraph2;
            mediaControl.Run();
        }
Esempio n. 47
0
        /// <summary>
        /// Captures a single still frame from the configured camera.
        /// Builds a temporary graph (camera capture pin -> sample grabber using a
        /// BufferCB callback), runs it, and waits up to 15 seconds for the callback
        /// to deliver a bitmap.  Returns null when a capture is already in progress.
        /// </summary>
        internal override Bitmap TakePicture()
        {
            // A non-null event means another capture is still in flight.
            if (m_callbackCompleted != null)
            {
                return(null);
            }
            //  m_pictureControl = pictureControl;
            m_takePictureEnd = false;

            DsDevice cameraDevice = DsDevice.GetDevicesOfCat(FilterCategory.VideoInputDevice)[m_cameraDeviceIndex];

            IFilterGraph2  filterGraph = null;
            IBaseFilter    cam = null; IPin camCapture = null;                                  // cam
            ISampleGrabber sg = null; IPin sgIn = null;                                         // samplegrabber

            try
            {
                // setup filterGraph & connect camera
                filterGraph = (IFilterGraph2) new FilterGraph();
                DsError.ThrowExceptionForHR(filterGraph.AddSourceFilterForMoniker(cameraDevice.Mon, null, cameraDevice.Name, out cam));

                // setup smarttee and connect so that cam(PinCategory.Capture)->st(PinDirection.Input)
                camCapture = DsFindPin.ByCategory(cam, PinCategory.Capture, 0);                 // output
                // Apply the configured frame dimensions to the capture pin.
                ConfStreamDimensions((IAMStreamConfig)camCapture);

                // connect Camera output to SampleGrabber input
                sg = (ISampleGrabber) new SampleGrabber();

                // configure
                AMMediaType media = new AMMediaType();
                try
                {
                    media.majorType  = MediaType.Video;
                    media.subType    = BPP2MediaSubtype(m_configuration.BPP); // this will ask samplegrabber to do convertions for us
                    media.formatType = FormatType.VideoInfo;
                    DsError.ThrowExceptionForHR(sg.SetMediaType(media));
                }
                finally
                {
                    // Always free the media type, even when SetMediaType fails.
                    DsUtils.FreeAMMediaType(media);
                    media = null;
                }

                DsError.ThrowExceptionForHR(sg.SetCallback(this, 1));                           // 1 = BufferCB
                DsError.ThrowExceptionForHR(filterGraph.AddFilter((IBaseFilter)sg, "SG"));
                sgIn = DsFindPin.ByDirection((IBaseFilter)sg, PinDirection.Input, 0);           // input
                DsError.ThrowExceptionForHR(filterGraph.Connect(camCapture, sgIn));
                GetSizeInfo(sg);

                // wait until timeout - or picture has been taken
                if (m_callbackCompleted == null)
                {
                    m_callbackCompleted = new ManualResetEvent(false);

                    // start filter
                    DsError.ThrowExceptionForHR(((IMediaControl)filterGraph).Run());
                    // NOTE(review): m_callbackState is presumably consumed by the
                    // BufferCB callback; the meaning of the value 5 is not visible here.
                    m_callbackState = 5;
                    //if (m_pictureControl != null)
                    //{
                    //    m_callbackCompleted.WaitOne();
                    //}
                    //else
                    //{
                    if (!m_callbackCompleted.WaitOne(15000, false))
                    {
                        throw new Exception();     //"Timeout while waiting for Picture");
                    }
                    //}
                    // The bitmap reference is captured before the finally block
                    // nulls the m_capturedBitmap field below.
                    return(m_capturedBitmap);
                }
                else
                {
                    return(null);
                }
            }
            finally
            {
                // release allocated objects
                if (m_callbackCompleted != null)
                {
                    m_callbackCompleted.Close();
                    m_callbackCompleted = null;
                }
                if (sgIn != null)
                {
                    Marshal.ReleaseComObject(sgIn);
                    sgIn = null;
                }
                if (sg != null)
                {
                    Marshal.ReleaseComObject(sg);
                    sg = null;
                }
                if (camCapture != null)
                {
                    Marshal.ReleaseComObject(camCapture);
                    camCapture = null;
                }
                if (cam != null)
                {
                    Marshal.ReleaseComObject(cam);
                    cam = null;
                }
                if (filterGraph != null)
                {
                    try
                    {
                        // Best-effort stop before releasing the graph.
                        ((IMediaControl)filterGraph).Stop();
                    }
                    catch (Exception) { }
                    Marshal.ReleaseComObject(filterGraph);
                    filterGraph = null;
                }
                m_capturedBitmap    = null;
                m_callbackCompleted = null;
            }
        }
Esempio n. 48
0
        /// <summary>
        /// Set the media type based on values from BASS.DLL
        /// </summary>
        /// <param name="psc">The IGenericSampleConfig onto which we set the mediatype</param>
        public override void SetMediaType(IGenericSampleConfig psc)
        {
            int lFrequency = 0;
            int lVolume = 0;
            int lPan = 0;

            WaveFormatEx w = new WaveFormatEx();
            BASS_CHANNELINFO lInfo = new BASS_CHANNELINFO();

            Bass.BASS_ChannelGetInfo(m_fChan, lInfo);
            // 8-bit sample flag set => 8 bits per sample, otherwise 16.
            if ((lInfo.flags & (int)BASSStream.BASS_SAMPLE_8BITS) == (int)BASSStream.BASS_SAMPLE_8BITS)
            {
                w.wBitsPerSample = 8;
            }
            else
            {
                w.wBitsPerSample = 16;
            }
            Bass.BASS_ChannelGetAttributes(m_fChan, ref lFrequency, ref lVolume, ref lPan);

            // NOTE(review): cbSize is reused here as the size of the whole WaveFormatEx
            // struct (it doubles as the allocation size and formatSize below); in a
            // plain-PCM WAVEFORMATEX, cbSize would normally be 0 - confirm intended.
            w.cbSize = (short)Marshal.SizeOf(typeof(WaveFormatEx));
            w.nChannels = (short)lInfo.chans;
            w.nSamplesPerSec = lFrequency;
            w.wFormatTag = 1;   // WAVE_FORMAT_PCM
            m_BytesPerSample = (short)(w.nChannels * (w.wBitsPerSample / 8));
            m_Frequency = lFrequency;
            m_Channels = lInfo.chans;
            w.nBlockAlign = (short)m_BytesPerSample;
            // Fix: compute the byte rate only after nBlockAlign is known.  The
            // original also assigned it earlier while nBlockAlign was still 0
            // (a dead assignment that was immediately overwritten here).
            w.nAvgBytesPerSec = w.nSamplesPerSec * w.nBlockAlign;

            AMMediaType amt = new AMMediaType();
            amt.majorType = MediaType.Audio;
            amt.subType = MediaSubType.PCM;
            amt.formatType = FormatType.WaveEx;
            amt.formatPtr = Marshal.AllocCoTaskMem(w.cbSize);
            amt.formatSize = w.cbSize;
            Marshal.StructureToPtr(w, amt.formatPtr, false);

            try
            {
                int hr = psc.SetMediaTypeEx(amt, BUFSIZE);
                DsError.ThrowExceptionForHR(hr);
            }
            finally
            {
                // Fix: free the media type (and its format block) even when
                // SetMediaTypeEx fails - previously it leaked on the throw path.
                DsUtils.FreeAMMediaType(amt);
            }
        }
Esempio n. 49
0
        /// <summary> Build the capture graph for the grabber. </summary>
        /// <returns>True when the graph was assembled successfully; false otherwise
        /// (a message box is shown on failure).</returns>
        private bool SetupGraph()
        {
            try
            {
                // Attach our filter graph to the capture graph builder.
                // Marshal.ThrowExceptionForHR only throws for negative HRESULTs,
                // so calling it unconditionally matches the original hr < 0 guards.
                Marshal.ThrowExceptionForHR(capGraph.SetFiltergraph(graphBuilder));

                // Put the capture device into the graph.
                Marshal.ThrowExceptionForHR(graphBuilder.AddFilter(capFilter, "Ds.NET Video Capture Device"));

                //DsUtils.ShowCapPinDialog( capGraph, capFilter, this.Handle );

                // Ask the sample grabber for RGB24 video frames.
                AMMediaType grabberFormat = new AMMediaType();
                grabberFormat.majorType  = MediaType.Video;
                grabberFormat.subType    = MediaSubType.RGB24;
                grabberFormat.formatType = FormatType.VideoInfo;                // ???
                Marshal.ThrowExceptionForHR(sampGrabber.SetMediaType(grabberFormat));

                // Add the grabber filter itself.
                Marshal.ThrowExceptionForHR(graphBuilder.AddFilter(baseGrabFlt, "Ds.NET Grabber"));

                // Render the preview stream directly...
                Guid pinCategory = PinCategory.Preview;
                Guid mediaKind   = MediaType.Video;
                Marshal.ThrowExceptionForHR(capGraph.RenderStream(ref pinCategory, ref mediaKind, capFilter, null, null)); // baseGrabFlt

                // ...and the capture stream through the grabber.
                pinCategory = PinCategory.Capture;
                mediaKind   = MediaType.Video;
                Marshal.ThrowExceptionForHR(capGraph.RenderStream(ref pinCategory, ref mediaKind, capFilter, null, baseGrabFlt)); // baseGrabFlt

                // Read back the format the grabber actually connected with.
                AMMediaType connectedFormat = new AMMediaType();
                Marshal.ThrowExceptionForHR(sampGrabber.GetConnectedMediaType(connectedFormat));
                if ((connectedFormat.formatType != FormatType.VideoInfo) || (connectedFormat.formatPtr == IntPtr.Zero))
                {
                    throw new NotSupportedException("Unknown Grabber Media Format");
                }

                videoInfoHeader = (VideoInfoHeader)Marshal.PtrToStructure(connectedFormat.formatPtr, typeof(VideoInfoHeader));
                Marshal.FreeCoTaskMem(connectedFormat.formatPtr);
                connectedFormat.formatPtr = IntPtr.Zero;

                // Disable buffering, one-shot mode and callbacks on the grabber;
                // the original only advances on exactly-zero results, so keep that.
                int hr = sampGrabber.SetBufferSamples(false);
                if (hr == 0)
                {
                    hr = sampGrabber.SetOneShot(false);
                }
                if (hr == 0)
                {
                    hr = sampGrabber.SetCallback(null, 0);
                }
                if (hr < 0)
                {
                    Marshal.ThrowExceptionForHR(hr);
                }

                return(true);
            }
            catch (Exception ee)
            {
                MessageBox.Show(this, "Could not setup graph\r\n" + ee.Message, "DirectShow.NET", MessageBoxButtons.OK, MessageBoxIcon.Stop);
                return(false);
            }
        }
Esempio n. 50
0
		/// <summary>
		/// Get the video type for the specified pin interface
		/// </summary>
		/// <param name="streamConfig">Stream config interface of the pin whose current format is queried.</param>
		/// <returns>The ColorSpaceEnum value whose LabelAttribute GUID matches the pin's current media subtype.</returns>
		/// <exception cref="ApplicationException">Thrown when the subtype matches no ColorSpaceEnum value.</exception>
		public ColorSpaceEnum getMediaSubType(IAMStreamConfig streamConfig)
		{
			ColorSpaceEnum retval = ColorSpaceEnum.RGB24;
			bool found;
#if DSHOWNET
			// The DShowNET binding returns the format as a raw pointer that must be
			// marshalled into the managed struct and freed manually below.
			IntPtr pmt = IntPtr.Zero;
#endif
			AMMediaType mediaType = new AMMediaType();

			try 
			{
				// Get the current format info
#if DSHOWNET
				int hr = streamConfig.GetFormat(out pmt);
				if (hr < 0)
				{
					Marshal.ThrowExceptionForHR(hr);
				}
				Marshal.PtrToStructure(pmt, mediaType);
#else
				int hr = streamConfig.GetFormat(out mediaType);
				if (hr < 0)
				{
					Marshal.ThrowExceptionForHR(hr);
				}
#endif

				// Search the Guids to find the correct enum value.
				// Each enum value has a Guid associated with it
				// We store the Guid as a string in a LabelAttribute
				// applied to each enum value. See the ColorSpaceEnum.
				found = false;
				foreach (object c in Enum.GetValues(typeof(ColorSpaceEnum)))
				{
					if (mediaType.subType == new Guid(LabelAttribute.FromMember(c)))
					{
						found = true;
						retval = (ColorSpaceEnum)c;
					}
				}
				if(!found)
				{
#if DEBUG
					// Debug builds also log the FourCC representation of the subtype.
					String mediaSubType;
					MakeFourCC(mediaType.subType, out mediaSubType);
					Debug.WriteLine("Unknown color space (media subtype=" + mediaSubType + "):" + mediaType.subType.ToString());
#endif
					throw new ApplicationException("Unknown color space (media subtype):" + mediaType.subType.ToString());
				}
			}
			finally
			{
				// Free the managed media type and, for DShowNET, the raw format buffer.
				DsUtils.FreeAMMediaType( mediaType );
#if DSHOWNET
				Marshal.FreeCoTaskMem( pmt );
#endif
			}

			return retval;
		}
Esempio n. 51
0
        /// <summary>
        /// Builds a StreamInfo record from an AM media type.  Copies the major, sub
        /// and format type GUIDs and, depending on which format block is present,
        /// extracts bit rate, frame rate, aspect ratio, interlace flags, audio
        /// format data and source dimensions, setting the matching StreamInfoFlags.
        /// </summary>
        /// <param name="pmt">The media type to inspect; its format block is only read.</param>
        /// <returns>A populated StreamInfo.</returns>
        private StreamInfo GatherStreamInfo(AMMediaType pmt)
        {
            var info = new StreamInfo();

            info.MajorType  = pmt.majorType;
            info.SubType    = pmt.subType;
            info.FormatType = pmt.formatType;

            if (pmt.formatType == FormatType.VideoInfo)
            {
                // Only trust the format block when it is large enough.
                if (pmt.formatSize >= Marshal.SizeOf(typeof(VIDEOINFOHEADER)))
                {
                    var vih = (VIDEOINFOHEADER)Marshal.PtrToStructure(pmt.formatPtr, typeof(VIDEOINFOHEADER));
                    info.dwBitRate       = vih.dwBitRate;
                    info.AvgTimePerFrame = vih.AvgTimePerFrame;
                    info.Flags           = StreamInfoFlags.SI_VIDEOBITRATE | StreamInfoFlags.SI_FRAMERATE;

                    info.rcSrc.right  = GetVideoDimension(SourceRect.right, vih.bmiHeader.biWidth);
                    info.rcSrc.bottom = GetVideoDimension(SourceRect.bottom, vih.bmiHeader.biHeight);
                }
                else
                {
                    // Undersized format block: fall back to the raw source rectangle.
                    info.rcSrc.right  = SourceRect.right;
                    info.rcSrc.bottom = SourceRect.bottom;
                }

                info.Flags |= (StreamInfoFlags.SI_RECT | StreamInfoFlags.SI_FOURCC);
            }
            else if (pmt.formatType == FormatType.VideoInfo2)
            {
                if (pmt.formatSize >= Marshal.SizeOf(typeof(VIDEOINFOHEADER2)))
                {
                    var vih2 = (VIDEOINFOHEADER2)Marshal.PtrToStructure(pmt.formatPtr, typeof(VIDEOINFOHEADER2));
                    info.dwBitRate          = vih2.dwBitRate;
                    info.AvgTimePerFrame    = vih2.AvgTimePerFrame;
                    info.dwPictAspectRatioX = vih2.dwPictAspectRatioX;
                    info.dwPictAspectRatioY = vih2.dwPictAspectRatioY;
                    info.dwInterlaceFlags   = vih2.dwInterlaceFlags;
                    info.Flags = StreamInfoFlags.SI_VIDEOBITRATE |
                                 StreamInfoFlags.SI_FRAMERATE | StreamInfoFlags.SI_ASPECTRATIO |
                                 StreamInfoFlags.SI_INTERLACEMODE;

                    info.rcSrc.right  = GetVideoDimension(SourceRect.right, vih2.bmiHeader.biWidth);
                    info.rcSrc.bottom = GetVideoDimension(SourceRect.bottom, vih2.bmiHeader.biHeight);
                }
                else
                {
                    info.rcSrc.right  = SourceRect.right;
                    info.rcSrc.bottom = SourceRect.bottom;
                }

                info.Flags |= (StreamInfoFlags.SI_RECT | StreamInfoFlags.SI_FOURCC);
            }
            else if (pmt.formatType == FormatType.WaveEx)
            {
                // 18 is the packed size of WAVEFORMATEX; Marshal.SizeOf would report
                // the padded managed size, hence the literal (kept from the original).
                if (pmt.formatSize >= /*Marshal.SizeOf(typeof(WAVEFORMATEX))*/ 18)
                {
                    var wfx = (WAVEFORMATEX)Marshal.PtrToStructure(pmt.formatPtr, typeof(WAVEFORMATEX));
                    info.wFormatTag      = wfx.wFormatTag;
                    info.nSamplesPerSec  = wfx.nSamplesPerSec;
                    info.nChannels       = wfx.nChannels;
                    info.wBitsPerSample  = wfx.wBitsPerSample;
                    info.nAvgBytesPerSec = wfx.nAvgBytesPerSec;
                    info.Flags           = StreamInfoFlags.SI_WAVEFORMAT |
                                           StreamInfoFlags.SI_SAMPLERATE | StreamInfoFlags.SI_WAVECHANNELS |
                                           StreamInfoFlags.SI_BITSPERSAMPLE | StreamInfoFlags.SI_AUDIOBITRATE;
                }
            }
            else if (pmt.formatType == FormatType.MpegVideo)
            {
                if (pmt.formatSize >= Marshal.SizeOf(typeof(MPEG1VIDEOINFO)))
                {
                    var m1vi = (MPEG1VIDEOINFO)Marshal.PtrToStructure(pmt.formatPtr, typeof(MPEG1VIDEOINFO));
                    info.dwBitRate       = m1vi.hdr.dwBitRate;
                    info.AvgTimePerFrame = m1vi.hdr.AvgTimePerFrame;
                    info.Flags           = StreamInfoFlags.SI_VIDEOBITRATE | StreamInfoFlags.SI_FRAMERATE;

                    info.rcSrc.right  = GetVideoDimension(SourceRect.right, m1vi.hdr.bmiHeader.biWidth);
                    info.rcSrc.bottom = GetVideoDimension(SourceRect.bottom, m1vi.hdr.bmiHeader.biHeight);
                }
                else
                {
                    info.rcSrc.right  = SourceRect.right;
                    info.rcSrc.bottom = SourceRect.bottom;
                }

                info.Flags |= (StreamInfoFlags.SI_RECT | StreamInfoFlags.SI_FOURCC);
            }
            else if (pmt.formatType == FormatType.Mpeg2Video)
            {
                if (pmt.formatSize >= Marshal.SizeOf(typeof(MPEG2VIDEOINFO)))
                {
                    var m2vi = (MPEG2VIDEOINFO)Marshal.PtrToStructure(pmt.formatPtr, typeof(MPEG2VIDEOINFO));
                    info.dwBitRate          = m2vi.hdr.dwBitRate;
                    info.AvgTimePerFrame    = m2vi.hdr.AvgTimePerFrame;
                    info.dwPictAspectRatioX = m2vi.hdr.dwPictAspectRatioX;
                    info.dwPictAspectRatioY = m2vi.hdr.dwPictAspectRatioY;
                    info.dwInterlaceFlags   = m2vi.hdr.dwInterlaceFlags;
                    info.Flags = StreamInfoFlags.SI_VIDEOBITRATE | StreamInfoFlags.SI_FRAMERATE |
                                 StreamInfoFlags.SI_ASPECTRATIO | StreamInfoFlags.SI_INTERLACEMODE;

                    info.rcSrc.right  = GetVideoDimension(SourceRect.right, m2vi.hdr.bmiHeader.biWidth);
                    info.rcSrc.bottom = GetVideoDimension(SourceRect.bottom, m2vi.hdr.bmiHeader.biHeight);
                }
                else
                {
                    info.rcSrc.right  = SourceRect.right;
                    info.rcSrc.bottom = SourceRect.bottom;
                }

                info.Flags |= (StreamInfoFlags.SI_RECT | StreamInfoFlags.SI_FOURCC);
            }

            return(info);
        }
Esempio n. 52
0
		/// <summary>
		/// Reads a single named field out of the format block of a media type.
		/// Supports WaveFormatEx, VideoInfoHeader and VideoInfoHeader2 format blocks.
		/// </summary>
		/// <param name="mediaType">Media type whose formatPtr is marshalled into a managed struct.</param>
		/// <param name="fieldName">Name of the field to look up via reflection.</param>
		/// <returns>The boxed field value, or null when the struct has no such field.</returns>
		private object GetField(AMMediaType mediaType, String fieldName)
		{
			// Map the format-type GUID onto the matching managed structure.
			object header;
			if (mediaType.formatType == FormatType.WaveEx)
			{
				header = new WaveFormatEx();
			}
			else if (mediaType.formatType == FormatType.VideoInfo)
			{
				header = new VideoInfoHeader();
			}
			else if (mediaType.formatType == FormatType.VideoInfo2)
			{
				header = new VideoInfoHeader2();
			}
			else
			{
				throw new NotSupportedException( "This device does not support a recognized format block." );
			}

			// Copy the unmanaged format block into the managed structure.
			Marshal.PtrToStructure( mediaType.formatPtr, header );

			// Reflect the requested field out of the structure.
			FieldInfo field = header.GetType().GetField(fieldName);
			return field == null ? null : field.GetValue(header);
		}
Esempio n. 53
0
        /// <summary>
        /// Thread entry point: builds a DirectShow capture graph
        /// (moniker source filter -> sample grabber, RGB24), runs it and polls
        /// stopEvent until asked to stop; every COM object is released in the
        /// finally block.
        /// </summary>
        public void WorkerThread()
        {
            // managed callback sink that will receive the captured frames
            Grabber grabber = new Grabber(this);

            // raw COM objects (released in the finally block)
            object graphObj   = null;
            object sourceObj  = null;
            object grabberObj = null;

            // interfaces
            IGraphBuilder  graph       = null;
            IBaseFilter    sourceBase  = null;
            IBaseFilter    grabberBase = null;
            ISampleGrabber sg          = null;
            IMediaControl  mc          = null;

            try
            {
                // Get type for filter graph
                Type srvType = Type.GetTypeFromCLSID(Clsid.FilterGraph);
                if (srvType == null)
                {
                    throw new ApplicationException("Failed creating filter graph");
                }

                // create filter graph
                graphObj = Activator.CreateInstance(srvType);
                graph    = (IGraphBuilder)graphObj;

                // ---- resolve the device moniker string into a source filter
                UCOMIBindCtx bindCtx = null;
                UCOMIMoniker moniker = null;
                int          n       = 0;

                // create bind context
                if (Win32.CreateBindCtx(0, out bindCtx) == 0)
                {
                    // convert moniker`s string to a moniker
                    if (Win32.MkParseDisplayName(bindCtx, source, ref n, out moniker) == 0)
                    {
                        // get device base filter
                        Guid filterId = typeof(IBaseFilter).GUID;
                        moniker.BindToObject(null, null, ref filterId, out sourceObj);

                        Marshal.ReleaseComObject(moniker);
                        moniker = null;
                    }
                    Marshal.ReleaseComObject(bindCtx);
                    bindCtx = null;
                }
                // ----

                if (sourceObj == null)
                {
                    throw new ApplicationException("Failed creating device object for moniker");
                }

                sourceBase = (IBaseFilter)sourceObj;

                // Get type for sample grabber
                srvType = Type.GetTypeFromCLSID(Clsid.SampleGrabber);
                if (srvType == null)
                {
                    throw new ApplicationException("Failed creating sample grabber");
                }

                // create sample grabber
                grabberObj  = Activator.CreateInstance(srvType);
                sg          = (ISampleGrabber)grabberObj;
                grabberBase = (IBaseFilter)grabberObj;

                // add source filter and grabber filter to the graph
                graph.AddFilter(sourceBase, "source");
                graph.AddFilter(grabberBase, "grabber");

                // restrict the grabber connection to RGB24 video
                AMMediaType mt = new AMMediaType();
                mt.majorType = MediaType.Video;
                mt.subType   = MediaSubType.RGB24;
                sg.SetMediaType(mt);

                // connect pins
                if (graph.Connect(DSTools.GetOutPin(sourceBase, 0), DSTools.GetInPin(grabberBase, 0)) < 0)
                {
                    throw new ApplicationException("Failed connecting filters");
                }

                // get the negotiated media type to learn the frame dimensions
                if (sg.GetConnectedMediaType(mt) == 0)
                {
                    VideoInfoHeader vih = (VideoInfoHeader)Marshal.PtrToStructure(mt.formatPtr, typeof(VideoInfoHeader));

                    System.Diagnostics.Debug.WriteLine("width = " + vih.BmiHeader.Width + ", height = " + vih.BmiHeader.Height);
                    grabber.Width  = vih.BmiHeader.Width;
                    grabber.Height = vih.BmiHeader.Height;
                    // NOTE(review): mt is only disposed on this success path;
                    // confirm AMMediaType's finalizer covers the failure path.
                    mt.Dispose();
                }

                // render the grabber output (builds the rest of the chain)
                graph.Render(DSTools.GetOutPin(grabberBase, 0));

                // grabber: no buffering, continuous mode, callback on every sample
                sg.SetBufferSamples(false);
                sg.SetOneShot(false);
                sg.SetCallback(grabber, 1);

                // keep the video renderer window hidden
                IVideoWindow win = (IVideoWindow)graphObj;
                win.put_AutoShow(false);
                win = null;


                // get media control
                mc = (IMediaControl)graphObj;

                // run the graph
                mc.Run();

                // poll the stop event every 100 ms until asked to terminate
                while (!stopEvent.WaitOne(0, true))
                {
                    Thread.Sleep(100);
                }
                mc.StopWhenReady();
            }
            // catch any exceptions
            catch (Exception e)
            {
                System.Diagnostics.Debug.WriteLine("----: " + e.Message);
            }
            // finalization block
            finally
            {
                // drop interface references before releasing the underlying objects
                mc          = null;
                graph       = null;
                sourceBase  = null;
                grabberBase = null;
                sg          = null;

                if (graphObj != null)
                {
                    Marshal.ReleaseComObject(graphObj);
                    graphObj = null;
                }
                if (sourceObj != null)
                {
                    Marshal.ReleaseComObject(sourceObj);
                    sourceObj = null;
                }
                if (grabberObj != null)
                {
                    Marshal.ReleaseComObject(grabberObj);
                    grabberObj = null;
                }
            }
        }
Esempio n. 54
0
    /// <summary>
    /// Transcode a .dvr-ms / .sbe recording into an MPEG-2 file (.mpg) next to
    /// the source, by building a graph of StreamBufferSource -> Cyberlink
    /// (PowerDVD) MPEG-2 multiplexer -> FileWriter and starting it. The method
    /// returns as soon as the graph is running; completion is signalled via the
    /// graph's media events elsewhere.
    /// </summary>
    /// <param name="info">Recording description; only info.file is used here.</param>
    /// <param name="format">Requested output format, validated via Supports().</param>
    /// <param name="quality">Not used by this method.</param>
    /// <param name="standard">Not used by this method.</param>
    /// <returns>true when the graph was built and started; false on any failure.</returns>
    public bool Transcode(TranscodeInfo info, MediaPortal.Core.Transcoding.VideoFormat format,
                          MediaPortal.Core.Transcoding.Quality quality, Standard standard)
    {
      if (!Supports(format)) return false;
      string ext = System.IO.Path.GetExtension(info.file);
      if (ext.ToLower() != ".dvr-ms" && ext.ToLower() != ".sbe") return false;

      //Type comtype = null;
      //object comobj = null;
      try
      {
        Log.Info("DVR2MPG: create graph");
        graphBuilder = (IGraphBuilder)new FilterGraph();

        // register the graph in the Running Object Table for GraphEdit debugging
        _rotEntry = new DsROTEntry((IFilterGraph)graphBuilder);

        Log.Info("DVR2MPG: add streambuffersource");
        bufferSource = (IStreamBufferSource)new StreamBufferSource();


        IBaseFilter filter = (IBaseFilter)bufferSource;
        graphBuilder.AddFilter(filter, "SBE SOURCE");

        Log.Info("DVR2MPG: load file:{0}", info.file);
        IFileSourceFilter fileSource = (IFileSourceFilter)bufferSource;
        int hr = fileSource.Load(info.file, null);


        // The muxer is bound directly by moniker (CLSID of the legacy filter
        // category + the Cyberlink muxer CLSID), so it need not be registered
        // under a friendly name.
        Log.Info("DVR2MPG: Add Cyberlink MPEG2 multiplexer to graph");
        string monikerPowerDvdMuxer =
          @"@device:sw:{083863F1-70DE-11D0-BD40-00A0C911CE86}\{7F2BBEAF-E11C-4D39-90E8-938FB5A86045}";
        powerDvdMuxer = Marshal.BindToMoniker(monikerPowerDvdMuxer) as IBaseFilter;
        if (powerDvdMuxer == null)
        {
          Log.Warn("DVR2MPG: FAILED:Unable to create Cyberlink MPEG Muxer (PowerDVD)");
          Cleanup();
          return false;
        }

        hr = graphBuilder.AddFilter(powerDvdMuxer, "PDR MPEG Muxer");
        if (hr != 0)
        {
          Log.Warn("DVR2MPG: FAILED:Add Cyberlink MPEG Muxer to filtergraph :0x{0:X}", hr);
          Cleanup();
          return false;
        }

        //add filewriter 
        Log.Info("DVR2MPG: Add FileWriter to graph");
        string monikerFileWrite =
          @"@device:sw:{083863F1-70DE-11D0-BD40-00A0C911CE86}\{3E8868CB-5FE8-402C-AA90-CB1AC6AE3240}";
        IBaseFilter fileWriterbase = Marshal.BindToMoniker(monikerFileWrite) as IBaseFilter;
        if (fileWriterbase == null)
        {
          Log.Warn("DVR2MPG: FAILED:Unable to create FileWriter");
          Cleanup();
          return false;
        }


        fileWriterFilter = fileWriterbase as IFileSinkFilter;
        if (fileWriterFilter == null)
        {
          Log.Warn("DVR2MPG: FAILED:Add unable to get IFileSinkFilter for filewriter");
          Cleanup();
          return false;
        }

        hr = graphBuilder.AddFilter(fileWriterbase, "FileWriter");
        if (hr != 0)
        {
          Log.Warn("DVR2MPG: FAILED:Add FileWriter to filtergraph :0x{0:X}", hr);
          Cleanup();
          return false;
        }


        //connect output #0 of streambuffer source->powerdvd audio in
        //connect output #1 of streambuffer source->powerdvd video in
        // NOTE(review): the pin interfaces obtained below are never released
        // with Marshal.ReleaseComObject in this method - confirm Cleanup()
        // or garbage collection covers them.
        Log.Info("DVR2MPG: connect streambuffer->multiplexer");
        IPin pinOut0, pinOut1;
        IPin pinIn0, pinIn1;
        pinOut0 = DsFindPin.ByDirection((IBaseFilter)bufferSource, PinDirection.Output, 0);
        pinOut1 = DsFindPin.ByDirection((IBaseFilter)bufferSource, PinDirection.Output, 1);

        pinIn0 = DsFindPin.ByDirection(powerDvdMuxer, PinDirection.Input, 0);
        pinIn1 = DsFindPin.ByDirection(powerDvdMuxer, PinDirection.Input, 1);
        if (pinOut0 == null || pinOut1 == null || pinIn0 == null || pinIn1 == null)
        {
          Log.Warn("DVR2MPG: FAILED:unable to get pins of muxer&source");
          Cleanup();
          return false;
        }

        // try MPEG-2 audio first, fall back to Dolby AC3
        bool usingAc3 = false;
        AMMediaType amAudio = new AMMediaType();
        amAudio.majorType = MediaType.Audio;
        amAudio.subType = MediaSubType.Mpeg2Audio;
        hr = pinOut0.Connect(pinIn1, amAudio);
        if (hr != 0)
        {
          amAudio.subType = MediaSubType.DolbyAC3;
          hr = pinOut0.Connect(pinIn1, amAudio);
          usingAc3 = true;
        }
        if (hr != 0)
        {
          Log.Warn("DVR2MPG: FAILED: unable to connect audio pins: 0x{0:X}", hr);
          Cleanup();
          return false;
        }

        if (usingAc3)
          Log.Info("DVR2MPG: using AC3 audio");
        else
          Log.Info("DVR2MPG: using MPEG audio");

        AMMediaType amVideo = new AMMediaType();
        amVideo.majorType = MediaType.Video;
        amVideo.subType = MediaSubType.Mpeg2Video;
        hr = pinOut1.Connect(pinIn0, amVideo);
        if (hr != 0)
        {
          Log.Warn("DVR2MPG: FAILED: unable to connect video pins: 0x{0:X}", hr);
          Cleanup();
          return false;
        }


        //connect output of powerdvd muxer->input of filewriter
        Log.Info("DVR2MPG: connect multiplexer->filewriter");
        IPin pinOut, pinIn;
        pinOut = DsFindPin.ByDirection(powerDvdMuxer, PinDirection.Output, 0);
        if (pinOut == null)
        {
          // NOTE(review): hr logged here is left over from the previous call;
          // DsFindPin returning null does not set it.
          Log.Warn("DVR2MPG: FAILED:cannot get output pin of Cyberlink MPEG muxer :0x{0:X}", hr);
          Cleanup();
          return false;
        }
        pinIn = DsFindPin.ByDirection(fileWriterbase, PinDirection.Input, 0);
        if (pinIn == null)
        {
          // NOTE(review): same stale-hr caveat as above.
          Log.Warn("DVR2MPG: FAILED:cannot get input pin of Filewriter :0x{0:X}", hr);
          Cleanup();
          return false;
        }
        AMMediaType mt = new AMMediaType();
        hr = pinOut.Connect(pinIn, mt);
        if (hr != 0)
        {
          Log.Warn("DVR2MPG: FAILED:connect muxer->filewriter :0x{0:X}", hr);
          Cleanup();
          return false;
        }

        //set output filename
        string outputFileName = System.IO.Path.ChangeExtension(info.file, ".mpg");
        Log.Info("DVR2MPG: set output file to :{0}", outputFileName);
        mt.majorType = MediaType.Stream;
        mt.subType = MediaSubTypeEx.MPEG2;

        hr = fileWriterFilter.SetFileName(outputFileName, mt);
        if (hr != 0)
        {
          Log.Warn("DVR2MPG: FAILED:unable to set filename for filewriter :0x{0:X}", hr);
          Cleanup();
          return false;
        }
        // cache the control/seeking/event interfaces and start the graph
        mediaControl = graphBuilder as IMediaControl;
        mediaSeeking = graphBuilder as IMediaSeeking;
        mediaEvt = graphBuilder as IMediaEventEx;
        Log.Info("DVR2MPG: start transcoding");
        hr = mediaControl.Run();
        if (hr != 0)
        {
          Log.Warn("DVR2MPG: FAILED:unable to start graph :0x{0:X}", hr);
          Cleanup();
          return false;
        }
      }
      catch (Exception ex)
      {
        Log.Error("DVR2MPG: Unable create graph: {0}", ex.Message);
        Cleanup();
        return false;
      }
      return true;
    }
Esempio n. 55
0
        // Use IWMStreamConfig interface to access codec names
        ///////////////////////////////////////////////////////////////////////////////
        void PrintCodecName(IWMStreamConfig pConfig)
        {
            // The stream config object also implements IWMMediaProps,
            // which exposes the underlying media type.
            IWMMediaProps mediaProps = (IWMMediaProps)pConfig;

            // First call: ask only for the size of the media type block.
            int typeSize = 0;
            mediaProps.GetMediaType(null, ref typeSize);

            // Second call: retrieve the media type itself.
            AMMediaType mediaType = new AMMediaType();
            mediaType.formatSize = typeSize;
            mediaProps.GetMediaType(mediaType, ref typeSize);

            try
            {
                // Map well-known audio and video subtypes onto friendly names;
                // anything unrecognized falls back to the raw subtype string.
                string codecName;
                if (mediaType.subType == MediaSubType.WMAudioV9)
                    codecName = "Windows Media Audio V9";
                else if (mediaType.subType == MediaSubType.WMAudio_Lossless)
                    codecName = "Windows Media Audio V9 (Lossless Mode)";
                else if (mediaType.subType == MediaSubType.WMAudioV7)
                    codecName = "Windows Media Audio V7/V8";
                else if (mediaType.subType == MediaSubType.WMSP1)
                    codecName = "Windows Media Speech Codec V9";
                else if (mediaType.subType == MediaSubType.WMAudioV2)
                    codecName = "Windows Media Audio V2";
                else if (mediaType.subType == MediaSubType.ACELPnet)
                    codecName = "ACELP.net";
                else if (mediaType.subType == MediaSubType.WMV1)
                    codecName = "Windows Media Video V7";
                else if (mediaType.subType == MediaSubType.MSS1)
                    codecName = "Windows Media Screen V7";
                else if (mediaType.subType == MediaSubType.MSS2)
                    codecName = "Windows Media Screen V9";
                else if (mediaType.subType == MediaSubType.WMV2)
                    codecName = "Windows Media Video V8";
                else if (mediaType.subType == MediaSubType.WMV3)
                    codecName = "Windows Media Video V9";
                else if (mediaType.subType == MediaSubType.MP43)
                    codecName = "Microsoft MPEG-4 Video Codec V3 ";
                else if (mediaType.subType == MediaSubType.MP4S)
                    codecName = "ISO MPEG-4 Video V1";
                else
                    codecName = AMToString.MediaSubTypeToString(mediaType.subType);

                Console.WriteLine("Codec Name: {0}", codecName);
                Console.WriteLine();
            }
            finally
            {
                // Release the format block / COM pointer inside the media type.
                WMUtils.FreeWMMediaType(mediaType);
            }
        }
Esempio n. 56
0
        /// <summary>
        /// Retrieve the capabilities of a video device: queries the stream
        /// configuration entry at the given index and exposes the media type,
        /// frame size and maximum frame rate.
        /// </summary>
        internal VideoCapabilities(IAMStreamConfig videoStreamConfig, int index)
        {
            // Ask the driver for the capability entry at this index.
            VideoStreamConfigCaps configCaps = new VideoStreamConfigCaps();
            AMMediaType mt;
            int hr = videoStreamConfig.GetStreamCaps(index, out mt, configCaps);
            if (hr != 0)
            {
                Marshal.ThrowExceptionForHR(hr);
            }

            // Keep the raw media type and derive the friendly values.
            MediaType = mt;
            FrameSize = configCaps.InputSize;
            // The highest frame rate corresponds to the smallest frame interval;
            // intervals are in 100 ns units, so one second is 10,000,000 units.
            MaxFrameRate = (int)(10000000 / configCaps.MinFrameInterval);
        }
Esempio n. 57
0
        /// <summary>
        /// Worker thread that captures the images: builds a graph of
        /// webcam source -> sample grabber (RGB32), runs it until the stop
        /// signal is set, then stops the graph and releases everything.
        /// </summary>
        private void RunWorker()
        {
            try
            {
                // Create the main graph
                m_igrphbldGraph = Activator.CreateInstance(Type.GetTypeFromCLSID(FilterGraph)) as IGraphBuilder;

                // Create the webcam source
                m_sourceObject = FilterInfo.CreateFilter(m_sMonikerString);

                // Create the grabber
                m_isplGrabber = Activator.CreateInstance(Type.GetTypeFromCLSID(SampleGrabber)) as ISampleGrabber;
                m_grabberObject = m_isplGrabber as IBaseFilter;

                // Add the source and grabber to the main graph
                m_igrphbldGraph.AddFilter(m_sourceObject, "source");
                m_igrphbldGraph.AddFilter(m_grabberObject, "grabber");

                using (AMMediaType mediaType = new AMMediaType())
                {
                    // request RGB32 video on the grabber connection
                    mediaType.MajorType = MediaTypes.Video;
                    mediaType.SubType = MediaSubTypes.RGB32;
                    m_isplGrabber.SetMediaType(mediaType);

                    if (m_igrphbldGraph.Connect(m_sourceObject.GetPin(PinDirection.Output, 0), m_grabberObject.GetPin(PinDirection.Input, 0)) >= 0)
                    {
                        if (m_isplGrabber.GetConnectedMediaType(mediaType) == 0)
                        {
                            // During startup, this code can be too fast, so try at least 3 times
                            int retryCount = 0;
                            bool succeeded = false;
                            while ((retryCount < 3) && !succeeded)
                            {
                                // Tried again
                                retryCount++;

                                try
                                {
                                    // Retrieve the grabber information (frame dimensions)
                                    VideoInfoHeader header = (VideoInfoHeader)Marshal.PtrToStructure(mediaType.FormatPtr, typeof(VideoInfoHeader));
                                    m_grbrCapGrabber.Width = header.BmiHeader.Width;
                                    m_grbrCapGrabber.Height = header.BmiHeader.Height;

                                    // Succeeded
                                    succeeded = true;
                                }
                                catch (Exception retryException)
                                {
                                    // NOTE(review): retryException itself is unused;
                                    // only the retry count is traced below.
                                    Trace.TraceInformation("Failed to retrieve the grabber information, tried {0} time(s)", retryCount);

                                    // Give the graph a moment before retrying
                                    Thread.Sleep(50);
                                }
                            }
                        }
                    }
                    // render the grabber output and configure continuous callbacks
                    m_igrphbldGraph.Render(m_grabberObject.GetPin(PinDirection.Output, 0));
                    m_isplGrabber.SetBufferSamples(false);
                    m_isplGrabber.SetOneShot(false);
                    m_isplGrabber.SetCallback(m_grbrCapGrabber, 1);

                    // Get the video window and keep it hidden
                    IVideoWindow wnd = (IVideoWindow)m_igrphbldGraph;
                    wnd.put_AutoShow(false);
                    wnd = null;

                    // Create the control and run
                    m_imedctrlControl = (IMediaControl)m_igrphbldGraph;
                    m_imedctrlControl.Run();

                    // Wait for the stop signal, polling every 10 ms
                    while (!m_rstevStopSignal.WaitOne(0, true))
                    {
                        Thread.Sleep(10);
                    }

                    // Stop when ready
                    // _control.StopWhenReady();
                    m_imedctrlControl.Stop();

                    // Wait a bit... It apparently takes some time to stop IMediaControl
                    Thread.Sleep(1000);
                }
            }
            catch (Exception ex)
            {
                // Trace
                Trace.WriteLine(ex);
            }
            finally
            {
                // Clean up
                this.Release();
            }
        }
Esempio n. 58
0
		/// <summary>
		/// Query the sample grabber for its connected media type and cache the
		/// snapshot dimensions (width, height, image size). When no valid
		/// VideoInfo format is available, the cached values are reset to zero.
		/// </summary>
		private void SetMediaSampleGrabber()
		{
			this.snapShotValid = false;

			if ((this.baseGrabFlt != null) && (this.AllowSampleGrabber))
			{
				AMMediaType connectedType = new AMMediaType();

				int hr = sampGrabber.GetConnectedMediaType(connectedType);
				if (hr < 0)
				{
					Marshal.ThrowExceptionForHR(hr);
				}

				if ((connectedType.formatType != FormatType.VideoInfo) || (connectedType.formatPtr == IntPtr.Zero))
				{
					throw new NotSupportedException("Unknown Grabber Media Format");
				}

				// Pull the video header out of the format block, then free the block.
				VideoInfoHeader header =
					(VideoInfoHeader)Marshal.PtrToStructure(connectedType.formatPtr, typeof(VideoInfoHeader));
				this.snapShotWidth = header.BmiHeader.Width;
				this.snapShotHeight = header.BmiHeader.Height;
				this.snapShotImageSize = header.BmiHeader.ImageSize;
				Marshal.FreeCoTaskMem(connectedType.formatPtr);
				connectedType.formatPtr = IntPtr.Zero;
				this.snapShotValid = true;
			}

			if (!this.snapShotValid)
			{
				this.snapShotWidth = 0;
				this.snapShotHeight = 0;
				this.snapShotImageSize = 0;
			}
		}
Esempio n. 59
0
		/// <summary>
		/// Create the sample grabber filter, configure it for RGB24 video with
		/// no buffering and no callback, and add it to the graph.
		/// </summary>
		/// <returns>true when the grabber was created and added; false when there
		/// is no video device, grabbing is disallowed, or the grabber filter
		/// could not be created.</returns>
		private bool InitSampleGrabber()
		{
			if (this.VideoDevice == null)
			{
				// nothing to do
				return false;
			}

			if (!this.allowSampleGrabber)
			{
				return false;
			}

			this.DisposeSampleGrabber();

			int hr  = 0;

			// Get SampleGrabber if needed
			if(this.sampGrabber == null)
			{
				this.sampGrabber = new SampleGrabber() as ISampleGrabber;
			}

			if(this.sampGrabber == null)
			{
				return false;
			}

#if DSHOWNET
			this.baseGrabFlt	= (IBaseFilter)this.sampGrabber;
#else
            this.baseGrabFlt = sampGrabber as IBaseFilter;
#endif

			if(this.baseGrabFlt == null)
			{
				Marshal.ReleaseComObject(this.sampGrabber);
				this.sampGrabber = null;
				// BUGFIX: bail out here; previously execution fell through and
				// dereferenced the just-nulled sampGrabber below (NullReferenceException).
				return false;
			}

			// Ask the grabber for RGB24 video; the format block is negotiated later.
			AMMediaType media = new AMMediaType();

			media.majorType	= MediaType.Video;
			media.subType	= MediaSubType.RGB24;
			media.formatPtr = IntPtr.Zero;
			hr = sampGrabber.SetMediaType(media);
			if(hr < 0)
			{
				Marshal.ThrowExceptionForHR(hr);
			}

			hr = graphBuilder.AddFilter(baseGrabFlt, "SampleGrabber");
			if(hr < 0)
			{
				Marshal.ThrowExceptionForHR(hr);
			}

			// No sample buffering, continuous mode, no callback sink.
			hr = sampGrabber.SetBufferSamples(false);
			if( hr == 0 )
			{
				hr = sampGrabber.SetOneShot(false);
			}
			if( hr == 0 )
			{
				hr = sampGrabber.SetCallback(null, 0);
			}
			if( hr < 0 )
			{
				Marshal.ThrowExceptionForHR(hr);
			}

			return true;
		}
Esempio n. 60
-1
		/// <summary>
		///  Set the value of one member of the IAMStreamConfig format block.
		///  Helper function for several properties that expose
		///  video/audio settings from IAMStreamConfig.GetFormat().
		///  IAMStreamConfig.GetFormat() returns a AMMediaType struct.
		///  AMMediaType.formatPtr points to a format block structure.
		///  This format block structure may be one of several 
		///  types, the type being determined by AMMediaType.formatType.
		///  The graph is derendered before the change and rebuilt afterwards.
		/// </summary>
		/// <param name="streamConfig">Stream configuration interface of the pin.</param>
		/// <param name="fieldName">Name of the field inside the format block to change.</param>
		/// <param name="newValue">New value to assign to the field.</param>
		/// <returns>Always null. NOTE(review): returnValue is never assigned in
		/// this method; callers should not rely on the return value.</returns>
		protected object setStreamConfigSetting( IAMStreamConfig streamConfig, string fieldName, object newValue)
		{
			if ( streamConfig == null )
				throw new NotSupportedException();
			// the graph must not be running while the format is changed
			assertStopped();
			derenderGraph();

			object returnValue = null;
#if DSHOWNET
            IntPtr pmt = IntPtr.Zero;
#endif
            AMMediaType mediaType = new AMMediaType();

			try 
			{
				// Get the current format info
#if DSHOWNET
                int hr = streamConfig.GetFormat(out pmt);
#else
				int hr = streamConfig.GetFormat(out mediaType);
#endif
				if ( hr != 0 )
					Marshal.ThrowExceptionForHR( hr );

#if DSHOWNET
                // DSHOWNET build returns a raw pointer; copy it into the managed struct
                Marshal.PtrToStructure(pmt, mediaType);
#endif

				// The formatPtr member points to different structures
				// depending on the formatType
				object formatStruct;
				if ( mediaType.formatType == FormatType.WaveEx )
					formatStruct = new WaveFormatEx();
				else if ( mediaType.formatType == FormatType.VideoInfo )
					formatStruct = new VideoInfoHeader();
				else if ( mediaType.formatType == FormatType.VideoInfo2 )
					formatStruct = new VideoInfoHeader2();
				else
					throw new NotSupportedException( "This device does not support a recognized format block." );

				// Retrieve the nested structure
				Marshal.PtrToStructure( mediaType.formatPtr, formatStruct );

				// Find the required field
				Type structType = formatStruct.GetType();
				FieldInfo fieldInfo = structType.GetField( fieldName );
				if ( fieldInfo == null )
					throw new NotSupportedException( "Unable to find the member '" + fieldName + "' in the format block." );

				// Update the value of the field
				fieldInfo.SetValue( formatStruct, newValue );

				// Update fields that may depend on specific values of other attributes
				if (mediaType.formatType == FormatType.WaveEx)
				{
					WaveFormatEx waveFmt = formatStruct as WaveFormatEx;
					waveFmt.nBlockAlign = (short)(waveFmt.nChannels * waveFmt.wBitsPerSample / 8);
					waveFmt.nAvgBytesPerSec = waveFmt.nBlockAlign * waveFmt.nSamplesPerSec;
				}

                // PtrToStructure copies the data so we need to copy it back
				Marshal.StructureToPtr( formatStruct, mediaType.formatPtr, false ); 

				// Save the changes
				hr = streamConfig.SetFormat( mediaType );
				if ( hr != 0 )
					Marshal.ThrowExceptionForHR( hr );
			}
			finally
			{
				// free the format block (and, on DSHOWNET, the raw pointer)
				DsUtils.FreeAMMediaType( mediaType );
#if DSHOWNET
                Marshal.FreeCoTaskMem(pmt);
#endif
            }
			// rebuild the graph and resume preview if it was active
			renderGraph();
			startPreviewIfNeeded();

			return( returnValue );
		}