Example #1
 public static void FreeAMMediaType(AMMediaType mediaType)
 {
     if (mediaType.formatSize != 0)
     {
         Marshal.FreeCoTaskMem(mediaType.formatPtr);
     }
     if (mediaType.unkPtr != IntPtr.Zero)
     {
         Marshal.Release(mediaType.unkPtr);
     }
     mediaType.formatSize = 0;
     mediaType.formatPtr = IntPtr.Zero;
     mediaType.unkPtr = IntPtr.Zero;
 }
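A minimal usage sketch (not taken from any single example below): FreeAMMediaType is typically paired with ISampleGrabber.GetConnectedMediaType so the format block DirectShow allocated is released exactly once. The DShowNET-style interop types and the already-connected sampleGrabber argument are assumptions.

 static void ReadAndFreeMediaType(ISampleGrabber sampleGrabber)
 {
     AMMediaType media = new AMMediaType();
     try
     {
         int hr = sampleGrabber.GetConnectedMediaType(media);
         if (hr < 0)
             Marshal.ThrowExceptionForHR(hr);
         // ... inspect media.majorType / media.formatPtr here ...
     }
     finally
     {
         FreeAMMediaType(media);   // frees formatPtr and releases unkPtr
     }
 }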
        /// <summary>
        /// Creates a new Video Player. Automatically creates the required Texture2D on the specified GraphicsDevice.
        /// </summary>
        /// <param name="FileName">The video file to open</param>
        protected VideoPlayer(string FileName)
        {
            try
            {
                // Set video state
                currentState = VideoState.Stopped;

                // Store Filename
                filename = FileName;

                // Open DirectShow Interfaces
                InitInterfaces();

                // Create a SampleGrabber Filter and add it to the FilterGraph
                //SampleGrabber sg = new SampleGrabber();
                var comtype = Type.GetTypeFromCLSID(Clsid.SampleGrabber);
                if (comtype == null)
                    throw new NotSupportedException("DirectX (8.1 or higher) not installed?");
                m_comObject = Activator.CreateInstance(comtype);

                ISampleGrabber sampleGrabber = (ISampleGrabber)m_comObject;
                m_graphBuilder.AddFilter((IBaseFilter)m_comObject, "Grabber");

                // Setup Media type info for the SampleGrabber
                AMMediaType mt = new AMMediaType();
                mt.majorType = MEDIATYPE_Video;     // Video
                mt.subType = MEDIASUBTYPE_RGB32;    // RGB32
                mt.formatType = FORMAT_VideoInfo;   // VideoInfo
                sampleGrabber.SetMediaType(mt);

                // Construct the rest of the FilterGraph
                m_graphBuilder.RenderFile(filename, null);

                // Set SampleGrabber Properties
                sampleGrabber.SetBufferSamples(true);
                sampleGrabber.SetOneShot(false);
                sampleGrabber.SetCallback((ISampleGrabberCB)this, 1);

                // Hide Default Video Window
                IVideoWindow pVideoWindow = (IVideoWindow)m_graphBuilder;
                //pVideoWindow.put_AutoShow(OABool.False);
                pVideoWindow.put_AutoShow(0);

                // Create AMMediaType to capture video information
                AMMediaType MediaType = new AMMediaType();
                sampleGrabber.GetConnectedMediaType(MediaType);
                VideoInfoHeader pVideoHeader = new VideoInfoHeader();
                Marshal.PtrToStructure(MediaType.formatPtr, pVideoHeader);

                // Store video information
                videoHeight = pVideoHeader.BmiHeader.Height;
                videoWidth = pVideoHeader.BmiHeader.Width;
                avgTimePerFrame = pVideoHeader.AvgTimePerFrame;
                bitRate = pVideoHeader.BitRate;
                m_mediaSeeking.GetDuration(out videoDuration);

                // Create byte arrays to hold video data
                m_videoDataRgba = new MySwapQueue<byte[]>(() => new byte[(videoHeight * videoWidth) * 4]); // RGBA format (4 bytes per pixel)
            }
            catch (Exception e)
            {
                throw new Exception("Unable to Load or Play the video file", e);
            }
        }
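Since the constructor above registers itself with sampleGrabber.SetCallback((ISampleGrabberCB)this, 1), the class has to implement ISampleGrabberCB, and the second argument (1) selects BufferCB. A hedged sketch of what those members might look like: the signatures follow the standard DShowNET interop, but the MySwapQueue members used here (WriteBuffer, Swap) are hypothetical stand-ins for however the example's swap queue actually exposes its storage.

        int ISampleGrabberCB.SampleCB(double sampleTime, IMediaSample pSample)
        {
            return 0; // unused: whichMethodToCallback == 1 routes frames to BufferCB
        }

        int ISampleGrabberCB.BufferCB(double sampleTime, IntPtr pBuffer, int bufferLen)
        {
            byte[] target = m_videoDataRgba.WriteBuffer;   // hypothetical accessor on MySwapQueue
            int count = Math.Min(bufferLen, target.Length);
            Marshal.Copy(pBuffer, target, 0, count);       // copy one RGB32 frame
            m_videoDataRgba.Swap();                        // hypothetical: publish the finished frame
            return 0;
        }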
Example #3
        // Configure the graph for image preview and video capture.
        private bool SetupGraph()
        {
            int result;

            try
            {
                // Attach captureGraphBuilder (the capture graph builder) to graphBuilder (the filter graph manager).
                result = captureGraphBuilder_.SetFiltergraph(graphBuilder_);
                if (result < 0) Marshal.ThrowExceptionForHR(result);

                // Add captureFilter (the source filter) to graphBuilder (the filter graph manager).
                result = graphBuilder_.AddFilter(captureFilter_, "Video Capture Device");
                if (result < 0) Marshal.ThrowExceptionForHR(result);

                // Show the capture pin settings dialog (capture size, etc.).
                DsUtils.ShowCapPinDialog(captureGraphBuilder_, captureFilter_, IntPtr.Zero);

                // Set the format of the video data to be captured.
                AMMediaType amMediaType = new AMMediaType();
                amMediaType.majorType = MediaType.Video;
                amMediaType.subType = MediaSubType.RGB24;
                amMediaType.formatType = FormatType.VideoInfo;
                result = sampleGrabber_.SetMediaType(amMediaType);
                if (result < 0) Marshal.ThrowExceptionForHR(result);

                // Add grabFilter (the transform filter) to graphBuilder (the filter graph manager).
                result = graphBuilder_.AddFilter(grabFilter_, "Frame Grab Filter");
                if (result < 0) Marshal.ThrowExceptionForHR(result);

                // Connect the capture filter to the sample grabber filter
                // (for image processing).
                Guid pinCategory;
                Guid mediaType;

                pinCategory = PinCategory.Capture;
                mediaType = MediaType.Video;
                result = captureGraphBuilder_.RenderStream(ref pinCategory, ref mediaType,
                    captureFilter_, null, grabFilter_);
                if (result < 0) Marshal.ThrowExceptionForHR(result);

                // Verify that the frame-capture configuration completed successfully.
                amMediaType = new AMMediaType();
                result = sampleGrabber_.GetConnectedMediaType(amMediaType);
                if (result < 0) Marshal.ThrowExceptionForHR(result);
                if ((amMediaType.formatType != FormatType.VideoInfo) || (amMediaType.formatPtr == IntPtr.Zero))
                    throw new NotSupportedException("キャプチャ(Grab)できないメディアフォーマットです.");

                // Build videoInfoHeader_ from the format of the video data being captured.
                videoInfoHeader_ =
                    (VideoInfoHeader)Marshal.PtrToStructure(amMediaType.formatPtr, typeof(VideoInfoHeader));
                Marshal.FreeCoTaskMem(amMediaType.formatPtr);
                amMediaType.formatPtr = IntPtr.Zero;

                // Copy samples passing through the filter into a buffer.
                result = sampleGrabber_.SetBufferSamples(true);

                // Do not stop the filter after one sample (one frame); keep grabbing continuously.
                if (result == 0) result = sampleGrabber_.SetOneShot(false);

                // Do not use a callback function.
                if (result == 0) result = sampleGrabber_.SetCallback(null, 0);
                if (result < 0) Marshal.ThrowExceptionForHR(result);

            }
            catch (Exception e)
            {
                MessageBox.Show("フィルターグラフの設定に失敗しました." + e.ToString());
                return false;
            }

            return true;
        }
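Because SetupGraph() enables SetBufferSamples(true), a frame can later be pulled straight from the grabber's internal buffer. A hedged sketch of that pull, assuming DShowNET's ISampleGrabber.GetCurrentBuffer(ref int, IntPtr) interop and the videoInfoHeader_ filled in above; the first call with IntPtr.Zero only queries the required size.

        private byte[] CaptureFrame()
        {
            int bufferSize = 0;
            int hr = sampleGrabber_.GetCurrentBuffer(ref bufferSize, IntPtr.Zero); // size query only
            if (hr < 0) Marshal.ThrowExceptionForHR(hr);

            IntPtr buffer = Marshal.AllocCoTaskMem(bufferSize);
            try
            {
                hr = sampleGrabber_.GetCurrentBuffer(ref bufferSize, buffer);      // copy the latest frame
                if (hr < 0) Marshal.ThrowExceptionForHR(hr);

                byte[] frame = new byte[bufferSize];
                Marshal.Copy(buffer, frame, 0, bufferSize);
                return frame; // bottom-up RGB24 rows, videoInfoHeader_.BmiHeader.Width x Height
            }
            finally
            {
                Marshal.FreeCoTaskMem(buffer);
            }
        }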
Example #4
        /// <summary> build the capture graph for grabber. </summary>
        private bool SetupGraph()
        {
            int hr;
            hr = capGraph.SetFiltergraph( graphBuilder );
            if( hr < 0 )
                Marshal.ThrowExceptionForHR( hr );

            hr = graphBuilder.AddFilter( capFilter, "Ds.NET Video Capture Device" );
            if( hr < 0 )
                Marshal.ThrowExceptionForHR( hr );

            // will show a dialog asking the user for capture quality/format settings
            //DsUtils.ShowCapPinDialog( capGraph, capFilter, IntPtr.Zero );

            AMMediaType media = new AMMediaType();
            media.majorType	= MediaType.Video;
            media.subType	= MediaSubType.RGB24;
            media.formatType = FormatType.VideoInfo;		// ???
            hr = sampGrabber.SetMediaType( media );
            if( hr < 0 )
                Marshal.ThrowExceptionForHR( hr );

            hr = graphBuilder.AddFilter( baseGrabFlt, "Ds.NET Grabber" );
            if( hr < 0 )
                Marshal.ThrowExceptionForHR( hr );

            Guid cat;
            Guid med;

            cat = PinCategory.Capture;
            med = MediaType.Video;
            hr = capGraph.RenderStream( ref cat, ref med, capFilter, null, baseGrabFlt ); // baseGrabFlt
            if( hr < 0 )
                Marshal.ThrowExceptionForHR( hr );

            media = new AMMediaType();
            hr = sampGrabber.GetConnectedMediaType( media );
            if( hr < 0 )
                Marshal.ThrowExceptionForHR( hr );
            if( (media.formatType != FormatType.VideoInfo) || (media.formatPtr == IntPtr.Zero) )
                throw new NotSupportedException( "Unknown Grabber Media Format" );

            videoInfoHeader = (VideoInfoHeader) Marshal.PtrToStructure( media.formatPtr, typeof(VideoInfoHeader) );
            Marshal.FreeCoTaskMem( media.formatPtr ); media.formatPtr = IntPtr.Zero;

            hr = sampGrabber.SetBufferSamples( false );
            if( hr == 0 )
                hr = sampGrabber.SetOneShot( false );
            if( hr == 0 )
                hr = sampGrabber.SetCallback( null, 0 );
            if( hr < 0 )
                Marshal.ThrowExceptionForHR( hr );

            return true;
        }
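SetupGraph() only builds and connects the graph; streaming still has to be started through IMediaControl, which the other examples obtain by casting the filter graph. A minimal start/stop sketch, assuming the graphBuilder field above:

        private void StartCapture()
        {
            IMediaControl mediaCtrl = (IMediaControl)graphBuilder;
            int hr = mediaCtrl.Run();                 // switch the whole graph to the running state
            if (hr < 0) Marshal.ThrowExceptionForHR(hr);
        }

        private void StopCapture()
        {
            IMediaControl mediaCtrl = (IMediaControl)graphBuilder;
            int hr = mediaCtrl.Stop();                // stop streaming; filters stay connected
            if (hr < 0) Marshal.ThrowExceptionForHR(hr);
        }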
Example #5
 protected object setStreamConfigSetting(IAMStreamConfig streamConfig, string fieldName, object newValue)
 {
     if (streamConfig == null)
     {
         throw new NotSupportedException();
     }
     this.assertStopped();
     this.derenderGraph();
     IntPtr zero = IntPtr.Zero;
     AMMediaType structure = new AMMediaType();
     try
     {
         object obj3;
         int format = streamConfig.GetFormat(out zero);
         if (format != 0)
         {
             Marshal.ThrowExceptionForHR(format);
         }
         Marshal.PtrToStructure(zero, structure);
         if (structure.formatType == FormatType.WaveEx)
         {
             obj3 = new WaveFormatEx();
         }
         else if (structure.formatType == FormatType.VideoInfo)
         {
             obj3 = new VideoInfoHeader();
         }
         else
         {
             if (structure.formatType != FormatType.VideoInfo2)
             {
                 throw new NotSupportedException("This device does not support a recognized format block.");
             }
             obj3 = new VideoInfoHeader2();
         }
         Marshal.PtrToStructure(structure.formatPtr, obj3);
         FieldInfo field = obj3.GetType().GetField(fieldName);
         if (field == null)
         {
             throw new NotSupportedException("Unable to find the member '" + fieldName + "' in the format block.");
         }
         field.SetValue(obj3, newValue);
         Marshal.StructureToPtr(obj3, structure.formatPtr, false);
         format = streamConfig.SetFormat(structure);
         if (format != 0)
         {
             Marshal.ThrowExceptionForHR(format);
         }
     }
     finally
     {
         DsUtils.FreeAMMediaType(structure);
         Marshal.FreeCoTaskMem(zero);
     }
     this.renderGraph();
     this.startPreviewIfNeeded();
     return null;
 }
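A hedged usage sketch for the helper above: AvgTimePerFrame is a real VideoInfoHeader field (it also appears in Example #1) and is measured in 100-nanosecond units, so 15 fps is 10,000,000 / 15. The videoStreamConfig argument is assumed to be the IAMStreamConfig obtained from the capture graph builder (as in Example #9 below); whether a device actually accepts the value depends on the driver.

     // Request roughly 15 frames per second from the capture device.
     setStreamConfigSetting(videoStreamConfig, "AvgTimePerFrame", (long)(10000000 / 15));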
Example #6
        /// <summary> build the capture graph for grabber. </summary>
        bool SetupGraph()
        {
            int hr;
            try
            {
                hr = capGraph.SetFiltergraph( graphBuilder );
                if( hr < 0 )
                    Marshal.ThrowExceptionForHR( hr );

                hr = graphBuilder.AddFilter( capFilter, "Ds.NET Video Capture Device" );
                if( hr < 0 )
                    Marshal.ThrowExceptionForHR( hr );

                DsUtils.ShowCapPinDialog( capGraph, capFilter, this.Handle );

                AMMediaType media = new AMMediaType();
                media.majorType = MediaType.Video;
                media.subType = MediaSubType.RGB24;
                media.formatType = FormatType.VideoInfo;		// ???
                hr = sampGrabber.SetMediaType( media );
                if( hr < 0 )
                    Marshal.ThrowExceptionForHR( hr );

                hr = graphBuilder.AddFilter( baseGrabFlt, "Ds.NET Grabber" );
                if( hr < 0 )
                    Marshal.ThrowExceptionForHR( hr );

                Guid cat = PinCategory.Preview;
                Guid med = MediaType.Video;
                hr = capGraph.RenderStream( ref cat, ref med, capFilter, null, null ); // baseGrabFlt
                if( hr < 0 )
                    Marshal.ThrowExceptionForHR( hr );

                cat = PinCategory.Capture;
                med = MediaType.Video;
                hr = capGraph.RenderStream( ref cat, ref med, capFilter, null, baseGrabFlt ); // baseGrabFlt
                if( hr < 0 )
                    Marshal.ThrowExceptionForHR( hr );

                media = new AMMediaType();
                hr = sampGrabber.GetConnectedMediaType( media );
                if( hr < 0 )
                    Marshal.ThrowExceptionForHR( hr );
                if( (media.formatType != FormatType.VideoInfo) || (media.formatPtr == IntPtr.Zero) )
                    throw new NotSupportedException( "Unknown Grabber Media Format" );

                videoInfoHeader = (VideoInfoHeader) Marshal.PtrToStructure( media.formatPtr, typeof(VideoInfoHeader) );
                Marshal.FreeCoTaskMem( media.formatPtr ); media.formatPtr = IntPtr.Zero;

                hr = sampGrabber.SetBufferSamples( false );
                if( hr == 0 )
                    hr = sampGrabber.SetOneShot( false );
                if( hr == 0 )
                    hr = sampGrabber.SetCallback( null, 0 );
                if( hr < 0 )
                    Marshal.ThrowExceptionForHR( hr );

                return true;
            }
            catch( Exception ee )
            {
                MessageBox.Show( this, "Could not setup graph\r\n" + ee.Message, "DirectShow.NET", MessageBoxButtons.OK, MessageBoxIcon.Stop );
                return false;
            }
        }
Example #7
		/// <summary>
		///  Set the value of one member of the IAMStreamConfig format block.
		///  Helper function for several properties that expose
		///  video/audio settings from IAMStreamConfig.GetFormat().
		///  IAMStreamConfig.GetFormat() returns an AMMediaType struct.
		///  AMMediaType.formatPtr points to a format block structure.
		///  This format block structure may be one of several 
		///  types, the type being determined by AMMediaType.formatType.
		/// </summary>
		protected object setStreamConfigSetting( IAMStreamConfig streamConfig, string fieldName, object newValue)
		{
			//if ( streamConfig == null )
			//	throw new NotSupportedException();
			assertStopped();
			derenderGraph();

			object returnValue = null;
			IntPtr pmt = IntPtr.Zero;
			AMMediaType mediaType = new AMMediaType();

			try 
			{
				// Get the current format info
				int hr = streamConfig.GetFormat( out pmt );
				//if ( hr != 0 )
				//	Marshal.ThrowExceptionForHR( hr );
				Marshal.PtrToStructure( pmt, mediaType );

				// The formatPtr member points to different structures
				// depending on the formatType
				object formatStruct;
				if ( mediaType.formatType == FormatType.WaveEx )
					formatStruct = new WaveFormatEx();
				else if ( mediaType.formatType == FormatType.VideoInfo )
					formatStruct = new VideoInfoHeader();
				else if ( mediaType.formatType == FormatType.VideoInfo2 )
					formatStruct = new VideoInfoHeader2();
				else
					throw new NotSupportedException( "This device does not support a recognized format block." );

				// Retrieve the nested structure
				Marshal.PtrToStructure( mediaType.formatPtr, formatStruct );

				// Find the required field
				Type structType = formatStruct.GetType();
				FieldInfo fieldInfo = structType.GetField( fieldName );
				if ( fieldInfo == null )
					throw new NotSupportedException( "Unable to find the member '" + fieldName + "' in the format block." );

				// Update the value of the field
				fieldInfo.SetValue( formatStruct, newValue );

				// PtrToStructure copies the data so we need to copy it back
				Marshal.StructureToPtr( formatStruct, mediaType.formatPtr, false ); 

				// Save the changes
				hr = streamConfig.SetFormat( mediaType );
				//if ( hr != 0 )
				//	Marshal.ThrowExceptionForHR( hr );
			}
			finally
			{
				DsUtils.FreeAMMediaType( mediaType );
				Marshal.FreeCoTaskMem( pmt );
			}
			renderStream = false;
			renderGraph();
			startPreviewIfNeeded();

			return( returnValue );
		}
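A read-only counterpart is often useful alongside the setter above; a hedged sketch under the same assumptions (DShowNET interop, DsUtils.FreeAMMediaType available), returning the current value of one field from the format block:

		protected object getStreamConfigSetting( IAMStreamConfig streamConfig, string fieldName )
		{
			IntPtr pmt = IntPtr.Zero;
			AMMediaType mediaType = new AMMediaType();
			try
			{
				// Get the current format info
				int hr = streamConfig.GetFormat( out pmt );
				if ( hr != 0 )
					Marshal.ThrowExceptionForHR( hr );
				Marshal.PtrToStructure( pmt, mediaType );

				// Pick the managed structure matching the format block
				object formatStruct;
				if ( mediaType.formatType == FormatType.WaveEx )
					formatStruct = new WaveFormatEx();
				else if ( mediaType.formatType == FormatType.VideoInfo )
					formatStruct = new VideoInfoHeader();
				else if ( mediaType.formatType == FormatType.VideoInfo2 )
					formatStruct = new VideoInfoHeader2();
				else
					throw new NotSupportedException( "This device does not support a recognized format block." );

				// Copy the nested structure and read the requested field
				Marshal.PtrToStructure( mediaType.formatPtr, formatStruct );
				FieldInfo fieldInfo = formatStruct.GetType().GetField( fieldName );
				if ( fieldInfo == null )
					throw new NotSupportedException( "Unable to find the member '" + fieldName + "' in the format block." );
				return fieldInfo.GetValue( formatStruct );
			}
			finally
			{
				DsUtils.FreeAMMediaType( mediaType );
				Marshal.FreeCoTaskMem( pmt );
			}
		}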
Example #8
		/// <summary>
		///  Connects the filters of a previously created graph 
		///  (created by createGraph()). Once rendered the graph
		///  is ready to be used. This method may also destroy
		///  streams if we have streams we no longer want.
		/// </summary>
		protected void renderGraph()
		{
			Guid					cat;
			Guid					med;
			int						hr;
			bool					didSomething = false;
			const int WS_CHILD			= 0x40000000;	
			const int WS_CLIPCHILDREN	= 0x02000000;
			const int WS_CLIPSIBLINGS	= 0x04000000;

			assertStopped();

			// Ensure required properties set
			if ( filename == null )
				throw new ArgumentException( "The Filename property has not been set to a file.\n" );

			// Stop the graph
			if ( mediaControl != null )
				mediaControl.Stop();

			// Create the graph if needed (group should already be created)
			createGraph();

			// Derender the graph if we have a capture or preview stream
			// that we no longer want. We can't derender the capture and 
			// preview streams separately. 
			// Notice the second case will leave a capture stream intact
			// even if we no longer want it. This allows the user that is
			// not using the preview to Stop() and Start() without
			// rerendering the graph.
			if ( !wantPreviewRendered && isPreviewRendered )
				derenderGraph();
			if ( !wantCaptureRendered && isCaptureRendered )
				if ( wantPreviewRendered )
				{
					derenderGraph();
					graphState = GraphState.Null;
					createGraph();
				}

			// Video Capture
			// ===================================================================================
			if ( wantCaptureRendered && !isCaptureRendered )
			{
                            
				// Render the file writer portion of graph (mux -> file)
				Guid mediaSubType = MediaSubType.Avi;
				hr = captureGraphBuilder.SetOutputFileName( ref mediaSubType, Filename, out muxFilter, out fileWriterFilter );
				//if( hr < 0 ) Marshal.ThrowExceptionForHR( hr );

				if ( VideoDevice != null )
				{
					// Try interleaved first, because if the device supports it,
					// it's the only way to get audio as well as video
					cat = PinCategory.Capture;
					med = MediaType.Interleaved;
					hr = captureGraphBuilder.RenderStream( ref cat, ref med, videoDeviceFilter, videoCompressorFilter, muxFilter); 
					if( hr < 0 ) 
					{
						med = MediaType.Video;
						hr = captureGraphBuilder.RenderStream( ref cat, ref med, videoDeviceFilter, videoCompressorFilter, muxFilter);
						//if ( hr == -2147220969 ) throw new DeviceInUseException( "Video device", hr );
						//if( hr < 0 ) Marshal.ThrowExceptionForHR( hr );
					}
				}
				// Render audio (audio -> mux)
				if ( AudioDevice != null )
				{
					cat = PinCategory.Capture;
					med = MediaType.Audio;
					hr = captureGraphBuilder.RenderStream( ref cat, ref med, audioDeviceFilter, audioCompressorFilter, muxFilter );
					if( hr < 0 ) Marshal.ThrowExceptionForHR( hr );
				}
				isCaptureRendered = true;
				didSomething = true;
			}


			// Render preview stream and launch the baseGrabFlt to capture frames
			// ===================================================================================
			if ( wantPreviewRendered && renderStream && !isPreviewRendered )
			{
				// Render preview (video.PinPreview -> baseGrabFlt -> renderer)
				// At this point intelligent connect is used, because my webcams don't have separate preview
				// and capture pins, so the Smart Tee filter will be inserted. I have tested it using GraphEdit.
				// I can type hr = captureGraphBuilder.RenderStream( ref cat, ref med, videoDeviceFilter, null, baseGrabFlt); 
				// because baseGrabFlt is a transform filter, like videoCompressorFilter.
				
				cat = PinCategory.Preview;
				med = MediaType.Video;
				hr = captureGraphBuilder.RenderStream( ref cat, ref med, videoDeviceFilter, baseGrabFlt, null ); 
				//if( hr < 0 ) Marshal.ThrowExceptionForHR( hr );

				// Get the IVideoWindow interface
				videoWindow = (IVideoWindow) graphBuilder;

				// Set the video window to be a child of the main window
				hr = videoWindow.put_Owner( previewWindow.Handle );
				//if( hr < 0 ) Marshal.ThrowExceptionForHR( hr );

				// Set video window style
				hr = videoWindow.put_WindowStyle( WS_CHILD | WS_CLIPCHILDREN | WS_CLIPSIBLINGS);
				//if( hr < 0 ) Marshal.ThrowExceptionForHR( hr );

				// Position video window in client rect of owner window
				previewWindow.Resize += new EventHandler( onPreviewWindowResize );
				onPreviewWindowResize( this, null );

				// Make the video window visible, now that it is properly positioned
				hr = videoWindow.put_Visible( DsHlp.OATRUE );
				//if( hr < 0 ) Marshal.ThrowExceptionForHR( hr );

				hr = mediaEvt.SetNotifyWindow( this.Handle, WM_GRAPHNOTIFY, IntPtr.Zero );
				//if( hr < 0 )
				//	Marshal.ThrowExceptionForHR( hr );

				isPreviewRendered = true;
				didSomething = true;

				// Begin Configuration of SampGrabber	<<<<<<----------------------------------------------------
                
				AMMediaType media = new AMMediaType();
				hr = sampGrabber.GetConnectedMediaType( media );
				//if( hr < 0 )
				//	Marshal.ThrowExceptionForHR( hr );
				if( (media.formatType != FormatType.VideoInfo) || (media.formatPtr == IntPtr.Zero) )
					throw new NotSupportedException( "Unknown Grabber Media Format" ); 

				videoInfoHeader = (VideoInfoHeader) Marshal.PtrToStructure( media.formatPtr, typeof(VideoInfoHeader) );
				Marshal.FreeCoTaskMem( media.formatPtr ); media.formatPtr = IntPtr.Zero;

				hr = sampGrabber.SetBufferSamples( false );
				if( hr == 0 )
					hr = sampGrabber.SetOneShot( false );
				if( hr == 0 )
					hr = sampGrabber.SetCallback( null, 0 );
				//if( hr < 0 )
				//	Marshal.ThrowExceptionForHR( hr );	
				
				// Finish Configuration of SampGrabber	<<<<<<----------------------------------------------------
			}
			
			if ( didSomething )
				graphState = GraphState.Rendered;
		}
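The code above wires previewWindow.Resize to onPreviewWindowResize, but the handler itself is not shown. A hedged sketch of the usual implementation, which keeps the video window sized to the preview control's client area via IVideoWindow.SetWindowPosition (assumes the usual System.Drawing/WinForms usings):

		protected void onPreviewWindowResize( object sender, EventArgs e )
		{
			if ( videoWindow != null && previewWindow != null )
			{
				// Resize the video renderer to fill the preview control's client area
				Rectangle rc = previewWindow.ClientRectangle;
				videoWindow.SetWindowPosition( 0, 0, rc.Right, rc.Bottom );
			}
		}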
Example #9
		// --------------------- Private Methods -----------------------
		
		/// <summary> 
		///  Create a new filter graph and add filters (devices, compressors, 
		///  misc), but leave the filters unconnected. Call renderGraph()
		///  to connect the filters.
		/// </summary>
		protected void createGraph()
		{
			Guid					cat;
			Guid					med;
			int						hr;
			Type comType = null;
			object comObj = null;

			// Ensure required properties are set
			if ( videoDevice == null && audioDevice == null )
				throw new ArgumentException( "The video and/or audio device have not been set. Please set one or both to valid capture devices.\n" );

			// Skip if we are already created
			if ( (int)graphState < (int)GraphState.Created )
			{
				// Garbage collect, ensure that previous filters are released
				GC.Collect();

				// Make a new filter graph
				graphBuilder = (IGraphBuilder) Activator.CreateInstance( Type.GetTypeFromCLSID( Clsid.FilterGraph, true ) ); 

				// Get the Capture Graph Builder
				Guid clsid = Clsid.CaptureGraphBuilder2;
				Guid riid = typeof(ICaptureGraphBuilder2).GUID;
				captureGraphBuilder = (ICaptureGraphBuilder2) DsBugWO.CreateDsInstance( ref clsid, ref riid ); 

				// sampGrabber, ISampleGrabber to capture frames
				comType=Type.GetTypeFromCLSID( Clsid.SampleGrabber, true );
				if(comType==null)
					throw new NotImplementedException (@"DirectShow SampleGrabber not installed/registered");
				comObj=Activator.CreateInstance( comType );
				sampGrabber = (ISampleGrabber) comObj; comObj = null;

				// Link the CaptureGraphBuilder to the filter graph
				hr = captureGraphBuilder.SetFiltergraph( graphBuilder );
				//if( hr < 0 ) Marshal.ThrowExceptionForHR( hr );

				// Begin set up of SampGrabber	<<<<<<----------------------------------------------------
				AMMediaType media = new AMMediaType();
				media.majorType= MediaType.Video;
				media.subType = MediaSubType.RGB24;
				media.formatType = FormatType.VideoInfo;
				hr = sampGrabber.SetMediaType( media );
				//if( hr<0 ) Marshal.ThrowExceptionForHR( hr );
				// Finish set up of SampGrabber	<<<<<<----------------------------------------------------
			
				// Add the graph to the Running Object Table so it can be
				// viewed with GraphEdit
				#if DEBUG
				DsROT.AddGraphToRot( graphBuilder, out rotCookie );
				#endif

				// Get the video device and add it to the filter graph
				if ( VideoDevice != null )
				{
					videoDeviceFilter = (IBaseFilter) Marshal.BindToMoniker( VideoDevice.MonikerString );
					hr = graphBuilder.AddFilter( videoDeviceFilter, "Video Capture Device" );
					//if( hr < 0 ) Marshal.ThrowExceptionForHR( hr );

					// Add SampGrabber Filter	<<<<<<----------------------------------------------------
					mediaEvt	= (IMediaEventEx)	graphBuilder;
					baseGrabFlt = (IBaseFilter) sampGrabber;
					hr = graphBuilder.AddFilter( baseGrabFlt, "DS.NET Grabber" );
					//if( hr < 0 ) Marshal.ThrowExceptionForHR( hr );

				}

				// Get the audio device and add it to the filter graph
				if ( AudioDevice != null )
				{
					audioDeviceFilter = (IBaseFilter) Marshal.BindToMoniker( AudioDevice.MonikerString );
					hr = graphBuilder.AddFilter( audioDeviceFilter, "Audio Capture Device" );
					//if( hr < 0 ) Marshal.ThrowExceptionForHR( hr );
				}

				// Get the video compressor and add it to the filter graph
				if ( VideoCompressor != null )
				{
					videoCompressorFilter = (IBaseFilter) Marshal.BindToMoniker( VideoCompressor.MonikerString ); 
					hr = graphBuilder.AddFilter( videoCompressorFilter, "Video Compressor" );
					//if( hr < 0 ) Marshal.ThrowExceptionForHR( hr );
				}

				// Get the audio compressor and add it to the filter graph
				if ( AudioCompressor != null )
				{
					audioCompressorFilter = (IBaseFilter) Marshal.BindToMoniker( AudioCompressor.MonikerString ); 
					hr = graphBuilder.AddFilter( audioCompressorFilter, "Audio Compressor" );
					//if( hr < 0 ) Marshal.ThrowExceptionForHR( hr );
				}
				
				// Retrieve the stream control interface for the video device
				// FindInterface will also add any required filters
				// (WDM devices in particular may need additional
				// upstream filters to function).

				// Try looking for an interleaved media type
				object o;
				cat = PinCategory.Capture;
				med = MediaType.Interleaved;
				Guid iid = typeof(IAMStreamConfig).GUID;
				hr = captureGraphBuilder.FindInterface(
					ref cat, ref med, videoDeviceFilter, ref iid, out o );

				if ( hr != 0 )
				{
					// If not found, try looking for a video media type
					med = MediaType.Video;
					hr = captureGraphBuilder.FindInterface(
						ref cat, ref med, videoDeviceFilter, ref iid, out o );
				
					if ( hr != 0 )
						o = null;
				}
				videoStreamConfig = o as IAMStreamConfig;

				// Retrieve the stream control interface for the audio device
				o = null;
				cat = PinCategory.Capture;
				med = MediaType.Audio ;
				iid = typeof(IAMStreamConfig).GUID;
				hr = captureGraphBuilder.FindInterface(
					ref cat, ref med, audioDeviceFilter, ref iid, out o );
				if ( hr != 0 )
					o = null;
				audioStreamConfig = o as IAMStreamConfig;

				// Retrieve the media control interface (for starting/stopping graph)
				mediaControl = (IMediaControl) graphBuilder;

				// Reload any video crossbars
				if ( videoSources != null ) videoSources.Dispose(); videoSources = null;

				// Reload any audio crossbars
				if ( audioSources != null ) audioSources.Dispose(); audioSources = null;
				
				// Reload any property pages exposed by filters
				if ( propertyPages != null ) propertyPages.Dispose(); propertyPages = null;

				// Reload capabilities of video device
				videoCaps = null;

				// Reload capabilities of audio device
				audioCaps = null;

				// Retrieve TV Tuner if available
				o = null;
				cat = PinCategory.Capture;
				med = MediaType.Interleaved; 
				iid = typeof(IAMTVTuner).GUID;
				hr = captureGraphBuilder.FindInterface( 
					ref cat, ref med, videoDeviceFilter, ref iid, out o );
				if ( hr != 0 )
				{
					med = MediaType.Video ;
					hr = captureGraphBuilder.FindInterface( 
						ref cat, ref med, videoDeviceFilter, ref iid, out o );
					if ( hr != 0 )
						o = null;
				}
				IAMTVTuner t = o as IAMTVTuner;
				if ( t != null )
					tuner = new Tuner( t );


				/*
							// ----------- VMR 9 -------------------
							//## check out samples\inc\vmrutil.h :: RenderFileToVMR9

							IBaseFilter vmr = null;
							if ( ( VideoDevice != null ) && ( previewWindow != null ) )
							{
								vmr = (IBaseFilter) Activator.CreateInstance( Type.GetTypeFromCLSID( Clsid.VideoMixingRenderer9, true ) ); 
								hr = graphBuilder.AddFilter( vmr, "VMR" );
								if( hr < 0 ) Marshal.ThrowExceptionForHR( hr );

								IVMRFilterConfig9 vmrFilterConfig = (IVMRFilterConfig9) vmr;
								hr = vmrFilterConfig.SetRenderingMode( VMRMode9.Windowless );
								if( hr < 0 ) Marshal.ThrowExceptionForHR( hr );

								IVMRWindowlessControl9 vmrWindowsless = (IVMRWindowlessControl9) vmr;	
								hr = vmrWindowsless.SetVideoClippingWindow( previewWindow.Handle );
								if( hr < 0 ) Marshal.ThrowExceptionForHR( hr );
							}
							//------------------------------------------- 

							// ---------- SmartTee ---------------------

							IBaseFilter smartTeeFilter = (IBaseFilter) Activator.CreateInstance( Type.GetTypeFromCLSID( Clsid.SmartTee, true ) ); 
							hr = graphBuilder.AddFilter( smartTeeFilter, "Video Smart Tee" );
							if( hr < 0 ) Marshal.ThrowExceptionForHR( hr );

							// Video -> SmartTee
							cat = PinCategory.Capture;
							med = MediaType.Video;
							hr = captureGraphBuilder.RenderStream( ref cat, ref med, videoDeviceFilter, null, smartTeeFilter ); 
							if( hr < 0 ) Marshal.ThrowExceptionForHR( hr );

							// smarttee -> mux
							cat = PinCategory.Capture;
							med = MediaType.Video;
							hr = captureGraphBuilder.RenderStream( ref cat, ref med, smartTeeFilter, null, muxFilter ); 
							if( hr < 0 ) Marshal.ThrowExceptionForHR( hr );

							// smarttee -> vmr
							cat = PinCategory.Preview;
							med = MediaType.Video;
							hr = captureGraphBuilder.RenderStream( ref cat, ref med, smartTeeFilter, null, vmr ); 
							if( hr < 0 ) Marshal.ThrowExceptionForHR( hr );

							// -------------------------------------
				*/

				// Update the state now that we are done
				graphState = GraphState.Created;
			}
		}
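createGraph() leans on GC.Collect() to release filters left over from a previous graph; an explicit teardown is a common companion. A hedged sketch, using only fields that appear in this example and the usual "stop first, release filters, release builders last" order; note that videoWindow and mediaEvt are merely casts of graphBuilder, so they are nulled rather than released separately.

		protected void destroyGraph()
		{
			// Stop the graph before tearing it down
			if ( mediaControl != null )
				mediaControl.Stop();
			graphState = GraphState.Null;

			// These are interfaces on graphBuilder itself; just drop the references
			videoWindow = null;
			mediaEvt = null;
			mediaControl = null;

			// Release the filters added by createGraph()
			if ( videoDeviceFilter != null )     { Marshal.ReleaseComObject( videoDeviceFilter );     videoDeviceFilter = null; }
			if ( audioDeviceFilter != null )     { Marshal.ReleaseComObject( audioDeviceFilter );     audioDeviceFilter = null; }
			if ( videoCompressorFilter != null ) { Marshal.ReleaseComObject( videoCompressorFilter ); videoCompressorFilter = null; }
			if ( audioCompressorFilter != null ) { Marshal.ReleaseComObject( audioCompressorFilter ); audioCompressorFilter = null; }
			if ( baseGrabFlt != null )           { Marshal.ReleaseComObject( baseGrabFlt );           baseGrabFlt = null; sampGrabber = null; }

			// Release the builders last
			if ( captureGraphBuilder != null )   { Marshal.ReleaseComObject( captureGraphBuilder );   captureGraphBuilder = null; }
			if ( graphBuilder != null )          { Marshal.ReleaseComObject( graphBuilder );          graphBuilder = null; }
		}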
Example #10
        protected object setStreamConfigSetting(DShowNET.IAMStreamConfig streamConfig, string fieldName, object newValue)
        {
            if (streamConfig == null)
            {
                throw new NotSupportedException();
            }

            object returnValue = null;
            IntPtr pmt         = IntPtr.Zero;

            DShowNET.AMMediaType mediaType = new DShowNET.AMMediaType();

            try
            {
                // Get the current format info
                int hr = streamConfig.GetFormat(out pmt);
                if (hr != 0)
                {
                    Marshal.ThrowExceptionForHR(hr);
                }
                Marshal.PtrToStructure(pmt, mediaType);

                // The formatPtr member points to different structures
                // depending on the formatType
                object formatStruct;
                if (mediaType.formatType == DShowNET.FormatType.WaveEx)
                {
                    formatStruct = new DShowNET.WaveFormatEx();
                }
                else if (mediaType.formatType == DShowNET.FormatType.VideoInfo)
                {
                    formatStruct = new VideoInfoHeader();
                }
                else if (mediaType.formatType == DShowNET.FormatType.VideoInfo2)
                {
                    formatStruct = new DShowNET.VideoInfoHeader2();
                }
                else
                {
                    throw new NotSupportedException("This device does not support a recognized format block.");
                }

                // Retrieve the nested structure
                Marshal.PtrToStructure(mediaType.formatPtr, formatStruct);

                // Find the required field
                Type      structType = formatStruct.GetType();
                FieldInfo fieldInfo  = structType.GetField(fieldName);
                if (fieldInfo == null)
                {
                    throw new NotSupportedException("Unable to find the member '" + fieldName + "' in the format block.");
                }

                // Update the value of the field
                fieldInfo.SetValue(formatStruct, newValue);

                // PtrToStructure copies the data so we need to copy it back
                Marshal.StructureToPtr(formatStruct, mediaType.formatPtr, false);

                // Save the changes
                hr = streamConfig.SetFormat(mediaType);
                if (hr != 0)
                {
                    Marshal.ThrowExceptionForHR(hr);
                }
            }
            finally
            {
                //DShowNET.DsUtils.FreeAMMediaType(mediaType);
                Marshal.FreeCoTaskMem(pmt);
            }

            return(returnValue);
        }
Example #11
        /// <summary>
        /// Creates a new Video Player. Automatically creates the required Texture2D on the specified GraphicsDevice.
        /// </summary>
        /// <param name="FileName">The video file to open</param>
        protected VideoPlayer(string FileName)
        {
            try
            {
                // Set video state
                currentState = VideoState.Stopped;

                // Store Filename
                filename = FileName;

                // Open DirectShow Interfaces
                InitInterfaces();

                // Create a SampleGrabber Filter and add it to the FilterGraph
                //SampleGrabber sg = new SampleGrabber();
                var comtype = Type.GetTypeFromCLSID(Clsid.SampleGrabber);
                if (comtype == null)
                {
                    throw new NotSupportedException("DirectX (8.1 or higher) not installed?");
                }
                m_comObject = Activator.CreateInstance(comtype);

                ISampleGrabber sampleGrabber = (ISampleGrabber)m_comObject;
                m_graphBuilder.AddFilter((IBaseFilter)m_comObject, "Grabber");

                // Setup Media type info for the SampleGrabber
                AMMediaType mt = new AMMediaType();
                mt.majorType  = MEDIATYPE_Video;    // Video
                mt.subType    = MEDIASUBTYPE_RGB32; // RGB32
                mt.formatType = FORMAT_VideoInfo;   // VideoInfo
                sampleGrabber.SetMediaType(mt);

                // Construct the rest of the FilterGraph
                m_graphBuilder.RenderFile(filename, null);

                // Set SampleGrabber Properties
                sampleGrabber.SetBufferSamples(true);
                sampleGrabber.SetOneShot(false);
                sampleGrabber.SetCallback((ISampleGrabberCB)this, 1);

                // Hide Default Video Window
                IVideoWindow pVideoWindow = (IVideoWindow)m_graphBuilder;
                //pVideoWindow.put_AutoShow(OABool.False);
                pVideoWindow.put_AutoShow(0);

                // Create AMMediaType to capture video information
                AMMediaType MediaType = new AMMediaType();
                sampleGrabber.GetConnectedMediaType(MediaType);
                VideoInfoHeader pVideoHeader = new VideoInfoHeader();
                Marshal.PtrToStructure(MediaType.formatPtr, pVideoHeader);

                // Store video information
                videoHeight     = pVideoHeader.BmiHeader.Height;
                videoWidth      = pVideoHeader.BmiHeader.Width;
                avgTimePerFrame = pVideoHeader.AvgTimePerFrame;
                bitRate         = pVideoHeader.BitRate;
                m_mediaSeeking.GetDuration(out videoDuration);

                // Create byte arrays to hold video data
                m_videoDataRgba = new MySwapQueue <byte[]>(() => new byte[(videoHeight * videoWidth) * 4]); // RGBA format (4 bytes per pixel)
            }
            catch (Exception e)
            {
                throw new Exception("Unable to Load or Play the video file", e);
            }
        }
Example #12
        void SetupVideoGrabber()
        {
            AMMediaType media = new AMMediaType();
            int hr = grabberConfig.GetConnectedMediaType(media);
            if (hr < 0)
                Marshal.ThrowExceptionForHR(hr);
            if (((!media.formatType.Equals(FormatType.VideoInfo)) &&
                (!media.formatType.Equals(FormatType.WaveEx))) ||
                (media.formatPtr == IntPtr.Zero))
                throw new NotSupportedException("Unknown Grabber Media Format");

            videoInfoHeader = (VideoInfoHeader)Marshal.PtrToStructure(media.formatPtr, typeof(VideoInfoHeader));
            Marshal.FreeCoTaskMem(media.formatPtr); media.formatPtr = IntPtr.Zero;

            hr = grabberConfig.SetBufferSamples(false);
            if (hr == 0)
                hr = grabberConfig.SetOneShot(false);
            if (hr == 0)
                hr = grabberConfig.SetCallback(null, 0);
            if (hr < 0)
                Marshal.ThrowExceptionForHR(hr);

            grabberConfig.SetCallback(this, 1);
        }
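Once the grabber delivers a frame (via the callback registered above or GetCurrentBuffer), it is often converted to a System.Drawing.Bitmap for display or saving. A hedged sketch using the videoInfoHeader captured above, assuming System.Drawing is referenced; DirectShow hands RGB frames bottom-up with DWORD-aligned rows, hence the stride rounding and the flip.

        Bitmap FrameToBitmap(IntPtr pBuffer)
        {
            int width  = videoInfoHeader.BmiHeader.Width;
            int height = videoInfoHeader.BmiHeader.Height;
            int stride = (width * 3 + 3) & ~3;                        // RGB24 rows padded to 4 bytes

            using (Bitmap wrapper = new Bitmap(width, height, stride,
                System.Drawing.Imaging.PixelFormat.Format24bppRgb, pBuffer))
            {
                Bitmap copy = new Bitmap(wrapper);                    // copy out of the unmanaged buffer
                copy.RotateFlip(RotateFlipType.RotateNoneFlipY);      // bottom-up -> top-down
                return copy;
            }
        }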
Example #13
        /// <summary>
        ///  Connects the filters of a previously created graph 
        ///  (created by createGraph()). Once rendered the graph
        ///  is ready to be used. This method may also destroy
        ///  streams if we have streams we no longer want.
        /// </summary>
        protected void renderGraph()
        {
            Guid cat;
            Guid med;
            int hr;
            bool didSomething = false;
            const int WS_CHILD = 0x40000000;
            const int WS_CLIPCHILDREN = 0x02000000;
            const int WS_CLIPSIBLINGS = 0x04000000;

            assertStopped();

            // Ensure required properties set
            if (filename == null)
                throw new ArgumentException("The Filename property has not been set to a file.\n");

            // Stop the graph
            if (mediaControl != null)
                mediaControl.Stop();

            // Create the graph if needed (group should already be created)
            createGraph();

            // Derender the graph if we have a capture or preview stream
            // that we no longer want. We can't derender the capture and 
            // preview streams separately. 
            // Notice the second case will leave a capture stream intact
            // even if we no longer want it. This allows the user that is
            // not using the preview to Stop() and Start() without
            // rerendering the graph.
            if (!wantPreviewRendered && isPreviewRendered)
                derenderGraph();
            if (!wantCaptureRendered && isCaptureRendered)
                if (wantPreviewRendered)
                    derenderGraph();

            // Render capture stream (only if necessary)
            if (wantCaptureRendered && !isCaptureRendered)
            {
                // Render the file writer portion of graph (mux -> file)
                Guid mediaSubType = MediaSubType.Avi;
                hr = captureGraphBuilder.SetOutputFileName(ref mediaSubType, Filename, out muxFilter, out fileWriterFilter);
                if (hr < 0) Marshal.ThrowExceptionForHR(hr);

                // Render video (video -> mux)
                if (VideoDevice != null)
                {
                    // Try interleaved first, because if the device supports it,
                    // it's the only way to get audio as well as video
                    cat = PinCategory.Capture;
                    med = MediaType.Interleaved;
                    hr = captureGraphBuilder.RenderStream(ref cat, ref med, videoDeviceFilter, videoCompressorFilter, muxFilter);
                    if (hr < 0)
                    {
                        med = MediaType.Video;
                        hr = captureGraphBuilder.RenderStream(ref cat, ref med, videoDeviceFilter, videoCompressorFilter, muxFilter);
                        if (hr == -2147220969) throw new DeviceInUseException("Video device", hr);
                        if (hr < 0) Marshal.ThrowExceptionForHR(hr);
                    }
                }

                // Render audio (audio -> mux)
                if (AudioDevice != null)
                {
                    cat = PinCategory.Capture;
                    med = MediaType.Audio;
                    hr = captureGraphBuilder.RenderStream(ref cat, ref med, audioDeviceFilter, audioCompressorFilter, muxFilter);
                    if (hr < 0) Marshal.ThrowExceptionForHR(hr);
                }
                isCaptureRendered = true;
                didSomething = true;
            }

            // Render preview stream (only if necessary)
            if (wantPreviewRendered && !isPreviewRendered)
            {
                // Render preview (video -> renderer)
                cat = PinCategory.Preview;
                med = MediaType.Video;
                hr = captureGraphBuilder.RenderStream(ref cat, ref med, videoDeviceFilter, baseGrabFlt, null);
                if (hr < 0) Marshal.ThrowExceptionForHR(hr);

                // Get the IVideoWindow interface
                videoWindow = (IVideoWindow)graphBuilder;

                // Set the video window to be a child of the main window
                //hr = videoWindow.put_Owner( previewWindow.Handle );

                //hr = videoWindow.put_Owner(new System.Windows.Interop.WindowInteropHelper(previewWindow).Handle);
                hr = videoWindow.put_Owner(((HwndSource)PresentationSource.FromVisual(previewWindow)).Handle);

                if (hr < 0) Marshal.ThrowExceptionForHR(hr);

                // Set video window style
                hr = videoWindow.put_WindowStyle(WS_CHILD | WS_CLIPCHILDREN | WS_CLIPSIBLINGS);
                if (hr < 0) Marshal.ThrowExceptionForHR(hr);

                // Position video window in client rect of owner window
                previewWindow.SizeChanged += new System.Windows.SizeChangedEventHandler(onPreviewWindowResize);

                // Make the video window visible, now that it is properly positioned
                hr = videoWindow.put_Visible(DsHlp.OATRUE);
                if (hr < 0) Marshal.ThrowExceptionForHR(hr);

                isPreviewRendered = true;
                didSomething = true;

                AMMediaType media = new AMMediaType();
                hr = sampGrabber.GetConnectedMediaType(media);
                if (hr < 0)
                    Marshal.ThrowExceptionForHR(hr);
                if ((media.formatType != FormatType.VideoInfo) || (media.formatPtr == IntPtr.Zero))
                    throw new NotSupportedException("Unknown Grabber Media Format");

                videoInfoHeader = (VideoInfoHeader)Marshal.PtrToStructure(media.formatPtr, typeof(VideoInfoHeader));
                Marshal.FreeCoTaskMem(media.formatPtr); media.formatPtr = IntPtr.Zero;

            }

            if (didSomething)
                graphState = GraphState.Rendered;
        }