/// <summary>
/// Builds the DirectShow filter graph for <paramref name="filename"/>, inserts a
/// SampleGrabber so decoded RGB24 frames can be copied into an XNA texture, and
/// allocates the frame buffers and output <see cref="Texture2D"/>.
/// </summary>
/// <param name="filename">Path of the video file to load.</param>
/// <param name="graphicsDevice">Device used to create the output texture.</param>
/// <exception cref="Exception">Thrown when the video cannot be loaded or played;
/// the original failure is preserved as InnerException.</exception>
public DSVideoPlayer(string filename, GraphicsDevice graphicsDevice)
{
    try
    {
        // Open DirectShow interfaces (graph builder, media control/seeking, ...).
        InitInterfaces();
        Info = new DSVideoInfo();

        // Create a SampleGrabber filter and add it to the FilterGraph so we
        // can intercept decoded frames.
        SampleGrabber sg = new SampleGrabber();
        ISampleGrabber sampleGrabber = (ISampleGrabber)sg;
        DsError.ThrowExceptionForHR(FG_GraphBuilder.AddFilter((IBaseFilter)sg, "Grabber"));

        // Ask the grabber to deliver uncompressed RGB24 video frames.
        AMMediaType mt = new AMMediaType();
        try
        {
            mt.majorType = DSVideoInfo.MEDIATYPE_Video;   // Video
            mt.subType = DSVideoInfo.MEDIASUBTYPE_RGB24;  // RGB24
            mt.formatType = DSVideoInfo.FORMAT_VideoInfo; // VideoInfo
            DsError.ThrowExceptionForHR(sampleGrabber.SetMediaType(mt));
        }
        finally
        {
            // FIX: AMMediaType owns unmanaged memory and must be freed explicitly.
            DsUtils.FreeAMMediaType(mt);
        }

        // Let DirectShow construct the rest of the FilterGraph for this file.
        DsError.ThrowExceptionForHR(FG_GraphBuilder.RenderFile(filename, null));
        Info.FileName = filename;

        // Buffer samples, keep running after the first frame (not one-shot),
        // and deliver frames through ISampleGrabberCB.BufferCB (callback type 1).
        DsError.ThrowExceptionForHR(sampleGrabber.SetBufferSamples(true));
        DsError.ThrowExceptionForHR(sampleGrabber.SetOneShot(false));
        DsError.ThrowExceptionForHR(sampleGrabber.SetCallback((ISampleGrabberCB)this, 1));

        // Hide the default video renderer window; frames are presented via XNA instead.
        IVideoWindow pVideoWindow = (IVideoWindow)FG_GraphBuilder;
        DsError.ThrowExceptionForHR(pVideoWindow.put_AutoShow(OABool.False));

        // Read the negotiated media type to discover the video dimensions.
        AMMediaType mediaType = new AMMediaType();
        try
        {
            DsError.ThrowExceptionForHR(sampleGrabber.GetConnectedMediaType(mediaType));
            VideoInfoHeader pVideoHeader = new VideoInfoHeader();
            Marshal.PtrToStructure(mediaType.formatPtr, pVideoHeader);

            // FIX: biHeight is negative for top-down DIBs; use the magnitude so
            // buffer sizes and texture dimensions stay positive.
            Info.Height = Math.Abs(pVideoHeader.BmiHeader.Height);
            Info.Width = pVideoHeader.BmiHeader.Width;
            Info.AvgTimePerFrame = pVideoHeader.AvgTimePerFrame;
            Info.BitRate = pVideoHeader.BitRate;
        }
        finally
        {
            // FIX: free the unmanaged format block of this AMMediaType as well.
            DsUtils.FreeAMMediaType(mediaType);
        }

        DsError.ThrowExceptionForHR(FG_MediaSeeking.GetDuration(out Info.Duration));

        // Frame buffers: RGBA for the texture (4 bytes per pixel) and the raw
        // BGR24 data delivered by the grabber (3 bytes per pixel).
        videoFrameBytes = new byte[(Info.Height * Info.Width) * 4];
        bgrData = new byte[(Info.Height * Info.Width) * 3];

        // Create the output frame texture matching the video dimensions.
        outputFrame = new Texture2D(graphicsDevice, Info.Width, Info.Height, 1, TextureUsage.None, SurfaceFormat.Color);
    }
    catch (Exception ex)
    {
        // FIX: pass the original exception as InnerException so the stack
        // trace and HRESULT details are not lost.
        throw new Exception("不能加载或播放该视频: " + ex.Message, ex);
    }
}
/// <summary>
/// Stops playback: halts the filter graph and rewinds to the beginning.
/// </summary>
public void Stop()
{
    // Signal waiters that playback is stopping before touching the graph.
    StoppingEvent.Set();

    // Halt the DirectShow FilterGraph.
    int hr = FG_MediaControl.Stop();
    DsError.ThrowExceptionForHR(hr);

    // Seek back to the start; the stop position is left untouched.
    hr = FG_MediaSeeking.SetPositions(
        0, AMSeekingSeekingFlags.AbsolutePositioning,
        0, AMSeekingSeekingFlags.NoPositioning);
    DsError.ThrowExceptionForHR(hr);

    // Record the new state.
    currentState = VideoState.Stopped;
}
/// <summary>
/// Updates the Output Frame data using data from the video stream. Call this in Game.Update().
/// </summary>
public void Update()
{
    // Unbind the texture first: Texture2D.SetData throws an
    // InvalidOperationException while the texture is set on the device.
    GraphicsDevice device = outputFrame.GraphicsDevice;
    if (device.Textures[0] == outputFrame)
    {
        device.Textures[0] = null;
    }

    // Copy the most recently decoded frame into the output texture.
    outputFrame.SetData<byte>(videoFrameBytes);

    // Refresh the current-position read-out (stream time, 100-ns units).
    DsError.ThrowExceptionForHR(FG_MediaSeeking.GetCurrentPosition(out currentPosition));

    // Loop the video: once the end is reached, seek back to the start.
    // NOTE(review): the 10,000,000 factor implies Duration is in seconds
    // while currentPosition is in 100-ns units — confirm against DSVideoInfo.
    if (currentPosition >= Duration * 10000000)
    {
        DsError.ThrowExceptionForHR(FG_MediaSeeking.SetPositions(
            0, AMSeekingSeekingFlags.AbsolutePositioning,
            0, AMSeekingSeekingFlags.NoPositioning));
    }
}