public CAviDS(string filename, double playSpeed)
{
    builder = new FilterGraph() as IGraphBuilder;

    grabber = new SampleGrabber() as ISampleGrabber;
    mediaType = new AMMediaType();
    mediaType.majorType = MediaType.Video;
    mediaType.subType = MediaSubType.RGB32;
    mediaType.formatType = FormatType.VideoInfo;
    DsError.ThrowExceptionForHR(grabber.SetMediaType(mediaType));
    DsError.ThrowExceptionForHR(builder.AddFilter(grabber as IBaseFilter, "Sample Grabber(DTXMania)"));

    DsError.ThrowExceptionForHR(builder.RenderFile(filename, null));
    CDirectShow.ConnectNullRendererFromSampleGrabber(builder, grabber as IBaseFilter);

    if (builder is IVideoWindow videoWindow)
    {
        videoWindow.put_AutoShow(OABool.False);
    }

    DsError.ThrowExceptionForHR(grabber.GetConnectedMediaType(mediaType));
    videoInfo = (VideoInfoHeader)Marshal.PtrToStructure(mediaType.formatPtr, typeof(VideoInfoHeader));
    nWidth = videoInfo.BmiHeader.Width;
    nHeight = videoInfo.BmiHeader.Height;

    seeker = builder as IMediaSeeking;
    DsError.ThrowExceptionForHR(seeker.GetDuration(out nMediaLength));
    DsError.ThrowExceptionForHR(seeker.SetRate(playSpeed / 20.0));

    control = builder as IMediaControl;
    filter = builder as IMediaFilter;

    grabber.SetBufferSamples(BufferThem: true);

    Run();
    Pause();
    bPlaying = false;
    bPause = false;
}
private void UpdateInterface()
{
    if (File != null)
    {
        fileNameLabel.Text = File.Name;
    }
    else
    {
        fileNameLabel.Text = null;
    }

    if (mediaSeeking != null && (seekingCapabilities & AMSeekingSeekingCapabilities.CanGetDuration) != 0)
    {
        long duration;
        mediaSeeking.GetDuration(out duration);
        trackBar.Maximum = checked((int)(duration / timeScalingFactor));
        trackBar.Enabled = (seekingCapabilities & AMSeekingSeekingCapabilities.CanSeekAbsolute) != 0;
    }
    else
    {
        trackBar.Maximum = 0;
        trackBar.Enabled = false;
    }
}
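A note on units: IMediaSeeking.GetDuration reports media time in 100-nanosecond reference-time units, so the `timeScalingFactor` above (its value is not shown in this snippet) determines the track bar's granularity. For a milliseconds-based bar it would be 10,000. A minimal sketch under that assumption:

// Minimal sketch, assuming a milliseconds-based track bar; the actual
// timeScalingFactor used in the example above is not shown.
const long timeScalingFactor = 10000; // 100ns reference-time units per millisecond

static int RefTimeToTrackBarUnits(long refTime)
{
    return checked((int)(refTime / timeScalingFactor));
}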
/// <summary>
/// Called when a node or a connection is added/removed in the DaggerLib graph
/// </summary>
/// <param name="sender"></param>
/// <param name="e"></param>
void Graph_OnTopologyChanged(object sender, EventArgs e)
{
    // The topology of the graph has changed.
    // See if seeking is available and set the extent of the TimeSliderControl.
    _mediaSeeking = _graph as IMediaSeeking;
    if (_mediaSeeking != null)
    {
        long duration = 0;
        _mediaSeeking.GetDuration(out duration);

        // Only set the extents if something has changed.
        if (_timeSliderControl.Extent != (int)(duration / 10000))
        {
            _timeSliderControl.Extent = (int)(duration / 10000);
            _timeSliderControl.Min = 0;
            _timeSliderControl.Max = _timeSliderControl.Extent;
        }
    }

    // See if frame step is available.
    _frameStepButton.Enabled = GetFrameStepInterface();

    // See if IVideoWindow is available.
    _videoWindow = _graph as IVideoWindow;
}
private void UpdateSeekBar()
{
    // If the player can seek, set the seekbar range and start the timer.
    // Otherwise, disable the seekbar.
    if (_mediaSeek != null)
    {
        seekbar.Enabled = true;

        long rtDuration;
        _mediaSeek.GetDuration(out rtDuration);
        // Note: the graph-reported duration is immediately overridden by the
        // play time stored in mvs.PlayTime.
        rtDuration = TimeSpan.Parse(mvs.PlayTime).Ticks;

        seekbar.Maximum = (int)(rtDuration / ONE_MSEC);
        seekbar.LargeChange = seekbar.Maximum / 10;

        // Start the timer.
        timer1.Enabled = true;
    }
    else
    {
        seekbar.Enabled = false;

        // Stop the old timer, if any.
        timer1.Enabled = false;
    }
}
/// <summary>
/// Creates a new Video Player. Automatically creates the required Texture2D on the specified GraphicsDevice.
/// </summary>
/// <param name="FileName">The video file to open</param>
/// <param name="graphicsDevice">XNA Graphics Device</param>
public VideoPlayer(string FileName, GraphicsDevice graphicsDevice)
{
    try
    {
        currentState = VideoState.Stopped;
        filename = FileName;
        InitInterfaces();

        // Create a SampleGrabber filter and add it to the FilterGraph.
        SampleGrabber sg = new SampleGrabber();
        ISampleGrabber sampleGrabber = (ISampleGrabber)sg;
        DsError.ThrowExceptionForHR(gb.AddFilter((IBaseFilter)sg, "Grabber"));

        // Ask the grabber for RGB24 video.
        AMMediaType mt = new AMMediaType();
        mt.majorType = MEDIATYPE_Video;   // Video
        mt.subType = MEDIASUBTYPE_RGB24;  // RGB24
        mt.formatType = FORMAT_VideoInfo; // VideoInfo
        DsError.ThrowExceptionForHR(sampleGrabber.SetMediaType(mt));

        // Construct the rest of the FilterGraph.
        DsError.ThrowExceptionForHR(gb.RenderFile(filename, null));

        // Set SampleGrabber properties.
        DsError.ThrowExceptionForHR(sampleGrabber.SetBufferSamples(true));
        DsError.ThrowExceptionForHR(sampleGrabber.SetOneShot(false));
        DsError.ThrowExceptionForHR(sampleGrabber.SetCallback((ISampleGrabberCB)this, 1));

        // Hide the default video window.
        IVideoWindow pVideoWindow = (IVideoWindow)gb;
        DsError.ThrowExceptionForHR(pVideoWindow.put_AutoShow(OABool.False));

        // Read back the connected media type for the video dimensions.
        AMMediaType MediaType = new AMMediaType();
        DsError.ThrowExceptionForHR(sampleGrabber.GetConnectedMediaType(MediaType));
        VideoInfoHeader pVideoHeader = new VideoInfoHeader();
        Marshal.PtrToStructure(MediaType.formatPtr, pVideoHeader);

        // Store video information.
        videoHeight = pVideoHeader.BmiHeader.Height;
        videoWidth = pVideoHeader.BmiHeader.Width;
        avgTimePerFrame = pVideoHeader.AvgTimePerFrame;
        bitRate = pVideoHeader.BitRate;
        DsError.ThrowExceptionForHR(ms.GetDuration(out videoDuration));

        videoFrameBytes = new byte[(videoHeight * videoWidth) * 4]; // RGBA format (4 bytes per pixel)
        bgrData = new byte[(videoHeight * videoWidth) * 3];         // BGR24 format (3 bytes per pixel)

        outputFrame = new Texture2D(graphicsDevice, videoWidth, videoHeight, 1, TextureUsage.None, SurfaceFormat.Color);
    }
    catch
    {
        throw new Exception("Unable to Load or Play the video file");
    }
}
public static int getTotal(IMediaSeeking mediaSeeking, double fps)
{
    if (mediaSeeking == null)
    {
        return 1;
    }

    long dur;
    mediaSeeking.GetDuration(out dur);
    if (!isInFrames(mediaSeeking))
    {
        dur = refTime2frame(dur, fps);
    }
    return (int)dur;
}
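The `refTime2frame` helper used above is not shown. Assuming the standard DirectShow convention of 100-nanosecond reference-time units, a plausible sketch is:

// Hypothetical sketch of refTime2frame (not the original implementation):
// converts 100ns reference time into a frame count at the given frame rate.
static long refTime2frame(long refTime, double fps)
{
    return (long)Math.Round(refTime / 10000000.0 * fps);
}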
public long NativeDuration()
{
    // Native duration with no limits applied.
    long tDur = 0;
    IMediaSeeking pMS = m_pGraph as IMediaSeeking;
    int hr = pMS.GetDuration(out tDur);
    return tDur;
}
// Reset: start position is 0, end position is the last frame; needed after playshot.
public void Reset()
{
    IMediaSeeking i_media_seeking = m_FilterGraph as IMediaSeeking;
    i_media_seeking.SetTimeFormat(TimeFormat.Frame);

    long endFrame = 0;
    int test = i_media_seeking.GetDuration(out endFrame);

    i_media_seeking.SetPositions(0, DirectShowLib.AMSeekingSeekingFlags.AbsolutePositioning,
                                 endFrame, DirectShowLib.AMSeekingSeekingFlags.AbsolutePositioning);
}
protected void SetDuration()
{
    if (m_mediaSeeking == null)
    {
        return;
    }

    long duration;

    /* Get the duration of the media. The value will be in whatever
     * time format was set, e.g. Frame or MediaTime. */
    m_mediaSeeking.GetDuration(out duration);

    Duration = duration;
}
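As the comment says, the unit of the value returned by GetDuration follows the time format currently selected on the interface. A minimal sketch, assuming a fully built graph whose source also supports frame seeking:

// Sketch: the same GetDuration call reports different units depending on
// the currently selected time format (frame support varies by source).
long duration;

m_mediaSeeking.SetTimeFormat(TimeFormat.MediaTime); // 100ns reference-time units
m_mediaSeeking.GetDuration(out duration);           // e.g. 10,000,000 per second of media

m_mediaSeeking.SetTimeFormat(TimeFormat.Frame);     // frame units, if supported
m_mediaSeeking.GetDuration(out duration);           // e.g. the total frame count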
public void GetStopTime(out long pDuration)
{
    if (m_pSeek == null)
    {
        throw new COMException("No seek pointer", E_Unexpected);
    }

    int hr = m_pSeek.GetStopPosition(out pDuration);

    // If we cannot get the stop time, try to get the duration.
    if (Failed(hr))
    {
        hr = m_pSeek.GetDuration(out pDuration);
    }

    DsError.ThrowExceptionForHR(hr);
}
public long Duration()
{
    if (m_tStop == 0)
    {
        long tDur = 0;
        IMediaSeeking pMS = m_pGraph as IMediaSeeking;
        int hr = pMS.GetDuration(out tDur);
        if (hr < 0)
        {
            return 0;
        }
        return tDur - m_tStart;
    }
    return m_tStop - m_tStart;
}
public int Percentage()
{
    if (mediaSeeking == null)
    {
        return 100;
    }

    long lDuration, lCurrent;
    mediaSeeking.GetCurrentPosition(out lCurrent);
    mediaSeeking.GetDuration(out lDuration);

    float percent = ((float)lCurrent) / ((float)lDuration);
    percent *= 100.0f;
    if (percent > 100)
    {
        percent = 100;
    }
    return (int)percent;
}
/// <summary>
/// Queries the current video source for its capabilities regarding seeking and time info.
/// The graph should be fully constructed for accurate information.
/// </summary>
protected void QuerySeekingCapabilities()
{
    try
    {
        _mediaSeeking.SetTimeFormat(TimeFormat.MediaTime);

        // Get capabilities from the graph and see what it supports that interests us.
        AMSeekingSeekingCapabilities caps;
        int r = _mediaSeeking.GetCapabilities(out caps);
        long lTest = 0;
        double dblTest = 0;
        if (r != 0)
        {
            _seek_canGetCurrentPos = false;
            _seek_canSeek = false;
            _seek_canGetDuration = false;
        }
        else
        {
            // If we were able to read the capabilities, determine whether each
            // capability actually works, both by checking the advertisement and
            // by trying it out.
            _seek_canSeek =
                ((caps & AMSeekingSeekingCapabilities.CanSeekAbsolute) == AMSeekingSeekingCapabilities.CanSeekAbsolute) &&
                (_mediaSeeking.SetPositions(0, AMSeekingSeekingFlags.AbsolutePositioning,
                                            null, AMSeekingSeekingFlags.NoPositioning) == 0);
            _seek_canGetDuration =
                ((caps & AMSeekingSeekingCapabilities.CanGetDuration) == AMSeekingSeekingCapabilities.CanGetDuration) &&
                (_mediaSeeking.GetDuration(out lTest) == 0);
            _seek_canGetCurrentPos =
                ((caps & AMSeekingSeekingCapabilities.CanGetCurrentPos) == AMSeekingSeekingCapabilities.CanGetCurrentPos) &&
                (_mediaSeeking.GetCurrentPosition(out lTest) == 0);
        }

        // Check capabilities for the IMediaPosition interface.
        _pos_canSeek = (_mediaPosition.put_CurrentPosition(0) == 0);
        _pos_canGetDuration = (_mediaPosition.get_Duration(out dblTest) == 0);
        _pos_canGetCurrentPos = (_mediaPosition.get_CurrentPosition(out dblTest) == 0);
    }
    catch (Exception)
    {
        _seek_canSeek = false;
        _pos_canSeek = false;
    }
}
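When probing is not needed and the advertised flags are trusted, a single bitmask test per capability is enough; a minimal sketch against the same `_mediaSeeking` field:

// Sketch: read the advertised seeking capabilities once and test the flags.
AMSeekingSeekingCapabilities caps;
bool ok = _mediaSeeking.GetCapabilities(out caps) == 0;
bool canSeek = ok && (caps & AMSeekingSeekingCapabilities.CanSeekAbsolute) != 0;
bool canGetDuration = ok && (caps & AMSeekingSeekingCapabilities.CanGetDuration) != 0;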
private void open()
{
    int hr;
    if (this.GraphBuilder == null)
    {
        this.GraphBuilder = (IGraphBuilder)new FilterGraph();
        hr = GraphBuilder.RenderFile(file, null); // load the file
        DsError.ThrowExceptionForHR(hr);

        this.MediaControl = (IMediaControl)this.GraphBuilder;
        this.MediaEventEx = (IMediaEventEx)this.GraphBuilder;
        MediaSeeking = (IMediaSeeking)this.GraphBuilder;
        MediaSeeking.SetTimeFormat(TIME_FORMAT_FRAME);
        MediaSeeking.SetRate(0.3);
        this.VideoFrameStep = (IVideoFrameStep)this.GraphBuilder;
        // MediaPosition = (IMediaPosition)this.GraphBuilder;
        this.VideoWindow = this.GraphBuilder as IVideoWindow;
        this.BasicVideo = this.GraphBuilder as IBasicVideo;
        this.BasicAudio = this.GraphBuilder as IBasicAudio;

        hr = this.MediaEventEx.SetNotifyWindow(this.Handle, WM_GRAPHNOTIFY, IntPtr.Zero);
        DsError.ThrowExceptionForHR(hr);

        hr = this.VideoWindow.put_Owner(this.Handle);
        DsError.ThrowExceptionForHR(hr);
        hr = this.VideoWindow.put_WindowStyle(WindowStyle.Child | WindowStyle.ClipSiblings | WindowStyle.ClipChildren);
        DsError.ThrowExceptionForHR(hr);
        this.Focus();

        hr = InitVideoWindow(1, 1);
        DsError.ThrowExceptionForHR(hr);

        long time;
        MediaSeeking.GetDuration(out time);
        label20.Text = time.ToString();
        trackBar1.SetRange(0, (int)time);

        t = new Thread(new ThreadStart(updateTimeBarThread));
    }
}
private bool CheckGraphConversion(ref IMediaSeeking mediaSeeking)
{
    int hr;
    IMediaEvent mediaEvent = (IMediaEvent)currentFilterGraph;

    // Check the graph / conversion is going ok, and raise any progress events.
    EventCode statusCode;
    hr = mediaEvent.WaitForCompletion(100, out statusCode);
    switch (statusCode)
    {
        case EventCode.Complete:
            return true;

        case 0: // Still going - fire event with an update on where we are.
            if (mediaSeeking != null)
            {
                long curPos;
                mediaSeeking.GetCurrentPosition(out curPos);
                long length;
                mediaSeeking.GetDuration(out length);
                double progress = curPos * 100.0 / (double)length;
                if (ConversionProgressChanged != null)
                {
                    ConversionProgressChanged(new object(), new ProgressChangedEventArgs(progress));
                }
            }
            return false;

        default: // Error
            EventCode tryCode;
            IntPtr lp1, lp2;
            hr = mediaEvent.GetEvent(out tryCode, out lp1, out lp2, 200);
            DsError.ThrowExceptionForHR(hr);
            throw new Exception(statusCode.ToString());
    }
}
/// <summary>
/// Test the functions that read the current position
/// </summary>
void TestPosition()
{
    int hr;
    long pCurrent1, pStop1, pDuration1;
    long pCurrent2, pStop2;
    long pEarliest, pLatest;

    // Read the current play position.
    hr = m_ims.GetCurrentPosition(out pCurrent1);
    Marshal.ThrowExceptionForHR(hr);

    // Read the current stop position.
    hr = m_ims.GetStopPosition(out pStop1);
    Marshal.ThrowExceptionForHR(hr);

    // Read the duration (probably StopPosition - Position).
    hr = m_ims.GetDuration(out pDuration1);
    Marshal.ThrowExceptionForHR(hr);

    // Read both current and stop positions.
    hr = m_ims.GetPositions(out pCurrent2, out pStop2);
    Marshal.ThrowExceptionForHR(hr);

    // Get the cached range of values.
    hr = m_ims.GetAvailable(out pEarliest, out pLatest);
    Marshal.ThrowExceptionForHR(hr);

    // Since we aren't playing, current should be 0, and stop &
    // duration should be the same (the length of the clip).
    Debug.Assert(pCurrent1 == 0, "CurrentPosition");
    Debug.Assert(pStop1 == pDuration1, "Stop, Duration");
    Debug.Assert(pCurrent1 == pCurrent2, "GetPositions");
    Debug.Assert(pStop1 == pStop2, "GetPositions stop");
    Debug.Assert(pEarliest == pCurrent1, "GetAvailable earliest");
    Debug.Assert(pLatest == pStop2, "GetAvailable latest");
}
/// <summary>Runs the graph</summary>
/// <param name="graphBuilder">The graph to be run.</param>
/// <param name="seekableFilter">The filter to use for computing percent complete. Must implement IMediaSeeking.</param>
protected void RunGraph(IGraphBuilder graphBuilder, IBaseFilter seekableFilter)
{
    // Get the necessary control and event interfaces.
    IMediaControl mediaControl = (IMediaControl)graphBuilder;
    IMediaEvent mediaEvent = (IMediaEvent)graphBuilder;

    // Get the media seeking interface to use for computing status and progress updates.
    IMediaSeeking mediaSeeking = seekableFilter as IMediaSeeking;
    if (!CanGetPositionAndDuration(mediaSeeking))
    {
        mediaSeeking = graphBuilder as IMediaSeeking;
        if (!CanGetPositionAndDuration(mediaSeeking))
        {
            mediaSeeking = null;
        }
    }

    // Publish the graph to the running object table and to a temporary file for examination/debugging purposes.
    //using (new GraphPublisher(graphBuilder, "C:\\vidtests\\grf\\" + Guid.NewGuid().ToString("N") + ".grf"))
    {
        // Run the graph.
        int hr = 0;
        hr = mediaControl.Pause();
        hr = mediaControl.Run();
        DsError.ThrowExceptionForHR(hr);
        try
        {
            ProgressChanged(new object(), new ProgressChangedEventArgs(0.0)); // initial progress update stating 0% done
            bool done = false;
            while (!CancellationPending && !done) // continue until we're done/cancelled
            {
                // Poll to see how we're doing.
                EventCode statusCode;
                hr = mediaEvent.WaitForCompletion(200, out statusCode);
                Console.Write(" <" + statusCode.ToString() + ">,");
                switch (statusCode)
                {
                    case EventCode.Complete:
                        done = true;
                        break;

                    case 0: // Get an update on where we are with the conversion.
                        if (mediaSeeking != null)
                        {
                            long curPos;
                            mediaSeeking.GetCurrentPosition(out curPos);
                            long length;
                            mediaSeeking.GetDuration(out length);
                            double progress = curPos * 100.0 / (double)length;
                            if (progress > 0)
                            {
                                ProgressChanged(new object(), new ProgressChangedEventArgs(progress));
                            }
                        }
                        break;

                    default: // Error, so throw exception.
                        EventCode tryCode;
                        IntPtr lp1, lp2;
                        hr = mediaEvent.GetEvent(out tryCode, out lp1, out lp2, 200);
                        DsError.ThrowExceptionForHR(hr);
                        throw new Exception(statusCode.ToString());
                }
            }
            ProgressChanged(new object(), new ProgressChangedEventArgs(100)); // final progress update stating 100% done
        }
        finally
        {
            // We're done converting, so stop the graph.
            FilterState graphState;
            mediaControl.GetState(100, out graphState);
            if (graphState == FilterState.Running)
            {
                mediaControl.Pause();
            }
            mediaControl.Stop();

            // Signal completion.
            Completed(new object(), new EventArgs());
        }
    }
}
public CAviDS(string filename, double playSpeed)
{
    int hr = 0x0;

    builder = (IGraphBuilder)new FilterGraph();

    #region [Sample Grabber]
    {
        grabber = new SampleGrabber() as ISampleGrabber;
        mediaType = new AMMediaType();
        mediaType.majorType = MediaType.Video;
        mediaType.subType = MediaSubType.RGB32;
        mediaType.formatType = FormatType.VideoInfo;
        hr = grabber.SetMediaType(mediaType);
        DsError.ThrowExceptionForHR(hr);
        hr = builder.AddFilter((IBaseFilter)grabber, "Sample Grabber");
        DsError.ThrowExceptionForHR(hr);
    }
    #endregion

    hr = builder.RenderFile(filename, null);
    DsError.ThrowExceptionForHR(hr);

    // Unless connected to a Null renderer, a video window is shown.
    // Skipping rendering also improves processing speed.
    CDirectShow.ConnectNullRendererFromSampleGrabber(builder, grabber as IBaseFilter);
    CDirectShow.tグラフを解析しデバッグ出力する(builder); // analyze the graph and write debug output

    IVideoWindow videoWindow = builder as IVideoWindow;
    if (videoWindow != null)
    {
        videoWindow.put_AutoShow(OABool.False);
    }

    #region [Video Info]
    {
        hr = grabber.GetConnectedMediaType(mediaType);
        DsError.ThrowExceptionForHR(hr);
        videoInfo = (VideoInfoHeader)Marshal.PtrToStructure(mediaType.formatPtr, typeof(VideoInfoHeader));
        nWidth = videoInfo.BmiHeader.Width;
        nHeight = videoInfo.BmiHeader.Height;
    }
    #endregion

    #region [Seeker]
    {
        seeker = builder as IMediaSeeking;
        hr = seeker.GetDuration(out nMediaLength);
        DsError.ThrowExceptionForHR(hr);
        hr = seeker.SetRate(playSpeed / 20);
        DsError.ThrowExceptionForHR(hr);
    }
    #endregion

    #region [Control]
    {
        control = builder as IMediaControl;
    }
    #endregion

    #region [Filter]
    {
        filter = builder as IMediaFilter;
    }
    #endregion

    grabber.SetBufferSamples(true);

    this.Run();
    this.Pause();
    bPlaying = false;
    bPause = false; // Externally, playback appears stopped; do not expose the paused state to the outside.
}
/// <summary>Runs the graph</summary>
/// <param name="graphBuilder">The graph to be run.</param>
/// <param name="seekableFilter">The filter to use for computing percent complete. Must implement IMediaSeeking.</param>
protected void RunGraph(IGraphBuilder graphBuilder, IBaseFilter seekableFilter)
{
    // Get the necessary control and event interfaces.
    IMediaControl mediaControl = (IMediaControl)graphBuilder;
    IMediaEvent mediaEvent = (IMediaEvent)graphBuilder;

    // Get the media seeking interface to use for computing status and progress updates.
    IMediaSeeking mediaSeeking = seekableFilter as IMediaSeeking;
    if (!CanGetPositionAndDuration(mediaSeeking))
    {
        mediaSeeking = graphBuilder as IMediaSeeking;
        if (!CanGetPositionAndDuration(mediaSeeking))
        {
            mediaSeeking = null;
        }
    }

    // Publish the graph to the running object table and to a temporary file for examination/debugging purposes.
    using (new GraphPublisher(graphBuilder, Path.GetTempPath() + Guid.NewGuid().ToString("N") + ".grf"))
    {
        // Run the graph.
        mediaControl.Run();
        try
        {
            OnProgressChanged(0); // initial progress update stating 0% done
            bool done = false;
            while (!CancellationPending && !done) // continue until we're done/cancelled
            {
                // Poll to see how we're doing.
                EventCode statusCode = EventCode.None;
                int hr = mediaEvent.WaitForCompletion(PollFrequency, out statusCode);
                switch (statusCode)
                {
                    case EventCode.Complete:
                        done = true;
                        break;

                    case EventCode.None:
                        // Get an update on where we are with the conversion.
                        if (mediaSeeking != null)
                        {
                            ulong curPos = mediaSeeking.GetCurrentPosition();
                            ulong length = mediaSeeking.GetDuration();
                            double progress = curPos * 100.0 / (double)length;
                            if (progress > 0)
                            {
                                OnProgressChanged(progress);
                            }
                        }
                        break;

                    default: // Error, so throw exception.
                        throw new DirectShowException(hr, null);
                }
            }
            OnProgressChanged(100); // final progress update stating 100% done
        }
        finally
        {
            // We're done converting, so stop the graph.
            mediaControl.Stop();
        }
    }
}
/// <summary>
/// Creates a new Video Player. Automatically creates the required Texture2D on the specified GraphicsDevice.
/// </summary>
/// <param name="FileName">The video file to open</param>
/// <param name="graphicsDevice">XNA Graphics Device</param>
public XNAPlayer(Feel feel, string FileName, GraphicsDevice graphicsDevice, Action callback)
{
    Utils.RunAsynchronously(() =>
    {
        try
        {
            // Set video state
            currentState = VideoState.Stopped;

            // Store Filename
            filename = FileName;

            // Open DirectShow Interfaces
            InitInterfaces();

            // Create a SampleGrabber Filter and add it to the FilterGraph
            SampleGrabber sg = new SampleGrabber();
            ISampleGrabber sampleGrabber = (ISampleGrabber)sg;
            DsError.ThrowExceptionForHR(gb.AddFilter((IBaseFilter)sg, "Grabber"));

            // Setup Media type info for the SampleGrabber
            AMMediaType mt = new AMMediaType();
            mt.majorType = MEDIATYPE_Video;   // Video
            mt.subType = MEDIASUBTYPE_RGB24;  // RGB24
            mt.formatType = FORMAT_VideoInfo; // VideoInfo
            DsError.ThrowExceptionForHR(sampleGrabber.SetMediaType(mt));

            // Construct the rest of the FilterGraph
            DsError.ThrowExceptionForHR(gb.RenderFile(filename, null));

            // Set SampleGrabber Properties
            DsError.ThrowExceptionForHR(sampleGrabber.SetBufferSamples(true));
            DsError.ThrowExceptionForHR(sampleGrabber.SetOneShot(false));
            DsError.ThrowExceptionForHR(sampleGrabber.SetCallback((ISampleGrabberCB)this, 1));

            // Hide Default Video Window
            IVideoWindow pVideoWindow = (IVideoWindow)gb;
            DsError.ThrowExceptionForHR(pVideoWindow.put_MessageDrain(IntPtr.Zero));
            DsError.ThrowExceptionForHR(pVideoWindow.put_WindowState(WindowState.Hide));
            DsError.ThrowExceptionForHR(pVideoWindow.put_AutoShow(OABool.False));

            // Create AMMediaType to capture video information
            AMMediaType MediaType = new AMMediaType();
            DsError.ThrowExceptionForHR(sampleGrabber.GetConnectedMediaType(MediaType));
            VideoInfoHeader pVideoHeader = new VideoInfoHeader();
            Marshal.PtrToStructure(MediaType.formatPtr, pVideoHeader);

            // Store video information
            videoHeight = pVideoHeader.BmiHeader.Height;
            videoWidth = pVideoHeader.BmiHeader.Width;
            avgTimePerFrame = pVideoHeader.AvgTimePerFrame;
            bitRate = pVideoHeader.BitRate;
            DsError.ThrowExceptionForHR(ms.GetDuration(out videoDuration));

            // Create byte arrays to hold video data
            videoFrameBytes = new byte[(videoHeight * videoWidth) * 4]; // RGBA format (4 bytes per pixel)
            bgrData = new byte[(videoHeight * videoWidth) * 4];         // BGR24 format (3 bytes per pixel + 1 for safety)

            // Create Output Frame Texture2D with the height and width of the video
            outputFrame = new Texture2D(graphicsDevice, videoWidth, videoHeight, 1, TextureUsage.None, SurfaceFormat.Color);

            feel.RunOnUIThread(callback);
        }
        catch
        {
            feel.ShowToast("Unable to Load or Play the video file");
        }
    }, () => { });
}
/// <summary>
/// Worker thread.
/// </summary>
private void WorkerThread()
{
    ReasonToFinishPlaying reasonToStop = ReasonToFinishPlaying.StoppedByUser;

    // grabber
    Grabber grabber = new Grabber(this);

    // objects
    object graphObject = null;
    object grabberObject = null;

    // interfaces
    IGraphBuilder graph = null;
    IBaseFilter sourceBase = null;
    IBaseFilter grabberBase = null;
    ISampleGrabber sampleGrabber = null;
    IMediaControl mediaControl = null;
    IMediaEventEx mediaEvent = null;
    IMediaSeeking mediaSeeking = null;

    try
    {
        // get type for filter graph
        Type type = Type.GetTypeFromCLSID(Clsid.FilterGraph);
        if (type == null)
        {
            throw new ApplicationException("Failed creating filter graph");
        }

        // create filter graph
        graphObject = Activator.CreateInstance(type);
        graph = (IGraphBuilder)graphObject;

        // create source device's object
        graph.AddSourceFilter(_fileName, "source", out sourceBase);
        if (sourceBase == null)
        {
            throw new ApplicationException("Failed creating source filter");
        }

        // get type for sample grabber
        type = Type.GetTypeFromCLSID(Clsid.SampleGrabber);
        if (type == null)
        {
            throw new ApplicationException("Failed creating sample grabber");
        }

        // create sample grabber
        grabberObject = Activator.CreateInstance(type);
        sampleGrabber = (ISampleGrabber)grabberObject;
        grabberBase = (IBaseFilter)grabberObject;

        // add grabber filters to graph
        graph.AddFilter(grabberBase, "grabber");

        // set media type
        AMMediaType mediaType = new AMMediaType
        {
            MajorType = MediaType.Video,
            SubType = MediaSubType.RGB24
        };
        sampleGrabber.SetMediaType(mediaType);

        // connect pins
        int pinToTry = 0;
        IPin inPin = Tools.GetInPin(grabberBase, 0);
        IPin outPin = null;

        // find output pin acceptable by sample grabber
        while (true)
        {
            outPin = Tools.GetOutPin(sourceBase, pinToTry);
            if (outPin == null)
            {
                Marshal.ReleaseComObject(inPin);
                throw new ApplicationException("Did not find acceptable output video pin in the given source");
            }

            if (graph.Connect(outPin, inPin) < 0)
            {
                Marshal.ReleaseComObject(outPin);
                outPin = null;
                pinToTry++;
            }
            else
            {
                break;
            }
        }
        Marshal.ReleaseComObject(outPin);
        Marshal.ReleaseComObject(inPin);

        // get media type
        if (sampleGrabber.GetConnectedMediaType(mediaType) == 0)
        {
            VideoInfoHeader vih = (VideoInfoHeader)Marshal.PtrToStructure(mediaType.FormatPtr, typeof(VideoInfoHeader));
            grabber.Width = vih.BmiHeader.Width;
            grabber.Height = vih.BmiHeader.Height;
            mediaType.Dispose();
        }

        // let's do rendering, if we don't need to prevent freezing
        if (!_preventFreezing)
        {
            // render pin
            graph.Render(Tools.GetOutPin(grabberBase, 0));

            // configure video window
            IVideoWindow window = (IVideoWindow)graphObject;
            window.put_AutoShow(false);
            window = null;
        }

        // configure sample grabber
        sampleGrabber.SetBufferSamples(false);
        sampleGrabber.SetOneShot(false);
        sampleGrabber.SetCallback(grabber, 1);

        // disable clock, if someone requested it
        if (!_referenceClockEnabled)
        {
            IMediaFilter mediaFilter = (IMediaFilter)graphObject;
            mediaFilter.SetSyncSource(null);
        }

        // get media control
        mediaControl = (IMediaControl)graphObject;

        // get media events' interface
        mediaEvent = (IMediaEventEx)graphObject;

        // get media seeking & check seeking capability
        mediaSeeking = (IMediaSeeking)graphObject;
        mediaSeeking.GetDuration(out _duration);
        _onVideoLoad(_duration);
        const SeekingCapabilities caps = SeekingCapabilities.CanSeekAbsolute | SeekingCapabilities.CanGetDuration;
        SeekingCapabilities canSeekCap;
        int hr = mediaSeeking.GetCapabilities(out canSeekCap);
        if (hr < 0)
        {
            throw new ApplicationException("Failed getting seeking capabilities");
        }
        _isSeekEnabled = (canSeekCap & caps) == caps;

        // run
        mediaControl.Run();
        IsPlaying = true;
        do
        {
            // get current time, if requested
            if (_isGetCurrentTime)
            {
                mediaSeeking.GetCurrentPosition(out _currentGetTime);
                _isGetCurrentTime = false;
            }

            if (IsSetPause)
            {
                mediaControl.Pause();
                IsSetPause = false;
                IsPlaying = false;
            }

            if (IsSetPlay)
            {
                mediaControl.Run();
                IsSetPlay = false;
                IsPlaying = true;
            }

            // set current time, if requested
            if (_isSetCurrentTime)
            {
                long stop = 0;
                mediaSeeking.SetPositions(ref _currentSetTime, SeekingFlags.AbsolutePositioning,
                                          ref stop, SeekingFlags.NoPositioning);
                _isSetCurrentTime = false;
            }

            IntPtr p1;
            IntPtr p2;
            DsEvCode code;
            if (mediaEvent.GetEvent(out code, out p1, out p2, 0) >= 0)
            {
                mediaEvent.FreeEventParams(code, p1, p2);
                if (code == DsEvCode.Complete)
                {
                    reasonToStop = ReasonToFinishPlaying.EndOfStreamReached;
                    break;
                }
            }
        } while (!_stopEvent.WaitOne(100, false));

        IsPlaying = false;
        mediaControl.Stop();
    }
    catch (Exception exception)
    {
        // provide information to clients
        if (VideoSourceError != null)
        {
            VideoSourceError(this, new VideoSourceErrorEventArgs(exception.Message));
        }
    }
    finally
    {
        // release all objects
        graph = null;
        grabberBase = null;
        sampleGrabber = null;
        mediaControl = null;
        mediaEvent = null;
        mediaSeeking = null;

        if (graphObject != null)
        {
            Marshal.ReleaseComObject(graphObject);
            graphObject = null;
        }
        if (sourceBase != null)
        {
            Marshal.ReleaseComObject(sourceBase);
            sourceBase = null;
        }
        if (grabberObject != null)
        {
            Marshal.ReleaseComObject(grabberObject);
            grabberObject = null;
        }
    }

    if (PlayingFinished != null)
    {
        PlayingFinished(this, reasonToStop);
    }
}
/// <summary>
/// Open a new video feed (either web-cam or video file).
/// </summary>
/// <param name="filter">Specifies the web-cam filter to use, or <i>null</i> when opening a video file.</param>
/// <param name="pb">Specifies the output window, or <i>null</i> when running headless and only receiving snapshots.</param>
/// <param name="strFile">Specifies the video file to use, or <i>null</i> when opening a web-cam feed.</param>
/// <param name="vidCap">Optionally specifies the video capabilities to use, or <i>null</i> to ignore and use the default video capabilities.</param>
/// <returns>The duration (if any) is returned, or 0.</returns>
/// <remarks>To get the video capabilities see the GetVideoCapabilities method.</remarks>
public long Open(Filter filter, PictureBox pb, string strFile, VideoCapability vidCap = null)
{
    int hr;

    if (filter != null && strFile != null)
    {
        throw new ArgumentException("Both the filter and file are non NULL - only one of these can be used at a time; the filter is used with the web-cam and the file is used with a video file.");
    }

    m_selectedFilter = filter;
    m_graphBuilder = (IFilterGraph2)Activator.CreateInstance(Type.GetTypeFromCLSID(Clsid.FilterGraph, true));

    // When using a web-cam, create the moniker for the filter and add the filter to the graph.
    if (strFile == null)
    {
        IMoniker moniker = m_selectedFilter.CreateMoniker();
        m_graphBuilder.AddSourceFilterForMoniker(moniker, null, m_selectedFilter.Name, out m_camFilter);
        Marshal.ReleaseComObject(moniker);
        m_camControl = m_camFilter as IAMCameraControl;

        // Create the capture builder used to build the web-cam filter graph.
        m_captureGraphBuilder = (ICaptureGraphBuilder2)Activator.CreateInstance(Type.GetTypeFromCLSID(Clsid.CaptureGraphBuilder2, true));
        hr = m_captureGraphBuilder.SetFiltergraph(m_graphBuilder as IGraphBuilder);
        if (hr < 0) { Marshal.ThrowExceptionForHR(hr); }

        // Add the web-cam filter to the graph.
        hr = m_graphBuilder.AddFilter(m_camFilter, m_selectedFilter.Name);
        if (hr < 0) { Marshal.ThrowExceptionForHR(hr); }

        // Set the desired video capabilities.
        if (vidCap != null)
        {
            setVideoCapabilities(m_captureGraphBuilder, m_camFilter, vidCap);
        }
    }
    else
    {
        // Build the graph with the video file.
        hr = m_graphBuilder.RenderFile(strFile, null);
        if (hr < 0) { Marshal.ThrowExceptionForHR(hr); }

        m_mediaSeek = m_graphBuilder as IMediaSeeking;
        if (pb != null)
        {
            m_videoFrameStep = m_graphBuilder as IVideoFrameStep;
        }
    }

    // Create the sample grabber used to get snapshots.
    m_sampleGrabber = (ISampleGrabber)Activator.CreateInstance(Type.GetTypeFromCLSID(Clsid.SampleGrabber, true));
    m_baseGrabFilter = m_sampleGrabber as IBaseFilter;
    m_mediaControl = m_graphBuilder as IMediaControl;

    // When using a target window, get the video window used with the target output window.
    if (pb != null)
    {
        m_mediaEventEx = m_graphBuilder as IMediaEventEx;
        m_videoWindow = m_graphBuilder as IVideoWindow;
    }
    // Otherwise create the null renderer, for no video output is needed (only snapshots).
    else
    {
        m_nullRenderer = (IBaseFilter)Activator.CreateInstance(Type.GetTypeFromCLSID(Clsid.NullRenderer, true));
    }

    // Add the sample grabber to the filter graph.
    hr = m_graphBuilder.AddFilter(m_baseGrabFilter, "Ds.Lib Grabber");
    if (hr < 0) { Marshal.ThrowExceptionForHR(hr); }

    // Turn off the sample grabber buffers.
    hr = m_sampleGrabber.SetBufferSamples(false);
    if (hr < 0) { Marshal.ThrowExceptionForHR(hr); }

    // Turn off the sample grabber one-shot.
    hr = m_sampleGrabber.SetOneShot(false);
    if (hr < 0) { Marshal.ThrowExceptionForHR(hr); }

    // Turn ON the sample grabber callback where video data is to be received.
    hr = m_sampleGrabber.SetCallback(this, 1);
    if (hr < 0) { Marshal.ThrowExceptionForHR(hr); }

    // Set the media format used by the sample grabber.
    AMMediaType media = new AMMediaType();
    media.majorType = MediaType.Video;
    media.subType = MediaSubType.RGB24;
    media.formatType = FormatType.VideoInfo;
    hr = m_sampleGrabber.SetMediaType(media);
    if (hr < 0) { Marshal.ThrowExceptionForHR(hr); }

    // Connect the WebCam Filters and Frame Grabber.
    if (m_selectedFilter != null)
    {
        Guid cat;
        Guid med;

        cat = PinCategory.Preview;
        med = MediaType.Video;
        hr = m_captureGraphBuilder.RenderStream(ref cat, ref med, m_camFilter, null, null);
        if (hr < 0) { Marshal.ThrowExceptionForHR(hr); }

        cat = PinCategory.Capture;
        med = MediaType.Video;
        hr = m_captureGraphBuilder.RenderStream(ref cat, ref med, m_camFilter, null, m_baseGrabFilter);
        if (hr < 0) { Marshal.ThrowExceptionForHR(hr); }
    }
    // Connect the Frame Grabber (and optionally the Null Renderer).
    else
    {
        // Get the video decoder and its pins.
        m_videoFilter = Utility.GetFilter(m_graphBuilder as IGraphBuilder, "Video Decoder", false);

        IPin pOutput;
        hr = Utility.GetPin(m_videoFilter, PinDirection.Output, out pOutput);
        if (hr < 0) { Marshal.ThrowExceptionForHR(hr); }

        IPin pInput;
        hr = pOutput.ConnectedTo(out pInput);
        if (hr < 0) { Marshal.ThrowExceptionForHR(hr); }

        PinInfo pinInfo;
        hr = pInput.QueryPinInfo(out pinInfo);
        if (hr < 0) { Marshal.ThrowExceptionForHR(hr); }

        // Get the sample grabber pins.
        IPin pGrabInput;
        hr = Utility.GetPin(m_baseGrabFilter, PinDirection.Input, out pGrabInput);
        if (hr < 0) { Marshal.ThrowExceptionForHR(hr); }

        IPin pGrabOutput;
        hr = Utility.GetPin(m_baseGrabFilter, PinDirection.Output, out pGrabOutput);
        if (hr < 0) { Marshal.ThrowExceptionForHR(hr); }

        // Disconnect the source filter output and the input it is connected to.
        hr = pOutput.Disconnect();
        if (hr < 0) { Marshal.ThrowExceptionForHR(hr); }

        hr = pInput.Disconnect();
        if (hr < 0) { Marshal.ThrowExceptionForHR(hr); }

        // Connect the source output to the Grabber input.
        hr = m_graphBuilder.Connect(pOutput, pGrabInput);
        if (hr < 0) { Marshal.ThrowExceptionForHR(hr); }

        // When rendering video output, connect the Grabber output to the original
        // downstream input that the source was connected to.
        if (m_nullRenderer == null)
        {
            hr = m_graphBuilder.Connect(pGrabOutput, pInput);
            if (hr < 0) { Marshal.ThrowExceptionForHR(hr); }
        }

        Marshal.ReleaseComObject(pOutput);
        Marshal.ReleaseComObject(pInput);
        Marshal.ReleaseComObject(pGrabInput);
        Marshal.ReleaseComObject(pGrabOutput);
    }

    // Remove sound filters.
    IBaseFilter soundFilter = Utility.GetFilter(m_graphBuilder as IGraphBuilder, "Audio Decoder", false);
    if (soundFilter != null)
    {
        hr = m_graphBuilder.RemoveFilter(soundFilter);
        if (hr < 0) { Marshal.ThrowExceptionForHR(hr); }
        Marshal.ReleaseComObject(soundFilter);
    }

    soundFilter = Utility.GetFilter(m_graphBuilder as IGraphBuilder, "Sound", false);
    if (soundFilter != null)
    {
        hr = m_graphBuilder.RemoveFilter(soundFilter);
        if (hr < 0) { Marshal.ThrowExceptionForHR(hr); }
        Marshal.ReleaseComObject(soundFilter);
    }

    // When using a headless (no video rendering) setup, connect the null renderer to the Sample Grabber.
    if (m_nullRenderer != null)
    {
        // Add the null renderer.
        hr = m_graphBuilder.AddFilter(m_nullRenderer, "Null Renderer");
        if (hr < 0) { Marshal.ThrowExceptionForHR(hr); }

        // Get the sample grabber output pin.
        IPin pGrabOutput;
        hr = Utility.GetPin(m_baseGrabFilter, PinDirection.Output, out pGrabOutput);
        if (hr < 0) { Marshal.ThrowExceptionForHR(hr); }

        // Get the null renderer input pin.
        IPin pInput;
        hr = Utility.GetPin(m_nullRenderer, PinDirection.Input, out pInput);
        if (hr < 0) { Marshal.ThrowExceptionForHR(hr); }

        // Disconnect the sample grabber pin.
        hr = pGrabOutput.Disconnect();
        if (hr < 0) { Marshal.ThrowExceptionForHR(hr); }

        // Connect the Grabber output to the null renderer.
        hr = m_graphBuilder.Connect(pGrabOutput, pInput);
        if (hr < 0) { Marshal.ThrowExceptionForHR(hr); }

        Marshal.ReleaseComObject(pInput);
        Marshal.ReleaseComObject(pGrabOutput);

        // Remove the Video Renderer, for it is no longer needed.
        IBaseFilter ivideorender = Utility.GetFilter(m_graphBuilder as IGraphBuilder, "Video Renderer");
        if (ivideorender != null)
        {
            m_graphBuilder.RemoveFilter(ivideorender);
            Marshal.ReleaseComObject(ivideorender);
        }
    }

    // Get the sample grabber media settings and video header.
    media = new AMMediaType();
    hr = m_sampleGrabber.GetConnectedMediaType(media);
    if (hr < 0) { Marshal.ThrowExceptionForHR(hr); }

    if ((media.formatType != FormatType.VideoInfo &&
         media.formatType != FormatType.WaveEx &&
         media.formatType != FormatType.MpegVideo) ||
        media.formatPtr == IntPtr.Zero)
    {
        throw new Exception("Media grabber format is unknown.");
    }

    // Get the video header with frame sizing information.
    m_videoInfoHeader = Marshal.PtrToStructure(media.formatPtr, typeof(VideoInfoHeader)) as VideoInfoHeader;
    Marshal.FreeCoTaskMem(media.formatPtr);
    media.formatPtr = IntPtr.Zero;

    // If we are rendering video output, set up the video window (which requires a message pump).
    if (m_videoWindow != null)
    {
        // Set up the video window.
        hr = m_videoWindow.put_Owner(pb.Handle);
        if (hr < 0) { Marshal.ThrowExceptionForHR(hr); }

        hr = m_videoWindow.put_WindowStyle(WS_CHILD | WS_CLIPCHILDREN | WS_CLIPSIBLINGS);
        if (hr < 0) { Marshal.ThrowExceptionForHR(hr); }

        // Resize the window.
        hr = m_videoWindow.SetWindowPosition(0, 0, pb.Width, pb.Height);
        if (hr < 0) { Marshal.ThrowExceptionForHR(hr); }

        hr = m_videoWindow.put_Visible(DsHlp.OATRUE);
        if (hr < 0) { Marshal.ThrowExceptionForHR(hr); }

        // Subscribe to the picturebox size changed event.
        pb.SizeChanged += Pb_SizeChanged;
    }

    // Start the capturing.
    hr = m_mediaControl.Run();
    if (hr < 0) { Marshal.ThrowExceptionForHR(hr); }

    // When using a video file, immediately pause at the start.
    if (strFile != null)
    {
        hr = m_mediaControl.Pause();
        if (hr < 0) { Marshal.ThrowExceptionForHR(hr); }
    }

    // When using a media file, save the video file's duration.
    if (m_mediaSeek != null)
    {
        hr = m_mediaSeek.GetDuration(out m_lDuration);
        if (hr < 0) { Marshal.ThrowExceptionForHR(hr); }
    }

    m_bConnected = true;
    return m_lDuration;
}
/// <summary>
/// Creates a new Video Player and prepares the frame data later used to fill the required Texture2D.
/// </summary>
/// <param name="FileName">The video file to open</param>
protected VideoPlayer(string FileName)
{
    try
    {
        // Set video state
        currentState = VideoState.Stopped;

        // Store Filename
        filename = FileName;

        // Open DirectShow Interfaces
        InitInterfaces();

        // Create a SampleGrabber Filter and add it to the FilterGraph
        //SampleGrabber sg = new SampleGrabber();
        var comtype = Type.GetTypeFromCLSID(Clsid.SampleGrabber);
        if (comtype == null)
        {
            throw new NotSupportedException("DirectX (8.1 or higher) not installed?");
        }
        m_comObject = Activator.CreateInstance(comtype);
        ISampleGrabber sampleGrabber = (ISampleGrabber)m_comObject;
        m_graphBuilder.AddFilter((IBaseFilter)m_comObject, "Grabber");

        // Setup Media type info for the SampleGrabber
        AMMediaType mt = new AMMediaType();
        mt.majorType = MEDIATYPE_Video;   // Video
        mt.subType = MEDIASUBTYPE_RGB32;  // RGB32
        mt.formatType = FORMAT_VideoInfo; // VideoInfo
        sampleGrabber.SetMediaType(mt);

        // Construct the rest of the FilterGraph
        m_graphBuilder.RenderFile(filename, null);

        // Set SampleGrabber Properties
        sampleGrabber.SetBufferSamples(true);
        sampleGrabber.SetOneShot(false);
        sampleGrabber.SetCallback((ISampleGrabberCB)this, 1);

        // Hide Default Video Window
        IVideoWindow pVideoWindow = (IVideoWindow)m_graphBuilder;
        //pVideoWindow.put_AutoShow(OABool.False);
        pVideoWindow.put_AutoShow(0);

        // Create AMMediaType to capture video information
        AMMediaType MediaType = new AMMediaType();
        sampleGrabber.GetConnectedMediaType(MediaType);
        VideoInfoHeader pVideoHeader = new VideoInfoHeader();
        Marshal.PtrToStructure(MediaType.formatPtr, pVideoHeader);

        // Store video information
        videoHeight = pVideoHeader.BmiHeader.Height;
        videoWidth = pVideoHeader.BmiHeader.Width;
        avgTimePerFrame = pVideoHeader.AvgTimePerFrame;
        bitRate = pVideoHeader.BitRate;
        m_mediaSeeking.GetDuration(out videoDuration);

        // Create byte arrays to hold video data
        m_videoDataRgba = new MySwapQueue<byte[]>(() => new byte[(videoHeight * videoWidth) * 4]); // RGBA format (4 bytes per pixel)
    }
    catch (Exception e)
    {
        throw new Exception("Unable to Load or Play the video file", e);
    }
}
private static Bitmap GetBitmap(IGraphBuilder graph, ISampleGrabber sg, long grabPosition, out EventCode ec)
{
    IntPtr pBuffer = IntPtr.Zero;
    int pBufferSize = 0;
    Bitmap b = null;
    int hr = 0;
    try
    {
        IMediaSeeking ims = graph as IMediaSeeking;
        bool canDuration = false;
        bool canPos = false;
        bool canSeek = false;
        long pDuration = 0;
        long pCurrent = 0;

        if (ims != null)
        {
            AMSeekingSeekingCapabilities caps;
            hr = ims.GetCapabilities(out caps);
            if ((caps & AMSeekingSeekingCapabilities.CanGetDuration) == AMSeekingSeekingCapabilities.CanGetDuration)
            {
                canDuration = true;
            }
            if ((caps & AMSeekingSeekingCapabilities.CanGetCurrentPos) == AMSeekingSeekingCapabilities.CanGetCurrentPos)
            {
                canPos = true;
            }
            if ((caps & AMSeekingSeekingCapabilities.CanSeekAbsolute) == AMSeekingSeekingCapabilities.CanSeekAbsolute)
            {
                canSeek = true;
            }

            if (canDuration)
            {
                hr = ims.GetDuration(out pDuration);
            }
            if (grabPosition > pDuration)
            {
                grabPosition = pDuration - 1;
            }
            if (canSeek)
            {
                hr = ims.SetPositions(new DsLong(grabPosition), AMSeekingSeekingFlags.AbsolutePositioning,
                                      0, AMSeekingSeekingFlags.NoPositioning);
                DsError.ThrowExceptionForHR(hr);
            }
            if (canPos)
            {
                hr = ims.GetCurrentPosition(out pCurrent);
            }
        }

        IMediaControl mControl = graph as IMediaControl;
        IMediaEvent mEvent = graph as IMediaEvent;
        //ec = EventCode.SystemBase;

        hr = mControl.Pause();
        DsError.ThrowExceptionForHR(hr);
        hr = mControl.Run();
        DsError.ThrowExceptionForHR(hr);
        hr = mEvent.WaitForCompletion(int.MaxValue, out ec);
        DsError.ThrowExceptionForHR(hr);
        hr = mControl.Pause();
        DsError.ThrowExceptionForHR(hr);
        hr = mControl.Stop();
        DsError.ThrowExceptionForHR(hr);

        if (ec != EventCode.Complete)
        {
            return null;
        }

        // The first call with a null buffer asks the grabber for the required size.
        hr = sg.GetCurrentBuffer(ref pBufferSize, pBuffer);
        DsError.ThrowExceptionForHR(hr);
        pBuffer = Marshal.AllocCoTaskMem(pBufferSize);
        hr = sg.GetCurrentBuffer(ref pBufferSize, pBuffer);
        DsError.ThrowExceptionForHR(hr);

        if (pBuffer != IntPtr.Zero)
        {
            AMMediaType sgMt = new AMMediaType();
            int videoWidth = 0;
            int videoHeight = 0;
            int stride = 0;
            try
            {
                hr = sg.GetConnectedMediaType(sgMt);
                DsError.ThrowExceptionForHR(hr);
                if (sgMt.formatPtr != IntPtr.Zero)
                {
                    if (sgMt.formatType == FormatType.VideoInfo)
                    {
                        VideoInfoHeader vih = (VideoInfoHeader)Marshal.PtrToStructure(sgMt.formatPtr, typeof(VideoInfoHeader));
                        videoWidth = vih.BmiHeader.Width;
                        videoHeight = vih.BmiHeader.Height;
                        stride = videoWidth * (vih.BmiHeader.BitCount / 8);
                    }
                    else
                    {
                        throw new ApplicationException("Unsupported Sample");
                    }
                    b = new Bitmap(videoWidth, videoHeight, stride, System.Drawing.Imaging.PixelFormat.Format32bppRgb, pBuffer);
                    b.RotateFlip(RotateFlipType.RotateNoneFlipY);
                }
            }
            finally
            {
                DsUtils.FreeAMMediaType(sgMt);
            }
        }
        return b;
    }
    finally
    {
        if (pBuffer != IntPtr.Zero)
        {
            Marshal.FreeCoTaskMem(pBuffer);
        }
    }
}
/// <summary>
/// This method sets up the DirectShow filter graph and obtains the interfaces necessary to control playback
/// for VideoTextures created from video files. This method works for .avi, .mpeg, and .wmv files.
/// </summary>
/// <param name="filename">The .avi, .mpeg, or .wmv video file.</param>
private void SetupGraph(string filename)
{
    try
    {
        int hr;

        // 1. Start building the graph, using FilterGraph and CaptureGraphBuilder2.
        graphBuilder = (IGraphBuilder)new FilterGraph();
        ICaptureGraphBuilder2 builder = (ICaptureGraphBuilder2)new CaptureGraphBuilder2();
        hr = builder.SetFiltergraph(graphBuilder);
        DsError.ThrowExceptionForHR(hr);

        // 2. Add the source filter for the video file input.
        IBaseFilter sourceFilter;
        hr = graphBuilder.AddSourceFilter(filename, filename, out sourceFilter);
        DsError.ThrowExceptionForHR(hr);

        // 3. Get the SampleGrabber interface, configure it, and add it to the graph.
        ISampleGrabber sampGrabber = (ISampleGrabber)new SampleGrabber();
        ConfigureSampleGrabber(sampGrabber);
        hr = graphBuilder.AddFilter((IBaseFilter)sampGrabber, "SampleGrabber");
        DsError.ThrowExceptionForHR(hr);

        // 4. Add the null renderer (since we don't want to render in a separate window).
        IBaseFilter nullRenderer = (IBaseFilter)new NullRenderer();
        hr = graphBuilder.AddFilter(nullRenderer, "Null Renderer");
        DsError.ThrowExceptionForHR(hr);

        // 5. Render the stream. The way the stream is rendered depends on its type.
        switch (vidType)
        {
            case VideoType.AVI:
            case VideoType.MPEG:
                hr = builder.RenderStream(null, null, sourceFilter, (IBaseFilter)sampGrabber, nullRenderer);
                break;
            case VideoType.WMV:
                hr = builder.RenderStream(null, MediaType.Video, sourceFilter, (IBaseFilter)sampGrabber, nullRenderer);
                break;
            default:
                throw new Exception("Unsupported Video type: " + vidType);
        }
        DsError.ThrowExceptionForHR(hr);

        // 6. Now that everything is configured and set up, save the width, height, and stride information for use later.
        SaveSizeInfo(sampGrabber);

        // 7. Obtain the interfaces that we will use to control the execution of the filter graph.
        mediaControl = graphBuilder as IMediaControl;
        mediaSeeking = graphBuilder as IMediaSeeking;
        mediaSeeking.SetTimeFormat(TimeFormat.MediaTime);

        long duration;
        mediaSeeking.GetDuration(out duration);
        length = (int)(duration / 10000); // 100ns units -> milliseconds
    }
    catch (Exception ex)
    {
        Console.WriteLine(ex.Message);
        throw;
    }
}