/// <summary>
/// Initialises the clip player: resets playback state and creates the
/// GMFBridge controller configured with one video and one audio stream.
/// </summary>
/// <param name="hwnd">Window that receives segment and graph-event notifications.</param>
/// <param name="msgSegment">Message id posted when a segment (clip) completes.</param>
/// <param name="msgEvent">Message id posted for filter-graph events.</param>
public ClipPlayer(IntPtr hwnd, int msgSegment, int msgEvent)
{
    m_bLoop = false;
    m_tDuration = 0;
    m_tStartPosition = 0;
    m_bActive = false;
    m_hwndApp = hwnd;
    m_msgEvent = msgEvent;
    m_msgSegment = msgSegment;
    m_pPlayNext = -1;
    m_Clips = new ArrayList(5);

    // Direct cast instead of "as": if the COM object does not expose
    // IGMFBridgeController we fail fast with an InvalidCastException here,
    // rather than a NullReferenceException on the SetNotify call below.
    m_pController = (IGMFBridgeController)new GMFBridgeController();
    m_pController.SetNotify(hwnd, msgSegment);

    // we use a video and an audio stream,
    // options:
    //   don't allow compressed in source graphs,
    //   don't discard when not connected
    m_pController.AddStream(true, eFormatType.Uncompressed, false);
    m_pController.AddStream(false, eFormatType.Uncompressed, false);

    // increase buffering at the join, so that audio does not run out
    m_pController.SetBufferMinimum(200);

    // No clips yet, so the "current clip" index points at end-of-list.
    m_itCurrent = m_Clips.Count;
}
/// <summary>
/// Creates a source graph for a media file and appends it to the playlist.
/// If the very first clip has no audio stream, the controller is recreated
/// video-only and the clip creation is retried once.
/// </summary>
/// <param name="path">Path of the media file to append.</param>
/// <param name="pClip">Receives the new clip entry (valid only on success).</param>
/// <returns>HRESULT: &gt;= 0 on success, a negative COM error code on failure.</returns>
public int AddClip(string path, out ClipEntry pClip)
{
    int it = m_Clips.Count;
    pClip = new ClipEntry();
    m_Clips.Add(pClip);

    int hr = pClip.Create(m_pController, path);

    // if we expect both audio and video, then all clips
    // must have both audio and video.
    // If the first clip is video only, then switch
    // to video-only automatically
    if ((hr == VFW_E_UNSUPPORTED_AUDIO) && (m_Clips.Count == 1))
    {
        // new controller, different options (only one video stream)
        if (m_pController != null)
        {
            Marshal.ReleaseComObject(m_pController);
            m_pController = null;
        }

        // Direct cast: fail fast here instead of a NullReferenceException later.
        m_pController = (IGMFBridgeController)new GMFBridgeController();
        m_pController.SetNotify(m_hwndApp, m_msgSegment);
        m_pController.AddStream(true, eFormatType.Uncompressed, false);
        m_pController.SetBufferMinimum(200);

        // try again
        hr = pClip.Create(m_pController, path);
    }

    if (hr >= 0)
    {
        pClip.SetStartPosition(m_tDuration);
        m_tDuration += pClip.Duration();

        // if this is the first clip, create the render graph
        if (m_Clips.Count == 1)
        {
            m_pRenderGraph = (IGraphBuilder)new FilterGraph();
            hr = m_pController.CreateRenderGraph(pClip.SinkFilter(), m_pRenderGraph, out m_pRenderGraphSourceFilter);
            if (hr < 0)
            {
                // Fix: previously a failed render-graph creation left the clip
                // registered (and its duration counted) while still reporting
                // failure to the caller. Roll the registration back instead.
                m_tDuration -= pClip.Duration();
                pClip.Dispose();
                m_Clips.RemoveAt(it);
            }
            else if (m_hwndApp != IntPtr.Zero)
            {
                IMediaEventEx pME = m_pRenderGraph as IMediaEventEx;
                if (pME != null)
                {
                    pME.SetNotifyWindow(m_hwndApp, m_msgEvent, IntPtr.Zero);
                }
            }
        }
    }
    else
    {
        // Creation failed: discard the placeholder entry we optimistically added.
        pClip.Dispose();
        m_Clips.RemoveAt(it);
    }

    // Consistent with the sibling overload: plain "return hr;" not "return(hr);".
    return hr;
}
/// <summary>
/// Builds a source graph for the given file and appends it as a new clip.
/// When the first clip turns out to be video-only, the bridge controller is
/// rebuilt without an audio stream and the clip creation is retried once.
/// </summary>
/// <param name="path">Media file to add.</param>
/// <param name="pClip">The newly created clip entry.</param>
/// <returns>HRESULT from the underlying COM calls (negative on failure).</returns>
public int AddClip(string path, out ClipEntry pClip)
{
    int index = m_Clips.Count;
    pClip = new ClipEntry();
    m_Clips.Add(pClip);

    int hr = pClip.Create(m_pController, path);

    // All clips must match the controller's stream layout (audio + video).
    // A video-only first clip triggers an automatic switch to a
    // video-only controller.
    bool firstClipLacksAudio =
        (hr == VFW_E_UNSUPPORTED_AUDIO) && (m_Clips.Count == 1);
    if (firstClipLacksAudio)
    {
        // Drop the old controller and build one with a single video stream.
        if (m_pController != null)
        {
            Marshal.ReleaseComObject(m_pController);
            m_pController = null;
        }
        m_pController = new GMFBridgeController() as IGMFBridgeController;
        m_pController.SetNotify(m_hwndApp, m_msgSegment);
        m_pController.AddStream(true, eFormatType.Uncompressed, false);
        m_pController.SetBufferMinimum(200);

        // Second attempt against the rebuilt controller.
        hr = pClip.Create(m_pController, path);
    }

    if (hr < 0)
    {
        // Creation failed: discard the placeholder entry.
        pClip.Dispose();
        m_Clips.RemoveAt(index);
        return hr;
    }

    pClip.SetStartPosition(m_tDuration);
    m_tDuration += pClip.Duration();

    // The first successful clip also creates the shared render graph.
    if (m_Clips.Count == 1)
    {
        m_pRenderGraph = new FilterGraph() as IGraphBuilder;
        hr = m_pController.CreateRenderGraph(
            pClip.SinkFilter(), m_pRenderGraph, out m_pRenderGraphSourceFilter);
        if (hr >= 0 && m_hwndApp != IntPtr.Zero)
        {
            IMediaEventEx mediaEvents = m_pRenderGraph as IMediaEventEx;
            if (mediaEvents != null)
            {
                mediaEvents.SetNotifyWindow(m_hwndApp, m_msgEvent, IntPtr.Zero);
            }
        }
    }

    return hr;
}