/// <summary>
/// Builds the video group on the timeline and inserts one empty track into it.
/// Reads m_pVideoGroupObj / m_pTimeline and stores the new track in m_VideoTrack.
/// </summary>
private void InitVideo()
{
    int hr;

    IAMTimelineGroup videoGroup = (IAMTimelineGroup)m_pVideoGroupObj;

    // Only the major type is specified; the group supplies defaults for everything else.
    AMMediaType groupMediaType = new AMMediaType();
    groupMediaType.majorType = MediaType.Video;
    hr = videoGroup.SetMediaType(groupMediaType);
    DESError.ThrowExceptionForHR(hr);
    DsUtils.FreeAMMediaType(groupMediaType);

    // Attach the video group to the timeline.
    hr = m_pTimeline.AddGroup(m_pVideoGroupObj);
    DESError.ThrowExceptionForHR(hr);

    // Create an empty track node.
    IAMTimelineObj trackObj;
    hr = m_pTimeline.CreateEmptyNode(out trackObj, TimelineMajorType.Track);
    DESError.ThrowExceptionForHR(hr);

    // The group is itself a composition; insert the track at the end (-1).
    IAMTimelineComp rootComposition = (IAMTimelineComp)m_pVideoGroupObj;
    hr = rootComposition.VTrackInsBefore(trackObj, -1);
    DESError.ThrowExceptionForHR(hr);

    m_VideoTrack = (IAMTimelineTrack)trackObj;
}
/// <summary>
/// Constructor. Creates the DES group node, assigns it the supplied media type
/// and adds it to the timeline.
/// </summary>
/// <param name="mType">Media type of the new group. Ownership transfers to this
/// constructor: it is freed before returning, even when an error occurs.</param>
/// <param name="pTimeline">Timeline to use for the group</param>
/// <param name="fps">FPS for the group</param>
public MediaGroup(AMMediaType mType, IAMTimeline pTimeline, double fps)
{
    int hr;
    IAMTimelineObj pGroupObj;

    m_Length = 0;
    m_Files = new ArrayList();
    m_FPS = fps;
    m_pTimeline = pTimeline;

    // make the root group/composition
    hr = m_pTimeline.CreateEmptyNode(out pGroupObj, TimelineMajorType.Group);
    DESError.ThrowExceptionForHR(hr);

    try
    {
        // Held as m_pGroup and released in Dispose.
        m_pGroup = (IAMTimelineGroup)pGroupObj;

        // Set the media type we just created
        hr = m_pGroup.SetMediaType(mType);
        DESError.ThrowExceptionForHR(hr);

        // add the group to the timeline
        hr = m_pTimeline.AddGroup(pGroupObj);
        DESError.ThrowExceptionForHR(hr);
    }
    finally
    {
        // Free the media type on every path. The previous version only freed it
        // after a successful SetMediaType, leaking the AMMediaType whenever
        // SetMediaType or AddGroup failed.
        DsUtils.FreeAMMediaType(mType);
    }
}
/// <summary>
/// Constructor. Creates and registers the underlying DES group for this timeline group.
/// </summary>
/// <param name="timeline">Timeline to use for the group. NOTE(review): the base-class
/// call dereferences <c>timeline.DesTimeline</c> before this body runs, so a null
/// timeline fails there with a NullReferenceException rather than an argument check.</param>
/// <param name="type">The type of group this is</param>
/// <param name="mediaType">Media type of the new group</param>
/// <param name="name">User name assigned to the group node</param>
/// <param name="fps">FPS for the group</param>
public Group(ITimeline timeline, GroupType type, AMMediaType mediaType, string name, double fps)
    : base(timeline.DesTimeline, name, -1)
{
    _timeline = timeline;
    _type = type;
    _fps = fps;
    // Creates the DES group node, names it, sets the media type and adds it to the timeline.
    _group = TimelineUtils.InsertGroup(_timeline.DesTimeline, mediaType, name);
    // A DES group is itself a composition; cache that view for later track insertion.
    _timelineComposition = (IAMTimelineComp)_group;
}
/// <summary>
/// Release everything
/// </summary>
public void Dispose()
{
    if (m_pGroup != null)
    {
        // Drop the COM reference held on the DES timeline group.
        Marshal.ReleaseComObject(m_pGroup);
        m_pGroup = null;
    }

    // Clear managed state.
    m_Files = null;
    m_Length = 0;

    // Nothing left for a finalizer to do.
    GC.SuppressFinalize(this);
}
/// <summary>
/// Creates the root group node on the timeline and caches it in m_pVideoGroup.
/// </summary>
private void TestCreateNode()
{
    // make the root group/composition
    IAMTimelineObj groupObj;
    int hr = m_pTimeline.CreateEmptyNode(out groupObj, TimelineMajorType.Group);
    DESError.ThrowExceptionForHR(hr);

    m_pVideoGroup = (IAMTimelineGroup)groupObj;
}
/// <summary>
/// Releases the composition and then the underlying DES group COM object.
/// </summary>
/// <param name="disposing">Forwarded to DisposeComposition; true when called from Dispose().</param>
private void Dispose(bool disposing)
{
    DisposeComposition(disposing);

    // Nothing more to do once the group has already been released.
    if (_group == null)
    {
        return;
    }

    Marshal.ReleaseComObject(_group);
    _group = null;
    TimelineComposition = null;
}
/// <summary>
/// Release everything
/// </summary>
public override void Dispose()
{
    // Let the base class tear down its own resources first.
    base.Dispose();

    if (_group != null)
    {
        // Release the COM reference on the DES group and forget it.
        Marshal.ReleaseComObject(_group);
        _group = null;
    }

    GC.SuppressFinalize(this);
}
/// <summary>
/// Creates the DES timeline and its root group node, caching the group in m_itg.
/// </summary>
private void Config()
{
    int hr;
    IAMTimelineObj groupObj; // local, not a field; the old 'm_pVideoGroupObj2' name was misleading

    m_pTimeline = (IAMTimeline)new AMTimeline();

    // make the root group/composition
    hr = m_pTimeline.CreateEmptyNode(out groupObj, TimelineMajorType.Group);
    DESError.ThrowExceptionForHR(hr);

    // Direct cast instead of 'as': a failed cast now surfaces immediately as
    // InvalidCastException instead of leaving m_itg null and failing later.
    // This also matches the casting style used everywhere else in this file.
    m_itg = (IAMTimelineGroup)groupObj;
}
/// <summary>
/// Constructor. Validates its arguments, then creates and registers the DES group.
/// </summary>
/// <param name="timeline">Timeline to use for the group</param>
/// <param name="type">The type of group this is</param>
/// <param name="mediaType">Media type of the new group</param>
/// <param name="name">User name for the group node</param>
/// <param name="fps">Fps for the group</param>
public Group(ITimeline timeline, GroupType type, AMMediaType mediaType, string name, double fps)
    : base(timeline, name, -1)
{
    // Validate before touching any state.
    if (timeline == null)
    {
        throw new ArgumentNullException("timeline");
    }

    if (mediaType == null)
    {
        throw new ArgumentNullException("mediaType");
    }

    if (fps <= 0)
    {
        throw new SplicerException(Resources.ErrorFramesPerSecondMustBeGreaterThenZero);
    }

    _timeline = timeline;
    _type = type;
    _fps = fps;

    // Create the DES group and cache its composition interface for track insertion.
    _group = TimelineBuilder.InsertGroup(_timeline.DesTimeline, mediaType, name);
    TimelineComposition = (IAMTimelineComp)_group;
}
/// <summary>
/// Constructor. Rejects invalid arguments, then builds the DES group for this timeline.
/// </summary>
/// <param name="timeline">Timeline to use for the group</param>
/// <param name="type">The type of group this is</param>
/// <param name="mediaType">Media type of the new group</param>
/// <param name="name">User name for the group node</param>
/// <param name="fps">Fps for the group</param>
public Group(ITimeline timeline, GroupType type, AMMediaType mediaType, string name, double fps)
    : base(timeline, name, -1)
{
    // Guard clauses: same checks, same order, same exceptions.
    if (timeline == null) throw new ArgumentNullException("timeline");
    if (mediaType == null) throw new ArgumentNullException("mediaType");
    if (fps <= 0) throw new SplicerException(Resources.ErrorFramesPerSecondMustBeGreaterThenZero);

    _timeline = timeline;
    _type = type;
    _fps = fps;

    // Create and register the DES group; keep its composition view for later use.
    _group = TimelineBuilder.InsertGroup(_timeline.DesTimeline, mediaType, name);
    TimelineComposition = (IAMTimelineComp)_group;
}
/// <summary>
/// Inserts a group into a timeline, and assigns it the supplied media type.
/// Will free the media type upon completion, whether or not an error occurs.
/// </summary>
/// <param name="timeline">Timeline that receives the new group.</param>
/// <param name="mediaType">Media type to assign; ownership transfers to this method.</param>
/// <param name="name">Optional user name for the group; skipped when null or empty.</param>
/// <returns>The newly created group.</returns>
internal static IAMTimelineGroup InsertGroup(IAMTimeline timeline, AMMediaType mediaType, string name)
{
    try
    {
        // make the root group/composition
        IAMTimelineObj groupObj;
        int hr = timeline.CreateEmptyNode(out groupObj, TimelineMajorType.Group);
        DESError.ThrowExceptionForHR(hr);

        if (!string.IsNullOrEmpty(name))
        {
            hr = groupObj.SetUserName(name);
            DESError.ThrowExceptionForHR(hr);
        }

        IAMTimelineGroup group = (IAMTimelineGroup)groupObj;

        // Assign the caller-supplied media type to the group.
        hr = group.SetMediaType(mediaType);
        DESError.ThrowExceptionForHR(hr);

        // add the group to the timeline
        hr = timeline.AddGroup(groupObj);
        DESError.ThrowExceptionForHR(hr);

        return group;
    }
    finally
    {
        // The media type is owned by this method; release it on every path.
        DsUtils.FreeAMMediaType(mediaType);
    }
}
/// <summary>
/// Configure the graph to output the results to an AVI file.
/// </summary>
/// <param name="sOutputFile">File name for output (must not be null)</param>
/// <param name="ibfVideoCompressor">IBaseFilter of a video compressor to use (or null for none).
/// Note that <b><i>no</i></b> configuration of this compressor is done by this method. It merely adds it
/// to the graph in the appropriate place. Also, the pointer is not invalidated, so any configuration
/// of the compressor that needs to be done after being added to the graph can still be done.</param>
/// <param name="ibfAudioCompressor">IBaseFilter of an audio compressor to use (or null for none).
/// Note that <b><i>no</i></b> configuration of this compressor is done by this method. It merely adds it
/// to the graph in the appropriate place. Also, the pointer is not invalidated, so any configuration
/// of the compressor that needs to be done after being added to the graph can still be done.</param>
/// <param name="pVideoCallback">Callback routine to be called for each video frame or null for no callback</param>
/// <param name="pAudioCallback">Callback routine to be called for each audio frame or null for no callback</param>
/// <exception cref="ArgumentNullException">When <paramref name="sOutputFile"/> is null.</exception>
/// <remarks>
/// The callback routines are invoked once for each sample. This allows for additional processing to
/// be performed on the video or audio buffers.
/// </remarks>
public void RenderToAVI(
    string sOutputFile,
    IBaseFilter ibfVideoCompressor,
    IBaseFilter ibfAudioCompressor,
    IDESCombineCB pVideoCallback,
    IDESCombineCB pAudioCallback)
{
    int hr;
    IPin pPin;

    if (sOutputFile == null)
    {
        // More specific than the original bare Exception; still caught by any
        // caller that was catching Exception.
        throw new ArgumentNullException("sOutputFile", "Output file name cannot be null");
    }

    // Perform initialization common to all render routines
    RenderCommon();

    // Contains useful routines for creating the graph
    ICaptureGraphBuilder2 icgb = (ICaptureGraphBuilder2)new CaptureGraphBuilder2();

    try
    {
        hr = icgb.SetFiltergraph(m_pGraph);
        DESError.ThrowExceptionForHR(hr);

        // Create the file writer
        IBaseFilter pMux;
        IFileSinkFilter pFilter;
        hr = icgb.SetOutputFileName(MediaSubType.Avi, sOutputFile, out pMux, out pFilter);
        DESError.ThrowExceptionForHR(hr);

        // We don't need this, so let it go
        Marshal.ReleaseComObject(pFilter);

        try
        {
            int NumGroups;
            hr = m_pTimeline.GetGroupCount(out NumGroups);
            DESError.ThrowExceptionForHR(hr);

            // Walk the groups. For this class, there is one group that
            // contains all the video, and a second group for the audio.
            for (int i = 0; i < NumGroups; i++)
            {
                IAMTimelineObj pGroup;
                hr = m_pTimeline.GetGroup(out pGroup, i);
                DESError.ThrowExceptionForHR(hr);

                try
                {
                    // Inform the graph we will be writing to disk (rather than previewing)
                    IAMTimelineGroup pTLGroup = (IAMTimelineGroup)pGroup;
                    hr = pTLGroup.SetPreviewMode(false);
                    DESError.ThrowExceptionForHR(hr);
                }
                finally
                {
                    Marshal.ReleaseComObject(pGroup);
                }

                // Get the IPin for the current group
                hr = m_pRenderEngine.GetGroupOutputPin(i, out pPin);
                DESError.ThrowExceptionForHR(hr);

                try
                {
                    if (IsVideo(pPin))
                    {
                        // Create a sample grabber, add it to the graph and connect it all up
                        AVCallback mcb = new AVCallback(m_Video, pVideoCallback, (IMediaEventSink)m_pGraph, EC_VideoFileComplete);
                        RenderHelper(icgb, mcb, "Video", pPin, ibfVideoCompressor, pMux);
                    }
                    else
                    {
                        // Create a sample grabber, add it to the graph and connect it all up
                        AVCallback mcb = new AVCallback(m_Audio, pAudioCallback, (IMediaEventSink)m_pGraph, EC_AudioFileComplete);
                        RenderHelper(icgb, mcb, "Audio", pPin, ibfAudioCompressor, pMux);
                    }
                }
                finally
                {
                    Marshal.ReleaseComObject(pPin);
                }
            }
        }
        finally
        {
            Marshal.ReleaseComObject(pMux);
        }
    }
    finally
    {
        Marshal.ReleaseComObject(icgb);
    }
}
/// <summary>
/// Add comp and track to timeline: one backcolor composite/track pair, then one
/// composite/track pair per visible image/video layer per repeat, then a final
/// sign-effect composite/track pair.
/// </summary>
/// <param name="ds">Layer collection; only visible Image/Video layers with a non-empty FilePath are used.</param>
/// <param name="pGroup">DES group the compositions are inserted into.</param>
public void AddCompAndTrack(VideoLayerCollection ds, IAMTimelineGroup pGroup)
{
    IAMTimelineObj pCompObj;
    IAMTimelineObj pTrackObj;
    // NOTE(review): hr is assigned throughout this method but never passed to
    // DESError.ThrowExceptionForHR — COM failures are silently ignored. Confirm
    // whether that is intentional best-effort behavior.
    int hr = 0;
    int i = 1; // slot index into m_CompArr/m_TrackArr (slot 0 is the backcolor pair)
    long m_StartTime = 0;

    // Create a composition object
    IAMTimelineComp pGroupComp = (IAMTimelineComp)pGroup;

    // Only visible image/video layers that actually reference a file take part.
    List<Layer> ImageDS = ds.FindAll(p => p.Visible == true && p.FilePath != "" && (p.LayerType == LayerType.Image || p.LayerType == LayerType.Video));

    //create backcolor item
    hr = m_pTimeline.CreateEmptyNode(out pCompObj, TimelineMajorType.Composite);
    hr = pGroupComp.VTrackInsBefore(pCompObj, -1);
    m_CompArr[0] = (IAMTimelineComp)pCompObj;
    hr = m_pTimeline.CreateEmptyNode(out pTrackObj, TimelineMajorType.Track);
    hr = m_CompArr[0].VTrackInsBefore(pTrackObj, -1);
    m_TrackArr[0] = (IAMTimelineTrack)pTrackObj;

    // Repeat the full layer set m_RepeatNums times, each repeat offset by m_MaxLength.
    for (int m = 0; m < m_RepeatNums; m++)
    {
        m_StartTime = DESHelper.FormatTime(m * m_MaxLength);
        i = m * ImageDS.Count + 1;

        foreach (Layer layer in ImageDS)
        {
            IAMTimelineComp tempComp = null;

            // One composite per layer, keyed over the layer's (offset) time range.
            hr = m_pTimeline.CreateEmptyNode(out pCompObj, TimelineMajorType.Composite);
            hr = pGroupComp.VTrackInsBefore(pCompObj, -1);
            m_CompArr[i] = (IAMTimelineComp)pCompObj;
            DESTransition.SetDxtKey(ref m_pTimeline, m_CompArr[i], m_StartTime + DESHelper.FormatTime(layer.StartTime), m_StartTime + DESHelper.FormatTime(layer.EndTime));

            //Set Sign Color
            // Applied to videos, and to images whose media length is resolvable.
            if (layer.LayerType == LayerType.Video || (layer.LayerType == LayerType.Image && DESHelper.GetMediaLength(layer.FilePath) > 0))
            {
                DESTransition.SetSign(ref m_pTimeline, m_CompArr[i], m_StartTime + DESHelper.FormatTime(layer.StartTime), m_StartTime + DESHelper.FormatTime(layer.EndTime), ds.SignType);
                DESTransition.SetPixelate(ref m_pTimeline, m_CompArr[i], m_StartTime + DESHelper.FormatTime(layer.StartTime), m_StartTime + DESHelper.FormatTime(layer.EndTime), ds.Zoom);
            }

            //Set Layer Comsitor
            // A layer that is not full-frame gets a nested composite with
            // compositor + alpha transitions; full-frame layers get a bare track.
            if (layer.Rect.X != 0 || layer.Rect.Y != 0 || layer.Rect.Height != m_Height || layer.Rect.Width != m_Width)
            {
                hr = m_pTimeline.CreateEmptyNode(out pCompObj, TimelineMajorType.Composite);
                hr = m_CompArr[i].VTrackInsBefore(pCompObj, -1);
                tempComp = (IAMTimelineComp)pCompObj;
                DESTransition.SetCompositor(ref m_pTimeline, tempComp, m_StartTime + DESHelper.FormatTime(layer.StartTime), m_StartTime + DESHelper.FormatTime(layer.EndTime), layer.Rect);
                DESTransition.SetAlpha(ref m_pTimeline, tempComp, m_StartTime + DESHelper.FormatTime(layer.StartTime), m_StartTime + DESHelper.FormatTime(layer.EndTime));
                hr = m_pTimeline.CreateEmptyNode(out pTrackObj, TimelineMajorType.Track);
                hr = tempComp.VTrackInsBefore(pTrackObj, -1);
                // append to the end of the track list
                m_TrackArr[i] = (IAMTimelineTrack)pTrackObj;
            }
            else
            {
                hr = m_pTimeline.CreateEmptyNode(out pTrackObj, TimelineMajorType.Track);
                hr = m_CompArr[i].VTrackInsBefore(pTrackObj, -1);
                // append to the end of the track list
                m_TrackArr[i] = (IAMTimelineTrack)pTrackObj;
            }

            i++;
        }
    }

    //create Sign Effect item
    hr = m_pTimeline.CreateEmptyNode(out pCompObj, TimelineMajorType.Composite);
    hr = pGroupComp.VTrackInsBefore(pCompObj, -1);
    m_CompArr[m_CompArr.Length - 1] = (IAMTimelineComp)pCompObj;
    hr = m_pTimeline.CreateEmptyNode(out pTrackObj, TimelineMajorType.Track);
    hr = m_CompArr[m_CompArr.Length - 1].VTrackInsBefore(pTrackObj, -1);
    m_TrackArr[m_TrackArr.Length - 1] = (IAMTimelineTrack)pTrackObj;
    // NOTE(review): this indexes m_CompArr with m_TrackArr.Length - 1, while the
    // lines above use m_CompArr.Length - 1 — verify the two arrays always have
    // the same length, otherwise this targets the wrong (or an invalid) slot.
    DESTransition.SetDxtKey(ref m_pTimeline, m_CompArr[m_TrackArr.Length - 1], 0, DESHelper.FormatTime(ds.PlayLength));
}
/// <summary>
/// Configure the graph to output the results to a video window.
/// </summary>
/// <remarks>
/// The callback routines are invoked once for each sample. This allows for additional processing to
/// be performed on the video or audio buffers.
/// </remarks>
/// <param name="hWnd">Window handle to render to, or IntPtr.Zero to render to its own window</param>
/// <param name="pVideoCallback">Callback routine to be called for each video frame or null for no callback</param>
/// <param name="pAudioCallback">Callback routine to be called for each audio frame or null for no callback</param>
/// <param name="video">Render only video</param>
/// <param name="audio">Render only audio</param>
private void RenderToWindow(IntPtr hWnd, IDESCombineCB pVideoCallback, IDESCombineCB pAudioCallback, bool video, bool audio)
{
    int hr;
    IPin pPin;
    IVideoWindow pVidWindow;
    IAMTimelineObj pGroup;

    // Perform initialization common to all render routines
    RenderCommon();

    // Contains useful routines for creating the graph
    ICaptureGraphBuilder2 icgb = (ICaptureGraphBuilder2)new CaptureGraphBuilder2();

    try
    {
        hr = icgb.SetFiltergraph(m_pGraph);
        DESError.ThrowExceptionForHR(hr);

        int NumGroups;
        hr = m_pTimeline.GetGroupCount(out NumGroups);
        DESError.ThrowExceptionForHR(hr);

        // Walk the groups. For DESCombine, there is one group that
        // contains all the video, and a second group for the audio.
        for (int i = 0; i < NumGroups; i++)
        {
            hr = m_pTimeline.GetGroup(out pGroup, i);
            DESError.ThrowExceptionForHR(hr);

            try
            {
                // Inform the graph we will be previewing (rather than writing to disk)
                IAMTimelineGroup pTLGroup = (IAMTimelineGroup)pGroup;
                hr = pTLGroup.SetPreviewMode(true);
                DESError.ThrowExceptionForHR(hr);
            }
            finally
            {
                // Release the group
                Marshal.ReleaseComObject(pGroup);
            }

            // Get the IPin for the current group
            hr = m_pRenderEngine.GetGroupOutputPin(i, out pPin);
            DESError.ThrowExceptionForHR(hr);

            try
            {
                // If this is the video pin
                if (video && IsVideo(pPin))
                {
                    // Get a video renderer
                    IBaseFilter ibfVideoRenderer = (IBaseFilter)new VideoRenderer();
                    try
                    {
                        // Create a sample grabber, add it to the graph and connect it all up
                        AVCallback mcb = new AVCallback(m_Video, pVideoCallback, (IMediaEventSink)m_pGraph, EC_VideoFileComplete);
                        RenderWindowHelper(icgb, mcb, "Video", pPin, ibfVideoRenderer);
                    }
                    finally
                    {
                        // The graph holds its own reference once connected.
                        Marshal.ReleaseComObject(ibfVideoRenderer);
                    }
                }
                else if (audio)
                {
                    // Get an audio renderer
                    IBaseFilter ibfAudioRenderer = (IBaseFilter)new AudioRender();
                    try
                    {
                        // Create a sample grabber, add it to the graph and connect it all up
                        AVCallback mcb = new AVCallback(m_Audio, pAudioCallback, (IMediaEventSink)m_pGraph, EC_AudioFileComplete);
                        RenderWindowHelper(icgb, mcb, "Audio", pPin, ibfAudioRenderer);
                    }
                    finally
                    {
                        Marshal.ReleaseComObject(ibfAudioRenderer);
                    }
                }
            }
            finally
            {
                Marshal.ReleaseComObject(pPin);
            }
        }

        if (video)
        {
            // Configure the video window
            pVidWindow = (IVideoWindow)m_pGraph;

            // If a window handle was supplied, use it
            if (hWnd != IntPtr.Zero)
            {
                hr = pVidWindow.put_Owner(hWnd);
                DESError.ThrowExceptionForHR(hr);
            }
            else
            {
                // Use our own window
                hr = pVidWindow.put_Caption("Video Rendering Window");
                DESError.ThrowExceptionForHR(hr);

                // since no user interaction is allowed, remove
                // system menu and maximize/minimize buttons
                WindowStyle lStyle = 0;
                hr = pVidWindow.get_WindowStyle(out lStyle);
                DESError.ThrowExceptionForHR(hr);
                lStyle &= ~(WindowStyle.MinimizeBox | WindowStyle.MaximizeBox | WindowStyle.SysMenu);
                hr = pVidWindow.put_WindowStyle(lStyle);
                DESError.ThrowExceptionForHR(hr);
            }
        }
    }
    finally
    {
        Marshal.ReleaseComObject(icgb);
    }
}
/// <summary>
/// Walks the timeline's groups (in reverse order), switches each out of preview
/// mode, and wires each group's output pin to the appropriate compressor and
/// destination filter, with a sample-grabber callback in between.
/// </summary>
/// <param name="icgb">Capture graph builder already attached to the graph.</param>
/// <param name="audioCompressor">Audio compressor filter, or null for none.</param>
/// <param name="videoCompressor">Video compressor filter, or null for none.</param>
/// <param name="audioDest">Destination filter for audio.</param>
/// <param name="videoDest">Destination filter for video; only tracked separately when distinct from audioDest.</param>
/// <param name="pAudioCallback">Per-sample audio callback, or null.</param>
/// <param name="pVideoCallback">Per-sample video callback, or null.</param>
protected void RenderGroups(ICaptureGraphBuilder2 icgb, IBaseFilter audioCompressor, IBaseFilter videoCompressor,
                            IBaseFilter audioDest, IBaseFilter videoDest, IDESCombineCB pAudioCallback,
                            IDESCombineCB pVideoCallback)
{
    int hr;

    // Register the supplied filters for later disposal (avoid double-adding
    // the destination when audio and video share one filter).
    if (audioCompressor != null)
    {
        _dc.Add(audioCompressor);
    }
    if (videoCompressor != null)
    {
        _dc.Add(videoCompressor);
    }
    if (audioDest != null)
    {
        _dc.Add(audioDest);
    }
    if ((videoDest != null) && (audioDest != videoDest))
    {
        _dc.Add(videoDest);
    }

    IAMTimeline desTimeline = _timeline.DesTimeline;

    int NumGroups;
    hr = desTimeline.GetGroupCount(out NumGroups);
    DESError.ThrowExceptionForHR(hr);

    // Walk the groups. For this class, there is one group that
    // contains all the video, and a second group for the audio.
    for (int i = (NumGroups - 1); i >= 0; i--)
    {
        IAMTimelineObj pGroup;
        hr = desTimeline.GetGroup(out pGroup, i);
        DESError.ThrowExceptionForHR(hr);

        try
        {
            // Inform the graph we will be writing to disk (rather than previewing)
            IAMTimelineGroup pTLGroup = (IAMTimelineGroup)pGroup;
            hr = pTLGroup.SetPreviewMode(false);
            DESError.ThrowExceptionForHR(hr);
        }
        finally
        {
            Marshal.ReleaseComObject(pGroup);
        }

        // Get the IPin for the current group.
        IPin pPin;
        hr = _renderEngine.GetGroupOutputPin(i, out pPin);
        // Check the HRESULT before registering the pin for disposal; the
        // original added a possibly-invalid pin to _dc before checking hr.
        DESError.ThrowExceptionForHR(hr);
        _dc.Add(pPin);

        try
        {
            if (PinUtils.IsVideo(pPin))
            {
                // Create a sample grabber, add it to the graph and connect it all up
                CallbackHandler mcb = new CallbackHandler(_firstVideoGroup, pVideoCallback, (IMediaEventSink)_graph, EC_VideoFileComplete);
                RenderHelper(icgb, mcb, "Video", pPin, videoCompressor, videoDest);
            }
            else
            {
                // Create a sample grabber, add it to the graph and connect it all up
                CallbackHandler mcb = new CallbackHandler(_firstAudioGroup, pAudioCallback, (IMediaEventSink)_graph, EC_AudioFileComplete);
                RenderHelper(icgb, mcb, "Audio", pPin, audioCompressor, audioDest);
            }
        }
        finally
        {
            Marshal.ReleaseComObject(pPin);
        }
    }
}