// Ask the render engine to render every output pin; surface any failure as an exception.
private void TestRender()
{
    int hr = m_ire.RenderOutputPins();
    DESError.ThrowExceptionForHR(hr);
}
/// <summary>
/// Exercises IAMTimelineObj.FixTimes: sets a start/stop pair, adds the source
/// to the video track, then verifies FixTimes snaps each time down by one unit.
/// </summary>
private void TestFix()
{
    const int START = 100000001;
    const int END = 200000001;

    int hr;
    long i1, i2;
    IAMTimelineObj pSource1Obj;

    hr = m_pTimeline.CreateEmptyNode(out pSource1Obj, TimelineMajorType.Source);
    DESError.ThrowExceptionForHR(hr); // FIX: this HRESULT was previously unchecked

    hr = pSource1Obj.SetStartStop(START, END);
    DESError.ThrowExceptionForHR(hr);

    hr = pSource1Obj.GetStartStop(out i1, out i2);
    DESError.ThrowExceptionForHR(hr);

    hr = m_VideoTrack.SrcAdd(pSource1Obj);
    DESError.ThrowExceptionForHR(hr);

    hr = pSource1Obj.FixTimes(ref i1, ref i2);
    DESError.ThrowExceptionForHR(hr);

    Debug.Assert(i1 == START - 1 && i2 == END - 1, "fix");
}
// Create the video group on the timeline and insert the first video track into it.
private void InitVideo()
{
    int hr;

    // Only the major type is set on the group; DES supplies the remaining defaults.
    IAMTimelineGroup videoGroup = (IAMTimelineGroup)m_pVideoGroupObj;
    AMMediaType groupMediaType = new AMMediaType();
    groupMediaType.majorType = MediaType.Video;
    hr = videoGroup.SetMediaType(groupMediaType);
    DESError.ThrowExceptionForHR(hr);
    DsUtils.FreeAMMediaType(groupMediaType);

    // Register the video group with the timeline.
    hr = m_pTimeline.AddGroup(m_pVideoGroupObj);
    DESError.ThrowExceptionForHR(hr);

    // Build an empty track node and hand it to the root composition.
    IAMTimelineObj trackObj;
    hr = m_pTimeline.CreateEmptyNode(out trackObj, TimelineMajorType.Track);
    DESError.ThrowExceptionForHR(hr);

    IAMTimelineComp rootComposition = (IAMTimelineComp)m_pVideoGroupObj;
    hr = rootComposition.VTrackInsBefore(trackObj, -1);
    DESError.ThrowExceptionForHR(hr);

    m_VideoTrack = (IAMTimelineTrack)trackObj;
}
// Create a timeline source for the given file and attach it to the video track.
// Always returns 0.
private long AddVideo(string VideoFile)
{
    int hr;
    IAMTimelineObj sourceObj;

    // Build the empty source node on the timeline.
    hr = m_pTimeline.CreateEmptyNode(out sourceObj, TimelineMajorType.Source);
    DESError.ThrowExceptionForHR(hr);

    try
    {
        hr = sourceObj.SetStartStop(0, 10000000000);
        DESError.ThrowExceptionForHR(hr);

        // Assign the media file to the source.
        IAMTimelineSrc source = (IAMTimelineSrc)sourceObj;
        hr = source.SetMediaName(VideoFile);
        DESError.ThrowExceptionForHR(hr);

        // Hook the source onto the video track.
        hr = m_VideoTrack.SrcAdd(sourceObj);
        DESError.ThrowExceptionForHR(hr);
    }
    finally
    {
        Marshal.ReleaseComObject(sourceObj);
    }

    return 0;
}
/// <summary>
/// Builds and runs a graph from the timeline, pumping messages until the graph
/// completes, then asserts that the error-log callback (m_Called) fired.
/// </summary>
private void TestLogError()
{
    int hr;
    IGraphBuilder fg = null;
    IRenderEngine ire = new RenderEngine() as IRenderEngine;
    try
    {
        hr = ire.SetTimelineObject(m_pTimeline);
        DESError.ThrowExceptionForHR(hr);

        hr = ire.ConnectFrontEnd();
        DESError.ThrowExceptionForHR(hr);

        hr = ire.RenderOutputPins();
        DESError.ThrowExceptionForHR(hr);

        hr = ire.GetFilterGraph(out fg);
        DESError.ThrowExceptionForHR(hr);

        hr = ((IMediaControl)fg).Run();
        DESError.ThrowExceptionForHR(hr);

        IMediaEvent ime = fg as IMediaEvent;
        EventCode evCode;
        const int E_Abort = unchecked((int)0x80004004);

        // WaitForCompletion returns E_ABORT (as the event code) while still running;
        // keep pumping UI messages so the graph's window callbacks are serviced.
        do
        {
            System.Windows.Forms.Application.DoEvents();
            System.Threading.Thread.Sleep(100);
            hr = ime.WaitForCompletion(1000, out evCode);
        } while (evCode == (EventCode)E_Abort);

        Debug.Assert(m_Called == true, "LogError");
    }
    finally
    {
        // FIX: the graph and render engine COM objects were previously leaked.
        if (fg != null)
        {
            Marshal.ReleaseComObject(fg);
        }
        if (ire != null)
        {
            Marshal.ReleaseComObject(ire);
        }
    }
}
/// <summary>
/// Construct the class.
/// </summary>
/// <remarks>
/// The input files do not need to share height/width/fps; DES converts them
/// all to the values given here.
/// </remarks>
/// <param name="FPS">Frames per second (commonly 15 or 30)</param>
/// <param name="BitCount">Color depth: 16, 24 or 32</param>
/// <param name="Width">Frame width (commonly 320, 640, etc)</param>
/// <param name="Height">Frame height (commonly 240, 480, etc)</param>
/// <param name="audio">support only audio files</param>
/// <param name="video">support only video files</param>
public DESCombine(double FPS, short BitCount, int Width, int Height, bool audio, bool video)
{
    threadCompleted = true;
    m_State = ClassState.Constructed;

    // Build the DES timeline and apply the requested frame rate.
    m_pTimeline = (IAMTimeline)new AMTimeline();
    int hr = m_pTimeline.SetDefaultFPS(FPS);
    DESError.ThrowExceptionForHR(hr);

    supportAudio = audio;
    supportVideo = video;

    // Create a media group for each stream type the caller asked for.
    if (supportVideo)
    {
        m_Video = new MediaGroup(GetVideoMediaType(BitCount, Width, Height), m_pTimeline, FPS);
    }
    if (supportAudio)
    {
        m_Audio = new MediaGroup(GetAudioMediaType(), m_pTimeline, FPS);
    }

    this.ThreadFinished += new EventHandler(DESCombine_ThreadFinished);
}
/// <summary>
/// Returns an XML description of the capture graph (as seen by DES).
/// </summary>
/// <remarks>
/// Only valid after a render method has been selected. Mainly useful for
/// debugging; building a DES graph *from* XML is not implemented here.
/// </remarks>
/// <returns>string containing XML</returns>
public string GetXML()
{
    if (m_State < ClassState.RenderSelected)
    {
        throw new Exception("No render method has been selected");
    }

    IXml2Dex xmlWriter = (IXml2Dex)new Xml2Dex();
    string xml;
    try
    {
        int hr = xmlWriter.WriteXML(m_pTimeline, out xml);
        DESError.ThrowExceptionForHR(hr);
    }
    finally
    {
        Marshal.ReleaseComObject(xmlWriter);
    }
    return xml;
}
/// <summary>
/// Sets up the render engine for the supplied timeline: attaches the timeline,
/// connects the front end, fetches the filter graph, and caches the first
/// audio/video groups for progress tracking.
/// </summary>
/// <param name="timeline">Timeline to render; must not be null</param>
protected AbstractRenderer(ITimeline timeline)
{
    if (timeline == null)
    {
        throw new ArgumentNullException(TimelineParameterName);
    }

    _timeline = timeline;

    int hr = 0;

    // create the render engine
    _renderEngine = (IRenderEngine)new RenderEngine();
    _cleanup.Add(_renderEngine);

    // tell the render engine about the timeline it should use
    hr = _renderEngine.SetTimelineObject(_timeline.DesTimeline);
    DESError.ThrowExceptionForHR(hr);

    // connect up the front end
    hr = _renderEngine.ConnectFrontEnd();
    DESError.ThrowExceptionForHR(hr);

    // Get the filtergraph - used all over the place
    hr = _renderEngine.GetFilterGraph(out _graph);
    // FIX: validate the HRESULT *before* registering the graph for cleanup;
    // previously a failed call could add a null/invalid graph to _cleanup.
    DESError.ThrowExceptionForHR(hr);
    _cleanup.Add(Graph);

    // find the first (and usually last) audio and video group, we use these
    // when rendering to track progress
    _firstAudioGroup = _timeline.FindFirstGroupOfType(GroupType.Audio);
    _firstVideoGroup = _timeline.FindFirstGroupOfType(GroupType.Video);
}
// Load timeline contents from "foo.xml" via the Xml2Dex reader.
private void ReadXML()
{
    int hr = m_ixd.ReadXMLFile(m_pTimeline, "foo.xml");
    DESError.ThrowExceptionForHR(hr);
}
// Exercises GetGroupCount / ClearAllGroups / AddGroup / GetGroup / RemGroupFromList,
// asserting the count after each mutation.
private void TestGroupCount()
{
    int hr;
    int groupCount;
    IAMTimelineObj groupObj;

    // One group should exist at the start.
    hr = m_pTimeline.GetGroupCount(out groupCount);
    DESError.ThrowExceptionForHR(hr);
    Debug.Assert(groupCount == 1, "GetGroupCount");

    // Clearing should drop the count to zero.
    hr = m_pTimeline.ClearAllGroups();
    DESError.ThrowExceptionForHR(hr);
    hr = m_pTimeline.GetGroupCount(out groupCount);
    DESError.ThrowExceptionForHR(hr);
    Debug.Assert(groupCount == 0, "ClearAllGroups");

    // Re-add the video group, fetch it back, then remove it from the list.
    hr = m_pTimeline.AddGroup((IAMTimelineObj)m_pVideoGroup);
    DESError.ThrowExceptionForHR(hr);
    hr = m_pTimeline.GetGroup(out groupObj, 0);
    DESError.ThrowExceptionForHR(hr);
    hr = m_pTimeline.RemGroupFromList((IAMTimelineObj)groupObj);
    DESError.ThrowExceptionForHR(hr);
    hr = m_pTimeline.GetGroupCount(out groupCount);
    DESError.ThrowExceptionForHR(hr);
    Debug.Assert(groupCount == 0, "RemGroupFromList");
}
// Adds two sources to a fresh track, then verifies GetDuration / GetDuration2
// and runs ValidateSourceNames over the timeline.
private void TestDuration()
{
    int hr;
    long duration;
    double durationSeconds;

    // Create a track and attach it to the root composition.
    IAMTimelineObj trackObj;
    hr = m_pTimeline.CreateEmptyNode(out trackObj, TimelineMajorType.Track);
    DESError.ThrowExceptionForHR(hr);

    IAMTimelineComp rootComposition = (IAMTimelineComp)m_pVideoGroup;
    hr = rootComposition.VTrackInsBefore(trackObj, -1);
    DESError.ThrowExceptionForHR(hr);
    m_VideoTrack = (IAMTimelineTrack)trackObj;

    // Two sources; the second file name is deliberately bogus for ValidateSourceNames.
    AddVideo("foo.avi");
    AddVideo("foxo.avi");

    hr = m_pTimeline.GetDuration(out duration);
    DESError.ThrowExceptionForHR(hr);
    Debug.Assert(duration == 10000000000, "Duration");

    hr = m_pTimeline.GetDuration2(out durationSeconds);
    DESError.ThrowExceptionForHR(hr);
    Debug.Assert(durationSeconds == 1000.0, "GetDuration2");

    hr = m_pTimeline.ValidateSourceNames(
        SFNValidateFlags.Replace | SFNValidateFlags.Check | SFNValidateFlags.Popup,
        this,
        IntPtr.Zero);
    DESError.ThrowExceptionForHR(hr);
}
// Add the video group to the timeline, throwing on failure.
private void TestAddGroup()
{
    int hr = m_pTimeline.AddGroup((IAMTimelineObj)m_pVideoGroup);
    DESError.ThrowExceptionForHR(hr);
}
// Attach the transition returned by GetTrans() to the track, throwing on failure.
private void TestAdd()
{
    int hr = m_itt.TransAdd(GetTrans());
    DESError.ThrowExceptionForHR(hr);
}
// Build the timeline + video track, create one file-backed source on the track,
// and cache its splittable interface for later tests.
private void Config()
{
    int hr;
    IAMTimelineObj sourceObj;

    m_pTimeline = (IAMTimeline)new AMTimeline();
    InitVideo();

    // Create the timeline source node.
    hr = m_pTimeline.CreateEmptyNode(out sourceObj, TimelineMajorType.Source);
    DESError.ThrowExceptionForHR(hr);
    m_pSource1Src = (IAMTimelineSrc)sourceObj;

    // Point the source at its media file and give it a (very long) span.
    hr = m_pSource1Src.SetMediaName("foo.avi");
    DESError.ThrowExceptionForHR(hr);

    hr = ((IAMTimelineObj)sourceObj).SetStartStop(0, 1234563053945);
    DESError.ThrowExceptionForHR(hr);

    // Attach the source to the video track.
    hr = m_VideoTrack.SrcAdd((IAMTimelineObj)sourceObj);
    DESError.ThrowExceptionForHR(hr);

    m_Splittable = (IAMTimelineSplittable)sourceObj;
}
/// <summary>
/// Time:2008-6-23
/// Author:michael
/// Function: Return the length of the media file.
/// Returns 0 if the file cannot be opened or its length cannot be read.
/// </summary>
/// <param name="m_FileName">Path of the media file to inspect</param>
/// <returns>Length in 100ns UNITS, or 0 on failure</returns>
public static long GetMediaLength(string m_FileName)
{
    int hr;
    double d;
    long i;
    IMediaDet imd = null;
    try
    {
        imd = (IMediaDet)new MediaDet();

        // Set the name
        hr = imd.put_Filename(m_FileName);
        DESError.ThrowExceptionForHR(hr);

        // Read from stream zero
        hr = imd.put_CurrentStream(0);
        DESError.ThrowExceptionForHR(hr);

        // Get the length in seconds
        hr = imd.get_StreamLength(out d);
        DESError.ThrowExceptionForHR(hr);

        // Convert to UNITS
        i = (long)(d * DESConsts.UNITS);
    }
    catch
    {
        // Preserve the original contract: any failure yields 0.
        i = 0;
    }
    finally
    {
        // FIX: release the MediaDet even when an exception occurred; it was
        // previously only released on the success path (COM leak on failure).
        if (imd != null)
        {
            Marshal.ReleaseComObject(imd);
        }
    }
    return i;
}
/// <summary>
/// Add a file-backed source to the given track, spanning StartTime..EndTime
/// (100ns units), snapping the times to the frame rate via FixMediaTimes.
/// </summary>
/// <param name="myTrack">Track to receive the source</param>
/// <param name="SourceFile">Path of the media file</param>
/// <param name="StartTime">Start time in 100ns units</param>
/// <param name="EndTime">End time in 100ns units</param>
private void AddSource(IAMTimelineTrack myTrack, string SourceFile, long StartTime, long EndTime)
{
    int hr;
    IAMTimelineObj pSource1Obj;

    // create the timeline source object
    hr = m_pTimeline.CreateEmptyNode(out pSource1Obj, TimelineMajorType.Source);
    DESError.ThrowExceptionForHR(hr);

    try
    {
        // set up source length
        hr = pSource1Obj.SetStartStop(StartTime, EndTime);
        DESError.ThrowExceptionForHR(hr);

        IAMTimelineSrc pSource1Src = (IAMTimelineSrc)pSource1Obj;

        // Set the file name
        hr = pSource1Src.SetMediaName(SourceFile);
        DESError.ThrowExceptionForHR(hr);

        // Set the start/end
        hr = pSource1Src.SetMediaLength(EndTime - StartTime);
        DESError.ThrowExceptionForHR(hr);

        hr = pSource1Src.SetStretchMode(0);
        DESError.ThrowExceptionForHR(hr); // FIX: this HRESULT was previously ignored

        // Set the times, get back the times adjusted to fit the frame rate
        hr = pSource1Src.FixMediaTimes(ref StartTime, ref EndTime);
        DESError.ThrowExceptionForHR(hr);

        // Connect the track to the source
        hr = myTrack.SrcAdd(pSource1Obj);
        DESError.ThrowExceptionForHR(hr);
    }
    finally
    {
        Marshal.ReleaseComObject(pSource1Obj);
    }
}
/// <summary>
/// Determine whether a specified pin is audio or video by inspecting the
/// first media type it enumerates.
/// </summary>
/// <param name="pPin">Pin to check</param>
/// <returns>True if pin is video</returns>
private bool IsVideo(IPin pPin)
{
    int hr;
    bool bRet = false;
    AMMediaType[] pmt = new AMMediaType[1];
    IEnumMediaTypes ppEnum;

    // Walk the MediaTypes for the pin
    hr = pPin.EnumMediaTypes(out ppEnum);
    DESError.ThrowExceptionForHR(hr);
    try
    {
        // Just read the first one
        hr = ppEnum.Next(1, pmt, IntPtr.Zero);
        DESError.ThrowExceptionForHR(hr);

        // FIX: Next may return S_FALSE with no media type fetched; guard
        // against a null entry instead of dereferencing it (previous NRE risk).
        if (pmt[0] != null)
        {
            bRet = pmt[0].majorType == MediaType.Video;
        }
    }
    finally
    {
        Marshal.ReleaseComObject(ppEnum);
        // FIX: free the media type on all paths; previously it leaked if an
        // exception was thrown inside the try block.
        DsUtils.FreeAMMediaType(pmt[0]);
    }
    return bRet;
}
/// <summary>
/// Add a solid-color source to the video track on the given layer, spanning
/// StartTime..EndTime.
/// </summary>
/// <param name="myTrack">Track to receive the color source</param>
/// <param name="sColor">Color to render</param>
/// <param name="StartTime">Start time in 100ns units</param>
/// <param name="EndTime">End time in 100ns units</param>
private void AddColor(IAMTimelineTrack myTrack, Color sColor, long StartTime, long EndTime)
{
    int hr;
    IAMTimelineObj colorSourceObj;

    // Build the empty source node.
    hr = m_pTimeline.CreateEmptyNode(out colorSourceObj, TimelineMajorType.Source);
    DESError.ThrowExceptionForHR(hr);

    try
    {
        // Give it its span and paint it with the requested color.
        hr = colorSourceObj.SetStartStop(StartTime, EndTime);
        DESError.ThrowExceptionForHR(hr);

        DESTransition.SetColor(colorSourceObj, sColor);

        // Attach the color source to the track.
        hr = myTrack.SrcAdd(colorSourceObj);
        DESError.ThrowExceptionForHR(hr);
    }
    finally
    {
        Marshal.ReleaseComObject(colorSourceObj);
    }
}
/// <summary>
/// Begins rendering and returns immediately.
/// </summary>
/// <remarks>
/// Final status is sent as a <see cref="DESCombine.Completed"/> event.
/// </remarks>
public void StartRendering()
{
    if (m_State < ClassState.RenderSelected)
    {
        throw new Exception("Render method not selected");
    }

    m_State = ClassState.GraphStarted;
    m_pControl = (IMediaControl)m_pGraph;

    // Spin up the media-event thread only if one is not already running.
    if (threadCompleted)
    {
        Thread eventThread = new Thread(new ThreadStart(EventWait));
        eventThread.Name = "Media Event Thread";
        eventThread.Start();
        threadCompleted = false;
    }

    int hr = m_pControl.Run();
    DESError.ThrowExceptionForHR(hr);
}
// Render every group to null renderers (discard output), invoking the supplied
// per-sample callbacks for audio and video. All COM objects created here are
// released before returning.
private void RenderToNullRenderer(IDESCombineCB audioCallback, IDESCombineCB videoCallback)
{
    ICaptureGraphBuilder2 graphBuilder = (ICaptureGraphBuilder2)new CaptureGraphBuilder2();
    try
    {
        int hr = graphBuilder.SetFiltergraph(_graph);
        DESError.ThrowExceptionForHR(hr);

        // One null renderer per stream type.
        IBaseFilter nullAudio = StandardFilters.RenderNull(_dc, _graph);
        IBaseFilter nullVideo = StandardFilters.RenderNull(_dc, _graph);
        try
        {
            RenderGroups(graphBuilder, null, null, nullAudio, nullVideo, audioCallback, videoCallback);
        }
        finally
        {
            if (nullAudio != null)
            {
                Marshal.ReleaseComObject(nullAudio);
            }
            if (nullVideo != null)
            {
                Marshal.ReleaseComObject(nullVideo);
            }
        }
    }
    finally
    {
        Marshal.ReleaseComObject(graphBuilder);
    }
}
/// <summary>
/// Constructor
/// </summary>
/// <param name="mType">Media type of the new group; freed here via
/// DsUtils.FreeAMMediaType after being handed to the group — callers must
/// not free or reuse it</param>
/// <param name="pTimeline">Timeline to use for the group</param>
/// <param name="fps">FPS for the group</param>
public MediaGroup(AMMediaType mType, IAMTimeline pTimeline, double fps)
{
    int hr;
    IAMTimelineObj pGroupObj;

    m_Length = 0;
    m_Files = new ArrayList();
    m_FPS = fps;
    m_pTimeline = pTimeline;

    // make the root group/composition
    hr = m_pTimeline.CreateEmptyNode(out pGroupObj, TimelineMajorType.Group);
    DESError.ThrowExceptionForHR(hr);

    try
    {
        m_pGroup = (IAMTimelineGroup)pGroupObj;

        // Set the media type we just created
        hr = m_pGroup.SetMediaType(mType);
        DESError.ThrowExceptionForHR(hr);
        DsUtils.FreeAMMediaType(mType);

        // add the video group to the timeline
        hr = m_pTimeline.AddGroup(pGroupObj);
        DESError.ThrowExceptionForHR(hr);
    }
    finally
    {
        // NOTE(review): pGroupObj is deliberately NOT released here — m_pGroup
        // is the same COM object and must stay alive for the group's lifetime.
        // Presumably it is released during Dispose; confirm before changing.
        //Marshal.ReleaseComObject(pGroupObj);
    }
    //Marshal.ReleaseComObject(pTrack1Obj);  // Released as m_VideoTrack in dispose
}
// Exercise SetMediaTypeForVB with stream 0, throwing on failure.
private void TestVMMediaType()
{
    int hr = m_itg.SetMediaTypeForVB(0);
    DESError.ThrowExceptionForHR(hr);
}
/// <summary>
/// Exercises SaveToBlob / ClearProps / LoadFromBlob: saves the current
/// properties, clears them (expecting a near-empty 4-byte blob), restores
/// them, and verifies the restored blob matches the original size.
/// </summary>
private void TestBlob()
{
    int hr;
    IntPtr ip;
    int i;
    IntPtr ip2;
    int i2;

    hr = m_ips.SaveToBlob(out i, out ip);
    DESError.ThrowExceptionForHR(hr);

    hr = m_ips.ClearProps();
    DESError.ThrowExceptionForHR(hr); // FIX: this HRESULT was previously unchecked

    hr = m_ips.SaveToBlob(out i2, out ip2);
    DESError.ThrowExceptionForHR(hr);
    Debug.Assert(i2 == 4, "Mostly empty blob");
    Marshal.FreeCoTaskMem(ip2);

    hr = m_ips.LoadFromBlob(i, ip);
    DESError.ThrowExceptionForHR(hr);

    hr = m_ips.SaveToBlob(out i2, out ip2);
    DESError.ThrowExceptionForHR(hr);
    Debug.Assert(i == i2, "Save/Restore blob");

    Marshal.FreeCoTaskMem(ip);
    Marshal.FreeCoTaskMem(ip2);
}
/// <summary>
/// Verifies IsSmartRecompressFormatSet transitions from false to true after
/// SetRecompFormatFromSource is called with a file-backed source.
/// </summary>
private void TestRecompFormatFromSource()
{
    int hr;
    bool b;
    IAMTimelineObj pFirst;

    hr = m_pTimeline.CreateEmptyNode(out pFirst, TimelineMajorType.Source);
    DESError.ThrowExceptionForHR(hr);

    hr = pFirst.SetStartStop(0, 10000000000);
    DESError.ThrowExceptionForHR(hr);

    IAMTimelineSrc pFirstSrc = (IAMTimelineSrc)pFirst;

    hr = pFirstSrc.SetMediaTimes(0, 10000000000);
    DESError.ThrowExceptionForHR(hr); // FIX: this HRESULT was previously unchecked

    // Put in the file name
    hr = pFirstSrc.SetMediaName("foo.avi");
    DESError.ThrowExceptionForHR(hr);

    hr = m_itg.IsSmartRecompressFormatSet(out b);
    DESError.ThrowExceptionForHR(hr);
    Debug.Assert(b == false, "IsSmart");

    hr = m_itg.SetRecompFormatFromSource(pFirstSrc);
    DESError.ThrowExceptionForHR(hr);

    hr = m_itg.IsSmartRecompressFormatSet(out b);
    DESError.ThrowExceptionForHR(hr);
    Debug.Assert(b == true, "IsSmart2");
}
/// <summary>
/// Return the length of the media file's stream zero.
/// </summary>
/// <param name="fileName">Path of the media file to inspect</param>
/// <returns>Length in 100ns Units</returns>
public static long GetLength(string fileName)
{
    int hr;
    double d;

    var imd = (IMediaDet)new MediaDet();
    try
    {
        // Set the name
        hr = imd.put_Filename(fileName);
        DESError.ThrowExceptionForHR(hr);

        // Read from stream zero
        hr = imd.put_CurrentStream(0);
        DESError.ThrowExceptionForHR(hr);

        // Get the length in seconds
        hr = imd.get_StreamLength(out d);
        DESError.ThrowExceptionForHR(hr);
    }
    finally
    {
        // FIX: release the MediaDet on all paths; it was previously leaked
        // whenever one of the calls above threw.
        Marshal.ReleaseComObject(imd);
    }

    // Convert to Units
    return (long)(d * TimelineBuilder.Units);
}
/// <summary>
/// Return the length of the media file (m_FileName), stream zero.
/// </summary>
/// <returns>Length in 100NS</returns>
private long GetLength()
{
    int hr;
    double d;

    IMediaDet imd = (IMediaDet)new MediaDet();
    try
    {
        // Set the name
        hr = imd.put_Filename(m_FileName);
        DESError.ThrowExceptionForHR(hr);

        // Read from stream zero
        hr = imd.put_CurrentStream(0);
        DESError.ThrowExceptionForHR(hr);

        // Get the length in seconds
        hr = imd.get_StreamLength(out d);
        DESError.ThrowExceptionForHR(hr);
    }
    finally
    {
        // FIX: release the MediaDet on all paths; it was previously leaked
        // whenever one of the calls above threw.
        Marshal.ReleaseComObject(imd);
    }

    // Convert to UNITS
    return (long)(d * DESCombine.UNITS);
}
/// <summary>
/// Exercises IAMTimelineObj.FixTimes2 (the double/seconds variant): sets a
/// start/stop pair, adds the source to the track, then verifies the snapped
/// times to one decimal place.
/// </summary>
private void TestFix2()
{
    const double START = 123.456;
    const double END = 234.567;

    int hr;
    double i1, i2;
    IAMTimelineObj pSource1Obj;

    hr = m_pTimeline.CreateEmptyNode(out pSource1Obj, TimelineMajorType.Source);
    DESError.ThrowExceptionForHR(hr); // FIX: this HRESULT was previously unchecked

    hr = pSource1Obj.SetStartStop2(START, END);
    DESError.ThrowExceptionForHR(hr);

    hr = pSource1Obj.GetStartStop2(out i1, out i2);
    DESError.ThrowExceptionForHR(hr);

    hr = m_VideoTrack.SrcAdd(pSource1Obj);
    DESError.ThrowExceptionForHR(hr);

    hr = pSource1Obj.FixTimes2(ref i1, ref i2);
    DESError.ThrowExceptionForHR(hr);

    Debug.Assert((int)(i1 * 10) == 1234 && (int)(i2 * 10) == 2345, "fix2");
}
/// <summary>
/// Writes a bitmap snapshot of the media file at the given time to
/// outImageFile, sized to the video's native width/height. Failures are
/// swallowed silently (preserving the original contract).
/// </summary>
/// <param name="mediaFile">Path of the media file to read</param>
/// <param name="outImageFile">Path of the bitmap file to write</param>
/// <param name="time">Position within the stream, in seconds</param>
public static void GetImageFromMedia(string mediaFile, string outImageFile, double time)
{
    int hr;
    IMediaDet imd = (IMediaDet)new MediaDet();
    try
    {
        // Set the name
        hr = imd.put_Filename(mediaFile);
        DESError.ThrowExceptionForHR(hr);

        // Read the stream media type to discover the native video size.
        AMMediaType mediaType = new AMMediaType();
        try
        {
            hr = imd.get_StreamMediaType(mediaType);
            DESError.ThrowExceptionForHR(hr); // FIX: this HRESULT was previously ignored

            VideoInfoHeader infoHeader =
                (VideoInfoHeader)Marshal.PtrToStructure(mediaType.formatPtr, typeof(VideoInfoHeader));
            int width = infoHeader.BmiHeader.Width;
            int height = infoHeader.BmiHeader.Height;

            // Write the frame at the requested time.
            hr = imd.WriteBitmapBits(time, width, height, outImageFile);
            DESError.ThrowExceptionForHR(hr);
        }
        finally
        {
            // FIX: the media type's format block was previously never freed.
            DsUtils.FreeAMMediaType(mediaType);
        }
    }
    catch
    {
        // Preserve original behavior: all failures are swallowed.
    }
    finally
    {
        // FIX: single release point replaces the duplicated release in both
        // the success and catch paths of the original.
        Marshal.ReleaseComObject(imd);
    }
}
/// <summary>
/// Determine whether the specified pin is a video pin by inspecting the first
/// media type it enumerates.
/// </summary>
/// <param name="pin">Pin to check; must not be null</param>
/// <returns>True if pin is video</returns>
public static bool IsVideo(IPin pin)
{
    if (pin == null)
    {
        throw new ArgumentNullException("pin");
    }

    int hr;
    bool bRet = false;
    var pmt = new AMMediaType[1];
    IEnumMediaTypes ppEnum;
    IntPtr i = IntPtr.Zero;

    // Walk the MediaTypes for the pin
    hr = pin.EnumMediaTypes(out ppEnum);
    DESError.ThrowExceptionForHR(hr);
    try
    {
        // Just read the first one
        hr = ppEnum.Next(1, pmt, i);
        DESError.ThrowExceptionForHR(hr);

        // FIX: Next may return S_FALSE with no media type fetched; guard
        // against a null entry instead of dereferencing it (previous NRE risk).
        if (pmt[0] != null)
        {
            bRet = pmt[0].majorType == MediaType.Video;
        }
    }
    finally
    {
        Marshal.ReleaseComObject(ppEnum);
        // FIX: free the media type on all paths; previously it leaked if an
        // exception was thrown inside the try block.
        DsUtils.FreeAMMediaType(pmt[0]);
    }
    return bRet;
}
// Connect the render engine's front end to the timeline, throwing on failure.
private void TestConnect()
{
    int hr = m_ire.ConnectFrontEnd();
    DESError.ThrowExceptionForHR(hr);
}