public void AddEffectSetsAppropriateContainer() { using (ITimeline timeline = new DefaultTimeline()) { EffectDefinition def = StandardEffects.CreateDefaultBlur(); IGroup group = timeline.AddVideoGroup(24, 100, 100); IEffect groupEffect = group.AddEffect(0, 10, def); Assert.AreSame(group, groupEffect.Group); Assert.AreSame(group, groupEffect.Container); ITrack track = group.AddTrack(); IEffect trackEffect = track.AddEffect(0, 10, def); Assert.AreSame(group, trackEffect.Group); Assert.AreSame(track, trackEffect.Container); IComposition composition = group.AddComposition(); IEffect compositionEffect = composition.AddEffect(0, 10, def); Assert.AreSame(group, compositionEffect.Group); Assert.AreSame(composition, compositionEffect.Container); IClip clip = track.AddClip("image1.jpg", GroupMediaType.Image, InsertPosition.Absolute, 0, 0, 10); IEffect clipEffect = clip.AddEffect(0, 10, def); Assert.AreSame(group, clip.Group); Assert.AreSame(clip, clipEffect.Container); } }
public void AddClipsToTrack() { using (ITimeline timeline = new DefaultTimeline()) { IGroup group = timeline.AddVideoGroup(24, 320, 200); ITrack track1 = group.AddTrack(); ITrack track2 = group.AddTrack(); track1.AddClip("image1.jpg", GroupMediaType.Image, InsertPosition.Relative, 0, 0, 2); track2.AddClip("image2.jpg", GroupMediaType.Image, InsertPosition.Relative, 0, 0, 2); track1.AddClip("image3.jpg", GroupMediaType.Image, InsertPosition.Relative, 0, 0, 2); track2.AddClip("image4.jpg", GroupMediaType.Image, InsertPosition.Relative, 0, 0, 2); Assert.AreEqual(2, track1.Clips.Count); Assert.AreEqual(2, track2.Clips.Count); PrepareToExecute(timeline, @"<timeline framerate=""30.0000000""> <group type=""video"" bitdepth=""24"" height=""200"" framerate=""30.0000000"" previewmode=""0""> <track> <clip start=""0"" stop=""2"" src=""image1.jpg"" /> <clip start=""2"" stop=""4"" src=""image3.jpg"" /> </track> <track> <clip start=""0"" stop=""2"" src=""image2.jpg"" /> <clip start=""2"" stop=""4"" src=""image4.jpg"" /> </track> </group> </timeline>"); } }
public void RenderWithInappropriateProfile2() { string outputFile = "RenderWithInappropriateProfile2.wmv"; using (ITimeline timeline = new DefaultTimeline()) { IGroup videoGroup = timeline.AddVideoGroup(24, 100, 100); ITrack rootTrack = videoGroup.AddTrack(); rootTrack.AddClip("transitions.wmv", GroupMediaType.Video, InsertPosition.Absolute, 0, 0, 2); using (WindowsMediaRenderer renderer = new WindowsMediaRenderer(timeline, outputFile, WindowsMediaProfiles.LowQualityVideo)) { ExecuteRenderer(renderer, @"<timeline framerate=""30.0000000""> <group type=""audio"" framerate=""30.0000000"" previewmode=""0""> <track> <clip start=""0"" stop=""2"" src=""testinput.mp3"" mstart=""0"" /> </track> </group> </timeline>"); } } }
public void ConvertMp3ToWavWithCompressor()
{
    string outputFile = "ConvertMp3ToWavWithCompressor.wav";

    // create the timeline
    using (ITimeline timeline = new DefaultTimeline())
    {
        IGroup audioGroup = timeline.AddAudioGroup();
        ITrack rootTrack = audioGroup.AddTrack();
        rootTrack.AddClip("testinput.mp3", GroupMediaType.Audio, InsertPosition.Absolute, 0, 0, 2);

        // render the timeline
        using (var renderer = new WavFileRenderer(timeline, outputFile, AudioFormat.LowQualityMonoPcm,
                   new ICallbackParticipant[] { new ConsoleProgressParticipant() }))
        {
            ExecuteRenderer(renderer,
                @"<timeline framerate=""30.0000000""> <group type=""audio"" framerate=""30.0000000"" previewmode=""0""> <track> <clip start=""0"" stop=""2"" src=""testinput.mp3"" mstart=""0""/> </track> </group> </timeline>");
        }

        AssertLengths(timeline, 2, outputFile);
    }
}
public static ITimeline timeline() { /* builds a timeline containing a single empty audio track */ var timeline = new DefaultTimeline(); var audioTrack = timeline.AddAudioGroup().AddTrack(); return timeline; }
public void RenderAudioAndVideo()
{
    // create the timeline
    using (ITimeline timeline = new DefaultTimeline())
    {
        IGroup videoGroup = timeline.AddVideoGroup(24, 320, 240);
        ITrack videoTrack = videoGroup.AddTrack();
        videoTrack.AddClip("..\\..\\transitions.wmv", GroupMediaType.Video, InsertPosition.Relative, 0, 0, 2);

        IGroup audioGroup = timeline.AddAudioGroup();
        ITrack audioTrack = audioGroup.AddTrack();
        audioTrack.AddClip("..\\..\\testinput.mp3", GroupMediaType.Audio, InsertPosition.Relative, 0, 0, 2);

        // render the timeline
        using (var renderer = new NullRenderer(timeline))
        {
            ExecuteRenderer(renderer,
                @"<timeline framerate=""30.0000000""> <group type=""video"" bitdepth=""24"" framerate=""30.0000000"" previewmode=""0""> <track> <clip start=""0"" stop=""2"" src=""..\..\transitions.wmv"" mstart=""0"" /> </track> </group> <group type=""audio"" framerate=""30.0000000"" previewmode=""0""> <track> <clip start=""0"" stop=""2"" src=""..\..\testinput.mp3"" mstart=""0"" /> </track> </group> </timeline>");
        }
    }
}
public void CanRenderAudioVideoAndImages() { using (ITimeline timeline = new DefaultTimeline()) { IGroup audioGroup = timeline.AddAudioGroup(); ITrack audioTrack = audioGroup.AddTrack(); audioTrack.AddClip("..\\..\\testinput.mp3", GroupMediaType.Audio, InsertPosition.Absolute, 0, 0, 2); IGroup videoGroup = timeline.AddVideoGroup(24, 160, 100); ITrack videoTrack = videoGroup.AddTrack(); videoTrack.AddClip("..\\..\\transitions.wmv", GroupMediaType.Video, InsertPosition.Relative, 0, 0, 1); videoTrack.AddClip("..\\..\\image1.jpg", GroupMediaType.Image, InsertPosition.Relative, 0, 0, 1); using (var renderer = new NullRenderer(timeline)) { ExecuteRenderer(renderer, @"<timeline framerate=""30.0000000""> <group type=""audio"" framerate=""30.0000000"" previewmode=""0""> <track> <clip start=""0"" stop=""2"" src=""..\..\testinput.mp3"" mstart=""0"" /> </track> </group> <group type=""video"" bitdepth=""24"" width=""160"" height=""100"" framerate=""30.0000000"" previewmode=""0""> <track> <clip start=""0"" stop=""1"" src=""..\..\transitions.wmv"" mstart=""0"" /> <clip start=""1"" stop=""2"" src=""..\..\image1.jpg"" /> </track> </group> </timeline>"); } } }
public void RenderWmvWithImageWatermark() { string outputFile = "RenderWmvWithImageWatermark.wmv"; using (Image waterMarkImage = Image.FromFile("..\\..\\corner_watermark.png")) using (ITimeline timeline = new DefaultTimeline()) { IGroup videoGroup = timeline.AddVideoGroup(32, 320, 240); ITrack videoTrack = videoGroup.AddTrack(); IClip videoClip = videoTrack.AddClip("..\\..\\transitions.wmv", GroupMediaType.Video, InsertPosition.Absolute, 0, 0, 2); IGroup audioGroup = timeline.AddAudioGroup(); ITrack audioTrack = audioGroup.AddTrack(); audioTrack.AddClip("..\\..\\testinput.mp3", GroupMediaType.Audio, InsertPosition.Absolute, 0, 0, 2); ICallbackParticipant[] videoParticipants = new ICallbackParticipant[] { new ImageWatermarkParticipant(32, 320, 240, true, waterMarkImage, new Point(200, 0)) }; using ( WindowsMediaRenderer renderer = new WindowsMediaRenderer(timeline, outputFile, WindowsMediaProfiles.HighQualityVideo, videoParticipants, null)) { renderer.Render(); } } }
public void ConvertWavToWav()
{
    string outputFile = "ConvertWavToWav.wav";

    // create the timeline
    using (ITimeline timeline = new DefaultTimeline())
    {
        IGroup audioGroup = timeline.AddAudioGroup();
        ITrack rootTrack = audioGroup.AddTrack();
        rootTrack.AddClip("testinput.wav", GroupMediaType.Audio, InsertPosition.Relative, 0, 0, 2);

        // render the timeline
        using (var renderer = new WavFileRenderer(timeline, outputFile))
        {
            ExecuteRenderer(renderer,
                @"<timeline framerate=""30.0000000""> <group type=""audio"" framerate=""30.0000000"" previewmode=""0""> <track> <clip start=""0"" stop=""2"" src=""testinput.wav"" mstart=""0""/> </track> </group> </timeline>");
        }

        AssertLengths(timeline, 2, outputFile);
    }
}
public void AddAudioClipWhenNoSupportingGroupExists() { using (ITimeline timeline = new DefaultTimeline()) { timeline.AddAudio("..\\..\\1sec.wav"); } }
public void RemoveClipEventHandlers() { int count = 0; EventHandler incrementBefore = delegate { count++; }; EventHandler <AddedEffectEventArgs> incrementAfter = delegate { count++; }; using (ITimeline timeline = new DefaultTimeline()) { IClip clip = timeline.AddAudioGroup().AddTrack().AddClip("testinput.wav", GroupMediaType.Audio, InsertPosition.Absolute, 0, 0, -1); clip.AddedEffect += incrementAfter; clip.AddingEffect += incrementBefore; clip.AddEffect(0, 2, StandardEffects.CreateDefaultBlur()); Assert.AreEqual(2, count); count = 0; clip.AddedEffect -= incrementAfter; clip.AddingEffect -= incrementBefore; clip.AddEffect(0, 2, StandardEffects.CreateDefaultBlur()); Assert.AreEqual(0, count); } }
public void TrackAddEffectBubblesToComposition() { int beforeCount = 0; int afterCount = 0; using (ITimeline timeline = new DefaultTimeline()) { IGroup group = timeline.AddVideoGroup(24, 100, 100); IComposition composition = group.AddComposition(); composition.BeforeEffectAdded += new EventHandler(delegate { beforeCount++; }); composition.AfterEffectAdded += new EventHandler <AfterEffectAddedEventArgs>(delegate { afterCount++; }); ITrack track = composition.AddTrack(); track.AddEffect("test", -1, 1, 2, StandardEffects.CreateBlurEffect(2, 2, 10)); Assert.AreEqual(1, beforeCount); Assert.AreEqual(1, afterCount); } }
public void RemoveEvents() { int count = 0; EventHandler increment = new EventHandler(delegate { count++; }); EventHandler<AfterEffectAddedEventArgs> incrementForAfterEffectAdded = new EventHandler<AfterEffectAddedEventArgs>(delegate { count++; }); EventHandler<AfterTransitionAddedEventArgs> incrementForAfterTransitionAdded = new EventHandler<AfterTransitionAddedEventArgs>(delegate { count++; }); EventHandler<AfterClipAddedEventArgs> incrementForAfterClipAdded = new EventHandler<AfterClipAddedEventArgs>(delegate { count++; }); using (ITimeline timeline = new DefaultTimeline()) { ITrack track = timeline.AddAudioGroup().AddTrack(); track.AfterEffectAdded += incrementForAfterEffectAdded; track.AfterTransitionAdded += incrementForAfterTransitionAdded; track.AfterClipAdded += incrementForAfterClipAdded; track.BeforeEffectAdded += increment; track.BeforeTransitionAdded += increment; track.BeforeClipAdded += increment; track.AddEffect(0, 2, StandardEffects.CreateDefaultBlur()); track.AddClip("testinput.mp3", GroupMediaType.Audio, InsertPosition.Absolute, 0, 0, 1); track.AddTransition(0, 2, StandardTransitions.CreateFade()); Assert.AreEqual(6, count); count = 0; track.AfterEffectAdded -= incrementForAfterEffectAdded; track.AfterTransitionAdded -= incrementForAfterTransitionAdded; track.AfterClipAdded -= incrementForAfterClipAdded; track.BeforeEffectAdded -= increment; track.BeforeTransitionAdded -= increment; track.BeforeClipAdded -= increment; track.AddEffect(0, 2, StandardEffects.CreateDefaultBlur()); track.AddClip("testinput.mp3", GroupMediaType.Audio, InsertPosition.Relative, 0, 0, 1); track.AddTransition(2, 2, StandardTransitions.CreateFade()); Assert.AreEqual(0, count); } }
public void EnsureClipBubblesBeforeAndAfterEffectAddedUp() { int beforeCount = 0; int afterCount = 0; using (ITimeline timeline = new DefaultTimeline()) { IGroup group = timeline.AddAudioGroup(); ITrack track = group.AddTrack(); track.BeforeEffectAdded += new EventHandler(delegate { beforeCount++; }); track.AfterEffectAdded += new EventHandler<AfterEffectAddedEventArgs>(delegate { afterCount++; }); IClip clip = track.AddClip("testinput.mp3", GroupMediaType.Audio, InsertPosition.Absolute, 0, 0, -1); clip.AddEffect(0, 1, StandardEffects.CreateDefaultBlur()); Assert.AreEqual(1, beforeCount); Assert.AreEqual(1, afterCount); } }
public void AddTransitionsToTrack() { using (ITimeline timeline = new DefaultTimeline()) { IGroup group = timeline.AddVideoGroup(24, 320, 200); ITrack track = group.AddTrack(); TransitionDefinition definition = StandardTransitions.CreateFade(); ITransition transition = track.AddTransition("test", 1, 3, definition, false); Assert.AreEqual(1, track.Transitions.Count); Assert.AreSame(transition, track.Transitions[0]); Assert.AreEqual("test", transition.Name); Assert.AreSame(definition, transition.TransitionDefinition); PrepareToExecute(timeline, @"<timeline framerate=""30.0000000""> <group type=""video"" bitdepth=""24"" height=""200"" framerate=""30.0000000"" previewmode=""0""> <track> <transition start=""1"" stop=""4"" clsid=""{16B280C5-EE70-11D1-9066-00C04FD9189D}"" username=""test"" /> </track> </group> </timeline>"); } }
public void ConvertMp3ToWavWithCompressor()
{
    // TODO: the compressor is being added, but the selected media type (encoding etc.)
    // seems to be ignored.

    string outputFile = "ConvertMp3ToWavWithCompressor.wav";

    // create the timeline
    using (ITimeline timeline = new DefaultTimeline())
    {
        IGroup audioGroup = timeline.AddAudioGroup();
        ITrack rootTrack = audioGroup.AddTrack();
        rootTrack.AddClip("testinput.mp3", GroupMediaType.Audio, InsertPosition.Absolute, 0, 0, 2);

        // render the timeline
        using (WavFileRenderer renderer = new WavFileRenderer(timeline, outputFile, CommonAudioFormats.LowQualityMonoPcm,
                   new ConsoleProgressCallback()))
        {
            ExecuteRenderer(renderer,
                @"<timeline framerate=""30.0000000""> <group type=""audio"" framerate=""30.0000000"" previewmode=""0""> <track> <clip start=""0"" stop=""2"" src=""testinput.mp3"" mstart=""0""/> </track> </group> </timeline>");
        }

        AssertLengths(timeline, 2, outputFile);
    }
}
public void RenderWithCompressor()
{
    string outputFile = "RenderWithCompressor.wav";

    using (AudioCompressor compressor = AudioCompressorFactory.Create(AudioFormat.CompactDiscQualityStereoPcm))
    using (ITimeline timeline = new DefaultTimeline())
    {
        IGroup audioGroup = timeline.AddAudioGroup();
        ITrack rootTrack = audioGroup.AddTrack();
        rootTrack.AddClip("testinput.wav", GroupMediaType.Audio, InsertPosition.Relative, 0, 0, 2);

        // render the timeline
        using (var renderer = new WavFileRenderer(timeline, outputFile, compressor.Filter, compressor.MediaType, null))
        {
            ExecuteRenderer(renderer,
                @"<timeline framerate=""30.0000000""> <group type=""audio"" framerate=""30.0000000"" previewmode=""0""> <track> <clip start=""0"" stop=""2"" src=""testinput.wav"" mstart=""0""/> </track> </group> </timeline>");
        }

        AssertLengths(timeline.Fps, 2, outputFile);
    }
}
public void PixelateAndIrisBetweenImages()
{
    string outputFile = "PixelateAndIrisBetweenImages.wmv";

    using (ITimeline timeline = new DefaultTimeline())
    {
        timeline.AddAudioGroup().AddTrack().AddClip("testinput.wav", GroupMediaType.Audio, InsertPosition.Relative, 0, 0, 17);

        IGroup group = timeline.AddVideoGroup(32, 160, 100);
        ITrack low = group.AddTrack();
        ITrack hi = group.AddTrack();

        hi.AddClip("image1.jpg", GroupMediaType.Image, InsertPosition.Absolute, 0, 0, 6);
        low.AddClip("image2.jpg", GroupMediaType.Image, InsertPosition.Absolute, 5, 0, 8);
        hi.AddClip("image3.jpg", GroupMediaType.Image, InsertPosition.Absolute, 11, 0, 6);

        // notice that we must apply the pixelation transition both "in" and "out"
        // to get the desired effect, just as with a fade
        hi.AddTransition(5.0, 1.0, StandardTransitions.CreatePixelate(), true);
        hi.AddTransition(6.0, 1.0, StandardTransitions.CreatePixelate(), false);

        // the iris transition is a one-shot
        hi.AddTransition(11.0, 2.0, StandardTransitions.CreateIris(), false);

        using (IRenderer renderer = new WindowsMediaRenderer(timeline, outputFile, WindowsMediaProfiles.HighQualityVideo))
        {
            renderer.Render();
        }
    }
}
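The same "in"/"out" pairing applies to other two-input transitions. As a rough sketch only (not part of the original suite, reusing the same `hi` track and `StandardTransitions` factory calls shown in the tests above), a fade could be substituted for the pixelate pair like this:

// Illustrative sketch: pair a fade with swapped inputs (the "in" direction) with a fade
// using normal inputs (the "out" direction) over the same overlap, mirroring the
// pixelate pair in the test above.
hi.AddTransition(5.0, 1.0, StandardTransitions.CreateFade(), true);
hi.AddTransition(6.0, 1.0, StandardTransitions.CreateFade(), false);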
public void AddComposition() { using (ITimeline timeline = new DefaultTimeline()) { IGroup group = timeline.AddAudioGroup(); IComposition composition = group.AddComposition(); Assert.AreSame(group, composition.Container); Assert.AreSame(group, composition.Group); bool firedBefore = false; bool firedAfter = false; composition.BeforeCompositionAdded += new EventHandler(delegate { firedBefore = true; }); composition.AfterCompositionAdded += new EventHandler <AfterCompositionAddedEventArgs>(delegate { firedAfter = true; }); IComposition childComposition = composition.AddComposition(); Assert.AreSame(composition, childComposition.Container); Assert.AreSame(group, childComposition.Group); Assert.AreEqual(1, composition.Compositions.Count); Assert.IsTrue(firedBefore); Assert.IsTrue(firedAfter); } }
public void AddTransition() { using (ITimeline timeline = new DefaultTimeline()) { IGroup group = timeline.AddVideoGroup(24, 100, 100); IComposition composition = group.AddComposition(); bool firedBefore = false; bool firedAfter = false; composition.BeforeTransitionAdded += new EventHandler(delegate { firedBefore = true; }); composition.AfterTransitionAdded += new EventHandler <AfterTransitionAddedEventArgs>(delegate { firedAfter = true; }); ITransition transition = composition.AddTransition("test", 0, 2, StandardTransitions.CreateFade(), false); Assert.AreEqual(1, composition.Transitions.Count); Assert.AreEqual("test", transition.Name); Assert.AreEqual(0, transition.Offset); Assert.AreEqual(2, transition.Duration); Assert.IsTrue(firedBefore); Assert.IsTrue(firedAfter); } }
public void AddEffectSetsAppropriateContainer() { using (ITimeline timeline = new DefaultTimeline()) { EffectDefinition def = StandardEffects.CreateDefaultBlur(); IGroup group = timeline.AddVideoGroup(24, 100, 100); IEffect groupEffect = group.AddEffect(0, 10, def); Assert.AreSame(group, groupEffect.Group); Assert.AreSame(group, groupEffect.Container); ITrack track = group.AddTrack(); IEffect trackEffect = track.AddEffect(0, 10, def); Assert.AreSame(group, trackEffect.Group); Assert.AreSame(track, trackEffect.Container); IComposition composition = group.AddComposition(); IEffect compositionEffect = composition.AddEffect(0, 10, def); Assert.AreSame(group, compositionEffect.Group); Assert.AreSame(composition, compositionEffect.Container); IClip clip = track.AddClip("..\\..\\image1.jpg", GroupMediaType.Image, InsertPosition.Absolute, 0, 0, 10); IEffect clipEffect = clip.AddEffect(0, 10, def); Assert.AreSame(group, clip.Group); Assert.AreSame(clip, clipEffect.Container); } }
public void AddAndRemoveHandler() { bool eventTriggered = false; using (ITimeline timeline = new DefaultTimeline()) { IGroup audioGroup = timeline.AddAudioGroup(); ITrack track = audioGroup.AddTrack(); track.AddClip("testinput.mp3", GroupMediaType.Audio, InsertPosition.Absolute, 0, 0, -1); using (NullRenderer renderer = new NullRenderer(timeline)) { EventHandler handler = new EventHandler(delegate { eventTriggered = true; }); renderer.RenderCompleted += handler; renderer.RenderCompleted -= handler; renderer.BeginRender(null, null); renderer.Cancel(); Assert.IsFalse(eventTriggered); } } }
/// <summary>
/// Saves the content of this <see cref="CdgFile"/> as video.
/// </summary>
/// <param name="path">
/// A <see cref="string"/> indicating the file to which the video should be saved.
/// </param>
public void SaveAsVideo(string path)
{
    Console.WriteLine("Saving...");

    using (ITimeline timeline = new DefaultTimeline())
    {
        IGroup group = timeline.AddVideoGroup(32, 300, 300);
        ITrack videoTrack = group.AddTrack();

        Bitmap frame = this.Next();
        int frames = 0;
        while (frame != null && frames < 100)
        {
            Console.WriteLine("Getting frame");
            videoTrack.AddImage(frame, 0, 0.2D);
            frame = this.Next();
            frames++;
        }

        try
        {
            Console.WriteLine(videoTrack.Duration);
            Console.WriteLine(this.stream.Name.Replace("cdg", "mp3"));
            ITrack audioTrack = timeline.AddAudioGroup().AddTrack();
            ////IClip audio = audioTrack.AddAudio(this.stream.Name.Replace("cdg", "mp3"), 0, videoTrack.Duration);

            IRenderer renderer = new WindowsMediaRenderer(timeline, path, WindowsMediaProfiles.HighQualityVideo);
            renderer.Render();
        }
        catch (Exception exception)
        {
            Console.WriteLine(exception);
        }
    }

    Console.WriteLine("Saving finished.");
}
public void AddVideoClipWhenNoSupportingGroupExists() { using (ITimeline timeline = new DefaultTimeline()) { timeline.AddVideo("1sec.wmv"); } }
public void JumpVolume()
{
    // an audible demonstration of the difference between interpolating
    // parameter values for an effect, and jumping directly to them.

    string outputFile = "JumpVolume.wma";

    using (ITimeline timeline = new DefaultTimeline())
    {
        IGroup group = timeline.AddAudioGroup();
        ITrack track = group.AddTrack();
        IClip clip = track.AddClip("testinput.mp3", GroupMediaType.Audio, InsertPosition.Relative, 0, 0, 10);

        EffectDefinition effectDefinition = new EffectDefinition(DxtSubObjects.AudioMixer);

        Parameter volumeParameter = new Parameter("Vol", 0.0, 2, 1.0);
        volumeParameter.Intervals.Add(new Interval(IntervalMode.Jump, 2.5, "0.2"));
        volumeParameter.Intervals.Add(new Interval(IntervalMode.Jump, 3.5, "0.8"));
        volumeParameter.Intervals.Add(new Interval(IntervalMode.Jump, 4.5, "0.2"));
        volumeParameter.Intervals.Add(new Interval(IntervalMode.Jump, 5, "1.0"));
        volumeParameter.Intervals.Add(new Interval(IntervalMode.Interpolate, clip.Duration, "0.0"));

        effectDefinition.Parameters.Add(volumeParameter);
        clip.AddEffect(0, clip.Duration, effectDefinition);

        using (IRenderer renderer = new WindowsMediaRenderer(timeline, outputFile, WindowsMediaProfiles.MediumQualityAudio))
        {
            renderer.Render();
        }
    }
}
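For comparison, here is a minimal sketch of the smoothly interpolated version of the same volume envelope. It is not part of the original suite and reuses only the `EffectDefinition`/`Parameter`/`Interval` API and the `clip` variable from the test above:

// Illustrative sketch: the same envelope expressed with IntervalMode.Interpolate,
// so the level ramps between the points instead of jumping to them. Built as a
// separate definition; attach it with clip.AddEffect in place of the one above.
EffectDefinition smoothDefinition = new EffectDefinition(DxtSubObjects.AudioMixer);
Parameter smoothVolume = new Parameter("Vol", 0.0, 2, 1.0);
smoothVolume.Intervals.Add(new Interval(IntervalMode.Interpolate, 2.5, "0.2"));
smoothVolume.Intervals.Add(new Interval(IntervalMode.Interpolate, 3.5, "0.8"));
smoothVolume.Intervals.Add(new Interval(IntervalMode.Interpolate, 4.5, "0.2"));
smoothVolume.Intervals.Add(new Interval(IntervalMode.Interpolate, clip.Duration, "0.0"));
smoothDefinition.Parameters.Add(smoothVolume);
clip.AddEffect(0, clip.Duration, smoothDefinition);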
public void AddTrackWithNames() { using (ITimeline timeline = new DefaultTimeline()) { IGroup group = timeline.AddAudioGroup(); ITrack track1 = group.AddTrack("track1", -1); Assert.AreEqual("track1", track1.Name); Assert.AreEqual(1, group.Tracks.Count); track1.AddClip("testinput.wav", GroupMediaType.Audio, InsertPosition.Relative, 0, 0, -1); Assert.AreSame(group.Tracks[0], track1); ITrack track2 = group.AddTrack("track2", -1); Assert.AreEqual("track2", track2.Name); track2.AddClip("testinput.wav", GroupMediaType.Audio, InsertPosition.Relative, 0, 0, -1); Assert.AreEqual(2, group.Tracks.Count); Assert.AreEqual(group.Tracks[1], track2); PrepareToExecute(timeline, @"<timeline framerate=""30.0000000""> <group type=""audio"" framerate=""30.0000000"" previewmode=""0""> <track username=""track1""> <clip start=""0"" stop=""55.1250000"" src=""testinput.wav"" mstart=""0"" /> </track> <track username=""track2""> <clip start=""0"" stop=""55.1250000"" src=""testinput.wav"" mstart=""0"" /> </track> </group> </timeline>"); } }
public void Constructor2() { using (ITimeline timeline = new DefaultTimeline(15)) { Assert.AreEqual(15, timeline.Fps); } }
public void AddEffect() { using (ITimeline timeline = new DefaultTimeline()) { IGroup group = timeline.AddVideoGroup(24, 100, 100); IComposition composition = group.AddComposition(); bool firedBefore = false; bool firedAfter = false; composition.BeforeEffectAdded += new EventHandler(delegate { firedBefore = true; }); composition.AfterEffectAdded += new EventHandler <AfterEffectAddedEventArgs>(delegate { firedAfter = true; }); IEffect effect = composition.AddEffect("test", -1, 1, 2, StandardEffects.CreateBlurEffect(2, 2, 10)); Assert.AreEqual("test", effect.Name); Assert.AreEqual(1, effect.Offset); Assert.AreEqual(2, effect.Duration); Assert.AreEqual(1, composition.Effects.Count); Assert.IsTrue(firedBefore); Assert.IsTrue(firedAfter); } }
public void ConvertMp3ToWMA() { string outputFile = "ConvertMp3ToWMA.wma"; using (ITimeline timeline = new DefaultTimeline()) { IGroup audioGroup = timeline.AddAudioGroup(); ITrack rootTrack = audioGroup.AddTrack(); rootTrack.AddClip("testinput.mp3", GroupMediaType.Audio, InsertPosition.Absolute, 0, 0, 2); using (WindowsMediaRenderer renderer = new WindowsMediaRenderer(timeline, outputFile, WindowsMediaProfiles.LowQualityAudio)) { ExecuteRenderer(renderer, @"<timeline framerate=""30.0000000""> <group type=""audio"" framerate=""30.0000000"" previewmode=""0""> <track> <clip start=""0"" stop=""2"" src=""testinput.mp3"" mstart=""0""/> </track> </group> </timeline>"); } Assert.IsTrue(File.Exists(outputFile)); AssertLengths(timeline, 2, outputFile); } }
public void AddTrack() { using (ITimeline timeline = new DefaultTimeline()) { IGroup group = timeline.AddAudioGroup(); IComposition composition = group.AddComposition(); bool firedBefore = false; bool firedAfter = false; composition.BeforeTrackAdded += new EventHandler(delegate { firedBefore = true; }); composition.AfterTrackAdded += new EventHandler <AfterTrackAddedEventArgs>(delegate { firedAfter = true; }); ITrack track = composition.AddTrack(); Assert.AreEqual(1, composition.Tracks.Count); Assert.IsTrue(firedBefore); Assert.IsTrue(firedAfter); } }
public void ConstructWithNullAudioClip() { using (ITimeline timeline = new DefaultTimeline()) { IClip videoClip = timeline.AddVideoGroup(24, 100, 100).AddTrack().AddVideo("1sec.wmv"); var pair = new AudioVideoClipPair(null, videoClip); } }
public void GetGroupContainer() { using (ITimeline timeline = new DefaultTimeline()) { IGroup group = timeline.AddAudioGroup(); ICompositionContainer container = group.Container; } }
public void ConstructWithNullVideoClip() { using (ITimeline timeline = new DefaultTimeline()) { IClip audioClip = timeline.AddAudioGroup().AddTrack().AddAudio("1sec.wav"); var pair = new AudioVideoClipPair(audioClip, null); } }
public void GetGroupGroup() { using (ITimeline timeline = new DefaultTimeline()) { IGroup group = timeline.AddAudioGroup(); Assert.AreSame(group, group.Group); } }
public void AddAudioClipToVideoGroup() { using (ITimeline timeline = new DefaultTimeline()) { ITrack track = timeline.AddVideoGroup(24, 64, 64).AddTrack(); track.AddClip("wav file", "1sec.wav", GroupMediaType.Audio, InsertPosition.Absolute, 0, 0, -1); } }
public void AddCompositionWithName() { using (ITimeline timeline = new DefaultTimeline()) { IGroup group = timeline.AddAudioGroup(); IComposition composition = group.AddComposition("named", -1); Assert.AreEqual("named", composition.Name); } }
public void AddClipResolvesDuration() { using (ITimeline timeline = new DefaultTimeline()) { IGroup group = timeline.AddVideoGroup(24, 64, 64); ITrack track = group.AddTrack("root", -1); IClip clip = track.AddClip("transitions.wmv", GroupMediaType.Video, InsertPosition.Absolute, 0, 0, -1); Assert.AreEqual(7.999, clip.Duration); } }
public void Construct() { using (ITimeline timeline = new DefaultTimeline()) { IGroup group = timeline.AddAudioGroup(); var args = new AddedGroupEventArgs(group, timeline); Assert.AreSame(group, args.Item); Assert.AreSame(timeline, args.Container); } }
public void AddAudioAndVideo()
{
    // test all the overloads for AddVideoWithAudio
    using (ITimeline timeline = new DefaultTimeline())
    {
        timeline.AddVideoGroup(24, 320, 240).AddTrack();
        timeline.AddAudioGroup().AddTrack();

        IAudioVideoClipPair clip1 = timeline.AddVideoWithAudio("1sec.wmv");
        Assert.AreEqual(0, clip1.AudioClip.Offset);
        Assert.AreEqual(1, clip1.AudioClip.Duration);
        Assert.AreEqual(0, clip1.VideoClip.Offset);
        Assert.AreEqual(1, clip1.VideoClip.Duration);

        IAudioVideoClipPair clip2 = timeline.AddVideoWithAudio("1sec.wmv", 1);
        Assert.AreEqual(2, clip2.AudioClip.Offset);
        Assert.AreEqual(1, clip2.AudioClip.Duration);
        Assert.AreEqual(2, clip2.VideoClip.Offset);
        Assert.AreEqual(1, clip2.VideoClip.Duration);

        IAudioVideoClipPair clip3 = timeline.AddVideoWithAudio("1sec.wmv", 0, 0.5);
        Assert.AreEqual(3, clip3.AudioClip.Offset);
        Assert.AreEqual(0.5, clip3.AudioClip.Duration);
        Assert.AreEqual(3, clip3.VideoClip.Offset);
        Assert.AreEqual(0.5, clip3.VideoClip.Duration);

        IAudioVideoClipPair clip4 = timeline.AddVideoWithAudio("1sec.wmv", 0, 0.5, 1.0);
        Assert.AreEqual(3.5, clip4.AudioClip.Offset);
        Assert.AreEqual(0.5, clip4.AudioClip.Duration);
        Assert.AreEqual(0.5, clip4.AudioClip.MediaStart);
        Assert.AreEqual(3.5, clip4.VideoClip.Offset);
        Assert.AreEqual(0.5, clip4.VideoClip.Duration);
        Assert.AreEqual(0.5, clip4.VideoClip.MediaStart);

        IAudioVideoClipPair clip5 = timeline.AddVideoWithAudio("1sec.wmv", InsertPosition.Absolute, 6, 0, -1);
        Assert.AreEqual(6, clip5.AudioClip.Offset);
        Assert.AreEqual(1, clip5.AudioClip.Duration);
        Assert.AreEqual(6, clip5.VideoClip.Offset);
        Assert.AreEqual(1, clip5.VideoClip.Duration);

        IAudioVideoClipPair clip6 = timeline.AddVideoWithAudio("myclip", "1sec.wmv", InsertPosition.Absolute, 8, 0, 0.5);
        Assert.AreEqual(8, clip6.AudioClip.Offset);
        Assert.AreEqual(0, clip6.AudioClip.MediaStart);
        Assert.AreEqual(0.5, clip6.AudioClip.Duration);
        Assert.AreEqual("myclip", clip6.AudioClip.Name);
        Assert.AreEqual(8, clip6.VideoClip.Offset);
        Assert.AreEqual(0, clip6.VideoClip.MediaStart);
        Assert.AreEqual(0.5, clip6.VideoClip.Duration);
        Assert.AreEqual("myclip", clip6.VideoClip.Name);
    }
}
public void AddTransitionSetsSwappedInputsProperly() { using (ITimeline timeline = new DefaultTimeline()) { TransitionDefinition def = StandardTransitions.CreateIris(); IGroup group = timeline.AddAudioGroup(); ITransition groupTransition1 = group.AddTransition(0, 5, def, false); Assert.IsFalse(groupTransition1.SwapInputs); ITransition groupTransition2 = group.AddTransition(5, 5, def, true); Assert.IsTrue(groupTransition2.SwapInputs); } }
public void AddClip() { using (ITimeline timeline = new DefaultTimeline()) { IGroup group = timeline.AddVideoGroup(24, 64, 64); ITrack track = group.AddTrack("root", -1); IClip clip = track.AddClip("transitions.wmv", GroupMediaType.Video, InsertPosition.Absolute, 0, 0, -1); Assert.AreSame(track, clip.Container); Assert.AreSame(group, clip.Group); Assert.AreEqual(1, track.Clips.Count); Assert.AreSame(track.Clips[0], clip); Assert.IsNull(clip.Name); } }
public void AddingTransitionsChecksForOverlap() { using (ITimeline timeline = new DefaultTimeline()) { TransitionDefinition def = StandardTransitions.CreateIris(); IGroup group = timeline.AddAudioGroup(); ITransition groupTransition1 = group.AddTransition(0, 5, def, false); Assert.IsFalse(groupTransition1.SwapInputs); ITransition groupTransition2 = group.AddTransition(1, 7, def, true); Assert.IsTrue(groupTransition2.SwapInputs); } }
public void AddClipIgnoresUnrequiredAssistant() { using (ITimeline timeline = new DefaultTimeline()) { var assistant = new MockMediaFileAssistant(false); timeline.InstallAssistant(assistant); Assert.AreEqual(0, assistant.ExecutionCount); ITrack track = timeline.AddAudioGroup().AddTrack(); track.AddAudio("1sec.wav"); Assert.AreEqual(0, assistant.ExecutionCount); } }
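A hedged companion sketch (not a test from the original suite; it assumes that constructing `MockMediaFileAssistant` with `true` marks the assistant as required, so that adding a matching media file executes it once):

// Illustrative sketch only, reusing the InstallAssistant/AddAudio calls shown above;
// the behaviour of MockMediaFileAssistant(true) is an assumption about the mock.
var requiredAssistant = new MockMediaFileAssistant(true);
timeline.InstallAssistant(requiredAssistant);
timeline.AddAudioGroup().AddTrack().AddAudio("1sec.wav");
Assert.AreEqual(1, requiredAssistant.ExecutionCount);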
public void AddTransitionSetsAppropriateContainerAndGroup() { using (ITimeline timeline = new DefaultTimeline()) { TransitionDefinition def = StandardTransitions.CreateIris(); IGroup group = timeline.AddAudioGroup(); ITransition groupTransition = group.AddTransition(0, 0, def); Assert.AreSame(group, groupTransition.Container); Assert.AreSame(group, groupTransition.Group); ITrack track = group.AddTrack(); ITransition trackTransition = track.AddTransition(0, 0, def); Assert.AreSame(track, trackTransition.Container); Assert.AreSame(group, trackTransition.Group); IComposition composition = group.AddComposition(); ITransition compositionTransition = composition.AddTransition(0, 0, def); Assert.AreSame(composition, compositionTransition.Container); Assert.AreSame(group, compositionTransition.Group); } }
public void AddEffect() { using (ITimeline timeline = new DefaultTimeline()) { IGroup group = timeline.AddVideoGroup(24, 100, 100); IComposition composition = group.AddComposition(); bool firedBefore = false; bool firedAfter = false; composition.AddingEffect += delegate { firedBefore = true; }; composition.AddedEffect += delegate { firedAfter = true; }; IEffect effect = composition.AddEffect("test", -1, 1, 2, StandardEffects.CreateBlurEffect(2, 2, 10)); Assert.AreEqual("test", effect.Name); Assert.AreEqual(1, effect.Offset); Assert.AreEqual(2, effect.Duration); Assert.AreEqual(1, composition.Effects.Count); Assert.IsTrue(firedBefore); Assert.IsTrue(firedAfter); } }
public void AddComposition() { using (ITimeline timeline = new DefaultTimeline()) { IGroup group = timeline.AddAudioGroup(); IComposition composition = group.AddComposition(); Assert.AreSame(group, composition.Container); Assert.AreSame(group, composition.Group); bool firedBefore = false; bool firedAfter = false; composition.AddingComposition += delegate { firedBefore = true; }; composition.AddedComposition += delegate { firedAfter = true; }; IComposition childComposition = composition.AddComposition(); Assert.AreSame(composition, childComposition.Container); Assert.AreSame(group, childComposition.Group); Assert.AreEqual(1, composition.Compositions.Count); Assert.IsTrue(firedBefore); Assert.IsTrue(firedAfter); } }
public void AddAudio()
{
    // test all the overloads for AddAudio
    using (ITimeline timeline = new DefaultTimeline())
    {
        timeline.AddAudioGroup().AddTrack();

        IClip clip1 = timeline.AddAudio("1sec.wav");
        Assert.AreEqual(0, clip1.Offset);
        Assert.AreEqual(1, clip1.Duration);

        IClip clip2 = timeline.AddAudio("1sec.wav", 1);
        Assert.AreEqual(2, clip2.Offset);
        Assert.AreEqual(1, clip2.Duration);

        IClip clip3 = timeline.AddAudio("1sec.wav", 0, 0.5);
        Assert.AreEqual(3, clip3.Offset);
        Assert.AreEqual(0.5, clip3.Duration);

        IClip clip4 = timeline.AddAudio("1sec.wav", 0, 0.5, 1.0);
        Assert.AreEqual(3.5, clip4.Offset);
        Assert.AreEqual(0.5, clip4.Duration);
        Assert.AreEqual(0.5, clip4.MediaStart);

        IClip clip5 = timeline.AddAudio("1sec.wav", InsertPosition.Absolute, 6, 0, -1);
        Assert.AreEqual(6, clip5.Offset);
        Assert.AreEqual(1, clip5.Duration);

        IClip clip6 = timeline.AddAudio("myclip", "1sec.wav", InsertPosition.Absolute, 8, 0, 0.5);
        Assert.AreEqual(8, clip6.Offset);
        Assert.AreEqual(0, clip6.MediaStart);
        Assert.AreEqual(0.5, clip6.Duration);
        Assert.AreEqual("myclip", clip6.Name);
    }
}
public void AddVideoClipWhenNoSupportingTrackExists() { using (ITimeline timeline = new DefaultTimeline()) { timeline.AddVideoGroup(24, 100, 100); timeline.AddVideo("1sec.wmv"); } }
public void ShadowCopyAudio() { using (ITimeline timeline = new DefaultTimeline()) { string fullPath = Path.GetFullPath("1sec.wmv"); timeline.AddAudioGroup().AddTrack(); timeline.AddVideoGroup(24, 320, 200).AddTrack(); IAudioVideoClipPair pair = timeline.AddVideoWithAudio("1sec.wmv", true); Assert.AreEqual(fullPath, pair.VideoClip.File.FileName); Assert.AreNotEqual(fullPath, pair.AudioClip.File.FileName); Assert.IsTrue(pair.AudioClip.File.FileName.EndsWith(".wmv")); } }
public void RemoveGroupAddedHandlers() { int count = 0; EventHandler incrementBefore = delegate { count++; }; EventHandler<AddedGroupEventArgs> incrementAfter = delegate { count++; }; using (ITimeline timeline = new DefaultTimeline()) { timeline.AddingGroup += incrementBefore; timeline.AddedGroup += incrementAfter; timeline.AddAudioGroup(); Assert.AreEqual(2, count); count = 0; timeline.AddingGroup -= incrementBefore; timeline.AddedGroup -= incrementAfter; timeline.AddAudioGroup(); Assert.AreEqual(0, count); } }
public void RemoveEvents() { int count = 0; EventHandler increment = delegate { count++; }; EventHandler<AddedCompositionEventArgs> incrementForAfterCompositionAdded = delegate { count++; }; EventHandler<AddedEffectEventArgs> incrementForAfterEffectAdded = delegate { count++; }; EventHandler<AddedTrackEventArgs> incrementForAfterTrackAdded = delegate { count++; }; EventHandler<AddedTransitionEventArgs> incrementForAfterTransitionAdded = delegate { count++; }; EventHandler<AddedClipEventArgs> incrementForAfterClipAdded = delegate { count++; }; EventHandler<AddedGroupEventArgs> incrementForAfterGroupAdded = delegate { count++; }; using (ITimeline timeline = new DefaultTimeline()) { timeline.AddedComposition += incrementForAfterCompositionAdded; timeline.AddedEffect += incrementForAfterEffectAdded; timeline.AddedTrack += incrementForAfterTrackAdded; timeline.AddedTransition += incrementForAfterTransitionAdded; timeline.AddedClip += incrementForAfterClipAdded; timeline.AddedGroup += incrementForAfterGroupAdded; timeline.AddingComposition += increment; timeline.AddingEffect += increment; timeline.AddingTrack += increment; timeline.AddingTransition += increment; timeline.AddingClip += increment; timeline.AddingGroup += increment; IGroup group = timeline.AddAudioGroup(); group.AddComposition(); group.AddEffect(0, 2, StandardEffects.CreateDefaultBlur()); group.AddTrack().AddClip("testinput.mp3", GroupMediaType.Audio, InsertPosition.Absolute, 0, 0, 1); group.AddTransition(0, 2, StandardTransitions.CreateFade()); Assert.AreEqual(12, count); count = 0; timeline.AddedComposition -= incrementForAfterCompositionAdded; timeline.AddedEffect -= incrementForAfterEffectAdded; timeline.AddedTrack -= incrementForAfterTrackAdded; timeline.AddedTransition -= incrementForAfterTransitionAdded; timeline.AddedClip -= incrementForAfterClipAdded; timeline.AddedGroup -= incrementForAfterGroupAdded; timeline.AddingComposition -= increment; timeline.AddingEffect -= increment; timeline.AddingTrack -= increment; timeline.AddingTransition -= increment; timeline.AddingClip -= increment; timeline.AddingGroup -= increment; IGroup group2 = timeline.AddAudioGroup(); group2.AddComposition(); group2.AddEffect(0, 2, StandardEffects.CreateDefaultBlur()); group2.AddTrack().AddClip("testinput.wav", GroupMediaType.Audio, InsertPosition.Relative, 0, 0, 1); group2.AddTransition(2, 2, StandardTransitions.CreateFade()); Assert.AreEqual(0, count); } }
public void GroupAddTransitionBubblesToTimeline() { int beforeCount = 0; int afterCount = 0; using (ITimeline timeline = new DefaultTimeline(12)) { timeline.AddingTransition += delegate { beforeCount++; }; timeline.AddedTransition += delegate { afterCount++; }; IGroup group = timeline.AddAudioGroup(); group.AddTransition(0, 0, StandardTransitions.CreateFade()); } Assert.AreEqual(1, beforeCount); Assert.AreEqual(1, afterCount); }
public void GroupAddTrackBubblesToTimeline() { int beforeCount = 0; int afterCount = 0; using (ITimeline timeline = new DefaultTimeline(12)) { timeline.AddingTrack += delegate { beforeCount++; }; timeline.AddedTrack += delegate { afterCount++; }; IGroup group = timeline.AddAudioGroup(); group.AddTrack(); } Assert.AreEqual(1, beforeCount); Assert.AreEqual(1, afterCount); }
public void GroupAddEffectBubblesToTimeline() { int beforeCount = 0; int afterCount = 0; using (ITimeline timeline = new DefaultTimeline(12)) { timeline.AddingEffect += delegate { beforeCount++; }; timeline.AddedEffect += delegate { afterCount++; }; IGroup group = timeline.AddAudioGroup(); group.AddEffect(0, 1, StandardEffects.CreateDefaultBlur()); } Assert.AreEqual(1, beforeCount); Assert.AreEqual(1, afterCount); }
public void Constructor() { using (ITimeline timeline = new DefaultTimeline()) { Assert.AreEqual(DefaultTimeline.DefaultFps, timeline.Fps); Assert.AreEqual(0, timeline.Groups.Count); Assert.AreEqual(0, timeline.Duration); Assert.AreEqual(0, timeline.LengthInUnits); } }
public void AddVideoGroupWithName() { using (ITimeline timeline = new DefaultTimeline(12)) { IGroup group = timeline.AddVideoGroup("some video", 32, 100, 100); Assert.AreEqual("some video", group.Name); Assert.AreSame(timeline, group.Timeline); } }
public void AddVideoGroupWithDefaultFPS() { bool beforeFired = false; bool afterFired = false; using (ITimeline timeline = new DefaultTimeline(12)) { timeline.AddingGroup += delegate { beforeFired = true; }; timeline.AddedGroup += delegate { afterFired = true; }; IGroup group = timeline.AddVideoGroup(32, 100, 100); Assert.AreSame(timeline, group.Timeline); Assert.AreEqual(timeline.Fps, group.Fps); Assert.IsTrue(beforeFired); Assert.IsTrue(afterFired); } }