Example #1
0
        /// <summary>
        /// Verifies that Relative-positioned clips are appended per track: four clips
        /// added alternately to two tracks end up two per track, each track laying its
        /// clips out end-to-end in the serialized timeline.
        /// </summary>
        public void AddClipsToTrack()
        {
            using (ITimeline timeline = new DefaultTimeline())
            {
                IGroup group  = timeline.AddVideoGroup(24, 320, 200);
                ITrack track1 = group.AddTrack();
                ITrack track2 = group.AddTrack();

                // clips alternate between tracks; Relative positioning appends each
                // clip after the previous one on the same track
                track1.AddClip("image1.jpg", GroupMediaType.Image, InsertPosition.Relative, 0, 0, 2);
                track2.AddClip("image2.jpg", GroupMediaType.Image, InsertPosition.Relative, 0, 0, 2);
                track1.AddClip("image3.jpg", GroupMediaType.Image, InsertPosition.Relative, 0, 0, 2);
                track2.AddClip("image4.jpg", GroupMediaType.Image, InsertPosition.Relative, 0, 0, 2);

                Assert.AreEqual(2, track1.Clips.Count);
                Assert.AreEqual(2, track2.Clips.Count);

                PrepareToExecute(timeline,
                                 @"<timeline framerate=""30.0000000"">
	<group type=""video"" bitdepth=""24"" height=""200"" framerate=""30.0000000"" previewmode=""0"">
		<track>
			<clip start=""0"" stop=""2"" src=""image1.jpg"" />
			<clip start=""2"" stop=""4"" src=""image3.jpg"" />
		</track>
		<track>
			<clip start=""0"" stop=""2"" src=""image2.jpg"" />
			<clip start=""2"" stop=""4"" src=""image4.jpg"" />
		</track>
	</group>
</timeline>");
            }
        }
        /// <summary>
        /// Renders a timeline that mixes an audio group (mp3) with a video group
        /// containing both a video clip and a still image, using the NullRenderer
        /// (renders without producing an output file).
        /// </summary>
        public void CanRenderAudioVideoAndImages()
        {
            using (ITimeline timeline = new DefaultTimeline())
            {
                IGroup audioGroup = timeline.AddAudioGroup();
                ITrack audioTrack = audioGroup.AddTrack();
                audioTrack.AddClip("..\\..\\testinput.mp3", GroupMediaType.Audio, InsertPosition.Absolute, 0, 0, 2);

                IGroup videoGroup = timeline.AddVideoGroup(24, 160, 100);
                ITrack videoTrack = videoGroup.AddTrack();
                // Relative positioning appends the image directly after the video clip
                videoTrack.AddClip("..\\..\\transitions.wmv", GroupMediaType.Video, InsertPosition.Relative, 0, 0, 1);
                videoTrack.AddClip("..\\..\\image1.jpg", GroupMediaType.Image, InsertPosition.Relative, 0, 0, 1);

                using (var renderer = new NullRenderer(timeline))
                {
                    ExecuteRenderer(renderer,
                                    @"<timeline framerate=""30.0000000"">
	<group type=""audio"" framerate=""30.0000000"" previewmode=""0"">
		<track>
			<clip start=""0"" stop=""2"" src=""..\..\testinput.mp3"" mstart=""0"" />
		</track>
	</group>
	<group type=""video"" bitdepth=""24"" width=""160"" height=""100"" framerate=""30.0000000"" previewmode=""0"">
		<track>
			<clip start=""0"" stop=""1"" src=""..\..\transitions.wmv"" mstart=""0"" />
			<clip start=""1"" stop=""2"" src=""..\..\image1.jpg"" />
		</track>
	</group>
</timeline>");
                }
            }
        }
        /// <summary>
        /// Renders three overlapping still images on two video tracks with pixelate
        /// and iris transitions between them, over a 17 second audio bed, writing the
        /// result to a WMV file.
        /// </summary>
        public void PixelateAndIrisBetweenImages()
        {
            const string outputFile = "PixelateAndIrisBetweenImages.wmv";

            using (ITimeline timeline = new DefaultTimeline())
            {
                // audio bed spanning the whole composition
                timeline.AddAudioGroup().AddTrack().AddClip("testinput.wav", GroupMediaType.Audio,
                                                            InsertPosition.Relative, 0, 0, 17);

                IGroup videoGroup = timeline.AddVideoGroup(32, 160, 100);
                ITrack lowerTrack = videoGroup.AddTrack();
                ITrack upperTrack = videoGroup.AddTrack();

                upperTrack.AddClip("image1.jpg", GroupMediaType.Image, InsertPosition.Absoloute, 0, 0, 6);
                lowerTrack.AddClip("image2.jpg", GroupMediaType.Image, InsertPosition.Absoloute, 5, 0, 8);
                upperTrack.AddClip("image3.jpg", GroupMediaType.Image, InsertPosition.Absoloute, 11, 0, 6);

                // notice that we must apply "in" and "out" of the pixelation effect,
                // to get the desired effect, like the fade
                upperTrack.AddTransition(5.0, 1.0, StandardTransitions.CreatePixelate(), true);
                upperTrack.AddTransition(6.0, 1.0, StandardTransitions.CreatePixelate(), false);

                // the iris transition is a one shot
                upperTrack.AddTransition(11.0, 2.0, StandardTransitions.CreateIris(), false);

                using (IRenderer renderer = new WindowsMediaRenderer(timeline, outputFile,
                                                                     WindowsMediaProfiles.HighQualityVideo))
                {
                    renderer.Render();
                }
            }
        }
Example #4
0
        /// <summary>
        /// Subscribes to all six of a track's add-notification events, verifies each
        /// add raises a before and an after event (six total), then unsubscribes and
        /// verifies the handlers stay silent for further adds.
        /// </summary>
        public void RemoveEvents()
        {
            int count = 0;

            EventHandler increment = (sender, args) => count++;
            EventHandler<AfterEffectAddedEventArgs> incrementForAfterEffectAdded = (sender, args) => count++;
            EventHandler<AfterTransitionAddedEventArgs> incrementForAfterTransitionAdded = (sender, args) => count++;
            EventHandler<AfterClipAddedEventArgs> incrementForAfterClipAdded = (sender, args) => count++;

            using (ITimeline timeline = new DefaultTimeline())
            {
                ITrack track = timeline.AddAudioGroup().AddTrack();

                track.AfterEffectAdded += incrementForAfterEffectAdded;
                track.AfterTransitionAdded += incrementForAfterTransitionAdded;
                track.AfterClipAdded += incrementForAfterClipAdded;
                track.BeforeEffectAdded += increment;
                track.BeforeTransitionAdded += increment;
                track.BeforeClipAdded += increment;

                // one before + one after notification per add -> six in total
                track.AddEffect(0, 2, StandardEffects.CreateDefaultBlur());
                track.AddClip("testinput.mp3", GroupMediaType.Audio, InsertPosition.Absoloute, 0, 0, 1);
                track.AddTransition(0, 2, StandardTransitions.CreateFade());

                Assert.AreEqual(6, count);
                count = 0;

                track.AfterEffectAdded -= incrementForAfterEffectAdded;
                track.AfterTransitionAdded -= incrementForAfterTransitionAdded;
                track.AfterClipAdded -= incrementForAfterClipAdded;
                track.BeforeEffectAdded -= increment;
                track.BeforeTransitionAdded -= increment;
                track.BeforeClipAdded -= increment;

                // repeat the same operations; no handler should observe them now
                track.AddEffect(0, 2, StandardEffects.CreateDefaultBlur());
                track.AddClip("testinput.mp3", GroupMediaType.Audio, InsertPosition.Relative, 0, 0, 1);
                track.AddTransition(2, 2, StandardTransitions.CreateFade());

                Assert.AreEqual(0, count);
            }
        }
        /// <summary>
        /// A RenderCompleted handler that is removed before rendering begins must
        /// never be invoked.
        /// </summary>
        public void AddAndRemoveHandler()
        {
            bool eventTriggered = false;

            using (ITimeline timeline = new DefaultTimeline())
            {
                ITrack audioTrack = timeline.AddAudioGroup().AddTrack();
                audioTrack.AddClip("testinput.mp3", GroupMediaType.Audio, InsertPosition.Absoloute, 0, 0, -1);

                using (var renderer = new NullRenderer(timeline))
                {
                    EventHandler completedHandler = (sender, args) => eventTriggered = true;

                    // subscribe, then immediately unsubscribe
                    renderer.RenderCompleted += completedHandler;
                    renderer.RenderCompleted -= completedHandler;

                    renderer.BeginRender(null, null);
                    renderer.Cancel();

                    Assert.IsFalse(eventTriggered);
                }
            }
        }
        /// <summary>
        /// Renders a timeline containing one video group and one audio group through
        /// the NullRenderer and checks the serialized timeline structure.
        /// </summary>
        public void RenderAudioAndVideo()
        {
            // create the timeline
            using (ITimeline timeline = new DefaultTimeline())
            {
                IGroup videoGroup = timeline.AddVideoGroup(24, 320, 240);
                ITrack videoTrack = videoGroup.AddTrack();
                videoTrack.AddClip("..\\..\\transitions.wmv", GroupMediaType.Video, InsertPosition.Relative, 0, 0, 2);

                IGroup audioGroup = timeline.AddAudioGroup();
                ITrack audioTrack = audioGroup.AddTrack();
                audioTrack.AddClip("..\\..\\testinput.mp3", GroupMediaType.Audio, InsertPosition.Relative, 0, 0, 2);

                // render the timeline
                using (var renderer = new NullRenderer(timeline))
                {
                    ExecuteRenderer(renderer,
                                    @"<timeline framerate=""30.0000000"">
	<group type=""video"" bitdepth=""24"" framerate=""30.0000000"" previewmode=""0"">
		<track>
			<clip start=""0"" stop=""2"" src=""..\..\transitions.wmv"" mstart=""0"" />
		</track>
	</group>
	<group type=""audio"" framerate=""30.0000000"" previewmode=""0"">
		<track>
			<clip start=""0"" stop=""2"" src=""..\..\testinput.mp3"" mstart=""0"" />
		</track>
	</group>
</timeline>");
                }
            }
        }
Example #7
0
        /// <summary>
        /// Renders a video-only timeline with a video media profile.
        /// NOTE(review): the expected XML passed to ExecuteRenderer describes an audio
        /// group even though the timeline only contains video — presumably the test
        /// base asserts a failure before comparing; confirm against the test base.
        /// </summary>
        public void RenderWithInapropriateProfile2()
        {
            string outputFile = "RenderWithInapropriateProfile2.wmv";

            using (ITimeline timeline = new DefaultTimeline())
            {
                IGroup videoGroup = timeline.AddVideoGroup(24, 100, 100);
                ITrack rootTrack  = videoGroup.AddTrack();
                rootTrack.AddClip("transitions.wmv", GroupMediaType.Video, InsertPosition.Absoloute, 0, 0, 2);

                using (
                    WindowsMediaRenderer renderer =
                        new WindowsMediaRenderer(timeline, outputFile, WindowsMediaProfiles.LowQualityVideo))
                {
                    ExecuteRenderer(renderer,
                                    @"<timeline framerate=""30.0000000"">
	<group type=""audio"" framerate=""30.0000000"" previewmode=""0"">
		<track>
			<clip start=""0"" stop=""2"" src=""testinput.mp3"" mstart=""0"" />
		</track>
	</group>
</timeline>");
                }
            }
        }
Example #8
0
        /// <summary>
        /// Renders video plus audio to a WMV file while an ImageWatermarkParticipant
        /// stamps a watermark image onto the video at point (200, 0).
        /// </summary>
        public void RenderWmvWithImageWatermark()
        {
            string outputFile = "RenderWmvWithImageWatermark.wmv";

            using (Image waterMarkImage = Image.FromFile("..\\..\\corner_watermark.png"))
            using (ITimeline timeline = new DefaultTimeline())
            {
                ITrack videoTrack = timeline.AddVideoGroup(32, 320, 240).AddTrack();
                videoTrack.AddClip("..\\..\\transitions.wmv", GroupMediaType.Video, InsertPosition.Absolute, 0, 0, 2);

                ITrack audioTrack = timeline.AddAudioGroup().AddTrack();
                audioTrack.AddClip("..\\..\\testinput.mp3", GroupMediaType.Audio, InsertPosition.Absolute, 0, 0, 2);

                // the participant is handed to the renderer for the video stream
                ICallbackParticipant[] videoParticipants =
                {
                    new ImageWatermarkParticipant(32, 320, 240, true, waterMarkImage, new Point(200, 0))
                };

                using (WindowsMediaRenderer renderer =
                           new WindowsMediaRenderer(timeline, outputFile, WindowsMediaProfiles.HighQualityVideo,
                                                    videoParticipants, null))
                {
                    renderer.Render();
                }
            }
        }
Example #9
0
        /// <summary>
        /// AddEffect on a group, track, composition and clip must report that owner as
        /// the effect's Container, while Group always points at the owning group.
        /// </summary>
        public void AddEffectSetsApropriateContainer()
        {
            using (ITimeline timeline = new DefaultTimeline())
            {
                EffectDefinition blurDefinition = StandardEffects.CreateDefaultBlur();

                IGroup videoGroup = timeline.AddVideoGroup(24, 100, 100);
                IEffect effectOnGroup = videoGroup.AddEffect(0, 10, blurDefinition);
                Assert.AreSame(videoGroup, effectOnGroup.Group);
                Assert.AreSame(videoGroup, effectOnGroup.Container);

                ITrack track = videoGroup.AddTrack();
                IEffect effectOnTrack = track.AddEffect(0, 10, blurDefinition);
                Assert.AreSame(videoGroup, effectOnTrack.Group);
                Assert.AreSame(track, effectOnTrack.Container);

                IComposition composition = videoGroup.AddComposition();
                IEffect effectOnComposition = composition.AddEffect(0, 10, blurDefinition);
                Assert.AreSame(videoGroup, effectOnComposition.Group);
                Assert.AreSame(composition, effectOnComposition.Container);

                IClip clip = track.AddClip("..\\..\\image1.jpg", GroupMediaType.Image, InsertPosition.Absolute, 0, 0, 10);
                IEffect effectOnClip = clip.AddEffect(0, 10, blurDefinition);
                Assert.AreSame(videoGroup, clip.Group);
                Assert.AreSame(clip, effectOnClip.Container);
            }
        }
Example #10
0
        /// <summary>
        /// Converts a 2 second slice of an mp3 to a low-quality mono PCM wav file,
        /// reporting progress through a ConsoleProgressParticipant, then checks the
        /// output length against the timeline.
        /// </summary>
        public void ConvertMp3ToWavWithCompressor()
        {
            string outputFile = "ConvertMp3ToWavWithCompressor.wav";

            // create the timeline
            using (ITimeline timeline = new DefaultTimeline())
            {
                IGroup audioGroup = timeline.AddAudioGroup();
                ITrack rootTrack  = audioGroup.AddTrack();
                rootTrack.AddClip("testinput.mp3", GroupMediaType.Audio, InsertPosition.Absolute, 0, 0, 2);

                // render the timeline
                using (
                    var renderer =
                        new WavFileRenderer(timeline, outputFile, AudioFormat.LowQualityMonoPcm,
                                            new ICallbackParticipant[] { new ConsoleProgressParticipant() }))
                {
                    ExecuteRenderer(renderer,
                                    @"<timeline framerate=""30.0000000"">
    <group type=""audio"" framerate=""30.0000000"" previewmode=""0"">
        <track>
            <clip start=""0"" stop=""2"" src=""testinput.mp3"" mstart=""0""/>
        </track>
    </group>
</timeline>");
                }

                AssertLengths(timeline, 2, outputFile);
            }
        }
        /// <summary>
        /// An audible demonstration of the difference between interpolating parameter
        /// values for an effect and jumping directly to them: the audio mixer's "Vol"
        /// parameter hard-jumps four times, then interpolates down to silence.
        /// </summary>
        public void JumpVolume()
        {
            const string outputFile = "JumpVolume.wma";

            using (ITimeline timeline = new DefaultTimeline())
            {
                IClip clip = timeline.AddAudioGroup().AddTrack()
                    .AddClip("testinput.mp3", GroupMediaType.Audio, InsertPosition.Relative, 0, 0, 10);

                // four hard jumps, then a smooth ramp to zero at the end of the clip
                Parameter volumeParameter = new Parameter("Vol", 0.0, 2, 1.0);
                volumeParameter.Intervals.Add(new Interval(IntervalMode.Jump, 2.5, "0.2"));
                volumeParameter.Intervals.Add(new Interval(IntervalMode.Jump, 3.5, "0.8"));
                volumeParameter.Intervals.Add(new Interval(IntervalMode.Jump, 4.5, "0.2"));
                volumeParameter.Intervals.Add(new Interval(IntervalMode.Jump, 5, "1.0"));
                volumeParameter.Intervals.Add(new Interval(IntervalMode.Interpolate, clip.Duration, "0.0"));

                EffectDefinition mixerDefinition = new EffectDefinition(DxtSubObjects.AudioMixer);
                mixerDefinition.Parameters.Add(volumeParameter);
                clip.AddEffect(0, clip.Duration, mixerDefinition);

                using (IRenderer renderer = new WindowsMediaRenderer(timeline, outputFile,
                                                                     WindowsMediaProfiles.MediumQualityAudio))
                {
                    renderer.Render();
                }
            }
        }
Example #12
0
        /// <summary>
        /// Renders a wav clip through a WavFileRenderer configured with an explicitly
        /// created audio compressor (CD-quality stereo PCM), then verifies the output
        /// file length.
        /// </summary>
        public void RenderWithCompressor()
        {
            string outputFile = "RenderWithCompressor.wav";

            using (
                AudioCompressor compressor =
                    AudioCompressorFactory.Create(AudioFormat.CompactDiscQualityStereoPcm))
                using (ITimeline timeline = new DefaultTimeline())
                {
                    IGroup audioGroup = timeline.AddAudioGroup();
                    ITrack rootTrack  = audioGroup.AddTrack();
                    rootTrack.AddClip("testinput.wav", GroupMediaType.Audio, InsertPosition.Relative, 0, 0, 2);

                    // render the timeline
                    using (
                        var renderer =
                            new WavFileRenderer(timeline, outputFile, compressor.Filter, compressor.MediaType, null))
                    {
                        ExecuteRenderer(renderer,
                                        @"<timeline framerate=""30.0000000"">
<group type=""audio"" framerate=""30.0000000"" previewmode=""0"">
    <track>
        <clip start=""0"" stop=""2"" src=""testinput.wav"" mstart=""0""/>
    </track>
</group>
</timeline>");
                    }

                    AssertLengths(timeline.Fps, 2, outputFile);
                }
        }
        /// <summary>
        /// Converts a 2 second slice of an mp3 to wav using the older
        /// CommonAudioFormats/ConsoleProgressCallback overload of WavFileRenderer.
        /// </summary>
        public void ConvertMp3ToWavWithCompressor()
        {
            // TODO: the compressor is being added, but the selected media type seems (encoding etc.)
            // seems to be ignored.

            string outputFile = "ConvertMp3ToWavWithCompressor.wav";

            // create the timeline
            using (ITimeline timeline = new DefaultTimeline())
            {
                IGroup audioGroup = timeline.AddAudioGroup();
                ITrack rootTrack  = audioGroup.AddTrack();
                rootTrack.AddClip("testinput.mp3", GroupMediaType.Audio, InsertPosition.Absoloute, 0, 0, 2);

                // render the timeline
                using (
                    WavFileRenderer renderer =
                        new WavFileRenderer(timeline, outputFile, CommonAudioFormats.LowQualityMonoPcm,
                                            new ConsoleProgressCallback()))
                {
                    ExecuteRenderer(renderer,
                                    @"<timeline framerate=""30.0000000"">
    <group type=""audio"" framerate=""30.0000000"" previewmode=""0"">
        <track>
            <clip start=""0"" stop=""2"" src=""testinput.mp3"" mstart=""0""/>
        </track>
    </group>
</timeline>");
                }

                AssertLengths(timeline, 2, outputFile);
            }
        }
Example #14
0
        /// <summary>
        /// Effect-added notifications raised on a clip bubble up to the parent track:
        /// handlers attached to the track fire once each when the clip gains an effect.
        /// </summary>
        public void EnsureClipBubblesBeforeAndAfterEffectAddedUp()
        {
            int beforeCount = 0;
            int afterCount = 0;

            using (ITimeline timeline = new DefaultTimeline())
            {
                ITrack track = timeline.AddAudioGroup().AddTrack();
                track.BeforeEffectAdded += (sender, args) => beforeCount++;
                track.AfterEffectAdded += (sender, args) => afterCount++;

                IClip clip = track.AddClip("testinput.mp3", GroupMediaType.Audio, InsertPosition.Absoloute, 0, 0, -1);
                clip.AddEffect(0, 1, StandardEffects.CreateDefaultBlur());

                Assert.AreEqual(1, beforeCount);
                Assert.AreEqual(1, afterCount);
            }
        }
Example #15
0
        /// <summary>
        /// Re-renders a 2 second wav clip to a new wav file with the default
        /// WavFileRenderer settings and verifies the output length.
        /// </summary>
        public void ConvertWavToWav()
        {
            string outputFile = "ConvertWavToWav.wav";

            // create the timeline
            using (ITimeline timeline = new DefaultTimeline())
            {
                IGroup audioGroup = timeline.AddAudioGroup();
                ITrack rootTrack  = audioGroup.AddTrack();
                rootTrack.AddClip("testinput.wav", GroupMediaType.Audio, InsertPosition.Relative, 0, 0, 2);

                // render the timeline
                using (var renderer = new WavFileRenderer(timeline, outputFile))
                {
                    ExecuteRenderer(renderer,
                                    @"<timeline framerate=""30.0000000"">
    <group type=""audio"" framerate=""30.0000000"" previewmode=""0"">
        <track>
            <clip start=""0"" stop=""2"" src=""testinput.wav"" mstart=""0""/>
        </track>
    </group>
</timeline>");
                }

                AssertLengths(timeline, 2, outputFile);
            }
        }
Example #16
0
        /// <summary>
        /// Converts a 2 second slice of an mp3 to a WMA file with the low-quality
        /// audio profile, then checks the file exists and has the expected length.
        /// </summary>
        public void ConvertMp3ToWMA()
        {
            string outputFile = "ConvertMp3ToWMA.wma";

            using (ITimeline timeline = new DefaultTimeline())
            {
                IGroup audioGroup = timeline.AddAudioGroup();
                ITrack rootTrack  = audioGroup.AddTrack();
                rootTrack.AddClip("testinput.mp3", GroupMediaType.Audio, InsertPosition.Absoloute, 0, 0, 2);

                using (
                    WindowsMediaRenderer renderer =
                        new WindowsMediaRenderer(timeline, outputFile, WindowsMediaProfiles.LowQualityAudio))
                {
                    ExecuteRenderer(renderer,
                                    @"<timeline framerate=""30.0000000"">
    <group type=""audio"" framerate=""30.0000000"" previewmode=""0"">
        <track>
            <clip start=""0"" stop=""2"" src=""testinput.mp3"" mstart=""0""/>
        </track>
    </group>
</timeline>");
                }

                Assert.IsTrue(File.Exists(outputFile));
                AssertLengths(timeline, 2, outputFile);
            }
        }
Example #17
0
        /// <summary>
        /// Tracks added with explicit names keep those names, are appended to the
        /// group's Tracks collection in order, and serialize with a username
        /// attribute. Clips added with duration -1 resolve to the media length.
        /// </summary>
        public void AddTrackWithNames()
        {
            using (ITimeline timeline = new DefaultTimeline())
            {
                IGroup group  = timeline.AddAudioGroup();
                ITrack track1 = group.AddTrack("track1", -1);
                Assert.AreEqual("track1", track1.Name);
                Assert.AreEqual(1, group.Tracks.Count);
                track1.AddClip("testinput.wav", GroupMediaType.Audio, InsertPosition.Relative, 0, 0, -1);

                Assert.AreSame(group.Tracks[0], track1);
                ITrack track2 = group.AddTrack("track2", -1);
                Assert.AreEqual("track2", track2.Name);
                track2.AddClip("testinput.wav", GroupMediaType.Audio, InsertPosition.Relative, 0, 0, -1);

                Assert.AreEqual(2, group.Tracks.Count);
                Assert.AreEqual(group.Tracks[1], track2);

                PrepareToExecute(timeline,
                                 @"<timeline framerate=""30.0000000"">
	<group type=""audio"" framerate=""30.0000000"" previewmode=""0"">
		<track username=""track1"">
			<clip start=""0"" stop=""55.1250000"" src=""testinput.wav"" mstart=""0"" />
		</track>
		<track username=""track2"">
			<clip start=""0"" stop=""55.1250000"" src=""testinput.wav"" mstart=""0"" />
		</track>
	</group>
</timeline>");
            }
        }
 /// <summary>
 /// Adds an image clip to an audio-only group via the named-clip overload.
 /// </summary>
 public void AddImageClipToAudioGroup()
 {
     using (ITimeline timeline = new DefaultTimeline())
     {
         ITrack audioTrack = timeline.AddAudioGroup().AddTrack();
         audioTrack.AddClip("image file", "image1.jpg", GroupMediaType.Image, InsertPosition.Absolute, 0, 0, -1);
     }
 }
 /// <summary>
 /// Adds an audio clip to a video group via the named-clip overload.
 /// </summary>
 public void AddAudioClipToVideoGroup()
 {
     using (ITimeline timeline = new DefaultTimeline())
     {
         ITrack videoTrack = timeline.AddVideoGroup(24, 64, 64).AddTrack();
         videoTrack.AddClip("wav file", "1sec.wav", GroupMediaType.Audio, InsertPosition.Absolute, 0, 0, -1);
     }
 }
 /// <summary>
 /// A clip added with duration -1 resolves its length from the media file itself.
 /// </summary>
 public void AddClipResolvesDuration()
 {
     using (ITimeline timeline = new DefaultTimeline())
     {
         ITrack rootTrack = timeline.AddVideoGroup(24, 64, 64).AddTrack("root", -1);
         IClip resolvedClip = rootTrack.AddClip("transitions.wmv", GroupMediaType.Video, InsertPosition.Absolute, 0, 0, -1);
         Assert.AreEqual(7.999, resolvedClip.Duration);
     }
 }
Example #21
0
 /// <summary>
 /// A newly added clip reports the track it was added to as its Container.
 /// </summary>
 public void ClipsAssignedContainer()
 {
     using (ITimeline timeline = new DefaultTimeline())
     {
         ITrack owningTrack = timeline.AddVideoGroup(24, 320, 200).AddTrack();
         IClip addedClip = owningTrack.AddClip("image1.jpg", GroupMediaType.Image, InsertPosition.Relative, 0, 0, 2);
         Assert.AreSame(owningTrack, addedClip.Container);
     }
 }
        /// <summary>
        /// A clip's StretchMode defaults to Stretch and can be reassigned per clip.
        /// </summary>
        public void AlterStretchMode()
        {
            using (ITimeline timeline = new DefaultTimeline())
            {
                ITrack rootTrack = timeline.AddVideoGroup(24, 64, 64).AddTrack("root", -1);
                IClip videoClip = rootTrack.AddClip("transitions.wmv", GroupMediaType.Video, InsertPosition.Absolute, 0, 0, -1);

                Assert.AreEqual(ResizeFlags.Stretch, videoClip.StretchMode);
                videoClip.StretchMode = ResizeFlags.PreserveAspectRatio;
                Assert.AreEqual(ResizeFlags.PreserveAspectRatio, videoClip.StretchMode);
            }
        }
Example #23
0
        /// <summary>
        /// Constructing a WindowsMediaRenderer with a null output file name is
        /// expected to fail; the empty using statement exists only to dispose the
        /// renderer should construction unexpectedly succeed.
        /// </summary>
        public void ConvertWithNullFilename()
        {
            using (ITimeline timeline = new DefaultTimeline())
            {
                IGroup audioGroup = timeline.AddAudioGroup();
                ITrack rootTrack  = audioGroup.AddTrack();
                rootTrack.AddClip("testinput.mp3", GroupMediaType.Audio, InsertPosition.Absoloute, 0, 0, 2);

                // CS0642 (possible mistaken empty statement) is intentional here;
                // suppressed for consistency with ConvertWithNullFileName.
#pragma warning disable 642
                using (
                    WindowsMediaRenderer renderer =
                        new WindowsMediaRenderer(timeline, null, WindowsMediaProfiles.LowQualityAudio));
#pragma warning restore 642
            }
        }
        /// <summary>
        /// Cancelling a renderer whose render was never started must not throw.
        /// </summary>
        public void CancelBeforeStart()
        {
            using (ITimeline timeline = new DefaultTimeline())
            {
                ITrack audioTrack = timeline.AddAudioGroup().AddTrack();
                audioTrack.AddClip("..\\..\\testinput.mp3", GroupMediaType.Audio, InsertPosition.Absolute, 0, 0, -1);

                using (NullRenderer renderer = new NullRenderer(timeline))
                {
                    renderer.Cancel();
                }
            }
        }
 /// <summary>
 /// Adding a clip wires up its Container/Group references, registers it on the
 /// track's Clips collection, and leaves its Name null when none is supplied.
 /// </summary>
 public void AddClip()
 {
     using (ITimeline timeline = new DefaultTimeline())
     {
         IGroup videoGroup = timeline.AddVideoGroup(24, 64, 64);
         ITrack rootTrack = videoGroup.AddTrack("root", -1);
         IClip addedClip = rootTrack.AddClip("transitions.wmv", GroupMediaType.Video, InsertPosition.Absolute, 0, 0, -1);

         Assert.AreSame(rootTrack, addedClip.Container);
         Assert.AreSame(videoGroup, addedClip.Group);
         Assert.AreEqual(1, rootTrack.Clips.Count);
         Assert.AreSame(rootTrack.Clips[0], addedClip);
         Assert.IsNull(addedClip.Name);
     }
 }
Example #26
0
        /// <summary>
        /// Adds a named blur effect to a clip: both effect-added events fire, the
        /// effect exposes the supplied name, priority, duration and definition, and
        /// the effect serializes inside the clip element with its parameter ramp.
        /// </summary>
        public void AddEffectToClip()
        {
            bool beforeFired = false;
            bool afterFired  = false;

            using (ITimeline timeline = new DefaultTimeline())
            {
                IGroup group = timeline.AddVideoGroup(24, 64, 64);
                ITrack track = group.AddTrack();
                IClip  clip  = track.AddClip("transitions.wmv", GroupMediaType.Video, InsertPosition.Absoloute, 0, 0, -1);

                clip.BeforeEffectAdded += new EventHandler(delegate
                {
                    beforeFired = true;
                });

                clip.AfterEffectAdded += new EventHandler <AfterEffectAddedEventArgs>(delegate
                {
                    afterFired = true;
                });

                // blur radius ramps from 2 to 20 across the clip's duration
                EffectDefinition defintion = StandardEffects.CreateBlurEffect(2, clip.Duration, 20);

                // priority -1 lets the library assign the effect's priority
                IEffect effect =
                    clip.AddEffect("blur", -1, 0, clip.Duration, defintion);

                Assert.IsTrue(beforeFired);
                Assert.IsTrue(afterFired);
                Assert.AreEqual("blur", effect.Name);
                Assert.AreEqual(0, effect.Priority);
                Assert.AreEqual(clip.Duration, effect.Duration);
                Assert.AreEqual(0, clip.Offset);
                Assert.AreSame(defintion, effect.EffectDefinition);

                PrepareToExecute(timeline,
                                 @"<timeline framerate=""30.0000000"">
	<group type=""video"" bitdepth=""24"" width=""64"" height=""64"" framerate=""30.0000000"" previewmode=""0"">
		<track>
			<clip start=""0"" stop=""7.9990000"" src=""transitions.wmv"" mstart=""0"">
				<effect start=""0"" stop=""7.9990000"" clsid=""{7312498D-E87A-11D1-81E0-0000F87557DB}"" username=""blur"">
					<param name=""PixelRadius"" value=""2"">
						<linear time=""7.9990000"" value=""20"" />
					</param>
				</effect>
			</clip>
		</track>
	</group>
</timeline>");
            }
        }
        /// <summary>
        /// Renders a timeline through AviFileRenderer with a null file name.
        /// </summary>
        public void RenderWithNoFileName()
        {
            using (ITimeline timeline = new DefaultTimeline())
            {
                ITrack videoTrack = timeline.AddVideoGroup(24, 100, 80).AddTrack();
                videoTrack.AddClip("transitions.wmv", GroupMediaType.Video, InsertPosition.Absolute, 0, 0, 2);

                using (AviFileRenderer renderer = new AviFileRenderer(timeline, null))
                {
                    renderer.Render();
                }
            }
        }
Example #28
0
        /// <summary>
        /// Overlapping clips: later Absolute-positioned clips occlude earlier ones,
        /// and the VirtualClips collection reflects only what will actually play.
        /// </summary>
        public void AddOverlappingClips1()
        {
            // Though we've added 3 clips, the DES track only contains 2 tracks because the third has been occluded.
            // this behaviour is mimicked by the virtual clip collection, which demonstrates which clips are actually
            // visible at run time (only on clip on a track is being rendered at any one time)

            // clip 1 is added, 2 thru 10 secs (8 sec duration)
            // clip 2 is added 1 second before clip 1, and completely occluded it at 56 secs in length - clip 1 is gone
            // clip 3 is added 1 second before clip 1, it will play to completion, so the start position for clip 2 is placed
            // and the end of clip3, and it's media start value is incremented accordingly.

            using (ITimeline timeline = new DefaultTimeline())
            {
                IGroup group = timeline.AddAudioGroup();
                ITrack track = group.AddTrack();
                track.AddClip("testinput.mp3", GroupMediaType.Audio, InsertPosition.Absoloute, 2, 0, -1);
                track.AddClip("testinput.wav", GroupMediaType.Audio, InsertPosition.Absoloute, 1, 0, -1);
                track.AddClip("testinput.mp3", GroupMediaType.Audio, InsertPosition.Absoloute, 0, 0, -1);

                Assert.AreEqual(
                    @"<clip start=""0"" stop=""8.051875"" src=""testinput.mp3"" mstart=""0"" />
<clip start=""8.051875"" stop=""56.125"" src=""testinput.wav"" mstart=""7.051875"" />",
                    track.VirtualClips.ToString());

                Console.WriteLine(track.VirtualClips.ToString());

                PrepareToExecute(timeline,
                                 @"<timeline framerate=""30.0000000"">
    <group type=""audio"" framerate=""30.0000000"" previewmode=""0"">
        <track>
            <clip start=""0"" stop=""8.0518750"" src=""testinput.mp3"" mstart=""0"" />
            <clip start=""8.0518750"" stop=""56.1250000"" src=""testinput.wav"" mstart=""7.0518750"" />
        </track>
    </group>
</timeline>");
            }
        }
Example #29
0
        /// <summary>
        /// Overlapping clips added in increasing start order: each later clip splits
        /// the earlier ones, and VirtualClips shows the resulting playable segments
        /// with adjusted media-start offsets.
        /// </summary>
        public void AddOverlappingClips2()
        {
            // What's happening here is..

            // clip 1 is added, 2 thru 10 secs (8 sec duration)
            // clip 2 is added 1 second before clip 1, and completely occluded it at 56 secs in length - clip 1 is gone
            // clip 3 is added 1 second before clip 1, it will play to completion, so the start position for clip 2 is placed
            // and the end of clip3, and it's media start value is incremented accordingly.

            using (ITimeline timeline = new DefaultTimeline())
            {
                IGroup group = timeline.AddAudioGroup();
                ITrack track = group.AddTrack();
                track.AddClip("testinput.mp3", GroupMediaType.Audio, InsertPosition.Absoloute, 0, 0, -1);
                track.AddClip("testinput.wav", GroupMediaType.Audio, InsertPosition.Absoloute, 1, 0, -1);
                track.AddClip("testinput.mp3", GroupMediaType.Audio, InsertPosition.Absoloute, 2, 0, -1);

                Assert.AreEqual(
                    @"<clip start=""0"" stop=""1"" src=""testinput.mp3"" mstart=""0"" />
<clip start=""1"" stop=""2"" src=""testinput.wav"" mstart=""0"" />
<clip start=""2"" stop=""10.051875"" src=""testinput.mp3"" mstart=""0"" />
<clip start=""10.051875"" stop=""56.125"" src=""testinput.wav"" mstart=""9.051875"" />",
                    track.VirtualClips.ToString());

                PrepareToExecute(timeline,
                                 @"<timeline framerate=""30.0000000"">
	<group type=""audio"" framerate=""30.0000000"" previewmode=""0"">
		<track>
			<clip start=""0"" stop=""1"" src=""testinput.mp3"" mstart=""0"" />
			<clip start=""1"" stop=""2"" src=""testinput.wav"" mstart=""0"" />
			<clip start=""2"" stop=""10.0518750"" src=""testinput.mp3"" mstart=""0"" />
			<clip start=""10.0518750"" stop=""56.1250000"" src=""testinput.wav"" mstart=""9.0518750"" />
		</track>
	</group>
</timeline>");
            }
        }
Example #30
0
        /// <summary>
        /// Constructing a WindowsMediaRenderer with a null output file name is
        /// expected to fail; the empty using statement exists only to dispose the
        /// renderer should construction unexpectedly succeed.
        /// </summary>
        public void ConvertWithNullFileName()
        {
            using (ITimeline timeline = new DefaultTimeline())
            {
                IGroup audioGroup = timeline.AddAudioGroup();
                ITrack rootTrack  = audioGroup.AddTrack();
                rootTrack.AddClip("..\\..\\testinput.mp3", GroupMediaType.Audio, InsertPosition.Absolute, 0, 0, 2);

// CS0642 (possible mistaken empty statement) is intentional: only the constructor matters here.
#pragma warning disable 642
                using (
                    var renderer =
                        new WindowsMediaRenderer(timeline, null, WindowsMediaProfiles.LowQualityAudio));
#pragma warning restore 642
            }
        }