/// <summary>
/// Builds a short slide-show that pixelates between the first two images and
/// then irises to the third, over a 17 second audio bed, rendered to WMV.
/// </summary>
public void PixelateAndIrisBetweenImages()
{
    const string outputFile = "PixelateAndIrisBetweenImages.wmv";

    using (ITimeline timeline = new DefaultTimeline())
    {
        // audio bed for the whole sequence
        timeline.AddAudioGroup().AddTrack().AddClip("testinput.wav", GroupMediaType.Audio, InsertPosition.Relative, 0, 0, 17);

        IGroup videoGroup = timeline.AddVideoGroup(32, 160, 100);
        ITrack lowerTrack = videoGroup.AddTrack();
        ITrack upperTrack = videoGroup.AddTrack();

        upperTrack.AddClip("image1.jpg", GroupMediaType.Image, InsertPosition.Absoloute, 0, 0, 6);
        lowerTrack.AddClip("image2.jpg", GroupMediaType.Image, InsertPosition.Absoloute, 5, 0, 8);
        upperTrack.AddClip("image3.jpg", GroupMediaType.Image, InsertPosition.Absoloute, 11, 0, 6);

        // notice that we must apply "in" and "out" of the pixelation effect, to
        // get the desired effect, like the fade
        upperTrack.AddTransition(5.0, 1.0, StandardTransitions.CreatePixelate(), true);
        upperTrack.AddTransition(6.0, 1.0, StandardTransitions.CreatePixelate(), false);

        // the iris transition is a one shot
        upperTrack.AddTransition(11.0, 2.0, StandardTransitions.CreateIris(), false);

        using (IRenderer renderer = new WindowsMediaRenderer(timeline, outputFile, WindowsMediaProfiles.HighQualityVideo))
        {
            renderer.Render();
        }
    }
}
/// <summary>
/// Relative-positioned clips are appended per-track: interleaving four adds
/// across two tracks leaves each track holding two back-to-back clips.
/// </summary>
public void AddClipsToTrack()
{
    using (ITimeline timeline = new DefaultTimeline())
    {
        IGroup videoGroup = timeline.AddVideoGroup(24, 320, 200);
        ITrack firstTrack = videoGroup.AddTrack();
        ITrack secondTrack = videoGroup.AddTrack();

        firstTrack.AddClip("image1.jpg", GroupMediaType.Image, InsertPosition.Relative, 0, 0, 2);
        secondTrack.AddClip("image2.jpg", GroupMediaType.Image, InsertPosition.Relative, 0, 0, 2);
        firstTrack.AddClip("image3.jpg", GroupMediaType.Image, InsertPosition.Relative, 0, 0, 2);
        secondTrack.AddClip("image4.jpg", GroupMediaType.Image, InsertPosition.Relative, 0, 0, 2);

        Assert.AreEqual(2, firstTrack.Clips.Count);
        Assert.AreEqual(2, secondTrack.Clips.Count);

        PrepareToExecute(timeline,
            @"<timeline framerate=""30.0000000""> <group type=""video"" bitdepth=""24"" height=""200"" framerate=""30.0000000"" previewmode=""0""> <track> <clip start=""0"" stop=""2"" src=""image1.jpg"" /> <clip start=""2"" stop=""4"" src=""image3.jpg"" /> </track> <track> <clip start=""0"" stop=""2"" src=""image2.jpg"" /> <clip start=""2"" stop=""4"" src=""image4.jpg"" /> </track> </group> </timeline>");
    }
}
/// <summary>
/// Tracks added with an explicit name keep that name, and are appended to the
/// group's track collection in insertion order.
/// </summary>
public void AddTrackWithNames()
{
    using (ITimeline timeline = new DefaultTimeline())
    {
        IGroup audioGroup = timeline.AddAudioGroup();

        ITrack namedFirst = audioGroup.AddTrack("track1", -1);
        Assert.AreEqual("track1", namedFirst.Name);
        Assert.AreEqual(1, audioGroup.Tracks.Count);
        namedFirst.AddClip("testinput.wav", GroupMediaType.Audio, InsertPosition.Relative, 0, 0, -1);
        Assert.AreSame(audioGroup.Tracks[0], namedFirst);

        ITrack namedSecond = audioGroup.AddTrack("track2", -1);
        Assert.AreEqual("track2", namedSecond.Name);
        namedSecond.AddClip("testinput.wav", GroupMediaType.Audio, InsertPosition.Relative, 0, 0, -1);
        Assert.AreEqual(2, audioGroup.Tracks.Count);
        Assert.AreEqual(audioGroup.Tracks[1], namedSecond);

        PrepareToExecute(timeline,
            @"<timeline framerate=""30.0000000""> <group type=""audio"" framerate=""30.0000000"" previewmode=""0""> <track username=""track1""> <clip start=""0"" stop=""55.1250000"" src=""testinput.wav"" mstart=""0"" /> </track> <track username=""track2""> <clip start=""0"" stop=""55.1250000"" src=""testinput.wav"" mstart=""0"" /> </track> </group> </timeline>");
    }
}
/// <summary>
/// Prompts the user for two AVI files and an output folder, then renders the
/// two videos back-to-back into "videoMerged.avi" inside the chosen folder.
/// </summary>
/// <param name="sender">The button that raised the click.</param>
/// <param name="e">Standard event arguments (unused).</param>
private void button1_Click(object sender, EventArgs e)
{
    string firstPath = string.Empty;
    string secondPath = string.Empty;
    string endPath = string.Empty;

    OpenFileDialog firstDialog = new OpenFileDialog();
    firstDialog.Filter = "Video files (*.avi)|*.avi";
    firstDialog.InitialDirectory = "C:\\";
    firstDialog.Title = "Select the first video you want to merge";
    if (firstDialog.ShowDialog() == DialogResult.OK)
    {
        firstPath = firstDialog.FileName;
    }

    OpenFileDialog secondDialog = new OpenFileDialog();
    secondDialog.Filter = "Video files (*.avi)|*.avi";
    secondDialog.Title = "Select the second video you want to merge";
    if (secondDialog.ShowDialog() == DialogResult.OK)
    {
        secondPath = secondDialog.FileName;
    }

    CommonOpenFileDialog pathFinal = new CommonOpenFileDialog();
    pathFinal.IsFolderPicker = true;
    if (pathFinal.ShowDialog() == CommonFileDialogResult.Ok)
    {
        // Path.Combine uses the platform separator instead of a hard-coded "/".
        endPath = System.IO.Path.Combine(pathFinal.FileName, "videoMerged.avi");
    }

    // The original proceeded to render even when a dialog was cancelled,
    // passing empty paths into the timeline; bail out early instead.
    if (firstPath.Length == 0 || secondPath.Length == 0 || endPath.Length == 0)
    {
        return;
    }

    using (ITimeline timeline = new DefaultTimeline())
    {
        IGroup group = timeline.AddVideoGroup(32, 1920, 1080);

        // Second clip is offset by the first clip's duration so they play
        // back-to-back.
        var firstVideoClip = group.AddTrack().AddVideo(firstPath);
        group.AddTrack().AddVideo(secondPath, firstVideoClip.Duration);

        using (AviFileRenderer renderer = new AviFileRenderer(timeline, endPath))
        {
            renderer.Render();
        }
    }
}
/// <summary>
/// Demonstrates one way of watermarking a video clip: a transparent image on
/// a second video track, alpha-adjusted, keyed over the main video.
/// </summary>
public void WatermarkVideoClip()
{
    // this demonstrates one way of watermarking a video clip...
    string outputFile = "WatermarkVideoClip.wmv";

    using (ITimeline timeline = new DefaultTimeline(15))
    {
        // create our default audio track
        timeline.AddAudioGroup().AddTrack();

        // add a video group, 32bpp, 320x240 (32bpp required to allow for an alpha channel)
        IGroup videoGroup = timeline.AddVideoGroup(32, 320, 240);

        // add our default video track
        ITrack videoTrack = videoGroup.AddTrack();

        // add another video track, this will be used to contain our watermark image
        ITrack watermarkTrack = videoGroup.AddTrack();

        // add the video in "transitions.wmv" to the first video track, and the audio in "transitions.wmv"
        // to the first audio track.
        timeline.AddVideoWithAudio("transitions.wmv");

        // add the watermark image in, and apply it for the duration of the videoContent
        // this image will be stretched to fit the video clip, and in this case is a transparent gif.
        IClip watermarkClip = watermarkTrack.AddImage("testlogo.gif", 0, videoTrack.Duration);

        // add an alpha setter effect to the image; this ramps the image's alpha
        // to 0.8 of its previous value, so the watermark is roughly 20% transparent.
        // (NOTE(review): an earlier comment said 0.5/50%, but the code passes 0.8.)
        watermarkClip.AddEffect(0, watermarkClip.Duration, StandardEffects.CreateAlphaSetterRamp(0.8));

        // add a transition to the watermark track, this allows the video clip to "shine through" the watermark,
        // based on the values present in the alpha channel of the watermark track.
        watermarkTrack.AddTransition(0, videoTrack.Duration,
                                     StandardTransitions.CreateKey(KeyTransitionType.Alpha, null, null, null, null, null),
                                     false);

        using (
            // render it to windows media
            var renderer = new WindowsMediaRenderer(timeline, outputFile, WindowsMediaProfiles.HighQualityVideo))
        {
            renderer.Render();
        }
    }
}
/// <summary>
/// Transcodes the first two seconds of an MP3 into a low quality WMA file,
/// then verifies the output exists and has the expected length.
/// </summary>
public void ConvertMp3ToWMA()
{
    const string outputFile = "ConvertMp3ToWMA.wma";

    using (ITimeline timeline = new DefaultTimeline())
    {
        ITrack track = timeline.AddAudioGroup().AddTrack();
        track.AddClip("testinput.mp3", GroupMediaType.Audio, InsertPosition.Absoloute, 0, 0, 2);

        using (WindowsMediaRenderer renderer = new WindowsMediaRenderer(timeline, outputFile, WindowsMediaProfiles.LowQualityAudio))
        {
            ExecuteRenderer(renderer,
                @"<timeline framerate=""30.0000000""> <group type=""audio"" framerate=""30.0000000"" previewmode=""0""> <track> <clip start=""0"" stop=""2"" src=""testinput.mp3"" mstart=""0""/> </track> </group> </timeline>");
        }

        Assert.IsTrue(File.Exists(outputFile));
        AssertLengths(timeline, 2, outputFile);
    }
}
/// <summary>
/// Adding an effect to a clip bubbles the Before/AfterEffectAdded events up
/// to the owning track — each handler fires exactly once.
/// </summary>
public void EnsureClipBubblesBeforeAndAfterEffectAddedUp()
{
    int raisedBefore = 0;
    int raisedAfter = 0;

    using (ITimeline timeline = new DefaultTimeline())
    {
        ITrack track = timeline.AddAudioGroup().AddTrack();
        track.BeforeEffectAdded += (sender, args) => raisedBefore++;
        track.AfterEffectAdded += (sender, args) => raisedAfter++;

        IClip clip = track.AddClip("testinput.mp3", GroupMediaType.Audio, InsertPosition.Absoloute, 0, 0, -1);
        clip.AddEffect(0, 1, StandardEffects.CreateDefaultBlur());

        Assert.AreEqual(1, raisedBefore);
        Assert.AreEqual(1, raisedAfter);
    }
}
/// <summary>
/// A named transition added to a track is stored in the track's Transitions
/// collection and keeps its name and definition.
/// </summary>
public void AddTransitionsToTrack()
{
    using (ITimeline timeline = new DefaultTimeline())
    {
        ITrack track = timeline.AddVideoGroup(24, 320, 200).AddTrack();

        TransitionDefinition fadeDefinition = StandardTransitions.CreateFade();
        ITransition added = track.AddTransition("test", 1, 3, fadeDefinition, false);

        Assert.AreEqual(1, track.Transitions.Count);
        Assert.AreSame(added, track.Transitions[0]);
        Assert.AreEqual("test", added.Name);
        Assert.AreSame(fadeDefinition, added.TransitionDefinition);

        PrepareToExecute(timeline,
            @"<timeline framerate=""30.0000000""> <group type=""video"" bitdepth=""24"" height=""200"" framerate=""30.0000000"" previewmode=""0""> <track> <transition start=""1"" stop=""4"" clsid=""{16B280C5-EE70-11D1-9066-00C04FD9189D}"" username=""test"" /> </track> </group> </timeline>");
    }
}
/// <summary>
/// Renders a 24bpp 100x100 video timeline through the low quality video
/// profile; the test name suggests the profile is deliberately inappropriate
/// for this timeline.
/// NOTE(review): the expected XML below describes an AUDIO group playing
/// "testinput.mp3", not the video timeline built here — it looks copy-pasted
/// from an audio test. If the renderer throws before the XML is compared the
/// mismatch is harmless; otherwise the expected XML should be corrected.
/// </summary>
public void RenderWithInapropriateProfile2()
{
    string outputFile = "RenderWithInapropriateProfile2.wmv";

    using (ITimeline timeline = new DefaultTimeline())
    {
        IGroup videoGroup = timeline.AddVideoGroup(24, 100, 100);
        ITrack rootTrack = videoGroup.AddTrack();
        rootTrack.AddClip("transitions.wmv", GroupMediaType.Video, InsertPosition.Absoloute, 0, 0, 2);

        using (
            WindowsMediaRenderer renderer =
                new WindowsMediaRenderer(timeline, outputFile, WindowsMediaProfiles.LowQualityVideo))
        {
            ExecuteRenderer(renderer,
                            @"<timeline framerate=""30.0000000""> <group type=""audio"" framerate=""30.0000000"" previewmode=""0""> <track> <clip start=""0"" stop=""2"" src=""testinput.mp3"" mstart=""0"" /> </track> </group> </timeline>");
        }
    }
}
/// <summary>
/// A timeline mixing an audio clip with video and image clips renders
/// cleanly through the null renderer.
/// </summary>
public void CanRenderAudioVideoAndImages()
{
    using (ITimeline timeline = new DefaultTimeline())
    {
        ITrack soundTrack = timeline.AddAudioGroup().AddTrack();
        soundTrack.AddClip("..\\..\\testinput.mp3", GroupMediaType.Audio, InsertPosition.Absolute, 0, 0, 2);

        ITrack pictureTrack = timeline.AddVideoGroup(24, 160, 100).AddTrack();
        pictureTrack.AddClip("..\\..\\transitions.wmv", GroupMediaType.Video, InsertPosition.Relative, 0, 0, 1);
        pictureTrack.AddClip("..\\..\\image1.jpg", GroupMediaType.Image, InsertPosition.Relative, 0, 0, 1);

        using (var renderer = new NullRenderer(timeline))
        {
            ExecuteRenderer(renderer,
                @"<timeline framerate=""30.0000000""> <group type=""audio"" framerate=""30.0000000"" previewmode=""0""> <track> <clip start=""0"" stop=""2"" src=""..\..\testinput.mp3"" mstart=""0"" /> </track> </group> <group type=""video"" bitdepth=""24"" width=""160"" height=""100"" framerate=""30.0000000"" previewmode=""0""> <track> <clip start=""0"" stop=""1"" src=""..\..\transitions.wmv"" mstart=""0"" /> <clip start=""1"" stop=""2"" src=""..\..\image1.jpg"" /> </track> </group> </timeline>");
        }
    }
}
/// <summary>
/// Renders a timeline holding one video clip and one audio clip through the
/// null renderer and checks the generated timeline XML.
/// </summary>
public void RenderAudioAndVideo()
{
    using (ITimeline timeline = new DefaultTimeline())
    {
        // build the video side
        ITrack pictureTrack = timeline.AddVideoGroup(24, 320, 240).AddTrack();
        pictureTrack.AddClip("..\\..\\transitions.wmv", GroupMediaType.Video, InsertPosition.Relative, 0, 0, 2);

        // build the audio side
        ITrack soundTrack = timeline.AddAudioGroup().AddTrack();
        soundTrack.AddClip("..\\..\\testinput.mp3", GroupMediaType.Audio, InsertPosition.Relative, 0, 0, 2);

        // render the timeline
        using (var renderer = new NullRenderer(timeline))
        {
            ExecuteRenderer(renderer,
                @"<timeline framerate=""30.0000000""> <group type=""video"" bitdepth=""24"" framerate=""30.0000000"" previewmode=""0""> <track> <clip start=""0"" stop=""2"" src=""..\..\transitions.wmv"" mstart=""0"" /> </track> </group> <group type=""audio"" framerate=""30.0000000"" previewmode=""0""> <track> <clip start=""0"" stop=""2"" src=""..\..\testinput.mp3"" mstart=""0"" /> </track> </group> </timeline>");
        }
    }
}
/// <summary>
/// Renders a WMV with a corner watermark stamped onto each frame via an
/// image watermark callback participant.
/// </summary>
public void RenderWmvWithImageWatermark()
{
    const string outputFile = "RenderWmvWithImageWatermark.wmv";

    using (Image waterMarkImage = Image.FromFile("..\\..\\corner_watermark.png"))
    using (ITimeline timeline = new DefaultTimeline())
    {
        ITrack pictureTrack = timeline.AddVideoGroup(32, 320, 240).AddTrack();
        pictureTrack.AddClip("..\\..\\transitions.wmv", GroupMediaType.Video, InsertPosition.Absolute, 0, 0, 2);

        ITrack soundTrack = timeline.AddAudioGroup().AddTrack();
        soundTrack.AddClip("..\\..\\testinput.mp3", GroupMediaType.Audio, InsertPosition.Absolute, 0, 0, 2);

        // the participant paints the watermark at (200, 0) on every video frame
        ICallbackParticipant[] videoParticipants =
        {
            new ImageWatermarkParticipant(32, 320, 240, true, waterMarkImage, new Point(200, 0))
        };

        using (WindowsMediaRenderer renderer = new WindowsMediaRenderer(timeline, outputFile, WindowsMediaProfiles.HighQualityVideo, videoParticipants, null))
        {
            renderer.Render();
        }
    }
}
/// <summary>
/// Converts two seconds of an MP3 to a WAV using an explicit compressor
/// media format, reporting progress to the console.
/// </summary>
public void ConvertMp3ToWavWithCompressor()
{
    // TODO: the compressor is being added, but the selected media type
    // (encoding etc.) seems to be ignored.
    const string outputFile = "ConvertMp3ToWavWithCompressor.wav";

    // create the timeline
    using (ITimeline timeline = new DefaultTimeline())
    {
        ITrack track = timeline.AddAudioGroup().AddTrack();
        track.AddClip("testinput.mp3", GroupMediaType.Audio, InsertPosition.Absoloute, 0, 0, 2);

        // render the timeline
        using (WavFileRenderer renderer = new WavFileRenderer(timeline, outputFile, CommonAudioFormats.LowQualityMonoPcm, new ConsoleProgressCallback()))
        {
            ExecuteRenderer(renderer,
                @"<timeline framerate=""30.0000000""> <group type=""audio"" framerate=""30.0000000"" previewmode=""0""> <track> <clip start=""0"" stop=""2"" src=""testinput.mp3"" mstart=""0""/> </track> </group> </timeline>");
        }

        AssertLengths(timeline, 2, outputFile);
    }
}
/// <summary>
/// Saves the content of this <see cref="CdgFile"/> as video, pulling up to
/// 100 frames via <c>Next()</c> and showing each for 0.2 seconds.
/// </summary>
/// <param name="path">
/// A <see cref="string"/> indicating the file to which the video should be saved.
/// </param>
public void SaveAsVideo(string path)
{
    Console.WriteLine("Saving...");
    using (ITimeline timeline = new DefaultTimeline())
    {
        IGroup group = timeline.AddVideoGroup(32, 300, 300);
        ITrack videoTrack = group.AddTrack();

        Bitmap frame = this.Next();
        int frames = 0;

        // hard cap of 100 frames; each frame is displayed for 0.2 seconds
        // NOTE(review): the Bitmaps returned by Next() are never disposed —
        // confirm whether AddImage copies the pixels, then dispose each frame.
        while (frame != null && frames < 100)
        {
            Console.WriteLine("<!--Getting frame-->");
            videoTrack.AddImage(frame, 0, 0.2D);
            frame = this.Next();
            frames++;
        }

        try
        {
            Console.WriteLine(videoTrack.Duration);
            Console.WriteLine(this.stream.Name.Replace("cdg", "mp3"));
            ITrack audioTrack = timeline.AddAudioGroup().AddTrack();
            ////IClip audio = audioTrack.AddAudio(this.stream.Name.Replace("cdg", "mp3"), 0, videoTrack.Duration);

            // the renderer is disposable (every other call site wraps it in a
            // using); the original leaked it here
            using (IRenderer renderer = new WindowsMediaRenderer(timeline, path, WindowsMediaProfiles.HighQualityVideo))
            {
                renderer.Render();
            }
        }
        catch (Exception exception)
        {
            // best-effort save: log the failure rather than crash the caller
            Console.WriteLine(exception);
        }
    }

    Console.WriteLine("Saving finished.");
}
/// <summary>
/// A RenderCompleted handler that is subscribed and then unsubscribed must
/// not fire when a render is started and cancelled.
/// </summary>
public void AddAndRemoveHandler()
{
    bool completedFired = false;

    using (ITimeline timeline = new DefaultTimeline())
    {
        ITrack track = timeline.AddAudioGroup().AddTrack();
        track.AddClip("testinput.mp3", GroupMediaType.Audio, InsertPosition.Absoloute, 0, 0, -1);

        using (NullRenderer renderer = new NullRenderer(timeline))
        {
            EventHandler completedHandler = delegate { completedFired = true; };

            // subscribe then immediately unsubscribe — the handler must be gone
            renderer.RenderCompleted += completedHandler;
            renderer.RenderCompleted -= completedHandler;

            renderer.BeginRender(null, null);
            renderer.Cancel();

            Assert.IsFalse(completedFired);
        }
    }
}
/// <summary>
/// An audible demonstration of the difference between interpolating an
/// effect's parameter values and jumping directly to them.
/// </summary>
public void JumpVolume()
{
    const string outputFile = "JumpVolume.wma";

    using (ITimeline timeline = new DefaultTimeline())
    {
        ITrack track = timeline.AddAudioGroup().AddTrack();
        IClip clip = track.AddClip("testinput.mp3", GroupMediaType.Audio, InsertPosition.Relative, 0, 0, 10);

        // build an audio-mixer effect whose "Vol" parameter jumps between
        // levels, then interpolates back down to silence at the end
        var mixerDefinition = new EffectDefinition(DxtSubObjects.AudioMixer);
        var volume = new Parameter("Vol", 0.0, 2, 1.0);
        volume.Intervals.Add(new Interval(IntervalMode.Jump, 2.5, "0.2"));
        volume.Intervals.Add(new Interval(IntervalMode.Jump, 3.5, "0.8"));
        volume.Intervals.Add(new Interval(IntervalMode.Jump, 4.5, "0.2"));
        volume.Intervals.Add(new Interval(IntervalMode.Jump, 5, "1.0"));
        volume.Intervals.Add(new Interval(IntervalMode.Interpolate, clip.Duration, "0.0"));
        mixerDefinition.Parameters.Add(volume);

        clip.AddEffect(0, clip.Duration, mixerDefinition);

        using (IRenderer renderer = new WindowsMediaRenderer(timeline, outputFile, WindowsMediaProfiles.MediumQualityAudio))
        {
            renderer.Render();
        }
    }
}
/// <summary>
/// AddEffect on a group, track, composition and clip sets the new effect's
/// Group to the owning group and Container to the object it was added to.
/// </summary>
public void AddEffectSetsApropriateContainer()
{
    using (ITimeline timeline = new DefaultTimeline())
    {
        EffectDefinition def = StandardEffects.CreateDefaultBlur();

        IGroup group = timeline.AddVideoGroup(24, 100, 100);
        IEffect groupEffect = group.AddEffect(0, 10, def);
        Assert.AreSame(group, groupEffect.Group);
        Assert.AreSame(group, groupEffect.Container);

        ITrack track = group.AddTrack();
        IEffect trackEffect = track.AddEffect(0, 10, def);
        Assert.AreSame(group, trackEffect.Group);
        Assert.AreSame(track, trackEffect.Container);

        IComposition composition = group.AddComposition();
        IEffect compositionEffect = composition.AddEffect(0, 10, def);
        Assert.AreSame(group, compositionEffect.Group);
        Assert.AreSame(composition, compositionEffect.Container);

        IClip clip = track.AddClip("..\\..\\image1.jpg", GroupMediaType.Image, InsertPosition.Absolute, 0, 0, 10);
        IEffect clipEffect = clip.AddEffect(0, 10, def);
        // the original asserted clip.Group here; every other case asserts the
        // EFFECT's Group, so check clipEffect.Group for consistency
        Assert.AreSame(group, clipEffect.Group);
        Assert.AreSame(clip, clipEffect.Container);
    }
}
/// <summary>
/// Renders a two-second WAV through an explicitly created CD-quality audio
/// compressor (filter + media type supplied to the renderer).
/// </summary>
public void RenderWithCompressor()
{
    const string outputFile = "RenderWithCompressor.wav";

    using (AudioCompressor compressor = AudioCompressorFactory.Create(AudioFormat.CompactDiscQualityStereoPcm))
    using (ITimeline timeline = new DefaultTimeline())
    {
        ITrack track = timeline.AddAudioGroup().AddTrack();
        track.AddClip("testinput.wav", GroupMediaType.Audio, InsertPosition.Relative, 0, 0, 2);

        // render the timeline through the compressor's filter/media type
        using (var renderer = new WavFileRenderer(timeline, outputFile, compressor.Filter, compressor.MediaType, null))
        {
            ExecuteRenderer(renderer,
                @"<timeline framerate=""30.0000000""> <group type=""audio"" framerate=""30.0000000"" previewmode=""0""> <track> <clip start=""0"" stop=""2"" src=""testinput.wav"" mstart=""0""/> </track> </group> </timeline>");
        }

        AssertLengths(timeline.Fps, 2, outputFile);
    }
}
/// <summary>
/// Round-trips two seconds of a WAV file through the WAV renderer and
/// verifies the output length.
/// </summary>
public void ConvertWavToWav()
{
    const string outputFile = "ConvertWavToWav.wav";

    // create the timeline
    using (ITimeline timeline = new DefaultTimeline())
    {
        ITrack track = timeline.AddAudioGroup().AddTrack();
        track.AddClip("testinput.wav", GroupMediaType.Audio, InsertPosition.Relative, 0, 0, 2);

        // render the timeline
        using (var renderer = new WavFileRenderer(timeline, outputFile))
        {
            ExecuteRenderer(renderer,
                @"<timeline framerate=""30.0000000""> <group type=""audio"" framerate=""30.0000000"" previewmode=""0""> <track> <clip start=""0"" stop=""2"" src=""testinput.wav"" mstart=""0""/> </track> </group> </timeline>");
        }

        AssertLengths(timeline, 2, outputFile);
    }
}
/// <summary>
/// Converts two seconds of an MP3 to a low quality mono PCM WAV, with a
/// console progress participant reporting render progress.
/// </summary>
public void ConvertMp3ToWavWithCompressor()
{
    const string outputFile = "ConvertMp3ToWavWithCompressor.wav";

    // create the timeline
    using (ITimeline timeline = new DefaultTimeline())
    {
        ITrack track = timeline.AddAudioGroup().AddTrack();
        track.AddClip("testinput.mp3", GroupMediaType.Audio, InsertPosition.Absolute, 0, 0, 2);

        // render the timeline
        using (var renderer = new WavFileRenderer(timeline, outputFile, AudioFormat.LowQualityMonoPcm, new ICallbackParticipant[] { new ConsoleProgressParticipant() }))
        {
            ExecuteRenderer(renderer,
                @"<timeline framerate=""30.0000000""> <group type=""audio"" framerate=""30.0000000"" previewmode=""0""> <track> <clip start=""0"" stop=""2"" src=""testinput.mp3"" mstart=""0""/> </track> </group> </timeline>");
        }

        AssertLengths(timeline, 2, outputFile);
    }
}
/// <summary>
/// Track priorities: -1 appends after existing tracks, while an explicit
/// non-negative value inserts at that priority slot.
/// </summary>
public void TrackPrioirities()
{
    using (ITimeline timeline = new DefaultTimeline())
    {
        IGroup audioGroup = timeline.AddAudioGroup();

        ITrack appendedA = audioGroup.AddTrack("first", -1);
        ITrack atZero = audioGroup.AddTrack("second", 0);
        ITrack atOne = audioGroup.AddTrack("third", 1);
        ITrack appendedB = audioGroup.AddTrack("fourth", -1);
        ITrack atTwo = audioGroup.AddTrack("fifth", 2);

        Assert.AreEqual(3, appendedA.Priority);
        Assert.AreEqual(0, atZero.Priority);
        Assert.AreEqual(1, atOne.Priority);
        Assert.AreEqual(4, appendedB.Priority);
        Assert.AreEqual(2, atTwo.Priority);
    }
}
/// <summary>
/// Creates the video track from the image files, taken in a random order.
/// </summary>
/// <param name="video">The track container that will hold the new track</param>
/// <param name="imageFiles">The list of image files to add</param>
/// <param name="duration">The total duration of the video track</param>
/// <param name="width">Frame image width</param>
/// <param name="height">Frame image height</param>
/// <returns>The new video track</returns>
private ITrack CreateVideo(IGroup video, IList<string> imageFiles, double duration, int width, int height)
{
    var videoTrack = video.AddTrack();

    if (imageFiles.Any())
    {
        // shuffle the sources and pick a random duration for each image
        imageFiles = shuffler.Shuffle(imageFiles).ToList();
        LogList("Images", imageFiles);

        var durations = shuffler.GetRandomizedDurations(duration, imageFiles.Count).ToList();
        LogList("Durations", durations);

        var durationSums = durations.RunningSum().ToList();
        LogList("DurationSums", durationSums);

        // expand the list so each image occupies its EFFECT_DURATION slots
        imageFiles = CreateImageTimeline(imageFiles, duration, EFFECT_DURATION, durationSums).ToList();
        LogList("Image list", imageFiles);

        var images = imageFiles.Select(LoadImage);
        foreach (var img in images)
        {
            videoTrack.AddImage(img, 0, EFFECT_DURATION);
            img.Dispose();
        }

        // add an out/in transition pair at each clip boundary (skipping
        // boundaries too close to the start for the 0.5s lead-out)
        foreach (var time in durationSums)
        {
            if (time >= 0.5)
            {
                var transition = GetRandomTransition();
                logger.Log("Adding transition (true): " + transition.TransitionId);
                video.AddTransition(time - 0.5, 0.5, transition, true);

                transition = GetRandomTransition();
                logger.Log("Adding transition (false): " + transition.TransitionId);
                video.AddTransition(time, 0.5, transition, false);
            }
        }
    }

    return videoTrack;
}
/// <summary>
/// A clip added with a duration of -1 resolves its duration from the media
/// file itself ("transitions.wmv" is 7.999 seconds long).
/// </summary>
public void AddClipResolvesDuration()
{
    using (ITimeline timeline = new DefaultTimeline())
    {
        IGroup group = timeline.AddVideoGroup(24, 64, 64);
        ITrack track = group.AddTrack("root", -1);
        IClip clip = track.AddClip("transitions.wmv", GroupMediaType.Video, InsertPosition.Absolute, 0, 0, -1);

        // compare doubles with a tolerance rather than exact equality
        Assert.AreEqual(7.999, clip.Duration, 0.001);
    }
}
/// <summary>
/// A clip added to a track reports that track as its Container.
/// </summary>
public void ClipsAssignedContainer()
{
    using (ITimeline timeline = new DefaultTimeline())
    {
        ITrack owningTrack = timeline.AddVideoGroup(24, 320, 200).AddTrack();
        IClip addedClip = owningTrack.AddClip("image1.jpg", GroupMediaType.Image, InsertPosition.Relative, 0, 0, 2);

        Assert.AreSame(owningTrack, addedClip.Container);
    }
}
/// <summary>
/// Render the video from the music file/background image.
///
/// NOTE(review): despite the original comment saying "asynchronously", this
/// method calls renderer.Render() and blocks until the render completes —
/// confirm before relying on it returning early.
///
/// Attach event handlers to audioProgress[0] and videoProgress[0] to monitor
/// progress, AFTER this function has been called!
/// </summary>
public void Render(ProgressBar pbarAudio, ProgressBar pbarVideo)
{
    Console.WriteLine("\nBegin render...");
    using (ITimeline timeline = new DefaultTimeline())
    {
        Console.Write("Create group...");
        // create our primary group: 15 fps, 32bpp, 720x480
        IGroup group = timeline.AddVideoGroup("background", 15.0, 32, 720, 480);
        Console.WriteLine("done.");

        Console.Write("Create tracks...");
        // add a video and audio track to the group
        ITrack videoTrack = group.AddTrack();
        ITrack audioTrack = timeline.AddAudioGroup().AddTrack();
        Console.WriteLine("done.");

        // add the audio/bg image
        Console.Write("Create audio track...");
        IClip audio = audioTrack.AddAudio(this.mp3path);
        Console.WriteLine("done.");

        // the background image is held for the full length of the audio
        Console.Write("Create video track... length: " + audio.Duration.ToString() + "...");
        IClip clip1 = videoTrack.AddImage(this.backgroundImage, 0, audio.Duration);
        //videoTrack.AddImage(this.backgroundImage, 0, audio.Duration);
        Console.WriteLine("done.");

        // set up progress indicators (stored in fields so callers can attach
        // handlers after Render is invoked — see summary)
        audioProgress[0] = new PercentageProgressParticipant(timeline);
        videoProgress[0] = new PercentageProgressParticipant(timeline);

        Console.WriteLine("Timeline duration: " + timeline.Duration);
        Console.WriteLine("Audio duration: " + audio.Duration);
        Console.WriteLine("IMG duration: " + videoTrack.Duration);

        // render our video out
        Console.Write("Render Start...");
        using (renderer = new Splicer.Renderer.WindowsMediaRenderer(
            timeline, videopath, WindowsMediaProfiles.HighQualityVideo, videoProgress, audioProgress))
        {
            // stash the progress bars so the handlers below can update them
            this.pbarA = pbarAudio;
            this.pbarV = pbarVideo;
            audioProgress[0].ProgressChanged +=
                new EventHandler<Splicer.Renderer.ProgressChangedEventArgs>(listMusicItem_ProgressChangedAudio);
            videoProgress[0].ProgressChanged +=
                new EventHandler<Splicer.Renderer.ProgressChangedEventArgs>(listMusicItem_ProgressChangedVideo);
            renderer.Render();
        }
        Console.WriteLine("Render Completed.");
        //AsyncCallback cb = new AsyncCallback(CallBack);
        //IAsyncResult ar = renderer.BeginRender(cb, renderer.State);
    }
}
/// <summary>
/// Generates a little slide-show of four images with fade transitions between
/// them and an enveloped audio track, rendered to WMV.
/// </summary>
public void FadeBetweenImages()
{
    // generates a little slide-show, with audio track and fades between images.
    string outputFile = "FadeBetweenImages.wmv";

    using (ITimeline timeline = new DefaultTimeline())
    {
        IGroup group = timeline.AddVideoGroup(32, 160, 100);
        ITrack videoTrack = group.AddTrack();
        IClip clip1 = videoTrack.AddImage("image1.jpg", 0, 2); // play first image for a little while
        IClip clip2 = videoTrack.AddImage("image2.jpg", 0, 2); // and the next
        IClip clip3 = videoTrack.AddImage("image3.jpg", 0, 2); // the third
        IClip clip4 = videoTrack.AddImage("image4.jpg", 0, 2); // and finally the last

        double halfDuration = 0.5;

        // fade out and back in around the start of clip2
        group.AddTransition(clip2.Offset - halfDuration, halfDuration, StandardTransitions.CreateFade(), true);
        group.AddTransition(clip2.Offset, halfDuration, StandardTransitions.CreateFade(), false);

        // again for clip3
        group.AddTransition(clip3.Offset - halfDuration, halfDuration, StandardTransitions.CreateFade(), true);
        group.AddTransition(clip3.Offset, halfDuration, StandardTransitions.CreateFade(), false);

        // and again for clip4
        group.AddTransition(clip4.Offset - halfDuration, halfDuration, StandardTransitions.CreateFade(), true);
        group.AddTransition(clip4.Offset, halfDuration, StandardTransitions.CreateFade(), false);

        // add some audio
        ITrack audioTrack = timeline.AddAudioGroup().AddTrack();
        IClip audio = audioTrack.AddAudio("testinput.wav", 0, videoTrack.Duration);

        // create an audio envelope effect, this will:
        // fade the audio from 0% to 100% in 1 second.
        // play at full volume until 1 second before the end of the track
        // fade back out to 0% volume
        audioTrack.AddEffect(0, audio.Duration, StandardEffects.CreateAudioEnvelope(1.0, 1.0, 1.0, audio.Duration));

        // render our slideshow out to a windows media file
        using (
            IRenderer renderer = new WindowsMediaRenderer(timeline, outputFile, WindowsMediaProfiles.HighQualityVideo))
        {
            renderer.Render();
        }
    }
}
/// <summary>
/// A clip's StretchMode defaults to Stretch and can be reassigned to
/// PreserveAspectRatio.
/// </summary>
public void AlterStretchMode()
{
    using (ITimeline timeline = new DefaultTimeline())
    {
        ITrack rootTrack = timeline.AddVideoGroup(24, 64, 64).AddTrack("root", -1);
        IClip videoClip = rootTrack.AddClip("transitions.wmv", GroupMediaType.Video, InsertPosition.Absolute, 0, 0, -1);

        // default mode
        Assert.AreEqual(ResizeFlags.Stretch, videoClip.StretchMode);

        // change it and read it back
        videoClip.StretchMode = ResizeFlags.PreserveAspectRatio;
        Assert.AreEqual(ResizeFlags.PreserveAspectRatio, videoClip.StretchMode);
    }
}
/// <summary>
/// Constructs a WindowsMediaRenderer with a null output file name.
/// NOTE(review): presumably decorated with an expected-exception attribute —
/// confirm against the test fixture.
/// </summary>
public void ConvertWithNullFilename()
{
    using (ITimeline timeline = new DefaultTimeline())
    {
        IGroup audioGroup = timeline.AddAudioGroup();
        ITrack rootTrack = audioGroup.AddTrack();
        rootTrack.AddClip("testinput.mp3", GroupMediaType.Audio, InsertPosition.Absoloute, 0, 0, 2);

        // the original ended the using statement with a stray semicolon
        // (an empty statement, compiler warning CS0642); an explicit empty
        // block keeps the construct-then-dispose behavior and is unambiguous
        using (
            WindowsMediaRenderer renderer =
                new WindowsMediaRenderer(timeline, null, WindowsMediaProfiles.LowQualityAudio))
        {
        }
    }
}
/// <summary>
/// Renders an AVI with a null file name (the renderer is handed no output
/// target).
/// </summary>
public void RenderWithNoFileName()
{
    using (ITimeline timeline = new DefaultTimeline())
    {
        ITrack videoTrack = timeline.AddVideoGroup(24, 100, 80).AddTrack();
        videoTrack.AddClip("transitions.wmv", GroupMediaType.Video, InsertPosition.Absolute, 0, 0, 2);

        using (var renderer = new AviFileRenderer(timeline, null))
        {
            renderer.Render();
        }
    }
}
/// <summary>
/// Tracks added to a group are appended to its Tracks collection, and each
/// track reports the group as both its Group and its Container.
/// </summary>
public void AddTrack()
{
    using (ITimeline timeline = new DefaultTimeline())
    {
        IGroup audioGroup = timeline.AddAudioGroup();

        ITrack firstTrack = audioGroup.AddTrack();
        Assert.AreSame(audioGroup, firstTrack.Group);
        Assert.AreSame(audioGroup, firstTrack.Container);
        Assert.AreEqual(1, audioGroup.Tracks.Count);
        Assert.AreSame(audioGroup.Tracks[0], firstTrack);

        ITrack secondTrack = audioGroup.AddTrack();
        Assert.AreEqual(2, audioGroup.Tracks.Count);
        Assert.AreEqual(audioGroup.Tracks[1], secondTrack);

        PrepareToExecute(timeline,
            @"<timeline framerate=""30.0000000""> <group type=""audio"" framerate=""30.0000000"" previewmode=""0""> <track /> <track /> </group> </timeline>");
    }
}