// Verifies that clips added with InsertPosition.Relative are appended after the
// clips already on the same track, and that each track positions its clips
// independently of its siblings.
public void AddClipsToTrack()
{
    using (ITimeline timeline = new DefaultTimeline())
    {
        IGroup group = timeline.AddVideoGroup(24, 320, 200);
        ITrack track1 = group.AddTrack();
        ITrack track2 = group.AddTrack();
        // Two relative 2s clips per track: each second clip should start where
        // the first on its track ends (at 2s), per the expected XML below.
        track1.AddClip("image1.jpg", GroupMediaType.Image, InsertPosition.Relative, 0, 0, 2);
        track2.AddClip("image2.jpg", GroupMediaType.Image, InsertPosition.Relative, 0, 0, 2);
        track1.AddClip("image3.jpg", GroupMediaType.Image, InsertPosition.Relative, 0, 0, 2);
        track2.AddClip("image4.jpg", GroupMediaType.Image, InsertPosition.Relative, 0, 0, 2);
        Assert.AreEqual(2, track1.Clips.Count);
        Assert.AreEqual(2, track2.Clips.Count);
        // NOTE(review): the expected XML carries height but no width attribute on
        // the group — confirm this matches the serializer output for this version.
        PrepareToExecute(timeline, @"<timeline framerate=""30.0000000""> <group type=""video"" bitdepth=""24"" height=""200"" framerate=""30.0000000"" previewmode=""0""> <track> <clip start=""0"" stop=""2"" src=""image1.jpg"" /> <clip start=""2"" stop=""4"" src=""image3.jpg"" /> </track> <track> <clip start=""0"" stop=""2"" src=""image2.jpg"" /> <clip start=""2"" stop=""4"" src=""image4.jpg"" /> </track> </group> </timeline>");
    }
}
/// <summary>
/// An effect added to a group, track, composition or clip reports that object
/// as its Container, while Group always refers to the owning video group.
/// </summary>
public void AddEffectSetsApropriateContainer()
{
    using (ITimeline timeline = new DefaultTimeline())
    {
        EffectDefinition definition = StandardEffects.CreateDefaultBlur();
        IGroup group = timeline.AddVideoGroup(24, 100, 100);

        IEffect effectOnGroup = group.AddEffect(0, 10, definition);
        Assert.AreSame(group, effectOnGroup.Group);
        Assert.AreSame(group, effectOnGroup.Container);

        ITrack track = group.AddTrack();
        IEffect effectOnTrack = track.AddEffect(0, 10, definition);
        Assert.AreSame(group, effectOnTrack.Group);
        Assert.AreSame(track, effectOnTrack.Container);

        IComposition composition = group.AddComposition();
        IEffect effectOnComposition = composition.AddEffect(0, 10, definition);
        Assert.AreSame(group, effectOnComposition.Group);
        Assert.AreSame(composition, effectOnComposition.Container);

        IClip clip = track.AddClip("image1.jpg", GroupMediaType.Image, InsertPosition.Absolute, 0, 0, 10);
        IEffect effectOnClip = clip.AddEffect(0, 10, definition);
        Assert.AreSame(group, clip.Group);
        Assert.AreSame(clip, effectOnClip.Container);
    }
}
/// <summary>
/// Adds an audio clip to a video group's track; presumably the library rejects
/// the mismatched media type — confirm the expected-exception attribute on the
/// original test.
/// </summary>
public void AddAudioClipToVideoGroup()
{
    using (ITimeline timeline = new DefaultTimeline())
    {
        ITrack videoTrack = timeline.AddVideoGroup(24, 64, 64).AddTrack();
        videoTrack.AddClip("wav file", "1sec.wav", GroupMediaType.Audio, InsertPosition.Absolute, 0, 0, -1);
    }
}
/// <summary>
/// Constructs an AudioVideoClipPair with a null audio clip; presumably this
/// should throw — verify against the original test's attributes.
/// </summary>
public void ConstructWithNullAudioClip()
{
    using (ITimeline timeline = new DefaultTimeline())
    {
        IClip videoClip = timeline.AddVideoGroup(24, 100, 100).AddTrack().AddVideo("1sec.wmv");
        // Result intentionally discarded; only the construction matters here.
        new AudioVideoClipPair(null, videoClip);
    }
}
/// <summary>
/// A clip added with a duration of -1 resolves its duration from the media file
/// (transitions.wmv is 7.999s long, per this suite's convention).
/// </summary>
public void AddClipResolvesDuration()
{
    using (ITimeline timeline = new DefaultTimeline())
    {
        ITrack track = timeline.AddVideoGroup(24, 64, 64).AddTrack("root", -1);
        IClip clip = track.AddClip("transitions.wmv", GroupMediaType.Video, InsertPosition.Absolute, 0, 0, -1);
        Assert.AreEqual(7.999, clip.Duration);
    }
}
// Exercises every overload of AddVideoWithAudio. Insertion is relative unless an
// InsertPosition is given, so offsets accumulate across the calls below, and the
// audio and video halves of each pair always agree on offset/duration.
public void AddAudioAndVideo()
{
    // test all the overloads for AddVideoWithAudio
    using (ITimeline timeline = new DefaultTimeline())
    {
        timeline.AddVideoGroup(24, 320, 240).AddTrack();
        timeline.AddAudioGroup().AddTrack();

        // (file): appended at 0, natural 1s duration.
        IAudioVideoClipPair clip1 = timeline.AddVideoWithAudio("1sec.wmv");
        Assert.AreEqual(0, clip1.AudioClip.Offset);
        Assert.AreEqual(1, clip1.AudioClip.Duration);
        Assert.AreEqual(0, clip1.VideoClip.Offset);
        Assert.AreEqual(1, clip1.VideoClip.Duration);

        // (file, offset): 1s gap after the previous clip => starts at 2.
        IAudioVideoClipPair clip2 = timeline.AddVideoWithAudio("1sec.wmv", 1);
        Assert.AreEqual(2, clip2.AudioClip.Offset);
        Assert.AreEqual(1, clip2.AudioClip.Duration);
        Assert.AreEqual(2, clip2.VideoClip.Offset);
        Assert.AreEqual(1, clip2.VideoClip.Duration);

        // (file, clipStart, clipEnd): appended at 3 with a trimmed 0.5s duration.
        IAudioVideoClipPair clip3 = timeline.AddVideoWithAudio("1sec.wmv", 0, 0.5);
        Assert.AreEqual(3, clip3.AudioClip.Offset);
        Assert.AreEqual(0.5, clip3.AudioClip.Duration);
        Assert.AreEqual(3, clip3.VideoClip.Offset);
        Assert.AreEqual(0.5, clip3.VideoClip.Duration);

        // (file, offset, clipStart, clipEnd): media trimmed to start at 0.5.
        IAudioVideoClipPair clip4 = timeline.AddVideoWithAudio("1sec.wmv", 0, 0.5, 1.0);
        Assert.AreEqual(3.5, clip4.AudioClip.Offset);
        Assert.AreEqual(0.5, clip4.AudioClip.Duration);
        Assert.AreEqual(0.5, clip4.AudioClip.MediaStart);
        Assert.AreEqual(3.5, clip4.VideoClip.Offset);
        Assert.AreEqual(0.5, clip4.VideoClip.Duration);
        Assert.AreEqual(0.5, clip4.VideoClip.MediaStart);

        // Absolute insertion pins the clip at exactly 6s.
        IAudioVideoClipPair clip5 = timeline.AddVideoWithAudio("1sec.wmv", InsertPosition.Absolute, 6, 0, -1);
        Assert.AreEqual(6, clip5.AudioClip.Offset);
        Assert.AreEqual(1, clip5.AudioClip.Duration);
        Assert.AreEqual(6, clip5.VideoClip.Offset);
        Assert.AreEqual(1, clip5.VideoClip.Duration);

        // Named overload: the name is applied to both halves of the pair.
        IAudioVideoClipPair clip6 = timeline.AddVideoWithAudio("myclip", "1sec.wmv", InsertPosition.Absolute, 8, 0, 0.5);
        Assert.AreEqual(8, clip6.AudioClip.Offset);
        Assert.AreEqual(0, clip6.AudioClip.MediaStart);
        Assert.AreEqual(0.5, clip6.AudioClip.Duration);
        Assert.AreEqual("myclip", clip6.AudioClip.Name);
        Assert.AreEqual(8, clip6.VideoClip.Offset);
        Assert.AreEqual(0, clip6.VideoClip.MediaStart);
        Assert.AreEqual(0.5, clip6.VideoClip.Duration);
        Assert.AreEqual("myclip", clip6.VideoClip.Name);
    }
}
/// <summary>
/// A clip's StretchMode defaults to Stretch and can be reassigned after creation.
/// </summary>
public void AlterStretchMode()
{
    using (ITimeline timeline = new DefaultTimeline())
    {
        ITrack track = timeline.AddVideoGroup(24, 64, 64).AddTrack("root", -1);
        IClip clip = track.AddClip("transitions.wmv", GroupMediaType.Video, InsertPosition.Absolute, 0, 0, -1);

        Assert.AreEqual(ResizeFlags.Stretch, clip.StretchMode);

        clip.StretchMode = ResizeFlags.PreserveAspectRatio;
        Assert.AreEqual(ResizeFlags.PreserveAspectRatio, clip.StretchMode);
    }
}
/// <summary>
/// A clip added to a track reports the track as Container and the group as
/// Group, appears in the track's Clips collection, and has no name by default.
/// </summary>
public void AddClip()
{
    using (ITimeline timeline = new DefaultTimeline())
    {
        IGroup group = timeline.AddVideoGroup(24, 64, 64);
        ITrack track = group.AddTrack("root", -1);
        IClip addedClip = track.AddClip("transitions.wmv", GroupMediaType.Video, InsertPosition.Absolute, 0, 0, -1);

        Assert.AreSame(track, addedClip.Container);
        Assert.AreSame(group, addedClip.Group);
        Assert.AreEqual(1, track.Clips.Count);
        Assert.AreSame(track.Clips[0], addedClip);
        Assert.IsNull(addedClip.Name);
    }
}
// Adds a blur effect to a clip: both effect events fire, the effect's properties
// round-trip, and the serialized timeline XML matches. (This block targets the
// library version using the InsertPosition.Absoloute spelling and the
// Before/AfterEffectAdded event names.)
public void AddEffectToClip()
{
    bool beforeFired = false;
    bool afterFired = false;
    using (ITimeline timeline = new DefaultTimeline())
    {
        IGroup group = timeline.AddVideoGroup(24, 64, 64);
        ITrack track = group.AddTrack();
        IClip clip = track.AddClip("transitions.wmv", GroupMediaType.Video, InsertPosition.Absoloute, 0, 0, -1);
        clip.BeforeEffectAdded += new EventHandler(delegate { beforeFired = true; });
        clip.AfterEffectAdded += new EventHandler<AfterEffectAddedEventArgs>(delegate { afterFired = true; });
        // Blur ramping from radius 2 to 20 over the full clip duration.
        EffectDefinition defintion = StandardEffects.CreateBlurEffect(2, clip.Duration, 20);
        // Priority -1 resolves to 0 here, per the assertion below.
        IEffect effect = clip.AddEffect("blur", -1, 0, clip.Duration, defintion);
        Assert.IsTrue(beforeFired);
        Assert.IsTrue(afterFired);
        Assert.AreEqual("blur", effect.Name);
        Assert.AreEqual(0, effect.Priority);
        Assert.AreEqual(clip.Duration, effect.Duration);
        Assert.AreEqual(0, clip.Offset);
        Assert.AreSame(defintion, effect.EffectDefinition);
        PrepareToExecute(timeline, @"<timeline framerate=""30.0000000""> <group type=""video"" bitdepth=""24"" width=""64"" height=""64"" framerate=""30.0000000"" previewmode=""0""> <track> <clip start=""0"" stop=""7.9990000"" src=""transitions.wmv"" mstart=""0""> <effect start=""0"" stop=""7.9990000"" clsid=""{7312498D-E87A-11D1-81E0-0000F87557DB}"" username=""blur""> <param name=""PixelRadius"" value=""2""> <linear time=""7.9990000"" value=""20"" /> </param> </effect> </clip> </track> </group> </timeline>");
    }
}
/// <summary>
/// Renders with a null output file name; presumably the renderer should reject
/// this — confirm the expected-exception attribute on the original test.
/// </summary>
public void RenderWithNoFileName()
{
    using (ITimeline timeline = new DefaultTimeline())
    {
        ITrack track = timeline.AddVideoGroup(24, 100, 80).AddTrack();
        track.AddClip("transitions.wmv", GroupMediaType.Video, InsertPosition.Absolute, 0, 0, 2);

        using (var renderer = new AviFileRenderer(timeline, null))
        {
            renderer.Render();
        }
    }
}
/// <summary>
/// Prompts for two AVI files and a destination folder, then merges the two
/// videos back-to-back into "videoMerged.avi" inside the chosen folder.
/// </summary>
private void button1_Click(object sender, EventArgs e)
{
    // Fix: the original continued with empty paths when a dialog was cancelled
    // and only failed later inside the renderer; bail out early instead.
    var firstDialog = new OpenFileDialog
    {
        Filter = "Video files (*.avi)|*.avi",
        InitialDirectory = "C:\\",
        Title = "Select the first video you want to merge"
    };
    if (firstDialog.ShowDialog() != DialogResult.OK)
    {
        return;
    }
    string firstPath = firstDialog.FileName;

    var secondDialog = new OpenFileDialog
    {
        Filter = "Video files (*.avi)|*.avi",
        Title = "Select the second video you want to merge"
    };
    if (secondDialog.ShowDialog() != DialogResult.OK)
    {
        return;
    }
    string secondPath = secondDialog.FileName;

    var pathFinal = new CommonOpenFileDialog { IsFolderPicker = true };
    if (pathFinal.ShowDialog() != CommonFileDialogResult.Ok)
    {
        return;
    }
    // Fix: Path.Combine instead of concatenating with a forward slash.
    string endPath = Path.Combine(pathFinal.FileName, "videoMerged.avi");

    using (ITimeline timeline = new DefaultTimeline())
    {
        IGroup group = timeline.AddVideoGroup(32, 1920, 1080);
        // Each video sits on its own track; the second starts where the first ends.
        var firstVideoClip = group.AddTrack().AddVideo(firstPath);
        group.AddTrack().AddVideo(secondPath, firstVideoClip.Duration);

        using (var renderer = new AviFileRenderer(timeline, endPath))
        {
            renderer.Render();
        }
    }
}
/// <summary>
/// Renders a timeline that has a video group but no audio group to a WAV file;
/// presumably the renderer should reject this — confirm the expected exception.
/// </summary>
public void RenderWithoutAudioGroup()
{
    const string outputFile = "RenderWithoutAudioGroup.wav";

    using (ITimeline timeline = new DefaultTimeline())
    {
        // Only a video group is present — no audio to write.
        timeline.AddVideoGroup(24, 200, 200);

        using (var renderer = new WavFileRenderer(timeline, outputFile))
        {
            renderer.Render();
        }
    }
}
/// <summary>
/// Demonstrates watermarking a video clip: the watermark image sits on a second
/// video track, its alpha is scaled down, and an alpha-key transition lets the
/// main video show through it.
/// </summary>
public void WatermarkVideoClip()
{
    string outputFile = "WatermarkVideoClip.wmv";

    using (ITimeline timeline = new DefaultTimeline(15))
    {
        // Default audio track.
        timeline.AddAudioGroup().AddTrack();

        // 32bpp video group so the watermark can carry an alpha channel.
        IGroup videoGroup = timeline.AddVideoGroup(32, 320, 240);
        ITrack videoTrack = videoGroup.AddTrack();
        ITrack watermarkTrack = videoGroup.AddTrack();

        // The clip's video goes to the first video track, its audio to the audio track.
        timeline.AddVideoWithAudio("transitions.wmv");

        // The watermark (a transparent gif) is stretched over the whole clip duration.
        IClip watermarkClip = watermarkTrack.AddImage("testlogo.gif", 0, videoTrack.Duration);

        // Scale the watermark's alpha to 0.8 of its original value.
        watermarkClip.AddEffect(0, watermarkClip.Duration, StandardEffects.CreateAlphaSetterRamp(0.8));

        // Alpha-key transition: the underlying video shows through wherever the
        // watermark track's alpha channel allows.
        watermarkTrack.AddTransition(0, videoTrack.Duration,
            StandardTransitions.CreateKey(KeyTransitionType.Alpha, null, null, null, null, null),
            false);

        using (var renderer = new WindowsMediaRenderer(timeline, outputFile, WindowsMediaProfiles.HighQualityVideo))
        {
            renderer.Render();
        }
    }
}
/// <summary>
/// Adds in-memory Image clips via every AddImage overload. Each clip must be
/// backed by a unique temp file, and after the timeline is disposed those temp
/// files must have been deleted.
/// </summary>
public void AddInMemoryImageClipsToTrack()
{
    var tempFiles = new StringCollection();
    // Fix: the original delegate only checked for duplicates but never added
    // anything to tempFiles, so both the duplicate check and the post-dispose
    // cleanup assertions below were vacuous.
    Action<IClip> addClip = delegate(IClip clip)
    {
        if (tempFiles.Contains(clip.File.FileName))
        {
            Assert.Fail("TempFile: {0} duplicated", clip.File.FileName);
        }
        tempFiles.Add(clip.File.FileName);
    };

    string outputFile = "AddInMemoryImageClipsToTrack.wmv";
    Image image = Image.FromFile("..\\..\\image1.jpg");

    using (ITimeline timeline = new DefaultTimeline())
    {
        timeline.AddAudioGroup().AddTrack().AddAudio("..\\..\\testinput.wav", 0, 7.5);
        ITrack videoTrack = timeline.AddVideoGroup(24, 320, 200).AddTrack();

        addClip(videoTrack.AddImage(image)); // 0->1
        addClip(videoTrack.AddImage(image, 1)); // 2->3
        addClip(videoTrack.AddImage(image, 1, 0.5)); // 4->4.5
        addClip(videoTrack.AddImage(image, InsertPosition.Absolute, 5, 0, 1)); // 5->6

        IClip clip = videoTrack.AddImage("named", image, InsertPosition.Absolute, 7, 0.5, 1); // 7->7.5
        addClip(clip);
        Assert.AreEqual("named", clip.Name);
        Assert.AreEqual(7.5, videoTrack.Duration);

        using (var renderer = new WindowsMediaRenderer(timeline, outputFile, WindowsMediaProfiles.HighQualityVideo))
        {
            renderer.Render();
        }
    }

    // Disposing the timeline should have deleted every temp file it created.
    foreach (string file in tempFiles)
    {
        Assert.IsFalse(File.Exists(file));
    }
}
/// <summary>
/// AddClip with an explicit name: verifies name, resolved duration, offset and
/// file name round-trip. (InsertPosition.Absoloute is this library version's
/// spelling of the enum member.)
/// </summary>
public void AddClipWithName()
{
    using (ITimeline timeline = new DefaultTimeline())
    {
        IGroup group = timeline.AddVideoGroup(24, 64, 64);
        ITrack track = group.AddTrack("root", -1);
        IClip namedClip = track.AddClip("clock animation", "transitions.wmv", GroupMediaType.Video, InsertPosition.Absoloute, 0, 0, -1);

        Assert.AreEqual(1, track.Clips.Count);
        Assert.AreSame(track.Clips[0], namedClip);
        Assert.AreEqual("clock animation", namedClip.Name);
        Assert.AreEqual(7.999, namedClip.Duration);
        Assert.AreEqual(0, namedClip.Offset);
        Assert.AreEqual("transitions.wmv", namedClip.File.FileName);
        Assert.AreEqual(0, namedClip.Effects.Count);
    }
}
/// <summary>
/// Renders a 2-second video-only timeline to an AVI and checks the output length.
/// </summary>
public void RenderVideoOnly()
{
    const string outputFile = "RenderVideoOnly.avi";

    using (ITimeline timeline = new DefaultTimeline())
    {
        ITrack track = timeline.AddVideoGroup(24, 100, 80).AddTrack();
        track.AddClip("transitions.wmv", GroupMediaType.Video, InsertPosition.Absolute, 0, 0, 2);

        using (var renderer = new AviFileRenderer(timeline, outputFile))
        {
            renderer.Render();
        }

        AssertLengths(timeline, 2, outputFile);
    }
}
/// <summary>
/// A video group created without an explicit FPS inherits the timeline's FPS,
/// and the AddingGroup/AddedGroup events fire around its creation.
/// </summary>
public void AddVideoGroupWithDefaultFPS()
{
    bool beforeFired = false;
    bool afterFired = false;

    using (ITimeline timeline = new DefaultTimeline(12))
    {
        timeline.AddingGroup += delegate { beforeFired = true; };
        timeline.AddedGroup += delegate { afterFired = true; };

        IGroup group = timeline.AddVideoGroup(32, 100, 100);

        Assert.AreSame(timeline, group.Timeline);
        Assert.AreEqual(timeline.Fps, group.Fps);
        Assert.IsTrue(beforeFired);
        Assert.IsTrue(afterFired);
    }
}
// Builds a slideshow video from the given images, spread evenly across the audio
// track's duration, and starts an asynchronous render. Errors are logged and
// fanned out to each supplied completion handler.
public void CreateVideoFromImages(List<string> imagePaths, AudioUoW audio, string outputPath, VideoQuality vq, List<VideoRenderedEventHandler> renderCompleted = null)
{
    try
    {
        using (ITimeline timeline = new DefaultTimeline())
        {
            // The profile decides output dimensions and the WMV profile string.
            var profile = SplicerWMVProfile.GetProfile(vq);
            var group = timeline.AddVideoGroup(32, profile.Width, profile.Height);
            var videoTrack = group.AddTrack();
            var audioTrack = timeline.AddAudioGroup().AddTrack();
            var a = audioTrack.AddAudio(audio.AudioPath);
            // Spread the images evenly across the audio's duration.
            var imageDuration = a.Duration / imagePaths.Count;
            foreach (var imagePath in imagePaths)
            {
                videoTrack.AddImage(imagePath, InsertPosition.Relative, 0, 0, imageDuration);
            }
            IRenderer renderer = new WindowsMediaRenderer(timeline, outputPath, profile.Profile);
            // NOTE(review): BeginRender is asynchronous, but the enclosing using
            // disposes the timeline as soon as this method returns — confirm the
            // renderer either completes before disposal or owns its own state.
            renderer.BeginRender(RenderingCompleted, new VideoRenderedAsyncState() { Renderer = renderer, Audio = audio, RenderCompleted = renderCompleted });
        }
    }
    catch (Exception ex)
    {
        Log.Error(ex.Message, ex);
        // Report the failure to every registered handler, if any were supplied.
        if (renderCompleted != null)
        {
            foreach (var rc in renderCompleted)
            {
                rc(ex, new VideoRenderedEventArgs());
            }
        }
    }
}
/// <summary>
/// Renders a clip through a NullRenderer with an ImagesToDiskParticipant and
/// checks that frame0..frame5 jpgs were written to the current directory.
/// </summary>
public void WriteSomeImages()
{
    using (var timeline = new DefaultTimeline())
    {
        // A 320x240 group so the captured frames come out at that size.
        timeline.AddVideoGroup(24, 320, 240).AddTrack();
        timeline.AddVideo("..\\..\\transitions.wmv");

        // NOTE(review): seven timestamps (1..7) are requested but only six files
        // are asserted below — confirm that is intended.
        var participant = new ImagesToDiskParticipant(24, 320, 240, Environment.CurrentDirectory, 1, 2, 3, 4, 5, 6, 7);

        using (var render = new NullRenderer(timeline, null, new ICallbackParticipant[] { participant }))
        {
            render.Render();
        }

        for (int frame = 0; frame < 6; frame++)
        {
            Assert.IsTrue(File.Exists(string.Format("frame{0}.jpg", frame)));
        }
    }
}
/// <summary>
/// Effect-added events raised on a track bubble up to the owning composition.
/// </summary>
public void TrackAddEffectBubblesToComposition()
{
    int beforeCount = 0;
    int afterCount = 0;

    using (ITimeline timeline = new DefaultTimeline())
    {
        IComposition composition = timeline.AddVideoGroup(24, 100, 100).AddComposition();
        composition.AddingEffect += delegate { beforeCount++; };
        composition.AddedEffect += delegate { afterCount++; };

        ITrack track = composition.AddTrack();
        track.AddEffect("test", -1, 1, 2, StandardEffects.CreateBlurEffect(2, 2, 10));

        Assert.AreEqual(1, beforeCount);
        Assert.AreEqual(1, afterCount);
    }
}
// Converts two seconds of video plus audio to a WMV, checking both the
// serialized timeline XML and the rendered file's length. (Old-API block:
// InsertPosition.Absoloute spelling.)
public void ConvertAviToWMV()
{
    string outputFile = "ConvertAviToWMV.wmv";
    using (ITimeline timeline = new DefaultTimeline())
    {
        // 0x20 == 32bpp, matching the bitdepth="32" in the expected XML below.
        IGroup videoGroup = timeline.AddVideoGroup(0x20, 320, 240);
        ITrack videoTrack = videoGroup.AddTrack();
        IClip clockClip = videoTrack.AddClip("transitions.wmv", GroupMediaType.Video, InsertPosition.Absoloute, 0, 0, 2);
        Assert.IsTrue(clockClip.Duration > 0);
        IGroup audioGroup = timeline.AddAudioGroup();
        ITrack audioTrack = audioGroup.AddTrack();
        audioTrack.AddClip("testinput.mp3", GroupMediaType.Audio, InsertPosition.Absoloute, 0, 0, 2);
        using (WindowsMediaRenderer renderer = new WindowsMediaRenderer(timeline, outputFile, WindowsMediaProfiles.HighQualityVideo))
        {
            ExecuteRenderer(renderer, @"<timeline framerate=""30.0000000""> <group type=""video"" bitdepth=""32"" framerate=""30.0000000"" previewmode=""0""> <track> <clip start=""0"" stop=""2"" src=""transitions.wmv"" mstart=""0"" /> </track> </group> <group type=""audio"" framerate=""30.0000000"" previewmode=""0""> <track> <clip start=""0"" stop=""2"" src=""testinput.mp3"" mstart=""0"" /> </track> </group> </timeline>");
        }
        Assert.IsTrue(File.Exists(outputFile));
        AssertLengths(timeline, 2, outputFile);
    }
}
/// <summary>
/// Renders a timeline containing both a video and an audio clip to an AVI and
/// checks the output length.
/// </summary>
public void RenderVideoAndAudio()
{
    const string outputFile = "RenderVideoAndAudio.avi";

    using (ITimeline timeline = new DefaultTimeline())
    {
        ITrack videoTrack = timeline.AddVideoGroup(24, 100, 80).AddTrack();
        videoTrack.AddClip("transitions.wmv", GroupMediaType.Video, InsertPosition.Absolute, 0, 0, 2);

        ITrack audioTrack = timeline.AddAudioGroup().AddTrack();
        audioTrack.AddClip("testinput.wav", GroupMediaType.Audio, InsertPosition.Absolute, 0, 0, 2);

        using (var renderer = new AviFileRenderer(timeline, outputFile))
        {
            renderer.Render();
        }

        AssertLengths(timeline, 2, outputFile);
    }
}
/// <summary>
/// Builds a complete video from a spec: the voice-over fixes the overall
/// duration; background music, images and a watermark are fitted to it, then
/// everything is rendered under the spec's name.
/// </summary>
public void Create(VideoSpec spec)
{
    using (ITimeline timeline = new DefaultTimeline())
    {
        logger.Log("Creating video file " + spec.Name);

        var videoGroup = timeline.AddVideoGroup("video", FRAME_RATE, 32, spec.Width, spec.Height);
        var audioGroup = timeline.AddAudioGroup("audio", FRAME_RATE);

        // The length of the movie is governed by the voice track.
        var voiceTrack = AddVoiceTrack(audioGroup, spec.Text);
        var duration = voiceTrack.Duration;
        AddBackgroundMusic(audioGroup, spec.SoundFiles, duration);

        // Now the visuals, sized to the same duration.
        CreateVideo(videoGroup, spec.ImageFiles, duration, spec.Width, spec.Height);
        AddWatermark(videoGroup, spec.WatermarkFile, duration, spec.Width, spec.Height);

        // Combine everything and write out the result.
        RenderVideo(timeline, spec.Name);
    }
}
/// <summary>
/// Builds a 1080p slideshow from the jpgs in .\Pics (10s per image) with fade
/// transitions between images, lays the first wav from .\Audio underneath, and
/// renders the result to output.wmv.
/// </summary>
static void Main(string[] args)
{
    string picsFolder = Path.Combine(AppDomain.CurrentDomain.BaseDirectory, "Pics");

    using (ITimeline timeline = new DefaultTimeline(30))
    {
        double halfDuration = 1;
        IGroup videoGroup = timeline.AddVideoGroup("video", 30, 32, 1920, 1080);
        ITrack videoTrack = videoGroup.AddTrack();

        List<string> imageFiles = Directory.EnumerateFiles(picsFolder, "*.jpg").ToList();
        for (int index = 0; index < imageFiles.Count; index++)
        {
            IClip imageClip = videoTrack.AddImage(imageFiles[index], 0, 10);
            if (index > 0)
            {
                // Cross-fade: fade out over the tail of the previous clip, then
                // fade in over the head of this one.
                videoGroup.AddTransition(imageClip.Offset - halfDuration, halfDuration, StandardTransitions.CreateFade(), true);
                videoGroup.AddTransition(imageClip.Offset, halfDuration, StandardTransitions.CreateFade(), false);
            }
        }

        string audioFolder = Path.Combine(AppDomain.CurrentDomain.BaseDirectory, "Audio");
        string audioPath = Directory.EnumerateFiles(audioFolder, "*.wav").FirstOrDefault(x => x != null);

        ITrack audioTrack = timeline.AddAudioGroup().AddTrack();
        IClip audioClip = audioTrack.AddAudio(audioPath, 0, videoTrack.Duration);
        audioTrack.AddEffect(0, audioClip.Duration, StandardEffects.CreateAudioEnvelope(1.0, 1.0, 1.0, audioClip.Duration));

        using (var renderer = new WindowsMediaRenderer(timeline, "output.wmv", WindowsMediaProfiles.FullHD))
        {
            renderer.Render();
        }
    }

    Console.WriteLine("Hello World!");
}
// Renders a clip inserted 5 seconds in (relative positioning) through a
// NullRenderer and verifies the serialized timeline XML.
public void RenderVideo()
{
    // create the timeline
    using (ITimeline timeline = new DefaultTimeline())
    {
        IGroup videoGroup = timeline.AddVideoGroup(24, 320, 240);
        ITrack rootTrack = videoGroup.AddTrack();
        // Relative insertion with a 5s offset: the clip spans 5..7 per the XML.
        rootTrack.AddClip("..\\..\\transitions.wmv", GroupMediaType.Video, InsertPosition.Relative, 5, 0, 2);

        // render the timeline
        using (var renderer = new NullRenderer(timeline))
        {
            ExecuteRenderer(renderer, @"<timeline framerate=""30.0000000""> <group type=""video"" bitdepth=""24"" framerate=""30.0000000"" previewmode=""0""> <track> <clip start=""5"" stop=""7"" src=""..\..\transitions.wmv"" mstart=""0""/> </track> </group> </timeline>");
        }
    }
}
/// <summary>
/// AddEffect on a composition: the effect's name, offset and duration are
/// preserved, it is registered in Effects, and both add events fire.
/// </summary>
public void AddEffect()
{
    using (ITimeline timeline = new DefaultTimeline())
    {
        IComposition composition = timeline.AddVideoGroup(24, 100, 100).AddComposition();

        bool firedBefore = false;
        bool firedAfter = false;
        composition.AddingEffect += delegate { firedBefore = true; };
        composition.AddedEffect += delegate { firedAfter = true; };

        IEffect effect = composition.AddEffect("test", -1, 1, 2, StandardEffects.CreateBlurEffect(2, 2, 10));

        Assert.AreEqual("test", effect.Name);
        Assert.AreEqual(1, effect.Offset);
        Assert.AreEqual(2, effect.Duration);
        Assert.AreEqual(1, composition.Effects.Count);
        Assert.IsTrue(firedBefore);
        Assert.IsTrue(firedAfter);
    }
}
// Adds a blur effect to a track and verifies registration plus the serialized
// timeline XML.
public void AddEffectsToTrack()
{
    using (ITimeline timeline = new DefaultTimeline())
    {
        IGroup group = timeline.AddVideoGroup(24, 320, 200);
        ITrack track = group.AddTrack();
        // Blur from radius 2 to 15, added at offset 1 for 3 seconds.
        IEffect effect = track.AddEffect("test", -1, 1, 3, StandardEffects.CreateBlurEffect(2, 2, 15));
        Assert.AreEqual(1, track.Effects.Count);
        Assert.AreSame(effect, track.Effects[0]);
        // NOTE(review): the expected <effect> element carries no start/stop
        // attributes despite the offset/duration passed above, and the group has
        // no width attribute — confirm both match this version's serializer.
        PrepareToExecute(timeline, @"<timeline framerate=""30.0000000""> <group type=""video"" bitdepth=""24"" height=""200"" framerate=""30.0000000"" previewmode=""0""> <track> <effect clsid=""{7312498D-E87A-11D1-81E0-0000F87557DB}"" username=""test""> <param name=""PixelRadius"" value=""2""> <linear time=""2"" value=""15"" /> </param> </effect> </track> </group> </timeline>");
    }
}
/// <summary>
/// A video group created without an explicit FPS inherits the timeline's FPS,
/// and the BeforeGroupAdded/AfterGroupAdded events fire around its creation.
/// (Old-API block: Before/AfterGroupAdded event names and the FPS property.)
/// </summary>
public void AddVideoGroupWithDefaultFPS()
{
    bool beforeFired = false;
    bool afterFired = false;

    using (ITimeline timeline = new DefaultTimeline(12))
    {
        timeline.BeforeGroupAdded += new EventHandler(delegate { beforeFired = true; });
        timeline.AfterGroupAdded += new EventHandler<AfterGroupAddedEventArgs>(delegate { afterFired = true; });

        IGroup group = timeline.AddVideoGroup(32, 100, 100);

        Assert.AreSame(timeline, group.Timeline);
        Assert.AreEqual(timeline.FPS, group.FPS);
        Assert.IsTrue(beforeFired);
        Assert.IsTrue(afterFired);
    }
}
/// <summary>
/// AddTransition on a composition: the transition keeps its name, offset and
/// duration, is registered in Transitions, and both add events fire.
/// </summary>
public void AddTransition()
{
    using (ITimeline timeline = new DefaultTimeline())
    {
        IComposition composition = timeline.AddVideoGroup(24, 100, 100).AddComposition();

        bool firedBefore = false;
        bool firedAfter = false;
        composition.AddingTransition += delegate { firedBefore = true; };
        composition.AddedTransition += delegate { firedAfter = true; };

        ITransition transition = composition.AddTransition("test", 0, 2, StandardTransitions.CreateFade(), false);

        Assert.AreEqual(1, composition.Transitions.Count);
        Assert.AreEqual("test", transition.Name);
        Assert.AreEqual(0, transition.Offset);
        Assert.AreEqual(2, transition.Duration);
        Assert.IsTrue(firedBefore);
        Assert.IsTrue(firedAfter);
    }
}
// Generates a BPM-synchronised slideshow video: cycles through the frames in
// jpg2\ at a frame time derived from the BPM, lays the supplied audio
// underneath, renders to a temp file and then cuts it to the requested duration.
public async Task MakeVideo(double BPM, double BPM_Multiplier, double offset, string videoTitle, string audioFilePath, double videoDuration = 30)
{
    string temporaryVideoFileName = tempFilesPath + videoMakerIOUtilities.ReturnNameWithExtension(videoTitle, true);
    string VideoFileName = finalFilesPath + videoMakerIOUtilities.ReturnNameWithExtension(videoTitle, false);
    double FrameTimeFromBPM = videoMakerTimingUtilities.ReturnFrameTimeBasedOnBPM(BPM, BPM_Multiplier);
    // Rendering is CPU/IO heavy, so it runs off the calling (UI) thread.
    await Task.Run(() =>
    {
        // Requires 19 source frames under jpg2\ to exist before rendering.
        if (videoMakerIOUtilities.CheckIfImageFilesExist(imageFilesPath: $@"jpg2\", 19))
        {
            using (ITimeline timeline = new DefaultTimeline())
            {
                IGroup group = timeline.AddVideoGroup(32, 738, 650);
                ITrack videoTrack = group.AddTrack();
                // Hold the first frame for the intro offset before the beat starts.
                videoTrack.AddImage($@"jpg2\1.jpg", 0, offset);
                int framesNumber = videoMakerTimingUtilities.ReturnNumberOfFrames(videoDuration, offset, FrameTimeFromBPM);
                for (int i = 1; i <= framesNumber; i++)
                {
                    // Cycle through images 1..18 (i % 18 + 1), one image per
                    // beat-derived frame time.
                    int picNumber = i % 18;
                    videoTrack.AddImage($@"jpg2\{picNumber + 1}.jpg", 0, FrameTimeFromBPM);
                }
                ITrack audioTrack = timeline.AddAudioGroup().AddTrack();
                // NOTE(review): the extra 2.75s of audio past the video's duration
                // looks like a hand-tuned fudge factor — confirm why it is needed.
                audioTrack.AddAudio(audioFilePath, 0, videoTrack.Duration + 2.75);
                using (WindowsMediaRenderer renderer = new WindowsMediaRenderer(timeline, temporaryVideoFileName, WindowsMediaProfiles.HighQualityVideo))
                {
                    renderer.Render();
                }
            }
        }
    }
    );
    // Trim the rendered temp file down to the requested duration.
    if (CutVideo(temporaryVideoFileName, VideoFileName, videoDuration))
    {
        Messaging.ShowMessage($"Your video was generated successfully. Look for your {VideoFileName} file in app folder.", "Success");
    }
}
// Exercises every overload of AddVideo. Insertion is relative unless an
// InsertPosition is given, so offsets accumulate across the calls below.
public void AddVideo()
{
    // test all the overloads for AddVideo
    using (ITimeline timeline = new DefaultTimeline())
    {
        timeline.AddVideoGroup(24, 320, 240).AddTrack();

        // (file): appended at 0, natural 1s duration.
        IClip clip1 = timeline.AddVideo("1sec.wmv");
        Assert.AreEqual(0, clip1.Offset);
        Assert.AreEqual(1, clip1.Duration);

        // (file, offset): 1s gap after the previous clip => starts at 2.
        IClip clip2 = timeline.AddVideo("1sec.wmv", 1);
        Assert.AreEqual(2, clip2.Offset);
        Assert.AreEqual(1, clip2.Duration);

        // (file, clipStart, clipEnd): appended at 3 with a trimmed 0.5s duration.
        IClip clip3 = timeline.AddVideo("1sec.wmv", 0, 0.5);
        Assert.AreEqual(3, clip3.Offset);
        Assert.AreEqual(0.5, clip3.Duration);

        // (file, offset, clipStart, clipEnd): media trimmed to start at 0.5.
        IClip clip4 = timeline.AddVideo("1sec.wmv", 0, 0.5, 1.0);
        Assert.AreEqual(3.5, clip4.Offset);
        Assert.AreEqual(0.5, clip4.Duration);
        Assert.AreEqual(0.5, clip4.MediaStart);

        // Absolute insertion pins the clip at exactly 6s.
        IClip clip5 = timeline.AddVideo("1sec.wmv", InsertPosition.Absolute, 6, 0, -1);
        Assert.AreEqual(6, clip5.Offset);
        Assert.AreEqual(1, clip5.Duration);

        // Named overload.
        IClip clip6 = timeline.AddVideo("myclip", "1sec.wmv", InsertPosition.Absolute, 8, 0, 0.5);
        Assert.AreEqual(8, clip6.Offset);
        Assert.AreEqual(0, clip6.MediaStart);
        Assert.AreEqual(0.5, clip6.Duration);
        Assert.AreEqual("myclip", clip6.Name);
    }
}
/// <summary>
/// AddClip with an explicit name resolves the clip's duration from the media
/// and stores the file's full path.
/// </summary>
public void AddClipWithName()
{
    using (ITimeline timeline = new DefaultTimeline())
    {
        IGroup group = timeline.AddVideoGroup(24, 64, 64);
        ITrack track = group.AddTrack("root", -1);
        IClip namedClip = track.AddClip("clock animation", "transitions.wmv", GroupMediaType.Video, InsertPosition.Absolute, 0, 0, -1);

        Assert.AreEqual(1, track.Clips.Count);
        Assert.AreSame(track.Clips[0], namedClip);
        Assert.AreEqual("clock animation", namedClip.Name);
        Assert.AreEqual(7.999, namedClip.Duration);
        Assert.AreEqual(0, namedClip.Offset);
        Assert.AreEqual(Path.GetFullPath("transitions.wmv"), namedClip.File.FileName);
        Assert.AreEqual(0, namedClip.Effects.Count);
    }
}
/// <summary>
/// AddVideoWithAudio with shadow-copy enabled: the video clip keeps the source
/// path while the audio clip is backed by a distinct file (still a .wmv).
/// </summary>
public void ShadowCopyAudio()
{
    using (ITimeline timeline = new DefaultTimeline())
    {
        string fullPath = Path.GetFullPath("1sec.wmv");
        timeline.AddAudioGroup().AddTrack();
        timeline.AddVideoGroup(24, 320, 200).AddTrack();

        IAudioVideoClipPair pair = timeline.AddVideoWithAudio("1sec.wmv", true);

        Assert.AreEqual(fullPath, pair.VideoClip.File.FileName);
        Assert.AreNotEqual(fullPath, pair.AudioClip.File.FileName);
        Assert.IsTrue(pair.AudioClip.File.FileName.EndsWith(".wmv"));
    }
}
/// <summary>
/// A group created with a name reports that name and its owning timeline.
/// </summary>
public void AddVideoGroupWithName()
{
    using (ITimeline timeline = new DefaultTimeline(12))
    {
        IGroup namedGroup = timeline.AddVideoGroup("some video", 32, 100, 100);

        Assert.AreEqual(namedGroup.Name, "some video");
        Assert.AreSame(timeline, namedGroup.Timeline);
    }
}
/// <summary>
/// Adds a video clip to a timeline whose video group has no tracks; presumably
/// this should throw — confirm the expected-exception attribute on the test.
/// </summary>
public void AddVideoClipWhenNoSupportingTrackExists()
{
    using (ITimeline timeline = new DefaultTimeline())
    {
        // Group exists, but no track to receive the clip.
        timeline.AddVideoGroup(24, 100, 100);
        timeline.AddVideo("1sec.wmv");
    }
}
// Adds a fade transition to a track and verifies registration, properties, and
// the serialized timeline XML (start=1, stop=offset+duration=4).
public void AddTransitionsToTrack()
{
    using (ITimeline timeline = new DefaultTimeline())
    {
        IGroup group = timeline.AddVideoGroup(24, 320, 200);
        ITrack track = group.AddTrack();
        TransitionDefinition definition = StandardTransitions.CreateFade();
        // Fade at offset 1 for 3 seconds; swapInputs=false.
        ITransition transition = track.AddTransition("test", 1, 3, definition, false);
        Assert.AreEqual(1, track.Transitions.Count);
        Assert.AreSame(transition, track.Transitions[0]);
        Assert.AreEqual("test", transition.Name);
        Assert.AreSame(definition, transition.TransitionDefinition);
        // NOTE(review): the group element has height but no width attribute —
        // confirm this matches the serializer output for this library version.
        PrepareToExecute(timeline, @"<timeline framerate=""30.0000000""> <group type=""video"" bitdepth=""24"" height=""200"" framerate=""30.0000000"" previewmode=""0""> <track> <transition start=""1"" stop=""4"" clsid=""{16B280C5-EE70-11D1-9066-00C04FD9189D}"" username=""test"" /> </track> </group> </timeline>");
    }
}
// Renders the first queued video item (image + audio) to a WMV, reports progress
// to the UI, removes the item from the queue, and uploads the result.
private void makeMovie(int imageName)
{
    int simpleCount = 0;
    string simpleAudio = "";
    string videoPath = "";
    string title = "";
    string description = "";
    string tags = "";
    // Only the FIRST list entry's metadata is captured; the loop just counts the
    // rest. NOTE(review): a direct "first element" lookup would express the same
    // thing — confirm nothing relies on iterating the whole list.
    foreach (VideoListModel videoItem in vList.getData())
    {
        if (simpleCount == 0)
        {
            title = videoItem.VideoName;
            description = videoItem.VideoDescription;
            tags = videoItem.VideoTags;
            simpleAudio = videoItem.AudioLocation;
            videoPath = Application.StartupPath + "\\VideoData\\video\\" + videoItem.VideoName + ".wmv";
        }
        simpleCount++;
    }
    label9.Text = title;
    Console.WriteLine(Application.StartupPath + "\\VideoData\\images\\merged\\" + imageName.ToString() + ".jpg");
    using (ITimeline timeline = new DefaultTimeline(1))
    {
        IGroup group = timeline.AddVideoGroup(32, 1920, 1080);
        ITrack videoTrack = group.AddTrack();
        // NOTE(review): clip1 is added with duration 0 and then the same image is
        // added again for the audio's duration — confirm the zero-length clip is
        // intentional.
        IClip clip1 = videoTrack.AddImage(Application.StartupPath + "\\VideoData\\images\\merged\\" + imageName.ToString() + ".jpg", 0, 0);
        ITrack audioTrack = timeline.AddAudioGroup().AddTrack();
        IClip audio = audioTrack.AddAudio(simpleAudio);
        IClip clip2 = videoTrack.AddImage(Application.StartupPath + "\\VideoData\\images\\merged\\" + imageName.ToString() + ".jpg", 0, audio.Duration);
        // Progress updates are forwarded to the progress bar handler.
        var participant = new PercentageProgressParticipant(timeline);
        participant.ProgressChanged += new EventHandler<Splicer.Renderer.ProgressChangedEventArgs>(participant_ProgressChanged);
        using (WindowsMediaRenderer renderer = new WindowsMediaRenderer(timeline, videoPath, WindowsMediaProfiles.HighQualityVideo))
        {
            renderer.Render();
        }
    }
    progressBar1.Value = 0;
    // The rendered item is dequeued and the saved list/UI refreshed.
    vList.getData().RemoveAt(0);
    vList.saveData();
    refreshList();
    try
    {
        //2. Get credentials and upload the file
        Run(title, description, videoPath, tags);
    }
    catch (AggregateException ex)
    {
        foreach (var exception in ex.InnerExceptions)
        {
            Console.WriteLine(exception.Message);
        }
    }
}
/// <summary>
/// Adds in-memory Image clips via every AddImage overload. Each clip must be
/// backed by a unique temp file, and after the timeline is disposed those temp
/// files must have been deleted.
/// </summary>
public void AddInMemoryImageClipsToTrack()
{
    var tempFiles = new StringCollection();
    // Fix: the original delegate only checked for duplicates but never added
    // anything to tempFiles, so both the duplicate check and the post-dispose
    // cleanup assertions below were vacuous.
    Action<IClip> addClip = delegate(IClip clip)
    {
        if (tempFiles.Contains(clip.File.FileName))
        {
            Assert.Fail("TempFile: {0} duplicated", clip.File.FileName);
        }
        tempFiles.Add(clip.File.FileName);
    };

    string outputFile = "AddInMemoryImageClipsToTrack.wmv";
    Image image = Image.FromFile("image1.jpg");

    using (ITimeline timeline = new DefaultTimeline())
    {
        timeline.AddAudioGroup().AddTrack().AddAudio("testinput.wav", 0, 7.5);
        ITrack videoTrack = timeline.AddVideoGroup(24, 320, 200).AddTrack();

        addClip(videoTrack.AddImage(image)); // 0->1
        addClip(videoTrack.AddImage(image, 1)); // 2->3
        addClip(videoTrack.AddImage(image, 1, 0.5)); // 4->4.5
        addClip(videoTrack.AddImage(image, InsertPosition.Absolute, 5, 0, 1)); // 5->6

        IClip clip = videoTrack.AddImage("named", image, InsertPosition.Absolute, 7, 0.5, 1); // 7->7.5
        addClip(clip);
        Assert.AreEqual("named", clip.Name);
        Assert.AreEqual(7.5, videoTrack.Duration);

        using (var renderer = new WindowsMediaRenderer(timeline, outputFile, WindowsMediaProfiles.HighQualityVideo))
        {
            renderer.Render();
        }
    }

    // Disposing the timeline should have deleted every temp file it created.
    foreach (string file in tempFiles)
    {
        Assert.IsFalse(File.Exists(file));
    }
}
// Adds a blur effect to a clip: both effect events fire, the effect's properties
// round-trip, and the serialized timeline XML matches. (This block targets the
// library version using the InsertPosition.Absolute spelling and the
// Adding/AddedEffect event names.)
public void AddEffectToClip()
{
    bool beforeFired = false;
    bool afterFired = false;
    using (ITimeline timeline = new DefaultTimeline())
    {
        IGroup group = timeline.AddVideoGroup(24, 64, 64);
        ITrack track = group.AddTrack();
        IClip clip = track.AddClip("transitions.wmv", GroupMediaType.Video, InsertPosition.Absolute, 0, 0, -1);
        clip.AddingEffect += delegate { beforeFired = true; };
        clip.AddedEffect += delegate { afterFired = true; };
        // Blur ramping from radius 2 to 20 over the full clip duration.
        EffectDefinition defintion = StandardEffects.CreateBlurEffect(2, clip.Duration, 20);
        // Priority -1 resolves to 0 here, per the assertion below.
        IEffect effect = clip.AddEffect("blur", -1, 0, clip.Duration, defintion);
        Assert.IsTrue(beforeFired);
        Assert.IsTrue(afterFired);
        Assert.AreEqual("blur", effect.Name);
        Assert.AreEqual(0, effect.Priority);
        Assert.AreEqual(clip.Duration, effect.Duration);
        Assert.AreEqual(0, clip.Offset);
        Assert.AreSame(defintion, effect.EffectDefinition);
        PrepareToExecute(timeline, @"<timeline framerate=""30.0000000""> <group type=""video"" bitdepth=""24"" width=""64"" height=""64"" framerate=""30.0000000"" previewmode=""0""> <track> <clip start=""0"" stop=""7.9990000"" src=""transitions.wmv"" mstart=""0""> <effect start=""0"" stop=""7.9990000"" clsid=""{7312498D-E87A-11D1-81E0-0000F87557DB}"" username=""blur""> <param name=""PixelRadius"" value=""2""> <linear time=""7.9990000"" value=""20"" /> </param> </effect> </clip> </track> </group> </timeline>");
    }
}
/// <summary>
/// Renders every image in Output\ to an AVI at 1/48s per frame. When
/// <paramref name="append"/> is true the new footage is rendered to
/// <paramref name="tempFileName"/> first, then concatenated after the existing
/// <paramref name="outputFile"/> and the result swapped into place.
/// </summary>
public static void createVideo(string outputFile, string tempFileName, int width, int height, bool append)
{
    // When not appending, render straight to the final output file.
    if (!append)
    {
        tempFileName = outputFile;
    }

    using (ITimeline timeline = new DefaultTimeline())
    {
        IGroup group = timeline.AddVideoGroup(32, width, height);
        ITrack videoTrack = group.AddTrack();
        foreach (var filename in Directory.GetFiles(@"Output\"))
        {
            videoTrack.AddImage(filename, 0, (1.0 / 48.0));
        }
        // An (empty) audio track is still added, matching the original layout.
        timeline.AddAudioGroup().AddTrack();

        // Fix: the original wrapped this in `catch (Exception ex) { throw; }`,
        // a no-op rethrow with an unused variable; removed.
        using (AviFileRenderer renderer = new AviFileRenderer(timeline, tempFileName))
        {
            renderer.Render();
        }
    }

    if (append)
    {
        using (ITimeline timeline = new DefaultTimeline())
        {
            IGroup group = timeline.AddVideoGroup(32, width, height);
            // First (empty) track preserved to keep the original track layout.
            group.AddTrack();
            var firstVideoClip = group.AddTrack().AddVideo(outputFile);
            group.AddTrack().AddVideo(tempFileName, firstVideoClip.Duration);
            timeline.AddAudioGroup().AddTrack();

            using (AviFileRenderer renderer = new AviFileRenderer(timeline, "_" + outputFile))
            {
                renderer.Render();
            }
            // Let pending UI work drain and give the renderer time to release its
            // file handles before swapping files. NOTE(review): the fixed 1s sleep
            // is fragile — confirm handles are released on dispose.
            Application.DoEvents();
            System.Threading.Thread.Sleep(1000);
            File.Delete(outputFile);
            File.Delete(tempFileName);
            File.Move("_" + outputFile, outputFile);
        }
    }
}
/// <summary>
/// A clip added to a track reports that track as its Container.
/// </summary>
public void ClipsAssignedContainer()
{
    using (ITimeline timeline = new DefaultTimeline())
    {
        ITrack track = timeline.AddVideoGroup(24, 320, 200).AddTrack();
        IClip clip = track.AddClip("image1.jpg", GroupMediaType.Image, InsertPosition.Relative, 0, 0, 2);

        Assert.AreSame(track, clip.Container);
    }
}