/// <summary>
/// Renders the video from the music file/background image.
/// Progress handlers for audioProgress[0] and videoProgress[0] are wired to the
/// supplied progress bars before rendering starts; the render itself runs
/// synchronously on the calling thread.
/// </summary>
public void Render(ProgressBar pbarAudio, ProgressBar pbarVideo)
{
    Console.WriteLine("\nBegin render...");

    using (ITimeline timeline = new DefaultTimeline())
    {
        Console.Write("Create group...");

        // create our primary group
        IGroup group = timeline.AddVideoGroup("background", 15.0, 32, 720, 480);
        Console.WriteLine("done.");

        Console.Write("Create tracks...");

        // add a video and audio track to the group
        ITrack videoTrack = group.AddTrack();
        ITrack audioTrack = timeline.AddAudioGroup().AddTrack();
        Console.WriteLine("done.");

        // add the audio and the background image
        Console.Write("Create audio track...");
        IClip audio = audioTrack.AddAudio(this.mp3path);
        Console.WriteLine("done.");

        Console.Write("Create video track... length: " + audio.Duration.ToString() + "...");
        IClip clip1 = videoTrack.AddImage(this.backgroundImage, 0, audio.Duration);
        Console.WriteLine("done.");

        // set up progress indicators
        audioProgress[0] = new PercentageProgressParticipant(timeline);
        videoProgress[0] = new PercentageProgressParticipant(timeline);

        Console.WriteLine("Timeline duration: " + timeline.Duration);
        Console.WriteLine("Audio duration: " + audio.Duration);
        Console.WriteLine("IMG duration: " + videoTrack.Duration);

        // render our video out
        Console.Write("Render Start...");

        using (renderer = new Splicer.Renderer.WindowsMediaRenderer(
            timeline, videopath, WindowsMediaProfiles.HighQualityVideo,
            videoProgress, audioProgress))
        {
            this.pbarA = pbarAudio;
            this.pbarV = pbarVideo;

            audioProgress[0].ProgressChanged +=
                new EventHandler<Splicer.Renderer.ProgressChangedEventArgs>(listMusicItem_ProgressChangedAudio);
            videoProgress[0].ProgressChanged +=
                new EventHandler<Splicer.Renderer.ProgressChangedEventArgs>(listMusicItem_ProgressChangedVideo);

            renderer.Render();
        }

        Console.WriteLine("Render Completed.");

        //AsyncCallback cb = new AsyncCallback(CallBack);
        //IAsyncResult ar = renderer.BeginRender(cb, renderer.State);
    }
}
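// --- Hedged sketch (not from the original): an asynchronous variant of the render above.
// The commented-out BeginRender call suggests Splicer's renderer supports the classic
// .NET Begin/End async pattern; EndRender and the exact signatures are assumptions here
// and should be verified against the Splicer version in use.
private void RenderAsyncSketch(IRenderer renderer)
{
    AsyncCallback cb = ar =>
    {
        // assumed: EndRender completes the operation and surfaces any render exception
        renderer.EndRender(ar);
        Console.WriteLine("Render Completed (async).");
    };

    // start the render without blocking the calling (UI) thread
    IAsyncResult result = renderer.BeginRender(cb, null);
    Console.WriteLine("Render started...");
}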
public void FadeBetweenImages()
{
    // generates a little slide-show, with audio track and fades between images.
    string outputFile = "FadeBetweenImages.wmv";

    using (ITimeline timeline = new DefaultTimeline())
    {
        IGroup group = timeline.AddVideoGroup(32, 160, 100);

        ITrack videoTrack = group.AddTrack();
        IClip clip1 = videoTrack.AddImage("image1.jpg", 0, 2); // play first image for a little while
        IClip clip2 = videoTrack.AddImage("image2.jpg", 0, 2); // and the next
        IClip clip3 = videoTrack.AddImage("image3.jpg", 0, 2); // and the next
        IClip clip4 = videoTrack.AddImage("image4.jpg", 0, 2); // and finally the last

        double halfDuration = 0.5;

        // fade out and back in
        group.AddTransition(clip2.Offset - halfDuration, halfDuration, StandardTransitions.CreateFade(), true);
        group.AddTransition(clip2.Offset, halfDuration, StandardTransitions.CreateFade(), false);

        // again
        group.AddTransition(clip3.Offset - halfDuration, halfDuration, StandardTransitions.CreateFade(), true);
        group.AddTransition(clip3.Offset, halfDuration, StandardTransitions.CreateFade(), false);

        // and again
        group.AddTransition(clip4.Offset - halfDuration, halfDuration, StandardTransitions.CreateFade(), true);
        group.AddTransition(clip4.Offset, halfDuration, StandardTransitions.CreateFade(), false);

        // add some audio
        ITrack audioTrack = timeline.AddAudioGroup().AddTrack();
        IClip audio = audioTrack.AddAudio("testinput.wav", 0, videoTrack.Duration);

        // create an audio envelope effect, this will:
        // fade the audio from 0% to 100% in 1 second,
        // play at full volume until 1 second before the end of the track,
        // then fade back out to 0% volume
        audioTrack.AddEffect(0, audio.Duration,
            StandardEffects.CreateAudioEnvelope(1.0, 1.0, 1.0, audio.Duration));

        // render our slideshow out to a windows media file
        using (IRenderer renderer =
            new WindowsMediaRenderer(timeline, outputFile, WindowsMediaProfiles.HighQualityVideo))
        {
            renderer.Render();
        }
    }
}
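// --- Hedged sketch (not from the original): the transition pairs above follow a fixed
// pattern, so the same slide-show can be built with a loop. Only calls already shown in
// these examples are used; the image list and durations are illustrative assumptions.
string[] images = { "image1.jpg", "image2.jpg", "image3.jpg", "image4.jpg" };
const double clipDuration = 2;
const double fadeDuration = 0.5;

using (ITimeline timeline = new DefaultTimeline())
{
    IGroup group = timeline.AddVideoGroup(32, 160, 100);
    ITrack videoTrack = group.AddTrack();

    for (int i = 0; i < images.Length; i++)
    {
        IClip clip = videoTrack.AddImage(images[i], 0, clipDuration);

        if (i > 0)
        {
            // fade out over the end of the previous clip, then fade back in
            group.AddTransition(clip.Offset - fadeDuration, fadeDuration, StandardTransitions.CreateFade(), true);
            group.AddTransition(clip.Offset, fadeDuration, StandardTransitions.CreateFade(), false);
        }
    }
}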
public void AddClipUsesAssistant()
{
    using (ITimeline timeline = new DefaultTimeline())
    {
        var assistant = new MockMediaFileAssistant(true);
        timeline.InstallAssistant(assistant);
        Assert.AreEqual(0, assistant.ExecutionCount);

        ITrack track = timeline.AddAudioGroup().AddTrack();
        track.AddAudio("1sec.wav");

        Assert.AreEqual(1, assistant.ExecutionCount);
    }
}
public void AddClipIgnoresUnrequiredAssistant()
{
    using (ITimeline timeline = new DefaultTimeline())
    {
        MockMediaFileAssistant assistant = new MockMediaFileAssistant(false);
        timeline.InstallAssistant(assistant);
        Assert.AreEqual(0, assistant.ExecutionCount);

        ITrack track = timeline.AddAudioGroup().AddTrack();
        track.AddAudio("1sec.wav");

        Assert.AreEqual(0, assistant.ExecutionCount);
    }
}
private static void PopulateTimeline(ApplicationOptions options, ITimeline timeline)
{
    IGroup group = timeline.AddVideoGroup(32, options.Width, options.Height);

    ITrack audioTrack = timeline.AddAudioGroup().AddTrack();
    IClip audioClip = audioTrack.AddAudio(options.SoundtrackFile);

    ITrack videoTrack = group.AddTrack();

    // spread the source images evenly across the duration of the soundtrack
    double step = audioClip.Duration / options.SourceImage.Length;

    for (int i = 0; i < options.SourceImage.Length; i++)
    {
        videoTrack.AddImage(options.SourceImage[i], 0, step);
    }
}
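// --- Hedged sketch (not from the original): PopulateTimeline only builds the timeline,
// so a caller still needs to render it. This reuses the WindowsMediaRenderer pattern from
// the other examples; the OutputFile property on ApplicationOptions is an assumed name.
private static void BuildAndRender(ApplicationOptions options)
{
    using (ITimeline timeline = new DefaultTimeline())
    {
        PopulateTimeline(options, timeline);

        using (IRenderer renderer = new WindowsMediaRenderer(
            timeline, options.OutputFile, WindowsMediaProfiles.HighQualityVideo))
        {
            renderer.Render();
        }
    }
}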
static void Main(string[] args)
{
    string folderPath = Path.Combine(AppDomain.CurrentDomain.BaseDirectory, "Pics");

    using (ITimeline timeline = new DefaultTimeline(30))
    {
        double halfDuration = 1;

        IGroup group = timeline.AddVideoGroup("video", 30, 32, 1920, 1080);
        ITrack videoTrack = group.AddTrack();

        List<string> filePaths = Directory.EnumerateFiles(folderPath, "*.jpg").ToList();

        for (int i = 0; i < filePaths.Count; i++)
        {
            IClip clip = videoTrack.AddImage(filePaths[i], 0, 10);

            if (i > 0)
            {
                group.AddTransition(clip.Offset - halfDuration, halfDuration, StandardTransitions.CreateFade(), true);
                group.AddTransition(clip.Offset, halfDuration, StandardTransitions.CreateFade(), false);
            }
        }

        string folderPath2 = Path.Combine(AppDomain.CurrentDomain.BaseDirectory, "Audio");
        string audioPath = Directory.EnumerateFiles(folderPath2, "*.wav").FirstOrDefault(x => x != null);

        ITrack audioTrack = timeline.AddAudioGroup().AddTrack();
        IClip audio = audioTrack.AddAudio(audioPath, 0, videoTrack.Duration);
        audioTrack.AddEffect(0, audio.Duration, StandardEffects.CreateAudioEnvelope(1.0, 1.0, 1.0, audio.Duration));

        using (var renderer = new WindowsMediaRenderer(timeline, "output.wmv", WindowsMediaProfiles.FullHD))
        {
            renderer.Render();
        }
    }

    Console.WriteLine("Hello World!");
}
public async Task MakeVideo(double BPM, double BPM_Multiplier, double offset, string videoTitle,
    string audioFilePath, double videoDuration = 30)
{
    string temporaryVideoFileName = tempFilesPath + videoMakerIOUtilities.ReturnNameWithExtension(videoTitle, true);
    string VideoFileName = finalFilesPath + videoMakerIOUtilities.ReturnNameWithExtension(videoTitle, false);

    double FrameTimeFromBPM = videoMakerTimingUtilities.ReturnFrameTimeBasedOnBPM(BPM, BPM_Multiplier);

    await Task.Run(() =>
    {
        if (videoMakerIOUtilities.CheckIfImageFilesExist(imageFilesPath: $@"jpg2\", 19))
        {
            using (ITimeline timeline = new DefaultTimeline())
            {
                IGroup group = timeline.AddVideoGroup(32, 738, 650);
                ITrack videoTrack = group.AddTrack();

                // hold the first frame for the initial offset, then cycle through the frames
                videoTrack.AddImage($@"jpg2\1.jpg", 0, offset);

                int framesNumber = videoMakerTimingUtilities.ReturnNumberOfFrames(videoDuration, offset, FrameTimeFromBPM);

                for (int i = 1; i <= framesNumber; i++)
                {
                    int picNumber = i % 18;
                    videoTrack.AddImage($@"jpg2\{picNumber + 1}.jpg", 0, FrameTimeFromBPM);
                }

                ITrack audioTrack = timeline.AddAudioGroup().AddTrack();
                audioTrack.AddAudio(audioFilePath, 0, videoTrack.Duration + 2.75);

                using (WindowsMediaRenderer renderer = new WindowsMediaRenderer(
                    timeline, temporaryVideoFileName, WindowsMediaProfiles.HighQualityVideo))
                {
                    renderer.Render();
                }
            }
        }
    });

    if (CutVideo(temporaryVideoFileName, VideoFileName, videoDuration))
    {
        Messaging.ShowMessage($"Your video was generated successfully. Look for your {VideoFileName} file in the app folder.", "Success");
    }
}
public void AddAudioOverloads()
{
    // test all the overloads for AddAudio
    using (ITimeline timeline = new DefaultTimeline())
    {
        ITrack track = timeline.AddAudioGroup().AddTrack();

        IClip clip1 = track.AddAudio("1sec.wav");
        Assert.AreEqual(0, clip1.Offset);
        Assert.AreEqual(1, clip1.Duration);

        IClip clip2 = track.AddAudio("1sec.wav", 1);
        Assert.AreEqual(2, clip2.Offset);
        Assert.AreEqual(1, clip2.Duration);

        IClip clip3 = track.AddAudio("1sec.wav", 0, 0.5);
        Assert.AreEqual(3, clip3.Offset);
        Assert.AreEqual(0.5, clip3.Duration);

        IClip clip4 = track.AddAudio("1sec.wav", 0, 0.5, 1.0);
        Assert.AreEqual(3.5, clip4.Offset);
        Assert.AreEqual(0.5, clip4.Duration);
        Assert.AreEqual(0.5, clip4.MediaStart);

        IClip clip5 = track.AddAudio("1sec.wav", InsertPosition.Absoloute, 6, 0, -1);
        Assert.AreEqual(6, clip5.Offset);
        Assert.AreEqual(1, clip5.Duration);

        IClip clip6 = track.AddAudio("myclip", "1sec.wav", InsertPosition.Absoloute, 8, 0, 0.5);
        Assert.AreEqual(8, clip6.Offset);
        Assert.AreEqual(0, clip6.MediaStart);
        Assert.AreEqual(0.5, clip6.Duration);
        Assert.AreEqual("myclip", clip6.Name);
    }
}
private void makeMovie(int imageName)
{
    int simpleCount = 0;
    string simpleAudio = "";
    string videoPath = "";
    string title = "";
    string description = "";
    string tags = "";

    // take the metadata and audio path from the first queued video item
    foreach (VideoListModel videoItem in vList.getData())
    {
        if (simpleCount == 0)
        {
            title = videoItem.VideoName;
            description = videoItem.VideoDescription;
            tags = videoItem.VideoTags;
            simpleAudio = videoItem.AudioLocation;
            videoPath = Application.StartupPath + "\\VideoData\\video\\" + videoItem.VideoName + ".wmv";
        }
        simpleCount++;
    }

    label9.Text = title;
    Console.WriteLine(Application.StartupPath + "\\VideoData\\images\\merged\\" + imageName.ToString() + ".jpg");

    using (ITimeline timeline = new DefaultTimeline(1))
    {
        IGroup group = timeline.AddVideoGroup(32, 1920, 1080);
        ITrack videoTrack = group.AddTrack();
        IClip clip1 = videoTrack.AddImage(
            Application.StartupPath + "\\VideoData\\images\\merged\\" + imageName.ToString() + ".jpg", 0, 0);

        ITrack audioTrack = timeline.AddAudioGroup().AddTrack();
        IClip audio = audioTrack.AddAudio(simpleAudio);

        // show the same image for the full duration of the audio
        IClip clip2 = videoTrack.AddImage(
            Application.StartupPath + "\\VideoData\\images\\merged\\" + imageName.ToString() + ".jpg", 0, audio.Duration);

        // note: the participant is created but never handed to the renderer below;
        // compare the first example, which passes the participant arrays to the
        // WindowsMediaRenderer constructor so that progress events fire
        var participant = new PercentageProgressParticipant(timeline);
        participant.ProgressChanged +=
            new EventHandler<Splicer.Renderer.ProgressChangedEventArgs>(participant_ProgressChanged);

        using (WindowsMediaRenderer renderer =
            new WindowsMediaRenderer(timeline, videoPath, WindowsMediaProfiles.HighQualityVideo))
        {
            renderer.Render();
        }
    }

    progressBar1.Value = 0;

    vList.getData().RemoveAt(0);
    vList.saveData();
    refreshList();

    try
    {
        // 2. Get credentials and upload the file
        Run(title, description, videoPath, tags);
    }
    catch (AggregateException ex)
    {
        foreach (var exception in ex.InnerExceptions)
        {
            Console.WriteLine(exception.Message);
        }
    }
}