Example 1
        /// <summary>
        /// Renders the project as an AviSynth (.avs) script: declares the source media,
        /// builds and splices the video clips, adds pixel-stat and audio lines, then fills
        /// the placeholders of the TEMPLATE_AVS file and writes the result to fileOut.
        /// </summary>
        internal static void ExportToAvs(VidkaProj Proj, string fileOut)
        {
            var sbFiles         = new StringBuilder();
            var sbClips         = new StringBuilder();
            var sbClipsSplice   = new StringBuilder();
            var sbClipStats     = new StringBuilder();
            var sbPostOp        = new StringBuilder();
            var sbAudio         = new StringBuilder();
            var renderableProj  = Proj.GetVideoClipsForRendering();
            var renderableClips = renderableProj.Clips;
            var lastClip        = renderableClips.LastOrDefault();

            // Declare one source variable per media file (video, image or audio-only source).
            foreach (var file in renderableProj.Files)
            {
                if (file.Type == RenderableMediaFileType.DirectShowSource)
                {
                    //sbFiles.Append($"{file.VarName} = DirectShowSource(\"{file.FileName}\", audio=True, fps=proj_frameRate, convertfps=true)");
                    var audioParam = file.HasAudio ? ", atrack=-1" : "";
                    sbFiles.Append($"{file.VarName} = FFmpegSource2(\"{file.FileName}\"{audioParam})");
                }
                else if (file.Type == RenderableMediaFileType.ImageSource)
                {
                    sbFiles.Append($"{file.VarName} = ImageSource(\"{file.FileName}\", start=0, end={renderableProj.MaxLengthOfImageClip}, fps=proj_frameRate)");
                }
                else if (file.Type == RenderableMediaFileType.AudioSource)
                {
                    sbFiles.Append($"{file.VarName} = DirectShowSource(\"{file.FileName}\")");
                }
                sbFiles.Append("\n");
            }
            // Emit one NeutralClip / NeutralClipImage assignment per renderable clip,
            // appending any custom-audio, mute and user-defined post-op filters.
            foreach (var clip in renderableClips)
            {
                sbPostOp.Clear();
                if (clip.HasCustomAudio)
                {
                    sbPostOp.Append(String.Format(".AddCustomAudio({0}, {1}, fstart={2}, fend={3})",
                                                  clip.CustomAudioFile.VarName, clip.CustomAudioOffset, clip.FrameStart, clip.FrameEnd - 1));
                }
                if (clip.IsMuted)
                {
                    sbPostOp.Append(".MuteThisClip()");
                }
                sbPostOp.Append((clip.PostOp ?? "").Replace("\n", ""));
                if (clip.ClipType == VideoClipRenderableType.Video)
                {
                    sbClips.Append(String.Format("{0} = NeutralClip({1}, {2}, {3}){4}\n",
                                                 clip.VarName, clip.VideoFile.VarName, clip.FrameStart, clip.FrameEnd - 1, sbPostOp.ToString()));
                }
                else if (clip.ClipType == VideoClipRenderableType.Image ||
                         clip.ClipType == VideoClipRenderableType.Text)
                {
                    sbClips.Append(String.Format("{0} = NeutralClipImage({1}, {2}){3}\n",
                                                 clip.VarName, clip.VideoFile.VarName, clip.LengthFrameCalc, sbPostOp.ToString()));
                }
            }

            // Build the splice list: each clip gets its easing trim and audio mixes,
            // followed by a line-continuation comma except for the last clip.
            foreach (var clip in renderableClips)
            {
                var lineEnding = (clip != lastClip) ? ", \\\n" : " ";
                sbPostOp.Clear();
                if (clip.EasingLeft > 0 || clip.EasingRight > 0)
                {
                    sbPostOp.Append(String.Format(".Trim({0}, {1})",
                                                  clip.EasingLeft, clip.LengthFrameCalc - clip.EasingRight));
                }
                foreach (var mix in clip.MixesAudioFromVideo)
                {
                    sbPostOp.Append(String.Format(".MixAudioFromClip({0}, {1}, {2}, {3})",
                                                  mix.ClipVarName, mix.FrameStart, mix.FrameEnd, mix.FrameOffset));
                }
                sbClipsSplice.Append(String.Format("\t{0}{1}{2}",
                                                   clip.VarName, sbPostOp.ToString(), lineEnding));
            }

            // clip stats
            foreach (var clip in renderableProj.StatVideos)
            {
                sbClipStats.Append(String.Format("collectpixeltypestat({0}, {1})\n",
                                                 clip.VideoFile.VarName, clip.LengthFrameCalc));
            }

            // Overlay each audio-only clip as a voiceover track mixed into the main audio.
            foreach (var clip in renderableProj.AudioClips)
            {
                sbPostOp.Clear();
                sbPostOp.Append((clip.PostOp ?? "").Replace("\n", ""));
                sbAudio.Append(String.Format(@"
voiceover=BlankClip(last,{0}) ++BlankClip(last, {1}).AudioDub(DirectShowSource(""{2}"", fps=proj_frameRate, convertfps=true).ResampleAudio(44100)).Trim({3}, {1}){4}
MixAudio(last,voiceover, clip1_factor=1, clip2_factor=1)", clip.FrameOffset, clip.FrameEnd, clip.FileName, clip.FrameStart, sbPostOp.ToString()));
            }

            // Fill in the .avs template; a single video clip needs no UnalignedSplice wrapper.
            var templateFile        = GetFileFromThisAppDirectory(TEMPLATE_AVS);
            var templateStr         = File.ReadAllText(templateFile);
            var strVideoClipsSplice = (Proj.ClipsVideo.Count <= 1)
                ? sbClipsSplice.ToString()
                : "UnalignedSplice( \\\n" + sbClipsSplice.ToString() + "\\\n)";
            // TODO: inject project properties
            var outputStr = templateStr
                            .Replace("{proj-fps}", "" + Proj.FrameRate)
                            .Replace("{proj-width}", "" + Proj.Width)
                            .Replace("{proj-height}", "" + Proj.Height)
                            .Replace("{video-files}", sbFiles.ToString())
                            .Replace("{video-clips}", sbClips.ToString() + "\n\n" + strVideoClipsSplice)
                            .Replace("{collectpixeltypestat-videos}", sbClipStats.ToString())
                            .Replace("{audio-clips}", sbAudio.ToString())
            ;

            File.WriteAllText(fileOut, outputStr);
        }
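
For context, a minimal sketch of how the exporter might be invoked. Only ExportToAvs(VidkaProj, string) and the VidkaProj members used above come from the snippet; the enclosing class name VidkaAvsExporter, the parameterless constructor, and the output path are assumptions for illustration.

        // Hypothetical caller (must live in the same assembly, since ExportToAvs is internal).
        var proj = new VidkaProj();                       // assumed parameterless constructor
        // ... populate proj with clips, frame rate, width/height ...
        VidkaAvsExporter.ExportToAvs(proj, @"C:\temp\render.avs");
        // The generated .avs script can then be opened by any AviSynth-aware tool for encoding.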