示例#1
0
        /// <summary>
        /// Checks whether the file can be opened with the L-SMASH Works source
        /// filters (LSMASHVideoSource / LSMASHAudioSource).
        /// </summary>
        /// <param name="inputFile">Path of the media file to probe.</param>
        /// <param name="bVideo">True to probe the video source filter, false for the audio one.</param>
        /// <returns>True if the extension is supported and AviSynth can parse the probe script.</returns>
        public static bool UseLSMASHVideoSource(string inputFile, bool bVideo)
        {
            // Container extensions the L-SMASH source filters accept.
            // Data-driven lookup replaces the original eight chained Equals calls.
            string[] supportedExtensions = { ".mp4", ".m4v", ".mov", ".m4a", ".3gp", ".3g2", ".aac", ".qt" };

            string extension = Path.GetExtension(inputFile).ToLowerInvariant();
            if (Array.IndexOf(supportedExtensions, extension) < 0)
            {
                return false;
            }

            // Build a minimal probe script: load the plugin, then open the file.
            StringBuilder script = new StringBuilder();
            script.AppendFormat("LoadPlugin(\"{0}\"){1}", MainForm.Instance.Settings.LSMASH.Path, Environment.NewLine);
            script.AppendFormat("{0}(\"{1}\")", (bVideo ? "LSMASHVideoSource" : "LSMASHAudioSource"), inputFile);

            try
            {
                using (AviSynthScriptEnvironment env = new AviSynthScriptEnvironment())
                {
                    using (AviSynthClip a = env.ParseScript(script.ToString()))
                    {
                        // Parsing succeeded, so the source filter can open the file.
                        return true;
                    }
                }
            }
            catch
            {
                // Any AviSynth failure means the file cannot be opened this way.
                return false;
            }
        }
示例#2
0
        /// <summary>
        /// Opens or parses an AviSynth script and captures the resulting clip's
        /// media properties into <c>info</c>.
        /// </summary>
        /// <param name="script">Script source text when <paramref name="parse"/> is true; otherwise a path to a script file.</param>
        /// <param name="parse">True to parse the text directly, false to open a script file from disk.</param>
        private AvsFile(string script, bool parse)
        {
            try
            {
                this.enviroment = new AviSynthScriptEnvironment();
                this.clip = parse ? enviroment.ParseScript(script, AviSynthColorspace.RGB24) : enviroment.OpenScriptFile(script, AviSynthColorspace.RGB24);

                // 'checked' makes the int -> ulong conversions below throw on
                // overflow (e.g. negative values) instead of silently wrapping.
                checked
                {
                    ulong width = (ulong)clip.VideoWidth;
                    ulong height = (ulong)clip.VideoHeight;
                    // MeGUI_darx/MeGUI_dary are optional script variables carrying
                    // the display aspect ratio; -1 is the "not set" default.
                    info = new MediaFileInfo(
                        clip.HasVideo, width, height,
                        new Dar(clip.GetIntVariable("MeGUI_darx", -1),
                              clip.GetIntVariable("MeGUI_dary", -1),
                              width, height),
                              (ulong)clip.num_frames,
                              ((double)clip.raten) / ((double)clip.rated),
                              (clip.SamplesCount != 0));
                }
            }
            catch (Exception)
            {
                // Construction failed: release the clip/environment before rethrowing.
                cleanup();
                throw;
            }
        }
示例#3
0
        /// <summary>
        /// Opens or parses an AviSynth script and captures the clip's video
        /// properties (or its audio properties for audio-only scripts) into a
        /// VideoInformation record.
        /// </summary>
        /// <param name="script">Script source text when <paramref name="parse"/> is true; otherwise a path to a script file.</param>
        /// <param name="parse">True to parse the text directly, false to open a script file from disk.</param>
        private AvsFile(string script, bool parse)
        {
            try
            {
                this.enviroment = new AviSynthScriptEnvironment();
                this.clip       = parse ? enviroment.ParseScript(script, AviSynthColorspace.RGB24) : enviroment.OpenScriptFile(script, AviSynthColorspace.RGB24);

                // 'checked' makes the int -> ulong casts below throw on overflow
                // (e.g. negative values) instead of silently wrapping.
                checked
                {
                    if (clip.HasVideo)
                    {
                        ulong width  = (ulong)clip.VideoWidth;
                        ulong height = (ulong)clip.VideoHeight;
                        // MeGUI_darx/MeGUI_dary are optional script variables carrying
                        // the display aspect ratio; -1 is the "not set" default.
                        info = new VideoInformation(
                            clip.HasVideo, width, height,
                            new Dar(clip.GetIntVariable("MeGUI_darx", -1),
                                    clip.GetIntVariable("MeGUI_dary", -1),
                                    width, height),
                            (ulong)clip.num_frames,
                            ((double)clip.raten) / ((double)clip.rated),
                            clip.raten, clip.rated);
                    }
                    else
                    {
                        // Audio-only clip: reuse VideoInformation with the sample
                        // count / sample rate in the count / rate slots.
                        info = new VideoInformation(false, 0, 0, Dar.A1x1, (ulong)clip.SamplesCount, (double)clip.AudioSampleRate, 0, 0);
                    }
                }
            }
            catch (Exception)
            {
                // Construction failed: release the clip/environment before rethrowing.
                cleanup();
                throw;
            }
        }
示例#4
0
        /// <summary>
        /// Launches the external audio encoder process with redirected standard
        /// streams and starts the stdout/stderr reader threads.
        /// </summary>
        /// <param name="a">Open AviSynth clip supplying the audio parameters substituted into the command line.</param>
        /// <exception cref="ApplicationException">Wraps any failure to start the encoder.</exception>
        private void createEncoderProcess(AviSynthClip a)
        {
            try
            {
                _encoderProcess = new Process();
                ProcessStartInfo info = new ProcessStartInfo();
                // Command line arguments, to be passed to encoder
                // {0} means output file name
                // {1} means samplerate in Hz
                // {2} means bits per sample
                // {3} means channel count
                // {4} means samplecount
                // {5} means size in bytes
                info.Arguments = string.Format(_encoderCommandLine,
                                               audioJob.Output, a.AudioSampleRate, a.BitsPerSample, a.ChannelsCount, a.SamplesCount, a.AudioSizeInBytes);
                info.FileName = _encoderExecutablePath;
                _log.LogValue("Commandline", _encoderExecutablePath + " " + info.Arguments);
                // All three std streams are redirected: PCM is piped into stdin and
                // dedicated reader threads drain stdout/stderr to avoid pipe deadlocks.
                info.UseShellExecute        = false;
                info.RedirectStandardInput  = true;
                info.RedirectStandardOutput = true;
                info.RedirectStandardError  = true;
                info.CreateNoWindow         = true;
                _encoderProcess.StartInfo   = info;
                _encoderProcess.Start();

                // Take priority from Avisynth thread rather than default in settings
                // just in case user has managed to change job setting before getting here.
                // NOTE(review): other thread priorities (BelowNormal/Highest) leave the
                // process at its default priority class — presumably intentional; confirm.
                if (_encoderThread.Priority == ThreadPriority.Lowest)
                {
                    _encoderProcess.PriorityClass = ProcessPriorityClass.Idle;
                }
                else if (_encoderThread.Priority == ThreadPriority.Normal)
                {
                    _encoderProcess.PriorityClass = ProcessPriorityClass.Normal;
                }
                else if (_encoderThread.Priority == ThreadPriority.AboveNormal)
                {
                    _encoderProcess.PriorityClass = ProcessPriorityClass.High;
                }

                // Reader threads keep the encoder's output pipes drained.
                _readFromStdOutThread = new Thread(new ThreadStart(readStdOut));
                _readFromStdErrThread = new Thread(new ThreadStart(readStdErr));
                _readFromStdOutThread.Start();
                _readFromStdOutThread.Priority = ThreadPriority.Normal;
                _readFromStdErrThread.Start();
                _readFromStdErrThread.Priority = ThreadPriority.Normal;
            }
            catch (Exception e)
            {
                throw new ApplicationException("Can't start encoder: " + e.Message, e);
            }
        }
示例#5
0
File: AvsReader.cs  Project: pphh77/MeGui
 /// <summary>
 /// Releases the AviSynth clip and script environment (clip first) and
 /// suppresses finalization of this instance.
 /// </summary>
 private void cleanup()
 {
     if (this.clip != null)
     {
         // Fields are typed as the concrete wrappers; dispose via IDisposable.
         (this.clip as IDisposable).Dispose();
         this.clip = null;
     }
     if (this.enviroment != null)
     {
         (this.enviroment as IDisposable).Dispose();
         this.enviroment = null;
     }
     // NOTE(review): SuppressFinalize here suggests this helper is shared by
     // Dispose() and a finalizer path — confirm against the full class.
     GC.SuppressFinalize(this);
 }
示例#6
0
 /// <summary>
 /// Determines whether the given AviSynth script text exposes a video track.
 /// </summary>
 /// <param name="strAVSScript">AviSynth script source to parse.</param>
 /// <param name="strErrorText">Empty on success; the parse error message on failure.</param>
 /// <returns>True if the parsed clip has video; false on no video or any parse error.</returns>
 public static bool AVSScriptHasVideo(String strAVSScript, out string strErrorText)
 {
     strErrorText = String.Empty;
     try
     {
         using (AviSynthScriptEnvironment env = new AviSynthScriptEnvironment())
         {
             using (AviSynthClip a = env.ParseScript(strAVSScript))
             {
                 return a.HasVideo;
             }
         }
     }
     catch (Exception ex)
     {
         strErrorText = ex.Message;
         return false;
     }
 }
示例#7
0
 /// <summary>
 /// Releases the AviSynth clip and script environment (clip first) and
 /// suppresses finalization of this instance.
 /// </summary>
 private void cleanup()
 {
     // NOTE(review): the short delay presumably gives background consumers time
     // to finish with the clip before it is disposed — confirm why it is needed.
     System.Threading.Thread.Sleep(100);
     if (this.clip != null)
     {
         // Fields are typed as the concrete wrappers; dispose via IDisposable.
         (this.clip as IDisposable).Dispose();
         this.clip = null;
     }
     if (this.enviroment != null)
     {
         (this.enviroment as IDisposable).Dispose();
         this.enviroment = null;
     }
     // NOTE(review): SuppressFinalize here suggests this helper is shared by
     // Dispose() and a finalizer path — confirm against the full class.
     GC.SuppressFinalize(this);
 }
示例#8
0
 /// <summary>
 /// Returns the audio channel count of the given AviSynth script file,
 /// or 0 for non-.avs paths and for scripts that fail to open.
 /// </summary>
 /// <param name="strAVSScript">Path to an .avs script file.</param>
 public static int AVSFileChannelCount(String strAVSScript)
 {
     try
     {
         // Ordinal case-insensitive comparison replaces ToLower()+Equals:
         // avoids culture-dependent casing pitfalls (CA1308) and an
         // intermediate string allocation.
         if (!Path.GetExtension(strAVSScript).Equals(".avs", StringComparison.OrdinalIgnoreCase))
         {
             return 0;
         }
         using (AviSynthScriptEnvironment env = new AviSynthScriptEnvironment())
             using (AviSynthClip a = env.OpenScriptFile(strAVSScript))
                 return a.ChannelsCount;
     }
     catch
     {
         // Best-effort probe: any open/parse failure is reported as "no channels".
         return 0;
     }
 }
示例#9
0
        /// <summary>
        /// Worker entry point: builds an LSMASH AviSynth input script for the job
        /// and parses it once, which creates the .lwi index file as a side effect.
        /// Flags an error on the status object if parsing fails or the script has
        /// no video track.
        /// </summary>
        protected override void RunInThread()
        {
            try
            {
                // job output file in case of LWLibavVideoSource()
                base.jobOutputFile = job.Input + ".lwi";

                // generate the avs script
                StringBuilder strAVSScript = new StringBuilder();
                MediaInfoFile oInfo        = null;
                strAVSScript.Append(VideoUtil.getLSMASHVideoInputLine(job.Input, job.Output, 0, ref oInfo));
                if (oInfo != null)
                {
                    oInfo.Dispose();
                }
                base.log.LogValue("AviSynth script", strAVSScript.ToString(), ImageType.Information);

                // check if the script has a video track, also this call will create the index file if there is one
                string strErrorText = String.Empty;
                bool   openSuccess  = false;
                try
                {
                    using (AviSynthScriptEnvironment env = new AviSynthScriptEnvironment())
                        using (AviSynthClip a = env.ParseScript(strAVSScript.ToString(), false, false))
                            openSuccess = a.HasVideo;
                }
                catch (Exception ex)
                {
                    // Parsing failed: report the AviSynth error instead.
                    strErrorText = ex.Message;
                }
                if (!openSuccess)
                {
                    // Fixed: the original reset strErrorText to String.Empty before
                    // parsing, so a successfully parsed script without video logged
                    // an empty message; restore the intended default here.
                    if (String.IsNullOrEmpty(strErrorText))
                        strErrorText = "no video track found";
                    base.log.LogEvent(strErrorText, ImageType.Error);
                    su.HasError = true;
                }
            }
            catch (Exception ex)
            {
                base.log.LogValue("Error: ", ex.Message, ImageType.Error);
                su.HasError = true;
            }
        }
示例#10
0
        /// <summary>
        /// Writes a canonical 44-byte PCM WAV header for the clip's audio to the
        /// target stream (the encoder's stdin).
        /// </summary>
        /// <param name="target">Stream the header is written to.</param>
        /// <param name="a">Clip whose audio format fields fill the header.</param>
        private void writeHeader(Stream target, AviSynthClip a)
        {
            // When the data size would overflow the 32-bit RIFF size field, write
            // a magic oversized value instead (the "FAAD trick") — presumably the
            // consumer then reads until EOF; confirm against the target decoders.
            const uint FAAD_MAGIC_VALUE = 0xFFFFFF00;
            const uint WAV_HEADER_SIZE  = 36;
            bool       useFaadTrick     = a.AudioSizeInBytes >= (uint.MaxValue - WAV_HEADER_SIZE);

            target.Write(System.Text.Encoding.ASCII.GetBytes("RIFF"), 0, 4);
            // RIFF chunk size = data size + remaining header bytes.
            target.Write(BitConverter.GetBytes(useFaadTrick ? FAAD_MAGIC_VALUE : (uint)(a.AudioSizeInBytes + WAV_HEADER_SIZE)), 0, 4);
            target.Write(System.Text.Encoding.ASCII.GetBytes("WAVEfmt "), 0, 8);
            target.Write(BitConverter.GetBytes((uint)0x10), 0, 4);   // fmt chunk size: 16 bytes (PCM)
            target.Write(BitConverter.GetBytes((short)0x01), 0, 2);  // format tag: 1 = PCM
            target.Write(BitConverter.GetBytes(a.ChannelsCount), 0, 2);      // only the low 2 bytes are written
            target.Write(BitConverter.GetBytes(a.AudioSampleRate), 0, 4);
            target.Write(BitConverter.GetBytes(a.AvgBytesPerSec), 0, 4);
            target.Write(BitConverter.GetBytes(a.BytesPerSample * a.ChannelsCount), 0, 2); // block align
            target.Write(BitConverter.GetBytes(a.BitsPerSample), 0, 2);
            target.Write(System.Text.Encoding.ASCII.GetBytes("data"), 0, 4);
            // data chunk size, with the same oversized-value fallback.
            target.Write(BitConverter.GetBytes(useFaadTrick ? (FAAD_MAGIC_VALUE - WAV_HEADER_SIZE) : (uint)a.AudioSizeInBytes), 0, 4);
        }
示例#11
0
 /// <summary>
 /// Determines whether the given AviSynth script text exposes an audio stream.
 /// </summary>
 /// <param name="strAVSScript">AviSynth script source to parse.</param>
 /// <param name="strErrorText">Empty on success; the parse error message on failure.</param>
 /// <returns>True if the parsed clip has at least one audio channel; false otherwise or on error.</returns>
 public static bool AVSScriptHasAudio(String strAVSScript, out string strErrorText)
 {
     strErrorText = String.Empty;
     try
     {
         using (AviSynthScriptEnvironment env = new AviSynthScriptEnvironment())
         {
             using (AviSynthClip a = env.ParseScript(strAVSScript))
             {
                 return a.ChannelsCount != 0;
             }
         }
     }
     catch (Exception ex)
     {
         strErrorText = ex.Message;
         return false;
     }
 }
示例#12
0
 /// <summary>
 /// Determines whether the given AviSynth script file exposes an audio stream.
 /// Returns false for non-.avs paths and for scripts that fail to open.
 /// </summary>
 /// <param name="strAVSScript">Path to an .avs script file.</param>
 public static bool AVSFileHasAudio(String strAVSScript)
 {
     try
     {
         // Ordinal case-insensitive comparison replaces ToLower()+Equals:
         // avoids culture-dependent casing pitfalls (CA1308) and an
         // intermediate string allocation.
         if (!Path.GetExtension(strAVSScript).Equals(".avs", StringComparison.OrdinalIgnoreCase))
         {
             return false;
         }
         using (AviSynthScriptEnvironment env = new AviSynthScriptEnvironment())
             using (AviSynthClip a = env.OpenScriptFile(strAVSScript))
                 return a.ChannelsCount != 0;
     }
     catch
     {
         // Best-effort probe: any open/parse failure is reported as "no audio".
         return false;
     }
 }
示例#13
0
 /// <summary>
 /// Opens or parses an AviSynth script and caches the clip's basic properties
 /// (video presence/size, frame rate, DAR hints, frame count, audio presence).
 /// </summary>
 /// <param name="script">Script source text when <paramref name="parse"/> is true; otherwise a path to a script file.</param>
 /// <param name="parse">True to parse the text directly, false to open a script file from disk.</param>
 private AvsFile(string script, bool parse)
 {
     try
     {
         this.enviroment = new AviSynthScriptEnvironment();
         this.clip       = parse ? enviroment.ParseScript(script, AviSynthColorspace.RGB24) : enviroment.OpenScriptFile(script, AviSynthColorspace.RGB24);
         this.hasVideo   = clip.HasVideo;
         this.height     = this.clip.VideoHeight;
         this.width      = this.clip.VideoWidth;
         this.frameRate  = ((double)clip.raten) / ((double)clip.rated);
         // MeGUI_darx/MeGUI_dary are optional script variables carrying the
         // display aspect ratio; -1 is the "not set" default.
         this.darX       = this.clip.GetIntVariable("MeGUI_darx", -1);
         this.darY       = this.clip.GetIntVariable("MeGUI_dary", -1);
         this.frameCount = clip.num_frames;
         this.hasAudio   = (clip.SamplesCount != 0);
     }
     catch (Exception)
     {
         // Construction failed: release the clip/environment before rethrowing.
         cleanup();
         throw;
     }
 }
示例#14
0
 /// <summary>
 /// Determines whether the given AviSynth script file exposes an audio stream.
 /// Returns false for non-.avs paths and for scripts that fail to open.
 /// </summary>
 /// <param name="strAVSScript">Path to an .avs script file.</param>
 public static bool AVSFileHasAudio(String strAVSScript)
 {
     try
     {
         bool isAvsScript = Path.GetExtension(strAVSScript).ToLowerInvariant().Equals(".avs");
         if (!isAvsScript)
         {
             return false;
         }
         using (AviSynthScriptEnvironment env = new AviSynthScriptEnvironment())
         {
             using (AviSynthClip a = env.OpenScriptFile(strAVSScript))
             {
                 return a.HasAudio;
             }
         }
     }
     catch
     {
         return false;
     }
 }
示例#15
0
 /// <summary>
 /// Returns the frame rate (raten/rated) of the given AviSynth script file,
 /// or 0 for non-.avs paths, audio-only scripts, and scripts that fail to open.
 /// </summary>
 /// <param name="strAVSScript">Path to an .avs script file.</param>
 public static double GetFPSFromAVSFile(String strAVSScript)
 {
     try
     {
         // Ordinal case-insensitive comparison replaces ToLower()+Equals:
         // avoids culture-dependent casing pitfalls (CA1308) and an
         // intermediate string allocation.
         if (!Path.GetExtension(strAVSScript).Equals(".avs", StringComparison.OrdinalIgnoreCase))
         {
             return 0;
         }
         using (AviSynthScriptEnvironment env = new AviSynthScriptEnvironment())
         {
             using (AviSynthClip a = env.OpenScriptFile(strAVSScript))
                 if (a.HasVideo)
                 {
                     return (double)a.raten / (double)a.rated;
                 }
         }
         return 0;
     }
     catch
     {
         // Best-effort probe: any open/parse failure is reported as fps 0.
         return 0;
     }
 }
示例#16
0
 /// <summary>
 /// Launches the external audio encoder process with redirected standard
 /// streams at idle priority and starts the stdout/stderr reader threads.
 /// </summary>
 /// <param name="a">Open AviSynth clip supplying the audio parameters substituted into the command line.</param>
 /// <exception cref="ApplicationException">Wraps any failure to start the encoder.</exception>
 private void createEncoderProcess(AviSynthClip a)
 {
     try
     {
         _encoderProcess = new Process();
         ProcessStartInfo info = new ProcessStartInfo();
         // Command line arguments, to be passed to encoder
         // {0} means output file name
         // {1} means samplerate in Hz
         // {2} means bits per sample
         // {3} means channel count
         // {4} means samplecount
         // {5} means size in bytes
         info.Arguments = string.Format(_encoderCommandLine,
                                        audioJob.Output, a.AudioSampleRate, a.BitsPerSample, a.ChannelsCount, a.SamplesCount, a.AudioSizeInBytes);
         info.FileName = _encoderExecutablePath;
         // Fixed: the format string previously had no {2} placeholder, so the
         // trailing Environment.NewLine argument was silently dropped and log
         // entries ran together on one line.
         _logBuilder.AppendFormat("Command line used: {0} {1}{2}", _encoderExecutablePath, info.Arguments, Environment.NewLine);
         // All three std streams are redirected: PCM is piped into stdin and
         // dedicated reader threads drain stdout/stderr to avoid pipe deadlocks.
         info.UseShellExecute        = false;
         info.RedirectStandardInput  = true;
         info.RedirectStandardOutput = true;
         info.RedirectStandardError  = true;
         info.CreateNoWindow         = true;
         _encoderProcess.StartInfo   = info;
         _encoderProcess.Start();
         // Run the encoder at idle priority so it does not starve the rest of the system.
         _encoderProcess.PriorityClass = ProcessPriorityClass.Idle;
         _readFromStdOutThread         = new Thread(new ThreadStart(readStdOut));
         _readFromStdErrThread         = new Thread(new ThreadStart(readStdErr));
         _readFromStdOutThread.Start();
         _readFromStdOutThread.Priority = ThreadPriority.Normal;
         _readFromStdErrThread.Start();
         _readFromStdErrThread.Priority = ThreadPriority.Normal;
     }
     catch (Exception e)
     {
         throw new ApplicationException("Can't start encoder: " + e.Message, e);
     }
 }
示例#17
0
File: AvsReader.cs  Project: pphh77/MeGui
 /// <summary>
 /// Wraps an already-open AviSynth clip for audio reading.
 /// </summary>
 /// <param name="clip">Open clip to read audio from; not disposed here.</param>
 public AvsAudioReader(AviSynthClip clip)
 {
     this.clip = clip;
 }
示例#18
0
File: AvsReader.cs  Project: pphh77/MeGui
 /// <summary>
 /// Wraps an already-open AviSynth clip for video frame reading at the
 /// given dimensions.
 /// </summary>
 /// <param name="clip">Open clip to read frames from; not disposed here.</param>
 /// <param name="width">Frame width in pixels.</param>
 /// <param name="height">Frame height in pixels.</param>
 public AvsVideoReader(AviSynthClip clip, int width, int height)
 {
     this.clip   = clip;
     this.width  = width;
     this.height = height;
 }
示例#19
0
        /// <summary>
        /// Main encode loop: opens the AviSynth audio script, streams raw PCM
        /// (optionally preceded by a WAV header) into the external encoder's
        /// stdin in 4096-sample chunks, and reports progress, abort, error, and
        /// completion through the status object.
        /// </summary>
        private void encode()
        {
            try
            {
                using (AviSynthScriptEnvironment env = new AviSynthScriptEnvironment())
                {
                    using (AviSynthClip a = env.ParseScript(_avisynthAudioScript))
                    {
                        if (0 == a.ChannelsCount)
                        {
                            throw new ApplicationException("Can't find audio stream");
                        }

                        _logBuilder.AppendFormat("Input: Channels={0}, BitsPerSample={1}, SampleRate={2}Hz{3}", a.ChannelsCount, a.BitsPerSample, a.AudioSampleRate, Environment.NewLine);
                        _start = DateTime.Now;

                        const int MAX_SAMPLES_PER_ONCE = 4096;
                        int       frameSample          = 0;
                        int       lastUpdateSample     = 0;
                        int       frameBufferTotalSize = MAX_SAMPLES_PER_ONCE * a.ChannelsCount * a.BytesPerSample;
                        byte[]    frameBuffer          = new byte[frameBufferTotalSize];
                        createEncoderProcess(a);
                        try
                        {
                            using (Stream target = _encoderProcess.StandardInput.BaseStream)
                            {
                                // let's write WAV Header
                                if (_mustSendWavHeaderToEncoderStdIn)
                                {
                                    writeHeader(target, a);
                                }

                                _sampleRate = a.AudioSampleRate;

                                raiseEvent("Preprocessing...");
                                bool hasStartedEncoding = false;

                                // Pin the buffer so AviSynth can fill it through a raw pointer.
                                GCHandle h       = GCHandle.Alloc(frameBuffer, GCHandleType.Pinned);
                                IntPtr   address = h.AddrOfPinnedObject();
                                try
                                {
                                    su.ClipLength = TimeSpan.FromSeconds((double)a.SamplesCount / (double)_sampleRate);
                                    while (frameSample < a.SamplesCount)
                                    {
                                        // Pause support: block while the job is suspended.
                                        _mre.WaitOne();

                                        if (_encoderProcess != null)
                                        {
                                            if (_encoderProcess.HasExited)
                                            {
                                                throw new ApplicationException("Abnormal encoder termination " + _encoderProcess.ExitCode.ToString());
                                            }
                                        }
                                        int nHowMany = Math.Min((int)(a.SamplesCount - frameSample), MAX_SAMPLES_PER_ONCE);
                                        a.ReadAudio(address, frameSample, nHowMany);

                                        _mre.WaitOne();
                                        if (!hasStartedEncoding)
                                        {
                                            raiseEvent("Encoding audio...");
                                            hasStartedEncoding = true;
                                        }


                                        target.Write(frameBuffer, 0, nHowMany * a.ChannelsCount * a.BytesPerSample);
                                        target.Flush();
                                        frameSample += nHowMany;
                                        // Throttle progress updates to every SAMPLES_PER_UPDATE samples.
                                        if (frameSample - lastUpdateSample > SAMPLES_PER_UPDATE)
                                        {
                                            setProgress((decimal)frameSample / (decimal)a.SamplesCount);
                                            lastUpdateSample = frameSample;
                                        }
                                        Thread.Sleep(0);
                                    }
                                }
                                finally
                                {
                                    h.Free();
                                }
                                setProgress(1M);

                                // NOTE(review): presumably pads the data chunk to an even
                                // byte count as RIFF requires — confirm the condition.
                                if (_mustSendWavHeaderToEncoderStdIn && a.BytesPerSample % 2 == 1)
                                {
                                    target.WriteByte(0);
                                }
                            }
                            raiseEvent("Finalizing encoder");
                            _encoderProcess.WaitForExit();
                            _readFromStdErrThread.Join();
                            _readFromStdOutThread.Join();
                            if (0 != _encoderProcess.ExitCode)
                            {
                                throw new ApplicationException("Abnormal encoder termination " + _encoderProcess.ExitCode.ToString());
                            }
                        }
                        finally
                        {
                            // Ensure the encoder and its reader threads are torn down
                            // even when the loop above threw.
                            if (!_encoderProcess.HasExited)
                            {
                                _encoderProcess.Kill();
                                _encoderProcess.WaitForExit();
                                _readFromStdErrThread.Join();
                                _readFromStdOutThread.Join();
                            }
                            _readFromStdErrThread = null;
                            _readFromStdOutThread = null;
                        }
                    }
                }
            }
            catch (Exception e)
            {
                deleteOutputFile();
                if (e is ThreadAbortException)
                {
                    _logBuilder.Append("ABORTING!\n");
                    su.WasAborted = true;
                    raiseEvent();
                }
                else
                {
                    // Better Errors Exception for Audio Encoders
                    // Fixed: the original concatenated the *integer* result of
                    // LastIndexOf(@"\") with the dll file name, so File.Exists
                    // probed a bogus path like "42enc_aacplus.dll". It also left
                    // an unused 'audio_encoder' local behind.
                    string encoderDirectory = Path.GetDirectoryName(_encoderExecutablePath) ?? string.Empty;

                    _logBuilder.Append("\n");
                    _logBuilder.Append("\nError:\n");

                    if (audioJob.Settings is WinAmpAACSettings)
                    {
                        if (File.Exists(Path.Combine(encoderDirectory, "enc_aacplus.dll")) == false)
                        {
                            _logBuilder.Append("enc_aacplus.dll not found in the path...\n");
                        }
                        if (File.Exists(Environment.SystemDirectory + @"\nscrt.dll") == false)
                        {
                            _logBuilder.Append("nscrt.dll must be in your Windows System directory...\n");
                        }
                    }
                    su.HasError = true;
                    raiseEvent();
                }
                return;
            }
            finally
            {
                deleteTempFiles();
            }
            su.IsComplete = true;
            raiseEvent();
        }
示例#20
0
 /// <summary>
 /// Wraps an already-open AviSynth clip for video frame reading at the
 /// given dimensions.
 /// </summary>
 /// <param name="clip">Open clip to read frames from; not disposed here.</param>
 /// <param name="width">Frame width in pixels.</param>
 /// <param name="height">Frame height in pixels.</param>
 public AvsVideoReader(AviSynthClip clip, int width, int height)
 {
     this.clip = clip;
     this.width = width;
     this.height = height;
 }
示例#21
0
 /// <summary>
 /// Wraps an already-open AviSynth clip for audio reading.
 /// </summary>
 /// <param name="clip">Open clip to read audio from; not disposed here.</param>
 public AvsAudioReader(AviSynthClip clip)
 {
     this.clip = clip;
 }
示例#22
0
 /// <summary>
 /// Releases the AviSynth clip and script environment (clip first) and
 /// suppresses finalization of this instance.
 /// </summary>
 private void cleanup()
 {
     // NOTE(review): the short delay presumably gives background consumers time
     // to finish with the clip before it is disposed — confirm why it is needed.
     System.Threading.Thread.Sleep(100);
     if (this.clip != null)
     {
         // Fields are typed as the concrete wrappers; dispose via IDisposable.
         (this.clip as IDisposable).Dispose();
         this.clip = null;
     }
     if (this.enviroment != null)
     {
         (this.enviroment as IDisposable).Dispose();
         this.enviroment = null;
     }
     // NOTE(review): SuppressFinalize here suggests this helper is shared by
     // Dispose() and a finalizer path — confirm against the full class.
     GC.SuppressFinalize(this);
 }
示例#23
0
        /// <summary>
        /// Main encode loop: opens the AviSynth audio script, streams raw PCM
        /// (optionally preceded by a WAV header) into the external encoder's
        /// stdin in 4096-sample chunks, and reports progress, abort, error, and
        /// completion through StatusUpdate events.
        /// </summary>
        private void encode()
        {
            try
            {
                using (AviSynthScriptEnvironment env = new AviSynthScriptEnvironment())
                {
                    using (AviSynthClip a = env.ParseScript(_avisynthAudioScript))
                    {
                        if (0 == a.ChannelsCount)
                        {
                            throw new ApplicationException("Can't find audio stream");
                        }

                        _logBuilder.AppendFormat("Channels={0}, BitsPerSample={1}, SampleRate={2}Hz{3}", a.ChannelsCount, a.BitsPerSample, a.AudioSampleRate, Environment.NewLine);

                        const int MAX_SAMPLES_PER_ONCE = 4096;
                        int       frameSample          = 0;
                        int       frameBufferTotalSize = MAX_SAMPLES_PER_ONCE * a.ChannelsCount * a.BytesPerSample;
                        byte[]    frameBuffer          = new byte[frameBufferTotalSize];
                        createEncoderProcess(a);
                        try
                        {
                            using (Stream target = _encoderProcess.StandardInput.BaseStream)
                            {
                                // let's write WAV Header
                                if (_mustSendWavHeaderToEncoderStdIn)
                                {
                                    writeHeader(target, a);
                                }

                                _sampleRate = a.AudioSampleRate;

                                raiseEvent("Preprocessing...");

                                // Pin the buffer so AviSynth can fill it through a raw pointer.
                                GCHandle h       = GCHandle.Alloc(frameBuffer, GCHandleType.Pinned);
                                IntPtr   address = h.AddrOfPinnedObject();
                                try
                                {
                                    while (frameSample < a.SamplesCount)
                                    {
                                        // Pause support: block while the job is suspended.
                                        _mre.WaitOne();

                                        if (_encoderProcess != null)
                                        {
                                            if (_encoderProcess.HasExited)
                                            {
                                                throw new ApplicationException("Abnormal encoder termination " + _encoderProcess.ExitCode.ToString());
                                            }
                                        }
                                        int nHowMany = Math.Min((int)(a.SamplesCount - frameSample), MAX_SAMPLES_PER_ONCE);
                                        a.ReadAudio(address, frameSample, nHowMany);

                                        _mre.WaitOne();

                                        setProgress(((100 * (double)frameSample) / a.SamplesCount), frameSample);
                                        target.Write(frameBuffer, 0, nHowMany * a.ChannelsCount * a.BytesPerSample);
                                        target.Flush();
                                        frameSample += nHowMany;
                                        Thread.Sleep(0);
                                    }
                                }
                                finally
                                {
                                    h.Free();
                                }
                                setProgress(100, frameSample);

                                // NOTE(review): presumably pads the data chunk to an even
                                // byte count as RIFF requires — confirm the condition.
                                if (_mustSendWavHeaderToEncoderStdIn && a.BytesPerSample % 2 == 1)
                                {
                                    target.WriteByte(0);
                                }
                            }
                            raiseEvent("Finalizing encoder");
                            _encoderProcess.WaitForExit();
                            _readFromStdErrThread.Join();
                            _readFromStdOutThread.Join();
                            if (0 != _encoderProcess.ExitCode)
                            {
                                throw new ApplicationException("Abnormal encoder termination " + _encoderProcess.ExitCode.ToString());
                            }
                        }
                        finally
                        {
                            // Ensure the encoder and its reader threads are torn down
                            // even when the loop above threw.
                            if (!_encoderProcess.HasExited)
                            {
                                _encoderProcess.Kill();
                                _encoderProcess.WaitForExit();
                                _readFromStdErrThread.Join();
                                _readFromStdOutThread.Join();
                            }
                            _readFromStdErrThread = null;
                            _readFromStdOutThread = null;
                        }
                    }
                }
            }
            catch (Exception e)
            {
                deleteOutputFile();
                if (e is ThreadAbortException)
                {
                    // Job was cancelled: report an aborted status, not an error.
                    _logBuilder.Append("ABORTING!\n");
                    StatusUpdate u = new StatusUpdate();
                    u.WasAborted = true;
                    raiseEvent(u);
                }
                else
                {
                    _logBuilder.Append("Error:\n" + e.ToString());
                    StatusUpdate u = new StatusUpdate();
                    u.HasError = true;
                    u.Error    = e.ToString();
                    raiseEvent(u);
                }
                return;
            }
            finally
            {
                deleteTempFiles();
            }
            StatusUpdate u2 = new StatusUpdate();

            u2.IsComplete = true;
            raiseEvent(u2);
        }