Example 1
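The AvsFile constructor either parses a script string or opens a script file through AviSynthScriptEnvironment (forcing RGB24), then fills a MediaFileInfo with the clip's dimensions, DAR hints (MeGUI_darx/MeGUI_dary), frame count, frame rate and audio presence; cleanup() runs if anything throws.
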
        private AvsFile(string script, bool parse)
        {
            try
            {
                this.enviroment = new AviSynthScriptEnvironment();
                this.clip = parse ? enviroment.ParseScript(script, AviSynthColorspace.RGB24) : enviroment.OpenScriptFile(script, AviSynthColorspace.RGB24);

                checked
                {
                    ulong width = (ulong)clip.VideoWidth;
                    ulong height = (ulong)clip.VideoHeight;
                    info = new MediaFileInfo(
                        clip.HasVideo, width, height,
                        new Dar(clip.GetIntVariable("MeGUI_darx", -1),
                                clip.GetIntVariable("MeGUI_dary", -1),
                                width, height),
                        (ulong)clip.num_frames,
                        ((double)clip.raten) / ((double)clip.rated),
                        (clip.SamplesCount != 0));
                }
            }
            catch (Exception)
            {
                cleanup();
                throw;
            }
        }
Example 2
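UseLSMASHVideoSource tests whether a file with an MP4-family extension can be opened through the LSMASH plugin: it builds a two-line script that loads the plugin and calls LSMASHVideoSource or LSMASHAudioSource, and returns true only if ParseScript succeeds.
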
        public static bool UseLSMASHVideoSource(string inputFile, bool bVideo)
        {
            string extension = Path.GetExtension(inputFile).ToLowerInvariant();

            if (!extension.Equals(".mp4") && !extension.Equals(".m4v") && !extension.Equals(".mov") && !extension.Equals(".m4a") &&
                !extension.Equals(".3gp") && !extension.Equals(".3g2") && !extension.Equals(".aac") && !extension.Equals(".qt"))
            {
                return(false);
            }

            StringBuilder script = new StringBuilder();

            script.AppendFormat("LoadPlugin(\"{0}\"){1}", MainForm.Instance.Settings.LSMASH.Path, Environment.NewLine);
            script.AppendFormat("{0}(\"{1}\")", (bVideo ? "LSMASHVideoSource" : "LSMASHAudioSource"), inputFile);

            try
            {
                using (AviSynthScriptEnvironment env = new AviSynthScriptEnvironment())
                {
                    using (AviSynthClip a = env.ParseScript(script.ToString()))
                    {
                        return(true);
                    }
                }
            }
            catch
            {
                return(false);
            }
        }
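
A hypothetical call site for the helper above (the file path is purely illustrative, and the containing class is not shown in the snippet); the second argument selects whether the video or the audio source filter is probed:

            string input   = @"D:\clips\sample.mp4";             // illustrative path only
            bool   videoOk = UseLSMASHVideoSource(input, true);   // tries LSMASHVideoSource
            bool   audioOk = UseLSMASHVideoSource(input, false);  // tries LSMASHAudioSource
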
Example 3
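Another variant of the AvsFile constructor: when the clip has video it builds a VideoInformation with dimensions, DAR, frame count and frame rate; otherwise it stores the audio sample count and sample rate.
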
        private AvsFile(string script, bool parse)
        {
            try
            {
                this.enviroment = new AviSynthScriptEnvironment();
                this.clip       = parse ? enviroment.ParseScript(script, AviSynthColorspace.RGB24) : enviroment.OpenScriptFile(script, AviSynthColorspace.RGB24);

                checked
                {
                    if (clip.HasVideo)
                    {
                        ulong width  = (ulong)clip.VideoWidth;
                        ulong height = (ulong)clip.VideoHeight;
                        info = new VideoInformation(
                            clip.HasVideo, width, height,
                            new Dar(clip.GetIntVariable("MeGUI_darx", -1),
                                    clip.GetIntVariable("MeGUI_dary", -1),
                                    width, height),
                            (ulong)clip.num_frames,
                            ((double)clip.raten) / ((double)clip.rated),
                            clip.raten, clip.rated);
                    }
                    else
                    {
                        info = new VideoInformation(false, 0, 0, Dar.A1x1, (ulong)clip.SamplesCount, (double)clip.AudioSampleRate, 0, 0);
                    }
                }
            }
            catch (Exception)
            {
                cleanup();
                throw;
            }
        }
Example 4
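AVSScriptHasVideo parses the script text in a temporary environment and reports whether the resulting clip has a video track, passing any exception message back through the out parameter.
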
 public static bool AVSScriptHasVideo(String strAVSScript, out string strErrorText)
 {
     try
     {
         strErrorText = String.Empty;
         using (AviSynthScriptEnvironment env = new AviSynthScriptEnvironment())
             using (AviSynthClip a = env.ParseScript(strAVSScript))
                 return(a.HasVideo);
     }
     catch (Exception ex)
     {
         strErrorText = ex.Message;
         return(false);
     }
 }
Example 5
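RunInThread generates the L-SMASH input script for the job, parses it once (which also creates the .lwi index file), and marks the job as failed when the script has no video track or parsing throws.
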
        protected override void RunInThread()
        {
            try
            {
                // job output file in case of LWLibavVideoSource()
                base.jobOutputFile = job.Input + ".lwi";

                // generate the avs script
                StringBuilder strAVSScript = new StringBuilder();
                MediaInfoFile oInfo        = null;
                strAVSScript.Append(VideoUtil.getLSMASHVideoInputLine(job.Input, job.Output, 0, ref oInfo));
                if (oInfo != null)
                {
                    oInfo.Dispose();
                }
                base.log.LogValue("AviSynth script", strAVSScript.ToString(), ImageType.Information);

                // check if the script has a video track, also this call will create the index file if there is one
                string strErrorText = "no video track found";
                bool   openSuccess  = false;
                try
                {
                    using (AviSynthScriptEnvironment env = new AviSynthScriptEnvironment())
                        using (AviSynthClip a = env.ParseScript(strAVSScript.ToString(), false, false))
                            openSuccess = a.HasVideo; // keep the preset "no video track found" message when this is false
                }
                catch (Exception ex)
                {
                    strErrorText = ex.Message;
                }
                if (!openSuccess)
                {
                    // avs script has no video track or an error has been thrown
                    base.log.LogEvent(strErrorText, ImageType.Error);
                    su.HasError = true;
                }
            }
            catch (Exception ex)
            {
                base.log.LogValue("Error: ", ex.Message, ImageType.Error);
                su.HasError = true;
            }
        }
Example 6
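AVSScriptHasAudio is the audio counterpart of example 4: it parses the script and returns false when the clip reports zero channels or when parsing fails.
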
 public static bool AVSScriptHasAudio(String strAVSScript, out string strErrorText)
 {
     try
     {
         strErrorText = String.Empty;
         using (AviSynthScriptEnvironment env = new AviSynthScriptEnvironment())
             using (AviSynthClip a = env.ParseScript(strAVSScript))
                 if (a.ChannelsCount == 0)
                 {
                     return(false);
                 }
         return(true);
     }
     catch (Exception ex)
     {
         strErrorText = ex.Message;
         return(false);
     }
 }
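
Examples 4 and 6 follow the same parse-and-inspect pattern. The sketch below is not part of the original code (the helper name ProbeAVSScript and the combined out parameters are invented for illustration) and folds both checks into a single ParseScript call, using only members already seen above:

 public static bool ProbeAVSScript(String strAVSScript, out bool bHasVideo, out bool bHasAudio, out string strErrorText)
 {
     // Minimal sketch, not from the original sources: one parse, both checks.
     bHasVideo    = false;
     bHasAudio    = false;
     strErrorText = String.Empty;
     try
     {
         using (AviSynthScriptEnvironment env = new AviSynthScriptEnvironment())
             using (AviSynthClip a = env.ParseScript(strAVSScript))
             {
                 bHasVideo = a.HasVideo;              // video track present?
                 bHasAudio = (a.ChannelsCount != 0);  // at least one audio channel?
             }
         return(true);   // script parsed successfully
     }
     catch (Exception ex)
     {
         strErrorText = ex.Message;  // parsing failed; report why
         return(false);
     }
 }
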
Example 7
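Another AvsFile constructor variant that copies the clip properties (video presence, dimensions, frame rate, DAR variables, frame count, audio presence) into individual fields instead of an info object.
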
 private AvsFile(string script, bool parse)
 {
     try
     {
         this.enviroment = new AviSynthScriptEnvironment();
         this.clip       = parse ? enviroment.ParseScript(script, AviSynthColorspace.RGB24) : enviroment.OpenScriptFile(script, AviSynthColorspace.RGB24);
         this.hasVideo   = clip.HasVideo;
         this.height     = this.clip.VideoHeight;
         this.width      = this.clip.VideoWidth;
         this.frameRate  = ((double)clip.raten) / ((double)clip.rated);
         this.darX       = this.clip.GetIntVariable("MeGUI_darx", -1);
         this.darY       = this.clip.GetIntVariable("MeGUI_dary", -1);
         this.frameCount = clip.num_frames;
         this.hasAudio   = (clip.SamplesCount != 0);
     }
     catch (Exception)
     {
         cleanup();
         throw;
     }
 }
Example 8
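encode() drives an external audio encoder: it parses the AviSynth audio script, verifies an audio stream exists, optionally writes a WAV header to the encoder's stdin, then streams the decoded samples in 4096-sample chunks from a pinned buffer while reporting progress, and finally waits for the encoder process and its stderr/stdout reader threads. The error path distinguishes an abort from a failure and, for WinAmp AAC, checks for missing DLLs.
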
        private void encode()
        {
            try
            {
                using (AviSynthScriptEnvironment env = new AviSynthScriptEnvironment())
                {
                    using (AviSynthClip a = env.ParseScript(_avisynthAudioScript))
                    {
                        if (0 == a.ChannelsCount)
                        {
                            throw new ApplicationException("Can't find audio stream");
                        }

                        _logBuilder.AppendFormat("Input: Channels={0}, BitsPerSample={1}, SampleRate={2}Hz{3}", a.ChannelsCount, a.BitsPerSample, a.AudioSampleRate, Environment.NewLine);
                        _start = DateTime.Now;

                        const int MAX_SAMPLES_PER_ONCE = 4096;
                        int       frameSample          = 0;
                        int       lastUpdateSample     = 0;
                        int       frameBufferTotalSize = MAX_SAMPLES_PER_ONCE * a.ChannelsCount * a.BytesPerSample;
                        byte[]    frameBuffer          = new byte[frameBufferTotalSize];
                        createEncoderProcess(a);
                        try
                        {
                            using (Stream target = _encoderProcess.StandardInput.BaseStream)
                            {
                                // let's write WAV Header
                                if (_mustSendWavHeaderToEncoderStdIn)
                                {
                                    writeHeader(target, a);
                                }

                                _sampleRate = a.AudioSampleRate;

                                raiseEvent("Preprocessing...");
                                bool hasStartedEncoding = false;

                                GCHandle h       = GCHandle.Alloc(frameBuffer, GCHandleType.Pinned);
                                IntPtr   address = h.AddrOfPinnedObject();
                                try
                                {
                                    su.ClipLength = TimeSpan.FromSeconds((double)a.SamplesCount / (double)_sampleRate);
                                    while (frameSample < a.SamplesCount)
                                    {
                                        _mre.WaitOne();

                                        if (_encoderProcess != null)
                                        {
                                            if (_encoderProcess.HasExited)
                                            {
                                                throw new ApplicationException("Abnormal encoder termination " + _encoderProcess.ExitCode.ToString());
                                            }
                                        }
                                        int nHowMany = Math.Min((int)(a.SamplesCount - frameSample), MAX_SAMPLES_PER_ONCE);
                                        a.ReadAudio(address, frameSample, nHowMany);

                                        _mre.WaitOne();
                                        if (!hasStartedEncoding)
                                        {
                                            raiseEvent("Encoding audio...");
                                            hasStartedEncoding = true;
                                        }


                                        target.Write(frameBuffer, 0, nHowMany * a.ChannelsCount * a.BytesPerSample);
                                        target.Flush();
                                        frameSample += nHowMany;
                                        if (frameSample - lastUpdateSample > SAMPLES_PER_UPDATE)
                                        {
                                            setProgress((decimal)frameSample / (decimal)a.SamplesCount);
                                            lastUpdateSample = frameSample;
                                        }
                                        Thread.Sleep(0);
                                    }
                                }
                                finally
                                {
                                    h.Free();
                                }
                                setProgress(1M);

                                if (_mustSendWavHeaderToEncoderStdIn && a.BytesPerSample % 2 == 1)
                                {
                                    target.WriteByte(0);
                                }
                            }
                            raiseEvent("Finalizing encoder");
                            _encoderProcess.WaitForExit();
                            _readFromStdErrThread.Join();
                            _readFromStdOutThread.Join();
                            if (0 != _encoderProcess.ExitCode)
                            {
                                throw new ApplicationException("Abnormal encoder termination " + _encoderProcess.ExitCode.ToString());
                            }
                        }
                        finally
                        {
                            if (!_encoderProcess.HasExited)
                            {
                                _encoderProcess.Kill();
                                _encoderProcess.WaitForExit();
                                _readFromStdErrThread.Join();
                                _readFromStdOutThread.Join();
                            }
                            _readFromStdErrThread = null;
                            _readFromStdOutThread = null;
                        }
                    }
                }
            }
            catch (Exception e)
            {
                deleteOutputFile();
                if (e is ThreadAbortException)
                {
                    _logBuilder.Append("ABORTING!\n");
                    su.WasAborted = true;
                    raiseEvent();
                }
                else
                {
                    // Better error reporting for audio encoders
                    int    encoder_path  = _encoderExecutablePath.LastIndexOf(@"\");
                    string encoder_dir   = _encoderExecutablePath.Substring(0, encoder_path + 1);
                    string audio_encoder = _encoderExecutablePath.Substring(encoder_path + 1).ToLower();

                    _logBuilder.Append("\n");
                    _logBuilder.Append("\nError:\n");

                    if (audioJob.Settings is WinAmpAACSettings)
                    {
                        if (File.Exists(encoder_dir + "enc_aacplus.dll") == false)
                        {
                            _logBuilder.Append("enc_aacplus.dll not found in the path...\n");
                        }
                        if (File.Exists(Environment.SystemDirectory + @"\nscrt.dll") == false)
                        {
                            _logBuilder.Append("nscrt.dll must be in your Windows System directory...\n");
                        }
                    }
                    su.HasError = true;
                    raiseEvent();
                }
                return;
            }
            finally
            {
                deleteTempFiles();
            }
            su.IsComplete = true;
            raiseEvent();
        }
Example 9
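Another revision of the same encode() loop; it reports progress and completion through StatusUpdate objects rather than the su fields and ClipLength tracking used in example 8.
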
        private void encode()
        {
            try
            {
                using (AviSynthScriptEnvironment env = new AviSynthScriptEnvironment())
                {
                    using (AviSynthClip a = env.ParseScript(_avisynthAudioScript))
                    {
                        if (0 == a.ChannelsCount)
                        {
                            throw new ApplicationException("Can't find audio stream");
                        }

                        _logBuilder.AppendFormat("Channels={0}, BitsPerSample={1}, SampleRate={2}Hz{3}", a.ChannelsCount, a.BitsPerSample, a.AudioSampleRate, Environment.NewLine);

                        const int MAX_SAMPLES_PER_ONCE = 4096;
                        int       frameSample          = 0;
                        int       frameBufferTotalSize = MAX_SAMPLES_PER_ONCE * a.ChannelsCount * a.BytesPerSample;
                        byte[]    frameBuffer          = new byte[frameBufferTotalSize];
                        createEncoderProcess(a);
                        try
                        {
                            using (Stream target = _encoderProcess.StandardInput.BaseStream)
                            {
                                // let's write WAV Header
                                if (_mustSendWavHeaderToEncoderStdIn)
                                {
                                    writeHeader(target, a);
                                }

                                _sampleRate = a.AudioSampleRate;

                                raiseEvent("Preprocessing...");

                                GCHandle h       = GCHandle.Alloc(frameBuffer, GCHandleType.Pinned);
                                IntPtr   address = h.AddrOfPinnedObject();
                                try
                                {
                                    while (frameSample < a.SamplesCount)
                                    {
                                        _mre.WaitOne();

                                        if (_encoderProcess != null)
                                        {
                                            if (_encoderProcess.HasExited)
                                            {
                                                throw new ApplicationException("Abnormal encoder termination " + _encoderProcess.ExitCode.ToString());
                                            }
                                        }
                                        int nHowMany = Math.Min((int)(a.SamplesCount - frameSample), MAX_SAMPLES_PER_ONCE);
                                        a.ReadAudio(address, frameSample, nHowMany);

                                        _mre.WaitOne();

                                        setProgress(((100 * (double)frameSample) / a.SamplesCount), frameSample);
                                        target.Write(frameBuffer, 0, nHowMany * a.ChannelsCount * a.BytesPerSample);
                                        target.Flush();
                                        frameSample += nHowMany;
                                        Thread.Sleep(0);
                                    }
                                }
                                finally
                                {
                                    h.Free();
                                }
                                setProgress(100, frameSample);

                                if (_mustSendWavHeaderToEncoderStdIn && a.BytesPerSample % 2 == 1)
                                {
                                    target.WriteByte(0);
                                }
                            }
                            raiseEvent("Finalizing encoder");
                            _encoderProcess.WaitForExit();
                            _readFromStdErrThread.Join();
                            _readFromStdOutThread.Join();
                            if (0 != _encoderProcess.ExitCode)
                            {
                                throw new ApplicationException("Abnormal encoder termination " + _encoderProcess.ExitCode.ToString());
                            }
                        }
                        finally
                        {
                            if (!_encoderProcess.HasExited)
                            {
                                _encoderProcess.Kill();
                                _encoderProcess.WaitForExit();
                                _readFromStdErrThread.Join();
                                _readFromStdOutThread.Join();
                            }
                            _readFromStdErrThread = null;
                            _readFromStdOutThread = null;
                        }
                    }
                }
            }
            catch (Exception e)
            {
                deleteOutputFile();
                if (e is ThreadAbortException)
                {
                    _logBuilder.Append("ABORTING!\n");
                    StatusUpdate u = new StatusUpdate();
                    u.WasAborted = true;
                    raiseEvent(u);
                }
                else
                {
                    _logBuilder.Append("Error:\n" + e.ToString());
                    StatusUpdate u = new StatusUpdate();
                    u.HasError = true;
                    u.Error    = e.ToString();
                    raiseEvent(u);
                }
                return;
            }
            finally
            {
                deleteTempFiles();
            }
            StatusUpdate u2 = new StatusUpdate();

            u2.IsComplete = true;
            raiseEvent(u2);
        }
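
The core of examples 8 and 9 is the chunked streaming loop: the managed buffer is pinned so AviSynth can decode directly into it, and each chunk of raw PCM is forwarded to the encoder's stdin. The stripped-down sketch below is not part of the original code; it assumes an already-open AviSynthClip a and a writable Stream target (plus System.Runtime.InteropServices for GCHandle), exactly as in the examples above.

            // Minimal sketch of the streaming loop from examples 8 and 9.
            // Assumes an open AviSynthClip `a` and a Stream `target` (the encoder's stdin).
            const int MAX_SAMPLES_PER_ONCE = 4096;
            int    bytesPerFrame = a.ChannelsCount * a.BytesPerSample;        // one sample across all channels
            byte[] frameBuffer   = new byte[MAX_SAMPLES_PER_ONCE * bytesPerFrame];

            GCHandle h = GCHandle.Alloc(frameBuffer, GCHandleType.Pinned);    // pin so AviSynth can write into it
            try
            {
                IntPtr address     = h.AddrOfPinnedObject();
                int    frameSample = 0;
                while (frameSample < a.SamplesCount)
                {
                    int nHowMany = Math.Min((int)(a.SamplesCount - frameSample), MAX_SAMPLES_PER_ONCE);
                    a.ReadAudio(address, frameSample, nHowMany);              // decode nHowMany sample frames
                    target.Write(frameBuffer, 0, nHowMany * bytesPerFrame);   // forward raw PCM to the encoder
                    frameSample += nHowMany;
                }
                target.Flush();
            }
            finally
            {
                h.Free();                                                     // always unpin the buffer
            }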