/// <summary>
/// Convenience overload: wraps a plain status text into a StatusUpdate
/// (stored as the audio position) and forwards it to raiseEvent(StatusUpdate).
/// </summary>
/// <param name="s">status text to report</param>
private void raiseEvent(string s)
{
    StatusUpdate update = new StatusUpdate();
    update.AudioPosition = s;
    raiseEvent(update);
}
/// <summary>
/// Stamps the job identity onto the given status update, attaches the log
/// when the job has finished (completed normally, aborted, or failed), and
/// forwards the update to the GUI.
/// </summary>
/// <param name="e">the status update to complete and send</param>
private void raiseEvent(StatusUpdate e)
{
    e.JobName = audioJob.Name;
    e.JobType = JobTypes.AUDIO;
    // A job counts as complete when it finished normally, was aborted, or
    // errored out. (The original performed this assignment inside the if
    // condition — `if (e.IsComplete = (...))` — which is easy to misread
    // as a comparison; behavior is unchanged here.)
    e.IsComplete = e.IsComplete || e.WasAborted || e.HasError;
    if (e.IsComplete)
    {
        e.Log = createLog();
    }
    this.sendStatusUpdateToGUI(e);
}
/// <summary>
/// Prepares this processor for the given job: stores the job (downcast to
/// TJob), the status update object and the log, then validates the job's
/// input/output files.
/// </summary>
/// <param name="job2">the job to process; must actually be a TJob</param>
/// <param name="su">status update object used to report progress</param>
/// <param name="log">log item to write processing messages to</param>
public void setup(Job job2, StatusUpdate su, LogItem log)
{
    Debug.Assert(job2 is TJob, "Job is the wrong type");
    TJob typedJob = (TJob)job2;
    this.job = typedJob;
    this.su = su;
    this.log = log;
    checkJobIO();
}
/// <summary>
/// Catches the StatusUpdate event fired from Form1 and updates the GUI
/// accordingly. Fields whose value is unknown display "---".
/// </summary>
/// <param name="su">the status update to display</param>
public void UpdateStatus(StatusUpdate su)
{
    try
    {
        // possible to abort job only while it is actively processing
        abortButton.Enabled = (su.JobStatus == JobStatus.PROCESSING);
        // Current position
        positionInClip.Text = (Util.ToString(su.ClipPosition) ?? "---") + " / " + (Util.ToString(su.ClipLength) ?? "---");
        // Current frame
        currentVideoFrame.Text = (Util.ToString(su.NbFramesDone, true) ?? "---") + " / " + (Util.ToString(su.NbFramesTotal, true) ?? "---");
        // Data (current / projected file size)
        videoData.Text = (su.CurrentFileSize.HasValue ? su.CurrentFileSize.Value.ToString() : "---") + " / " + (su.ProjectedFileSize.HasValue ? su.ProjectedFileSize.Value.ToString() : "---");
        // Processing speed
        fps.Text = su.ProcessingSpeed ?? "---";
        // Time elapsed
        // Now we use TotalHours to avoid 24h+ resets...
        if (su.TimeElapsed.TotalHours > 24)
        {
            // runs longer than a day: show DD:HH:MM:SS
            timeElapsed.Text = string.Format("{0:00}:{1:00}:{2:00}:{3:00}", (int)su.TimeElapsed.TotalDays, su.TimeElapsed.Hours, su.TimeElapsed.Minutes, su.TimeElapsed.Seconds);
        }
        else
        {
            timeElapsed.Text = string.Format("{0:00}:{1:00}:{2:00}", (int)su.TimeElapsed.Hours, su.TimeElapsed.Minutes, su.TimeElapsed.Seconds);
        }
        // Estimated time
        // go back to the old function ;-)
        totalTime.Text = getTimeString(su.TimeElapsed, su.PercentageDoneExact ?? 0M);
        this.Text = "Status: " + (su.PercentageDoneExact ?? 0M).ToString("0.00") + " %";
        statusLabel.Text = su.Status ?? "";
        jobNameLabel.Text = "[" + su.JobName + "]";
        progress.Value = su.PercentageDone;
        // Windows 7 (NT 6.1) or newer supports taskbar progress display
        if ((Environment.OSVersion.Version.Major == 6 && Environment.OSVersion.Version.Minor >= 1) || Environment.OSVersion.Version.Major > 6)
        {
            taskbarProgress.SetProgressValue(this.Handle, Convert.ToUInt64(su.PercentageDone), 100);
        }
    }
    catch (Exception) { } // NOTE(review): silently swallows all UI-update errors — consider logging
}
/// <summary>
/// Fills in the elapsed time and the current output file size on the given
/// update, then raises the statusUpdate event if anyone is subscribed.
/// </summary>
/// <param name="su">the status update to complete and forward</param>
protected void sendStatusUpdateToGUI(StatusUpdate su)
{
    // elapsed time, expressed in ticks since the job started
    su.TimeElapsed = DateTime.Now.Ticks - job.Start.Ticks;
    if (File.Exists(job.Output))
    {
        FileInfo outputInfo = new FileInfo(job.Output);
        su.FileSize = outputInfo.Length / 1024; // bytes -> KB
    }
    if (statusUpdate != null)
    {
        statusUpdate(su);
    }
}
/// <summary>
/// Reports encoding progress to the GUI, including the audio position
/// corresponding to the current sample when one is known.
/// </summary>
/// <param name="n">percentage done (0..100)</param>
/// <param name="currentSample">index of the current audio sample, or -1 if unknown</param>
private void setProgress(double n, int currentSample)
{
    StatusUpdate e = new StatusUpdate();
    e.PercentageDoneExact = n;
    if (currentSample != -1)
    {
        // Convert the sample index to a timestamp:
        // samples -> milliseconds (via sample rate) -> 100ns ticks.
        Int64 msec = currentSample;
        msec *= 1000;
        msec /= _sampleRate;
        msec *= 10000; // 10,000 ticks per millisecond
        DateTime dt = new DateTime(msec);
        // BUGFIX: the format string was "HHHH:mm:ss.fff", which pads the hour
        // to four digits (e.g. "0000:01:23.456"); "HH" is the correct
        // two-digit hour specifier.
        e.AudioPosition = dt.ToString("HH:mm:ss.fff");
    }
    raiseEvent(e);
}
/// <summary>
/// Prepares audio encoding: verifies that the encoder executable exists,
/// quotes its path for the command line, and initialises the status update
/// object and the log.
/// </summary>
/// <param name="job">the job to be processed</param>
/// <param name="error">receives an error description on failure</param>
/// <returns>true if setup succeeded, false otherwise</returns>
public override bool setup(Job job, out string error)
{
    error = null;
    if (!checkEncoderExistence(executable, out error))
        return false;

    // quote the path so it survives spaces on the command line
    executable = "\"" + executable + "\"";
    this.job = job;

    su = new StatusUpdate();
    su.FPS = 0;
    su.JobName = job.Name;
    su.JobType = JobTypes.AUDIO;

    log = new StringBuilder();
    return true;
}
/// <summary>
/// Prepares this processor for the given job: resolves the executable
/// relative to the application directory (enabling relative paths),
/// verifies it exists, then stores the job and status update and validates
/// the job's input/output files.
/// </summary>
/// <param name="job2">the job to process; must actually be a TJob</param>
/// <param name="su">status update object used to report progress</param>
public void setup(Job job2, StatusUpdate su)
{
    Debug.Assert(job2 is TJob, "Job is the wrong type");
    TJob typedJob = (TJob)job2;
    this.job = typedJob;
    // resolve relative paths against the application directory
    executable = Path.Combine(System.Windows.Forms.Application.StartupPath, executable);
    Util.ensureExists(executable);
    this.su = su;
    checkJobIO();
}
/// <summary>
/// Prepares muxing: resolves the muxer executable relative to the
/// application directory, verifies it, quotes its path for the command
/// line, and initialises the status update, log, and projected file size.
/// </summary>
/// <param name="job">the mux job to be processed</param>
/// <param name="error">receives an error description on failure</param>
/// <returns>true if setup succeeded, false otherwise</returns>
public override bool setup(Job job, out string error)
{
    error = null;
    // This allows relative paths
    executable = Path.Combine(System.Windows.Forms.Application.StartupPath, executable);
    if (!checkExecutable(executable, out error))
        return false;

    // quote the path so it survives spaces on the command line
    executable = "\"" + executable + "\"";
    this.job = (MuxJob)job;

    su = new StatusUpdate();
    su.JobName = job.Name;
    su.JobType = JobTypes.MUX;
    su.FileSize = 0;

    log = new StringBuilder();
    setProjectedFileSize();
    return true;
}
/// <summary>
/// Catches the StatusUpdate event fired from Form1 and updates the GUI
/// accordingly. Fields whose value is unknown display "---".
/// </summary>
/// <param name="su">the status update to display</param>
public void UpdateStatus(StatusUpdate su)
{
    try
    {
        // Current position
        positionInClip.Text = (Util.ToString(su.ClipPosition) ?? "---") + " / " + (Util.ToString(su.ClipLength) ?? "---");
        // Current frame
        currentVideoFrame.Text = (Util.ToString(su.NbFramesDone) ?? "---") + " / " + (Util.ToString(su.NbFramesTotal) ?? "---");
        // Data (current / projected file size)
        videoData.Text = (su.CurrentFileSize.HasValue ? su.CurrentFileSize.Value.ToString() : "---") + " / " + (su.ProjectedFileSize.HasValue ? su.ProjectedFileSize.Value.ToString() : "---");
        // Processing speed
        fps.Text = su.ProcessingSpeed ?? "---";
        // Time elapsed
        // Now we use TotalHours to avoid 24h+ resets...
        if (su.TimeElapsed.TotalHours > 24)
        {
            // runs longer than a day: show DD:HH:MM:SS
            timeElapsed.Text = string.Format("{0:00}:{1:00}:{2:00}:{3:00}", (int)su.TimeElapsed.TotalDays, su.TimeElapsed.Hours, su.TimeElapsed.Minutes, su.TimeElapsed.Seconds);
        }
        else
        {
            timeElapsed.Text = string.Format("{0:00}:{1:00}:{2:00}", (int)su.TimeElapsed.Hours, su.TimeElapsed.Minutes, su.TimeElapsed.Seconds);
        }
        // Estimated time
        // go back to the old function ;-)
        totalTime.Text = getTimeString(su.TimeElapsed, su.PercentageDoneExact ?? 0M);
        // BUGFIX: "##.##" renders an empty string for 0, producing the title
        // "Status:  %"; "0.00" always shows two decimals, matching the other
        // status window's format.
        this.Text = "Status: " + (su.PercentageDoneExact ?? 0M).ToString("0.00") + " %";
        statusLabel.Text = su.Status ?? "";
        progress.Value = su.PercentageDone;
    }
    catch (Exception) { } // silently ignore UI-update errors (legacy behaviour)
}
/// <summary>
/// Prepares this processor for the given job. For any executable other than
/// cmd.exe, the path is resolved relative to the application directory when
/// it does not already exist, and is then verified.
/// </summary>
/// <param name="job2">the job to process; must actually be a TJob</param>
/// <param name="su">status update object used to report progress</param>
/// <param name="log">log item to write processing messages to</param>
public void setup(Job job2, StatusUpdate su, LogItem log)
{
    Debug.Assert(job2 is TJob, "Job is the wrong type");
    TJob typedJob = (TJob)job2;
    this.job = typedJob;
    this.log = log;
    bool isShell = executable.ToLowerInvariant().Equals("cmd.exe");
    if (!isShell)
    {
        // This enables relative paths, etc
        if (!File.Exists(executable))
            executable = Path.Combine(System.Windows.Forms.Application.StartupPath, executable);
        Util.ensureExists(executable);
    }
    this.su = su;
    checkJobIO();
}
/// <summary>
/// Sets up processing of an AviSynth job: opens the script, obtains a video
/// reader, initialises the status update with the clip's frame count and
/// length, and creates (but does not start) the processing and status threads.
/// (DOCFIX: the original XML comment had an unterminated summary tag and
/// described a bool return value on this void method.)
/// </summary>
/// <param name="job">the job to be processed; must be an AviSynthJob</param>
/// <param name="su">the status update object used to report progress</param>
/// <param name="_">unused log parameter, kept for interface compatibility</param>
/// <exception cref="JobRunException">wraps any error opening the script or creating the worker threads</exception>
public void setup(Job job, StatusUpdate su, LogItem _)
{
    Debug.Assert(job is AviSynthJob, "Job isn't an AviSynthJob");
    stup = su;
    this.job = (AviSynthJob)job;
    try
    {
        file = AvsFile.OpenScriptFile(job.Input);
        reader = file.GetVideoReader();
    }
    catch (Exception ex)
    {
        throw new JobRunException(ex);
    }
    stup.NbFramesTotal = (ulong)reader.FrameCount;
    stup.ClipLength = TimeSpan.FromSeconds((double)stup.NbFramesTotal / file.VideoInfo.FPS);
    stup.Status = "Playing through file...";
    position = 0;
    try
    {
        processorThread = new Thread(new ThreadStart(process));
    }
    catch (Exception e)
    {
        throw new JobRunException(e);
    }
    try
    {
        statusThread = new Thread(new ThreadStart(update));
    }
    catch (Exception e)
    {
        throw new JobRunException(e);
    }
}
/// <summary>
/// Creates the processor with a status update pre-tagged as an AviSynth job.
/// </summary>
public AviSynthProcessor()
{
    StatusUpdate initialStatus = new StatusUpdate();
    initialStatus.JobType = JobTypes.AVS;
    stup = initialStatus;
}
/// <summary>
/// Stores the job (downcast to OneClickPostProcessingJob), the status
/// update object and the log for later processing.
/// </summary>
/// <param name="job">the job to process</param>
/// <param name="su">status update object used to report progress</param>
/// <param name="_log">log item to write processing messages to</param>
public void setup(Job job, StatusUpdate su, LogItem _log)
{
    this.su = su;
    this._log = _log;
    this.job = (OneClickPostProcessingJob)job;
}
/// <summary>
/// Catches the StatusUpdate event fired from Form1 and updates the GUI
/// accordingly. Fields whose value is unknown display "---".
/// (CLEANUP: removed a large region of commented-out legacy per-JobType
/// update code that was kept after the rewrite to the unified display.)
/// </summary>
/// <param name="su">the status update to display</param>
public void UpdateStatus(StatusUpdate su)
{
    try
    {
        // Current position
        positionInClip.Text = (Util.ToString(su.ClipPosition) ?? "---") + " / " + (Util.ToString(su.ClipLength) ?? "---");
        // Current frame
        currentVideoFrame.Text = (Util.ToString(su.NbFramesDone) ?? "---") + " / " + (Util.ToString(su.NbFramesTotal) ?? "---");
        // Data (current / projected file size)
        videoData.Text = (su.CurrentFileSize.HasValue ? su.CurrentFileSize.Value.ToString() : "---") + " / " + (su.ProjectedFileSize.HasValue ? su.ProjectedFileSize.Value.ToString() : "---");
        // Processing speed
        fps.Text = su.ProcessingSpeed ?? "---";
        // Time elapsed
        // Now we use TotalHours to avoid 24h+ resets...
        if (su.TimeElapsed.TotalHours > 24)
        {
            // runs longer than a day: show DD:HH:MM:SS
            timeElapsed.Text = string.Format("{0:00}:{1:00}:{2:00}:{3:00}", (int)su.TimeElapsed.TotalDays, su.TimeElapsed.Hours, su.TimeElapsed.Minutes, su.TimeElapsed.Seconds);
        }
        else
        {
            timeElapsed.Text = string.Format("{0:00}:{1:00}:{2:00}", (int)su.TimeElapsed.Hours, su.TimeElapsed.Minutes, su.TimeElapsed.Seconds);
        }
        // Estimated time
        // go back to the old function ;-)
        totalTime.Text = getTimeString(su.TimeElapsed, su.PercentageDoneExact ?? 0M);
        // BUGFIX: "##.##" renders an empty string for 0, producing the title
        // "Status:  %"; "0.00" always shows two decimals, matching the other
        // status window's format.
        this.Text = "Status: " + (su.PercentageDoneExact ?? 0M).ToString("0.00") + " %";
        statusLabel.Text = su.Status ?? "";
        progress.Value = su.PercentageDone;
    }
    catch (Exception) { } // silently ignore UI-update errors (legacy behaviour)
}
/// <summary>
/// Raises the statusUpdate event so the GUI can refresh its display.
/// </summary>
/// <param name="su">the status update to forward</param>
protected void sendStatusUpdateToGUI(StatusUpdate su)
{
    // BUGFIX: invoking a delegate with no subscribers throws
    // NullReferenceException; guard like the other processors'
    // sendStatusUpdateToGUI implementations do.
    if (statusUpdate != null)
    {
        statusUpdate(su);
    }
}
/// <summary>
/// Creates the indexer in an idle state with a status update pre-tagged
/// as a VobSub job.
/// </summary>
public VobSubIndexer()
{
    isProcessing = false;
    stup = new StatusUpdate();
    stup.JobType = JobTypes.VOBSUB;
}
/// <summary>
/// Catches the StatusUpdate event fired from Form1 and updates the GUI
/// accordingly. Each job type populates a different subset of the fields;
/// fields that do not apply show "N/A".
/// </summary>
/// <param name="su">the status update to display</param>
public void UpdateStatus(StatusUpdate su)
{
    if (su.JobType == JobTypes.VIDEO) // video status update
    {
        this.currentVideoFrame.Text = su.NbFramesDone + "/" + su.NbFramesTotal;
        this.videoData.Text = su.FileSize.ToString() + " KB";
        // projected size = current size scaled by the inverse progress fraction;
        // a negative (overflowed/invalid) result is shown as "unknown"
        long projectedSize = (long)((double)su.FileSize / su.PercentageDoneExact * (double)100);
        if (projectedSize < 0)
        {
            this.filesize.Text = "unknown";
        }
        else
        {
            this.filesize.Text = projectedSize.ToString() + " KB";
        }
        this.fps.Text = su.FPS.ToString("##.##") + " FPS";
        this.timeElapsed.Text = su.TimeElapsedString;
        this.totalTime.Text = getTimeString(su.TimeElapsed, su.PercentageDoneExact);
        this.progress.Value = su.PercentageDone;
        this.Text = "Status: " + su.PercentageDoneExact.ToString("##.##") + " %";
    }
    if (su.JobType == JobTypes.AUDIO) // audio status update
    {
        this.currentVideoFrame.Text = su.AudioPosition;
        this.videoData.Text = su.FileSize.ToString() + " KB";
        if (su.FileSize == 0) // first pass
        {
            // no output yet, so no size projection is possible
            this.filesize.Text = "N/A (first pass)";
            this.totalTime.Text = "N/A (first pass)";
            this.Text = "Status: first pass";
        }
        else
        {
            long projectedSize = (long)((double)su.FileSize / su.PercentageDoneExact * (double)100);
            this.filesize.Text = projectedSize.ToString() + " KB";
            this.totalTime.Text = getTimeString(su.TimeElapsed, su.PercentageDoneExact);
            this.progress.Value = su.PercentageDone;
            this.Text = "Status: " + su.PercentageDoneExact.ToString("##.##") + " %";
        }
        this.timeElapsed.Text = su.TimeElapsedString;
    }
    if (su.JobType == JobTypes.MUX) // mux status update
    {
        this.currentVideoFrame.Text = su.AudioFileSize.ToString() + " KB"; // audio data
        this.videoData.Text = su.FileSize.ToString() + " KB";
        this.filesize.Text = su.ProjectedFileSize.ToString() + " KB";
        this.fps.Text = su.AudioPosition;
        this.Text = "Status: " + su.PercentageDoneExact.ToString("##.##") + " %";
        this.progress.Value = su.PercentageDone;
        this.timeElapsed.Text = su.TimeElapsedString;
    }
    if (su.JobType == JobTypes.AVS) // video status update
    {
        this.currentVideoFrame.Text = su.NbFramesDone + "/" + su.NbFramesTotal;
        this.videoData.Text = "N/A";
        this.filesize.Text = "N/A";
        this.fps.Text = su.FPS.ToString("##.##") + " FPS";
        this.timeElapsed.Text = su.TimeElapsedString;
        this.totalTime.Text = getTimeString(su.TimeElapsed, su.PercentageDoneExact);
        this.progress.Value = su.PercentageDone;
        this.Text = "Status: " + su.PercentageDoneExact.ToString("##.##") + " %";
    }
    if (su.JobType == JobTypes.INDEX)
    {
        // indexing reports only elapsed time
        this.currentVideoFrame.Text = "N/A";
        this.videoData.Text = "N/A";
        this.filesize.Text = "N/A";
        this.fps.Text = "N/A";
        this.totalTime.Text = "N/A";
        this.progress.Value = 0;
        this.timeElapsed.Text = su.TimeElapsedString;
    }
}
/// <summary>
/// Sends a status update up the chain, where it will be thrown as an event.
/// </summary>
/// <param name="su">the status update object to be sent back to the GUI</param>
protected void sendStatusUpdate(StatusUpdate su)
{
    // simply delegate to the base implementation
    base.sendStatusUpdateToGUI(su);
}
/// <summary>
/// Decodes the audio through AviSynth and pipes the raw samples into the
/// external encoder's stdin, reporting progress along the way. On failure
/// the partial output is deleted and an aborted/error status update is
/// raised; on success a completion update is raised. Temp files are always
/// cleaned up.
/// </summary>
private void encode()
{
    try
    {
        using (AviSynthScriptEnvironment env = new AviSynthScriptEnvironment())
        {
            using (AviSynthClip a = env.ParseScript(_avisynthAudioScript))
            {
                if (0 == a.ChannelsCount)
                {
                    throw new ApplicationException("Can't find audio stream");
                }
                _logBuilder.AppendFormat("Channels={0}, BitsPerSample={1}, SampleRate={2}Hz{3}", a.ChannelsCount, a.BitsPerSample, a.AudioSampleRate, Environment.NewLine);
                // stream the clip in chunks of up to 4096 samples
                const int MAX_SAMPLES_PER_ONCE = 4096;
                int frameSample = 0;
                int frameBufferTotalSize = MAX_SAMPLES_PER_ONCE * a.ChannelsCount * a.BytesPerSample;
                byte[] frameBuffer = new byte[frameBufferTotalSize];
                createEncoderProcess(a);
                try
                {
                    using (Stream target = _encoderProcess.StandardInput.BaseStream)
                    {
                        // let's write WAV Header
                        if (_mustSendWavHeaderToEncoderStdIn)
                        {
                            writeHeader(target, a);
                        }
                        _sampleRate = a.AudioSampleRate;
                        raiseEvent("Preprocessing...");
                        // pin the buffer so AviSynth can write into it via a raw pointer
                        GCHandle h = GCHandle.Alloc(frameBuffer, GCHandleType.Pinned);
                        IntPtr address = h.AddrOfPinnedObject();
                        try
                        {
                            while (frameSample < a.SamplesCount)
                            {
                                // _mre blocks here while the job is paused
                                _mre.WaitOne();
                                if (_encoderProcess != null)
                                {
                                    if (_encoderProcess.HasExited)
                                    {
                                        throw new ApplicationException("Abnormal encoder termination " + _encoderProcess.ExitCode.ToString());
                                    }
                                }
                                int nHowMany = Math.Min((int)(a.SamplesCount - frameSample), MAX_SAMPLES_PER_ONCE);
                                a.ReadAudio(address, frameSample, nHowMany);
                                _mre.WaitOne();
                                setProgress(((100 * (double)frameSample) / a.SamplesCount), frameSample);
                                target.Write(frameBuffer, 0, nHowMany * a.ChannelsCount * a.BytesPerSample);
                                target.Flush();
                                frameSample += nHowMany;
                                Thread.Sleep(0); // yield to other threads
                            }
                        }
                        finally
                        {
                            // always unpin the buffer
                            h.Free();
                        }
                        setProgress(100, frameSample);
                        // pad odd-sized sample data to an even byte count
                        if (_mustSendWavHeaderToEncoderStdIn && a.BytesPerSample % 2 == 1)
                        {
                            target.WriteByte(0);
                        }
                    }
                    raiseEvent("Finalizing encoder");
                    _encoderProcess.WaitForExit();
                    _readFromStdErrThread.Join();
                    _readFromStdOutThread.Join();
                    if (0 != _encoderProcess.ExitCode)
                    {
                        throw new ApplicationException("Abnormal encoder termination " + _encoderProcess.ExitCode.ToString());
                    }
                }
                finally
                {
                    // make sure the encoder process and its reader threads are torn down
                    if (!_encoderProcess.HasExited)
                    {
                        _encoderProcess.Kill();
                        _encoderProcess.WaitForExit();
                        _readFromStdErrThread.Join();
                        _readFromStdOutThread.Join();
                    }
                    _readFromStdErrThread = null;
                    _readFromStdOutThread = null;
                }
            }
        }
    }
    catch (Exception e)
    {
        deleteOutputFile();
        if (e is ThreadAbortException)
        {
            // user-requested abort
            _logBuilder.Append("ABORTING!\n");
            StatusUpdate u = new StatusUpdate();
            u.WasAborted = true;
            raiseEvent(u);
        }
        else
        {
            _logBuilder.Append("Error:\n" + e.ToString());
            StatusUpdate u = new StatusUpdate();
            u.HasError = true;
            u.Error = e.ToString();
            raiseEvent(u);
        }
        return;
    }
    finally
    {
        deleteTempFiles();
    }
    // normal completion
    StatusUpdate u2 = new StatusUpdate();
    u2.IsComplete = true;
    raiseEvent(u2);
}
/// <summary>
/// Prepares an audio encoding job: builds the AviSynth script that decodes,
/// cuts, delays, normalizes and downmixes/upmixes the audio, then selects
/// the external encoder executable and command line from the job's settings.
/// </summary>
/// <param name="job">the job to be processed; must be an AudioJob</param>
/// <param name="su">the status update object used to report progress</param>
/// <exception cref="MissingFileException">the cuts file does not exist</exception>
/// <exception cref="JobRunException">the cuts file is unreadable/broken</exception>
/// <exception cref="EncoderMissingException">the selected encoder executable is missing</exception>
public void setup(Job job, StatusUpdate su)
{
    this.audioJob = (AudioJob)job;
    this.su = su;
    //let's create avisynth script
    StringBuilder script = new StringBuilder();
    string id = _uniqueId;
    string tmp = Path.Combine(Path.GetTempPath(), id);
    bool directShow = audioJob.Settings.ForceDecodingViaDirectShow;
    if (!directShow)
    {
        // choose a source filter by extension; unrecognised extensions fall
        // back to DirectShowSource below
        switch (Path.GetExtension(audioJob.Input).ToLower())
        {
            case ".ac3":
                script.AppendFormat("NicAc3Source(\"{0}\"", audioJob.Input);
                if (audioJob.Settings.AutoGain)
                {
                    script.AppendFormat(", DRC=1){0}", Environment.NewLine);
                }
                else
                {
                    script.Append(")");
                }
                break;
            case ".avs":
                script.AppendFormat("Import(\"{0}\"){1}", audioJob.Input, Environment.NewLine);
                break;
            case ".wav":
                script.AppendFormat("WavSource(\"{0}\"){1}", audioJob.Input, Environment.NewLine);
                break;
            case ".dts":
                script.AppendFormat("NicDtsSource(\"{0}\")", audioJob.Input);
                if (audioJob.Settings.AutoGain)
                {
                    script.AppendFormat(", DRC=1){0}", Environment.NewLine);
                }
                else
                {
                    script.Append(")");
                }
                break;
            case ".mpa":
            case ".mpg":
            case ".mp2":
                script.AppendFormat("NicMPASource(\"{0}\"){1}", audioJob.Input, Environment.NewLine);
                break;
            case ".mp3":
                script.AppendFormat("NicMPG123Source(\"{0}\"){1}", audioJob.Input, Environment.NewLine);
                break;
            default:
                directShow = true;
                break;
        }
    }
    if (directShow)
    {
        script.AppendFormat("DirectShowSource(\"{0}\"){1}", audioJob.Input, Environment.NewLine);
    }
    script.AppendFormat("EnsureVBRMP3Sync(){0}", Environment.NewLine);
    if (audioJob.Delay != 0)
    {
        // delay is stored in milliseconds
        script.AppendFormat("DelayAudio({0}.0/1000.0){1}", audioJob.Delay, Environment.NewLine);
    }
    if (audioJob.Settings.ImproveAccuracy || audioJob.Settings.AutoGain /* to fix the bug */)
    {
        script.AppendFormat("ConvertAudioToFloat(){0}", Environment.NewLine);
    }
    if (!string.IsNullOrEmpty(audioJob.CutFile))
    {
        try
        {
            Cuts cuts = FilmCutter.ReadCutsFromFile(audioJob.CutFile);
            script.AppendLine(FilmCutter.GetCutsScript(cuts, true));
        }
        catch (FileNotFoundException)
        {
            deleteTempFiles();
            throw new MissingFileException(audioJob.CutFile);
        }
        catch (Exception)
        {
            deleteTempFiles();
            throw new JobRunException("Broken cuts file, " + audioJob.CutFile + ", can't continue.");
        }
    }
    if (audioJob.Settings.AutoGain)
    {
        script.AppendFormat("Normalize(){0}", Environment.NewLine);
    }
    // channel-layout conversion; the x_* helper functions referenced here are
    // defined in the script epilogue appended at the end of this method
    switch (audioJob.Settings.DownmixMode)
    {
        case ChannelMode.KeepOriginal:
            break;
        case ChannelMode.ConvertToMono:
            script.AppendFormat("ConvertToMono(){0}", Environment.NewLine);
            break;
        case ChannelMode.DPLDownmix:
            script.Append("6<=Audiochannels(last)?x_dpl" + id + @"(ConvertAudioToFloat(last)):last" + Environment.NewLine);
            break;
        case ChannelMode.DPLIIDownmix:
            script.Append("6<=Audiochannels(last)?x_dpl2" + id + @"(ConvertAudioToFloat(last)):last" + Environment.NewLine);
            break;
        case ChannelMode.StereoDownmix:
            script.Append("6<=Audiochannels(last)?x_stereo" + id + @"(ConvertAudioToFloat(last)):last" + Environment.NewLine);
            break;
        case ChannelMode.Upmix:
            createTemporallyEqFiles(tmp);
            script.Append("2==Audiochannels(last)?x_upmix" + id + @"(last):last" + Environment.NewLine);
            break;
        case ChannelMode.UpmixUsingSoxEq:
            script.Append("2==Audiochannels(last)?x_upmixR" + id + @"(last):last" + Environment.NewLine);
            break;
        case ChannelMode.UpmixWithCenterChannelDialog:
            script.Append("2==Audiochannels(last)?x_upmixC" + id + @"(last):last" + Environment.NewLine);
            break;
    }
    //let's obtain command line & other staff
    if (audioJob.Settings is AC3Settings)
    {
        script.Append("6<=Audiochannels(last)?GetChannel(last,1,3,2,5,6,4):last" + Environment.NewLine);
        _mustSendWavHeaderToEncoderStdIn = true;
        AC3Settings n = audioJob.Settings as AC3Settings;
        _encoderExecutablePath = this._settings.FFMpegPath;
        _encoderCommandLine = "-i - -y -acodec ac3 -ab " + n.Bitrate + "k \"{0}\"";
    }
    if (audioJob.Settings is MP2Settings)
    {
        _mustSendWavHeaderToEncoderStdIn = true;
        MP2Settings n = audioJob.Settings as MP2Settings;
        _encoderExecutablePath = this._settings.FFMpegPath;
        _encoderCommandLine = "-i - -y -acodec mp2 -ab " + n.Bitrate + "k \"{0}\"";
    }
    if (audioJob.Settings is WinAmpAACSettings)
    {
        _mustSendWavHeaderToEncoderStdIn = false;
        WinAmpAACSettings n = audioJob.Settings as WinAmpAACSettings;
        _encoderExecutablePath = this._settings.EncAacPlusPath;
        StringBuilder sb = new StringBuilder("- \"{0}\" --rawpcm {1} {3} {2} --cbr ");
        sb.Append(n.Bitrate * 1000);
        if (n.Mpeg2AAC)
        {
            sb.Append(" --mpeg2aac");
        }
        switch (n.Profile)
        {
            case AacProfile.PS:
                break;
            case AacProfile.HE:
                sb.Append(" --nops");
                break;
            case AacProfile.LC:
                sb.Append(" --lc");
                break;
        }
        switch (n.StereoMode)
        {
            case WinAmpAACSettings.AacStereoMode.Dual:
                sb.Append(" --dc");
                break;
            case WinAmpAACSettings.AacStereoMode.Joint:
                break;
            case WinAmpAACSettings.AacStereoMode.Independent:
                sb.Append(" --is");
                break;
        }
        _encoderCommandLine = sb.ToString();
    }
    if (audioJob.Settings is AudXSettings)
    {
        script.Append("ResampleAudio(last,48000)" + Environment.NewLine);
        script.Append("6==Audiochannels(last)?last:GetChannel(last,1,1,1,1,1,1)" + Environment.NewLine);
        _mustSendWavHeaderToEncoderStdIn = false;
        AudXSettings n = audioJob.Settings as AudXSettings;
        _encoderExecutablePath = this._settings.EncAudXPath;
        _encoderCommandLine = "- \"{0}\" --q " + ((int)n.Quality) + " --raw {1}";
    }
    if (audioJob.Settings is OggVorbisSettings)
    {
        // http://forum.doom9.org/showthread.php?p=831098#post831098
        //if(!this._settings.FreshOggEnc2)
        script.Append("6==Audiochannels(last)?GetChannel(last,1,3,2,5,6,4):last" + Environment.NewLine);
        _mustSendWavHeaderToEncoderStdIn = false;
        OggVorbisSettings n = audioJob.Settings as OggVorbisSettings;
        _encoderExecutablePath = this._settings.OggEnc2Path;
        _encoderCommandLine = "-Q --raw --raw-bits={2} --raw-chan={3} --raw-rate={1} --quality " + n.Quality.ToString(System.Globalization.CultureInfo.InvariantCulture) + " -o \"{0}\" -";
    }
    if (audioJob.Settings is NeroAACSettings)
    {
        _mustSendWavHeaderToEncoderStdIn = true;
        NeroAACSettings n = audioJob.Settings as NeroAACSettings;
        NeroAACSettings nas = n;
        _encoderExecutablePath = this._settings.NeroAacEncPath;
        StringBuilder sb = new StringBuilder("-ignorelength ");
        switch (n.Profile)
        {
            case AacProfile.HE:
                sb.Append("-he ");
                break;
            case AacProfile.PS:
                sb.Append("-hev2 ");
                break;
            case AacProfile.LC:
                sb.Append("-lc ");
                break;
        }
        if (n.CreateHintTrack)
        {
            sb.Append("-hinttrack ");
        }
        switch (n.BitrateMode)
        {
            case BitrateManagementMode.ABR:
                sb.AppendFormat(System.Globalization.CultureInfo.InvariantCulture, "-br {0} ", n.Bitrate * 1000);
                break;
            case BitrateManagementMode.CBR:
                sb.AppendFormat(System.Globalization.CultureInfo.InvariantCulture, "-cbr {0} ", n.Bitrate * 1000);
                break;
            case BitrateManagementMode.VBR:
                sb.AppendFormat(System.Globalization.CultureInfo.InvariantCulture, "-q {0} ", n.Quality);
                break;
        }
        sb.Append("-if - -of \"{0}\"");
        _encoderCommandLine = sb.ToString();
    }
    if (audioJob.Settings is FaacSettings)
    {
        FaacSettings f = audioJob.Settings as FaacSettings;
        _encoderExecutablePath = this._settings.FaacPath;
        _mustSendWavHeaderToEncoderStdIn = false;
        switch (f.BitrateMode)
        {
            // {0} means output file name
            // {1} means samplerate in Hz
            // {2} means bits per sample
            // {3} means channel count
            // {4} means samplecount
            // {5} means size in bytes
            case BitrateManagementMode.VBR:
                _encoderCommandLine = "-q " + f.Quality + " -o \"{0}\" -P -X -R {1} -B {2} -C {3} --mpeg-vers 4 -";
                break;
            default:
                _encoderCommandLine = "-b " + f.Bitrate + " -o \"{0}\" -P -X -R {1} -B {2} -C {3} --mpeg-vers 4 -";
                break;
        }
    }
    if (audioJob.Settings is MP3Settings)
    {
        MP3Settings m = audioJob.Settings as MP3Settings;
        _mustSendWavHeaderToEncoderStdIn = true;
        _encoderExecutablePath = this._settings.LamePath;
        switch (m.BitrateMode)
        {
            case BitrateManagementMode.VBR:
                _encoderCommandLine = "-V " + (m.Quality / 10 - 1) + " -h --silent - \"{0}\"";
                break;
            case BitrateManagementMode.CBR:
                _encoderCommandLine = "-b " + m.Bitrate + " --cbr -h --silent - \"{0}\"";
                break;
            case BitrateManagementMode.ABR:
                _encoderCommandLine = "--abr " + m.Bitrate + " -h --silent - \"{0}\"";
                break;
        }
    }
    //Just check encoder existance
    _encoderExecutablePath = Path.Combine(AppDomain.CurrentDomain.SetupInformation.ApplicationBase, _encoderExecutablePath);
    if (!File.Exists(_encoderExecutablePath))
    {
        deleteTempFiles();
        throw new EncoderMissingException(_encoderExecutablePath);
    }
    script.AppendFormat("ConvertAudioTo16bit(){0}", Environment.NewLine);
    // script epilogue: the downmix/upmix helper functions referenced above.
    // NOTE(review): AviSynth separates statements by newline; confirm the
    // embedded script text below still carries its original line breaks
    // (content preserved byte-for-byte here).
    script.AppendLine( @" return last function x_dpl" + id + @"(clip a) { fl = GetChannel(a, 1) fr = GetChannel(a, 2) c = GetChannel(a, 3) sl = GetChannel(a, 5) sr = GetChannel(a, 6) ssr = MixAudio(sl, sr, 0.2222, 0.2222) ssl = Amplify(ssr, -1.0) fl_c = MixAudio(fl, c, 0.3254, 0.2301) fr_c = MixAudio(fr, c, 0.3254, 0.2301) l = MixAudio(ssl, fl_c, 1.0, 1.0) r = MixAudio(ssr, fr_c, 1.0, 1.0) return MergeChannels(l, r) } function x_dpl2" + id + @"(clip a) { fl = GetChannel(a, 1) fr = GetChannel(a, 2) c = GetChannel(a, 3) sl = GetChannel(a, 5) sr = GetChannel(a, 6) ssl = MixAudio(sl, sr, 0.2818, 0.1627).Amplify(-1.0) fl_c = MixAudio(fl, c, 0.3254, 0.2301) ssr = MixAudio(sl, sr, 0.1627, 0.2818) fr_c = MixAudio(fr, c, 0.3254, 0.2301) l = MixAudio(ssl, fl_c, 1.0, 1.0) r = MixAudio(ssr, fr_c, 1.0, 1.0) return MergeChannels(l, r) } function x_stereo" + id + @"(clip a) { fl = GetChannel(a, 1) fr = GetChannel(a, 2) c = GetChannel(a, 3) lfe = GetChannel(a, 4) sl = GetChannel(a, 5) sr = GetChannel(a, 6) l_sl = MixAudio(fl, sl, 0.2929, 0.2929) c_lfe = MixAudio(lfe, c, 0.2071, 0.2071) r_sr = MixAudio(fr, sr, 0.2929, 0.2929) l = MixAudio(l_sl, c_lfe, 1.0, 1.0) r = MixAudio(r_sr, c_lfe, 1.0, 1.0) return MergeChannels(l, r) } function x_upmix" + id + @"(clip a) { m = ConvertToMono(a) f = SuperEQ(a,""" + tmp + @"front.feq"") s = SuperEQ(a,""" + tmp + @"back.feq"") c = SuperEQ(m,""" + tmp + @"center.feq"") lfe = SuperEQ(m,""" + tmp + @"lfe.feq"") return MergeChannels( f.getleftchannel, f.getrightchannel , c, lfe, s.getleftchannel, s.getrightchannel) } function x_upmixR" + id + @"(clip Stereo) { Front = mixaudio(Stereo.soxfilter(""filter 0-600""),mixaudio(Stereo.soxfilter(""filter 600-1200""),Stereo.soxfilter(""filter 1200-7000""),0.45,0.25),0.50,1) Back = mixaudio(Stereo.soxfilter(""filter 0-600""),mixaudio(Stereo.soxfilter(""filter 600-1200""),Stereo.soxfilter(""filter 1200-7000""),0.35,0.15),0.40,1) fl = GetLeftChannel(Front) fr = GetRightChannel(Front) cc = ConvertToMono(stereo).SoxFilter(""filter 625-24000"") lfe = ConvertToMono(stereo).SoxFilter(""lowpass 100"",""vol -0.5"") sl = GetLeftChannel(Back) sr = GetRightChannel(Back) sl = DelayAudio(sl,0.02) sr = DelayAudio(sr,0.02) return MergeChannels(fl,fr,cc,lfe,sl,sr) } function x_upmixC" + id + @"(clip stereo) { left = stereo.GetLeftChannel() right = stereo.GetRightChannel() fl = mixaudio(left.soxfilter(""filter 0-24000""),right.soxfilter(""filter 0-24000""),0.6,-0.5) fr = mixaudio(right.soxfilter(""filter 0-24000""),left.soxfilter(""filter 0-24000""),0.6,-0.5) cc = ConvertToMono(stereo).SoxFilter(""filter 625-24000"") lfe = ConvertToMono(stereo).SoxFilter(""lowpass 100"",""vol -0.5"") sl = mixaudio(left.soxfilter(""filter 0-24000""),right.soxfilter(""filter 0-24000""),0.5,-0.4) sr = mixaudio(right.soxfilter(""filter 0-24000""),left.soxfilter(""filter 0-24000""),0.5,-0.4) sl = DelayAudio(sl,0.02) sr = DelayAudio(sr,0.02) return MergeChannels(fl,fr,cc,lfe,sl,sr) } " );
    _avisynthAudioScript = script.ToString();
}