/// <summary>
/// Returns a string representation in which the Distributor is turned into a CommandArguments string and appended to the end of the argument collection.
/// </summary>
/// <returns></returns>
public override string ToString()
{
    //CommandArguments distribAsCmd = CommandArguments.FromParsable(_clusterArgs);
    //distribAsCmd.ExtractOptional<string>("ParallelOptions", null); // this was not guaranteed to be known about before. DistributeLocally will add it, so if your program already expected it, there will now be two.
    string result = _argCollection.ToString(); // + " " + distribAsCmd.ToString();
    return result;
}
public void ValidateToString2()
{
    string fileName = Path.GetTempFileName();
    fileName = fileName.Replace(Path.GetTempPath(), "");
    string[] args = { fileName, "Sum" };
    CommandArguments parsableObject = new CommandArguments(args);

    bool protectWithQuotes = true;
    string str = parsableObject.ToString(protectWithQuotes);
    Assert.IsNotNull(str);
}
public void ValidateToString1()
{
    string fileName = Path.GetTempFileName();
    fileName = fileName.Replace(Path.GetTempPath(), "");
    string[] args = { fileName, "Sum" };
    object parsableObject = new CommandArguments(args);

    bool suppressDefaults = true;
    bool protectWithQuotes = true;
    string str = CommandArguments.ToString(parsableObject, suppressDefaults, protectWithQuotes);
    Assert.IsNotNull(str);
}
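Taken together, the two tests above exercise both the instance and the static form of ToString. A minimal sketch combining them, assuming only the constructor and overloads shown in those tests ("input.txt" and "Sum" are placeholder arguments; the exact formatting of the returned string is library-defined):

// Sketch only: render the same arguments through both ToString overloads used in the tests.
string[] args = { "input.txt", "Sum" };
CommandArguments parsed = new CommandArguments(args);

bool suppressDefaults = true;
bool protectWithQuotes = true;

string viaInstance = parsed.ToString(protectWithQuotes);
string viaStatic = CommandArguments.ToString(parsed, suppressDefaults, protectWithQuotes);

// Either string is suitable for handing back to Process.Start as a command line,
// which is what the launcher snippets further down do with args.ToString().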
void StartProbeAsync(string ffmpegexe, string workdir)
{
    // Set up objects
    AVStreams = new List<AVStream>();
    probeArguments = new CommandArguments();
    ConstructProbeArguments();

    // From parameters
    ProcessStartInfo psi = new ProcessStartInfo();
    // if (ffmpegexe.Equals("ffmpeglatest.exe")) ffmpegexe = "ffmpeg.exe"; // prevent issues like 5.1(side) audio not being recognized
    string theFileName = "\"" + Path.Combine(PathToTools, ffmpegexe) + "\"";
    string cmdArguments = "/C ";
    psi.Arguments = cmdArguments + "\"" + theFileName + " " + probeArguments.ToString() + "\"";
    SendDebugMessage("probe command: " + psi.Arguments);
    psi.FileName = "cmd";
    psi.UseShellExecute = false;
    psi.CreateNoWindow = true;
    if (ffmpegexe.Equals("ffmpeglatest.exe"))
    {
        psi.WorkingDirectory = workdir; // the m3u8 index files refer to .ts files in workdir
    }

    // Ensure directory exists
    Directory.CreateDirectory(Path.GetDirectoryName(TempFile));

    Process p = Process.Start(psi);
    p.WaitForExit(50000);

    if (!File.Exists(TempFile))
    {
        // Failed
        ProbeFinished = false;
        ProbeSuccess = false;
        ProbeReport = "No output file was created.";
        EndProbe();
        return;
    }

    // Probe has finished - we waited for exit... let's analyse
    ProbeFinished = true;
    processOutputFile(); // Must do this first!
    AnalyseStreams();
}
void Initialise()
{
    shellRunner = new ShellCmdRunner();
    shellRunner.ProcessFinished += new EventHandler<GenericEventArgs<processfinishedEventArgs>>(shellRunner_ProcessFinished);
    shellRunner.FileName = Path.Combine(PathToTools, "ffmpeg.exe");
    shellRunner.StandardErrorReceivedLine += new EventHandler<GenericEventArgs<string>>(shellRunner_StandardErrorReceivedLine);
    shellRunner.StandardOutputReceived += new EventHandler<GenericEventArgs<byte[]>>(shellRunner_StandardOutputReceived); // Incoming data from ffmpeg STDOUT - management

    InitTempWriter();

    // Set up objects
    cmdArguments = new CommandArguments();
    segmentArguments = new CommandArguments();

    // Arguments
    ConstructArguments();
    shellRunner.Arguments = cmdArguments.ToString();
}
void StartProbeAsync()
{
    // Set up objects
    AVStreams = new List<AVStream>();
    probeArguments = new CommandArguments();
    ConstructProbeArguments();

    // From parameters
    ProcessStartInfo psi = new ProcessStartInfo();
    string theFileName = "\"" + Path.Combine(PathToTools, "ffmpeg.exe") + "\"";
    string cmdArguments = "/C ";
    psi.Arguments = cmdArguments + "\"" + theFileName + " " + probeArguments.ToString() + "\"";
    psi.FileName = "cmd";
    psi.UseShellExecute = false;
    psi.CreateNoWindow = true;

    // Ensure directory exists
    Directory.CreateDirectory(Path.GetDirectoryName(TempFile));

    Process p = Process.Start(psi);
    p.WaitForExit(50000);

    if (!File.Exists(TempFile))
    {
        // Failed
        ProbeFinished = false;
        ProbeSuccess = false;
        ProbeReport = "No output file was created.";
        EndProbe();
        return;
    }

    // Probe has finished - we waited for exit... let's analyse
    ProbeFinished = true;
    processOutputFile(); // Must do this first!
    AnalyseStreams();
}
public void StartProbeAsync()
{
    ShellCmdRunner shellProber = new ShellCmdRunner();

    // From parameters
    probeRunner = new ShellCmdRunner();
    probeRunner.ProcessOutputReceived += new EventHandler<GenericEventArgs<string>>(probeRunner_ProcessOutputReceived);
    probeRunner.FileName = Path.Combine(PathToTools, "ffprobe.exe");
    // No need to monitor the children here
    probeRunner.MonitorChildren = false;

    // Set up objects
    probeArguments = new CommandArguments();
    ConstructProbeArguments();
    probeRunner.Arguments = probeArguments.ToString();
    AVStreams = new List<AVStream>();

    string txtResult = "";
    if (!probeRunner.Start(ref txtResult, true))
    {
        // Failure - return
        ProbeFinished = false; // We didn't get to finish
        ProbeSuccess = false;
        ProbeReport = txtResult;
        probeHandle.Set();
        return;
    }

    // Doesn't really finish - I think it launches more threads... give it 3 seconds
    System.Threading.Thread.CurrentThread.Join(3000);

    // Probe has finished - we waited for exit... let's analyse
    ProbeFinished = true;
    processOutputBuffer(); // Must do this first!
    AnalyseStreams();
}
/// <Summary>
/// Launch the AI program to take any turns for AI players.
/// </Summary>
/// <param name="status">Required as this is called in as a worker thread, but unused.</param>
private void RunAI(object status)
{
    if (runAiCheckBox.Checked)
    {
        // MessageBox.Show("Run AI");
        foreach (PlayerSettings settings in serverState.AllPlayers)
        {
            if (settings.AiProgram == "Human")
            {
                continue;
            }

            EmpireData empireData;
            serverState.AllEmpires.TryGetValue(settings.PlayerNumber, out empireData);
            if (empireData == null || empireData.TurnYear != serverState.TurnYear || !empireData.TurnSubmitted)
            {
                // TODO: Add support for running custom AIs based on settings.AiProgram.
                CommandArguments args = new CommandArguments();
                args.Add(CommandArguments.Option.AiSwitch);
                args.Add(CommandArguments.Option.RaceName, settings.RaceName);
                args.Add(CommandArguments.Option.Turn, serverState.TurnYear);
                args.Add(CommandArguments.Option.IntelFileName, Path.Combine(serverState.GameFolder, settings.RaceName + Global.IntelExtension));
                try
                {
                    Process.Start(Assembly.GetExecutingAssembly().Location, args.ToString());
                }
                catch
                {
                    Report.Error("Failed to launch AI.");
                }
                // FIXME (priority 3) - cannot process more than one AI at a time.
                // It will crash if multiple AIs try to access the same files at the same time.
                return;
            }
        }
    }
}
//private static bool TryCreateTaskArgsAndValidate(ArgumentCollection args, string pieceIndexAsString, out ArgumentCollection resultArgs)
//{
//    var argsToValidate = (ArgumentCollection)args.Clone();
//    argsToValidate.AddOptional("Tasks", pieceIndexAsString == "*" ? "0" : pieceIndexAsString);

//    resultArgs = (ArgumentCollection)args.Clone();
//    resultArgs.AddOptional("Tasks", pieceIndexAsString);

//    //if (checkIfValid)
//    //{
//    //    if (!validateParamsOrNull(argsToValidate))
//    //    {
//    //        Console.WriteLine("The parameters are not valid for the submitted task.");
//    //        Console.WriteLine(resultArgs);
//    //        return false;
//    //    }
//    //}
//    return true;
//}

private static string CreateTaskString(Distribute.Distribute distributeExe, bool suppressDefaults)
{
    DistributableWrapper wrapper = distributeExe.Distributable as DistributableWrapper;
    if (wrapper != null)
    {
        Distribute.Locally local = (Distribute.Locally)distributeExe.Distributor;
        string result = string.Format("{0} -TaskCount {1} -Tasks {2} -Cleanup {3}",
            wrapper.ToString(),
            local.TaskCount,
            local.Tasks == null ? "*" : local.Tasks.ToString(),
            local.Cleanup);
        return result;
    }
    else
    {
        string result = CommandArguments.ToString(distributeExe, protect: true, suppressDefaults: suppressDefaults);
        result = result.Replace("Tasks:null", "Tasks:*");
        return result;
    }
}
void AnalyseStreams()
{
    SendDebugMessage("Analysing streams.");
    mapArguments = new CommandArguments();

    // Okay, let's look at what we got...
    // There must be at least one audio stream AND one video stream for us to add mapping parameters to ffmpeg
    if ((AVStreams == null) ||
        (AVAudioStreams.Count < 1) ||
        (AVVideoStreams.Count < 1))
    {
        ProbeSuccess = false;
        ProbeReport = "Not enough audio or video streams detected to add mappings.";
        EndProbe();
        return;
    }

    // We already know there's at least one video stream
    // Use the first video stream
    if (AVVideoStreams.Count > 0)
    {
        SendDebugMessage("MediaProbe: Adding first video stream 0:" + AVVideoStreams[0].StreamIndex.ToString() + " (" + AVVideoStreams[0].CodecTag + ")");
        AddStreamToMap(AVVideoStreams[0]);
    }

    // Do we have a preferred audio index?
    if ((UsePreferredAudioStreamIndex) && (AVStreamByIndex(PreferredAudioStreamIndex) != null))
    {
        AVStream ast = AVStreamByIndex(PreferredAudioStreamIndex);
        SendDebugMessage("MediaProbe: Adding requested stereo audio stream 0:" + ast.StreamIndex.ToString() + " (" + ast.CodecTag + ")");
        AddStreamToMap(ast);
    }
    else
    {
        // TODO: For MP2, different behaviour with AC3?
        // We already know there's at least one audio stream
        if (AVAudioStreams.Count == 1)
        {
            // If there's just one audio stream, use it
            AddStreamToMap(AVAudioStreams[0]);
        }
        else if (AVAudioStreamsStereo.Count > 0) // If there are some stereo streams
        {
            if (AVAudioStreamsStereo.Count == 1)
            {
                // If there's just one stereo audio stream, use it
                AVStream ast = AVAudioStreamsStereo[0];
                SendDebugMessage("MediaProbe: Adding only stereo audio stream 0:" + ast.StreamIndex.ToString() + " (" + ast.CodecTag + ")");
                AddStreamToMap(ast);
            }
            else
            {
                // There are multiple stereo streams: add the LAST stereo stream
                AVStream ast = AVAudioStreamsStereo[AVAudioStreamsStereo.Count - 1];
                SendDebugMessage("MediaProbe: Adding last stereo audio stream 0:" + ast.StreamIndex.ToString() + " (" + ast.CodecTag + ")");
                AddStreamToMap(ast);
            }
        }
        else
        {
            // There are no stereo streams: just add the first audio stream
            AVStream ast = AVAudioStreams[0];
            SendDebugMessage("MediaProbe: No stereo audio streams found, adding first audio stream 0:" + ast.StreamIndex.ToString() + " (" + ast.CodecTag + ")");
            AddStreamToMap(ast);
        }
    }

    SendDebugMessage("MediaProbe: Analysis complete. Mappings are: " + mapArguments.ToString());

    // It's a success!
    ProbeReport = "Probe finished OK.";
    ProbeSuccess = true;
    ProbeFinished = true;
    EndProbe();
}
void AnalyseStreams()
{
    SendDebugMessage("Analysing streams.");
    mapArguments = new CommandArguments();

    // Okay, let's look at what we got...
    // There must be at least one audio stream AND one video stream for us to add mapping parameters to ffmpeg
    if ((AVStreams == null) ||
        (AVAudioStreams.Count < 1) ||
        (AVVideoStreams.Count < 1))
    {
        ProbeSuccess = false;
        ProbeReport = "Not enough audio or video streams detected to add mappings.";
        EndProbe();
        return;
    }

    // We already know there's at least one video stream
    // Use the first video stream
    if (AVVideoStreams.Count > 0)
    {
        SendDebugMessage("MediaProbe: Adding first video stream 0:" + AVVideoStreams[0].StreamIndex.ToString() + " (" + AVVideoStreams[0].CodecTag + ")");
        AddStreamToMap(AVVideoStreams[0]);
    }

    // TODO: For MP2, different behaviour with AC3?
    // We already know there's at least one audio stream
    if (AVAudioStreams.Count == 1)
    {
        // If there's just one audio stream, use it
        AddStreamToMap(AVAudioStreams[0]);
    }
    else if (AVAudioStreamsStereo.Count > 0) // If there are some stereo streams
    {
        if (AVAudioStreamsStereo.Count == 1)
        {
            // If there's just one stereo audio stream, use it
            AVStream ast = AVAudioStreamsStereo[0];
            SendDebugMessage("MediaProbe: Adding only stereo audio stream 0:" + ast.StreamIndex.ToString() + " (" + ast.CodecTag + ")");
            AddStreamToMap(ast);
        }
        else
        {
            // There are multiple stereo streams: add the LAST stereo stream
            AVStream ast = AVAudioStreamsStereo[AVAudioStreamsStereo.Count - 1];
            SendDebugMessage("MediaProbe: Adding last stereo audio stream 0:" + ast.StreamIndex.ToString() + " (" + ast.CodecTag + ")");
            AddStreamToMap(ast);
        }
    }
    else
    {
        // There are no stereo streams: just add the first audio stream
        AVStream ast = AVAudioStreams[0];
        SendDebugMessage("MediaProbe: No stereo audio streams found, adding first audio stream 0:" + ast.StreamIndex.ToString() + " (" + ast.CodecTag + ")");
        AddStreamToMap(ast);
    }

    SendDebugMessage("MediaProbe: Analysis complete. Mappings are: " + mapArguments.ToString());

    // It's a success!
    ProbeReport = "Probe finished OK.";
    ProbeSuccess = true;
    ProbeFinished = true;
    EndProbe();
}
/// <summary>
/// Returns the string of program params and args
/// </summary>
/// <returns>params and args</returns>
public override string ToString()
{
    return CommandArguments.ToString(this) + " " + ProgramParams;
}
/*
 * C:\Program Files (x86)\AirVideoServer\ffmpeg.exe"
 * --segment-length 4
 * --segment-offset 188
 * --conversion-id 548cf790-c04f-488a-96be-aae2968f272bdefd0e1d-2bdf-457d-ab15-3eb6c51ccf85
 * --port-number 46631
 * -threads 4
 * -flags +loop
 * -g 30 -keyint_min 1
 * -bf 0
 * -b_strategy 0
 * -flags2 -wpred-dct8x8 -cmp +chroma -deblockalpha 0 -deblockbeta 0 -refs 1 -coder 0 -me_range 16 -subq 5 -partitions +parti4x4+parti8x8+partp8x8
 * -trellis 0
 * -sc_threshold 40 -i_qfactor 0.71 -qcomp 0.6 -map 0.1:0.1 -map 0.0:0.0 -ss 188.0
 * -vf "crop=720:572:0:2, scale=568:320"
 * -aspect 720:576
 * -y
 * -async 1
 * -f mpegts
 * -vcodec libx264
 * -bufsize 1024k
 * -b 1200k
 * -bt 1300k
 * -qmax 48
 * -qmin 2
 * -r 25.0
 * -acodec libmp3lame
 * -ab 192k
 * -ar 48000
 * -ac 2
 */
void ConstructArguments()
{
    // Use either the standard ffmpeg template or a custom one
    string strFFMpegTemplate = (string.IsNullOrWhiteSpace(EncodingParameters.CustomFFMpegTemplate)) ?
        @"{THREADS} {H264PROFILE} {H264LEVEL} -flags +loop -g 30 -keyint_min 1 -bf 0 -b_strategy 0 -flags2 -wpred-dct8x8 -cmp +chroma -deblockalpha 0 -deblockbeta 0 -refs 1 {MOTIONSEARCHRANGE} {SUBQ} {PARTITIONS} -trellis 0 -coder 0 -sc_threshold 40 -i_qfactor 0.71 -qcomp 0.6 -qdiff 4 -rc_eq 'blurCplx^(1-qComp)' {MAPPINGS} {STARTTIME} {INPUTFILE} {ASPECT} {FRAMESIZE} {DEINTERLACE} -y {AUDIOSYNC} -f mpegts -vcodec libx264 {VIDEOBITRATE} {VIDEOBITRATEDEVIATION} -qmax 48 -qmin 2 -r 25 {AUDIOCODEC} {AUDIOBITRATE} {AUDIOSAMPLERATE} {AUDIOCHANNELS} {VOLUMELEVEL}" :
        EncodingParameters.CustomFFMpegTemplate;

    // Segment length and offset
    segmentArguments.AddArgCouple("--segment-length", EncodingParameters.SegmentDuration.ToString());
    segmentArguments.AddArgCouple("--segment-offset", StartAtSeconds.ToString());
    cmdArguments.AddArg(segmentArguments.ToString());

    // Multiple threads
    strFFMpegTemplate = strFFMpegTemplate.Replace("{THREADS}", "-threads 4");

    // Motion estimation range (-me_range)
    strFFMpegTemplate = strFFMpegTemplate.Replace("{MOTIONSEARCHRANGE}", ("-me_range " + EncodingParameters.MotionSearchRange.ToString()));

    // SUBQ - important as setting it too high can slow things down
    strFFMpegTemplate = strFFMpegTemplate.Replace("{SUBQ}", ("-subq " + EncodingParameters.X264SubQ.ToString()));

    // Partitions
    string strPartitions = (EncodingParameters.PartitionsFlags.Length > 0) ? "-partitions " + EncodingParameters.PartitionsFlags : "";
    strFFMpegTemplate = strFFMpegTemplate.Replace("{PARTITIONS}", strPartitions);

    // Add mappings
    string strMapArgs = (string.IsNullOrEmpty(MapArgumentsString)) ? "" : MapArgumentsString;
    strFFMpegTemplate = strFFMpegTemplate.Replace("{MAPPINGS}", strMapArgs);

    // Start at: MUST BE BEFORE THE INPUT FILE FLAG -i !!!
    string strStartTime = (StartAtSeconds <= 0) ? "" : ("-ss " + StartAtSeconds.ToString());
    strFFMpegTemplate = strFFMpegTemplate.Replace("{STARTTIME}", strStartTime);

    // Input file - use the short path to avoid issues with UTF-8 in batch files.
    // IT IS VERY IMPORTANT WHERE THIS GOES: AFTER -ss BUT BEFORE -vcodec AND -acodec
    string shortInputFile = Functions.FileWriter.GetShortPathName(InputFile);
    // Quotes around file
    string quotedInputFile = "\"" + shortInputFile + "\"";
    strFFMpegTemplate = strFFMpegTemplate.Replace("{INPUTFILE}", ("-i " + quotedInputFile));

    // Aspect ratio and frame size
    string strAspectRatio = (EncodingParameters.OutputSquarePixels) ? "-aspect 1:1" : "-aspect " + EncodingParameters.AspectRatio;
    strFFMpegTemplate = strFFMpegTemplate.Replace("{ASPECT}", strAspectRatio);
    string strFrameSize = "-s " + EncodingParameters.ConstrainedSize;
    strFFMpegTemplate = strFFMpegTemplate.Replace("{FRAMESIZE}", strFrameSize);

    // Deinterlace (experimental)
    string strDeinterlace = (EncodingParameters.DeInterlace) ? "-deinterlace" : "";
    strFFMpegTemplate = strFFMpegTemplate.Replace("{DEINTERLACE}", strDeinterlace);

    // OPTIONAL FOR LATER: -vf "crop=720:572:0:2, scale=568:320"
    // Think this means crop to the aspect ratio, then scale to the normal frame

    // Audio sync amount
    string strAudioSync = "-async " + AudioSyncAmount.ToString();
    strFFMpegTemplate = strFFMpegTemplate.Replace("{AUDIOSYNC}", strAudioSync);

    // Video bitrate
    string strVideoBitRateOptions = "-bufsize " + EncodingParameters.VideoBitRate + " -b " + EncodingParameters.VideoBitRate;
    //cmdArguments.AddArgCouple("-maxrate", VideoBitRate);
    strFFMpegTemplate = strFFMpegTemplate.Replace("{VIDEOBITRATE}", strVideoBitRateOptions);

    // Max video bitrate (optional)
    string strMaxVideoBitRate = "-maxrate " + EncodingParameters.VideoBitRate;
    strFFMpegTemplate = strFFMpegTemplate.Replace("{MAXVIDEOBITRATE}", strMaxVideoBitRate);

    string strVideoBitRateDeviation = "-bt " + EncodingParameters.BitRateDeviation;
    strFFMpegTemplate = strFFMpegTemplate.Replace("{VIDEOBITRATEDEVIATION}", strVideoBitRateDeviation);

    // Restrict H264 encoding level (e.g. for iPhone 3G)
    string strH264Level = (EncodingParameters.X264Level > 0) ? ("-level " + EncodingParameters.X264Level.ToString()) : "";
    strFFMpegTemplate = strFFMpegTemplate.Replace("{H264LEVEL}", strH264Level);
    string strH264Profile = (string.IsNullOrWhiteSpace(EncodingParameters.X264Profile)) ? "" : "-profile " + EncodingParameters.X264Profile;
    strFFMpegTemplate = strFFMpegTemplate.Replace("{H264PROFILE}", strH264Profile);

    // Audio codec - must be after the input file flag -i
    string strAudioCodecOptions = "";
    switch (EncodingParameters.AudioCodec)
    {
        case VideoEncodingParameters.AudioCodecTypes.AAC:
            strAudioCodecOptions = "-acodec aac -strict experimental";
            break;

        default:
            strAudioCodecOptions = "-acodec libmp3lame"; // or "libfaac"
            break;
    }
    strFFMpegTemplate = strFFMpegTemplate.Replace("{AUDIOCODEC}", strAudioCodecOptions);

    // Audio bitrate
    string strAudioBitRate = "-ab " + EncodingParameters.AudioBitRate;
    strFFMpegTemplate = strFFMpegTemplate.Replace("{AUDIOBITRATE}", strAudioBitRate);

    // Audio sample rate
    string strAudioSampleRate = "-ar " + EncodingParameters.AudioSampleRate;
    strFFMpegTemplate = strFFMpegTemplate.Replace("{AUDIOSAMPLERATE}", strAudioSampleRate);

    // Force stereo
    string strAudioChannels = "-ac 2";
    strFFMpegTemplate = strFFMpegTemplate.Replace("{AUDIOCHANNELS}", strAudioChannels);

    // Volume level
    string strVolumeBoost = "";
    if (EncodingParameters.AudioVolumePercent != 100)
    {
        double fVolumeBytes = (256.0 * (EncodingParameters.AudioVolumePercent / 100.0));
        int iVolumeBytes = Convert.ToInt32(fVolumeBytes);
        strVolumeBoost = "-vol " + iVolumeBytes.ToString();
    }
    strFFMpegTemplate = strFFMpegTemplate.Replace("{VOLUMELEVEL}", strVolumeBoost);

    // Pipe to segmenter (i.e. send to standard output now)
    strFFMpegTemplate = strFFMpegTemplate + " -";

    // Commit - add to the arguments
    cmdArguments.AddArg(strFFMpegTemplate);
}
/// <summary>
/// Returns a string representation in which the Distributor is turned into a CommandArguments string and appended to the end of the argument collection.
/// </summary>
/// <returns></returns>
public override string ToString()
{
    string result = _argCollection.ToString();
    return result;
}
void AnalyseStreams()
{
    SendDebugMessage("Analysing streams.");
    mapArguments = new CommandArguments();

    // Okay, let's look at what we got...
    // Note: it is no longer required to have at least one audio stream AND one video stream before we add mapping parameters to ffmpeg
    if ((AVStreams == null) ||
        ((AVAudioStreams.Count < 1) && (AVVideoStreams.Count < 1)))
    {
        ProbeSuccess = false;
        ProbeReport = "Not enough audio or video streams detected to add mappings.";
        EndProbe();
        return;
    }

    // Use the first non-MJPEG video stream, if any video streams exist
    if (AVVideoStreams.Count > 0)
    {
        AVStream CurrentAvs = null;
        foreach (AVStream avs in AVVideoStreams)
        {
            CurrentAvs = avs;
            if (avs.CodecName.Equals("mjpeg"))
            {
                // skip MJPEG streams
            }
            else
            {
                break;
            }
        }
        SendDebugMessage("MediaProbe: Adding first non-MJPEG video stream 0:" + CurrentAvs.StreamIndex.ToString() + " (" + CurrentAvs.CodecTag + ")");
        AddStreamToMap(CurrentAvs);
    }

    // Do we have a preferred audio index?
    if ((UsePreferredAudioStreamIndex) && (AVStreamByIndex(PreferredAudioStreamIndex) != null))
    {
        AVStream ast = AVStreamByIndex(PreferredAudioStreamIndex);
        SendDebugMessage("MediaProbe: Adding requested multiple track audio stream 0:" + ast.StreamIndex.ToString() + " (" + ast.CodecTag + ")");
        AddStreamToMap(ast);
    }
    else
    {
        // TODO: For MP2, different behaviour with AC3?
        if (AVAudioStreams.Count == 1)
        {
            // If there's just one audio stream, use it
            AddStreamToMap(AVAudioStreams[0]);
        }
        else if (AVAudioStreamsStereo.Count > 0) // If there are some stereo streams
        {
            if (AVAudioStreamsStereo.Count == 1)
            {
                // If there's just one stereo audio stream, use it
                AVStream ast = AVAudioStreamsStereo[0];
                SendDebugMessage("MediaProbe: Adding only multiple track audio stream 0:" + ast.StreamIndex.ToString() + " (" + ast.CodecTag + ")");
                AddStreamToMap(ast);
            }
            else
            {
                // There are multiple stereo streams: add the LAST stereo stream
                AVStream ast = AVAudioStreamsStereo[AVAudioStreamsStereo.Count - 1];
                SendDebugMessage("MediaProbe: Adding last multiple track audio stream 0:" + ast.StreamIndex.ToString() + " (" + ast.CodecTag + ")");
                AddStreamToMap(ast);
            }
        }
        else
        {
            // There are no stereo streams: just add the first audio stream, if any
            if (AVAudioStreams.Count > 0)
            {
                AVStream ast = AVAudioStreams[0];
                SendDebugMessage("MediaProbe: No multiple track audio streams found, adding first audio stream 0:" + ast.StreamIndex.ToString() + " (" + ast.CodecTag + ")");
                AddStreamToMap(ast);
            }
        }
    }

    // Use the first subtitle stream - TODO, for testing purposes
    //if (AVSubtitleStreams.Count > 0)
    //{
    //    SendDebugMessage("MediaProbe: Adding second subtitle stream 0:" + AVSubtitleStreams[0].StreamIndex.ToString() + " (" + AVSubtitleStreams[0].CodecTag + ")");
    //    AddStreamToMap(AVSubtitleStreams[0]);
    //}

    SendDebugMessage("MediaProbe: Analysis complete. Mappings are: " + mapArguments.ToString());

    // It's a success!
    ProbeReport = "Probe finished OK.";
    ProbeSuccess = true;
    ProbeFinished = true;
    EndProbe();
}
/// <Summary>
/// When the 'Open Game' button is pressed, open a file browser to locate the game and open it.
/// </Summary>
/// <param name="sender">The source of the event.</param>
/// <param name="e">A <see cref="EventArgs"/> that contains the event data.</param>
private void OpenGameButton_Click(object sender, EventArgs e)
{
    string intelFileName = "";
    bool gameLaunched = false;

    // Have the user identify the game to open
    try
    {
        OpenFileDialog fd = new OpenFileDialog();
        fd.Title = "Open Game";
        fd.FileName = "*" + Global.IntelExtension;
        DialogResult result = fd.ShowDialog();
        if (result != DialogResult.OK)
        {
            return;
        }
        intelFileName = fd.FileName;
    }
    catch
    {
        Report.FatalError("Unable to open a game.");
    }

    // Launch the GUI
    CommandArguments args = new CommandArguments();
    args.Add(CommandArguments.Option.GuiSwitch);
    args.Add(CommandArguments.Option.IntelFileName, intelFileName);
    try
    {
        Process.Start(Assembly.GetExecutingAssembly().Location, args.ToString());
        gameLaunched = true;
    }
    catch
    {
        Report.Error("NovaLauncher.cs: OpenGameButton_Click() - Failed to launch GUI.");
    }

    // Launch the Console if this is a local game, i.e. if the console.state is in the same directory.
    string serverStateFileName = "";
    FileInfo intelFileInfo = new FileInfo(intelFileName);
    string gamePathName = intelFileInfo.DirectoryName;
    DirectoryInfo gameDirectoryInfo = new DirectoryInfo(gamePathName);
    FileInfo[] gameFilesInfo = gameDirectoryInfo.GetFiles();
    foreach (FileInfo file in gameFilesInfo)
    {
        if (file.Extension == Global.ServerStateExtension)
        {
            serverStateFileName = file.FullName;
        }
    }

    if (serverStateFileName.Length > 0)
    {
        args.Clear();
        args.Add(CommandArguments.Option.ConsoleSwitch);
        args.Add(CommandArguments.Option.StateFileName, serverStateFileName);
        try
        {
            Process.Start(Assembly.GetExecutingAssembly().Location, args.ToString());
            gameLaunched = true;
        }
        catch
        {
            Report.Error("NovaLauncher.cs: OpenGameButton_Click() - Failed to launch Console.");
        }
    }

    if (gameLaunched)
    {
        Application.Exit();
    }
}
public override CommandResultCode Execute(IExecutionContext context, CommandArguments args)
{
    String outStr = args.ToString();
    Application.Out.WriteLine(StripQuotes(outStr));
    return CommandResultCode.Success;
}
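The snippet above relies on a StripQuotes helper whose implementation is not shown. A plausible sketch (an assumption for illustration, not the library's actual code) would simply trim one pair of surrounding double quotes:

// Assumed helper, not taken from the original source: removes one pair of
// surrounding double quotes, if present, and otherwise returns the string unchanged.
private static string StripQuotes(string value)
{
    if (value != null && value.Length >= 2 && value.StartsWith("\"") && value.EndsWith("\""))
    {
        return value.Substring(1, value.Length - 2);
    }
    return value;
}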
void Initialise()
{
    shellRunner = new ShellCmdRunner();
    shellRunner.ProcessFinished += new EventHandler<GenericEventArgs<processfinishedEventArgs>>(shellRunner_ProcessFinished);
    shellRunner.FileName = Path.Combine(PathToTools, "ffmpeg.exe");

    String workingFolderPath = Path.Combine(Environment.GetFolderPath(Environment.SpecialFolder.CommonApplicationData), "RemotePotato");
    workingFolderPath = workingFolderPath + "\\static\\";
    if (!Directory.Exists(workingFolderPath))
        Directory.CreateDirectory(workingFolderPath);
    shellRunner.BatchFile = Path.Combine(workingFolderPath, "ffmpeg" + DateTime.Now.ToString().Replace(" ", "").Replace(":", "") + ".bat");
    shellRunner.DurationBeforeMediaIsReopened = EncodingParameters.DurationBeforeMediaIsReopened;

    shellRunner.StandardErrorReceivedLine += new EventHandler<GenericEventArgs<string>>(shellRunner_StandardErrorReceivedLine);
    shellRunner.StandardOutputReceived += new EventHandler<GenericEventArgs<byte[]>>(shellRunner_StandardOutputReceived); // Incoming data from ffmpeg STDOUT - management

    InitTempWriter();

    // Set up objects
    cmdArguments = new CommandArguments();
    segmentArguments = new CommandArguments();

    // Arguments
    ConstructArguments();
    shellRunner.Arguments = cmdArguments.ToString();
    SendDebugMessage("Initialized to run: " + shellRunner.BatchFile + " " + shellRunner.Arguments);
    shellRunner.CreateBatchFile(shellRunner.BatchFile, EncodingParameters.DurationBeforeMediaIsReopened);
}
void Initialise(int startAtSegment, string ID)
{
    shellRunner = new ShellCmdRunner(request.LiveTV, request.NewLiveTV, request.UseNewerFFMPEG, ID, this);
    shellRunner.ProcessFinished += new EventHandler<GenericEventArgs<processfinishedEventArgs>>(shellRunner_ProcessFinished);

    if (request.NewLiveTV)
    {
        shellRunner.PathToTools = PathToTools;
        shellRunner.preferredAudioStreamIndex = request.UseAudioStreamIndex;
        shellRunner.latestffmpeg = Path.Combine(PathToTools, "ffmpeglatest.exe");
        shellRunner.latestffprobe = Path.Combine(PathToTools, "ffprobelatest.exe");
        shellRunner.DummyLoopOrFfmpegOrLatestffmpeg = Path.Combine(PathToTools, "dummyloop.bat");
        shellRunner.ProbeFileName = Path.Combine(PathToTools, "ffprobelatest.exe");
        shellRunner.mappings = (string.IsNullOrEmpty(MapArgumentsString)) ? "" : MapArgumentsString;
        shellRunner.request = request;
        shellRunner.NamedPipeServer = Path.Combine(PathToTools, "NamedPipeServer.exe");
    }
    else if (request.UseNewerFFMPEG)
        shellRunner.DummyLoopOrFfmpegOrLatestffmpeg = Path.Combine(PathToTools, "ffmpeglatest.exe");
    else
        shellRunner.DummyLoopOrFfmpegOrLatestffmpeg = Path.Combine(PathToTools, "ffmpeg.exe");

    shellRunner.StandardErrorReceivedLine += new EventHandler<GenericEventArgs<string>>(shellRunner_StandardErrorReceivedLine);
    shellRunner.StandardErrorReceivedLine2 += new EventHandler<GenericEventArgs<string>>(shellRunner_StandardErrorReceivedLine2);
    shellRunner.StandardErrorReceivedLine3 += new EventHandler<GenericEventArgs<string>>(shellRunner_StandardErrorReceivedLine3);
    shellRunner.StandardErrorReceivedLine4 += new EventHandler<GenericEventArgs<string>>(shellRunner_StandardErrorReceivedLine4);
    shellRunner.StandardOutputReceived += new EventHandler<GenericEventArgs<byte[]>>(shellRunner_StandardOutputReceived); // Incoming data from ffmpeg STDOUT - management

    InitTempWriter();

    // Set up objects
    cmdArguments = new CommandArguments();
    segmentArguments = new CommandArguments();

    // Arguments
    ConstructArguments(startAtSegment);
    shellRunner.Arguments = cmdArguments.ToString();
    shellRunner.inputFile = InputFile;
}