public void BuildPipeline(StreamContext context, EncoderUnit.LogStream log)
{
    // add input
    bool doInputReader = context.Source.NeedsInputReaderUnit;
    if (doInputReader)
    {
        context.Pipeline.AddDataUnit(context.Source.GetInputReaderUnit(), 1);
    }

    // get parameters
    VLCParameters vlcparam = GenerateVLCParameters(context);
    string path = @"\#OUT#";
    string sout = vlcparam.Sout.Replace("#OUT#", path);

    // generate the VLC argument string: escape embedded quotes, then wrap each argument in double quotes
    var quotedArgList = vlcparam.Arguments.Select(x => x.Replace("\"", "\\\""));
    string vlcArguments = "\"" + String.Join("\" \"", quotedArgList) + "\"";
    string arguments = GenerateArguments(vlcparam.Input, sout, vlcArguments);

    // add the unit
    EncoderUnit.TransportMethod input = doInputReader ? EncoderUnit.TransportMethod.NamedPipe : EncoderUnit.TransportMethod.Other;
    EncoderUnit.TransportMethod outputMethod = readOutputStream ? EncoderUnit.TransportMethod.NamedPipe : EncoderUnit.TransportMethod.Other;
    // Waiting for the output pipe is meaningless for VLC, as it opens the pipe well before it actually writes to it.
    // Instead, the log parsing in VLCWrapped handles the delay (yes, this class is probably useless standalone, but it is provided for debugging).
    EncoderUnit unit = new EncoderUnit(context.Profile.CodecParameters["path"], arguments, input, outputMethod, log);
    unit.DebugOutput = false; // change this for debugging
    context.Pipeline.AddDataUnit(unit, 5);
}
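// The argument-quoting step above escapes embedded double quotes and wraps each
// argument before joining them into one command line. A minimal standalone
// sketch of that logic, using hypothetical sample arguments rather than real
// VLC options:
using System;
using System.Linq;

static class VlcQuotingSketch
{
    static void Main()
    {
        var arguments = new[] { "--option", "value", "path with \"quotes\"" };
        var quotedArgList = arguments.Select(x => x.Replace("\"", "\\\""));
        string vlcArguments = "\"" + String.Join("\" \"", quotedArgList) + "\"";
        Console.WriteLine(vlcArguments);
        // prints: "--option" "value" "path with \"quotes\""
    }
}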
public void BuildPipeline()
{
    // create full argument string
    string program = Context.Profile.TranscoderParameters["transcoder"];
    string arguments = Context.Profile.TranscoderParameters["arguments"]
        .Replace("#WIDTH#", Context.OutputSize.Width.ToString())
        .Replace("#HEIGHT#", Context.OutputSize.Height.ToString())
        .Replace("#AUDIOSTREAMID#", Context.AudioTrackId.ToString())
        .Replace("#STARTPOSITION#", Math.Round((decimal)Context.StartPosition / 1000).ToString());

    // add input reader
    if (Context.Source.NeedsInputReaderUnit)
    {
        Context.Pipeline.AddDataUnit(Context.Source.GetInputReaderUnit(), 1);
    }
    else
    {
        arguments = arguments.Replace("#IN#", Context.Source.GetPath());
    }

    // add unit
    EncoderUnit.TransportMethod input = Context.Source.NeedsInputReaderUnit ? EncoderUnit.TransportMethod.NamedPipe : EncoderUnit.TransportMethod.Other;
    EncoderUnit unit = new EncoderUnit(program, arguments, input, EncoderUnit.TransportMethod.NamedPipe, EncoderUnit.LogStream.None);
    unit.DebugOutput = false; // change this for debugging
    Context.Pipeline.AddDataUnit(unit, 5);

    // setup output parsing
    Context.TranscodingInfo.Supported = false;
}
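// The template substitution above fills profile placeholders with runtime
// values; note that #STARTPOSITION# converts milliseconds to whole seconds.
// A minimal sketch with hypothetical profile values:
using System;

static class TemplateSketch
{
    static void Main()
    {
        string template = "-i \"#IN#\" -s #WIDTH#x#HEIGHT# -ss #STARTPOSITION# \"#OUT#\"";
        int width = 1280, height = 720;
        long startPositionMs = 61500;
        string arguments = template
            .Replace("#WIDTH#", width.ToString())
            .Replace("#HEIGHT#", height.ToString())
            .Replace("#STARTPOSITION#", Math.Round((decimal)startPositionMs / 1000).ToString());
        Console.WriteLine(arguments);
        // prints: -i "#IN#" -s 1280x720 -ss 62 "#OUT#"
    }
}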
public virtual void BuildPipeline()
{
    // add input
    bool doInputReader = Context.NeedsInputReaderUnit;
    if (doInputReader)
    {
        Context.Pipeline.AddDataUnit(Context.GetInputReaderUnit(), 1);
    }

    string arguments = GenerateArguments();

    // when no input reader is used, substitute the source path directly
    if (!doInputReader)
        arguments = arguments.Replace("#IN#", Context.Source.GetPath());

    // add unit
    EncoderUnit.TransportMethod input = doInputReader ? EncoderUnit.TransportMethod.NamedPipe : EncoderUnit.TransportMethod.Other;
    EncoderUnit unit = new EncoderUnit(Configuration.StreamingProfiles.FFMpegPath, arguments, input,
        ReadOutputStream ? EncoderUnit.TransportMethod.NamedPipe : EncoderUnit.TransportMethod.Other,
        EncoderUnit.LogStream.StandardError, Context);
    unit.DebugOutput = false; // change this for debugging
    Context.Pipeline.AddDataUnit(unit, 5);

    // setup output parsing
    var einfo = new Reference<WebTranscodingInfo>(() => Context.TranscodingInfo, x => { Context.TranscodingInfo = x; });
    FFMpegLogParsingUnit logunit = new FFMpegLogParsingUnit(Context.Identifier, einfo, Context.StartPosition);
    logunit.LogMessages = true;
    logunit.LogProgress = true;
    Context.Pipeline.AddLogUnit(logunit, 6);
}
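// Reference<WebTranscodingInfo> above wraps a property in a getter/setter pair
// so the log parsing unit can read and update it later. A hedged sketch of the
// minimal shape the call site implies; the actual type in the codebase may
// differ:
using System;

public class Reference<T>
{
    private readonly Func<T> getter;
    private readonly Action<T> setter;

    public Reference(Func<T> getter, Action<T> setter)
    {
        this.getter = getter;
        this.setter = setter;
    }

    public T Value
    {
        get { return getter(); }
        set { setter(value); }
    }
}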
public void BuildPipeline(StreamContext context)
{
    // add input
    bool doInputReader = context.Source.NeedsInputReaderUnit;
    if (doInputReader)
    {
        context.Pipeline.AddDataUnit(context.Source.GetInputReaderUnit(), 1);
    }

    // calculate stream mappings (no way I'm going to add subtitle support; it's just broken)
    string mappings = "";
    if (context.AudioTrackId != null)
    {
        mappings = String.Format("-map v:0 -map a:{0}", context.MediaInfo.AudioStreams.First(x => x.ID == context.AudioTrackId).Index);
    }

    // calculate full argument string
    string arguments;
    if (context.Profile.HasVideoStream)
    {
        arguments = String.Format(
            "-y {0} -i \"#IN#\" -s {1} -aspect {2}:{3} {4} {5} \"#OUT#\"",
            context.StartPosition != 0 ? "-ss " + (context.StartPosition / 1000) : "",
            context.OutputSize,
            context.OutputSize.Width,
            context.OutputSize.Height,
            mappings,
            context.Profile.CodecParameters["codecParameters"]
        );
    }
    else
    {
        arguments = String.Format(
            "-y {0} -i \"#IN#\" {1} {2} \"#OUT#\"",
            context.StartPosition != 0 ? "-ss " + (context.StartPosition / 1000) : "",
            mappings,
            context.Profile.CodecParameters["codecParameters"]
        );
    }

    // when no input reader is used, substitute the source path directly
    if (!doInputReader)
        arguments = arguments.Replace("#IN#", context.Source.GetPath());

    // add unit
    EncoderUnit.TransportMethod input = doInputReader ? EncoderUnit.TransportMethod.NamedPipe : EncoderUnit.TransportMethod.Other;
    EncoderUnit unit = new EncoderUnit(Configuration.Streaming.FFMpegPath, arguments, input, EncoderUnit.TransportMethod.NamedPipe, EncoderUnit.LogStream.StandardError);
    unit.DebugOutput = false; // change this for debugging
    context.Pipeline.AddDataUnit(unit, 5);

    // setup output parsing
    var einfo = new Reference<WebTranscodingInfo>(() => context.TranscodingInfo, x => { context.TranscodingInfo = x; });
    FFMpegLogParsingUnit logunit = new FFMpegLogParsingUnit(einfo, context.StartPosition);
    logunit.LogMessages = true;
    logunit.LogProgress = true;
    context.Pipeline.AddLogUnit(logunit, 6);
}
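// The mapping step above picks the ffmpeg -map arguments for the first video
// stream and the requested audio track. A standalone sketch with a hypothetical
// stream list (AudioStream here is a stand-in for the real MediaInfo type):
using System;
using System.Linq;

class AudioStream
{
    public int ID;
    public int Index;
}

static class MappingSketch
{
    static void Main()
    {
        var audioStreams = new[]
        {
            new AudioStream { ID = 1, Index = 0 },
            new AudioStream { ID = 2, Index = 1 }
        };
        int? audioTrackId = 2;
        string mappings = "";
        if (audioTrackId != null)
        {
            mappings = String.Format("-map v:0 -map a:{0}",
                audioStreams.First(x => x.ID == audioTrackId).Index);
        }
        Console.WriteLine(mappings); // prints: -map v:0 -map a:1
    }
}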