예제 #1
0
        /// <summary>
        /// Probes a media source and returns the prober's live-TV part duration value.
        /// </summary>
        /// <param name="fileName">Path or URL of the media to probe.</param>
        /// <param name="workdir">Working directory handed to the probe tool.</param>
        /// <returns>The duration reported by <c>MediaInfoGrabber.Info.NewLiveTVPartDuration</c>.</returns>
        public static double GetMediaDuration(string fileName, string workdir)
        {
            string probeResultsFolder = Path.Combine(Functions2.StreamBaseFolder, "probe_results");
            MediaInfoGrabber prober = new MediaInfoGrabber(Functions2.ToolkitFolder, probeResultsFolder, fileName);

            // ffmpeglatest.exe handles m3u8 playlists as well as plain files.
            prober.GetInfo("ffmpeglatest.exe", workdir);

            return prober.Info.NewLiveTVPartDuration;
        }
예제 #2
0
        /// <summary>
        /// Probes a media file and returns its duration.
        /// </summary>
        /// <param name="fileName">Path of the media file to probe.</param>
        /// <returns>The probed duration, or a zero <see cref="TimeSpan"/> when probing did not succeed.</returns>
        public TimeSpan GetMediaDuration(string fileName)
        {
            string probeResultsFolder = Path.Combine(Functions.StreamBaseFolder, "probe_results");
            MediaInfoGrabber prober = new MediaInfoGrabber(Functions.ToolkitFolder, probeResultsFolder, fileName);

            // Forward the prober's debug output only for the duration of the probe.
            prober.DebugMessage += new EventHandler<FatAttitude.GenericEventArgs<string>>(grabber_DebugMessage);
            prober.GetInfo();
            prober.DebugMessage -= grabber_DebugMessage;

            if (!prober.Info.Success)
                return new TimeSpan(0);

            return prober.Info.Duration;
        }
예제 #3
0
        /*
         * C:\Program Files (x86)\AirVideoServer\ffmpeg.exe"
         * --segment-length 4
         * --segment-offset 188
         * --conversion-id 548cf790-c04f-488a-96be-aae2968f272bdefd0e1d-2bdf-457d-ab15-3eb6c51ccf85
         * --port-number 46631
         * -threads 4
         * -flags +loop
         * -g 30 -keyint_min 1
         * -bf 0
         * -b_strategy 0
         * -flags2 -wpred-dct8x8 -cmp +chroma -deblockalpha 0 -deblockbeta 0 -refs 1 -coder 0 -me_range 16 -subq 5 -partitions +parti4x4+parti8x8+partp8x8
         * -trellis 0
         * -sc_threshold 40 -i_qfactor 0.71 -qcomp 0.6 -map 0.1:0.1 -map 0.0:0.0 -ss 188.0
         * -vf "crop=720:572:0:2, scale=568:320"
         * -aspect 720:576
         * -y
         * -async 1
         * -f mpegts
         * -vcodec libx264
         * -bufsize 1024k
         * -b 1200k
         * -bt 1300k
         * -qmax 48
         * -qmin 2
         * -r 25.0
         * -acodec libmp3lame
         * -ab 192k
         * -ar 48000
         * -ac 2
         */
        /// <summary>
        /// Builds the segmenter + ffmpeg command line by probing the input file and
        /// substituting every {PLACEHOLDER} token in the encoding template, then
        /// commits the finished argument string to <c>cmdArguments</c>.
        /// The template's token order matters: {STARTTIME} must precede {INPUTFILE}
        /// (-ss before -i), and the audio/video codec options must follow it.
        /// </summary>
        void ConstructArguments()
        {
            // Get info on input file...
            MediaInfoGrabber g = new MediaInfoGrabber(PathToTools, Environment.GetEnvironmentVariable("tmp"), InputFile);
            g.GetInfo();

            // Use either the standard ffmpeg template or a custom one
            string strFFMpegTemplate = (string.IsNullOrWhiteSpace(EncodingParameters.CustomFFMpegTemplate)) ?
                @"{THREADS} {H264PROFILE} {H264LEVEL} -flags +loop -g 30 -keyint_min 1 -bf 0 -b_strategy 0 -flags2 -wpred-dct8x8 -cmp +chroma -deblockalpha 0 -deblockbeta 0 -refs 1 {MOTIONSEARCHRANGE} {SUBQ} {PARTITIONS} -trellis 0 -coder 0 -sc_threshold 40 -i_qfactor 0.71 -qcomp 0.6 -qdiff 4 -rc_eq 'blurCplx^(1-qComp)' {MAPPINGS} {STARTTIME} {INPUTFILE} {ASPECT} {FRAMESIZE} {DEINTERLACE} -y {AUDIOSYNC} -f mpegts -vcodec libx264 {VIDEOBITRATE} {VIDEOBITRATEDEVIATION} -qmax 48 -qmin 2 -r {FRAMERATE} {AUDIOCODEC} {AUDIOBITRATE} {AUDIOSAMPLERATE} {AUDIOCHANNELS} {VOLUMELEVEL}" :
                EncodingParameters.CustomFFMpegTemplate;

            // Segment length and offset
            segmentArguments.AddArgCouple("--segment-length", EncodingParameters.SegmentDuration.ToString());
            segmentArguments.AddArgCouple("--segment-offset", StartAtSeconds.ToString());
            cmdArguments.AddArg(segmentArguments.ToString());

            // Multi threads
            // Set to number of physical cores.
            int coreCount = 0;
            foreach (var item in new System.Management.ManagementObjectSearcher("Select * from Win32_Processor").Get())
            {
                coreCount += int.Parse(item["NumberOfCores"].ToString());
            }
            strFFMpegTemplate = strFFMpegTemplate.Replace("{THREADS}", "-threads " + coreCount.ToString());

            // Frame rate: parse and format with the invariant culture. ffmpeg always
            // uses a '.' decimal separator, while Convert.ToDouble/ToString honour the
            // OS culture and would fail on "25.0" (or emit "12,5") under a ',' locale.
            // Guard the stream access: a failed probe leaves AVVideoStreams empty.
            string rate = "25"; // fallback when probing fails — TODO confirm preferred default
            if (g.Info.Success && (g.Info.AVVideoStreams.Count > 0))
                rate = g.Info.AVVideoStreams[0].frameRate;
            Debug.WriteLine("Detected frame rate is: " + rate);
            double rateD;
            double.TryParse(rate, System.Globalization.NumberStyles.Any,
                System.Globalization.CultureInfo.InvariantCulture, out rateD);
            if (rateD > 31)
            {
                // If over 30Hz, assume 50 or 59.94Hz and halve to 25 or 29.97
                string halvedRate = (rateD / 2).ToString(System.Globalization.CultureInfo.InvariantCulture);
                Debug.WriteLine("Computed frame rate:" + halvedRate);
                strFFMpegTemplate = strFFMpegTemplate.Replace("{FRAMERATE}", halvedRate);
            }
            else
                strFFMpegTemplate = strFFMpegTemplate.Replace("{FRAMERATE}", rate);

            // Me Range
            strFFMpegTemplate = strFFMpegTemplate.Replace("{MOTIONSEARCHRANGE}", ("-me_range " + EncodingParameters.MotionSearchRange.ToString()));

            // SUBQ - important as setting it too high can slow things down
            strFFMpegTemplate = strFFMpegTemplate.Replace("{SUBQ}", ("-subq " + EncodingParameters.X264SubQ.ToString()));

            // Partitions
            string strPartitions = (EncodingParameters.PartitionsFlags.Length > 0) ?
                "-partitions " + EncodingParameters.PartitionsFlags : "";
            strFFMpegTemplate = strFFMpegTemplate.Replace("{PARTITIONS}", strPartitions);

            // Add Mappings
            string strMapArgs = (string.IsNullOrEmpty(MapArgumentsString)) ? "" : MapArgumentsString;
            strFFMpegTemplate = strFFMpegTemplate.Replace("{MAPPINGS}", strMapArgs);

            // Start at : MUST BE BEFORE INPUT FILE FLAG -i *** !!!
            string strStartTime = (StartAtSeconds <= 0) ? "" : ("-ss " + StartAtSeconds.ToString());
            strFFMpegTemplate = strFFMpegTemplate.Replace("{STARTTIME}", strStartTime);

            // Input file - make short to avoid issues with UTF-8 in batch files  IT IS VERY IMPORTANT WHERE THIS GOES; AFTER SS BUT BEFORE VCODEC AND ACODEC
            string shortInputFile = Functions.FileWriter.GetShortPathName(InputFile);
            // Quotes around file
            string quotedInputFile = "\"" + shortInputFile + "\"";
            strFFMpegTemplate = strFFMpegTemplate.Replace("{INPUTFILE}", ("-i " + quotedInputFile));

            // Aspect ratio and frame size
            string strAspectRatio = (EncodingParameters.OutputSquarePixels) ? "-aspect 1:1" : "-aspect " + EncodingParameters.AspectRatio;
            Debug.WriteLine("EncodingParameters indicated Aspect is: " + EncodingParameters.AspectRatio);
            strFFMpegTemplate = strFFMpegTemplate.Replace("{ASPECT}", strAspectRatio);
            Debug.WriteLine("Selected Aspect is: " + strAspectRatio);

            // Ensure video fits within selected display size while maintaining square pixels
            string strFrameSize = "-s " + EncodingParameters.ConstrainedSize;
            strFFMpegTemplate = strFFMpegTemplate.Replace("{FRAMESIZE}", strFrameSize);
            Debug.WriteLine("Selected frame size is: " + strFrameSize);

            // Deinterlace (experimental)
            // Only deinterlace SD or 1080i HD TV content
            // Assume anything else is progressive.
            string strDeinterlace = (EncodingParameters.DeInterlace) ? "-deinterlace" : "";

            // Note: use short-circuit && (the original non-short-circuit & evaluated
            // both sides unconditionally); guard the stream list for failed probes.
            bool isSdOr1080i = g.Info.Success
                && (g.Info.AVVideoStreams.Count > 0)
                && ((g.Info.AVVideoStreams[0].Height < 700) || (g.Info.AVVideoStreams[0].Height > 1000));
            bool isTvContainer = InputFile.ToLower().Contains(".wtv") || InputFile.ToLower().Contains(".dvr-ms");
            if (isSdOr1080i && isTvContainer)
            {
                strFFMpegTemplate = strFFMpegTemplate.Replace("{DEINTERLACE}", strDeinterlace);
                Debug.WriteLine("Enabling deinterlacing");
            }
            else
            {
                strFFMpegTemplate = strFFMpegTemplate.Replace("{DEINTERLACE}", "");
                Debug.WriteLine("Disabling deinterlacing");
            }

            // OPTIONAL FOR LATER:  -vf "crop=720:572:0:2, scale=568:320"
            // Think this means crop to the aspect ratio, then scale to the normal frame

            // Audio sync amount
            string strAudioSync = "-async " + AudioSyncAmount.ToString();
            strFFMpegTemplate = strFFMpegTemplate.Replace("{AUDIOSYNC}", strAudioSync);

            // Video bitrate
            string strVideoBitRateOptions = "-bufsize " + EncodingParameters.VideoBitRate + " -b " + EncodingParameters.VideoBitRate;  //cmdArguments.AddArgCouple("-maxrate", VideoBitRate);
            strFFMpegTemplate = strFFMpegTemplate.Replace("{VIDEOBITRATE}", strVideoBitRateOptions);

            // Max video bitrate (optional — the default template carries no {MAXVIDEOBITRATE} token, so this only affects custom templates)
            string strMaxVideoBitRate = "-maxrate " + EncodingParameters.VideoBitRate;
            strFFMpegTemplate = strFFMpegTemplate.Replace("{MAXVIDEOBITRATE}", strMaxVideoBitRate);

            string strVideoBitRateDeviation = "-bt " + EncodingParameters.BitRateDeviation;
            strFFMpegTemplate = strFFMpegTemplate.Replace("{VIDEOBITRATEDEVIATION}", strVideoBitRateDeviation);

            // Restrict H264 encoding level (e.g. for iPhone 3G)
            string strH264Level = (EncodingParameters.X264Level > 0) ? ("-level " + EncodingParameters.X264Level.ToString()) : "";
            strFFMpegTemplate = strFFMpegTemplate.Replace("{H264LEVEL}", strH264Level);
            string strH264Profile = (string.IsNullOrWhiteSpace(EncodingParameters.X264Profile)) ? "" : "-profile " + EncodingParameters.X264Profile;
            strFFMpegTemplate = strFFMpegTemplate.Replace("{H264PROFILE}", strH264Profile);

            // Audio: MP3 - must be after input file flag -i  //
            string strAudioCodecOptions;
            switch (EncodingParameters.AudioCodec)
            {
                case VideoEncodingParameters.AudioCodecTypes.AAC:
                    strAudioCodecOptions = "-acodec aac -strict experimental";
                    break;

                default:
                    strAudioCodecOptions = "-acodec libmp3lame";
                    break;
            }
            strFFMpegTemplate = strFFMpegTemplate.Replace("{AUDIOCODEC}", strAudioCodecOptions);

            // Audio Bitrate
            string strAudioBitRate = "-ab " + EncodingParameters.AudioBitRate;
            strFFMpegTemplate = strFFMpegTemplate.Replace("{AUDIOBITRATE}", strAudioBitRate);

            // Audio sample rate
            string strAudioSampleRate = "-ar " + EncodingParameters.AudioSampleRate;
            strFFMpegTemplate = strFFMpegTemplate.Replace("{AUDIOSAMPLERATE}", strAudioSampleRate);

            // Force stereo
            string strAudioChannels = "-ac 2";
            strFFMpegTemplate = strFFMpegTemplate.Replace("{AUDIOCHANNELS}", strAudioChannels);

            // Volume Level: ffmpeg's -vol is in units of 256 = 100%
            string strVolumeBoost = "";
            if (EncodingParameters.AudioVolumePercent != 100)
            {
                double fVolumeBytes = (256.0 * (EncodingParameters.AudioVolumePercent / 100.0));
                int iVolumeBytes = Convert.ToInt32(fVolumeBytes);
                strVolumeBoost = "-vol " + iVolumeBytes.ToString();
            }
            strFFMpegTemplate = strFFMpegTemplate.Replace("{VOLUMELEVEL}", strVolumeBoost);

            // Pipe to segmenter (ie send to standard output now)
            strFFMpegTemplate = strFFMpegTemplate + " -";

            // Commit - add to the arguments
            cmdArguments.AddArg(strFFMpegTemplate);
        }
예제 #4
0
        private void ConstructArguments(int startAtSegment)
        {
            MediaInfoGrabber g = null;
            //            if (!request.NewLiveTV)
            {
                //Get info on intput file...
                SendDebugMessage("MediaInfoGrabber: Setting up prober in HLSRunner...");
                g = new MediaInfoGrabber(PathToTools, Environment.GetEnvironmentVariable("tmp"), InputFile);
                if (request.NewLiveTV)
                {
                    //g.GetInfo2("ffmpeg.exe", "");
                }
                else if (request.LiveTV)
                {
                    g.GetInfo2("ffmpeg.exe", "");
                }
                else
                {
                    g.GetInfo("ffmpeg.exe", "");
                }
            }

            string args ="";
             //   if (Settings.Default.UseOsmo4)
               // {
                //request.UseNewerFFMPEG = Settings.Default.UseOsmo4;
                //string rppath = Path.Combine(
                //    Environment.GetFolderPath(Environment.SpecialFolder.CommonApplicationData), "remotepotato");
                //string workingdirectory = Path.Combine(rppath, "static\\mediastreams\\" + ID + "\\");
                //args =
                //    @"{THREADS} {STARTTIME} {INPUTFILE} {MAPPINGS} -vbsf h264_mp4toannexb -flags -global_header {H264PROFILE} -level 30 -preset ultrafast {USEGPU} -flags +loop -g 30 -keyint_min 1 -bf 0 -b_strategy 0 -weightb 0 -8x8dct 0 -deblock 0:0 -cmp chroma -refs 1 {MOTIONSEARCHRANGE} {SUBQ} {PARTITIONS} -trellis 0 -coder 0 -sc_threshold 40 -i_qfactor 0.71 -qcomp 0.6 -qdiff 4 -rc_eq 'blurCplx^(1-qComp)' {ASPECT} {FRAMESIZE} {DEINTERLACE} -y {AUDIOSYNC} -vcodec libx264 {VIDEOBITRATE} {VIDEOBITRATEDEVIATION} -qmax 48 -qmin 2 -r {FRAMERATE} {AUDIOCODEC} {AUDIOBITRATE} {AUDIOSAMPLERATE} {AUDIOCHANNELS} {VOLUMELEVEL} -force_key_frames expr:gte(t,n_forced*" +
                //    SegmentDuration + ") -f segment -segment_time " + SegmentDuration + " -segment_list " +
                //    workingdirectory + "index2.m3u8 -segment_start_number {SEGMENTSTARTNR} segment-%d.ts";
            //    }
              //  else
            if (request.NewLiveTV)
                //                args = @" {INPUTFILE} -y {THREADS} {H264PROFILE} {H264LEVEL} -flags +loop -g 30 -keyint_min 1 -b_strategy 0 {MOTIONSEARCHRANGE} {SUBQ} {PARTITIONS} -trellis 0 -coder 0 -sc_threshold 40 -i_qfactor 0.71 -qcomp 0.6 -qdiff 4 -rc_eq 'blurCplx^(1-qComp)' {MAPPINGS} {AUDIOCODEC} {AUDIOBITRATE} {MAXAUDIOBITRATE} {MINAUDIOBITRATE} {AUDIOSAMPLERATE} {AUDIOCHANNELS} -c:v libx264 {AUDIOSYNC} {ASPECT} -vbsf h264_mp4toannexb {FRAMESIZE} {VIDEOBITRATE} {MAXVIDEOBITRATE} {MINVIDEOBITRATE} {VIDEOBITRATEDEVIATION} -qmax 48 -qmin 2 -flags -global_header -f segment {SEGMENTLENGTH} {INDEXFILE} {segment_start_number} seg-%d.ts";
                //                args = @" {INPUTFILE} -map 0 -y {THREADS} -c copy -f segment {SEGMENTLENGTH} {INDEXFILE} {segment_start_number} liveseg-%d.ts";
                args = "";
            //                args = @" {INPUTFILE} -y {THREADS} {MAPPINGS} {AUDIOCODEC} {AUDIOBITRATE} {MAXAUDIOBITRATE} {MINAUDIOBITRATE} {AUDIOSAMPLERATE} {AUDIOCHANNELS} -c:v libx264 -profile:v baseline -level 1 {AUDIOSYNC} {ASPECT} -vbsf h264_mp4toannexb {FRAMESIZE} {VIDEOBITRATE} {MAXVIDEOBITRATE} {MINVIDEOBITRATE} {VIDEOBITRATEDEVIATION}  -flags -global_header c:\scratch\scratch.ts";
            else if (request.LiveTV)
                //                args = @"{THREADS} {H264PROFILE} {H264LEVEL} -flags +loop -g 30 -keyint_min 1 -b_strategy 0 -flags2 -wpred-dct8x8 -cmp +chroma -deblockalpha 0 -deblockbeta 0 -refs 1 {MOTIONSEARCHRANGE} {SUBQ} {PARTITIONS} -trellis 0 -coder 0 -sc_threshold 40 -i_qfactor 0.71 -qcomp 0.6 -qdiff 4 -rc_eq 'blurCplx^(1-qComp)' {MAPPINGS} {STARTTIME} {INPUTFILE} {AUDIOSYNC} {ASPECT} {FRAMESIZE} {DEINTERLACE} -y -f mpegts -vcodec libx264 {VIDEOBITRATE} {MINVIDEOBITRATE} {MAXVIDEOBITRATE} {VIDEOBITRATEDEVIATION} -qmax 48 -qmin 2 -r 25 {AUDIOCODEC} {AUDIOBITRATE} {AUDIOSAMPLERATE} {AUDIOCHANNELS} {VOLUMELEVEL}";
                args =
                    @"{THREADS} {H264PROFILE} {H264LEVEL}  {USEGPU}  -flags +loop -g 30 -keyint_min 1 -b_strategy 0 -flags2 -wpred-dct8x8 -cmp +chroma -deblockalpha 0 -deblockbeta 0 -refs 1 {MOTIONSEARCHRANGE} {SUBQ} {PARTITIONS} -trellis 0 -coder 0 -sc_threshold 40 -i_qfactor 0.71 -qcomp 0.6 -qdiff 4 -rc_eq 'blurCplx^(1-qComp)' {MAPPINGS} {STARTTIME} {INPUTFILE} {AUDIOSYNC} {ASPECT} {FRAMESIZE} {DEINTERLACE} -y -f mpegts -vcodec libx264 {VIDEOBITRATE} {MINVIDEOBITRATE} {MAXVIDEOBITRATE} {VIDEOBITRATEDEVIATION} -qmax 48 -qmin 2 -r {FRAMERATE} {AUDIOCODEC} {AUDIOBITRATE} {AUDIOSAMPLERATE} {AUDIOCHANNELS} {VOLUMELEVEL}";
                // see for efficiency: http://smorgasbork.com/component/content/article/35-linux/98-high-bitrate-real-time-mpeg-2-encoding-with-ffmpeg as well
            else if (request.UseNewerFFMPEG && !request.NewLiveTV && !request.LiveTV)
            {
                string rppath = Path.Combine(
                    Environment.GetFolderPath(Environment.SpecialFolder.CommonApplicationData), "remotepotato");
                string workingdirectory = Path.Combine(rppath, "static\\mediastreams\\" + ID + "\\");
                args =

                    @"{THREADS} {STARTTIME} {INPUTFILE} {MAPPINGS} -vbsf h264_mp4toannexb -flags -global_header {H264PROFILE} {H264LEVEL} {USEGPU} -flags +loop -g 30 -keyint_min 1 -bf 0 -b_strategy 0 -weightb 0 -8x8dct 0 -deblock 0:0 -cmp chroma -refs 1 {MOTIONSEARCHRANGE} {SUBQ} {PARTITIONS} -trellis 0 -coder 0 -sc_threshold 40 -i_qfactor 0.71 -qcomp 0.6 -qdiff 4 -rc_eq 'blurCplx^(1-qComp)' {ASPECT} {FRAMESIZE} {DEINTERLACE} -y {AUDIOSYNC} -vcodec libx264 {VIDEOBITRATE} {VIDEOBITRATEDEVIATION} -qmax 48 -qmin 2 -r {FRAMERATE} {AUDIOCODEC} {AUDIOBITRATE} {AUDIOSAMPLERATE} {AUDIOCHANNELS} {VOLUMELEVEL} -force_key_frames expr:gte(t,n_forced*" +
                    SegmentDuration + ") -f segment -segment_time " + SegmentDuration + " -segment_list " +
                    workingdirectory + "index2.m3u8 -segment_start_number {SEGMENTSTARTNR} segment-%d.ts";
                //args = @"{THREADS} {INPUTFILE} -map 0:0 -map 0:1 -vbsf h264_mp4toannexb -flags -global_header {H264PROFILE} {H264LEVEL} -flags +loop -g 30 -keyint_min 1 -bf 0 -b_strategy 0 -weightb 0 -8x8dct 0 -deblock 0:0 -cmp chroma -refs 1 {MOTIONSEARCHRANGE} {SUBQ} {PARTITIONS} -trellis 0 -coder 0 -sc_threshold 40 -i_qfactor 0.71 -qcomp 0.6 -qdiff 4 -rc_eq 'blurCplx^(1-qComp)' {STARTTIME} {ASPECT} {FRAMESIZE} {DEINTERLACE} -y {AUDIOSYNC} -vcodec libx264 {VIDEOBITRATE} {VIDEOBITRATEDEVIATION} -qmax 48 -qmin 2 -r {FRAMERATE} {AUDIOCODEC} {AUDIOBITRATE} {AUDIOSAMPLERATE} {AUDIOCHANNELS} {VOLUMELEVEL}  -f segment -segment_time 3 -segment_list " + workingdirectory + "index2.m3u8 segment-%d.ts";
            }
            else // never change a winning team:
                //                args = @"{THREADS} {H264PROFILE} {H264LEVEL} -flags +loop -g 30 -keyint_min 1 -bf 0 -b_strategy 0 -flags2 -wpred-dct8x8 -cmp +chroma -deblockalpha 0 -deblockbeta 0 -refs 1 {MOTIONSEARCHRANGE} {SUBQ} {PARTITIONS} -trellis 0 -coder 0 -sc_threshold 40 -i_qfactor 0.71 -qcomp 0.6 -qdiff 4 -rc_eq 'blurCplx^(1-qComp)' {MAPPINGS} {STARTTIME} {INPUTFILE} {ASPECT} {FRAMESIZE} {DEINTERLACE} -y {AUDIOSYNC} -f mpegts -vcodec libx264 {VIDEOBITRATE} {VIDEOBITRATEDEVIATION} -qmax 48 -qmin 2 -r {FRAMERATE} {AUDIOCODEC} {AUDIOBITRATE} {AUDIOSAMPLERATE} {AUDIOCHANNELS} {VOLUMELEVEL} -scodec copy";
                args =
                    @"{THREADS} {H264PROFILE} {H264LEVEL} {USEGPU} -flags +loop -g 30 -keyint_min 1 -bf 0 -b_strategy 0 -flags2 -wpred-dct8x8 -cmp +chroma -deblockalpha 0 -deblockbeta 0 -refs 1 {MOTIONSEARCHRANGE} {SUBQ} {PARTITIONS} -trellis 0 -coder 0 -sc_threshold 40 -i_qfactor 0.71 -qcomp 0.6 -qdiff 4 -rc_eq 'blurCplx^(1-qComp)' {MAPPINGS} {STARTTIME} {INPUTFILE} {ASPECT} {FRAMESIZE} {DEINTERLACE} -y {AUDIOSYNC} -f mpegts -vcodec libx264 {VIDEOBITRATE} {VIDEOBITRATEDEVIATION} -qmax 48 -qmin 2 -r {FRAMERATE} {AUDIOCODEC} {AUDIOBITRATE} {AUDIOSAMPLERATE} {AUDIOCHANNELS} {VOLUMELEVEL} ";

            //if (request.LiveTV)
            //            args = @"{THREADS} {H264PROFILE} {H264LEVEL} -flags +loop -g 30 -keyint_min 1 -b_strategy 0 -flags2 -wpred-dct8x8 -cmp +chroma -deblockalpha 0 -deblockbeta 0 -refs 1 {MOTIONSEARCHRANGE} {SUBQ} {PARTITIONS} -trellis 0 -coder 0 -sc_threshold 40 -i_qfactor 0.71 -qcomp 0.6 -qdiff 4 -rc_eq 'blurCplx^(1-qComp)' {MAPPINGS} {STARTTIME} {INPUTFILE} {AUDIOSYNC} {ASPECT} {FRAMESIZE} {DEINTERLACE} -y -f mpegts -vcodec libx264 {VIDEOBITRATE} {MINVIDEOBITRATE} {MAXVIDEOBITRATE} {VIDEOBITRATEDEVIATION} -qmax 48 -qmin 2 -r 25 {AUDIOCODEC} {AUDIOBITRATE} {AUDIOSAMPLERATE} {AUDIOCHANNELS} {VOLUMELEVEL}";
            //// see for efficiency: http://smorgasbork.com/component/content/article/35-linux/98-high-bitrate-real-time-mpeg-2-encoding-with-ffmpeg as well
            //else // never change a winning team:
            //    args = @"{THREADS} {H264PROFILE} {H264LEVEL} -flags +loop -g 30 -keyint_min 1 -bf 0 -b_strategy 0 -flags2 -wpred-dct8x8 -cmp +chroma -deblockalpha 0 -deblockbeta 0 -refs 1 {MOTIONSEARCHRANGE} {SUBQ} {PARTITIONS} -trellis 0 -coder 0 -sc_threshold 40 -i_qfactor 0.71 -qcomp 0.6 -qdiff 4 -rc_eq 'blurCplx^(1-qComp)' {MAPPINGS} {STARTTIME} {INPUTFILE} {AUDIOSYNC} {ASPECT} {FRAMESIZE} {DEINTERLACE} -y -f mpegts -vcodec libx264 {VIDEOBITRATE} {VIDEOBITRATEDEVIATION} -qmax 48 -qmin 2 -r 25 {AUDIOCODEC} {AUDIOBITRATE} {AUDIOSAMPLERATE} {AUDIOCHANNELS} {VOLUMELEVEL}";

            // Use either the standard ffmpeg template or a custom one
            string strFFMpegTemplate = (string.IsNullOrWhiteSpace(EncodingParameters.CustomFFMpegTemplate)) ? args : EncodingParameters.CustomFFMpegTemplate;

            if (request.NewLiveTV)
            {
                //                strFFMpegTemplate = strFFMpegTemplate.Replace("{SEGMENTLENGTH}", "-segment_time " + SegmentDuration.ToString());
                strFFMpegTemplate = strFFMpegTemplate.Replace("{SEGMENTLENGTH}", "-segment_time 4");  // this results in segments ABOUT this size of 4 seconds
                //    strFFMpegTemplate = strFFMpegTemplate.Replace("{SEGMENTLENGTH}", " -segment_list_flags +live ");
                // Segment length and offset
            }
            else if (request.LiveTV)
            {// start with 4 seconds first then gradually increase up to 1 minute of segmentlength
                segmentArguments.AddArgCouple("--segment-length", SegmentDuration.ToString());
                segmentArguments.AddArgCouple("--segment-offset", StartAtSeconds.ToString());
                cmdArguments.AddArg(segmentArguments.ToString());
            }
            else if (!request.UseNewerFFMPEG)
            {
                segmentArguments.AddArgCouple("--segment-length", EncodingParameters.SegmentDuration.ToString());
                segmentArguments.AddArgCouple("--segment-offset", StartAtSeconds.ToString());
                cmdArguments.AddArg(segmentArguments.ToString());
            }

            // Multi threads
            if (!(request.LiveTV || request.NewLiveTV || (request.UseNewerFFMPEG && !request.LiveTV)))
            {// never change a winning team:
                strFFMpegTemplate = strFFMpegTemplate.Replace("{THREADS}", "-threads 4");
                strFFMpegTemplate = strFFMpegTemplate.Replace("{USEGPU}", " ");
            }
            //if ((Settings.Default.applyLiveTVSettingsToVideosAsWell && (request.UseNewerFFMPEG && !request.LiveTV)) || request.NewLiveTV )
            // {
            //     strFFMpegTemplate = strFFMpegTemplate.Replace("{THREADS}",
            //                                                  "-threads " + Settings.Default.NumberOfCoresX264);
            //     strFFMpegTemplate = strFFMpegTemplate.Replace("{USEGPU}",
            //                                                   (Settings.Default.GPUtranscode
            //                                                        ? "  -x264opts ref=9:opencl:crf=16.0 "
            //                                                        : " "));
            // }
            //else
            {
                strFFMpegTemplate = strFFMpegTemplate.Replace("{THREADS}", "-threads 8");
                strFFMpegTemplate = strFFMpegTemplate.Replace("{USEGPU}", " ");
            }

            string rate = "50";
            SendDebugMessage("frame rate set to: " + rate);
            if (g.Info.Success && (g.Info.AVVideoStreams.Count > 0))
            {
                if (!request.NewLiveTV && !request.LiveTV && g.Info.Success)
                {
                    rate = g.Info.AVVideoStreams[0].frameRate;
                    SendDebugMessage("However detected frame rate is, so set to: " + rate);
                }
            }
            double rateD;
            double.TryParse(rate, NumberStyles.Any, CultureInfo.InvariantCulture, out rateD);
            if (rateD > 31)
            {
                SendDebugMessage("Computed frame rate:" + (rateD / 2));
                strFFMpegTemplate = strFFMpegTemplate.Replace("{FRAMERATE}", "" + (rateD / 2)); //If over 30Hz, assume 50 or 59.94Hz and convert to 25 or 29.97
            }
            else
                strFFMpegTemplate = strFFMpegTemplate.Replace("{FRAMERATE}", rate);

            // Me Range
            strFFMpegTemplate = strFFMpegTemplate.Replace("{MOTIONSEARCHRANGE}", ("-me_range " + EncodingParameters.MotionSearchRange.ToString()));

            // SUBQ - important as setting it too high can slow things down
            strFFMpegTemplate = strFFMpegTemplate.Replace("{SUBQ}", ("-subq " + EncodingParameters.X264SubQ.ToString()));

            // Partitions
            string strPartitions = (EncodingParameters.PartitionsFlags.Length > 0) ?
                "-partitions " + EncodingParameters.PartitionsFlags : "";
            strFFMpegTemplate = strFFMpegTemplate.Replace("{PARTITIONS}", strPartitions);

            // Add Mappings
            if (((true || request.UseNewerFFMPEG) && !request.NewLiveTV))// for newlivetv change mappings in shellcmdrunner
            {
                string strMapArgs = (string.IsNullOrEmpty(MapArgumentsString)) ? "" : MapArgumentsString;
                strFFMpegTemplate = strFFMpegTemplate.Replace("{MAPPINGS}", strMapArgs);
            }
            else if (!request.NewLiveTV)
                strFFMpegTemplate = strFFMpegTemplate.Replace("{MAPPINGS}", "");

            // Start at : MUST BE BEFORE INPUT FILE FLAG -i *** !!!
            string strStartTime;
            if (request.NewLiveTV)
            {
                strStartTime = (secondsToStartNextTime <= 0) ? "" : ("-ss " + secondsToStartNextTime.ToString());
            }
            else
            {
                strStartTime = (StartAtSeconds <= 0) ? "" : ("-ss " + StartAtSeconds.ToString());
            }

            strFFMpegTemplate = strFFMpegTemplate.Replace("{STARTTIME}", strStartTime);

            int segstart = (startAtSegment >= 99999) ? 0 : startAtSegment;
            strFFMpegTemplate = strFFMpegTemplate.Replace("{SEGMENTSTARTNR}", segstart.ToString());

            //// for liveTV, -async 1 alone does not seem to work, so:
            //string strVideoAudioSync = (EncodingParameters.AVSyncDifference <= 0) ? "" : ("-itsoffset " + EncodingParameters.AVSyncDifference.ToString(CultureInfo.InvariantCulture));
            //strFFMpegTemplate = strFFMpegTemplate.Replace("{ITOFFSET}", strVideoAudioSync);

            // Input file - make short to avoid issues with UTF-8 in batch files  IT IS VERY IMPORTANT WHERE THIS GOES; AFTER SS BUT BEFORE VCODEC AND ACODEC
            if (request.NewLiveTV)
            {
                //                strFFMpegTemplate = strFFMpegTemplate.Replace("{INPUTFILE}", (" -ab 256000 -vb 10000000 -mbd rd -trellis 2 -cmp 2 -subcmp 2 -g 100 -f mpeg -i -")); //let it catch the pipe
                //strFFMpegTemplate = strFFMpegTemplate.Replace("{INPUTFILE}", (" -f mpeg -i -")); //let it catch the pipe
                string shortInputFile = Functions.FileWriter.GetShortPathName(InputFile);
                // Quotes around file
                string quotedInputFile = "\"" + shortInputFile + "\"";
                strFFMpegTemplate = strFFMpegTemplate.Replace("{INPUTFILE}", ("-i " + quotedInputFile));
            }
            else
            {
                string shortInputFile = Functions.FileWriter.GetShortPathName(InputFile);
                // Quotes around file
                string quotedInputFile = "\"" + shortInputFile + "\"";
                strFFMpegTemplate = strFFMpegTemplate.Replace("{INPUTFILE}", ("-i " + quotedInputFile));
            }

            if (request.NewLiveTV)
            {
                string workingFolderPath = Path.Combine(Environment.GetFolderPath(Environment.SpecialFolder.CommonApplicationData), "RemotePotato");
                workingFolderPath = Path.Combine(workingFolderPath + "\\static\\mediastreams\\", ID);
                if (!Directory.Exists(workingFolderPath)) Directory.CreateDirectory(workingFolderPath);
                string quotedIndexFile = "\"" + workingFolderPath + "\\livetv5.m3u8" + "\"";
                strFFMpegTemplate = strFFMpegTemplate.Replace("{INDEXFILE}", ("-segment_list " + quotedIndexFile));

                shellRunner.NamedPipe = Path.Combine(workingFolderPath, "PipestreamPath");
            }

            // Aspect ratio and frame size
            string asp = (request.NewLiveTV || (request.UseNewerFFMPEG && !request.LiveTV)) ? "-aspect:v " : "-aspect ";
            string strAspectRatio = (EncodingParameters.OutputSquarePixels) ? asp + "1:1" : asp + EncodingParameters.AspectRatio;
            strFFMpegTemplate = strFFMpegTemplate.Replace("{ASPECT}", strAspectRatio);
            string strFrameSize = ((request.NewLiveTV || (request.UseNewerFFMPEG && !request.LiveTV)) ? "-s:v " : "-s ") + EncodingParameters.ConstrainedSize;
            strFFMpegTemplate = strFFMpegTemplate.Replace("{FRAMESIZE}", strFrameSize);

            // Deinterlace (experimental)
            string strDeinterlace = (EncodingParameters.DeInterlace) ? "-deinterlace" : "";
            //string strdeinterlace = (encodingparameters.deinterlace) ? "-deinterlace" : "";
            strFFMpegTemplate = strFFMpegTemplate.Replace("{DEINTERLACE}", strDeinterlace);

            // OPTIONAL FOR LATER:  -vf "crop=720:572:0:2, scale=568:320"
            // Think this means crop to the aspect ratio, then scale to the normal frame

            // Audio sync amount
            string strAudioSync = "-async " + AudioSyncAmount.ToString();
            strFFMpegTemplate = strFFMpegTemplate.Replace("{AUDIOSYNC}", strAudioSync);

            int maxDuration = EncodingParameters.SegmentDuration;
            // --- Tail of the ffmpeg command-line builder (method header is above this chunk). ---
            // Ramp the effective segment duration up over the first segments of a live-TV session:
            // capped at the configured SegmentDuration, starting from the initial wait time plus a
            // per-segment increment. NOTE: trailing ";" after the statement is a stray empty statement.
            int currentSegmentDuration = Math.Min(EncodingParameters.SegmentDuration, request.InitialWaitTimeBeforeLiveTVStarts + startAtSegment * request.SegmentIncreasingStepsLiveTV); ;
            int videobitrate = 0;
            int audiobitrate = 0;
            // Video bitrate
            if (request.LiveTV)
            {
                if (maxDuration == 0)
                {
                    // Deliberate no-op: with an unknown maxDuration the bitrates stay 0.
                    // NOTE(review): this emits "-b 0" below — presumably ffmpeg treats 0 as
                    // "unrestricted"; verify that is the intent.
                }
                else
                {
                    // Scale the target video bitrate along a quarter sine wave of the ramp progress
                    // (currentSegmentDuration / maxDuration), so the bitrate climbs quickly at first.
                    videobitrate = (int)(toInteger(EncodingParameters.VideoBitRate) * (Math.Sin((Math.PI) * currentSegmentDuration / (2 * maxDuration))));//sin seems to be a nice fast climbing function
                    //                    audiobitrate = (int)(toInteger(EncodingParameters.AudioBitRate) * (Math.Sin((Math.PI) * currentSegmentDuration / (2 * maxDuration))));
                    // Audio stays at the full configured bitrate (ramping it was tried and disabled).
                    audiobitrate = toInteger(EncodingParameters.AudioBitRate);
                    //videobitrate = (int)(toInteger(EncodingParameters.VideoBitRate) * (currentSegmentDuration / maxDuration)); //linear
                    //audiobitrate = (int)(toInteger(EncodingParameters.AudioBitRate) * (currentSegmentDuration / maxDuration));
                    SendDebugMessage("VideoBitRate now:  " + videobitrate);
                }
                // Live TV: fixed large 50 MiB buffer ("Mi" = binary prefix) with the ramped bitrate.
                string strVideoBitRateOptions = "-bufsize " + "50Mi -b " + videobitrate;  //cmdArguments.AddArgCouple("-maxrate", VideoBitRate);
                strFFMpegTemplate = strFFMpegTemplate.Replace("{VIDEOBITRATE}", strVideoBitRateOptions);
            }
            else
            {
                // Non-live: buffer sized to the configured bitrate; newer ffmpeg builds use the
                // stream-qualified "-b:v" form, older ones the legacy "-b" form.
                string strVideoBitRateOptions = "-bufsize " + EncodingParameters.VideoBitRate + ((request.NewLiveTV || (request.UseNewerFFMPEG && !request.LiveTV)) ? " -b:v " : " -b ") + EncodingParameters.VideoBitRate;  //cmdArguments.AddArgCouple("-maxrate", VideoBitRate);
                strFFMpegTemplate = strFFMpegTemplate.Replace("{VIDEOBITRATE}", strVideoBitRateOptions);
            }
            // Pin min/max rates to the target to force near-CBR output. The same
            // (NewLiveTV || (UseNewerFFMPEG && !LiveTV)) predicate recurs throughout this method
            // to select newer-ffmpeg option spellings.
            if (request.NewLiveTV || (request.UseNewerFFMPEG  && !request.LiveTV))
            {
                // Max video bitrate (optional)
                string strMaxVideoBitRate = "-maxrate " + EncodingParameters.VideoBitRate;
                strFFMpegTemplate = strFFMpegTemplate.Replace("{MAXVIDEOBITRATE}", strMaxVideoBitRate);
                string strMinVideoBitRate = "-minrate " + EncodingParameters.VideoBitRate;
                strFFMpegTemplate = strFFMpegTemplate.Replace("{MINVIDEOBITRATE}", strMinVideoBitRate);
                // NOTE(review): "-maxrate"/"-minrate" are video rate-control options in ffmpeg;
                // applying them for the AUDIO placeholders looks suspect — verify these templates.
                string strMaxAudioBitRate = "-maxrate " + EncodingParameters.AudioBitRate;
                strFFMpegTemplate = strFFMpegTemplate.Replace("{MAXAUDIOBITRATE}", strMaxAudioBitRate);
                string strMinAudioBitRate = "-minrate " + EncodingParameters.AudioBitRate;
                strFFMpegTemplate = strFFMpegTemplate.Replace("{MINAUDIOBITRATE}", strMinAudioBitRate);

                // Newer ffmpeg: no bitrate-deviation ("-bt") option; blank the placeholder.
                string strVideoBitRateDeviation = "";
                strFFMpegTemplate = strFFMpegTemplate.Replace("{VIDEOBITRATEDEVIATION}", strVideoBitRateDeviation);
            }
            else if (request.LiveTV)
            {
                // Legacy live TV: clamp to the ramped per-segment bitrates computed above.
                // Max video bitrate (optional)
                string strMaxVideoBitRate = "-maxrate " + videobitrate;
                strFFMpegTemplate = strFFMpegTemplate.Replace("{MAXVIDEOBITRATE}", strMaxVideoBitRate);
                string strMinVideoBitRate = "-minrate " + videobitrate;
                strFFMpegTemplate = strFFMpegTemplate.Replace("{MINVIDEOBITRATE}", strMinVideoBitRate);
                string strMaxAudioBitRate = "-maxrate " + audiobitrate;
                strFFMpegTemplate = strFFMpegTemplate.Replace("{MAXAUDIOBITRATE}", strMaxAudioBitRate);
                string strMinAudioBitRate = "-minrate " + audiobitrate;
                strFFMpegTemplate = strFFMpegTemplate.Replace("{MINAUDIOBITRATE}", strMinAudioBitRate);

                string strVideoBitRateDeviation = "";
                strFFMpegTemplate = strFFMpegTemplate.Replace("{VIDEOBITRATEDEVIATION}", strVideoBitRateDeviation);
            }
            else
            {
                // Legacy non-live: cap only the max rate and allow the old "-bt" deviation option.
                // Max video bitrate (optional)
                string strMaxVideoBitRate = "-maxrate " + EncodingParameters.VideoBitRate;
                strFFMpegTemplate = strFFMpegTemplate.Replace("{MAXVIDEOBITRATE}", strMaxVideoBitRate);

                string strVideoBitRateDeviation = "-bt " + EncodingParameters.BitRateDeviation;
                strFFMpegTemplate = strFFMpegTemplate.Replace("{VIDEOBITRATEDEVIATION}", strVideoBitRateDeviation);
            }

            // Restrict H264 encoding level (e.g. for iPhone 3G)
            string strH264Level = (EncodingParameters.X264Level > 0) ? ("-level " + EncodingParameters.X264Level.ToString()) : "";
            strFFMpegTemplate = strFFMpegTemplate.Replace("{H264LEVEL}", strH264Level);
            // H264 profile: "-profile:v" for newer ffmpeg, "-profile" for legacy; omitted if unset.
            string strH264Profile = (string.IsNullOrWhiteSpace(EncodingParameters.X264Profile)) ? "" : ((request.NewLiveTV || (request.UseNewerFFMPEG && !request.LiveTV)) ? "-profile:v " : "-profile ") + EncodingParameters.X264Profile;
            strFFMpegTemplate = strFFMpegTemplate.Replace("{H264PROFILE}", strH264Profile);

            // Audio: MP3 - must be after input file flag -i  //
            string strAudioCodecOptions = "";
            // Codec flag spelling differs by ffmpeg generation ("-c:a" vs "-acodec").
            string cod = (request.NewLiveTV || (request.UseNewerFFMPEG && !request.LiveTV)) ? "-c:a " : "-acodec ";
            switch (EncodingParameters.AudioCodec)
            {
                case VideoEncodingParameters.AudioCodecTypes.NONE:
                    strAudioCodecOptions = " -an ";
                    break;

                case VideoEncodingParameters.AudioCodecTypes.AAC:
                    // ffmpeg's native AAC encoder required "-strict experimental" at the time.
                    strAudioCodecOptions = cod + "aac -strict experimental ";
                    break;

                default:
                    strAudioCodecOptions = cod + "libmp3lame";
                    break;
                //// "libfaac");
            }
            // NewLiveTV forces AAC regardless of the configured codec (unless audio is disabled).
            // NOTE(review): "cod" already ends with a space, so this yields "-c:a  aac" with a
            // double space — ffmpeg tolerates it, but it looks unintended.
            if (request.NewLiveTV && EncodingParameters.AudioCodec!=VideoEncodingParameters.AudioCodecTypes.NONE)
            {
                strAudioCodecOptions = cod + " aac -strict experimental ";
            }
            strFFMpegTemplate = strFFMpegTemplate.Replace("{AUDIOCODEC}", strAudioCodecOptions);

            // Audio Bitrate
            string strAudioBitRate = "";
            if (request.LiveTV)
            {
                strAudioBitRate = "-ab " + audiobitrate;
            }
            else if (request.NewLiveTV || (request.UseNewerFFMPEG && !request.LiveTV))
            {
                // NOTE(review): "-ab:a" is not a documented ffmpeg option ("-ab" is the legacy
                // alias of "-b:a"; the stream-qualified form is "-b:a") — verify against the
                // ffmpeg build actually shipped before changing.
                strAudioBitRate = "-ab:a " + EncodingParameters.AudioBitRate;
            }
            else
            {
                strAudioBitRate = "-ab " + EncodingParameters.AudioBitRate;
            }
            strFFMpegTemplate = strFFMpegTemplate.Replace("{AUDIOBITRATE}", strAudioBitRate);

            // Audio sample rate
            string strAudioSampleRate = ((request.NewLiveTV || (request.UseNewerFFMPEG && !request.LiveTV)) ? "-ar:a " : "-ar ") + "44100";//EncodingParameters.AudioSampleRate; 48000 results in no sound on android!!!
            strFFMpegTemplate = strFFMpegTemplate.Replace("{AUDIOSAMPLERATE}", strAudioSampleRate);

            // Force stereo
            string strAudioChannels = ((request.NewLiveTV || (request.UseNewerFFMPEG && !request.LiveTV)) ? "-ac:a 2 " : "-ac 2 ");
            strFFMpegTemplate = strFFMpegTemplate.Replace("{AUDIOCHANNELS}", strAudioChannels);

            // Volume Level
            string strVolumeBoost = "";
            if (EncodingParameters.AudioVolumePercent != 100)
            {
                // ffmpeg's "-vol" takes 256 == 100%, so scale the percentage accordingly.
                double fVolumeBytes = (256.0 * (EncodingParameters.AudioVolumePercent / 100.0));
                int iVolumeBytes = Convert.ToInt32(fVolumeBytes);
                strVolumeBoost = "-vol " + iVolumeBytes.ToString();
            }
            strFFMpegTemplate = strFFMpegTemplate.Replace("{VOLUMELEVEL}", strVolumeBoost);

            if (request.NewLiveTV)
            {
                // Resume segment numbering after the most recently written segment, so a restarted
                // encoder does not overwrite segments the client may still request.
                //http://stackoverflow.com/questions/1179970/c-sharp-find-most-recent-file-in-dir
                var directory = new DirectoryInfo(WorkingDirectory);
                int LatestSegmentNr = 0;
                // NOTE(review): hard-coded "\\" path separator and "liveseg-" name convention —
                // Windows-only by design, presumably; confirm against the segment writer.
                if (File.Exists(WorkingDirectory + "\\liveseg-1.ts"))
                {
                    var myFile = directory.GetFiles("*.ts").OrderByDescending(f => f.LastWriteTime).First();
                    string bestand = myFile.Name;
                    bestand = bestand.Replace(".ts", ""); // remove extension
                    // Get segment number
                    string strSegNumber;
                    // Filenames look like "liveseg-<n>"; the part after '-' is the segment number.
                    List<string> parts = bestand.Split('-').ToList();
                    if (parts.Count > 1)
                    {
                        strSegNumber = parts[1];
                        if (!int.TryParse(strSegNumber, out LatestSegmentNr))
                        {
                            // Parse failure intentionally ignored: LatestSegmentNr stays 0 and
                            // numbering restarts at 1 below.
                        }
                    }
                }

                //                strFFMpegTemplate = strFFMpegTemplate.Replace("{segment_times}", "-segment_times 4,20,40,60");
                strFFMpegTemplate = strFFMpegTemplate.Replace("{segment_times}", "");
                strFFMpegTemplate = strFFMpegTemplate.Replace("{segment_start_number}", "-segment_start_number " + Math.Max(1, LatestSegmentNr + 1));
            }
            else if (!(request.UseNewerFFMPEG && !request.LiveTV) && !request.NewLiveTV)
            {
                // Pipe to segmenter (ie send to standard output now)
                strFFMpegTemplate = strFFMpegTemplate + " -";
            }

            // Commit - add to the arguments
            cmdArguments.AddArg(strFFMpegTemplate);
        }
예제 #5
0
        /// <summary>
        /// Probes a media file with ffmpeg and returns its duration.
        /// </summary>
        /// <param name="fileName">Path of the media file to probe.</param>
        /// <returns>The probed duration, or <see cref="TimeSpan.Zero"/> when probing fails.</returns>
        public TimeSpan GetMediaDuration(string fileName)
        {
            // Probe results are written under the stream base folder.
            string probeResultsFolder = Path.Combine(Functions2.StreamBaseFolder, "probe_results");
            MediaInfoGrabber grabber = new MediaInfoGrabber(Functions2.ToolkitFolder, probeResultsFolder, fileName);

            // Forward the grabber's debug output only for the duration of the probe.
            grabber.DebugMessage += grabber_DebugMessage;
            grabber.GetInfo("ffmpeg.exe", "");
            grabber.DebugMessage -= grabber_DebugMessage;

            return grabber.Info.Success ? grabber.Info.Duration : TimeSpan.Zero;
        }