IsNoPtsValue() private method

private IsNoPtsValue ( long timestamp ) : bool

Parameters:
    timestamp : long

Returns: bool
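
The helper's body is not shown on this page. Below is a minimal sketch of what Helper.IsNoPtsValue plausibly does, assuming it simply compares the timestamp against FFmpeg's AV_NOPTS_VALUE sentinel (the value FFmpeg reports when no presentation timestamp or duration is known). The ffmpeg.AV_NOPTS_VALUE constant from FFmpeg.AutoGen is the only API assumed here; the actual Helper implementation may differ.

        // Minimal sketch (assumption): report whether a timestamp equals FFmpeg's
        // "no PTS" sentinel, exposed by FFmpeg.AutoGen as ffmpeg.AV_NOPTS_VALUE.
        private static bool IsNoPtsValue(long timestamp)
        {
            return timestamp == ffmpeg.AV_NOPTS_VALUE;
        }

The examples below call it to flag a media source as a live stream when the container reports no overall duration, and to default a missing start DTS to zero.

Example #1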
        /// <summary>
        /// Initializes the internal transcoder -- this creates the input, processing, and output blocks that make
        /// up the video and audio decoding stream.
        /// </summary>
        /// <param name="filePath">The file path.</param>
        /// <param name="inputFormatName">Name of the input format. Leave null or empty to detect automatically</param>
        /// <param name="referer">The referer. Leave null or empty to skip setting it</param>
        /// <param name="userAgent">The user agent. Leave null or empty to skip setting it.</param>
        /// <exception cref="FileFormatException"></exception>
        /// <exception cref="Exception">Could not find stream info
        /// or
        /// Media must contain at least a video or and audio stream</exception>
        /// <exception cref="System.Exception">Could not open file
        /// or
        /// Could not find stream info
        /// or
        /// Media must contain a video stream
        /// or
        /// Media must contain an audio stream
        /// or
        /// Unsupported codec
        /// or
        /// Could not initialize the output conversion context
        /// or
        /// Could not create output codec context from input
        /// or
        /// Could not open codec</exception>
        private void InitializeMedia(string filePath, string inputFormatName, string referer, string userAgent)
        {
            // Create the input format context by opening the file
            InputFormatContext = ffmpeg.avformat_alloc_context();

            AVDictionary *optionsDict = null;

            if (string.IsNullOrWhiteSpace(userAgent) == false)
            {
                ffmpeg.av_dict_set(&optionsDict, "user-agent", userAgent, 0);
            }

            if (string.IsNullOrWhiteSpace(referer) == false)
            {
                ffmpeg.av_dict_set(&optionsDict, "headers", $"Referer:{referer}", 0);
            }

            ffmpeg.av_dict_set_int(&optionsDict, "usetoc", 1, 0);

            { // for m3u8 (HLS) streaming
                // TODO: maybe detect here if it is streaming? I need to test if this negatively affects filesystem files or network files as opposed to RTSP streams and HLS streams
                ffmpeg.av_dict_set_int(&optionsDict, "multiple_requests", 1, 0);
                ffmpeg.av_dict_set_int(&optionsDict, "reconnect", 1, 0);
                ffmpeg.av_dict_set_int(&optionsDict, "reconnect_at_eof", 1, 0);
                ffmpeg.av_dict_set_int(&optionsDict, "reconnect_streamed", 1, 0);
                ffmpeg.av_dict_set_int(&optionsDict, "reconnect_delay_max", (int)Constants.WaitForPlaybackReadyStateTimeout.TotalMilliseconds, 0);
            }


            AVInputFormat *inputFormat = null;

            if (string.IsNullOrWhiteSpace(inputFormatName) == false)
            {
                // Resolve the forced input format; when no name is given, inputFormat stays null and FFmpeg auto-detects it
                inputFormat = ffmpeg.av_find_input_format(inputFormatName);
            }

            fixed(AVFormatContext **inputFormatContextRef = &InputFormatContext)
            {
                if (ffmpeg.avformat_open_input(inputFormatContextRef, filePath, inputFormat, &optionsDict) != 0)
                {
                    throw new FileFormatException(string.Format("Could not open stream or file '{0}'", filePath));
                }
            }

            InputFormatContext->iformat->flags |= ffmpeg.AVFMT_FLAG_NOBUFFER;
            InputFormatContext->iformat->flags |= ffmpeg.AVFMT_FLAG_NOFILLIN;

            ffmpeg.av_dict_free(&optionsDict);

            // Extract the stream info headers from the file
            if (ffmpeg.avformat_find_stream_info(InputFormatContext, null) != 0)
            {
                throw new Exception("Could not find stream info");
            }

            // search for the audio and video streams
            for (int i = 0; i < InputFormatContext->nb_streams; i++)
            {
                var codecType = InputFormatContext->streams[i]->codec->codec_type;

                if (codecType == AVMediaType.AVMEDIA_TYPE_VIDEO && InputVideoStream == null)
                {
                    InputVideoStream = InputFormatContext->streams[i];
                    continue;
                }

                if (codecType == AVMediaType.AVMEDIA_TYPE_AUDIO && InputAudioStream == null)
                {
                    InputAudioStream = InputFormatContext->streams[i];
                    continue;
                }
            }

            if (InputVideoStream != null)
            {
                this.InitializeVideo();
                this.HasVideo = VideoBitrate > 0 || VideoFrameRate > 0M || VideoFrameWidth > 0 || VideoFrameHeight > 0;
            }

            if (InputAudioStream != null)
            {
                this.InitializeAudio();
                this.HasAudio = AudioBytesPerSample > 0;
            }

            if (HasAudio == false && HasVideo == false)
            {
                throw new Exception("Media must contain at least a video or an audio stream");
            }
            else
            {
                // General Properties here

                NaturalDuration = Convert.ToDecimal(Convert.ToDouble(InputFormatContext->duration) / Convert.ToDouble(ffmpeg.AV_TIME_BASE));
                IsLiveStream    = Helper.IsNoPtsValue(InputFormatContext->duration);
                StartTime       = Convert.ToDecimal(Convert.ToDouble(InputFormatContext->start_time) / Convert.ToDouble(ffmpeg.AV_TIME_BASE));
                EndTime         = StartTime + NaturalDuration;

                RealtimeClock.Seek(StartTime);
            }
        }
Example #2
        /// <summary>
        /// Initializes a new instance of the <see cref="FFmpegMedia"/> class.
        /// </summary>
        /// <param name="filePath">The file path.</param>
        /// <param name="errorCallback">The error callback.</param>
        /// <exception cref="System.ArgumentException">
        /// errorCallback cannot be null
        /// or
        /// filePath cannot be null or empty
        /// </exception>
        /// <exception cref="System.Exception"></exception>
        public FFmpegMedia(string filePath, MediaErrorOccurredCallback errorCallback)
        {
            // Argument validation
            if (errorCallback == null)
            {
                throw new ArgumentException("errorCallback cannot be null");
            }

            if (string.IsNullOrWhiteSpace(filePath))
            {
                throw new ArgumentException("filePath cannot be null or empty");
            }

            // Error callback
            this.ErrorOccurredCallback = errorCallback;

            // Register the property state change handler
            this.RealtimeClock.PropertyChanged += (s, e) => { NotifyPlayStateChanged(); };

            // Make sure we registered the library
            Helper.RegisterFFmpeg();

            // Create the audio provider and audio renderer
            this.PcmAudioProvider = new AudioBufferProvider(this);
            this.AudioRenderer    = new AudioRenderer();

            // load input, codec and output contexts
            this.InitializeMedia(filePath);

            // Setup the frames Cache
            this.VideoFramesCache = new FFmpegMediaFrameCache(this.VideoFrameRate, MediaFrameType.Video);
            this.AudioFramesCache = new FFmpegMediaFrameCache(this.AudioSampleRate / 1000M, MediaFrameType.Audio);

            // Setup the Leading and Lagging frames cache
            if (HasVideo && (HasAudio == false || InputAudioStream->index > InputVideoStream->index))
            {
                this.LeadingFramesCache = VideoFramesCache;
                this.LaggingFramesCache = AudioFramesCache;
                this.StartDts           = InputVideoStream->start_time;

                LeadingStreamType = MediaFrameType.Video;
                LaggingStreamType = HasAudio ? MediaFrameType.Audio : MediaFrameType.Unknown;
            }
            else
            {
                this.LeadingFramesCache = AudioFramesCache;
                this.LaggingFramesCache = VideoFramesCache;
                this.StartDts           = InputAudioStream->start_time;

                LeadingStreamType = MediaFrameType.Audio;
                LaggingStreamType = HasVideo ? MediaFrameType.Video : MediaFrameType.Unknown;
            }

            if (Helper.IsNoPtsValue(StartDts))
            {
                StartDts = 0;
            }

            // Setup Video Renderer and Video Frames Cache
            if (HasVideo)
            {
                this.VideoRenderer = new WriteableBitmap(this.VideoFrameWidth, this.VideoFrameHeight, 96, 96, System.Windows.Media.PixelFormats.Bgr24, null);
            }
            else
            {
                this.VideoRenderer = new WriteableBitmap(1, 1, 96, 96, System.Windows.Media.PixelFormats.Bgr24, null);
            }


            // Setup Audio Renderer and Audio Frames Cache
            if (HasAudio)
            {
                this.StartAudioRenderer();
            }

            // Start the continuous Decoder thread that fills up our queue.
            MediaFrameExtractorThread = new Thread(ExtractMediaFramesContinuously)
            {
                IsBackground = true,
                Priority     = ThreadPriority.AboveNormal
            };

            // Begin the media extractor
            MediaFrameExtractorThread.Start();
            MediaFramesExtractedDone.Reset();
            if (MediaFramesExtractedDone.Wait(Constants.WaitForPlaybackReadyStateTimeout) == false)
            {
                throw new Exception(string.Format("Could not load sream frames in a timely manner. Timed out in {0}", Constants.WaitForPlaybackReadyStateTimeout));
            }

            // Initialize the Speed Ratio to 1.0 (Default)
            this.SpeedRatio = Constants.DefaultSpeedRatio;

            // Start the render timer on the UI thread.
            this.VideoRenderTimer.Tick     += RenderVideoImage;
            this.VideoRenderTimer.Interval  = TimeSpan.FromMilliseconds(Constants.VideoRenderTimerIntervalMillis);
            this.VideoRenderTimer.IsEnabled = true;
            this.VideoRenderTimer.Start();
        }
Example #3
        /// <summary>
        /// Initializes the internal transcoder -- this creates the input, processing, and output blocks that make
        /// up the video and audio decoding stream.
        /// </summary>
        /// <param name="filePath">The file path.</param>
        /// <exception cref="System.Exception">
        /// Could not open file
        /// or
        /// Could not find stream info
        /// or
        /// Media must contain a video stream
        /// or
        /// Media must contain an audio stream
        /// or
        /// Unsupported codec
        /// or
        /// Could not initialize the output conversion context
        /// or
        /// Could not create output codec context from input
        /// or
        /// Could not open codec
        /// </exception>
        private void InitializeMedia(string filePath)
        {
            // Create the input format context by opening the file
            InputFormatContext = ffmpeg.avformat_alloc_context();

            AVDictionary *optionsDict = null;

            ffmpeg.av_dict_set_int(&optionsDict, "usetoc", 1, 0);

            fixed(AVFormatContext **inputFormatContextRef = &InputFormatContext)
            {
                if (ffmpeg.avformat_open_input(inputFormatContextRef, filePath, null, &optionsDict) != 0)
                {
                    throw new Exception(string.Format("Could not open file '{0}'", filePath));
                }
            }

            //InputFormatContext->iformat->flags = InputFormatContext->iformat->flags | FFmpegInvoke.AVFMT_SEEK_TO_PTS;
            ffmpeg.av_dict_free(&optionsDict);

            // Extract the stream info headers from the file
            if (ffmpeg.avformat_find_stream_info(InputFormatContext, null) != 0)
            {
                throw new Exception("Could not find stream info");
            }

            // search for the audio and video streams
            for (int i = 0; i < InputFormatContext->nb_streams; i++)
            {
                var codecType = InputFormatContext->streams[i]->codec->codec_type;

                if (codecType == AVMediaType.AVMEDIA_TYPE_VIDEO && InputVideoStream == null)
                {
                    InputVideoStream = InputFormatContext->streams[i];
                    continue;
                }

                if (codecType == AVMediaType.AVMEDIA_TYPE_AUDIO && InputAudioStream == null)
                {
                    InputAudioStream = InputFormatContext->streams[i];
                    continue;
                }
            }

            if (InputVideoStream != null)
            {
                this.InitializeVideo();
                this.HasVideo = VideoBitrate > 0 || VideoFrameRate > 0M || VideoFrameWidth > 0 || VideoFrameHeight > 0;
            }

            if (InputAudioStream != null)
            {
                this.InitializeAudio();
                this.HasAudio = AudioBytesPerSample > 0;
            }

            if (HasAudio == false && HasVideo == false)
            {
                throw new Exception("Media must contain at least a video or and audio stream");
            }
            else
            {
                // General Properties here

                NaturalDuration = Convert.ToDecimal(Convert.ToDouble(InputFormatContext->duration) / Convert.ToDouble(ffmpeg.AV_TIME_BASE));
                IsLiveStream    = Helper.IsNoPtsValue(InputFormatContext->duration);
                StartTime       = Convert.ToDecimal(Convert.ToDouble(InputFormatContext->start_time) / Convert.ToDouble(ffmpeg.AV_TIME_BASE));
                EndTime         = StartTime + NaturalDuration;

                RealtimeClock.Seek(StartTime);
            }
        }