TimestampToSeconds() public static method

Converts a timestamp, expressed in stream time base units, to seconds.
public static TimestampToSeconds ( long ts, FFmpeg.AutoGen.AVRational streamTimeBase ) : decimal
ts long The timestamp, expressed in streamTimeBase units.
streamTimeBase FFmpeg.AutoGen.AVRational The stream time base (num/den) used to scale the timestamp into seconds.
return decimal
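
Helper.TimestampToSeconds itself is not shown on this page. A minimal sketch of the likely arithmetic, assuming the helper simply scales the raw timestamp by the rational time base and maps the unset sentinel to zero:

        // Sketch only -- not the library's verified source. A PTS/DTS value is a
        // count of time-base units, so seconds = ts * time_base.num / time_base.den.
        public static decimal TimestampToSeconds(long ts, FFmpeg.AutoGen.AVRational streamTimeBase)
        {
            if (ts == ffmpeg.AV_NOPTS_VALUE)
                return 0M; // assumption: treat unset timestamps as zero

            return Convert.ToDecimal(ts) * streamTimeBase.num / streamTimeBase.den;
        }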
Example #1
        private FFmpegMediaFrame CreateMediaFrameFromDecodedWaveHolder()
        {
            // Resample the decoded samples into the configured output format
            IntPtr bufferPtr = IntPtr.Zero;

            byte[] audioBuffer;

            try
            {
                // Include samples still buffered inside the resampler from earlier
                // calls, then compute the worst-case output sample count at the output rate
                var inputSampleCount  = DecodedWaveHolder->nb_samples;
                var outputDelay       = ffmpeg.swr_get_delay(AudioResampler, AudioSampleRate);
                var outputSampleCount = (int)ffmpeg.av_rescale_rnd(outputDelay + inputSampleCount, AudioOutputSampleRate, AudioSampleRate, AVRounding.AV_ROUND_UP);

                // Size the native scratch buffer for the worst case across all channels
                var outputLineSize  = outputSampleCount * (this.AudioOutputBitsPerSample / 8);
                var maxBufferLength = outputLineSize * Constants.AudioOutputChannelCount;

                bufferPtr = System.Runtime.InteropServices.Marshal.AllocHGlobal(maxBufferLength);
                var bufferNativePtr = (sbyte *)bufferPtr.ToPointer();

                var convertSampleCount = ffmpeg.swr_convert(AudioResampler, &bufferNativePtr, outputSampleCount, DecodedWaveHolder->extended_data, inputSampleCount);
                var outputBufferLength = ffmpeg.av_samples_get_buffer_size(&outputLineSize, Constants.AudioOutputChannelCount, convertSampleCount, Constants.AudioOutputSampleFormat, 1);

                // A negative length indicates the conversion or buffer sizing failed
                if (outputBufferLength < 0)
                {
                    return null;
                }

                // Copy the converted samples from native memory into a managed array
                audioBuffer = new byte[outputBufferLength];
                System.Runtime.InteropServices.Marshal.Copy(bufferPtr, audioBuffer, 0, audioBuffer.Length);
            }
            finally
            {
                if (bufferPtr != IntPtr.Zero)
                {
                    System.Runtime.InteropServices.Marshal.FreeHGlobal(bufferPtr);
                }
            }

            // Create the managed audio frame
            var mediaFrame = new FFmpegMediaFrame()
            {
                AudioBuffer        = audioBuffer,
                Duration           = Helper.TimestampToSeconds(DecodedWaveHolder->pkt_duration, InputAudioStream->time_base),
                CodedPictureNumber = -1,
                Flags       = FFmpegMediaFrameFlags.None,
                PictureType = FFmpegPictureType.None,
                StartTime   = Helper.TimestampToSeconds(DecodedWaveHolder->best_effort_timestamp, InputAudioStream->time_base),
                StreamIndex = InputAudioStream->index,
                Timestamp   = DecodedWaveHolder->best_effort_timestamp,
                Type        = MediaFrameType.Audio
            };

            return mediaFrame;
        }
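
A note on the sample-count math above: swr_get_delay reports how many samples are still buffered inside the resampler, and av_rescale_rnd with AV_ROUND_UP rescales that total to the output rate, rounding up so the allocation can never come out too small. An equivalent sketch in plain arithmetic (hypothetical helper; names assumed, not library code):

        // Hypothetical illustration of the av_rescale_rnd(AV_ROUND_UP) call above:
        // ceil((delay + input) * outputRate / inputRate) in 64-bit integer math.
        private static long PredictOutputSampleCount(long delaySamples, long inputSamples, long outputRate, long inputRate)
        {
            var totalInput = delaySamples + inputSamples;
            return (totalInput * outputRate + inputRate - 1) / inputRate;
        }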
Example #2
        private FFmpegMediaFrame CreateMediaFrameFromDecodedPictureHolder()
        {
            // Create the output picture. Once DecodedPictureHolder has the frame (typically
            // in YUV), the SWS API is used to convert it to BGR24 for display.
            var outputPicture       = (AVPicture *)ffmpeg.av_frame_alloc();
            var outputPictureBuffer = (sbyte *)ffmpeg.av_malloc((uint)OutputPictureBufferLength);

            ffmpeg.avpicture_fill(outputPicture, outputPictureBuffer, Constants.VideoOutputPixelFormat, VideoFrameWidth, VideoFrameHeight);

            // Convert the colorspace from (typically) YUV to BGR24
            sbyte **sourceScan0 = &DecodedPictureHolder->data0;
            sbyte **targetScan0 = &outputPicture->data0;

            ffmpeg.sws_scale(
                VideoResampler, sourceScan0, DecodedPictureHolder->linesize, 0,
                VideoFrameHeight, targetScan0, outputPicture->linesize);

            // Compute data size and data pointer (stride and scan0, respectively)
            var imageStride   = outputPicture->linesize[0];
            var imageDataSize = Convert.ToUInt32(VideoFrameHeight * imageStride);
            var imageDataPtr  = new IntPtr(outputPicture->data0);

            // Create a MediaFrame object with the info we have -- we will return this
            var mediaFrame = new FFmpegMediaFrame()
            {
                Picture             = outputPicture,
                PictureBuffer       = outputPictureBuffer,
                PictureBufferPtr    = imageDataPtr,
                PictureBufferLength = imageDataSize,
                StartTime           = Helper.TimestampToSeconds(DecodedPictureHolder->best_effort_timestamp, InputVideoStream->time_base),
                Flags              = (FFmpegMediaFrameFlags)DecodedPictureHolder->flags,
                PictureType        = (FFmpegPictureType)DecodedPictureHolder->pict_type,
                CodedPictureNumber = DecodedPictureHolder->coded_picture_number,
                Duration           = Helper.TimestampToSeconds(DecodedPictureHolder->pkt_duration, InputVideoStream->time_base),
                Timestamp          = DecodedPictureHolder->best_effort_timestamp,
                Type        = MediaFrameType.Video,
                StreamIndex = InputVideoStream->index
            };

            return mediaFrame;
        }
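
Unlike the audio path, which copies the converted samples into a managed array and frees its native scratch buffer in the finally block, this video path hands the native picture and buffer to the FFmpegMediaFrame, so whatever disposes the frame must release them. A plausible cleanup sketch, assuming the field names above and remembering that the AVPicture really came from av_frame_alloc:

        // Hypothetical cleanup -- not shown on this page. PictureBuffer came from
        // av_malloc, while Picture is an AVFrame allocated by av_frame_alloc that
        // was cast to AVPicture*, so it must be released as an AVFrame.
        private static void ReleaseVideoFrame(FFmpegMediaFrame frame)
        {
            if (frame.PictureBuffer != null)
                ffmpeg.av_free(frame.PictureBuffer);

            if (frame.Picture != null)
            {
                var avFrame = (AVFrame *)frame.Picture;
                ffmpeg.av_frame_free(&avFrame);
            }
        }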