Example No. 1
        public int SetupAudio()
        {
            int ret;

            if (swrCtx == null)
            {
                swrCtx = swr_alloc();
            }

            m_max_dst_nb_samples = -1;

            av_opt_set_int(swrCtx, "in_channel_layout", (int)codecCtx->channel_layout, 0);
            av_opt_set_int(swrCtx, "in_channel_count", codecCtx->channels, 0);
            av_opt_set_int(swrCtx, "in_sample_rate", codecCtx->sample_rate, 0);
            av_opt_set_sample_fmt(swrCtx, "in_sample_fmt", codecCtx->sample_fmt, 0);

            av_opt_set_int(swrCtx, "out_channel_layout", AOutChannelLayout, 0);
            av_opt_set_int(swrCtx, "out_channel_count", AOutChannels, 0);
            av_opt_set_int(swrCtx, "out_sample_rate", codecCtx->sample_rate, 0);
            av_opt_set_sample_fmt(swrCtx, "out_sample_fmt", AOutSampleFormat, 0);

            ret = swr_init(swrCtx);
            if (ret < 0)
            {
                Log($"[AudioSetup] [ERROR-1] {Utils.FFmpeg.ErrorCodeToMsg(ret)} ({ret})");
            }

            decCtx.audioPlayer.Initialize(codecCtx->sample_rate);

            return(ret);
        }
Example No. 2
        public int SetupAudio()
        {
            int ret;

            if (swrCtx == null)
            {
                swrCtx = swr_alloc();
            }

            m_max_dst_nb_samples = -1;

            opt.audio.SampleRate = codecCtx->sample_rate;
            opt.audio.Channels   = av_get_channel_layout_nb_channels((ulong)opt.audio.ChannelLayout);
            opt.audio.Bits       = codecCtx->bits_per_coded_sample > 0 ? codecCtx->bits_per_coded_sample : 8;

            av_opt_set_int(swrCtx, "in_channel_layout", (int)codecCtx->channel_layout, 0);
            av_opt_set_int(swrCtx, "in_channel_count", codecCtx->channels, 0);
            av_opt_set_int(swrCtx, "in_sample_rate", codecCtx->sample_rate, 0);
            av_opt_set_sample_fmt(swrCtx, "in_sample_fmt", codecCtx->sample_fmt, 0);

            av_opt_set_int(swrCtx, "out_channel_layout", opt.audio.ChannelLayout, 0);
            av_opt_set_int(swrCtx, "out_channel_count", opt.audio.Channels, 0);
            av_opt_set_int(swrCtx, "out_sample_rate", opt.audio.SampleRate, 0);
            av_opt_set_sample_fmt(swrCtx, "out_sample_fmt", opt.audio.SampleFormat, 0);

            ret = swr_init(swrCtx);
            if (ret < 0)
            {
                Log($"[AudioSetup] [ERROR-1] {Utils.ErrorCodeToMsg(ret)} ({ret})");
            }

            return(ret);
        }
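The two SetupAudio() variants above (Examples No. 1 and 2) only allocate and initialize the SwrContext; the decoded frames still have to be pushed through swr_convert. The sketch below shows that step with the same FFmpeg.AutoGen bindings, assuming a context configured as above and a binding in which swr_convert and AVFrame.extended_data use byte** (some older bindings declare sbyte** instead). The buffer handling is illustrative and not taken from the original projects.

using System;
using System.Runtime.InteropServices;
using FFmpeg.AutoGen;

public static unsafe class SwrConvertSketch
{
    // Converts one decoded AVFrame into a managed byte[] holding interleaved
    // samples in the output format the SwrContext was initialized with.
    public static byte[] ConvertFrame(SwrContext* swrCtx, AVFrame* frame,
                                      int outChannels, AVSampleFormat outFormat)
    {
        // Upper bound on the number of output samples this input frame can produce.
        int maxOutSamples = ffmpeg.swr_get_out_samples(swrCtx, frame->nb_samples);

        byte** outData = null;
        int outLinesize = 0;
        ffmpeg.av_samples_alloc_array_and_samples(&outData, &outLinesize,
            outChannels, maxOutSamples, outFormat, 0);

        try
        {
            // extended_data covers both planar and packed input layouts.
            int converted = ffmpeg.swr_convert(swrCtx, outData, maxOutSamples,
                                               frame->extended_data, frame->nb_samples);
            if (converted < 0)
                throw new InvalidOperationException($"swr_convert failed ({converted})");

            int bytes = ffmpeg.av_samples_get_buffer_size(null, outChannels,
                                                          converted, outFormat, 1);
            var managed = new byte[bytes];
            Marshal.Copy((IntPtr)outData[0], managed, 0, bytes);
            return managed;
        }
        finally
        {
            if (outData != null)
            {
                ffmpeg.av_freep(&outData[0]);
                ffmpeg.av_freep(&outData);
            }
        }
    }
}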
Example No. 3
        /// <summary>
        /// <see cref="swr_free(SwrContext**)"/>
        /// </summary>
        public void Free()
        {
            SwrContext *ptr = this;

            swr_free(&ptr);
            _nativePointer = (IntPtr)ptr;
        }
Example No. 4
        /// <summary>
        /// create a resampler to convert the audio format to our output format
        /// </summary>
        private void SetupResampler()
        {
            _resampled = ffmpeg.av_frame_alloc();
            _resampled->channel_layout = (ulong)ffmpeg.av_get_default_channel_layout(_source._DESIRED_CHANNELS);
            _resampled->sample_rate    = _source._DESIRED_SAMPLE_RATE;
            _resampled->channels       = _source._DESIRED_CHANNELS;
            _resampled->format         = (int)_source._DESIRED_SAMPLE_FORMAT;

            // Fixes the "[SWR @ 0x2192200] Input channel count and layout are unset" error.
            if (_codecCtx->channel_layout == 0)
            {
                _codecCtx->channel_layout = (ulong)ffmpeg.av_get_default_channel_layout(_codecCtx->channels);
            }

            //we only want to change from planar to interleaved
            _resampler = ffmpeg.swr_alloc_set_opts(null,
                                                   (long)_resampled->channel_layout, //Out layout should be identical to input layout
                                                   (AVSampleFormat)_resampled->format,
                                                   _resampled->sample_rate,          //Out frequency should be identical to input frequency
                                                   (long)_codecCtx->channel_layout,
                                                   _codecCtx->sample_fmt,
                                                   _codecCtx->sample_rate,
                                                   0, null); //No logging

            if (ffmpeg.swr_init(_resampler) != 0)
            {
                throw new InvalidOperationException("Can't init resampler!");
            }
        }
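Because SetupResampler() above keeps the channel layout and sample rate and only changes the sample format (planar to interleaved), the conversion can be done frame-to-frame. Below is a minimal sketch of that step, assuming libswresample 2.x or later where swr_convert_frame is exposed by FFmpeg.AutoGen; it is not part of the original class.

using System;
using FFmpeg.AutoGen;

public static unsafe class PlanarToInterleavedSketch
{
    // Converts one decoded (typically planar) frame into a packed frame with the
    // same channel layout and sample rate, matching the options set above.
    public static AVFrame* Convert(SwrContext* resampler, AVFrame* decoded,
                                   AVSampleFormat packedFormat)
    {
        AVFrame* output = ffmpeg.av_frame_alloc();
        output->channel_layout = decoded->channel_layout;
        output->sample_rate    = decoded->sample_rate;
        output->format         = (int)packedFormat;

        // With an unallocated output frame, swr_convert_frame allocates the
        // buffers and sets nb_samples itself.
        int ret = ffmpeg.swr_convert_frame(resampler, output, decoded);
        if (ret < 0)
        {
            ffmpeg.av_frame_free(&output);
            throw new InvalidOperationException($"swr_convert_frame failed ({ret})");
        }

        return output; // the caller releases it with av_frame_free
    }
}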
Example No. 5
        public unsafe void InitialiseSource()
        {
            if (!_isInitialised)
            {
                _isInitialised = true;

                _fmtCtx        = ffmpeg.avformat_alloc_context();
                _fmtCtx->flags = ffmpeg.AVFMT_FLAG_NONBLOCK;

                var pFormatContext = _fmtCtx;
                ffmpeg.avformat_open_input(&pFormatContext, _sourceUrl, _inputFormat, null).ThrowExceptionIfError();
                ffmpeg.avformat_find_stream_info(_fmtCtx, null).ThrowExceptionIfError();

                ffmpeg.av_dump_format(_fmtCtx, 0, _sourceUrl, 0);

                // Set up audio decoder.
                AVCodec *audCodec = null;
                _audioStreamIndex = ffmpeg.av_find_best_stream(_fmtCtx, AVMediaType.AVMEDIA_TYPE_AUDIO, -1, -1, &audCodec, 0).ThrowExceptionIfError();
                logger.LogDebug($"FFmpeg file source decoder {ffmpeg.avcodec_get_name(audCodec->id)} audio codec for stream {_audioStreamIndex}.");
                _audDecCtx = ffmpeg.avcodec_alloc_context3(audCodec);
                if (_audDecCtx == null)
                {
                    throw new ApplicationException("Failed to allocate audio decoder codec context.");
                }
                ffmpeg.avcodec_parameters_to_context(_audDecCtx, _fmtCtx->streams[_audioStreamIndex]->codecpar).ThrowExceptionIfError();
                ffmpeg.avcodec_open2(_audDecCtx, audCodec, null).ThrowExceptionIfError();

                // Set up an audio conversion context so that the decoded samples can always be delivered as signed 16 bit mono PCM.

                _swrContext = ffmpeg.swr_alloc();
                ffmpeg.av_opt_set_sample_fmt(_swrContext, "in_sample_fmt", _audDecCtx->sample_fmt, 0);
                ffmpeg.av_opt_set_sample_fmt(_swrContext, "out_sample_fmt", AVSampleFormat.AV_SAMPLE_FMT_S16, 0);

                ffmpeg.av_opt_set_int(_swrContext, "in_sample_rate", _audDecCtx->sample_rate, 0);
                ffmpeg.av_opt_set_int(_swrContext, "out_sample_rate", Helper.AUDIO_SAMPLING_RATE, 0);

                // FIX: some codecs leave the channel layout unset in the decoder context.
                if (_audDecCtx->channel_layout == 0)
                {
                    long in_channel_layout = ffmpeg.av_get_default_channel_layout(_audDecCtx->channels);
                    ffmpeg.av_opt_set_channel_layout(_swrContext, "in_channel_layout", in_channel_layout, 0);
                }
                else
                {
                    ffmpeg.av_opt_set_channel_layout(_swrContext, "in_channel_layout", (long)_audDecCtx->channel_layout, 0);
                }
                ffmpeg.av_opt_set_channel_layout(_swrContext, "out_channel_layout", ffmpeg.AV_CH_LAYOUT_MONO, 0);

                ffmpeg.swr_init(_swrContext).ThrowExceptionIfError();


                _audioTimebase      = ffmpeg.av_q2d(_fmtCtx->streams[_audioStreamIndex]->time_base);
                _audioAvgFrameRate  = ffmpeg.av_q2d(_fmtCtx->streams[_audioStreamIndex]->avg_frame_rate);
                _maxAudioFrameSpace = (int)(_audioAvgFrameRate > 0 ? 1000 / _audioAvgFrameRate : 10000 * Helper.AUDIO_SAMPLING_RATE);
            }
        }
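When the output rate differs from the decoder rate, as in InitialiseSource() above where everything is resampled to Helper.AUDIO_SAMPLING_RATE, one input frame does not map to the same number of output samples, and libswresample may also buffer samples internally. The sketch below shows the usual sizing calculation; it is an assumption about how the surrounding code would size its output buffers, not code from the original source.

using FFmpeg.AutoGen;

public static unsafe class ResampleSizingSketch
{
    // Estimates how many output samples one input frame will produce when the
    // sample rate changes, including samples still buffered in the SwrContext.
    public static int EstimateOutputSamples(SwrContext* swr, AVFrame* frame, int outSampleRate)
    {
        long delay = ffmpeg.swr_get_delay(swr, frame->sample_rate);
        long outSamples = ffmpeg.av_rescale_rnd(
            delay + frame->nb_samples,   // pending + incoming input samples
            outSampleRate,               // target rate
            frame->sample_rate,          // source rate
            AVRounding.AV_ROUND_UP);
        return (int)outSamples;
    }
}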
Example No. 6
        /// <summary>
        /// Inits the Codec context.
        /// </summary>
        /// <param name="encoder">If set to <c>true</c> encoder.</param>
        void CreateContext(bool encoder = false)
        {
            if (!Initialized)
            {
                throw new InvalidOperationException("Instance is not initialized yet, call Initialize() first");
            }
            else if (ContextCreated)
            {
                throw new InvalidOperationException("Context already initialized!");
            }

            if (encoder)
            {
                pCodec = ffmpeg.avcodec_find_encoder(avCodecID);
            }
            else
            {
                pCodec = ffmpeg.avcodec_find_decoder(avCodecID);
            }

            if (pCodec == null)
            {
                throw new InvalidOperationException("VideoCodec not found");
            }
            pCodecContext = ffmpeg.avcodec_alloc_context3(pCodec);
            if (pCodecContext == null)
            {
                throw new InvalidOperationException("Could not allocate codec context");
            }

            // Call to abstract method
            SetCodecContextParams(pCodecContext);

            if ((pCodec->capabilities & ffmpeg.AV_CODEC_CAP_TRUNCATED) == ffmpeg.AV_CODEC_CAP_TRUNCATED)
            {
                pCodecContext->flags |= ffmpeg.AV_CODEC_FLAG_TRUNCATED;
            }

            if (ffmpeg.avcodec_open2(pCodecContext, pCodec, null) < 0)
            {
                throw new InvalidOperationException("Could not open codec");
            }

            if (doResample)
            {
                // Call to abstract method
                pResampler = CreateResampler(pCodecContext);
                if (ffmpeg.swr_is_initialized(pResampler) <= 0)
                {
                    throw new InvalidOperationException("Failed to init resampler");
                }
            }

            ContextCreated = true;
        }
Example No. 7
        public void ConvertToFormat(AVSampleFormat sampleFormat, int sampleRate, int channels, ResampleQuality resampleQuality = ResampleQuality.High)
        {
            if (format == (int)sampleFormat &&
                this.sampleRate == sampleRate &&
                this.channels == channels)
            {
                return;
            }

            format          = (int)sampleFormat;
            this.sampleRate = sampleRate;
            this.channels   = channels;

            long channelLayout = ffmpeg.av_get_default_channel_layout(channels);

            swrContext = ffmpeg.swr_alloc();

            ffmpeg.av_opt_set_int(swrContext, "in_channel_layout", (int)codecContext->channel_layout, 0);
            ffmpeg.av_opt_set_int(swrContext, "out_channel_layout", channelLayout, 0);
            ffmpeg.av_opt_set_int(swrContext, "in_channel_count", codecContext->channels, 0);
            ffmpeg.av_opt_set_int(swrContext, "out_channel_count", channels, 0);
            ffmpeg.av_opt_set_int(swrContext, "in_sample_rate", codecContext->sample_rate, 0);
            ffmpeg.av_opt_set_int(swrContext, "out_sample_rate", sampleRate, 0);
            ffmpeg.av_opt_set_sample_fmt(swrContext, "in_sample_fmt", codecContext->sample_fmt, 0);
            ffmpeg.av_opt_set_sample_fmt(swrContext, "out_sample_fmt", sampleFormat, 0);

            switch (resampleQuality)
            {
            case ResampleQuality.Low:
                ffmpeg.av_opt_set_int(swrContext, "filter_size", 0, 0);
                ffmpeg.av_opt_set_int(swrContext, "phase_shift", 0, 0);
                break;

            case ResampleQuality.Medium:
                // default ffmpeg settings
                break;

            case ResampleQuality.High:
                ffmpeg.av_opt_set_int(swrContext, "filter_size", 128, 0);
                ffmpeg.av_opt_set_double(swrContext, "cutoff", 1.0, 0);
                break;

            case ResampleQuality.Highest:
                ffmpeg.av_opt_set_int(swrContext, "filter_size", 256, 0);
                ffmpeg.av_opt_set_double(swrContext, "cutoff", 1.0, 0);
                break;
            }

            if (ffmpeg.swr_init(swrContext) != 0)
            {
                throw new ApplicationException("Failed init SwrContext: " + FFmpegHelper.logLastLine);
            }
        }
Example No. 8
        public void Close()
        {
            if (_convHandle.IsAllocated)
            {
                _convHandle.Free();
            }

            if (_swrContext != null)
            {
                ffmpeg.swr_close(_swrContext);
                _swrContext = null;
            }
        }
Example No. 9
        /// <inheritdoc />
        protected override void Dispose(bool alsoManaged)
        {
            RC.Current.Remove(Scaler);
            if (Scaler != null)
            {
                var scalerRef = Scaler;
                ffmpeg.swr_free(&scalerRef);
                Scaler = null;
            }

            DestroyFilterGraph();
            base.Dispose(alsoManaged);
        }
Example No. 10
        private void initSWR()
        {
            _swrContext = ffmpeg.swr_alloc_set_opts(null,
                                                    ffmpeg.av_get_default_channel_layout(OutFormat.Channels),
                                                    AVSampleFormat.AV_SAMPLE_FMT_S16,
                                                    OutFormat.SampleRate,
                                                    ffmpeg.av_get_default_channel_layout(_audioCodecContext->channels),
                                                    _audioCodecContext->sample_fmt,
                                                    _audioCodecContext->sample_rate,
                                                    0,
                                                    null);

            Throw("SWR_INIT", ffmpeg.swr_init(_swrContext));
        }
Example No. 11
        /// <summary>
        /// Sets the resampler parameters.
        /// </summary>
        internal override SwrContext *CreateResampler(AVCodecContext *codecContext)
        {
            SwrContext *resampler = ffmpeg.swr_alloc();

            ffmpeg.av_opt_set_pixel_fmt(resampler, "in_pixel_fmt", codecContext->pix_fmt, 0);
            ffmpeg.av_opt_set_video_rate(resampler, "in_video_rate", codecContext->time_base, 0);
            ffmpeg.av_opt_set_image_size(resampler, "in_image_size", codecContext->width, codecContext->height, 0);

            ffmpeg.av_opt_set_pixel_fmt(resampler, "out_pixel_fmt", avTargetPixelFormat, 0);
            ffmpeg.av_opt_set_video_rate(resampler, "out_video_rate", codecContext->time_base, 0);
            ffmpeg.av_opt_set_image_size(resampler, "out_image_size", codecContext->width, codecContext->height, 0);

            ffmpeg.swr_init(resampler);
            return(resampler);
        }
Example No. 12
        public AudioResampler(AudioFormat src, AudioFormat dst)
        {
            Source      = src;
            Destination = dst;

            try {
                ctx = FF.swr_alloc_set_opts(null,
                                            Destination.ChannelLayout, Destination.SampleFormat, Destination.SampleRate,
                                            Source.ChannelLayout, Source.SampleFormat, Source.SampleRate,
                                            0, null);
                Reset();
            } catch {
                Dispose();
                throw;
            }
        }
Example No. 13
/**
 * Convert the input audio samples into the output sample format.
 * The conversion happens on a per-frame basis, the size of which is
 * specified by frame_size.
 * @param      input_data       Samples to be decoded. The dimensions are
 *                              channel (for multi-channel audio), sample.
 * @param[out] converted_data   Converted samples. The dimensions are channel
 *                              (for multi-channel audio), sample.
 * @param      frame_size       Number of samples to be converted
 * @param      resample_context Resample context for the conversion
 * @return Error code (0 if successful)
 */
    int convert_samples(byte **input_data,
                        byte **converted_data, int frame_size,
                        SwrContext *resample_context)
    {
        int error;

        /* Convert the samples using the resampler. */
        if ((error = swr_convert(resample_context,
                                 converted_data, frame_size,
                                 input_data, frame_size)) < 0)
        {
            Console.WriteLine($"error: Could not convert input samples (error '{LibAVErrorToString(error)}')");
            return(error);
        }
        return(0);
    }
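convert_samples() above expects converted_data to point at pre-allocated sample planes. Below is a hedged sketch of that allocation step, loosely following init_converted_samples() from FFmpeg's transcode_aac.c example; the helper name and the use of av_samples_alloc_array_and_samples are my own choices, not part of the original port.

using System;
using FFmpeg.AutoGen;

public static unsafe class ConvertedSamplesSketch
{
    // Allocates one data plane per output channel, large enough to hold
    // frameSize samples in the encoder's sample format. The caller releases
    // the planes and the pointer array with av_freep().
    public static int AllocConvertedSamples(byte*** convertedData,
                                            AVCodecContext* outputCodecContext,
                                            int frameSize)
    {
        int error = ffmpeg.av_samples_alloc_array_and_samples(
            convertedData,
            null,                              // the linesize is not needed here
            outputCodecContext->channels,
            frameSize,
            outputCodecContext->sample_fmt,
            0);
        if (error < 0)
            Console.WriteLine($"error: Could not allocate converted input samples ({error})");
        return error;
    }
}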
Example No. 14
        /// <summary>
        /// Initializes a new instance of the <see cref="AudioStream"/> class.
        /// </summary>
        /// <param name="stream">The audio stream.</param>
        /// <param name="options">The decoder settings.</param>
        internal AudioStream(Decoder stream, MediaOptions options)
            : base(stream, options)
        {
            swrContext = ffmpeg.swr_alloc_set_opts(
                null,
                Info.ChannelLayout,
                (AVSampleFormat)SampleFormat.SingleP,
                Info.SampleRate,
                Info.ChannelLayout,
                (AVSampleFormat)Info.SampleFormat,
                Info.SampleRate,
                0,
                null);

            ffmpeg.swr_init(swrContext);
        }
Example No. 15
        public AudioFrameConverter(AVSampleFormat fmt, int rate, int channels)
        {
            in_sample_fmt   = fmt;
            in_sample_rate  = rate;
            out_sample_rate = in_sample_rate;
            in_ch_layout    = ffmpeg.av_get_default_channel_layout(channels);

            //out_sample_fmt = in_sample_fmt;
            out_ch_layout   = ffmpeg.av_get_default_channel_layout(channels);
            out_nb_channels = channels;
            Console.WriteLine($"in_ch_layout:{in_ch_layout}");
            Console.WriteLine($"out_ch_layout:{out_ch_layout}");
            audio_convert_ctx = ffmpeg.swr_alloc();
            ffmpeg.swr_alloc_set_opts(audio_convert_ctx, out_ch_layout, out_sample_fmt, out_sample_rate, in_ch_layout, in_sample_fmt, in_sample_rate, 0, (void *)IntPtr.Zero);
            ffmpeg.swr_init(audio_convert_ctx);
            //out_nb_channels = ffmpeg.av_get_channel_layout_nb_channels((ulong)out_ch_layout); // Get the number of channels
        }
Example No. 16
        internal override SwrContext *CreateResampler(AVCodecContext *codecContext)
        {
            SwrContext *resampler = ffmpeg.swr_alloc();

            ffmpeg.av_opt_set_int(resampler, "in_channel_count", codecContext->channels, 0);
            ffmpeg.av_opt_set_channel_layout(resampler, "in_channel_layout", (long)codecContext->channel_layout, 0);
            ffmpeg.av_opt_set_int(resampler, "in_sample_rate", codecContext->sample_rate, 0);
            ffmpeg.av_opt_set_sample_fmt(resampler, "in_sample_fmt", codecContext->sample_fmt, 0);

            ffmpeg.av_opt_set_int(resampler, "out_channel_count", codecContext->channels, 0);
            ffmpeg.av_opt_set_channel_layout(resampler, "out_channel_layout", (long)codecContext->channel_layout, 0);
            ffmpeg.av_opt_set_int(resampler, "out_sample_rate", codecContext->sample_rate, 0);
            ffmpeg.av_opt_set_sample_fmt(resampler, "out_sample_fmt", avTargetSampleFormat, 0);

            ffmpeg.swr_init(resampler);
            return(resampler);
        }
Example No. 17
        public WaveFormatConverter(WaveFormat fromFormat, WaveFormat toFormat)
        {
            _fromFormat = fromFormat;
            _toFormat   = toFormat;

            _swrContext = ffmpeg.swr_alloc_set_opts(null,
                                                    ffmpeg.av_get_default_channel_layout(_toFormat.Channels),
                                                    AVSampleFormat.AV_SAMPLE_FMT_S16,
                                                    _toFormat.SampleRate,
                                                    ffmpeg.av_get_default_channel_layout(_fromFormat.Channels),
                                                    AVSampleFormat.AV_SAMPLE_FMT_S16,
                                                    _fromFormat.SampleRate,
                                                    0,
                                                    null);
            ffmpeg.swr_init(_swrContext);

            _convHandle = GCHandle.Alloc(_convOut, GCHandleType.Pinned);
        }
Example No. 18
        private void SwrCheckInit(MediaFrame srcFrame)
        {
            if (pSwrContext == null && !isDisposing)
            {
                AVFrame *src = srcFrame;
                AVFrame *dst = dstFrame;
                ulong    srcChannelLayout = src->channel_layout;
                if (srcChannelLayout == 0)
                {
                    srcChannelLayout = FFmpegHelper.GetChannelLayout(src->channels);
                }

                pSwrContext = ffmpeg.swr_alloc_set_opts(null,
                                                        (long)DstChannelLayout, DstFormat, DstSampleRate == 0 ? src->sample_rate : DstSampleRate,
                                                        (long)srcChannelLayout, (AVSampleFormat)src->format, src->sample_rate,
                                                        0, null);
                ffmpeg.swr_init(pSwrContext).ThrowIfError();
            }
        }
Example No. 19
 public Resampler(CodecContext input, CodecContext output)
 {
     try
     {
         if ((resamplerContext = ffmpeg.swr_alloc_set_opts(null, (long)output.ChannelLayout, output.SampleFormat, output.SampleRate, (long)input.ChannelLayout, input.SampleFormat, input.SampleRate, 0, null)) == null)
         {
             throw new FFmpegException(ffmpeg.AVERROR(ffmpeg.ENOMEM), "Failed to allocate resampler context.");
         }
         int ret;
         if ((ret = ffmpeg.swr_init(resamplerContext)) < 0)
         {
             throw new FFmpegException(ret, "Failed to initialize resampler context.");
         }
         NumOutputChannels  = output.Channels;
         OutputSampleFormat = output.SampleFormat;
         OutputSampleRate   = output.SampleRate;
     }
     catch (Exception) when(this.DisposeOnException())
     {
     }
 }
Example No. 20
        /// <summary>
        /// Initializes a new instance of the <see cref="AudioOutputStream"/> class.
        /// </summary>
        /// <param name="stream">The audio stream.</param>
        /// <param name="config">The stream setting.</param>
        internal AudioOutputStream(OutputStream <AudioFrame> stream, AudioEncoderSettings config)
        {
            this.stream = stream;

            long channelLayout = ffmpeg.av_get_default_channel_layout(config.Channels);

            swrContext = ffmpeg.swr_alloc_set_opts(
                null,
                channelLayout,
                (AVSampleFormat)config.SampleFormat,
                config.SampleRate,
                channelLayout,
                (AVSampleFormat)SampleFormat.SingleP,
                config.SampleRate,
                0,
                null);

            ffmpeg.swr_init(swrContext);

            Configuration = config;
            frame         = AudioFrame.Create(config.SampleRate, config.Channels, config.SamplesPerFrame, channelLayout, SampleFormat.SingleP);
        }
Example No. 21
        public AudioResampler(AudioFormatInfo sourceFormat, AudioFormatInfo targetFormat)
        {
            this.pointer      = ffmpeg.swr_alloc();
            this.sourceFormat = sourceFormat;
            this.targetFormat = targetFormat;

            // In
            ffmpeg.av_opt_set_int(pointer, "in_channel_layout", (long)sourceFormat.ChannelLayout, 0);
            ffmpeg.av_opt_set_int(pointer, "in_sample_rate", sourceFormat.SampleRate, 0);
            ffmpeg.av_opt_set_sample_fmt(pointer, "in_sample_fmt", sourceFormat.SampleFormat.ToAVFormat(), 0);

            // Out
            ffmpeg.av_opt_set_int(pointer, "out_channel_layout", (long)targetFormat.ChannelLayout, 0);
            ffmpeg.av_opt_set_int(pointer, "out_sample_rate", targetFormat.SampleRate, 0);
            ffmpeg.av_opt_set_sample_fmt(pointer, "out_sample_fmt", targetFormat.SampleFormat.ToAVFormat(), 0);

            // filter_size
            // phase_shift
            // cutoff

            ffmpeg.swr_init(pointer).EnsureSuccess();
        }
Example No. 22
        public static void InitSwr(int audioIndex)
        {
            if (ofmt_ctx->streams[0]->codec->channels != ifmt_ctx->streams[audioIndex]->codec->channels ||
                ofmt_ctx->streams[0]->codec->sample_rate != ifmt_ctx->streams[audioIndex]->codec->sample_rate ||
                ofmt_ctx->streams[0]->codec->sample_fmt != ifmt_ctx->streams[audioIndex]->codec->sample_fmt)
            {
                if (pSwrCtx == null)
                {
                    pSwrCtx = ffmpeg.swr_alloc();
                }

                pSwrCtx = ffmpeg.swr_alloc_set_opts(pSwrCtx, // reuse the context allocated above instead of leaking it
                                                    (long)ofmt_ctx->streams[audioIndex]->codec->channel_layout,
                                                    ofmt_ctx->streams[audioIndex]->codec->sample_fmt,
                                                    ofmt_ctx->streams[audioIndex]->codec->sample_rate,
                                                    (long)ifmt_ctx->streams[audioIndex]->codec->channel_layout,
                                                    ifmt_ctx->streams[audioIndex]->codec->sample_fmt,
                                                    ifmt_ctx->streams[audioIndex]->codec->sample_rate,
                                                    0, null);
                ffmpeg.swr_init(pSwrCtx);
            }
        }
Example No. 23
        /// <summary>
        /// Converts decoded, raw frame data in the frame source into a usable frame. <br />
        /// The process includes performing picture, samples or text conversions
        /// so that the decoded source frame data is easily usable in multimedia applications
        /// </summary>
        /// <param name="input">The source frame to use as an input.</param>
        /// <param name="output">The target frame that will be updated with the source frame. If null is passed the frame will be instantiated.</param>
        /// <param name="siblings">The sibling blocks that may help guess some additional parameters for the input frame.</param>
        /// <returns>
        /// Return the updated output frame
        /// </returns>
        /// <exception cref="System.ArgumentNullException">input</exception>
        public override MediaBlock MaterializeFrame(MediaFrame input, ref MediaBlock output, List <MediaBlock> siblings)
        {
            if (output == null)
            {
                output = new AudioBlock();
            }
            var source = input as AudioFrame;
            var target = output as AudioBlock;

            if (source == null || target == null)
            {
                throw new ArgumentNullException($"{nameof(input)} and {nameof(output)} are either null or not of a compatible media type '{MediaType}'");
            }

            // Create the source and target audio specs. We might need to scale from
            // the source to the target
            var sourceSpec = AudioParams.CreateSource(source.Pointer);
            var targetSpec = AudioParams.CreateTarget(source.Pointer);

            // Initialize or update the audio scaler if required
            if (Scaler == null || LastSourceSpec == null || AudioParams.AreCompatible(LastSourceSpec, sourceSpec) == false)
            {
                Scaler = ffmpeg.swr_alloc_set_opts(
                    Scaler,
                    targetSpec.ChannelLayout,
                    targetSpec.Format,
                    targetSpec.SampleRate,
                    sourceSpec.ChannelLayout,
                    sourceSpec.Format,
                    sourceSpec.SampleRate,
                    0,
                    null);

                RC.Current.Add(Scaler, $"109: {nameof(AudioComponent)}.{nameof(MaterializeFrame)}()");
                ffmpeg.swr_init(Scaler);
                LastSourceSpec = sourceSpec;
            }

            // Allocate the unmanaged output buffer
            if (target.AudioBufferLength != targetSpec.BufferLength)
            {
                if (target.AudioBuffer != IntPtr.Zero)
                {
                    Marshal.FreeHGlobal(target.AudioBuffer);
                }

                target.AudioBufferLength = targetSpec.BufferLength;
                target.AudioBuffer       = Marshal.AllocHGlobal(targetSpec.BufferLength);
            }

            var outputBufferPtr = (byte *)target.AudioBuffer;

            // Execute the conversion (audio scaling). It will return the number of samples that were output
            var outputSamplesPerChannel =
                ffmpeg.swr_convert(
                    Scaler,
                    &outputBufferPtr,
                    targetSpec.SamplesPerChannel,
                    source.Pointer->extended_data,
                    source.Pointer->nb_samples);

            // Compute the buffer length
            var outputBufferLength =
                ffmpeg.av_samples_get_buffer_size(null, targetSpec.ChannelCount, outputSamplesPerChannel, targetSpec.Format, 1);

            // Flag the block if we have to
            target.IsStartTimeGuessed = source.HasValidStartTime == false;

            // Try to fix the start time, duration and End time if we don't have valid data
            if (source.HasValidStartTime == false && siblings != null && siblings.Count > 0)
            {
                // Get timing information from the last sibling
                var lastSibling = siblings[siblings.Count - 1];

                // We set the target properties
                target.StartTime = lastSibling.EndTime;
                target.Duration  = source.Duration.Ticks > 0 ? source.Duration : lastSibling.Duration;
                target.EndTime   = TimeSpan.FromTicks(target.StartTime.Ticks + target.Duration.Ticks);
            }
            else
            {
                // We set the target properties directly from the source
                target.StartTime = source.StartTime;
                target.Duration  = source.Duration;
                target.EndTime   = source.EndTime;
            }

            target.BufferLength = outputBufferLength;
            target.ChannelCount = targetSpec.ChannelCount;

            target.SampleRate        = targetSpec.SampleRate;
            target.SamplesPerChannel = outputSamplesPerChannel;
            target.StreamIndex       = input.StreamIndex;

            return(target);
        }
Example No. 24
 public static extern int swr_set_matrix(SwrContext * @s, double * @matrix, int @stride);
Example No. 25
 public static extern int swr_set_channel_mapping(SwrContext * @s, int * @channel_map);
Example No. 26
 public static extern int swr_set_compensation(SwrContext * @s, int @sample_delta, int @compensation_distance);
Example No. 27
 public static extern long swr_next_pts(SwrContext * @s, long @pts);
Example No. 28
 public static extern int swr_convert(SwrContext * @s, sbyte ** @out, int @out_count, sbyte ** @in, int @in_count);
Example No. 29
 public static extern void swr_close(SwrContext * @s);
Example No. 30
 public static extern SwrContext *swr_alloc_set_opts(SwrContext * @s, long @out_ch_layout, AVSampleFormat @out_sample_fmt, int @out_sample_rate, long @in_ch_layout, AVSampleFormat @in_sample_fmt, int @in_sample_rate, int @log_offset, void * @log_ctx);
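The raw bindings above (Examples No. 24 to 30) cover the full lifetime of a context. One step none of the code examples shows is draining: when the output rate differs from the input rate, libswresample keeps samples buffered, and a final swr_convert call with a null input flushes them before the context is released. A minimal sketch follows, assuming a binding where swr_convert takes byte** as in the convert_samples helper earlier; the declaration in Example No. 28 uses sbyte**, so a cast may be needed against that particular binding.

using FFmpeg.AutoGen;

public static unsafe class SwrTeardownSketch
{
    // Flush whatever libswresample still buffers, then release the context.
    // swr_close() only de-initializes the context; swr_free() frees it and
    // nulls the pointer it is given.
    public static int DrainAndFree(SwrContext** swr, byte** outData, int maxOutSamples)
    {
        if (swr == null || *swr == null)
            return 0;

        // A null input drains the internally buffered samples.
        int flushed = ffmpeg.swr_convert(*swr, outData, maxOutSamples, null, 0);

        ffmpeg.swr_free(swr); // also sets *swr to null
        return flushed;       // flushed sample count (negative on error)
    }
}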
Example No. 31
        /// <summary>
        /// Releases all managed and unmanaged resources
        /// </summary>
        public void Dispose()
        {
            if (IsCancellationPending)
                return;

            this.IsCancellationPending = true;

            this.VideoRenderTimer.Stop();

            if (this.AudioRenderer != null)
            {
                if (this.AudioRenderer.HasInitialized)
                    this.AudioRenderer.Stop();

                this.AudioRenderer.Dispose();
                this.AudioRenderer = null;
            }

            if (MediaFrameExtractorThread != null)
            {
                MediaFrameExtractorThread.Join();
                MediaFrameExtractorThread = null;
            }

            if (MediaFramesExtractedDone != null)
            {
                try
                {
                    MediaFramesExtractedDone.Dispose();
                    MediaFramesExtractedDone = null;
                }
                finally { }
            }

            if (PrimaryFramesCache != null)
            {
                PrimaryFramesCache.Clear();
                PrimaryFramesCache = null;
            }

            if (SecondaryFramesCache != null)
            {
                SecondaryFramesCache.Clear();
                SecondaryFramesCache = null;
            }

            if (VideoCodecContext != null)
            {
                fixed (AVCodecContext** videoCodecContextRef = &VideoCodecContext)
                {
                    ffmpeg.avcodec_close(VideoCodecContext);
                    ffmpeg.avcodec_free_context(videoCodecContextRef);
                    VideoCodecContext = null;
                }
            }

            if (AudioCodecContext != null)
            {
                fixed (AVCodecContext** audioCodecContextRef = &AudioCodecContext)
                {
                    ffmpeg.avcodec_close(AudioCodecContext);
                    ffmpeg.avcodec_free_context(audioCodecContextRef);
                    AudioCodecContext = null;
                }
            }

            if (VideoResampler != null)
            {
                ffmpeg.sws_freeContext(VideoResampler);
                VideoResampler = null;
            }

            if (AudioResampler != null)
            {
                fixed (SwrContext** audioResamplerRef = &AudioResampler)
                {
                    ffmpeg.swr_close(AudioResampler);
                    ffmpeg.swr_free(audioResamplerRef);
                    AudioResampler = null;
                }
            }

            if (InputFormatContext != null)
            {
                fixed (AVFormatContext** inputFormatContextRef = &InputFormatContext)
                {
                    ffmpeg.avformat_close_input(inputFormatContextRef);
                    ffmpeg.avformat_free_context(InputFormatContext);
                    InputFormatContext = null;
                }
            }

            if (DecodedPictureHolder != null)
            {
                ffmpeg.av_free(DecodedPictureHolder);
                DecodedPictureHolder = null;
            }

            if (DecodedWaveHolder != null)
            {
                ffmpeg.av_free(DecodedWaveHolder);
                DecodedWaveHolder = null;
            }

        }
Example No. 32
        /// <summary>
        /// Initializes the audio.
        /// </summary>
        /// <exception cref="System.Exception">
        /// Unsupported audio codec
        /// or
        /// Could not create audio output codec context from input
        /// or
        /// Could not open codec
        /// </exception>
        /// <exception cref="System.InvalidOperationException">Could not load media file</exception>
        private void InitializeAudio()
        {
            // Extract wave sample format and codec id
            var inputCodecContext = *(InputAudioStream->codec);
            var inputCodecId = inputCodecContext.codec_id;

            // Get an input decoder for the input codec
            AVCodec* inputDecoder = ffmpeg.avcodec_find_decoder(inputCodecId);
            if (inputDecoder == null)
                throw new Exception("Unsupported audio codec");

            //Create an output codec context. -- We copy the data from the input context and we
            //then proceed to adjust some output parameters.
            // Before it said: var outputCodecContext = &inputCodecContext;
            AudioCodecContext = ffmpeg.avcodec_alloc_context3(inputDecoder);
            if (ffmpeg.avcodec_copy_context(AudioCodecContext, &inputCodecContext) != Constants.SuccessCode)
                throw new Exception("Could not create audio output codec context from input");

            if ((inputDecoder->capabilities & (int)ffmpeg.CODEC_CAP_TRUNCATED) == (int)ffmpeg.CODEC_CAP_TRUNCATED)
                AudioCodecContext->flags |= ffmpeg.AV_CODEC_FLAG_TRUNCATED;

            if (ffmpeg.avcodec_open2(AudioCodecContext, inputDecoder, null) < Constants.SuccessCode)
                throw new Exception("Could not open codec");

            // setup basic properties
            AudioBytesPerSample = ffmpeg.av_get_bytes_per_sample(AudioCodecContext->sample_fmt);
            AudioCodec = inputCodecContext.codec_id.ToString();
            AudioChannels = inputCodecContext.channels;
            AudioBitrate = (int)inputCodecContext.bit_rate;
            AudioOutputBitsPerSample = ffmpeg.av_get_bytes_per_sample(Constants.AudioOutputSampleFormat) * 8;
            AudioSampleRate = inputCodecContext.sample_rate;
            AudioOutputSampleRate = AudioSampleRate > 44100 ? 44100 : AudioSampleRate; // We set a max of 44.1 kHz to save CPU. Anything more is too much (for most people).

            // Reference: http://www.ffmpeg.org/doxygen/2.0/group__lswr.html
            // Used Example: https://github.com/FFmpeg/FFmpeg/blob/7206b94fb893c63b187bcdfe26422b4e026a3ea0/doc/examples/resampling_audio.c
            AudioResampler = ffmpeg.swr_alloc();
            ffmpeg.av_opt_set_int(AudioResampler, "in_channel_layout", (long)AudioCodecContext->channel_layout, 0);
            ffmpeg.av_opt_set_int(AudioResampler, "out_channel_layout", (long)(ffmpeg.AV_CH_FRONT_LEFT | ffmpeg.AV_CH_FRONT_RIGHT), 0);
            ffmpeg.av_opt_set_int(AudioResampler, "in_sample_rate", AudioSampleRate, 0);
            ffmpeg.av_opt_set_int(AudioResampler, "out_sample_rate", AudioOutputSampleRate, 0);
            ffmpeg.av_opt_set_sample_fmt(AudioResampler, "in_sample_fmt", AudioCodecContext->sample_fmt, 0);
            ffmpeg.av_opt_set_sample_fmt(AudioResampler, "out_sample_fmt", Constants.AudioOutputSampleFormat, 0);
            ffmpeg.swr_init(AudioResampler);

            // All output frames will have the same length and will be held by the same structure; the Decoder frame holder.
            DecodedWaveHolder = ffmpeg.av_frame_alloc();

            // Ensure proper audio properties
            if (AudioOutputBitsPerSample <= 0 || AudioSampleRate <= 0)
                throw new InvalidOperationException("Could not load media file");
        }