Example #1
        /// <exception cref="DecoderException">Thrown when the decoder extra data cannot be applied.</exception>
        public unsafe bool TryDecode(RawAudioFrame rawAudioFrame, out int decodedFrameSize)
        {
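            // AAC frames carry a codec configuration segment; when it differs from the cached
            // copy, the new extra data is pushed to the native decoder before decoding.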
            if (rawAudioFrame is RawAACFrame aacFrame)
            {
                Debug.Assert(aacFrame.ConfigSegment.Array != null, "aacFrame.ConfigSegment.Array != null");

                if (!_extraData.SequenceEqual(aacFrame.ConfigSegment))
                {
                    if (_extraData.Length == aacFrame.ConfigSegment.Count)
                    {
                        Buffer.BlockCopy(aacFrame.ConfigSegment.Array, aacFrame.ConfigSegment.Offset, _extraData, 0,
                                         aacFrame.ConfigSegment.Count);
                    }
                    else
                    {
                        _extraData = aacFrame.ConfigSegment.ToArray();
                    }

                    fixed (byte* extradataPtr = &_extraData[0])
                    {
                        int resultCode = FFmpegAudioPInvoke.SetAudioDecoderExtraData(_decoderHandle, (IntPtr)extradataPtr, aacFrame.ConfigSegment.Count);

                        if (resultCode != 0)
                        {
                            throw new DecoderException($"An error occurred while setting audio extra data, {_audioCodecId} codec, code: {resultCode}");
                        }
                    }
                }
            }

            Debug.Assert(rawAudioFrame.FrameSegment.Array != null, "rawAudioFrame.FrameSegment.Array != null");

            // Pin the raw frame buffer so the native decoder can read it directly.
            fixed (byte* rawBufferPtr = &rawAudioFrame.FrameSegment.Array[rawAudioFrame.FrameSegment.Offset])
            {
                int resultCode = FFmpegAudioPInvoke.DecodeFrame(_decoderHandle, (IntPtr)rawBufferPtr, rawAudioFrame.FrameSegment.Count,
                                                                out decodedFrameSize, out int sampleRate, out int bitsPerSample, out int channels);

                _currentRawFrameTimestamp = rawAudioFrame.Timestamp;

                if (resultCode != 0)
                {
                    return false;
                }

                if (rawAudioFrame is RawG711Frame g711Frame)
                {
                    sampleRate = g711Frame.SampleRate;
                    channels   = g711Frame.Channels;
                }

                if (_currentFrameFormat.SampleRate != sampleRate || _currentFrameFormat.BitPerSample != bitsPerSample || _currentFrameFormat.Channels != channels)
                {
                    _currentFrameFormat = new AudioFrameFormat(sampleRate, bitsPerSample, channels);
                }
            }

            return true;
        }
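
For context, a caller of Example #1 might look roughly like the sketch below. The _audioDecoder field, the OnFrameReceived handler, and the logging are assumptions for illustration only; retrieving the decoded PCM data would go through a separate decoder method that is not shown in these examples.

        // Hypothetical caller (names are assumptions, not part of the examples above).
        private void OnFrameReceived(RawAudioFrame rawAudioFrame)
        {
            // TryDecode returns false on a decode error and reports the decoded size via the out parameter.
            if (!_audioDecoder.TryDecode(rawAudioFrame, out int decodedFrameSize))
                return;

            Console.WriteLine($"Decoded {decodedFrameSize} bytes from frame with timestamp {rawAudioFrame.Timestamp}");
        }
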
Example #2
        /// <exception cref="DecoderException">Thrown when the decoder extra data cannot be applied.</exception>
        public unsafe bool TryDecode(RawAudioFrame rawAudioFrame)
        {
            if (rawAudioFrame is RawAACFrame aacFrame)
            {
                Debug.Assert(aacFrame.ConfigSegment.Array != null, "aacFrame.ConfigSegment.Array != null");

                if (!_extraData.SequenceEqual(aacFrame.ConfigSegment))
                {
                    if (_extraData.Length == aacFrame.ConfigSegment.Count)
                    {
                        Buffer.BlockCopy(aacFrame.ConfigSegment.Array, aacFrame.ConfigSegment.Offset, _extraData, 0,
                                         aacFrame.ConfigSegment.Count);
                    }
                    else
                    {
                        _extraData = aacFrame.ConfigSegment.ToArray();
                    }

                    fixed (byte* extradataPtr = &_extraData[0])
                    {
                        int resultCode = FFmpegAudioPInvoke.SetAudioDecoderExtraData(_decoderHandle, (IntPtr)extradataPtr, aacFrame.ConfigSegment.Count);

                        if (resultCode != 0)
                        {
                            throw new DecoderException($"An error occurred while setting audio extra data, {_audioCodecId} codec, code: {resultCode}");
                        }
                    }
                }
            }

            Debug.Assert(rawAudioFrame.FrameSegment.Array != null, "rawAudioFrame.FrameSegment.Array != null");

            fixed (byte* rawBufferPtr = &rawAudioFrame.FrameSegment.Array[rawAudioFrame.FrameSegment.Offset])
            {
                // Guard the native call against a concurrent Dispose: once the decoder has been
                // disposed, its handle is no longer valid and the frame is skipped.
                lock (disposalLock)
                {
                    if (_disposed)
                    {
                        Console.WriteLine("Skipped decoding audio frame because the decoder has already been disposed; the frame is presumably no longer needed.");
                        return false;
                    }

                    int resultCode = FFmpegAudioPInvoke.DecodeFrame(_decoderHandle, (IntPtr)rawBufferPtr,
                                                                    rawAudioFrame.FrameSegment.Count, out int sampleRate, out int bitsPerSample, out int channels);

                    _currentRawFrameTimestamp = rawAudioFrame.Timestamp;

                    if (resultCode != 0)
                    {
                        return false;
                    }

                    if (rawAudioFrame is RawG711Frame g711Frame)
                    {
                        sampleRate = g711Frame.SampleRate;
                        channels   = g711Frame.Channels;
                    }

                    if (_currentFrameFormat.SampleRate != sampleRate || _currentFrameFormat.BitPerSample != bitsPerSample ||
                        _currentFrameFormat.Channels != channels)
                    {
                        _currentFrameFormat = new AudioFrameFormat(sampleRate, bitsPerSample, channels);

                        // The frame format changed, so a resampler configured for the old format is discarded.
                        if (_resamplerHandle != IntPtr.Zero)
                        {
                            FFmpegAudioPInvoke.RemoveAudioResampler(_resamplerHandle);
                        }
                    }
                }
            }

            return true;
        }
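
Example #2 drops the decodedFrameSize out parameter and wraps the native decode call in a disposal guard: the disposalLock and _disposed flag only help if the matching Dispose path takes the same lock before releasing the native handles. A minimal sketch of that counterpart is shown below; it is an assumption based on the fields the example references, and the actual native release calls are omitted because they do not appear in the examples.

        // Hypothetical Dispose counterpart (a sketch, not shown in the examples above).
        public void Dispose()
        {
            lock (disposalLock)
            {
                if (_disposed)
                    return;

                _disposed = true;

                // Release _decoderHandle (and _resamplerHandle, if allocated) through the native layer here.
            }
        }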