Example #1
        /// <exception cref="DecoderException">Thrown when the audio decoder extra data cannot be set.</exception>
        public unsafe bool TryDecode(RawAudioFrame rawAudioFrame, out int decodedFrameSize)
        {
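            // For AAC, the decoder needs the codec-specific configuration (extra data);
            // re-apply it whenever the config segment differs from the cached copy.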
            if (rawAudioFrame is RawAACFrame aacFrame)
            {
                Debug.Assert(aacFrame.ConfigSegment.Array != null, "aacFrame.ConfigSegment.Array != null");

                if (!_extraData.SequenceEqual(aacFrame.ConfigSegment))
                {
                    if (_extraData.Length == aacFrame.ConfigSegment.Count)
                    {
                        Buffer.BlockCopy(aacFrame.ConfigSegment.Array, aacFrame.ConfigSegment.Offset, _extraData, 0,
                                         aacFrame.ConfigSegment.Count);
                    }
                    else
                    {
                        _extraData = aacFrame.ConfigSegment.ToArray();
                    }

                    fixed (byte* extradataPtr = &_extraData[0])
                    {
                        int resultCode = FFmpegAudioPInvoke.SetAudioDecoderExtraData(_decoderHandle, (IntPtr)extradataPtr, aacFrame.ConfigSegment.Count);

                        if (resultCode != 0)
                        {
                            throw new DecoderException($"An error occurred while setting audio extra data, {_audioCodecId} codec, code: {resultCode}");
                        }
                    }
                }
            }

            Debug.Assert(rawAudioFrame.FrameSegment.Array != null, "rawAudioFrame.FrameSegment.Array != null");

            fixed (byte* rawBufferPtr = &rawAudioFrame.FrameSegment.Array[rawAudioFrame.FrameSegment.Offset])
            {
                int sampleRate;
                int bitsPerSample;
                int channels;

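                // Decode the raw frame; the native call reports the decoded size
                // along with the sample rate, bit depth and channel count.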
                int resultCode = FFmpegAudioPInvoke.DecodeFrame(_decoderHandle, (IntPtr)rawBufferPtr, rawAudioFrame.FrameSegment.Count,
                                                                out decodedFrameSize, out sampleRate, out bitsPerSample, out channels);

                _currentRawFrameTimestamp = rawAudioFrame.Timestamp;

                if (resultCode != 0)
                {
                    return false;
                }

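                // G.711 frames carry their sample rate and channel count explicitly,
                // so prefer those values over what the decoder reports.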
                if (rawAudioFrame is RawG711Frame g711Frame)
                {
                    sampleRate = g711Frame.SampleRate;
                    channels   = g711Frame.Channels;
                }

                if (_currentFrameFormat.SampleRate != sampleRate || _currentFrameFormat.BitPerSample != bitsPerSample || _currentFrameFormat.Channels != channels)
                {
                    _currentFrameFormat = new AudioFrameFormat(sampleRate, bitsPerSample, channels);
                }
            }

            return true;
        }
Example #2
        /// <summary>
        /// Creates a new network frame.
        /// </summary>
        /// <param name="systemName">System name.</param>
        /// <param name="streamName">Stream name.</param>
        /// <param name="totalFramesReceived">Total number of frames received by the media stream so far.</param>
        /// <param name="mediaFrame">Media frame.</param>
        /// <param name="metadataRequired">Indicates whether to include metadata.</param>
        /// <returns>A new network frame.</returns>
        private static NetworkFrame CreateNetworkFrame(string systemName, string streamName, int totalFramesReceived,
                                                       RawFrame mediaFrame, bool metadataRequired)
        {
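            // Interpretation byte: 1 = audio frame, 2 = video frame, 0 = anything else.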
            byte interpretation = mediaFrame switch
            {
                RawAudioFrame _ => 1,
                RawVideoFrame _ => 2,
                _ => 0
            };

            return new NetworkFrame(
                (ulong)Chrono.GetUniqueTimestamp64(),
                (uint)totalFramesReceived,
                interpretation,
                systemName,
                streamName,
                CreateDataSegments(mediaFrame, metadataRequired));
        }
Example #3
        private FFmpegAudioDecoder GetDecoderForFrame(RawAudioFrame audioFrame)
        {
            FFmpegAudioCodecId codecId = DetectCodecId(audioFrame);

            if (!_audioDecodersMap.TryGetValue(codecId, out FFmpegAudioDecoder decoder))
            {
                int bitsPerCodedSample = 0;

                if (audioFrame is RawG726Frame g726Frame)
                {
                    bitsPerCodedSample = g726Frame.BitsPerCodedSample;
                }

                decoder = FFmpegAudioDecoder.CreateDecoder(codecId, bitsPerCodedSample);
                _audioDecodersMap.Add(codecId, decoder);
            }

            return decoder;
        }
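
A minimal caller sketch, for illustration only: it assumes the returned FFmpegAudioDecoder exposes the TryDecode overload from Example #1, and the method name OnAudioFrameReceived is hypothetical.

        // Hypothetical glue code: pick the codec-specific decoder for the frame
        // and attempt to decode it.
        private void OnAudioFrameReceived(RawAudioFrame audioFrame)
        {
            FFmpegAudioDecoder decoder = GetDecoderForFrame(audioFrame);

            if (!decoder.TryDecode(audioFrame, out int decodedFrameSize))
            {
                return;
            }

            // decodedFrameSize now holds the size, in bytes, of the decoded audio data.
        }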
Example #4
        private FFmpegAudioCodecId DetectCodecId(RawAudioFrame audioFrame)
        {
            if (audioFrame is RawAACFrame)
            {
                return FFmpegAudioCodecId.AAC;
            }
            if (audioFrame is RawG711AFrame)
            {
                return FFmpegAudioCodecId.G711A;
            }
            if (audioFrame is RawG711UFrame)
            {
                return FFmpegAudioCodecId.G711U;
            }
            if (audioFrame is RawG726Frame)
            {
                return FFmpegAudioCodecId.G726;
            }

            throw new ArgumentOutOfRangeException(nameof(audioFrame));
        }
Example #5
        /// <exception cref="DecoderException">Thrown when the audio decoder extra data cannot be set.</exception>
        public unsafe bool TryDecode(RawAudioFrame rawAudioFrame)
        {
            if (rawAudioFrame is RawAACFrame aacFrame)
            {
                Debug.Assert(aacFrame.ConfigSegment.Array != null, "aacFrame.ConfigSegment.Array != null");

                if (!_extraData.SequenceEqual(aacFrame.ConfigSegment))
                {
                    if (_extraData.Length == aacFrame.ConfigSegment.Count)
                    {
                        Buffer.BlockCopy(aacFrame.ConfigSegment.Array, aacFrame.ConfigSegment.Offset, _extraData, 0,
                                         aacFrame.ConfigSegment.Count);
                    }
                    else
                    {
                        _extraData = aacFrame.ConfigSegment.ToArray();
                    }

                    fixed (byte* extradataPtr = &_extraData[0])
                    {
                        int resultCode = FFmpegAudioPInvoke.SetAudioDecoderExtraData(_decoderHandle, (IntPtr)extradataPtr, aacFrame.ConfigSegment.Count);

                        if (resultCode != 0)
                        {
                            throw new DecoderException($"An error occurred while setting audio extra data, {_audioCodecId} codec, code: {resultCode}");
                        }
                    }
                }
            }

            Debug.Assert(rawAudioFrame.FrameSegment.Array != null, "rawAudioFrame.FrameSegment.Array != null");

            fixed (byte* rawBufferPtr = &rawAudioFrame.FrameSegment.Array[rawAudioFrame.FrameSegment.Offset])
            {
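                // Guard against use-after-dispose: skip decoding if the decoder has
                // already been disposed (disposalLock is presumably also held during disposal).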
                lock (disposalLock)
                {
                    if (_disposed)
                    {
                        Console.WriteLine("Skipped decoding audio frame: the decoder has been disposed, so the frame is presumably no longer needed.");
                        return false;
                    }

                    int resultCode = FFmpegAudioPInvoke.DecodeFrame(_decoderHandle, (IntPtr)rawBufferPtr,
                                                                    rawAudioFrame.FrameSegment.Count, out int sampleRate, out int bitsPerSample, out int channels);

                    _currentRawFrameTimestamp = rawAudioFrame.Timestamp;

                    if (resultCode != 0)
                    {
                        return false;
                    }

                    if (rawAudioFrame is RawG711Frame g711Frame)
                    {
                        sampleRate = g711Frame.SampleRate;
                        channels   = g711Frame.Channels;
                    }

                    if (_currentFrameFormat.SampleRate != sampleRate || _currentFrameFormat.BitPerSample != bitsPerSample ||
                        _currentFrameFormat.Channels != channels)
                    {
                        _currentFrameFormat = new AudioFrameFormat(sampleRate, bitsPerSample, channels);

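                        // The output format changed, so drop the current resampler;
                        // a new one matching the format is presumably created on demand.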
                        if (_resamplerHandle != IntPtr.Zero)
                        {
                            FFmpegAudioPInvoke.RemoveAudioResampler(_resamplerHandle);
                        }
                    }
                }
            }

            return true;
        }
Example #6

        // Switch-expression form of the DetectCodecId helper shown in Example #4.
        static FFmpegAudioCodecId DetectCodecId(RawAudioFrame frame) => frame switch
        {
            RawAACFrame _ => FFmpegAudioCodecId.AAC,
            RawG711AFrame _ => FFmpegAudioCodecId.G711A,
            RawG711UFrame _ => FFmpegAudioCodecId.G711U,
            RawG726Frame _ => FFmpegAudioCodecId.G726,
            _ => throw new ArgumentOutOfRangeException(nameof(frame))
        };