Example #1
        /// <summary>
        /// Decodes an encoded audio sample into 16-bit signed PCM samples.
        /// </summary>
        /// <param name="encodedSample">The encoded audio sample to decode.</param>
        /// <param name="format">The audio format the sample was encoded with.</param>
        public short[] DecodeAudio(byte[] encodedSample, AudioFormat format)
        {
            if (format.Codec == AudioCodecsEnum.G722)
            {
                if (_g722Decoder == null)
                {
                    _g722Decoder      = new G722Codec();
                    _g722DecoderState = new G722CodecState(G722_BIT_RATE, G722Flags.None);
                }

                // Each G722 byte decodes to two 16KHz PCM samples.
                short[] decodedPcm         = new short[encodedSample.Length * 2];
                int     decodedSampleCount =
                    _g722Decoder.Decode(_g722DecoderState, decodedPcm, encodedSample, encodedSample.Length);

                return(decodedPcm.Take(decodedSampleCount).ToArray());
            }
            else if (format.Codec == AudioCodecsEnum.PCMA)
            {
                return(encodedSample.Select(x => ALawDecoder.ALawToLinearSample(x)).ToArray());
            }
            else if (format.Codec == AudioCodecsEnum.PCMU)
            {
                return(encodedSample.Select(x => MuLawDecoder.MuLawToLinearSample(x)).ToArray());
            }
            else if (format.Codec == AudioCodecsEnum.L16)
            {
                // Samples are on the wire as big endian.
                return(encodedSample.Where((x, i) => i % 2 == 0)
                       .Select((y, i) => (short)(encodedSample[i * 2] << 8 | encodedSample[i * 2 + 1])).ToArray());
            }
            else if (format.Codec == AudioCodecsEnum.PCM_S16LE)
            {
                // Samples are in little endian format. They are unlikely to arrive on the wire
                // this way, but sources such as the SkypeBot SDK supply samples in little endian.
                return(encodedSample.Where((x, i) => i % 2 == 0)
                       .Select((y, i) => (short)(encodedSample[i * 2 + 1] << 8 | encodedSample[i * 2])).ToArray());
            }
            else
            {
                throw new ApplicationException($"Audio format {format.Codec} cannot be decoded.");
            }
        }
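The L16 branch above leans on the index overloads of Where and Select, which are easy to misread. The same big endian decode as an explicit loop (the helper name is ours, not part of the class):

        // Equivalent of the L16 (big endian) branch written as a plain loop.
        private static short[] DecodeL16BigEndian(byte[] encodedSample)
        {
            short[] pcm = new short[encodedSample.Length / 2];
            for (int i = 0; i < pcm.Length; i++)
            {
                // Network byte order: high byte first, then low byte.
                pcm[i] = (short)(encodedSample[i * 2] << 8 | encodedSample[i * 2 + 1]);
            }
            return pcm;
        }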
Example #2
        public byte[] EncodeAudio(short[] pcm, AudioFormat format)
        {
            if (format.Codec == AudioCodecsEnum.G722)
            {
                if (_g722Codec == null)
                {
                    _g722Codec      = new G722Codec();
                    _g722CodecState = new G722CodecState(G722_BIT_RATE, G722Flags.None);
                }

                // G722 at 64Kbps produces one encoded byte for every two input PCM samples.
                int    outputBufferSize = pcm.Length / 2;
                byte[] encodedSample    = new byte[outputBufferSize];
                int    res = _g722Codec.Encode(_g722CodecState, encodedSample, pcm, pcm.Length);

                return(encodedSample);
            }
            else if (format.Codec == AudioCodecsEnum.PCMA)
            {
                return(pcm.Select(x => ALawEncoder.LinearToALawSample(x)).ToArray());
            }
            else if (format.Codec == AudioCodecsEnum.PCMU)
            {
                return(pcm.Select(x => MuLawEncoder.LinearToMuLawSample(x)).ToArray());
            }
            else if (format.Codec == AudioCodecsEnum.L16)
            {
                // Once netstandard2.1 is available MemoryMarshal.Cast could replace the loop
                // below, though it yields native (little endian) byte order rather than the
                // network byte order required here.
                //return MemoryMarshal.Cast<short, byte>(pcm).ToArray();

                // Put on the wire in network byte order (big endian).
                return(pcm.SelectMany(x => new byte[] { (byte)(x >> 8), (byte)(x) }).ToArray());
            }
            else if (format.Codec == AudioCodecsEnum.PCM_S16LE)
            {
                // Put on the wire as little endian.
                return(pcm.SelectMany(x => new byte[] { (byte)(x), (byte)(x >> 8) }).ToArray());
            }
            else
            {
                throw new ApplicationException($"Audio format {format.Codec} cannot be encoded.");
            }
        }
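A round-trip sketch for the pair of methods above. The G711 paths are stateless, so encoding then decoding on a fresh instance is safe; the AudioEncoder host class name and the AudioFormat constructor shape are assumptions, with only the Codec property actually read by these methods:

            var encoder = new AudioEncoder(); // Assumed host class for the two methods.
            short[] pcm = new short[160];     // 20ms of silence at 8KHz.
            var format = new AudioFormat(AudioCodecsEnum.PCMU, 0); // Ctor shape assumed.
            byte[] encoded = encoder.EncodeAudio(pcm, format);
            short[] decoded = encoder.DecodeAudio(encoded, format);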
Example #3
        /// <summary>
        /// Initialises the audio source as required.
        /// </summary>
        public override Task Start()
        {
            lock (this)
            {
                if (!IsStarted)
                {
                    if (AudioLocalTrack == null || AudioLocalTrack.Capabilities == null ||
                        AudioLocalTrack.Capabilities.Count == 0)
                    {
                        throw new ApplicationException(
                                  "Cannot start audio session without a local audio track being available.");
                    }
                    else if (AudioRemoteTrack == null || AudioRemoteTrack.Capabilities == null ||
                             AudioRemoteTrack.Capabilities.Count == 0)
                    {
                        throw new ApplicationException(
                                  "Cannot start audio session without a remote audio track being available.");
                    }

                    _sendingFormat          = base.GetSendingFormat(SDPMediaTypesEnum.audio);
                    _sendingAudioSampleRate = SDPMediaFormatInfo.GetClockRate(_sendingFormat.FormatCodec);
                    _sendingAudioRtpRate    = SDPMediaFormatInfo.GetRtpClockRate(_sendingFormat.FormatCodec);

                    Log.LogDebug($"RTP audio session selected sending codec {_sendingFormat.FormatCodec}.");

                    if (_sendingFormat.FormatCodec == SDPMediaFormatsEnum.G722)
                    {
                        _g722Codec        = new G722Codec();
                        _g722CodecState   = new G722CodecState(G722_BIT_RATE, G722Flags.None);
                        _g722Decoder      = new G722Codec();
                        _g722DecoderState = new G722CodecState(G722_BIT_RATE, G722Flags.None);
                    }

                    // If required, start the audio source.
                    if (_audioOpts != null && _audioOpts.AudioSource != AudioSourcesEnum.None)
                    {
                        if (_audioOpts.AudioSource == AudioSourcesEnum.Silence)
                        {
                            _audioStreamTimer = new Timer(SendSilenceSample, null, 0, AUDIO_SAMPLE_PERIOD_MILLISECONDS);
                        }
                        else if (_audioOpts.AudioSource == AudioSourcesEnum.PinkNoise ||
                                 _audioOpts.AudioSource == AudioSourcesEnum.WhiteNoise ||
                                 _audioOpts.AudioSource == AudioSourcesEnum.SineWave)
                        {
                            _signalGenerator = new SignalGenerator(_sendingAudioSampleRate, 1);

                            switch (_audioOpts.AudioSource)
                            {
                            case AudioSourcesEnum.PinkNoise:
                                _signalGenerator.Type = SignalGeneratorType.Pink;
                                break;

                            case AudioSourcesEnum.SineWave:
                                _signalGenerator.Type = SignalGeneratorType.Sin;
                                break;

                            case AudioSourcesEnum.WhiteNoise:
                            default:
                                _signalGenerator.Type = SignalGeneratorType.White;
                                break;
                            }

                            _audioStreamTimer = new Timer(SendSignalGeneratorSample, null, 0,
                                                          AUDIO_SAMPLE_PERIOD_MILLISECONDS);
                        }
                        else if (_audioOpts.AudioSource == AudioSourcesEnum.Music)
                        {
                            if (_audioOpts.SourceFiles == null ||
                                !_audioOpts.SourceFiles.ContainsKey(_sendingFormat.FormatCodec))
                            {
                                Log.LogWarning($"Source file not set for codec {_sendingFormat.FormatCodec}.");
                            }
                            else
                            {
                                string sourceFile = _audioOpts.SourceFiles[_sendingFormat.FormatCodec];

                                if (String.IsNullOrEmpty(sourceFile) || !File.Exists(sourceFile))
                                {
                                    Log.LogWarning(
                                        "Could not start audio music source as the source file does not exist.");
                                }
                                else
                                {
                                    _audioStreamReader = new StreamReader(sourceFile);
                                    _audioStreamTimer  = new Timer(SendMusicSample, null, 0,
                                                                   AUDIO_SAMPLE_PERIOD_MILLISECONDS);
                                }
                            }
                        }
                    }

                    base.OnRtpPacketReceived += RtpPacketReceived;
                }

                return(base.Start());
            }
        }
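The silence and noise sources above hand their callbacks to a Timer that fires every AUDIO_SAMPLE_PERIOD_MILLISECONDS. A hypothetical SendSilenceSample body is sketched below; the SendAudioFrame helper and the use of PCMU are assumptions, not taken from this snippet:

        private void SendSilenceSample(object state)
        {
            // One timer period's worth of PCMU silence. Mu-law encodes a zero
            // amplitude sample as 0xFF.
            int sampleCount = _sendingAudioRtpRate * AUDIO_SAMPLE_PERIOD_MILLISECONDS / 1000;
            byte[] silence = new byte[sampleCount];
            for (int i = 0; i < silence.Length; i++)
            {
                silence[i] = 0xFF;
            }
            // Assumed helper on the base RTP session.
            SendAudioFrame((uint)sampleCount, (int)SDPMediaFormatsEnum.PCMU, silence);
        }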
Example #4
        /// <summary>
        /// Creates a new RTP audio visual session with audio/video capturing and rendering capabilities.
        /// </summary>
        /// <param name="audioOptions">Options for the send and receive audio streams on this session.</param>
        /// <param name="videoOptions">Options for the send and receive video streams on this session.</param>
        /// <param name="bindAddress">Optional. If specified this address will be used as the bind address for any RTP
        /// and control sockets created. Generally this address does not need to be set. The default behaviour
        /// is to bind to [::] or 0.0.0.0, depending on system support, which minimises network routing
        /// causing connection issues.</param>
        /// <param name="disableExternalAudioSource">If true then no attempt will be made to use an external audio
        /// source, e.g. microphone.</param>
        public RtpAVSession(AudioOptions audioOptions, VideoOptions videoOptions, IPAddress bindAddress = null, bool disableExternalAudioSource = false)
            : base(false, false, false, bindAddress)
        {
            _audioOpts = audioOptions ?? DefaultAudioOptions;
            _videoOpts = videoOptions ?? DefaultVideoOptions;
            _disableExternalAudioSource = disableExternalAudioSource;

            if (_audioOpts != null && _audioOpts.AudioCodecs != null &&
                _audioOpts.AudioCodecs.Any(x => !(x == SDPMediaFormatsEnum.PCMU || x == SDPMediaFormatsEnum.PCMA || x == SDPMediaFormatsEnum.G722)))
            {
                throw new ApplicationException("Only PCMA, PCMU and G722 are supported for audio codec options.");
            }

            // Initialise the video decoding objects. Even if we are not sourcing video
            // we need to be ready to receive and render.
            _vpxDecoder = new VpxEncoder();
            int res = _vpxDecoder.InitDecoder();

            if (res != 0)
            {
                throw new ApplicationException("VPX decoder initialisation failed.");
            }
            _imgConverter = new ImageConvert();

            if (_audioOpts.AudioSource != AudioSourcesEnum.None)
            {
                var pcmu = new SDPMediaFormat(SDPMediaFormatsEnum.PCMU);

                //// RTP event support.
                //int clockRate = pcmu.GetClockRate();
                //SDPMediaFormat rtpEventFormat = new SDPMediaFormat(DTMF_EVENT_PAYLOAD_ID);
                //rtpEventFormat.SetFormatAttribute($"{SDP.TELEPHONE_EVENT_ATTRIBUTE}/{clockRate}");
                //rtpEventFormat.SetFormatParameterAttribute("0-16");

                var audioCapabilities = new List <SDPMediaFormat>();
                if (_audioOpts.AudioCodecs == null || _audioOpts.AudioCodecs.Count == 0)
                {
                    audioCapabilities.Add(pcmu);
                }
                else
                {
                    foreach (var codec in _audioOpts.AudioCodecs)
                    {
                        audioCapabilities.Add(new SDPMediaFormat(codec));
                    }
                }
                //audioCapabilities.Add(rtpEventFormat);

                if (audioCapabilities.Any(x => x.FormatCodec == SDPMediaFormatsEnum.G722))
                {
                    _g722Encode      = new G722Codec();
                    _g722EncodeState = new G722CodecState(64000, G722Flags.None);
                    _g722Decode      = new G722Codec();
                    _g722DecodeState = new G722CodecState(64000, G722Flags.None);
                }

                MediaStreamTrack audioTrack = new MediaStreamTrack(SDPMediaTypesEnum.audio, false, audioCapabilities);
                addTrack(audioTrack);
            }

            if (_videoOpts.VideoSource != VideoSourcesEnum.None)
            {
                MediaStreamTrack videoTrack = new MediaStreamTrack(SDPMediaTypesEnum.video, false, new List <SDPMediaFormat> {
                    new SDPMediaFormat(SDPMediaFormatsEnum.VP8)
                });
                addTrack(videoTrack);
            }

            // Where the magic (for processing received media) happens.
            base.OnRtpPacketReceived += RtpPacketReceived;
        }
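A minimal construction sketch for the session above. The AudioOptions property names are inferred from how the constructor reads them and should be treated as assumptions:

            // Illustrative only: a silence-only audio session offering PCMU.
            var audioOptions = new AudioOptions
            {
                AudioSource = AudioSourcesEnum.Silence,
                AudioCodecs = new List<SDPMediaFormatsEnum> { SDPMediaFormatsEnum.PCMU }
            };
            // Null video options fall back to DefaultVideoOptions.
            var session = new RtpAVSession(audioOptions, null);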
Example #5
        /// <summary>
        /// Initialises the audio source as required.
        /// </summary>
        public override Task Start()
        {
            lock (this)
            {
                if (!_isStarted)
                {
                    _isStarted = true;

                    if (AudioLocalTrack == null || AudioLocalTrack.Capabilties == null || AudioLocalTrack.Capabilties.Count == 0)
                    {
                        throw new ApplicationException("Cannot start audio session without a local audio track being available.");
                    }
                    else if (AudioRemoteTrack == null || AudioRemoteTrack.Capabilties == null || AudioRemoteTrack.Capabilties.Count == 0)
                    {
                        throw new ApplicationException("Cannot start audio session without a remote audio track being available.");
                    }

                    // Choose which codec to use.
                    //_sendingFormat = AudioLocalTrack.Capabilties
                    //    .Where(x => x.FormatID != DTMF_EVENT_PAYLOAD_ID.ToString() && int.TryParse(x.FormatID, out _))
                    //    .OrderBy(x => int.Parse(x.FormatID)).First();
                    _sendingFormat = base.GetSendingFormat(SDPMediaTypesEnum.audio);

                    Log.LogDebug($"RTP audio session selected sending codec {_sendingFormat.FormatCodec}.");

                    if (_sendingFormat.FormatCodec == SDPMediaFormatsEnum.G722)
                    {
                        _g722Codec      = new G722Codec();
                        _g722CodecState = new G722CodecState(64000, G722Flags.None);
                    }

                    // If required, start the audio source.
                    if (_audioOpts != null && _audioOpts.AudioSource != DummyAudioSourcesEnum.None)
                    {
                        if (_audioOpts.AudioSource == DummyAudioSourcesEnum.Silence)
                        {
                            _audioStreamTimer = new Timer(SendSilenceSample, null, 0, AUDIO_SAMPLE_PERIOD_MILLISECONDS);
                        }
                        else if (_audioOpts.AudioSource == DummyAudioSourcesEnum.PinkNoise ||
                                 _audioOpts.AudioSource == DummyAudioSourcesEnum.WhiteNoise ||
                                 _audioOpts.AudioSource == DummyAudioSourcesEnum.SineWave)
                        {
                            _signalGenerator = new SignalGenerator(SAMPLE_RATE, 1);

                            switch (_audioOpts.AudioSource)
                            {
                            case DummyAudioSourcesEnum.PinkNoise:
                                _signalGenerator.Type = SignalGeneratorType.Pink;
                                break;

                            case DummyAudioSourcesEnum.SineWave:
                                _signalGenerator.Type = SignalGeneratorType.Sin;
                                break;

                            case DummyAudioSourcesEnum.WhiteNoise:
                            default:
                                _signalGenerator.Type = SignalGeneratorType.White;
                                break;
                            }

                            _audioStreamTimer = new Timer(SendNoiseSample, null, 0, AUDIO_SAMPLE_PERIOD_MILLISECONDS);
                        }
                        else if (_audioOpts.AudioSource == DummyAudioSourcesEnum.Music)
                        {
                            if (_audioOpts.SourceFiles == null || !_audioOpts.SourceFiles.ContainsKey(_sendingFormat.FormatCodec))
                            {
                                Log.LogWarning($"Source file not set for codec {_sendingFormat.FormatCodec}.");
                            }
                            else
                            {
                                string sourceFile = _audioOpts.SourceFiles[_sendingFormat.FormatCodec];

                                if (String.IsNullOrEmpty(sourceFile) || !File.Exists(sourceFile))
                                {
                                    Log.LogWarning("Could not start audio music source as the source file does not exist.");
                                }
                                else
                                {
                                    _audioStreamReader = new StreamReader(sourceFile);
                                    _audioStreamTimer  = new Timer(SendMusicSample, null, 0, AUDIO_SAMPLE_PERIOD_MILLISECONDS);
                                }
                            }
                        }
                    }
                }

                return(base.Start());
            }
        }
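Both versions of Start above serialise initialisation with lock (this). Locking on a publicly reachable object can deadlock if callers also lock the session instance; a common alternative is a private lock object, sketched here with a field name of our choosing:

        private readonly object _startLock = new object();

        public override Task Start()
        {
            lock (_startLock)
            {
                // Initialisation as above.
                return base.Start();
            }
        }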
Example #6
        public byte[] EncodeAudio(short[] pcm, AudioCodecsEnum codec, AudioSamplingRatesEnum sampleRate)
        {
            byte[] encodedSample = null;

            if (codec == AudioCodecsEnum.G722)
            {
                if (_g722Codec == null)
                {
                    _g722Codec      = new G722Codec();
                    _g722CodecState = new G722CodecState(G722_BIT_RATE, G722Flags.None);
                }

                if (sampleRate == AudioSamplingRatesEnum.Rate16KHz)
                {
                    // No up sampling required.
                    int outputBufferSize = pcm.Length / 2;
                    encodedSample = new byte[outputBufferSize];
                    int res = _g722Codec.Encode(_g722CodecState, encodedSample, pcm, pcm.Length);
                }
                else
                {
                    // Up sample the supplied PCM signal by doubling each sample.
                    int outputBufferSize = pcm.Length;
                    encodedSample = new byte[outputBufferSize];

                    short[] pcmUpsampled = new short[pcm.Length * 2];
                    for (int i = 0; i < pcm.Length; i++)
                    {
                        pcmUpsampled[i * 2]     = pcm[i];
                        pcmUpsampled[i * 2 + 1] = pcm[i];
                    }

                    _g722Codec.Encode(_g722CodecState, encodedSample, pcmUpsampled, pcmUpsampled.Length);
                }

                return(encodedSample);
            }
            else if (codec == AudioCodecsEnum.PCMA ||
                     codec == AudioCodecsEnum.PCMU)
            {
                Func <short, byte> encode = (codec == AudioCodecsEnum.PCMA) ?
                                            (Func <short, byte>)ALawEncoder.LinearToALawSample : MuLawEncoder.LinearToMuLawSample;

                if (sampleRate == AudioSamplingRatesEnum.Rate8KHz)
                {
                    // No down sampling required.
                    int outputBufferSize = pcm.Length;
                    encodedSample = new byte[outputBufferSize];

                    for (int index = 0; index < pcm.Length; index++)
                    {
                        encodedSample[index] = encode(pcm[index]);
                    }
                }
                else
                {
                    // Down sample the supplied PCM signal by skipping every second sample.
                    int outputBufferSize = pcm.Length / 2;
                    encodedSample = new byte[outputBufferSize];
                    int encodedIndex = 0;

                    // Skip every second sample.
                    for (int index = 0; index < pcm.Length; index += 2)
                    {
                        encodedSample[encodedIndex++] = encode(pcm[index]);
                    }
                }

                return(encodedSample);
            }
            else
            {
                throw new ApplicationException($"Audio format {codec} cannot be encoded.");
            }
        }
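The G722 and G711 branches above inline a crude 2x re-sample. Pulled out as standalone helpers (the names are ours), the two operations look like this; the simplicity comes at the cost of aliasing artifacts, as the matching decode example also notes:

        // Naive 2x up-sampler: duplicate each sample to double the rate.
        private static short[] UpsampleX2(short[] pcm)
        {
            short[] output = new short[pcm.Length * 2];
            for (int i = 0; i < pcm.Length; i++)
            {
                output[i * 2]     = pcm[i];
                output[i * 2 + 1] = pcm[i];
            }
            return output;
        }

        // Naive 2x down-sampler: keep every second sample.
        private static short[] DownsampleX2(short[] pcm)
        {
            short[] output = new short[pcm.Length / 2];
            for (int i = 0; i < output.Length; i++)
            {
                output[i] = pcm[i * 2];
            }
            return output;
        }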
Example #7
        /// <summary>
        /// Decodes an encoded audio sample into PCM bytes at the requested sampling rate.
        /// </summary>
        /// <param name="encodedSample">The encoded audio sample to decode.</param>
        /// <param name="codec">The codec the sample was encoded with.</param>
        /// <param name="sampleRate">The PCM sampling rate the caller wants back.</param>
        public byte[] DecodeAudio(byte[] encodedSample, AudioCodecsEnum codec, AudioSamplingRatesEnum sampleRate)
        {
            bool wants8kSamples  = sampleRate == AudioSamplingRatesEnum.Rate8KHz;
            bool wants16kSamples = sampleRate == AudioSamplingRatesEnum.Rate16KHz;

            if (codec == AudioCodecsEnum.G722)
            {
                if (_g722Decoder == null)
                {
                    _g722Decoder      = new G722Codec();
                    _g722DecoderState = new G722CodecState(G722_BIT_RATE, G722Flags.None);
                }

                short[] decodedPcm16k      = new short[encodedSample.Length * 2];
                int     decodedSampleCount = _g722Decoder.Decode(_g722DecoderState, decodedPcm16k, encodedSample, encodedSample.Length);

                // The decoder provides short samples, but streams and devices generally
                // expect byte samples, so convert them.
                byte[] pcm8kBuffer  = (wants8kSamples) ? new byte[decodedSampleCount] : null;
                byte[] pcm16kBuffer = (wants16kSamples) ? new byte[decodedSampleCount * 2] : null;

                for (int i = 0; i < decodedSampleCount; i++)
                {
                    var bufferSample = BitConverter.GetBytes(decodedPcm16k[i]);

                    // For 8K samples the crude re-sampling to get from 16K to 8K is to skip
                    // every second sample.
                    if (pcm8kBuffer != null && i % 2 == 0)
                    {
                        pcm8kBuffer[(i / 2) * 2]     = bufferSample[0];
                        pcm8kBuffer[(i / 2) * 2 + 1] = bufferSample[1];
                    }

                    // G722 provides 16k samples.
                    if (pcm16kBuffer != null)
                    {
                        pcm16kBuffer[i * 2]     = bufferSample[0];
                        pcm16kBuffer[i * 2 + 1] = bufferSample[1];
                    }
                }

                return(pcm8kBuffer ?? pcm16kBuffer);
            }
            else if (codec == AudioCodecsEnum.PCMA ||
                     codec == AudioCodecsEnum.PCMU)
            {
                Func <byte, short> decode = (codec == AudioCodecsEnum.PCMA) ?
                                            (Func <byte, short>)ALawDecoder.ALawToLinearSample : MuLawDecoder.MuLawToLinearSample;

                byte[] pcm8kBuffer  = (wants8kSamples) ? new byte[encodedSample.Length * 2] : null;
                byte[] pcm16kBuffer = (wants16kSamples) ? new byte[encodedSample.Length * 4] : null;

                for (int i = 0; i < encodedSample.Length; i++)
                {
                    var bufferSample = BitConverter.GetBytes(decode(encodedSample[i]));

                    // G711 samples at 8KHz.
                    if (pcm8kBuffer != null)
                    {
                        pcm8kBuffer[i * 2]     = bufferSample[0];
                        pcm8kBuffer[i * 2 + 1] = bufferSample[1];
                    }

                    // The crude up-sampling approach to get 16K samples from G711 is to
                    // duplicate each 8K sample.
                    // TODO: This re-sampling approach introduces artifacts. Applying a low pass
                    // filter seems to be recommended.
                    if (pcm16kBuffer != null)
                    {
                        pcm16kBuffer[i * 4]     = bufferSample[0];
                        pcm16kBuffer[i * 4 + 1] = bufferSample[1];
                        pcm16kBuffer[i * 4 + 2] = bufferSample[0];
                        pcm16kBuffer[i * 4 + 3] = bufferSample[1];
                    }
                }

                return(pcm8kBuffer ?? pcm16kBuffer);
            }
            else
            {
                throw new ApplicationException($"Audio format {codec} cannot be decoded.");
            }
        }
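The per-sample BitConverter.GetBytes calls above allocate a two byte array for every sample. When no re-sampling is involved, a single Buffer.BlockCopy produces the same little endian layout in one pass (a sketch for little endian hosts; the helper name is ours):

        private static byte[] PcmToBytes(short[] pcm)
        {
            byte[] buffer = new byte[pcm.Length * 2];
            // Reinterprets the shorts in host (little endian) byte order.
            Buffer.BlockCopy(pcm, 0, buffer, 0, buffer.Length);
            return buffer;
        }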