Example #1
        /// <summary>
        /// Decodes an encoded audio sample into 16 bit signed PCM samples.
        /// </summary>
        /// <param name="encodedSample">The encoded audio sample to decode.</param>
        /// <param name="format">The audio format the sample was encoded with.</param>
        /// <returns>The decoded PCM samples.</returns>
        public short[] DecodeAudio(byte[] encodedSample, AudioFormat format)
        {
            if (format.Codec == AudioCodecsEnum.G722)
            {
                if (_g722Decoder == null)
                {
                    _g722Decoder      = new G722Codec();
                    _g722DecoderState = new G722CodecState(G722_BIT_RATE, G722Flags.None);
                }

                short[] decodedPcm         = new short[encodedSample.Length * 2];
                int     decodedSampleCount =
                    _g722Decoder.Decode(_g722DecoderState, decodedPcm, encodedSample, encodedSample.Length);

                return(decodedPcm.Take(decodedSampleCount).ToArray());
            }
            else if (format.Codec == AudioCodecsEnum.PCMA)
            {
                return(encodedSample.Select(x => ALawDecoder.ALawToLinearSample(x)).ToArray());
            }
            else if (format.Codec == AudioCodecsEnum.PCMU)
            {
                return(encodedSample.Select(x => MuLawDecoder.MuLawToLinearSample(x)).ToArray());
            }
            else if (format.Codec == AudioCodecsEnum.L16)
            {
                // Samples are on the wire as big endian. The Where clause keeps every
                // second element purely to halve the enumeration length; the Select then
                // reads both bytes of each sample directly from the source array.
                return(encodedSample.Where((x, i) => i % 2 == 0)
                       .Select((y, i) => (short)(encodedSample[i * 2] << 8 | encodedSample[i * 2 + 1])).ToArray());
            }
            else if (format.Codec == AudioCodecsEnum.PCM_S16LE)
            {
                // Samples are on the wire as little endian (well, unlikely to actually be on the
                // wire in this case, but samples arriving from somewhere like the SkypeBot SDK
                // will be in little endian format).
                return(encodedSample.Where((x, i) => i % 2 == 0)
                       .Select((y, i) => (short)(encodedSample[i * 2 + 1] << 8 | encodedSample[i * 2])).ToArray());
            }
            else
            {
                throw new ApplicationException($"Audio format {format.Codec} cannot be decoded.");
            }
        }
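
A minimal usage sketch for the decoder above. The handler name is illustrative and the AudioFormat is assumed to come from the session's SDP negotiation; neither is part of the original snippet.

        // Hedged usage sketch: decode a received payload and report the sample count.
        // The negotiated AudioFormat's construction is deliberately not shown since
        // its constructor signature is not part of this snippet.
        public void OnAudioPayloadReceived(byte[] payload, AudioFormat negotiatedFormat)
        {
            short[] pcm = DecodeAudio(payload, negotiatedFormat);
            Console.WriteLine($"Decoded {payload.Length} encoded bytes into {pcm.Length} PCM samples.");
        }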
Example #2
        public byte[] EncodeAudio(short[] pcm, AudioFormat format)
        {
            if (format.Codec == AudioCodecsEnum.G722)
            {
                if (_g722Codec == null)
                {
                    _g722Codec      = new G722Codec();
                    _g722CodecState = new G722CodecState(G722_BIT_RATE, G722Flags.None);
                }

                int    outputBufferSize = pcm.Length / 2;
                byte[] encodedSample    = new byte[outputBufferSize];
                int    res = _g722Codec.Encode(_g722CodecState, encodedSample, pcm, pcm.Length);

                return(encodedSample);
            }
            else if (format.Codec == AudioCodecsEnum.PCMA)
            {
                return(pcm.Select(x => ALawEncoder.LinearToALawSample(x)).ToArray());
            }
            else if (format.Codec == AudioCodecsEnum.PCMU)
            {
                return(pcm.Select(x => MuLawEncoder.LinearToMuLawSample(x)).ToArray());
            }
            else if (format.Codec == AudioCodecsEnum.L16)
            {
                // When netstandard2.1 can be used.
                //return MemoryMarshal.Cast<short, byte>(pcm)

                // Put on the wire in network byte order (big endian).
                return(pcm.SelectMany(x => new byte[] { (byte)(x >> 8), (byte)(x) }).ToArray());
            }
            else if (format.Codec == AudioCodecsEnum.PCM_S16LE)
            {
                // Put on the wire as little endian.
                return(pcm.SelectMany(x => new byte[] { (byte)(x), (byte)(x >> 8) }).ToArray());
            }
            else
            {
                throw new ApplicationException($"Audio format {format.Codec} cannot be encoded.");
            }
        }
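
On targets where netstandard2.1 or newer is available (as the commented-out MemoryMarshal line above anticipates), the shift-and-mask packing in the L16 branch can be written with BinaryPrimitives, which names the byte order explicitly. Note that a plain MemoryMarshal.Cast would produce native (usually little endian) order, so the big endian write still has to be explicit for L16. A sketch:

        // Sketch: big endian (network order) packing for L16 using BinaryPrimitives.
        // Equivalent to the SelectMany expression above.
        public static byte[] PackL16(short[] pcm)
        {
            byte[] wire = new byte[pcm.Length * 2];
            for (int i = 0; i < pcm.Length; i++)
            {
                System.Buffers.Binary.BinaryPrimitives.WriteInt16BigEndian(wire.AsSpan(i * 2, 2), pcm[i]);
            }
            return wire;
        }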
Example #3
        /// <summary>
        /// Initialises the audio source as required.
        /// </summary>
        public override Task Start()
        {
            lock (this)
            {
                if (!IsStarted)
                {
                    if (AudioLocalTrack == null || AudioLocalTrack.Capabilities == null ||
                        AudioLocalTrack.Capabilities.Count == 0)
                    {
                        throw new ApplicationException(
                                  "Cannot start audio session without a local audio track being available.");
                    }
                    else if (AudioRemoteTrack == null || AudioRemoteTrack.Capabilities == null ||
                             AudioRemoteTrack.Capabilities.Count == 0)
                    {
                        throw new ApplicationException(
                                  "Cannot start audio session without a remote audio track being available.");
                    }

                    _sendingFormat          = base.GetSendingFormat(SDPMediaTypesEnum.audio);
                    _sendingAudioSampleRate = SDPMediaFormatInfo.GetClockRate(_sendingFormat.FormatCodec);
                    _sendingAudioRtpRate    = SDPMediaFormatInfo.GetRtpClockRate(_sendingFormat.FormatCodec);

                    Log.LogDebug($"RTP audio session selected sending codec {_sendingFormat.FormatCodec}.");

                    if (_sendingFormat.FormatCodec == SDPMediaFormatsEnum.G722)
                    {
                        _g722Codec        = new G722Codec();
                        _g722CodecState   = new G722CodecState(G722_BIT_RATE, G722Flags.None);
                        _g722Decoder      = new G722Codec();
                        _g722DecoderState = new G722CodecState(G722_BIT_RATE, G722Flags.None);
                    }

                    // If required start the audio source.
                    if (_audioOpts != null && _audioOpts.AudioSource != AudioSourcesEnum.None)
                    {
                        if (_audioOpts.AudioSource == AudioSourcesEnum.Silence)
                        {
                            _audioStreamTimer = new Timer(SendSilenceSample, null, 0, AUDIO_SAMPLE_PERIOD_MILLISECONDS);
                        }
                        else if (_audioOpts.AudioSource == AudioSourcesEnum.PinkNoise ||
                                 _audioOpts.AudioSource == AudioSourcesEnum.WhiteNoise ||
                                 _audioOpts.AudioSource == AudioSourcesEnum.SineWave)
                        {
                            _signalGenerator = new SignalGenerator(_sendingAudioSampleRate, 1);

                            switch (_audioOpts.AudioSource)
                            {
                            case AudioSourcesEnum.PinkNoise:
                                _signalGenerator.Type = SignalGeneratorType.Pink;
                                break;

                            case AudioSourcesEnum.SineWave:
                                _signalGenerator.Type = SignalGeneratorType.Sin;
                                break;

                            case AudioSourcesEnum.WhiteNoise:
                            default:
                                _signalGenerator.Type = SignalGeneratorType.White;
                                break;
                            }

                            _audioStreamTimer = new Timer(SendSignalGeneratorSample, null, 0,
                                                          AUDIO_SAMPLE_PERIOD_MILLISECONDS);
                        }
                        else if (_audioOpts.AudioSource == AudioSourcesEnum.Music)
                        {
                            if (_audioOpts.SourceFiles == null ||
                                !_audioOpts.SourceFiles.ContainsKey(_sendingFormat.FormatCodec))
                            {
                                Log.LogWarning($"Source file not set for codec {_sendingFormat.FormatCodec}.");
                            }
                            else
                            {
                                string sourceFile = _audioOpts.SourceFiles[_sendingFormat.FormatCodec];

                                if (String.IsNullOrEmpty(sourceFile) || !File.Exists(sourceFile))
                                {
                                    Log.LogWarning(
                                        "Could not start audio music source as the source file does not exist.");
                                }
                                else
                                {
                                    _audioStreamReader = new StreamReader(sourceFile);
                                    _audioStreamTimer  = new Timer(SendMusicSample, null, 0,
                                                                   AUDIO_SAMPLE_PERIOD_MILLISECONDS);
                                }
                            }
                        }
                    }

                    base.OnRtpPacketReceived += RtpPacketReceived;
                }

                return(base.Start());
            }
        }
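
The timer callbacks referenced above (SendSilenceSample, SendSignalGeneratorSample, SendMusicSample) sit outside this snippet. A minimal sketch of what a silence callback might look like, assuming a PCMU stream and a SendAudioFrame helper on the base session; both are assumptions, not taken from the code above:

        // Hedged sketch of a silence callback. Assumes PCMU (where 0xFF encodes a
        // zero sample) and a base SendAudioFrame(duration, payloadID, buffer) helper.
        private void SendSilenceSampleSketch(object state)
        {
            // One timer period's worth of samples at the RTP clock rate.
            uint sampleCount = (uint)(_sendingAudioRtpRate / 1000 * AUDIO_SAMPLE_PERIOD_MILLISECONDS);

            byte[] silence = new byte[sampleCount];
            for (int i = 0; i < silence.Length; i++)
            {
                silence[i] = 0xFF; // PCMU encoding of silence.
            }

            base.SendAudioFrame(sampleCount, 0 /* PCMU static payload type */, silence);
        }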
Example #4
        /// <summary>
        /// Creates a new RTP audio visual session with audio/video capturing and rendering capabilities.
        /// </summary>
        /// <param name="addrFamily">The address family to create the underlying socket on (IPv4 or IPv6).</param>
        /// <param name="audioOptions">Options for the send and receive audio streams on this session.</param>
        /// <param name="videoOptions">Options for the send and receive video streams on this session</param>
        /// <param name="bindAddress">Optional. If specified this address will be used as the bind address for any RTP
        /// and control sockets created. Generally this address does not need to be set. The default behaviour
        /// is to bind to [::] or 0.0.0.0, depending on system support, which minimises network routing
        /// causing connection issues.</param>
        /// <param name="disableExternalAudioSource">If true then no attempt will be made to use an external audio
        /// source, e.g. microphone.</param>
        public RtpAVSession(AudioOptions audioOptions, VideoOptions videoOptions, IPAddress bindAddress = null, bool disableExternalAudioSource = false)
            : base(false, false, false, bindAddress)
        {
            _audioOpts = audioOptions ?? DefaultAudioOptions;
            _videoOpts = videoOptions ?? DefaultVideoOptions;
            _disableExternalAudioSource = disableExternalAudioSource;

            if (_audioOpts != null && _audioOpts.AudioCodecs != null &&
                _audioOpts.AudioCodecs.Any(x => !(x == SDPMediaFormatsEnum.PCMU || x == SDPMediaFormatsEnum.PCMA || x == SDPMediaFormatsEnum.G722)))
            {
                throw new ApplicationException("Only PCMA, PCMU and G722 are supported for audio codec options.");
            }

            // Initialise the video decoding objects. Even if we are not sourcing video
            // we need to be ready to receive and render.
            _vpxDecoder = new VpxEncoder();
            int res = _vpxDecoder.InitDecoder();

            if (res != 0)
            {
                throw new ApplicationException("VPX decoder initialisation failed.");
            }
            _imgConverter = new ImageConvert();

            if (_audioOpts.AudioSource != AudioSourcesEnum.None)
            {
                var pcmu = new SDPMediaFormat(SDPMediaFormatsEnum.PCMU);

                //// RTP event support.
                //int clockRate = pcmu.GetClockRate();
                //SDPMediaFormat rtpEventFormat = new SDPMediaFormat(DTMF_EVENT_PAYLOAD_ID);
                //rtpEventFormat.SetFormatAttribute($"{SDP.TELEPHONE_EVENT_ATTRIBUTE}/{clockRate}");
                //rtpEventFormat.SetFormatParameterAttribute("0-16");

                var audioCapabilities = new List <SDPMediaFormat>();
                if (_audioOpts.AudioCodecs == null || _audioOpts.AudioCodecs.Count == 0)
                {
                    audioCapabilities.Add(pcmu);
                }
                else
                {
                    foreach (var codec in _audioOpts.AudioCodecs)
                    {
                        audioCapabilities.Add(new SDPMediaFormat(codec));
                    }
                }
                //audioCapabilities.Add(rtpEventFormat);

                if (audioCapabilities.Any(x => x.FormatCodec == SDPMediaFormatsEnum.G722))
                {
                    _g722Encode      = new G722Codec();
                    _g722EncodeState = new G722CodecState(64000, G722Flags.None);
                    _g722Decode      = new G722Codec();
                    _g722DecodeState = new G722CodecState(64000, G722Flags.None);
                }

                MediaStreamTrack audioTrack = new MediaStreamTrack(SDPMediaTypesEnum.audio, false, audioCapabilities);
                addTrack(audioTrack);
            }

            if (_videoOpts.VideoSource != VideoSourcesEnum.None)
            {
                MediaStreamTrack videoTrack = new MediaStreamTrack(SDPMediaTypesEnum.video, false, new List <SDPMediaFormat> {
                    new SDPMediaFormat(SDPMediaFormatsEnum.VP8)
                });
                addTrack(videoTrack);
            }

            // Where the magic (for processing received media) happens.
            base.OnRtpPacketReceived += RtpPacketReceived;
        }
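
A usage sketch for the constructor. The AudioOptions/VideoOptions property names mirror the fields the constructor reads; the object-initialiser form is an assumption:

        // Hedged usage sketch: a session that sends a sine wave and no video.
        public static RtpAVSession CreateSineWaveSession()
        {
            var audioOpts = new AudioOptions { AudioSource = AudioSourcesEnum.SineWave };
            var videoOpts = new VideoOptions { VideoSource = VideoSourcesEnum.None };
            return new RtpAVSession(audioOpts, videoOpts);
        }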
Example #5
        /// <summary>
        /// Encodes a buffer of 16 bit PCM samples into G722.
        /// </summary>
        /// <param name="state">Codec state</param>
        /// <param name="outputBuffer">Output buffer (to contain encoded G722)</param>
        /// <param name="inputBuffer">PCM 16 bit samples to encode</param>
        /// <param name="inputBufferCount">Number of samples in the input buffer to encode</param>
        /// <returns>Number of encoded bytes written into output buffer</returns>
        public int Encode(G722CodecState state, byte[] outputBuffer, short[] inputBuffer, int inputBufferCount)
        {
            int dlow;
            int dhigh;
            int el;
            int wd;
            int wd1;
            int ril;
            int wd2;
            int il4;
            int ih2;
            int wd3;
            int eh;
            int mih;
            int i;
            int j;
            // Low and high band PCM from the QMF
            int xlow;
            int xhigh;
            int g722_bytes;
            // Even and odd tap accumulators
            int sumeven;
            int sumodd;
            int ihigh;
            int ilow;
            int code;

            g722_bytes = 0;
            xhigh      = 0;
            for (j = 0; j < inputBufferCount;)
            {
                if (state.ItuTestMode)
                {
                    xlow = xhigh = inputBuffer[j++] >> 1;
                }
                else
                {
                    if (state.EncodeFrom8000Hz)
                    {
                        xlow = inputBuffer[j++] >> 1;
                    }
                    else
                    {
                        // Apply the transmit QMF
                        // Shuffle the buffer down
                        for (i = 0; i < 22; i++)
                        {
                            state.QmfSignalHistory[i] = state.QmfSignalHistory[i + 2];
                        }

                        state.QmfSignalHistory[22] = inputBuffer[j++];
                        state.QmfSignalHistory[23] = inputBuffer[j++];

                        // Discard every other QMF output
                        sumeven = 0;
                        sumodd  = 0;
                        for (i = 0; i < 12; i++)
                        {
                            sumodd  += state.QmfSignalHistory[2 * i] * qmf_coeffs[i];
                            sumeven += state.QmfSignalHistory[2 * i + 1] * qmf_coeffs[11 - i];
                        }

                        xlow  = (sumeven + sumodd) >> 14;
                        xhigh = (sumeven - sumodd) >> 14;
                    }
                }

                // Block 1L, SUBTRA
                el = Saturate(xlow - state.Band[0].s);

                // Block 1L, QUANTL
                wd = (el >= 0) ? el : -(el + 1);

                for (i = 1; i < 30; i++)
                {
                    wd1 = (q6[i] * state.Band[0].det) >> 12;
                    if (wd < wd1)
                    {
                        break;
                    }
                }

                ilow = (el < 0) ? iln[i] : ilp[i];

                // Block 2L, INVQAL
                ril  = ilow >> 2;
                wd2  = qm4[ril];
                dlow = (state.Band[0].det * wd2) >> 15;

                // Block 3L, LOGSCL
                il4 = rl42[ril];
                wd  = (state.Band[0].nb * 127) >> 7;
                state.Band[0].nb = wd + wl[il4];
                if (state.Band[0].nb < 0)
                {
                    state.Band[0].nb = 0;
                }
                else if (state.Band[0].nb > 18432)
                {
                    state.Band[0].nb = 18432;
                }

                // Block 3L, SCALEL
                wd1 = (state.Band[0].nb >> 6) & 31;
                wd2 = 8 - (state.Band[0].nb >> 11);
                wd3 = (wd2 < 0) ? (ilb[wd1] << -wd2) : (ilb[wd1] >> wd2);
                state.Band[0].det = wd3 << 2;

                Block4(state, 0, dlow);

                if (state.EncodeFrom8000Hz)
                {
                    // Just leave the high bits as zero
                    code = (0xC0 | ilow) >> (8 - state.BitsPerSample);
                }
                else
                {
                    // Block 1H, SUBTRA
                    eh = Saturate(xhigh - state.Band[1].s);

                    // Block 1H, QUANTH
                    wd    = (eh >= 0) ? eh : -(eh + 1);
                    wd1   = (564 * state.Band[1].det) >> 12;
                    mih   = (wd >= wd1) ? 2 : 1;
                    ihigh = (eh < 0) ? ihn[mih] : ihp[mih];

                    // Block 2H, INVQAH
                    wd2   = qm2[ihigh];
                    dhigh = (state.Band[1].det * wd2) >> 15;

                    // Block 3H, LOGSCH
                    ih2 = rh2[ihigh];
                    wd  = (state.Band[1].nb * 127) >> 7;
                    state.Band[1].nb = wd + wh[ih2];
                    if (state.Band[1].nb < 0)
                    {
                        state.Band[1].nb = 0;
                    }
                    else if (state.Band[1].nb > 22528)
                    {
                        state.Band[1].nb = 22528;
                    }

                    // Block 3H, SCALEH
                    wd1 = (state.Band[1].nb >> 6) & 31;
                    wd2 = 10 - (state.Band[1].nb >> 11);
                    wd3 = (wd2 < 0) ? (ilb[wd1] << -wd2) : (ilb[wd1] >> wd2);
                    state.Band[1].det = wd3 << 2;

                    Block4(state, 1, dhigh);
                    code = ((ihigh << 6) | ilow) >> (8 - state.BitsPerSample);
                }

                if (state.Packed)
                {
                    // Pack the code bits
                    state.OutBuffer |= (uint)(code << state.OutBits);
                    state.OutBits   += state.BitsPerSample;
                    if (state.OutBits >= 8)
                    {
                        outputBuffer[g722_bytes++] = (byte)(state.OutBuffer & 0xFF);
                        state.OutBits    -= 8;
                        state.OutBuffer >>= 8;
                    }
                }
                else
                {
                    outputBuffer[g722_bytes++] = (byte)code;
                }
            }

            return(g722_bytes);
        }
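
At the default 64 kbit/s rate the encoder emits one byte per pair of 16 kHz input samples (8 bits per sample, packing off), so a 20 ms frame of 320 samples becomes 160 bytes. A usage sketch, using only the Encode signature shown above:

        // Sketch: encode one 20 ms G.722 frame. At 64 kbit/s, 320 PCM samples at
        // 16 kHz encode to 160 bytes (2:1).
        public static byte[] EncodeFrame(G722Codec codec, G722CodecState state, short[] pcm16k)
        {
            byte[] encoded = new byte[pcm16k.Length / 2];
            int bytesWritten = codec.Encode(state, encoded, pcm16k, pcm16k.Length);
            return encoded.Take(bytesWritten).ToArray();
        }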
Example #6
        /// <summary>
        /// Decodes a buffer of G722 data into 16 bit PCM samples.
        /// </summary>
        /// <param name="state">Codec state</param>
        /// <param name="outputBuffer">Output buffer (to contain decompressed PCM samples)</param>
        /// <param name="inputG722Data"></param>
        /// <param name="inputLength">Number of bytes in input G722 data to decode</param>
        /// <returns>Number of samples written into output buffer</returns>
        public int Decode(G722CodecState state, short[] outputBuffer, byte[] inputG722Data, int inputLength)
        {
            int dlowt;
            int rlow;
            int ihigh;
            int dhigh;
            int rhigh;
            int xout1;
            int xout2;
            int wd1;
            int wd2;
            int wd3;
            int code;
            int outlen;
            int i;
            int j;

            outlen = 0;
            rhigh  = 0;
            for (j = 0; j < inputLength;)
            {
                if (state.Packed)
                {
                    // Unpack the code bits
                    if (state.InBits < state.BitsPerSample)
                    {
                        state.InBuffer |= (uint)(inputG722Data[j++] << state.InBits);
                        state.InBits   += 8;
                    }

                    code             = (int)state.InBuffer & ((1 << state.BitsPerSample) - 1);
                    state.InBuffer >>= state.BitsPerSample;
                    state.InBits    -= state.BitsPerSample;
                }
                else
                {
                    code = inputG722Data[j++];
                }

                switch (state.BitsPerSample)
                {
                default:
                case 8:
                    wd1   = code & 0x3F;
                    ihigh = (code >> 6) & 0x03;
                    wd2   = qm6[wd1];
                    wd1 >>= 2;
                    break;

                case 7:
                    wd1   = code & 0x1F;
                    ihigh = (code >> 5) & 0x03;
                    wd2   = qm5[wd1];
                    wd1 >>= 1;
                    break;

                case 6:
                    wd1   = code & 0x0F;
                    ihigh = (code >> 4) & 0x03;
                    wd2   = qm4[wd1];
                    break;
                }

                // Block 5L, LOW BAND INVQBL
                wd2 = (state.Band[0].det * wd2) >> 15;

                // Block 5L, RECONS
                rlow = state.Band[0].s + wd2;

                // Block 6L, LIMIT
                if (rlow > 16383)
                {
                    rlow = 16383;
                }
                else if (rlow < -16384)
                {
                    rlow = -16384;
                }

                // Block 2L, INVQAL
                wd2   = qm4[wd1];
                dlowt = (state.Band[0].det * wd2) >> 15;

                // Block 3L, LOGSCL
                wd2  = rl42[wd1];
                wd1  = (state.Band[0].nb * 127) >> 7;
                wd1 += wl[wd2];
                if (wd1 < 0)
                {
                    wd1 = 0;
                }
                else if (wd1 > 18432)
                {
                    wd1 = 18432;
                }

                state.Band[0].nb = wd1;

                // Block 3L, SCALEL
                wd1 = (state.Band[0].nb >> 6) & 31;
                wd2 = 8 - (state.Band[0].nb >> 11);
                wd3 = (wd2 < 0) ? (ilb[wd1] << -wd2) : (ilb[wd1] >> wd2);
                state.Band[0].det = wd3 << 2;

                Block4(state, 0, dlowt);

                if (!state.EncodeFrom8000Hz)
                {
                    // Block 2H, INVQAH
                    wd2   = qm2[ihigh];
                    dhigh = (state.Band[1].det * wd2) >> 15;

                    // Block 5H, RECONS
                    rhigh = dhigh + state.Band[1].s;

                    // Block 6H, LIMIT
                    if (rhigh > 16383)
                    {
                        rhigh = 16383;
                    }
                    else if (rhigh < -16384)
                    {
                        rhigh = -16384;
                    }

                    // Block 2H, INVQAH
                    wd2  = rh2[ihigh];
                    wd1  = (state.Band[1].nb * 127) >> 7;
                    wd1 += wh[wd2];
                    if (wd1 < 0)
                    {
                        wd1 = 0;
                    }
                    else if (wd1 > 22528)
                    {
                        wd1 = 22528;
                    }

                    state.Band[1].nb = wd1;

                    // Block 3H, SCALEH
                    wd1 = (state.Band[1].nb >> 6) & 31;
                    wd2 = 10 - (state.Band[1].nb >> 11);
                    wd3 = (wd2 < 0) ? (ilb[wd1] << -wd2) : (ilb[wd1] >> wd2);
                    state.Band[1].det = wd3 << 2;

                    Block4(state, 1, dhigh);
                }

                if (state.ItuTestMode)
                {
                    outputBuffer[outlen++] = (short)(rlow << 1);
                    outputBuffer[outlen++] = (short)(rhigh << 1);
                }
                else
                {
                    if (state.EncodeFrom8000Hz)
                    {
                        outputBuffer[outlen++] = (short)(rlow << 1);
                    }
                    else
                    {
                        // Apply the receive QMF
                        for (i = 0; i < 22; i++)
                        {
                            state.QmfSignalHistory[i] = state.QmfSignalHistory[i + 2];
                        }

                        state.QmfSignalHistory[22] = rlow + rhigh;
                        state.QmfSignalHistory[23] = rlow - rhigh;

                        xout1 = 0;
                        xout2 = 0;
                        for (i = 0; i < 12; i++)
                        {
                            xout2 += state.QmfSignalHistory[2 * i] * qmf_coeffs[i];
                            xout1 += state.QmfSignalHistory[2 * i + 1] * qmf_coeffs[11 - i];
                        }

                        outputBuffer[outlen++] = (short)(xout1 >> 11);
                        outputBuffer[outlen++] = (short)(xout2 >> 11);
                    }
                }
            }

            return(outlen);
        }
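
The decode direction mirrors the encoder's ratio: outside the EncodeFrom8000Hz mode, each G722 byte yields two 16 kHz PCM samples. A usage sketch, using only the Decode signature shown above:

        // Sketch: decode one 20 ms G.722 frame. 160 encoded bytes produce 320 PCM
        // samples at 16 kHz (1:2).
        public static short[] DecodeFrame(G722Codec codec, G722CodecState state, byte[] g722)
        {
            short[] pcm = new short[g722.Length * 2];
            int samplesWritten = codec.Decode(state, pcm, g722, g722.Length);
            return pcm.Take(samplesWritten).ToArray();
        }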
Example #7
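        /// <summary>
        /// Block 4 of the G.722 reference structure: updates the adaptive predictor
        /// for one band. Covers the RECONS/PARREC reconstruction steps, the pole
        /// coefficient updates (UPPOL1, UPPOL2), the zero coefficient update (UPZERO),
        /// the delay lines (DELAYA) and the predictor filters (FILTEP, FILTEZ, PREDIC),
        /// matching the block comments below.
        /// </summary>
        /// <param name="s">Codec state.</param>
        /// <param name="band">Band index: 0 for the low band, 1 for the high band.</param>
        /// <param name="d">The quantised difference signal for the band.</param>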
        static void Block4(G722CodecState s, int band, int d)
        {
            int wd1;
            int wd2;
            int wd3;
            int i;

            // Block 4, RECONS
            s.Band[band].d[0] = d;
            s.Band[band].r[0] = Saturate(s.Band[band].s + d);

            // Block 4, PARREC
            s.Band[band].p[0] = Saturate(s.Band[band].sz + d);

            // Block 4, UPPOL2
            for (i = 0; i < 3; i++)
            {
                s.Band[band].sg[i] = s.Band[band].p[i] >> 15;
            }

            wd1 = Saturate(s.Band[band].a[1] << 2);

            wd2 = (s.Band[band].sg[0] == s.Band[band].sg[1]) ? -wd1 : wd1;
            if (wd2 > 32767)
            {
                wd2 = 32767;
            }

            wd3  = (s.Band[band].sg[0] == s.Band[band].sg[2]) ? 128 : -128;
            wd3 += (wd2 >> 7);
            wd3 += (s.Band[band].a[2] * 32512) >> 15;
            if (wd3 > 12288)
            {
                wd3 = 12288;
            }
            else if (wd3 < -12288)
            {
                wd3 = -12288;
            }

            s.Band[band].ap[2] = wd3;

            // Block 4, UPPOL1
            s.Band[band].sg[0] = s.Band[band].p[0] >> 15;
            s.Band[band].sg[1] = s.Band[band].p[1] >> 15;
            wd1 = (s.Band[band].sg[0] == s.Band[band].sg[1]) ? 192 : -192;
            wd2 = (s.Band[band].a[1] * 32640) >> 15;

            s.Band[band].ap[1] = Saturate(wd1 + wd2);
            wd3 = Saturate(15360 - s.Band[band].ap[2]);
            if (s.Band[band].ap[1] > wd3)
            {
                s.Band[band].ap[1] = wd3;
            }
            else if (s.Band[band].ap[1] < -wd3)
            {
                s.Band[band].ap[1] = -wd3;
            }

            // Block 4, UPZERO
            wd1 = (d == 0) ? 0 : 128;
            s.Band[band].sg[0] = d >> 15;
            for (i = 1; i < 7; i++)
            {
                s.Band[band].sg[i] = s.Band[band].d[i] >> 15;
                wd2 = (s.Band[band].sg[i] == s.Band[band].sg[0]) ? wd1 : -wd1;
                wd3 = (s.Band[band].b[i] * 32640) >> 15;
                s.Band[band].bp[i] = Saturate(wd2 + wd3);
            }

            // Block 4, DELAYA
            for (i = 6; i > 0; i--)
            {
                s.Band[band].d[i] = s.Band[band].d[i - 1];
                s.Band[band].b[i] = s.Band[band].bp[i];
            }

            for (i = 2; i > 0; i--)
            {
                s.Band[band].r[i] = s.Band[band].r[i - 1];
                s.Band[band].p[i] = s.Band[band].p[i - 1];
                s.Band[band].a[i] = s.Band[band].ap[i];
            }

            // Block 4, FILTEP
            wd1             = Saturate(s.Band[band].r[1] + s.Band[band].r[1]);
            wd1             = (s.Band[band].a[1] * wd1) >> 15;
            wd2             = Saturate(s.Band[band].r[2] + s.Band[band].r[2]);
            wd2             = (s.Band[band].a[2] * wd2) >> 15;
            s.Band[band].sp = Saturate(wd1 + wd2);

            // Block 4, FILTEZ
            s.Band[band].sz = 0;
            for (i = 6; i > 0; i--)
            {
                wd1              = Saturate(s.Band[band].d[i] + s.Band[band].d[i]);
                s.Band[band].sz += (s.Band[band].b[i] * wd1) >> 15;
            }

            s.Band[band].sz = Saturate(s.Band[band].sz);

            // Block 4, PREDIC
            s.Band[band].s = Saturate(s.Band[band].sp + s.Band[band].sz);
        }
Example #8
        /// <summary>
        /// Initialises the audio source as required.
        /// </summary>
        public override Task Start()
        {
            lock (this)
            {
                if (!_isStarted)
                {
                    _isStarted = true;

                    if (AudioLocalTrack == null || AudioLocalTrack.Capabilties == null || AudioLocalTrack.Capabilties.Count == 0)
                    {
                        throw new ApplicationException("Cannot start audio session without a local audio track being available.");
                    }
                    else if (AudioRemoteTrack == null || AudioRemoteTrack.Capabilties == null || AudioRemoteTrack.Capabilties.Count == 0)
                    {
                        throw new ApplicationException("Cannot start audio session without a remote audio track being available.");
                    }

                    // Choose which codec to use.
                    //_sendingFormat = AudioLocalTrack.Capabilties
                    //    .Where(x => x.FormatID != DTMF_EVENT_PAYLOAD_ID.ToString() && int.TryParse(x.FormatID, out _))
                    //    .OrderBy(x => int.Parse(x.FormatID)).First();
                    _sendingFormat = base.GetSendingFormat(SDPMediaTypesEnum.audio);

                    Log.LogDebug($"RTP audio session selected sending codec {_sendingFormat.FormatCodec}.");

                    if (_sendingFormat.FormatCodec == SDPMediaFormatsEnum.G722)
                    {
                        _g722Codec      = new G722Codec();
                        _g722CodecState = new G722CodecState(64000, G722Flags.None);
                    }

                    // If required start the audio source.
                    if (_audioOpts != null && _audioOpts.AudioSource != DummyAudioSourcesEnum.None)
                    {
                        if (_audioOpts.AudioSource == DummyAudioSourcesEnum.Silence)
                        {
                            _audioStreamTimer = new Timer(SendSilenceSample, null, 0, AUDIO_SAMPLE_PERIOD_MILLISECONDS);
                        }
                        else if (_audioOpts.AudioSource == DummyAudioSourcesEnum.PinkNoise ||
                                 _audioOpts.AudioSource == DummyAudioSourcesEnum.WhiteNoise ||
                                 _audioOpts.AudioSource == DummyAudioSourcesEnum.SineWave)
                        {
                            _signalGenerator = new SignalGenerator(SAMPLE_RATE, 1);

                            switch (_audioOpts.AudioSource)
                            {
                            case DummyAudioSourcesEnum.PinkNoise:
                                _signalGenerator.Type = SignalGeneratorType.Pink;
                                break;

                            case DummyAudioSourcesEnum.SineWave:
                                _signalGenerator.Type = SignalGeneratorType.Sin;
                                break;

                            case DummyAudioSourcesEnum.WhiteNoise:
                            default:
                                _signalGenerator.Type = SignalGeneratorType.White;
                                break;
                            }

                            _audioStreamTimer = new Timer(SendNoiseSample, null, 0, AUDIO_SAMPLE_PERIOD_MILLISECONDS);
                        }
                        else if (_audioOpts.AudioSource == DummyAudioSourcesEnum.Music)
                        {
                            if (_audioOpts.SourceFiles == null || !_audioOpts.SourceFiles.ContainsKey(_sendingFormat.FormatCodec))
                            {
                                Log.LogWarning($"Source file not set for codec {_sendingFormat.FormatCodec}.");
                            }
                            else
                            {
                                string sourceFile = _audioOpts.SourceFiles[_sendingFormat.FormatCodec];

                                if (String.IsNullOrEmpty(sourceFile) || !File.Exists(sourceFile))
                                {
                                    Log.LogWarning("Could not start audio music source as the source file does not exist.");
                                }
                                else
                                {
                                    _audioStreamReader = new StreamReader(sourceFile);
                                    _audioStreamTimer  = new Timer(SendMusicSample, null, 0, AUDIO_SAMPLE_PERIOD_MILLISECONDS);
                                }
                            }
                        }
                    }
                }

                return(base.Start());
            }
        }
Example #9
        public byte[] EncodeAudio(short[] pcm, AudioCodecsEnum codec, AudioSamplingRatesEnum sampleRate)
        {
            byte[] encodedSample = null;

            if (codec == AudioCodecsEnum.G722)
            {
                if (_g722Codec == null)
                {
                    _g722Codec      = new G722Codec();
                    _g722CodecState = new G722CodecState(G722_BIT_RATE, G722Flags.None);
                }

                if (sampleRate == AudioSamplingRatesEnum.Rate16KHz)
                {
                    // No up sampling required.
                    int outputBufferSize = pcm.Length / 2;
                    encodedSample = new byte[outputBufferSize];
                    int res = _g722Codec.Encode(_g722CodecState, encodedSample, pcm, pcm.Length);
                }
                else
                {
                    // Up sample the supplied PCM signal by doubling each sample.
                    int outputBufferSize = pcm.Length;
                    encodedSample = new byte[outputBufferSize];

                    short[] pcmUpsampled = new short[pcm.Length * 2];
                    for (int i = 0; i < pcm.Length; i++)
                    {
                        pcmUpsampled[i * 2]     = pcm[i];
                        pcmUpsampled[i * 2 + 1] = pcm[i];
                    }

                    _g722Codec.Encode(_g722CodecState, encodedSample, pcmUpsampled, pcmUpsampled.Length);
                }

                return(encodedSample);
            }
            else if (codec == AudioCodecsEnum.PCMA ||
                     codec == AudioCodecsEnum.PCMU)
            {
                Func <short, byte> encode = (codec == AudioCodecsEnum.PCMA) ?
                                            (Func <short, byte>)ALawEncoder.LinearToALawSample : MuLawEncoder.LinearToMuLawSample;

                if (sampleRate == AudioSamplingRatesEnum.Rate8KHz)
                {
                    // No down sampling required.
                    int outputBufferSize = pcm.Length;
                    encodedSample = new byte[outputBufferSize];

                    for (int index = 0; index < pcm.Length; index++)
                    {
                        encodedSample[index] = encode(pcm[index]);
                    }
                }
                else
                {
                    // Down sample the supplied PCM signal by skipping every second sample.
                    int outputBufferSize = pcm.Length / 2;
                    encodedSample = new byte[outputBufferSize];
                    int encodedIndex = 0;

                    // Skip every second sample.
                    for (int index = 0; index < pcm.Length; index += 2)
                    {
                        encodedSample[encodedIndex++] = encode(pcm[index]);
                    }
                }

                return(encodedSample);
            }
            else
            {
                throw new ApplicationException($"Audio format {codec} cannot be encoded.");
            }
        }
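
The doubling upsampler above is the crudest option. A gentler alternative, shown here only as a sketch and not used by the code above, is linear interpolation, which reduces the imaging artifacts that plain duplication introduces:

        // Alternative sketch (not what EncodeAudio above does): up sample 8 kHz PCM
        // to 16 kHz by linear interpolation, inserting the midpoint between each
        // pair of neighbouring samples.
        public static short[] UpsampleLinear(short[] pcm8k)
        {
            short[] pcm16k = new short[pcm8k.Length * 2];
            for (int i = 0; i < pcm8k.Length; i++)
            {
                short next = (i + 1 < pcm8k.Length) ? pcm8k[i + 1] : pcm8k[i];
                pcm16k[i * 2]     = pcm8k[i];
                pcm16k[i * 2 + 1] = (short)((pcm8k[i] + next) / 2);
            }
            return pcm16k;
        }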
Example #10
        /// <summary>
        /// Decodes an encoded audio sample into PCM bytes at the requested sampling rate.
        /// </summary>
        /// <param name="encodedSample">The encoded audio sample to decode.</param>
        /// <param name="codec">The codec the sample was encoded with.</param>
        /// <param name="sampleRate">The sampling rate to supply the decoded PCM at.</param>
        /// <returns>The decoded PCM as little endian 16 bit samples.</returns>
        public byte[] DecodeAudio(byte[] encodedSample, AudioCodecsEnum codec, AudioSamplingRatesEnum sampleRate)
        {
            bool wants8kSamples  = sampleRate == AudioSamplingRatesEnum.Rate8KHz;
            bool wants16kSamples = sampleRate == AudioSamplingRatesEnum.Rate16KHz;

            if (codec == AudioCodecsEnum.G722)
            {
                if (_g722Decoder == null)
                {
                    _g722Decoder      = new G722Codec();
                    _g722DecoderState = new G722CodecState(G722_BIT_RATE, G722Flags.None);
                }

                short[] decodedPcm16k      = new short[encodedSample.Length * 2];
                int     decodedSampleCount = _g722Decoder.Decode(_g722DecoderState, decodedPcm16k, encodedSample, encodedSample.Length);

                // The decoder provides short samples but streams and devices generally seem to want
                // byte samples so convert them.
                byte[] pcm8kBuffer  = (wants8kSamples) ? new byte[decodedSampleCount] : null;
                byte[] pcm16kBuffer = (wants16kSamples) ? new byte[decodedSampleCount * 2] : null;

                for (int i = 0; i < decodedSampleCount; i++)
                {
                    var bufferSample = BitConverter.GetBytes(decodedPcm16k[i]);

                    // For 8K samples the crude re-sampling to get from 16K to 8K is to skip
                    // every second sample.
                    if (pcm8kBuffer != null && i % 2 == 0)
                    {
                        pcm8kBuffer[(i / 2) * 2]     = bufferSample[0];
                        pcm8kBuffer[(i / 2) * 2 + 1] = bufferSample[1];
                    }

                    // G722 provides 16k samples.
                    if (pcm16kBuffer != null)
                    {
                        pcm16kBuffer[i * 2]     = bufferSample[0];
                        pcm16kBuffer[i * 2 + 1] = bufferSample[1];
                    }
                }

                return(pcm8kBuffer ?? pcm16kBuffer);
            }
            else if (codec == AudioCodecsEnum.PCMA ||
                     codec == AudioCodecsEnum.PCMU)
            {
                Func <byte, short> decode = (codec == AudioCodecsEnum.PCMA) ?
                                            (Func <byte, short>)ALawDecoder.ALawToLinearSample : MuLawDecoder.MuLawToLinearSample;

                byte[] pcm8kBuffer  = (wants8kSamples) ? new byte[encodedSample.Length * 2] : null;
                byte[] pcm16kBuffer = (wants16kSamples) ? new byte[encodedSample.Length * 4] : null;

                for (int i = 0; i < encodedSample.Length; i++)
                {
                    var bufferSample = BitConverter.GetBytes(decode(encodedSample[i]));

                    // G711 samples at 8KHz.
                    if (pcm8kBuffer != null)
                    {
                        pcm8kBuffer[i * 2]     = bufferSample[0];
                        pcm8kBuffer[i * 2 + 1] = bufferSample[1];
                    }

                    // The crude up-sampling approach to get 16K samples from G711 is to
                    // duplicate each 8K sample.
                    // TODO: This re-sampling approach introduces artifacts. Applying a low pass
                    // filter seems to be recommended.
                    if (pcm16kBuffer != null)
                    {
                        pcm16kBuffer[i * 4]     = bufferSample[0];
                        pcm16kBuffer[i * 4 + 1] = bufferSample[1];
                        pcm16kBuffer[i * 4 + 2] = bufferSample[0];
                        pcm16kBuffer[i * 4 + 3] = bufferSample[1];
                    }
                }

                return(pcm8kBuffer ?? pcm16kBuffer);
            }
            else
            {
                throw new ApplicationException($"Audio format {codec} cannot be decoded.");
            }
        }
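
A usage sketch for the decoder above, with the output sizes it implies. Method and variable names here are illustrative only:

        // Hedged usage sketch: decode the same G.711 payload at both supported
        // rates. Each encoded byte yields one 16 bit sample, so the outputs are
        // 2x (8 kHz) and 4x (16 kHz) the payload length in bytes.
        public void DecodeForPlayback(byte[] payload)
        {
            byte[] pcm8k  = DecodeAudio(payload, AudioCodecsEnum.PCMU, AudioSamplingRatesEnum.Rate8KHz);
            byte[] pcm16k = DecodeAudio(payload, AudioCodecsEnum.PCMU, AudioSamplingRatesEnum.Rate16KHz);
            Console.WriteLine($"{payload.Length} G.711 bytes -> {pcm8k.Length} PCM bytes at 8 kHz, {pcm16k.Length} at 16 kHz.");
        }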