Example #1
        private void ProcessSquelch(float *audio, int length)
        {
            if (_squelchThreshold > 0)
            {
                if (_hissBuffer == null || _hissBuffer.Length != length)
                {
                    _hissBuffer = UnsafeBuffer.Create(length, sizeof(float));
                    _hissPtr    = (float *)_hissBuffer;
                }

                // High-pass a copy of the audio to isolate the hiss band used for noise estimation
                Utils.Memcpy(_hissPtr, audio, length * sizeof(float));

                _hissFilter.Process(_hissPtr, length);

                // Average the hiss magnitude and mute the audio while it stays above the threshold
                for (var i = 0; i < _hissBuffer.Length; i++)
                {
                    var n = (1 - _noiseAveragingRatio) * _noiseLevel + _noiseAveragingRatio * Math.Abs(_hissPtr[i]);
                    if (!float.IsNaN(n))
                    {
                        _noiseLevel = n;
                    }
                    if (_noiseLevel > _noiseThreshold)
                    {
                        audio[i] = 0.0f;
                    }
                }

                _isSquelchOpen = _noiseLevel < _noiseThreshold;
            }
            else
            {
                _isSquelchOpen = true;
            }
        }
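
The squelch above high-pass filters a copy of the audio to isolate out-of-band hiss, tracks the hiss magnitude with a single-pole average, and zeroes the audio whenever that noise estimate exceeds the threshold. A managed restatement of the gating logic as a sketch; the class and member names here are assumptions, not part of the listing:

    using System;

    sealed class SimpleSquelch
    {
        private readonly float _noiseAveragingRatio;
        private readonly float _noiseThreshold;
        private float _noiseLevel;

        public SimpleSquelch(float noiseAveragingRatio, float noiseThreshold)
        {
            _noiseAveragingRatio = noiseAveragingRatio;
            _noiseThreshold = noiseThreshold;
        }

        public bool IsOpen { get; private set; } = true;

        // "hiss" is a high-pass filtered copy of "audio"; both arrays have the same length.
        public void Process(float[] audio, float[] hiss)
        {
            for (var i = 0; i < audio.Length; i++)
            {
                // Single-pole average of the hiss magnitude: the running noise-floor estimate
                _noiseLevel += _noiseAveragingRatio * (Math.Abs(hiss[i]) - _noiseLevel);
                if (_noiseLevel > _noiseThreshold)
                {
                    audio[i] = 0.0f; // mute while noise dominates
                }
            }
            IsOpen = _noiseLevel < _noiseThreshold;
        }
    }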
        private void ProcessMono(float *baseBand, float *interleavedStereo, int length)
        {
            #region Prepare buffer

            if (_channelABuffer == null || _channelABuffer.Length != length)
            {
                _channelABuffer = UnsafeBuffer.Create(length, sizeof(float));
                _channelAPtr    = (float *)_channelABuffer;
            }

            #endregion

            #region Decimate L+R

            Utils.Memcpy(_channelAPtr, baseBand, length * sizeof(float));
            _channelADecimator.Process(_channelAPtr, length);

            #endregion

            #region Filter L+R

            length /= _audioDecimationFactor;
            _channelAFilter.Process(_channelAPtr, length);

            #endregion

            #region Process deemphasis

            for (var i = 0; i < length; i++)
            {
                _deemphasisAvgL += _deemphasisAlpha * (_channelAPtr[i] - _deemphasisAvgL);
                _channelAPtr[i]  = _deemphasisAvgL;
            }

            #endregion

            #region Fill output buffer

            for (var i = 0; i < length; i++)
            {
                var sample = _channelAPtr[i] * AudioGain;
                interleavedStereo[i * 2]     = sample;
                interleavedStereo[i * 2 + 1] = sample;
            }

            #endregion
        }
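
The de-emphasis loop above is a single-pole low-pass of the form avg += alpha * (sample - avg). A sketch of how such a coefficient is commonly derived from the broadcast de-emphasis time constant (75 µs in North America, 50 µs in most other regions) and the audio sample rate; the helper name is an assumption:

    using System;

    static class DeemphasisCoefficient
    {
        // Coefficient for a single-pole IIR low-pass: avg += alpha * (sample - avg)
        public static float Alpha(double audioSampleRate, double timeConstantSeconds)
        {
            return (float)(1.0 - Math.Exp(-1.0 / (audioSampleRate * timeConstantSeconds)));
        }
    }

For example, Alpha(32000, 75e-6) is roughly 0.34 for a 32 kHz audio rate with 75 µs de-emphasis.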
Example #3
        public void Process(float *baseBand, int length)
        {
            #region Initialize buffers

            if (_rawBuffer == null || _rawBuffer.Length != length)
            {
                _rawBuffer = UnsafeBuffer.Create(length, sizeof(Complex));
                _rawPtr    = (Complex *)_rawBuffer;
            }

            if (_magBuffer == null || _magBuffer.Length != length)
            {
                _magBuffer = UnsafeBuffer.Create(length, sizeof(float));
                _magPtr    = (float *)_magBuffer;
            }

            if (_dataBuffer == null || _dataBuffer.Length != length)
            {
                _dataBuffer = UnsafeBuffer.Create(length, sizeof(float));
                _dataPtr    = (float *)_dataBuffer;
            }

            #endregion

            // Downconvert
            for (var i = 0; i < length; i++)
            {
                _osc->Tick();
                _rawPtr[i] = _osc->Phase * baseBand[i];
            }

            // Decimate
            _decimator.Process(_rawPtr, length);
            length /= _decimationFactor;

            // Filter
            _baseBandFilter.Process(_rawPtr, length);

            // PLL
            for (var i = 0; i < length; i++)
            {
                _dataPtr[i] = _pll->Process(_rawPtr[i]).Imag;
            }

            //if (!_pll->IsLocked)
            //{
            //    _bitDecoder.Reset();
            //    return;
            //}

            // Matched filter
            _matchedFilter.Process(_dataPtr, length);

            // Recover signal energy to sustain the oscillation in the IIR
            for (var i = 0; i < length; i++)
            {
                _magPtr[i] = Math.Abs(_dataPtr[i]);
            }

            // Synchronize to RDS bitrate
            _syncFilter->Process(_magPtr, length);

            // Detect RDS bits
            for (int i = 0; i < length; i++)
            {
                var data    = _dataPtr[i];
                var syncVal = _magPtr[i];
                var slope   = syncVal - _lastSync;
                _lastSync = syncVal;
                // A positive-to-negative slope change marks a peak of the recovered bit clock
                if (slope < 0.0f && _lastSyncSlope * slope < 0.0f)
                {
                    bool bit = _lastData > 0;
                    _bitDecoder.Process(bit ^ _lastBit);
                    _lastBit = bit;
                }
                _lastData      = data;
                _lastSyncSlope = slope;
            }
        }
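
The stages above mix the RDS subcarrier to baseband, decimate and filter it, demodulate the BPSK with a PLL, apply a matched filter, and recover the bit clock from the rectified signal. The final _bitDecoder.Process(bit ^ _lastBit) undoes RDS differential encoding: each data bit is recovered as the XOR of the current and previous demodulated bits. The standard RDS parameters such a configuration is built around, as a sketch (these constants are not fields from the listing):

    static class RdsParameters
    {
        public const double SubcarrierHz = 57000.0;         // 3rd harmonic of the 19 kHz stereo pilot
        public const double BitRate = SubcarrierHz / 48.0;  // 1187.5 bit/s
    }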
        public void ProcessBuffer(Complex *iqBuffer, float *audioBuffer, int length)
        {
            if (_needConfigure)
            {
                Configure();
                _needConfigure = false;
            }

            if (_hookManager != null)
            {
                _hookManager.ProcessRawIQ(iqBuffer, length);
            }

            _downConverter.Process(iqBuffer, length);

            if (_hookManager != null)
            {
                _hookManager.ProcessFrequencyTranslatedIQ(iqBuffer, length);
            }

            if (_baseBandDecimator.StageCount > 0)
            {
                _baseBandDecimator.Process(iqBuffer, length);
                length /= (int)Math.Pow(2.0, _baseBandDecimator.StageCount);
            }

            _iqFilter.Process(iqBuffer, length);

            if (_hookManager != null)
            {
                _hookManager.ProcessDecimatedAndFilteredIQ(iqBuffer, length);
            }

            if (_actualDetectorType == DetectorType.RAW)
            {
                Utils.Memcpy(audioBuffer, iqBuffer, length * sizeof(Complex));
                return;
            }

            if (_rawAudioBuffer == null || _rawAudioBuffer.Length != length)
            {
                _rawAudioBuffer = UnsafeBuffer.Create(length, sizeof(float));
                _rawAudioPtr    = (float *)_rawAudioBuffer;
            }

            if (_actualDetectorType != DetectorType.WFM)
            {
                ScaleIQ(iqBuffer, length);
            }

            Demodulate(iqBuffer, _rawAudioPtr, length);

            if (_hookManager != null)
            {
                _hookManager.ProcessDemodulatorOutput(_rawAudioPtr, length);
            }

            if (_actualDetectorType != DetectorType.WFM)
            {
                if (_filterAudio)
                {
                    _audioFilter.Process(_rawAudioPtr, length);
                }

                if (_actualDetectorType != DetectorType.NFM && _useAgc)
                {
                    _agc.Process(_rawAudioPtr, length);
                }
            }

            if (_filterAudio)
            {
                _dcRemover.Process(_rawAudioPtr, length);
            }

            if (_actualDetectorType == DetectorType.WFM)
            {
                _rdsDecoder.Process(_rawAudioPtr, length);
                _stereoDecoder.Process(_rawAudioPtr, audioBuffer, length);
                length >>= _audioDecimationStageCount;
            }
            else
            {
                MonoToStereo(_rawAudioPtr, audioBuffer, length);
            }

            if (_hookManager != null)
            {
                length <<= 1;
                _hookManager.ProcessFilteredAudioOutput(audioBuffer, length);
            }
        }
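
Note the length bookkeeping above: each baseband decimation stage halves the I/Q sample count, the WFM path halves the audio again per audio decimation stage, and every output path writes at most two floats per remaining input sample (interleaved stereo, or two floats per Complex in the RAW branch). A small sizing sketch under those assumptions; the helper names are illustrative:

    static class BufferSizing
    {
        // Sample count remaining after a cascade of decimate-by-2 stages
        public static int AfterDecimation(int length, int stageCount)
        {
            return length >> stageCount;
        }

        // Floats the audio buffer must be able to hold for "length" input I/Q samples
        public static int RequiredAudioFloats(int length)
        {
            return length * 2;
        }
    }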
        private void ProcessStereo(float *baseBand, float *interleavedStereo, int length)
        {
            #region Prepare L+R buffer

            if (_channelABuffer == null || _channelABuffer.Length != length)
            {
                _channelABuffer = UnsafeBuffer.Create(length, sizeof(float));
                _channelAPtr    = (float *)_channelABuffer;
            }

            #endregion

            #region Prepare L-R buffer

            if (_channelBBuffer == null || _channelBBuffer.Length != length)
            {
                _channelBBuffer = UnsafeBuffer.Create(length, sizeof(float));
                _channelBPtr    = (float *)_channelBBuffer;
            }

            #endregion

            #region Decimate and filter L+R

            var audioLength = length / _audioDecimationFactor;

            if (_isMultiThreaded)
            {
                // Decimate and filter L+R on the DSP thread pool while the L-R subcarrier
                // is demodulated on this thread; _event signals completion.
                DSPThreadPool.QueueUserWorkItem(
                    delegate
                    {
                        Utils.Memcpy(_channelAPtr, baseBand, length * sizeof(float));
                        _channelADecimator.Process(_channelAPtr, length);
                        _channelAFilter.Process(_channelAPtr, audioLength);
                        _event.Set();
                    });
            }
            else
            {
                Utils.Memcpy(_channelAPtr, baseBand, length * sizeof(float));
                _channelADecimator.Process(_channelAPtr, length);
                _channelAFilter.Process(_channelAPtr, audioLength);
            }

            #endregion

            #region Demodulate L-R

            // Track the 19 kHz pilot with the PLL; doubling its phase regenerates the 38 kHz L-R subcarrier
            for (var i = 0; i < length; i++)
            {
                var pilot = _pilotFilter->Process(baseBand[i]);
                _pll->Process(pilot);
                _channelBPtr[i] = baseBand[i] * Trig.Sin((float)(_pll->AdjustedPhase * 2.0));
            }

            // Without pilot lock there is no usable stereo subcarrier: fall back to mono output
            if (!_pll->IsLocked)
            {
                if (_isMultiThreaded)
                {
                    _event.WaitOne();
                }

                #region Process mono deemphasis

                for (var i = 0; i < audioLength; i++)
                {
                    _deemphasisAvgL += _deemphasisAlpha * (_channelAPtr[i] - _deemphasisAvgL);
                    _channelAPtr[i]  = _deemphasisAvgL;
                }

                #endregion

                #region Fill output buffer with mono

                for (var i = 0; i < audioLength; i++)
                {
                    var sample = _channelAPtr[i] * AudioGain;
                    interleavedStereo[i * 2]     = sample;
                    interleavedStereo[i * 2 + 1] = sample;
                }

                #endregion

                return;
            }

            #endregion

            #region Decimate and filter L-R

            _channelBDecimator.Process(_channelBPtr, length);
            _channelBFilter.Process(_channelBPtr, audioLength);

            #endregion

            #region Recover L and R audio channels

            if (_isMultiThreaded)
            {
                _event.WaitOne();
            }

            for (var i = 0; i < audioLength; i++)
            {
                var a = _channelAPtr[i];
                var b = 2f * _channelBPtr[i];
                interleavedStereo[i * 2]     = (a + b) * AudioGain;
                interleavedStereo[i * 2 + 1] = (a - b) * AudioGain;
            }

            #endregion

            #region Process deemphasis

            for (var i = 0; i < audioLength; i++)
            {
                _deemphasisAvgL         += _deemphasisAlpha * (interleavedStereo[i * 2] - _deemphasisAvgL);
                interleavedStereo[i * 2] = _deemphasisAvgL;

                _deemphasisAvgR += _deemphasisAlpha * (interleavedStereo[i * 2 + 1] - _deemphasisAvgR);
                interleavedStereo[i * 2 + 1] = _deemphasisAvgR;
            }

            #endregion
        }
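
The stereo path above follows the standard FM multiplex: the PLL locks to the 19 kHz pilot, doubling its phase regenerates the 38 kHz subcarrier that coherently demodulates the DSB-SC L-R signal, and the output matrix recovers L and R as (L+R) ± (L-R). The 2f factor on channel B compensates for the halved amplitude that product detection of a suppressed-carrier signal leaves after low-pass filtering. The assumed frequency plan, as constants rather than fields from the listing:

    static class FmMultiplexPlan
    {
        public const double PilotHz = 19000.0;                   // stereo pilot tracked by the PLL
        public const double StereoSubcarrierHz = 2.0 * PilotHz;  // 38 kHz, carries L-R as DSB-SC
        public const double RdsSubcarrierHz = 3.0 * PilotHz;     // 57 kHz, carries RDS
    }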