Example #1
        /// <summary>
        /// Primary audio processing method; this is where the samples are read in and manipulations are applied
        /// </summary>
        private static void ProcessAudio()
        {
            WaveFormat format             = _currentWaveChannel.WaveFormat;
            // Number of float samples in one second of audio across all channels
            int        bufferSecondLength = format.SampleRate * format.Channels;

            // One buffer for the raw input read from the wave channel, one for SoundTouch's output
            byte[] inputBuffer         = new byte[BufferSamples * sizeof(float)];
            byte[] soundTouchOutBuffer = new byte[BufferSamples * sizeof(float)];

            // Both converters expose the same underlying buffer as byte[] (for stream I/O) and float[] (for SoundTouch)
            ByteAndFloatsConverter convertInputBuffer = new ByteAndFloatsConverter {
                Bytes = inputBuffer
            };
            ByteAndFloatsConverter convertOutputBuffer = new ByteAndFloatsConverter {
                Bytes = soundTouchOutBuffer
            };
            // Output capacity expressed in sample frames (one float per channel per frame)
            uint outBufferSizeFloats = (uint)convertOutputBuffer.Bytes.Length / (uint)(sizeof(float) * format.Channels);

            int      bytesRead;
            int      floatsRead;
            uint     samplesProcessed = 0;
            int      bufferIndex      = 0;
            TimeSpan actualEndMarker  = TimeSpan.Zero;
            bool     loop             = false;

            _currentWaveChannel.Volume = _currentVolume;
            while (_currentWaveChannel.Position < _currentWaveChannel.Length && !Stopping)
            {
                // On the first pass, rewind to the beginning of the stream
                if (!Started)
                {
                    _currentWaveChannel.CurrentTime = TimeSpan.FromSeconds(0);
                    Started = true;
                }

                bytesRead  = _currentWaveChannel.Read(convertInputBuffer.Bytes, 0, convertInputBuffer.Bytes.Length);
                // Number of sample frames read (one float per channel per frame)
                floatsRead = bytesRead / (sizeof(float) * _currentWaveChannel.WaveFormat.Channels);

                // With looping disabled (or no end marker configured), play through to the end of the stream
                actualEndMarker = DefaultEndMarker;
                if (!loop || actualEndMarker == TimeSpan.Zero)
                {
                    actualEndMarker = _currentWaveChannel.TotalTime;
                }

                // End marker reached: drain whatever SoundTouch still holds, then reset the pipeline
                if (_currentWaveChannel.CurrentTime > actualEndMarker)
                {
                    if (!stopWorker)
                    {
                        do
                        {
                            samplesProcessed = _soundTouchSharp.ReceiveSamples(convertOutputBuffer.Floats, outBufferSizeFloats);
                            if (samplesProcessed > 0)
                            {
                                TimeSpan currentBufferTime = _currentWaveChannel.CurrentTime;
                                _inputProvider.AddSamples(convertOutputBuffer.Bytes, 0, (int)samplesProcessed * sizeof(float) * _currentWaveChannel.WaveFormat.Channels, currentBufferTime);
                            }
                        } while (!stopWorker && samplesProcessed != 0);
                    }

                    _soundTouchSharp.Clear();
                    _inputProvider.Flush();
                    _currentWaveChannel.Flush();
                }

                SetSoundSharpValues();

                // Feed the freshly read frames into SoundTouch for processing
                _soundTouchSharp.PutSamples(convertInputBuffer.Floats, (uint)floatsRead);

                // Pull every block SoundTouch has ready and queue it for playback
                do
                {
                    samplesProcessed = _soundTouchSharp.ReceiveSamples(convertOutputBuffer.Floats, outBufferSizeFloats);
                    if (!stopWorker && samplesProcessed > 0)
                    {
                        TimeSpan currentBufferTime = _currentWaveChannel.CurrentTime;
                        _inputProvider.AddSamples(convertOutputBuffer.Bytes, 0, (int)samplesProcessed * sizeof(float) * _currentWaveChannel.WaveFormat.Channels, currentBufferTime);

                        // Throttle reading while the playback queue is sufficiently full
                        while (!stopWorker && _inputProvider.GetQueueCount() > BusyQueuedBuffersThreshold)
                        {
                            Thread.Sleep(10);
                        }
                        bufferIndex += 1;
                    }
                } while (!stopWorker && samplesProcessed != 0);
            }

            // End of the audio file (or a stop was requested)
            _waveOutDevice.Stop();
            // Stop sampling
            _waveChannel.Sample -= _waveChannel_Sample;

            // Snap the position to the end marker before the streams are closed
            if (!stopWorker && _currentWaveChannel.CurrentTime < actualEndMarker)
            {
                _currentWaveChannel.CurrentTime = actualEndMarker;
            }

            _waveReader.Close();
            _blockAlignStream.Close();
            _waveChannel.Close();
            _inputProvider.Flush();
            _currentWaveChannel.Close();
            IsPlaying = false;
            _soundTouchSharp.Clear();
        }
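
ProcessAudio relies on a ByteAndFloatsConverter helper whose definition is not shown: the same buffer is filled as bytes by _currentWaveChannel.Read and handed to SoundTouch as 32-bit floats. A minimal sketch of the explicit-layout union such a converter is typically built on, assuming only the Bytes/Floats field names used above (everything else here is illustrative):

    using System.Runtime.InteropServices;

    // Overlaps a byte[] reference and a float[] reference at the same offset, so the
    // single buffer allocated in ProcessAudio can be read as bytes and consumed as
    // 32-bit float samples without copying.
    [StructLayout(LayoutKind.Explicit)]
    public struct ByteAndFloatsConverter
    {
        [FieldOffset(0)]
        public byte[] Bytes;

        [FieldOffset(0)]
        public float[] Floats;
    }

Overlapping two array references this way works in practice on the full-trust desktop CLR but is not verifiable, type-safe code, and the Floats view still reports the underlying byte count as its Length, which is why sizes such as outBufferSizeFloats are computed from Bytes.Length. Copying with Buffer.BlockCopy into a real float[] (or MemoryMarshal.Cast on newer runtimes) is the safer, slightly slower alternative.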
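
One more note on the shutdown path: the final _soundTouchSharp.Clear() simply discards whatever is still sitting in SoundTouch's internal pipeline, so the last fraction of processed audio is dropped. If that tail should be played, the usual SoundTouch pattern is to flush and then drain before clearing. A hedged sketch of such a helper, reusing the fields and buffer types from ProcessAudio and assuming the SoundTouchSharp wrapper exposes the native flush() as Flush():

        /// <summary>
        /// Drains any samples still queued inside SoundTouch into the playback provider.
        /// Illustrative only; it uses the same static fields that ProcessAudio uses.
        /// </summary>
        private static void DrainSoundTouch(ByteAndFloatsConverter convertOutputBuffer, uint outBufferSizeFloats)
        {
            // Push the last, partially filled block out of SoundTouch's internal pipeline
            // (assumes the wrapper exposes soundtouch::SoundTouch::flush() as Flush()).
            _soundTouchSharp.Flush();

            uint samplesProcessed;
            do
            {
                samplesProcessed = _soundTouchSharp.ReceiveSamples(convertOutputBuffer.Floats, outBufferSizeFloats);
                if (samplesProcessed > 0)
                {
                    _inputProvider.AddSamples(convertOutputBuffer.Bytes, 0,
                        (int)samplesProcessed * sizeof(float) * _currentWaveChannel.WaveFormat.Channels,
                        _currentWaveChannel.CurrentTime);
                }
            } while (!stopWorker && samplesProcessed != 0);
        }

Calling DrainSoundTouch(convertOutputBuffer, outBufferSizeFloats) just before the final _soundTouchSharp.Clear() would push the residual samples to the output instead of throwing them away.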