        private void Initialize(bool stream = false)
        {
            // Pick a PCM-to-float sample converter matching the source bit depth.
            switch (m_source.WaveFormat.BitsPerSample)
            {
            case 8:
                m_decoder = new Pcm8BitToSample(m_source);
                break;

            case 16:
                m_decoder = new Pcm16BitToSample(m_source);
                break;

            case 24:
                m_decoder = new Pcm24BitToSample(m_source);
                break;

            default:
                Debug.LogError("No converter found!");
                return;
            }

            // Create a streamed AudioClip; OnReadAudio and OnSetPosition pull
            // samples from the decoder on demand.
            Clip = AudioClip.Create(m_name,
                                    (int)(m_decoder.Length / m_decoder.WaveFormat.Channels),
                                    m_decoder.WaveFormat.Channels,
                                    m_decoder.WaveFormat.SampleRate,
                                    true,
                                    true,
                                    OnReadAudio,
                                    OnSetPosition);
        }
Example n. 2
        /// <summary>
        ///     Converts a <see cref="IWaveSource"/> to a <see cref="ISampleSource"/>.
        /// </summary>
        /// <param name="waveSource">The <see cref="IWaveSource"/> to convert to a <see cref="ISampleSource"/>.</param>
        /// <returns>The <see cref="ISampleSource"/> wrapped around the specified <paramref name="waveSource"/>.</returns>
        public static ISampleSource ToSampleSource(this IWaveSource waveSource)
        {
            if (waveSource == null)
            {
                throw new ArgumentNullException("waveSource");
            }

            return WaveToSampleBase.CreateConverter(waveSource);
        }
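A minimal usage sketch for the extension above (the file name and buffer size are illustrative placeholders, assuming CSCore's CodecFactory is available):

    using CSCore;
    using CSCore.Codecs;

    // Decode any supported file to an IWaveSource, then expose it as 32-bit float samples.
    IWaveSource waveSource = CodecFactory.Instance.GetCodec("audio.mp3");   // placeholder path
    ISampleSource sampleSource = waveSource.ToSampleSource();

    float[] buffer = new float[sampleSource.WaveFormat.Channels * 512];
    int read = sampleSource.Read(buffer, 0, buffer.Length);                 // interleaved floats in [-1, 1]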
Example n. 3
        public SampleSourceBase(IWaveStream source)
        {
            if (source == null)
            {
                throw new ArgumentNullException("source");
            }

            // Reuse the source directly if it already provides samples;
            // otherwise wrap the raw wave source in a converter.
            if (source is ISampleSource)
            {
                _source = (ISampleSource)source;
            }
            else
            {
                _source = WaveToSampleBase.CreateConverter(source as IWaveSource);
            }
        }
Example n. 4
    public AudioClip GetAudioClip()
    {
        if (_waveSource == null)
        {
            throw new ResourceNotInitializedException(typeof(ResAudio));
        }

        if (_audioClip == null)
        {
            // Lazily build the decoder and a streamed AudioClip on first access.
            _decoder   = WaveToSampleBase.CreateConverter(_waveSource);
            _loopBegin = 0;
            _loopEnd   = _decoder.Length / _decoder.WaveFormat.Channels;
            _audioClip = AudioClip.Create(GetName(), (int)_loopEnd,
                                          _decoder.WaveFormat.Channels, _decoder.WaveFormat.SampleRate, true, OnReadAudio, OnSetPosition);
        }

        return _audioClip;
    }
Example n. 5
        public SampleDataProvider(IWaveStream source)
        {
            if (!(source is ISampleSource) && source is IWaveSource)
            {
                // Wrap raw wave sources so samples are exposed as floats.
                source = WaveToSampleBase.CreateConverter(source as IWaveSource);
            }
            else if (!(source is ISampleSource))
            {
                throw new ArgumentException("source has to be of type IWaveSource or ISampleSource");
            }

            _source        = source as ISampleSource;
            BlockSize      = (int)(source.WaveFormat.SampleRate * (40.0 / 1000.0));   // 40 ms blocks
            _sampleBuffer  = new Queue<float>();
            _sampleBuffer1 = new Queue<float>();
        }
Example n. 6
        //##########################################################################################################################################################################################################

        /// <summary>
        /// Read the WAV file and split the samples to the left and right channel
        /// </summary>
        private void ReadToList()
        {
            if (!File.Exists(Filepath))
            {
                return;
            }

            WaveFileReader reader = new WaveFileReader(Filepath);

            format = reader.WaveFormat;

            //Length_s = reader.GetLength().TotalSeconds;             // reader.GetLength() contains the FormatChunks too!
            //Length_s = (reader.Length / format.BytesPerSecond);     // reader.Length contains the FormatChunks too!
            List<CSCore.Codecs.WAV.WaveFileChunk> waveFileDataChunks = reader.Chunks.Where(c => c.GetType() == typeof(DataChunk)).ToList();
            long waveFileDataChunksSizeBytes = waveFileDataChunks.Sum(c => c.ChunkDataSize);

            Length_s = waveFileDataChunksSizeBytes / (double)format.BytesPerSecond;   // floating-point division keeps fractional seconds

            //long _sampleCount = reader.Length / (reader.WaveFormat.BitsPerSample / 8);
            //_sampleCount /= reader.WaveFormat.Channels;                                 //Each sample contains the values of the right and left channel for (Waveformat.Channels == 2)

            ISampleSource source = WaveToSampleBase.CreateConverter(reader);

            format = source.WaveFormat;

            float[] sample_buffer = new float[format.Channels];
            while (source.Read(sample_buffer, 0, sample_buffer.Length) > 0 && source.Position < (waveFileDataChunksSizeBytes / format.Channels))          //At least one sample per channel read
            {
                double time_ms = ((1 / (double)source.WaveFormat.BytesPerSecond) * source.WaveFormat.BytesPerSample * source.Position) * 1000;

                AudioSample sample_left = new AudioSample(sample_buffer[0], time_ms, AudioChannels.LEFT);
                Samples.Add(sample_left);

                if (reader.WaveFormat.Channels == 2)
                {
                    AudioSample sample_right = new AudioSample(sample_buffer[1], time_ms, AudioChannels.RIGHT);
                    Samples.Add(sample_right);
                }
            }
            reader.Dispose();
        }
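ReadToList above reads one frame (format.Channels floats) per Read call; since the converter returns interleaved samples, the same left/right split can also be done with larger block reads. A rough sketch of that variant (the helper name and buffer size are hypothetical, not part of the example above):

    using System.Collections.Generic;
    using CSCore;
    using CSCore.Codecs.WAV;
    using CSCore.Streams.SampleConverter;

    // Hypothetical helper: de-interleave a WAV file into per-channel float lists.
    static void SplitChannels(string path, List<float> left, List<float> right)
    {
        using (WaveFileReader reader = new WaveFileReader(path))
        {
            ISampleSource source = WaveToSampleBase.CreateConverter(reader);
            int channels = source.WaveFormat.Channels;

            float[] buffer = new float[channels * 1024];     // 1024 frames per read
            int read;
            while ((read = source.Read(buffer, 0, buffer.Length)) > 0)
            {
                for (int i = 0; i + channels - 1 < read; i += channels)
                {
                    left.Add(buffer[i]);                     // channel 0 = left
                    if (channels > 1)
                        right.Add(buffer[i + 1]);            // channel 1 = right
                }
            }
        }
    }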