Esempio n. 1
0
        /// <summary>
        /// Platform-specific initialization: stores the raw 16-bit PCM data and
        /// derives the OpenAL buffer format from the channel count.
        /// </summary>
        /// <param name="buffer">16-bit PCM wave data.</param>
        /// <param name="sampleRate">Sample rate in Hertz.</param>
        /// <param name="channels">Mono or stereo layout.</param>
        private void PlatformInitialize(byte[] buffer, int sampleRate, AudioChannels channels)
        {
            Rate = (float)sampleRate;
            Size = (int)buffer.Length;

#if OPENAL && !(MONOMAC || IOS)
            // Desktop OpenAL: keep a reference to the data and pick a 16-bit format.
            _data  = buffer;
            Format = (channels == AudioChannels.Stereo) ? ALFormat.Stereo16 : ALFormat.Mono16;
            return;
#endif

#if MONOMAC || IOS
            //buffer should contain 16-bit PCM wave data
            short bitsPerSample = 16;

            // bitsPerSample is always 16 here, so the 8-bit branches are effectively
            // dead; kept for parity with other platform implementations.
            if ((int)channels <= 1)
            {
                Format = bitsPerSample == 8 ? ALFormat.Mono8 : ALFormat.Mono16;
            }
            else
            {
                Format = bitsPerSample == 8 ? ALFormat.Stereo8 : ALFormat.Stereo16;
            }

            _name = "";
            _data = buffer;
#endif
        }
Esempio n. 2
0
        /// <summary>
        /// Creates an instance.
        /// While the StreamingWave exists, <paramref name="input"/> is kept open for
        /// streaming; its Dispose method is called from this instance's Dispose method.
        /// </summary>
        /// <param name="input">Stream containing the wave file.</param>
        /// <param name="bufferDuration">Playback duration to buffer ahead.</param>
        public StreamingWave(Stream input, TimeSpan bufferDuration)
        {
            if (input == null)
            {
                throw new ArgumentNullException("input");
            }
            this.input = input;

            reader = new BinaryReader(input);

            // Read everything up to (but not including) the data portion of the 'data' chunk.
            riffChunk       = RiffChunk.ReadFrom(reader);
            formatChunk     = WaveFormatChunk.ReadFrom(reader);
            dataChunkHeader = ChunkHeader.ReadFrom(reader);

            // Remember where the data portion of the 'data' chunk begins.
            dataOffset = input.Position;

            int           sampleRate = (int)formatChunk.SampleRate;
            AudioChannels channels   = (AudioChannels)formatChunk.Channels;

            dynamicSound = new DynamicSoundEffectInstance(sampleRate, channels);
            dynamicSound.BufferNeeded += new EventHandler <EventArgs>(OnDynamicSoundBufferNeeded);

            // Size one streaming buffer to hold bufferDuration worth of samples.
            bufferSize = dynamicSound.GetSampleSizeInBytes(bufferDuration);
            buffer     = new byte[bufferSize];

            readDataAsyncCaller = new ReadDataAsyncCaller(ReadData);
        }
Esempio n. 3
0
        /// <summary>Returns the index of a channel in a combination of audio channels, or -1.</summary>
        /// <param name="channels">The combination of audio channels.</param>
        /// <param name="channel">An audio channel.</param>
        /// <returns>Returns the index of the specified <paramref name="channel"/> in the <paramref name="channels"/> combination, or -1.</returns>
        public static int GetChannelIndex(this AudioChannels channels, AudioChannels channel)
        {
            var channelCount = GetChannelCount(channels);
            var x            = (int)channel;

            if (channelCount > 0 && x != 0)
            {
                var c     = (int)channels;
                var index = 0;
                var i     = 0;

                // Walk bits from least significant upward; index counts how many
                // channels of the combination have been seen so far. The loop
                // terminates once all channelCount set bits of c have been consumed.
                // NOTE(review): index is incremented for the combination bit BEFORE
                // the channel bit at the same position is tested, so a matching
                // channel yields what looks like a 1-based position — confirm this
                // is the intended convention for callers.
                while (index < channelCount)
                {
                    if (((c >> i) & 1) == 1)
                    {
                        ++index;
                    }

                    if (((x >> i) & 1) == 1)
                    {
                        return(index);
                    }

                    ++i;
                }
            }

            // No channels in the combination, no channel bit set, or not found.
            return(-1);
        }
Esempio n. 4
0
        /// <summary>
        /// Creates a sound effect from a window of PCM data. When the window spans
        /// the whole array the data is used as-is; otherwise it is copied first.
        /// </summary>
        public SoundEffect(
            byte[] buffer,
            int offset,
            int count,
            int sampleRate,
            AudioChannels channels,
            int loopStart,
            int loopLength
            )
        {
            bool usesWholeArray = (offset == 0) && (count == buffer.Length);

            byte[] sendBuf;
            if (usesWholeArray)
            {
                sendBuf = buffer;
            }
            else
            {
                // I kind of hate this. -flibit
                sendBuf = new byte[count];
                Array.Copy(buffer, offset, sendBuf, 0, count);
            }

            INTERNAL_buffer = AudioDevice.GenBuffer(
                sendBuf,
                (uint)sampleRate,
                (uint)channels,
                (uint)loopStart,
                (uint)(loopStart + loopLength),
                false,
                1
                );
        }
        /// <summary>
        /// Initializes the sound buffer from MS-ADPCM data: buffered natively when the
        /// OpenAL controller supports ADPCM, otherwise converted to 16-bit signed PCM.
        /// </summary>
        /// <param name="data">MS-ADPCM encoded audio data.</param>
        /// <param name="sampleRate">Sample rate in Hertz.</param>
        /// <param name="channels">Channel layout of the data.</param>
        /// <param name="blockAlignment">Size in bytes of one ADPCM block.</param>
        /// <param name="loopStart">Loop start, forwarded to the PCM fallback path.</param>
        /// <param name="loopLength">Loop length, forwarded to the PCM fallback path.</param>
        private void PlatformInitializeAdpcm(
            ReadOnlySpan <byte> data, int sampleRate, AudioChannels channels, int blockAlignment,
            int loopStart, int loopLength)
        {
            if (ALController.Get().SupportsAdpcm)
            {
                var format          = AudioLoader.GetSoundFormat(AudioLoader.FormatMsAdpcm, (int)channels, 0);
                int sampleAlignment = AudioLoader.SampleAlignment(format, blockAlignment);

                // Buffer length must be aligned with the block alignment;
                // any trailing partial block is dropped.
                int alignedCount = data.Length - (data.Length % blockAlignment);
                data = data.Slice(0, alignedCount);

                SoundBuffer = ALBufferPool.Rent();
                SoundBuffer.BufferData(data, format, sampleRate, sampleAlignment);
            }
            else
            {
                // If MS-ADPCM is not supported, convert to 16-bit signed PCM
                var pcmData = MemoryMarshal.AsBytes(
                    AudioLoader.ConvertMsAdpcmToPcm(data, (int)channels, blockAlignment).AsSpan());

                PlatformInitializePcm(pcmData, 16, sampleRate, channels, loopStart, loopLength);
            }
        }
        /// <summary>
        /// Initializes the sound buffer from 32-bit IEEE float PCM data: buffered
        /// natively when the OpenAL controller supports float32, otherwise converted
        /// to 16-bit signed PCM via a pooled scratch buffer.
        /// </summary>
        private void PlatformInitializeIeeeFloat(
            ReadOnlySpan <byte> data, int sampleRate, AudioChannels channels, int loopStart, int loopLength)
        {
            if (ALController.Get().SupportsFloat32)
            {
                var format = AudioLoader.GetSoundFormat(AudioLoader.FormatIeee, (int)channels, 32);
                SoundBuffer = ALBufferPool.Rent();
                SoundBuffer.BufferData(data, format, sampleRate);
            }
            else
            {
                var floatData = MemoryMarshal.Cast <byte, float>(data);
                // One 16-bit sample per float sample.
                int byteCount = floatData.Length * sizeof(short);

                string bufferTag = nameof(PlatformInitializeIeeeFloat);
                using (var buffer = RecyclableMemoryManager.Default.GetBuffer(byteCount, bufferTag))
                {
                    var largeBuffer = buffer.Buffer;

                    // If 32-bit IEEE float is not supported, convert to 16-bit signed PCM
                    AudioLoader.ConvertSingleToInt16(floatData, MemoryMarshal.Cast <byte, short>(largeBuffer));
                    PlatformInitializePcm(largeBuffer, 16, sampleRate, channels, loopStart, loopLength);
                }
            }
        }
Esempio n. 7
0
 /// <summary>
 /// Creates a sound effect from a window of 16-bit PCM data, forwarding to the
 /// full constructor with a PCM (format tag 1) wave format derived from the
 /// sample rate and channel count.
 /// </summary>
 public SoundEffect(
     byte[] buffer,
     int offset,
     int count,
     int sampleRate,
     AudioChannels channels,
     int loopStart,
     int loopLength
     ) : this(
         null,
         buffer,
         offset,
         count,
         null,
         1,
         (ushort)channels,
         (uint)sampleRate,
         (uint)(sampleRate * ((ushort)channels * 2)),   // average bytes per second (2 bytes per 16-bit sample)
         (ushort)((ushort)channels * 2),                // block align
         16,                                            // bits per sample
         loopStart,
         loopLength
         )
 {
 }
        /// <summary>Returns the index of a channel in a combination of audio channels, or -1.</summary>
        /// <param name="channels">The combination of audio channels.</param>
        /// <param name="channel">An audio channel.</param>
        /// <returns>Returns the index of the specified <paramref name="channel"/> in the <paramref name="channels"/> combination, or -1.</returns>
        public static int GetChannelIndex( this AudioChannels channels, AudioChannels channel )
        {
            var count = GetChannelCount( channels );
            var channelBits = (int)channel;

            // Nothing to search: empty combination or no channel bit set.
            if( count <= 0 || channelBits == 0 )
                return -1;

            var maskBits = (int)channels;
            var seen = 0;

            // Scan bits from least significant upward until every channel of the
            // combination has been accounted for.
            for( var bit = 0; seen < count; bit++ )
            {
                if( ( ( maskBits >> bit ) & 1 ) != 0 )
                    seen++;

                if( ( ( channelBits >> bit ) & 1 ) != 0 )
                    return seen;
            }

            return -1;
        }
Esempio n. 9
0
 /// <summary>
 /// Starts playing <paramref name="sound"/> on the given channel, reusing an
 /// existing cue for that sound name when one is already registered.
 /// </summary>
 /// <param name="sound">Name of the sound to play.</param>
 /// <param name="channel">Target channel; selects the effect or music cue list.</param>
 /// <param name="loop">Whether the cue should loop.</param>
 /// <returns>The started cue, or null when starting it failed.</returns>
 public Cue Play(string sound, AudioChannels channel = AudioChannels.Effect, bool loop = false)
 {
     try
     {
         // Resolve the target list once instead of re-evaluating the ternary
         // at every use, and scan it once: FirstOrDefault replaces the
         // original Any + First pair, which enumerated the list twice.
         var cues = (channel == AudioChannels.Effect) ? _effectCues : _musicCues;

         var c = cues.FirstOrDefault(p => p.Sound == sound);
         if (c == null)
         {
             c = new Cue(this, channel, sound);
             cues.Add(c);
         }

         c.Loop = loop;
         c.Start();
         return(c);
     }
     catch (Exception)
     {
         // Best-effort playback: failures are deliberately swallowed and
         // signalled by the null return, matching the original behavior.
     }
     return(null);
 }
        /// <summary>
        /// Creates a dynamic sound effect instance, ensuring the sound system is
        /// initialized and validating the sample rate and channel count first.
        /// </summary>
        /// <param name="sampleRate">Sample rate, in Hertz (Hz).</param>
        /// <param name="channels">Number of channels (mono or stereo).</param>
        public DynamicSoundEffectInstance(int sampleRate, AudioChannels channels)
        {
            SoundEffect.Initialize();
            if (SoundEffect._systemState != SoundEffect.SoundSystemState.Initialized)
            {
                throw new NoAudioHardwareException("Audio has failed to initialize. Call SoundEffect.Initialize() before sound operation to get more specific errors.");
            }

            // XNA-compatible sample rate range: 8 kHz to 48 kHz.
            if ((sampleRate < 8000) || (sampleRate > 48000))
            {
                throw new ArgumentOutOfRangeException("sampleRate");
            }
            if ((channels != AudioChannels.Mono) && (channels != AudioChannels.Stereo))
            {
                throw new ArgumentOutOfRangeException("channels");
            }

            _sampleRate = sampleRate;
            _channels   = channels;
            _state      = SoundState.Stopped;
            PlatformCreate();

            // This instance is added to the pool so that its volume reflects master volume changes
            // and it contributes to the playing instances limit, but the source/voice is not owned by the pool.
            _isPooled  = false;
            _isDynamic = true;
        }
Esempio n. 11
0
 /// <summary>
 /// Platform-specific initialization: wraps the PCM bytes in a DataStream and
 /// creates the playback buffers for the derived wave format.
 /// </summary>
 private void PlatformInitialize(byte[] buffer, int sampleRate, AudioChannels channels)
 {
     var waveFormat = new WaveFormat(sampleRate, (int)channels);
     var dataStream = DataStream.Create(buffer, true, false);

     CreateBuffers(waveFormat, dataStream, 0, buffer.Length);
 }
Esempio n. 12
0
        //
        // Constructor
        //

        /// <summary>
        /// Creates a sound effect from a whole buffer of 16-bit PCM data, forwarding
        /// to the full constructor with a PCM wave format derived from the arguments
        /// and no loop region.
        /// </summary>
        public SoundEffect(byte[] buffer, int sampleRate, AudioChannels channels)
            : this(null, buffer, 0, buffer.Length,
                   1 /* WAVE_FORMAT_PCM */, (ushort)channels, (uint)sampleRate,
                   (uint)(sampleRate * ((ushort)channels * 2)),
                   (ushort)((ushort)channels * 2), 16, 0, 0)
        {
        }
Esempio n. 13
0
        /// <summary>Instantiates a new <see cref="WaveFormatExtensible"/> structure.</summary>
        /// <param name="waveFormatEx">A <see cref="WaveFormatEx"/> structure.</param>
        /// <param name="samples">Either ValidBitsPerSample, SamplesPerBlock or 0.</param>
        /// <param name="channelMask">Indicates which channels are present in the stream.</param>
        /// <param name="subFormat">Sub format <see cref="Guid"/>.</param>
        public WaveFormatExtensible(WaveFormatEx waveFormatEx, int samples, AudioChannels channelMask, Guid subFormat)
        {
            // Only the Extensible format tag carries the extra fields stored here.
            if (waveFormatEx.FormatTag != (short)WaveFormatTag.Extensible)
            {
                throw new ArgumentException("Invalid wave format: only Extensible is allowed.", "waveFormatEx");
            }

            // Every numeric field of the base format must be non-zero to be usable.
            if (waveFormatEx.ChannelCount == 0 || waveFormatEx.AverageBytesPerSecond == 0 || waveFormatEx.BitsPerSample == 0 || waveFormatEx.BlockAlign == 0 || waveFormatEx.SamplesPerSecond == 0)
            {
                throw new ArgumentException("Invalid wave format.", "waveFormatEx");
            }

            if (waveFormatEx.ExtraInfoSize < ExpectedExtraInfoSize)
            {
                throw new ArgumentException("Invalid wave format: extra info size is too small.", "waveFormatEx");
            }

            // samples is stored as a ushort below, so it must fit that range.
            if (samples < 0 || samples > ushort.MaxValue)
            {
                throw new ArgumentOutOfRangeException("samples");
            }

            if (channelMask == AudioChannels.None)
            {
                throw new ArgumentOutOfRangeException("channelMask");
            }

            baseFormat       = waveFormatEx;
            this.samples     = (ushort)samples;
            this.channelMask = channelMask;
            this.subFormat   = subFormat;
        }
Esempio n. 14
0
        /// <summary>
        /// Returns the duration for 16bit PCM audio.
        /// </summary>
        /// <param name="sizeInBytes">The length of the audio data in bytes.</param>
        /// <param name="sampleRate">Sample rate, in Hertz (Hz). Must be between 8000 Hz and 48000 Hz</param>
        /// <param name="channels">Number of channels in the audio data.</param>
        /// <returns>The duration of the audio data.</returns>
        public static TimeSpan GetSampleDuration(int sizeInBytes, int sampleRate, AudioChannels channels)
        {
            if (sizeInBytes < 0)
                throw new ArgumentException("Buffer size cannot be negative.", "sizeInBytes");

            if (sampleRate < 8000 || sampleRate > 48000)
                throw new ArgumentOutOfRangeException("sampleRate");

            var channelCount = (int)channels;
            if (channelCount != 1 && channelCount != 2)
                throw new ArgumentOutOfRangeException("channels");

            if (sizeInBytes == 0)
                return TimeSpan.Zero;

            // 16-bit PCM: bytes per second = sampleRate * channels * (16 / 8).
            // Reference: http://tinyurl.com/hq9slfy
            var bytesPerSecond = sampleRate * channelCount * 16f / 8f;

            return TimeSpan.FromSeconds(sizeInBytes / bytesPerSecond);
        }
        /// <summary>
        /// Initializes the sound buffer from signed PCM data. 24-bit input is first
        /// converted to 16-bit via a pooled buffer; other depths are buffered as-is.
        /// </summary>
        private void PlatformInitializePcm(
            ReadOnlySpan <byte> data, int sampleBits, int sampleRate, AudioChannels channels,
            int loopStart, int loopLength)
        {
            byte[]? largeBuffer = null;
            string?bufferTag = null;

            try
            {
                if (sampleBits == 24)
                {
                    // Convert 24-bit signed PCM to 16-bit signed PCM
                    largeBuffer = AudioLoader.Convert24To16(data, out bufferTag, out int size);
                    data        = largeBuffer.AsSpan(0, size);
                    sampleBits  = 16;
                }

                var format = AudioLoader.GetSoundFormat(AudioLoader.FormatPcm, (int)channels, sampleBits);
                SoundBuffer = ALBufferPool.Rent();
                SoundBuffer.BufferData(data, format, sampleRate);
            }
            finally
            {
                // Return the pooled conversion buffer even if BufferData throws.
                if (largeBuffer != null)
                {
                    RecyclableMemoryManager.Default.ReturnBuffer(largeBuffer, bufferTag);
                }
            }
        }
Esempio n. 16
0
        /// <summary>
        /// Returns the data size in bytes for 16bit PCM audio.
        /// </summary>
        /// <param name="duration">The total duration of the audio data.</param>
        /// <param name="sampleRate">Sample rate, in Hertz (Hz), of audio data. Must be between 8,000 and 48,000 Hz.</param>
        /// <param name="channels">Number of channels in the audio data.</param>
        /// <returns>The size in bytes of a single sample of audio data.</returns>
        public static int GetSampleSizeInBytes(TimeSpan duration, int sampleRate, AudioChannels channels)
        {
            if (duration < TimeSpan.Zero || duration > TimeSpan.FromMilliseconds(0x7FFFFFF))
                throw new ArgumentOutOfRangeException("duration");

            if (sampleRate < 8000 || sampleRate > 48000)
                throw new ArgumentOutOfRangeException("sampleRate");

            var channelCount = (int)channels;
            if (channelCount != 1 && channelCount != 2)
                throw new ArgumentOutOfRangeException("channels");

            // 16-bit PCM: bytes per second = sampleRate * channels * (16 / 8).
            // Reference: http://tinyurl.com/hq9slfy
            var bytesPerSecond = sampleRate * channelCount * 16f / 8f;

            return (int)(duration.TotalSeconds * bytesPerSecond);
        }
Esempio n. 17
0
        //**********************************************************************************************************************************************************************************************************

        /// <summary>
        /// Create a new instance of FadeSettings class. The FadeType must be CUSTOM or UNDO_CUSTOM!!!
        /// </summary>
        /// <param name="fadeStartTime_ms">The fade begin time. Time offset of FadePoints</param>
        /// <param name="fadeType">fade type. Must be CUSTOM or UNDO_CUSTOM!!!</param>
        /// <param name="fadePointsFile">XML file that contains the fade points that describe the custom fade</param>
        /// <param name="fadeChannels">channels that are faded</param>
        public FadeSettings(double fadeStartTime_ms, FadeTypes fadeType, string fadePointsFile, AudioChannels fadeChannels)
        {
            FadeStartTime_ms = fadeStartTime_ms;

            try
            {
                XmlSerializer serializer = new XmlSerializer(typeof(List <PointF>));
                // 'using' guarantees the reader is closed even when Deserialize
                // throws; the original leaked the StreamReader on that path.
                using (TextReader textReader = new StreamReader(fadePointsFile))
                {
                    FadePoints = (List <PointF>)serializer.Deserialize(textReader);
                }
            }
            catch (Exception)
            {
                // Best-effort load: a missing or malformed file leaves FadePoints null.
            }

            if (FadePoints != null)
            {
                FadePoints = FadePoints.OrderBy(p => p.X).ToList();             //Sort after X-coordinate

                FadeLength_ms   = (FadePoints.Last().X - FadePoints.First().X);
                FadeBeginFactor = FadePoints.First().Y;
                FadeEndFactor   = FadePoints.Last().Y;
                FadeChannels    = fadeChannels;
                FadeShapeFactor = 10;

                // Force CUSTOM when an unexpected fade type is passed, as documented above.
                if (fadeType != FadeTypes.CUSTOM && fadeType != FadeTypes.UNDO_CUSTOM)
                {
                    FadeType = FadeTypes.CUSTOM;
                }
                else
                {
                    FadeType = fadeType;
                }
            }
        }
Esempio n. 18
0
        /// <summary>
        /// Creates a sound effect from a window of PCM data. When the window does not
        /// cover the whole array, the requested range is copied into a new array;
        /// otherwise the array is passed through unchanged.
        /// </summary>
        public SoundEffect(
			byte[] buffer,
			int offset,
			int count,
			int sampleRate,
			AudioChannels channels,
			int loopStart,
			int loopLength
		)
        {
            byte[] sendBuf;
            if (offset != 0 || count != buffer.Length)
            {
                // I kind of hate this. -flibit
                sendBuf = new byte[count];
                Array.Copy(buffer, offset, sendBuf, 0, count);
            }
            else
            {
                sendBuf = buffer;
            }

            INTERNAL_buffer = AudioDevice.GenBuffer(
                sendBuf,
                (uint) sampleRate,
                (uint) channels,
                (uint) loopStart,
                (uint) (loopStart + loopLength),
                false,
                1
            );
        }
Esempio n. 19
0
		/// <summary>
		/// Builds an in-memory RIFF/WAVE file (44-byte header plus data) around the
		/// given 16-bit PCM samples and loads it as the platform sound.
		/// </summary>
		public SoundEffect(byte[] buffer, int sampleRate, AudioChannels channels)
		{
			// buffer should contain 16-bit PCM wave data
			const short bitsPerSample = 16;
			short blockAlign = (short)((bitsPerSample / 8) * (int)channels);

			MemoryStream mStream = new MemoryStream(44 + buffer.Length);
			using (BinaryWriter writer = new BinaryWriter(mStream))
			{
				// RIFF header
				writer.Write("RIFF".ToCharArray());
				writer.Write((int)(36 + buffer.Length));      // chunk size
				writer.Write("WAVE".ToCharArray());

				// 'fmt ' chunk
				writer.Write("fmt ".ToCharArray());
				writer.Write((int)16);                        // format header size
				writer.Write((short)1);                       // PCM format tag
				writer.Write((short)channels);
				writer.Write((int)sampleRate);
				writer.Write((int)(sampleRate * blockAlign)); // byte rate
				writer.Write((short)blockAlign);
				writer.Write((short)bitsPerSample);

				// 'data' chunk
				writer.Write("data".ToCharArray());
				writer.Write((int)buffer.Length);
				writer.Write(buffer);
			}

			// Disposing the writer closed the stream too, but ToArray remains
			// valid on a closed MemoryStream.
			_data = mStream.ToArray();
			_name = "";
			_sound = new Sound(_data, 1.0f, false);
		}
Esempio n. 20
0
        /// <summary>
        /// Builds an in-memory RIFF/WAVE stream (44-byte header plus data) around the
        /// given 16-bit PCM samples and loads it via LoadAudioStream.
        /// </summary>
        public SoundEffect(byte[] buffer, int sampleRate, AudioChannels channels)
        {
            // 16 bits per sample (decompiled local names retained).
            short num1 = (short)16;

            using (MemoryStream memoryStream = new MemoryStream(44 + buffer.Length))
            {
                using (BinaryWriter binaryWriter = new BinaryWriter((Stream)memoryStream))
                {
                    binaryWriter.Write("RIFF".ToCharArray());
                    binaryWriter.Write(36 + buffer.Length);     // chunk size
                    binaryWriter.Write("WAVE".ToCharArray());
                    binaryWriter.Write("fmt ".ToCharArray());
                    binaryWriter.Write(16);                     // format header size
                    binaryWriter.Write((short)1);               // PCM format tag
                    binaryWriter.Write((short)channels);
                    binaryWriter.Write(sampleRate);
                    // Block align: bytes per sample frame.
                    short num2 = (short)((int)num1 / 8 * (int)channels);
                    binaryWriter.Write(sampleRate * (int)num2); // byte rate
                    binaryWriter.Write(num2);
                    binaryWriter.Write(num1);
                    binaryWriter.Write("data".ToCharArray());
                    binaryWriter.Write(buffer.Length);
                    binaryWriter.Write(buffer);
                    // Rewind so LoadAudioStream reads from the RIFF header.
                    memoryStream.Seek(0L, SeekOrigin.Begin);
                    this._name = "";
                    this._data = this.LoadAudioStream((Stream)memoryStream, 1f, false);
                    binaryWriter.Close();
                    memoryStream.Close();
                }
            }
        }
Esempio n. 21
0
        /// <summary>
        /// Maps a channel layout and sample depth to the corresponding OpenAL buffer
        /// format. Only mono/stereo with 16-bit integer or 32-bit float samples are
        /// supported; anything else throws.
        /// </summary>
        public static ALFormat GetALFormat(AudioChannels channels, AudioDepth depth)
        {
            if (channels == AudioChannels.Mono)
            {
                if (depth == AudioDepth.Short)
                    return ALFormat.Mono16;
                if (depth == AudioDepth.Float)
                    return ALFormat.MonoFloat32;
            }
            else if (channels == AudioChannels.Stereo)
            {
                if (depth == AudioDepth.Short)
                    return ALFormat.Stereo16;
                if (depth == AudioDepth.Float)
                    return ALFormat.StereoFloat32;
            }
            else
            {
                throw new ArgumentOutOfRangeException(nameof(channels), "Only mono and stereo channels are supported.");
            }

            // Known channel layout but unsupported sample depth.
            throw new ArgumentOutOfRangeException(nameof(depth), "Audio format is not supported.");
        }
Esempio n. 22
0
        /// <summary>
        /// Builds an in-memory RIFF/WAVE file (44-byte header plus data) around the
        /// given 16-bit PCM samples and hands the bytes to LoadAudioStream.
        /// </summary>
        public SoundEffect(byte[] buffer, int sampleRate, AudioChannels channels)
        {
            //buffer should contain 16-bit PCM wave data
            short bitsPerSample = 16;

            MemoryStream mStream = new MemoryStream(44 + buffer.Length);
            BinaryWriter writer  = new BinaryWriter(mStream);

            writer.Write("RIFF".ToCharArray());      //chunk id
            writer.Write((int)(36 + buffer.Length)); //chunk size
            writer.Write("WAVE".ToCharArray());      //RIFF type

            writer.Write("fmt ".ToCharArray());      //chunk id
            writer.Write((int)16);                   //format header size
            writer.Write((short)1);                  //format (PCM)
            writer.Write((short)channels);
            writer.Write((int)sampleRate);
            short blockAlign = (short)((bitsPerSample / 8) * (int)channels);

            writer.Write((int)(sampleRate * blockAlign));              //byte rate
            writer.Write((short)blockAlign);
            writer.Write((short)bitsPerSample);

            writer.Write("data".ToCharArray());            //chunk id
            writer.Write((int)buffer.Length);              //data size
            writer.Write(buffer);

            // Closing the writer also closes the stream; ToArray is still valid
            // on a closed MemoryStream.
            writer.Close();
            mStream.Close();

            _data = mStream.ToArray();
            _name = "";

            LoadAudioStream(_data);
        }
        /// <summary>
        /// Platform-specific initialization: records rate and size, then wraps the
        /// raw PCM data in a WAV container via AudioUtil.
        /// </summary>
        private void PlatformInitialize(byte[] buffer, int sampleRate, AudioChannels channels)
        {
            Rate = sampleRate;
            Size = buffer.Length;

            _name = "";
            _data = AudioUtil.FormatWavData(buffer, sampleRate, (int)channels);
        }
Esempio n. 24
0
 /// <summary>
 /// Generates an audio buffer via the active OpenAL device; throws when no
 /// device has been initialized.
 /// </summary>
 public static IALBuffer GenBuffer(int sampleRate, AudioChannels channels)
 {
     if (ALDevice == null)
     {
         throw new NoAudioHardwareException();
     }
     return(ALDevice.GenBuffer(sampleRate, channels));
 }
Esempio n. 25
0
    // Initialization: hide the sword, look up the scene's AudioChannels helper,
    // and reset the combo counter.
    void    Start()
    {
        m_equiped = false;
        m_sword.GetComponent <Renderer>().enabled = false;

        // May be null if no AudioChannels object exists in the scene.
        m_audio = FindObjectOfType(typeof(AudioChannels)) as AudioChannels;
        m_combo = 0;
    }
Esempio n. 26
0
		/// <summary>
		/// Sets float sample data on a buffer. Intentionally a no-op in this
		/// dummy/null audio device implementation.
		/// </summary>
		public void SetBufferData(
			IALBuffer buffer,
			AudioChannels channels,
			float[] data,
			int sampleRate
		) {
			// No-op, duh.
		}
        /// <summary>
        /// Platform-specific initialization: records rate and size, then wraps the
        /// raw PCM data in a WAV container via AudioUtil.
        /// </summary>
        private void PlatformInitialize(byte[] buffer, int sampleRate, AudioChannels channels)
        {
            Rate = (float)sampleRate;
            Size = (int)buffer.Length;

            _name = "";
            _data = AudioUtil.FormatWavData(buffer, sampleRate, (int)channels);
        }
Esempio n. 28
0
        /// <summary>
        /// Creates a sound effect from a window of PCM data with loop points.
        /// Only implemented for the DirectX backend; other builds throw.
        /// </summary>
        public SoundEffect(byte[] buffer, int offset, int count, int sampleRate, AudioChannels channels, int loopStart, int loopLength)
        {
#if DIRECTX
            Initialize(new WaveFormat(sampleRate, (int)channels), buffer, offset, count, loopStart, loopLength);
#else
            throw new NotImplementedException();
#endif
        }
Esempio n. 29
0
 /// <summary>
 /// Restores this component's fields to their default values:
 /// unit range/volume, active, non-looping, stereo source.
 /// </summary>
 public override void Reset()
 {
     range      = 1;
     active     = true;
     volume     = 1;
     Looped     = false;
     sourceType = AudioChannels.Stereo;
 }
Esempio n. 30
0
 /// <summary>
 /// Reads float sample data back from queued buffers. Intentionally a no-op in
 /// this dummy/null audio device implementation.
 /// </summary>
 public void GetBufferData(
     IALSource source,
     IALBuffer[] buffer,
     float[] samples,
     AudioChannels channels
     )
 {
     // No-op, duh.
 }
Esempio n. 31
0
 /// <summary>
 /// Creates a synth voice bound to an audio engine and output channel, with no
 /// pitch playing yet and both stop-control events unsignaled.
 /// </summary>
 public SynthVoice(AudioFx audio, AudioChannels channel)
 {
     _audio       = audio;
     Channel      = channel;
     PlayingPitch = -1;   // -1 = nothing currently sounding
     Voices       = new List <SourceVoice>();
     StopDone     = new ManualResetEvent(false);
     CancelStop   = new ManualResetEvent(false);
 }
Esempio n. 32
0
        /// <summary>
        /// Generates a single OpenAL buffer handle and wraps it in an OpenALBuffer
        /// carrying the given format info (duration starts at zero).
        /// </summary>
        public IALBuffer GenBuffer(int sampleRate, AudioChannels channels)
        {
            uint result;

            AL10.alGenBuffers(1, out result);
#if VERBOSE_AL_DEBUGGING
            CheckALError();
#endif
            return(new OpenALBuffer(result, TimeSpan.Zero, (int)channels, sampleRate));
        }
 /// <summary>
 /// Creates a playback instance for a reversible sound, capturing the source
 /// bytes and format, and sizing the streaming chunk from BUFFER_CHUNK_SIZE.
 /// </summary>
 public SoundReversibleInstance(SoundReversible sound, byte[] audioBytes, int sampleRate, AudioChannels channels, bool inReverse)
 {
     this.sound = sound;
     this.sampleRate = sampleRate;
     this.channels = channels;
     reversed = inReverse;
     baseAudioBytes = audioBytes;
     dynamicSound = NewDynamicSoundEffectInstance();
     // Bytes per streamed chunk of BUFFER_CHUNK_SIZE milliseconds.
     count = dynamicSound.GetSampleSizeInBytes(TimeSpan.FromMilliseconds(BUFFER_CHUNK_SIZE));
 }
Esempio n. 34
0
 /// <summary>
 /// Reads raw sample data back from queued buffers. Intentionally a no-op in
 /// this dummy/null audio device implementation.
 /// </summary>
 public void GetBufferData(
     IALSource source,
     IALBuffer[] buffer,
     IntPtr samples,
     int samplesLen,
     AudioChannels channels
     )
 {
     // No-op, duh.
 }
Esempio n. 35
0
    // Initialization: play the spawn sound effect (when an audio helper exists
    // in the scene) and schedule this object's destruction.
    void Start()
    {
        AudioChannels audio = FindObjectOfType(typeof(AudioChannels)) as AudioChannels;

        if (audio != null)
        {
            audio.PlayOneShot(m_SEAppear, 1.0f, 0.0f);
        }
        // Self-destruct after the configured lifetime.
        Destroy(gameObject, m_lifeTime);
    }
Esempio n. 36
0
        /// <summary>
        /// Creates a dynamic sound effect instance with the given format and three
        /// empty buffer-handle queues (queued, to-queue, available).
        /// </summary>
        public DynamicSoundEffectInstance(int sampleRate, AudioChannels channels) : base(null)
        {
            this.sampleRate = sampleRate;
            this.channels   = channels;

            PendingBufferCount = 0;

            queuedBuffers    = new Queue <uint>();
            buffersToQueue   = new Queue <uint>();
            availableBuffers = new Queue <uint>();
        }
Esempio n. 37
0
		/// <summary>
		/// Creates a dynamic sound effect instance with the given format, marking it
		/// dynamic and starting with empty buffer queues.
		/// </summary>
		public DynamicSoundEffectInstance(int sampleRate, AudioChannels channels) : base(null)
		{
			this.sampleRate = sampleRate;
			this.channels = channels;

			PendingBufferCount = 0;

			isDynamic = true;
			queuedBuffers = new Queue<IALBuffer>();
			buffersToQueue = new Queue<IALBuffer>();
			availableBuffers = new Queue<IALBuffer>();
		}
Esempio n. 38
0
        /// <summary>
        /// Creates a sound effect from a whole buffer with no loop region
        /// (loop start and end are both 0).
        /// </summary>
        public SoundEffect(
			byte[] buffer,
			int sampleRate,
			AudioChannels channels
		)
        {
            INTERNAL_buffer = AudioDevice.GenBuffer(
                buffer,
                (uint) sampleRate,
                (uint) channels,
                0,
                0,
                false,
                1
            );
        }
 /// <summary>
 /// Creates a dynamic sound effect instance for 16-bit PCM data, selecting the
 /// OpenAL buffer format from the channel count.
 /// </summary>
 /// <param name="sampleRate">Sample rate in Hertz.</param>
 /// <param name="channels">Mono or stereo.</param>
 /// <exception cref="ArgumentOutOfRangeException">channels is neither mono nor stereo.</exception>
 public DynamicSoundEffectInstance(int sampleRate, AudioChannels channels)
 {
     this.sampleRate = sampleRate;
     this.channels = channels;
     switch (channels)
     {
         case AudioChannels.Mono:
             this.format = ALFormat.Mono16;
             break;
         case AudioChannels.Stereo:
             this.format = ALFormat.Stereo16;
             break;
         default:
             // The original silently left 'format' at its default value for an
             // invalid channel count; fail fast instead, matching the validation
             // performed by the other DynamicSoundEffectInstance constructors.
             throw new ArgumentOutOfRangeException("channels");
     }
 }
        /// <summary>
        /// Create a dynamic sound effect instance with the given sound properties.
        /// </summary>
        /// <param name="engine">The engine in which the dynamicSoundEffectInstance is created</param>
        /// <param name="sampleRate">Sample rate, in Hertz (Hz), of audio content. Must between 8000 Hz and 48000 Hz</param>
        /// <param name="channels">Number of channels in the audio data.</param>
        /// <param name="encoding">Encoding of a sound data sample</param>
        /// <returns>A new DynamicSoundEffectInstance instance ready to filled with data and then played</returns>
        /// <exception cref="ArgumentOutOfRangeException">This exception is thrown for one of the following reason:
        /// <list type="bullet">
        /// <item>The value specified for sampleRate is less than 8000 Hz or greater than 48000 Hz. </item>
        /// <item>The value specified for channels is something other than mono or stereo. </item>
        /// <item>The value specified for data encoding is something other than 8 or 16 bits. </item>
        /// </list>
        ///  </exception>
        /// <exception cref="ArgumentNullException"><paramref name="engine"/> is null.</exception>
        public DynamicSoundEffectInstance(AudioEngine engine, int sampleRate, AudioChannels channels, AudioDataEncoding encoding)
            : base(engine)
        {
            if (engine == null) 
                throw new ArgumentNullException("engine");

            if (sampleRate < 8000 || 48000 < sampleRate)
                throw new ArgumentOutOfRangeException("sampleRate");

            if(channels != AudioChannels.Mono && channels != AudioChannels.Stereo)
                throw new ArgumentOutOfRangeException("channels");

            if(encoding != AudioDataEncoding.PCM_8Bits && encoding != AudioDataEncoding.PCM_16Bits)
                throw new ArgumentOutOfRangeException("encoding");

            waveFormat = new WaveFormat(sampleRate, (int)encoding, (int)channels);

            // Track instance counts atomically; the first live instance owns
            // creation of the shared worker thread below.
            Interlocked.Increment(ref totalNbOfInstances);
            Interlocked.Increment(ref numberOfInstances);

            // first instance of dynamic sound effect instance => we create the workerThead and the associated event.
            if (numberOfInstances == 1)
            {
                instancesNeedingBuffer = new ThreadSafeQueue<DynamicSoundEffectInstance>(); // to be sure that there is no remaining request from previous sessions
                awakeWorkerThread = new AutoResetEvent(false);
                CreateWorkerThread();
            }
            
            Name = "Dynamic Sound Effect Instance - "+totalNbOfInstances;

            CreateVoice(WaveFormat);

            InitializeDynamicSound();

            AudioEngine.RegisterSound(this);

            ResetStateToDefault();
        }
Esempio n. 41
0
        /// <summary>
        /// Uploads byte sample data into an OpenAL buffer using the 16-bit format
        /// looked up from the channel layout.
        /// </summary>
        public void SetBufferData(
			IALBuffer buffer,
			AudioChannels channels,
			byte[] data,
			int count,
			int sampleRate
		)
        {
            AL10.alBufferData(
                (buffer as OpenALBuffer).Handle,
                XNAToShort[channels],
                data, // TODO: offset -flibit
                (IntPtr) count,
                (IntPtr) sampleRate
            );
        }
Esempio n. 42
0
        /// <summary>
        /// Uploads float sample data into an OpenAL buffer using the float format
        /// looked up from the channel layout.
        /// </summary>
        public void SetBufferData(
			IALBuffer buffer,
			AudioChannels channels,
			float[] data,
			int sampleRate
		)
        {
            AL10.alBufferData(
                (buffer as OpenALBuffer).Handle,
                XNAToFloat[channels],
                data,
                (IntPtr) (data.Length * 4), // 4 bytes per float sample
                (IntPtr) sampleRate
            );
        }
 /// <summary>
 /// Creates a named sound effect by delegating to the main constructor and then
 /// recording the name.
 /// </summary>
 internal SoundEffect(string name, byte[] buffer, int sampleRate, AudioChannels channels)
     : this(buffer, sampleRate, channels)
 {
     _name = name;
 }
Esempio n. 44
0
        /// <summary>
        /// Creates a sound effect from a window of PCM data with loop points.
        /// Only implemented for the DirectX backend; other builds throw.
        /// </summary>
        public SoundEffect(byte[] buffer, int offset, int count, int sampleRate, AudioChannels channels, int loopStart, int loopLength)
        {
#if DIRECTX
            Initialize(new WaveFormat(sampleRate, (int)channels), buffer, offset, count, loopStart, loopLength);
#else
            throw new NotImplementedException();
#endif
        }        
Esempio n. 45
0
        /// <summary>
        /// Creates a SoundEffect from raw 16-bit PCM data. On DirectX the buffer is
        /// handed to Initialize directly; otherwise a canonical 44-byte RIFF/WAVE
        /// header is synthesized in front of the data and the result is passed to
        /// the backend Sound type.
        /// </summary>
        /// <param name="buffer">Buffer containing 16-bit PCM wave data (no header).</param>
        /// <param name="sampleRate">Sample rate, in Hertz (Hz).</param>
        /// <param name="channels">Number of channels (mono or stereo).</param>
        public SoundEffect(byte[] buffer, int sampleRate, AudioChannels channels)
        {
#if DIRECTX            
            Initialize(new WaveFormat(sampleRate, (int)channels), buffer, 0, buffer.Length, 0, buffer.Length);
#else
            // The buffer is assumed to contain 16-bit PCM wave data.
			short bitsPerSample = 16;

            _name = "";

            // 44 bytes = standard RIFF + fmt + data chunk headers preceding the samples.
			using (var mStream = new MemoryStream(44+buffer.Length))
            using (var writer = new BinaryWriter(mStream))
            {
                writer.Write("RIFF".ToCharArray()); // chunk id
                writer.Write((int)(36 + buffer.Length)); // chunk size = 4 ("WAVE") + 24 (fmt) + 8 (data hdr) + data
                writer.Write("WAVE".ToCharArray()); // RIFF type

                writer.Write("fmt ".ToCharArray()); // chunk id
                writer.Write((int)16); // format header size
                writer.Write((short)1); // format tag 1 = uncompressed PCM
                writer.Write((short)channels);
                writer.Write((int)sampleRate);
                // blockAlign = bytes per sample frame (all channels).
                short blockAlign = (short)((bitsPerSample / 8) * (int)channels);
                writer.Write((int)(sampleRate * blockAlign)); // average byte rate
                writer.Write((short)blockAlign);
                writer.Write((short)bitsPerSample);

                writer.Write("data".ToCharArray()); // chunk id
                writer.Write((int)buffer.Length); // data size, in bytes

                writer.Write(buffer);

                // Snapshot the fully-assembled WAV image before the stream is disposed.
                _data = mStream.ToArray();
            }

            // volume 1.0, not looping.
			_sound = new Sound(_data, 1.0f, false);
#endif
        }
Esempio n. 46
0
        /// <summary>
        /// Uploads 32-bit float PCM data into an OpenAL buffer.
        /// </summary>
        /// <param name="buffer">Target buffer; must be an <c>OpenALBuffer</c>.</param>
        /// <param name="channels">Channel count, mapped to an AL float format via <c>XNAToFloat</c>.</param>
        /// <param name="data">Float samples; uploaded in full.</param>
        /// <param name="sampleRate">Sample rate, in Hertz (Hz).</param>
        public void SetBufferData(
			IALBuffer buffer,
			AudioChannels channels,
			float[] data,
			int sampleRate
		)
        {
            // Each sample is a 4-byte IEEE float, so the byte length is length * 4.
            var byteLength = data.Length * 4;
            var alBuffer = buffer as OpenALBuffer;
            var alFormat = XNAToFloat[channels];
            AL10.alBufferData(
                alBuffer.Handle,
                alFormat,
                data,
                (IntPtr) byteLength,
                (IntPtr) sampleRate
            );
            #if VERBOSE_AL_DEBUGGING
            CheckALError();
            #endif
        }
Esempio n. 47
0
 /// <summary>
 /// Platform-specific initialization: wraps the raw PCM buffer into WAV-formatted
 /// data via AudioUtil.FormatWavData and hands it to the backend Sound type.
 /// </summary>
 /// <param name="buffer">Buffer containing PCM wave data (no header).</param>
 /// <param name="sampleRate">Sample rate, in Hertz (Hz).</param>
 /// <param name="channels">Number of channels (mono or stereo).</param>
 private void PlatformInitialize(byte[] buffer, int sampleRate, AudioChannels channels)
 {
     _name = "";
     
     _audioBuffer = new Sound(AudioUtil.FormatWavData(buffer, sampleRate, (int)channels));
 }
Esempio n. 48
0
        /// <summary>
        /// Computes the buffer size, in bytes, needed to hold 16-bit PCM audio of the
        /// given duration. Inverse of <see cref="GetSampleDuration"/>.
        /// </summary>
        /// <param name="duration">Desired playback duration.</param>
        /// <param name="sampleRate">Sample rate, in Hertz (Hz).</param>
        /// <param name="channels">Number of channels (mono or stereo).</param>
        /// <returns>Size of the audio data, in bytes.</returns>
        public static int GetSampleSizeInBytes(
			TimeSpan duration,
			int sampleRate,
			AudioChannels channels
		)
        {
            // bytes = seconds * frames/second * channels * 2 bytes per 16-bit sample.
            // Fix: the original omitted the 2-byte-per-sample factor, halving the
            // reported size (the sibling overload applies 16f / 8f for the same reason).
            return (int) (
                duration.TotalSeconds *
                sampleRate *
                (int) channels *
                2
            );
        }
Esempio n. 49
0
        /// <summary>
        /// Computes the playback duration of a 16-bit PCM buffer of the given size.
        /// Inverse of <see cref="GetSampleSizeInBytes"/>.
        /// </summary>
        /// <param name="sizeInBytes">Size of the audio data, in bytes.</param>
        /// <param name="sampleRate">Sample rate, in Hertz (Hz).</param>
        /// <param name="channels">Number of channels (mono or stereo).</param>
        /// <returns>Playback duration, with millisecond precision.</returns>
        public static TimeSpan GetSampleDuration(
			int sizeInBytes,
			int sampleRate,
			AudioChannels channels
		)
        {
            // Fix: 16-bit PCM has two bytes per sample; the original skipped this
            // division, doubling the reported duration.
            sizeInBytes /= 2;
            int ms = (int) (
                (sizeInBytes / (int) channels) /
                (sampleRate / 1000.0f)
            );
            return new TimeSpan(0, 0, 0, 0, ms);
        }
Esempio n. 50
0
        /// <summary>
        /// Creates a SoundEffect from a region of a 16-bit PCM buffer with loop points.
        /// </summary>
        /// <param name="buffer">Buffer containing PCM wave data.</param>
        /// <param name="offset">Offset, in bytes, to the starting position of the audio data.</param>
        /// <param name="count">Amount, in bytes, of audio data.</param>
        /// <param name="sampleRate">Sample rate, in Hertz (Hz)</param>
        /// <param name="channels">Number of channels (mono or stereo).</param>
        /// <param name="loopStart">The position, in samples, where the audio should begin looping.</param>
        /// <param name="loopLength">The duration, in samples, that audio should loop over.</param>
        /// <remarks>Use SoundEffect.GetSampleDuration() to convert time to samples.</remarks>
        public SoundEffect(byte[] buffer, int offset, int count, int sampleRate, AudioChannels channels, int loopStart, int loopLength)
        {
            // Duration is derived from the byte count actually played, not the whole buffer.
            _duration = GetSampleDuration(count, sampleRate, channels);

            PlatformInitialize(buffer, offset, count, sampleRate, channels, loopStart, loopLength);
        }
Esempio n. 51
0
        /// <summary>
        /// Creates a SoundEffect that plays the entire PCM buffer with no looping region.
        /// </summary>
        /// <param name="buffer">Buffer containing PCM wave data.</param>
        /// <param name="sampleRate">Sample rate, in Hertz (Hz)</param>
        /// <param name="channels">Number of channels (mono or stereo).</param>
        public SoundEffect(byte[] buffer, int sampleRate, AudioChannels channels)
        {
            _duration = GetSampleDuration(buffer.Length, sampleRate, channels);

            PlatformInitialize(buffer, sampleRate, channels);
        }
Esempio n. 52
0
        /// <summary>
        /// Gets the size of a sample from a TimeSpan.
        /// </summary>
        /// <param name="duration">TimeSpan object that contains the sample duration.</param>
        /// <param name="sampleRate">Sample rate, in Hertz (Hz), of audio data. Must be between 8,000 and 48,000 Hz.</param>
        /// <param name="channels">Number of channels in the audio data.</param>
        /// <returns>Size of a single sample of audio data.</returns>
        /// <exception cref="ArgumentOutOfRangeException">
        /// Thrown when <paramref name="sampleRate"/> is outside 8,000–48,000 Hz.
        /// </exception>
        public static int GetSampleSizeInBytes(TimeSpan duration, int sampleRate, AudioChannels channels)
        {
            // Fix: name the offending argument and explain the valid range instead of
            // throwing a bare ArgumentOutOfRangeException.
            if (sampleRate < 8000 || sampleRate > 48000)
                throw new ArgumentOutOfRangeException(
                    nameof(sampleRate),
                    "Sample rate must be between 8,000 and 48,000 Hz.");

            // Reference: http://social.msdn.microsoft.com/Forums/windows/en-US/5a92be69-3b4e-4d92-b1d2-141ef0a50c91/how-to-calculate-duration-of-wave-file-from-its-size?forum=winforms

            var numChannels = (int)channels;

            // 16 bits per sample / 8 bits per byte = 2 bytes per sample per channel.
            var sizeInBytes = duration.TotalSeconds * (sampleRate * numChannels * 16f / 8f);

            return (int)sizeInBytes;
        }
Esempio n. 53
0
 /// <summary>
 /// Platform-specific initialization for PCM data with loop points.
 /// Not supported by this backend.
 /// </summary>
 /// <exception cref="NotImplementedException">Always thrown.</exception>
 private void PlatformInitialize(byte[] buffer, int offset, int count, int sampleRate, AudioChannels channels, int loopStart, int loopLength)
 {
     throw new NotImplementedException();
 }
Esempio n. 54
0
 /// <summary>
 /// Platform-specific initialization for PCM data with loop points.
 /// Intentionally a no-op on this platform.
 /// </summary>
 private void PlatformInitialize(byte[] buffer, int offset, int count, int sampleRate, AudioChannels channels, int loopStart, int loopLength)
 {
 }
Esempio n. 55
0
		/// <summary>
		/// Null audio backend: uploading buffer data is intentionally a no-op.
		/// </summary>
		public void SetBufferData(
			IALBuffer buffer,
			AudioChannels channels,
			IntPtr data,
			int offset,
			int count,
			int sampleRate
		) {
			// No-op: this backend produces no audio.
		}
Esempio n. 56
0
		/// <summary>
		/// SoundEffect constructor. Builds the effect from a region of a PCM buffer
		/// with explicit loop points, on the given audio device.
		/// </summary>
		/// <param name="device">Audio device that owns the resulting voice.</param>
		/// <param name="buffer">sound data buffer</param>
		/// <param name="offset">Offset, in bytes, to the start of the audio data.</param>
		/// <param name="bytesCount">Amount, in bytes, of audio data.</param>
		/// <param name="sampleRate">Sample rate, in Hertz (Hz).</param>
		/// <param name="channels">Number of channels (mono or stereo).</param>
		/// <param name="loopStart">Loop start position, in samples.</param>
		/// <param name="loopLength">Loop duration, in samples.</param>
		public SoundEffect( AudioDevice device, byte[] buffer, int offset, int bytesCount, int sampleRate, AudioChannels channels, int loopStart, int loopLength )
        {
			this.device	=	device;
            Initialize(new WaveFormat(sampleRate, (int)channels), buffer, offset, bytesCount, loopStart, loopLength);
        }
Esempio n. 57
0
 /// <summary>
 /// Platform-specific initialization for raw PCM data.
 /// Intentionally a no-op on this platform.
 /// </summary>
 private void PlatformInitialize(byte[] buffer, int sampleRate, AudioChannels channels)
 {
 }
Esempio n. 58
0
		/// <summary>
		/// Null audio backend: reading sample data back from buffers is intentionally
		/// a no-op (the destination is left untouched).
		/// </summary>
		public void GetBufferData(
			IALSource source,
			IALBuffer[] buffer,
			IntPtr samples,
			int samplesLen,
			AudioChannels channels
		) {
			// No-op: this backend produces no audio.
		}
Esempio n. 59
0
        /// <summary>
        /// Uploads 16-bit PCM data into an OpenAL buffer.
        /// </summary>
        /// <param name="buffer">Target buffer; must be an <c>OpenALBuffer</c>.</param>
        /// <param name="channels">Channel count, mapped to an AL format via <c>XNAToShort</c>.</param>
        /// <param name="data">Raw 16-bit PCM bytes.</param>
        /// <param name="count">Number of bytes from <paramref name="data"/> to upload.</param>
        /// <param name="sampleRate">Sample rate, in Hertz (Hz).</param>
        public void SetBufferData(
			IALBuffer buffer,
			AudioChannels channels,
			byte[] data,
			int count,
			int sampleRate
		)
        {
            // Resolve the native buffer name and the AL format for this channel count.
            var alBuffer = buffer as OpenALBuffer;
            var alFormat = XNAToShort[channels];
            AL10.alBufferData(
                alBuffer.Handle,
                alFormat,
                data, // TODO: offset -flibit
                (IntPtr) count,
                (IntPtr) sampleRate
            );
            #if VERBOSE_AL_DEBUGGING
            CheckALError();
            #endif
        }
Esempio n. 60
0
        private void PlatformInitialize(byte[] buffer, int sampleRate, AudioChannels channels)
        {
            // Record the basic stream parameters regardless of backend.
            Rate = (float)sampleRate;
            Size = buffer.Length;

#if OPENAL && !(MONOMAC || IOS)
            // Desktop OpenAL: keep the raw PCM and pick a 16-bit AL format
            // based solely on the channel count.
            _data  = buffer;
            Format = (channels == AudioChannels.Stereo) ? ALFormat.Stereo16 : ALFormat.Mono16;
            return;
#endif

#if MONOMAC || IOS
            // The buffer is assumed to contain 16-bit PCM wave data.
            short bitsPerSample = 16;

            if ((int)channels <= 1)
            {
                Format = bitsPerSample == 8 ? ALFormat.Mono8 : ALFormat.Mono16;
            }
            else
            {
                Format = bitsPerSample == 8 ? ALFormat.Stereo8 : ALFormat.Stereo16;
            }

            _name = "";
            _data = buffer;
#endif
        }