예제 #1
0
파일: OpenAL.cs 프로젝트: ermau/Gablarski
        /// <summary>
        /// Maps an <see cref="AudioFormat"/> (channel count plus bit depth) to the
        /// corresponding <see cref="OpenALAudioFormat"/> value.
        /// </summary>
        /// <param name="format">The audio format to translate.</param>
        /// <returns>
        /// The matching OpenAL format, or <see cref="OpenALAudioFormat.Unknown"/> when the
        /// channels/bits-per-sample combination is not one of the four supported layouts.
        /// </returns>
        public static OpenALAudioFormat ToOpenALFormat(this AudioFormat format)
        {
            switch (format.Channels)
            {
            case 1:
                switch (format.BitsPerSample)
                {
                case 8:
                    return OpenALAudioFormat.Mono8Bit;

                case 16:
                    return OpenALAudioFormat.Mono16Bit;
                }
                break;

            case 2:
                switch (format.BitsPerSample)
                {
                case 8:
                    return OpenALAudioFormat.Stereo8Bit;

                case 16:
                    return OpenALAudioFormat.Stereo16Bit;
                }
                break;
            }

            // Anything other than 1-2 channels at 8/16 bits is unsupported.
            return OpenALAudioFormat.Unknown;
        }
예제 #2
0
        /// <summary>
        /// Queues <paramref name="data"/> for playback on the OpenAL source bound to
        /// <paramref name="audioSource"/>, priming the source with silent buffers when it
        /// is not already playing so playback does not underrun immediately.
        /// </summary>
        /// <param name="audioSource">The logical audio source the data belongs to.</param>
        /// <param name="data">Raw PCM data to queue; an empty array is a no-op.</param>
        /// <exception cref="ObjectDisposedException">The provider has been disposed.</exception>
        /// <exception cref="ArgumentNullException"><paramref name="audioSource"/> or <paramref name="data"/> is <c>null</c>.</exception>
        public void QueuePlayback(AudioSource audioSource, byte[] data)
        {
            if (this.isDisposed)
            {
                throw new ObjectDisposedException("OpenALPlaybackProvider");
            }
            if (audioSource == null)
            {
                throw new ArgumentNullException("audioSource");
            }
            if (data == null)
            {
                // Previously data.Length threw NullReferenceException; fail explicitly instead.
                throw new ArgumentNullException("data");
            }

            // Nothing to queue: bail out before touching the buffer map or the source pool,
            // so an empty write has no side effects. (The original requested a source and
            // set its gain before noticing the data was empty.)
            if (data.Length == 0)
            {
                return;
            }

            Stack <SourceBuffer> bufferStack;

            if (!this.buffers.TryGetValue(audioSource, out bufferStack))
            {
                this.buffers[audioSource] = bufferStack = new Stack <SourceBuffer>();
            }

            lock (this.pool.SyncRoot)
            {
                Source source = this.pool.RequestSource(audioSource);

                // Per-source gain override; fall back to the provider-wide normal gain.
                Tuple <float, float> gain;
                if (this.gains.TryGetValue(audioSource, out gain))
                {
                    source.Gain = gain.Item2;
                }
                else
                {
                    source.Gain = this.normalGain;
                }

                const int bufferLen = 4;

                if (!source.IsPlaying)
                {
                    OpenAL.DebugFormat("{0} bound to {1} isn't playing, inserting silent buffers", audioSource, source);

                    // Pre-queue a few frames of silence so the source has data ready and
                    // does not starve before the real audio arrives.
                    RequireBuffers(bufferStack, source, bufferLen);
                    for (int i = 0; i < bufferLen; ++i)
                    {
                        OpenALAudioFormat format = audioSource.CodecSettings.ToOpenALFormat();
                        SourceBuffer      wait   = bufferStack.Pop();
                        wait.Buffer(new byte[format.GetBytes((uint)audioSource.CodecSettings.FrameSize)], format, (uint)audioSource.CodecSettings.SampleRate);
                        source.QueueAndPlay(wait);
                    }
                }

                RequireBuffers(bufferStack, source, 1);
                SourceBuffer buffer = bufferStack.Pop();

                buffer.Buffer(data, audioSource.CodecSettings.ToOpenALFormat(), (uint)audioSource.CodecSettings.SampleRate);
                source.QueueAndPlay(buffer);
            }
        }
예제 #3
0
        /// <summary>
        /// Create a capture stream on given device.
        /// </summary>
        /// <param name="sampleRate">Audio sample rate.</param>
        /// <param name="format">Capture format.</param>
        /// <param name="deviceName">Name of device to capture audio from.</param>
        /// <param name="bufferSizeMs">Size of the audio buffer in milliseconds.</param>
        /// <exception cref="ArgumentNullException"><paramref name="deviceName"/> is <c>null</c>.</exception>
        /// <exception cref="ArgumentOutOfRangeException"><paramref name="bufferSizeMs"/> is not positive.</exception>
        internal CaptureStream(int sampleRate, OpenALAudioFormat format, string deviceName, int bufferSizeMs)
        {
            if (deviceName == null)
            {
                throw new ArgumentNullException("deviceName");
            }
            if (bufferSizeMs <= 0)
            {
                throw new ArgumentOutOfRangeException("bufferSizeMs");
            }

            // sampleRate / (1000 / bufferSizeMs) loses precision to integer division
            // (e.g. 30ms -> 1000/30 == 33) and divides by zero for bufferSizeMs > 1000;
            // compute the number of samples in the window directly instead.
            var samplesPerBuffer = (int)((long)sampleRate * bufferSizeMs / 1000);

            _samplesPerBuffer = samplesPerBuffer;

            // Bytes per sample frame: 8-bit mono = 1; 16-bit doubles it, stereo doubles it again.
            _bytesPerSample = 1;
            switch (format)
            {
            case OpenALAudioFormat.Mono16Bit:
                _bytesPerSample = 2;
                break;

            case OpenALAudioFormat.Stereo8Bit:
                _bytesPerSample = 2;
                break;

            case OpenALAudioFormat.Stereo16Bit:
                _bytesPerSample = 4;
                break;
            }

            var bufferSize = samplesPerBuffer * _bytesPerSample;

            // Request 4x the window size so capture can run ahead of reads without dropping data.
            _device = API.alcCaptureOpenDevice(deviceName, (uint)sampleRate, format, bufferSize * 4);
        }
예제 #4
0
 /// <summary>
 /// Opens a new playback stream on this device.
 /// </summary>
 /// <param name="sampleRate">Sample rate of the stream.</param>
 /// <param name="format">Audio format of the stream.</param>
 /// <returns>The newly created playback stream.</returns>
 public PlaybackStream OpenStream(uint sampleRate, OpenALAudioFormat format)
 {
     EnsureDeviceIsOpen();

     var stream = new PlaybackStream(sampleRate, format, this, _context);

     // Track the stream so the device can manage it later.
     lock (_streams)
     {
         _streams.Add(stream);
     }

     return stream;
 }
예제 #5
0
 /// <summary>
 /// Initializes a new OpenALAudioBuffer bound to the given format and owning device.
 /// </summary>
 /// <param name="format">The OpenAL audio format for this buffer.</param>
 /// <param name="device">The device that owns this buffer.</param>
 public OpenALAudioBuffer(OpenALAudioFormat format, OpenALDevice device)
 {
     Owner = device;
     Format = format;
     PlaybackState = PlaybackState.Stopped;

     _locker = new object();
     _audioMixer = new AudioMixer();

     // Lease a playback source from the owning device's pool.
     _source = Owner.SourcePool.RequestSource();
 }
예제 #6
0
        /// <summary>
        /// Opens a playback stream on this device and registers it with the device's stream list.
        /// </summary>
        /// <param name="sampleRate">Sample rate for the new stream.</param>
        /// <param name="format">Audio format for the new stream.</param>
        /// <returns>The newly created playback stream.</returns>
        public PlaybackStream OpenStream(uint sampleRate, OpenALAudioFormat format)
        {
            EnsureDeviceIsOpen();

            var stream = new PlaybackStream(sampleRate, format, this, _context);

            // Keep every open stream so the device can tear them down later.
            lock (_streams)
            {
                _streams.Add(stream);
            }

            return stream;
        }
예제 #7
0
 /// <summary>
 /// Initializes a playback stream bound to the given device/context pair.
 /// </summary>
 /// <param name="sampleRate">Sample rate of the stream.</param>
 /// <param name="format">Audio format of the stream.</param>
 /// <param name="device">The playback device that owns the stream.</param>
 /// <param name="context">The OpenAL context handle the stream operates in.</param>
 internal PlaybackStream(uint sampleRate, OpenALAudioFormat format, PlaybackDevice device, IntPtr context)
 {
     _context = context;
     _device = device;
     _format = format;
     _sampleRate = sampleRate;

     CreateSource();

     // The listener is tied to the same OpenAL context as the stream.
     Listener = new Listener(_context);
 }
예제 #8
0
 /// <summary>
 /// Initializes a new OpenALAudioBuffer class.
 /// </summary>
 /// <param name="format">The OpenAL audio format for this buffer.</param>
 /// <param name="device">The device that owns the buffer.</param>
 public OpenALAudioBuffer(OpenALAudioFormat format, OpenALDevice device)
 {
     Format = format;
     Owner = device;
     PlaybackState = PlaybackState.Stopped;

     _audioMixer = new AudioMixer();
     _locker = new object();

     // Borrow a playback source from the owning device's pool.
     _source = Owner.SourcePool.RequestSource();
 }
예제 #9
0
 /// <summary>
 /// Creates a playback stream on the given device within the given OpenAL context.
 /// </summary>
 /// <param name="sampleRate">Sample rate of the stream.</param>
 /// <param name="format">Audio format of the stream.</param>
 /// <param name="device">Owning playback device.</param>
 /// <param name="context">OpenAL context handle.</param>
 internal PlaybackStream(uint sampleRate, OpenALAudioFormat format, PlaybackDevice device, IntPtr context)
 {
     _sampleRate = sampleRate;
     _format = format;
     _device = device;
     _context = context;

     CreateSource();

     // The listener shares the stream's OpenAL context.
     Listener = new Listener(_context);
 }
예제 #10
0
        /// <summary>
        /// Creates a new audio buffer for the given format and registers it with this device.
        /// </summary>
        /// <param name="format">The OpenAL audio format of the buffer.</param>
        /// <returns>The newly created <c>OpenALAudioBuffer</c>.</returns>
        internal OpenALAudioBuffer CreateAudioBuffer(OpenALAudioFormat format)
        {
            var buffer = new OpenALAudioBuffer(format, this);

            // Track every buffer handed out so the device can manage them later.
            lock (_audioBuffers)
                _audioBuffers.Add(buffer);

            return buffer;
        }
예제 #11
0
파일: OpenAL.cs 프로젝트: ermau/Gablarski
        /// <summary>
        /// Gets the number of samples per second for this format at the given frequency:
        /// the frequency itself for mono, doubled for stereo (two channels).
        /// Unrecognized formats are treated as mono.
        /// </summary>
        /// <param name="self">The OpenAL audio format.</param>
        /// <param name="frequency">The sample frequency in Hz.</param>
        /// <returns>Samples per second across all channels.</returns>
        public static uint GetSamplesPerSecond(this OpenALAudioFormat self, uint frequency)
        {
            bool stereo = (self == OpenALAudioFormat.Stereo8Bit) || (self == OpenALAudioFormat.Stereo16Bit);

            return stereo ? frequency * 2 : frequency;
        }
예제 #12
0
파일: OpenAL.cs 프로젝트: ermau/Gablarski
        /// <summary>
        /// Gets the number of bytes occupied by a single sample of this format:
        /// 1 for 8-bit mono, 2 for 16-bit mono or 8-bit stereo, 4 for 16-bit stereo.
        /// Unrecognized formats fall back to 1, matching the original default case.
        /// </summary>
        /// <param name="self">The OpenAL audio format.</param>
        /// <returns>Bytes per sample.</returns>
        public static uint GetBytesPerSample(this OpenALAudioFormat self)
        {
            if (self == OpenALAudioFormat.Stereo16Bit)
            {
                return 4;
            }

            if (self == OpenALAudioFormat.Mono16Bit || self == OpenALAudioFormat.Stereo8Bit)
            {
                return 2;
            }

            // Mono8Bit and any unknown value.
            return 1;
        }
예제 #13
0
        /// <summary>
        /// Opens the capture device with the specified <paramref name="frequency"/> and <paramref name="format"/>.
        /// </summary>
        /// <param name="frequency">The frequency to open the capture device with.</param>
        /// <param name="format">The audio format to open the device with.</param>
        /// <returns>Returns <c>this</c>, for call chaining.</returns>
        public CaptureDevice Open(uint frequency, OpenALAudioFormat format)
        {
            ThrowIfDisposed();

            OpenAL.DebugFormat("Opening capture device {0} at {1} {2}", Name, frequency, format);

            this.Frequency = frequency;
            this.Format    = format;

            // Two seconds' worth of audio: bytes for one second of samples, doubled.
            uint samplesPerSecond = format.GetSamplesPerSecond(frequency);
            uint bufferSize       = 2 * format.GetBytes(samplesPerSecond);

            this.Handle = alcCaptureOpenDevice(this.Name, frequency, format, (int)bufferSize);
            OpenAL.ErrorCheck(this);

            // Scratch buffer reused by subsequent reads from the device.
            pcm = new byte[bufferSize];

            return this;
        }
예제 #14
0
        /// <summary>
        /// Create a capture stream on given device.
        /// </summary>
        /// <param name="sampleRate">Audio sample rate.</param>
        /// <param name="format">Capture format.</param>
        /// <param name="deviceName">Name of device to capture audio from.</param>
        /// <param name="bufferSizeMs">Size of the audio buffer in milliseconds.</param>
        /// <exception cref="ArgumentNullException"><paramref name="deviceName"/> is <c>null</c>.</exception>
        /// <exception cref="ArgumentOutOfRangeException"><paramref name="bufferSizeMs"/> is not positive.</exception>
        internal CaptureStream(int sampleRate, OpenALAudioFormat format, string deviceName, int bufferSizeMs)
        {
            if (deviceName == null) throw new ArgumentNullException("deviceName");
            if (bufferSizeMs <= 0) throw new ArgumentOutOfRangeException("bufferSizeMs");

            // sampleRate / (1000 / bufferSizeMs) truncates to integer division (and divides
            // by zero when bufferSizeMs > 1000); compute the window's sample count directly.
            var samplesPerBuffer = (int)((long)sampleRate * bufferSizeMs / 1000);
            _samplesPerBuffer = samplesPerBuffer;

            // Bytes per sample frame: 16-bit doubles 8-bit, stereo doubles mono.
            _bytesPerSample = 1;
            switch (format)
            {
                case OpenALAudioFormat.Mono16Bit:
                    _bytesPerSample = 2;
                    break;
                case OpenALAudioFormat.Stereo8Bit:
                    _bytesPerSample = 2;
                    break;
                case OpenALAudioFormat.Stereo16Bit:
                    _bytesPerSample = 4;
                    break;
            }

            var bufferSize = samplesPerBuffer * _bytesPerSample;
            // 4x headroom so capture can run ahead of reads without dropping data.
            _device = API.alcCaptureOpenDevice(deviceName, (uint)sampleRate, format, bufferSize * 4);
        }
예제 #15
0
 // P/Invoke: fills the OpenAL buffer identified by bufferID with raw sample data of the
 // given format and frequency; byteSize is the length of data in bytes.
 // NOTE(review): no DllImport attribute is visible in this view — presumably applied
 // where this declaration lives; confirm against the full file.
 private static extern void alBufferData(uint bufferID, OpenALAudioFormat format, byte[] data, int byteSize, uint frequency);
예제 #16
0
파일: OpenAL.cs 프로젝트: ermau/Gablarski
 /// <summary>
 /// Converts a sample count to a byte count for this format.
 /// </summary>
 /// <param name="self">The OpenAL audio format.</param>
 /// <param name="samples">Number of samples.</param>
 /// <returns>The number of bytes those samples occupy.</returns>
 public static uint GetBytes(this OpenALAudioFormat self, uint samples)
 {
     uint bytesPerSample = self.GetBytesPerSample();

     return samples * bytesPerSample;
 }
예제 #17
0
 // P/Invoke: fills the OpenAL buffer identified by bufferID with raw sample data of the
 // given format and frequency; byteSize is the length of data in bytes.
 // NOTE(review): the DllImport attribute is not visible in this view — confirm it is
 // present where this declaration lives.
 internal static extern void alBufferData(uint bufferID, OpenALAudioFormat format, byte[] data, int byteSize, uint frequency);
예제 #18
0
 /// <summary>
 /// Open a capture stream to capture audio from the device.
 /// </summary>
 /// <param name="sampleRate">Audio sample rate.</param>
 /// <param name="format">Audio format.</param>
 /// <param name="bufferSizeMs">Buffer size in milliseconds. Read operations will provide data in multiples of this (e.g. 10ms yields data in multiples of 10 — 10ms, 20ms, 30ms — depending on how often data is read).</param>
 /// <returns>A new capture stream bound to this device by name.</returns>
 public CaptureStream OpenStream(int sampleRate, OpenALAudioFormat format, int bufferSizeMs)
 {
     var capture = new CaptureStream(sampleRate, format, DeviceName, bufferSizeMs);

     return capture;
 }
예제 #19
0
        /// <summary>
        /// Opens the capture device with the specified <paramref name="frequency"/> and <paramref name="format"/>.
        /// </summary>
        /// <param name="frequency">The frequency to open the capture device with.</param>
        /// <param name="format">The audio format to open the device with.</param>
        /// <returns>Returns <c>this</c>, for call chaining.</returns>
        public CaptureDevice Open(uint frequency, OpenALAudioFormat format)
        {
            ThrowIfDisposed();

            OpenAL.DebugFormat ("Opening capture device {0} at {1} {2}", Name, frequency, format);

            this.Frequency = frequency;
            this.Format = format;

            // Buffer two seconds' worth of audio: bytes for one second of samples, doubled.
            uint samplesPerSecond = format.GetSamplesPerSecond (frequency);
            uint bufferSize = format.GetBytes (samplesPerSecond) * 2;

            this.Handle = alcCaptureOpenDevice (this.Name, frequency, format, (int)bufferSize);
            OpenAL.ErrorCheck (this);

            // Reusable scratch buffer for reads from the device.
            pcm = new byte[bufferSize];

            return this;
        }
예제 #20
0
 // P/Invoke: opens an audio capture device by name at the given frequency/format, with
 // an internal ring buffer of bufferSize sample frames; returns the device handle
 // (IntPtr.Zero on failure per the ALC convention — TODO confirm at call sites).
 private static extern IntPtr alcCaptureOpenDevice(string deviceName, uint frequency, OpenALAudioFormat format, int bufferSize);
예제 #21
0
 /// <summary>
 /// Open a capture stream to capture audio from the device.
 /// </summary>
 /// <param name="sampleRate">Audio sample rate.</param>
 /// <param name="format">Audio format.</param>
 /// <param name="bufferSizeMs">Buffer size in milliseconds; reads return data in multiples of this window (e.g. 10ms yields 10ms, 20ms, 30ms, ...).</param>
 /// <returns>A new capture stream over this device.</returns>
 public CaptureStream OpenStream(int sampleRate, OpenALAudioFormat format, int bufferSizeMs)
 {
     var stream = new CaptureStream(sampleRate, format, DeviceName, bufferSizeMs);

     return stream;
 }
예제 #22
0
 // P/Invoke: opens an audio capture device by name at the given frequency/format, with
 // an internal ring buffer of bufferSize sample frames; returns the device handle.
 // NOTE(review): DllImport attribute not visible in this view — confirm where declared.
 internal static extern IntPtr alcCaptureOpenDevice(string deviceName, uint frequency, OpenALAudioFormat format, int bufferSize);
예제 #23
0
        /// <summary>
        /// Creates a new AudioBuffer.
        /// </summary>
        /// <param name="format">The AudioFormat.</param>
        /// <returns>OpenALAudioBuffer.</returns>
        internal OpenALAudioBuffer CreateAudioBuffer(OpenALAudioFormat format)
        {
            var audioBuffer = new OpenALAudioBuffer(format, this);
            lock (_audioBuffers)
            {
                _audioBuffers.Add(audioBuffer);
            }

            return audioBuffer;
        }
예제 #24
0
 /// <summary>
 /// Uploads raw sample data into this OpenAL buffer.
 /// </summary>
 /// <param name="data">Sample data to upload; its full length is used.</param>
 /// <param name="format">Format describing the layout of <paramref name="data"/>.</param>
 /// <param name="frequency">Sample frequency of the data in Hz.</param>
 public void Buffer(byte[] data, OpenALAudioFormat format, uint frequency)
 {
     int byteCount = data.Length;

     alBufferData(this.bufferID, format, data, byteCount, frequency);

     // Surface any AL error raised by the upload.
     OpenAL.ErrorCheck();
 }