Example #1
        internal AudioStream(AudioBuffer buffer, AudioSource source)
        {
            this.source = source;

            codec  = new AudioCodec(buffer.File);
            format = Audio.GetalBufferFormat(codec.Channels, codec.BitDepth);

            alGenBuffers(bufferIDs.Length, bufferIDs);
            Audio.CheckALError("Could not create Buffers");

            fillInitialBuffers();

            Audio.Streams.Add(this);
        }
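For context, the usual way an OpenAL stream keeps a source fed is to generate a small ring of buffers, decode one chunk of audio into each, and queue them on the source before playback starts. The sketch below only illustrates that general pattern; `codec.GetData`, `codec.SampleRate`, `source.id` and the `alSourceQueueBuffers` binding signature are assumptions, not taken from this library, and the real `fillInitialBuffers` may differ.

        // Hedged sketch of a typical OpenAL initial-fill step, NOT this library's
        // actual fillInitialBuffers(). Names marked "assumed" are hypothetical.
        for (int i = 0; i < bufferIDs.Length; i++)
        {
            byte[] chunk = codec.GetData(out int size);           // assumed codec API
            if (size == 0)
                break;                                            // file shorter than the buffer ring

            alBufferData(bufferIDs[i], format, chunk, size, codec.SampleRate); // assumed SampleRate property
            Audio.CheckALError("Could not set stream Buffer Data");

            alSourceQueueBuffers(source.id, 1, ref bufferIDs[i]); // assumed binding signature
            Audio.CheckALError("Could not queue stream Buffer");
        }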
Example #2
        public AudioCaptureDevice(string? deviceName, int channels, int bitDepth, int sampleRate, float length)
        {
            DeviceName          = deviceName;
            Channels            = channels;
            BitDepth            = bitDepth;
            SampleRate          = sampleRate;
            bufferSizeInSamples = (length * sampleRate * (sampleRate / 44100f)).CeilToInt(); // Workaround: the capture buffer size appears to always be computed as if the sample rate were 44100, hence the extra (sampleRate / 44100) factor

            ptr = alcCaptureOpenDevice(DeviceName, (uint)SampleRate, Audio.GetalBufferFormat(Channels, BitDepth), bufferSizeInSamples);

            if (Active)
            {
                Audio.CaptureDevices.Add(this);

                // Debug
                Output.WriteLine($"OpenAL Capture Device Initialized:\n    - \"{alcGetString(ptr, alcString.CaptureDeviceSpecifier)}\"");
            }
        }
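A minimal usage sketch for the constructor above, assuming it is called from user code. Passing null as the device name asks OpenAL to open the default capture device (standard `alcCaptureOpenDevice` behavior); the chosen parameters are illustrative only.

        // Hypothetical usage: open the default capture device (null name) for
        // roughly one second of 16-bit mono audio at 44.1 kHz.
        var capture = new AudioCaptureDevice(null, channels: 1, bitDepth: 16, sampleRate: 44100, length: 1f);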
Example #3
        public AudioBuffer(byte[] data, int channels, int bitDepth, int sampleRate)
        {
            File     = "";
            Streamed = false;

            ID = alGenBuffer();
            Audio.CheckALError("Could not create Buffer");

            Size    = data.Length;
            Samples = Size / channels / (bitDepth / 8);
            Length  = (float)Samples / sampleRate;

            // Debug
            //Output.WriteLine($"Buffer: Size: {Size} Samples: {Samples} sampleRate: {sampleRate} Length: {Length}s channels: {channels} bitDepth: {bitDepth}");

            alBufferData(ID, Audio.GetalBufferFormat(channels, bitDepth), data, data.Length, sampleRate);
            Audio.CheckALError("Could not set Buffer Data");

            Audio.Buffers.Add(this);
        }
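A usage sketch for the raw-PCM constructor above: generate half a second of a 440 Hz sine tone as 16-bit little-endian mono samples and wrap it in a buffer. The tone generation is illustrative; only the constructor signature comes from the example.

        // Hypothetical usage: 0.5 s of a 440 Hz sine tone as 16-bit mono PCM.
        int sampleRate  = 44100;
        int sampleCount = sampleRate / 2;
        byte[] pcm = new byte[sampleCount * sizeof(short)];
        for (int i = 0; i < sampleCount; i++)
        {
            short s = (short)(System.Math.Sin(2.0 * System.Math.PI * 440.0 * i / sampleRate) * short.MaxValue);
            pcm[i * 2]     = (byte)(s & 0xFF);        // low byte (little-endian)
            pcm[i * 2 + 1] = (byte)((s >> 8) & 0xFF); // high byte
        }
        var tone = new AudioBuffer(pcm, channels: 1, bitDepth: 16, sampleRate: sampleRate);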
Example #4
        public AudioBuffer(string file, bool streamed = false)
        {
            if (!System.IO.File.Exists(file))
            {
                throw new FileNotFoundException("Audio file not found.", file);
            }

            File     = file;
            Streamed = streamed;

            if (Streamed)
            {
                AudioCodec codec = new AudioCodec(file);
                Size    = codec.Size;
                Samples = codec.Samples;
                Length  = codec.Length;
                codec.Destroy();
            }
            else
            {
                ID = alGenBuffer();
                Audio.CheckALError("Could not create Buffer");

                byte[] bytes = AudioCodec.Decode(file, out float length, out int channels, out int samples, out int sampleRate, out int bitDepth);
                Size    = bytes.Length;
                Samples = samples;
                Length  = length;

                alBufferData(ID, Audio.GetalBufferFormat(channels, bitDepth), bytes, bytes.Length, sampleRate);
                Audio.CheckALError("Could not set Buffer Data");
            }

            // Debug
            //Output.WriteLine($"Buffer: Size: {Size} Samples: {Samples} Length: {Length}s");

            Audio.Buffers.Add(this);
        }
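Finally, a usage sketch showing both modes of the file-based constructor above. The file paths are placeholders; as the code shows, with `streamed: true` only the codec metadata (size, sample count, length) is read up front, while the non-streamed path decodes the whole file into a single AL buffer.

        // Hypothetical usage; file paths are placeholders.
        var sfx   = new AudioBuffer("Assets/jump.wav");                  // decoded fully into an AL buffer
        var music = new AudioBuffer("Assets/music.ogg", streamed: true); // metadata only; audio data is read later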