示例#1
0
        /// <summary>
        /// Entry point: continuously captures 8 kHz mono 8-bit audio, runs a
        /// 256-point FFT over each frame and pushes the magnitude spectrum
        /// to all registered writers.
        /// </summary>
        /// <param name="args">
        /// Command-line arguments (unused).
        /// </param>
        static void Main(string[] args)
        {
            var audioBuffer = new byte[256]; // raw unsigned 8-bit samples
            var fftData = new byte[256];     // per-bin magnitudes sent to writers
            var fft = new double[256];       // FFT work buffer (interleaved re/im after transform)
            float amplitude = 10.0f;         // fixed pre-FFT gain

            var fftTransformer = new LomontFFT();

            var writers = new List<IWriter>
            {
                new KeyboardWriter(),
                new ConsoleWriter(),
            };

            var audioCapture = new AudioCapture(AudioCapture.DefaultDevice, 8000, ALFormat.Mono8, 256);
            audioCapture.Start();
            audioCapture.ReadSamples(audioBuffer, 256); // prime the capture buffer

            while (true)
            {
                for (int j = 0; j < 92; j++)
                {
                    // Reset the working buffers for this frame.
                    Array.Clear(audioBuffer, 0, audioBuffer.Length);
                    Array.Clear(fftData, 0, fftData.Length);
                    Array.Clear(fft, 0, fft.Length);

                    audioCapture.ReadSamples(audioBuffer, 256);

                    // Center the unsigned samples around zero and apply gain.
                    for (int i = 0; i < 256; i++)
                    {
                        fft[i] = (audioBuffer[i] - 128) * amplitude;
                    }

                    fftTransformer.TableFFT(fft, true);

                    // Convert interleaved (re, im) pairs to magnitudes; each
                    // magnitude is duplicated into both slots so fftData stays
                    // 256 entries wide. (The old fftavg accumulator was removed:
                    // it was written but never read.)
                    for (int i = 0; i < 256; i += 2)
                    {
                        double fftmag = Math.Sqrt((fft[i] * fft[i]) + (fft[i + 1] * fft[i + 1]));
                        fftData[i] = (byte)fftmag;
                        fftData[i + 1] = fftData[i];
                    }

                    writers.ForEach(x => x.Write(j, fftData));

                    Thread.Sleep(20);
                }
            }
        }
        // Drains any samples pending on the capture device, queues them on the
        // OpenAL playback source, and (re)starts playback if it has stalled.
        void UpdateSamples()
        {
            // No capture device opened yet — nothing to do.
            if (audio_capture == null)
            {
                return;
            }

            int available_samples = audio_capture.AvailableSamples;

            // Grow the staging buffer to the next power of two when the pending
            // data would not fit. SampleToByte presumably converts a sample
            // count to bytes — TODO confirm against its declaration.
            if (available_samples * SampleToByte > buffer.Length * BlittableValueType.StrideOf(buffer))
            {
                buffer = new short[MathHelper.NextPowerOfTwo(
                                       (int)(available_samples * SampleToByte / (double)BlittableValueType.StrideOf(buffer) + 0.5))];
            }

            if (available_samples > 0)
            {
                audio_capture.ReadSamples(buffer, available_samples);

                // Copy the captured PCM into a fresh OpenAL buffer and queue it
                // on the playback source.
                int buf = AL.GenBuffer();
                AL.BufferData(buf, ALFormat.Mono16, buffer, (int)(available_samples * BlittableValueType.StrideOf(buffer)), audio_capture.SampleFrequency);
                AL.SourceQueueBuffer(src, buf);

                label_SamplesConsumed.Text = "Samples consumed: " + available_samples;

                // Kick the source again if it ran dry while we were queueing.
                if (AL.GetSourceState(src) != ALSourceState.Playing)
                {
                    AL.SourcePlay(src);
                }
            }

            // Release buffers the source has finished playing.
            ClearBuffers(0);
        }
示例#3
0
        /// <summary>
        /// Blocking capture loop: reads raw sample bytes from
        /// <paramref name="device"/> and writes them to <c>Stream</c> until
        /// <c>AudioHandler.Running</c> is cleared.
        /// </summary>
        /// <param name="device">Name of the capture device to open.</param>
        public void CaptureLoop(string device)
        {
            using (AudioContext context = new AudioContext())
            // The capture device is a native resource; dispose it so the
            // device handle is released even if the loop throws.
            using (AudioCapture capture = new AudioCapture(device, Frequency, Format, BufferSize))
            {
                Console.WriteLine("Starting capture loop.");
                capture.Start();
                byte[] buffer = new byte[BufferSize];

                Console.WriteLine("Started capture loop.");
                while (AudioHandler.Running)
                {
                    int samples = capture.AvailableSamples;
                    if (samples > 0)
                    {
                        // Never read more than the staging buffer can hold.
                        if (samples > BufferSize / SampleSize)
                        {
                            samples = BufferSize / SampleSize;
                        }
                        capture.ReadSamples(buffer, samples);
                        Stream.Write(buffer, 0, samples * SampleSize);
                    }
                    else
                    {
                        // Yield briefly instead of hot-spinning while the
                        // device fills its internal buffer.
                        Thread.Sleep(1);
                    }
                }
                capture.Stop();
                Console.WriteLine("Finished capture loop.");
                AudioHandler.Running = false;
            }
        }
示例#4
0
        // Timer callback: pulls any pending samples from the capture device,
        // raises the recorded-data callback, then re-arms the one-shot timer.
        private void OnRecording(object state)
        {
            lock (syncObj)
            {
                if (capture == null || !capture.IsRunning)
                {
                    return;
                }

                int samples = capture.AvailableSamples;

                if (samples > 0)
                {
                    // Bytes required for the pending samples; grow the buffer
                    // with head-room (x2) so we do not reallocate every tick.
                    int required = samples * quality.Channels * (quality.Bits / 8);
                    if (required > buffer.Length)
                    {
                        buffer = new byte[required * 2];
                    }

                    capture.ReadSamples(buffer, samples);

                    // Atomic read of the handler delegate before invoking it.
                    var handler = Interlocked.CompareExchange(ref recorded, null, null);
                    handler?.Invoke(this, new RecordedEventArgs(buffer, samples, quality.Channels, quality.Bits, quality.Frequency));
                }

                // Re-arm the one-shot timer while capture is still active.
                if (systemTimer != null && capture.IsRunning)
                {
                    systemTimer.Change(GetTimerTimeOut(), -1);
                }
            }
        }
示例#5
0
        // Captures one buffer's worth of audio and queues the raw bytes for the
        // receiving application.
        private void UpdateSamples()
        {
            // Allocating a fresh array every call is deliberate: the previous
            // array was enqueued by reference below and must not be overwritten.
            buffer = new byte[buffer.Length];
            audio_capture.ReadSamples(buffer, buffer.Length / SampleToByte);             //Need to divide as the readsamples expects the value to be in 2 bytes.

            //Queue raw data, let receiving application determine if it needs to compress
            this.microphoneData.Enqueue(buffer);
            // Release any playback buffers that have finished.
            ClearBuffers(0);
        }
示例#6
0
        // Polls the capture device, appends new samples to the pending buffer,
        // flushes fixed-size sections via AddSection, recycles finished playback
        // buffers, and restarts the playback source if it stalled.
        public void Tick()
        {
            if (Capture == null)
            {
                return;
            }
            int asamps = Capture.AvailableSamples;

            if (asamps > 0)
            {
                // 16-bit samples: byte lengths/offsets are sample counts * 2.
                Capture.ReadSamples(buffer, asamps);
                Array.Copy(buffer, 0, tempbuf, tempasamps * 2, asamps * 2);
                tempasamps += asamps;
                stat_bytes += asamps * 2;
                int b = 0;
                // Drain pending samples in progressively smaller section sizes
                // (960/320/80/40) — presumably codec frame sizes; confirm.
                while ((tempasamps - b) >= 960)
                {
                    AddSection(b, 960);
                    b += 960;
                }
                // Are the below while loops needed?
                while ((tempasamps - b) >= 320)
                {
                    AddSection(b, 320);
                    b += 320;
                }
                while ((tempasamps - b) >= 80)
                {
                    AddSection(b, 80);
                    b += 80;
                }
                while ((tempasamps - b) >= 40)
                {
                    AddSection(b, 40);
                    b += 40;
                }
                // Shift any unconsumed tail back to the front of tempbuf.
                // NOTE(review): this copy treats b / tempasamps as BYTE offsets,
                // while the append above used tempasamps * 2 — verify the units
                // against AddSection's contract.
                if (tempasamps - b > 0)
                {
                    byte[] tbuf = new byte[tempbuf.Length];
                    Array.Copy(tempbuf, b, tbuf, 0, tempasamps - b);
                    tempbuf = tbuf;
                }
                tempasamps -= b;
                // Recycle playback buffers the source has finished with.
                int bufc;
                AL.GetSource(PlaybackSrc, ALGetSourcei.BuffersProcessed, out bufc);
                if (bufc > 0)
                {
                    int[] bufs = AL.SourceUnqueueBuffers(PlaybackSrc, bufc);
                    AL.DeleteBuffers(bufs);
                }
                if (AL.GetSourceState(PlaybackSrc) != ALSourceState.Playing)
                {
                    AL.SourcePlay(PlaybackSrc);
                }
            }
        }
示例#7
0
        /// <summary>
        /// Reads one full buffer of unsigned 8-bit samples from the device and
        /// converts them to signed doubles centered around zero.
        /// </summary>
        /// <param name="audioCapture">Open capture device to read from.</param>
        /// <param name="audioBuffer">Destination for the raw unsigned samples.</param>
        /// <returns>The centered samples as an array of doubles.</returns>
        private static double[] Capture(AudioCapture audioCapture, byte[] audioBuffer)
        {
            audioCapture.ReadSamples(audioBuffer, audioBuffer.Length);

            var centered = new double[audioBuffer.Length];
            for (int index = 0; index < centered.Length; index++)
            {
                centered[index] = audioBuffer[index] - 128;
            }

            return centered;
        }
示例#8
0
        // Returns a freshly-filled AudioBuffer when the device has samples
        // pending, or null when there is nothing to read yet.
        public AudioBuffer nextBuffer()
        {
            int pending = myDevice.AvailableSamples;

            if (pending <= 0)
            {
                return null;
            }

            var filled = new AudioBuffer(myFormat, myFrequency);
            filled.size = filled.calculateBufferSize(myFormat, pending);
            myDevice.ReadSamples(filled.data, pending);
            return filled;
        }
示例#9
0
        // Captures one 256-sample frame, runs it through the FFT and forwards
        // the result to the keyboard visualizer. No-op unless the checkbox is
        // enabled and a capturer exists.
        public void Run()
        {
            if (!ChEnable.Checked)
            {
                return;
            }

            if (AudioCapturer == null)
            {
                return;
            }


            //for (int j = 0; j < 92; j++)
            {
                // reset mem
                for (int i = 0; i < 256; i++)
                {
                    audioBuffer[i] = 0;
                    fftData[i]     = 0;
                    fft[i]         = 0;
                }

                AudioCapturer.ReadSamples(audioBuffer, 256);

                // Center the unsigned 8-bit samples around zero and apply gain.
                for (int i = 0; i < 256; i++)
                {
                    fft[i] = (audioBuffer[i] - 128) * amplitude;
                }

                fftTransoformer.TableFFT(fft, true);

                // Convert interleaved (re, im) pairs to per-bin magnitudes.
                // NOTE(review): fftavg accumulates across calls and its
                // averaging divide is commented out below — confirm intent.
                for (int i = 0; i < 256; i += 2)
                {
                    double fftmag = Math.Sqrt((fft[i] * fft[i]) + (fft[i + 1] * fft[i + 1]));
                    fftavg        += fftmag;
                    fftData[i]     = (byte)fftmag;
                    fftData[i + 1] = fftData[i];
                }

                //fftavg /= 10;

                // NOTE(review): fftData (the magnitudes) is computed above but
                // never used — WriteAudio receives the raw fft array instead.
                // Confirm whether fftData was meant to be passed here.
                Main.Keyboard.WriteAudio(fft);
            }
        }
示例#10
0
        // Timer callback: pulls pending samples from the capture device, raises
        // the recorded-data callback, then either re-arms the one-shot timer or
        // tears it down once capture has stopped.
        private void OnRecording(object state)
        {
            lock (_syncObj)
            {
                if (_capture == null)
                {
                    return;
                }

                var availableSamples = _capture.AvailableSamples;
                if (availableSamples > 0)
                {
                    // Bytes required for the pending samples; grow the buffer
                    // with head-room (x2) so we do not reallocate every tick.
                    var availableDataSize = availableSamples * _quality.Channels * (_quality.Bits / 8);
                    if (availableDataSize > _buffer.Length)
                    {
                        _buffer = new byte[availableDataSize * 2];
                    }

                    _capture.ReadSamples(_buffer, availableSamples);

                    // Atomic read of the handler delegate before invoking it.
                    var temp = Interlocked.CompareExchange(ref _recorded, null, null);
                    if (temp != null)
                    {
                        temp(this, new RecordedEventArgs(_buffer, availableSamples, _quality.Channels, _quality.Bits, _quality.Frequency));
                    }
                }

                // Re-arm while capture is running; otherwise dispose the timer
                // so no further callbacks fire. Both paths run under the lock.
                if (_capture.IsRunning)
                {
                    _captureTimer.Change(GetTimerTimeOut(), -1);
                }
                else
                {
                    _captureTimer.Dispose();
                    _captureTimer = null;
                }
            }
        }
示例#11
0
        /// <summary>
        /// Reads one buffer of samples, auto-scales them by their peak value,
        /// and plots the first <c>Largeur</c> bins as colored pixels.
        /// </summary>
        /// <param name="audioCapture">Open capture device to read from.</param>
        /// <param name="audioBuffer">Destination for the raw unsigned samples.</param>
        private static void Graph(AudioCapture audioCapture, byte[] audioBuffer)
        {
            double[] fft = new double[audioBuffer.Length];
            audioCapture.ReadSamples(audioBuffer, audioBuffer.Length);

            // Center the unsigned samples around zero FIRST, then derive the
            // scaling amplitude from their peak. (Previously the max was taken
            // over the freshly allocated all-zero array, so the amplitude was
            // always (101 - 0) / 100 * 0.5 and never adapted to the signal.)
            for (int i = 0; i < audioBuffer.Length; i++)
            {
                fft[i] = audioBuffer[i] - 128;
            }

            double max       = fft.Max(a => Math.Abs(a));
            double amplitude = (101 - max) / 100 * 0.5;

            for (int i = 0; i < fft.Length; i++)
            {
                fft[i] *= amplitude;
            }

            for (int x = 0; x < Util.Context.Largeur; x++)
            {
                int  y   = (int)(fft[x * 2]) + 10;
                byte red = (byte)Math.Abs(fft[x * 2] * 11);

                if (Util.Context.Pixels.GetCoordonnee(x, y) is Pixel pixel)
                {
                    pixel.Set(red, 0, 127 - red);
                }
            }
        }
示例#12
0
        // Opens the default capture device, records roughly 100 ms of audio,
        // and inspects the result, logging any OpenAL errors along the way.
        public RecorderDiagnostic()
        {
            Trace.WriteLine("--- AudioCapture related errors ---");
            IsDeviceAvailable = false;

            try
            {
                r = new AudioCapture(AudioCapture.DefaultDevice, 16000, ALFormat.Mono16, 4096);
            }
            catch (AudioDeviceException ade)
            {
                Trace.WriteLine("AudioCapture Exception caught: " + ade.Message);
                return;
            }

            IsDeviceAvailable = true;
            DeviceName = r.CurrentDevice;
            CheckRecorderError("Alc.CaptureOpenDevice");

            // Record for about a tenth of a second.
            r.Start();
            CheckRecorderError("Alc.CaptureStart");
            Thread.Sleep(100);
            r.Stop();
            CheckRecorderError("Alc.CaptureStop");

            byte[] Buffer = new byte[8192];

            Thread.Sleep(10);  // Wait for a few samples to become available.
            int SamplesBefore = r.AvailableSamples;
            CheckRecorderError("Alc.GetInteger(...CaptureSamples...)");

            r.ReadSamples(Buffer, Math.Min(SamplesBefore, 4096));
            CheckRecorderError("Alc.CaptureSamples");

            int SamplesCaptured = SamplesBefore - r.AvailableSamples;

            // Count zero bytes to detect a silent or dead input.
            uint ZeroCounter = 0;
            for (int i = 0; i < SamplesCaptured * 2; i++)
            {
                if (Buffer[i] == 0)
                {
                    ZeroCounter++;
                }
            }

            // Track the dynamic range of the captured 16-bit samples.
            for (int i = 0; i < SamplesCaptured; i++)
            {
                short sample = BitConverter.ToInt16(Buffer, i * 2);
                if (sample > MaxSample)
                {
                    MaxSample = sample;
                }
                if (sample < MinSample)
                {
                    MinSample = sample;
                }
            }

            // All-zero unless at least one non-zero byte was captured.
            BufferContentsAllZero = !(ZeroCounter < SamplesCaptured * 2 && SamplesCaptured > 0);

            r.Dispose();
            CheckRecorderError("Alc.CaptureCloseDevice");

            // No playback test needed due to the Parrot test app.
        }
        // Opens the default capture device, records roughly 100 ms of audio,
        // and inspects the result, logging any OpenAL errors along the way.
        public RecorderDiagnostic()
        {
            Trace.WriteLine("--- AudioCapture related errors ---");
            IsDeviceAvailable = false;

            try
            {
                r = new AudioCapture(AudioCapture.DefaultDevice, 16000, ALFormat.Mono16, 4096);
            }
            catch (AudioDeviceException ade)
            {
                Trace.WriteLine("AudioCapture Exception caught: " + ade.Message);
                return;
            }
            IsDeviceAvailable = true;
            DeviceName        = r.CurrentDevice;
            CheckRecorderError("Alc.CaptureOpenDevice");

            // Record for about a tenth of a second.
            r.Start();
            CheckRecorderError("Alc.CaptureStart");
            Thread.Sleep(100);
            r.Stop();
            CheckRecorderError("Alc.CaptureStop");

            byte[] Buffer = new byte[8192];

            Thread.Sleep(10);  // Wait for a few samples to become available.
            int SamplesBefore = r.AvailableSamples;

            CheckRecorderError("Alc.GetInteger(...CaptureSamples...)");
            // Read at most 4096 samples (8192 bytes of Mono16) into Buffer.
            r.ReadSamples(Buffer, (SamplesBefore > 4096 ? 4096 : SamplesBefore));
            CheckRecorderError("Alc.CaptureSamples");

            int SamplesCaptured = SamplesBefore - r.AvailableSamples;

            // Count zero bytes to detect a silent or dead input.
            uint ZeroCounter = 0;

            for (int i = 0; i < SamplesCaptured * 2; i++)
            {
                if (Buffer[i] == 0)
                {
                    ZeroCounter++;
                }
            }

            // Track the dynamic range of the captured 16-bit samples.
            for (int i = 0; i < SamplesCaptured; i++)
            {
                short sample = BitConverter.ToInt16(Buffer, i * 2);
                if (sample > MaxSample)
                {
                    MaxSample = sample;
                }
                if (sample < MinSample)
                {
                    MinSample = sample;
                }
            }

            // All-zero unless at least one non-zero byte was captured.
            if (ZeroCounter < SamplesCaptured * 2 && SamplesCaptured > 0)
            {
                BufferContentsAllZero = false;
            }
            else
            {
                BufferContentsAllZero = true;
            }

            r.Dispose();
            CheckRecorderError("Alc.CaptureCloseDevice");

            // no playback test needed due to Parrot test app.

            /*
             * uint buf;
             * AL.GenBuffer(out buf);
             * AL.BufferData(buf, ALFormat.Mono16, BufferPtr, SamplesCaptured * 2, 16000);
             * uint src;
             * AL.GenSource(out src);
             * AL.BindBufferToSource(src, buf);
             * AL.Listener(ALListenerf.Gain, 16.0f);
             * AL.SourcePlay(src);
             * while (AL.GetSourceState(src) == ALSourceState.Playing)
             * {
             *  Thread.Sleep(0);
             * }
             * AL.SourceStop(src);
             *
             * AL.DeleteSource(ref src);
             * AL.DeleteBuffer(ref buf);
             */
        }
示例#14
0
        /// <summary>
        /// Records <c>_configuration.SecondsToRecord</c> seconds of 16-bit mono
        /// audio from the first available capture device into a new WAV file.
        /// </summary>
        /// <returns>A completed task holding the path of the WAV file written.</returns>
        /// <exception cref="InvalidOperationException">No capture devices are available.</exception>
        public Task <string> RecordToWav()
        {
            Directory.CreateDirectory($"./{_configuration.WavFilesFolderName}");

            var wavFile = $"./{_configuration.WavFilesFolderName}/{Guid.NewGuid()}.wav";

            var recorders = AudioCapture.AvailableDevices;

            for (int i = 0; i < recorders.Count; i++)
            {
                Console.WriteLine(recorders[i]);
            }
            Console.WriteLine("-----");

            // Fail fast with a clear message instead of crashing later on
            // recorders[0] when no capture device exists.
            if (recorders.Count == 0)
            {
                throw new InvalidOperationException("No audio capture devices are available.");
            }

            const int samplingRate = 44100;     // Samples per second

            const ALFormat alFormat      = ALFormat.Mono16;
            const ushort   bitsPerSample = 16;  // Mono16 has 16 bits per sample
            const ushort   numChannels   = 1;   // Mono16 has 1 channel

            using (var f = File.OpenWrite(wavFile))
                using (var sw = new BinaryWriter(f))
                {
                    // RIFF/WAVE header layout: http://soundfile.sapp.org/doc/WaveFormat/

                    sw.Write(new char[] { 'R', 'I', 'F', 'F' });
                    sw.Write(0); // overall size, patched after recording
                    sw.Write(new char[] { 'W', 'A', 'V', 'E' });
                    // "fmt " chunk (Google: WAVEFORMATEX structure)
                    sw.Write(new char[] { 'f', 'm', 't', ' ' });
                    sw.Write(16);                                               // chunkSize (in bytes)
                    sw.Write((ushort)1);                                        // wFormatTag (PCM = 1)
                    sw.Write(numChannels);                                      // wChannels
                    sw.Write(samplingRate);                                     // dwSamplesPerSec
                    sw.Write(samplingRate * numChannels * (bitsPerSample / 8)); // dwAvgBytesPerSec
                    sw.Write((ushort)(numChannels * (bitsPerSample / 8)));      // wBlockAlign
                    sw.Write(bitsPerSample);                                    // wBitsPerSample
                                                                                // "data" chunk
                    sw.Write(new char[] { 'd', 'a', 't', 'a' });
                    sw.Write(0);                                                // data size, patched later

                    // 10 seconds of data. overblown, but it gets the job done
                    const int bufferLength = samplingRate * 10;
                    int       samplesWrote = 0;

                    Console.WriteLine($"Recording from: {recorders[0]}");

                    using (var audioCapture = new AudioCapture(
                               recorders[0], samplingRate, alFormat, bufferLength))
                    {
                        var buffer = new short[bufferLength];

                        audioCapture.Start();
                        for (int i = 0; i < _configuration.SecondsToRecord; ++i)
                        {
                            Thread.Sleep(1000); // give it some time to collect samples

                            // Clamp so a slow loop iteration can never ask for
                            // more samples than the staging buffer can hold.
                            var samplesAvailable = Math.Min(audioCapture.AvailableSamples, buffer.Length);
                            audioCapture.ReadSamples(buffer, samplesAvailable);
                            for (var x = 0; x < samplesAvailable; ++x)
                            {
                                sw.Write(buffer[x]);
                            }

                            samplesWrote += samplesAvailable;

                            Console.WriteLine($"Wrote {samplesAvailable}/{samplesWrote} samples...");
                        }
                        audioCapture.Stop();
                    }

                    // Patch the two size fields now that the sample count is known.
                    sw.Seek(4, SeekOrigin.Begin);  // RIFF overall size
                    sw.Write(36 + samplesWrote * (bitsPerSample / 8) * numChannels);
                    sw.Seek(40, SeekOrigin.Begin); // "data" chunk size
                    sw.Write(samplesWrote * (bitsPerSample / 8) * numChannels);
                }

            return(Task.FromResult(wavFile));
        }