Example #1
        private void tmr1_Tick(object sender, EventArgs e)
        {
            counter++;
            bufferedWaveProvider.Read(buffer, 0, buffer.Length);
            bufferedWaveProvider.ClearBuffer();

            //New drawing thing
            short[] shortBuff = byteToShort(buffer);
            drawAudio(shortBuff);

            //drawTimeGraph(buffer);


            if (counter == 10)
            {
                //Convert the little-endian byte buffer to 16-bit samples
                //(start at 0 so low/high byte pairs stay aligned)
                for (int i = 0; i < buffer.Length - 1; i += 2)
                {
                    outBufferTest[i / 2] = (short)(buffer[i] | (buffer[i + 1] << 8));
                }
                //Write values to the text box
                txtSimOutput.Text = string.Join(",", outBufferTest);
            }
        }
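The byteToShort helper called above is not shown in this example; a minimal sketch, assuming the buffer holds 16-bit little-endian PCM (the same layout the conversion loop above expects), could look like this:

        // Hypothetical helper matching the byteToShort call above.
        private static short[] byteToShort(byte[] bytes)
        {
            short[] samples = new short[bytes.Length / 2];
            // Buffer.BlockCopy copies raw bytes, so on a little-endian machine this
            // reproduces the same pairing as (low | high << 8).
            Buffer.BlockCopy(bytes, 0, samples, 0, samples.Length * 2);
            return samples;
        }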
Example #2
        public async Task SendToStream(VoiceNextConnection voiceStream)
        {
            try
            {
                var sink           = voiceStream.GetTransmitSink(_soundSettings.SampleSize);
                var bytesPerSample = _remoteBuffer.BytesPerSample(sink.SampleDuration);
                streamBuffer = new byte[bytesPerSample];

                await Task.Delay(_soundSettings.BufferDurationMs);

                while (_playing)
                {
                    if (_remoteBuffer.BufferedBytes >= bytesPerSample)
                    {
                        _remoteBuffer.Read(streamBuffer, 0, bytesPerSample);
                        await sink.WriteAsync(streamBuffer, 0, bytesPerSample);
                    }
                }
            }
            catch (Exception e)
            {
                _log.Error(e, $"An error occurred during transmission to stream: {e.Message}");
                Stop();
            }
        }
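While _remoteBuffer holds fewer than bytesPerSample bytes, the while loop above spins without yielding. A hedged variant of the same loop (all names taken from the example; the 10 ms back-off value is an assumption) backs off briefly between checks:

                while (_playing)
                {
                    if (_remoteBuffer.BufferedBytes >= bytesPerSample)
                    {
                        _remoteBuffer.Read(streamBuffer, 0, bytesPerSample);
                        await sink.WriteAsync(streamBuffer, 0, bytesPerSample);
                    }
                    else
                    {
                        // yield instead of busy-waiting for the capture side to catch up
                        await Task.Delay(10); // assumed back-off; tune to the sample duration
                    }
                }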
Example #3
 /// <summary>
 /// Stores retrieved audio data into the specified dataBuffer
 /// </summary>
 /// <param name="dataBuffer">Byte array to store received data in</param>
 /// <param name="size">Maximum number of bytes to read into dataBuffer</param>
 /// <returns>The number of bytes read</returns>
 public override int Read(byte[] dataBuffer, uint size)
 {
     lock (lockObject)
     {
         return(bufferedWaveProvider.Read(dataBuffer, 0, dataBuffer.Length));
     }
 }
Example #4
        void OnDataAvailable(object sender, WaveInEventArgs e)
        {
            bwp.AddSamples(e.Buffer, 0, e.BytesRecorded);

            byte[] buffer        = e.Buffer;
            int    bytesRecorded = e.BytesRecorded;

            //WriteToFile(buffer, bytesRecorded);

            _sampleAggregator?.OnDataAvailable(buffer, bytesRecorded);

            for (int index = 0; index < e.BytesRecorded; index += 2)
            {
                short sample   = (short)((buffer[index + 1] << 8) | buffer[index + 0]);
                float sample32 = sample / 32768f;
                _sampleAggregator?.Add(sample32);
            }

            int frameSize = BUFFERSIZE;

            byte[] frames = new byte[frameSize];

            bwp.Read(frames, 0, frameSize);
            _sampleAggregator?.Calculate(frames, frameSize);
        }
Example #5
        public void SaveRecord()
        {
            try
            {
                waveOut.Stop();
                waveIn.StopRecording();

                if (File.Exists(outputFile))
                {
                    File.Delete(outputFile);
                }

                writer = new WaveFileWriter(outputFile, waveIn.WaveFormat);

                byte[] buffer = new byte[bwp.BufferLength];
                int    offset = 0;
                int    count  = bwp.BufferLength;

                var read = bwp.Read(buffer, offset, count);
                if (read > 0)
                {
                    writer.Write(buffer, offset, read);
                }

                waveIn.Dispose();
                waveIn = null;
                writer.Close();
                writer = null;
            }
            catch (Exception e)
            {
                MessageBox.Show(e.Message);
            }
        }
Example #6
        public void GetAudioData(object sender, EventArgs e)
        {
            int frameSize  = BUFFERSIZE;
            var audioBytes = new byte[frameSize];

            _bwp.Read(audioBytes, 0, frameSize);

            if (audioBytes.Length == 0)
            {
                return;
            }
            if (audioBytes[frameSize - 2] == 0)
            {
                return;
            }

            int BYTES_PER_POINT = 2;
            int graphPointCount = audioBytes.Length / BYTES_PER_POINT;

            double[] pcm     = new double[graphPointCount];
            double[] fft     = new double[graphPointCount];
            double[] fftReal = new double[graphPointCount / 2];

            for (int i = 0; i < graphPointCount; i++)
            {
                Int16 val = BitConverter.ToInt16(audioBytes, i * 2);
                pcm[i] = (double)(val) / Math.Pow(2, 16) * 200.0;
            }

            fft = FFT(pcm);
            Array.Copy(fft, fftReal, fftReal.Length);

            ProcessPixelData(fftReal);
        }
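The FFT(double[]) helper used here (and in several of the later examples) is not shown. A minimal sketch, assuming NAudio's NAudio.Dsp.FastFourierTransform and a power-of-two input length, returning per-bin magnitudes:

        // Hypothetical FFT helper: returns the magnitude of each frequency bin.
        // Assumes data.Length is a power of two, as NAudio.Dsp requires.
        public static double[] FFT(double[] data)
        {
            var fftComplex = new NAudio.Dsp.Complex[data.Length];
            for (int i = 0; i < data.Length; i++)
            {
                fftComplex[i].X = (float)data[i]; // real part
                fftComplex[i].Y = 0;              // imaginary part
            }

            // second argument is log2 of the FFT length
            NAudio.Dsp.FastFourierTransform.FFT(true, (int)Math.Round(Math.Log(data.Length, 2.0)), fftComplex);

            var fft = new double[data.Length];
            for (int i = 0; i < data.Length; i++)
            {
                fft[i] = Math.Sqrt(fftComplex[i].X * fftComplex[i].X +
                                   fftComplex[i].Y * fftComplex[i].Y);
            }
            return fft;
        }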
Example #7
        public bool Format(BufferedWaveProvider bwp) //returns true if data was successfully formatted
        {
            int frameSize  = Dashboard.WorkingProfile.SoundProfile.SAMPLES;
            var audioBytes = new byte[frameSize]; //create working buffer for audio

            bwp.Read(audioBytes, 0, frameSize);   //fill it with input
            if (audioBytes.Length == 0 || audioBytes[frameSize - 2] == 0)
            {
                return(false);                                                         //if it's empty, return false
            }
            int BYTES_PER_SAMPLE = Dashboard.WorkingProfile.SoundProfile.BITDEPTH / 8; // BITS / 8 BITS PER BYTE = BYTES
            int samples          = audioBytes.Length / BYTES_PER_SAMPLE;               //gets number of samples


            //different datasets to work with
            pcm = new double[samples];          //Pulse-code modulation: Each value is a sample's quantized amplitude
            double[] fft = new double[samples]; //Fast-Fourier Transformed PCM data: Gets the amplitude at linearly spaced frequencies
            //double[] cqt = new double[samples]; //Constant-Q Transformed PCM data: Gets the amplitude at exponentially spaced frequencies (matches human hearing)

            //populate PCM data
            for (int i = 0; i < samples; ++i)
            {
                Int16 sample = BitConverter.ToInt16(audioBytes, i * 2); //16 bit sample from 2 bytes of data
                pcm[i] = (double)(sample);                              // MAX_16BIT_VALUE * 200.0;
            }

            fft     = FFT(pcm);                        //FFTs the populated pcm;
            fftReal = fft.Take(samples / 2).ToArray(); //Sets fftReal to only contain real values from fft

            return(true);
        }
Example #8
        /// <summary>
        ///     Send data.
        /// </summary>
        private void StreamingSendData()
        {
            while (true)
            {
                lock (_lockObject)
                {
                    var bufferSize = _sampleRate * _channels * (_bitsPerSample / 8);
                    if (_bufferedWaveProvider.BufferedBytes > bufferSize)
                    {
                        var sendBytes = new byte[bufferSize];
                        var before    = _bufferedWaveProvider.BufferedBytes;
                        _bufferedWaveProvider.Read(sendBytes, 0, sendBytes.Length);

                        Socket.Log.Trace("before:" + before + " after:" + _bufferedWaveProvider.BufferedBytes);

                        if (_ClearBuffer)
                        {
                            Socket.Log.Trace("Send Wave Chunk.");
                            Socket.Send(GetWaveHeader());
                            _ClearBuffer = false;
                        }

                        Socket.Log.Trace("Send data.");
                        Socket.Send(sendBytes);
                        ResetConnectionInterval();
                    }
                }
            }
        }
Example #9
        public void calculateFFT()
        {
            var audioBytes = new byte[BUFFERSIZE];

            bwp.Read(audioBytes, 0, BUFFERSIZE);
            int pointCount = audioBytes.Length / 2;

            double[] pcm     = new double[pointCount];
            double[] fft     = new double[pointCount];
            double[] fftReal = new double[pointCount / 2];


            for (int i = 0; i < pointCount; i++)
            {
                // read the int16 from the two bytes
                Int16 val = BitConverter.ToInt16(audioBytes, i * 2);

                // store the value in Ys as a percent (+/- 100% = 200%)
                pcm[i] = (double)(val) / Math.Pow(2, 16) * 200.0;
            }
            fft = FFT(pcm);
            Array.Copy(fft, fftReal, fftReal.Length);
            // locate the dominant bin and convert its index to a frequency in Hz
            int peakIndex = Array.IndexOf(fftReal, fftReal.Max());
            if (peakIndex > 1)
            {
                double fz = (double)(peakIndex * RATE) / pointCount;
                Console.WriteLine(fz);
                //Console.WriteLine((double)(peakIndex / 2) / pointCount);
            }
        }
Example #10
        private void btnDump_Click(object sender, EventArgs e)
        {
            input.StopRecording();
            recording = false;

            int tempbufferbytes;

            if (curdelay > dumpMs && dumps > 1)
            {
                tempbufferbytes = buffer.BufferedBytes - ((waveformat.AverageBytesPerSecond * (targetMs / 1000) / dumps));//* (dumps - 1) / dumps / waveformat.BlockAlign * waveformat.BlockAlign;
                var tempbuffer = new byte[buffer.BufferedBytes];

                tempbufferbytes = buffer.Read(tempbuffer, 0, tempbufferbytes);

                buffer.ClearBuffer();

                buffer.AddSamples(tempbuffer, 0, tempbufferbytes);
            }
            else
            {
                buffer.ClearBuffer();
            }


            curdelay = (int)buffer.BufferedDuration.TotalMilliseconds;
            if (targetRampedUp && curdelay < targetMs)
            {
                rampingup = true;
            }
            else
            {
                rampingdown = true;
            }
        }
Example #11
        /// <summary>
        /// Processes microphone input when the buffer is full
        /// </summary>
        /// <param name="sender"></param>
        /// <param name="e"></param>
        private void timer1_Tick(object sender, EventArgs e)
        {
            var frames = new byte[640];

            buffer.Read(frames, 0, 640);
            if (frames.Length == 0)
            {
                return;
            }
            if (frames[640 - 2] == 0)
            {
                return;
            }

            timer1.Enabled = false;

            double[] d = new double[320];
            for (int i = 0; i < 640; i += 2)
            {
                double c = (short)((frames[i + 1] << 8) | frames[i]);
                c        = (c / 32768.0);
                d[i / 2] = c * 10;
            }
            double[] FFTs = d;

            Series series = new Series();

            series.ChartType = SeriesChartType.FastLine;
            for (int i = 0; i < 320; i++)
            {
                series.Points.AddXY(i, FFTs[i]);
            }
            chartAudio.Series.Clear();
            chartAudio.Series.Add(series);
            chartAudio.Update();


            /* for displaying FFT
             * Ys2 = FFT(Ys);
             * series = new Series();
             * series.ChartType = SeriesChartType.FastLine;
             * for (int i = 0; i < Xs2.Length / 2; i++)
             * {
             *  series.Points.AddXY(Xs2.Take(Xs2.Length / 2).ToArray()[i], Ys2.Take(Ys2.Length / 2).ToArray()[i]);
             * }
             * chartFreq.Series.Clear();
             * chartFreq.Series.Add(series);
             * chartFreq.Update();
             */

            //Thread testingThread = new Thread(new ParameterizedThreadStart(RunTest));
            //testingThread.IsBackground = true;
            //testingThread.Start(Ys2);
            //testingThread.Start(FFTs);

            RunTest(FFTs);

            timer1.Enabled = true;
        }
Example #12
        public void UpdateAudioGraph()
        {
            // read the bytes from the stream
            int frameSize = BUFFERSIZE;
            var frames    = new byte[frameSize];

            bwp.Read(frames, 0, frameSize);
            if (frames.Length == 0)
            {
                return;
            }
            if (frames[frameSize - 2] == 0)
            {
                return;
            }

            timer1.Enabled = false;

            // convert it to int32 manually (and a double for scottplot)
            int SAMPLE_RESOLUTION = 16;
            int BYTES_PER_POINT   = SAMPLE_RESOLUTION / 8;

            Int32[]  vals = new Int32[frames.Length / BYTES_PER_POINT];
            double[] Ys   = new double[frames.Length / BYTES_PER_POINT];
            double[] Xs   = new double[frames.Length / BYTES_PER_POINT];
            double[] Ys2  = new double[frames.Length / BYTES_PER_POINT];
            double[] Xs2  = new double[frames.Length / BYTES_PER_POINT];
            for (int i = 0; i < vals.Length; i++)
            {
                // bit shift the byte buffer into the right variable format
                byte hByte = frames[i * 2 + 1];
                byte lByte = frames[i * 2 + 0];
                vals[i] = (int)(short)((hByte << 8) | lByte);
                Xs[i]   = i;
                Ys[i]   = vals[i];
                Xs2[i]  = (double)i / Ys.Length * RATE / 1000.0; // units are in kHz
            }

            //scottPlotUC1.PlotXY(Xs, Ys);
            //scottPlotUC1.AxisAuto();
            Ys2 = FFT(Ys);
            mfcc(Ys2);


            scottPlotUC1.PlotXY(Xs2.Take(Xs2.Length / 2).ToArray(), Ys2.Take(Ys2.Length / 2).ToArray());
            scottPlotUC1.AxisAuto();

            // update the displays
            //scottPlotUC1.UpdateGraph();
            //scottPlotUC2.UpdateGraph();
            scottPlotUC1.Clear();
            //scottPlotUC2.Clear();

            Application.DoEvents();
            scottPlotUC1.Update();
            //scottPlotUC2.Update();

            timer1.Enabled = true;
        }
Example #13
 public int Read(byte[] buffer, int offset, int count)
 {
     if (waveBuffer.BufferedBytes < count - offset)
     {
         parent.Read((count - offset) - waveBuffer.BufferedBytes);
     }
     return(waveBuffer.Read(buffer, offset, count));
 }
Example #14
 public int Read(byte[] buffer, int offset, int count)
 {
     lock (syncRoot)
     {
         FilterEcho();
         return(filtered.Read(buffer, offset, count));
     }
 }
Example #15
        public byte[] GetCapturedData()
        {
            var capturedData = new byte[bufferLength];

            bufferProvider.Read(capturedData, 0, bufferLength);

            return(capturedData);
        }
Example #16
 private void SendLatestAudioBytes(object state)
 {
     if (!mIsDisposing)
     {
         byte[] audioBytes = new byte[BufferSize];
         mBufferedWaveProvider.Read(audioBytes, 0, BufferSize);
         NotifyWithNewData.Invoke(this, new RecorderUpdateEventArgs(string.Empty, audioBytes));
     }
 }
Example #17
        public void EmptyBufferCanReturnZeroFromRead()
        {
            var bwp = new BufferedWaveProvider(new WaveFormat());

            bwp.ReadFully = false;
            var buffer = new byte[44100];
            var read   = bwp.Read(buffer, 0, buffer.Length);

            Assert.AreEqual(0, read);
        }
Example #18
        public void UpdateAudioGraph()
        {
            // read the bytes from the stream
            int frameSize = BUFFERSIZE;
            var frames    = new byte[frameSize];

            bwp.Read(frames, 0, frameSize);
            if (frames.Length == 0)
            {
                return;
            }
            if (frames[frameSize - 2] == 0)
            {
                return;
            }

            timer1.Enabled = false;

            // convert it to int32 manually (and a double for scottplot)
            int SAMPLE_RESOLUTION = 16;
            int BYTES_PER_POINT   = SAMPLE_RESOLUTION / 8;
            int valsSize          = frames.Length / BYTES_PER_POINT;

            //int valsSize = 471998;
            double[] vals = new double[valsSize]; //471998 -> frames.Length = 943996
            double[] Ys   = new double[valsSize]; //471998 -> frames.Length = 943996
            double[] Xs   = new double[valsSize]; //471998 -> frames.Length = 943996
            for (int i = 0; i < vals.Length; i++)
            {
                // bit shift the byte buffer into the right variable format
                byte hByte = frames[i * 2 + 1];
                byte lByte = frames[i * 2 + 0];
                vals[i] = (int)(short)((hByte << 8) | lByte);
                Xs[i]   = (double)i / Ys.Length * RATE / 1000.0; // units are in kHz
            }

            //update scottplot (FFT, frequency domain)
            Ys = FFT(vals);
            scottPlotUC1.Xs = Xs.Take(Xs.Length / 2).ToArray(); //235999 -> Xs.Length = 471998
            scottPlotUC1.Ys = Ys.Take(Ys.Length / 2).ToArray(); //235999 -> Ys.Length = 471998

            scottPlotUC3.Xs = Xs.Take(Xs.Length / 2).ToArray();
            scottPlotUC3.Ys = Ys.Take(Ys.Length / 2).ToArray();

            //Console.ReadLine(); // debugging leftover; blocks the UI thread if left in
            //// update the displays
            scottPlotUC1.UpdateGraph();
            scottPlotUC3.UpdateGraph("WaterFall");

            Application.DoEvents();
            scottPlotUC1.Update();
            //scottPlotUC2.Update();

            timer1.Enabled = true;
        }
Example #19
        public void PlotLatestData()
        {
            int frameSize  = BUFFERSIZE;
            var audioBytes = new byte[frameSize];

            bwp.Read(audioBytes, 0, frameSize);

            if (audioBytes.Length == 0)
            {
                return;
            }
            if (audioBytes[frameSize - 2] == 0)
            {
                return;
            }

            int BYTES_PER_POINT = 2;

            int graphPointCount = audioBytes.Length / BYTES_PER_POINT;

            double[] pcm     = new double[graphPointCount];
            double[] fft     = new double[graphPointCount];
            double[] fftReal = new double[graphPointCount / 2];

            for (int i = 0; i < graphPointCount; i++)
            {
                short val = BitConverter.ToInt16(audioBytes, i * 2);

                pcm[i] = val / Math.Pow(2, 16) * 200.0;
            }

            fft = FFT(pcm);

            double pcmPointSpacingMs = RATE / 1000;
            double fftMaxFreq        = RATE / 2;
            double fftPointSpacingHz = fftMaxFreq / graphPointCount;

            Array.Copy(fft, fftReal, fftReal.Length);

            PlotUC1.Clear();
            PlotUC1.PlotSignal(pcm, pcmPointSpacingMs, Color.Blue);
            PlotUC2.Clear();
            PlotUC2.PlotSignal(fftReal, fftPointSpacingHz, Color.Blue);

            if (needsAutoScaling)
            {
                PlotUC1.AxisAuto();
                PlotUC2.AxisAuto();
                needsAutoScaling = false;
            }

            numberOfDraws += 1;

            Application.DoEvents();
        }
Example #20
 // Get the entire input byte array
 public byte[] GetAllSamples()
 {
     byte[] holdingBuffer = null;
     if (waveProvider != null)
     {
         holdingBuffer = new byte[waveProvider.BufferedBytes];
         waveProvider.Read(holdingBuffer, 0, waveProvider.BufferedBytes);
         return(holdingBuffer);
     }
     return(null);
 }
Example #21
        public void FullReadsByDefault()
        {
            var bwp    = new BufferedWaveProvider(new WaveFormat());
            var buffer = new byte[44100];

            bwp.AddSamples(buffer, 0, 2000);
            var read = bwp.Read(buffer, 0, buffer.Length);

            Assert.AreEqual(buffer.Length, read);
            Assert.AreEqual(0, bwp.BufferedBytes);
        }
Example #22
        public void WhenBufferHasMoreThanNeededReadFully()
        {
            var bwp    = new BufferedWaveProvider(new WaveFormat());
            var buffer = new byte[44100];

            bwp.AddSamples(buffer, 0, 5000);
            var read = bwp.Read(buffer, 0, 2000);

            Assert.AreEqual(2000, read);
            Assert.AreEqual(3000, bwp.BufferedBytes);
        }
Example #23
 public override int Read(byte[] dataBuffer, uint size)
 {
      // PullAudioInputStreamCallback implementations are expected to block on read,
      // but BufferedWaveProvider does not. Therefore we block here until
      // the BufferedWaveProvider has something to return.
     while (_provider.BufferedBytes == 0)
     {
         Thread.Sleep(50);
     }
     return(_provider.Read(dataBuffer, 0, (int)size));
 }
Example #24
        void FilterEcho()
        {
            while (remoteSound.BufferedBytes >= bytesPerFrame && localSound.BufferedBytes >= bytesPerFrame)
            {
                // read source of echo
                remoteSound.Read(remoteFrame, 0, bytesPerFrame);
                // read local sound + echo
                localSound.Read(localFrame, 0, bytesPerFrame);

                filter.Filter(localFrame, remoteFrame, outputFrame);
                filtered.AddSamples(outputFrame, 0, outputFrame.Length);
            }

            // read the remaining remote sound and play it
            while (remoteSound.BufferedBytes > 0)
            {
                remoteSound.Read(remoteFrame, 0, bytesPerFrame);
                filtered.AddSamples(remoteFrame, 0, remoteFrame.Length);
            }
        }
Example #25
        public double[] GetData()
        {
            var audioBytes = new byte[bufferSize];

            bwp.Read(audioBytes, 0, bufferSize);

            if (audioBytes.Length == 0)
            {
                return new double[] { 0, 0, 0 };
            }
            if (audioBytes[bufferSize - 2] == 0)
            {
                return new double[] { 0, 0, 0 };
            }

            int BYTES_PER_BIN = 2;

            int binCount = audioBytes.Length / BYTES_PER_BIN;

            double[] pcm     = new double[binCount];
            double[] fftReal = new double[binCount / 2]; // we only care about the first half of the bins
            double[] fft;

            for (int i = 0; i < binCount; i++)
            {
                Int16 val = BitConverter.ToInt16(audioBytes, i * 2);
                pcm[i] = (double)(val) / Math.Pow(2, 16) * 200.0;
            }
            fft = FFT(pcm);
            Array.Copy(fft, fftReal, fftReal.Length);
            double l = 0, m = 0, h = 0;
            double multiplier = (double)rate / fft.Length; // Hz per FFT bin

            for (int i = 0; i < fftReal.Length; i++)
            {
                double frequency = i * multiplier;
                if (frequency < lowLimit)
                {
                    l += fftReal[i];
                }
                else if (frequency < midLimit)
                {
                    m += fftReal[i];
                }
                else
                {
                    h += fftReal[i];
                }
            }
            return(new double[] { l, m, h });
        }
Example #26
        public override bool GetExact(short[] data, int offset, int length)
        {
            if (bwp.BufferedBytes < length * 2)
            {
                return(false);
            }

            byte[] byteData = new byte[length * 2];
            int    count    = bwp.Read(byteData, 0, length * 2);

            // Buffer.BlockCopy offsets are in bytes, so convert the sample offset
            Buffer.BlockCopy(byteData, 0, data, offset * 2, count);
            return(true);
        }
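A hypothetical caller (the capture instance, frame length, and ProcessFrame handler below are assumptions, not part of the example) might poll GetExact for whole 16-bit frames:

            // Hypothetical usage: fetch exactly 320 samples (one 20 ms frame of
            // 16 kHz mono audio) only when the provider has buffered enough bytes.
            short[] frame = new short[320];
            if (capture.GetExact(frame, 0, frame.Length))
            {
                ProcessFrame(frame); // hypothetical downstream handler
            }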
Example #27
        public void PartialReadsPossibleWithReadFullyFalse()
        {
            var bwp = new BufferedWaveProvider(new WaveFormat());

            bwp.ReadFully = false;
            var buffer = new byte[44100];

            bwp.AddSamples(buffer, 0, 2000);
            var read = bwp.Read(buffer, 0, buffer.Length);

            Assert.AreEqual(2000, read);
            Assert.AreEqual(0, bwp.BufferedBytes);
        }
Example #28
        private void wi_DataAvailable(object sender, WaveInEventArgs e)
        {
            bwp.AddSamples(e.Buffer, 0, e.BytesRecorded);

            var bufferBytes = new byte[bwp.BufferedBytes];

            // drain everything currently buffered (including the samples just added above)
            bwp.Read(bufferBytes, 0, bufferBytes.Length);

            passDataAndProcess(bufferBytes);
        }
Example #29
        public void BufferedBytesAreReturned()
        {
            var bytesToBuffer = 1000;
            var bwp           = new BufferedWaveProvider(new WaveFormat(44100, 16, 2));
            var data          = Enumerable.Range(1, bytesToBuffer).Select(n => (byte)(n % 256)).ToArray();

            bwp.AddSamples(data, 0, data.Length);
            Assert.AreEqual(bytesToBuffer, bwp.BufferedBytes);
            var readBuffer = new byte[bytesToBuffer];
            var bytesRead  = bwp.Read(readBuffer, 0, bytesToBuffer);

            Assert.AreEqual(bytesRead, bytesToBuffer);
            Assert.AreEqual(readBuffer, data);
            Assert.AreEqual(0, bwp.BufferedBytes);
        }
Example #30
        public void calculateLatestData()
        {
            // check the incoming microphone audio
            int frameSize  = BUFFERSIZE;
            var audioBytes = new byte[frameSize];

            bwp.Read(audioBytes, 0, frameSize);

            // return if there's nothing new to plot
            if (audioBytes.Length == 0)
            {
                return;
            }
            if (audioBytes[frameSize - 2] == 0)
            {
                return;
            }

            // incoming data is 16-bit (2 bytes per audio point)
            int BYTES_PER_POINT = 2;

            // create a (32-bit) int array ready to fill with the 16-bit data
            graphPointCount = audioBytes.Length / BYTES_PER_POINT;

            // create double arrays to hold the data we will graph
            double[] pcm     = new double[graphPointCount];
            double[] fft     = new double[graphPointCount];
            double[] fftReal = new double[graphPointCount / 2];

            // populate Xs and Ys with double data
            for (int i = 0; i < graphPointCount; i++)
            {
                // read the int16 from the two bytes
                Int16 val = BitConverter.ToInt16(audioBytes, i * 2);

                // store the value in pcm, scaled relative to 16-bit full scale
                pcm[i] = (double)val / Math.Pow(2, 16) * 10.0;
            }

            // calculate the full FFT
            fft = FFT(pcm);

            // just keep the real half (the other half imaginary)
            Array.Copy(fft, fftReal, fftReal.Length);
            // calculate logarithmic interpolated values

            Array.Copy(fftReal, bands, bands.Length);
        }