コード例 #1
0
        /// <summary>
        /// Handles audio beam frames from the sensor: concatenates the audio data of all
        /// sub frames into a single byte array and publishes it as one message.
        /// </summary>
        /// <param name="sender">object sending the event</param>
        /// <param name="e">event arguments</param>
        private void Reader_AudioBeamFrameArrived(object sender, AudioBeamFrameArrivedEventArgs e)
        {
            AudioBeamFrameList frameList = e.FrameReference.AcquireBeamFrames();

            if (frameList == null)
            {
                return;
            }

            // AudioBeamFrameList is IDisposable; dispose promptly so frames keep arriving.
            using (frameList)
            {
                // Only one audio beam is supported; its sub frames are at index 0.
                IReadOnlyList <AudioBeamSubFrame> subFrames = frameList[0].SubFrames;

                this.fullAudio = new Byte[audioBuffer.Length * subFrames.Count];
                int offset = 0;
                foreach (AudioBeamSubFrame subFrame in subFrames)
                {
                    subFrame.CopyFrameDataToArray(this.audioBuffer);
                    this.audioBuffer.CopyTo(this.fullAudio, offset);
                    offset += this.audioBuffer.Length;
                }

                this.publisher.SendByteArray(this.fullAudio);
                this.fullAudio = null;
            }
        }
コード例 #2
0
        /// <summary>
        /// Handles audio beam frames: copies each sub frame into a larger wave buffer
        /// and publishes the buffer whenever the next sub frame would not fit.
        /// </summary>
        /// <param name="sender">object sending the event</param>
        /// <param name="e">event arguments</param>
        private void Reader_FrameArrived(object sender, AudioBeamFrameArrivedEventArgs e)
        {
            AudioBeamFrameList frameList = e.FrameReference.AcquireBeamFrames();

            if (frameList == null)
            {
                return;
            }

            // AudioBeamFrameList is IDisposable; dispose promptly so frames keep arriving.
            using (frameList)
            {
                byte[] subFrameData = new byte[_kinect.AudioSource.SubFrameLengthInBytes];

                // Only one audio beam is supported; its sub frames are at index 0.
                foreach (AudioBeamSubFrame subFrame in frameList[0].SubFrames)
                {
                    int bytesRecorded = (int)subFrame.FrameLengthInBytes;
                    subFrame.CopyFrameDataToArray(subFrameData);

                    // NOTE(review): the strict '<' means a sub frame that would exactly fill
                    // the buffer triggers an early publish — confirm this is intended.
                    if (_waveBufferPos + bytesRecorded < _waveBuffer.Length)
                    {
                        Array.Copy(subFrameData, 0, _waveBuffer, _waveBufferPos, bytesRecorded);
                        _waveBufferPos += bytesRecorded;
                    }
                    else
                    {
                        // Buffer would overflow: flush it, then restart it with this sub frame.
                        PublishBuffer();

                        Array.Copy(subFrameData, _waveBuffer, bytesRecorded);
                        _waveBufferPos = bytesRecorded;
                    }
                }
            }
        }
コード例 #3
0
 /// <summary>
 /// Handles audio beam frames: raises an audio-position event for each beam frame,
 /// reporting the beam angle (converted to degrees) and its confidence.
 /// </summary>
 /// <param name="sender">object sending the event</param>
 /// <param name="e">event arguments</param>
 void audioReader_FrameArrived(object sender, AudioBeamFrameArrivedEventArgs e)
 {
     using (AudioBeamFrameList frames = e.FrameReference.AcquireBeamFrames())
     {
         if (frames == null)
         {
             return;
         }

         for (int i = 0; i < frames.Count; i++)
         {
             var beam = frames[i].AudioBeam;

             KinectBase.AudioPositionEventArgs args = new KinectBase.AudioPositionEventArgs();
             args.audioAngle = beam.BeamAngle * (180.0 / Math.PI);  // Convert from radians to degrees
             args.confidence = beam.BeamAngleConfidence;
             args.kinectID   = kinectID;
             OnAudioPositionChanged(args);
         }
     }
 }
コード例 #4
0
        /// <summary>
        /// Handles audio beam frames: converts each sub frame's raw bytes to 32-bit float
        /// samples, serializes the audio container to fixed-width JSON (padded to 4100
        /// characters) and broadcasts it to clients as raw UTF-16 bytes.
        /// </summary>
        /// <param name="sender">object sending the event</param>
        /// <param name="e">event arguments</param>
        private void onAudioFrameArrived(object sender, AudioBeamFrameArrivedEventArgs e)
        {
            AudioBeamFrameReference audioFrameRefrence = e.FrameReference;

            try
            {
                AudioBeamFrameList frameList = audioFrameRefrence.AcquireBeamFrames();
                if (frameList != null)
                {
                    // The using block disposes the frame list, so the previous explicit
                    // frameList.Dispose() call after the block was redundant and removed.
                    using (frameList)
                    {
                        // Only one audio beam is supported. Get the sub frame list for this beam.
                        IReadOnlyList <AudioBeamSubFrame> subFrameList = frameList[0].SubFrames;

                        foreach (AudioBeamSubFrame subFrame in subFrameList)
                        {
                            // Sub frames are IDisposable too; the using block guarantees
                            // disposal even if an exception is thrown while processing.
                            using (subFrame)
                            {
                                this.audioContainer.utcTime             = (DateTime.UtcNow - new DateTime(1970, 1, 1)).TotalSeconds;
                                this.audioContainer.beamAngle           = subFrame.BeamAngle;
                                this.audioContainer.beamAngleConfidence = subFrame.BeamAngleConfidence;

                                // Decode the raw bytes into 32-bit IEEE float samples.
                                byte[] array = new byte[this.audioSource.SubFrameLengthInBytes];
                                subFrame.CopyFrameDataToArray(array);
                                for (int i = 0; i < array.Length; i += sizeof(float))
                                {
                                    audioContainer.audioStream[i / sizeof(float)] = BitConverter.ToSingle(array, i);
                                }

                                // Pad to a fixed message length of 4100 characters so every
                                // broadcast packet has the same size (replaces the manual
                                // append-one-space-at-a-time loop).
                                string jsonString = JsonConvert.SerializeObject(this.audioContainer).PadRight(4100);

                                // Transmit as raw UTF-16 code units (2 bytes per char).
                                byte[] transmittedData = new byte[jsonString.Length * sizeof(char)];
                                System.Buffer.BlockCopy(jsonString.ToCharArray(), 0, transmittedData, 0, transmittedData.Length);
                                this.audioConnector.Broadcast(transmittedData);
                            }
                        }
                    }
                }
            }
            catch
            {
                // Best-effort: ignore if the frame is no longer available.
                // NOTE(review): this also swallows serialization/network errors —
                // consider narrowing the catch or logging.
            }
        }
コード例 #5
0
        /// <summary>
        /// Handles audio beam frames: notifies all registered listeners whenever the
        /// audio beam angle reported by the sensor changes.
        /// </summary>
        /// <param name="sender">object sending the event</param>
        /// <param name="e">event arguments</param>
        private void beamFrameArrived(object sender, AudioBeamFrameArrivedEventArgs e)
        {
            AudioBeamFrameReference frameReference = e.FrameReference;

            AudioBeamFrameList frameList = frameReference.AcquireBeamFrames();

            if (frameList != null)
            {
                // AudioBeamFrameList is IDisposable
                using (frameList)
                {
                    // Only one audio beam is supported; read its current angle (radians).
                    float readAngle = frameList[0].AudioBeam.BeamAngle;
                    if (readAngle != this.beamAngle)
                    {
                        this.beamAngle = readAngle;
                        foreach (BeamAngleListener listener in beamAngleListeners)
                        {
                            listener.onBeamAngleChanged(beamAngle);
                        }
                    }
                }
            }
        }
コード例 #6
0
ファイル: AudioFrame.cs プロジェクト: hetingjane/Diana
        /// <summary>
        /// Builds an audio frame from a Kinect beam frame list, decoding the bytes of
        /// every sufficiently-confident sub frame into floating point samples.
        /// </summary>
        /// <param name="lf">the beam frame list backing this frame</param>
        public AudioFrame(Microsoft.Kinect.AudioBeamFrameList lf)
        {
            this.Type = FrameType.Audio;

            // AudioBeamFrame is indexed by AudioBeams. Logically each AudioBeam represents
            // one of the mics in the microphone array, but the API only exposes the single
            // cumulative beam, which sits at index 0.
            this.beamFrameList = lf;
            this.subFrameCount = UnderlyingAudioFrame.SubFrames.Count;

            Debug.Assert(this.subFrameCount != 0);

            // Scratch buffer for one sub frame. A sub frame spans 16 msec of mono audio
            // sampled at 16 kHz => 256 samples; at 4 bytes per sample that is 1024 bytes.
            byte[] subFrameBytes = new byte[UnderlyingAudioFrame.AudioSource.SubFrameLengthInBytes];

            // Floating point samples for every sub frame belonging to this frame.
            this.audioBuffer = new float[(UnderlyingAudioFrame.AudioSource.SubFrameLengthInBytes / sizeof(float)) * UnderlyingAudioFrame.SubFrames.Count];

            // Beam angle may vary among sub frames belonging to the same AudioBeamFrame;
            // sub frames with a low beam-angle confidence are skipped entirely, leaving
            // the corresponding tail of audioBuffer at its default of zero.
            int sampleIndex = 0;

            foreach (var subFrame in UnderlyingAudioFrame.SubFrames)
            {
                if (subFrame.BeamAngleConfidence >= 0.8)
                {
                    subFrame.CopyFrameDataToArray(subFrameBytes);
                    for (int byteIndex = 0; byteIndex < subFrameBytes.Length; byteIndex += sizeof(float), sampleIndex++)
                    {
                        audioBuffer[sampleIndex] = BitConverter.ToSingle(subFrameBytes, byteIndex);
                    }
                }
            }
        }
コード例 #7
0
        /// <summary>
        /// Handles audio beam frames: appends the raw audio bytes of every sub frame to
        /// the output file stream and tracks the total number of bytes written.
        /// </summary>
        /// <param name="sender">object sending the event</param>
        /// <param name="e">event arguments</param>
        private void Reader_FrameArrived(object sender, AudioBeamFrameArrivedEventArgs e)
        {
            AudioBeamFrameList frameList = e.FrameReference.AcquireBeamFrames();

            if (frameList == null)
            {
                return;
            }

            // AudioBeamFrameList is IDisposable; dispose promptly so frames keep arriving.
            using (frameList)
            {
                // Only one audio beam is supported; its sub frames are at index 0.
                foreach (AudioBeamSubFrame subFrame in frameList[0].SubFrames)
                {
                    subFrame.CopyFrameDataToArray(this.audioBuffer);
                    if (fileStream.CanWrite == true)
                    {
                        fileStream.Write(audioBuffer, 0, audioBuffer.Length);
                        size += audioBuffer.Length;
                    }
                }
            }
        }
コード例 #8
0
/**
 *      void _selectedController_StateChanged(object sender, XboxControllerStateChangedEventArgs e)
 *      {
 *          OnPropertyChanged("SelectedController");
 *
 *          // Where the action happens with the controller.
 *          if (SelectedController.IsAPressed)
 *          {
 *              Console.WriteLine("A is pressed");
 *          }
 *          else if (SelectedController.IsBPressed)
 *          {
 *              Console.WriteLine("B is pressed");
 *          }
 *      }
 *
 *      public XboxController SelectedController
 *      {
 *          get { return _selectedController; }
 *      }
 *
 *      volatile bool _keepRunning;
 *      XboxController _selectedController;
 *
 *      public void OnPropertyChanged(string name)
 *      {
 *          if (PropertyChanged != null)
 *          {
 *              Action a = () => { PropertyChanged(this, new PropertyChangedEventArgs(name)); };
 *              Dispatcher.BeginInvoke(a, null);
 *          }
 *      }
 *
 *
 *
 *      private void SelectedControllerChanged(object sender, RoutedEventArgs e)
 *      {
 *          _selectedController = XboxController.RetrieveController(((ComboBox)sender).SelectedIndex);
 *          OnPropertyChanged("SelectedController");
 *      }
 **/
        /// <summary>
        /// Handles audio beam frames while in audible tracking mode: tracks changes to
        /// the beam angle and confidence, refreshing the beam display on a change and
        /// otherwise commanding the pan servo.
        /// </summary>
        /// <param name="sender">object sending the event</param>
        /// <param name="e">event arguments</param>
        void audioReader_FrameArrived(object sender, AudioBeamFrameArrivedEventArgs e)
        {
            // Only interpret audio while in the audible tracking state.
            if (this.trackingMode != TrackingMode.AUDIBLE)
            {
                return;
            }

            AudioBeamFrameReference frameReference = e.FrameReference;

            try
            {
                AudioBeamFrameList frameList = frameReference.AcquireBeamFrames();

                if (frameList != null)
                {
                    // AudioBeamFrameList is IDisposable
                    using (frameList)
                    {
                        // Only one audio beam is supported. Get the sub frame list for this beam.
                        IReadOnlyList <AudioBeamSubFrame> subFrameList = frameList[0].SubFrames;

                        // Loop over all sub frames, extracting beam information.
                        foreach (AudioBeamSubFrame subFrame in subFrameList)
                        {
                            // Detect whether the beam angle and/or confidence changed.
                            bool beamChanged = false;

                            if (subFrame.BeamAngle != this.beamAngle)
                            {
                                this.beamAngle = subFrame.BeamAngle;
                                beamChanged    = true;
                            }

                            if (subFrame.BeamAngleConfidence != this.beamAngleConfidence)
                            {
                                this.beamAngleConfidence = subFrame.BeamAngleConfidence;
                                beamChanged = true;
                            }

                            if (beamChanged)
                            {
                                // Refresh display of audio beam
                                this.AudioBeamChanged();
                            }
                            else if (panTilt.IsReady)
                            {
                                // No beam update: drive the X servo.
                                // NOTE(review): an earlier comment said "return the X servo
                                // velocity to 0" but the code assigns 5 — confirm which is intended.
                                this.cannonXVelocity = 5;
                                panTilt.PanX(this.CannonXVelocity);
                            }
                        }
                    }
                }
            }

            catch (Exception)
            {
                // Ignore if the frame is no longer available
            }
        }
コード例 #9
0
        /// <summary>
        /// Handles the audio frame data arriving from the sensor: tracks beam angle and
        /// confidence changes, and accumulates per-sample square sums to compute an
        /// energy level (in dB) per display column for the audio energy visualization.
        /// </summary>
        /// <param name="sender">object sending the event</param>
        /// <param name="e">event arguments</param>
        private void Reader_FrameArrived(object sender, AudioBeamFrameArrivedEventArgs e)
        {
            AudioBeamFrameReference frameReference = e.FrameReference;
            AudioBeamFrameList      frameList      = frameReference.AcquireBeamFrames();

            // AcquireBeamFrames returns null when the frame is no longer available.
            if (frameList != null)
            {
                // AudioBeamFrameList is IDisposable
                using (frameList)
                {
                    // Only one audio beam is supported. Get the sub frame list for this beam
                    IReadOnlyList <AudioBeamSubFrame> subFrameList = frameList[0].SubFrames;

                    // Loop over all sub frames, extract audio buffer and beam information
                    foreach (AudioBeamSubFrame subFrame in subFrameList)
                    {
                        // Check if beam angle and/or confidence have changed
                        bool updateBeam = false;

                        if (subFrame.BeamAngle != this.beamAngle)
                        {
                            this.beamAngle = subFrame.BeamAngle;
                            updateBeam     = true;
                        }

                        if (subFrame.BeamAngleConfidence != this.beamAngleConfidence)
                        {
                            this.beamAngleConfidence = subFrame.BeamAngleConfidence;
                            updateBeam = true;
                        }

                        if (updateBeam)
                        {
                            // Refresh display of audio beam
                            this.AudioBeamChanged();
                        }

                        // Process audio buffer
                        subFrame.CopyFrameDataToArray(this.audioBuffer);

                        for (int i = 0; i < this.audioBuffer.Length; i += BytesPerSample)
                        {
                            // Extract the 32-bit IEEE float sample from the byte array
                            float audioSample = BitConverter.ToSingle(this.audioBuffer, i);

                            // Accumulate the square sum; energy is only emitted once a
                            // full column's worth of samples (SamplesPerColumn) is seen.
                            this.accumulatedSquareSum += audioSample * audioSample;
                            ++this.accumulatedSampleCount;

                            if (this.accumulatedSampleCount < SamplesPerColumn)
                            {
                                continue;
                            }

                            float meanSquare = this.accumulatedSquareSum / SamplesPerColumn;

                            if (meanSquare > 1.0f)
                            {
                                // A loud audio source right next to the sensor may result in mean square values
                                // greater than 1.0. Cap it at 1.0f for display purposes.
                                meanSquare = 1.0f;
                            }

                            // Calculate energy in dB, in the range [MinEnergy, 0], where MinEnergy < 0
                            float energy = MinEnergy;

                            if (meanSquare > 0)
                            {
                                energy = (float)(10.0 * Math.Log10(meanSquare));
                            }

                            // energyLock guards the circular energy buffer shared with the renderer.
                            lock (this.energyLock)
                            {
                                // Normalize values to the range [0, 1] for display
                                this.energy[this.energyIndex] = (MinEnergy - energy) / MinEnergy;
                                this.energyIndex = (this.energyIndex + 1) % this.energy.Length;
                                ++this.newEnergyAvailable;
                            }

                            // Reset accumulators for the next column.
                            this.accumulatedSquareSum   = 0;
                            this.accumulatedSampleCount = 0;
                        }
                    }
                }
            }
        }
コード例 #10
0
        /// <summary>
        /// Handles audio beam frames: posts the raw audio buffer, the beam angle info,
        /// and any audio/body correlations to their respective output streams, stamping
        /// each with an originating time derived from the sub frame's relative time.
        /// </summary>
        /// <param name="sender">object sending the event</param>
        /// <param name="e">event arguments</param>
        private void AudioBeamFrameReader_FrameArrived(object sender, AudioBeamFrameArrivedEventArgs e)
        {
            using (AudioBeamFrameList frameList = e.FrameReference.AcquireBeamFrames())
            {
                if (frameList != null)
                {
                    // NOTE - the old pattern of passing the AudioBeamFrameList to a downstream
                    // KinectAudio component had issues that were exposed in async mode. The
                    // AudioBeamFrameList is not disposed immediately in the event handler as
                    // it needs to be kept around until the async receiver processes it. However,
                    // Kinect suppresses all further audio events until the AudioBeamFrameList is
                    // disposed, so the receiver in KinectAudio has no way of recycling the old
                    // AudioBeamFrameList once it is done processing it (since the receiver never
                    // gets called again and this is the way objects are passed back upstream for
                    // recycling in the current cooperative buffering scheme). To resolve this, I
                    // moved the audio processing into this handler inside a using clause which
                    // ensures that the AudioBeamFrameList is disposed of immediately.
                    AudioBeamFrame audioBeamFrame = frameList[0];

                    foreach (var subFrame in audioBeamFrame.SubFrames)
                    {
                        // Check if we need to reallocate the audio buffer - if for instance the downstream component
                        // that we posted-by-ref to modifies the reference to audioBuffer to null or an array with
                        // a different size.
                        if ((this.audioBuffer == null) || (this.audioBuffer.Length != subFrame.FrameLengthInBytes))
                        {
                            this.audioBuffer = new byte[subFrame.FrameLengthInBytes];
                        }

                        // Get the raw audio bytes from the frame.
                        subFrame.CopyFrameDataToArray(this.audioBuffer);

                        // Compute originating time from the relative time reported by Kinect.
                        // RelativeTime marks the start of the sub frame, so adding Duration
                        // yields the time at which the last sample was captured.
                        var originatingTime = this.pipeline.GetCurrentTimeFromElapsedTicks((subFrame.RelativeTime + subFrame.Duration).Ticks);

                        // Post the audio buffer by reference.
                        this.Audio.Post(new AudioBuffer(this.audioBuffer, KinectSensor.audioFormat), originatingTime);

                        // Post the audio beam angle information by value (not using co-operative buffering).
                        this.AudioBeamInfo.Post(new KinectAudioBeamInfo(subFrame.BeamAngle, subFrame.BeamAngleConfidence), originatingTime);

                        if ((subFrame.AudioBodyCorrelations != null) && (subFrame.AudioBodyCorrelations.Count > 0))
                        {
                            // Get BodyTrackingIds from AudioBodyCorrelations list (seems like this is the only
                            // bit of useful information).
                            var bodyIds = subFrame.AudioBodyCorrelations.Select(abc => abc.BodyTrackingId);

                            // Since we are posting bodyTrackingIds by-ref, we need to do a null check each
                            // time and allocate if necessary. Otherwise clear and re-use the existing list.
                            if (this.bodyTrackingIds == null)
                            {
                                // Allocate a new list
                                this.bodyTrackingIds = new List <ulong>(bodyIds);
                            }
                            else
                            {
                                // Re-use the existing list
                                this.bodyTrackingIds.Clear();
                                foreach (ulong id in bodyIds)
                                {
                                    this.bodyTrackingIds.Add(id);
                                }
                            }

                            // Post the audio body correlations by reference.
                            this.AudioBodyCorrelations.Post(this.bodyTrackingIds, originatingTime);
                        }
                    }
                }
            }
        }
コード例 #11
0
        /// <summary>
        /// Handles audio beam frames: packs each sub frame's samples and beam data into
        /// an audio container and broadcasts it to connected clients as one JSON line.
        /// </summary>
        /// <param name="sender">object sending the event</param>
        /// <param name="e">event arguments</param>
        private void onAudioFrameArrived(object sender, AudioBeamFrameArrivedEventArgs e)
        {
            // Nothing to do when no audio clients are connected.
            if (!this.audioConnector.HasClients)
            {
                return;
            }

            // Audio container describing the Kinect audio buffer: 16 kHz sampling,
            // 16 msec sub frames => 256 float samples per frame.
            var audioContainer = new AudioContainer();

            audioContainer.samplingFrequency  = 16000;
            audioContainer.frameLifeTime      = 0.016;
            audioContainer.numSamplesPerFrame = (int)(audioContainer.samplingFrequency * audioContainer.frameLifeTime);
            audioContainer.numBytesPerSample  = sizeof(float);
            audioContainer.audioStream        = new float[256];

            // Record the current Unix epoch timestamp.
            audioContainer.timestamp = DateTimeOffset.Now.ToUnixTimeMilliseconds();

            // TODO: add relative timestamp to audio?
            // this.audioContainer.relativeTime = e.FrameReference.RelativeTime.TotalMilliseconds;

            // Retrieve audio beams for current frame.
            AudioBeamFrameList frameList = e.FrameReference.AcquireBeamFrames();

            if (frameList == null)
            {
                return;
            }

            // Serialize each sub frame and send it as a JSON message.
            using (frameList)
            {
                // Only one audio beam is supported; its sub frames are at index 0.
                foreach (AudioBeamSubFrame subFrame in frameList[0].SubFrames)
                {
                    // Sub frames are IDisposable too.
                    using (subFrame)
                    {
                        audioContainer.beamAngle           = subFrame.BeamAngle;
                        audioContainer.beamAngleConfidence = subFrame.BeamAngleConfidence;

                        // Decode the raw bytes into 32-bit IEEE float samples.
                        byte[] rawBytes = new byte[subFrame.FrameLengthInBytes];
                        subFrame.CopyFrameDataToArray(rawBytes);
                        for (int offset = 0; offset < rawBytes.Length; offset += sizeof(float))
                        {
                            audioContainer.audioStream[(int)(offset / sizeof(float))] = BitConverter.ToSingle(rawBytes, offset);
                        }

                        // Send one newline-terminated ASCII JSON message per sub frame.
                        var settings = new JsonSerializerSettings {
                            ContractResolver = new AudioContractResolver()
                        };
                        string json  = JsonConvert.SerializeObject(audioContainer, settings) + "\n";
                        byte[] bytes = System.Text.Encoding.ASCII.GetBytes(json);
                        this.audioConnector.Broadcast(bytes);
                    }
                }
            }
        }
コード例 #12
0
        /// <summary>
        /// Event handler invoked when the Kinect has acquired an AudioBeam frame.
        /// Reads the first sub frame, derives beam angle, confidence, correlated
        /// speakers and decibel level, then updates the UI.
        /// </summary>
        /// <param name="sender">
        /// The object that raised the event; here, the Kinect.
        /// </param>
        /// <param name="e">
        /// Data supplied when the event fires.
        /// </param>
        void audioBeamFrameReader_FrameArrived(object sender, AudioBeamFrameArrivedEventArgs e)
        {
            AudioBeamFrameList beamFrames = e.FrameReference.AcquireBeamFrames();

            if (beamFrames == null)
            {
                return;
            }

            // If the frame list is not disposed, the next frame never arrives.
            // A using block (instead of a trailing Dispose() call) guarantees disposal
            // even when an exception is thrown below, so frame delivery cannot stall.
            using (beamFrames)
            {
                IReadOnlyList <AudioBeamSubFrame> subFrames = beamFrames[0].SubFrames;

                // (From the official sample:)
                // Buffer for the data read from the audio stream. A sub frame holds
                // 16 msec of data sampled at 16 kHz, i.e. 16 * 16 = 256 samples per
                // sub frame. Each sample takes 4 bytes, so 256 * 4 = 1024 bytes.
                byte[] audioBuffer = new byte[this.kinect.AudioSource.SubFrameLengthInBytes];

                foreach (AudioBeamSubFrame subFrame in subFrames)
                {
                    // Direction the sound came from, reported in radians (-1.57 .. 1.57);
                    // converted to degrees (-90 .. 90) where needed.
                    float radianAngle = subFrame.BeamAngle;
                    int   degreeAngle = (int)(radianAngle * 180 / Math.PI);

                    // Confidence of the direction estimate.
                    float confidence = subFrame.BeamAngleConfidence;

                    // Bodies correlated with the audio, i.e. who is speaking.
                    List <ulong> speakers = new List <ulong>();
                    foreach (AudioBodyCorrelation audioBody in subFrame.AudioBodyCorrelations)
                    {
                        speakers.Add(audioBody.BodyTrackingId);
                    }

                    // Copy out the audio data and compute the dB (decibel) level from it.
                    subFrame.CopyFrameDataToArray(audioBuffer);
                    float decibel = CalcDecibelWithRMS(audioBuffer);

                    // Update the UI on the dispatcher thread.
                    this.Dispatcher.Invoke(new Action(() =>
                    {
                        this.Label_BeamAngle.Content  = "BeamAngle : " + degreeAngle;
                        this.Label_Confidence.Content = "Confidence : " + confidence;
                        this.Label_Decibel.Content    = "dB : " + (decibel + 90);

                        string speakerIDs = "";
                        foreach (ulong speakerID in speakers)
                        {
                            speakerIDs += speakerID + ", ";
                        }
                        this.Label_Speaker.Content = "Speakers : " + speakerIDs;

                        this.Rectangle_BeamAngle.RenderTransform
                            = new RotateTransform(-1 * degreeAngle, 0.5, 0);

                        this.Rectangle_dBMeter.Width
                            = this.StackPanel_Container.ActualWidth
                              - this.StackPanel_Container.ActualWidth * (decibel / MinDecibel);
                    }));

                    // Only the first sub frame is used.
                    break;
                }
            }
        }
コード例 #13
0
        /// <summary>
        /// Handles the audio frame data arriving from the sensor: accumulates per-sample
        /// square sums to compute an energy level (in dB) per display column, then feeds
        /// the buffer and energy array to the pre-analysis component.
        /// </summary>
        /// <param name="sender">object sending the event</param>
        /// <param name="e">event arguments</param>
        public void Reader_FrameArrived(object sender, AudioBeamFrameArrivedEventArgs e)
        {
            AudioBeamFrameReference frameReference = e.FrameReference;

            try
            {
                AudioBeamFrameList frameList = frameReference.AcquireBeamFrames();

                if (frameList != null)
                {
                    // AudioBeamFrameList is IDisposable
                    using (frameList)
                    {
                        // Only one audio beam is supported. Get the sub frame list for this beam
                        IReadOnlyList <AudioBeamSubFrame> subFrameList = frameList[0].SubFrames;

                        // Loop over all sub frames, extract audio buffer and beam information
                        foreach (AudioBeamSubFrame subFrame in subFrameList)
                        {
                            subFrame.CopyFrameDataToArray(this.audioBuffer);


                            for (int i = 0; i < this.audioBuffer.Length; i += BytesPerSample)
                            {
                                // Extract the 32-bit IEEE float sample from the byte array
                                float audioSample = BitConverter.ToSingle(this.audioBuffer, i);

                                // Accumulate the square sum; energy is only emitted once a
                                // full column's worth of samples (SamplesPerColumn) is seen.
                                this.accumulatedSquareSum += audioSample * audioSample;
                                ++this.accumulatedSampleCount;

                                if (this.accumulatedSampleCount < SamplesPerColumn)
                                {
                                    continue;
                                }

                                float meanSquare = this.accumulatedSquareSum / SamplesPerColumn;

                                if (meanSquare > 1.0f)
                                {
                                    // A loud audio source right next to the sensor may result in mean square
                                    // values greater than 1.0. Cap it at 1.0f for display purposes.
                                    meanSquare = 1.0f;
                                }

                                // Calculate energy in dB, in the range [MinEnergy, 0], where MinEnergy < 0
                                float energy = MinEnergy;

                                if (meanSquare > 0)
                                {
                                    energy = (float)(10.0 * Math.Log10(meanSquare));
                                }

                                // energyLock guards the circular energy buffer shared with the renderer.
                                lock (this.energyLock)
                                {
                                    // Normalize values to the range [0, 1] for display
                                    this.energy[this.energyIndex] = (MinEnergy - energy) / MinEnergy;
                                    //  this.energy[this.energyIndex] = energy;
                                    this.energyIndex = (this.energyIndex + 1) % this.energy.Length;
                                    ++this.newEnergyAvailable;
                                }

                                // Reset accumulators for the next column.
                                this.accumulatedSquareSum   = 0;
                                this.accumulatedSampleCount = 0;
                            }
                            // Forward the raw buffer and energy history for further analysis.
                            audioPreAnalysis.analyzeAudio(this.audioBuffer, this.energy);
                        }
                    }
                }
            }
            catch (Exception)
            {
                // Ignore if the frame is no longer available
            }
        }