Example #1
        void audioBeamFrameReader_FrameArrived(object sender, AudioBeamFrameArrivedEventArgs e)
        {
            using (var audioFrames = e.FrameReference.AcquireBeamFrames()) {
                if (audioFrames == null)
                {
                    return;
                }

                var subFrame = audioFrames[0].SubFrames[0];

                // Direction of the sound
                LineBeamAngle.Angle = (int)(subFrame.BeamAngle * 180 / Math.PI);

                // Display the beam angle, confidence, and number of bodies in the beam direction
                Text1.Text = (subFrame.BeamAngle * 180.0f / (float)Math.PI).ToString();
                Text2.Text = subFrame.BeamAngleConfidence.ToString();
                Text3.Text = subFrame.AudioBodyCorrelations.Count.ToString();

                // If someone is in the beam direction, save their TrackingId
                if (subFrame.AudioBodyCorrelations.Count != 0)
                {
                    AudioTrackingId = subFrame.AudioBodyCorrelations[0].BodyTrackingId;
                }
                else
                {
                    AudioTrackingId = ulong.MaxValue;
                }
            }
        }
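All of the handlers on this page assume a Kinect v2 sensor with an open AudioBeamFrameReader. A minimal setup sketch (an assumption; the examples themselves do not show it):

        KinectSensor kinect = KinectSensor.GetDefault();
        kinect.Open();

        // Open a reader on the audio source and subscribe the handler above.
        AudioBeamFrameReader audioBeamFrameReader = kinect.AudioSource.OpenReader();
        audioBeamFrameReader.FrameArrived += audioBeamFrameReader_FrameArrived;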
Example #2
        private void Reader_FrameArrived(object sender, AudioBeamFrameArrivedEventArgs e)
        {
            AudioBeamFrameReference frameReference = e.FrameReference;
            AudioBeamFrameList      frameList      = frameReference.AcquireBeamFrames();

            if (frameList != null)
            {
                using (frameList)
                {
                    byte[] audioBuffer = new byte[_kinect.AudioSource.SubFrameLengthInBytes];
                    IReadOnlyList <AudioBeamSubFrame> subFrameList = frameList[0].SubFrames;
                    foreach (AudioBeamSubFrame subFrame in subFrameList)
                    {
                        int bytesRecorded = (int)subFrame.FrameLengthInBytes;
                        subFrame.CopyFrameDataToArray(audioBuffer);


                        if (_waveBufferPos + bytesRecorded < _waveBuffer.Length)
                        {
                            Array.Copy(audioBuffer, 0, _waveBuffer, _waveBufferPos, bytesRecorded);
                            _waveBufferPos += bytesRecorded;
                        }
                        else
                        {
                            PublishBuffer();

                            Array.Copy(audioBuffer, _waveBuffer, bytesRecorded);
                            _waveBufferPos = bytesRecorded;
                        }
                    }
                }
            }
        }
Example #3
        // http://mtaulty.com/CommunityServer/blogs/mike_taultys_blog/archive/2014/10/01/kinect-for-windows-v2-hello-audio-world-for-the-net-windows-app-developer-amp-harmonica-player.aspx
        void audioBeamFrameReader_FrameArrived(AudioBeamFrameReader sender,
                                               AudioBeamFrameArrivedEventArgs args)
        {
            using (var audioFrame =
                       args.FrameReference.AcquireBeamFrames() as AudioBeamFrameList) {
                if (audioFrame == null)
                {
                    return;
                }

                for (int i = 0; i < audioFrame.Count; i++)
                {
                    using (var frame = audioFrame[i]) {
                        for (int j = 0; j < frame.SubFrames.Count; j++)
                        {
                            using (var subFrame = frame.SubFrames[j]) {
                                // Direction of the sound
                                LineBeamAngle.Angle =
                                    (int)(subFrame.BeamAngle * 180 / Math.PI);

                                // Confidence of the sound direction [0-1]
                                TextBeamAngleConfidence.Text =
                                    subFrame.BeamAngleConfidence.ToString();
                            }
                        }
                    }
                }
            }
        }
Example #4
        void audioBeamFrameReader_FrameArrived(
            AudioBeamFrameReader sender, AudioBeamFrameArrivedEventArgs args)
        {
            using (var audioFrame =
                       args.FrameReference.AcquireBeamFrames() as AudioBeamFrameList) {
                if (audioFrame == null)
                {
                    return;
                }

                for (int i = 0; i < audioFrame.Count; i++)
                {
                    using (var frame = audioFrame[i]) {
                        for (int j = 0; j < frame.SubFrames.Count; j++)
                        {
                            using (var subFrame = frame.SubFrames[j]) {
                                subFrame.CopyFrameDataToArray(audioBuffer);

                                waveFile.Write(audioBuffer);

                                // Note: the actual data is 32-bit IEEE float samples
                                //float data1 = BitConverter.ToSingle( audioBuffer, 0 );
                                //float data2 = BitConverter.ToSingle( audioBuffer, 4 );
                                //float data3 = BitConverter.ToSingle( audioBuffer, 8 );
                            }
                        }
                    }
                }
            }
        }
Example #5
        void audioBeamFrameReader_FrameArrived( object sender,
            AudioBeamFrameArrivedEventArgs e )
        {
            using ( var audioFrame =
                e.FrameReference.AcquireBeamFrames() as AudioBeamFrameList ) {

                if ( audioFrame == null ) {
                    return;
                }

                for ( int i = 0; i < audioFrame.Count; i++ ) {
                    using ( var frame = audioFrame[i] ) {
                        Trace.WriteLine( frame.SubFrames.Count );
                        for ( int j = 0; j < frame.SubFrames.Count; j++ ) {
                            using ( var subFrame = frame.SubFrames[j] ) {
                                subFrame.CopyFrameDataToArray( audioBuffer );

                                waveFile.Write( audioBuffer );

                                // Note: the actual data is 32-bit IEEE float samples
                                //float data1 = BitConverter.ToSingle( audioBuffer, 0 );
                                //float data2 = BitConverter.ToSingle( audioBuffer, 4 );
                                //float data3 = BitConverter.ToSingle( audioBuffer, 8 );
                            }
                        }
                    }
                }
            }
        }
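The waveFile helper used in Examples #4 and #5 is not shown. A minimal sketch of what it might look like, assuming NAudio's WaveFileWriter (the original class may well be hand-rolled); Kinect v2 delivers mono 32-bit IEEE float samples at 16 kHz:

        using NAudio.Wave;

        class WaveFile : IDisposable
        {
            // Kinect v2 audio format: 16 kHz, 32-bit IEEE float, 1 channel.
            readonly WaveFileWriter writer = new WaveFileWriter(
                "kinect-audio.wav", WaveFormat.CreateIeeeFloatWaveFormat(16000, 1));

            public void Write(byte[] buffer)
            {
                writer.Write(buffer, 0, buffer.Length);
            }

            public void Dispose()
            {
                writer.Dispose();
            }
        }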
Example #6
        private void Audio_FrameArrived(object sender, AudioBeamFrameArrivedEventArgs e)
        {
            AudioBeamFrameReference frameReference = e.FrameReference;

            using (var frameList = frameReference.AcquireBeamFrames()) {
                if (frameList == null)
                {
                    return;
                }

                IReadOnlyList <AudioBeamSubFrame> subFrameList = frameList[0].SubFrames;
                List <float> beamAngles = new List <float> {
                };
                foreach (var frame in subFrameList)
                {
                    if (frame.BeamAngleConfidence > 0.3)
                    {
                        beamAngles.Add(frame.BeamAngle);
                    }
                }

                byte[] bytes = new byte[beamAngles.Count * 4 + 1];
                Buffer.BlockCopy(BitConverter.GetBytes(beamAngles.Count), 0, bytes, 0, 1); // first byte: number of beams (assumes fewer than 256)
                for (int i = 0; i < beamAngles.Count; ++i)
                {
                    Buffer.BlockCopy(BitConverter.GetBytes(beamAngles[i]), 0, bytes, i * 4 + 1, 4);
                }

                this.client.Publish("/kinect/detected/audio", bytes);
            }

            ++this.kinectFrameCount;
        }
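On the subscriber side, the payload published above can be unpacked by reading the count byte and then the 4-byte little-endian floats. A hypothetical decoding sketch (not part of the example):

        byte beamCount = bytes[0];
        float[] angles = new float[beamCount];
        for (int i = 0; i < beamCount; ++i)
        {
            angles[i] = BitConverter.ToSingle(bytes, i * 4 + 1);
        }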
Example #7
        void audioBeamFrameReader_FrameArrived( object sender, 
                                                AudioBeamFrameArrivedEventArgs e )
        {
            using ( var audioFrame =
                e.FrameReference.AcquireBeamFrames() as AudioBeamFrameList ) {

                if ( audioFrame == null ) {
                    return;
                }

                for ( int i = 0; i < audioFrame.Count; i++ ) {
                    using ( var frame = audioFrame[i] ) {
                        for ( int j = 0; j < frame.SubFrames.Count; j++ ) {
                            using ( var subFrame = frame.SubFrames[j] ) {
                                // Direction of the sound
                                LineBeamAngle.Angle =
                                    (int)(subFrame.BeamAngle * 180 / Math.PI);

                                // Confidence of the sound direction [0-1]
                                TextBeamAngleConfidence.Text =
                                    subFrame.BeamAngleConfidence.ToString();
                            }
                        }
                    }
                }
            }
        }
Example #8
        public void _audioBeamReader_FrameArrived(object sender, AudioBeamFrameArrivedEventArgs e)
        {
            var frames = e.FrameReference.AcquireBeamFrames();

            if (ReferenceEquals(null, frames))
            {
                return;
            }

            foreach (var frame in frames)
            {
                if (ReferenceEquals(null, frame) || ReferenceEquals(null, frame.SubFrames))
                {
                    return;
                }
                foreach (var subFrame in frame.SubFrames)
                {
                    if (ReferenceEquals(null, subFrame.AudioBodyCorrelations))
                    {
                        return;
                    }
                    foreach (var audioBodyCorrelation in subFrame.AudioBodyCorrelations)
                    {
                        CheckPerson.Instance.CheckIfExistsPerson(audioBodyCorrelation.BodyTrackingId);
                        var time = _dataAccessFacade.GetSceneInUseAccess()?.GetLocation();
                        if (time.HasValue)
                        {
                            _dataAccessFacade.GetEventAccess().Add(CheckPerson.Instance.PersonsId[audioBodyCorrelation.BodyTrackingId], "Voice", "Talked", time.Value, 1);
                        }

                        //Console.WriteLine("Tiempo: {0}, Llegó Voz de {1}", DateTime.Now, audioBodyCorrelation.BodyTrackingId);
                    }
                }
            }
        }
Example #9
        private void Audio_FrameArrived(object sender, AudioBeamFrameArrivedEventArgs e)
        {
            AudioBeamFrameReference frameReference = e.FrameReference;

            using (var frameList = frameReference.AcquireBeamFrames()) {
                if (frameList == null)
                {
                    return;
                }

                IReadOnlyList <AudioBeamSubFrame> subFrameList = frameList[0].SubFrames;

                foreach (var frame in subFrameList)
                {
                    if (frame.BeamAngleConfidence > 0.3)
                    {
                        byte[] bytes = new byte[4];
                        Buffer.BlockCopy(BitConverter.GetBytes(frame.BeamAngle), 0, bytes, 0, 4);
                        this.client.Publish("/kinect/detected/audio", bytes);
                    }

#if PUBLISH_RAW_AUDIO
                    byte[] audioBuffer = new byte[this.kinectSensor.AudioSource.SubFrameLengthInBytes];
                    frame.CopyFrameDataToArray(audioBuffer);
                    byte[] convertedBuffer = new byte[audioBuffer.Length >> 1];
                    ConvertKinectAudioStream(audioBuffer, convertedBuffer);
                    this.client.Publish("/kinect/stream/rawaudio", convertedBuffer);
#endif
                    ++this.kinectFrameCount;
                }
            }

            this.client.Publish("/kinect/audio/alive", new byte[1]);
        }
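ConvertKinectAudioStream is not shown in Example #9. A plausible sketch, assuming it converts the 32-bit IEEE float samples to 16-bit signed PCM, which would match the half-size output buffer above:

        static void ConvertKinectAudioStream(byte[] floatBuffer, byte[] pcm16Buffer)
        {
            for (int i = 0; i < floatBuffer.Length; i += sizeof(float))
            {
                float sample = BitConverter.ToSingle(floatBuffer, i);

                // Clamp to [-1, 1], then scale to the 16-bit range.
                sample = Math.Max(-1.0f, Math.Min(1.0f, sample));
                short pcm = (short)(sample * short.MaxValue);

                pcm16Buffer[i / 2]     = (byte)(pcm & 0xff);
                pcm16Buffer[i / 2 + 1] = (byte)((pcm >> 8) & 0xff);
            }
        }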
Example #10
        private void Reader_AudioBeamFrameArrived(object sender, AudioBeamFrameArrivedEventArgs e)
        {
            AudioBeamFrameReference frameReference = e.FrameReference;
            AudioBeamFrameList      frameList      = frameReference.AcquireBeamFrames();

            if (frameList != null)
            {
                using (frameList)
                {
                    // Only one audio beam is supported. Get the sub frame list for this beam
                    IReadOnlyList <AudioBeamSubFrame> subFrameList = frameList[0].SubFrames;

                    this.fullAudio = new Byte[audioBuffer.Length * subFrameList.Count];
                    int start = 0;
                    foreach (AudioBeamSubFrame subFrame in subFrameList)
                    {
                        subFrame.CopyFrameDataToArray(this.audioBuffer);
                        this.audioBuffer.CopyTo(this.fullAudio, start);
                        start += this.audioBuffer.Length;
                    }

                    this.publisher.SendByteArray(this.fullAudio);
                    this.fullAudio = null;
                }
            }
        }
Example #11
        public void Reader_FrameArrived(object sender, AudioBeamFrameArrivedEventArgs e)
        {
            parent.audioHandler.Reader_FrameArrived(sender, e);
            if (MainWindow.myState == MainWindow.States.freestyle)
            {
                var uriSource = new Uri(@"/PresentationTrainer;component/Images/ic_audio-feedback-1.png", UriKind.Relative);

                float currentVolume = parent.audioHandler.energy[parent.audioHandler.energyIndex];
                if (Math.Abs(currentVolume) < 0.25)
                {
                    uriSource = new Uri(@"/PT20;component/Images/ic_audio-feedback-1.png", UriKind.Relative);
                }
                else if (Math.Abs(currentVolume) < 0.5)
                {
                    uriSource = new Uri(@"/PT20;component/Images/ic_audio-feedback-2.png", UriKind.Relative);
                }
                else if (Math.Abs(currentVolume) < 0.75)
                {
                    uriSource = new Uri(@"/PT20;component/Images/ic_audio-feedback-3.png", UriKind.Relative);
                }
                else
                {
                    uriSource = new Uri(@"/PT20;component/Images/ic_audio-feedback-4.png", UriKind.Relative);
                }


                MicroPhoneImage.Source = new BitmapImage(uriSource);
            }
            else
            {
                //  parent.audioHandler.UpdateEnergy(sender, null);
                kinectImage.Source = parent.audioHandler.energyBitmap;
            }
        }
Example #12
        private void Reader_FrameArrived(AudioBeamFrameReader sender, AudioBeamFrameArrivedEventArgs e)
        {
            using (var audioFrame = e.FrameReference.AcquireBeamFrames() as AudioBeamFrameList)
            {
                if (audioFrame == null)
                {
                    return;
                }

                for (int i = 0; i < audioFrame.Count; i++)
                {
                    using (var frame = audioFrame[i])
                    {
                        for (int j = 0; j < frame.SubFrames.Count; j++)
                        {
                            using (var subFrame = frame.SubFrames[j])
                            {
                                subFrame.CopyFrameDataToArray(this.audioBuffer);
                                stream.Write(audioBuffer, 0, audioBuffer.Length);
                                size += audioBuffer.Length;
                            }
                        }
                    }
                }
            }
            //AudioBeamFrameList frameList = (AudioBeamFrameList)e.FrameReference.AcquireBeamFrames();

            //if (frameList != null)
            //{
            //    //using(frameList)
            //   // {
            //        IReadOnlyList<AudioBeamSubFrame> subFrameList = frameList[0].SubFrames;

            //        // Loop over all sub frames, extract audio buffer and beam informationIReadOnlyList<AudioBeamFrame>
            //        foreach (AudioBeamSubFrame subFrame in subFrameList)
            //        {
            //                subFrame.CopyFrameDataToArray(this.audioBuffer);

            //                stream.Write(audioBuffer, 0, audioBuffer.Length);
            //                size += audioBuffer.Length;
            //        subFrame.Dispose();
            //            }
            //    frameList.Dispose();

            //   // }


            //}
        }
Example #13
        void audioBeamFrameReader_FrameArrived( object sender, AudioBeamFrameArrivedEventArgs e )
        {
            using ( var audioFrame = e.FrameReference.AcquireBeamFrames() ) {
                if ( audioFrame == null ) {
                    return;
                }

                var subFrame = audioFrame[0].SubFrames[0];

                // Direction of the sound
                LineBeamAngle.Angle = (int)(subFrame.BeamAngle * 180 / Math.PI);

                // Confidence of the sound direction [0-1]
                TextBeamAngleConfidence.Text = subFrame.BeamAngleConfidence.ToString();
            }
        }
Example #14
 void audioReader_FrameArrived(object sender, AudioBeamFrameArrivedEventArgs e)
 {
     using (AudioBeamFrameList frames = e.FrameReference.AcquireBeamFrames())
     {
         if (frames != null)
         {
             for (int i = 0; i < frames.Count; i++)
             {
                 KinectBase.AudioPositionEventArgs args = new KinectBase.AudioPositionEventArgs();
                 args.audioAngle = frames[i].AudioBeam.BeamAngle * (180.0 / Math.PI);  //Convert from radians to degrees
                 args.confidence = frames[i].AudioBeam.BeamAngleConfidence;
                 args.kinectID   = kinectID;
                 OnAudioPositionChanged(args);
             }
         }
     }
 }
Example #15
        void audioBeamFrameReader_FrameArrived(object sender, AudioBeamFrameArrivedEventArgs e)
        {
            using (var audioFrame = e.FrameReference.AcquireBeamFrames()) {
                if (audioFrame == null)
                {
                    return;
                }

                var subFrame = audioFrame[0].SubFrames[0];

                // Direction of the sound
                LineBeamAngle.Angle = (int)(subFrame.BeamAngle * 180 / Math.PI);

                // Confidence of the sound direction [0-1]
                TextBeamAngleConfidence.Text = subFrame.BeamAngleConfidence.ToString();
            }
        }
Example #16
        void audioBeamFrameReader_FrameArrived( object sender, AudioBeamFrameArrivedEventArgs e )
        {
            using ( var audioFrame = e.FrameReference.AcquireBeamFrames() ) {
                if ( audioFrame == null ) {
                    return;
                }

                var subFrame = audioFrame[0].SubFrames[0];
                subFrame.CopyFrameDataToArray( audioBuffer );

                waveFile.Write( audioBuffer );

                // (Example) the actual samples are 32-bit IEEE floats, so convert them
                float audioData1 = BitConverter.ToSingle( audioBuffer, 0 );
                float audioData2 = BitConverter.ToSingle( audioBuffer, 4 );
                float audioData3 = BitConverter.ToSingle( audioBuffer, 8 );
            }
        }
Example #17
        void audioBeamFrameReader_FrameArrived(object sender, AudioBeamFrameArrivedEventArgs e)
        {
            using (var audioFrame = e.FrameReference.AcquireBeamFrames()) {
                if (audioFrame == null)
                {
                    return;
                }

                var subFrame = audioFrame[0].SubFrames[0];
                subFrame.CopyFrameDataToArray(audioBuffer);

                waveFile.Write(audioBuffer);

                // (Example) the actual samples are 32-bit IEEE floats, so convert them
                float audioData1 = BitConverter.ToSingle(audioBuffer, 0);
                float audioData2 = BitConverter.ToSingle(audioBuffer, 4);
                float audioData3 = BitConverter.ToSingle(audioBuffer, 8);
            }
        }
Example #18
        private void onAudioFrameArrived(object sender, AudioBeamFrameArrivedEventArgs e)
        {
            AudioBeamFrameReference audioFrameReference = e.FrameReference;

            try
            {
                AudioBeamFrameList frameList = audioFrameReference.AcquireBeamFrames();
                if (frameList != null)
                {
                    using (frameList)
                    {
                        IReadOnlyList <AudioBeamSubFrame> subFrameList = frameList[0].SubFrames;

                        foreach (AudioBeamSubFrame subFrame in subFrameList)
                        {
                            this.audioContainer.utcTime             = (DateTime.UtcNow - new DateTime(1970, 1, 1)).TotalSeconds;
                            this.audioContainer.beamAngle           = subFrame.BeamAngle;
                            this.audioContainer.beamAngleConfidence = subFrame.BeamAngleConfidence;
                            byte[] array = new byte[this.audioSource.SubFrameLengthInBytes];
                            subFrame.CopyFrameDataToArray(array);
                            for (int i = 0; i < array.Length; i += sizeof(float))
                            {
                                audioContainer.audioStream[(int)(i / sizeof(float))] = BitConverter.ToSingle(array, i);
                            }
                            string jsonString = JsonConvert.SerializeObject(this.audioContainer);
                            int    diff       = 4100 - jsonString.Length;
                            for (int i = 0; i < diff; i++)
                            {
                                jsonString += " ";
                            }
                            byte[] transmittedData = new byte[jsonString.Length * sizeof(char)];
                            System.Buffer.BlockCopy(jsonString.ToCharArray(), 0, transmittedData, 0, transmittedData.Length);
                            this.audioConnector.Broadcast(transmittedData);
                            subFrame.Dispose();
                        }
                    }
                    frameList.Dispose();
                }
            }
            catch
            {
                // Ignore if the frame is no longer available
            }
        }
Example #19
        private void beamFrameArrived(object sender, AudioBeamFrameArrivedEventArgs e)
        {
            AudioBeamFrameReference frameReference = e.FrameReference;

            AudioBeamFrameList frameList = frameReference.AcquireBeamFrames();

            if (frameList != null)
            {
                // AudioBeamFrameList is IDisposable
                using (frameList)
                {
                    float readAngle = frameList[0].AudioBeam.BeamAngle;
                    if (readAngle != this.beamAngle)
                    {
                        //Console.WriteLine(subFrame.BeamAngle);
                        this.beamAngle = readAngle;
                        foreach (BeamAngleListener listener in beamAngleListeners)
                        {
                            listener.onBeamAngleChanged(beamAngle);
                        }
                    }

                    /*
                     * // Only one audio beam is supported. Get the sub frame list for this beam
                     * IReadOnlyList<AudioBeamSubFrame> subFrameList = frameList[0].SubFrames;
                     *
                     * // Loop over all sub frames, extract audio buffer and beam information
                     * foreach (AudioBeamSubFrame subFrame in subFrameList)
                     * {
                     *  if (subFrame.BeamAngle != this.beamAngle)
                     *  {
                     *      //Console.WriteLine(subFrame.BeamAngle);
                     *      this.beamAngle = subFrame.BeamAngle;
                     *      foreach (BeamAngleListener listener in beamAngleListeners)
                     *      {
                     *          listener.onBeamAngleChanged(subFrame.BeamAngle);
                     *      }
                     *  }
                     * }
                     * */
                }
            }
        }
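The BeamAngleListener type in Example #19 is not shown. Judging from its use, a plausible contract would be:

        interface BeamAngleListener
        {
            void onBeamAngleChanged(float beamAngle);
        }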
Example #20
        // http://mtaulty.com/CommunityServer/blogs/mike_taultys_blog/archive/2014/10/01/kinect-for-windows-v2-hello-audio-world-for-the-net-windows-app-developer-amp-harmonica-player.aspx
        void audioBeamFrameReader_FrameArrived(AudioBeamFrameReader sender,
                                               AudioBeamFrameArrivedEventArgs args)
        {
            using (var audioFrame =
                       args.FrameReference.AcquireBeamFrames() as AudioBeamFrameList) {
                if (audioFrame == null)
                {
                    return;
                }

                for (int i = 0; i < audioFrame.Count; i++)
                {
                    using (var frame = audioFrame[i]) {
                        for (int j = 0; j < frame.SubFrames.Count; j++)
                        {
                            using (var subFrame = frame.SubFrames[j]) {
                                // Direction of the sound
                                LineBeamAngle.Angle =
                                    (int)(subFrame.BeamAngle * 180 / Math.PI);

                                // Display the beam angle, confidence, and number of bodies in the beam direction
                                TextBeamAngleConfidence.Text =
                                    subFrame.BeamAngleConfidence.ToString();
                                TextAudioBodyCorrelations.Text =
                                    subFrame.AudioBodyCorrelations.Count.ToString();

                                // If someone is in the beam direction, save their TrackingId
                                if (subFrame.AudioBodyCorrelations.Count != 0)
                                {
                                    AudioTrackingId =
                                        subFrame.AudioBodyCorrelations[0].BodyTrackingId;
                                }
                                else
                                {
                                    AudioTrackingId = ulong.MaxValue;
                                }
                            }
                        }
                    }
                }
            }
        }
Example #21
        void audioBeamFrameReader_FrameArrived(object sender, AudioBeamFrameArrivedEventArgs e)
        {
            var audioBeamFrames = e.FrameReference.AcquireBeamFrames();

            if (audioBeamFrames != null)
            {
                var audioBeamFrame = audioBeamFrames[0];

                foreach (var subFrame in audioBeamFrame.SubFrames)
                {
                    var buffer = new byte[subFrame.FrameLengthInBytes];

                    subFrame.CopyFrameDataToArray(buffer);

                    lock (audioFrameQueues)
                    {
                        foreach (var queue in audioFrameQueues)
                        {
                            if (queue.Count > 10)
                            {
                                queue.Dequeue();
                            }
                            queue.Enqueue(buffer);
                        }
                    }

                    lock (audioFrameReady)
                        foreach (var autoResetEvent in audioFrameReady)
                        {
                            autoResetEvent.Set();
                        }

                    //Console.WriteLine("subframe " + audioSubFrames++ + "\t" + subFrame.FrameLengthInBytes + "\t" + audioBeamFrame.SubFrames.Count);
                    subFrame.Dispose();
                }

                audioBeamFrame.Dispose();
                audioBeamFrames.Dispose();
            }
        }
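Example #21 only shows the producer side of the queue/event pairs. A hypothetical consumer (an assumption; the names mirror the fields above) would wait on its AutoResetEvent and drain its own queue:

        void ConsumeAudio(Queue<byte[]> queue, AutoResetEvent frameReady)
        {
            while (true)
            {
                frameReady.WaitOne();

                byte[] buffer = null;
                lock (audioFrameQueues)
                {
                    if (queue.Count > 0)
                    {
                        buffer = queue.Dequeue();
                    }
                }

                if (buffer != null)
                {
                    // Process the 32-bit IEEE float samples in buffer here.
                }
            }
        }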
Example #22
        private void OnAudioBeamFrameArrived(object sender, AudioBeamFrameArrivedEventArgs e)
        {
            var audioBeamList = e.AudioBeamFrameList;

            if (audioBeamList != null)
            {
                var f = new AudioFrame(audioBeamList);
                f.Timestamp = e.Timestamp;
                using (var ms = new MemoryStream())
                {
                    f.Serialize(ms);
                    // Cache
                    byte[] dataToSend = ms.ToArray();
                    lock (connectedAudioClients)
                    {
                        foreach (var client in connectedAudioClients)
                        {
                            client.Write(dataToSend);
                        }
                    }
                }
            }
        }
Example #23
        public void Reader_FrameArrived(object sender, AudioBeamFrameArrivedEventArgs e)
        {
            parent.audioHandler.Reader_FrameArrived(sender, e);
            if (MainWindow.myState == MainWindow.States.freestyle)
            {
                var uriSource = new Uri(@"/PresentationTrainer;component/Images/ic_audio-feedback-1.png", UriKind.Relative);

                float currentVolume = parent.audioHandler.energy[parent.audioHandler.energyIndex];
                if (Math.Abs(currentVolume) < 0.25)
                {
                    uriSource = new Uri(@"/PresentationTrainer;component/Images/ic_audio-feedback-1.png", UriKind.Relative);
                }
                else if (Math.Abs(currentVolume) < 0.5)
                {
                    uriSource = new Uri(@"/PresentationTrainer;component/Images/ic_audio-feedback-2.png", UriKind.Relative);
                }
                else if (Math.Abs(currentVolume) < 0.75)
                {
                    uriSource = new Uri(@"/PresentationTrainer;component/Images/ic_audio-feedback-3.png", UriKind.Relative);
                }
                else
                {
                    uriSource = new Uri(@"/PresentationTrainer;component/Images/ic_audio-feedback-4.png", UriKind.Relative);
                }

                MicroPhoneImage.Source = new BitmapImage(uriSource);
            }
            else
            {
                //  parent.audioHandler.UpdateEnergy(sender, null);
                kinectImage.Source = parent.audioHandler.energyBitmap;
            }
        }
Example #24
        void audioBeamFrameReader_FrameArrived(object sender, AudioBeamFrameArrivedEventArgs e)
        {
            var audioBeamFrames = e.FrameReference.AcquireBeamFrames();
            if (audioBeamFrames != null)
            {
                var audioBeamFrame = audioBeamFrames[0];

                foreach(var subFrame in audioBeamFrame.SubFrames)
                {
                    var buffer = new byte[subFrame.FrameLengthInBytes];

                    subFrame.CopyFrameDataToArray(buffer);

                    lock (audioFrameQueues)
                    {
                        foreach (var queue in audioFrameQueues)
                        {
                            if (queue.Count > 10)
                                queue.Dequeue();
                            queue.Enqueue(buffer);
                        }
                    }

                    lock (audioFrameReady)
                        foreach (var autoResetEvent in audioFrameReady)
                            autoResetEvent.Set();

                    //Console.WriteLine("subframe " + audioSubFrames++ + "\t" + subFrame.FrameLengthInBytes + "\t" + audioBeamFrame.SubFrames.Count);
                    subFrame.Dispose();
                }

                audioBeamFrame.Dispose();
                audioBeamFrames.Dispose();

            }
        }
Example #25
        void audioBeamFrameReader_FrameArrived( object sender,
                                        AudioBeamFrameArrivedEventArgs e )
        {
            using ( var audioFrame =
                e.FrameReference.AcquireBeamFrames() as AudioBeamFrameList ) {

                if ( audioFrame == null ) {
                    return;
                }

                for ( int i = 0; i < audioFrame.Count; i++ ) {
                    using ( var frame = audioFrame[i] ) {
                        for ( int j = 0; j < frame.SubFrames.Count; j++ ) {
                            using ( var subFrame = frame.SubFrames[j] ) {
                                // Direction of the sound
                                LineBeamAngle.Angle =
                                    (int)(subFrame.BeamAngle * 180 / Math.PI);

                                // Confidence of the sound direction [0-1]
                                TextBeamAngleConfidence.Text =
                                    subFrame.BeamAngleConfidence.ToString();

                                // If someone is in the beam direction, save their TrackingId
                                if ( subFrame.AudioBodyCorrelations.Count != 0 ) {
                                    AudioTrackingId =
                                        subFrame.AudioBodyCorrelations[0].BodyTrackingId;
                                }
                                else {
                                    AudioTrackingId = ulong.MaxValue;
                                }
                            }
                        }
                    }
                }
            }
        }
Example #26
        private void Reader_FrameArrived(object sender, AudioBeamFrameArrivedEventArgs e)
        {
            AudioBeamFrameReference frameReference = e.FrameReference;
            AudioBeamFrameList      frameList      = frameReference.AcquireBeamFrames();

            if (frameList != null)
            {
                using (frameList)
                {
                    IReadOnlyList <AudioBeamSubFrame> subFrameList = frameList[0].SubFrames;

                    // Loop over all sub frames, extract audio buffer and beam information
                    foreach (AudioBeamSubFrame subFrame in subFrameList)
                    {
                        subFrame.CopyFrameDataToArray(this.audioBuffer);
                        if (fileStream.CanWrite == true)
                        {
                            fileStream.Write(audioBuffer, 0, audioBuffer.Length);
                            size += audioBuffer.Length;
                        }
                    }
                }
            }
        }
Example #27
 /// <summary>
 /// Handles the audio frame data arriving from the sensor
 /// </summary>
 /// <param name="sender">object sending the event</param>
 /// <param name="e">event arguments</param>
 private void Reader_FrameArrived(object sender, AudioBeamFrameArrivedEventArgs e)
 {
     AudioBeamFrameReference frameReference = e.FrameReference;
     AudioBeamFrameList frameList = frameReference.AcquireBeamFrames();
     if (frameList != null)
     {
         // AudioBeamFrameList is IDisposable
         using (frameList)
         {
             // Only one audio beam is supported. Get the sub frame list for this beam
             IReadOnlyList<AudioBeamSubFrame> subFrameList = frameList[0].SubFrames;
             // Loop over all sub frames, extract audio buffer and beam information
             foreach (AudioBeamSubFrame subFrame in subFrameList)
             {
                 // Check if beam angle and/or confidence have changed
                 bool updateBeam = false;
                 if (subFrame.BeamAngle != this.beamAngle)
                 {
                     this.beamAngle = subFrame.BeamAngle;
                     updateBeam = true;
                 }
                 if (subFrame.BeamAngleConfidence != this.beamAngleConfidence)
                 {
                     this.beamAngleConfidence = subFrame.BeamAngleConfidence;
                     updateBeam = true;
                 }
                 if (updateBeam)
                 {
                     // Refresh display of audio beam
                     this.AudioBeamChanged();
                 }
                 // Process audio buffer
                 subFrame.CopyFrameDataToArray(this.audioBuffer);
                 for (int i = 0; i < this.audioBuffer.Length; i += BytesPerSample)
                 {
                     // Extract the 32-bit IEEE float sample from the byte array
                     float audioSample = BitConverter.ToSingle(this.audioBuffer, i);
                     this.accumulatedSquareSum += audioSample * audioSample;
                     ++this.accumulatedSampleCount;
                     if (this.accumulatedSampleCount < SamplesPerColumn)
                     {
                         continue;
                     }
                     float meanSquare = this.accumulatedSquareSum / SamplesPerColumn;
                     if (meanSquare > 1.0f)
                     {
                         // A loud audio source right next to the sensor may result in mean square values
                         // greater than 1.0. Cap it at 1.0f for display purposes.
                         meanSquare = 1.0f;
                     }
                     // Calculate energy in dB, in the range [MinEnergy, 0], where MinEnergy < 0
                     float energy = MinEnergy;
                     if (meanSquare > 0)
                     {
                         energy = (float)(10.0 * Math.Log10(meanSquare));
                     }
                     lock (this.energyLock)
                     {
                         // Normalize values to the range [0, 1] for display
                         this.energy[this.energyIndex] = (MinEnergy - energy) / MinEnergy;
                         this.energyIndex = (this.energyIndex + 1) % this.energy.Length;
                         ++this.newEnergyAvailable;
                     }
                     this.accumulatedSquareSum = 0;
                     this.accumulatedSampleCount = 0;
                 }
             }
         }
     }
 }    
Example #28
        /// <summary>
        /// Handles the audio frame data arriving from the sensor
        /// </summary>
        /// <param name="sender">object sending the event</param>
        /// <param name="e">event arguments</param>
        public void Reader_FrameArrived(object sender, AudioBeamFrameArrivedEventArgs e)
        {
            AudioBeamFrameReference frameReference = e.FrameReference;

            try
            {
                AudioBeamFrameList frameList = frameReference.AcquireBeamFrames();

                if (frameList != null)
                {
                    // AudioBeamFrameList is IDisposable
                    using (frameList)
                    {
                        // Only one audio beam is supported. Get the sub frame list for this beam
                        IReadOnlyList <AudioBeamSubFrame> subFrameList = frameList[0].SubFrames;

                        // Loop over all sub frames, extract audio buffer and beam information
                        foreach (AudioBeamSubFrame subFrame in subFrameList)
                        {
                            subFrame.CopyFrameDataToArray(this.audioBuffer);


                            for (int i = 0; i < this.audioBuffer.Length; i += BytesPerSample)
                            {
                                // Extract the 32-bit IEEE float sample from the byte array
                                float audioSample = BitConverter.ToSingle(this.audioBuffer, i);

                                this.accumulatedSquareSum += audioSample * audioSample;
                                ++this.accumulatedSampleCount;

                                if (this.accumulatedSampleCount < SamplesPerColumn)
                                {
                                    continue;
                                }

                                float meanSquare = this.accumulatedSquareSum / SamplesPerColumn;

                                if (meanSquare > 1.0f)
                                {
                                    // A loud audio source right next to the sensor may result in mean square values
                                    // greater than 1.0. Cap it at 1.0f for display purposes.
                                    meanSquare = 1.0f;
                                }

                                // Calculate energy in dB, in the range [MinEnergy, 0], where MinEnergy < 0
                                float energy = MinEnergy;

                                if (meanSquare > 0)
                                {
                                    energy = (float)(10.0 * Math.Log10(meanSquare));
                                }

                                lock (this.energyLock)
                                {
                                    // Normalize values to the range [0, 1] for display
                                    this.energy[this.energyIndex] = (MinEnergy - energy) / MinEnergy;
                                    //  this.energy[this.energyIndex] = energy;
                                    this.energyIndex = (this.energyIndex + 1) % this.energy.Length;
                                    ++this.newEnergyAvailable;
                                }

                                this.accumulatedSquareSum   = 0;
                                this.accumulatedSampleCount = 0;
                            }
                            audioPreAnalysis.analyzeAudio(this.audioBuffer, this.energy);
                        }
                    }
                }
            }
            catch (Exception)
            {
                // Ignore if the frame is no longer available
            }
        }
Example #29
/**
 *      void _selectedController_StateChanged(object sender, XboxControllerStateChangedEventArgs e)
 *      {
 *          OnPropertyChanged("SelectedController");
 *
 *          // Where the action happens with the controller.
 *          if (SelectedController.IsAPressed)
 *          {
 *              Console.WriteLine("A is pressed");
 *          }
 *          else if (SelectedController.IsBPressed)
 *          {
 *              Console.WriteLine("B is pressed");
 *          }
 *      }
 *
 *      public XboxController SelectedController
 *      {
 *          get { return _selectedController; }
 *      }
 *
 *      volatile bool _keepRunning;
 *      XboxController _selectedController;
 *
 *      public void OnPropertyChanged(string name)
 *      {
 *          if (PropertyChanged != null)
 *          {
 *              Action a = () => { PropertyChanged(this, new PropertyChangedEventArgs(name)); };
 *              Dispatcher.BeginInvoke(a, null);
 *          }
 *      }
 *
 *
 *
 *      private void SelectedControllerChanged(object sender, RoutedEventArgs e)
 *      {
 *          _selectedController = XboxController.RetrieveController(((ComboBox)sender).SelectedIndex);
 *          OnPropertyChanged("SelectedController");
 *      }
 **/
        void audioReader_FrameArrived(object sender, AudioBeamFrameArrivedEventArgs e)
        {
            //Only interpret audio if in audible tracking state
            if (this.trackingMode == TrackingMode.AUDIBLE)
            {
                AudioBeamFrameReference frameReference = e.FrameReference;

                try
                {
                    AudioBeamFrameList frameList = frameReference.AcquireBeamFrames();

                    if (frameList != null)
                    {
                        // AudioBeamFrameList is IDisposable
                        using (frameList)
                        {
                            // Only one audio beam is supported. Get the sub frame list for this beam
                            IReadOnlyList <AudioBeamSubFrame> subFrameList = frameList[0].SubFrames;

                            // Loop over all sub frames, extract audio buffer and beam information
                            foreach (AudioBeamSubFrame subFrame in subFrameList)
                            {
                                // Check if beam angle and/or confidence have changed
                                bool updateBeam = false;

                                if (subFrame.BeamAngle != this.beamAngle)
                                {
                                    this.beamAngle = subFrame.BeamAngle;
                                    updateBeam     = true;
                                }

                                if (subFrame.BeamAngleConfidence != this.beamAngleConfidence)
                                {
                                    this.beamAngleConfidence = subFrame.BeamAngleConfidence;
                                    updateBeam = true;
                                }
                                if (updateBeam)
                                {
                                    // Refresh display of audio beam
                                    this.AudioBeamChanged();
                                }
                                else
                                {
                                    // If there has been no update in the audio beam, return the X servo velocity to 0
                                    if (panTilt.IsReady)
                                    {
                                        this.cannonXVelocity = 5;
                                        panTilt.PanX(this.CannonXVelocity);
                                    }
                                }
                            }
                        }
                    }
                }

                catch (Exception)
                {
                    // Ignore if the frame is no longer available
                }
            }
        }
Example #30
        /// <summary>
        /// Method (event handler) executed when the Kinect acquires an AudioBeam.
        /// </summary>
        /// <param name="sender">
        /// The object that raised the event; here, the Kinect.
        /// </param>
        /// <param name="e">
        /// The data passed when the event is raised.
        /// </param>
        void audioBeamFrameReader_FrameArrived(object sender, AudioBeamFrameArrivedEventArgs e)
        {
            AudioBeamFrameList beamFrames = e.FrameReference.AcquireBeamFrames();

            if (beamFrames == null)
            {
                return;
            }

            IReadOnlyList <AudioBeamSubFrame> subFrames = beamFrames[0].SubFrames;

            //(From the official sample)
            //Buffer for the data read from the audio stream.
            //Each sub frame holds 16 ms of data sampled at 16 kHz,
            //so there are 16 * 16 = 256 samples per sub frame.
            //Each sample takes 4 bytes, so 256 * 4 = 1024 bytes are allocated.
            byte[] audioBuffer = new byte[this.kinect.AudioSource.SubFrameLengthInBytes];

            foreach (AudioBeamSubFrame subFrame in subFrames)
            {
                //Get the direction the sound came from.
                //The value is in radians, -1.57 to 1.57.
                //Convert it to degrees (-90 to 90) if needed.
                float radianAngle = subFrame.BeamAngle;
                int   degreeAngle = (int)(radianAngle * 180 / Math.PI);

                //Get the confidence of the direction estimate.
                float confidence = subFrame.BeamAngleConfidence;

                //Get the users who were speaking.
                List <ulong> speakers = new List <ulong>();
                foreach (AudioBodyCorrelation audioBody in subFrame.AudioBodyCorrelations)
                {
                    speakers.Add(audioBody.BodyTrackingId);
                }

                //Copy out the audio data.
                //Compute dB (decibels) from the acquired data.
                subFrame.CopyFrameDataToArray(audioBuffer);
                float decibel = CalcDecibelWithRMS(audioBuffer);

                //Update the UI.
                this.Dispatcher.Invoke(new Action(() =>
                {
                    this.Label_BeamAngle.Content  = "BeamAngle : " + degreeAngle;
                    this.Label_Confidence.Content = "Confidence : " + confidence;
                    this.Label_Decibel.Content    = "dB : " + (decibel + 90);

                    string speakerIDs = "";
                    foreach (ulong speakerID in speakers)
                    {
                        speakerIDs += speakerID + ", ";
                    }
                    this.Label_Speaker.Content = "Speakers : " + speakerIDs;

                    this.Rectangle_BeamAngle.RenderTransform
                        = new RotateTransform(-1 * degreeAngle, 0.5, 0);

                    this.Rectangle_dBMeter.Width
                        = this.StackPanel_Container.ActualWidth
                          - this.StackPanel_Container.ActualWidth * (decibel / MinDecibel);
                }));

                break;
            }

            //If the list is not released, the next frame will not arrive.
            beamFrames.Dispose();
        }
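CalcDecibelWithRMS in Example #30 is not shown. A minimal sketch based on its name and use (an assumption): compute the RMS of the 32-bit IEEE float samples and convert it to dB.

        float CalcDecibelWithRMS(byte[] buffer)
        {
            double squareSum = 0;
            int sampleCount = buffer.Length / sizeof(float);

            for (int i = 0; i < buffer.Length; i += sizeof(float))
            {
                float sample = BitConverter.ToSingle(buffer, i);
                squareSum += sample * sample;
            }

            double rms = Math.Sqrt(squareSum / sampleCount);

            // 20*log10(rms) is <= 0 for samples in [-1, 1]; clamp silence to MinDecibel.
            return rms > 0 ? (float)(20.0 * Math.Log10(rms)) : MinDecibel;
        }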
Example #31
        private void onAudioFrameArrived(object sender, AudioBeamFrameArrivedEventArgs e)
        {
            // Return if there are no audio clients.
            if (!this.audioConnector.HasClients)
            {
                return;
            }

            // Create an audio container representing Kinect audio buffer data.
            var audioContainer = new AudioContainer();

            audioContainer.samplingFrequency  = 16000;
            audioContainer.frameLifeTime      = 0.016;
            audioContainer.numSamplesPerFrame = (int)(audioContainer.samplingFrequency * audioContainer.frameLifeTime);
            audioContainer.numBytesPerSample  = sizeof(float);
            audioContainer.audioStream        = new float[256];

            // Record the current Unix epoch timestamp.
            audioContainer.timestamp = DateTimeOffset.Now.ToUnixTimeMilliseconds();

            // TODO: add relative timestamp to audio?
            // this.audioContainer.relativeTime = e.FrameReference.RelativeTime.TotalMilliseconds;

            // Retrieve audio beams for current frame.
            AudioBeamFrameList frameList = e.FrameReference.AcquireBeamFrames();

            if (frameList == null)
            {
                return;
            }

            // Serialize all of the subframes and send as a JSON message.
            using (frameList)
            {
                // Only one audio beam is supported. Get the subframe list for the one beam.
                IReadOnlyList <AudioBeamSubFrame> subFrameList = frameList[0].SubFrames;

                // Consolidate the beam subframes into a single JSON message.
                foreach (AudioBeamSubFrame subFrame in subFrameList)
                {
                    using (subFrame)
                    {
                        audioContainer.beamAngle           = subFrame.BeamAngle;
                        audioContainer.beamAngleConfidence = subFrame.BeamAngleConfidence;

                        byte[] array = new byte[subFrame.FrameLengthInBytes];
                        subFrame.CopyFrameDataToArray(array);
                        for (int i = 0; i < array.Length; i += sizeof(float))
                        {
                            audioContainer.audioStream[(int)(i / sizeof(float))] = BitConverter.ToSingle(array, i);
                        }

                        // Send audio data to clients.
                        string json = JsonConvert.SerializeObject(
                            audioContainer,
                            new JsonSerializerSettings { ContractResolver = new AudioContractResolver() }) + "\n";
                        byte[] bytes = System.Text.Encoding.ASCII.GetBytes(json);
                        this.audioConnector.Broadcast(bytes);
                    }
                }
            }
        }
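The AudioContainer type serialized in Example #31 is not shown. A plausible shape, inferred from the fields the handler assigns (an assumption, not the real type):

        class AudioContainer
        {
            public int     samplingFrequency;
            public double  frameLifeTime;
            public int     numSamplesPerFrame;
            public int     numBytesPerSample;
            public long    timestamp;
            public float   beamAngle;
            public float   beamAngleConfidence;
            public float[] audioStream;
        }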
Example #32
/**
        void _selectedController_StateChanged(object sender, XboxControllerStateChangedEventArgs e)
        {
            OnPropertyChanged("SelectedController");

            // Where the action happens with the controller. 
            if (SelectedController.IsAPressed) 
            {
                Console.WriteLine("A is pressed");
            }
            else if (SelectedController.IsBPressed)
            {
                Console.WriteLine("B is pressed");
            }
        }
        
        public XboxController SelectedController
        {
            get { return _selectedController; }
        }

        volatile bool _keepRunning;
        XboxController _selectedController;

        public void OnPropertyChanged(string name)
        {
            if (PropertyChanged != null)
            {
                Action a = () => { PropertyChanged(this, new PropertyChangedEventArgs(name)); };
                Dispatcher.BeginInvoke(a, null);
            }
        }

         

        private void SelectedControllerChanged(object sender, RoutedEventArgs e)
        {
            _selectedController = XboxController.RetrieveController(((ComboBox)sender).SelectedIndex);
            OnPropertyChanged("SelectedController");
        }
        **/
        void audioReader_FrameArrived(object sender, AudioBeamFrameArrivedEventArgs e)
        {
            //Only interpret audio if in audible tracking state
            if (this.trackingMode == TrackingMode.AUDIBLE)
            {
                AudioBeamFrameReference frameReference = e.FrameReference;

                try
                {
                    AudioBeamFrameList frameList = frameReference.AcquireBeamFrames();

                    if (frameList != null)
                    {
                        // AudioBeamFrameList is IDisposable
                        using (frameList)
                        {
                            // Only one audio beam is supported. Get the sub frame list for this beam
                            IReadOnlyList<AudioBeamSubFrame> subFrameList = frameList[0].SubFrames;

                            // Loop over all sub frames, extract audio buffer and beam information
                            foreach (AudioBeamSubFrame subFrame in subFrameList)
                            {
                                // Check if beam angle and/or confidence have changed
                                bool updateBeam = false;

                                if (subFrame.BeamAngle != this.beamAngle)
                                {
                                    this.beamAngle = subFrame.BeamAngle;
                                    updateBeam = true;
                                }

                                if (subFrame.BeamAngleConfidence != this.beamAngleConfidence)
                                {
                                    this.beamAngleConfidence = subFrame.BeamAngleConfidence;
                                    updateBeam = true;
                                }

                                if (updateBeam)
                                {
                                    // Refresh display of audio beam
                                    this.AudioBeamChanged();
                                }
                                else
                                {
                                    // If there has been no update in the audio beam, return the X servo velocity to 0
                                    if (panTilt.IsReady)
                                    {
                                        this.cannonXVelocity = 5;
                                        panTilt.PanX(this.CannonXVelocity);
                                    }
                                }
                            }
                        }
                    }
                }
                catch (Exception)
                {
                    // Ignore if the frame is no longer available
                }
            }
        }
Example #33
        /// <summary>
        /// Handles the audio frame data arriving from the sensor
        /// </summary>
        /// <param name="sender">object sending the event</param>
        /// <param name="e">event arguments</param>
        public void Reader_FrameArrived(object sender, AudioBeamFrameArrivedEventArgs e)
        {
            AudioBeamFrameReference frameReference = e.FrameReference;

            try
            {
                AudioBeamFrameList frameList = frameReference.AcquireBeamFrames();

                if (frameList != null)
                {
                    // AudioBeamFrameList is IDisposable
                    using (frameList)
                    {
                        // Only one audio beam is supported. Get the sub frame list for this beam
                        IReadOnlyList<AudioBeamSubFrame> subFrameList = frameList[0].SubFrames;

                        // Loop over all sub frames, extract audio buffer and beam information
                        foreach (AudioBeamSubFrame subFrame in subFrameList)
                        {

                            subFrame.CopyFrameDataToArray(this.audioBuffer);

                            for (int i = 0; i < this.audioBuffer.Length; i += BytesPerSample)
                            {
                                // Extract the 32-bit IEEE float sample from the byte array
                                float audioSample = BitConverter.ToSingle(this.audioBuffer, i);

                                this.accumulatedSquareSum += audioSample * audioSample;
                                ++this.accumulatedSampleCount;

                                if (this.accumulatedSampleCount < SamplesPerColumn)
                                {
                                    continue;
                                }

                                float meanSquare = this.accumulatedSquareSum / SamplesPerColumn;

                                if (meanSquare > 1.0f)
                                {
                                    // A loud audio source right next to the sensor may result in mean square values
                                    // greater than 1.0. Cap it at 1.0f for display purposes.
                                    meanSquare = 1.0f;
                                }

                                // Calculate energy in dB, in the range [MinEnergy, 0], where MinEnergy < 0
                                float energy = MinEnergy;

                                if (meanSquare > 0)
                                {
                                    energy = (float)(10.0 * Math.Log10(meanSquare));
                                }
                                
                                lock (this.energyLock)
                                {
                                    // Normalize values to the range [0, 1] for display
                                    this.energy[this.energyIndex] = (MinEnergy - energy) / MinEnergy;
                                    this.energyIndex = (this.energyIndex + 1) % this.energy.Length;
                                    ++this.newEnergyAvailable;
                                }

                                this.accumulatedSquareSum = 0;
                                this.accumulatedSampleCount = 0;
                            }
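                            // Hand the raw byte buffer and the rolling energy history to the
                            // project-specific pre-analysis component.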
                            audioPreAnalysis.analyzeAudio(this.audioBuffer, this.energy);
                        }
                    }
                }
            }
            catch (Exception)
            {
                // Ignore if the frame is no longer available
            }
        }
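Both handlers above rely on constants and fields from the Kinect v2 SDK's AudioBasics sample. For reference, a minimal sketch of those declarations and of the reader wiring; the values marked as assumed follow the SDK sample, and the energy buffer length here is illustrative:

        // Sketch: fields assumed by the handlers above, modeled on the SDK's AudioBasics sample.
        private const int BytesPerSample = sizeof(float); // Kinect v2 audio samples are 32-bit IEEE floats
        private const int SamplesPerColumn = 40;          // samples folded into one energy value (assumed)
        private const int MinEnergy = -90;                // dB floor used to normalize energy (assumed)

        private readonly object energyLock = new object();
        private readonly float[] energy = new float[780]; // length is illustrative
        private int energyIndex;
        private int newEnergyAvailable;
        private float accumulatedSquareSum;
        private int accumulatedSampleCount;
        private byte[] audioBuffer;
        private KinectSensor kinectSensor;
        private AudioBeamFrameReader reader;

        private void InitializeAudio()
        {
            this.kinectSensor = KinectSensor.GetDefault();
            this.kinectSensor.Open();

            // Size the buffer for one audio sub frame, as CopyFrameDataToArray expects.
            AudioSource audioSource = this.kinectSensor.AudioSource;
            this.audioBuffer = new byte[audioSource.SubFrameLengthInBytes];

            this.reader = audioSource.OpenReader();
            this.reader.FrameArrived += this.Reader_FrameArrived;
        }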
Example #34
0
        /// <summary>
        /// Handles the audio frame data arriving from the sensor
        /// </summary>
        /// <param name="sender">object sending the event</param>
        /// <param name="e">event arguments</param>
        private void Reader_FrameArrived(object sender, AudioBeamFrameArrivedEventArgs e)
        {
            AudioBeamFrameReference frameReference = e.FrameReference;
            AudioBeamFrameList      frameList      = frameReference.AcquireBeamFrames();

            if (frameList != null)
            {
                // AudioBeamFrameList is IDisposable
                using (frameList)
                {
                    // Only one audio beam is supported. Get the sub frame list for this beam
                    IReadOnlyList <AudioBeamSubFrame> subFrameList = frameList[0].SubFrames;

                    // Loop over all sub frames, extract audio buffer and beam information
                    foreach (AudioBeamSubFrame subFrame in subFrameList)
                    {
                        // Check if beam angle and/or confidence have changed
                        bool updateBeam = false;

                        if (subFrame.BeamAngle != this.beamAngle)
                        {
                            this.beamAngle = subFrame.BeamAngle;
                            updateBeam     = true;
                        }

                        if (subFrame.BeamAngleConfidence != this.beamAngleConfidence)
                        {
                            this.beamAngleConfidence = subFrame.BeamAngleConfidence;
                            updateBeam = true;
                        }

                        if (updateBeam)
                        {
                            // Refresh display of audio beam
                            this.AudioBeamChanged();
                        }

                        // Process audio buffer
                        subFrame.CopyFrameDataToArray(this.audioBuffer);

                        for (int i = 0; i < this.audioBuffer.Length; i += BytesPerSample)
                        {
                            // Extract the 32-bit IEEE float sample from the byte array
                            float audioSample = BitConverter.ToSingle(this.audioBuffer, i);

                            this.accumulatedSquareSum += audioSample * audioSample;
                            ++this.accumulatedSampleCount;

                            if (this.accumulatedSampleCount < SamplesPerColumn)
                            {
                                continue;
                            }

                            float meanSquare = this.accumulatedSquareSum / SamplesPerColumn;

                            if (meanSquare > 1.0f)
                            {
                                // A loud audio source right next to the sensor may result in mean square values
                                // greater than 1.0. Cap it at 1.0f for display purposes.
                                meanSquare = 1.0f;
                            }

                            // Calculate energy in dB, in the range [MinEnergy, 0], where MinEnergy < 0
                            float energy = MinEnergy;

                            if (meanSquare > 0)
                            {
                                energy = (float)(10.0 * Math.Log10(meanSquare));
                            }

                            lock (this.energyLock)
                            {
                                // Normalize values to the range [0, 1] for display
                                this.energy[this.energyIndex] = (MinEnergy - energy) / MinEnergy;
                                this.energyIndex = (this.energyIndex + 1) % this.energy.Length;
                                ++this.newEnergyAvailable;
                            }

                            this.accumulatedSquareSum   = 0;
                            this.accumulatedSampleCount = 0;
                        }
                    }
                }
            }
        }
Example #35
0
 void audioReader_FrameArrived(object sender, AudioBeamFrameArrivedEventArgs e)
 {
     using (AudioBeamFrameList frames = e.FrameReference.AcquireBeamFrames())
     {
         if (frames != null)
         {
             for (int i = 0; i < frames.Count; i++)
             {
                 KinectBase.AudioPositionEventArgs args = new KinectBase.AudioPositionEventArgs();
                 args.audioAngle = frames[i].AudioBeam.BeamAngle * (180.0 / Math.PI);  // Convert from radians to degrees
                 args.confidence = frames[i].AudioBeam.BeamAngleConfidence;
                 args.kinectID = kinectID;
                 OnAudioPositionChanged(args);
             }
         }
     }
 }
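OnAudioPositionChanged and KinectBase.AudioPositionEventArgs are project types not shown in the snippet. A plausible sketch of the event plumbing behind them, following the standard .NET event pattern (names assumed from the snippet):

 public event EventHandler<KinectBase.AudioPositionEventArgs> AudioPositionChanged;

 protected virtual void OnAudioPositionChanged(KinectBase.AudioPositionEventArgs e)
 {
     // Null-conditional raise: no-op when there are no subscribers.
     AudioPositionChanged?.Invoke(this, e);
 }
Example #36
0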
        /// <summary>
        /// Handles the audio frame data arriving from the sensor
        /// </summary>
        /// <param name="sender">object sending the event</param>
        /// <param name="e">event arguments</param>
        private void audioReader_FrameArrived(object sender, AudioBeamFrameArrivedEventArgs e)
        {
            AudioBeamFrameReference frameReference = e.FrameReference;
            AudioBeamFrameList frameList = frameReference.AcquireBeamFrames();

            if (frameList != null)
            {
                // AudioBeamFrameList is IDisposable
                using (frameList)
                {
                    // Only one audio beam is supported. Get the sub frame list for this beam
                    IReadOnlyList<AudioBeamSubFrame> subFrameList = frameList[0].SubFrames;

                    // Loop over all sub frames, extract audio buffer and beam information
                    foreach (AudioBeamSubFrame subFrame in subFrameList)
                    {
                        foreach (AudioBodyCorrelation audi in subFrame.AudioBodyCorrelations)
                        {
                            for (int j = 0; j < bodyCount; j++)
                            {
                                if (bodies[j].TrackingId == audi.BodyTrackingId)
                                {
                                    // Process audio buffer
                                    subFrame.CopyFrameDataToArray(this.audioBuffer);
                                    // Accumulate the mean-square values over all samples in the audio buffer
                                    float sum = 0;
                                    for (int i = 0; i < this.audioBuffer.Length; i += BytesPerSample)
                                    {
                                        // Extract the 32-bit IEEE float sample from the byte array
                                        float audioSample = BitConverter.ToSingle(this.audioBuffer, i);

                                        this.accumulatedSquareSum += audioSample * audioSample;
                                        ++this.accumulatedSampleCount;

                                        if (this.accumulatedSampleCount < SamplesPerColumn)
                                        {
                                            continue;
                                        }
                                        float meanSquare = this.accumulatedSquareSum / SamplesPerColumn;

                                        if (meanSquare > 1.0f)
                                        {
                                            // A loud audio source right next to the sensor may result in mean square values
                                            // greater than 1.0. Cap it at 1.0f for display purposes.
                                            meanSquare = 1.0f;
                                        }
                                        this.accumulatedSquareSum = 0;
                                        this.accumulatedSampleCount = 0;
                                        sum += meanSquare;
                                    }
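                                    // Report this body's accumulated loudness to the
                                    // project-specific engagement tracker.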
                                    EngagementInfo.updateEngagementSound(j, sum);
                                }
                            }
                        }
                    }
                }
            }
        }
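The bodies array and bodyCount used above come from the Kinect v2 body-tracking API. A sketch of how they are typically populated (the wiring is assumed; the Kinect calls themselves are standard):

        // Sketch: populate the `bodies` array compared against AudioBodyCorrelation.BodyTrackingId.
        private Body[] bodies;
        private int bodyCount;

        private void InitializeBodyTracking(KinectSensor kinect)
        {
            this.bodyCount = kinect.BodyFrameSource.BodyCount;
            this.bodies = new Body[this.bodyCount];

            BodyFrameReader bodyReader = kinect.BodyFrameSource.OpenReader();
            bodyReader.FrameArrived += this.BodyReader_FrameArrived;
        }

        private void BodyReader_FrameArrived(object sender, BodyFrameArrivedEventArgs e)
        {
            using (BodyFrame frame = e.FrameReference.AcquireFrame())
            {
                if (frame != null)
                {
                    // Refreshes the Body objects in place; each Body then carries a
                    // TrackingId that can be matched against the audio correlations.
                    frame.GetAndRefreshBodyData(this.bodies);
                }
            }
        }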
Example #37
0
        void _audioReader_FrameArrived(AudioBeamFrameReader sender, AudioBeamFrameArrivedEventArgs args)
        {
            var frameList = args.FrameReference.AcquireBeamFrames();

            if (frameList != null)
            {
                // Only one audio beam is supported. Get the sub frame list for this beam
                IReadOnlyList <AudioBeamSubFrame> subFrameList = frameList[0].SubFrames;

                // Loop over all sub frames, extract audio buffer and beam information
                foreach (AudioBeamSubFrame subFrame in subFrameList)
                {
                    // Process audio buffer
                    subFrame.CopyFrameDataToArray(this._audioBuffer);

                    float audioSampleTotal = 0.0f;
                    for (int i = 0; i < this._audioBuffer.Length; i += _BytesPerSample)
                    {
                        // Extract the 32-bit IEEE float sample from the byte array
                        float audioSample = BitConverter.ToSingle(this._audioBuffer, i);

                        this._accumulatedSquareSum += audioSample * audioSample;
                        ++this._accumulatedSampleCount;
                        audioSampleTotal += System.Math.Abs(audioSample);

                        if (this._accumulatedSampleCount < _SamplesPerColumn)
                        {
                            continue;
                        }

                        float meanSquare = this._accumulatedSquareSum / _SamplesPerColumn;

                        if (meanSquare > 1.0f)
                        {
                            // A loud audio source right next to the sensor may result in mean square values
                            // greater than 1.0. Cap it at 1.0f for display purposes.
                            meanSquare = 1.0f;
                        }

                        // Calculate energy in dB, in the range [MinEnergy, 0], where MinEnergy < 0
                        float energy = _MinEnergy;

                        if (meanSquare > 0)
                        {
                            energy = (float)(10.0 * Math.Log10(meanSquare));
                        }

                        lock (this.energyLock)
                        {
                            // Normalize values to the range [0, 1] for display
                            this.energy[this._energyIndex] = (_MinEnergy - energy) / _MinEnergy;
                            this._energyIndex = (this._energyIndex + 1) % this.energy.Length;
                            ++this._newEnergyAvailable;
                        }

                        this._accumulatedSquareSum   = 0;
                        this._accumulatedSampleCount = 0;
                    }
                    // Average absolute sample for this sub frame. Note that
                    // _accumulatedSampleCount may have just been reset to zero above,
                    // in which case this float division yields Infinity.
                    var currentAveSample = audioSampleTotal / _accumulatedSampleCount;
                    if (_previousAvgSampleDecibel != 0)
                    {
                        // Compare the current average sample against the previous one and
                        // raise a speech-pause event when the ratio exceeds the threshold.
                        var difference = currentAveSample / _previousAvgSampleDecibel;
                        if (difference > .05)
                        {
                            OnSpeechPauseArrived(new SpeechPauseArrivedEventArgs());
                        }
                    }
                    _previousAvgSampleDecibel = currentAveSample;
                }
            }
        }
Example #38
0
 private void OnAudioSourceFrameArrived(object sender, AudioBeamFrameArrivedEventArgs e)
 {
     AudioSourceFrameArrived?.Invoke(sender, e);
 }
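The forwarder above simply re-raises the reader's event so consumers can subscribe on the wrapper class instead of on the sensor. A declaration like this presumably backs it (assumed):

 // Re-published event: consumers subscribe here rather than on the AudioBeamFrameReader.
 public event EventHandler<AudioBeamFrameArrivedEventArgs> AudioSourceFrameArrived;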
Example #39
0
        private void AudioBeamFrameReader_FrameArrived(object sender, AudioBeamFrameArrivedEventArgs e)
        {
            using (AudioBeamFrameList frameList = e.FrameReference.AcquireBeamFrames())
            {
                if (frameList != null)
                {
                    // NOTE - the old pattern of passing the AudioBeamFrameList to a downstream
                    // KinectAudio component had issues that were exposed in async mode. The
                    // AudioBeamFrameList is not disposed immediately in the event handler as
                    // it needs to be kept around until the async receiver processes it. However,
                    // Kinect suppresses all further audio events until the AudioBeamFrameList is
                    // disposed, so the receiver in KinectAudio has no way of recycling the old
                    // AudioBeamFrameList once it is done processing it (since the receiver never
                    // gets called again and this is the way objects are passed back upstream for
                    // recycling in the current cooperative buffering scheme). To resolve this, I
                    // moved the audio processing into this handler inside a using clause which
                    // ensures that the AudioBeamFrameList is disposed of immediately.
                    AudioBeamFrame audioBeamFrame = frameList[0];

                    foreach (var subFrame in audioBeamFrame.SubFrames)
                    {
                        // Check if we need to reallocate the audio buffer - if for instance the downstream component
                        // that we posted-by-ref to modifies the reference to audioBuffer to null or an array with
                        // a different size.
                        if ((this.audioBuffer == null) || (this.audioBuffer.Length != subFrame.FrameLengthInBytes))
                        {
                            this.audioBuffer = new byte[subFrame.FrameLengthInBytes];
                        }

                        // Get the raw audio bytes from the frame.
                        subFrame.CopyFrameDataToArray(this.audioBuffer);

                        // Compute originating time from the relative time reported by Kinect.
                        var originatingTime = this.pipeline.GetCurrentTimeFromElapsedTicks((subFrame.RelativeTime + subFrame.Duration).Ticks);

                        // Post the audio buffer by reference.
                        this.Audio.Post(new AudioBuffer(this.audioBuffer, KinectSensor.audioFormat), originatingTime);

                        // Post the audio beam angle information by value (not using co-operative buffering).
                        this.AudioBeamInfo.Post(new KinectAudioBeamInfo(subFrame.BeamAngle, subFrame.BeamAngleConfidence), originatingTime);

                        if ((subFrame.AudioBodyCorrelations != null) && (subFrame.AudioBodyCorrelations.Count > 0))
                        {
                            // Get BodyTrackingIds from AudioBodyCorrelations list (seems like this is the only
                            // bit of useful information).
                            var bodyIds = subFrame.AudioBodyCorrelations.Select(abc => abc.BodyTrackingId);

                            // Since we are posting bodyTrackingIds by-ref, we need to do a null check each
                            // time and allocate if necessary. Otherwise clear and re-use the existing list.
                            if (this.bodyTrackingIds == null)
                            {
                                // Allocate a new list
                                this.bodyTrackingIds = new List <ulong>(bodyIds);
                            }
                            else
                            {
                                // Re-use the existing list
                                this.bodyTrackingIds.Clear();
                                foreach (ulong id in bodyIds)
                                {
                                    this.bodyTrackingIds.Add(id);
                                }
                            }

                            // Post the audio body correlations by reference.
                            this.AudioBodyCorrelations.Post(this.bodyTrackingIds, originatingTime);
                        }
                    }
                }
            }
        }
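The Audio, AudioBeamInfo, and AudioBodyCorrelations members posted to above are Microsoft \psi (Platform for Situated Intelligence) emitters. A sketch of how they might be declared on the component, following standard \psi conventions; the constructor name and KinectAudioBeamInfo type mirror the snippet and are assumptions, not confirmed by it:

        // Assumed emitter declarations for the \psi component shown above.
        private readonly Pipeline pipeline;

        public Emitter<AudioBuffer> Audio { get; }
        public Emitter<KinectAudioBeamInfo> AudioBeamInfo { get; }
        public Emitter<List<ulong>> AudioBodyCorrelations { get; }

        public KinectAudioSensor(Pipeline pipeline) // hypothetical constructor
        {
            this.pipeline = pipeline;
            this.Audio = pipeline.CreateEmitter<AudioBuffer>(this, nameof(this.Audio));
            this.AudioBeamInfo = pipeline.CreateEmitter<KinectAudioBeamInfo>(this, nameof(this.AudioBeamInfo));
            this.AudioBodyCorrelations = pipeline.CreateEmitter<List<ulong>>(this, nameof(this.AudioBodyCorrelations));
        }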
 private void Reader_FrameArrived(object sender, AudioBeamFrameArrivedEventArgs e)
 {
     ProcessAudioFrame();
 }