Example #1
        private bool GetSensor()
        {
            if (_sensor != null && _sensor.IsOpen)
            {
                return true;
            }
            else
            {
                try
                {
                    _sensor = KinectSensor.GetDefault();
                    _sensor.Open();


                    _bodyReader = _sensor.BodyFrameSource.OpenReader();
                    _bodyReader.FrameArrived  += KinectInput.SkeletonColorVideoViewer._bodyReader_FrameArrived;
                    _colorReader               = _sensor.ColorFrameSource.OpenReader();
                    _colorReader.FrameArrived += KinectInput.SkeletonColorVideoViewer._colorReader_FrameArrived;
                    _audioBeamReader           = _sensor.AudioSource.OpenReader();

                    OnStatusChanged();
                    return true;
                }
                catch (Exception)
                {
                    OnStatusChanged();
                    return false;
                }
            }
        }
Example #2
        protected override void OnNavigatingFrom(
            NavigatingCancelEventArgs e)
        {
            base.OnNavigatingFrom(e);

            if (bodyIndexFrameReader != null)
            {
                bodyIndexFrameReader.Dispose();
                bodyIndexFrameReader = null;
            }

            if (bodyFrameReader != null)
            {
                bodyFrameReader.Dispose();
                bodyFrameReader = null;
            }

            if (audioBeamFrameReader != null)
            {
                audioBeamFrameReader.Dispose();
                audioBeamFrameReader = null;
            }

            if (kinect != null)
            {
                kinect.Close();
                kinect = null;
            }
        }
Example #3
        private void InitializeKinect()
        {
            _sensor = KinectSensor.GetDefault();

            if (_sensor != null)
            {
                _sensor.Open();

                if (_sensor.IsOpen)
                {
                    _sensor.IsAvailableChanged += OnKinectAvailabilityChanged;

                    _multireader = _sensor.OpenMultiSourceFrameReader(FrameSourceTypes.Color |
                                                                      FrameSourceTypes.Depth |
                                                                      FrameSourceTypes.Infrared);

                    _colordepthReader = _sensor.OpenMultiSourceFrameReader(FrameSourceTypes.Color |
                                                                           FrameSourceTypes.Depth);

                    _colorReader = _sensor.ColorFrameSource.OpenReader();

                    _audioReader = _sensor.AudioSource.OpenReader();

                    _multireader.MultiSourceFrameArrived += OnMultiSourceFrameArrived;

                    _colordepthReader.MultiSourceFrameArrived += OnColorAndDepthSourceFrameArrived;

                    _colorReader.FrameArrived += OnColorSourceFrameArrived;

                    _audioReader.FrameArrived += OnAudioSourceFrameArrived;
                }
            }
        }
Example #4
        void audioBeamFrameReader_FrameArrived(
            AudioBeamFrameReader sender, AudioBeamFrameArrivedEventArgs args)
        {
            using (var audioFrame =
                       args.FrameReference.AcquireBeamFrames() as AudioBeamFrameList) {
                if (audioFrame == null)
                {
                    return;
                }

                for (int i = 0; i < audioFrame.Count; i++)
                {
                    using (var frame = audioFrame[i]) {
                        for (int j = 0; j < frame.SubFrames.Count; j++)
                        {
                            using (var subFrame = frame.SubFrames[j]) {
                                subFrame.CopyFrameDataToArray(audioBuffer);

                                waveFile.Write(audioBuffer);

                                // Note: the actual data is 32-bit IEEE float samples
                                //float data1 = BitConverter.ToSingle( audioBuffer, 0 );
                                //float data2 = BitConverter.ToSingle( audioBuffer, 4 );
                                //float data3 = BitConverter.ToSingle( audioBuffer, 8 );
                            }
                        }
                    }
                }
            }
        }
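
The commented-out BitConverter lines above hint at the sample format: each value is a 32-bit IEEE float. A minimal sketch (a hypothetical helper, not part of the project above) of turning one sub-frame buffer into a loudness estimate:

        // Hypothetical helper: interpret the raw byte buffer as 32-bit IEEE
        // float samples and compute an RMS level for the sub frame.
        private static float ComputeRms(byte[] audioBuffer)
        {
            int sampleCount = audioBuffer.Length / sizeof(float);
            if (sampleCount == 0)
            {
                return 0f;
            }

            double sumOfSquares = 0.0;
            for (int i = 0; i < sampleCount; i++)
            {
                float sample = BitConverter.ToSingle(audioBuffer, i * sizeof(float));
                sumOfSquares += sample * sample;
            }
            return (float)Math.Sqrt(sumOfSquares / sampleCount);
        }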
Example #5
        // http://mtaulty.com/CommunityServer/blogs/mike_taultys_blog/archive/2014/10/01/kinect-for-windows-v2-hello-audio-world-for-the-net-windows-app-developer-amp-harmonica-player.aspx
        void audioBeamFrameReader_FrameArrived(AudioBeamFrameReader sender,
                                               AudioBeamFrameArrivedEventArgs args)
        {
            using (var audioFrame =
                       args.FrameReference.AcquireBeamFrames() as AudioBeamFrameList) {
                if (audioFrame == null)
                {
                    return;
                }

                for (int i = 0; i < audioFrame.Count; i++)
                {
                    using (var frame = audioFrame[i]) {
                        for (int j = 0; j < frame.SubFrames.Count; j++)
                        {
                            using (var subFrame = frame.SubFrames[j]) {
                                // Direction of the sound
                                LineBeamAngle.Angle =
                                    (int)(subFrame.BeamAngle * 180 / Math.PI);

                                // Confidence of the sound direction [0-1]
                                TextBeamAngleConfidence.Text =
                                    subFrame.BeamAngleConfidence.ToString();
                            }
                        }
                    }
                }
            }
        }
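
Since BeamAngleConfidence is reported in the range 0 to 1, a common refinement is to ignore low-confidence estimates before updating the UI. A small sketch of that idea (the helper and its threshold are assumptions, not part of the sample):

        // Hypothetical helper: convert a beam angle in radians to degrees,
        // returning null when the confidence (0-1) is below a chosen threshold.
        private static int? BeamAngleDegreesOrNull(float beamAngle, float confidence)
        {
            const float MinBeamConfidence = 0.3f;
            if (confidence < MinBeamConfidence)
            {
                return null;
            }
            return (int)(beamAngle * 180 / Math.PI);
        }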
Example #6
        private void Window_Closing(object sender,
                                    System.ComponentModel.CancelEventArgs e)
        {
            if (bodyIndexFrameReader != null)
            {
                bodyIndexFrameReader.Dispose();
                bodyIndexFrameReader = null;
            }

            if (bodyFrameReader != null)
            {
                bodyFrameReader.Dispose();
                bodyFrameReader = null;
            }

            if (audioBeamFrameReader != null)
            {
                audioBeamFrameReader.Dispose();
                audioBeamFrameReader = null;
            }

            if (kinect != null)
            {
                kinect.Close();
                kinect = null;
            }
        }
Example #7
        private void ActuallyStartAudio()
        {
            if (kinect.IsAvailable)
            {
                //Start the audio stream if necessary
                if (masterKinectSettings.sendAudioAngle || masterSettings.audioOptions.sourceID == kinectID)
                {
                    audioReader = kinect.AudioSource.OpenReader();
                    audioReader.FrameArrived += audioReader_FrameArrived;

                    // Manual beam mode is set regardless of the audio track mode
                    for (int i = 0; i < kinect.AudioSource.AudioBeams.Count; i++)
                    {
                        kinect.AudioSource.AudioBeams[i].AudioBeamMode = AudioBeamMode.Manual;
                    }

                    if (kinect.AudioSource.AudioBeams.Count > 0)
                    {
                        audioStream = kinect.AudioSource.AudioBeams[0].OpenInputStream();
                    }
                }
            }
        }
Example #8
        public MainWindow()
        {
            Sensor = KinectSensor.GetDefault();
            Sensor.Open();

            FrameReader = Sensor.OpenMultiSourceFrameReader(FrameSourceTypes.Color | FrameSourceTypes.Body);
            AudioReader = Sensor.AudioSource.OpenReader();

            BitmapToDisplay = new WriteableBitmap(
                Sensor.ColorFrameSource.FrameDescription.Width,
                Sensor.ColorFrameSource.FrameDescription.Height,
                96.0,
                96.0,
                PixelFormats.Bgra32,
                null);

            InitializeComponent();

            this.WindowStyle = System.Windows.WindowStyle.None;
            this.WindowState = System.Windows.WindowState.Maximized;

            Loaded  += OpenKinect;
            Closing += CloseKinect;

            KeyDown += CheckForExit;
        }
Example #9
 public VerbalAnalysis(AudioSource sensorSource)
 {
     //_sensor = sensor;
     _audioSource = sensorSource;
     _audioReader = _audioSource.OpenReader();
     _audioReader.FrameArrived += _audioReader_FrameArrived;
     _audioBuffer = new byte[_audioSource.SubFrameLengthInBytes];
 }
Example #10
        /// <summary>
        /// Initializes a new instance of the MainWindow class.
        /// </summary>
        public MainWindow()
        {
            // Initialize the components (controls) of the window
            this.InitializeComponent();
            //Use DataContext
            //try open Window2
            Window2 w2 = new Window2();

            //w2.DataContext = this;
            w2.Show();

            // Only one Kinect Sensor is supported
            this.kinectSensor = KinectSensor.GetDefault();

            if (this.kinectSensor != null)
            {
                // set IsAvailableChanged event notifier
                this.kinectSensor.IsAvailableChanged += this.Sensor_IsAvailableChanged;

                // Open the sensor
                this.kinectSensor.Open();

                // Get its audio source
                AudioSource audioSource = this.kinectSensor.AudioSource;

                // Allocate 1024 bytes to hold a single audio sub frame. Duration sub frame
                // is 16 msec, the sample rate is 16khz, which means 256 samples per sub frame.
                // With 4 bytes per sample, that gives us 1024 bytes.
                this.audioBuffer = new byte[audioSource.SubFrameLengthInBytes];

                // Open the reader for the audio frames
                this.reader = audioSource.OpenReader();

                // Uncomment these two lines to overwrite the automatic mode of the audio beam.
                // It will change the beam mode to manual and set the desired beam angle.
                // In this example, point it straight forward.
                // Note that setting beam mode and beam angle will only work if the
                // application window is in the foreground.
                // Furthermore, setting these values is an asynchronous operation --
                // it may take a short period of time for the beam to adjust.

                /*
                 * audioSource.AudioBeams[0].AudioBeamMode = AudioBeamMode.Manual;
                 * audioSource.AudioBeams[0].BeamAngle = 0;
                 */
            }
            else
            {
                // On failure, set the status text
                this.statusBarText.Text = Properties.Resources.NoSensorStatusText;
                return;
            }

            this.energyBitmap = new WriteableBitmap(EnergyBitmapWidth, EnergyBitmapHeight, 96, 96, PixelFormats.Indexed1, new BitmapPalette(new List <Color> {
                Colors.White, (Color)this.Resources["KinectPurpleColor"]
            }));
        }
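
The 1024-byte figure in the comment above follows directly from the format: 16 ms per sub frame at a 16 kHz sample rate gives 256 samples, and 4 bytes per 32-bit float sample gives 1024 bytes. A tiny sketch of that arithmetic (constants taken from the comment, not queried from the sensor):

        // Sanity check of the sub-frame size quoted in the comments above.
        private static int ExpectedSubFrameBytes()
        {
            const double subFrameSeconds = 0.016; // 16 msec per sub frame
            const int sampleRateHz = 16000;       // 16 kHz
            const int bytesPerSample = 4;         // one 32-bit IEEE float

            int samplesPerSubFrame = (int)(subFrameSeconds * sampleRateHz); // 256
            return samplesPerSubFrame * bytesPerSample;                     // 1024
        }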
Example #11
 private void DisposeAudioFrameReader()
 {
     if (m_AudioFrameReader == null)
     {
         return;
     }
     m_AudioFrameReader.Dispose();
     m_AudioFrameReader = null;
 }
Example #12
 public void addBeamAngleListener(BeamAngleListener listener)
 {
     beamAngleListeners.Add(listener);
     if (beamAngleListeners.Count == 1)
     {
         beamFrameReader = kinectSensor.AudioSource.OpenReader();
         beamFrameReader.FrameArrived += this.beamFrameArrived;
     }
 }
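
A natural counterpart to addBeamAngleListener would tear the reader down when the last listener is removed; a sketch under that assumption (field names mirror the example, the method itself is hypothetical):

 public void removeBeamAngleListener(BeamAngleListener listener)
 {
     beamAngleListeners.Remove(listener);
     if (beamAngleListeners.Count == 0 && beamFrameReader != null)
     {
         // Last listener gone: unhook the handler and release the reader
         beamFrameReader.FrameArrived -= this.beamFrameArrived;
         beamFrameReader.Dispose();
         beamFrameReader = null;
     }
 }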
Example #13
        public AudioHandler(KinectSensor kinectSensor)
        {
            audioPreAnalysis  = new AudioPreAnalysis();
            this.kinectSensor = kinectSensor;
            // Get its audio source
            AudioSource audioSource = this.kinectSensor.AudioSource;

            // Allocate 1024 bytes to hold a single audio sub frame. Duration sub frame
            // is 16 msec, the sample rate is 16khz, which means 256 samples per sub frame.
            // With 4 bytes per sample, that gives us 1024 bytes.
            this.audioBuffer = new byte[audioSource.SubFrameLengthInBytes];

            // Open the reader for the audio frames
            this.reader = audioSource.OpenReader();

            // PixelFormats.Indexed1;
            this.energyBitmap = new WriteableBitmap(EnergyBitmapWidth, EnergyBitmapHeight, 96, 96, PixelFormats.Indexed4, new BitmapPalette(new List <Color> {
                Colors.White, Colors.Green, Colors.Red, Colors.LightBlue, Colors.Aquamarine, Colors.Pink, Colors.Orange
            }));

            // Initialize foreground pixels
            this.foregroundPixels = new byte[EnergyBitmapHeight];
            for (int i = 0; i < this.foregroundPixels.Length; ++i)
            {
                this.foregroundPixels[i] = 0xff;
            }
            for (int i = 0; i < this.lineVolumePixels.Length; ++i)
            {
                this.lineVolumePixels[i] = 0x55;
            }
            for (int i = 0; i < this.lineVolumePixelsIsSpeaking.Length; ++i)
            {
                this.lineVolumePixelsIsSpeaking[i] = 0x11;
            }
            for (int i = 0; i < this.lineVolumePixelsSoft.Length; ++i)
            {
                this.lineVolumePixelsSoft[i] = 0x22;
            }
            for (int i = 0; i < this.lineVolumePixelsLoud.Length; ++i)
            {
                this.lineVolumePixelsLoud[i] = 0x33;
            }

            //    this.kinectImage.Source = this.energyBitmap;
            CompositionTarget.Rendering += this.UpdateEnergy;



            //if (this.reader != null)
            //{
            //    // Subscribe to new audio frame arrived events
            //    this.reader.FrameArrived += this.Reader_FrameArrived;
            //}


            getSpeechthings();
        }
Example #14
    void OnDestroy()
    {
        if (audioReader != null)
        {
            this.audioReader.Dispose();
            this.audioReader = null;

            //Debug.Log("KinectAudio destroyed.");
        }
    }
Example #15
        private static void StartAudio()
        {
            Windows.Kinect.KinectAudioSource audioSource = sensor.AudioSource;
            if (audioSource != null)
            {
                audioReader = audioSource.OpenReader();
                //audioReader.FrameArrived += Reader_FrameArrived;

                audioBuffer = new byte[audioSource.SubFrameLengthInBytes];
            }
        }
Example #16
        public void close()
        {
            // CompositionTarget.Rendering -= this.UpdateEnergy;

            if (this.reader != null)
            {
                // AudioBeamFrameReader is IDisposable
                this.reader.Dispose();
                this.reader = null;
            }
        }
Example #17
        private void Window_Closing( object sender, System.ComponentModel.CancelEventArgs e )
        {
            if ( audioBeamFrameReader != null ) {
                audioBeamFrameReader.Dispose();
                audioBeamFrameReader = null;
            }

            if ( kinect != null ) {
                kinect.Close();
                kinect = null;
            }
        }
Example #18
        private void StartKinect()
        {
            if (this.disposed)
            {
                throw new ObjectDisposedException(nameof(KinectSensor));
            }

            this.kinectSensor = Microsoft.Kinect.KinectSensor.GetDefault();
            this.kinectSensor.CoordinateMapper.CoordinateMappingChanged += this.CoordinateMapper_CoordinateMappingChanged;

            this.whichFrames = FrameSourceTypes.None;

            if (this.configuration.OutputBodies)
            {
                this.whichFrames |= FrameSourceTypes.Body;
            }

            if (this.configuration.OutputColor)
            {
                this.whichFrames |= FrameSourceTypes.Color;
            }

            if (this.configuration.OutputDepth)
            {
                this.whichFrames |= FrameSourceTypes.Depth;
            }

            if (this.configuration.OutputInfrared)
            {
                this.whichFrames |= FrameSourceTypes.Infrared;
            }

            if (this.configuration.OutputLongExposureInfrared)
            {
                this.whichFrames |= FrameSourceTypes.LongExposureInfrared;
            }

            if (this.whichFrames != FrameSourceTypes.None)
            {
                this.multiFrameReader = this.kinectSensor.OpenMultiSourceFrameReader(this.whichFrames);
                this.multiFrameReader.MultiSourceFrameArrived += this.MultiFrameReader_FrameArrived;
            }

            if (this.configuration.OutputAudio)
            {
                this.audioBeamFrameReader = this.kinectSensor.AudioSource.OpenReader();
                this.audioBeamFrameReader.FrameArrived += this.AudioBeamFrameReader_FrameArrived;
            }

            this.kinectSensor.Open();
        }
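
A symmetric shutdown for this configuration-driven startup would dispose whichever readers were opened. A sketch (field and handler names mirror the example; the method itself is an assumption, not part of the project):

        // Hypothetical counterpart to StartKinect: tear down whatever was opened.
        private void StopKinect()
        {
            if (this.multiFrameReader != null)
            {
                this.multiFrameReader.MultiSourceFrameArrived -= this.MultiFrameReader_FrameArrived;
                this.multiFrameReader.Dispose();
                this.multiFrameReader = null;
            }

            if (this.audioBeamFrameReader != null)
            {
                this.audioBeamFrameReader.FrameArrived -= this.AudioBeamFrameReader_FrameArrived;
                this.audioBeamFrameReader.Dispose();
                this.audioBeamFrameReader = null;
            }

            if (this.kinectSensor != null)
            {
                this.kinectSensor.Close();
                this.kinectSensor = null;
            }
        }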
Example #19
        public void InitializeSound(KinectSensor _kinect, System.Windows.Media.Color color)
        {
            if (_kinect == null)
            {
                return;
            }

            if (_audioReader != null)
            {
                return;
            }

            // Get its audio source
            AudioSource audioSource = _kinect.AudioSource;

            // Allocate 1024 bytes to hold a single audio sub frame. Duration sub frame
            // is 16 msec, the sample rate is 16khz, which means 256 samples per sub frame.
            // With 4 bytes per sample, that gives us 1024 bytes.
            audioBuffer = new byte[audioSource.SubFrameLengthInBytes];

            // Open the reader for the audio frames
            _audioReader = audioSource.OpenReader();

            // Uncomment these two lines to overwrite the automatic mode of the audio beam.
            // It will change the beam mode to manual and set the desired beam angle.
            // In this example, point it straight forward.
            // Note that setting beam mode and beam angle will only work if the
            // application window is in the foreground.
            // Furthermore, setting these values is an asynchronous operation --
            // it may take a short period of time for the beam to adjust.

            /*
             * audioSource.AudioBeams[0].AudioBeamMode = AudioBeamMode.Manual;
             * audioSource.AudioBeams[0].BeamAngle = 0;
             */

            this.energyBitmap = new WriteableBitmap(EnergyBitmapWidth, EnergyBitmapHeight, 96, 96, PixelFormats.Indexed1, new BitmapPalette(new List <System.Windows.Media.Color> {
                Colors.Transparent, color
            }));

            // Initialize foreground pixels
            this.foregroundPixels = new byte[_EnergyBitmapHeight];
            for (int i = 0; i < this.foregroundPixels.Length; ++i)
            {
                this.foregroundPixels[i] = 0xff;
            }

            //this.waveDisplay.Source = this.energyBitmap;

            //CompositionTarget.Rendering += this.UpdateEnergy;
        }
Example #20
        private void Reader_FrameArrived(AudioBeamFrameReader sender, AudioBeamFrameArrivedEventArgs e)
        {
            using (var audioFrame = e.FrameReference.AcquireBeamFrames() as AudioBeamFrameList)
            {
                if (audioFrame == null)
                {
                    return;
                }

                for (int i = 0; i < audioFrame.Count; i++)
                {
                    using (var frame = audioFrame[i])
                    {
                        for (int j = 0; j < frame.SubFrames.Count; j++)
                        {
                            using (var subFrame = frame.SubFrames[j])
                            {
                                subFrame.CopyFrameDataToArray(this.audioBuffer);
                                stream.Write(audioBuffer, 0, audioBuffer.Length);
                                size += audioBuffer.Length;
                            }
                        }
                    }
                }
            }
            //AudioBeamFrameList frameList = (AudioBeamFrameList)e.FrameReference.AcquireBeamFrames();

            //if (frameList != null)
            //{
            //    //using(frameList)
            //   // {
            //        IReadOnlyList<AudioBeamSubFrame> subFrameList = frameList[0].SubFrames;

            //        // Loop over all sub frames, extract audio buffer and beam informationIReadOnlyList<AudioBeamFrame>
            //        foreach (AudioBeamSubFrame subFrame in subFrameList)
            //        {
            //                subFrame.CopyFrameDataToArray(this.audioBuffer);

            //                stream.Write(audioBuffer, 0, audioBuffer.Length);
            //                size += audioBuffer.Length;
            //        subFrame.Dispose();
            //            }
            //    frameList.Dispose();

            //   // }


            //}
        }
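
Because the stream receives raw 32-bit float PCM at 16 kHz mono, the running byte counter maps directly to a recording duration. A sketch (assumes the size field from the example and the standard Kinect v2 audio format):

        // Hypothetical helper: approximate recorded duration from raw bytes,
        // assuming 16 kHz mono with 4 bytes (32-bit float) per sample.
        private TimeSpan RecordedDuration()
        {
            const int BytesPerSecond = 16000 * 4;
            return TimeSpan.FromSeconds((double)this.size / BytesPerSecond);
        }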
Example #21
        /// <summary>
        /// Constructor. Runs only once, when the application starts.
        /// </summary>
        public MainWindow()
        {
            InitializeComponent();

            this.kinect = KinectSensor.GetDefault();

            // How this event is meant to be used is unclear.
            this.kinect.AudioSource.FrameCaptured += AudioSource_FrameCaptured;

            // Get the AudioBeamFrameReader and attach the event handler.
            this.audioBeamFrameReader = this.kinect.AudioSource.OpenReader();
            this.audioBeamFrameReader.FrameArrived += audioBeamFrameReader_FrameArrived;

            this.kinect.Open();
        }
Example #22
        public MainPage()
        {
            this.kinect = KinectSensor.GetDefault();
            this.kinect.Open();

            AudioSource audioSource = this.kinect.AudioSource;

            this.audioBuffer = new byte[audioSource.SubFrameLengthInBytes];

            this.audioBeamFrameReader = audioSource.OpenReader();
            this.audioBeamFrameReader.FrameArrived += Reader_FrameArrived;

            this.audioBeamFrameReader.IsPaused = true;
            this.InitializeComponent();
        }
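
Note that the reader here is created paused, so no frames arrive until IsPaused is cleared. A minimal sketch of toggling capture from a UI handler (the button and its wiring are hypothetical):

        // Hypothetical handler: start or stop audio delivery by toggling the
        // paused state of the reader created in the constructor above.
        private void ToggleAudio_Click(object sender, RoutedEventArgs e)
        {
            this.audioBeamFrameReader.IsPaused = !this.audioBeamFrameReader.IsPaused;
        }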
Example #23
 // Use this for initialization
 void Start()
 {
     i_sensor = KinectSensor.GetDefault();
     if (i_sensor != null)
     {
         if (!i_sensor.IsOpen)
         {
             i_sensor.Open();
         }
         Windows.Kinect.AudioSource l_as = i_sensor.AudioSource;
         i_reader = l_as.OpenReader();
         //l_as.AudioBeams[0].AudioBeamMode = AudioBeamMode.Manual;
         //l_as.AudioBeams[0].BeamAngle = 0;
     }
 }
Example #24
        private void Window_Loaded( object sender, RoutedEventArgs e )
        {
            try {
                // Open the Kinect
                kinect = KinectSensor.GetDefault();
                kinect.Open();

                // Open the audio reader
                audioBeamFrameReader = kinect.AudioSource.OpenReader();
                audioBeamFrameReader.FrameArrived += audioBeamFrameReader_FrameArrived;
            }
            catch ( Exception ex ) {
                MessageBox.Show( ex.Message );
                Close();
            }
        }
Example #25
        private void Window_Loaded(object sender, RoutedEventArgs e)
        {
            try {
                // Open the Kinect
                kinect = KinectSensor.GetDefault();
                kinect.Open();

                // Open the audio reader
                audioBeamFrameReader = kinect.AudioSource.OpenReader();
                audioBeamFrameReader.FrameArrived += audioBeamFrameReader_FrameArrived;
            }
            catch (Exception ex) {
                MessageBox.Show(ex.Message);
                Close();
            }
        }
Example #26
 private void CoordinateMapper_CoordinateMappingChanged(object sender, CoordinateMappingChangedEventArgs e)
 {
     _logger.Info(string.Format("Coordinate mapper changed for sensor {0}. Configuring events...", this.kinectSensor.UniqueKinectId));
     this.depthFrameReader = this.kinectSensor.DepthFrameSource.OpenReader();
     this.depthFrameReader.FrameArrived += DepthFrameReader_FrameArrived;
     this.colorFrameReader = this.kinectSensor.ColorFrameSource.OpenReader();
     this.colorFrameReader.FrameArrived += ColorFrameReader_FrameArrived;
     this.bodyFrameReader = this.kinectSensor.BodyFrameSource.OpenReader();
     this.bodyFrameReader.FrameArrived      += BodyFrameReader_FrameArrived;
     this.audioBeamFrameReader               = this.kinectSensor.AudioSource.OpenReader();
     this.audioBeamFrameReader.FrameArrived += AudioBeamFrameReader_FrameArrived;
     this.audioBeamFrameReader.AudioSource.AudioBeams[0].AudioBeamMode = AudioBeamMode.Automatic;
     this.audioBeamFrameReader.AudioSource.AudioBeams[0].BeamAngle     = 0.0f;
     this.coordinateMapper = this.kinectSensor.CoordinateMapper;
     coordinateMapperReady.Set();
 }
Example #27
        public AudioFrame(KinectSensor kinect, NetworkPublisher pub)
        {
            this.kinect    = kinect;
            this.publisher = pub;

            AudioSource audioSource = this.kinect.AudioSource;

            this.audioBeamFrameReader          = audioSource.OpenReader();
            this.audioBeamFrameReader.IsPaused = true;

            // Allocate 1024 bytes to hold a single audio sub frame. Duration sub frame
            // is 16 msec, the sample rate is 16khz, which means 256 samples per sub frame.
            // With 4 bytes per sample, that gives us 1024 bytes.
            this.audioBuffer = new byte[audioSource.SubFrameLengthInBytes];

            this.audioBeamFrameReader.FrameArrived += this.Reader_AudioBeamFrameArrived;
        }
Example #28
        protected override void OnNavigatedTo(NavigationEventArgs e)
        {
            base.OnNavigatedTo(e);

            try {
                kinect = KinectSensor.GetDefault();
                kinect.Open();

                // Open the audio reader
                audioBeamFrameReader = kinect.AudioSource.OpenReader();
                audioBeamFrameReader.FrameArrived += audioBeamFrameReader_FrameArrived;
            }
            catch (Exception ex) {
                MessageDialog dlg = new MessageDialog(ex.Message);
                dlg.ShowAsync();
            }
        }
Example #29
        /// <summary>
        /// Execute shutdown tasks
        /// </summary>
        /// <param name="sender">object sending the event</param>
        /// <param name="e">event arguments</param>
        private void MainWindow_Closing(object sender, CancelEventArgs e)
        {
            CompositionTarget.Rendering -= this.UpdateEnergy;

            if (this.reader != null)
            {
                // AudioBeamFrameReader is IDisposable
                this.reader.Dispose();
                this.reader = null;
            }

            if (this.kinectSensor != null)
            {
                this.kinectSensor.Close();
                this.kinectSensor = null;
            }
        }
Example #30
    void OnApplicationQuit()
    {
        print("quit");
        if (i_reader != null)
        {
            i_reader.Dispose();
            i_reader = null;
        }

        if (i_sensor != null)
        {
            if (i_sensor.IsOpen)
            {
                i_sensor.Close(); i_sensor = null;
            }
        }
    }
Example #31
        void kinect_IsAvailableChanged(object sender, IsAvailableChangedEventArgs e)
        {
            if (!e.IsAvailable)
            {
                return;
            }

            if (bodyIndexFrameDesc != null)
            {
                return;
            }

            // Create the data needed for display
            bodyIndexFrameDesc = kinect.DepthFrameSource.FrameDescription;

            // Buffer for the body index data
            bodyIndexBuffer = new byte[bodyIndexFrameDesc.LengthInPixels];

            // Create what the display bitmap needs
            bodyIndexColorImage = new WriteableBitmap(bodyIndexFrameDesc.Width, bodyIndexFrameDesc.Height,
                                                      96, 96, PixelFormats.Bgra32, null);
            bodyIndexColorRect   = new Int32Rect(0, 0, bodyIndexFrameDesc.Width, bodyIndexFrameDesc.Height);
            bodyIndexColorStride = (int)(bodyIndexFrameDesc.Width * bodyIndexColorBytesPerPixel);

            // Buffer for converting the body index data to BGRA (color) data
            bodyIndexColorBuffer = new byte[bodyIndexFrameDesc.LengthInPixels * bodyIndexColorBytesPerPixel];

            ImageBodyIndex.Source = bodyIndexColorImage;



            // Open the body index reader
            bodyIndexFrameReader = kinect.BodyIndexFrameSource.OpenReader();
            bodyIndexFrameReader.FrameArrived += bodyIndexFrameReader_FrameArrived;

            // Create the array that will hold the Bodies
            bodies = new Body[kinect.BodyFrameSource.BodyCount];

            // Open the body reader
            bodyFrameReader = kinect.BodyFrameSource.OpenReader();
            bodyFrameReader.FrameArrived += bodyFrameReader_FrameArrived;

            // Open the audio reader
            audioBeamFrameReader = kinect.AudioSource.OpenReader();
            audioBeamFrameReader.FrameArrived += audioBeamFrameReader_FrameArrived;
        }
Example #32
        public void Dispose()
        {
            if (_kinect != null)
            {
                if (_kinect.IsOpen)
                {
                    _kinect.Close();
                }
                _kinect = null;
            }

            if (_audioBeamFrameReader != null)
            {
                _audioBeamFrameReader.Dispose();
                _audioBeamFrameReader = null;
            }
        }
Example #33
        protected override void OnNavigatingFrom( NavigatingCancelEventArgs e )
        {
            base.OnNavigatingFrom( e );

            if ( waveFile  != null ) {
                waveFile.Dispose();
                waveFile = null;
            }

            if ( audioBeamFrameReader  != null ) {
                audioBeamFrameReader.Dispose();
                audioBeamFrameReader = null;
            }

            if ( kinect != null ) {
                kinect.Close();
                kinect = null;
            }
        }
Example #34
        protected override void OnNavigatedTo( NavigationEventArgs e )
        {
            base.OnNavigatedTo( e );

            try {
                kinect = KinectSensor.GetDefault();
                kinect.Open();

                // Open the body index reader
                bodyIndexFrameReader = kinect.BodyIndexFrameSource.OpenReader();
                bodyIndexFrameReader.FrameArrived += bodyIndexFrameReader_FrameArrived;

                // Open the body reader
                bodyFrameReader = kinect.BodyFrameSource.OpenReader();
                bodyFrameReader.FrameArrived += bodyFrameReader_FrameArrived;

                // Open the audio reader
                audioBeamFrameReader = kinect.AudioSource.OpenReader();
                audioBeamFrameReader.FrameArrived += audioBeamFrameReader_FrameArrived;

                // Create the array that will hold the Bodies
                bodies = new Body[kinect.BodyFrameSource.BodyCount];

                // Create the data needed for display
                bodyIndexFrameDesc = kinect.DepthFrameSource.FrameDescription;

                // Buffer for the body index data
                bodyIndexBuffer = new byte[bodyIndexFrameDesc.LengthInPixels];

                // Bitmap
                bodyIndexColorBitmap = new WriteableBitmap(
                    bodyIndexFrameDesc.Width, bodyIndexFrameDesc.Height );
                ImageBodyIndex.Source = bodyIndexColorBitmap;

                bodyIndexColorBuffer = new byte[bodyIndexFrameDesc.LengthInPixels *
                                                bodyIndexColorBytesPerPixels];
            }
            catch ( Exception ex ) {
                MessageDialog dlg = new MessageDialog(ex.Message);
                dlg.ShowAsync();
            }
        }
Example #35
        protected override void OnNavigatedTo( NavigationEventArgs e )
        {
            base.OnNavigatedTo( e );

            try {
                kinect = KinectSensor.GetDefault();
                kinect.Open();

                // Create the audio buffer
                audioBuffer = new byte[kinect.AudioSource.SubFrameLengthInBytes];

                // Open the audio reader
                audioBeamFrameReader = kinect.AudioSource.OpenReader();
                audioBeamFrameReader.FrameArrived += audioBeamFrameReader_FrameArrived;
            }
            catch ( Exception ex ) {
                MessageDialog dlg = new MessageDialog(ex.Message);
                dlg.ShowAsync();
            }
        }
Example #36
        private void Window_Loaded( object sender, RoutedEventArgs e )
        {
            try {
                // Open the Kinect
                kinect = KinectSensor.GetDefault();
                kinect.Open();

                // Create the audio buffer
                audioBuffer = new byte[kinect.AudioSource.SubFrameLengthInBytes];

                // Open the audio reader
                audioBeamFrameReader = kinect.AudioSource.OpenReader();
                audioBeamFrameReader.FrameArrived += audioBeamFrameReader_FrameArrived;

                audioBeamFrameReader.IsPaused = true;
            }
            catch ( Exception ex ) {
                MessageBox.Show( ex.Message );
                Close();
            }
        }
Example #37
        //CoordinateMappingExample cooordinateMappingExample;

        void CoordinateMapper_CoordinateMappingChanged(object sender, CoordinateMappingChangedEventArgs e)
        {
            kinect2Calibration = new RoomAliveToolkit.Kinect2Calibration();
            kinect2Calibration.RecoverCalibrationFromSensor(kinectSensor);
            kinect2CalibrationReady.Set();

            //cooordinateMappingExample = new CoordinateMappingExample();
            //cooordinateMappingExample.Run(kinect2Calibration, kinectSensor);

            depthFrameReader = kinectSensor.DepthFrameSource.OpenReader();
            depthFrameReader.FrameArrived += depthFrameReader_FrameArrived;

            colorFrameReader = kinectSensor.ColorFrameSource.OpenReader();
            colorFrameReader.FrameArrived += colorFrameReader_FrameArrived;

            bodyFrameReader = kinectSensor.BodyFrameSource.OpenReader();
            bodyFrameReader.FrameArrived += bodyFrameReader_FrameArrived;

            audioBeamFrameReader = kinectSensor.AudioSource.OpenReader();
            audioBeamFrameReader.FrameArrived += audioBeamFrameReader_FrameArrived;

            audioBeamFrameReader.AudioSource.AudioBeams[0].AudioBeamMode = AudioBeamMode.Manual;
            audioBeamFrameReader.AudioSource.AudioBeams[0].BeamAngle = 0;
        }
Example #38
        private void Window_Loaded( object sender, RoutedEventArgs e )
        {
            try {
                // Open the Kinect
                kinect = KinectSensor.GetDefault();
                kinect.Open();

                // Create the data needed for display
                bodyIndexFrameDesc = kinect.DepthFrameSource.FrameDescription;

                // Buffer for the body index data
                bodyIndexBuffer = new byte[bodyIndexFrameDesc.LengthInPixels];

                // Create what the display bitmap needs
                bodyIndexColorImage = new WriteableBitmap( bodyIndexFrameDesc.Width, bodyIndexFrameDesc.Height,
                    96, 96, PixelFormats.Bgra32, null );
                bodyIndexColorRect = new Int32Rect( 0, 0, bodyIndexFrameDesc.Width, bodyIndexFrameDesc.Height );
                bodyIndexColorStride = (int)(bodyIndexFrameDesc.Width * bodyIndexColorBytesPerPixel);

                // Buffer for converting the body index data to BGRA (color) data
                bodyIndexColorBuffer = new byte[bodyIndexFrameDesc.LengthInPixels * bodyIndexColorBytesPerPixel];

                ImageBodyIndex.Source = bodyIndexColorImage;

                // Open the body index reader
                bodyIndexFrameReader = kinect.BodyIndexFrameSource.OpenReader();
                bodyIndexFrameReader.FrameArrived += bodyIndexFrameReader_FrameArrived;

                // Create the array that will hold the Bodies
                bodies = new Body[kinect.BodyFrameSource.BodyCount];

                // Open the body reader
                bodyFrameReader = kinect.BodyFrameSource.OpenReader();
                bodyFrameReader.FrameArrived += bodyFrameReader_FrameArrived;

                // Open the audio reader
                audioBeamFrameReader = kinect.AudioSource.OpenReader();
                audioBeamFrameReader.FrameArrived += audioBeamFrameReader_FrameArrived;
            }
            catch ( Exception ex ) {
                MessageBox.Show( ex.Message );
                Close();
            }
        }
Example #39
        void audioBeamFrameReader_FrameArrived(
            AudioBeamFrameReader sender, AudioBeamFrameArrivedEventArgs args )
        {
            using ( var audioFrame =
                args.FrameReference.AcquireBeamFrames() as AudioBeamFrameList ) {

                if ( audioFrame == null ) {
                    return;
                }

                for ( int i = 0; i < audioFrame.Count; i++ ) {
                    using ( var frame = audioFrame[i] ) {
                        for ( int j = 0; j < frame.SubFrames.Count; j++ ) {
                            using ( var subFrame = frame.SubFrames[j] ) {
                                subFrame.CopyFrameDataToArray( audioBuffer );

                                waveFile.Write( audioBuffer );

                                // Note: the actual data is 32-bit IEEE float samples
                                //float data1 = BitConverter.ToSingle( audioBuffer, 0 );
                                //float data2 = BitConverter.ToSingle( audioBuffer, 4 );
                                //float data3 = BitConverter.ToSingle( audioBuffer, 8 );
                            }
                        }
                    }
                }
            }
        }
Example #40
        public MainWindow()
        {
            // one sensor is currently supported
            this.kinectSensor = KinectSensor.GetDefault();

            // get the coordinate mapper
            this.coordinateMapper = this.kinectSensor.CoordinateMapper;

            // open the reader for the body frames
            this.bodyFrameReader = this.kinectSensor.BodyFrameSource.OpenReader();

            this.colorFrameReader = this.kinectSensor.ColorFrameSource.OpenReader();

            this.audioReader = this.kinectSensor.AudioSource.OpenReader();

            // get the depth (display) extents
            FrameDescription jointFrameDescription = this.kinectSensor.DepthFrameSource.FrameDescription;
            
            FrameDescription colorFrameDescription = this.kinectSensor.ColorFrameSource.CreateFrameDescription(ColorImageFormat.Bgra);

            FrameDescription infraredFrameDescription = this.kinectSensor.InfraredFrameSource.FrameDescription;

            colorRenderer = new ColorFrameRenderer(colorFrameDescription.Width, colorFrameDescription.Height, jointFrameDescription.Width, jointFrameDescription.Height,
                infraredFrameDescription.Width, infraredFrameDescription.Height);
            var drawingGroup = new DrawingGroup();
            var drawingImage = new DrawingImage(drawingGroup);
            hudRenderer = new HudRenderer(drawingGroup, drawingImage, colorFrameDescription.Width, colorFrameDescription.Height);

            AudioSource audioSource = this.kinectSensor.AudioSource;

            // Allocate 1024 bytes to hold a single audio sub frame. Duration sub frame 
            // is 16 msec, the sample rate is 16khz, which means 256 samples per sub frame. 
            // With 4 bytes per sample, that gives us 1024 bytes.
            this.audioBuffer = new byte[audioSource.SubFrameLengthInBytes];

            this.colorFrameReader.FrameArrived += this.Reader_ColorFrameArrived;

            this.audioReader.FrameArrived += audioReader_FrameArrived;

            //on startup hide the audio meter
            AudioMeterVisibility = Visibility.Hidden;

            //Infrared
            // open the reader for the infrared frames
            this.infraredFrameReader = this.kinectSensor.InfraredFrameSource.OpenReader();

            // wire handler for frame arrival
            this.infraredFrameReader.FrameArrived += this.colorRenderer.Reader_InfraredFrameArrived;

            // set IsAvailableChanged event notifier
            this.kinectSensor.IsAvailableChanged += this.Sensor_IsAvailableChanged;

            // open the sensor
            this.kinectSensor.Open();

            // set the status text TODO: change namespace name in resources
            this.StatusText = this.kinectSensor.IsAvailable ? Microsoft.Samples.Kinect.BodyBasics.Properties.Resources.RunningStatusText
                                                            : Microsoft.Samples.Kinect.BodyBasics.Properties.Resources.NoSensorStatusText;
            
            // use the window object as the view model in this simple example
            this.DataContext = this;

            // initialize the components (controls) of the window
            this.InitializeComponent();

            //register the code which will tell the system what to do when keys are pressed
            SetupKeyHandlers();

            
            //initialize
            panTilt = PanTiltController.GetOrCreatePanTiltController();
            firingControl = FiringController.GetOrCreateFiringController();

            var panTiltErr = panTilt.TryInitialize();
            var firingErr = firingControl.TryInitialize();
            if (panTiltErr != null)
            {
                //crash the app. we can't do anything if it doesn't initialize
                throw panTiltErr;
            }

            if (firingErr != null)
            {
                //crash the app. we can't do anything if it doesn't initialize
                throw firingErr;
            }

            string safetyText;
            if (this.firingControl.VirtualSafetyOn)
            {
                safetyText = Microsoft.Samples.Kinect.BodyBasics.Properties.Resources.SafetyDisengagedText;
            }
            else
            {
                safetyText = Microsoft.Samples.Kinect.BodyBasics.Properties.Resources.SafetyEngagedText;
            }
            panTilt.TryInitialize();

            //draw the headsup display initially
            this.hudRenderer.RenderHud(new HudRenderingParameters()
            {
                CannonX = this.CannonX,
                CannonY = this.CannonY,
                //CannonTheta = this.CannonTheta,
                StatusText = this.statusText,
                SystemReady = (this.kinectSensor.IsAvailable && this.kinectSensor.IsOpen && this.panTilt.IsReady),
                FrameRate = this.FrameRate,
                TrackingMode = this.trackingMode,
                FiringSafety = this.firingControl.VirtualSafetyOn,
                FiringSafetyText = safetyText
            });

            //set voice synth to Hazel
            this.voiceSynth.SelectVoice("Microsoft Hazel Desktop");

            this.voiceSynth.SpeakAsync("Kinect Cannon Fully Initialized");
            

            //debug start frame rate counter
            FPSTimerStart();

            // Try to use the controller
            
        }
Example #41
        private void ActuallyStartAudio()
        {
            if (kinect.IsAvailable)
            {
                //Start the audio stream if necessary
                if (masterKinectSettings.sendAudioAngle || masterSettings.audioOptions.sourceID == kinectID)
                {
                    audioReader = kinect.AudioSource.OpenReader();
                    audioReader.FrameArrived += audioReader_FrameArrived;

                    // Manual beam mode is set regardless of the audio track mode
                    for (int i = 0; i < kinect.AudioSource.AudioBeams.Count; i++)
                    {
                        kinect.AudioSource.AudioBeams[i].AudioBeamMode = AudioBeamMode.Manual;
                    }

                    if (kinect.AudioSource.AudioBeams.Count > 0)
                    {
                        audioStream = kinect.AudioSource.AudioBeams[0].OpenInputStream();
                    }
                }
            }
        }
Example #42
        public AudioHandler(KinectSensor kinectSensor)
        {
            audioPreAnalysis = new AudioPreAnalysis();
            this.kinectSensor = kinectSensor;
            // Get its audio source
            AudioSource audioSource = this.kinectSensor.AudioSource;

            // Allocate 1024 bytes to hold a single audio sub frame. Duration sub frame 
            // is 16 msec, the sample rate is 16khz, which means 256 samples per sub frame. 
            // With 4 bytes per sample, that gives us 1024 bytes.
            this.audioBuffer = new byte[audioSource.SubFrameLengthInBytes];

            // Open the reader for the audio frames
            this.reader = audioSource.OpenReader();

           // PixelFormats.Indexed1;
            this.energyBitmap = new WriteableBitmap(EnergyBitmapWidth, EnergyBitmapHeight, 96, 96, PixelFormats.Indexed4, new BitmapPalette(new List<Color> { Colors.White, Colors.Green, Colors.Red, Colors.LightBlue, Colors.Aquamarine, Colors.Pink, Colors.Orange}));

            // Initialize foreground pixels
            this.foregroundPixels = new byte[EnergyBitmapHeight];
            for (int i = 0; i < this.foregroundPixels.Length; ++i)
            {
                this.foregroundPixels[i] = 0xff;
            }
            for (int i = 0; i < this.lineVolumePixels.Length; ++i)
            {
                this.lineVolumePixels[i] = 0x55;
            }
            for (int i = 0; i < this.lineVolumePixelsIsSpeaking.Length; ++i)
            {
                this.lineVolumePixelsIsSpeaking[i] = 0x11;
            }
            for (int i = 0; i < this.lineVolumePixelsSoft.Length; ++i)
            {
                this.lineVolumePixelsSoft[i] = 0x22;
            }
            for (int i = 0; i < this.lineVolumePixelsLoud.Length; ++i)
            {
                this.lineVolumePixelsLoud[i] = 0x33;
            }

        //    this.kinectImage.Source = this.energyBitmap;
            CompositionTarget.Rendering += this.UpdateEnergy;
            
            

            //if (this.reader != null)
            //{
            //    // Subscribe to new audio frame arrived events
            //    this.reader.FrameArrived += this.Reader_FrameArrived;
            //}


            getSpeechthings();

        }
Example #43
        public void close()
        {
           // CompositionTarget.Rendering -= this.UpdateEnergy;

            if (this.reader != null)
            {
                // AudioBeamFrameReader is IDisposable
                this.reader.Dispose();
                this.reader = null;
            }

            if (null != this.speechEngine)
            {
                this.speechEngine.SpeechRecognized -= this.SpeechRecognized;
                this.speechEngine.SpeechRecognitionRejected -= this.SpeechRejected;
                this.speechEngine.RecognizeAsyncStop();
            }
        }
Example #44
        public void ShutdownSensor()
        {
            if (skeletonReader != null)
            {
                skeletonReader.FrameArrived -= skeletonReader_FrameArrived;
                skeletonReader.Dispose();
                skeletonReader = null;
            }
            if (depthReader != null)
            {
                depthReader.FrameArrived -= depthReader_FrameArrived;
                depthReader.Dispose();
                depthReader = null;
            }
            if (colorReader != null)
            {
                colorReader.FrameArrived -= colorReader_FrameArrived;
                colorReader.Dispose();
                colorReader = null;
            }
            if (irReader != null)
            {
                irReader.FrameArrived -= irReader_FrameArrived;
                irReader.Dispose();
                irReader = null;
            }
            if (audioStream != null)
            {
                audioStream.Close();
                audioStream.Dispose();
                audioStream = null;
            }
            if (audioReader != null)
            {
                audioReader.FrameArrived -= audioReader_FrameArrived;
                audioReader.Dispose();
                audioReader = null;
            }

            //Note: we don't close the Kinect here because it would remove it from the list of available Kinects
        }
Example #45
        /// <summary>
        /// Initializes a new instance of the MainWindow class.
        /// </summary>
        public MainWindow()
        {
            // one sensor is currently supported
            this.kinectSensor = KinectSensor.GetDefault();

            // set IsAvailableChanged event notifier
            this.kinectSensor.IsAvailableChanged += this.Sensor_IsAvailableChanged;

            // open the sensor
            this.kinectSensor.Open();

            // get the coordinate mapper
            this.coordinateMapper = this.kinectSensor.CoordinateMapper;

            // get the color frame details
            FrameDescription frameDescription = this.kinectSensor.ColorFrameSource.FrameDescription;

            // get size of joint space
            this.displayWidth = frameDescription.Width;
            this.displayHeight = frameDescription.Height;
            this.displayRect = new Rect(0.0, 0.0, this.displayWidth, this.displayHeight);

            //BODY
            // open the reader for the body frames
            this.bodyFrameReader = this.kinectSensor.BodyFrameSource.OpenReader();

            // set the maximum number of bodies that would be tracked by Kinect
            this.bodyCount = this.kinectSensor.BodyFrameSource.BodyCount;

            // allocate storage to store body objects
            this.bodies = new Body[this.bodyCount];

            // a bone defined as a line between two joints
            this.bones = new List<Tuple<JointType, JointType>>();

            // Torso
            this.bones.Add(new Tuple<JointType, JointType>(JointType.Head, JointType.Neck));
            this.bones.Add(new Tuple<JointType, JointType>(JointType.Neck, JointType.SpineShoulder));
            this.bones.Add(new Tuple<JointType, JointType>(JointType.SpineShoulder, JointType.SpineMid));
            this.bones.Add(new Tuple<JointType, JointType>(JointType.SpineMid, JointType.SpineBase));
            this.bones.Add(new Tuple<JointType, JointType>(JointType.SpineShoulder, JointType.ShoulderRight));
            this.bones.Add(new Tuple<JointType, JointType>(JointType.SpineShoulder, JointType.ShoulderLeft));
            this.bones.Add(new Tuple<JointType, JointType>(JointType.SpineBase, JointType.HipRight));
            this.bones.Add(new Tuple<JointType, JointType>(JointType.SpineBase, JointType.HipLeft));

            // Right Arm
            this.bones.Add(new Tuple<JointType, JointType>(JointType.ShoulderRight, JointType.ElbowRight));
            this.bones.Add(new Tuple<JointType, JointType>(JointType.ElbowRight, JointType.WristRight));
            this.bones.Add(new Tuple<JointType, JointType>(JointType.WristRight, JointType.HandRight));
            this.bones.Add(new Tuple<JointType, JointType>(JointType.HandRight, JointType.HandTipRight));
            this.bones.Add(new Tuple<JointType, JointType>(JointType.WristRight, JointType.ThumbRight));

            // Left Arm
            this.bones.Add(new Tuple<JointType, JointType>(JointType.ShoulderLeft, JointType.ElbowLeft));
            this.bones.Add(new Tuple<JointType, JointType>(JointType.ElbowLeft, JointType.WristLeft));
            this.bones.Add(new Tuple<JointType, JointType>(JointType.WristLeft, JointType.HandLeft));
            this.bones.Add(new Tuple<JointType, JointType>(JointType.HandLeft, JointType.HandTipLeft));
            this.bones.Add(new Tuple<JointType, JointType>(JointType.WristLeft, JointType.ThumbLeft));

            // Right Leg
            this.bones.Add(new Tuple<JointType, JointType>(JointType.HipRight, JointType.KneeRight));
            this.bones.Add(new Tuple<JointType, JointType>(JointType.KneeRight, JointType.AnkleRight));
            this.bones.Add(new Tuple<JointType, JointType>(JointType.AnkleRight, JointType.FootRight));

            // Left Leg
            this.bones.Add(new Tuple<JointType, JointType>(JointType.HipLeft, JointType.KneeLeft));
            this.bones.Add(new Tuple<JointType, JointType>(JointType.KneeLeft, JointType.AnkleLeft));
            this.bones.Add(new Tuple<JointType, JointType>(JointType.AnkleLeft, JointType.FootLeft));

            // populate body colors, one for each BodyIndex
            this.bodyColors = new List<Pen>();

            this.bodyColors.Add(new Pen(Brushes.Red, 6));
            this.bodyColors.Add(new Pen(Brushes.Orange, 6));
            this.bodyColors.Add(new Pen(Brushes.Green, 6));
            this.bodyColors.Add(new Pen(Brushes.Blue, 6));
            this.bodyColors.Add(new Pen(Brushes.Indigo, 6));
            this.bodyColors.Add(new Pen(Brushes.Violet, 6));

            //FACE
            // specify the required face frame results
            FaceFrameFeatures faceFrameFeatures =
                FaceFrameFeatures.BoundingBoxInColorSpace
                | FaceFrameFeatures.PointsInColorSpace
                | FaceFrameFeatures.RotationOrientation
                | FaceFrameFeatures.FaceEngagement
                | FaceFrameFeatures.Glasses
                | FaceFrameFeatures.Happy
                | FaceFrameFeatures.LeftEyeClosed
                | FaceFrameFeatures.RightEyeClosed
                | FaceFrameFeatures.LookingAway
                | FaceFrameFeatures.MouthMoved
                | FaceFrameFeatures.MouthOpen;
            // create a face frame source + reader to track each face in the FOV
            this.faceFrameSources = new FaceFrameSource[this.bodyCount];
            this.faceFrameReaders = new FaceFrameReader[this.bodyCount];

            for (int i = 0; i < this.bodyCount; i++)
            {
                // create the face frame source with the required face frame features and an initial tracking Id of 0
                this.faceFrameSources[i] = new FaceFrameSource(this.kinectSensor, 0, faceFrameFeatures);

                // open the corresponding reader
                this.faceFrameReaders[i] = this.faceFrameSources[i].OpenReader();
            }

            // allocate storage to store face frame results for each face in the FOV
            this.faceFrameResults = new FaceFrameResult[this.bodyCount];

            // populate face result colors - one for each face index
            this.faceBrush = new List<Brush>()
            {
                Brushes.Red,
                Brushes.Orange,
                Brushes.Green,
                Brushes.Blue,
                Brushes.Indigo,
                Brushes.Violet
            };

            //AUDIO
            // Get its audio source
            AudioSource audioSource = this.kinectSensor.AudioSource;
            // Open the reader for the audio frames
            this.audioreader = audioSource.OpenReader();

            // Allocate 1024 bytes to hold a single audio sub frame. Duration sub frame
            // is 16 msec, the sample rate is 16khz, which means 256 samples per sub frame.
            // With 4 bytes per sample, that gives us 1024 bytes.
            this.audioBuffer = new byte[audioSource.SubFrameLengthInBytes];

            // set the status text
            this.StatusText = this.kinectSensor.IsAvailable ? Properties.Resources.RunningStatusText
                                                            : Properties.Resources.NoSensorStatusText;

            // Create the drawing group we'll use for drawing
            this.drawingGroup = new DrawingGroup();

            // Create an image source that we can use in our image control
            this.imageSource = new DrawingImage(this.drawingGroup);

            // use the window object as the view model in this simple example
            this.DataContext = this;

            // initialize the components (controls) of the window
            this.InitializeComponent();

            EngagementInfo = new EngagementFeatures(this.bodyCount);
        }
Example #46
        // http://mtaulty.com/CommunityServer/blogs/mike_taultys_blog/archive/2014/10/01/kinect-for-windows-v2-hello-audio-world-for-the-net-windows-app-developer-amp-harmonica-player.aspx
        void audioBeamFrameReader_FrameArrived( AudioBeamFrameReader sender,
                                            AudioBeamFrameArrivedEventArgs args )
        {
            using ( var audioFrame =
                args.FrameReference.AcquireBeamFrames() as AudioBeamFrameList ) {

                if ( audioFrame == null ) {
                    return;
                }

                for ( int i = 0; i < audioFrame.Count; i++ ) {
                    using ( var frame = audioFrame[i] ) {
                        for ( int j = 0; j < frame.SubFrames.Count; j++ ) {
                            using ( var subFrame = frame.SubFrames[j] ) {
                                // Direction of the sound
                                LineBeamAngle.Angle =
                                    (int)(subFrame.BeamAngle * 180 / Math.PI);

                                // Display the beam angle, confidence, and number of Bodies in the beam direction
                                TextBeamAngleConfidence.Text =
                                    subFrame.BeamAngleConfidence.ToString();
                                TextAudioBodyCorrelations.Text =
                                    subFrame.AudioBodyCorrelations.Count.ToString();

                                // If a person is in the beam direction, save their TrackingId
                                if ( subFrame.AudioBodyCorrelations.Count != 0 ) {
                                    AudioTrackingId =
                                        subFrame.AudioBodyCorrelations[0].BodyTrackingId;
                                }
                                else {
                                    AudioTrackingId = ulong.MaxValue;
                                }
                            }
                        }
                    }
                }
            }
        }
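
The TrackingId saved from AudioBodyCorrelations can then be matched against tracked bodies, as in the skeleton examples above. A sketch under the assumption that a bodies array is populated elsewhere:

        // Hypothetical lookup: the Body whose TrackingId matches the one
        // recorded from AudioBodyCorrelations, or null if no one matches.
        private Body FindSpeakingBody()
        {
            if (AudioTrackingId == ulong.MaxValue || bodies == null)
            {
                return null;
            }

            foreach (var body in bodies)
            {
                if (body != null && body.IsTracked && body.TrackingId == AudioTrackingId)
                {
                    return body;
                }
            }
            return null;
        }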
Example #47
        /// <summary>
        /// Execute shutdown tasks
        /// </summary>
        /// <param name="sender">object sending the event</param>
        /// <param name="e">event arguments</param>
        private void MainWindow_Closing(object sender, CancelEventArgs e)
        {
            CompositionTarget.Rendering -= this.UpdateEnergy;

            if (this.reader != null)
            {
                // AudioBeamFrameReader is IDisposable
                this.reader.Dispose();
                this.reader = null;
            }
/*
            if (this.kinectSensor != null)
            {
                this.kinectSensor.Close();
                this.kinectSensor = null;
            }
*/
        }
Example #48
        private void botonAudio_VEjemplos_Click(object sender, RoutedEventArgs e)
        {
            CompositionTarget.Rendering -= this.UpdateEnergy;

            if (this.reader != null)
            {
                // AudioBeamFrameReader is IDisposable
                this.reader.Dispose();
                this.reader = null;
            }
/*            if (this.kinectSensor != null)
            {
                this.kinectSensor.Close();
                this.kinectSensor = null;
            }
*/
            VentanaEjemplosKinect v = new VentanaEjemplosKinect();
            v.Show();
            this.Close();
        }
Example #49
        public VentanaAudioBasics()
        {
            // Initialize the components (controls) of the window
            this.InitializeComponent();
            // Only one Kinect Sensor is supported
            this.kinectSensor = KinectSensor.GetDefault();
            if (this.kinectSensor != null)
            {
                // set IsAvailableChanged event notifier
                this.kinectSensor.IsAvailableChanged += this.Sensor_IsAvailableChanged;
                // Open the sensor
                this.kinectSensor.Open();
                // Get its audio source
                AudioSource audioSource = this.kinectSensor.AudioSource;
                // Allocate 1024 bytes to hold a single audio sub frame. Duration sub frame 
                // is 16 msec, the sample rate is 16khz, which means 256 samples per sub frame. 
                // With 4 bytes per sample, that gives us 1024 bytes.
                this.audioBuffer = new byte[audioSource.SubFrameLengthInBytes];
                // Open the reader for the audio frames
                this.reader = audioSource.OpenReader();
                // Uncomment these two lines to overwrite the automatic mode of the audio beam.
                // It will change the beam mode to manual and set the desired beam angle.
                // In this example, point it straight forward.
                // Note that setting beam mode and beam angle will only work if the
                // application window is in the foreground.
                // Furthermore, setting these values is an asynchronous operation --
                // it may take a short period of time for the beam to adjust.
                /*
                audioSource.AudioBeams[0].AudioBeamMode = AudioBeamMode.Manual;
                audioSource.AudioBeams[0].BeamAngle = 0;
                */
            }
            else
            {
                // On failure, set the status text
                this.statusBarText.Text = Properties.Resources.NoSensorStatusText;
                return;
            }
            this.energyBitmap = new WriteableBitmap(EnergyBitmapWidth, EnergyBitmapHeight,
                                                        96, 96, PixelFormats.Indexed1,
                                                            new BitmapPalette(new List<Color> { Colors.White, (Color)this.Resources["KinectPurpleColor"] }));
        }