//------------


        /// <summary>
        /// Wires the Kinect body, multi-source (color) and HD-face pipelines,
        /// sets up the UDP endpoint, and starts the sensor.
        /// </summary>
        public MainWindow()
        {
            InitializeComponent();


            // Configure the UDP endpoint (IP is a class field; fixed port 9999).
            ep = new IPEndPoint(IP, 9999);

            // Acquire the default Kinect sensor.
            _sensor = KinectSensor.GetDefault();

            if (_sensor != null)
            {
                // Listen for body data.
                _bodySource = _sensor.BodyFrameSource;
                _bodyReader = _bodySource.OpenReader();
                _bodyReader.FrameArrived += BodyReader_FrameArrived;

                // Listen for multi-source (color only) data.
                _multiReader = _sensor.OpenMultiSourceFrameReader(FrameSourceTypes.Color);
                _multiReader.MultiSourceFrameArrived += MultiReader_MultiSourceFrameArrived;

                // Listen for HD face data.
                _faceSource = new HighDefinitionFaceFrameSource(_sensor);
                _faceReader = _faceSource.OpenReader();
                _faceReader.FrameArrived += FaceReader_FrameArrived;

                _faceModel     = new FaceModel();
                _faceAlignment = new FaceAlignment();

                // Start tracking!
                _sensor.Open();
            }
        }
Ejemplo n.º 2
0
    /// <summary>
    /// Unity entry point: wires the body pipeline, requests the full set of
    /// face features, binds the face source/reader, and opens the sensor.
    /// </summary>
    void Start()
    {
        // Body frames come first; they supply tracking state for the face source.
        sensor     = KinectSensor.GetDefault();
        bodySource = sensor.BodyFrameSource;
        bodyReader = bodySource.OpenReader();
        bodyReader.FrameArrived += BodyReader_FrameArrived;

        // Every face feature this component consumes.
        const FaceFrameFeatures features = FaceFrameFeatures.BoundingBoxInColorSpace
                                         | FaceFrameFeatures.PointsInColorSpace
                                         | FaceFrameFeatures.BoundingBoxInInfraredSpace
                                         | FaceFrameFeatures.PointsInInfraredSpace
                                         | FaceFrameFeatures.RotationOrientation
                                         | FaceFrameFeatures.FaceEngagement
                                         | FaceFrameFeatures.Glasses
                                         | FaceFrameFeatures.Happy
                                         | FaceFrameFeatures.LeftEyeClosed
                                         | FaceFrameFeatures.RightEyeClosed
                                         | FaceFrameFeatures.LookingAway
                                         | FaceFrameFeatures.MouthMoved
                                         | FaceFrameFeatures.MouthOpen;

        // Face source bound to the current tracking id, plus its reader.
        FaceFrameSource = FaceFrameSource.Create(sensor, currentTrackingId, features);
        FaceFrameSource.TrackingIdLost += HdFaceSource_TrackingIdLost;
        FaceFrameReader = FaceFrameSource.OpenReader();
        FaceFrameReader.FrameArrived += HdFaceReader_FrameArrived;

        currentFaceAlignment = FaceAlignment.Create();

        sensor.Open();
    }
Ejemplo n.º 3
0
        /// <summary>
        /// Acquires the default Kinect sensor, wires the color+depth multi-source,
        /// body, and HD-face pipelines, then starts the sensor.
        /// </summary>
        private void InitializeKinect()
        {
            _sensor = KinectSensor.GetDefault();

            if (_sensor != null)
            {
                // Combined reader for color and depth frames.
                _reader = _sensor.OpenMultiSourceFrameReader(FrameSourceTypes.Color | FrameSourceTypes.Depth);// | FrameSourceTypes.LongExposureInfrared);
                _reader.MultiSourceFrameArrived += Reader_MultiSourceFrameArrived;

                // Display bitmap sized to the BGRA color stream.
                FrameDescription colorFrameDescription = _sensor.ColorFrameSource.CreateFrameDescription(ColorImageFormat.Bgra);
                ColorBitmap = new WriteableBitmap(colorFrameDescription.Width, colorFrameDescription.Height, 96.0, 96.0, PixelFormats.Bgr32, null);

                // Body frame pipeline.
                _bodySource = _sensor.BodyFrameSource;
                _bodyReader = _bodySource.OpenReader();
                _bodyReader.FrameArrived += BodyReader_FrameArrived;

                // HD face pipeline; alignment quality explicitly set to Low.
                _faceSource = new HighDefinitionFaceFrameSource(_sensor);
                _faceReader = _faceSource.OpenReader();
                _faceSource.TrackingQuality = FaceAlignmentQuality.Low;
                _faceReader.FrameArrived   += FaceReader_FrameArrived;

                _faceModel     = new FaceModel();
                _faceAlignment = new FaceAlignment();

                _sensor.Open();
            }
        }
Ejemplo n.º 4
0
        /// <summary>
        /// Prepares the face tracker: body + HD-face readers, availability
        /// callback, and the UDP endpoint/buffer used for streaming data.
        /// NOTE(review): sensor.Open() is not called here — presumably the
        /// sensor is opened elsewhere; confirm.
        /// </summary>
        public void InitTracker()
        {
            // No availability event seen yet; SensorAvailableChanged updates this.
            lastSensorAvail = false;
            sensor          = KinectSensor.GetDefault();

            // Body frame pipeline.
            bodySource = sensor.BodyFrameSource;
            bodyReader = bodySource.OpenReader();
            bodyReader.FrameArrived += NewBodyReaderFrame;

            // HD face pipeline.
            hdFaceFrameSource = new HighDefinitionFaceFrameSource(sensor);
            hdFaceFrameSource.TrackingIdLost += HdFaceSource_TrackingIdLost;

            hdFaceFrameReader = hdFaceFrameSource.OpenReader();
            hdFaceFrameReader.FrameArrived += HdFaceReader_FrameArrived;

            sensor.IsAvailableChanged += SensorAvailableChanged;
            Console.WriteLine("Face tracker ready.");

            // UDP destination (ip/port are class fields).
            dest     = IPAddress.Parse(ip);
            endPoint = new IPEndPoint(dest, port);

            // Fixed 48-byte payload buffer for outgoing packets.
            sendBuffer = new byte[48];

            // NOTE(review): only the endpoint and buffer are prepared here — no
            // socket object is created in this method despite the log message;
            // confirm the socket is created elsewhere.
            Console.WriteLine("UDP Socket created for port {0}", port);
        }
Ejemplo n.º 5
0
    /// <summary>
    /// Unity entry point; acts as an HD-face initializer: builds the mesh,
    /// wires body + HD-face pipelines, opens the sensor, and prepares the
    /// blend-shape lookup used to drive the actor mesh.
    /// </summary>
    void Start()
    {  // this acts like InitializeHDFace()
        theGeometry = new Mesh();

        //SetViewCollectionStatus();

        sensor = KinectSensor.GetDefault();

        // Body frame pipeline.
        bodySource = sensor.BodyFrameSource;

        bodyReader = bodySource.OpenReader();

        bodyReader.FrameArrived += BodyReader_FrameArrived;

        // HD face pipeline.
        highDefinitionFaceFrameSource = HighDefinitionFaceFrameSource.Create(sensor);

        highDefinitionFaceFrameSource.TrackingIdLost += HdFaceSource_TrackingIdLost;

        highDefinitionFaceFrameReader = highDefinitionFaceFrameSource.OpenReader();

        highDefinitionFaceFrameReader.FrameArrived += HdFaceReader_FrameArrived;

        CurrentFaceModel = FaceModel.Create();

        currentFaceAlignment = FaceAlignment.Create();

        sensor.Open();

        // Animation-unit scratch map and the actor's blend-shape names.
        tempAus = new Dictionary <string, float>();
        actorBlendshapeNames = getBlendShapeNames(actorMesh);
    }
    /// <summary>
    /// Resets face-capture state, opens the sensor and the body/HD-face
    /// readers, and kicks off asynchronous face-model collection.
    /// </summary>
    void initialize()
    {
        IsFaceModelCollectCompleted = false;
        FaceCaptureStatus           = "";
        FaceVertices = new List <CameraSpacePoint>();

        sensor = KinectSensor.GetDefault();
        if (sensor == null)
        {
            return;
        }
        sensor.Open();

        // NOTE(review): readers are opened but no FrameArrived handlers are
        // attached in this method — presumably frames are polled elsewhere
        // (e.g. per-frame Update); confirm.
        bodySource = sensor.BodyFrameSource;
        bodyReader = bodySource.OpenReader();

        hdFaceFrameSource = HighDefinitionFaceFrameSource.Create(sensor);
        hdFaceFrameReader = hdFaceFrameSource.OpenReader();

        faceModel     = FaceModel.Create();
        faceAlignment = FaceAlignment.Create();
        // No extra capture attributes requested for the model builder.
        FaceModelBuilderAttributes attributes = FaceModelBuilderAttributes.None;

        // Begin async face-model capture; completion/failure callbacks follow.
        faceModelBuilder = hdFaceFrameSource.OpenModelBuilder(attributes);
        faceModelBuilder.CollectFaceDataAsync(collectFaceModelCompleted, collectFaceModelFailed);
    }
Ejemplo n.º 7
0
        /// <summary>
        /// Wires the body, color, and two HD-face pipelines (primary and a
        /// secondary "Sub" face), then starts the sensor.
        /// </summary>
        public MainWindow()
        {
            InitializeComponent();
            _sensor = KinectSensor.GetDefault();

            if (_sensor != null)
            {
                // Listen for body data.
                _bodySource = _sensor.BodyFrameSource;
                _bodyReader = _bodySource.OpenReader();
                _bodyReader.FrameArrived += BodyReader_FrameArrived;

                // Color frames for display.
                _colorReader = _sensor.ColorFrameSource.OpenReader();
                _colorReader.FrameArrived += ColorReader_FrameArrived;

                // Listen for HD face data — two sources to track two faces.
                _faceSource    = new HighDefinitionFaceFrameSource(_sensor);
                _faceSourceSub = new HighDefinitionFaceFrameSource(_sensor);
                // _faceSource.TrackingIdLost += OnTrackingIdLost;
                _faceReader    = _faceSource.OpenReader();
                _faceReaderSub = _faceSourceSub.OpenReader();

                _faceReader.FrameArrived    += FaceReader_FrameArrived;
                _faceReaderSub.FrameArrived += FaceReaderSub_FrameArrived;

                _faceModel        = new FaceModel();
                _faceAlignment    = new FaceAlignment();
                _faceAlignmentSub = new FaceAlignment();
                // Start tracking!
                _sensor.Open();
            }
        }
Ejemplo n.º 8
0
        /// <summary>
        /// Window load handler: wires body and HD-face pipelines, opens the
        /// sensor, then attaches a color multi-source reader.
        /// </summary>
        private void Window_Loaded(object sender, RoutedEventArgs e)
        {
            _sensor = KinectSensor.GetDefault();

            if (_sensor != null)
            {
                // Body frame pipeline.
                _bodySource = _sensor.BodyFrameSource;
                _bodyReader = _bodySource.OpenReader();
                _bodyReader.FrameArrived += BodyReader_FrameArrived;

                // HD face pipeline.
                _faceSource = new HighDefinitionFaceFrameSource(_sensor);

                _faceReader = _faceSource.OpenReader();
                _faceReader.FrameArrived += FaceReader_FrameArrived;

                _faceModel     = new FaceModel();
                _faceAlignment = new FaceAlignment();


                _sensor.Open();

                // Color multi-source reader, attached after the sensor is opened.
                _reader = _sensor.OpenMultiSourceFrameReader(FrameSourceTypes.Color);
                _reader.MultiSourceFrameArrived += Reader_MultiSourceFrameArrived;
            }
        }
Ejemplo n.º 9
0
        /// <summary>
        /// Initializes page state (face/body state machines, sample buffers)
        /// and wires infrared, body, and HD-face pipelines.
        /// </summary>
        public FacePage()
        {
            // NOTE(review): no sound source (SoundLocation/stream) is assigned
            // before Play() — confirm this call actually plays anything.
            System.Media.SoundPlayer player = new System.Media.SoundPlayer();
            player.Play();

            InitializeComponent();

            // Default to "waiting for Kinect" until a sensor is found.
            currFaceState = FaceState.KinectWait;
            currBodyState = BodyState.KinectWait;
            faceSamples   = new double[NUM_SAMPLES];

            // One run counter per flag type.
            flagRuns = new int[Enum.GetNames(typeof(FlagType)).Length];

            _sensor = KinectSensor.GetDefault();

            if (_sensor != null)
            {
                // Sensor present: advance both state machines to their wait states.
                currFaceState   = FaceState.FaceWait;
                currBodyState   = BodyState.BodyWait;
                _infraredSource = _sensor.InfraredFrameSource;
                _infraredReader = _infraredSource.OpenReader();
                _infraredReader.FrameArrived += InfraredReader_FrameArrived;

                // Body frame pipeline.
                _bodySource = _sensor.BodyFrameSource;
                _bodyReader = _bodySource.OpenReader();
                _bodyReader.FrameArrived += BodyReader_FrameArrived;

                // HD face pipeline.
                _faceSource = new HighDefinitionFaceFrameSource(_sensor);
                _faceReader = _faceSource.OpenReader();
                _faceReader.FrameArrived += FaceReader_FrameArrived;

                _sensor.Open();
            }
        }
Ejemplo n.º 10
0
        /// <summary>
        /// Wires the body, color, and two HD-face pipelines (primary plus a
        /// secondary "Sub" face), then starts the sensor.
        /// </summary>
        public MainWindow()
        {
            InitializeComponent();
            _sensor = KinectSensor.GetDefault();

            if (_sensor != null)
            {
                // Listen for body data.
                _bodySource = _sensor.BodyFrameSource;
                _bodyReader = _bodySource.OpenReader();
                _bodyReader.FrameArrived += BodyReader_FrameArrived;

                // Color frames for display.
                _colorReader = _sensor.ColorFrameSource.OpenReader();
                _colorReader.FrameArrived += ColorReader_FrameArrived;

                // Listen for HD face data — two sources to track two faces.
                _faceSource = new HighDefinitionFaceFrameSource(_sensor);
                _faceSourceSub = new HighDefinitionFaceFrameSource(_sensor);
               // _faceSource.TrackingIdLost += OnTrackingIdLost;
                _faceReader = _faceSource.OpenReader();
                _faceReaderSub = _faceSourceSub.OpenReader();

                _faceReader.FrameArrived += FaceReader_FrameArrived;
                _faceReaderSub.FrameArrived += FaceReaderSub_FrameArrived;

                _faceModel = new FaceModel();
                _faceAlignment = new FaceAlignment();
                _faceAlignmentSub = new FaceAlignment();
                // Start tracking!        
                _sensor.Open();
            }
        }
Ejemplo n.º 11
0
        /// <summary>
        /// Window load handler: sets up body and HD-face sources/readers.
        /// NOTE(review): the FrameArrived subscriptions and _sensor.Open() are
        /// all commented out, so no frames will ever arrive in this state —
        /// confirm whether this is intentional (e.g. work in progress).
        /// </summary>
        private void Window_Loaded(object sender, RoutedEventArgs e)
        {
            _sensor = KinectSensor.GetDefault();

            /*ColorFrameReader cfr = _sensor.ColorFrameSource.OpenReader();
             * fd = _sensor.ColorFrameSource.FrameDescription;
             * colordata=new byte[fd.LengthInPixels*4];
             * bitmap = new WriteableBitmap(fd.Width, fd.Height, 96, 96, PixelFormats.Bgr32, null);
             *
             * this.image.Source = bitmap;*/
            if (_sensor != null)
            {
                _bodySource = _sensor.BodyFrameSource;
                _bodyReader = _bodySource.OpenReader();
                //_bodyReader.FrameArrived += BodyReader_FrameArrived;

                _faceSource = new HighDefinitionFaceFrameSource(_sensor);
                _faceReader = _faceSource.OpenReader();
                //_faceReader.FrameArrived += FaceReader_FrameArrived;

                _faceModel     = new FaceModel();
                _faceAlignment = new FaceAlignment();

                //cfr.FrameArrived += cfr_FrameArrived;
                //_sensor.Open();
            }
        }
Ejemplo n.º 12
0
        /// <summary>
        /// Window load handler: acquires the sensor, caches color-frame display
        /// dimensions, wires the body pipeline, initializes HD face tracking,
        /// and opens the sensor. Any failure shows a message box and closes
        /// the window.
        /// </summary>
        private void Window_Loaded(object sender, RoutedEventArgs e)
        {
            try {
                kinect = KinectSensor.GetDefault();
                if (kinect == null)
                {
                    // Message text is user-facing (Japanese: "Cannot open Kinect").
                    throw new Exception("Kinectを開けません");
                }
                coordinateMapper = kinect.CoordinateMapper;
                // Cache color stream dimensions for drawing.
                FrameDescription frameDescription = kinect.ColorFrameSource.FrameDescription;
                displayWidth  = frameDescription.Width;
                displayHeight = frameDescription.Height;
                displayRect   = new Rect(0, 0, displayWidth, displayHeight);

                // Body frame pipeline.
                bodyFrameSource = kinect.BodyFrameSource;
                bodyFrameReader = bodyFrameSource.OpenReader();
                bodyFrameReader.FrameArrived += bodyFrameReader_FrameArrived;
                InitializeHDFace();

                kinect.Open();
            }
            catch (Exception ex) {
                MessageBox.Show(ex.Message);
                Close();
            }
        }
Ejemplo n.º 13
0
    /// <summary>
    /// Server bootstrap: shows the overlay form, launches the client process,
    /// opens IPC pipes, configures speech synthesis/recognition, and wires the
    /// Kinect body + HD-face pipelines with double-exponential smoothing
    /// (DESP) filters for bones, face orientation, and facial expressions.
    /// </summary>
    public Server()
    {
        Form = new CustomPerPixelAlphaForm();
        FormSetProperties();
        FormDock();
        Form.Show();

        // Locate the client's build output relative to this process's directory.
        var clientBuildDirectory = Environment.CurrentDirectory + "\\..\\..\\..\\..\\..\\Reflecta.Client\\bin";
        var clientStartInfo      = new ProcessStartInfo
        {
            FileName         = clientBuildDirectory + "\\Client.exe",
            WorkingDirectory = clientBuildDirectory,
            WindowStyle      = ProcessWindowStyle.Minimized
        };

        Client = Process.Start(clientStartInfo);

        OpenPipes();

        // Text-to-speech with a female voice; viseme events drive lip sync.
        SpeechSynthesizer = new SpeechSynthesizer();
        SpeechSynthesizer.SelectVoiceByHints(VoiceGender.Female);
        SpeechSynthesizer.SpeakStarted   += SpeechSynthesizer_SpeakStarted;
        SpeechSynthesizer.VisemeReached  += SpeechSynthesizer_VisemeReached;
        SpeechSynthesizer.SpeakCompleted += SpeechSynthesizer_SpeakCompleted;

        // Continuous command recognition restricted to KnownCommands.
        SpeechRecognitionEngine = new SpeechRecognitionEngine();
        SpeechRecognitionEngine.UnloadAllGrammars();
        SpeechRecognitionEngine.LoadGrammar(new Grammar(new GrammarBuilder(KnownCommands)));
        SpeechRecognitionEngine.SpeechRecognized += SpeechRecognitionEngine_SpeechRecognized;
        SpeechRecognitionEngine.SetInputToDefaultAudioDevice();
        SpeechRecognitionEngine.RecognizeAsync(RecognizeMode.Multiple);

        KinectSensor = KinectSensor.GetDefault();
        KinectSensor.Open();

        // Body frame pipeline plus one smoothing filter per skeletal bone.
        BodyFrameSource = KinectSensor.BodyFrameSource;
        BodyFrameReader = BodyFrameSource.OpenReader();
        BodyFrameReader.FrameArrived += BodyFrameReader_FrameArrived;
        Bodies   = null;
        BodyDESP = new DESPQuaternion[(int)MoCapKinectBone.Count];
        for (var i = 0; i < (int)MoCapKinectBone.Count; i++)
        {
            BodyDESP[i] = new DESPQuaternion();
        }

        // HD face pipeline at high alignment quality.
        HighDefinitionFaceFrameSource = new HighDefinitionFaceFrameSource(KinectSensor);
        HighDefinitionFaceFrameSource.TrackingQuality = FaceAlignmentQuality.High;
        HighDefinitionFaceFrameReader = HighDefinitionFaceFrameSource.OpenReader();
        HighDefinitionFaceFrameReader.FrameArrived += HighDefinitionFaceFrameReader_FrameArrived;
        FaceAlignment = new FaceAlignment();

        // Smoothing filters for face orientation and each facial expression.
        FaceDESP           = new DESPQuaternion();
        FaceExpressionDESP = new DESPFloat[(int)MoCapKinectFacialExpression.Count];
        for (var i = 0; i < (int)MoCapKinectFacialExpression.Count; i++)
        {
            FaceExpressionDESP[i] = new DESPFloat();
        }
    }
Ejemplo n.º 14
0
 /// <summary>
 /// Minimal Kinect setup: allocates the body array (Kinect v2 tracks up to
 /// six bodies), opens the sensor, and subscribes to body frames.
 /// </summary>
 void InitKinect()
 {
     _bodies = new Body[6];
     _kinect = KinectSensor.GetDefault();
     _kinect.Open();
     _bodyFrameSource = _kinect.BodyFrameSource;
     _bodyFrameReader = _bodyFrameSource.OpenReader();
     _bodyFrameReader.FrameArrived += _bodyFrameReader_FrameArrived;
 }
Ejemplo n.º 15
0
        List <int[]> list_arr_index = new List <int[]>();// list of index arrays

        /// <summary>
        /// Wires body, HD-face, and depth+infrared multi-source pipelines,
        /// prepares display bitmaps for the IR and depth streams, then starts
        /// the sensor.
        /// </summary>
        public MainWindow()
        {
            InitializeComponent();
            _sensor = KinectSensor.GetDefault();

            if (_sensor != null)
            {
                // Listen for body data.
                _bodySource = _sensor.BodyFrameSource;
                _bodyReader = _bodySource.OpenReader();
                _bodyReader.FrameArrived += BodyReaderFrameArrived;

                // Listen for HD face data.
                _faceSource = new HighDefinitionFaceFrameSource(_sensor);
                _faceReader = _faceSource.OpenReader();
                _faceReader.FrameArrived += FaceReader_FrameArrived;

                _faceModel     = new FaceModel();
                _faceAlignment = new FaceAlignment();

                // Multi-frame reader for depth + infrared.
                this.multiFrameReader = _sensor.OpenMultiSourceFrameReader(FrameSourceTypes.Depth |
                                                                           FrameSourceTypes.Infrared);

                // Frame description for the infrared stream.
                infraredFrameDescription = _sensor.InfraredFrameSource.FrameDescription;

                // Frame description for the depth stream.
                depthFrameDescription = _sensor.DepthFrameSource.FrameDescription;

                infraredRect = new Int32Rect(0, 0, infraredFrameDescription.Width, infraredFrameDescription.Height);
                depthRect    = new Int32Rect(0, 0, depthFrameDescription.Width, depthFrameDescription.Height);

                // Attach the multi-stream event handler.
                multiFrameReader.MultiSourceFrameArrived += ReaderMultiFrameArrived;

                // -----------------------------------------
                // Display settings for the IR frame
                // -----------------------------------------
                // Create the WriteableBitmap used for on-screen display.
                infraredBitmap = new WriteableBitmap(this.infraredFrameDescription.Width,
                                                     this.infraredFrameDescription.Height, 96.0, 96.0, PixelFormats.Gray16, null);


                depthBitmap = new WriteableBitmap(this.depthFrameDescription.Width,
                                                  this.depthFrameDescription.Height, 96.0, 96.0, PixelFormats.Gray16, null);
                // Bind the WriteableBitmap to the WPF Image control's source.
                //ColorImage.Source = this.infraredBitmap; // does not work here

                // start tracking
                _sensor.Open();
            }
        }
Ejemplo n.º 16
0
        /// <summary>
        /// Constructor. Runs exactly once at startup: configures the color
        /// stream/reader and, when a sensor is present, the body and HD-face
        /// pipelines, then opens the Kinect.
        /// </summary>
        public MainWindow()
        {
            InitializeComponent();

            // Acquire a reference to the Kinect sensor itself.
            this.kinect = KinectSensor.GetDefault();

            // Configure the color image format and its reader.
            this.colorImageFormat = ColorImageFormat.Bgra;
            this.colorFrameDescription
                = this.kinect.ColorFrameSource.CreateFrameDescription(this.colorImageFormat);
            this.colorFrameReader = this.kinect.ColorFrameSource.OpenReader();
            this.colorFrameReader.FrameArrived += ColorFrameReader_FrameArrived;

            // For face-rotation detection (not yet wired up).
            //this.faceFrameSource = new FaceFrameSource(this.kinect, 0, );


            if (this.kinect != null)
            {
                // Body frame pipeline.
                _bodySource = kinect.BodyFrameSource;
                _bodyReader = _bodySource.OpenReader();
                _bodyReader.FrameArrived += BodyReader_FrameArrived;

                // HD face pipeline.
                _faceSource = new HighDefinitionFaceFrameSource(kinect);

                _faceReader = _faceSource.OpenReader();
                _faceReader.FrameArrived += FaceReader_FrameArrived;

                _faceModel     = new FaceModel();
                _faceAlignment = new FaceAlignment();
            }

            // Start the Kinect.
            // Disabled video/CSV logging experiments kept for reference:
            //aviWriter.FrameRate = 30;
            //aviWriter.Open(@"c:\users\abelab\desktop\log\test.avi", 1920, 1080);
            //writer.Open(@"c:\users\abelab\desktop\log\test.avi", 1920, 1080, 30, VideoCodec.MPEG4);

            /*
             * for (int i=0; i<1347; i++)
             * {
             *      sw.Write(i + ",,,,,,");
             * }
             * sw.WriteLine();
             * for(int i=0; i<1347; i++)
             * {
             *      sw.Write("X(m),Y(m),Z(m),X(pixel),Y(pixel),,");
             * }
             * sw.WriteLine();
             */
            this.kinect.Open();
        }
Ejemplo n.º 17
0
        /// <summary>
        /// Starts the sensor: wires body + HD-face pipelines, prepares the mesh
        /// and text style for the jig, then delegates to the base class. On
        /// success, (re)creates the face model builder and begins face-data
        /// collection.
        /// </summary>
        /// <returns>Result of base.StartSensor(); true when the sensor started.</returns>
        public override bool StartSensor()
        {
            // Body frame pipeline (_kinect is provided by the base class).
            _bodySource = _kinect.BodyFrameSource;
            _bodyReader = _bodySource.OpenReader();
            _bodyReader.FrameArrived += BodyReader_FrameArrived;

            // HD face pipeline.
            _hdFaceFrameSource =
                new HighDefinitionFaceFrameSource(_kinect);
            _hdFaceFrameSource.TrackingIdLost +=
                HdFaceSource_TrackingIdLost;

            _hdFaceFrameReader =
                _hdFaceFrameSource.OpenReader();
            _hdFaceFrameReader.FrameArrived +=
                HdFaceReader_FrameArrived;

            _currentFaceModel     = new FaceModel();
            _currentFaceAlignment = new FaceAlignment();

            InitializeMesh();
            UpdateMesh();

            // Text style for our jig

            _style      = new TextStyle();
            _style.Font =
                new FontDescriptor("standard.shx", false, false, 0, 0);
            _style.TextSize = 10;

            var res = base.StartSensor();

            if (res)
            {
                // Replace any previous builder before starting a new collection.
                if (_faceModelBuilder != null)
                {
                    _faceModelBuilder.Dispose();
                }
                _faceModelBuilder =
                    _hdFaceFrameSource.OpenModelBuilder(
                        FaceModelBuilderAttributes.None
                        );
                _faceModelBuilder.BeginFaceDataCollection();
                _faceModelBuilder.CollectionCompleted +=
                    HdFaceBuilder_CollectionCompleted;
            }
            return(res);
        }
Ejemplo n.º 18
0
        /// <summary>
        /// Window load handler: wires body, HD-face, and face-feature
        /// pipelines, opens the sensor, and attaches a four-stream
        /// multi-source reader.
        /// </summary>
        private void Window_Loaded(object sender, RoutedEventArgs e)
        {
            _sensor = KinectSensor.GetDefault();

            if (_sensor != null)
            {
                /// <summary>
                /// Task to be completed by the student
                /// Initialization phase
                /// </summary>
                /// /////////////////////////////////////////////////////////////////////////////////////////////////
                // Get the body source, body reader, and handler for body frame events.
                _bodySource = _sensor.BodyFrameSource;
                _bodyReader = _bodySource.OpenReader();
                _bodyReader.FrameArrived += BodyReader_FrameArrived;

                // Get the HD face source, face reader, and handler for face frame events.
                _faceSource = new HighDefinitionFaceFrameSource(_sensor);
                _faceReader = _faceSource.OpenReader();
                _faceReader.FrameArrived += FaceReader_FrameArrived;

                // Add the face-gesture (feature) reader and its handler.
                _faceFrameSource = new FaceFrameSource(this._sensor, 0,
                                                       FaceFrameFeatures.BoundingBoxInColorSpace |
                                                       FaceFrameFeatures.FaceEngagement |
                                                       FaceFrameFeatures.Glasses |
                                                       FaceFrameFeatures.Happy |
                                                       FaceFrameFeatures.LeftEyeClosed |
                                                       FaceFrameFeatures.MouthOpen |
                                                       FaceFrameFeatures.PointsInColorSpace |
                                                       FaceFrameFeatures.RightEyeClosed
                                                       );
                _faceFrameReader = this._faceFrameSource.OpenReader();
                _faceFrameReader.FrameArrived += FaceFrameReader_FrameArrived;


                // Create FaceModel and FaceAlignment.
                _faceModel     = new FaceModel();
                _faceAlignment = new FaceAlignment();

                // Open the sensor.
                _sensor.Open();
                // Attach the multi-source reader (color, depth, infrared, body).
                multiSourceReader = _sensor.OpenMultiSourceFrameReader(FrameSourceTypes.Color | FrameSourceTypes.Depth | FrameSourceTypes.Infrared | FrameSourceTypes.Body);
                multiSourceReader.MultiSourceFrameArrived += Reader_MultiSourceFrameArrived;
            }
        }
Ejemplo n.º 19
0
        /// <summary>
        /// Opens (or resumes) the body frame reader. Thread-safe via _eventLock.
        /// First call creates the reader and may attach the forwarding handler;
        /// subsequent calls simply unpause the existing reader.
        /// </summary>
        public void Open()
        {
            lock (_eventLock)
            {
                if (_bodyFrameReader == null)
                {
                    _bodyFrameReader = _bodyFrameSource.OpenReader();

                    // Subscribe only if someone is listening and the handler is
                    // not already attached.
                    // NOTE(review): _isEventRegistered is read here but never set
                    // in this method — confirm it is updated elsewhere (e.g. in
                    // the event add/remove accessors), otherwise the flag is dead.
                    if (!_isEventRegistered && _bodyFrameReady != null)
                    {
                        _bodyFrameReader.FrameArrived += BodyFrameReader_FrameArrived;
                    }
                }
                else
                {
                    // Reader already exists — just resume frame delivery.
                    _bodyFrameReader.IsPaused = false;
                }
            }
        }
Ejemplo n.º 20
0
        /// <summary>
        /// Builds an HD-face tracker around an externally owned sensor and
        /// exposes results through the faceHDData observable subject.
        /// NOTE(review): the sensor is not opened here — presumably the caller
        /// owns its lifecycle; confirm.
        /// </summary>
        /// <param name="sensor">Kinect sensor to attach to (may be null).</param>
        public FaceHD(KinectSensor sensor)
        {
            _sensor    = sensor;
            faceHDData = new Subject <FaceHDData>();

            if (_sensor != null)
            {
                // Body frame pipeline.
                _bodySource = _sensor.BodyFrameSource;
                _bodyReader = _bodySource.OpenReader();
                _bodyReader.FrameArrived += BodyReader_FrameArrived;

                // HD face pipeline.
                _faceSource = new HighDefinitionFaceFrameSource(_sensor);

                _faceReader = _faceSource.OpenReader();
                _faceReader.FrameArrived += FaceReader_FrameArrived;

                _faceModel     = new FaceModel();
                _faceAlignment = new FaceAlignment();
            }
        }
Ejemplo n.º 21
0
    /// <summary>
    /// Blocks until a Kinect sensor is connected and reports itself available,
    /// then wires up the body-frame reader.
    /// </summary>
    private void inizialiseKinect()
    {
        // Kinect sensor initialization: poll until the sensor exists and is
        // available. Sleep between attempts so the wait does not spin a CPU
        // core at 100% (the original loop was an unthrottled busy-wait).
        while (_kinectSensor == null || !_kinectSensor.IsAvailable)
        {
            _kinectSensor = KinectSensor.GetDefault();

            if (_kinectSensor != null)
            {
                // Open() is idempotent on an already-open sensor.
                _kinectSensor.Open();
            }

            // Give the sensor time to connect/warm up before re-polling.
            System.Threading.Thread.Sleep(100);
        }
        Console.WriteLine("Kinect available");

        // Sensor is up: attach the body-frame pipeline.
        _bodyFrameSource = _kinectSensor.BodyFrameSource;
        _reader          = _bodyFrameSource.OpenReader();

        if (_reader != null)
        {
            _reader.FrameArrived += _reader_FrameArrived;
        }
    }
Ejemplo n.º 22
0
        /// <summary>
        /// Window load handler: wires body and HD-face pipelines and starts
        /// the sensor.
        /// </summary>
        private void Window_Loaded(object sender, RoutedEventArgs e)
        {
            _sensor = KinectSensor.GetDefault();

            if (_sensor != null)
            {
                // Body frame pipeline.
                _bodySource = _sensor.BodyFrameSource;
                _bodyReader = _bodySource.OpenReader();
                _bodyReader.FrameArrived += BodyReader_FrameArrived;

                // HD face pipeline.
                _faceSource = new HighDefinitionFaceFrameSource(_sensor);

                _faceReader = _faceSource.OpenReader();
                _faceReader.FrameArrived += FaceReader_FrameArrived;

                _faceModel = new FaceModel();
                _faceAlignment = new FaceAlignment();

                _sensor.Open();
            }
        }
        /// <summary>
        /// Page constructor: wires body and HD-face pipelines and starts the
        /// sensor.
        /// </summary>
        public MainPage()
        {
            InitializeComponent();

            _sensor = KinectSensor.GetDefault();

            if (_sensor != null)
            {
                // Body frame pipeline.
                _bodySource = _sensor.BodyFrameSource;
                _bodyReader = _bodySource.OpenReader();
                _bodyReader.FrameArrived += BodyReader_FrameArrived;

                // HD face pipeline.
                _faceSource = new HighDefinitionFaceFrameSource(_sensor);

                _faceReader = _faceSource.OpenReader();
                _faceReader.FrameArrived += FaceReader_FrameArrived;

                _faceModel     = new FaceModel();
                _faceAlignment = new FaceAlignment();

                _sensor.Open();
            }
        }
Ejemplo n.º 24
0
        /// <summary>
        /// Page constructor: wires body and HD-face pipelines and starts the
        /// sensor.
        /// </summary>
        public MainPage()
        {
            InitializeComponent();

            _sensor = KinectSensor.GetDefault();

            if (_sensor != null)
            {
                // Body frame pipeline.
                _bodySource = _sensor.BodyFrameSource;
                _bodyReader = _bodySource.OpenReader();
                _bodyReader.FrameArrived += BodyReader_FrameArrived;

                // HD face pipeline.
                _faceSource = new HighDefinitionFaceFrameSource(_sensor);

                _faceReader = _faceSource.OpenReader();
                _faceReader.FrameArrived += FaceReader_FrameArrived;

                _faceModel = new FaceModel();
                _faceAlignment = new FaceAlignment();

                _sensor.Open();
            }
        }
        /// <summary>
        /// Page constructor: wires infrared, body, and HD-face pipelines and
        /// starts the sensor.
        /// </summary>
        public FacePage()
        {
            InitializeComponent();

            _sensor = KinectSensor.GetDefault();

            if (_sensor != null)
            {
                // Infrared frame pipeline.
                _infraredSource = _sensor.InfraredFrameSource;
                _infraredReader = _infraredSource.OpenReader();
                _infraredReader.FrameArrived += InfraredReader_FrameArrived;

                // Body frame pipeline. (Trailing ";;" is a harmless empty statement.)
                _bodySource = _sensor.BodyFrameSource;
                _bodyReader = _bodySource.OpenReader();
                _bodyReader.FrameArrived += BodyReader_FrameArrived;;

                // HD face pipeline.
                _faceSource = new HighDefinitionFaceFrameSource(_sensor);
                _faceReader = _faceSource.OpenReader();
                _faceReader.FrameArrived += FaceReader_FrameArrived;;

                _sensor.Open();
            }
        }
Ejemplo n.º 26
0
        /// <summary>
        /// Window load handler: wires body, HD-face, and depth multi-source
        /// pipelines.
        /// NOTE(review): _sensor.Open() is never called in this method —
        /// confirm the sensor is opened elsewhere, otherwise no frames arrive.
        /// </summary>
        private void Window_Loaded(object sender, RoutedEventArgs e)
        {
            _sensor = KinectSensor.GetDefault();

            if (_sensor != null)
            {
                // Listen for body data.
                _bodySource = _sensor.BodyFrameSource;
                _bodyReader = _bodySource.OpenReader();
                _bodyReader.FrameArrived += BodyReader_FrameArrived;

                // Listen for HD face data.
                _faceSource = new HighDefinitionFaceFrameSource(_sensor);
                _faceReader = _faceSource.OpenReader();
                _faceReader.FrameArrived += FaceReader_FrameArrived;

                _faceModel     = new FaceModel();
                _faceAlignment = new FaceAlignment();

                // Multi-source reader for depth frames (despite the original
                // "color camera" comment, only Depth is requested here).
                _reader = _sensor.OpenMultiSourceFrameReader(FrameSourceTypes.Depth);
                _reader.MultiSourceFrameArrived += Reader_MultiSourceFrameArrived;
            }
        }
Ejemplo n.º 27
0
        /// <summary>
        /// Explicit IFaceCamera implementation: wires body, HD-face, and
        /// feature-based face pipelines (sharing one TrackingIdLost handler)
        /// and starts the sensor.
        /// </summary>
        void IFaceCamera <System.Drawing.PointF> .Start()
        {
            _sensor = KinectSensor.GetDefault();

            if (_sensor != null)
            {
                _sensor.IsAvailableChanged += OnKinectSensorChanged;

                // Body frame pipeline.
                _bodySource = _sensor.BodyFrameSource;
                _bodyReader = _bodySource.OpenReader();

                _bodyReader.FrameArrived += OnBodyReaderFrameArrived;

                // HD face pipeline.
                _faceSourceHighDef = new HighDefinitionFaceFrameSource(_sensor);
                _faceReaderHighDef = _faceSourceHighDef.OpenReader();
                _faceReaderHighDef.FrameArrived += OnFaceReaderHighDefFrameArrived;

                // Feature-based face source with the expressions this camera reports.
                _faceSource = new FaceFrameSource(_sensor, 0, FaceFrameFeatures.Glasses |
                                                  FaceFrameFeatures.Happy |
                                                  FaceFrameFeatures.LeftEyeClosed |
                                                  FaceFrameFeatures.MouthOpen |
                                                  FaceFrameFeatures.MouthMoved |
                                                  FaceFrameFeatures.RightEyeClosed);

                // Both face sources share the same lost-tracking handler.
                _faceSource.TrackingIdLost        += _faceSource_TrackingIdLost;
                _faceSourceHighDef.TrackingIdLost += _faceSource_TrackingIdLost;
                _faceReader = _faceSource.OpenReader();
                _faceReader.FrameArrived += OnFaceReaderFrameArrived;


                _faceModel     = new FaceModel();
                _faceAlignment = new FaceAlignment();

                _sensor.Open();
            }
        }
Ejemplo n.º 28
0
    /// <summary>
    /// Server constructor: builds the overlay window, launches the client process,
    /// opens the IPC pipes, configures speech synthesis/recognition, and wires up
    /// Kinect body and HD-face capture with smoothing filters.
    /// NOTE(review): initialization order appears deliberate (pipes opened before
    /// speech engines, sensor opened before readers are created) — keep it.
    /// </summary>
    public Server()
    {
        // Per-pixel-alpha overlay window shown immediately.
        Form = new CustomPerPixelAlphaForm();
        FormSetProperties();
        FormDock();
        Form.Show();

        // Launch the client executable out of its build directory
        // (relative path from this process's working directory).
        var clientBuildDirectory = Environment.CurrentDirectory + "\\..\\..\\..\\..\\..\\Reflecta.Client\\bin";
        var clientStartInfo = new ProcessStartInfo
        {
            FileName = clientBuildDirectory + "\\Client.exe",
            WorkingDirectory = clientBuildDirectory,
            WindowStyle = ProcessWindowStyle.Minimized
        };
        Client = Process.Start(clientStartInfo);

        // IPC with the client must be up before speech events start firing.
        OpenPipes();

        // Text-to-speech: viseme events drive facial animation on the client.
        SpeechSynthesizer = new SpeechSynthesizer();
        SpeechSynthesizer.SelectVoiceByHints(VoiceGender.Female);
        SpeechSynthesizer.SpeakStarted += SpeechSynthesizer_SpeakStarted;
        SpeechSynthesizer.VisemeReached += SpeechSynthesizer_VisemeReached;
        SpeechSynthesizer.SpeakCompleted += SpeechSynthesizer_SpeakCompleted;

        // Speech recognition restricted to the known command grammar,
        // running continuously on the default microphone.
        SpeechRecognitionEngine = new SpeechRecognitionEngine();
        SpeechRecognitionEngine.UnloadAllGrammars();
        SpeechRecognitionEngine.LoadGrammar(new Grammar(new GrammarBuilder(KnownCommands)));
        SpeechRecognitionEngine.SpeechRecognized += SpeechRecognitionEngine_SpeechRecognized;
        SpeechRecognitionEngine.SetInputToDefaultAudioDevice();
        SpeechRecognitionEngine.RecognizeAsync(RecognizeMode.Multiple);

        KinectSensor = KinectSensor.GetDefault();
        KinectSensor.Open();

        // Body capture with one double-exponential smoothing filter per bone.
        BodyFrameSource = KinectSensor.BodyFrameSource;
        BodyFrameReader = BodyFrameSource.OpenReader();
        BodyFrameReader.FrameArrived += BodyFrameReader_FrameArrived;
        Bodies = null;
        BodyDESP = new DESPQuaternion[(int) MoCapKinectBone.Count];
        for (var i = 0; i < (int) MoCapKinectBone.Count; i++)
            BodyDESP[i] = new DESPQuaternion();

        // HD face capture (high alignment quality) with smoothing for the head
        // orientation and for each facial-expression channel.
        HighDefinitionFaceFrameSource = new HighDefinitionFaceFrameSource(KinectSensor);
        HighDefinitionFaceFrameSource.TrackingQuality = FaceAlignmentQuality.High;
        HighDefinitionFaceFrameReader = HighDefinitionFaceFrameSource.OpenReader();
        HighDefinitionFaceFrameReader.FrameArrived += HighDefinitionFaceFrameReader_FrameArrived;
        FaceAlignment = new FaceAlignment();

        FaceDESP = new DESPQuaternion();
        FaceExpressionDESP = new DESPFloat[(int) MoCapKinectFacialExpression.Count];
        for (var i = 0; i < (int) MoCapKinectFacialExpression.Count; i++)
            FaceExpressionDESP[i] = new DESPFloat();
    }
        /// <summary>
        /// Window Loaded handler: opens the Kinect sensor, wires up color, depth,
        /// body, and HD-face streams, starts the background file writer, and loads
        /// the teleprompter text.
        /// BUG FIX: when the prompter text file is missing, the original code
        /// requested shutdown and then fell through to dereference the still-null
        /// wordReader (Shutdown is asynchronous), throwing NullReferenceException.
        /// The missing-file branch now returns immediately.
        /// </summary>
        private void MainWindow_Loaded(object sender, RoutedEventArgs e)
        {
            binaryFilePath = System.IO.Path.Combine(Environment.CurrentDirectory, "data.bin");


            this.kinectSensor = KinectSensor.GetDefault();
            if (this.kinectSensor != null)
            {
                this.kinectSensor.Open();

                #region ColorFrame
                // BGRA color stream rendered into a writable bitmap (4 bytes/pixel).
                this.colorFrameSource               = this.kinectSensor.ColorFrameSource;
                this.colorFrameReader               = this.colorFrameSource.OpenReader();
                this.colorFrameDescription          = this.colorFrameSource.CreateFrameDescription(ColorImageFormat.Bgra);
                this.colorFrameReader.FrameArrived += colorFrameReader_FrameArrived;
                this.colorPixelData = new Byte[this.colorFrameDescription.LengthInPixels * 4];
                this.colorBitmap    = new WriteableBitmap(this.colorFrameDescription.Width,
                                                          this.colorFrameDescription.Height, 96.0, 96.0, PixelFormats.Bgr32, null);
                this.colorBitmapRect   = new Int32Rect(0, 0, this.colorBitmap.PixelWidth, this.colorBitmap.PixelHeight);
                this.colorBitmapStride = this.colorFrameDescription.Width * 4;

                #endregion

                #region DepthFrame
                // 16-bit grayscale depth stream (2 bytes/pixel).
                this.depthFrameSource               = this.kinectSensor.DepthFrameSource;
                this.depthFrameReader               = this.depthFrameSource.OpenReader();
                this.depthFrameDesription           = this.kinectSensor.DepthFrameSource.FrameDescription;
                this.depthFrameReader.FrameArrived += depthReader_FrameArrived;
                this.depthBitmap     = new WriteableBitmap(this.depthFrameDesription.Width, this.depthFrameDesription.Height, 96.0, 96.0, PixelFormats.Gray16, null);
                this.depthBitmapRect = new Int32Rect(0, 0, this.depthBitmap.PixelWidth, this.depthBitmap.PixelHeight);
                this.depthPixelData  = new ushort[this.depthFrameDesription.LengthInPixels];          // LengthInPixels replaces width*height — more convenient
                this.depthStride     = this.depthFrameDesription.Width * 2;
                #endregion

                #region FacePoints
                // Listen for body data (provides the tracking id for the HD face source).
                _bodySource = this.kinectSensor.BodyFrameSource;
                _bodyReader = _bodySource.OpenReader();
                _bodyReader.FrameArrived += BodyReader_FrameArrived;
                // Listen for HD face data.
                _faceSource = new HighDefinitionFaceFrameSource(this.kinectSensor);
                _faceReader = _faceSource.OpenReader();
                _faceReader.FrameArrived += FaceReader_FrameArrived;
                _faceModel     = new FaceModel();
                _faceAlignment = new FaceAlignment();
                #endregion

                // Preallocated buffers: 1920x1080 BGRA color, 512x424 depth,
                // and the 1347 HD-face vertices.
                abstractColorFrame.buffer      = new byte[1920 * 1080 * 4];
                abstractDepthFrame.buffer      = new ushort[512 * 424];
                abstractFacePointsFrame.buffer = new CameraSpacePoint[1347];

                colorImage.Source = this.colorBitmap;
                depthImage.Source = this.depthBitmap;

                // Background thread drains the frame queue to the binary file.
                queueSaver        = new QueueSaver(binaryFilePath);
                mainWritingThread = new Thread(new ThreadStart(saveAll2File));
                mainWritingThread.Start();

                // Teleprompter text, read line by line as recording progresses.
                string txtPath = System.IO.Path.Combine(Environment.CurrentDirectory, "word.txt");
                if (File.Exists(txtPath))
                {
                    wordReader = new StreamReader(txtPath, Encoding.Default);
                }
                else
                {
                    // Cannot run without the prompter text: notify and shut down.
                    if (MessageBox.Show("提词器文本文件找不到!") == MessageBoxResult.OK)
                    {
                        Application.Current.Shutdown();
                    }
                    // wordReader is still null here and Shutdown is asynchronous;
                    // returning avoids a NullReferenceException below.
                    return;
                }
                if (!wordReader.EndOfStream)
                {
                    this.txtBlock_narrator.Text = wordReader.ReadLine();
                }
                else
                {
                    this.txtBlock_narrator.Text       = "采集结束。";
                    this.btn_startRecording.IsEnabled = false;
                }
            }
        }
Ejemplo n.º 30
0
        /// <summary>
        /// Window Loaded handler: acquires the Kinect sensor, caches the color-frame
        /// display geometry, wires up body tracking and HD face tracking, then opens
        /// the sensor. Any failure is reported via a message box and closes the window.
        /// </summary>
        private void Window_Loaded( object sender, RoutedEventArgs e )
        {
            try {
                kinect = KinectSensor.GetDefault();
                if ( kinect == null ) {
                    // CA2201: throw a specific exception type, not base Exception.
                    // Still handled by the catch (Exception) below, so behavior is
                    // unchanged for callers. (Message: "cannot open Kinect".)
                    throw new InvalidOperationException( "Kinectを開けません" );
                }
                coordinateMapper = kinect.CoordinateMapper;

                // Display geometry is taken from the color stream's frame description.
                FrameDescription frameDescription = kinect.ColorFrameSource.FrameDescription;
                displayWidth = frameDescription.Width;
                displayHeight = frameDescription.Height;
                displayRect = new Rect( 0, 0, displayWidth, displayHeight );

                // Body stream supplies the tracking id consumed by the HD face source.
                bodyFrameSource = kinect.BodyFrameSource;
                bodyFrameReader = bodyFrameSource.OpenReader();
                bodyFrameReader.FrameArrived += bodyFrameReader_FrameArrived;
                InitializeHDFace();

                // Open last, after all subscriptions are in place.
                kinect.Open();
            }
            catch ( Exception ex ) {
                MessageBox.Show( ex.Message );
                Close();
            }
        }