예제 #1
0
        /// <summary>
        /// Acquires the default Kinect sensor, wires the color+depth multi-source
        /// stream, the body stream and the HD face stream, then opens the sensor.
        /// Does nothing when no sensor is present.
        /// </summary>
        private void InitializeKinect()
        {
            _sensor = KinectSensor.GetDefault();

            if (_sensor != null)
            {
                // Combined color + depth reader (long-exposure IR left disabled).
                _reader = _sensor.OpenMultiSourceFrameReader(FrameSourceTypes.Color | FrameSourceTypes.Depth);// | FrameSourceTypes.LongExposureInfrared);
                _reader.MultiSourceFrameArrived += Reader_MultiSourceFrameArrived;

                // Back buffer for the color stream, sized from the BGRA frame description.
                FrameDescription colorFrameDescription = _sensor.ColorFrameSource.CreateFrameDescription(ColorImageFormat.Bgra);
                ColorBitmap = new WriteableBitmap(colorFrameDescription.Width, colorFrameDescription.Height, 96.0, 96.0, PixelFormats.Bgr32, null);

                // Body stream.
                _bodySource = _sensor.BodyFrameSource;
                _bodyReader = _bodySource.OpenReader();
                _bodyReader.FrameArrived += BodyReader_FrameArrived;

                // HD face stream; Low alignment quality trades accuracy for speed.
                _faceSource = new HighDefinitionFaceFrameSource(_sensor);
                _faceReader = _faceSource.OpenReader();
                _faceSource.TrackingQuality = FaceAlignmentQuality.Low;
                _faceReader.FrameArrived   += FaceReader_FrameArrived;

                _faceModel     = new FaceModel();
                _faceAlignment = new FaceAlignment();

                _sensor.Open();
            }
        }
예제 #2
0
        /// <summary>
        /// Initializes the face tracker: body + HD face streams, a sensor
        /// availability callback, and the UDP endpoint used to publish results.
        /// NOTE(review): sensor.Open() is never called here — presumably the
        /// sensor is opened elsewhere; verify.
        /// </summary>
        public void InitTracker()
        {
            lastSensorAvail = false;
            sensor          = KinectSensor.GetDefault();

            // Body stream.
            bodySource = sensor.BodyFrameSource;
            bodyReader = bodySource.OpenReader();
            bodyReader.FrameArrived += NewBodyReaderFrame;

            // HD face stream; react when the tracked face is lost.
            hdFaceFrameSource = new HighDefinitionFaceFrameSource(sensor);
            hdFaceFrameSource.TrackingIdLost += HdFaceSource_TrackingIdLost;

            hdFaceFrameReader = hdFaceFrameSource.OpenReader();
            hdFaceFrameReader.FrameArrived += HdFaceReader_FrameArrived;

            sensor.IsAvailableChanged += SensorAvailableChanged;
            Console.WriteLine("Face tracker ready.");

            // UDP destination for outgoing tracking data.
            dest     = IPAddress.Parse(ip);
            endPoint = new IPEndPoint(dest, port);

            // Fixed-size 48-byte datagram buffer.
            sendBuffer = new byte[48];

            Console.WriteLine("UDP Socket created for port {0}", port);
        }
예제 #3
0
        /// <summary>
        /// Window load handler: wires body and HD face streams on the default
        /// sensor, opens it, then attaches a color multi-source reader.
        /// </summary>
        private void Window_Loaded(object sender, RoutedEventArgs e)
        {
            _sensor = KinectSensor.GetDefault();

            if (_sensor != null)
            {
                // Body stream.
                _bodySource = _sensor.BodyFrameSource;
                _bodyReader = _bodySource.OpenReader();
                _bodyReader.FrameArrived += BodyReader_FrameArrived;

                // HD face stream.
                _faceSource = new HighDefinitionFaceFrameSource(_sensor);

                _faceReader = _faceSource.OpenReader();
                _faceReader.FrameArrived += FaceReader_FrameArrived;

                _faceModel     = new FaceModel();
                _faceAlignment = new FaceAlignment();


                _sensor.Open();

                // Color frames via a multi-source reader. (Added by Aditya.)
                _reader = _sensor.OpenMultiSourceFrameReader(FrameSourceTypes.Color);
                _reader.MultiSourceFrameArrived += Reader_MultiSourceFrameArrived;
            }
        }
예제 #4
0
        /// <summary>
        /// Window load handler. Creates body and HD face objects, but all frame
        /// handlers and _sensor.Open() are commented out — as written this
        /// initializes state without ever starting streaming.
        /// NOTE(review): confirm whether the commented lines should be restored.
        /// </summary>
        private void Window_Loaded(object sender, RoutedEventArgs e)
        {
            _sensor = KinectSensor.GetDefault();

            // Earlier color-feed experiment, kept for reference:
            /*ColorFrameReader cfr = _sensor.ColorFrameSource.OpenReader();
             * fd = _sensor.ColorFrameSource.FrameDescription;
             * colordata=new byte[fd.LengthInPixels*4];
             * bitmap = new WriteableBitmap(fd.Width, fd.Height, 96, 96, PixelFormats.Bgr32, null);
             *
             * this.image.Source = bitmap;*/
            if (_sensor != null)
            {
                _bodySource = _sensor.BodyFrameSource;
                _bodyReader = _bodySource.OpenReader();
                //_bodyReader.FrameArrived += BodyReader_FrameArrived;

                _faceSource = new HighDefinitionFaceFrameSource(_sensor);
                _faceReader = _faceSource.OpenReader();
                //_faceReader.FrameArrived += FaceReader_FrameArrived;

                _faceModel     = new FaceModel();
                _faceAlignment = new FaceAlignment();

                //cfr.FrameArrived += cfr_FrameArrived;
                //_sensor.Open();
            }
        }
        /// <summary>
        /// Page load handler: sets up the infrared display bitmap, body and HD
        /// face readers, opens the sensor, and loads the dev-portal settings.
        /// </summary>
        private void Page_Loaded(object sender, RoutedEventArgs e)
        {
            _sensor                = KinectSensor.GetDefault();
            _collectedMeasurements = new List <double>();

            if (_sensor != null)
            {
                // Moved inside the guard: the original assigned
                // _sensor.CoordinateMapper BEFORE the null check, which would
                // throw NullReferenceException when no sensor is available.
                _coordinateMapper = _sensor.CoordinateMapper;

                // Infrared stream rendered into a Gray32Float bitmap.
                _infraredFrameDescription = _sensor.InfraredFrameSource.FrameDescription;
                _infraredBitmap           = new WriteableBitmap(_infraredFrameDescription.Width, _infraredFrameDescription.Height, 96.0, 96.0, PixelFormats.Gray32Float, null);
                camera.Source             = _infraredBitmap;

                // Body stream; one slot per trackable body.
                _bodyReader = _sensor.BodyFrameSource.OpenReader();
                _bodyCount  = _sensor.BodyFrameSource.BodyCount;
                _bodies     = new Body[_bodyCount];
                _bodyReader.FrameArrived += BodyReader_FrameArrived;

                _faceModel     = new FaceModel();
                _faceAlignment = new FaceAlignment();

                // HD face stream.
                _faceFrameSource = new HighDefinitionFaceFrameSource(_sensor);
                _faceFrameReader = _faceFrameSource.OpenReader();
                _faceFrameReader.FrameArrived += FaceReader_FrameArrived;

                _irReader = _sensor.InfraredFrameSource.OpenReader();
                _irReader.FrameArrived += InfraredReader_FrameArrived;

                _sensor.Open();
            }

            // Dev-portal settings are loaded regardless of sensor presence.
            _settingsVM = DevPortalVM.LoadContext(SETTINGS_FILENAME);
            DevPortalGrid.DataContext = _settingsVM;
            _devicePortalClient       = new DevPortalHelper(_settingsVM);
        }
예제 #6
0
        /// <summary>
        /// Main window constructor: wires body, color, and two independent HD
        /// face streams before opening the sensor.
        /// </summary>
        public MainWindow()
        {
            InitializeComponent();
            _sensor = KinectSensor.GetDefault();

            if (_sensor != null)
            {
                // Listen for body data.
                _bodySource = _sensor.BodyFrameSource;
                _bodyReader = _bodySource.OpenReader();
                _bodyReader.FrameArrived += BodyReader_FrameArrived;

                _colorReader = _sensor.ColorFrameSource.OpenReader();
                _colorReader.FrameArrived += ColorReader_FrameArrived;

                // Listen for HD face data. Two sources/readers are created —
                // presumably to track a second face; confirm against the handlers.
                _faceSource    = new HighDefinitionFaceFrameSource(_sensor);
                _faceSourceSub = new HighDefinitionFaceFrameSource(_sensor);
                // _faceSource.TrackingIdLost += OnTrackingIdLost;
                _faceReader    = _faceSource.OpenReader();
                _faceReaderSub = _faceSourceSub.OpenReader();

                _faceReader.FrameArrived    += FaceReader_FrameArrived;
                _faceReaderSub.FrameArrived += FaceReaderSub_FrameArrived;

                _faceModel        = new FaceModel();
                _faceAlignment    = new FaceAlignment();
                _faceAlignmentSub = new FaceAlignment();
                // Start tracking!
                _sensor.Open();
            }
        }
예제 #7
0
    /// <summary>
    /// Unity Start hook (serves as InitializeHDFace): wires the body and HD
    /// face streams, creates the face model/alignment, opens the sensor, and
    /// prepares blend-shape bookkeeping for the actor mesh.
    /// </summary>
    void Start()
    {  // acts like InitializeHDFace()
        theGeometry = new Mesh();

        //SetViewCollectionStatus();

        sensor = KinectSensor.GetDefault();

        bodySource = sensor.BodyFrameSource;

        bodyReader = bodySource.OpenReader();

        bodyReader.FrameArrived += BodyReader_FrameArrived;

        highDefinitionFaceFrameSource = HighDefinitionFaceFrameSource.Create(sensor);

        highDefinitionFaceFrameSource.TrackingIdLost += HdFaceSource_TrackingIdLost;

        highDefinitionFaceFrameReader = highDefinitionFaceFrameSource.OpenReader();

        highDefinitionFaceFrameReader.FrameArrived += HdFaceReader_FrameArrived;

        CurrentFaceModel = FaceModel.Create();

        currentFaceAlignment = FaceAlignment.Create();

        sensor.Open();

        // Scratch dictionary for action units plus the actor's blend-shape names.
        tempAus = new Dictionary <string, float>();
        actorBlendshapeNames = getBlendShapeNames(actorMesh);
    }
    /// <summary>
    /// Resets capture state, opens the sensor, wires body and HD face streams,
    /// and starts asynchronous face-model data collection.
    /// </summary>
    void initialize()
    {
        IsFaceModelCollectCompleted = false;
        FaceCaptureStatus           = "";
        FaceVertices = new List <CameraSpacePoint>();

        sensor = KinectSensor.GetDefault();
        if (sensor == null)
        {
            return;
        }
        sensor.Open();

        bodySource = sensor.BodyFrameSource;
        bodyReader = bodySource.OpenReader();

        hdFaceFrameSource = HighDefinitionFaceFrameSource.Create(sensor);
        hdFaceFrameReader = hdFaceFrameSource.OpenReader();

        faceModel     = FaceModel.Create();
        faceAlignment = FaceAlignment.Create();
        // No extra capture attributes (hair/skin color) requested.
        FaceModelBuilderAttributes attributes = FaceModelBuilderAttributes.None;

        faceModelBuilder = hdFaceFrameSource.OpenModelBuilder(attributes);
        // Completion / failure callbacks are defined elsewhere in this class.
        faceModelBuilder.CollectFaceDataAsync(collectFaceModelCompleted, collectFaceModelFailed);
    }
        //------------


        /// <summary>
        /// Main window constructor: configures the UDP endpoint used to send
        /// data, then wires body, multi-source color, and HD face streams
        /// before opening the sensor.
        /// </summary>
        public MainWindow()
        {
            InitializeComponent();


            // Configure the UDP endpoint (fixed port 9999).
            ep = new IPEndPoint(IP, 9999);

            // Initialize the Kinect sensor.
            _sensor = KinectSensor.GetDefault();

            if (_sensor != null)
            {
                // Listen for body data.
                _bodySource = _sensor.BodyFrameSource;
                _bodyReader = _bodySource.OpenReader();
                _bodyReader.FrameArrived += BodyReader_FrameArrived;

/**/
                // Listen for multi-source (color) data.
                _multiReader = _sensor.OpenMultiSourceFrameReader(FrameSourceTypes.Color);
                _multiReader.MultiSourceFrameArrived += MultiReader_MultiSourceFrameArrived;
/**/
                // Listen for HD face data.
                _faceSource = new HighDefinitionFaceFrameSource(_sensor);
                _faceReader = _faceSource.OpenReader();
                _faceReader.FrameArrived += FaceReader_FrameArrived;

                _faceModel     = new FaceModel();
                _faceAlignment = new FaceAlignment();

                // Start tracking!
                _sensor.Open();
            }
        }
예제 #10
0
        /// <summary>
        /// Face page constructor: plays a sound cue, seeds the face/body state
        /// machines and sampling buffers, then wires infrared, body, and HD
        /// face streams before opening the sensor.
        /// </summary>
        public FacePage()
        {
            // NOTE(review): no audio file/stream is loaded before Play(), so
            // this likely plays nothing audible — confirm intent.
            System.Media.SoundPlayer player = new System.Media.SoundPlayer();
            player.Play();

            InitializeComponent();

            // Both state machines start out waiting for the Kinect.
            currFaceState = FaceState.KinectWait;
            currBodyState = BodyState.KinectWait;
            faceSamples   = new double[NUM_SAMPLES];

            // One run counter per flag type.
            flagRuns = new int[Enum.GetNames(typeof(FlagType)).Length];

            _sensor = KinectSensor.GetDefault();

            if (_sensor != null)
            {
                // Sensor found: advance both state machines to the next stage.
                currFaceState   = FaceState.FaceWait;
                currBodyState   = BodyState.BodyWait;
                _infraredSource = _sensor.InfraredFrameSource;
                _infraredReader = _infraredSource.OpenReader();
                _infraredReader.FrameArrived += InfraredReader_FrameArrived;

                _bodySource = _sensor.BodyFrameSource;
                _bodyReader = _bodySource.OpenReader();
                _bodyReader.FrameArrived += BodyReader_FrameArrived;

                _faceSource = new HighDefinitionFaceFrameSource(_sensor);
                _faceReader = _faceSource.OpenReader();
                _faceReader.FrameArrived += FaceReader_FrameArrived;

                _sensor.Open();
            }
        }
예제 #11
0
        /// <summary>
        /// Console entry point: opens the default Kinect sensor, wires body and
        /// HD face readers, then blocks on stdin until the user presses Enter.
        /// </summary>
        static void Main(string[] args)
        {
            _sensor = KinectSensor.GetDefault();

            _worker.getSubjectID();

            if (_sensor != null)
            {
                _sensor.Open();
                Console.WriteLine("sensorOpened");
                if (_sensor.IsOpen)
                {
                    _coordinateMapper = _sensor.CoordinateMapper;
                    _bodyFrameReader  = _sensor.BodyFrameSource.OpenReader();

                    _bodyFrameReader.FrameArrived += BodyFrameReader_FrameArrived;

                    _faceSource = new HighDefinitionFaceFrameSource(_sensor);
                    _faceReader = _faceSource.OpenReader();
                    _faceReader.FrameArrived += FaceReader_FrameArrived;

                    _faceModel     = new FaceModel();
                    _faceAlignment = new FaceAlignment();
                }
            }
            // Block until the operator presses Enter; the input text is unused
            // (the original stored it in a dead local).
            Console.ReadLine();

            // Null-conditional guard: the original called Close()
            // unconditionally and threw NullReferenceException whenever
            // GetDefault() returned null (the branch above already assumes
            // that can happen).
            _sensor?.Close();
        }
예제 #12
0
    /// <summary>
    /// Server constructor: shows the per-pixel-alpha overlay form, launches the
    /// client process, opens the communication pipes, configures speech
    /// synthesis and recognition, and wires Kinect body + HD face streams with
    /// DESP smoothing filters for bones, face orientation, and expressions.
    /// </summary>
    public Server()
    {
        Form = new CustomPerPixelAlphaForm();
        FormSetProperties();
        FormDock();
        Form.Show();

        // Client binary is resolved relative to the current directory —
        // assumes the usual solution layout; confirm for deployed builds.
        var clientBuildDirectory = Environment.CurrentDirectory + "\\..\\..\\..\\..\\..\\Reflecta.Client\\bin";
        var clientStartInfo      = new ProcessStartInfo
        {
            FileName         = clientBuildDirectory + "\\Client.exe",
            WorkingDirectory = clientBuildDirectory,
            WindowStyle      = ProcessWindowStyle.Minimized
        };

        Client = Process.Start(clientStartInfo);

        OpenPipes();

        // Text-to-speech with start/viseme/complete callbacks.
        SpeechSynthesizer = new SpeechSynthesizer();
        SpeechSynthesizer.SelectVoiceByHints(VoiceGender.Female);
        SpeechSynthesizer.SpeakStarted   += SpeechSynthesizer_SpeakStarted;
        SpeechSynthesizer.VisemeReached  += SpeechSynthesizer_VisemeReached;
        SpeechSynthesizer.SpeakCompleted += SpeechSynthesizer_SpeakCompleted;

        // Speech recognition restricted to the KnownCommands grammar,
        // running continuously on the default audio device.
        SpeechRecognitionEngine = new SpeechRecognitionEngine();
        SpeechRecognitionEngine.UnloadAllGrammars();
        SpeechRecognitionEngine.LoadGrammar(new Grammar(new GrammarBuilder(KnownCommands)));
        SpeechRecognitionEngine.SpeechRecognized += SpeechRecognitionEngine_SpeechRecognized;
        SpeechRecognitionEngine.SetInputToDefaultAudioDevice();
        SpeechRecognitionEngine.RecognizeAsync(RecognizeMode.Multiple);

        KinectSensor = KinectSensor.GetDefault();
        KinectSensor.Open();

        // Body stream with one smoothing filter per skeletal bone.
        BodyFrameSource = KinectSensor.BodyFrameSource;
        BodyFrameReader = BodyFrameSource.OpenReader();
        BodyFrameReader.FrameArrived += BodyFrameReader_FrameArrived;
        Bodies   = null;
        BodyDESP = new DESPQuaternion[(int)MoCapKinectBone.Count];
        for (var i = 0; i < (int)MoCapKinectBone.Count; i++)
        {
            BodyDESP[i] = new DESPQuaternion();
        }

        // HD face stream at High alignment quality.
        HighDefinitionFaceFrameSource = new HighDefinitionFaceFrameSource(KinectSensor);
        HighDefinitionFaceFrameSource.TrackingQuality = FaceAlignmentQuality.High;
        HighDefinitionFaceFrameReader = HighDefinitionFaceFrameSource.OpenReader();
        HighDefinitionFaceFrameReader.FrameArrived += HighDefinitionFaceFrameReader_FrameArrived;
        FaceAlignment = new FaceAlignment();

        // One filter for face orientation plus one per expression channel.
        FaceDESP           = new DESPQuaternion();
        FaceExpressionDESP = new DESPFloat[(int)MoCapKinectFacialExpression.Count];
        for (var i = 0; i < (int)MoCapKinectFacialExpression.Count; i++)
        {
            FaceExpressionDESP[i] = new DESPFloat();
        }
    }
예제 #13
0
 /// <summary>
 /// Creates the HD face source for the shared sensor and, when that succeeds,
 /// opens the reader and allocates the face model, alignment, and geometry
 /// vertex buffer.
 /// </summary>
 private static void StartFace()
 {
     FaceFrameSource = HighDefinitionFaceFrameSource.Create(sensor);
     if (FaceFrameSource == null)
     {
         // No HD face source available; leave face tracking uninitialized.
         return;
     }

     faceReader    = FaceFrameSource.OpenReader();
     faceModel     = FaceModel.Create();
     faceAlignment = FaceAlignment.Create();
     faceGeometry  = new Vector[FaceModel.VertexCount];
 }
예제 #14
0
        List <int[]> list_arr_index = new List <int[]>();// list of index arrays

        /// <summary>
        /// Main window constructor: wires body, HD face, and a depth+infrared
        /// multi-source stream, and prepares the display bitmaps for the IR and
        /// depth frames before opening the sensor.
        /// </summary>
        public MainWindow()
        {
            InitializeComponent();
            _sensor = KinectSensor.GetDefault();

            if (_sensor != null)
            {
                // Listen for body data.
                _bodySource = _sensor.BodyFrameSource;
                _bodyReader = _bodySource.OpenReader();
                _bodyReader.FrameArrived += BodyReaderFrameArrived;

                // Listen for HD face data.
                _faceSource = new HighDefinitionFaceFrameSource(_sensor);
                _faceReader = _faceSource.OpenReader();
                _faceReader.FrameArrived += FaceReader_FrameArrived;

                _faceModel     = new FaceModel();
                _faceAlignment = new FaceAlignment();

                // Multi-source reader for depth + infrared frames.
                this.multiFrameReader = _sensor.OpenMultiSourceFrameReader(FrameSourceTypes.Depth |
                                                                           FrameSourceTypes.Infrared);

                // Frame description for the infrared stream.
                infraredFrameDescription = _sensor.InfraredFrameSource.FrameDescription;

                // Frame description for the depth stream.
                depthFrameDescription = _sensor.DepthFrameSource.FrameDescription;

                infraredRect = new Int32Rect(0, 0, infraredFrameDescription.Width, infraredFrameDescription.Height);
                depthRect    = new Int32Rect(0, 0, depthFrameDescription.Width, depthFrameDescription.Height);

                // Attach the multi-source frame event handler.
                multiFrameReader.MultiSourceFrameArrived += ReaderMultiFrameArrived;

                // -----------------------------------------
                // Display setup for the IR and depth frames
                // -----------------------------------------
                // Create the WriteableBitmaps used for on-screen display.
                infraredBitmap = new WriteableBitmap(this.infraredFrameDescription.Width,
                                                     this.infraredFrameDescription.Height, 96.0, 96.0, PixelFormats.Gray16, null);


                depthBitmap = new WriteableBitmap(this.depthFrameDescription.Width,
                                                  this.depthFrameDescription.Height, 96.0, 96.0, PixelFormats.Gray16, null);
                // Bind the WriteableBitmap to the WPF Image control's Source:
                //ColorImage.Source = this.infraredBitmap; // (does not work at this point)

                // start tracking
                _sensor.Open();
            }
        }
예제 #15
0
        /// <summary>
        /// Constructor. Runs exactly once at startup: configures the color
        /// reader, then wires body and HD face streams and opens the Kinect.
        /// </summary>
        public MainWindow()
        {
            InitializeComponent();

            // Obtain a reference to the Kinect sensor itself.
            this.kinect = KinectSensor.GetDefault();

            // Configure the color image format and the reader that pulls frames.
            // NOTE(review): this.kinect is dereferenced here BEFORE the null
            // check below — if GetDefault() can return null, this throws first.
            this.colorImageFormat = ColorImageFormat.Bgra;
            this.colorFrameDescription
                = this.kinect.ColorFrameSource.CreateFrameDescription(this.colorImageFormat);
            this.colorFrameReader = this.kinect.ColorFrameSource.OpenReader();
            this.colorFrameReader.FrameArrived += ColorFrameReader_FrameArrived;

            // For face rotation detection
            //this.faceFrameSource = new FaceFrameSource(this.kinect, 0, );


            if (this.kinect != null)
            {
                _bodySource = kinect.BodyFrameSource;
                _bodyReader = _bodySource.OpenReader();
                _bodyReader.FrameArrived += BodyReader_FrameArrived;

                _faceSource = new HighDefinitionFaceFrameSource(kinect);

                _faceReader = _faceSource.OpenReader();
                _faceReader.FrameArrived += FaceReader_FrameArrived;

                _faceModel     = new FaceModel();
                _faceAlignment = new FaceAlignment();
            }

            // Start the Kinect.
            //aviWriter.FrameRate = 30;
            //aviWriter.Open(@"c:\users\abelab\desktop\log\test.avi", 1920, 1080);
            //writer.Open(@"c:\users\abelab\desktop\log\test.avi", 1920, 1080, 30, VideoCodec.MPEG4);

            /*
             * for (int i=0; i<1347; i++)
             * {
             *      sw.Write(i + ",,,,,,");
             * }
             * sw.WriteLine();
             * for(int i=0; i<1347; i++)
             * {
             *      sw.Write("X(m),Y(m),Z(m),X(pixel),Y(pixel),,");
             * }
             * sw.WriteLine();
             */
            this.kinect.Open();
        }
예제 #16
0
        /// <summary>
        /// Starts body + HD face tracking on the shared sensor, prepares the
        /// mesh and the text style used by the jig, and — once the base class
        /// reports the sensor started — (re)opens the face model builder and
        /// begins collecting face data.
        /// </summary>
        /// <returns>The result of base.StartSensor().</returns>
        public override bool StartSensor()
        {
            _bodySource = _kinect.BodyFrameSource;
            _bodyReader = _bodySource.OpenReader();
            _bodyReader.FrameArrived += BodyReader_FrameArrived;

            _hdFaceFrameSource =
                new HighDefinitionFaceFrameSource(_kinect);
            _hdFaceFrameSource.TrackingIdLost +=
                HdFaceSource_TrackingIdLost;

            _hdFaceFrameReader =
                _hdFaceFrameSource.OpenReader();
            _hdFaceFrameReader.FrameArrived +=
                HdFaceReader_FrameArrived;

            _currentFaceModel     = new FaceModel();
            _currentFaceAlignment = new FaceAlignment();

            InitializeMesh();
            UpdateMesh();

            // Text style for our jig

            _style      = new TextStyle();
            _style.Font =
                new FontDescriptor("standard.shx", false, false, 0, 0);
            _style.TextSize = 10;

            var res = base.StartSensor();

            if (res)
            {
                // Dispose any builder left over from a previous start before
                // opening a fresh one.
                if (_faceModelBuilder != null)
                {
                    _faceModelBuilder.Dispose();
                }
                _faceModelBuilder =
                    _hdFaceFrameSource.OpenModelBuilder(
                        FaceModelBuilderAttributes.None
                        );
                _faceModelBuilder.BeginFaceDataCollection();
                _faceModelBuilder.CollectionCompleted +=
                    HdFaceBuilder_CollectionCompleted;
            }
            return(res);
        }
예제 #17
0
        /// <summary>
        /// Window load handler: wires body, HD face, and face-feature streams,
        /// opens the sensor, and attaches a color/depth/IR/body multi-reader.
        /// </summary>
        private void Window_Loaded(object sender, RoutedEventArgs e)
        {
            _sensor = KinectSensor.GetDefault();

            if (_sensor != null)
            {
                /// <summary>
                /// Task to be completed by the student.
                /// Initialization phase.
                /// </summary>
                /// /////////////////////////////////////////////////////////////////////////////////////////////////
                // Get the body source, body reader, and handler for body frame events.
                _bodySource = _sensor.BodyFrameSource;
                _bodyReader = _bodySource.OpenReader();
                _bodyReader.FrameArrived += BodyReader_FrameArrived;

                // Get the HD face source, face reader, and handler for face frame events.
                _faceSource = new HighDefinitionFaceFrameSource(_sensor);
                _faceReader = _faceSource.OpenReader();
                _faceReader.FrameArrived += FaceReader_FrameArrived;

                // Add the facial-gesture reader and its handler.
                // Tracking id 0 — presumably reassigned once a body is tracked; confirm.
                _faceFrameSource = new FaceFrameSource(this._sensor, 0,
                                                       FaceFrameFeatures.BoundingBoxInColorSpace |
                                                       FaceFrameFeatures.FaceEngagement |
                                                       FaceFrameFeatures.Glasses |
                                                       FaceFrameFeatures.Happy |
                                                       FaceFrameFeatures.LeftEyeClosed |
                                                       FaceFrameFeatures.MouthOpen |
                                                       FaceFrameFeatures.PointsInColorSpace |
                                                       FaceFrameFeatures.RightEyeClosed
                                                       );
                _faceFrameReader = this._faceFrameSource.OpenReader();
                _faceFrameReader.FrameArrived += FaceFrameReader_FrameArrived;


                // Create the FaceModel and FaceAlignment.
                _faceModel     = new FaceModel();
                _faceAlignment = new FaceAlignment();

                // Open the sensor.
                _sensor.Open();
                // Assign the multi-source reader.
                multiSourceReader = _sensor.OpenMultiSourceFrameReader(FrameSourceTypes.Color | FrameSourceTypes.Depth | FrameSourceTypes.Infrared | FrameSourceTypes.Body);
                multiSourceReader.MultiSourceFrameArrived += Reader_MultiSourceFrameArrived;
            }
        }
예제 #18
0
    /// <summary>
    /// Unity Awake hook: opens one reader per Kinect stream (body, color,
    /// depth, infrared, body index, HD face), then opens the sensor.
    /// </summary>
    protected virtual void Awake()
    {
        KinectSensor = KinectSensor.GetDefault();

        if (KinectSensor != null)
        {
            bodyFrameReader      = KinectSensor.BodyFrameSource.OpenReader();
            colorFrameReader     = KinectSensor.ColorFrameSource.OpenReader();
            depthFrameReader     = KinectSensor.DepthFrameSource.OpenReader();
            infraredFrameReader  = KinectSensor.InfraredFrameSource.OpenReader();
            bodyIndexFrameReader = KinectSensor.BodyIndexFrameSource.OpenReader();
            faceFrameSource      = HighDefinitionFaceFrameSource.Create(KinectSensor);
            faceFrameReader      = faceFrameSource.OpenReader();

            KinectSensor.Open();
        }
    }
예제 #19
0
    /// <summary>
    /// Unity Start hook: opens the Kinect body stream and, when enabled, the
    /// basic face and HD face streams; caches the HD face mesh triangle
    /// indices and locates the scene Controller.
    /// </summary>
    void Start()
    {
        _Sensor = KinectSensor.GetDefault();

        if (_Sensor != null)
        {
            _Reader = _Sensor.BodyFrameSource.OpenReader();

            if (enableFacetracking)
            {
                // Basic face stream tracking rotation only.
                faceFrameSource = FaceFrameSource.Create(_Sensor, 0, FaceFrameFeatures.RotationOrientation);
                faceframeReader = faceFrameSource.OpenReader();
            }

            if (enableHDFace)
            {
                highDefinitionFaceFrameSource = HighDefinitionFaceFrameSource.Create(_Sensor);
                highDefinitionFaceFrameReader = highDefinitionFaceFrameSource.OpenReader();
                CurrentFaceModel     = FaceModel.Create();
                currentFaceAlignment = FaceAlignment.Create();

                // TriangleIndices stores 3 indices per triangle, i.e.
                // TriangleCount * 3 entries. The original loop stopped at
                // TriangleCount, leaving two thirds of the array zeroed —
                // iterate over the whole index buffer instead.
                var    triangles = new int[FaceModel.TriangleCount * 3];
                uint[] TriInd    = FaceModel.TriangleIndices.ToArray();
                for (int i = 0; i < triangles.Length; i += 3)
                {
                    triangles[i]     = (int)TriInd[i];
                    triangles[i + 1] = (int)TriInd[i + 1];
                    triangles[i + 2] = (int)TriInd[i + 2];
                }
                _CurrentFaceMeshTriangles = triangles;
            }

            if (!_Sensor.IsOpen)
            {
                _Sensor.Open();
            }

            controller = GameObject.Find("Controller").GetComponent <Controller>();

            Debug.Log("KinectBodyManager::Started");
        }
    }
예제 #20
0
        /// <summary>
        /// Wires body and HD face streams on an externally owned sensor and
        /// creates the subject that publishes face data.
        /// NOTE(review): the sensor is never opened here — presumably the
        /// caller opens it; verify.
        /// </summary>
        /// <param name="sensor">Shared Kinect sensor instance (may be null).</param>
        public FaceHD(KinectSensor sensor)
        {
            _sensor    = sensor;
            faceHDData = new Subject <FaceHDData>();

            if (_sensor != null)
            {
                _bodySource = _sensor.BodyFrameSource;
                _bodyReader = _bodySource.OpenReader();
                _bodyReader.FrameArrived += BodyReader_FrameArrived;

                _faceSource = new HighDefinitionFaceFrameSource(_sensor);

                _faceReader = _faceSource.OpenReader();
                _faceReader.FrameArrived += FaceReader_FrameArrived;

                _faceModel     = new FaceModel();
                _faceAlignment = new FaceAlignment();
            }
        }
예제 #21
0
        /// <summary>
        /// Sets up HD face tracking on the supplied sensor, fed by a color+body
        /// multi-source reader; drawing targets the supplied canvas.
        /// NOTE(review): the sensor is never opened here — presumably the
        /// caller opens it; verify.
        /// </summary>
        public KinectHDFace(KinectSensor sensor, Canvas drawingCanvas)
        {
            _windowCanvas = drawingCanvas;
            _kinectSensor = sensor;
            //_colourImage = colourImg;

            if (_kinectSensor != null)
            {
                // Color + body frames delivered through one multi-source reader.
                _multiFrameReader = _kinectSensor.OpenMultiSourceFrameReader(FrameSourceTypes.Color
                                                                             | FrameSourceTypes.Body);
                _multiFrameReader.MultiSourceFrameArrived += _OnMultiFrameArrived;

                _faceSource = new HighDefinitionFaceFrameSource(_kinectSensor);
                _faceReader = _faceSource.OpenReader();
                _faceReader.FrameArrived += _FaceFrameHandler;

                _faceModel     = new FaceModel();
                _faceAlignment = new FaceAlignment();
            }
        }
예제 #22
0
        /// <summary>
        /// Sets up face and HD face streams on the supplied sensor for
        /// micro-expression analysis, with 30-sample rolling buffers for brow
        /// positions and their deltas.
        /// </summary>
        public FacialMicroExpressions(KinectSensor sensor)
        {
            _sensor = sensor;
            //  _msReader = source;
            _eyesState = EyesState.Opened;
            _faceAlignment = new FaceAlignment();

            // Rolling windows (30 samples each) for brow tracking.
            const int sampleWindow = 30;
            _leftBrow       = new float[sampleWindow];
            _rightBrow      = new float[sampleWindow];
            _leftBrowDelta  = new float[sampleWindow];
            _rightBrowDelta = new float[sampleWindow];

            // Face properties to track. (The original listed FaceEngagement
            // twice; ORing a flag twice is a no-op, so it appears once here.)
            FaceFrameFeatures trackedFeatures =
                FaceFrameFeatures.FaceEngagement
                | FaceFrameFeatures.LeftEyeClosed
                | FaceFrameFeatures.LookingAway
                | FaceFrameFeatures.Happy
                | FaceFrameFeatures.MouthMoved
                | FaceFrameFeatures.MouthOpen
                | FaceFrameFeatures.RightEyeClosed
                | FaceFrameFeatures.RotationOrientation;

            _faceSource = new FaceFrameSource(_sensor, 0, trackedFeatures);
            _faceReader = _faceSource.OpenReader();
            _faceReader.FrameArrived += _faceReader_FrameArrived;
            // _msReader.MultiSourceFrameArrived += _msReader_MultiSourceFrameArrived;

            //TODO: Use HDFace to determine gulping, eyebrows
            _hdSource = new HighDefinitionFaceFrameSource(_sensor);
            _hdReader = _hdSource.OpenReader();
            _hdReader.FrameArrived += _hdReader_FrameArrived;
        }
예제 #23
0
        /// <summary>
        /// Wires HD face tracking onto an existing sensor and body reader and
        /// starts collecting face model data.
        /// </summary>
        /// <param name="sensor">Active Kinect sensor; not opened here.</param>
        /// <param name="bodyReader">Body reader whose frames are handled by BodyReader_FrameArrived.</param>
        public Face(KinectSensor sensor, BodyFrameReader bodyReader)
        {
            // (The original captured sensor.BodyFrameSource into a local that
            // was never used — removed as dead code.)
            bodyReader.FrameArrived += BodyReader_FrameArrived;

            highDefinitionFaceFrameSource = new HighDefinitionFaceFrameSource(sensor);
            highDefinitionFaceFrameSource.TrackingIdLost += HdFaceSource_TrackingIdLost;

            highDefinitionFaceFrameReader = highDefinitionFaceFrameSource.OpenReader();
            highDefinitionFaceFrameReader.FrameArrived += HdFaceReader_FrameArrived;

            currentFaceModel     = new FaceModel();
            currentFaceAlignment = new FaceAlignment();

            UpdateMesh();

            // Build a personalized face model from the incoming frames.
            faceModelBuilder = highDefinitionFaceFrameSource.OpenModelBuilder(FaceModelBuilderAttributes.None);
            faceModelBuilder.BeginFaceDataCollection();
            faceModelBuilder.CollectionCompleted += HdFaceBuilder_CollectionCompleted;
        }
예제 #24
0
        /// <summary>
        /// Opens the Kinect, the body stream, and the HD face stream, and
        /// starts a face model builder that also captures hair and skin color.
        /// </summary>
        public void InitializeKinect()
        {
            m_sensor = KinectSensor.GetDefault();

            // Body stream.
            m_bodyReader = m_sensor.BodyFrameSource.OpenReader();
            m_bodyReader.FrameArrived += m_bodyReader_FrameArrived;

            // HD face stream.
            m_hdFaceSource = new HighDefinitionFaceFrameSource(m_sensor);
            m_hdFaceReader = m_hdFaceSource.OpenReader();
            m_hdFaceReader.FrameArrived += m_hdFaceReader_FrameArrived;

            m_faceModel     = new FaceModel();
            m_faceAlignment = new FaceAlignment();

            // Model builder opened directly from the face source (the original
            // reached the same object via m_hdFaceReader.HighDefinitionFaceFrameSource).
            m_faceBuilder = m_hdFaceSource.OpenModelBuilder(
                FaceModelBuilderAttributes.HairColor | FaceModelBuilderAttributes.SkinColor);
            m_faceBuilder.CollectionCompleted     += m_faceBuilder_CollectionCompleted;
            m_faceBuilder.CaptureStatusChanged    += m_faceBuilder_CaptureStatusChanged;
            m_faceBuilder.CollectionStatusChanged += m_faceBuilder_CollectionStatusChanged;

            // Tracking/build state starts cleared.
            m_trackedBodyId      = 0;
            m_faceBuilderStarted = false;
            m_faceBuildComplete  = false;

            m_sensor.Open();
        }
예제 #25
0
        /// <summary>
        /// Locates the default Kinect sensor when none is active yet, wires the
        /// body and HD face streams, optionally enables the color feed, and
        /// opens the sensor. On an IOException during Open(), activeSensor is
        /// left null and Connected stays false.
        /// </summary>
        private void findSensor()
        {
            // Already holding a sensor — nothing to do.
            if (activeSensor != null)
            {
                return;
            }

            KinectSensor candidate = KinectSensor.GetDefault();
            if (candidate == null)
            {
                return;
            }

            candidate.IsAvailableChanged += localSensor_IsAvailableChanged;

            // Turn on the body stream to receive body frames.
            bodyFrameReader = candidate.BodyFrameSource.OpenReader();
            bodyFrameReader.FrameArrived += bodyFrameReader_FrameArrived;

            // HD face stream.
            faceFrameSource = new HighDefinitionFaceFrameSource(candidate);
            faceFrameReader = faceFrameSource.OpenReader();
            faceFrameReader.FrameArrived += faceFrameReader_FrameArrived;
            currentFaceAlignment = new FaceAlignment();

            if (useColorFeed)
            {
                setColorFeedEnabled(true, candidate);
            }

            // Start the sensor; an IOException leaves candidate (and therefore
            // activeSensor) null.
            try
            {
                candidate.Open();
                Connected = true;
            }
            catch (IOException)
            {
                candidate = null;
            }

            activeSensor = candidate; // Make the class aware of the sensor.
        }
예제 #26
0
        /// <summary>
        /// Initializes the HD face pipeline: frame source and reader, face model and
        /// alignment, and a model builder used to collect face capture data.
        /// </summary>
        /// <exception cref="Exception">Thrown when the face model builder cannot be opened.</exception>
        private void InitializeHDFace()
        {
            // 'new' never returns null in C#, so the original null check on the
            // freshly constructed source was dead code and has been removed.
            hdFaceFrameSource = new HighDefinitionFaceFrameSource(kinect);

            hdFaceFrameReader = hdFaceFrameSource.OpenReader();
            hdFaceFrameReader.FrameArrived += hdFaceFrameReader_FrameArrived;

            faceModel = new FaceModel();
            faceAlignment = new FaceAlignment();

            // No extra capture attributes (e.g. hair/skin color) are requested.
            faceModelBuilder = hdFaceFrameSource.OpenModelBuilder(FaceModelBuilderAttributes.None);
            if (faceModelBuilder == null)
            {
                throw new Exception("Cannot open Face Model Builder");
            }

            faceModelBuilder.BeginFaceDataCollection();
            faceModelBuilder.CollectionCompleted += faceModelBuilder_CollectionCompleted;
        }
        /// <summary>
        /// Initializes the page and, when a Kinect sensor is present, wires up the
        /// infrared, body, and HD face streams before opening the sensor.
        /// </summary>
        public FacePage()
        {
            InitializeComponent();

            _sensor = KinectSensor.GetDefault();

            if (_sensor != null)
            {
                // Infrared stream.
                _infraredSource = _sensor.InfraredFrameSource;
                _infraredReader = _infraredSource.OpenReader();
                _infraredReader.FrameArrived += InfraredReader_FrameArrived;

                // Body stream: provides the tracking id the HD face source follows.
                _bodySource = _sensor.BodyFrameSource;
                _bodyReader = _bodySource.OpenReader();
                _bodyReader.FrameArrived += BodyReader_FrameArrived; // stray ';;' removed

                // HD face stream.
                _faceSource = new HighDefinitionFaceFrameSource(_sensor);
                _faceReader = _faceSource.OpenReader();
                _faceReader.FrameArrived += FaceReader_FrameArrived; // stray ';;' removed

                _sensor.Open();
            }
        }
예제 #28
0
        /// <summary>
        /// Sets up depth-frame buffering plus standard and HD face processing for
        /// the supplied sensor. The sensor is not opened here.
        /// </summary>
        /// <param name="sensor">An already-acquired Kinect sensor.</param>
        public BodyAnalysis(KinectSensor sensor)
        {
            _sensor = sensor;

            // Allocate space sized to one full depth frame for received pixels.
            FrameDescription depthFrameDescription = _sensor.DepthFrameSource.FrameDescription;
            _depthWidth = depthFrameDescription.Width;
            int depthHeight = depthFrameDescription.Height;
            this.depthFrameData = new ushort[_depthWidth * depthHeight];

            _coordinateMapper = _sensor.CoordinateMapper;

            // Standard face source with all listed features enabled; initial
            // tracking id is 0 (presumably replaced once a body is tracked —
            // NOTE(review): confirm against the body-frame handler).
            _faceSource = new FaceFrameSource(_sensor, 0,
                FaceFrameFeatures.BoundingBoxInColorSpace |
                FaceFrameFeatures.BoundingBoxInInfraredSpace |
                FaceFrameFeatures.FaceEngagement |
                FaceFrameFeatures.Glasses |
                FaceFrameFeatures.Happy |
                FaceFrameFeatures.LeftEyeClosed |
                FaceFrameFeatures.LookingAway |
                FaceFrameFeatures.MouthMoved |
                FaceFrameFeatures.MouthOpen |
                FaceFrameFeatures.PointsInColorSpace |
                FaceFrameFeatures.PointsInInfraredSpace |
                FaceFrameFeatures.RightEyeClosed |
                FaceFrameFeatures.RotationOrientation);
            _faceReader = _faceSource.OpenReader();
            _faceReader.FrameArrived += _faceReader_FrameArrived;
            _faceAlignment = new FaceAlignment();

            // HD face stream.
            _hdSource = new HighDefinitionFaceFrameSource(_sensor);
            _hdReader = _hdSource.OpenReader();
            _hdReader.FrameArrived += _hdReader_FrameArrived;
        }
        /// <summary>
        /// Initializes the page and, if a Kinect sensor is available, wires up the
        /// body and HD face streams and opens the sensor.
        /// </summary>
        public MainPage()
        {
            InitializeComponent();

            _sensor = KinectSensor.GetDefault();
            if (_sensor == null)
            {
                return; // No sensor attached; leave the page inert.
            }

            // Body stream: provides the tracking id the HD face source follows.
            _bodySource = _sensor.BodyFrameSource;
            _bodyReader = _bodySource.OpenReader();
            _bodyReader.FrameArrived += BodyReader_FrameArrived;

            // HD face stream.
            _faceSource = new HighDefinitionFaceFrameSource(_sensor);
            _faceReader = _faceSource.OpenReader();
            _faceReader.FrameArrived += FaceReader_FrameArrived;

            _faceModel = new FaceModel();
            _faceAlignment = new FaceAlignment();

            _sensor.Open();
        }
예제 #30
0
        /// <summary>
        /// Window-loaded handler: wires up the body, HD face, and depth streams for
        /// the default Kinect sensor and opens the sensor.
        /// </summary>
        private void Window_Loaded(object sender, RoutedEventArgs e)
        {
            _sensor = KinectSensor.GetDefault();

            if (_sensor != null)
            {
                // Listen for body data (provides the tracking id for the face source).
                _bodySource = _sensor.BodyFrameSource;
                _bodyReader = _bodySource.OpenReader();
                _bodyReader.FrameArrived += BodyReader_FrameArrived;

                // Listen for HD face data.
                _faceSource = new HighDefinitionFaceFrameSource(_sensor);
                _faceReader = _faceSource.OpenReader();
                _faceReader.FrameArrived += FaceReader_FrameArrived;

                _faceModel = new FaceModel();
                _faceAlignment = new FaceAlignment();

                // Multi-source reader requesting only depth frames (the original
                // "color camera" comment was misleading — only Depth is subscribed).
                _reader = _sensor.OpenMultiSourceFrameReader(FrameSourceTypes.Depth);
                _reader.MultiSourceFrameArrived += Reader_MultiSourceFrameArrived;

                // Bug fix: the sensor was never opened, so no frames would arrive.
                // Every other initializer in this file calls Open() after wiring up.
                _sensor.Open();
            }
        }