// Example #1
    void Start()
    {
        // Acquire the default sensor and subscribe to body frames.
        sensor     = KinectSensor.GetDefault();
        bodySource = sensor.BodyFrameSource;
        bodyReader = bodySource.OpenReader();
        bodyReader.FrameArrived += BodyReader_FrameArrived;

        // Request every face feature this component consumes.
        var faceFrameFeatures = FaceFrameFeatures.BoundingBoxInColorSpace;
        faceFrameFeatures |= FaceFrameFeatures.PointsInColorSpace;
        faceFrameFeatures |= FaceFrameFeatures.BoundingBoxInInfraredSpace;
        faceFrameFeatures |= FaceFrameFeatures.PointsInInfraredSpace;
        faceFrameFeatures |= FaceFrameFeatures.RotationOrientation;
        faceFrameFeatures |= FaceFrameFeatures.FaceEngagement;
        faceFrameFeatures |= FaceFrameFeatures.Glasses;
        faceFrameFeatures |= FaceFrameFeatures.Happy;
        faceFrameFeatures |= FaceFrameFeatures.LeftEyeClosed;
        faceFrameFeatures |= FaceFrameFeatures.RightEyeClosed;
        faceFrameFeatures |= FaceFrameFeatures.LookingAway;
        faceFrameFeatures |= FaceFrameFeatures.MouthMoved;
        faceFrameFeatures |= FaceFrameFeatures.MouthOpen;

        // Face source/reader bound to the current tracking id.
        FaceFrameSource = FaceFrameSource.Create(sensor, currentTrackingId, faceFrameFeatures);
        FaceFrameSource.TrackingIdLost += HdFaceSource_TrackingIdLost;

        FaceFrameReader = FaceFrameSource.OpenReader();
        FaceFrameReader.FrameArrived += HdFaceReader_FrameArrived;

        //CurrentFaceModel = FaceModel.Create();
        currentFaceAlignment = FaceAlignment.Create();

        // Start streaming only after all readers are wired up.
        sensor.Open();
    }
    void Start()
    {
        _Sensor = KinectSensor.GetDefault();

        // BUG FIX: the original dereferenced _Sensor.BodyFrameSource below,
        // OUTSIDE the null guard, and threw a NullReferenceException on any
        // machine without a Kinect attached. Bail out early instead.
        if (_Sensor == null)
        {
            return;
        }

        _Reader = _Sensor.BodyFrameSource.OpenReader();

        if (!_Sensor.IsOpen)
        {
            _Sensor.Open();
        }

        // One face source/reader slot per body the sensor can track.
        int bodyCount = _Sensor.BodyFrameSource.BodyCount;
        this.faceFrameSources = new FaceFrameSource[bodyCount];
        this.faceFrameReaders = new FaceFrameReader[bodyCount];

        // specify the required face frame results
        FaceFrameFeatures faceFrameFeatures =
            FaceFrameFeatures.RotationOrientation
            | FaceFrameFeatures.FaceEngagement
            | FaceFrameFeatures.LookingAway;

        for (int i = 0; i < bodyCount; i++)
        {
            // create the face frame source with the required face frame features and an initial tracking Id of 0
            faceFrameSources[i] = FaceFrameSource.Create(_Sensor, 0, faceFrameFeatures);

            // open the corresponding reader
            faceFrameReaders[i] = faceFrameSources[i].OpenReader();
        }
    }
        /// <summary>
        /// Wires this component to the Kinect sensor and prepares one face
        /// frame source/reader pair per trackable body.
        /// </summary>
        /// <param name="dioManager">Owner object supplying the body source manager.</param>
        public void Initialize(DIOManager dioManager)
        {
            this.dioManager     = dioManager;
            this.BodySrcManager = dioManager.bodySrcManager;

            updateFrame = 0;

            // Only one sensor is supported at a time.
            kinectSensor = KinectSensor.GetDefault();

            // Kinect reports how many bodies it can track concurrently;
            // size the body storage accordingly.
            bodyCount = kinectSensor.BodyFrameSource.BodyCount;
            bodies    = new Body[bodyCount];

            if (BodySrcManager == null)
            {
                Debug.Log("Falta asignar Game Object as BodySrcManager");
            }
            else
            {
                bodyManager = BodySrcManager.GetComponent <BodySourceManager>();
            }

            // Every face feature this component consumes.
            var faceFrameFeatures = FaceFrameFeatures.BoundingBoxInColorSpace;
            faceFrameFeatures |= FaceFrameFeatures.PointsInColorSpace;
            faceFrameFeatures |= FaceFrameFeatures.BoundingBoxInInfraredSpace;
            faceFrameFeatures |= FaceFrameFeatures.PointsInInfraredSpace;
            faceFrameFeatures |= FaceFrameFeatures.RotationOrientation;
            faceFrameFeatures |= FaceFrameFeatures.FaceEngagement;
            faceFrameFeatures |= FaceFrameFeatures.Glasses;
            faceFrameFeatures |= FaceFrameFeatures.Happy;
            faceFrameFeatures |= FaceFrameFeatures.LeftEyeClosed;
            faceFrameFeatures |= FaceFrameFeatures.RightEyeClosed;
            faceFrameFeatures |= FaceFrameFeatures.LookingAway;
            faceFrameFeatures |= FaceFrameFeatures.MouthMoved;
            faceFrameFeatures |= FaceFrameFeatures.MouthOpen;

            // One face source + reader per body, each starting with an
            // initial tracking Id of 0.
            faceFrameSources = new FaceFrameSource[bodyCount];
            faceFrameReaders = new FaceFrameReader[bodyCount];
            for (int i = 0; i < bodyCount; i++)
            {
                faceFrameSources[i] = FaceFrameSource.Create(kinectSensor, 0, faceFrameFeatures);
                faceFrameReaders[i] = faceFrameSources[i].OpenReader();
            }

            initialize = true;
        }
        /// <summary>
        /// Allocates one face result/source/reader slot per body the sensor
        /// can track and opens all readers.
        /// </summary>
        private void InitializeFaceReaders()
        {
            int slots = this.Sensor.BodyFrameSource.BodyCount;

            this.FaceFrameResults = new FaceFrameResult[slots];
            this.faceFrameSources = new FaceFrameSource[slots];
            this.faceFrameReaders = new FaceFrameReader[slots];

            FaceFrameFeatures faceFrameFeatures = RequiredFaceFrameFeatures();

            // Each source starts with an initial tracking Id of 0.
            for (int i = 0; i < slots; i++)
            {
                this.faceFrameSources[i] = FaceFrameSource.Create(this.Sensor, 0, faceFrameFeatures);
                this.faceFrameReaders[i] = this.faceFrameSources[i].OpenReader();
            }
        }
    /*KalmanFilterSimple1D kalman_X;
     * KalmanFilterSimple1D kalman_Y;
     * KalmanFilterSimple1D kalman_mod;*/

    void Start()
    {
        updateFrame = 0;

        // Output files for coordinate logging — coords_KX.txt presumably
        // holds the Kalman-filtered series (see the disabled Kalman fields
        // above); confirm. NOTE(review): these writers are opened here and
        // must be closed elsewhere (e.g. OnDestroy) — verify.
        sx = new StreamWriter("coords_X.txt");
        kx = new StreamWriter("coords_KX.txt");

        // Only one sensor is supported at a time.
        kinectSensor = KinectSensor.GetDefault();

        // Kinect reports how many bodies it can track concurrently;
        // size the body storage accordingly.
        bodyCount = kinectSensor.BodyFrameSource.BodyCount;
        bodies    = new Body[bodyCount];

        // Every face feature this component consumes.
        var faceFrameFeatures = FaceFrameFeatures.BoundingBoxInColorSpace;
        faceFrameFeatures |= FaceFrameFeatures.PointsInColorSpace;
        faceFrameFeatures |= FaceFrameFeatures.BoundingBoxInInfraredSpace;
        faceFrameFeatures |= FaceFrameFeatures.PointsInInfraredSpace;
        faceFrameFeatures |= FaceFrameFeatures.RotationOrientation;
        faceFrameFeatures |= FaceFrameFeatures.FaceEngagement;
        faceFrameFeatures |= FaceFrameFeatures.Glasses;
        faceFrameFeatures |= FaceFrameFeatures.Happy;
        faceFrameFeatures |= FaceFrameFeatures.LeftEyeClosed;
        faceFrameFeatures |= FaceFrameFeatures.RightEyeClosed;
        faceFrameFeatures |= FaceFrameFeatures.LookingAway;
        faceFrameFeatures |= FaceFrameFeatures.MouthMoved;
        faceFrameFeatures |= FaceFrameFeatures.MouthOpen;

        // One face source + reader per body, each starting with an
        // initial tracking Id of 0.
        faceFrameSources = new FaceFrameSource[bodyCount];
        faceFrameReaders = new FaceFrameReader[bodyCount];
        for (int i = 0; i < bodyCount; i++)
        {
            faceFrameSources[i] = FaceFrameSource.Create(kinectSensor, 0, faceFrameFeatures);
            faceFrameReaders[i] = faceFrameSources[i].OpenReader();
        }
    }
// Example #6
        /// <summary>
        /// Sizes every per-face array to the sensor's body capacity and opens
        /// one face frame source/reader pair per slot.
        /// </summary>
        private void InitializeFaceReaders()
        {
            int capacity = this.m_Sensor.BodyFrameSource.BodyCount;

            this.m_FaceFrameResults = new FaceFrameResult[capacity];
            this.m_FaceFrameSources = new FaceFrameSource[capacity];
            this.m_FaceFrameReaders = new FaceFrameReader[capacity];

            // Choose between the minimal and the full feature set.
            FaceFrameFeatures faceFrameFeatures;
            if (faceFrameFeatureType == FaceFrameFeatureType.Required)
            {
                faceFrameFeatures = RequiredFaceFrameFeatures();
            }
            else
            {
                faceFrameFeatures = FullFaceFrameFeatures();
            }

            // Each source starts with an initial tracking Id of 0.
            for (int i = 0; i < capacity; ++i)
            {
                this.m_FaceFrameSources[i] = FaceFrameSource.Create(this.m_Sensor, 0, faceFrameFeatures);
                this.m_FaceFrameReaders[i] = this.m_FaceFrameSources[i].OpenReader();
            }
        }
// Example #7
    void Start()
    {
        _Sensor = KinectSensor.GetDefault();

        if (_Sensor != null)
        {
            _Reader = _Sensor.BodyFrameSource.OpenReader();

            if (enableFacetracking)
            {
                // Only head rotation is needed from the regular face tracker.
                faceFrameSource = FaceFrameSource.Create(_Sensor, 0, FaceFrameFeatures.RotationOrientation);
                faceframeReader = faceFrameSource.OpenReader();
            }

            if (enableHDFace)
            {
                highDefinitionFaceFrameSource = HighDefinitionFaceFrameSource.Create(_Sensor);
                highDefinitionFaceFrameReader = highDefinitionFaceFrameSource.OpenReader();
                CurrentFaceModel     = FaceModel.Create();
                currentFaceAlignment = FaceAlignment.Create();

                // FaceModel.TriangleIndices holds TriangleCount * 3 entries
                // (three vertex indices per triangle).
                // BUG FIX: the old loop stopped at TriangleCount, so only the
                // first third of the mesh indices was ever copied; iterate
                // over the whole array instead.
                var    triangles = new int[FaceModel.TriangleCount * 3];
                uint[] TriInd    = FaceModel.TriangleIndices.ToArray();
                for (int i = 0; i < triangles.Length; i++)
                {
                    triangles[i] = (int)TriInd[i];
                }
                _CurrentFaceMeshTriangles = triangles;
            }

            if (!_Sensor.IsOpen)
            {
                _Sensor.Open();
            }

            controller = GameObject.Find("Controller").GetComponent <Controller>();

            Debug.Log("KinectBodyManager::Started");
        }
    }
        /// <summary>
        /// Converts the FaceFrameArrivedEvent event to an observable sequence.
        /// You need one of these per body.
        /// </summary>
        /// <param name="kinectSensor">The kinect sensor.</param>
        /// <param name="trackingId">The body to observe.</param>
        /// <param name="features">The features to observe.</param>
        /// <returns>The observable sequence.</returns>
        /// <exception cref="ArgumentNullException">When <paramref name="kinectSensor"/> is null.</exception>
        public static IObservable <FaceFrameArrivedEventArgs> FaceFrameArrivedObservable(this KinectSensor kinectSensor, ulong trackingId, FaceFrameFeatures features)
        {
            if (kinectSensor == null)
            {
                // FIX: use nameof instead of a magic string so the exception
                // stays correct if the parameter is ever renamed.
                throw new ArgumentNullException(nameof(kinectSensor));
            }

            var source = FaceFrameSource.Create(kinectSensor, trackingId, features);

            return(Observable.Create <FaceFrameArrivedEventArgs>(observer =>
            {
                // A reader is opened per subscription and torn down with it.
                var reader = source.OpenReader();

                var disposable = reader.FaceFrameArrivedObservable()
                                 .Subscribe(x => observer.OnNext(x),
                                            e => observer.OnError(e),
                                            () => observer.OnCompleted());

                // NOTE(review): only the reader is disposed here; if
                // FaceFrameSource is IDisposable in this SDK version it
                // should be added to the composite too — confirm.
                return new CompositeDisposable {
                    disposable, reader
                };
            }));
        }
// Example #9
    void Awake()
    {
        _Sensor = KinectSensor.GetDefault();

        // BUG FIX: the original read _Sensor.BodyFrameSource.BodyCount
        // OUTSIDE this null guard and threw a NullReferenceException when no
        // Kinect was attached; all sensor-dependent setup now runs only when
        // a sensor actually exists.
        if (_Sensor == null)
        {
            return;
        }

        _Reader = _Sensor.BodyFrameSource.OpenReader();
        if (!_Sensor.IsOpen)
        {
            _Sensor.Open();
        }

        bodyCount = _Sensor.BodyFrameSource.BodyCount;

        // Only head rotation is consumed from the face tracker here.
        FaceFrameFeatures faceFrameFeatures = FaceFrameFeatures.RotationOrientation;

        faceFrameSources = new FaceFrameSource[bodyCount];
        faceFrameReaders = new FaceFrameReader[bodyCount];

        avatarBodies = new Avatar.Body[bodyCount];
        for (int i = 0; i < bodyCount; i++)
        {
            // Initial tracking Id of 0 until a body is assigned to the slot.
            faceFrameSources[i] = FaceFrameSource.Create(_Sensor, 0, faceFrameFeatures);
            faceFrameReaders[i] = faceFrameSources[i].OpenReader();
        }

        // Pre-build one avatar skeleton (all joints) per trackable body.
        for (int i = 0; i < bodyCount; i++)
        {
            avatarBodies[i] = new Avatar.Body();
            for (JointType jt = JointType.SpineBase; jt <= JointType.ThumbRight; jt++)
            {
                avatarBodies[i].Joints[jt]           = new Avatar.Joint();
                avatarBodies[i].Joints[jt].JointType = jt;
            }
        }
    }
// Example #10
    /// <summary>
    /// Allocates the face-tracking sources/readers — and optionally the HD
    /// face models and alignments — for every trackable body, then marks
    /// face tracking as initialized.
    /// </summary>
    /// <param name="bUseFaceModel">Also set up high-definition face tracking.</param>
    /// <param name="bDrawFaceRect">Whether face rectangles should be drawn.</param>
    /// <returns>True once initialization has completed.</returns>
    public bool InitFaceTracking(bool bUseFaceModel, bool bDrawFaceRect)
    {
        isDrawFaceRect = bDrawFaceRect;

        // Deliberate subset of the available face features (the remaining
        // flags were explicitly left disabled by the original author).
        var faceFrameFeatures = FaceFrameFeatures.BoundingBoxInColorSpace;
        faceFrameFeatures |= FaceFrameFeatures.PointsInColorSpace;
        faceFrameFeatures |= FaceFrameFeatures.RotationOrientation;
        faceFrameFeatures |= FaceFrameFeatures.FaceEngagement;
        faceFrameFeatures |= FaceFrameFeatures.LookingAway;

        // One face source + reader per body in the field of view.
        faceFrameSources = new FaceFrameSource[this.bodyCount];
        faceFrameReaders = new FaceFrameReader[this.bodyCount];

        if (bUseFaceModel)
        {
            hdFaceFrameSources = new HighDefinitionFaceFrameSource[this.bodyCount];
            hdFaceFrameReaders = new HighDefinitionFaceFrameReader[this.bodyCount];

            hdFaceModels     = new FaceModel[this.bodyCount];
            hdFaceAlignments = new FaceAlignment[this.bodyCount];
        }

        for (int i = 0; i < bodyCount; i++)
        {
            // Each source starts with an initial tracking Id of 0.
            faceFrameSources[i] = FaceFrameSource.Create(this.kinectSensor, 0, faceFrameFeatures);
            faceFrameReaders[i] = faceFrameSources[i].OpenReader();

            if (bUseFaceModel)
            {
                // HD face: one model + alignment per body slot.
                hdFaceFrameSources[i] = HighDefinitionFaceFrameSource.Create(this.kinectSensor);
                hdFaceFrameReaders[i] = hdFaceFrameSources[i].OpenReader();

                hdFaceModels[i]     = FaceModel.Create();
                hdFaceAlignments[i] = FaceAlignment.Create();
            }
        }

        // Per-face results, filled in as frames arrive.
        faceFrameResults = new FaceFrameResult[this.bodyCount];

        bFaceTrackingInited = true;
        return bFaceTrackingInited;
    }