Example #1
    void Start()
    {  // this works like an InitializeHDFace() method
        theGeometry = new Mesh();

        //SetViewCollectionStatus();

        sensor = KinectSensor.GetDefault();

        bodySource = sensor.BodyFrameSource;

        bodyReader = bodySource.OpenReader();

        bodyReader.FrameArrived += BodyReader_FrameArrived;

        highDefinitionFaceFrameSource = HighDefinitionFaceFrameSource.Create(sensor);

        highDefinitionFaceFrameSource.TrackingIdLost += HdFaceSource_TrackingIdLost;

        highDefinitionFaceFrameReader = highDefinitionFaceFrameSource.OpenReader();

        highDefinitionFaceFrameReader.FrameArrived += HdFaceReader_FrameArrived;

        CurrentFaceModel = FaceModel.Create();

        currentFaceAlignment = FaceAlignment.Create();

        sensor.Open();

        tempAus = new Dictionary <string, float>();
        actorBlendshapeNames = getBlendShapeNames(actorMesh);
    }
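The BodyReader_FrameArrived and HdFaceReader_FrameArrived handlers registered above are not part of this snippet. A minimal sketch of what they typically do with the Kinect v2 HD face API is shown below; the handler bodies and the bodies[] field are assumptions, only the fields named in the example are reused.

    // Assumed field: private Body[] bodies;
    private void BodyReader_FrameArrived(object sender, BodyFrameArrivedEventArgs e)
    {
        using (var frame = e.FrameReference.AcquireFrame())
        {
            if (frame == null) return;

            if (bodies == null) bodies = new Body[frame.BodyCount];
            frame.GetAndRefreshBodyData(bodies);

            // hand the first tracked body's id to the HD face source so face tracking starts
            if (!highDefinitionFaceFrameSource.IsTrackingIdValid)
            {
                foreach (var body in bodies)
                {
                    if (body != null && body.IsTracked)
                    {
                        highDefinitionFaceFrameSource.TrackingId = body.TrackingId;
                        break;
                    }
                }
            }
        }
    }

    private void HdFaceReader_FrameArrived(object sender, HighDefinitionFaceFrameArrivedEventArgs e)
    {
        using (var frame = e.FrameReference.AcquireFrame())
        {
            if (frame == null || !frame.IsFaceTracked) return;

            // refresh the alignment, then recompute the HD face vertices for theGeometry
            frame.GetAndRefreshFaceAlignmentResult(currentFaceAlignment);
            var vertices = CurrentFaceModel.CalculateVerticesForAlignment(currentFaceAlignment);
            // ...copy vertices into theGeometry here...
        }
    }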
Example #2
    void initialize()
    {
        IsFaceModelCollectCompleted = false;
        FaceCaptureStatus           = "";
        FaceVertices = new List <CameraSpacePoint>();

        sensor = KinectSensor.GetDefault();
        if (sensor == null)
        {
            return;
        }
        sensor.Open();

        bodySource = sensor.BodyFrameSource;
        bodyReader = bodySource.OpenReader();

        hdFaceFrameSource = HighDefinitionFaceFrameSource.Create(sensor);
        hdFaceFrameReader = hdFaceFrameSource.OpenReader();

        faceModel     = FaceModel.Create();
        faceAlignment = FaceAlignment.Create();
        FaceModelBuilderAttributes attributes = FaceModelBuilderAttributes.None;

        faceModelBuilder = hdFaceFrameSource.OpenModelBuilder(attributes);
        faceModelBuilder.CollectFaceDataAsync(collectFaceModelCompleted, collectFaceModelFailed);
    }
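The two callbacks handed to CollectFaceDataAsync are not shown in this snippet. The sketch below is one plausible shape for them, assuming the Unity wrapper passes a FaceModelData to the completion callback (in the WPF API the equivalent is the FaceModelBuilder.CollectionCompleted event); the parameter types are assumptions, while FaceModelData.ProduceFaceModel() and the fields from the example are taken as given.

    // Assumed signatures for the CollectFaceDataAsync callbacks
    private void collectFaceModelCompleted(FaceModelData modelData)
    {
        if (modelData != null)
        {
            // swap the default model for the captured, person-specific one
            faceModel = modelData.ProduceFaceModel();
            IsFaceModelCollectCompleted = true;
            FaceCaptureStatus           = "Completed";
        }

        faceModelBuilder.Dispose();
        faceModelBuilder = null;
    }

    private void collectFaceModelFailed(string error)   // parameter type is an assumption
    {
        FaceCaptureStatus = "Failed: " + error;
    }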
Example #3
 private static void StartFace()
 {
     FaceFrameSource = HighDefinitionFaceFrameSource.Create(sensor);
     if (FaceFrameSource != null)
     {
         faceReader    = FaceFrameSource.OpenReader();
         faceModel     = FaceModel.Create();
         faceAlignment = FaceAlignment.Create();
         faceGeometry  = new Vector[FaceModel.VertexCount];
     }
 }
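Nothing in this example releases what StartFace() allocates. A matching teardown might look like the following sketch; StopFace and its body are assumptions, only the fields from the example are reused (the HD face reader and the face model are disposable in the Kinect .NET API).

 private static void StopFace()
 {
     if (faceReader != null)
     {
         faceReader.Dispose();   // stop delivering HD face frames
         faceReader = null;
     }

     if (faceModel != null)
     {
         faceModel.Dispose();
         faceModel = null;
     }

     faceAlignment   = null;
     faceGeometry    = null;
     FaceFrameSource = null;
 }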
Example #4
    protected virtual void Awake()
    {
        KinectSensor = KinectSensor.GetDefault();

        if (KinectSensor != null)
        {
            bodyFrameReader      = KinectSensor.BodyFrameSource.OpenReader();
            colorFrameReader     = KinectSensor.ColorFrameSource.OpenReader();
            depthFrameReader     = KinectSensor.DepthFrameSource.OpenReader();
            infraredFrameReader  = KinectSensor.InfraredFrameSource.OpenReader();
            bodyIndexFrameReader = KinectSensor.BodyIndexFrameSource.OpenReader();
            faceFrameSource      = HighDefinitionFaceFrameSource.Create(KinectSensor);
            faceFrameReader      = faceFrameSource.OpenReader();

            KinectSensor.Open();
        }
    }
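A component like this usually disposes its readers and closes the sensor when it goes away. Below is a hedged counterpart to the Awake() above, using only the fields it initializes; the method body itself is an assumption.

    protected virtual void OnDestroy()
    {
        // dispose every reader opened in Awake()
        if (bodyFrameReader != null)      { bodyFrameReader.Dispose();      bodyFrameReader = null; }
        if (colorFrameReader != null)     { colorFrameReader.Dispose();     colorFrameReader = null; }
        if (depthFrameReader != null)     { depthFrameReader.Dispose();     depthFrameReader = null; }
        if (infraredFrameReader != null)  { infraredFrameReader.Dispose();  infraredFrameReader = null; }
        if (bodyIndexFrameReader != null) { bodyIndexFrameReader.Dispose(); bodyIndexFrameReader = null; }
        if (faceFrameReader != null)      { faceFrameReader.Dispose();      faceFrameReader = null; }

        if (KinectSensor != null)
        {
            if (KinectSensor.IsOpen)
            {
                KinectSensor.Close();
            }
            KinectSensor = null;
        }
    }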
Example #5
    void Start()
    {
        _Sensor = KinectSensor.GetDefault();

        if (_Sensor != null)
        {
            _Reader = _Sensor.BodyFrameSource.OpenReader();

            if (enableFacetracking)
            {
                faceFrameSource = FaceFrameSource.Create(_Sensor, 0, FaceFrameFeatures.RotationOrientation);
                faceframeReader = faceFrameSource.OpenReader();
            }

            if (enableHDFace)
            {
                highDefinitionFaceFrameSource = HighDefinitionFaceFrameSource.Create(_Sensor);
                highDefinitionFaceFrameReader = highDefinitionFaceFrameSource.OpenReader();
                CurrentFaceModel     = FaceModel.Create();
                currentFaceAlignment = FaceAlignment.Create();

                // FaceModel.TriangleIndices holds TriangleCount * 3 indices; copy them all
                // (the original loop bound of TriangleCount only covered a third of them)
                var    triangles = new int[FaceModel.TriangleCount * 3];
                uint[] TriInd    = FaceModel.TriangleIndices.ToArray();
                for (int i = 0; i < triangles.Length; i += 3)
                {
                    triangles[i]     = (int)TriInd[i];
                    triangles[i + 1] = (int)TriInd[i + 1];
                    triangles[i + 2] = (int)TriInd[i + 2];
                }
                _CurrentFaceMeshTriangles = triangles;
            }

            if (!_Sensor.IsOpen)
            {
                _Sensor.Open();
            }

            controller = GameObject.Find("Controller").GetComponent <Controller>();

            Debug.Log("KinectBodyManager::Started");
        }
    }
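How the triangle array prepared above ends up on a Unity mesh is not part of this snippet. One common per-frame update is sketched below, assuming currentFaceAlignment is refreshed in the HD face reader's FrameArrived handler and that a _CurrentFaceMesh Mesh field exists; both of those names, and the helper itself, are assumptions.

    void UpdateFaceMesh()   // hypothetical helper, called once the face is tracked
    {
        // one CameraSpacePoint per HD face vertex for the current alignment
        var cameraPoints = CurrentFaceModel.CalculateVerticesForAlignment(currentFaceAlignment);

        var vertices = new Vector3[FaceModel.VertexCount];
        for (int i = 0; i < vertices.Length; i++)
        {
            var p = cameraPoints[i];
            vertices[i] = new Vector3(p.X, p.Y, p.Z);   // Kinect camera space is in metres
        }

        _CurrentFaceMesh.vertices  = vertices;
        _CurrentFaceMesh.triangles = _CurrentFaceMeshTriangles;
        _CurrentFaceMesh.RecalculateNormals();
    }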
Example #6
    public bool InitFaceTracking(bool bUseFaceModel, bool bDrawFaceRect)
    {
        isDrawFaceRect = bDrawFaceRect;

        // specify the required face frame results
        FaceFrameFeatures faceFrameFeatures =
            FaceFrameFeatures.BoundingBoxInColorSpace
            //| FaceFrameFeatures.BoundingBoxInInfraredSpace
            | FaceFrameFeatures.PointsInColorSpace
            //| FaceFrameFeatures.PointsInInfraredSpace
            | FaceFrameFeatures.RotationOrientation
            | FaceFrameFeatures.FaceEngagement
            //| FaceFrameFeatures.Glasses
            //| FaceFrameFeatures.Happy
            //| FaceFrameFeatures.LeftEyeClosed
            //| FaceFrameFeatures.RightEyeClosed
            | FaceFrameFeatures.LookingAway
            //| FaceFrameFeatures.MouthMoved
            //| FaceFrameFeatures.MouthOpen
        ;

        // create a face frame source + reader to track each face in the FOV
        faceFrameSources = new FaceFrameSource[this.bodyCount];
        faceFrameReaders = new FaceFrameReader[this.bodyCount];

        if (bUseFaceModel)
        {
            hdFaceFrameSources = new HighDefinitionFaceFrameSource[this.bodyCount];
            hdFaceFrameReaders = new HighDefinitionFaceFrameReader[this.bodyCount];

            hdFaceModels     = new FaceModel[this.bodyCount];
            hdFaceAlignments = new FaceAlignment[this.bodyCount];
        }

        for (int i = 0; i < bodyCount; i++)
        {
            // create the face frame source with the required face frame features and an initial tracking Id of 0
            faceFrameSources[i] = FaceFrameSource.Create(this.kinectSensor, 0, faceFrameFeatures);

            // open the corresponding reader
            faceFrameReaders[i] = faceFrameSources[i].OpenReader();

            if (bUseFaceModel)
            {
                ///////// HD Face
                hdFaceFrameSources[i] = HighDefinitionFaceFrameSource.Create(this.kinectSensor);
                hdFaceFrameReaders[i] = hdFaceFrameSources[i].OpenReader();

                hdFaceModels[i]     = FaceModel.Create();
                hdFaceAlignments[i] = FaceAlignment.Create();
            }
        }

        // allocate storage to store face frame results for each face in the FOV
        faceFrameResults = new FaceFrameResult[this.bodyCount];

//		FrameDescription frameDescription = this.kinectSensor.ColorFrameSource.FrameDescription;
//		faceDisplayWidth = frameDescription.Width;
//		faceDisplayHeight = frameDescription.Height;

        bFaceTrackingInited = true;

        return bFaceTrackingInited;
    }
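InitFaceTracking() only prepares the per-body sources; they start producing results once each one is given a body's TrackingId. The per-frame update that usually accompanies it is sketched below; the method name, the bodies[] parameter, and the index pairing are assumptions, while the FaceFrameSource and FaceFrameReader members used are standard Kinect Face API.

    // bodies[] is assumed to be refreshed elsewhere from the BodyFrameReader
    private void UpdateFaceFrames(Body[] bodies)
    {
        for (int i = 0; i < this.bodyCount; i++)
        {
            if (faceFrameSources[i] == null || faceFrameReaders[i] == null)
            {
                continue;
            }

            // (re)bind the source to the matching body so its reader starts delivering frames
            if (!faceFrameSources[i].IsTrackingIdValid && bodies[i] != null && bodies[i].IsTracked)
            {
                faceFrameSources[i].TrackingId = bodies[i].TrackingId;
            }

            // poll the latest face frame and cache its result for drawing
            using (FaceFrame faceFrame = faceFrameReaders[i].AcquireLatestFrame())
            {
                if (faceFrame != null && faceFrame.FaceFrameResult != null)
                {
                    faceFrameResults[i] = faceFrame.FaceFrameResult;
                }
            }
        }
    }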