/// <summary>
/// Snapshot of one face-animation sample: the sensor and user it came from,
/// the animation-unit coefficients, and the capture time.
/// </summary>
public FaceAnimationTrackingInformation(int sensorId, int user, EnumIndexableCollection <AnimationUnit, float> c, double time)
{
    this.time     = time;
    this.c        = c;
    this.user     = user;
    this.sensorId = sensorId;
}
Exemplo n.º 2
0
        /// <summary>
        /// Feeds one face sample into the running statistics (min/max, incremental
        /// mean, variance accumulator) and reports whether a high or low point was
        /// detected for the given difficulty.
        /// </summary>
        public bool calculateLogic(EnumIndexableCollection<FeaturePoint, PointF> facePoints, double difficulty)
        {
            double state = getState(facePoints);
            updateMinAndMax(state);
            measures++;

            if (measures == 1)
            {
                // First sample: the mean is the sample itself.
                average = state;
            }
            else
            {
                // Incremental mean plus a Welford-style variance accumulator.
                double priorAverage = average;
                average   = (state + (average * (measures - 1))) / measures;
                deviation = deviation + ((state - priorAverage) * (state - average));
            }

            // First pass (previousState still 0.0): just record the state.
            if (previousState == 0.0)
            {
                previousState = state;
                return false;
            }

            // Rising through the upper band counts a high point; falling through
            // the (half-width) lower band counts a low point.
            if (previousState < average && state > getDeviation() * difficulty + average)
            {
                return countHighPoint(state);
            }
            if (previousState > average && state < average - getDeviation() * difficulty / 2)
            {
                return countLowPoint(state);
            }
            return false;
        }
Exemplo n.º 3
0
            /// <summary>
            /// Updates the face tracking information for this skeleton: on a
            /// successful track, caches the projected face shape points.
            /// </summary>
            internal void OnFrameReady(KinectSensor kinectSensor, ColorImageFormat colorImageFormat, byte[] colorImage, DepthImageFormat depthImageFormat, short[] depthImage, Skeleton skeletonOfInterest)
            {
                this.skeletonTrackingState = skeletonOfInterest.TrackingState;

                // Only a fully tracked skeleton carries usable face data.
                if (this.skeletonTrackingState != SkeletonTrackingState.Tracked)
                {
                    return;
                }

                // Lazily create the tracker; construction can throw during some
                // shutdown scenarios, in which case face tracking is skipped.
                if (this.faceTracker == null)
                {
                    try
                    {
                        this.faceTracker = new FaceTracker(kinectSensor);
                    }
                    catch (InvalidOperationException)
                    {
                        Debug.WriteLine("AllFramesReady - creating a new FaceTracker threw an InvalidOperationException");
                        this.faceTracker = null;
                    }
                }

                if (this.faceTracker == null)
                {
                    return;
                }

                frame = this.faceTracker.Track(
                    colorImageFormat, colorImage, depthImageFormat, depthImage, skeletonOfInterest);

                this.lastFaceTrackSucceeded = frame.TrackSuccessful;
                if (this.lastFaceTrackSucceeded)
                {
                    this.facePoints = frame.GetProjected3DShape();
                }
            }
Exemplo n.º 4
0
        /// <summary>
        /// Computes the pairwise Euclidean distance matrix between all feature points.
        /// </summary>
        /// <param name="fp">The 3D feature points.</param>
        /// <returns>
        /// An fpSize x fpSize matrix where [i, j] holds the distance between points
        /// i and j; diagonal entries stay 0.
        /// </returns>
        private float[,] calcDist(EnumIndexableCollection <FeaturePoint, Vector3DF> fp)
        {
            int fpSize = fp.Count();

            Vector3DF[] temp = new Vector3DF[fpSize];
            float[,] fpRes = new float[fpSize, fpSize];
            int k = 0;

            foreach (Vector3DF p in fp)
            {
                temp[k] = p;
                k++;
            }

            // FIX: the original loops ran only to fpSize - 1 (exclusive), silently
            // leaving the last row and last column of the matrix at 0.
            for (int i = 0; i < fpSize; i++)
            {
                for (int j = 0; j < fpSize; j++)
                {
                    if (i == j)
                    {
                        continue; // distance to self is 0 (array default)
                    }

                    float dx = temp[i].X - temp[j].X;
                    float dy = temp[i].Y - temp[j].Y;
                    float dz = temp[i].Z - temp[j].Z;

                    // The square root of a sum of squares is already non-negative,
                    // so the original Math.Abs wrapper was redundant.
                    fpRes[i, j] = (float)Math.Sqrt(dx * dx + dy * dy + dz * dz);
                }
            }
            return fpRes;
        }
Exemplo n.º 5
0
            /// <summary>
            /// Updates the face tracking information for this skeleton, fetching the
            /// projected shape / shape points only when the corresponding drawing
            /// options are enabled, then refreshing the visual components.
            /// </summary>
            internal void OnFrameReady(KinectSensor kinectSensor, ColorImageFormat colorImageFormat, byte[] colorImage, DepthImageFormat depthImageFormat, short[] depthImage, Skeleton skeletonOfInterest)
            {
                this.skeletonTrackingState = skeletonOfInterest.TrackingState;

                if (this.skeletonTrackingState != SkeletonTrackingState.Tracked)
                {
                    // nothing to do with an untracked skeleton.
                    return;
                }

                // Lazily create the tracker. During some shutdown scenarios the
                // FaceTracker cannot be instantiated; catch that and skip tracking.
                if (this.faceTracker == null)
                {
                    try
                    {
                        this.faceTracker = new FaceTracker(kinectSensor);
                    }
                    catch (InvalidOperationException)
                    {
                        Debug.WriteLine("AllFramesReady - creating a new FaceTracker threw an InvalidOperationException");
                        this.faceTracker = null;
                    }
                }

                if (this.faceTracker == null)
                {
                    return;
                }

                FaceTrackFrame frame = this.faceTracker.Track(
                    colorImageFormat, colorImage, depthImageFormat, depthImage, skeletonOfInterest);

                this.lastFaceTrackSucceeded = frame.TrackSuccessful;
                if (this.lastFaceTrackSucceeded)
                {
                    if (faceTriangles == null)
                    {
                        // The triangle topology never changes; fetch it only once.
                        faceTriangles = frame.GetTriangles();
                    }

                    bool wantProjectedShape = this.DrawFaceMesh || this.DrawFeaturePoints != DrawFeaturePoint.None;
                    if (wantProjectedShape)
                    {
                        this.facePoints = frame.GetProjected3DShape();
                    }

                    if (this.DrawShapePoints)
                    {
                        // see the !!!README.txt file to add the function
                        // to your toolkit project
                        this.shapePoints = frame.GetShapePoints();
                    }
                }

                // draw/remove the components (runs regardless of track success)
                SetFeaturePointsLocations();
                SetShapePointsLocations();
            }
Exemplo n.º 6
0
        /// <summary>
        /// vvvv evaluate: publishes per-slice face-frame data (success flag,
        /// translation, rotation, projected and 3D shape points). Output pins are
        /// collapsed to zero slices when no frame input is connected.
        /// </summary>
        public void Evaluate(int SpreadMax)
        {
            //Output static indices all the time
            if (this.first)
            {
                this.FOutIndices.AssignFrom(KinectRuntime.FACE_INDICES);
                this.first = false;
            }

            if (!this.FInFrame.PluginIO.IsConnected)
            {
                // No input connected: clear every output pin.
                this.FOutPosition.SliceCount = 0;
                this.FOutPPTs.SliceCount     = 0;
                this.FOutPts.SliceCount      = 0;
                this.FOutRotation.SliceCount = 0;
                this.FOutOK.SliceCount       = 0;
                return;
            }

            if (!this.FInFrame.IsChanged)
            {
                return;
            }

            this.FOutOK.SliceCount       = FInFrame.SliceCount;
            this.FOutPosition.SliceCount = FInFrame.SliceCount;
            this.FOutRotation.SliceCount = FInFrame.SliceCount;
            this.FOutPts.SliceCount      = FInFrame.SliceCount;
            this.FOutPPTs.SliceCount     = FInFrame.SliceCount;

            for (int slice = 0; slice < this.FInFrame.SliceCount; slice++)
            {
                FaceTrackFrame frame = this.FInFrame[slice];
                this.FOutOK[slice]       = frame.TrackSuccessful;
                this.FOutPosition[slice] = new Vector3(frame.Translation.X, frame.Translation.Y, frame.Translation.Z);
                this.FOutRotation[slice] = new Vector3(frame.Rotation.X, frame.Rotation.Y, frame.Rotation.Z) * INVTWOPI;

                EnumIndexableCollection <FeaturePoint, PointF>    projected = frame.GetProjected3DShape();
                EnumIndexableCollection <FeaturePoint, Vector3DF> shape     = frame.Get3DShape();

                this.FOutPPTs[slice].SliceCount = projected.Count;
                this.FOutPts[slice].SliceCount  = shape.Count;

                for (int i = 0; i < projected.Count; i++)
                {
                    this.FOutPPTs[slice][i] = new Vector2(projected[i].X, projected[i].Y);
                    this.FOutPts[slice][i]  = new Vector3(shape[i].X, shape[i].Y, shape[i].Z);
                }
            }
        }
Exemplo n.º 7
0
 /// <summary>
 /// State metric: average eyebrow-to-eyelid distance normalised by average
 /// eyebrow length, so the value does not depend on face size or distance.
 /// </summary>
 protected override double getState(EnumIndexableCollection<FeaturePoint, PointF> facePoints)
 {
     double leftBrow  = GameUtils.getLength(FeaturePoint.LeftOfLeftEyebrow, FeaturePoint.RightOfLeftEyebrow, facePoints);
     double rightBrow = GameUtils.getLength(FeaturePoint.LeftOfRightEyebrow, FeaturePoint.RightOfRightEyebrow, facePoints);
     double leftGap   = GameUtils.getLength(FeaturePoint.MiddleBottomOfLeftEyebrow, FeaturePoint.AboveMidUpperLeftEyelid, facePoints);
     double rightGap  = GameUtils.getLength(FeaturePoint.MiddleBottomOfRightEyebrow, FeaturePoint.AboveMidUpperRightEyelid, facePoints);

     return ((leftGap + rightGap) / 2.0) / ((leftBrow + rightBrow) / 2.0);
 }
Exemplo n.º 8
0
            /// <summary>
            /// Updates the face tracking information for this skeleton: on a
            /// successful track, caches the projected face points and the head
            /// rotation of the supplied skeleton of interest.
            /// </summary>
            internal void OnFrameReady(KinectSensor kinectSensor,
                                       ColorImageFormat colorImageFormat, byte[] colorImage, DepthImageFormat depthImageFormat,
                                       short[] depthImage, Skeleton skeletonOfInterest)
            {
                this.skeletonTrackingState = skeletonOfInterest.TrackingState;

                if (this.skeletonTrackingState != SkeletonTrackingState.Tracked)
                {
                    // nothing to do with an untracked skeleton.
                    return;
                }

                // Lazily create the tracker; during some shutdown scenarios the
                // FaceTracker cannot be instantiated, so swallow that and skip.
                if (this.faceTracker == null)
                {
                    try
                    {
                        this.faceTracker = new FaceTracker(kinectSensor);
                    }
                    catch (InvalidOperationException)
                    {
                        Debug.WriteLine("AllFramesReady - creating a new FaceTracker threw an InvalidOperationException");
                        this.faceTracker = null;
                    }
                }

                if (this.faceTracker == null)
                {
                    return;
                }

                // Track the face for the skeleton of interest in this frame.
                FaceTrackFrame frame = this.faceTracker.Track(
                    colorImageFormat, colorImage, depthImageFormat, depthImage, skeletonOfInterest);

                this.lastFaceTrackSucceeded = frame.TrackSuccessful;
                if (!this.lastFaceTrackSucceeded)
                {
                    return;
                }

                if (faceTriangles == null)
                {
                    // The triangle topology never changes; fetch it only once.
                    faceTriangles = frame.GetTriangles();
                }

                // Cache the projected face points and the head rotation.
                this.facePoints = frame.GetProjected3DShape();
                this.rotation   = frame.Rotation;
            }
Exemplo n.º 9
0
        /// <summary>
        /// vvvv evaluate: (re)wires the runtime's AllFrameReady handler when the
        /// connection changes and, when invalidated, publishes tracking success,
        /// position and rotation for every tracked skeleton plus a clone of each
        /// available face frame.
        /// </summary>
        public void Evaluate(int SpreadMax)
        {
            if (this.FInvalidateConnect)
            {
                // Detach from the previously cached runtime before re-wiring.
                if (runtime != null)
                {
                    this.runtime.AllFrameReady -= KinectFaceNode_AllFrameReady;
                }

                if (this.FInRuntime.PluginIO.IsConnected)
                {
                    //Cache runtime node
                    this.runtime = this.FInRuntime[0];

                    if (runtime != null)
                    {
                        // Consistency fix: subscribe through the cached reference so
                        // the unsubscribe above always targets the same instance.
                        this.runtime.AllFrameReady += KinectFaceNode_AllFrameReady;
                    }
                }

                this.FInvalidateConnect = false;
            }

            if (this.FInvalidate)
            {
                this.FOutOK.SliceCount       = this.trackedSkeletons.Count;
                this.FOutPosition.SliceCount = this.trackedSkeletons.Count;
                this.FOutRotation.SliceCount = this.trackedSkeletons.Count;

                List <FaceTrackFrame> frames = new List <FaceTrackFrame>();

                int cnt = 0;
                foreach (int key in this.trackedSkeletons.Keys)
                {
                    SkeletonFaceTracker sft = this.trackedSkeletons[key];
                    if (sft.frame != null)
                    {
                        frames.Add((FaceTrackFrame)sft.frame.Clone());
                        this.FOutOK[cnt]       = sft.frame.TrackSuccessful;
                        this.FOutPosition[cnt] = new Vector3(sft.frame.Translation.X, sft.frame.Translation.Y, sft.frame.Translation.Z);
                        this.FOutRotation[cnt] = new Vector3(sft.frame.Rotation.X, sft.frame.Rotation.Y, sft.frame.Rotation.Z) * (float)VMath.DegToCyc;

                        // FIX: removed two dead per-skeleton shape queries
                        // (GetProjected3DShape / Get3DShape) whose results were
                        // never read — they only allocated collections each frame.
                    }
                    else
                    {
                        // No face frame for this skeleton: publish neutral values.
                        this.FOutOK[cnt]       = false;
                        this.FOutPosition[cnt] = Vector3.Zero;
                        this.FOutRotation[cnt] = Vector3.Zero;
                    }
                    cnt++;
                }

                this.FOutFrame.AssignFrom(frames);
            }
        }
Exemplo n.º 10
0
 // Queues one face-animation sample for processing; the sample is dropped when
 // capture is inactive or the animation-unit data is missing.
 void EnqueueFaceAnimationMessage(int sensorId, int user, EnumIndexableCollection <AnimationUnit, float> c, double time)
 {
     if (capturing && c != null)
     {
         trackingInformationQueue.Add(new FaceAnimationTrackingInformation(sensorId, user, c, time));
     }
 }
        /// <summary>
        /// Initializes a new instance of the TrackingResults class from a set of Kinect face points
        /// </summary>
        public TrackingResults(EnumIndexableCollection<FeaturePoint, PointF> facePoints)
        {
            this.FacePoints = this.FaceBoundaryPoints(facePoints);

            // Derive the face rectangle as the axis-aligned bounding box of the
            // boundary points.
            var left   = this.FacePoints.Min(p => p.X);
            var top    = this.FacePoints.Min(p => p.Y);
            var right  = this.FacePoints.Max(p => p.X);
            var bottom = this.FacePoints.Max(p => p.Y);

            this.FaceRect = new System.Drawing.Rectangle(left, top, right - left, bottom - top);
        }
Exemplo n.º 12
0
        /// <summary>
        /// Initializes a new instance of the TrackingResults class from a set of Kinect face points
        /// </summary>
        public TrackingResults(EnumIndexableCollection <FeaturePoint, PointF> facePoints)
        {
            this.FacePoints = this.FaceBoundaryPoints(facePoints);

            // Compute the bounding rectangle of the boundary points component-wise.
            int minX = this.FacePoints.Min(pt => pt.X);
            int maxX = this.FacePoints.Max(pt => pt.X);
            int minY = this.FacePoints.Min(pt => pt.Y);
            int maxY = this.FacePoints.Max(pt => pt.Y);

            this.FaceRect = new System.Drawing.Rectangle(minX, minY, maxX - minX, maxY - minY);
        }
Exemplo n.º 13
0
        /// <summary>
        /// Builds the eigen-space projections (whole face, mouth, left eye, right
        /// eye) for the face to recognize. Called for each frame sent by the Kinect.
        /// </summary>
        public EigenFace(EnumIndexableCollection <FeaturePoint, PointF> toRec)
        {
            // FIX: these arrays were declared with a second dimension of 0
            // (GetLowerBound always returns 0 here, and new double[x, 0] is empty),
            // so every [i, 0] access below threw IndexOutOfRangeException.
            // They now hold one column each.
            double[,] distanceToRec      = new double[n, 1];
            double[,] distanceMouthToRec = new double[coupleDistanceMouth.Length, 1];
            double[,] distanceEyeLToRec  = new double[coupledistanceEyeLeft.Length, 1];
            double[,] distanceEyeRToRec  = new double[coupledistanceEyeRight.Length, 1];

            // Calculate distances for the face to recognize.
            // NOTE(review): each loop below restarts at distanceToRec[0, 0], so later
            // loops overwrite the earlier entries — confirm whether cumulative offsets
            // were intended. Also verify .Length vs GetLength(0) on the couple arrays,
            // and the "+ 1" when reading the Y coordinates.
            for (int i = 0; i < coupleDistanceMouth.Length; i++)
            {
                xa = toRec.ElementAt(coupleDistanceMouth[i, 0]).X;
                xb = toRec.ElementAt(coupleDistanceMouth[i, 1]).X;
                ya = toRec.ElementAt(coupleDistanceMouth[i, 0] + 1).Y;
                yb = toRec.ElementAt(coupleDistanceMouth[i, 1] + 1).Y;
                distanceToRec[i, 0]      = distance(xa, xb, ya, yb, 0, 0);
                distanceMouthToRec[i, 0] = distance(xa, xb, ya, yb, 0, 0);
            }
            for (int i = 0; i < coupledistanceEyeLeft.Length; i++)
            {
                xa = toRec.ElementAt(coupledistanceEyeLeft[i, 0]).X;
                xb = toRec.ElementAt(coupledistanceEyeLeft[i, 1]).X;
                ya = toRec.ElementAt(coupledistanceEyeLeft[i, 0] + 1).Y;
                yb = toRec.ElementAt(coupledistanceEyeLeft[i, 1] + 1).Y;
                distanceToRec[i, 0]     = distance(xa, xb, ya, yb, 0, 0);
                // FIX: copy-paste bug — this loop previously wrote its results into
                // distanceMouthToRec instead of the left-eye vector.
                distanceEyeLToRec[i, 0] = distance(xa, xb, ya, yb, 0, 0);
            }
            for (int i = 0; i < coupledistanceEyeRight.Length; i++)
            {
                xa = toRec.ElementAt(coupledistanceEyeRight[i, 0]).X;
                xb = toRec.ElementAt(coupledistanceEyeRight[i, 1]).X;
                ya = toRec.ElementAt(coupledistanceEyeRight[i, 0] + 1).Y;
                yb = toRec.ElementAt(coupledistanceEyeRight[i, 1] + 1).Y;
                distanceToRec[i, 0]     = distance(xa, xb, ya, yb, 0, 0);
                // FIX: copy-paste bug — this loop previously wrote its results into
                // distanceMouthToRec instead of the right-eye vector.
                distanceEyeRToRec[i, 0] = distance(xa, xb, ya, yb, 0, 0);
            }

            // Calculations for the face to recognize: subtract the mean vectors.
            // FIX: the eye reductions previously subtracted from the *training*
            // matrices (distanceEyeLeft / distanceEyeRight) rather than from the
            // distances just measured on the face to recognize, mirroring the
            // pattern used for the whole face and the mouth.
            double[,] vReduce     = distanceToRec.Subtract(vectorMean, 1);
            double[,] vReduceM    = distanceMouthToRec.Subtract(vectorMeanM, 1);
            double[,] vReduceEyeL = distanceEyeLToRec.Subtract(vectorMeanEyeL, 1);
            double[,] vReduceEyeR = distanceEyeRToRec.Subtract(vectorMeanEyeR, 1);

            // Project each reduced vector into its eigen space.
            vEpsilon     = eigenVectors.GetColumns(indexOflastEigenValues).Transpose().Multiply(vReduce.Transpose());
            vEpsilonM    = eigenVectorsM.GetColumns(indexOflastEigenValuesM).Transpose().Multiply(vReduceM.Transpose());
            vEpsilonEyeL = eigenVectorsEyeL.GetColumns(indexOflastEigenValuesEyeL).Transpose().Multiply(vReduceEyeL.Transpose());
            vEpsilonEyeR = eigenVectorsEyeR.GetColumns(indexOflastEigenValuesEyeR).Transpose().Multiply(vReduceEyeR.Transpose());
        }
Exemplo n.º 14
0
        /// <summary>
        /// Appends one labelled, time-stamped row of 3D face shape points to
        /// output\fileName under the current directory, writing a header line first
        /// when the file does not yet exist.
        /// </summary>
        public void writeToFile(string fileName, FaceTrackFrame faceFrame)
        {
            string path   = Directory.GetCurrentDirectory() + @"\output";
            string target = path + "\\" + fileName;

            if (!Directory.Exists(path))
            {
                Directory.CreateDirectory(path);
            }

            if (!File.Exists(target))
            {
                using (StreamWriter file = new StreamWriter(target, true))
                {
                    string heading = "Label ";
                    heading += "TimeStamp ";
                    foreach (var fp in featurePoints)
                    {
                        heading += fp.ToString() + ".X ";
                        heading += fp.ToString() + ".Y ";
                        heading += fp.ToString() + ".Z ";
                    }

                    // FIX: TrimEnd returns a new string; the original discarded the
                    // result, leaving a trailing space in the header line.
                    heading = heading.TrimEnd(' ');
                    file.WriteLine(heading);
                }
            }

            using (StreamWriter file = new StreamWriter(target, true))
            {
                string data_string = label_txt.Text + " ";

                // Timestamp: milliseconds since the Unix epoch (UTC).
                TimeSpan t    = DateTime.UtcNow - new DateTime(1970, 1, 1);
                String   time = ((long)t.TotalMilliseconds).ToString();
                data_string += time + " ";

                // FIX: Get3DShape() was called twice (the first result was unused);
                // a single call provides everything that is written out.
                var shape = faceFrame.Get3DShape();
                foreach (var v in shape)
                {
                    data_string += v.X + " ";
                    data_string += v.Y + " ";
                    data_string += v.Z + " ";
                }
                file.WriteLine(data_string);
            }
        }
Exemplo n.º 15
0
            /// <summary>
            /// Updates the face tracking information for this skeleton and publishes
            /// the projected coordinates of feature point 5 to the running App.
            /// </summary>
            internal void OnFrameReady(KinectSensor kinectSensor, ColorImageFormat colorImageFormat, byte[] colorImage, DepthImageFormat depthImageFormat, short[] depthImage, Skeleton skeletonOfInterest)
            {
                this.skeletonTrackingState = skeletonOfInterest.TrackingState;

                if (this.skeletonTrackingState != SkeletonTrackingState.Tracked)
                {
                    // nothing to do with an untracked skeleton.
                    return;
                }

                // Lazily create the tracker on first use.
                if (this.faceTracker == null)
                {
                    try
                    {
                        this.faceTracker = new FaceTracker(kinectSensor);
                    }
                    catch (InvalidOperationException)
                    {
                        // During some shutdown scenarios the FaceTracker
                        // is unable to be instantiated.  Catch that exception
                        // and don't track a face.
                        this.faceTracker = null;
                    }
                }

                if (this.faceTracker != null)
                {
                    FaceTrackFrame frame = this.faceTracker.Track(
                        colorImageFormat, colorImage, depthImageFormat, depthImage, skeletonOfInterest);

                    this.lastFaceTrackSucceeded = frame.TrackSuccessful;
                    if (this.lastFaceTrackSucceeded)
                    {
                        if (faceTriangles == null)
                        {
                            // only need to get this once.  It doesn't change.
                            faceTriangles = frame.GetTriangles();
                        }

                        this.facePoints = frame.GetProjected3DShape();


                        // NOTE(review): feature point addressed by raw index 5 — using
                        // the named FeaturePoint enum member would make the intent clear.
                        double dbX = facePoints[5].X;
                        double dbY = facePoints[5].Y;

                        // NOTE(review): 'as' cast with no null check — this throws
                        // NullReferenceException if App.Current is not an App instance.
                        App thisApp = App.Current as App;
                        thisApp.m_dbX = dbX;
                        thisApp.m_dbY = dbY;
                    }
                }
            }
            /// <summary>
            /// Updates the face tracking information for this skeleton, caching the
            /// projected face points, the 3D shape, and the head rotation components.
            /// </summary>
            internal void OnFrameReady(KinectSensor kinectSensor, ColorImageFormat colorImageFormat, byte[] colorImage, DepthImageFormat depthImageFormat, short[] depthImage, Skeleton skeletonOfInterest)
            {
                this.skeletonTrackingState = skeletonOfInterest.TrackingState;

                // An untracked skeleton carries no usable face data.
                if (this.skeletonTrackingState != SkeletonTrackingState.Tracked)
                {
                    return;
                }

                // Lazily create the tracker; during some shutdown scenarios the
                // FaceTracker cannot be instantiated - skip face tracking then.
                if (this.faceTracker == null)
                {
                    try
                    {
                        this.faceTracker = new FaceTracker(kinectSensor);
                    }
                    catch (InvalidOperationException)
                    {
                        Debug.WriteLine("AllFramesReady - creating a new FaceTracker threw an InvalidOperationException");
                        this.faceTracker = null;
                    }
                }

                if (this.faceTracker == null)
                {
                    return;
                }

                FaceTrackFrame frame = this.faceTracker.Track(
                    colorImageFormat, colorImage, depthImageFormat, depthImage, skeletonOfInterest);

                this.lastFaceTrackSucceeded = frame.TrackSuccessful;
                if (!this.lastFaceTrackSucceeded)
                {
                    return;
                }

                if (faceTriangles == null)
                {
                    // The triangle topology never changes; fetch it only once.
                    faceTriangles = frame.GetTriangles();
                }

                this.facePoints = frame.GetProjected3DShape();
                this.test       = frame.Get3DShape();

                // Rotation components: X -> pitch, Y -> yaw, Z -> roll.
                pitchVal = frame.Rotation.X;
                rollVal  = frame.Rotation.Z;
                yawVal   = frame.Rotation.Y;
            }
Exemplo n.º 17
0
        /// <summary>
        /// Flattens the six supported Kinect animation units into a double array.
        /// Returns all zeros when the input collection is null.
        /// </summary>
        public static double[] ProcessData(EnumIndexableCollection <AnimationUnit, float> faceAnimationsUnits)
        {
            var result = new double[6];

            if (faceAnimationsUnits == null)
            {
                return result;
            }

            result[0] = faceAnimationsUnits[AnimationUnit.BrowLower];
            result[1] = faceAnimationsUnits[AnimationUnit.BrowRaiser];
            result[2] = faceAnimationsUnits[AnimationUnit.JawLower];
            result[3] = faceAnimationsUnits[AnimationUnit.LipCornerDepressor];
            result[4] = faceAnimationsUnits[AnimationUnit.LipRaiser];
            result[5] = faceAnimationsUnits[AnimationUnit.LipStretcher];

            return result;
        }
Exemplo n.º 18
0
        /// <summary>
        /// Caches the data from one tracking frame (3D shape, face rectangle,
        /// translation and rotation); the triangle topology is fetched only once
        /// since it never changes.
        /// </summary>
        protected void UpdateFrame(FaceTrackFrame frame)
        {
            this.lastFaceTrackSucceeded = frame.TrackSuccessful;
            if (!this.lastFaceTrackSucceeded)
            {
                return;
            }

            if (faceTriangles == null)
            {
                // only need to get this once - it doesn't change.
                faceTriangles = frame.GetTriangles();
            }

            this.facePoints      = frame.Get3DShape();
            this.FaceRect        = frame.FaceRect;
            this.FaceTranslation = frame.Translation;
            this.FaceRotation    = frame.Rotation;
        }
Exemplo n.º 19
0
        /// <summary>
        /// Pushes the latest face-tracking shape into the WPF 3D mesh: one-time
        /// topology and allocation setup, then per-frame vertex / UV updates.
        /// </summary>
        private void UpdateMesh(FaceTrackFrame faceTrackingFrame)
        {
            EnumIndexableCollection <FeaturePoint, Vector3DF> shapePoints          = faceTrackingFrame.Get3DShape();
            EnumIndexableCollection <FeaturePoint, PointF>    projectedShapePoints = faceTrackingFrame.GetProjected3DShape();

            if (this.triangleIndices == null)
            {
                // One-time setup: the triangle topology doesn't change between frames.
                this.triangleIndices = faceTrackingFrame.GetTriangles();

                var indices = new Int32Collection(this.triangleIndices.Length * 3);
                foreach (FaceTriangle tri in this.triangleIndices)
                {
                    // Indices are added in reverse (Third, Second, First) order —
                    // presumably to flip the triangle winding; confirm against renderer.
                    indices.Add(tri.Third);
                    indices.Add(tri.Second);
                    indices.Add(tri.First);
                }

                this.theGeometry.TriangleIndices = indices;
                this.theGeometry.Normals         = null; // Let WPF3D calculate these.

                // Pre-size the vertex and UV collections with placeholder entries.
                this.theGeometry.Positions          = new Point3DCollection(shapePoints.Count);
                this.theGeometry.TextureCoordinates = new PointCollection(projectedShapePoints.Count);
                for (int i = 0; i < shapePoints.Count; i++)
                {
                    this.theGeometry.Positions.Add(new Point3D());
                    this.theGeometry.TextureCoordinates.Add(new Point());
                }
            }

            // Update the 3D model's vertices and texture coordinates.
            for (int i = 0; i < shapePoints.Count; i++)
            {
                Vector3DF vertex = shapePoints[i];
                this.theGeometry.Positions[i] = new Point3D(vertex.X, vertex.Y, -vertex.Z);

                PointF projected = projectedShapePoints[i];
                this.theGeometry.TextureCoordinates[i] =
                    new Point(
                        projected.X / (double)this.colorImageWritableBitmap.PixelWidth,
                        projected.Y / (double)this.colorImageWritableBitmap.PixelHeight);
            }
        }
Exemplo n.º 20
0
        /// <summary>
        /// Accumulates pairwise feature distances over NUM_SAMPLES frames; on the
        /// final sample it writes the averaged values to a time-stamped file, and
        /// once all samples are written the process exits.
        /// </summary>
        public void updateData(EnumIndexableCollection<FeaturePoint, Vector3DF> pts)
        {
            if (count > NUM_SAMPLES)
            {
                Console.WriteLine("Done Scanning....");
                Environment.Exit(0);
                return;
            }

            if (count == NUM_SAMPLES)
            {
                String s = DateTime.Now.Hour + "_" + DateTime.Now.Minute + "_" + DateTime.Now.Second;

                // FIX: wrap the writer in 'using' so the stream is disposed even if
                // a write throws (the original only called Close on the happy path).
                using (StreamWriter writer = new StreamWriter("C:\\Users\\3DAuth\\Documents\\Facial Testing\\" + s + ".txt"))
                {
                    writer.WriteLine("Anton");

                    for (int i = 0; i < totals.Length; i++)
                    {
                        // Convert each accumulated total into an average.
                        totals[i] /= NUM_SAMPLES;
                        writer.Write(totals[i] + ",");
                    }
                    writer.Write("Siavash");
                }

                count++;
                return;
            }

            // Normal sampling path: add this frame's feature distances to the totals.
            count++;
            for (int i = 0; i < features.Count; i++)
            {
                totals[i] += getDist(pts[features[i].P1], pts[features[i].P2]);
            }
        }
Exemplo n.º 21
0
        /// <summary>
        /// Tracks the skeleton selected by trackingId and, on a successful track
        /// (while number &lt;= 3), writes its 3D face shape points to
        /// C:\Kex\data\&lt;name&gt;&lt;number&gt;.txt.
        /// </summary>
        private void saveFaceModel()
        {
            this.saveModel = false;          //notify model is saved:
            Skeleton skeletonOfInterest =
                this.skeletonData.FirstOrDefault(
                    skeleton =>
                    skeleton.TrackingId == this.trackingId &&
                    skeleton.TrackingState != SkeletonTrackingState.NotTracked);

            if (skeletonOfInterest != null && skeletonOfInterest.TrackingState == SkeletonTrackingState.Tracked)
            {
                FaceTrackFrame faceTrackFrame = this.faceTracker.Track(
                    this.colorImageFormat,
                    this.colorImage,
                    this.depthImageFormat,
                    this.depthImage,
                    skeletonOfInterest);

                if (faceTrackFrame.TrackSuccessful && number <= 3)
                {
                    EnumIndexableCollection <FeaturePoint, Vector3DF> fpA = faceTrackFrame.Get3DShape();
                    MessageBox.Show("saved model " + number + " for " + name);
                    //saveColorImage(name);
                    // save to file :

                    // NOTE(review): this write is immediately clobbered — the
                    // StreamWriter below opens the same path without append, which
                    // truncates the file, so the name written here is lost. Confirm
                    // whether the name should be the first line (use append) or
                    // whether this call should be removed.
                    System.IO.File.WriteAllText(@"C:\Kex\data\" + name + number + ".txt", name);

                    using (System.IO.StreamWriter file = new System.IO.StreamWriter(@"C:\Kex\data\" + name + number + ".txt"))

                    {
                        // One line per feature point, formatted "X , Y , Z".
                        foreach (Vector3DF fp in fpA)
                        {
                            file.WriteLine("" + fp.X + " , " + fp.Y + " , " + fp.Z);
                        }
                    }
                    number++;
                }
            }
        }
        /// <summary>
        /// Returns only the bounding points for the face, in loop order so the
        /// caller can draw a closed outline by connecting consecutive points.
        /// </summary>
        /// <param name="facePoints">Projected 2D face feature points.</param>
        /// <returns>Translated screen-space points along the face boundary.</returns>
        private List<System.Drawing.Point> FaceBoundaryPoints(EnumIndexableCollection<FeaturePoint, PointF> facePoints)
        {
            // Feature points along the face outline, listed in drawing order.
            // Raw numeric indices are outline vertices with no named enum member.
            FeaturePoint[] outline =
            {
                FeaturePoint.TopSkull,
                (FeaturePoint)44,
                (FeaturePoint)45,
                (FeaturePoint)47,
                (FeaturePoint)62,
                (FeaturePoint)61,
                FeaturePoint.LeftSideOfCheek,
                FeaturePoint.LeftOfChin,
                FeaturePoint.BottomOfChin,
                FeaturePoint.RightOfChin,
                FeaturePoint.RightSideOfChin,
                (FeaturePoint)28,
                (FeaturePoint)29,
                (FeaturePoint)14,
                (FeaturePoint)12,
                (FeaturePoint)11,
            };

            var boundary = new List<System.Drawing.Point>(outline.Length);
            foreach (FeaturePoint fp in outline)
            {
                boundary.Add(this.TranslatePoint(facePoints[fp]));
            }

            return boundary;
        }
Exemplo n.º 23
0
        /// <summary>
        /// Returns only the bounding points for the face, ordered so that joining
        /// consecutive points traces a closed loop around the face.
        /// </summary>
        /// <param name="facePoints">Projected 2D face feature points.</param>
        /// <returns>Translated boundary points in loop order.</returns>
        private List <System.Drawing.Point> FaceBoundaryPoints(EnumIndexableCollection <FeaturePoint, PointF> facePoints)
        {
            // Gather the outline feature points first (named members where the SDK
            // provides them, raw indices for the unnamed outline vertices) ...
            PointF[] loop =
            {
                facePoints[FeaturePoint.TopSkull],
                facePoints[44],
                facePoints[45],
                facePoints[47],
                facePoints[62],
                facePoints[61],
                facePoints[FeaturePoint.LeftSideOfCheek],
                facePoints[FeaturePoint.LeftOfChin],
                facePoints[FeaturePoint.BottomOfChin],
                facePoints[FeaturePoint.RightOfChin],
                facePoints[FeaturePoint.RightSideOfChin],
                facePoints[28],
                facePoints[29],
                facePoints[14],
                facePoints[12],
                facePoints[11],
            };

            // ... then translate each one into screen space.
            var result = new List <System.Drawing.Point>(loop.Length);
            foreach (PointF p in loop)
            {
                result.Add(this.TranslatePoint(p));
            }

            return(result);
        }
Exemplo n.º 24
0
        /// <summary>
        /// Handles the AllFramesReady event of the kinectSensor control.
        /// Copies color/depth/skeleton data, runs the face tracker on the first
        /// tracked skeleton, logs Animation Unit (AU) deltas against a baseline
        /// captured on the first successful frame, and drives the on-screen
        /// face widgets (mouth, brows, canvas rotation).
        /// </summary>
        /// <param name="sender">The source of the event.</param>
        /// <param name="e">The <see cref="Microsoft.Kinect.AllFramesReadyEventArgs"/> instance containing the event data.</param>
        void kinectSensor_AllFramesReady(object sender, AllFramesReadyEventArgs e)
        {
            // Retrieve each single frame and copy the data; bail out if any stream
            // has no frame available this tick.
            using (ColorImageFrame colorImageFrame = e.OpenColorImageFrame())
            {
                if (colorImageFrame == null)
                    return;
                colorImageFrame.CopyPixelDataTo(colorPixelData);
            }

            using (DepthImageFrame depthImageFrame = e.OpenDepthImageFrame())
            {
                if (depthImageFrame == null)
                    return;
                depthImageFrame.CopyPixelDataTo(depthPixelData);
            }

            using (SkeletonFrame skeletonFrame = e.OpenSkeletonFrame())
            {
                if (skeletonFrame == null)
                    return;
                skeletonFrame.CopySkeletonDataTo(skeletonData);
            }

            // Retrieve the first tracked skeleton if any. Otherwise, do nothing.
            var skeleton = skeletonData.FirstOrDefault(s => s.TrackingState == SkeletonTrackingState.Tracked);
            if (skeleton == null)
                return;

            // Make the faceTracker processing the data.
            FaceTrackFrame faceFrame = faceTracker.Track(kinectSensor.ColorStream.Format, colorPixelData,
                                              kinectSensor.DepthStream.Format, depthPixelData,
                                              skeleton);
            // NOTE(review): hard-coded path, only used by the commented-out logging below.
            String filePath = "C:\\Users\\Some Guest\\EmotionRecognition\\AwesomeFaceTracking\\Data\\Smile\\Jasmine_surprised_01.txt";

            // If a face is tracked, then we can use it.
            if (faceFrame.TrackSuccessful)
            {
                // Retrieve only the Animation Units coeffs.
                var AUCoeff = faceFrame.GetAnimationUnitCoefficients();

                var facialPoints = faceFrame.Get3DShape();
                //var projectedFacialPoints = faceFrame.GetProjected3DShape();
                /*foreach (var featurePoint in facialPoints) {
                    String str = featurePoint.X + " " + featurePoint.Y + " " + featurePoint.Z;
                    using (StreamWriter writer = File.AppendText(filePath)) {
                        writer.WriteLine(str);
                    }
                    Debug.Print(str);
                }*/

                String s = AUCoeff[AnimationUnit.LipRaiser] +
                    " " + AUCoeff[AnimationUnit.JawLower] +
                    " " + AUCoeff[AnimationUnit.LipStretcher] +
                    " " + AUCoeff[AnimationUnit.BrowLower] +
                    " " + AUCoeff[AnimationUnit.LipCornerDepressor] +
                    " " + AUCoeff[AnimationUnit.BrowRaiser];
                /*using (StreamWriter writer = File.AppendText(filePath))
                {
                    writer.WriteLine("++++++++++");
                    writer.WriteLine(s);
                    writer.WriteLine("======");
                }*/

                Debug.Print("LipRaise:" + AUCoeff[AnimationUnit.LipRaiser] +
                    " JawLow:" + AUCoeff[AnimationUnit.JawLower] +
                    " LipStretch:" + AUCoeff[AnimationUnit.LipStretcher] +
                    " BrowLow:" + AUCoeff[AnimationUnit.BrowLower] +
                    " LipCorner:" + AUCoeff[AnimationUnit.LipCornerDepressor] +
                    " BrowRaise:" + AUCoeff[AnimationUnit.BrowRaiser]);

                if (!hasInitial)
                {
                    // Capture the neutral-face baseline on the first successful track;
                    // all later frames are compared against these coefficients.
                    initialCoeff = faceFrame.GetAnimationUnitCoefficients();
                    hasInitial = true;
                    Debug.Print("GOT INITIAL VALUES");
                }
                else
                {
                    // Build a human-readable summary of which AU deltas are positive/negative.
                    String signString = "";
                    if (AUCoeff[AnimationUnit.LipRaiser] - initialCoeff[AnimationUnit.LipRaiser] > 0)
                    {
                        signString += "lipRaiser positive - smile? ";
                    }
                    else if (AUCoeff[AnimationUnit.LipRaiser] - initialCoeff[AnimationUnit.LipRaiser] < 0)
                    {
                        signString += "lipRaiser negative ";
                    }
                    var lipRaiseDelta = AUCoeff[AnimationUnit.LipRaiser] - initialCoeff[AnimationUnit.LipRaiser];
                    Debug.Print("lip raise delta value ******************************** " + lipRaiseDelta);
                    if (//(Math.Abs(AUCoeff[AnimationUnit.LipRaiser]) > 0.4) && // && Math.Abs(AUCoeff[AnimationUnit.LipRaiser]) < 0.6) ||
                        (AUCoeff[AnimationUnit.LipStretcher] < 0.3 && AUCoeff[AnimationUnit.LipStretcher] > -0.03) &&
                        (AUCoeff[AnimationUnit.BrowLower] < 0.15 && AUCoeff[AnimationUnit.BrowLower] > -0.05))
                    {
                        Debug.Print("========================lip raise in smile potential region!" + " LIP RAISER: " + AUCoeff[AnimationUnit.LipRaiser]
                            + " LIP STRETCHER : " + AUCoeff[AnimationUnit.LipStretcher] + "  BROW LOWER: " + AUCoeff[AnimationUnit.BrowLower]);
                    }
                    if (AUCoeff[AnimationUnit.JawLower] - initialCoeff[AnimationUnit.JawLower] > 0)
                    {
                        signString += "JawLower positive ";
                    }
                    else if (AUCoeff[AnimationUnit.JawLower] - initialCoeff[AnimationUnit.JawLower] < 0)
                    {
                        signString += "JawLower negative ";
                    }
                    if (AUCoeff[AnimationUnit.LipStretcher] - initialCoeff[AnimationUnit.LipStretcher] > 0)
                    {
                        signString += "LipStretcher positive - smile? ";
                    }
                    else if (AUCoeff[AnimationUnit.LipStretcher] - initialCoeff[AnimationUnit.LipStretcher] < 0)
                    {
                        signString += "LipStretcher negative ";
                    }
                    if (AUCoeff[AnimationUnit.BrowLower] - initialCoeff[AnimationUnit.BrowLower] > 0)
                    {
                        signString += "BrowLower positive ";
                    }
                    else if (AUCoeff[AnimationUnit.BrowLower] - initialCoeff[AnimationUnit.BrowLower] < 0)
                    {
                        signString += "BrowLower negative ";
                    }
                    if (AUCoeff[AnimationUnit.LipCornerDepressor] - initialCoeff[AnimationUnit.LipCornerDepressor] > 0)
                    {
                        signString += "LipCornerDepressor positive ";
                    }
                    else if (AUCoeff[AnimationUnit.LipCornerDepressor] - initialCoeff[AnimationUnit.LipCornerDepressor] < 0)
                    {
                        signString += "LipCornerDepressor negative ";
                    }
                    // BUG FIX: the positive branch previously appended "lipRaiser positive "
                    // (wrong label) and the negative branch compared LipRaiser against the
                    // initial BrowRaiser coefficient (copy-paste error). Both now use BrowRaiser.
                    if (AUCoeff[AnimationUnit.BrowRaiser] - initialCoeff[AnimationUnit.BrowRaiser] > 0)
                    {
                        signString += "BrowRaiser positive ";
                    }
                    else if (AUCoeff[AnimationUnit.BrowRaiser] - initialCoeff[AnimationUnit.BrowRaiser] < 0)
                    {
                        signString += "BrowRaiser negative ";
                    }
                    //Debug.Print(signString);
                }

                /*Debug.Print("string " + AUCoeff[AnimationUnit.BrowRaiser] + " " + AUCoeff[AnimationUnit.JawLower]);
                Console.Write("Work dammit");
                Debug.WriteLine("value of AuCoeff " + AUCoeff[AnimationUnit.BrowLower]);
                */
                // Drive the cartoon-face UI: clamp jaw to non-negative, scale mouth,
                // offset and rotate the brows, rotate the canvas with the head roll.
                var jawLowerer = AUCoeff[AnimationUnit.JawLower];
                jawLowerer = jawLowerer < 0 ? 0 : jawLowerer;
                MouthScaleTransform.ScaleY = jawLowerer * 5 + 0.1;
                MouthScaleTransform.ScaleX = (AUCoeff[AnimationUnit.LipStretcher] + 1);

                LeftBrow.Y = RightBrow.Y = (AUCoeff[AnimationUnit.BrowLower]) * 40;

                RightBrowRotate.Angle = (AUCoeff[AnimationUnit.BrowRaiser] * 20);
                LeftBrowRotate.Angle = -RightBrowRotate.Angle;

                CanvasRotate.Angle = faceFrame.Rotation.Z;
                //CanvasTranslate.X = faceFrame.Translation.X;
                //CanvasTranslate.Y = faceFrame.Translation.Y;
            }
        }
            /// <summary>
            /// Updates the face tracking information for this skeleton.
            /// On a successful track, serializes the face data into a byte buffer
            /// and sends it over UDP via <c>sending_socket</c>.
            /// </summary>
            /// <param name="kinectSensor">Sensor used to lazily construct the FaceTracker.</param>
            /// <param name="colorImageFormat">Format of <paramref name="colorImage"/>.</param>
            /// <param name="colorImage">Raw color pixel data for this frame.</param>
            /// <param name="depthImageFormat">Format of <paramref name="depthImage"/>.</param>
            /// <param name="depthImage">Raw depth pixel data for this frame.</param>
            /// <param name="skeletonOfInterest">Skeleton whose face is tracked.</param>
            internal void OnFrameReady(KinectSensor kinectSensor, ColorImageFormat colorImageFormat, byte[] colorImage, DepthImageFormat depthImageFormat, short[] depthImage, Skeleton skeletonOfInterest)
            {
                this.skeletonTrackingState = skeletonOfInterest.TrackingState;

                if (this.skeletonTrackingState != SkeletonTrackingState.Tracked)
                {
                    // nothing to do with an untracked skeleton.
                    return;
                }

                // Lazily create the tracker; creation can fail during shutdown.
                if (this.faceTracker == null)
                {
                    try
                    {
                        this.faceTracker = new FaceTracker(kinectSensor);
                    }
                    catch (InvalidOperationException)
                    {
                        // During some shutdown scenarios the FaceTracker
                        // is unable to be instantiated.  Catch that exception
                        // and don't track a face.
                        Debug.WriteLine("AllFramesReady - creating a new FaceTracker threw an InvalidOperationException");
                        this.faceTracker = null;
                    }
                }

                if (this.faceTracker != null)
                {
                    FaceTrackFrame frame = this.faceTracker.Track(
                        colorImageFormat, colorImage, depthImageFormat, depthImage, skeletonOfInterest);

                    this.lastFaceTrackSucceeded = frame.TrackSuccessful;
                    if (this.lastFaceTrackSucceeded)
                    {

                        if (faceTriangles == null)
                        {
                            // only need to get this once.  It doesn't change.
                            faceTriangles = frame.GetTriangles();
                            initByteBuffer();
                        }
                        /*
                hurra, dont need 4th vertex
                hurra, dont need 35th vertex
                hurra, dont need 36th vertex
                hurra, dont need 37th vertex
                hurra, dont need 38th vertex
                hurra, dont need 39th vertex
                hurra, dont need 41th vertex
                hurra, dont need 42th vertex
                hurra, dont need 43th vertex
                        */

                        this.projectedShapePoints = frame.GetProjected3DShape();
                        this.shapePoints = frame.Get3DShape();

                        // Pack the current face data into the UDP payload.
                        byte[] buffer = WriteFaceDataToBuffer();
                        //System.Diagnostics.Debug.WriteLine("Should send {0}", buffer.Length);//1468

                        //need to reduce by 444 bytes
                        //a point is 12. 85 points would result in 1020 bytes. 4 bytes remaining for face id...?
                        //currently 121 points. which ones can be leaft out?
                        //found 9 not needed. still 27 to much...
                        //header is useless. 434 bytes remaining after removing it
                        //byte[] buffer = buffer = new byte[1024];//works fast, even if unknown send to ip..
                        //ushort thefaceid = 22;
                        //Array.Copy(System.BitConverter.GetBytes(thefaceid), 0, buffer, 0, sizeof(ushort));
                        try
                        {
                            // NOTE(review): SendTo is a synchronous (blocking) UDP send;
                            // consider an async send if this ever stalls the frame loop.
                            sending_socket.SendTo(buffer, sending_end_point); // is this blocking? need to start sending asynchronously?!?

                            //System.Diagnostics.Debug.WriteLine("sending {0} bytes to ip {1} on port {2}", buffer.Length, sending_end_point.Address, sending_end_point.Port);
                            //Console.WriteLine( buffer);
                        }
                        catch (Exception send_exception)
                        {
                            // Best-effort send: log and keep tracking on failure.
                            System.Diagnostics.Debug.WriteLine("Exception {0}", send_exception.Message);
                            //System.Diagnostics.Debug.WriteLine("Is the buffer with it's {0} bytes to long to send in one packet?", buffer.Length);
                        }
                    }
                }
            }
            /// <summary>
            /// Updates the face tracking information for this skeleton.
            /// Compares the current face/body measurements against a previously
            /// saved face (when <c>saveFace</c> was set) and updates one of four
            /// on-screen rectangles depending on which sensor/face matched.
            /// </summary>
            /// <param name="kinectSensor">Sensor used to lazily construct the FaceTracker.</param>
            /// <param name="colorImageFormat">Format of <paramref name="colorImage"/>.</param>
            /// <param name="colorImage">Raw color pixel data for this frame.</param>
            /// <param name="depthImageFormat">Format of <paramref name="depthImage"/>.</param>
            /// <param name="depthImage">Raw depth pixel data for this frame.</param>
            /// <param name="skeletonOfInterest">Skeleton whose face is tracked.</param>
            internal void OnFrameReady(KinectSensor kinectSensor, ColorImageFormat colorImageFormat, byte[] colorImage, DepthImageFormat depthImageFormat, short[] depthImage, Skeleton skeletonOfInterest)
            {
                this.skeletonTrackingState = skeletonOfInterest.TrackingState;
                // Projected face points of the current frame, offset by +0.5 for rounding.
                List<Point> faceModelPtsCompare = new List<Point>();


                if (this.skeletonTrackingState != SkeletonTrackingState.Tracked)
                {
                    // nothing to do with an untracked skeleton.
                    return;
                }

                // Lazily create the tracker; creation can fail during shutdown.
                if (this.faceTracker == null)
                {
                    try
                    {
                        this.faceTracker = new FaceTracker(kinectSensor);

                    }
                    catch (InvalidOperationException)
                    {
                        // During some shutdown scenarios the FaceTracker
                        // is unable to be instantiated.  Catch that exception
                        // and don't track a face.
                        Debug.WriteLine("AllFramesReady - creating a new FaceTracker threw an InvalidOperationException");
                        this.faceTracker = null;
                    }
                }

                if (this.faceTracker != null )
                {
                    // Body measurements used to normalize the face comparison:
                    // shoulder span (X), head-to-hip span (Y) and head depth (Z).
                    double shoulderLeft = (double)skeletonOfInterest.Joints[JointType.ShoulderLeft].Position.X;
                    double shoulderRight = (double)skeletonOfInterest.Joints[JointType.ShoulderRight].Position.X;
                    double head = (double)skeletonOfInterest.Joints[JointType.Head].Position.Y;
                    double hip = (double)skeletonOfInterest.Joints[JointType.HipCenter].Position.Y;
                    double headDistZ = (double)skeletonOfInterest.Joints[JointType.Head].Position.Z;
                    shoulderLeft = Math.Abs(shoulderLeft);
                    shoulderRight = Math.Abs(shoulderRight);
                    head = Math.Abs(head);
                    hip = Math.Abs(hip);
                    headDistZ = Math.Abs(headDistZ);
                    FaceTrackFrame frame = this.faceTracker.Track(
                        colorImageFormat, colorImage, depthImageFormat, depthImage, skeletonOfInterest);
                    this.facePoints2 = frame.GetProjected3DShape();

                    for (int i = 0; i < this.facePoints2.Count; i++)
                    {
                        faceModelPtsCompare.Add(new Point(this.facePoints2[i].X + 0.5f, this.facePoints2[i].Y + 0.5f));
                    }

                    this.lastFaceTrackSucceeded = frame.TrackSuccessful;
                    // One-shot capture: snapshot the current face + body metrics
                    // as the reference to compare later frames against.
                    if (saveFace == true) 
                    {
                       // trackedSkeletonsSave.Add(skeletonOfInterest.TrackingId, this);
                        this.facePoints = frame.GetProjected3DShape();

                        for (int i = 0; i < this.facePoints.Count; i++)
                        {
                            faceModelPts.Add(new Point(this.facePoints[i].X + 0.5f, this.facePoints[i].Y + 0.5f));
                        }
                        shoulderDistX = (shoulderLeft + shoulderRight) * 100;
                        headhipDistY = (hip +head) * 100;
                        saveFaceList.Add(faceModelPts);
                        saveSkeleList.Add(skeletonOfInterest);
                        saveTrackList.Add(skeletonOfInterest.TrackingId);
                        saveFace = false;
                    }
                    if (this.lastFaceTrackSucceeded)
                    {

                        // Pick a rectangle slot (colorCount) for this sensor:
                        // keep the existing slot if the sensor is already known,
                        // otherwise advance to the next slot (wrapping after 3).
                        foreach (Faces kinects in faceList) 
                        {
                            if (colorCount < 3)
                            {
                                if (kinectSensor == kinects.kinect)
                                {

                                    break;

                                }

                                
                            }
                            else
                                colorCount = 0;
                            
                            colorCount++;
                        }
                        int countPt = 0;
                        double pointxDiff = 0;
                        double pointyDiff = 0;
                        double pointsNew = 0;
                        double pointsSaved = 0;
                        double shoulderDistDiff = 0;
                        double headhipDistDiff = 0;
                        double pointsDiff = 0;

                        if (faceModelPts.Count > 0)
                        {
                            // NOTE(review): these loops overwrite pointsNew/pointsSaved on
                            // every iteration, so only the LAST point of each list is
                            // compared — presumably a sum was intended; confirm before use.
                            foreach (Point pointNew in faceModelPtsCompare)
                            {
                                pointsNew = (pointNew.X + pointNew.Y) / headDistZ ;

                            }
                            foreach (Point pointSave in faceModelPts)
                            {
                                //pointxDiff = (pointNew.X - pointSave.X);
                                //pointyDiff = (pointNew.Y - pointSave.Y);

                                pointsSaved = (pointSave.X + pointSave.Y) / headDistZ ;
                            }
                            shoulderDistXNew = (shoulderLeft + shoulderRight) ;
                            headhipDistYNew = (hip + head);
                            
                         //   Debug.WriteLine("x diff: " + pointxDiff + " y diff: " + pointyDiff);
                            //Debug.WriteLine("new: " + pointsNew + " old: " + pointsSaved);
                            // Depth-normalized differences between current and saved metrics.
                            shoulderDistDiff = Math.Abs(shoulderDistXNew - shoulderDistX) *10 /headDistZ;
                            headhipDistDiff = Math.Abs(headhipDistYNew - headhipDistY)*10 /headDistZ;
                            pointsSaved = Math.Abs(pointsNew - pointsSaved) *10;
                            fMap.newFace = pointsNew;
                            fMap.oldFace = pointsSaved;

                            Debug.WriteLine("Shoulder Dist diff: " + shoulderDistDiff / headDistZ);
                            Debug.WriteLine("Head dist diff: " + headhipDistDiff / headDistZ);
                            Debug.WriteLine("face points diff: " + pointsSaved / headDistZ);

                            // Close enough to the saved face: force slot 3 (the "match" rectangle).
                            if (pointsSaved < (50 / headDistZ) )
                            {
                                colorCount = 3;
                            }
                        }


                        // Copy the tracked face rectangle into the slot chosen above.
                        switch (colorCount)
                        {
                            case 0:
                                rectangle.Width = frame.FaceRect.Width;
                                rectangle.Height = frame.FaceRect.Height;
                                Point rectPt = new Point();
                                rectPt.X = frame.FaceRect.Left;
                                rectPt.Y = frame.FaceRect.Top;
                                rectangle.Location = (Point)rectPt;
                                drawNum = 0;
                                break;
                            case 1:
                                rectangle2.Width = frame.FaceRect.Width;
                                rectangle2.Height = frame.FaceRect.Height;
                                Point rectPt2 = new Point();
                                rectPt2.X = frame.FaceRect.Left;
                                rectPt2.Y = frame.FaceRect.Top;
                                rectangle2.Location = (Point)rectPt2;
                                drawNum = 1;
                                break;
                            case 2:
                                rectangle3.Width = frame.FaceRect.Width;
                                rectangle3.Height = frame.FaceRect.Height;
                                Point rectPt3 = new Point();
                                rectPt3.X = frame.FaceRect.Left;
                                rectPt3.Y = frame.FaceRect.Top;
                                rectangle3.Location = (Point)rectPt3;
                                drawNum = 2;
                                break;
                            case 3:
                                rectangle4.Width = frame.FaceRect.Width;
                                rectangle4.Height = frame.FaceRect.Height;
                                Point rectPt4 = new Point();
                                rectPt4.X = frame.FaceRect.Left;
                                rectPt4.Y = frame.FaceRect.Top;
                                rectangle4.Location = (Point)rectPt4;
                                drawNum = 3;
                                break;
                        }


                    }
                }
            }
Exemplo n.º 27
0
        /// <summary>
        /// VVVV plugin evaluation: (re)wires the Kinect runtime event handler when
        /// the connection changed, then publishes the latest face-tracking results
        /// (position, rotation, projected/3D points, smoothed normals, triangle
        /// indices) for every tracked skeleton to the output pins.
        /// </summary>
        /// <param name="SpreadMax">Maximum spread count supplied by the host (unused).</param>
        public void Evaluate(int SpreadMax)
        {
            if (this.FInvalidateConnect)
            {
                // Detach from the previous runtime before (re)subscribing.
                if (runtime != null)
                {
                    this.runtime.AllFrameReady -= KinectFaceNode_AllFrameReady;
                }

                if (this.FInRuntime.PluginIO.IsConnected)
                {
                    //Cache runtime node
                    this.runtime = this.FInRuntime[0];

                    if (runtime != null)
                    {
                        this.FInRuntime[0].AllFrameReady += KinectFaceNode_AllFrameReady;
                    }
                }

                this.FInvalidateConnect = false;
            }

            if (this.FInvalidate)
            {
                // One output slice per tracked skeleton.
                this.FOutOK.SliceCount       = this.trackedSkeletons.Count;
                this.FOutPosition.SliceCount = this.trackedSkeletons.Count;
                this.FOutRotation.SliceCount = this.trackedSkeletons.Count;
                this.FOutPts.SliceCount      = this.trackedSkeletons.Count;
                this.FOutPPTs.SliceCount     = this.trackedSkeletons.Count;

                int cnt = 0;
                foreach (int key in this.trackedSkeletons.Keys)
                {
                    SkeletonFaceTracker sft = this.trackedSkeletons[key];
                    if (sft.frame != null)
                    {
                        this.FOutOK[cnt]       = sft.frame.TrackSuccessful;
                        this.FOutPosition[cnt] = new Vector3(sft.frame.Translation.X, sft.frame.Translation.Y, sft.frame.Translation.Z);
                        this.FOutRotation[cnt] = new Vector3(sft.frame.Rotation.X, sft.frame.Rotation.Y, sft.frame.Rotation.Z) * (float)VMath.DegToCyc;

                        EnumIndexableCollection <FeaturePoint, PointF>    pp = sft.frame.GetProjected3DShape();
                        EnumIndexableCollection <FeaturePoint, Vector3DF> p  = sft.frame.Get3DShape();

                        this.FOutPPTs[cnt].SliceCount    = pp.Count;
                        this.FOutPts[cnt].SliceCount     = p.Count;
                        this.FOutNormals[cnt].SliceCount = p.Count;

                        // Compute smoothed per-vertex normals by accumulating each
                        // triangle's face normal into its three vertices.
                        Vector3[] norms    = new Vector3[p.Count];
                        int[]     inds     = KinectRuntime.FACE_INDICES;
                        int       tricount = inds.Length / 3;
                        for (int j = 0; j < tricount; j++)
                        {
                            int i1 = inds[j * 3];
                            int i2 = inds[j * 3 + 1];
                            int i3 = inds[j * 3 + 2];

                            Vector3 v1 = new Vector3(p[i1].X, p[i1].Y, p[i1].Z);
                            Vector3 v2 = new Vector3(p[i2].X, p[i2].Y, p[i2].Z);
                            Vector3 v3 = new Vector3(p[i3].X, p[i3].Y, p[i3].Z);

                            Vector3 faceEdgeA = v2 - v1;
                            Vector3 faceEdgeB = v1 - v3;
                            Vector3 norm      = Vector3.Cross(faceEdgeB, faceEdgeA);

                            norms[i1] += norm;
                            norms[i2] += norm;
                            norms[i3] += norm;
                        }

                        for (int i = 0; i < pp.Count; i++)
                        {
                            this.FOutPPTs[cnt][i]    = new Vector2(pp[i].X, pp[i].Y);
                            this.FOutPts[cnt][i]     = new Vector3(p[i].X, p[i].Y, p[i].Z);
                            this.FOutNormals[cnt][i] = Vector3.Normalize(norms[i]);
                        }

                        FaceTriangle[] d = sft.frame.GetTriangles();
                        this.FOutIndices.SliceCount = d.Length * 3;
                        for (int i = 0; i < d.Length; i++)
                        {
                            this.FOutIndices[i * 3]     = d[i].First;
                            this.FOutIndices[i * 3 + 1] = d[i].Second;
                            this.FOutIndices[i * 3 + 2] = d[i].Third;
                        }
                    }
                    else
                    {
                        // BUG FIX: this branch runs precisely when sft.frame is null,
                        // but the original dereferenced sft.frame.Translation/Rotation
                        // here, guaranteeing a NullReferenceException. Emit neutral
                        // defaults instead, and also reset the normals slice count.
                        this.FOutOK[cnt]                 = false;
                        this.FOutPosition[cnt]           = new Vector3(0, 0, 0);
                        this.FOutRotation[cnt]           = new Vector3(0, 0, 0);
                        this.FOutIndices.SliceCount      = 0;
                        this.FOutPPTs[cnt].SliceCount    = 0;
                        this.FOutPts[cnt].SliceCount     = 0;
                        this.FOutNormals[cnt].SliceCount = 0;
                    }
                    cnt++;
                }

                //this.FOutOK[0] = this.frm.TrackSuccessful;
                //this.FOutPosition[0] = new Vector3(frm.Translation.X, frm.Translation.Y, frm.Translation.Z);
            }

            //this.FOutFrameIndex[0] = this.frameindex;
        }
Exemplo n.º 28
0
            /// <summary>
            /// Updates the face tracking information for this skeleton.
            /// </summary>
            internal void OnFrameReady(KinectSensor kinectSensor, ColorImageFormat colorImageFormat, byte[] colorImage, DepthImageFormat depthImageFormat, short[] depthImage, Skeleton skeletonOfInterest)
            {
                this.skeletonTrackingState = skeletonOfInterest.TrackingState;

                // Face tracking is only meaningful for a fully tracked skeleton.
                if (this.skeletonTrackingState != SkeletonTrackingState.Tracked)
                {
                    return;
                }

                // Lazily create the tracker on first use.
                if (this.faceTracker == null)
                {
                    try
                    {
                        this.faceTracker = new FaceTracker(kinectSensor);
                    }
                    catch (InvalidOperationException)
                    {
                        // During some shutdown scenarios the FaceTracker cannot be
                        // instantiated; swallow the exception and skip face tracking.
                        Debug.WriteLine("AllFramesReady - creating a new FaceTracker threw an InvalidOperationException");
                        this.faceTracker = null;
                    }
                }

                if (this.faceTracker == null)
                {
                    return;
                }

                FaceTrackFrame faceFrame = this.faceTracker.Track(
                    colorImageFormat, colorImage, depthImageFormat, depthImage, skeletonOfInterest);

                this.lastFaceTrackSucceeded = faceFrame.TrackSuccessful;
                if (!this.lastFaceTrackSucceeded)
                {
                    // Clear stale results so consumers do not read outdated data.
                    this.rotationXYZ = null;
                    this.faceCoordinates = null;
                    this.translationXYZ = null;
                    return;
                }

                if (faceTriangles == null)
                {
                    // The triangle topology never changes; fetch it once.
                    faceTriangles = faceFrame.GetTriangles();
                }

                this.facePoints = faceFrame.GetProjected3DShape();

                // Publish the face rectangle as { left, top, width, height }.
                var faceRect = faceFrame.FaceRect;
                this.faceCoordinates = new int[] { faceRect.Left, faceRect.Top, faceRect.Right - faceRect.Left, faceRect.Bottom - faceRect.Top };
                this.rotationXYZ = new float[] { faceFrame.Rotation.X, faceFrame.Rotation.Y, faceFrame.Rotation.Z };
                this.translationXYZ = new float[] { faceFrame.Translation.X, faceFrame.Translation.Y, faceFrame.Translation.Z };
            }
Exemplo n.º 29
0
    /// <summary>
    /// Periodic face-tracking loop: whenever a new frame timestamp is observed,
    /// runs the FaceTracker over the cached color/depth/skeleton data, publishes
    /// the projected face shape and the mood coefficient, then waits for the
    /// next interval.  Runs until <paramref name="token"/> is cancelled.
    /// </summary>
    /// <param name="dueTime">Initial delay before the loop starts.</param>
    /// <param name="interval">Polling period; a zero interval disables the loop.</param>
    /// <param name="token">Cancellation token that ends the loop.</param>
    private async Task FaceTrackingAsync(TimeSpan dueTime, TimeSpan interval, CancellationToken token) {
      if (interval.TotalMilliseconds == 0) return;

      // Initial wait time before we begin the periodic loop.
      if (dueTime > TimeSpan.Zero)
        await Task.Delay(dueTime, token);

      DateTime LocalTimestamp = Timestamp;
      FaceTracker tracker = new FaceTracker(Sensor);

      try {
        // Repeat this loop until cancelled.
        while (!token.IsCancellationRequested) {

          // Skip: we already processed the data for this timestamp.
          if (Timestamp == LocalTimestamp) {
            await Task.Delay(interval, token);
            continue;
          }

          // Timestamp data
          LocalTimestamp = Timestamp;
          FaceTrackWatch.Again();

          // Do Job
          try {
            CopyColorData = true;
            CopySkeletons = true;
            FPoints = null;
            Mood = 0;
            if (null != GestureManager && null != GestureManager.Skeleton) {
              // BUG FIX: the FaceTrackFrame is disposable; dispose it every
              // iteration instead of leaking one frame per tick.
              using (FaceTrackFrame frame = tracker.Track(ColorFormat, ColorData, DepthFormat, DepthData, GestureManager.Skeleton)) {
                if (frame.TrackSuccessful) {

                  // Only once.  It doesn't change.
                  if (FTriangles == null) { FTriangles = frame.GetTriangles(); }
                  FPoints = frame.GetProjected3DShape();
                  Mood = frame.GetAnimationUnitCoefficients()[AnimationUnit.LipCornerDepressor];
                  WSRProfileManager.GetInstance().UpdateMood(Mood);
                }
              }
            }
          }
          catch (Exception ex) {
            WSRConfig.GetInstance().logError("FACE", ex);
          }
          FaceTrackWatch.Stop();

          // Wait to repeat again.
          if (interval > TimeSpan.Zero)
            await Task.Delay(interval, token);
        }
      }
      finally {
        // BUG FIX: previously tracker.Dispose() was skipped whenever Task.Delay
        // threw OperationCanceledException on cancellation, leaking the native
        // tracker.  Dispose unconditionally.
        tracker.Dispose();
      }
    }
Exemplo n.º 30
0
 protected override double getState(EnumIndexableCollection<FeaturePoint, PointF> facePoints)
 {
     // Measure mouth opening normalized by nose length, so the value is
     // independent of the user's distance from the sensor.
     double noseLength = GameUtils.getLength(FeaturePoint.UnderNoseMiddle, FeaturePoint.NoseTop, facePoints);
     double lipGap = GameUtils.getLength(FeaturePoint.MiddleTopDipUpperLip, FeaturePoint.MiddleBottomLip, facePoints);
     return lipGap / noseLength;
 }
Exemplo n.º 31
0
            /// <summary>
            /// Per-frame callback: tracks the face of the given skeleton, samples
            /// its 3D feature points a few times, bins the pairwise distances into
            /// four 65-bucket histograms, and finally hands them to an external
            /// Weka classifier (java -jar) whose stdout is shown as the prediction.
            /// </summary>
            /// <param name="kinectSensor">Sensor used to lazily create the FaceTracker.</param>
            /// <param name="colorImageFormat">Format of <paramref name="colorImage"/>.</param>
            /// <param name="colorImage">Raw color pixel data for this frame.</param>
            /// <param name="depthImageFormat">Format of <paramref name="depthImage"/>.</param>
            /// <param name="depthImage">Raw depth pixel data for this frame.</param>
            /// <param name="skeletonOfInterest">Skeleton whose face is tracked.</param>
            internal void OnFrameReady(KinectSensor kinectSensor, ColorImageFormat colorImageFormat, byte[] colorImage, DepthImageFormat depthImageFormat, short[] depthImage, Skeleton skeletonOfInterest)
            {
                // Start the wall-clock timer on the very first frame only.
                if (!timerStarted)
                {
                    timer.Start();
                    timerStarted = true;
                }
                //increment our frames
                numberOfFrames++;


                this.skeletonTrackingState = skeletonOfInterest.TrackingState;

                if (this.skeletonTrackingState != SkeletonTrackingState.Tracked)
                {
                    // nothing to do with an untracked skeleton.
                    return;
                }

                // Lazily create the face tracker; construction can fail during shutdown.
                if (this.faceTracker == null)
                {
                    try
                    {
                        this.faceTracker = new FaceTracker(kinectSensor);
                    }
                    catch (InvalidOperationException)
                    {
                        // During some shutdown scenarios the FaceTracker
                        // is unable to be instantiated.  Catch that exception
                        // and don't track a face.
                        Debug.WriteLine("AllFramesReady - creating a new FaceTracker threw an InvalidOperationException");
                        this.faceTracker = null;
                    }
                }

                if (this.faceTracker != null)
                {
                    FaceTrackFrame frame = this.faceTracker.Track(
                        colorImageFormat, colorImage, depthImageFormat, depthImage, skeletonOfInterest);

                    this.lastFaceTrackSucceeded = frame.TrackSuccessful;
                    if (this.lastFaceTrackSucceeded)
                    {
                        if (faceTriangles == null)
                        {
                            // only need to get this once.  It doesn't change.
                            faceTriangles = frame.GetTriangles();
                        }

                        this.facePoints = frame.GetProjected3DShape();

                        /*if ()
                         * {
                         *  Debug.WriteLine("hit " + (frameIter * sampleRate) + " frames in " + (timer.Elapsed) + " seconds");
                         *  frameIter++;
                         * }*/

                        //Also grab our points
                        EnumIndexableCollection <FeaturePoint, Vector3DF> facePoints3D = frame.Get3DShape();
                        int index = 0;
                        // Collect one sample every 'sampleRate' frames while frameIter < 5;
                        // the 5th iteration triggers the classification block below.
                        // NOTE(review): frameIter is incremented on the background thread
                        // but read here on the callback thread without synchronization —
                        // confirm this race is acceptable.
                        if (numberOfFrames > frameIter * sampleRate && frameIter < 5) //only grab 4 samples over our given sample rate
                        {
                            //Create a new thread so we don't make the visual thread throw up all over the place
                            new Thread(() =>
                            {
                                Thread.CurrentThread.IsBackground = true;

                                // Snapshot the 3D points into plain tuples for distance math.
                                List <Tuple <float, float, float> > myPoints = new List <Tuple <float, float, float> >();
                                foreach (Vector3DF vector in facePoints3D)
                                {
                                    //csv.Append(string.Format("( ({1}, {2}, {3}){4}",vector.X, vector.Y, vector.Z, Environment.NewLine));
                                    myPoints.Add(new Tuple <float, float, float>(vector.X, vector.Y, vector.Z));
                                    index++;
                                }
                                calculateDistances(myPoints);
                                frameIter++;
                            }).Start();
                            //once = true;
                        }

                        if (frameIter == 5)
                        {
                            SetStatusText("Generating histograms...");
                            Console.WriteLine("We are ready to sample");
                            // Bin each distance into a 65-bucket histogram, scaled by
                            // that sample's maximum distance.
                            foreach (float distance in sampleOneDistances)
                            {
                                int sampleOneIndex = (int)Math.Floor(64 * distance / sampleOneMaxDistance);
                                sampleOneHistogram[sampleOneIndex]++;
                            }
                            foreach (float distance in sampleTwoDistances)
                            {
                                sampleTwoHistogram[(int)Math.Floor(64 * distance / sampleTwoMaxDistance)]++;
                            }
                            foreach (float distance in sampleThreeDistances)
                            {
                                sampleThreeHistogram[(int)Math.Floor(64 * distance / sampleThreeMaxDistance)]++;
                            }
                            foreach (float distance in sampleFourDistances)
                            {
                                sampleFourHistogram[(int)Math.Floor(64 * distance / sampleFourMaxDistance)]++;
                            }

                            //Go through histogram and divide by distances



                            //Get
                            // Normalize each histogram by its sample count.
                            // NOTE(review): if the histogram elements are ints this is
                            // integer division and most buckets collapse to 0 — confirm
                            // the element type / intended normalization.
                            for (int i = 0; i < sampleOneHistogram.Length; i++)
                            {
                                sampleOneHistogram[i] = sampleOneHistogram[i] / sampleOneDistances.Count;
                            }

                            for (int i = 0; i < sampleTwoHistogram.Length; i++)
                            {
                                sampleTwoHistogram[i] = sampleTwoHistogram[i] / sampleTwoDistances.Count;
                            }

                            for (int i = 0; i < sampleThreeHistogram.Length; i++)
                            {
                                sampleThreeHistogram[i] = sampleThreeHistogram[i] / sampleThreeDistances.Count;
                            }

                            for (int i = 0; i < sampleFourHistogram.Length; i++)
                            {
                                sampleFourHistogram[i] = sampleFourHistogram[i] / sampleFourDistances.Count;
                            }

                            int iter = 0;

                            foreach (int count in sampleTwoHistogram)//can iterate through any histogram, they're all of size 65
                            {
                                Console.WriteLine("Count for hist1/2/3/4[" + iter + "] is " + count + "/" + sampleOneHistogram[iter] + "/" + sampleThreeHistogram[iter] + "/" + sampleFourHistogram[iter]);
                                iter++;
                            }

                            //Write our histograms to a csv file
                            String[] sampleOneHistString = Array.ConvertAll(sampleOneHistogram, x => x.ToString());


                            // One header row (1..65) then one row per histogram.
                            using (System.IO.StreamWriter file = new System.IO.StreamWriter(testFilePath))
                            {
                                file.Write(string.Join(",", Enumerable.Range(1, 65).ToArray()) + Environment.NewLine);
                                file.Write(string.Join(",", sampleOneHistString));
                                file.Write(Environment.NewLine);
                                file.Write(string.Join(",", Array.ConvertAll(sampleTwoHistogram, x => x.ToString())));
                                file.Write(Environment.NewLine);
                                file.Write(string.Join(",", Array.ConvertAll(sampleThreeHistogram, x => x.ToString())));
                                file.Write(Environment.NewLine);
                                file.Write(string.Join(",", Array.ConvertAll(sampleFourHistogram, x => x.ToString())));
                            }
                            //pass that data file to jar
                            // Launch the external Weka classifier and read its prediction
                            // from stdout (RedirectStandardOutput is required for that).
                            // This blocks the callback thread until the process exits.
                            String jarPath = "C:\\Users\\Datalab\\Documents\\GitHub\\WekaClassifier\\jar\\wekaClassifier.jar";
                            System.Diagnostics.Process clientProcess = new Process();
                            String jarargs = "C:\\Users\\Datalab\\Documents\\GitHub\\WekaClassifier\\data\\training_data.arff  C:\\Users\\Datalab\\Documents\\GitHub\\WekaClassifier\\data\\testFormat.dat";
                            clientProcess.StartInfo.FileName  = "java";
                            clientProcess.StartInfo.Arguments = "-jar " + jarPath + " " + jarargs;
                            clientProcess.StartInfo.RedirectStandardOutput = true;
                            clientProcess.StartInfo.UseShellExecute        = false;
                            clientProcess.Start();


                            String output = clientProcess.StandardOutput.ReadToEnd();
                            Console.WriteLine(output);
                            clientProcess.WaitForExit();
                            int code = clientProcess.ExitCode;

                            //write to dat file with 4 histograms averaged


                            frameIter++; //only do this once (will make conditional evaluate to false. Is this clean and clear? Not really? Do I care? Not particularly. At least it's documented.
                            ftNumPeople++;
                            SetPeopleText("People tracked : " + ftNumPeople);
                            SetStatusText("Status: waiting....");
                            SetPredictionText("Guess: " + output);
                        }
                    }
                }
            }
Exemplo n.º 32
0
        /// <summary>
        /// Accumulates pairwise feature-point distances over NUM_SAMPLES frames.
        /// Once enough samples are collected it either records another learning
        /// scan for a new user (repeating up to NUM_LEARNING_SCANS times) or
        /// verifies an existing user against the classifier.
        /// </summary>
        /// <param name="pts">3D face feature points for the current frame.</param>
        public void updateData(EnumIndexableCollection<FeaturePoint, Vector3DF> pts)
        {
            Console.WriteLine(count);
            if (count > NUM_SAMPLES)
            {
                // Scan already finished; ignore any further frames.
                //Environment.Exit(0);
                return;
            }
            else if (count == NUM_SAMPLES)
            {
                // Final sample reached: aggregate the collected distances.
                //Notify(count, NUM_SAMPLES);
                String s = DateTime.Now.Hour + "_" + DateTime.Now.Minute + "_" + DateTime.Now.Second;

                //StreamWriter writer = new StreamWriter("C:\\Users\\Administrator\\Documents\\Facial Testing\\" + s + ".txt");
                //writer.WriteLine("Siavash");

                if (!CurrentObjectBag.SLearningNewUser)
                {
                    // Existing user: robust (trimmed) mean — sort each distance
                    // list, drop the lowest and highest quartile, average the rest.
                    int trim = NUM_SAMPLES / 4;
                    for (int i = 0; i < totalsList.Length; i++)
                    {
                        totalsList[i].Sort();
                        totalsList[i].RemoveRange(0, trim);
                        totalsList[i].RemoveRange(totalsList[i].Count - trim, trim);
                        totals[i] = totalsList[i].Sum();
                        totals[i] /= totalsList[i].Count;
                    }
                } else {
                    // New user: plain mean over all samples (totals already holds sums).
                    for (int i = 0; i < totals.Length; i++)
                    {
                        totals[i] /= NUM_SAMPLES;
                        // writer.WriteLine(totals[i]);
                        //writer.WriteLine(totals[i]);
                    }
                }

                // writer.Write("Siavash");
                // writer.Close();

                if (CurrentObjectBag.SLearningNewUser)
                {
                    // new user, so do additional scans
                    List<double> tempList = new List<double>();
                    for (int i = 0; i < totals.Length; i++)
                    {
                        tempList.Add(totals[i]);
                        totals[i] = 0;
                    }
                    learnList.Add(tempList);

                    if (learnCount < NUM_LEARNING_SCANS - 1)
                    {
                        // More scans needed: reset the sample counter and go again.
                        count = 0;
                        learnCount++;
                        //Microsoft.Samples.Kinect.SkeletonBasics.MainWindow.faceScanCounter++;
                        ThreeDAuth.MainWindow.faceScanCounter++;

                    }
                    else
                    {
                        // All learning scans complete: register the new user.
                        count++;
                        classifier.addUser(learnList);
                    }

                }
                else
                {
                    // existing user, so done scanning and validate it
                    //Notify(count, NUM_SAMPLES);
                    count++;
                    classifier.verifyUser(totals);
                }

            }
            else
            {
                // Still sampling: accumulate the distance of every configured
                // feature pair for this frame.
                //Microsoft.Samples.Kinect.SkeletonBasics.MainWindow.faceScanCount++;
                //Microsoft.Samples.Kinect.SkeletonBasics.MainWindow.faceScanCounter = learnCount;
                ThreeDAuth.MainWindow.faceScanCount++;
                ThreeDAuth.MainWindow.faceScanCounter = learnCount;
                count++;

                for (int i = 0; i < features.Count; i++)
                {
                    double dist = getDist(pts[features[i].P1], pts[features[i].P2]);
                    totals[i] += dist;
                    totalsList[i].Add(dist);
                }

            }
        }
 /// <summary>
 /// Captures the tracked face geometry (projected points and mesh triangles)
 /// for delivery to event subscribers.
 /// </summary>
 public FaceTrackerEventArgs(EnumIndexableCollection <FeaturePoint, PointF> facePoints, FaceTriangle[] faceTriangles) : base()
 {
     this.facePoints = facePoints;
     this.faceTriangles = faceTriangles;
 }
Exemplo n.º 34
0
            /// <summary>
            /// Updates the face tracking information for this skeleton and
            /// publishes the projected position of one feature point to the App.
            /// </summary>
            internal void OnFrameReady(KinectSensor kinectSensor, ColorImageFormat colorImageFormat, byte[] colorImage, DepthImageFormat depthImageFormat, short[] depthImage, Skeleton skeletonOfInterest)
            {
                this.skeletonTrackingState = skeletonOfInterest.TrackingState;

                // Nothing to do with an untracked skeleton.
                if (this.skeletonTrackingState != SkeletonTrackingState.Tracked)
                {
                    return;
                }

                // Lazily construct the tracker; this can fail during shutdown.
                if (this.faceTracker == null)
                {
                    try
                    {
                        this.faceTracker = new FaceTracker(kinectSensor);
                    }
                    catch (InvalidOperationException)
                    {
                        // During some shutdown scenarios the FaceTracker cannot
                        // be instantiated; catch the exception and don't track.
                        this.faceTracker = null;
                    }
                }

                if (this.faceTracker == null)
                {
                    return;
                }

                FaceTrackFrame trackFrame = this.faceTracker.Track(
                    colorImageFormat, colorImage, depthImageFormat, depthImage, skeletonOfInterest);

                this.lastFaceTrackSucceeded = trackFrame.TrackSuccessful;
                if (!this.lastFaceTrackSucceeded)
                {
                    return;
                }

                if (faceTriangles == null)
                {
                    // Only need to get this once: the mesh topology never changes.
                    faceTriangles = trackFrame.GetTriangles();
                }

                this.facePoints = trackFrame.GetProjected3DShape();

                // NOTE(review): index 5 is a raw FeaturePoint ordinal — confirm
                // which facial landmark it corresponds to.
                double dbX = facePoints[5].X;
                double dbY = facePoints[5].Y;

                // Hand the coordinates to the application object for other windows.
                App thisApp = App.Current as App;
                thisApp.m_dbX = dbX;
                thisApp.m_dbY = dbY;
            }
Exemplo n.º 35
0
        /// <summary>
        /// AllFramesReady handler: copies color/depth/skeleton data, maps the
        /// head joint to screen space, tracks the face to locate the nose, and
        /// updates the on-screen line-of-sight indicators.
        /// </summary>
        void nui_AllFramesReady(object sender, AllFramesReadyEventArgs e)
        {
            ColorImageFrame colorImageFrame = null;
            DepthImageFrame depthImageFrame = null;
            SkeletonFrame   skeletonFrame   = null;

            try
            {
                colorImageFrame = e.OpenColorImageFrame();
                depthImageFrame = e.OpenDepthImageFrame();
                skeletonFrame   = e.OpenSkeletonFrame();

                if (colorImageFrame == null || depthImageFrame == null || skeletonFrame == null)
                {
                    return;
                }

                // Re-allocate buffers whenever a stream format changes.
                if (this.depthImageFormat != depthImageFrame.Format)
                {
                    this.depthImage       = null;
                    this.depthImageFormat = depthImageFrame.Format;
                }

                if (this.colorImageFormat != colorImageFrame.Format)
                {
                    this.colorImage       = null;
                    this.colorImageFormat = colorImageFrame.Format;
                }

                if (this.depthImage == null)
                {
                    this.depthImage = new short[depthImageFrame.PixelDataLength];
                }

                if (this.colorImage == null)
                {
                    this.colorImage = new byte[colorImageFrame.PixelDataLength];
                }

                if (this.skeletonData == null || this.skeletonData.Length != skeletonFrame.SkeletonArrayLength)
                {
                    this.skeletonData = new Skeleton[skeletonFrame.SkeletonArrayLength];
                }

                colorImageFrame.CopyPixelDataTo(this.colorImage);
                depthImageFrame.CopyPixelDataTo(this.depthImage);
                skeletonFrame.CopySkeletonDataTo(this.skeletonData);

                // BUG FIX: this loop previously ran *after* the finally block had
                // disposed depthImageFrame, so MapFromSkeletonPoint and
                // Width/Height were called on a disposed frame.  Process the data
                // here, while the frame is still valid.
                foreach (Skeleton skeleton in skeletonData)
                {
                    if (skeleton.TrackingState == SkeletonTrackingState.Tracked || skeleton.TrackingState == SkeletonTrackingState.PositionOnly)
                    {
                        // Retrieve the head joint and map it to image coordinates.
                        Joint joint = skeleton.Joints[JointType.Head];

                        DepthImagePoint depthPoint;
                        depthPoint = depthImageFrame.MapFromSkeletonPoint(joint.Position);

                        System.Windows.Point point = new System.Windows.Point((int)(image1.ActualWidth * depthPoint.X
                                                                                    / depthImageFrame.Width),
                                                                              (int)(image1.ActualHeight * depthPoint.Y
                                                                                    / depthImageFrame.Height));

                        textBlock1.Text = string.Format("X:{0:0.00} Y:{1:0.00} Z:{2:0.00}", point.X, point.Y, joint.Position.Z);

                        // Save the previous head position, and suppress jitter
                        // smaller than 10 pixels on either axis.
                        m_prevHeadX = m_headX;
                        m_prevHeadY = m_headY;
                        m_headX     = point.X;
                        m_headY     = point.Y;

                        if (Math.Abs(m_prevHeadX - point.X) < 10)
                        {
                            m_headX = m_prevHeadX;
                        }

                        if (Math.Abs(m_prevHeadY - point.Y) < 10)
                        {
                            m_headY = m_prevHeadY;
                        }

                        Canvas.SetLeft(ellipse1, point.X - ellipse1.Width / 2);
                        Canvas.SetTop(ellipse1, point.Y - ellipse1.Height / 2);

                        // Retrieve face information; the tracker is created
                        // lazily because construction can fail during shutdown.
                        if (this.faceTracker == null)
                        {
                            try
                            {
                                this.faceTracker = new FaceTracker(nui1);
                            }
                            catch (InvalidOperationException)
                            {
                                // During some shutdown scenarios the FaceTracker
                                // is unable to be instantiated.  Catch that exception
                                // and don't track a face.
                                this.faceTracker = null;
                            }
                        }

                        if (this.faceTracker != null)
                        {
                            FaceTrackFrame frame = this.faceTracker.Track(
                                colorImageFormat, colorImage, depthImageFormat, depthImage, skeleton);

                            if (frame.TrackSuccessful)
                            {
                                facePoints = frame.GetProjected3DShape();

                                textBlock2.Text = string.Format("noseX:{0:0.00} noseY:{1:0.00} ", facePoints[107].X, facePoints[107].Y);

                                m_noseX = facePoints[107].X;
                                m_noseY = facePoints[107].Y;

                                Canvas.SetLeft(ellipse2, facePoints[107].X - ellipse2.Width / 2);
                                Canvas.SetTop(ellipse2, facePoints[107].Y - ellipse2.Width / 2);
                            }
                        }

                        // Compute the line of sight (head -> nose) and determine
                        // which direction the user is looking.
                        lineOfSight.X1 = m_headX;
                        lineOfSight.Y1 = m_headY;
                        lineOfSight.X2 = m_noseX;
                        lineOfSight.Y2 = m_noseY;

                        Canvas.SetLeft(m_sightRect, m_headX - m_sightRect.Width / 2);
                        Canvas.SetTop(m_sightRect, m_headY);

                        CheckWhichSight(depthImageFrame, m_noseX, m_noseY);
                    }
                }
            }
            finally
            {
                if (colorImageFrame != null)
                {
                    colorImageFrame.Dispose();
                }

                if (depthImageFrame != null)
                {
                    depthImageFrame.Dispose();
                }

                if (skeletonFrame != null)
                {
                    skeletonFrame.Dispose();
                }
            }
        }
Exemplo n.º 36
0
        /// <summary>
        /// AllFramesReady handler: processes every other frame, copies the
        /// color/depth/skeleton data, maps the head joint to screen space,
        /// tracks the face to locate the nose, and updates the attention angle.
        /// </summary>
        private void KinectSensorOnAllFramesReady(object sender, AllFramesReadyEventArgs e)
        {
            // Toggle so that only every second event is processed.
            if (frameProccessed[1] == false)
            {
                frameProccessed[1] = true;
            }
            else
            {
                frameProccessed[1] = false;
                return;
            }
            ColorImageFrame colorImageFrame = null;
            DepthImageFrame depthImageFrame = null;
            SkeletonFrame   skeletonFrame   = null;

            try
            {
                colorImageFrame = e.OpenColorImageFrame();
                depthImageFrame = e.OpenDepthImageFrame();
                skeletonFrame   = e.OpenSkeletonFrame();

                if (colorImageFrame == null || depthImageFrame == null || skeletonFrame == null)
                {
                    return;
                }

                // Re-allocate buffers whenever a stream format changes.
                if (this.depthImageFormat != depthImageFrame.Format)
                {
                    this.depthImage       = null;
                    this.depthImageFormat = depthImageFrame.Format;
                }

                if (this.colorImageFormat != colorImageFrame.Format)
                {
                    this.colorImage       = null;
                    this.colorImageFormat = colorImageFrame.Format;
                }

                if (this.depthImage == null)
                {
                    this.depthImage = new short[depthImageFrame.PixelDataLength];
                }

                if (this.colorImage == null)
                {
                    this.colorImage = new byte[colorImageFrame.PixelDataLength];
                }

                if (this.skeletonData == null || this.skeletonData.Length != skeletonFrame.SkeletonArrayLength)
                {
                    this.skeletonData = new Skeleton[skeletonFrame.SkeletonArrayLength];
                }

                colorImageFrame.CopyPixelDataTo(this.colorImage);
                depthImageFrame.CopyPixelDataTo(this.depthImage);
                skeletonFrame.CopySkeletonDataTo(this.skeletonData);

                // BUG FIX: this block previously ran inside the finally clause,
                // *after* depthImageFrame.Dispose(), so Width/Height were read
                // from a disposed frame.  Do the work here while the frame is
                // still valid.
                if (skeletonData != null)
                {
                    // The mapper only depends on the sensor; create it once per
                    // event instead of once per tracked skeleton.
                    CoordinateMapper coordinateMapper = new CoordinateMapper(frontSensor);

                    foreach (Skeleton sd in skeletonData)
                    {
                        if (sd.TrackingState == SkeletonTrackingState.Tracked || sd.TrackingState == SkeletonTrackingState.PositionOnly)
                        {
                            Joint joint = sd.Joints[JointType.Head];

                            DepthImagePoint depthPoint;
                            depthPoint = coordinateMapper.MapSkeletonPointToDepthPoint(joint.Position, DepthImageFormat.Resolution320x240Fps30);

                            point = new System.Windows.Point((int)(frontSensor.ColorStream.FrameWidth * depthPoint.X
                                                                   / depthImageFrame.Width),
                                                             (int)(frontSensor.ColorStream.FrameHeight * depthPoint.Y
                                                                   / depthImageFrame.Height));

                            /* textBlock1.Text = string.Format("X:{0:0.00} Y:{1:0.00} Z:{2:0.00}",
                             *                              point.X,
                             *                              point.Y,
                             *                              joint.Position.Z); */

                            Canvas.SetLeft(headEllipse, point.X - headEllipse.Width / 2);
                            Canvas.SetTop(headEllipse, point.Y - headEllipse.Height / 2);

                            // Lazily create the face tracker; construction can
                            // fail during shutdown.
                            if (this.faceTracker == null)
                            {
                                try
                                {
                                    this.faceTracker = new FaceTracker(frontSensor);
                                }
                                catch (InvalidOperationException)
                                {
                                    // During some shutdown scenarios the FaceTrack
                                    // is unable to be instantiated.  Catch that exception
                                    // and don't track a face.
                                    this.faceTracker = null;
                                }
                            }
                            if (this.faceTracker != null)
                            {
                                FaceTrackFrame frame = this.faceTracker.Track(
                                    colorImageFormat, colorImage, depthImageFormat, depthImage, sd);

                                if (frame.TrackSuccessful)
                                {
                                    faceTriangles   = frame.GetTriangles();
                                    this.facePoints = frame.GetProjected3DShape();

                                    var faceModelPts = new List <Point>();
                                    var faceModel    = new List <FaceModelTriangle>();


                                    for (int i = 0; i < this.facePoints.Count; i++)
                                    {
                                        faceModelPts.Add(new Point(this.facePoints[i].X + 0.5f, this.facePoints[i].Y + 0.5f));
                                    }

                                    foreach (var t in faceTriangles)
                                    {
                                        var triangle = new FaceModelTriangle();
                                        triangle.P1 = faceModelPts[t.First];
                                        //triangle.P2 = faceModelPts[t.Second];
                                        //triangle.P3 = faceModelPts[t.Third];
                                        faceModel.Add(triangle);
                                    }

                                    // Track the nose (point 108) on screen.
                                    Canvas.SetLeft(noseEllipse, faceModel[108].P1.X - noseEllipse.Width / 2);
                                    Canvas.SetTop(noseEllipse, faceModel[108].P1.Y - noseEllipse.Height / 2);
                                    nosePoint = new Point(faceModel[108].P1.X, faceModel[108].P1.Y);
                                }
                            }
                        }
                    }
                }
            }
            finally
            {
                if (colorImageFrame != null)
                {
                    colorImageFrame.Dispose();
                }

                if (depthImageFrame != null)
                {
                    depthImageFrame.Dispose();
                }

                if (skeletonFrame != null)
                {
                    skeletonFrame.Dispose();
                }

                // Preserved from the original: the attention angle is refreshed on
                // every invocation, even when a frame was missing this tick.
                getAttentionAngle(nosePoint);
            }
        }
 /// <summary>
 /// Try to recognize the given face 3D points.
 /// </summary>
 /// <param name="face3DPoints">the collection of face 3D points</param>
 /// <returns>the name of the recognized face</returns>
 public string Recognize(EnumIndexableCollection<FeaturePoint, Vector3DF> face3DPoints)
 {
     var recognizedId = forest.Recognize(face3DPoints);
     Debug.WriteLine("Recognized ID = " + recognizedId);

     // Shorten the raw id before looking up the display name.
     var lookupKey = RecognitionUtility.Shorten(recognizedId);
     return nameLookup[lookupKey];
 }
Exemplo n.º 38
0
            /// <summary>
            /// Runs one face-tracking pass for the given skeleton and reports whether
            /// the detected facial expression matches the currently expected state
            /// (states[currentState]).
            /// </summary>
            /// <param name="kinectSensor">Sensor used to lazily create the FaceTracker.</param>
            /// <param name="colorImageFormat">Format of <paramref name="colorImage"/>.</param>
            /// <param name="colorImage">Raw color pixel data for this frame.</param>
            /// <param name="depthImageFormat">Format of <paramref name="depthImage"/>.</param>
            /// <param name="depthImage">Raw depth pixel data for this frame.</param>
            /// <param name="skeletonOfInterest">Skeleton whose face is tracked.</param>
            /// <returns>true when the tracked expression equals the expected state; otherwise false.</returns>
            private bool CheckFace(KinectSensor kinectSensor, ColorImageFormat colorImageFormat, byte[] colorImage, DepthImageFormat depthImageFormat, short[] depthImage, Skeleton skeletonOfInterest)
            {
                this.skeletonTrackingState = skeletonOfInterest.TrackingState;

                if (this.skeletonTrackingState != SkeletonTrackingState.Tracked)
                {
                    // nothing to do with an untracked skeleton.
                    return false;
                }

                if (this.faceTracker == null)
                {
                    try
                    {
                        this.faceTracker = new FaceTracker(kinectSensor);
                    }
                    catch (InvalidOperationException)
                    {
                        // During some shutdown scenarios the FaceTracker
                        // is unable to be instantiated.  Catch that exception
                        // and don't track a face.
                        Debug.WriteLine("AllFramesReady - creating a new FaceTracker threw an InvalidOperationException");
                        this.faceTracker = null;
                    }
                }

                if (this.faceTracker != null)
                {
                    FaceTrackFrame frame = this.faceTracker.Track(
                        colorImageFormat, colorImage, depthImageFormat, depthImage, skeletonOfInterest);

                    this.lastFaceTrackSucceeded = frame.TrackSuccessful;
                    if (this.lastFaceTrackSucceeded)
                    {
                        if (faceTriangles == null)
                        {
                            // only need to get this once.  It doesn't change.
                            faceTriangles = frame.GetTriangles();
                        }

                        // Animation Unit coefficients describing the current expression.
                        this.AUs = frame.GetAnimationUnitCoefficients();
                        var jawLowerer   = AUs[AnimationUnit.JawLower];
                        var browLower    = AUs[AnimationUnit.BrowLower];
                        var browRaiser   = AUs[AnimationUnit.BrowRaiser];
                        var lipDepressor = AUs[AnimationUnit.LipCornerDepressor];
                        var lipRaiser    = AUs[AnimationUnit.LipRaiser];
                        var lipStretcher = AUs[AnimationUnit.LipStretcher];

                        // NOTE(review): this truncating writer runs on every tracked frame and
                        // writes only the header; nothing in this method ever appends to the
                        // file, so it only ever holds this one line.  Kept as-is to preserve
                        // the observable side effect — confirm whether it can be removed.
                        using (System.IO.StreamWriter file = new System.IO.StreamWriter
                                                                 (@"C:\Users\Public\data.txt"))
                        {
                            file.WriteLine("FaceTrack Data, started recording at " + DateTime.Now.ToString("HH:mm:ss tt"));
                        }

                        //here is the algorithm to test different facial features

                        //BrowLower is messed up if you wear glasses, works if you don't wear 'em

                        string state = "";

                        // surprised: jaw clearly open with raised brows.
                        // BUG FIX: the original condition (jawLowerer < 0.25 || jawLowerer > 0.25)
                        // was true for every value except exactly 0.25; use |jawLowerer| > 0.25
                        // to mirror the "angry" check below.
                        if ((jawLowerer > 0.25 || jawLowerer < -0.25) && browLower < 0)
                        {
                            state = "surprised";
                        }
                        //smiling
                        if (lipStretcher > 0.4 || lipDepressor < 0)
                        {
                            state = "smiling";
                        }
                        //sad
                        if (browRaiser < 0 && lipDepressor > 0)
                        {
                            state = "sad";
                        }
                        //angry (checked last, so it wins over the earlier classifications)
                        if ((browLower > 0 && (jawLowerer > 0.25 || jawLowerer < -0.25)) ||
                            (browLower > 0 && lipDepressor > 0))
                        {
                            state = "angry";
                        }

                        this.facePoints = frame.GetProjected3DShape();

                        if (states[currentState] == state)
                        {
                            Trace.WriteLine("Yo!");
                            return true;
                        }
                    }
                }

                return false;
            }
            /// <summary>
            /// Updates the face tracking information for this skeleton and appends the
            /// recognized facial expressions to C:\Users\Public\data.txt.
            /// </summary>
            internal void OnFrameReady(KinectSensor kinectSensor, ColorImageFormat colorImageFormat, byte[] colorImage, DepthImageFormat depthImageFormat, short[] depthImage, Skeleton skeletonOfInterest)
            {
                this.skeletonTrackingState = skeletonOfInterest.TrackingState;

                if (this.skeletonTrackingState != SkeletonTrackingState.Tracked)
                {
                    // nothing to do with an untracked skeleton.
                    return;
                }

                if (this.faceTracker == null)
                {
                    try
                    {
                        this.faceTracker = new FaceTracker(kinectSensor);
                    }
                    catch (InvalidOperationException)
                    {
                        // During some shutdown scenarios the FaceTracker
                        // is unable to be instantiated.  Catch that exception
                        // and don't track a face.
                        Debug.WriteLine("AllFramesReady - creating a new FaceTracker threw an InvalidOperationException");
                        this.faceTracker = null;
                    }
                }

                if (this.faceTracker != null)
                {
                    FaceTrackFrame frame = this.faceTracker.Track(
                        colorImageFormat, colorImage, depthImageFormat, depthImage, skeletonOfInterest);

                    this.lastFaceTrackSucceeded = frame.TrackSuccessful;
                    if (this.lastFaceTrackSucceeded)
                    {
                        if (faceTriangles == null)
                        {
                            // only need to get this once.  It doesn't change.
                            faceTriangles = frame.GetTriangles();

                            // BUG FIX: this truncating writer used to run on EVERY frame,
                            // wiping the expression lines appended below roughly 30 times a
                            // second.  Write the session header only once instead.
                            using (System.IO.StreamWriter file = new System.IO.StreamWriter
                                (@"C:\Users\Public\data.txt"))
                            {
                                file.WriteLine("FaceTrack Data, started recording at " + DateTime.Now.ToString("HH:mm:ss tt"));
                            }
                        }

                        //getting the Animation Unit Coefficients
                        this.AUs = frame.GetAnimationUnitCoefficients();
                        var jawLowerer = AUs[AnimationUnit.JawLower];
                        var browLower = AUs[AnimationUnit.BrowLower];
                        var browRaiser = AUs[AnimationUnit.BrowRaiser];
                        var lipDepressor = AUs[AnimationUnit.LipCornerDepressor];
                        var lipRaiser = AUs[AnimationUnit.LipRaiser];
                        var lipStretcher = AUs[AnimationUnit.LipStretcher];

                        //here is the algorithm to test different facial features

                        //BrowLower is messed up if you wear glasses, works if you don't wear 'em

                        // surprised: jaw clearly open with raised brows.
                        // BUG FIX: the original (jawLowerer < 0.25 || jawLowerer > 0.25) was
                        // true for every value except exactly 0.25; use |jawLowerer| > 0.25
                        // to mirror the "angry" check below.
                        if ((jawLowerer > 0.25 || jawLowerer < -0.25) && browLower < 0)
                        {
                            System.Diagnostics.Debug.WriteLine("surprised");
                            using (System.IO.StreamWriter file = new System.IO.StreamWriter
                                (@"C:\Users\Public\data.txt", true))
                            {
                                file.WriteLine(DateTime.Now.ToString("HH:mm:ss tt") + ": surprised");
                                file.WriteLine("JawLowerer: " + jawLowerer);
                                file.WriteLine("BrowLowerer: " + browLower);
                            }
                        }
                        //smiling
                        if (lipStretcher > 0.4 || lipDepressor < 0)
                        {
                            System.Diagnostics.Debug.WriteLine("Smiling");
                            using (System.IO.StreamWriter file = new System.IO.StreamWriter
                                (@"C:\Users\Public\data.txt", true))
                            {
                                file.WriteLine(DateTime.Now.ToString("HH:mm:ss tt") + ": smiling");
                                file.WriteLine("LipStretcher: " + lipStretcher);
                            }
                        }
                        //kissing face
                        if (lipStretcher < -0.75)
                        {
                            System.Diagnostics.Debug.WriteLine("kissing face");
                            using (System.IO.StreamWriter file = new System.IO.StreamWriter
                                (@"C:\Users\Public\data.txt", true))
                            {
                                file.WriteLine(DateTime.Now.ToString("HH:mm:ss tt") + ": kissing face");
                                file.WriteLine("LipStretcher: " + lipStretcher);
                            }
                        }
                        //sad
                        if (browRaiser < 0 && lipDepressor > 0)
                        {
                            System.Diagnostics.Debug.WriteLine("sad");
                            using (System.IO.StreamWriter file = new System.IO.StreamWriter
                                (@"C:\Users\Public\data.txt", true))
                            {
                                file.WriteLine(DateTime.Now.ToString("HH:mm:ss tt") + ": sad");
                                file.WriteLine("LipCornerDepressor: " + lipDepressor);
                                file.WriteLine("OuterBrowRaiser: " + browRaiser);
                            }
                        }
                        //angry
                        if ((browLower > 0 && (jawLowerer > 0.25 || jawLowerer < -0.25)) ||
                            (browLower > 0 && lipDepressor > 0))
                        {
                            System.Diagnostics.Debug.WriteLine("angry");
                            using (System.IO.StreamWriter file = new System.IO.StreamWriter
                                (@"C:\Users\Public\data.txt", true))
                            {
                                file.WriteLine(DateTime.Now.ToString("HH:mm:ss tt") + ": angry");
                                file.WriteLine("LipCornerDepressor: " + lipDepressor);
                                file.WriteLine("BrowLowerer: " + browLower);
                                file.WriteLine("JawLowerer: " + jawLowerer);
                            }
                        }

                        this.facePoints = frame.GetProjected3DShape();
                    }
                }
            }
Exemplo n.º 40
0
        /// <summary>
        /// Handles the sensor's AllFramesReady event: copies the color/depth/skeleton
        /// data, projects the tracked head into image space, runs the face tracker
        /// and updates the head/nose ellipses and the line-of-sight elements.
        /// </summary>
        void nui_AllFramesReady(object sender, AllFramesReadyEventArgs e)
        {
            ColorImageFrame colorImageFrame = null;
            DepthImageFrame depthImageFrame = null;
            SkeletonFrame skeletonFrame = null;

            try
            {
                colorImageFrame = e.OpenColorImageFrame();
                depthImageFrame = e.OpenDepthImageFrame();
                skeletonFrame = e.OpenSkeletonFrame();

                if (colorImageFrame == null || depthImageFrame == null || skeletonFrame == null)
                {
                    return;
                }

                // Reallocate buffers when the stream formats change.
                if (this.depthImageFormat != depthImageFrame.Format)
                {
                    this.depthImage = null;
                    this.depthImageFormat = depthImageFrame.Format;
                }

                if (this.colorImageFormat != colorImageFrame.Format)
                {
                    this.colorImage = null;
                    this.colorImageFormat = colorImageFrame.Format;
                }

                if (this.depthImage == null)
                {
                    this.depthImage = new short[depthImageFrame.PixelDataLength];
                }

                if (this.colorImage == null)
                {
                    this.colorImage = new byte[colorImageFrame.PixelDataLength];
                }

                if (this.skeletonData == null || this.skeletonData.Length != skeletonFrame.SkeletonArrayLength)
                {
                    this.skeletonData = new Skeleton[skeletonFrame.SkeletonArrayLength];
                }

                colorImageFrame.CopyPixelDataTo(this.colorImage);
                depthImageFrame.CopyPixelDataTo(this.depthImage);
                skeletonFrame.CopySkeletonDataTo(this.skeletonData);

                // BUG FIX: this processing used to run AFTER the finally block had
                // already disposed depthImageFrame, so MapFromSkeletonPoint and
                // Width/Height were called on a disposed frame.  It now runs while
                // the frame is still alive, on exactly the same paths as before
                // (only after a successful copy); the finally block below still
                // disposes every frame on every path.
                foreach (Skeleton skeleton in skeletonData)
                {
                    if (skeleton.TrackingState == SkeletonTrackingState.Tracked || skeleton.TrackingState == SkeletonTrackingState.PositionOnly)
                    {
                        // Fetch the head joint and project it into image coordinates.
                        Joint joint = skeleton.Joints[JointType.Head];

                        DepthImagePoint depthPoint;
                        depthPoint = depthImageFrame.MapFromSkeletonPoint(joint.Position);

                        System.Windows.Point point = new System.Windows.Point((int)(image1.ActualWidth * depthPoint.X
                                                           / depthImageFrame.Width),
                                                (int)(image1.ActualHeight * depthPoint.Y
                                                           / depthImageFrame.Height));

                        textBlock1.Text = string.Format("X:{0:0.00} Y:{1:0.00} Z:{2:0.00}", point.X, point.Y, joint.Position.Z);

                        // Remember the previous head position and ignore jitter
                        // smaller than 10 pixels on either axis.
                        m_prevHeadX = m_headX;
                        m_prevHeadY = m_headY;
                        m_headX = point.X;
                        m_headY = point.Y;

                        if (Math.Abs(m_prevHeadX - point.X) < 10)
                        {
                            m_headX = m_prevHeadX;
                        }

                        if (Math.Abs(m_prevHeadY - point.Y) < 10)
                        {
                            m_headY = m_prevHeadY;
                        }

                        Canvas.SetLeft(ellipse1, point.X - ellipse1.Width / 2);
                        Canvas.SetTop(ellipse1, point.Y - ellipse1.Height / 2);

                        // Lazily create the face tracker.
                        if (this.faceTracker == null)
                        {
                            try
                            {
                                this.faceTracker = new FaceTracker(nui1);
                            }
                            catch (InvalidOperationException)
                            {
                                // During some shutdown scenarios the FaceTracker
                                // is unable to be instantiated.  Catch that exception
                                // and don't track a face.
                                this.faceTracker = null;
                            }
                        }

                        if (this.faceTracker != null)
                        {
                            FaceTrackFrame frame = this.faceTracker.Track(
                                colorImageFormat, colorImage, depthImageFormat, depthImage, skeleton);

                            if (frame.TrackSuccessful)
                            {
                                facePoints = frame.GetProjected3DShape();

                                // Point 107 is used as the nose reference point.
                                textBlock2.Text = string.Format("noseX:{0:0.00} noseY:{1:0.00} ", facePoints[107].X, facePoints[107].Y);

                                m_noseX = facePoints[107].X;
                                m_noseY = facePoints[107].Y;

                                Canvas.SetLeft(ellipse2, facePoints[107].X - ellipse2.Width / 2);
                                // BUG FIX: vertical centering used ellipse2.Width; use Height.
                                Canvas.SetTop(ellipse2, facePoints[107].Y - ellipse2.Height / 2);
                            }
                        }

                        // Update the head-to-nose line of sight.
                        lineOfSight.X1 = m_headX;
                        lineOfSight.Y1 = m_headY;
                        lineOfSight.X2 = m_noseX;
                        lineOfSight.Y2 = m_noseY;

                        Canvas.SetLeft(m_sightRect, m_headX - m_sightRect.Width / 2);
                        Canvas.SetTop(m_sightRect, m_headY);

                        CheckWhichSight(depthImageFrame, m_noseX, m_noseY);
                    }
                }
            }
            finally
            {
                if (colorImageFrame != null)
                {
                    colorImageFrame.Dispose();
                }

                if (depthImageFrame != null)
                {
                    depthImageFrame.Dispose();
                }

                if (skeletonFrame != null)
                {
                    skeletonFrame.Dispose();
                }
            }
        }
Exemplo n.º 41
0
        /// <summary>
        /// Handles AllFramesReady for the front sensor, processing every other frame:
        /// copies the frame data, projects the tracked head, runs the face tracker,
        /// updates the head/nose ellipses and finally computes the attention angle.
        /// </summary>
        private void KinectSensorOnAllFramesReady(object sender, AllFramesReadyEventArgs e)
        {
            // Toggle so only every second frame is processed.
            if (frameProccessed[1] == false)
            {
                frameProccessed[1] = true;
            }
            else
            {
                frameProccessed[1] = false;
                return;
            }
            ColorImageFrame colorImageFrame = null;
            DepthImageFrame depthImageFrame = null;
            SkeletonFrame skeletonFrame = null;

            try
            {
                colorImageFrame = e.OpenColorImageFrame();
                depthImageFrame = e.OpenDepthImageFrame();
                skeletonFrame = e.OpenSkeletonFrame();

                if (colorImageFrame == null || depthImageFrame == null || skeletonFrame == null)
                {
                    return;
                }

                // Reallocate buffers when the stream formats change.
                if (this.depthImageFormat != depthImageFrame.Format)
                {
                    this.depthImage = null;
                    this.depthImageFormat = depthImageFrame.Format;
                }

                if (this.colorImageFormat != colorImageFrame.Format)
                {
                    this.colorImage = null;
                    this.colorImageFormat = colorImageFrame.Format;
                }

                if (this.depthImage == null)
                {
                    this.depthImage = new short[depthImageFrame.PixelDataLength];
                }

                if (this.colorImage == null)
                {
                    this.colorImage = new byte[colorImageFrame.PixelDataLength];
                }

                if (this.skeletonData == null || this.skeletonData.Length != skeletonFrame.SkeletonArrayLength)
                {
                    this.skeletonData = new Skeleton[skeletonFrame.SkeletonArrayLength];
                }

                colorImageFrame.CopyPixelDataTo(this.colorImage);
                depthImageFrame.CopyPixelDataTo(this.depthImage);
                skeletonFrame.CopySkeletonDataTo(this.skeletonData);
            }
            finally
            {
                // BUG FIX: the frames used to be disposed at the top of this finally
                // block, after which depthImageFrame.Width/Height were read below on
                // a disposed frame.  Run the processing first, then dispose; the
                // frames are still released on every path.
                if (depthImageFrame != null && skeletonData != null)
                {
                    foreach (Skeleton sd in skeletonData)
                    {
                        if (sd.TrackingState == SkeletonTrackingState.Tracked || sd.TrackingState == SkeletonTrackingState.PositionOnly)
                        {
                            // Project the head joint into color-image coordinates.
                            Joint joint = sd.Joints[JointType.Head];

                            DepthImagePoint depthPoint;
                            CoordinateMapper coordinateMapper = new CoordinateMapper(frontSensor);
                            depthPoint = coordinateMapper.MapSkeletonPointToDepthPoint(joint.Position, DepthImageFormat.Resolution320x240Fps30);

                            point = new System.Windows.Point((int)(frontSensor.ColorStream.FrameWidth * depthPoint.X
                                                               / depthImageFrame.Width),
                                                    (int)(frontSensor.ColorStream.FrameHeight * depthPoint.Y
                                                               / depthImageFrame.Height));

                            Canvas.SetLeft(headEllipse, point.X - headEllipse.Width / 2);
                            Canvas.SetTop(headEllipse, point.Y - headEllipse.Height / 2);

                            // Lazily create the face tracker.
                            if (this.faceTracker == null)
                            {
                                try
                                {
                                    this.faceTracker = new FaceTracker(frontSensor);
                                }
                                catch (InvalidOperationException)
                                {
                                    // During some shutdown scenarios the FaceTracker
                                    // is unable to be instantiated.  Catch that exception
                                    // and don't track a face.
                                    this.faceTracker = null;
                                }
                            }
                            if (this.faceTracker != null)
                            {
                                FaceTrackFrame frame = this.faceTracker.Track(
                                    colorImageFormat, colorImage, depthImageFormat, depthImage, sd);

                                if (frame.TrackSuccessful)
                                {
                                    faceTriangles = frame.GetTriangles();
                                    this.facePoints = frame.GetProjected3DShape();

                                    var faceModelPts = new List<Point>();
                                    var faceModel = new List<FaceModelTriangle>();

                                    // Project every tracked face point into 2D.
                                    for (int i = 0; i < this.facePoints.Count; i++)
                                    {
                                        faceModelPts.Add(new Point(this.facePoints[i].X + 0.5f, this.facePoints[i].Y + 0.5f));
                                    }

                                    // Only P1 of each triangle is needed for the nose position.
                                    foreach (var t in faceTriangles)
                                    {
                                        var triangle = new FaceModelTriangle();
                                        triangle.P1 = faceModelPts[t.First];
                                        faceModel.Add(triangle);
                                    }

                                    // Point 108 is used as the nose reference point.
                                    Canvas.SetLeft(noseEllipse, faceModel[108].P1.X - noseEllipse.Width / 2);
                                    Canvas.SetTop(noseEllipse, faceModel[108].P1.Y - noseEllipse.Height / 2);
                                    nosePoint = new Point(faceModel[108].P1.X, faceModel[108].P1.Y);
                                }
                            }
                        }
                    }
                }

                if (colorImageFrame != null)
                {
                    colorImageFrame.Dispose();
                }

                if (depthImageFrame != null)
                {
                    depthImageFrame.Dispose();
                }

                if (skeletonFrame != null)
                {
                    skeletonFrame.Dispose();
                }

                getAttentionAngle(nosePoint);
            }
        }
Exemplo n.º 42
0
 /// <summary>
 /// Computes a scalar measure of the current facial state from the tracked face
 /// points; each concrete detector defines its own measure.  The returned value
 /// feeds calculateLogic's running average / deviation tracking.
 /// </summary>
 /// <param name="facePoints">projected face feature points from the face tracker</param>
 /// <returns>the scalar state value for this frame</returns>
 protected abstract double getState(EnumIndexableCollection<FeaturePoint, PointF> facePoints);
Exemplo n.º 43
0
            public void OnFrameReady(KinectSensor kinectSensor, ColorImageFormat colorImageFormat, byte[] colorImage, DepthImageFormat depthImageFormat, short[] depthImage, Skeleton skeletonOfInterest)
            {
                this.skeletonTrackingState = skeletonOfInterest.TrackingState;

                if (this.skeletonTrackingState != SkeletonTrackingState.Tracked)
                {
                    return;
                }

                if (this.faceTracker == null)
                {
                    try
                    {
                        this.faceTracker = new FaceTracker(kinectSensor);
                    }
                    catch (InvalidOperationException)
                    {
                        Debug.WriteLine("AllFramesReady - creating a new FaceTracker threw an InvalidOperationException");
                        this.faceTracker = null;
                    }
                }

                if (this.faceTracker != null)
                {
                    FaceTrackFrame frame = this.faceTracker.Track(
                        colorImageFormat, colorImage, depthImageFormat, depthImage, skeletonOfInterest);

                    this.lastFaceTrackSucceeded = frame.TrackSuccessful;
                    if (this.lastFaceTrackSucceeded)
                    {
                        if (faceTriangles == null)
                        {
                            faceTriangles = frame.GetTriangles();
                        }

                        this.facePoints = frame.GetProjected3DShape();

                        XmlDocument XmlDocKalibracja = new XmlDocument();
                        XmlDocKalibracja.Load(System.AppDomain.CurrentDomain.BaseDirectory + "\\settings.xml");
                        XmlNodeList elemListKalibracja = XmlDocKalibracja.GetElementsByTagName("options");
                        kalibracja = Convert.ToBoolean(Convert.ToInt32(elemListKalibracja[0].Attributes["kalibracja"].Value));

                        if (kalibracja == false)
                        {
                            kalibracjaCounter++;
                            if (kalibracjaCounter == 1)
                            {
                                Kalibracja okno = new Kalibracja();
                                okno.Show();
                            }

                            if (kalibracjaCounter > 150)
                            {
                                oldX = Convert.ToInt32(this.facePoints[23].X);
                                oldY = -Convert.ToInt32(this.facePoints[23].Y);

                                oldMouseX = System.Windows.Forms.Cursor.Position.X;
                                oldMouseY = System.Windows.Forms.Cursor.Position.Y;

                                aktualnyX  = oldX;
                                aktualnyY  = oldY;
                                kalibracja = true;

                                string newValue = "1";
                                kalibracjaCounter = 0;

                                XmlDocument xmlDoc = new XmlDocument();
                                xmlDoc.Load(System.AppDomain.CurrentDomain.BaseDirectory + "\\settings.xml");
                                XmlNode node = xmlDoc.SelectSingleNode("options");
                                node.Attributes[5].Value = Convert.ToString(newValue);
                                xmlDoc.Save(System.AppDomain.CurrentDomain.BaseDirectory + "\\settings.xml");
                            }
                        }

                        if (kalibracja == true)
                        {
                            try
                            {
                                //ustawienie gestów
                                XmlDocument XmlDoc = new XmlDocument();
                                XmlDoc.Load(System.AppDomain.CurrentDomain.BaseDirectory + "\\settings.xml");
                                XmlNodeList elemList = XmlDoc.GetElementsByTagName("options");
                                gest[0] = Convert.ToInt32(elemList[0].Attributes["lpm"].Value);

                                //ustawienie gestów
                                XmlDocument XmlDoc2 = new XmlDocument();
                                XmlDoc2.Load(System.AppDomain.CurrentDomain.BaseDirectory + "\\settings.xml");
                                XmlNodeList elemList2 = XmlDoc2.GetElementsByTagName("options");
                                gest[1] = Convert.ToInt32(elemList2[0].Attributes["ppm"].Value);

                                //ustawienie gestów
                                XmlDocument XmlDoc3 = new XmlDocument();
                                XmlDoc3.Load(System.AppDomain.CurrentDomain.BaseDirectory + "\\settings.xml");
                                XmlNodeList elemList3 = XmlDoc3.GetElementsByTagName("options");
                                gest[2] = Convert.ToInt32(elemList3[0].Attributes["scrollup"].Value);

                                //ustawienie gestów
                                XmlDocument XmlDoc4 = new XmlDocument();
                                XmlDoc4.Load(System.AppDomain.CurrentDomain.BaseDirectory + "\\settings.xml");
                                XmlNodeList elemList4 = XmlDoc4.GetElementsByTagName("options");
                                gest[3] = Convert.ToInt32(elemList4[0].Attributes["scrolldown"].Value);
                            }
                            catch
                            {
                                MessageBox.Show("Błąd przy odczycie pliku settings.xml");
                            }



                            newX      = Convert.ToInt32(this.facePoints[23].X);
                            newY      = -Convert.ToInt32(this.facePoints[23].Y);
                            stosunekX = Math.Abs(newX / oldX);
                            stosunekY = Math.Abs(newY / oldY);

                            //odczyt czułości z pliku settings.xml
                            try
                            {
                                XmlDocument XmlDoc = new XmlDocument();
                                XmlDoc.Load(System.AppDomain.CurrentDomain.BaseDirectory + "\\settings.xml");
                                XmlNodeList elemList = XmlDoc.GetElementsByTagName("options");
                                sensitive = Convert.ToInt32(elemList[0].Attributes["sensitive"].Value);
                            }
                            catch
                            {
                                sensitive = 80;
                            }

                            if (Math.Abs(this.facePoints[88].X - this.facePoints[89].X) < 28 && Math.Abs(oldX - newX) > 70 && Math.Abs(oldY - newY) > 70 && Convert.ToInt32(Math.Abs(oldMouseX + (sensitive * stosunekX - sensitive))) < 1980 || Convert.ToInt32(Math.Abs(oldMouseY + (sensitive * stosunekY - sensitive))) < 1200 && Convert.ToInt32(Math.Abs(oldMouseX + (sensitive * stosunekX))) >= 0 && Convert.ToInt32(Math.Abs(oldMouseY + (sensitive * stosunekY))) >= 0)
                            {
                                if (stosunekX > 1.03 && ruchY == false)
                                {
                                    System.Windows.Forms.Cursor.Position = new System.Drawing.Point(Convert.ToInt32(Math.Abs(oldMouseX + (sensitive * stosunekX - sensitive))), Convert.ToInt32(Math.Abs(oldMouseY)));
                                    oldMouseX = Convert.ToInt32(Math.Abs(oldMouseX + (sensitive * stosunekX - sensitive)));
                                    ruchX     = true;
                                }
                                if (stosunekX < 0.97 && ruchY == false)
                                {
                                    System.Windows.Forms.Cursor.Position = new System.Drawing.Point(Convert.ToInt32(Math.Abs(oldMouseX - (sensitive - (sensitive * stosunekX)))), Convert.ToInt32(Math.Abs(oldMouseY)));
                                    oldMouseX = Convert.ToInt32(Math.Abs(oldMouseX - (sensitive - (sensitive * stosunekX))));
                                    ruchX     = true;
                                }
                                if (stosunekY > 1.03 && ruchX == false)
                                {
                                    System.Windows.Forms.Cursor.Position = new System.Drawing.Point(Convert.ToInt32(Math.Abs(oldMouseX)), Convert.ToInt32(Math.Abs(oldMouseY + (sensitive * stosunekY - sensitive))));
                                    oldMouseY = Convert.ToInt32(Math.Abs(oldMouseY + (sensitive * stosunekY - sensitive)));
                                    ruchY     = true;
                                }
                                if (stosunekY < 0.97 && ruchX == false)
                                {
                                    System.Windows.Forms.Cursor.Position = new System.Drawing.Point(Convert.ToInt32(Math.Abs(oldMouseX)), Convert.ToInt32(Math.Abs(oldMouseY - (sensitive - (sensitive * stosunekY)))));
                                    oldMouseY = Convert.ToInt32(Math.Abs(oldMouseY - (sensitive - (sensitive * stosunekY))));
                                    ruchY     = true;
                                }
                            }


                            //stan spoczynku
                            if (Math.Abs(oldX - newX) < 70)
                            {
                                ruchX = false;
                            }
                            //stan spoczynku
                            if (Math.Abs(oldY - newY) < 70)
                            {
                                ruchY = false;
                            }


                            //PIERWSZY GEST
                            if (gest[0] == 0)
                            {
                                if (this.facePoints[40].Y - this.facePoints[87].Y > 12)
                                {
                                    clickCounter++;
                                    if (clickCounter % 10 == 0)
                                    {
                                        DoMouseClick(0);
                                    }
                                    if (clickCounter == 10000)
                                    {
                                        clickCounter = 0;
                                    }
                                }
                            }
                            if (gest[0] == 1)
                            {
                                if (this.facePoints[57].Y - this.facePoints[51].Y > 15)
                                {
                                    clickCounter++;
                                    if (clickCounter % 10 == 0)
                                    {
                                        DoMouseClick(1);
                                    }
                                    if (clickCounter == 10000)
                                    {
                                        clickCounter = 0;
                                    }
                                }
                            }
                            if (gest[0] == 2)
                            {
                                if (Math.Abs(this.facePoints[88].X - this.facePoints[89].X) > 28 && stosunekY > 1.03)
                                {
                                    clickCounter++;
                                    if (clickCounter % 2 == 0)
                                    {
                                        DoMouseScroll(1);
                                    }
                                    if (clickCounter == 10000)
                                    {
                                        clickCounter = 0;
                                    }
                                }
                            }

                            if (gest[0] == 3)
                            {
                                if (Math.Abs(this.facePoints[88].X - this.facePoints[89].X) > 28 && stosunekY < 0.97)
                                {
                                    clickCounter++;
                                    if (clickCounter % 2 == 0)
                                    {
                                        DoMouseScroll(0);
                                    }
                                    if (clickCounter == 10000)
                                    {
                                        clickCounter = 0;
                                    }
                                }
                            }
                            //DRUGI GEST
                            if (gest[1] == 0)
                            {
                                if (this.facePoints[40].Y - this.facePoints[87].Y > 12)
                                {
                                    clickCounter++;
                                    if (clickCounter % 10 == 0)
                                    {
                                        DoMouseClick(0);
                                    }
                                    if (clickCounter == 10000)
                                    {
                                        clickCounter = 0;
                                    }
                                }
                            }
                            if (gest[1] == 1)
                            {
                                if (this.facePoints[57].Y - this.facePoints[51].Y > 14)
                                {
                                    clickCounter++;
                                    if (clickCounter % 10 == 0)
                                    {
                                        DoMouseClick(1);
                                    }
                                    if (clickCounter == 10000)
                                    {
                                        clickCounter = 0;
                                    }
                                }
                            }
                            if (gest[1] == 2)
                            {
                                if (Math.Abs(this.facePoints[88].X - this.facePoints[89].X) > 28 && stosunekY > 1.03)
                                {
                                    clickCounter++;
                                    if (clickCounter % 2 == 0)
                                    {
                                        DoMouseScroll(1);
                                    }
                                    if (clickCounter == 10000)
                                    {
                                        clickCounter = 0;
                                    }
                                }
                            }
                            if (gest[1] == 3)
                            {
                                if (Math.Abs(this.facePoints[88].X - this.facePoints[89].X) > 28 && stosunekY < 0.97)
                                {
                                    clickCounter++;
                                    if (clickCounter % 2 == 0)
                                    {
                                        DoMouseScroll(0);
                                    }
                                    if (clickCounter == 10000)
                                    {
                                        clickCounter = 0;
                                    }
                                }
                            }
                            //TRZECI GEST
                            if (gest[2] == 0)
                            {
                                if (this.facePoints[40].Y - this.facePoints[87].Y > 12)
                                {
                                    clickCounter++;
                                    if (clickCounter % 10 == 0)
                                    {
                                        DoMouseClick(0);
                                    }
                                    if (clickCounter == 10000)
                                    {
                                        clickCounter = 0;
                                    }
                                }
                            }
                            if (gest[2] == 1)
                            {
                                if (this.facePoints[57].Y - this.facePoints[51].Y > 14)
                                {
                                    clickCounter++;
                                    if (clickCounter % 10 == 0)
                                    {
                                        DoMouseClick(1);
                                    }
                                    if (clickCounter == 10000)
                                    {
                                        clickCounter = 0;
                                    }
                                }
                            }
                            if (gest[2] == 2)
                            {
                                if (Math.Abs(this.facePoints[88].X - this.facePoints[89].X) > 28 && stosunekY > 1.03)
                                {
                                    clickCounter++;
                                    if (clickCounter % 2 == 0)
                                    {
                                        DoMouseScroll(1);
                                    }
                                    if (clickCounter == 10000)
                                    {
                                        clickCounter = 0;
                                    }
                                }
                            }
                            if (gest[2] == 3)
                            {
                                if (Math.Abs(this.facePoints[88].X - this.facePoints[89].X) > 28 && stosunekY < 0.97)
                                {
                                    clickCounter++;
                                    if (clickCounter % 2 == 0)
                                    {
                                        DoMouseScroll(0);
                                    }
                                    if (clickCounter == 10000)
                                    {
                                        clickCounter = 0;
                                    }
                                }
                            }
                            //CZWARTY GEST
                            if (gest[3] == 0)
                            {
                                if (this.facePoints[40].Y - this.facePoints[87].Y > 12)
                                {
                                    clickCounter++;
                                    if (clickCounter % 10 == 0)
                                    {
                                        DoMouseClick(0);
                                    }
                                    if (clickCounter == 10000)
                                    {
                                        clickCounter = 0;
                                    }
                                }
                            }
                            if (gest[3] == 1)
                            {
                                if (this.facePoints[57].Y - this.facePoints[51].Y > 14)
                                {
                                    clickCounter++;
                                    if (clickCounter % 10 == 0)
                                    {
                                        DoMouseClick(1);
                                    }
                                    if (clickCounter == 10000)
                                    {
                                        clickCounter = 0;
                                    }
                                }
                            }
                            if (gest[3] == 2)
                            {
                                if (Math.Abs(this.facePoints[88].X - this.facePoints[89].X) > 28 && stosunekY > 1.03)
                                {
                                    clickCounter++;
                                    if (clickCounter % 2 == 0)
                                    {
                                        DoMouseScroll(1);
                                    }
                                    if (clickCounter == 10000)
                                    {
                                        clickCounter = 0;
                                    }
                                }
                            }
                            if (gest[3] == 3)
                            {
                                if (Math.Abs(this.facePoints[88].X - this.facePoints[89].X) > 28 && stosunekY < 0.97)
                                {
                                    clickCounter++;
                                    if (clickCounter % 2 == 0)
                                    {
                                        DoMouseScroll(0);
                                    }
                                    if (clickCounter == 10000)
                                    {
                                        clickCounter = 0;
                                    }
                                }
                            }
                        }
                    }
                }
            }
Exemplo n.º 44
0
            /// <summary>
            /// Refreshes the face-tracking state for the given skeleton and publishes head
            /// rotation plus animation-unit coefficients into the dataToBeSent1..3 strings.
            /// </summary>
            internal void OnFrameReady(KinectSensor kinectSensor, ColorImageFormat colorImageFormat, byte[] colorImage, DepthImageFormat depthImageFormat, short[] depthImage, Skeleton skeletonOfInterest)
            {
                this.skeletonTrackingState = skeletonOfInterest.TrackingState;

                // Face tracking is only meaningful for a fully tracked skeleton; bail out otherwise.
                if (this.skeletonTrackingState != SkeletonTrackingState.Tracked)
                {
                    return;
                }

                if (this.faceTracker == null)
                {
                    try
                    {
                        this.faceTracker = new FaceTracker(kinectSensor);
                    }
                    catch (InvalidOperationException)
                    {
                        // FaceTracker construction can fail during sensor shutdown; leave the
                        // tracker null so we simply skip face tracking for this frame.
                        Debug.WriteLine("AllFramesReady - creating a new FaceTracker threw an InvalidOperationException");
                        this.faceTracker = null;
                    }
                }

                if (this.faceTracker == null)
                {
                    return;
                }

                FaceTrackFrame frame = this.faceTracker.Track(
                    colorImageFormat, colorImage, depthImageFormat, depthImage, skeletonOfInterest);

                this.lastFaceTrackSucceeded = frame.TrackSuccessful;
                if (!this.lastFaceTrackSucceeded)
                {
                    return;
                }

                // The triangle topology is constant, so it is fetched exactly once.
                if (faceTriangles == null)
                {
                    faceTriangles = frame.GetTriangles();
                }

                this.facePoints = frame.GetProjected3DShape();

                Vector3DF headRotation = frame.Rotation;
                var animationUnits = frame.GetAnimationUnitCoefficients();

                float jaw        = animationUnits[AnimationUnit.JawLower];
                float browLower  = animationUnits[AnimationUnit.BrowLower];
                float browRaiser = animationUnits[AnimationUnit.BrowRaiser];
                float lipCorner  = animationUnits[AnimationUnit.LipCornerDepressor];
                float lipRaise   = animationUnits[AnimationUnit.LipRaiser];
                float lipStretch = animationUnits[AnimationUnit.LipStretcher];
                float pitch      = headRotation.X;
                float yaw        = headRotation.Y;
                float roll       = headRotation.Z;

                // Same wire format as before: three space-separated "label: value" strings.
                dataToBeSent1 = "P: " + pitch.ToString() + " Y: " + yaw.ToString() + " R: " + roll.ToString();
                dataToBeSent2 = "JL: " + jaw.ToString() + " BL: " + browLower.ToString() + " BU: " + browRaiser.ToString();
                dataToBeSent3 = "lcd: " + lipCorner.ToString() + " LR: " + lipRaise.ToString() + " LS: " + lipStretch.ToString();
            }
Exemplo n.º 45
0
        /// <summary>
        /// Builds (or refreshes) one DX11 indexed triangle mesh per output slice from the
        /// tracked face shape: positions from Get3DShape(), smoothed per-vertex normals
        /// accumulated over the face triangle list.
        /// </summary>
        /// <param name="pin">Plugin IO pin (unused here; presumably required by the plugin interface — TODO confirm).</param>
        /// <param name="context">Render context that owns the device and geometry cache.</param>
        public void Update(IPluginIO pin, DX11RenderContext context)
        {
            for (int i = 0; i < this.FOutput.SliceCount; i++)
            {
                bool update = this.FInvalidate;
                DX11IndexedGeometry geom;
                // First time this context is seen for slice i: create the geometry resources.
                if (!this.FOutput[i].Contains(context))
                {
                    geom                = new DX11IndexedGeometry(context);
                    geom.InputLayout    = Pos3Norm3Tex2Vertex.Layout;
                    geom.VertexSize     = Pos3Norm3Tex2Vertex.VertexSize;
                    geom.HasBoundingBox = false;
                    geom.Topology       = PrimitiveTopology.TriangleList;

                    // Static index buffer: FACE_INDICES are int32, hence Length * 4 bytes.
                    var indexstream = new DataStream(KinectRuntime.FACE_INDICES.Length * 4, true, true);
                    indexstream.WriteRange(KinectRuntime.FACE_INDICES);
                    indexstream.Position = 0;

                    geom.IndexBuffer = new DX11IndexBuffer(context, indexstream, false, true);

                    geom.VerticesCount = this.FInFrame[i].GetProjected3DShape().Count;

                    // Dynamic, CPU-writable vertex buffer sized for the full face point set.
                    var vbuffer = new SlimDX.Direct3D11.Buffer(context.Device, new BufferDescription()
                    {
                        BindFlags      = BindFlags.VertexBuffer,
                        CpuAccessFlags = CpuAccessFlags.Write,
                        OptionFlags    = ResourceOptionFlags.None,
                        SizeInBytes    = geom.VerticesCount * geom.VertexSize,
                        Usage          = ResourceUsage.Dynamic
                    });
                    geom.VertexBuffer = vbuffer;

                    this.FOutput[i][context] = geom;
                    // Freshly created geometry must be filled regardless of FInvalidate.
                    update = true;
                }
                else
                {
                    geom = this.FOutput[i][context];
                }



                if (update)
                {
                    DataStream ds = geom.LockVertexBuffer();
                    ds.Position = 0;

                    // pp (projected 2D shape) is fetched but only p (3D shape) is written out below.
                    EnumIndexableCollection <FeaturePoint, PointF>    pp = this.FInFrame[i].GetProjected3DShape();
                    EnumIndexableCollection <FeaturePoint, Vector3DF> p  = this.FInFrame[i].Get3DShape();

                    Vector3[] norms = new Vector3[p.Count];

                    int[] inds     = KinectRuntime.FACE_INDICES;
                    int   tricount = inds.Length / 3;
                    //Compute smoothed normals
                    for (int j = 0; j < tricount; j++)
                    {
                        int i1 = inds[j * 3];
                        int i2 = inds[j * 3 + 1];
                        int i3 = inds[j * 3 + 2];

                        Vector3 v1 = p[i1].SlimVector();
                        Vector3 v2 = p[i2].SlimVector();
                        Vector3 v3 = p[i3].SlimVector();

                        Vector3 faceEdgeA = v2 - v1;
                        Vector3 faceEdgeB = v1 - v3;
                        Vector3 norm      = Vector3.Cross(faceEdgeB, faceEdgeA);

                        // Accumulate the (unnormalized) face normal on each corner vertex;
                        // normalization happens once per vertex in the write loop below.
                        norms[i1] += norm; norms[i2] += norm; norms[i3] += norm;
                    }


                    // Stream interleaved Pos/Normal/TexCoord vertices into the locked buffer.
                    for (int j = 0; j < geom.VerticesCount; j++)
                    {
                        Pos3Norm3Tex2Vertex vertex = new Pos3Norm3Tex2Vertex();
                        Vector3DF           v      = p[j];
                        vertex.Position  = new Vector3(v.X, v.Y, v.Z);
                        vertex.Normals   = Vector3.Normalize(norms[j]);
                        vertex.TexCoords = new Vector2(0, 0);
                        ds.Write <Pos3Norm3Tex2Vertex>(vertex);
                    }


                    geom.UnlockVertexBuffer();
                }
            }
        }
Exemplo n.º 46
0
 /// <summary>
 /// Returns the Euclidean distance between two tracked feature points,
 /// measured in the projected 2D point space produced by convertToPoint.
 /// </summary>
 public static double getLength(FeaturePoint a, FeaturePoint b, EnumIndexableCollection<FeaturePoint, PointF> facePoints)
 {
     Point first  = convertToPoint(a, facePoints);
     Point second = convertToPoint(b, facePoints);
     double dx = first.X - second.X;
     double dy = first.Y - second.Y;
     return Math.Sqrt(dx * dx + dy * dy);
 }
            /// <summary>
            /// Updates the face tracking information for this skeleton and translates the
            /// tracked head rotation into the shared Globals.YAW / Globals.PITCH direction
            /// flags (1 / 0 / -1) when Globals.cambio is false.
            /// </summary>
            internal void OnFrameReady(KinectSensor kinectSensor, ColorImageFormat colorImageFormat, byte[] colorImage, DepthImageFormat depthImageFormat, short[] depthImage, Skeleton skeletonOfInterest)
            {
                this.skeletonTrackingState = skeletonOfInterest.TrackingState;

                if (this.skeletonTrackingState != SkeletonTrackingState.Tracked)
                {
                    // nothing to do with an untracked skeleton.
                    return;
                }

                if (this.faceTracker == null)
                {
                    try
                    {
                        this.faceTracker = new FaceTracker(kinectSensor);
                    }
                    catch (InvalidOperationException)
                    {
                        // During some shutdown scenarios the FaceTracker is unable to be
                        // instantiated.  Catch that exception and don't track a face.
                        Debug.WriteLine("AllFramesReady - creating a new FaceTracker threw an InvalidOperationException");
                        this.faceTracker = null;
                    }
                }

                if (this.faceTracker != null)
                {
                    FaceTrackFrame frame = this.faceTracker.Track(
                        colorImageFormat, colorImage, depthImageFormat, depthImage, skeletonOfInterest);

                    this.lastFaceTrackSucceeded = frame.TrackSuccessful;
                    if (this.lastFaceTrackSucceeded)
                    {
                        if (faceTriangles == null)
                        {
                            // only need to get this once.  It doesn't change.
                            faceTriangles = frame.GetTriangles();
                        }

                        this.facePoints = frame.GetProjected3DShape();

                        if (Globals.cambio == false)
                        {
                            int Pitch = (int)frame.Rotation.X;
                            int Yaw   = (int)frame.Rotation.Y;

                            // FIX: the original chain of independent ifs used strict
                            // comparisons on both sides of each band (Yaw > 20, Yaw < -20,
                            // -20 < Yaw < 20), so the exact boundary values Yaw == ±20,
                            // Pitch == 0 and Pitch == -10 matched no branch and left the
                            // previous direction flag stale.  An else-if chain classifies
                            // every value exactly once; boundary values fall into the
                            // neutral (0) band.
                            if (Yaw > 20)          // head turned left
                            {
                                Globals.YAW = 1;
                            }
                            else if (Yaw < -20)    // head turned right
                            {
                                Globals.YAW = -1;
                            }
                            else                   // dead zone: facing forward
                            {
                                Globals.YAW = 0;
                            }

                            if (Pitch > 0)         // head tilted up
                            {
                                Globals.PITCH = 1;
                            }
                            else if (Pitch < -10)  // head tilted down
                            {
                                Globals.PITCH = -1;
                            }
                            else                   // dead zone: level
                            {
                                Globals.PITCH = 0;
                            }
                        }
                    }
                }
            }
            /// <summary>
            /// Updates the face tracking information for this skeleton
            /// </summary>
            //public static System.IO.Ports.SerialPort serialPort1;
            //private System.IO.Ports.SerialPort serialPort1;
            //serialPort1 = new SerialPort();
            internal void OnFrameReady(KinectSensor kinectSensor, ColorImageFormat colorImageFormat, byte[] colorImage, DepthImageFormat depthImageFormat, short[] depthImage, Skeleton skeletonOfInterest)
            {
                this.skeletonTrackingState = skeletonOfInterest.TrackingState;

                if (this.skeletonTrackingState != SkeletonTrackingState.Tracked)
                {
                    // nothing to do with an untracked skeleton.
                    return;
                }

                if (this.faceTracker == null)
                {
                    try
                    {
                        this.faceTracker = new FaceTracker(kinectSensor);
                    }
                    catch (InvalidOperationException)
                    {
                        // During some shutdown scenarios the FaceTracker
                        // is unable to be instantiated.  Catch that exception
                        // and don't track a face.
                        Debug.WriteLine("AllFramesReady - creating a new FaceTracker threw an InvalidOperationException");
                        this.faceTracker = null;
                    }
                }

                /*System.ComponentModel.IContainer components = new System.ComponentModel.Container();
                serialPort1 = new System.IO.Ports.SerialPort(components); // Creating the new object.
                serialPort1.PortName = "COM3"; //+ numCom.Value.ToString(); // Setting what port number.
                serialPort1.BaudRate = 9600; // Setting baudrate.
                serialPort1.DtrEnable = true; // Enable the Data Terminal Ready
                serialPort1.Open(); // Open the port for use.*/

                if (this.faceTracker != null)
                {
                    FaceTrackFrame frame = this.faceTracker.Track(
                        colorImageFormat, colorImage, depthImageFormat, depthImage, skeletonOfInterest);

                    this.lastFaceTrackSucceeded = frame.TrackSuccessful;
                    if (this.lastFaceTrackSucceeded)
                    {
                        if (faceTriangles == null)
                        {
                            // only need to get this once.  It doesn't change.
                            faceTriangles = frame.GetTriangles();
                        }

                        this.facePoints = frame.GetProjected3DShape();
                        rotation = frame.Rotation;
                        float yRotation = frame.Rotation.Y;
                        Debug.WriteLine(yRotation);

                        if ((yRotation <= -25))
                        {
                            Debug.WriteLine("a");
                            //port.Write("a");
                            if (!serialPort1.IsOpen)
                            {
                                try
                                {
                                    serialPort1.Open();
                                    serialPort1.Write("a");
                                    serialPort1.Close();
                                }
                                catch
                                {
                                    MessageBox.Show("There was an error. Please make sure that the correct port was selected, and the device, plugged in.");
                                }
                            }
                            //serialPort1.Write("1");

                        }

                        if ((yRotation > -25) && (yRotation < -10))
                        {
                            Debug.WriteLine("b");
                            //port.Write("a");
                            if (!serialPort1.IsOpen)
                            {
                                try
                                {
                                    serialPort1.Open();
                                    serialPort1.Write("b");
                                    serialPort1.Close();
                                }
                                catch
                                {
                                    MessageBox.Show("There was an error. Please make sure that the correct port was selected, and the device, plugged in.");
                                }
                            }
                            //serialPort1.Write("1");

                        }

                        if ((yRotation >= -10) && (yRotation < 10))
                        {
                            Debug.WriteLine("c");
                            //port.Write("a");
                            if (!serialPort1.IsOpen)
                            {
                                try
                                {
                                    serialPort1.Open();
                                    serialPort1.Write("c");
                                    serialPort1.Close();
                                }
                                catch
                                {
                                    MessageBox.Show("There was an error. Please make sure that the correct port was selected, and the device, plugged in.");
                                }
                            }
                            //serialPort1.Write("1");

                        }

                        if ((yRotation >= 10) && (yRotation < 20))
                        {
                            Debug.WriteLine("d");
                            //port.Write("a");
                            if (!serialPort1.IsOpen)
                            {
                                try
                                {
                                    serialPort1.Open();
                                    serialPort1.Write("d");
                                    serialPort1.Close();
                                }
                                catch
                                {
                                    MessageBox.Show("There was an error. Please make sure that the correct port was selected, and the device, plugged in.");
                                }
                            }
                            //serialPort1.Write("1");

                        }

                        if ((yRotation >= 20) && (yRotation < 30))
                        {
                            Debug.WriteLine("e");
                            //port.Write("a");
                            if (!serialPort1.IsOpen)
                            {
                                try
                                {
                                    serialPort1.Open();
                                    serialPort1.Write("e");
                                    serialPort1.Close();
                                }
                                catch
                                {
                                    MessageBox.Show("There was an error. Please make sure that the correct port was selected, and the device, plugged in.");
                                }
                            }
                            //serialPort1.Write("1");

                        }

                        if ((yRotation >= 30))
                        {
                            Debug.WriteLine("f");
                            //port.Write("a");
                            if (!serialPort1.IsOpen)
                            {
                                try
                                {
                                    serialPort1.Open();
                                    serialPort1.Write("f");
                                    serialPort1.Close();
                                }
                                catch
                                {
                                    MessageBox.Show("There was an error. Please make sure that the correct port was selected, and the device, plugged in.");
                                }
                            }
                            //serialPort1.Write("1");

                        }

                    }
                }
            }
        /// <summary>
        /// Prepares the 3d face points and calls the ERTreesClassifier.Predict method.
        /// Flattens every face point into three consecutive features (x, y, z) of a
        /// single-row sample matrix for the forest.
        /// </summary>
        /// <param name="forest">ERTreesClassifier object</param>
        /// <param name="face3DPoints">3d points of the face</param>
        /// <returns>ID of the face</returns>
        static public int Recognize(this ERTreesClassifier forest, EnumIndexableCollection<FeaturePoint, Vector3DF> face3DPoints)
        {
            var pointCount = face3DPoints.Count;

            // One sample row; three features (x, y, z) per face point.
            // Use the Count property consistently (the original mixed the
            // property with the LINQ Count() extension).
            var data = new Matrix<float>(1, pointCount * 3);

            for (int j = 0; j < pointCount; j++)
            {
                data[0, j * 3]     = face3DPoints[j].X;
                data[0, j * 3 + 1] = face3DPoints[j].Y;
                data[0, j * 3 + 2] = face3DPoints[j].Z;
            }

            return (int)forest.Predict(data);
        }
            /// <summary>
            /// Updates the face tracking information for this skeleton.
            /// Sets isFaceTracked and caches the projected and 3D face shapes
            /// when tracking succeeds.
            /// </summary>
            internal void OnFrameReady(KinectSensor kinectSensor, ColorImageFormat colorImageFormat, byte[] colorImage, DepthImageFormat depthImageFormat, short[] depthImage, Skeleton skeletonOfInterest)
            {
                this.skeletonTrackingState = skeletonOfInterest.TrackingState;

                // An untracked skeleton carries no usable face data; bail out early.
                if (this.skeletonTrackingState != SkeletonTrackingState.Tracked)
                {
                    return;
                }

                // Lazily create the face tracker on first use.
                if (this.faceTracker == null)
                {
                    try
                    {
                        this.faceTracker = new FaceTracker(kinectSensor);
                    }
                    catch (InvalidOperationException)
                    {
                        // During some shutdown scenarios the FaceTracker cannot be
                        // instantiated; swallow the exception and skip face tracking.
                        Debug.WriteLine("AllFramesReady - creating a new FaceTracker threw an InvalidOperationException");
                        this.faceTracker = null;
                    }
                }

                isFaceTracked = false;
                if (this.faceTracker == null)
                {
                    return;
                }

                FaceTrackFrame trackFrame = this.faceTracker.Track(colorImageFormat, colorImage, depthImageFormat, depthImage, skeletonOfInterest);

                this.lastFaceTrackSucceeded = trackFrame.TrackSuccessful;
                if (this.lastFaceTrackSucceeded)
                {
                    // The triangle topology never changes, so fetch it only once.
                    if (faceTriangles == null)
                    {
                        faceTriangles = trackFrame.GetTriangles();
                    }

                    isFaceTracked = true;
                    this.facePoints = trackFrame.GetProjected3DShape();
                    this.facePoints3D = trackFrame.Get3DShape(); // locally added (translated from original comment)
                }
            }
Exemplo n.º 51
0
        /// <summary>
        /// VVVV plugin evaluation: reads face track frames from the input pin and
        /// writes tracking success, head position/rotation, projected 2D points,
        /// 3D points and smoothed per-vertex normals to the output pins.
        /// </summary>
        /// <param name="SpreadMax">Maximum spread count supplied by the host.</param>
        public void Evaluate(int SpreadMax)
        {
            //Output static indices all the time
            if (this.first)
            {
                this.FOutIndices.AssignFrom(KinectRuntime.FACE_INDICES);
                this.first = false;
            }

            if (this.FInFrame.IsConnected)
            {
                if (this.FInFrame.IsChanged)
                {
                    this.FOutOK.SliceCount       = FInFrame.SliceCount;
                    this.FOutPosition.SliceCount = FInFrame.SliceCount;
                    this.FOutRotation.SliceCount = FInFrame.SliceCount;
                    this.FOutPts.SliceCount      = FInFrame.SliceCount;
                    this.FOutPPTs.SliceCount     = FInFrame.SliceCount;
                    // BUGFIX: FOutNormals was never sized together with its sibling
                    // pins, so indexing FOutNormals[cnt] below could run past its
                    // slice count.
                    this.FOutNormals.SliceCount  = FInFrame.SliceCount;

                    for (int cnt = 0; cnt < this.FInFrame.SliceCount; cnt++)
                    {
                        FaceTrackFrame frame = this.FInFrame[cnt];
                        this.FOutOK[cnt]       = frame.TrackSuccessful;
                        this.FOutPosition[cnt] = new Vector3(frame.Translation.X, frame.Translation.Y, frame.Translation.Z);
                        // Convert degrees to cycles for VVVV's rotation convention.
                        this.FOutRotation[cnt] = new Vector3(frame.Rotation.X, frame.Rotation.Y, frame.Rotation.Z) * (float)VMath.DegToCyc;

                        EnumIndexableCollection<FeaturePoint, PointF>    pp = frame.GetProjected3DShape();
                        EnumIndexableCollection<FeaturePoint, Vector3DF> p  = frame.Get3DShape();

                        this.FOutPPTs[cnt].SliceCount    = pp.Count;
                        this.FOutPts[cnt].SliceCount     = p.Count;
                        this.FOutNormals[cnt].SliceCount = p.Count;

                        // Compute smoothed normals: accumulate each triangle's face
                        // normal onto its three vertices, then normalize per vertex.
                        Vector3[] norms    = new Vector3[p.Count];
                        int[]     inds     = KinectRuntime.FACE_INDICES;
                        int       tricount = inds.Length / 3;
                        for (int j = 0; j < tricount; j++)
                        {
                            int i1 = inds[j * 3];
                            int i2 = inds[j * 3 + 1];
                            int i3 = inds[j * 3 + 2];

                            Vector3 v1 = p[i1].SlimVector();
                            Vector3 v2 = p[i2].SlimVector();
                            Vector3 v3 = p[i3].SlimVector();

                            Vector3 faceEdgeA = v2 - v1;
                            Vector3 faceEdgeB = v1 - v3;
                            Vector3 norm      = Vector3.Cross(faceEdgeB, faceEdgeA);

                            norms[i1] += norm;
                            norms[i2] += norm;
                            norms[i3] += norm;
                        }

                        for (int i = 0; i < pp.Count; i++)
                        {
                            this.FOutPPTs[cnt][i]    = new Vector2(pp[i].X, pp[i].Y);
                            this.FOutPts[cnt][i]     = new Vector3(p[i].X, p[i].Y, p[i].Z);
                            this.FOutNormals[cnt][i] = Vector3.Normalize(norms[i]);
                        }
                    }
                }
            }
            else
            {
                // Input disconnected: clear every output pin (FOutNormals was
                // previously missed here, leaving stale normals on the pin).
                this.FOutPosition.SliceCount = 0;
                this.FOutPPTs.SliceCount     = 0;
                this.FOutPts.SliceCount      = 0;
                this.FOutNormals.SliceCount  = 0;
                this.FOutRotation.SliceCount = 0;
                this.FOutOK.SliceCount       = 0;
            }
        }
Exemplo n.º 52
0
 /// <summary>
 /// Converts a tracked feature point's projected 2D location into a Point.
 /// </summary>
 /// <param name="p">Feature point to look up.</param>
 /// <param name="facePoints">Projected 2D face points indexed by feature point.</param>
 /// <returns>The feature point's location as a Point.</returns>
 public static Point convertToPoint(FeaturePoint p,  EnumIndexableCollection<FeaturePoint, PointF> facePoints)
 {
     PointF location = facePoints[p];
     return new Point(location.X, location.Y);
 }
Exemplo n.º 53
0
        /// <summary>
        /// Caches the results of a face tracking frame: the triangle topology
        /// (fetched once), the 3D shape, the face rectangle, and the head pose.
        /// Does nothing when the frame's tracking failed.
        /// </summary>
        /// <param name="frame">Face tracking frame to read from.</param>
        protected void UpdateFrame(FaceTrackFrame frame)
        {
            this.lastFaceTrackSucceeded = frame.TrackSuccessful;
            if (!this.lastFaceTrackSucceeded)
            {
                return;
            }

            // The mesh topology is static, so it is fetched only once.
            if (faceTriangles == null)
            {
                faceTriangles = frame.GetTriangles();
            }

            this.facePoints      = frame.Get3DShape();
            this.FaceRect        = frame.FaceRect;
            this.FaceTranslation = frame.Translation;
            this.FaceRotation    = frame.Rotation;
        }
Exemplo n.º 54
0
 /// <summary>
 /// Queues a face animation-unit sample for later processing. The sample is
 /// dropped when capture is inactive or no coefficients were supplied.
 /// </summary>
 void EnqueueFaceAnimationMessage(int sensorId, int user, EnumIndexableCollection<AnimationUnit, float> c, double time)
 {
     if (!capturing || c == null)
     {
         return;
     }

     trackingInformationQueue.Add(new FaceAnimationTrackingInformation(sensorId, user, c, time));
 }
Exemplo n.º 55
0
        /// <summary>
        /// Updates the 3D face mesh from a tracking frame: vertex positions,
        /// texture coordinates and (on the first call) the triangle indices.
        /// Publishes the mesh only when every projected vertex lies inside the
        /// central region of the color image.
        /// </summary>
        /// <param name="faceTrackingFrame">Frame with the tracked face shape.</param>
        private void UpdateMesh(FaceTrackFrame faceTrackingFrame)
        {
            bool faceInCentre = true;

            EnumIndexableCollection<FeaturePoint, Vector3DF> shapePoints          = faceTrackingFrame.Get3DShape();
            EnumIndexableCollection<FeaturePoint, PointF>    projectedShapePoints = faceTrackingFrame.GetProjected3DShape();

            if (this.triangleIndices == null)
            {
                // Update stuff that doesn't change from frame to frame.
                this.triangleIndices = faceTrackingFrame.GetTriangles();
                var indices = new Int32Collection(this.triangleIndices.Length * 3);
                foreach (FaceTriangle triangle in this.triangleIndices)
                {
                    // Reversed vertex order (Third/Second/First) flips the winding
                    // so the triangles face the viewer in WPF's coordinate system.
                    indices.Add(triangle.Third);
                    indices.Add(triangle.Second);
                    indices.Add(triangle.First);
                }

                this.theGeometry.TriangleIndices = indices;
                this.theGeometry.Normals         = null; // Let WPF3D calculate these.

                this.theGeometry.Positions          = new Point3DCollection(shapePoints.Count);
                this.theGeometry.TextureCoordinates = new PointCollection(projectedShapePoints.Count);
                for (int pointIndex = 0; pointIndex < shapePoints.Count; pointIndex++)
                {
                    this.theGeometry.Positions.Add(new Point3D());
                    this.theGeometry.TextureCoordinates.Add(new Point());
                }
            }

            // Hoist the bitmap dimensions out of the per-vertex loop; the original
            // recomputed these casts up to three times for every vertex.
            double pixelWidth  = this.colorImageWritableBitmap.PixelWidth;
            double pixelHeight = this.colorImageWritableBitmap.PixelHeight;

            // Update the 3D model's vertices and texture coordinates.
            for (int pointIndex = 0; pointIndex < shapePoints.Count; pointIndex++)
            {
                Vector3DF point = shapePoints[pointIndex];
                this.theGeometry.Positions[pointIndex] = new Point3D(point.X, point.Y, -point.Z);

                PointF projected = projectedShapePoints[pointIndex];

                double u = projected.X / pixelWidth;
                double v = projected.Y / pixelHeight;
                this.theGeometry.TextureCoordinates[pointIndex] = new Point(u, v);

                // Any vertex too far right or down means the face is off-centre.
                if (u > .6 || v > .75)
                {
                    faceInCentre = false;
                }
            }

            if (faceInCentre)
            {
                FaceMesh tempMeshData = new FaceMesh();
                tempMeshData.FaceViewport = viewport3d;
                FaceMeshData = tempMeshData;
            }
        }
Exemplo n.º 56
0
            /// <summary>
            /// Updates the face tracking information for this skeleton.
            /// Also performs one-shot face recognition on the color image and
            /// records the recognized tag against the skeleton.
            /// </summary>
            internal void OnFrameReady(KinectSensor kinectSensor, ColorImageFormat colorImageFormat, byte[] colorImage, DepthImageFormat depthImageFormat, short[] depthImage, Skeleton skeletonOfInterest)
            {
                this.skeletonTrackingState = skeletonOfInterest.TrackingState;

                if (this.skeletonTrackingState != SkeletonTrackingState.Tracked)
                {
                    // if the current skeleton is not tracked, track it now
                    //kinectSensor.SkeletonStream.ChooseSkeletons(skeletonOfInterest.TrackingId);
                }

                // Lazily create the face tracker on first use.
                if (this.faceTracker == null)
                {
                    try
                    {
                        this.faceTracker = new FaceTracker(kinectSensor);
                    }
                    catch (InvalidOperationException)
                    {
                        // During some shutdown scenarios the FaceTracker
                        // is unable to be instantiated.  Catch that exception
                        // and don't track a face.
                        Debug.WriteLine("AllFramesReady - creating a new FaceTracker threw an InvalidOperationException");
                        this.faceTracker = null;
                    }
                }

                if (this.faceTracker != null)
                {
                    FaceTrackFrame frame = this.faceTracker.Track(
                        colorImageFormat, colorImage, depthImageFormat, depthImage, skeletonOfInterest);

                    this.lastFaceTrackSucceeded = frame.TrackSuccessful;
                    if (this.lastFaceTrackSucceeded)
                    {
                        if (faceTriangles == null)
                        {
                            // only need to get this once.  It doesn't change.
                            faceTriangles = frame.GetTriangles();
                        }

                        if (faceTag == null)
                        {
                            // Run face recognition once per tracked skeleton.
                            faceTag = new FaceRecognizer().getFaceTag(this.colorImageBmp);

                            if (faceTag != null)
                            {
                                Global.StatusBarText.Text = "Found " + faceTag + "!";
                                // The indexer adds or overwrites in one lookup,
                                // replacing the ContainsKey + Add/assign pair.
                                Global.trackedPeople[skeletonOfInterest] = faceTag;
                            }
                        }

                        this.facePoints = frame.GetProjected3DShape();
                        this.faceRect = frame.FaceRect;
                    }
                }
            }
Exemplo n.º 57
0
        /// <summary>
        /// Periodic face tracking loop: waits <paramref name="dueTime"/>, then on
        /// every <paramref name="interval"/> tracks the face of the gesture
        /// manager's skeleton, publishing projected points and a mood estimate.
        /// </summary>
        /// <param name="dueTime">Initial delay before the first iteration.</param>
        /// <param name="interval">Delay between iterations; zero disables the loop.</param>
        /// <param name="token">Cancels the loop (and its delays).</param>
        private async Task FaceTrackingAsync(TimeSpan dueTime, TimeSpan interval, CancellationToken token)
        {
            if (interval.TotalMilliseconds == 0)
            {
                return;
            }

            // Initial wait time before we begin the periodic loop.
            if (dueTime > TimeSpan.Zero)
            {
                await Task.Delay(dueTime, token);
            }

            DateTime LocalTimestamp = Timestamp;

            // BUGFIX: the tracker must be disposed even when the loop exits via
            // cancellation (Task.Delay throws OperationCanceledException); the
            // original's trailing Dispose() was unreachable in that case.
            using (FaceTracker tracker = new FaceTracker(Sensor))
            {
                // Repeat this loop until cancelled.
                while (!token.IsCancellationRequested)
                {
                    // Skip if we already worked with the given data.
                    if (Timestamp == LocalTimestamp)
                    {
                        await Task.Delay(interval, token);

                        continue;
                    }

                    // Timestamp data
                    LocalTimestamp = Timestamp;
                    FaceTrackWatch.Again();

                    // Do Job
                    try {
                        CopyColorData = true;
                        CopySkeletons = true;
                        FPoints       = null;
                        Mood          = 0;
                        if (null != GestureManager && null != GestureManager.Skeleton)
                        {
                            FaceTrackFrame frame = tracker.Track(ColorFormat, ColorData, DepthFormat, DepthData, GestureManager.Skeleton);
                            if (frame.TrackSuccessful)
                            {
                                // Only once.  It doesn't change.
                                if (FTriangles == null)
                                {
                                    FTriangles = frame.GetTriangles();
                                }
                                FPoints = frame.GetProjected3DShape();
                                Mood    = frame.GetAnimationUnitCoefficients()[AnimationUnit.LipCornerDepressor];
                                WSRProfileManager.GetInstance().UpdateMood(Mood);
                            }
                        }
                    }
                    catch (Exception ex) {
                        WSRConfig.GetInstance().logError("FACE", ex);
                    }
                    FaceTrackWatch.Stop();

                    // Wait to repeat again.
                    if (interval > TimeSpan.Zero)
                    {
                        await Task.Delay(interval, token);
                    }
                }
            }
        }
Exemplo n.º 58
0
 /// <summary>
 /// Computes the mouth-width state: the distance between the outside mouth
 /// corners normalised by the distance between the upper-eyelid reference
 /// points (a face-size invariant measure).
 /// </summary>
 protected override double getState(EnumIndexableCollection<FeaturePoint, PointF> facePoints)
 {
     double eyelidSpan = GameUtils.getLength(FeaturePoint.AboveMidUpperLeftEyelid, FeaturePoint.AboveMidUpperRightEyelid, facePoints);
     double mouthSpan  = GameUtils.getLength(FeaturePoint.OutsideLeftCornerMouth, FeaturePoint.OutsideRightCornerMouth, facePoints);
     return mouthSpan / eyelidSpan;
 }
            /// <summary>
            /// Updates the face tracking information for this skeleton: lazily
            /// creates the tracker, runs one track pass, and caches the triangle
            /// topology and projected face shape on success.
            /// </summary>
            internal void OnFrameReady(KinectSensor kinectSensor, ColorImageFormat colorImageFormat, byte[] colorImage, DepthImageFormat depthImageFormat, short[] depthImage, Skeleton skeletonOfInterest)
            {
                this.skeletonTrackingState = skeletonOfInterest.TrackingState;

                // Nothing to do for an untracked skeleton.
                if (this.skeletonTrackingState != SkeletonTrackingState.Tracked)
                {
                    return;
                }

                // Lazily create the face tracker on first use.
                if (this.faceTracker == null)
                {
                    try
                    {
                        this.faceTracker = new FaceTracker(kinectSensor);
                    }
                    catch (InvalidOperationException)
                    {
                        // The tracker cannot always be created during shutdown;
                        // in that case simply skip face tracking.
                        Debug.WriteLine("AllFramesReady - creating a new FaceTracker threw an InvalidOperationException");
                        this.faceTracker = null;
                    }
                }

                if (this.faceTracker == null)
                {
                    return;
                }

                FaceTrackFrame trackFrame = this.faceTracker.Track(colorImageFormat, colorImage, depthImageFormat, depthImage, skeletonOfInterest);

                this.lastFaceTrackSucceeded = trackFrame.TrackSuccessful;
                if (this.lastFaceTrackSucceeded)
                {
                    // The triangle topology is static; fetch it only once.
                    if (faceTriangles == null)
                    {
                        faceTriangles = trackFrame.GetTriangles();
                    }

                    this.facePoints = trackFrame.GetProjected3DShape();
                }
            }