Example #1
        private float[,] calcDist(EnumIndexableCollection <FeaturePoint, Vector3DF> fp)
        {
            int fpSize = fp.Count();

            Vector3DF[] temp = new Vector3DF[fpSize];
            float[,] fpRes = new float[fpSize, fpSize];
            int k = 0;

            foreach (Vector3DF p in fp)
            {
                temp[k] = p;
                k++;
            }
            // Fill the distance matrix; the diagonal (i == j) stays 0.
            for (int i = 0; i < fpSize; i++)
            {
                for (int j = 0; j < fpSize; j++)
                {
                    if (i != j)
                    {
                        fpRes[i, j] = (float)Math.Sqrt(
                            Math.Pow(temp[i].X - temp[j].X, 2) +
                            Math.Pow(temp[i].Y - temp[j].Y, 2) +
                            Math.Pow(temp[i].Z - temp[j].Z, 2));
                    }
                }
            }
            return(fpRes);
        }
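A minimal usage sketch for the helper above, assuming a successfully tracked FaceTrackFrame called frame (the frame variable and the wrapping method are illustrative, not part of the original snippet):

        // Hypothetical caller: compute pairwise distances between all tracked 3D feature points.
        private void OnFaceTracked(FaceTrackFrame frame)
        {
            EnumIndexableCollection<FeaturePoint, Vector3DF> shape = frame.Get3DShape();
            float[,] pairwiseDistances = calcDist(shape);
        }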
Example #2
        public Background(Vector3DF color, Vector3DF light)
        {
            byte[] shader_code =
                Engine.Graphics.GraphicsDeviceType == GraphicsDeviceType.OpenGL
                ? Engine.File.CreateStaticFile("Shaders/BackEffect.glsl").Buffer
                : Engine.Graphics.GraphicsDeviceType == GraphicsDeviceType.DirectX11
                ? Engine.File.CreateStaticFile("Shaders/BackEffect.hlsl").Buffer
                : new byte[0];

            // Create the material
            material = Engine.Graphics.CreateMaterial2D(
                Engine.Graphics.CreateShader2D(
                    new UTF8Encoding().GetString(shader_code)
                    )
                );

            // Create the stopwatch
            stopwatch = new Stopwatch();

            // Set the variables inside the material
            material.SetVector2DF("resolution", Engine.WindowSize.To2DF());
            material.SetVector3DF("color", color);
            material.SetVector3DF("light", light);

            // Start the stopwatch
            stopwatch.Start();
        }
Example #3
        public void ScaleXY(Vector3DF rotation, out int xResult, out int yResult)
        {
            double screenWidth = SystemParameters.PrimaryScreenWidth;
            double y           = ((SystemParameters.PrimaryScreenHeight / 30) * -rotation.X) +
                                 (SystemParameters.PrimaryScreenHeight / 2);
            double x = ((SystemParameters.PrimaryScreenWidth / 40) * -rotation.Y) +
                       (SystemParameters.PrimaryScreenWidth / 2);

            if (x < 0)
            {
                x = 0;
            }
            else if (x > screenWidth - 1)
            {
                x = screenWidth - 1;
            }

            if (y < 0)
            {
                y = 0;
            }
            else if (y > SystemParameters.PrimaryScreenHeight - 1)
            {
                y = SystemParameters.PrimaryScreenHeight - 1;
            }
            xResult = (int)x;
            yResult = (int)y;
        }
Example #4
        public System.Drawing.Point ScaleXY(Vector3DF rotation)
        {
            double screenWidth  = Screen.PrimaryScreen.Bounds.Width;
            double screenHeight = Screen.PrimaryScreen.Bounds.Height;
            double y            = ((screenHeight / (int)nudHeadToScreenRelationYHeight.Value) * -rotation.X) +
                                  (screenHeight / 2);
            double x = ((screenWidth / (int)nudHeadToScreenRelationXWidth.Value) * -rotation.Y) +
                       (screenWidth / 2);

            if (x < 0)
            {
                x = 0;
            }
            else if (x > screenWidth - 1)
            {
                x = screenWidth - 1;
            }
            if (y < 0)
            {
                y = 0;
            }
            else if (y > screenHeight - 1)
            {
                y = screenHeight - 1;
            }
            return(new System.Drawing.Point((int)x, (int)y));
        }
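As a rough illustration of how the returned point might be consumed, the sketch below moves the Windows Forms cursor to the scaled position; currentFaceFrame and the wrapping method are assumptions for illustration only:

        // Hypothetical usage: map the current head rotation to a cursor position.
        private void MoveCursorFromHeadPose(FaceTrackFrame currentFaceFrame)
        {
            System.Drawing.Point target = ScaleXY(currentFaceFrame.Rotation);
            System.Windows.Forms.Cursor.Position = target;
        }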
Example #5
        /// <summary>
        /// Sends the rotation angles and position data via OSC
        /// </summary>
        /// <param name="rotHeadXYZ"></param>
        /// <param name="posHeadXYZ"></param>
        /// <param name="posHandRightXYZ"></param>
        /// <param name="posHandLeftXYZ"></param>
        private void SendOSCMessage(Vector3DF rotHeadXYZ, Vector3DF posHeadXYZ, Vector3DF posHandRightXYZ, Vector3DF posHandLeftXYZ)
        {
            // Create the OSC messages
            OscMessage rotHeadOsc      = new OscMessage(from, "/head_rot", null);
            OscMessage posHeadOsc      = new OscMessage(from, "/head_pos", null);
            OscMessage posHandRightOsc = new OscMessage(from, "/hand_r", null);
            OscMessage posHandLeftOsc  = new OscMessage(from, "/hand_l", null);

            // Append the data
            rotHeadOsc.Append <float>(rotHeadXYZ.x);
            rotHeadOsc.Append <float>(rotHeadXYZ.y);
            rotHeadOsc.Append <float>(rotHeadXYZ.z);
            posHeadOsc.Append <float>(posHeadXYZ.x);
            posHeadOsc.Append <float>(posHeadXYZ.y);
            posHeadOsc.Append <float>(posHeadXYZ.z);
            posHandRightOsc.Append <float>(posHandRightXYZ.x);
            posHandRightOsc.Append <float>(posHandRightXYZ.y);
            posHandRightOsc.Append <float>(posHandRightXYZ.z);
            posHandLeftOsc.Append <float>(posHandLeftXYZ.x);
            posHandLeftOsc.Append <float>(posHandLeftXYZ.y);
            posHandLeftOsc.Append <float>(posHandLeftXYZ.z);

            // Send
            rotHeadOsc.Send(toExp);
            posHeadOsc.Send(toExp);
            posHandRightOsc.Send(toExp);
            posHandLeftOsc.Send(toExp);
        }
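The snippet relies on two endpoint fields, from and toExp, that are defined elsewhere in the class. A plausible setup, assuming loopback addresses and placeholder port numbers (both values are assumptions), might look like this:

        // Hypothetical endpoint fields assumed by SendOSCMessage; requires "using System.Net;".
        private readonly IPEndPoint from  = new IPEndPoint(IPAddress.Loopback, 9000); // local sender
        private readonly IPEndPoint toExp = new IPEndPoint(IPAddress.Loopback, 9001); // OSC receiver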
Example #6
            /// <summary>
            /// Updates the face tracking information for this skeleton
            /// </summary>
            internal void OnFrameReady(KinectSensor kinectSensor,
                                       ColorImageFormat colorImageFormat, byte[] colorImage, DepthImageFormat depthImageFormat,
                                       short[] depthImage, Skeleton skeletonOfInterest) // <---------Skeleton data passed here *****************
            {
                // Here a skeletonOfInterest is available **********************************

                this.skeletonTrackingState = skeletonOfInterest.TrackingState;

                if (this.skeletonTrackingState != SkeletonTrackingState.Tracked)
                {
                    // nothing to do with an untracked skeleton.
                    return;
                }

                if (this.faceTracker == null)
                {
                    try
                    {
                        this.faceTracker = new FaceTracker(kinectSensor);
                    }
                    catch (InvalidOperationException)
                    {
                        // During some shutdown scenarios the FaceTracker
                        // cannot be instantiated.  Catch that exception
                        // and don't track a face.
                        Debug.WriteLine("AllFramesReady - creating a new FaceTracker threw an InvalidOperationException");
                        this.faceTracker = null;
                    }
                }

                if (this.faceTracker != null)
                {
                    // Here is where a skeletonOfInterest is available ***

                    // Call Track(), passing skeletonOfInterest
                    FaceTrackFrame frame = this.faceTracker.Track(
                        colorImageFormat, colorImage, depthImageFormat, depthImage, skeletonOfInterest);

                    this.lastFaceTrackSucceeded = frame.TrackSuccessful;
                    if (this.lastFaceTrackSucceeded)
                    {
                        if (faceTriangles == null)
                        {
                            // only need to get this once.  It doesn't change.
                            faceTriangles = frame.GetTriangles();
                        }

                        // Assign the facePoints
                        this.facePoints = frame.GetProjected3DShape();

                        // Gets the face data but does not draw the face or the skeleton

                        // This code gets the yaw, pitch and roll Capture it here
                        this.rotation = frame.Rotation;  // <-- frame is a FaceTrackFrame
                    }
                }
            }
Example #7
            /// <summary>
            /// Updates the face tracking information for this skeleton
            /// </summary>
            internal void OnFrameReady(KinectSensor kinectSensor, ColorImageFormat colorImageFormat, byte[] colorImage, DepthImageFormat depthImageFormat, short[] depthImage, Skeleton skeletonOfInterest)
            {
                this.skeletonTrackingState = skeletonOfInterest.TrackingState;

                if (this.skeletonTrackingState != SkeletonTrackingState.Tracked)
                {
                    // nothing to do with an untracked skeleton.
                    return;
                }

                if (this.faceTracker == null)
                {
                    try
                    {
                        this.faceTracker = new FaceTracker(kinectSensor);
                    }
                    catch (InvalidOperationException)
                    {
                        // During some shutdown scenarios the FaceTracker
                        // cannot be instantiated.  Catch that exception
                        // and don't track a face.
                        Debug.WriteLine("AllFramesReady - creating a new FaceTracker threw an InvalidOperationException");
                        this.faceTracker = null;
                    }
                }

                if (this.faceTracker != null)
                {
                    FaceTrackFrame frame = this.faceTracker.Track(
                        colorImageFormat, colorImage, depthImageFormat, depthImage, skeletonOfInterest);

                    this.lastFaceTrackSucceeded = frame.TrackSuccessful;
                    if (this.lastFaceTrackSucceeded)
                    {
                        if (faceTriangles == null)
                        {
                            // only need to get this once.  It doesn't change.
                            faceTriangles = frame.GetTriangles();
                        }

                        this.facePoints = frame.GetProjected3DShape();
                    }

                    // For head 3D
                    // Copy rotation and position
                    this.rotation    = frame.Rotation;
                    this.translation = frame.Translation;
                }
            }
Example #8
        private void UpdateMesh(FaceTrackFrame faceTrackingFrame)
        {
            EnumIndexableCollection <FeaturePoint, Vector3DF> shapePoints          = faceTrackingFrame.Get3DShape();
            EnumIndexableCollection <FeaturePoint, PointF>    projectedShapePoints = faceTrackingFrame.GetProjected3DShape();

            if (this.triangleIndices == null)
            {
                // Update stuff that doesn't change from frame to frame
                this.triangleIndices = faceTrackingFrame.GetTriangles();
                var indices = new Int32Collection(this.triangleIndices.Length * 3);
                foreach (FaceTriangle triangle in this.triangleIndices)
                {
                    indices.Add(triangle.Third);
                    indices.Add(triangle.Second);
                    indices.Add(triangle.First);
                }

                this.theGeometry.TriangleIndices = indices;
                this.theGeometry.Normals         = null; // Let WPF3D calculate these.

                this.theGeometry.Positions          = new Point3DCollection(shapePoints.Count);
                this.theGeometry.TextureCoordinates = new PointCollection(projectedShapePoints.Count);
                for (int pointIndex = 0; pointIndex < shapePoints.Count; pointIndex++)
                {
                    this.theGeometry.Positions.Add(new Point3D());
                    this.theGeometry.TextureCoordinates.Add(new Point());
                }
            }

            // Update the 3D model's vertices and texture coordinates
            for (int pointIndex = 0; pointIndex < shapePoints.Count; pointIndex++)
            {
                Vector3DF point = shapePoints[pointIndex];
                this.theGeometry.Positions[pointIndex] = new Point3D(point.X, point.Y, -point.Z);

                PointF projected = projectedShapePoints[pointIndex];

                this.theGeometry.TextureCoordinates[pointIndex] =
                    new Point(
                        projected.X / (double)this.colorImageWritableBitmap.PixelWidth,
                        projected.Y / (double)this.colorImageWritableBitmap.PixelHeight);
            }
        }
Example #9
        private System.Drawing.Point CalculateGaussianSmoothedXYPosition(FaceTrackFrame currentFaceFrame)
        {
            // Method 3: Gaussian filter: weight the most recent frames more heavily.
            Vector3DF rotationMedium = new Vector3DF();

            rotationMedium.X = currentFaceFrame.Rotation.X * gaussFilter[0];
            rotationMedium.Y = currentFaceFrame.Rotation.Y * gaussFilter[0];
            int i = 0;

            while (i < gaussFilter.Count - 1)
            {
                rotationMedium.X += (headRotationHistory[headRotationHistory.Count - 1 - i].X * gaussFilter[i]);
                rotationMedium.Y += (headRotationHistory[headRotationHistory.Count - 1 - i].Y * gaussFilter[i]);
                i++;
            }
            rotationMedium.X = (float)(rotationMedium.X / gaussFactor);
            rotationMedium.Y = (float)(rotationMedium.Y / gaussFactor);
            return(ScaleXY(rotationMedium));
        }
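gaussFilter and gaussFactor are not shown in this snippet. A plausible definition, assuming a short list of Gaussian-like weights with gaussFactor holding their sum for normalization (the names follow the snippet, the values are assumptions), could be:

        // Hypothetical smoothing weights; requires "using System.Collections.Generic;".
        private readonly List<float> gaussFilter = new List<float> { 0.30f, 0.25f, 0.20f, 0.15f, 0.10f };
        private readonly float gaussFactor = 1.00f; // sum of the weights above, used to normalize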
Example #10
        public double getDist(Vector3DF point1, Vector3DF point2)
        {
            double X1 = point1.X;
            double Y1 = point1.Y;
            double Z1 = point1.Z;

            double X2 = point2.X;
            double Y2 = point2.Y;
            double Z2 = point2.Z;


            double deltaX = X1 - X2;
            double deltaY = Y1 - Y2;
            double deltaZ = Z1 - Z2;

            double distance = Math.Sqrt(deltaX * deltaX + deltaY * deltaY + deltaZ * deltaZ);

            return(distance);
        }
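For context, getDist could be applied to two of the 3D feature points returned by Get3DShape(); the frame variable, the wrapping method, and the chosen feature points are illustrative assumptions:

        // Hypothetical usage: distance between two face feature points of a tracked frame.
        private double GetFaceHeight(FaceTrackFrame frame)
        {
            EnumIndexableCollection<FeaturePoint, Vector3DF> shape = frame.Get3DShape();
            return getDist(shape[FeaturePoint.TopLeftForehead], shape[FeaturePoint.BottomOfChin]);
        }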
Example #11
        public void Update(IPluginIO pin, DX11RenderContext context)
        {
            for (int i = 0; i < this.FOutput.SliceCount; i++)
            {
                bool update = this.FInvalidate;
                DX11IndexedGeometry geom;
                if (!this.FOutput[i].Contains(context))
                {
                    geom                = new DX11IndexedGeometry(context);
                    geom.InputLayout    = Pos3Norm3Tex2Vertex.Layout;
                    geom.VertexSize     = Pos3Norm3Tex2Vertex.VertexSize;
                    geom.HasBoundingBox = false;
                    geom.Topology       = PrimitiveTopology.TriangleList;

                    var indexstream = new DataStream(KinectRuntime.FACE_INDICES.Length * 4, true, true);
                    indexstream.WriteRange(KinectRuntime.FACE_INDICES);
                    indexstream.Position = 0;

                    geom.IndexBuffer = new DX11IndexBuffer(context, indexstream, false, true);

                    geom.VerticesCount = this.FInFrame[i].GetProjected3DShape().Count;

                    var vbuffer = new SlimDX.Direct3D11.Buffer(context.Device, new BufferDescription()
                    {
                        BindFlags      = BindFlags.VertexBuffer,
                        CpuAccessFlags = CpuAccessFlags.Write,
                        OptionFlags    = ResourceOptionFlags.None,
                        SizeInBytes    = geom.VerticesCount * geom.VertexSize,
                        Usage          = ResourceUsage.Dynamic
                    });
                    geom.VertexBuffer = vbuffer;

                    this.FOutput[i][context] = geom;
                    update = true;
                }
                else
                {
                    geom = this.FOutput[i][context];
                }



                if (update)
                {
                    DataStream ds = geom.LockVertexBuffer();
                    ds.Position = 0;

                    EnumIndexableCollection <FeaturePoint, PointF>    pp = this.FInFrame[i].GetProjected3DShape();
                    EnumIndexableCollection <FeaturePoint, Vector3DF> p  = this.FInFrame[i].Get3DShape();

                    Vector3[] norms = new Vector3[p.Count];

                    int[] inds     = KinectRuntime.FACE_INDICES;
                    int   tricount = inds.Length / 3;
                    //Compute smoothed normals
                    for (int j = 0; j < tricount; j++)
                    {
                        int i1 = inds[j * 3];
                        int i2 = inds[j * 3 + 1];
                        int i3 = inds[j * 3 + 2];

                        Vector3 v1 = p[i1].SlimVector();
                        Vector3 v2 = p[i2].SlimVector();
                        Vector3 v3 = p[i3].SlimVector();

                        Vector3 faceEdgeA = v2 - v1;
                        Vector3 faceEdgeB = v1 - v3;
                        Vector3 norm      = Vector3.Cross(faceEdgeB, faceEdgeA);

                        norms[i1] += norm; norms[i2] += norm; norms[i3] += norm;
                    }


                    for (int j = 0; j < geom.VerticesCount; j++)
                    {
                        Pos3Norm3Tex2Vertex vertex = new Pos3Norm3Tex2Vertex();
                        Vector3DF           v      = p[j];
                        vertex.Position  = new Vector3(v.X, v.Y, v.Z);
                        vertex.Normals   = Vector3.Normalize(norms[j]);
                        vertex.TexCoords = new Vector2(0, 0);
                        ds.Write <Pos3Norm3Tex2Vertex>(vertex);
                    }


                    geom.UnlockVertexBuffer();
                }
            }
        }
Example #12
 public static Vector3 SlimVector(this Vector3DF v)
 {
     return(new Vector3(v.X, v.Y, v.Z));
 }
Example #13
 public void CenterRotation()
 {
     this.initRotation = this.rotation;
 }
Example #14
        private void UpdateMesh(FaceTrackFrame faceTrackingFrame)
        {
            //Console.Out.WriteLine(" ###################### In UpdateMesh ############################# ");
            bool faceInCentre = true;

            EnumIndexableCollection <FeaturePoint, Vector3DF> shapePoints          = faceTrackingFrame.Get3DShape();
            EnumIndexableCollection <FeaturePoint, PointF>    projectedShapePoints = faceTrackingFrame.GetProjected3DShape();

            if (this.triangleIndices == null)
            {
                // Update stuff that doesn't change from frame to frame
                this.triangleIndices = faceTrackingFrame.GetTriangles();
                var indices = new Int32Collection(this.triangleIndices.Length * 3);
                foreach (FaceTriangle triangle in this.triangleIndices)
                {
                    indices.Add(triangle.Third);
                    indices.Add(triangle.Second);
                    indices.Add(triangle.First);
                }

                this.theGeometry.TriangleIndices = indices;
                this.theGeometry.Normals         = null; // Let WPF3D calculate these.

                this.theGeometry.Positions          = new Point3DCollection(shapePoints.Count);
                this.theGeometry.TextureCoordinates = new PointCollection(projectedShapePoints.Count);
                for (int pointIndex = 0; pointIndex < shapePoints.Count; pointIndex++)
                {
                    this.theGeometry.Positions.Add(new Point3D());
                    this.theGeometry.TextureCoordinates.Add(new Point());
                }
            }

            // Update the 3D model's vertices and texture coordinates
            for (int pointIndex = 0; pointIndex < shapePoints.Count; pointIndex++)
            {
                Vector3DF point = shapePoints[pointIndex];
                this.theGeometry.Positions[pointIndex] = new Point3D(point.X, point.Y, -point.Z);

                PointF projected = projectedShapePoints[pointIndex];

                this.theGeometry.TextureCoordinates[pointIndex] =
                    new Point(
                        projected.X / (double)this.colorImageWritableBitmap.PixelWidth,
                        projected.Y / (double)this.colorImageWritableBitmap.PixelHeight);

//                Console.Out.WriteLine("X = " + projected.X / (double)this.colorImageWritableBitmap.PixelWidth  + "Y = " + projected.Y / (double)this.colorImageWritableBitmap.PixelHeight);
                if (projected.X / (double)this.colorImageWritableBitmap.PixelWidth > .6 ||
                    projected.Y / (double)this.colorImageWritableBitmap.PixelHeight > .75)
                {
                    faceInCentre = false;
                }
            }

            if (faceInCentre)
            {
//                copyFaceImage();
                FaceMesh tempMeshData = new FaceMesh();
                tempMeshData.FaceViewport = viewport3d;
                FaceMeshData = tempMeshData;
            }
        }
Example #15
 /// <summary>
 /// Computes the absolute gap between two vectors just at numeric view
 /// </summary>
 /// <param name="vector1">The first vector.</param>
 /// <param name="vector2">The second vector.</param>
 /// <returns>The absolute gap</returns>
 private static double GetAbsoluteGap(Vector3DF vector1, Vector3DF vector2)
 {
     return(Math.Abs(vector1.X - vector2.X) + Math.Abs(vector1.Y - vector2.Y) + Math.Abs(vector1.Z - vector2.Z));
 }
Example #16
 /// <summary>
 /// Computes the distance of two points
 /// </summary>
 /// <param name="point1">The first point.</param>
 /// <param name="point2">The second point.</param>
 /// <returns>The distance of two points</returns>
 private static double GetDistance(Vector3DF point1, Vector3DF point2)
 {
     return(Math.Sqrt(((point1.X - point2.X) * (point1.X - point2.X))
                      + ((point1.Y - point2.Y) * (point1.Y - point2.Y)) + ((point1.Z - point2.Z) * (point1.Z - point2.Z))));
 }
Example #17
            /// <summary>
            /// Updates the face tracking information for this skeleton
            /// </summary>
            public void OnFrameReady(KinectSensor kinectSensor, ColorImageFormat colorImageFormat, byte[] colorImage, DepthImageFormat depthImageFormat, short[] depthImage, Skeleton skeletonOfInterest)
            {
                //No Touchy
                this.skeletonTrackingState = skeletonOfInterest.TrackingState;
                if (this.skeletonTrackingState != SkeletonTrackingState.Tracked)
                {
                    return;
                }
                if (faceTracker == null)
                {
                    faceTracker = new FaceTracker(kinectSensor);
                }
                frame = this.faceTracker.Track(
                    colorImageFormat, colorImage, depthImageFormat, depthImage, skeletonOfInterest);
                this.lastFaceTrackSucceeded = frame.TrackSuccessful;
                if (this.lastFaceTrackSucceeded)
                {
                    if (faceTriangles == null)
                    {
                        faceTriangles = frame.GetTriangles();
                    }
                    this.facePoints = frame.GetProjected3DShape();



                    //Touchy

                    //Assign Reference points
                    this.absfacePoints = frame.Get3DShape();
                    leftForehead       = this.absfacePoints[FeaturePoint.TopLeftForehead];
                    rightForehead      = this.absfacePoints[FeaturePoint.TopRightForehead];
                    jaw           = this.absfacePoints[FeaturePoint.BottomOfChin];
                    faceRotationX = frame.Rotation.X;
                    faceRotationY = frame.Rotation.Y;
                    faceRotationZ = frame.Rotation.Z;

                    //Calculate Reference Points
                    foreheadReferencePointX = ((rightForehead.X - leftForehead.X) / 2);
                    foreheadReferencePointY = ((rightForehead.Y - leftForehead.Y) / 2);
                    foreheadReferencePointZ = ((rightForehead.Z - leftForehead.Z) / 2);

                    //Set Animation Units
                    AUCoeff        = frame.GetAnimationUnitCoefficients();
                    jawLowererAU   = AUCoeff[AnimationUnit.JawLower];
                    lipStretcherAU = AUCoeff[AnimationUnit.LipStretcher];
                    browRaiserAU   = AUCoeff[AnimationUnit.BrowRaiser];
                    setJawData(jaw.Y, leftForehead.Y, rightForehead.Y, jawLowererAU, lipStretcherAU);

                    rotations = new float[5];
                    //set up matlab
                    matlab = new MLApp.MLApp();
                    matlab.Execute(@"cd C:\Users\Bala\Documents\MATLAB");
                    result = null;

                    //get rotation values
                    rotations[0] = faceRotationX;
                    rotations[1] = faceRotationY;
                    rotations[2] = faceRotationZ;
                    rotations[3] = jawLowererAU;
                    rotations[4] = lipStretcherAU;
                    //Set up GlovePie
                    OscPacket.LittleEndianByteOrder = false;
                    IPEndPoint myapp    = new IPEndPoint(IPAddress.Loopback, 1944);
                    IPEndPoint glovepie = new IPEndPoint(IPAddress.Loopback, 1945);
                    Console.WriteLine(browRaiserAU);

                    matlab.Feval("nnW", 1, out result, rotations[0]);
                    object[] resW = result as object[];
                    nnoutput = (int)((float)resW[0] + 0.5f);
                    if (nnoutput == 1)
                    {
                        commandtoSend = 1;
                    }
                    else
                    {
                        result = null;
                        matlab.Feval("nnA", 1, out result, rotations[1]);
                        object[] resA = result as object[];
                        nnoutput = (int)((float)resA[0] + 0.5f);
                        if (nnoutput == 1)
                        {
                            commandtoSend = 2;
                        }
                        else
                        {
                            result = null;
                            matlab.Feval("nnS", 1, out result, rotations[0]);
                            object[] resS = result as object[];
                            nnoutput = (int)((float)resS[0] + 0.5f);
                            if (nnoutput == 1)
                            {
                                commandtoSend = 3;
                            }
                            else
                            {
                                result = null;
                                matlab.Feval("nnd", 1, out result, rotations[1]);
                                object[] resD = result as object[];
                                nnoutput = (int)((float)resD[0] + 0.5f);
                                if (nnoutput == 1)
                                {
                                    commandtoSend = 4;
                                }
                                else
                                {
                                    result = null;
                                    matlab.Feval("nnLC", 1, out result, rotations[2]);
                                    object[] resLC = result as object[];
                                    nnoutput = (int)((float)resLC[0] + 0.5f);
                                    if (nnoutput == 1)
                                    {
                                        commandtoSend = 5;
                                    }
                                    else
                                    {
                                        result = null;
                                        matlab.Feval("nnRC", 1, out result, rotations[2]);
                                        object[] resRC = result as object[];
                                        nnoutput = (int)((float)resRC[0] + 0.5f);
                                        if (nnoutput == 1)
                                        {
                                            commandtoSend = 6;
                                        }
                                        else
                                        {
                                            result = null;
                                            if (jawLowererAU > 0.7)
                                            {
                                                commandtoSend = 7;
                                            }

                                            /*
                                             * matlab.Feval("nnSpace", 1, out result, rotations[3]);
                                             * object[] resSpace = result as object[];
                                             * nnoutput = (int)((float)resSpace[0] + 0.5f);
                                             * if (nnoutput == 1)
                                             * {
                                             *  commandtoSend = 7;
                                             * }*/
                                            else
                                            {
                                                result = null;
                                                if (browRaiserAU > 0.4)
                                                {
                                                    commandtoSend = 8;
                                                }
                                                else
                                                {
                                                    result        = null;
                                                    commandtoSend = 0;
                                                }

                                                /*result = null;
                                                 * matlab.Feval("nnMiddle", 1, out result, lipStretcherAU);
                                                 * object[] resMiddle = result as object[];
                                                 * nnoutput = (int)((float)resMiddle[0] + 0.5f);
                                                 * if (nnoutput == 1)
                                                 * {
                                                 *  commandtoSend = 8;
                                                 * }
                                                 * else
                                                 * {
                                                 *  result = null;
                                                 *  commandtoSend = 0;
                                                 * }*/
                                            }
                                        }
                                    }
                                }
                            }
                        }
                    }
                    //Console.WriteLine("Iteration Complete");
                    switch (commandtoSend)
                    {
                    case 0:
                        msg = new OscMessage(myapp, "/move/w", 0.0f);
                        msg.Send(glovepie);
                        msg = new OscMessage(myapp, "/move/a", 0.0f);
                        msg.Send(glovepie);
                        msg = new OscMessage(myapp, "/move/s", 0.0f);
                        msg.Send(glovepie);
                        msg = new OscMessage(myapp, "/move/d", 0.0f);
                        msg.Send(glovepie);
                        msg = new OscMessage(myapp, "/move/lc", 0.0f);
                        msg.Send(glovepie);
                        msg = new OscMessage(myapp, "/move/rc", 0.0f);
                        msg.Send(glovepie);
                        msg = new OscMessage(myapp, "/move/middle", 0.0f);
                        msg.Send(glovepie);
                        break;

                    case 1:
                        msg = new OscMessage(myapp, "/move/w", 10.0f);
                        msg.Send(glovepie);
                        msg = new OscMessage(myapp, "/move/a", 0.0f);
                        msg.Send(glovepie);
                        msg = new OscMessage(myapp, "/move/s", 0.0f);
                        msg.Send(glovepie);
                        msg = new OscMessage(myapp, "/move/d", 0.0f);
                        msg.Send(glovepie);
                        msg = new OscMessage(myapp, "/move/lc", 0.0f);
                        msg.Send(glovepie);
                        msg = new OscMessage(myapp, "/move/rc", 0.0f);
                        msg.Send(glovepie);
                        msg = new OscMessage(myapp, "/move/space", 0.0f);
                        msg.Send(glovepie);
                        msg = new OscMessage(myapp, "/move/middle", 0.0f);
                        msg.Send(glovepie);
                        break;

                    case 2:
                        msg = new OscMessage(myapp, "/move/w", 0.0f);
                        msg.Send(glovepie);
                        msg = new OscMessage(myapp, "/move/a", 10.0f);
                        msg.Send(glovepie);
                        msg = new OscMessage(myapp, "/move/s", 0.0f);
                        msg.Send(glovepie);
                        msg = new OscMessage(myapp, "/move/d", 0.0f);
                        msg.Send(glovepie);
                        msg = new OscMessage(myapp, "/move/lc", 0.0f);
                        msg.Send(glovepie);
                        msg = new OscMessage(myapp, "/move/rc", 0.0f);
                        msg.Send(glovepie);
                        msg = new OscMessage(myapp, "/move/space", 0.0f);
                        msg.Send(glovepie);
                        msg = new OscMessage(myapp, "/move/middle", 0.0f);
                        msg.Send(glovepie);
                        break;

                    case 3:
                        msg = new OscMessage(myapp, "/move/w", 0.0f);
                        msg.Send(glovepie);
                        msg = new OscMessage(myapp, "/move/a", 0.0f);
                        msg.Send(glovepie);
                        msg = new OscMessage(myapp, "/move/s", 10.0f);
                        msg.Send(glovepie);
                        msg = new OscMessage(myapp, "/move/d", 0.0f);
                        msg.Send(glovepie);
                        msg = new OscMessage(myapp, "/move/lc", 0.0f);
                        msg.Send(glovepie);
                        msg = new OscMessage(myapp, "/move/rc", 0.0f);
                        msg.Send(glovepie);
                        msg = new OscMessage(myapp, "/move/space", 0.0f);
                        msg.Send(glovepie);
                        msg = new OscMessage(myapp, "/move/middle", 0.0f);
                        msg.Send(glovepie);
                        break;

                    case 4:
                        msg = new OscMessage(myapp, "/move/w", 0.0f);
                        msg.Send(glovepie);
                        msg = new OscMessage(myapp, "/move/a", 0.0f);
                        msg.Send(glovepie);
                        msg = new OscMessage(myapp, "/move/s", 0.0f);
                        msg.Send(glovepie);
                        msg = new OscMessage(myapp, "/move/d", 10.0f);
                        msg.Send(glovepie);
                        msg = new OscMessage(myapp, "/move/lc", 0.0f);
                        msg.Send(glovepie);
                        msg = new OscMessage(myapp, "/move/rc", 0.0f);
                        msg.Send(glovepie);
                        msg = new OscMessage(myapp, "/move/space", 0.0f);
                        msg.Send(glovepie);
                        msg = new OscMessage(myapp, "/move/middle", 0.0f);
                        msg.Send(glovepie);
                        break;

                    case 5:
                        msg = new OscMessage(myapp, "/move/w", 0.0f);
                        msg.Send(glovepie);
                        msg = new OscMessage(myapp, "/move/a", 0.0f);
                        msg.Send(glovepie);
                        msg = new OscMessage(myapp, "/move/s", 0.0f);
                        msg.Send(glovepie);
                        msg = new OscMessage(myapp, "/move/d", 0.0f);
                        msg.Send(glovepie);
                        msg = new OscMessage(myapp, "/move/lc", 10.0f);
                        msg.Send(glovepie);
                        msg = new OscMessage(myapp, "/move/rc", 0.0f);
                        msg.Send(glovepie);
                        msg = new OscMessage(myapp, "/move/space", 0.0f);
                        msg.Send(glovepie);
                        msg = new OscMessage(myapp, "/move/middle", 0.0f);
                        msg.Send(glovepie);
                        break;

                    case 6:
                        msg = new OscMessage(myapp, "/move/w", 0.0f);
                        msg.Send(glovepie);
                        msg = new OscMessage(myapp, "/move/a", 0.0f);
                        msg.Send(glovepie);
                        msg = new OscMessage(myapp, "/move/s", 0.0f);
                        msg.Send(glovepie);
                        msg = new OscMessage(myapp, "/move/d", 0.0f);
                        msg.Send(glovepie);
                        msg = new OscMessage(myapp, "/move/lc", 0.0f);
                        msg.Send(glovepie);
                        msg = new OscMessage(myapp, "/move/rc", 10.0f);
                        msg.Send(glovepie);
                        msg = new OscMessage(myapp, "/move/space", 0.0f);
                        msg.Send(glovepie);
                        msg = new OscMessage(myapp, "/move/middle", 0.0f);
                        msg.Send(glovepie);
                        break;

                    case 7:
                        msg = new OscMessage(myapp, "/move/w", 0.0f);
                        msg.Send(glovepie);
                        msg = new OscMessage(myapp, "/move/a", 0.0f);
                        msg.Send(glovepie);
                        msg = new OscMessage(myapp, "/move/s", 0.0f);
                        msg.Send(glovepie);
                        msg = new OscMessage(myapp, "/move/d", 0.0f);
                        msg.Send(glovepie);
                        msg = new OscMessage(myapp, "/move/lc", 0.0f);
                        msg.Send(glovepie);
                        msg = new OscMessage(myapp, "/move/rc", 0.0f);
                        msg.Send(glovepie);
                        msg = new OscMessage(myapp, "/move/space", 10.0f);
                        msg.Send(glovepie);
                        msg = new OscMessage(myapp, "/move/middle", 0.0f);
                        msg.Send(glovepie);
                        break;

                    case 8:
                        msg = new OscMessage(myapp, "/move/w", 0.0f);
                        msg.Send(glovepie);
                        msg = new OscMessage(myapp, "/move/a", 0.0f);
                        msg.Send(glovepie);
                        msg = new OscMessage(myapp, "/move/s", 0.0f);
                        msg.Send(glovepie);
                        msg = new OscMessage(myapp, "/move/d", 0.0f);
                        msg.Send(glovepie);
                        msg = new OscMessage(myapp, "/move/lc", 0.0f);
                        msg.Send(glovepie);
                        msg = new OscMessage(myapp, "/move/rc", 0.0f);
                        msg.Send(glovepie);
                        msg = new OscMessage(myapp, "/move/space", 0.0f);
                        msg.Send(glovepie);
                        msg = new OscMessage(myapp, "/move/middle", 10.0f);
                        msg.Send(glovepie);
                        break;
                    }
                }
            }
Example #18
        public void Update(Skeleton sk)
        {
            if (!AudioProvider.detectionActive)
            {
                TrackingSideTool.Get().SpeedRateDetectionCheckBox.IsChecked = true;
            }

            _up         = false;
            _complete   = false;
            _lookingDir = false;

            frame++;

            if (frame > NB_FRAME_MAX)
            {
                frame = 0;
                start = false;
                right = false;
                left  = false;
                _up   = true;

                DrawingSheetAvatarViewModel.displayCustomText = String.Empty;
                GestureRecognized?.Invoke(this, new EventArgs());
            }

            if (TrainingWithAvatarViewModel.Get().SkeletonList != null && TrainingWithAvatarViewModel.canBeInterrupted && !start)
            {
                DrawingSheetAvatarViewModel.displayCustomText = "Look at the direction of your raised arm then speak";
            }

            try
            {
                var face = KinectDevice.skeletonFaceTracker.facePoints3D;

                Vector3DF rightEye = face.ElementAt(20);
                Vector3DF leftEye  = face.ElementAt(53);

                Point3D handRight  = new Point3D(sk.Joints[JointType.HandRight].Position);
                Point3D handLeft   = new Point3D(sk.Joints[JointType.HandLeft].Position);
                Point3D elbowRight = new Point3D(sk.Joints[JointType.ElbowRight].Position);
                Point3D elbowLeft  = new Point3D(sk.Joints[JointType.ElbowLeft].Position);
                Point3D shoulder   = new Point3D(sk.Joints[JointType.ShoulderCenter].Position);

                if (verbose)
                {
                    Console.WriteLine("~~~~~~~~~~~~~~~~~~~~~~~~~");
                    Console.WriteLine("frame: " + frame);
                    Console.WriteLine("direction: " + (rightEye.Z - leftEye.Z));
                    Console.WriteLine("right: " + right);
                    Console.WriteLine("left: " + left);
                }

                if (Math.Abs(handRight.X - elbowRight.X) > 0.05 && handRight.Y > shoulder.Y)
                {
                    if (rightEye.Z - leftEye.Z < -0.015)
                    {
                        if (AudioProvider.currentIntensity > AUDIO_LOUDNESS)
                        {
                            start = true;
                            right = true;
                            DrawingSheetAvatarViewModel.displayCustomText = "Do the same with the other arm";

                            if (left)
                            {
                                frame     = 0;
                                start     = false;
                                right     = false;
                                left      = false;
                                _complete = true;

                                DrawingSheetAvatarViewModel.displayCustomText = String.Empty;
                                GestureRecognized?.Invoke(this, new EventArgs());
                            }
                        }
                    }
                    else if (!right)
                    {
                        frame       = 0;
                        start       = false;
                        right       = false;
                        left        = false;
                        _lookingDir = true;

                        DrawingSheetAvatarViewModel.displayCustomText = String.Empty;
                        GestureRecognized?.Invoke(this, new EventArgs());
                    }
                }

                if (Math.Abs(handLeft.X - elbowLeft.X) > 0.05 && handLeft.Y > shoulder.Y)
                {
                    if (rightEye.Z - leftEye.Z > 0.015)
                    {
                        if (AudioProvider.currentIntensity > AUDIO_LOUDNESS)
                        {
                            start = true;
                            left  = true;
                            DrawingSheetAvatarViewModel.displayCustomText = "Do the same with the other arm";

                            if (right)
                            {
                                frame     = 0;
                                start     = false;
                                right     = false;
                                left      = false;
                                _complete = true;

                                DrawingSheetAvatarViewModel.displayCustomText = String.Empty;
                                GestureRecognized?.Invoke(this, new EventArgs());
                            }
                        }
                    }
                    else if (!left)
                    {
                        frame       = 0;
                        start       = false;
                        right       = false;
                        left        = false;
                        _lookingDir = true;

                        DrawingSheetAvatarViewModel.displayCustomText = String.Empty;
                        GestureRecognized?.Invoke(this, new EventArgs());
                    }
                }
            }
            catch (Exception e) // the Kinect did not detect a face
            {
                //System.Windows.Forms.MessageBox.Show("The Kinect did not detect a face!");
                Console.Error.WriteLine(e);
            }
        }
Example #19
        private void OnAllFramesReady(object sender, Microsoft.Kinect.AllFramesReadyEventArgs allFramesReadyEventArgs)
        {
            ColorImageFrame colorImageFrame = null;
            DepthImageFrame depthImageFrame = null;
            SkeletonFrame   skeletonFrame   = null;

            try
            {
                colorImageFrame = allFramesReadyEventArgs.OpenColorImageFrame();
                depthImageFrame = allFramesReadyEventArgs.OpenDepthImageFrame();
                skeletonFrame   = allFramesReadyEventArgs.OpenSkeletonFrame();

                if (colorImageFrame == null || depthImageFrame == null || skeletonFrame == null)
                {
                    return;
                }

                // Check for image format changes.  The FaceTracker doesn't
                // deal with that so we need to reset.
                if (this.depthImageFormat != depthImageFrame.Format)
                {
                    this.depthImage       = null;
                    this.depthImageFormat = depthImageFrame.Format;
                }

                if (this.colorImageFormat != colorImageFrame.Format)
                {
                    this.colorImage       = null;
                    this.colorImageFormat = colorImageFrame.Format;
                }

                // Create any buffers to store copies of the data we work with
                if (this.depthImage == null)
                {
                    this.depthImage = new short[depthImageFrame.PixelDataLength];
                }

                if (this.colorImage == null)
                {
                    this.colorImage = new byte[colorImageFrame.PixelDataLength];
                }

                // Get the skeleton information
                if (this.SkeletonData == null || this.SkeletonData.Length != skeletonFrame.SkeletonArrayLength)
                {
                    this.SkeletonData = new Skeleton[skeletonFrame.SkeletonArrayLength];
                }

                colorImageFrame.CopyPixelDataTo(this.colorImage);
                depthImageFrame.CopyPixelDataTo(this.depthImage);
                skeletonFrame.CopySkeletonDataTo(this.SkeletonData);
                Skeleton activeSkeleton = null;
                activeSkeleton = (from skel in this.SkeletonData where skel.TrackingState == SkeletonTrackingState.Tracked select skel).FirstOrDefault();


                //Idea: Separate Eye-Parts of Color Image
                //Use learning Algorithm for right and left eye
                //Detect blink on separated parts of color Image

                //colorImage is a one-dimensional array with 640 x 480 x 4 (RGBA) values


                if (activeSkeleton != null)
                {
                    FaceTrackFrame currentFaceFrame = faceTracker.Track(ColorImageFormat.RgbResolution640x480Fps30, colorImage, depthImageFormat, depthImage, activeSkeleton);
                    float          browRaiserValue  = currentFaceFrame.GetAnimationUnitCoefficients()[AnimationUnit.BrowRaiser];
                    float          browLowererValue = currentFaceFrame.GetAnimationUnitCoefficients()[AnimationUnit.BrowLower];
                    tbBrowLowerer.Text = browLowererValue.ToString();
                    tbBrowRaiser.Text  = browRaiserValue.ToString();
                    //Get relevant Points for blink detection
                    //Left eye
                    int    minX    = (int)Math.Round(currentFaceFrame.GetProjected3DShape()[FeaturePoint.AboveOneFourthLeftEyelid].X);
                    int    minY    = (int)Math.Round(currentFaceFrame.GetProjected3DShape()[FeaturePoint.AboveOneFourthLeftEyelid].Y);
                    int    maxX    = (int)Math.Round(currentFaceFrame.GetProjected3DShape()[FeaturePoint.BelowThreeFourthLeftEyelid].X);
                    int    maxY    = (int)Math.Round(currentFaceFrame.GetProjected3DShape()[FeaturePoint.BelowThreeFourthLeftEyelid].Y);
                    Bitmap leftEye = EyeExtract(colorImageFrame, currentFaceFrame, minX, minY, maxX, maxY, false);
                    pbLeftEye.Image = leftEye;

                    //Right eye
                    minX = (int)Math.Round(currentFaceFrame.GetProjected3DShape()[FeaturePoint.AboveThreeFourthRightEyelid].X);
                    minY = (int)Math.Round(currentFaceFrame.GetProjected3DShape()[FeaturePoint.AboveThreeFourthRightEyelid].Y);
                    maxX = (int)Math.Round(currentFaceFrame.GetProjected3DShape()[FeaturePoint.OneFourthBottomRightEyelid].X);
                    maxY = (int)Math.Round(currentFaceFrame.GetProjected3DShape()[FeaturePoint.OneFourthBottomRightEyelid].Y);

                    Bitmap rightEye = EyeExtract(colorImageFrame, currentFaceFrame, minX, minY, maxX, maxY, true);
                    pbRightEye.Image = rightEye;

                    // Apply an edge-detection filter to both eyes.
                    double dxRight;
                    double dyRight;
                    double dxLeft;
                    double dyLeft;
                    if (rightEye != null && leftEye != null)
                    {
                        Bitmap edgePicRight = Convolution(ConvertGrey(rightEye), true, out dxRight, out dyRight);
                        Bitmap edgePicLeft  = Convolution(ConvertGrey(leftEye), false, out dxLeft, out dyLeft);



                        //If Face is rotated, move Mouse
                        if (headRotationHistory.Count > filterLength && currentFaceFrame.TrackSuccessful)
                        {
                            int x = 0;
                            int y = 0;

                            // Method 1: no smoothing
                            //ScaleXY(currentFaceFrame.Rotation, out x, out y);
                            //MouseControl.Move(x, y);

                            //// Method 2: smoothing over the last x frames:
                            //int i = 0;
                            //Vector3DF rotationMedium = new Vector3DF();
                            //while (i < 10 && headRotationHistory.Count - 1 > i)
                            //{
                            //    i++;
                            //    rotationMedium.X += headRotationHistory[headRotationHistory.Count - 1 - i].X;
                            //    rotationMedium.Y += headRotationHistory[headRotationHistory.Count - 1 - i].Y;
                            //}
                            //rotationMedium.X = rotationMedium.X / i;
                            //rotationMedium.Y = rotationMedium.Y / i;
                            //ScaleXY(rotationMedium, out x, out y);
                            //MouseControl.Move(x, y);

                            // Method 3: Gaussian filter: weight the most recent frames more heavily.



                            Vector3DF rotationMedium = new Vector3DF();
                            rotationMedium.X = currentFaceFrame.Rotation.X * gaussFilter[0];
                            rotationMedium.Y = currentFaceFrame.Rotation.Y * gaussFilter[0];
                            int i = 0;
                            while (i < filterLength - 1)
                            {
                                i++;
                                rotationMedium.X += (headRotationHistory[headRotationHistory.Count - 1 - i].X * gaussFilter[i]);
                                rotationMedium.Y += (headRotationHistory[headRotationHistory.Count - 1 - i].Y * gaussFilter[i]);
                            }
                            rotationMedium.X = (float)(rotationMedium.X / gaussFactor);
                            rotationMedium.Y = (float)(rotationMedium.Y / gaussFactor);
                            ScaleXY(rotationMedium, out x, out y);

                            MouseControl.Move(x, y);
                            // Method 4: quadratic smoothing
                            //double deltaX = ((-currentFaceFrame.Rotation.Y) - (-headRotationHistory.Last().Y));
                            //double deltaY = ((-currentFaceFrame.Rotation.X) - (-headRotationHistory.Last().X));
                            //if (deltaX < 0)
                            //    deltaX = -Math.Pow(deltaX, 2) * 4;
                            //else
                            //    deltaX = Math.Pow(deltaX, 2) * 4;
                            //if (deltaY < 0)
                            //    deltaY = -Math.Pow(deltaY, 2) * 5;
                            //else
                            //    deltaY = Math.Pow(deltaY, 2) * 5;
                            //MouseControl.DeltaMove((int)Math.Round(deltaX, 0), (int)Math.Round(deltaY));
                        }

                        headRotationHistory.Add(currentFaceFrame.Rotation);
                        if (headRotationHistory.Count >= 100)
                        {
                            headRotationHistory.RemoveAt(0);
                        }
                    }
                }
            }
            catch (Exception e)
            {
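                // NOTE: face-tracking errors for a single frame are silently swallowed here; the next frame is processed normally.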
            }
            finally
            {
                if (colorImageFrame != null)
                {
                    colorImageFrame.Dispose();
                }

                if (depthImageFrame != null)
                {
                    depthImageFrame.Dispose();
                }

                if (skeletonFrame != null)
                {
                    skeletonFrame.Dispose();
                }
            }
        }
Example #20
        private void OnAllFramesReady(object sender, AllFramesReadyEventArgs allFramesReadyEventArgs)
        {
            ColorImageFrame colorImageFrame = null;
            DepthImageFrame depthImageFrame = null;
            SkeletonFrame   skeletonFrame   = null;

            try
            {
                colorImageFrame = allFramesReadyEventArgs.OpenColorImageFrame();
                depthImageFrame = allFramesReadyEventArgs.OpenDepthImageFrame();
                skeletonFrame   = allFramesReadyEventArgs.OpenSkeletonFrame();

                if (colorImageFrame == null || depthImageFrame == null || skeletonFrame == null)
                {
                    return;
                }

                // Check for image format changes.  The FaceTracker doesn't
                // deal with that so we need to reset.
                if (this.depthImageFormat != depthImageFrame.Format)
                {
                    this.ResetFaceTracking();
                    this.depthImage       = null;
                    this.depthImageFormat = depthImageFrame.Format;
                }

                if (this.colorImageFormat != colorImageFrame.Format)
                {
                    this.ResetFaceTracking();
                    this.colorImage       = null;
                    this.colorImageFormat = colorImageFrame.Format;
                }

                // Create any buffers to store copies of the data we work with
                if (this.depthImage == null)
                {
                    this.depthImage = new short[depthImageFrame.PixelDataLength];
                }

                if (this.colorImage == null)
                {
                    this.colorImage = new byte[colorImageFrame.PixelDataLength];
                }

                // Get the skeleton information
                if (this.skeletonData == null || this.skeletonData.Length != skeletonFrame.SkeletonArrayLength)
                {
                    this.skeletonData = new Skeleton[skeletonFrame.SkeletonArrayLength];
                }

                colorImageFrame.CopyPixelDataTo(this.colorImage);
                depthImageFrame.CopyPixelDataTo(this.depthImage);
                skeletonFrame.CopySkeletonDataTo(this.skeletonData); // An array of skeletons

                // Update the list of trackers and the trackers with the current frame information
                foreach (Skeleton skeleton in this.skeletonData)
                {
                    if (skeleton.TrackingState == SkeletonTrackingState.Tracked ||
                        skeleton.TrackingState == SkeletonTrackingState.PositionOnly)
                    {
                        // We want to keep a record of any skeleton, tracked or untracked.
                        if (!this.trackedSkeletons.ContainsKey(skeleton.TrackingId))
                        {
                            this.trackedSkeletons.Add(skeleton.TrackingId, new SkeletonFaceTracker());
                        }

                        // Give each tracker the updated frame.
                        SkeletonFaceTracker skeletonFaceTracker;
                        if (this.trackedSkeletons.TryGetValue(skeleton.TrackingId, out skeletonFaceTracker))
                        {
                            // Pass the skeleton to the skeletonFaceTracker - see that class below

                            skeletonFaceTracker.OnFrameReady(this.Kinect, colorImageFormat, colorImage, depthImageFormat, depthImage, skeleton);

                            skeletonFaceTracker.LastTrackedFrame = skeletonFrame.FrameNumber;

                            this.rotation = skeletonFaceTracker.rotation;

                            // Draw the skeleton here...
                        }
                    }
                }

                this.RemoveOldTrackers(skeletonFrame.FrameNumber);

                this.InvalidateVisual();
            }
            finally
            {
                if (colorImageFrame != null)
                {
                    colorImageFrame.Dispose();
                }

                if (depthImageFrame != null)
                {
                    depthImageFrame.Dispose();
                }

                if (skeletonFrame != null)
                {
                    skeletonFrame.Dispose();
                }
            }
        }
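
        // Hedged sketch, RemoveOldTrackers is only referenced above and not shown here: the Kinect
        // face-tracking sample this follows removes trackers that have not been updated for a number
        // of frames. One plausible implementation, with MaxMissedFrames as an assumed constant and
        // SkeletonFaceTracker assumed to be disposable:
        private void RemoveOldTrackers(int currentFrameNumber)
        {
            var trackersToRemove = new List<int>();

            foreach (var tracker in this.trackedSkeletons)
            {
                uint missedFrames = (uint)currentFrameNumber - (uint)tracker.Value.LastTrackedFrame;
                if (missedFrames > MaxMissedFrames)
                {
                    // Too many frames have passed since this skeleton was last seen.
                    trackersToRemove.Add(tracker.Key);
                }
            }

            foreach (int trackingId in trackersToRemove)
            {
                this.trackedSkeletons[trackingId].Dispose();
                this.trackedSkeletons.Remove(trackingId);
            }
        }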
Пример #21
0
        private void OnAllFramesReady(object sender, AllFramesReadyEventArgs allFramesReadyEventArgs)
        {
            ColorImageFrame colorImageFrame = null;
            DepthImageFrame depthImageFrame = null;
            SkeletonFrame   skeletonFrame   = null;

            File.AppendAllText("mouseLog.txt", DateTime.Now + " - All Kinect frames ready.\n");
            try
            {
                colorImageFrame = allFramesReadyEventArgs.OpenColorImageFrame();
                depthImageFrame = allFramesReadyEventArgs.OpenDepthImageFrame();
                skeletonFrame   = allFramesReadyEventArgs.OpenSkeletonFrame();

                if (colorImageFrame == null || depthImageFrame == null || skeletonFrame == null)
                {
                    File.AppendAllText("mouseLog.txt", DateTime.Now + " - Color, depth, or skeleton frame is null. Aborting frame.\n");
                    return;
                }

                // Check for image format changes.  The FaceTracker doesn't
                // deal with that so we need to reset.
                if (this.depthImageFormat != depthImageFrame.Format)
                {
                    this.depthImage       = null;
                    this.depthImageFormat = depthImageFrame.Format;
                }

                if (this.colorImageFormat != colorImageFrame.Format)
                {
                    this.colorImage       = null;
                    this.colorImageFormat = colorImageFrame.Format;
                }

                // Create any buffers to store copies of the data we work with
                if (this.depthImage == null)
                {
                    this.depthImage = new short[depthImageFrame.PixelDataLength];
                }

                if (this.colorImage == null)
                {
                    this.colorImage = new byte[colorImageFrame.PixelDataLength];
                }


                // Get the skeleton information
                if (this.SkeletonData == null || this.SkeletonData.Length != skeletonFrame.SkeletonArrayLength)
                {
                    this.SkeletonData = new Skeleton[skeletonFrame.SkeletonArrayLength];
                }

                colorImageFrame.CopyPixelDataTo(this.colorImage);
                depthImageFrame.CopyPixelDataTo(this.depthImage);
                skeletonFrame.CopySkeletonDataTo(this.SkeletonData);
                Skeleton activeSkeleton = null;
                activeSkeleton = (from skel in this.SkeletonData where skel.TrackingState == SkeletonTrackingState.Tracked select skel).FirstOrDefault();

                //Idea: separate the eye regions of the color image,
                //use a learning algorithm for the right and left eye,
                //and detect blinks on the separated parts of the color image.

                //colorImage is a one-dimensional array with 640 x 480 x 4 (RGBA) values
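                //Hedged note, not in the original source: with that layout the pixel at column px and
                //row py starts at index (py * 640 + px) * 4, e.g. byte b = colorImage[(py * 640 + px) * 4];
                //the Kinect 640x480 color stream is delivered as 32-bit BGRX rather than RGBA, so the
                //channel order in the comment above is approximate.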

                if (activeSkeleton != null)
                {
                    File.AppendAllText("mouseLog.txt", DateTime.Now + " - Skeleton is there. Trying to find face.\n");
                    FaceTrackFrame currentFaceFrame = faceTracker.Track(ColorImageFormat.RgbResolution640x480Fps30, colorImage, depthImageFormat, depthImage, activeSkeleton);
                    if (currentFaceFrame.TrackSuccessful)
                    {
                        File.AppendAllText("mouseLog.txt", DateTime.Now + " - Recognized face successfully.\n");
                    }
                    else
                    {
                        File.AppendAllText("mouseLog.txt", DateTime.Now + " - Couldn't find face in frame.\n");
                    }

                    //Get relevant Points for blink detection
                    //Left eye
                    int    minX    = (int)Math.Round(currentFaceFrame.GetProjected3DShape()[FeaturePoint.AboveOneFourthLeftEyelid].X);
                    int    minY    = (int)Math.Round(currentFaceFrame.GetProjected3DShape()[FeaturePoint.AboveOneFourthLeftEyelid].Y);
                    int    maxX    = (int)Math.Round(currentFaceFrame.GetProjected3DShape()[FeaturePoint.BelowThreeFourthLeftEyelid].X);
                    int    maxY    = (int)Math.Round(currentFaceFrame.GetProjected3DShape()[FeaturePoint.BelowThreeFourthLeftEyelid].Y);
                    Bitmap leftEye = EyeExtract(colorImageFrame, currentFaceFrame, minX, minY, maxX, maxY, false);
                    //this.pbRight.BeginInvoke((MethodInvoker)(() => this.pbRight.Image = leftEye));
                    //

                    //Right eye
                    minX = (int)Math.Round(currentFaceFrame.GetProjected3DShape()[FeaturePoint.AboveThreeFourthRightEyelid].X);
                    minY = (int)Math.Round(currentFaceFrame.GetProjected3DShape()[FeaturePoint.AboveThreeFourthRightEyelid].Y);
                    maxX = (int)Math.Round(currentFaceFrame.GetProjected3DShape()[FeaturePoint.OneFourthBottomRightEyelid].X);
                    maxY = (int)Math.Round(currentFaceFrame.GetProjected3DShape()[FeaturePoint.OneFourthBottomRightEyelid].Y);

                    Bitmap rightEye  = EyeExtract(colorImageFrame, currentFaceFrame, minX, minY, maxX, maxY, true);
                    Bitmap leftEye2  = null;
                    Bitmap rightEye2 = null;
                    if (leftEye != null)
                    {
                        leftEye2 = new Bitmap(leftEye);
                    }
                    if (rightEye != null)
                    {
                        rightEye2 = new Bitmap(rightEye);
                    }
                    // System.Delegate d = new MethodInvoker(SetPictures));
                    //   this.Invoke(SetPictures, leftEye);
                    //pbRight.Image = rightEye;
                    this.pbLeft.BeginInvoke((MethodInvoker)(() => this.pbLeft.Image = rightEye2));
                    this.pbRight.BeginInvoke((MethodInvoker)(() => this.pbRight.Image = leftEye2));
                    // this.Invoke(new MethodInvoker(SetPictures));
                    //Apply an edge filter to both eyes.

                    if (rightEye != null && leftEye != null)
                    {
                        Dictionary <string, int> angleCount;
                        Bitmap edgePicRight   = Convolution(ConvertGrey(rightEye), true, out angleCount);
                        bool   rightEyeClosed = IsEyeClosed(angleCount);
                        Bitmap edgePicLeft    = Convolution(ConvertGrey(leftEye), false, out angleCount);
                        bool   leftEyeClosed  = IsEyeClosed(angleCount);
                        //   pbLeftFaltung.Image = edgePicLeft;
                        //   pbRightFaltung.Image = edgePicRight;



                        if (rightEyeClosedHistory.Count > 100)
                        {
                            rightEyeClosedHistory.RemoveAt(0);
                        }
                        if (leftEyeClosedHistory.Count > 100)
                        {
                            leftEyeClosedHistory.RemoveAt(0);
                        }
                        leftEyeClosedHistory.Add(leftEyeClosed);
                        rightEyeClosedHistory.Add(rightEyeClosed);

                        //If the face is rotated, move the mouse
                        if (headRotationHistory.Count > gaussFilter.Count - 1 && leftEyeClosedHistory.Count > nudConvolutionFilterLength.Value && currentFaceFrame.TrackSuccessful)
                        {
                            int   x = 0;
                            int   y = 0;
                            float browRaiserValue  = currentFaceFrame.GetAnimationUnitCoefficients()[AnimationUnit.BrowRaiser];
                            float browLowererValue = currentFaceFrame.GetAnimationUnitCoefficients()[AnimationUnit.BrowLower];
                            float mouthOpenValue   = currentFaceFrame.GetAnimationUnitCoefficients()[AnimationUnit.JawLower];
                            if (browRaiserHistory.Count >= 100)
                            {
                                browRaiserHistory.RemoveAt(0);
                                browLowererHistory.RemoveAt(0);
                                mouthOpenHistory.RemoveAt(0);
                            }
                            browLowererHistory.Add(browLowererValue);
                            browRaiserHistory.Add(browRaiserValue);
                            mouthOpenHistory.Add(mouthOpenValue);

                            //Method 1: no smoothing
                            //ScaleXY(currentFaceFrame.Rotation, out x, out y);
                            //MouseControl.Move(x, y);

                            ////Method 2: smoothing over the last x frames:
                            //int i = 0;
                            //Vector3DF rotationMedium = new Vector3DF();
                            //while (i < 10 && headRotationHistory.Count - 1 > i)
                            //{
                            //    i++;
                            //    rotationMedium.X += headRotationHistory[headRotationHistory.Count - 1 - i].X;
                            //    rotationMedium.Y += headRotationHistory[headRotationHistory.Count - 1 - i].Y;
                            //}
                            //rotationMedium.X = rotationMedium.X / i;
                            //rotationMedium.Y = rotationMedium.Y / i;
                            //ScaleXY(rotationMedium, out x, out y);
                            //MouseControl.Move(x, y);

                            //Method 3: Gaussian filter: weight the most recent frames more strongly.
                            Vector3DF rotationMedium = new Vector3DF();
                            rotationMedium.X = currentFaceFrame.Rotation.X * gaussFilter[0];
                            rotationMedium.Y = currentFaceFrame.Rotation.Y * gaussFilter[0];
                            int i = 0;
                            while (i < gaussFilter.Count - 1)
                            {
                                rotationMedium.X += (headRotationHistory[headRotationHistory.Count - 1 - i].X * gaussFilter[i]);
                                rotationMedium.Y += (headRotationHistory[headRotationHistory.Count - 1 - i].Y * gaussFilter[i]);
                                i++;
                            }
                            rotationMedium.X = (float)(rotationMedium.X / gaussFactor);
                            rotationMedium.Y = (float)(rotationMedium.Y / gaussFactor);
                            ScaleXY(rotationMedium, out x, out y);
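
                            //Hedged note: gaussFilter and gaussFactor are fields defined elsewhere in this
                            //class. A plausible initialization is a short list of descending Gaussian
                            //weights with their sum as the normalization factor, for example:
                            //  gaussFilter = new List<float> { 1.0f, 0.8f, 0.5f, 0.2f, 0.1f };
                            //  gaussFactor = gaussFilter.Sum();
                            //The exact weights are an assumption; the original project may use different ones.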


                            //Method 4: Quadratic smoothing
                            //double deltaX = ((-currentFaceFrame.Rotation.Y) - (-headRotationHistory.Last().Y));
                            //double deltaY = ((-currentFaceFrame.Rotation.X) - (-headRotationHistory.Last().X));
                            //if (deltaX < 0)
                            //    deltaX = -Math.Pow(deltaX, 2) * 4;
                            //else
                            //    deltaX = Math.Pow(deltaX, 2) * 4;
                            //if (deltaY < 0)
                            //    deltaY = -Math.Pow(deltaY, 2) * 5;
                            //else
                            //    deltaY = Math.Pow(deltaY, 2) * 5;
                            //MouseControl.DeltaMove((int)Math.Round(deltaX, 0), (int)Math.Round(deltaY));


                            //Check for right, left or double click
                            //1. Check whether a click already happened within the last nudClickDelay frames, or whether drag & drop is active
                            if (clickDelay > nudClickDelay.Value && !pointNClickActive)
                            {
                                //2. If not, calculate the mean dy values over the last 16 frames
                                if (CalculateMeanConvolutionValues())
                                {
                                    clickDelay = 0;
                                }
                                else
                                {
                                    //Otherwise, check for an open mouth
                                    if (mouthOpenValue > (float)nudMouthOpenStartThreshold.Value &&
                                        mouthOpenHistory[mouthOpenHistory.Count - 2] > (float)nudMouthOpenConfirmation.Value &&
                                        mouthOpenHistory[mouthOpenHistory.Count - 3] > (float)nudMouthOpenConfirmation.Value &&
                                        mouthOpenHistory[mouthOpenHistory.Count - 4] > (float)nudMouthOpenConfirmation.Value)
                                    {
                                        MouseControl.Move(mousePositionHistory[mousePositionHistory.Count - 4].X, mousePositionHistory[mousePositionHistory.Count - 4].Y);
                                        this.lbAction.Invoke((MethodInvoker)(() => this.lbAction.Items.Add(
                                            "Left Mouse Down on X: " + mousePositionHistory[mousePositionHistory.Count - 4].X +
                                            " Y: " + mousePositionHistory[mousePositionHistory.Count - 4].Y)));
                                        //lbAction.Items.Add("Left Mouse Down on X: " + mousePositionHistory[mousePositionHistory.Count - 4].X + " Y: " + mousePositionHistory[mousePositionHistory.Count - 4].Y);
                                        MouseControl.MouseDownLeft();
                                        pointNClickActive = true;
                                        clickDelay        = 0;
                                    }
                                }
                            }
                            else if (pointNClickActive)
                            {
                                if (mouthOpenValue < (float)nudMouthOpenEndThreshold.Value)
                                {
                                    this.lbAction.Invoke((MethodInvoker)(() => this.lbAction.Items.Add("Left Mouse Up on X: " + x + " Y: " + y)));
                                    MouseControl.MouseUpLeft();
                                    pointNClickActive = false;
                                    clickDelay        = 0;
                                }
                            }
                            MouseControl.Move(x, y);
                            if (browLowererValue > (float)nudBrowLowererStartThreshold.Value)
                            {
                                MouseControl.ScrollDown((int)(-browLowererValue * (int)nudScrollMultiplierDown.Value));
                            }
                            if (browRaiserValue > (float)nudBrowRaiserStartThreshold.Value)
                            {
                                MouseControl.ScrollDown((int)(browRaiserValue * (int)nudScrollMultiplierUp.Value));
                            }
                            if (mousePositionHistory.Count > 100)
                            {
                                mousePositionHistory.RemoveAt(0);
                            }
                            mousePositionHistory.Add(new Microsoft.Kinect.Toolkit.FaceTracking.Point(x, y));
                            File.AppendAllText("mouseLog.txt", DateTime.Now + " - Face and eyes successfully tracked.\n");
                        }
                    }
                    else
                    {
                        File.AppendAllText("mouseLog.txt", DateTime.Now + " - Face recognized but couldn't find eye in face.\n");
                    }
                    clickDelay++;

                    headRotationHistory.Add(currentFaceFrame.Rotation);
                    if (headRotationHistory.Count >= 100)
                    {
                        headRotationHistory.RemoveAt(0);
                    }
                }
                else
                {
                    File.AppendAllText("mouseLog.txt", DateTime.Now + " - Active Skeleton is null. Couldn't analyze frame.\n");
                }
            }
            catch (Exception e)
            {
                File.AppendAllText("mouseLog.txt", DateTime.Now + " - Error during frame analysis.\n" + e.ToString());
            }
            finally
            {
                if (colorImageFrame != null)
                {
                    colorImageFrame.Dispose();
                }

                if (depthImageFrame != null)
                {
                    depthImageFrame.Dispose();
                }

                if (skeletonFrame != null)
                {
                    skeletonFrame.Dispose();
                }
            }
        }
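
        // Hedged sketch, EyeExtract is only called above and not shown here: a simplified stand-in that
        // copies the 640x480 32-bit color buffer into a Bitmap and crops the rectangle spanned by the
        // eyelid feature points. The helper name, the margin and the PixelFormat are assumptions, not
        // the original implementation.
        private Bitmap CropEyeRegion(byte[] colorPixels, int minX, int minY, int maxX, int maxY)
        {
            const int margin = 5;

            using (var frame = new Bitmap(640, 480, System.Drawing.Imaging.PixelFormat.Format32bppRgb))
            {
                // Copy the raw Kinect color buffer into the bitmap (stride is 640 * 4 bytes, no padding).
                var data = frame.LockBits(new Rectangle(0, 0, 640, 480),
                                          System.Drawing.Imaging.ImageLockMode.WriteOnly,
                                          frame.PixelFormat);
                System.Runtime.InteropServices.Marshal.Copy(colorPixels, 0, data.Scan0, colorPixels.Length);
                frame.UnlockBits(data);

                // Clamp the eye rectangle (plus a small margin) to the frame bounds and crop it.
                int left   = Math.Max(0, Math.Min(minX, maxX) - margin);
                int top    = Math.Max(0, Math.Min(minY, maxY) - margin);
                int right  = Math.Min(639, Math.Max(minX, maxX) + margin);
                int bottom = Math.Min(479, Math.Max(minY, maxY) + margin);

                return frame.Clone(new Rectangle(left, top, right - left + 1, bottom - top + 1), frame.PixelFormat);
            }
        }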