/// <summary>
        /// Selects the FeaturePoints of all tracked skeletons from the source observable.
        /// </summary>
        /// <param name="observable">The source observable.</param>
        /// <param name="faceTracker">The FaceTracker that is used to track the faces.</param>
        /// <returns>A sequence of collections of tuples, each containing a skeleton's tracking ID, tracking state, joints, and projected FeaturePoints (null when no face was tracked).</returns>
        public static IObservable <IEnumerable <Tuple <Int32, SkeletonTrackingState, JointCollection, EnumIndexableCollection <FeaturePoint, PointF> > > > SelectPersonPoints(this IObservable <Tuple <ColorImageFormat, byte[], DepthImageFormat, short[], Skeleton[]> > observable, FaceTracker faceTracker)
        {
            if (observable == null)
            {
                throw new ArgumentNullException("observable");
            }
            if (faceTracker == null)
            {
                throw new ArgumentNullException("faceTracker");
            }

            return(observable.Select(_ => _.Item5.ForEach <Skeleton, Tuple <Int32, SkeletonTrackingState, JointCollection, EnumIndexableCollection <FeaturePoint, PointF> > >(__ =>
            {
                if (__.TrackingState == SkeletonTrackingState.PositionOnly)
                {
                    return Tuple.Create <Int32, SkeletonTrackingState, JointCollection, EnumIndexableCollection <FeaturePoint, PointF> >(__.TrackingId, __.TrackingState, __.Joints, null);
                }

                var faceTrackFrame = faceTracker.Track(_.Item1, _.Item2, _.Item3, _.Item4, __);

                if (!faceTrackFrame.TrackSuccessful)
                {
                    return Tuple.Create <Int32, SkeletonTrackingState, JointCollection, EnumIndexableCollection <FeaturePoint, PointF> >(__.TrackingId, __.TrackingState, __.Joints, null);
                }

                return Tuple.Create(__.TrackingId, __.TrackingState, __.Joints, faceTrackFrame.GetProjected3DShape());
            })));
        }
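A quick usage sketch for the operator above. The source and faceTracker variables are assumptions (an observable producing the frame-data tuples and a FaceTracker created for the running sensor); the subscription itself is illustrative, not part of the original extension.

        // Hypothetical consumer: print a projected feature point for every person
        // whose face was successfully tracked in the current frame.
        IDisposable subscription = source
            .SelectPersonPoints(faceTracker)
            .Subscribe(people =>
            {
                foreach (var person in people)
                {
                    // Item4 is null when the skeleton was PositionOnly or face tracking failed.
                    if (person.Item4 != null)
                    {
                        PointF point = person.Item4[FeaturePoint.AboveMidUpperLeftEyelid];
                        Console.WriteLine("Person {0}: eyelid at ({1}, {2})", person.Item1, point.X, point.Y);
                    }
                }
            });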
Example #2
        /// <summary>
        /// Recognizes the face and obtains its rotation parameters.
        /// </summary>
        /// <param name="colorFrame">The color frame.</param>
        /// <param name="depthFrame">The depth frame.</param>
        /// <param name="skeleton">The skeleton whose face is tracked.</param>
        private void FaceDataAcquisition(ColorImageFrame colorFrame, DepthImageFrame depthFrame, Skeleton skeleton)
        {
            FaceTrackFrame faceFrame = faceTracker.Track(colorFrame.Format, colorFrame.ToPixelData(),
                                                         depthFrame.Format, depthFrame.ToPixelData(), skeleton);

            if (faceFrame.TrackSuccessful)
            {
                rotHeadXYZ.x = faceFrame.Rotation.X;
                rotHeadXYZ.y = faceFrame.Rotation.Y;
                rotHeadXYZ.z = faceFrame.Rotation.Z;

                // Display the tracking status
                statusTextBlock.Text       = "Status: Tracked";
                statusTextBlock.Background = new SolidColorBrush(Color.FromRgb(0xBB, 0xDE, 0xFB));

                // Send the OSC message
                if (sendTextBlock.Text != "push 'Update IP and Port'")
                {
                    SendOSCMessage(rotHeadXYZ, posHeadXYZ, posHandRightXYZ, posHandLeftXYZ);
                }
            }
            else
            {
                // Display the tracking status
                statusTextBlock.Text       = "Status: NOT Tracked";
                statusTextBlock.Background = (SolidColorBrush)this.FindResource("MyBrush");
            }
        }
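The ToPixelData() calls above rely on helper extensions that are not shown. A plausible minimal implementation, assuming only the standard CopyPixelDataTo API, could look like this:

        // Hypothetical extensions matching the ToPixelData() calls above.
        public static class ImageFrameExtensions
        {
            public static byte[] ToPixelData(this ColorImageFrame frame)
            {
                var pixels = new byte[frame.PixelDataLength];
                frame.CopyPixelDataTo(pixels);
                return pixels;
            }

            public static short[] ToPixelData(this DepthImageFrame frame)
            {
                var pixels = new short[frame.PixelDataLength];
                frame.CopyPixelDataTo(pixels);
                return pixels;
            }
        }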
Example #3
        private void SensorAllFrameReady(object sender, AllFramesReadyEventArgs e)
        {
            using (ColorImageFrame colorFrame = e.OpenColorImageFrame())
            {
                if (colorFrame != null)
                {
                    colorFrame.CopyPixelDataTo(this.colorPixels);
                }
                else
                {
                    return;
                }
            }

            using (DepthImageFrame depthFrame = e.OpenDepthImageFrame())
            {
                if (depthFrame != null)
                {
                    depthFrame.CopyPixelDataTo(this.depthPixelsShort);
                }
            }

            using (SkeletonFrame skeletonFrame = e.OpenSkeletonFrame())
            {
                if (skeletonFrame != null)
                {
                    skeletonFrame.CopySkeletonDataTo(this.skeletonData);
                }
            }

            foreach (Skeleton skeleton in this.skeletonData)
            {
                if (skeleton == null)
                {
                    continue;
                }

                if (skeleton.TrackingState == SkeletonTrackingState.Tracked)
                {
                    _gestureController.Update(skeleton);

                    faceFrame = faceTracker.Track(this.sensor.ColorStream.Format, this.colorPixels,
                                                  this.sensor.DepthStream.Format, this.depthPixelsShort, skeleton);
                }
            }

            colBitmap = new Bitmap(colorWidth, colorHeight);

            BitmapData colImageBitmapData = colBitmap.LockBits(new Rectangle(0, 0, colBitmap.Width, colBitmap.Height),
                                                               ImageLockMode.WriteOnly,
                                                               colBitmap.PixelFormat);
            IntPtr IptrColImage = colImageBitmapData.Scan0;

            Marshal.Copy(colorPixels, 0, IptrColImage, colorPixels.Length);
            colBitmap.UnlockBits(colImageBitmapData);

            // Display the bitmap
            UpdateColImage(colBitmap);
        }
Example #4
 public FaceTrackFrame GetFaceTrackFrame(Skeleton skeleton)
 {
     if (_faceFrame == null) /* For efficiency, a new FaceTrackFrame is not created on every access, but only once per frame. See OnAllFramesReady below. */
     {
         _faceFrame = _faceTracker.Track(_kinectSensor.ColorStream.Format, ColorPixels, _kinectSensor.DepthStream.Format, DepthPixels, skeleton);
     }
     return(_faceFrame);
 }
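For this cache to work, _faceFrame must be invalidated once per sensor frame, as the comment notes. The actual OnAllFramesReady is not shown here; a minimal sketch of the reset it would contain:

 // Hypothetical per-frame reset: clear the cached FaceTrackFrame so the next
 // GetFaceTrackFrame call runs the tracker again on fresh pixel data.
 private void OnAllFramesReady(object sender, AllFramesReadyEventArgs e)
 {
     _faceFrame = null;
     // ... copy the new color, depth, and skeleton data into ColorPixels, DepthPixels, etc. ...
 }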
        void TrackFace()
        {
            var skeleton = skeletonData
                           .Where(s => s.TrackingState != SkeletonTrackingState.NotTracked)
                           .OrderBy(s => s.Position.Z)
                           .FirstOrDefault();

            IsSkeletonTrackedUpdated(skeleton != null);

            if (skeleton == null)
            {
                skeletonId = -1;
                if (faceTracker != null)
                {
                    faceTracker.Dispose();
                    faceTracker = null;
                }

                JawLowerUpdated(null);
                return;
            }

            if (skeletonId != skeleton.TrackingId)
            {
                try
                {
                    if (faceTracker != null)
                    {
                        faceTracker.Dispose();
                    }
                    faceTracker = new FaceTracker(sensor);
                }
                catch (InvalidOperationException)
                {
                    return;
                }
            }
            skeletonId = skeleton.TrackingId;

            if (skeleton.TrackingState != SkeletonTrackingState.Tracked)
            {
                JawLowerUpdated(null);
                return;
            }

            // NOTE: Calling Dispose on the FaceTrackFrame object breaks all subsequent processing, so the frame is intentionally not disposed here.
            var faceFrame = faceTracker.Track(sensor.ColorStream.Format, colorImage, sensor.DepthStream.Format, depthImage, skeleton);

            if (!faceFrame.TrackSuccessful)
            {
                JawLowerUpdated(null);
                return;
            }

            var animationUnits = faceFrame.GetAnimationUnitCoefficients();

            JawLowerUpdated(animationUnits[AnimationUnit.JawLower]);
        }
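IsSkeletonTrackedUpdated and JawLowerUpdated are callbacks whose declarations are not shown. A plausible (assumed) declaration that matches how they are invoked above:

        // Hypothetical callback declarations; the no-op defaults keep the
        // invocations in TrackFace from throwing when nobody has subscribed.
        public event Action<bool> IsSkeletonTrackedUpdated = delegate { };
        public event Action<float?> JawLowerUpdated = delegate { };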
        /// <summary>
        /// Selects the successfully tracked FaceTrackFrames of the first tracked skeleton from the AllFramesReadyEventArgs observable.
        /// </summary>
        /// <param name="source">The source observable</param>
        /// <param name="faceTracker">The FaceTracker that is used to track the faces.</param>
        /// <returns>A sequence of FaceTrackFrame elements</returns>
        public static IObservable <FaceTrackFrame> SelectFaceTrackFrame(this IObservable <AllFramesReadyEventArgs> source, FaceTracker faceTracker)
        {
            if (source == null)
            {
                throw new ArgumentNullException("observable");
            }
            if (faceTracker == null)
            {
                throw new ArgumentNullException("faceTracker");
            }

            return(source.SelectFormatStreams()
                   .Select(_ => faceTracker.Track(_.Item1, _.Item2, _.Item3, _.Item4, _.Item5.First()))
                   .Where(_ => _.TrackSuccessful));
        }
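A usage sketch wiring a sensor's AllFramesReady event into this operator with Rx. The sensor and faceTracker variables are assumptions (a started KinectSensor and a FaceTracker created for it):

        // Hypothetical subscription: reacts only to successfully tracked faces.
        IObservable<AllFramesReadyEventArgs> allFrames =
            Observable.FromEventPattern<AllFramesReadyEventArgs>(
                h => sensor.AllFramesReady += h,
                h => sensor.AllFramesReady -= h)
            .Select(eventPattern => eventPattern.EventArgs);

        IDisposable subscription = allFrames
            .SelectFaceTrackFrame(faceTracker)
            .Subscribe(faceFrame => Console.WriteLine("Head rotation Z: {0}", faceFrame.Rotation.Z));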
Example #7
        public void Track(Skeleton skeleton)
        {
            // Colors
            if (colors == null)
            {
                colors = new byte[sensor.ColorStream.FramePixelDataLength];
            }

            var colorFrame = sensor.ColorStream.OpenNextFrame(0);

            if (colorFrame == null)
            {
                IsLookingToSensor = null;
                return;
            }

            colorFrame.CopyPixelDataTo(colors);

            // Depths
            if (depths == null)
            {
                depths = new short[sensor.DepthStream.FramePixelDataLength];
            }

            var depthFrame = sensor.DepthStream.OpenNextFrame(0);

            if (depthFrame == null)
            {
                IsLookingToSensor = null;
                return;
            }
            depthFrame.CopyPixelDataTo(depths);

            // Track
            var frame = faceTracker.Track(sensor.ColorStream.Format, colors, sensor.DepthStream.Format, depths, skeleton);

            if (frame == null)
            {
                IsLookingToSensor = null;
                return;
            }
            var shape = frame.Get3DShape();

            var leftEyeZ  = shape[FeaturePoint.AboveMidUpperLeftEyelid].Z;
            var rightEyeZ = shape[FeaturePoint.AboveMidUpperRightEyelid].Z;

            IsLookingToSensor = Math.Abs(leftEyeZ - rightEyeZ) <= epsilon;
        }
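The test above assumes that when the head faces the sensor directly, both upper eyelids lie at roughly the same depth, so their Z difference stays below epsilon. A hedged sketch of a caller (the surrounding class and its skeleton plumbing are assumptions):

        // Hypothetical caller: run the gaze check for the first tracked skeleton of each frame.
        void OnSkeletonFrameReady(object sender, SkeletonFrameReadyEventArgs e)
        {
            using (SkeletonFrame frame = e.OpenSkeletonFrame())
            {
                if (frame == null)
                {
                    return;
                }
                var skeletons = new Skeleton[frame.SkeletonArrayLength];
                frame.CopySkeletonDataTo(skeletons);
                var skeleton = skeletons.FirstOrDefault(s => s.TrackingState == SkeletonTrackingState.Tracked);
                if (skeleton != null)
                {
                    Track(skeleton); // updates IsLookingToSensor
                }
            }
        }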
        /// <summary>
        /// Selects the successfully tracked FaceTrackFrames of all tracked skeletons from the AllFramesReadyEventArgs observable.
        /// </summary>
        /// <param name="source">The source observable.</param>
        /// <param name="faceTracker">The FaceTracker that is used to track the faces.</param>
        /// <returns>A sequence of collections of tuples, each containing a skeleton's tracking ID, tracking state, and FaceTrackFrame.</returns>
        public static IObservable <IEnumerable <Tuple <Int32, SkeletonTrackingState, FaceTrackFrame> > > SelectFaceTrackFrame(this IObservable <Tuple <ColorImageFormat, byte[], DepthImageFormat, short[], Skeleton[]> > source, FaceTracker faceTracker)
        {
            if (source == null)
            {
                throw new ArgumentNullException("observable");
            }
            if (faceTracker == null)
            {
                throw new ArgumentNullException("faceTracker");
            }

            return(source.Select(_ => _.Item5.Where(__ => __.TrackingState == SkeletonTrackingState.Tracked)
                                 .ForEach(__ => Tuple.Create(__.TrackingId, __.TrackingState, faceTracker.Track(_.Item1, _.Item2, _.Item3, _.Item4, __)))
                                 .Where(__ => __.Item3.TrackSuccessful)));
        }
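A consumption sketch for the multi-person variant (source and faceTracker are assumed, as before):

        // Hypothetical consumer: report the head rotation of every successfully tracked face.
        IDisposable subscription = source
            .SelectFaceTrackFrame(faceTracker)
            .Subscribe(faces =>
            {
                foreach (var face in faces)
                {
                    Console.WriteLine("Skeleton {0}: rotation ({1}, {2}, {3})",
                                      face.Item1,
                                      face.Item3.Rotation.X,
                                      face.Item3.Rotation.Y,
                                      face.Item3.Rotation.Z);
                }
            });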
Example #9
        /// <summary>
        /// Handles the AllFramesReady event of the kinectSensor control.
        /// </summary>
        /// <param name="sender">The source of the event.</param>
        /// <param name="e">The <see cref="Microsoft.Kinect.AllFramesReadyEventArgs"/> instance containing the event data.</param>
        void kinectSensor_AllFramesReady(object sender, AllFramesReadyEventArgs e)
        {
            // Retrieve each single frame and copy the data
            using (ColorImageFrame colorImageFrame = e.OpenColorImageFrame())
            {
                if (colorImageFrame == null)
                {
                    return;
                }
                colorImageFrame.CopyPixelDataTo(colorPixelData);
            }

            using (DepthImageFrame depthImageFrame = e.OpenDepthImageFrame())
            {
                if (depthImageFrame == null)
                {
                    return;
                }
                depthImageFrame.CopyPixelDataTo(depthPixelData);
            }

            using (SkeletonFrame skeletonFrame = e.OpenSkeletonFrame())
            {
                if (skeletonFrame == null)
                {
                    return;
                }
                skeletonFrame.CopySkeletonDataTo(skeletonData);
            }

            // Retrieve the first tracked skeleton if any. Otherwise, do nothing.
            var skeleton = skeletonData.FirstOrDefault(s => s.TrackingState == SkeletonTrackingState.Tracked);

            if (skeleton == null)
            {
                return;
            }

            // Make the faceTracker processing the data.
            FaceTrackFrame faceFrame = faceTracker.Track(kinectSensor.ColorStream.Format, colorPixelData,
                                                         kinectSensor.DepthStream.Format, depthPixelData,
                                                         skeleton);

            // If a face is tracked, then we can use it
            if (faceFrame.TrackSuccessful)
            {
                // Retrieve only the Animation Units coeffs
                var AUCoeff = faceFrame.GetAnimationUnitCoefficients();

                // Records to list buffer if record is enabled
                if (isRecord == true)
                {
                    // Start stopwatch
                    stopwatch.Start();

                    // AU coefficients
                    lipRaiserBuffer.Add(AUCoeff[AnimationUnit.LipRaiser]);
                    jawLowerBuffer.Add(AUCoeff[AnimationUnit.JawLower]);
                    lipStretchBuffer.Add(AUCoeff[AnimationUnit.LipStretcher]);
                    browLowerBuffer.Add(AUCoeff[AnimationUnit.BrowLower]);
                    lipDepressBuffer.Add(AUCoeff[AnimationUnit.LipCornerDepressor]);
                    browRaiserBuffer.Add(AUCoeff[AnimationUnit.BrowRaiser]); // was BrowLower: copy-paste bug
                    // Face rotation
                    xRotation.Add(faceFrame.Rotation.X);
                    yRotation.Add(faceFrame.Rotation.Y);
                    zRotation.Add(faceFrame.Rotation.Z);
                    // Get time in ms
                    timeBuffer.Add(stopwatch.ElapsedMilliseconds);
                }

                // Display on UI coefficients and rotation for user
                LipRaiser.Content  = AUCoeff[AnimationUnit.LipRaiser];
                JawLower.Content   = AUCoeff[AnimationUnit.JawLower];
                LipStretch.Content = AUCoeff[AnimationUnit.LipStretcher];
                BrowLower.Content  = AUCoeff[AnimationUnit.BrowLower];
                LipDepress.Content = AUCoeff[AnimationUnit.LipCornerDepressor];
                BrowRaiser.Content = AUCoeff[AnimationUnit.BrowRaiser];
                XRotation.Content  = faceFrame.Rotation.X;
                YRotation.Content  = faceFrame.Rotation.Y;
                ZRotation.Content  = faceFrame.Rotation.Z;

                // Animates the drawn face
                var jawLowerer = AUCoeff[AnimationUnit.JawLower];
                jawLowerer = jawLowerer < 0 ? 0 : jawLowerer;
                MouthScaleTransform.ScaleY = jawLowerer * 5 + 0.1;
                MouthScaleTransform.ScaleX = (AUCoeff[AnimationUnit.LipStretcher] + 1);
                LeftBrow.Y            = RightBrow.Y = (AUCoeff[AnimationUnit.BrowLower]) * 40;
                RightBrowRotate.Angle = (AUCoeff[AnimationUnit.BrowRaiser] * 20);
                LeftBrowRotate.Angle  = -RightBrowRotate.Angle;
                CanvasRotate.Angle    = faceFrame.Rotation.Z;
            }
        }
        void newSensor_AllFramesReady(object sender, AllFramesReadyEventArgs e)
        {
            if (_faceTracker == null)
            {
                try
                {
                    _faceTracker = new FaceTracker(_sensorChooser.Kinect);
                }
                catch (InvalidOperationException)
                {
                    // During some shutdown scenarios the FaceTracker
                    // is unable to be instantiated.  Catch that exception
                    // and don't track a face.
                    Debug.WriteLine("AllFramesReady - creating a new FaceTracker threw an InvalidOperationException");
                    _faceTracker = null;
                }
            }

            #region copying color data
            using (var colorFrame = e.OpenColorImageFrame())
            {
                if (colorFrame == null)
                {
                    return;
                }

                // Make a copy of the color frame for displaying.
                var haveNewFormat = _currentColorImageFormat != colorFrame.Format;
                if (haveNewFormat)
                {
                    _currentColorImageFormat  = colorFrame.Format;
                    _colorImageData           = new byte[colorFrame.PixelDataLength];
                    _colorImageWritableBitmap = new WriteableBitmap(colorFrame.Width, colorFrame.Height, 96, 96, PixelFormats.Bgr32, null);

                    _colorImage.Source = _colorImageWritableBitmap;
                }

                colorFrame.CopyPixelDataTo(_colorImageData);
                _colorImageWritableBitmap.WritePixels(
                    new Int32Rect(0, 0, colorFrame.Width, colorFrame.Height),
                    _colorImageData,
                    colorFrame.Width * colorFrame.BytesPerPixel,
                    0);
            }
            #endregion

            #region copying depth data
            using (var depthFrame = e.OpenDepthImageFrame())
            {
                if (depthFrame == null)
                {
                    return;
                }

                depthData = new short[depthFrame.PixelDataLength];
                depthFrame.CopyPixelDataTo(depthData);
            }
            #endregion

            #region copying skeleton data
            using (var skeletonFrame = e.OpenSkeletonFrame())
            {
                if (skeletonFrame == null)
                {
                    return;
                }

                Skeleton[] allSkeletons = new Skeleton[skeletonFrame.SkeletonArrayLength];
                skeletonFrame.CopySkeletonDataTo(allSkeletons);

                firstSkeleton = (from c in allSkeletons
                                 where c.TrackingState == SkeletonTrackingState.Tracked
                                 select c).FirstOrDefault();

                if (firstSkeleton == null)
                {
                    return;
                }
            }
            #endregion

            if (_faceTracker != null)
            {
                FaceTrackFrame frame = _faceTracker.Track(_sensorChooser.Kinect.ColorStream.Format,
                                                          _colorImageData, _sensorChooser.Kinect.DepthStream.Format, depthData, firstSkeleton);

                if (frame.TrackSuccessful)
                {
                    AnalyzeFace(frame);

                    if (_thoughtBubble.Visibility != System.Windows.Visibility.Visible)
                    {
                        _thoughtBubble.Visibility = System.Windows.Visibility.Visible;
                    }
                }
            }
        }
Example #11
        private void OnAllFramesReady(object sender, AllFramesReadyEventArgs allFramesReadyEventArgs)
        {
            ColorImageFrame colorImageFrame = null;
            DepthImageFrame depthImageFrame = null;
            SkeletonFrame   skeletonFrame   = null;

            try
            {
                colorImageFrame = allFramesReadyEventArgs.OpenColorImageFrame();
                depthImageFrame = allFramesReadyEventArgs.OpenDepthImageFrame();
                skeletonFrame   = allFramesReadyEventArgs.OpenSkeletonFrame();

                if (colorImageFrame == null || depthImageFrame == null || skeletonFrame == null)
                {
                    return;
                }

                //resize the skeleton array if necessary
                if (this.skeletons.Length != skeletonFrame.SkeletonArrayLength)
                {
                    this.skeletons = new Skeleton[skeletonFrame.SkeletonArrayLength];
                }

                skeletonFrame.CopySkeletonDataTo(this.skeletons);


                if (this.colorPixelData == null || colorPixelData.Length != colorImageFrame.PixelDataLength)
                {
                    this.colorPixelData = new byte[colorImageFrame.PixelDataLength];
                }

                colorImageFrame.CopyPixelDataTo(this.colorPixelData);

                //detect the number of users and which one is nearest
                var newNearestId     = -1;
                var nearestDistance2 = double.MaxValue;
                int nUsers           = 0;
                int nTrackedUsers    = 0;
                foreach (var skeleton in skeletons)
                {
                    if (skeleton.TrackingState == SkeletonTrackingState.Tracked)
                    {
                        // Find the distance squared.
                        var distance2 = (skeleton.Position.X * skeleton.Position.X) +
                                        (skeleton.Position.Y * skeleton.Position.Y) +
                                        (skeleton.Position.Z * skeleton.Position.Z);

                        // Is the new distance squared closer than the nearest so far?
                        if (distance2 < nearestDistance2)
                        {
                            newNearestId     = skeleton.TrackingId;
                            nearestDistance2 = distance2;
                        }
                        nTrackedUsers++;
                    }

                    if (skeleton.TrackingState != SkeletonTrackingState.NotTracked)
                    {
                        nUsers++;
                    }
                }

                this.nearestId           = newNearestId;
                this.NumberOfDetectUsers = nUsers;

                //process motion detection
                if (detectMotion && motionDetector != null)
                {
                    if (this.depthPixelData == null || depthPixelData.Length != depthImageFrame.PixelDataLength) // was BytesPerPixel: wrong size check
                    {
                        this.depthPixelData = new short[depthImageFrame.PixelDataLength];
                    }

                    depthImageFrame.CopyPixelDataTo(depthPixelData);

                    var coloredPixels = new byte[colorImageFrame.PixelDataLength];
                    colorImageFrame.CopyPixelDataTo(coloredPixels);

                    var colorBitmap  = ImageProcessingAux.CreateBitmapFromPixelData(coloredPixels, colorImageFrame.Width, colorImageFrame.Height);
                    var playerBitmap = ImageProcessingAux.ProccessPlayerPixels(coloredPixels, depthPixelData, colorImageFrame.Format);

                    motionDetector.ProcessFrame(ref colorBitmap, ref playerBitmap);
                }

                //process face detection
                if (trackFace && faceTracker != null)
                {
                    faceTracker.Track(skeletons, colorImageFrame, depthImageFrame, nearestId);
                }

                //process gestures
                if (gestureDetectorsValue.Count > 0)
                {
                    foreach (Skeleton s in skeletons)
                    {
                        if (s.TrackingId == nearestId)
                        {
                            foreach (GestureDetector gd in gestureDetectorsValue)
                            {
                                gd.Add(s.Joints[gd.TrackedJoint].Position, kinectSensorValue);
                            }
                        }
                    }
                }

                //process postures
                if (postureDetectorsValue.Count > 0)
                {
                    foreach (Skeleton s in skeletons)
                    {
                        if (s.TrackingId == nearestId)
                        {
                            foreach (PostureDetector pd in postureDetectorsValue)
                            {
                                pd.Add(s, kinectSensorValue);
                            }
                        }
                    }
                }
            }

            finally
            {
                if (colorImageFrame != null)
                {
                    colorImageFrame.Dispose();
                }

                if (depthImageFrame != null)
                {
                    depthImageFrame.Dispose();
                }

                if (skeletonFrame != null)
                {
                    skeletonFrame.Dispose();
                }
            }
        }
Example #12
        private void OnAllFramesReady(object sender, AllFramesReadyEventArgs allFramesReadyEventArgs)
        {
            ColorImageFrame colorImageFrame = null;
            DepthImageFrame depthImageFrame = null;
            SkeletonFrame   skeletonFrame   = null;

            File.AppendAllText("mouseLog.txt", DateTime.Now + " - All Kinect frames ready.\n");
            try
            {
                colorImageFrame = allFramesReadyEventArgs.OpenColorImageFrame();
                depthImageFrame = allFramesReadyEventArgs.OpenDepthImageFrame();
                skeletonFrame   = allFramesReadyEventArgs.OpenSkeletonFrame();

                if (colorImageFrame == null || depthImageFrame == null || skeletonFrame == null)
                {
                    File.AppendAllText("mouseLog.txt", DateTime.Now + " - Color- depth or Skeletonframe is null. Aborting Frame.\n");
                    return;
                }
                else //if (status == StatusEnum.ReadyActivated || status == StatusEnum.Initializing || status == StatusEnum.UnreadyDeactivated)
                {
                    // Check for image format changes.  The FaceTracker doesn't
                    // deal with that so we need to reset.
                    HandlePossibleImageFormatChanges(colorImageFrame, depthImageFrame);
                    WriteDataToMembers(colorImageFrame, depthImageFrame, skeletonFrame);
                    Skeleton activeSkeleton = null;
                    activeSkeleton = (from skel in this.SkeletonData where skel.TrackingState == SkeletonTrackingState.Tracked select skel).FirstOrDefault();

                    if (activeSkeleton != null)
                    {
                        File.AppendAllText("mouseLog.txt", DateTime.Now + " - Skeleton is there. Trying to find face.\n");
                        FaceTrackFrame currentFaceFrame = faceTracker.Track(ColorImageFormat.RgbResolution640x480Fps30, colorImage, depthImageFormat, depthImage, activeSkeleton);
                        LogFaceDetection(currentFaceFrame);

                        //Get relevant Points for blink detection
                        //Left eye
                        int    minX    = (int)Math.Round(currentFaceFrame.GetProjected3DShape()[FeaturePoint.AboveOneFourthLeftEyelid].X);
                        int    minY    = (int)Math.Round(currentFaceFrame.GetProjected3DShape()[FeaturePoint.AboveOneFourthLeftEyelid].Y);
                        int    maxX    = (int)Math.Round(currentFaceFrame.GetProjected3DShape()[FeaturePoint.BelowThreeFourthLeftEyelid].X);
                        int    maxY    = (int)Math.Round(currentFaceFrame.GetProjected3DShape()[FeaturePoint.BelowThreeFourthLeftEyelid].Y);
                        Bitmap leftEye = EyeExtract(colorImageFrame, currentFaceFrame, minX, minY, maxX, maxY, false);

                        //Right eye
                        minX = (int)Math.Round(currentFaceFrame.GetProjected3DShape()[FeaturePoint.AboveThreeFourthRightEyelid].X);
                        minY = (int)Math.Round(currentFaceFrame.GetProjected3DShape()[FeaturePoint.AboveThreeFourthRightEyelid].Y);
                        maxX = (int)Math.Round(currentFaceFrame.GetProjected3DShape()[FeaturePoint.OneFourthBottomRightEyelid].X);
                        maxY = (int)Math.Round(currentFaceFrame.GetProjected3DShape()[FeaturePoint.OneFourthBottomRightEyelid].Y);

                        Bitmap rightEye  = EyeExtract(colorImageFrame, currentFaceFrame, minX, minY, maxX, maxY, true);
                        Bitmap leftEye2  = null;
                        Bitmap rightEye2 = null;
                        if (leftEye != null)
                        {
                            leftEye2 = new Bitmap(leftEye);
                        }
                        if (rightEye != null)
                        {
                            rightEye2 = new Bitmap(rightEye);
                        }

                        this.pbLeft.BeginInvoke((MethodInvoker)(() => this.pbLeft.Image = rightEye2));
                        this.pbRight.BeginInvoke((MethodInvoker)(() => this.pbRight.Image = leftEye2)); // was pbLeft.BeginInvoke: copy-paste slip

                        //Apply an edge filter to both eyes.
                        if (rightEye != null && leftEye != null)
                        {
                            Dictionary <string, int> angleCount;
                            Bitmap edgePicRight   = Convolution(ConvertGrey(rightEye), true, out angleCount);
                            bool   rightEyeClosed = IsEyeClosed(angleCount);
                            Bitmap edgePicLeft    = Convolution(ConvertGrey(leftEye), false, out angleCount);
                            bool   leftEyeClosed  = IsEyeClosed(angleCount);

                            if (rightEyeClosedHistory.Count > 100)
                            {
                                rightEyeClosedHistory.RemoveAt(0);
                            }
                            if (leftEyeClosedHistory.Count > 100)
                            {
                                leftEyeClosedHistory.RemoveAt(0);
                            }
                            leftEyeClosedHistory.Add(leftEyeClosed);
                            rightEyeClosedHistory.Add(rightEyeClosed);

                            //If Face is rotated, move Mouse
                            MoveMouseAccordingToFaceRotation(currentFaceFrame);
                        }
                        else
                        {
                            File.AppendAllText("mouseLog.txt", DateTime.Now + " - Face recognized but couldn't find eye in face.\n");
                        }
                        clickDelay++;

                        headRotationHistory.Add(currentFaceFrame.Rotation);
                        if (headRotationHistory.Count >= 100)
                        {
                            headRotationHistory.RemoveAt(0);
                        }
                    }
                    else
                    {
                        File.AppendAllText("mouseLog.txt", DateTime.Now + " - Active Skeleton is null. Couldn't analyze frame.\n");
                    }
                }
            }
            catch (Exception e)
            {
                File.AppendAllText("mouseLog.txt", DateTime.Now + " - Error during frame analyzation.\n" + e.ToString());
            }
            finally
            {
                if (colorImageFrame != null)
                {
                    colorImageFrame.Dispose();
                }

                if (depthImageFrame != null)
                {
                    depthImageFrame.Dispose();
                }

                if (skeletonFrame != null)
                {
                    skeletonFrame.Dispose();
                }
            }
        }
Example #13
        /// <summary>
        /// Handles the AllFramesReady event of the kinectSensor control.
        /// </summary>
        /// <param name="sender">The source of the event.</param>
        /// <param name="e">The <see cref="Microsoft.Kinect.AllFramesReadyEventArgs"/> instance containing the event data.</param>
        void kinectSensor_AllFramesReady(object sender, AllFramesReadyEventArgs e)
        {
            // Retrieve each single frame and copy the data
            using (ColorImageFrame colorImageFrame = e.OpenColorImageFrame())
            {
                if (colorImageFrame == null)
                {
                    return;
                }
                colorImageFrame.CopyPixelDataTo(colorPixelData);
            }

            using (DepthImageFrame depthImageFrame = e.OpenDepthImageFrame())
            {
                if (depthImageFrame == null)
                {
                    return;
                }
                depthImageFrame.CopyPixelDataTo(depthPixelData);
            }

            using (SkeletonFrame skeletonFrame = e.OpenSkeletonFrame())
            {
                if (skeletonFrame == null)
                {
                    return;
                }
                skeletonFrame.CopySkeletonDataTo(skeletonData);
            }

            // Retrieve the first tracked skeleton if any. Otherwise, do nothing.
            var skeleton = skeletonData.FirstOrDefault(s => s.TrackingState == SkeletonTrackingState.Tracked);

            if (skeleton == null)
            {
                return;
            }

            // Make the faceTracker processing the data.
            FaceTrackFrame faceFrame = faceTracker.Track(kinectSensor.ColorStream.Format, colorPixelData,
                                                         kinectSensor.DepthStream.Format, depthPixelData,
                                                         skeleton);

            // If a face is tracked, then we can use it.
            if (faceFrame.TrackSuccessful)
            {
                var triangles = faceFrame.GetTriangles();
                // Retrieve only the Animation Units coeffs.
                var AUCoeff = faceFrame.GetAnimationUnitCoefficients();

                var jawLowerer = AUCoeff[AnimationUnit.JawLower];
                jawLowerer = jawLowerer < 0 ? 0 : jawLowerer;
                MouthScaleTransform.ScaleY = jawLowerer * 5 + 0.1;
                MouthScaleTransform.ScaleX = (AUCoeff[AnimationUnit.LipStretcher] + 1);

                LeftBrow.Y            = RightBrow.Y = (AUCoeff[AnimationUnit.BrowLower]) * 40;
                RightBrowRotate.Angle = (AUCoeff[AnimationUnit.BrowRaiser] * 20);
                LeftBrowRotate.Angle  = -RightBrowRotate.Angle;
                CanvasRotate.Angle    = -faceFrame.Rotation.Z;
                // CanvasTranslate.X = faceFrame.Translation.X;
                // CanvasTranslate.Y = faceFrame.Translation.Y;

                if (logToFile)
                {
                    writeToFile(filename_txt.Text, faceFrame);
                }

                if (writeToOSC)
                {
                    sendOsc(osc_channel_txt.Text, faceFrame, oscWriter);
                }
            }
        }
Example #14
        private void OnAllFramesReady(object sender, Microsoft.Kinect.AllFramesReadyEventArgs allFramesReadyEventArgs)
        {
            ColorImageFrame colorImageFrame = null;
            DepthImageFrame depthImageFrame = null;
            SkeletonFrame   skeletonFrame   = null;

            try
            {
                colorImageFrame = allFramesReadyEventArgs.OpenColorImageFrame();
                depthImageFrame = allFramesReadyEventArgs.OpenDepthImageFrame();
                skeletonFrame   = allFramesReadyEventArgs.OpenSkeletonFrame();

                if (colorImageFrame == null || depthImageFrame == null || skeletonFrame == null)
                {
                    return;
                }

                // Check for image format changes.  The FaceTracker doesn't
                // deal with that so we need to reset.
                if (this.depthImageFormat != depthImageFrame.Format)
                {
                    this.depthImage       = null;
                    this.depthImageFormat = depthImageFrame.Format;
                }

                if (this.colorImageFormat != colorImageFrame.Format)
                {
                    this.colorImage       = null;
                    this.colorImageFormat = colorImageFrame.Format;
                }

                // Create any buffers to store copies of the data we work with
                if (this.depthImage == null)
                {
                    this.depthImage = new short[depthImageFrame.PixelDataLength];
                }

                if (this.colorImage == null)
                {
                    this.colorImage = new byte[colorImageFrame.PixelDataLength];
                }

                // Get the skeleton information
                if (this.SkeletonData == null || this.SkeletonData.Length != skeletonFrame.SkeletonArrayLength)
                {
                    this.SkeletonData = new Skeleton[skeletonFrame.SkeletonArrayLength];
                }

                colorImageFrame.CopyPixelDataTo(this.colorImage);
                depthImageFrame.CopyPixelDataTo(this.depthImage);
                skeletonFrame.CopySkeletonDataTo(this.SkeletonData);
                Skeleton activeSkeleton = null;
                activeSkeleton = (from skel in this.SkeletonData where skel.TrackingState == SkeletonTrackingState.Tracked select skel).FirstOrDefault();


                //Idea: Separate Eye-Parts of Color Image
                //Use learning Algorithm for right and left eye
                //Detect blink on separated parts of color Image

                //colorImage is a one-dimensional array of 640 x 480 x 4 (BGRA) byte values


                if (activeSkeleton != null)
                {
                    FaceTrackFrame currentFaceFrame = faceTracker.Track(ColorImageFormat.RgbResolution640x480Fps30, colorImage, depthImageFormat, depthImage, activeSkeleton);
                    float          browRaiserValue  = currentFaceFrame.GetAnimationUnitCoefficients()[AnimationUnit.BrowRaiser];
                    float          browLowererValue = currentFaceFrame.GetAnimationUnitCoefficients()[AnimationUnit.BrowLower];
                    tbBrowLowerer.Text = browLowererValue.ToString();
                    tbBrowRaiser.Text  = browRaiserValue.ToString();
                    //Get relevant Points for blink detection
                    //Left eye
                    int    minX    = (int)Math.Round(currentFaceFrame.GetProjected3DShape()[FeaturePoint.AboveOneFourthLeftEyelid].X);
                    int    minY    = (int)Math.Round(currentFaceFrame.GetProjected3DShape()[FeaturePoint.AboveOneFourthLeftEyelid].Y);
                    int    maxX    = (int)Math.Round(currentFaceFrame.GetProjected3DShape()[FeaturePoint.BelowThreeFourthLeftEyelid].X);
                    int    maxY    = (int)Math.Round(currentFaceFrame.GetProjected3DShape()[FeaturePoint.BelowThreeFourthLeftEyelid].Y);
                    Bitmap leftEye = EyeExtract(colorImageFrame, currentFaceFrame, minX, minY, maxX, maxY, false);
                    pbLeftEye.Image = leftEye;

                    //Right eye
                    minX = (int)Math.Round(currentFaceFrame.GetProjected3DShape()[FeaturePoint.AboveThreeFourthRightEyelid].X);
                    minY = (int)Math.Round(currentFaceFrame.GetProjected3DShape()[FeaturePoint.AboveThreeFourthRightEyelid].Y);
                    maxX = (int)Math.Round(currentFaceFrame.GetProjected3DShape()[FeaturePoint.OneFourthBottomRightEyelid].X);
                    maxY = (int)Math.Round(currentFaceFrame.GetProjected3DShape()[FeaturePoint.OneFourthBottomRightEyelid].Y);

                    Bitmap rightEye = EyeExtract(colorImageFrame, currentFaceFrame, minX, minY, maxX, maxY, true);
                    pbRightEye.Image = rightEye;

                    //Apply an edge filter to both eyes.
                    double dxRight;
                    double dyRight;
                    double dxLeft;
                    double dyLeft;
                    if (rightEye != null && leftEye != null)
                    {
                        Bitmap edgePicRight = Convolution(ConvertGrey(rightEye), true, out dxRight, out dyRight);
                        Bitmap edgePicLeft  = Convolution(ConvertGrey(leftEye), false, out dxLeft, out dyLeft);



                        //If Face is rotated, move Mouse
                        if (headRotationHistory.Count > filterLength && currentFaceFrame.TrackSuccessful)
                        {
                            int x = 0;
                            int y = 0;

                            //Method 1: without smoothing
                            //ScaleXY(currentFaceFrame.Rotation, out x, out y);
                            //MouseControl.Move(x, y);

                            ////Method 2: smoothing over the last x frames:
                            //int i = 0;
                            //Vector3DF rotationMedium = new Vector3DF();
                            //while (i < 10 && headRotationHistory.Count - 1 > i)
                            //{
                            //    i++;
                            //    rotationMedium.X += headRotationHistory[headRotationHistory.Count - 1 - i].X;
                            //    rotationMedium.Y += headRotationHistory[headRotationHistory.Count - 1 - i].Y;
                            //}
                            //rotationMedium.X = rotationMedium.X / i;
                            //rotationMedium.Y = rotationMedium.Y / i;
                            //ScaleXY(rotationMedium, out x, out y);
                            //MouseControl.Move(x, y);

                            //Method 3: Gaussian filter: weight the most recent frames more strongly.



                            Vector3DF rotationMedium = new Vector3DF();
                            rotationMedium.X = currentFaceFrame.Rotation.X * gaussFilter[0];
                            rotationMedium.Y = currentFaceFrame.Rotation.Y * gaussFilter[0];
                            int i = 0;
                            while (i < filterLength - 1)
                            {
                                i++;
                                rotationMedium.X += (headRotationHistory[headRotationHistory.Count - 1 - i].X * gaussFilter[i]);
                                rotationMedium.Y += (headRotationHistory[headRotationHistory.Count - 1 - i].Y * gaussFilter[i]);
                            }
                            rotationMedium.X = (float)(rotationMedium.X / gaussFactor);
                            rotationMedium.Y = (float)(rotationMedium.Y / gaussFactor);
                            ScaleXY(rotationMedium, out x, out y);

                            MouseControl.Move(x, y);
                            //Method 4: quadratic smoothing
                            //double deltaX = ((-currentFaceFrame.Rotation.Y) - (-headRotationHistory.Last().Y));
                            //double deltaY = ((-currentFaceFrame.Rotation.X) - (-headRotationHistory.Last().X));
                            //if (deltaX < 0)
                            //    deltaX = -Math.Pow(deltaX, 2) * 4;
                            //else
                            //    deltaX = Math.Pow(deltaX, 2) * 4;
                            //if (deltaY < 0)
                            //    deltaY = -Math.Pow(deltaY, 2) * 5;
                            //else
                            //    deltaY = Math.Pow(deltaY, 2) * 5;
                            //MouseControl.DeltaMove((int)Math.Round(deltaX, 0), (int)Math.Round(deltaY));
                        }

                        headRotationHistory.Add(currentFaceFrame.Rotation);
                        if (headRotationHistory.Count >= 100)
                        {
                            headRotationHistory.RemoveAt(0);
                        }
                    }
                }
            }
            catch (Exception)
            {
                // Intentionally swallow per-frame tracking errors so one bad frame does not stop the handler.
            }
            finally
            {
                if (colorImageFrame != null)
                {
                    colorImageFrame.Dispose();
                }

                if (depthImageFrame != null)
                {
                    depthImageFrame.Dispose();
                }

                if (skeletonFrame != null)
                {
                    skeletonFrame.Dispose();
                }
            }
        }
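Method 3 above assumes gaussFilter holds precomputed weights (index 0 being the newest frame) and gaussFactor their sum. The original initialization is not shown; a minimal sketch of how such weights could be produced:

        // Hypothetical initialization of the Gaussian smoothing weights used in Method 3.
        private double[] gaussFilter;
        private double gaussFactor;

        private void InitGaussFilter(int filterLength, double sigma)
        {
            gaussFilter = new double[filterLength];
            gaussFactor = 0;
            for (int i = 0; i < filterLength; i++)
            {
                // Weight decays with frame age, so recent rotations dominate the average.
                gaussFilter[i] = Math.Exp(-(i * i) / (2 * sigma * sigma));
                gaussFactor   += gaussFilter[i];
            }
        }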
Example #15
        void kinectSensor_AllFramesReady(object sender, AllFramesReadyEventArgs e)
        {
            // Retrieve each single frame and copy the data
            using (ColorImageFrame colorImageFrame = e.OpenColorImageFrame())
            {
                if (colorImageFrame == null)
                {
                    return;
                }
                colorImageFrame.CopyPixelDataTo(colorPixelData);
                //int strade = colorImageFrame.Width * 4;
                //image1.Source = BitmapSource.Create(colorImageFrame.Width, colorImageFrame.Height, 96, 96,
                //                                    PixelFormats.Bgr32, null, colorPixelData, strade);
            }

            using (DepthImageFrame depthImageFrame = e.OpenDepthImageFrame())
            {
                if (depthImageFrame == null)
                {
                    return;
                }
                depthImageFrame.CopyPixelDataTo(depthPixelData);
            }

            using (SkeletonFrame skeletonFrame = e.OpenSkeletonFrame())
            {
                if (skeletonFrame == null)
                {
                    return;
                }
                skeletonFrame.CopySkeletonDataTo(skeletonData);
            }

            // Retrieve the first tracked skeleton if any. Otherwise, do nothing.
            var skeleton = skeletonData.FirstOrDefault(s => s.TrackingState == SkeletonTrackingState.Tracked);

            if (skeleton == null && !sessionClose)
            {
                serialP.WriteLine("s");
                serialP.WriteLine("c");
                serialP.WriteLine("p");
                serialP.WriteLine("g");
                if (isActive)
                {
                    isActive = false;
                }

                slejenie           = false;
                activatorRightHand = 0;
                activatorLeftHand  = false;
                firstMeet          = false;

                sessionClose = true;
                return;
            }
            else if (skeleton != null && !firstMeet)
            {
                serialP.WriteLine("i");
                playsound(comms[0]);
                firstMeet    = true;
                sessionClose = false;
            }
            if (sessionClose)
            {
                return;
            }
            // Make the faceTracker processing the data.
            FaceTrackFrame faceFrame = faceTracker.Track(kinectSensor.ColorStream.Format, colorPixelData,
                                                         kinectSensor.DepthStream.Format, depthPixelData,
                                                         skeleton);

            EnumIndexableCollection <FeaturePoint, PointF> facePoints = faceFrame.GetProjected3DShape();


            // Joints of the hands, head, and shoulder center - used to detect gestures such as HELLO
            Joint shoulderCenter = skeleton.Joints[JointType.ShoulderCenter];
            Joint head           = skeleton.Joints[JointType.Head];
            Joint rightHand      = skeleton.Joints[JointType.HandRight];
            Joint leftHand       = skeleton.Joints[JointType.HandLeft];

            // initialize sound for hello
            //SoundPlayer a = new SoundPlayer("C:\\sal.wav");


            // open stream for uart reading
            //serialP.Open();

            // Lip corner points - used to detect a smile
            double x1 = facePoints[88].X;
            double y1 = facePoints[88].Y;

            System.Windows.Point leftLip = new System.Windows.Point(x1, y1);
            double x2 = facePoints[89].X;
            double y2 = facePoints[89].Y;

            System.Windows.Point rightLip = new System.Windows.Point(x2, y2);
            Vector subtr = System.Windows.Point.Subtract(leftLip, rightLip);

            // distance between kinect and human
            distance = skeleton.Position.Z * 100;

            // distance between two corners of lip
            double length = Math.Sqrt(subtr.X * subtr.X + subtr.Y * subtr.Y);

            int check = 100;

            double angle1 = 0d;
            double angle2 = 0d;
            double angle  = skeleton.Position.X * 100;

            #region "Smile deterine"
            if (distance >= 95 && distance < 110)
            {
                check = 22;
            }
            else if (distance >= 110 && distance < 120)
            {
                check = 19;
            }
            else if (distance >= 120 && distance < 130)
            {
                check = 18;
            }
            else if (distance >= 130 && distance < 140)
            {
                check = 17;
            }
            else if (distance >= 140 && distance < 150)
            {
                check = 16;
            }
            else if (distance >= 150 && distance < 160)
            {
                check = 14;
            }
            else if (distance >= 160 && distance < 170)
            {
                check = 13;
            }
            else if (distance >= 170 && distance < 180)
            {
                check = 12;
            }
            else if (distance >= 180 && distance < 190)
            {
                check = 11;
            }

            #endregion

            #region "Angle"
            if (distance >= 90 && distance < 110)
            {
                angle1 = -15;
                angle2 = 15;
            }
            else if (distance >= 110 && distance < 150)
            {
                angle1 = -20;
                angle2 = 20;
            }
            else if (distance >= 150 && distance < 170)
            {
                angle1 = -30;
                angle2 = 30;
            }
            else if (distance >= 170 && distance < 200)
            {
                angle1 = -35;
                angle2 = 35;
            }
            else if (distance >= 200)
            {
                angle1 = -40;
                angle2 = 40;
            }
            #endregion

            double condition1 = Math.Abs(leftHand.Position.Z * 100 - shoulderCenter.Position.Z * 100);
            double condition2 = Math.Abs(rightHand.Position.Z * 100 - shoulderCenter.Position.Z * 100);

            // If both hands are held far enough in front of the shoulders, start following the object
            if (condition1 > 45 &&
                condition2 > 45 &&
                leftHand.Position.X < rightHand.Position.X)
            {
                if (!slejenie)
                {
                    isActive       = true;
                    FIXED_DISTANCE = distance;
                    slejenie       = true;
                }
            }

            // The command to stop following the object
            if (leftHand.Position.X > rightHand.Position.X)
            {
                isActive = false;
            }

            // Following the object
            if (isActive)
            {
                int pinkIs   = (int)typeCondition.THIRD;
                int purpleIs = (int)typeCondition.FORTH;
                int redIs    = (int)typeCondition.FIVTH;
                int yellowIs = (int)typeCondition.SIXTH;

                if (distance > FIXED_DISTANCE + 10.0d)
                {
                    if (angle < angle1)
                    {
                        ellipseSmile.Fill = Brushes.Pink;
                        if (currentAction != pinkIs)//turn right
                        {
                            currentAction = pinkIs;
                            serialP.WriteLine("r");
                        }
                    }
                    else if (angle > angle2)//turn left
                    {
                        ellipseSmile.Fill = Brushes.Purple;
                        if (currentAction != purpleIs)
                        {
                            currentAction = purpleIs;
                            serialP.WriteLine("l");
                        }
                    }
                    else
                    {
                        ellipseSmile.Fill = Brushes.Red;
                        if (currentAction != redIs)// forward
                        {
                            currentAction = redIs;
                            serialP.WriteLine("f");
                        }
                    }
                }
                else if (distance > 90)
                {
                    if (angle < angle1)
                    {
                        ellipseSmile.Fill = Brushes.Pink;
                        if (currentAction != pinkIs)//turn right
                        {
                            currentAction = pinkIs;
                            serialP.WriteLine("r");
                        }
                    }
                    else if (angle > angle2)
                    {
                        ellipseSmile.Fill = Brushes.Purple;
                        if (currentAction != purpleIs)// turn left
                        {
                            currentAction = purpleIs;
                            serialP.WriteLine("l");
                        }
                    }
                    else
                    {
                        ellipseSmile.Fill = Brushes.Yellow;
                        if (currentAction != yellowIs)//stop, or do nothing
                        {
                            currentAction = yellowIs;
                            serialP.WriteLine("s");
                        }
                    }
                }
                else
                {
                    ellipseSmile.Fill = Brushes.Yellow;
                    if (currentAction != yellowIs)//stop, or do nothing
                    {
                        currentAction = yellowIs;
                        serialP.WriteLine("s");
                    }
                }
            }


            // if following the object is turned off
            else if (!isActive)
            {
                int blueIs  = (int)typeCondition.FIRST;
                int blackIs = (int)typeCondition.SECOND;
                int onkol   = (int)typeCondition.SEVENTH;

                if (leftHand.Position.Y > head.Position.Y && rightHand.Position.Y < shoulderCenter.Position.Y)
                {
                    ellipseSmile.Fill = Brushes.Blue;
                    if (currentAction != blueIs && !activatorLeftHand)//wave hello with the left hand

                    {
                        currentAction = blueIs;
                        serialP.WriteLine("q");
                        activatorLeftHand = true;
                    }
                }

                else if (rightHand.Position.Y > head.Position.Y && leftHand.Position.Y < shoulderCenter.Position.Y)
                {
                    ellipseSmile.Fill = Brushes.Blue;
                    if (currentAction != onkol && activatorRightHand != 12)//wave hello with the right hand
                    {
                        currentAction = onkol;
                        serialP.WriteLine("w");
                        activatorRightHand = 12;
                    }
                }

                else
                {
                    ellipseSmile.Fill = Brushes.Black;
                    if (currentAction != blackIs)// stop, or do nothing
                    {
                        currentAction = blackIs;
                        serialP.WriteLine("s");
                    }


                    if (currentAction == blackIs)
                    {
                        if (length >= check && currentFace != (int)faceConditions.FIRST)
                        {
                            serialP.WriteLine("z"); // smile
                            currentFace       = (int)faceConditions.FIRST;
                            ellipseSmile.Fill = Brushes.Brown;
                        }
                        else if (length < check && currentFace != (int)faceConditions.SECOND)
                        {
                            serialP.WriteLine("x"); // poker face
                            currentFace       = (int)faceConditions.SECOND;
                            ellipseSmile.Fill = Brushes.Gold;
                        }

                        #region "head turns"
                        if (angle < angle1)
                        {
                            ellipseSmile.Fill = Brushes.Pink;
                            if (!headToRight)// head turned to the right
                            {
                                headToRight  = true;
                                headToCenter = false;
                                headToLeft   = false;
                                serialP.WriteLine("k");
                            }
                        }
                        else if (angle > angle2)// head turned to the left
                        {
                            if (!headToLeft)
                            {
                                headToLeft   = true;
                                headToCenter = false;
                                headToRight  = false;
                                serialP.WriteLine("j");
                            }
                        }
                        else if (angle < angle2 && angle > angle1)// head in the center
                        {
                            if (!headToCenter)
                            {
                                headToCenter = true;
                                headToRight  = false;
                                headToLeft   = false;
                                serialP.WriteLine("p");
                            }
                        }
                        #endregion
                    }
                    else if (!faceFrame.TrackSuccessful && currentFace != (int)faceConditions.NONE)
                    {
                        serialP.WriteLine("c"); // sad face
                        currentFace       = (int)faceConditions.NONE;
                        ellipseSmile.Fill = Brushes.Chocolate;
                    }
                }
            }

            label2.Content = distance.ToString();
            //label1.Content = (leftHand.Position.Z * 100).ToString();
            //label3.Content = (shoulderCenter.Position.Z * 100).ToString();

            //serialP.Close();
        }
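
        // For context: a minimal sketch of the receiving end of the single-character
        // serial protocol used above. This is not from the original example; the port
        // name, baud rate, and console actions are assumptions. Only the command
        // letters mirror the serialP.WriteLine calls.
        private static void ListenForRobotCommands()
        {
            using (var port = new System.IO.Ports.SerialPort("COM3", 9600))
            {
                port.Open();
                while (true)
                {
                    switch (port.ReadLine().Trim())
                    {
                        case "r": Console.WriteLine("turn right"); break;
                        case "l": Console.WriteLine("turn left"); break;
                        case "s": Console.WriteLine("stop"); break;
                        case "q": Console.WriteLine("wave back (left hand seen)"); break;
                        case "w": Console.WriteLine("wave back (right hand seen)"); break;
                        case "z": Console.WriteLine("smile"); break;
                        case "x": Console.WriteLine("poker face"); break;
                        case "c": Console.WriteLine("sad face"); break;
                        case "k": Console.WriteLine("head right"); break;
                        case "j": Console.WriteLine("head left"); break;
                        case "p": Console.WriteLine("head centered"); break;
                    }
                }
            }
        }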
Exemplo n.º 16
0
        public AppModel()
        {
            KinectManager.SensorConnected
            .Subscribe(sensor =>
            {
                sensor.ColorStream.Enable(ColorImageFormat.RgbResolution640x480Fps30);
                sensor.DepthStream.Enable(DepthImageFormat.Resolution320x240Fps30);
                sensor.SkeletonStream.EnableWithDefaultSmoothing();

                try
                {
                    sensor.Start();
                }
                catch (Exception ex)
                {
                    // This occurs when the sensor is already in use by another process.
                    Debug.WriteLine(ex);
                }
            });
            KinectManager.SensorDisconnected
            .Subscribe(sensor => sensor.Stop());
            KinectManager.Initialize();

            // Frame data.
            var frameData = Observable.Interval(FramesInterval)
                            .Select(_ => new
            {
                Sensor    = KinectManager.Sensor.Value,
                ColorData = KinectManager.Sensor.Value.GetColorData(FramesInterval),
                DepthData = KinectManager.Sensor.Value.GetDepthDataInInt16(FramesInterval),
                BodyData  = KinectManager.Sensor.Value.GetSkeletonData(FramesInterval),
            })
                            .ToReadOnlyReactiveProperty(null, ReactivePropertyMode.DistinctUntilChanged);

            // The target person.
            TargetBody = frameData
                         .Where(_ => _.BodyData != null)
                         .Select(_ => GetTargetBody(_.BodyData, TargetBody.Value))
                         .ToReadOnlyReactiveProperty(null, ReactivePropertyMode.DistinctUntilChanged);

            // Whether a target person exists.
            HasTargetBody = TargetBody
                            .Select(b => b != null)
                            .ToReadOnlyReactiveProperty();

            // The hand inside the hit zone. The z component is the offset from the body.
            HitHand = TargetBody
                      .Select(b => GetHitHand(b, HitHand.Value))
                      .ToReadOnlyReactiveProperty(null, ReactivePropertyMode.DistinctUntilChanged);

            // Whether the hand is inside the hit zone.
            IsHandHit = HitHand
                        .Select(_ => _.HasValue)
                        .ToReadOnlyReactiveProperty();

            AreHandsAbove = TargetBody.Select(GetAreHandsAbove).ToReadOnlyReactiveProperty();
            IsSquat       = TargetBody.Select(GetIsSquat).ToReadOnlyReactiveProperty();
            IsJumping     = TargetBody.Select(GetIsJumping).ToReadOnlyReactiveProperty();

            BodyOrientation = TargetBody.Select(GetBodyOrientation).ToReadOnlyReactiveProperty();
            IsLeftOriented  = BodyOrientation.Select(x => x < -0.35).ToReadOnlyReactiveProperty();
            IsRightOriented = BodyOrientation.Select(x => x > 0.35).ToReadOnlyReactiveProperty();

            JawLower = TargetBody
                       .Select(body =>
            {
                if (body == null)
                {
                    return(0);
                }

                var data = frameData.Value;
                if (data.ColorData == null || data.DepthData == null || data.BodyData == null)
                {
                    return(0);
                }

                if (faceTracker == null)
                {
                    faceTracker = new FaceTracker(data.Sensor);
                }

                var faceFrame = faceTracker.Track(data.Sensor.ColorStream.Format, data.ColorData, data.Sensor.DepthStream.Format, data.DepthData, body);
                if (!faceFrame.TrackSuccessful)
                {
                    return(0);
                }

                var animationUnits = faceFrame.GetAnimationUnitCoefficients();
                return(animationUnits[AnimationUnit.JawLower]);
            })
                       .ToReadOnlyReactiveProperty();
            IsMouthOpen = JawLower.Select(x => x > 0.35).ToReadOnlyReactiveProperty();
        }
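
        // A minimal usage sketch (an assumption, not part of the original source):
        // subscribing to a few of the ReadOnlyReactiveProperty members exposed above.
        // Assumes using System; using System.Diagnostics; plus the ReactiveProperty
        // and System.Reactive packages, which supply the Subscribe extension used here.
        private static void ObserveAppModel()
        {
            var app = new AppModel();
            app.IsMouthOpen.Subscribe(isOpen => Debug.WriteLine("Mouth open: " + isOpen));
            app.IsLeftOriented.Subscribe(left => Debug.WriteLine("Oriented left: " + left));
            app.JawLower.Subscribe(value => Debug.WriteLine("JawLower AU: " + value));
        }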
Exemplo n.º 17
0
        private async Task FaceTrackingAsync(TimeSpan dueTime, TimeSpan interval, CancellationToken token)
        {
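            // An interval of zero means face tracking is effectively disabled, so bail out.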
            if (interval.TotalMilliseconds == 0)
            {
                return;
            }

            // Initial wait time before we begin the periodic loop.
            if (dueTime > TimeSpan.Zero)
            {
                await Task.Delay(dueTime, token);
            }

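            // Snapshot the timestamp so the loop below can detect when new frame data arrives.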
            DateTime    LocalTimestamp = Timestamp;
            FaceTracker tracker        = new FaceTracker(Sensor);

            // Repeat this loop until cancelled.
            while (!token.IsCancellationRequested)
            {
                // Skip if we have already processed this frame's data
                if (Timestamp == LocalTimestamp)
                {
                    await Task.Delay(interval, token);

                    continue;
                }

                // Record the timestamp of the data we are about to process
                LocalTimestamp = Timestamp;
                FaceTrackWatch.Again();

                // Do the actual face-tracking work
                try {
                    CopyColorData = true;
                    CopySkeletons = true;
                    FPoints       = null;
                    Mood          = 0;
                    if (null != GestureManager && null != GestureManager.Skeleton)
                    {
                        FaceTrackFrame frame = tracker.Track(ColorFormat, ColorData, DepthFormat, DepthData, GestureManager.Skeleton);
                        if (frame.TrackSuccessful)
                        {
                            // Fetch the triangles only once; they don't change.
                            if (FTriangles == null)
                            {
                                FTriangles = frame.GetTriangles();
                            }
                            FPoints = frame.GetProjected3DShape();
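                            // LipCornerDepressor grows as the mouth corners pull down, giving a rough "mood" signal.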
                            Mood    = frame.GetAnimationUnitCoefficients()[AnimationUnit.LipCornerDepressor];
                            WSRProfileManager.GetInstance().UpdateMood(Mood);
                        }
                    }
                }
                catch (Exception ex) {
                    WSRConfig.GetInstance().logError("FACE", ex);
                }
                FaceTrackWatch.Stop();

                // Wait to repeat again.
                if (interval > TimeSpan.Zero)
                {
                    await Task.Delay(interval, token);
                }
            }

            // Dispose Tracker
            tracker.Dispose();
        }
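
        // A minimal sketch (assumed, not from the original source) of how
        // FaceTrackingAsync might be started and stopped, matching the
        // dueTime/interval/token parameters above. Assumes using System;
        // using System.Threading; using System.Threading.Tasks;
        private async Task RunFaceTrackingAsync()
        {
            var cts = new CancellationTokenSource();
            Task tracking = FaceTrackingAsync(TimeSpan.FromSeconds(1), TimeSpan.FromMilliseconds(33), cts.Token);

            // ... later, on shutdown:
            cts.Cancel();
            try { await tracking; }
            catch (OperationCanceledException) { /* expected when the token is cancelled */ }
        }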
Exemplo n.º 18
0
        private void OnAllFramesReady(object sender, AllFramesReadyEventArgs allFramesReadyEventArgs)
        {
            ColorImageFrame colorImageFrame = null;
            DepthImageFrame depthImageFrame = null;
            SkeletonFrame   skeletonFrame   = null;

            File.AppendAllText("mouseLog.txt", DateTime.Now + " - All Kinect frames ready.\n");
            try
            {
                colorImageFrame = allFramesReadyEventArgs.OpenColorImageFrame();
                depthImageFrame = allFramesReadyEventArgs.OpenDepthImageFrame();
                skeletonFrame   = allFramesReadyEventArgs.OpenSkeletonFrame();

                if (colorImageFrame == null || depthImageFrame == null || skeletonFrame == null)
                {
                    File.AppendAllText("mouseLog.txt", DateTime.Now + " - Color- depth or Skeletonframe is null. Aborting Frame.\n");
                    return;
                }

                // Check for image format changes.  The FaceTracker doesn't
                // deal with that so we need to reset.
                if (this.depthImageFormat != depthImageFrame.Format)
                {
                    this.depthImage       = null;
                    this.depthImageFormat = depthImageFrame.Format;
                }

                if (this.colorImageFormat != colorImageFrame.Format)
                {
                    this.colorImage       = null;
                    this.colorImageFormat = colorImageFrame.Format;
                }

                // Create any buffers to store copies of the data we work with
                if (this.depthImage == null)
                {
                    this.depthImage = new short[depthImageFrame.PixelDataLength];
                }

                if (this.colorImage == null)
                {
                    this.colorImage = new byte[colorImageFrame.PixelDataLength];
                }


                // Get the skeleton information
                if (this.SkeletonData == null || this.SkeletonData.Length != skeletonFrame.SkeletonArrayLength)
                {
                    this.SkeletonData = new Skeleton[skeletonFrame.SkeletonArrayLength];
                }

                colorImageFrame.CopyPixelDataTo(this.colorImage);
                depthImageFrame.CopyPixelDataTo(this.depthImage);
                skeletonFrame.CopySkeletonDataTo(this.SkeletonData);
                Skeleton activeSkeleton = null;
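                // Use the first fully tracked skeleton, if any.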
                activeSkeleton = (from skel in this.SkeletonData where skel.TrackingState == SkeletonTrackingState.Tracked select skel).FirstOrDefault();

                //Idea: separate the eye regions of the color image,
                //use a learning algorithm for the right and left eye,
                //and detect blinks on the separated parts of the color image.

                //colorImage is a one-dimensional array with 640 x 480 x 4 (RGBA) values

                if (activeSkeleton != null)
                {
                    File.AppendAllText("mouseLog.txt", DateTime.Now + " - Skeleton is there. Trying to find face.\n");
                    FaceTrackFrame currentFaceFrame = faceTracker.Track(this.colorImageFormat, colorImage, depthImageFormat, depthImage, activeSkeleton);
                    if (currentFaceFrame.TrackSuccessful)
                    {
                        File.AppendAllText("mouseLog.txt", DateTime.Now + " - Recognized face successfully.\n");
                    }
                    else
                    {
                        File.AppendAllText("mouseLog.txt", DateTime.Now + " - Couldn't find face in frame.\n");
                    }

                    //Get relevant Points for blink detection
                    //Left eye
                    int    minX    = (int)Math.Round(currentFaceFrame.GetProjected3DShape()[FeaturePoint.AboveOneFourthLeftEyelid].X);
                    int    minY    = (int)Math.Round(currentFaceFrame.GetProjected3DShape()[FeaturePoint.AboveOneFourthLeftEyelid].Y);
                    int    maxX    = (int)Math.Round(currentFaceFrame.GetProjected3DShape()[FeaturePoint.BelowThreeFourthLeftEyelid].X);
                    int    maxY    = (int)Math.Round(currentFaceFrame.GetProjected3DShape()[FeaturePoint.BelowThreeFourthLeftEyelid].Y);
                    Bitmap leftEye = EyeExtract(colorImageFrame, currentFaceFrame, minX, minY, maxX, maxY, false);
                    //this.pbRight.BeginInvoke((MethodInvoker)(() => this.pbRight.Image = leftEye));
                    //

                    //Right eye
                    minX = (int)Math.Round(currentFaceFrame.GetProjected3DShape()[FeaturePoint.AboveThreeFourthRightEyelid].X);
                    minY = (int)Math.Round(currentFaceFrame.GetProjected3DShape()[FeaturePoint.AboveThreeFourthRightEyelid].Y);
                    maxX = (int)Math.Round(currentFaceFrame.GetProjected3DShape()[FeaturePoint.OneFourthBottomRightEyelid].X);
                    maxY = (int)Math.Round(currentFaceFrame.GetProjected3DShape()[FeaturePoint.OneFourthBottomRightEyelid].Y);

                    Bitmap rightEye  = EyeExtract(colorImageFrame, currentFaceFrame, minX, minY, maxX, maxY, true);
                    Bitmap leftEye2  = null;
                    Bitmap rightEye2 = null;
                    if (leftEye != null)
                    {
                        leftEye2 = new Bitmap(leftEye);
                    }
                    if (rightEye != null)
                    {
                        rightEye2 = new Bitmap(rightEye);
                    }
                    // System.Delegate d = new MethodInvoker(SetPictures));
                    //   this.Invoke(SetPictures, leftEye);
                    //pbRight.Image = rightEye;
                    // Note: the eye images are swapped (the right eye goes to pbLeft),
                    // presumably so the preview behaves like a mirror.
                    this.pbLeft.BeginInvoke((MethodInvoker)(() => this.pbLeft.Image = rightEye2));
                    this.pbRight.BeginInvoke((MethodInvoker)(() => this.pbRight.Image = leftEye2));
                    // this.Invoke(new MethodInvoker(SetPictures));
                    //Apply an edge filter to both eyes.

                    if (rightEye != null && leftEye != null)
                    {
                        Dictionary <string, int> angleCount;
                        Bitmap edgePicRight   = Convolution(ConvertGrey(rightEye), true, out angleCount);
                        bool   rightEyeClosed = IsEyeClosed(angleCount);
                        Bitmap edgePicLeft    = Convolution(ConvertGrey(leftEye), false, out angleCount);
                        bool   leftEyeClosed  = IsEyeClosed(angleCount);
                        //   pbLeftFaltung.Image = edgePicLeft;
                        //   pbRightFaltung.Image = edgePicRight;
                        if (rightEyeClosedHistory.Count > 100)
                        {
                            rightEyeClosedHistory.RemoveAt(0);
                        }
                        if (leftEyeClosedHistory.Count > 100)
                        {
                            leftEyeClosedHistory.RemoveAt(0);
                        }
                        leftEyeClosedHistory.Add(leftEyeClosed);
                        rightEyeClosedHistory.Add(rightEyeClosed);

                        //If the face is rotated, move the mouse
                        if (headRotationHistory.Count > gaussFilter.Count - 1 && leftEyeClosedHistory.Count > nudConvolutionFilterLength.Value && currentFaceFrame.TrackSuccessful)
                        {
                            int   x = 0;
                            int   y = 0;
                            float browRaiserValue  = currentFaceFrame.GetAnimationUnitCoefficients()[AnimationUnit.BrowRaiser];
                            float browLowererValue = currentFaceFrame.GetAnimationUnitCoefficients()[AnimationUnit.BrowLower];
                            float mouthOpenValue   = currentFaceFrame.GetAnimationUnitCoefficients()[AnimationUnit.JawLower];
                            if (browRaiserHistory.Count >= 100)
                            {
                                browRaiserHistory.RemoveAt(0);
                                browLowererHistory.RemoveAt(0);
                                mouthOpenHistory.RemoveAt(0);
                            }
                            browLowererHistory.Add(browLowererValue);
                            browRaiserHistory.Add(browRaiserValue);
                            mouthOpenHistory.Add(mouthOpenValue);

                            //Method 1: without smoothing
                            //ScaleXY(currentFaceFrame.Rotation, out x, out y);
                            //MouseControl.Move(x, y);

                            ////Method 2: smoothing over the last x frames:
                            //int i = 0;
                            //Vector3DF rotationMedium = new Vector3DF();
                            //while (i < 10 && headRotationHistory.Count - 1 > i)
                            //{
                            //    i++;
                            //    rotationMedium.X += headRotationHistory[headRotationHistory.Count - 1 - i].X;
                            //    rotationMedium.Y += headRotationHistory[headRotationHistory.Count - 1 - i].Y;
                            //}
                            //rotationMedium.X = rotationMedium.X / i;
                            //rotationMedium.Y = rotationMedium.Y / i;
                            //ScaleXY(rotationMedium, out x, out y);
                            //MouseControl.Move(x, y);

                            //Method 3: Gaussian filter: weight the most recent frames more heavily.
                            Vector3DF rotationMedium = new Vector3DF();
                            rotationMedium.X = currentFaceFrame.Rotation.X * gaussFilter[0];
                            rotationMedium.Y = currentFaceFrame.Rotation.Y * gaussFilter[0];
                            int i = 0;
                            while (i < gaussFilter.Count - 1)
                            {
                                rotationMedium.X += (headRotationHistory[headRotationHistory.Count - 1 - i].X * gaussFilter[i]);
                                rotationMedium.Y += (headRotationHistory[headRotationHistory.Count - 1 - i].Y * gaussFilter[i]);
                                i++;
                            }
                            rotationMedium.X = (float)(rotationMedium.X / gaussFactor);
                            rotationMedium.Y = (float)(rotationMedium.Y / gaussFactor);
                            ScaleXY(rotationMedium, out x, out y);


                            //Method 4: quadratic smoothing
                            //double deltaX = ((-currentFaceFrame.Rotation.Y) - (-headRotationHistory.Last().Y));
                            //double deltaY = ((-currentFaceFrame.Rotation.X) - (-headRotationHistory.Last().X));
                            //if (deltaX < 0)
                            //    deltaX = -Math.Pow(deltaX, 2) * 4;
                            //else
                            //    deltaX = Math.Pow(deltaX, 2) * 4;
                            //if (deltaY < 0)
                            //    deltaY = -Math.Pow(deltaY, 2) * 5;
                            //else
                            //    deltaY = Math.Pow(deltaY, 2) * 5;
                            //MouseControl.DeltaMove((int)Math.Round(deltaX, 0), (int)Math.Round(deltaY));


                            //Check for a right, left, or double click
                            //1. Check if there was already a click 20 frames ago, or if drag & drop is active
                            if (clickDelay > nudClickDelay.Value && !pointNClickActive)
                            {
                                //2. If not, calculate the mean values of the dy's over the last 16 frames
                                if (CalculateMeanConvolutionValues())
                                {
                                    clickDelay = 0;
                                }
                                else
                                {
                                    //Otherwise, check for an open mouth
                                    if (mouthOpenValue > (float)nudMouthOpenStartThreshold.Value && mouthOpenHistory[mouthOpenHistory.Count - 2] > (float)nudMouthOpenConfirmation.Value && mouthOpenHistory[mouthOpenHistory.Count - 3] > (float)nudMouthOpenConfirmation.Value && mouthOpenHistory[mouthOpenHistory.Count - 4] > (float)nudMouthOpenConfirmation.Value)
                                    {
                                        MouseControl.Move(mousePositionHistory[mousePositionHistory.Count - 4].X, mousePositionHistory[mousePositionHistory.Count - 4].Y);
                                        this.lbAction.Invoke((MethodInvoker)(() => this.lbAction.Items.Add("Left Mouse Down on X: " + mousePositionHistory[mousePositionHistory.Count - 4].X + " Y: " + mousePositionHistory[mousePositionHistory.Count - 4].Y)));
                                        //lbAction.Items.Add("Left Mouse Down on X: " + mousePositionHistory[mousePositionHistory.Count - 4].X + " Y: " + mousePositionHistory[mousePositionHistory.Count - 4].Y);
                                        MouseControl.MouseDownLeft();
                                        pointNClickActive = true;
                                        clickDelay        = 0;
                                    }
                                }
                            }
                            else if (pointNClickActive)
                            {
                                if (mouthOpenValue < (float)nudMouthOpenEndThreshold.Value)
                                {
                                    this.lbAction.Invoke((MethodInvoker)(() => this.lbAction.Items.Add("Left Mouse Up on X: " + x + " Y: " + y)));
                                    MouseControl.MouseUpLeft();
                                    pointNClickActive = false;
                                    clickDelay        = 0;
                                }
                            }
                            MouseControl.Move(x, y);
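                            // Brow animation units drive scrolling; the numeric up-down controls supply the thresholds and speed multipliers.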
                            if (browLowererValue > (float)nudBrowLowererStartThreshold.Value)
                            {
                                MouseControl.ScrollDown((int)(-browLowererValue * (int)nudScrollMultiplierDown.Value));
                            }
                            if (browRaiserValue > (float)nudBrowRaiserStartThreshold.Value)
                            {
                                MouseControl.ScrollDown((int)(browRaiserValue * (int)nudScrollMultiplierUp.Value));
                            }
                            if (mousePositionHistory.Count > 100)
                            {
                                mousePositionHistory.RemoveAt(0);
                            }
                            mousePositionHistory.Add(new Microsoft.Kinect.Toolkit.FaceTracking.Point(x, y));
                            File.AppendAllText("mouseLog.txt", DateTime.Now + " - Face and eyes successfully tracked.\n");
                        }
                    }
                    else
                    {
                        File.AppendAllText("mouseLog.txt", DateTime.Now + " - Face recognized but couldn't find the eyes in the face.\n");
                    }
                    clickDelay++;

                    headRotationHistory.Add(currentFaceFrame.Rotation);
                    if (headRotationHistory.Count >= 100)
                    {
                        headRotationHistory.RemoveAt(0);
                    }
                }
                else
                {
                    File.AppendAllText("mouseLog.txt", DateTime.Now + " - Active Skeleton is null. Couldn't analyze frame.\n");
                }
            }
            catch (Exception e)
            {
                File.AppendAllText("mouseLog.txt", DateTime.Now + " - Error during frame analysis.\n" + e.ToString());
            }
            finally
            {
                if (colorImageFrame != null)
                {
                    colorImageFrame.Dispose();
                }

                if (depthImageFrame != null)
                {
                    depthImageFrame.Dispose();
                }

                if (skeletonFrame != null)
                {
                    skeletonFrame.Dispose();
                }
            }
        }
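
        // The gaussFilter weights and gaussFactor normalizer used by "Method 3" above
        // are defined elsewhere in this class. A plausible construction (the kernel
        // length and sigma are assumptions) that weights the newest frames most
        // heavily. Assumes using System; using System.Collections.Generic;
        // using System.Linq;
        private static List<double> BuildGaussFilter(int length, double sigma, out double gaussFactor)
        {
            var weights = new List<double>();
            for (int k = 0; k < length; k++)
            {
                // k = 0 corresponds to the current frame, so it gets the largest weight.
                weights.Add(Math.Exp(-(k * k) / (2.0 * sigma * sigma)));
            }
            gaussFactor = weights.Sum(); // dividing by this keeps the weighted average in the original scale
            return weights;
        }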