private RecognitionResult RunFacialRecognitionProcessor(FacialRecognitionProcessor processor)
        {
            var testFrame = new Bitmap("testframe.png");
            var recoResult = new RecognitionResult
            {
                OriginalBitmap = testFrame,
                ProcessedBitmap = (Bitmap)testFrame.Clone()
            };

            var trackingResults = Newtonsoft.Json.JsonConvert.DeserializeObject<TrackingResults>(File.ReadAllText("testframe.json"));

            // Time the recognition pass so processor implementations can be compared
            var sw = Stopwatch.StartNew();

            processor.Process(recoResult, trackingResults);

            sw.Stop();
            Debug.WriteLine($"Process completed in {sw.ElapsedMilliseconds} ms");

            return recoResult;
        }
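A minimal driver for the harness above. The parameterless constructor and output file name are assumptions for illustration, not part of the original sample:

        // Hypothetical driver: run the processor against the captured frame
        // and save the annotated output for visual inspection.
        private void RunHarness()
        {
            var processor = new FacialRecognitionProcessor();
            var result = RunFacialRecognitionProcessor(processor);
            result.ProcessedBitmap.Save("testframe_processed.png", System.Drawing.Imaging.ImageFormat.Png);
        }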
Example #2
        /// <summary>
        /// Worker thread for recognition processing
        /// </summary>
        private void RecognizerWorker_DoWork(object sender, DoWorkEventArgs e)
        {
            this.faceReady = this.multiSourceReady = false;
            var status = FaceModelBuilderCollectionStatus.Complete;

            if (!this.constructionInProcess && this.fmb != null)
            {
                status = this.fmb.CollectionStatus;
            }

            var result = new RecognitionResult();

            result.ColorSpaceBitmap = this.ImageToBitmap(this.colorImageBuffer, this.imageWidth, this.imageHeight);
            e.Result = result;

            if (this.faceModel != null && this.Processors.Any() && this.ProcessingEnabled)
            {
                var faceTrackingResult = new KinectFaceTrackingResult(this.faceModel, this.constructedFaceModel, status, this.faceAlignment, this.Kinect.CoordinateMapper);

                var rpResults = new List<IRecognitionProcessorResult>();

                foreach (var processor in this.Processors)
                {
                    rpResults.Add(processor.Process(result.ColorSpaceBitmap, faceTrackingResult));
                }

                result.Faces = new List<TrackedFace>
                {
                    new TrackedFace
                    {
                        ProcessorResults = rpResults,
                        TrackingResult   = faceTrackingResult
                    }
                };
            }
        }
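The processor loop above implies a contract along these lines. This is a sketch of the assumed shape, not the library's actual interface declaration:

        // Assumed processor contract (sketch): each processor turns a color
        // frame plus face-tracking data into a processor-specific result.
        public interface IRecognitionProcessor
        {
            IRecognitionProcessorResult Process(Bitmap colorSpaceBitmap, KinectFaceTrackingResult trackingResults);
        }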
Example #3
        /// <summary>
        /// Attempt to find a trained face in the original bitmap
        /// </summary>
        public void Process(RecognitionResult result, TrackingResults trackingResults)
        {
            GraphicsPath origPath;

            using (var g = Graphics.FromImage(result.ProcessedBitmap))
            {
                // Create a path tracing the face and draw on the processed image
                origPath = new GraphicsPath();

                foreach (var point in trackingResults.FacePoints)
                {
                    origPath.AddLine(point, point);
                }

                origPath.CloseFigure();
                using (var pen = new Pen(Color.Red, 2))
                {
                    g.DrawPath(pen, origPath);
                }
            }

            var minX = (int)origPath.PathPoints.Min(x => x.X);
            var maxX = (int)origPath.PathPoints.Max(x => x.X);
            var minY = (int)origPath.PathPoints.Min(x => x.Y);
            var maxY = (int)origPath.PathPoints.Max(x => x.Y);
            var width = maxX - minX;
            var height = maxY - minY;

            // Create a cropped path tracing the face...
            var croppedPath = new GraphicsPath();

            foreach (var point in trackingResults.FacePoints)
            {
                var croppedPoint = new System.Drawing.Point(point.X - minX, point.Y - minY);
                croppedPath.AddLine(croppedPoint, croppedPoint);
            }

            croppedPath.CloseFigure();

            // ...and create a cropped image to use for facial recognition
            using (var croppedBmp = new Bitmap(width, height))
            {
                using (var croppedG = Graphics.FromImage(croppedBmp))
                {
                    croppedG.FillRectangle(Brushes.Gray, 0, 0, width, height);
                    croppedG.SetClip(croppedPath);
                    // Shift the source image so the face region lands at the origin
                    croppedG.DrawImage(result.OriginalBitmap, -minX, -minY);
                }

                using (var grayBmp = croppedBmp.MakeGrayscale(100, 100))
                {
                    grayBmp.HistogramEqualize();

                    string key = null;
                    float eigenDistance = -1;

                    if (this.Recognizer != null)
                        key = this.Recognizer.Recognize(grayBmp, out eigenDistance);

                    // Save detection info
                    result.Faces = new List<RecognitionResult.Face>()
                    {
                        new RecognitionResult.Face()
                        {
                            TrackingResults = trackingResults,
                            EigenDistance = eigenDistance,
                            GrayFace = (Bitmap)grayBmp.Clone(),
                            Key = key
                        }
                    };
                }
            }
        }
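MakeGrayscale and HistogramEqualize are extension methods supplied elsewhere in the sample. A hedged sketch of what a MakeGrayscale along these lines could look like (resize to the requested thumbnail size, then apply a standard luminance ColorMatrix); the sample's actual helper may differ:

        using System.Drawing;
        using System.Drawing.Drawing2D;
        using System.Drawing.Imaging;

        public static class BitmapExtensions
        {
            // Sketch only: produce a width x height grayscale thumbnail.
            public static Bitmap MakeGrayscale(this Bitmap source, int width, int height)
            {
                var gray = new Bitmap(width, height);

                // Standard BT.601 luminance weights applied to the R, G and B rows.
                var matrix = new ColorMatrix(new[]
                {
                    new[] { 0.299f, 0.299f, 0.299f, 0f, 0f },
                    new[] { 0.587f, 0.587f, 0.587f, 0f, 0f },
                    new[] { 0.114f, 0.114f, 0.114f, 0f, 0f },
                    new[] { 0f,     0f,     0f,     1f, 0f },
                    new[] { 0f,     0f,     0f,     0f, 1f }
                });

                using (var g = Graphics.FromImage(gray))
                using (var attributes = new ImageAttributes())
                {
                    attributes.SetColorMatrix(matrix);
                    g.InterpolationMode = InterpolationMode.HighQualityBicubic;
                    g.DrawImage(source, new Rectangle(0, 0, width, height),
                        0, 0, source.Width, source.Height, GraphicsUnit.Pixel, attributes);
                }

                return gray;
            }
        }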
Example #4
        /// <summary>
        /// Handles recognition complete events
        /// </summary>
        private void Engine_RecognitionComplete(object sender, RecognitionResult e)
        {
            TrackedFace face = null;

            if (e.Faces != null)
                face = e.Faces.FirstOrDefault();

            using (var processedBitmap = (Bitmap)e.ColorSpaceBitmap.Clone())
            {
                if (face == null)
                {
                    this.viewModel.ReadyForTraining = false;
                }
                else
                {
                    using (var g = Graphics.FromImage(processedBitmap))
                    {
                        var isFmb = this.viewModel.ProcessorType == ProcessorTypes.FaceModel;
                        var rect = face.TrackingResult.FaceRect;
                        var faceOutlineColor = Color.Green;

                        if (isFmb)
                        {
                            if (face.TrackingResult.ConstructedFaceModel == null)
                            {
                                faceOutlineColor = Color.Red;

                                if (face.TrackingResult.BuilderStatus == FaceModelBuilderCollectionStatus.Complete)
                                    faceOutlineColor = Color.Orange;
                            }

                            var scale = (rect.Width + rect.Height) / 6;
                            var midX = rect.X + (rect.Width / 2);
                            var midY = rect.Y + (rect.Height / 2);

                            // Cached system brush avoids leaking a GDI handle every frame
                            if ((face.TrackingResult.BuilderStatus & FaceModelBuilderCollectionStatus.LeftViewsNeeded) == FaceModelBuilderCollectionStatus.LeftViewsNeeded)
                                g.FillRectangle(Brushes.Red, rect.X - (scale * 2), midY, scale, scale);

                            if ((face.TrackingResult.BuilderStatus & FaceModelBuilderCollectionStatus.RightViewsNeeded) == FaceModelBuilderCollectionStatus.RightViewsNeeded)
                                g.FillRectangle(Brushes.Red, rect.X + rect.Width + (scale * 2), midY, scale, scale);

                            if ((face.TrackingResult.BuilderStatus & FaceModelBuilderCollectionStatus.TiltedUpViewsNeeded) == FaceModelBuilderCollectionStatus.TiltedUpViewsNeeded)
                                g.FillRectangle(Brushes.Red, midX, rect.Y - (scale * 2), scale, scale);

                            if ((face.TrackingResult.BuilderStatus & FaceModelBuilderCollectionStatus.FrontViewFramesNeeded) == FaceModelBuilderCollectionStatus.FrontViewFramesNeeded)
                                g.FillRectangle(Brushes.Red, midX, midY, scale, scale);
                        }

                        this.viewModel.ReadyForTraining = faceOutlineColor == Color.Green;
                        // Output for face recognition
                        if (!check_ini)
                        {
                            check_ini = true;
                            output_data output = new output_data();
                            output.query = "_";
                            output.uid = "*****@*****.**";
                            output.reset = "true";
                            SendSpeechRequest(output);
                        }
                        using (var outlinePen = new Pen(faceOutlineColor, 5))
                        {
                            g.DrawPath(outlinePen, face.TrackingResult.GetFacePath());
                        }

                        if (!string.IsNullOrEmpty(face.Key))
                        {
                            var score = Math.Round(face.ProcessorResults.First().Score, 2);

                            // Write the key and score on the image...
                            using (var font = new Font("Arial", 100))
                            {
                                g.DrawString(face.Key + ": " + score, font, Brushes.Red, new System.Drawing.Point(rect.Left, rect.Top - 25));
                            }

                            SpeechToText();
                        }
                    }

                    if (this.takeTrainingImage)
                    {
                        var eoResult = (EigenObjectRecognitionProcessorResult)face.ProcessorResults.SingleOrDefault(x => x is EigenObjectRecognitionProcessorResult);
                        var fmResult = (FaceModelRecognitionProcessorResult)face.ProcessorResults.SingleOrDefault(x => x is FaceModelRecognitionProcessorResult);

                        var bstf = new BitmapSourceTargetFace();
                        bstf.Key = this.viewModel.TrainName;

                        if (eoResult != null)
                        {
                            bstf.Image = (Bitmap)eoResult.Image.Clone();
                        }
                        else
                        {
                            bstf.Image = face.TrackingResult.GetCroppedFace(e.ColorSpaceBitmap);
                        }

                        if (fmResult != null)
                        {
                            bstf.Deformations = fmResult.Deformations;
                            bstf.HairColor = fmResult.HairColor;
                            bstf.SkinColor = fmResult.SkinColor;
                        }

                        this.viewModel.TargetFaces.Add(bstf);

                        this.SerializeBitmapSourceTargetFace(bstf);

                        this.takeTrainingImage = false;

                        this.UpdateTargetFaces();
                    }
                }

                this.viewModel.CurrentVideoFrame = LoadBitmap(processedBitmap);

            }

            // Without an explicit call to GC.Collect here, memory runs out of control :(
            GC.Collect();
        }
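LoadBitmap is a helper defined elsewhere in the sample. A hedged sketch of the usual GDI+-to-WPF interop it likely wraps; note that without the DeleteObject call the HBitmap handle leaks, which fits the memory pressure the GC.Collect comment describes:

        using System;
        using System.Runtime.InteropServices;
        using System.Windows;
        using System.Windows.Interop;
        using System.Windows.Media.Imaging;

        [DllImport("gdi32.dll")]
        static extern bool DeleteObject(IntPtr hObject);

        // Sketch only: convert a GDI+ Bitmap into a WPF BitmapSource.
        static BitmapSource LoadBitmap(System.Drawing.Bitmap source)
        {
            var hBitmap = source.GetHbitmap();

            try
            {
                return Imaging.CreateBitmapSourceFromHBitmap(
                    hBitmap, IntPtr.Zero, Int32Rect.Empty, BitmapSizeOptions.FromEmptyOptions());
            }
            finally
            {
                DeleteObject(hBitmap); // release the unmanaged GDI handle
            }
        }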
Example #5
        /// <summary>
        /// Handles recognition complete events
        /// </summary>
        private void Engine_RecognitionComplete(object sender, RecognitionResult e)
        {
            RecognitionResult.Face face = null;

            if (e.Faces != null)
                face = e.Faces.FirstOrDefault();

            if (face != null)
            {
                if (!string.IsNullOrEmpty(face.Key))
                {
                    // Write the key on the image...
                    using (var g = Graphics.FromImage(e.ProcessedBitmap))
                    {
                        var rect = face.TrackingResults.FaceRect;
                        g.DrawString(face.Key, new Font("Arial", 20), Brushes.Red, new System.Drawing.Point(rect.Left, rect.Top - 25));
                    }
                }

                if (this.takeTrainingImage)
                {
                    this.targetFaces.Add(new BitmapSourceTargetFace
                    {
                        Image = (Bitmap)face.GrayFace.Clone(),
                        Key = this.NameField.Text,
                        ID = RecognitionUtility.GenerateHash(this.NameField.Text),
                        Face3DPoints = face.TrackingResults.Face3DPoints
                    });

                    this.takeTrainingImage = false;

                    // Bump the numeric suffix in the name field for the next training image
                    this.NameField.Text = this.NameField.Text.Replace(this.targetFaces.Count.ToString(), (this.targetFaces.Count + 1).ToString());

                    // The recognizer needs at least two target faces before it can be trained
                    if (this.targetFaces.Count > 1)
                        this.engine.SetTargetFaces(this.targetFaces);
                }
            }

            this.Video.Source = LoadBitmap(e.ProcessedBitmap);
        }
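A hedged usage sketch for the training path above: takeTrainingImage is typically armed from a UI handler so that the next recognition pass captures a training image. The handler name and wiring are assumptions, not part of the original sample:

        // Hypothetical button handler: arm the training flag; the next
        // Engine_RecognitionComplete call with a tracked face consumes it.
        private void TrainButton_Click(object sender, RoutedEventArgs e)
        {
            this.takeTrainingImage = true;
        }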
Example #6
        /// <summary>
        /// Worker thread for recognition processing
        /// </summary>
        private void RecognizerWorker_DoWork(object sender, DoWorkEventArgs e)
        {
            var result = new RecognitionResult();
            result.OriginalBitmap = this.ImageToBitmap(this.colorImageBuffer, this.imageWidth, this.imageHeight);
            result.ProcessedBitmap = (Bitmap)result.OriginalBitmap.Clone();
            e.Result = result;

            if (this.trackedSkeleton != null && this.trackedSkeleton.TrackingState == SkeletonTrackingState.Tracked)
            {
                // Reset the face tracker if we lost our old skeleton...
                if (this.trackedSkeleton.TrackingId != this.previousTrackedSkeletonId && this.faceTracker != null)
                    this.faceTracker.ResetTracking();

                this.previousTrackedSkeletonId = this.trackedSkeleton.TrackingId;

                if (this.faceTracker == null)
                {
                    try
                    {
                        this.faceTracker = new FaceTracker(this.Kinect);
                    }
                    catch (InvalidOperationException)
                    {
                        // During some shutdown scenarios the FaceTracker
                        // is unable to be instantiated.  Catch that exception
                        // and don't track a face.
                        this.faceTracker = null;
                    }
                }

                if (this.faceTracker != null)
                {
                    var faceTrackFrame = this.faceTracker.Track(
                        this.colorImageFormat,
                        this.colorImageBuffer,
                        this.depthImageFormat,
                        this.depthImageBuffer,
                        this.trackedSkeleton);

                    if (faceTrackFrame.TrackSuccessful)
                    {
                        var trackingResults = new TrackingResults(faceTrackFrame.GetProjected3DShape());

                        lock (this.ProcessingMutex)
                        {
                            if (this.Processor != null && this.ProcessingEnabled)
                            {
                                this.Processor.Process(result, trackingResults);
                            }
                        }
                    }
                }
            }
        }
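ImageToBitmap is another helper supplied elsewhere in the samples. A hedged sketch of a typical implementation for a 32-bit BGRA Kinect color buffer (the real helper may handle pixel formats and strides differently):

        using System.Drawing;
        using System.Drawing.Imaging;
        using System.Runtime.InteropServices;

        // Sketch only: copy the raw color buffer into a GDI+ bitmap.
        private Bitmap ImageToBitmap(byte[] buffer, int width, int height)
        {
            var bitmap = new Bitmap(width, height, PixelFormat.Format32bppRgb);
            var data = bitmap.LockBits(
                new Rectangle(0, 0, width, height),
                ImageLockMode.WriteOnly,
                bitmap.PixelFormat);

            // One copy per frame; buffer length must be width * height * 4 bytes.
            Marshal.Copy(buffer, 0, data.Scan0, width * height * 4);
            bitmap.UnlockBits(data);

            return bitmap;
        }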