Beispiel #1
0
        /// <summary>
        /// Builds a <see cref="FaceDetection"/> from querystring-style settings.
        /// Recognized keys: "f.faces", "f.threshold", "f.minsize", "f.expand".
        /// </summary>
        /// <param name="s">Settings collection to read from.</param>
        /// <returns>A configured detection instance.</returns>
        public FaceDetection ConfigureDetection(NameValueCollection s)
        {
            var detection = new FaceDetection();

            // Face count: one value = exact count, two values = min,max.
            // When the key is absent, fall back to the default range 1..8.
            var faceRange = NameValueCollectionExtensions.GetList<int>(s, "f.faces", null, 1, 2);
            if (faceRange == null)
            {
                detection.MinFaces = 1;
                detection.MaxFaces = 8;
            }
            else if (faceRange.Length > 0)
            {
                detection.MinFaces = faceRange[0];
                detection.MaxFaces = faceRange.Length > 1 ? faceRange[1] : faceRange[0];
            }

            // Confidence: one value sets both minimum and threshold,
            // two values set them independently.
            var confidence = NameValueCollectionExtensions.GetList<int>(s, "f.threshold", null, 1, 2);
            if (confidence != null && confidence.Length > 0)
            {
                detection.MinConfidenceLevel       = confidence[0];
                detection.ConfidenceLevelThreshold = confidence.Length > 1 ? confidence[1] : confidence[0];
            }

            // Minimum face size as a percentage of the image.
            detection.MinSizePercent = NameValueCollectionExtensions.Get<float>(s, "f.minsize", detection.MinSizePercent);

            // Bounding-box expansion: one value applies to both axes,
            // two values are X then Y.
            var expansion = NameValueCollectionExtensions.GetList<double>(s, "f.expand", null, 1, 2);
            if (expansion != null)
            {
                detection.ExpandX = expansion[0];
                detection.ExpandY = expansion.Length > 1 ? expansion[1] : expansion[0];
            }

            return detection;
        }
Beispiel #2
0
    /// <summary>
    /// Unity lifecycle hook: caches sibling components, derives chunk
    /// dimensions from the map generator, and allocates the chunk grid.
    /// </summary>
    void Start()
    {
        mapGenerator     = GetComponent<MapGenerator>();
        webcamController = GetComponent<WebcamTextureController>();
        _face            = GetComponent<FaceDetection>();

        // Chunk dimensions are one less than the generator's reported
        // chunk width/height values.
        chunkWidth  = mapGenerator.MapChunkWidth - 1;
        chunkHeight = mapGenerator.MapChunkHeight - 1;

        numChunkWidth  = mapGenerator.NumChunkWidth;
        numChunkHeight = mapGenerator.NumChunkHeight;

        // One entry per (x, y) chunk position.
        terrainChunkArray = new TerrainChunk[numChunkWidth, numChunkHeight];

        InitializeChunks();
    }
        /// <summary>
        /// Processes one video frame during the question phase: updates the
        /// question stopwatch deltas, detects faces, runs the two recognition
        /// stages on each face, then advances the question cycle. Any failure
        /// is swallowed and the (possibly partially annotated) frame returned.
        /// </summary>
        /// <param name="imageFrame">Current BGR video frame; may be null.</param>
        /// <returns>The input frame (annotated in place), or null for null input.</returns>
        private Image <Bgr, byte> Question(Image <Bgr, byte> imageFrame)
        {
            try
            {
                if (imageFrame == null)
                {
                    return(null);
                }

                // Copy the frame for detection objects, then shrink the copy to 300x300.
                var imgCommit = imageFrame.Copy(); //.Resize(300, 300, Inter.Linear, true);
                imgCommit = imgCommit.Resize(300, 300, Inter.Linear, true);

                // Update elapsed time and the delta since the previous frame.
                long timeElapsed = _qz.TQuestion.QStopWatch.ElapsedMilliseconds;
                _qz.TQuestion.QStopWatchTimeDiv = timeElapsed - _qz.TQuestion.QStopWatchTime;
                _qz.TQuestion.QStopWatchTime    = timeElapsed;

                // The frame is treated as three vertical parts of width w.
                int w = imageFrame.Width / 3;
                int h = imageFrame.Height;

                //imageFrame._EqualizeHist();
                // Detection runs on a grayscale conversion of the frame.
                var grayFrame = imageFrame.Convert <Gray, byte>();
                var faces     = FaceDetection.DetectMultiScale(grayFrame, 1.3, 5);

                int index = 1;
                foreach (var face in faces)
                {
                    // Stage 0: face recognition - detect faces/answer.
                    var result = QuestionFaceRecognition0(grayFrame, face);

                    // Stage 1: face recognition - detect question/answer.
                    QuestionFaceRecognition1(imageFrame, w, face, result);
                }

                // Advance the question cycle with the annotated frame and the copy.
                QuestionCycle(imageFrame, imgCommit, w, h);
            }
            catch (Exception)
            {
                // NOTE(review): all frame-level failures are intentionally
                // ignored so the video loop keeps running.
            }

            return(imageFrame);
        }
Beispiel #4
0
        /// <summary>
        /// Handles a grabbed camera frame: detects faces on a grayscale
        /// conversion, outlines each detection on the colour frame, and
        /// displays the annotated result.
        /// </summary>
        /// <param name="sender">Capture source raising the event.</param>
        /// <param name="e">Unused event data.</param>
        private void Capture_ImageGrabbed(object sender, EventArgs e)
        {
            Frame = Capture.QueryFrame();
            var colorFrame = Frame.ToImage<Bgr, byte>();

            if (colorFrame == null)
            {
                return;
            }

            // Detection runs on a grayscale conversion of the frame.
            var gray = colorFrame.Convert<Gray, byte>();

            foreach (var region in FaceDetection.DetectMultiScale(gray, 1.3, 5))
            {
                // Blue 2px rectangle around each detected face.
                CvInvoke.Rectangle(colorFrame, region, new MCvScalar(255, 0, 0, 255), 2);
            }

            imbWebcam.Image = colorFrame;
        }
    /// <summary>
    /// Unity entry point: wires up the webcam and face-detection components,
    /// initializes map/chunk sizing, generates the full noise map, and applies
    /// the texture data to the terrain material.
    /// </summary>
    private void Start()
    {
        webcamController = this.gameObject.GetComponent(typeof(WebcamTextureController)) as WebcamTextureController;
        face             = this.gameObject.GetComponent(typeof(FaceDetection)) as FaceDetection;

        webcamController.Initialize();

        this.InitializeMapSizes();
        this.InitializeChunkSizes();
        this.InitializeNumOfChunks();

        // One full-size noise map for the whole terrain, parameterized by the
        // serialized noise settings.
        this.fullNoiseMap = NoiseGenerator.GenerateNoiseMap(
            this.mapWidth,
            this.mapHeight,
            this.noiseData.seed,
            this.noiseData.noiseScale,
            this.noiseData.octaves,
            this.noiseData.persistance,
            this.noiseData.lacunarity,
            this.noiseData.offset,
            this.noiseData.normalizeMode);

        this.textureData.ApplyToMaterial(this.terrainMaterial);
    }
Beispiel #6
0
        /// <summary>
        /// Timer callback driving the face capture/training workflow. While
        /// TimerCounter is below TimeLimit it grabs a frame, detects a face,
        /// and stores the cropped sample with the entered user id; once the
        /// limit is reached it persists the samples, trains the Eigen
        /// recognizer, and writes the model to YMLPath.
        /// </summary>
        /// <param name="sender">Timer raising the tick.</param>
        /// <param name="e">Unused event data.</param>
        private void Timer_Tick(object sender, EventArgs e)
        {
            Frame = Capture.QueryFrame();
            var imageFrame = Frame.ToImage <Gray, byte>();

            if (TimerCounter < TimeLimit)
            {
                TimerCounter++;

                if (imageFrame != null)
                {
                    // Cascade detection on the grayscale frame.
                    var faces = FaceDetection.DetectMultiScale(imageFrame, 1.3, 5);

                    if (faces.Length > 0)
                    {
                        // Only the first detected face is sampled each tick,
                        // cropped and resized to the recognizer's input size.
                        var processImage = imageFrame.Copy(faces[0]).Resize(ProcessImageWidth, ProcessImageHeight, Inter.Cubic);
                        Faces.Add(processImage);
                        Labels.Add(Convert.ToInt32(txtUserId.Text));
                        ScanCounter++;
                        rtbOutPut.AppendText($"{ScanCounter} Success Scan Taken... {Environment.NewLine}");
                        rtbOutPut.ScrollToCaret();
                    }
                }
            }
            else
            {
                // Capture phase complete: persist every sample to the store.
                var trainFaces = ConvertImageToMat(Faces);

                foreach (var face in trainFaces)
                {
                    DataStoreAccess.SaveFace(Convert.ToInt32(txtUserId.Text), txtUserName.Text, ConvertImageToBytes(face.Bitmap));
                }

                EigenFaceRecognizer.Train(trainFaces.ToArray(), Labels.ToArray());

                // Serialize the trained model for later prediction runs.
                EigenFaceRecognizer.Write(YMLPath);
                Timer.Stop();
                TimerCounter       = 0;
                btnPredict.Enabled = true;
                rtbOutPut.AppendText($"Training Completed! {Environment.NewLine}");
                MessageBox.Show("Training Completed!");
            }
        }
Beispiel #7
0
        /// <summary>
        /// Constructs a new eyes object: initializes the camera preview,
        /// creates the vision/tracking/display components, subscribes to
        /// largest-face changes, and starts the periodic background timers.
        /// </summary>
        /// <param name="visionPreview">A capture element that is placed on a canvas used for capturing what Robbie sees.</param>
        /// <param name="previewCanvas">A canvas element used for rendering the image preview showing what Robbie sees.</param>
        public Eyes(CaptureElement visionPreview, Canvas previewCanvas)
        {
            Camera.Instance.Initialize(visionPreview);
            this.previewCanvas = previewCanvas;

            // Vision pipeline components.
            faceTracking   = new FaceTracking();
            faceDetection  = new FaceDetection();
            computerVision = new ComputerVision();

            // Presentation and actuation components.
            identityInterpolation = new IdentityInterpolation();
            visualization         = new Vision();
            panTilt     = new PanTilt();
            eyesDisplay = new EyesDisplay();

            // Subscribed before the periodic timers start below.
            identityInterpolation.LargestFaceChanged += IdentityInterpolation_LargestFaceChanged;

            // fire up the continuous tasks of processing video and controlling the servos
            ThreadPoolTimer.CreatePeriodicTimer(ProcessCurrentVideoFrame_Delegate, TimeSpan.FromMilliseconds(125)); // 8 fps
            ThreadPoolTimer.CreatePeriodicTimer(UpdatePanTiltPosition_Delegate, TimeSpan.FromMilliseconds(25));     // 40 fps
            ThreadPoolTimer.CreatePeriodicTimer(Blink_Delegate, TimeSpan.FromMilliseconds(5000));                   // 12 fpm
        }
Beispiel #8
0
        /// <summary>
        /// Sets up face tracking/detection/recognition, preloads the cached
        /// face library from disk, allocates the frame buffer, and starts the
        /// RTSP capture task plus the render loop.
        /// </summary>
        private void Init()
        {
            _traking   = LocatorFactory.GetTrackingLocator(AppId, FtKey) as FaceTracking;
            _detection = LocatorFactory.GetDetectionLocator(AppId, FdKey) as FaceDetection;
            _recognize = new FaceRecognize(AppId, FrKey);
            _processor = new FaceProcessor(_traking, _recognize);

            // Preload the face library: one cache entry per file, keyed by
            // the file name without its extension.
            if (Directory.Exists(FaceLibraryPath))
            {
                var files = Directory.GetFiles(FaceLibraryPath);
                foreach (var file in files)
                {
                    var info = new FileInfo(file);
                    _cache.Add(info.Name.Replace(info.Extension, ""), File.ReadAllBytes(file));
                }
            }

            // Unmanaged buffer holding one 1920x1080 BGR24 frame; _image wraps
            // it so the bitmap can be rendered without copying.
            // NOTE(review): _pImage is never freed in this method — confirm it
            // is released elsewhere (AllocHGlobal requires FreeHGlobal).
            _pImage = Marshal.AllocHGlobal(1920 * 1080 * 3);
            _image  = new Bitmap(1920, 1080, 1920 * 3, PixelFormat.Format24bppRgb, _pImage);

            var ffmpeg = new FFMpegConverter();

            outputStream = new MemoryStream();

            var setting =
                new ConvertSettings
            {
                CustomOutputArgs = "-an -r 15 -pix_fmt bgr24 -updatefirst 1" // the -r (frame rate) value can be tuned to match the camera's FPS
            };                                                               //-s 1920x1080 -q:v 2 -b:v 64k

            // Pull the RTSP stream and push raw video frames into outputStream.
            task = ffmpeg.ConvertLiveMedia("rtsp://*****:*****@192.168.1.64:554/h264/ch1/main/av_stream", null,
                                           outputStream, Format.raw_video, setting);

            task.OutputDataReceived += DataReceived;
            task.Start();

            _renderTask = new Task(Render);
            _renderTask.Start();
        }
        /// <summary>
        /// Detects faces in the frame, predicts each one with the current
        /// group's Eigen recognizer, and draws the bounding box plus the
        /// predicted name onto the frame. Failures are swallowed so the video
        /// loop keeps running.
        /// </summary>
        /// <param name="imageFrame">Current BGR video frame; may be null.</param>
        /// <returns>The same frame, annotated in place (null input passes through).</returns>
        private Image <Bgr, byte> TestGroup(Image <Bgr, byte> imageFrame)
        {
            try
            {
                if (imageFrame == null)
                {
                    return(null);
                }

                // Detection runs on a grayscale conversion of the frame.
                var grayFrame = imageFrame.Convert <Gray, byte>();
                var faces     = FaceDetection.DetectMultiScale(grayFrame, 1.2, 5);

                foreach (var face in faces)
                {
                    // Crop and normalize each face to the recognizer's input size.
                    var procImg = grayFrame.Copy(face).Resize(
                        _qz.TRF_IMAGE_WIDTH,
                        _qz.TRF_IMAGE_HEIGHT,
                        Emgu.CV.CvEnum.Inter.Cubic);
                    var result = _qz.QZGroupArr[_qz.QZTGroup.TGroupNbr].FaceRecoEigen.Predict(procImg);

                    try
                    {
                        // Annotate with the box and the predicted member's name.
                        imageFrame.Draw(face, new Bgr(Color.BurlyWood), 3);
                        imageFrame.Draw($"{_qz.QZFaceArr[result.Label].Name}", face.Location, Emgu.CV.CvEnum.FontFace.HersheyTriplex, 1.0, new Bgr(Color.Red));

                        //                    if (result.Distance > 1.0)
                        //                        imageFrame.Draw($"{_qz.QSet.Faces[result.Label].Name}", face.Location, Emgu.CV.CvEnum.FontFace.HersheyTriplex, 1.0, new Bgr(Color.Red));
                    }
                    catch (Exception)
                    {
                        // NOTE(review): per-face drawing/label failures ignored by design.
                    }
                }
            }
            catch (Exception)
            {
                // NOTE(review): frame-level failures ignored; frame returned unannotated.
            }

            return(imageFrame);
        }
Beispiel #10
0
        /// <summary>
        /// Grabs the current webcam frame, detects faces on it, and runs the
        /// recognizer on each detection. All output/annotation of the result
        /// is currently disabled (commented out in the original code), so the
        /// predictions are computed but discarded.
        /// </summary>
        /// <param name="sender">Button raising the click.</param>
        /// <param name="e">Unused event data.</param>
        private void btnPredictFace_Click(object sender, EventArgs e)
        {
            WebCam.Retrieve(Frame);
            var grayFrame = Frame.ToImage<Gray, byte>();

            if (grayFrame == null)
            {
                return;
            }

            foreach (var region in FaceDetection.DetectMultiScale(grayFrame, 1.3, 5))
            {
                // Crop and normalize the face to the recognizer's input size.
                var candidate  = grayFrame.Copy(region).Resize(ProcessImageWidth, ProcessImageHeight, Emgu.CV.CvEnum.Inter.Cubic);
                var prediction = FaceRecognition.Predict(candidate);

                if (prediction.Label.ToString() == "1")
                {
                    // NOTE(review): handling of a recognized face was commented
                    // out in the original; this branch is intentionally a no-op.
                }
            }
        }
Beispiel #11
0
        /// <summary>
        /// Frame-grabbed handler: detects faces, forwards each normalized crop
        /// to the training form (when open), predicts its label, and annotates
        /// the frame with box and label. Per-face errors are swallowed so the
        /// capture loop keeps running.
        /// </summary>
        /// <param name="sender">Capture source raising the event.</param>
        /// <param name="e">Unused event data.</param>
        private void WebCam_ImageGrabbed(object sender, EventArgs e)
        {
            WebCam.Retrieve(Frame);
            var imageFrame = Frame.ToImage <Bgr, byte>();

            if (imageFrame != null)
            {
                // Detection runs on a grayscale conversion of the frame.
                var grayFrame = imageFrame.Convert <Gray, byte>();
                var faces     = FaceDetection.DetectMultiScale(grayFrame, 1.3, 5);

                foreach (var face in faces)
                {
                    imageFrame.Draw(face, new Bgr(Color.BurlyWood), 3);

                    try
                    {
                        // Crop and normalize to the recognizer's input size.
                        var procImg = grayFrame.Copy(face).Resize(ProcessImageWidth, ProcessImageHeight, Emgu.CV.CvEnum.Inter.Cubic);
                        if (_qz2018.FrmTrain != null)
                        {
                            // Mirror the crop into the training form's preview slot 1.
                            _qz2018.FrmTrain.SetPicture(1, procImg);
                        }



                        //imageBox1.Image = procImg;
                        var result = FaceRecognition.Predict(procImg);
                        imageFrame.Draw($"<{result.Label}>", face.Location, Emgu.CV.CvEnum.FontFace.HersheyTriplex, 1.0, new Bgr(Color.Red));
                    }
                    catch (Exception ex)
                    {
                        // NOTE(review): per-face failures are intentionally ignored.
                    }
                }

                //imageWebCam.Image = imageFrame;
            }
        }
Beispiel #12
0
        /// <summary>
        /// Timer callback for the training workflow: collects one cropped face
        /// sample per tick until TimeLimit ticks have elapsed, then trains the
        /// recognizer on the collected samples and writes the model to YMLPath.
        /// </summary>
        /// <param name="sender">Timer raising the tick.</param>
        /// <param name="e">Unused event data.</param>
        private void Timer_Tick(object sender, EventArgs e)
        {
            Camera.Retrieve(Frame);
            var ImgFrame = Frame.ToImage <Gray, byte>();

            if (TimerCounter < TimeLimit)
            {
                TimerCounter++;

                if (ImgFrame != null)
                {
                    // Cascade detection on the grayscale frame.
                    var faces = FaceDetection.DetectMultiScale(ImgFrame, 1.3, 5);

                    if (faces.Count() > 0)
                    {
                        // Only the first detection is sampled each tick.
                        var processedImage = ImgFrame.Copy(faces[0]).Resize(ProcessedImageWidth, ProcessedImageHeight, Emgu.CV.CvEnum.Inter.Cubic);
                        Faces.Add(processedImage);
                        Ids.Add(Convert.ToInt32(IdBox.Text));
                        ScanCounter++;
                        OutputBox.AppendText($"{ScanCounter} Succesful Scans Taken...{Environment.NewLine}");
                        OutputBox.ScrollToCaret();
                    }
                }
            }
            else
            {
                // Capture complete: train and persist the recognizer model.
                FacialRecognition.Train(Faces.ToArray(), Ids.ToArray());
                FacialRecognition.Write(YMLPath);
                Timer.Stop();
                TimerCounter     = 0;
                // Toggle the input controls back to their pre-training state.
                btnTrain.Enabled = !btnTrain.Enabled;
                IdBox.Enabled    = !IdBox.Enabled;
                OutputBox.AppendText($"Training  Complete! {Environment.NewLine}");
                MessageBox.Show("Training Complete");
            }
        }
Beispiel #13
0
        /// <summary>
        /// Registration event handler: downloads the registrant's photo,
        /// determines gender via the face engine, extracts the face feature,
        /// and stores it (Base64-encoded) under the matching gender section.
        /// </summary>
        /// <param name="sender">Event source (unused).</param>
        /// <param name="args">Registration data (id, name, image extension).</param>
        public static void NewUserRegist(object sender, myEventArgs.RegistArgs args)
        {
            /****************************
            * Gender is only distinguished when the feature value is saved
            * **************************/
            // Download the registrant's image to the local registration folder.
            String url = String.Format("{0}/{1}_{2}.{3}",
                                       Url_DldImg, args.Regist.Id, args.Regist.Name, args.Regist.Extension);
            String pathImg = String.Format("{0}\\{1}_{2}.{3}",
                                           ImgRootPath_Regist, args.Regist.Id, args.Regist.Name, args.Regist.Extension);

            HttpUtil.downFile(url, pathImg);
            // Determine gender (assumed to be 100% accurate).
            try
            {
                ResultCode result = EngineActivate.ActivateEngine(APPID, APPKEY);
                if (result != ResultCode.SDK已激活)
                {
                    Console.WriteLine(result.ToString());
                    return;
                }
                IntPtr hengine = EngineFactory.GetEngineInstance(EngineFactory.Image,
                                                                 DetectionOrientPriority.ASF_OP_0_ONLY, 16); // detection angle: the face's orientation within the photo
                Bitmap img1 = new Bitmap(pathImg);

                var          face  = new FaceDetection(hengine, img1);
                var          r     = face.GetGender();
                LocalUserMod temp1 = new LocalUserMod();
                temp1.Name = args.Regist.Name;
                temp1.Num  = args.Regist.Id;
                // Base64-encode the extracted face feature.
                temp1.Freature = Convert.ToBase64String(face.getFaceFeature(img1));
                // Release the bitmap.
                img1.Dispose();
                // Store the feature under the detected gender
                // ("男" = male, "女" = female).
                switch (r)
                {
                case "男":
                    XmlUtil.AddOneData(temp1, "Male");
                    break;

                case "女":
                    XmlUtil.AddOneData(temp1, "Female");
                    break;

                default:
                    // Gender lookup failed — log and abort this registration.
                    Console.WriteLine("获取性别失败!用户:{0} , 学号:{1},注册时间:{2}",
                                      args.Regist.Name, args.Regist.Id, DateTime.Now.ToString());
                    return;
                }
                // Log the successful registration.
                Console.WriteLine("User registed: {0} at {1}", args.Regist.Name, DateTime.Now.ToString());
                // Refresh the cached user info (delegated to a helper).
                getAllUserInfo();
            }
            catch (Exception ex)
            {
                Console.WriteLine("Regist Error: {0}", ex.Message);
            }
            finally
            {
                EngineFactory.DisposeEngine();
            }
        }
Beispiel #14
0
 /// <summary>
 /// Unity lifecycle hook: registers this instance as the singleton and
 /// positions the "Eye" object in front of this transform.
 /// </summary>
 void Awake()
 {
     instance = this;

     // Locate the eye and offset it forward by the configured distance.
     var eyeTransform = GameObject.Find("Eye").transform;
     eyeTransform.position = transform.position + Vector3.forward * distanceEye;
     Eyes = eyeTransform;
 }
        /// <summary>
        /// Deserializes the predictions in <paramref name="jsonObject"/> into
        /// the concrete prediction type named by the model's Prediction.Name.
        /// </summary>
        /// <param name="modelType">Model whose Prediction.Name selects the concrete type.</param>
        /// <param name="jsonObject">Raw JSON response; its "data" property holds the predictions.</param>
        /// <returns>The deserialized predictions; empty when "data" has no properties.</returns>
        /// <exception cref="ClarifaiException">Thrown for an unrecognized type name.</exception>
        protected static List <IPrediction> DeserializePredictions(ModelType modelType,
                                                                   dynamic jsonObject)
        {
            var propertyValues = (JObject)jsonObject.data;

            var data = new List <IPrediction>();

            if (propertyValues.Count > 0)
            {
                // Each case enumerates the JSON collection used by that type
                // (colors / concepts / regions / embeddings / frames).
                string typeName = modelType.Prediction.Name;
                switch (typeName)
                {
                case "Color":
                {
                    foreach (dynamic color in jsonObject.data.colors)
                    {
                        data.Add(Color.Deserialize(color));
                    }
                    break;
                }

                case "Concept":
                {
                    foreach (dynamic concept in jsonObject.data.concepts)
                    {
                        data.Add(Concept.Deserialize(concept));
                    }
                    break;
                }

                case "Demographics":
                {
                    foreach (dynamic demographics in jsonObject.data.regions)
                    {
                        data.Add(Demographics.Deserialize(demographics));
                    }
                    break;
                }

                case "Embedding":
                {
                    foreach (dynamic embedding in jsonObject.data.embeddings)
                    {
                        data.Add(Embedding.Deserialize(embedding));
                    }
                    break;
                }

                case "FaceConcepts":
                {
                    foreach (dynamic faceConcepts in
                             jsonObject.data.regions)
                    {
                        data.Add(FaceConcepts.Deserialize(faceConcepts));
                    }
                    break;
                }

                case "FaceDetection":
                {
                    foreach (dynamic faceDetection in jsonObject.data.regions)
                    {
                        data.Add(FaceDetection.Deserialize(faceDetection));
                    }
                    break;
                }

                case "FaceEmbedding":
                {
                    foreach (dynamic faceEmbedding in jsonObject.data.regions)
                    {
                        data.Add(FaceEmbedding.Deserialize(faceEmbedding));
                    }
                    break;
                }

                case "Focus":
                {
                    // Focus predictions also carry the overall focus value.
                    foreach (dynamic focus in jsonObject.data.regions)
                    {
                        data.Add(Focus.Deserialize(focus,
                                                   (decimal)jsonObject.data.focus.value));
                    }
                    break;
                }

                case "Frame":
                {
                    foreach (dynamic frame in jsonObject.data.frames)
                    {
                        data.Add(Frame.Deserialize(frame));
                    }
                    break;
                }

                case "Logo":
                {
                    foreach (dynamic logo in jsonObject.data.regions)
                    {
                        data.Add(Logo.Deserialize(logo));
                    }
                    break;
                }

                default:
                {
                    throw new ClarifaiException(
                              string.Format("Unknown output type `{0}`", typeName));
                }
                }
            }
            return(data);
        }
        /// <summary>
        /// Sends the photo to the Face API and returns the attributes of the
        /// first detected face, or null when offline, no face is found, or the
        /// call fails (an alert is shown for each failure case). Also binds
        /// the result to the page and hides the busy indicator on success.
        /// </summary>
        /// <param name="inputFile">The captured or selected photo to analyze.</param>
        /// <returns>Attributes of the first detected face, or null.</returns>
        private async Task <FaceDetection> DetectFacesAsync(MediaFile inputFile)
        {
            // Bail out early when there is no network connectivity.
            if (!CrossConnectivity.Current.IsConnected)
            {
                await DisplayAlert("Network error", "Please check your network connection and retry.", "OK");

                return(null);
            }

            // The list of Face attributes to return.
            IEnumerable <FaceAttributeType> faceAttributes = new FaceAttributeType[]
            {
                FaceAttributeType.Gender,
                FaceAttributeType.Age,
                FaceAttributeType.Smile,
                FaceAttributeType.Emotion,
                FaceAttributeType.Glasses,
                FaceAttributeType.FacialHair,
                FaceAttributeType.HeadPose,
            };

            try
            {
                // Call the Face API.
                Face[] faces = await faceServiceClient.DetectAsync(
                    inputFile.GetStream(),
                    returnFaceId : true,
                    returnFaceLandmarks : false,
                    returnFaceAttributes : faceAttributes);

                if (faces.Length == 0)
                {
                    return(null);
                }

                // Get highest rated emotion
                var emotion = faces[0].FaceAttributes.Emotion.ToRankedList();

                // Flatten the first face's attributes into the view model.
                FaceDetection theData = new FaceDetection()
                {
                    Age       = faces[0].FaceAttributes.Age,
                    Beard     = faces[0].FaceAttributes.FacialHair.Beard,
                    Emotion   = emotion.FirstOrDefault().Key,
                    Gender    = faces[0].FaceAttributes.Gender,
                    Glasses   = faces[0].FaceAttributes.Glasses.ToString(),
                    Moustache = faces[0].FaceAttributes.FacialHair.Moustache,
                    Smile     = faces[0].FaceAttributes.Smile
                };

                // Bind results to the page and hide the busy indicator.
                this.BindingContext       = theData;
                this.Indicator1.IsRunning = false;
                this.Indicator1.IsVisible = false;

                return(theData);
            }
            catch (FaceAPIException f)
            {
                // API-level failure (e.g. bad key or quota).
                await DisplayAlert("Network error", f.ErrorMessage, "OK");

                return(null);
            }
            catch (Exception e)
            {
                await DisplayAlert("Error", e.Message, "OK");

                return(null);
            }
        }
        /// <summary>
        /// Replaces a member's photo from a multipart upload: saves the file,
        /// runs face detection on it, and updates the member record with the
        /// detection results. On validation failure or exception the uploaded
        /// file is deleted and an error result is returned.
        /// </summary>
        /// <returns>A <see cref="MemeberFaceUploadResult"/> describing the outcome.</returns>
        public async Task <MemeberFaceUploadResult> UploadReplacementPhoto()
        {
            Member   _memberRec; string oldFace = "";
            string   stringMemberId, subPath;
            int      intMemberId;
            FileInfo fileInfo = null;

            FaceDetection.FaceDetetctResult faceRes = new FaceDetection.FaceDetetctResult();

            try
            {
                // Stream the multipart body into the upload folder.
                var streamProvider = new CustomMultipartFormDataStreamProvider(ServerUploadFolder);
                await Request.Content.ReadAsMultipartAsync(streamProvider);

                fileInfo = new FileInfo(streamProvider.FileData.Select(entry => entry.LocalFileName).FirstOrDefault());

                if (fileInfo == null)
                {
                    // BUG FIX: error message said "fine" instead of "find".
                    throw (new Exception("Cannot find uploaded file"));
                }

                stringMemberId = streamProvider.FormData["MemberId"];
                if (string.IsNullOrEmpty(stringMemberId))
                {
                    // No member id supplied: clean up the orphaned upload.
                    if (File.Exists(fileInfo.FullName))
                    {
                        File.Delete(fileInfo.FullName);
                    }
                    return(new MemeberFaceUploadResult
                    {
                        IsAddOk = false,
                        FileNames = string.Empty,
                        ErrorMsg = "MemberId is missing",
                        FacesDetected = faceRes.FacesDetected,
                        IsFaceDetectionOk = faceRes.Outcome
                    });
                }

                intMemberId = Convert.ToInt32(stringMemberId);

                _memberRec = _memberSvc.FindMember(User.Identity.Name, intMemberId);

                // BUG FIX: the original checked `_memberSvc == null` here, but
                // the service was already dereferenced on the line above; the
                // intended check is the lookup result for the current user.
                if (_memberRec == null)
                {
                    if (File.Exists(fileInfo.FullName))
                    {
                        File.Delete(fileInfo.FullName);
                    }
                    return(new MemeberFaceUploadResult
                    {
                        IsAddOk = false,
                        FileNames = string.Empty,
                        ErrorMsg = "Unauthorized Access",
                        FacesDetected = faceRes.FacesDetected,
                        IsFaceDetectionOk = faceRes.Outcome
                    });
                }

                // Recreate the path using the member's own subfolder and move
                // the upload there before running detection.
                subPath = intMemberId.ToString();
                File.Move(fileInfo.FullName, ServerUploadFolder + subPath + "\\" + fileInfo.Name);
                oldFace = _memberRec.FaceImage;


                //---------------------------- open cv routine to do face detection -------------------------------

                faceRes = FaceDetection.DetectFaceSave(fileInfo.Name, ServerUploadFolder + subPath, "", ServerBinFolder, fileInfo.Extension);

                if (faceRes.Outcome)
                {
                    //-------------------Face detection successful and continue ----------------------------

                    _memberRec.IsFaceDetected = faceRes.FacesDetected > 0;
                    _memberRec.IsFaceTagged   = true;

                    _memberRec.FaceImage    = faceRes.FacesDetected > 0 ? faceRes.FaceImages.FirstOrDefault() : null;
                    _memberRec.AbsoultePath = ServerUploadFolder + subPath + "\\";

                    _memberRec.DetectedFaceImage     = faceRes.FacesDetected > 0 ? faceRes.SuperImposedImage : fileInfo.Name;
                    _memberRec.AllDetectedFaceImages = faceRes.GetAllFaceImageNames();
                    _memberRec.UnDetectedFaceImage   = fileInfo.Name;
                    _memberRec.DetectedFaceCount     = faceRes.FacesDetected;
                    _memberRec.FolderPath            = "/UserContent/Member/" + subPath + "/";
                    _memberRec.OriginalFaceFileName  = streamProvider.GetOriginalFileName;
                    _memberRec.FaceDetectionRemarks  = faceRes.Remarks();

                    if (_memberRec.FaceImage != null)
                    {
                        // Replace the old face image atomically with the update.
                        int count = _memberSvc.UpdateMemberReplaceFace(_memberRec, _memberRec.FaceImage, oldFace);
                    }
                    else
                    {
                        _memberSvc.Update(_memberRec);
                    }
                }
                else
                {
                    // Detection failed: record the raw upload and the error.
                    _memberRec.IsFaceDetected    = false;
                    _memberRec.IsFaceTagged      = false;
                    _memberRec.DetectedFaceCount = faceRes.FacesDetected;
                    _memberRec.AbsoultePath      = ServerUploadFolder + subPath + "\\";

                    _memberRec.DetectedFaceImage    = fileInfo.Name;
                    _memberRec.UnDetectedFaceImage  = fileInfo.Name;
                    _memberRec.FolderPath           = "/UserContent/Member/" + subPath + "/";
                    _memberRec.OriginalFaceFileName = streamProvider.GetOriginalFileName;
                    _memberRec.FaceDetectionRemarks = String.IsNullOrEmpty(faceRes.ErrorMsg) ? "Some Exception" : faceRes.ErrorMsg;

                    _memberSvc.Update(_memberRec);
                }

                return(new MemeberFaceUploadResult
                {
                    IsAddOk = true,
                    FileNames = streamProvider.FileData.Select(entry => entry.LocalFileName).FirstOrDefault(),
                    ErrorMsg = !faceRes.Outcome ? faceRes.ErrorMsg : faceRes.Remarks(),
                    UpdatedTimestamp = DateTime.UtcNow,
                    FacesDetected = faceRes.FacesDetected,
                    MemberId = _memberRec.Key.ToString(),
                    IsFaceDetectionOk = faceRes.Outcome
                });
            }
            catch (Exception ex)
            {
                // Best effort cleanup of the uploaded file before reporting.
                if (fileInfo != null)
                {
                    if (File.Exists(fileInfo.FullName))
                    {
                        File.Delete(fileInfo.FullName);
                    }
                }
                return(new MemeberFaceUploadResult
                {
                    IsAddOk = false,
                    FileNames = string.Empty,
                    ErrorMsg = ex.Message,
                    FacesDetected = faceRes.FacesDetected,
                    IsFaceDetectionOk = faceRes.Outcome,
                    UpdatedTimestamp = DateTime.UtcNow
                });
            }
        }
        /// <summary>
        /// Collects metadata for every unique image under
        /// <paramref name="folderPath"/>, uploads each file to blob storage,
        /// and persists the metadata batch.
        /// </summary>
        /// <param name="folderPath">Folder to scan for image files.</param>
        /// <param name="appStartPath">Application start path, passed to the face detector.</param>
        public void SaveMetadataOfAllImages(string folderPath, string appStartPath)
        {
            // NOTE: the original wrapped this in a try/catch whose only action
            // was `throw;` (a no-op rethrow) and declared an unused
            // DirectoryInfo local; both have been removed — behavior unchanged.
            _blobWrapper   = new DataUpload();
            _faceDetection = new FaceDetection(appStartPath);
            _imgGrabber    = new ImageGrabber();

            List<DataLayer.EntityModel.Image> imageList = new List<DataLayer.EntityModel.Image>();

            // Recursive scan filtered to supported image types, de-duplicated.
            String[] files = GetFilesFrom(folderPath, imageFilters, true);
            List<string> fileNameList = GetUniqueImages(files.ToList());

            foreach (var fileObj in fileNameList)
            {
                // Get metadata of file and save it.
                imageList.Add(GetImageMetadata(fileObj, appStartPath));
                // Upload file to azure blob.
                _blobWrapper.UploadFile(fileObj);
            }

            new DataLayer.ModelClasses.Image().SaveUpdateMetadata(imageList);
        }
        /// <summary>
        /// Collects and saves metadata for a single image file, then uploads
        /// it to Azure blob storage.
        /// </summary>
        /// <param name="filePath">Full path of the image file to process.</param>
        /// <param name="appStartPath">Application start path used to initialise face detection.</param>
        public void SaveMetadataOfImage(string filePath, string appStartPath)
        {
            _blobWrapper = new DataUpload();
            _faceDetection = new FaceDetection(appStartPath);
            _imgGrabber = new ImageGrabber();

            List<DataLayer.EntityModel.Image> imageList = new List<DataLayer.EntityModel.Image>();
            // Wrap the single path in a list so the duplicate filter can be reused.
            List<string> fileNameList = GetUniqueImages(new List<string> { filePath });

            foreach (var fileObj in fileNameList)
            {
                // Extract and collect metadata for this file.
                imageList.Add(GetImageMetadata(fileObj, appStartPath));
                // Upload the file to the Azure blob container.
                _blobWrapper.UploadFile(fileObj);
            }

            // Persist the collected metadata.
            new DataLayer.ModelClasses.Image().SaveUpdateMetadata(imageList);
        }
 /// <summary>
 /// Reason : To detect face(s) on the selected image; if face(s) are detected
 /// they are marked with a box on the displayed image.
 /// </summary>
 /// <param name="sender"></param>
 /// <param name="e"></param>
 private void btnFaceDetect_Click(object sender, EventArgs e)
 {
     // Text is already a string; trim once and reuse it so validation and
     // detection operate on the SAME path (the original validated the trimmed
     // path but ran detection on the untrimmed one).
     string filePath = txtFilePath.Text.Trim();
     if (!IsMediaFile(filePath, imageExtensions))
     {
         MessageBox.Show("Please select valid Image file!!");
         return;
     }
     ShowLoader();
     _faceDetection = new FaceDetection(appStartPath);
     _faceDetection.DetectFace(appStartPath, filePath, ref noOfFaces, ref newFrame);
     HideLoader();
     pbImage.Image = newFrame;
     pbImage.SizeMode = PictureBoxSizeMode.StretchImage;
     pbImage.BringToFront();
     MessageBox.Show("Selected Image contains " + noOfFaces + " faces.");
 }
        /// <summary>
        /// Reason : To show all frames of a video in the panel. Each frame is
        /// run through face detection and shown in its own sub-panel with a
        /// label reporting the detected face count.
        /// </summary>
        /// <param name="files">Frame image files to display.</param>
        /// <param name="imageName">Name of the source image/video (currently unused by this method).</param>
        private void ShowAllFramesOnPanel(FileInfo[] files, string imageName)
        {
            flowLayoutPanel1.BringToFront();
            try
            {
                // files is an array — use Length once rather than LINQ Count() per use.
                int frameCount = files.Length;
                PictureBox[] pics = new PictureBox[frameCount];
                FlowLayoutPanel[] flws = new FlowLayoutPanel[frameCount];
                Label[] lbl = new Label[frameCount];
                _faceDetection = new FaceDetection(appStartPath);
                int brh = 0;
                for (int i = 0; i < frameCount; i++)
                {
                    if (!File.Exists(files[i].FullName))
                        continue;

                    // Reset per-frame detection outputs before the ref call.
                    noOfFaces = 0;
                    newFrame = null;
                    _faceDetection.DetectFace(appStartPath, files[i].FullName, ref noOfFaces, ref newFrame);

                    flws[i] = new FlowLayoutPanel();
                    flws[i].Name = "flw" + i;
                    flws[i].Location = new Point(3, brh);
                    flws[i].Size = new Size(217, 210);
                    flws[i].BackColor = Color.DarkCyan;
                    flws[i].BorderStyle = BorderStyle.Fixed3D;

                    lbl[i] = new Label();
                    lbl[i].Name = files[i].Name;
                    lbl[i].Size = new Size(100, 35);
                    lbl[i].Text = "Frame " + i + " Contains " + noOfFaces + " Face(s)";

                    pics[i] = new PictureBox();
                    pics[i].Name = files[i].FullName;
                    pics[i].Size = new Size(217, 175);
                    // Fall back to the raw frame file when detection produced no annotated image.
                    pics[i].Image = newFrame == null ? System.Drawing.Image.FromFile(files[i].FullName) : newFrame;
                    pics[i].SizeMode = PictureBoxSizeMode.StretchImage;

                    flws[i].Controls.Add(lbl[i]);
                    flws[i].Controls.Add(pics[i]);

                    // Only add to flowLayoutPanel1: the original also called
                    // this.Controls.Add(flws[i]) first, which was immediately
                    // undone because Controls.Add re-parents the control.
                    flowLayoutPanel1.Controls.Add(flws[i]);
                }
            }
            catch (Exception)
            {
                // Best-effort rendering: a failure on one frame should not crash the UI.
                // NOTE(review): consider at least logging the exception instead of swallowing it.
            }
        }
Beispiel #22
0
        /// <summary>
        /// Scans every downloaded *.jpeg image under <paramref name="path"/>,
        /// tries to recognise each registered user by face-feature comparison,
        /// records a head-pose status per recognised user, and finally posts
        /// the aggregated result (and matching images) to the front end.
        /// </summary>
        /// <param name="path">Folder containing the downloaded images to scan.</param>
        private static void doScan(String path)
        {
            ResultCode result = EngineActivate.ActivateEngine(APPID, APPKEY);

            // ResultCode.SDK已激活 means "SDK already activated"; any other code is a failure.
            if (result != ResultCode.SDK已激活)
            {
                Console.WriteLine(result.ToString());
                return;
            }
            IntPtr hengine = EngineFactory.GetEngineInstance(EngineFactory.Image,
                                                             DetectionOrientPriority.ASF_OP_0_ONLY, 16);
            FaceDetection face = new FaceDetection(hengine);

            DirectoryInfo root = new DirectoryInfo(path);

            // Collect all downloaded images and start recognition.
            List <FileInfo> Filelist = root.GetFiles("*.jpeg").ToList();
            Bitmap          img      = null;

            foreach (FileInfo file in Filelist) // outer loop: image files
            {
                try
                {
                    img = new Bitmap(file.FullName);
                    foreach (LocalUserMod mod in allRegUsers) // inner loop: registered users
                    {
                        try
                        {
                            // This user has already been recognised — skip them.
                            if (resultSend[mod.Name].isScaned)
                            {
                                continue;
                            }
                            byte[] f1  = Convert.FromBase64String(mod.Freature);
                            byte[] f2  = face.getFaceFeature(img);
                            float  sim = face.Compare(f1, f2);
                            if (sim >= Similarity)
                            {
                                // Fetch the 3D head-pose angles.
                                FaceDetection.TDAResult tdr = face.GetThreeDAngle(img);
                                Console.WriteLine("Name: {0}\nThreeDAngle: {1}", mod.Name, tdr.ToString());
                                // Classify posture from the angles and update the status.
                                if (tdr.Pitch <= -13)                // head-down pitch threshold
                                {
                                    resultSend[mod.Name].Status = 1; // looking down
                                }
                                else if (tdr.Yaw <= -26 || tdr.Yaw >= 26)
                                {
                                    resultSend[mod.Name].Status = 2; // looking left/right
                                }
                                else
                                {
                                    resultSend[mod.Name].Status = 3; // attentive
                                }
                                // Mark this user as scanned.
                                resultSend[mod.Name].isScaned = true;
                                // Upload the matching image file to the front end.
                                HttpUtil.postFile(Url_PostImg, file.FullName);
                                continue;
                            }
                        }
                        catch (Exception ex)
                        {
                            Console.WriteLine("Error in scanning! {0}", ex.Message);
                            continue;
                        }
                        // Not recognised (logged once per user per image).
                        Console.WriteLine("Cannot distinguish : {0}", file.Name);
                    }
                }
                catch (Exception ex)
                {
                    Console.WriteLine(ex.Message);
                    continue;
                }
                finally
                {
                    // Release the bitmap for this file before moving to the next one.
                    if (img != null)
                    {
                        img.Dispose();
                    }
                }
            }
            try
            {
                String datas = JsonConvert.SerializeObject(resultSend);
                // Post the aggregated result to the front end.
                HttpUtil.postData(Url_PostData, datas);
                // Dispose of the engine.
                EngineFactory.DisposeEngine();
                Console.WriteLine("Scan over......");
            }
            catch (Exception ex)
            {
                Console.WriteLine(ex.Message);
                return;
            }
        }
        /// <summary>
        /// Inserts a new Member record, receives an uploaded image via
        /// multipart form data, runs face detection on it, and updates the
        /// member record with the detection outcome. On failure the inserted
        /// member record is rolled back.
        /// </summary>
        /// <returns>A MemeberFaceUploadResult describing the upload and detection outcome.</returns>
        public async Task <MemeberFaceUploadResult> UploadMemberandFace()
        {
            Member   _memberRec = new Member();
            FileInfo fileInfo;

            FaceDetection.FaceDetetctResult faceRes = new FaceDetection.FaceDetetctResult();

            try
            {
                //--- insert record in to Member table and create a directory - before reading the file -------------
                // Placeholder values; the real form values are applied after the upload is read.
                _memberRec.Name           = "Name";
                _memberRec.DisplayName    = "DisplayName";
                _memberRec.DateOfBirth    = DateTime.ParseExact("15/08/1947", "dd/MM/yyyy", CultureInfo.InvariantCulture);
                _memberRec.Relation       = "Relation";
                _memberRec.IsActive       = true;
                _memberRec.IsFaceDetected = false;
                _memberRec.IsFaceTagged   = false;
                // NOTE(review): local time here vs DateTime.UtcNow in the result below — confirm which is intended.
                _memberRec.Created        = DateTime.Now;
                _memberRec.UserKey        = _userSvc.UserDetails(User.Identity.Name).Key;
                _memberSvc.Insert(_memberRec);

                // The new member's key becomes the upload sub-directory name.
                string subPath = _memberRec.Key.ToString();
                if (!System.IO.Directory.Exists(ServerUploadFolder + subPath))
                {
                    System.IO.Directory.CreateDirectory(ServerUploadFolder + subPath);
                }
                //-------------------------------------------------------------------------------------------------------

                var streamProvider = new CustomMultipartFormDataStreamProvider(ServerUploadFolder + subPath);
                await Request.Content.ReadAsMultipartAsync(streamProvider);

                fileInfo = new FileInfo(streamProvider.FileData.Select(entry => entry.LocalFileName).FirstOrDefault());

                //---------------------------- open cv routine to do face detection -------------------------------
                faceRes = FaceDetection.DetectFaceSave(fileInfo.Name, ServerUploadFolder + subPath, "", ServerBinFolder, fileInfo.Extension);

                // Fields identical in both outcome branches (de-duplicated from the original).
                _memberRec.Name                 = streamProvider.FormData["Name"];
                _memberRec.DisplayName          = streamProvider.FormData["DisplayName"];
                _memberRec.DateOfBirth          = DateTime.ParseExact(streamProvider.FormData["DateOfBirth"], "dd/MM/yyyy", CultureInfo.InvariantCulture);
                _memberRec.Relation             = streamProvider.FormData["Relation"];
                _memberRec.DetectedFaceCount    = faceRes.FacesDetected;
                _memberRec.AbsoultePath         = ServerUploadFolder + subPath + "\\";
                _memberRec.UnDetectedFaceImage  = fileInfo.Name;
                _memberRec.FolderPath           = "/UserContent/Member/" + subPath + "/";
                _memberRec.OriginalFaceFileName = streamProvider.GetOriginalFileName;

                if (faceRes.Outcome)
                {
                    //-------------------Face detection successful and continue ----------------------------
                    _memberRec.IsFaceDetected        = faceRes.FacesDetected > 0;
                    _memberRec.IsFaceTagged          = true;
                    _memberRec.FaceImage             = faceRes.FacesDetected > 0 ? faceRes.FaceImages.FirstOrDefault() : null;
                    _memberRec.DetectedFaceImage     = faceRes.FacesDetected > 0 ? faceRes.SuperImposedImage : fileInfo.Name;
                    _memberRec.AllDetectedFaceImages = faceRes.GetAllFaceImageNames();
                    _memberRec.FaceDetectionRemarks  = faceRes.Remarks();

                    if (faceRes.FacesDetected > 0)
                    {
                        _memberSvc.UpdateMemberAddFace(_memberRec, faceRes.FaceImages.FirstOrDefault());
                    }
                    else
                    {
                        _memberSvc.Update(_memberRec);
                    }
                }
                else
                {
                    // Detection failed: keep the raw image and record the error message.
                    _memberRec.IsFaceDetected       = false;
                    _memberRec.IsFaceTagged         = false;
                    _memberRec.DetectedFaceImage    = fileInfo.Name;
                    _memberRec.FaceDetectionRemarks = String.IsNullOrEmpty(faceRes.ErrorMsg) ? "Some Exception" : faceRes.ErrorMsg;

                    _memberSvc.Update(_memberRec);
                }

                return(new MemeberFaceUploadResult
                {
                    IsAddOk = true,
                    FileNames = streamProvider.FileData.Select(entry => entry.LocalFileName).FirstOrDefault(),
                    ErrorMsg = !faceRes.Outcome ? faceRes.ErrorMsg : faceRes.Remarks(),
                    UpdatedTimestamp = DateTime.UtcNow,
                    FacesDetected = faceRes.FacesDetected,
                    MemberId = _memberRec.Key.ToString(),
                    IsFaceDetectionOk = faceRes.Outcome
                });
            }
            catch (Exception ex)
            {
                // Roll back the member record inserted above, if the insert succeeded.
                if (_memberRec.Key > 0)
                {
                    _memberSvc.Delete(_memberRec);
                }
                return(new MemeberFaceUploadResult
                {
                    IsAddOk = false,
                    FileNames = string.Empty,
                    ErrorMsg = ex.Message,
                    FacesDetected = faceRes.FacesDetected,
                    IsFaceDetectionOk = faceRes.Outcome,
                    // Added for consistency with the success path, which always stamps the result.
                    UpdatedTimestamp = DateTime.UtcNow
                });
            }
        }
        /// <summary>
        /// Reason : To get all similar files from the folder with a percentage of similarity for the selected file.
        /// Compares files from the folder whose file length is within +- 100000 of the original file.
        /// </summary>
        /// <param name="inputFilePath">Input file path of image</param>
        /// <param name="appStartPath">Application start path used to initialise face detection</param>
        /// <param name="length">Length of image file that varies to compare with another file (currently unused by this method)</param>
        /// <param name="duplicateImageList">Receives the original file entry followed by its best matches</param>
        public void GetAllSimilarImages(string inputFilePath, string appStartPath, double length, ref List<DuplicateImageDetails> duplicateImageList)
        {
            try
            {
                // The selected file is always the first entry in the result list.
                DuplicateImageDetails imgOriginalFile = new DuplicateImageDetails();
                imgOriginalFile.FilePath = inputFilePath;
                // Path.GetFileName replaces the original hand-rolled (and twice-computed) Split('\\') logic.
                imgOriginalFile.FileName = Path.GetFileName(inputFilePath);
                imgOriginalFile.Percentage = "Original Selected File";
                duplicateImageList.Add(imgOriginalFile);

                // Get metadata of the input file so the data layer can find best matches.
                _faceDetection = new FaceDetection(appStartPath);
                _imgGrabber = new ImageGrabber();
                DataLayer.EntityModel.Image metadataInputImgObj = GetImageMetadata(inputFilePath, appStartPath);

                var bestMatchImageList = new DataLayer.ModelClasses.Image().GetImagesByBestMatch(metadataInputImgObj);
                // For an image-similarity percentage both images would need to be compared;
                // the percentage is currently left blank for matched entries.
                foreach (var infoObj in bestMatchImageList)
                {
                    DuplicateImageDetails duplicateImageCheck = new DuplicateImageDetails();
                    duplicateImageCheck.FilePath = infoObj.ImagePath;
                    duplicateImageCheck.FileName = Path.GetFileName(infoObj.ImagePath);
                    duplicateImageCheck.Percentage = "";
                    duplicateImageList.Add(duplicateImageCheck);
                }
            }
            catch (Exception ex)
            {
                // Best-effort: log and return whatever has been collected so far.
                Console.WriteLine(ex.Message);
            }
        }