Example #1
 public AnnotationsFetcher(FaceProcessor faceProcessor, string servicePrefix, string instance)
 {
     faceProcessor_   = faceProcessor;
     serviceInstance_ = instance;
     serviceNamespace = new Namespace(new Name(servicePrefix));
     serviceNamespace.setFace(faceProcessor.getFace());
 }
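Examples #7 and #15 below show this constructor in use, after NdnRtc.Initialize has been called and a FaceProcessor has been started.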
Example #2
        /// <summary>
        /// Enables webcam input, either from the default capture device or from an MJPEG stream URL.
        /// </summary>
        public static bool EnableWebcam(string cameraUrl = null)
        {
            IVideoSource capture;

            try
            {
                if (cameraUrl == null)
                {
                    capture = new VideoCaptureDevice(
                        new FilterInfoCollection(FilterCategory.VideoInputDevice)[0].MonikerString);
                }
                else // otherwise assume an MJPEG stream; this is a WinForms app, so keep it simple
                {
                    capture = new MJPEGStream(cameraUrl);
                }
                source = new ProcessableVideoSource(capture);
            }
            catch (Exception e)
            {
                Console.WriteLine(e);
                MessageBox.Show(Messages.cameraNotFound);
                return false;
            }
            processor = new FaceProcessor(source);
            processor.Start();

            source.NewFrame += GetFrame;
            source.Start();
            return true;
        }
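A hypothetical call site for the method above; the fallback URL is made up for illustration:

            // Hypothetical usage sketch: try the default capture device first,
            // then fall back to a network MJPEG camera (URL is an assumption).
            if (!EnableWebcam())
            {
                EnableWebcam("http://192.168.0.10/video.mjpg");
            }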
Example #3
        private void Init()
        {
            _traking   = LocatorFactory.GetTrackingLocator(AppId, FtKey) as FaceTracking;
            _detection = LocatorFactory.GetDetectionLocator(AppId, FdKey) as FaceDetection;
            _recognize = new FaceRecognize(AppId, FrKey);
            _processor = new FaceProcessor(_traking, _recognize);

            //init cache
            if (Directory.Exists(FaceLibraryPath))
            {
                var files = Directory.GetFiles(FaceLibraryPath);
                foreach (var file in files)
                {
                    var info = new FileInfo(file);
                    _cache.Add(info.Name.Replace(info.Extension, ""), File.ReadAllBytes(file));
                }
            }

            CvInvoke.UseOpenCL = false;

            _capture = GetWebCamera();
            _capture.Start();

            //Application.Idle += VideoCaptured; // capturing video this way avoids starting a separate thread
            //_capture.ImageGrabbed += VideoCaptured; // do not use this approach
            _run = new Task(VideoCaptured);
            _run.Start();
        }
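VideoCaptured is not shown here. A minimal sketch of what such a polling loop might look like, assuming _capture is an Emgu CV VideoCapture; the loop condition and frame handling are assumptions:

        // Sketch only, not the original implementation:
        private void VideoCaptured()
        {
            while (true) // the original presumably has a stop condition
            {
                using (var frame = _capture.QueryFrame()) // Emgu CV: grab and decode the next frame
                {
                    if (frame == null)
                    {
                        continue;
                    }
                    // hand the frame to _processor / the UI here
                }
            }
        }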
Example #4
        public async Task <ActionResult> CheckImage()
        {
            if (System.Web.HttpContext.Current.Request.Files.AllKeys.Any())
            {
                var   pic   = System.Web.HttpContext.Current.Request.Files["Image"];
                Image image = Image.FromStream(pic.InputStream);

                using (var uploadedImage = HelperMethods.ProcessImage(new Bitmap(image)))
                {
                    FrameAnalysisJSON result = await FaceProcessor.ProcessFrame(new Bitmap(uploadedImage));

                    if (result == null || result.Faces.Count == 0)
                    {
                        return Json(new { result = new List <CroppedFace>() },
                                    JsonRequestBehavior.AllowGet);
                    }

                    List <CroppedFace> croppedFaces = new List <CroppedFace>();
                    foreach (var face in result.CroppedFaces(uploadedImage, 25))
                    {
                        croppedFaces.Add(face);
                    }
                    return Json(new { result = croppedFaces },
                                JsonRequestBehavior.AllowGet);
                }
            }
            return null;
        }
Example #5
        private void Init()
        {
            _age       = new FaceAge(AppId, AgeKey);
            _gender    = new FaceGender(AppId, GenderKey);
            _traking   = LocatorFactory.GetTrackingLocator(AppId, FtKey, _age, _gender) as FaceTracking;
            _detection = LocatorFactory.GetDetectionLocator(AppId, FdKey) as FaceDetection;
            _recognize = new FaceRecognize(AppId, FrKey);
            _processor = new FaceProcessor(_traking, _recognize);

            //init cache
            if (Directory.Exists(FaceLibraryPath))
            {
                var files = Directory.GetFiles(FaceLibraryPath);
                foreach (var file in files)
                {
                    var info = new FileInfo(file);
                    _cache.Add(info.Name.Replace(info.Extension, ""), File.ReadAllBytes(file));
                }
            }

            stride     = width * pixelSize;
            bufferSize = stride * height;

            _pImage = Marshal.AllocHGlobal(bufferSize);
            _image  = new Bitmap(width, height, stride, PixelFormat.Format24bppRgb, _pImage);

            var ffmpeg = new FFMpegConverter();

            outputStream = new MemoryStream();

            var setting =
                new ConvertSettings
            {
                CustomOutputArgs = "-s 1920x1080", //根据业务需求-r参数可以调整,取决于摄像机的FPS
            };                                     //-s 1920x1080 -q:v 2 -b:v 64k

            //-an -r 15 -pix_fmt bgr24 -updatefirst 1
            //task = ffmpeg.ConvertLiveMedia("rtsp://*****:*****@192.168.1.64:554/h264/ch1/main/av_stream", null,
            //    outputStream, Format.raw_video, setting);

            /*
             * USB camera capture
             * ffmpeg can also capture from a USB camera, as the code below shows.
             * First run: ffmpeg -list_devices true -f dshow -i dummy to list the USB capture devices
             * present on the system (or look the device name up in Device Manager); on my machine it is
             * called "USB2.0 PC CAMERA". Then adjust the video frame info (width and height) to the
             * captured resolution; the pixel size usually does not need to change. To list the
             * resolutions a device supports, run:
             * ffmpeg -list_options true -f dshow -i video="USB2.0 PC CAMERA"
             */
            task = ffmpeg.ConvertLiveMedia("video=Logitech HD Webcam C270", "dshow",
                                           outputStream, Format.raw_video, setting);

            task.OutputDataReceived += DataReceived;
            task.Start();

            _renderTask = new Task(Render);
            _renderTask.Start();
        }
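DataReceived and Render are not shown. The idea is that ffmpeg writes raw BGR24 frames into outputStream, which get copied into the native buffer (_pImage) that _image wraps; Render then draws _image. A sketch under those assumptions; the event signature and framing are guesses:

        // Sketch only: copy one raw BGR24 frame from the ffmpeg output into _pImage.
        private void DataReceived(object sender, EventArgs e)
        {
            lock (outputStream)
            {
                if (outputStream.Length < bufferSize)
                {
                    return; // wait until a full frame has arrived
                }
                Marshal.Copy(outputStream.GetBuffer(), 0, _pImage, bufferSize);
                outputStream.SetLength(0); // reset for the next frame
            }
        }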
Example #6
        // this should probably return an ActionResult, otherwise nothing will happen in the frontend, IIRC
        public async Task <ActionResult> CaptureSnapshot(string imgBase64, string latitude, string longitude)
        {
            FaceApiCalls apiCalls = new FaceApiCalls(new HttpClientWrapper());

            // Prepare base64 string
            imgBase64 = imgBase64.Substring(imgBase64.IndexOf("base64,", StringComparison.Ordinal) + 7);
            imgBase64 = imgBase64.Substring(0, imgBase64.LastIndexOf("\"", StringComparison.Ordinal));
            imgBase64 = imgBase64.Replace(" ", "+");

            // Create a bitmap
            byte[] bitmapData = Convert.FromBase64String(FixBase64ForImage(imgBase64));
            System.IO.MemoryStream streamBitmap = new System.IO.MemoryStream(bitmapData);
            Bitmap bitmap = new Bitmap((Bitmap)Image.FromStream(streamBitmap));

            bitmap = HelperMethods.ProcessImage(bitmap);
            // Analyze bitmap
            FrameAnalysisJSON analysisResult = await FaceProcessor.ProcessFrame(bitmap);

            if (analysisResult == null)
            {
                // an error must have occurred; should alert the user.
                return null;
            }

            if (analysisResult.Faces.Count == 0)
            {
                return(Json(new { result = "No faces have been found in the provided picture" }, JsonRequestBehavior.AllowGet));
            }

            var biggestConfidence = LikelinessConfidence.LowProbability;

            foreach (var face in analysisResult.Faces)
            {
                var searchResult = await apiCalls.SearchFaceInFaceset(face.Face_token);

                if (searchResult != null)
                {
                    foreach (var likelinessResult in searchResult.LikelinessConfidences()) //might want to set the camera properties to some value.
                    {
                        biggestConfidence = (likelinessResult.Confidence > biggestConfidence)
                            ? likelinessResult.Confidence
                            : biggestConfidence;

                        await SearchResultHandler.HandleOneResult(result : likelinessResult,
                                                                  minimumConfidence : LikelinessConfidence.VeryHighProbability,
                                                                  cameraProperties : new CameraProperties(Convert.ToDouble(latitude), Convert.ToDouble(longitude)));
                    }
                }
            }

            return Json(new
            {
                result = string.Format(
                    "There's a {0} probability that the person/people from your picture are currently missing. Thank you for the submission, appropriate actions will be taken.",
                    biggestConfidence.ToPrettyString())
            }, JsonRequestBehavior.AllowGet);
        }
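FixBase64ForImage is referenced above but not shown; a plausible minimal version (purely hypothetical) that removes stray line breaks and restores the '=' padding:

        // Hypothetical helper, not the original implementation:
        // strips line breaks and restores '=' padding so Convert.FromBase64String accepts the input.
        private static string FixBase64ForImage(string image)
        {
            string cleaned = image.Replace("\r", string.Empty).Replace("\n", string.Empty);
            return cleaned.PadRight(cleaned.Length + (4 - cleaned.Length % 4) % 4, '=');
        }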
Example #7
    void Start()
    {
        TextureReaderComponent.OnImageAvailableCallback += OnImageAvailable;
        //timestamp = gameObject.GetComponent<TangoARScreen> ().m_screenUpdateTime;
        frameMgr    = GameObject.FindObjectOfType <FramePoolManager>();
        frameNumber = 0;
        //frameObjects = new Dictionary<long, FrameObjectData> ();
        boxMgr                    = GameObject.FindObjectOfType <BoundingBoxPoolManager>();
        timestamp                 = 0;
        frameBuffer               = new ConcurrentQueue <Dictionary <int, FrameObjectData> > ();
        boundingBoxBufferToCalc   = new ConcurrentQueue <BoxData> ();
        boundingBoxBufferToUpdate = new ConcurrentQueue <CreateBoxData> ();
        boxData                   = new List <CreateBoxData> ();
//		frameObjectBuffer = new RingBuffer<FrameObjectData> (100000);
//		boxBufferToCalc = new RingBuffer<BoxData> (100000);
//		boxBufferToUpdate = new RingBuffer<CreateBoxData> (100000);
        camForCalcThread = GameObject.Find("Camera").GetComponent("Camera") as Camera;
        calc             = new Thread(calculationsForBoundingBox);
        calc.Start();
        labelColors = new Dictionary <string, Color> ();
        kalman      = new Dictionary <string, IKalmanWrapper> ();

        colors = new List <Color> {
            new Color(255f / 255, 109f / 255, 124f / 255),
            new Color(119f / 255, 231f / 255, 255f / 255),
            new Color(82f / 255, 255f / 255, 127f / 255),
            new Color(252f / 255, 187f / 255, 255f / 255),
            new Color(255f / 255, 193f / 255, 130f / 255)
        };


        // @Therese - these need to be moved somewhere to a higher-level entity as
        // configuration parameters (may be changed frequently during testing)
        string rootPrefix       = "/icear/user";
        string userId           = "peter";    // "mobile-terminal0";
        string serviceType      = "object_recognizer";
        string serviceInstance  = "yolo";     // "yolo";
        string serviceInstance2 = "openface"; // "yolo";

        NdnRtc.Initialize(rootPrefix, userId);
        faceProcessor_ = new FaceProcessor();
        faceProcessor_.start();

        assetFetcher_ = new AssetBundleFetcher(faceProcessor_);

        string servicePrefix = rootPrefix + "/" + userId + "/" + serviceType;

        // an AnnotationsFetcher instance might also be a singleton class,
        // initialized/created somewhere else; here just as an example
        aFetcher_        = new AnnotationsFetcher(faceProcessor_, servicePrefix, serviceInstance);
        openFaceFetcher_ = new AnnotationsFetcher(faceProcessor_, servicePrefix, serviceInstance2);

        // setup CNL logging
        ILOG.J2CsMapping.Util.Logging.Logger.getLogger("").setLevel(ILOG.J2CsMapping.Util.Logging.Level.FINE);
        ILOG.J2CsMapping.Util.Logging.Logger.Write = delegate(string message) { Debug.Log(System.DateTime.Now + ": " + message); };
    }
Example #8
        /// <summary>
        /// Uploads the picture
        /// Author: Tomas Drasutis
        /// </summary>
        private async void uploadButton_Click(object sender, EventArgs e)
        {
            int    chosenImageIndex = 0;
            Bitmap uploadedImage    = ImageUpload.UploadImage();

            if (uploadedImage == null)
            {
                return;
            }
            else
            {
                uploadedImage = HelperMethods.ProcessImage(uploadedImage);
            }
            FrameAnalysisJSON result = await FaceProcessor.ProcessFrame((Bitmap)uploadedImage.Clone());

            missingPersonPictureBox.Image?.Dispose();
            missingPersonPictureBox.Image = null;
            if (result == null)
            {
                MessageBox.Show(Messages.errorWhileAnalysingImage);
                validImage = false;
                return;
            }
            switch (result.Faces.Count)
            {
            case 0:
                MessageBox.Show(Messages.noFacesInImage);
                validImage = false;
                uploadedImage.Dispose();
                return;

            case 1:
                break;

            default:
                var chooseFaceForm = new ChooseFaceFormcs(result.Faces, uploadedImage);
                chooseFaceForm.ShowDialog();
                if (chooseFaceForm.DialogResult == DialogResult.OK)
                {
                    chosenImageIndex = chooseFaceForm.SelectedFace;
                    chooseFaceForm.Dispose();
                }
                else
                {
                    return;
                }
                break;
            }
            validImage = true;
            missingPersonPictureBox.Image = HelperMethods.CropImage(uploadedImage, result.Faces[chosenImageIndex].Face_rectangle, 25);
            faceToken = result.Faces[chosenImageIndex].Face_token;
        }
Example #9
        public FaceDetectionService()
        {
            _age    = new FaceAge(AppConfigurations.AppId, AppConfigurations.AgeKey);        // age estimation
            _gender = new FaceGender(AppConfigurations.AppId, AppConfigurations.GenderKey);  // gender recognition
            //// face detection on still images
            _detection = LocatorFactory.GetDetectionLocator(AppConfigurations.AppId, AppConfigurations.FdKey, _age, _gender) as FaceDetection;
            _traking   = LocatorFactory.GetTrackingLocator(AppConfigurations.AppId, AppConfigurations.FtKey, _age, _gender) as FaceTracking;

            _recognize = new FaceRecognize(AppConfigurations.AppId, AppConfigurations.FrKey);

            _processor            = new FaceProcessor(_detection, _recognize);
            _personFaceRepository = new PersonFaceRepository();
        }
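Note that, unlike Examples #3 and #11, this service wires the FaceProcessor to the detection locator (_detection) rather than the tracking one, and the age/gender estimators are attached to the detection locator here.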
Example #10
        private static void TestMatching()
        {
            using (var processor = new FaceProcessor("appId",
                                                     "ftKey", "frKey", true))
            {
                var image1 = Image.FromFile("test2.jpg");
                var image2 = Image.FromFile("test.jpg");

                var result1 = processor.LocateExtract(new Bitmap(image1));
                var result2 = processor.LocateExtract(new Bitmap(image2));

                if ((result1 != null) && (result2 != null))
                {
                    Console.WriteLine(processor.Match(result1[0].FeatureData, result2[0].FeatureData, true));
                }
            }
        }
Example #11
        private void Init()
        {
            _traking   = LocatorFactory.GetTrackingLocator(AppId, FtKey) as FaceTracking;
            _detection = LocatorFactory.GetDetectionLocator(AppId, FdKey) as FaceDetection;
            _recognize = new FaceRecognize(AppId, FrKey);
            _processor = new FaceProcessor(_traking, _recognize);

            //init cache
            if (Directory.Exists(FaceLibraryPath))
            {
                var files = Directory.GetFiles(FaceLibraryPath);
                foreach (var file in files)
                {
                    var info = new FileInfo(file);
                    _cache.Add(info.Name.Replace(info.Extension, ""), File.ReadAllBytes(file));
                }
            }

            _pImage = Marshal.AllocHGlobal(1920 * 1080 * 3);
            _image  = new Bitmap(1920, 1080, 1920 * 3, PixelFormat.Format24bppRgb, _pImage);

            var ffmpeg = new FFMpegConverter();

            outputStream = new MemoryStream();

            var setting =
                new ConvertSettings
            {
                CustomOutputArgs = "-an -r 15 -pix_fmt bgr24 -updatefirst 1" //根据业务需求-r参数可以调整,取决于摄像机的FPS
            };                                                               //-s 1920x1080 -q:v 2 -b:v 64k

            task = ffmpeg.ConvertLiveMedia("rtsp://*****:*****@192.168.1.64:554/h264/ch1/main/av_stream", null,
                                           outputStream, Format.raw_video, setting);

            task.OutputDataReceived += DataReceived;
            task.Start();

            _renderTask = new Task(Render);
            _renderTask.Start();
        }
Example #12
 public AssetBundleFetcher(FaceProcessor faceProcessor)
 {
     faceProcessor_ = faceProcessor;
 }
Example #13
 /**
  * Adds a processing callback, invoked before face detection runs.
  *
  * @param processor frame-processing callback
  */
 public void addPreProcessor(FaceProcessor processor)
 {
     preProcessors.Add(processor);
 }
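For context, a sketch of how such callbacks might be consumed before detection; the pipeline method, the frame type, and the callback's name are assumptions, not the library's actual API:

 // Hypothetical sketch of the detection pipeline draining the callbacks:
 private void RunPreProcessors(Bitmap frame)
 {
     foreach (FaceProcessor processor in preProcessors)
     {
         processor.Process(frame); // assumed callback method name
     }
     // ...face detection runs on the (possibly modified) frame here...
 }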
Example #14
 public FaceProcessingService(IHubContext <FaceDetectedAlertHub> hubContext)
 {
     this.hubContext          = hubContext;
     Processor                = new FaceProcessor();
     Processor.FacesDetected += HandleFacesDetectedEvent;
 }
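HandleFacesDetectedEvent is not shown; a plausible handler, assuming ASP.NET Core SignalR and a hypothetical event-args payload:

 // Hypothetical sketch, not the original handler:
 private async void HandleFacesDetectedEvent(object sender, FacesDetectedEventArgs e)
 {
     // broadcast the detection to every client connected to the alert hub
     await hubContext.Clients.All.SendAsync("FaceDetected", e);
 }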
Example #15
    void Start()
    {
        Debug.Message("initializing OnCameraFrame...");

        try
        {
            renderBoundingBoxes = true;
            timestamp_          = 0;

            Debug.Log("adding callback for capturing camera frames");
            TextureReaderComponent.OnImageAvailableCallback += OnImageAvailable;

            frameMgr_ = GameObject.FindObjectOfType <FramePoolManager>();
            boxMgr_   = GameObject.FindObjectOfType <BoundingBoxPoolManager>();

            Debug.Log("creating structures for frames and bounding boxes processing");
            frameBuffer_               = new ConcurrentQueue <Dictionary <int, FrameObjectData> >();
            boundingBoxBufferToCalc_   = new ConcurrentQueue <BoxData>();
            boundingBoxBufferToUpdate_ = new ConcurrentQueue <CreateBoxData>();
            boxData_ = new List <CreateBoxData>();

            camForCalcThread_ = GameObject.Find("Camera").GetComponent("Camera") as Camera;
            //calc_ = new Thread(calculationsForBoundingBox);
            //calc_.Start();

            labelColors_ = new Dictionary <string, Color>();
            kalman_      = new Dictionary <string, IKalmanWrapper>();

            colors_ = new List <Color> {
                new Color(255f / 255, 109f / 255, 124f / 255),
                new Color(119f / 255, 231f / 255, 255f / 255),
                new Color(82f / 255, 255f / 255, 127f / 255),
                new Color(252f / 255, 187f / 255, 255f / 255),
                new Color(255f / 255, 193f / 255, 130f / 255)
            };

            Debug.Log("initializing semantic db controller");
            lastDbQuery_  = System.DateTime.Now;
            lastKeyFrame_ = System.DateTime.Now;
            dbQueryRate_  = 0.5f; // once every 2 seconds
            dbController_ = new SemanticDbController("http://131.179.142.7:8888/query");

            Debug.Log("initializing NDN modules");
            // @Therese - these need to be moved somewhere to a higher-level entity as
            // configuration parameters (may be changed frequently during testing)
            string rootPrefix  = "/icear/user";
            string userId      = "peter"; // "mobile-terminal0";
            string serviceType = "object_recognizer";

            string [] edgeServices = { "yolo", "openface" }; // these must be unique!

            NdnRtc.Initialize(rootPrefix, userId);
            faceProcessor_ = new FaceProcessor();
            faceProcessor_.start();

            assetFetcher_ = new AssetBundleFetcher(faceProcessor_);

            string servicePrefix = rootPrefix + "/" + userId + "/" + serviceType;
            annotationFetchers_ = new List <AnnotationsFetcher>();

            foreach (var service in edgeServices)
            {
                Debug.LogFormat("initializing annotations fetcher for {0}...", service);
                annotationFetchers_.Add(new AnnotationsFetcher(faceProcessor_, servicePrefix, service));
            }

            // setup CNL logging
            //ILOG.J2CsMapping.Util.Logging.Logger.getLogger("").setLevel(ILOG.J2CsMapping.Util.Logging.Level.FINE);
            //ILOG.J2CsMapping.Util.Logging.Logger.Write = delegate (string message) { Debug.Log(System.DateTime.Now + ": " + message); };
        }
        catch (System.Exception e)
        {
            Debug.LogExceptionFormat(e, "while initializing");
        }
    }