Example #1
        protected override async void OnCreate(Bundle savedInstanceState)
        {
            base.OnCreate(savedInstanceState);

            SetContentView(Resource.Layout.FaceIdentify);

            this.cameraPreview  = this.FindViewById<CameraSourcePreview>(Resource.Id.preview);
            this.graphicOverlay = this.FindViewById<GraphicOverlay>(Resource.Id.faceOverlay);

            var button = this.FindViewById<ImageButton>(Resource.Id.flipButton);

            button.Click += (sender, args) =>
            {
                this.isFrontFacing = !this.isFrontFacing;

                if (this.cameraSource != null)
                {
                    this.cameraSource.Release();
                    this.cameraSource = null;
                }

                this.CreateCameraSource();
                this.StartCameraSource();
            };

            if (ActivityCompat.CheckSelfPermission(this, Manifest.Permission.Camera) == Permission.Granted)
            {
                this.CreateCameraSource();
                LiveCamHelper.Init();
                LiveCamHelper.GreetingsCallback     = (s) => { this.RunOnUiThread(() => Greetings = s); };
                LiveCamHelper.FacesDetectedCallback = result => { this.RunOnUiThread(() => CognitiveFacesResult = result); };

                this.faceService = new FaceService();
                await this.faceService.Initialize();

                //using (var stream = File.OpenRead("/data/user/0/CognitiveDemo.Droid/files/CognitivveFrame.png"))
                //{
                //    await this.faceService.IdentityFace(stream);
                //}
            }
            else
            {
                this.RequestCameraPermission();
            }
        }
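RequestCameraPermission is a helper defined elsewhere in this sample and not shown in these snippets. A minimal sketch of what such a helper typically does, assuming ActivityCompat from the Android support libraries and a made-up request-code constant:

        // Hypothetical sketch only; the real helper lives elsewhere in the sample.
        private const int RC_HANDLE_CAMERA_PERM = 2; // assumed request code

        private void RequestCameraPermission()
        {
            // Ask the OS for the camera permission; the result comes back in
            // OnRequestPermissionsResult with the same request code, after which
            // CreateCameraSource/StartCameraSource can be called.
            ActivityCompat.RequestPermissions(this, new[] { Manifest.Permission.Camera }, RC_HANDLE_CAMERA_PERM);
        }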
Example #2
        public void OnPictureTaken(byte[] data)
        {
            Task.Run(async () =>
            {
                try
                {
                    // mark the frame as in-flight so another capture is not queued meanwhile
                    isProcessing = true;
                    Console.WriteLine("face detected: ");
                    var imageAnalyzer = new ImageAnalyzer(data);
                    await LiveCamHelper.ProcessCameraCapture(imageAnalyzer);
                }
                finally
                {
                    isProcessing = false;
                }
            });
        }
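The isProcessing flag is only written inside OnPictureTaken; the corresponding check is assumed to live wherever the next capture is triggered. A minimal sketch of such a guard (OnFrameAvailable is an assumed name, not part of the sample):

        // Hypothetical caller: drop a frame while the previous capture is still being analyzed.
        void OnFrameAvailable(byte[] data)
        {
            if (isProcessing)
            {
                return; // the previous ProcessCameraCapture call has not finished yet
            }

            OnPictureTaken(data);
        }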
Example #3
 private void _trainNewFaceButton_Click(object sender, EventArgs e)
 {
     if (PhotoProxy.LastPhoto != null)
     {
         Bitmap photo = PhotoProxy.LastPhoto;
         Task.Run(async () =>
         {
             // send the photo to the training endpoint as a grayscale byte array
             var client         = new HttpClient(); // hoist out so a new client is not created on every request
             client.BaseAddress = new Uri("http://88.119.27.98:55555");
             byte[] byteArray   = LiveCamHelper.BitmapToGrayscaleBytes(photo);
             var content        = new ByteArrayContent(byteArray);
             var response       = await client.PostAsync("api/train/" + jsonOfLoggedInPerson.GetValue("Id") + "/1", content);
             Console.WriteLine("Response from /api/train is " + response.StatusCode);
             Console.WriteLine(await response.Content.ReadAsStringAsync());
         });
     }
 }
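The comment above notes that a new HttpClient should not be constructed for every request. A minimal sketch of hoisting it into a shared field (sharedClient is an assumed name; the base address is the one used in the snippet):

 // Hypothetical shared client: HttpClient is designed to be reused across requests.
 private static readonly HttpClient sharedClient = new HttpClient
 {
     BaseAddress = new Uri("http://88.119.27.98:55555")
 };

The Task.Run bodies in these examples would then call sharedClient.PostAsync(...) directly instead of creating a client per call.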
Example #4
        protected override void OnCreate(Bundle savedInstanceState)
        {
            base.OnCreate(savedInstanceState);
            SetContentView(Resource.Layout.face_recognition);

            BTArduino = new BlueToothConnectionPortable();

            mPreview        = FindViewById<CameraSourcePreview>(Resource.Id.preview);
            mGraphicOverlay = FindViewById<GraphicOverlay>(Resource.Id.faceOverlay);

            if (ActivityCompat.CheckSelfPermission(this, Manifest.Permission.Camera) == Permission.Granted)
            {
                CreateCameraSource();
                LiveCamHelper.Init();
                LiveCamHelper.GreetingsCallback = (s) => { RunOnUiThread(() => GreetingsText = s); };
                //Task.Run(() => LiveCamHelper.RegisterFaces());
            }
            else
            {
                RequestCameraPermission();
            }
        }
Example #5
        protected async override void OnCreate(Bundle bundle)
        {
            base.OnCreate(bundle);

            // Set our view from the "main" layout resource
            SetContentView(Resource.Layout.Main);

            mPreview        = FindViewById<CameraSourcePreview>(Resource.Id.preview);
            mGraphicOverlay = FindViewById<GraphicOverlay>(Resource.Id.faceOverlay);
            //greetingsText = FindViewById<TextView>(Resource.Id.greetingsTextView);


            if (ActivityCompat.CheckSelfPermission(this, Manifest.Permission.Camera) == Permission.Granted)
            {
                CreateCameraSource();
                LiveCamHelper.Init();
                LiveCamHelper.GreetingsCallback = (s) => { RunOnUiThread(() => GreetingsText = s); };
                //await LiveCamHelper.RegisterFaces();
            }
            else
            {
                RequestCameraPermission();
            }
        }
Example #6
        public void OnPictureTaken(byte[] data)
        {
            Task.Run(async () =>
            {
                try
                {
                    isProcessing = true;

                    Console.WriteLine("face detected: ");
                    var bitmap = Bitmap.CreateScaledBitmap(BitmapFactory.DecodeByteArray(data, 0, data.Length), 240, 320, false);

                    if (_face != null)
                    {
                        //240width
                        //320height
                        //bitmap.Height = 320;
                        //bitmap.Width = 240;

                        Console.WriteLine($"position x: {_face.Position.X} position y: {_face.Position.Y} width: {_face.Width} height: {_face.Height} bitmapWidth: {bitmap.Width} bitmapHeight: {bitmap.Height}");

                        //var bitmapScalled = Bitmap.CreateScaledBitmap(bitmap, 128, 128, true);
                        //bitmap = Bitmap.CreateBitmap(bitmap, (int)_face.Position.X, (int)_face.Position.Y, (int)_face.Width, (int)_face.Height);


#pragma warning disable CS4014 // Because this call is not awaited, execution of the current method continues before the call is completed
                        Task.Factory.StartNew(() =>
                        {
                            System.Threading.Thread.Sleep(200);
                            bitmap = Bitmap.CreateBitmap(bitmap, (int)_face.Position.X / 2, (int)_face.Position.Y / 2, (int)_face.Width / 2, (int)_face.Height / 2);
                            bitmap = Bitmap.CreateScaledBitmap(bitmap, 240, 320, false);
                            _img.SetImageBitmap(bitmap);


                            PhotoProxy.LastPhoto = bitmap;

                            //Task<string> task = PostRecognition(bitmap);

                            //task.Wait();
                            //var x = task.Result;
                            //Console.WriteLine(x+" --- Post recognition response");


                            Task.Run(async () =>
                            {
                                // send the cropped face to the recognition endpoint as grayscale bytes
                                var client         = new HttpClient(); // hoist out so a new client is not created on every request
                                client.BaseAddress = new Uri("http://88.119.27.98:55555");
                                byte[] byteArray = LiveCamHelper.BitmapToGrayscaleBytes(bitmap);
                                var content = new ByteArrayContent(byteArray);
                                var response = await client.PostAsync("api/recognize", content);
                                Console.WriteLine("Response from /api/recognize is " + response.StatusCode);

                                // read the body once and reuse it for both logging and parsing
                                var responseBody = await response.Content.ReadAsStringAsync();
                                Console.WriteLine(responseBody);
                                responseObject = JObject.Parse(responseBody);
                                if (!string.IsNullOrWhiteSpace((string)responseObject.GetValue("Name")))
                                {
                                    newestResponse = responseObject.GetValue("Name") + ", " + responseObject.GetValue("Bio") + ", " + responseObject.GetValue("Likes");
                                }
                            });
                        });
#pragma warning restore CS4014 // Because this call is not awaited, execution of the current method continues before the call is completed
                    }


                    var imageAnalyzer = new ImageAnalyzer(data);
                    await LiveCamHelper.ProcessCameraCapture(imageAnalyzer);
                }
                catch (Exception ex)
                {
                    Console.WriteLine("======================================");
                    Console.WriteLine(ex);
                    throw;
                }
                finally
                {
                    isProcessing = false;
                }
            });
        }
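Bitmap.CreateBitmap throws an IllegalArgumentException when the crop rectangle reaches outside the source bitmap, which can happen when a detected face sits near the frame edge. A hedged sketch that clamps the rectangle before cropping (CropClamped is an assumed helper name, not part of the sample):

        // Hypothetical helper: keep the crop rectangle inside the bitmap bounds.
        static Bitmap CropClamped(Bitmap source, int x, int y, int width, int height)
        {
            x      = Math.Max(0, x);
            y      = Math.Max(0, y);
            width  = Math.Min(width,  source.Width  - x);
            height = Math.Min(height, source.Height - y);

            return Bitmap.CreateBitmap(source, x, y, width, height);
        }

The crop inside the StartNew block above would then become bitmap = CropClamped(bitmap, (int)_face.Position.X / 2, (int)_face.Position.Y / 2, (int)_face.Width / 2, (int)_face.Height / 2);.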
Example #7
        async void ProcessingImage(NSData uIImage)
        {
            lock (processingobjlocker) {
                if (isAPIprocessing)
                {
                    return;
                }
                isAPIprocessing = true;
            }

            cancellationTokenSource?.Cancel();
            cancellationTokenSource = new CancellationTokenSource();
            cancellationTokenSource.Token.ThrowIfCancellationRequested();
            //UIImage uIImage = null;
            //DispatchQueue.MainQueue.DispatchSync(() => {

            //    uIImage = ivPictureTaken.Image;
            //});



            try {
                if (task != null && (task.Status == TaskStatus.Running || task.Status == TaskStatus.WaitingToRun || task.Status == TaskStatus.WaitingForActivation))
                {
                    Console.WriteLine("Task has attempted to start while already running");
                }
                else
                {
                    Console.WriteLine("running api face recognition: ");
                    task = await Task.Factory.StartNew(async () => {
                        //await Task.Delay(10000);
                        //await Task.Delay(10000);
                        //await Task.Delay(10000);
                        //UIImage uIImage = UIImage.FromFile(ruta);

                        //using (uIImage) {
                        ImageAnalyzer imageAnalyzer = new ImageAnalyzer(() => Task.FromResult<Stream>(uIImage.AsStream()), null);
                        await LiveCamHelper.ProcessCameraCapture(imageAnalyzer).ConfigureAwait(false);

                        //}


                        return true;
                    },
                    cancellationTokenSource.Token,
                    TaskCreationOptions.None,
                    TaskScheduler.Default).ConfigureAwait(false);
                    await task;
                }
            }
            catch (Exception e) {
                Console.WriteLine("error api face recognition: " + e);
            }
            finally {
                //processingFaceDetection = false;
                //lock (lockerobj) {
                await Task.Delay(2000);

                processingFaceDetection = false;
                isAPIprocessing         = false;
                setupAVFoundationFaceDetection();
            }
            Console.WriteLine("finished processing ");
        }
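The lock around isAPIprocessing gives a single-flight guard: only one recognition call may run at a time. An equivalent sketch using Interlocked on an int flag (field and method names are assumptions, not part of the sample):

        // Hypothetical alternative guard: 0 = idle, 1 = a recognition call is in flight.
        int apiProcessingFlag;

        bool TryBeginApiCall()
        {
            // Atomically flip the flag from 0 to 1; returns false if a call is already running.
            return Interlocked.CompareExchange(ref apiProcessingFlag, 1, 0) == 0;
        }

        void EndApiCall()
        {
            // Reset in a finally block, mirroring the isAPIprocessing reset above.
            Interlocked.Exchange(ref apiProcessingFlag, 0);
        }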