private async void ExecuteDetection(int mIndex)
        {
            Face[] faces    = null;
            bool   mSucceed = true;

            mProgressDialog.Show();
            AddLog("Request: Detecting in image " + mIndex);

            try
            {
                var faceClient = new FaceClient();
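                // Re-encode the selected bitmap as a JPEG byte stream before sending it to the face detection service.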
                using (MemoryStream pre_output = new MemoryStream())
                {
                    if (mIndex == 0)
                    {
                        mBitmap0.Compress(Bitmap.CompressFormat.Jpeg, 100, pre_output);
                    }
                    else
                    {
                        mBitmap1.Compress(Bitmap.CompressFormat.Jpeg, 100, pre_output);
                    }

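                    // Copy the JPEG bytes into a new MemoryStream so the face client reads them from the start.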
                    using (ByteArrayInputStream inputStream = new ByteArrayInputStream(pre_output.ToArray()))
                    {
                        byte[] arr = new byte[inputStream.Available()];
                        inputStream.Read(arr);
                        var output = new MemoryStream(arr);

                        mProgressDialog.SetMessage("Detecting...");
                        SetInfo("Detecting...");
                        faces = await faceClient.Detect(output, true, true, new[] {
                            FaceServiceClientFaceAttributeType.Age,
                            FaceServiceClientFaceAttributeType.Gender,
                            FaceServiceClientFaceAttributeType.Smile,
                            FaceServiceClientFaceAttributeType.Glasses,
                            FaceServiceClientFaceAttributeType.FacialHair,
                            FaceServiceClientFaceAttributeType.Emotion,
                            FaceServiceClientFaceAttributeType.HeadPose
                        });
                    }
                }
            }
            catch (Java.Lang.Exception e)
            {
                mSucceed = false;
                AddLog(e.Message);
            }

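            // Report the result back on the UI thread once detection has finished (or failed).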
            RunOnUiThread(() =>
            {
                SetUiAfterDetection(faces, mIndex, mSucceed);
            });
        }
Code example #2
        private async void ExecuteDetection()
        {
            Face[] faces    = null;
            bool   mSucceed = true;

            mProgressDialog.Show();
            AddLog("Request: Detecting in image " + mImageUri);

            try
            {
                var faceClient = new FaceClient();
                using (MemoryStream pre_output = new MemoryStream())
                {
                    mBitmap.Compress(Bitmap.CompressFormat.Jpeg, 100, pre_output);

                    using (ByteArrayInputStream inputStream = new ByteArrayInputStream(pre_output.ToArray()))
                    {
                        byte[] arr = new byte[inputStream.Available()];
                        inputStream.Read(arr);
                        var output = new MemoryStream(arr);

                        mProgressDialog.SetMessage("Detecting...");
                        SetInfo("Detecting...");
                        faces = await faceClient.Detect(output, true, true, new[] {
                            FaceServiceClientFaceAttributeType.Age,
                            FaceServiceClientFaceAttributeType.Gender,
                            FaceServiceClientFaceAttributeType.Smile,
                            FaceServiceClientFaceAttributeType.Glasses,
                            FaceServiceClientFaceAttributeType.FacialHair,
                            FaceServiceClientFaceAttributeType.Emotion,
                            FaceServiceClientFaceAttributeType.HeadPose
                        });
                    }
                }
            }
            catch (Java.Lang.Exception e)
            {
                mSucceed = false;
                AddLog(e.Message);
            }

            RunOnUiThread(() =>
            {
                AddLog("Response: Success. Detected " + (faces == null ? 0 : faces.Length) + " face(s) in " + mImageUri);

                // Show the result on screen when detection is done.
                ListView list_detected_faces = FindViewById<ListView>(Resource.Id.list_detected_faces);
                SetUiAfterDetection(faces, mSucceed, list_detected_faces);
            });
        }
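Code example #3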
        private async void ExecuteDetection(int mRequestCode, string mImageUri, Bitmap mInternalBitmap)
        {
            Face[] faces    = null;
            bool   mSucceed = true;

            mProgressDialog.Show();
            AddLog("Request: Detecting in image " + mImageUri);

            try
            {
                var faceClient = new FaceClient();
                using (MemoryStream pre_output = new MemoryStream())
                {
                    mInternalBitmap.Compress(Bitmap.CompressFormat.Jpeg, 100, pre_output);

                    using (ByteArrayInputStream inputStream = new ByteArrayInputStream(pre_output.ToArray()))
                    {
                        byte[] arr = new byte[inputStream.Available()];
                        inputStream.Read(arr);
                        var output = new MemoryStream(arr);

                        mProgressDialog.SetMessage("Detecting...");
                        SetInfo("Detecting...");
                        faces = await faceClient.Detect(output, true, false, null);
                    }
                }
            }
            catch (Java.Lang.Exception e)
            {
                mSucceed = false;
                AddLog(e.Message);
            }

            RunOnUiThread(() =>
            {
                if (mSucceed)
                {
                    AddLog("Response: Success. Detected " + faces.Count() + " Face(s) in image");
                }
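                // Route the result to the appropriate UI handler depending on which flow triggered the detection.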
                if (mRequestCode == REQUEST_ADD_FACE)
                {
                    SetUiAfterDetectionForAddFace(faces);
                }
                else if (mRequestCode == REQUEST_SELECT_IMAGE)
                {
                    SetUiAfterDetectionForSelectImage(faces);
                }
            });
        }
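Code example #4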
        private async void ExecuteDetection()
        {
            Face[] faces    = null;
            bool   mSucceed = true;

            mProgressDialog.Show();
            AddLog("Request: Detecting " + mImageUriStr);

            try
            {
                var faceClient = new FaceClient();
                using (MemoryStream pre_output = new MemoryStream())
                {
                    mBitmap.Compress(Bitmap.CompressFormat.Jpeg, 100, pre_output);

                    using (ByteArrayInputStream inputStream = new ByteArrayInputStream(pre_output.ToArray()))
                    {
                        byte[] arr = new byte[inputStream.Available()];
                        inputStream.Read(arr);
                        var output = new MemoryStream(arr);

                        mProgressDialog.SetMessage("Detecting...");
                        SetInfo("Detecting...");
                        faces = await faceClient.Detect(output, true, false, null);
                    }
                }
            }
            catch (Java.Lang.Exception e)
            {
                mSucceed = false;
                AddLog(e.Message);
            }

            RunOnUiThread(() =>
            {
                if (mSucceed)
                {
                    int faces_count = (faces == null) ? 0 : faces.Count();
                    AddLog("Response: Success. Detected " + faces_count.ToString() + " Face(s)");
                }

                SetUiAfterDetection(faces, mSucceed);
            });
        }
Code example #5
        private async void ExecuteDetection(string mImageUri)
        {
            Face[] faces    = null;
            bool   mSucceed = true;

            mProgressDialog.Show();
            AddLog("Request: Detecting in image " + mImageUri);

            try
            {
                var faceClient = new FaceClient();
                using (MemoryStream pre_output = new MemoryStream())
                {
                    mBitmap.Compress(Bitmap.CompressFormat.Jpeg, 100, pre_output);

                    using (ByteArrayInputStream inputStream = new ByteArrayInputStream(pre_output.ToArray()))
                    {
                        byte[] arr = new byte[inputStream.Available()];
                        inputStream.Read(arr);
                        var output = new MemoryStream(arr);

                        mProgressDialog.SetMessage("Detecting...");
                        SetInfo("Detecting...");
                        faces = await faceClient.Detect(output, true, false, null);
                    }
                }
            }
            catch (Java.Lang.Exception e)
            {
                mSucceed = false;
                AddLog(e.Message);
            }

            RunOnUiThread(() =>
            {
                if (mSucceed)
                {
                    AddLog("Response: Success. Detected " + faces.Count() + " Face(s) in image");
                }

                mProgressDialog.Dismiss();

                SetAllButtonsEnabledStatus(true);

                if (faces != null)
                {
                    SetInfo("Detection is done");

                    // Show the detailed list of original faces.
                    mFaceListAdapter.AddFaces(faces);
                    GridView listView = (GridView)FindViewById(Resource.Id.all_faces);
                    listView.Adapter  = mFaceListAdapter;

                    TextView textView = (TextView)FindViewById(Resource.Id.text_all_faces);
                    textView.Text     = String.Format(
                        "{0} face{1} in total",
                        mFaceListAdapter.faces.Count,
                        mFaceListAdapter.faces.Count != 1 ? "s" : "");
                }

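                // The group button is only enabled when there are between 2 and 100 detected faces.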
                if (mFaceListAdapter.faces.Count >= 2 && mFaceListAdapter.faces.Count <= 100)
                {
                    SetGroupButtonEnabledStatus(true);
                }
                else
                {
                    SetGroupButtonEnabledStatus(false);
                }
            });
        }
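Code example #6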
        private async void ExecuteDetection()
        {
            Face[] faces = null;

            mProgressDialog.Show();
            //AddLog("Request: Detecting in image " + mImageUri);

            try
            {
                var faceClient = new FaceClient();
                using (MemoryStream pre_output = new MemoryStream())
                {
                    mBitmap.Compress(Bitmap.CompressFormat.Jpeg, 100, pre_output);

                    using (ByteArrayInputStream inputStream = new ByteArrayInputStream(pre_output.ToArray()))
                    {
                        byte[] arr = new byte[inputStream.Available()];
                        inputStream.Read(arr);
                        var output = new MemoryStream(arr);

                        mProgressDialog.SetMessage("Detecting...");
                        SetInfo("Detecting...");
                        faces = await faceClient.Detect(output, true, false, null);
                    }
                }
            }
            catch (Java.Lang.Exception e)
            {
                AddLog(e.Message);
            }

            RunOnUiThread(() =>
            {
                mProgressDialog.Dismiss();

                SetAllButtonsEnabledStatus(true);

                if (faces != null)
                {
                    // Set the adapter of the ListView which contains the details of detected faces.
                    mFaceListAdapter  = new FaceListAdapter(faces, this);
                    ListView listView = (ListView)FindViewById(Resource.Id.list_identified_faces);
                    listView.Adapter  = mFaceListAdapter;

                    if (faces.Count() == 0)
                    {
                        detected = false;
                        SetInfo("No faces detected!");
                    }
                    else
                    {
                        detected = true;
                        SetInfo("Click on the \"Identify\" button to identify the faces in image.");
                    }
                }
                else
                {
                    detected = false;
                }

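                // Re-evaluate whether the Identify button should be enabled based on the detection result.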
                RefreshIdentifyButtonEnabledStatus();
            });
        }