Example No. 1
0
        /// <summary>
        /// Checks whether a stream is in the Horrible Property Set Format.
        /// </summary>
        /// <param name="stream">The stream to check. In order to perform the
        /// check, the method reads the first bytes from the stream. After
        /// reading, the stream is reset to the position it had before
        /// reading. The stream must be a <see cref="ByteArrayInputStream"/>
        /// supporting the mark/reset protocol.</param>
        /// <returns>
        ///     <c>true</c> if the stream is a property set
        /// stream; otherwise, <c>false</c>.
        /// </returns>
        /// <exception cref="MarkUnsupportedException">Thrown when the stream
        /// is not a <see cref="ByteArrayInputStream"/> or does not support
        /// marking.</exception>
        public static bool IsPropertySetStream(Stream stream)
        {
            // The mark/reset protocol is only available on ByteArrayInputStream,
            // so this cast doubles as the capability check below.
            ByteArrayInputStream dis = stream as ByteArrayInputStream;

            /*
             * Read at most this many bytes.
             */
            int BUFFER_SIZE = 50;

            /*
             * Mark the current position in the stream so that we can
             * reset to this position if the stream does not contain a
             * property set.
             */
            if (dis == null || !dis.MarkSupported())
            {
                throw new MarkUnsupportedException(stream.GetType().Name);
            }
            dis.Mark(BUFFER_SIZE);

            /*
             * Read a couple of bytes from the stream.
             */
            byte[] buffer = new byte[BUFFER_SIZE];
            int    bytes  =
                stream.Read(buffer, 0,
                            (int)Math.Min(buffer.Length, dis.Available()));
            // Delegate the actual header inspection to the byte[] overload.
            bool isPropertySetStream =
                IsPropertySetStream(buffer, 0, bytes);

            // NOTE(review): Seek(0) rewinds to the stream START, while the
            // contract above promises a reset to the MARKED position; the
            // subsequent Reset() is what honors the contract. Confirm the
            // Seek call is intentional (it is redundant when the mark was
            // taken at position 0, and contradicts the docs otherwise).
            stream.Seek(0, SeekOrigin.Begin);
            dis.Reset();
            return(isPropertySetStream);
        }
        /// <summary>
        /// A freshly created <see cref="ByteArrayInputStream"/> must report
        /// every backing byte as available.
        /// </summary>
        public void Available_InitializedStream_ReturnsNumberOfAvailableBytes()
        {
            // Arrange: back the stream with eleven known bytes.
            byte[] source = { 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10 };
            ByteArrayInputStream sut = ByteArrayInputStream.Create(source);

            // Act + Assert: nothing has been consumed yet, so all 11 bytes remain.
            Assert.AreEqual(11, sut.Available());
        }
        /// <summary>
        /// Runs face detection on one of the two loaded bitmaps and hands the
        /// result (or failure) to <c>SetUiAfterDetection</c> on the UI thread.
        /// </summary>
        /// <param name="mIndex">Selects the source image: 0 for mBitmap0,
        /// any other value for mBitmap1.</param>
        private async void ExecuteDetection(int mIndex)
        {
            Face[] faces    = null;
            bool   mSucceed = true;

            mProgressDialog.Show();
            AddLog("Request: Detecting in image " + mIndex);

            try
            {
                var faceClient = new FaceClient();
                using (MemoryStream pre_output = new MemoryStream())
                {
                    // FIX: collapsed the duplicated if/else — only the source
                    // bitmap differed between the two branches.
                    Bitmap source = mIndex == 0 ? mBitmap0 : mBitmap1;
                    source.Compress(Bitmap.CompressFormat.Jpeg, 100, pre_output);

                    using (ByteArrayInputStream inputStream = new ByteArrayInputStream(pre_output.ToArray()))
                    {
                        byte[] arr = new byte[inputStream.Available()];
                        inputStream.Read(arr);
                        // FIX: the MemoryStream handed to Detect was never
                        // disposed; the awaited call completes before we leave
                        // this block, so a using is safe.
                        using (var output = new MemoryStream(arr))
                        {
                            mProgressDialog.SetMessage("Detecting...");
                            SetInfo("Detecting...");
                            faces = await faceClient.Detect(output, true, true, new[] {
                                FaceServiceClientFaceAttributeType.Age,
                                FaceServiceClientFaceAttributeType.Gender,
                                FaceServiceClientFaceAttributeType.Smile,
                                FaceServiceClientFaceAttributeType.Glasses,
                                FaceServiceClientFaceAttributeType.FacialHair,
                                FaceServiceClientFaceAttributeType.Emotion,
                                FaceServiceClientFaceAttributeType.HeadPose
                            });
                        }
                    }
                }
            }
            catch (Java.Lang.Exception e)
            {
                // NOTE(review): only Java.Lang.Exception is caught; a .NET
                // exception would escape this async void method unobserved —
                // confirm FaceClient surfaces failures as Java exceptions.
                mSucceed = false;
                AddLog(e.Message);
            }

            RunOnUiThread(() =>
            {
                SetUiAfterDetection(faces, mIndex, mSucceed);
            });
        }
Example No. 4
0
        /// <summary>
        /// Detects faces in mBitmap and shows them in the detected-faces list.
        /// Progress is reported through mProgressDialog; the outcome is logged.
        /// </summary>
        private async void ExecuteDetection()
        {
            Face[] faces    = null;
            bool   mSucceed = true;

            mProgressDialog.Show();
            AddLog("Request: Detecting in image " + mImageUri);

            try
            {
                var faceClient = new FaceClient();
                using (MemoryStream pre_output = new MemoryStream())
                {
                    mBitmap.Compress(Bitmap.CompressFormat.Jpeg, 100, pre_output);

                    using (ByteArrayInputStream inputStream = new ByteArrayInputStream(pre_output.ToArray()))
                    {
                        byte[] arr = new byte[inputStream.Available()];
                        inputStream.Read(arr);
                        // FIX: dispose the intermediate MemoryStream once the
                        // awaited Detect call has finished with it.
                        using (var output = new MemoryStream(arr))
                        {
                            mProgressDialog.SetMessage("Detecting...");
                            SetInfo("Detecting...");
                            faces = await faceClient.Detect(output, true, true, new[] {
                                FaceServiceClientFaceAttributeType.Age,
                                FaceServiceClientFaceAttributeType.Gender,
                                FaceServiceClientFaceAttributeType.Smile,
                                FaceServiceClientFaceAttributeType.Glasses,
                                FaceServiceClientFaceAttributeType.FacialHair,
                                FaceServiceClientFaceAttributeType.Emotion,
                                FaceServiceClientFaceAttributeType.HeadPose
                            });
                        }
                    }
                }
            }
            catch (Java.Lang.Exception e)
            {
                mSucceed = false;
                AddLog(e.Message);
            }

            RunOnUiThread(() =>
            {
                // FIX: only log a success line when detection actually
                // succeeded — previously "Response: Success." was logged even
                // after a caught exception, unlike the sibling handlers.
                if (mSucceed)
                {
                    AddLog("Response: Success. Detected " + (faces == null ? 0 : faces.Length) + " face(s) in " + mImageUri);
                }

                // Show the result on screen when detection is done.
                ListView list_detected_faces = FindViewById <ListView>(Resource.Id.list_detected_faces);
                SetUiAfterDetection(faces, mSucceed, list_detected_faces);
            });
        }
        /// <summary>
        /// Detects faces in the supplied bitmap and routes the result to the
        /// add-face or select-image UI path depending on the request code.
        /// </summary>
        /// <param name="mRequestCode">Either REQUEST_ADD_FACE or
        /// REQUEST_SELECT_IMAGE; decides which result handler runs.</param>
        /// <param name="mImageUri">Source image URI, used only for logging.</param>
        /// <param name="mInternalBitmap">The bitmap to run detection on.</param>
        private async void ExecuteDetection(int mRequestCode, string mImageUri, Bitmap mInternalBitmap)
        {
            Face[] faces    = null;
            bool   mSucceed = true;

            mProgressDialog.Show();
            AddLog("Request: Detecting in image " + mImageUri);

            try
            {
                var faceClient = new FaceClient();
                using (MemoryStream pre_output = new MemoryStream())
                {
                    mInternalBitmap.Compress(Bitmap.CompressFormat.Jpeg, 100, pre_output);

                    using (ByteArrayInputStream inputStream = new ByteArrayInputStream(pre_output.ToArray()))
                    {
                        byte[] arr = new byte[inputStream.Available()];
                        inputStream.Read(arr);
                        // FIX: dispose the intermediate MemoryStream once the
                        // awaited Detect call has finished with it.
                        using (var output = new MemoryStream(arr))
                        {
                            mProgressDialog.SetMessage("Detecting...");
                            SetInfo("Detecting...");
                            faces = await faceClient.Detect(output, true, false, null);
                        }
                    }
                }
            }
            catch (Java.Lang.Exception e)
            {
                mSucceed = false;
                AddLog(e.Message);
            }

            RunOnUiThread(() =>
            {
                if (mSucceed)
                {
                    // FIX: null-safe count — Detect may return null, and the
                    // previous faces.Count() would throw inside this UI-thread
                    // lambda (matches the guard the sibling handlers use).
                    int faceCount = faces == null ? 0 : faces.Count();
                    AddLog("Response: Success. Detected " + faceCount + " Face(s) in image");
                }
                if (mRequestCode == REQUEST_ADD_FACE)
                {
                    SetUiAfterDetectionForAddFace(faces);
                }
                else if (mRequestCode == REQUEST_SELECT_IMAGE)
                {
                    SetUiAfterDetectionForSelectImage(faces);
                }
            });
        }
        /// <summary>
        /// Runs face detection on mBitmap and refreshes the UI once the
        /// result (or failure) is known.
        /// </summary>
        private async void ExecuteDetection()
        {
            Face[] detectedFaces = null;
            bool   succeeded     = true;

            mProgressDialog.Show();
            AddLog("Request: Detecting " + mImageUriStr);

            try
            {
                var client = new FaceClient();
                using (var jpegBuffer = new MemoryStream())
                {
                    // Re-encode the bitmap as JPEG so the service gets a
                    // well-known image format.
                    mBitmap.Compress(Bitmap.CompressFormat.Jpeg, 100, jpegBuffer);

                    using (var byteStream = new ByteArrayInputStream(jpegBuffer.ToArray()))
                    {
                        var bytes = new byte[byteStream.Available()];
                        byteStream.Read(bytes);
                        var detectInput = new MemoryStream(bytes);

                        mProgressDialog.SetMessage("Detecting...");
                        SetInfo("Detecting...");
                        detectedFaces = await client.Detect(detectInput, true, false, null);
                    }
                }
            }
            catch (Java.Lang.Exception e)
            {
                succeeded = false;
                AddLog(e.Message);
            }

            RunOnUiThread(() =>
            {
                if (succeeded)
                {
                    // Detect may return null; treat that as zero faces.
                    int count = detectedFaces?.Count() ?? 0;
                    AddLog("Response: Success. Detected " + count.ToString() + " Face(s)");
                }

                SetUiAfterDetection(detectedFaces, succeeded);
            });
        }
Example No. 7
0
        /// <summary>
        /// Parses the SignedCertificateTimestamp list carried in an X.509
        /// certificate extension value (Certificate Transparency SCT list).
        /// </summary>
        /// <param name="extensionvalue">Raw extension bytes containing the
        /// length-prefixed SCT list.</param>
        /// <returns>All SCTs decoded from the extension.</returns>
        private static SignedCertificateTimestamp[] parseSCTsFromCertExtension(byte[] extensionvalue)
        {
            // NOTE(review): leftover debug output — consider removing or
            // routing through a proper logger.
            System.Console.WriteLine("parseSCTsFromCertExtension:" + BitConverter.ToString(extensionvalue));
            List <SignedCertificateTimestamp> sctList = new List <SignedCertificateTimestamp>();
            ByteArrayInputStream bis = new ByteArrayInputStream(extensionvalue);
            int i = ReadUint16(bis);

            // 1154 (0x0482) appears to mark an alternate framing with one
            // extra 16-bit length word — presumably a DER OCTET STRING
            // wrapper; TODO confirm against the producing encoder.
            if (i == 1154)
            {
                SCTS_TYPE = 1;
                i         = ReadUint16(bis);
            }
            // Two more 16-bit words are consumed and discarded before the
            // entries start.
            i = ReadUint16(bis);
            i = ReadUint16(bis); // first one is the length of all SCTs concatenated, we don't actually need this
            // Each entry is an opaque16 (2-byte length prefix + payload);
            // stop once fewer than 3 bytes remain (not even a prefix + 1 byte).
            while (bis.Available() > 2)
            {
                byte[] sctBytes = ReadOpaque16(bis);
                sctList.Add(ParseSCTFromBinary(new ByteArrayInputStream(sctBytes)));
            }
            return(sctList.ToArray());
        }
        /// <summary>
        /// Adds the selected detected faces to the current person via the Face
        /// service, then persists each face thumbnail to local storage and
        /// records its URI. Closes the activity on success.
        /// </summary>
        /// <param name="mFaceIndices">Indices into mFaceGridViewAdapter's face
        /// lists identifying which detected faces to add.</param>
        private async void ExecuteFaceTask(List <int> mFaceIndices)
        {
            AddPersistedFaceResult result = null;
            bool mSucceed = true;

            mProgressDialog.Show();

            try
            {
                var  faceClient = new FaceClient();
                UUID personId   = UUID.FromString(mPersonId);
                using (MemoryStream pre_output = new MemoryStream())
                {
                    // Re-encode the bitmap as JPEG for upload.
                    mBitmap.Compress(Bitmap.CompressFormat.Jpeg, 100, pre_output);
                    using (ByteArrayInputStream inputStream = new ByteArrayInputStream(pre_output.ToArray()))
                    {
                        byte[] arr = new byte[inputStream.Available()];
                        inputStream.Read(arr);
                        var output = new MemoryStream(arr);

                        mProgressDialog.SetMessage("Adding face...");
                        SetInfo("Adding face...");

                        // One service call per selected face; each call uploads
                        // the same image but a different face rectangle.
                        // NOTE(review): `output` is re-read on every iteration —
                        // presumably AddPersonFace rewinds or copies the stream;
                        // confirm its position handling.
                        foreach (int index in mFaceIndices)
                        {
                            FaceRectangle faceRect = mFaceGridViewAdapter.faceRectList[index];
                            AddLog("Request: Adding face to person " + mPersonId);

                            result = await faceClient.AddPersonFace(mPersonGroupId, personId, output, "User data", faceRect);

                            // Remember the service-assigned persisted face id.
                            mFaceGridViewAdapter.faceIdList[index] = result.PersistedFaceId;
                        }
                    }
                }
            }
            catch (Java.Lang.Exception e)
            {
                mSucceed = false;
                AddLog(e.Message);
            }

            RunOnUiThread(() =>
            {
                mProgressDialog.Dismiss();

                if (mSucceed)
                {
                    String faceIds = "";
                    foreach (int index in mFaceIndices)
                    {
                        String faceId = mFaceGridViewAdapter.faceIdList[index].ToString();
                        faceIds      += faceId + ", ";

                        try
                        {
                            // Persist the face thumbnail under the face id and
                            // associate its URI with the person.
                            var file = System.IO.Path.Combine(Application.Context.FilesDir.Path, faceId);
                            using (var fs = new FileStream(file, FileMode.OpenOrCreate))
                            {
                                mFaceGridViewAdapter.faceThumbnails[index].Compress(Bitmap.CompressFormat.Jpeg, 100, fs);
                            }

                            Android.Net.Uri uri = Android.Net.Uri.Parse(file);
                            StorageHelper.SetFaceUri(faceId, uri.ToString(), mPersonId, this);
                        }
                        catch (Java.IO.IOException e)
                        {
                            // Best-effort: a failed thumbnail save only surfaces
                            // in the info label; the face was already added.
                            SetInfo(e.Message);
                        }
                    }
                    AddLog("Response: Success. Face(s) " + faceIds + "added to person " + mPersonId);
                    Finish();
                }
            });
        }
        /// <summary>
        /// Detects faces in mBitmap, populates the all-faces grid with the
        /// result, and enables grouping when 2–100 faces are shown.
        /// </summary>
        /// <param name="mImageUri">Source image URI, used only for logging.</param>
        private async void ExecuteDetection(string mImageUri)
        {
            Face[] faces    = null;
            bool   mSucceed = true;

            mProgressDialog.Show();
            AddLog("Request: Detecting in image " + mImageUri);

            try
            {
                var faceClient = new FaceClient();
                using (MemoryStream pre_output = new MemoryStream())
                {
                    mBitmap.Compress(Bitmap.CompressFormat.Jpeg, 100, pre_output);

                    using (ByteArrayInputStream inputStream = new ByteArrayInputStream(pre_output.ToArray()))
                    {
                        byte[] arr = new byte[inputStream.Available()];
                        inputStream.Read(arr);
                        // FIX: dispose the intermediate MemoryStream once the
                        // awaited Detect call has finished with it.
                        using (var output = new MemoryStream(arr))
                        {
                            mProgressDialog.SetMessage("Detecting...");
                            SetInfo("Detecting...");
                            faces = await faceClient.Detect(output, true, false, null);
                        }
                    }
                }
            }
            catch (Java.Lang.Exception e)
            {
                mSucceed = false;
                AddLog(e.Message);
            }

            RunOnUiThread(() =>
            {
                if (mSucceed)
                {
                    // FIX: null-safe count — Detect may return null, and the
                    // previous faces.Count() would throw inside this UI-thread
                    // lambda (matches the guard the sibling handlers use).
                    int faceCount = faces == null ? 0 : faces.Count();
                    AddLog("Response: Success. Detected " + faceCount + " Face(s) in image");
                }

                mProgressDialog.Dismiss();

                SetAllButtonsEnabledStatus(true);

                if (faces != null)
                {
                    SetInfo("Detection is done");

                    // Show the detailed list of original faces.
                    mFaceListAdapter.AddFaces(faces);
                    GridView listView = (GridView)FindViewById(Resource.Id.all_faces);
                    listView.Adapter  = mFaceListAdapter;

                    TextView textView = (TextView)FindViewById(Resource.Id.text_all_faces);
                    textView.Text     = String.Format(
                        "{0} face{1} in total",
                        mFaceListAdapter.faces.Count,
                        mFaceListAdapter.faces.Count != 1 ? "s" : "");
                }

                // The Face API groups between 2 and 100 faces per request.
                if (mFaceListAdapter.faces.Count >= 2 && mFaceListAdapter.faces.Count <= 100)
                {
                    SetGroupButtonEnabledStatus(true);
                }
                else
                {
                    SetGroupButtonEnabledStatus(false);
                }
            });
        }
        /// <summary>
        /// Detects faces in mBitmap, binds them to the identified-faces list,
        /// and updates the "detected" flag plus the identify-button state.
        /// </summary>
        private async void ExecuteDetection()
        {
            Face[] detectionResult = null;

            mProgressDialog.Show();

            try
            {
                var client = new FaceClient();
                using (var jpegBuffer = new MemoryStream())
                {
                    // Re-encode the bitmap as JPEG so the service gets a
                    // well-known image format.
                    mBitmap.Compress(Bitmap.CompressFormat.Jpeg, 100, jpegBuffer);

                    using (var byteStream = new ByteArrayInputStream(jpegBuffer.ToArray()))
                    {
                        var bytes = new byte[byteStream.Available()];
                        byteStream.Read(bytes);
                        var detectInput = new MemoryStream(bytes);

                        mProgressDialog.SetMessage("Detecting...");
                        SetInfo("Detecting...");
                        detectionResult = await client.Detect(detectInput, true, false, null);
                    }
                }
            }
            catch (Java.Lang.Exception e)
            {
                AddLog(e.Message);
            }

            RunOnUiThread(() =>
            {
                mProgressDialog.Dismiss();

                SetAllButtonsEnabledStatus(true);

                if (detectionResult == null)
                {
                    detected = false;
                }
                else
                {
                    // Bind the detected faces to the identified-faces list view.
                    mFaceListAdapter  = new FaceListAdapter(detectionResult, this);
                    ListView listView = (ListView)FindViewById(Resource.Id.list_identified_faces);
                    listView.Adapter  = mFaceListAdapter;

                    detected = detectionResult.Length > 0;
                    SetInfo(detected
                        ? "Click on the \"Identify\" button to identify the faces in image."
                        : "No faces detected!");
                }

                RefreshIdentifyButtonEnabledStatus();
            });
        }