/// <summary>
/// Feeds <paramref name="input"/> through an <c>AutoCRLFOutputStream</c> at buffer
/// sizes 0 through 4 (0 means byte-at-a-time reads) and asserts the converted
/// output equals <paramref name="expect"/> at every buffer size.
/// </summary>
/// <exception cref="System.IO.IOException"></exception>
 private void AssertNoCrLfHelper(string expect, string input)
 {
     byte[] sourceBytes = Sharpen.Runtime.GetBytesForString(input);
     byte[] wantedBytes = Sharpen.Runtime.GetBytesForString(expect);
     for (int bufSize = 0; bufSize < 5; bufSize++)
     {
         InputStream src = new ByteArrayInputStream(sourceBytes);
         ByteArrayOutputStream sink = new ByteArrayOutputStream();
         OutputStream filtered = new AutoCRLFOutputStream(sink);
         if (bufSize == 0)
         {
             // Byte-at-a-time copy; Read() yields -1 at end of stream.
             int c;
             while ((c = src.Read()) != -1)
             {
                 filtered.Write(c);
             }
         }
         else
         {
             // Buffered copy using a bufSize-byte chunk.
             byte[] chunk = new byte[bufSize];
             int n;
             while ((n = src.Read(chunk)) >= 0)
             {
                 filtered.Write(chunk, 0, n);
             }
         }
         filtered.Flush();
         src.Close();
         filtered.Close();
         NUnit.Framework.Assert.AreEqual(Encode(wantedBytes), Encode(sink.ToByteArray()), "bufsize=" + bufSize);
     }
 }
        /// <summary>
        /// Builds a pack of 900 small blobs, appends one marker byte (0x7e) after
        /// the pack footer, parses it, and verifies the parser leaves the trailing
        /// byte unread on the stream.
        /// </summary>
        public virtual void TestDataAfterPackFooterSplitObjectRead()
        {
            byte[] blobBody = Constants.Encode("0123456789");
            // Build a pack ~17k
            int objectCount = 900;

            TemporaryBuffer.Heap pack = new TemporaryBuffer.Heap(32 * 1024);
            PackHeader(pack, objectCount);
            for (int n = 0; n < objectCount; n++)
            {
                // Object header byte: type OBJ_BLOB in the high nibble, length 10 below.
                pack.Write((Constants.OBJ_BLOB) << 4 | 10);
                Deflate(pack, blobBody);
            }
            Digest(pack);

            // Append a single marker byte after the pack footer.
            byte[] raw = pack.ToByteArray();
            byte[] withTrailer = new byte[raw.Length + 1];
            System.Array.Copy(raw, 0, withTrailer, 0, raw.Length);
            withTrailer[raw.Length] = unchecked((int)(0x7e));

            InputStream @in = new ByteArrayInputStream(withTrailer);
            PackParser p = Index(@in);
            p.SetAllowThin(true);
            p.SetCheckEofAfterPackFooter(false);
            p.SetExpectDataAfterPackFooter(true);
            p.Parse(NullProgressMonitor.INSTANCE);

            // The parser must stop exactly at the footer, leaving the marker next.
            NUnit.Framework.Assert.AreEqual(unchecked((int)(0x7e)), @in.Read());
        }
        /// <summary>
        /// Builds a pack whose final object header is written across the internal
        /// 8192-byte buffer boundary, appends one marker byte (0x7e) after the pack
        /// footer, parses it, and verifies the parser leaves the marker unread.
        /// </summary>
        public virtual void TestDataAfterPackFooterSplitHeaderRead()
        {
            TestRepository d = new TestRepository <FileRepository>(db);

            byte[]  data    = Constants.Encode("a");
            RevBlob b       = d.Blob(data);
            int     objects = 248;

            TemporaryBuffer.Heap pack = new TemporaryBuffer.Heap(32 * 1024);
            PackHeader(pack, objects + 1);
            // Build a blob whose content ("0123456789101112", 16 bytes) needs a
            // multi-byte size field in its object header.
            int           offset = 13;
            StringBuilder sb     = new StringBuilder();

            for (int i = 0; i < offset; i++)
            {
                sb.Append(i);
            }
            offset = sb.ToString().Length;
            // First header byte: object type in bits 6-4, low 4 bits of the size.
            int lenByte = (Constants.OBJ_BLOB) << 4 | (offset & unchecked ((int)(0x0F)));

            offset >>= 4;
            if (offset > 0)
            {
                lenByte |= 1 << 7;  // continuation bit: more size bytes follow
            }
            pack.Write(lenByte);
            while (offset > 0)
            {
                lenByte  = offset & unchecked ((int)(0x7F));
                // Continuation bytes carry 7 size bits each per the pack format.
                // (Was ">>= 6": a latent bug, harmless here only because the
                // remaining size always fits in a single continuation byte.)
                offset >>= 7;
                if (offset > 0)
                {
                    lenByte |= 1 << 7;
                }
                pack.Write(lenByte);
            }
            Deflate(pack, Constants.Encode(sb.ToString()));
            for (int i_1 = 0; i_1 < objects; i_1++)
            {
                // The last pack header written falls across the 8192 byte boundary
                // between [8189:8210]
                pack.Write((Constants.OBJ_REF_DELTA) << 4 | 4);
                b.CopyRawTo(pack);
                Deflate(pack, new byte[] { unchecked ((int)(0x1)), unchecked ((int)(0x1)), unchecked (
                                               (int)(0x1)), (byte)('b') });
            }
            Digest(pack);
            // Append a single marker byte (0x7e) after the pack footer.
            byte[] packData   = pack.ToByteArray();
            byte[] streamData = new byte[packData.Length + 1];
            System.Array.Copy(packData, 0, streamData, 0, packData.Length);
            streamData[packData.Length] = unchecked ((int)(0x7e));
            InputStream @in = new ByteArrayInputStream(streamData);
            PackParser  p   = Index(@in);

            p.SetAllowThin(true);
            p.SetCheckEofAfterPackFooter(false);
            p.SetExpectDataAfterPackFooter(true);
            p.Parse(NullProgressMonitor.INSTANCE);
            // The parser must stop exactly at the footer, leaving the marker next.
            NUnit.Framework.Assert.AreEqual(unchecked ((int)(0x7e)), @in.Read());
        }
        /// <summary>
        /// Writes 1.5 blocks of deterministic random data into a TemporaryBuffer
        /// via a one-byte Write followed by Copy, then verifies both ToByteArray
        /// and WriteTo reproduce the data exactly.
        /// </summary>
        public virtual void TestOneBlockAndHalf_Copy()
        {
            TemporaryBuffer b = new TemporaryBuffer.LocalFile();
            byte[] expected = new TestRng(Sharpen.Extensions.GetTestName()).NextBytes(TemporaryBuffer.Block.SZ * 3 / 2);
            try
            {
                ByteArrayInputStream src = new ByteArrayInputStream(expected);
                // Seed with a single byte, then bulk-copy the remainder.
                b.Write(src.Read());
                b.Copy(src);
                b.Close();
                NUnit.Framework.Assert.AreEqual(expected.Length, b.Length());

                // Round-trip via ToByteArray().
                byte[] viaArray = b.ToByteArray();
                NUnit.Framework.Assert.IsNotNull(viaArray);
                NUnit.Framework.Assert.AreEqual(expected.Length, viaArray.Length);
                NUnit.Framework.Assert.IsTrue(Arrays.Equals(expected, viaArray));

                // Round-trip via WriteTo().
                ByteArrayOutputStream sink = new ByteArrayOutputStream();
                b.WriteTo(sink, null);
                sink.Close();
                byte[] viaStream = sink.ToByteArray();
                NUnit.Framework.Assert.AreEqual(expected.Length, viaStream.Length);
                NUnit.Framework.Assert.IsTrue(Arrays.Equals(expected, viaStream));
            }
            finally
            {
                b.Destroy();
            }
        }
        /// <summary>
        /// Builds a single-object thin pack, appends one marker byte (0x7e) after
        /// the pack footer, parses it, and verifies the parser leaves that byte
        /// unread on the stream.
        /// </summary>
        public virtual void TestDataAfterPackFooterSingleRead()
        {
            TestRepository d = new TestRepository <FileRepository>(db);
            RevBlob        a = d.Blob("a");

            TemporaryBuffer.Heap pack = new TemporaryBuffer.Heap(32 * 1024);
            PackHeader(pack, 1);
            // One REF_DELTA of inflated length 4 against blob "a".
            pack.Write((Constants.OBJ_REF_DELTA) << 4 | 4);
            a.CopyRawTo(pack);
            Deflate(pack, new byte[] { unchecked((int)(0x1)), unchecked((int)(0x1)), unchecked((int)(0x1)), (byte)('b') });
            Digest(pack);

            // Append a single marker byte after the pack footer.
            byte[] raw = pack.ToByteArray();
            byte[] withTrailer = new byte[raw.Length + 1];
            System.Array.Copy(raw, 0, withTrailer, 0, raw.Length);
            withTrailer[raw.Length] = unchecked((int)(0x7e));

            InputStream @in = new ByteArrayInputStream(withTrailer);
            PackParser p = Index(@in);
            p.SetAllowThin(true);
            p.SetCheckEofAfterPackFooter(false);
            p.SetExpectDataAfterPackFooter(true);
            p.Parse(NullProgressMonitor.INSTANCE);

            // The parser must stop exactly at the footer, leaving the marker next.
            NUnit.Framework.Assert.AreEqual(unchecked((int)(0x7e)), @in.Read());
        }
        /// <summary>
        /// Reads the first two bytes of an initialized stream through a one-byte
        /// unmanaged buffer and verifies the values come back in order.
        /// </summary>
        public void Read_InitializedStream_ShouldReturnCorrectResults()
        {
            byte[] data = { 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10 };
            ByteArrayInputStream stream = ByteArrayInputStream.Create(data);
            IntPtr aBuf = Marshal.AllocCoTaskMem(1);

            try
            {
                // Read first byte
                stream.Read(aBuf, 1);
                byte result = Marshal.ReadByte(aBuf);

                Assert.AreEqual(0, result);

                // Read second byte.
                stream.Read(aBuf, 1);
                result = Marshal.ReadByte(aBuf);
                Assert.AreEqual(1, result);
            }
            finally
            {
                // Release the unmanaged buffer even when an assertion throws;
                // the original version leaked it on test failure.
                Marshal.FreeCoTaskMem(aBuf);
            }
        }
        /// <summary>
        /// Reads <c>LongLength</c> bytes from the stream and assembles them into a
        /// long, most-significant byte first (big-endian).
        /// </summary>
        public static long ReadLong(ByteArrayInputStream @in)
        {
            long value = 0;
            int remaining = LongLength;
            while (remaining-- > 0)
            {
                // Shift prior bytes up and append the next byte in the low 8 bits.
                value = (value << 8) + ((byte)@in.Read() & unchecked((int)(0xff)));
            }
            return value;
        }
        /// <summary>
        /// Detects faces (requesting the full attribute set listed below) in one of
        /// the two held bitmaps and pushes the result to the UI.
        /// </summary>
        /// <param name="mIndex">0 selects mBitmap0; any other value selects mBitmap1.</param>
        private async void ExecuteDetection(int mIndex)
        {
            Face[] faces    = null;
            bool   mSucceed = true;

            mProgressDialog.Show();
            AddLog("Request: Detecting in image " + mIndex);

            try
            {
                var faceClient = new FaceClient();
                using (MemoryStream pre_output = new MemoryStream())
                {
                    // Re-encode the selected bitmap as JPEG for upload.
                    if (mIndex == 0)
                    {
                        mBitmap0.Compress(Bitmap.CompressFormat.Jpeg, 100, pre_output);
                    }
                    else
                    {
                        mBitmap1.Compress(Bitmap.CompressFormat.Jpeg, 100, pre_output);
                    }

                    using (ByteArrayInputStream inputStream = new ByteArrayInputStream(pre_output.ToArray()))
                    {
                        // Copy the JPEG bytes into a managed stream for the client.
                        byte[] arr = new byte[inputStream.Available()];
                        inputStream.Read(arr);
                        var output = new MemoryStream(arr);

                        mProgressDialog.SetMessage("Detecting...");
                        SetInfo("Detecting...");
                        // The two boolean flags are presumably returnFaceId /
                        // returnFaceLandmarks — TODO confirm against FaceClient.Detect.
                        faces = await faceClient.Detect(output, true, true, new[] {
                            FaceServiceClientFaceAttributeType.Age,
                            FaceServiceClientFaceAttributeType.Gender,
                            FaceServiceClientFaceAttributeType.Smile,
                            FaceServiceClientFaceAttributeType.Glasses,
                            FaceServiceClientFaceAttributeType.FacialHair,
                            FaceServiceClientFaceAttributeType.Emotion,
                            FaceServiceClientFaceAttributeType.HeadPose
                        });
                    }
                }
            }
            catch (Java.Lang.Exception e)
            {
                // NOTE(review): only Java-side exceptions are caught; a .NET
                // exception in this async void method would be unobservable.
                mSucceed = false;
                AddLog(e.Message);
            }

            RunOnUiThread(() =>
            {
                SetUiAfterDetection(faces, mIndex, mSucceed);
            });
        }
// Esempio n. 9 (snippet-separator artifact from code scrape; commented out to keep the file parseable)
// 0
        /// <summary>
        /// Detects faces (requesting the full attribute set listed below) in
        /// mBitmap and displays the results in the detected-faces list view.
        /// </summary>
        private async void ExecuteDetection()
        {
            Face[] faces    = null;
            bool   mSucceed = true;

            mProgressDialog.Show();
            AddLog("Request: Detecting in image " + mImageUri);

            try
            {
                var faceClient = new FaceClient();
                using (MemoryStream pre_output = new MemoryStream())
                {
                    // Re-encode the bitmap as JPEG for upload.
                    mBitmap.Compress(Bitmap.CompressFormat.Jpeg, 100, pre_output);

                    using (ByteArrayInputStream inputStream = new ByteArrayInputStream(pre_output.ToArray()))
                    {
                        // Copy the JPEG bytes into a managed stream for the client.
                        byte[] arr = new byte[inputStream.Available()];
                        inputStream.Read(arr);
                        var output = new MemoryStream(arr);

                        mProgressDialog.SetMessage("Detecting...");
                        SetInfo("Detecting...");
                        faces = await faceClient.Detect(output, true, true, new[] {
                            FaceServiceClientFaceAttributeType.Age,
                            FaceServiceClientFaceAttributeType.Gender,
                            FaceServiceClientFaceAttributeType.Smile,
                            FaceServiceClientFaceAttributeType.Glasses,
                            FaceServiceClientFaceAttributeType.FacialHair,
                            FaceServiceClientFaceAttributeType.Emotion,
                            FaceServiceClientFaceAttributeType.HeadPose
                        });
                    }
                }
            }
            catch (Java.Lang.Exception e)
            {
                // NOTE(review): only Java-side exceptions are caught here.
                mSucceed = false;
                AddLog(e.Message);
            }

            RunOnUiThread(() =>
            {
                // NOTE(review): "Success" is logged even when mSucceed is false.
                AddLog("Response: Success. Detected " + (faces == null ? 0 : faces.Length) + " face(s) in " + mImageUri);

                // Show the result on screen when detection is done.
                ListView list_detected_faces = FindViewById <ListView>(Resource.Id.list_detected_faces);
                SetUiAfterDetection(faces, mSucceed, list_detected_faces);
            });
        }
// Esempio n. 10 (snippet-separator artifact; commented out)
// 0
        /// <summary>
        /// Feeds <paramref name="mime"/> to a MultipartReader in fixed-size chunks
        /// and checks the two parsed parts and their headers against the expected
        /// strings.
        /// </summary>
        /// <param name="mime">Raw multipart body using boundary "BOUNDARY".</param>
        /// <param name="part1ExpectedStr">Expected UTF-8 content of the first part.</param>
        /// <param name="part2ExpectedStr">Expected UTF-8 content of the second part.</param>
        /// <param name="recommendedChunkSize">If equal to mime.Length, every chunk
        /// size from 1 to mime.Length is exercised; otherwise only this one is.</param>
        private void ReaderOperationWithMime(byte[] mime, string part1ExpectedStr, string
                                             part2ExpectedStr, int recommendedChunkSize)
        {
            Encoding utf8 = Sharpen.Extensions.GetEncoding("UTF-8");
            // if the caller passes in a special chunksize, which is not equal to mime.length, then
            // lets test the algorithm _only_ at that chunksize.  otherwise, test it at every chunksize
            // between 1 and mime.length.  (this is needed because when testing with a very large mime value,
            // the test takes too long to test at every single chunk size)
            int chunkSize = 1;

            if (recommendedChunkSize != mime.Length)
            {
                chunkSize = recommendedChunkSize;
            }
            for (; chunkSize <= recommendedChunkSize; ++chunkSize)
            {
                ByteArrayInputStream mimeInputStream = new ByteArrayInputStream(mime);
                MultipartReaderTest.TestMultipartReaderDelegate delegate_ = new MultipartReaderTest.TestMultipartReaderDelegate
                                                                                (this);
                string          contentType = "multipart/related; boundary=\"BOUNDARY\"";
                MultipartReader reader      = new MultipartReader(contentType, delegate_);
                NUnit.Framework.Assert.IsFalse(reader.Finished());
                int location = 0;
                int length   = 0;
                // Push chunkSize-byte slices into the reader until it reports done;
                // the last slice may be shorter than chunkSize.
                do
                {
                    NUnit.Framework.Assert.IsTrue("Parser didn't stop at end", location < mime.Length
                                                  );
                    length = Math.Min(chunkSize, (mime.Length - location));
                    byte[] bytesRead = new byte[length];
                    mimeInputStream.Read(bytesRead, 0, length);
                    reader.AppendData(bytesRead);
                    location += chunkSize;
                }while (!reader.Finished());
                // Exactly two parts with the expected bodies and header values.
                NUnit.Framework.Assert.AreEqual(delegate_.partList.Count, 2);
                NUnit.Framework.Assert.AreEqual(delegate_.headersList.Count, 2);
                byte[]          part1Expected = Sharpen.Runtime.GetBytesForString(part1ExpectedStr, utf8);
                byte[]          part2Expected = Sharpen.Runtime.GetBytesForString(part2ExpectedStr, utf8);
                ByteArrayBuffer part1         = delegate_.partList[0];
                ByteArrayBuffer part2         = delegate_.partList[1];
                NUnit.Framework.Assert.IsTrue(Arrays.Equals(part1.ToByteArray(), part1Expected));
                NUnit.Framework.Assert.IsTrue(Arrays.Equals(part2.ToByteArray(), part2Expected));
                IDictionary <string, string> headers1 = delegate_.headersList[0];
                NUnit.Framework.Assert.IsTrue(headers1.ContainsKey("Foo"));
                NUnit.Framework.Assert.AreEqual(headers1.Get("Foo"), "Bar");
                NUnit.Framework.Assert.IsTrue(headers1.ContainsKey("Header"));
                NUnit.Framework.Assert.AreEqual(headers1.Get("Header"), "Val ue");
            }
        }
        /// <summary>
        /// Detects faces in the given bitmap (no attributes requested) and routes
        /// the result to the add-face or select-image UI based on the request code.
        /// </summary>
        /// <param name="mRequestCode">REQUEST_ADD_FACE or REQUEST_SELECT_IMAGE.</param>
        /// <param name="mImageUri">Image identifier used only for logging.</param>
        /// <param name="mInternalBitmap">Bitmap to run detection on.</param>
        private async void ExecuteDetection(int mRequestCode, string mImageUri, Bitmap mInternalBitmap)
        {
            Face[] faces    = null;
            bool   mSucceed = true;

            mProgressDialog.Show();
            AddLog("Request: Detecting in image " + mImageUri);

            try
            {
                var faceClient = new FaceClient();
                using (MemoryStream pre_output = new MemoryStream())
                {
                    // Re-encode the bitmap as JPEG for upload.
                    mInternalBitmap.Compress(Bitmap.CompressFormat.Jpeg, 100, pre_output);

                    using (ByteArrayInputStream inputStream = new ByteArrayInputStream(pre_output.ToArray()))
                    {
                        // Copy the JPEG bytes into a managed stream for the client.
                        byte[] arr = new byte[inputStream.Available()];
                        inputStream.Read(arr);
                        var output = new MemoryStream(arr);

                        mProgressDialog.SetMessage("Detecting...");
                        SetInfo("Detecting...");
                        faces = await faceClient.Detect(output, true, false, null);
                    }
                }
            }
            catch (Java.Lang.Exception e)
            {
                // NOTE(review): only Java-side exceptions are caught here.
                mSucceed = false;
                AddLog(e.Message);
            }

            RunOnUiThread(() =>
            {
                if (mSucceed)
                {
                    // NOTE(review): assumes Detect never returns null on success;
                    // faces.Count() would throw a NullReferenceException otherwise.
                    AddLog("Response: Success. Detected " + faces.Count() + " Face(s) in image");
                }
                if (mRequestCode == REQUEST_ADD_FACE)
                {
                    SetUiAfterDetectionForAddFace(faces);
                }
                else if (mRequestCode == REQUEST_SELECT_IMAGE)
                {
                    SetUiAfterDetectionForSelectImage(faces);
                }
            });
        }
// Esempio n. 12 (snippet-separator artifact; commented out)
// 0
        /// <summary>
        /// Copies this entity's content to the given stream in 4 KiB chunks and
        /// flushes the destination when done.
        /// </summary>
        /// <exception cref="System.IO.IOException"></exception>
        public override void WriteTo(OutputStream @out)
        {
            if (@out == null)
            {
                throw new ArgumentException("Output stream may not be null");
            }
            InputStream source = new ByteArrayInputStream(this.content);
            byte[] chunk = new byte[4096];
            // Read() returns -1 once the content is exhausted.
            for (int count = source.Read(chunk); count != -1; count = source.Read(chunk))
            {
                @out.Write(chunk, 0, count);
            }
            @out.Flush();
        }
// Esempio n. 13 (snippet-separator artifact; commented out)
// 0
        /// <summary>
        /// Serializes an empty pre-0.21 TupleWritable (no values marked written)
        /// and checks a current TupleWritable can read it back, consuming the
        /// entire stream.
        /// </summary>
        /// <exception cref="System.Exception"/>
        public virtual void TestPreVersion21CompatibilityEmptyTuple()
        {
            // Zero elements and no written-flags set.
            TestTupleWritable.PreVersion21TupleWritable oldTuple =
                new TestTupleWritable.PreVersion21TupleWritable(new Writable[0]);

            ByteArrayOutputStream @out = new ByteArrayOutputStream();
            oldTuple.Write(new DataOutputStream(@out));

            TupleWritable dTuple = new TupleWritable();
            ByteArrayInputStream @in = new ByteArrayInputStream(@out.ToByteArray());
            dTuple.ReadFields(new DataInputStream(@in));

            NUnit.Framework.Assert.IsTrue("Tuple writable is unable to read pre-0.21 versions of TupleWritable", oldTuple.IsCompatible(dTuple));
            NUnit.Framework.Assert.AreEqual("All tuple data has not been read from the stream", -1, @in.Read());
        }
        /// <summary>
        /// Detects faces in mBitmap (no attributes requested) and updates the UI
        /// with the outcome.
        /// </summary>
        private async void ExecuteDetection()
        {
            Face[] faces    = null;
            bool   mSucceed = true;

            mProgressDialog.Show();
            AddLog("Request: Detecting " + mImageUriStr);

            try
            {
                var faceClient = new FaceClient();
                using (MemoryStream pre_output = new MemoryStream())
                {
                    // Re-encode the bitmap as JPEG for upload.
                    mBitmap.Compress(Bitmap.CompressFormat.Jpeg, 100, pre_output);

                    using (ByteArrayInputStream inputStream = new ByteArrayInputStream(pre_output.ToArray()))
                    {
                        // Copy the JPEG bytes into a managed stream for the client.
                        byte[] arr = new byte[inputStream.Available()];
                        inputStream.Read(arr);
                        var output = new MemoryStream(arr);

                        mProgressDialog.SetMessage("Detecting...");
                        SetInfo("Detecting...");
                        faces = await faceClient.Detect(output, true, false, null);
                    }
                }
            }
            catch (Java.Lang.Exception e)
            {
                // NOTE(review): only Java-side exceptions are caught here.
                mSucceed = false;
                AddLog(e.Message);
            }

            RunOnUiThread(() =>
            {
                if (mSucceed)
                {
                    // Guard against a null result before counting.
                    int faces_count = (faces == null) ? 0 : faces.Count();
                    AddLog("Response: Success. Detected " + faces_count.ToString() + " Face(s)");
                }

                SetUiAfterDetection(faces, mSucceed);
            });
        }
// Esempio n. 15 (snippet-separator artifact; commented out)
// 0
        /// <summary>
        /// Round-trips a 71-element TupleWritable, with every position marked
        /// written, through serialization and verifies equality plus full stream
        /// consumption.
        /// </summary>
        /// <exception cref="System.Exception"/>
        public virtual void TestWideWritable2()
        {
            Writable[] manyWrits = MakeRandomWritables(71);
            TupleWritable sTuple = new TupleWritable(manyWrits);
            for (int idx = 0; idx < manyWrits.Length; idx++)
            {
                sTuple.SetWritten(idx);
            }

            ByteArrayOutputStream @out = new ByteArrayOutputStream();
            sTuple.Write(new DataOutputStream(@out));

            TupleWritable dTuple = new TupleWritable();
            ByteArrayInputStream @in = new ByteArrayInputStream(@out.ToByteArray());
            dTuple.ReadFields(new DataInputStream(@in));

            NUnit.Framework.Assert.IsTrue("Failed to write/read tuple", sTuple.Equals(dTuple));
            NUnit.Framework.Assert.AreEqual("All tuple data has not been read from the stream", -1, @in.Read());
        }
// Esempio n. 16 (snippet-separator artifact; commented out)
// 0
        /// <summary>Tests compatibility with pre-0.21 versions of TupleWritable</summary>
        /// <exception cref="System.Exception"/>
        public virtual void TestPreVersion21Compatibility()
        {
            Writable[] manyWrits = MakeRandomWritables(64);
            TestTupleWritable.PreVersion21TupleWritable oldTuple =
                new TestTupleWritable.PreVersion21TupleWritable(manyWrits);
            // Mark every third element as written.
            for (int idx = 0; idx < manyWrits.Length; idx++)
            {
                if (idx % 3 == 0)
                {
                    oldTuple.SetWritten(idx);
                }
            }

            ByteArrayOutputStream @out = new ByteArrayOutputStream();
            oldTuple.Write(new DataOutputStream(@out));

            TupleWritable dTuple = new TupleWritable();
            ByteArrayInputStream @in = new ByteArrayInputStream(@out.ToByteArray());
            dTuple.ReadFields(new DataInputStream(@in));

            NUnit.Framework.Assert.IsTrue("Tuple writable is unable to read pre-0.21 versions of TupleWritable", oldTuple.IsCompatible(dTuple));
            NUnit.Framework.Assert.AreEqual("All tuple data has not been read from the stream", -1, @in.Read());
        }
// Esempio n. 17 (snippet-separator artifact; commented out)
// 0
 /// <summary>
 /// Reads four bytes from the stream and assembles them into an int,
 /// most-significant byte first (big-endian). The first byte is not masked,
 /// matching the original behavior.
 /// </summary>
 public static int ReadInt(ByteArrayInputStream @in)
 {
     // Reads happen left to right, exactly as in the original expression.
     int b0 = @in.Read();
     int b1 = @in.Read() & 255;
     int b2 = @in.Read() & 255;
     int b3 = @in.Read() & 255;
     return (b0 << 24) | (b1 << 16) | (b2 << 8) | b3;
 }
        /// <summary>
        /// Detects faces in mBitmap (no attributes requested), adds them to the
        /// face-list adapter, and enables the group button only when between 2 and
        /// 100 faces are loaded.
        /// </summary>
        /// <param name="mImageUri">Image identifier used only for logging.</param>
        private async void ExecuteDetection(string mImageUri)
        {
            Face[] faces    = null;
            bool   mSucceed = true;

            mProgressDialog.Show();
            AddLog("Request: Detecting in image " + mImageUri);

            try
            {
                var faceClient = new FaceClient();
                using (MemoryStream pre_output = new MemoryStream())
                {
                    // Re-encode the bitmap as JPEG for upload.
                    mBitmap.Compress(Bitmap.CompressFormat.Jpeg, 100, pre_output);

                    using (ByteArrayInputStream inputStream = new ByteArrayInputStream(pre_output.ToArray()))
                    {
                        // Copy the JPEG bytes into a managed stream for the client.
                        byte[] arr = new byte[inputStream.Available()];
                        inputStream.Read(arr);
                        var output = new MemoryStream(arr);

                        mProgressDialog.SetMessage("Detecting...");
                        SetInfo("Detecting...");
                        faces = await faceClient.Detect(output, true, false, null);
                    }
                }
            }
            catch (Java.Lang.Exception e)
            {
                // NOTE(review): only Java-side exceptions are caught here.
                mSucceed = false;
                AddLog(e.Message);
            }

            RunOnUiThread(() =>
            {
                if (mSucceed)
                {
                    // NOTE(review): assumes Detect never returns null on success;
                    // faces.Count() would throw a NullReferenceException otherwise.
                    AddLog("Response: Success. Detected " + faces.Count() + " Face(s) in image");
                }

                mProgressDialog.Dismiss();

                SetAllButtonsEnabledStatus(true);

                if (faces != null)
                {
                    SetInfo("Detection is done");

                    // Show the detailed list of original faces.
                    mFaceListAdapter.AddFaces(faces);
                    GridView listView = (GridView)FindViewById(Resource.Id.all_faces);
                    listView.Adapter  = mFaceListAdapter;

                    TextView textView = (TextView)FindViewById(Resource.Id.text_all_faces);
                    textView.Text     = String.Format(
                        "{0} face{1} in total",
                        mFaceListAdapter.faces.Count,
                        mFaceListAdapter.faces.Count != 1 ? "s" : "");
                }

                // Grouping requires 2..100 faces in the adapter.
                if (mFaceListAdapter.faces.Count >= 2 && mFaceListAdapter.faces.Count <= 100)
                {
                    SetGroupButtonEnabledStatus(true);
                }
                else
                {
                    SetGroupButtonEnabledStatus(false);
                }
            });
        }
// Esempio n. 19 (snippet-separator artifact; commented out)
// 0
        /// <summary>
        /// Writes map output data through an IFileOutputStream, verifies
        /// OnDiskMapOutput.Shuffle accepts the intact stream, then flips one byte
        /// in the IFile payload and expects Shuffle to throw ChecksumException.
        /// Finally re-reads the shuffled-to-disk file to confirm it is readable.
        /// </summary>
        public virtual void TestCorruptedIFile()
        {
            int  fetcher             = 7;
            Path onDiskMapOutputPath = new Path(name.GetMethodName() + "/foo");
            Path shuffledToDisk      = OnDiskMapOutput.GetTempPath(onDiskMapOutputPath, fetcher);

            fs = FileSystem.GetLocal(job).GetRaw();
            MapOutputFile mof = Org.Mockito.Mockito.Mock <MapOutputFile>();
            OnDiskMapOutput <Text, Text> odmo = new OnDiskMapOutput <Text, Text>(map1ID, id, mm
                                                                                 , 100L, job, mof, fetcher, true, fs, onDiskMapOutputPath);
            string                mapData = "MAPDATA12345678901234567890";
            ShuffleHeader         header  = new ShuffleHeader(map1ID.ToString(), 14, 10, 1);
            ByteArrayOutputStream bout    = new ByteArrayOutputStream();
            DataOutputStream      dos     = new DataOutputStream(bout);
            IFileOutputStream     ios     = new IFileOutputStream(dos);

            // Shuffle header first, then the checksummed IFile payload.
            header.Write(dos);
            int headerSize = dos.Size();

            try
            {
                ios.Write(Sharpen.Runtime.GetBytesForString(mapData));
            }
            finally
            {
                ios.Close();
            }
            int dataSize = bout.Size() - headerSize;
            // Ensure that the OnDiskMapOutput shuffler can successfully read the data.
            MapHost host             = new MapHost("TestHost", "http://test/url");
            ByteArrayInputStream bin = new ByteArrayInputStream(bout.ToByteArray());

            try
            {
                // Read past the shuffle header.
                bin.Read(new byte[headerSize], 0, headerSize);
                odmo.Shuffle(host, bin, dataSize, dataSize, metrics, Reporter.Null);
            }
            finally
            {
                bin.Close();
            }
            // Now corrupt the IFile data.
            byte[] corrupted = bout.ToByteArray();
            // Zero out a byte in the middle of the payload so the checksum fails.
            corrupted[headerSize + (dataSize / 2)] = unchecked ((int)(0x0));
            try
            {
                bin = new ByteArrayInputStream(corrupted);
                // Read past the shuffle header.
                bin.Read(new byte[headerSize], 0, headerSize);
                odmo.Shuffle(host, bin, dataSize, dataSize, metrics, Reporter.Null);
                NUnit.Framework.Assert.Fail("OnDiskMapOutput.shuffle didn't detect the corrupted map partition file"
                                            );
            }
            catch (ChecksumException e)
            {
                Log.Info("The expected checksum exception was thrown.", e);
            }
            finally
            {
                bin.Close();
            }
            // Ensure that the shuffled file can be read.
            IFileInputStream iFin = new IFileInputStream(fs.Open(shuffledToDisk), dataSize, job
                                                         );

            try
            {
                iFin.Read(new byte[dataSize], 0, dataSize);
            }
            finally
            {
                iFin.Close();
            }
        }
        /// <summary>
        /// Detects faces in mBitmap, fills the identified-faces list with the
        /// results, and records in <c>detected</c> whether anything was found so the
        /// Identify button can be enabled or disabled.
        /// </summary>
        private async void ExecuteDetection()
        {
            Face[] faces = null;

            mProgressDialog.Show();
            //AddLog("Request: Detecting in image " + mImageUri);

            try
            {
                var faceClient = new FaceClient();
                using (MemoryStream pre_output = new MemoryStream())
                {
                    // Re-encode the bitmap as JPEG for upload.
                    mBitmap.Compress(Bitmap.CompressFormat.Jpeg, 100, pre_output);

                    using (ByteArrayInputStream inputStream = new ByteArrayInputStream(pre_output.ToArray()))
                    {
                        // Copy the JPEG bytes into a managed stream for the client.
                        byte[] arr = new byte[inputStream.Available()];
                        inputStream.Read(arr);
                        var output = new MemoryStream(arr);

                        mProgressDialog.SetMessage("Detecting...");
                        SetInfo("Detecting...");
                        faces = await faceClient.Detect(output, true, false, null);
                    }
                }
            }
            catch (Java.Lang.Exception e)
            {
                // NOTE(review): only Java-side exceptions are caught; failure is
                // signaled solely by faces remaining null.
                AddLog(e.Message);
            }

            RunOnUiThread(() =>
            {
                mProgressDialog.Dismiss();

                SetAllButtonsEnabledStatus(true);

                if (faces != null)
                {
                    // Set the adapter of the ListView which contains the details of detected faces.
                    mFaceListAdapter  = new FaceListAdapter(faces, this);
                    ListView listView = (ListView)FindViewById(Resource.Id.list_identified_faces);
                    listView.Adapter  = mFaceListAdapter;

                    if (faces.Count() == 0)
                    {
                        detected = false;
                        SetInfo("No faces detected!");
                    }
                    else
                    {
                        detected = true;
                        SetInfo("Click on the \"Identify\" button to identify the faces in image.");
                    }
                }
                else
                {
                    detected = false;
                }

                RefreshIdentifyButtonEnabledStatus();
            });
        }
// Esempio n. 21 (snippet-separator artifact; commented out)
// 0
 /// <summary>Asserts the shared rawIn stream is exhausted (next Read() returns -1).</summary>
 private void AssertEOF()
 {
     NUnit.Framework.Assert.AreEqual(-1, rawIn.Read());
 }
        /// <summary>
        /// Adds the selected face rectangles to the person identified by mPersonId,
        /// then (on success) persists each face thumbnail to a local file and
        /// records its URI via StorageHelper.
        /// </summary>
        /// <param name="mFaceIndices">Indices into the face grid adapter to add.</param>
        private async void ExecuteFaceTask(List <int> mFaceIndices)
        {
            AddPersistedFaceResult result = null;
            bool mSucceed = true;

            mProgressDialog.Show();

            try
            {
                var  faceClient = new FaceClient();
                UUID personId   = UUID.FromString(mPersonId);
                using (MemoryStream pre_output = new MemoryStream())
                {
                    // Re-encode the bitmap as JPEG for upload.
                    mBitmap.Compress(Bitmap.CompressFormat.Jpeg, 100, pre_output);
                    using (ByteArrayInputStream inputStream = new ByteArrayInputStream(pre_output.ToArray()))
                    {
                        // Copy the JPEG bytes into a managed stream for the client.
                        byte[] arr = new byte[inputStream.Available()];
                        inputStream.Read(arr);
                        var output = new MemoryStream(arr);

                        mProgressDialog.SetMessage("Adding face...");
                        SetInfo("Adding face...");

                        foreach (int index in mFaceIndices)
                        {
                            FaceRectangle faceRect = mFaceGridViewAdapter.faceRectList[index];
                            AddLog("Request: Adding face to person " + mPersonId);

                            // NOTE(review): the same output stream is reused across
                            // iterations — presumably AddPersonFace reads from the
                            // start each time; confirm against FaceClient.
                            result = await faceClient.AddPersonFace(mPersonGroupId, personId, output, "User data", faceRect);

                            mFaceGridViewAdapter.faceIdList[index] = result.PersistedFaceId;
                        }
                    }
                }
            }
            catch (Java.Lang.Exception e)
            {
                // NOTE(review): only Java-side exceptions are caught here.
                mSucceed = false;
                AddLog(e.Message);
            }

            RunOnUiThread(() =>
            {
                mProgressDialog.Dismiss();

                if (mSucceed)
                {
                    String faceIds = "";
                    foreach (int index in mFaceIndices)
                    {
                        String faceId = mFaceGridViewAdapter.faceIdList[index].ToString();
                        faceIds      += faceId + ", ";

                        try
                        {
                            // Persist the face thumbnail under the app's files dir,
                            // named by the persisted face id.
                            var file = System.IO.Path.Combine(Application.Context.FilesDir.Path, faceId);
                            using (var fs = new FileStream(file, FileMode.OpenOrCreate))
                            {
                                mFaceGridViewAdapter.faceThumbnails[index].Compress(Bitmap.CompressFormat.Jpeg, 100, fs);
                            }

                            Android.Net.Uri uri = Android.Net.Uri.Parse(file);
                            StorageHelper.SetFaceUri(faceId, uri.ToString(), mPersonId, this);
                        }
                        catch (Java.IO.IOException e)
                        {
                            SetInfo(e.Message);
                        }
                    }
                    AddLog("Response: Success. Face(s) " + faceIds + "added to person " + mPersonId);
                    Finish();
                }
            });
        }