Example #1
        public void Detect(String fileName)
        {
            Tensor imageTensor = ImageIO.ReadTensorFromImageFile(fileName, 224, 224, 128.0f, 1.0f / 128.0f);

            Stopwatch watch = Stopwatch.StartNew();

            MultiboxGraph.Result result = graph.Detect(imageTensor);
            watch.Stop();

            Bitmap bmp = new Bitmap(fileName);

            MultiboxGraph.DrawResults(bmp, result, 0.1f);


            if (InvokeRequired)
            {
                this.Invoke((MethodInvoker)(() =>
                {
                    resultPictureBox.Image = bmp;
                    messageLabel.Text = String.Format("Detection completed in {0} milliseconds", watch.ElapsedMilliseconds);
                }));
            }
            else
            {
                resultPictureBox.Image = bmp;
                messageLabel.Text      = String.Format("Detection completed in {0} milliseconds", watch.ElapsedMilliseconds);
            }
        }
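The snippet above relies on a MultiboxGraph field named graph that is initialized elsewhere in the form, and the InvokeRequired branch suggests Detect is meant to be callable from a background thread. A minimal sketch of the surrounding form code under those assumptions (the InitGraph method name and the Task.Run wiring are illustrative, not taken from the original):

        private MultiboxGraph graph;

        public void InitGraph()
        {
            graph = new MultiboxGraph();
            graph.OnDownloadCompleted += (sender, e) =>
            {
                // Run detection off the UI thread once the model files are available;
                // the InvokeRequired branch in Detect marshals the results back to the UI.
                System.Threading.Tasks.Task.Run(() => Detect("surfers.jpg"));
            };
            graph.Init(); // begins downloading the multibox model files (see Example #8)
        }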
Example #2
        void multiboxGraph_OnDownloadProgressChanged(object sender, System.Net.DownloadProgressChangedEventArgs e)
        {
            String fileName = "surfers.jpg";

            Tensor imageTensor = Emgu.TF.Models.ImageIO.ReadTensorFromImageFile(fileName, 224, 224, 128.0f, 1.0f / 128.0f);

            MultiboxGraph.Result detectResult = _multiboxGraph.Detect(imageTensor);
            NSImage image = new NSImage(fileName);

            MultiboxGraph.DrawResults(image, detectResult, 0.1f);
            SetImage(image);
        }
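This handler is subscribed to the graph's OnDownloadProgressChanged event; a sketch of the wiring it appears to assume (the StartDetection method name is illustrative). Note that Examples #7 and #8 subscribe to OnDownloadCompleted instead, which fires once the model files are fully available and is usually the safer point at which to call Detect:

        private MultiboxGraph _multiboxGraph;

        public void StartDetection()
        {
            _multiboxGraph = new MultiboxGraph();
            _multiboxGraph.OnDownloadProgressChanged += multiboxGraph_OnDownloadProgressChanged;
            _multiboxGraph.Init(); // starts the model download; the handler above fires as progress changes
        }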
Example #3
        public void Detect(String fileName)
        {
            //MultiboxGraph.Download();
            MultiboxGraph graph       = new MultiboxGraph();
            Tensor        imageTensor = ImageIO.ReadTensorFromImageFile(fileName, 224, 224, 128.0f, 1.0f / 128.0f);

            MultiboxGraph.Result result = graph.Detect(imageTensor);

            Bitmap bmp = new Bitmap(fileName);

            MultiboxGraph.DrawResults(bmp, result, 0.1f);
            resultPictureBox.Image = bmp;
        }
Example #4
        public void TestMultiboxPeopleDetect()
        {
            Tensor imageResults = ImageIO.ReadTensorFromImageFile("surfers.jpg", 224, 224, 128.0f, 1.0f / 128.0f);

            MultiboxGraph multiboxGraph = new MultiboxGraph();

            MultiboxGraph.Result result = multiboxGraph.Detect(imageResults);

            Bitmap bmp = new Bitmap("surfers.jpg");

            MultiboxGraph.DrawResults(bmp, result, 0.1f);
            bmp.Save("surfers_result.jpg");
        }
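This version of the test calls Detect immediately after constructing MultiboxGraph, so it presumably assumes the multibox model files are already on disk. Example #8 below shows the same test wired to Init() and OnDownloadCompleted, so that detection only runs once the download has finished.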
Example #5
    // Use this for initialization
    void Start()
    {
        //Warning: The following code works around an HTTPS certificate validation issue when downloading tesseract language files from GitHub.
        //Do not use this code in a production environment. Make sure you understand the security implications of this code before using it.
        ServicePointManager.ServerCertificateValidationCallback += delegate(object sender, X509Certificate cert, X509Chain chain, SslPolicyErrors sslPolicyErrors) {
            HttpWebRequest webRequest = sender as HttpWebRequest;
            if (webRequest != null)
            {
                String requestStr = webRequest.Address.AbsoluteUri;
                if (requestStr.StartsWith(@"https://github.com/") || requestStr.StartsWith(@"https://raw.githubusercontent.com/"))
                {
                    return(true);
                }
            }
            return(false);
        };

        TfInvoke.CheckLibraryLoaded();

        WebCamDevice[] devices     = WebCamTexture.devices;
        int            cameraCount = devices.Length;

        if (cameraCount == 0)
        {
            _liveCameraView = false;
            Texture2D texture     = Resources.Load <Texture2D>("surfers");
            Tensor    imageTensor = ImageIO.ReadTensorFromTexture2D(texture, 224, 224, 128.0f, 1.0f / 128.0f, true);

            //byte[] raw = ImageIO.EncodeJpeg(imageTensor, 128.0f, 128.0f);
            //System.IO.File.WriteAllBytes("surfers_out.jpg", raw);

            _multiboxGraph = new MultiboxGraph();
            MultiboxGraph.Result results = _multiboxGraph.Detect(imageTensor);

            drawableTexture = new Texture2D(texture.width, texture.height, TextureFormat.ARGB32, false);
            drawableTexture.SetPixels(texture.GetPixels());
            MultiboxGraph.DrawResults(drawableTexture, results, 0.1f);

            this.GetComponent <GUITexture>().texture    = drawableTexture;
            this.GetComponent <GUITexture>().pixelInset = new Rect(-texture.width / 2, -texture.height / 2, texture.width, texture.height);
        }
        else
        {
            _liveCameraView = true;
            webcamTexture   = new WebCamTexture(devices[0].name);
            _multiboxGraph  = new MultiboxGraph();
            baseRotation    = transform.rotation;
            webcamTexture.Play();
            //data = new Color32[webcamTexture.width * webcamTexture.height];
        }
    }
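Start() above, and the Update() method shown in Example #9, read and write a number of MonoBehaviour fields that are not part of the snippet. A sketch of the declarations they appear to assume, with types inferred from usage (the actual declarations in the original script may differ):

    private MultiboxGraph _multiboxGraph;
    private bool _liveCameraView;
    private bool _textureResized;
    private bool _staticViewRendered;
    private String _displayMessage = String.Empty;
    private WebCamTexture webcamTexture;     // live camera feed, when a camera is present
    private Quaternion baseRotation;
    private Color32[] data;                  // RGBA pixels read from the webcam
    private byte[] bytes;                    // raw bytes copied from the pinned pixel array
    private Texture2D resultTexture;         // webcam frame rebuilt as a Texture2D
    private Texture2D drawableTexture;       // copy of the frame with detection boxes drawn
    public UnityEngine.UI.Text DisplayText;  // label that shows _displayMessage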
Example #6
        partial void multiboxClicked(NSObject sender)
        {
            messageLabel.StringValue = "Multibox Clicked";
            mainImageView.Image      = null;

            MultiboxGraph graph    = new MultiboxGraph();
            String        fileName = "surfers.jpg";

            Tensor imageTensor = Emgu.TF.Models.ImageIO.ReadTensorFromImageFile(fileName, 224, 224, 128.0f, 1.0f / 128.0f);

            MultiboxGraph.Result detectResult = graph.Detect(imageTensor);
            NSImage image = new NSImage(fileName);

            MultiboxGraph.DrawResults(image, detectResult, 0.1f);
            mainImageView.Image = image;
        }
Example #7
        public MultiboxDetectionPage()
            : base()
        {
            if (_multiboxGraph == null)
            {
                _multiboxGraph = new MultiboxGraph();
                _multiboxGraph.OnDownloadProgressChanged += onDownloadProgressChanged;
                _multiboxGraph.OnDownloadCompleted       += onDownloadCompleted;
                _multiboxGraph.OnDownloadCompleted       += (sender, e) =>
                {
                    OnButtonClicked(sender, e);
                };
            }

            OnImagesLoaded += (sender, image) =>
            {
                try
                {
                    SetMessage("Please wait...");
                    SetImage();
                    Stopwatch            watch        = Stopwatch.StartNew();
                    Tensor               imageTensor  = Emgu.TF.Models.ImageIO.ReadTensorFromImageFile(image[0], 224, 224, 128.0f, 1.0f / 128.0f);
                    MultiboxGraph.Result detectResult = _multiboxGraph.Detect(imageTensor);
                    watch.Stop();

                    byte[] jpeg = _multiboxGraph.DrawResultsToJpeg(image[0], detectResult);

                    SetImage(jpeg);
#if __MACOS__
                    NSImage img          = new NSImage(image[0]);
                    var     displayImage = this.GetImage();
                    displayImage.WidthRequest  = img.Size.Width;
                    displayImage.HeightRequest = img.Size.Height;
#endif

                    SetMessage(String.Format("Detected in {0} milliseconds.", watch.ElapsedMilliseconds));
                }
                catch (Exception excpt)
                {
                    String msg = excpt.Message.Replace(System.Environment.NewLine, " ");
                    SetMessage(msg);
                }
            };
        }
Example #8
        public void TestMultiboxPeopleDetect()
        {
            Tensor imageResults = ImageIO.ReadTensorFromImageFile("surfers.jpg", 224, 224, 128.0f, 1.0f / 128.0f);

            MultiboxGraph multiboxGraph    = new MultiboxGraph();
            bool          processCompleted = false;

            multiboxGraph.OnDownloadCompleted += (sender, e) =>
            {
                MultiboxGraph.Result result = multiboxGraph.Detect(imageResults);

                Bitmap bmp = new Bitmap("surfers.jpg");
                MultiboxGraph.DrawResults(bmp, result, 0.1f);
                bmp.Save("surfers_result.jpg");
                processCompleted = true;
            };

            multiboxGraph.Init();

            while (!processCompleted)
            {
                Thread.Sleep(1000);
            }
        }
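The while loop at the end of this test simply keeps the thread alive until the OnDownloadCompleted handler has run. A hedged alternative under the same assumptions, replacing the polling loop with a wait handle from System.Threading:

        public void TestMultiboxPeopleDetectWithWaitHandle()
        {
            Tensor imageResults = ImageIO.ReadTensorFromImageFile("surfers.jpg", 224, 224, 128.0f, 1.0f / 128.0f);

            MultiboxGraph multiboxGraph = new MultiboxGraph();
            using (ManualResetEventSlim completed = new ManualResetEventSlim(false))
            {
                multiboxGraph.OnDownloadCompleted += (sender, e) =>
                {
                    MultiboxGraph.Result result = multiboxGraph.Detect(imageResults);
                    Bitmap bmp = new Bitmap("surfers.jpg");
                    MultiboxGraph.DrawResults(bmp, result, 0.1f);
                    bmp.Save("surfers_result.jpg");
                    completed.Set(); // signal the waiting test thread
                };

                multiboxGraph.Init();
                completed.Wait(); // block until the handler above has finished
            }
        }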
Example #9
    // Update is called once per frame
    void Update()
    {
        if (!_multiboxGraph.Imported)
        {
            _displayMessage = String.Format("Downloading multibox model files, {0} % of file {1}...", _multiboxGraph.DownloadProgress * 100, _multiboxGraph.DownloadFileName);
        }
        else if (_liveCameraView)
        {
            if (webcamTexture != null && webcamTexture.didUpdateThisFrame)
            {
                #region convert the webcam texture to RGBA bytes

                if (data == null || (data.Length != webcamTexture.width * webcamTexture.height))
                {
                    data = new Color32[webcamTexture.width * webcamTexture.height];
                }
                webcamTexture.GetPixels32(data);

                if (bytes == null || bytes.Length != data.Length * 4)
                {
                    bytes = new byte[data.Length * 4];
                }
                GCHandle handle = GCHandle.Alloc(data, GCHandleType.Pinned);
                Marshal.Copy(handle.AddrOfPinnedObject(), bytes, 0, bytes.Length);
                handle.Free();

                #endregion

                #region convert the RGBA bytes to texture2D

                if (resultTexture == null || resultTexture.width != webcamTexture.width ||
                    resultTexture.height != webcamTexture.height)
                {
                    resultTexture = new Texture2D(webcamTexture.width, webcamTexture.height, TextureFormat.RGBA32,
                                                  false);
                }

                resultTexture.LoadRawTextureData(bytes);
                resultTexture.Apply();

                #endregion

                if (!_textureResized)
                {
                    this.GetComponent <GUITexture>().pixelInset = new Rect(-webcamTexture.width / 2,
                                                                           -webcamTexture.height / 2, webcamTexture.width, webcamTexture.height);
                    _textureResized = true;
                }

                transform.rotation = baseRotation * Quaternion.AngleAxis(webcamTexture.videoRotationAngle, Vector3.up);


                Tensor imageTensor           = ImageIO.ReadTensorFromTexture2D(resultTexture, 224, 224, 128.0f, 1.0f / 128.0f, true);
                MultiboxGraph.Result results = _multiboxGraph.Detect(imageTensor);

                if (drawableTexture == null || drawableTexture.width != resultTexture.width ||
                    drawableTexture.height != resultTexture.height)
                {
                    drawableTexture = new Texture2D(resultTexture.width, resultTexture.height, TextureFormat.ARGB32, false);
                }
                drawableTexture.SetPixels(resultTexture.GetPixels());
                MultiboxGraph.DrawResults(drawableTexture, results, 0.2f);

                this.GetComponent <GUITexture>().texture = drawableTexture;
                //count++;
            }
        }
        else if (!_staticViewRendered)
        {
            Texture2D texture     = Resources.Load <Texture2D>("surfers");
            Tensor    imageTensor = ImageIO.ReadTensorFromTexture2D(texture, 224, 224, 128.0f, 1.0f / 128.0f, true);

            //byte[] raw = ImageIO.EncodeJpeg(imageTensor, 128.0f, 128.0f);
            //System.IO.File.WriteAllBytes("surfers_out.jpg", raw);


            MultiboxGraph.Result results = _multiboxGraph.Detect(imageTensor);

            drawableTexture = new Texture2D(texture.width, texture.height, TextureFormat.ARGB32, false);
            drawableTexture.SetPixels(texture.GetPixels());
            MultiboxGraph.DrawResults(drawableTexture, results, 0.1f);

            this.GetComponent <GUITexture>().texture    = drawableTexture;
            this.GetComponent <GUITexture>().pixelInset = new Rect(-texture.width / 2, -texture.height / 2, texture.width, texture.height);

            _displayMessage     = String.Empty;
            _staticViewRendered = true;
        }

        DisplayText.text = _displayMessage;
    }
Example #10
    // Update is called once per frame
    void Update()
    {
        if (_liveCameraView)
        {
            if (webcamTexture != null && webcamTexture.didUpdateThisFrame)
            {
                #region convert the webcam texture to RGBA bytes

                if (data == null || (data.Length != webcamTexture.width * webcamTexture.height))
                {
                    data = new Color32[webcamTexture.width * webcamTexture.height];
                }
                webcamTexture.GetPixels32(data);

                if (bytes == null || bytes.Length != data.Length * 4)
                {
                    bytes = new byte[data.Length * 4];
                }
                GCHandle handle = GCHandle.Alloc(data, GCHandleType.Pinned);
                Marshal.Copy(handle.AddrOfPinnedObject(), bytes, 0, bytes.Length);
                handle.Free();

                #endregion

                #region convert the RGBA bytes to texture2D

                if (resultTexture == null || resultTexture.width != webcamTexture.width ||
                    resultTexture.height != webcamTexture.height)
                {
                    resultTexture = new Texture2D(webcamTexture.width, webcamTexture.height, TextureFormat.RGBA32,
                                                  false);
                }

                resultTexture.LoadRawTextureData(bytes);
                resultTexture.Apply();

                #endregion

                if (!_textureResized)
                {
                    this.GetComponent <GUITexture>().pixelInset = new Rect(-webcamTexture.width / 2,
                                                                           -webcamTexture.height / 2, webcamTexture.width, webcamTexture.height);
                    _textureResized = true;
                }

                transform.rotation = baseRotation * Quaternion.AngleAxis(webcamTexture.videoRotationAngle, Vector3.up);


                Tensor imageTensor           = ImageIO.ReadTensorFromTexture2D(resultTexture, 224, 224, 128.0f, 1.0f / 128.0f, true);
                MultiboxGraph.Result results = _multiboxGraph.Detect(imageTensor);

                if (drawableTexture == null || drawableTexture.width != resultTexture.width ||
                    drawableTexture.height != resultTexture.height)
                {
                    drawableTexture = new Texture2D(resultTexture.width, resultTexture.height, TextureFormat.ARGB32, false);
                }
                drawableTexture.SetPixels(resultTexture.GetPixels());
                MultiboxGraph.DrawResults(drawableTexture, results, 0.1f);

                this.GetComponent <GUITexture>().texture = drawableTexture;
                //count++;
            }
        }
    }
Example #11
        public MultiboxDetectionPage()
            : base()
        {
            var button = this.GetButton();

            button.Text     = "Perform People Detection";
            button.Clicked += OnButtonClicked;

            OnImagesLoaded += async(sender, image) =>
            {
                GetLabel().Text = "Please wait...";
                SetImage();

                Task <Tuple <byte[], long> > t = new Task <Tuple <byte[], long> >(
                    () =>
                {
                    //MultiboxGraph.Download();
                    MultiboxGraph graph = new MultiboxGraph();
                    Tensor imageTensor  = Emgu.TF.Models.ImageIO.ReadTensorFromImageFile(image[0], 224, 224, 128.0f, 1.0f / 128.0f);
                    MultiboxGraph.Result detectResult = graph.Detect(imageTensor);
#if __ANDROID__
                    BitmapFactory.Options options = new BitmapFactory.Options();
                    options.InMutable             = true;
                    Android.Graphics.Bitmap bmp   = BitmapFactory.DecodeFile(image[0], options);
                    MultiboxGraph.DrawResults(bmp, detectResult, 0.2f);
                    using (MemoryStream ms = new MemoryStream())
                    {
                        bmp.Compress(Bitmap.CompressFormat.Jpeg, 90, ms);
                        return(new Tuple <byte[], long>(ms.ToArray(), 0));
                    }
#elif __UNIFIED__ && !__IOS__
                    NSImage img = NSImage.ImageNamed(image[0]);

                    Xamarin.Forms.Device.BeginInvokeOnMainThread(() =>
                    {
                        MultiboxGraph.DrawResults(img, detectResult, 0.1f);
                        var imageData = img.AsTiff();
                        var imageRep  = NSBitmapImageRep.ImageRepsWithData(imageData)[0] as NSBitmapImageRep;
                        var jpegData  = imageRep.RepresentationUsingTypeProperties(NSBitmapImageFileType.Jpeg, null);
                        byte[] raw    = new byte[jpegData.Length];
                        System.Runtime.InteropServices.Marshal.Copy(jpegData.Bytes, raw, 0, (int)jpegData.Length);
                        SetImage(raw);
                        GetLabel().Text = String.Format("Detected in {0} milliseconds.", 0);
                    });



                    return(new Tuple <byte[], long>(null, 0));
#elif __IOS__
                    UIImage uiimage = new UIImage(image[0]);

                    Xamarin.Forms.Device.BeginInvokeOnMainThread(() =>
                    {
                        UIImage newImg = MultiboxGraph.DrawResults(uiimage, detectResult, 0.1f);
                        var jpegData   = newImg.AsJPEG();
                        byte[] raw     = new byte[jpegData.Length];
                        System.Runtime.InteropServices.Marshal.Copy(jpegData.Bytes, raw, 0, (int)jpegData.Length);
                        SetImage(raw);
                        GetLabel().Text = String.Format("Detected in {0} milliseconds.", 0);
                    });

                    return(new Tuple <byte[], long>(null, 0));
#else
                    return(new Tuple <byte[], long>(new byte[10], 0));
#endif
                });
                t.Start();

#if !(__UNIFIED__)
                var result = await t;
                SetImage(t.Result.Item1);
                GetLabel().Text = String.Format("Detected in {0} milliseconds.", t.Result.Item2);
#endif
            };
        }
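In the macOS and iOS branches above, the elapsed-time label is formatted with a hard-coded 0 and the long in the returned tuple is likewise always 0, so no real timing is reported. A hedged sketch of a small helper that times only the Detect call, in the spirit of Examples #1 and #7 (TimedDetect is a hypothetical name; Stopwatch requires System.Diagnostics):

        private static Tuple<MultiboxGraph.Result, long> TimedDetect(MultiboxGraph graph, Tensor imageTensor)
        {
            // Time only the detection, so each platform branch can report a real
            // number of milliseconds instead of the hard-coded 0.
            Stopwatch watch = Stopwatch.StartNew();
            MultiboxGraph.Result detectResult = graph.Detect(imageTensor);
            watch.Stop();
            return new Tuple<MultiboxGraph.Result, long>(detectResult, watch.ElapsedMilliseconds);
        }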