// Unity entry point: prepare the Multibox graph and start its asynchronous
// model download/initialization as a coroutine.
    void Start()
    {
        _multiboxGraph = new MultiboxGraph();

        // Resolve the native TensorFlow library early so a missing native
        // dependency surfaces here rather than at first detection.
        bool libraryLoaded = TfInvoke.CheckLibraryLoaded();

        //DisplayText.text = String.Format("Tensorflow library loaded: {0}", loaded);

        // This variant of the sample always renders the static image; the live
        // webcam path is intentionally disabled.
        _liveCameraView = false;

        // Model files may need to be downloaded, hence Init() runs as a coroutine.
        StartCoroutine(_multiboxGraph.Init());
    }
Exemple #2
0
        /// <summary>
        /// Smoke test: download the Multibox model, run people detection on the
        /// bundled sample image, and wait for the asynchronous pipeline to finish.
        /// </summary>
        public void TestMultiboxPeopleDetect()
        {
            Tensor imageResults = ImageIO.ReadTensorFromImageFile <float>("surfers.jpg", 224, 224, 128.0f, 1.0f / 128.0f);

            MultiboxGraph multiboxGraph    = new MultiboxGraph();
            bool          processCompleted = false;

            // Detection runs once the model download/import has completed.
            multiboxGraph.OnDownloadCompleted += (sender, e) =>
            {
                MultiboxGraph.Result[] result = multiboxGraph.Detect(imageResults);
                processCompleted = true;
            };

            multiboxGraph.Init();

            // Bug fix: the original loop waited forever. If the model download
            // stalls or the completion event never fires, fail after a bounded
            // wait instead of hanging the test runner indefinitely.
            DateTime deadline = DateTime.UtcNow.AddMinutes(5);
            while (!processCompleted)
            {
                if (DateTime.UtcNow > deadline)
                    throw new TimeoutException("Multibox detection did not complete within 5 minutes.");
                Thread.Sleep(1000);
            }
        }
Exemple #3
0
        /// <summary>
        /// Click handler: lazily build the Multibox graph, download/initialize the
        /// model, then detect people in the bundled sample image and display the
        /// annotated result.
        /// </summary>
        async void multiboxClicked(NSObject sender)
        {
            SetMessage("Please wait while we download Multibox model from internet...");
            SetImage(null);

            if (_multiboxGraph == null)
            {
                _multiboxGraph = new MultiboxGraph();
                _multiboxGraph.OnDownloadProgressChanged += OnDownloadProgressChanged;
            }

            // Completes once the model files are downloaded and imported.
            await _multiboxGraph.Init();
            SetMessage("");

            String fileName = "surfers.jpg";
            Tensor inputTensor = Emgu.TF.Models.ImageIO.ReadTensorFromImageFile <float>(fileName, 224, 224, 128.0f, 1.0f / 128.0f);

            MultiboxGraph.Result[] detections = _multiboxGraph.Detect(inputTensor);

            // Keep only boxes with confidence above 0.1 and draw them on the image.
            Emgu.Models.Annotation[] annotations = MultiboxGraph.FilterResults(detections, 0.1f);
            NSImage resultImage = new NSImage(fileName);
            Emgu.Models.NativeImageIO.DrawAnnotations(resultImage, annotations);

            SetImage(resultImage);
        }
Exemple #4
0
        /// <summary>
        /// Run Multibox people detection on the given image file, timing the graph
        /// execution, and render the annotated result plus elapsed time in the UI.
        /// </summary>
        public void Detect(String fileName)
        {
            // Decode and rescale the image into the 224x224 network input tensor.
            Tensor imageTensor = ImageIO.ReadTensorFromImageFile(fileName, 224, 224, 128.0f, 1.0f / 128.0f);

            // Time only the graph execution, not the image decode or drawing.
            Stopwatch watch = Stopwatch.StartNew();
            MultiboxGraph.Result result = graph.Detect(imageTensor);
            watch.Stop();

            // Draw boxes with confidence above 0.1 onto a copy of the original image.
            Bitmap bmp = new Bitmap(fileName);
            MultiboxGraph.DrawResults(bmp, result, 0.1f);

            // Single code path for the UI update; marshal to the UI thread only
            // when the call arrives from a worker thread.
            MethodInvoker updateUi = () =>
            {
                resultPictureBox.Image = bmp;
                messageLabel.Text = String.Format("Detection completed in {0} milliseconds", watch.ElapsedMilliseconds);
            };

            if (InvokeRequired)
            {
                this.Invoke(updateUi);
            }
            else
            {
                updateUi();
            }
        }
 /// <summary>
 /// Kick off Multibox detection for <paramref name="fileName"/>; the actual
 /// detection runs in <c>OnDownloadComplete</c> once the model is available.
 /// </summary>
 private void DetectImage(string fileName)
 {
     // Bug fix: the original wrapped the graph in a `using` block, disposing it
     // as soon as Init() returned — before the asynchronous model download had
     // finished — so OnDownloadComplete would fire against a disposed graph.
     // The graph must stay alive until the completion callback has run; any
     // previous instance is disposed before being replaced.
     _graph?.Dispose();
     _graph = new MultiboxGraph();
     _fileName = fileName;
     _graph.OnDownloadCompleted += OnDownloadComplete;
     _graph.Init();
 }
Exemple #6
0
        /// <summary>
        /// Construct the Multibox detection page: configure the (optionally GPU
        /// enabled) session, lazily create the shared graph, wire download events,
        /// and register the image-loaded handler that runs detection.
        /// </summary>
        public MultiboxDetectionPage()
            : base()
        {
            Title = "Multibox People Detection";

            if (_multiboxGraph == null)
            {
                SessionOptions so = new SessionOptions();
                if (TfInvoke.IsGoogleCudaEnabled)
                {
                    // When CUDA is available, let the GPU memory pool grow on
                    // demand instead of pre-allocating all device memory.
                    Tensorflow.ConfigProto config = new Tensorflow.ConfigProto();
                    config.GpuOptions             = new Tensorflow.GPUOptions();
                    config.GpuOptions.AllowGrowth = true;
                    so.SetConfig(config.ToProtobuf());
                }
                _multiboxGraph = new MultiboxGraph(null, so);
                _multiboxGraph.OnDownloadProgressChanged += onDownloadProgressChanged;
                _multiboxGraph.OnDownloadCompleted       += onDownloadCompleted;
                _multiboxGraph.OnDownloadCompleted       += (sender, e) =>
                {
                    OnButtonClicked(sender, e);
                };
            }

            OnImagesLoaded += (sender, image) =>
            {
                try
                {
                    SetMessage("Please wait...");
                    SetImage();
                    Stopwatch watch = Stopwatch.StartNew();

                    Tensor imageTensor = Emgu.TF.Models.ImageIO.ReadTensorFromImageFile <float>(image[0], 224, 224, 128.0f, 1.0f / 128.0f);
                    MultiboxGraph.Result[] detectResult = _multiboxGraph.Detect(imageTensor);
                    // Stop timing here so the reported duration covers only the
                    // graph execution, not the annotation/JPEG encoding below.
                    // (A redundant second watch.Stop() was removed — Stop on an
                    // already-stopped Stopwatch is a no-op.)
                    watch.Stop();
                    Emgu.Models.Annotation[] annotations = MultiboxGraph.FilterResults(detectResult, 0.1f);

                    var jpeg = Emgu.Models.NativeImageIO.ImageFileToJpeg(image[0], annotations);

                    SetImage(jpeg.Raw, jpeg.Width, jpeg.Height);
#if __MACOS__
                    var displayImage = this.DisplayImage;
                    displayImage.WidthRequest  = jpeg.Width;
                    displayImage.HeightRequest = jpeg.Height;
#endif

                    SetMessage(String.Format("Detected in {0} milliseconds.", watch.ElapsedMilliseconds));
                }
                catch (Exception excpt)
                {
                    // Surface any failure (download, decode, inference) in the UI label.
                    String msg = excpt.Message.Replace(System.Environment.NewLine, " ");
                    SetMessage(msg);
                }
            };
        }
Exemple #7
0
        /// <summary>
        /// Asynchronously downloads the Multibox model and runs people detection
        /// on the bundled sample image.
        /// </summary>
        public async Task TestMultiboxPeopleDetect()
        {
            // Decode the sample image into the 224x224 input tensor
            // (offset 128, scale 1/128).
            Tensor inputTensor = ImageIO.ReadTensorFromImageFile <float>("surfers.jpg", 224, 224, 128.0f, 1.0f / 128.0f);

            MultiboxGraph graph = new MultiboxGraph();
            await graph.Init();

            MultiboxGraph.Result[] detections = graph.Detect(inputTensor);
        }
Exemple #8
0
        // NOTE(review): this handler is wired to the download *progress* event, so
        // the full detection below re-runs on every progress tick (and possibly
        // before the model has finished downloading). It looks like
        // OnDownloadCompleted was intended — confirm against the subscription site.
        void multiboxGraph_OnDownloadProgressChanged(object sender, System.Net.DownloadProgressChangedEventArgs e)
        {
            String fileName = "surfers.jpg";

            // Decode the sample image into a 224x224 tensor (offset 128, scale 1/128).
            Tensor imageTensor = Emgu.TF.Models.ImageIO.ReadTensorFromImageFile(fileName, 224, 224, 128.0f, 1.0f / 128.0f);

            MultiboxGraph.Result detectResult = _multiboxGraph.Detect(imageTensor);
            NSImage image = new NSImage(fileName);

            // Draw boxes with confidence above 0.1 and display the annotated image.
            MultiboxGraph.DrawResults(image, detectResult, 0.1f);
            SetImage(image);
        }
Exemple #9
0
        /// <summary>
        /// Construct the main form: verify the native TensorFlow library loads,
        /// then create the Multibox graph and start its model download/initialization.
        /// </summary>
        public MainForm()
        {
            InitializeComponent();
            // Forces the native TensorFlow library to be resolved early, so a
            // missing native dependency fails here rather than at first detection.
            TfInvoke.CheckLibraryLoaded();
            SetLabelText(String.Empty);

            graph = new MultiboxGraph();
            graph.OnDownloadProgressChanged += OnDownloadProgressChangedEventHandler;
            graph.OnDownloadCompleted       += onDownloadCompleted;

            // NOTE(review): if Init() returns a Task it is not awaited/observed here —
            // confirm that download failures are surfaced via the events wired above.
            graph.Init();
        }
Exemple #10
0
        /// <summary>
        /// Run Multibox people detection on the given image file and show the
        /// annotated result in the picture box.
        /// </summary>
        public void Detect(String fileName)
        {
            //MultiboxGraph.Download();
            MultiboxGraph multiboxGraph = new MultiboxGraph();

            // Decode and rescale the image into the 224x224 network input tensor.
            Tensor inputTensor = ImageIO.ReadTensorFromImageFile(fileName, 224, 224, 128.0f, 1.0f / 128.0f);
            MultiboxGraph.Result detection = multiboxGraph.Detect(inputTensor);

            // Overlay boxes scoring above 0.1 onto a copy of the original image.
            Bitmap annotated = new Bitmap(fileName);
            MultiboxGraph.DrawResults(annotated, detection, 0.1f);
            resultPictureBox.Image = annotated;
        }
Exemple #11
0
        /// <summary>
        /// Click handler: lazily create the Multibox graph, wire download events,
        /// and start model initialization.
        /// </summary>
        partial void multiboxClicked(NSObject sender)
        {
            SetMessage("Please wait while we download Multibox model from internet...");
            SetImage(null);

            if (_multiboxGraph == null)
            {
                _multiboxGraph = new MultiboxGraph();
                // NOTE(review): two handlers are attached to the *progress* event;
                // elsewhere in this file the second handler runs a full detection on
                // every progress tick. OnDownloadCompleted may have been intended for
                // the second subscription — confirm.
                _multiboxGraph.OnDownloadProgressChanged += OnDownloadProgressChanged;
                _multiboxGraph.OnDownloadProgressChanged += multiboxGraph_OnDownloadProgressChanged;
            }
            _multiboxGraph.Init();
        }
Exemple #12
0
        /// <summary>
        /// Run Multibox people detection on the bundled sample image and save the
        /// annotated output next to it.
        /// </summary>
        public void TestMultiboxPeopleDetect()
        {
            MultiboxGraph graph = new MultiboxGraph();

            // Decode the sample image into the 224x224 input tensor
            // (offset 128, scale 1/128).
            Tensor inputTensor = ImageIO.ReadTensorFromImageFile("surfers.jpg", 224, 224, 128.0f, 1.0f / 128.0f);
            MultiboxGraph.Result detection = graph.Detect(inputTensor);

            // Draw boxes scoring above 0.1 and persist the annotated image.
            Bitmap annotated = new Bitmap("surfers.jpg");
            MultiboxGraph.DrawResults(annotated, detection, 0.1f);
            annotated.Save("surfers_result.jpg");
        }
Exemple #13
0
    // Unity entry point: run Multibox people detection either on the live webcam
    // feed (when a camera is present) or once on the bundled "surfers" texture.
    void Start()
    {
        //Warning: The following code is used to get around a https certification issue for downloading tesseract language files from Github
        //Do not use this code in a production environment. Please make sure you understand the security implication from the following code before using it
        ServicePointManager.ServerCertificateValidationCallback += delegate(object sender, X509Certificate cert, X509Chain chain, SslPolicyErrors sslPolicyErrors) {
            HttpWebRequest webRequest = sender as HttpWebRequest;
            if (webRequest == null)
            {
                return false;
            }
            // Only accept certificates for the two GitHub hosts the model files
            // are fetched from; everything else keeps normal validation failure.
            String requestUri = webRequest.Address.AbsoluteUri;
            return requestUri.StartsWith(@"https://github.com/") ||
                   requestUri.StartsWith(@"https://raw.githubusercontent.com/");
        };

        TfInvoke.CheckLibraryLoaded();

        WebCamDevice[] cameras = WebCamTexture.devices;

        if (cameras.Length == 0)
        {
            // No camera available: detect once on the bundled sample texture.
            _liveCameraView = false;

            Texture2D sampleTexture = Resources.Load <Texture2D>("surfers");
            Tensor    sampleTensor  = ImageIO.ReadTensorFromTexture2D(sampleTexture, 224, 224, 128.0f, 1.0f / 128.0f, true);

            _multiboxGraph = new MultiboxGraph();
            MultiboxGraph.Result detections = _multiboxGraph.Detect(sampleTensor);

            // Copy the sample into a writable texture and overlay the boxes
            // (confidence threshold 0.1), then center it on screen.
            drawableTexture = new Texture2D(sampleTexture.width, sampleTexture.height, TextureFormat.ARGB32, false);
            drawableTexture.SetPixels(sampleTexture.GetPixels());
            MultiboxGraph.DrawResults(drawableTexture, detections, 0.1f);

            this.GetComponent <GUITexture>().texture    = drawableTexture;
            this.GetComponent <GUITexture>().pixelInset = new Rect(-sampleTexture.width / 2, -sampleTexture.height / 2, sampleTexture.width, sampleTexture.height);
        }
        else
        {
            // Camera available: start streaming; per-frame detection happens in Update().
            _liveCameraView = true;
            webcamTexture   = new WebCamTexture(cameras[0].name);
            _multiboxGraph  = new MultiboxGraph();
            baseRotation    = transform.rotation;
            webcamTexture.Play();
        }
    }
Exemple #14
0
        // NOTE(review): this handler is wired to the download *progress* event, so
        // the full detection below re-runs on every progress tick (and possibly
        // before the model has finished downloading). OnDownloadCompleted may have
        // been intended — confirm against the subscription site.
        void multiboxGraph_OnDownloadProgressChanged(object sender, System.Net.DownloadProgressChangedEventArgs e)
        {
            String fileName = "surfers.jpg";

            // Decode the sample image into a 224x224 tensor (offset 128, scale 1/128).
            Tensor imageTensor = Emgu.TF.Models.ImageIO.ReadTensorFromImageFile(fileName, 224, 224, 128.0f, 1.0f / 128.0f);

            MultiboxGraph.Result[] detectResult = _multiboxGraph.Detect(imageTensor);
            // Keep only boxes with confidence above 0.1, draw them, and display.
            Emgu.Models.NativeImageIO.Annotation[] annotations = MultiboxGraph.FilterResults(detectResult, 0.1f);

            NSImage img = new NSImage(fileName);

            Emgu.Models.NativeImageIO.DrawAnnotations(img, annotations);

            SetImage(img);
        }
Exemple #15
0
        /// <summary>
        /// Click handler: run Multibox people detection on the bundled sample image
        /// and show the annotated result in the main image view.
        /// </summary>
        partial void multiboxClicked(NSObject sender)
        {
            messageLabel.StringValue = "Multibox Clicked";
            mainImageView.Image      = null;

            String fileName = "surfers.jpg";
            MultiboxGraph multiboxGraph = new MultiboxGraph();

            // Decode the sample image into the 224x224 input tensor
            // (offset 128, scale 1/128).
            Tensor inputTensor = Emgu.TF.Models.ImageIO.ReadTensorFromImageFile(fileName, 224, 224, 128.0f, 1.0f / 128.0f);
            MultiboxGraph.Result detection = multiboxGraph.Detect(inputTensor);

            // Draw boxes scoring above 0.1 directly onto the displayed image.
            NSImage annotatedImage = new NSImage(fileName);
            MultiboxGraph.DrawResults(annotatedImage, detection, 0.1f);
            mainImageView.Image = annotatedImage;
        }
Exemple #16
0
        /// <summary>
        /// Construct the Multibox detection page: lazily create the shared graph,
        /// wire download events, and register the image-loaded handler that runs
        /// detection and displays the annotated JPEG.
        /// </summary>
        public MultiboxDetectionPage()
            : base()
        {
            if (_multiboxGraph == null)
            {
                _multiboxGraph = new MultiboxGraph();
                _multiboxGraph.OnDownloadProgressChanged += onDownloadProgressChanged;
                _multiboxGraph.OnDownloadCompleted       += onDownloadCompleted;
                _multiboxGraph.OnDownloadCompleted       += (sender, e) =>
                {
                    OnButtonClicked(sender, e);
                };
            }

            OnImagesLoaded += (sender, image) =>
            {
                try
                {
                    SetMessage("Please wait...");
                    SetImage();
                    Stopwatch watch = Stopwatch.StartNew();

                    Tensor imageTensor = Emgu.TF.Models.ImageIO.ReadTensorFromImageFile <float>(image[0], 224, 224, 128.0f, 1.0f / 128.0f);
                    MultiboxGraph.Result[] detectResult = _multiboxGraph.Detect(imageTensor);
                    // Stop timing here so the reported duration covers only the
                    // graph execution, not the annotation/JPEG encoding below.
                    // (A redundant second watch.Stop() was removed — Stop on an
                    // already-stopped Stopwatch is a no-op.)
                    watch.Stop();
                    Emgu.Models.NativeImageIO.Annotation[] annotations = MultiboxGraph.FilterResults(detectResult, 0.1f);

                    byte[] jpeg = Emgu.Models.NativeImageIO.ImageFileToJpeg(image[0], annotations);

                    SetImage(jpeg);
#if __MACOS__
                    NSImage img          = new NSImage(image[0]);
                    var     displayImage = this.GetImage();
                    displayImage.WidthRequest  = img.Size.Width;
                    displayImage.HeightRequest = img.Size.Height;
#endif

                    SetMessage(String.Format("Detected in {0} milliseconds.", watch.ElapsedMilliseconds));
                }
                catch (Exception excpt)
                {
                    // Surface any failure (download, decode, inference) in the UI label.
                    String msg = excpt.Message.Replace(System.Environment.NewLine, " ");
                    SetMessage(msg);
                }
            };
        }
 /// <summary>
 /// Fires once the Multibox model download finishes: runs detection on the
 /// pending image file and publishes the annotated result as the view-model's
 /// image source. Any failure is logged and the modal is dismissed either way.
 /// </summary>
 private void OnDownloadComplete(object sender, AsyncCompletedEventArgs e)
 {
     try
     {
         // Decode the pending image into the 224x224 input tensor
         // (offset 128, scale 1/128).
         using (var inputTensor = ImageIO.ReadTensorFromImageFile <float>(_fileName, 224, 224, 128.0f, 1.0f / 128.0f))
         {
             var results     = _graph.Detect(inputTensor);
             // Keep only boxes with confidence above 0.1, then render them
             // onto a JPEG copy of the original file.
             var annotations = MultiboxGraph.FilterResults(results, 0.1f);
             var jpegImage   = NativeImageIO.ImageFileToJpeg(_fileName, annotations);
             // Convert the raw JPEG bytes into a Bitmap for the UI binding.
             var converter   = TypeDescriptor.GetConverter(typeof(Bitmap));
             var bitmap      = (Bitmap)converter.ConvertFrom(jpegImage.Raw);
             ImageSource = BitmapConverter.ConvertBitmap(bitmap);
         }
         IsModalVisible = false;
     }
     catch (Exception ex)
     {
         _exceptionLogDataAccess.LogException(ex.ToString());
         IsModalVisible = false;
     }
 }
Exemple #18
0
        /// <summary>
        /// Run Multibox people detection on <paramref name="fileName"/>, timing the
        /// warmed-up graph execution, and render the annotated result in the UI.
        /// </summary>
        public void Detect(String fileName)
        {
            Tensor imageTensor = ImageIO.ReadTensorFromImageFile(fileName, 224, 224, 128.0f, 1.0f / 128.0f);

            MultiboxGraph.Result[] result;
            if (_coldSession)
            {
                //First run of the detection, here we will compile the graph and initialize the session
                //This is expected to take much longer time than consecutive runs.
                result       = graph.Detect(imageTensor);
                _coldSession = false;
            }

            //Here we are trying to time the execution of the graph after it is loaded
            //If we are not interest in the performance, we can skip the 3 lines that follows
            Stopwatch watch = Stopwatch.StartNew();
            result = graph.Detect(imageTensor);
            watch.Stop();

            // Keep boxes with confidence above 0.1 and render them into a JPEG.
            float[][] rectangles = MultiboxGraph.FilterResults(result, 0.1f);
            byte[]    jpeg       = MultiboxGraph.DrawRectanglesToJpeg(fileName, rectangles);

            // Bug fix: the original line `Bitmap bmp = new Bitmap()` was malformed
            // (no such parameterless constructor, missing semicolon, the following
            // `if` fused onto it) and the annotated JPEG bytes were never used.
            // Decode the annotated JPEG into the bitmap that gets rendered.
            // Note: GDI+ requires the backing stream to stay open for the bitmap's
            // lifetime, so the MemoryStream is intentionally not disposed here.
            Bitmap bmp = new Bitmap(new System.IO.MemoryStream(jpeg));

            // Marshal the UI update to the UI thread when called from a worker.
            if (InvokeRequired)
            {
                this.Invoke((MethodInvoker)(() =>
                {
                    RenderResult(bmp, watch.ElapsedMilliseconds);
                }));
            }
            else
            {
                RenderResult(bmp, watch.ElapsedMilliseconds);
            }
        }
    // Unity per-frame callback: while the model is downloading, show progress;
    // afterwards run Multibox detection on each new webcam frame, or once on the
    // bundled "surfers" sample when no live camera view is active.
    void Update()
    {
        if (!_multiboxGraph.Imported)
        {
            // Model not yet imported: report download progress in the UI label.
            _displayMessage = String.Format("Downloading multibox model files, {0} % of file {1}...", _multiboxGraph.DownloadProgress * 100, _multiboxGraph.DownloadFileName);
        }
        else if (_liveCameraView)
        {
            if (webcamTexture != null && webcamTexture.didUpdateThisFrame)
            {
                #region convert the webcam texture to RGBA bytes

                // (Re)allocate the pixel buffer when the camera resolution changes.
                if (data == null || (data.Length != webcamTexture.width * webcamTexture.height))
                {
                    data = new Color32[webcamTexture.width * webcamTexture.height];
                }
                webcamTexture.GetPixels32(data);

                if (bytes == null || bytes.Length != data.Length * 4)
                {
                    bytes = new byte[data.Length * 4];
                }
                // Pin the Color32 array so its raw bytes can be copied out directly.
                GCHandle handle = GCHandle.Alloc(data, GCHandleType.Pinned);
                Marshal.Copy(handle.AddrOfPinnedObject(), bytes, 0, bytes.Length);
                handle.Free();

                #endregion

                #region convert the RGBA bytes to texture2D

                if (resultTexture == null || resultTexture.width != webcamTexture.width ||
                    resultTexture.height != webcamTexture.height)
                {
                    resultTexture = new Texture2D(webcamTexture.width, webcamTexture.height, TextureFormat.RGBA32,
                                                  false);
                }

                resultTexture.LoadRawTextureData(bytes);
                resultTexture.Apply();

                #endregion

                // Size the on-screen texture once, centered on the GUI element.
                if (!_textureResized)
                {
                    this.GetComponent <GUITexture>().pixelInset = new Rect(-webcamTexture.width / 2,
                                                                           -webcamTexture.height / 2, webcamTexture.width, webcamTexture.height);
                    _textureResized = true;
                }

                // Compensate for the camera's physical rotation.
                transform.rotation = baseRotation * Quaternion.AngleAxis(webcamTexture.videoRotationAngle, Vector3.up);


                Tensor imageTensor           = ImageIO.ReadTensorFromTexture2D(resultTexture, 224, 224, 128.0f, 1.0f / 128.0f, true);
                MultiboxGraph.Result results = _multiboxGraph.Detect(imageTensor);

                if (drawableTexture == null || drawableTexture.width != resultTexture.width ||
                    drawableTexture.height != resultTexture.height)
                {
                    drawableTexture = new Texture2D(resultTexture.width, resultTexture.height, TextureFormat.ARGB32, false);
                }
                // Draw detection boxes (confidence > 0.2) on a copy of the frame.
                drawableTexture.SetPixels(resultTexture.GetPixels());
                MultiboxGraph.DrawResults(drawableTexture, results, 0.2f);

                this.GetComponent <GUITexture>().texture = drawableTexture;
                //count++;
            }
        }
        else if (!_staticViewRendered)
        {
            // No live camera: run detection once on the bundled sample texture.
            Texture2D texture     = Resources.Load <Texture2D>("surfers");
            Tensor    imageTensor = ImageIO.ReadTensorFromTexture2D(texture, 224, 224, 128.0f, 1.0f / 128.0f, true);

            //byte[] raw = ImageIO.EncodeJpeg(imageTensor, 128.0f, 128.0f);
            //System.IO.File.WriteAllBytes("surfers_out.jpg", raw);


            MultiboxGraph.Result results = _multiboxGraph.Detect(imageTensor);

            drawableTexture = new Texture2D(texture.width, texture.height, TextureFormat.ARGB32, false);
            drawableTexture.SetPixels(texture.GetPixels());
            MultiboxGraph.DrawResults(drawableTexture, results, 0.1f);

            this.GetComponent <GUITexture>().texture    = drawableTexture;
            this.GetComponent <GUITexture>().pixelInset = new Rect(-texture.width / 2, -texture.height / 2, texture.width, texture.height);

            _displayMessage     = String.Empty;
            _staticViewRendered = true;
        }

        DisplayText.text = _displayMessage;
    }
Exemple #20
0
        /// <summary>
        /// Construct the Multibox detection page: wire the detection button, and on
        /// image load run detection on a worker task, with per-platform drawing
        /// (Android / macOS / iOS) before displaying the annotated JPEG.
        /// </summary>
        public MultiboxDetectionPage()
            : base()
        {
            var button = this.GetButton();

            button.Text     = "Perform People Detection";
            button.Clicked += OnButtonClicked;

            // NOTE(review): async void lambda — exceptions thrown inside are not
            // observable by the caller; confirm failures surface via the UI label.
            OnImagesLoaded += async(sender, image) =>
            {
                GetLabel().Text = "Please wait...";
                SetImage();

                // Worker task yields (jpeg bytes, elapsed ms). On __UNIFIED__
                // platforms the image is instead delivered from inside the
                // BeginInvokeOnMainThread block and the task returns null bytes.
                Task <Tuple <byte[], long> > t = new Task <Tuple <byte[], long> >(
                    () =>
                {
                    //MultiboxGrapho.Download();
                    MultiboxGraph graph = new MultiboxGraph();
                    Tensor imageTensor  = Emgu.TF.Models.ImageIO.ReadTensorFromImageFile(image[0], 224, 224, 128.0f, 1.0f / 128.0f);
                    MultiboxGraph.Result detectResult = graph.Detect(imageTensor);
#if __ANDROID__
                    // Android: decode a mutable bitmap, draw boxes, re-encode to JPEG.
                    BitmapFactory.Options options = new BitmapFactory.Options();
                    options.InMutable             = true;
                    Android.Graphics.Bitmap bmp   = BitmapFactory.DecodeFile(image[0], options);
                    MultiboxGraph.DrawResults(bmp, detectResult, 0.2f);
                    using (MemoryStream ms = new MemoryStream())
                    {
                        bmp.Compress(Bitmap.CompressFormat.Jpeg, 90, ms);
                        return(new Tuple <byte[], long>(ms.ToArray(), 0));
                    }
#elif __UNIFIED__ && !__IOS__
                    // macOS: drawing/encoding happens on the main thread; the
                    // annotated JPEG is pushed to the UI from the dispatch block.
                    NSImage img = NSImage.ImageNamed(image[0]);

                    Xamarin.Forms.Device.BeginInvokeOnMainThread(() =>
                    {
                        MultiboxGraph.DrawResults(img, detectResult, 0.1f);
                        var imageData = img.AsTiff();
                        var imageRep  = NSBitmapImageRep.ImageRepsWithData(imageData)[0] as NSBitmapImageRep;
                        var jpegData  = imageRep.RepresentationUsingTypeProperties(NSBitmapImageFileType.Jpeg, null);
                        byte[] raw    = new byte[jpegData.Length];
                        System.Runtime.InteropServices.Marshal.Copy(jpegData.Bytes, raw, 0, (int)jpegData.Length);
                        SetImage(raw);
                        GetLabel().Text = String.Format("Detected with in {0} milliseconds.", 0);
                    });



                    return(new Tuple <byte[], long>(null, 0));
#elif __IOS__
                    // iOS: same pattern as macOS, via UIKit.
                    UIImage uiimage = new UIImage(image[0]);

                    Xamarin.Forms.Device.BeginInvokeOnMainThread(() =>
                    {
                        UIImage newImg = MultiboxGraph.DrawResults(uiimage, detectResult, 0.1f);
                        var jpegData   = newImg.AsJPEG();
                        byte[] raw     = new byte[jpegData.Length];
                        System.Runtime.InteropServices.Marshal.Copy(jpegData.Bytes, raw, 0, (int)jpegData.Length);
                        SetImage(raw);
                        GetLabel().Text = String.Format("Detected with in {0} milliseconds.", 0);
                    });

                    return(new Tuple <byte[], long>(null, 0));
#else
                    return(new Tuple <byte[], long>(new byte[10], 0));
#endif
                });
                t.Start();

#if !(__UNIFIED__)
                // Non-Apple platforms: await the worker and display its JPEG here.
                var result = await t;
                SetImage(t.Result.Item1);
                GetLabel().Text = String.Format("Detected with in {0} milliseconds.", t.Result.Item2);
#endif
            };
        }
    // Unity per-frame callback: lazily create/initialize the Multibox graph, show
    // download progress while importing, then run detection on each new webcam
    // frame or once on the bundled "surfers" sample when no live camera is active.
    void Update()
    {
        if (_multiboxGraph == null)
        {
            // First frame: create the graph and start the download/import coroutine.
            _multiboxGraph = new MultiboxGraph();
            StartCoroutine(_multiboxGraph.Init());
        }
        else if (!_multiboxGraph.Imported)
        {
            // Model not yet imported: report download progress in the UI label.
            _displayMessage = String.Format("Downloading multibox model files, {0} % of file {1}...", _multiboxGraph.DownloadProgress * 100, _multiboxGraph.DownloadFileName);
        }
        else if (_liveCameraView)
        {
            _displayMessage = String.Empty;

            if (_webcamTexture != null && _webcamTexture.didUpdateThisFrame)
            {
                int webcamWidth  = _webcamTexture.width;
                int webcamHeight = _webcamTexture.height;
                #region convert the webcam texture to RGBA bytes

                // (Re)allocate buffers when the camera resolution changes.
                if (_data == null || (_data.Length != webcamWidth * webcamHeight))
                {
                    _data = new Color32[webcamWidth * webcamHeight];
                }
                _webcamTexture.GetPixels32(_data);

                if (_bytes == null || _bytes.Length != _data.Length * 4)
                {
                    _bytes = new byte[_data.Length * 4];
                }
                // Pin the Color32 array so its raw bytes can be copied out directly.
                GCHandle handle = GCHandle.Alloc(_data, GCHandleType.Pinned);
                Marshal.Copy(handle.AddrOfPinnedObject(), _bytes, 0, _bytes.Length);
                handle.Free();

                #endregion

                #region convert the RGBA bytes to texture2D
                if (_drawableTexture == null || _drawableTexture.width != webcamWidth ||
                    _drawableTexture.height != webcamHeight)
                {
                    _drawableTexture = new Texture2D(webcamWidth, webcamHeight, TextureFormat.RGBA32,
                                                     false);
                }

                _drawableTexture.LoadRawTextureData(_bytes);
                _drawableTexture.Apply();

                #endregion

                //Tensor imageTensor = ImageIO.ReadTensorFromTexture2D(_drawableTexture, 224, 224, 128.0f, 1.0f/128.0f, true);
                Tensor imageTensor = ImageIO.ReadTensorFromColor32(
                    _data,
                    webcamWidth,
                    webcamHeight,
                    224,
                    224,
                    128.0f,
                    1.0f / 128.0f,
                    true);
                MultiboxGraph.Result[] results = _multiboxGraph.Detect(imageTensor);

                // Draw boxes (confidence > 0.2) directly onto the frame texture.
                MultiboxGraph.DrawResults(_drawableTexture, results, 0.2f, true);

                // Size the on-screen texture only once.
                if (!_textureResized)
                {
                    ResizeTexture(_drawableTexture);
                    _textureResized = true;
                }

                // Compensate for the camera's physical rotation.
                this.transform.rotation = _baseRotation * Quaternion.AngleAxis(_webcamTexture.videoRotationAngle, Vector3.up);
                RenderTexture(_drawableTexture);
                //count++;
            }
        }
        else if (!_staticViewRendered)
        {
            // No live camera: run detection once on the bundled sample texture.
            Texture2D texture     = Resources.Load <Texture2D>("surfers");
            Tensor    imageTensor = ImageIO.ReadTensorFromTexture2D(texture, 224, 224, 128.0f, 1.0f / 128.0f, true);

            //byte[] raw = ImageIO.EncodeJpeg(imageTensor, 128.0f, 128.0f);
            //System.IO.File.WriteAllBytes("surfers_out.jpg", raw);

            MultiboxGraph.Result[] results = _multiboxGraph.Detect(imageTensor);

            _drawableTexture = new Texture2D(texture.width, texture.height, TextureFormat.ARGB32, false);
            _drawableTexture.SetPixels32(texture.GetPixels32());
            MultiboxGraph.DrawResults(_drawableTexture, results, 0.1f, true);

            RenderTexture(_drawableTexture);
            ResizeTexture(_drawableTexture);

            _displayMessage     = String.Empty;
            _staticViewRendered = true;
        }

        DisplayText.text = _displayMessage;
    }
Exemple #22
0
    // Unity per-frame callback: when a live camera view is active, grab each new
    // webcam frame, run Multibox detection on it and display the annotated texture.
    void Update()
    {
        if (_liveCameraView)
        {
            if (webcamTexture != null && webcamTexture.didUpdateThisFrame)
            {
                #region convert the webcam texture to RGBA bytes

                // (Re)allocate the pixel buffer when the camera resolution changes.
                if (data == null || (data.Length != webcamTexture.width * webcamTexture.height))
                {
                    data = new Color32[webcamTexture.width * webcamTexture.height];
                }
                webcamTexture.GetPixels32(data);

                if (bytes == null || bytes.Length != data.Length * 4)
                {
                    bytes = new byte[data.Length * 4];
                }
                // Pin the Color32 array so its raw bytes can be copied out directly.
                GCHandle handle = GCHandle.Alloc(data, GCHandleType.Pinned);
                Marshal.Copy(handle.AddrOfPinnedObject(), bytes, 0, bytes.Length);
                handle.Free();

                #endregion

                #region convert the RGBA bytes to texture2D

                if (resultTexture == null || resultTexture.width != webcamTexture.width ||
                    resultTexture.height != webcamTexture.height)
                {
                    resultTexture = new Texture2D(webcamTexture.width, webcamTexture.height, TextureFormat.RGBA32,
                                                  false);
                }

                resultTexture.LoadRawTextureData(bytes);
                resultTexture.Apply();

                #endregion

                // Size the on-screen texture once, centered on the GUI element.
                if (!_textureResized)
                {
                    this.GetComponent <GUITexture>().pixelInset = new Rect(-webcamTexture.width / 2,
                                                                           -webcamTexture.height / 2, webcamTexture.width, webcamTexture.height);
                    _textureResized = true;
                }

                // Compensate for the camera's physical rotation.
                transform.rotation = baseRotation * Quaternion.AngleAxis(webcamTexture.videoRotationAngle, Vector3.up);


                Tensor imageTensor           = ImageIO.ReadTensorFromTexture2D(resultTexture, 224, 224, 128.0f, 1.0f / 128.0f, true);
                MultiboxGraph.Result results = _multiboxGraph.Detect(imageTensor);

                if (drawableTexture == null || drawableTexture.width != resultTexture.width ||
                    drawableTexture.height != resultTexture.height)
                {
                    drawableTexture = new Texture2D(resultTexture.width, resultTexture.height, TextureFormat.ARGB32, false);
                }
                // Draw detection boxes (confidence > 0.1) on a copy of the frame.
                drawableTexture.SetPixels(resultTexture.GetPixels());
                MultiboxGraph.DrawResults(drawableTexture, results, 0.1f);

                this.GetComponent <GUITexture>().texture = drawableTexture;
                //count++;
            }
        }
    }