Example #1
        // Update is called once per frame
        void Update()
        {
            updateCount++;
            elapsed += Time.deltaTime;
            if (elapsed >= 1f)
            {
                updateFPS    = updateCount / elapsed;
                onFrameFPS   = onFrameCount / elapsed;
                drawFPS      = drawCount / elapsed;
                updateCount  = 0;
                onFrameCount = 0;
                drawCount    = 0;
                elapsed      = 0;

                Debug.Log("didUpdateThisFrame: " + didUpdateThisFrame + " updateFPS: " + updateFPS + " onFrameFPS: " + onFrameFPS + " drawFPS: " + drawFPS);
                if (fpsMonitor != null)
                {
                    fpsMonitor.Add("onFrameFPS", onFrameFPS.ToString("F1"));
                    fpsMonitor.Add("drawFPS", drawFPS.ToString("F1"));

                    if (matrix != null)
                    {
                        fpsMonitor.Add("width", matrix.width().ToString());
                        fpsMonitor.Add("height", matrix.height().ToString());
                    }
                    fpsMonitor.Add("orientation", Screen.orientation.ToString());
                }
            }

            if (NatCam.IsPlaying && didUpdateThisFrame)
            {
                drawCount++;

                Mat matrix = GetMat(matCaptureMethod);

                if (matrix != null)
                {
                    ProcessImage(matrix, imageProcessingType);

                    // The Imgproc.putText method is too heavy to use for mobile device benchmark purposes.
                    //Imgproc.putText (matrix, "W:" + matrix.width () + " H:" + matrix.height () + " SO:" + Screen.orientation, new Point (5, matrix.rows () - 10), Core.FONT_HERSHEY_SIMPLEX, 1.0, new Scalar (255, 255, 255, 255), 2, Imgproc.LINE_AA, false);
                    //Imgproc.putText (matrix, "updateFPS:" + updateFPS.ToString("F1") + " onFrameFPS:" + onFrameFPS.ToString("F1") + " drawFPS:" + drawFPS.ToString("F1"), new Point (5, matrix.rows () - 50), Core.FONT_HERSHEY_SIMPLEX, 1.0, new Scalar (255, 255, 255, 255), 2, Imgproc.LINE_AA, false);

                    switch (imageFlippingMethod)
                    {
                    default:
                    case ImageFlippingMethod.OpenCVForUnity_Flip:
                        // Restore the image's coordinate system using OpenCV's flip function (CPU).
                        Utils.fastMatToTexture2D(matrix, texture, true, 0, false);
                        break;

                    case ImageFlippingMethod.Shader:
                        // Restore the image's coordinate system in a shader (GPU).
                        Utils.fastMatToTexture2D(matrix, texture, false, 0, false);
                        break;
                    }
                }
            }
        }
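
The one-second FPS accounting at the top of Update() (accumulate a count, divide by the elapsed time, reset) is repeated for updateFPS, onFrameFPS and drawFPS. A minimal sketch of how that bookkeeping could be factored into a reusable helper; the FpsAverager class and its members are illustrative and not part of the example project:

        // Illustrative helper: one instance per counter (update, onFrame, draw).
        public class FpsAverager
        {
            int count;
            float elapsed;

            public float Fps { get; private set; }

            public void Tick() { count++; }

            // Call once per frame with Time.deltaTime; returns true when a new
            // one-second average has just been computed.
            public bool Accumulate(float deltaTime)
            {
                elapsed += deltaTime;
                if (elapsed < 1f) return false;
                Fps = count / elapsed;
                count = 0;
                elapsed = 0f;
                return true;
            }
        }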
Example #2
        protected override void Start()
        {
            // Load global camera benchmark settings.
            int width, height, framerate;

            NatCamWithOpenCVForUnityExample.CameraConfiguration(out width, out height, out framerate);
            NatCamWithOpenCVForUnityExample.ExampleSceneConfiguration(out performImageProcessingEachTime);
            // Create camera source
            cameraSource = new NatCamSource(width, height, framerate, useFrontCamera);
            if (!cameraSource.activeCamera)
            {
                cameraSource = new NatCamSource(width, height, framerate, !useFrontCamera);
            }
            cameraSource.StartPreview(OnStart, OnFrame);
            // Create comic filter
            comicFilter = new ComicFilter();

            exampleTitle      = "[NatCamWithOpenCVForUnity Example] (" + NatCamWithOpenCVForUnityExample.GetNatCamVersion() + ")";
            exampleSceneTitle = "- Integration With NatShare Example";

            fpsMonitor = GetComponent <FpsMonitor>();
            if (fpsMonitor != null)
            {
                fpsMonitor.Add("Name", "IntegrationWithNatShareExample");
                fpsMonitor.Add("performImageProcessingEveryTime", performImageProcessingEachTime.ToString());
                fpsMonitor.Add("onFrameFPS", onFrameFPS.ToString("F1"));
                fpsMonitor.Add("drawFPS", drawFPS.ToString("F1"));
                fpsMonitor.Add("width", "");
                fpsMonitor.Add("height", "");
                fpsMonitor.Add("isFrontFacing", "");
                fpsMonitor.Add("orientation", "");
            }
        }
Example #3
        protected override void Start()
        {
            // Load global camera benchmark settings.
            int width, height, framerate;

            NatCamWithOpenCVForUnityExample.CameraConfiguration(out width, out height, out framerate);
            NatCamWithOpenCVForUnityExample.ExampleSceneConfiguration(out performImageProcessingEachTime);
            // Create camera source
            cameraSource = new WebCamMatSource(width, height, framerate, useFrontCamera);
            if (!cameraSource.activeCamera)
            {
                cameraSource = new WebCamMatSource(width, height, framerate, !useFrontCamera);
            }
            cameraSource.StartPreview(OnStart, OnFrame);
            // Update UI
            imageProcessingTypeDropdown.value = (int)imageProcessingType;

            fpsMonitor = GetComponent <FpsMonitor>();
            if (fpsMonitor != null)
            {
                fpsMonitor.Add("Name", "WebCamTextureToMatExample");
                fpsMonitor.Add("performImageProcessingEveryTime", performImageProcessingEachTime.ToString());
                fpsMonitor.Add("onFrameFPS", onFrameFPS.ToString("F1"));
                fpsMonitor.Add("drawFPS", drawFPS.ToString("F1"));
                fpsMonitor.Add("width", "");
                fpsMonitor.Add("height", "");
                fpsMonitor.Add("isFrontFacing", "");
                fpsMonitor.Add("orientation", "");
            }
        }
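
Both Start() variants above differ only in the concrete source they construct (NatCamSource vs. WebCamMatSource); the rest of the code relies on the same members. A hedged sketch of that shared surface, written as an interface for illustration; it is not the project's actual type definition:

        // Illustrative interface listing the members both Start() variants rely on.
        public interface ICameraSourceSketch
        {
            int width { get; }
            int height { get; }
            bool isFrontFacing { get; }
            object activeCamera { get; }                                     // non-null once a camera has been acquired
            void StartPreview(System.Action onStart, System.Action onFrame); // preview lifecycle callbacks
        }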
Example #4
        // Update is called once per frame
        void Update()
        {
            updateCount++;
            elapsed += Time.deltaTime;
            if (elapsed >= 1f)
            {
                updateFPS    = updateCount / elapsed;
                onFrameFPS   = onFrameCount / elapsed;
                drawFPS      = drawCount / elapsed;
                updateCount  = 0;
                onFrameCount = 0;
                drawCount    = 0;
                elapsed      = 0;

                Debug.Log("didUpdateThisFrame: " + didUpdateThisFrame + " updateFPS: " + updateFPS + " onFrameFPS: " + onFrameFPS + " drawFPS: " + drawFPS);
                if (fpsMonitor != null)
                {
                    fpsMonitor.Add("onFrameFPS", onFrameFPS.ToString("F1"));
                    fpsMonitor.Add("drawFPS", drawFPS.ToString("F1"));

                    if (matrix != null)
                    {
                        fpsMonitor.Add("width", matrix.width().ToString());
                        fpsMonitor.Add("height", matrix.height().ToString());
                    }
                    fpsMonitor.Add("orientation", Screen.orientation.ToString());
                }
            }

            if (NatCam.IsPlaying && didUpdateThisFrame)
            {
                drawCount++;

                Mat matrix = GetMat();

                if (matrix != null)
                {
                    comicFilter.Process(matrix, matrix);

                    Imgproc.putText(matrix, "[NatCam With OpenCVForUnity Example]", new Point(5, matrix.rows() - 50), Core.FONT_HERSHEY_SIMPLEX, 1.0, new Scalar(255, 255, 255, 255), 2, Imgproc.LINE_AA, false);
                    Imgproc.putText(matrix, "- Integration With NatShare Example", new Point(5, matrix.rows() - 10), Core.FONT_HERSHEY_SIMPLEX, 1.0, new Scalar(255, 255, 255, 255), 2, Imgproc.LINE_AA, false);

                    // Restore the image's coordinate system using OpenCV's flip function.
                    Utils.fastMatToTexture2D(matrix, texture, true, 0, false);
                }
            }
        }
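
ComicFilter itself is not included in this snippet. A minimal stand-in that produces a comparable black-and-white, posterized look using plain OpenCVForUnity calls could look like the following; the class is an assumption for illustration, not the example's actual filter:

        // Illustrative stand-in for ComicFilter built on OpenCVForUnity's Imgproc.
        public class ComicFilterSketch
        {
            readonly Mat gray = new Mat();

            public void Process(Mat src, Mat dst)
            {
                // RGBA -> gray, adaptive threshold for the "ink" look, back to RGBA for display.
                Imgproc.cvtColor(src, gray, Imgproc.COLOR_RGBA2GRAY);
                Imgproc.adaptiveThreshold(gray, gray, 255, Imgproc.ADAPTIVE_THRESH_MEAN_C, Imgproc.THRESH_BINARY, 9, 5);
                Imgproc.cvtColor(gray, dst, Imgproc.COLOR_GRAY2RGBA);
            }

            public void Dispose()
            {
                gray.Dispose();
            }
        }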
Example #5
        protected override void OnStart()
        {
            settingInfo1 = "- resolution: " + cameraSource.width + "x" + cameraSource.height;

            // Create matrix
            if (frameMatrix != null)
            {
                frameMatrix.Dispose();
            }
            frameMatrix = new Mat(cameraSource.height, cameraSource.width, CvType.CV_8UC4);
            // Create texture
            if (texture != null)
            {
                Texture2D.Destroy(texture);
            }
            texture = new Texture2D(
                cameraSource.width,
                cameraSource.height,
                TextureFormat.RGBA32,
                false,
                false
                );
            // Display preview
            rawImage.texture         = texture;
            aspectFitter.aspectRatio = cameraSource.width / (float)cameraSource.height;
            Debug.Log("NatCam camera source started with resolution: " + cameraSource.width + "x" + cameraSource.height + " isFrontFacing: " + cameraSource.isFrontFacing);

            if (fpsMonitor != null)
            {
                fpsMonitor.Add("width", cameraSource.width.ToString());
                fpsMonitor.Add("height", cameraSource.height.ToString());
                fpsMonitor.Add("isFrontFacing", cameraSource.isFrontFacing.ToString());
                fpsMonitor.Add("orientation", Screen.orientation.ToString());
            }
        }
Example #6
        protected override void OnStart()
        {
            base.OnStart();

            // Create pixel buffer
            pixelBuffer = new byte[cameraSource.width * cameraSource.height * 4];
            // Create texture
            if (texture != null)
            {
                Texture2D.Destroy(texture);
            }
            texture = new Texture2D(
                cameraSource.width,
                cameraSource.height,
                TextureFormat.RGBA32,
                false,
                false
                );
            // Display texture
            rawImage.texture         = texture;
            aspectFitter.aspectRatio = cameraSource.width / (float)cameraSource.height;
            Debug.Log("WebCam camera source started with resolution: " + cameraSource.width + "x" + cameraSource.height + " isFrontFacing: " + cameraSource.isFrontFacing);

            if (fpsMonitor != null)
            {
                fpsMonitor.Add("width", cameraSource.width.ToString());
                fpsMonitor.Add("height", cameraSource.height.ToString());
                fpsMonitor.Add("isFrontFacing", cameraSource.isFrontFacing.ToString());
                fpsMonitor.Add("orientation", Screen.orientation.ToString());
            }
        }
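
The pixelBuffer allocated above is sized width * height * 4 bytes to match the RGBA32 texture. Once the frame callback has filled it, it can be uploaded to the texture in a single call; a minimal sketch, where how and when the buffer gets filled is an assumption outside this snippet:

        // Illustrative only: in the example project this logic would live in the
        // OnFrame callback passed to StartPreview.
        void UploadFrameSketch()
        {
            // Assumed: pixelBuffer already holds the latest RGBA32 frame at this point.
            texture.LoadRawTextureData(pixelBuffer); // width * height * 4 bytes
            texture.Apply(false);                    // upload to the GPU, no mipmap update
        }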
Example #7
        /// <summary>
        /// Raises the webcam texture to mat helper initialized event.
        /// </summary>
        public void OnWebCamTextureToMatHelperInitialized()
        {
            Debug.Log("OnWebCamTextureToMatHelperInitialized");

            Mat webCamTextureMat = webCamTextureToMatHelper.GetMat();

            texture = new Texture2D(webCamTextureMat.cols(), webCamTextureMat.rows(), TextureFormat.RGBA32, false);
            Utils.fastMatToTexture2D(webCamTextureMat, texture);

            gameObject.GetComponent <Renderer>().material.mainTexture = texture;

            gameObject.transform.localScale = new Vector3(webCamTextureMat.cols(), webCamTextureMat.rows(), 1);
            Debug.Log("Screen.width " + Screen.width + " Screen.height " + Screen.height + " Screen.orientation " + Screen.orientation);

            if (fpsMonitor != null)
            {
                fpsMonitor.Add("width", webCamTextureToMatHelper.GetWidth().ToString());
                fpsMonitor.Add("height", webCamTextureToMatHelper.GetHeight().ToString());
                fpsMonitor.Add("isFrontFacing", webCamTextureToMatHelper.IsFrontFacing().ToString());
                fpsMonitor.Add("rotate90Degree", webCamTextureToMatHelper.rotate90Degree.ToString());
                fpsMonitor.Add("flipVertical", webCamTextureToMatHelper.flipVertical.ToString());
                fpsMonitor.Add("flipHorizontal", webCamTextureToMatHelper.flipHorizontal.ToString());
                fpsMonitor.Add("orientation", Screen.orientation.ToString());
            }


            float width  = webCamTextureMat.width();
            float height = webCamTextureMat.height();

            float widthScale  = (float)Screen.width / width;
            float heightScale = (float)Screen.height / height;

            if (widthScale < heightScale)
            {
                Camera.main.orthographicSize = (width * (float)Screen.height / (float)Screen.width) / 2;
            }
            else
            {
                Camera.main.orthographicSize = height / 2;
            }
        }
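
As a concrete check of the sizing logic: for a 640x480 frame on a 1080x1920 portrait screen, widthScale = 1080/640 ≈ 1.69 and heightScale = 1920/480 = 4.0, so the first branch applies and orthographicSize = (640 * 1920 / 1080) / 2 ≈ 568.9; the camera's horizontal extent then becomes exactly 640 units, so the quad scaled to the frame size fills the screen width.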
Example #8
        // Use this for initialization
        void Start()
        {
            // Load global camera benchmark settings.
            int width, height, fps;

            NatCamWithOpenCVForUnityExample.GetCameraResolution(out width, out height);
            NatCamWithOpenCVForUnityExample.GetCameraFps(out fps);
            requestedWidth  = width;
            requestedHeight = height;
            requestedFPS    = fps;

            fpsMonitor = GetComponent <FpsMonitor> ();
            if (fpsMonitor != null)
            {
                fpsMonitor.Add("Name", "WebCamTextureOnlyExample");
                fpsMonitor.Add("onFrameFPS", onFrameFPS.ToString("F1"));
                fpsMonitor.Add("drawFPS", drawFPS.ToString("F1"));
                fpsMonitor.Add("width", "");
                fpsMonitor.Add("height", "");
                fpsMonitor.Add("orientation", "");
            }

            imageProcessingTypeDropdown.value = (int)imageProcessingType;

            Initialize();
        }
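
Initialize() is not included in this snippet. A minimal sketch of what such an initializer typically does with the requested values; the field names follow the snippet above, but the body is illustrative only:

        // Illustrative only: the example's real Initialize() also handles device
        // selection, waits for the first frame to arrive, and reports errors.
        void InitializeSketch()
        {
            webCamTexture = new WebCamTexture(requestedWidth, requestedHeight, (int)requestedFPS);
            webCamTexture.Play();
            // Note: webCamTexture.width/height are only reliable after the first frame arrives.
            hasInitDone = true;
        }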
Example #9
        public virtual void Start()
        {
            fpsMonitor = GetComponent <FpsMonitor> ();

            if (!NatCam.Implementation.HasPermissions)
            {
                Debug.LogError("NatCam.Implementation.HasPermissions == false");

                if (fpsMonitor != null)
                {
                    fpsMonitor.consoleText = "NatCam.Implementation.HasPermissions == false";
                }
            }

            // Load global camera benchmark settings.
            int width, height, fps;

            NatCamWithOpenCVForUnityExample.GetCameraResolution(out width, out height);
            NatCamWithOpenCVForUnityExample.GetCameraFps(out fps);
            previewResolution = new NatCamU.Core.CameraResolution(width, height);
            requestedFPS      = fps;

            // Set the active camera
            NatCam.Camera = useFrontCamera ? DeviceCamera.FrontCamera : DeviceCamera.RearCamera;

            // Null checking
            if (!NatCam.Camera)
            {
                Debug.LogError("Camera is null. Consider using " + (useFrontCamera ? "rear" : "front") + " camera");
                return;
            }
            if (!preview)
            {
                Debug.LogError("Preview RawImage has not been set");
                return;
            }

            // Set the camera's preview resolution
            NatCam.Camera.PreviewResolution = previewResolution;
            // Set the camera framerate
            NatCam.Camera.Framerate = requestedFPS;
            NatCam.Play();
            NatCam.OnStart += OnStart;
            NatCam.OnFrame += OnFrame;

            if (fpsMonitor != null)
            {
                fpsMonitor.Add("Name", "NatCamPreviewOnlyExample");
                fpsMonitor.Add("onFrameFPS", onFrameFPS.ToString("F1"));
                fpsMonitor.Add("drawFPS", drawFPS.ToString("F1"));
                fpsMonitor.Add("width", "");
                fpsMonitor.Add("height", "");
                fpsMonitor.Add("orientation", "");
            }

            imageProcessingTypeDropdown.value = (int)imageProcessingType;
        }
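
The OnFrame handler subscribed above only needs to advance the counter behind onFrameFPS; a minimal sketch (the actual example's handler may do more):

        public virtual void OnFrame()
        {
            onFrameCount++;
        }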
Example #10
        /// <summary>
        /// Method called when the camera preview starts
        /// </summary>
        public virtual void OnStart()
        {
            // Set the preview RawImage texture once the preview starts
            preview.texture = NatCam.Preview;

            // Scale the panel to match aspect ratios
            aspectFitter.aspectRatio = NatCam.Preview.width / (float)NatCam.Preview.height;

            if (fpsMonitor != null)
            {
                fpsMonitor.consoleText = "";
            }

            Debug.Log("# Active Camera Properties #####################");

            try
            {
                Dictionary <string, string> cameraProps = new Dictionary <string, string> ();

                cameraProps.Add("IsFrontFacing", NatCam.Camera.IsFrontFacing.ToString());

                cameraProps.Add("Framerate", NatCam.Camera.Framerate.ToString());

                cameraProps.Add("PreviewResolution", NatCam.Camera.PreviewResolution.width + "x" + NatCam.Camera.PreviewResolution.height);
                cameraProps.Add("PhotoResolution", NatCam.Camera.PhotoResolution.width + "x" + NatCam.Camera.PhotoResolution.height);

                cameraProps.Add("ExposureMode", NatCam.Camera.ExposureMode.ToString());
                cameraProps.Add("ExposureBias", NatCam.Camera.ExposureBias.ToString());
                cameraProps.Add("MinExposureBias", NatCam.Camera.MinExposureBias.ToString());
                cameraProps.Add("MaxExposureBias", NatCam.Camera.MaxExposureBias.ToString());

                cameraProps.Add("IsFlashSupported", NatCam.Camera.IsFlashSupported.ToString());
                cameraProps.Add("FlashMode", NatCam.Camera.FlashMode.ToString());

                cameraProps.Add("FocusMode", NatCam.Camera.FocusMode.ToString());

                cameraProps.Add("HorizontalFOV", NatCam.Camera.HorizontalFOV.ToString());
                cameraProps.Add("VerticalFOV", NatCam.Camera.VerticalFOV.ToString());

                cameraProps.Add("IsTorchSupported", NatCam.Camera.IsTorchSupported.ToString());
                cameraProps.Add("TorchEnabled", NatCam.Camera.TorchEnabled.ToString());

                cameraProps.Add("MaxZoomRatio", NatCam.Camera.MaxZoomRatio.ToString());
                cameraProps.Add("ZoomRatio", NatCam.Camera.ZoomRatio.ToString());

                foreach (string key in cameraProps.Keys)
                {
                    Debug.Log(key + ": " + cameraProps[key]);
                }

                if (fpsMonitor != null)
                {
                    fpsMonitor.boxWidth  = 200;
                    fpsMonitor.boxHeight = 620;
                    fpsMonitor.LocateGUI();

                    foreach (string key in cameraProps.Keys)
                    {
                        fpsMonitor.Add(key, cameraProps[key]);
                    }
                }
            }
            catch (Exception e)
            {
                Debug.Log("Exception: " + e);
                if (fpsMonitor != null)
                {
                    fpsMonitor.consoleText = "Exception: " + e;
                }
            }
            Debug.Log("#######################################");


            Debug.Log("OnStart (): " + NatCam.Preview.width + " " + NatCam.Preview.height);
        }
Example #11
        // Update is called once per frame
        void Update()
        {
            updateCount++;
            elapsed += Time.deltaTime;
            if (elapsed >= 1f)
            {
                updateFPS    = updateCount / elapsed;
                onFrameFPS   = onFrameCount / elapsed;
                drawFPS      = drawCount / elapsed;
                updateCount  = 0;
                onFrameCount = 0;
                drawCount    = 0;
                elapsed      = 0;

                Debug.Log("didUpdateThisFrame: " + webCamTexture.didUpdateThisFrame + " updateFPS: " + updateFPS + " onFrameFPS: " + onFrameFPS + " drawFPS: " + drawFPS);
                if (fpsMonitor != null)
                {
                    fpsMonitor.Add("onFrameFPS", onFrameFPS.ToString("F1"));
                    fpsMonitor.Add("drawFPS", drawFPS.ToString("F1"));

                    if (texture != null)
                    {
                        fpsMonitor.Add("width", texture.width.ToString());
                        fpsMonitor.Add("height", texture.height.ToString());
                    }
                    fpsMonitor.Add("orientation", Screen.orientation.ToString());
                }
            }


            // Reinitialize when the screen orientation (and size) has changed.
            if (screenOrientation != Screen.orientation && (screenWidth != Screen.width || screenHeight != Screen.height))
            {
                Initialize();
            }
            else
            {
                screenWidth  = Screen.width;
                screenHeight = Screen.height;
            }

            if (hasInitDone && webCamTexture.isPlaying && webCamTexture.didUpdateThisFrame)
            {
                drawCount++;

                Color32[] colors = GetColors();

                if (colors != null)
                {
                    if (imageProcessingType != ImageProcessingType.None)
                    {
                        // Process
                        ProcessImage(colors, texture.width, texture.height, colors.Length, imageProcessingType);
                    }

                    // Set texture data
                    texture.SetPixels32(colors);
                    // Upload to GPU
                    texture.Apply();
                    // Display the result
                    preview.texture = texture;
                }
            }
        }
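
GetColors() is not shown in this snippet. A buffer-reusing sketch built on WebCamTexture.GetPixels32; the colorsBuffer field is an assumption used to avoid a per-frame allocation:

        Color32[] colorsBuffer; // assumed field, reused between frames

        Color32[] GetColorsSketch()
        {
            int length = webCamTexture.width * webCamTexture.height;
            if (colorsBuffer == null || colorsBuffer.Length != length)
            {
                colorsBuffer = new Color32[length];
            }
            webCamTexture.GetPixels32(colorsBuffer); // copy the current frame into the buffer
            return colorsBuffer;
        }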
Example #12
        protected override void OnStart()
        {
            base.OnStart();

            // Create matrices
            if (frameMatrix != null)
            {
                frameMatrix.Dispose();
            }
            frameMatrix = new Mat(cameraSource.height, cameraSource.width, CvType.CV_8UC4);
            if (grayMatrix != null)
            {
                grayMatrix.Dispose();
            }
            grayMatrix = new Mat(cameraSource.height, cameraSource.width, CvType.CV_8UC1);
            // Create texture
            if (texture != null)
            {
                Texture2D.Destroy(texture);
            }
            texture = new Texture2D(
                cameraSource.width,
                cameraSource.height,
                TextureFormat.RGBA32,
                false,
                false
                );
            // Display preview
            rawImage.texture         = texture;
            aspectFitter.aspectRatio = cameraSource.width / (float)cameraSource.height;
            Debug.Log("NatCam camera source started with resolution: " + cameraSource.width + "x" + cameraSource.height + " isFrontFacing: " + cameraSource.isFrontFacing);
            // Log camera properties
            var camera      = cameraSource.activeCamera;
            var cameraProps = new Dictionary <string, string>();

            cameraProps.Add("ExposureBias", camera.ExposureBias.ToString());
            cameraProps.Add("ExposureLock", camera.ExposureLock.ToString());
            cameraProps.Add("FlashMode", camera.FlashMode.ToString());
            cameraProps.Add("FocusLock", camera.FocusLock.ToString());
            cameraProps.Add("Framerate", camera.Framerate.ToString());
            cameraProps.Add("HorizontalFOV", camera.HorizontalFOV.ToString());
            cameraProps.Add("IsExposureLockSupported", camera.IsExposureLockSupported.ToString());
            cameraProps.Add("IsFlashSupported", camera.IsFlashSupported.ToString());
            cameraProps.Add("IsFocusLockSupported", camera.IsFocusLockSupported.ToString());
            cameraProps.Add("IsFrontFacing", camera.IsFrontFacing.ToString());
            //cameraProps.Add("IsRunning", camera.IsRunning.ToString());
            cameraProps.Add("IsTorchSupported", camera.IsTorchSupported.ToString());
            cameraProps.Add("IsWhiteBalanceLockSupported", camera.IsWhiteBalanceLockSupported.ToString());
            cameraProps.Add("MaxExposureBias", camera.MaxExposureBias.ToString());
            cameraProps.Add("MaxZoomRatio", camera.MaxZoomRatio.ToString());
            cameraProps.Add("MinExposureBias", camera.MinExposureBias.ToString());
            cameraProps.Add("PhotoResolution", camera.PhotoResolution.width + "x" + camera.PhotoResolution.height);
            cameraProps.Add("PreviewResolution", camera.PreviewResolution.width + "x" + camera.PreviewResolution.height);
            cameraProps.Add("TorchEnabled", camera.TorchEnabled.ToString());
            cameraProps.Add("UniqueID", camera.UniqueID.ToString());
            cameraProps.Add("VerticalFOV", camera.VerticalFOV.ToString());
            cameraProps.Add("WhiteBalanceLock", camera.WhiteBalanceLock.ToString());
            cameraProps.Add("ZoomRatio", camera.ZoomRatio.ToString());
            Debug.Log("# Active Camera Properties #####################");
            foreach (string key in cameraProps.Keys)
            {
                Debug.Log(key + ": " + cameraProps[key]);
            }
            Debug.Log("#######################################");

            if (fpsMonitor != null)
            {
                fpsMonitor.Add("width", cameraSource.width.ToString());
                fpsMonitor.Add("height", cameraSource.height.ToString());
                fpsMonitor.Add("orientation", Screen.orientation.ToString());

                fpsMonitor.boxWidth  = 240;
                fpsMonitor.boxHeight = 800;
                fpsMonitor.LocateGUI();

                foreach (string key in cameraProps.Keys)
                {
                    fpsMonitor.Add(key, cameraProps[key]);
                }
            }
        }
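
The frameMatrix and grayMatrix allocated above are typically paired in the frame handler: convert the RGBA preview to gray, run any single-channel processing, then convert back and upload for display. A minimal illustrative sketch; the method name is an assumption and the gray round-trip stands in for whatever processing the example actually performs:

        // Illustrative frame processing using the matrices created in OnStart().
        void ProcessFrameSketch()
        {
            Imgproc.cvtColor(frameMatrix, grayMatrix, Imgproc.COLOR_RGBA2GRAY); // RGBA preview -> single-channel gray
            // ... single-channel processing would go here ...
            Imgproc.cvtColor(grayMatrix, frameMatrix, Imgproc.COLOR_GRAY2RGBA); // back to RGBA for display
            Utils.fastMatToTexture2D(frameMatrix, texture, true, 0, false);     // upload, flipping vertically
        }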