// ===== 예제 #1 (Example #1) =====
    /// <summary>
    /// Pulls the latest camera frame from the Vuforia camera device and shows it
    /// on the capsule's material as its main texture.
    /// </summary>
    public void updateCamImage()
    {
        bool isActive = camDevice.IsActive();

        Debug.Log("camDevice active = " + isActive);

        camImage = camDevice.GetCameraImage(pixelFormat);
        if (camImage != null)
        {
            Debug.Log("camera width = " + camImage.Width + ", height = " + camImage.Height);

            // Reuse the texture already bound to the material when its size still
            // matches the camera frame. The original allocated a fresh Texture2D on
            // every call and never released the previous one, leaking GPU memory
            // each update.
            Material  material = capsule.GetComponent<Renderer>().material;
            Texture2D texImage = material.GetTexture("_MainTex") as Texture2D;
            if (texImage == null || texImage.width != camImage.Width || texImage.height != camImage.Height)
            {
                Texture2D stale = texImage;
                texImage = new Texture2D(camImage.Width, camImage.Height);
                if (stale != null)
                {
                    // NOTE(review): assumes any previous _MainTex was created at
                    // runtime by this method; if the material ships with an asset
                    // texture, this Destroy should be skipped — confirm.
                    Destroy(stale);
                }
            }

            camImage.CopyToTexture(texImage);
            texImage.Apply();

            material.SetTexture("_MainTex", texImage);
        }
        else
        {
            Debug.Log("camImage : null");
        }
    }
// ===== 예제 #2 (Example #2) =====
    /// <summary>
    /// Vuforia trackables-updated callback: registers the desired camera pixel
    /// format once, then (while m_LogInfo is set) logs the camera image geometry.
    /// </summary>
    public void OnTrackablesUpdated()
    {
        if (!m_RegisteredFormat)
        {
            // Only mark the format as registered when SetFrameFormat actually
            // succeeds; the original set the flag unconditionally, so a failed
            // registration was never retried on later frames.
            if (CameraDevice.Instance.SetFrameFormat(m_PixelFormat, true))
            {
                m_RegisteredFormat = true;
            }
        }
        if (m_LogInfo)
        {
            CameraDevice cam   = CameraDevice.Instance;
            Image        image = cam.GetCameraImage(m_PixelFormat);
            if (image == null)
            {
                // The frame for the requested format may lag a few frames behind
                // registration.
                Debug.Log(m_PixelFormat + " image is not available yet");
            }
            else
            {
                //Android build
                //message = "This demo is configured for the Nexus 7 tablet (screen aspect ratio 1.74, camera aspect ratio 1.33). Your screen resolution is " + Screen.width + " x " + Screen.height + " pixels (aspect ratio " + (Mathf.Round((float)Screen.width / Screen.height * 100) / 100) + "). Your camera has a resolution of " + image.Width + "x" + image.Height + " pixels (aspect ratio " + (Mathf.Round((float)image.Width / image.Height * 100) / 100) + ")";

                //Unity Editor demo
                message = "This demo is configured for a screen aspect ratio of 1.78 (16:9) and a camera aspect ratio of 1.33 (4:3). Your screen resolution is " + Screen.width + " x " + Screen.height + " pixels (aspect ratio " + (Mathf.Round((float)Screen.width / Screen.height * 100) / 100) + "). Your camera has a resolution of " + image.Width + "x" + image.Height + " pixels (aspect ratio " + (Mathf.Round((float)image.Width / image.Height * 100) / 100) + ")";

                string s = m_PixelFormat + " image: \n";
                s += "  size: " + image.Width + "x" + image.Height + "\n";
                s += "  bufferSize: " + image.BufferWidth + "x" + image.BufferHeight + "\n";
                s += "  stride: " + image.Stride;
                Debug.Log(s);
                // One-shot log: suppress further logging once geometry was printed.
                m_LogInfo = false;
            }
        }
    }
 /// <summary>
 /// Vuforia trackables-updated callback: registers the camera pixel format once
 /// and, while m_LogInfo is set, fetches the camera image into the `image` field
 /// and logs its geometry.
 /// </summary>
 public void OnTrackablesUpdated()
 {
     Debug.Log("Trackable updated called");
     if (!m_RegisteredFormat)
     {
         // Only latch the flag when registration succeeds, so a failure here
         // is retried on the next callback instead of being silently dropped.
         if (CameraDevice.Instance.SetFrameFormat(m_PixelFormat, true)) //HERE IT GIVES THE ERROR
         {
             m_RegisteredFormat = true;
         }
     }
     if (m_LogInfo)
     {
         CameraDevice cam = CameraDevice.Instance;
         // `image` is a field (not a local) so other members can see the frame.
         image = cam.GetCameraImage(m_PixelFormat);
         if (image == null)
         {
             Debug.Log(m_PixelFormat + " image is not available yet");
             //boxMesh.material.mainTexture = tx;
         }
         else
         {
             string s = m_PixelFormat + " image: \n";
             s += "  size: " + image.Width + "x" + image.Height + "\n";
             s += "  bufferSize: " + image.BufferWidth + "x" + image.BufferHeight + "\n";
             s += "  stride: " + image.Stride;
             Debug.Log(s);
         }
     }
 }
// ===== 예제 #4 (Example #4) =====
    /// <summary>
    /// This method retrieves the camera image and saves it to TargetStats as a Bitmap.
    /// It should run when the target's screen coordinates are OK.
    /// </summary>
    /// <returns>Coroutine enumerator; yields until the background PrepareImage pass completes.</returns>
    public IEnumerator TakeScreenshot()
    {
        CameraDevice cam   = CameraDevice.Instance;
        Image        image = cam.GetCameraImage(m_PixelFormat);

        if (image == null)
        {
            Debug.Log(m_PixelFormat + " image is not available yet");
            yield return null;
        }
        else
        {
            // Declare a texture with the size of the image
            Texture2D tex = new Texture2D(image.Width, image.Height, TextureFormat.RGB24, false);
            // Copy Image to the texture
            image.CopyToTexture(tex);
            tex.Apply();
            // Copy texture to byte array, then release the texture immediately —
            // only the PNG bytes are needed from here on.
            imageData = tex.EncodeToPNG();
            Destroy(tex);

            // PrepareImage should run on another thread to avoid drop of frame.
            // Mark it as background so a lingering worker cannot keep the
            // process alive after the application quits.
            Thread prepare = new Thread(new ThreadStart(PrepareImage));
            prepare.IsBackground = true;
            prepare.Start();

            // Poll once per frame until the worker signals completion.
            while (prepareOK == false)
            {
                yield return null;
            }
            if (writeFile)
            {
                // For testing purposes, also write to a file in the project folder.
                // using ensures the stream is disposed (the original leaked it).
                using (MemoryStream stream = new MemoryStream())
                {
                    iBitmap.Save(stream, iBitmap.RawFormat);
                    imageData = stream.ToArray();
                }
                if (Application.platform == RuntimePlatform.Android)
                {
                    File.WriteAllBytes(Application.persistentDataPath + "/Screenshot.png", imageData);
                }
                else
                {
                    File.WriteAllBytes("Screenshot.png", imageData);
                }
            }

            // Reset the handshake flag for the next capture.
            prepareOK = false;

            // Pass state and Bitmap to TargetStats
            targetStats.IMAGEBITMAP     = iBitmap;
            targetStats.TARGET_IMAGE_OK = true;

            yield return null;
        }
    }
// ===== 예제 #5 (Example #5) =====
    /// <summary>
    /// Vuforia trackables-updated callback: copies the raw camera frame into an
    /// OpenCV Mat, overlays a caption, converts it to a Texture2D and displays it
    /// on a full-screen quad sized to the camera image.
    /// </summary>
    public void OnTrackablesUpdated()
    {
        if (!m_RegisteredFormat)
        {
            // Only latch on success so a failed registration is retried.
            if (CameraDevice.Instance.SetFrameFormat(m_PixelFormat, true))
            {
                m_RegisteredFormat = true;
            }
        }

        CameraDevice cam   = CameraDevice.Instance;
        Image        image = cam.GetCameraImage(m_PixelFormat);

        if (image == null)
        {
            Debug.Log(m_PixelFormat + " image is not available yet");
            return;
        }

        // (Re)allocate the Mat on the first frame or if the camera resolution
        // changed; the original allocated once and would write out-of-bounds /
        // mismatched data after a resolution switch.
        // NOTE(review): CV_8UC1 assumes a single-channel (grayscale) pixel
        // format — confirm m_PixelFormat matches.
        if (inputMat == null || inputMat.rows() != image.Height || inputMat.cols() != image.Width)
        {
            inputMat = new Mat(image.Height, image.Width, CvType.CV_8UC1);
            //Debug.Log ("inputMat dst ToString " + inputMat.ToString ());
        }

        inputMat.put(0, 0, image.Pixels);

        Core.putText(inputMat, "CameraImageToMatSample " + inputMat.cols() + "x" + inputMat.rows(), new Point(5, inputMat.rows() - 5), Core.FONT_HERSHEY_PLAIN, 1.0, new Scalar(255, 0, 0, 255));

        // Keep the output texture in sync with the Mat dimensions as well.
        if (outputTexture == null || outputTexture.width != inputMat.cols() || outputTexture.height != inputMat.rows())
        {
            outputTexture = new Texture2D(inputMat.cols(), inputMat.rows(), TextureFormat.RGBA32, false);
        }

        Utils.matToTexture2D(inputMat, outputTexture);

        quad.transform.localScale = new Vector3((float)image.Width, (float)image.Height, 1.0f);
        quad.GetComponent<Renderer>().material.mainTexture = outputTexture;

        // Use float division: the original integer `image.Height / 2` truncated
        // half a pixel for odd heights, slightly mis-fitting the quad.
        mainCamera.orthographicSize = image.Height / 2f;
    }
// ===== 예제 #6 (Example #6) =====
    /// <summary>
    /// We can retrieve the image from this callback to ensure that
    /// we retrieve the latest camera image that matches the current frame.
    /// Determines the effective camera image dimensions (accounting for the
    /// rotated frame on Android portrait) and computes the scale factor once.
    /// </summary>
    public void OnTrackablesUpdated()
    {
        if (!m_RegisteredFormat)
        {
            // Only latch the flag when registration succeeds.
            if (CameraDevice.Instance.SetFrameFormat(m_PixelFormat, true))
            {
                m_RegisteredFormat = true;
            }
        }
        if (m_LogInfo)
        {
            CameraDevice cam   = CameraDevice.Instance;
            Image        image = cam.GetCameraImage(m_PixelFormat);
            if (image == null)
            {
                Debug.Log(m_PixelFormat + " image is not available yet");
            }
            else
            {
                // On Android portrait mode, the camera image is actually rotated
                // 90 degrees, so width and height must be swapped for calculation.
                // BUG FIX: the original only assigned the dimensions on Android
                // when the orientation was Portrait — Android landscape fell
                // through with imageWidth/imageHeight never set.
                bool rotatedFrame = Application.platform == RuntimePlatform.Android &&
                                    Screen.orientation == ScreenOrientation.Portrait;
                if (rotatedFrame)
                {
                    imageWidth  = image.Height;
                    imageHeight = image.Width;
                }
                else
                {
                    imageWidth  = image.Width;
                    imageHeight = image.Height;
                }

                //Calculate the scale factor after we have the right width and height
                CalculateScaleFactor();

                Debug.Log("Camera image: " + imageWidth + ":" + imageHeight + " Screen: " + Screen.width + ":" + Screen.height + ", ScaleByHeight = " + scaleByHeight + ", scalarFactor = " + scalarFactor);
                // One-shot: stop logging after the first successful frame.
                m_LogInfo = false;
            }
        }
    }
// ===== 예제 #7 (Example #7) =====
    /// <summary>
    /// Per-frame: registers the camera pixel format once, logs the current
    /// camera image geometry, and projects the four corners of the image target
    /// into screen coordinates (logged for debugging).
    /// </summary>
    public void Update()
    {
        if (mImageTargetBehaviour == null)
        {
            Debug.Log("ImageTargetBehaviour not found");
            return;
        }
        if (!m_RegisteredFormat)
        {
            // Only latch the flag on success so failures are retried next frame.
            if (CameraDevice.Instance.SetFrameFormat(m_PixelFormat, true))
            {
                m_RegisteredFormat = true;
            }
        }
        CameraDevice cam   = CameraDevice.Instance;
        Image        image = cam.GetCameraImage(m_PixelFormat);

        if (image == null)
        {
            Debug.Log(m_PixelFormat + " image is not available yet");
            return;
        }

        string s = m_PixelFormat + " image: \n";
        s += "  size: " + image.Width + "x" + image.Height + "\n";
        s += "  bufferSize: " + image.BufferWidth + "x" + image.BufferHeight + "\n";
        s += "  stride: " + image.Stride;
        Debug.Log(s);
        m_LogInfo = false;

        // NOTE: the original allocated `new Texture2D(2, 2)` here every frame
        // for masking code that is entirely commented out, leaking a texture
        // per Update. Removed; re-introduce (with Destroy) when the masking
        // logic is revived.

        // Get coordinates
        Vector2 targetSize   = mImageTargetBehaviour.GetSize();
        float   targetAspect = targetSize.x / targetSize.y;

        // The target reference plane in Unity is X-Z, while Y is the normal
        // direction to the target plane. Corners are expressed in the target's
        // local reference, converted to world space, then projected to screen
        // coordinates (pixels).
        float z = 0.5f / targetAspect;
        string[]  labels  = { "bottom left", "bottom right", "top right", "top left" };
        Vector3[] corners =
        {
            new Vector3(-0.5f, 0, -z),
            new Vector3(0.5f, 0, -z),
            new Vector3(0.5f, 0, z),
            new Vector3(-0.5f, 0, z),
        };

        // Cache the main camera lookup instead of re-resolving it per corner.
        Camera mainCam = Camera.main;
        for (int i = 0; i < corners.Length; i++)
        {
            Vector3 worldPoint  = transform.TransformPoint(corners[i]);
            Vector3 screenPoint = mainCam.WorldToScreenPoint(worldPoint);
            Debug.Log("target point in screen coords (" + labels[i] + "): " + screenPoint.x + ", " + screenPoint.y);
        }

        // (Commented-out experimental code that masked the target region of the
        // camera image and saved it as a PNG was removed from here; see version
        // control history if it needs to be restored.)
    }
// ===== 예제 #8 (Example #8) =====
    /// <summary>
    /// Vuforia trackables-updated callback: registers the requested pixel format
    /// (retrying until SetFrameFormat succeeds), then wraps the latest camera
    /// frame's pixel buffer in an Emgu CV Mat — without copying — and processes
    /// it in place (demo: channel-preserving inversion).
    /// </summary>
    public void OnTrackablesUpdated()
    {
        if (!m_RegisteredFormat)
        {
            // Flag is only set on success, so a failed registration is retried
            // on subsequent callbacks.
            if (CameraDevice.Instance.SetFrameFormat(m_PixelFormat, true))
            {
                m_RegisteredFormat = true;
            }
        }

        CameraDevice cam   = CameraDevice.Instance;
        Image        image = cam.GetCameraImage(m_PixelFormat);

        if (image == null)
        {
            // The frame for the registered format may not be available yet.
            Debug.Log("Image is not available yet");
        }
        else
        {
            // Pin the managed pixel array so native OpenCV code can read it via
            // a raw pointer without the GC relocating it mid-operation.
            byte[]   pixels      = image.Pixels;
            GCHandle pixelHandle = GCHandle.Alloc(pixels, GCHandleType.Pinned);
            try
            {
                if (m_PixelFormat == Image.PIXEL_FORMAT.RGBA8888)
                {
                    // 4-channel Mat wraps the pinned buffer directly; passing
                    // image.Stride keeps row padding correct.
                    using (
                        Mat m = new Mat(new Size(image.Width, image.Height), DepthType.Cv8U, 4,
                                        pixelHandle.AddrOfPinnedObject(), image.Stride))
                        using (Mat alphaChannel = new Mat())
                        {
                            //process the image (RGBA) here, replace the following with your code
                            CvInvoke.ExtractChannel(m, alphaChannel, 3); //extract the alphaChannel
                            CvInvoke.BitwiseNot(m, m);                   //simple inversion, invert all channels including alpha
                            CvInvoke.InsertChannel(alphaChannel, m, 3);  //put the alphaChannel back
                        }
                }
                else if (m_PixelFormat == Image.PIXEL_FORMAT.RGB888)
                {
                    // Same zero-copy wrap, 3 channels for RGB data.
                    using (
                        Mat m = new Mat(new Size(image.Width, image.Height), DepthType.Cv8U, 3,
                                        pixelHandle.AddrOfPinnedObject(), image.Stride))
                    {
                        //process the image (RGB) here, replace the following with your code.
                        CvInvoke.BitwiseNot(m, m);
                    }
                }
                else
                {
                    // Unsupported format: log the geometry for diagnosis instead
                    // of processing.
                    string s = String.Format("Image type {0} is not supported\n", m_PixelFormat);
                    s += "  size: " + image.Width + "x" + image.Height + "\n";
                    s += "  bufferSize: " + image.BufferWidth + "x" + image.BufferHeight + "\n";
                    s += "  stride: " + image.Stride;
                    Debug.Log(s);
                }
            }
            finally
            {
                // Always unpin, even if an OpenCV call throws; a leaked pinned
                // handle would fragment the managed heap.
                pixelHandle.Free();
            }
        }
    }