Example #1
0
    /// <summary>
    /// Acquires the latest CPU camera image, converts it to RGBA32 (mirrored
    /// across the vertical axis) and returns it encoded as a JPG byte array.
    /// Returns null when no CPU image is currently available.
    /// </summary>
    public unsafe byte[] CamGetFrame()
    {
        XRCpuImage image;

        // Guard clause: nothing to do when no CPU image can be acquired.
        if (!m_CameraManager.TryAcquireLatestCpuImage(out image))
        {
            return null;
        }

        var conversionParams = new XRCpuImage.ConversionParams
        {
            // Get the entire image, at full resolution.
            inputRect = new RectInt(0, 0, image.width, image.height),

            outputDimensions = new Vector2Int(image.width, image.height),

            // Choose RGBA format.
            outputFormat = TextureFormat.RGBA32,

            // Flip across the vertical axis (mirror image).
            transformation = XRCpuImage.Transformation.MirrorY
        };

        // See how many bytes we need to store the final image.
        int size = image.GetConvertedDataSize(conversionParams);

        // Allocate a buffer to store the image.
        var buffer = new NativeArray<byte>(size, Allocator.Temp);

        try
        {
            // Extract the image data into the buffer.
            image.Convert(conversionParams, new IntPtr(buffer.GetUnsafePtr()), buffer.Length);
            Debug.Log("buffer.Length: " + buffer.Length);
        }
        finally
        {
            // The XRCpuImage owns a native resource; it must be disposed even
            // if Convert throws, or it will leak (same pattern as Example #3).
            image.Dispose();
        }

        // Put the RGBA32 data into a texture so it can be JPG-encoded.
        Texture2D texture = new Texture2D(
            conversionParams.outputDimensions.x,
            conversionParams.outputDimensions.y,
            conversionParams.outputFormat,
            false);

        texture.LoadRawTextureData(buffer);
        texture.Apply();
        buffer.Dispose();

        byte[] jpg = texture.EncodeToJPG(100);
        // Destroy the temporary texture so repeated calls do not leak textures.
        Destroy(texture);
        return jpg;
    }
Example #2
0
    /// <summary>
    /// Acquires the latest CPU camera image, converts it to a half-resolution
    /// RGBA32 texture (mirrored across the vertical axis) and hands the
    /// texture to the MediaPipe hand processor.
    /// </summary>
    unsafe void convertCPUImage()
    {
        XRCpuImage image;

        if (!cameraManager.TryAcquireLatestCpuImage(out image))
        {
            Debug.Log("Cant get image");
            return;
        }

        // Lazily derive the aspect-ratio constants from the first frame seen.
        if (float.IsNegativeInfinity(ALPHA))
        {
            ALPHA      = (float)image.height / image.width;
            imageRatio = (float)(BETA / ALPHA);
        }

        var conversionParams = new XRCpuImage.ConversionParams {
            // Get the entire image
            inputRect = new RectInt(0, 0, image.width, image.height),
            // Downsample by 2
            outputDimensions = new Vector2Int(image.width / 2, image.height / 2),
            // Choose RGBA format
            outputFormat = TextureFormat.RGBA32,
            // Flip across the vertical axis (mirror image)
            transformation = XRCpuImage.Transformation.MirrorY
        };

        int size = image.GetConvertedDataSize(conversionParams);

        var buffer = new NativeArray<byte>(size, Allocator.Temp);

        try
        {
            image.Convert(conversionParams, new IntPtr(buffer.GetUnsafePtr()), buffer.Length);
        }
        finally
        {
            // The XRCpuImage owns a native resource; dispose it even if
            // Convert throws, or it will leak (same pattern as Example #3).
            image.Dispose();
        }

        // NOTE(review): a new Texture2D is created every call and handed to
        // handProcessor; presumably the processor takes ownership — confirm it
        // destroys the texture, otherwise this leaks one texture per frame.
        Texture2D m_Texture = new Texture2D(
            conversionParams.outputDimensions.x,
            conversionParams.outputDimensions.y,
            conversionParams.outputFormat,
            false);

        m_Texture.LoadRawTextureData(buffer);
        m_Texture.Apply();
        buffer.Dispose();

        // pass image for mediapipe
        long time = new DateTimeOffset(DateTime.Now).ToUnixTimeMilliseconds();

        Debug.Log("Texture loaded at: " + time.ToString());
        handProcessor.addFrameTexture(m_Texture);
    }
Example #3
0
        /// <summary>
        /// Per-frame camera callback: copies the latest CPU camera image into
        /// m_CameraTexture (converted to RGBA32, mirrored across Y), binds it
        /// to the RawImage material, and updates the display-rotation matrices
        /// derived from the frame's display matrix.
        /// </summary>
        unsafe void OnCameraFrameReceived(ARCameraFrameEventArgs eventArgs)
        {
            // Attempt to get the latest camera image. If this method succeeds,
            // it acquires a native resource that must be disposed (see below).
            XRCpuImage image;

            if (!m_CameraManager.TryAcquireLatestCpuImage(out image))
            {
                return;
            }

            var format = TextureFormat.RGBA32;

            if (m_CameraTexture == null || m_CameraTexture.width != image.width || m_CameraTexture.height != image.height)
            {
                // Destroy the stale texture first so a resolution change does
                // not leak the previous texture's GPU/native memory.
                if (m_CameraTexture != null)
                {
                    Destroy(m_CameraTexture);
                }
                m_CameraTexture = new Texture2D(image.width, image.height, format, false);
            }

            // Convert the image to format, flipping the image across the Y axis.
            // We can also get a sub rectangle, but we'll get the full image here.
            var conversionParams = new XRCpuImage.ConversionParams(image, format, XRCpuImage.Transformation.MirrorY);
            // Texture2D allows us write directly to the raw texture data.
            // This allows us to do the conversion in-place without making any copies.
            var rawTextureData = m_CameraTexture.GetRawTextureData <byte>();

            try
            {
                image.Convert(conversionParams, new IntPtr(rawTextureData.GetUnsafePtr()), rawTextureData.Length);
            }
            finally
            {
                // We must dispose of the XRCpuImage after we're finished
                // with it to avoid leaking native resources.
                image.Dispose();
            }

            // Apply the updated texture data to our texture.
            m_CameraTexture.Apply();

            // Set the RawImage's texture so we can visualize it.
            m_RawImage.material.SetTexture("_CameraTex", m_CameraTexture);

            ///////////////////////////////////////

            Matrix4x4 cameraMatrix = eventArgs.displayMatrix ?? Matrix4x4.identity;

            // Extract the 2x2 affine basis and translation from the display
            // matrix (dead default initializers removed — the values were
            // always overwritten immediately).
            Vector2 affineBasisX      = new Vector2(cameraMatrix[0, 0], cameraMatrix[1, 0]);
            Vector2 affineBasisY      = new Vector2(cameraMatrix[0, 1], cameraMatrix[1, 1]);
            Vector2 affineTranslation = new Vector2(cameraMatrix[2, 0], cameraMatrix[2, 1]);

            // The camera display matrix includes scaling and offsets to fit the aspect ratio of the device. In most
            // cases, the camera display matrix should be used directly without modification when applying depth to
            // the scene because that will line up the depth image with the camera image. However, for this demo,
            // we want to show the full depth image as a picture-in-picture, so we remove these scaling and offset
            // factors while preserving the orientation.
            affineBasisX                  = affineBasisX.normalized;
            affineBasisY                  = affineBasisY.normalized;
            m_DisplayRotationMatrix       = Matrix4x4.identity;
            m_DisplayRotationMatrix[0, 0] = affineBasisX.x;
            m_DisplayRotationMatrix[0, 1] = affineBasisY.x;
            m_DisplayRotationMatrix[1, 0] = affineBasisX.y;
            m_DisplayRotationMatrix[1, 1] = affineBasisY.y;
            m_DisplayRotationMatrix[2, 0] = Mathf.Round(affineTranslation.x);
            m_DisplayRotationMatrix[2, 1] = Mathf.Round(affineTranslation.y);

            // Set the matrix to the raw image material.
            m_RawImage.material.SetMatrix(k_DisplayRotationPerFrameId, m_DisplayRotationMatrix);
            m_RawImage.material.SetMatrix(k_InverseMatrixId, m_DisplayRotationMatrix.inverse);
        }
Example #4
0
    // Update is called once per frame.
    // Per frame: refreshes the FPS label, converts up to three depth textures
    // (Env / HumanDepth / HumanStencil) into a readable RFloat buffer, renders
    // the environment depth as a colorized preview, and — when a save or
    // recording is active — writes depth CSVs plus a camera JPG and depth PNG.
    void Update()
    {
        // update fps
        {
            frameCount++;

            float current = Time.realtimeSinceStartup;
            if (nextUpdate <= current)
            {
                // Refresh the on-screen FPS label roughly once per second.
                float fps = frameCount / (current - lastUpdate);
                TextFPS.text = "FPS: " + String.Format("{0:00.0}", fps);
                lastUpdate   = current;
                nextUpdate   = current + 1.0f;
                frameCount   = 0;
            }
        }

        // Save this frame either on a one-shot request (save) or while a
        // recording session still has frames left to capture.
        bool saveCurrent = (save || (rec && saveRestCount > 0));

        // 0: Env, 1: HumanDepth, 2: HumanStencil
        for (int d = 0; d < 3; d++)
        {
            Texture2D depthMap = (Texture2D)depths[d].texture;

            if (depthMap == null || depthMap.width <= 0)
            {
                Debug.Log("depth[" + d + "] is invalid. " + depthMap);
                continue;
            }

            // Lazily allocate a single-channel float render target matching
            // this depth map; kept and reused on subsequent frames.
            if (rts[d] == null)
            {
                rts[d] = RenderTexture.GetTemporary(
                    depthMap.width, depthMap.height, 0, RenderTextureFormat.RFloat, RenderTextureReadWrite.Default);
            }
            Graphics.Blit(depthMap, rts[d]);
            // Remember the active render target so it can be restored after ReadPixels.
            RenderTexture pre = RenderTexture.active;
            RenderTexture.active = rts[d];

            // envDepth(png), csv, color

            // First environment-depth frame: create the preview texture and a
            // reusable RGBA byte buffer whose alpha channel is pre-filled once.
            if (d == 0 && texShow[0] == null)
            {
                texShow[0]      = new Texture2D(rts[d].width, rts[d].height, TextureFormat.RGBA32, false);
                imgShow.texture = texShow[0];
                bytes           = new byte[rts[d].width * rts[d].height * 4];
                // fill alpha
                for (int i = 0; i < rts[d].width * rts[d].height; i++)
                {
                    bytes[i * 4 + 3] = (byte)255;
                }
            }

            // NOTE(review): texBuffer is sized from whichever depth map is seen
            // first; if the three depth textures differ in resolution, the
            // ReadPixels below may not match the buffer size — verify.
            if (texBuffer == null)
            {
                texBuffer = new Texture2D(rts[d].width, rts[d].height, TextureFormat.RFloat, false);
            }
            texBuffer.ReadPixels(new Rect(0, 0, rts[d].width, rts[d].height), 0, 0);
            texBuffer.Apply();

            RenderTexture.active = pre;

            Color[] cs = texBuffer.GetPixels();

            if (d == 0)
            {
                // make color scale

                const float range7    = 8.0f / 7; // up to 8 m, split into 7 color segments
                const float range2557 = 255.0f / range7;
                const float range1277 = 127.5f / range7;
                // Rows are read bottom-up from cs (index j) and written top-down
                // into bytes (index i), flipping the preview vertically.
                for (int y = rts[d].height; y > 0;)
                {
                    y--;

                    for (int x = 0; x < rts[d].width; x++)
                    {
                        int   j = (rts[d].height - 1 - y) * rts[d].width + x;
                        float f = cs[j].r;

                        int i = y * rts[d].width + x;

                        // Map the depth value f through a 7-segment color ramp
                        // (blue -> cyan -> green -> yellow/red -> magenta -> white);
                        // anything beyond the last segment clamps to white.
                        byte r = 0;
                        byte g = 0;
                        byte b = 0;
                        if (f < range7)
                        {
                            b = (byte)(f * range2557);
                        }
                        else if (f < range7 * 2)
                        {
                            b = (byte)255;
                            g = (byte)((f - range7) * range1277);
                        }
                        else if (f < range7 * 3)
                        {
                            g = (byte)((f - range7 * 2) * range1277 + 127.5f);
                            b = (byte)((range7 * 3 - f) * range2557);
                        }
                        else if (f < range7 * 4)
                        {
                            g = (byte)255;
                            r = (byte)((f - range7 * 3) * range2557);
                        }
                        else if (f < range7 * 5)
                        {
                            r = (byte)255;
                            g = (byte)((range7 * 5 - f) * range2557);
                        }
                        else if (f < range7 * 6)
                        {
                            r = (byte)255;
                            b = (byte)((f - range7 * 5) * range2557);
                        }
                        else if (f < range7 * 7)
                        {
                            r = (byte)255;
                            b = (byte)255;
                            g = (byte)((f - range7 * 6) * range2557);
                        }
                        else
                        {
                            r = (byte)255;
                            b = (byte)255;
                            g = (byte)255;
                        }

                        bytes[i * 4 + 0] = r;
                        bytes[i * 4 + 1] = g;
                        bytes[i * 4 + 2] = b;
                    }
                }

                texShow[0].LoadRawTextureData(bytes);
                texShow[0].Apply();
            }

            if (saveCurrent)
            {
                Debug.Log("save depth.");

                // Clear the one-shot save flag; the recording flag (rec) is
                // managed in the branch below.
                save = false;

                // Dump raw depth values as CSV, one comma-separated row per
                // image row, flipped vertically like the preview above.
                StringBuilder sb = new StringBuilder();
                for (int y = rts[d].height; y > 0;)
                {
                    y--;

                    for (int x = 0; x < rts[d].width; x++)
                    {
                        int   j = (rts[d].height - 1 - y) * rts[d].width + x;
                        float f = cs[j].r;

                        if (x != 0)
                        {
                            sb.Append(",");
                        }
                        sb.Append(f);
                    }

                    sb.AppendLine();
                }

                string fileName;
                if (rec)
                {
                    // rec frames
                    // Recording: file name is the session start timestamp plus
                    // a zero-padded frame counter (keta = zero-pad helper).
                    saveCount++;
                    DateTime now = startRec;
                    fileName = keta(now.Year, 2) + keta(now.Month, 2) + keta(now.Day, 2) + keta(now.Hour, 2) + keta(now.Minute, 2) + keta(now.Second, 2);
                    fileName = fileName + "_" + keta(saveCount, 2);
                    saveRestCount--;
                    if (saveRestCount <= 0)
                    {
                        // Recording finished: reset the button label.
                        rec = false;
                        ButtonStartRec.GetComponentInChildren <Text>().text = "Start Rec.";
                    }
                    else
                    {
                        ButtonStartRec.GetComponentInChildren <Text>().text = "Stop (" + saveCount + ")";
                    }
                }
                else
                {
                    // save 1 frame
                    DateTime now = DateTime.Now;
                    fileName = keta(now.Year, 2) + keta(now.Month, 2) + keta(now.Day, 2) + keta(now.Hour, 2) + keta(now.Minute, 2) + keta(now.Second, 2);
                }

                if (d == 0)
                {
                    // color(jpg) & depth(png)
                    XRCpuImage image;
                    if (cm.TryAcquireLatestCpuImage(out image))
                    {
                        var conversionParams = new XRCpuImage.ConversionParams
                        {
                            inputRect        = new RectInt(0, 0, image.width, image.height),
                            outputDimensions = new Vector2Int(image.width / 2, image.height / 2),
                            outputFormat     = TextureFormat.RGBA32,
                            transformation   = XRCpuImage.Transformation.MirrorX
                        };
                        int size = image.GetConvertedDataSize(conversionParams);
                        // NOTE(review): b4 is cached across frames but allocated
                        // with Allocator.Temp, which Unity invalidates at the end
                        // of the frame — this likely needs Allocator.Persistent;
                        // verify against Unity's NativeArray allocator docs.
                        if (b4 == null || b4.Length != size)
                        {
                            if (b4 != null)
                            {
                                b4.Dispose();
                            }
                            b4 = new NativeArray <byte>(size, Allocator.Temp);
                        }
                        unsafe
                        {
                            image.Convert(conversionParams, new IntPtr(b4.GetUnsafePtr()), b4.Length);
                        }
                        image.Dispose();

                        // Reuse the cached camera texture unless its shape or
                        // format no longer matches the conversion output.
                        if (t4 == null ||
                            t4.width != conversionParams.outputDimensions.x ||
                            t4.height != conversionParams.outputDimensions.y ||
                            t4.format != conversionParams.outputFormat)
                        {
                            t4 = new Texture2D(
                                conversionParams.outputDimensions.x,
                                conversionParams.outputDimensions.y,
                                conversionParams.outputFormat, false);
                        }
                        t4.LoadRawTextureData(b4);
                        t4.Apply();

                        byte[] jpg = t4.EncodeToJPG();
                        File.WriteAllBytes(Application.persistentDataPath + "/" + fileName + ".jpg", jpg);
                    }

                    byte[] png = texShow[0].EncodeToPNG();
                    File.WriteAllBytes(Application.persistentDataPath + "/" + fileName + names[d] + ".png", png);
                }

                // csv
                File.WriteAllText(Application.persistentDataPath + "/" + fileName + names[d] + ".csv", sb.ToString());
            }
        }
    }