Code example #1
File: TestArCamCap.cs  Project: zjingcong/mp
    /// <summary>
    /// Called each time the Vuforia state is updated
    /// </summary>
    private void OnTrackablesUpdated()
    {
        if (mFormatRegistered)
        {
            if (mAccessCameraImage)
            {
                Vuforia.Image image = CameraDevice.Instance.GetCameraImage(mPixelFormat);
                if (image != null)
                {
                    /*
                     * string imageInfo = mPixelFormat + " image: \n";
                     * imageInfo += " size: " + image.Width + " x " + image.Height + "\n";
                     * imageInfo += " bufferSize: " + image.BufferWidth + " x " + image.BufferHeight + "\n";
                     * imageInfo += " stride: " + image.Stride;
                     * Debug.Log(imageInfo);
                     */
                    byte[] pixels = image.Pixels;
                    // Debug.Log("pixels length: " + pixels.Length);
                    if (pixels != null && pixels.Length > 0)
                    {
                        /*
                         * Texture2D tex = new Texture2D(image.Width, image.Height, TextureFormat.RGB24, false);
                         * image.CopyToTexture(tex);
                         * rawImage.texture = tex;
                         * rawImage.material.mainTexture = tex;
                         */

                        currentFrame = new Mat(image.Height, image.Width, CvType.CV_8UC4);
                        currentFrame.put(0, 0, pixels);
                        Texture2D tex = new Texture2D(image.Width, image.Height, TextureFormat.RGBA32, false);
                        Utils.matToTexture2D(currentFrame, tex);

                        rawImage.texture = tex;
                        rawImage.material.mainTexture = tex;

                        // Imgcodecs.imwrite("D:\\dpa\\unity3d\\Hide_n_Seek_AR\\TestTmpFiles\\test_frame_pre.png", currentFrame);
                        Imgproc.cvtColor(currentFrame, currentFrame, Imgproc.COLOR_BGRA2RGBA);
                        // Imgcodecs.imwrite("D:\\dpa\\unity3d\\Hide_n_Seek_AR\\TestTmpFiles\\test_frame_post.png", currentFrame);


                        /*
                         * Texture2D tex = new Texture2D(image.Width, image.Height, TextureFormat.RGBA32, false);
                         * tex.LoadRawTextureData(pixels);
                         * tex.Apply();
                         * rawImage.texture = tex;
                         * rawImage.material.mainTexture = tex;
                         */

                        // Debug.Log("texture length: " + tex.GetRawTextureData().Length);

                        /*
                         * Mat mat = new Mat(image.Height, image.Width, MatType.CV_8UC4, pixels);
                         * Cv2.CvtColor(mat, mat, ColorConversion.BgraToRgb);
                         * Cv2.ImShow("test", mat);
                         */
                    }
                }
            }
        }
    }
Code example #2
 /// <summary>
 /// Called each time the Vuforia state is updated
 /// </summary>
 void OnTrackablesUpdated()
 {
     if (mFormatRegistered)
     {
         if (mAccessCameraImage)
         {
             Vuforia.Image image = CameraDevice.Instance.GetCameraImage(mPixelFormat);
             if (image != null)
             {
                 //Debug.Log(
                 //    "\nImage Format: " + image.PixelFormat +
                 //    "\nImage Size:   " + image.Width + "x" + image.Height +
                 //    "\nBuffer Size:  " + image.BufferWidth + "x" + image.BufferHeight +
                 //    "\nImage Stride: " + image.Stride + "\n"
                 //);
                 byte[] pixels = image.Pixels;
                 if (pixels != null && pixels.Length > 0)
                 {
                     //Debug.Log(
                     //    "\nImage pixels: " +
                     //    pixels[0] + ", " +
                     //    pixels[1] + ", " +
                     //    pixels[2] + ", ...\n"
                     //);
                 }
             }
         }
     }
 }
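Most snippets on this page gate camera access behind mFormatRegistered and mAccessCameraImage. Those flags come from a setup step, not shown here, that registers a pixel format once Vuforia has started. A minimal sketch of that boilerplate, assuming the Vuforia 6+ callback API and the field names used throughout these examples:

    private Vuforia.Image.PIXEL_FORMAT mPixelFormat = Vuforia.Image.PIXEL_FORMAT.RGB888;
    private bool mFormatRegistered  = false;
    private bool mAccessCameraImage = true;

    void Start()
    {
        // Defer format registration until the camera is up and running
        VuforiaARController.Instance.RegisterVuforiaStartedCallback(OnVuforiaStarted);
        VuforiaARController.Instance.RegisterTrackablesUpdatedCallback(OnTrackablesUpdated);
    }

    private void OnVuforiaStarted()
    {
        // SetFrameFormat returns false if the device cannot deliver
        // camera frames in the requested pixel format
        if (CameraDevice.Instance.SetFrameFormat(mPixelFormat, true))
        {
            Debug.Log("Successfully registered pixel format " + mPixelFormat);
            mFormatRegistered = true;
        }
        else
        {
            Debug.LogError("Failed to register pixel format " + mPixelFormat);
            mFormatRegistered = false;
        }
    }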
Code example #3
    public bool getImage()
    {
        if (CameraDevice.Instance.SetFrameFormat(mPixelFormat, true))
        {
            Debug.Log("Successfully registered pixel format " + mPixelFormat.ToString());
            Vuforia.Image image = CameraDevice.Instance.GetCameraImage(mPixelFormat);
            if (image != null)
            {
                string imageInfo = mPixelFormat + " image: \n";
                imageInfo += " size: " + image.Width + " x " + image.Height + "\n";
                imageInfo += " bufferSize: " + image.BufferWidth + " x " + image.BufferHeight + "\n";
                imageInfo += " stride: " + image.Stride;
                Debug.Log(imageInfo);
                byte[] pixels = image.Pixels;
                if (pixels != null && pixels.Length > 0)
                {
                    Debug.Log("Image pixels: " + pixels [0] + "," + pixels [1] + "," + pixels [2] + ",...");

                    saveImage(image);
                }
                else
                {
                    return(false);
                }
            }
            else
            {
                Debug.Log("Faill to save the image");
                return(false);
            }
        }
        else
        {
            Debug.Log("Failed to register pixel format " + mPixelFormat.ToString());
            return(false);
        }
        return(true);
    }
Code example #4
        /// <summary>
        /// Calculates an average pixel value in HSV space for the given image format.
        /// </summary>
        /// <param name="image">      Image.</param>
        /// <param name="pixelGetter">Pixel getter.</param>
        void GetAveragePixelIntensity(Vuforia.Image image, GetPixelFromBuffer pixelGetter)
        {
            float avgH, avgS, avgV;

            avgH = avgS = avgV = 0;

            int bytesPerPixel = image.Stride / image.Width;
            int numSamples    = NumSampleRows * NumSamplesPerRow;

            for (int r = 0; r < NumSampleRows; ++r)
            {
                int y = Random.Range(0, image.Height);
                for (int c = 0; c < NumSamplesPerRow; ++c)
                {
                    int x = Random.Range(0, image.Width);
                    int i = y * image.Stride + x * bytesPerPixel;

                    Color pixel = pixelGetter(image.Pixels, i);

                    float h, s, v;
                    Color.RGBToHSV(pixel, out h, out s, out v);

                    // Average over the number of sampled pixels rather than the full
                    // image size, so the mean is not scaled down by unsampled pixels
                    avgH += h / numSamples;
                    avgS += s / numSamples;
                    avgV += v / numSamples;
                }
            }

            lastIntensity = avgV;
            lastColor     = Color.HSVToRGB(avgH, avgS, avgV);
        }
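The GetPixelFromBuffer delegate used above is not defined in this snippet. A plausible sketch, assuming it maps a byte offset into image.Pixels to a Unity Color (both getters below are illustrative):

    delegate Color GetPixelFromBuffer(byte[] pixels, int offset);

    // For Vuforia.Image.PIXEL_FORMAT.RGB888: 3 bytes per pixel
    static Color GetPixelRGB888(byte[] pixels, int offset)
    {
        return new Color(pixels[offset] / 255f,
                         pixels[offset + 1] / 255f,
                         pixels[offset + 2] / 255f);
    }

    // For Vuforia.Image.PIXEL_FORMAT.GRAYSCALE: 1 byte per pixel
    static Color GetPixelGrayscale(byte[] pixels, int offset)
    {
        float v = pixels[offset] / 255f;
        return new Color(v, v, v);
    }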
Code example #5
    public static Texture2D GetLatestTexture()
    {
        Instance.RegisterFormat();

        Vuforia.Image image = CameraDevice.Instance.GetCameraImage(Instance.mPixelFormat);
        Debug.Log("CustomMessage: Pixel format: " + Instance.mPixelFormat);

        if (image != null)
        {
            // Only touch image.Pixels after the null check
            Debug.Log("CustomMessage: Pixel count: " + image.Pixels.Length);
            byte[] pixels = image.Pixels;

            texture.Resize(image.Width, image.Height);
            texture.LoadRawTextureData(pixels);
            texture.Apply();

            texture = RotateTexture(texture);
            FlipTexture(texture, false);
            // rawImage.texture = texture;
            // rawImage.material.mainTexture = texture;

            return(texture);
        }

        return(null);
    }
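RotateTexture and FlipTexture are project helpers not included in this snippet. One possible implementation, assuming a 90-degree clockwise rotation and a flip whose bool selects horizontal (true) versus vertical (false):

    static Texture2D RotateTexture(Texture2D src)
    {
        Color32[] pixels = src.GetPixels32();
        int w = src.width, h = src.height;
        Color32[] rotated = new Color32[pixels.Length];

        // 90 degrees clockwise: source (x, y) lands at destination (h - 1 - y, x)
        for (int y = 0; y < h; y++)
        {
            for (int x = 0; x < w; x++)
            {
                rotated[x * h + (h - 1 - y)] = pixels[y * w + x];
            }
        }

        Texture2D dst = new Texture2D(h, w, TextureFormat.RGBA32, false);
        dst.SetPixels32(rotated);
        dst.Apply();
        return dst;
    }

    static void FlipTexture(Texture2D tex, bool horizontal)
    {
        Color32[] pixels  = tex.GetPixels32();
        int w = tex.width, h = tex.height;
        Color32[] flipped = new Color32[pixels.Length];

        for (int y = 0; y < h; y++)
        {
            for (int x = 0; x < w; x++)
            {
                flipped[y * w + x] = horizontal
                    ? pixels[y * w + (w - 1 - x)]   // mirror columns
                    : pixels[(h - 1 - y) * w + x];  // mirror rows
            }
        }

        tex.SetPixels32(flipped);
        tex.Apply();
    }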
Code example #6
File: saveCapture.cs  Project: StattikRose/Marginalia
    /// <summary>
    /// Called each time the Vuforia state is updated
    /// </summary>
    private void OnTrackablesUpdated()
    {
        if (mFormatRegistered)
        {
            if (mAccessCameraImage)
            {
                Vuforia.Image image = CameraDevice.Instance.GetCameraImage(mPixelFormat);
                if ((image != null) && (captureCounter < 3))
                {
                    Texture2D snap = new Texture2D(image.Width, image.Height);
                    image.CopyToTexture(snap);
                    snap.Apply();

                    Texture2D flipped = new Texture2D(snap.width, snap.height);

                    int xN = snap.width;
                    int yN = snap.height;

                    for (int i = 0; i < xN; i++)
                    {
                        for (int j = 0; j < yN; j++)
                        {
                            flipped.SetPixel(i, yN - j - 1, snap.GetPixel(i, j));
                        }
                    }

                    flipped.Apply();


                    System.IO.File.WriteAllBytes(savePath + captureCounter.ToString() + ".png", flipped.EncodeToPNG());
                    ++captureCounter;
                }
            }
        }
    }
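The nested GetPixel/SetPixel loop above is correct but slow on full camera frames; the same vertical flip can be done with one bulk row copy per scanline. A behaviorally equivalent sketch:

    Texture2D FlipVertically(Texture2D snap)
    {
        Color32[] src = snap.GetPixels32();
        Color32[] dst = new Color32[src.Length];
        int w = snap.width, h = snap.height;

        // Copy whole rows, reversing the row order
        for (int y = 0; y < h; y++)
        {
            System.Array.Copy(src, y * w, dst, (h - 1 - y) * w, w);
        }

        Texture2D flipped = new Texture2D(w, h, TextureFormat.RGBA32, false);
        flipped.SetPixels32(dst);
        flipped.Apply();
        return flipped;
    }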
Code example #7
    void takeScreenshots()
    {
        /* Grab texture and send to server */
        //arcamera.GetComponent<ColorBlindFilter>().colorBlindMode = ColorBlindMode.None;
        Vuforia.Image cameraImage = CameraDevice.Instance.GetCameraImage(mPixelFormat);

        if (cameraImage == null)
        {
            Debug.Log("pixel format is not available yet");
        }
        else
        {
            Texture2D texture2DHuman = new Texture2D(cameraImage.Width, cameraImage.Height); // Texture2D takes (width, height)
            cameraImage.CopyToTexture(texture2DHuman);
            ScreenshotManager.SaveImage(flipTexture(texture2DHuman), "HumanVision");
            //StartCoroutine(savePhoto(texture2DHuman, "HumanVision"));

            Texture2D texture2DDoggy = new Texture2D(cameraImage.Width, cameraImage.Height);
            cameraImage.CopyToTexture(texture2DDoggy);
            ScreenshotManager.SaveImage(addColorBlindnessToTexture(flipTexture(texture2DDoggy)), "DoggyVision");
            //StartCoroutine(savePhoto(texture2DDoggy, "DoggyVision"));

            // Upload bytes of image to server
            //StartCoroutine(Upload(bytes));
        }
    }
Code example #8
    /// <summary>
    /// Decode any single QR code present in a camera frame.
    /// </summary>
    /// <param name="image">The camera feed frame.</param>
    private void ReadQRCode(Vuforia.Image image)
    {
        if (image == null || image.Pixels == null)
        {
            return;
        }

        try
        {
            // decode the current frame
            var result = barcodeReader.Decode(image.Pixels, image.BufferWidth, image.BufferHeight, VuforiaToZXingBitmapFormat(pixelFormat));
            if (result != null)
            {
                scannedText.text = result.Text;
                Debug.Log($"Decoded {result.Text}");
            }
            else
            {
                Debug.Log("Nothing decoded yet.");
            }
        }
        catch (Exception e)
        {
            Debug.LogError(e.Message);
        }
    }
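VuforiaToZXingBitmapFormat is a helper this snippet does not include. A plausible mapping from Vuforia pixel formats to ZXing.Net's RGBLuminanceSource.BitmapFormat values (adjust for whichever formats the project actually registers):

    static RGBLuminanceSource.BitmapFormat VuforiaToZXingBitmapFormat(Vuforia.Image.PIXEL_FORMAT format)
    {
        switch (format)
        {
            case Vuforia.Image.PIXEL_FORMAT.GRAYSCALE:
                return RGBLuminanceSource.BitmapFormat.Gray8;   // 1 byte per pixel
            case Vuforia.Image.PIXEL_FORMAT.RGB888:
                return RGBLuminanceSource.BitmapFormat.RGB24;   // 3 bytes per pixel
            case Vuforia.Image.PIXEL_FORMAT.RGBA8888:
                return RGBLuminanceSource.BitmapFormat.RGBA32;  // 4 bytes per pixel
            case Vuforia.Image.PIXEL_FORMAT.RGB565:
                return RGBLuminanceSource.BitmapFormat.RGB565;  // 2 bytes per pixel
            default:
                return RGBLuminanceSource.BitmapFormat.Unknown;
        }
    }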
Code example #9
    private void ScanQRCode()
    {
        if (mFormatRegistered)
        {
            if (mAccessCameraImage)
            {
                Vuforia.Image image = CameraDevice.Instance.GetCameraImage(mPixelFormat); // DC: this is where the problem is
                if (image != null && image.IsValid())
                {
                    string imageInfo = mPixelFormat + " image: \n";
                    imageInfo += " size: " + image.Width + " x " + image.Height + "\n";
                    imageInfo += " bufferSize: " + image.BufferWidth + " x " + image.BufferHeight + "\n";
                    imageInfo += " stride: " + image.Stride;
                    Debug.Log(imageInfo);
                    byte[] pixels = image.Pixels;

                    if (pixels != null && pixels.Length > 0)
                    {
                        Debug.Log("Image pixels: " + pixels[0] + "," + pixels[1] + "," + pixels[2] + ",...");
                        Texture2D tex = new Texture2D(image.BufferWidth, image.BufferHeight, TextureFormat.RGB24, false); // RGB24
                        tex.LoadRawTextureData(pixels);
                        tex.Apply();
                        m_Texture             = tex;
                        m_RawImageBig.texture = tex;
                        m_RawImageBig.material.mainTexture = tex;
                        //QRCodeChecker qr = QRCodeChecker.getSingleton();
                        //Debug.Log(qr.findQRCodeInImage(m_Texture));
                    }
                }
            }
        }
    }
Code example #10
File: ArCamCap.cs  Project: zjingcong/mp
    /// <summary>
    /// Called each time the Vuforia state is updated
    /// </summary>
    private void OnTrackablesUpdated()
    {
        if (mFormatRegistered)
        {
            if (mAccessCameraImage)
            {
                Vuforia.Image image = CameraDevice.Instance.GetCameraImage(mPixelFormat);
                if (image != null)
                {
                    byte[] pixels = image.Pixels;
                    // Debug.Log("Height: " + image.Height + " Width: " + image.Width);

                    if (pixels != null && pixels.Length > 0)
                    {
                        if (mPixelFormat == Vuforia.Image.PIXEL_FORMAT.RGBA8888)
                        {
                            currentFrame = new Mat(image.Height, image.Width, CvType.CV_8UC4);
                            currentFrame.put(0, 0, pixels);
                            Imgproc.cvtColor(currentFrame, currentFrame, Imgproc.COLOR_BGRA2RGBA);
                            // Debug.Log("current pixel format: RGBA8888");
                        }
                        else if (mPixelFormat == Vuforia.Image.PIXEL_FORMAT.RGB888)
                        {
                            currentFrame = new Mat(image.Height, image.Width, CvType.CV_8UC3);
                            currentFrame.put(0, 0, pixels);
                            Imgproc.cvtColor(currentFrame, currentFrame, Imgproc.COLOR_BGR2RGBA);
                            // Debug.Log("current pixel format: RGB888");
                        }
                    }
                }
            }
        }
    }
Code example #11
    /*
     * Get a snapped image from the camera
     */
    private Texture2D TakeTextureSnap()
    {
        Vuforia.Image captured = CameraDevice.Instance.GetCameraImage(mPixelFormat);

        if (captured == null || !captured.IsValid())
        {
            return(null);
        }

        byte[] pixels = captured.Pixels;

        if (pixels == null || pixels.Length <= 0)
        {
            return(null);
        }

        // Make a temporary Texture2D to copy camera pixel data into
        Texture2D tmp = new Texture2D(captured.Width, captured.Height, TextureFormat.RGB24, false);

        captured.CopyToTexture(tmp);

        /*
         * TODO: Change capturedImageWidth to be proportional to Screen.width
         */
        return(TextureTools.CropWithRect(
                   tmp,
                   //new Rect(0, 0, Mathf.Min(tmp.width, Screen.width), Mathf.Min(tmp.height, Screen.height)),
                   //new Rect(0, 0, 610, 1280),
                   new Rect(0, 0, capturedImageWidth, capturedImageWidth),
                   TextureTools.RectOptions.Center,
                   0, 0));
    }
Code example #12
    /// <summary>
    /// Called each time the Vuforia state is updated
    /// Tracks the given color and sets the position and rotation of the spraycan
    /// </summary>
    void OnTrackablesUpdated()
    {
        if (mFormatRegistered)
        {
            if (mAccessCameraImage)
            {
                Vuforia.Image image = CameraDevice.Instance.GetCameraImage(mPixelFormat);   // get the current camera image in the given pixel format

                if (image != null)
                {
                #if UNITY_EDITOR
                    inputMat = new Mat(image.Height, image.Width, MatType.CV_8UC1, image.Pixels);
                #else
                    inputMat = new Mat(image.Height, image.Width, MatType.CV_8UC3, image.Pixels);   // store the image's pixels in an OpenCV mat
                #endif

                    Cv2.Resize(inputMat, smallMat, new Size(480, 270));                                                                       // resizing for performance reasons (keep aspect ratio!)
                    Cv2.GaussianBlur(smallMat, blurredMat, new Size(11, 11), 0);                                                              // blur image to reduce noise
                    Cv2.CvtColor(blurredMat, hsvMat, ColorConversionCodes.RGB2HSV);                                                           // convert to HSV colors
                    Cv2.InRange(hsvMat, lowerHSVColor, upperHSVColor, thresholdMat);                                                          // filter out all pixels matching the given HSV range

                    Cv2.Erode(thresholdMat, thresholdMat, Cv2.GetStructuringElement(MorphShapes.Ellipse, new Size(3, 3)), null, 2);           // shave off pixels from blobs to eliminate small blobs
                    Cv2.Dilate(thresholdMat, thresholdMat, Cv2.GetStructuringElement(MorphShapes.Ellipse, new Size(3, 3)), null, 2);          // strengthen the remaining blobs

                    Cv2.FindContours(thresholdMat, out contours, hierarchy, RetrievalModes.External, ContourApproximationModes.ApproxSimple); // detect the blobs and save them as contours

                    if (contours.Length > 0)
                    {
                        Mat contour = contours.Aggregate((i, j) => i.ContourArea() > j.ContourArea() ? i : j);  // find the blob with the biggest ContourArea/Size

                        Point2f point;
                        float   radius;
                        Cv2.MinEnclosingCircle(contour, out point, out radius); // get the radius for passing a final threshold

                        if (radius > 5)
                        {
                            Moments moments = Cv2.Moments(contour); // use moments to calculate the center point of the biggest blob
                            double  area    = moments.M00;
                            double  m01     = moments.M01;
                            double  m10     = moments.M10;

                            double posX = m10 / area;
                            double posY = m01 / area;

                            double rotX = MapValue(posX, 0, 480, -31.5, 31.5);  // map the values to match coordinates usable in Unity
                            double rotY = MapValue(posY, 0, 270, -19.75, 19.75);

                            posX = MapValue(posX, 0, 480, -6, 6);
                            posY = MapValue(posY, 0, 270, 3.5, -3.5);

                            this.transform.localPosition    = new Vector3((float)posX, (float)posY, 10); // apply the changes to position and rotation
                            this.transform.localEulerAngles = new Vector3((float)rotY, (float)rotX, 0);
                        }
                    }
                }
            }
        }
    }
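MapValue is a small project helper; the usual implementation is a linear remap from one range to another, sketched here under that assumption:

    static double MapValue(double value, double inMin, double inMax, double outMin, double outMax)
    {
        // Linearly remap value from [inMin, inMax] to [outMin, outMax];
        // an inverted output range (outMin > outMax) flips the axis, as
        // used for posY above
        return outMin + (value - inMin) * (outMax - outMin) / (inMax - inMin);
    }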
Code example #13
    void OnTrackablesUpdated()
    {
        if (mFormatRegistered)
        {
            if (mAccessCameraImage)
            {
                Vuforia.Image image = CameraDevice.Instance.GetCameraImage(mPixelFormat);
                //if (image != null)
                //{
                //    Debug.Log(
                //        "\nImage Format: " + image.PixelFormat +
                //        "\nImage Size:   " + image.Width + "x" + image.Height +
                //        "\nBuffer Size:  " + image.BufferWidth + "x" + image.BufferHeight +
                //        "\nImage Stride: " + image.Stride + "\n"
                //    );
                //    byte[] pixels = image.Pixels;
                //    if (pixels != null && pixels.Length > 0)
                //    {
                //        Debug.Log(
                //            "\nImage pixels: " +
                //            pixels[0] + ", " +
                //            pixels[1] + ", " +
                //            pixels[2] + ", ...\n"
                //        );
                //    }
                //}

                if (image != null)
                {
                    byte[] pixels_mat = image.Pixels;
                    //Debug.Log("Image Size: " + image.Width + "x" + image.Height);
                    //Debug.Log("pixels_mat.length: " + pixels_mat.Length);

                    if (pixels_mat != null && pixels_mat.Length > 0)
                    {
                        // Size the texture to the camera frame: LoadRawTextureData throws
                        // when the buffer length does not match the texture dimensions and
                        // format (RGBA32 assumes the RGBA8888 pixel format was registered)
                        Texture2D text = new Texture2D(image.Width, image.Height, TextureFormat.RGBA32, false);
                        text.LoadRawTextureData(pixels_mat);
                        text.Apply();

                        //print("pixels_mat.Length: " + pixels_mat.Length);
                        //print("text.width: " + text.width);
                        //print("text.height: " + text.height);

                        Sprite sprite = Sprite.Create(text, new Rect(0, 0, text.width, text.height), new Vector2(.5f, .5f));

                        GameObject test_box = GameObject.Find("Test_button");

                        if (test_box)
                        {
                            SpriteRenderer spriteRenderer = test_box.GetComponent <SpriteRenderer>();
                            spriteRenderer.sprite = sprite;
                        }
                    }
                }
            }
        }
    }
Code example #14
 void OnTrackablesUpdated()
 {
     if (mFormatRegistered)
     {
         if (mAccessCameraImage)
         {
             Vuforia.Image image = CameraDevice.Instance.GetCameraImage(mPixelFormat);
         }
     }
 }
 /// <summary>
 /// gets the stream image pixels in the given format
 /// </summary>
 /// <returns name="imagePixels">byte[] pixels</returns>
 private byte[] GetVuforiaStream()
 {
     Vuforia.Image image = CameraDevice.Instance.GetCameraImage(mPixelFormat);
     if (image != null)
     {
         imageDimensions[0] = image.Width;
         imageDimensions[1] = image.Height;
     }
     return(image != null ? image.Pixels : null);
 }
Code example #16
    private void doRawShot()
    {
        waitScreen = false;

        int width  = Screen.width;
        int height = Screen.height;


        Texture2D tex = new Texture2D(width, height);         //, TextureFormat.RGB565, false);

        vImg = CameraDevice.Instance.GetCameraImage(Vuforia.Image.PIXEL_FORMAT.GRAYSCALE);
        if (vImg == null || !vImg.IsValid())
        {
            Debug.Log("GRAYSCALE camera image is not available");
            return;
        }
        vImg.CopyToTexture(tex);

        tex.Apply();

        DateTime dt = DateTime.Now;

        string path;
        string imgPath;

        if (Application.isEditor)
        {
            path = "/AR_Screen/";
        }
        else
        {
            path = "/../../../../DCIM/AR_Screen/";
        }

        string allDirPath = Application.persistentDataPath + path;

        if (!Directory.Exists(allDirPath))
        {
            Directory.CreateDirectory(allDirPath);
        }

        string str = string.Format("_RawScreenshot_{0:D4}{1:D2}{2:D2}_{3:D2}{4:D2}{5:D2}.jpg", dt.Year, dt.Month, dt.Day, dt.Hour, dt.Minute, dt.Second);

        imgPath = allDirPath + str;
        Debug.Log(str);

        byte[] ba = tex.EncodeToJPG();
        File.WriteAllBytes(imgPath, ba);

        if (File.Exists(imgPath))
        {
            Debug.Log("saved " + imgPath);
            savedText.text = "Сохранено : " + imgPath;
        }
        else
        {
            savedText.text = "Не удалось сохранить : " + imgPath;
            Debug.Log("not found " + imgPath);
        }
    }
Code example #17
    /////// THA DEBUG ZONE /////////////


    void InitTexture()
    {
        Vuforia.Image image = CameraDevice.Instance.GetCameraImage(mPixelFormat);
        tex     = new Texture2D(1920, 1080, TextureFormat.ARGB32, false);
        pixel32 = tex.GetPixels32();
        //Pin pixel32 array
        pixelHandle = GCHandle.Alloc(pixel32, GCHandleType.Pinned);
        //Get the pinned address
        pixelPtr = pixelHandle.AddrOfPinnedObject();
        Debug.Log("Tex Initialised");
    }
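Because pixel32 is pinned, the handle must be released when the component is torn down; otherwise the array stays pinned for the life of the process. A sketch of the matching cleanup, using the fields from InitTexture:

    void OnDestroy()
    {
        // Free the pinned handle so the GC can move and reclaim pixel32 again
        if (pixelHandle.IsAllocated)
        {
            pixelHandle.Free();
        }
        pixelPtr = System.IntPtr.Zero;
    }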
Code example #18
    public IEnumerator ReadQRCode()
    {
        while (true)
        {
            if (cameraInitialized)
            {
                image = CameraDevice.Instance.GetCameraImage(mPixelFormat);

                if (image == null)
                {
                    Debug.Log("No camera image found");
                }
                else
                {
                    result = barCodeReader.Decode(image.Pixels, image.BufferWidth, image.BufferHeight, RGBLuminanceSource.BitmapFormat.RGB24);

                    image = null;

                    if (result != null)
                    {
                        // QRCode detected.
                        QRVisible = true;

                        Debug.Log(i + " QR code: " + result.Text);
                        //Do something with the QR code.

                        GameObject instantiatedSucces = Instantiate(succes, GameObject.Find("Canvas Start").transform);

                        Manager.Instance.controllerUrl = result.Text;
                        Debug.Log("Manager's controllerUrl set: " + Manager.Instance.controllerUrl);

                        int    firstIndex  = 0;
                        int    secondIndex = result.Text.IndexOf("can");
                        int    thirdIndex  = result.Text.IndexOf("=") + 1;
                        string canvasUrl   = result.Text.Substring(firstIndex, secondIndex) + result.Text.Substring(thirdIndex);

                        Manager.Instance.canvasUrl = canvasUrl + "?raw";
                        Debug.Log("Manager's variable set: " + Manager.Instance.canvasUrl);
                        result = null;  // clear data
                        yield return(new WaitForSeconds(1));

                        Destroy(instantiatedSucces);

                        uiMethods.Show(GameObject.Find("Canvas full"));
                        uiMethods.Hide(GameObject.Find("Canvas Start"));

                        yield break;
                    }
                }
            }

            yield return(new WaitForSeconds(0.5f));
        }
    }
Code example #19
 // RGB888 (3 bytes per pixel) to Color32
 Color32[] ImageToColor32(Vuforia.Image a)
 {
     Color32[] r = new Color32[a.BufferWidth * a.BufferHeight];
     for (int i = 0; i < r.Length; i++)
     {
         r[i].b = a.Pixels[i * 3];
         r[i].g = a.Pixels[i * 3 + 1];
         r[i].r = a.Pixels[i * 3 + 2];
         r[i].a = 255;   // Color32 alpha is 0-255; 255 is fully opaque
     }
     return(r);
 }
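One plausible way to consume the converter above is to copy the result into a texture sized to the image buffer, e.g. for a UI RawImage. The ShowFrame helper below is illustrative, not part of the original project:

    void ShowFrame(Vuforia.Image image, UnityEngine.UI.RawImage target)
    {
        Color32[] colors = ImageToColor32(image);

        // The texture must match the buffer dimensions used by the converter
        Texture2D tex = new Texture2D(image.BufferWidth, image.BufferHeight, TextureFormat.RGBA32, false);
        tex.SetPixels32(colors);
        tex.Apply();

        target.texture = tex;
    }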
Code example #20
    public void TakePhoto(bool display)
    {
        if (mFormatRegistered)
        {
            if (mAccessCameraImage)
            {
                Vuforia.Image image = CameraDevice.Instance.GetCameraImage(mPixelFormat);
                if (image != null && image.IsValid())
                {
                    /*
                     * string imageInfo = mPixelFormat + " image: \n";
                     * imageInfo += " size: " + image.Width + " x " + image.Height + "\n";
                     * imageInfo += " bufferSize: " + image.BufferWidth + " x " + image.BufferHeight + "\n";
                     * imageInfo += " stride: " + image.Stride;
                     * Debug.Log(imageInfo);*/
                    byte[] pixels = image.Pixels;

                    if (pixels != null && pixels.Length > 0)
                    {
                        if (display)
                        {
                            //Debug.Log("Image pixels: " + pixels[0] + "," + pixels[1] + "," + pixels[2] + ",...");
                            Texture2D tex = new Texture2D(image.BufferWidth, image.BufferHeight, TextureFormat.RGB24, false); // RGB24
                            tex.LoadRawTextureData(pixels);
                            tex.Apply();
                            m_Texture = tex;
                            //since I noticed this was set to null in the main scene, I decided to save my network code if someone derps.
                            if (m_RawImageBig)
                            {
                                m_RawImageBig.texture = tex;
                                m_RawImageBig.material.mainTexture = tex;
                            }
                            else
                            {
                                Debug.LogError("You didn't assign m_RawImageBig");
                            }
                            if (m_RawImageSmall)
                            {
                                m_RawImageSmall.texture = tex;
                                m_RawImageSmall.material.mainTexture = tex;
                            }
                            else
                            {
                                Debug.LogError("You didn't assign m_RawImageSmall");
                            }
                            //ServerConnect.S.sendPicture(m_Texture);
                        }
                    }
                }
            }
        }
    }
Code example #21
File: QRCodeReader.cs  Project: Litmin/Browser-AR
 /// <summary>
 /// Convert a grayscale Vuforia image to a Color32 array
 /// </summary>
 /// <param name="a">Vuforia camera image</param>
 /// <returns>Color32 array, or null if the image is invalid</returns>
 Color32[] ImageToColor32(Vuforia.Image a)
 {
     if (!a.IsValid())
     {
         return(null);
     }
     Color32[] r = new Color32[a.BufferWidth * a.BufferHeight];
     for (int i = 0; i < r.Length; i++)
     {
         r[i].r = r[i].g = r[i].b = a.Pixels[i];
     }
     return(r);
 }
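In a QR-reading project like this, a grayscale Color32 array typically feeds ZXing. The Unity build of ZXing.Net can decode a Color32[] directly; a sketch, assuming that Unity-specific Decode overload is available:

    private readonly ZXing.BarcodeReader barcodeReader = new ZXing.BarcodeReader();

    string DecodeFrame(Vuforia.Image image)
    {
        Color32[] colors = ImageToColor32(image);
        if (colors == null)
        {
            return null;
        }

        // Decode takes the pixel array plus the buffer dimensions
        var result = barcodeReader.Decode(colors, image.BufferWidth, image.BufferHeight);
        return result != null ? result.Text : null;
    }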
Code example #22
    public void saveImage(Vuforia.Image image)
    {
        snap = new Texture2D(image.Width, image.Height);

        //int num = Random.Range(0, 26);

        image.CopyToTexture(snap);
        snap.Apply();


        System.IO.File.WriteAllBytes((Application.persistentDataPath + "/image" + num.ToString() + ".png"), snap.EncodeToPNG());
        num++;
        SaveLoad.Save(num);
    }
Code example #23
    /// <summary>
    /// Called each time the Vuforia state is updated
    /// </summary>
    void OnTrackablesUpdated()
    {
        if (mFormatRegistered)
        {
            if (mAccessCameraImage)
            {
                Vuforia.Image image = CameraDevice.Instance.GetCameraImage(mPixelFormat);

                if (image != null)
                {
                    byte[] pixels = image.Pixels;
                }
            }
        }
    }
Code example #24
    public void TakePhoto()
    {
        /*
         * RegisterFormat();
         * Vuforia.Image image = CameraDevice.Instance.GetCameraImage(mPixelFormat);
         *
         * if (image != null)
         * {
         *  Texture2D tex = new Texture2D(1280, 720, TextureFormat.RGB24, false);
         *  tex.LoadRawTextureData(image.Pixels);
         *  tex.Apply();
         *  //tex.LoadImage(image.Pixels);
         *  m_Texture = tex;
         * }
         */

        if (mFormatRegistered)
        {
            if (mAccessCameraImage)
            {
                Vuforia.Image image = CameraDevice.Instance.GetCameraImage(mPixelFormat);
                if (image != null && image.IsValid())
                {
                    string imageInfo = mPixelFormat + " image: \n";
                    imageInfo += " size: " + image.Width + " x " + image.Height + "\n";
                    imageInfo += " bufferSize: " + image.BufferWidth + " x " + image.BufferHeight + "\n";
                    imageInfo += " stride: " + image.Stride;
                    Debug.Log(imageInfo);
                    byte[] pixels = image.Pixels;

                    if (pixels != null && pixels.Length > 0)
                    {
                        Debug.Log("Image pixels: " + pixels[0] + "," + pixels[1] + "," + pixels[2] + ",...");
                        Texture2D tex = new Texture2D(image.BufferWidth, image.BufferHeight, TextureFormat.RGB24, false); // RGB24
                        tex.LoadRawTextureData(pixels);
                        tex.Apply();
                        m_Texture             = tex;
                        m_RawImageBig.texture = tex;
                        m_RawImageBig.material.mainTexture = tex;
                        QRCodeChecker qr = QRCodeChecker.getSingleton();
                        Debug.Log(qr.findQRCodeInImage(m_Texture));
                    }
                }
            }
        }
    }
Code example #25
    IEnumerator ReadQR()
    {
        while (true)
        {
            yield return(new WaitForSeconds(0.5f));

            if (cameraInitialized)
            {
                Vuforia.Image image = CameraDevice.Instance.GetCameraImage(mPixelFormat);

                if (image == null)
                {
                    Debug.Log("No camera image found");
                    continue; // skip this frame instead of dereferencing a null image
                }
                //Debug.Log("WORKS!!");

                var result = barCodeReader.Decode(image.Pixels, image.BufferWidth, image.BufferHeight, RGBLuminanceSource.BitmapFormat.RGB24);
                if (result != null)
                {
                    // QRCode detected.
                    QRVisible = true;

                    Debug.Log("QR code: " + result.Text);
                    //Do something with the QR code.

                    GameObject instantiatedSucces = Instantiate(succes, GameObject.Find("Canvas").transform);

                    Manager.Instance.controllerUrl = result.Text;
                    int firstIndex  = 0;
                    int secondIndex = result.Text.IndexOf("can");
                    int thirdIndex  = result.Text.IndexOf("=") + 1;

                    string canvasUrl = result.Text.Substring(firstIndex, secondIndex) + result.Text.Substring(thirdIndex);
                    Debug.Log("Canvas string: " + canvasUrl);
                    Manager.Instance.canvasUrl = canvasUrl + "?raw";
                    result = null;  // clear data
                    yield return(new WaitForSeconds(2));

                    SceneManager.LoadScene("SnapshotMedFeatureMatcher");
                    break;
                }
            }
        }
    }
Code example #26
 public void CapturePic()
 {
     Vuforia.Image image = CameraDevice.Instance.GetCameraImage(mPixelFormat);
     if (image != null)
     {
         Texture2D texture2D = new Texture2D(image.Width, image.Height);
         image.CopyToTexture(texture2D);
         texture2D.SetPixels32(f1(texture2D.GetPixels32(), image.Width, image.Height));
         if (Screen.orientation == ScreenOrientation.Portrait)
         {
             texture2D.SetPixels32(f2(texture2D.GetPixels32(), image.Width, image.Height));
             Color32[] color32s = f3(texture2D.GetPixels32(), image.Width, image.Height);
             texture2D = new Texture2D(image.Height, image.Width);
             texture2D.SetPixels32(color32s);
         }
         System.Action <WWW> action = callback;
         apiCaller.Send(texture2D, action);
     }
 }
Code example #27
File: CamController.cs  Project: Marwan0/ARecorder
    /// <summary>
    /// Called each time the Vuforia state is updated
    /// </summary>
    void OnTrackablesUpdated()
    {
        if (mFormatRegistered)
        {
            if (mAccessCameraImage)
            {
                Vuforia.Image image = CameraDevice.Instance.GetCameraImage(mPixelFormat);

                if (image != null)
                {
//					Debug.Log(
//						"\nImage Format: " + image.PixelFormat +
//						"\nImage Size:   " + image.Width + "x" + image.Height +
//						"\nBuffer Size:  " + image.BufferWidth + "x" + image.BufferHeight +
//						"\nImage Stride: " + image.Stride + "\n"
//					);

                    byte[] pixels = image.Pixels;
                    cameraWidth  = image.Width;
                    cameraHeight = image.Height;
//					if (pixels != null && pixels.Length > 0)
//					{
//						Debug.Log(
//							"\nImage pixels: " +
//							pixels[0] + ", " +
//							pixels[1] + ", " +
//							pixels[2] + ", ...\n"
//						);
//						Debug.Log (pixels.Length);
//					}
#if !UNITY_EDITOR
                    unsafe
                    {
                        fixed(byte *pixelData = image.Pixels)
                        {
                            processBuffer((int)(Time.time * 600), image.Width, image.Height, image.Stride, (IntPtr)pixelData);
                        }
                    }
#endif
                }
            }
        }
    }
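processBuffer is a native plugin entry point that this snippet does not declare. Its P/Invoke declaration would look roughly like the following; the library name "NativeProcessor" is hypothetical and must match the project's actual plugin:

    // Hypothetical import matching the processBuffer call above; replace
    // "NativeProcessor" with the real native library name in the project
    [System.Runtime.InteropServices.DllImport("NativeProcessor")]
    private static extern void processBuffer(int frame, int width, int height, int stride, System.IntPtr pixelData);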
Code example #28
    /// <summary>
    /// Called each time the Vuforia state is updated
    /// </summary>
    void OnTrackablesUpdated()
    {
        if (mFormatRegistered)
        {
            if (useCam)
            {
                Vuforia.Image image = CameraDevice.Instance.GetCameraImage(mPixelFormat);
                if (image != null)
                {
                    /*                      Debug.Log(
                     *                        "\nImage Format: " + image.PixelFormat +
                     *                        "\nImage Size:   " + image.Width + "x" + image.Height +
                     *                        "\nBuffer Size:  " + image.BufferWidth + "x" + image.BufferHeight +
                     *                        "\nImage Stride: " + image.Stride + "\n"
                     *                    );*/
                    if (mTexture == null || mTexture.width != image.Width)
                    {
                        mTexture = new Texture2D((int)image.Width, (int)image.Height, TextureFormat.RGBA32, false);
                    }
                    byte[] pixels = image.Pixels;
                    if (pixels != null && pixels.Length > 0)
                    {
                        // Debug.Log("\nImage pixels: " + pixels.Length);

                        /*   Debug.Log(
                         *     "\nImage pixels: " +
                         *     pixels[0] + ", " +
                         *     pixels[1] + ", " +
                         *     pixels[2] + ", ...\n"
                         * );*/
                        //    toast.text = "x: " + x + "y:" + y + "b:" + bytes.Length;

                        // Convert to Texture2D
                        //  image.CopyToTexture(mTexture);
                        mTexture.SetPixels32(ImageToColor32(image));
                        mTexture.Apply();
                        //   byte[] bytes = mTexture.GetRawTextureData();
                        //    ShareCam(image.Width, image.Height, bytes);
                    }
                }
            }
        }
    }
Code example #29
    private void OnTrackablesUpdated()
    {
        delta += Time.deltaTime;
        if (mFormatRegistered && delta > thres_delta)
        {
            if (mAccessCameraImage)
            {
                Vuforia.Image image = CameraDevice.Instance.GetCameraImage(mPixelFormat);

                if (image != null)
                {
                    byte[] pixels = image.Pixels;
                    if (pixels != null && pixels.Length > 0)
                    {
                        if (recognition_finished())
                        {
                            RecognitionSmoother.enqueue(country_code);
                            var code = RecognitionSmoother.most_common();
                            if (country_code != code)
                            {
                                text.text = "正在识别中...";
                            }
                            else
                            {
                                text.text = CountryMap.countries [code.ToString()];
                            }
                            if (audio.get_source() != code.ToString())
                            {
                                audio.set_source(code.ToString());
                            }
                            if (model_display.get_source() != code)
                            {
                                model_display.set_source(code);
                            }
                            setRecognitionStatus(RECOG_STATUS.PROCESSING);
                            UploadBytes(ref pixels, image.BufferWidth, image.BufferHeight);
                        }
                    }
                }
            }
            delta = 0f;
        }
    }
Code example #30
    /// <summary>
    /// Called each time the Vuforia state is updated.
    /// </summary>
    void OnTrackablesUpdated()
    {
        if ((pixelFormatSetted) && (canGetCameraImage))
        {
            if (frameCount <= 0)
            {
                frameCount = skipFrames;

                Vuforia.Image image = CameraDevice.Instance.GetCameraImage(pixelFormat);

                if (image != null)
                {
                    pixels            = image.Pixels;
                    totalIllumination = 0.0;

                    if (pixels != null && pixels.Length > 0)
                    {
                        for (int p = 0; p < pixels.Length; p += 64)
                        {
                            // Get pixel's Y (luminance) component of YIQ color scheme and add it to total image illumination counter
                            // More about conversion here https://en.wikipedia.org/wiki/YIQ
                            totalIllumination += pixels[p] * 0.299 + pixels[p + 1] * 0.587 + pixels[p + 2] * 0.114;
                        }

                        totalIllumination  /= pixels.Length / 16;
                        totalIllumination  /= 255.0;
                        countedIllumination = (float)((8 * totalIllumination) * ambientLightWeight) + initialIllumination * (1 - ambientLightWeight);

                        if (countedIllumination <= 0f)
                        {
                            countedIllumination = 0f;
                        }
                    }
                }
            }

            countedIllumination    = ambientLight.intensity * damping + countedIllumination * (1 - damping);
            ambientLight.intensity = countedIllumination;

            frameCount--;
        }
    }