Example #1
    IEnumerator Capture()
    {
        yield return(m_WaitTimeAfterCameraStart);

        Texture2D snapshot = new Texture2D(m_WebcamTexture.width, m_WebcamTexture.height);

        // Android devices deliver the camera image rotated by 90 degrees, so rotate it back before detecting faces.
#if UNITY_ANDROID && !UNITY_EDITOR
        Color32[] data = m_WebcamTexture.GetPixels32();
        Debug.Log("[GreeterMA] capture rotation : " + m_WebcamTexture.videoRotationAngle);
        int angle = m_WebcamTexture.videoRotationAngle;
        if (m_WebcamTexture.videoRotationAngle > 180)
        {
            angle = 360 - m_WebcamTexture.videoRotationAngle;
        }
        m_NativeCvPlugin.Flip(ref data, m_WebcamTexture.width, m_WebcamTexture.height, angle);
        snapshot.SetPixels32(data);
#endif
#if UNITY_EDITOR
        snapshot.SetPixels32(m_WebcamTexture.GetPixels32());
#endif
        snapshot.Apply();

        if (OnCapture != null)
        {
            OnCapture.Invoke(snapshot);
        }
    }
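A minimal caller sketch for the coroutine above (assuming OnCapture is a plain C# delegate or event taking the snapshot, and that m_WebcamTexture and m_WaitTimeAfterCameraStart are set up in the same component; this is not part of the original example):

    void Start()
    {
        m_WebcamTexture.Play();
        // Consume the (rotation-corrected) snapshot once Capture() finishes.
        OnCapture += snapshot => Debug.Log("Captured " + snapshot.width + "x" + snapshot.height);
        StartCoroutine(Capture());
    }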
Example #2
    /// <summary>
    /// Gets the camera frame pixel colors.
    /// </summary>
    protected void GetCameraFrameInformation()
    {
        if (!backFacingCamera)
        {
            Debug.LogError("No device camera available");
            return;
        }
        if (backFacingCamera.GetPixels32().Length < 300)
        {
            Debug.LogWarning("The frame from the camera is too small. Pixel array length:  " + backFacingCamera.GetPixels32().Length);
            return;
        }

        if (currentManoMotionFrame.pixels.Length != backFacingCamera.GetPixels32().Length)
        {
            ResizeManoMotionFrameResolution(backFacingCamera.width, backFacingCamera.height);
            return;
        }

        currentManoMotionFrame.pixels = backFacingCamera.GetPixels32();
        currentManoMotionFrame.texture.SetPixels32(backFacingCamera.GetPixels32());
        currentManoMotionFrame.texture.Apply();
        currentManoMotionFrame.orientation = Input.deviceOrientation;

        if (OnFrameUpdated != null)
        {
            OnFrameUpdated(currentManoMotionFrame);
        }
    }
Example #3
        /**
         * Sets Image from WebCamTexture.
         *
         * @param webCamTexture the source WebCamTexture.
         * @param bufferColors the optional array to receive pixel data.
         * You can optionally pass in an array of Color32s to avoid allocating new memory each frame.
         * The array needs to be initialized to a length matching width * height of the texture. (<a href="http://docs.unity3d.com/ScriptReference/WebCamTexture.GetPixels32.html">http://docs.unity3d.com/ScriptReference/WebCamTexture.GetPixels32.html</a>)
         */
        public void SetImage(WebCamTexture webCamTexture, Color32[] bufferColors)
        {
            if (webCamTexture == null)
            {
                throw new ArgumentNullException("webCamTexture");
            }
            ThrowIfDisposed();

            GCHandle colorsHandle;

            if (bufferColors == null)
            {
                Color32[] colors = webCamTexture.GetPixels32();

                colorsHandle = GCHandle.Alloc(colors, GCHandleType.Pinned);
            }
            else
            {
                webCamTexture.GetPixels32(bufferColors);

                colorsHandle = GCHandle.Alloc(bufferColors, GCHandleType.Pinned);
            }

            DlibFaceLandmarkDetector_SetImage(nativeObj, colorsHandle.AddrOfPinnedObject(), webCamTexture.width, webCamTexture.height, 4, true);
            colorsHandle.Free();
        }
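A minimal caller sketch for SetImage, following the doc comment's advice to pass a pre-allocated buffer so that GetPixels32 does not allocate a new array every frame (the detector field and the surrounding method are assumptions, not part of the original example):

        Color32[] pixelBuffer;

        void DetectFrame(WebCamTexture webCamTexture)
        {
            if (pixelBuffer == null || pixelBuffer.Length != webCamTexture.width * webCamTexture.height)
            {
                pixelBuffer = new Color32[webCamTexture.width * webCamTexture.height];
            }
            // The buffer-taking overload fills the array in place instead of allocating.
            detector.SetImage(webCamTexture, pixelBuffer);
        }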
Example #4
    // Region-of-interest (ROI) coroutine
    IEnumerator ROICoroutine()
    {
        // WebCamTexture reports a 16x16 placeholder size until the camera has actually started.
        while (webcamTexture.width <= 16 && webcamTexture.height <= 16)
        {
            yield return(null);
        }

        while (true)
        {
            // Debug.Log(webcamTexture.width + ", " + nowTexture.width);
            nowTexture.SetPixels32(webcamTexture.GetPixels32());
            pastTexture.SetPixels32(webcamTexture.GetPixels32());
            nowTexture.Apply();
            yield return(new WaitForEndOfFrame());

            nowTexture.SetPixels32(webcamTexture.GetPixels32());
            nowTexture.Apply();
            pastTexture.Apply();

            RenderTexture.active = resultReTexture;
            resultTexture        = new Texture2D(resultReTexture.width, resultReTexture.height);
            resultTexture.ReadPixels(new Rect(0, 0, resultTexture.width, resultTexture.height), 0, 0);
            resultColor = resultTexture.GetPixels();
        }
    }
Example #5
    public void PlantButtonOnClick()
    {
        try
        {
            IBarcodeReader barcodeReader = new BarcodeReader();
            // decode the current frame
            var result = barcodeReader.Decode(backCamera.GetPixels32(), backCamera.width, backCamera.height);

            if (result == null)
            {
                return;
            }
            Debug.Log("DECODED TEXT FROM QR: " + result.Text);

            QrResult = result.Text;
            JsonEntity jsonEntity = new JsonEntity();
            jsonEntity.JsonFlag   = "FactoryQR";
            jsonEntity.JsonObject = QrResult;
            string jsonString = JsonUtility.ToJson(jsonEntity);
            PlantQrCommunication qrCommunication = new PlantQrCommunication();
            qrCommunication.SendDataToServer(jsonString);

            //LoadScene which displays OptimisedRootList of Machines
            SceneManager.LoadScene("YellowStateMachines");
        }
        catch (Exception ex)
        {
            Debug.LogWarning(ex.Message);
            //QrResult = "";
        }
    }
Example #6
    void FrontTaker()
    {
        Debug.Log("Start Front Capture");
        if (rearCamTexture.isPlaying)
        {
            color32 = rearCamTexture.GetPixels32();
            // Paste the rear camera image onto the background.
            Texture2D _reartexture = new Texture2D(rearCamTexture.height, rearCamTexture.width);
            BGImage.texture = _reartexture;
            _reartexture.SetPixels32(color32);
            _reartexture.Apply();
            rearCamTexture.Stop();
        }
        FGImage.texture = rearCamTexture;
        Debug.Log("Front Information : " + _devices[0].availableResolutions);
        Debug.Log(" Is Front or Rear? " + _devices[0].isFrontFacing);
        frontCamTexture.Play();
        // Busy-wait on the main thread until the front camera reports it is playing.
        while (!frontCamTexture.isPlaying)
        {
        }
        color32 = frontCamTexture.GetPixels32();
        // Paste the front camera image onto the foreground.
        Texture2D texture = new Texture2D(frontCamTexture.height, frontCamTexture.width);

        FGImage.texture = texture;
        texture.SetPixels32(color32);
        texture.Apply();
    }
Example #7
 private void OnFrame()
 {
     // Check that we are playing
     if (!previewTexture.didUpdateThisFrame || previewTexture.width == 16 || previewTexture.height == 16)
     {
         return;
     }
     // Update preview buffer
     if (previewBuffer == null)
     {
         previewBuffer = previewTexture.GetPixels32();
     }
     else
     {
         previewTexture.GetPixels32(previewBuffer);
     }
     // Invoke events
     if (firstFrame)
     {
         startCallback(previewTexture);
     }
     if (frameCallback != null)
     {
         frameCallback(Stopwatch.GetTimestamp() * 100L);
     }
     firstFrame = false;
 }
Example #8
 public void StartRecording()
 {
     // Start recording
     clock         = new RealtimeClock();
     videoRecorder = new MP4Recorder(webcamTexture.width, webcamTexture.height, 30, 0, 0, OnRecording);
     pixelBuffer   = webcamTexture.GetPixels32();
 }
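StartRecording above only prepares the clock, the recorder, and a pixel buffer; frames still have to be read and handed to the recorder every update. A rough per-frame sketch, assuming a NatCorder-style CommitFrame(pixelBuffer, timestamp) call and a clock Timestamp property (both names should be checked against the recorder version actually in use):

 void Update()
 {
     if (videoRecorder == null || !webcamTexture.didUpdateThisFrame)
     {
         return;
     }
     // Reuse the buffer allocated in StartRecording instead of allocating per frame.
     webcamTexture.GetPixels32(pixelBuffer);
     // Assumption: the recorder exposes CommitFrame and the clock exposes a timestamp value.
     videoRecorder.CommitFrame(pixelBuffer, clock.Timestamp);
 }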
Example #9
 void Update()
 {
     if (c == null)
     {
         c = camTexture.GetPixels32();
     }
 }
Example #10
 public void Retake()
 {
     Cam.Stop();
     Cam.Play();
     Renderer camRenderer = UIManager.Instance.CamObject.GetComponent <Renderer>();
     camRenderer.material.shader = Shader.Find("Unlit/Texture");
     // Texture2D is not a Component, so GetComponent<Texture2D>() can never succeed;
     // assigning the live WebCamTexture as the main texture is enough to resume the preview.
     camRenderer.material.mainTexture = Cam;
 }
Example #11
 public void StartRecording()
 {
     // Start recording
     clock       = new RealtimeClock();
     recorder    = new MP4Recorder(webCamTexture.width, webCamTexture.height, 30);
     pixelBuffer = webCamTexture.GetPixels32();
     recording   = true;
 }
Example #12
    public Texture2D GetWebcamTexture2D()
    {
        colors32 = webCamTexture.GetPixels32();
        Texture2D texture = new Texture2D(webCamTexture.width, webCamTexture.height);

        texture.SetPixels32(colors32);
        texture.Apply();
        return(texture);
    }
Example #13
    void Update()
    {
        if (!runningOnAndroid)
        {
            if (notCalibrated && Input.GetKeyDown(KeyCode.Space))
            {
                bool sucess = Aruco.UFindCharucoBoardCorners(_webCamTexture.GetPixels32(), _webCamTexture.width, _webCamTexture.height, boardParameters, allCharucoIds, allCharucoCorners);

                if (sucess)
                {
                    numOfSuccessfulFrames++;
                }

                if (numOfSuccessfulFrames >= numOfFrames)
                {
                    calibData = Aruco.UCalibrateCameraCharuco(_webCamTexture.width, _webCamTexture.height, boardParameters, allCharucoIds, allCharucoCorners);

                    CameraCalibSerializable calidSaveData;
                    calidSaveData.distortionCoefficients = (double[][])calibData.distCoeffs.GetMangedObject();
                    calidSaveData.cameraMatrix           = (double[][])calibData.cameraMatrix.GetMangedObject();
                    calidSaveData.reProjectionError      = calibData.reProjectionError;
                    Utilities.SaveCameraCalibrationParams(calidSaveData);

                    notCalibrated = false;
                }
            }
        }
        else
        {
            if (notCalibrated)
            {
                if (Input.touchCount > 0)
                {
                    bool sucess = Aruco.UFindCharucoBoardCorners(_webCamTexture.GetPixels32(), _webCamTexture.width, _webCamTexture.height, boardParameters, allCharucoIds, allCharucoCorners);

                    if (sucess)
                    {
                        numOfSuccessfulFrames++;
                    }

                    if (numOfSuccessfulFrames >= numOfFrames)
                    {
                        calibData = Aruco.UCalibrateCameraCharuco(_webCamTexture.width, _webCamTexture.height, boardParameters, allCharucoIds, allCharucoCorners);

                        CameraCalibSerializable calidSaveData;
                        calidSaveData.distortionCoefficients = (double[][])calibData.distCoeffs.GetMangedObject();
                        calidSaveData.cameraMatrix           = (double[][])calibData.cameraMatrix.GetMangedObject();
                        calidSaveData.reProjectionError      = calibData.reProjectionError;
                        Utilities.SaveCameraCalibrationParams(calidSaveData);

                        notCalibrated = false;
                    }
                }
            }
        }
    }
Example #14
        /*
         * private void Awake()
         * {
         *  cameraImage = findCamera.background;
         *
         * }
         *
         * private void Update()
         * {
         *  if (cameraImage != null)
         *  {
         *      cameraImage = findCamera.background;
         *  }
         *  //findCamera.background.texture;
         *
         *
         * }*/
        /*
         * public void StartRecording () {
         *  // Start recording
         *  recorder = new GIFRecorder(cameraImage.texture.width, cameraImage.texture.height, frameDuration);
         *  cameraInput = new CameraInput(recorder, Camera.main);
         *  // Get a real GIF look by skipping frames
         *  //cameraInput.frameSkip = 4;
         * }
         */



        //--

        /*
         * public async void StopRecording () {
         *  // Stop the recording
         *  cameraInput.Dispose();
         *  var path = await recorder.FinishWriting();
         *  // Log path
         *  Debug.Log($"Saved animated GIF image to: {path}");
         *  Application.OpenURL($"file://{path}");
         * }
         */


        //-mp4


        public void StartRecording()
        {
            //recorderTexture = findCamera.GetCurrentCam();

            // Start recording
            clock       = new RealtimeClock();
            mp4Recorder = new MP4Recorder(recorderTexture.width, recorderTexture.height, 30);
            pixelBuffer = recorderTexture.GetPixels32();
            recording   = true;
        }
Example #15
    private void recording()
    {
        playSound();
        starttime = DateTime.Now.Hour * 60 * 60 * 1000 + DateTime.Now.Minute * 60 * 1000 + DateTime.Now.Second * 1000 +
                    DateTime.Now.Millisecond;

        texture.SetPixels32(myWebcamTexture.GetPixels32());
        texture.Apply();
        textures[counter].SetPixels32(myWebcamTexture.GetPixels32());
        textures[counter].Apply();
    }
Example #16
    public void camset1()
    {
        img1 = webcamTexture.GetPixels32();
        Texture2D texture = new Texture2D(webcamTexture.width, webcamTexture.height);

        texture.SetPixels32(img1);
        texture.Apply();
        set1.texture = texture;
        this.GetComponent <RawImage>().texture = texture;

        var bytes = texture.EncodeToPNG();

        // Make sure the target folder exists before writing the PNG.
        Directory.CreateDirectory(Application.persistentDataPath + "/menu");
        File.WriteAllBytes(Application.persistentDataPath + "/menu/menuimage1.png", bytes);
    }
Example #17
 /// <summary>
 /// Gets the pixels32 of the camera
 /// </summary>
 /// <returns>The pixels32.</returns>
 public Color32[] GetPixels32()
 {
             #if UNITY_ANDROID && !UNITY_EDITOR
     if (isUseEasyWebCam)
     {
         return(EasyWebCam.WebCamPreview.GetPixels32());
     }
     else
     {
         return(webcamera.GetPixels32());
     }
             #else
     return(webcamera.GetPixels32());
             #endif
 }
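The wrapper above allocates a fresh Color32 array on every call. When it is polled each frame, Unity's buffer-taking overload of WebCamTexture.GetPixels32 can reuse one array instead; a sketch for the plain WebCamTexture path only (GetPixels32NonAlloc and m_PixelBuffer are hypothetical names, and whether EasyWebCam's preview offers a similar overload is not checked here):

 Color32[] m_PixelBuffer;

 /// <summary>
 /// Copies the camera pixels into a reused buffer instead of allocating per call.
 /// </summary>
 public Color32[] GetPixels32NonAlloc()
 {
     if (m_PixelBuffer == null || m_PixelBuffer.Length != webcamera.width * webcamera.height)
     {
         m_PixelBuffer = new Color32[webcamera.width * webcamera.height];
     }
     webcamera.GetPixels32(m_PixelBuffer);
     return m_PixelBuffer;
 }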
Example #18
    void Update()
    {
        // if space is pressed, do the AR transformation
        if (notCalibrated && Input.GetKeyDown(KeyCode.Space))
        {
            // load camera calibration settings
            CameraCalibSerializable calidSaveData = Utilities.LoadCameraCalibrationParams();
            // initialise marshallers
            MatDoubleMarshaller distCoeffs   = new MatDoubleMarshaller(calidSaveData.distortionCoefficients);
            MatDoubleMarshaller cameraMatrix = new MatDoubleMarshaller(calidSaveData.cameraMatrix);
            double reProjectionError         = calidSaveData.reProjectionError;
            // get calibration data
            calibData     = new UCameraCalibrationData(distCoeffs, cameraMatrix, reProjectionError);
            notCalibrated = false;
        }
        else if (!notCalibrated)
        {
            // estimate charuco board pose
            (
                UDetectMarkersData markerData,
                UBoardMarkerPoseEstimationDataEuler poseEstimationData
            ) = Aruco.UEstimateCharucoBoardPose(
                _webCamTexture.GetPixels32(),
                _webCamTexture.width,
                _webCamTexture.height,
                boardParameters,
                calibData.cameraMatrix.NativeDataPointer,
                calibData.distCoeffs.NativeDataPointer
                );

            TransformGameObjects(poseEstimationData, markerData);
        }
    }
Example #19
    /// <summary>
    /// Scans the current camera frame for a QR code.
    /// </summary>
    void CheckQRCode()
    {
        // Color32 array converted from the current camera frame texture
        Color32[] m_colorData = m_webCameraTexture.GetPixels32();

        // Try to decode QR code content from the frame
        var tResult = m_barcodeRender.Decode(m_colorData, m_webCameraTexture.width, m_webCameraTexture.height);

        Debug.Log(m_delayTime);
        if (tResult != null)
        {
            Debug.Log("!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!" + tResult.Text);
            QrScanPanel.SetActive(false);
            ClassPanelControll cpc = Camera.main.GetComponent <ClassPanelControll>();
            cpc.inputClassId.text = tResult.Text;
            cpc.inputClassId.transform.parent.GetComponent <InputField>().text = tResult.Text;
            cpc.addClass();
            CancelInvoke();
        }
        if (tmpTime != m_delayTime)
        {
            tmpTime = m_delayTime;
            CancelInvoke();
            InvokeRepeating("CheckQRCode", 0, m_delayTime);
        }
    }
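CheckQRCode reschedules itself with InvokeRepeating whenever m_delayTime changes, so it is expected to be started the same way; a minimal starter sketch (assuming m_webCameraTexture is created and played elsewhere, not shown in the original example):

    void Start()
    {
        // Kick off the repeating scan; CheckQRCode re-registers itself if m_delayTime changes.
        InvokeRepeating("CheckQRCode", 0, m_delayTime);
    }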
Example #20
    private void Update()
    {
        frames++;
        if (frames % 40 == 0)
        {
            try
            {
                if (!wCamTexture.isPlaying)
                {
                    wCamTexture.Play();
                }

                var result = barcodeReader.Decode(wCamTexture.GetPixels32(),
                                                  wCamTexture.width, wCamTexture.height);
                if (result != null)
                {
                    Debug.Log("DECODED TEXT FROM QR: " + result.Text);
                    if (addingNew)
                    {
                    }
                    else if (manager.CheckID(result.Text, sDatatype, transform.parent.gameObject, nextUI, ErrorUI))
                    {
                        wCamTexture.Stop();
                        if (sDatatype == CheckIdType.package)
                        {
                            tourlist.UpdateList();
                            groundNavigation.updateGroundNavigation();
                        }
                    }
                }
            }
            catch (Exception ex) { Debug.LogWarning(ex.Message); }
            delay = 0.0f;
        }
    }
Example #21
    void Update()
    {
        if (webcamTexture.isPlaying == false)
        {
            webcamTexture.Play();
        }

        color32 = webcamTexture.GetPixels32();


        var texture = new Texture2D(webcamTexture.width, webcamTexture.height, TextureFormat.RGB24, true);

        GetComponent <Renderer>().material.mainTexture = texture;


        texture.SetPixels32(color32);
        texture.Apply();
        var bytes = texture.EncodeToPNG();

        //Destroy(texture);
        // Note: writing under Assets/Resources via Application.dataPath only works in the Editor;
        // use Application.persistentDataPath in a built player.
        File.WriteAllBytes(Application.dataPath + "/Resources/output_images/camera" + i + ".png", bytes);
        lp = i;
        i++;
        if (i == 150)
        {
            i = 0;
        }
    }
Example #22
    public static Color32[] WebCamGetColor32Rotate(WebCamTexture source, bool rotateRight = true)
    {
        Color32[] colorSource = source.GetPixels32();
        Color32[] colorResult = new Color32[colorSource.Length];
        int       count       = 0;
        int       index       = 0;

        for (int i = 0; i < source.width; i++)
        {
            for (int j = 0; j < source.height; j++)
            {
                if (rotateRight == false)
                {
                    index = (source.width * (source.height - j)) - source.width + i;
                }
                else
                {
                    index = (source.width * (j + 1)) - (i + 1);
                }

                colorResult[count] = colorSource[index];
                count++;
            }
        }
        return(colorResult);
    }
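A minimal caller sketch for the helper above (assuming a playing WebCamTexture named webCamTexture; this fragment is not part of the original example): because the source is walked column by column, the returned array fits a texture whose width and height are swapped relative to the source.

        Color32[] rotated = WebCamGetColor32Rotate(webCamTexture, true);
        // The rotated frame is source.height pixels wide and source.width pixels tall.
        Texture2D rotatedTexture = new Texture2D(webCamTexture.height, webCamTexture.width);
        rotatedTexture.SetPixels32(rotated);
        rotatedTexture.Apply();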
Example #23
    private void Update()
    {
        delay += Time.deltaTime;
        if (delay >= 1)
        {
            try
            {
                if (!wCamTexture.isPlaying)
                {
                    wCamTexture.Play();
                }

                IBarcodeReader barcodeReader = new BarcodeReader();
                barcodeReader.Options.PossibleFormats = new System.Collections.Generic.List <BarcodeFormat>();
                barcodeReader.Options.PossibleFormats.Add(BarcodeFormat.QR_CODE);
                barcodeReader.Options.TryHarder = false;

                var result = barcodeReader.Decode(wCamTexture.GetPixels32(),
                                                  wCamTexture.width, wCamTexture.height);
                if (result != null)
                {
                    Debug.Log("DECODED TEXT FROM QR: " + result.Text);
                    if (addingNew)
                    {
                    }
                    else if (manager.CheckID(result.Text, sDatatype, transform.parent.gameObject, nextUI, ErrorUI))
                    {
                        wCamTexture.Stop();
                    }
                }
            }
            catch (Exception ex) { Debug.LogWarning(ex.Message); }
            delay = 0.0f;
        }
    }
Example #24
        private void DrawCamera()
        {
            cameraRenderer.material.mainTexture = camTexture;

            try
            {
                IBarcodeReader barcodeReader = new BarcodeReader();
                var            barcodeResult = barcodeReader.Decode(camTexture.GetPixels32(), camTexture.width, camTexture.height);

                if (barcodeResult != null)
                {
                    QRGenerator.OnResetBarcode?.Invoke();

                    if (!isLoadingBalance)
                    {
                        Server.ProcessTransaction(barcodeResult.Text, stationTeller.GetStationName(), stationMatrix, SetDisplayMessages);
                    }
                    else
                    {
                        Server.LoadBalance(barcodeResult.Text, stationTeller.GetLoadAmount(), SetDisplayMessages);
                    }
                }
            }
            catch (ZXing.FormatException e)
            {
                Debug.LogWarning(e.Message);
            }
        }
Example #25
    void Update()
    {
        if (wtex != null && wtex.isPlaying)
        {
            if (wtex.didUpdateThisFrame)
            {
                if (webtexdata.Length != wtex.width * wtex.height)
                {
                    if (img_handle.IsAllocated)
                    {
                        img_handle.Free();
                    }
                    webtexdata = new Color32[wtex.width * wtex.height];
                    img_handle = GCHandle.Alloc(webtexdata, GCHandleType.Pinned);
                    p_img_ptr  = img_handle.AddrOfPinnedObject();
                }
                wtex.GetPixels32(webtexdata);
                //int[] argb = new int[wtex.width * wtex.height];       // simulate NV21 format
                //for (int i = 0; i < webtexdata.Length; i++)
                //{
                //    argb[i] = 0;
                //    argb[i] |= (webtexdata[i].a << 24);
                //    argb[i] |= (webtexdata[i].r << 16);
                //    argb[i] |= (webtexdata[i].g << 8);
                //    argb[i] |= (webtexdata[i].b);

                //}
                //encodeYUV420SP(img_nv21, argb, wtex.width, wtex.height);
                //UpdateData(p_img_nv21_ptr, 0, wtex.width, wtex.height);
                //UpdateData(p_img_nv21_ptr, (int)wtex.GetNativeTexturePtr(), wtex.width, wtex.height);
                UpdateData(p_img_ptr, 0, wtex.width, wtex.height);
                //UpdateData(IntPtr.Zero, (int)wtex.GetNativeTexturePtr(), wtex.width, wtex.height);
            }
        }
    }
Example #26
    IEnumerator GetQRCode()
    {
        IBarcodeReader barCodeReader = new BarcodeReader();

        webcamTexture.Play();
        var snap = new Texture2D(webcamTexture.width, webcamTexture.height, TextureFormat.ARGB32, false);

        while (string.IsNullOrEmpty(QrCode))
        {
            try
            {
                snap.SetPixels32(webcamTexture.GetPixels32());
                var Result = barCodeReader.Decode(snap.GetRawTextureData(), webcamTexture.width, webcamTexture.height, RGBLuminanceSource.BitmapFormat.ARGB32);
                if (Result != null)
                {
                    QrCode = Result.Text;
                    if (!string.IsNullOrEmpty(QrCode))
                    {
                        Debug.Log("DECODED TEXT FROM QR: " + QrCode);
                        break;
                    }
                }
            }
            catch (Exception ex) { Debug.LogWarning(ex.Message); }
            yield return(null);
        }
        webcamTexture.Stop();
    }
Example #27
    public void TakePicture()
    {
        // Set background.
        Color32[] imagePixels = webcamTexture.GetPixels32();
        Texture2D texture     = new Texture2D(webcamTexture.width, webcamTexture.height);

        texture.SetPixels32(imagePixels);
        texture.Apply();
        Sprite sprite = Sprite.Create(texture, new Rect(0.0f, 0.0f, texture.width, texture.height), new Vector2(0.5f, 0.5f), 100.0f);

        backgroundSprite.sprite = sprite;

        Color32[] colors       = eManager.ApplyOperator(texture);
        Color32[] colorsBefore = texture.GetPixels32();

        for (int i = 0; i < colors.Length; i++)
        {
            if (colors[i].r > Threshold)
            {
                colorsBefore[i].r = 255;
                colorsBefore[i].g = 0;
                colorsBefore[i].b = 0;
            }
        }
        texture.SetPixels32(colorsBefore);
        texture.Apply();

        // Extracting edges.
        eManager.ExtractEdges(imagePixels, webcamTexture.width, webcamTexture.height);
    }
Example #28
    void Update()
    {
        if (c == null)
        {
            c = camTexture.GetPixels32();

            image.texture = camTexture;
            image.material.mainTexture = camTexture;
            print(camTexture.videoRotationAngle);
        }

        // encode the last found

        /*
         * var textForEncoding = LastResult;
         * if (shouldEncodeNow &&
         *  textForEncoding != null)
         * {
         *  var color32 = Encode(textForEncoding, encoded.width, encoded.height);
         *  encoded.SetPixels32(color32);
         *  encoded.Apply();
         *  shouldEncodeNow = false;
         * }
         */
    }
Example #29
 private void TakePic()
 {
     tex = new Texture2D(camWidth, camHeight, TextureFormat.RGB24, false);
     tex.SetPixels32(cam.GetPixels32());
     tex.Apply();
     camImg = tex.GetRawTextureData();
 }
Example #30
    public bool GetData()
    {
        if (m_webcamTexture)
        {
            if (m_webcamTexture.didUpdateThisFrame)
            {
                if (m_webcamTexture.width < 100)
                {
                    return(false);
                }
                else if (!m_initialized)
                {
                    InitializeDataStructures();
                }

                m_webcamTexture.GetPixels32(m_data);
                //l_texture.SetPixels32(m_data);
                //l_texture.Apply();
                return(true);
            }
            else
            {
                return(false);
            }
        }
        return(false);
    }