private void FillFields()
    {
        foreach (WebCamDevice device in WebCamTexture.devices)
        {
            devicesDropdown.options.Add(new TMP_Dropdown.OptionData(device.name));
        }

        // Default to the first reported device (assumes at least one camera is present).
        WebCamDevice currentDevice = WebCamTexture.devices[0];

        ClearAndFillResolutions(currentDevice);

        foreach (string name in QualitySettings.names)
        {
            qualitySettingsDropdown.options.Add(new TMP_Dropdown.OptionData(name));
        }

        targetFrameRateInput.text = (ParamatersManager.Instance.TargetFrameRate != null ? ParamatersManager.Instance.TargetFrameRate.ToString() : string.Empty);
        requestedFpsInput.text    = (ParamatersManager.Instance.RequestedFPS != null ? ParamatersManager.Instance.RequestedFPS.ToString() : string.Empty);
        vSyncCountDropdown.value  = ParamatersManager.Instance.VSyncCount;
        vSyncCountDropdown.onValueChanged.Invoke(vSyncCountDropdown.value);
        qualitySettingsDropdown.value = ParamatersManager.Instance.QualitySettingsLevel;
        qualitySettingsDropdown.RefreshShownValue();
        delayFrameMinInput.text        = ParamatersManager.Instance.DelayFrameMin.ToString();
        decodeIntervalInput.text       = ParamatersManager.Instance.DecodeInterval.ToString();
        parserTryHarderToggle.isOn     = ParamatersManager.Instance.ParserTryHarder;
        webcamFilterModeDropdown.value = ParamatersManager.Instance.WebcamFilterMode;
    }
Example #2
        IEnumerator StartVideo()
        {
            if (WebCamTexture.devices.Length == 0)
            {
                Debug.LogFormat("WebCam device not found");
                yield break;
            }

            yield return(Application.RequestUserAuthorization(UserAuthorization.WebCam));

            if (!Application.HasUserAuthorization(UserAuthorization.WebCam))
            {
                Debug.LogFormat("authorization for using the device is denied");
                yield break;
            }

            WebCamDevice userCameraDevice = WebCamTexture.devices[deviceIndex];

            m_webCamTexture = new WebCamTexture(userCameraDevice.name, streamingSize.x, streamingSize.y, framerate);
            OnUpdateWebCamTexture?.Invoke(m_webCamTexture);
            m_webCamTexture.Play();
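            // Wait until the camera has delivered its first frame before finishing.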
            yield return(new WaitUntil(() => m_webCamTexture.didUpdateThisFrame));

            m_startVideoCorutine = null;
        }
Example #3
    // Use this for initialization
    private IEnumerator Start()
    {
        // Look for a connected camera
        if (WebCamTexture.devices.Length == 0)
        {
            Debug.LogFormat("カメラが見つかりません");
            yield break;
        }

        // Request permission to use the camera
        yield return(Application.RequestUserAuthorization(UserAuthorization.WebCam));

        if (!Application.HasUserAuthorization(UserAuthorization.WebCam))
        {
            Debug.LogFormat("カメラの使用が許可されていません。");
            yield break;
        }

        // Create a texture from the first device found.
        // TODO: allow specifying the frame rate
        WebCamDevice userCameraDevice = WebCamTexture.devices[0];

        m_webCamTexture = new WebCamTexture(userCameraDevice.name, m_width, m_height);

        m_displayUI.texture = m_webCamTexture;

        // Start capturing
        m_webCamTexture.Play();
    }
Example #4
        public async Task SwitchCamera()
        {
            if (activeCamera == null)
            {
                return;
            }

            var _isRunning = activeCamera.isPlaying;

            Dispose();

            var devices = WebCamTexture.devices;

            // Advance to the next camera device, wrapping back to the first.
            cameraIndex  = (cameraIndex + 1) % devices.Length;
            cameraDevice = devices[cameraIndex];

            bool front     = cameraDevice.isFrontFacing;
            int  framerate = requestedFramerate;

#if UNITY_ANDROID && !UNITY_EDITOR
            // Set the requestedFPS parameter to avoid the problem of the WebCamTexture image becoming low light on some Android devices. (Pixel, Pixel 2)
            // https://forum.unity.com/threads/released-opencv-for-unity.277080/page-33#post-3445178
            framerate = front ? 15 : framerate;
#endif
            activeCamera = new WebCamTexture(cameraDevice.name, requestedWidth, requestedHeight, framerate);
            await StartRunning(startCallback, frameCallback);

            if (!_isRunning)
            {
                StopRunning();
            }
        }
Example #5
        IEnumerator Start()
        {
            if (WebCamTexture.devices.Length == 0)
            {
                Debug.LogFormat("WebCam device not found");
                yield break;
            }

            yield return(Application.RequestUserAuthorization(UserAuthorization.WebCam));

            if (!Application.HasUserAuthorization(UserAuthorization.WebCam))
            {
                Debug.LogFormat("authorization for using the device is denied");
                yield break;
            }

            WebCamDevice userCameraDevice = WebCamTexture.devices[deviceIndex];

            m_webCamTexture = new WebCamTexture(userCameraDevice.name, streamingSize.x, streamingSize.y);
            m_webCamTexture.Play();
            yield return(new WaitUntil(() => m_webCamTexture.didUpdateThisFrame));

            m_track = new VideoStreamTrack(gameObject.name, m_webCamTexture);
            RenderStreaming.Instance?.AddVideoStreamTrack(m_track);
        }
Example #6
    void Start()
    {
        WebCamDevice[] devices = WebCamTexture.devices;
        WebCamDevice   cam     = devices[0];

        camTex = new WebCamTexture(cam.name, 200, 200);
        camTex.Play();
        //GetWebCamImage();
        img = new Texture2D(camTex.width, camTex.height);
        //initialize textures
        rImg = new Texture2D(img.width, img.height);
        bImg = new Texture2D(img.width, img.height);
        gImg = new Texture2D(img.width, img.height);
        //kImg = new Texture2D(img.width,img.height);
        //initialize gradient arrays
        rL = new float[img.width, img.height];
        gL = new float[img.width, img.height];
        bL = new float[img.width, img.height];
        //kL = new float[img.width,img.height];
        //ORL = new float[img.width,img.height];
        //ANDL = new float[img.width,img.height];
        SUML = new float[img.width, img.height];
        //AVGL = new float[img.width,img.height];
        //initialize final textures
        //roImg = new Texture2D(img.width,img.height);
        //goImg = new Texture2D(img.width,img.height);
        //boImg = new Texture2D(img.width,img.height);
        //koImg = new Texture2D(img.width,img.height);
        //ORimg = new Texture2D(img.width,img.height);
        //ANDimg = new Texture2D(img.width,img.height);
        SUMimg = new Texture2D(img.width, img.height);
        //AVGimg = new Texture2D(img.width,img.height);
    }
Example #7
    void Start()
    {
        if (WebCamTexture.devices.Length == 0)
        {
            Debug.Log("No devices cameras found");
            return;
        }
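        // Prefer the second listed camera; if that index is out of range, the exception is caught and the first device is used instead.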
        try
        {
            cameraDevice = WebCamTexture.devices[1];
        }
        catch
        {
            cameraDevice = WebCamTexture.devices[0];
        }

        webCamTexture            = new WebCamTexture(cameraDevice.name);
        webCamTexture.filterMode = FilterMode.Trilinear;
        //backCameraTexture = new WebCamTexture(backCameraDevice.name);

        GetComponent <RawImage>().texture = webCamTexture;

        webCamTexture.Play();
        //GetComponent<RawImage> ().SetNativeSize ();

        annotationText.text = PlayerPrefs.GetString("RecordAnnotation", "");
    }
Example #8
    void Start()
    {
        // Check for device cameras
        if (WebCamTexture.devices.Length == 0)
        {
            Debug.Log("No devices cameras found");
            return;
        }

        requestedWidth  = Screen.width;
        requestedHeight = Screen.height;

        // Get the device's cameras and create WebCamTextures with them
        frontCameraDevice  = WebCamTexture.devices.Last();
        backCameraDevice   = WebCamTexture.devices.First();
        frontCameraTexture = new WebCamTexture(frontCameraDevice.name);
        backCameraTexture  = new WebCamTexture(backCameraDevice.name);

        frontCameraTexture.filterMode = FilterMode.Trilinear;
        backCameraTexture.filterMode  = FilterMode.Trilinear;

        // Set the camera to use by default
        frontCameraTexture.Stop();
        SetActiveCamera(backCameraTexture);
    }
Example #9
 void StartCamera()
 {
     WebCamDevice[] devices = WebCamTexture.devices;
     for (var i = 0; i < devices.Length; i++)
     {
         Debug.Log(devices[i].name);
         Debug.Log(devices[i].isFrontFacing);
     }
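     // Prefer a front-facing camera; if none is found, keep the first device.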
     for (int i = 0; i < devices.Length; i++)
     {
         if (devices[i].isFrontFacing)
         {
             usedCamera = devices[i];
             break;
         }
         else
         {
             usedCamera = devices[0];
         }
     }
     webcamTexture = new WebCamTexture(usedCamera.name, 320, 240, 60);
     frame.texture = webcamTexture;
     webcamTexture.Play();
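     // Note: the texture may not contain a real camera frame yet; pixel data is typically valid only after the first frame update.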
     Color[] camPixels = webcamTexture.GetPixels(0, 0, 320, 240);
     for (int i = 0; i < camPixels.Length; i++)
     {
         Network.Instantiate(pixelTracker, transform.position, Quaternion.identity, 0);
     }
     camOn = true;
 }
Example #10
    void Start()
    {
        WebCamDevice[] devices = WebCamTexture.devices;

        if (devices.Length == 0)
        {
            Debug.LogError("no camera detected");
            return;
        }

        for (int i = 0; i < devices.Length; i++)
        {
            WebCamDevice device = devices[i];

            String resolutionStr = "";
            Array.ForEach(device.availableResolutions, r => resolutionStr += " " + r.width + "x" + r.height);

            dropdown.options.Add(new Dropdown.OptionData()
            {
                text = device.name + " f:" + (device.isFrontFacing ? 1 : 0) + resolutionStr + " k:" + device.kind + " dcm:" + device.depthCameraName
            });
        }

        ChangeCamera(4);

        Button btn = takePictureBtn.GetComponent <Button>();

        btn.onClick.AddListener(TakePicture);

        dropdown.onValueChanged.AddListener(ChangeCamera);

        Debug.Log("Initied");
        //adb logcat -s Unity PackageManager dalvikvm DEBUG //adb logcat -v time -s Unity
    }
Example #11
    private IEnumerator Start()
    {
        SetScreenSize();
        LocationManager.Start();

        if (WebCamTexture.devices.Length == 0)
        {
            Debug.LogFormat("カメラのデバイスが無い.");
            yield break;
        }

        yield return Application.RequestUserAuthorization(UserAuthorization.WebCam);
        if (!Application.HasUserAuthorization(UserAuthorization.WebCam))
        {
            Debug.LogFormat("カメラを使うことが許可されていない.");
            yield break;
        }

        // For now, create a texture from the first device found.
        WebCamDevice userCameraDevice = WebCamTexture.devices[0];
        m_webCamTexture = new WebCamTexture(userCameraDevice.name, m_width, m_height);

        m_displayUI.texture = m_webCamTexture;

        // Start capturing
        m_webCamTexture.Play();
    }
Example #12
    /// TODO: use Coroutine and call InitScreen here
    public void ResetScreen(WebCamDevice? device)
    {
        if (isPlaying)
        {
            webCamTexture.Stop();
            webCamTexture = null;
        }

        if (device is WebCamDevice deviceValue)
        {
            webCamDevice = deviceValue;
        }
        else
        {
            return;
        }

        /// TODO: call Application.RequestUserAuthorization
        webCamTexture = new WebCamTexture(webCamDevice.name, Width, Height, FPS);
        WebCamTextureFramePool.Instance.SetDimension(Width, Height);

        try {
            webCamTexture.Play();
            Debug.Log($"WebCamTexture Graphics Format: {webCamTexture.graphicsFormat}");
        } catch (Exception e) {
            Debug.LogWarning(e.ToString());
            return;
        }
    }
Example #13
    // Use this for initialization
    void Awake()
    {
        webCamTex = new WebCamTexture();
        WebCamDevice[] devices        = WebCamTexture.devices;
        WebCamDevice   externalDevice = devices[0];

        // Prefer an external Logitech camera if one is connected; otherwise keep the first device.
        for (int i = 0; i < devices.Length; i++)
        {
            if (devices[i].name.Contains("Logitech"))
            {
                externalDevice = devices[i];
                break;
            }
        }
        webCamTex.deviceName = externalDevice.name;
        webCamTex.Play();
        renderMat = new Material(Shader.Find("Hidden/DrawRed"));
        renderMat.SetFloat("_SaturationThreshold", saturationThreshold);
        renderMat.SetColor("_RenderColor", renderLightColor);

        renderMat.SetFloat("_Increment", 1.0f / (float)Screen.width * (float)outlineSize);
        renderMat.SetColor("_OutlineColor", outlineColor);

        startRenderLightColor = renderLightColor;

        initialized = true;
    }
Example #14
        void Start()
        {
            _screen = GameObject.Find("Screen");
            if (_screen == null)
            {
                return;
            }

            _screenMaterial = GetComponent <Renderer>().material;

            if (WebCamTexture.devices.Length > 0)
            {
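                // In the Editor, use the configured deviceIndex; on a device, always use the first camera.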
                if (Application.isEditor)
                {
                    _device = WebCamTexture.devices[deviceIndex];
                }
                else
                {
                    _device = WebCamTexture.devices[0];
                }

                _texture = new WebCamTexture(_device.name);
                _screenMaterial.mainTexture = _texture;
                _texture.Play();
            }
            else
            {
                Debug.LogError("No camera device found");
            }
        }
Example #15
        public void SetWebcamIndex(int index)
        {
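            // Stop any running texture, wrap the requested index into range, then start the selected device.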
            if (m_WebcamTexture != null)
            {
                m_WebcamTexture.Stop();
            }

            if (index == -1)
            {
                m_WebcamIndex = WebCamTexture.devices.Length - 1;
            }
            else if (index == WebCamTexture.devices.Length)
            {
                m_WebcamIndex = 0;
            }
            else
            {
                m_WebcamIndex = index;
            }

            m_Webcam        = WebCamTexture.devices[m_WebcamIndex];
            m_WebcamTexture = new WebCamTexture(m_Webcam.name, 10, 10);
            m_WebcamTexture.Play();

            m_WebcamRaw = new byte[m_WebcamTexture.width * m_WebcamTexture.height * 3];
            //GameManager.Instance.StoreWebcamSetting(Width, Height);

            Debug.Log("Set webcam device to " + m_Webcam.name);
        }
Example #16
    public void startStop()
    {
        //tex = new WebCamTexture();

        /*
         * tex.Play();
         * if (tex.isPlaying)
         * {
         *  display.texture = tex;
         *  WebCamDevice device = WebCamTexture.devices[currentIndex];
         *  tex = new WebCamTexture(device.name);
         *  display = GetComponent<RawImage>();
         *
         * }
         */
        ButtonClicked();

        // Toggle: if a texture is running, stop and release it; otherwise start the currently selected device.
        if (tex != null)
        {
            display.texture = null;
            tex.Stop();
            tex = null;
        }
        else //if(camshouldstillbeactive==true)
        {
            WebCamDevice device = WebCamTexture.devices[currentIndex];
            tex             = new WebCamTexture(device.name);
            display.texture = tex;
            tex.Play();
        }
    }
Example #17
 private void StartCamera(WebCamDevice device)
 {
     StopCamera();
     isFrontFacing = device.isFrontFacing;
     webCamTexture = new WebCamTexture(device.name, requestSize.x, requestSize.y, requestFps);
     webCamTexture.Play();
 }
Example #18
        private void CreateTextureAndPassToPlugin()
        {
            //Create a texture
            Texture2D tex = new Texture2D(1280, 720, TextureFormat.BGRA32, false);

            // Set point filtering just so we can see the pixels clearly
            tex.filterMode = FilterMode.Point;
            // Call Apply() so it's actually uploaded to the GPU
            tex.Apply();

            // Set texture onto our material
            GameObject.Find("Quad").GetComponent <Renderer>().material.mainTexture = tex;

            //Send Webcam Index associated to the Elgato Device
            WebCamDevice[] devices = WebCamTexture.devices;
            for (int i = 0; i < devices.Length; ++i)
            {
                WebCamDevice device = devices [i];
                print(device.name + " " + i);

                if (device.name.Contains(webcamName))
                {
                    webcamID = i;
                }
            }

            // Pass texture pointer to the plugin
            SetTextureFromUnity(tex.GetNativeTexturePtr(), tex.width, tex.height, webcamID);
        }
Example #19
    private IEnumerator Start()
    {
        btnNext    = GameObject.Find("btnAddItemNext").GetComponent <Button>();
        btnShutter = GameObject.Find("btnShutter").GetComponent <Button>();
        btnClose   = GameObject.Find("btnAddItemClose").GetComponent <Button>();

        // Hook up the button click handlers
        btnNext.onClick.AddListener(onClickAddItemNext);
        btnShutter.onClick.AddListener(onClickShutter);
        btnClose.onClick.AddListener(onClickCloseAddItemCamera);

        if (WebCamTexture.devices.Length == 0)
        {
            Debug.LogFormat("カメラのデバイスが無い");
            yield break;
        }

        yield return(Application.RequestUserAuthorization(UserAuthorization.WebCam));

        if (!Application.HasUserAuthorization(UserAuthorization.WebCam))
        {
            Debug.LogFormat("カメラを使うことが許可されていない");
            yield break;
        }

        // For now, create a texture from the first device found
        WebCamDevice userCameraDevice = WebCamTexture.devices[0];

        m_webCamTexture = new WebCamTexture(userCameraDevice.name, m_width, m_height);

        m_displayUI.texture = m_webCamTexture;

        // Start capturing
        m_webCamTexture.Play();
    }
Example #20
        public static WebCamTexture PrepareWebCamTexture(ref WebCamDevice device, bool frontFace, int width, int height)
        {
            WebCamTexture texture = null;

            // Checks how many and which cameras are available on the device
            for (int cameraIndex = 0; cameraIndex < WebCamTexture.devices.Length; cameraIndex++)
            {
                if (WebCamTexture.devices [cameraIndex].isFrontFacing == frontFace)
                {
                    Debug.Log(cameraIndex + " name " + WebCamTexture.devices [cameraIndex].name
                              + " isFrontFacing " + WebCamTexture.devices [cameraIndex].isFrontFacing);

                    device  = WebCamTexture.devices [cameraIndex];
                    texture = new WebCamTexture(device.name, width, height);
                    break;
                }
            }
            // fallback
            if (texture == null)
            {
                device  = WebCamTexture.devices[0];
                texture = new WebCamTexture(device.name, width, height);
            }
            Debug.Log("width " + texture.width + " height " + texture.height + " fps " + texture.requestedFPS);
            return(texture);
        }
Example #21
        void Start()
        {
            WebCamDevice userCameraDevice = WebCamTexture.devices[0];

            webCamTexture = new WebCamTexture(userCameraDevice.name, width, height);
            webCamTexture.Play();
        }
Example #22
        /// <summary>
        ///		Initializes the network connection.
        /// </summary>
        public void Initialize(int portA = 11002, int portB = 11003)
        {
            // Makes sure the resolutionscale is not out of bounds.
            resolutionScale = Mathf.Clamp01(resolutionScale);

            // Initializes the Networking.
            udpMaster = new UDPMaster();
            udpMaster.Initialize("1.1.1.1", portA, portB);
            udpMaster.AddListener(this);
                        #if UNITY_EDITOR
            udpMaster.LogReceivedMessages = false;
                        #endif

            // Initializes voice recording.
            Microphone.Initialize();
            Microphone.AddListener(this);

            // Initializes audio output for peers' microphone input.
            audioClipOut     = AudioClip.Create("OtherMicrophoneAudio", Microphone.SampleLength, 1, Microphone.RecordingFrequency, false);
            AudioSource.clip = audioClipOut;

            // HACK: This will most definitely break at some point.
            WebCamDevice frontCam = WebCamTexture.devices.Where((WebCamDevice d) => d.isFrontFacing).ToArray()[0];
            OwnFootage      = new WebCamTexture(frontCam.name);
            OwnFootage.name = "Webcamfootage_Self";
            OwnFootage.Play();
        }
Example #23
        void Init()
        {
            HidePreview();
            if (Application.platform != RuntimePlatform.Android)
            {
                GetIDECamera();
            }

            // On Android, use the last rear-facing camera found; otherwise use the last device reported.
            foreach (var device in WebCamTexture.devices)
            {
                if (Application.platform == RuntimePlatform.Android)
                {
                    if (!device.isFrontFacing)
                    {
                        _camDevice = device;
                    }
                }
                else
                {
                    _camDevice = device;
                }
            }
            _camTexture           = new WebCamTexture(_camDevice.name, 1280, 720, 30);
            _renderCanvas.texture = _camTexture;
        }
Example #24
    private IEnumerator Start()
    {
        // Hide the buttons at first
        changeBtnVisible(false);

        if (WebCamTexture.devices.Length == 0)
        {
            Debug.LogFormat("カメラがありません。");
            yield break;
        }

        yield return(Application.RequestUserAuthorization(UserAuthorization.WebCam));

        if (!Application.HasUserAuthorization(UserAuthorization.WebCam))
        {
            Debug.LogFormat("カメラ利用が許可されていません。");
            yield break;
        }

        WebCamDevice userCameraDevice = WebCamTexture.devices[0];

        webcamTexture = new WebCamTexture(userCameraDevice.name, width, height);
        //webcamTexture = new WebCamTexture(userCameraDevice.name);

        cameraPanel.texture = webcamTexture;
        webcamTexture.Play();

        Debug.Log(webcamTexture.width + " " + webcamTexture.height + " " + webcamTexture.requestedFPS);

        // Set up the barcode reader
        this.reader = new WebcamCodeReader();
    }
Example #25
    void Start()
    {
        devices = WebCamTexture.devices;

        foreach (WebCamDevice cam in devices)
        {
            Debug.Log("camFront:" + cam.isFrontFacing);
            if (!cam.isFrontFacing)
            {
                rearCamera = cam;
                break;
            }
        }


        webcamTexture = new WebCamTexture();

        /*
         * Renderer renderer = GetComponent<Renderer>();
         * renderer.material.mainTexture = webcamTexture;
         */
        if (devices.Length > 0)
        {
            webcamTexture.deviceName = devices[0].name;
            webcamTexture.Play();
        }

        //data = new Color32[1280 * 720];
    }
Example #26
    IEnumerator CaptureVideoStart()
    {
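        // When the webcam toggle is off, capture the scene camera's output instead of a physical device.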
        if (!useWebCamToggle.isOn)
        {
            videoStreamTrack    = cam.CaptureStreamTrack(1280, 720, 1000000);
            sourceImage.texture = cam.targetTexture;
            yield break;
        }

        if (WebCamTexture.devices.Length == 0)
        {
            Debug.LogFormat("WebCam device not found");
            yield break;
        }

        yield return(Application.RequestUserAuthorization(UserAuthorization.WebCam));

        if (!Application.HasUserAuthorization(UserAuthorization.WebCam))
        {
            Debug.LogFormat("authorization for using the device is denied");
            yield break;
        }

        WebCamDevice userCameraDevice = WebCamTexture.devices[webCamLListDropdown.value];

        webCamTexture = new WebCamTexture(userCameraDevice.name, 1280, 720, 30);
        webCamTexture.Play();
        yield return(new WaitUntil(() => webCamTexture.didUpdateThisFrame));

        videoStreamTrack    = new VideoStreamTrack("video", webCamTexture);
        sourceImage.texture = webCamTexture;
    }
Example #27
        void Start()
        {
            // Cache the reference to the game world's
            // camera.
            _camera = GetComponent <Camera>();

            houghCircles = new Mat();
            houghLines   = new Mat();

            gyro             = Input.gyro;
            gravityMagnitude = Physics.gravity.magnitude *
                               gravityScale;

#if UNITY_EDITOR
            useFrontFacingCamera = true;
#endif

            // Try to find a (physical) camera that faces
            // the required direction.
            WebCamDevice[] devices    = WebCamTexture.devices;
            int            numDevices = devices.Length;
            for (int i = 0; i < numDevices; i++)
            {
                WebCamDevice device = devices[i];
                if (device.isFrontFacing ==
                    useFrontFacingCamera)
                {
                    string name = device.name;
                    Debug.Log("Selecting camera with " +
                              "index " + i + " and name " +
                              name);
                    webCamTexture = new WebCamTexture(
                        name, preferredCaptureWidth,
                        preferredCaptureHeight,
                        preferredFPS);
                    break;
                }
            }

            if (webCamTexture == null)
            {
                // No camera faces the required direction.
                // Give up.
                Debug.LogError("No suitable camera found");
                Destroy(this);
                return;
            }

            // Ask the camera to start capturing.
            webCamTexture.Play();

            if (gyro != null)
            {
                gyro.enabled = true;
            }

            // Wait for the camera to start capturing.
            // Then, initialize everything else.
            StartCoroutine(Init());
        }
Example #28
    private void SaveParameters()
    {
        WebCamDevice device = WebCamTexture.devices[devicesDropdown.value];

        ParamatersManager.Instance.DeviceName = device.name;
        int pickedResolution = resolutionsDropdown.value;

        if (pickedResolution >= 1)
        {
            ParamatersManager.Instance.Resolution = device.availableResolutions[pickedResolution - 1];
        }

        if (!string.IsNullOrEmpty(targetFrameRateInput.text) && !string.IsNullOrWhiteSpace(targetFrameRateInput.text))
        {
            ParamatersManager.Instance.TargetFrameRate = int.Parse(targetFrameRateInput.text);
        }

        if (!string.IsNullOrEmpty(requestedFpsInput.text) && !string.IsNullOrWhiteSpace(requestedFpsInput.text))
        {
            ParamatersManager.Instance.RequestedFPS = float.Parse(requestedFpsInput.text);
        }

        ParamatersManager.Instance.VSyncCount           = vSyncCountDropdown.value;
        ParamatersManager.Instance.QualitySettingsLevel = qualitySettingsDropdown.value;
        ParamatersManager.Instance.DelayFrameMin        = int.Parse(delayFrameMinInput.text);
        ParamatersManager.Instance.DecodeInterval       = float.Parse(decodeIntervalInput.text, CultureInfo.InvariantCulture);
        ParamatersManager.Instance.ParserTryHarder      = parserTryHarderToggle.isOn;
        ParamatersManager.Instance.WebcamFilterMode     = webcamFilterModeDropdown.value;
        ParamatersManager.Instance.ApplyParamaters      = applySettingsToggle.isOn;

        SceneManager.LoadScene("ScannerScene");
    }
Example #29
        void ReloadCamTexture()
        {
            playing = false;

            _camTexture = null;

            if (_deviceIndex >= 0 && _deviceIndex < WebCamTexture.devices.Length)
            {
                WebCamDevice device = WebCamTexture.devices.ElementAt(_deviceIndex);
                _camTexture = new WebCamTexture(device.name);
                _camTexture.requestedWidth  = 1280;
                _camTexture.requestedHeight = 720;
                _camTexture.requestedFPS    = _targetFrameRate;

                if (_material)
                {
                    _material.mainTexture = _camTexture;
                }

                if (_rawImage)
                {
                    _rawImage.texture = _camTexture;
                }

                _frameNeedsFixing = true;
            }
        }
Example #30
        private IEnumerator Start()
        {
            if (WebCamTexture.devices.Length == 0)
            {
                Debug.LogFormat("カメラがねえ!!");
                yield break;
            }

            yield return(Application.RequestUserAuthorization(UserAuthorization.WebCam));

            if (!Application.HasUserAuthorization(UserAuthorization.WebCam))
            {
                Debug.LogFormat("カメラだめだそうです");
                yield break;
            }

            // For now, create a texture from the first device found.
            WebCamDevice userCameraDevice = WebCamTexture.devices[0];

            m_webCamTexture = new WebCamTexture(userCameraDevice.name, m_width, m_height);

            m_displayUI.texture = m_webCamTexture;

            // Now, start capturing!
            m_webCamTexture.Play();
        }
Example #31
            // Use this for initialization
            void Start()
            {
                // get camera device
                WebCamDevice? device = GetBackCameraDevice();
                if (device == null)
                {
                    // TODO: confirm to return
                    return;
                }
                selected_device_ = (WebCamDevice)device;

                // create custom texture
                camera_texture_ = CreateCameraTexture(selected_device_.name, kCameraFPS);

                PlayCamera(camera_texture_);
            }