Example #1
0
        /// <summary>
        /// Picks the highest-resolution capture mode supported by PhotoCapture, records
        /// its dimensions, and notifies subscribers that the camera is ready.
        /// </summary>
        /// <returns>A completed task that always yields <c>true</c>.</returns>
        public Task <bool> Initialize()
        {
            // Choose the mode with the largest pixel count (width * height).
            _cameraResolution = PhotoCapture.SupportedResolutions
                                .OrderByDescending(resolution => resolution.width * resolution.height)
                                .First();

            FrameWidth  = _cameraResolution.width;
            FrameHeight = _cameraResolution.height;

            // Let listeners react to the chosen frame size and pixel format.
            CameraInitialized?.Invoke(this, new CameraInitializedEventArgs(FrameWidth, FrameHeight, _format));

            return Task.FromResult(true);
        }
Example #2
0
        /// <summary>
        /// Initializes the media-capture pipeline (WinMD/UWP builds only) and raises
        /// <c>CameraInitialized</c> with the current frame dimensions and format.
        /// The event is raised regardless of whether capture initialization succeeded.
        /// </summary>
        /// <returns>
        /// The result of the media-capture initialization; always <c>false</c> on
        /// platforms built without WinMD support.
        /// </returns>
        public async Task <bool> Initialize()
        {
#if ENABLE_WINMD_SUPPORT
            bool success = await InitializeMediaCaptureAsyncTask();

            var eventArgs = new CameraInitializedEventArgs(FrameWidth, FrameHeight, _format);
            CameraInitialized?.Invoke(this, eventArgs);
            return success;
#else
            // The dummy await keeps this async method free of the CS1998 warning
            // when the WinMD branch is compiled out.
            return await Task.FromResult(false);
#endif
        }
Example #3
0
        /// <summary>
        /// Prepares the video display once the camera reports its frame size and color
        /// format: selects a shader/texture pair matching the format, wires them to the
        /// display mesh, and scales the display quad to the frame dimensions.
        /// Does nothing when <c>displayVideo</c> is disabled.
        /// </summary>
        /// <param name="sender">Event source (unused).</param>
        /// <param name="args">Frame dimensions and color format of the initialized camera.</param>
        /// <exception cref="InvalidOperationException">
        /// Thrown when <paramref name="args"/> carries a color format this manager cannot display.
        /// </exception>
        private void OnCameraInitialized(object sender, CameraInitializedEventArgs args)
        {
            if (!displayVideo)
            {
                return;
            }
            int         frameWidth  = args.FrameWidth;
            int         frameHeight = args.FrameHeight;
            ColorFormat format      = args.Format;

            switch (format)
            {
            case ColorFormat.RGB:
                _videoDisplayMaterial = new Material(rgbShader);
                _texture = new Texture2D(frameWidth, frameHeight, TextureFormat.RGB24, false);
                break;

            case ColorFormat.Grayscale:
                // Single-channel frames are stored in Alpha8 and rendered via the luminance shader.
                _videoDisplayMaterial = new Material(luminanceShader);
                _texture = new Texture2D(frameWidth, frameHeight, TextureFormat.Alpha8, false);
                break;

            default:
                throw new InvalidOperationException($"Color format {format} not supported by Video Display Manager");
            }

            if (_meshRenderer == null)
            {
                // FIX: this is a failure path (the display can't be wired up) — log it
                // as an error so it is not lost among informational messages.
                Debug.LogError("Could not initialize texture as mesh renderer of video display is not initialized");
                return;
            }

            _meshRenderer.material            = _videoDisplayMaterial;
            _videoDisplayMaterial.mainTexture = _texture;

            if (videoDisplay != null)
            {
                videoDisplay.transform.localScale = new Vector3(frameWidth * -scale, frameHeight * -scale, 1); // -1 flips texture
            }

            _textureInitialized = true;
        }
        /// <summary>
        /// Selects a webcam (preferring the device named "Integrated Camera", falling
        /// back to the first device found), creates a camera texture at the default
        /// 1280x720 resolution, and raises <c>CameraInitialized</c>.
        /// </summary>
        /// <returns>
        /// A completed task yielding <c>false</c> when no webcam is present or the
        /// camera texture could not be created; <c>true</c> otherwise.
        /// </returns>
        public Task <bool> Initialize()
        {
            WebCamDevice[] devices = WebCamTexture.devices;
            _logger.Log($"Found {devices.Length} devices.");
            if (devices.Length == 0)
            {
                return Task.FromResult(false);
            }

            // Prefer the built-in camera when present; otherwise use the first device.
            // NOTE(review): the preferred device name is hard-coded — consider making
            // it configurable instead of editing this literal per setup.
            WebCamDevice device = devices[0];

            foreach (WebCamDevice webcamDevice in devices)
            {
                if (webcamDevice.name == "Integrated Camera")
                {
                    device = webcamDevice;
                    break; // FIX: first match wins; no need to keep scanning
                }
            }

            // Default capture resolution; the driver may deliver a different size.
            _targetVideoWidth  = 1280;
            _targetVideoHeight = 720;

            _cameraTexture = new WebCamTexture(device.name, _targetVideoWidth, _targetVideoHeight);
            if (_cameraTexture == null)
            {
                // FIX: previously execution fell through (past a stray empty statement)
                // and raised CameraInitialized even though no texture existed; fail fast.
                _logger.LogError("Could not create camera texture.");
                return Task.FromResult(false);
            }

            _logger.Log($"Selected {device.name}");

            FrameWidth  = Convert.ToInt32(_targetVideoWidth);
            FrameHeight = Convert.ToInt32(_targetVideoHeight);

            CameraInitializedEventArgs args = new CameraInitializedEventArgs(FrameWidth, FrameHeight, ColorFormat.Unknown);

            CameraInitialized?.Invoke(this, args);
            // FIX: no awaits remain, so return a completed task directly instead of
            // using async/await Task.FromResult (signature to callers is unchanged).
            return Task.FromResult(true);
        }