/// <summary>
/// Starts video recording on an already-created NRVideoCapture instance.
/// Selects the highest-area supported camera resolution and the highest
/// frame rate available at that resolution, starts video mode with
/// application + microphone audio, and hands the preview texture to the
/// Previewer. Logs an error and returns if no capture instance exists.
/// </summary>
public void StartVideoCapture()
{
    // Guard clause: the original version silently did nothing here (while
    // still querying resolutions/frame rates); fail loudly instead.
    if (m_VideoCapture == null)
    {
        Debug.LogError("NRVideoCapture instance is null; create it before calling StartVideoCapture.");
        return;
    }

    // Highest-area resolution the device camera supports.
    Resolution cameraResolution = NRVideoCapture.SupportedResolutions
        .OrderByDescending(res => res.width * res.height)
        .First();
    Debug.Log(cameraResolution);

    // Fastest frame rate available at that resolution.
    int cameraFramerate = NRVideoCapture.GetSupportedFrameRatesForResolution(cameraResolution)
        .OrderByDescending(fps => fps)
        .First();
    Debug.Log(cameraFramerate);

    CameraParameters cameraParameters = new CameraParameters();
    cameraParameters.hologramOpacity = 0.0f;
    cameraParameters.frameRate = cameraFramerate;
    cameraParameters.cameraResolutionWidth = cameraResolution.width;
    cameraParameters.cameraResolutionHeight = cameraResolution.height;
    cameraParameters.pixelFormat = CapturePixelFormat.BGRA32;
    // Blend: composite holograms over the camera feed in the recording.
    cameraParameters.blendMode = BlendMode.Blend;

    m_VideoCapture.StartVideoModeAsync(
        cameraParameters,
        NRVideoCapture.AudioState.ApplicationAndMicAudio,
        OnStartedVideoCaptureMode);

    // NOTE(review): this runs before OnStartedVideoCaptureMode fires, same
    // as the original code — presumably PreviewTexture is valid before video
    // mode starts; confirm against NRSDK documentation.
    Previewer.SetData(m_VideoCapture.PreviewTexture, true);
}
/// <summary>
/// Coroutine that creates an NRVideoCapture instance and immediately starts
/// video mode with the best supported resolution and frame rate, switching
/// to State.Idle once video mode has started.
/// Fix: the original ended with an immediate "yield break", so a caller using
/// "yield return StartCoroutine(CreateVideoCapture())" resumed before the
/// CreateAsync callback had run. This version keeps the coroutine alive until
/// the callback completes (successfully or not), which is backward-compatible
/// for fire-and-forget callers.
/// </summary>
private IEnumerator CreateVideoCapture()
{
    bool finished = false;

    NRVideoCapture.CreateAsync(false, captureObject =>
    {
        if (captureObject == null)
        {
            Debug.LogError("Can't get a NRVideoCapture object.");
            finished = true;
            return;
        }

        // Highest-area resolution, then the fastest frame rate it supports.
        var resolution = NRVideoCapture.SupportedResolutions
            .OrderByDescending(r => r.width * r.height)
            .First();
        var frameRate = NRVideoCapture.GetSupportedFrameRatesForResolution(resolution)
            .OrderByDescending(fps => fps)
            .First();

        var cameraParameters = new CameraParameters
        {
            hologramOpacity = 0.0f,
            frameRate = frameRate,
            cameraResolutionWidth = resolution.width,
            cameraResolutionHeight = resolution.height,
            pixelFormat = CapturePixelFormat.BGRA32,
            // Blend: composite holograms over the camera feed.
            blendMode = BlendMode.Blend
        };

        _videoCapture = captureObject;
        _videoCapture.StartVideoModeAsync(
            cameraParameters,
            NRVideoCapture.AudioState.ApplicationAndMicAudio,
            result => SetState(State.Idle));

        finished = true;
    });

    // Wait for the async creation callback so callers that yield on this
    // coroutine observe a fully created (or explicitly failed) capture object.
    while (!finished)
    {
        yield return null;
    }
}