/// <summary>
/// Closes the current <see cref="CameraDevice"/>, along with its capture session
/// and the YUV <c>ImageReader</c>, under the open/close semaphore.
/// </summary>
/// <exception cref="Java.Lang.RuntimeException">
/// Thrown if the thread is interrupted while waiting to acquire the camera lock.
/// </exception>
public void CloseCamera()
{
    // Track whether the semaphore permit was actually obtained. The original code
    // released in 'finally' unconditionally, so an interrupted Acquire() (which
    // never took a permit) would still Release(), adding an extra permit and
    // allowing two concurrent open/close operations later.
    bool acquired = false;
    try
    {
        _cameraOpenCloseLock.Acquire();
        acquired = true;
        lock (_cameraStateLock)
        {
            // Reset state and clean up resources used by the camera.
            // Note: After calling this, the ImageReaders will be closed after any
            // background tasks saving Images from these readers have been completed.
            _state = STATE_CLOSED;

            if (null != _captureSession)
            {
                _captureSession.Close();
                _captureSession = null;
            }

            if (null != _cameraDevice)
            {
                _cameraDevice.Close();
                _cameraDevice = null;
            }

            if (null != _yuvImageReader)
            {
                _yuvImageReader.Close();
                _yuvImageReader = null;
            }
        }
    }
    catch (Java.Lang.InterruptedException e)
    {
        throw new Java.Lang.RuntimeException("Interrupted while trying to lock camera closing.", e);
    }
    finally
    {
        // Only give the permit back if we actually hold it.
        if (acquired)
        {
            _cameraOpenCloseLock.Release();
        }
    }
}
/// <summary>
/// Sets up state related to the camera that is needed before opening a
/// <see cref="CameraDevice"/>: finds the first camera that reports YUV_420_888
/// output sizes, selects a preview size, and (re)creates the reference-counted
/// YUV <c>ImageReader</c>.
/// </summary>
/// <param name="preferredPreviewSize">
/// Optional target pixel area (width * height). When supplied, the available YUV
/// size whose area is closest to this value is chosen; otherwise the smallest
/// available size is used for performance.
/// </param>
/// <returns><c>true</c> if a suitable camera was found and configured, <c>false</c> otherwise.</returns>
bool SetUpCameraOutputs(int? preferredPreviewSize = null)
{
    CameraManager manager = (CameraManager)Android.App.Application.Context.GetSystemService(Context.CameraService);
    if (manager == null)
    {
        Log.Error(TAG, "This device doesn't support Camera2 API.");
        return false;
    }
    try
    {
        // Find a CameraDevice that supports YUV captures, and configure state.
        foreach (string cameraId in manager.GetCameraIdList())
        {
            CameraCharacteristics characteristics = manager.GetCameraCharacteristics(cameraId);

            StreamConfigurationMap map = (StreamConfigurationMap)characteristics.Get(
                CameraCharacteristics.ScalerStreamConfigurationMap);
            // CameraCharacteristics.Get may return null for keys the device does not
            // report; skip such cameras instead of throwing a NullReferenceException.
            if (map == null)
            {
                continue;
            }

            Android.Util.Size[] yuvs = map.GetOutputSizes((int)ImageFormatType.Yuv420888);
            // GetOutputSizes returns null when the format is unsupported; an empty
            // array would also make the size selection below throw. Try the next camera.
            if (yuvs == null || yuvs.Length == 0)
            {
                continue;
            }

            Android.Util.Size yuvSize;
            if (preferredPreviewSize == null)
            {
                // Choose the smallest size for performance.
                yuvSize = yuvs.OrderByDescending(element => element.Width * element.Height).Last();
            }
            else
            {
                // Choose the size whose pixel area is closest to the requested area.
                yuvSize = yuvs.OrderByDescending(element =>
                    Math.Abs((element.Width * element.Height) - (int)preferredPreviewSize)).Last();
            }

            lock (_cameraStateLock)
            {
                // Place the ImageReader in a reference-counted wrapper so it is only
                // closed once all background tasks using it have finished.
                if (_yuvImageReader == null || _yuvImageReader.GetAndRetain() == null)
                {
                    _yuvImageReader = new RefCountedAutoCloseable<ImageReader>(
                        ImageReader.NewInstance(yuvSize.Width, yuvSize.Height,
                            ImageFormatType.Yuv420888, /*maxImages*/ 5));
                }
                _yuvImageReader.Get().SetOnImageAvailableListener(
                    _onYuvImageAvailableListener, _backgroundHandler);

                _characteristics = characteristics;
                _cameraId = cameraId;
            }
            return true;
        }
    }
    catch (CameraAccessException e)
    {
        e.PrintStackTrace();
    }

    // If we found no suitable cameras for capturing YUV, warn the user.
    Log.Error(TAG, "This device doesn't support capturing YUV photos");
    return false;
}