// Builds the barcode-scanning pipeline: default camera -> metadata output
// (delegated to the main queue) -> aspect-filling preview layer.
// Returns false when no camera or input is available (e.g. the simulator).
private bool initScanner()
{
    device = AVCaptureDevice.DefaultDeviceWithMediaType(AVMediaType.Video);
    if (device == null)
    {
        this.Debug("AVCaptureDevice is null");
        return false;
    }

    input = AVCaptureDeviceInput.FromDevice(device);
    if (input == null)
    {
        this.Debug("AVCaptureDeviceInput is null");
        return false;
    }

    // Metadata callbacks are delivered on the main queue.
    output = new AVCaptureMetadataOutput();
    output.SetDelegate(this, DispatchQueue.MainQueue);

    session = new AVCaptureSession();
    session.AddInput(input);
    session.AddOutput(output);

    // Barcode types are assigned after the output joins the session.
    output.MetadataObjectTypes = configuration.Barcodes.ConvertToIOS();

    captureVideoPreviewLayer = AVCaptureVideoPreviewLayer.FromSession(session);
    captureVideoPreviewLayer.Frame = CGRect.Empty;
    captureVideoPreviewLayer.VideoGravity = AVLayerVideoGravity.ResizeAspectFill;
    captureVideoPreviewLayer.Connection.VideoOrientation = getDeviceOrientation();
    return true;
}
// Creates the capture session and preview layer for the camera matching
// cameraOptions, then starts previewing. Silently returns when no matching
// device exists or the input cannot be created.
void Initialize()
{
    CaptureSession = new AVCaptureSession();
    previewLayer = new AVCaptureVideoPreviewLayer(CaptureSession)
    {
        Frame = Bounds,
        VideoGravity = AVLayerVideoGravity.ResizeAspectFill
    };

    // Select the camera matching the requested position (front or back).
    var videoDevices = AVCaptureDevice.DevicesWithMediaType(AVMediaType.Video);
    var cameraPosition = (cameraOptions == CameraOptions.Front) ? AVCaptureDevicePosition.Front : AVCaptureDevicePosition.Back;
    var device = videoDevices.FirstOrDefault(d => d.Position == cameraPosition);
    if (device == null)
    {
        return;
    }

    NSError error;
    var input = new AVCaptureDeviceInput(device, out error);
    // FIX: the original ignored the creation error and could add a broken
    // input to the session.
    if (error != null)
    {
        return;
    }
    CaptureSession.AddInput(input);
    Layer.AddSublayer(previewLayer);
    CaptureSession.StartRunning();
    IsPreviewing = true;
}
// Builds the live preview + still-image pipeline on this view and starts it.
public void SetupLiveCameraStream()
{
    CaptureSession = new AVCaptureSession();
    var viewLayer = this.Layer;
    videoPreviewLayer = new AVCaptureVideoPreviewLayer(CaptureSession)
    {
        Frame = this.Frame
    };
    this.Layer.AddSublayer(videoPreviewLayer);

    var captureDevice = AVCaptureDevice.DefaultDeviceWithMediaType(AVMediaType.Video);
    // Guard: no camera available (e.g. the simulator).
    if (captureDevice == null)
    {
        return;
    }
    ConfigureCameraForDevice(captureDevice);
    captureDeviceInput = AVCaptureDeviceInput.FromDevice(captureDevice);
    CaptureSession.AddInput(captureDeviceInput);

    // FIX: the original built this JPEG codec dictionary but then assigned an
    // empty NSDictionary to OutputSettings, so the codec choice was ignored.
    var dictionary = new NSMutableDictionary();
    dictionary[AVVideo.CodecKey] = new NSNumber((int)AVVideoCodec.JPEG);
    stillImageOutput = new AVCaptureStillImageOutput()
    {
        OutputSettings = dictionary
    };
    CaptureSession.AddOutput(stillImageOutput);
    CaptureSession.StartRunning();
}
// Switches between the front and back cameras unless the opposite camera is
// already the one the bound Element requests.
void SwitchCameraType()
{
    // Compute the opposite of the position currently in use.
    var targetPosition = captureDeviceInput.Device.Position == AVCaptureDevicePosition.Front
        ? AVCaptureDevicePosition.Back
        : AVCaptureDevicePosition.Front;

    // Already aligned with what the element asks for — nothing to do.
    bool matchesElement =
        (Element.CameraType == CameraType.Front && targetPosition == AVCaptureDevicePosition.Front) ||
        (Element.CameraType == CameraType.Rear && targetPosition == AVCaptureDevicePosition.Back);
    if (matchesElement)
    {
        return;
    }

    var device = GetCameraForOrientation(targetPosition);
    ConfigureCameraForDevice(device);

    // Swap inputs inside a single configuration transaction.
    captureSession.BeginConfiguration();
    captureSession.RemoveInput(captureDeviceInput);
    captureDeviceInput = AVCaptureDeviceInput.FromDevice(device);
    captureSession.AddInput(captureDeviceInput);
    captureSession.CommitConfiguration();
}
// Swaps the session's camera input for the one on the opposite side and plays
// the flip animation.
private void HandleRotateCamera()
{
    captureSession.BeginConfiguration();

    var currentCameraInput = captureSession.Inputs[0];
    captureSession.RemoveInput(currentCameraInput);

    // Pick the camera on the opposite side of the one currently in use.
    AVCaptureDeviceInput input = (AVCaptureDeviceInput)currentCameraInput;
    AVCaptureDevice camera = input.Device.Position == AVCaptureDevicePosition.Back
        ? CameraWithPosition(AVCaptureDevicePosition.Front)
        : CameraWithPosition(AVCaptureDevicePosition.Back);

    var videoInput = new AVCaptureDeviceInput(camera, out NSError err);
    if (err == null)
    {
        captureSession.AddInput(videoInput);
    }
    else
    {
        // FIX: the original left the session with no input at all when the new
        // input failed to initialize; restore the previous input instead.
        captureSession.AddInput(currentCameraInput);
    }
    captureSession.CommitConfiguration();
    AddFlipAnimation();
}
// Builds and starts the AVFoundation capture pipeline: medium-resolution
// session, default video device capped at 15 FPS (iOS 7+ only), and a BGRA
// video-data output whose sample buffers go to OutputRecorder on a private
// dispatch queue. Returns false when no camera/input exists or the device
// cannot be locked for configuration (e.g. on the simulator).
bool SetupCaptureSession()
{
    // configure the capture session for low resolution, change this if your code
    // can cope with more data or volume
    session = new AVCaptureSession()
    {
        SessionPreset = AVCaptureSession.PresetMedium
    };

    // create a device input and attach it to the session
    var captureDevice = AVCaptureDevice.DefaultDeviceWithMediaType(AVMediaType.Video);
    if (captureDevice == null)
    {
        Console.WriteLine("No captureDevice - this won't work on the simulator, try a physical device");
        return (false);
    }

    // Configure for 15 FPS. Note use of LockForConfiguration()/UnlockForConfiguration()
    NSError error = null;
    captureDevice.LockForConfiguration(out error);
    if (error != null)
    {
        // Lock failed — report the error and make sure the device is released.
        Console.WriteLine(error);
        captureDevice.UnlockForConfiguration();
        return (false);
    }
    // ActiveVideoMinFrameDuration is only available from iOS 7.0 onwards.
    if (UIDevice.CurrentDevice.CheckSystemVersion(7, 0))
    {
        captureDevice.ActiveVideoMinFrameDuration = new CMTime(1, 15);
    }
    captureDevice.UnlockForConfiguration();

    var input = AVCaptureDeviceInput.FromDevice(captureDevice);
    if (input == null)
    {
        Console.WriteLine("No input - this won't work on the simulator, try a physical device");
        return (false);
    }
    session.AddInput(input);

    // create a VideoDataOutput and add it to the sesion
    var output = new AVCaptureVideoDataOutput()
    {
        WeakVideoSettings = new CVPixelBufferAttributes()
        {
            PixelFormatType = CVPixelFormatType.CV32BGRA
        }.Dictionary,
    };

    // configure the output: sample buffers are delivered on a dedicated queue.
    queue = new CoreFoundation.DispatchQueue("myQueue");
    outputRecorder = new OutputRecorder();
    output.SetSampleBufferDelegate(outputRecorder, queue);
    session.AddOutput(output);

    session.StartRunning();
    return (true);
}
// Starts the live camera preview inside liveCameraStream and reveals the
// capture/cancel buttons.
void ShowCameraPreview()
{
    var captureSession = new AVCaptureSession();
    var viewLayer = liveCameraStream.Layer;
    videoPreviewLayer = new AVCaptureVideoPreviewLayer(captureSession)
    {
        Frame = this.View.Frame
    };
    liveCameraStream.Layer.AddSublayer(videoPreviewLayer);

    var captureDevice = AVCaptureDevice.DefaultDeviceWithMediaType(AVMediaType.Video);
    ConfigureCameraForDevice(captureDevice);
    captureDeviceInput = AVCaptureDeviceInput.FromDevice(captureDevice);
    captureSession.AddInput(captureDeviceInput);

    // FIX: the JPEG codec dictionary was built but discarded — OutputSettings
    // received an empty NSDictionary instead.
    var dictionary = new NSMutableDictionary();
    dictionary[AVVideo.CodecKey] = new NSNumber((int)AVVideoCodec.JPEG);
    stillImageOutput = new AVCaptureStillImageOutput()
    {
        OutputSettings = dictionary
    };
    captureSession.AddOutput(stillImageOutput);
    captureSession.StartRunning();

    // Reveal the capture controls once the preview is live.
    captureButton.Hidden = false;
    cancelButton.Hidden = false;
}
// Builds the barcode-scanning pipeline for the requested format, enabling
// continuous autofocus when supported. Returns false when no camera or
// input is available.
private bool InitScanner(BarcodeScanner.BarcodeFormat barcodeType)
{
    device = AVCaptureDevice.GetDefaultDevice(AVMediaType.Video);
    if (device == null)
    {
        return false;
    }

    input = AVCaptureDeviceInput.FromDevice(device);
    // FIX: the original dereferenced input.Device before its null check, which
    // crashes when the input cannot be created; check first.
    if (input == null)
    {
        return false;
    }

    // Prefer continuous autofocus for barcode scanning when supported.
    if (input.Device.IsFocusModeSupported(AVCaptureFocusMode.ContinuousAutoFocus))
    {
        input.Device.LockForConfiguration(out NSError err);
        input.Device.FocusMode = AVCaptureFocusMode.ContinuousAutoFocus;
        input.Device.UnlockForConfiguration();
    }

    output = new AVCaptureMetadataOutput();
    output.SetDelegate(this, DispatchQueue.MainQueue);

    session = new AVCaptureSession();
    session.AddInput(input);
    session.AddOutput(output);
    // Metadata types must be set after the output is attached to the session.
    output.MetadataObjectTypes = GetBarcodeFormat(barcodeType);

    captureVideoPreviewLayer = AVCaptureVideoPreviewLayer.FromSession(session);
    captureVideoPreviewLayer.Frame = CGRect.Empty;
    captureVideoPreviewLayer.VideoGravity = AVLayerVideoGravity.ResizeAspectFill;
    captureVideoPreviewLayer.Connection.VideoOrientation = GetDeviceOrientation();
    return true;
}
// Sets up a portrait, aspect-filling preview for the camera selected by
// CameraOption, plus a JPEG still-image output. Does not start the session.
void Initialize()
{
    CaptureSession = new AVCaptureSession();
    previewLayer = new AVCaptureVideoPreviewLayer(CaptureSession)
    {
        Frame = Bounds,
        VideoGravity = AVLayerVideoGravity.ResizeAspectFill,
        Orientation = AVCaptureVideoOrientation.Portrait
    };

    // Find the camera matching the configured position.
    var desiredPosition = CameraOption == CameraOptions.Front
        ? AVCaptureDevicePosition.Front
        : AVCaptureDevicePosition.Back;
    var device = AVCaptureDevice.DevicesWithMediaType(AVMediaType.Video)
        .FirstOrDefault(d => d.Position == desiredPosition);
    if (device == null)
    {
        return;
    }

    ConfigureCameraForDevice(device);
    captureDeviceInput = AVCaptureDeviceInput.FromDevice(device);
    CaptureSession.AddInput(captureDeviceInput);
    Layer.AddSublayer(previewLayer);

    // Still-image output encoding to JPEG.
    output = new AVCaptureStillImageOutput
    {
        OutputSettings = new NSDictionary(AVVideo.CodecKey, AVVideo.CodecJPEG)
    };
    CaptureSession.AddOutput(output);
}
// Starts the live preview inside liveCameraStream and then shows the usage
// tip. async void is kept because callers treat this as an event-style entry
// point.
public async void SetupLiveCameraStream()
{
    captureSession = new AVCaptureSession();

    var viewLayer = liveCameraStream.Layer;
    var videoPreviewLayer = new AVCaptureVideoPreviewLayer(captureSession)
    {
        Frame = liveCameraStream.Bounds
    };
    liveCameraStream.Layer.AddSublayer(videoPreviewLayer);

    var captureDevice = AVCaptureDevice.DefaultDeviceWithMediaType(AVMediaType.Video);
    ConfigureCameraForDevice(captureDevice);
    captureDeviceInput = AVCaptureDeviceInput.FromDevice(captureDevice);

    // FIX: the JPEG codec dictionary was built but never used (an empty
    // NSDictionary was assigned instead); wire it into the output settings.
    var dictionary = new NSMutableDictionary();
    dictionary[AVVideo.CodecKey] = new NSNumber((int)AVVideoCodec.JPEG);
    stillImageOutput = new AVCaptureStillImageOutput()
    {
        OutputSettings = dictionary
    };

    captureSession.AddOutput(stillImageOutput);
    captureSession.AddInput(captureDeviceInput);
    captureSession.StartRunning();

    await SkinSelfie.Pages.CameraPage.ShowTip();
}
// Initializes the scanning session: checks camera authorization, wires the
// back camera into the session, attaches a metadata output on a dedicated
// queue, adds the preview layer, and starts the session. Any step that cannot
// proceed returns silently; unexpected failures are logged.
private void InitSession()
{
    try
    {
        //init capture session
        _AVSession = new AVCaptureSession();

        //check permissions
        var authorizationStatus = AVCaptureDevice.GetAuthorizationStatus(AVMediaType.Video);
        if (authorizationStatus != AVAuthorizationStatus.Authorized)
        {
            return;
        }

        //check capture camera
        var cameras = AVCaptureDevice.DevicesWithMediaType(AVMediaType.Video);
        var camera = cameras.FirstOrDefault(d => d.Position == AVCaptureDevicePosition.Back);
        if (camera == null)
        {
            return;
        }

        //add input to capture session
        _AVDeviceImput = new AVCaptureDeviceInput(camera, out NSError _);
        if (_AVSession.CanAddInput(_AVDeviceImput))
        {
            _AVSession.AddInput(_AVDeviceImput);
        }
        else
        {
            return;
        }

        //add output to camera session
        _MetadataObjectsQueue = new DispatchQueue("metadata objects queue");
        _AVMetadataOutput = new AVCaptureMetadataOutput();
        if (_AVSession.CanAddOutput(_AVMetadataOutput))
        {
            _AVSession.AddOutput(_AVMetadataOutput);
        }
        else
        {
            return;
        }
        _AVMetadataOutput.SetDelegate(this, _MetadataObjectsQueue);

        //init the video preview layer and add it to the current view
        _AVVideoPeviewLayer = new AVCaptureVideoPreviewLayer(_AVSession);
        _AVVideoPeviewLayer.VideoGravity = AVLayerVideoGravity.ResizeAspectFill;
        _AVVideoPeviewLayer.Frame = Bounds;
        this.Layer.AddSublayer(_AVVideoPeviewLayer);

        //start capture session
        StartSession(true);
    }
    catch (Exception ex)
    {
        // FIX: the original passed ex as a format argument with no {0}
        // placeholder, so the exception text was silently dropped from the log.
        Console.WriteLine("IOS_SCAN | init error: {0}", ex);
    }
}
// Starts the live preview inside liveCameraStream, caching the device's ISO
// and exposure-duration limits for the manual-exposure controls.
public void SetupLiveCameraStream()
{
    captureSession = new AVCaptureSession();
    videoPreviewLayer = new AVCaptureVideoPreviewLayer(captureSession)
    {
        Frame = liveCameraStream.Bounds
    };
    liveCameraStream.Layer.AddSublayer(videoPreviewLayer);

    var captureDevice = AVCaptureDevice.GetDefaultDevice(AVMediaType.Video);
    device = captureDevice;

    // Cache the active format's exposure limits for later manual adjustment.
    maxExposure = device.ActiveFormat.MaxISO;
    minExposure = device.ActiveFormat.MinISO;
    maxDuration = device.ActiveFormat.MaxExposureDuration.Seconds;
    minDuration = device.ActiveFormat.MinExposureDuration.Seconds;

    ConfigureCameraForDevice(captureDevice);
    captureDeviceInput = AVCaptureDeviceInput.FromDevice(captureDevice);

    // FIX: the JPEG codec dictionary was built but never used — an empty
    // NSDictionary was assigned to OutputSettings instead.
    var dictionary = new NSMutableDictionary();
    dictionary[AVVideo.CodecKey] = new NSNumber((int)AVVideoCodec.JPEG);
    stillImageOutput = new AVCaptureStillImageOutput()
    {
        OutputSettings = dictionary
    };

    captureSession.AddOutput(stillImageOutput);
    captureSession.AddInput(captureDeviceInput);
    captureSession.StartRunning();
}
// Switches between the front and back cameras, tracking selfie state.
public void ToggleFrontBackCamera()
{
    // Flip the target position and record whether we are now in selfie mode.
    AVCaptureDevicePosition targetPosition;
    if (captureDeviceInput.Device.Position == AVCaptureDevicePosition.Front)
    {
        targetPosition = AVCaptureDevicePosition.Back;
        isSelfie = false;
    }
    else
    {
        targetPosition = AVCaptureDevicePosition.Front;
        isSelfie = true;
    }

    var device = GetCameraForOrientation(targetPosition);
    ConfigureCameraForDevice(device);

    // Replace the session input inside a single configuration transaction.
    captureSession.BeginConfiguration();
    captureSession.RemoveInput(captureDeviceInput);
    captureDeviceInput = AVCaptureDeviceInput.FromDevice(device);
    captureSession.AddInput(captureDeviceInput);
    captureSession.CommitConfiguration();
}
#pragma warning restore CS4014
// Configures the capture pipeline inside CameraFeedView. In movie mode it adds
// microphone input, a movie-file output at 720p and auto video stabilization;
// in photo mode it adds a high-resolution photo output. Finally reveals the
// shutter button and starts the session.
private void SetupLiveCameraStream()
{
    captureSession = new AVCaptureSession();
    var viewLayer = CameraFeedView.Layer;
    videoPreviewLayer = new AVCaptureVideoPreviewLayer(captureSession)
    {
        Frame = CameraFeedView.Frame,
        VideoGravity = AVLayerVideoGravity.ResizeAspectFill
    };
    CameraFeedView.Layer.AddSublayer(videoPreviewLayer);

    AVCaptureDevice captureDevice = AVCaptureDevice.GetDefaultDevice(AVMediaType.Video);
    ConfigureCameraForDevice(captureDevice);
    captureDeviceInput = AVCaptureDeviceInput.FromDevice(captureDevice);

    // FIX: both branches below call CommitConfiguration, but the original
    // never called BeginConfiguration — balance the transaction here.
    captureSession.BeginConfiguration();
    captureSession.AddInput(captureDeviceInput);

    if (isMovie)
    {
        // Add audio
        var audioDevice = AVCaptureDevice.GetDefaultDevice(AVMediaTypes.Audio);
        var audioDeviceInput = AVCaptureDeviceInput.FromDevice(audioDevice, out NSError audioErr);
        if (audioErr != null)
        {
            Console.WriteLine("Couldn't create audio device input: " + audioErr.LocalizedDescription);
        }
        // FIX: guard against a null audio input before querying the session,
        // which would otherwise throw when input creation failed.
        if (audioDeviceInput != null && captureSession.CanAddInput(audioDeviceInput))
        {
            captureSession.AddInput(audioDeviceInput);
        }
        else
        {
            Console.WriteLine("Couldn't add audio input to session");
        }

        movieOutput = new AVCaptureMovieFileOutput();
        captureSession.AddOutput(movieOutput);
        captureSession.SessionPreset = AVCaptureSession.Preset1280x720;

        // Enable automatic stabilization when the video connection supports it.
        var connection = movieOutput.ConnectionFromMediaType(AVMediaType.Video);
        if (connection != null && connection.SupportsVideoStabilization)
        {
            connection.PreferredVideoStabilizationMode = AVCaptureVideoStabilizationMode.Auto;
        }
        captureSession.CommitConfiguration();
    }
    else
    {
        stillImageOutput = new AVCapturePhotoOutput();
        stillImageOutput.IsHighResolutionCaptureEnabled = true;
        stillImageOutput.IsLivePhotoCaptureEnabled = false;
        captureSession.AddOutput(stillImageOutput);
        captureSession.CommitConfiguration();
    }

    ShutterButton.Hidden = false;
    captureSession.StartRunning();
}
// Creates and configures the photo-preset capture session for this.device.
// Throws InvalidOperationException when a session already exists and
// NSErrorException when the device input cannot be created.
private void InitializeSession()
{
    if (this.session != null)
    {
        throw new InvalidOperationException("A session is currently active.");
    }

    this.session = new AVCaptureSession();
    this.session.BeginConfiguration();
    this.session.SessionPreset = AVCaptureSession.PresetPhoto;

    NSError error;
    AVCaptureDeviceInput deviceInput = AVCaptureDeviceInput.FromDevice(this.device, out error);
    if (deviceInput == null)
    {
        // FIX: the original threw while leaving this.session assigned and
        // mid-configuration, so every subsequent call failed with
        // "A session is currently active." Roll back before throwing.
        this.session.CommitConfiguration();
        this.session.Dispose();
        this.session = null;
        throw new NSErrorException(error);
    }
    this.session.AddInput(deviceInput);

    this.InitializeOutput();
    this.session.AddOutput(this.output);
    session.CommitConfiguration();
}
/// <summary>
/// Initializes this instance: builds the capture session and aspect-filling
/// preview layer, attaches the default camera, and starts previewing.
/// Logs and returns when no device exists or the input cannot be created.
/// </summary>
private void Initialize()
{
    var captureSession = new AVCaptureSession();
    _previewLayer = new AVCaptureVideoPreviewLayer(captureSession)
    {
        VideoGravity = AVLayerVideoGravity.ResizeAspectFill,
        Frame = Bounds
    };

    var device = AVCaptureDevice.DefaultDeviceWithMediaType(AVMediaType.Video);
    if (device == null)
    {
        System.Diagnostics.Debug.WriteLine("No device detected.");
        return;
    }

    NSError error;
    var input = new AVCaptureDeviceInput(device, out error);
    // FIX: the original ignored the creation error and could hand the session
    // an unusable input.
    if (error != null)
    {
        System.Diagnostics.Debug.WriteLine("Could not create capture input: " + error.LocalizedDescription);
        return;
    }
    captureSession.AddInput(input);
    Layer.AddSublayer(_previewLayer);
    captureSession.StartRunning();
}
// Switches the session's video input to the camera matching cameraOptions,
// restoring the previous input when the replacement cannot be added.
void UpdateCameraOption()
{
    // Discover wide-angle and dual cameras at any position.
    var discovery = AVCaptureDeviceDiscoverySession.Create(
        new AVCaptureDeviceType[] { AVCaptureDeviceType.BuiltInWideAngleCamera, AVCaptureDeviceType.BuiltInDualCamera },
        AVMediaType.Video,
        AVCaptureDevicePosition.Unspecified
    );

    var desiredPosition = cameraOptions == CameraOptions.Front
        ? AVCaptureDevicePosition.Front
        : AVCaptureDevicePosition.Back;
    var device = discovery.Devices.FirstOrDefault(d => d.Position == desiredPosition);
    if (device == null)
    {
        return;
    }

    var replacementInput = AVCaptureDeviceInput.FromDevice(device);

    captureSession.BeginConfiguration();
    // Remove the existing device input first, since using the front and back camera simultaneously is not supported.
    captureSession.RemoveInput(videoDeviceInput);
    if (captureSession.CanAddInput(replacementInput))
    {
        captureSession.AddInput(replacementInput);
        videoDeviceInput = replacementInput;
    }
    else
    {
        // New input rejected — restore the previous one.
        captureSession.AddInput(videoDeviceInput);
    }
    captureSession.CommitConfiguration();
}
// Starts the live preview inside liveCameraStream and forces a layout pass.
public void SetupLiveCameraStream()
{
    captureSession = new AVCaptureSession();

    var viewLayer = liveCameraStream.Layer;
    Console.WriteLine(viewLayer.Frame.Width);
    var videoPreviewLayer = new AVCaptureVideoPreviewLayer(captureSession)
    {
        Frame = liveCameraStream.Bounds
    };
    liveCameraStream.Layer.AddSublayer(videoPreviewLayer);
    Console.WriteLine(liveCameraStream.Layer.Frame.Width);

    var captureDevice = AVCaptureDevice.DefaultDeviceWithMediaType(AVMediaType.Video);
    ConfigureCameraForDevice(captureDevice);
    captureDeviceInput = AVCaptureDeviceInput.FromDevice(captureDevice);

    // FIX: the JPEG codec dictionary was built but never used — the output got
    // an empty NSDictionary instead.
    var dictionary = new NSMutableDictionary();
    dictionary[AVVideo.CodecKey] = new NSNumber((int)AVVideoCodec.JPEG);
    stillImageOutput = new AVCaptureStillImageOutput()
    {
        OutputSettings = dictionary
    };

    captureSession.AddOutput(stillImageOutput);
    captureSession.AddInput(captureDeviceInput);
    captureSession.StartRunning();
    ViewWillLayoutSubviews();
}
// Sets up a 720p video-data capture pipeline whose frames are handled by a
// CaptureVideoDelegate, then starts the session.
public void RecordVideoToPath(UIViewController ViewController, string VideoPath)
{
    // setup capture device
    AVCaptureDevice videoRecordingDevice = AVCaptureDevice.DefaultDeviceWithMediaType(AVMediaType.Video);
    NSError error;
    AVCaptureDeviceInput videoInput = new AVCaptureDeviceInput(videoRecordingDevice, out error);

    // create and assign a capture session
    AVCaptureSession captureSession = new AVCaptureSession();
    captureSession.SessionPreset = AVCaptureSession.Preset1280x720;
    captureSession.AddInput(videoInput);

    // Create capture device output
    AVCaptureVideoDataOutput videoOutput = new AVCaptureVideoDataOutput();
    captureSession.AddOutput(videoOutput);
    videoOutput.VideoSettings.PixelFormat = CVPixelFormatType.CV32BGRA;
    videoOutput.MinFrameDuration = new CMTime(1, 30);

    // FIX: the original registered the delegate before constructing it, so the
    // output was wired to a null delegate; create it first, then register.
    captureVideoDelegate = new CaptureVideoDelegate(ViewController);
    videoOutput.SetSampleBufferDelegatequeue(captureVideoDelegate, System.IntPtr.Zero);

    // Start capture session
    captureSession.StartRunning();
}
// Idempotently prepares the photo capture session: creates the session and
// photo output once, refreshes the preview sizing/orientation every call, and
// reports any failure through the camera module's error message.
private void SetupCamera()
{
    try
    {
        // Lazily create the photo-preset session.
        if (_captureSession == null)
        {
            _captureSession = new AVCaptureSession
            {
                SessionPreset = AVCaptureSession.PresetPhoto
            };
        }

        SetPreviewSizing();
        SetPreviewOrientation();

        // First call only: resolve the default camera and wire up the output.
        if (_photoOutput == null)
        {
            _device = AVCaptureDevice.GetDefaultDevice(AVMediaTypes.Video);
            TurnOffFlashAndSetContinuousAutoMode(_device);

            _photoOutput = new AVCapturePhotoOutput
            {
                IsHighResolutionCaptureEnabled = true
            };
            _captureSession.AddOutput(_photoOutput);
            _captureSession.AddInput(AVCaptureDeviceInput.FromDevice(_device));
        }
    }
    catch (Exception e)
    {
        // Surface setup failures on the camera module rather than crashing.
        _cameraModule.ErrorMessage = e.ToString();
    }
}
/// <summary>
/// Set the device's focus settings
/// </summary>
/// <returns><see cref="T:ChilliSource.Mobile.Core.OperationResult"/> instance indicating the outcome of the operation</returns>
/// <param name="videoDeviceInput">Video device input.</param>
/// <param name="focusMode">Focus mode.</param>
/// <param name="exposureMode">Exposure mode.</param>
/// <param name="pointOfInterest">Point of interest</param>
/// <param name="monitorSubjectAreaChange">If set to <c>true</c> monitor subject area change.</param>
public static OperationResult UpdateFocus(this AVCaptureDeviceInput videoDeviceInput, AVCaptureFocusMode focusMode, AVCaptureExposureMode exposureMode, CGPoint pointOfInterest, bool monitorSubjectAreaChange)
{
    if (videoDeviceInput == null)
    {
        return OperationResult.AsFailure("device input is null");
    }

    AVCaptureDevice device = videoDeviceInput.Device;

    // The device must be locked before any focus/exposure mutation.
    NSError error;
    if (!device.LockForConfiguration(out error))
    {
        return OperationResult.AsFailure(string.Format("Could not lock device for configuration: {0}", error));
    }

    // Apply the focus point and mode only when the hardware supports both.
    if (device.FocusPointOfInterestSupported && device.IsFocusModeSupported(focusMode))
    {
        device.FocusPointOfInterest = pointOfInterest;
        device.FocusMode = focusMode;
    }

    // Likewise for exposure.
    if (device.ExposurePointOfInterestSupported && device.IsExposureModeSupported(exposureMode))
    {
        device.ExposurePointOfInterest = pointOfInterest;
        device.ExposureMode = exposureMode;
    }

    device.SubjectAreaChangeMonitoringEnabled = monitorSubjectAreaChange;
    device.UnlockForConfiguration();
    return OperationResult.AsSuccess();
}
// Starts a BGRA video-data capture session feeding OutputRecorder and attaches
// it to contentLayer. No-op when contentLayer has not been created yet.
private void TryStart()
{
    if (contentLayer == null)
    {
        return;
    }

    session = new AVCaptureSession();

    // Default camera feeds the session.
    var camera = AVCaptureDevice.DefaultDeviceWithMediaType(AVMediaType.Video);
    var input = AVCaptureDeviceInput.FromDevice(camera);
    session.AddInput(input);

    // create a VideoDataOutput and add it to the sesion
    var settings = new CVPixelBufferAttributes
    {
        PixelFormatType = CVPixelFormatType.CV32BGRA
    };
    using (var output = new AVCaptureVideoDataOutput { WeakVideoSettings = settings.Dictionary })
    {
        queue = new DispatchQueue("s4mQueue");
        outputRecorder = new OutputRecorder();
        output.SetSampleBufferDelegate(outputRecorder, queue);
        session.AddOutput(output);
    }

    this.contentLayer.Session = session;
    session.StartRunning();
}
// Starts the live preview inside _liveCameraStream, observing its bounds so
// the preview layer can track size changes.
void SetupLiveCameraStream()
{
    _captureSession = new AVCaptureSession();

    var viewLayer = _liveCameraStream.Layer;
    _videoPreviewLayer = new AVCaptureVideoPreviewLayer(_captureSession)
    {
        Frame = _liveCameraStream.Bounds
    };
    // Track bounds changes so the preview can be resized by the observer.
    _liveCameraStream.AddObserver("bounds", NSKeyValueObservingOptions.New, ObservedBoundsChange);
    _liveCameraStream.Layer.AddSublayer(_videoPreviewLayer);

    var captureDevice = AVCaptureDevice.DefaultDeviceWithMediaType(AVMediaType.Video);
    ConfigureCameraForDevice(captureDevice);
    _captureDeviceInput = AVCaptureDeviceInput.FromDevice(captureDevice);

    // FIX: the JPEG codec dictionary was never assigned to the output — an
    // empty NSDictionary was used instead.
    var dictionary = new NSMutableDictionary();
    dictionary[AVVideo.CodecKey] = new NSNumber((int)AVVideoCodec.JPEG);
    _stillImageOutput = new AVCaptureStillImageOutput
    {
        OutputSettings = dictionary
    };
    _captureSession.AddOutput(_stillImageOutput);
    _captureSession.AddInput(_captureDeviceInput);
    _captureSession.StartRunning();
}
// Factory for a face-detecting CaptureSession backed by the front camera.
// Returns null when no front camera or input is available.
public static CaptureSession Create(SelfView parent)
{
    // create a device input and attach it to the session
    var captureDevice = AVCaptureDevice.DevicesWithMediaType(AVMediaType.Video)
        .FirstOrDefault(d => d.Position == AVCaptureDevicePosition.Front);
    if (captureDevice == null)
    {
        return null;
    }

    var input = AVCaptureDeviceInput.FromDevice(captureDevice);
    if (input == null)
    {
        return null;
    }

    var output = new AVCaptureMetadataOutput();
    var cs = new CaptureSession(parent, input, output);

    // This must be set after the output is added to the sesssion
    output.MetadataObjectTypes = AVMetadataObjectType.Face;

    return cs;
}
// Builds and starts a QR-code scanning session with an aspect-filling preview
// layer; any setup failure is logged and swallowed.
public void Start()
{
    captureSession = new AVCaptureSession();
    previewLayer = new AVCaptureVideoPreviewLayer(captureSession)
    {
        VideoGravity = AVLayerVideoGravity.ResizeAspectFill,
    };

    try
    {
        // Wire default camera -> session -> metadata output for QR detection.
        var captureDevice = AVCaptureDevice.GetDefaultDevice(AVMediaTypes.Video);
        var input = AVCaptureDeviceInput.FromDevice(captureDevice);
        var output = new AVCaptureMetadataOutput();
        var queue = new DispatchQueue("qrQueue");

        captureSession.AddInput(input);
        captureSession.AddOutput(output);

        // Callbacks arrive on the dedicated queue; only QR codes are reported.
        output.SetDelegate(this, queue);
        output.MetadataObjectTypes = AVMetadataObjectType.QRCode;

        Layer.AddSublayer(previewLayer);
        captureSession.StartRunning();
    }
    catch (Exception e)
    {
        Console.WriteLine(e);
    }
}
// Swaps the session's current input(s) for the camera chosen by pickCamera(),
// restoring the old inputs when no replacement is available. Runs inside a
// BeginConfiguration/CommitConfiguration transaction.
void updateCameraSelection()
{
    session.BeginConfiguration();

    // Detach every existing input, remembering them so they can be restored.
    AVCaptureInput[] oldInputs = session.Inputs;
    foreach (var oldInput in oldInputs)
    {
        session.RemoveInput(oldInput);
    }

    AVCaptureDeviceInput input = pickCamera();
    if (input == null)
    {
        // No usable camera — put the previous inputs back.
        foreach (var oldInput in oldInputs)
        {
            session.AddInput(oldInput);
        }
    }
    else
    {
        session.AddInput(input);
        device = input.Device;

        // NOTE(review): the device is locked for configuration here but never
        // unlocked in this method — presumably updateAVFoundationFaceDetection
        // depends on the lock being held; confirm it is released elsewhere.
        NSError error;
        if (!device.LockForConfiguration(out error))
        {
            Console.WriteLine("Could not lock for device: " + error.LocalizedDescription);
        }

        updateAVFoundationFaceDetection();
    }

    session.CommitConfiguration();
}
// Resolves the default camera, attaches it to the session when possible, and
// creates a portrait, letterboxing preview layer. Errors are logged and the
// method returns early.
void SetupCaptureDevice()
{
    captureDevice = AVCaptureDevice.GetDefaultDevice(AVMediaTypes.Video);
    if (captureDevice == null)
    {
        Console.WriteLine("Error: no video devices available");
        return;
    }

    videoDeviceInput = AVCaptureDeviceInput.FromDevice(captureDevice);
    if (videoDeviceInput == null)
    {
        Console.WriteLine("Error: could not create AVCaptureDeviceInput");
        return;
    }

    if (captureSession.CanAddInput(videoDeviceInput))
    {
        captureSession.AddInput(videoDeviceInput);
    }

    // Portrait preview that letterboxes rather than crops.
    previewLayer = AVCaptureVideoPreviewLayer.FromSession(captureSession);
    previewLayer.VideoGravity = AVLayerVideoGravity.ResizeAspect;
    previewLayer.Connection.VideoOrientation = AVCaptureVideoOrientation.Portrait;
}
// Tears down the capture pipeline: detaches the input from the session, then
// disposes input, session and still-image output, nulling each field.
protected override void Dispose(bool disposing)
{
    var input = captureDeviceInput;
    var session = captureSession;

    // Detach the input from the session before releasing either.
    if (input != null && session != null)
    {
        session.RemoveInput(input);
    }

    if (input != null)
    {
        input.Dispose();
        captureDeviceInput = null;
    }

    if (session != null)
    {
        session.StopRunning();
        session.Dispose();
        captureSession = null;
    }

    var output = stillImageOutput;
    if (output != null)
    {
        output.Dispose();
        stillImageOutput = null;
    }

    base.Dispose(disposing);
}
// Creates the capture session and preview for the camera selected by
// cameraOptions, starts previewing, and attaches a JPEG still-image output.
void Initialize()
{
    CaptureSession = new AVCaptureSession();
    previewLayer = new AVCaptureVideoPreviewLayer(CaptureSession)
    {
        Frame = Bounds,
        VideoGravity = AVLayerVideoGravity.ResizeAspectFill
    };

    var videoDevices = AVCaptureDevice.DevicesWithMediaType(AVMediaType.Video);
    var cameraPosition = (cameraOptions == CameraOptions.Front) ? AVCaptureDevicePosition.Front : AVCaptureDevicePosition.Back;
    var device = videoDevices.FirstOrDefault(d => d.Position == cameraPosition);
    if (device == null)
    {
        return;
    }

    NSError error;
    var input = new AVCaptureDeviceInput(device, out error);
    // FIX: the original ignored the creation error and could add a broken input.
    if (error != null)
    {
        return;
    }
    CaptureSession.AddInput(input);
    Layer.AddSublayer(previewLayer);
    CaptureSession.StartRunning();

    // FIX: the JPEG codec dictionary was built but never assigned to the
    // output, leaving the still-image output with default settings.
    outputSession = new AVCaptureStillImageOutput();
    var dict = new NSMutableDictionary();
    dict[AVVideo.CodecKey] = new NSNumber((int)AVVideoCodec.JPEG);
    outputSession.OutputSettings = dict;
    CaptureSession.AddOutput(outputSession);
}
// Switches between the front and back cameras and updates the toggle button's
// artwork to show which camera a further tap will select.
public void ToggleFrontBackCamera()
{
    // Determine the opposite camera and swap the toggle button image.
    AVCaptureDevicePosition targetPosition;
    if (captureDeviceInput.Device.Position == AVCaptureDevicePosition.Front)
    {
        targetPosition = AVCaptureDevicePosition.Back;
        toggleCameraButton.SetBackgroundImage(frontCameraIcon, UIControlState.Normal);
    }
    else
    {
        targetPosition = AVCaptureDevicePosition.Front;
        toggleCameraButton.SetBackgroundImage(rearCameraIcon, UIControlState.Normal);
    }

    var device = GetCameraForOrientation(targetPosition);
    ConfigureCameraForDevice(device);

    // Replace the session input atomically.
    captureSession.BeginConfiguration();
    captureSession.RemoveInput(captureDeviceInput);
    captureDeviceInput = AVCaptureDeviceInput.FromDevice(device);
    captureSession.AddInput(captureDeviceInput);
    captureSession.CommitConfiguration();
}
// Starts the photo-preset live preview inside liveCameraStream with a JPEG
// still-image output.
public void SetupLiveCameraStream()
{
    captureSession = new AVCaptureSession();
    //.PresetPhoto for camera image feed
    captureSession.SessionPreset = AVCaptureSession.PresetPhoto;

    var viewLayer = liveCameraStream.Layer;
    videoPreviewLayer = new AVCaptureVideoPreviewLayer(captureSession)
    {
        Frame = this.View.Frame,
    };
    liveCameraStream.Layer.AddSublayer(videoPreviewLayer);

    var captureDevice = AVCaptureDevice.GetDefaultDevice(AVMediaType.Video);
    ConfigureCameraForDevice(captureDevice);
    captureDeviceInput = AVCaptureDeviceInput.FromDevice(captureDevice);
    captureSession.AddInput(captureDeviceInput);

    // FIX: the JPEG codec dictionary was created but an empty NSDictionary was
    // assigned to OutputSettings; use the dictionary so stills encode as JPEG.
    var dictionary = new NSMutableDictionary
    {
        [AVVideo.CodecKey] = new NSNumber((int)AVVideoCodec.JPEG)
    };
    stillImageOutput = new AVCaptureStillImageOutput
    {
        OutputSettings = dictionary
    };
    captureSession.AddOutput(stillImageOutput);
    captureSession.StartRunning();
}
// Returns a session-compatible input for the first back-facing camera, or
// null. Errors creating a candidate input are reported immediately; the
// generic "no camera" message is shown only when no specific error was.
AVCaptureDeviceInput pickCamera()
{
    var desiredPosition = AVCaptureDevicePosition.Back;
    var hadError = false;

    foreach (var candidate in AVCaptureDevice.DevicesWithMediaType(AVMediaType.Video))
    {
        if (candidate.Position != desiredPosition)
        {
            continue;
        }

        NSError error = null;
        var candidateInput = AVCaptureDeviceInput.FromDevice(candidate, out error);
        if (error != null)
        {
            hadError = true;
            displayErrorOnMainQueue(error, "Could not initialize for AVMediaTypeVideo");
        }
        else if (session.CanAddInput(candidateInput))
        {
            return candidateInput;
        }
    }

    if (!hadError)
    {
        displayErrorOnMainQueue(null, "No camera found for requested orientation");
    }
    return null;
}
// Wraps the configured device in a capture input and attaches it to the
// session; throws when the input cannot be created.
void CreateInput ()
{
    NSError error;
    input = AVCaptureDeviceInput.FromDevice (device, out error);
    if (input == null)
    {
        throw new Exception ($"Could not capture from {device} Error: {error}");
    }
    session.AddInput (input);
}
// Flips the session to the camera on the opposite side of the current input.
partial void SwitchCameraButtonTapped (UIButton sender)
{
    var targetPosition = captureDeviceInput.Device.Position == AVCaptureDevicePosition.Front
        ? AVCaptureDevicePosition.Back
        : AVCaptureDevicePosition.Front;

    var device = GetCameraForOrientation (targetPosition);
    ConfigureCameraForDevice (device);

    // Replace the input inside one configuration transaction.
    captureSession.BeginConfiguration ();
    captureSession.RemoveInput (captureDeviceInput);
    captureDeviceInput = AVCaptureDeviceInput.FromDevice (device);
    captureSession.AddInput (captureDeviceInput);
    captureSession.CommitConfiguration ();
}
// Creates the capture session and aspect-filling preview for the camera
// matching cameraOptions, then starts previewing. Returns silently when no
// matching device exists or the input cannot be created.
void Initialize ()
{
    CaptureSession = new AVCaptureSession ();
    previewLayer = new AVCaptureVideoPreviewLayer (CaptureSession) {
        Frame = Bounds,
        VideoGravity = AVLayerVideoGravity.ResizeAspectFill
    };

    var videoDevices = AVCaptureDevice.DevicesWithMediaType (AVMediaType.Video);
    var cameraPosition = (cameraOptions == CameraOptions.Front) ? AVCaptureDevicePosition.Front : AVCaptureDevicePosition.Back;
    var device = videoDevices.FirstOrDefault (d => d.Position == cameraPosition);
    if (device == null) {
        return;
    }

    NSError error;
    var input = new AVCaptureDeviceInput (device, out error);
    // FIX: the original never checked the creation error and could add an
    // invalid input to the session.
    if (error != null) {
        return;
    }
    CaptureSession.AddInput (input);
    Layer.AddSublayer (previewLayer);
    CaptureSession.StartRunning ();
    IsPreviewing = true;
}
// Configures the video-recording screen when the view loads: resets UI state,
// builds an AVCaptureSession with camera + microphone inputs, layers a preview
// behind the interface-builder controls, attaches a movie-file output capped
// at ~10000 seconds, starts the session, and wires the record button.
public override void ViewDidLoad()
{
    base.ViewDidLoad ();
    weAreRecording = false;
    lblError.Hidden = true;
    btnStartRecording.SetTitle("Start Recording", UIControlState.Normal);

    //Set up session
    session = new AVCaptureSession ();

    //Set up inputs and add them to the session
    //this will only work if using a physical device!
    Console.WriteLine ("getting device inputs");
    try{
        //add video capture device
        device = AVCaptureDevice.DefaultDeviceWithMediaType (AVMediaType.Video);
        input = AVCaptureDeviceInput.FromDevice (device);
        session.AddInput (input);

        //add audio capture device
        audioDevice = AVCaptureDevice.DefaultDeviceWithMediaType(AVMediaType.Audio);
        audioInput = AVCaptureDeviceInput.FromDevice(audioDevice);
        session.AddInput(audioInput);
    }
    catch(Exception ex){
        //show the label error. This will always show when running in simulator instead of physical device.
        lblError.Hidden = false;
        return;
    }

    //Set up preview layer (shows what the input device sees)
    Console.WriteLine ("setting up preview layer");
    previewlayer = new AVCaptureVideoPreviewLayer (session);
    previewlayer.Frame = this.View.Bounds;

    //this code makes UI controls sit on top of the preview layer! Allows you to just place the controls in interface builder
    UIView cameraView = new UIView ();
    cameraView = new UIView ();
    cameraView.Layer.AddSublayer (previewlayer);
    this.View.AddSubview (cameraView);
    this.View.SendSubviewToBack (cameraView);

    Console.WriteLine ("Configuring output");
    output = new AVCaptureMovieFileOutput ();
    // Cap recordings at 10000 seconds (timescale 30) and require 1 MB free disk.
    long totalSeconds = 10000;
    Int32 preferredTimeScale = 30;
    CMTime maxDuration = new CMTime (totalSeconds, preferredTimeScale);
    output.MinFreeDiskSpaceLimit = 1024 * 1024;
    output.MaxRecordedDuration = maxDuration;
    if (session.CanAddOutput (output)) {
        session.AddOutput (output);
    }
    session.SessionPreset = AVCaptureSession.PresetMedium;

    Console.WriteLine ("About to start running session");
    session.StartRunning ();

    //toggle recording button was pushed.
    btnStartRecording.TouchUpInside += startStopPushed;
    //Console.ReadLine ();
}
// Configures the capture session (called on the session queue): photo preset,
// best available back camera (dual preferred, wide-angle fallback, then front),
// audio input, and a photo output with high-resolution / Live Photo support
// when available. Sets setupResult to SessionConfigurationFailed and commits
// early on any unrecoverable error.
void ConfigureSession ()
{
    if (setupResult != AVCamSetupResult.Success)
        return;

    session.BeginConfiguration ();

    // We do not create an AVCaptureMovieFileOutput when setting up the session because the
    // AVCaptureMovieFileOutput does not support movie recording with AVCaptureSessionPresetPhoto.
    session.SessionPreset = AVCaptureSession.PresetPhoto;

    // Add video input.
    // Choose the back dual camera if available, otherwise default to a wide angle camera.
    AVCaptureDevice defaultVideoDevice = AVCaptureDevice.GetDefaultDevice (AVCaptureDeviceType.BuiltInDuoCamera, AVMediaType.Video, AVCaptureDevicePosition.Back) ??
        AVCaptureDevice.GetDefaultDevice (AVCaptureDeviceType.BuiltInWideAngleCamera, AVMediaType.Video, AVCaptureDevicePosition.Back) ??
        AVCaptureDevice.GetDefaultDevice (AVCaptureDeviceType.BuiltInWideAngleCamera, AVMediaType.Video, AVCaptureDevicePosition.Front);

    NSError error;
    var input = AVCaptureDeviceInput.FromDevice (defaultVideoDevice, out error);
    if (error != null) {
        Console.WriteLine ($"Could not create video device input: {error.LocalizedDescription}");
        setupResult = AVCamSetupResult.SessionConfigurationFailed;
        session.CommitConfiguration ();
        return;
    }
    if (session.CanAddInput (input)) {
        session.AddInput (input);
        videoDeviceInput = input;
        DispatchQueue.MainQueue.DispatchAsync (() => {
            // Why are we dispatching this to the main queue?
            // Because AVCaptureVideoPreviewLayer is the backing layer for PreviewView and UIView
            // can only be manipulated on the main thread.
            // Note: As an exception to the above rule, it is not necessary to serialize video orientation changes
            // on the AVCaptureVideoPreviewLayer’s connection with other session manipulation.
            // Use the status bar orientation as the initial video orientation. Subsequent orientation changes are handled by
            // ViewWillTransitionToSize method.
            var statusBarOrientation = UIApplication.SharedApplication.StatusBarOrientation;
            var initialVideoOrientation = AVCaptureVideoOrientation.Portrait;
            AVCaptureVideoOrientation videoOrientation;
            if (statusBarOrientation != UIInterfaceOrientation.Unknown && TryConvertToVideoOrientation(statusBarOrientation, out videoOrientation))
                initialVideoOrientation = videoOrientation;
            PreviewView.VideoPreviewLayer.Connection.VideoOrientation = initialVideoOrientation;
        });
    } else {
        Console.WriteLine ("Could not add video device input to the session");
        setupResult = AVCamSetupResult.SessionConfigurationFailed;
        session.CommitConfiguration ();
        return;
    }

    // Add audio input. Audio failures are logged but not fatal to the session.
    var audioDevice = AVCaptureDevice.DefaultDeviceWithMediaType (AVMediaType.Audio);
    var audioDeviceInput = AVCaptureDeviceInput.FromDevice (audioDevice, out error);
    if (error != null)
        Console.WriteLine ($"Could not create audio device input: {error.LocalizedDescription}");
    if (session.CanAddInput (audioDeviceInput))
        session.AddInput (audioDeviceInput);
    else
        Console.WriteLine ("Could not add audio device input to the session");

    // Add photo output. Live Photo mode defaults to On only when supported.
    if (session.CanAddOutput (photoOutput)) {
        session.AddOutput (photoOutput);
        photoOutput.IsHighResolutionCaptureEnabled = true;
        photoOutput.IsLivePhotoCaptureEnabled = photoOutput.IsLivePhotoCaptureSupported;
        livePhotoMode = photoOutput.IsLivePhotoCaptureSupported ? LivePhotoMode.On : LivePhotoMode.Off;
    } else {
        Console.WriteLine ("Could not add photo output to the session");
        setupResult = AVCamSetupResult.SessionConfigurationFailed;
        session.CommitConfiguration ();
        return;
    }

    session.CommitConfiguration ();
}
// Switches between front and back cameras on the session queue,
// preferring the dual camera on the back. Camera controls are disabled
// while the swap is in flight and re-enabled on the main queue.
void ChangeCamera (UIButton cameraButton)
{
	cameraButton.Enabled = false;
	RecordButton.Enabled = false;
	PhotoButton.Enabled = false;
	LivePhotoModeButton.Enabled = false;
	CaptureModeControl.Enabled = false;

	sessionQueue.DispatchAsync (() => {
		AVCaptureDevice currentVideoDevice = videoDeviceInput.Device;
		AVCaptureDevicePosition currentPosition = currentVideoDevice.Position;

		// Use named enum members rather than the raw literal 0.
		var preferredPosition = AVCaptureDevicePosition.Unspecified;
		var preferredDeviceType = AVCaptureDeviceType.BuiltInWideAngleCamera;
		switch (currentPosition) {
		case AVCaptureDevicePosition.Unspecified:
		case AVCaptureDevicePosition.Front:
			preferredPosition = AVCaptureDevicePosition.Back;
			preferredDeviceType = AVCaptureDeviceType.BuiltInDuoCamera;
			break;
		case AVCaptureDevicePosition.Back:
			preferredPosition = AVCaptureDevicePosition.Front;
			preferredDeviceType = AVCaptureDeviceType.BuiltInWideAngleCamera;
			break;
		}

		var devices = videoDeviceDiscoverySession.Devices;

		// First, look for a device with both the preferred position and device type.
		// Otherwise, fall back to one with only the preferred position.
		AVCaptureDevice newVideoDevice =
			devices.FirstOrDefault (d => d.Position == preferredPosition && d.DeviceType == preferredDeviceType)
			?? devices.FirstOrDefault (d => d.Position == preferredPosition);

		if (newVideoDevice != null) {
			NSError error;
			var input = AVCaptureDeviceInput.FromDevice (newVideoDevice, out error);
			if (error == null) {
				session.BeginConfiguration ();

				// Remove the existing device input first, since using the front and
				// back camera simultaneously is not supported.
				session.RemoveInput (videoDeviceInput);

				if (session.CanAddInput (input)) {
					subjectSubscriber?.Dispose ();
					subjectSubscriber = NSNotificationCenter.DefaultCenter.AddObserver (AVCaptureDevice.SubjectAreaDidChangeNotification, SubjectAreaDidChange, input.Device);
					session.AddInput (input);
					videoDeviceInput = input;
				} else {
					// Could not add the new input; restore the previous one.
					session.AddInput (videoDeviceInput);
				}

				var connection = MovieFileOutput?.ConnectionFromMediaType (AVMediaType.Video);
				if (connection != null) {
					if (connection.SupportsVideoStabilization)
						connection.PreferredVideoStabilizationMode = AVCaptureVideoStabilizationMode.Auto;
				}

				// Set Live Photo capture enabled if it is supported. When changing cameras, the
				// IsLivePhotoCaptureEnabled property of the AVCapturePhotoOutput gets set to false when
				// a video device is disconnected from the session. After the new video device is
				// added to the session, re-enable Live Photo capture on the AVCapturePhotoOutput if supported.
				photoOutput.IsLivePhotoCaptureEnabled = photoOutput.IsLivePhotoCaptureSupported;

				session.CommitConfiguration ();
			} else {
				// Log the failure instead of silently keeping the old camera.
				Console.WriteLine ($"Could not create video device input: {error.LocalizedDescription}");
			}
		}

		DispatchQueue.MainQueue.DispatchAsync (() => {
			CameraButton.Enabled = true;
			RecordButton.Enabled = MovieFileOutput != null;
			PhotoButton.Enabled = true;
			LivePhotoModeButton.Enabled = true;
			CaptureModeControl.Enabled = true;
		});
	});
}
void setupCaptureSession () { if (CaptureSession != null) return; CaptureSession = new AVCaptureSession (); NSNotificationCenter.DefaultCenter.AddObserver (null, captureSessionNotification, CaptureSession); applicationWillEnterForegroundNotificationObserver = NSNotificationCenter.DefaultCenter.AddObserver (UIApplication.WillEnterForegroundNotification.ToString (), UIApplication.SharedApplication, NSOperationQueue.CurrentQueue, delegate(NSNotification notification) { applicationWillEnterForeground (); }); videoDevice = AVCaptureDevice.DefaultDeviceWithMediaType (AVMediaType.Video); NSError error; videoInput = new AVCaptureDeviceInput (videoDevice, out error); if (CaptureSession.CanAddInput (videoInput)) CaptureSession.AddInput (videoInput); metadataOutput = new AVCaptureMetadataOutput (); var metadataQueue = new DispatchQueue ("com.AVCam.metadata"); metadataObjectsDelegate = new MetadataObjectsDelegate { DidOutputMetadataObjectsAction = DidOutputMetadataObjects }; metadataOutput.SetDelegate (metadataObjectsDelegate, metadataQueue); if (CaptureSession.CanAddOutput (metadataOutput)) CaptureSession.AddOutput (metadataOutput); }
public void ToggleFrontBackCamera() { var devicePosition = captureDeviceInput.Device.Position; if (devicePosition == AVCaptureDevicePosition.Front) { devicePosition = AVCaptureDevicePosition.Back; } else { devicePosition = AVCaptureDevicePosition.Front; } var device = GetCameraForOrientation(devicePosition); ConfigureCameraForDevice(device); captureSession.BeginConfiguration(); captureSession.RemoveInput(captureDeviceInput); captureDeviceInput = AVCaptureDeviceInput.FromDevice(device); captureSession.AddInput(captureDeviceInput); captureSession.CommitConfiguration(); }
// Builds the audio + video capture pipeline used by RosyWriter.
// Returns false when no capture device is available (e.g. simulator).
bool SetupCaptureSession ()
{
	// Overview: RosyWriter uses separate GCD queues for audio and video capture. If a single GCD queue
	// is used to deliver both audio and video buffers, and our video processing consistently takes
	// too long, the delivery queue can back up, resulting in audio being dropped.
	//
	// When recording, RosyWriter creates a third GCD queue for calls to AVAssetWriter. This ensures
	// that AVAssetWriter is not called to start or finish writing from multiple threads simultaneously.
	//
	// RosyWriter uses AVCaptureSession's default preset, AVCaptureSessionPresetHigh.

	// Create the capture session.
	captureSession = new AVCaptureSession ();
	captureSession.BeginConfiguration ();

	NSError error;

	// Create the audio connection.
	var audioDevice = AVCaptureDevice.DefaultDeviceWithMediaType (AVMediaType.Audio);
	if (audioDevice == null) {
		// Balance BeginConfiguration — the original returned while the
		// session was still inside a configuration transaction.
		captureSession.CommitConfiguration ();
		return false; // e.g. simulator
	}
	var audioIn = new AVCaptureDeviceInput (audioDevice, out error);
	if (error != null)
		Console.WriteLine ($"Could not create audio device input: {error.LocalizedDescription}");
	if (captureSession.CanAddInput (audioIn))
		captureSession.AddInput (audioIn);

	var audioOut = new AVCaptureAudioDataOutput ();
	var audioCaptureQueue = new DispatchQueue ("Audio Capture Queue");
	// Add the delegate to capture each sample that comes through.
	audioOut.SetSampleBufferDelegateQueue (this, audioCaptureQueue);
	if (captureSession.CanAddOutput (audioOut))
		captureSession.AddOutput (audioOut);
	audioConnection = audioOut.ConnectionFromMediaType (AVMediaType.Audio);

	// Create the video connection. Null-check the device, mirroring the audio
	// check above (the original dereferenced a possibly-null device).
	var videoDevice = VideoDeviceWithPosition (AVCaptureDevicePosition.Back);
	if (videoDevice == null) {
		captureSession.CommitConfiguration ();
		return false;
	}
	var videoIn = new AVCaptureDeviceInput (videoDevice, out error);
	if (error != null)
		Console.WriteLine ($"Could not create video device input: {error.LocalizedDescription}");
	if (captureSession.CanAddInput (videoIn))
		captureSession.AddInput (videoIn);

	// RosyWriter prefers to discard late video frames early in the capture pipeline, since its
	// processing can take longer than real-time on some platforms (such as iPhone 3GS).
	// Clients whose image processing is faster than real-time should consider setting
	// AlwaysDiscardsLateVideoFrames to false.
	var videoOut = new AVCaptureVideoDataOutput {
		AlwaysDiscardsLateVideoFrames = true,
		VideoSettings = new AVVideoSettings (CVPixelFormatType.CV32BGRA)
	};

	// Create a DispatchQueue for the video processing.
	var videoCaptureQueue = new DispatchQueue ("Video Capture Queue");
	videoOut.SetSampleBufferDelegateQueue (this, videoCaptureQueue);
	if (captureSession.CanAddOutput (videoOut))
		captureSession.AddOutput (videoOut);

	// Set the video connection from the video output object.
	videoConnection = videoOut.ConnectionFromMediaType (AVMediaType.Video);
	videoOrientation = videoConnection.VideoOrientation;

	captureSession.CommitConfiguration ();
	return true;
}
private bool addAudioInput( out string errorMessage ) { errorMessage = ""; audioCaptureDevice = MediaDevices.Microphone; audioInput = AVCaptureDeviceInput.FromDevice(audioCaptureDevice); if (audioInput == null) { errorMessage = "No audio capture device"; return false; } session.AddInput (audioInput); return true; }
private void initializeRecorder() { //Initialize sessions session = new AVCaptureSession (); //get inputs and add them to the session. try{ //add front facing camera foreach(AVCaptureDevice capdevice in AVCaptureDevice.Devices) { if(capdevice.HasMediaType(AVMediaType.Video)) { if(capdevice.Position == AVCaptureDevicePosition.Front) { device = capdevice; } } } //device = AVCaptureDevice.DefaultDeviceWithMediaType (AVMediaType.Video); input = AVCaptureDeviceInput.FromDevice(device); session.AddInput(input); //add audio capture device audioDevice = AVCaptureDevice.DefaultDeviceWithMediaType(AVMediaType.Audio); audioInput = AVCaptureDeviceInput.FromDevice(audioDevice); session.AddInput(audioInput); } catch(Exception ex) { return; } }
private void SetupCamera() { CaptureSession = null; CaptureSession = new AVCaptureSession(); CaptureSession.SessionPreset = AVCaptureSession.PresetPhoto; currentDevice = null; inputDevice1 = null; inputDevice2 = null; foreach (AVCaptureDevice device in AVCaptureDevice.DevicesWithMediaType(AVMediaType.Video)) { if (device.Position == AVCaptureDevicePosition.Front) { inputDevice1 = device; } else if (device.Position == AVCaptureDevicePosition.Back) { inputDevice2 = device; } } NSError error; if (inputDevice1.HasFlash) { inputDevice1.LockForConfiguration(out error); inputDevice1.FlashMode = AVCaptureFlashMode.Off; FlashButton.TitleLabel.Text = "Flash Off"; } if (inputDevice2.HasFlash) { inputDevice2.LockForConfiguration(out error); inputDevice2.FlashMode = AVCaptureFlashMode.Off; FlashButton.TitleLabel.Text = "Flash Off"; } frontCamera = AVCaptureDeviceInput.FromDevice(inputDevice1, out error); rearCamera = AVCaptureDeviceInput.FromDevice(inputDevice2, out error); currentDevice = inputDevice2; if (CaptureSession.CanAddInput(rearCamera)) { CaptureSession.AddInput(rearCamera); } AVCaptureVideoPreviewLayer previewLayer = new AVCaptureVideoPreviewLayer(CaptureSession); previewLayer.VideoGravity = AVLayerVideoGravity.ResizeAspectFill; previewLayer.Frame = View.Frame; View.Layer.InsertSublayer(previewLayer, 0); StillImageOutput = new AVCaptureStillImageOutput(); StillImageOutput.OutputSettings = new NSDictionary(AVVideo.CodecKey, AVVideo.CodecJPEG); CaptureSession.AddOutput(StillImageOutput); CaptureSession.StartRunning(); }
// Toggles between the front and back cameras, saving and restoring the
// session preset across the swap. Controls are disabled during the swap
// and always re-enabled afterwards, including on the error path.
void ChangeCamera ()
{
	MetadataObjectTypesButton.Enabled = false;
	SessionPresetsButton.Enabled = false;
	CameraButton.Enabled = false;
	ZoomSlider.Enabled = false;

	// Remove the metadata overlay layers, if any.
	RemoveMetadataObjectOverlayLayers ();

	DispatchQueue.MainQueue.DispatchAsync (() => {
		var currentVideoDevice = videoDeviceInput.Device;
		var currentPosition = currentVideoDevice.Position;

		var preferredPosition = AVCaptureDevicePosition.Unspecified;
		switch (currentPosition) {
		case AVCaptureDevicePosition.Unspecified:
		case AVCaptureDevicePosition.Front:
			preferredPosition = AVCaptureDevicePosition.Back;
			break;
		case AVCaptureDevicePosition.Back:
			preferredPosition = AVCaptureDevicePosition.Front;
			break;
		}

		var videoDevice = DeviceWithMediaType (AVMediaType.Video, preferredPosition);
		if (videoDevice != null) {
			NSError err;
			var vDeviceInput = AVCaptureDeviceInput.FromDevice (videoDevice, out err);
			if (err != null) {
				Console.WriteLine ($"Error occured while creating video device input: {err}");
				// Re-enable the controls before bailing out — the original
				// returned here and left the entire UI permanently disabled.
				MetadataObjectTypesButton.Enabled = true;
				SessionPresetsButton.Enabled = true;
				CameraButton.Enabled = true;
				ZoomSlider.Enabled = true;
				return;
			}

			session.BeginConfiguration ();

			// Remove the existing device input first, since using the front and
			// back camera simultaneously is not supported.
			session.RemoveInput (videoDeviceInput);

			// When changing devices, a session preset that may be supported on
			// one device may not be supported by another. To allow the user to
			// successfully switch devices, save the previous session preset,
			// set the default preset (High), and attempt to restore it after
			// the new video device has been added. For example, the 4K preset
			// is only supported by the back device on the iPhone 6s / 6s Plus;
			// the session will not let us add a video device that does not
			// support the current preset.
			var previousSessionPreset = session.SessionPreset;
			session.SessionPreset = AVCaptureSession.PresetHigh;

			if (session.CanAddInput (vDeviceInput)) {
				session.AddInput (vDeviceInput);
				videoDeviceInput = vDeviceInput;
			} else {
				session.AddInput (videoDeviceInput);
			}

			// Restore the previous session preset if we can.
			if (session.CanSetSessionPreset (previousSessionPreset))
				session.SessionPreset = previousSessionPreset;

			session.CommitConfiguration ();
		}

		MetadataObjectTypesButton.Enabled = true;
		SessionPresetsButton.Enabled = true;
		CameraButton.Enabled = true;
		ZoomSlider.Enabled = true;
		ZoomSlider.MaxValue = (float)NMath.Min (videoDeviceInput.Device.ActiveFormat.VideoMaxZoomFactor, 8);
		ZoomSlider.Value = (float)videoDeviceInput.Device.VideoZoomFactor;
	});
}
void SetupAVCapture (NSString sessionPreset) { if ((videoTextureCache = CVOpenGLESTextureCache.FromEAGLContext (context)) == null){ Console.WriteLine ("Could not create the CoreVideo TextureCache"); return; } session = new AVCaptureSession (); session.BeginConfiguration (); // Preset size session.SessionPreset = sessionPreset; // Input device var videoDevice = AVCaptureDevice.DefaultDeviceWithMediaType (AVMediaType.Video); if (videoDevice == null){ Console.WriteLine ("No video device"); return; } NSError err; var input = new AVCaptureDeviceInput (videoDevice, out err); if (err != null){ Console.WriteLine ("Error creating video capture device"); return; } session.AddInput (input); // Create the output device var dataOutput = new AVCaptureVideoDataOutput () { AlwaysDiscardsLateVideoFrames = true, // YUV 420, use "BiPlanar" to split the Y and UV planes in two separate blocks of // memory, then we can index 0 to get the Y and 1 for the UV planes in the frame decoding VideoSettings = new AVVideoSettings (CVPixelFormatType.CV420YpCbCr8BiPlanarFullRange) }; dataOutputDelegate = new DataOutputDelegate (this); // // This dispatches the video frames into the main thread, because the OpenGL // code is accessing the data synchronously. // dataOutput.SetSampleBufferDelegateAndQueue (dataOutputDelegate, DispatchQueue.MainQueue); session.AddOutput (dataOutput); session.CommitConfiguration (); session.StartRunning (); }
private bool addCameraInput( out string errorMessage ) { errorMessage = ""; videoCaptureDevice = this.cameraType == CameraType.FrontFacing ? MediaDevices.FrontCamera : MediaDevices.BackCamera; videoInput = AVCaptureDeviceInput.FromDevice(videoCaptureDevice); if (videoInput == null) { errorMessage = "No video capture device"; return false; } session.AddInput (videoInput); return true; }
void ConfigureSession () { if (setupResult != SessionSetupResult.Success) return; session.BeginConfiguration (); var videoDevice = DeviceWithMediaType (AVMediaType.Video, AVCaptureDevicePosition.Back); NSError err; var vDeviceInput = AVCaptureDeviceInput.FromDevice (videoDevice, out err); if (err != null) { Console.WriteLine ($"Could not create video device input: ${err}"); setupResult = SessionSetupResult.ConfigurationFailed; session.CommitConfiguration (); return; } if (session.CanAddInput (vDeviceInput)) { session.AddInput (vDeviceInput); videoDeviceInput = vDeviceInput; } else { Console.WriteLine ("Could not add video device input to the session"); setupResult = SessionSetupResult.ConfigurationFailed; session.CommitConfiguration (); return; } // Add metadata output. if (session.CanAddOutput (metadataOutput)) { session.AddOutput (metadataOutput); // Set this view controller as the delegate for metadata objects. metadataOutput.SetDelegate (this, metadataObjectsQueue); metadataOutput.MetadataObjectTypes = metadataOutput.AvailableMetadataObjectTypes; // Use all metadata object types by default. metadataOutput.RectOfInterest = CGRect.Empty; } else { Console.WriteLine ("Could not add metadata output to the session"); setupResult = SessionSetupResult.ConfigurationFailed; session.CommitConfiguration (); return; } session.CommitConfiguration (); }