void SetupCaptureDevice()
{
    captureDevice = AVCaptureDevice.GetDefaultDevice(AVMediaTypes.Video);
    if (captureDevice == null)
    {
        Console.WriteLine("Error: no video devices available");
        return;
    }

    videoDeviceInput = AVCaptureDeviceInput.FromDevice(captureDevice);
    if (videoDeviceInput == null)
    {
        Console.WriteLine("Error: could not create AVCaptureDeviceInput");
        return;
    }

    if (captureSession.CanAddInput(videoDeviceInput))
    {
        captureSession.AddInput(videoDeviceInput);
    }

    previewLayer = AVCaptureVideoPreviewLayer.FromSession(captureSession);
    previewLayer.VideoGravity = AVLayerVideoGravity.ResizeAspect;
    previewLayer.Connection.VideoOrientation = AVCaptureVideoOrientation.Portrait;
}
void ConfigureSession()
{
    if (setupResult != SessionSetupResult.Success)
    {
        return;
    }

    session.BeginConfiguration();

    var videoDevice = DeviceWithMediaType(AVMediaType.Video, AVCaptureDevicePosition.Back);
    NSError err;
    var vDeviceInput = AVCaptureDeviceInput.FromDevice(videoDevice, out err);
    if (err != null)
    {
        Console.WriteLine($"Could not create video device input: {err}");
        setupResult = SessionSetupResult.ConfigurationFailed;
        session.CommitConfiguration();
        return;
    }

    if (session.CanAddInput(vDeviceInput))
    {
        session.AddInput(vDeviceInput);
        videoDeviceInput = vDeviceInput;
    }
    else
    {
        Console.WriteLine("Could not add video device input to the session");
        setupResult = SessionSetupResult.ConfigurationFailed;
        session.CommitConfiguration();
        return;
    }

    // Add metadata output.
    if (session.CanAddOutput(metadataOutput))
    {
        session.AddOutput(metadataOutput);

        // Set this view controller as the delegate for metadata objects.
        metadataOutput.SetDelegate(this, metadataObjectsQueue);
        metadataOutput.MetadataObjectTypes = metadataOutput.AvailableMetadataObjectTypes; // Use all metadata object types by default.

        // RectOfInterest is normalized: (0, 0, 1, 1) scans the full field of view.
        // (CGRect.Empty here would suppress all detections.)
        metadataOutput.RectOfInterest = new CGRect(0, 0, 1, 1);
    }
    else
    {
        Console.WriteLine("Could not add metadata output to the session");
        setupResult = SessionSetupResult.ConfigurationFailed;
        session.CommitConfiguration();
        return;
    }

    session.CommitConfiguration();
}
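If you want to limit scanning to a sub-region instead of the full field of view, the rectangle must be expressed in the metadata output's normalized, rotated coordinate space, not view coordinates. A minimal sketch, assuming a preview layer named previewLayer and a viewfinder subview named scanAreaView (both names are assumptions, not part of the snippet above):

DispatchQueue.MainQueue.DispatchAsync(() =>
{
    // Convert from layer (view) coordinates to the normalized space that
    // RectOfInterest expects; the mapping is only meaningful once the
    // session has been configured.
    metadataOutput.RectOfInterest = previewLayer.MapToMetadataOutputCoordinates(scanAreaView.Frame);
});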
private void InitSession()
{
    try
    {
        // init capture session
        _AVSession = new AVCaptureSession();

        // check permissions
        var authorizationStatus = AVCaptureDevice.GetAuthorizationStatus(AVMediaType.Video);
        if (authorizationStatus != AVAuthorizationStatus.Authorized)
        {
            return;
        }

        // check capture camera
        var cameras = AVCaptureDevice.DevicesWithMediaType(AVMediaType.Video);
        var camera = cameras.FirstOrDefault(d => d.Position == AVCaptureDevicePosition.Back);
        if (camera == null)
        {
            return;
        }

        // add input to capture session
        _AVDeviceInput = new AVCaptureDeviceInput(camera, out NSError _);
        if (_AVSession.CanAddInput(_AVDeviceInput))
        {
            _AVSession.AddInput(_AVDeviceInput);
        }
        else
        {
            return;
        }

        // add output to camera session
        _MetadataObjectsQueue = new DispatchQueue("metadata objects queue");
        _AVMetadataOutput = new AVCaptureMetadataOutput();
        if (_AVSession.CanAddOutput(_AVMetadataOutput))
        {
            _AVSession.AddOutput(_AVMetadataOutput);
        }
        else
        {
            return;
        }
        _AVMetadataOutput.SetDelegate(this, _MetadataObjectsQueue);

        // init the video preview layer and add it to the current view
        _AVVideoPreviewLayer = new AVCaptureVideoPreviewLayer(_AVSession);
        _AVVideoPreviewLayer.VideoGravity = AVLayerVideoGravity.ResizeAspectFill;
        _AVVideoPreviewLayer.Frame = Bounds;
        this.Layer.AddSublayer(_AVVideoPreviewLayer);

        // start capture session
        StartSession(true);
    }
    catch (Exception ex)
    {
        Console.WriteLine("IOS_SCAN | init error: " + ex);
    }
}
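InitSession above returns early when camera access is not yet Authorized, so a caller must have requested permission first. A minimal sketch of that step, assuming it lives in the same class (the method name is an assumption); the system prompt also requires an NSCameraUsageDescription entry in Info.plist:

private async Task EnsureCameraAccessAndInit()
{
    if (AVCaptureDevice.GetAuthorizationStatus(AVMediaType.Video) == AVAuthorizationStatus.NotDetermined)
    {
        // Shows the system camera-permission prompt and waits for the user's answer.
        await AVCaptureDevice.RequestAccessForMediaTypeAsync(AVMediaType.Video);
    }
    InitSession();
}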
#pragma warning restore CS4014

private void SetupLiveCameraStream()
{
    captureSession = new AVCaptureSession();

    var viewLayer = CameraFeedView.Layer;
    videoPreviewLayer = new AVCaptureVideoPreviewLayer(captureSession)
    {
        Frame = CameraFeedView.Frame,
        VideoGravity = AVLayerVideoGravity.ResizeAspectFill
    };
    CameraFeedView.Layer.AddSublayer(videoPreviewLayer);

    AVCaptureDevice captureDevice = AVCaptureDevice.GetDefaultDevice(AVMediaType.Video);
    ConfigureCameraForDevice(captureDevice);
    captureDeviceInput = AVCaptureDeviceInput.FromDevice(captureDevice);

    // Pair the CommitConfiguration calls below with a BeginConfiguration.
    captureSession.BeginConfiguration();
    captureSession.AddInput(captureDeviceInput);

    if (isMovie)
    {
        // Add audio
        var audioDevice = AVCaptureDevice.GetDefaultDevice(AVMediaTypes.Audio);
        var audioDeviceInput = AVCaptureDeviceInput.FromDevice(audioDevice, out NSError audioErr);
        if (audioErr != null)
        {
            Console.WriteLine("Couldn't create audio device input: " + audioErr.LocalizedDescription);
        }
        // Guard against a null input: CanAddInput(null) throws.
        if (audioDeviceInput != null && captureSession.CanAddInput(audioDeviceInput))
        {
            captureSession.AddInput(audioDeviceInput);
        }
        else
        {
            Console.WriteLine("Couldn't add audio input to session");
        }

        movieOutput = new AVCaptureMovieFileOutput();
        captureSession.AddOutput(movieOutput);
        captureSession.SessionPreset = AVCaptureSession.Preset1280x720;

        var connection = movieOutput.ConnectionFromMediaType(AVMediaType.Video);
        if (connection != null && connection.SupportsVideoStabilization)
        {
            connection.PreferredVideoStabilizationMode = AVCaptureVideoStabilizationMode.Auto;
        }
        captureSession.CommitConfiguration();
    }
    else
    {
        stillImageOutput = new AVCapturePhotoOutput();
        stillImageOutput.IsHighResolutionCaptureEnabled = true;
        stillImageOutput.IsLivePhotoCaptureEnabled = false;
        captureSession.AddOutput(stillImageOutput);
        captureSession.CommitConfiguration();
    }

    ShutterButton.Hidden = false;
    captureSession.StartRunning();
}
/// <summary>
/// Start camera preview
/// </summary>
public override void StartCamera()
{
    if (Session == null)
    {
        Session = new AVCaptureSession();

        Device = Configuration.ShowBackCameraFirst
            ? AVCaptureDevice.Devices.FirstOrDefault(d => d.Position == AVCaptureDevicePosition.Back)
            : AVCaptureDevice.Devices.FirstOrDefault(d => d.Position == AVCaptureDevicePosition.Front);

        if (Device == null)
        {
            NoCameraAvailable();
            Console.WriteLine("Could not find capture device, does your device have a camera?");
            return;
        }

        try
        {
            NSError error;
            VideoInput = new AVCaptureDeviceInput(Device, out error);
            Session.AddInput(VideoInput);

            _videoOutput = new AVCaptureMovieFileOutput
            {
                MinFreeDiskSpaceLimit = 1024 * 1024
            };
            if (Session.CanAddOutput(_videoOutput))
            {
                Session.AddOutput(_videoOutput);
            }

            if (Configuration.RecordAudio)
            {
                var audioDevice = AVCaptureDevice.DefaultDeviceWithMediaType(AVMediaType.Audio);
                _audioInput = new AVCaptureDeviceInput(audioDevice, out error);
                if (Session.CanAddInput(_audioInput))
                {
                    Session.AddInput(_audioInput);
                }
            }

            if (Configuration.DetectFaces)
            {
                SetupFaceDetection();
            }

            SetupVideoPreviewLayer();
            Session.StartRunning();
        }
        catch (Exception ex)
        {
            // Swallowing the exception keeps the UI alive, but at least log it.
            Console.WriteLine($"StartCamera failed: {ex.Message}");
        }

        FlashConfiguration(true);
    }
    base.StartCamera();
}
private void InitialiseCaptureSession()
{
    try
    {
        _captureSession.SessionPreset = AVCaptureSession.Preset1920x1080;
        var captureDevice = AVCaptureDevice.GetDefaultDevice(AVMediaTypes.Video);

        NSError error;
        var input = new AVCaptureDeviceInput(captureDevice, out error);
        // error is null on success; the original `error?.Code != 0` test was true
        // for a null error and then crashed calling error.ToString().
        if (error != null)
        {
            Console.WriteLine($"Error {error}");
        }
        if (_captureSession.CanAddInput(input))
        {
            _captureSession.AddInput(input);
        }

        var videoOutput = new AVCaptureVideoDataOutput();
        videoOutput.SetSampleBufferDelegateQueue(this, new DispatchQueue("sample buffer delegate"));
        if (_captureSession.CanAddOutput(videoOutput))
        {
            _captureSession.AddOutput(videoOutput);
        }

        _captureSession.StartRunning();
    }
    catch (Exception ex)
    {
        Console.WriteLine($"InitialiseCaptureSession failed: {ex.Message}");
    }
}
public void UpdateCameraOption(CameraOptions option)
{
    var devices = AVCaptureDeviceDiscoverySession.Create(
        new AVCaptureDeviceType[] { AVCaptureDeviceType.BuiltInWideAngleCamera, AVCaptureDeviceType.BuiltInDualCamera },
        AVMediaType.Video,
        AVCaptureDevicePosition.Unspecified
    );

    var cameraPosition = (option == CameraOptions.Front) ? AVCaptureDevicePosition.Front : AVCaptureDevicePosition.Back;
    var device = devices.Devices.FirstOrDefault(d => d.Position == cameraPosition);
    if (device != null)
    {
        var lVideoDeviceInput = AVCaptureDeviceInput.FromDevice(device);

        captureSession.BeginConfiguration();

        // Remove the existing device input first, since using the front and back camera simultaneously is not supported.
        captureSession.RemoveInput(videoDeviceInput);
        if (captureSession.CanAddInput(lVideoDeviceInput))
        {
            captureSession.AddInput(lVideoDeviceInput);
            videoDeviceInput = lVideoDeviceInput;
        }
        else
        {
            // Fall back to the input that was just removed.
            captureSession.AddInput(videoDeviceInput);
        }

        captureSession.CommitConfiguration();
    }
}
public void MetadataObjectTypesTest()
{
    TestRuntime.AssertSystemVersion(PlatformName.iOS, 8, 0, throwIfOtherPlatform: false);
    if (Runtime.Arch != Arch.DEVICE)
    {
        Assert.Ignore("This test only runs on device (requires camera access)");
    }
    TestRuntime.RequestCameraPermission(AVMediaType.Video, true);

    using (var captureSession = new AVCaptureSession())
    using (var videoDevice = AVCaptureDevice.DefaultDeviceWithMediaType(AVMediaType.Video))
    {
        NSError error;
        using (var videoInput = new AVCaptureDeviceInput(videoDevice, out error))
        {
            if (captureSession.CanAddInput(videoInput))
            {
                captureSession.AddInput(videoInput);
            }

            using (var metadataOutput = new AVCaptureMetadataOutput())
            {
                if (captureSession.CanAddOutput(metadataOutput))
                {
                    captureSession.AddOutput(metadataOutput);
                }

                AVMetadataObjectType all = AVMetadataObjectType.None;
                foreach (AVMetadataObjectType val in Enum.GetValues(typeof(AVMetadataObjectType)))
                {
                    switch (val)
                    {
                    case AVMetadataObjectType.CatBody:
                    case AVMetadataObjectType.DogBody:
                    case AVMetadataObjectType.HumanBody:
                    case AVMetadataObjectType.SalientObject:
                        // fail *and crash* on iOS 8 (at least on 32bits devices)
                        if (!TestRuntime.CheckXcodeVersion(11, 0))
                        {
                            continue;
                        }
                        // xcode 12 beta 1 on device
                        if ((Runtime.Arch == Arch.DEVICE) && TestRuntime.CheckXcodeVersion(12, 0))
                        {
                            continue;
                        }
                        break;
                    }
                    metadataOutput.MetadataObjectTypes = val;
                    all |= val;
                    Assert.AreEqual(val, metadataOutput.MetadataObjectTypes, val.ToString());
                }
                metadataOutput.MetadataObjectTypes = all;
                Assert.AreEqual(all, metadataOutput.MetadataObjectTypes, all.ToString());
            }
        }
    }
}
public void MetadataObjectTypesTest()
{
    if (!TestRuntime.CheckSystemAndSDKVersion(8, 0))
    {
        Assert.Ignore("Test only works correctly in iOS 8+");
    }
    if (Runtime.Arch != Arch.DEVICE)
    {
        Assert.Ignore("This test only runs on device (requires camera access)");
    }

    var auth = AVCaptureDevice.GetAuthorizationStatus(AVMediaType.Video);
    switch (auth)
    {
    case AVAuthorizationStatus.Restricted:
    case AVAuthorizationStatus.Denied:
        Assert.Fail("This test requires access to the camera, but the app has been denied access.");
        break;
    }

    using (var captureSession = new AVCaptureSession())
    using (var videoDevice = AVCaptureDevice.DefaultDeviceWithMediaType(AVMediaType.Video))
    {
        NSError error;
        using (var videoInput = new AVCaptureDeviceInput(videoDevice, out error))
        {
            if (captureSession.CanAddInput(videoInput))
            {
                captureSession.AddInput(videoInput);
            }

            using (var metadataOutput = new AVCaptureMetadataOutput())
            {
                if (captureSession.CanAddOutput(metadataOutput))
                {
                    captureSession.AddOutput(metadataOutput);
                }

                AVMetadataObjectType all = AVMetadataObjectType.None;
                foreach (AVMetadataObjectType val in Enum.GetValues(typeof(AVMetadataObjectType)))
                {
                    metadataOutput.MetadataObjectTypes = val;
                    all |= val;
                    Assert.AreEqual(val, metadataOutput.MetadataObjectTypes, val.ToString());
                }
                metadataOutput.MetadataObjectTypes = all;
                Assert.AreEqual(all, metadataOutput.MetadataObjectTypes, all.ToString());
            }
        }
    }
}
private void SetupLiveCameraStream()
{
    _captureSession = new AVCaptureSession();

    AVCaptureDevice captureDevice =
        AVCaptureDevice.GetDefaultDevice(AVCaptureDeviceType.BuiltInDualCamera, AVMediaType.Video, AVCaptureDevicePosition.Back)
        ?? AVCaptureDevice.GetDefaultDevice(AVMediaType.Video);

    ConfigureCameraForDevice(captureDevice);
    _captureDeviceInput = AVCaptureDeviceInput.FromDevice(captureDevice);
    if (!_captureSession.CanAddInput(_captureDeviceInput))
    {
        return;
    }

    _capturePhotoOutput = new AVCapturePhotoOutput();
    _capturePhotoOutput.IsHighResolutionCaptureEnabled = true;
    _capturePhotoOutput.IsLivePhotoCaptureEnabled = false;
    if (!_captureSession.CanAddOutput(_capturePhotoOutput))
    {
        return;
    }

    _captureSession.BeginConfiguration();
    _captureSession.SessionPreset = AVCaptureSession.PresetPhoto;
    _captureSession.AddInput(_captureDeviceInput);
    _captureSession.AddOutput(_capturePhotoOutput);
    _captureSession.CommitConfiguration();

    _videoPreviewLayer = new AVCaptureVideoPreviewLayer(_captureSession)
    {
        Frame = liveCameraStream.Frame,
        VideoGravity = AVLayerVideoGravity.ResizeAspectFill,
    };
    liveCameraStream.Layer.AddSublayer(_videoPreviewLayer);

    _captureSession.StartRunning();
}
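The snippet above only wires up _capturePhotoOutput; actually taking a picture needs a capture call plus a delegate. A minimal sketch, assuming it lives in the same class (the method and delegate class names are hypothetical; the delegate callback shown is the iOS 11+ overload):

public void TakePhoto()
{
    var settings = AVCapturePhotoSettings.Create();
    settings.IsHighResolutionPhotoEnabled = true;
    _capturePhotoOutput.CapturePhoto(settings, new SamplePhotoCaptureDelegate());
}

class SamplePhotoCaptureDelegate : AVCapturePhotoCaptureDelegate
{
    public override void DidFinishProcessingPhoto(AVCapturePhotoOutput output, AVCapturePhoto photo, NSError error)
    {
        if (error != null)
        {
            Console.WriteLine($"Photo capture failed: {error.LocalizedDescription}");
            return;
        }
        var imageData = photo.FileDataRepresentation; // encoded image bytes (JPEG/HEIC)
    }
}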
void PrepareSession()
{
    session = new AVCaptureSession();
    var captureDevice = frontCamera;
    if (session == null || captureDevice == null)
    {
        return;
    }

    try
    {
        var deviceInput = new AVCaptureDeviceInput(captureDevice, out var deviceInputError);
        if (deviceInputError != null)
        {
            throw new NSErrorException(deviceInputError);
        }

        session.BeginConfiguration();

        if (session.CanAddInput(deviceInput))
        {
            session.AddInput(deviceInput);
        }

        var output = new AVCaptureVideoDataOutput
        {
            UncompressedVideoSetting = new AVVideoSettingsUncompressed
            {
                PixelFormatType = CVPixelFormatType.CV420YpCbCr8BiPlanarFullRange
            },
            AlwaysDiscardsLateVideoFrames = true
        };
        if (session.CanAddOutput(output))
        {
            session.AddOutput(output);
        }

        session.CommitConfiguration();

        var queue = new DispatchQueue("output.queue");
        output.SetSampleBufferDelegateQueue(this, queue);

        Console.WriteLine($"PrepareSession: Done setting up delegate");
    }
    catch (Exception ex)
    {
        Console.WriteLine($"PrepareSession Error: {ex.Message}");
    }
}
public void ConfigureSession(AVCaptureSession session)
{
    Console.WriteLine("capture session: configuring - adding audio input");

    // Add audio input; if this fails there is no need to fail the whole configuration.
    var audioDevice = AVCaptureDevice.GetDefaultDevice(AVMediaType.Audio);
    var audioDeviceInput = audioDevice != null ? AVCaptureDeviceInput.FromDevice(audioDevice) : null;
    if (audioDeviceInput != null && session.CanAddInput(audioDeviceInput))
    {
        session.AddInput(audioDeviceInput);
    }
    else
    {
        Console.WriteLine("capture session: could not add audio device input to the session");
    }
}
public void MetadataObjectTypesTest()
{
    if (!TestRuntime.CheckSystemAndSDKVersion(8, 0))
    {
        Assert.Ignore("Test only works correctly in iOS 8+");
    }
    if (Runtime.Arch != Arch.DEVICE)
    {
        Assert.Ignore("This test only runs on device (requires camera access)");
    }
    TestRuntime.RequestCameraPermission(AVMediaType.Video, true);

    using (var captureSession = new AVCaptureSession())
    using (var videoDevice = AVCaptureDevice.DefaultDeviceWithMediaType(AVMediaType.Video))
    {
        NSError error;
        using (var videoInput = new AVCaptureDeviceInput(videoDevice, out error))
        {
            if (captureSession.CanAddInput(videoInput))
            {
                captureSession.AddInput(videoInput);
            }

            using (var metadataOutput = new AVCaptureMetadataOutput())
            {
                if (captureSession.CanAddOutput(metadataOutput))
                {
                    captureSession.AddOutput(metadataOutput);
                }

                AVMetadataObjectType all = AVMetadataObjectType.None;
                foreach (AVMetadataObjectType val in Enum.GetValues(typeof(AVMetadataObjectType)))
                {
                    metadataOutput.MetadataObjectTypes = val;
                    all |= val;
                    Assert.AreEqual(val, metadataOutput.MetadataObjectTypes, val.ToString());
                }
                metadataOutput.MetadataObjectTypes = all;
                Assert.AreEqual(all, metadataOutput.MetadataObjectTypes, all.ToString());
            }
        }
    }
}
void SetupCaptureSession()
{
    _captureSession = new AVCaptureSession();

    var cameraPosition = (cameraOptions == CameraOptions.Front) ? AVCaptureDevicePosition.Front : AVCaptureDevicePosition.Back;
    var captureDevice = AVCaptureDevice.GetDefaultDevice(AVCaptureDeviceType.BuiltInWideAngleCamera, AVMediaTypes.Video, cameraPosition);
    if (captureDevice != null)
    {
        var input = new AVCaptureDeviceInput(captureDevice, out var error);
        if (error == null)
        {
            if (_captureSession.CanAddInput(input))
            {
                _captureSession.AddInput(input);
            }
        }
        if (_captureSession.CanAddOutput(_photoOutput))
        {
            _captureSession.AddOutput(_photoOutput);
        }

        _cameraLayer = new AVCaptureVideoPreviewLayer(_captureSession);
        _cameraLayer.Frame = this.Bounds;
        _cameraLayer.VideoGravity = AVLayerVideoGravity.ResizeAspectFill;
        this.Layer.AddSublayer(_cameraLayer);

        // Turn on flash
        if (captureDevice.HasTorch)
        {
            captureDevice.LockForConfiguration(out var err);
            if (err == null)
            {
                if (captureDevice.TorchMode == AVCaptureTorchMode.Off)
                {
                    captureDevice.TorchMode = AVCaptureTorchMode.On;
                    captureDevice.FlashMode = AVCaptureFlashMode.On;
                }
                captureDevice.SetTorchModeLevel(1.0f, out var _);
                captureDevice.UnlockForConfiguration();
            }
        }

        _captureSession.StartRunning();
    }
}
public void InitAndStartCamera()
{
    session = new AVCaptureSession
    {
        SessionPreset = AVCaptureSession.PresetMedium
    };
    var captureDevice = AVCaptureDevice.DefaultDeviceWithMediaType(AVMediaType.Video);
    NSError error;
    var videoInput = AVCaptureDeviceInput.FromDevice(captureDevice, out error);

    if (videoInput == null || !session.CanAddInput(videoInput))
    {
        return;
    }

    session.AddInput(videoInput);

    previewLayer = new AVCaptureVideoPreviewLayer(session)
    {
        Frame = rootView.Bounds
    };
    previewLayer.Connection.VideoOrientation = configDicByRotationChanged[UIApplication.SharedApplication.StatusBarOrientation];
    previewLayer.VideoGravity = AVLayerVideoGravity.ResizeAspectFill;
    cameraView.Layer.AddSublayer(previewLayer);

    session.StartRunning();
}
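The configDicByRotationChanged field indexed above is not shown in the snippet. A plausible shape, as a sketch (the field name comes from the snippet; the contents are an assumption), is a direct interface-orientation-to-video-orientation map:

static readonly Dictionary<UIInterfaceOrientation, AVCaptureVideoOrientation> configDicByRotationChanged =
    new Dictionary<UIInterfaceOrientation, AVCaptureVideoOrientation>
    {
        { UIInterfaceOrientation.Portrait, AVCaptureVideoOrientation.Portrait },
        { UIInterfaceOrientation.PortraitUpsideDown, AVCaptureVideoOrientation.PortraitUpsideDown },
        { UIInterfaceOrientation.LandscapeLeft, AVCaptureVideoOrientation.LandscapeLeft },
        { UIInterfaceOrientation.LandscapeRight, AVCaptureVideoOrientation.LandscapeRight },
        // UIInterfaceOrientation.Unknown is omitted; a lookup with it would throw,
        // so callers should fall back to Portrait for that case.
    };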
void setupCaptureSession()
{
    if (CaptureSession != null)
    {
        return;
    }

    CaptureSession = new AVCaptureSession();

    NSNotificationCenter.DefaultCenter.AddObserver(null, captureSessionNotification, CaptureSession);

    applicationWillEnterForegroundNotificationObserver =
        NSNotificationCenter.DefaultCenter.AddObserver(UIApplication.WillEnterForegroundNotification.ToString(),
                                                       UIApplication.SharedApplication,
                                                       NSOperationQueue.CurrentQueue,
                                                       delegate (NSNotification notification)
                                                       {
                                                           applicationWillEnterForeground();
                                                       });

    videoDevice = AVCaptureDevice.DefaultDeviceWithMediaType(AVMediaType.Video);

    NSError error;
    videoInput = new AVCaptureDeviceInput(videoDevice, out error);
    if (CaptureSession.CanAddInput(videoInput))
    {
        CaptureSession.AddInput(videoInput);
    }

    metadataOutput = new AVCaptureMetadataOutput();

    var metadataQueue = new DispatchQueue("com.AVCam.metadata");
    metadataObjectsDelegate = new MetadataObjectsDelegate
    {
        DidOutputMetadataObjectsAction = DidOutputMetadataObjects
    };
    metadataOutput.SetDelegate(metadataObjectsDelegate, metadataQueue);

    if (CaptureSession.CanAddOutput(metadataOutput))
    {
        CaptureSession.AddOutput(metadataOutput);
    }
}
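The MetadataObjectsDelegate type used above is not part of the snippet. A minimal sketch of what such a class might look like (the class and property names follow the snippet; the callback signature and action type are assumptions):

class MetadataObjectsDelegate : AVCaptureMetadataOutputObjectsDelegate
{
    public Action<AVMetadataObject[]> DidOutputMetadataObjectsAction { get; set; }

    public override void DidOutputMetadataObjects(AVCaptureMetadataOutput captureOutput,
                                                  AVMetadataObject[] metadataObjects,
                                                  AVCaptureConnection connection)
    {
        // Runs on the metadata dispatch queue passed to SetDelegate above.
        DidOutputMetadataObjectsAction?.Invoke(metadataObjects);
    }
}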
private bool TryToAddInput(AVCaptureSession session, AVCaptureDeviceInput videoDeviceInput)
{
    if (videoDeviceInput == null)
    {
        return false;
    }

    if (_videoDeviceInput != null)
    {
        session.RemoveInput(_videoDeviceInput);
    }

    if (!session.CanAddInput(videoDeviceInput))
    {
        return false;
    }

    session.AddInput(videoDeviceInput);
    return true;
}
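A sketch of how a helper like this might be called when switching devices, assuming _session and _videoDeviceInput fields like those above (the surrounding method is hypothetical):

public bool TrySwitchToDevice(AVCaptureDevice device)
{
    var newInput = AVCaptureDeviceInput.FromDevice(device);

    _session.BeginConfiguration();
    bool added = TryToAddInput(_session, newInput);
    if (added)
    {
        _videoDeviceInput = newInput; // remember the active input for the next switch
    }
    _session.CommitConfiguration();
    return added;
}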
public Task<OperationResult> Setup(bool enableAudioRecording, bool enableStillImageCapture = false, UIInterfaceOrientation orientation = UIInterfaceOrientation.Portrait, int numberOfCameras = 1)
{
    TaskCompletionSource<OperationResult> tcs = new TaskCompletionSource<OperationResult>();
    var warnings = new List<string>();

    NumberOfCameras = numberOfCameras;
    _enableAudioRecording = enableAudioRecording;
    _enableStillImageCapture = enableStillImageCapture;
    _session = new AVCaptureSession();
    _backgroundRecordingID = -1;

    NSError error;
    var result = AVCaptureDeviceFactory.CreateDevice(AVMediaType.Video, AVCaptureDevicePosition.Back);
    if (!result.IsSuccessful)
    {
        _setupResult = CameraSetupResult.SessionConfigurationFailed;
        tcs.SetResult(OperationResult.AsFailure("No video devices found, probably running in the simulator"));
        return tcs.Task;
    }

    _videoDeviceInput = AVCaptureDeviceInput.FromDevice(result.Result, out error);
    if (_videoDeviceInput == null)
    {
        _setupResult = CameraSetupResult.SessionConfigurationFailed;
        tcs.SetResult(OperationResult.AsFailure($"Could not create video device input: {error}"));
        return tcs.Task;
    }

    _session.BeginConfiguration();

    if (_session.CanAddInput(_videoDeviceInput))
    {
        _session.AddInput(_videoDeviceInput);

        var initialVideoOrientation = (AVCaptureVideoOrientation)(long)orientation;
        PreviewLayer.Session = _session;
        PreviewLayer.VideoGravity = AVLayerVideoGravity.ResizeAspectFill;
        PreviewLayer.Connection.VideoOrientation = initialVideoOrientation;
    }
    else
    {
        _setupResult = CameraSetupResult.SessionConfigurationFailed;
        _session.CommitConfiguration(); // balance BeginConfiguration before bailing out
        tcs.SetResult(OperationResult.AsFailure("Could not add video device input to the session"));
        return tcs.Task;
    }

    if (_enableAudioRecording)
    {
        AVCaptureDevice audioDevice = AVCaptureDevice.GetDefaultDevice(AVMediaType.Audio);
        AVCaptureDeviceInput audioDeviceInput = AVCaptureDeviceInput.FromDevice(audioDevice, out error);
        if (audioDeviceInput == null)
        {
            warnings.Add($"Could not create audio device input: {error}");
        }
        else
        {
            if (_session.CanAddInput(audioDeviceInput))
            {
                _session.AddInput(audioDeviceInput);
            }
            else
            {
                warnings.Add("Could not add audio device input to the session");
            }
        }
    }

    _movieFileOutput = new AVCaptureMovieFileOutput();
    if (_session.CanAddOutput(_movieFileOutput))
    {
        _session.AddOutput(_movieFileOutput);
        AVCaptureConnection connection = _movieFileOutput.ConnectionFromMediaType(AVMediaType.Video);
        if (connection.SupportsVideoStabilization)
        {
            connection.PreferredVideoStabilizationMode = AVCaptureVideoStabilizationMode.Auto;
        }
    }
    else
    {
        warnings.Add("Could not add movie file output to the session");
        _setupResult = CameraSetupResult.SessionConfigurationFailed;
    }

    if (_enableStillImageCapture)
    {
        _stillImageOutput = new AVCaptureStillImageOutput();
        if (_session.CanAddOutput(_stillImageOutput))
        {
            _stillImageOutput.CompressedVideoSetting = new AVVideoSettingsCompressed
            {
                Codec = AVVideoCodec.JPEG
            };
            _session.AddOutput(_stillImageOutput);
        }
        else
        {
            warnings.Add("Could not add still image output to the session");
            _setupResult = CameraSetupResult.SessionConfigurationFailed;
        }
    }

    _session.CommitConfiguration();

    _setupResult = CameraSetupResult.Success;
    tcs.SetResult(OperationResult.AsSuccess(string.Empty, warnings));

    AddObservers();
    return tcs.Task;
}
public void SetupSession()
{
    videoPreviewLayer.Session = captureSession;
    videoPreviewLayer.Frame = liveCameraStream.Bounds;
    liveCameraStream.Layer.AddSublayer(videoPreviewLayer);

    var captureDevice = GetBackCamera();
    ConfigureCameraForDevice(captureDevice);

    NSError err;
    videoDeviceInput = AVCaptureDeviceInput.FromDevice(captureDevice, out err);
    videoDataOutput = new AVCaptureVideoDataOutput
    {
        AlwaysDiscardsLateVideoFrames = true
    };
    DispatchQueue queue = new DispatchQueue("dbrcameraQueue");

    // Check for a creation failure before touching the input: CanAddInput(null) throws,
    // so the original ordering never reached its error branch.
    if (videoDeviceInput == null || err != null)
    {
        Console.WriteLine($"Could not create video device input: {err}");
        //this.setupResult = SessionSetupResult.ConfigurationFailed;
        this.captureSession.CommitConfiguration();
        return;
    }

    if (captureSession.CanAddInput(videoDeviceInput))
    {
        captureSession.AddInput(videoDeviceInput);
        DispatchQueue.MainQueue.DispatchAsync(() =>
        {
            var initialVideoOrientation = AVCaptureVideoOrientation.Portrait;
            var statusBarOrientation = UIApplication.SharedApplication.StatusBarOrientation;
            if (statusBarOrientation != UIInterfaceOrientation.Unknown)
            {
                AVCaptureVideoOrientation videoOrientation;
                if (Enum.TryParse(statusBarOrientation.ToString(), out videoOrientation))
                {
                    initialVideoOrientation = videoOrientation;
                }
            }
            videoPreviewLayer.Connection.VideoOrientation = initialVideoOrientation;
        });
    }
    else
    {
        Console.WriteLine("Could not add video device input to the session");
        //this.setupResult = SessionSetupResult.ConfigurationFailed;
        this.captureSession.CommitConfiguration();
        return;
    }

    if (captureSession.CanAddOutput(videoDataOutput))
    {
        captureSession.AddOutput(videoDataOutput);
        captureOutput.reader = reader;
        captureOutput.update = ResetResults;
        videoDataOutput.SetSampleBufferDelegateQueue(captureOutput, queue);
        videoDataOutput.WeakVideoSettings = new NSDictionary<NSString, NSObject>(CVPixelBuffer.PixelFormatTypeKey, NSNumber.FromInt32((int)CVPixelFormatType.CV32BGRA));
    }
    else
    {
        Console.WriteLine("Could not add video data output to the session");
        //this.setupResult = SessionSetupResult.ConfigurationFailed;
        captureSession.CommitConfiguration();
        return;
    }

    captureSession.CommitConfiguration();
}
public Task<OperationResult> ChangeCamera()
{
    TaskCompletionSource<OperationResult> tcs = new TaskCompletionSource<OperationResult>();

    AVCaptureDevice currentVideoDevice = _videoDeviceInput.Device;
    AVCaptureDevicePosition preferredPosition = AVCaptureDevicePosition.Unspecified;
    AVCaptureDevicePosition currentPosition = currentVideoDevice.Position;

    switch (currentPosition)
    {
    case AVCaptureDevicePosition.Unspecified:
    case AVCaptureDevicePosition.Front:
        preferredPosition = AVCaptureDevicePosition.Back;
        break;

    case AVCaptureDevicePosition.Back:
        preferredPosition = AVCaptureDevicePosition.Front;
        break;
    }

    var result = AVCaptureDeviceFactory.CreateDevice(AVMediaType.Video, preferredPosition);
    if (result.IsSuccessful)
    {
        AVCaptureDeviceInput videoDeviceInput = AVCaptureDeviceInput.FromDevice(result.Result);
        // Test the input that was just created (the original checked the old
        // _videoDeviceInput field here, so this branch could never fire).
        if (videoDeviceInput == null)
        {
            tcs.SetResult(OperationResult.AsFailure("Could not create video device input"));
            return tcs.Task;
        }

        _session.BeginConfiguration();

        // Remove the existing device input first, since using the front and back camera simultaneously is not supported.
        _session.RemoveInput(_videoDeviceInput);
        if (_session.CanAddInput(videoDeviceInput))
        {
            if (_subjectSubscriber != null)
            {
                _subjectSubscriber.Dispose();
            }
            result.Result.SetFlashMode(AVCaptureFlashMode.Auto);
            _subjectSubscriber = NSNotificationCenter.DefaultCenter.AddObserver(AVCaptureDevice.SubjectAreaDidChangeNotification, OnSubjectAreaChangedHandler, result.Result);
            _session.AddInput(videoDeviceInput);
            _videoDeviceInput = videoDeviceInput;
        }
        else
        {
            _session.AddInput(_videoDeviceInput);
        }

        AVCaptureConnection connection = _movieFileOutput.ConnectionFromMediaType(AVMediaType.Video);
        if (connection.SupportsVideoStabilization)
        {
            connection.PreferredVideoStabilizationMode = AVCaptureVideoStabilizationMode.Auto;
        }

        _session.CommitConfiguration();
        tcs.SetResult(OperationResult.AsSuccess());
    }
    else
    {
        tcs.SetResult(OperationResult.AsFailure("Failed to create video device: " + result.Message));
    }

    return tcs.Task;
}
void setupAVCapture()
{
    NSError error = null;

    AVCaptureSession session = new AVCaptureSession();
    if (UIDevice.CurrentDevice.UserInterfaceIdiom == UIUserInterfaceIdiom.Phone)
    {
        session.SessionPreset = AVCaptureSession.Preset640x480;
    }
    else
    {
        session.SessionPreset = AVCaptureSession.PresetPhoto;
    }

    // Select a video device, make an input
    AVCaptureDevice device = null;
    AVCaptureDevicePosition desiredPosition = AVCaptureDevicePosition.Front;

    // find the front facing camera
    foreach (AVCaptureDevice d in AVCaptureDevice.DevicesWithMediaType(AVMediaType.Video))
    {
        if (d.Position == desiredPosition)
        {
            device = d;
            this.isUsingFrontFacingCamera = true;
            break;
        }
    }

    // fall back to the default camera.
    if (device == null)
    {
        this.isUsingFrontFacingCamera = false;
        device = AVCaptureDevice.DefaultDeviceWithMediaType(AVMediaType.Video);
    }

    // get the input device
    AVCaptureDeviceInput deviceInput = AVCaptureDeviceInput.FromDevice(device, out error);
    if (error == null)
    {
        // add the input to the session
        if (session.CanAddInput(deviceInput))
        {
            session.AddInput(deviceInput);
        }

        // Make a video data output
        this.videoDataOutput = new AVCaptureVideoDataOutput();

        // we want BGRA, both CoreGraphics and OpenGL work well with 'BGRA'
        NSDictionary rgbOutputSettings = new NSDictionary(
            CVPixelBuffer.PixelFormatTypeKey, CVPixelFormatType.CV32BGRA
        );
        this.videoDataOutput.WeakVideoSettings = rgbOutputSettings;
        this.videoDataOutput.AlwaysDiscardsLateVideoFrames = true; // discard if the data output queue is blocked

        // create a serial dispatch queue used for the sample buffer delegate
        // a serial dispatch queue must be used to guarantee that video frames will be delivered in order
        // see the header doc for setSampleBufferDelegate:queue: for more information
        this.videoDataOutputQueue = new DispatchQueue("VideoDataOutputQueue");
        this.videoDataOutput.SetSampleBufferDelegate(new CustomAVCaptureVideoDataOutputSampleBufferDelegate(this), this.videoDataOutputQueue);

        if (session.CanAddOutput(this.videoDataOutput))
        {
            session.AddOutput(this.videoDataOutput);
        }

        // get the output for doing face detection.
        this.videoDataOutput.ConnectionFromMediaType(AVMediaType.Video).Enabled = true;

        this.previewLayer = new AVCaptureVideoPreviewLayer(session);
        this.previewLayer.BackgroundColor = UIColor.Black.CGColor;
        this.previewLayer.VideoGravity = AVLayerVideoGravity.ResizeAspect;

        CALayer rootLayer = this.previewView.Layer;
        rootLayer.MasksToBounds = true;
        this.previewLayer.Frame = rootLayer.Bounds;
        rootLayer.AddSublayer(this.previewLayer);

        session.StartRunning();
    }

    session = null;
    if (error != null)
    {
        UIAlertView alertView = new UIAlertView(
            "Failed with error " + (int)error.Code,
            error.LocalizedDescription, null, "Dismiss", null);
        alertView.Show();
        this.teardownAVCapture();
    }
}
void ConfigureSession()
{
    if (setupResult != AVCamSetupResult.Success)
    {
        return;
    }

    session.BeginConfiguration();

    // We do not create an AVCaptureMovieFileOutput when setting up the session because the
    // AVCaptureMovieFileOutput does not support movie recording with AVCaptureSessionPresetPhoto.
    session.SessionPreset = AVCaptureSession.PresetPhoto;

    // Add video input.
    // Choose the back dual camera if available, otherwise default to a wide angle camera.
    var defaultVideoDevice = AVCaptureDevice.GetDefaultDevice(AVCaptureDeviceType.BuiltInDuoCamera, AVMediaType.Video, AVCaptureDevicePosition.Back)
        ?? AVCaptureDevice.GetDefaultDevice(AVCaptureDeviceType.BuiltInWideAngleCamera, AVMediaType.Video, AVCaptureDevicePosition.Back)
        ?? AVCaptureDevice.GetDefaultDevice(AVCaptureDeviceType.BuiltInWideAngleCamera, AVMediaType.Video, AVCaptureDevicePosition.Front);

    NSError error;
    var input = AVCaptureDeviceInput.FromDevice(defaultVideoDevice, out error);
    if (error != null)
    {
        Console.WriteLine($"Could not create video device input: {error.LocalizedDescription}");
        setupResult = AVCamSetupResult.SessionConfigurationFailed;
        session.CommitConfiguration();
        return;
    }

    if (session.CanAddInput(input))
    {
        session.AddInput(input);
        videoDeviceInput = input;

        DispatchQueue.MainQueue.DispatchAsync(() =>
        {
            // Why are we dispatching this to the main queue?
            // Because AVCaptureVideoPreviewLayer is the backing layer for PreviewView and UIView
            // can only be manipulated on the main thread.
            // Note: As an exception to the above rule, it is not necessary to serialize video orientation changes
            // on the AVCaptureVideoPreviewLayer's connection with other session manipulation.

            // Use the status bar orientation as the initial video orientation. Subsequent orientation changes are handled by
            // ViewWillTransitionToSize method.
            var statusBarOrientation = UIApplication.SharedApplication.StatusBarOrientation;
            var initialVideoOrientation = AVCaptureVideoOrientation.Portrait;
            AVCaptureVideoOrientation videoOrientation;
            if (statusBarOrientation != UIInterfaceOrientation.Unknown && TryConvertToVideoOrientation(statusBarOrientation, out videoOrientation))
            {
                initialVideoOrientation = videoOrientation;
            }
            PreviewView.VideoPreviewLayer.Connection.VideoOrientation = initialVideoOrientation;
        });
    }
    else
    {
        Console.WriteLine("Could not add video device input to the session");
        setupResult = AVCamSetupResult.SessionConfigurationFailed;
        session.CommitConfiguration();
        return;
    }

    // Add audio input.
    //var audioDevice = AVCaptureDevice.DefaultDeviceWithMediaType(AVMediaType.Audio);
    var audioDevice = AVCaptureDevice.GetDefaultDevice(AVMediaType.Audio);
    var audioDeviceInput = AVCaptureDeviceInput.FromDevice(audioDevice, out error);
    if (error != null)
    {
        Console.WriteLine($"Could not create audio device input: {error.LocalizedDescription}");
    }
    // Guard against a null input: CanAddInput(null) throws.
    if (audioDeviceInput != null && session.CanAddInput(audioDeviceInput))
    {
        session.AddInput(audioDeviceInput);
    }
    else
    {
        Console.WriteLine("Could not add audio device input to the session");
    }

    // Add photo output.
    if (session.CanAddOutput(photoOutput))
    {
        session.AddOutput(photoOutput);
        photoOutput.IsHighResolutionCaptureEnabled = true;
        photoOutput.IsLivePhotoCaptureEnabled = photoOutput.IsLivePhotoCaptureSupported;
        livePhotoMode = photoOutput.IsLivePhotoCaptureSupported ? LivePhotoMode.On : LivePhotoMode.Off;
    }
    else
    {
        Console.WriteLine("Could not add photo output to the session");
        setupResult = AVCamSetupResult.SessionConfigurationFailed;
        session.CommitConfiguration();
        return;
    }

    session.CommitConfiguration();
}
protected virtual void SetupAVCapture()
{
    AVCaptureDeviceInput deviceInput;

    // Select a video device, make an input
    var videoDevice = AVCaptureDeviceDiscoverySession.Create(
        new AVCaptureDeviceType[] { AVCaptureDeviceType.BuiltInWideAngleCamera },
        AVMediaType.Video,
        AVCaptureDevicePosition.Back
    ).Devices.FirstOrDefault();

    deviceInput = new AVCaptureDeviceInput(videoDevice, out NSError error);
    if (error != null)
    {
        Console.WriteLine($"Could not create video device input: {error.LocalizedDescription}");
        return;
    }

    session.BeginConfiguration();
    session.SessionPreset = AVCaptureSession.Preset640x480; // Model image size is smaller

    // Add a video input
    if (!session.CanAddInput(deviceInput))
    {
        Console.WriteLine("Could not add video device input to the session");
        session.CommitConfiguration();
        return;
    }
    session.AddInput(deviceInput);

    if (session.CanAddOutput(videoDataOutput))
    {
        session.AddOutput(videoDataOutput);
        // Add a video data output
        videoDataOutput.AlwaysDiscardsLateVideoFrames = true;
        videoDataOutput.WeakVideoSettings = new NSDictionary(CVPixelBuffer.PixelFormatTypeKey, CVPixelFormatType.CV420YpCbCr8BiPlanarFullRange);
        videoDataOutput.SetSampleBufferDelegateQueue(this, videoDataOutputQueue);
    }
    else
    {
        Console.WriteLine("Could not add video data output to the session");
        session.CommitConfiguration();
        return;
    }

    var captureConnection = videoDataOutput.ConnectionFromMediaType(AVMediaType.Video);
    // Always process the frames
    captureConnection.Enabled = true;

    videoDevice.LockForConfiguration(out NSError error2);
    if (error2 == null)
    {
        var formatDescription = videoDevice.ActiveFormat.FormatDescription as CMVideoFormatDescription;
        CMVideoDimensions dimensions = formatDescription.Dimensions;
        bufferSize.Width = dimensions.Width;
        bufferSize.Height = dimensions.Height;
        videoDevice.UnlockForConfiguration();
    }
    else
    {
        Console.WriteLine($"{error2.LocalizedDescription}");
    }

    session.CommitConfiguration();

    previewLayer = AVCaptureVideoPreviewLayer.FromSession(session);
    previewLayer.VideoGravity = AVLayerVideoGravity.ResizeAspectFill;
    rootLayer = previewView.Layer;
    previewLayer.Frame = rootLayer.Bounds;
    rootLayer.AddSublayer(previewLayer);
}
public bool SetupCapture()
{
    // configure the capture session for low resolution, change this if your code
    // can cope with more data or volume
    captureSession = new AVCaptureSession()
    {
        //SessionPreset = AVCaptureSession.PresetPhoto
        SessionPreset = AVCaptureSession.Preset1280x720
    };

    // create a device input and attach it to the session
    var captureDevice = AVCaptureDevice.GetDefaultDevice(AVMediaType.Video);
    captureDeviceInput = AVCaptureDeviceInput.FromDevice(captureDevice);
    if (captureDeviceInput == null)
    {
        Console.WriteLine("No video input device");
        return false;
    }

    if (captureSession.CanAddInput(captureDeviceInput))
    {
        captureSession.AddInput(captureDeviceInput);
    }
    else
    {
        Console.WriteLine("Could not add input capture device to AVCaptureSession");
        return false;
    }

    // create a VideoDataOutput and add it to the session
    AVCaptureVideoDataOutput output = new AVCaptureVideoDataOutput
    {
        AlwaysDiscardsLateVideoFrames = false, // true,
        WeakVideoSettings = new CVPixelBufferAttributes()
        {
            PixelFormatType = CVPixelFormatType.CV24RGB
        }.Dictionary
        //,
        // If you want to cap the frame rate at a given speed, in this sample: 30 frames per second
        //MinFrameDuration = new CMTime(1, 30)
    };

    CoreFoundation.DispatchQueue videoCaptureQueue = new CoreFoundation.DispatchQueue("Video Capture Queue");
    output.SetSampleBufferDelegateQueue(this, videoCaptureQueue);

    if (captureSession.CanAddOutput(output))
    {
        captureSession.AddOutput(output);
    }
    else
    {
        return false;
    }

    // add preview layer to this view controller's NSView
    AVCaptureVideoPreviewLayer previewLayer = new AVCaptureVideoPreviewLayer(captureSession);
    previewLayer.Frame = this.View.Bounds;
    previewLayer.VideoGravity = AVLayerVideoGravity.ResizeAspectFill;
    if (this.View.Layer == null)
    {
        this.View.WantsLayer = true;
        this.View.Layer = previewLayer;
    }
    else
    {
        this.View.WantsLayer = true;
        this.View.Layer.AddSublayer(previewLayer);
    }

    captureSession.StartRunning();
    return true;
}
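The class above passes itself as the sample-buffer delegate but the callback is not shown. A minimal sketch of the method it would need (everything inside the body is an assumption; the key detail is disposing each buffer, otherwise the capture pipeline runs out of buffers and stalls):

[Export("captureOutput:didOutputSampleBuffer:fromConnection:")]
public void DidOutputSampleBuffer(AVCaptureOutput captureOutput, CMSampleBuffer sampleBuffer, AVCaptureConnection connection)
{
    using (var pixelBuffer = sampleBuffer.GetImageBuffer() as CVPixelBuffer)
    {
        // ... process the frame here ...
    }
    // Required: release the buffer back to the capture pipeline.
    sampleBuffer.Dispose();
}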
private void BeginSession()
{
    try
    {
        NSError error = null;
        var deviceInput = new AVCaptureDeviceInput(captureDevice, out error);

        // Pair the CommitConfiguration call below with a BeginConfiguration.
        captureSession.BeginConfiguration();

        if (error == null && captureSession.CanAddInput(deviceInput))
        {
            captureSession.AddInput(deviceInput);
        }

        previewLayer = new AVCaptureVideoPreviewLayer(captureSession)
        {
            VideoGravity = AVLayerVideoGravity.ResizeAspect
        };
        //this.HomeView.BackgroundColor = UIColor.Black;
        previewLayer.Frame = this.HomeView.Layer.Bounds;
        this.HomeView.Layer.AddSublayer(previewLayer);

        captureDevice.LockForConfiguration(out error);
        if (error != null)
        {
            Console.WriteLine(error);
            captureDevice.UnlockForConfiguration();
            return;
        }

        if (UIDevice.CurrentDevice.CheckSystemVersion(7, 0))
        {
            captureDevice.ActiveVideoMinFrameDuration = new CMTime(1, 15);
        }
        captureDevice.UnlockForConfiguration();

        // create a VideoDataOutput and add it to the session
        videoOut = new AVCaptureVideoDataOutput()
        {
            AlwaysDiscardsLateVideoFrames = true,
            WeakVideoSettings = new CVPixelBufferAttributes()
            {
                PixelFormatType = CVPixelFormatType.CV32BGRA
            }.Dictionary
        };
        if (captureSession.CanAddOutput(videoOut))
        {
            captureSession.AddOutput(videoOut);
        }

        // Start running only after the configuration has been committed.
        captureSession.CommitConfiguration();
        captureSession.StartRunning();

        setupAVFoundationFaceDetection();

        //var OutputSampleDelegate = new VideoCapture(
        //(s) =>
        //{
        //    GreetingsLabel.Text = s;
        //    PopulateList(s);
        //}, new Action<CIImage, CGRect>(DrawFaces));
        //videoOut.SetSampleBufferDelegateQueue(OutputSampleDelegate, sessionQueue);
    }
    catch (Exception ex)
    {
        Console.WriteLine(ex.Message);
    }
}
// Call this on the session queue.
void ConfigureSession()
{
    if (setupResult != AVCamSetupResult.Success)
    {
        return;
    }

    NSError error = null;
    session.BeginConfiguration();

    /*
     * We do not create an AVCaptureMovieFileOutput when setting up the session because the
     * AVCaptureMovieFileOutput does not support movie recording with AVCaptureSessionPresetPhoto.
     */
    session.SessionPreset = AVCaptureSession.PresetPhoto;

    // Add video input.
    // Choose the back dual camera if available, otherwise default to a wide angle camera.
    var videoDevice = AVCaptureDevice.GetDefaultDevice(AVCaptureDeviceType.BuiltInDualCamera, AVMediaType.Video, AVCaptureDevicePosition.Back);
    if (videoDevice == null)
    {
        // If the back dual camera is not available, default to the back wide angle camera.
        videoDevice = AVCaptureDevice.GetDefaultDevice(AVCaptureDeviceType.BuiltInWideAngleCamera, AVMediaType.Video, AVCaptureDevicePosition.Back);

        // In some cases where users break their phones, the back wide angle camera is not available.
        // In this case, we should default to the front wide angle camera.
        if (videoDevice == null)
        {
            videoDevice = AVCaptureDevice.GetDefaultDevice(AVCaptureDeviceType.BuiltInWideAngleCamera, AVMediaType.Video, AVCaptureDevicePosition.Front);
        }
    }

    var lVideoDeviceInput = AVCaptureDeviceInput.FromDevice(videoDevice, out error);
    if (lVideoDeviceInput == null)
    {
        Console.WriteLine($"Could not create video device input: {error}");
        setupResult = AVCamSetupResult.SessionConfigurationFailed;
        session.CommitConfiguration();
        return;
    }

    if (session.CanAddInput(lVideoDeviceInput))
    {
        session.AddInput(lVideoDeviceInput);
        videoDeviceInput = lVideoDeviceInput;

        DispatchQueue.MainQueue.DispatchAsync(() =>
        {
            /*
             * Why are we dispatching this to the main queue?
             * Because AVCaptureVideoPreviewLayer is the backing layer for AVCamPreviewView and UIView
             * can only be manipulated on the main thread.
             * Note: As an exception to the above rule, it is not necessary to serialize video orientation changes
             * on the AVCaptureVideoPreviewLayer's connection with other session manipulation.
             *
             * Use the status bar orientation as the initial video orientation. Subsequent orientation changes are
             * handled by -[AVCamCameraViewController viewWillTransitionToSize:withTransitionCoordinator:].
             */
            var statusBarOrientation = UIApplication.SharedApplication.StatusBarOrientation;
            var initialVideoOrientation = AVCaptureVideoOrientation.Portrait;
            if (statusBarOrientation != UIInterfaceOrientation.Unknown)
            {
                initialVideoOrientation = (AVCaptureVideoOrientation)statusBarOrientation;
            }
            VideoPreviewLayer.Connection.VideoOrientation = initialVideoOrientation;
        });
    }
    else
    {
        Console.WriteLine("Could not add video device input to the session");
        setupResult = AVCamSetupResult.SessionConfigurationFailed;
        session.CommitConfiguration();
        return;
    }

    // Add audio input.
    var audioDevice = AVCaptureDevice.GetDefaultDevice(AVMediaType.Audio);
    var audioDeviceInput = AVCaptureDeviceInput.FromDevice(audioDevice, out error);
    if (audioDeviceInput == null)
    {
        Console.WriteLine($"Could not create audio device input: {error}");
    }
    // Guard against a null input: CanAddInput(null) throws.
    if (audioDeviceInput != null && session.CanAddInput(audioDeviceInput))
    {
        session.AddInput(audioDeviceInput);
    }
    else
    {
        Console.WriteLine("Could not add audio device input to the session");
    }

    // Add photo output.
    var lPhotoOutput = new AVCapturePhotoOutput();
    if (session.CanAddOutput(lPhotoOutput))
    {
        session.AddOutput(lPhotoOutput);
        photoOutput = lPhotoOutput;

        photoOutput.IsHighResolutionCaptureEnabled = true;
        photoOutput.IsLivePhotoCaptureEnabled = photoOutput.IsLivePhotoCaptureSupported;
        //photoOutput.IsDepthDataDeliveryEnabled(photoOutput.IsDepthDataDeliverySupported());

        livePhotoMode = photoOutput.IsLivePhotoCaptureSupported ? AVCamLivePhotoMode.On : AVCamLivePhotoMode.Off;
        //depthDataDeliveryMode = photoOutput.IsDepthDataDeliverySupported() ? AVCamDepthDataDeliveryMode.On : AVCamDepthDataDeliveryMode.Off;

        inProgressPhotoCaptureDelegates = new Dictionary<long, AVCamPhotoCaptureDelegate>();
        inProgressLivePhotoCapturesCount = 0;
    }
    else
    {
        Console.WriteLine("Could not add photo output to the session");
        setupResult = AVCamSetupResult.SessionConfigurationFailed;
        session.CommitConfiguration();
        return;
    }

    backgroundRecordingId = UIApplication.BackgroundTaskInvalid;
    session.CommitConfiguration();
}
public async override void ViewDidLoad()
{
    base.ViewDidLoad();

    // Disable UI. The UI is enabled if and only if the session starts running.
    CameraButton.Enabled = false;
    RecordButton.Enabled = false;
    StillButton.Enabled = false;

    // Create the AVCaptureSession.
    Session = new AVCaptureSession();

    // Setup the preview view.
    PreviewView.Session = Session;

    // Communicate with the session and other session objects on this queue.
    SessionQueue = new DispatchQueue("session queue");
    SetupResult = AVCamSetupResult.Success;

    // Check video authorization status. Video access is required and audio access is optional.
    // If audio access is denied, audio is not recorded during movie recording.
    switch (AVCaptureDevice.GetAuthorizationStatus(AVMediaType.Video))
    {
    // The user has previously granted access to the camera.
    case AVAuthorizationStatus.Authorized:
        break;

    // The user has not yet been presented with the option to grant video access.
    // We suspend the session queue to delay session setup until the access request has completed to avoid
    // asking the user for audio access if video access is denied.
    // Note that audio access will be implicitly requested when we create an AVCaptureDeviceInput for audio during session setup.
    case AVAuthorizationStatus.NotDetermined:
        SessionQueue.Suspend();
        var granted = await AVCaptureDevice.RequestAccessForMediaTypeAsync(AVMediaType.Video);
        if (!granted)
            SetupResult = AVCamSetupResult.CameraNotAuthorized;
        SessionQueue.Resume();
        break;

    // The user has previously denied access.
    default:
        SetupResult = AVCamSetupResult.CameraNotAuthorized;
        break;
    }

    // Setup the capture session.
    // In general it is not safe to mutate an AVCaptureSession or any of its inputs, outputs, or connections from multiple threads at the same time.
    // Why not do all of this on the main queue?
    // Because AVCaptureSession.StartRunning is a blocking call which can take a long time. We dispatch session setup to the sessionQueue
    // so that the main queue isn't blocked, which keeps the UI responsive.
    SessionQueue.DispatchAsync(() => {
        if (SetupResult != AVCamSetupResult.Success)
            return;

        backgroundRecordingID = -1;
        NSError error;
        AVCaptureDevice videoDevice = CreateDevice(AVMediaType.Video, AVCaptureDevicePosition.Back);
        AVCaptureDeviceInput videoDeviceInput = AVCaptureDeviceInput.FromDevice(videoDevice, out error);
        if (videoDeviceInput == null)
            Console.WriteLine("Could not create video device input: {0}", error);

        Session.BeginConfiguration();
        if (Session.CanAddInput(videoDeviceInput)) {
            Session.AddInput(VideoDeviceInput = videoDeviceInput);
            DispatchQueue.MainQueue.DispatchAsync(() => {
                // Why are we dispatching this to the main queue?
                // Because AVCaptureVideoPreviewLayer is the backing layer for PreviewView and UIView
                // can only be manipulated on the main thread.
                // Note: As an exception to the above rule, it is not necessary to serialize video orientation changes
                // on the AVCaptureVideoPreviewLayer's connection with other session manipulation.

                // Use the status bar orientation as the initial video orientation. Subsequent orientation changes are handled by
                // ViewWillTransitionToSize method.
                UIInterfaceOrientation statusBarOrientation = UIApplication.SharedApplication.StatusBarOrientation;
                AVCaptureVideoOrientation initialVideoOrientation = AVCaptureVideoOrientation.Portrait;
                if (statusBarOrientation != UIInterfaceOrientation.Unknown)
                    initialVideoOrientation = (AVCaptureVideoOrientation)(long)statusBarOrientation;

                var previewLayer = (AVCaptureVideoPreviewLayer)PreviewView.Layer;
                previewLayer.Connection.VideoOrientation = initialVideoOrientation;
            });
        } else {
            Console.WriteLine("Could not add video device input to the session");
            SetupResult = AVCamSetupResult.SessionConfigurationFailed;
        }

        AVCaptureDevice audioDevice = AVCaptureDevice.DefaultDeviceWithMediaType(AVMediaType.Audio);
        AVCaptureDeviceInput audioDeviceInput = AVCaptureDeviceInput.FromDevice(audioDevice, out error);
        if (audioDeviceInput == null)
            Console.WriteLine("Could not create audio device input: {0}", error);
        if (Session.CanAddInput(audioDeviceInput))
            Session.AddInput(audioDeviceInput);
        else
            Console.WriteLine("Could not add audio device input to the session");

        var movieFileOutput = new AVCaptureMovieFileOutput();
        if (Session.CanAddOutput(movieFileOutput)) {
            Session.AddOutput(MovieFileOutput = movieFileOutput);
            AVCaptureConnection connection = movieFileOutput.ConnectionFromMediaType(AVMediaType.Video);
            if (connection.SupportsVideoStabilization)
                connection.PreferredVideoStabilizationMode = AVCaptureVideoStabilizationMode.Auto;
        } else {
            Console.WriteLine("Could not add movie file output to the session");
            SetupResult = AVCamSetupResult.SessionConfigurationFailed;
        }

        var stillImageOutput = new AVCaptureStillImageOutput();
        if (Session.CanAddOutput(stillImageOutput)) {
            stillImageOutput.CompressedVideoSetting = new AVVideoSettingsCompressed {
                Codec = AVVideoCodec.JPEG
            };
            Session.AddOutput(StillImageOutput = stillImageOutput);
        } else {
            Console.WriteLine("Could not add still image output to the session");
            SetupResult = AVCamSetupResult.SessionConfigurationFailed;
        }

        Session.CommitConfiguration();
    });
}
private void SetupCamera()
{
    CaptureSession = null;
    CaptureSession = new AVCaptureSession();
    CaptureSession.SessionPreset = AVCaptureSession.PresetPhoto;

    currentDevice = null;
    inputDevice1 = null;
    inputDevice2 = null;

    foreach (AVCaptureDevice device in AVCaptureDevice.DevicesWithMediaType(AVMediaType.Video))
    {
        if (device.Position == AVCaptureDevicePosition.Front)
        {
            inputDevice1 = device;
        }
        else if (device.Position == AVCaptureDevicePosition.Back)
        {
            inputDevice2 = device;
        }
    }

    NSError error;
    if (inputDevice1.HasFlash)
    {
        inputDevice1.LockForConfiguration(out error);
        inputDevice1.FlashMode = AVCaptureFlashMode.Off;
        inputDevice1.UnlockForConfiguration(); // balance the lock taken above
        FlashButton.TitleLabel.Text = "Flash Off";
    }
    if (inputDevice2.HasFlash)
    {
        inputDevice2.LockForConfiguration(out error);
        inputDevice2.FlashMode = AVCaptureFlashMode.Off;
        inputDevice2.UnlockForConfiguration(); // balance the lock taken above
        FlashButton.TitleLabel.Text = "Flash Off";
    }

    frontCamera = AVCaptureDeviceInput.FromDevice(inputDevice1, out error);
    rearCamera = AVCaptureDeviceInput.FromDevice(inputDevice2, out error);
    currentDevice = inputDevice2;

    if (CaptureSession.CanAddInput(rearCamera))
    {
        CaptureSession.AddInput(rearCamera);
    }

    AVCaptureVideoPreviewLayer previewLayer = new AVCaptureVideoPreviewLayer(CaptureSession);
    previewLayer.VideoGravity = AVLayerVideoGravity.ResizeAspectFill;
    previewLayer.Frame = View.Frame;
    View.Layer.InsertSublayer(previewLayer, 0);

    StillImageOutput = new AVCaptureStillImageOutput();
    StillImageOutput.OutputSettings = new NSDictionary(AVVideo.CodecKey, AVVideo.CodecJPEG);
    CaptureSession.AddOutput(StillImageOutput);

    CaptureSession.StartRunning();
}
bool SetupCaptureSession()
{
    //Console.WriteLine ("SetupCaptureSession");

    // Overview: RosyWriter uses separate GCD queues for audio and video capture. If a single GCD queue
    // is used to deliver both audio and video buffers, and our video processing consistently takes
    // too long, the delivery queue can back up, resulting in audio being dropped.
    //
    // When recording, RosyWriter creates a third GCD queue for calls to AVAssetWriter. This ensures
    // that AVAssetWriter is not called to start or finish writing from multiple threads simultaneously.
    //
    // RosyWriter uses AVCaptureSession's default preset, AVCaptureSessionPresetHigh.

    // Create Capture session
    captureSession = new AVCaptureSession();
    captureSession.BeginConfiguration();

    // Create audio connection
    NSError error;
    var audioDevice = AVCaptureDevice.DefaultDeviceWithMediaType(AVMediaType.Audio);
    if (audioDevice == null)
    {
        return false; // e.g. simulator
    }

    var audioIn = new AVCaptureDeviceInput(audioDevice, out error);
    if (captureSession.CanAddInput(audioIn))
    {
        captureSession.AddInput(audioIn);
    }

    var audioOut = new AVCaptureAudioDataOutput();
    var audioCaptureQueue = new DispatchQueue("Audio Capture Queue");

    // Add the Delegate to capture each sample that comes through
    audioOut.SetSampleBufferDelegateQueue(this, audioCaptureQueue);

    if (captureSession.CanAddOutput(audioOut))
    {
        captureSession.AddOutput(audioOut);
    }
    audioConnection = audioOut.ConnectionFromMediaType(AVMediaType.Audio);

    // Create Video Session
    var videoDevice = VideoDeviceWithPosition(AVCaptureDevicePosition.Back);
    var videoIn = new AVCaptureDeviceInput(videoDevice, out error);
    if (captureSession.CanAddInput(videoIn))
    {
        captureSession.AddInput(videoIn);
    }

    // RosyWriter prefers to discard late video frames early in the capture pipeline, since its
    // processing can take longer than real-time on some platforms (such as iPhone 3GS).
    // Clients whose image processing is faster than real-time should consider setting AVCaptureVideoDataOutput's
    // alwaysDiscardsLateVideoFrames property to NO.
    var videoOut = new AVCaptureVideoDataOutput
    {
        AlwaysDiscardsLateVideoFrames = true,
        // HACK: Change VideoSettings to WeakVideoSettings, and AVVideoSettings to CVPixelBufferAttributes
        // VideoSettings = new AVVideoSettings (CVPixelFormatType.CV32BGRA)
        WeakVideoSettings = new CVPixelBufferAttributes()
        {
            PixelFormatType = CVPixelFormatType.CV32BGRA
        }.Dictionary
    };

    // Create a DispatchQueue for the Video Processing
    var videoCaptureQueue = new DispatchQueue("Video Capture Queue");
    videoOut.SetSampleBufferDelegateQueue(this, videoCaptureQueue);

    if (captureSession.CanAddOutput(videoOut))
    {
        captureSession.AddOutput(videoOut);
    }

    // Set the Video connection from the Video Output object
    videoConnection = videoOut.ConnectionFromMediaType(AVMediaType.Video);
    videoOrientation = videoConnection.VideoOrientation;

    captureSession.CommitConfiguration();
    return true;
}
public void SetupCamera()
{
    AVCaptureDevice captureDevice = AVCaptureDevice.GetDefaultDevice(AVCaptureDeviceType.BuiltInWideAngleCamera, AVMediaTypes.Video, AVCaptureDevicePosition.Back);
    if (captureDevice == null)
    {
        Console.WriteLine("Could not create capture device");
        return;
    }
    CaptureDevice = captureDevice;

    if (captureDevice.SupportsAVCaptureSessionPreset(AVCaptureSession.Preset3840x2160))
    {
        captureSession.SessionPreset = AVCaptureSession.Preset3840x2160;
        BufferAspectRatio = 3840.0 / 2160.0;
    }
    else
    {
        captureSession.SessionPreset = AVCaptureSession.Preset1920x1080;
        BufferAspectRatio = 1920.0 / 1080.0;
    }

    AVCaptureDeviceInput deviceInput = new AVCaptureDeviceInput(captureDevice, out NSError deviceInputErr);
    if (deviceInputErr != null)
    {
        Console.WriteLine("Could not create device input");
        deviceInputErr.Dispose();
        return;
    }
    if (captureSession.CanAddInput(deviceInput))
    {
        captureSession.AddInput(deviceInput);
    }

    VideoDataOutput.AlwaysDiscardsLateVideoFrames = true;
    VideoDataOutput.SetSampleBufferDelegateQueue(this, VideoDataOutputQueue);
    //VideoDataOutput.WeakVideoSettings = new NSDictionary<NSString, NSString> ();
    //VideoDataOutput.WeakVideoSettings.TryAdd<NSString, NSString> (CVPixelBuffer.PixelFormatTypeKey, OSType);

    if (captureSession.CanAddOutput(VideoDataOutput))
    {
        captureSession.AddOutput(VideoDataOutput);
        VideoDataOutput.ConnectionFromMediaType(AVMediaType.Video).PreferredVideoStabilizationMode = AVCaptureVideoStabilizationMode.Off;
    }
    else
    {
        Console.WriteLine("Could not add VDO output");
    }

    _ = captureDevice.LockForConfiguration(out NSError lockConf);
    if (lockConf != null)
    {
        Console.WriteLine("Could not set zoom level due to error: " + lockConf);
        lockConf.Dispose();
        return;
    }
    captureDevice.VideoZoomFactor = 2;
    captureDevice.AutoFocusRangeRestriction = AVCaptureAutoFocusRangeRestriction.Near;
    captureDevice.UnlockForConfiguration();

    captureSession.StartRunning();
}