/// <summary>
/// Configures and starts the live camera preview: builds a capture session,
/// attaches a preview layer sized to <c>liveCameraStream</c>, and wires the
/// default video device to a still-image output.
/// </summary>
public void SetupLiveCameraStream()
{
    captureSession = new AVCaptureSession();

    var videoPreviewLayer = new AVCaptureVideoPreviewLayer(captureSession)
    {
        Frame = liveCameraStream.Bounds
    };
    liveCameraStream.Layer.AddSublayer(videoPreviewLayer);

    // GetDefaultDevice returns null when no camera is available
    // (e.g. simulator, or camera permission denied).
    var captureDevice = AVCaptureDevice.GetDefaultDevice(AVMediaTypes.Video);
    if (captureDevice is null)
    {
        return;
    }

    ConfigureCameraForDevice(captureDevice);

    captureDeviceInput = AVCaptureDeviceInput.FromDevice(captureDevice);
    // BUGFIX: FromDevice returns null when the device cannot be opened for
    // capture; the original passed a possibly-null input to AddInput.
    if (captureDeviceInput is null)
    {
        return;
    }

    stillImageOutput = new AVCaptureStillImageOutput()
    {
        OutputSettings = new NSDictionary()
    };

    captureSession.AddOutput(stillImageOutput);
    captureSession.AddInput(captureDeviceInput);
    captureSession.StartRunning();
}
/// <summary>
/// Initializes the barcode scanning pipeline (device, input, metadata output,
/// session and preview layer) for the requested barcode format.
/// </summary>
/// <param name="barcodeType">Barcode format(s) the scanner should detect.</param>
/// <returns><c>true</c> if the scanner was fully initialized; <c>false</c> otherwise.</returns>
private bool InitScanner(BarcodeScanner.BarcodeFormat barcodeType)
{
    device = AVCaptureDevice.GetDefaultDevice(AVMediaType.Video);
    if (device == null)
    {
        return false;
    }

    input = AVCaptureDeviceInput.FromDevice(device);
    // BUGFIX: the null check must come BEFORE the first dereference of `input`
    // (the original checked `input == null` only after using input.Device).
    if (input == null)
    {
        return false;
    }

    if (input.Device.IsFocusModeSupported(AVCaptureFocusMode.ContinuousAutoFocus))
    {
        // BUGFIX: only change the focus mode when the configuration lock was
        // actually acquired; the original ignored the result.
        if (input.Device.LockForConfiguration(out NSError err))
        {
            input.Device.FocusMode = AVCaptureFocusMode.ContinuousAutoFocus;
            input.Device.UnlockForConfiguration();
        }
    }

    output = new AVCaptureMetadataOutput();
    output.SetDelegate(this, DispatchQueue.MainQueue);

    session = new AVCaptureSession();
    session.AddInput(input);
    session.AddOutput(output);
    // MetadataObjectTypes may only be set after the output joins a session.
    output.MetadataObjectTypes = GetBarcodeFormat(barcodeType);

    captureVideoPreviewLayer = AVCaptureVideoPreviewLayer.FromSession(session);
    captureVideoPreviewLayer.Frame = CGRect.Empty;
    captureVideoPreviewLayer.VideoGravity = AVLayerVideoGravity.ResizeAspectFill;
    captureVideoPreviewLayer.Connection.VideoOrientation = GetDeviceOrientation();
    return true;
}
/// <summary>
/// Creates a media stream with audio and/or video tracks according to the
/// given constraints, backed by the native WebRTC peer connection factory.
/// </summary>
/// <param name="constraints">Requested audio/video constraints.</param>
/// <returns>The assembled media stream.</returns>
/// <exception cref="InvalidOperationException">No matching capture device exists.</exception>
public static IMediaStream Create(MediaStreamConstraints constraints)
{
    var mediaStreamTracks = new List<IMediaStreamTrack>();

    // A track is requested either via a plain boolean constraint or via a
    // constraint object.
    bool isAudio = (constraints.Audio.Value.HasValue && constraints.Audio.Value == true)
        || constraints.Audio.Object != null;
    bool isVideo = (constraints.Video.Value.HasValue && constraints.Video.Value == true)
        || constraints.Video.Object != null;

    if (isAudio)
    {
        var defaultAudioDevice = AVCaptureDevice.GetDefaultDevice(AVMediaTypes.Audio);
        // BUGFIX: fail with a descriptive exception instead of a
        // NullReferenceException when no microphone is available.
        if (defaultAudioDevice == null)
        {
            throw new InvalidOperationException("No audio capture device is available.");
        }
        mediaStreamTracks.Add(MediaStreamTrack.Create(MediaStreamTrackKind.Audio, defaultAudioDevice.UniqueID));
    }

    if (isVideo)
    {
        var devices = Webrtc.RTCCameraVideoCapturer.CaptureDevices;
        // TODO: currently hard coded to the front camera; select based on `constraints`.
        var defaultVideoDevice = devices.FirstOrDefault(d => d.Position == AVCaptureDevicePosition.Front);
        // BUGFIX: FirstOrDefault can yield null (e.g. no front camera).
        if (defaultVideoDevice == null)
        {
            throw new InvalidOperationException("No front video capture device is available.");
        }
        mediaStreamTracks.Add(MediaStreamTrack.Create(MediaStreamTrackKind.Video, defaultVideoDevice.UniqueID));
    }

    var nativeMediaStream = WebRTCme.WebRtc.NativePeerConnectionFactory.MediaStreamWithStreamId($"{WebRTCme.WebRtc.Id}");
    var self = new MediaStream(nativeMediaStream);
    foreach (var track in mediaStreamTracks)
    {
        self.AddTrack(track);
    }
    return self;
}
// Asserts that looking up the default capture device via the raw NSString
// constant and via the AVMediaTypes enum value yields the same device.
void Compare(NSString constant, AVMediaTypes value)
{
    Assert.That(AVCaptureDevice.GetDefaultDevice(constant), Is.EqualTo(AVCaptureDevice.GetDefaultDevice(value)), value.ToString());
#if !XAMCORE_4_0
    // Pre-XAMCORE_4_0 builds also expose the legacy
    // DefaultDeviceWithMediaType(string) API; verify it agrees as well.
    Assert.That(AVCaptureDevice.GetDefaultDevice(constant), Is.EqualTo(AVCaptureDevice.DefaultDeviceWithMediaType((string)constant)), value.ToString() + ".compat");
#endif
}
/// <summary>
/// Attaches a live camera preview to the supplied view and starts a capture
/// session with the default camera and a still-image output.
/// </summary>
/// <param name="cameraView">The UIView that hosts the preview layer.</param>
public void SetupLiveCameraStream(object cameraView)
{
    _cameraView = (UIView)cameraView;
    captureSession = new AVCaptureSession();

    var videoPreviewLayer = new AVCaptureVideoPreviewLayer(captureSession)
    {
        Frame = _cameraView.Frame,
        VideoGravity = AVLayerVideoGravity.ResizeAspectFill
    };
    _cameraView.Layer.AddSublayer(videoPreviewLayer);

    var captureDevice = AVCaptureDevice.GetDefaultDevice(AVMediaType.Video);
    // BUGFIX: no camera (e.g. simulator) — bail out instead of crashing below.
    if (captureDevice == null)
    {
        return;
    }
    ConfigureCameraForDevice(captureDevice);
    captureDeviceInput = AVCaptureDeviceInput.FromDevice(captureDevice);
    captureSession.AddInput(captureDeviceInput);

    // CLEANUP: a JPEG-codec NSMutableDictionary was previously built here but
    // never assigned to OutputSettings — dead code removed.
    stillImageOutput = new AVCaptureStillImageOutput()
    {
        OutputSettings = new NSDictionary()
    };
    captureSession.AddOutput(stillImageOutput);
    captureSession.StartRunning();
}
#pragma warning restore CS4014
/// <summary>
/// Builds the capture session for either movie or still-photo capture,
/// attaches the preview layer to CameraFeedView, reveals the shutter button,
/// and starts the session.
/// </summary>
private void SetupLiveCameraStream()
{
    captureSession = new AVCaptureSession();
    // BUGFIX: CommitConfiguration was called without a matching
    // BeginConfiguration; open the configuration transaction here.
    captureSession.BeginConfiguration();

    videoPreviewLayer = new AVCaptureVideoPreviewLayer(captureSession)
    {
        Frame = CameraFeedView.Frame,
        VideoGravity = AVLayerVideoGravity.ResizeAspectFill
    };
    CameraFeedView.Layer.AddSublayer(videoPreviewLayer);

    AVCaptureDevice captureDevice = AVCaptureDevice.GetDefaultDevice(AVMediaType.Video);
    ConfigureCameraForDevice(captureDevice);
    captureDeviceInput = AVCaptureDeviceInput.FromDevice(captureDevice);
    captureSession.AddInput(captureDeviceInput);

    if (isMovie)
    {
        // Add audio
        var audioDevice = AVCaptureDevice.GetDefaultDevice(AVMediaTypes.Audio);
        var audioDeviceInput = AVCaptureDeviceInput.FromDevice(audioDevice, out NSError audioErr);
        if (audioErr != null)
        {
            Console.WriteLine("Couldn't create audio device input: " + audioErr.LocalizedDescription);
        }
        // BUGFIX: guard a null input before handing it to CanAddInput.
        if (audioDeviceInput != null && captureSession.CanAddInput(audioDeviceInput))
        {
            captureSession.AddInput(audioDeviceInput);
        }
        else
        {
            Console.WriteLine("Couldn't add audio input to session");
        }

        movieOutput = new AVCaptureMovieFileOutput();
        captureSession.AddOutput(movieOutput);
        captureSession.SessionPreset = AVCaptureSession.Preset1280x720;

        // Prefer automatic video stabilization when the connection supports it.
        var connection = movieOutput.ConnectionFromMediaType(AVMediaType.Video);
        if (connection != null && connection.SupportsVideoStabilization)
        {
            connection.PreferredVideoStabilizationMode = AVCaptureVideoStabilizationMode.Auto;
        }
        captureSession.CommitConfiguration();
    }
    else
    {
        stillImageOutput = new AVCapturePhotoOutput();
        stillImageOutput.IsHighResolutionCaptureEnabled = true;
        stillImageOutput.IsLivePhotoCaptureEnabled = false;
        captureSession.AddOutput(stillImageOutput);
        captureSession.CommitConfiguration();
    }

    ShutterButton.Hidden = false;
    captureSession.StartRunning();
}
/// <summary>
/// Lazily creates the photo capture session and output. Safe to call more
/// than once: the session and output are only built when missing. Any failure
/// is surfaced through <c>_cameraModule.ErrorMessage</c>.
/// </summary>
private void SetupCamera()
{
    try
    {
        if (_captureSession == null)
        {
            _captureSession = new AVCaptureSession
            {
                SessionPreset = AVCaptureSession.PresetPhoto
            };
        }

        SetPreviewSizing();
        SetPreviewOrientation();

        if (_photoOutput == null)
        {
            _device = AVCaptureDevice.GetDefaultDevice(AVMediaTypes.Video);
            // BUGFIX: report a clear message instead of crashing into the
            // catch below when no camera exists (simulator / permissions).
            if (_device == null)
            {
                _cameraModule.ErrorMessage = "No camera available.";
                return;
            }
            TurnOffFlashAndSetContinuousAutoMode(_device);

            _photoOutput = new AVCapturePhotoOutput
            {
                IsHighResolutionCaptureEnabled = true
            };
            _captureSession.AddOutput(_photoOutput);
            _captureSession.AddInput(AVCaptureDeviceInput.FromDevice(_device));
        }
    }
    catch (Exception e)
    {
        _cameraModule.ErrorMessage = e.ToString();
    }
}
/// <summary>
/// Opens the default camera, wires it into a new capture session with a
/// high-resolution photo output and a preview layer over previewView, and
/// starts the session. Falls back to <c>allowAndBack()</c> on failure.
/// </summary>
private void InitDevice()
{
    captureDevice = AVCaptureDevice.GetDefaultDevice(AVMediaType.Video);
    // BUGFIX: a null device (simulator / no camera) previously reached the
    // AVCaptureDeviceInput constructor and was only handled via the catch.
    if (captureDevice == null)
    {
        allowAndBack();
        return;
    }

    try
    {
        var input = new AVCaptureDeviceInput(captureDevice, out NSError err);
        if (err == null)
        {
            captureSession = new AVCaptureSession();
            captureSession.AddInput(input);

            previewLayer = new AVCaptureVideoPreviewLayer(captureSession)
            {
                VideoGravity = AVLayerVideoGravity.ResizeAspectFill,
                Frame = previewView.Layer.Bounds
            };
            previewView.Layer.AddSublayer(previewLayer);

            captureOutput = new AVCapturePhotoOutput
            {
                IsHighResolutionCaptureEnabled = true
            };
            captureSession.AddOutput(captureOutput);
            captureSession.StartRunning();
        }
    }
    catch (Exception ex)
    {
        // BUGFIX: the original discarded `ex` silently; log it so setup
        // failures are diagnosable before backing out.
        Console.WriteLine("InitDevice failed: " + ex);
        allowAndBack();
    }
}
/// <summary>
/// Configures the shared capture session at 1920x1080 with the default camera
/// as input and a sample-buffer video output delivered to this object, then
/// starts the session.
/// </summary>
private void InitialiseCaptureSession()
{
    try
    {
        _captureSession.SessionPreset = AVCaptureSession.Preset1920x1080;
        var captureDevice = AVCaptureDevice.GetDefaultDevice(AVMediaTypes.Video) as AVCaptureDevice;

        var input = new AVCaptureDeviceInput(captureDevice, out NSError error);
        // BUGFIX: the original tested `error?.Code != 0`, which is TRUE when
        // `error` is null (null != 0) and then crashed on error.ToString() on
        // the SUCCESS path. Test for a non-null error instead.
        if (error != null)
        {
            Console.WriteLine($"Error {error}");
        }

        if (_captureSession.CanAddInput(input))
        {
            _captureSession.AddInput(input);
        }

        var videoOutput = new AVCaptureVideoDataOutput();
        videoOutput.SetSampleBufferDelegateQueue(this, new DispatchQueue("sample buffer delegate"));
        if (_captureSession.CanAddOutput(videoOutput))
        {
            _captureSession.AddOutput(videoOutput);
        }

        _captureSession.StartRunning();
    }
    catch (Exception ex)
    {
        // BUGFIX: replaced the silent dead-code catch (`int i = 0; i++;`)
        // with a diagnostic log.
        Console.WriteLine($"InitialiseCaptureSession failed: {ex}");
    }
}
// Opens the default camera, adds it to the capture session when allowed, and
// prepares a portrait-oriented, aspect-fit preview layer.
void SetupCaptureDevice()
{
    captureDevice = AVCaptureDevice.GetDefaultDevice(AVMediaTypes.Video);
    if (captureDevice == null)
    {
        // No camera available on this hardware.
        Console.WriteLine("Error: no video devices available");
        return;
    }

    videoDeviceInput = AVCaptureDeviceInput.FromDevice(captureDevice);
    if (videoDeviceInput == null)
    {
        // The device exists but could not be opened for capture.
        Console.WriteLine("Error: could not create AVCaptureDeviceInput");
        return;
    }

    if (captureSession.CanAddInput(videoDeviceInput))
    {
        captureSession.AddInput(videoDeviceInput);
    }

    previewLayer = AVCaptureVideoPreviewLayer.FromSession(captureSession);
    previewLayer.VideoGravity = AVLayerVideoGravity.ResizeAspect;
    previewLayer.Connection.VideoOrientation = AVCaptureVideoOrientation.Portrait;
}
// Regression test for bug #27205: repeatedly accessing FormatDescription on
// every device format must not corrupt native reference counts.
public void RefcountTest()
{
    TestRuntime.AssertSystemVersion(ApplePlatform.iOS, 7, 0, throwIfOtherPlatform: false);

    // Bug #27205
    // Skip unless the app already has (or can get) camera permission.
    var auth = AVCaptureDevice.GetAuthorizationStatus(AVMediaTypes.Video.GetConstant());
    switch (auth)
    {
    case AVAuthorizationStatus.Restricted:
    case AVAuthorizationStatus.Denied:
    case AVAuthorizationStatus.NotDetermined:
        Assert.Inconclusive("This test requires video recording permissions.");
        return;
    }

    using (var captureSession = new AVCaptureSession())
    {
        using (var videoDevice = AVCaptureDevice.GetDefaultDevice(AVMediaTypes.Video.GetConstant()))
        {
            if (videoDevice == null)
            {
                Assert.Inconclusive("Failed to create a video device for testing");
            }
            // Access each format's description repeatedly; the refcount bug in
            // the original report crashed here.
            foreach (var format in videoDevice.Formats)
            {
                for (int i = 0; i < 10; i++)
                {
                    using (var f = format.FormatDescription)
                    {
                    }
                }
            }
        }
    }
}
/// <summary>
/// Sets up video+audio movie recording: a preview layer over
/// liveCameraStream, a movie-file output, and camera/microphone inputs, then
/// starts the session.
/// </summary>
private void SetupLiveCameraStream()
{
    captureSession = new AVCaptureSession();
    captureSession.SessionPreset = AVCaptureSession.PresetMedium;

    videoPreviewLayer = new AVCaptureVideoPreviewLayer(captureSession)
    {
        Frame = new CGRect(0f, 0f, View.Bounds.Width, View.Bounds.Height),
        Orientation = GetCameraForOrientation()
    };
    liveCameraStream.Layer.AddSublayer(videoPreviewLayer);

    // CONSISTENCY: use GetDefaultDevice like the rest of the codebase instead
    // of the obsolete DefaultDeviceWithMediaType (identical behavior).
    var captureDevice = AVCaptureDevice.GetDefaultDevice(AVMediaType.Video);
    ConfigureCameraForDevice(captureDevice);
    captureDeviceInput = AVCaptureDeviceInput.FromDevice(captureDevice);

    aVCaptureMovieFileOutput = new AVCaptureMovieFileOutput();

    // BUGFIX: the microphone can be absent (simulator / restricted); don't
    // pass a null device to FromDevice or a null input to AddInput.
    var audioDevice = AVCaptureDevice.GetDefaultDevice(AVMediaType.Audio);
    var audioDeviceInput = audioDevice != null ? AVCaptureDeviceInput.FromDevice(audioDevice) : null;

    captureSession.AddOutput(aVCaptureMovieFileOutput);
    captureSession.AddInput(captureDeviceInput);
    if (audioDeviceInput != null)
    {
        captureSession.AddInput(audioDeviceInput);
    }

    aVCaptureMovieFileOutput.ConnectionFromMediaType(AVMediaType.Video).VideoOrientation = GetCameraForOrientation();
    captureSession.StartRunning();
}
// Spins up a QR-code scanning session: camera input, a metadata output with
// this object as its delegate on a private queue, and a preview layer added
// to this view's Layer. Failures are logged and swallowed.
public void Start()
{
    captureSession = new AVCaptureSession();
    previewLayer = new AVCaptureVideoPreviewLayer(captureSession)
    {
        VideoGravity = AVLayerVideoGravity.ResizeAspectFill,
    };

    try
    {
        var camera = AVCaptureDevice.GetDefaultDevice(AVMediaTypes.Video);
        var cameraInput = AVCaptureDeviceInput.FromDevice(camera);
        var metadataOutput = new AVCaptureMetadataOutput();
        var metadataQueue = new DispatchQueue("qrQueue");

        captureSession.AddInput(cameraInput);
        captureSession.AddOutput(metadataOutput);
        metadataOutput.SetDelegate(this, metadataQueue);
        // Must be set after the output has joined the session.
        metadataOutput.MetadataObjectTypes = AVMetadataObjectType.QRCode;

        Layer.AddSublayer(previewLayer);
        captureSession.StartRunning();
    }
    catch (Exception ex)
    {
        Console.WriteLine(ex);
    }
}
/// <summary>
/// Starts the live camera preview and caches the device's exposure envelope
/// (ISO and exposure-duration limits) for the manual-exposure controls.
/// </summary>
public void SetupLiveCameraStream()
{
    captureSession = new AVCaptureSession();

    videoPreviewLayer = new AVCaptureVideoPreviewLayer(captureSession)
    {
        Frame = liveCameraStream.Bounds
    };
    liveCameraStream.Layer.AddSublayer(videoPreviewLayer);

    var captureDevice = AVCaptureDevice.GetDefaultDevice(AVMediaType.Video);
    // BUGFIX: guard the null device (simulator / no camera) before reading
    // exposure limits from it.
    if (captureDevice == null)
    {
        return;
    }
    device = captureDevice;

    // Hardware exposure limits used by the manual-exposure UI.
    maxExposure = device.ActiveFormat.MaxISO;
    minExposure = device.ActiveFormat.MinISO;
    maxDuration = device.ActiveFormat.MaxExposureDuration.Seconds;
    minDuration = device.ActiveFormat.MinExposureDuration.Seconds;

    ConfigureCameraForDevice(captureDevice);
    captureDeviceInput = AVCaptureDeviceInput.FromDevice(captureDevice);

    // CLEANUP: a JPEG-codec NSMutableDictionary was previously built here but
    // never assigned to OutputSettings — dead code removed.
    stillImageOutput = new AVCaptureStillImageOutput()
    {
        OutputSettings = new NSDictionary()
    };

    captureSession.AddOutput(stillImageOutput);
    captureSession.AddInput(captureDeviceInput);
    captureSession.StartRunning();
}
/// <summary>
/// Starts the live camera preview over <c>liveCameraStream</c> (sized to the
/// controller's view) with the default camera and a still-image output.
/// </summary>
public void SetupLiveCameraStream()
{
    captureSession = new AVCaptureSession();

    // CLEANUP: removed the unused `viewLayer` local from the original.
    videoPreviewLayer = new AVCaptureVideoPreviewLayer(captureSession)
    {
        Frame = this.View.Frame
    };
    liveCameraStream.Layer.AddSublayer(videoPreviewLayer);

    var captureDevice = AVCaptureDevice.GetDefaultDevice(AVMediaType.Video);
    // BUGFIX: bail out when no camera exists (simulator) instead of crashing
    // inside ConfigureCameraForDevice.
    if (captureDevice == null)
    {
        return;
    }
    ConfigureCameraForDevice(captureDevice);
    captureDeviceInput = AVCaptureDeviceInput.FromDevice(captureDevice);
    captureSession.AddInput(captureDeviceInput);

    // CLEANUP: a JPEG-codec NSMutableDictionary was previously built here but
    // never assigned to OutputSettings — dead code removed.
    stillImageOutput = new AVCaptureStillImageOutput()
    {
        OutputSettings = new NSDictionary()
    };
    captureSession.AddOutput(stillImageOutput);
    captureSession.StartRunning();
}
// Returns the built-in wide-angle camera at the requested position
// (front/back), or null when no such device exists.
private AVCaptureDevice CameraWithPosition(AVCaptureDevicePosition position) =>
    AVCaptureDevice.GetDefaultDevice(AVCaptureDeviceType.BuiltInWideAngleCamera, AVMediaType.Video, position);
// CLEANUP: a large commented-out RegisterFaces() experiment (Face API person
// group registration) previously lived here; recover it from source control
// if it is ever needed again.

/// <summary>
/// Selects the front camera (falling back to the default video device when no
/// front camera exists), sets a medium-quality session preset, and begins the
/// capture session.
/// </summary>
private void PrepareCamera()
{
    captureSession.SessionPreset = AVCaptureSession.PresetMedium;
    captureDevice = AVCaptureDevice.DevicesWithMediaType(AVMediaType.Video)
        .FirstOrDefault(d => d.Position == AVCaptureDevicePosition.Front)
        ?? AVCaptureDevice.GetDefaultDevice(AVMediaType.Video);

    BeginSession();
}
/// <summary>
/// Returns <c>true</c> when the default video device has a torch and it is
/// currently on; <c>false</c> otherwise, including when no camera exists.
/// </summary>
public static bool IsTorchOn()
{
    var videoDevice = AVCaptureDevice.GetDefaultDevice(AVMediaType.Video);
    // BUGFIX: GetDefaultDevice returns null on hardware without a camera
    // (e.g. simulator); the original dereferenced it unconditionally.
    if (videoDevice == null || !videoDevice.HasTorch)
    {
        return false;
    }
    return videoDevice.TorchMode == AVCaptureTorchMode.On;
}
/// <summary>
/// Builds the OpenGL-backed capture pipeline: a CoreVideo texture cache, a
/// session at the requested preset, the default camera as input, and a
/// video-data output delivered on the main queue (the GL code reads frames
/// synchronously there).
/// </summary>
/// <param name="sessionPreset">The AVCaptureSession preset to use.</param>
private void SetupAVCapture(NSString sessionPreset)
{
    if ((this.VideoTextureCache = CVOpenGLESTextureCache.FromEAGLContext(this.context)) == null)
    {
        Console.WriteLine("Could not create the CoreVideo TextureCache");
        return;
    }

    this.session = new AVCaptureSession();
    this.session.BeginConfiguration();

    // Preset size
    this.session.SessionPreset = sessionPreset;

    // Input device
    var videoDevice = AVCaptureDevice.GetDefaultDevice(AVMediaType.Video);
    if (videoDevice == null)
    {
        Console.WriteLine("No video device");
        // BUGFIX: balance BeginConfiguration before bailing out; the original
        // left the session stuck inside an open configuration transaction.
        this.session.CommitConfiguration();
        return;
    }

    var input = new AVCaptureDeviceInput(videoDevice, out NSError error);
    if (error != null)
    {
        Console.WriteLine("Error creating video capture device");
        // BUGFIX: same balancing as above on this error path.
        this.session.CommitConfiguration();
        return;
    }

    this.session.AddInput(input);

    // Create the output device
    using (var dataOutput = new AVCaptureVideoDataOutput())
    {
        dataOutput.AlwaysDiscardsLateVideoFrames = true;

        // YUV 420, use "BiPlanar" to split the Y and UV planes in two separate blocks of
        // memory, then we can index 0 to get the Y and 1 for the UV planes in the frame decoding
        //VideoSettings = new AVVideoSettings (CVPixelFormatType.CV420YpCbCr8BiPlanarFullRange)

        this.dataOutputDelegate = new DataOutputDelegate(this);

        // This dispatches the video frames into the main thread, because the
        // OpenGL code is accessing the data synchronously.
        dataOutput.SetSampleBufferDelegateQueue(this.dataOutputDelegate, DispatchQueue.MainQueue);
        this.session.AddOutput(dataOutput);
    }

    this.session.CommitConfiguration();
    this.session.StartRunning();
}
// Configures a FrameCapturer over the default camera: aspect-fit preview,
// 4:2:2 YpCbCr pixel format, high-quality session preset.
public ViewController(IntPtr handle) : base(handle)
{
    var configuration = new FrameCapturerConfig()
    {
        LayerGravity = AVLayerVideoGravity.ResizeAspect,
        Device = AVCaptureDevice.GetDefaultDevice(AVMediaTypes.Video),
        PixelFormat = CVPixelFormatType.CV422YpCbCr8,
        FrameQualityPreset = AVCaptureSession.PresetHigh,
    };

    capturer = FrameCapturer.WithConfiguration(configuration);
}
// Device-only test: verifies that every AVMetadataObjectType value can be
// assigned to an AVCaptureMetadataOutput (individually and OR'ed together)
// and read back unchanged, skipping values that are unsupported or crash on
// specific OS/Xcode combinations.
public void MetadataObjectTypesTest()
{
    TestRuntime.AssertSystemVersion(ApplePlatform.iOS, 8, 0, throwIfOtherPlatform: false);
    TestRuntime.AssertDevice("This test only runs on device (requires camera access)");
    TestRuntime.RequestCameraPermission(AVMediaTypes.Video.GetConstant(), true);

    using (var captureSession = new AVCaptureSession())
    {
        using (var videoDevice = AVCaptureDevice.GetDefaultDevice(AVMediaTypes.Video.GetConstant()))
        {
            NSError error;
            using (var videoInput = new AVCaptureDeviceInput(videoDevice, out error))
            {
                // Metadata types can only be configured once the output is
                // attached to a session with a live input.
                if (captureSession.CanAddInput(videoInput))
                {
                    captureSession.AddInput(videoInput);
                }
                using (var metadataOutput = new AVCaptureMetadataOutput())
                {
                    if (captureSession.CanAddOutput(metadataOutput))
                    {
                        captureSession.AddOutput(metadataOutput);
                    }

                    AVMetadataObjectType all = AVMetadataObjectType.None;
                    foreach (AVMetadataObjectType val in Enum.GetValues(typeof(AVMetadataObjectType)))
                    {
                        switch (val)
                        {
                        case AVMetadataObjectType.CatBody:
                        case AVMetadataObjectType.DogBody:
                        case AVMetadataObjectType.HumanBody:
                        case AVMetadataObjectType.SalientObject:
                            // fail *and crash* on iOS 8 (at least on 32bits devices)
                            if (!TestRuntime.CheckXcodeVersion(11, 0))
                            {
                                continue;
                            }
                            // xcode 12 beta 1 on device
                            if (TestRuntime.IsDevice && TestRuntime.CheckXcodeVersion(12, 0))
                            {
                                continue;
                            }
                            break;
                        }

                        // Round-trip each individual value...
                        metadataOutput.MetadataObjectTypes = val;
                        all |= val;
                        Assert.AreEqual(val, metadataOutput.MetadataObjectTypes, val.ToString());
                    }
                    // ...and the union of all accepted values.
                    metadataOutput.MetadataObjectTypes = all;
                    Assert.AreEqual(all, metadataOutput.MetadataObjectTypes, all.ToString());
                }
            }
        }
    }
}
// Builds the OCR capture UI: wires the camera into a capture session, hooks
// the camera handler's FinishedProcessing event to display the picture and
// re-run the OCR reader, and wires the selection-slider controls.
public override void ViewDidLoad()
{
    base.ViewDidLoad();
    OCR = new TesseractApi();

    CaptureSession = new AVCaptureSession();
    ImageOutput = new AVCapturePhotoOutput();

    // NOTE(review): GetDefaultDevice can return null (simulator / no camera),
    // in which case FromDevice would fail — confirm a camera is guaranteed here.
    var cameraDevice = AVCaptureDevice.GetDefaultDevice(AVMediaType.Video);
    var cameraInput = AVCaptureDeviceInput.FromDevice(cameraDevice);

    CaptureSession.AddInput(cameraInput);
    CaptureSession.AddOutput(ImageOutput);

    SetupUI();
    CaptureSession.StartRunning();

    Camera = new CameraHandler();
    // After a capture finishes: show the picture (rotated for camera
    // orientation), snapshot the view, and re-initialize the OCR reader.
    Camera.FinishedProcessing += async delegate
    {
        PictureView.Image = new UIImage(Camera.Picture, 1f, UIImageOrientation.Right);
        Capture = PictureView.Capture();
        await InitReader();
    };
    OCRButton.TouchUpInside += async delegate
    {
        HandleButtonClick();
    };
    AlphaNumericSwitch.ValueChanged += async delegate
    {
        await SetOcrTextLabel();
    };

    // Selection slider Setup
    SelectionBarSlider.TouchUpInside += async delegate
    {
        await InitReader();
    };
    SelectionBarSlider.TouchUpOutside += async delegate
    {
        await InitReader();
    };
    // Track the slider value by moving the selection bar overlay vertically
    // (92 px of travel plus a 22 px top offset).
    SelectionBarSlider.ValueChanged += delegate
    {
        var tempFrame = SelectionBarView.Frame;
        tempFrame.Y = (SelectionBarSlider.Value * 92) + 22;
        SelectionBarView.Frame = tempFrame;
    };
}
/// <summary>
/// Retrieves the camera device.
/// </summary>
/// <returns><c>true</c>, if camera device was retrieved, <c>false</c> otherwise.</returns>
public bool RetrieveCameraDevice()
{
    _device = AVCaptureDevice.GetDefaultDevice(AVMediaType.Video);
    if (_device != null)
    {
        return true;
    }

    // No camera on this hardware (e.g. simulator).
    Console.WriteLine("\n" + "RetrieveCameraDevice() No device detected \n ");
    return false;
}
// Builds a QR-code scanner: camera input plus metadata output on the shared
// session, a full-screen preview layer, and a green frame view that the
// metadata delegate moves over detected codes. Each setup step that can fail
// shows an alert (or silently returns) and aborts.
public override void ViewDidLoad()
{
    base.ViewDidLoad();

    var cameraDevice = AVCaptureDevice.GetDefaultDevice(AVMediaTypes.Video);
    if (cameraDevice is null)
    {
        this.ShowAlert("无法访问相机", null);
        return;
    }

    var cameraInput = AVCaptureDeviceInput.FromDevice(cameraDevice);
    if (cameraInput is null)
    {
        this.ShowAlert("无法访问相机", null);
        return;
    }
    session.AddInput(cameraInput);

    try
    {
        var metadataOutput = new AVCaptureMetadataOutput();
        metadataOutput.SetDelegate(this, DispatchQueue.MainQueue);
        session.AddOutput(metadataOutput);
        metadataOutput.MetadataObjectTypes = AVMetadataObjectType.QRCode;
    }
    catch
    {
        return;
    }

    preview = AVCaptureVideoPreviewLayer.FromSession(session);
    if (preview is null)
    {
        this.ShowAlert("无法显示扫描预览", null);
        return;
    }
    preview.VideoGravity = AVLayerVideoGravity.Resize;
    preview.Frame = View.Layer.Bounds;
    View.Layer.AddSublayer(preview);

    session.StartRunning();

    // Green rectangle used to highlight detected QR codes.
    codeFrame = new UIView();
    codeFrame.Layer.BorderColor = UIColor.Green.CGColor;
    codeFrame.Layer.BorderWidth = 2;
    View.AddSubview(codeFrame);
    View.BringSubviewToFront(codeFrame);
}
// Lazily builds the shared capture session: medium preset, default camera as
// input, and a 32BGRA video-data output delivered to outputRecorder on a
// private queue. Always (re)starts the session at the end.
protected void SetupCaptureSession()
{
    if (session == null)
    {
        // Low resolution keeps the per-frame workload manageable; raise the
        // preset if the consumer can cope with more data or volume.
        session = new AVCaptureSession()
        {
            SessionPreset = AVCaptureSession.PresetMedium
        };

        // create a device input and attach it to the session
        var captureDevice = AVCaptureDevice.GetDefaultDevice(AVMediaType.Video);
        if (captureDevice == null)
        {
            SetMessage("Capture device not found.");
            return;
        }

        var input = AVCaptureDeviceInput.FromDevice(captureDevice);
        if (input == null)
        {
            SetMessage("No input from the capture Device.");
            return;
        }
        session.AddInput(input);

        // Uncompressed 32-bit BGRA frames for the output recorder.
        AVVideoSettingsUncompressed settingUncomp = new AVVideoSettingsUncompressed();
        settingUncomp.PixelFormatType = CVPixelFormatType.CV32BGRA;
        var output = new AVCaptureVideoDataOutput()
        {
            UncompressedVideoSetting = settingUncomp,
            // To cap the frame rate, set MinFrameDuration (e.g. CMTime(1, 15)
            // for 15 fps).
        };

        queue = new DispatchQueue("myQueue");
        output.SetSampleBufferDelegateQueue(outputRecorder, queue);
        session.AddOutput(output);
    }
    session.StartRunning();
}
/// <summary>
/// Creates a camera stream and adds it to the view: preview layer, default
/// camera input, a 32BGRA frame-buffer output feeding <c>Recorder</c>, and
/// the target-overlay UI.
/// </summary>
private async Task SetupLiveCameraStream()
{
    captureSession = new AVCaptureSession();

    // SETUP THE PREVIEW OF THE CAPTURE SESSION
    videoPreviewLayer = new AVCaptureVideoPreviewLayer(captureSession)
    {
        Frame = this.Frame
    };
    this.Layer.AddSublayer(videoPreviewLayer);

    // SETUP THE INPUT DEVICE FOR THE SESSION
    var captureDevice = AVCaptureDevice.GetDefaultDevice(AVMediaType.Video);
    // BUGFIX: bail out cleanly when no camera exists (simulator) instead of
    // crashing inside ConfigureCameraForDevice.
    if (captureDevice == null)
    {
        return;
    }
    ConfigureCameraForDevice(captureDevice);
    captureDeviceInput = AVCaptureDeviceInput.FromDevice(captureDevice);
    captureSession.AddInput(captureDeviceInput);

    // VIDEO OUTPUT BUFFERING METHOD
    var output = new AVCaptureVideoDataOutput();
    var settings = new AVVideoSettingsUncompressed
    {
        PixelFormatType = CVPixelFormatType.CV32BGRA
    };
    output.WeakVideoSettings = settings.Dictionary;

    Queue = new DispatchQueue("ManCamQueue");
    Recorder = new OutputRecorder(this, VideoFrameInterval);
    Recorder.OnFrameRecieved += HandleVideFrameImage;
    output.SetSampleBufferDelegate(Recorder, Queue);
    captureSession.AddOutput(output);

    // UI PREPERATION
    AddTargetOverlay();
    textOutputLabel = new UILabel(new CGRect(targetOverlayView.Frame.Width + 10, 10, 100, 100))
    {
        TextColor = UIColor.White,
        Font = UIFont.BoldSystemFontOfSize(22)
    };
    this.AddSubview(textOutputLabel);

    captureSession.StartRunning();

    // FIX for CS1998: the method had no awaits; keep the async Task signature
    // for caller compatibility and complete explicitly.
    await Task.CompletedTask;
}
// Prefers the built-in dual camera on the back (falling back to the default
// video device), configures a photo session (high-res, no live photos) inside
// a single configuration transaction, and shows the preview over
// liveCameraStream.
private void SetupLiveCameraStream()
{
    _captureSession = new AVCaptureSession();

    var captureDevice = AVCaptureDevice.GetDefaultDevice(
        AVCaptureDeviceType.BuiltInDualCamera, AVMediaType.Video, AVCaptureDevicePosition.Back);
    if (captureDevice == null)
    {
        // No dual camera on this hardware — use whatever the default is.
        captureDevice = AVCaptureDevice.GetDefaultDevice(AVMediaType.Video);
    }
    ConfigureCameraForDevice(captureDevice);

    _captureDeviceInput = AVCaptureDeviceInput.FromDevice(captureDevice);
    if (!_captureSession.CanAddInput(_captureDeviceInput))
    {
        return;
    }

    _capturePhotoOutput = new AVCapturePhotoOutput();
    _capturePhotoOutput.IsHighResolutionCaptureEnabled = true;
    _capturePhotoOutput.IsLivePhotoCaptureEnabled = false;
    if (!_captureSession.CanAddOutput(_capturePhotoOutput))
    {
        return;
    }

    // Batch all session mutations into one configuration transaction.
    _captureSession.BeginConfiguration();
    _captureSession.SessionPreset = AVCaptureSession.PresetPhoto;
    _captureSession.AddInput(_captureDeviceInput);
    _captureSession.AddOutput(_capturePhotoOutput);
    _captureSession.CommitConfiguration();

    _videoPreviewLayer = new AVCaptureVideoPreviewLayer(_captureSession)
    {
        Frame = liveCameraStream.Frame,
        VideoGravity = AVLayerVideoGravity.ResizeAspectFill,
    };
    liveCameraStream.Layer.AddSublayer(_videoPreviewLayer);

    _captureSession.StartRunning();
}
/// <summary>
/// Adds the default microphone to the capture session. Failure to add audio
/// is tolerated: the session stays usable for video-only capture.
/// </summary>
/// <param name="session">The capture session to add the audio input to.</param>
public void ConfigureSession(AVCaptureSession session)
{
    Console.WriteLine("capture session: configuring - adding audio input");

    // Add audio input, if fails no need to fail whole configuration
    var audioDevice = AVCaptureDevice.GetDefaultDevice(AVMediaType.Audio);
    // BUGFIX: both the device and the input can be null (no microphone /
    // permission denied); the original passed a possibly-null device to
    // FromDevice and a possibly-null input to CanAddInput.
    var audioDeviceInput = audioDevice != null ? AVCaptureDeviceInput.FromDevice(audioDevice) : null;
    if (audioDeviceInput != null && session.CanAddInput(audioDeviceInput))
    {
        session.AddInput(audioDeviceInput);
    }
    else
    {
        Console.WriteLine("capture session: could not add audio device input to the session");
    }
}
}//*/

// Camera can't be used in the simulator.
//*
// Builds a basic capture pipeline: a fixed-frame preview layer on the view,
// the default camera as input, and a still-image output, then starts running.
// (The //* ... //*/ pair is a comment toggle — delete one slash to disable.)
public void ConfigurationCamera()
{
    acsSession = new AVCaptureSession();

    previewLayer = new AVCaptureVideoPreviewLayer(acsSession)
    {
        Frame = new RectangleF(30, 40, 300, 350)
    };
    View.Layer.AddSublayer(previewLayer);

    acdDevice = AVCaptureDevice.GetDefaultDevice(AVMediaType.Video);
    deviceInput = AVCaptureDeviceInput.FromDevice(acdDevice);
    acsSession.AddInput(deviceInput);

    imageOutput = new AVCaptureStillImageOutput()
    {
        OutputSettings = new NSDictionary()
    };
    acsSession.AddOutput(imageOutput);

    acsSession.StartRunning();
}//*/
// Configures the capture session for the requested camera position
// (front/back), attaches the photo output and an aspect-fill preview layer,
// forces the torch/flash on when the hardware supports it, and starts the
// session. (Method name typo kept: it is part of the public surface.)
void SettupCaptureSession()
{
    _captureSession = new AVCaptureSession();

    var cameraPosition = (cameraOptions == CameraOptions.Front)
        ? AVCaptureDevicePosition.Front
        : AVCaptureDevicePosition.Back;
    var captureDevice = AVCaptureDevice.GetDefaultDevice(
        AVCaptureDeviceType.BuiltInWideAngleCamera, AVMediaTypes.Video, cameraPosition);
    if (captureDevice != null)
    {
        var input = new AVCaptureDeviceInput(captureDevice, out var error);
        // Only add the input when it was created cleanly and the session accepts it.
        if (error == null && _captureSession.CanAddInput(input))
        {
            _captureSession.AddInput(input);
        }
        if (_captureSession.CanAddOutput(_photoOutput))
        {
            _captureSession.AddOutput(_photoOutput);
        }

        _cameraLayer = new AVCaptureVideoPreviewLayer(_captureSession);
        _cameraLayer.Frame = this.Bounds;
        _cameraLayer.VideoGravity = AVLayerVideoGravity.ResizeAspectFill;
        this.Layer.AddSublayer(_cameraLayer);

        //Turn on flash
        if (captureDevice.HasTorch)
        {
            captureDevice.LockForConfiguration(out var err);
            if (err == null)
            {
                if (captureDevice.TorchMode == AVCaptureTorchMode.Off)
                {
                    captureDevice.TorchMode = AVCaptureTorchMode.On;
                    captureDevice.FlashMode = AVCaptureFlashMode.On;
                }
                captureDevice.SetTorchModeLevel(1.0f, out var _);
                captureDevice.UnlockForConfiguration();
            }
        }

        _captureSession.StartRunning();
    }
}