// Builds the capture pipeline: a back-camera video input plus a metadata output.
// Any failure marks setupResult as ConfigurationFailed and commits the (partial)
// configuration so the Begin/Commit pair stays balanced.
void ConfigureSession()
{
    // A previous step (e.g. permission check) already failed — nothing to do.
    if (setupResult != SessionSetupResult.Success)
    {
        return;
    }

    session.BeginConfiguration();

    var videoDevice = DeviceWithMediaType(AVMediaType.Video, AVCaptureDevicePosition.Back);
    NSError err;
    var vDeviceInput = AVCaptureDeviceInput.FromDevice(videoDevice, out err);
    if (err != null)
    {
        // BUGFIX: was "${err}", which printed a literal '$' — C# interpolation is "{err}".
        Console.WriteLine($"Could not create video device input: {err}");
        setupResult = SessionSetupResult.ConfigurationFailed;
        session.CommitConfiguration();
        return;
    }
    if (session.CanAddInput(vDeviceInput))
    {
        session.AddInput(vDeviceInput);
        videoDeviceInput = vDeviceInput;
    }
    else
    {
        Console.WriteLine("Could not add video device input to the session");
        setupResult = SessionSetupResult.ConfigurationFailed;
        session.CommitConfiguration();
        return;
    }

    // Add metadata output.
    if (session.CanAddOutput(metadataOutput))
    {
        session.AddOutput(metadataOutput);

        // Set this view controller as the delegate for metadata objects.
        metadataOutput.SetDelegate(this, metadataObjectsQueue);
        metadataOutput.MetadataObjectTypes = metadataOutput.AvailableMetadataObjectTypes; // Use all metadata object types by default.
        metadataOutput.RectOfInterest = CGRect.Empty;
    }
    else
    {
        Console.WriteLine("Could not add metadata output to the session");
        setupResult = SessionSetupResult.ConfigurationFailed;
        session.CommitConfiguration();
        return;
    }

    session.CommitConfiguration();
}
// Flips between the front and back camera and updates the toggle button's icon
// to show which camera the user can switch to next.
public void ToggleFrontBackCamera()
{
    var currentPosition = captureDeviceInput.Device.Position;
    var targetPosition = currentPosition == AVCaptureDevicePosition.Front
        ? AVCaptureDevicePosition.Back
        : AVCaptureDevicePosition.Front;

    // When switching to the back camera, show the front-camera icon (and vice versa).
    var icon = targetPosition == AVCaptureDevicePosition.Back ? frontCameraIcon : rearCameraIcon;
    toggleCameraButton.SetBackgroundImage(icon, UIControlState.Normal);

    var targetDevice = GetCameraForOrientation(targetPosition);
    ConfigureCameraForDevice(targetDevice);

    // Swap the session's input atomically.
    captureSession.BeginConfiguration();
    captureSession.RemoveInput(captureDeviceInput);
    captureDeviceInput = AVCaptureDeviceInput.FromDevice(targetDevice);
    captureSession.AddInput(captureDeviceInput);
    captureSession.CommitConfiguration();
}
// Switches to the opposite camera, unless the element already requests the
// camera we are about to switch to (in which case this is a no-op).
void SwitchCameraType()
{
    var newPosition = captureDeviceInput.Device.Position == AVCaptureDevicePosition.Front
        ? AVCaptureDevicePosition.Back
        : AVCaptureDevicePosition.Front;

    var matchesFrontRequest = Element.CameraType == CameraType.Front && newPosition == AVCaptureDevicePosition.Front;
    var matchesRearRequest = Element.CameraType == CameraType.Rear && newPosition == AVCaptureDevicePosition.Back;
    if (matchesFrontRequest || matchesRearRequest)
    {
        return;
    }

    var device = GetCameraForOrientation(newPosition);
    ConfigureCameraForDevice(device);

    // Replace the current input with the new camera in one configuration batch.
    captureSession.BeginConfiguration();
    captureSession.RemoveInput(captureDeviceInput);
    captureDeviceInput = AVCaptureDeviceInput.FromDevice(device);
    captureSession.AddInput(captureDeviceInput);
    captureSession.CommitConfiguration();
}
// Toggles between the front (selfie) and back camera, tracking the selfie state.
public void ToggleFrontBackCamera()
{
    var wasFront = captureDeviceInput.Device.Position == AVCaptureDevicePosition.Front;
    var newPosition = wasFront ? AVCaptureDevicePosition.Back : AVCaptureDevicePosition.Front;

    // We are in selfie mode exactly when we are switching TO the front camera.
    isSelfie = !wasFront;

    var device = GetCameraForOrientation(newPosition);
    ConfigureCameraForDevice(device);

    // Swap the session's input atomically.
    captureSession.BeginConfiguration();
    captureSession.RemoveInput(captureDeviceInput);
    captureDeviceInput = AVCaptureDeviceInput.FromDevice(device);
    captureSession.AddInput(captureDeviceInput);
    captureSession.CommitConfiguration();
}
// Rotates between the front and back camera, with a flip animation.
// On failure to create the new input, the previous input is restored so the
// session never ends up with no camera attached.
private void HandleRotateCamera()
{
    captureSession.BeginConfiguration();
    var currentCameraInput = captureSession.Inputs[0];
    captureSession.RemoveInput(currentCameraInput);

    AVCaptureDeviceInput input = (AVCaptureDeviceInput)currentCameraInput;
    AVCaptureDevice camera = input.Device.Position == AVCaptureDevicePosition.Back
        ? CameraWithPosition(AVCaptureDevicePosition.Front)
        : CameraWithPosition(AVCaptureDevicePosition.Back);

    var videoInput = new AVCaptureDeviceInput(camera, out NSError err);
    if (err == null)
    {
        captureSession.AddInput(videoInput);
    }
    else
    {
        // BUGFIX: previously the error branch left the session with no input at all.
        // Re-attach the input we just removed so capture keeps working.
        captureSession.AddInput(currentCameraInput);
    }
    captureSession.CommitConfiguration();
    AddFlipAnimation();
}
// Re-points the session at the camera selected by cameraOptions (front or back),
// using a discovery session over wide-angle and dual cameras.
void UpdateCameraOption()
{
    var discovery = AVCaptureDeviceDiscoverySession.Create(
        new AVCaptureDeviceType[] { AVCaptureDeviceType.BuiltInWideAngleCamera, AVCaptureDeviceType.BuiltInDualCamera },
        AVMediaType.Video,
        AVCaptureDevicePosition.Unspecified
    );

    var wantedPosition = (cameraOptions == CameraOptions.Front)
        ? AVCaptureDevicePosition.Front
        : AVCaptureDevicePosition.Back;

    var device = discovery.Devices.FirstOrDefault(d => d.Position == wantedPosition);
    if (device == null)
    {
        return;
    }

    var newInput = AVCaptureDeviceInput.FromDevice(device);

    captureSession.BeginConfiguration();
    // Remove the existing device input first, since using the front and back camera simultaneously is not supported.
    captureSession.RemoveInput(videoDeviceInput);
    if (captureSession.CanAddInput(newInput))
    {
        captureSession.AddInput(newInput);
        videoDeviceInput = newInput;
    }
    else
    {
        // Fall back to the previous input so the session keeps producing frames.
        captureSession.AddInput(videoDeviceInput);
    }
    captureSession.CommitConfiguration();
}
// Sets up an AVCaptureSession that feeds YUV 420 BiPlanar frames into the
// OpenGL texture cache via a video-data output delegate on the main queue.
void SetupAVCapture(NSString sessionPreset)
{
    if ((videoTextureCache = CVOpenGLESTextureCache.FromEAGLContext(context)) == null)
    {
        Console.WriteLine("Could not create the CoreVideo TextureCache");
        return;
    }

    session = new AVCaptureSession();
    session.BeginConfiguration();

    // Preset size
    session.SessionPreset = sessionPreset;

    // Input device
    var videoDevice = AVCaptureDevice.DefaultDeviceWithMediaType(AVMediaType.Video);
    if (videoDevice == null)
    {
        Console.WriteLine("No video device");
        // BUGFIX: balance BeginConfiguration before bailing out so the session
        // is not left with an open configuration batch.
        session.CommitConfiguration();
        return;
    }
    NSError err;
    var input = new AVCaptureDeviceInput(videoDevice, out err);
    if (err != null)
    {
        Console.WriteLine("Error creating video capture device");
        // BUGFIX: balance BeginConfiguration on this error path too.
        session.CommitConfiguration();
        return;
    }
    session.AddInput(input);

    // Create the output device
    var dataOutput = new AVCaptureVideoDataOutput()
    {
        AlwaysDiscardsLateVideoFrames = true,

        // YUV 420, use "BiPlanar" to split the Y and UV planes in two separate blocks of
        // memory, then we can index 0 to get the Y and 1 for the UV planes in the frame decoding
        VideoSettings = new AVVideoSettings(CVPixelFormatType.CV420YpCbCr8BiPlanarFullRange)
    };

    dataOutputDelegate = new DataOutputDelegate(this);

    //
    // This dispatches the video frames into the main thread, because the OpenGL
    // code is accessing the data synchronously.
    //
    dataOutput.SetSampleBufferDelegateAndQueue(dataOutputDelegate, DispatchQueue.MainQueue);
    session.AddOutput(dataOutput);
    session.CommitConfiguration();
    session.StartRunning();
}
// Switches the session's input to a specific camera.
// NOTE(review): despite the name "SetFrontCam", this selects
// AVCaptureDevicePosition.Back — confirm whether Front was intended.
public void SetFrontCam()
{
    var devicePosition = AVCaptureDevicePosition.Back;
    var device = GetCameraForOrientation(devicePosition);
    ConfigureCameraForDevice(device);

    // Swap the session's current input for the chosen device atomically.
    captureSession.BeginConfiguration();
    captureSession.RemoveInput(captureDeviceInput);
    captureDeviceInput = AVCaptureDeviceInput.FromDevice(device);
    captureSession.AddInput(captureDeviceInput);
    captureSession.CommitConfiguration();
}
// Initializes a square 640x480 scanning session with a preview layer and a
// BGRA video-data output whose delegate raises OnDetected and stops the session.
void Initialize()
{
    Configuration.IsScanning = true;
    SizeChange();
    CaptureSession = new AVCaptureSession();
    CaptureSession.BeginConfiguration();
    this.Frame = new CGRect(0f, 0f, UIScreen.MainScreen.Bounds.Width, UIScreen.MainScreen.Bounds.Width);
    previewLayer = new AVCaptureVideoPreviewLayer(CaptureSession)
    {
        Frame = new CGRect(0f, 0f, UIScreen.MainScreen.Bounds.Width, UIScreen.MainScreen.Bounds.Width),
        VideoGravity = AVLayerVideoGravity.ResizeAspectFill,
    };
    setPreviewOrientation();
    var videoDevices = AVCaptureDevice.DevicesWithMediaType(AVMediaType.Video);
    var cameraPosition = AVCaptureDevicePosition.Back;
    //var cameraPosition = (cameraOptions == CameraOptions.Front) ? AVCaptureDevicePosition.Front : AVCaptureDevicePosition.Back;
    var device = videoDevices.FirstOrDefault(d => d.Position == cameraPosition);
    if (device == null)
    {
        // BUGFIX: balance BeginConfiguration before bailing out.
        CaptureSession.CommitConfiguration();
        return;
    }
    NSError error;
    var input = new AVCaptureDeviceInput(device, out error);
    if (error != null)
    {
        // BUGFIX: the creation error was previously ignored; adding a failed input throws.
        Console.WriteLine("Could not create video device input: {0}", error);
        CaptureSession.CommitConfiguration();
        return;
    }
    CaptureSession.AddInput(input);
    CaptureSession.SessionPreset = AVFoundation.AVCaptureSession.Preset640x480;
    Layer.AddSublayer(previewLayer);
    CaptureSession.CommitConfiguration();
    CaptureSession.StartRunning();
    VideoDataOutput = new AVCaptureVideoDataOutput();
    VideoDataOutput.AlwaysDiscardsLateVideoFrames = true;
    VideoDataOutput.WeakVideoSettings = new CVPixelBufferAttributes { PixelFormatType = CVPixelFormatType.CV32BGRA }.Dictionary;
    captureVideoDelegate = new CaptureVideoDelegate();
    captureVideoDelegate.OnDetected += (list) =>
    {
        // Forward detections, then stop scanning.
        this.OnDetected?.Invoke(list);
        CaptureSession.StopRunning();
    };
    VideoDataOutput.SetSampleBufferDelegateQueue(captureVideoDelegate, CoreFoundation.DispatchQueue.MainQueue);
    CaptureSession.AddOutput(VideoDataOutput);
}
// Lazily creates the still-image output, recreating it when it does not exist
// yet or when its video connection has gone inactive.
private AVCaptureStillImageOutput _getStillImageOutput()
{
    var needsNewOutput = stillImageOutput == null;
    if (!needsNewOutput)
    {
        var connection = stillImageOutput.ConnectionFromMediaType(AVMediaType.Video);
        if (connection != null)
        {
            // A dead connection means the existing output is unusable.
            needsNewOutput = !connection.Active;
        }
    }

    if (needsNewOutput)
    {
        stillImageOutput = new AVCaptureStillImageOutput();

        captureSession.BeginConfiguration();
        captureSession.AddOutput(stillImageOutput);
        captureSession.CommitConfiguration();
    }

    return stillImageOutput;
}
// Builds a photo-capture session: dual camera if available (falling back to the
// default video device), a high-resolution photo output, and a preview layer.
private void SetupLiveCameraStream()
{
    _captureSession = new AVCaptureSession();

    // Prefer the dual camera on the back; fall back to whatever default video device exists.
    var captureDevice = AVCaptureDevice.GetDefaultDevice(AVCaptureDeviceType.BuiltInDualCamera, AVMediaType.Video, AVCaptureDevicePosition.Back)
        ?? AVCaptureDevice.GetDefaultDevice(AVMediaType.Video);

    ConfigureCameraForDevice(captureDevice);
    _captureDeviceInput = AVCaptureDeviceInput.FromDevice(captureDevice);
    if (!_captureSession.CanAddInput(_captureDeviceInput))
    {
        return;
    }

    _capturePhotoOutput = new AVCapturePhotoOutput
    {
        IsHighResolutionCaptureEnabled = true,
        IsLivePhotoCaptureEnabled = false
    };
    if (!_captureSession.CanAddOutput(_capturePhotoOutput))
    {
        return;
    }

    // Configure preset, input and output in a single atomic batch.
    _captureSession.BeginConfiguration();
    _captureSession.SessionPreset = AVCaptureSession.PresetPhoto;
    _captureSession.AddInput(_captureDeviceInput);
    _captureSession.AddOutput(_capturePhotoOutput);
    _captureSession.CommitConfiguration();

    _videoPreviewLayer = new AVCaptureVideoPreviewLayer(_captureSession)
    {
        Frame = liveCameraStream.Frame,
        VideoGravity = AVLayerVideoGravity.ResizeAspectFill,
    };
    liveCameraStream.Layer.AddSublayer(_videoPreviewLayer);

    _captureSession.StartRunning();
}
// Event handler: flips the session between the front and back camera.
private void ToggleFrontBackCamera(object sender, EventArgs e)
{
    AVCaptureDevicePosition targetPosition;
    if (_captureDeviceInput.Device.Position == AVCaptureDevicePosition.Front)
    {
        targetPosition = AVCaptureDevicePosition.Back;
    }
    else
    {
        targetPosition = AVCaptureDevicePosition.Front;
    }

    var device = GetCameraForOrientation(targetPosition);
    ConfigureCameraForDevice(device);

    // Replace the current input atomically.
    _captureSession.BeginConfiguration();
    _captureSession.RemoveInput(_captureDeviceInput);
    _captureDeviceInput = AVCaptureDeviceInput.FromDevice(device);
    _captureSession.AddInput(_captureDeviceInput);
    _captureSession.CommitConfiguration();
}
// Creates the capture session and runs all device/output setup inside a single
// Begin/Commit configuration batch, then attaches the preview layer and marks
// the component ready. The call order here is significant: the helpers mutate
// captureSession and must run between BeginConfiguration and CommitConfiguration.
public void Initialize()
{
    captureSession = new AVCaptureSession();
    captureSession.BeginConfiguration();
    SetupCaptureDevice();
    SetupPhotoCapture();
    SetupVideoCapture();
    SetCaptureType();
    SetFrameRate();
    captureSession.CommitConfiguration();
    // previewLayer is presumably created by one of the setup helpers — confirm.
    Layer.AddSublayer(previewLayer);
    isInitialized = true;
}
// Swaps the session's camera between front and back and updates CameraPanel.
// Returns a completed task (the work itself is synchronous).
public Task SwapCameraAsync()
{
    if (session != null)
    {
        var currentCameraInput = session.Inputs[0];

        // Pick the device on the opposite side of the one currently attached.
        AVCaptureDevice newCamera;
        if (currentCameraInput.GetPosition() == AVCaptureDevicePosition.Back)
        {
            newCamera = AVCaptureDevice
                .DevicesWithMediaType(AVMediaType.Video)
                .FirstOrDefault(d => d.Position == AVCaptureDevicePosition.Front);
        }
        else
        {
            newCamera = AVCaptureDevice
                .DevicesWithMediaType(AVMediaType.Video)
                .FirstOrDefault(d => d.Position == AVCaptureDevicePosition.Back);
        }

        if (newCamera != null)
        {
            session.BeginConfiguration();
            session.RemoveInput(currentCameraInput);

            NSError error = null;
            var newInput = new AVCaptureDeviceInput(newCamera, out error);
            if (error == null)
            {
                session.AddInput(newInput);
                CameraPanel = currentCameraInput.GetPosition() == AVCaptureDevicePosition.Back
                    ? CameraPanel.Front
                    : CameraPanel.Back;
            }
            else
            {
                // BUGFIX: the "rollback" previously called RemoveInput on the input that
                // had already been removed, leaving the session with no input at all.
                // Re-add the original input instead.
                session.AddInput(currentCameraInput);
            }
            session.CommitConfiguration();
        }
    }
    return Task.FromResult<object>(null);
}
// Creates the capture session, wires the preview layer to it, configures
// inputs/outputs in one atomic batch, and attaches the preview to the view.
void Initialize()
{
    captureSession = new AVCaptureSession();

    previewLayer = new AVCaptureVideoPreviewLayer(captureSession);
    previewLayer.Frame = Bounds;
    previewLayer.VideoGravity = AVLayerVideoGravity.ResizeAspectFill;

    // Input and photo output are added inside a Begin/Commit configuration batch.
    captureSession.BeginConfiguration();
    SetupVideoInput();
    SetupPhotoCapture();
    captureSession.CommitConfiguration();

    Layer.AddSublayer(previewLayer);
}
// Prepares a session on the front camera with an uncompressed YUV 420 BiPlanar
// video-data output whose sample buffers are delivered to this instance.
void PrepareSession()
{
    session = new AVCaptureSession();
    var captureDevice = frontCamera;
    // Defensive: nothing to configure without a session or a front camera.
    if (session == null || captureDevice == null)
    {
        return;
    }

    try
    {
        NSError deviceInputError;
        var deviceInput = new AVCaptureDeviceInput(captureDevice, out deviceInputError);
        if (deviceInputError != null)
        {
            throw new NSErrorException(deviceInputError);
        }

        session.BeginConfiguration();

        if (session.CanAddInput(deviceInput))
        {
            session.AddInput(deviceInput);
        }

        var output = new AVCaptureVideoDataOutput();
        output.UncompressedVideoSetting = new AVVideoSettingsUncompressed
        {
            PixelFormatType = CVPixelFormatType.CV420YpCbCr8BiPlanarFullRange
        };
        output.AlwaysDiscardsLateVideoFrames = true;

        if (session.CanAddOutput(output))
        {
            session.AddOutput(output);
        }

        session.CommitConfiguration();

        // Deliver frames on a dedicated serial queue.
        output.SetSampleBufferDelegateQueue(this, new DispatchQueue("output.queue"));
        Console.WriteLine($"PrepareSession: Done setting up delegate");
    }
    catch (Exception ex)
    {
        Console.WriteLine($"PrepareSession Error: {ex.Message}");
    }
}
// Points the session at the requested camera; no-op when it is already active.
void SetSelectedCamera(CameraSelection selectedCamera)
{
    var wantedPosition = selectedCamera.ToAVCaptureDevicePosition();

    // Already showing the requested camera — nothing to do.
    if (wantedPosition == _captureDeviceInput.Device.Position)
    {
        return;
    }

    var device = GetCamera(wantedPosition);
    ConfigureCameraForDevice(device);

    // Replace the current input atomically.
    _captureSession.BeginConfiguration();
    _captureSession.RemoveInput(_captureDeviceInput);
    _captureDeviceInput = AVCaptureDeviceInput.FromDevice(device);
    _captureSession.AddInput(_captureDeviceInput);
    _captureSession.CommitConfiguration();
}
// Page constructor: wires up the emotion service, requests camera access,
// builds the live camera stream, then switches the session to the front camera.
public ProblemPage()
{
    InitializeComponent();
    // NOTE(review): "MYAPIHERE" is a placeholder — API keys should not live in source.
    this.emotionServiceClient = new EmotionServiceClient("MYAPIHERE");
    // NOTE(review): AuthorizeCameraUse is presumably asynchronous (permission prompt);
    // confirm the session is fully set up before the reconfiguration below runs.
    AuthorizeCameraUse();
    SetupLiveCameraStream();

    // Switch the freshly created session over to the front camera.
    var device = GetCameraForOrientation(AVCaptureDevicePosition.Front);
    ConfigureCameraForDevice(device);
    captureSession.BeginConfiguration();
    captureSession.RemoveInput(captureDeviceInput);
    captureDeviceInput = AVCaptureDeviceInput.FromDevice(device);
    captureSession.AddInput(captureDeviceInput);
    captureSession.CommitConfiguration();
}
// Starts the back camera with a JPEG still-image output and then prepares
// bracketed capture, invoking the supplied completion handler.
// Error paths show a message and commit the configuration batch before returning.
private void StartCameraWithCompletionHandler(Action <bool, NSError> completion)
{
    captureSession = new AVCaptureSession();
    captureSession.BeginConfiguration();
    captureDevice = CameraDeviceForPosition(AVCaptureDevicePosition.Back);
    if (captureDevice == null)
    {
        string message = "Error message back camera - not found";
        string title = "Error title back camera - not found";
        ShowErrorMessage(message, title);
        // BUGFIX: balance BeginConfiguration before bailing out.
        captureSession.CommitConfiguration();
        return;
    }
    NSError error;
    AVCaptureDeviceInput deviceInput = AVCaptureDeviceInput.FromDevice(captureDevice, out error);
    if (deviceInput == null)
    {
        Console.WriteLine("This error should be handled appropriately in your app -- obtain device input: {0}", error);
        string message = "Error message back camera - can't open.";
        string title = "Error title for back camera - can't open.";
        ShowErrorMessage(message, title);
        // BUGFIX: balance BeginConfiguration on this error path too.
        captureSession.CommitConfiguration();
        return;
    }
    captureSession.AddInput(deviceInput);
    stillImageOutput = new AVCaptureStillImageOutput();
    //Or instead of JPEG, we can use one of the following pixel formats: BGRA, 420f output
    stillImageOutput.OutputSettings = NSDictionary.FromObjectAndKey(AVVideo.CodecJPEG, AVVideo.CodecKey);
    captureSession.AddOutput(stillImageOutput);
    cameraPreviewView.ConfigureCaptureSession(captureSession, stillImageOutput);
    captureSession.SessionPreset = AVCaptureSession.PresetPhoto;
    captureDeviceFormat = captureDevice.ActiveFormat;
    captureSession.CommitConfiguration();
    captureSession.StartRunning();
    maxBracketCount = stillImageOutput.MaxBracketedCaptureStillImageCount;
    PrepareBracketsWithCompletionHandler(completion);
}
// Re-applies the torch setting when a file output starts, wrapping the torch
// update in a configuration batch when a session exists.
public void captureOutput(AVCaptureFileOutput captureOutput, NSUrl fileURL)
{
    captureSession?.BeginConfiguration();

    // The torch only needs to be re-applied when flash isn't off.
    if (_flashMode != CameraFlashMode.Off)
    {
        _updateTorch(_flashMode);
    }

    captureSession?.CommitConfiguration();
}
// Button handler: toggles between the front and rear camera.
partial void SwitchCameraButton_TouchUpInside(UIButton sender)
{
    var newPosition = captureDeviceInput.Device.Position == AVCaptureDevicePosition.Front
        ? AVCaptureDevicePosition.Back
        : AVCaptureDevicePosition.Front;

    var newDevice = GetCameraForOrientation(newPosition);
    ConfigureCameraForDevice(newDevice);

    // Replace the current input atomically.
    captureSession.BeginConfiguration();
    captureSession.RemoveInput(captureDeviceInput);
    captureDeviceInput = AVCaptureDeviceInput.FromDevice(newDevice);
    captureSession.AddInput(captureDeviceInput);
    captureSession.CommitConfiguration();
}
/*
 * private void PhotoPicked(NSIndexPath indexPath)
 * {
 *  var collectionCell = (PhotoCollectionViewCell)photoCollection.CellForItem(indexPath);
 *
 *  if (collectionCell.Asset != null)
 *  {
 *      using (var m = new PHImageManager())
 *      {
 *          var options = new PHImageRequestOptions();
 *
 *          options.DeliveryMode = PHImageRequestOptionsDeliveryMode.FastFormat;
 *          options.Synchronous = false;
 *          options.NetworkAccessAllowed = true;
 *
 *          m.RequestImageData(collectionCell.Asset, options, (data, dataUti, orientation, info) =>
 *          {
 *              if (data != null)
 *              {
 *                  var photo = UIImage.LoadFromData(data);
 *                  GoToDescription(photo);
 *              }
 *          });
 *      }
 *  }
 * }*/

// Event handler: flips the capture session to the opposite camera.
private void SwitchCameraButtonTapped(object sender, EventArgs e)
{
    var targetPosition = _captureDeviceInput.Device.Position == AVCaptureDevicePosition.Front
        ? AVCaptureDevicePosition.Back
        : AVCaptureDevicePosition.Front;

    var targetDevice = GetCameraForOrientation(targetPosition);
    ConfigureCameraForDevice(targetDevice);

    // Swap inputs inside one configuration batch.
    _captureSession.BeginConfiguration();
    _captureSession.RemoveInput(_captureDeviceInput);
    _captureDeviceInput = AVCaptureDeviceInput.FromDevice(targetDevice);
    _captureSession.AddInput(_captureDeviceInput);
    _captureSession.CommitConfiguration();
}
// Button handler: switches the session to the opposite camera.
void BttSwitch_TouchUpInside(object sender, EventArgs e)
{
    var position = captureDeviceInput.Device.Position;
    position = position == AVCaptureDevicePosition.Front
        ? AVCaptureDevicePosition.Back
        : AVCaptureDevicePosition.Front;

    var device = GetCameraForOrientation(position);
    ConfigureCameraForDevice(device);

    // Replace the input in a single Begin/Commit batch.
    captureSession.BeginConfiguration();
    captureSession.RemoveInput(captureDeviceInput);
    captureDeviceInput = AVCaptureDeviceInput.FromDevice(device);
    captureSession.AddInput(captureDeviceInput);
    captureSession.CommitConfiguration();
}
// Configures and starts a 1080p BGRA capture session whose frames are delivered
// to this instance on `queue`.
public void BeginSession()
{
    try
    {
        captureSession.BeginConfiguration();
        var settings = new CVPixelBufferAttributes
        {
            PixelFormatType = CVPixelFormatType.CV32BGRA
        };
        videoOutput.WeakVideoSettings = settings.Dictionary;
        videoOutput.AlwaysDiscardsLateVideoFrames = true;
        videoOutput.SetSampleBufferDelegateQueue(this, queue);
        captureSession.SessionPreset = AVCaptureSession.Preset1920x1080;
        captureSession.AddOutput(videoOutput);

        NSError err;
        var input = new AVCaptureDeviceInput(captureDevice, out err);
        if (err != null)
        {
            // BUGFIX: previously the input was added to the session even when its
            // creation reported an error.
            Console.WriteLine("AVCapture error: " + err);
        }
        else
        {
            captureSession.AddInput(input);
        }

        videoConnection = videoOutput.ConnectionFromMediaType(AVMediaType.Video);
        captureSession.CommitConfiguration();

        captureSession.StartRunning();
        Console.WriteLine("started AV capture session");
    }
    catch (Exception ex)
    {
        // BUGFIX: the exception was silently dropped; include it in the log.
        Console.WriteLine("error connecting to the capture device: " + ex);
    }
}
// AVCam-style setup: disables the UI until the session runs, checks camera
// authorization, then configures inputs/outputs on a dedicated session queue.
public async override void ViewDidLoad ()
{
	base.ViewDidLoad ();

	// Disable UI. The UI is enabled if and only if the session starts running.
	CameraButton.Enabled = false;
	RecordButton.Enabled = false;
	StillButton.Enabled = false;

	// Create the AVCaptureSession.
	Session = new AVCaptureSession ();

	// Setup the preview view.
	PreviewView.Session = Session;

	// Communicate with the session and other session objects on this queue.
	SessionQueue = new DispatchQueue ("session queue");
	SetupResult = AVCamSetupResult.Success;

	// Check video authorization status. Video access is required and audio access is optional.
	// If audio access is denied, audio is not recorded during movie recording.
	switch (AVCaptureDevice.GetAuthorizationStatus (AVMediaType.Video)) {
	// The user has previously granted access to the camera.
	case AVAuthorizationStatus.Authorized:
		break;

	// The user has not yet been presented with the option to grant video access.
	// We suspend the session queue to delay session setup until the access request has completed to avoid
	// asking the user for audio access if video access is denied.
	// Note that audio access will be implicitly requested when we create an AVCaptureDeviceInput for audio during session setup.
	case AVAuthorizationStatus.NotDetermined:
		SessionQueue.Suspend ();
		var granted = await AVCaptureDevice.RequestAccessForMediaTypeAsync (AVMediaType.Video);
		if (!granted)
			SetupResult = AVCamSetupResult.CameraNotAuthorized;
		SessionQueue.Resume ();
		break;

	// The user has previously denied access.
	default:
		SetupResult = AVCamSetupResult.CameraNotAuthorized;
		break;
	}

	// Setup the capture session.
	// In general it is not safe to mutate an AVCaptureSession or any of its inputs, outputs, or connections from multiple threads at the same time.
	// Why not do all of this on the main queue?
	// Because AVCaptureSession.StartRunning is a blocking call which can take a long time. We dispatch session setup to the sessionQueue
	// so that the main queue isn't blocked, which keeps the UI responsive.
	SessionQueue.DispatchAsync (() => {
		if (SetupResult != AVCamSetupResult.Success)
			return;

		backgroundRecordingID = -1;
		NSError error;

		AVCaptureDevice videoDevice = CreateDevice (AVMediaType.Video, AVCaptureDevicePosition.Back);
		AVCaptureDeviceInput videoDeviceInput = AVCaptureDeviceInput.FromDevice (videoDevice, out error);
		// NOTE(review): when videoDeviceInput is null, execution continues into
		// CanAddInput(null) below — confirm this cannot throw on this binding.
		if (videoDeviceInput == null)
			Console.WriteLine ("Could not create video device input: {0}", error);

		Session.BeginConfiguration ();
		if (Session.CanAddInput (videoDeviceInput)) {
			Session.AddInput (VideoDeviceInput = videoDeviceInput);
			DispatchQueue.MainQueue.DispatchAsync (() => {
				// Why are we dispatching this to the main queue?
				// Because AVCaptureVideoPreviewLayer is the backing layer for PreviewView and UIView
				// can only be manipulated on the main thread.
				// Note: As an exception to the above rule, it is not necessary to serialize video orientation changes
				// on the AVCaptureVideoPreviewLayer’s connection with other session manipulation.
				// Use the status bar orientation as the initial video orientation. Subsequent orientation changes are handled by
				// ViewWillTransitionToSize method.
				UIInterfaceOrientation statusBarOrientation = UIApplication.SharedApplication.StatusBarOrientation;
				AVCaptureVideoOrientation initialVideoOrientation = AVCaptureVideoOrientation.Portrait;
				if (statusBarOrientation != UIInterfaceOrientation.Unknown)
					initialVideoOrientation = (AVCaptureVideoOrientation)(long)statusBarOrientation;

				var previewLayer = (AVCaptureVideoPreviewLayer)PreviewView.Layer;
				previewLayer.Connection.VideoOrientation = initialVideoOrientation;
			});
		} else {
			Console.WriteLine ("Could not add video device input to the session");
			SetupResult = AVCamSetupResult.SessionConfigurationFailed;
		}

		// Audio input is optional — failures are only logged.
		AVCaptureDevice audioDevice = AVCaptureDevice.DefaultDeviceWithMediaType (AVMediaType.Audio);
		AVCaptureDeviceInput audioDeviceInput = AVCaptureDeviceInput.FromDevice (audioDevice, out error);
		if (audioDeviceInput == null)
			Console.WriteLine ("Could not create audio device input: {0}", error);
		if (Session.CanAddInput (audioDeviceInput))
			Session.AddInput (audioDeviceInput);
		else
			Console.WriteLine ("Could not add audio device input to the session");

		// Movie output, with automatic video stabilization when supported.
		var movieFileOutput = new AVCaptureMovieFileOutput ();
		if (Session.CanAddOutput (movieFileOutput)) {
			Session.AddOutput (MovieFileOutput = movieFileOutput);
			AVCaptureConnection connection = movieFileOutput.ConnectionFromMediaType (AVMediaType.Video);
			if (connection.SupportsVideoStabilization)
				connection.PreferredVideoStabilizationMode = AVCaptureVideoStabilizationMode.Auto;
		} else {
			Console.WriteLine ("Could not add movie file output to the session");
			SetupResult = AVCamSetupResult.SessionConfigurationFailed;
		}

		// JPEG still-image output.
		var stillImageOutput = new AVCaptureStillImageOutput ();
		if (Session.CanAddOutput (stillImageOutput)) {
			stillImageOutput.CompressedVideoSetting = new AVVideoSettingsCompressed {
				Codec = AVVideoCodec.JPEG
			};
			Session.AddOutput (StillImageOutput = stillImageOutput);
		} else {
			Console.WriteLine ("Could not add still image output to the session");
			SetupResult = AVCamSetupResult.SessionConfigurationFailed;
		}

		Session.CommitConfiguration ();
	});
}
// Builds the RosyWriter-style capture pipeline: separate audio and video
// data outputs, each with its own GCD delivery queue, inside one Begin/Commit
// configuration batch. Returns false when no audio device exists (simulator).
bool SetupCaptureSession ()
{
	//Console.WriteLine ("SetupCaptureSession");
	// Overview: RosyWriter uses separate GCD queues for audio and video capture. If a single GCD queue
	// is used to deliver both audio and video buffers, and our video processing consistently takes
	// too long, the delivery queue can back up, resulting in audio being dropped.
	//
	// When recording, RosyWriter creates a third GCD queue for calls to AVAssetWriter. This ensures
	// that AVAssetWriter is not called to start or finish writing from multiple threads simultaneously.
	//
	// RosyWriter uses AVCaptureSession's default preset, AVCaptureSessionPresetHigh.

	// Create Capture session
	captureSession = new AVCaptureSession ();
	captureSession.BeginConfiguration ();

	// Create audio connection
	NSError error;
	var audioDevice = AVCaptureDevice.DefaultDeviceWithMediaType (AVMediaType.Audio);
	if (audioDevice == null)
		// NOTE(review): this early return leaves BeginConfiguration without a matching
		// CommitConfiguration — confirm whether that is acceptable on this path.
		return false; // e.g. simulator

	var audioIn = new AVCaptureDeviceInput (audioDevice, out error);
	if (captureSession.CanAddInput (audioIn))
		captureSession.AddInput (audioIn);

	var audioOut = new AVCaptureAudioDataOutput ();
	var audioCaptureQueue = new DispatchQueue ("Audio Capture Queue");

	// Add the Delegate to capture each sample that comes through
	audioOut.SetSampleBufferDelegateQueue (this, audioCaptureQueue);

	if (captureSession.CanAddOutput (audioOut))
		captureSession.AddOutput (audioOut);

	audioConnection = audioOut.ConnectionFromMediaType (AVMediaType.Audio);

	// Create Video Session
	var videoDevice = VideoDeviceWithPosition (AVCaptureDevicePosition.Back);
	var videoIn = new AVCaptureDeviceInput (videoDevice, out error);

	if (captureSession.CanAddInput (videoIn))
		captureSession.AddInput (videoIn);

	// RosyWriter prefers to discard late video frames early in the capture pipeline, since its
	// processing can take longer than real-time on some platforms (such as iPhone 3GS).
	// Clients whose image processing is faster than real-time should consider setting AVCaptureVideoDataOutput's
	// alwaysDiscardsLateVideoFrames property to NO.
	var videoOut = new AVCaptureVideoDataOutput {
		AlwaysDiscardsLateVideoFrames = true,
		VideoSettings = new AVVideoSettings (CVPixelFormatType.CV32BGRA)
	};

	// Create a DispatchQueue for the Video Processing
	var videoCaptureQueue = new DispatchQueue ("Video Capture Queue");
	videoOut.SetSampleBufferDelegateQueue (this, videoCaptureQueue);

	if (captureSession.CanAddOutput (videoOut))
		captureSession.AddOutput (videoOut);

	// Set the Video connection from the Video Output object
	videoConnection = videoOut.ConnectionFromMediaType (AVMediaType.Video);
	videoOrientation = videoConnection.VideoOrientation;

	captureSession.CommitConfiguration ();

	return true;
}
// Sets up an AVCaptureSession that feeds YUV 420 BiPlanar frames into the
// OpenGL texture cache via a video-data output delegate on the main queue.
void SetupAVCapture (NSString sessionPreset)
{
	if ((videoTextureCache = CVOpenGLESTextureCache.FromEAGLContext (context)) == null){
		Console.WriteLine ("Could not create the CoreVideo TextureCache");
		return;
	}

	session = new AVCaptureSession ();
	session.BeginConfiguration ();

	// Preset size
	session.SessionPreset = sessionPreset;

	// Input device
	var videoDevice = AVCaptureDevice.DefaultDeviceWithMediaType (AVMediaType.Video);
	if (videoDevice == null){
		Console.WriteLine ("No video device");
		// BUGFIX: balance BeginConfiguration before bailing out so the session
		// is not left with an open configuration batch.
		session.CommitConfiguration ();
		return;
	}
	NSError err;
	var input = new AVCaptureDeviceInput (videoDevice, out err);
	if (err != null){
		Console.WriteLine ("Error creating video capture device");
		// BUGFIX: balance BeginConfiguration on this error path too.
		session.CommitConfiguration ();
		return;
	}
	session.AddInput (input);

	// Create the output device
	var dataOutput = new AVCaptureVideoDataOutput () {
		AlwaysDiscardsLateVideoFrames = true,

		// YUV 420, use "BiPlanar" to split the Y and UV planes in two separate blocks of
		// memory, then we can index 0 to get the Y and 1 for the UV planes in the frame decoding
		VideoSettings = new AVVideoSettings (CVPixelFormatType.CV420YpCbCr8BiPlanarFullRange)
	};

	dataOutputDelegate = new DataOutputDelegate (this);

	//
	// This dispatches the video frames into the main thread, because the OpenGL
	// code is accessing the data synchronously.
	//
	dataOutput.SetSampleBufferDelegateAndQueue (dataOutputDelegate, DispatchQueue.MainQueue);
	session.AddOutput (dataOutput);
	session.CommitConfiguration ();
	session.StartRunning ();
}
// Configures a medium-resolution session with movie + photo outputs, pins the
// frame rate to the device's supported range, and optionally switches the
// device into a >=240fps slow-motion format.
protected void Initialize()
{
	// configure the capture session for medium resolution, change this if your code
	// can cope with more data or volume
	CaptureSession = new AVCaptureSession
	{
		SessionPreset = AVCaptureSession.PresetMedium
	};
	previewLayer = new AVCaptureVideoPreviewLayer(CaptureSession)
	{
		Frame = Bounds,
		VideoGravity = AVLayerVideoGravity.ResizeAspectFill
	};
	var videoDevices = AVCaptureDevice.DevicesWithMediaType(AVMediaType.Video);
	var cameraPosition = (cameraOptions == CameraOptions.Front) ? AVCaptureDevicePosition.Front : AVCaptureDevicePosition.Back;
	var device = videoDevices.FirstOrDefault(d => d.Position == cameraPosition);
	if (device == null)
	{
		return;
	}

	// SET to slow motion
	NSError error;
	var input = new AVCaptureDeviceInput(device, out error);
	if (error != null)
	{
		// BUGFIX: the creation error was previously ignored; adding a failed input throws.
		Console.WriteLine("Could not create video device input: {0}", error);
		return;
	}

	movieFileOutput = new AVCaptureMovieFileOutput
	{
		//set max record time to 10 minutes
		MaxRecordedDuration = CMTime.FromSeconds(600, 1)
	};
	photoFileOutput = new AVCapturePhotoOutput();
	photoFileOutput.IsHighResolutionCaptureEnabled = true;

	if (CaptureSession.CanAddOutput(movieFileOutput))
	{
		CaptureSession.BeginConfiguration();
		CaptureSession.AddOutput(movieFileOutput);
		CaptureSession.AddOutput(photoFileOutput);

		var ranges = device.ActiveFormat.VideoSupportedFrameRateRanges;
		if (device.LockForConfiguration(out error))
		{
			device.ActiveVideoMinFrameDuration = new CMTime(1, (int)ranges.First().MinFrameRate);
			device.ActiveVideoMaxFrameDuration = new CMTime(1, (int)ranges.First().MaxFrameRate);
			// BUGFIX: the configuration lock was never released, which blocks any
			// later configuration of this device.
			device.UnlockForConfiguration();
		}

		var connection = movieFileOutput.ConnectionFromMediaType(AVMediaType.Video);
		if (connection != null && connection.SupportsVideoStabilization)
		{
			connection.PreferredVideoStabilizationMode = AVCaptureVideoStabilizationMode.Auto;
		}
		CaptureSession.CommitConfiguration();
	}

	CaptureSession.AddInput(input);
	Layer.AddSublayer(previewLayer);
	CaptureSession.StartRunning();

	// set frame rate if Slow-mo is requested
	if (speedOptions == SpeedOptions.SlowMo)
	{
		foreach (var vFormat in device.Formats)
		{
			var _ranges = vFormat.VideoSupportedFrameRateRanges as AVFrameRateRange[];
			var frameRates = _ranges[0];
			if (frameRates.MaxFrameRate >= 240.0)
			{
				device.LockForConfiguration(out NSError _error);
				if (_error is null)
				{
					device.ActiveFormat = vFormat as AVCaptureDeviceFormat;
					device.ActiveVideoMinFrameDuration = frameRates.MinFrameDuration;
					device.ActiveVideoMaxFrameDuration = frameRates.MaxFrameDuration;
					device.UnlockForConfiguration();
					break;
				}
			}
		}
	}

	IsPreviewing = true;
}
// Initializes a 720p back-camera scanning session with a BGRA video-data output
// whose delegate raises OnDetected on the main thread; optionally starts with
// the torch on.
void Initialize(bool defaultTorchOn, bool vibrationOnDetected, bool startScanningOnCreate)
{
    Configuration.IsScanning = startScanningOnCreate;
    CaptureSession = new AVCaptureSession();
    CaptureSession.BeginConfiguration();
    this.AutoresizingMask = UIViewAutoresizing.FlexibleDimensions;
    previewLayer = new AVCaptureVideoPreviewLayer(CaptureSession)
    {
        Frame = this.Bounds,
        VideoGravity = AVLayerVideoGravity.ResizeAspectFill
    };
    var videoDevices = AVCaptureDevice.DevicesWithMediaType(AVMediaType.Video);
    var cameraPosition = AVCaptureDevicePosition.Back;
    //var cameraPosition = (cameraOptions == CameraOptions.Front) ? AVCaptureDevicePosition.Front : AVCaptureDevicePosition.Back;
    var device = videoDevices.FirstOrDefault(d => d.Position == cameraPosition);
    if (device == null)
    {
        // BUGFIX: balance BeginConfiguration before bailing out.
        CaptureSession.CommitConfiguration();
        return;
    }
    NSError error;
    var input = new AVCaptureDeviceInput(device, out error);
    if (error != null)
    {
        // BUGFIX: the creation error was previously ignored; adding a failed input throws.
        Console.WriteLine("Could not create video device input: {0}", error);
        CaptureSession.CommitConfiguration();
        return;
    }
    CaptureSession.AddInput(input);
    CaptureSession.SessionPreset = AVFoundation.AVCaptureSession.Preset1280x720;
    Layer.AddSublayer(previewLayer);
    CaptureSession.CommitConfiguration();
    VideoDataOutput = new AVCaptureVideoDataOutput
    {
        AlwaysDiscardsLateVideoFrames = true,
        WeakVideoSettings = new CVPixelBufferAttributes { PixelFormatType = CVPixelFormatType.CV32BGRA }
            .Dictionary
    };
    captureVideoDelegate = new CaptureVideoDelegate(vibrationOnDetected);
    captureVideoDelegate.OnDetected += (list) =>
    {
        // Forward detections on the UI thread.
        InvokeOnMainThread(() =>
        {
            //CaptureSession.StopRunning();
            this.OnDetected?.Invoke(list);
        });
    };
    VideoDataOutput.SetSampleBufferDelegateQueue(captureVideoDelegate, CoreFoundation.DispatchQueue.MainQueue);
    CaptureSession.AddOutput(VideoDataOutput);
    InvokeOnMainThread(() =>
    {
        CaptureSession.StartRunning();
        //Torch on by default
        if (defaultTorchOn && !GoogleVisionBarCodeScanner.Methods.IsTorchOn())
        {
            GoogleVisionBarCodeScanner.Methods.ToggleFlashlight();
        }
    });
}
// Builds a still-image capture session for the back camera, starts it running,
// and prepares the bracketed-capture settings.
// completion: invoked by PrepareBracketsWithCompletionHandler with a success
//             flag and any NSError from bracket preparation. Note: completion
//             is NOT invoked on the early error paths below — the user is shown
//             an alert instead (existing behavior; TODO consider signaling it).
private void StartCameraWithCompletionHandler(Action<bool, NSError> completion)
{
    captureSession = new AVCaptureSession();
    captureSession.BeginConfiguration();

    captureDevice = CameraDeviceForPosition(AVCaptureDevicePosition.Back);
    if (captureDevice == null)
    {
        string message = "Error message back camera - not found";
        string title = "Error";
        ShowErrorMessage(message, title);
        // BUG FIX: balance BeginConfiguration before returning so the session is
        // not left inside an open configuration transaction.
        captureSession.CommitConfiguration();
        return;
    }

    NSError error;
    AVCaptureDeviceInput deviceInput = AVCaptureDeviceInput.FromDevice(captureDevice, out error);
    if (deviceInput == null)
    {
        Console.WriteLine("This error should be handled appropriately in your app -- obtain device input: {0}", error);
        string message = "Error message back camera - can't open.";
        string title = "Error";
        ShowErrorMessage(message, title);
        // BUG FIX: see above — commit before bailing out.
        captureSession.CommitConfiguration();
        return;
    }
    captureSession.AddInput(deviceInput);

    stillImageOutput = new AVCaptureStillImageOutput();
    // Or instead of JPEG, we can use one of the following pixel formats: BGRA, 420f output
    stillImageOutput.OutputSettings = new NSDictionary(AVVideo.CodecKey, AVVideo.CodecJPEG);
    captureSession.AddOutput(stillImageOutput);

    cameraPreviewView.ConfigureCaptureSession(captureSession, stillImageOutput);
    captureSession.SessionPreset = AVCaptureSession.PresetPhoto;
    captureDeviceFormat = captureDevice.ActiveFormat;
    captureSession.CommitConfiguration();
    captureSession.StartRunning();

    maxBracketCount = stillImageOutput.MaxBracketedCaptureStillImageCount;
    PrepareBracketsWithCompletionHandler(completion);
}
// Wires up the touch handlers for every camera-overlay button:
// cancel, record/stop video, flash toggle, gallery picker, and camera rotate.
private void SetupEventHandlers()
{
    // Cancel: forward to the Forms element so the hosting page can dismiss the camera.
    cancelPhotoButton.TouchUpInside += (s, e) =>
    {
        (Element as CustomVideoCamera).Cancel();
    };

    // Record button: toggles between starting and stopping a movie recording,
    // tracked by the weAreRecording flag.
    videoButton.TouchUpInside += (s, e) =>
    {
        var element = (Element as CustomVideoCamera);
        //AssetsLibrary.ALAssetsLibrary li = new
        // var documents = Environment.GetFolderPath(Environment.SpecialFolder.MyDocuments);
        // var library = System.IO.Path.Combine(documents, "..", "Library");
        // Recording always targets this fixed temp path; any previous take is deleted below.
        var urlpath = System.IO.Path.Combine(Path.GetTempPath(), "sweetMovieFilm.mov");
        if (!weAreRecording)
        {
            // Refresh the elapsed-time label twice a second while recording.
            recordTimeTimer = NSTimer.CreateRepeatingScheduledTimer(TimeSpan.FromSeconds(0.5), delegate
            {
                recordTimeLabel.Text = TimeSpan.FromSeconds(output.RecordedDuration.Seconds).ToString(@"mm\:ss");
                //Write Action Here
            });
            NSUrl url = new NSUrl(urlpath, false);
            NSFileManager manager = new NSFileManager();
            NSError error = new NSError();
            // Remove the previous recording, if any, so the movie output can create the file.
            if (manager.FileExists(urlpath))
            {
                Console.WriteLine("Deleting File");
                manager.Remove(urlpath, out error);
                Console.WriteLine("Deleted File");
            }
            //var dataOutput = new AVCaptureVideoDataOutput()
            //{
            //    AlwaysDiscardsLateVideoFrames = true,
            //    WeakVideoSettings = new CVPixelBufferAttributes { PixelFormatType = CVPixelFormatType.CV32BGRA }.Dictionary
            //};
            // Locate the video connection on the movie file output so its orientation
            // can be pinned to portrait before recording starts.
            AVCaptureConnection connection = null;
            if (output.Connections != null)
            {
                foreach (AVCaptureConnection connectionItem in output.Connections)
                {
                    foreach (AVCaptureInputPort port in connectionItem.InputPorts)
                    {
                        if (port.MediaType == AVMediaType.Video)
                        {
                            connection = connectionItem;
                            break;
                        }
                    }
                }
            }
            if (connection != null && connection.SupportsVideoOrientation)
            {
                connection.VideoOrientation = AVCaptureVideoOrientation.Portrait;
            }
            //(AVCaptureConnection)output.Connections [0];
            if (connection != null)
            {
                // The recording delegate reports completion back to the Forms element
                // and controls the activity spinner.
                CustomAvCaptureFileOutPutRecordingDelegate avDel = new CustomAvCaptureFileOutPutRecordingDelegate();
                avDel.Element = element;
                avDel.activityIndicator = activitySpinner;
                //output.StartRecordingToOutputFile(url, avDel);
                output.StartRecordingToOutputFile(url, avDel);
            }
            Console.WriteLine(urlpath);
            weAreRecording = true;
            videoButton.SetImage(UIImage.FromFile(element.StopVideoImage), UIControlState.Normal);
        }
        //we were already recording. Stop recording
        else
        {
            activitySpinner.StartAnimating();
            output.StopRecording();
            videoButton.SetImage(UIImage.FromFile(element.StartVideoImage), UIControlState.Normal);
            recordTimeLabel.Text = "";
            Console.WriteLine("stopped recording");
            weAreRecording = false;
            recordTimeTimer.Invalidate();
        }
    };

    // Flash toggle: flips the device flash mode and swaps the button artwork.
    flashButton.TouchUpInside += (s, e) =>
    {
        var element = (Element as CustomVideoCamera);
        var device = captureDeviceInput.Device;
        var error = new NSError();
        if (device.HasFlash)
        {
            if (device.FlashMode == AVCaptureFlashMode.On)
            {
                device.LockForConfiguration(out error);
                device.FlashMode = AVCaptureFlashMode.Off;
                device.UnlockForConfiguration();
                // NOTE(review): the "on" artwork is shown after turning the flash OFF —
                // presumably the icon depicts the action a tap will perform; confirm with design.
                flashButton.SetBackgroundImage(UIImage.FromBundle(element.FlashLightOnImage), UIControlState.Normal);
            }
            else
            {
                device.LockForConfiguration(out error);
                device.FlashMode = AVCaptureFlashMode.On;
                device.UnlockForConfiguration();
                flashButton.SetBackgroundImage(UIImage.FromBundle(element.FlashLightOffImage), UIControlState.Normal);
            }
        }
        flashOn = !flashOn;
    };

    // Gallery: present the system picker restricted to movies, and hand the
    // selected movie's bytes back to the Forms element.
    photoGallaryButton.TouchUpInside += (s, e) =>
    {
        var imagePicker = new UIImagePickerController { SourceType = UIImagePickerControllerSourceType.PhotoLibrary, MediaTypes = new string[] { "public.movie" } };
        imagePicker.AllowsEditing = false;
        //imagePicker.ShowsCameraControls = false;
        // imagePicker.ShowsCameraControls = false;
        //Make sure we have the root view controller which will launch the photo gallery
        var window = UIApplication.SharedApplication.KeyWindow;
        var vc = window.RootViewController;
        // Walk down to the top-most presented controller so presentation succeeds.
        while (vc.PresentedViewController != null)
        {
            vc = vc.PresentedViewController;
        }
        //Show the image gallery
        vc.PresentViewController(imagePicker, true, null);
        //call back for when a picture is selected and finished editing
        imagePicker.FinishedPickingMedia += (sender, e2) =>
        {
            if (e2.Info[UIImagePickerController.MediaType].ToString() == "public.movie")
            {
                NSUrl mediaURL = e2.Info[UIImagePickerController.MediaURL] as NSUrl;
                if (mediaURL != null)
                {
                    Console.WriteLine(mediaURL.ToString());
                    // Copy the movie bytes into managed memory before handing them to Forms.
                    NSData data = NSData.FromUrl(mediaURL);
                    byte[] dataBytes = new byte[data.Length];
                    System.Runtime.InteropServices.Marshal.Copy(data.Bytes, dataBytes, 0, Convert.ToInt32(data.Length));
                    (Element as CustomVideoCamera).SetPhotoResult(mediaURL.ToString(), dataBytes, 0, 0);
                }
            }
            //UIImage originalImage = e2.Info[UIImagePickerController.OriginalImage] as UIImage;
            //if (originalImage != null)
            //{
            //    //Got the image now, convert it to byte array to send back up to the forms project
            //    var pngImage = originalImage.AsPNG();
            //    // UIImage imageInfo = new UIImage(pngImage);
            //    byte[] myByteArray = new byte[pngImage.Length];
            //    System.Runtime.InteropServices.Marshal.Copy(pngImage.Bytes, myByteArray, 0, Convert.ToInt32(pngImage.Length));
            //    (Element as CustomVideoCamera).SetPhotoResult(originalImage.pmyByteArray,
            //    (int)originalImage.Size.Width,
            //    (int)originalImage.Size.Height);
            //    //System.Runtime.InteropServices.Marshal.Copy(pngImage.Bytes, myByteArray, 0, Convert.ToInt32(pngImage.Length));
            //    //MessagingCenter.Send<byte[]>(myByteArray, "ImageSelected");
            //}
            //Close the image gallery on the UI thread
            Device.BeginInvokeOnMainThread(() =>
            {
                vc.DismissViewController(true, null);
            });
        };
        //Cancel button callback from the image gallery
        imagePicker.Canceled += (sender, e1) =>
        {
            vc.DismissViewController(true, null);
            //(Element as CustomCamera).Cancel();
        };
        //(Element as CustomCamera).Cancel();
    };

    // Rotate: swap between the front and back cameras inside a configuration transaction.
    rotateButton.TouchUpInside += (s, e) =>
    {
        var devicePosition = captureDeviceInput.Device.Position;
        if (devicePosition == AVCaptureDevicePosition.Front)
        {
            devicePosition = AVCaptureDevicePosition.Back;
        }
        else
        {
            devicePosition = AVCaptureDevicePosition.Front;
        }
        var device = GetCameraForOrientation(devicePosition);
        ConfigureCameraForDevice(device);
        captureSession.BeginConfiguration();
        captureSession.RemoveInput(captureDeviceInput);
        captureDeviceInput = AVCaptureDeviceInput.FromDevice(device);
        captureSession.AddInput(captureDeviceInput);
        captureSession.CommitConfiguration();
    };
}
// Call this on the session queue.
// Configures the capture session for photo capture: photo preset, video input
// (preferring the back dual camera), optional audio input, and a photo output
// with live-photo support. On any fatal failure, setupResult is set to
// SessionConfigurationFailed and the open configuration transaction is committed
// before returning.
void ConfigureSession()
{
    if (setupResult != AVCamSetupResult.Success)
    {
        return;
    }

    NSError error = null;
    session.BeginConfiguration();

    /*
     * We do not create an AVCaptureMovieFileOutput when setting up the session because the
     * AVCaptureMovieFileOutput does not support movie recording with AVCaptureSessionPresetPhoto.
     */
    session.SessionPreset = AVCaptureSession.PresetPhoto;

    // Add video input.
    // Choose the back dual camera if available, otherwise default to a wide angle camera.
    var videoDevice = AVCaptureDevice.GetDefaultDevice(AVCaptureDeviceType.BuiltInDualCamera, AVMediaType.Video, AVCaptureDevicePosition.Back);
    if (videoDevice == null)
    {
        // If the back dual camera is not available, default to the back wide angle camera.
        videoDevice = AVCaptureDevice.GetDefaultDevice(AVCaptureDeviceType.BuiltInWideAngleCamera, AVMediaType.Video, AVCaptureDevicePosition.Back);

        // In some cases where users break their phones, the back wide angle camera is not
        // available. In this case, we should default to the front wide angle camera.
        if (videoDevice == null)
        {
            videoDevice = AVCaptureDevice.GetDefaultDevice(AVCaptureDeviceType.BuiltInWideAngleCamera, AVMediaType.Video, AVCaptureDevicePosition.Front);
        }
    }

    // BUG FIX: guard against no camera at all (e.g. simulator) before creating the
    // device input — FromDevice(null) would crash.
    if (videoDevice == null)
    {
        Console.WriteLine(@"Could not find any video device");
        setupResult = AVCamSetupResult.SessionConfigurationFailed;
        session.CommitConfiguration();
        return;
    }

    var lVideoDeviceInput = AVCaptureDeviceInput.FromDevice(videoDevice, out error);
    if (lVideoDeviceInput == null)
    {
        Console.WriteLine($"Could not create video device input: {error}");
        setupResult = AVCamSetupResult.SessionConfigurationFailed;
        session.CommitConfiguration();
        return;
    }

    if (session.CanAddInput(lVideoDeviceInput))
    {
        session.AddInput(lVideoDeviceInput);
        videoDeviceInput = lVideoDeviceInput;

        DispatchQueue.MainQueue.DispatchAsync(() =>
        {
            /*
             * Why are we dispatching this to the main queue?
             * Because AVCaptureVideoPreviewLayer is the backing layer for AVCamPreviewView and UIView
             * can only be manipulated on the main thread.
             * Note: As an exception to the above rule, it is not necessary to serialize video orientation changes
             * on the AVCaptureVideoPreviewLayer’s connection with other session manipulation.
             *
             * Use the status bar orientation as the initial video orientation. Subsequent orientation changes are
             * handled by -[AVCamCameraViewController viewWillTransitionToSize:withTransitionCoordinator:].
             */
            var statusBarOrientation = UIApplication.SharedApplication.StatusBarOrientation;
            var initialVideoOrientation = AVCaptureVideoOrientation.Portrait;
            if (statusBarOrientation != UIInterfaceOrientation.Unknown)
            {
                initialVideoOrientation = (AVCaptureVideoOrientation)statusBarOrientation;
            }
            VideoPreviewLayer.Connection.VideoOrientation = initialVideoOrientation;
        });
    }
    else
    {
        Console.WriteLine(@"Could not add video device input to the session");
        setupResult = AVCamSetupResult.SessionConfigurationFailed;
        session.CommitConfiguration();
        return;
    }

    // Add audio input. Failure here is non-fatal: the session still works without audio.
    var audioDevice = AVCaptureDevice.GetDefaultDevice(AVMediaType.Audio);
    var audioDeviceInput = AVCaptureDeviceInput.FromDevice(audioDevice, out error);
    if (audioDeviceInput == null)
    {
        Console.WriteLine($"Could not create audio device input: {error}");
    }
    // BUG FIX: the original passed a potentially null audioDeviceInput to CanAddInput,
    // which throws; only attempt to add the input when it was actually created.
    else if (session.CanAddInput(audioDeviceInput))
    {
        session.AddInput(audioDeviceInput);
    }
    else
    {
        Console.WriteLine(@"Could not add audio device input to the session");
    }

    // Add photo output.
    var lPhotoOutput = new AVCapturePhotoOutput();
    if (session.CanAddOutput(lPhotoOutput))
    {
        session.AddOutput(lPhotoOutput);
        photoOutput = lPhotoOutput;
        photoOutput.IsHighResolutionCaptureEnabled = true;
        photoOutput.IsLivePhotoCaptureEnabled = photoOutput.IsLivePhotoCaptureSupported;
        //photoOutput.IsDepthDataDeliveryEnabled(photoOutput.IsDepthDataDeliverySupported());
        livePhotoMode = photoOutput.IsLivePhotoCaptureSupported ? AVCamLivePhotoMode.On : AVCamLivePhotoMode.Off;
        //depthDataDeliveryMode = photoOutput.IsDepthDataDeliverySupported() ? AVCamDepthDataDeliveryMode.On : AVCamDepthDataDeliveryMode.Off;
        inProgressPhotoCaptureDelegates = new Dictionary<long, AVCamPhotoCaptureDelegate>();
        inProgressLivePhotoCapturesCount = 0;
    }
    else
    {
        Console.WriteLine(@"Could not add photo output to the session");
        setupResult = AVCamSetupResult.SessionConfigurationFailed;
        session.CommitConfiguration();
        return;
    }

    backgroundRecordingId = UIApplication.BackgroundTaskInvalid;
    session.CommitConfiguration();
}
// Creates and configures the capture session with separate audio and video data
// outputs, each delivering sample buffers to this object on its own GCD queue.
// Returns false when a required capture device is unavailable (e.g. simulator).
bool SetupCaptureSession()
{
    //Console.WriteLine ("SetupCaptureSession");

    // Overview: RosyWriter uses separate GCD queues for audio and video capture. If a single GCD queue
    // is used to deliver both audio and video buffers, and our video processing consistently takes
    // too long, the delivery queue can back up, resulting in audio being dropped.
    //
    // When recording, RosyWriter creates a third GCD queue for calls to AVAssetWriter. This ensures
    // that AVAssetWriter is not called to start or finish writing from multiple threads simultaneously.
    //
    // RosyWriter uses AVCaptureSession's default preset, AVCaptureSessionPresetHigh.

    // Create Capture session
    captureSession = new AVCaptureSession();
    captureSession.BeginConfiguration();

    // Create audio connection
    NSError error;
    var audioDevice = AVCaptureDevice.DefaultDeviceWithMediaType(AVMediaType.Audio);
    if (audioDevice == null)
    {
        // BUG FIX: commit the configuration opened above before bailing out so the
        // session is not left inside an open transaction. e.g. simulator.
        captureSession.CommitConfiguration();
        return (false);
    }

    var audioIn = new AVCaptureDeviceInput(audioDevice, out error);
    if (captureSession.CanAddInput(audioIn))
    {
        captureSession.AddInput(audioIn);
    }

    var audioOut = new AVCaptureAudioDataOutput();
    var audioCaptureQueue = new DispatchQueue("Audio Capture Queue");

    // Add the Delegate to capture each sample that comes through
    audioOut.SetSampleBufferDelegateQueue(this, audioCaptureQueue);

    if (captureSession.CanAddOutput(audioOut))
    {
        captureSession.AddOutput(audioOut);
    }
    audioConnection = audioOut.ConnectionFromMediaType(AVMediaType.Audio);

    // Create Video Session
    var videoDevice = VideoDeviceWithPosition(AVCaptureDevicePosition.Back);
    // BUG FIX: guard against a missing back camera — the AVCaptureDeviceInput
    // constructor would otherwise crash on a null device.
    if (videoDevice == null)
    {
        captureSession.CommitConfiguration();
        return (false);
    }

    var videoIn = new AVCaptureDeviceInput(videoDevice, out error);
    if (captureSession.CanAddInput(videoIn))
    {
        captureSession.AddInput(videoIn);
    }

    // RosyWriter prefers to discard late video frames early in the capture pipeline, since its
    // processing can take longer than real-time on some platforms (such as iPhone 3GS).
    // Clients whose image processing is faster than real-time should consider setting AVCaptureVideoDataOutput's
    // alwaysDiscardsLateVideoFrames property to NO.
    var videoOut = new AVCaptureVideoDataOutput
    {
        AlwaysDiscardsLateVideoFrames = true,
        // HACK: Change VideoSettings to WeakVideoSettings, and AVVideoSettings to CVPixelBufferAttributes
        // VideoSettings = new AVVideoSettings (CVPixelFormatType.CV32BGRA)
        WeakVideoSettings = new CVPixelBufferAttributes() { PixelFormatType = CVPixelFormatType.CV32BGRA }.Dictionary
    };

    // Create a DispatchQueue for the Video Processing
    var videoCaptureQueue = new DispatchQueue("Video Capture Queue");
    videoOut.SetSampleBufferDelegateQueue(this, videoCaptureQueue);

    if (captureSession.CanAddOutput(videoOut))
    {
        captureSession.AddOutput(videoOut);
    }

    // Set the Video connection from the Video Output object
    videoConnection = videoOut.ConnectionFromMediaType(AVMediaType.Video);
    videoOrientation = videoConnection.VideoOrientation;

    captureSession.CommitConfiguration();

    return (true);
}
// Configures the capture session for photo capture: photo preset, video input
// (back dual camera with wide-angle fallbacks), optional audio input, and the
// photo output. On any fatal failure, setupResult is set to
// SessionConfigurationFailed and the open transaction is committed before returning.
void ConfigureSession()
{
    if (setupResult != AVCamSetupResult.Success)
    {
        return;
    }

    session.BeginConfiguration();

    // We do not create an AVCaptureMovieFileOutput when setting up the session because the
    // AVCaptureMovieFileOutput does not support movie recording with AVCaptureSessionPresetPhoto.
    session.SessionPreset = AVCaptureSession.PresetPhoto;

    // Add video input.
    // Choose the back dual camera if available, otherwise default to a wide angle camera.
    var defaultVideoDevice = AVCaptureDevice.GetDefaultDevice(AVCaptureDeviceType.BuiltInDuoCamera, AVMediaType.Video, AVCaptureDevicePosition.Back)
        ?? AVCaptureDevice.GetDefaultDevice(AVCaptureDeviceType.BuiltInWideAngleCamera, AVMediaType.Video, AVCaptureDevicePosition.Back)
        ?? AVCaptureDevice.GetDefaultDevice(AVCaptureDeviceType.BuiltInWideAngleCamera, AVMediaType.Video, AVCaptureDevicePosition.Front);

    // BUG FIX: guard against no camera at all (e.g. simulator) before creating the
    // device input — FromDevice(null) would crash.
    if (defaultVideoDevice == null)
    {
        Console.WriteLine("Could not find any video device");
        setupResult = AVCamSetupResult.SessionConfigurationFailed;
        session.CommitConfiguration();
        return;
    }

    NSError error;
    var input = AVCaptureDeviceInput.FromDevice(defaultVideoDevice, out error);
    if (error != null)
    {
        Console.WriteLine($"Could not create video device input: {error.LocalizedDescription}");
        setupResult = AVCamSetupResult.SessionConfigurationFailed;
        session.CommitConfiguration();
        return;
    }

    if (session.CanAddInput(input))
    {
        session.AddInput(input);
        videoDeviceInput = input;

        DispatchQueue.MainQueue.DispatchAsync(() =>
        {
            // Why are we dispatching this to the main queue?
            // Because AVCaptureVideoPreviewLayer is the backing layer for PreviewView and UIView
            // can only be manipulated on the main thread.
            // Note: As an exception to the above rule, it is not necessary to serialize video orientation changes
            // on the AVCaptureVideoPreviewLayer’s connection with other session manipulation.

            // Use the status bar orientation as the initial video orientation. Subsequent orientation changes are handled by
            // ViewWillTransitionToSize method.
            var statusBarOrientation = UIApplication.SharedApplication.StatusBarOrientation;
            var initialVideoOrientation = AVCaptureVideoOrientation.Portrait;
            AVCaptureVideoOrientation videoOrientation;
            if (statusBarOrientation != UIInterfaceOrientation.Unknown && TryConvertToVideoOrientation(statusBarOrientation, out videoOrientation))
            {
                initialVideoOrientation = videoOrientation;
            }
            PreviewView.VideoPreviewLayer.Connection.VideoOrientation = initialVideoOrientation;
        });
    }
    else
    {
        Console.WriteLine("Could not add video device input to the session");
        setupResult = AVCamSetupResult.SessionConfigurationFailed;
        session.CommitConfiguration();
        return;
    }

    // Add audio input. Failure here is non-fatal: the session still works without audio.
    //var audioDevice = AVCaptureDevice.DefaultDeviceWithMediaType(AVMediaType.Audio);
    var audioDevice = AVCaptureDevice.GetDefaultDevice(AVMediaType.Audio);
    var audioDeviceInput = AVCaptureDeviceInput.FromDevice(audioDevice, out error);
    if (error != null)
    {
        Console.WriteLine($"Could not create audio device input: {error.LocalizedDescription}");
    }
    // BUG FIX: the original passed a potentially null audioDeviceInput to CanAddInput,
    // which throws; only attempt to add the input when it was actually created.
    if (audioDeviceInput != null && session.CanAddInput(audioDeviceInput))
    {
        session.AddInput(audioDeviceInput);
    }
    else
    {
        Console.WriteLine("Could not add audio device input to the session");
    }

    // Add photo output.
    if (session.CanAddOutput(photoOutput))
    {
        session.AddOutput(photoOutput);
        photoOutput.IsHighResolutionCaptureEnabled = true;
        photoOutput.IsLivePhotoCaptureEnabled = photoOutput.IsLivePhotoCaptureSupported;
        livePhotoMode = photoOutput.IsLivePhotoCaptureSupported ? LivePhotoMode.On : LivePhotoMode.Off;
    }
    else
    {
        Console.WriteLine("Could not add photo output to the session");
        setupResult = AVCamSetupResult.SessionConfigurationFailed;
        session.CommitConfiguration();
        return;
    }

    session.CommitConfiguration();
}