/******************************* MAIN FUNCTIONS *******************************/ public override void ViewDidLoad () { base.ViewDidLoad (); var captureDevice = AVCaptureDevice.DefaultDeviceWithMediaType (AVMediaType.Video); var input = AVCaptureDeviceInput.FromDevice (captureDevice); CaptureSession = new AVCaptureSession(); CaptureSession.AddInput (input as AVCaptureInput); var captureMetadataOutput = new AVCaptureMetadataOutput(); metadataDelegate = new MetadataObjectsDelegate(); metadataDelegate.outer = this; captureMetadataOutput.SetDelegate(metadataDelegate, DispatchQueue.MainQueue); CaptureSession.AddOutput(captureMetadataOutput); captureMetadataOutput.MetadataObjectTypes = AVMetadataObjectType.QRCode; VideoPreviewLayer = new AVCaptureVideoPreviewLayer (CaptureSession); VideoPreviewLayer.VideoGravity = AVLayerVideoGravity.ResizeAspectFill; VideoPreviewLayer.Frame = View.Layer.Bounds; View.Layer.AddSublayer (VideoPreviewLayer); View.BringSubviewToFront (messageLabel); QRCodeFrameView = new UIView (); QRCodeFrameView.Layer.BorderColor = UIColor.Green.CGColor; QRCodeFrameView.Layer.BorderWidth = 2; View.AddSubview (QRCodeFrameView); View.BringSubviewToFront (QRCodeFrameView); CaptureSession.StartRunning(); cancelButton.Clicked += (sender, e) => { this.DismissViewController (true, null); }; }
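The MetadataObjectsDelegate referenced above is not included in the listing. A minimal sketch of what such a delegate might look like, assuming the controller exposes the QRCodeFrameView, messageLabel and VideoPreviewLayer members used in ViewDidLoad (the controller type name QRScannerViewController and the frame-drawing behaviour are illustrative, not taken from the original project):

public class MetadataObjectsDelegate : AVCaptureMetadataOutputObjectsDelegate
{
    // Hypothetical back-reference to the scanning controller; ViewDidLoad assigns "this" to it.
    public QRScannerViewController outer;

    public override void DidOutputMetadataObjects (AVCaptureMetadataOutput captureOutput,
        AVMetadataObject[] metadataObjects, AVCaptureConnection connection)
    {
        if (metadataObjects == null || metadataObjects.Length == 0) {
            // Nothing detected in this frame: hide the highlight rectangle.
            outer.QRCodeFrameView.Frame = CGRect.Empty;
            return;
        }

        var readableObject = metadataObjects [0] as AVMetadataMachineReadableCodeObject;
        if (readableObject != null && readableObject.Type == AVMetadataObjectType.QRCode) {
            // Convert metadata coordinates into preview-layer coordinates before drawing the frame.
            var transformed = outer.VideoPreviewLayer.GetTransformedMetadataObject (readableObject);
            outer.QRCodeFrameView.Frame = transformed.Bounds;
            outer.messageLabel.Text = readableObject.StringValue;
        }
    }
}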
public void RecordVideoToPath(UIViewController ViewController, string VideoPath) { // setup capture device AVCaptureDevice videoRecordingDevice = AVCaptureDevice.DefaultDeviceWithMediaType(AVMediaType.Video); NSError error; AVCaptureDeviceInput videoInput = new AVCaptureDeviceInput(videoRecordingDevice, out error); // create and assign a capture session AVCaptureSession captureSession = new AVCaptureSession(); captureSession.SessionPreset = AVCaptureSession.Preset1280x720; captureSession.AddInput(videoInput); // create the delegate that will handle captured frames before wiring it to the output captureVideoDelegate = new CaptureVideoDelegate(ViewController); // Create capture device output AVCaptureVideoDataOutput videoOutput = new AVCaptureVideoDataOutput(); captureSession.AddOutput(videoOutput); videoOutput.VideoSettings.PixelFormat = CVPixelFormatType.CV32BGRA; videoOutput.MinFrameDuration = new CMTime(1, 30); videoOutput.SetSampleBufferDelegateQueue(captureVideoDelegate, new DispatchQueue("videoCaptureQueue")); // Start capture session captureSession.StartRunning(); }
public void SetupLiveCameraStream() { captureSession = new AVCaptureSession(); var viewLayer = liveCameraStream.Layer; Console.WriteLine(viewLayer.Frame.Width); var videoPreviewLayer = new AVCaptureVideoPreviewLayer(captureSession) { Frame = liveCameraStream.Bounds }; liveCameraStream.Layer.AddSublayer(videoPreviewLayer); Console.WriteLine(liveCameraStream.Layer.Frame.Width); var captureDevice = AVCaptureDevice.DefaultDeviceWithMediaType(AVMediaType.Video); ConfigureCameraForDevice(captureDevice); captureDeviceInput = AVCaptureDeviceInput.FromDevice(captureDevice); var dictionary = new NSMutableDictionary(); dictionary[AVVideo.CodecKey] = AVVideo.CodecJPEG; stillImageOutput = new AVCaptureStillImageOutput() { OutputSettings = dictionary }; captureSession.AddOutput(stillImageOutput); captureSession.AddInput(captureDeviceInput); captureSession.StartRunning(); ViewWillLayoutSubviews(); }
bool SetupCaptureSession () { // configure the capture session for low resolution, change this if your code // can cope with more data or volume session = new AVCaptureSession () { SessionPreset = AVCaptureSession.PresetMedium }; // create a device input and attach it to the session var captureDevice = AVCaptureDevice.DefaultDeviceWithMediaType (AVMediaType.Video); var input = AVCaptureDeviceInput.FromDevice (captureDevice); if (input == null){ // No input device return false; } session.AddInput (input); // create a VideoDataOutput and add it to the sesion var output = new AVCaptureVideoDataOutput () { VideoSettings = new AVVideoSettings (CVPixelFormatType.CV32BGRA) }; // configure the output queue = new DispatchQueue ("myQueue"); qrScanner = new QrScanner (this); output.SetSampleBufferDelegateAndQueue (qrScanner, queue); session.AddOutput (output); previewLayer = new AVCaptureVideoPreviewLayer (session); previewLayer.Orientation = AVCaptureVideoOrientation.Portrait; previewLayer.VideoGravity = "AVLayerVideoGravityResizeAspectFill"; session.StartRunning (); return true; }
bool SetupCaptureSession () { // configure the capture session for low resolution, change this if your code // can cope with more data or volume session = new AVCaptureSession () { SessionPreset = AVCaptureSession.PresetMedium }; // create a device input and attach it to the session var captureDevice = AVCaptureDevice.DefaultDeviceWithMediaType (AVMediaType.Video); var input = AVCaptureDeviceInput.FromDevice (captureDevice); if (input == null){ Console.WriteLine ("No input device"); return false; } session.AddInput (input); // create a VideoDataOutput and add it to the sesion var output = new AVCaptureVideoDataOutput () { VideoSettings = new AVVideoSettings (CVPixelFormatType.CV32BGRA), // If you want to cap the frame rate at a given speed, in this sample: 15 frames per second MinFrameDuration = new CMTime (1, 15) }; // configure the output queue = new MonoTouch.CoreFoundation.DispatchQueue ("myQueue"); outputRecorder = new OutputRecorder (); output.SetSampleBufferDelegateAndQueue (outputRecorder, queue); session.AddOutput (output); session.StartRunning (); return true; }
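Several of the samples above and below hand the video data output to an OutputRecorder that is not shown here. A minimal sketch of such a sample-buffer delegate, with the per-frame work left as a placeholder (what you do with each frame depends on the app):

public class OutputRecorder : AVCaptureVideoDataOutputSampleBufferDelegate
{
    public override void DidOutputSampleBuffer (AVCaptureVideoDataOutput captureOutput,
        CMSampleBuffer sampleBuffer, AVCaptureConnection connection)
    {
        try {
            using (var pixelBuffer = sampleBuffer.GetImageBuffer () as CVPixelBuffer) {
                // Per-frame work goes here; keep it fast, since frames arrive on the
                // dispatch queue passed to SetSampleBufferDelegateAndQueue/SetSampleBufferDelegateQueue.
                Console.WriteLine ("Got frame: {0}x{1}", pixelBuffer.Width, pixelBuffer.Height);
            }
        } finally {
            // Dispose the sample buffer promptly, otherwise capture stalls after a few frames.
            sampleBuffer.Dispose ();
        }
    }
}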
bool SetupCaptureSession () { // configure the capture session for low resolution, change this if your code // can cope with more data or volume session = new AVCaptureSession () { SessionPreset = AVCaptureSession.PresetMedium }; // create a device input and attach it to the session var captureDevice = AVCaptureDevice.DefaultDeviceWithMediaType (AVMediaType.Video); if (captureDevice == null) { Image<Bgr, Byte> img = new Image<Bgr, byte> (512, 512, new Bgr (255, 255, 255)); CvInvoke.PutText ( img, "Capture device not found.", new Point (10, 200), FontFace.HersheyComplex, 1, new MCvScalar (), 2); ImageView.Image = img.ToUIImage(); return false; } var input = AVCaptureDeviceInput.FromDevice (captureDevice); if (input == null){ Console.WriteLine ("No input device"); return false; } session.AddInput (input); // create a VideoDataOutput and add it to the sesion AVVideoSettingsUncompressed settingUncomp = new AVVideoSettingsUncompressed(); settingUncomp.PixelFormatType = CVPixelFormatType.CV32BGRA; var output = new AVCaptureVideoDataOutput () { UncompressedVideoSetting = settingUncomp, // If you want to cap the frame rate at a given speed, in this sample: 15 frames per second //MinFrameDuration = new CMTime (1, 15) }; // configure the output queue = new DispatchQueue ("myQueue"); outputRecorder = new OutputRecorder (ImageView); output.SetSampleBufferDelegateQueue(outputRecorder, queue); session.AddOutput (output); session.StartRunning (); return true; }
bool SetupCaptureSession () { // configure the capture session for low resolution, change this if your code // can cope with more data or volume session = new AVCaptureSession { SessionPreset = AVCaptureSession.PresetMedium }; // create a device input and attach it to the session var captureDevice = AVCaptureDevice.DefaultDeviceWithMediaType (AVMediaType.Video); if (captureDevice == null) { Console.WriteLine ("No captureDevice - this won't work on the simulator, try a physical device"); return false; } //Configure for 15 FPS. Note use of LockForConigfuration()/UnlockForConfiguration() NSError error = null; captureDevice.LockForConfiguration (out error); if (error != null) { Console.WriteLine (error); captureDevice.UnlockForConfiguration (); return false; } if (UIDevice.CurrentDevice.CheckSystemVersion (7, 0)) captureDevice.ActiveVideoMinFrameDuration = new CMTime (1, 15); captureDevice.UnlockForConfiguration (); var input = AVCaptureDeviceInput.FromDevice (captureDevice); if (input == null) { Console.WriteLine ("No input - this won't work on the simulator, try a physical device"); return false; } session.AddInput (input); // create a VideoDataOutput and add it to the sesion var settings = new CVPixelBufferAttributes { PixelFormatType = CVPixelFormatType.CV32BGRA }; using (var output = new AVCaptureVideoDataOutput { WeakVideoSettings = settings.Dictionary }) { queue = new DispatchQueue ("myQueue"); outputRecorder = new OutputRecorder (); output.SetSampleBufferDelegate (outputRecorder, queue); session.AddOutput (output); } session.StartRunning (); return true; }
public void InitAndStartCamera() { session = new AVCaptureSession { SessionPreset = AVCaptureSession.PresetMedium }; var captureDevice = AVCaptureDevice.DefaultDeviceWithMediaType(AVMediaType.Video); NSError error; var videoInput = AVCaptureDeviceInput.FromDevice(captureDevice, out error); if (videoInput == null || !session.CanAddInput(videoInput)) return; session.AddInput(videoInput); previewLayer = new AVCaptureVideoPreviewLayer(session) { Frame = rootView.Bounds }; previewLayer.Connection.VideoOrientation = configDicByRotationChanged[UIApplication.SharedApplication.StatusBarOrientation]; previewLayer.VideoGravity = AVLayerVideoGravity.ResizeAspectFill; cameraView.Layer.AddSublayer(previewLayer); session.StartRunning(); }
public override void WindowDidLoad() { base.WindowDidLoad(); session = new AVCaptureSession() { SessionPreset = AVCaptureSession.PresetMedium }; var captureDevice = AVCaptureDevice.DefaultDeviceWithMediaType(AVMediaType.Video); var input = AVCaptureDeviceInput.FromDevice(captureDevice); if (input == null) { Console.WriteLine("No input - this won't work on the simulator, try a physical device"); return; } session.AddInput(input); var captureVideoPreviewLayer = new AVCaptureVideoPreviewLayer(session); var contentView = Window.ContentView; contentView.WantsLayer = true; captureVideoPreviewLayer.Frame = contentView.Bounds; contentView.Layer.AddSublayer(captureVideoPreviewLayer); session.StartRunning(); }
bool SetupCaptureSession() { // configure the capture session for low resolution, change this if your code // can cope with more data or volume session = new AVCaptureSession() { SessionPreset = AVCaptureSession.PresetMedium }; // create a device input and attach it to the session var captureDevice = AVCaptureDevice.DefaultDeviceWithMediaType(AVMediaType.Video); var input = AVCaptureDeviceInput.FromDevice(captureDevice); if (input == null) { // No input device return(false); } session.AddInput(input); // create a VideoDataOutput and add it to the sesion var output = new AVCaptureVideoDataOutput() { VideoSettings = new AVVideoSettings(CVPixelFormatType.CV32BGRA) }; // configure the output queue = new DispatchQueue("myQueue"); qrScanner = new QrScanner(this); output.SetSampleBufferDelegateAndQueue(qrScanner, queue); session.AddOutput(output); previewLayer = new AVCaptureVideoPreviewLayer(session); previewLayer.Orientation = AVCaptureVideoOrientation.Portrait; previewLayer.VideoGravity = "AVLayerVideoGravityResizeAspectFill"; session.StartRunning(); return(true); }
public async Task SetupLiveCameraStream() { await Task.Delay(700); captureSession = new AVCaptureSession(); var viewLayer = liveCameraStream.Layer; videoPreviewLayer = new AVCaptureVideoPreviewLayer(captureSession) { Frame = this.View.Frame }; liveCameraStream.Layer.AddSublayer(videoPreviewLayer); videoPreviewLayer.Orientation = AVCaptureVideoOrientation.LandscapeRight; var captureDevice = AVCaptureDevice.DefaultDeviceWithMediaType(AVMediaType.Video); ConfigureCameraForDevice(captureDevice); captureDeviceInput = AVCaptureDeviceInput.FromDevice(captureDevice); captureSession.AddInput(captureDeviceInput); var dictionary = new NSMutableDictionary(); dictionary[AVVideo.CodecKey] = AVVideo.CodecJPEG; stillImageOutput = new AVCaptureStillImageOutput() { OutputSettings = dictionary }; captureSession.AddOutput(stillImageOutput); captureSession.StartRunning(); }
/// <summary> /// Starts a session with the camera, and creates the classes /// needed to view a video preview, and capture a still image. /// </summary> public void SetupLiveCameraStream() { captureSession = new AVCaptureSession() { SessionPreset = new NSString(AVCaptureSession.PresetHigh) }; videoPreviewLayer = new AVCaptureVideoPreviewLayer(captureSession) { Frame = View.Frame, Orientation = GetCaptureOrientation(UIApplication.SharedApplication.StatusBarOrientation) }; View.Layer.AddSublayer(videoPreviewLayer); AVCaptureDevice captureDevice = GetCameraForOrientation(AVCaptureDevicePosition.Back) ?? GetCameraForOrientation(AVCaptureDevicePosition.Front) ?? GetCameraForOrientation(AVCaptureDevicePosition.Unspecified); if (captureDevice == null) { (Element as LabelReader).CameraError(LabelReaderConstants.NoCameraMessage); return; } captureDeviceInput = AVCaptureDeviceInput.FromDevice(captureDevice); captureSession.AddInput(captureDeviceInput); videoDataOutput = new AVCaptureVideoDataOutput(); videoDataOutput.SetSampleBufferDelegateQueue(this, new CoreFoundation.DispatchQueue("frameQueue")); captureSession.AddOutput(videoDataOutput); captureSession.StartRunning(); // set last processed time to now so the handler for video frames will wait an appropriate length of time // before processing images. lastImageProcessedTime = DateTime.Now; }
void SetupLiveStream() { CaptureSession = new AVCaptureSession(); // PhotoOutput = new AVCapturePhotoOutput(); previewLayer = new AVCaptureVideoPreviewLayer(CaptureSession) { Frame = Bounds, VideoGravity = AVLayerVideoGravity.ResizeAspectFill }; // previewLayer.BorderWidth = 2f; // previewLayer.BorderColor = UIColor.Red.CGColor; var videoDevices = AVCaptureDevice.DevicesWithMediaType(AVMediaType.Video); var cameraPosition = (cameraOptions == CameraOptions.Front) ? AVCaptureDevicePosition.Front : AVCaptureDevicePosition.Back; var device = videoDevices.FirstOrDefault(d => d.Position == cameraPosition); if (device == null) { return; } captureDeviceInput = new AVCaptureDeviceInput(device, out NSError error); CaptureSession.AddInput(captureDeviceInput); Layer.AddSublayer(previewLayer); captureStillImageOutput = new AVCaptureStillImageOutput() { OutputSettings = new NSDictionary() }; CaptureSession.AddOutput(captureStillImageOutput); CaptureSession.StartRunning(); IsPreviewing = true; }
bool SetupCaptureSession() { // configure the capture session for low resolution, change this if your code // can cope with more data or volume session = new AVCaptureSession() { SessionPreset = AVCaptureSession.PresetMedium }; // create a device input and attach it to the session var captureDevice = AVCaptureDevice.DefaultDeviceWithMediaType(AVMediaType.Video); var input = AVCaptureDeviceInput.FromDevice(captureDevice); if (input == null) { Console.WriteLine("No input device"); return(false); } session.AddInput(input); // create a VideoDataOutput and add it to the sesion var output = new AVCaptureVideoDataOutput() { VideoSettings = new AVVideoSettings(CVPixelFormatType.CV32BGRA), // If you want to cap the frame rate at a given speed, in this sample: 15 frames per second MinFrameDuration = new CMTime(1, 15) }; // configure the output queue = new MonoTouch.CoreFoundation.DispatchQueue("myQueue"); outputRecorder = new OutputRecorder(); output.SetSampleBufferDelegateAndQueue(outputRecorder, queue); session.AddOutput(output); session.StartRunning(); return(true); }
void BttSwitch_TouchUpInside(object sender, EventArgs e) { var devicePosition = captureDeviceInput.Device.Position; if (devicePosition == AVCaptureDevicePosition.Front) { devicePosition = AVCaptureDevicePosition.Back; } else { devicePosition = AVCaptureDevicePosition.Front; } var device = GetCameraForOrientation(devicePosition); ConfigureCameraForDevice(device); captureSession.BeginConfiguration(); captureSession.RemoveInput(captureDeviceInput); captureDeviceInput = AVCaptureDeviceInput.FromDevice(device); captureSession.AddInput(captureDeviceInput); captureSession.CommitConfiguration(); }
void Initialize () { CaptureSession = new AVCaptureSession (); previewLayer = new AVCaptureVideoPreviewLayer (CaptureSession) { Frame = Bounds, VideoGravity = AVLayerVideoGravity.ResizeAspectFill }; var videoDevices = AVCaptureDevice.DevicesWithMediaType (AVMediaType.Video); var cameraPosition = (cameraOptions == CameraOptions.Front) ? AVCaptureDevicePosition.Front : AVCaptureDevicePosition.Back; var device = videoDevices.FirstOrDefault (d => d.Position == cameraPosition); if (device == null) { return; } NSError error; var input = new AVCaptureDeviceInput (device, out error); CaptureSession.AddInput (input); Layer.AddSublayer (previewLayer); CaptureSession.StartRunning (); IsPreviewing = true; }
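The Initialize methods above only start the preview; a matching teardown is not shown in the listing. A sketch of what it could look like, assuming the same CaptureSession, previewLayer and IsPreviewing members are in scope:

public void StopPreview ()
{
    if (CaptureSession == null)
        return;

    if (CaptureSession.Running)
        CaptureSession.StopRunning ();

    // Remove the preview layer and release the session so the camera is freed for other apps.
    previewLayer.RemoveFromSuperLayer ();
    CaptureSession.Dispose ();
    CaptureSession = null;
    IsPreviewing = false;
}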
public void SwitchCamera() { var devicePosition = captureDeviceInput.Device.Position; if (devicePosition == AVCaptureDevicePosition.Front) { devicePosition = AVCaptureDevicePosition.Back; } else { devicePosition = AVCaptureDevicePosition.Front; } var device = GetCameraForOrientation(devicePosition); ConfigureCameraForDevice(device); CaptureSession.BeginConfiguration(); CaptureSession.RemoveInput(captureDeviceInput); captureDeviceInput = AVCaptureDeviceInput.FromDevice(device); CaptureSession.AddInput(captureDeviceInput); CaptureSession.CommitConfiguration(); }
partial void SwitchCameraButton_TouchUpInside(UIButton sender) { var devicePosition = captureDeviceInput.Device.Position; if (devicePosition == AVCaptureDevicePosition.Front) { devicePosition = AVCaptureDevicePosition.Back; } else { devicePosition = AVCaptureDevicePosition.Front; } var device = GetCameraForOrientation(devicePosition); ConfigureCameraForDevice(device); captureSession.BeginConfiguration(); captureSession.RemoveInput(captureDeviceInput); captureDeviceInput = AVCaptureDeviceInput.FromDevice(device); captureSession.AddInput(captureDeviceInput); captureSession.CommitConfiguration(); }
void ToggleFrontBackCamera(object sender, EventArgs e) { var devicePosition = _captureDeviceInput.Device.Position; if (devicePosition == AVCaptureDevicePosition.Front) { devicePosition = AVCaptureDevicePosition.Back; } else { devicePosition = AVCaptureDevicePosition.Front; } var device = GetCameraForOrientation(devicePosition); ConfigureCameraForDevice(device); _captureSession.BeginConfiguration(); _captureSession.RemoveInput(_captureDeviceInput); _captureDeviceInput = AVCaptureDeviceInput.FromDevice(device); _captureSession.AddInput(_captureDeviceInput); _captureSession.CommitConfiguration(); }
private void SwitchCameraButtonTapped(object sender, EventArgs e) { var devicePosition = _captureDeviceInput.Device.Position; if (devicePosition == AVCaptureDevicePosition.Front) { devicePosition = AVCaptureDevicePosition.Back; } else { devicePosition = AVCaptureDevicePosition.Front; } var device = GetCameraForOrientation(devicePosition); ConfigureCameraForDevice(device); _captureSession.BeginConfiguration(); _captureSession.RemoveInput(_captureDeviceInput); _captureDeviceInput = AVCaptureDeviceInput.FromDevice(device); _captureSession.AddInput(_captureDeviceInput); _captureSession.CommitConfiguration(); CheckDeviceOrientation(null); }
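The camera-switching handlers above all call GetCameraForOrientation and ConfigureCameraForDevice helpers that are not part of this listing. A rough sketch of what they typically do (pick a device by position, then enable continuous autofocus or auto-exposure where supported); treat the exact behaviour as an assumption:

AVCaptureDevice GetCameraForOrientation (AVCaptureDevicePosition position)
{
    // Return the first capture device mounted at the requested position, or null if none exists.
    var devices = AVCaptureDevice.DevicesWithMediaType (AVMediaType.Video);
    foreach (var device in devices) {
        if (device.Position == position)
            return device;
    }
    return null;
}

void ConfigureCameraForDevice (AVCaptureDevice device)
{
    NSError error;
    if (device.IsFocusModeSupported (AVCaptureFocusMode.ContinuousAutoFocus)) {
        device.LockForConfiguration (out error);
        device.FocusMode = AVCaptureFocusMode.ContinuousAutoFocus;
        device.UnlockForConfiguration ();
    } else if (device.IsExposureModeSupported (AVCaptureExposureMode.ContinuousAutoExposure)) {
        device.LockForConfiguration (out error);
        device.ExposureMode = AVCaptureExposureMode.ContinuousAutoExposure;
        device.UnlockForConfiguration ();
    }
}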
void Initialize() { CaptureSession = new AVCaptureSession(); CaptureSession.SessionPreset = AVCaptureSession.PresetPhoto; previewLayer = new AVCaptureVideoPreviewLayer(CaptureSession) { Frame = Bounds, VideoGravity = AVLayerVideoGravity.ResizeAspectFill }; var videoDevices = AVCaptureDevice.DevicesWithMediaType(AVMediaType.Video); var cameraPosition = (cameraOptions == CameraOptions.Front) ? AVCaptureDevicePosition.Front : AVCaptureDevicePosition.Back; var device = videoDevices.FirstOrDefault(d => d.Position == cameraPosition); if (device == null) { return; } NSError error; var input = new AVCaptureDeviceInput(device, out error); var dictionary = new NSMutableDictionary(); dictionary[AVVideo.CodecKey] = AVVideo.CodecJPEG; CaptureOutput = new AVCaptureStillImageOutput() { OutputSettings = dictionary }; CaptureSession.AddOutput(CaptureOutput); CaptureSession.AddInput(input); Layer.AddSublayer(previewLayer); CaptureSession.StartRunning(); IsPreviewing = true; }
/// <summary> /// Initializes the camera. /// </summary> public void InitializeCamera() { try { NSError error; NSError err; _device.LockForConfiguration(out err); _device.FocusMode = AVCaptureFocusMode.ContinuousAutoFocus; _device.UnlockForConfiguration(); _input = new AVCaptureDeviceInput(_device, out error); _captureSession.AddInput(_input); _output = new AVCaptureStillImageOutput(); var dict = new NSMutableDictionary(); dict[AVVideo.CodecKey] = AVVideo.CodecJPEG; _output.OutputSettings = dict; _captureSession.AddOutput(_output); InvokeOnMainThread(delegate { // capture connection used for rotating camera _captureConnection = _previewLayer.Connection; SetStartOrientation(); // set orientation before loading camera _captureSession.StartRunning(); }); } catch (Exception ex) { Console.WriteLine(ex.Message); } Available?.Invoke(this, _cameraAvailable); }
public override void ViewDidLoad () { base.ViewDidLoad (); session = new AVCaptureSession (); var camera = AVCaptureDevice.DefaultDeviceWithMediaType(AVMediaType.Video); var input = AVCaptureDeviceInput.FromDevice(camera); session.AddInput(input); output = new AVCaptureMetadataOutput(); var metadataDelegate = new MetadataOutputDelegate(); output.SetDelegate(metadataDelegate, DispatchQueue.MainQueue); session.AddOutput(output); output.MetadataObjectTypes = new NSString[] { AVMetadataObject.TypeQRCode, AVMetadataObject.TypeEAN13Code }; var previewLayer = new AVCaptureVideoPreviewLayer(session); //var view = new ContentView(UIColor.LightGray, previewLayer, metadataDelegate); previewLayer.MasksToBounds = true; previewLayer.VideoGravity = AVCaptureVideoPreviewLayer.GravityResizeAspectFill; previewLayer.Frame = UIScreen.MainScreen.Bounds; this.View.Layer.AddSublayer(previewLayer); metadataDelegate.MetadataFound += (s, e) => { session.StopRunning(); new UIAlertView("Scanned!",e.StringValue, null ,"OK",null).Show(); }; session.StartRunning(); }
public void BeginSession() { try { captureSession.BeginConfiguration(); var settings = new CVPixelBufferAttributes { PixelFormatType = CVPixelFormatType.CV32BGRA }; videoOutput.WeakVideoSettings = settings.Dictionary; videoOutput.AlwaysDiscardsLateVideoFrames = true; videoOutput.SetSampleBufferDelegateQueue(this, queue); captureSession.SessionPreset = AVCaptureSession.Preset1920x1080; captureSession.AddOutput(videoOutput); NSError err; var input = new AVCaptureDeviceInput(captureDevice, out err); if (err != null) { Console.WriteLine("AVCapture error: " + err); } captureSession.AddInput(input); videoConnection = videoOutput.ConnectionFromMediaType(AVMediaType.Video); captureSession.CommitConfiguration(); captureSession.StartRunning(); Console.WriteLine("started AV capture session"); } catch { Console.WriteLine("error connecting to the capture device"); } }
public void Start() { captureSession = new AVCaptureSession(); previewLayer = new AVCaptureVideoPreviewLayer(captureSession) { VideoGravity = AVLayerVideoGravity.ResizeAspectFill, }; try { var captureDevice = AVCaptureDevice.GetDefaultDevice(AVMediaTypes.Video); var input = AVCaptureDeviceInput.FromDevice(captureDevice); captureSession.AddInput(input); Layer.AddSublayer(previewLayer); captureSession.StartRunning(); } catch (Exception e) { Console.WriteLine(e); } }
void Initialize() { CaptureSession = new AVCaptureSession(); previewLayer = new AVCaptureVideoPreviewLayer(CaptureSession) { Frame = Bounds, VideoGravity = AVLayerVideoGravity.ResizeAspectFill }; var videoDevices = AVCaptureDevice.DevicesWithMediaType(AVMediaType.Video); var cameraPosition = (cameraOptions == CameraOptions.Front) ? AVCaptureDevicePosition.Front : AVCaptureDevicePosition.Back; var device = videoDevices.FirstOrDefault(d => d.Position == cameraPosition); if (device == null) { return; } NSError error; var input = new AVCaptureDeviceInput(device, out error); CaptureSession.AddInput(input); Layer.AddSublayer(previewLayer); CaptureSession.StartRunning(); IsPreviewing = true; }
void setupCaptureSession() { //Create a device for capturing barcodes var captureDevice = AVCaptureDevice.GetDefaultDevice(AVMediaTypes.Video); //Configure the device for continuous autofocus ConfigureCameraForDevice(captureDevice); //Create an input from that device var captureInput = AVCaptureDeviceInput.FromDevice(captureDevice); //Add the input to the session session.AddInput(captureInput); //Create a preview layer for the view var previewLayer = AVCaptureVideoPreviewLayer.FromSession(session); previewLayer.VideoGravity = AVLayerVideoGravity.ResizeAspectFill; previewLayer.Frame = vie_Preview_cam.Frame; //Add the preview layer to the camera UIView vie_Preview_cam.Layer.AddSublayer(previewLayer); //Create the metadata output and assign who's going to handle it var metadataoutput = new AVCaptureMetadataOutput(); //Set delegate metadataoutput.SetDelegate(this, CoreFoundation.DispatchQueue.MainQueue); //Add the metadata output to the session session.AddOutput(metadataoutput); //Assign which type of codes will be read metadataoutput.MetadataObjectTypes = AVMetadataObjectType.QRCode; //Start the session session.StartRunning(); }
public void SetupSession() { videoPreviewLayer.Session = captureSession; videoPreviewLayer.Frame = liveCameraStream.Bounds; liveCameraStream.Layer.AddSublayer(videoPreviewLayer); var captureDevice = GetBackCamera(); ConfigureCameraForDevice(captureDevice); NSError err; videoDeviceInput = AVCaptureDeviceInput.FromDevice(captureDevice, out err); videoDataOutput = new AVCaptureVideoDataOutput { AlwaysDiscardsLateVideoFrames = true }; DispatchQueue queue = new DispatchQueue("dbrcameraQueue"); if (captureSession.CanAddInput(videoDeviceInput)) { captureSession.AddInput(videoDeviceInput); DispatchQueue.MainQueue.DispatchAsync(() => { var initialVideoOrientation = AVCaptureVideoOrientation.Portrait; var statusBarOrientation = UIApplication.SharedApplication.StatusBarOrientation; if (statusBarOrientation != UIInterfaceOrientation.Unknown) { AVCaptureVideoOrientation videoOrintation; if (Enum.TryParse(statusBarOrientation.ToString(), out videoOrintation)) { initialVideoOrientation = videoOrintation; } } videoPreviewLayer.Connection.VideoOrientation = initialVideoOrientation; }); } else if (err != null) { Console.WriteLine($"Could not create video device input: {err}"); //this.setupResult = SessionSetupResult.ConfigurationFailed; this.captureSession.CommitConfiguration(); return; } else { Console.WriteLine("Could not add video device input to the session"); //this.setupResult = SessionSetupResult.ConfigurationFailed; this.captureSession.CommitConfiguration(); return; } if (captureSession.CanAddOutput(videoDataOutput)) { captureSession.AddOutput(videoDataOutput); captureOutput.reader = reader; captureOutput.update = ResetResults; videoDataOutput.SetSampleBufferDelegateQueue(captureOutput, queue); videoDataOutput.WeakVideoSettings = new NSDictionary <NSString, NSObject>(CVPixelBuffer.PixelFormatTypeKey, NSNumber.FromInt32((int)CVPixelFormatType.CV32BGRA)); } else { Console.WriteLine("Could not add metadata output to the session"); //this.setupResult = SessionSetupResult.ConfigurationFailed; captureSession.CommitConfiguration(); return; } captureSession.CommitConfiguration(); }
public override void ViewDidLoad() { base.ViewDidLoad (); weAreRecording = false; lblError.Hidden = true; btnStartRecording.SetTitle("Start Recording", UIControlState.Normal); //Set up session session = new AVCaptureSession (); //Set up inputs and add them to the session //this will only work if using a physical device! Console.WriteLine ("getting device inputs"); try{ //add video capture device device = AVCaptureDevice.DefaultDeviceWithMediaType (AVMediaType.Video); input = AVCaptureDeviceInput.FromDevice (device); session.AddInput (input); //add audio capture device audioDevice = AVCaptureDevice.DefaultDeviceWithMediaType(AVMediaType.Audio); audioInput = AVCaptureDeviceInput.FromDevice(audioDevice); session.AddInput(audioInput); } catch(Exception ex){ //show the error label. This will always show when running in the simulator instead of a physical device. lblError.Hidden = false; return; } //Set up preview layer (shows what the input device sees) Console.WriteLine ("setting up preview layer"); previewlayer = new AVCaptureVideoPreviewLayer (session); previewlayer.Frame = this.View.Bounds; //this code makes UI controls sit on top of the preview layer! Allows you to just place the controls in interface builder UIView cameraView = new UIView (); cameraView.Layer.AddSublayer (previewlayer); this.View.AddSubview (cameraView); this.View.SendSubviewToBack (cameraView); Console.WriteLine ("Configuring output"); output = new AVCaptureMovieFileOutput (); long totalSeconds = 10000; Int32 preferredTimeScale = 30; CMTime maxDuration = new CMTime (totalSeconds, preferredTimeScale); output.MinFreeDiskSpaceLimit = 1024 * 1024; output.MaxRecordedDuration = maxDuration; if (session.CanAddOutput (output)) { session.AddOutput (output); } session.SessionPreset = AVCaptureSession.PresetMedium; Console.WriteLine ("About to start running session"); session.StartRunning (); //wire up the toggle-recording button btnStartRecording.TouchUpInside += startStopPushed; }
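The ViewDidLoad above configures an AVCaptureMovieFileOutput but does not show the startStopPushed handler. A minimal sketch of toggling recording to a temporary file, reusing the weAreRecording, output and btnStartRecording members from that sample; the file path and the RecordingDelegate class are illustrative assumptions:

void startStopPushed (object sender, EventArgs e)
{
    if (!weAreRecording) {
        // Record to a temporary file; a real app would pick a unique, persistent location.
        var path = Path.Combine (Path.GetTempPath (), "capture.mov");
        if (File.Exists (path))
            File.Delete (path);

        output.StartRecordingToOutputFile (NSUrl.FromFilename (path), new RecordingDelegate ());
        btnStartRecording.SetTitle ("Stop Recording", UIControlState.Normal);
        weAreRecording = true;
    } else {
        output.StopRecording ();
        btnStartRecording.SetTitle ("Start Recording", UIControlState.Normal);
        weAreRecording = false;
    }
}

class RecordingDelegate : AVCaptureFileOutputRecordingDelegate
{
    public override void FinishedRecording (AVCaptureFileOutput captureOutput, NSUrl outputFileUrl,
        NSObject[] connections, NSError error)
    {
        // Called when the movie file has been written (or recording failed).
        Console.WriteLine ("Finished recording to {0} (error: {1})", outputFileUrl, error);
    }
}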
private void _setupOutputMode(CameraOutputMode newCameraOutputMode, CameraOutputMode?oldCameraOutputMode) { captureSession.BeginConfiguration(); if (oldCameraOutputMode != null) { // remove current setting switch (oldCameraOutputMode) { case CameraOutputMode.StillImage: if (stillImageOutput != null) { captureSession.RemoveOutput(stillImageOutput); } break; case CameraOutputMode.VideoOnly: case CameraOutputMode.VideoWithMic: if (movieOutput != null) { captureSession.RemoveOutput(movieOutput); } if (oldCameraOutputMode == CameraOutputMode.VideoWithMic) { _removeMicInput(); } break; default: break; } } // configure new devices switch (newCameraOutputMode) { case CameraOutputMode.StillImage: if (stillImageOutput == null) { _setupOutputs(); } if (stillImageOutput != null) { captureSession.AddOutput(stillImageOutput); } break; case CameraOutputMode.VideoOnly: case CameraOutputMode.VideoWithMic: captureSession.AddOutput(_getMovieOutput()); if (newCameraOutputMode == CameraOutputMode.VideoWithMic) { var validMic = _deviceInputFromDevice(mic); if (validMic != null) { captureSession.AddInput(validMic); } } break; default: break; } captureSession.CommitConfiguration(); _updateCameraQualityMode(CameraOutputQuality); _orientationChanged(); }
private void SetupCamera() { CaptureSession = new AVCaptureSession(); CaptureSession.SessionPreset = AVCaptureSession.PresetPhoto; currentDevice = null; inputDevice1 = null; inputDevice2 = null; foreach (AVCaptureDevice device in AVCaptureDevice.DevicesWithMediaType(AVMediaType.Video)) { if (device.Position == AVCaptureDevicePosition.Front) { inputDevice1 = device; } else if (device.Position == AVCaptureDevicePosition.Back) { inputDevice2 = device; } } NSError error; if (inputDevice1.HasFlash) { inputDevice1.LockForConfiguration(out error); inputDevice1.FlashMode = AVCaptureFlashMode.Off; inputDevice1.UnlockForConfiguration(); FlashButton.TitleLabel.Text = "Flash Off"; } if (inputDevice2.HasFlash) { inputDevice2.LockForConfiguration(out error); inputDevice2.FlashMode = AVCaptureFlashMode.Off; inputDevice2.UnlockForConfiguration(); FlashButton.TitleLabel.Text = "Flash Off"; } frontCamera = AVCaptureDeviceInput.FromDevice(inputDevice1, out error); rearCamera = AVCaptureDeviceInput.FromDevice(inputDevice2, out error); currentDevice = inputDevice2; if (CaptureSession.CanAddInput(rearCamera)) { CaptureSession.AddInput(rearCamera); } AVCaptureVideoPreviewLayer previewLayer = new AVCaptureVideoPreviewLayer(CaptureSession); previewLayer.VideoGravity = AVLayerVideoGravity.ResizeAspectFill; previewLayer.Frame = View.Frame; View.Layer.InsertSublayer(previewLayer, 0); StillImageOutput = new AVCaptureStillImageOutput(); StillImageOutput.OutputSettings = new NSDictionary(AVVideo.CodecKey, AVVideo.CodecJPEG); CaptureSession.AddOutput(StillImageOutput); CaptureSession.StartRunning(); }
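SetupCamera above only wires up StillImageOutput; actually taking a photo is a separate call. A sketch of capturing a JPEG with the still image output using the async wrapper, assuming the same StillImageOutput member:

async Task<NSData> CapturePhotoAsync ()
{
    // Grab the video connection and ask the still image output for one frame.
    var videoConnection = StillImageOutput.ConnectionFromMediaType (AVMediaType.Video);
    var sampleBuffer = await StillImageOutput.CaptureStillImageTaskAsync (videoConnection);

    // Convert the raw sample buffer into JPEG data; from here it can be saved or shown in a UIImageView.
    return AVCaptureStillImageOutput.JpegStillToNSData (sampleBuffer);
}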
public override void ViewDidLoad() { base.ViewDidLoad(); this.View.BackgroundColor = UIColor.White; NSError error; // Setup detector options. var options = new CIDetectorOptions { Accuracy = FaceDetectorAccuracy.High, // Can give a hint here about the rects to detect. 1.4 would be for A4 sheets of paper for instance. AspectRatio = 1.41f, }; // Create a rectangle detector. Note that you can also create QR detector or a face detector. // Most of this code will also work with other detectors (like streaming to a preview layer and grabbing images). this.detector = CIDetector.CreateRectangleDetector(context: null, detectorOptions: options); // Create the session. The AVCaptureSession is the managing instance of the whole video handling. var captureSession = new AVCaptureSession() { // Defines what quality we want to use for the images we grab. Photo gives highest resolutions. SessionPreset = AVCaptureSession.PresetPhoto }; // Find a suitable AVCaptureDevice for video input. var device = AVCaptureDevice.DefaultDeviceWithMediaType(AVMediaType.Video); if (device == null) { // This will not work on the iOS Simulator - there is no camera. :-) throw new InvalidProgramException("Failed to get AVCaptureDevice for video input!"); } // Create a device input with the device and add it to the session. var videoInput = AVCaptureDeviceInput.FromDevice(device, out error); if (videoInput == null) { throw new InvalidProgramException("Failed to get AVCaptureDeviceInput from AVCaptureDevice!"); } // Let session read from the input, this is our source. captureSession.AddInput(videoInput); // Create output for the video stream. This is the destination. var videoOutput = new AVCaptureVideoDataOutput() { AlwaysDiscardsLateVideoFrames = true }; // Define the video format we want to use. Note that Xamarin exposes the CompressedVideoSetting and UncompressedVideoSetting // properties on AVCaptureVideoDataOutput un Unified API, but I could not get these to work. The VideoSettings property is deprecated, // so I use the WeakVideoSettings instead which takes an NSDictionary as input. this.videoSettingsDict = new NSMutableDictionary(); this.videoSettingsDict.Add(CVPixelBuffer.PixelFormatTypeKey, NSNumber.FromUInt32((uint)CVPixelFormatType.CV32BGRA)); videoOutput.WeakVideoSettings = this.videoSettingsDict; // Create a delegate to report back to us when an image has been captured. // We want to grab the camera stream and feed it through a AVCaptureVideoDataOutputSampleBufferDelegate // which allows us to get notified if a new image is availeble. An implementation of that delegate is VideoFrameSampleDelegate in this project. this.sampleBufferDelegate = new VideoFrameSamplerDelegate(); // Processing happens via Grand Central Dispatch (GCD), so we need to provide a queue. // This is pretty much like a system managed thread (see: http://zeroheroblog.com/ios/concurrency-in-ios-grand-central-dispatch-gcd-dispatch-queues). this.sessionQueue = new DispatchQueue("AVSessionQueue"); // Assign the queue and the delegate to the output. Now all output will go through the delegate. videoOutput.SetSampleBufferDelegate(this.sampleBufferDelegate, this.sessionQueue); // Add output to session. captureSession.AddOutput(videoOutput); // We also want to visualize the input stream. The raw stream can be fed into an AVCaptureVideoPreviewLayer, which is a subclass of CALayer. // A CALayer can be added to a UIView. We add that layer to the controller's main view. 
var layer = this.View.Layer; this.videoLayer = AVCaptureVideoPreviewLayer.FromSession(captureSession); this.videoLayer.Frame = layer.Bounds; layer.AddSublayer(this.videoLayer); // All setup! Start capturing! captureSession.StartRunning(); // This is just for information and allows you to get valid values for the detection framerate. Console.WriteLine("Available capture framerates:"); var rateRanges = device.ActiveFormat.VideoSupportedFrameRateRanges; foreach (var r in rateRanges) { Console.WriteLine(r.MinFrameRate + "; " + r.MaxFrameRate + "; " + r.MinFrameDuration + "; " + r.MaxFrameDuration); } // Configure framerate. Kind of weird way of doing it but the only one that works. device.LockForConfiguration(out error); // CMTime constructor means: 1 = one second, DETECTION_FPS = how many samples per unit, which is 1 second in this case. device.ActiveVideoMinFrameDuration = new CMTime(1, DETECTION_FPS); device.ActiveVideoMaxFrameDuration = new CMTime(1, DETECTION_FPS); device.UnlockForConfiguration(); // Put a small image view at the top left that shows the live image with the detected rectangle(s). this.imageViewOverlay = new UIImageView { ContentMode = UIViewContentMode.ScaleAspectFit, BackgroundColor = UIColor.Gray }; this.imageViewOverlay.Layer.BorderColor = UIColor.Red.CGColor; this.imageViewOverlay.Layer.BorderWidth = 3f; this.Add(this.imageViewOverlay); // Put another image view top right that shows the image with perspective correction. this.imageViewPerspective = new UIImageView { ContentMode = UIViewContentMode.ScaleAspectFit, BackgroundColor = UIColor.Gray }; this.imageViewPerspective.Layer.BorderColor = UIColor.Red.CGColor; this.imageViewPerspective.Layer.BorderWidth = 3f; this.Add(this.imageViewPerspective); // Add some lables for information. this.mainWindowLbl = new UILabel { Text = "Live stream from camera. Point camera to a rectangular object.", TextAlignment = UITextAlignment.Center }; this.Add(this.mainWindowLbl); this.detectionWindowLbl = new UILabel { Text = "Detected rectangle overlay", TextAlignment = UITextAlignment.Center }; this.Add(this.detectionWindowLbl); this.perspectiveWindowLbl = new UILabel { Text = "Perspective corrected", TextAlignment = UITextAlignment.Center }; this.Add(this.perspectiveWindowLbl); }
bool SetupCaptureSession() { session = new AVCaptureSession(); AVCaptureDevice device = AVCaptureDevice.DefaultDeviceWithMediaType(AVMediaType.Video); if (device == null) { Console.WriteLine("No video camera (in simulator?)"); return false; // simulator? } NSError error = null; AVCaptureDeviceInput input = AVCaptureDeviceInput.FromDevice(device, out error); if (input == null) Console.WriteLine("Error: " + error); else session.AddInput(input); AVCaptureMetadataOutput output = new AVCaptureMetadataOutput(); var dg = new CaptureDelegate(this); output.SetDelegate(dg, MonoTouch.CoreFoundation.DispatchQueue.MainQueue); session.AddOutput(output); // This could be any list of supported barcode types output.MetadataObjectTypes = new NSString[] {AVMetadataObject.TypeQRCode, AVMetadataObject.TypeAztecCode}; // OR you could just accept "all" with the following line; // output.MetadataObjectTypes = output.AvailableMetadataObjectTypes; // empty // DEBUG: use this if you're curious about the available types // foreach (var t in output.AvailableMetadataObjectTypes) // Console.WriteLine(t); AVCaptureVideoPreviewLayer previewLayer = new AVCaptureVideoPreviewLayer(session); //previewLayer.Frame = new RectangleF(0,0, View.Frame.Size.Width, View.Frame.Size.Height); previewLayer.Frame = new RectangleF(0, 0, 320, 290); previewLayer.VideoGravity = AVLayerVideoGravity.ResizeAspectFill.ToString(); View.Layer.AddSublayer (previewLayer); session.StartRunning(); Console.WriteLine("StartRunning"); return true; }
public override void FinishedLaunching(UIApplication application) { // Create a new capture session Session = new AVCaptureSession (); Session.SessionPreset = AVCaptureSession.PresetMedium; // Create a device input CaptureDevice = GetFrontCamera(); if (CaptureDevice == null) { // Video capture not supported, abort Console.WriteLine ("Video recording not supported on this device"); CameraAvailable = false; return; } // Prepare device for configuration CaptureDevice.LockForConfiguration (out Error); if (Error != null) { // There has been an issue, abort Console.WriteLine ("Error: {0}", Error.LocalizedDescription); CaptureDevice.UnlockForConfiguration (); return; } // Configure stream for 15 frames per second (fps) CaptureDevice.ActiveVideoMinFrameDuration = new CMTime (1, 15); // Unlock configuration CaptureDevice.UnlockForConfiguration (); // Get input from capture device Input = AVCaptureDeviceInput.FromDevice (CaptureDevice); if (Input == null) { // Error, report and abort Console.WriteLine ("Unable to gain input from capture device."); CameraAvailable = false; return; } // Attach input to session Session.AddInput (Input); // Create a new output var output = new AVCaptureVideoDataOutput (); var settings = new AVVideoSettingsUncompressed (); settings.PixelFormatType = CVPixelFormatType.CV32BGRA; output.WeakVideoSettings = settings.Dictionary; // Configure and attach the output to the session Queue = new DispatchQueue ("ManCamQueue"); Recorder = new OutputRecorder (); output.SetSampleBufferDelegate (Recorder, Queue); Session.AddOutput (output); // Configure and attach a still image output for bracketed capture StillImageOutput = new AVCaptureStillImageOutput (); var dict = new NSMutableDictionary(); dict[AVVideo.CodecKey] = AVVideo.CodecJPEG; StillImageOutput.OutputSettings = dict; Session.AddOutput (StillImageOutput); // Let tabs know that a camera is available CameraAvailable = true; }
bool SetupCaptureSession() { session = new AVCaptureSession () { SessionPreset = AVCaptureSession.PresetMedium }; AVCaptureDevice[] capDevices = AVCaptureDevice.DevicesWithMediaType(AVMediaType.Video); AVCaptureDeviceInput input = null; if (capDevices.Length != 0) input = AVCaptureDeviceInput.FromDevice (capDevices[0]); if (input == null){ new UIAlertView("Error", "Camera not available", null, "OK", null).Show(); Console.WriteLine ("Camera not available"); return false; } session.AddInput (input); var output = new AVCaptureVideoDataOutput () { VideoSettings = new AVVideoSettings (CVPixelFormatType.CV32BGRA), MinFrameDuration = new CMTime (1, 30) //second parameter is frames per second }; queue = new MonoTouch.CoreFoundation.DispatchQueue ("myQueue"); outputRecorder = new OutputRecorder (); output.SetSampleBufferDelegateAndQueue (outputRecorder, queue); session.AddOutput (output); session.StartRunning (); return true; }
void EnableCameraView() { if (captureSession != null) { return; } captureSession = new AVCaptureSession { SessionPreset = AVCaptureSession.Preset1280x720 }; captureDevice = AVCaptureDevice.GetDefaultDevice(AVMediaType.Video); if (captureDevice == null) { ReleaseCaptureSession(); return; } captureDevice.LockForConfiguration(out NSError error); var format = captureDevice.ActiveFormat; double epsilon = 0.00000001; var desiredFrameRate = 30; captureDevice.ActiveVideoMinFrameDuration = new CMTime(1, 15); foreach (var range in format.VideoSupportedFrameRateRanges) { if (range.MinFrameRate <= (desiredFrameRate + epsilon) && range.MaxFrameRate >= (desiredFrameRate - epsilon)) { var duration = new CMTime(1, desiredFrameRate, 0) { TimeFlags = CMTime.Flags.Valid }; var minDuration = new CMTime(1, (int)range.MinFrameRate, 0) { TimeFlags = CMTime.Flags.Valid }; captureDevice.ActiveVideoMaxFrameDuration = duration; captureDevice.ActiveVideoMinFrameDuration = duration; break; } } captureDevice.UnlockForConfiguration(); var input = AVCaptureDeviceInput.FromDevice(captureDevice); if (input == null) { ReleaseCaptureSession(); ReleaseCaptureDevice(); return; } captureSession.AddInput(input); captureVideoPreviewLayer = new AVCaptureVideoPreviewLayer(captureSession) { BackgroundColor = UIColor.Black.CGColor, VideoGravity = AVLayerVideoGravity.ResizeAspectFill, Frame = UIScreen.MainScreen.Bounds }; captureVideoPreviewLayer.Connection.VideoOrientation = CameraHelper.VideoOrientationFromCurrentDeviceOrientation(); View.Layer.InsertSublayer(captureVideoPreviewLayer, 0); var settings = new CVPixelBufferAttributes { PixelFormatType = CVPixelFormatType.CV32BGRA }; using (var output = new AVCaptureVideoDataOutput { WeakVideoSettings = settings.Dictionary }) { queue = new DispatchQueue("cameraoutputqueue"); outputRecorder = new CustomOutputRecorder(); output.AlwaysDiscardsLateVideoFrames = true; output.SetSampleBufferDelegateQueue(outputRecorder, queue); captureSession.AddOutput(output); var connection = output.ConnectionFromMediaType(AVMediaType.Video); if (connection != null) { connection.VideoOrientation = CameraHelper.VideoOrientationFromCurrentDeviceOrientation(); } } captureSession.StartRunning(); }
public override void FinishedLaunching(UIApplication application) { //Create a new capture session Session = new AVCaptureSession(); Session.SessionPreset = AVCaptureSession.PresetMedium; //create a device input CaptureDevice = AVCaptureDevice.DefaultDeviceWithMediaType(AVMediaType.Video); if (CaptureDevice == null) { //Video capture not supported, abort camera operation if (UIDevice.CurrentDevice.UserInterfaceIdiom == UIUserInterfaceIdiom.Pad) { errorCamera("No Camera detected", "Seems your " + UIDevice.CurrentDevice.UserInterfaceIdiom + " has no camera. You must have a camera installed to use this feature"); CameraAvailable = false; return; } else if (UIDevice.CurrentDevice.UserInterfaceIdiom == UIUserInterfaceIdiom.Phone) { errorCamera("No Camera detected", "Seems your " + UIDevice.CurrentDevice.UserInterfaceIdiom + " has no camera. You must have a camera installed to use this feature"); CameraAvailable = false; return; } } else { CaptureDevice.LockForConfiguration(out Error); if (Error != null) { Console.WriteLine("Error detected in camera configuration: {0} ", Error.LocalizedDescription); CaptureDevice.UnlockForConfiguration(); return; } else { //configure a stream for 40 frames per second fps CaptureDevice.ActiveVideoMinFrameDuration = new CMTime(1, 40); //unlock configuration CaptureDevice.UnlockForConfiguration(); //get input from capture device Input = AVCaptureDeviceInput.FromDevice(CaptureDevice); if (Input == null) { switch (UIDevice.CurrentDevice.UserInterfaceIdiom) { case UIUserInterfaceIdiom.Pad: errorCamera("No Input", "No input detected from the camera on your: " + UIUserInterfaceIdiom.Pad); CameraAvailable = false; return; break; case UIUserInterfaceIdiom.Phone: errorCamera("No Input", "No input detected from the camera on your: " + UIUserInterfaceIdiom.Phone); CameraAvailable = false; return; break; } } else { //attach input to session Session.AddInput(Input); //create a new output var output = new AVCaptureVideoDataOutput(); var settings = new AVVideoSettingsUncompressed(); settings.PixelFormatType = CVPixelFormatType.CV32BGRA; output.WeakVideoSettings = settings.Dictionary; //configure and attach to the output to the session Queue = new DispatchQueue("ManCamQueue"); Recorder = new OutputRecorder(); output.SetSampleBufferDelegate(Recorder, Queue); Session.AddOutput(output); CameraAvailable = true; } } } }
bool SetupCaptureSession () { // configure the capture session for low resolution, change this if your code // can cope with more data or volume session = new AVCaptureSession () { SessionPreset = AVCaptureSession.PresetMedium }; AVCaptureDevice captureDevice = null; var videoDevices = AVCaptureDevice.DevicesWithMediaType (AVMediaType.Video); foreach (AVCaptureDevice Device in videoDevices) { if (Device.Position == AVCaptureDevicePosition.Front) { captureDevice = Device; break; } } // create a device input and attach it to the session if(captureDevice==null){ captureDevice = AVCaptureDevice.DefaultDeviceWithMediaType (AVMediaType.Video); } if (captureDevice == null){ return false; } //Configure for 15 FPS. Note use of LockForConigfuration()/UnlockForConfiguration() NSError error = null; captureDevice.LockForConfiguration(out error); if(error != null) { captureDevice.UnlockForConfiguration(); return false; } if(UIDevice.CurrentDevice.CheckSystemVersion(7,0)) captureDevice.ActiveVideoMinFrameDuration = new CMTime (1,15); captureDevice.UnlockForConfiguration(); var input = AVCaptureDeviceInput.FromDevice (captureDevice); if (input == null){ return false; } session.AddInput (input); // create a VideoDataOutput and add it to the sesion var output = new AVCaptureVideoDataOutput () { VideoSettings = new AVVideoSettings (CVPixelFormatType.CV32BGRA), }; // configure the output queue = new MonoTouch.CoreFoundation.DispatchQueue ("myQueue"); outputRecorder = new OutputRecorder (_state); output.SetSampleBufferDelegate (outputRecorder, queue); session.AddOutput (output); session.StartRunning (); return true; }
bool SetupCaptureSession() { // configure the capture session for low resolution, change this if your code // can cope with more data or volume session = new AVCaptureSession() { SessionPreset = AVCaptureSession.Preset640x480 }; // create a device input and attach it to the session var captureDevice = AVCaptureDevice.DefaultDeviceWithMediaType(AVMediaType.Video); if (captureDevice == null) { Console.WriteLine("No captureDevice - this won't work on the simulator, try a physical device"); if (overlayView != null) { this.AddSubview(overlayView); this.BringSubviewToFront(overlayView); } return(false); } var input = AVCaptureDeviceInput.FromDevice(captureDevice); if (input == null) { Console.WriteLine("No input - this won't work on the simulator, try a physical device"); if (overlayView != null) { this.AddSubview(overlayView); this.BringSubviewToFront(overlayView); } return(false); } else { session.AddInput(input); } previewLayer = new AVCaptureVideoPreviewLayer(session); //Framerate set here (15 fps) if (previewLayer.RespondsToSelector(new Selector("connection"))) { previewLayer.Connection.VideoMinFrameDuration = new CMTime(1, 10); } previewLayer.LayerVideoGravity = AVLayerVideoGravity.ResizeAspectFill; previewLayer.Frame = this.Frame; previewLayer.Position = new PointF(this.Layer.Bounds.Width / 2, (this.Layer.Bounds.Height / 2)); layerView = new UIView(this.Frame); layerView.AutoresizingMask = UIViewAutoresizing.FlexibleWidth | UIViewAutoresizing.FlexibleHeight; layerView.Layer.AddSublayer(previewLayer); this.AddSubview(layerView); ResizePreview(UIApplication.SharedApplication.StatusBarOrientation); if (overlayView != null) { this.AddSubview(overlayView); this.BringSubviewToFront(overlayView); //overlayView.LayoutSubviews (); } session.StartRunning(); Console.WriteLine("RUNNING!!!"); // create a VideoDataOutput and add it to the sesion output = new AVCaptureVideoDataOutput() { //videoSettings VideoSettings = new AVVideoSettings(CVPixelFormatType.CV32BGRA), }; // configure the output queue = new MonoTouch.CoreFoundation.DispatchQueue("ZxingScannerView"); // (Guid.NewGuid().ToString()); var barcodeReader = new BarcodeReader(null, (img) => { var src = new RGBLuminanceSource(img); //, bmp.Width, bmp.Height); //Don't try and rotate properly if we're autorotating anyway if (ScanningOptions.AutoRotate.HasValue && ScanningOptions.AutoRotate.Value) { return(src); } var tmpInterfaceOrientation = UIInterfaceOrientation.Portrait; InvokeOnMainThread(() => tmpInterfaceOrientation = UIApplication.SharedApplication.StatusBarOrientation); switch (tmpInterfaceOrientation) { case UIInterfaceOrientation.Portrait: return(src.rotateCounterClockwise().rotateCounterClockwise().rotateCounterClockwise()); case UIInterfaceOrientation.PortraitUpsideDown: return(src.rotateCounterClockwise().rotateCounterClockwise().rotateCounterClockwise()); case UIInterfaceOrientation.LandscapeLeft: return(src); case UIInterfaceOrientation.LandscapeRight: return(src); } return(src); }, null, null); //(p, w, h, f) => new RGBLuminanceSource(p, w, h, RGBLuminanceSource.BitmapFormat.Unknown)); if (ScanningOptions.TryHarder.HasValue) { Console.WriteLine("TRY_HARDER: " + ScanningOptions.TryHarder.Value); barcodeReader.Options.TryHarder = ScanningOptions.TryHarder.Value; } if (ScanningOptions.PureBarcode.HasValue) { barcodeReader.Options.PureBarcode = ScanningOptions.PureBarcode.Value; } if (ScanningOptions.AutoRotate.HasValue) { Console.WriteLine("AUTO_ROTATE: " + ScanningOptions.AutoRotate.Value); barcodeReader.AutoRotate = 
ScanningOptions.AutoRotate.Value; } if (!string.IsNullOrEmpty(ScanningOptions.CharacterSet)) { barcodeReader.Options.CharacterSet = ScanningOptions.CharacterSet; } if (ScanningOptions.TryInverted.HasValue) { barcodeReader.TryInverted = ScanningOptions.TryInverted.Value; } if (ScanningOptions.PossibleFormats != null && ScanningOptions.PossibleFormats.Count > 0) { barcodeReader.Options.PossibleFormats = new List <BarcodeFormat>(); foreach (var pf in ScanningOptions.PossibleFormats) { barcodeReader.Options.PossibleFormats.Add(pf); } } outputRecorder = new OutputRecorder(ScanningOptions, img => { if (!IsAnalyzing) { return; } try { var started = DateTime.Now; var rs = barcodeReader.Decode(img); var total = DateTime.Now - started; Console.WriteLine("Decode Time: " + total.TotalMilliseconds + " ms"); if (rs != null) { resultCallback(rs); } } catch (Exception ex) { Console.WriteLine("DECODE FAILED: " + ex); } }); output.AlwaysDiscardsLateVideoFrames = true; output.SetSampleBufferDelegate(outputRecorder, queue); Console.WriteLine("SetupCamera Finished"); session.AddOutput(output); //session.StartRunning (); if (captureDevice.IsFocusModeSupported(AVCaptureFocusMode.ModeContinuousAutoFocus)) { NSError err = null; if (captureDevice.LockForConfiguration(out err)) { captureDevice.FocusMode = AVCaptureFocusMode.ModeContinuousAutoFocus; if (captureDevice.FocusPointOfInterestSupported) { captureDevice.FocusPointOfInterest = new PointF(0.5f, 0.5f); } captureDevice.UnlockForConfiguration(); } else { Console.WriteLine("Failed to Lock for Config: " + err.Description); } } return(true); }
bool SetupCaptureSession () { // configure the capture session for low resolution, change this if your code // can cope with more data or volume session = new AVCaptureSession () { SessionPreset = AVCaptureSession.Preset640x480 }; // create a device input and attach it to the session var captureDevice = AVCaptureDevice.DefaultDeviceWithMediaType (AVMediaType.Video); if (captureDevice == null){ Console.WriteLine ("No captureDevice - this won't work on the simulator, try a physical device"); return false; } var input = AVCaptureDeviceInput.FromDevice (captureDevice); if (input == null){ Console.WriteLine ("No input - this won't work on the simulator, try a physical device"); return false; } else session.AddInput (input); previewLayer = new AVCaptureVideoPreviewLayer(session); //Framerate set here (15 fps) if (previewLayer.RespondsToSelector(new Selector("connection"))) previewLayer.Connection.VideoMinFrameDuration = new CMTime(1, 10); previewLayer.LayerVideoGravity = AVLayerVideoGravity.ResizeAspectFill; previewLayer.Frame = this.Frame; previewLayer.Position = new PointF(this.Layer.Bounds.Width / 2, (this.Layer.Bounds.Height / 2)); layerView = new UIView(this.Frame); layerView.AutoresizingMask = UIViewAutoresizing.FlexibleWidth | UIViewAutoresizing.FlexibleHeight; layerView.Layer.AddSublayer(previewLayer); this.AddSubview(layerView); ResizePreview(UIApplication.SharedApplication.StatusBarOrientation); if (overlayView != null) { this.AddSubview (overlayView); this.BringSubviewToFront (overlayView); //overlayView.LayoutSubviews (); } session.StartRunning (); Console.WriteLine ("RUNNING!!!"); // create a VideoDataOutput and add it to the sesion output = new AVCaptureVideoDataOutput () { //videoSettings VideoSettings = new AVVideoSettings (CVPixelFormatType.CV32BGRA), }; // configure the output queue = new MonoTouch.CoreFoundation.DispatchQueue("ZxingScannerView"); // (Guid.NewGuid().ToString()); var barcodeReader = new BarcodeReader(null, (img) => { var src = new RGBLuminanceSource(img); //, bmp.Width, bmp.Height); //Don't try and rotate properly if we're autorotating anyway if (options.AutoRotate.HasValue && options.AutoRotate.Value) return src; switch (UIDevice.CurrentDevice.Orientation) { case UIDeviceOrientation.Portrait: return src.rotateCounterClockwise().rotateCounterClockwise().rotateCounterClockwise(); case UIDeviceOrientation.PortraitUpsideDown: return src.rotateCounterClockwise().rotateCounterClockwise().rotateCounterClockwise(); case UIDeviceOrientation.LandscapeLeft: return src; case UIDeviceOrientation.LandscapeRight: return src; } return src; }, null, null); //(p, w, h, f) => new RGBLuminanceSource(p, w, h, RGBLuminanceSource.BitmapFormat.Unknown)); if (this.options.TryHarder.HasValue) { Console.WriteLine("TRY_HARDER: " + this.options.TryHarder.Value); barcodeReader.Options.TryHarder = this.options.TryHarder.Value; } if (this.options.PureBarcode.HasValue) barcodeReader.Options.PureBarcode = this.options.PureBarcode.Value; if (this.options.AutoRotate.HasValue) { Console.WriteLine("AUTO_ROTATE: " + this.options.AutoRotate.Value); barcodeReader.AutoRotate = this.options.AutoRotate.Value; } if (!string.IsNullOrEmpty (this.options.CharacterSet)) barcodeReader.Options.CharacterSet = this.options.CharacterSet; if (this.options.TryInverted.HasValue) barcodeReader.TryInverted = this.options.TryInverted.Value; if (this.options.PossibleFormats != null && this.options.PossibleFormats.Count > 0) { barcodeReader.Options.PossibleFormats = new List<BarcodeFormat>(); foreach (var pf in 
this.options.PossibleFormats) barcodeReader.Options.PossibleFormats.Add(pf); } outputRecorder = new OutputRecorder (this.options, img => { try { var started = DateTime.Now; var rs = barcodeReader.Decode(img); var total = DateTime.Now - started; Console.WriteLine("Decode Time: " + total.TotalMilliseconds + " ms"); if (rs != null) resultCallback(rs); } catch (Exception ex) { Console.WriteLine("DECODE FAILED: " + ex); } }); output.AlwaysDiscardsLateVideoFrames = true; output.SetSampleBufferDelegate (outputRecorder, queue); Console.WriteLine("SetupCamera Finished"); session.AddOutput (output); //session.StartRunning (); if (captureDevice.IsFocusModeSupported(AVCaptureFocusMode.ModeContinuousAutoFocus)) { NSError err = null; if (captureDevice.LockForConfiguration(out err)) { captureDevice.FocusMode = AVCaptureFocusMode.ModeContinuousAutoFocus; if (captureDevice.FocusPointOfInterestSupported) captureDevice.FocusPointOfInterest = new PointF(0.5f, 0.5f); captureDevice.UnlockForConfiguration(); } else Console.WriteLine("Failed to Lock for Config: " + err.Description); } return true; }
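The scanner above starts the session but never shows how it gets torn down. A minimal teardown sketch for a view like this, assuming the same session, output, previewLayer and layerView fields used above, could look like the following.

// Hypothetical companion to the SetupCaptureSession above; field names are assumed from that sample.
public void StopScanning ()
{
	if (session == null)
		return;

	// Stop delivering frames before tearing the preview apart.
	if (session.Running)
		session.StopRunning ();

	// Detach the output so the sample buffer delegate stops being called.
	if (output != null)
		session.RemoveOutput (output);

	// Remove the preview from the view hierarchy.
	if (previewLayer != null)
		previewLayer.RemoveFromSuperLayer ();
	if (layerView != null)
		layerView.RemoveFromSuperview ();
}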
public async override void ViewDidLoad () { base.ViewDidLoad (); // Disable UI. The UI is enabled if and only if the session starts running. CameraButton.Enabled = false; RecordButton.Enabled = false; StillButton.Enabled = false; // Create the AVCaptureSession. Session = new AVCaptureSession (); // Setup the preview view. PreviewView.Session = Session; // Communicate with the session and other session objects on this queue. SessionQueue = new DispatchQueue ("session queue"); SetupResult = AVCamSetupResult.Success; // Check video authorization status. Video access is required and audio access is optional. // If audio access is denied, audio is not recorded during movie recording. switch (AVCaptureDevice.GetAuthorizationStatus (AVMediaType.Video)) { // The user has previously granted access to the camera. case AVAuthorizationStatus.Authorized: break; // The user has not yet been presented with the option to grant video access. // We suspend the session queue to delay session setup until the access request has completed to avoid // asking the user for audio access if video access is denied. // Note that audio access will be implicitly requested when we create an AVCaptureDeviceInput for audio during session setup. case AVAuthorizationStatus.NotDetermined: SessionQueue.Suspend (); var granted = await AVCaptureDevice.RequestAccessForMediaTypeAsync (AVMediaType.Video); if (!granted) SetupResult = AVCamSetupResult.CameraNotAuthorized; SessionQueue.Resume (); break; // The user has previously denied access. default: SetupResult = AVCamSetupResult.CameraNotAuthorized; break; } // Setup the capture session. // In general it is not safe to mutate an AVCaptureSession or any of its inputs, outputs, or connections from multiple threads at the same time. // Why not do all of this on the main queue? // Because AVCaptureSession.StartRunning is a blocking call which can take a long time. We dispatch session setup to the sessionQueue // so that the main queue isn't blocked, which keeps the UI responsive. SessionQueue.DispatchAsync (() => { if (SetupResult != AVCamSetupResult.Success) return; backgroundRecordingID = -1; NSError error; AVCaptureDevice videoDevice = CreateDevice (AVMediaType.Video, AVCaptureDevicePosition.Back); AVCaptureDeviceInput videoDeviceInput = AVCaptureDeviceInput.FromDevice (videoDevice, out error); if (videoDeviceInput == null) Console.WriteLine ("Could not create video device input: {0}", error); Session.BeginConfiguration (); if (Session.CanAddInput (videoDeviceInput)) { Session.AddInput (VideoDeviceInput = videoDeviceInput); DispatchQueue.MainQueue.DispatchAsync (() => { // Why are we dispatching this to the main queue? // Because AVCaptureVideoPreviewLayer is the backing layer for PreviewView and UIView // can only be manipulated on the main thread. // Note: As an exception to the above rule, it is not necessary to serialize video orientation changes // on the AVCaptureVideoPreviewLayer’s connection with other session manipulation. // Use the status bar orientation as the initial video orientation. Subsequent orientation changes are handled by // ViewWillTransitionToSize method. 
UIInterfaceOrientation statusBarOrientation = UIApplication.SharedApplication.StatusBarOrientation; AVCaptureVideoOrientation initialVideoOrientation = AVCaptureVideoOrientation.Portrait; if (statusBarOrientation != UIInterfaceOrientation.Unknown) initialVideoOrientation = (AVCaptureVideoOrientation)(long)statusBarOrientation; var previewLayer = (AVCaptureVideoPreviewLayer)PreviewView.Layer; previewLayer.Connection.VideoOrientation = initialVideoOrientation; }); } else { Console.WriteLine ("Could not add video device input to the session"); SetupResult = AVCamSetupResult.SessionConfigurationFailed; } AVCaptureDevice audioDevice = AVCaptureDevice.DefaultDeviceWithMediaType (AVMediaType.Audio); AVCaptureDeviceInput audioDeviceInput = AVCaptureDeviceInput.FromDevice (audioDevice, out error); if (audioDeviceInput == null) Console.WriteLine ("Could not create audio device input: {0}", error); if (Session.CanAddInput (audioDeviceInput)) Session.AddInput (audioDeviceInput); else Console.WriteLine ("Could not add audio device input to the session"); var movieFileOutput = new AVCaptureMovieFileOutput (); if (Session.CanAddOutput (movieFileOutput)) { Session.AddOutput (MovieFileOutput = movieFileOutput); AVCaptureConnection connection = movieFileOutput.ConnectionFromMediaType (AVMediaType.Video); if (connection.SupportsVideoStabilization) connection.PreferredVideoStabilizationMode = AVCaptureVideoStabilizationMode.Auto; } else { Console.WriteLine ("Could not add movie file output to the session"); SetupResult = AVCamSetupResult.SessionConfigurationFailed; } var stillImageOutput = new AVCaptureStillImageOutput (); if (Session.CanAddOutput (stillImageOutput)) { stillImageOutput.CompressedVideoSetting = new AVVideoSettingsCompressed { Codec = AVVideoCodec.JPEG }; Session.AddOutput (StillImageOutput = stillImageOutput); } else { Console.WriteLine ("Could not add still image output to the session"); SetupResult = AVCamSetupResult.SessionConfigurationFailed; } Session.CommitConfiguration (); }); }
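ViewDidLoad above only configures the session on SessionQueue; in the AVCam pattern the session is actually started and stopped from the appearance callbacks. A minimal sketch, assuming the same Session, SessionQueue and SetupResult members:

public override void ViewWillAppear (bool animated)
{
	base.ViewWillAppear (animated);
	SessionQueue.DispatchAsync (() => {
		// Only start running if setup succeeded; otherwise surface the failure to the user.
		if (SetupResult == AVCamSetupResult.Success)
			Session.StartRunning ();
		else
			Console.WriteLine ("Session setup failed: {0}", SetupResult);
	});
}

public override void ViewDidDisappear (bool animated)
{
	base.ViewDidDisappear (animated);
	SessionQueue.DispatchAsync (() => {
		if (SetupResult == AVCamSetupResult.Success)
			Session.StopRunning ();
	});
}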
private void StartCameraWithCompletionHandler (Action<bool, NSError> completion)
{
	captureSession = new AVCaptureSession ();
	captureSession.BeginConfiguration ();

	captureDevice = CameraDeviceForPosition (AVCaptureDevicePosition.Back);
	if (captureDevice == null) {
		ShowErrorMessage ("The back camera could not be found.", "Error");
		return;
	}

	NSError error;
	AVCaptureDeviceInput deviceInput = AVCaptureDeviceInput.FromDevice (captureDevice, out error);
	if (deviceInput == null) {
		Console.WriteLine ("This error should be handled appropriately in your app -- obtain device input: {0}", error);
		ShowErrorMessage ("The back camera could not be opened.", "Error");
		return;
	}
	captureSession.AddInput (deviceInput);

	stillImageOutput = new AVCaptureStillImageOutput ();
	// Instead of JPEG we could also use one of the raw pixel formats: BGRA or 420f.
	stillImageOutput.OutputSettings = new NSDictionary (AVVideo.CodecKey, AVVideo.CodecJPEG);
	captureSession.AddOutput (stillImageOutput);

	cameraPreviewView.ConfigureCaptureSession (captureSession, stillImageOutput);
	captureSession.SessionPreset = AVCaptureSession.PresetPhoto;
	captureDeviceFormat = captureDevice.ActiveFormat;
	captureSession.CommitConfiguration ();
	captureSession.StartRunning ();
	maxBracketCount = stillImageOutput.MaxBracketedCaptureStillImageCount;
	PrepareBracketsWithCompletionHandler (completion);
}
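ShowErrorMessage is not shown in this sample. One plausible implementation, a hypothetical helper rather than the original code, is a thin UIAlertController wrapper:

// Hypothetical implementation of the ShowErrorMessage helper used above.
void ShowErrorMessage (string message, string title)
{
	var alert = UIAlertController.Create (title, message, UIAlertControllerStyle.Alert);
	alert.AddAction (UIAlertAction.Create ("OK", UIAlertActionStyle.Default, null));
	PresentViewController (alert, true, null);
}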
bool SetupCaptureSession() { //Console.WriteLine ("SetupCaptureSession"); // Overview: RosyWriter uses separate GCD queues for audio and video capture. If a single GCD queue // is used to deliver both audio and video buffers, and our video processing consistently takes // too long, the delivery queue can back up, resulting in audio being dropped. // // When recording, RosyWriter creates a third GCD queue for calls to AVAssetWriter. This ensures // that AVAssetWriter is not called to start or finish writing from multiple threads simultaneously. // // RosyWriter uses AVCaptureSession's default preset, AVCaptureSessionPresetHigh. // Create Capture session captureSession = new AVCaptureSession(); captureSession.BeginConfiguration(); // Create audio connection NSError error; var audioDevice = AVCaptureDevice.DefaultDeviceWithMediaType(AVMediaType.Audio); if (audioDevice == null) { return(false); // e.g. simulator } var audioIn = new AVCaptureDeviceInput(audioDevice, out error); if (captureSession.CanAddInput(audioIn)) { captureSession.AddInput(audioIn); } var audioOut = new AVCaptureAudioDataOutput(); var audioCaptureQueue = new DispatchQueue("Audio Capture Queue"); // Add the Delegate to capture each sample that comes through audioOut.SetSampleBufferDelegateQueue(this, audioCaptureQueue); if (captureSession.CanAddOutput(audioOut)) { captureSession.AddOutput(audioOut); } audioConnection = audioOut.ConnectionFromMediaType(AVMediaType.Audio); // Create Video Session var videoDevice = VideoDeviceWithPosition(AVCaptureDevicePosition.Back); var videoIn = new AVCaptureDeviceInput(videoDevice, out error); if (captureSession.CanAddInput(videoIn)) { captureSession.AddInput(videoIn); } // RosyWriter prefers to discard late video frames early in the capture pipeline, since its // processing can take longer than real-time on some platforms (such as iPhone 3GS). // Clients whose image processing is faster than real-time should consider setting AVCaptureVideoDataOutput's // alwaysDiscardsLateVideoFrames property to NO. var videoOut = new AVCaptureVideoDataOutput { AlwaysDiscardsLateVideoFrames = true, // HACK: Change VideoSettings to WeakVideoSettings, and AVVideoSettings to CVPixelBufferAttributes // VideoSettings = new AVVideoSettings (CVPixelFormatType.CV32BGRA) WeakVideoSettings = new CVPixelBufferAttributes() { PixelFormatType = CVPixelFormatType.CV32BGRA }.Dictionary }; // Create a DispatchQueue for the Video Processing var videoCaptureQueue = new DispatchQueue("Video Capture Queue"); videoOut.SetSampleBufferDelegateQueue(this, videoCaptureQueue); if (captureSession.CanAddOutput(videoOut)) { captureSession.AddOutput(videoOut); } // Set the Video connection from the Video Output object videoConnection = videoOut.ConnectionFromMediaType(AVMediaType.Video); videoOrientation = videoConnection.VideoOrientation; captureSession.CommitConfiguration(); return(true); }
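Both the audio and video outputs above deliver their buffers to this (the same class), so it has to implement the sample buffer delegate callback. A minimal sketch of that callback, assuming the audioConnection and videoConnection fields from the method above; the real RosyWriter sample does per-frame processing and asset writing here:

// Requires: using Foundation; using AVFoundation; using CoreMedia;
[Export ("captureOutput:didOutputSampleBuffer:fromConnection:")]
public void DidOutputSampleBuffer (AVCaptureOutput captureOutput, CMSampleBuffer sampleBuffer, AVCaptureConnection connection)
{
	try {
		if (connection == videoConnection) {
			// Handle a video frame, e.g. hand the pixel buffer to an AVAssetWriterInput.
		} else if (connection == audioConnection) {
			// Handle an audio buffer.
		}
	} finally {
		// Sample buffers must be released quickly or the capture pipeline will start dropping frames.
		sampleBuffer.Dispose ();
	}
}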
// Call this on the session queue. void ConfigureSession() { if (setupResult != AVCamSetupResult.Success) { return; } NSError error = null; session.BeginConfiguration(); /* * We do not create an AVCaptureMovieFileOutput when setting up the session because the * AVCaptureMovieFileOutput does not support movie recording with AVCaptureSessionPresetPhoto. */ session.SessionPreset = AVCaptureSession.PresetPhoto; // Add video input. // Choose the back dual camera if available, otherwise default to a wide angle camera. var videoDevice = AVCaptureDevice.GetDefaultDevice(AVCaptureDeviceType.BuiltInDualCamera, AVMediaType.Video, AVCaptureDevicePosition.Back); if (videoDevice == null) { // If the back dual camera is not available, default to the back wide angle camera. videoDevice = AVCaptureDevice.GetDefaultDevice(AVCaptureDeviceType.BuiltInWideAngleCamera, AVMediaType.Video, AVCaptureDevicePosition.Back); // In some cases where users break their phones, the back wide angle camera is not available. In this case, we should default to the front wide angle camera. if (videoDevice == null) { videoDevice = AVCaptureDevice.GetDefaultDevice(AVCaptureDeviceType.BuiltInWideAngleCamera, AVMediaType.Video, AVCaptureDevicePosition.Front); } } var lVideoDeviceInput = AVCaptureDeviceInput.FromDevice(videoDevice, out error); if (lVideoDeviceInput == null) { Console.WriteLine($"Could not create video device input: {error}"); setupResult = AVCamSetupResult.SessionConfigurationFailed; session.CommitConfiguration(); return; } if (session.CanAddInput(lVideoDeviceInput)) { session.AddInput(lVideoDeviceInput); videoDeviceInput = lVideoDeviceInput; DispatchQueue.MainQueue.DispatchAsync(() => { /* * Why are we dispatching this to the main queue? * Because AVCaptureVideoPreviewLayer is the backing layer for AVCamPreviewView and UIView * can only be manipulated on the main thread. * Note: As an exception to the above rule, it is not necessary to serialize video orientation changes * on the AVCaptureVideoPreviewLayer’s connection with other session manipulation. * * Use the status bar orientation as the initial video orientation. Subsequent orientation changes are * handled by -[AVCamCameraViewController viewWillTransitionToSize:withTransitionCoordinator:]. */ var statusBarOrientation = UIApplication.SharedApplication.StatusBarOrientation; var initialVideoOrientation = AVCaptureVideoOrientation.Portrait; if (statusBarOrientation != UIInterfaceOrientation.Unknown) { initialVideoOrientation = (AVCaptureVideoOrientation)statusBarOrientation; } VideoPreviewLayer.Connection.VideoOrientation = initialVideoOrientation; }); } else { Console.WriteLine(@"Could not add video device input to the session"); setupResult = AVCamSetupResult.SessionConfigurationFailed; session.CommitConfiguration(); return; } // Add audio input. var audioDevice = AVCaptureDevice.GetDefaultDevice(AVMediaType.Audio); var audioDeviceInput = AVCaptureDeviceInput.FromDevice(audioDevice, out error); if (audioDeviceInput == null) { Console.WriteLine($"Could not create audio device input: {error}"); } if (session.CanAddInput(audioDeviceInput)) { session.AddInput(audioDeviceInput); } else { Console.WriteLine(@"Could not add audio device input to the session"); } // Add photo output. 
var lPhotoOutput = new AVCapturePhotoOutput(); if (session.CanAddOutput(lPhotoOutput)) { session.AddOutput(lPhotoOutput); photoOutput = lPhotoOutput; photoOutput.IsHighResolutionCaptureEnabled = true; photoOutput.IsLivePhotoCaptureEnabled = photoOutput.IsLivePhotoCaptureSupported; //photoOutput.IsDepthDataDeliveryEnabled(photoOutput.IsDepthDataDeliverySupported()); livePhotoMode = photoOutput.IsLivePhotoCaptureSupported ? AVCamLivePhotoMode.On : AVCamLivePhotoMode.Off; //depthDataDeliveryMode = photoOutput.IsDepthDataDeliverySupported() ? AVCamDepthDataDeliveryMode.On : AVCamDepthDataDeliveryMode.Off; inProgressPhotoCaptureDelegates = new Dictionary <long, AVCamPhotoCaptureDelegate>(); inProgressLivePhotoCapturesCount = 0; } else { Console.WriteLine(@"Could not add photo output to the session"); setupResult = AVCamSetupResult.SessionConfigurationFailed; session.CommitConfiguration(); return; } backgroundRecordingId = UIApplication.BackgroundTaskInvalid; session.CommitConfiguration(); }
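With the photo output configured, a capture is later triggered (typically from a shutter button) by handing AVCapturePhotoSettings and a capture delegate to CapturePhoto. A rough sketch follows; the TakePhoto helper and SimplePhotoCaptureDelegate class are illustrative stand-ins, not the AVCamPhotoCaptureDelegate used by the real sample:

// Hypothetical trigger + delegate; the real AVCam sample adds flash, Live Photo and
// orientation handling around this.
void TakePhoto ()
{
	var settings = AVCapturePhotoSettings.Create ();
	photoOutput.CapturePhoto (settings, new SimplePhotoCaptureDelegate ());
}

class SimplePhotoCaptureDelegate : AVCapturePhotoCaptureDelegate
{
	public override void DidFinishProcessingPhoto (AVCapturePhotoOutput output, AVCapturePhoto photo, NSError error)
	{
		if (error != null) {
			Console.WriteLine ("Photo capture failed: {0}", error);
			return;
		}
		// 'photo' holds the captured image; convert and save it on a background queue here.
	}
}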
private void initializeRecorder ()
{
	// Initialize the session
	session = new AVCaptureSession ();

	// Get inputs and add them to the session
	try {
		// Prefer the front facing camera
		foreach (AVCaptureDevice capdevice in AVCaptureDevice.Devices) {
			if (capdevice.HasMediaType (AVMediaType.Video) && capdevice.Position == AVCaptureDevicePosition.Front) {
				device = capdevice;
				break;
			}
		}
		//device = AVCaptureDevice.DefaultDeviceWithMediaType (AVMediaType.Video);
		input = AVCaptureDeviceInput.FromDevice (device);
		session.AddInput (input);

		// Add the audio capture device
		audioDevice = AVCaptureDevice.DefaultDeviceWithMediaType (AVMediaType.Audio);
		audioInput = AVCaptureDeviceInput.FromDevice (audioDevice);
		session.AddInput (audioInput);
	} catch (Exception ex) {
		// Don't swallow the failure silently; at least log it.
		Console.WriteLine ("Failed to initialize the recorder: " + ex);
		return;
	}
}
protected void Initialize() { // configure the capture session for medium resolution, change this if your code // can cope with more data or volume CaptureSession = new AVCaptureSession { SessionPreset = AVCaptureSession.PresetMedium }; previewLayer = new AVCaptureVideoPreviewLayer(CaptureSession) { Frame = Bounds, VideoGravity = AVLayerVideoGravity.ResizeAspectFill }; var videoDevices = AVCaptureDevice.DevicesWithMediaType(AVMediaType.Video); var cameraPosition = (cameraOptions == CameraOptions.Front) ? AVCaptureDevicePosition.Front : AVCaptureDevicePosition.Back; var device = videoDevices.FirstOrDefault(d => d.Position == cameraPosition); if (device == null) { return; } // SET to slow motion NSError error; var input = new AVCaptureDeviceInput(device, out error); movieFileOutput = new AVCaptureMovieFileOutput { //set max record time to 10 minutes MaxRecordedDuration = CMTime.FromSeconds(600, 1) }; photoFileOutput = new AVCapturePhotoOutput(); photoFileOutput.IsHighResolutionCaptureEnabled = true; if (CaptureSession.CanAddOutput(movieFileOutput)) { CaptureSession.BeginConfiguration(); CaptureSession.AddOutput(movieFileOutput); CaptureSession.AddOutput(photoFileOutput); var ranges = device.ActiveFormat.VideoSupportedFrameRateRanges; if (device.LockForConfiguration(out error)) { device.ActiveVideoMinFrameDuration = new CMTime(1, (int)ranges.First().MinFrameRate); device.ActiveVideoMaxFrameDuration = new CMTime(1, (int)ranges.First().MaxFrameRate); } var connection = movieFileOutput.ConnectionFromMediaType(AVMediaType.Video); if (connection != null) { if (connection.SupportsVideoStabilization) { connection.PreferredVideoStabilizationMode = AVCaptureVideoStabilizationMode.Auto; } } CaptureSession.CommitConfiguration(); } CaptureSession.AddInput(input); Layer.AddSublayer(previewLayer); CaptureSession.StartRunning(); // set frame rate if Slow-mo is requested if (speedOptions == SpeedOptions.SlowMo) { foreach (var vFormat in device.Formats) { var _ranges = vFormat.VideoSupportedFrameRateRanges as AVFrameRateRange[]; var frameRates = _ranges[0]; if (frameRates.MaxFrameRate >= 240.0) { device.LockForConfiguration(out NSError _error); if (_error is null) { device.ActiveFormat = vFormat as AVCaptureDeviceFormat; device.ActiveVideoMinFrameDuration = frameRates.MinFrameDuration; device.ActiveVideoMaxFrameDuration = frameRates.MaxFrameDuration; device.UnlockForConfiguration(); break; } } } } IsPreviewing = true; }
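Once movieFileOutput is attached to the session, recording is started and stopped against a file URL with a recording delegate. A minimal sketch using the movieFileOutput configured above; the output path and the RecordingDelegate class are assumptions, not part of the original sample:

// Hypothetical start/stop helpers for the movieFileOutput configured in Initialize().
// Requires: using System.IO; using Foundation; using AVFoundation;
public void StartRecording ()
{
	var outputPath = Path.Combine (Path.GetTempPath (), "capture.mov");
	movieFileOutput.StartRecordingToOutputFile (NSUrl.FromFilename (outputPath), new RecordingDelegate ());
}

public void StopRecording ()
{
	if (movieFileOutput.Recording)
		movieFileOutput.StopRecording ();
}

class RecordingDelegate : AVCaptureFileOutputRecordingDelegate
{
	public override void FinishedRecording (AVCaptureFileOutput captureOutput, NSUrl outputFileUrl, NSObject[] connections, NSError error)
	{
		if (error != null)
			Console.WriteLine ("Recording failed: {0}", error);
		else
			Console.WriteLine ("Recording saved to {0}", outputFileUrl.Path);
	}
}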
void SetupAVCapture (NSString sessionPreset) { if ((videoTextureCache = CVOpenGLESTextureCache.FromEAGLContext (context)) == null){ Console.WriteLine ("Could not create the CoreVideo TextureCache"); return; } session = new AVCaptureSession (); session.BeginConfiguration (); // Preset size session.SessionPreset = sessionPreset; // Input device var videoDevice = AVCaptureDevice.DefaultDeviceWithMediaType (AVMediaType.Video); if (videoDevice == null){ Console.WriteLine ("No video device"); return; } NSError err; var input = new AVCaptureDeviceInput (videoDevice, out err); if (err != null){ Console.WriteLine ("Error creating video capture device"); return; } session.AddInput (input); // Create the output device var dataOutput = new AVCaptureVideoDataOutput () { AlwaysDiscardsLateVideoFrames = true, // YUV 420, use "BiPlanar" to split the Y and UV planes in two separate blocks of // memory, then we can index 0 to get the Y and 1 for the UV planes in the frame decoding VideoSettings = new AVVideoSettings (CVPixelFormatType.CV420YpCbCr8BiPlanarFullRange) }; dataOutputDelegate = new DataOutputDelegate (this); // // This dispatches the video frames into the main thread, because the OpenGL // code is accessing the data synchronously. // dataOutput.SetSampleBufferDelegateAndQueue (dataOutputDelegate, DispatchQueue.MainQueue); session.AddOutput (dataOutput); session.CommitConfiguration (); session.StartRunning (); }
bool SetupCaptureSession () { //Console.WriteLine ("SetupCaptureSession"); // Overview: RosyWriter uses separate GCD queues for audio and video capture. If a single GCD queue // is used to deliver both audio and video buffers, and our video processing consistently takes // too long, the delivery queue can back up, resulting in audio being dropped. // // When recording, RosyWriter creates a third GCD queue for calls to AVAssetWriter. This ensures // that AVAssetWriter is not called to start or finish writing from multiple threads simultaneously. // // RosyWriter uses AVCaptureSession's default preset, AVCaptureSessionPresetHigh. // Create Capture session captureSession = new AVCaptureSession (); captureSession.BeginConfiguration (); // Create audio connection NSError error; var audioDevice = AVCaptureDevice.DefaultDeviceWithMediaType (AVMediaType.Audio); if (audioDevice == null) return false; // e.g. simulator var audioIn = new AVCaptureDeviceInput (audioDevice, out error); if (captureSession.CanAddInput (audioIn)) captureSession.AddInput (audioIn); var audioOut = new AVCaptureAudioDataOutput (); var audioCaptureQueue = new DispatchQueue ("Audio Capture Queue"); // Add the Delegate to capture each sample that comes through audioOut.SetSampleBufferDelegateQueue (this, audioCaptureQueue); if (captureSession.CanAddOutput (audioOut)) captureSession.AddOutput (audioOut); audioConnection = audioOut.ConnectionFromMediaType (AVMediaType.Audio); // Create Video Session var videoDevice = VideoDeviceWithPosition (AVCaptureDevicePosition.Back); var videoIn = new AVCaptureDeviceInput (videoDevice, out error); if (captureSession.CanAddInput (videoIn)) captureSession.AddInput (videoIn); // RosyWriter prefers to discard late video frames early in the capture pipeline, since its // processing can take longer than real-time on some platforms (such as iPhone 3GS). // Clients whose image processing is faster than real-time should consider setting AVCaptureVideoDataOutput's // alwaysDiscardsLateVideoFrames property to NO. var videoOut = new AVCaptureVideoDataOutput { AlwaysDiscardsLateVideoFrames = true, VideoSettings = new AVVideoSettings (CVPixelFormatType.CV32BGRA) }; // Create a DispatchQueue for the Video Processing var videoCaptureQueue = new DispatchQueue ("Video Capture Queue"); videoOut.SetSampleBufferDelegateQueue (this, videoCaptureQueue); if (captureSession.CanAddOutput (videoOut)) captureSession.AddOutput (videoOut); // Set the Video connection from the Video Output object videoConnection = videoOut.ConnectionFromMediaType (AVMediaType.Video); videoOrientation = videoConnection.VideoOrientation; captureSession.CommitConfiguration (); return true; }
void Initialize(bool defaultTorchOn, bool vibrationOnDetected, bool startScanningOnCreate) { Configuration.IsScanning = startScanningOnCreate; CaptureSession = new AVCaptureSession(); CaptureSession.BeginConfiguration(); this.AutoresizingMask = UIViewAutoresizing.FlexibleDimensions; previewLayer = new AVCaptureVideoPreviewLayer(CaptureSession) { Frame = this.Bounds, VideoGravity = AVLayerVideoGravity.ResizeAspectFill }; var videoDevices = AVCaptureDevice.DevicesWithMediaType(AVMediaType.Video); var cameraPosition = AVCaptureDevicePosition.Back; //var cameraPosition = (cameraOptions == CameraOptions.Front) ? AVCaptureDevicePosition.Front : AVCaptureDevicePosition.Back; var device = videoDevices.FirstOrDefault(d => d.Position == cameraPosition); if (device == null) { return; } NSError error; var input = new AVCaptureDeviceInput(device, out error); CaptureSession.AddInput(input); CaptureSession.SessionPreset = AVFoundation.AVCaptureSession.Preset1280x720; Layer.AddSublayer(previewLayer); CaptureSession.CommitConfiguration(); VideoDataOutput = new AVCaptureVideoDataOutput { AlwaysDiscardsLateVideoFrames = true, WeakVideoSettings = new CVPixelBufferAttributes { PixelFormatType = CVPixelFormatType.CV32BGRA } .Dictionary }; captureVideoDelegate = new CaptureVideoDelegate(vibrationOnDetected); captureVideoDelegate.OnDetected += (list) => { InvokeOnMainThread(() => { //CaptureSession.StopRunning(); this.OnDetected?.Invoke(list); }); }; VideoDataOutput.SetSampleBufferDelegateQueue(captureVideoDelegate, CoreFoundation.DispatchQueue.MainQueue); CaptureSession.AddOutput(VideoDataOutput); InvokeOnMainThread(() => { CaptureSession.StartRunning(); //Torch on by default if (defaultTorchOn && !GoogleVisionBarCodeScanner.Methods.IsTorchOn()) { GoogleVisionBarCodeScanner.Methods.ToggleFlashlight(); } }); }
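The GoogleVisionBarCodeScanner.Methods.ToggleFlashlight() call above is library-specific. The underlying AVFoundation torch toggle looks roughly like the following sketch; this is a generic illustration, not the library's actual implementation:

// Generic torch toggle for a capture device.
static void ToggleTorch (AVCaptureDevice device)
{
	if (device == null || !device.HasTorch)
		return;

	NSError error;
	if (device.LockForConfiguration (out error)) {
		device.TorchMode = device.TorchMode == AVCaptureTorchMode.On
			? AVCaptureTorchMode.Off
			: AVCaptureTorchMode.On;
		device.UnlockForConfiguration ();
	} else {
		Console.WriteLine ("Could not lock device for torch configuration: {0}", error);
	}
}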
private void BeginSession() { try { NSError error = null; var deviceInput = new AVCaptureDeviceInput(captureDevice, out error); if (error == null && captureSession.CanAddInput(deviceInput)) { captureSession.AddInput(deviceInput); } previewLayer = new AVCaptureVideoPreviewLayer(captureSession) { VideoGravity = AVLayerVideoGravity.ResizeAspect }; //this.HomeView.BackgroundColor = UIColor.Black; previewLayer.Frame = this.HomeView.Layer.Bounds; this.HomeView.Layer.AddSublayer(previewLayer); captureDevice.LockForConfiguration(out error); if (error != null) { Console.WriteLine(error); captureDevice.UnlockForConfiguration(); return; } if (UIDevice.CurrentDevice.CheckSystemVersion(7, 0)) { captureDevice.ActiveVideoMinFrameDuration = new CMTime(1, 15); } captureDevice.UnlockForConfiguration(); captureSession.StartRunning(); // create a VideoDataOutput and add it to the sesion videoOut = new AVCaptureVideoDataOutput() { AlwaysDiscardsLateVideoFrames = true, WeakVideoSettings = new CVPixelBufferAttributes() { PixelFormatType = CVPixelFormatType.CV32BGRA }.Dictionary }; if (captureSession.CanAddOutput(videoOut)) { captureSession.AddOutput(videoOut); } captureSession.CommitConfiguration(); setupAVFoundationFaceDetection(); //var OutputSampleDelegate = new VideoCapture( //(s) => //{ // GreetingsLabel.Text = s; // PopulateList(s); //}, new Action<CIImage, CGRect>(DrawFaces)); //videoOut.SetSampleBufferDelegateQueue(OutputSampleDelegate, sessionQueue); } catch (Exception ex) { Console.WriteLine(ex.Message); } }
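setupAVFoundationFaceDetection() is not shown in this sample. Face detection with AVFoundation typically means adding an AVCaptureMetadataOutput restricted to face metadata; a sketch under that assumption, with a hypothetical delegate class:

// Possible shape of setupAVFoundationFaceDetection(); FaceMetadataDelegate is illustrative only.
void setupAVFoundationFaceDetection ()
{
	var metadataOutput = new AVCaptureMetadataOutput ();
	if (!captureSession.CanAddOutput (metadataOutput))
		return;

	captureSession.AddOutput (metadataOutput);

	// Face metadata can only be requested after the output is attached to the session.
	metadataOutput.MetadataObjectTypes = AVMetadataObjectType.Face;
	metadataOutput.SetDelegate (new FaceMetadataDelegate (), DispatchQueue.MainQueue);
}

class FaceMetadataDelegate : AVCaptureMetadataOutputObjectsDelegate
{
	public override void DidOutputMetadataObjects (AVCaptureMetadataOutput captureOutput, AVMetadataObject[] metadataObjects, AVCaptureConnection connection)
	{
		Console.WriteLine ("Detected {0} face(s)", metadataObjects.Length);
	}
}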
public override void ViewDidLoad () { base.ViewDidLoad (); this.View.BackgroundColor = UIColor.White; NSError error; // Setup detector options. var options = new CIDetectorOptions { Accuracy = FaceDetectorAccuracy.High, // Can give a hint here about the rects to detect. 1.4 would be for A4 sheets of paper for instance. AspectRatio = 1.41f, }; // Create a rectangle detector. Note that you can also create QR detector or a face detector. // Most of this code will also work with other detectors (like streaming to a preview layer and grabbing images). this.detector = CIDetector.CreateRectangleDetector (context: null, detectorOptions: options); // Create the session. The AVCaptureSession is the managing instance of the whole video handling. var captureSession = new AVCaptureSession () { // Defines what quality we want to use for the images we grab. Photo gives highest resolutions. SessionPreset = AVCaptureSession.PresetPhoto }; // Find a suitable AVCaptureDevice for video input. var device = AVCaptureDevice.DefaultDeviceWithMediaType(AVMediaType.Video); if (device == null) { // This will not work on the iOS Simulator - there is no camera. :-) throw new InvalidProgramException ("Failed to get AVCaptureDevice for video input!"); } // Create a device input with the device and add it to the session. var videoInput = AVCaptureDeviceInput.FromDevice (device, out error); if (videoInput == null) { throw new InvalidProgramException ("Failed to get AVCaptureDeviceInput from AVCaptureDevice!"); } // Let session read from the input, this is our source. captureSession.AddInput (videoInput); // Create output for the video stream. This is the destination. var videoOutput = new AVCaptureVideoDataOutput () { AlwaysDiscardsLateVideoFrames = true }; // Define the video format we want to use. Note that Xamarin exposes the CompressedVideoSetting and UncompressedVideoSetting // properties on AVCaptureVideoDataOutput un Unified API, but I could not get these to work. The VideoSettings property is deprecated, // so I use the WeakVideoSettings instead which takes an NSDictionary as input. this.videoSettingsDict = new NSMutableDictionary (); this.videoSettingsDict.Add (CVPixelBuffer.PixelFormatTypeKey, NSNumber.FromUInt32((uint)CVPixelFormatType.CV32BGRA)); videoOutput.WeakVideoSettings = this.videoSettingsDict; // Create a delegate to report back to us when an image has been captured. // We want to grab the camera stream and feed it through a AVCaptureVideoDataOutputSampleBufferDelegate // which allows us to get notified if a new image is availeble. An implementation of that delegate is VideoFrameSampleDelegate in this project. this.sampleBufferDelegate = new VideoFrameSamplerDelegate (); // Processing happens via Grand Central Dispatch (GCD), so we need to provide a queue. // This is pretty much like a system managed thread (see: http://zeroheroblog.com/ios/concurrency-in-ios-grand-central-dispatch-gcd-dispatch-queues). this.sessionQueue = new DispatchQueue ("AVSessionQueue"); // Assign the queue and the delegate to the output. Now all output will go through the delegate. videoOutput.SetSampleBufferDelegate(this.sampleBufferDelegate, this.sessionQueue); // Add output to session. captureSession.AddOutput(videoOutput); // We also want to visualize the input stream. The raw stream can be fed into an AVCaptureVideoPreviewLayer, which is a subclass of CALayer. // A CALayer can be added to a UIView. We add that layer to the controller's main view. 
var layer = this.View.Layer; this.videoLayer = AVCaptureVideoPreviewLayer.FromSession (captureSession); this.videoLayer.Frame = layer.Bounds; layer.AddSublayer (this.videoLayer); // All setup! Start capturing! captureSession.StartRunning (); // This is just for information and allows you to get valid values for the detection framerate. Console.WriteLine ("Available capture framerates:"); var rateRanges = device.ActiveFormat.VideoSupportedFrameRateRanges; foreach (var r in rateRanges) { Console.WriteLine (r.MinFrameRate + "; " + r.MaxFrameRate + "; " + r.MinFrameDuration + "; " + r.MaxFrameDuration); } // Configure framerate. Kind of weird way of doing it but the only one that works. device.LockForConfiguration (out error); // CMTime constructor means: 1 = one second, DETECTION_FPS = how many samples per unit, which is 1 second in this case. device.ActiveVideoMinFrameDuration = new CMTime(1, DETECTION_FPS); device.ActiveVideoMaxFrameDuration = new CMTime(1, DETECTION_FPS); device.UnlockForConfiguration (); // Put a small image view at the top left that shows the live image with the detected rectangle(s). this.imageViewOverlay = new UIImageView { ContentMode = UIViewContentMode.ScaleAspectFit, BackgroundColor = UIColor.Gray }; this.imageViewOverlay.Layer.BorderColor = UIColor.Red.CGColor; this.imageViewOverlay.Layer.BorderWidth = 3f; this.Add (this.imageViewOverlay); // Put another image view top right that shows the image with perspective correction. this.imageViewPerspective = new UIImageView { ContentMode = UIViewContentMode.ScaleAspectFit, BackgroundColor = UIColor.Gray }; this.imageViewPerspective.Layer.BorderColor = UIColor.Red.CGColor; this.imageViewPerspective.Layer.BorderWidth = 3f; this.Add (this.imageViewPerspective); // Add some lables for information. this.mainWindowLbl = new UILabel { Text = "Live stream from camera. Point camera to a rectangular object.", TextAlignment = UITextAlignment.Center }; this.Add (this.mainWindowLbl); this.detectionWindowLbl = new UILabel { Text = "Detected rectangle overlay", TextAlignment = UITextAlignment.Center }; this.Add (this.detectionWindowLbl); this.perspectiveWindowLbl = new UILabel { Text = "Perspective corrected", TextAlignment = UITextAlignment.Center }; this.Add (this.perspectiveWindowLbl); }
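The VideoFrameSamplerDelegate referenced above is where the CIDetector actually runs per frame. The following is a stripped-down stand-in (different class name, detector created inline) that shows the general flow of pulling a pixel buffer out of the sample buffer and handing it to the rectangle detector; it is not the project's actual delegate:

// Requires: using AVFoundation; using CoreImage; using CoreMedia; using CoreVideo;
class RectangleFrameSamplerSketch : AVCaptureVideoDataOutputSampleBufferDelegate
{
	readonly CIDetector detector = CIDetector.CreateRectangleDetector (null,
		new CIDetectorOptions { Accuracy = FaceDetectorAccuracy.High });

	public override void DidOutputSampleBuffer (AVCaptureOutput captureOutput, CMSampleBuffer sampleBuffer, AVCaptureConnection connection)
	{
		using (sampleBuffer)
		using (var pixelBuffer = sampleBuffer.GetImageBuffer () as CVPixelBuffer) {
			if (pixelBuffer == null)
				return;

			using (var image = new CIImage (pixelBuffer)) {
				// For a rectangle detector the returned features are CIRectangleFeature instances.
				var features = detector.FeaturesInImage (image);
				if (features.Length > 0)
					Console.WriteLine ("Rectangle detected at {0}", features [0].Bounds);
			}
		}
	}
}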
bool SetupCaptureSession() { var availableResolutions = new List <CameraResolution> (); var consideredResolutions = new Dictionary <NSString, CameraResolution> { { AVCaptureSession.Preset352x288, new CameraResolution { Width = 352, Height = 288 } }, { AVCaptureSession.PresetMedium, new CameraResolution { Width = 480, Height = 360 } }, //480x360 { AVCaptureSession.Preset640x480, new CameraResolution { Width = 640, Height = 480 } }, { AVCaptureSession.Preset1280x720, new CameraResolution { Width = 1280, Height = 720 } }, { AVCaptureSession.Preset1920x1080, new CameraResolution { Width = 1920, Height = 1080 } } }; // configure the capture session for low resolution, change this if your code // can cope with more data or volume session = new AVCaptureSession() { SessionPreset = AVCaptureSession.Preset640x480 }; // create a device input and attach it to the session // var captureDevice = AVCaptureDevice.DefaultDeviceWithMediaType (AVMediaType.Video); AVCaptureDevice captureDevice = null; var devices = AVCaptureDevice.DevicesWithMediaType(AVMediaType.Video); foreach (var device in devices) { captureDevice = device; if (options.UseFrontCameraIfAvailable.HasValue && options.UseFrontCameraIfAvailable.Value && device.Position == AVCaptureDevicePosition.Front) { break; //Front camera successfully set } else if (device.Position == AVCaptureDevicePosition.Back && (!options.UseFrontCameraIfAvailable.HasValue || !options.UseFrontCameraIfAvailable.Value)) { break; //Back camera succesfully set } } if (captureDevice == null) { Console.WriteLine("No captureDevice - this won't work on the simulator, try a physical device"); if (overlayView != null) { this.AddSubview(overlayView); this.BringSubviewToFront(overlayView); } return(false); } CameraResolution resolution = null; // Find resolution // Go through the resolutions we can even consider foreach (var cr in consideredResolutions) { // Now check to make sure our selected device supports the resolution // so we can add it to the list to pick from if (captureDevice.SupportsAVCaptureSessionPreset(cr.Key)) { availableResolutions.Add(cr.Value); } } resolution = options.GetResolution(availableResolutions); // See if the user selected a resolution if (resolution != null) { // Now get the preset string from the resolution chosen var preset = (from c in consideredResolutions where c.Value.Width == resolution.Width && c.Value.Height == resolution.Height select c.Key).FirstOrDefault(); // If we found a matching preset, let's set it on the session if (!string.IsNullOrEmpty(preset)) { session.SessionPreset = preset; } } var input = AVCaptureDeviceInput.FromDevice(captureDevice); if (input == null) { Console.WriteLine("No input - this won't work on the simulator, try a physical device"); if (overlayView != null) { this.AddSubview(overlayView); this.BringSubviewToFront(overlayView); } return(false); } else { session.AddInput(input); } foundResult = false; //Detect barcodes with built in avcapture stuff AVCaptureMetadataOutput metadataOutput = new AVCaptureMetadataOutput(); captureDelegate = new CaptureDelegate(metaDataObjects => { if (!analyzing) { return; } //Console.WriteLine("Found MetaData Objects"); var msSinceLastPreview = (DateTime.UtcNow - lastAnalysis).TotalMilliseconds; if (msSinceLastPreview < options.DelayBetweenAnalyzingFrames || (wasScanned && msSinceLastPreview < options.DelayBetweenContinuousScans) || working) //|| CancelTokenSource.IsCancellationRequested) { return; } working = true; wasScanned = false; lastAnalysis = DateTime.UtcNow; var mdo = 
metaDataObjects.FirstOrDefault(); if (mdo == null) { return; } var readableObj = mdo as AVMetadataMachineReadableCodeObject; if (readableObj == null) { return; } wasScanned = true; var zxingFormat = ZXingBarcodeFormatFromAVCaptureBarcodeFormat(readableObj.Type.ToString()); var rs = new ZXing.Result(readableObj.StringValue, null, null, zxingFormat); resultCallback(rs); working = false; }); metadataOutput.SetDelegate(captureDelegate, DispatchQueue.MainQueue); session.AddOutput(metadataOutput); //Setup barcode formats if (ScanningOptions.PossibleFormats != null && ScanningOptions.PossibleFormats.Count > 0) { #if __UNIFIED__ var formats = AVMetadataObjectType.None; foreach (var f in ScanningOptions.PossibleFormats) { formats |= AVCaptureBarcodeFormatFromZXingBarcodeFormat(f); } formats &= ~AVMetadataObjectType.None; metadataOutput.MetadataObjectTypes = formats; #else var formats = new List <string> (); foreach (var f in ScanningOptions.PossibleFormats) { formats.AddRange(AVCaptureBarcodeFormatFromZXingBarcodeFormat(f)); } metadataOutput.MetadataObjectTypes = (from f in formats.Distinct() select new NSString(f)).ToArray(); #endif } else { metadataOutput.MetadataObjectTypes = metadataOutput.AvailableMetadataObjectTypes; } previewLayer = new AVCaptureVideoPreviewLayer(session); //Framerate set here (15 fps) if (previewLayer.RespondsToSelector(new Selector("connection"))) { if (UIDevice.CurrentDevice.CheckSystemVersion(7, 0)) { var perf1 = PerformanceCounter.Start(); NSError lockForConfigErr = null; captureDevice.LockForConfiguration(out lockForConfigErr); if (lockForConfigErr == null) { captureDevice.ActiveVideoMinFrameDuration = new CMTime(1, 10); captureDevice.UnlockForConfiguration(); } PerformanceCounter.Stop(perf1, "PERF: ActiveVideoMinFrameDuration Took {0} ms"); } else { previewLayer.Connection.VideoMinFrameDuration = new CMTime(1, 10); } } #if __UNIFIED__ previewLayer.VideoGravity = AVLayerVideoGravity.ResizeAspectFill; #else previewLayer.LayerVideoGravity = AVLayerVideoGravity.ResizeAspectFill; #endif previewLayer.Frame = new CGRect(0, 0, this.Frame.Width, this.Frame.Height); previewLayer.Position = new CGPoint(this.Layer.Bounds.Width / 2, (this.Layer.Bounds.Height / 2)); layerView = new UIView(new CGRect(0, 0, this.Frame.Width, this.Frame.Height)); layerView.AutoresizingMask = UIViewAutoresizing.FlexibleWidth | UIViewAutoresizing.FlexibleHeight; layerView.Layer.AddSublayer(previewLayer); this.AddSubview(layerView); ResizePreview(UIApplication.SharedApplication.StatusBarOrientation); if (overlayView != null) { this.AddSubview(overlayView); this.BringSubviewToFront(overlayView); //overlayView.LayoutSubviews (); } session.StartRunning(); Console.WriteLine("RUNNING!!!"); //output.AlwaysDiscardsLateVideoFrames = true; Console.WriteLine("SetupCamera Finished"); //session.AddOutput (output); //session.StartRunning (); if (captureDevice.IsFocusModeSupported(AVCaptureFocusMode.ContinuousAutoFocus)) { NSError err = null; if (captureDevice.LockForConfiguration(out err)) { if (captureDevice.IsFocusModeSupported(AVCaptureFocusMode.ContinuousAutoFocus)) { captureDevice.FocusMode = AVCaptureFocusMode.ContinuousAutoFocus; } else if (captureDevice.IsFocusModeSupported(AVCaptureFocusMode.AutoFocus)) { captureDevice.FocusMode = AVCaptureFocusMode.AutoFocus; } if (captureDevice.IsExposureModeSupported(AVCaptureExposureMode.ContinuousAutoExposure)) { captureDevice.ExposureMode = AVCaptureExposureMode.ContinuousAutoExposure; } else if (captureDevice.IsExposureModeSupported(AVCaptureExposureMode.AutoExpose)) 
{ captureDevice.ExposureMode = AVCaptureExposureMode.AutoExpose; } if (captureDevice.IsWhiteBalanceModeSupported(AVCaptureWhiteBalanceMode.ContinuousAutoWhiteBalance)) { captureDevice.WhiteBalanceMode = AVCaptureWhiteBalanceMode.ContinuousAutoWhiteBalance; } else if (captureDevice.IsWhiteBalanceModeSupported(AVCaptureWhiteBalanceMode.AutoWhiteBalance)) { captureDevice.WhiteBalanceMode = AVCaptureWhiteBalanceMode.AutoWhiteBalance; } if (UIDevice.CurrentDevice.CheckSystemVersion(7, 0) && captureDevice.AutoFocusRangeRestrictionSupported) { captureDevice.AutoFocusRangeRestriction = AVCaptureAutoFocusRangeRestriction.Near; } if (captureDevice.FocusPointOfInterestSupported) { captureDevice.FocusPointOfInterest = new CGPoint(0.5f, 0.5f); } if (captureDevice.ExposurePointOfInterestSupported) { captureDevice.ExposurePointOfInterest = new CGPoint(0.5f, 0.5f); } captureDevice.UnlockForConfiguration(); } else { Console.WriteLine("Failed to Lock for Config: " + err.Description); } } return(true); }
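Scanning in this view is gated by the analyzing flag checked inside the capture delegate, so pausing and resuming can stay very small. A sketch using the fields referenced in the method above:

// Minimal pause/resume helpers for the scanner above; field names assumed from that sample.
public void PauseAnalysis ()
{
	analyzing = false;
}

public void ResumeAnalysis ()
{
	lastAnalysis = DateTime.UtcNow;
	wasScanned = false;
	analyzing = true;
}

public void StopScanning ()
{
	analyzing = false;
	if (session != null && session.Running)
		session.StopRunning ();
}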
//Protected
protected AVCaptureSession MaybeInitializeSession ()
{
	//Create the capture session
	var session = new AVCaptureSession () {
		SessionPreset = AVCaptureSession.PresetMedium
	};

	//Setup the video capture
	var captureDevice = AVCaptureDevice.DefaultDeviceWithMediaType (AVMediaType.Video);
	if (captureDevice == null) {
		Failure.Alert ("No captureDevice - this won't work on the simulator, try a physical device");
		return null;
	}

	var input = AVCaptureDeviceInput.FromDevice (captureDevice);
	if (input == null) {
		Failure.Alert ("No input - this won't work on the simulator, try a physical device");
		return null;
	}
	session.AddInput (input);

	// create a VideoDataOutput and add it to the session
	var output = new AVCaptureVideoDataOutput () {
		VideoSettings = new AVVideoSettings (CVPixelFormatType.CV32BGRA),
	};

	// configure the output
	var queue = new MonoTouch.CoreFoundation.DispatchQueue ("myQueue");
	output.SetSampleBufferDelegate (this, queue);
	session.AddOutput (output);

	return session;
}
bool SetupCaptureSession() { var started = DateTime.UtcNow; var availableResolutions = new List <CameraResolution> (); var consideredResolutions = new Dictionary <NSString, CameraResolution> { { AVCaptureSession.Preset352x288, new CameraResolution { Width = 352, Height = 288 } }, { AVCaptureSession.PresetMedium, new CameraResolution { Width = 480, Height = 360 } }, //480x360 { AVCaptureSession.Preset640x480, new CameraResolution { Width = 640, Height = 480 } }, { AVCaptureSession.Preset1280x720, new CameraResolution { Width = 1280, Height = 720 } }, { AVCaptureSession.Preset1920x1080, new CameraResolution { Width = 1920, Height = 1080 } } }; // configure the capture session for low resolution, change this if your code // can cope with more data or volume session = new AVCaptureSession() { SessionPreset = AVCaptureSession.Preset640x480 }; // create a device input and attach it to the session // var captureDevice = AVCaptureDevice.DefaultDeviceWithMediaType (AVMediaType.Video); AVCaptureDevice captureDevice = null; var devices = AVCaptureDevice.DevicesWithMediaType(AVMediaType.Video); foreach (var device in devices) { captureDevice = device; if (options.UseFrontCameraIfAvailable.HasValue && options.UseFrontCameraIfAvailable.Value && device.Position == AVCaptureDevicePosition.Front) { break; //Front camera successfully set } else if (device.Position == AVCaptureDevicePosition.Back && (!options.UseFrontCameraIfAvailable.HasValue || !options.UseFrontCameraIfAvailable.Value)) { break; //Back camera succesfully set } } if (captureDevice == null) { Console.WriteLine("No captureDevice - this won't work on the simulator, try a physical device"); if (overlayView != null) { this.AddSubview(overlayView); this.BringSubviewToFront(overlayView); } return(false); } CameraResolution resolution = null; // Find resolution // Go through the resolutions we can even consider foreach (var cr in consideredResolutions) { // Now check to make sure our selected device supports the resolution // so we can add it to the list to pick from if (captureDevice.SupportsAVCaptureSessionPreset(cr.Key)) { availableResolutions.Add(cr.Value); } } resolution = options.GetResolution(availableResolutions); // See if the user selected a resolution if (resolution != null) { // Now get the preset string from the resolution chosen var preset = (from c in consideredResolutions where c.Value.Width == resolution.Width && c.Value.Height == resolution.Height select c.Key).FirstOrDefault(); // If we found a matching preset, let's set it on the session if (!string.IsNullOrEmpty(preset)) { session.SessionPreset = preset; } } var input = AVCaptureDeviceInput.FromDevice(captureDevice); if (input == null) { Console.WriteLine("No input - this won't work on the simulator, try a physical device"); if (overlayView != null) { this.AddSubview(overlayView); this.BringSubviewToFront(overlayView); } return(false); } else { session.AddInput(input); } var startedAVPreviewLayerAlloc = DateTime.UtcNow; previewLayer = new AVCaptureVideoPreviewLayer(session); var totalAVPreviewLayerAlloc = DateTime.UtcNow - startedAVPreviewLayerAlloc; Console.WriteLine("PERF: Alloc AVCaptureVideoPreviewLayer took {0} ms.", totalAVPreviewLayerAlloc.TotalMilliseconds); // //Framerate set here (15 fps) // if (UIDevice.CurrentDevice.CheckSystemVersion (7, 0)) // { // var perf1 = PerformanceCounter.Start (); // NSError lockForConfigErr = null; // captureDevice.LockForConfiguration (out lockForConfigErr); // if (lockForConfigErr == null) // { // 
captureDevice.ActiveVideoMinFrameDuration = new CMTime (1, 10); // captureDevice.UnlockForConfiguration (); // } // PerformanceCounter.Stop (perf1, "PERF: ActiveVideoMinFrameDuration Took {0} ms"); // } // else // previewLayer.Connection.VideoMinFrameDuration = new CMTime(1, 10); var perf2 = PerformanceCounter.Start(); #if __UNIFIED__ previewLayer.VideoGravity = AVLayerVideoGravity.ResizeAspectFill; #else previewLayer.LayerVideoGravity = AVLayerVideoGravity.ResizeAspectFill; #endif previewLayer.Frame = new CGRect(0, 0, this.Frame.Width, this.Frame.Height); previewLayer.Position = new CGPoint(this.Layer.Bounds.Width / 2, (this.Layer.Bounds.Height / 2)); layerView = new UIView(new CGRect(0, 0, this.Frame.Width, this.Frame.Height)); layerView.AutoresizingMask = UIViewAutoresizing.FlexibleWidth | UIViewAutoresizing.FlexibleHeight; layerView.Layer.AddSublayer(previewLayer); this.AddSubview(layerView); ResizePreview(UIApplication.SharedApplication.StatusBarOrientation); if (overlayView != null) { this.AddSubview(overlayView); this.BringSubviewToFront(overlayView); //overlayView.LayoutSubviews (); } PerformanceCounter.Stop(perf2, "PERF: Setting up layers took {0} ms"); var perf3 = PerformanceCounter.Start(); session.StartRunning(); PerformanceCounter.Stop(perf3, "PERF: session.StartRunning() took {0} ms"); var perf4 = PerformanceCounter.Start(); var videoSettings = NSDictionary.FromObjectAndKey(new NSNumber((int)CVPixelFormatType.CV32BGRA), CVPixelBuffer.PixelFormatTypeKey); // create a VideoDataOutput and add it to the sesion output = new AVCaptureVideoDataOutput { WeakVideoSettings = videoSettings }; // configure the output queue = new DispatchQueue("ZxingScannerView"); // (Guid.NewGuid().ToString()); var barcodeReader = new BarcodeReaderiOS(null, (img) => { var src = new RGBLuminanceSourceiOS(img); //, bmp.Width, bmp.Height); //Don't try and rotate properly if we're autorotating anyway if (ScanningOptions.AutoRotate.HasValue && ScanningOptions.AutoRotate.Value) { return(src); } var tmpInterfaceOrientation = UIInterfaceOrientation.Portrait; InvokeOnMainThread(() => tmpInterfaceOrientation = UIApplication.SharedApplication.StatusBarOrientation); switch (tmpInterfaceOrientation) { case UIInterfaceOrientation.Portrait: return(src.rotateCounterClockwise().rotateCounterClockwise().rotateCounterClockwise()); case UIInterfaceOrientation.PortraitUpsideDown: return(src.rotateCounterClockwise().rotateCounterClockwise().rotateCounterClockwise()); case UIInterfaceOrientation.LandscapeLeft: return(src); case UIInterfaceOrientation.LandscapeRight: return(src); } return(src); }, null, null); //(p, w, h, f) => new RGBLuminanceSource(p, w, h, RGBLuminanceSource.BitmapFormat.Unknown)); if (ScanningOptions.TryHarder.HasValue) { Console.WriteLine("TRY_HARDER: " + ScanningOptions.TryHarder.Value); barcodeReader.Options.TryHarder = ScanningOptions.TryHarder.Value; } if (ScanningOptions.PureBarcode.HasValue) { barcodeReader.Options.PureBarcode = ScanningOptions.PureBarcode.Value; } if (ScanningOptions.AutoRotate.HasValue) { Console.WriteLine("AUTO_ROTATE: " + ScanningOptions.AutoRotate.Value); barcodeReader.AutoRotate = ScanningOptions.AutoRotate.Value; } if (!string.IsNullOrEmpty(ScanningOptions.CharacterSet)) { barcodeReader.Options.CharacterSet = ScanningOptions.CharacterSet; } if (ScanningOptions.TryInverted.HasValue) { barcodeReader.TryInverted = ScanningOptions.TryInverted.Value; } if (ScanningOptions.PossibleFormats != null && ScanningOptions.PossibleFormats.Count > 0) { barcodeReader.Options.PossibleFormats = 
new List <BarcodeFormat>(); foreach (var pf in ScanningOptions.PossibleFormats) { barcodeReader.Options.PossibleFormats.Add(pf); } } outputRecorder = new OutputRecorder(ScanningOptions, img => { if (!IsAnalyzing) { return(false); } try { //var sw = new System.Diagnostics.Stopwatch(); //sw.Start(); var rs = barcodeReader.Decode(img); //sw.Stop(); //Console.WriteLine("Decode Time: {0} ms", sw.ElapsedMilliseconds); if (rs != null) { resultCallback(rs); return(true); } } catch (Exception ex) { Console.WriteLine("DECODE FAILED: " + ex); } return(false); }); output.AlwaysDiscardsLateVideoFrames = true; output.SetSampleBufferDelegate(outputRecorder, queue); PerformanceCounter.Stop(perf4, "PERF: SetupCamera Finished. Took {0} ms."); session.AddOutput(output); //session.StartRunning (); var perf5 = PerformanceCounter.Start(); NSError err = null; if (captureDevice.LockForConfiguration(out err)) { if (captureDevice.IsFocusModeSupported(AVCaptureFocusMode.ContinuousAutoFocus)) { captureDevice.FocusMode = AVCaptureFocusMode.ContinuousAutoFocus; } else if (captureDevice.IsFocusModeSupported(AVCaptureFocusMode.AutoFocus)) { captureDevice.FocusMode = AVCaptureFocusMode.AutoFocus; } if (captureDevice.IsExposureModeSupported(AVCaptureExposureMode.ContinuousAutoExposure)) { captureDevice.ExposureMode = AVCaptureExposureMode.ContinuousAutoExposure; } else if (captureDevice.IsExposureModeSupported(AVCaptureExposureMode.AutoExpose)) { captureDevice.ExposureMode = AVCaptureExposureMode.AutoExpose; } if (captureDevice.IsWhiteBalanceModeSupported(AVCaptureWhiteBalanceMode.ContinuousAutoWhiteBalance)) { captureDevice.WhiteBalanceMode = AVCaptureWhiteBalanceMode.ContinuousAutoWhiteBalance; } else if (captureDevice.IsWhiteBalanceModeSupported(AVCaptureWhiteBalanceMode.AutoWhiteBalance)) { captureDevice.WhiteBalanceMode = AVCaptureWhiteBalanceMode.AutoWhiteBalance; } if (UIDevice.CurrentDevice.CheckSystemVersion(7, 0) && captureDevice.AutoFocusRangeRestrictionSupported) { captureDevice.AutoFocusRangeRestriction = AVCaptureAutoFocusRangeRestriction.Near; } if (captureDevice.FocusPointOfInterestSupported) { captureDevice.FocusPointOfInterest = new PointF(0.5f, 0.5f); } if (captureDevice.ExposurePointOfInterestSupported) { captureDevice.ExposurePointOfInterest = new PointF(0.5f, 0.5f); } captureDevice.UnlockForConfiguration(); } else { Console.WriteLine("Failed to Lock for Config: " + err.Description); } PerformanceCounter.Stop(perf5, "PERF: Setup Focus in {0} ms."); return(true); }
void setupCaptureSession () { if (CaptureSession != null) return; CaptureSession = new AVCaptureSession (); NSNotificationCenter.DefaultCenter.AddObserver (null, captureSessionNotification, CaptureSession); applicationWillEnterForegroundNotificationObserver = NSNotificationCenter.DefaultCenter.AddObserver (UIApplication.WillEnterForegroundNotification.ToString (), UIApplication.SharedApplication, NSOperationQueue.CurrentQueue, delegate(NSNotification notification) { applicationWillEnterForeground (); }); videoDevice = AVCaptureDevice.DefaultDeviceWithMediaType (AVMediaType.Video); NSError error; videoInput = new AVCaptureDeviceInput (videoDevice, out error); if (CaptureSession.CanAddInput (videoInput)) CaptureSession.AddInput (videoInput); metadataOutput = new AVCaptureMetadataOutput (); var metadataQueue = new DispatchQueue ("com.AVCam.metadata"); metadataObjectsDelegate = new MetadataObjectsDelegate { DidOutputMetadataObjectsAction = DidOutputMetadataObjects }; metadataOutput.SetDelegate (metadataObjectsDelegate, metadataQueue); if (CaptureSession.CanAddOutput (metadataOutput)) CaptureSession.AddOutput (metadataOutput); }
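Because setupCaptureSession registers notification observers, the matching teardown should remove them before the session is released. A sketch assuming the fields used above:

// Hypothetical counterpart to setupCaptureSession(); field names are taken from that method.
void teardownCaptureSession ()
{
	if (CaptureSession == null)
		return;

	if (CaptureSession.Running)
		CaptureSession.StopRunning ();

	// Remove the observers added in setupCaptureSession so this object can be collected.
	// Note: the session notification observer above never keeps its token, so its token
	// would also need to be stored to be removable here.
	if (applicationWillEnterForegroundNotificationObserver != null) {
		NSNotificationCenter.DefaultCenter.RemoveObserver (applicationWillEnterForegroundNotificationObserver);
		applicationWillEnterForegroundNotificationObserver = null;
	}

	CaptureSession.Dispose ();
	CaptureSession = null;
}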