Example #1
        public void SetupLiveCameraStream()
        {
            captureSession = new AVCaptureSession();

            var viewLayer = liveCameraStream.Layer;

            Console.WriteLine(viewLayer.Frame.Width);

            var videoPreviewLayer = new AVCaptureVideoPreviewLayer(captureSession)
            {
                Frame = liveCameraStream.Bounds
            };
            liveCameraStream.Layer.AddSublayer(videoPreviewLayer);

            Console.WriteLine(liveCameraStream.Layer.Frame.Width);

            var captureDevice = AVCaptureDevice.DefaultDeviceWithMediaType(AVMediaType.Video);
            ConfigureCameraForDevice(captureDevice);
            captureDeviceInput = AVCaptureDeviceInput.FromDevice(captureDevice);

            // Request JPEG-compressed stills; the settings dictionary must be
            // assigned to OutputSettings for the codec choice to take effect.
            var dictionary = new NSMutableDictionary();
            dictionary[AVVideo.CodecKey] = new NSNumber((int)AVVideoCodec.JPEG);
            stillImageOutput = new AVCaptureStillImageOutput()
            {
                OutputSettings = dictionary
            };

            captureSession.AddOutput(stillImageOutput);
            captureSession.AddInput(captureDeviceInput);
            captureSession.StartRunning();

            ViewWillLayoutSubviews();
        }
Example #2
		public void ConfigureCaptureSession (AVCaptureSession captureSession, AVCaptureStillImageOutput captureOutput)
		{
			if (previewLayer != null) {
				previewLayer.RemoveFromSuperLayer ();
				previewLayer = null;
			}

			previewLayer = new AVCaptureVideoPreviewLayer (captureSession) {
				VideoGravity = AVPlayerLayer.GravityResizeAspect,
				Frame = Bounds
			};

			Layer.AddSublayer (previewLayer);

			CaptureOutput = captureOutput;

			CaptureOutput.AddObserver (this, capturingStillImageKeypath, NSKeyValueObservingOptions.New, IntPtr.Zero);
		}
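The AddObserver call above only takes effect if the observing class also overrides ObserveValue. A minimal sketch, assuming capturingStillImageKeypath is the "capturingStillImage" key path registered above:

		public override void ObserveValue (NSString keyPath, NSObject ofObject, NSDictionary change, IntPtr context)
		{
			// Sketch only: react to the "capturingStillImage" transitions registered above.
			if (Equals (keyPath, capturingStillImageKeypath)) {
				var isCapturing = ((NSNumber)change [NSObject.ChangeNewKey]).BoolValue;
				// e.g. flash the preview while isCapturing is true
				return;
			}
			base.ObserveValue (keyPath, ofObject, change, context);
		}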
Example #3
        void SetupLiveStream()
        {
            CaptureSession = new AVCaptureSession();
            // PhotoOutput = new AVCapturePhotoOutput();
            previewLayer = new AVCaptureVideoPreviewLayer(CaptureSession)
            {
                Frame        = Bounds,
                VideoGravity = AVLayerVideoGravity.ResizeAspectFill
            };

            // previewLayer.BorderWidth = 2f;
            // previewLayer.BorderColor = UIColor.Red.CGColor;

            var videoDevices   = AVCaptureDevice.DevicesWithMediaType(AVMediaType.Video);
            var cameraPosition = (cameraOptions == CameraOptions.Front) ? AVCaptureDevicePosition.Front : AVCaptureDevicePosition.Back;
            var device         = videoDevices.FirstOrDefault(d => d.Position == cameraPosition);

            if (device == null)
            {
                return;
            }

            captureDeviceInput = new AVCaptureDeviceInput(device, out NSError error);

            CaptureSession.AddInput(captureDeviceInput);
            Layer.AddSublayer(previewLayer);

            captureStillImageOutput = new AVCaptureStillImageOutput()
            {
                OutputSettings = new NSDictionary()
            };

            CaptureSession.AddOutput(captureStillImageOutput);
            CaptureSession.StartRunning();
            IsPreviewing = true;
        }
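SetupLiveStream flips IsPreviewing on, but no teardown appears in this excerpt. A hedged sketch of the matching counterpart (the method name is assumed):

        void StopLiveStream()
        {
            if (!IsPreviewing)
            {
                return;
            }

            // Stop the session and detach the preview so the flag stays accurate.
            CaptureSession.StopRunning();
            previewLayer.RemoveFromSuperLayer();
            IsPreviewing = false;
        }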
Example #4
        // method for capturing the photo
        public async void CapturePhoto()
        {
            var current = CrossConnectivity.Current.IsConnected;

            // check for the internet connection to use the ResDiary API
            if (!current)
            {
                var okAlertController = UIAlertController.Create("Connection Error", "Please connect to the internet", UIAlertControllerStyle.Alert);

                okAlertController.AddAction(UIAlertAction.Create("OK", UIAlertActionStyle.Default, null));

                PresentViewController(okAlertController, true, null);
            }
            else
            {
                DialogService.ShowLoading("Scanning Logo");

                var videoConnection = stillImageOutput.ConnectionFromMediaType(AVMediaType.Video);
                var sampleBuffer    = await stillImageOutput.CaptureStillImageTaskAsync(videoConnection);

                var jpegImageAsNsData = AVCaptureStillImageOutput.JpegStillToNSData(sampleBuffer);

                // crop the photo: convert it to a UIImage first, then crop it
                UIImage img = new UIImage(jpegImageAsNsData);
                img = CropImage(img, (int)View.Bounds.GetMidX() + 40, (int)View.Bounds.GetMidY() + 225, 600, 600); // values in rectangle are the starting point and then width and height

                byte[] CroppedImage;

                // convert the UIImage to a byte array
                using (NSData imageData = img.AsPNG())
                {
                    CroppedImage = new Byte[imageData.Length];
                    System.Runtime.InteropServices.Marshal.Copy(imageData.Bytes, CroppedImage, 0, Convert.ToInt32(imageData.Length));
                }

                SendPhoto(CroppedImage);
            }
        }
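CropImage is not included in the excerpt. A hypothetical implementation matching the (x, y, width, height) call above, assuming CoreGraphics is imported:

        UIImage CropImage(UIImage source, int x, int y, int width, int height)
        {
            // Crop via the backing CGImage; the rectangle is in pixel coordinates.
            using (CGImage cropped = source.CGImage.WithImageInRect(new CGRect(x, y, width, height)))
            {
                return UIImage.FromImage(cropped);
            }
        }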
Example #5
        async void BttCapture_TouchUpInside(object sender, EventArgs e)
        {
            var videoConnection = stillImageOutput.ConnectionFromMediaType(AVMediaType.Video);
            var sampleBuffer    = await stillImageOutput.CaptureStillImageTaskAsync(videoConnection);

            var jpegImageAsNsData = AVCaptureStillImageOutput.JpegStillToNSData(sampleBuffer);

            var image           = UIKit.UIImage.LoadFromData(jpegImageAsNsData).ResizeImage(syncPhotoOptions);
            var jpegAsByteArray = image.AsJPEG(syncPhotoOptions.Quality).ToArray();

            var result = new PhotoSetNative();

            result.galleryImageXF.Checked       = true;
            result.galleryImageXF.ImageRawData  = jpegAsByteArray;
            result.galleryImageXF.AsyncStatus   = Models.ImageAsyncStatus.InLocal;
            result.galleryImageXF.ImageSourceXF = ImageSource.FromStream(() => new System.IO.MemoryStream(jpegAsByteArray));

            MessagingCenter.Send <XFCameraController, List <PhotoSetNative> >(this, Utils.SubscribeImageFromCamera, new List <PhotoSetNative>()
            {
                result
            });
            DismissModalViewController(true);
        }
Example #6
        void HandleAVCaptureCompletionHandlercompletionHandler(CMSampleBuffer imageDataSampleBuffer, NSError error)
        {
            try
            {
                using (var pool = new NSAutoreleasePool()) {
                    imageData = AVCaptureStillImageOutput.JpegStillToNSData(imageDataSampleBuffer);
                    //imageDataSampleBuffer.Dispose();
//					parentView.DismissModalViewControllerAnimated(true);
//					parentView.HandlePickedImage(imageData);
//					session.StopRunning();
                    var image = UIImage.LoadFromData(imageData);
                    InvokeOnMainThread(delegate {
                        var imageView   = new UIImageView(new RectangleF(0, 0, this.View.Frame.Size.Width, this.View.Frame.Size.Height));
                        imageView.Image = image;
                        this.View.AddSubview(imageView);
                    });
                }
            }
            catch (Exception exc)
            {
                Console.WriteLine(exc);
            }
        }
Example #7
        /// <summary>
        /// Captures the image with metadata.
        /// </summary>
        /// <returns>The image with metadata.</returns>
        /// <param name="captureStillImageOutput">Capture still image output.</param>
        /// <param name="connection">Connection.</param>
        private async Task CaptureImageWithMetadata(AVCaptureStillImageOutput captureStillImageOutput, AVCaptureConnection connection)
        {
            var sampleBuffer = await captureStillImageOutput.CaptureStillImageTaskAsync(connection);

            var imageData = AVCaptureStillImageOutput.JpegStillToNSData(sampleBuffer);
            var image     = UIImage.LoadFromData(imageData);

            RotateImage(ref image);

            try
            {
                byte[] imgData = image.AsJPEG().ToArray();

                if (Photo != null)
                {
                    Photo(this, imgData);
                }
            }
            catch (Exception error)
            {
                Console.WriteLine(error.Message);
            }
        }
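RotateImage is referenced but not shown. One hedged way to implement it is to redraw the image so its orientation flag is baked into the pixel data before AsJPEG runs:

        private void RotateImage(ref UIImage image)
        {
            if (image.Orientation == UIImageOrientation.Up)
            {
                return;
            }

            // Redrawing normalizes the EXIF orientation into the pixels.
            UIGraphics.BeginImageContextWithOptions(image.Size, false, image.CurrentScale);
            image.Draw(new CGRect(CGPoint.Empty, image.Size));
            image = UIGraphics.GetImageFromCurrentImageContext();
            UIGraphics.EndImageContext();
        }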
Example #8
        public void SetupLiveCameraStream()
        {
            captureSession = new AVCaptureSession();

            var viewLayer = liveCameraStream.Layer;

            Console.WriteLine(viewLayer.Frame.Width);

            var videoPreviewLayer = new AVCaptureVideoPreviewLayer(captureSession)
            {
                Frame = liveCameraStream.Bounds
            };

            liveCameraStream.Layer.AddSublayer(videoPreviewLayer);

            Console.WriteLine(liveCameraStream.Layer.Frame.Width);

            var captureDevice = AVCaptureDevice.DefaultDeviceWithMediaType(AVMediaType.Video);

            ConfigureCameraForDevice(captureDevice);
            captureDeviceInput = AVCaptureDeviceInput.FromDevice(captureDevice);

            var dictionary = new NSMutableDictionary();

            dictionary[AVVideo.CodecKey] = new NSNumber((int)AVVideoCodec.JPEG);
            stillImageOutput             = new AVCaptureStillImageOutput()
            {
                OutputSettings = dictionary // apply the JPEG settings rather than an empty dictionary
            };

            captureSession.AddOutput(stillImageOutput);
            captureSession.AddInput(captureDeviceInput);
            captureSession.StartRunning();

            ViewWillLayoutSubviews();
        }
Example #9
        private void OnImageCaptured(
            CMSampleBuffer buffer,
            NSError error,
            Action <byte[]> imageCaptured,
            Action <string> captureFailed)
        {
            if (error != null)
            {
                captureFailed(error.LocalizedDescription);
            }
            else
            {
                NSData data = AVCaptureStillImageOutput.JpegStillToNSData(buffer);

                byte[] image = new byte[data.Length];
                Marshal.Copy(
                    data.Bytes,
                    image,
                    0,
                    Convert.ToInt32(data.Length));

                imageCaptured(image);
            }
        }
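A handler with this shape is presumably passed to CaptureStillImageAsynchronously; a sketch of the call site, where the stillImageOutput field is an assumption:

        private void CaptureImage(Action <byte[]> imageCaptured, Action <string> captureFailed)
        {
            // stillImageOutput is assumed to be a configured AVCaptureStillImageOutput.
            AVCaptureConnection connection = stillImageOutput.ConnectionFromMediaType(AVMediaType.Video);

            stillImageOutput.CaptureStillImageAsynchronously(
                connection,
                (buffer, error) => OnImageCaptured(buffer, error, imageCaptured, captureFailed));
        }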
Example #10
        /// <summary>
        /// Initializes the camera.
        /// </summary>
        /// <returns>The camera.</returns>
        public void InitializeCamera()
        {
            try
            {
                NSError error;
                NSError err;

                _device.LockForConfiguration(out err);
                _device.FocusMode = AVCaptureFocusMode.ContinuousAutoFocus;
                _device.UnlockForConfiguration();

                _input = new AVCaptureDeviceInput(_device, out error);
                _captureSession.AddInput(_input);

                _output = new AVCaptureStillImageOutput();

                var dict = new NSMutableDictionary();
                dict[AVVideo.CodecKey] = new NSNumber((int)AVVideoCodec.JPEG);
                _output.OutputSettings = dict; // apply the JPEG settings to the still image output
                _captureSession.AddOutput(_output);

                InvokeOnMainThread(delegate
                {
                    // capture connection used for rotating camera
                    _captureConnection = _previewLayer.Connection;
                    SetStartOrientation();
                    // set orientation before loading camera
                    _captureSession.StartRunning();
                });
            }
            catch (Exception error)
            {
                Console.WriteLine(error.Message);
            }

            Available?.Invoke(this, _cameraAvailable);
        }
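SetStartOrientation is not part of the excerpt. A hypothetical version, reusing the status-bar-to-video-orientation cast that the AVCam-style ViewDidLoad further down also uses:

        private void SetStartOrientation()
        {
            var statusBarOrientation = UIApplication.SharedApplication.StatusBarOrientation;

            if (_captureConnection.SupportsVideoOrientation &&
                statusBarOrientation != UIInterfaceOrientation.Unknown)
            {
                _captureConnection.VideoOrientation = (AVCaptureVideoOrientation)(long)statusBarOrientation;
            }
        }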
Example #11
 void HandleBalloonPopped(object sender, bool e)
 {
     if (this.cameraInitialized)
     {
         this.gameplayView.Layer.AddSublayer(this.previewLayer);
         Task.Delay(pictureDelayMs).ContinueWith((w) => {
             output.CaptureStillImageTaskAsync(output.Connections [0]).ContinueWith(r => {
                 if (!r.IsFaulted)
                 {
                     var imageData = AVCaptureStillImageOutput.JpegStillToNSData(r.Result);
                     InvokeOnMainThread(() => {
                         UIImage img           = new UIImage(imageData);
                         var orientation       = img.Orientation;
                         UIImage flipped       = UIImage.FromImage(img.CGImage, 1, UIImageOrientation.LeftMirrored);
                         UIImageView imageView = new UIImageView(flipped);
                         imageView.ContentMode = UIViewContentMode.ScaleAspectFill;
                         imageView.Frame       = this.View.Frame;
                         this.View.InsertSubview(imageView, 1);
                     });
                 }
             });
         });
     }
 }
Example #12
        private AVCaptureStillImageOutput _getStillImageOutput()
        {
            var shouldReinitializeStillImageOutput = stillImageOutput == null;

            if (!shouldReinitializeStillImageOutput)
            {
                var connection = stillImageOutput.ConnectionFromMediaType(AVMediaType.Video);
                if (connection != null)
                {
                    shouldReinitializeStillImageOutput = shouldReinitializeStillImageOutput || !connection.Active;
                }
            }

            if (shouldReinitializeStillImageOutput)
            {
                stillImageOutput = new AVCaptureStillImageOutput();

                captureSession.BeginConfiguration();
                captureSession.AddOutput(stillImageOutput);
                captureSession.CommitConfiguration();
            }

            return(stillImageOutput);
        }
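Because _getStillImageOutput lazily rebuilds the output whenever its video connection has gone inactive, it is meant to be called just before each capture. A hedged usage sketch:

        private async Task <NSData> CaptureJpegAsync()
        {
            // Always go through the helper so a stale connection is replaced first.
            var output     = _getStillImageOutput();
            var connection = output.ConnectionFromMediaType(AVMediaType.Video);
            var buffer     = await output.CaptureStillImageTaskAsync(connection);

            return AVCaptureStillImageOutput.JpegStillToNSData(buffer);
        }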
Example #13
        void InitializeCameraLayer()
        {
            this.captureSession = new AVCaptureSession()
            {
                SessionPreset = AVCaptureSession.PresetMedium                 // TODO investigate that
            };
            var captureDevice = AVCaptureDevice.DevicesWithMediaType(AVMediaType.Video).Where(dev => dev.Position == AVCaptureDevicePosition.Front).FirstOrDefault();

            if (captureDevice == null)
            {
                Console.WriteLine("No captureDevice - this won't work on the simulator, try a physical device");
                return;
            }
            var input = AVCaptureDeviceInput.FromDevice(captureDevice);

            if (input == null)
            {
                Console.WriteLine("No input - this won't work on the simulator, try a physical device");
                return;
            }
            this.captureSession.AddInput(input);

            // set up the output
            output = new AVCaptureStillImageOutput();
            var dict = new NSMutableDictionary();

            dict [AVVideo.CodecKey] = new NSNumber((int)AVVideoCodec.JPEG);
            output.OutputSettings   = dict; // apply the JPEG settings to the output
            captureSession.AddOutput(output);

            this.previewLayer = AVCaptureVideoPreviewLayer.FromSession(this.captureSession);
            this.previewLayer.LayerVideoGravity = AVLayerVideoGravity.ResizeAspectFill;
            this.previewLayer.Frame             = this.View.Frame;
            this.captureSession.StartRunning();

            this.cameraInitialized = true;
        }
Example #14
        void Initialize()
        {
            CaptureSession = new AVCaptureSession();
            CaptureSession.SessionPreset = AVCaptureSession.PresetPhoto;
            previewLayer = new AVCaptureVideoPreviewLayer(CaptureSession)
            {
                Frame        = Bounds,
                VideoGravity = AVLayerVideoGravity.ResizeAspectFill
            };

            var videoDevices   = AVCaptureDevice.DevicesWithMediaType(AVMediaType.Video);
            var cameraPosition = (cameraOptions == CameraOptions.Front) ? AVCaptureDevicePosition.Front : AVCaptureDevicePosition.Back;
            var device         = videoDevices.FirstOrDefault(d => d.Position == cameraPosition);

            if (device == null)
            {
                return;
            }

            NSError error;
            var     input = new AVCaptureDeviceInput(device, out error);

            if (error != null)
            {
                Console.WriteLine("Could not create device input: {0}", error.LocalizedDescription);
                return;
            }

            var dictionary = new NSMutableDictionary();

            dictionary[AVVideo.CodecKey] = new NSNumber((int)AVVideoCodec.JPEG);
            CaptureOutput = new AVCaptureStillImageOutput()
            {
                OutputSettings = dictionary // apply the JPEG settings rather than an empty dictionary
            };
            CaptureSession.AddOutput(CaptureOutput);

            CaptureSession.AddInput(input);
            Layer.AddSublayer(previewLayer);
            CaptureSession.StartRunning();
            IsPreviewing = true;
        }
Example #15
        private AVCaptureStillImageOutput m_stillImageOutput = null;            // one of the session's outputs; its stills are JPEG-compressed

        async void SetupCaptureSession()
        {
            var granted = await AVCaptureDevice.RequestAccessForMediaTypeAsync(AVMediaType.Video);

            if (granted)
            {
                //AVCaptureDevice captureDevice = AVCaptureDevice.DefaultDeviceWithMediaType(AVMediaType.Video);
                AVCaptureDevice   videoDevice = null;
                AVCaptureDevice[] devices     = AVCaptureDevice.DevicesWithMediaType(AVMediaType.Video);
                foreach (var item in devices)
                {
                    if (item.Position == AVCaptureDevicePosition.Back)
                    {
                        videoDevice = item;
                        break;
                    }
                }
                if (videoDevice == null)
                {
                    new UIAlertView("提示", "获取摄像头失败!", null, "确定").Show();
                    return;
                }

                AVCaptureDeviceInput videoInput = AVCaptureDeviceInput.FromDevice(videoDevice); // the video source is the camera
                if (videoInput == null)
                {
                    new UIAlertView("提示", "获取摄像头视频源失败!", null, "确定").Show();
                    return;
                }

                AVCaptureVideoDataOutput videoOutput = new AVCaptureVideoDataOutput();
                videoOutput.WeakVideoSettings = new CVPixelBufferAttributes()
                {
                    PixelFormatType = CVPixelFormatType.CV32BGRA
                }.Dictionary;
                videoOutput.MinFrameDuration = new CMTime(1, 15);                                                 // 15fps
                videoOutput.SetSampleBufferDelegateQueue(new CameraVideoTransform(this.imageViewVideo), m_queue); // render into imageViewVideo; each frame is transformed by the CameraVideoTransform class

                AVCaptureStillImageOutput stillImageOutput = new AVCaptureStillImageOutput();
                stillImageOutput.CompressedVideoSetting = new AVVideoSettingsCompressed()
                {
                    Codec = AVVideoCodec.JPEG
                };

                m_session.BeginConfiguration();
                m_session.SessionPreset = AVCaptureSession.PresetMedium;
                m_session.AddInput(videoInput);
                //Two outputs are configured:
                //videoOutput, whose frames are delegated to the CameraVideoTransform class.
                //stillImageOutput, which has no delegate attached but is configured to emit JPEG-compressed output.
                m_session.AddOutput(videoOutput);
                m_session.AddOutput(stillImageOutput);
                m_session.CommitConfiguration();

                m_queue.DispatchAsync(delegate()
                {
                    m_session.StartRunning();       // start capturing
                });

                m_videoDevice      = videoDevice;
                m_stillImageOutput = stillImageOutput;
            }
            else
            {
                new UIAlertView("提示", "没有访问摄像头的权限!", null, "确定").Show();
                //this.NavigationController.PopViewController(true);
                return;
            }
        }
Example #16
        public void SetupLiveCameraStream()
        {
            Debug.WriteLine("SetupLiveCameraStream start");
            captureSession = new AVCaptureSession();
            if (liveCameraStream == null)
            {
                Debug.WriteLine("SetupLiveCameraStream liveCameraStream was null");
                liveCameraStream = new UIView();
            }
            var    viewLayer = liveCameraStream.Layer;
            nfloat w         = this.View.Frame.Width;
            nfloat h         = this.View.Frame.Height;

            Debug.WriteLine(" pre w:" + w + ", h:" + h);
            if (w < h)
            {
                h = w;
            }
            else if (h < w)
            {
                w = h;
            }
            Debug.WriteLine("post w:" + w + ", h:" + h);
            CoreGraphics.CGRect myRect = new CoreGraphics.CGRect(0f, 100f, w, h);
            //CoreGraphics.CGRect myRect = new CGRect(new CGSize(w, w));

            videoPreviewLayer = new AVCaptureVideoPreviewLayer(captureSession)
            {
                //Frame = this.View.Frame
                // This does correctly reduce the longer side.
                // However, it then reduces the shorter side to maintain aspect ratio. oof.
                Frame = myRect,
                //VideoGravity = AVLayerVideoGravity.Resize,  // default is ResizeAspect which results in a new rectangle
                VideoGravity = AVLayerVideoGravity.ResizeAspectFill,  // default is ResizeAspect
            };
            //videoPreviewLayer.Connection.VideoOrientation = AVCaptureVideoOrientation.Portrait;

            liveCameraStream.Layer.AddSublayer(videoPreviewLayer);
            //UITapGestureRecognizer tapRecognizer = new UITapGestureRecognizer(PreviewAreaTappedToChangeFocus);
            //liveCameraStream.AddGestureRecognizer(tapRecognizer);

            //var captureDevice = AVCaptureDevice.DefaultDeviceWithMediaType(AVMediaType.Video);
            captureDevice = AVCaptureDevice.GetDefaultDevice(AVMediaType.Video);
            ConfigureCameraForDevice(captureDevice);
            captureDeviceInput = AVCaptureDeviceInput.FromDevice(captureDevice);
            if (captureDeviceInput == null)
            {
                //NSError err;
                //captureDeviceInput = new AVCaptureDeviceInput(captureDevice, out err);
                ExitWithoutPhoto(this, new EventArgs());
                return;
            }
            captureSession.AddInput(captureDeviceInput);

            var dictionary = new NSMutableDictionary();

            dictionary[AVVideo.CodecKey] = new NSNumber((int)AVVideoCodec.JPEG);
            stillImageOutput             = new AVCaptureStillImageOutput()
            {
                OutputSettings = dictionary, HighResolutionStillImageOutputEnabled = true,
            };

            captureSession.AddOutput(stillImageOutput);
            Debug.WriteLine("SetupLiveCameraStream pre running");
            captureSession.StartRunning();
            Debug.WriteLine("SetupLiveCameraStream end");
        }
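ConfigureCameraForDevice is called by several of these examples but never defined. A hypothetical implementation that locks the device and opts into continuous auto modes where supported:

        void ConfigureCameraForDevice(AVCaptureDevice device)
        {
            NSError error;

            if (device == null || !device.LockForConfiguration(out error))
            {
                return;
            }

            if (device.IsFocusModeSupported(AVCaptureFocusMode.ContinuousAutoFocus))
            {
                device.FocusMode = AVCaptureFocusMode.ContinuousAutoFocus;
            }
            if (device.IsExposureModeSupported(AVCaptureExposureMode.ContinuousAutoExposure))
            {
                device.ExposureMode = AVCaptureExposureMode.ContinuousAutoExposure;
            }

            device.UnlockForConfiguration();
        }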
Example #17
        public override async void ViewDidLoad()
        {
            base.ViewDidLoad();
            await autorizacionCamara();

            ConfiguracionCamara();
            btnCapturar.TouchUpInside += async delegate
            {
                var salidadevideo = salidaImagen.ConnectionFromMediaType(AVMediaType.Video);
                var bufferdevideo = await salidaImagen.CaptureStillImageTaskAsync(salidadevideo);

                var datosImagen = AVCaptureStillImageOutput.JpegStillToNSData(bufferdevideo);

                arregloJpg = datosImagen.ToArray();
                string rutacarpeta = Environment.GetFolderPath
                                         (Environment.SpecialFolder.Personal);
                string resultado = "Foto";
                archivoLocal = resultado + ".jpg";
                ruta         = Path.Combine(rutacarpeta, archivoLocal);
                File.WriteAllBytes(ruta, arregloJpg);
                Imagen.Image = UIImage.FromFile(ruta);
            };
            btnRespaldar.TouchUpInside += async delegate
            {
                try
                {
                    CloudStorageAccount cuentaAlmacenamiento = CloudStorageAccount.Parse
                                                                   ("DefaultEndpointsProtocol=https;AccountName=almacenamientoxamarin;AccountKey=hX6T/p8IcOAF8RomLimw0fnLfkUC5CbnLOEn+6X5xLo3BxvOrmsUel0U2B4UtSK8cONvkBWUAFNJT+OR5tc3EA==");
                    CloudBlobClient    clienteBlob = cuentaAlmacenamiento.CreateCloudBlobClient();
                    CloudBlobContainer contenedor  = clienteBlob.GetContainerReference("imagenes");
                    CloudBlockBlob     recursoblob = contenedor.GetBlockBlobReference(archivoLocal);
                    await recursoblob.UploadFromFileAsync(ruta);

                    MessageBox("Guardado en", "Azure Storage - Blob");

                    CloudTableClient tableClient = cuentaAlmacenamiento.CreateCloudTableClient();

                    CloudTable table = tableClient.GetTableReference("Ubicaciones");

                    await table.CreateIfNotExistsAsync();

                    UbicacionEntity ubica = new UbicacionEntity(archivoLocal, Pais);
                    ubica.Latitud   = latitud;
                    ubica.Localidad = Ciudad;
                    ubica.Longitud  = longitud;

                    TableOperation insertar = TableOperation.Insert(ubica);
                    await table.ExecuteAsync(insertar);

                    MessageBox("Guardado en Azure", "Table NoSQL");
                }
                catch (StorageException ex)
                {
                    MessageBox("Error: ", ex.Message);
                }
            };

            #region "Mapas"

            locationManager = new CLLocationManager();
            locationManager.RequestWhenInUseAuthorization();
            Mapa.ShowsUserLocation = true;
            var locator  = CrossGeolocator.Current;
            var position = await
                           locator.GetPositionAsync(timeoutMilliseconds : 10000);

            Mapa.MapType = MKMapType.Hybrid;
            CLLocationCoordinate2D Centrar = new CLLocationCoordinate2D
                                                 (position.Latitude,
                                                 position.Longitude);
            MKCoordinateSpan   Altura = new MKCoordinateSpan(.002, .002);
            MKCoordinateRegion Region = new MKCoordinateRegion
                                            (Centrar, Altura);
            Mapa.SetRegion(Region, true);

            CLLocation Ubicacion = new CLLocation(position.Latitude, position.Longitude);

            CLGeocoder clg   = new CLGeocoder();
            var        Datos = await clg.ReverseGeocodeLocationAsync(Ubicacion);

            Pais     = Datos[0].Country;
            Ciudad   = Datos[0].Locality;
            latitud  = position.Latitude;
            longitud = position.Longitude;

            #endregion
        }
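MessageBox is not shown in this excerpt. A hypothetical one-button implementation in the style of the UIAlertController usage from Example #4:

        void MessageBox(string title, string message)
        {
            var alert = UIAlertController.Create(title, message, UIAlertControllerStyle.Alert);

            alert.AddAction(UIAlertAction.Create("OK", UIAlertActionStyle.Default, null));
            PresentViewController(alert, true, null);
        }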
Example #18
        void SnapStillImage(CameraViewController sender)
        {
            SessionQueue.DispatchAsync(async() => {
                AVCaptureConnection connection = StillImageOutput.ConnectionFromMediaType(AVMediaType.Video);
                var previewLayer = (AVCaptureVideoPreviewLayer)PreviewView.Layer;

                // Update the orientation on the still image output video connection before capturing.
                connection.VideoOrientation = previewLayer.Connection.VideoOrientation;

                // Flash set to Auto for Still Capture.
                SetFlashModeForDevice(AVCaptureFlashMode.Auto, VideoDeviceInput.Device);

                // Capture a still image.
                try {
                    var imageDataSampleBuffer = await StillImageOutput.CaptureStillImageTaskAsync(connection);

                    // The sample buffer is not retained. Create image data before saving the still image to the photo library asynchronously.
                    NSData imageData = AVCaptureStillImageOutput.JpegStillToNSData(imageDataSampleBuffer);

                    PHPhotoLibrary.RequestAuthorization(status => {
                        if (status == PHAuthorizationStatus.Authorized)
                        {
                            // To preserve the metadata, we create an asset from the JPEG NSData representation.
                            // Note that creating an asset from a UIImage discards the metadata.

                            // In iOS 9, we can use AddResource method on PHAssetCreationRequest class.
                            // In iOS 8, we save the image to a temporary file and use +[PHAssetChangeRequest creationRequestForAssetFromImageAtFileURL:].

                            if (UIDevice.CurrentDevice.CheckSystemVersion(9, 0))
                            {
                                PHPhotoLibrary.SharedPhotoLibrary.PerformChanges(() => {
                                    var request = PHAssetCreationRequest.CreationRequestForAsset();
                                    request.AddResource(PHAssetResourceType.Photo, imageData, null);
                                }, (success, err) => {
                                    if (!success)
                                    {
                                        Console.WriteLine("Error occurred while saving image to photo library: {0}", err);
                                    }
                                });
                            }
                            else
                            {
                                var temporaryFileUrl = new NSUrl(GetTmpFilePath("jpg"), false);
                                PHPhotoLibrary.SharedPhotoLibrary.PerformChanges(() => {
                                    NSError error = null;
                                    if (imageData.Save(temporaryFileUrl, NSDataWritingOptions.Atomic, out error))
                                    {
                                        PHAssetChangeRequest.FromImage(temporaryFileUrl);
                                    }
                                    else
                                    {
                                        Console.WriteLine("Error occured while writing image data to a temporary file: {0}", error);
                                    }
                                }, (success, error) => {
                                    if (!success)
                                    {
                                        Console.WriteLine("Error occurred while saving image to photo library: {0}", error);
                                    }

                                    // Delete the temporary file.
                                    NSError deleteError;
                                    NSFileManager.DefaultManager.Remove(temporaryFileUrl, out deleteError);
                                });
                            }
                        }
                    });
                } catch (NSErrorException ex) {
                    Console.WriteLine("Could not capture still image: {0}", ex.Error);
                }
            });
        }
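GetTmpFilePath is referenced on the iOS 8 fallback path. A hypothetical implementation, assuming System and System.IO are imported:

        string GetTmpFilePath(string extension)
        {
            // A unique file name under the app's temporary directory.
            var fileName = Guid.NewGuid().ToString() + "." + extension;

            return Path.Combine(Path.GetTempPath(), fileName);
        }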
Example #19
        public override bool FinishedLaunching(UIApplication application, NSDictionary launchOptions)
        {
            // create a new window instance based on the screen size
            Window = new UIWindow(UIScreen.MainScreen.Bounds);

            Microsoft.WindowsAzure.MobileServices.CurrentPlatform.Init();

            // If you have defined a root view controller, set it here:
            initialViewController     = Storyboard.InstantiateInitialViewController() as UIViewController;
            Window.RootViewController = initialViewController;
            UITabBar.Appearance.SelectedImageTintColor = UIColor.FromRGB(14, 125, 202);
            UITabBar.Appearance.BackgroundColor        = UIColor.White;
            // make the window visible
            Window.MakeKeyAndVisible();

            // Create a new capture session
            Session = new AVCaptureSession();
            Session.SessionPreset = AVCaptureSession.PresetMedium;

            // Create a device input
            CaptureDevice = AVCaptureDevice.DefaultDeviceWithMediaType(AVMediaType.Video);
            if (CaptureDevice == null)
            {
                //throw new Exception("Video recording not supported on this device");
            }
            else
            {
                // Prepare device for configuration
                if (!CaptureDevice.LockForConfiguration(out Error))
                {
                    // There has been an issue, abort
                    Console.WriteLine("Error: {0}", Error.LocalizedDescription);
                    CaptureDevice.UnlockForConfiguration();
                    return(false);
                }

                // Configure stream for 15 frames per second (fps)
                CaptureDevice.ActiveVideoMinFrameDuration = new CMTime(1, 15);

                // Unlock configuration
                CaptureDevice.UnlockForConfiguration();

                // Get input from capture device
                Input = AVCaptureDeviceInput.FromDevice(CaptureDevice);
                if (Input == null)
                {
                    // Error, report and abort
                    Console.WriteLine("Unable to gain input from capture device.");
                    CameraAvailable = false;
                    return(false);
                }

                // Attach input to session
                Session.AddInput(Input);

                // Create a new output
                var output   = new AVCaptureVideoDataOutput();
                var settings = new AVVideoSettingsUncompressed();
                settings.PixelFormatType = CVPixelFormatType.CV32BGRA;
                output.WeakVideoSettings = settings.Dictionary;

                // Configure and attach to the output to the session
                Queue    = new DispatchQueue("ManCamQueue");
                Recorder = new OutputRecorder();
                output.SetSampleBufferDelegate(Recorder, Queue);
                Session.AddOutput(output);

                // Configure and attach a still image output for bracketed capture
                StillImageOutput = new AVCaptureStillImageOutput();
                var dict = new NSMutableDictionary();
                dict[AVVideo.CodecKey] = new NSNumber((int)AVVideoCodec.JPEG);
                StillImageOutput.OutputSettings = dict; // apply the JPEG settings to the output
                Session.AddOutput(StillImageOutput);

                // Let tabs know that a camera is available
                CameraAvailable = true;
            }
            return(true);
        }
Example #20
        public override void FinishedLaunching(UIApplication application)
        {
            // Create a new capture session
            Session = new AVCaptureSession();
            Session.SessionPreset = AVCaptureSession.PresetMedium;

            // Create a device input
            CaptureDevice = AVCaptureDevice.DefaultDeviceWithMediaType(AVMediaType.Video);
            if (CaptureDevice == null)
            {
                // Video capture not supported, abort
                Console.WriteLine("Video recording not supported on this device");
                CameraAvailable = false;
                return;
            }

            // Prepare device for configuration
            CaptureDevice.LockForConfiguration(out Error);
            if (Error != null)
            {
                // There has been an issue, abort
                Console.WriteLine("Error: {0}", Error.LocalizedDescription);
                CaptureDevice.UnlockForConfiguration();
                return;
            }

            // Configure stream for 15 frames per second (fps)
            CaptureDevice.ActiveVideoMinFrameDuration = new CMTime(1, 15);

            // Unlock configuration
            CaptureDevice.UnlockForConfiguration();

            // Get input from capture device
            Input = AVCaptureDeviceInput.FromDevice(CaptureDevice);
            if (Input == null)
            {
                // Error, report and abort
                Console.WriteLine("Unable to gain input from capture device.");
                CameraAvailable = false;
                return;
            }

            // Attach input to session
            Session.AddInput(Input);

            // Create a new output
            var output   = new AVCaptureVideoDataOutput();
            var settings = new AVVideoSettingsUncompressed();

            settings.PixelFormatType = CVPixelFormatType.CV32BGRA;
            output.WeakVideoSettings = settings.Dictionary;

            // Configure and attach to the output to the session
            Queue    = new DispatchQueue("ManCamQueue");
            Recorder = new OutputRecorder();
            output.SetSampleBufferDelegate(Recorder, Queue);
            Session.AddOutput(output);

            // Configure and attach a still image output for bracketed capture
            StillImageOutput = new AVCaptureStillImageOutput();
            var dict = new NSMutableDictionary();

            dict[AVVideo.CodecKey] = new NSNumber((int)AVVideoCodec.JPEG);
            StillImageOutput.OutputSettings = dict; // apply the JPEG settings to the output
            Session.AddOutput(StillImageOutput);

            // Let tabs know that a camera is available
            CameraAvailable = true;
        }
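OutputRecorder is instantiated above but not defined in the excerpt. A minimal sketch, assuming it derives from AVCaptureVideoDataOutputSampleBufferDelegate:

        public class OutputRecorder : AVCaptureVideoDataOutputSampleBufferDelegate
        {
            public override void DidOutputSampleBuffer(AVCaptureOutput captureOutput, CMSampleBuffer sampleBuffer, AVCaptureConnection connection)
            {
                try
                {
                    // Process the frame here (e.g. hand its pixel buffer to an analyzer).
                }
                finally
                {
                    // Dispose promptly or the capture pipeline will stall for lack of buffers.
                    sampleBuffer.Dispose();
                }
            }
        }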
Example #21
        private void SetupCamera(bool restart = false)
        {
            try
            {
                if (_captureSession == null || restart)
                {
                    _captureSession = new AVCaptureSession
                    {
                        SessionPreset = AVCaptureSession.PresetPhoto
                    };
                    if (!_cameraModule.AvailableCameras.Any())
                    {
                        var deviceTypes = new List <AVCaptureDeviceType>
                        {
                            AVCaptureDeviceType.BuiltInWideAngleCamera,
                            AVCaptureDeviceType.BuiltInTelephotoCamera
                        };
                        if (UIDevice.CurrentDevice.CheckSystemVersion(13, 0))
                        {
                            deviceTypes.Add(AVCaptureDeviceType.BuiltInUltraWideCamera);
                        }
                        var session = AVCaptureDeviceDiscoverySession.Create(
                            deviceTypes.ToArray(), AVMediaType.Video, AVCaptureDevicePosition.Unspecified);
                        _devices = session.Devices;
                        foreach (var avCaptureDevice in _devices)
                        {
                            _cameraModule.AvailableCameras.Add(new AvailableCamera
                            {
                                DisplayName = avCaptureDevice.LocalizedName,
                                CameraId    = avCaptureDevice.UniqueID,
                                IsFront     = avCaptureDevice.Position == AVCaptureDevicePosition.Front
                            });
                        }
                    }
                }

                SetPreviewOrientation();

                if (_device == null)
                {
                    _device = AVCaptureDevice.GetDefaultDevice(AVMediaTypes.Video);
                    _cameraModule.ChosenCamera = _cameraModule.AvailableCameras.First(c => c.CameraId == _device.UniqueID);
                }
                else
                {
                    _device = AVCaptureDevice.DeviceWithUniqueID(_cameraModule.ChosenCamera.CameraId);
                }

                SetPreviewSizing(_device, restart);

                TurnOffFlashAndSetContinuousAutoMode(_device);

                _is10OrHigher = UIDevice.CurrentDevice.CheckSystemVersion(10, 0);
                var isRestart = false;
                if (_is10OrHigher && (_photoOutput == null || restart))
                {
                    _photoOutput = new AVCapturePhotoOutput
                    {
                        IsHighResolutionCaptureEnabled = true
                    };

                    _captureSession.AddOutput(_photoOutput);
                    isRestart = true;
                }
                else if (!_is10OrHigher && (_stillImageOutput == null || restart))
                {
                    _stillImageOutput = new AVCaptureStillImageOutput
                    {
                        OutputSettings = new NSDictionary(),
                        HighResolutionStillImageOutputEnabled = true
                    };

                    _captureSession.AddOutput(_stillImageOutput);
                    isRestart = true;
                }

                if (isRestart)
                {
                    var settings = new AVVideoSettingsUncompressed
                    {
                        PixelFormatType = CVPixelFormatType.CV32BGRA
                    };
                    _previewFrameOutput = new AVCaptureVideoDataOutput
                    {
                        AlwaysDiscardsLateVideoFrames = true,
                        MinFrameDuration         = new CMTime(1, 30),
                        UncompressedVideoSetting = settings
                    };
                    //if (UIDevice.CurrentDevice.CheckSystemVersion(13, 0)) //TODO: what is this?
                    //{
                    //    _previewFrameOutput.DeliversPreviewSizedOutputBuffers = true;
                    //    _previewFrameOutput.AutomaticallyConfiguresOutputBufferDimensions = false;
                    //}
                    _previewFrameDelegate = new PreviewFrameDelegate(_cameraModule);
                    var queue = new DispatchQueue("PreviewFrameQueue");
                    _previewFrameOutput.WeakVideoSettings = settings.Dictionary;
                    _previewFrameOutput.SetSampleBufferDelegate(_previewFrameDelegate, queue);

                    _captureSession.AddOutput(_previewFrameOutput);
                    _captureSession.AddInput(AVCaptureDeviceInput.FromDevice(_device));
                }

                _device.AddObserver(this, "adjustingFocus", NSKeyValueObservingOptions.OldNew, IntPtr.Zero);
            }
            catch (Exception e)
            {
                _cameraModule.ErrorMessage = e.ToString();
            }
        }
Example #22
		private void SetupCamera()
		{
			CaptureSession = null;
			CaptureSession = new AVCaptureSession();
			CaptureSession.SessionPreset = AVCaptureSession.PresetPhoto;

			currentDevice = null;
			inputDevice1 = null;
			inputDevice2 = null;

			foreach (AVCaptureDevice device in AVCaptureDevice.DevicesWithMediaType(AVMediaType.Video))
			{
				if (device.Position == AVCaptureDevicePosition.Front)
				{
					inputDevice1 = device;
				}
				else if (device.Position == AVCaptureDevicePosition.Back)
				{
					inputDevice2 = device;
				}
			}

			NSError error;
			if (inputDevice1.HasFlash)
			{
				inputDevice1.LockForConfiguration(out error);
				inputDevice1.FlashMode = AVCaptureFlashMode.Off;
				FlashButton.TitleLabel.Text = "Flash Off";
			}

			if (inputDevice2.HasFlash)
			{
				inputDevice2.LockForConfiguration(out error);
				inputDevice2.FlashMode = AVCaptureFlashMode.Off;
				FlashButton.TitleLabel.Text = "Flash Off";
			}

			frontCamera = AVCaptureDeviceInput.FromDevice(inputDevice1, out error);
			rearCamera = AVCaptureDeviceInput.FromDevice(inputDevice2, out error);
			currentDevice = inputDevice2;

			if (CaptureSession.CanAddInput(rearCamera))
			{
				CaptureSession.AddInput(rearCamera);
			}

			AVCaptureVideoPreviewLayer previewLayer = new AVCaptureVideoPreviewLayer(CaptureSession);
			previewLayer.VideoGravity = AVLayerVideoGravity.ResizeAspectFill;
			previewLayer.Frame = View.Frame;
			View.Layer.InsertSublayer(previewLayer, 0);

			StillImageOutput = new AVCaptureStillImageOutput();
			StillImageOutput.OutputSettings = new NSDictionary(AVVideo.CodecKey, AVVideo.CodecJPEG);

			CaptureSession.AddOutput(StillImageOutput);

			CaptureSession.StartRunning();
		}
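The paired frontCamera/rearCamera inputs configured above make a runtime toggle straightforward. A hedged sketch (SwitchCamera is an assumed name):

		void SwitchCamera()
		{
			bool usingRear = currentDevice == inputDevice2;
			AVCaptureDeviceInput oldInput = usingRear ? rearCamera : frontCamera;
			AVCaptureDeviceInput newInput = usingRear ? frontCamera : rearCamera;

			CaptureSession.BeginConfiguration();
			CaptureSession.RemoveInput(oldInput);

			if (CaptureSession.CanAddInput(newInput))
			{
				CaptureSession.AddInput(newInput);
				currentDevice = usingRear ? inputDevice1 : inputDevice2;
			}
			else
			{
				CaptureSession.AddInput(oldInput); // roll back if the swap fails
			}

			CaptureSession.CommitConfiguration();
		}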
Example #23
        public async Task <NSData> CapturePhoto()
        {
            var videoConnection = CaptureOutput.ConnectionFromMediaType(AVMediaType.Video);
            var sampleBuffer    = await CaptureOutput.CaptureStillImageTaskAsync(videoConnection);

            var jpegData = AVCaptureStillImageOutput.JpegStillToNSData(sampleBuffer);
            var image    = UIImage.LoadFromData(jpegData);

            CGImage             imgRed                 = image.CGImage;
            UIDevice            device                 = UIDevice.CurrentDevice;
            UIDeviceOrientation orientation            = device.Orientation;
            AVCaptureConnection previewLayerConnection = this.previewLayer.Connection;
            var photo = new UIImage(jpegData);

            if (previewLayerConnection.SupportsVideoOrientation)
            {
                if (orientation == UIDeviceOrientation.Portrait)
                {
                    UpdatePreviewLayer(previewLayerConnection,
                                       AVCaptureVideoOrientation.Portrait);
                    NSData ret = photo.AsJPEG();
                    return(ret);
                }

                switch (orientation)
                {
                case UIDeviceOrientation.PortraitUpsideDown:
                    photo = new UIImage(image.CGImage, 1.0f, UIImageOrientation.Up);
                    //  UpdatePreviewLayer(previewLayerConnection,
                    //    AVCaptureVideoOrientation.Portrait);
                    break;

                case UIDeviceOrientation.LandscapeLeft:
                    photo = new UIImage(image.CGImage, 1.0f, UIImageOrientation.Up);
                    //  UpdatePreviewLayer(previewLayerConnection,
                    //      AVCaptureVideoOrientation.Portrait);
                    break;

                case UIDeviceOrientation.LandscapeRight:
                    photo = new UIImage(image.CGImage, 1.0f, UIImageOrientation.Down);
                    // UpdatePreviewLayer(previewLayerConnection,
                    //  AVCaptureVideoOrientation.Portrait);
                    break;

                case UIDeviceOrientation.Portrait:
                    //  photo = new UIImage(image.CGImage, 1.0f, UIImageOrientation.Up);
                    // UpdatePreviewLayer(previewLayerConnection,
                    //     AVCaptureVideoOrientation.Portrait);
                    break;

                default:
                    //UpdatePreviewLayer(previewLayerConnection,
                    //  AVCaptureVideoOrientation.Portrait);
                    //photo = new UIImage(image.CGImage, 1.0f, UIImageOrientation.Up);
                    break;
                }
            }
            LayoutSubviews();
            NSData returnObj = photo.AsJPEG();

            return(returnObj);
            // var photo = new UIImage(jpegData);
            // var rotatedPhoto = RotateImage(photo, 180f);


            //   var jpegImageAsNsData = AVCaptureStillImageOutput.JpegStillToNSData(sampleBuffer);
            //   return jpegImageAsNsData;

            /* CALayer layer = new CALayer
             * {
             *   //ContentsGravity = "kCAGravityResizeAspect",
             *   //ContentsRect = rect,
             *   //GeometryFlipped = true,
             *   ContentsScale = 1.0f,
             *   Frame = Bounds,
             *   Contents = rotatedPhoto.CGImage //Contents = photo.CGImage,
             * };
             * layer.Frame = previewLayer.Frame;
             *
             * var image= ImageFromLayer(layer);
             * NSData imageData = image.AsJPEG();
             * return imageData;
             * /*var t=CIIma.LoadFromData()
             * image.LoadData(NSData.FromArray(sampleBuffer));
             * var jpegImageAsNsData = AVCaptureStillImageOutput.JpegStillToNSData(image.AsJPEG());
             * return jpegImageAsNsData;
             * ImageFromLayer(layer).AsJPEG().AsStream());
             * // MainPage.UpdateImage(ImageFromLayer(layer).AsJPEG().AsStream());*/
        }
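UpdatePreviewLayer is called above but not defined here. A hypothetical version matching its (connection, orientation) signature:

        void UpdatePreviewLayer(AVCaptureConnection layerConnection, AVCaptureVideoOrientation orientation)
        {
            // Align the preview connection with the requested orientation and refit the layer.
            layerConnection.VideoOrientation = orientation;
            previewLayer.Frame = Bounds;
        }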
Example #24
        async private void TakePictureAndShrinkIt()
        {
            /*Uncomment this block of code to revert back*/
            //var mediaOptions = new Plugin.Media.Abstractions.StoreCameraMediaOptions
            //{
            //    DefaultCamera = Plugin.Media.Abstractions.CameraDevice.Front,
            //};
            //var photo = await Plugin.Media.CrossMedia.Current.TakePhotoAsync(mediaOptions);



            //test manual


            //var device = GetCameraForOrientation(AVCaptureDevicePosition.Front);
            //ConfigureCameraForDevice(device);

            //captureSession.BeginConfiguration();
            //captureSession.RemoveInput(captureDeviceInput);
            //captureDeviceInput = AVCaptureDeviceInput.FromDevice(device);
            //captureSession.AddInput(captureDeviceInput);
            //captureSession.CommitConfiguration();


            var videoConnection = stillImageOutput.ConnectionFromMediaType(AVMediaType.Video);
            var sampleBuffer    = await stillImageOutput.CaptureStillImageTaskAsync(videoConnection);

            var jpegImageAsNsData = AVCaptureStillImageOutput.JpegStillToNSData(sampleBuffer);
            var uiImage           = UIImage.LoadFromData(jpegImageAsNsData);
            var resizedImage      = ScaledImage(uiImage, 500, 500);

            //var newMediaFile = new MediaFile(resizedImage.)
            //var jpegAsByteArray = jpegImageAsNsData.ToArray();
            // jpegAsByteArray
            //var photo = resizedImage.AsJPEG((System.nfloat)1.0).AsStream();

            // var photo = new MemoryStream(jpegAsByteArray);

            //end test manual



            ClearEmotionResults("{Analyzing...}");
            //if (photo != null)
            //PhotoImage.Source = ImageSource.FromStream(() => { return photo.GetStream(); });
            PhotoImage.Source = ImageSource.FromStream(() => { return(resizedImage.AsJPEG(1.0f).AsStream()); });
            //  await analyseImage(photo.GetStream());
            var emotions = await DetectEmotionsManuallyAsync(jpegImageAsNsData);

            if (emotions != null)
            {
                ConfidenceResultsReturned(this, new EmotionEventArgs(
                                              emotions.Anger, emotions.Contempt, emotions.Disgust, emotions.Fear,
                                              emotions.Happiness, emotions.Neutral, emotions.Sadness,
                                              emotions.Surprise)); //  photo.Dispose();
                this.AngerLabel.Text     = $"Anger: {emotions.Anger.ToString()}";
                this.ContemptLabel.Text  = $"Contempt: {emotions.Contempt.ToString()}";
                this.DisgustLabel.Text   = $"Disgust: {emotions.Disgust.ToString()}";
                this.FearLabel.Text      = $"Fear: {emotions.Fear.ToString()}";
                this.HappinessLabel.Text = $"Happiness: {emotions.Happiness.ToString()}";
                this.NeutralLabel.Text   = $"Neutral: {emotions.Neutral.ToString()}";
                this.SadnessLabel.Text   = $"Sadness: {emotions.Sadness.ToString()}";
                this.SurpriseLabel.Text  = $"Surprise: {emotions.Surprise.ToString()}";



                KeyValuePair <string, float> e = new KeyValuePair <string, float>("zero", 0);
                using (IEnumerator <KeyValuePair <string, float> > enumer = emotions.ToRankedList().GetEnumerator())
                {
                    if (enumer.MoveNext())
                    {
                        e = enumer.Current;
                    }
                }

                string highestEmotion   = e.Key;
                var    highlightedColor = Color.Red;
                switch (highestEmotion)
                {
                case "Anger":
                    this.AngerLabel.TextColor = highlightedColor;
                    break;

                case "Contempt":
                    this.ContemptLabel.TextColor = highlightedColor;
                    break;

                case "Disgust":
                    this.DisgustLabel.TextColor = highlightedColor;
                    break;

                case "Fear":
                    this.FearLabel.TextColor = highlightedColor;
                    break;

                case "Happiness":
                    this.HappinessLabel.TextColor = highlightedColor;
                    break;

                case "Neutral":
                    this.NeutralLabel.TextColor = highlightedColor;
                    break;

                case "Sadness":

                    this.SadnessLabel.TextColor = highlightedColor;
                    break;


                case "Surprise":

                    this.SurpriseLabel.TextColor = highlightedColor;
                    break;

                default: break;
                }
            }
            else
            {
                DisplayAlert("Picture Analysis Failure", "Photo was not taken at a good angle" +
                             " and therefore could not be parsed. Continue solving problems normally.", "OK");
            }
        }
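ScaledImage is not included in the excerpt. A hypothetical aspect-preserving implementation consistent with the ScaledImage(uiImage, 500, 500) call above:

        UIImage ScaledImage(UIImage image, float maxWidth, float maxHeight)
        {
            // Fit the image into a maxWidth x maxHeight box, preserving aspect ratio.
            var ratio   = Math.Min(maxWidth / (double)image.Size.Width, maxHeight / (double)image.Size.Height);
            var newSize = new CGSize(image.Size.Width * (nfloat)ratio, image.Size.Height * (nfloat)ratio);

            UIGraphics.BeginImageContextWithOptions(newSize, false, 1f);
            image.Draw(new CGRect(CGPoint.Empty, newSize));
            var scaled = UIGraphics.GetImageFromCurrentImageContext();
            UIGraphics.EndImageContext();

            return scaled;
        }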
Example #25
		public async override void ViewDidLoad ()
		{
			base.ViewDidLoad ();

			// Disable UI. The UI is enabled if and only if the session starts running.
			CameraButton.Enabled = false;
			RecordButton.Enabled = false;
			StillButton.Enabled = false;

			// Create the AVCaptureSession.
			Session = new AVCaptureSession ();

			// Setup the preview view.
			PreviewView.Session = Session;

			// Communicate with the session and other session objects on this queue.
			SessionQueue = new DispatchQueue ("session queue");
			SetupResult = AVCamSetupResult.Success;

			// Check video authorization status. Video access is required and audio access is optional.
			// If audio access is denied, audio is not recorded during movie recording.
			switch (AVCaptureDevice.GetAuthorizationStatus (AVMediaType.Video)) {
				// The user has previously granted access to the camera.
				case AVAuthorizationStatus.Authorized:
					break;

				// The user has not yet been presented with the option to grant video access.
				// We suspend the session queue to delay session setup until the access request has completed to avoid
				// asking the user for audio access if video access is denied.
				// Note that audio access will be implicitly requested when we create an AVCaptureDeviceInput for audio during session setup.
				case AVAuthorizationStatus.NotDetermined:
					SessionQueue.Suspend ();
					var granted = await AVCaptureDevice.RequestAccessForMediaTypeAsync (AVMediaType.Video);
					if (!granted)
						SetupResult = AVCamSetupResult.CameraNotAuthorized;
					SessionQueue.Resume ();
					break;

				// The user has previously denied access.
				default:
					SetupResult = AVCamSetupResult.CameraNotAuthorized;
					break;
			}

			// Setup the capture session.
			// In general it is not safe to mutate an AVCaptureSession or any of its inputs, outputs, or connections from multiple threads at the same time.
			// Why not do all of this on the main queue?
			// Because AVCaptureSession.StartRunning is a blocking call which can take a long time. We dispatch session setup to the sessionQueue
			// so that the main queue isn't blocked, which keeps the UI responsive.
			SessionQueue.DispatchAsync (() => {
				if (SetupResult != AVCamSetupResult.Success)
					return;

				backgroundRecordingID = -1;
				NSError error;
				AVCaptureDevice videoDevice = CreateDevice (AVMediaType.Video, AVCaptureDevicePosition.Back);
				AVCaptureDeviceInput videoDeviceInput = AVCaptureDeviceInput.FromDevice (videoDevice, out error);
				if (videoDeviceInput == null)
					Console.WriteLine ("Could not create video device input: {0}", error);

				Session.BeginConfiguration ();
				if (Session.CanAddInput (videoDeviceInput)) {
					Session.AddInput (VideoDeviceInput = videoDeviceInput);
					DispatchQueue.MainQueue.DispatchAsync (() => {
						// Why are we dispatching this to the main queue?
						// Because AVCaptureVideoPreviewLayer is the backing layer for PreviewView and UIView
						// can only be manipulated on the main thread.
						// Note: As an exception to the above rule, it is not necessary to serialize video orientation changes
						// on the AVCaptureVideoPreviewLayer’s connection with other session manipulation.
						// Use the status bar orientation as the initial video orientation. Subsequent orientation changes are handled by
						// ViewWillTransitionToSize method.
						UIInterfaceOrientation statusBarOrientation = UIApplication.SharedApplication.StatusBarOrientation;
						AVCaptureVideoOrientation initialVideoOrientation = AVCaptureVideoOrientation.Portrait;
						if (statusBarOrientation != UIInterfaceOrientation.Unknown)
							initialVideoOrientation = (AVCaptureVideoOrientation)(long)statusBarOrientation;

						var previewLayer = (AVCaptureVideoPreviewLayer)PreviewView.Layer;
						previewLayer.Connection.VideoOrientation = initialVideoOrientation;
					});
				} else {
					Console.WriteLine ("Could not add video device input to the session");
					SetupResult = AVCamSetupResult.SessionConfigurationFailed;
				}

				AVCaptureDevice audioDevice = AVCaptureDevice.DefaultDeviceWithMediaType (AVMediaType.Audio);
				AVCaptureDeviceInput audioDeviceInput = AVCaptureDeviceInput.FromDevice (audioDevice, out error);
				if (audioDeviceInput == null)
					Console.WriteLine ("Could not create audio device input: {0}", error);
				else if (Session.CanAddInput (audioDeviceInput))
					Session.AddInput (audioDeviceInput);
				else
					Console.WriteLine ("Could not add audio device input to the session");

				var movieFileOutput = new AVCaptureMovieFileOutput ();
				if (Session.CanAddOutput (movieFileOutput)) {
					Session.AddOutput (MovieFileOutput = movieFileOutput);
					AVCaptureConnection connection = movieFileOutput.ConnectionFromMediaType (AVMediaType.Video);
					if (connection.SupportsVideoStabilization)
						connection.PreferredVideoStabilizationMode = AVCaptureVideoStabilizationMode.Auto;
				} else {
					Console.WriteLine ("Could not add movie file output to the session");
					SetupResult = AVCamSetupResult.SessionConfigurationFailed;
				}

				var stillImageOutput = new AVCaptureStillImageOutput ();
				if (Session.CanAddOutput (stillImageOutput)) {
					stillImageOutput.CompressedVideoSetting = new AVVideoSettingsCompressed {
						Codec = AVVideoCodec.JPEG
					};
					Session.AddOutput (StillImageOutput = stillImageOutput);
				} else {
					Console.WriteLine ("Could not add still image output to the session");
					SetupResult = AVCamSetupResult.SessionConfigurationFailed;
				}

				Session.CommitConfiguration ();
			});
		}
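The comments above stress that AVCaptureSession.StartRunning blocks, which is why all session work is funneled through SessionQueue. A minimal sketch of the matching lifecycle code, assuming the same AVCam-style members (Session, SessionQueue, SetupResult); it is not part of the example above and the real project may differ.

		public override void ViewWillAppear (bool animated)
		{
			base.ViewWillAppear (animated);
			SessionQueue.DispatchAsync (() => {
				// Only start the (blocking) session if configuration succeeded.
				if (SetupResult == AVCamSetupResult.Success)
					Session.StartRunning ();
			});
		}

		public override void ViewDidDisappear (bool animated)
		{
			base.ViewDidDisappear (animated);
			SessionQueue.DispatchAsync (() => {
				if (SetupResult == AVCamSetupResult.Success)
					Session.StopRunning ();
			});
		}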
Example #26
        public async void DidOutputMetadataObjects(AVCaptureMetadataOutput captureOutput, AVMetadataObject[] faces, AVCaptureConnection connection)
        {
            Console.WriteLine("Got metadata");

            try {
                List <int> unseen = faceViews.Keys.ToList();
                List <int> seen   = new List <int>();


                CATransaction.Flush();
                CATransaction.Begin();
                // Disable implicit animations while the face overlays are updated.
                CATransaction.DisableActions = true;

                foreach (var face in faces)
                {
                    // HACK: int faceId = (face as AVMetadataFaceObject).FaceID;
                    int faceId = (int)(face as AVMetadataFaceObject).FaceID;
                    unseen.Remove(faceId);
                    seen.Add(faceId);

                    FaceView view;
                    if (faceViews.ContainsKey(faceId))
                    {
                        view = faceViews[faceId];
                    }
                    else
                    {
                        view = new FaceView();
                        view.Layer.CornerRadius = 10;
                        view.Layer.BorderWidth  = 3;
                        view.Layer.BorderColor  = UIColor.Green.CGColor;
                        previewView.AddSubview(view);
                        faceViews.Add(faceId, view);
                        view.Id       = faceId;
                        view.Callback = TouchCallBack;
                        if (lockedFaceID != null)
                        {
                            view.Alpha = 0;
                        }
                    }

                    AVMetadataFaceObject adjusted = (AVMetadataFaceObject)(previewView.Layer as AVCaptureVideoPreviewLayer).GetTransformedMetadataObject(face);
                    view.Frame = adjusted.Bounds;
                }

                foreach (int faceId in unseen)
                {
                    FaceView view = faceViews[faceId];
                    view.RemoveFromSuperview();
                    faceViews.Remove(faceId);
                    if (faceId == lockedFaceID)
                    {
                        clearLockedFace();
                    }
                }

                if (lockedFaceID != null)
                {
                    FaceView view = faceViews[lockedFaceID.GetValueOrDefault()];
                    // HACK: Cast resulting nfloat to float
                    // float size = (float)Math.Max (view.Frame.Size.Width, view.Frame.Size.Height) / device.VideoZoomFactor;
                    float size      = (float)(Math.Max(view.Frame.Size.Width, view.Frame.Size.Height) / device.VideoZoomFactor);
                    float zoomDelta = lockedFaceSize / size;
                    float lockTime  = (float)(CATransition.CurrentMediaTime() - this.lockTime);
                    float zoomRate  = (float)(Math.Log(zoomDelta) / lockTime);
                    if (Math.Abs(zoomDelta) > 0.1)
                    {
                        device.RampToVideoZoom(zoomRate > 0 ? MaxZoom : 1, zoomRate);
                    }
                }
            }
            catch (Exception ex) {
                Console.WriteLine("Error while updating face views: {0}", ex);
            }
            finally {
                CATransaction.Commit();
            }

            lock (lockerobj) {
                if (processingFaceDetection)
                {
                    return;
                }
                processingFaceDetection = true;
            }

            AVCaptureConnection avcaptureconnection = stillImageOutput.ConnectionFromMediaType(AVMediaType.Video);

            // Toggle the thumbnail's background color as visual feedback that a capture cycle is running.
            ivPictureTaken.BackgroundColor = (ivPictureTaken.BackgroundColor == UIColor.Blue) ? UIColor.Black : UIColor.Blue;

            await Task.Delay(1000);

            CMSampleBuffer sampleBuffer = await stillImageOutput.CaptureStillImageTaskAsync(avcaptureconnection);

            foreach (var face in faces)
            {
                int faceId = (int)(face as AVMetadataFaceObject).FaceID;
                if (faceViews != null && faceViews.ContainsKey(faceId))
                {
                    var view = faceViews[faceId];
                    view.Frame = CGRect.Empty;
                    view.RemoveFromSuperview();
                }
            }
            teardownAVFoundationFaceDetection();


            CIImage ciImage            = null;
            UIImage uIImage            = null;
            UIImage transformeduIImage = null;

            using (sampleBuffer) {
                NSData imageData = AVCaptureStillImageOutput.JpegStillToNSData(sampleBuffer);
                arregloJPG = imageData.ToArray();
                ciImage    = CIImage.FromData(imageData);
                uIImage    = new UIImage(imageData);

                // Start from the identity transform; the parameterless struct constructor
                // yields an all-zero matrix, which would collapse the image when concatenated.
                CGAffineTransform cGAffineTransform = CGAffineTransform.MakeIdentity();

                switch (deviceOrientation)
                {
                case UIDeviceOrientation.Portrait:

                    cGAffineTransform = CGAffineTransform.MakeRotation(DegreesToRadians(0));

                    break;

                case UIDeviceOrientation.PortraitUpsideDown:
                    cGAffineTransform = (CGAffineTransform.MakeRotation(DegreesToRadians(180)));
                    break;

                case UIDeviceOrientation.LandscapeLeft:
                    cGAffineTransform = CGAffineTransform.MakeRotation(DegreesToRadians(90));
                    break;

                case UIDeviceOrientation.LandscapeRight:
                    cGAffineTransform = CGAffineTransform.MakeRotation(DegreesToRadians(-90));
                    //cGAffineTransform.Translate(uIImage.CGImage.Width,0);

                    break;

                case UIDeviceOrientation.FaceUp:
                case UIDeviceOrientation.FaceDown:
                default:
                    break;     // leave the layer in its last known orientation
                }

                var flags = CGBitmapFlags.PremultipliedFirst | CGBitmapFlags.ByteOrder32Little;

                // Create a CGImage on the RGB colorspace from the configured parameter above
                using (var cs = CGColorSpace.CreateDeviceRGB()) {
                    using (CGBitmapContext context = new CGBitmapContext(null, (int)uIImage.CGImage.Width, (int)uIImage.CGImage.Height, uIImage.CGImage.BitsPerComponent, uIImage.CGImage.BytesPerRow, cs, (CGImageAlphaInfo)flags)) {
                        context.ConcatCTM(cGAffineTransform);
                        var cgRect = new CGRect(0, 0, uIImage.CGImage.Width, uIImage.CGImage.Height);
                        context.DrawImage(cgRect, uIImage.CGImage);

                        using (CGImage cgImage2 = context.ToImage()) {
                            transformeduIImage = UIImage.FromImage(cgImage2);
                        }
                    }
                }
            }

            // Use the orientation-corrected image for the preview and downstream processing;
            // the using block above has already disposed the sample buffer.
            NSData nsdata = transformeduIImage.ResizeImageWithAspectRatio(640, 480).AsPNG();

            ivPictureTaken.Image = UIImage.LoadFromData(nsdata);
            ProcessingImage(nsdata);
        }
        private void StartCameraWithCompletionHandler(Action<bool, NSError> completion)
        {
            captureSession = new AVCaptureSession ();
            captureSession.BeginConfiguration ();
            captureDevice = CameraDeviceForPosition (AVCaptureDevicePosition.Back);

            if (captureDevice == null) {
                string message = "Error message back camera - not found";
                string title = "Error";
                ShowErrorMessage (message, title);
                return;
            }

            NSError error;
            AVCaptureDeviceInput deviceInput = AVCaptureDeviceInput.FromDevice (captureDevice, out error);
            if (deviceInput == null) {
                Console.WriteLine ("This error should be handled appropriately in your app -- obtain device input: {0}", error);

                string message = "Error message back camera - can't open.";
                string title = "Error";
                ShowErrorMessage (message, title);
                return;
            }

            captureSession.AddInput (deviceInput);
            stillImageOutput = new AVCaptureStillImageOutput ();

            // Instead of JPEG, the output can also emit uncompressed pixel formats such as BGRA or 420f
            // (see the sketch after this method).
            stillImageOutput.OutputSettings = new NSDictionary (AVVideo.CodecKey, AVVideo.CodecJPEG);
            captureSession.AddOutput (stillImageOutput);
            cameraPreviewView.ConfigureCaptureSession (captureSession, stillImageOutput);
            captureSession.SessionPreset = AVCaptureSession.PresetPhoto;

            captureDeviceFormat = captureDevice.ActiveFormat;
            captureSession.CommitConfiguration ();
            captureSession.StartRunning ();
            maxBracketCount = stillImageOutput.MaxBracketedCaptureStillImageCount;
            PrepareBracketsWithCompletionHandler (completion);
        }
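As the comment inside the method notes, the still image output does not have to produce JPEG. A hedged sketch of the uncompressed alternative, assuming the same stillImageOutput field; AVVideoSettingsUncompressed and CVPixelFormatType.CV32BGRA are the same strongly-typed settings names used elsewhere in this listing, and OutputSettings accepts the settings dictionary just as the JPEG configuration above does.

            // Hypothetical alternative to the JPEG settings above: emit raw BGRA frames.
            var uncompressedSettings = new AVVideoSettingsUncompressed {
                PixelFormatType = CVPixelFormatType.CV32BGRA
            };
            stillImageOutput.OutputSettings = uncompressedSettings.Dictionary;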
Example #28
        private void OnShutter(object sender, EventArgs e)
        {
            if (_imageOutput == null)
            {
                return;
            }
            var orientation = GetOrientation();

            Debug.WriteLine($"Capturing image with device orientation: {orientation.Item1} and image orientation: {orientation.Item2}");

            DispatchQueue.DefaultGlobalQueue.DispatchAsync(() =>
            {
                var videoConnection =
                    _imageOutput.ConnectionFromMediaType(AVMediaType.Video);
                if (videoConnection.SupportsVideoOrientation)
                {
                    videoConnection.VideoOrientation = orientation.Item1;
                }

                _imageOutput.CaptureStillImageAsynchronously(videoConnection,
                                                             (buffer, error) =>
                {
                    Session?.StopRunning();

                    var data = AVCaptureStillImageOutput.JpegStillToNSData(buffer);

                    var image       = new UIImage(data);
                    var imageWidth  = image.Size.Width;
                    var imageHeight = image.Size.Height;

                    var previewWidth = PreviewContainer.Frame.Width;

                    var centerCoordinate = imageHeight * 0.5;

                    var imageRef = image.CGImage.WithImageInRect(
                        new CGRect(centerCoordinate - imageWidth * 0.5, 0, imageWidth,
                                   imageWidth));

                    DispatchQueue.MainQueue.DispatchAsync(() =>
                    {
                        if (Configuration.CropImage)
                        {
                            var resizedImage =
                                new UIImage(imageRef, previewWidth / imageWidth,
                                            orientation.Item2);
                            SaveToPhotosAlbum(resizedImage);
                            _onImage?.Invoke(resizedImage);
                        }
                        else
                        {
                            SaveToPhotosAlbum(image);
                            _onImage?.Invoke(image);
                        }

                        Session?.StopRunning();
                        Session      = null;
                        Device       = null;
                        _imageOutput = null;
                    });
                });
            });
        }
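GetOrientation is called above but not included in the example. A hypothetical reconstruction of what such a helper might look like, assuming it pairs the current device orientation with matching capture and image orientations (the code above reads them as Item1 and Item2); the actual project's mapping may differ.

        private Tuple<AVCaptureVideoOrientation, UIImageOrientation> GetOrientation()
        {
            // Note the mirrored mapping: a device rotated LandscapeLeft films in LandscapeRight.
            switch (UIDevice.CurrentDevice.Orientation)
            {
            case UIDeviceOrientation.LandscapeLeft:
                return Tuple.Create(AVCaptureVideoOrientation.LandscapeRight, UIImageOrientation.Up);
            case UIDeviceOrientation.LandscapeRight:
                return Tuple.Create(AVCaptureVideoOrientation.LandscapeLeft, UIImageOrientation.Up);
            case UIDeviceOrientation.PortraitUpsideDown:
                return Tuple.Create(AVCaptureVideoOrientation.PortraitUpsideDown, UIImageOrientation.Left);
            default:
                return Tuple.Create(AVCaptureVideoOrientation.Portrait, UIImageOrientation.Right);
            }
        }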
Example #29
        public override void FinishedLaunching(UIApplication application)
        {
            // Create a new capture session
            Session = new AVCaptureSession ();
            Session.SessionPreset = AVCaptureSession.PresetMedium;

            // Create a device input
            CaptureDevice = GetFrontCamera();
            if (CaptureDevice == null) {
                // Video capture not supported, abort
                Console.WriteLine ("Video recording not supported on this device");
                CameraAvailable = false;
                return;
            }

            // Prepare device for configuration
            CaptureDevice.LockForConfiguration (out Error);
            if (Error != null) {
                // There has been an issue, abort
                Console.WriteLine ("Error: {0}", Error.LocalizedDescription);
                CaptureDevice.UnlockForConfiguration ();
                return;
            }

            // Configure stream for 15 frames per second (fps)
            CaptureDevice.ActiveVideoMinFrameDuration = new CMTime (1, 15);

            // Unlock configuration
            CaptureDevice.UnlockForConfiguration ();

            // Get input from capture device
            Input = AVCaptureDeviceInput.FromDevice (CaptureDevice);
            if (Input == null) {
                // Error, report and abort
                Console.WriteLine ("Unable to gain input from capture device.");
                CameraAvailable = false;
                return;
            }

            // Attach input to session
            Session.AddInput (Input);

            // Create a new output
            var output = new AVCaptureVideoDataOutput ();
            var settings = new AVVideoSettingsUncompressed ();
            settings.PixelFormatType = CVPixelFormatType.CV32BGRA;
            output.WeakVideoSettings = settings.Dictionary;

            // Configure and attach to the output to the session
            Queue = new DispatchQueue ("ManCamQueue");
            Recorder = new OutputRecorder ();
            output.SetSampleBufferDelegate (Recorder, Queue);
            Session.AddOutput (output);

            // Configure and attach a still image output for bracketed capture
            StillImageOutput = new AVCaptureStillImageOutput ();
            var dict = new NSMutableDictionary();
            dict[AVVideo.CodecKey] = new NSNumber((int) AVVideoCodec.JPEG);
            StillImageOutput.OutputSettings = dict;
            Session.AddOutput (StillImageOutput);

            // Let tabs know that a camera is available
            CameraAvailable = true;
        }
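OutputRecorder is created above but never shown. A minimal sketch, assuming the standard Xamarin.iOS pattern of subclassing AVCaptureVideoDataOutputSampleBufferDelegate and assuming DisplayView (assigned in the next example) is a UIImageView; the real class may do more.

        public class OutputRecorder : AVCaptureVideoDataOutputSampleBufferDelegate
        {
            public UIImageView DisplayView { get; set; }

            public override void DidOutputSampleBuffer(AVCaptureOutput captureOutput, CMSampleBuffer sampleBuffer, AVCaptureConnection connection)
            {
                try
                {
                    var image = GetImageFromSampleBuffer(sampleBuffer);
                    DisplayView?.BeginInvokeOnMainThread(() => DisplayView.Image = image);
                }
                finally
                {
                    // AVFoundation has a fixed buffer pool and stops delivering
                    // frames if sample buffers are not disposed promptly.
                    sampleBuffer.Dispose();
                }
            }

            UIImage GetImageFromSampleBuffer(CMSampleBuffer sampleBuffer)
            {
                // Works because the session's video settings request CV32BGRA above.
                using (var pixelBuffer = sampleBuffer.GetImageBuffer() as CVPixelBuffer)
                {
                    pixelBuffer.Lock(CVPixelBufferLock.None);
                    var flags = CGBitmapFlags.PremultipliedFirst | CGBitmapFlags.ByteOrder32Little;
                    using (var cs = CGColorSpace.CreateDeviceRGB())
                    using (var context = new CGBitmapContext(pixelBuffer.BaseAddress,
                        (int)pixelBuffer.Width, (int)pixelBuffer.Height, 8,
                        (int)pixelBuffer.BytesPerRow, cs, (CGImageAlphaInfo)flags))
                    using (var cgImage = context.ToImage())
                    {
                        pixelBuffer.Unlock(CVPixelBufferLock.None);
                        return UIImage.FromImage(cgImage);
                    }
                }
            }
        }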
Example #30
        /// <summary>
        /// Called after the controller's view has been loaded into memory.
        /// </summary>
        public override void ViewDidLoad()
        {
            base.ViewDidLoad();

            // Hide no camera label
            NoCamera.Hidden = ThisApp.CameraAvailable;

            // Attach to camera view
            ThisApp.Recorder.DisplayView = CameraView;

            // Setup scrolling area
            ScrollView.ContentSize = new CGSize(CameraView.Frame.Width * 4f, CameraView.Frame.Height);

            // Add output views
            Output.Add(BuildOutputView(1));
            Output.Add(BuildOutputView(2));
            Output.Add(BuildOutputView(3));

            // Create preset settings
            var Settings = new AVCaptureBracketedStillImageSettings[] {
                AVCaptureAutoExposureBracketedStillImageSettings.Create(-2f),
                AVCaptureAutoExposureBracketedStillImageSettings.Create(0f),
                AVCaptureAutoExposureBracketedStillImageSettings.Create(2f)
            };

            OutputIndex = Settings.Length;

            // Wire up the capture button
            CaptureButton.TouchUpInside += (sender, e) => {
                // Ignore taps while a previous bracketed capture is still in progress
                if (OutputIndex < Settings.Length)
                {
                    return;
                }

                OutputIndex = 0;

                // Tell the camera that we are getting ready to do a bracketed capture
                ThisApp.StillImageOutput.PrepareToCaptureStillImageBracket(ThisApp.StillImageOutput.Connections [0], Settings, (ready, err) => {
                    // Was there an error, if so report it
                    if (err != null)
                    {
                        Console.WriteLine("Error: {0}", err.LocalizedDescription);
                    }
                });

                // Ask the camera to snap a bracketed capture
                ThisApp.StillImageOutput.CaptureStillImageBracket(ThisApp.StillImageOutput.Connections [0], Settings, (sampleBuffer, settings, err) => {
                    // Convert raw image stream into a Core Image Image
                    var imageData = AVCaptureStillImageOutput.JpegStillToNSData(sampleBuffer);
                    var image     = CIImage.FromData(imageData);

                    // Display the resulting image
                    Output [OutputIndex++].Image = UIImage.FromImage(image);

                    // IMPORTANT: You must release the buffer because AVFoundation has a fixed number
                    // of buffers and will stop delivering frames if it runs out.
                    sampleBuffer.Dispose();
                });
            };
        }
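BuildOutputView is called in ViewDidLoad but not shown. A hypothetical sketch, assuming it stacks one UIImageView per bracketed exposure side-by-side inside ScrollView (whose ContentSize is four camera-view widths above); the actual helper may differ.

        private UIImageView BuildOutputView(int n)
        {
            // Place each output view one camera-view width to the right of the previous one.
            var view = new UIImageView(new CGRect(CameraView.Frame.Width * n, 0,
                CameraView.Frame.Width, CameraView.Frame.Height))
            {
                ContentMode = UIViewContentMode.ScaleAspectFill
            };
            ScrollView.AddSubview(view);
            return view;
        }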
Example #31
        private async void SnapStillImage()
        {
            if ((m_videoDevice != null) && (m_stillImageOutput != null))
            {
                if (m_videoDevice.HasFlash && m_videoDevice.IsFlashModeSupported(AVCaptureFlashMode.Auto))
                {
                    NSError error;
                    if (m_videoDevice.LockForConfiguration(out error))
                    {
                        m_videoDevice.FlashMode = AVCaptureFlashMode.Auto;
                        m_videoDevice.UnlockForConfiguration();
                    }
                }

                AVCaptureConnection connection = m_stillImageOutput.ConnectionFromMediaType(AVMediaType.Video);
                var imageDataSampleBuffer      = await m_stillImageOutput.CaptureStillImageTaskAsync(connection);   // Capture a compressed still image of the current frame

                var imageData = AVCaptureStillImageOutput.JpegStillToNSData(imageDataSampleBuffer);                 // Extract the JPEG data from the captured frame

                // RequestAuthorization(handler): the handler runs after the user responds to the permissions dialog.
                PHPhotoLibrary.RequestAuthorization(status => {
                    if (status == PHAuthorizationStatus.Authorized)
                    {   // The user granted photo library access
                        // To preserve the metadata, we create an asset from the JPEG NSData representation.
                        // Note that creating an asset from a UIImage discards the metadata.

                        // In iOS 9, we can use AddResource method on PHAssetCreationRequest class.
                        // In iOS 8, we save the image to a temporary file and use +[PHAssetChangeRequest creationRequestForAssetFromImageAtFileURL:].

                        if (UIDevice.CurrentDevice.CheckSystemVersion(9, 0))
                        {
                            //PHPhotoLibrary.SharedPhotoLibrary returns the shared photo library object.
                            //PerformChanges (changeHandler, completionHandler): both handlers are lambdas.
                            PHPhotoLibrary.SharedPhotoLibrary.PerformChanges(() => {
                                var request = PHAssetCreationRequest.CreationRequestForAsset();
                                request.AddResource(PHAssetResourceType.Photo, imageData, null);        // Save the captured photo
                            }, (success, err) => {
                                if (!success)
                                {
                                    Console.WriteLine("Error occurred while saving image to photo library: {0}", err);
                                }
                            });
                        }
                        else
                        {   // iOS 8: save the image to a temporary file and create the asset from its URL
                            string outputFileName  = NSProcessInfo.ProcessInfo.GloballyUniqueString;
                            string tmpDir          = Path.GetTempPath();
                            string outputFilePath  = Path.Combine(tmpDir, outputFileName);
                            string outputFilePath2 = Path.ChangeExtension(outputFilePath, "jpg");
                            NSUrl temporaryFileUrl = new NSUrl(outputFilePath2, false);

                            PHPhotoLibrary.SharedPhotoLibrary.PerformChanges(() => {
                                NSError error = null;
                                if (imageData.Save(temporaryFileUrl, NSDataWritingOptions.Atomic, out error))
                                {
                                    PHAssetChangeRequest.FromImage(temporaryFileUrl);
                                }
                                else
                                {
                                    Console.WriteLine("Error occured while writing image data to a temporary file: {0}", error);
                                }
                            }, (success, error) => {
                                if (!success)
                                {
                                    Console.WriteLine("Error occurred while saving image to photo library: {0}", error);
                                }

                                // Delete the temporary file.
                                NSError deleteError;
                                NSFileManager.DefaultManager.Remove(temporaryFileUrl, out deleteError);
                            });
                        }
                    }
                });
            }
        }
Example #32
        public async override void ViewDidLoad()
        {
            base.ViewDidLoad();

            // Disable UI. The UI is enabled if and only if the session starts running.
            CameraButton.Enabled = false;
            RecordButton.Enabled = false;
            StillButton.Enabled  = false;

            // Create the AVCaptureSession.
            Session = new AVCaptureSession();

            // Setup the preview view.
            PreviewView.Session = Session;

            // Communicate with the session and other session objects on this queue.
            SessionQueue = new DispatchQueue("session queue");
            SetupResult  = AVCamSetupResult.Success;

            // Check video authorization status. Video access is required and audio access is optional.
            // If audio access is denied, audio is not recorded during movie recording.
            switch (AVCaptureDevice.GetAuthorizationStatus(AVMediaType.Video))
            {
            // The user has previously granted access to the camera.
            case AVAuthorizationStatus.Authorized:
                break;

            // The user has not yet been presented with the option to grant video access.
            // We suspend the session queue to delay session setup until the access request has completed to avoid
            // asking the user for audio access if video access is denied.
            // Note that audio access will be implicitly requested when we create an AVCaptureDeviceInput for audio during session setup.
            case AVAuthorizationStatus.NotDetermined:
                SessionQueue.Suspend();
                var granted = await AVCaptureDevice.RequestAccessForMediaTypeAsync(AVMediaType.Video);

                if (!granted)
                {
                    SetupResult = AVCamSetupResult.CameraNotAuthorized;
                }
                SessionQueue.Resume();
                break;

            // The user has previously denied access.
            default:
                SetupResult = AVCamSetupResult.CameraNotAuthorized;
                break;
            }

            // Setup the capture session.
            // In general it is not safe to mutate an AVCaptureSession or any of its inputs, outputs, or connections from multiple threads at the same time.
            // Why not do all of this on the main queue?
            // Because AVCaptureSession.StartRunning is a blocking call which can take a long time. We dispatch session setup to the sessionQueue
            // so that the main queue isn't blocked, which keeps the UI responsive.
            SessionQueue.DispatchAsync(() => {
                if (SetupResult != AVCamSetupResult.Success)
                {
                    return;
                }

                backgroundRecordingID = -1;
                NSError error;
                AVCaptureDevice videoDevice           = CreateDevice(AVMediaType.Video, AVCaptureDevicePosition.Back);
                AVCaptureDeviceInput videoDeviceInput = AVCaptureDeviceInput.FromDevice(videoDevice, out error);
                if (videoDeviceInput == null)
                {
                    Console.WriteLine("Could not create video device input: {0}", error);
                    SetupResult = AVCamSetupResult.SessionConfigurationFailed;
                    return;
                }

                Session.BeginConfiguration();
                if (Session.CanAddInput(videoDeviceInput))
                {
                    Session.AddInput(VideoDeviceInput = videoDeviceInput);
                    DispatchQueue.MainQueue.DispatchAsync(() => {
                        // Why are we dispatching this to the main queue?
                        // Because AVCaptureVideoPreviewLayer is the backing layer for PreviewView and UIView
                        // can only be manipulated on the main thread.
                        // Note: As an exception to the above rule, it is not necessary to serialize video orientation changes
                        // on the AVCaptureVideoPreviewLayer’s connection with other session manipulation.
                        // Use the status bar orientation as the initial video orientation. Subsequent orientation changes are handled by
                        // ViewWillTransitionToSize method.
                        UIInterfaceOrientation statusBarOrientation       = UIApplication.SharedApplication.StatusBarOrientation;
                        AVCaptureVideoOrientation initialVideoOrientation = AVCaptureVideoOrientation.Portrait;
                        if (statusBarOrientation != UIInterfaceOrientation.Unknown)
                        {
                            initialVideoOrientation = (AVCaptureVideoOrientation)(long)statusBarOrientation;
                        }

                        var previewLayer = (AVCaptureVideoPreviewLayer)PreviewView.Layer;
                        previewLayer.Connection.VideoOrientation = initialVideoOrientation;
                    });
                }
                else
                {
                    Console.WriteLine("Could not add video device input to the session");
                    SetupResult = AVCamSetupResult.SessionConfigurationFailed;
                }

                AVCaptureDevice audioDevice           = AVCaptureDevice.DefaultDeviceWithMediaType(AVMediaType.Audio);
                AVCaptureDeviceInput audioDeviceInput = AVCaptureDeviceInput.FromDevice(audioDevice, out error);
                if (audioDeviceInput == null)
                {
                    Console.WriteLine("Could not create audio device input: {0}", error);
                }
                else if (Session.CanAddInput(audioDeviceInput))
                {
                    Session.AddInput(audioDeviceInput);
                }
                else
                {
                    Console.WriteLine("Could not add audio device input to the session");
                }

                var movieFileOutput = new AVCaptureMovieFileOutput();
                if (Session.CanAddOutput(movieFileOutput))
                {
                    Session.AddOutput(MovieFileOutput = movieFileOutput);
                    AVCaptureConnection connection    = movieFileOutput.ConnectionFromMediaType(AVMediaType.Video);
                    if (connection.SupportsVideoStabilization)
                    {
                        connection.PreferredVideoStabilizationMode = AVCaptureVideoStabilizationMode.Auto;
                    }
                }
                else
                {
                    Console.WriteLine("Could not add movie file output to the session");
                    SetupResult = AVCamSetupResult.SessionConfigurationFailed;
                }

                var stillImageOutput = new AVCaptureStillImageOutput();
                if (Session.CanAddOutput(stillImageOutput))
                {
                    stillImageOutput.CompressedVideoSetting = new AVVideoSettingsCompressed {
                        Codec = AVVideoCodec.JPEG
                    };
                    Session.AddOutput(StillImageOutput = stillImageOutput);
                }
                else
                {
                    Console.WriteLine("Could not add still image output to the session");
                    SetupResult = AVCamSetupResult.SessionConfigurationFailed;
                }

                Session.CommitConfiguration();
            });
        }
        public async Task <NSData> CapturePhoto()
        {
            var videoConnection = stillImageOutput.ConnectionFromMediaType(AVMediaType.Video);
            var sampleBuffer    = await stillImageOutput.CaptureStillImageTaskAsync(videoConnection);

            var jpegData = AVCaptureStillImageOutput.JpegStillToNSData(sampleBuffer);
            var image    = UIImage.LoadFromData(jpegData);

            UIDeviceOrientation orientation            = UIDevice.CurrentDevice.Orientation;
            AVCaptureConnection previewLayerConnection = this.previewLayer.Connection;
            var photo = new UIImage(jpegData);

            if (previewLayerConnection.SupportsVideoOrientation)
            {
                switch (orientation)
                {
                case UIDeviceOrientation.PortraitUpsideDown:
                    photo = new UIImage(image.CGImage, 1.0f, UIImageOrientation.Up);
                    break;

                case UIDeviceOrientation.LandscapeLeft:
                    photo = new UIImage(image.CGImage, photo.CurrentScale, UIImageOrientation.Up);
                    break;

                case UIDeviceOrientation.LandscapeRight:
                    var landscapeImage = new UIImage(image.CGImage, photo.CurrentScale, UIImageOrientation.Down);
                    return ScaleAndRotateImage(landscapeImage, UIImageOrientation.Down).AsJPEG();

                case UIDeviceOrientation.Portrait:
                    var portraitImage = new UIImage(image.CGImage, 1.0f, image.Orientation);
                    return ScaleAndRotateImage(portraitImage, image.Orientation).AsJPEG();

                default:
                    var fallbackImage = new UIImage(image.CGImage, 1.0f, image.Orientation);
                    return ScaleAndRotateImage(fallbackImage, image.Orientation).AsJPEG();
                }
            }
            LayoutSubviews();
            return photo.AsJPEG();
        }
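ScaleAndRotateImage is used throughout CapturePhoto but not included in the example. A minimal sketch, assuming it simply redraws the image so the requested orientation is baked into the pixel data; the original helper may also downscale.

        private UIImage ScaleAndRotateImage(UIImage image, UIImageOrientation orientation)
        {
            // Re-wrap the CGImage with the requested orientation, then redraw so the
            // orientation is applied to the pixels rather than stored as metadata.
            var oriented = new UIImage(image.CGImage, image.CurrentScale, orientation);
            UIGraphics.BeginImageContextWithOptions(oriented.Size, false, oriented.CurrentScale);
            oriented.Draw(new CGRect(CGPoint.Empty, oriented.Size));
            var normalized = UIGraphics.GetImageFromCurrentImageContext();
            UIGraphics.EndImageContext();
            return normalized;
        }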
Example #34
        public Task <OperationResult> Setup(bool enableAudioRecording, bool enableStillImageCapture = false, UIInterfaceOrientation orientation = UIInterfaceOrientation.Portrait, int numberOfCameras = 1)
        {
            TaskCompletionSource <OperationResult> tcs = new TaskCompletionSource <OperationResult>();
            var warnings = new List <string>();

            NumberOfCameras = numberOfCameras;

            _enableAudioRecording    = enableAudioRecording;
            _enableStillImageCapture = enableStillImageCapture;
            _session = new AVCaptureSession();

            _backgroundRecordingID = -1;
            NSError error;
            var     result = AVCaptureDeviceFactory.CreateDevice(AVMediaType.Video, AVCaptureDevicePosition.Back);

            if (!result.IsSuccessful)
            {
                _setupResult = CameraSetupResult.SessionConfigurationFailed;
                tcs.SetResult(OperationResult.AsFailure("No video devices found, probably running in the simulator"));
                return(tcs.Task);
            }

            _videoDeviceInput = AVCaptureDeviceInput.FromDevice(result.Result, out error);

            if (_videoDeviceInput == null)
            {
                _setupResult = CameraSetupResult.SessionConfigurationFailed;
                tcs.SetResult(OperationResult.AsFailure(@"Could not create video device input: {error}"));
                return(tcs.Task);
            }

            _session.BeginConfiguration();
            if (_session.CanAddInput(_videoDeviceInput))
            {
                _session.AddInput(_videoDeviceInput);

                var initialVideoOrientation = (AVCaptureVideoOrientation)(long)orientation;
                PreviewLayer.Session      = _session;
                PreviewLayer.VideoGravity = AVLayerVideoGravity.ResizeAspectFill;
                PreviewLayer.Connection.VideoOrientation = initialVideoOrientation;
            }
            else
            {
                _setupResult = CameraSetupResult.SessionConfigurationFailed;
                tcs.SetResult(OperationResult.AsFailure("Could not add video device input to the session"));
                return(tcs.Task);
            }

            if (_enableAudioRecording)
            {
                AVCaptureDevice      audioDevice      = AVCaptureDevice.GetDefaultDevice(AVMediaType.Audio);
                AVCaptureDeviceInput audioDeviceInput = AVCaptureDeviceInput.FromDevice(audioDevice, out error);
                if (audioDeviceInput == null)
                {
                    warnings.Add(@"Could not create audio device input: {error}");
                }
                else
                {
                    if (_session.CanAddInput(audioDeviceInput))
                    {
                        _session.AddInput(audioDeviceInput);
                    }
                    else
                    {
                        warnings.Add("Could not add audio device input to the session");
                    }
                }
            }

            _movieFileOutput = new AVCaptureMovieFileOutput();
            if (_session.CanAddOutput(_movieFileOutput))
            {
                _session.AddOutput(_movieFileOutput);
                AVCaptureConnection connection = _movieFileOutput.ConnectionFromMediaType(AVMediaType.Video);
                if (connection.SupportsVideoStabilization)
                {
                    connection.PreferredVideoStabilizationMode = AVCaptureVideoStabilizationMode.Auto;
                }
            }
            else
            {
                warnings.Add("Could not add movie file output to the session");
                _setupResult = CameraSetupResult.SessionConfigurationFailed;
            }

            if (_enableStillImageCapture)
            {
                _stillImageOutput = new AVCaptureStillImageOutput();
                if (_session.CanAddOutput(_stillImageOutput))
                {
                    _stillImageOutput.CompressedVideoSetting = new AVVideoSettingsCompressed
                    {
                        Codec = AVVideoCodec.JPEG
                    };
                    _session.AddOutput(_stillImageOutput);
                }
                else
                {
                    warnings.Add("Could not add still image output to the session");
                    _setupResult = CameraSetupResult.SessionConfigurationFailed;
                }
            }

            _session.CommitConfiguration();

            _setupResult = CameraSetupResult.Success;
            tcs.SetResult(OperationResult.AsSuccess(string.Empty, warnings));

            AddObservers();

            return(tcs.Task);
        }
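A hedged usage sketch for the Setup method above. The host class name CameraController is hypothetical, and OperationResult.IsSuccessful is assumed to mirror the IsSuccessful check the method itself performs on the device-factory result.

            // Hypothetical call site, e.g. from a view controller's ViewDidLoad:
            var camera = new CameraController();
            OperationResult setupResult = await camera.Setup(
                enableAudioRecording: true,
                enableStillImageCapture: true,
                orientation: UIInterfaceOrientation.Portrait);

            if (!setupResult.IsSuccessful)
                Console.WriteLine("Camera setup failed");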