Beispiel #1
0
        /// <summary>
        /// Re-creates the asset reader and its track output for the video asset
        /// and starts reading from the beginning.
        /// </summary>
        /// <returns>true when the reader was rebuilt and reading started; false otherwise.</returns>
        public bool RestartReading()
        {
            this.assetReader = AVAssetReader.FromAsset(this.videoAsset, out NSError creationError);
            if (creationError != null)
            {
                Console.WriteLine($"Failed to create AVAssetReader object: {creationError}");
                return(false);
            }

            // Decode to bi-planar 4:2:0 YpCbCr (full range) for downstream processing.
            var outputSettings = new AVVideoSettingsUncompressed {
                PixelFormatType = CVPixelFormatType.CV420YpCbCr8BiPlanarFullRange
            };

            this.videoAssetReaderOutput = new AVAssetReaderTrackOutput(this.videoTrack, outputSettings);
            if (this.videoAssetReaderOutput == null)
            {
                return(false);
            }

            // Copy sample data so buffers stay valid after the reader advances.
            this.videoAssetReaderOutput.AlwaysCopiesSampleData = true;

            if (!this.assetReader.CanAddOutput(this.videoAssetReaderOutput))
            {
                return(false);
            }

            this.assetReader.AddOutput(this.videoAssetReaderOutput);
            return(this.assetReader.StartReading());
        }
        /// <summary>
        /// Builds the capture session: default camera as input, uncompressed
        /// 32-bit BGRA frames delivered to the output recorder on a dedicated
        /// queue, then starts the session.
        /// </summary>
        /// <returns>true when the session was started; false when no device or input is available.</returns>
        bool SetupCaptureSession()
        {
            // Medium preset keeps the data volume manageable; raise it if the
            // downstream processing can cope with more.
            session = new AVCaptureSession
            {
                SessionPreset = AVCaptureSession.PresetMedium
            };

            // Attach the default video camera as the session input.
            var device = AVCaptureDevice.DefaultDeviceWithMediaType(AVMediaType.Video);

            if (device == null)
            {
                // No camera available: render the error text onto a blank image.
                var canvas = new Image <Bgr, Byte> (512, 512, new Bgr(255, 255, 255));
                CvInvoke.PutText(
                    canvas,
                    "Capture device not found.",
                    new Point(10, 200),
                    FontFace.HersheyComplex,
                    1,
                    new MCvScalar(),
                    2);
                ImageView.Image = canvas.ToUIImage();
                return(false);
            }

            var deviceInput = AVCaptureDeviceInput.FromDevice(device);

            if (deviceInput == null)
            {
                Console.WriteLine("No input device");
                return(false);
            }
            session.AddInput(deviceInput);

            // Deliver uncompressed 32-bit BGRA frames to the recorder.
            var output = new AVCaptureVideoDataOutput
            {
                UncompressedVideoSetting = new AVVideoSettingsUncompressed
                {
                    PixelFormatType = CVPixelFormatType.CV32BGRA
                }

                // To cap the frame rate, e.g. at 15 frames per second:
                //MinFrameDuration = new CMTime (1, 15)
            };

            // Process frames on a dedicated queue via the output recorder.
            queue          = new DispatchQueue("myQueue");
            outputRecorder = new OutputRecorder(ImageView);
            output.SetSampleBufferDelegateQueue(outputRecorder, queue);
            session.AddOutput(output);

            session.StartRunning();
            return(true);
        }
Beispiel #3
0
        /// <summary>
        /// Configures the capture device (continuous autofocus when supported),
        /// attaches it to the capture session together with a BGRA video data
        /// output, and starts the session. Does nothing when no device is found.
        /// </summary>
        public void ConfigureDeviceAndStart()
        {
            var device = GetDevice();

            if (device == null)
            {
                return;
            }

            try
            {
                // Enable continuous autofocus where the hardware supports it.
                if (device.LockForConfiguration(out var error))
                {
                    if (device.IsFocusModeSupported(AVCaptureFocusMode.ContinuousAutoFocus))
                    {
                        device.FocusMode = AVCaptureFocusMode.ContinuousAutoFocus;
                    }

                    device.UnlockForConfiguration();
                }
                else if (error != null)
                {
                    // FIX: the lock error was silently discarded before; continue
                    // without focus configuration but record why.
                    Console.WriteLine($"Could not lock device for configuration: {error.LocalizedDescription}");
                }

                // Configure Input
                var input = AVCaptureDeviceInput.FromDevice(device, out var error2);
                if (input == null)
                {
                    // FIX: FromDevice returns null (e.g. camera permission denied);
                    // previously the null input was passed to AddInput and crashed.
                    Console.WriteLine($"Could not create device input: {error2?.LocalizedDescription}");
                    return;
                }

                _captureSession.AddInput(input);

                // Configure Output: uncompressed 32-bit BGRA frames.
                var settings = new AVVideoSettingsUncompressed()
                {
                    PixelFormatType = CoreVideo.CVPixelFormatType.CV32BGRA
                };

                var videoOutput = new AVCaptureVideoDataOutput
                {
                    WeakVideoSettings             = settings.Dictionary,
                    AlwaysDiscardsLateVideoFrames = true
                };

                var videoCaptureQueue = new DispatchQueue("Video Queue");
                videoOutput.SetSampleBufferDelegateQueue(new OutputRecorder(View, _shapeLayer), videoCaptureQueue);

                if (_captureSession.CanAddOutput(videoOutput))
                {
                    _captureSession.AddOutput(videoOutput);
                }

                // Start session
                _captureSession.StartRunning();
            }
            catch (Exception e)
            {
                // FIX: WriteLine instead of Write so log entries do not run together.
                Console.WriteLine(e);
            }
        }
Beispiel #4
0
        /// <summary>
        /// Builds the capture session: default camera as input, uncompressed
        /// 32-bit BGRA frames delivered to the output recorder, then starts it.
        /// Renders an error message into the image view when setup fails.
        /// </summary>
        private void SetupCaptureSession()
        {
            // Medium preset keeps the data volume manageable; raise it if the
            // downstream processing can cope with more.
            session = new AVCaptureSession
            {
                SessionPreset = AVCaptureSession.PresetMedium
            };

            // Attach the default video camera as the session input.
            var device = AVCaptureDevice.DefaultDeviceWithMediaType(AVMediaType.Video);

            if (device == null)
            {
                RenderImageMessage("Capture device not found.");

                return;
            }

            var deviceInput = AVCaptureDeviceInput.FromDevice(device);

            if (deviceInput == null)
            {
                RenderImageMessage("No input device");

                return;
            }
            session.AddInput(deviceInput);

            // Deliver uncompressed 32-bit BGRA frames to the recorder.
            var output = new AVCaptureVideoDataOutput
            {
                UncompressedVideoSetting = new AVVideoSettingsUncompressed
                {
                    PixelFormatType = CVPixelFormatType.CV32BGRA
                }

                // To cap the frame rate, e.g. at 15 frames per second:
                //MinFrameDuration = new CMTime (1, 15)
            };

            // Process frames on a dedicated queue via the output recorder.
            queue          = new DispatchQueue("myQueue");
            outputRecorder = new OutputRecorder(ImageView);
            output.SetSampleBufferDelegateQueue(outputRecorder, queue);
            session.AddOutput(output);

            session.StartRunning();
        }
Beispiel #5
0
        /// <summary>
        /// Prepares the video data output: BGRA pixel format, late frames
        /// discarded, and sample buffers delivered to the capture delegate on a
        /// dedicated dispatch queue.
        /// </summary>
        void SetupVideoCapture()
        {
            videoCaptureDelegate = new XCameraVideoOutputDelegate(FrameCapturedHandler);
            queue = new DispatchQueue("XCamera.CameraQueue");

            videoOutput = new AVCaptureVideoDataOutput
            {
                UncompressedVideoSetting = new AVVideoSettingsUncompressed
                {
                    PixelFormatType = CVPixelFormatType.CV32BGRA
                },
                AlwaysDiscardsLateVideoFrames = true
            };
            videoOutput.SetSampleBufferDelegateQueue(videoCaptureDelegate, queue);
        }
        /// <summary>
        /// Creates a camera stream and adds it to the view: attaches a preview
        /// layer, the default camera as input, and a BGRA video data output whose
        /// frames are forwarded to <c>HandleVideFrameImage</c>, then starts the session.
        /// </summary>
        private async Task SetupLiveCameraStream()
        {
            captureSession = new AVCaptureSession();

            // SETUP THE PREVIEW OF THE CAPTURE SESSION
            videoPreviewLayer = new AVCaptureVideoPreviewLayer(captureSession)
            {
                Frame = this.Frame
            };

            this.Layer.AddSublayer(videoPreviewLayer);

            // SETUP THE INPUT DEVICE FOR THE SESSION
            var captureDevice = AVCaptureDevice.GetDefaultDevice(AVMediaType.Video);

            // FIX: GetDefaultDevice returns null when no camera exists (e.g. the
            // simulator); previously this crashed with a NullReferenceException.
            if (captureDevice == null)
            {
                return;
            }

            ConfigureCameraForDevice(captureDevice);
            captureDeviceInput = AVCaptureDeviceInput.FromDevice(captureDevice);

            // FIX: FromDevice returns null when camera access is denied.
            if (captureDeviceInput == null)
            {
                return;
            }
            captureSession.AddInput(captureDeviceInput);

            // VIDEO OUTPUT BUFFERING METHOD
            var output   = new AVCaptureVideoDataOutput();
            var settings = new AVVideoSettingsUncompressed
            {
                PixelFormatType = CVPixelFormatType.CV32BGRA
            };

            output.WeakVideoSettings = settings.Dictionary;

            Queue    = new DispatchQueue("ManCamQueue");
            Recorder = new OutputRecorder(this, VideoFrameInterval);
            Recorder.OnFrameRecieved += HandleVideFrameImage;
            output.SetSampleBufferDelegate(Recorder, Queue);
            captureSession.AddOutput(output);

            // UI PREPERATION
            AddTargetOverlay();
            textOutputLabel = new UILabel(new CGRect(targetOverlayView.Frame.Width + 10, 10, 100, 100))
            {
                TextColor = UIColor.White,
                Font      = UIFont.BoldSystemFontOfSize(22)
            };

            this.AddSubview(textOutputLabel);

            captureSession.StartRunning();
        }
Beispiel #7
0
        /// <summary>
        /// Sets up the read side (uncompressed decode) and write side (H.264
        /// encode) of the transcode pipeline for the given video track and
        /// bridges them with a sample-buffer channel. No-op for a null track.
        /// </summary>
        /// <param name="videoTrack">The source video track to transcode; may be null.</param>
        void SetupAssetReaserWriterForVideo(AVAssetTrack videoTrack)
        {
            if (videoTrack == null)
            {
                return;
            }

            // Decompress to ARGB with the asset reader
            // (note: the settings actually request 32-bit BGRA).
            var decompSettings = new AVVideoSettingsUncompressed {
                PixelFormatType       = CVPixelFormatType.CV32BGRA,
                AllocateWithIOSurface = null
            };
            AVAssetReaderOutput output = new AVAssetReaderTrackOutput(videoTrack, decompSettings);

            assetReader.AddOutput(output);

            // Get the format description of the track, to fill in attributes of the video stream that we don't want to change
            var formatDescription = (CMVideoFormatDescription)videoTrack.FormatDescriptions.FirstOrDefault();
            // Grab track dimensions from format description
            // (falls back to the track's natural size when no description exists)
            CGSize trackDimensions = formatDescription != null
                                ? formatDescription.GetPresentationDimensions(false, false)
                                : videoTrack.NaturalSize;

            // Grab clean aperture, pixel aspect ratio from format description
            AVVideoCodecSettings compressionSettings = null;

            if (formatDescription != null)
            {
                var cleanApertureDescr    = (NSDictionary)formatDescription.GetExtension(CVImageBuffer.CleanApertureKey);
                var pixelAspectRatioDescr = (NSDictionary)formatDescription.GetExtension(CVImageBuffer.PixelAspectRatioKey);
                compressionSettings = CreateCodecSettingsFor(cleanApertureDescr, pixelAspectRatioDescr);
            }

            // Compress to H.264 with the asset writer
            var videoSettings = new AVVideoSettingsCompressed {
                Codec         = AVVideoCodec.H264,
                Width         = (int)trackDimensions.Width,
                Height        = (int)trackDimensions.Height,
                CodecSettings = compressionSettings
            };
            AVAssetWriterInput input = new AVAssetWriterInput(videoTrack.MediaType, videoSettings);

            // Preserve the source track's orientation in the written output.
            input.Transform = videoTrack.PreferredTransform;
            assetWriter.AddInput(input);

            // Create and save an instance of ReadWriteSampleBufferChannel,
            // which will coordinate the work of reading and writing sample buffers
            videoSampleBufferChannel = new VideoChannel(output, input, transformer);
        }
        /// <summary>
        /// Configures the capture session with the default camera as input and a
        /// BGRA video data output whose sample buffers are delivered to this
        /// instance on a dedicated queue; input failures are logged, not thrown.
        /// </summary>
        void ConfigureAVCaptureSession()
        {
            session = new AVCaptureSession { SessionPreset = sessionPreset };

            var camera = AVCaptureDevice.GetDefaultDevice(AVMediaType.Video);

            try
            {
                var cameraInput = new AVCaptureDeviceInput(camera, out var error);

                if (error != null)
                {
                    throw new Exception(error.LocalizedDescription);
                }

                session.AddInput(cameraInput);
            }
            catch (Exception e)
            {
                Console.WriteLine(e.Message);
            }

            // This instance acts as the sample buffer delegate.
            var output = new AVCaptureVideoDataOutput();
            output.SetSampleBufferDelegateQueue(this, new DispatchQueue("videodataqueue", false));

            // Deliver uncompressed 32-bit BGRA frames.
            output.WeakVideoSettings = new AVVideoSettingsUncompressed()
            {
                PixelFormatType = CVPixelFormatType.CV32BGRA
            }.Dictionary;

            session.AddOutput(output);

            var videoConnection = output.ConnectionFromMediaType(AVMediaType.Video);

            videoConnection.Enabled = true;
        }
Beispiel #9
0
        /// <summary>
        /// Configures the capture session with a mirrored portrait BGRA preview
        /// output and a high-resolution photo output, then starts it running.
        /// </summary>
        /// <param name="previewRequestSize">Requested size of the preview frames.</param>
        /// <returns>The preview that receives the video sample buffers.</returns>
        private CameraPreview DispatchOpenWithPreviewAsync(Size previewRequestSize)
        {
            _session.BeginConfiguration();

            var videoOutput = new AVCaptureVideoDataOutput
            {
                // FIX: the BGRA settings were previously assigned twice (both the
                // typed wrapper and the weak dictionary); once is sufficient.
                UncompressedVideoSetting = new AVVideoSettingsUncompressed
                {
                    PixelFormatType = CVPixelFormatType.CV32BGRA
                },
                AlwaysDiscardsLateVideoFrames = true
            };

            var preview = new CameraPreview(previewRequestSize, new System.Drawing.Size(720, 1280));

            videoOutput.SetSampleBufferDelegateQueue(preview, new DispatchQueue("sample buffer"));

            _session.AddOutput(videoOutput);

            var videoConnection = videoOutput.ConnectionFromMediaType(AVMediaType.Video);

            videoConnection.VideoOrientation = AVCaptureVideoOrientation.Portrait;
            videoConnection.VideoMirrored    = true;

            _photoOutput = new AVCapturePhotoOutput
            {
                IsHighResolutionCaptureEnabled = true
            };

            _session.SessionPreset = AVCaptureSession.Preset1280x720;
            _session.AddOutput(_photoOutput);

            // FIX: prepared photo settings can only be applied once the output is
            // attached to the session (it was previously called before AddOutput).
            // The returned task is deliberately fire-and-forget.
            _ = _photoOutput.SetPreparedPhotoSettingsAsync(new[] { CreatePhotoSettings() });

            _session.CommitConfiguration();
            _session.StartRunning();
            _isRunning = true;
            return(preview);
        }
bool SetupCaptureSession ()
      {
         // Run the capture session at medium resolution; bump the preset if the
         // downstream processing can cope with more data.
         session = new AVCaptureSession () {
            SessionPreset = AVCaptureSession.PresetMedium
         };

         // Hook the default video camera up as the session input.
         var device = AVCaptureDevice.DefaultDeviceWithMediaType (AVMediaType.Video);
         if (device == null) {
            // No camera available: draw the error text onto a white image.
            Image<Bgr, Byte> canvas = new Image<Bgr, byte> (512, 512, new Bgr (255, 255, 255));
            CvInvoke.PutText (
               canvas,
               "Capture device not found.",
               new Point (10, 200),
               FontFace.HersheyComplex,
               1,
               new MCvScalar (),
               2);
            ImageView.Image = canvas.ToUIImage();
            return false;
         }

         var deviceInput = AVCaptureDeviceInput.FromDevice (device);
         if (deviceInput == null) {
            Console.WriteLine ("No input device");
            return false;
         }
         session.AddInput (deviceInput);

         // Deliver uncompressed 32-bit BGRA frames to the output recorder.
         var output = new AVCaptureVideoDataOutput () {
            UncompressedVideoSetting = new AVVideoSettingsUncompressed {
               PixelFormatType = CVPixelFormatType.CV32BGRA
            }

            // To cap the frame rate, e.g. at 15 frames per second:
            //MinFrameDuration = new CMTime (1, 15)
         };

         // Process frames on a dedicated queue via the output recorder.
         queue = new DispatchQueue ("myQueue");
         outputRecorder = new OutputRecorder (ImageView);
         output.SetSampleBufferDelegateQueue (outputRecorder, queue);
         session.AddOutput (output);

         session.StartRunning ();
         return true;
      }
Beispiel #11
0
        /// <summary>
        /// Launch-time camera setup: medium-preset session, default video device
        /// capped at 40 fps, and a BGRA video data output. Sets CameraAvailable
        /// and reports errors through errorCamera on failure.
        /// </summary>
        public override void FinishedLaunching(UIApplication application)
        {
            //Create a new capture session
            Session = new AVCaptureSession();
            Session.SessionPreset = AVCaptureSession.PresetMedium;

            //create a device input
            CaptureDevice = AVCaptureDevice.DefaultDeviceWithMediaType(AVMediaType.Video);

            if (CaptureDevice == null)
            {
                //Video capture not supported, abort camera operation.
                //NOTE: idioms other than Pad/Phone (e.g. TV, CarPlay) leave
                //CameraAvailable untouched, matching the original behavior.
                if (UIDevice.CurrentDevice.UserInterfaceIdiom == UIUserInterfaceIdiom.Pad ||
                    UIDevice.CurrentDevice.UserInterfaceIdiom == UIUserInterfaceIdiom.Phone)
                {
                    errorCamera("No Camera detected", "Seems your " + UIDevice.CurrentDevice.UserInterfaceIdiom + " has no camera. You must have a camera installed to use this feature");
                    CameraAvailable = false;
                }
                return;
            }

            CaptureDevice.LockForConfiguration(out Error);
            if (Error != null)
            {
                Console.WriteLine("Error detected in camera configuration: {0} ", Error.LocalizedDescription);
                CaptureDevice.UnlockForConfiguration();
                return;
            }

            //configure a stream for 40 frames per second fps
            CaptureDevice.ActiveVideoMinFrameDuration = new CMTime(1, 40);

            //unlock configuration
            CaptureDevice.UnlockForConfiguration();

            //get input from capture device
            Input = AVCaptureDeviceInput.FromDevice(CaptureDevice);

            if (Input == null)
            {
                // FIX: removed unreachable `break` statements that followed
                // `return` in each switch case (CS0162 warning).
                switch (UIDevice.CurrentDevice.UserInterfaceIdiom)
                {
                case UIUserInterfaceIdiom.Pad:
                    errorCamera("No Input", "No input detected from the camera on your: " + UIUserInterfaceIdiom.Pad);
                    CameraAvailable = false;
                    return;

                case UIUserInterfaceIdiom.Phone:
                    errorCamera("No Input", "No input detected from the camera on your: " + UIUserInterfaceIdiom.Phone);
                    CameraAvailable = false;
                    return;
                }
                // Other idioms: no input and nothing to report.
                return;
            }

            //attach input to session
            Session.AddInput(Input);

            //create a new output delivering uncompressed 32-bit BGRA frames
            var output   = new AVCaptureVideoDataOutput();
            var settings = new AVVideoSettingsUncompressed();
            settings.PixelFormatType = CVPixelFormatType.CV32BGRA;
            output.WeakVideoSettings = settings.Dictionary;

            //configure and attach to the output to the session
            Queue    = new DispatchQueue("ManCamQueue");
            Recorder = new OutputRecorder();
            output.SetSampleBufferDelegate(Recorder, Queue);
            Session.AddOutput(output);

            CameraAvailable = true;
        }
Beispiel #12
0
        /// <summary>
        /// Launch-time camera setup: medium-preset session, default video device
        /// capped at 15 fps, a BGRA video data output and a JPEG still image
        /// output for bracketed capture. Sets CameraAvailable for the tabs.
        /// </summary>
        public override void FinishedLaunching(UIApplication application)
        {
            // Create a new capture session
            Session = new AVCaptureSession();
            Session.SessionPreset = AVCaptureSession.PresetMedium;

            // Create a device input
            CaptureDevice = AVCaptureDevice.DefaultDeviceWithMediaType(AVMediaType.Video);
            if (CaptureDevice == null)
            {
                // Video capture not supported, abort
                Console.WriteLine("Video recording not supported on this device");
                CameraAvailable = false;
                return;
            }

            // Prepare device for configuration
            CaptureDevice.LockForConfiguration(out Error);
            if (Error != null)
            {
                // There has been an issue, abort
                Console.WriteLine("Error: {0}", Error.LocalizedDescription);
                CaptureDevice.UnlockForConfiguration();
                return;
            }

            // Configure stream for 15 frames per second (fps)
            CaptureDevice.ActiveVideoMinFrameDuration = new CMTime(1, 15);

            // Unlock configuration
            CaptureDevice.UnlockForConfiguration();

            // Get input from capture device
            Input = AVCaptureDeviceInput.FromDevice(CaptureDevice);
            if (Input == null)
            {
                // Error, report and abort
                Console.WriteLine("Unable to gain input from capture device.");
                CameraAvailable = false;
                return;
            }

            // Attach input to session
            Session.AddInput(Input);

            // Create a new output delivering uncompressed 32-bit BGRA frames
            var output   = new AVCaptureVideoDataOutput();
            var settings = new AVVideoSettingsUncompressed();

            settings.PixelFormatType = CVPixelFormatType.CV32BGRA;
            output.WeakVideoSettings = settings.Dictionary;

            // Configure and attach to the output to the session
            Queue    = new DispatchQueue("ManCamQueue");
            Recorder = new OutputRecorder();
            output.SetSampleBufferDelegate(Recorder, Queue);
            Session.AddOutput(output);

            // Configure and attach a still image output for bracketed capture
            StillImageOutput = new AVCaptureStillImageOutput();
            var dict = new NSMutableDictionary();

            dict[AVVideo.CodecKey] = new NSNumber((int)AVVideoCodec.JPEG);
            // FIX: the JPEG codec dictionary was built but never applied, so the
            // still image output silently kept its default settings.
            StillImageOutput.OutputSettings = dict;
            Session.AddOutput(StillImageOutput);

            // Let tabs know that a camera is available
            CameraAvailable = true;
        }
        /// <summary>
        /// Configures the capture pipeline: registers the pinch gesture, selects
        /// the front or back camera, caps the minimum frame duration at 1/24 s,
        /// wires up a BGRA video data output, and starts the session when
        /// previewing is enabled. Returns silently when no matching camera exists.
        /// </summary>
        private void Initialize()
        {
            // Register the pinch gesture (used for zooming).
            SetPinchGesture();

            // Device selection: pick the camera at the requested position.
            var videoDevices   = AVCaptureDevice.DevicesWithMediaType(AVMediaType.Video);
            var cameraPosition = (cameraOptions == CameraOptions.Front) ? AVCaptureDevicePosition.Front : AVCaptureDevicePosition.Back;

            MainDevice = videoDevices.FirstOrDefault(d => d.Position == cameraPosition);

            // FIX: this null check previously ran only after LockForConfiguration,
            // which threw a NullReferenceException when no camera matched.
            if (MainDevice == null)
            {
                return;
            }

            NSError device_error;

            MainDevice.LockForConfiguration(out device_error);
            if (device_error != null)
            {
                Console.WriteLine($"Error: {device_error.LocalizedDescription}");
                MainDevice.UnlockForConfiguration();
                return;
            }
            // Frame-rate setting: at most 24 fps.
            MainDevice.ActiveVideoMinFrameDuration = new CMTime(1, 24);
            MainDevice.UnlockForConfiguration();

            // Maximum zoom factor, capped at 6x.
            MaxZoom = (float)Math.Min(MainDevice.ActiveFormat.VideoMaxZoomFactor, 6);

            // Input configuration.
            NSError error;

            Input = new AVCaptureDeviceInput(MainDevice, out error);
            CaptureSession.AddInput(Input);

            // Output configuration.
            Output = new AVCaptureVideoDataOutput();

            // Per-frame processing queue.
            Queue = new DispatchQueue("myQueue");
            Output.AlwaysDiscardsLateVideoFrames = true;
            Recorder = new OutputRecorder()
            {
                Camera = Camera
            };
            Output.SetSampleBufferDelegate(Recorder, Queue);
            var vSettings = new AVVideoSettingsUncompressed();

            vSettings.PixelFormatType = CVPixelFormatType.CV32BGRA;
            Output.WeakVideoSettings  = vSettings.Dictionary;

            CaptureSession.AddOutput(Output);

            if (IsPreviewing)
            {
                CaptureSession.StartRunning();
            }
        }
Beispiel #14
0
        /// <summary>
        /// Launch-time camera setup using the front camera: medium-preset session
        /// capped at 15 fps, a BGRA video data output, and a JPEG still image
        /// output for bracketed capture. Sets CameraAvailable for the tabs.
        /// </summary>
        public override void FinishedLaunching(UIApplication application)
        {
            // Create a new capture session
            Session = new AVCaptureSession ();
            Session.SessionPreset = AVCaptureSession.PresetMedium;

            // Create a device input
            CaptureDevice = GetFrontCamera();
            if (CaptureDevice == null) {
                // Video capture not supported, abort
                Console.WriteLine ("Video recording not supported on this device");
                CameraAvailable = false;
                return;
            }

            // Prepare device for configuration
            CaptureDevice.LockForConfiguration (out Error);
            if (Error != null) {
                // There has been an issue, abort
                Console.WriteLine ("Error: {0}", Error.LocalizedDescription);
                CaptureDevice.UnlockForConfiguration ();
                return;
            }

            // Configure stream for 15 frames per second (fps)
            CaptureDevice.ActiveVideoMinFrameDuration = new CMTime (1, 15);

            // Unlock configuration
            CaptureDevice.UnlockForConfiguration ();

            // Get input from capture device
            Input = AVCaptureDeviceInput.FromDevice (CaptureDevice);
            if (Input == null) {
                // Error, report and abort
                Console.WriteLine ("Unable to gain input from capture device.");
                CameraAvailable = false;
                return;
            }

            // Attach input to session
            Session.AddInput (Input);

            // Create a new output delivering uncompressed 32-bit BGRA frames
            var output = new AVCaptureVideoDataOutput ();
            var settings = new AVVideoSettingsUncompressed ();
            settings.PixelFormatType = CVPixelFormatType.CV32BGRA;
            output.WeakVideoSettings = settings.Dictionary;

            // Configure and attach to the output to the session
            Queue = new DispatchQueue ("ManCamQueue");
            Recorder = new OutputRecorder ();
            output.SetSampleBufferDelegate (Recorder, Queue);
            Session.AddOutput (output);

            // Configure and attach a still image output for bracketed capture
            StillImageOutput = new AVCaptureStillImageOutput ();
            var dict = new NSMutableDictionary();
            dict[AVVideo.CodecKey] = new NSNumber((int) AVVideoCodec.JPEG);
            // FIX: the JPEG settings dictionary was built but never applied to the
            // output, leaving the still image output at its defaults.
            StillImageOutput.OutputSettings = dict;
            Session.AddOutput (StillImageOutput);

            // Let tabs know that a camera is available
            CameraAvailable = true;
        }
Beispiel #15
0
        /// <summary>
        /// Builds (or rebuilds, when <paramref name="restart"/> is true) the photo
        /// capture session: enumerates available cameras once, picks the chosen or
        /// default device, attaches a photo (iOS 10+) or still-image output plus a
        /// BGRA preview-frame output, and observes the device's focus state.
        /// Any failure is reported through _cameraModule.ErrorMessage.
        /// </summary>
        /// <param name="restart">When true, recreates the session and its outputs.</param>
        private void SetupCamera(bool restart = false)
        {
            try
            {
                // (Re)create the session; populate the camera list only once.
                if (_captureSession == null || restart)
                {
                    _captureSession = new AVCaptureSession
                    {
                        SessionPreset = AVCaptureSession.PresetPhoto
                    };
                    if (!_cameraModule.AvailableCameras.Any())
                    {
                        var deviceTypes = new List <AVCaptureDeviceType>
                        {
                            AVCaptureDeviceType.BuiltInWideAngleCamera,
                            AVCaptureDeviceType.BuiltInTelephotoCamera
                        };
                        // The ultra-wide camera type only exists on iOS 13+.
                        if (UIDevice.CurrentDevice.CheckSystemVersion(13, 0))
                        {
                            deviceTypes.Add(AVCaptureDeviceType.BuiltInUltraWideCamera);
                        }
                        var session = AVCaptureDeviceDiscoverySession.Create(
                            deviceTypes.ToArray(), AVMediaType.Video, AVCaptureDevicePosition.Unspecified);
                        _devices = session.Devices;
                        foreach (var avCaptureDevice in _devices)
                        {
                            _cameraModule.AvailableCameras.Add(new AvailableCamera
                            {
                                DisplayName = avCaptureDevice.LocalizedName,
                                CameraId    = avCaptureDevice.UniqueID,
                                IsFront     = avCaptureDevice.Position == AVCaptureDevicePosition.Front
                            });
                        }
                    }
                }

                SetPreviewOrientation();

                // First call: fall back to the system default device and record it
                // as the chosen camera; later calls honor the stored choice.
                if (_device == null)
                {
                    _device = AVCaptureDevice.GetDefaultDevice(AVMediaTypes.Video);
                    _cameraModule.ChosenCamera = _cameraModule.AvailableCameras.First(c => c.CameraId == _device.UniqueID);
                }
                else
                {
                    _device = AVCaptureDevice.DeviceWithUniqueID(_cameraModule.ChosenCamera.CameraId);
                }

                SetPreviewSizing(_device, restart);

                TurnOffFlashAndSetContinuousAutoMode(_device);

                // iOS 10+ uses AVCapturePhotoOutput; older versions fall back to
                // the deprecated AVCaptureStillImageOutput.
                _is10OrHigher = UIDevice.CurrentDevice.CheckSystemVersion(10, 0);
                var isRestart = false;
                if (_is10OrHigher && (_photoOutput == null || restart))
                {
                    _photoOutput = new AVCapturePhotoOutput
                    {
                        IsHighResolutionCaptureEnabled = true
                    };

                    _captureSession.AddOutput(_photoOutput);
                    isRestart = true;
                }
                else if (!_is10OrHigher && (_stillImageOutput == null || restart))
                {
                    _stillImageOutput = new AVCaptureStillImageOutput
                    {
                        OutputSettings = new NSDictionary(),
                        HighResolutionStillImageOutputEnabled = true
                    };

                    _captureSession.AddOutput(_stillImageOutput);
                    isRestart = true;
                }

                // The preview-frame output and device input are only wired up when
                // the photo output was just (re)created, so they are not added twice.
                if (isRestart)
                {
                    var settings = new AVVideoSettingsUncompressed
                    {
                        PixelFormatType = CVPixelFormatType.CV32BGRA
                    };
                    // Preview frames: BGRA, at most 30 fps, late frames dropped.
                    _previewFrameOutput = new AVCaptureVideoDataOutput
                    {
                        AlwaysDiscardsLateVideoFrames = true,
                        MinFrameDuration         = new CMTime(1, 30),
                        UncompressedVideoSetting = settings
                    };
                    //if (UIDevice.CurrentDevice.CheckSystemVersion(13, 0)) //TODO: what is this?
                    //{
                    //    _previewFrameOutput.DeliversPreviewSizedOutputBuffers = true;
                    //    _previewFrameOutput.AutomaticallyConfiguresOutputBufferDimensions = false;
                    //}
                    _previewFrameDelegate = new PreviewFrameDelegate(_cameraModule);
                    var queue = new DispatchQueue("PreviewFrameQueue");
                    _previewFrameOutput.WeakVideoSettings = settings.Dictionary;
                    _previewFrameOutput.SetSampleBufferDelegate(_previewFrameDelegate, queue);

                    _captureSession.AddOutput(_previewFrameOutput);
                    _captureSession.AddInput(AVCaptureDeviceInput.FromDevice(_device));
                }

                // NOTE(review): this observer is registered on every call, including
                // restarts — repeated calls look like they could add duplicate KVO
                // observers; confirm a matching RemoveObserver happens elsewhere.
                _device.AddObserver(this, "adjustingFocus", NSKeyValueObservingOptions.OldNew, IntPtr.Zero);
            }
            catch (Exception e)
            {
                _cameraModule.ErrorMessage = e.ToString();
            }
        }
Beispiel #16
0
        /// <summary>
        /// Sets up the application window and root view controller, then builds an
        /// AVFoundation capture pipeline: a BGRA video-frame output handled by
        /// <c>Recorder</c> on a dedicated dispatch queue, plus a JPEG still-image output.
        /// </summary>
        /// <param name="application">The singleton application object.</param>
        /// <param name="launchOptions">Launch options dictionary (may be null).</param>
        /// <returns>
        /// <c>false</c> when the capture device cannot be locked for configuration or no
        /// input can be created from it; <c>true</c> otherwise — including when no camera
        /// exists at all (e.g. on the simulator), in which case <c>CameraAvailable</c>
        /// remains false and the app launches without camera features.
        /// </returns>
        public override bool FinishedLaunching(UIApplication application, NSDictionary launchOptions)
        {
            // Create a new window instance based on the screen size.
            Window = new UIWindow(UIScreen.MainScreen.Bounds);

            Microsoft.WindowsAzure.MobileServices.CurrentPlatform.Init();

            // If you have defined a root view controller, set it here:
            initialViewController     = Storyboard.InstantiateInitialViewController() as UIViewController;
            Window.RootViewController = initialViewController;
            UITabBar.Appearance.SelectedImageTintColor = UIColor.FromRGB(14, 125, 202);
            UITabBar.Appearance.BackgroundColor        = UIColor.White;

            // Make the window visible.
            Window.MakeKeyAndVisible();

            // Create a new capture session at medium resolution.
            Session = new AVCaptureSession
            {
                SessionPreset = AVCaptureSession.PresetMedium
            };

            // Create a device input.
            CaptureDevice = AVCaptureDevice.DefaultDeviceWithMediaType(AVMediaType.Video);
            if (CaptureDevice == null)
            {
                // No camera available (e.g. simulator). The app still launches; tabs
                // are expected to consult CameraAvailable before using the camera.
                CameraAvailable = false;
                return true;
            }

            // Prepare the device for configuration.
            if (!CaptureDevice.LockForConfiguration(out Error))
            {
                // BUGFIX: do NOT call UnlockForConfiguration here — the lock was never
                // acquired, so unlocking would be unbalanced per the AVFoundation contract.
                Console.WriteLine("Error: {0}", Error.LocalizedDescription);
                return false;
            }

            // Configure the stream for 15 frames per second (fps).
            CaptureDevice.ActiveVideoMinFrameDuration = new CMTime(1, 15);

            // Unlock the configuration.
            CaptureDevice.UnlockForConfiguration();

            // Get input from the capture device.
            Input = AVCaptureDeviceInput.FromDevice(CaptureDevice);
            if (Input == null)
            {
                // Error, report and abort.
                Console.WriteLine("Unable to gain input from capture device.");
                CameraAvailable = false;
                return false;
            }

            // Attach input to the session.
            Session.AddInput(Input);

            // Create a new frame output delivering 32-bit BGRA pixel buffers.
            var output   = new AVCaptureVideoDataOutput();
            var settings = new AVVideoSettingsUncompressed();
            settings.PixelFormatType = CVPixelFormatType.CV32BGRA;
            output.WeakVideoSettings = settings.Dictionary;

            // Configure and attach the output to the session.
            Queue    = new DispatchQueue("ManCamQueue");
            Recorder = new OutputRecorder();
            output.SetSampleBufferDelegate(Recorder, Queue);
            Session.AddOutput(output);

            // Configure and attach a still image output for bracketed capture.
            StillImageOutput = new AVCaptureStillImageOutput();
            var dict = new NSMutableDictionary();
            dict[AVVideo.CodecKey] = new NSNumber((int)AVVideoCodec.JPEG);
            // BUGFIX: the JPEG codec settings were built but never applied — without this
            // assignment the dictionary above had no effect on the still-image output.
            StillImageOutput.OutputSettings = dict;
            Session.AddOutput(StillImageOutput);

            // Let tabs know that a camera is available.
            CameraAvailable = true;

            return true;
        }
		// Wires the given video track into the transcoding pipeline: a reader output
		// that decompresses to 32-bit BGRA, a writer input that re-compresses to H.264
		// at the track's original dimensions, and a sample-buffer channel that
		// coordinates moving frames between the two. No-op when the track is null.
		void SetupAssetReaserWriterForVideo (AVAssetTrack videoTrack)
		{
			if (videoTrack == null)
				return;

			// Reader side: ask the asset reader for uncompressed BGRA frames.
			var readerSettings = new AVVideoSettingsUncompressed {
				PixelFormatType = CVPixelFormatType.CV32BGRA,
				AllocateWithIOSurface = null
			};
			AVAssetReaderOutput readerOutput = new AVAssetReaderTrackOutput (videoTrack, readerSettings);
			assetReader.AddOutput (readerOutput);

			// The track's format description supplies stream attributes we want to carry
			// over unchanged into the re-encoded output.
			var formatDescription = (CMVideoFormatDescription)videoTrack.FormatDescriptions.FirstOrDefault ();

			// Dimensions: prefer the format description's presentation size, otherwise
			// fall back to the track's natural size.
			CGSize dimensions;
			if (formatDescription != null)
				dimensions = formatDescription.GetPresentationDimensions (false, false);
			else
				dimensions = videoTrack.NaturalSize;

			// Preserve clean aperture and pixel aspect ratio when the description has them.
			AVVideoCodecSettings codecSettings = null;
			if (formatDescription != null) {
				var cleanAperture = (NSDictionary)formatDescription.GetExtension (CVImageBuffer.CleanApertureKey);
				var pixelAspectRatio = (NSDictionary)formatDescription.GetExtension (CVImageBuffer.PixelAspectRatioKey);
				codecSettings = CreateCodecSettingsFor (cleanAperture, pixelAspectRatio);
			}

			// Writer side: compress back to H.264 with the asset writer.
			var writerSettings = new AVVideoSettingsCompressed {
				Codec = AVVideoCodec.H264,
				Width = (int)dimensions.Width,
				Height = (int)dimensions.Height,
				CodecSettings = codecSettings
			};
			var writerInput = new AVAssetWriterInput (videoTrack.MediaType, writerSettings);
			writerInput.Transform = videoTrack.PreferredTransform;
			assetWriter.AddInput (writerInput);

			// The channel coordinates reading sample buffers from the output and
			// handing them to the writer input.
			videoSampleBufferChannel = new VideoChannel (readerOutput, writerInput, transformer);
		}