Exemplo n.º 1
0
        /// <summary>
        /// Maps an interface orientation to the matching capture video orientation.
        /// The landscape cases are intentionally crossed: UIInterfaceOrientation and
        /// AVCaptureVideoOrientation name the landscape directions from opposite
        /// points of view (this mirrors the mapping used elsewhere in this file).
        /// </summary>
        /// <param name="orientation">Current interface orientation.</param>
        /// <param name="result">The mapped video orientation, or 0 when no mapping exists.</param>
        /// <returns>true when the orientation could be mapped; otherwise false.</returns>
        static bool TryConvertToVideoOrientation(UIInterfaceOrientation orientation, out AVCaptureVideoOrientation result)
        {
            if (orientation == UIInterfaceOrientation.Portrait)
            {
                result = AVCaptureVideoOrientation.Portrait;
                return true;
            }

            if (orientation == UIInterfaceOrientation.PortraitUpsideDown)
            {
                result = AVCaptureVideoOrientation.PortraitUpsideDown;
                return true;
            }

            if (orientation == UIInterfaceOrientation.LandscapeLeft)
            {
                result = AVCaptureVideoOrientation.LandscapeRight;
                return true;
            }

            if (orientation == UIInterfaceOrientation.LandscapeRight)
            {
                result = AVCaptureVideoOrientation.LandscapeLeft;
                return true;
            }

            // Unknown orientation: nothing sensible to report.
            result = 0;
            return false;
        }
Exemplo n.º 2
0
            /// <summary>
            /// Translates the current status-bar orientation into a capture video
            /// orientation. Unknown orientations fall back to portrait.
            /// </summary>
            AVCaptureVideoOrientation GetVideoOrientationFromDevice()
            {
                switch (UIApplication.SharedApplication.StatusBarOrientation)
                {
                case UIInterfaceOrientation.PortraitUpsideDown:
                    return AVCaptureVideoOrientation.PortraitUpsideDown;

                case UIInterfaceOrientation.LandscapeLeft:
                    return AVCaptureVideoOrientation.LandscapeLeft;

                case UIInterfaceOrientation.LandscapeRight:
                    return AVCaptureVideoOrientation.LandscapeRight;

                case UIInterfaceOrientation.Portrait:
                default:
                    // Portrait is both an explicit mapping and the fallback.
                    return AVCaptureVideoOrientation.Portrait;
                }
            }
Exemplo n.º 3
0
 /// <summary>
 /// Pushes the supplied orientation onto the preview layer's video connection,
 /// doing nothing when no layer or connection is available.
 /// </summary>
 /// <param name="newValue">Orientation to apply to the preview connection.</param>
 public void UpdateVideoOrientation(AVCaptureVideoOrientation newValue)
 {
     var connection = PreviewLayer?.Connection;

     if (connection == null)
     {
         return;
     }

     connection.VideoOrientation = newValue;
 }
Exemplo n.º 4
0
        /// <summary>
        /// Returns the rotation, in radians, from the portrait reference
        /// orientation to the given video orientation.
        /// </summary>
        /// <param name="orientation">Target video orientation.</param>
        /// <returns>Signed rotation in radians (0 for portrait and unknown values).</returns>
        float AngleOffsetFromPortraitOrientationToOrientation(AVCaptureVideoOrientation orientation)
        {
            switch (orientation)
            {
            case AVCaptureVideoOrientation.LandscapeRight:
                return (float)(-Math.PI / 2.0);

            case AVCaptureVideoOrientation.LandscapeLeft:
                return (float)(Math.PI / 2.0);

            case AVCaptureVideoOrientation.PortraitUpsideDown:
                return (float)Math.PI;

            default:
                // Portrait is the reference orientation: no rotation.
                return 0.0F;
            }
        }
Exemplo n.º 5
0
        /// <summary>
        /// Checks camera authorization (requesting access when it is still
        /// undetermined), then configures the capture session on the session
        /// queue and applies the given initial video orientation.
        /// </summary>
        /// <param name="captureVideoOrientation">Initial orientation for the video connection.</param>
        public void Prepare(AVCaptureVideoOrientation captureVideoOrientation)
        {
            var status = AVCaptureDevice.GetAuthorizationStatus(AVMediaType.Video);

            if (status == AVAuthorizationStatus.NotDetermined)
            {
                // Suspend the queue so the ConfigureSession call dispatched below
                // does not run until the user has answered the permission prompt.
                _sessionQueue.Suspend();
                AVCaptureDevice.RequestAccessForMediaType(AVAuthorizationMediaType.Video, granted =>
                {
                    if (granted)
                    {
                        // Notify the delegate on the main queue (UI work may follow).
                        DispatchQueue.MainQueue.DispatchAsync(() =>
                        {
                            _captureSessionDelegate.CaptureGrantedSession(AVAuthorizationStatus.Authorized);
                        });
                    }
                    else
                    {
                        _setupResult = SessionSetupResult.NotAuthorized;
                    }

                    // Let the queued session configuration proceed either way.
                    _sessionQueue.Resume();
                });
            }
            else if (status != AVAuthorizationStatus.Authorized)
            {
                _setupResult = SessionSetupResult.NotAuthorized;
            }

            // NOTE(review): this dispatch happens even when access was denied;
            // presumably ConfigureSession inspects _setupResult itself — confirm.
            _sessionQueue.DispatchAsync(() =>
            {
                ConfigureSession();
                UpdateVideoOrientation(captureVideoOrientation);
            });
        }
Exemplo n.º 6
0
 /// <summary>
 /// Fires the pending video completion callback, if any, and clears it so it
 /// runs at most once. Invocation happens before the field is cleared,
 /// matching the original behavior when the callback throws.
 /// </summary>
 private void _executeVideoCompletionWithURL(NSUrl url, AVCaptureVideoOrientation orientation, NSError error)
 {
     if (videoCompletion == null)
     {
         return;
     }

     videoCompletion(url, orientation, error);
     videoCompletion = null;
 }
        /// <summary>
        /// Creates a Transform to apply to the OpenGL view to display the Video at the proper orientation
        /// </summary>
        /// <returns>
        /// A transform to correct the orientation
        /// </returns>
        /// <param name='orientation'>
        /// Current Orientation
        /// </param>
        public CGAffineTransform TransformFromCurrentVideoOrientationToOrientation(AVCaptureVideoOrientation orientation)
        {
            // Both angles are measured against the same portrait reference, so
            // their difference is the rotation from the current video orientation
            // to the requested one.
            float delta = AngleOffsetFromPortraitOrientationToOrientation(orientation)
                          - AngleOffsetFromPortraitOrientationToOrientation(videoOrientation);

            return CGAffineTransform.MakeRotation(delta);
        }
Exemplo n.º 8
0
 /// <summary>
 /// Stops the active video recording; once the recorder finishes, remembers the
 /// recorded orientation, saves the clip to the album and deletes the temp file.
 /// </summary>
 public void StopRecording()
 {
     Debug.WriteLine("StopRecording");

     cameraManager.stopRecordingVideo(async (url, recordedOrientation, error) =>
     {
         orientation = recordedOrientation;
         SaveToAlbum(url.AbsoluteString);
         Debug.WriteLine("// TODO: SaveToAlbum");
         DeleteOriginal(url);
     });
 }
        /// <summary>
        /// Maps an interface orientation to the video orientation the camera
        /// should use. Previously only Portrait and LandscapeRight were handled,
        /// so LandscapeLeft and PortraitUpsideDown silently produced rotated
        /// portrait footage; all four orientations are now covered. Unknown
        /// orientations still fall back to portrait.
        /// </summary>
        /// <param name="toInterfaceOrientation">Interface orientation being transitioned to.</param>
        /// <returns>The matching capture video orientation.</returns>
        public AVCaptureVideoOrientation GetCameraForOrientation(UIInterfaceOrientation toInterfaceOrientation)
        {
            switch (toInterfaceOrientation)
            {
            case UIInterfaceOrientation.LandscapeRight:
                return AVCaptureVideoOrientation.LandscapeRight;

            case UIInterfaceOrientation.LandscapeLeft:
                return AVCaptureVideoOrientation.LandscapeLeft;

            case UIInterfaceOrientation.PortraitUpsideDown:
                return AVCaptureVideoOrientation.PortraitUpsideDown;

            default:
                // Portrait and Unknown both map to portrait capture.
                return AVCaptureVideoOrientation.Portrait;
            }
        }
	    /// <summary>
	    /// Rotation in radians from the portrait reference orientation to the
	    /// supplied video orientation (0 for portrait and unknown values).
	    /// </summary>
	    static float AngleOffsetFromPortraitOrientationToOrientation (AVCaptureVideoOrientation orientation)
		{
			if (orientation == AVCaptureVideoOrientation.LandscapeRight)
				return (float) (-Math.PI / 2.0);

			if (orientation == AVCaptureVideoOrientation.LandscapeLeft)
				return (float) (Math.PI / 2.0);

			if (orientation == AVCaptureVideoOrientation.PortraitUpsideDown)
				return (float) Math.PI;

			// Portrait is the reference orientation: zero offset.
			return 0.0f;
		}
Exemplo n.º 11
0
        /// <summary>
        /// Reads the current status-bar orientation and maps it to a capture
        /// video orientation. Previously only Portrait and LandscapeRight were
        /// handled, so LandscapeLeft and PortraitUpsideDown silently produced
        /// rotated portrait footage; all four orientations are now covered,
        /// matching GetVideoOrientationFromDevice elsewhere in this file.
        /// Unknown orientations fall back to portrait.
        /// </summary>
        /// <returns>The capture video orientation for the current UI orientation.</returns>
        public AVCaptureVideoOrientation GetCameraForOrientation()
        {
            var currentOrientation = UIApplication.SharedApplication.StatusBarOrientation;

            switch (currentOrientation)
            {
            case UIInterfaceOrientation.LandscapeRight:
                return AVCaptureVideoOrientation.LandscapeRight;

            case UIInterfaceOrientation.LandscapeLeft:
                return AVCaptureVideoOrientation.LandscapeLeft;

            case UIInterfaceOrientation.PortraitUpsideDown:
                return AVCaptureVideoOrientation.PortraitUpsideDown;

            default:
                // Portrait and Unknown both map to portrait capture.
                return AVCaptureVideoOrientation.Portrait;
            }
        }
Exemplo n.º 12
0
        /// <summary>
        /// Shutter-button handler: toggles movie recording when in movie mode,
        /// otherwise captures a still photo. The movie connection inherits the
        /// preview layer's current orientation so the file is rotated correctly.
        /// </summary>
        /// <param name="sender">The shutter button that was tapped.</param>
        partial void TakePhoto(UIButton sender)
        {
            AVCaptureVideoOrientation layerOrientation = videoPreviewLayer.Connection.VideoOrientation;

            if (isMovie)
            {
                ShutterButton.Enabled = false; // disable until recording starts/stops

                if (!movieOutput.Recording)
                {
                    // set up recording
                    if (UIDevice.CurrentDevice.IsMultitaskingSupported)
                    {
                        // Keep recording alive if the app is backgrounded mid-capture.
                        backgroundRecordingId = UIApplication.SharedApplication.BeginBackgroundTask(null);
                    }

                    AVCaptureConnection connection = movieOutput?.ConnectionFromMediaType(AVMediaType.Video);
                    if (connection != null)
                    {
                        // Match the recorded file's rotation to what the user sees in the preview.
                        connection.VideoOrientation = layerOrientation;
                    }

                    SetPaths(".mov");

                    // `this` acts as the recording delegate (receives start/finish callbacks).
                    movieOutput.StartRecordingToOutputFile(NSUrl.FromFilename(filePath), this);
                }
                else
                {
                    // finish recording
                    movieOutput.StopRecording();
                }
            }
            else
            {
                AVCapturePhotoSettings photoSettings = AVCapturePhotoSettings.Create();

                // The first format in the array is the preferred format
                if (photoSettings.AvailablePreviewPhotoPixelFormatTypes.Length > 0)
                {
                    photoSettings.PreviewPhotoFormat = new NSDictionary <NSString, NSObject>(CVPixelBuffer.PixelFormatTypeKey, photoSettings.AvailablePreviewPhotoPixelFormatTypes[0]);
                }

                // `this` acts as the photo-capture delegate.
                stillImageOutput.CapturePhoto(photoSettings, this);
            }
        }
Exemplo n.º 13
0
        /// <summary>
        /// Rotation in radians from the portrait reference orientation to
        /// <paramref name="orientation"/> (0 for portrait and unknown values).
        /// </summary>
        static float AngleOffsetFromPortraitOrientationToOrientation(AVCaptureVideoOrientation orientation)
        {
            if (orientation == AVCaptureVideoOrientation.LandscapeRight)
            {
                return (float)(-Math.PI / 2.0);
            }

            if (orientation == AVCaptureVideoOrientation.LandscapeLeft)
            {
                return (float)(Math.PI / 2.0);
            }

            if (orientation == AVCaptureVideoOrientation.PortraitUpsideDown)
            {
                return (float)Math.PI;
            }

            // Portrait is the reference orientation: no offset.
            return 0.0f;
        }
Exemplo n.º 14
0
        /// <summary>
        /// Aligns the preview layer with the current device orientation.
        /// The landscape values are crossed because UIDeviceOrientation and
        /// AVCaptureVideoOrientation name landscape directions from opposite
        /// points of view. Fix: the PortraitUpsideDown case was missing, so the
        /// preview never updated when the device was flipped upside down.
        /// FaceUp/FaceDown/Unknown orientations leave the preview unchanged.
        /// </summary>
        private void SetPreviewOrientation()
        {
            // 0 is not a valid AVCaptureVideoOrientation value, so it doubles
            // as a "no change" sentinel below.
            AVCaptureVideoOrientation videoOrientation = 0;

            switch (UIDevice.CurrentDevice.Orientation)
            {
            case UIDeviceOrientation.Portrait:
                videoOrientation = AVCaptureVideoOrientation.Portrait;
                break;

            case UIDeviceOrientation.PortraitUpsideDown:
                videoOrientation = AVCaptureVideoOrientation.PortraitUpsideDown;
                break;

            case UIDeviceOrientation.LandscapeRight:
                videoOrientation = AVCaptureVideoOrientation.LandscapeLeft;
                break;

            case UIDeviceOrientation.LandscapeLeft:
                videoOrientation = AVCaptureVideoOrientation.LandscapeRight;
                break;
            }

            if (videoOrientation != 0)
            {
                _avCaptureVideoPreviewLayer.Orientation = videoOrientation;
            }
        }
Exemplo n.º 15
0
        /// <summary>
        /// Builds the capture session: audio input/output, back-camera video
        /// input/output with BGRA frames, and records the initial video
        /// orientation. Returns false when no audio device exists (e.g. simulator).
        /// </summary>
        /// <returns>true when the session was configured; false otherwise.</returns>
        bool SetupCaptureSession()
        {
            //Console.WriteLine ("SetupCaptureSession");
            // Overview: RosyWriter uses separate GCD queues for audio and video capture.  If a single GCD queue
            // is used to deliver both audio and video buffers, and our video processing consistently takes
            // too long, the delivery queue can back up, resulting in audio being dropped.
            //
            // When recording, RosyWriter creates a third GCD queue for calls to AVAssetWriter.  This ensures
            // that AVAssetWriter is not called to start or finish writing from multiple threads simultaneously.
            //
            // RosyWriter uses AVCaptureSession's default preset, AVCaptureSessionPresetHigh.

            // Create Capture session
            captureSession = new AVCaptureSession();
            captureSession.BeginConfiguration();

            // Create audio connection
            // NOTE(review): `error` from the AVCaptureDeviceInput constructors is
            // never inspected — a failed input creation is silently ignored. Confirm
            // whether that is intentional.
            NSError error;
            var     audioDevice = AVCaptureDevice.DefaultDeviceWithMediaType(AVMediaType.Audio);

            if (audioDevice == null)
            {
                return(false);                // e.g. simulator
            }
            var audioIn = new AVCaptureDeviceInput(audioDevice, out error);

            if (captureSession.CanAddInput(audioIn))
            {
                captureSession.AddInput(audioIn);
            }

            var audioOut          = new AVCaptureAudioDataOutput();
            var audioCaptureQueue = new DispatchQueue("Audio Capture Queue");

            // Add the Delegate to capture each sample that comes through
            audioOut.SetSampleBufferDelegateQueue(this, audioCaptureQueue);

            if (captureSession.CanAddOutput(audioOut))
            {
                captureSession.AddOutput(audioOut);
            }

            audioConnection = audioOut.ConnectionFromMediaType(AVMediaType.Audio);

            // Create Video Session
            var videoDevice = VideoDeviceWithPosition(AVCaptureDevicePosition.Back);
            var videoIn     = new AVCaptureDeviceInput(videoDevice, out error);

            if (captureSession.CanAddInput(videoIn))
            {
                captureSession.AddInput(videoIn);
            }

            // RosyWriter prefers to discard late video frames early in the capture pipeline, since its
            // processing can take longer than real-time on some platforms (such as iPhone 3GS).
            // Clients whose image processing is faster than real-time should consider setting AVCaptureVideoDataOutput's
            // alwaysDiscardsLateVideoFrames property to NO.
            var videoOut = new AVCaptureVideoDataOutput {
                AlwaysDiscardsLateVideoFrames = true,
                // HACK: Change VideoSettings to WeakVideoSettings, and AVVideoSettings to CVPixelBufferAttributes
                // VideoSettings = new AVVideoSettings (CVPixelFormatType.CV32BGRA)
                WeakVideoSettings = new CVPixelBufferAttributes()
                {
                    PixelFormatType = CVPixelFormatType.CV32BGRA
                }.Dictionary
            };

            // Create a DispatchQueue for the Video Processing
            var videoCaptureQueue = new DispatchQueue("Video Capture Queue");

            videoOut.SetSampleBufferDelegateQueue(this, videoCaptureQueue);

            if (captureSession.CanAddOutput(videoOut))
            {
                captureSession.AddOutput(videoOut);
            }

            // Set the Video connection from the Video Output object
            // Remember the session's starting orientation so later transforms can
            // be computed relative to it (see TransformFromCurrentVideoOrientationToOrientation).
            videoConnection  = videoOut.ConnectionFromMediaType(AVMediaType.Video);
            videoOrientation = videoConnection.VideoOrientation;

            captureSession.CommitConfiguration();

            return(true);
        }
	    /// <summary>
	    /// Rotation in radians from the portrait reference orientation to the
	    /// supplied video orientation (0 for portrait and unknown values).
	    /// </summary>
	    float AngleOffsetFromPortraitOrientationToOrientation (AVCaptureVideoOrientation orientation)
		{
			switch (orientation) {
			case AVCaptureVideoOrientation.LandscapeRight:
				return -Convert.ToSingle (Math.PI / 2.0);
			case AVCaptureVideoOrientation.LandscapeLeft:
				return Convert.ToSingle (Math.PI / 2.0);
			case AVCaptureVideoOrientation.PortraitUpsideDown:
				return Convert.ToSingle (Math.PI);
			case AVCaptureVideoOrientation.Portrait:
			default:
				// Portrait is the reference orientation: zero offset.
				return 0.0F;
			}
		}
		/// <summary>
		/// Maps an interface orientation onto a capture video orientation. The
		/// landscape cases are intentionally crossed: the two enums name the
		/// landscape directions from opposite points of view.
		/// </summary>
		/// <param name="orientation">Current interface orientation.</param>
		/// <param name="result">The mapped video orientation, or 0 when no mapping exists.</param>
		/// <returns>true when a mapping exists; otherwise false.</returns>
		static bool TryConvertToVideoOrientation (UIInterfaceOrientation orientation, out AVCaptureVideoOrientation result)
		{
			bool converted = true;

			switch (orientation) {
			case UIInterfaceOrientation.Portrait:
				result = AVCaptureVideoOrientation.Portrait;
				break;
			case UIInterfaceOrientation.PortraitUpsideDown:
				result = AVCaptureVideoOrientation.PortraitUpsideDown;
				break;
			case UIInterfaceOrientation.LandscapeLeft:
				result = AVCaptureVideoOrientation.LandscapeRight;
				break;
			case UIInterfaceOrientation.LandscapeRight:
				result = AVCaptureVideoOrientation.LandscapeLeft;
				break;
			default:
				result = 0;
				converted = false;
				break;
			}

			return converted;
		}
Exemplo n.º 18
0
        /// <summary>
        /// Sets up the AVCam view: disables the UI, creates the capture session
        /// and session queue, checks/requests camera authorization, then
        /// configures inputs (video + audio) and outputs (movie + still image)
        /// on the session queue.
        /// </summary>
        public async override void ViewDidLoad()
        {
            base.ViewDidLoad();

            // Disable UI. The UI is enabled if and only if the session starts running.
            CameraButton.Enabled = false;
            RecordButton.Enabled = false;
            StillButton.Enabled  = false;

            // Create the AVCaptureSession.
            Session = new AVCaptureSession();

            // Setup the preview view.
            PreviewView.Session = Session;

            // Communicate with the session and other session objects on this queue.
            SessionQueue = new DispatchQueue("session queue");
            SetupResult  = AVCamSetupResult.Success;

            // Check video authorization status. Video access is required and audio access is optional.
            // If audio access is denied, audio is not recorded during movie recording.
            switch (AVCaptureDevice.GetAuthorizationStatus(AVMediaType.Video))
            {
            // The user has previously granted access to the camera.
            case AVAuthorizationStatus.Authorized:
                break;

            // The user has not yet been presented with the option to grant video access.
            // We suspend the session queue to delay session setup until the access request has completed to avoid
            // asking the user for audio access if video access is denied.
            // Note that audio access will be implicitly requested when we create an AVCaptureDeviceInput for audio during session setup.
            case AVAuthorizationStatus.NotDetermined:
                SessionQueue.Suspend();
                var granted = await AVCaptureDevice.RequestAccessForMediaTypeAsync(AVMediaType.Video);

                if (!granted)
                {
                    SetupResult = AVCamSetupResult.CameraNotAuthorized;
                }
                SessionQueue.Resume();
                break;

            // The user has previously denied access.
            default:
                SetupResult = AVCamSetupResult.CameraNotAuthorized;
                break;
            }

            // Setup the capture session.
            // In general it is not safe to mutate an AVCaptureSession or any of its inputs, outputs, or connections from multiple threads at the same time.
            // Why not do all of this on the main queue?
            // Because AVCaptureSession.StartRunning is a blocking call which can take a long time. We dispatch session setup to the sessionQueue
            // so that the main queue isn't blocked, which keeps the UI responsive.
            SessionQueue.DispatchAsync(() => {
                if (SetupResult != AVCamSetupResult.Success)
                {
                    return;
                }

                // -1 marks "no background task in progress".
                backgroundRecordingID = -1;
                NSError error;
                AVCaptureDevice videoDevice           = CreateDevice(AVMediaType.Video, AVCaptureDevicePosition.Back);
                AVCaptureDeviceInput videoDeviceInput = AVCaptureDeviceInput.FromDevice(videoDevice, out error);
                if (videoDeviceInput == null)
                {
                    // NOTE(review): videoDeviceInput is still passed to CanAddInput
                    // below after this failure — confirm CanAddInput(null) is safe here.
                    Console.WriteLine("Could not create video device input: {0}", error);
                }

                Session.BeginConfiguration();
                if (Session.CanAddInput(videoDeviceInput))
                {
                    Session.AddInput(VideoDeviceInput = videoDeviceInput);
                    DispatchQueue.MainQueue.DispatchAsync(() => {
                        // Why are we dispatching this to the main queue?
                        // Because AVCaptureVideoPreviewLayer is the backing layer for PreviewView and UIView
                        // can only be manipulated on the main thread.
                        // Note: As an exception to the above rule, it is not necessary to serialize video orientation changes
                        // on the AVCaptureVideoPreviewLayer’s connection with other session manipulation.
                        // Use the status bar orientation as the initial video orientation. Subsequent orientation changes are handled by
                        // ViewWillTransitionToSize method.
                        UIInterfaceOrientation statusBarOrientation       = UIApplication.SharedApplication.StatusBarOrientation;
                        AVCaptureVideoOrientation initialVideoOrientation = AVCaptureVideoOrientation.Portrait;
                        if (statusBarOrientation != UIInterfaceOrientation.Unknown)
                        {
                            // Direct cast: the two enums share underlying values here.
                            initialVideoOrientation = (AVCaptureVideoOrientation)(long)statusBarOrientation;
                        }

                        var previewLayer = (AVCaptureVideoPreviewLayer)PreviewView.Layer;
                        previewLayer.Connection.VideoOrientation = initialVideoOrientation;
                    });
                }
                else
                {
                    Console.WriteLine("Could not add video device input to the session");
                    SetupResult = AVCamSetupResult.SessionConfigurationFailed;
                }

                // Audio is optional: failures here are logged but do not fail setup.
                AVCaptureDevice audioDevice           = AVCaptureDevice.DefaultDeviceWithMediaType(AVMediaType.Audio);
                AVCaptureDeviceInput audioDeviceInput = AVCaptureDeviceInput.FromDevice(audioDevice, out error);
                if (audioDeviceInput == null)
                {
                    Console.WriteLine("Could not create audio device input: {0}", error);
                }

                if (Session.CanAddInput(audioDeviceInput))
                {
                    Session.AddInput(audioDeviceInput);
                }
                else
                {
                    Console.WriteLine("Could not add audio device input to the session");
                }

                var movieFileOutput = new AVCaptureMovieFileOutput();
                if (Session.CanAddOutput(movieFileOutput))
                {
                    Session.AddOutput(MovieFileOutput = movieFileOutput);
                    AVCaptureConnection connection    = movieFileOutput.ConnectionFromMediaType(AVMediaType.Video);
                    if (connection.SupportsVideoStabilization)
                    {
                        connection.PreferredVideoStabilizationMode = AVCaptureVideoStabilizationMode.Auto;
                    }
                }
                else
                {
                    Console.WriteLine("Could not add movie file output to the session");
                    SetupResult = AVCamSetupResult.SessionConfigurationFailed;
                }

                var stillImageOutput = new AVCaptureStillImageOutput();
                if (Session.CanAddOutput(stillImageOutput))
                {
                    stillImageOutput.CompressedVideoSetting = new AVVideoSettingsCompressed {
                        Codec = AVVideoCodec.JPEG
                    };
                    Session.AddOutput(StillImageOutput = stillImageOutput);
                }
                else
                {
                    Console.WriteLine("Could not add still image output to the session");
                    SetupResult = AVCamSetupResult.SessionConfigurationFailed;
                }

                Session.CommitConfiguration();
            });
        }
Exemplo n.º 19
0
 /// <summary>
 /// Applies the given orientation to the capture connection and stretches the
 /// preview layer to fill this view's bounds.
 /// </summary>
 /// <param name="layer">The capture connection whose orientation is updated.</param>
 /// <param name="orientation">Orientation to apply.</param>
 private void updatePreviewLayer(AVCaptureConnection layer, AVCaptureVideoOrientation orientation)
 {
     layer.VideoOrientation = orientation;

     previewLayer.Frame = Bounds;
 }
		/// <summary>
		/// Creates a Transform to apply to the OpenGL view to display the Video at the proper orientation
		/// </summary>
		/// <returns>
		/// A transform to correct the orientation
		/// </returns>
		/// <param name='orientation'>
		/// Current Orientation
		/// </param>
		public CGAffineTransform TransformFromCurrentVideoOrientationToOrientation (AVCaptureVideoOrientation orientation)
		{
			// Rotation needed = (target offset from portrait) - (current offset from portrait).
			float delta = AngleOffsetFromPortraitOrientationToOrientation (orientation)
				- AngleOffsetFromPortraitOrientationToOrientation (videoOrientation);

			return CGAffineTransform.MakeRotation (delta);
		}
	    /// <summary>
	    /// Builds the capture session: audio input/output, back-camera video
	    /// input/output with BGRA frames, and records the initial video
	    /// orientation. Returns false when no audio device exists (e.g. simulator).
	    /// </summary>
	    /// <returns>true when the session was configured; false otherwise.</returns>
	    bool SetupCaptureSession ()
		{
			//Console.WriteLine ("SetupCaptureSession");
			// Overview: RosyWriter uses separate GCD queues for audio and video capture.  If a single GCD queue
			// is used to deliver both audio and video buffers, and our video processing consistently takes
			// too long, the delivery queue can back up, resulting in audio being dropped.
			// 
			// When recording, RosyWriter creates a third GCD queue for calls to AVAssetWriter.  This ensures
			// that AVAssetWriter is not called to start or finish writing from multiple threads simultaneously.
			//				
			// RosyWriter uses AVCaptureSession's default preset, AVCaptureSessionPresetHigh.
			
			// Create Capture session
			captureSession = new AVCaptureSession ();
			captureSession.BeginConfiguration ();
			
			// Create audio connection
			// NOTE(review): `error` from the AVCaptureDeviceInput constructors is
			// never inspected — a failed input creation is silently ignored. Confirm
			// whether that is intentional.
			NSError error;
			var audioDevice = AVCaptureDevice.DefaultDeviceWithMediaType (AVMediaType.Audio);
			if (audioDevice == null)
				return false; // e.g. simulator

			var audioIn = new AVCaptureDeviceInput (audioDevice, out error);
			if (captureSession.CanAddInput (audioIn))
				captureSession.AddInput (audioIn);
			
			var audioOut = new AVCaptureAudioDataOutput ();
			var audioCaptureQueue = new DispatchQueue ("Audio Capture Queue");

			// Add the Delegate to capture each sample that comes through
			audioOut.SetSampleBufferDelegateQueue (this, audioCaptureQueue);
			
			if (captureSession.CanAddOutput (audioOut))
				captureSession.AddOutput (audioOut);
			
			audioConnection = audioOut.ConnectionFromMediaType (AVMediaType.Audio);
					
			// Create Video Session
			var videoDevice = VideoDeviceWithPosition (AVCaptureDevicePosition.Back);
			var videoIn = new AVCaptureDeviceInput (videoDevice, out error);
			
			if (captureSession.CanAddInput (videoIn))
				captureSession.AddInput (videoIn);
			
			// RosyWriter prefers to discard late video frames early in the capture pipeline, since its
			// processing can take longer than real-time on some platforms (such as iPhone 3GS).
			// Clients whose image processing is faster than real-time should consider setting AVCaptureVideoDataOutput's
			// alwaysDiscardsLateVideoFrames property to NO.
			var videoOut = new AVCaptureVideoDataOutput {
				AlwaysDiscardsLateVideoFrames = true,
				VideoSettings = new AVVideoSettings (CVPixelFormatType.CV32BGRA)
			};
			
			// Create a DispatchQueue for the Video Processing
			var videoCaptureQueue = new DispatchQueue ("Video Capture Queue");
			videoOut.SetSampleBufferDelegateQueue (this, videoCaptureQueue);
			
			if (captureSession.CanAddOutput (videoOut))
				captureSession.AddOutput (videoOut);
			
			// Set the Video connection from the Video Output object
			// Remember the session's starting orientation so later transforms can
			// be computed relative to it (see TransformFromCurrentVideoOrientationToOrientation).
			videoConnection = videoOut.ConnectionFromMediaType (AVMediaType.Video);
			videoOrientation = videoConnection.VideoOrientation;
			
			captureSession.CommitConfiguration ();
			
			return true;
		}