Code example #1
File: Main.cs Project: rojepp/monotouch-samples
		bool SetupCaptureSession ()
		{
			// configure the capture session for low resolution, change this if your code
			// can cope with more data or volume
			session = new AVCaptureSession () {
				SessionPreset = AVCaptureSession.PresetMedium
			};
			
			// create a device input and attach it to the session
			var captureDevice = AVCaptureDevice.DefaultDeviceWithMediaType (AVMediaType.Video);
			var input = AVCaptureDeviceInput.FromDevice (captureDevice);
			if (input == null){
				Console.WriteLine ("No input device");
				return false;
			}
			session.AddInput (input);
			
			// create a VideoDataOutput and add it to the session
			var output = new AVCaptureVideoDataOutput () {
				VideoSettings = new AVVideoSettings (CVPixelFormatType.CV32BGRA),
				
				// If you want to cap the frame rate at a given speed, in this sample: 15 frames per second
				MinFrameDuration = new CMTime (1, 15)
			};
			
			// configure the output
			queue = new MonoTouch.CoreFoundation.DispatchQueue ("myQueue");
			outputRecorder = new OutputRecorder ();
			output.SetSampleBufferDelegateAndQueue (outputRecorder, queue);
			session.AddOutput (output);
			
			session.StartRunning ();
			return true;
		}
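
The OutputRecorder referenced above is not shown on this page; a minimal sketch of such a sample-buffer delegate, under that assumption, could look like this:

		// Hedged sketch (assumed shape; the sample's actual OutputRecorder is not
		// shown here). DidOutputSampleBuffer runs on the DispatchQueue passed to
		// SetSampleBufferDelegateAndQueue, once per captured frame.
		public class OutputRecorder : AVCaptureVideoDataOutputSampleBufferDelegate
		{
			public override void DidOutputSampleBuffer (AVCaptureOutput captureOutput,
				CMSampleBuffer sampleBuffer, AVCaptureConnection connection)
			{
				try {
					using (var pixelBuffer = sampleBuffer.GetImageBuffer () as CVPixelBuffer) {
						// process the CV32BGRA frame here
					}
				} finally {
					// dispose promptly, or the capture pipeline will stall
					sampleBuffer.Dispose ();
				}
			}
		}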
Code example #2
File: MainUI.cs Project: JosephWetzel/joesdoor
		void UserKnocked ()
		{
			_state.didKnock = true;
			if (_state.CountDown > 0 && _state.CountDown < 5 && _state.PublishToTwitter == false) {
				_state.PublishToTwitter = true;
				session.StopRunning ();
				session.Dispose ();
				session = null;
				outputRecorder.Dispose ();
				outputRecorder = null;
				queue.Dispose ();
				queue = null;

				//Stop Both Timers
				AudioLoop.Invalidate ();
				timer.Invalidate ();

				//Stop Recording
				StopRecording ();



				// Schedule the audio reset and the Twitter post
				NSTimer.CreateScheduledTimer (TimeSpan.FromSeconds (10), AudioReset);
				NSTimer.CreateScheduledTimer (TimeSpan.FromSeconds (1), PostToTwitter);
				_state.PublishToTwitter = true;
				_state.TopLabel.Text = "";
				_state.BottomLabel.Text = "Posted to Twitter!";
				imgYouRock.Hidden = false;

			}
		}
Code example #3
		bool SetupCaptureSession ()
		{
			// configure the capture session for low resolution, change this if your code
			// can cope with more data or volume
			session = new AVCaptureSession () {
				SessionPreset = AVCaptureSession.PresetMedium
			};

			// create a device input and attach it to the session
			var captureDevice = AVCaptureDevice.DefaultDeviceWithMediaType (AVMediaType.Video);
			var input = AVCaptureDeviceInput.FromDevice (captureDevice);
			if (input == null){
				// No input device
				return false;
			}
			session.AddInput (input);

			// create a VideoDataOutput and add it to the session
			var output = new AVCaptureVideoDataOutput () {
				VideoSettings = new AVVideoSettings (CVPixelFormatType.CV32BGRA)
			};

			// configure the output
			queue = new DispatchQueue ("myQueue");
			qrScanner = new QrScanner (this);
			output.SetSampleBufferDelegateAndQueue (qrScanner, queue);
			session.AddOutput (output);

			previewLayer = new AVCaptureVideoPreviewLayer (session);
			previewLayer.Orientation = AVCaptureVideoOrientation.Portrait;
			previewLayer.VideoGravity = "AVLayerVideoGravityResizeAspectFill";

			session.StartRunning ();
			return true;
		}
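
The preview layer created above renders nothing until it is attached to a view's layer tree; a hedged sketch, assuming this runs in the hosting UIViewController:

			// Hedged sketch (assumes a hosting UIViewController): attach the
			// preview layer so the camera feed becomes visible.
			previewLayer.Frame = View.Bounds;
			View.Layer.AddSublayer (previewLayer);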
Code example #4
		public CustomVideoCompositor (AVVideoComposition videoComposition) : base()
		{
			renderingQueue = new DispatchQueue ("com.apple.aplcustomvideocompositor.renderingqueue");
			renderContextQueue = new DispatchQueue ("com.apple.aplcustomvideocompositor.rendercontextqueue");
			previousBuffer = null;
			renderContextDidChange = false;
		}
Code example #5
		public override void ViewDidLoad ()
		{
			base.ViewDidLoad ();
			
			backgroundQueue = new DispatchQueue ("com.videotimeline.backgroundqueue", false);
			displayLink = CADisplayLink.Create (DisplayLinkCallback);
			displayLink.AddToRunLoop (NSRunLoop.Current, NSRunLoopMode.Default);
			displayLink.Paused = true;
			lastCallbackTime = 0.0;
			bufferSemaphore = new SemaphoreSlim (0);
		}
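
The DisplayLinkCallback wired above is not shown on this page; a hypothetical sketch of what it might do with the fields initialized here:

		// Hypothetical sketch (the sample's implementation is not shown):
		// CADisplayLink fires once per screen refresh; track the timestamp
		// and signal any consumer waiting on the semaphore.
		void DisplayLinkCallback ()
		{
			double now = displayLink.Timestamp;
			if (lastCallbackTime > 0.0)
				Console.WriteLine ("Frame interval: {0:F4}s", now - lastCallbackTime);
			lastCallbackTime = now;
			bufferSemaphore.Release ();
		}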
Code example #6
		public ReadWriteSampleBufferChannel (AVAssetReaderOutput readerOutput, AVAssetWriterInput writerInput)
		{
			if (readerOutput == null)
				throw new ArgumentNullException ("readerOutput");
			if (writerInput == null)
				throw new ArgumentNullException ("writerInput");

			this.readerOutput = readerOutput;
			this.writerInput = writerInput;

			serializationQueue = new DispatchQueue ("ReadWriteSampleBufferChannel queue");
		}
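
A hedged sketch of how such a channel typically pumps samples over the serialization queue, assuming the standard AVAssetReader/AVAssetWriter pattern (the sample's own transfer loop is not shown here):

		// Hedged sketch (assumed, standard reader/writer pattern): the writer
		// input calls back on the serialization queue whenever it can accept
		// more data; copy sample buffers across until the reader runs dry.
		public void StartTransfer (Action completion)
		{
			writerInput.RequestMediaData (serializationQueue, () => {
				while (writerInput.ReadyForMoreMediaData) {
					using (var buffer = readerOutput.CopyNextSampleBuffer ()) {
						if (buffer == null) {
							writerInput.MarkAsFinished ();
							completion ();
							return;
						}
						writerInput.AppendSampleBuffer (buffer);
					}
				}
			});
		}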
Code example #7
		bool SetupCaptureSession ()
		{
			// configure the capture session for low resolution, change this if your code
			// can cope with more data or volume
			session = new AVCaptureSession {
				SessionPreset = AVCaptureSession.PresetMedium
			};

			// create a device input and attach it to the session
			var captureDevice = AVCaptureDevice.DefaultDeviceWithMediaType (AVMediaType.Video);
			if (captureDevice == null) {
				Console.WriteLine ("No captureDevice - this won't work on the simulator, try a physical device");
				return false;
			}
			// Configure for 15 FPS. Note use of LockForConfiguration()/UnlockForConfiguration()
			NSError error = null;
			captureDevice.LockForConfiguration (out error);
			if (error != null) {
				Console.WriteLine (error);
				captureDevice.UnlockForConfiguration ();
				return false;
			}

			if (UIDevice.CurrentDevice.CheckSystemVersion (7, 0))
				captureDevice.ActiveVideoMinFrameDuration = new CMTime (1, 15);
			captureDevice.UnlockForConfiguration ();

			var input = AVCaptureDeviceInput.FromDevice (captureDevice);
			if (input == null) {
				Console.WriteLine ("No input - this won't work on the simulator, try a physical device");
				return false;
			}

			session.AddInput (input);

			// create a VideoDataOutput and add it to the session
			var settings = new CVPixelBufferAttributes {
				PixelFormatType = CVPixelFormatType.CV32BGRA
			};
			using (var output = new AVCaptureVideoDataOutput { WeakVideoSettings = settings.Dictionary }) {
				queue = new DispatchQueue ("myQueue");
				outputRecorder = new OutputRecorder ();
				output.SetSampleBufferDelegate (outputRecorder, queue);
				session.AddOutput (output);
			}

			session.StartRunning ();
			return true;
		}
Code example #8
		public PostManager (Action reloadHandler)
		{
			this.reloadHandler = reloadHandler;
			PostCells = new List<Post> ();
			downloadingBatchStorage = new List<Post> ();
			updateCellArrayQueue = new DispatchQueue ("UpdateCellQueue");
			fetchRecordQueue = new NSOperationQueue ();
			tagArray = new string[0];

			desiredKeys = new string[] {
				Post.ImageRefKey,
				Post.FontKey,
				Post.TextKey
			};
		}
Code example #9
		public ReadWriteSampleBufferChannel (AVAssetReaderOutput localAssetReaderOutput,
			AVAssetWriterInput localAssetWriterInput,
			bool useAdaptor)
		{
			_assetReaderOutput = localAssetReaderOutput;
			_assetWriterInput = localAssetWriterInput;
			_useAdaptor = useAdaptor;

			if (_useAdaptor) {
				var adaptorAttrs = new CVPixelBufferAttributes {
					PixelFormatType = CVPixelFormatType.CV32BGRA
				};
				_adaptor = AVAssetWriterInputPixelBufferAdaptor.FromInput (localAssetWriterInput, adaptorAttrs.Dictionary);
			}

			_serializationQueue = new DispatchQueue ("ReadWriteSampleBufferChannel queue");
		}
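
A hedged sketch of what _useAdaptor changes downstream (assumed usage, not from the sample): with the adaptor, frames are appended as pixel buffers with an explicit presentation timestamp rather than as raw sample buffers.

		// Hedged sketch (assumed usage, not shown in the sample)
		void Append (CMSampleBuffer buffer)
		{
			if (_useAdaptor) {
				var time = buffer.PresentationTimeStamp;
				using (var pixelBuffer = buffer.GetImageBuffer () as CVPixelBuffer)
					_adaptor.AppendPixelBufferWithPresentationTime (pixelBuffer, time);
			} else {
				_assetWriterInput.AppendSampleBuffer (buffer);
			}
		}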
Code example #10
        public void QueueBase_ReturnExtraItemsTest()
        {
            var target = new DispatchQueue<Guid>(null);
            target.ItemsQueryCount = 2;
            target.ReturnToStorageAfterItemsCount = 4;

            target.Append(new List<SignalDispatchBase<Guid>>()
            {
                SignaloBotEntityCreator<Guid>.CreateSignal(),
                SignaloBotEntityCreator<Guid>.CreateSignal(),
                SignaloBotEntityCreator<Guid>.CreateSignal(),
                SignaloBotEntityCreator<Guid>.CreateSignal(),
                SignaloBotEntityCreator<Guid>.CreateSignal(),
                SignaloBotEntityCreator<Guid>.CreateSignal()
            }, true);

            var activeKeys = new List<int>() { SignaloBotTestParameters.ExistingDeliveryType };
            target.ReturnExtraItems(activeKeys);

            int totalItems = target.CountQueueItems();
            Assert.AreEqual(target.ItemsQueryCount, totalItems);
        }
Code example #11
        bool SetupCaptureSession()
        {
            session = new AVCaptureSession () {
                SessionPreset = AVCaptureSession.PresetMedium
            };

            AVCaptureDevice[] capDevices = AVCaptureDevice.DevicesWithMediaType(AVMediaType.Video);

            AVCaptureDeviceInput input = null;
            if (capDevices.Length != 0) input = AVCaptureDeviceInput.FromDevice (capDevices[0]);
            if (input == null){
                new UIAlertView("Error", "Camera not available", null, "OK", null).Show();
                Console.WriteLine ("Camera not available");
                return false;
            }
            session.AddInput (input);

            var output = new AVCaptureVideoDataOutput () {
                VideoSettings = new AVVideoSettings (CVPixelFormatType.CV32BGRA),
                MinFrameDuration = new CMTime (1, 30)  //second parameter is frames per second
            };

            queue = new MonoTouch.CoreFoundation.DispatchQueue ("myQueue");
            outputRecorder = new OutputRecorder ();
            output.SetSampleBufferDelegateAndQueue (outputRecorder, queue);
            session.AddOutput (output);

            session.StartRunning ();
            return true;
        }
Code example #12
File: MainUI.cs Project: JosephWetzel/joesdoor
			bool SetupCaptureSession ()
			{
				// configure the capture session for low resolution, change this if your code
				// can cope with more data or volume
				session = new AVCaptureSession () {
					SessionPreset = AVCaptureSession.PresetMedium
				};


				AVCaptureDevice captureDevice = null;
				var videoDevices = AVCaptureDevice.DevicesWithMediaType (AVMediaType.Video);
				foreach (AVCaptureDevice Device in videoDevices) {
					if (Device.Position == AVCaptureDevicePosition.Front)
					{
						captureDevice = Device;
						break;
					}
				}



				// create a device input and attach it to the session
				if(captureDevice==null){
					captureDevice = AVCaptureDevice.DefaultDeviceWithMediaType (AVMediaType.Video);
				}
				if (captureDevice == null){
					return false;
				}
				// Configure for 15 FPS. Note use of LockForConfiguration()/UnlockForConfiguration()
				NSError error = null;
				captureDevice.LockForConfiguration(out error);

				if(error != null)
				{
					captureDevice.UnlockForConfiguration();
					return false;
				}
				if(UIDevice.CurrentDevice.CheckSystemVersion(7,0))
					captureDevice.ActiveVideoMinFrameDuration = new CMTime (1,15);

				captureDevice.UnlockForConfiguration();

				var input = AVCaptureDeviceInput.FromDevice (captureDevice);

				if (input == null){
					return false;
				}

				session.AddInput (input);

				// create a VideoDataOutput and add it to the session
				var output = new AVCaptureVideoDataOutput () {
					VideoSettings = new AVVideoSettings (CVPixelFormatType.CV32BGRA),
				};


				// configure the output
				queue = new MonoTouch.CoreFoundation.DispatchQueue ("myQueue");
				outputRecorder = new OutputRecorder (_state);
				output.SetSampleBufferDelegate (outputRecorder, queue);
				session.AddOutput (output);

				session.StartRunning ();
				return true;
			}
Code example #13
        public void Create()
        {
            TestRuntime.AssertXcodeVersion(8, 0);

            var called   = false;
            var callback = new Action(() => called = true);
            DispatchBlockFlags flags;

            Assert.Throws <ArgumentNullException> (() => DispatchBlock.Create(null), "ANE 1");
            Assert.Throws <ArgumentNullException> (() => DispatchBlock.Create(null, DispatchBlockFlags.AssignCurrent), "ANE 2");
            Assert.Throws <ArgumentNullException> (() => DispatchBlock.Create((Action)null, DispatchBlockFlags.AssignCurrent, DispatchQualityOfService.Background, 2), "ANE 3");
            Assert.Throws <ArgumentNullException> (() => DispatchBlock.Create((DispatchBlock)null, DispatchBlockFlags.AssignCurrent, DispatchQualityOfService.Background, 2), "ANE 4");
            // Invalid input results in NULL and an exception
            Assert.Throws <Exception> (() => DispatchBlock.Create(callback, (DispatchBlockFlags)12345678), "E 1");
            Assert.Throws <Exception> (() => DispatchBlock.Create(callback, (DispatchBlockFlags)12345678, DispatchQualityOfService.UserInteractive, 0), "E 2");
            Assert.Throws <Exception> (() => DispatchBlock.Create(callback, DispatchBlockFlags.None, (DispatchQualityOfService)12345678, 0), "E 3");
            Assert.Throws <Exception> (() => DispatchBlock.Create(callback, DispatchBlockFlags.None, DispatchQualityOfService.Default, 12345678), "E 4");

            called = false;
            using (var db = DispatchBlock.Create(callback)) {
                using (var queue = new DispatchQueue("Background")) {
                    queue.Activate();
                    queue.DispatchAsync(db);
                    var rv = (int)db.Wait(new DispatchTime(DispatchTime.Now, TimeSpan.FromSeconds(5)));
                    Assert.AreEqual(0, rv, "Timed Out A");
                    Assert.IsTrue(called, "Called A");
                }
            }

            called = false;
            flags  = DispatchBlockFlags.None;
            using (var db = DispatchBlock.Create(callback, flags)) {
                using (var queue = new DispatchQueue("Background")) {
                    queue.Activate();
                    queue.DispatchAsync(db);
                    var rv = (int)db.Wait(new DispatchTime(DispatchTime.Now, TimeSpan.FromSeconds(5)));
                    Assert.AreEqual(0, rv, "Timed Out " + flags);
                    Assert.IsTrue(called, "Called " + flags);
                }
            }

            called = false;
            flags  = DispatchBlockFlags.AssignCurrent;
            using (var db = DispatchBlock.Create(callback, flags)) {
                using (var queue = new DispatchQueue("Background")) {
                    queue.Activate();
                    queue.DispatchAsync(db);
                    var rv = (int)db.Wait(new DispatchTime(DispatchTime.Now, TimeSpan.FromSeconds(5)));
                    Assert.AreEqual(0, rv, "Timed Out " + flags);
                    Assert.IsTrue(called, "Called " + flags);
                }
            }


            called = false;
            flags  = DispatchBlockFlags.Detached;
            using (var db = DispatchBlock.Create(callback, flags)) {
                using (var queue = new DispatchQueue("Background")) {
                    queue.Activate();
                    queue.DispatchAsync(db);
                    var rv = (int)db.Wait(new DispatchTime(DispatchTime.Now, TimeSpan.FromSeconds(5)));
                    Assert.AreEqual(0, rv, "Timed Out " + flags);
                    Assert.IsTrue(called, "Called " + flags);
                }
            }

            called = false;
            flags  = DispatchBlockFlags.Detached;
            using (var db = DispatchBlock.Create(callback, flags, DispatchQualityOfService.Background, -8)) {
                using (var queue = new DispatchQueue("Background")) {
                    queue.Activate();
                    queue.DispatchAsync(db);
                    var rv = (int)db.Wait(new DispatchTime(DispatchTime.Now, TimeSpan.FromSeconds(5)));
                    Assert.AreEqual(0, rv, "Timed Out Background 8" + flags);
                    Assert.IsTrue(called, "Called Background 8" + flags);
                }
            }

            called = false;
            flags  = DispatchBlockFlags.Detached;
            using (var db = DispatchBlock.Create(callback, flags, DispatchQualityOfService.Background, -8)) {
                using (var db2 = db.Create(DispatchBlockFlags.EnforceQosClass, DispatchQualityOfService.Unspecified, -7)) {
                    using (var queue = new DispatchQueue("Background")) {
                        queue.Activate();
                        queue.DispatchAsync(db2);
                        var rv = (int)db2.Wait(new DispatchTime(DispatchTime.Now, TimeSpan.FromSeconds(5)));
                        Assert.AreEqual(0, rv, "Timed Out Background DB" + flags);
                        Assert.IsTrue(called, "Called Background DB" + flags);
                    }
                }
            }
        }
Code example #14
		void setupCaptureSession ()
		{
			if (CaptureSession != null)
				return;

			CaptureSession = new AVCaptureSession ();

			NSNotificationCenter.DefaultCenter.AddObserver (null, captureSessionNotification, CaptureSession);

			applicationWillEnterForegroundNotificationObserver =
				NSNotificationCenter.DefaultCenter.AddObserver (UIApplication.WillEnterForegroundNotification.ToString (),
					UIApplication.SharedApplication, NSOperationQueue.CurrentQueue,
					delegate (NSNotification notification) {
						applicationWillEnterForeground ();
					});

			videoDevice = AVCaptureDevice.DefaultDeviceWithMediaType (AVMediaType.Video);

			NSError error;
			videoInput = new AVCaptureDeviceInput (videoDevice, out error);
			if (CaptureSession.CanAddInput (videoInput))
				CaptureSession.AddInput (videoInput);

			metadataOutput = new AVCaptureMetadataOutput ();

			var metadataQueue = new DispatchQueue ("com.AVCam.metadata");
			metadataObjectsDelegate = new MetadataObjectsDelegate {
				DidOutputMetadataObjectsAction = DidOutputMetadataObjects
			};
			metadataOutput.SetDelegate (metadataObjectsDelegate, metadataQueue);

			if (CaptureSession.CanAddOutput (metadataOutput))
				CaptureSession.AddOutput (metadataOutput);
		}
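
The MetadataObjectsDelegate used above is not shown on this page; a hedged sketch of its assumed shape:

		// Hedged sketch (assumed shape): the metadata queue invokes
		// DidOutputMetadataObjects, which forwards to the assigned action.
		class MetadataObjectsDelegate : AVCaptureMetadataOutputObjectsDelegate
		{
			public Action<AVMetadataObject[]> DidOutputMetadataObjectsAction { get; set; }

			public override void DidOutputMetadataObjects (AVCaptureMetadataOutput captureOutput,
				AVMetadataObject[] metadataObjects, AVCaptureConnection connection)
			{
				if (DidOutputMetadataObjectsAction != null)
					DidOutputMetadataObjectsAction (metadataObjects);
			}
		}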
Code example #15
		private bool addImageSamplerOutput( out string errorMessage, int minimumSampleIntervalInMilliSeconds )
		{
			errorMessage = "";

			// create a VideoDataOutput and add it to the capture session
			frameGrabberOutput = new AVCaptureVideoDataOutput();
			frameGrabberOutput.WeakVideoSettings = new CVPixelBufferAttributes () { PixelFormatType = CVPixelFormatType.CV32BGRA }.Dictionary;
			// set up the output queue and delegate
			queue = new CoreFoundation.DispatchQueue ("captureQueue");
			videoFrameSampler = new VideoFrameSamplerDelegate();
			frameGrabberOutput.SetSampleBufferDelegateQueue (videoFrameSampler, queue);

			// subscribe to capture events
			videoFrameSampler.CaptureError += new EventHandler<CaptureErrorEventArgs>( handleImageCaptureError );
			videoFrameSampler.ImageCaptured += new EventHandler<ImageCaptureEventArgs>( handleImageCaptured );

			// add the output to the session
			session.AddOutput (frameGrabberOutput);

			// set minimum time interval between image samples (if possible).
			try
			{
				AVCaptureConnection connection = (AVCaptureConnection)frameGrabberOutput.Connections[0];
				connection.VideoMinFrameDuration = new CMTime(minimumSampleIntervalInMilliSeconds, 1000);
			}
			catch
			{
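				// best effort: not all devices/connections allow setting the frame duration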
			}

			return true;
		}
Code example #16
bool SetupCaptureSession ()
      {
         // configure the capture session for low resolution, change this if your code
         // can cope with more data or volume
         session = new AVCaptureSession () {
            SessionPreset = AVCaptureSession.PresetMedium
         };

         // create a device input and attach it to the session
         var captureDevice = AVCaptureDevice.DefaultDeviceWithMediaType (AVMediaType.Video);
         if (captureDevice == null) {
            Image<Bgr, Byte> img = new Image<Bgr, byte> (512, 512, new Bgr (255, 255, 255));
            CvInvoke.PutText (
               img, 
               "Capture device not found.", 
               new Point (10, 200), 
               FontFace.HersheyComplex, 
               1, 
               new MCvScalar (), 
               2);
            ImageView.Image = img.ToUIImage();
            return false;
         }
         var input = AVCaptureDeviceInput.FromDevice (captureDevice);
         if (input == null){
            Console.WriteLine ("No input device");
            return false;
         }
         session.AddInput (input);

         // create a VideoDataOutput and add it to the session
         AVVideoSettingsUncompressed settingUncomp = new AVVideoSettingsUncompressed();
         settingUncomp.PixelFormatType = CVPixelFormatType.CV32BGRA;
         var output = new AVCaptureVideoDataOutput () {
            UncompressedVideoSetting = settingUncomp,

            // If you want to cap the frame rate at a given speed, in this sample: 15 frames per second
            //MinFrameDuration = new CMTime (1, 15)
         };
        

         // configure the output
         queue = new DispatchQueue ("myQueue");
         outputRecorder = new OutputRecorder (ImageView);
         output.SetSampleBufferDelegateQueue(outputRecorder, queue);
         session.AddOutput (output);

         session.StartRunning ();
         return true;
      }
Code example #17
		public async override void ViewDidLoad ()
		{
			base.ViewDidLoad ();

			// Disable UI. The UI is enabled if and only if the session starts running.
			CameraButton.Enabled = false;
			RecordButton.Enabled = false;
			StillButton.Enabled = false;

			// Create the AVCaptureSession.
			Session = new AVCaptureSession ();

			// Set up the preview view.
			PreviewView.Session = Session;

			// Communicate with the session and other session objects on this queue.
			SessionQueue = new DispatchQueue ("session queue");
			SetupResult = AVCamSetupResult.Success;

			// Check video authorization status. Video access is required and audio access is optional.
			// If audio access is denied, audio is not recorded during movie recording.
			switch (AVCaptureDevice.GetAuthorizationStatus (AVMediaType.Video)) {
				// The user has previously granted access to the camera.
				case AVAuthorizationStatus.Authorized:
					break;

				// The user has not yet been presented with the option to grant video access.
				// We suspend the session queue to delay session setup until the access request has completed to avoid
				// asking the user for audio access if video access is denied.
				// Note that audio access will be implicitly requested when we create an AVCaptureDeviceInput for audio during session setup.
				case AVAuthorizationStatus.NotDetermined:
					SessionQueue.Suspend ();
					var granted = await AVCaptureDevice.RequestAccessForMediaTypeAsync (AVMediaType.Video);
					if (!granted)
						SetupResult = AVCamSetupResult.CameraNotAuthorized;
					SessionQueue.Resume ();
					break;

				// The user has previously denied access.
				default:
					SetupResult = AVCamSetupResult.CameraNotAuthorized;
					break;
			}

			// Set up the capture session.
			// In general it is not safe to mutate an AVCaptureSession or any of its inputs, outputs, or connections from multiple threads at the same time.
			// Why not do all of this on the main queue?
			// Because AVCaptureSession.StartRunning is a blocking call which can take a long time. We dispatch session setup to the sessionQueue
			// so that the main queue isn't blocked, which keeps the UI responsive.
			SessionQueue.DispatchAsync (() => {
				if (SetupResult != AVCamSetupResult.Success)
					return;

				backgroundRecordingID = -1;
				NSError error;
				AVCaptureDevice videoDevice = CreateDevice (AVMediaType.Video, AVCaptureDevicePosition.Back);
				AVCaptureDeviceInput videoDeviceInput = AVCaptureDeviceInput.FromDevice (videoDevice, out error);
				if (videoDeviceInput == null)
					Console.WriteLine ("Could not create video device input: {0}", error);

				Session.BeginConfiguration ();
				if (Session.CanAddInput (videoDeviceInput)) {
					Session.AddInput (VideoDeviceInput = videoDeviceInput);
					DispatchQueue.MainQueue.DispatchAsync (() => {
						// Why are we dispatching this to the main queue?
						// Because AVCaptureVideoPreviewLayer is the backing layer for PreviewView and UIView
						// can only be manipulated on the main thread.
						// Note: As an exception to the above rule, it is not necessary to serialize video orientation changes
						// on the AVCaptureVideoPreviewLayer’s connection with other session manipulation.
						// Use the status bar orientation as the initial video orientation. Subsequent orientation changes are handled by
						// ViewWillTransitionToSize method.
						UIInterfaceOrientation statusBarOrientation = UIApplication.SharedApplication.StatusBarOrientation;
						AVCaptureVideoOrientation initialVideoOrientation = AVCaptureVideoOrientation.Portrait;
						if (statusBarOrientation != UIInterfaceOrientation.Unknown)
							initialVideoOrientation = (AVCaptureVideoOrientation)(long)statusBarOrientation;

						var previewLayer = (AVCaptureVideoPreviewLayer)PreviewView.Layer;
						previewLayer.Connection.VideoOrientation = initialVideoOrientation;
					});
				} else {
					Console.WriteLine ("Could not add video device input to the session");
					SetupResult = AVCamSetupResult.SessionConfigurationFailed;
				}

				AVCaptureDevice audioDevice = AVCaptureDevice.DefaultDeviceWithMediaType (AVMediaType.Audio);
				AVCaptureDeviceInput audioDeviceInput = AVCaptureDeviceInput.FromDevice (audioDevice, out error);
				if (audioDeviceInput == null)
					Console.WriteLine ("Could not create audio device input: {0}", error);

				if (Session.CanAddInput (audioDeviceInput))
					Session.AddInput (audioDeviceInput);
				else
					Console.WriteLine ("Could not add audio device input to the session");

				var movieFileOutput = new AVCaptureMovieFileOutput ();
				if (Session.CanAddOutput (movieFileOutput)) {
					Session.AddOutput (MovieFileOutput = movieFileOutput);
					AVCaptureConnection connection = movieFileOutput.ConnectionFromMediaType (AVMediaType.Video);
					if (connection.SupportsVideoStabilization)
						connection.PreferredVideoStabilizationMode = AVCaptureVideoStabilizationMode.Auto;
				} else {
					Console.WriteLine ("Could not add movie file output to the session");
					SetupResult = AVCamSetupResult.SessionConfigurationFailed;
				}

				var stillImageOutput = new AVCaptureStillImageOutput ();
				if (Session.CanAddOutput (stillImageOutput)) {
					stillImageOutput.CompressedVideoSetting = new AVVideoSettingsCompressed {
						Codec = AVVideoCodec.JPEG
					};
					Session.AddOutput (StillImageOutput = stillImageOutput);
				} else {
					Console.WriteLine ("Could not add still image output to the session");
					SetupResult = AVCamSetupResult.SessionConfigurationFailed;
				}

				Session.CommitConfiguration ();
			});
		}
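
The CreateDevice helper called above is not shown on this page; a hedged sketch of what it presumably does (modeled on the AVCam pattern of preferring a device at the requested position):

		// Hedged sketch (assumed; not shown on this page)
		static AVCaptureDevice CreateDevice (string mediaType, AVCaptureDevicePosition position)
		{
			var devices = AVCaptureDevice.DevicesWithMediaType (mediaType);
			foreach (var device in devices)
				if (device.Position == position)
					return device;
			// fall back to the default device if nothing matches the position
			return AVCaptureDevice.DefaultDeviceWithMediaType (mediaType);
		}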
Code example #18
		public override void ViewDidLoad ()
		{
			base.ViewDidLoad ();

			this.View.BackgroundColor = UIColor.White;

			NSError error;

			// Setup detector options.
			var options = new CIDetectorOptions {
				Accuracy = FaceDetectorAccuracy.High,
				// Can give a hint here about the rects to detect. 1.41 would be for A4 sheets of paper, for instance.
				AspectRatio = 1.41f
			};

			// Create a rectangle detector. Note that you can also create a QR detector or a face detector.
			// Most of this code will also work with other detectors (like streaming to a preview layer and grabbing images).
			this.detector = CIDetector.CreateRectangleDetector (context: null, detectorOptions: options);

			// Create the session. The AVCaptureSession is the managing instance of the whole video handling.
			var captureSession = new AVCaptureSession ()
			{ 
				// Defines what quality we want to use for the images we grab. Photo gives highest resolutions.
				SessionPreset = AVCaptureSession.PresetPhoto
			};

			// Find a suitable AVCaptureDevice for video input.
			var device = AVCaptureDevice.DefaultDeviceWithMediaType(AVMediaType.Video);
			if (device == null)
			{
				// This will not work on the iOS Simulator - there is no camera. :-)
				throw new InvalidProgramException ("Failed to get AVCaptureDevice for video input!");
			}

			// Create a device input with the device and add it to the session.
			var videoInput = AVCaptureDeviceInput.FromDevice (device, out error);
			if (videoInput == null)
			{
				throw new InvalidProgramException ("Failed to get AVCaptureDeviceInput from AVCaptureDevice!");
			}

			// Let session read from the input, this is our source.
			captureSession.AddInput (videoInput);

			// Create output for the video stream. This is the destination.
			var videoOutput = new AVCaptureVideoDataOutput () {
				AlwaysDiscardsLateVideoFrames = true
			};

			// Define the video format we want to use. Note that Xamarin exposes the CompressedVideoSetting and UncompressedVideoSetting 
			// properties on AVCaptureVideoDataOutput in the Unified API, but I could not get these to work. The VideoSettings property is deprecated,
			// so I use the WeakVideoSettings instead which takes an NSDictionary as input.
			this.videoSettingsDict = new NSMutableDictionary ();
			this.videoSettingsDict.Add (CVPixelBuffer.PixelFormatTypeKey, NSNumber.FromUInt32((uint)CVPixelFormatType.CV32BGRA));
			videoOutput.WeakVideoSettings = this.videoSettingsDict;

			// Create a delegate to report back to us when an image has been captured.
			// We want to grab the camera stream and feed it through an AVCaptureVideoDataOutputSampleBufferDelegate,
			// which allows us to get notified when a new image is available. An implementation of that delegate is VideoFrameSamplerDelegate in this project.
			this.sampleBufferDelegate = new VideoFrameSamplerDelegate ();

			// Processing happens via Grand Central Dispatch (GCD), so we need to provide a queue.
			// This is pretty much like a system managed thread (see: http://zeroheroblog.com/ios/concurrency-in-ios-grand-central-dispatch-gcd-dispatch-queues).
			this.sessionQueue =  new DispatchQueue ("AVSessionQueue");

			// Assign the queue and the delegate to the output. Now all output will go through the delegate.
			videoOutput.SetSampleBufferDelegate(this.sampleBufferDelegate, this.sessionQueue);

			// Add output to session.
			captureSession.AddOutput(videoOutput);

			// We also want to visualize the input stream. The raw stream can be fed into an AVCaptureVideoPreviewLayer, which is a subclass of CALayer.
			// A CALayer can be added to a UIView. We add that layer to the controller's main view.
			var layer = this.View.Layer;
			this.videoLayer = AVCaptureVideoPreviewLayer.FromSession (captureSession);
			this.videoLayer.Frame = layer.Bounds;
			layer.AddSublayer (this.videoLayer);

			// All setup! Start capturing!
			captureSession.StartRunning ();

			// This is just for information and allows you to get valid values for the detection framerate. 
			Console.WriteLine ("Available capture framerates:");
			var rateRanges = device.ActiveFormat.VideoSupportedFrameRateRanges;
			foreach (var r in rateRanges)
			{
				Console.WriteLine (r.MinFrameRate + "; " + r.MaxFrameRate + "; " + r.MinFrameDuration + "; " + r.MaxFrameDuration);
			}

			// Configure framerate. Kind of weird way of doing it but the only one that works.
			device.LockForConfiguration (out error);
			// CMTime constructor means: 1 = one second, DETECTION_FPS = how many samples per unit, which is 1 second in this case.
			device.ActiveVideoMinFrameDuration = new CMTime(1, DETECTION_FPS);
			device.ActiveVideoMaxFrameDuration = new CMTime(1, DETECTION_FPS);
			device.UnlockForConfiguration ();

			// Put a small image view at the top left that shows the live image with the detected rectangle(s).
			this.imageViewOverlay = new UIImageView
			{ 
				ContentMode = UIViewContentMode.ScaleAspectFit,
				BackgroundColor = UIColor.Gray
			};
			this.imageViewOverlay.Layer.BorderColor = UIColor.Red.CGColor;
			this.imageViewOverlay.Layer.BorderWidth = 3f;
			this.Add (this.imageViewOverlay);

			// Put another image view top right that shows the image with perspective correction.
			this.imageViewPerspective = new UIImageView
			{ 
				ContentMode = UIViewContentMode.ScaleAspectFit,
				BackgroundColor = UIColor.Gray
			};
			this.imageViewPerspective.Layer.BorderColor = UIColor.Red.CGColor;
			this.imageViewPerspective.Layer.BorderWidth = 3f;
			this.Add (this.imageViewPerspective);

			// Add some labels for information.
			this.mainWindowLbl = new UILabel
			{
				Text = "Live stream from camera. Point camera to a rectangular object.",
				TextAlignment = UITextAlignment.Center
			};
			this.Add (this.mainWindowLbl);

			this.detectionWindowLbl = new UILabel
			{
				Text = "Detected rectangle overlay",
				TextAlignment = UITextAlignment.Center
			};
			this.Add (this.detectionWindowLbl);

			this.perspectiveWindowLbl = new UILabel
			{
				Text = "Perspective corrected",
				TextAlignment = UITextAlignment.Center
			};
			this.Add (this.perspectiveWindowLbl);
		}
Code example #19
		void Initialize ()
		{
			DataSource = new CollectionViewDataSource (this);
			imageRecords = new List<Image> ();
			updateArrayQueue = new DispatchQueue ("UpdateCollectionViewQueue");
		}
Code example #20
		bool AddImageSamplerOutput( out string errorMessage, int minimumSampleIntervalInMilliSeconds )
		{
			errorMessage = string.Empty;

			// create a VideoDataOutput and add it to the capture session
			frameGrabberOutput = new AVCaptureVideoDataOutput();
			frameGrabberOutput.WeakVideoSettings = new CVPixelBufferAttributes { PixelFormatType = CVPixelFormatType.CV32BGRA }.Dictionary;
			// set up the output queue and delegate
			queue = new DispatchQueue ("captureQueue");
			videoFrameSampler = new VideoFrameSamplerDelegate();
			frameGrabberOutput.SetSampleBufferDelegateQueue (videoFrameSampler, queue);

			// subscribe to capture events
			videoFrameSampler.CaptureError += HandleImageCaptureError;
			videoFrameSampler.ImageCaptured += HandleImageCaptured;

			// add the output to the session
			session.AddOutput (frameGrabberOutput);

			// set minimum time interval between image samples (if possible).
			try {
				AVCaptureConnection connection = frameGrabberOutput.Connections[0];
				connection.VideoMinFrameDuration = new CMTime(minimumSampleIntervalInMilliSeconds, 1000);
			} catch (Exception ex) {
				Console.WriteLine (ex.Message);
			}

			return true;
		}
Code example #21
        private C4Error SetupC4Replicator()
        {
            Config.Database.CheckOpenLocked();
            C4Error err = new C4Error();

            if (_repl != null)
            {
                Native.c4repl_setOptions(_repl, ((FLSlice)Config.Options.FLEncode()).ToArrayFast());
                return(err);
            }

            _desc = ToString(); // Cache this; it may be called a lot when logging

            // Target:
            var      addr      = new C4Address();
            Database otherDB   = null;
            var      remoteUrl = Config.RemoteUrl;
            string   dbNameStr = remoteUrl?.Segments?.Last().TrimEnd('/');

            using (var dbNameStr_ = new C4String(dbNameStr))
                using (var remoteUrlStr_ = new C4String(remoteUrl?.AbsoluteUri)) {
                    FLSlice   dn = dbNameStr_.AsFLSlice();
                    C4Address localAddr;
                    var       addrFromUrl = NativeRaw.c4address_fromURL(remoteUrlStr_.AsFLSlice(), &localAddr, &dn);
                    addr = localAddr;

                    if (addrFromUrl)
                    {
                        //get cookies from url and add to replicator options
                        var cookiestring = Config.Database.GetCookies(remoteUrl);
                        if (!String.IsNullOrEmpty(cookiestring))
                        {
                            var split = cookiestring.Split(';') ?? Enumerable.Empty <string>();
                            foreach (var entry in split)
                            {
                                var pieces = entry?.Split('=');
                                if (pieces?.Length != 2)
                                {
                                    WriteLog.To.Sync.W(Tag, "Garbage cookie value, ignoring");
                                    continue;
                                }

                                Config.Options.Cookies.Add(new Cookie(pieces[0]?.Trim(), pieces[1]?.Trim()));
                            }
                        }
                    }
                    else
                    {
                        Config.OtherDB?.CheckOpenLocked();
                        otherDB = Config.OtherDB;
                    }

                    var options = Config.Options;

                    Config.Authenticator?.Authenticate(options);

                    options.Build();
                    var push       = Config.ReplicatorType.HasFlag(ReplicatorType.Push);
                    var pull       = Config.ReplicatorType.HasFlag(ReplicatorType.Pull);
                    var continuous = Config.Continuous;

                    var socketFactory = Config.SocketFactory;
                    socketFactory.context = GCHandle.ToIntPtr(GCHandle.Alloc(this)).ToPointer();
                    _nativeParams         = new ReplicatorParameters(options)
                    {
                        Push            = Mkmode(push, continuous),
                        Pull            = Mkmode(pull, continuous),
                        Context         = this,
                        OnDocumentEnded = OnDocEnded,
                        OnStatusChanged = StatusChangedCallback,
                        SocketFactory   = &socketFactory
                    };

                    // Clear the reset flag, it is a one-time thing
                    options.Reset = false;

                    if (Config.PushFilter != null)
                    {
                        _nativeParams.PushFilter = PushFilterCallback;
                    }
                    if (Config.PullFilter != null)
                    {
                        _nativeParams.PullFilter = PullValidateCallback;
                    }

                    DispatchQueue.DispatchSync(() =>
                    {
                        C4Error localErr = new C4Error();
                    #if COUCHBASE_ENTERPRISE
                        if (otherDB != null)
                        {
                            _repl = Native.c4repl_newLocal(Config.Database.c4db, otherDB.c4db, _nativeParams.C4Params,
                                                           &localErr);
                        }
                        else
                    #endif
                        _repl = Native.c4repl_new(Config.Database.c4db, addr, dbNameStr, _nativeParams.C4Params, &localErr);

                        if (_documentEndedUpdate.Counter > 0)
                        {
                            SetProgressLevel(C4ReplicatorProgressLevel.ReplProgressPerDocument);
                        }

                        err = localErr;
                    });
                }

            return(err);
        }
Code example #22
File: AppDelegate.cs Project: sgccarey/XamCam
        public override void FinishedLaunching(UIApplication application)
        {
            // Create a new capture session
            Session = new AVCaptureSession ();
            Session.SessionPreset = AVCaptureSession.PresetMedium;

            // Create a device input
            CaptureDevice = GetFrontCamera();
            if (CaptureDevice == null) {
                // Video capture not supported, abort
                Console.WriteLine ("Video recording not supported on this device");
                CameraAvailable = false;
                return;
            }

            // Prepare device for configuration
            CaptureDevice.LockForConfiguration (out Error);
            if (Error != null) {
                // There has been an issue, abort
                Console.WriteLine ("Error: {0}", Error.LocalizedDescription);
                CaptureDevice.UnlockForConfiguration ();
                return;
            }

            // Configure stream for 15 frames per second (fps)
            CaptureDevice.ActiveVideoMinFrameDuration = new CMTime (1, 15);

            // Unlock configuration
            CaptureDevice.UnlockForConfiguration ();

            // Get input from capture device
            Input = AVCaptureDeviceInput.FromDevice (CaptureDevice);
            if (Input == null) {
                // Error, report and abort
                Console.WriteLine ("Unable to gain input from capture device.");
                CameraAvailable = false;
                return;
            }

            // Attach input to session
            Session.AddInput (Input);

            // Create a new output
            var output = new AVCaptureVideoDataOutput ();
            var settings = new AVVideoSettingsUncompressed ();
            settings.PixelFormatType = CVPixelFormatType.CV32BGRA;
            output.WeakVideoSettings = settings.Dictionary;

            // Configure the output and attach it to the session
            Queue = new DispatchQueue ("ManCamQueue");
            Recorder = new OutputRecorder ();
            output.SetSampleBufferDelegate (Recorder, Queue);
            Session.AddOutput (output);

            // Configure and attach a still image output for bracketed capture,
            // encoding stills as JPEG
            StillImageOutput = new AVCaptureStillImageOutput ();
            StillImageOutput.CompressedVideoSetting = new AVVideoSettingsCompressed {
                Codec = AVVideoCodec.JPEG
            };
            Session.AddOutput (StillImageOutput);

            // Let tabs know that a camera is available
            CameraAvailable = true;
        }
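
Hedged usage sketch (not part of the sample): once CameraAvailable is true, a still frame could be grabbed from the output configured above like this:

            // Hedged sketch (assumed usage): capture one still frame as JPEG data
            async Task<NSData> CaptureStillAsync ()
            {
                var connection = StillImageOutput.ConnectionFromMediaType (AVMediaType.Video);
                var sampleBuffer = await StillImageOutput.CaptureStillImageTaskAsync (connection);
                return AVCaptureStillImageOutput.JpegStillToNSData (sampleBuffer);
            }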
Code example #23
		bool SetupCaptureSession ()
		{
			// configure the capture session for low resolution, change this if your code
			// can cope with more data or volume
			session = new AVCaptureSession () {
				SessionPreset = AVCaptureSession.Preset640x480
			};
			
			// create a device input and attach it to the session
			var captureDevice = AVCaptureDevice.DefaultDeviceWithMediaType (AVMediaType.Video);
			if (captureDevice == null){
				Console.WriteLine ("No captureDevice - this won't work on the simulator, try a physical device");
				return false;
			}

			var input = AVCaptureDeviceInput.FromDevice (captureDevice);
			if (input == null){
				Console.WriteLine ("No input - this won't work on the simulator, try a physical device");
				return false;
			}
			else
				session.AddInput (input);


			previewLayer = new AVCaptureVideoPreviewLayer(session);

			//Framerate set here (10 fps)
			if (previewLayer.RespondsToSelector(new Selector("connection")))
				previewLayer.Connection.VideoMinFrameDuration = new CMTime(1, 10);

			previewLayer.LayerVideoGravity = AVLayerVideoGravity.ResizeAspectFill;
			previewLayer.Frame = this.Frame;
			previewLayer.Position = new PointF(this.Layer.Bounds.Width / 2, (this.Layer.Bounds.Height / 2));

			layerView = new UIView(this.Frame);
			layerView.AutoresizingMask = UIViewAutoresizing.FlexibleWidth | UIViewAutoresizing.FlexibleHeight;
			layerView.Layer.AddSublayer(previewLayer);

			this.AddSubview(layerView);

			ResizePreview(UIApplication.SharedApplication.StatusBarOrientation);

			if (overlayView != null)
			{
				this.AddSubview (overlayView);
				this.BringSubviewToFront (overlayView);

				//overlayView.LayoutSubviews ();
			}

			session.StartRunning ();

			Console.WriteLine ("RUNNING!!!");

			// create a VideoDataOutput and add it to the session
			output = new AVCaptureVideoDataOutput () {
				//videoSettings
				VideoSettings = new AVVideoSettings (CVPixelFormatType.CV32BGRA),
			};

			// configure the output
			queue = new MonoTouch.CoreFoundation.DispatchQueue("ZxingScannerView"); // (Guid.NewGuid().ToString());

			var barcodeReader = new BarcodeReader(null, (img) => 	
			{
				var src = new RGBLuminanceSource(img); //, bmp.Width, bmp.Height);

				//Don't try and rotate properly if we're autorotating anyway
				if (options.AutoRotate.HasValue && options.AutoRotate.Value)
					return src;

				switch (UIDevice.CurrentDevice.Orientation)
				{
					case UIDeviceOrientation.Portrait:
						return src.rotateCounterClockwise().rotateCounterClockwise().rotateCounterClockwise();
					case UIDeviceOrientation.PortraitUpsideDown:
						return src.rotateCounterClockwise().rotateCounterClockwise().rotateCounterClockwise();
					case UIDeviceOrientation.LandscapeLeft:
						return src;
					case UIDeviceOrientation.LandscapeRight:
						return src;
				}

				return src;

			}, null, null); //(p, w, h, f) => new RGBLuminanceSource(p, w, h, RGBLuminanceSource.BitmapFormat.Unknown));

			if (this.options.TryHarder.HasValue)
			{
				Console.WriteLine("TRY_HARDER: " + this.options.TryHarder.Value);
				barcodeReader.Options.TryHarder = this.options.TryHarder.Value;
			}
			if (this.options.PureBarcode.HasValue)
				barcodeReader.Options.PureBarcode = this.options.PureBarcode.Value;
			if (this.options.AutoRotate.HasValue)
			{
				Console.WriteLine("AUTO_ROTATE: " + this.options.AutoRotate.Value);
				barcodeReader.AutoRotate = this.options.AutoRotate.Value;
			}
			if (!string.IsNullOrEmpty (this.options.CharacterSet))
				barcodeReader.Options.CharacterSet = this.options.CharacterSet;
			if (this.options.TryInverted.HasValue)
				barcodeReader.TryInverted = this.options.TryInverted.Value;

			if (this.options.PossibleFormats != null && this.options.PossibleFormats.Count > 0)
			{
				barcodeReader.Options.PossibleFormats = new List<BarcodeFormat>();
				
				foreach (var pf in this.options.PossibleFormats)
					barcodeReader.Options.PossibleFormats.Add(pf);
			}

			outputRecorder = new OutputRecorder (this.options, img => 
			{
				try
				{
					var started = DateTime.Now;
					var rs = barcodeReader.Decode(img);
					var total = DateTime.Now - started;

					Console.WriteLine("Decode Time: " + total.TotalMilliseconds + " ms");

					if (rs != null)
						resultCallback(rs);
				}
				catch (Exception ex)
				{
					Console.WriteLine("DECODE FAILED: " + ex);
				}
			});

			output.AlwaysDiscardsLateVideoFrames = true;
			output.SetSampleBufferDelegate (outputRecorder, queue);


			Console.WriteLine("SetupCamera Finished");

			session.AddOutput (output);
			//session.StartRunning ();


			if (captureDevice.IsFocusModeSupported(AVCaptureFocusMode.ModeContinuousAutoFocus))
			{
				NSError err = null;
				if (captureDevice.LockForConfiguration(out err))
				{
					captureDevice.FocusMode = AVCaptureFocusMode.ModeContinuousAutoFocus;

					if (captureDevice.FocusPointOfInterestSupported)
						captureDevice.FocusPointOfInterest = new PointF(0.5f, 0.5f);

					captureDevice.UnlockForConfiguration();
				}
				else
					Console.WriteLine("Failed to Lock for Config: " + err.Description);
			}

			return true;
		}
Code example #24
        bool SetupCaptureSession()
        {
            var started = DateTime.UtcNow;

            var availableResolutions = new List <CameraResolution> ();

            var consideredResolutions = new Dictionary<NSString, CameraResolution> {
                { AVCaptureSession.Preset352x288,   new CameraResolution { Width = 352,  Height = 288  } },
                { AVCaptureSession.PresetMedium,    new CameraResolution { Width = 480,  Height = 360  } },
                { AVCaptureSession.Preset640x480,   new CameraResolution { Width = 640,  Height = 480  } },
                { AVCaptureSession.Preset1280x720,  new CameraResolution { Width = 1280, Height = 720  } },
                { AVCaptureSession.Preset1920x1080, new CameraResolution { Width = 1920, Height = 1080 } }
            };

            // configure the capture session for low resolution, change this if your code
            // can cope with more data or volume
            session = new AVCaptureSession()
            {
                SessionPreset = AVCaptureSession.Preset640x480
            };

            // create a device input and attach it to the session
//			var captureDevice = AVCaptureDevice.DefaultDeviceWithMediaType (AVMediaType.Video);
            AVCaptureDevice captureDevice = null;
            var             devices       = AVCaptureDevice.DevicesWithMediaType(AVMediaType.Video);

            foreach (var device in devices)
            {
                captureDevice = device;
                if (options.UseFrontCameraIfAvailable.HasValue &&
                    options.UseFrontCameraIfAvailable.Value &&
                    device.Position == AVCaptureDevicePosition.Front)
                {
                    break;                     //Front camera successfully set
                }
                else if (device.Position == AVCaptureDevicePosition.Back && (!options.UseFrontCameraIfAvailable.HasValue || !options.UseFrontCameraIfAvailable.Value))
                {
                    break;                     //Back camera successfully set
                }
            }
            if (captureDevice == null)
            {
                Console.WriteLine("No captureDevice - this won't work on the simulator, try a physical device");
                if (overlayView != null)
                {
                    this.AddSubview(overlayView);
                    this.BringSubviewToFront(overlayView);
                }
                return(false);
            }

            CameraResolution resolution = null;

            // Find resolution
            // Go through the resolutions we can even consider
            foreach (var cr in consideredResolutions)
            {
                // Now check to make sure our selected device supports the resolution
                // so we can add it to the list to pick from
                if (captureDevice.SupportsAVCaptureSessionPreset(cr.Key))
                {
                    availableResolutions.Add(cr.Value);
                }
            }

            resolution = options.GetResolution(availableResolutions);

            // See if the user selected a resolution
            if (resolution != null)
            {
                // Now get the preset string from the resolution chosen
                var preset = (from c in consideredResolutions
                              where c.Value.Width == resolution.Width &&
                              c.Value.Height == resolution.Height
                              select c.Key).FirstOrDefault();

                // If we found a matching preset, let's set it on the session
                if (!string.IsNullOrEmpty(preset))
                {
                    session.SessionPreset = preset;
                }
            }

            var input = AVCaptureDeviceInput.FromDevice(captureDevice);

            if (input == null)
            {
                Console.WriteLine("No input - this won't work on the simulator, try a physical device");
                if (overlayView != null)
                {
                    this.AddSubview(overlayView);
                    this.BringSubviewToFront(overlayView);
                }
                return(false);
            }
            else
            {
                session.AddInput(input);
            }


            var startedAVPreviewLayerAlloc = DateTime.UtcNow;

            previewLayer = new AVCaptureVideoPreviewLayer(session);

            var totalAVPreviewLayerAlloc = DateTime.UtcNow - startedAVPreviewLayerAlloc;

            Console.WriteLine("PERF: Alloc AVCaptureVideoPreviewLayer took {0} ms.", totalAVPreviewLayerAlloc.TotalMilliseconds);


            //Framerate set here (10 fps)
            if (UIDevice.CurrentDevice.CheckSystemVersion(7, 0))
            {
                var perf1 = PerformanceCounter.Start();

                NSError lockForConfigErr = null;

                captureDevice.LockForConfiguration(out lockForConfigErr);
                if (lockForConfigErr == null)
                {
                    captureDevice.ActiveVideoMinFrameDuration = new CMTime(1, 10);
                    captureDevice.UnlockForConfiguration();
                }

                PerformanceCounter.Stop(perf1, "PERF: ActiveVideoMinFrameDuration Took {0} ms");
            }
            else
            {
                previewLayer.Connection.VideoMinFrameDuration = new CMTime(1, 10);
            }


            var perf2 = PerformanceCounter.Start();

                        #if __UNIFIED__
            previewLayer.VideoGravity = AVLayerVideoGravity.ResizeAspectFill;
                        #else
            previewLayer.LayerVideoGravity = AVLayerVideoGravity.ResizeAspectFill;
                        #endif
            previewLayer.Frame    = new CGRect(0, 0, this.Frame.Width, this.Frame.Height);
            previewLayer.Position = new CGPoint(this.Layer.Bounds.Width / 2, (this.Layer.Bounds.Height / 2));

            layerView = new UIView(new CGRect(0, 0, this.Frame.Width, this.Frame.Height));
            layerView.AutoresizingMask = UIViewAutoresizing.FlexibleWidth | UIViewAutoresizing.FlexibleHeight;
            layerView.Layer.AddSublayer(previewLayer);

            this.AddSubview(layerView);

            ResizePreview(UIApplication.SharedApplication.StatusBarOrientation);

            if (overlayView != null)
            {
                this.AddSubview(overlayView);
                this.BringSubviewToFront(overlayView);

                //overlayView.LayoutSubviews ();
            }

            PerformanceCounter.Stop(perf2, "PERF: Setting up layers took {0} ms");

            var perf3 = PerformanceCounter.Start();

            session.StartRunning();

            PerformanceCounter.Stop(perf3, "PERF: session.StartRunning() took {0} ms");

            var perf4 = PerformanceCounter.Start();

            var videoSettings = NSDictionary.FromObjectAndKey(new NSNumber((int)CVPixelFormatType.CV32BGRA),
                                                              CVPixelBuffer.PixelFormatTypeKey);


            // create a VideoDataOutput and add it to the session
            output = new AVCaptureVideoDataOutput {
                WeakVideoSettings = videoSettings
            };

            // configure the output
            queue = new DispatchQueue("ZxingScannerView");             // (Guid.NewGuid().ToString());

            var barcodeReader = new BarcodeReaderiOS(null, (img) =>
            {
                var src = new RGBLuminanceSourceiOS(img);                 //, bmp.Width, bmp.Height);

                //Don't try and rotate properly if we're autorotating anyway
                if (ScanningOptions.AutoRotate.HasValue && ScanningOptions.AutoRotate.Value)
                {
                    return(src);
                }

                var tmpInterfaceOrientation = UIInterfaceOrientation.Portrait;
                InvokeOnMainThread(() => tmpInterfaceOrientation = UIApplication.SharedApplication.StatusBarOrientation);

                switch (tmpInterfaceOrientation)
                {
                case UIInterfaceOrientation.Portrait:
                    return(src.rotateCounterClockwise().rotateCounterClockwise().rotateCounterClockwise());

                case UIInterfaceOrientation.PortraitUpsideDown:
                    return(src.rotateCounterClockwise().rotateCounterClockwise().rotateCounterClockwise());

                case UIInterfaceOrientation.LandscapeLeft:
                    return(src);

                case UIInterfaceOrientation.LandscapeRight:
                    return(src);
                }

                return(src);
            }, null, null);             //(p, w, h, f) => new RGBLuminanceSource(p, w, h, RGBLuminanceSource.BitmapFormat.Unknown));

            if (ScanningOptions.TryHarder.HasValue)
            {
                Console.WriteLine("TRY_HARDER: " + ScanningOptions.TryHarder.Value);
                barcodeReader.Options.TryHarder = ScanningOptions.TryHarder.Value;
            }
            if (ScanningOptions.PureBarcode.HasValue)
            {
                barcodeReader.Options.PureBarcode = ScanningOptions.PureBarcode.Value;
            }
            if (ScanningOptions.AutoRotate.HasValue)
            {
                Console.WriteLine("AUTO_ROTATE: " + ScanningOptions.AutoRotate.Value);
                barcodeReader.AutoRotate = ScanningOptions.AutoRotate.Value;
            }
            if (!string.IsNullOrEmpty(ScanningOptions.CharacterSet))
            {
                barcodeReader.Options.CharacterSet = ScanningOptions.CharacterSet;
            }
            if (ScanningOptions.TryInverted.HasValue)
            {
                barcodeReader.TryInverted = ScanningOptions.TryInverted.Value;
            }

            if (ScanningOptions.PossibleFormats != null && ScanningOptions.PossibleFormats.Count > 0)
            {
                barcodeReader.Options.PossibleFormats = new List<BarcodeFormat>();

                foreach (var pf in ScanningOptions.PossibleFormats)
                {
                    barcodeReader.Options.PossibleFormats.Add(pf);
                }
            }

            outputRecorder = new OutputRecorder(ScanningOptions, img =>
            {
                if (!IsAnalyzing)
                {
                    return(false);
                }

                try
                {
                    //var sw = new System.Diagnostics.Stopwatch();
                    //sw.Start();

                    var rs = barcodeReader.Decode(img);

                    //sw.Stop();

                    //Console.WriteLine("Decode Time: {0} ms", sw.ElapsedMilliseconds);

                    if (rs != null)
                    {
                        resultCallback(rs);
                        return(true);
                    }
                }
                catch (Exception ex)
                {
                    Console.WriteLine("DECODE FAILED: " + ex);
                }

                return(false);
            });

            output.AlwaysDiscardsLateVideoFrames = true;
            output.SetSampleBufferDelegate(outputRecorder, queue);

            PerformanceCounter.Stop(perf4, "PERF: SetupCamera Finished.  Took {0} ms.");

            session.AddOutput(output);
            //session.StartRunning ();


            var perf5 = PerformanceCounter.Start();

            NSError err = null;
            if (captureDevice.LockForConfiguration(out err))
            {
                if (captureDevice.IsFocusModeSupported(AVCaptureFocusMode.ContinuousAutoFocus))
                {
                    captureDevice.FocusMode = AVCaptureFocusMode.ContinuousAutoFocus;
                }
                else if (captureDevice.IsFocusModeSupported(AVCaptureFocusMode.AutoFocus))
                {
                    captureDevice.FocusMode = AVCaptureFocusMode.AutoFocus;
                }

                if (captureDevice.IsExposureModeSupported(AVCaptureExposureMode.ContinuousAutoExposure))
                {
                    captureDevice.ExposureMode = AVCaptureExposureMode.ContinuousAutoExposure;
                }
                else if (captureDevice.IsExposureModeSupported(AVCaptureExposureMode.AutoExpose))
                {
                    captureDevice.ExposureMode = AVCaptureExposureMode.AutoExpose;
                }

                if (captureDevice.IsWhiteBalanceModeSupported(AVCaptureWhiteBalanceMode.ContinuousAutoWhiteBalance))
                {
                    captureDevice.WhiteBalanceMode = AVCaptureWhiteBalanceMode.ContinuousAutoWhiteBalance;
                }
                else if (captureDevice.IsWhiteBalanceModeSupported(AVCaptureWhiteBalanceMode.AutoWhiteBalance))
                {
                    captureDevice.WhiteBalanceMode = AVCaptureWhiteBalanceMode.AutoWhiteBalance;
                }

                if (UIDevice.CurrentDevice.CheckSystemVersion(7, 0) && captureDevice.AutoFocusRangeRestrictionSupported)
                {
                    captureDevice.AutoFocusRangeRestriction = AVCaptureAutoFocusRangeRestriction.Near;
                }

                if (captureDevice.FocusPointOfInterestSupported)
                {
                    captureDevice.FocusPointOfInterest = new PointF(0.5f, 0.5f);
                }

                if (captureDevice.ExposurePointOfInterestSupported)
                {
                    captureDevice.ExposurePointOfInterest = new PointF(0.5f, 0.5f);
                }

                captureDevice.UnlockForConfiguration();
            }
            else
            {
                Console.WriteLine("Failed to Lock for Config: " + err.Description);
            }

            PerformanceCounter.Stop(perf5, "PERF: Setup Focus in {0} ms.");

            return(true);
        }
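
A teardown counterpart is not shown in this sample. A minimal sketch, assuming the fields created above (session, output, outputRecorder, queue) are all owned by this view and that StopScanning is a hypothetical method name:

        public void StopScanning ()
        {
            if (session == null)
                return;

            if (session.Running)
                session.StopRunning ();

            // Release everything created during setup, mirroring its order.
            output.Dispose ();
            output = null;
            outputRecorder.Dispose ();
            outputRecorder = null;
            queue.Dispose ();
            queue = null;
            session.Dispose ();
            session = null;
        }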
Code example #25
0
		public SessionManager ()
		{
			previousSecondTimestamps = new List<double> ();
			sessionQueue = new DispatchQueue ("com.apple.sample.sessionmanager.capture");
			pipelineRunningTask = 0;
		}
Code example #26
0
		public void SetupAndStartCaptureSession ()
		{
			//Console.WriteLine ("SetupAndStartCapture Session");
			
			// Create a shallow queue for buffers going to the display for preview.
			previewBufferQueue = CMBufferQueue.CreateUnsorted (1);
			
			// Create serial queue for movie writing
			movieWritingQueue = new DispatchQueue ("Movie Writing Queue");
			
			if (captureSession == null)
				SetupCaptureSession ();
			
			NSNotificationCenter.DefaultCenter.AddObserver (AVCaptureSession.DidStopRunningNotification, CaptureSessionStoppedRunningNotification, captureSession);
			
			if (!captureSession.Running)
				captureSession.StartRunning ();			
		}
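
The observer above points at CaptureSessionStoppedRunningNotification, which this excerpt does not include. A sketch of a plausible handler, assuming the class tracks an IsRecording flag and a StopRecording method, is to serialize the stop onto the movie-writing queue:

		void CaptureSessionStoppedRunningNotification (NSNotification notification)
		{
			// Serialize with all other writing work so the writer is never
			// touched from two threads at once. IsRecording and StopRecording
			// are assumed members of this class.
			movieWritingQueue.DispatchAsync (() => {
				if (IsRecording)
					StopRecording ();
			});
		}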
Code example #27
0
File: VideoCamera.cs Project: jorik041/ARDemo
		void CreateOutput ()
		{
			output = new AVCaptureVideoDataOutput ();
			output.VideoSettings = new AVVideoSettings (CVPixelFormatType.CV32BGRA);

			queue = new DispatchQueue ("VideoCameraQueue");
			output.SetSampleBufferDelegateAndQueue (new VideoCameraDelegate { Camera = this }, queue);

			session.AddOutput (output);
		}
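
VideoCameraDelegate itself is not part of this excerpt. A minimal sketch of what it might look like; only the override signature and the need to dispose the buffer come from the API, while the ProcessFrame hook is an assumption:

		class VideoCameraDelegate : AVCaptureVideoDataOutputSampleBufferDelegate
		{
			public VideoCamera Camera { get; set; }

			public override void DidOutputSampleBuffer (AVCaptureOutput captureOutput,
				CMSampleBuffer sampleBuffer, AVCaptureConnection connection)
			{
				try {
					using (var pixelBuffer = sampleBuffer.GetImageBuffer () as CVPixelBuffer) {
						// Camera.ProcessFrame (pixelBuffer);   // hypothetical hook
					}
				} finally {
					// Dispose promptly: capture stalls once the pipeline's
					// small pool of sample buffers is exhausted.
					sampleBuffer.Dispose ();
				}
			}
		}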
Code example #28
0
        public override void ViewDidLoad()
        {
            base.ViewDidLoad();

            this.View.BackgroundColor = UIColor.White;

            NSError error;


            // Create the session. The AVCaptureSession coordinates the entire capture pipeline.
            var captureSession = new AVCaptureSession()
            {
                // Defines the quality we want for the images we grab. PresetPhoto gives the highest resolution.
                SessionPreset = AVCaptureSession.PresetPhoto
            };

            // Find a suitable AVCaptureDevice for video input.
            var device = AVCaptureDevice.GetDefaultDevice(AVMediaType.Video);

            if (device == null)
            {
                // This will not work on the iOS Simulator - there is no camera. :-)
                throw new InvalidProgramException("Failed to get AVCaptureDevice for video input!");
            }

            // Create a device input with the device and add it to the session.
            var videoInput = AVCaptureDeviceInput.FromDevice(device, out error);

            if (videoInput == null)
            {
                throw new InvalidProgramException("Failed to get AVCaptureDeviceInput from AVCaptureDevice!");
            }

            // Let the session read from the input; this is our source.
            captureSession.AddInput(videoInput);

            // Create output for the video stream. This is the destination.
            var videoOutput = new AVCaptureVideoDataOutput()
            {
                AlwaysDiscardsLateVideoFrames = true
            };

            // Define the video format we want to use. Note that Xamarin exposes the CompressedVideoSetting and UncompressedVideoSetting
            // properties on AVCaptureVideoDataOutput in the Unified API, but I could not get these to work. The VideoSettings property is
            // deprecated, so I use WeakVideoSettings instead, which takes an NSDictionary as input.
            this.videoSettingsDict = new NSMutableDictionary();
            this.videoSettingsDict.Add(CVPixelBuffer.PixelFormatTypeKey, NSNumber.FromUInt32((uint)CVPixelFormatType.CV32BGRA));
            videoOutput.WeakVideoSettings = this.videoSettingsDict;

            // Create a delegate to report back to us when an image has been captured.
            // We want to grab the camera stream and feed it through an AVCaptureVideoDataOutputSampleBufferDelegate,
            // which notifies us when a new image is available. An implementation of that delegate is VideoFrameSamplerDelegate in this project.
            this.sampleBufferDelegate = new VideoFrameSamplerDelegate();

            // Processing happens via Grand Central Dispatch (GCD), so we need to provide a queue.
            // This is pretty much like a system managed thread (see: http://zeroheroblog.com/ios/concurrency-in-ios-grand-central-dispatch-gcd-dispatch-queues).
            this.sessionQueue = new DispatchQueue("AVSessionQueue");

            // Assign the queue and the delegate to the output. Now all output will go through the delegate.
            videoOutput.SetSampleBufferDelegateQueue(this.sampleBufferDelegate, this.sessionQueue);

            // Add output to session.
            captureSession.AddOutput(videoOutput);

            // We also want to visualize the input stream. The raw stream can be fed into an AVCaptureVideoPreviewLayer, which is a subclass of CALayer.
            // A CALayer can be added to a UIView. We add that layer to the controller's main view.
            var layer = this.View.Layer;

            this.videoLayer       = AVCaptureVideoPreviewLayer.FromSession(captureSession);
            this.videoLayer.Frame = layer.Bounds;
            layer.AddSublayer(this.videoLayer);

            // All setup! Start capturing!
            captureSession.StartRunning();

            // Configure frame rate. A kind of weird way of doing it, but the only one that works.
            device.LockForConfiguration(out error);
            // CMTime (value, timescale) represents value/timescale seconds, so new CMTime (1, DETECTION_FPS)
            // is a per-frame duration of 1/DETECTION_FPS seconds, i.e. at most DETECTION_FPS frames per second.
            device.ActiveVideoMinFrameDuration = new CMTime(1, DETECTION_FPS);
            device.ActiveVideoMaxFrameDuration = new CMTime(1, DETECTION_FPS);
            device.UnlockForConfiguration();
        }
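
Assigning ActiveVideoMinFrameDuration/ActiveVideoMaxFrameDuration throws if the active format does not support the requested rate. A more defensive variant, sketched here reusing device and DETECTION_FPS from above (requires using System.Linq):

            NSError frameRateError;
            if (device.LockForConfiguration(out frameRateError))
            {
                // Apply the cap only if some supported range covers DETECTION_FPS.
                bool supported = device.ActiveFormat.VideoSupportedFrameRateRanges
                    .Any(r => r.MinFrameRate <= DETECTION_FPS && DETECTION_FPS <= r.MaxFrameRate);

                if (supported)
                {
                    device.ActiveVideoMinFrameDuration = new CMTime(1, DETECTION_FPS);
                    device.ActiveVideoMaxFrameDuration = new CMTime(1, DETECTION_FPS);
                }

                device.UnlockForConfiguration();
            }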
Code example #29
0
		public override void ViewDidLoad ()
		{
			base.ViewDidLoad ();

			// Disable UI until the session starts running
			CameraButton.Enabled = false;
			RecordButton.Enabled = false;
			PhotoButton.Enabled = false;
			CaptureModeControl.Enabled = false;
			HUDButton.Enabled = false;

			ManualHUD.Hidden = true;
			ManualHUDPhotoView.Hidden = true;
			ManualHUDFocusView.Hidden = true;
			ManualHUDExposureView.Hidden = true;
			ManualHUDWhiteBalanceView.Hidden = true;
			ManualHUDLensStabilizationView.Hidden = true;

			// Create the AVCaptureSession
			Session = new AVCaptureSession ();

			// Set up preview
			PreviewView.Session = Session;

			sessionQueue = new DispatchQueue ("session queue");
			setupResult = SetupResult.Success;

			// Check video authorization status. Video access is required and audio access is optional.
			// If audio access is denied, audio is not recorded during movie recording.
			CheckDeviceAuthorizationStatus ();

			// Setup the capture session.
			// In general it is not safe to mutate an AVCaptureSession or any of its inputs, outputs, or connections from multiple threads at the same time.
			// Why not do all of this on the main queue?
			// Because AVCaptureSession.StartRunning is a blocking call which can take a long time. We dispatch session setup to the sessionQueue
			// so that the main queue isn't blocked, which keeps the UI responsive.
			sessionQueue.DispatchAsync (ConfigureSession);
		}
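
CheckDeviceAuthorizationStatus is referenced but not shown. In Apple's version of this sample the session queue is suspended while the permission prompt is up; a sketch along those lines, where NotAuthorized is an assumed member of the SetupResult enum used above:

		void CheckDeviceAuthorizationStatus ()
		{
			switch (AVCaptureDevice.GetAuthorizationStatus (AVMediaType.Video)) {
			case AVAuthorizationStatus.Authorized:
				break;
			case AVAuthorizationStatus.NotDetermined:
				// Hold up session setup until the user answers the prompt.
				sessionQueue.Suspend ();
				AVCaptureDevice.RequestAccessForMediaType (AVMediaType.Video, granted => {
					if (!granted)
						setupResult = SetupResult.NotAuthorized;   // assumed enum member
					sessionQueue.Resume ();
				});
				break;
			default:
				setupResult = SetupResult.NotAuthorized;   // assumed enum member
				break;
			}
		}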
Code example #30
0
		List<Activity> CreateActivityDataWithActivities (CMMotionActivity[] activities, Action completionHandler)
		{
			var results = new List<Activity> ();

			var group = DispatchGroup.Create ();
			var queue = new DispatchQueue ("resultQueue");

			var filteredActivities = activities.Where (activity => activity.HasActivitySignature ()
				&& !activity.Stationary
				&& activity.Confidence != CMMotionActivityConfidence.Low).ToArray<CMMotionActivity> ();

			var activitySegments = FindActivitySegments (filteredActivities);

			foreach (var segment in activitySegments) {
				group.Enter ();
				pedometer.QueryPedometerData (segment.Item1.StartDate, (NSDate)segment.Item2, (pedometerData, error) => {
					queue.DispatchAsync (() => {
						var activity = new Activity (segment.Item1,
			               ((DateTime)segment.Item1.StartDate).ToLocalTime (),
			               segment.Item2.ToLocalTime (),
			               pedometerData);
						
						results.Add (activity);
					});

					if (error != null)
						HandleError (error);

					group.Leave ();
				});
			}

			group.Notify (DispatchQueue.MainQueue, () => {
				queue.DispatchSync (() => {
					RecentActivities = results;
					RecentActivities.Reverse ();
					completionHandler?.Invoke ();
				});
			});

			// Note: the returned list may still be filling, since the pedometer callbacks
			// add to it asynchronously; callers should rely on completionHandler and
			// RecentActivities rather than on this immediate return value.
			return results;
		}
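
The Enter/Leave/Notify choreography above is the standard GCD fan-out/fan-in pattern. Stripped of the pedometer specifics it reduces to the following sketch, where SomeAsyncApi is a hypothetical stand-in for any callback-based API:

		// Hypothetical async API standing in for QueryPedometerData.
		static void SomeAsyncApi (int input, Action<int> callback)
		{
			DispatchQueue.DefaultGlobalQueue.DispatchAsync (() => callback (input * 2));
		}

		static void FanOutFanIn ()
		{
			var group = DispatchGroup.Create ();
			var queue = new DispatchQueue ("resultQueue");
			var results = new List<int> ();

			for (int i = 0; i < 3; i++) {
				group.Enter ();                      // one Enter per async call
				SomeAsyncApi (i, value => {
					// The serial queue makes the list append thread-safe.
					queue.DispatchAsync (() => results.Add (value));
					group.Leave ();                  // balanced by the Enter above
				});
			}

			// Fires only after every Enter has been matched by a Leave.
			group.Notify (DispatchQueue.MainQueue, () => {
				queue.DispatchSync (() => Console.WriteLine ("{0} results", results.Count));
			});
		}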
Code example #31
0
	    bool SetupCaptureSession ()
		{
			//Console.WriteLine ("SetupCaptureSession");
			// Overview: RosyWriter uses separate GCD queues for audio and video capture.  If a single GCD queue
			// is used to deliver both audio and video buffers, and our video processing consistently takes
			// too long, the delivery queue can back up, resulting in audio being dropped.
			// 
			// When recording, RosyWriter creates a third GCD queue for calls to AVAssetWriter.  This ensures
			// that AVAssetWriter is not called to start or finish writing from multiple threads simultaneously.
			//				
			// RosyWriter uses AVCaptureSession's default preset, AVCaptureSessionPresetHigh.
			
			// Create Capture session
			captureSession = new AVCaptureSession ();
			captureSession.BeginConfiguration ();
			
			// Create audio connection
			NSError error;
			var audioDevice = AVCaptureDevice.DefaultDeviceWithMediaType (AVMediaType.Audio);
			if (audioDevice == null)
				return false; // e.g. simulator

			var audioIn = new AVCaptureDeviceInput (audioDevice, out error);
			if (captureSession.CanAddInput (audioIn))
				captureSession.AddInput (audioIn);
			
			var audioOut = new AVCaptureAudioDataOutput ();
			var audioCaptureQueue = new DispatchQueue ("Audio Capture Queue");

			// Add the Delegate to capture each sample that comes through
			audioOut.SetSampleBufferDelegateQueue (this, audioCaptureQueue);
			
			if (captureSession.CanAddOutput (audioOut))
				captureSession.AddOutput (audioOut);
			
			audioConnection = audioOut.ConnectionFromMediaType (AVMediaType.Audio);
					
			// Create Video Session
			var videoDevice = VideoDeviceWithPosition (AVCaptureDevicePosition.Back);
			var videoIn = new AVCaptureDeviceInput (videoDevice, out error);
			
			if (captureSession.CanAddInput (videoIn))
				captureSession.AddInput (videoIn);
			
			// RosyWriter prefers to discard late video frames early in the capture pipeline, since its
			// processing can take longer than real-time on some platforms (such as iPhone 3GS).
			// Clients whose image processing is faster than real-time should consider setting AVCaptureVideoDataOutput's
			// AlwaysDiscardsLateVideoFrames property to false.
			var videoOut = new AVCaptureVideoDataOutput {
				AlwaysDiscardsLateVideoFrames = true,
				VideoSettings = new AVVideoSettings (CVPixelFormatType.CV32BGRA)
			};
			
			// Create a DispatchQueue for the Video Processing
			var videoCaptureQueue = new DispatchQueue ("Video Capture Queue");
			videoOut.SetSampleBufferDelegateQueue (this, videoCaptureQueue);
			
			if (captureSession.CanAddOutput (videoOut))
				captureSession.AddOutput (videoOut);
			
			// Set the Video connection from the Video Output object
			videoConnection = videoOut.ConnectionFromMediaType (AVMediaType.Video);
			videoOrientation = videoConnection.VideoOrientation;
			
			captureSession.CommitConfiguration ();
			
			return true;
		}
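
Both outputs deliver into this same object, so the shared sample-buffer callback has to tell the streams apart by connection. Exporting the ObjC selector lets one NSObject-derived class act as delegate for both outputs; a sketch, with the actual write calls left as assumptions:

		[Export ("captureOutput:didOutputSampleBuffer:fromConnection:")]
		public void DidOutputSampleBuffer (AVCaptureOutput captureOutput,
			CMSampleBuffer sampleBuffer, AVCaptureConnection connection)
		{
			if (connection == videoConnection) {
				// Process the video frame and append it to the writer (assumed).
			} else if (connection == audioConnection) {
				// Append the audio buffer to the writer (assumed).
			}
			sampleBuffer.Dispose ();
		}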
Code example #32
0
		public CustomVideoCompositor (IntPtr handle) : base (handle)
		{
			renderingQueue = new DispatchQueue ("com.apple.aplcustomvideocompositor.renderingqueue");
			renderContextQueue = new DispatchQueue ("com.apple.aplcustomvideocompositor.rendercontextqueue");
			renderContextDidChange = false;
		}
Code example #33
0
		/// <summary>
		/// Stops and tears down the capture session.
		/// </summary>
		public void StopAndTearDownCaptureSession ()
		{
			// Guard everything on the session: calling StopRunning on a null
			// session would throw before the original null check could run.
			if (captureSession != null) {
				captureSession.StopRunning ();
				NSNotificationCenter.DefaultCenter.RemoveObserver (this, AVCaptureSession.DidStopRunningNotification, captureSession);
				captureSession.Dispose ();
				captureSession = null;
			}
			
			if (previewBufferQueue != null){
				previewBufferQueue.Dispose ();
				previewBufferQueue = null;
			}
			
			if (movieWritingQueue != null){
				movieWritingQueue.Dispose ();
				movieWritingQueue = null;
			}
		}
Code example #34
0
		public SessionManager ()
		{
			sessionQueue = new DispatchQueue ("com.apple.sample.sessionmanager.capture");
			pipelineRunningTask = 0;
		}