Example #1
		public override void DidOutputSampleBuffer (AVCaptureOutput captureOutput, CMSampleBuffer sampleBuffer, AVCaptureConnection connection)
		{
			try {
				connection.VideoOrientation = AVCaptureVideoOrientation.Portrait;

				using (var image = ImageFromSampleBuffer (sampleBuffer)){
					if(_CurrentState.didKnock){
						KnockCount++;

						if(KnockCount==1){
							_CurrentState.CountDown = 5;

							InvokeOnMainThread (delegate {  
								_CurrentState.TopLabel.Text = "Knock Again to Post!!";
								_CurrentState.BottomLabel.Text = "Knock to Post: 5 sec";
							});

						}else if(KnockCount==40){
							_CurrentState.CountDown = 4;
							InvokeOnMainThread (delegate {
								_CurrentState.BottomLabel.Text = "Knock to Post: 4 sec";
							});
						}else if(KnockCount==80){
							_CurrentState.CountDown = 3;
							InvokeOnMainThread (delegate {
								_CurrentState.BottomLabel.Text = "Knock to Post: 3 sec";
							});
						}else if(KnockCount==120){
							_CurrentState.CountDown = 2;
							InvokeOnMainThread (delegate {  
								_CurrentState.BottomLabel.Text = "Knock to Post: 2 sec";
							});
						}else if(KnockCount==160){
							_CurrentState.CountDown = 1;
							InvokeOnMainThread (delegate {  
								_CurrentState.BottomLabel.Text = "Knock to Post: 1 sec";
							});
						}else if(KnockCount>200){
							InvokeOnMainThread (delegate {  
								_CurrentState.TopLabel.Text = "Follow @JoesDoor on Twitter";
								_CurrentState.BottomLabel.Text = "Knock to take a photo";
							});
							KnockCount=0;
							_CurrentState.CountDown = 0;
							_CurrentState.didKnock=false;

						}
					}else{
						InvokeOnMainThread(delegate {
							using (var pool = new NSAutoreleasePool ()) {
								_CurrentState.DisplayImageView.Image = image;
							}
						});
					}
				}
				sampleBuffer.Dispose ();
			} catch (Exception e){
				Console.WriteLine (e);
			}
		}
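Example #1 (and several of the later examples) calls an ImageFromSampleBuffer helper that is not part of the delegate API. A minimal sketch of such a helper, assuming the video output was configured for 32BGRA pixel buffers and that the CoreMedia, CoreVideo, CoreGraphics and UIKit namespaces are imported, might look like this:

		UIImage ImageFromSampleBuffer (CMSampleBuffer sampleBuffer)
		{
			// Grab the pixel buffer that backs this sample buffer (video frames only).
			using (var pixelBuffer = sampleBuffer.GetImageBuffer () as CVPixelBuffer) {
				pixelBuffer.Lock (0);

				var baseAddress = pixelBuffer.BaseAddress;
				int bytesPerRow = (int) pixelBuffer.BytesPerRow;
				int width = (int) pixelBuffer.Width;
				int height = (int) pixelBuffer.Height;

				// BGRA layout as delivered when VideoSettings requests CV32BGRA.
				var flags = CGBitmapFlags.PremultipliedFirst | CGBitmapFlags.ByteOrder32Little;
				using (var colorSpace = CGColorSpace.CreateDeviceRGB ())
				using (var context = new CGBitmapContext (baseAddress, width, height, 8, bytesPerRow, colorSpace, (CGImageAlphaInfo) flags))
				using (var cgImage = context.ToImage ()) {
					pixelBuffer.Unlock (0);
					return UIImage.FromImage (cgImage);
				}
			}
		}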
Example #2
			public override void DidOutputMetadataObjects(AVCaptureMetadataOutput captureOutput, AVMetadataObject[] metadataObjects, AVCaptureConnection connection)
			{
				foreach(var m in metadataObjects)
				{
					if(m is AVMetadataMachineReadableCodeObject)
					{
						MetadataFound(this, m as AVMetadataMachineReadableCodeObject);
					}
				}
			}
Example #3
        public void DidOutputMetadataObjects(AVCaptureMetadataOutput captureOutput, AVMetadataObject[] faces, AVCaptureConnection connection)
        {
            List<int> unseen = faceViews.Keys.ToList ();
            List<int> seen = new List<int> ();

            CATransaction.Begin ();
            CATransaction.DisableActions = true; // disable implicit animations while the face frames are updated

            foreach (var face in faces) {
                int faceId = (face as AVMetadataFaceObject).FaceID;
                unseen.Remove (faceId);
                seen.Add (faceId);

                FaceView view;
                if (faceViews.ContainsKey (faceId))
                    view = faceViews [faceId];
                else {
                    view = new FaceView ();
                    view.Layer.CornerRadius = 10;
                    view.Layer.BorderWidth = 3;
                    view.Layer.BorderColor = UIColor.Green.CGColor;
                    previewView.AddSubview (view);
                    faceViews.Add (faceId, view);
                    view.Id = faceId;
                    view.Callback = TouchCallBack;
                    if (lockedFaceID != null)
                        view.Alpha = 0;
                }

                AVMetadataFaceObject adjusted = (AVMetadataFaceObject)(previewView.Layer as AVCaptureVideoPreviewLayer).GetTransformedMetadataObject (face);
                view.Frame = adjusted.Bounds;
            }

            foreach (int faceId in unseen) {
                FaceView view = faceViews [faceId];
                view.RemoveFromSuperview ();
                faceViews.Remove (faceId);
                if (faceId == lockedFaceID)
                    clearLockedFace ();
            }

            if (lockedFaceID != null) {
                FaceView view = faceViews [lockedFaceID.GetValueOrDefault ()];
                float size = (float)Math.Max (view.Frame.Size.Width, view.Frame.Size.Height) / device.VideoZoomFactor;
                float zoomDelta = lockedFaceSize / size;
                float lockTime = (float)(CATransition.CurrentMediaTime () - this.lockTime);
                float zoomRate = (float)(Math.Log (zoomDelta) / lockTime);
                if (Math.Abs (zoomDelta) > 0.1)
                    device.RampToVideoZoom (zoomRate > 0 ? MaxZoom : 1, zoomRate);
            }

            CATransaction.Commit ();
        }
Example #4
		public override void DidOutputSampleBuffer
		(
			AVCaptureOutput captureOutput, 
			CMSampleBuffer sampleBuffer, 
			AVCaptureConnection connection
		)
		{
			try 
			{
				// render the image into the debug preview pane
				UIImage image = getImageFromSampleBuffer(sampleBuffer);
				
				// event the capture up
				onImageCaptured( image );
				
				// make sure AVFoundation does not run out of buffers
				sampleBuffer.Dispose ();
			} 
			catch (Exception ex)
			{
				string errorMessage =  string.Format("Failed to process image capture: {0}", ErrorHandling.GetExceptionDetailedText(ex) );
				onCaptureError( errorMessage );
			}
		}
Example #5
		public void DidOutputMetadataObjects (AVCaptureOutput captureOutput, 
		                               AVMetadataObject[] metadataObjects,
		                               AVCaptureConnection connection)
		{
			Barcodes = metadataObjects.ToList ();
		}
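DidOutputMetadataObjects is only called once an AVCaptureMetadataOutput has been added to the session with a delegate and a set of requested metadata types. A rough setup sketch for recent Xamarin.iOS, where session and metadataDelegate are placeholder names, might be:

		var metadataOutput = new AVCaptureMetadataOutput ();
		metadataOutput.SetDelegate (metadataDelegate, DispatchQueue.MainQueue);

		if (session.CanAddOutput (metadataOutput))
			session.AddOutput (metadataOutput);

		// Requested types must be a subset of AvailableMetadataObjectTypes,
		// which is only populated after the output has been added to a session.
		metadataOutput.MetadataObjectTypes = AVMetadataObjectType.QRCode | AVMetadataObjectType.EAN13Code;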
Example #6
        public override void DidOutputSampleBuffer(AVCaptureOutput captureOutput, CMSampleBuffer sampleBuffer, AVCaptureConnection connection)
        {
            try
            {
                lastSampleTime = sampleBuffer.PresentationTimeStamp;

                var image = ImageFromSampleBuffer(sampleBuffer);

                if (frame == 0)
                {
                    writer.StartWriting();
                    writer.StartSessionAtSourceTime(lastSampleTime);
                    frame = 1;
                }
                String infoString = "";
                if (inputWriter.ReadyForMoreMediaData)
                {
                    if (!inputWriter.AppendSampleBuffer(sampleBuffer))
                    {
                        infoString = "Failed to append sample buffer";
                    }
                    else
                    {
                        infoString = String.Format("{0} frames captured", frame++);
                    }
                }
                else
                {
                    infoString = "Writer not ready";
                }

                ImageView.BeginInvokeOnMainThread(() => ImageView.Image = image);
                InfoLabel.BeginInvokeOnMainThread(() => InfoLabel.Text = infoString);
            }
            catch (Exception e)
            {
                Failure.Alert(e.Message);
            }
            finally
            {
                sampleBuffer.Dispose();
            }
        }
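Example #6 assumes that writer (an AVAssetWriter), inputWriter (an AVAssetWriterInput) and the frame counter are created elsewhere. A hedged sketch of that setup, with outputUrl and the 640x480 H.264 settings chosen purely for illustration, could look like:

		NSError error;
		writer = AVAssetWriter.FromUrl (outputUrl, AVFileType.Mpeg4, out error);

		// Compress incoming frames to H.264; the dimensions here are illustrative only.
		inputWriter = new AVAssetWriterInput (AVMediaType.Video, new AVVideoSettingsCompressed {
			Codec = AVVideoCodec.H264,
			Width = 640,
			Height = 480
		}) {
			ExpectsMediaDataInRealTime = true
		};

		if (writer.CanAddInput (inputWriter))
			writer.AddInput (inputWriter);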
Example #7
			public override void DidOutputSampleBuffer (AVCaptureOutput captureOutput, CMSampleBuffer sampleBuffer, AVCaptureConnection connection)
			{
				if ((DateTime.UtcNow - lastAnalysis).TotalMilliseconds < options.DelayBetweenAnalyzingFrames || working
				    || CancelTokenSource.IsCancellationRequested)
					return;

				working = true;
				//Console.WriteLine("SAMPLE");

				lastAnalysis = DateTime.UtcNow;

				try 
				{
					using (var image = ImageFromSampleBuffer (sampleBuffer))
						HandleImage(image);
					
					//
					// Although this looks innocent ("oh, he is just optimizing this case away"),
					// it is essential to call Dispose here: AVFoundation has a fixed number of
					// buffers, and if it runs out of free buffers it will stop delivering frames.
					//
					sampleBuffer.Dispose ();
				} catch (Exception e){
					Console.WriteLine (e);
				}

				working = false;
			}
Example #8
	    bool SetupCaptureSession ()
		{
			//Console.WriteLine ("SetupCaptureSession");
			// Overview: RosyWriter uses separate GCD queues for audio and video capture.  If a single GCD queue
			// is used to deliver both audio and video buffers, and our video processing consistently takes
			// too long, the delivery queue can back up, resulting in audio being dropped.
			// 
			// When recording, RosyWriter creates a third GCD queue for calls to AVAssetWriter.  This ensures
			// that AVAssetWriter is not called to start or finish writing from multiple threads simultaneously.
			//				
			// RosyWriter uses AVCaptureSession's default preset, AVCaptureSessionPresetHigh.
			
			// Create Capture session
			captureSession = new AVCaptureSession ();
			captureSession.BeginConfiguration ();
			
			// Create audio connection
			NSError error;
			var audioDevice = AVCaptureDevice.DefaultDeviceWithMediaType (AVMediaType.Audio);
			if (audioDevice == null)
				return false; // e.g. simulator

			var audioIn = new AVCaptureDeviceInput (audioDevice, out error);
			if (captureSession.CanAddInput (audioIn))
				captureSession.AddInput (audioIn);
			
			var audioOut = new AVCaptureAudioDataOutput ();
			var audioCaptureQueue = new DispatchQueue ("Audio Capture Queue");

			// Add the Delegate to capture each sample that comes through
			audioOut.SetSampleBufferDelegateQueue (this, audioCaptureQueue);
			
			if (captureSession.CanAddOutput (audioOut))
				captureSession.AddOutput (audioOut);
			
			audioConnection = audioOut.ConnectionFromMediaType (AVMediaType.Audio);
					
			// Create Video Session
			var videoDevice = VideoDeviceWithPosition (AVCaptureDevicePosition.Back);
			var videoIn = new AVCaptureDeviceInput (videoDevice, out error);
			
			if (captureSession.CanAddInput (videoIn))
				captureSession.AddInput (videoIn);
			
			// RosyWriter prefers to discard late video frames early in the capture pipeline, since its
			// processing can take longer than real-time on some platforms (such as iPhone 3GS).
			// Clients whose image processing is faster than real-time should consider setting AVCaptureVideoDataOutput's
			// alwaysDiscardsLateVideoFrames property to NO.
			var videoOut = new AVCaptureVideoDataOutput {
				AlwaysDiscardsLateVideoFrames = true,
				VideoSettings = new AVVideoSettings (CVPixelFormatType.CV32BGRA)
			};
			
			// Create a DispatchQueue for the Video Processing
			var videoCaptureQueue = new DispatchQueue ("Video Capture Queue");
			videoOut.SetSampleBufferDelegateQueue (this, videoCaptureQueue);
			
			if (captureSession.CanAddOutput (videoOut))
				captureSession.AddOutput (videoOut);
			
			// Set the Video connection from the Video Output object
			videoConnection = videoOut.ConnectionFromMediaType (AVMediaType.Video);
			videoOrientation = videoConnection.VideoOrientation;
			
			captureSession.CommitConfiguration ();
			
			return true;
		}
Example #9
			public override void DidOutputSampleBuffer (AVCaptureOutput captureOutput, CMSampleBuffer sampleBuffer, AVCaptureConnection connection)
			{
				try {

					if (!gotResult)
					{
						LuminanceSource luminance;
						//connection.VideoOrientation = AVCaptureVideoOrientation.Portrait;

						using (var pixelBuffer = sampleBuffer.GetImageBuffer () as CVPixelBuffer) {
		
							if (bytes == null)
								bytes = new byte [pixelBuffer.Height * pixelBuffer.BytesPerRow];
		
							pixelBuffer.Lock (0);
							Marshal.Copy (pixelBuffer.BaseAddress, bytes, 0, bytes.Length);
		
							luminance = new RGBLuminanceSource (bytes, pixelBuffer.Width, pixelBuffer.Height);


							pixelBuffer.Unlock (0);
						}

						var binarized = new BinaryBitmap (new HybridBinarizer (luminance));
						var result = reader.decodeWithState (binarized);

						//parent.session.StopRunning ();

						gotResult = true;

					
						if (parent.Scan != null)
							parent.Scan (result);
					}

				} catch (ReaderException) {

					// ignore this exception; it happens every time there is a failed scan

				} catch (Exception e) {

					// TODO: this one is unexpected.. log or otherwise handle it

					throw;

				} finally {
					try {
						// lamest thing, but seems that this throws :(
						sampleBuffer.Dispose ();
					} catch { }
				}
			}
Example #10
            public override void DidOutputSampleBuffer(AVCaptureOutput captureOutput, CMSampleBuffer sampleBuffer, AVCaptureConnection connection)
            {
                try {

                    var image = ImageFromSampleBuffer (sampleBuffer);
                    sampleBuffer.Dispose();
                    captureOutput.Dispose();
                    connection.Dispose();
                    if (barChecking == false) {
                        barChecking = true;
                        codeImage = image;
                        Thread barCodeThread = new Thread (new ThreadStart (CheckBarCode));
                        barCodeThread.Start ();
                    }

                    AppDelegate.main.decryptVC.imageCaptureView.BeginInvokeOnMainThread (delegate {
                        AppDelegate.main.decryptVC.imageCaptureView.Image = image;
                    });

                } catch (Exception e) {
                    Console.WriteLine (e);
                }
            }
Example #11
            public override void DidOutputMetadataObjects(AVCaptureMetadataOutput captureOutput, AVMetadataObject[] metadataObjects, AVCaptureConnection connection)
            {
                string code = "";
                foreach (var metadata in metadataObjects)
                {
                    if (metadata.Type == AVMetadataObject.TypeQRCode) {
                        code = ((AVMetadataMachineReadableCodeObject)metadata).StringValue;
                        Console.WriteLine ("qrcode: " + code);
                    } else {
                        Console.WriteLine ("type: " + metadata.Type);
                        code = ((AVMetadataMachineReadableCodeObject)metadata).StringValue;
                        Console.WriteLine ("----: " + code);
                    }
                }

                if (parent.QrScan != null)
                        parent.QrScan (code);
            }
Example #12
			public override void DidOutputSampleBuffer (AVCaptureOutput captureOutput, MonoTouch.CoreMedia.CMSampleBuffer sampleBuffer, AVCaptureConnection connection)
			{
				try {
					using (var pixelBuffer = sampleBuffer.GetImageBuffer () as CVPixelBuffer){	
						int width = pixelBuffer.Width;
						int height = pixelBuffer.Height;
					
						if (container.ripple == null || width != textureWidth || height != textureHeight){
							textureWidth = width;
							textureHeight = height;
							container.SetupRipple (textureWidth, textureHeight);
						}
						CleanupTextures ();
						
						// Y-plane
						GL.ActiveTexture (All.Texture0);
						All re = (All) 0x1903; // GL_RED_EXT, RED component from ARB OpenGL extension
						CVReturn status;
						lumaTexture = container.videoTextureCache.TextureFromImage (pixelBuffer, true, re, textureWidth, textureHeight, re, DataType.UnsignedByte, 0, out status);
						
						if (lumaTexture == null){
							Console.WriteLine ("Error creating luma texture: {0}", status);
							return;
						}
						GL.BindTexture ((All)lumaTexture.Target, lumaTexture.Name);
						GL.TexParameter (All.Texture2D, All.TextureWrapS, (int) All.ClampToEdge);
						GL.TexParameter (All.Texture2D, All.TextureWrapT, (int) All.ClampToEdge);
						
						// UV Plane
						GL.ActiveTexture (All.Texture1);
						re = (All) 0x8227; // GL_RG_EXT, RED GREEN component from ARB OpenGL extension
						chromaTexture = container.videoTextureCache.TextureFromImage (pixelBuffer, true, re, textureWidth/2, textureHeight/2, re, DataType.UnsignedByte, 1, out status);
						
						if (chromaTexture == null){
							Console.WriteLine ("Error creating chroma texture: {0}", status);
							return;
						}
						GL.BindTexture ((All) chromaTexture.Target, chromaTexture.Name);
						GL.TexParameter (All.Texture2D, All.TextureWrapS, (int)All.ClampToEdge);
						GL.TexParameter (All.Texture2D, All.TextureWrapT, (int) All.ClampToEdge);
					}
				} finally {
					sampleBuffer.Dispose ();
				}
			}
Example #13
			public override void DidOutputSampleBuffer (AVCaptureOutput captureOutput, CMSampleBuffer sampleBuffer, AVCaptureConnection connection)
			{
				CMFormatDescription formatDescription = sampleBuffer.GetFormatDescription ();

				if (connection == processor.videoConnection) {
					// Get framerate
					CMTime timestamp = sampleBuffer.PresentationTimeStamp;
					CalculateFramerateAtTimestamp (timestamp);			
						
					// Get frame dimensions (for onscreen display)
					if (processor.VideoDimensions.Width == 0 && processor.VideoDimensions.Height == 0)
						processor.VideoDimensions = formatDescription.GetVideoPresentationDimensions (true, false);
						
					// Get the buffer type
					if (processor.VideoType == 0)
						processor.VideoType = formatDescription.MediaSubType;
					// TODO: processor.VideoType = (CMVideoCodecType)Enum.ToObject (typeof(CMVideoCodecType), formatDescription.MediaSubType);
					
					// Synchronously process the pixel buffer to de-green it.
					using (var pixelBuffer = sampleBuffer.GetImageBuffer ())
						ProcessPixelBuffer (pixelBuffer);

					processor.previewBufferQueue.Enqueue (sampleBuffer);
						
					//var writeBuffer = sampleBuffer.Duplicate ();
					InvokeOnMainThread (() => {
						var j = processor.previewBufferQueue.Dequeue ();
				
						var sbuf = j as CMSampleBuffer;
						if (sbuf == null) {
							// Record the current sampleBuffer.ClassHandle
							// Then run another iteration and on the next one, print the ClassHandle
							Console.WriteLine ("The type is {0}", new NSString (CFCopyDescription (j.Handle)));
							return;
						}
						
						using (CVImageBuffer pixBuf = sbuf.GetImageBuffer ()){
							if (processor.PixelBufferReadyForDisplay != null)
								processor.PixelBufferReadyForDisplay (pixBuf);
						}
		
						if(processor.assetWriter == null)
							sbuf.Dispose();
						else
							processor.CompleteBufferUse (sbuf);
					});
				}
				
				
				processor.movieWritingQueue.DispatchAsync (() => {
					if (processor.assetWriter != null) {
						bool wasReadyToRecord = (processor.readyToRecordAudio && processor.readyToRecordVideo);
					
						// Initialize the video input if this is not done yet
						if (!processor.readyToRecordVideo)
							processor.readyToRecordVideo = SetupAssetWriterVideoInput (formatDescription);
						
						// Write the video data to file
						if (processor.readyToRecordVideo && processor.readyToRecordAudio)
							processor.WriteSampleBuffer (sampleBuffer, AVMediaType.Video);
			
						bool isReadyToRecord = (processor.readyToRecordAudio && processor.readyToRecordVideo);
					
						if (!wasReadyToRecord && isReadyToRecord) {
							processor.recordingWillBeStarted = false;
							processor.IsRecording = true;
						
							if (processor.RecordingDidStart != null)
								processor.RecordingDidStart ();
						}
						
						processor.CompleteBufferUse (sampleBuffer);
					}
				});	
			}
Example #14
			// This runs on the movieWritingQueue already
			public override void DidOutputSampleBuffer (AVCaptureOutput captureOutput, CMSampleBuffer sampleBuffer, AVCaptureConnection connection)
			{		
				try {
					if (processor.assetWriter != null) {
						var formatDescription = sampleBuffer.GetFormatDescription ();
						bool wasReadyToRecord = (processor.readyToRecordAudio && processor.readyToRecordVideo);
						
						// Initialize the audio input if this is not done yet
						if (!processor.readyToRecordAudio)
							processor.readyToRecordAudio = SetupAssetWriterAudioInput (formatDescription);
							
						// Write audio data to file
						if (processor.readyToRecordAudio && processor.readyToRecordVideo)
							processor.WriteSampleBuffer (sampleBuffer, AVMediaType.Audio);
			
						bool isReadyToRecord = (processor.readyToRecordAudio && processor.readyToRecordVideo);
						
						if (!wasReadyToRecord && isReadyToRecord) {
							processor.recordingWillBeStarted = false;
							processor.IsRecording = true;
							
							if (processor.RecordingDidStart != null)
								processor.RecordingDidStart ();
						}
					}
				} finally {
					sampleBuffer.Dispose();
				}
			}
Example #15
			public override void DidOutputMetadataObjects (AVCaptureMetadataOutput captureOutput, AVMetadataObject[] metadataObjects, AVCaptureConnection connection)
			{
				if (DidOutputMetadataObjectsAction != null)
					DidOutputMetadataObjectsAction (captureOutput, metadataObjects, connection);
			}
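Example #15 simply forwards the callback to a user-supplied action, which keeps the delegate reusable. A possible wiring, assuming the surrounding delegate class (called MetadataDelegate here, a made-up name) exposes DidOutputMetadataObjectsAction as an Action<AVCaptureMetadataOutput, AVMetadataObject[], AVCaptureConnection> and that System.Linq is imported:

		var metadataDelegate = new MetadataDelegate {
			DidOutputMetadataObjectsAction = (output, objects, connection) => {
				foreach (var readable in objects.OfType<AVMetadataMachineReadableCodeObject> ())
					Console.WriteLine ("Scanned: {0}", readable.StringValue);
			}
		};
		metadataOutput.SetDelegate (metadataDelegate, DispatchQueue.MainQueue);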
Example #16
			public override void DidOutputSampleBuffer (AVCaptureOutput captureOutput, CMSampleBuffer sampleBuffer, 
				                                        AVCaptureConnection connection)
			{
				try {
					var frame = ImageFromSampleBuffer (sampleBuffer);
					Camera.OnFrameCaptured (frame);
					sampleBuffer.Dispose ();
				} catch (Exception ex) {
					Debug.WriteLine (ex);
				}
			}
Example #17
 public override void DidOutputSampleBuffer(AVCaptureOutput output, CMSampleBuffer buffer, AVCaptureConnection con)
 {
     //  Implement
     //  - see: http://go-mono.com/docs/index.aspx?link=T%3aMonoTouch.Foundation.ModelAttribute
     //
 }
Example #18
			public override void DidOutputSampleBuffer (AVCaptureOutput captureOutput, CMSampleBuffer sampleBuffer, AVCaptureConnection connection)
			{
				try {
					var image = ImageFromSampleBuffer (sampleBuffer);
	
					// Do something with the image, we just stuff it in our main view.
					AppDelegate.ImageView.BeginInvokeOnMainThread (delegate {
						AppDelegate.ImageView.Image = image;
					});
			
					//
					// Although this looks innocent ("oh, he is just optimizing this case away"),
					// it is essential to call Dispose here: AVFoundation has a fixed number of
					// buffers, and if it runs out of free buffers it will stop delivering frames.
					//
					sampleBuffer.Dispose ();
				} catch (Exception e){
					Console.WriteLine (e);
				}
			}
Example #19
			public override void DidOutputSampleBuffer (AVCaptureOutput captureOutput, CMSampleBuffer sampleBuffer, AVCaptureConnection connection)
			{
				try {
					var image = ImageFromSampleBuffer (sampleBuffer);
	
					// Do something with the image, we just stuff it in our main view.
					AppDelegate.ImageView.BeginInvokeOnMainThread (delegate {
						AppDelegate.ImageView.Image = image;
					});
				
					sampleBuffer.Dispose ();
				} catch (Exception e){
					Console.WriteLine (e);
				}
			}
Example #20
		public virtual void DidOutputSampleBuffer (AVCaptureOutput captureOutput, CMSampleBuffer sampleBuffer, AVCaptureConnection connection)
		{
			CMFormatDescription formatDescription = sampleBuffer.GetFormatDescription ();

			if (connection == videoConnection) {
				// Get framerate
				CMTime timestamp = sampleBuffer.PresentationTimeStamp;
				CalculateFramerateAtTimestamp (timestamp);			
					
				// Get frame dimensions (for onscreen display)
				if (VideoDimensions.IsEmpty)
					VideoDimensions = formatDescription.GetVideoPresentationDimensions (true, false);
					
				// Get the buffer type
				if (VideoType == 0)
					VideoType = formatDescription.MediaSubType;

				// Synchronously process the pixel buffer to de-green it.
				using (var pixelBuffer = sampleBuffer.GetImageBuffer ())
					ProcessPixelBuffer (pixelBuffer);

				previewBufferQueue.Enqueue (sampleBuffer);
					
				//var writeBuffer = sampleBuffer.Duplicate ();
				InvokeOnMainThread (() => {
					var j = previewBufferQueue.Dequeue ();
			
					var sbuf = j as CMSampleBuffer;
					if (sbuf == null) {
#if DEBUG
						// Record the current sampleBuffer.ClassHandle
						// Then run another iteration and on the next one, print the ClassHandle
						Console.WriteLine ("The type is {0}", new NSString (CFCopyDescription (j.Handle)));
#endif
						return;
					}

					using (CVImageBuffer pixBuf = sbuf.GetImageBuffer ()) {
						if (PixelBufferReadyForDisplay != null)
							PixelBufferReadyForDisplay (pixBuf);
					}
				});
			}
			// keep a reference to 'sampleBuffer', movieWritingQueue will remove it
			CompleteBufferUse (sampleBuffer);

			movieWritingQueue.DispatchAsync (() => {
				if (assetWriter != null) {
					bool wasReadyToRecord = (readyToRecordAudio && readyToRecordVideo);
				
					if (connection == videoConnection) {
						// Initialize the video input if this is not done yet
						if (!readyToRecordVideo)
							readyToRecordVideo = SetupAssetWriterVideoInput (formatDescription);

						// Write the video data to file
						if (readyToRecordVideo && readyToRecordAudio) {
							WriteSampleBuffer (sampleBuffer, AVMediaType.Video);
						}
					} else if (connection == audioConnection) {
						if (!readyToRecordAudio)
							readyToRecordAudio = SetupAssetWriterAudioInput (formatDescription);

						if (readyToRecordAudio && readyToRecordVideo)
							WriteSampleBuffer (sampleBuffer, AVMediaType.Audio);
					}
					bool isReadyToRecord = (readyToRecordAudio && readyToRecordVideo);
				
					if (!wasReadyToRecord && isReadyToRecord) {
						recordingWillBeStarted = false;
						IsRecording = true;
					
						if (RecordingDidStart != null)
							RecordingDidStart ();
					}
				}
				CompleteBufferUse (sampleBuffer);
			});	
		}
Example #21
			public void DidDropSampleBuffer(AVCaptureOutput captureOutput, CMSampleBuffer sampleBuffer, AVCaptureConnection connection)
			{
				//Console.WriteLine("DROPPED");
			}