Example #1
        public override void DidOutputSampleBuffer(
            AVCaptureOutput captureOutput,
            CMSampleBuffer sampleBuffer,
            AVCaptureConnection connection)
        {
            try
            {
                if ((DateTime.Now - _lastSendTime).TotalSeconds > 2)
                {
                    _lastSendTime = DateTime.Now;
                    Task.Run(async () =>
                    {
                        // Grab the frame image here and do whatever processing is needed,
                        // e.g. push a modified image back into the preview
                        var image = GetImageFromSampleBuffer(sampleBuffer);
                        var d     = DependencyService.Get<IJankenJudgeService>() as JankenJudgeService;
                        await d.DetectAsync(image);
                    });
                }

                //Without this, the app crashes with "Received memory warning." or the screen stops updating
                GC.Collect();  //  avoid "Received memory warning."
            }
            catch (Exception e)
            {
                Console.WriteLine("Error sampling buffer: {0}", e.Message);
            }
        }
Example #2
		public override void DidOutputSampleBuffer (AVCaptureOutput captureOutput, CMSampleBuffer sampleBuffer, AVCaptureConnection connection)
		{
			try {
				connection.VideoOrientation = AVCaptureVideoOrientation.Portrait;

				using (var image = ImageFromSampleBuffer (sampleBuffer)){
					if(_CurrentState.didKnock){
						KnockCount++;

						if(KnockCount==1){
							_CurrentState.CountDown = 5;

							InvokeOnMainThread (delegate {  
								_CurrentState.TopLabel.Text = "Knock Again to Post!!";
								_CurrentState.BottomLabel.Text = "Knock to Post: 5 sec";
							});

						}else if(KnockCount==40){
							_CurrentState.CountDown = 4;
							InvokeOnMainThread (delegate {
								_CurrentState.BottomLabel.Text = "Knock to Post: 4 sec";
							});
						}else if(KnockCount==80){
							_CurrentState.CountDown = 3;
							InvokeOnMainThread (delegate {
								_CurrentState.BottomLabel.Text = "Knock to Post: 3 sec";
							});
						}else if(KnockCount==120){
							_CurrentState.CountDown = 2;
							InvokeOnMainThread (delegate {  
								_CurrentState.BottomLabel.Text = "Knock to Post: 2 sec";
							});
						}else if(KnockCount==160){
							_CurrentState.CountDown = 1;
							InvokeOnMainThread (delegate {  
								_CurrentState.BottomLabel.Text = "Knock to Post: 1 sec";
							});
						}else if(KnockCount>200){
							InvokeOnMainThread (delegate {  
								_CurrentState.TopLabel.Text = "Follow @JoesDoor on Twitter";
								_CurrentState.BottomLabel.Text = "Knock to take a photo";
							});
							KnockCount=0;
							_CurrentState.CountDown = 0;
							_CurrentState.didKnock=false;

						}
					}else{
						InvokeOnMainThread(delegate {
							using (var pool = new NSAutoreleasePool ()) {
								_CurrentState.DisplayImageView.Image = image;
							}
						});
					}
				}
				sampleBuffer.Dispose ();
			} catch (Exception e){
				Console.WriteLine (e);
			}
		}
Example #3
        /// <Docs>The capture output on which the frame was captured.</Docs>
        /// <param name="connection">The connection on which the video frame was received.</param>
        /// <remarks>Unless you need to keep the buffer for longer, you must call
        ///  Dispose() on the sampleBuffer before returning. The system
        ///  has a limited pool of video frames, and once it runs out of
        ///  those buffers, the system will stop calling this method
        ///  until the buffers are released.</remarks>
        /// <summary>
        /// Called when a sample buffer has been output on the capture connection.
        /// </summary>
        /// <param name="captureOutput">Capture output.</param>
        /// <param name="sampleBuffer">Sample buffer.</param>
        public override void DidOutputSampleBuffer(AVCaptureOutput captureOutput, CMSampleBuffer sampleBuffer, AVCaptureConnection connection)
        {
            // Trap all errors
            try
            {
                // Grab an image from the buffer
                var image = GetImageFromSampleBuffer(sampleBuffer);

                // Display the image
                if (DisplayView != null)
                {
                    DisplayView.BeginInvokeOnMainThread(() =>
                    {
                        // Set the image
                        DisplayView.Image = image;

                        // Rotate image to the correct display orientation
                        DisplayView.Transform = CGAffineTransform.MakeRotation((float)Math.PI / 2);
                    });
                }

                // IMPORTANT: You must release the buffer because AVFoundation has a fixed number
                // of buffers and will stop delivering frames if it runs out.
                sampleBuffer.Dispose();
            }
            catch (Exception e)
            {
                // Report error
                Console.WriteLine("Error sampling buffer: {0}", e.Message);
            }
        }
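Nearly every snippet on this page calls a GetImageFromSampleBuffer (or ImageFromSampleBuffer) helper whose body is never shown. A minimal sketch of what such a helper can look like, assuming the video data output is configured for 32-bit BGRA frames and that the CoreGraphics, CoreMedia, CoreVideo, and UIKit namespaces are imported; the helpers in the original projects may differ:

        // Hedged sketch of the helper assumed above: copies the BGRA pixel data
        // into a CGBitmapContext and wraps the result in a UIImage.
        UIImage GetImageFromSampleBuffer(CMSampleBuffer sampleBuffer)
        {
            using (var pixelBuffer = sampleBuffer.GetImageBuffer() as CVPixelBuffer)
            {
                if (pixelBuffer == null)
                    return null;

                pixelBuffer.Lock(CVPixelBufferLock.ReadOnly);
                try
                {
                    using (var colorSpace = CGColorSpace.CreateDeviceRGB())
                    using (var context = new CGBitmapContext(
                        pixelBuffer.BaseAddress,
                        pixelBuffer.Width,
                        pixelBuffer.Height,
                        8,
                        pixelBuffer.BytesPerRow,
                        colorSpace,
                        CGBitmapFlags.PremultipliedFirst | CGBitmapFlags.ByteOrder32Little))
                    using (var cgImage = context.ToImage())
                    {
                        return UIImage.FromImage(cgImage);
                    }
                }
                finally
                {
                    pixelBuffer.Unlock(CVPixelBufferLock.ReadOnly);
                }
            }
        }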
Example #4
        public override void DidOutputSampleBuffer(AVCaptureOutput captureOutput, CMSampleBuffer sampleBuffer, AVCaptureConnection connection)
        {
            frameNumber++;
            frameId++;
            if (frameNumber % 30 == 0)
            {
                var currentTime = NSProcessInfo.ProcessInfo.SystemUptime;
                var fps         = Math.Round(frameNumber / (currentTime - initialTime));
                action?.Invoke((int)fps);
                frameNumber = 0;
                initialTime = NSProcessInfo.ProcessInfo.SystemUptime;
            }

            var buffer = sampleBuffer.GetImageBuffer() as CVPixelBuffer;

            var frameEntry = new FrameEntry
            {
                TimeStamp = DateTimeOffset.UtcNow,
                Width     = (int)buffer.Width,
                Height    = (int)buffer.Height,
                Frame     = CVPixelBufferToByte(buffer)
            };

            Mvx.IoCProvider.Resolve <IInternalNotificationHubService>().NewFrame(frameEntry);

            TryDispose(sampleBuffer);
            TryDispose(buffer);
        }
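Example #4 also depends on a CVPixelBufferToByte helper that is not included. A plausible minimal version, assuming the caller wants the raw bytes as-is (row padding included) and that System.Runtime.InteropServices is imported:

        // Hedged sketch of the CVPixelBufferToByte helper assumed above: copies
        // the raw pixel data into a managed array while the base address is locked.
        byte[] CVPixelBufferToByte(CVPixelBuffer pixelBuffer)
        {
            pixelBuffer.Lock(CVPixelBufferLock.ReadOnly);
            try
            {
                var length = (int)(pixelBuffer.Height * pixelBuffer.BytesPerRow);
                var bytes  = new byte[length];
                Marshal.Copy(pixelBuffer.BaseAddress, bytes, 0, length);
                return bytes;
            }
            finally
            {
                pixelBuffer.Unlock(CVPixelBufferLock.ReadOnly);
            }
        }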
Example #5
 public override void DidOutputSampleBuffer(AVCaptureOutput captureOutput, CMSampleBuffer sampleBuffer, AVCaptureConnection connection)
 {
     try
     {
         lock (syncObject)
         {
             this.TryDispose(image);
             image = ImageFromSampleBuffer(sampleBuffer);
         }
     }
     catch (Exception e)
     {
         Console.WriteLine(e);
     }
     finally
     {
         //
         // Although this looks innocent "Oh, he is just optimizing this case away"
         // this is incredibly important to call on this callback, because the AVFoundation
         // has a fixed number of buffers and if it runs out of free buffers, it will stop
         // delivering frames.
         //
         sampleBuffer.Dispose();
     }
 }
Example #6
 public override void DidOutputSampleBuffer(AVCaptureOutput captureOutput, CMSampleBuffer sampleBuffer, AVCaptureConnection connection)
 {
     lastRunTime = DateTimeOffset.Now.ToUnixTimeMilliseconds();
     if (lastRunTime - lastAnalysisTime > scanIntervalInMs && Configuration.IsScanning)
     {
         lastAnalysisTime = lastRunTime;
         try
         {
             var image = GetImageFromSampleBuffer(sampleBuffer);
             if (image == null)
             {
                 return;
             }
             Width  = (float)image.Size.Width;
             Height = (float)image.Size.Height;
             var visionImage = new VisionImage(image)
             {
                 Metadata = metadata
             };
             releaseSampleBuffer(sampleBuffer);
             DetectBarcodeActionAsync(visionImage);
         }
         catch (Exception exception)
         {
             System.Diagnostics.Debug.WriteLine(exception.Message);
         }
     }
     releaseSampleBuffer(sampleBuffer);
 }
Example #7
            public override void DidOutputSampleBuffer(AVCaptureOutput captureOutput, CMSampleBuffer sampleBuffer, AVCaptureConnection connection)
            {
                if ((DateTime.UtcNow - lastAnalysis).TotalMilliseconds < options.DelayBetweenAnalyzingFrames || working ||
                    CancelTokenSource.IsCancellationRequested)
                {
                    return;
                }

                working = true;
                //Console.WriteLine("SAMPLE");

                lastAnalysis = DateTime.UtcNow;

                try
                {
                    using (var image = ImageFromSampleBuffer(sampleBuffer))
                        HandleImage(image);

                    //
                    // Although this looks innocent "Oh, he is just optimizing this case away"
                    // this is incredibly important to call on this callback, because the AVFoundation
                    // has a fixed number of buffers and if it runs out of free buffers, it will stop
                    // delivering frames.
                    //
                    sampleBuffer.Dispose();
                } catch (Exception e) {
                    Console.WriteLine(e);
                }

                working = false;
            }
Example #8
        /// <Docs>The capture output on which the frame was captured.</Docs>
        /// <param name="connection">The connection on which the video frame was received.</param>
        /// <remarks>Unless you need to keep the buffer for longer, you must call
        ///  Dispose() on the sampleBuffer before returning. The system
        ///  has a limited pool of video frames, and once it runs out of
        ///  those buffers, the system will stop calling this method
        ///  until the buffers are released.</remarks>
        /// <summary>
        /// Called when a sample buffer has been output on the capture connection.
        /// </summary>
        /// <param name="captureOutput">Capture output.</param>
        /// <param name="sampleBuffer">Sample buffer.</param>
        public override void DidOutputSampleBuffer(AVCaptureOutput captureOutput, CMSampleBuffer sampleBuffer, AVCaptureConnection connection)
        {
            // Trap all errors
            try {
                // Grab an image from the buffer
                var image = GetImageFromSampleBuffer(sampleBuffer);

                // Display the image
                if (DisplayView != null)
                {
                    DisplayView.BeginInvokeOnMainThread(() => {
                        // Set the image
                        DisplayView.Image = image;

                        // Rotate image to the correct display orientation
                        DisplayView.Transform = CGAffineTransform.MakeRotation((float)Math.PI / 2);
                    });
                }

                // IMPORTANT: You must release the buffer because AVFoundation has a fixed number
                // of buffers and will stop delivering frames if it runs out.
                sampleBuffer.Dispose();
            }
            catch (Exception e) {
                // Report error
                Console.WriteLine("Error sampling buffer: {0}", e.Message);
            }
        }
Example #9
            public override void DidOutputSampleBuffer(AVCaptureOutput captureOutput, CMSampleBuffer sampleBuffer, AVCaptureConnection connection)
            {
                try {
                    UIImage image = ImageFromSampleBuffer(sampleBuffer);

                    // Do something with the image, we just stuff it in our main view.
                    BeginInvokeOnMainThread(delegate {
                        if (_imageView.Frame.Size != image.Size)
                        {
                            _imageView.Frame = new CGRect(CGPoint.Empty, image.Size);
                        }
                        _imageView.Image = image;
                    });

                    //
                    // Although this looks innocent "Oh, he is just optimizing this case away"
                    // this is incredibly important to call on this callback, because the AVFoundation
                    // has a fixed number of buffers and if it runs out of free buffers, it will stop
                    // delivering frames.
                    //
                    sampleBuffer.Dispose();
                } catch (Exception e) {
                    Console.WriteLine(e);
                }
            }
Example #10

        public override void DidOutputSampleBuffer(
            AVCaptureOutput captureOutput,
            CMSampleBuffer sampleBuffer,
            AVCaptureConnection connection)
        {
            try
            {
                // Grab the frame image here and process it as needed

                if (m_CustomCamera.Shutter)
                {
                    UIImage uiimage = GetImageFromSampleBuffer(sampleBuffer);
                    Xamarin.Forms.ImageSource img = Xamarin.Forms.ImageSource.FromStream(() => uiimage.AsPNG().AsStream());
                    //Xamarin.Forms.ImageSource img = Xamarin.Forms.ImageSource.FromFile("tab_feed.png");

                    m_CustomCamera.Shutter = false;
                    InvokeOnMainThread(
                        () => {
                        m_CustomCamera.onShutter(img);
                    }
                        );
                }

                //Exchange data with the PCL project, etc.

                //Push the modified image back into the preview

                //Without this, the app crashes with "Received memory warning." or the screen stops updating
                GC.Collect();  //  avoid "Received memory warning."
            }
            catch (Exception e)
            {
                Console.WriteLine("Error sampling buffer: {0}", e.Message);
            }
        }
Example #11
        public virtual void DidOutputSampleBuffer(AVCaptureOutput captureOutput, CMSampleBuffer sampleBuffer, AVCaptureConnection avConnection)
        {
            CVImageBuffer imageBuffer = sampleBuffer.GetImageBuffer();
            var           pixelBuffer = imageBuffer as CVPixelBuffer;

            var bufferSize = pixelBuffer.Height * pixelBuffer.BytesPerRow;

            if (bytes.Length != bufferSize)
            {
                bytes = new byte[bufferSize];
            }

            pixelBuffer.Lock(CVPixelBufferLock.None);
            Marshal.Copy(pixelBuffer.BaseAddress, bytes, 0, bytes.Length);
            pixelBuffer.Unlock(CVPixelBufferLock.None);

            var image = SixLabors.ImageSharp.Image
                        .LoadPixelData <SixLabors.ImageSharp.PixelFormats.Rgb24>(
                SixLabors.ImageSharp.Configuration.Default,
                bytes, (int)pixelBuffer.Width, (int)pixelBuffer.Height);

            string asciiImage = ImageConverter.ImageToAsciiArt(image);

            connection.InvokeAsync("SendFrame", asciiImage);
        }
Example #12

        public override void DidOutputSampleBuffer(AVCaptureOutput captureOutput, CMSampleBuffer sampleBuffer, AVCaptureConnection connection)
        {
            //handle any errors that occur while processing the image
            try {
                //grab an image from the buffer
                var image = GetImageFromSampleBuffer(sampleBuffer);

                //display the image
                if (DisplayView != null)
                {
                    DisplayView.BeginInvokeOnMainThread(() =>
                    {
                        //set the image
                        DisplayView.Image = image;

                        //rotate image to the correct display orientation
                        DisplayView.Transform = CGAffineTransform.MakeRotation((float)Math.PI / 2);
                    });
                }
                //dispose of the sample image buffer otherwise the UI will get stuck at a certain number of frames
                sampleBuffer.Dispose();
            }
            catch (Exception e) {
                //report the error
                Console.WriteLine("Error sample Buffer description: {0}", e.Message);
            }
        }
Example #13
 public void DidOutputSampleBuffer(AVCaptureOutput captureOutput, CMSampleBuffer sampleBuffer, AVCaptureConnection connection)
 {
     try
     {
         var currentDate = DateTime.Now;
         // control the pace of the machine vision to protect battery life
         if (currentDate - lastAnalysis >= pace)
         {
             lastAnalysis = currentDate;
         }
         else
         {
             return;                     // don't run the classifier more often than we need
         }
         // Crop and resize the image data.
         // Note, this uses a Core Image pipeline that could be appended with other pre-processing.
         // If we don't want to do anything custom, we can remove this step and let the Vision framework handle
         // crop and resize as long as we are careful to pass the orientation properly.
         using (var croppedBuffer = CroppedSampleBuffer(sampleBuffer))
         {
             if (croppedBuffer == null)
             {
                 return;
             }
             FrameCaptured(this, new EventArgsT <CVPixelBuffer>(croppedBuffer));
         }
     }
     finally
     {
         sampleBuffer.Dispose();
     }
 }
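The CroppedSampleBuffer helper referenced in Example #13 is not part of the snippet. As a hedged illustration of the crop step the comments describe, here is a hypothetical Core Image center-crop; the original helper returns a CVPixelBuffer and may chain further pre-processing:

 // Hypothetical center-crop step in the spirit of CroppedSampleBuffer above.
 // Only the square crop is computed here; resizing and rendering back into a
 // CVPixelBuffer are left out.
 CIImage CenterCropImage(CMSampleBuffer sampleBuffer)
 {
     using (var pixelBuffer = sampleBuffer.GetImageBuffer() as CVPixelBuffer)
     {
         if (pixelBuffer == null)
             return null;

         var image  = new CIImage(pixelBuffer);
         var extent = image.Extent;
         var side   = extent.Width < extent.Height ? extent.Width : extent.Height;
         var crop   = new CGRect(
             extent.X + (extent.Width - side) / 2,
             extent.Y + (extent.Height - side) / 2,
             side, side);
         return image.ImageByCroppingToRect(crop);
     }
 }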
Example #14

        public override void DidOutputSampleBuffer(AVCaptureOutput captureOutput, CMSampleBuffer sampleBuffer, AVCaptureConnection connection)
        {   //this method is called whenever a frame can be read from the buffer
            try
            {
                //extract a frame image from the buffer
                var image = ImageFromSampleBuffer(sampleBuffer);

                /*
                 *
                 * do some processing
                 *
                 */

                //display the current frame
                m_imageView.BeginInvokeOnMainThread(delegate()
                {
                    m_imageView.Image = image;
                });

                sampleBuffer.Dispose();
            }
            catch (Exception e)
            {
                Console.WriteLine(e);
            }
        }
Example #15

            public override void DidOutputSampleBuffer(AVCaptureOutput captureOutput, CMSampleBuffer sampleBuffer, AVCaptureConnection connection)
            {
                var msSinceLastPreview = (DateTime.UtcNow - lastAnalysis).TotalMilliseconds;

                if (msSinceLastPreview < options.DelayBetweenAnalyzingFrames ||
                    (wasScanned && msSinceLastPreview < options.DelayBetweenContinuousScans) ||
                    working ||
                    CancelTokenSource.IsCancellationRequested)
                {
                    if (sampleBuffer != null)
                    {
                        sampleBuffer.Dispose();
                        sampleBuffer = null;
                    }
                    return;
                }

                wasScanned   = false;
                working      = true;
                lastAnalysis = DateTime.UtcNow;

                try
                {
                    // Get the CoreVideo image
                    using (var pixelBuffer = sampleBuffer.GetImageBuffer() as CVPixelBuffer)
                    {
                        // Lock the base address
                        pixelBuffer.Lock(CVPixelBufferLock.ReadOnly); // MAYBE NEEDS READ/WRITE

                        CVPixelBufferARGB32LuminanceSource luminanceSource;

                        // Let's access the raw underlying data and create a luminance source from it
                        unsafe
                        {
                            var rawData    = (byte *)pixelBuffer.BaseAddress.ToPointer();
                            var rawDatalen = (int)(pixelBuffer.Height * pixelBuffer.Width * 4); //This drops 8 bytes from the original length to give us the expected length

                            luminanceSource = new CVPixelBufferARGB32LuminanceSource(rawData, rawDatalen, (int)pixelBuffer.Width, (int)pixelBuffer.Height);
                        }

                        HandleImage(luminanceSource);

                        pixelBuffer.Unlock(CVPixelBufferLock.ReadOnly);
                    }

                    //
                    // Although this looks innocent "Oh, he is just optimizing this case away"
                    // this is incredibly important to call on this callback, because the AVFoundation
                    // has a fixed number of buffers and if it runs out of free buffers, it will stop
                    // delivering frames.
                    //
                    sampleBuffer.Dispose();
                    sampleBuffer = null;
                } catch (Exception e) {
                    Console.WriteLine(e);
                }

                working = false;
            }
Example #16
 public override void DidOutputSampleBuffer(AVCaptureOutput captureOutput, CMSampleBuffer sampleBuffer, AVCaptureConnection connection)
 {
     if (BufferReceived != null)
     {
         BufferReceived(this, new BufferReceivedEventArgs(sampleBuffer));
     }
     sampleBuffer.Dispose();
 }
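Since Example #16 disposes the buffer as soon as the event handler returns, subscribers must copy whatever they need before returning. A hypothetical subscriber, assuming BufferReceivedEventArgs exposes the buffer through a SampleBuffer property (the actual member name is not shown in the snippet):

 // Hypothetical subscriber for Example #16. The delegate disposes the buffer
 // right after the event fires, so the handler has to work synchronously and
 // must not keep a reference to the buffer.
 outputDelegate.BufferReceived += (sender, e) =>
 {
     // SampleBuffer is an assumed property name on BufferReceivedEventArgs
     using (var pixelBuffer = e.SampleBuffer.GetImageBuffer() as CVPixelBuffer)
     {
         if (pixelBuffer != null)
             Console.WriteLine("Frame received: {0}x{1}", pixelBuffer.Width, pixelBuffer.Height);
     }
 };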
Example #17
        public override void DidOutputSampleBuffer(AVCaptureOutput captureOutput, CMSampleBuffer sampleBuffer, AVCaptureConnection connection)
        {
            try
            {
                using (var pixelBuffer = sampleBuffer.GetImageBuffer() as CVPixelBuffer)
                {
                    int width  = (int)pixelBuffer.Width;
                    int height = (int)pixelBuffer.Height;

                    if (this.viewControllerReference.TryGetTarget(out var container))
                    {
                        if (container.Ripple == null || width != this.textureWidth || height != this.textureHeight)
                        {
                            this.textureWidth  = width;
                            this.textureHeight = height;
                            container.SetupRipple(this.textureWidth, this.textureHeight);
                        }

                        this.CleanupTextures();

                        // Y-plane
                        GL.ActiveTexture(TextureUnit.Texture0);
                        All re = (All)0x1903; // GL_RED_EXT, RED component from ARB OpenGL extension

                        this.lumaTexture = container.VideoTextureCache.TextureFromImage(pixelBuffer, true, re, this.textureWidth, this.textureHeight, re, DataType.UnsignedByte, 0, out CVReturn status);
                        if (this.lumaTexture == null)
                        {
                            Console.WriteLine("Error creating luma texture: {0}", status);
                            return;
                        }

                        GL.BindTexture(this.lumaTexture.Target, this.lumaTexture.Name);
                        GL.TexParameter(TextureTarget.Texture2D, TextureParameterName.TextureWrapS, (int)All.ClampToEdge);
                        GL.TexParameter(TextureTarget.Texture2D, TextureParameterName.TextureWrapT, (int)All.ClampToEdge);

                        // UV Plane
                        GL.ActiveTexture(TextureUnit.Texture1);
                        re = (All)0x8227; // GL_RG_EXT, RED GREEN component from ARB OpenGL extension
                        this.chromaTexture = container.VideoTextureCache.TextureFromImage(pixelBuffer, true, re, this.textureWidth / 2, this.textureHeight / 2, re, DataType.UnsignedByte, 1, out status);
                        if (this.chromaTexture == null)
                        {
                            Console.WriteLine("Error creating chroma texture: {0}", status);
                            return;
                        }

                        GL.BindTexture(this.chromaTexture.Target, this.chromaTexture.Name);
                        GL.TexParameter(TextureTarget.Texture2D, TextureParameterName.TextureWrapS, (int)All.ClampToEdge);
                        GL.TexParameter(TextureTarget.Texture2D, TextureParameterName.TextureWrapT, (int)All.ClampToEdge);
                    }
                }
            }
            finally
            {
                sampleBuffer.Dispose();
            }
        }
Example #18
        public override void DidOutputSampleBuffer(AVCaptureOutput captureOutput, CMSampleBuffer sampleBuffer, AVCaptureConnection connection)
        {
            using (var pixelBuffer = sampleBuffer.GetImageBuffer())
                using (var ciImage = new CIImage(pixelBuffer))
                    using (var imageWithOrientation = ciImage.CreateByApplyingOrientation(ImageIO.CGImagePropertyOrientation.LeftMirrored))
                    {
                        DetectFaceLandmarks(imageWithOrientation);
                    }

            sampleBuffer.Dispose();
        }
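The DetectFaceLandmarks body in Example #18 is not shown. A minimal sketch of what it might do with the Vision framework; the method name is the only part taken from the snippet, everything else is an assumption:

        // Hedged sketch of a DetectFaceLandmarks implementation on top of Vision.
        void DetectFaceLandmarks(CIImage image)
        {
            var request = new VNDetectFaceLandmarksRequest((req, err) =>
            {
                // Results arrive as VNFaceObservation instances
                var faces = req.GetResults<VNFaceObservation>();
                if (faces != null)
                    Console.WriteLine("Detected {0} face(s)", faces.Length);
            });

            using (var handler = new VNImageRequestHandler(image, new VNImageOptions()))
            {
                handler.Perform(new VNRequest[] { request }, out NSError error);
                if (error != null)
                    Console.WriteLine(error);
            }
        }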
Example #19
 public override void DidOutputSampleBuffer(AVCaptureOutput captureOutput, CMSampleBuffer sampleBuffer,
                                            AVCaptureConnection connection)
 {
     try {
         var frame = ImageFromSampleBuffer(sampleBuffer);
         Camera.OnFrameCaptured(frame);
         sampleBuffer.Dispose();
     } catch (Exception ex) {
         Debug.WriteLine(ex);
     }
 }
Example #20
 public void DidOutputSampleBuffer(AVCaptureOutput captureOutput, CMSampleBuffer sampleBuffer, AVCaptureConnection connection)
 {
     using (var pixelBuffer = sampleBuffer.GetImageBuffer())
         using (var attachments = sampleBuffer.GetAttachments <NSString, NSObject> (CMAttachmentMode.ShouldPropagate))
             using (var ciimage = new CIImage(pixelBuffer, attachments))
                 using (var ciImageWithOrientation = ciimage.CreateWithOrientation(CIImageOrientation.RightTop)) {
                     DetectFace(ciImageWithOrientation);
                 }
     // make sure we do not run out of sampleBuffers
     sampleBuffer.Dispose();
 }
Example #21

 public void DidOutputSampleBuffer(AVCaptureOutput captureOutput, CMSampleBuffer sampleBuffer, AVCaptureConnection connection)
 {
     //connection.VideoOrientation = UIApplication.SharedApplication.StatusBarOrientation;
     using (var pixelBuffer = sampleBuffer.GetImageBuffer() as CVPixelBuffer)
     {
         // The CIImage must stay alive until the view has drawn it, so it is
         // deliberately not wrapped in a using block here.
         _cameraImage = new CIImage(pixelBuffer);
     }
     DispatchQueue.MainQueue.DispatchAsync(
         () => this._imageView.SetNeedsDisplay()
         );
 }
Example #22
        /// <summary>
        /// Callback method for when a new image is received from the video.
        /// </summary>
        public override void DidOutputSampleBuffer(AVCaptureOutput captureOutput, CMSampleBuffer sampleBuffer, AVCaptureConnection connection)
        {
            var imageBuffer   = sampleBuffer.GetImageBuffer();
            var ciImage       = new CIImage(imageBuffer);
            var cgImage       = this.context.CreateCGImage(ciImage, ciImage.Extent);
            var current_color = ColorProcessor.AverageColor(cgImage);


            if (musicProcessor == null)
            {
                Console.WriteLine("Music processor is null. Waiting for init.");
            }
            else if (!recording)
            {
                musicProcessor.PlayColor(current_color, previous_color);
            }

            previous_color = current_color;

            if (!recording)
            {
                // we intentionally do not await these methods since it would
                // prohibit the rest of the app from working.
                // these methods are executed depending on the duration of
                // the API call to Azure.

                // we handle the busy part here since if we handled it in PlayObjects
                // it would overcrowd the threads for async calls.

                if (generalObjectRecognition && !objectsProcessor.busy)
                {
                    objectsProcessor.PlayObjects(cgImage);
                }

                if (customObjectRecognition && !customObjectsProcessor.busy)
                {
                    customObjectsProcessor.PlayObjects(cgImage);
                }
            }

            BeginInvokeOnMainThread(() =>
            {
                // we need to change the background color on the main thread
                // this thread is the UI thread
                background.BackgroundColor = current_color;
            });

            // we dispose the sample buffer since we are not able to process
            // all the images and would otherwise exhaust the buffer pool
            sampleBuffer.Dispose();
        }
Example #23
            public override void DidOutputSampleBuffer(AVCaptureOutput captureOutput, CMSampleBuffer sampleBuffer, AVCaptureConnection connection)
            {
                try {
                    if (!gotResult)
                    {
                        LuminanceSource luminance;
                        //connection.VideoOrientation = AVCaptureVideoOrientation.Portrait;

                        using (var pixelBuffer = sampleBuffer.GetImageBuffer() as CVPixelBuffer) {
                            if (bytes == null)
                            {
                                bytes = new byte [pixelBuffer.Height * pixelBuffer.BytesPerRow];
                            }

                            pixelBuffer.Lock(0);
                            Marshal.Copy(pixelBuffer.BaseAddress, bytes, 0, bytes.Length);

                            luminance = new RGBLuminanceSource(bytes, pixelBuffer.Width, pixelBuffer.Height);


                            pixelBuffer.Unlock(0);
                        }

                        var binarized = new BinaryBitmap(new HybridBinarizer(luminance));
                        var result    = reader.decodeWithState(binarized);

                        //parent.session.StopRunning ();

                        gotResult = true;


                        if (parent.Scan != null)
                        {
                            parent.Scan(result);
                        }
                    }
                } catch (ReaderException) {
                    // ignore this exception; it happens every time there is a failed scan
                } catch (Exception) {
                    // TODO: this one is unexpected.. log or otherwise handle it

                    throw;
                } finally {
                    try {
                        // lamest thing, but seems that this throws :(
                        sampleBuffer.Dispose();
                    } catch { }
                }
            }
Example #24
            public override void DidOutputSampleBuffer(AVCaptureOutput captureOutput, CMSampleBuffer sampleBuffer, AVCaptureConnection connection)
            {
                try {
                    var image = ImageFromSampleBuffer(sampleBuffer);

                    // Do something with the image, we just stuff it in our main view.
                    AppDelegate.ImageView.BeginInvokeOnMainThread(delegate {
                        AppDelegate.ImageView.Image = image;
                    });

                    sampleBuffer.Dispose();
                } catch (Exception e) {
                    Console.WriteLine(e);
                }
            }
Example #25

            public override void DidOutputSampleBuffer(AVCaptureOutput captureOutput, MonoTouch.CoreMedia.CMSampleBuffer sampleBuffer, AVCaptureConnection connection)
            {
                try {
                    using (var pixelBuffer = sampleBuffer.GetImageBuffer() as CVPixelBuffer){
                        int width  = pixelBuffer.Width;
                        int height = pixelBuffer.Height;

                        if (container.ripple == null || width != textureWidth || height != textureHeight)
                        {
                            textureWidth  = width;
                            textureHeight = height;
                            container.SetupRipple(textureWidth, textureHeight);
                        }
                        CleanupTextures();

                        // Y-plane
                        GL.ActiveTexture(All.Texture0);
                        All      re = (All)0x1903;                     // GL_RED_EXT, RED component from ARB OpenGL extension
                        CVReturn status;
                        lumaTexture = container.videoTextureCache.TextureFromImage(pixelBuffer, true, re, textureWidth, textureHeight, re, DataType.UnsignedByte, 0, out status);

                        if (lumaTexture == null)
                        {
                            Console.WriteLine("Error creating luma texture: {0}", status);
                            return;
                        }
                        GL.BindTexture((All)lumaTexture.Target, lumaTexture.Name);
                        GL.TexParameter(All.Texture2D, All.TextureWrapS, (int)All.ClampToEdge);
                        GL.TexParameter(All.Texture2D, All.TextureWrapT, (int)All.ClampToEdge);

                        // UV Plane
                        GL.ActiveTexture(All.Texture1);
                        re            = (All)0x8227;               // GL_RG_EXT, RED GREEN component from ARB OpenGL extension
                        chromaTexture = container.videoTextureCache.TextureFromImage(pixelBuffer, true, re, textureWidth / 2, textureHeight / 2, re, DataType.UnsignedByte, 1, out status);

                        if (chromaTexture == null)
                        {
                            Console.WriteLine("Error creating chroma texture: {0}", status);
                            return;
                        }
                        GL.BindTexture((All)chromaTexture.Target, chromaTexture.Name);
                        GL.TexParameter(All.Texture2D, All.TextureWrapS, (int)All.ClampToEdge);
                        GL.TexParameter(All.Texture2D, All.TextureWrapT, (int)All.ClampToEdge);
                    }
                } finally {
                    sampleBuffer.Dispose();
                }
            }
Example #26
 public override void DidOutputSampleBuffer(AVCaptureOutput captureOutput, CMSampleBuffer sampleBuffer, AVCaptureConnection connection)
 {
     // TODO: Implement - see: http://go-mono.com/docs/index.aspx?link=T%3aMonoTouch.Foundation.ModelAttribute
     if (Configuration.IsScanning)
     {
         try
         {
             UIImage image       = GetImageFromSampleBuffer(sampleBuffer);
             var     visionImage = new VisionImage(image);
             visionImage.Metadata = metadata;
             DetectBarcodeActionAsync(visionImage);
         }
         catch { }
         finally { sampleBuffer.Dispose(); }
     }
 }
Example #27
 public override void DidOutputSampleBuffer(AVCaptureOutput captureOutput, CMSampleBuffer sampleBuffer, AVCaptureConnection connection)
 {
     try
     {
         var image = ImageFromSampleBuffer(sampleBuffer);
         ValiVisionV2.VideoFrameAnalyzer.CurrentFrame.Frame = image;
     }
     catch (Exception e)
     {
         Console.WriteLine(e);
     }
     finally
     {
         sampleBuffer.Dispose();
     }
 }
Example #28

 public override void DidOutputSampleBuffer (AVCaptureOutput captureOutput, CMSampleBuffer sampleBuffer, AVCaptureConnection connection)
 {
     try 
     {
         using (var pixelBuffer = sampleBuffer.GetImageBuffer () as CVPixelBuffer)
         {
             pixelBuffer.Lock (0);
             CVPixelFormatType ft = pixelBuffer.PixelFormatType;
             var baseAddress = pixelBuffer.BaseAddress;
             int bytesPerRow = pixelBuffer.BytesPerRow;
             int width = pixelBuffer.Width;
             int height = pixelBuffer.Height;
             byte [] managedArray = new byte[width * height];
             Marshal.Copy(baseAddress, managedArray, 0, width * height);
             byte [] rawResult = new byte[3000];
             int resLength = BarcodeScannerClass.MWB_scanGrayscaleImage(managedArray,width,height,out rawResult);
             pixelBuffer.Unlock (0);
         }
     }
     finally
     {
         // make sure AVFoundation does not run out of buffers
         sampleBuffer.Dispose ();
     }
 }
Example #29

 public void DidOutputSampleBuffer(AVCaptureOutput captureOutput, CMSampleBuffer sampleBuffer, AVCaptureConnection connection)
 {
     if (!imageProcessingStarted)
     {
         if ((DateTime.Now - lastImageProcessedTime).TotalMilliseconds < LabelReaderConstants.ImageCaptureBeginDelayMilliseconds)
         {
             return;
         }
         imageProcessingStarted = true;
     }
     if ((DateTime.Now - lastImageProcessedTime).TotalMilliseconds < LabelReaderConstants.ImageCaptureDelayMilliseconds)
     {
         return;
     }
     lastImageProcessedTime = DateTime.Now;
     (Element as LabelReader).ProcessPhoto(sampleBuffer);
 }
Example #30
            public override void DidOutputSampleBuffer(AVCaptureOutput captureOutput, CMSampleBuffer sampleBuffer, AVCaptureConnection connection)
            {
                try {
                    var image = ImageFromSampleBuffer(sampleBuffer);

                    // Do something with the image, we just stuff it in our main view.
                    AppDelegate.ImageView.BeginInvokeOnMainThread(() => {
                        TryDispose(AppDelegate.ImageView.Image);
                        AppDelegate.ImageView.Image     = image;
                        AppDelegate.ImageView.Transform = CGAffineTransform.MakeRotation((float)Math.PI / 2);
                    });
                } catch (Exception e) {
                    Console.WriteLine(e);
                } finally {
                    sampleBuffer.Dispose();
                }
            }
Example #31

		public override void DidOutputSampleBuffer (AVCaptureOutput captureOutput, CMSampleBuffer sampleBuffer, AVCaptureConnection connection)
		{
			try {
				// render the image into the debug preview pane
				UIImage image = getImageFromSampleBuffer (sampleBuffer);

				// event the capture up
				OnImageCaptured (image);

				// make sure AVFoundation does not run out of buffers
				sampleBuffer.Dispose ();
			}
			catch (Exception ex) {
				string exceptionText = ErrorHandling.GetExceptionDetailedText (ex);
				string errorMessage = $"Failed to process image capture: {exceptionText}";
				OnCaptureError (errorMessage);
			}
		}
Example #32

		public void ConfigureCaptureSession (AVCaptureSession captureSession, AVCaptureStillImageOutput captureOutput)
		{
			if (previewLayer != null) {
				previewLayer.RemoveFromSuperLayer ();
				previewLayer = null;
			}

			previewLayer = new AVCaptureVideoPreviewLayer (captureSession) {
				VideoGravity = AVPlayerLayer.GravityResizeAspect,
				Frame = Bounds
			};

			Layer.AddSublayer (previewLayer);

			CaptureOutput = captureOutput;

			CaptureOutput.AddObserver (this, capturingStillImageKeypath, NSKeyValueObservingOptions.New, IntPtr.Zero);
		}
Example #33

        public override void DidOutputSampleBuffer(AVCaptureOutput captureOutput, CMSampleBuffer sampleBuffer, AVCaptureConnection connection)
        {
            try {
                // render the image into the debug preview pane
                UIImage image = getImageFromSampleBuffer(sampleBuffer);

                // event the capture up
                OnImageCaptured(image);

                // make sure AVFoundation does not run out of buffers
                sampleBuffer.Dispose();
            }
            catch (Exception ex) {
                string exceptionText = ErrorHandling.GetExceptionDetailedText(ex);
                string errorMessage  = $"Failed to process image capture: {exceptionText}";
                OnCaptureError(errorMessage);
            }
        }
Example #34

        public virtual void DidOutputSampleBuffer(AVCaptureOutput captureOutput, CoreMedia.CMSampleBuffer sampleBuffer, AVCaptureConnection connection)
        {
            if (!capture)
            {
                sampleBuffer.Dispose();
                return;
            }

            InvokeOnMainThread(() =>
            {
                // Image is prepared
                var orientation             = UIApplication.SharedApplication.StatusBarOrientation;
                connection.VideoOrientation = VideoOrientation(orientation);

                textCaptureService.AddSampleBuffer(sampleBuffer.Handle);

                sampleBuffer.Dispose();
            });
        }
Example #35

		/// <summary>
		/// Gets called by the video session if a new image is available.
		/// </summary>
		/// <param name="captureOutput">Capture output.</param>
		/// <param name="sampleBuffer">Sample buffer.</param>
		/// <param name="connection">Connection.</param>
		public override void DidOutputSampleBuffer(AVCaptureOutput captureOutput, CMSampleBuffer sampleBuffer, AVCaptureConnection connection)
		{
			try 
			{
				// Convert the raw image data into a CGImage.
				using(CGImage sourceImage = GetImageFromSampleBuffer(sampleBuffer))
				{
					this.OnImageCaptured( sourceImage );
				}

				// Make sure AVFoundation does not run out of buffers
				sampleBuffer.Dispose ();

			} 
			catch (Exception ex)
			{
				string errorMessage =  string.Format("Failed to process image capture: {0}", ex);
				this.OnCaptureError( errorMessage, ex );
			}
		}
Example #36
 public static void DidDropSampleBuffer(IAVCaptureAudioDataOutputSampleBufferDelegate This, AVCaptureOutput captureOutput, CMSampleBuffer sampleBuffer, AVCaptureConnection connection)
 {
 }
Example #37
 public virtual void DidDropSampleBuffer(AVCaptureOutput captureOutput, CMSampleBuffer sampleBuffer, AVCaptureConnection connection)
 {
 }
Example #38

		public virtual void DidOutputSampleBuffer (AVCaptureOutput captureOutput, CMSampleBuffer sampleBuffer, AVCaptureConnection connection)
		{
			CMFormatDescription formatDescription = sampleBuffer.GetFormatDescription ();

			if (connection == videoConnection) {
				// Get framerate
				CMTime timestamp = sampleBuffer.PresentationTimeStamp;
				CalculateFramerateAtTimestamp (timestamp);			
					
				// Get frame dimensions (for onscreen display)
				if (VideoDimensions.IsEmpty)
					VideoDimensions = formatDescription.GetVideoPresentationDimensions (true, false);
					
				// Get the buffer type
				if (VideoType == 0)
					VideoType = formatDescription.MediaSubType;

				// Synchronously process the pixel buffer to de-green it.
				using (var pixelBuffer = sampleBuffer.GetImageBuffer ())
					ProcessPixelBuffer (pixelBuffer);

				previewBufferQueue.Enqueue (sampleBuffer);
					
				//var writeBuffer = sampleBuffer.Duplicate ();
				InvokeOnMainThread (() => {
					var j = previewBufferQueue.Dequeue ();
			
					var sbuf = j as CMSampleBuffer;
					if (sbuf == null) {
#if DEBUG
						// Record the current sampleBuffer.ClassHandle
						// Then run another iteration and on the next one, print the ClassHandle
						Console.WriteLine ("The type is {0}", new NSString (CFCopyDescription (j.Handle)));
#endif
						return;
					}

					using (CVImageBuffer pixBuf = sbuf.GetImageBuffer ()) {
						if (PixelBufferReadyForDisplay != null)
							PixelBufferReadyForDisplay (pixBuf);
					}
				});
			}
			// keep a reference to 'sampleBuffer', movieWritingQueue will remove it
			CompleteBufferUse (sampleBuffer);

			movieWritingQueue.DispatchAsync (() => {
				if (assetWriter != null) {
					bool wasReadyToRecord = (readyToRecordAudio && readyToRecordVideo);
				
					if (connection == videoConnection) {
						// Initialize the video input if this is not done yet
						if (!readyToRecordVideo)
							readyToRecordVideo = SetupAssetWriterVideoInput (formatDescription);

						// Write the video data to file
						if (readyToRecordVideo && readyToRecordAudio) {
							WriteSampleBuffer (sampleBuffer, AVMediaType.Video);
						}
					} else if (connection == audioConnection) {
						if (!readyToRecordAudio)
							readyToRecordAudio = SetupAssetWriterAudioInput (formatDescription);

						if (readyToRecordAudio && readyToRecordVideo)
							WriteSampleBuffer (sampleBuffer, AVMediaType.Audio);
					}
					bool isReadyToRecord = (readyToRecordAudio && readyToRecordVideo);
				
					if (!wasReadyToRecord && isReadyToRecord) {
						recordingWillBeStarted = false;
						IsRecording = true;
					
						if (RecordingDidStart != null)
							RecordingDidStart ();
					}
				}
				CompleteBufferUse (sampleBuffer);
			});	
		}
Example #39
			public override void DidOutputSampleBuffer (AVCaptureOutput captureOutput, CMSampleBuffer sampleBuffer, AVCaptureConnection connection)
			{
				try {
					var image = ImageFromSampleBuffer (sampleBuffer);
	
					// Do something with the image, we just stuff it in our main view.
					AppDelegate.ImageView.BeginInvokeOnMainThread (delegate {
						AppDelegate.ImageView.Image = image;
					});
			
					//
					// Although this looks innocent "Oh, he is just optimizing this case away"
					// this is incredibly important to call on this callback, because the AVFoundation
					// has a fixed number of buffers and if it runs out of free buffers, it will stop
					// delivering frames. 
					//	
					sampleBuffer.Dispose ();
				} catch (Exception e){
					Console.WriteLine (e);
				}
			}
Example #40

			// This runs on the movieWritingQueue already
			public override void DidOutputSampleBuffer (AVCaptureOutput captureOutput, CMSampleBuffer sampleBuffer, AVCaptureConnection connection)
			{		
				try {
					if (processor.assetWriter != null) {
						var formatDescription = sampleBuffer.GetFormatDescription ();
						bool wasReadyToRecord = (processor.readyToRecordAudio && processor.readyToRecordVideo);
						
						// Initialize the audio input if this is not done yet
						if (!processor.readyToRecordAudio)
							processor.readyToRecordAudio = SetupAssetWriterAudioInput (formatDescription);
							
						// Write audio data to file
						if (processor.readyToRecordAudio && processor.readyToRecordVideo)
							processor.WriteSampleBuffer (sampleBuffer, AVMediaType.Audio);
			
						bool isReadyToRecord = (processor.readyToRecordAudio && processor.readyToRecordVideo);
						
						if (!wasReadyToRecord && isReadyToRecord) {
							processor.recordingWillBeStarted = false;
							processor.IsRecording = true;
							
							if (processor.RecordingDidStart != null)
								processor.RecordingDidStart ();
						}
					}
				} finally {
					sampleBuffer.Dispose();
				}
			}
Example #41
			public override void DidOutputSampleBuffer (AVCaptureOutput captureOutput, CMSampleBuffer sampleBuffer, AVCaptureConnection connection)
			{
				if ((DateTime.UtcNow - lastAnalysis).TotalMilliseconds < options.DelayBetweenAnalyzingFrames || working
				    || CancelTokenSource.IsCancellationRequested)
					return;

				working = true;
				//Console.WriteLine("SAMPLE");

				lastAnalysis = DateTime.UtcNow;

				try 
				{
					using (var image = ImageFromSampleBuffer (sampleBuffer))
						HandleImage(image);
					
					//
					// Although this looks innocent "Oh, he is just optimizing this case away"
					// this is incredibly important to call on this callback, because the AVFoundation
					// has a fixed number of buffers and if it runs out of free buffers, it will stop
					// delivering frames. 
					//	
					sampleBuffer.Dispose ();
				} catch (Exception e){
					Console.WriteLine (e);
				}

				working = false;
			}
Example #42
			public override void DidOutputSampleBuffer (AVCaptureOutput captureOutput, CMSampleBuffer sampleBuffer, AVCaptureConnection connection)
			{
				try {
					var image = ImageFromSampleBuffer (sampleBuffer);
	
					// Do something with the image, we just stuff it in our main view.
					AppDelegate.ImageView.BeginInvokeOnMainThread (delegate {
						AppDelegate.ImageView.Image = image;
					});
				
					sampleBuffer.Dispose ();
				} catch (Exception e){
					Console.WriteLine (e);
				}
			}
Example #43

			public override void DidOutputSampleBuffer (AVCaptureOutput captureOutput, MonoTouch.CoreMedia.CMSampleBuffer sampleBuffer, AVCaptureConnection connection)
			{
				try {
					using (var pixelBuffer = sampleBuffer.GetImageBuffer () as CVPixelBuffer){	
						int width = pixelBuffer.Width;
						int height = pixelBuffer.Height;
					
						if (container.ripple == null || width != textureWidth || height != textureHeight){
							textureWidth = width;
							textureHeight = height;
							container.SetupRipple (textureWidth, textureHeight);
						}
						CleanupTextures ();
						
						// Y-plane
						GL.ActiveTexture (All.Texture0);
						All re = (All) 0x1903; // GL_RED_EXT, RED component from ARB OpenGL extension
						CVReturn status;
						lumaTexture = container.videoTextureCache.TextureFromImage (pixelBuffer, true, re, textureWidth, textureHeight, re, DataType.UnsignedByte, 0, out status);
						
						if (lumaTexture == null){
							Console.WriteLine ("Error creating luma texture: {0}", status);
							return;
						}
						GL.BindTexture ((All)lumaTexture.Target, lumaTexture.Name);
						GL.TexParameter (All.Texture2D, All.TextureWrapS, (int) All.ClampToEdge);
						GL.TexParameter (All.Texture2D, All.TextureWrapT, (int) All.ClampToEdge);
						
						// UV Plane
						GL.ActiveTexture (All.Texture1);
						re = (All) 0x8227; // GL_RG_EXT, RED GREEN component from ARB OpenGL extension
						chromaTexture = container.videoTextureCache.TextureFromImage (pixelBuffer, true, re, textureWidth/2, textureHeight/2, re, DataType.UnsignedByte, 1, out status);
						
						if (chromaTexture == null){
							Console.WriteLine ("Error creating chroma texture: {0}", status);
							return;
						}
						GL.BindTexture ((All) chromaTexture.Target, chromaTexture.Name);
						GL.TexParameter (All.Texture2D, All.TextureWrapS, (int)All.ClampToEdge);
						GL.TexParameter (All.Texture2D, All.TextureWrapT, (int) All.ClampToEdge);
					}
				} finally {
					sampleBuffer.Dispose ();
				}
			}
Example #44
			public override void DidOutputSampleBuffer (AVCaptureOutput captureOutput, CMSampleBuffer sampleBuffer, 
				                                        AVCaptureConnection connection)
			{
				try {
					var frame = ImageFromSampleBuffer (sampleBuffer);
					Camera.OnFrameCaptured (frame);
					sampleBuffer.Dispose ();
				} catch (Exception ex) {
					Debug.WriteLine (ex);
				}
			}
Example #45

            public override void DidOutputSampleBuffer(AVCaptureOutput captureOutput, CMSampleBuffer sampleBuffer, AVCaptureConnection connection)
            {
                try {

                    var image = ImageFromSampleBuffer (sampleBuffer);
                    sampleBuffer.Dispose();
                    captureOutput.Dispose();
                    connection.Dispose();
                    if (barChecking == false) {
                        barChecking = true;
                        codeImage = image;
                        Thread barCodeThread = new Thread (new ThreadStart (CheckBarCode));
                        barCodeThread.Start ();
                    }

                    AppDelegate.main.decryptVC.imageCaptureView.BeginInvokeOnMainThread (delegate {
                        AppDelegate.main.decryptVC.imageCaptureView.Image = image;
                    });

                } catch (Exception e) {
                    Console.WriteLine (e);
                }
            }
Example #46
 public override void DidOutputSampleBuffer(AVCaptureOutput output, CMSampleBuffer buffer, AVCaptureConnection con)
 {
     //  Implement
     //  - see: http://go-mono.com/docs/index.aspx?link=T%3aMonoTouch.Foundation.ModelAttribute
     //
 }
Example #47
		public void DidOutputMetadataObjects (AVCaptureOutput captureOutput, 
		                               AVMetadataObject[] metadataObjects,
		                               AVCaptureConnection connection)
		{
			Barcodes = metadataObjects.ToList ();
		}
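Example #47 uses AVFoundation's metadata pipeline instead of decoding pixel buffers by hand. A hedged sketch of the session wiring such a delegate typically assumes (session and metadataDelegate are hypothetical variables, not part of the snippet):

		// Hypothetical wiring for the metadata-based scanning in Example #47.
		var metadataOutput = new AVCaptureMetadataOutput ();
		if (session.CanAddOutput (metadataOutput)) {
			session.AddOutput (metadataOutput);
			metadataOutput.SetDelegate (metadataDelegate, DispatchQueue.MainQueue);
			// Restrict scanning to the types we care about; set after AddOutput
			metadataOutput.MetadataObjectTypes = AVMetadataObjectType.QRCode | AVMetadataObjectType.EAN13Code;
		}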
Example #48
        public override void DidOutputSampleBuffer(AVCaptureOutput captureOutput, CMSampleBuffer sampleBuffer, AVCaptureConnection connection)
        {
            try
            {
                lastSampleTime = sampleBuffer.PresentationTimeStamp;

                var image = ImageFromSampleBuffer(sampleBuffer);

                if (frame == 0)
                {
                    writer.StartWriting();
                    writer.StartSessionAtSourceTime(lastSampleTime);
                    frame = 1;
                }
                String infoString = "";
                if (inputWriter.ReadyForMoreMediaData)
                {
                    if (!inputWriter.AppendSampleBuffer(sampleBuffer))
                    {
                        infoString = "Failed to append sample buffer";
                    }
                    else
                    {
                        infoString = String.Format("{0} frames captured", frame++);
                    }
                }
                else
                {
                    infoString = "Writer not ready";
                }

                ImageView.BeginInvokeOnMainThread(() => ImageView.Image = image);
                InfoLabel.BeginInvokeOnMainThread(() => InfoLabel.Text = infoString);
            }
            catch (Exception e)
            {
                Failure.Alert(e.Message);
            }
            finally
            {
                sampleBuffer.Dispose();
            }
        }
Example #49

			public override void DidOutputSampleBuffer (AVCaptureOutput captureOutput, CMSampleBuffer sampleBuffer, AVCaptureConnection connection)
			{
				CMFormatDescription formatDescription = sampleBuffer.GetFormatDescription ();

				if (connection == processor.videoConnection) {
					// Get framerate
					CMTime timestamp = sampleBuffer.PresentationTimeStamp;
					CalculateFramerateAtTimestamp (timestamp);			
						
					// Get frame dimensions (for onscreen display)
					if (processor.VideoDimensions.Width == 0 && processor.VideoDimensions.Height == 0)
						processor.VideoDimensions = formatDescription.GetVideoPresentationDimensions (true, false);
						
					// Get the buffer type
					if (processor.VideoType == 0)
						processor.VideoType = formatDescription.MediaSubType;
					// TODO: processor.VideoType = (CMVideoCodecType)Enum.ToObject (typeof(CMVideoCodecType), formatDescription.MediaSubType);
					
					// Synchronously process the pixel buffer to de-green it.
					using (var pixelBuffer = sampleBuffer.GetImageBuffer ())
						ProcessPixelBuffer (pixelBuffer);

					processor.previewBufferQueue.Enqueue (sampleBuffer);
						
					//var writeBuffer = sampleBuffer.Duplicate ();
					InvokeOnMainThread (() => {
						var j = processor.previewBufferQueue.Dequeue ();
				
						var sbuf = j as CMSampleBuffer;
						if (sbuf == null) {
							// Record the current sampleBuffer.ClassHandle
							// Then run another iteration and on the next one, print the ClassHandle
							Console.WriteLine ("The type is {0}", new NSString (CFCopyDescription (j.Handle)));
							return;
						}
						
						using (CVImageBuffer pixBuf = sbuf.GetImageBuffer ()){
							if (processor.PixelBufferReadyForDisplay != null)
								processor.PixelBufferReadyForDisplay (pixBuf);
						}
		
						if(processor.assetWriter == null)
							sbuf.Dispose();
						else
							processor.CompleteBufferUse (sbuf);
					});
				}
				
				
				processor.movieWritingQueue.DispatchAsync (() => {
					if (processor.assetWriter != null) {
						bool wasReadyToRecord = (processor.readyToRecordAudio && processor.readyToRecordVideo);
					
						// Initialize the video input if this is not done yet
						if (!processor.readyToRecordVideo)
							processor.readyToRecordVideo = SetupAssetWriterVideoInput (formatDescription);
						
						// Write the video data to file
						if (processor.readyToRecordVideo && processor.readyToRecordAudio)
							processor.WriteSampleBuffer (sampleBuffer, AVMediaType.Video);
			
						bool isReadyToRecord = (processor.readyToRecordAudio && processor.readyToRecordVideo);
					
						if (!wasReadyToRecord && isReadyToRecord) {
							processor.recordingWillBeStarted = false;
							processor.IsRecording = true;
						
							if (processor.RecordingDidStart != null)
								processor.RecordingDidStart ();
						}
						
						processor.CompleteBufferUse (sampleBuffer);
					}
				});	
			}
Example #50

			public override void DidOutputSampleBuffer (AVCaptureOutput captureOutput, CMSampleBuffer sampleBuffer, AVCaptureConnection connection)
			{
				try {

					if (!gotResult)
					{
						LuminanceSource luminance;
						//connection.VideoOrientation = AVCaptureVideoOrientation.Portrait;

						using (var pixelBuffer = sampleBuffer.GetImageBuffer () as CVPixelBuffer) {
		
							if (bytes == null)
								bytes = new byte [pixelBuffer.Height * pixelBuffer.BytesPerRow];
		
							pixelBuffer.Lock (0);
							Marshal.Copy (pixelBuffer.BaseAddress, bytes, 0, bytes.Length);
		
							luminance = new RGBLuminanceSource (bytes, pixelBuffer.Width, pixelBuffer.Height);


							pixelBuffer.Unlock (0);
						}

						var binarized = new BinaryBitmap (new HybridBinarizer (luminance));
						var result = reader.decodeWithState (binarized);

						//parent.session.StopRunning ();

						gotResult = true;

					
						if (parent.Scan != null)
							parent.Scan (result);
					}

				} catch (ReaderException) {

					// ignore this exception; it happens every time there is a failed scan

				} catch (Exception e) {

					// TODO: this one is unexpected.. log or otherwise handle it

					throw;

				} finally {
					try {
						// lamest thing, but seems that this throws :(
						sampleBuffer.Dispose ();
					} catch { }
				}
			}
Example #51

			public override void DidOutputSampleBuffer (AVCaptureOutput captureOutput, CMSampleBuffer sampleBuffer, AVCaptureConnection connection)
			{
				try
				{
					var image = ImageFromSampleBuffer (sampleBuffer);

					// Do something with the image, we just stuff it in our main view.
					ImageView.BeginInvokeOnMainThread(() => {
						TryDispose (ImageView.Image);
						ImageView.Image = image;
						ImageView.Transform = CGAffineTransform.MakeRotation (NMath.PI / 2);
					});
				}
				catch (Exception e) {
					Console.WriteLine (e);
				}
				finally {
					sampleBuffer.Dispose ();
				}
			}
Example #52
			public void DidDropSampleBuffer(AVCaptureOutput captureOutput, CMSampleBuffer sampleBuffer, AVCaptureConnection connection)
			{
				//Console.WriteLine("DROPPED");
			}