Code example #1
            public override void DidOutputSampleBuffer(AVCaptureOutput captureOutput, CMSampleBuffer sampleBuffer, AVCaptureConnection connection)
            {
                var msSinceLastPreview = (DateTime.UtcNow - lastAnalysis).TotalMilliseconds;

                if (msSinceLastPreview < scannerHost.ScanningOptions.DelayBetweenAnalyzingFrames ||
                    (wasScanned && msSinceLastPreview < scannerHost.ScanningOptions.DelayBetweenContinuousScans) ||
                    working ||
                    CancelTokenSource.IsCancellationRequested)
                {
                    if (msSinceLastPreview < scannerHost.ScanningOptions.DelayBetweenAnalyzingFrames)
                    {
                        Console.WriteLine("Too soon between frames");
                    }
                    if (wasScanned && msSinceLastPreview < scannerHost.ScanningOptions.DelayBetweenContinuousScans)
                    {
                        Console.WriteLine("Too soon since last scan");
                    }

                    if (sampleBuffer != null)
                    {
                        sampleBuffer.Dispose();
                        sampleBuffer = null;
                    }
                    return;
                }

                wasScanned   = false;
                working      = true;
                lastAnalysis = DateTime.UtcNow;

                try
                {
                    // Get the CoreVideo image
                    using (var pixelBuffer = sampleBuffer.GetImageBuffer() as CVPixelBuffer)
                    {
                        // Lock the base address
                        pixelBuffer.Lock(CVPixelBufferLock.ReadOnly); // MAYBE NEEDS READ/WRITE

                        LuminanceSource luminanceSource;

                        // Let's access the raw underlying data and create a luminance source from it
                        unsafe
                        {
                            var rawData    = (byte *)pixelBuffer.BaseAddress.ToPointer();
                            // Width * Height * 4 bytes of BGRA data; this is 8 bytes shorter than the
                            // buffer's reported length, which is the length the luminance source expects
                            var rawDatalen = (int)(pixelBuffer.Height * pixelBuffer.Width * 4);

                            luminanceSource = new CVPixelBufferBGRA32LuminanceSource(rawData, rawDatalen, (int)pixelBuffer.Width, (int)pixelBuffer.Height);
                        }

                        if (handleImage(luminanceSource))
                        {
                            wasScanned = true;
                        }

                        pixelBuffer.Unlock(CVPixelBufferLock.ReadOnly);
                    }

                    //
                    // Although this looks innocent ("Oh, he is just optimizing this case away"),
                    // disposing the sample buffer here is incredibly important, because AVFoundation
                    // has a fixed number of buffers and, if it runs out of free ones, it will stop
                    // delivering frames.
                    //
                    sampleBuffer.Dispose();
                    sampleBuffer = null;
                }
                catch (Exception e)
                {
                    Console.WriteLine(e);
                }
                finally
                {
                    working = false;
                }
            }
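
For reference, DidOutputSampleBuffer only fires once an instance of this delegate is attached to an AVCaptureVideoDataOutput that delivers 32BGRA frames (which CVPixelBufferBGRA32LuminanceSource expects). Below is a minimal wiring sketch, assuming a Xamarin.iOS setup; names such as captureSession and outputRecorder (an instance of the delegate class above) are illustrative, not part of the original code.

            // using AVFoundation, CoreFoundation, CoreVideo, Foundation
            // Hypothetical wiring sketch; outputRecorder is an instance of the delegate shown above.
            var captureSession = new AVCaptureSession();

            var device = AVCaptureDevice.GetDefaultDevice(AVMediaTypes.Video);
            var input  = AVCaptureDeviceInput.FromDevice(device, out var inputError);
            captureSession.AddInput(input);

            var videoOutput = new AVCaptureVideoDataOutput
            {
                // Drop frames rather than queueing them if the delegate falls behind.
                AlwaysDiscardsLateVideoFrames = true,
                // Ask for 32BGRA frames, the layout CVPixelBufferBGRA32LuminanceSource expects.
                WeakVideoSettings = NSDictionary.FromObjectAndKey(
                    NSNumber.FromUInt32((uint)CVPixelFormatType.CV32BGRA),
                    CVPixelBuffer.PixelFormatTypeKey)
            };

            // DidOutputSampleBuffer is invoked on this queue for every delivered frame.
            videoOutput.SetSampleBufferDelegate(outputRecorder, new DispatchQueue("camera.frames"));

            captureSession.AddOutput(videoOutput);
            captureSession.StartRunning();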
Code example #2
            public override void DidOutputSampleBuffer(AVCaptureOutput captureOutput, CMSampleBuffer sampleBuffer, AVCaptureConnection connection)
            {
                var msSinceLastPreview = (DateTime.UtcNow - lastAnalysis).TotalMilliseconds;

                if (msSinceLastPreview < scannerHost.ScanningOptions.DelayBetweenAnalyzingFrames ||
                    (wasScanned && msSinceLastPreview < scannerHost.ScanningOptions.DelayBetweenContinuousScans) ||
                    working ||
                    CancelTokenSource.IsCancellationRequested)
                {
                    if (msSinceLastPreview < scannerHost.ScanningOptions.DelayBetweenAnalyzingFrames)
                    {
                        Console.WriteLine("Too soon between frames");
                    }
                    if (wasScanned && msSinceLastPreview < scannerHost.ScanningOptions.DelayBetweenContinuousScans)
                    {
                        Console.WriteLine("Too soon since last scan");
                    }

                    if (sampleBuffer != null)
                    {
                        sampleBuffer.Dispose();
                        sampleBuffer = null;
                    }
                    return;
                }

                wasScanned   = false;
                working      = true;
                lastAnalysis = DateTime.UtcNow;

                try
                {
                    // Get the CoreVideo image
                    using (var pixelBuffer = sampleBuffer.GetImageBuffer() as CVPixelBuffer)
                    {
                        // Lock the base address
                        pixelBuffer.Lock(CVPixelBufferLock.ReadOnly); // MAYBE NEEDS READ/WRITE

                        if (Picture)
                        {
                            Picture       = false;
                            CapturedImage = null;
                            var  baseAddress = pixelBuffer.BaseAddress;
                            nint bytesPerRow = pixelBuffer.BytesPerRow;
                            nint width       = pixelBuffer.Width;
                            nint height      = pixelBuffer.Height;

                            var flags = CGBitmapFlags.PremultipliedFirst | CGBitmapFlags.ByteOrder32Little;
                            // Create a CGImage on the RGB colorspace from the configured parameter above
                            using (var cs = CGColorSpace.CreateDeviceRGB())
                                using (var context = new CGBitmapContext(baseAddress, width, height, 8, bytesPerRow, cs, flags))
                                    using (var cgImage = context.ToImage())
                                    {
                                        // NOTE: keep the pixel buffer locked here; its base address is still
                                        // read below to build the luminance source, and it is unlocked once,
                                        // with the matching ReadOnly flag, after that.
                                        CapturedImage = UIImage.FromImage(cgImage);
                                        //SendPictureBack?.Invoke(this, CapturedImage);
                                        HandleCapturedImage(CapturedImage);
                                    }
                        }

                        LuminanceSource luminanceSource;

                        // Let's access the raw underlying data and create a luminance source from it
                        unsafe
                        {
                            var rawData    = (byte *)pixelBuffer.BaseAddress.ToPointer();
                            // Width * Height * 4 bytes of BGRA data; this is 8 bytes shorter than the
                            // buffer's reported length, which is the length the luminance source expects
                            var rawDatalen = (int)(pixelBuffer.Height * pixelBuffer.Width * 4);

                            luminanceSource = new CVPixelBufferBGRA32LuminanceSource(rawData, rawDatalen, (int)pixelBuffer.Width, (int)pixelBuffer.Height);
                        }

                        if (HandleImage(luminanceSource))
                        {
                            wasScanned = true;
                        }

                        pixelBuffer.Unlock(CVPixelBufferLock.ReadOnly);
                    }

                    //
                    // Although this looks innocent ("Oh, he is just optimizing this case away"),
                    // disposing the sample buffer here is incredibly important, because AVFoundation
                    // has a fixed number of buffers and, if it runs out of free ones, it will stop
                    // delivering frames.
                    //
                    sampleBuffer.Dispose();
                    sampleBuffer = null;
                }
                catch (Exception e)
                {
                    Console.WriteLine(e);
                }
                finally
                {
                    working = false;
                }
            }
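
Both examples hand the raw BGRA bytes to CVPixelBufferBGRA32LuminanceSource. As a rough sketch of what such a BGRA32-to-luminance conversion typically looks like (this is not the actual ZXing.Net.Mobile implementation, and it assumes BytesPerRow == Width * 4, i.e. no row padding, just as the rawDatalen calculation above does):

            // Hypothetical helper: collapses each 4-byte BGRA pixel into one luma byte.
            static unsafe byte[] ToLuminance(byte* bgra, int width, int height)
            {
                var luminance = new byte[width * height];
                for (int i = 0, p = 0; i < luminance.Length; i++, p += 4)
                {
                    byte b = bgra[p];
                    byte g = bgra[p + 1];
                    byte r = bgra[p + 2];
                    // Integer approximation of the Rec. 601 luma weights (0.299 R + 0.587 G + 0.114 B).
                    luminance[i] = (byte)((299 * r + 587 * g + 114 * b) / 1000);
                }
                return luminance;
            }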