public void SetInvalidateCallback_Null()
{
    using (var pixelBuffer = new CVPixelBuffer(20, 10, CVPixelFormatType.CV24RGB)) {
        CMFormatDescriptionError fde;
        using (var desc = CMVideoFormatDescription.CreateForImageBuffer(pixelBuffer, out fde)) {
            var sampleTiming = new CMSampleTimingInfo();
            CMSampleBufferError sbe;
            using (var sb = CMSampleBuffer.CreateForImageBuffer(pixelBuffer, true, desc, sampleTiming, out sbe)) {
                // ignore `null`, i.e. no crash
                Assert.That(sb.SetInvalidateCallback(null), Is.EqualTo(CMSampleBufferError.None), "null");

                int i = 0;
                var result = sb.SetInvalidateCallback(delegate(CMSampleBuffer buffer) {
                    i++;
                    Assert.AreSame(buffer, sb, "same");
                });
                Assert.That(result, Is.EqualTo(CMSampleBufferError.None), "SetInvalidateCallback/None");

                // we can reset (nullify) the callback
                Assert.That(sb.SetInvalidateCallback(null), Is.EqualTo(CMSampleBufferError.None), "null-2");

                result = (CMSampleBufferError)sb.Invalidate();
                Assert.That(result, Is.EqualTo(CMSampleBufferError.None), "Invalidate/None");
                Assert.That(i, Is.EqualTo(0), "0");
            }
        }
    }
}
public CustomVideoCompositor (AVVideoComposition videoComposition) : base ()
{
    renderingQueue = new DispatchQueue ("com.apple.aplcustomvideocompositor.renderingqueue");
    renderContextQueue = new DispatchQueue ("com.apple.aplcustomvideocompositor.rendercontextqueue");
    previousBuffer = null;
    renderContextDidChange = false;
}
public void SetInvalidateCallback()
{
    using (var pixelBuffer = new CVPixelBuffer(20, 10, CVPixelFormatType.CV24RGB)) {
        CMFormatDescriptionError fde;
        using (var desc = CMVideoFormatDescription.CreateForImageBuffer(pixelBuffer, out fde)) {
            var sampleTiming = new CMSampleTimingInfo();
            CMSampleBufferError sbe;
            using (var sb = CMSampleBuffer.CreateForImageBuffer(pixelBuffer, true, desc, sampleTiming, out sbe)) {
                int i = 0;
                var result = sb.SetInvalidateCallback(delegate(CMSampleBuffer buffer) {
                    i++;
                    Assert.AreSame(buffer, sb, "same");
                });
                Assert.That(result, Is.EqualTo(CMSampleBufferError.None), "SetInvalidateCallback/None");

                result = (CMSampleBufferError)sb.Invalidate();
                Assert.That(result, Is.EqualTo(CMSampleBufferError.None), "Invalidate/None");
                Assert.That(i, Is.EqualTo(1), "1");

                // a second call to Invalidate returns Invalidated
                result = (CMSampleBufferError)sb.Invalidate();
                Assert.That(result, Is.EqualTo(CMSampleBufferError.Invalidated), "Invalidated");
            }
        }
    }
}
public virtual CVOpenGLESTexture LumaTextureForPixelBuffer(CVPixelBuffer pixelBuffer)
{
    CVOpenGLESTexture lumaTexture = null;

    if (_videoTextureCache == null) {
        Console.Error.WriteLine("No video texture cache");
        return lumaTexture;
    }

    // Periodic texture cache flush every frame
    _videoTextureCache.Flush(0);

    // CVOpenGLESTextureCacheCreateTextureFromImage will create a GL texture optimally from a CVPixelBufferRef.
    // Y plane
    lumaTexture = _videoTextureCache.TextureFromImage(pixelBuffer, true, All.RedExt, (int)pixelBuffer.Width, (int)pixelBuffer.Height,
        All.RedExt, DataType.UnsignedByte, 0, out CVReturn error);
    if (lumaTexture == null || error != CVReturn.Success)
        Console.Error.WriteLine($"Error at creating luma texture using CVOpenGLESTextureCacheCreateTextureFromImage: {error}");

    return lumaTexture;
}
void DrawTextInCorner(CVPixelBuffer pixelBuffer)
{
    var textLayer = new CATextLayer();
    const float textLayerWidth = 100f;
    const float textLayerHeight = 50f;

    if (AffineTransform.xx == -1.0f && AffineTransform.yy == -1.0f) {
        textLayer.AffineTransform = AffineTransform;
    } else if (AffineTransform.xy == 1.0f && AffineTransform.yx == -1f) {
        textLayer.AffineTransform = new CGAffineTransform(
            AffineTransform.xx * -1f, AffineTransform.xy * -1f,
            AffineTransform.yx * -1f, AffineTransform.yy * -1f,
            AffineTransform.x0, AffineTransform.y0);
    }

    textLayer.Frame = new CGRect(Bounds.Size.Width - textLayerWidth, 0f, textLayerWidth, textLayerHeight);
    textLayer.String = TimeCode;
    textLayer.BackgroundColor = UIColor.Black.CGColor;
    AddSublayer(textLayer);
}
public static CVPixelBuffer ToCVPixelBuffer(this UIImage self)
{
    var attrs = new CVPixelBufferAttributes();
    attrs.CGImageCompatibility = true;
    attrs.CGBitmapContextCompatibility = true;

    var cgImg = self.CGImage;
    var pb = new CVPixelBuffer(cgImg.Width, cgImg.Height, CVPixelFormatType.CV32ARGB, attrs);
    pb.Lock(CVPixelBufferLock.None);
    var pData = pb.BaseAddress;

    using (var colorSpace = CGColorSpace.CreateDeviceRGB())
    using (var ctxt = new CGBitmapContext(pData, cgImg.Width, cgImg.Height, 8, pb.BytesPerRow, colorSpace, CGImageAlphaInfo.NoneSkipFirst)) {
        // Flip the context vertically: CGBitmapContext's origin is bottom-left, UIKit draws top-left.
        ctxt.TranslateCTM(0, cgImg.Height);
        ctxt.ScaleCTM(1.0f, -1.0f);

        UIGraphics.PushContext(ctxt);
        self.Draw(new CGRect(0, 0, cgImg.Width, cgImg.Height));
        UIGraphics.PopContext();
    }

    pb.Unlock(CVPixelBufferLock.None);
    return pb;
}
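For context, a minimal call site for the extension above might look like the sketch below. It reuses the ImageClassifier wrapper that appears in the image-picker snippet later in this collection; that wrapper, and the assumption that the model accepts a pixel buffer at the image's native size, are illustrative rather than part of the original source.

// Sketch only: `ImageClassifier` is the generated CoreML wrapper used in the
// picker snippet below; its expected input size is an assumption here.
void ClassifyPhoto(UIImage photo)
{
    using (CVPixelBuffer buffer = photo.ToCVPixelBuffer()) {
        var classifier = new ImageClassifier();
        var output = classifier.GetPrediction(buffer, out NSError error);
        if (error == null)
            Console.WriteLine($"This looks like: {output.ClassLabel}");
    }
}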
//private static MCvFont font = new MCvFont(Emgu.CV.CvEnum.FONT.CV_FONT_HERSHEY_PLAIN, 1.0, 1.0);
UIImage ImageFromSampleBuffer(CMSampleBuffer sampleBuffer)
{
    // Get the CoreVideo image
    using (CVPixelBuffer pixelBuffer = sampleBuffer.GetImageBuffer() as CVPixelBuffer) {
        // Lock the base address
        pixelBuffer.Lock(CVPixelBufferLock.ReadOnly);

        // Get the number of bytes per row for the pixel buffer
        IntPtr baseAddress = pixelBuffer.BaseAddress;
        int bytesPerRow = (int)pixelBuffer.BytesPerRow;
        int width = (int)pixelBuffer.Width;
        int height = (int)pixelBuffer.Height;

        using (Image<Bgra, byte> bgra = new Image<Bgra, byte>(width, height, bytesPerRow, baseAddress))
        using (Image<Bgr, byte> bgr = bgra.Convert<Bgr, byte>())
        using (Image<Bgr, byte> bgr2 = bgr.Rotate(90, new Bgr(0, 0, 0))) {
            bgr2.Draw(
                string.Format("{0} x {1}", width, height),
                new Point(20, 20),
                FontFace.HersheySimplex,
                1.0,
                new Bgr(255, 0, 0));
            //CvInvoke.cvCvtColor(bgr2, bgra, Emgu.CV.CvEnum.COLOR_CONVERSION.CV_BGR2BGRA);
            UIImage result = bgr2.ToUIImage();
            pixelBuffer.Unlock(CVPixelBufferLock.ReadOnly);
            return result;
        }
    }
}
public override IScanResult Decode(CVPixelBuffer pixelBuffer)
{
    var decoder = PerformanceCounter.Start();
    unsafe {
        // BaseAddress is only valid while the buffer is locked.
        pixelBuffer.Lock(CVPixelBufferLock.ReadOnly);
        try {
            var rawData = (byte*)pixelBuffer.BaseAddress.ToPointer();
            // Assumes tightly packed BGRA rows (4 bytes per pixel, no row padding).
            int rawDatalen = (int)(pixelBuffer.Height * pixelBuffer.Width * 4);
            int width = (int)pixelBuffer.Width;
            int height = (int)pixelBuffer.Height;

            var luminanceSource = new CVPixelBufferBGRA32LuminanceSource(rawData, rawDatalen, width, height);
            var res = _reader.Decode(luminanceSource);

            var result = new ZXingResult();
            if (res == null) {
                result.Success = false;
                result.Timestamp = DateTime.Now.Ticks;
            } else {
                result.Success = true;
                result.Timestamp = res.Timestamp;
                result.Text = res.Text;
            }
            PerformanceCounter.Stop(decoder, "ZXing Decoder took {0} ms.");
            return result;
        } finally {
            pixelBuffer.Unlock(CVPixelBufferLock.ReadOnly);
        }
    }
}
public virtual CVOpenGLESTexture ChromaTextureForPixelBuffer(CVPixelBuffer pixelBuffer)
{
    CVOpenGLESTexture chromaTexture = null;
    CVReturn err;

    if (VideoTextureCache == null) {
        Console.Error.WriteLine("No video texture cache");
        return chromaTexture;
    }

    // Periodic texture cache flush every frame
    VideoTextureCache.Flush(0);

    // CVOpenGLESTextureCacheCreateTextureFromImage will create a GL texture optimally from a CVPixelBufferRef.
    // UV plane
    var height = pixelBuffer.GetHeightOfPlane(1);
    var width = pixelBuffer.GetWidthOfPlane(1);
    chromaTexture = VideoTextureCache.TextureFromImage(pixelBuffer, true, All.RgExt, width, height,
        All.RgExt, DataType.UnsignedByte, 1, out err);
    if (chromaTexture == null || err != CVReturn.Success)
        Console.Error.WriteLine("Error at creating chroma texture using CVOpenGLESTextureCacheCreateTextureFromImage: " + err.ToString());

    return chromaTexture;
}
public static NSImage ToNSImage(this CMSampleBuffer sampleBuffer)
{
    NSImage image;
    using (CVPixelBuffer pixelBuffer = sampleBuffer.GetImageBuffer() as CVPixelBuffer) {
        // Lock the base address
        pixelBuffer.Lock(CVPixelBufferLock.ReadOnly);
        using (CIImage cIImage = new CIImage(pixelBuffer)) {
            image = null;
            // NSImage must be built on the main thread; block until it is ready.
            AutoResetEvent e = new AutoResetEvent(false);
            Xamarin.Forms.Device.BeginInvokeOnMainThread(delegate {
                NSCIImageRep rep = new NSCIImageRep(cIImage);
                image = new NSImage(rep.Size);
                image.AddRepresentation(rep);
                e.Set();
            });
            e.WaitOne();
        }
        pixelBuffer.Unlock(CVPixelBufferLock.ReadOnly);
    }
    return image;
}
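A hedged usage sketch for the extension above: assuming frames arrive through an AVCaptureVideoDataOutputSampleBufferDelegate (the delegate wiring is not part of the original snippet), it could be called once per frame as shown. Disposing the sample buffer promptly matters, because capture stalls once the buffer pool is exhausted.

// Illustrative wiring only; the delegate class is an assumption.
class PreviewDelegate : AVCaptureVideoDataOutputSampleBufferDelegate
{
    public override void DidOutputSampleBuffer(AVCaptureOutput captureOutput, CMSampleBuffer sampleBuffer, AVCaptureConnection connection)
    {
        using (sampleBuffer) {
            NSImage frame = sampleBuffer.ToNSImage();
            // hand `frame` off to the UI here
        }
    }
}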
CVPixelBuffer newRenderedPixelBufferForRequest(AVAsynchronousVideoCompositionRequest request, out NSError error)
{
    CVPixelBuffer dstPixels = null;
    float tweenFactor = (float)factorForTimeInRange(request.CompositionTime, request.VideoCompositionInstruction.TimeRange);
    var currentInstruction = (CustomVideoCompositionInstruction)request.VideoCompositionInstruction;

    CVPixelBuffer foregroundSourceBuffer = request.SourceFrameByTrackID(currentInstruction.ForegroundTrackID);
    CVPixelBuffer backgroundSourceBuffer = request.SourceFrameByTrackID(currentInstruction.BackgroundTrackID);

    dstPixels = renderContext.CreatePixelBuffer();

    if (renderContextDidChange) {
        var renderSize = renderContext.Size;
        var destinationSize = new SizeF(dstPixels.Width, dstPixels.Height);
        // Map render-context coordinates into the destination's normalized [-1, 1] space.
        var renderContextTransform = new CGAffineTransform(renderSize.Width / 2, 0, 0, renderSize.Height / 2, renderSize.Width / 2, renderSize.Height / 2);
        var destinationTransform = new CGAffineTransform(2 / destinationSize.Width, 0, 0, 2 / destinationSize.Height, -1, -1);
        var normalizedRenderTransform = CGAffineTransform.Multiply(CGAffineTransform.Multiply(renderContextTransform, renderContext.RenderTransform), destinationTransform);
        oglRender.RenderTransform = normalizedRenderTransform;
        renderContextDidChange = false;
    }

    oglRender.RenderPixelBuffer(dstPixels, foregroundSourceBuffer, backgroundSourceBuffer, tweenFactor);

    error = null;
    return dstPixels;
}
public void CreateWithBytes()
{
    nint width = 1280;
    nint height = 720;
    nint bytesPerRow = width * 4;
    CVReturn status;
    var data = new byte[height * bytesPerRow];

    using (var buf = CVPixelBuffer.Create(width, height, CVPixelFormatType.CV32RGBA, data, bytesPerRow, null, out status)) {
        Assert.AreEqual(CVReturn.InvalidPixelFormat, status, "CV32RGBA");
        Assert.IsNull(buf, "CV32RGBA - null");
    }

    using (var buf = CVPixelBuffer.Create(width, height, CVPixelFormatType.CV32BGRA, data, bytesPerRow, null, out status)) {
        Assert.AreEqual(CVReturn.Success, status, "CV32BGRA");
        Assert.IsNotNull(buf, "CV32BGRA - null");
    }

    var dict = new CVPixelBufferAttributes();
    using (var buf = CVPixelBuffer.Create(width, height, CVPixelFormatType.CV32BGRA, data, bytesPerRow, dict)) {
        Assert.IsNotNull(buf);
    }

    Assert.Throws<ArgumentNullException>(() => CVPixelBuffer.Create(width, height, CVPixelFormatType.CV32BGRA, null, bytesPerRow, null), "null data");
    Assert.Throws<ArgumentOutOfRangeException>(() => CVPixelBuffer.Create(width, height, CVPixelFormatType.CV32BGRA, data, bytesPerRow + 1, null), "bytesPerRow+1");
    Assert.Throws<ArgumentOutOfRangeException>(() => CVPixelBuffer.Create(width, height + 1, CVPixelFormatType.CV32BGRA, data, bytesPerRow + 1, null), "height+1");
}
void ImagePicker_FinishedPickingMedia(object sender, UIImagePickerMediaPickedEventArgs e)
{
    if (e.Info[UIImagePickerController.MediaType].ToString() == "public.image") {
        UIImage originalImage = e.Info[UIImagePickerController.OriginalImage] as UIImage;
        if (originalImage != null) {
            var scaledImage = originalImage.Scale(new CGSize(300, 300));
            var classifier = new ImageClassifier();
            var coreImage = new CIImage(scaledImage);

            // Render the scaled image into a pixel buffer the model can consume.
            CVPixelBuffer buffer = new CVPixelBuffer(300, 300, CVPixelFormatType.CV32ARGB);
            UIGraphics.BeginImageContext(new CGSize(300, 300));
            CIContext context = CIContext.FromContext(UIGraphics.GetCurrentContext(), null);
            context.Render(coreImage, buffer);
            UIGraphics.EndImageContext();

            var output = classifier.GetPrediction(buffer, out NSError error);
            imgSelected.Image = scaledImage;
            lblResult.Text = $"This looks like: {output.ClassLabel}";
        }
    }
    imagePicker.DismissModalViewController(true);
}
public void SetInvalidateCallback_Replace()
{
    using (var pixelBuffer = new CVPixelBuffer(20, 10, CVPixelFormatType.CV24RGB)) {
        CMFormatDescriptionError fde;
        using (var desc = CMVideoFormatDescription.CreateForImageBuffer(pixelBuffer, out fde)) {
            var sampleTiming = new CMSampleTimingInfo();
            CMSampleBufferError sbe;
            using (var sb = CMSampleBuffer.CreateForImageBuffer(pixelBuffer, true, desc, sampleTiming, out sbe)) {
                int i = 0;
                var result = sb.SetInvalidateCallback(delegate(CMSampleBuffer buffer) {
                    i++;
                });
                // we cannot replace the (native) callback without getting an error (so we should not replace
                // the managed one either, that would be confusing and make it hard to port code)
                result = sb.SetInvalidateCallback(delegate(CMSampleBuffer buffer) {
                    i--;
                    Assert.AreSame(buffer, sb, "same");
                });
                Assert.That(result, Is.EqualTo(CMSampleBufferError.RequiredParameterMissing), "RequiredParameterMissing");

                sb.Invalidate();
                Assert.That(i, Is.EqualTo(1), "1");
            }
        }
    }
}
void DisplayPixelBuffer(CVPixelBuffer pixelBuffer, double framePTS)
{
    nint width = pixelBuffer.Width;
    nint height = pixelBuffer.Height;
    var f = View.Frame;
    if (width > f.Width || height > f.Height) {
        width /= 2;
        height /= 2;
    }

    var layer = new EAGLLayer();
    // CGAffineTransform is a struct: calling Rotate on the property's return value
    // mutates a copy and is lost, so assign a new transform instead.
    if (Math.Abs(videoPreferredTransform.xx + 1f) < float.Epsilon)
        layer.AffineTransform = CGAffineTransform.MakeRotation(NMath.PI);
    else if (Math.Abs(videoPreferredTransform.yy) < float.Epsilon)
        layer.AffineTransform = CGAffineTransform.MakeRotation(NMath.PI / 2);

    layer.Frame = new CGRect(0f, View.Frame.Height - 50f - height, width, height);
    layer.PresentationRect = new CGSize(width, height);
    layer.TimeCode = framePTS.ToString("0.000");
    layer.SetupGL();
    View.Layer.AddSublayer(layer);
    layer.DisplayPixelBuffer(pixelBuffer);
}
public unsafe void ProcessPixelBuffer(CVImageBuffer imageBuffer)
{
    using (CVPixelBuffer pixelBuffer = imageBuffer as CVPixelBuffer) {
        pixelBuffer.Lock(CVOptionFlags.None);

        int bufferWidth = (int)pixelBuffer.Width;
        int bufferHeight = (int)pixelBuffer.Height;
        int bytesPerRow = (int)pixelBuffer.BytesPerRow;
        byte* basePtr = (byte*)pixelBuffer.BaseAddress.ToPointer();

        // Rows can be padded, so advance by BytesPerRow rather than width * BYTES_PER_PIXEL.
        for (var row = 0; row < bufferHeight; row++) {
            byte* pixelPtr = basePtr + row * bytesPerRow;
            for (var column = 0; column < bufferWidth; column++) {
                // De-green (second byte in BGRA is green)
                *(pixelPtr + 1) = 0;
                pixelPtr += BYTES_PER_PIXEL;
            }
        }

        pixelBuffer.Unlock(CVOptionFlags.None);
    }
}
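ProcessPixelBuffer assumes 32BGRA frames (it zeroes byte 1 of each 4-byte pixel), so the capture output has to request that format explicitly. A minimal configuration sketch; the output setup is illustrative and not part of the original sample:

// Illustrative capture-output setup: request the BGRA layout the method above expects.
var videoOutput = new AVCaptureVideoDataOutput {
    WeakVideoSettings = new CVPixelBufferAttributes {
        PixelFormatType = CVPixelFormatType.CV32BGRA
    }.Dictionary
};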
public void AdjustPixelBuffer(CVPixelBuffer inputBuffer, CVPixelBuffer outputBuffer)
{
    using (CIImage img = CIImage.FromImageBuffer(inputBuffer)) {
        ciFilter.Image = img;
        using (CIImage outImg = ciFilter.OutputImage)
            ciContext.Render(outImg, outputBuffer);
    }
}
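Both AdjustPixelBuffer variants in this collection rely on ciFilter and ciContext fields created elsewhere. A minimal sketch of that setup, assuming a sepia filter purely for illustration (the original sample's filter type is not shown):

// Assumed field setup for AdjustPixelBuffer; the concrete filter is illustrative.
CISepiaTone ciFilter = new CISepiaTone { Intensity = 0.7f };
CIContext ciContext = CIContext.FromOptions(null);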
public MobCatOrNotInput(CVPixelBuffer data)
{
    if (data == null) {
        throw new ArgumentNullException(nameof(data));
    }

    Data = data;
}
public void AdjustPixelBuffer(CVPixelBuffer inputBuffer, CVPixelBuffer outputBuffer)
{
    CIImage img = CIImage.FromImageBuffer(inputBuffer);
    ciFilter.SetValueForKey(img, CIFilterInputKey.Image);
    img = ciFilter.OutputImage;
    ciContext.Render(img, outputBuffer);
}
public coil100Model_CoreMLInput(CVPixelBuffer data)
{
    if (data == null) {
        throw new ArgumentNullException(nameof(data));
    }

    Data = data;
}

public mymodelInput(CVPixelBuffer data)
{
    if (data == null) {
        throw new ArgumentNullException(nameof(data));
    }

    Data = data;
}

public ImageClassifierInput(CVPixelBuffer image)
{
    if (image == null) {
        throw new ArgumentNullException(nameof(image));
    }

    Image = image;
}

public TheseAreBirdsInput(CVPixelBuffer data)
{
    if (data == null) {
        throw new ArgumentNullException(nameof(data));
    }

    Data = data;
}

public customvisionInput(CVPixelBuffer data)
{
    if (data == null) {
        throw new ArgumentNullException(nameof(data));
    }

    Data = data;
}

public CookHappyJuneInput(CVPixelBuffer data)
{
    if (data == null) {
        throw new ArgumentNullException(nameof(data));
    }

    Data = data;
}

public SqueezeNetInput(CVPixelBuffer image)
{
    if (image == null) {
        throw new ArgumentNullException(nameof(image));
    }

    Image = image;
}
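The constructors above all follow the same generated CoreML input-wrapper pattern. A hedged sketch of how such a wrapper is typically consumed: the compiled model name is an assumption, and the wrapper is assumed to implement IMLFeatureProvider, as Xcode-generated wrappers do.

// Illustrative only: the model resource name and wrapper type are assumptions.
IMLFeatureProvider Predict(CVPixelBuffer pixelBuffer)
{
    var modelUrl = NSBundle.MainBundle.GetUrlForResource("SqueezeNet", "mlmodelc");
    var model = MLModel.Create(modelUrl, out NSError error);
    if (error != null)
        throw new NSErrorException(error);

    var input = new SqueezeNetInput(pixelBuffer); // constructor shown above
    return model.GetPrediction(input, out error);
}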
public void Render_Colorspace()
{
    using (var ctx = new EAGLContext(EAGLRenderingAPI.OpenGLES2))
    using (var ci = CIContext.FromContext(ctx))
    using (var cv = new CVPixelBuffer(1, 1, CVPixelFormatType.CV24RGB))
    using (CIImage img = new CIImage(CIColor.FromRgb(0.5f, 0.5f, 0.5f))) {
        // that one "null allowed" was undocumented
        ci.Render(img, cv, RectangleF.Empty, null);
    }
}
public ObjectDetectorInput(CVPixelBuffer image, double iouThreshold, double confidenceThreshold)
{
    if (image == null) {
        throw new ArgumentNullException(nameof(image));
    }

    Image = image;
    IouThreshold = iouThreshold;
    ConfidenceThreshold = confidenceThreshold;
}
private static unsafe bool CropAndScaleToWrapper(CVPixelBuffer outputPixelBuffer, byte[] buffer, Func<CVPixelBuffer, IntPtr, bool> handler)
{
    if (outputPixelBuffer == null) {
        throw new ArgumentNullException(nameof(outputPixelBuffer));
    }

    if (buffer != null && buffer.Length > 0) {
        // Pin the managed buffer so the callee gets a stable pointer.
        fixed (byte* ptr = &buffer[0])
            return handler(outputPixelBuffer, new IntPtr(ptr));
    }

    return handler(outputPixelBuffer, IntPtr.Zero);
}
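The pinned pointer handed to the handler is only valid inside the fixed scope, so the handler must not store it. A hypothetical call site; the scratch-buffer size and the handler body are assumptions, not part of the original source:

// Hypothetical usage: `ptr` points at the pinned scratch buffer, or is
// IntPtr.Zero when no buffer is supplied; it must not outlive the callback.
bool CropAndScale(CVPixelBuffer outputPixelBuffer)
{
    byte[] scratch = new byte[224 * 224 * 4]; // size chosen purely for illustration
    return CropAndScaleToWrapper(outputPixelBuffer, scratch, (pb, ptr) => {
        // a native crop/scale call would go here
        return ptr != IntPtr.Zero;
    });
}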
public override IScanResult Decode(CVPixelBuffer pixelBuffer)
{
    var decoder = PerformanceCounter.Start();

    _barcodeResult = new TaskCompletionSource<IScanResult>();

    var handler = new VNImageRequestHandler(pixelBuffer, new VNImageOptions());
    handler.Perform(new VNRequest[] { barcodesRequest }, out NSError error);

    // barcodesRequest's completion handler is expected to complete _barcodeResult.
    _barcodeResult.Task.Wait();

    PerformanceCounter.Stop(decoder, "Vision framework Decoder took {0} ms.");
    return _barcodeResult.Task.Result;
}
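Decode blocks until _barcodeResult is completed, which has to happen in the completion handler of barcodesRequest. A plausible construction of that field: the handler shape is an assumption inferred from how the TaskCompletionSource is consumed, and ZXingResult is borrowed from the ZXing snippet above (the actual Vision decoder may use its own IScanResult type).

// Assumed construction of barcodesRequest; its completion handler resolves
// the TaskCompletionSource that Decode() waits on.
barcodesRequest = new VNDetectBarcodesRequest((request, err) => {
    var results = request.GetResults<VNBarcodeObservation>();
    var observation = (results != null && results.Length > 0) ? results[0] : null;
    _barcodeResult.TrySetResult(new ZXingResult {
        Success = observation != null,
        Text = observation?.PayloadStringValue,
        Timestamp = DateTime.Now.Ticks
    });
});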
public virtual CVOpenGLESTexture LumaTextureForPixelBuffer (CVPixelBuffer pixelBuffer)
{
    CVOpenGLESTexture lumaTexture = null;
    CVReturn err;

    if (VideoTextureCache == null) {
        Console.Error.WriteLine ("No video texture cache");
        return lumaTexture;
    }

    // Periodic texture cache flush every frame
    VideoTextureCache.Flush (0);

    // CVOpenGLESTextureCacheCreateTextureFromImage will create a GL texture optimally from a CVPixelBufferRef.
    // Y plane
    lumaTexture = VideoTextureCache.TextureFromImage (pixelBuffer, true, All.RedExt, pixelBuffer.Width, pixelBuffer.Height,
        All.RedExt, DataType.UnsignedByte, 0, out err);
    if (lumaTexture == null || err != CVReturn.Success)
        Console.Error.WriteLine ("Error at creating luma texture using CVOpenGLESTextureCacheCreateTextureFromImage: " + err.ToString ());

    return lumaTexture;
}
protected ICMAttachmentBearer GetInstance(Type t) { if (!CMAttachmentInterfaceType.IsAssignableFrom (t)) throw new ArgumentException ("t"); switch (t.Name) { case "CMBlockBuffer": CMBlockBufferError bbe; var result = CMBlockBuffer.CreateEmpty (0, CMBlockBufferFlags.AssureMemoryNow, out bbe); if (bbe == CMBlockBufferError.None) return result; else throw new InvalidOperationException (string.Format ("Could not create the new instance {0}.", bbe.ToString ())); case "CMSampleBuffer": var pixelBuffer = new CVPixelBuffer (20, 10, CVPixelFormatType.CV24RGB); CMFormatDescriptionError fde; var desc = CMVideoFormatDescription.CreateForImageBuffer (pixelBuffer, out fde); var sampleTiming = new CMSampleTimingInfo (); CMSampleBufferError sbe; var sb = CMSampleBuffer.CreateForImageBuffer (pixelBuffer, true, desc, sampleTiming, out sbe); if (sbe == CMSampleBufferError.None) return sb; else throw new InvalidOperationException (string.Format ("Could not create the new instance {0}.", sbe.ToString ())); default: throw new InvalidOperationException (string.Format ("Could not create the new instance for type {0}.", t.Name)); } }
public void DidReadAndWriteSampleBuffer (ReadWriteSampleBufferChannel sampleBufferChannel,
                                         CMSampleBuffer sampleBuffer,
                                         CVPixelBuffer sampleBufferForWrite)
{
    // Calculate progress (scale of 0.0 to 1.0)
    double progress = AVReaderWriter.ProgressOfSampleBufferInTimeRange (sampleBuffer, _timeRange);
    _progressProc ((float)progress * 100);

    // Grab the pixel buffer from the sample buffer, if possible
    CVImageBuffer imageBuffer = sampleBuffer.GetImageBuffer ();
    var pixelBuffer = imageBuffer as CVPixelBuffer;
    if (pixelBuffer != null)
        Delegate.AdjustPixelBuffer (pixelBuffer, sampleBufferForWrite);
}
public void DisplayPixelBuffer (CVPixelBuffer pixelBuffer)
{
    DrawTextInCorner (pixelBuffer);
    CVReturn error;

    if (pixelBuffer != null) {
        int frameWidth = (int)pixelBuffer.Width;
        int frameHeight = (int)pixelBuffer.Height;

        if (videoTextureCache == null) {
            Console.WriteLine ("No video texture cache");
            return;
        }

        CleanUpTextures ();

        // Pick the color conversion matrix based on the buffer's YCbCr matrix attachment.
        CVAttachmentMode attachmentMode;
        var colorAttachments = pixelBuffer.GetAttachment<NSString> (CVImageBuffer.YCbCrMatrixKey, out attachmentMode);
        if (colorAttachments == CVImageBuffer.YCbCrMatrix_ITU_R_601_4)
            preferredConversion = colorConversion601;
        else
            preferredConversion = colorConversion709;

        // Y plane
        GL.ActiveTexture (TextureUnit.Texture0);
        lumaTexture = videoTextureCache.TextureFromImage (pixelBuffer, true, All.RedExt, frameWidth, frameHeight,
            All.RedExt, DataType.UnsignedByte, 0, out error);
        if (lumaTexture == null)
            Console.WriteLine ("Error at CVOpenGLESTextureCache.TextureFromImage");

        GL.BindTexture (lumaTexture.Target, lumaTexture.Name);
        GL.TexParameter (TextureTarget.Texture2D, TextureParameterName.TextureMinFilter, (int)All.Linear);
        GL.TexParameter (TextureTarget.Texture2D, TextureParameterName.TextureMagFilter, (int)All.Linear);
        GL.TexParameter (TextureTarget.Texture2D, TextureParameterName.TextureWrapS, (int)All.ClampToEdge);
        GL.TexParameter (TextureTarget.Texture2D, TextureParameterName.TextureWrapT, (int)All.ClampToEdge);

        // UV plane (half resolution)
        GL.ActiveTexture (TextureUnit.Texture1);
        chromaTexture = videoTextureCache.TextureFromImage (pixelBuffer, true, All.RgExt, frameWidth / 2, frameHeight / 2,
            All.RgExt, DataType.UnsignedByte, 1, out error);
        if (chromaTexture == null)
            Console.WriteLine ("Error at CVOpenGLESTextureCache.TextureFromImage");

        GL.BindTexture (chromaTexture.Target, chromaTexture.Name);
        GL.TexParameter (TextureTarget.Texture2D, TextureParameterName.TextureMinFilter, (int)All.Linear);
        GL.TexParameter (TextureTarget.Texture2D, TextureParameterName.TextureMagFilter, (int)All.Linear);
        GL.TexParameter (TextureTarget.Texture2D, TextureParameterName.TextureWrapS, (int)All.ClampToEdge);
        GL.TexParameter (TextureTarget.Texture2D, TextureParameterName.TextureWrapT, (int)All.ClampToEdge);

        GL.BindFramebuffer (FramebufferTarget.Framebuffer, frameBufferHandle);
        GL.Viewport (0, 0, backingWidth, backingHeight);
    }

    GL.ClearColor (0f, 0f, 0f, 1f);
    GL.Clear (ClearBufferMask.ColorBufferBit);

    GL.UseProgram (Program);
    GL.Uniform1 (uniforms [(int)UniformIndex.RotationAngle], 0f);
    GL.UniformMatrix3 (uniforms [(int)UniformIndex.ColorConversionMatrix], 1, false, preferredConversion);

    // Preserve the video's aspect ratio within the layer's bounds.
    CGRect vertexSamplingRect = AVUtilities.WithAspectRatio (Bounds, PresentationRect);
    var normalizedSamplingSize = new CGSize (0f, 0f);
    var cropScaleAmount = new CGSize (vertexSamplingRect.Width / Bounds.Width, vertexSamplingRect.Height / Bounds.Height);
    if (cropScaleAmount.Width > cropScaleAmount.Height) {
        normalizedSamplingSize.Width = 1f;
        normalizedSamplingSize.Height = cropScaleAmount.Height / cropScaleAmount.Width;
    } else {
        normalizedSamplingSize.Width = 1f;
        normalizedSamplingSize.Height = cropScaleAmount.Width / cropScaleAmount.Height;
    }

    float[] quadVertexData = {
        -1f * (float)normalizedSamplingSize.Width, -1f * (float)normalizedSamplingSize.Height,
        (float)normalizedSamplingSize.Width, -1f * (float)normalizedSamplingSize.Height,
        -1f * (float)normalizedSamplingSize.Width, (float)normalizedSamplingSize.Height,
        (float)normalizedSamplingSize.Width, (float)normalizedSamplingSize.Height,
    };
    GL.VertexAttribPointer ((int)AttributeIndex.Vertex, 2, VertexAttribPointerType.Float, false, 0, quadVertexData);
    GL.EnableVertexAttribArray ((int)AttributeIndex.Vertex);

    var textureSamplingRect = new CGRect (0, 0, 1, 1);
    float[] quadTextureData = {
        (float)textureSamplingRect.GetMinX (), (float)textureSamplingRect.GetMaxY (),
        (float)textureSamplingRect.GetMaxX (), (float)textureSamplingRect.GetMaxY (),
        (float)textureSamplingRect.GetMinX (), (float)textureSamplingRect.GetMinY (),
        (float)textureSamplingRect.GetMaxX (), (float)textureSamplingRect.GetMinY ()
    };
    GL.VertexAttribPointer ((int)AttributeIndex.TextureCoordinates, 2, VertexAttribPointerType.Float, false, 0, quadTextureData);
    GL.EnableVertexAttribArray ((int)AttributeIndex.TextureCoordinates);

    GL.DrawArrays (BeginMode.TriangleStrip, 0, 4);

    GL.BindRenderbuffer (RenderbufferTarget.Renderbuffer, colorBufferHandle);
    context.PresentRenderBuffer ((int)RenderbufferTarget.Renderbuffer);
}
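For reference, the colorConversion601 / colorConversion709 arrays selected above are the standard video-range YCbCr-to-RGB coefficients for BT.601 and BT.709, laid out column-major for GL.UniformMatrix3. Field definitions consistent with the sample would look like the sketch below; the values are the widely published coefficients, shown here as an assumption rather than taken from the original file.

// BT.601, video range (column-major for GL.UniformMatrix3).
static readonly float[] colorConversion601 = {
    1.164f,  1.164f, 1.164f,
    0.0f,   -0.392f, 2.017f,
    1.596f, -0.813f, 0.0f,
};

// BT.709, video range.
static readonly float[] colorConversion709 = {
    1.164f,  1.164f, 1.164f,
    0.0f,   -0.213f, 2.112f,
    1.793f, -0.533f, 0.0f,
};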
public virtual void RenderPixelBuffer(CVPixelBuffer destinationPixelBuffer, CVPixelBuffer foregroundPixelBuffer,
                                      CVPixelBuffer backgroundPixelBuffer, float tween)
{
    DoesNotRecognizeSelector (new MonoTouch.ObjCRuntime.Selector ("_cmd"));
}
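This stub follows the Xamarin binding convention for a required override: the base implementation raises the Objective-C "does not recognize selector" error, so a concrete compositor (such as the OpenGL renderer invoked via oglRender.RenderPixelBuffer in newRenderedPixelBufferForRequest above) must override RenderPixelBuffer with an actual blend of the foreground and background frames.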