/// <summary>
/// Renders one output frame of the transition for the given composition request.
/// </summary>
/// <param name="request">The asynchronous composition request supplying the source frames and timing.</param>
/// <param name="error">Set to <c>null</c> on success; reserved for reporting render failures to the caller.</param>
/// <returns>The destination pixel buffer containing the rendered frame.</returns>
private CVPixelBuffer newRenderedPixelBufferForRequest(AVAsynchronousVideoCompositionRequest request, out NSError error)
{
    // tweenFactor indicates how far within the instruction's time range this frame falls,
    // normalized to [0.0, 1.0] (0.0 = first frame, 1.0 = last frame).
    var tweenFactor = (float)factorForTimeInRange(request.CompositionTime, request.VideoCompositionInstruction.TimeRange);

    var currentInstruction = (CustomVideoCompositionInstruction)request.VideoCompositionInstruction;

    // Source pixel buffers are used as inputs while rendering the transition.
    var foregroundSourceBuffer = request.SourceFrameByTrackID(currentInstruction.ForegroundTrackID);
    var backgroundSourceBuffer = request.SourceFrameByTrackID(currentInstruction.BackgroundTrackID);

    // Destination pixel buffer into which we render the output.
    var dstPixels = renderContext.CreatePixelBuffer();

    // Recompute the normalized render transform whenever the render context changes.
    if (renderContextDidChange)
    {
        // The renderTransform returned by the renderContext is in X: [0, w], Y: [0, h]
        // pixel coordinates, but OpenGL ES clip space is [-1, 1] on both axes,
        // so compose a normalized transform mapping one into the other.
        var renderSize = renderContext.Size;
        var destinationSize = new SizeF(dstPixels.Width, dstPixels.Height);
        var renderContextTransform = new CGAffineTransform(renderSize.Width / 2, 0, 0, renderSize.Height / 2, renderSize.Width / 2, renderSize.Height / 2);
        var destinationTransform = new CGAffineTransform(2 / destinationSize.Width, 0, 0, 2 / destinationSize.Height, -1, -1);
        var normalizedRenderTransform = CGAffineTransform.Multiply(CGAffineTransform.Multiply(renderContextTransform, renderContext.RenderTransform), destinationTransform);

        oglRender.RenderTransform = normalizedRenderTransform;
        renderContextDidChange = false;
    }

    oglRender.RenderPixelBuffer(dstPixels, foregroundSourceBuffer, backgroundSourceBuffer, tweenFactor);

    error = null;
    return dstPixels;
}
/// <summary>
/// Renders one output frame of the transition for the given composition request.
/// </summary>
/// <param name="request">The asynchronous composition request supplying the source frames and timing.</param>
/// <param name="error">Set to <c>null</c> on success; reserved for reporting render failures to the caller.</param>
/// <returns>The destination pixel buffer containing the rendered frame.</returns>
private CVPixelBuffer NewRenderedPixelBufferForRequest(AVAsynchronousVideoCompositionRequest request, out NSError error)
{
    // tweenFactor indicates how far within that timeRange we are rendering this frame,
    // normalized to vary between 0.0 and 1.0:
    // 0.0 indicates the time of the first frame in that videoComposition timeRange,
    // 1.0 indicates the time of the last frame in that videoComposition timeRange.
    var tweenFactor = (float)FactorForTimeInRange(request.CompositionTime, request.VideoCompositionInstruction.TimeRange);

    // Direct cast instead of `as`: the original `as` result was dereferenced without a
    // null check, which would surface an unexpected instruction type as a confusing
    // NullReferenceException; InvalidCastException fails fast at the real problem.
    var currentInstruction = (CustomVideoCompositionInstruction)request.VideoCompositionInstruction;

    // Source pixel buffers are used as inputs while rendering the transition.
    var foregroundSourceBuffer = request.SourceFrameByTrackID(currentInstruction.ForegroundTrackId);
    var backgroundSourceBuffer = request.SourceFrameByTrackID(currentInstruction.BackgroundTrackId);

    // Destination pixel buffer into which we render the output.
    var dstPixels = renderContext.CreatePixelBuffer();

    // Recompute the normalized render transform every time the render context changes.
    if (renderContextDidChange)
    {
        // The renderTransform returned by the renderContext is in X: [0, w], Y: [0, h]
        // pixel coordinates, but since we render using OpenGL ES, whose clip space is
        // [-1, 1] on both axes, we compose a normalized transform.
        var renderSize = renderContext.Size;
        var destinationSize = new CGSize(dstPixels.Width, dstPixels.Height);
        var renderContextTransform = new CGAffineTransform(renderSize.Width / 2, 0, 0, renderSize.Height / 2, renderSize.Width / 2, renderSize.Height / 2);
        var destinationTransform = new CGAffineTransform(2 / destinationSize.Width, 0, 0, 2 / destinationSize.Height, -1, -1);
        var normalizedRenderTransform = CGAffineTransform.Multiply(CGAffineTransform.Multiply(renderContextTransform, renderContext.RenderTransform), destinationTransform);

        Render.RenderTransform = normalizedRenderTransform;
        renderContextDidChange = false;
    }

    Render.RenderPixelBuffer(dstPixels, foregroundSourceBuffer, backgroundSourceBuffer, tweenFactor);

    error = null;
    return dstPixels;
}