CVPixelBuffer newRenderedPixelBufferForRequest(AVAsynchronousVideoCompositionRequest request, out NSError error)
        {
            CVPixelBuffer dstPixels   = null;
            float         tweenFactor = (float)factorForTimeInRange(request.CompositionTime, request.VideoCompositionInstruction.TimeRange);

            var currentInstruction = (CustomVideoCompositionInstruction)request.VideoCompositionInstruction;

            CVPixelBuffer foregroundSourceBuffer = request.SourceFrameByTrackID(currentInstruction.ForegroundTrackID);
            CVPixelBuffer backgroundSourceBuffer = request.SourceFrameByTrackID(currentInstruction.BackgroundTrackID);

            dstPixels = renderContext.CreatePixelBuffer();

            if (renderContextDidChange)
            {
                var renderSize                = renderContext.Size;
                var destinationSize           = new SizeF(dstPixels.Width, dstPixels.Height);
                var renderContextTransform    = new CGAffineTransform(renderSize.Width / 2, 0, 0, renderSize.Height / 2, renderSize.Width / 2, renderSize.Height / 2);
                var destinationTransform      = new CGAffineTransform(2 / destinationSize.Width, 0, 0, 2 / destinationSize.Height, -1, -1);
                var normalizedRenderTransform = CGAffineTransform.Multiply(CGAffineTransform.Multiply(renderContextTransform, renderContext.RenderTransform), destinationTransform);
                oglRender.RenderTransform = normalizedRenderTransform;

                renderContextDidChange = false;
            }

            oglRender.RenderPixelBuffer(dstPixels, foregroundSourceBuffer, backgroundSourceBuffer, tweenFactor);

            error = null;
            return dstPixels;
        }
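The tween factor above comes from a factorForTimeInRange helper that is not shown in this listing. A minimal sketch, assuming the instruction's time range has a non-zero duration and that the helper simply normalizes the composition time into [0, 1]:

        // Hypothetical helper (not part of the snippet above): normalizes the composition time
        // to [0, 1] within the instruction's time range; 0 = first frame, 1 = last frame.
        double factorForTimeInRange (CMTime time, CMTimeRange range)
        {
            CMTime elapsed = CMTime.Subtract (time, range.Start);
            return elapsed.Seconds / range.Duration.Seconds;
        }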
Example #2
 public override void StartVideoCompositionRequest(AVAsynchronousVideoCompositionRequest request)
 {
     renderingQueue.DispatchAsync(() =>
     {
         // Check if all pending requests have been cancelled
         if (shouldCancelAllRequests)
         {
             request.FinishCancelledRequest();
         }
         else
         {
             // Get the next rendered pixel buffer
             var resultPixels = NewRenderedPixelBufferForRequest(request, out NSError error);
             if (resultPixels != null)
             {
                 // The resulting pixel buffer from the OpenGL renderer is passed along to the request
                 request.FinishWithComposedVideoFrame(resultPixels);
                 resultPixels.Dispose();
                 resultPixels = null;
             }
             else
             {
                 request.FinishWithError(error);
             }
         }
     });
 }
		public override void StartVideoCompositionRequest (AVAsynchronousVideoCompositionRequest asyncVideoCompositionRequest)
		{
			renderingQueue.DispatchAsync (() => {
				if (shouldCancelAllRequests) {
					asyncVideoCompositionRequest.FinishCancelledRequest ();
				} else {
					NSError error;
					CVPixelBuffer resultPixels = newRenderedPixelBufferForRequest (asyncVideoCompositionRequest, out error);
					if (resultPixels != null) {
						asyncVideoCompositionRequest.FinishWithComposedVideoFrame (resultPixels);
						resultPixels.Dispose ();
					} else {
						asyncVideoCompositionRequest.FinishWithError (error);
					}
				}
			});
		}
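The shouldCancelAllRequests flag checked in these overrides is typically toggled from CancelAllPendingVideoCompositionRequests. A minimal sketch, assuming the same renderingQueue and shouldCancelAllRequests fields used above:

		public override void CancelAllPendingVideoCompositionRequests ()
		{
			// Pending blocks on renderingQueue see this flag and finish their requests as cancelled.
			shouldCancelAllRequests = true;
			renderingQueue.DispatchBarrierAsync (() => {
				// The barrier runs once all earlier blocks have drained; start accepting requests again.
				shouldCancelAllRequests = false;
			});
		}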
Example #4
        private CVPixelBuffer NewRenderedPixelBufferForRequest(AVAsynchronousVideoCompositionRequest request, out NSError error)
        {
            CVPixelBuffer dstPixels;

            // tweenFactor indicates how far within that timeRange are we rendering this frame. This is normalized to vary between 0.0 and 1.0.
            // 0.0 indicates the time at first frame in that videoComposition timeRange
            // 1.0 indicates the time at last frame in that videoComposition timeRange
            var tweenFactor = (float)FactorForTimeInRange(request.CompositionTime, request.VideoCompositionInstruction.TimeRange);

            var currentInstruction = request.VideoCompositionInstruction as CustomVideoCompositionInstruction;

            // Source pixel buffers are used as inputs while rendering the transition
            var foregroundSourceBuffer = request.SourceFrameByTrackID(currentInstruction.ForegroundTrackId);
            var backgroundSourceBuffer = request.SourceFrameByTrackID(currentInstruction.BackgroundTrackId);

            // Destination pixel buffer into which we render the output
            dstPixels = renderContext.CreatePixelBuffer();

            // Recompute the normalized render transform every time the render context changes
            if (renderContextDidChange)
            {
                // The renderTransform returned by the renderContext is expressed in an X: [0, w], Y: [0, h] coordinate system.
                // Since this sample renders with OpenGL ES, whose coordinates span [-1, 1], we compute a normalized transform.
                var renderSize                = renderContext.Size;
                var destinationSize           = new CGSize(dstPixels.Width, dstPixels.Height);
                var renderContextTransform    = new CGAffineTransform(renderSize.Width / 2, 0, 0, renderSize.Height / 2, renderSize.Width / 2, renderSize.Height / 2);
                var destinationTransform      = new CGAffineTransform(2 / destinationSize.Width, 0, 0, 2 / destinationSize.Height, -1, -1);
                var normalizedRenderTransform = CGAffineTransform.Multiply(CGAffineTransform.Multiply(renderContextTransform, renderContext.RenderTransform), destinationTransform);
                Render.RenderTransform = normalizedRenderTransform;

                renderContextDidChange = false;
            }

            Render.RenderPixelBuffer(dstPixels, foregroundSourceBuffer, backgroundSourceBuffer, tweenFactor);

            error = null;
            return dstPixels;
        }
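The renderContext and renderContextDidChange fields consumed above are normally updated from the compositor's RenderContextChanged override. A minimal sketch under that assumption:

        public override void RenderContextChanged (AVVideoCompositionRenderContext newRenderContext)
        {
            renderingQueue.DispatchSync (() =>
            {
                renderContext = newRenderContext;
                // Forces NewRenderedPixelBufferForRequest to recompute the normalized render transform.
                renderContextDidChange = true;
            });
        }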
 public override void StartVideoCompositionRequest(AVAsynchronousVideoCompositionRequest asyncVideoCompositionRequest)
 {
     renderingQueue.DispatchAsync(() => {
         if (shouldCancelAllRequests)
         {
             asyncVideoCompositionRequest.FinishCancelledRequest();
         }
         else
         {
             NSError error = null;
             CVPixelBuffer resultPixels = newRenderedPixelBufferForRequest(asyncVideoCompositionRequest, out error);
             if (resultPixels != null)
             {
                 asyncVideoCompositionRequest.FinishWithComposedVideoFrame(resultPixels);
                 resultPixels.Dispose();
             }
             else
             {
                 asyncVideoCompositionRequest.FinishWithError(error);
             }
         }
     });
 }
		CVPixelBuffer newRenderedPixelBufferForRequest (AVAsynchronousVideoCompositionRequest request, out NSError error)
		{
			CVPixelBuffer dstPixels;
			float tweenFactor = (float)FactorForTimeInRange (request.CompositionTime, request.VideoCompositionInstruction.TimeRange);

			var currentInstruction = (CustomVideoCompositionInstruction)request.VideoCompositionInstruction;

			CVPixelBuffer foregroundSourceBuffer = request.SourceFrameByTrackID (currentInstruction.ForegroundTrackID);
			CVPixelBuffer backgroundSourceBuffer = request.SourceFrameByTrackID (currentInstruction.BackgroundTrackID);

			dstPixels = renderContext.CreatePixelBuffer ();

			if (renderContextDidChange) {
				var renderSize = renderContext.Size;
				var destinationSize = new CGSize (dstPixels.Width, dstPixels.Height);
				var renderContextTransform = new CGAffineTransform (renderSize.Width / 2, 0, 0, renderSize.Height / 2, renderSize.Width / 2, renderSize.Height / 2);
				var destinationTransform = new CGAffineTransform (2 / destinationSize.Width, 0, 0, 2 / destinationSize.Height, -1, -1);
				var normalizedRenderTransform = CGAffineTransform.Multiply (CGAffineTransform.Multiply (renderContextTransform, renderContext.RenderTransform), destinationTransform);
				oglRender.RenderTransform = normalizedRenderTransform;

				renderContextDidChange = false;
			}

			oglRender.RenderPixelBuffer (dstPixels, foregroundSourceBuffer, backgroundSourceBuffer, tweenFactor);

			error = null;
			return dstPixels;
		}
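To drive one of these compositors, its class is assigned to the video composition before playback or export. A hypothetical wiring sketch (CustomVideoCompositor, instructions, and playerItem are assumed names, not part of the snippets above):

		var videoComposition = AVMutableVideoComposition.Create ();
		videoComposition.CustomVideoCompositorClass = new ObjCRuntime.Class (typeof (CustomVideoCompositor));
		// The instructions array is built elsewhere; each CustomVideoCompositionInstruction carries the
		// foreground/background track IDs that SourceFrameByTrackID resolves per frame.
		videoComposition.Instructions = instructions;
		videoComposition.FrameDuration = new CMTime (1, 30);
		videoComposition.RenderSize = new CGSize (1280, 720);
		playerItem.VideoComposition = videoComposition;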