/// <summary>
/// Hand out a frame with the same dimensions as |source|, reusing a pooled
/// frame when one is available for those dimensions and allocating a fresh
/// one otherwise. The returned frame's pixel contents are unspecified.
/// </summary>
public virtual VideoRenderer.I420Frame takeFrame(VideoRenderer.I420Frame source)
{
    long key = summarizeFrameDimensions(source);
    lock (availableFrames)
    {
        LinkedList<VideoRenderer.I420Frame> pool;
        if (!availableFrames.TryGetValue(key, out pool))
        {
            // First frame of these dimensions: create its (empty) free-list.
            pool = new LinkedList<VideoRenderer.I420Frame>();
            availableFrames[key] = pool;
        }
        if (pool.Count == 0)
        {
            // Nothing to recycle; allocate a new frame matching |source|.
            return new VideoRenderer.I420Frame(source.Width, source.Height, source.YuvStrides.ToArray(), null);
        }
        VideoRenderer.I420Frame recycled = pool.First.Value;
        pool.RemoveFirst();
        return recycled;
    }
}
/// <summary>
/// Queue |frame| to be uploaded for |stream|. Any not-yet-rendered frame
/// already queued for the same stream is dropped (returned to the pool).
/// </summary>
public virtual void queueFrame(Endpoint stream, Org.Webrtc.VideoRenderer.I420Frame frame)
{
    // Paying for the copy of the YUV data here allows CSC and painting time
    // to get spent on the render thread instead of the UI thread.
    abortUnless(FramePool.validateDimensions(frame), "Frame too large!");
    VideoRenderer.I420Frame copy = framePool.takeFrame(frame).CopyFrom(frame);
    bool scheduleRender;
    lock (framesToRender)
    {
        // A new render needs to be scheduled (via updateFrames()) iff there isn't
        // already a render scheduled, which is true iff framesToRender is empty.
        scheduleRender = framesToRender.Count == 0;
        VideoRenderer.I420Frame stale;
        framesToRender.TryGetValue(stream, out stale);
        framesToRender[stream] = copy;
        if (stale != null)
        {
            // The previous frame for this stream was never rendered; recycle it.
            framePool.returnFrame(stale);
        }
    }
    if (scheduleRender)
    {
        QueueEvent(updateFrames);
    }
}
// Return a code summarizing the dimensions of |frame|. Two frames that
// return the same summary are guaranteed to be able to store each others'
// contents. Used like Object.hashCode(), but we need all the bits of a long
// to do a good job, and hashCode() returns int, so we do this.
private static long summarizeFrameDimensions(VideoRenderer.I420Frame frame)
{
    // Pack width, height and the three plane strides into one long using
    // MAX_DIMENSION as the radix (each component is < MAX_DIMENSION, enforced
    // by validateDimensions(), so the packing is collision-free).
    long code = frame.Width;
    long[] components = { frame.Height, frame.YuvStrides[0], frame.YuvStrides[1], frame.YuvStrides[2] };
    foreach (long component in components)
    {
        code = code * MAX_DIMENSION + component;
    }
    return code;
}
// Upload the YUV planes from |frame| to |textures| (one GL texture per plane).
private void texImage2D(VideoRenderer.I420Frame frame, int[] textures)
{
    for (int plane = 0; plane < 3; ++plane)
    {
        ByteBuffer pixels = frame.YuvPlanes[plane];
        GLES20.GlActiveTexture(GLES20.GlTexture0 + plane);
        GLES20.GlBindTexture(GLES20.GlTexture2d, textures[plane]);
        // Plane 0 (Y) is full resolution; U and V are half resolution in each
        // dimension, per the I420 layout.
        bool isLuma = plane == 0;
        int w = isLuma ? frame.Width : frame.Width / 2;
        int h = isLuma ? frame.Height : frame.Height / 2;
        // The upload below assumes tightly packed rows (stride == width).
        abortUnless(w == frame.YuvStrides[plane], frame.YuvStrides[plane] + "!=" + w);
        GLES20.GlTexImage2D(GLES20.GlTexture2d, 0, GLES20.GlLuminance, w, h, 0, GLES20.GlLuminance, GLES20.GlUnsignedByte, pixels);
    }
    checkNoGLES2Error();
}
/// <summary>
/// Return |frame| to the pool so a later takeFrame() with matching
/// dimensions can reuse it.
/// </summary>
/// <exception cref="System.ArgumentException">
/// Thrown if no frame with these dimensions has ever been handed out.
/// </exception>
public virtual void returnFrame(VideoRenderer.I420Frame frame)
{
    long desc = summarizeFrameDimensions(frame);
    lock (availableFrames)
    {
        // BUG FIX: the original used the Dictionary indexer, which throws
        // KeyNotFoundException for a missing key (unlike Java's HashMap.get()
        // returning null, which this code was ported from), so the subsequent
        // null check and the intended ArgumentException were unreachable.
        LinkedList<VideoRenderer.I420Frame> frames;
        if (!availableFrames.TryGetValue(desc, out frames))
        {
            throw new System.ArgumentException("Unexpected frame dimensions");
        }
        frames.AddFirst(frame);
    }
}
// Upload the planes from |framesToRender| to the textures owned by this View,
// then recycle the frames and schedule a repaint.
private void updateFrames()
{
    VideoRenderer.I420Frame local = null;
    VideoRenderer.I420Frame remote = null;
    lock (framesToRender)
    {
        // Take (and clear) whatever is pending for each endpoint; absent
        // entries leave the corresponding local as null.
        framesToRender.TryGetValue(Endpoint.LOCAL, out local);
        framesToRender.Remove(Endpoint.LOCAL);
        framesToRender.TryGetValue(Endpoint.REMOTE, out remote);
        framesToRender.Remove(Endpoint.REMOTE);
    }
    bool uploadedAny = false;
    if (local != null)
    {
        texImage2D(local, yuvTextures[0]);
        framePool.returnFrame(local);
        uploadedAny = true;
    }
    if (remote != null)
    {
        texImage2D(remote, yuvTextures[1]);
        framePool.returnFrame(remote);
        uploadedAny = true;
    }
    // updateFrames() is only ever scheduled when a frame was queued, so
    // finding nothing here indicates a scheduling bug.
    abortUnless(uploadedAny, "Nothing to render!");
    RequestRender();
}
/// <summary>
/// Fetch a frame sized like |source| from the pool, allocating a new one if
/// no recycled frame of those dimensions is available. Pixel contents of the
/// returned frame are undefined until the caller fills them.
/// </summary>
public virtual VideoRenderer.I420Frame takeFrame(VideoRenderer.I420Frame source)
{
    long key = summarizeFrameDimensions(source);
    lock (availableFrames)
    {
        LinkedList<VideoRenderer.I420Frame> pool;
        if (!availableFrames.TryGetValue(key, out pool))
        {
            // No free-list exists yet for these dimensions; start one.
            pool = new LinkedList<VideoRenderer.I420Frame>();
            availableFrames[key] = pool;
        }
        if (pool.Count > 0)
        {
            VideoRenderer.I420Frame recycled = pool.First.Value;
            pool.RemoveFirst();
            return recycled;
        }
        // Pool miss: allocate a frame matching |source|'s geometry.
        return new VideoRenderer.I420Frame(source.Width, source.Height, source.YuvStrides.ToArray(), null);
    }
}
/// <summary>Forward |frame| to the owning view's upload queue for this stream.</summary>
public void RenderFrame(VideoRenderer.I420Frame frame) => view.queueFrame(stream, frame);
/// <summary>
/// Validate that |frame| can be managed by the pool: its width, height and
/// all three plane strides must be below MAX_DIMENSION (a prerequisite for
/// summarizeFrameDimensions()'s collision-free packing).
/// </summary>
public static bool validateDimensions(VideoRenderer.I420Frame frame)
{
    if (frame.Width >= MAX_DIMENSION || frame.Height >= MAX_DIMENSION)
    {
        return false;
    }
    for (int i = 0; i < 3; ++i)
    {
        if (frame.YuvStrides[i] >= MAX_DIMENSION)
        {
            return false;
        }
    }
    return true;
}