// Polls the receiver for the next NDI video frame and decodes it into a
// RenderTexture. Returns null when the receiver isn't ready or when no
// new frame has arrived since the last call.
RenderTexture TryReceiveFrame()
{
    PrepareInternalObjects();

    // The receiver may not have been created yet; nothing to do then.
    if (_recv == null) return null;

    // Capture attempt: null means no frame was available this time.
    var captured = RecvHelper.TryCaptureVideoFrame(_recv);
    if (captured == null) return null;

    var frame = (Interop.VideoFrame)captured;

    // Convert the raw frame data into a texture, honoring the alpha
    // flag derived from the frame's FourCC code.
    var texture = _converter.Decode(
        frame.Width, frame.Height,
        Util.CheckAlpha(frame.FourCC),
        frame.Data);

    // The native frame must be returned to the receiver after decoding.
    _recv.FreeVideoFrame(frame);

    return texture;
}
// Polls the receiver for the next NDI video frame, decodes it into a
// RenderTexture, and captures any attached per-frame metadata string
// into the `metadata` field (cleared to null when the frame has none).
// Returns null when the receiver isn't ready or no frame is available.
RenderTexture TryReceiveFrame()
{
    PrepareInternalObjects();

    // Receiver not created yet — nothing to poll.
    if (_recv == null) return null;

    // Capture attempt: null means no new frame this time.
    var captured = RecvHelper.TryCaptureVideoFrame(_recv);
    if (captured == null) return null;

    var frame = (Interop.VideoFrame)captured;

    // Decode the raw frame into a texture; alpha handling is decided
    // from the frame's FourCC code.
    var texture = _converter.Decode(
        frame.Width, frame.Height,
        Util.CheckAlpha(frame.FourCC),
        frame.Data);

    // Pull the metadata string out of the native frame, if present.
    // NOTE(review): decoded as ANSI here; NDI metadata is typically
    // UTF-8 XML — confirm non-ASCII content round-trips correctly.
    metadata = frame.Metadata != System.IntPtr.Zero
        ? Marshal.PtrToStringAnsi(frame.Metadata)
        : null;

    // The native frame must be released after we're done with it.
    _recv.FreeVideoFrame(frame);

    return texture;
}
// Polls the receiver for the next NDI video frame, decodes it into a
// RenderTexture, and stores the frame's metadata string (or null) in
// the `metadata` field. Returns null when the receiver isn't ready or
// when no new frame is available.
RenderTexture TryReceiveFrame()
{
    PrepareReceiverObjects();

    // No receiver yet — nothing to capture.
    if (_recv == null) return null;

    // Try to capture a frame; a null result means none was pending.
    var captured = RecvHelper.TryCaptureVideoFrame(_recv);
    if (captured == null) return null;

    var frame = (Interop.VideoFrame)captured;

    // Decode the raw pixels into a texture; the alpha flag comes from
    // the frame's FourCC code.
    var texture = _converter.Decode(
        frame.Width, frame.Height,
        Util.HasAlpha(frame.FourCC),
        frame.Data);

    // Retrieve per-frame metadata when the frame carries any.
    // NOTE(review): decoded as ANSI; NDI metadata is typically UTF-8
    // XML — confirm non-ASCII content survives this conversion.
    metadata = frame.Metadata != IntPtr.Zero
        ? Marshal.PtrToStringAnsi(frame.Metadata)
        : null;

    // Hand the native frame back to the receiver.
    _recv.FreeVideoFrame(frame);

    return texture;
}
// Per-frame pump: drains one queued metadata item of each kind, decodes
// the newest queued video frame (discarding older ones), and pushes the
// resulting texture to the target renderer/texture.
// Queues are filled elsewhere (producer not visible in this chunk) and
// are guarded here by locking the queue objects themselves.
void Update()
{
    PrepareInternalObjects();

    // Do nothing if the recv object is not ready.
    if (_recv == null) { return; }

    // Deliver at most ONE metadata frame per Update call (intentional
    // rate-limiting or an assumption of low arrival rate — the producer
    // side isn't visible here, so items could back up if they arrive
    // faster than once per Update).
    lock (metadataFrameQueue)
    {
        if (metadataFrameQueue.Count > 0)
        {
            onMetaDataReceived?.Invoke(metadataFrameQueue[0]);
            metadataFrameQueue.RemoveAt(0);
        }
    }

    // Same one-per-Update delivery for audio metadata.
    lock (audioMetadataQueue)
    {
        if (audioMetadataQueue.Count > 0)
        {
            onAudioMetaDataReceived?.Invoke(audioMetadataQueue[0]);
            audioMetadataQueue.RemoveAt(0);
        }
    }

    // Handle VideoFrames.
    RenderTexture rt = null;
    lock (videoFrameQueue)
    {
        if (videoFrameQueue.Count > 0)
        {
            // Unlike audio, we don't need to process every frame:
            // only the latest one matters for display, so take the
            // last element and discard the rest below.
            var vf = videoFrameQueue[videoFrameQueue.Count - 1];

            // Pixel format conversion (alpha flag derived from the
            // frame's FourCC code). Decode happens BEFORE the native
            // frames are freed — vf.Data must stay valid until then.
            rt = _converter.Decode
                (vf.Width, vf.Height,
                 Util.CheckAlpha(vf.FourCC), vf.Data);

            // Fire the video-frame metadata callback when present.
            // NOTE(review): decoded as ANSI; NDI metadata is typically
            // UTF-8 XML — confirm non-ASCII content is handled.
            if (vf.Metadata != IntPtr.Zero)
            {
                onVideoMetaDataReceived?.Invoke(Marshal.PtrToStringAnsi(vf.Metadata));
            }

            // Cache the frame resolution, updating only on change.
            // (The null check implies `resolution` is nullable —
            // presumably Vector2?; verify the field declaration.)
            if (resolution == null || resolution.x != vf.Width || resolution.y != vf.Height)
            {
                resolution = new Vector2(vf.Width, vf.Height);
            }

            // Free EVERY queued native frame (including the one just
            // decoded) and clear the queue, still under the lock.
            videoFrameQueue.ForEach(v => _recv.FreeVideoFrame(v));
            videoFrameQueue.Clear();
        }
    }

    // No new frame this Update — leave previous output untouched.
    if (rt == null) { return; }

    // Material property override: set the decoded texture on the
    // target renderer via a MaterialPropertyBlock (avoids cloning the
    // material).
    if (_targetRenderer != null)
    {
        _targetRenderer.GetPropertyBlock(_override);
        _override.SetTexture(_targetMaterialProperty, rt);
        _targetRenderer.SetPropertyBlock(_override);
    }

    // External texture update: copy the result into a user-supplied
    // RenderTexture as well.
    if (_targetTexture != null)
    {
        Graphics.Blit(rt, _targetTexture);
    }
}