// Screen-sharing loop: once per rendered frame, captures the pixels inside
// mRect and pushes them to the Agora engine as an external raw video frame.
// Runs until the `running` flag is cleared.
IEnumerator shareScreen()
{
    while (running)
    {
        // Wait until Unity has finished rendering the current frame.
        yield return new WaitForEndOfFrame();

        // Copy the pixels covered by mRect into the capture texture.
        mTexture.ReadPixels(mRect, 0, 0);
        mTexture.Apply();

        // Raw pixel bytes of the captured region.
        byte[] frameData = mTexture.GetRawTextureData();

        // Engine not created yet — nothing to push this frame.
        if (mRtcEngine == null)
        {
            continue;
        }

        // Describe the raw buffer for the SDK. VIDEO_PIXEL_RGBA matches the
        // v3.x SDK (v2.9 used VIDEO_PIXEL_BGRA).
        ExternalVideoFrame frame = new ExternalVideoFrame
        {
            type = ExternalVideoFrame.VIDEO_BUFFER_TYPE.VIDEO_BUFFER_RAW_DATA,
            format = ExternalVideoFrame.VIDEO_PIXEL_FORMAT.VIDEO_PIXEL_RGBA, // V.3.x.x
            buffer = frameData,
            stride = (int)mRect.width,   // frame width in pixels
            height = (int)mRect.height,  // frame height in pixels
            // Trim 10 px from every edge of the pushed frame.
            cropLeft = 10,
            cropTop = 10,
            cropRight = 10,
            cropBottom = 10,
            rotation = 180,              // rotation: 0, 90, 180, or 270
            timestamp = timestamp++      // monotonically increasing frame stamp
        };

        mRtcEngine.PushVideoFrame(frame);

        // Periodic progress log every 100 pushed frames.
        if (timestamp % 100 == 0)
        {
            Debug.LogWarning("Pushed frame = " + timestamp);
        }
    }
}
// Pushes one camera texture to the Agora engine as an external raw video frame.
// Silently does nothing when the texture, its pixel data, or the engine
// instance is unavailable.
void ShareCam(Texture2D tex)
{
    if (tex == null)
    {
        return;
    }

    byte[] bytes = tex.GetRawTextureData();
    // Guard: an empty buffer (e.g. texture not yet readable) previously
    // crashed the unused Marshal.SizeOf(bytes[0]) computation.
    if (bytes == null || bytes.Length == 0)
    {
        return;
    }

    // Query whether an IRtcEngine instance exists.
    IRtcEngine rtc = IRtcEngine.QueryEngine();
    if (rtc == null)
    {
        return;
    }

    // Create the external video frame descriptor.
    ExternalVideoFrame externalVideoFrame = new ExternalVideoFrame();
    // Buffer type: raw pixel data.
    externalVideoFrame.type = ExternalVideoFrame.VIDEO_BUFFER_TYPE.VIDEO_BUFFER_RAW_DATA;
    // Pixel format (an NV12 variant was used here at some point for the
    // native camera path; BGRA matches Texture2D raw data in this build).
    externalVideoFrame.format = ExternalVideoFrame.VIDEO_PIXEL_FORMAT.VIDEO_PIXEL_BGRA;
    // Raw pixel payload.
    externalVideoFrame.buffer = bytes;
    // Frame width in pixels.
    externalVideoFrame.stride = tex.width;
    // Frame height in pixels.
    externalVideoFrame.height = tex.height;
    // No cropping on any side.
    externalVideoFrame.cropLeft = 0;
    externalVideoFrame.cropTop = 0;
    externalVideoFrame.cropRight = 0;
    externalVideoFrame.cropBottom = 0;
    // Rotation: 0, 90, 180, or 270.
    externalVideoFrame.rotation = 180;
    // Monotonically increasing frame timestamp.
    externalVideoFrame.timestamp = i++;

    // Push the frame. NOTE(review): the SDK's integer result code is not
    // checked here — consider logging non-zero results.
    rtc.PushVideoFrame(externalVideoFrame);
}
// Captures the screen region described by mRect once (after the current frame
// finishes rendering) and pushes it to the Agora engine as a raw video frame.
// (Original comments were mojibake from a mis-encoded Chinese source; rewritten
// in English.)
IEnumerator shareScreen()
{
    yield return new WaitForEndOfFrame();

    // Read the screen pixels inside mRect into the capture texture.
    mTexture.ReadPixels(mRect, 0, 0);
    // Apply the pixel changes.
    mTexture.Apply();

    // Get the raw texture data as a byte array.
    byte[] bytes = mTexture.GetRawTextureData();
    // Guard: an empty buffer previously crashed the unused
    // Marshal.SizeOf(bytes[0]) computation with IndexOutOfRangeException.
    if (bytes == null || bytes.Length == 0)
    {
        yield break;
    }

    // Query whether an IRtcEngine instance exists.
    IRtcEngine rtc = IRtcEngine.QueryEngine();
    if (rtc != null)
    {
        // Create the external video frame descriptor.
        ExternalVideoFrame externalVideoFrame = new ExternalVideoFrame();
        // Buffer type: raw pixel data.
        externalVideoFrame.type = ExternalVideoFrame.VIDEO_BUFFER_TYPE.VIDEO_BUFFER_RAW_DATA;
        // Pixel format.
        externalVideoFrame.format = ExternalVideoFrame.VIDEO_PIXEL_FORMAT.VIDEO_PIXEL_BGRA;
        // Raw pixel payload.
        externalVideoFrame.buffer = bytes;
        // Frame width in pixels.
        externalVideoFrame.stride = (int)mRect.width;
        // Frame height in pixels.
        externalVideoFrame.height = (int)mRect.height;
        // Trim 10 px from every edge of the pushed frame.
        externalVideoFrame.cropLeft = 10;
        externalVideoFrame.cropTop = 10;
        externalVideoFrame.cropRight = 10;
        externalVideoFrame.cropBottom = 10;
        // Rotation: 0, 90, 180, or 270.
        externalVideoFrame.rotation = 180;
        // Monotonically increasing frame timestamp.
        externalVideoFrame.timestamp = i++;
        // Push the frame; the SDK returns an integer result code.
        int a = rtc.PushVideoFrame(externalVideoFrame);
    }
}
// Pushes one raw frame buffer to the remote client via the Agora engine,
// then invokes onFinish on the next frame.
// bytes  - raw pixel data in the format given by the PixelFormat field.
// width  - frame width in pixels (used as the stride).
// height - frame height in pixels.
// onFinish - optional completion callback; invoked after the push.
IEnumerator PushFrame(byte[] bytes, int width, int height, System.Action onFinish)
{
    if (bytes == null || bytes.Length == 0)
    {
        Debug.LogError("Zero bytes found!!!!");
        yield break;
    }

    IRtcEngine rtc = IRtcEngine.QueryEngine();
    // If the engine is present, build and push the frame.
    if (rtc != null)
    {
        // Create a new external video frame.
        ExternalVideoFrame externalVideoFrame = new ExternalVideoFrame();
        // Set the buffer type of the video frame.
        externalVideoFrame.type = ExternalVideoFrame.VIDEO_BUFFER_TYPE.VIDEO_BUFFER_RAW_DATA;
        // Pixel format comes from the configurable PixelFormat field
        // (BGRA was previously hard-coded here).
        externalVideoFrame.format = PixelFormat;
        // Raw pixel payload.
        externalVideoFrame.buffer = bytes;
        // Frame width in pixels.
        externalVideoFrame.stride = width;
        // Frame height in pixels.
        externalVideoFrame.height = height;
        // Trim 10 px from every edge of the pushed frame.
        externalVideoFrame.cropLeft = 10;
        externalVideoFrame.cropTop = 10;
        externalVideoFrame.cropRight = 10;
        externalVideoFrame.cropBottom = 10;
        // Rotation: 0, 90, 180, or 270.
        externalVideoFrame.rotation = 90;
        // Monotonically increasing frame timestamp.
        externalVideoFrame.timestamp = i++;
        // Push the frame; the SDK returns an integer result code.
        int a = rtc.PushVideoFrame(externalVideoFrame);
        Debug.Log(" pushVideoFrame(" + i + ") size:" + bytes.Length + " => " + a);
    }

    yield return null;
    // Fix: onFinish is a nullable delegate parameter — the original
    // unconditionally invoked it and would throw NullReferenceException
    // when no callback was supplied.
    onFinish?.Invoke();
}
// Start screen sharing: captures the pixels inside mRect once (after the
// current frame finishes rendering) and pushes them to the Agora engine as
// an external raw video frame. (Comments translated from Chinese.)
IEnumerator shareScreen()
{
    yield return new WaitForEndOfFrame();

    // Read the screen pixels inside mRect into the capture texture.
    mTexture.ReadPixels(mRect, 0, 0);
    // Apply the pixel changes.
    mTexture.Apply();

    // Get the raw texture data as a byte array.
    byte[] bytes = mTexture.GetRawTextureData();
    // Guard: the original computed an unused
    // `Marshal.SizeOf(bytes[0]) * bytes.Length`, which threw
    // IndexOutOfRangeException on an empty buffer; the dead local is removed.
    if (bytes == null || bytes.Length == 0)
    {
        yield break;
    }

    // Query whether an IRtcEngine instance exists.
    IRtcEngine rtc = IRtcEngine.QueryEngine();
    if (rtc != null)
    {
        // Create the external video frame descriptor.
        ExternalVideoFrame externalVideoFrame = new ExternalVideoFrame();
        // Buffer type: raw pixel data.
        externalVideoFrame.type = ExternalVideoFrame.VIDEO_BUFFER_TYPE.VIDEO_BUFFER_RAW_DATA;
        // Pixel format.
        externalVideoFrame.format = ExternalVideoFrame.VIDEO_PIXEL_FORMAT.VIDEO_PIXEL_BGRA;
        // Raw pixel payload.
        externalVideoFrame.buffer = bytes;
        // Frame width in pixels.
        externalVideoFrame.stride = (int)mRect.width;
        // Frame height in pixels.
        externalVideoFrame.height = (int)mRect.height;
        // Trim 10 px from every edge of the pushed frame.
        externalVideoFrame.cropLeft = 10;
        externalVideoFrame.cropTop = 10;
        externalVideoFrame.cropRight = 10;
        externalVideoFrame.cropBottom = 10;
        // Rotation: 0, 90, 180, or 270.
        externalVideoFrame.rotation = 180;
        // Monotonically increasing frame timestamp.
        externalVideoFrame.timestamp = i++;
        // Push the frame; the SDK returns an integer result code.
        int a = rtc.PushVideoFrame(externalVideoFrame);
    }
}
// Captures the pixels inside mRect once (after the current frame finishes
// rendering) and pushes them to the Agora engine as an external raw video
// frame. This variant applies no cropping.
IEnumerator shareScreen()
{
    yield return new WaitForEndOfFrame();

    // Reads the pixels of the rectangle you create.
    mTexture.ReadPixels(mRect, 0, 0);
    // Applies the pixels read from the rectangle to the texture.
    mTexture.Apply();

    // Gets the raw texture data from the texture as an array of bytes.
    byte[] bytes = mTexture.GetRawTextureData();
    // Guard: the original computed an unused
    // `Marshal.SizeOf(bytes[0]) * bytes.Length`, which threw
    // IndexOutOfRangeException on an empty buffer; the dead local is removed.
    if (bytes == null || bytes.Length == 0)
    {
        yield break;
    }

    // Checks whether the IRtcEngine instance exists.
    IRtcEngine rtc = IRtcEngine.QueryEngine();
    if (rtc != null)
    {
        // Creates a new external video frame.
        ExternalVideoFrame externalVideoFrame = new ExternalVideoFrame();
        // Sets the buffer type of the video frame.
        externalVideoFrame.type = ExternalVideoFrame.VIDEO_BUFFER_TYPE.VIDEO_BUFFER_RAW_DATA;
        // Sets the format of the video pixel.
        externalVideoFrame.format = ExternalVideoFrame.VIDEO_PIXEL_FORMAT.VIDEO_PIXEL_BGRA;
        // Applies raw data.
        externalVideoFrame.buffer = bytes;
        // Sets the width (pixel) of the video frame.
        externalVideoFrame.stride = (int)mRect.width;
        // Sets the height (pixel) of the video frame.
        externalVideoFrame.height = (int)mRect.height;
        // No cropping in this variant (crop fields left at their defaults).
        // Rotates the video frame (0, 90, 180, or 270).
        externalVideoFrame.rotation = 180;
        // Monotonically increasing frame timestamp.
        externalVideoFrame.timestamp = i++;
        // Pushes the frame; the SDK returns an integer result code.
        int a = rtc.PushVideoFrame(externalVideoFrame);
    }
}
// Update is called once per frame. When this client is the host and the
// video test scene is active, converts the in-scene RawImage texture to a
// Texture2D and pushes it to the engine as an external raw video frame.
void Update()
{
#if (UNITY_2018_3_OR_NEWER)
    CheckPermission();
#endif
    if (isHost == true && SceneManager.GetActiveScene().name == "TestSceneHelloVideo")
    {
        // Convert the UI texture into a readable Texture2D.
        Texture2D tex2d = TextureToTexture2D(InSceneTex.texture);
        // Guard: skip the frame if the conversion produced nothing
        // (e.g. the RawImage has no texture assigned yet).
        if (tex2d == null)
        {
            return;
        }

        // NOTE(review): if TextureToTexture2D allocates a fresh Texture2D
        // each call, this leaks one texture per frame — confirm the helper's
        // behavior and Destroy() the texture after the push if so.
        ExternalVideoFrame frame = new ExternalVideoFrame();
        frame.stride = tex2d.width;   // frame width in pixels
        frame.height = tex2d.height;  // frame height in pixels
        frame.buffer = tex2d.GetRawTextureData();
        frame.format = ExternalVideoFrame.VIDEO_PIXEL_FORMAT.VIDEO_PIXEL_BGRA;
        frame.type = ExternalVideoFrame.VIDEO_BUFFER_TYPE.VIDEO_BUFFER_RAW_DATA;
        // NOTE(review): frame.timestamp is never set here, unlike the other
        // push paths in this project — verify the SDK tolerates that.
        app.mRtcEngine.PushExternVideoFrame(frame);
    }
}
/// <summary>
/// Pushes an external raw video frame to the RTC engine.
/// Implemented by the concrete engine wrapper.
/// </summary>
/// <param name="frame">Frame descriptor: buffer, pixel format, stride,
/// height, crop, rotation, and timestamp.</param>
/// <returns>An integer result code — presumably 0 for success and a
/// negative error code on failure, per the SDK's convention; confirm
/// against the engine's API documentation.</returns>
public abstract int PushVideoFrame(ExternalVideoFrame frame);
/// <summary>
/// Forwards an external video frame to the wrapped RTC engine.
/// Any result code returned by the wrapped call is not surfaced to the caller.
/// </summary>
/// <param name="externalVideoFrame">The frame descriptor to push.</param>
public void PushVideoFrame(ExternalVideoFrame externalVideoFrame) =>
    _RtcEngine.PushVideoFrame(externalVideoFrame);