/// <summary>
/// Updates the remote video. If the frame is null it will hide the video image.
/// </summary>
/// <param name="frame">Remote video frame, or null to show the "no camera" texture</param>
/// <param name="format">Pixel format of the delivered frame</param>
public virtual void UpdateRemoteTexture(IFrame frame, FramePixelFormat format)
{
    if (uRemoteVideoImage == null)
        return;

    if (frame == null)
    {
        //no frame -> fall back to the placeholder texture and clear rotation
        mHasRemoteVideo = false;
        uRemoteVideoImage.texture = uNoCameraTexture;
        uRemoteVideoImage.transform.rotation = Quaternion.Euler(0, 0, 0);
        return;
    }

    UnityMediaHelper.UpdateTexture(frame, ref mRemoteVideoTexture);
    uRemoteVideoImage.texture = mRemoteVideoTexture;

    //watch out: due to conversion from WebRTC to Unity format the image is flipped (top to bottom)
    //this also inverts the rotation
    uRemoteVideoImage.transform.rotation = Quaternion.Euler(0, 0, frame.Rotation * -1);

    //track current remote video state for status queries elsewhere
    mHasRemoteVideo = true;
    mRemoteVideoWidth = frame.Width;
    mRemoteVideoHeight = frame.Height;
    mRemoteVideoFormat = format;
    mRemoteFrameCounter++;
}
/// <summary>
/// Updates the local video. If the frame is null it will hide the video image
/// </summary>
/// <param name="frame">Local camera frame, or null (e.g. on app shutdown) to hide the image</param>
/// <param name="format">Pixel format of the delivered frame</param>
public virtual void UpdateLocalTexture(IFrame frame, FramePixelFormat format)
{
    if (uLocalVideoImage == null)
        return;

    if (frame == null)
    {
        //app shutdown. reset values
        mHasLocalVideo = false;
        uLocalVideoImage.texture = null;
        uLocalVideoImage.transform.rotation = Quaternion.Euler(0, 0, 0);
        uLocalVideoImage.gameObject.SetActive(false);
        return;
    }

    UnityMediaHelper.UpdateTexture(frame, ref mLocalVideoTexture);
    uLocalVideoImage.texture = mLocalVideoTexture;

    //make sure the image is visible once the first frame arrives
    if (!uLocalVideoImage.gameObject.activeSelf)
    {
        uLocalVideoImage.gameObject.SetActive(true);
    }

    //apply rotation
    //watch out uLocalVideoImage should be scaled -1 X to make the local camera appear mirrored
    //it should also be scaled -1 Y because Unity reads the image from bottom to top
    uLocalVideoImage.transform.rotation = Quaternion.Euler(0, 0, frame.Rotation);

    //track current local video state for status queries elsewhere
    mHasLocalVideo = true;
    mLocalFrameCounter++;
    mLocalVideoWidth = frame.Width;
    mLocalVideoHeight = frame.Height;
    mLocalVideoFormat = format;
}
/// <summary>
/// Copies a video frame into <paramref name="videoTexture"/> and assigns it to the
/// UITexture component on <paramref name="videoObject"/>. A null frame resets the
/// target (texture cleared, rotation zeroed), e.g. on app shutdown.
/// </summary>
/// <param name="videoObject">GameObject carrying the UITexture that displays the video</param>
/// <param name="flipped">True to additionally mirror the image around the Y axis</param>
/// <param name="videoTexture">Texture reused/recreated by UnityMediaHelper as frames arrive</param>
/// <param name="frame">Incoming frame, or null to reset the display</param>
/// <param name="format">Pixel format of the frame (only used for the first-frame log entry)</param>
private void UpdateTexture(GameObject videoObject, bool flipped, ref Texture2D videoTexture, IFrame frame, FramePixelFormat format)
{
    //cache the component lookup instead of repeating GetComponent in each branch
    UITexture uiTexture = videoObject.GetComponent<UITexture>();

    if (frame != null)
    {
        if (videoTexture == null)
        {
            //first frame for this target -> log the resolution/format once
            DebugLog.AddEntry("Video texture: " + frame.Width + "x" + frame.Height + " Format:" + format);
        }
        UnityMediaHelper.UpdateTexture(frame, ref videoTexture);
        uiTexture.mainTexture = videoTexture;

        //the frame arrives upside down (WebRTC vs Unity row order) -> rotate 180 around Z;
        //flipped additionally mirrors via a 180 Y rotation
        videoObject.transform.rotation = flipped
            ? Quaternion.Euler(0, 180f, 180f)
            : Quaternion.Euler(0, 0, 180f);
    }
    else
    {
        //app shutdown. reset values
        uiTexture.mainTexture = null;
        videoObject.transform.rotation = Quaternion.Euler(0, 0, 0);
    }
}
/// <summary>
/// Updates the remote video. If the frame is null it will hide the video image.
/// Drives both the UI RawImage and the two HoloKit eye planes: each plane shows
/// one horizontal half of the remote texture (left half / right half).
/// </summary>
/// <param name="frame">Remote video frame, or null to show the "no camera" texture</param>
/// <param name="format">Pixel format of the delivered frame</param>
public virtual void UpdateRemoteTexture(IFrame frame, FramePixelFormat format)
{
    if (uRemoteVideoImage == null)
        return;

    //hoist the per-frame component/material lookups; the original resolved
    //GetComponent<Renderer>().material up to six times per call
    Material leftEyeMat = FloatingPlaneLeft.GetComponent<Renderer>().material;
    Material rightEyeMat = FloatingPlaneRight.GetComponent<Renderer>().material;

    if (frame != null)
    {
        UnityMediaHelper.UpdateTexture(frame, ref mRemoteVideoTexture);

        //Implement Video texture in UI mode
        //rotation is inverted because the WebRTC -> Unity conversion flips the image
        uRemoteVideoImage.texture = mRemoteVideoTexture;
        uRemoteVideoImage.transform.rotation = Quaternion.Euler(new Vector3(0, 0, frame.Rotation * -1));

        //Implement Video texture in Holokit mode
        leftEyeMat.mainTexture = mRemoteVideoTexture;
        rightEyeMat.mainTexture = mRemoteVideoTexture;

        //Shift texture for Lefteye_plane & Righteye_plane:
        //each eye samples half the width; the right eye starts at u = 0.5
        leftEyeMat.mainTextureScale = new Vector2(0.5f, 1);
        rightEyeMat.mainTextureScale = new Vector2(0.5f, 1);
        rightEyeMat.mainTextureOffset = new Vector2(0.5f, 0);

        mHasRemoteVideo = true;
        mRemoteVideoWidth = frame.Width;
        mRemoteVideoHeight = frame.Height;
        mRemoteVideoFormat = format;
        mRemoteFrameCounter++;
    }
    else
    {
        //no frame -> show the placeholder on every surface
        mHasRemoteVideo = false;
        uRemoteVideoImage.texture = uNoCameraTexture;
        leftEyeMat.mainTexture = uNoCameraTexture;
        rightEyeMat.mainTexture = uNoCameraTexture;
    }
}
/// <summary>
/// Handler for events raised by the call object. Drives the simple call state
/// machine and, on FrameUpdate, copies incoming frames into the UI RawImages.
/// </summary>
/// <param name="sender">Event source (unused)</param>
/// <param name="args">Event data; a FrameUpdateEventArgs for FrameUpdate events</param>
private void Call_CallEvent(object sender, CallEventArgs args)
{
    switch (args.Type)
    {
        case CallEventType.ConfigurationComplete:
            //STEP3: configuration completed -> try calling
            Call();
            break;
        case CallEventType.ConfigurationFailed:
            Error("Accessing audio / video failed");
            break;
        case CallEventType.ConnectionFailed:
            Error("ConnectionFailed");
            break;
        case CallEventType.ListeningFailed:
            Error("ListeningFailed");
            break;
        case CallEventType.CallAccepted:
            //STEP5: We are connected
            mState = SimpleCallState.InCall;
            Log("Connection established");
            break;
        case CallEventType.CallEnded:
            mState = SimpleCallState.Ended;
            Log("Call ended.");
            break;
        case CallEventType.FrameUpdate:
            //STEP6: until the end of the call we receive frames here
            //Note that this is being called after Configure already for local frames even before
            //a connection is established!
            //This is triggered each video frame for local and remote video images
            FrameUpdateEventArgs frameArgs = args as FrameUpdateEventArgs;
            if (frameArgs == null)
            {
                //guard against an unexpected event payload (the original cast unchecked)
                break;
            }
            if (frameArgs.ConnectionId == ConnectionId.INVALID)
            {
                //invalid connection id -> this is the local camera feed
                bool textureCreated = UnityMediaHelper.UpdateRawImage(_LocalImage, frameArgs.Frame);
                if (textureCreated)
                {
                    //null-safe: the texture might not be a Texture2D
                    Texture2D tex = _LocalImage.texture as Texture2D;
                    if (tex != null)
                    {
                        Log("Local Texture(s) created " + tex.width + "x" + tex.height + " format: " + frameArgs.Frame.Format);
                    }
                }
            }
            else
            {
                bool textureCreated = UnityMediaHelper.UpdateRawImage(_RemoteImage, frameArgs.Frame);
                if (textureCreated)
                {
                    Texture2D tex = _RemoteImage.texture as Texture2D;
                    if (tex != null)
                    {
                        Log("Remote Texture(s) created " + tex.width + "x" + tex.height + " format: " + frameArgs.Frame.Format);
                    }
                }
            }
            break;
    }
}
/// <summary>
/// Applies a video frame to this object's MeshRenderer material.
/// A null frame is ignored (the last texture stays visible).
/// </summary>
/// <param name="frame">Incoming video frame, or null to skip the update</param>
/// <param name="format">Pixel format of the frame (currently unused; kept for interface parity)</param>
public void SetFrame(IFrame frame, FramePixelFormat format)
{
    //guard clause; also removed the leftover per-frame Debug.Log("frameee") spam
    if (frame == null)
        return;

    UnityMediaHelper.UpdateTexture(frame, ref VideoTexture);
    mr.material.mainTexture = VideoTexture;
}
/// <summary>
/// Handler for events raised by the call object. Drives the simple call state
/// machine and, on FrameUpdate, copies incoming frames into the local/remote textures.
/// </summary>
/// <param name="sender">Event source (unused)</param>
/// <param name="args">Event data; a FrameUpdateEventArgs for FrameUpdate events</param>
private void Call_CallEvent(object sender, CallEventArgs args)
{
    switch (args.Type)
    {
        case CallEventType.ConfigurationComplete:
            //STEP3: configuration completed -> try calling
            Call();
            break;
        case CallEventType.ConfigurationFailed:
            Error("Accessing audio / video failed");
            break;
        case CallEventType.ConnectionFailed:
            Error("ConnectionFailed");
            break;
        case CallEventType.ListeningFailed:
            Error("ListeningFailed");
            break;
        case CallEventType.CallAccepted:
            //STEP5: We are connected
            mState = SimpleCallState.InCall;
            Log("Connection established");
            break;
        case CallEventType.CallEnded:
            mState = SimpleCallState.Ended;
            Log("Call ended.");
            break;
        case CallEventType.FrameUpdate:
            //STEP6: until the end of the call we receive frames here
            //Note that this is being called after Configure already for local frames even before
            //a connection is established!
            //This is triggered each video frame for local and remote video images
            FrameUpdateEventArgs frameArgs = args as FrameUpdateEventArgs;
            if (frameArgs == null)
            {
                //guard against an unexpected event payload (the original cast unchecked)
                break;
            }
            if (frameArgs.ConnectionId == ConnectionId.INVALID)
            {
                //invalid connection id means this is a local frame
                //copy the raw pixels into a unity texture
                bool textureCreated = UnityMediaHelper.UpdateTexture(ref mLocalVideo, frameArgs.Frame, frameArgs.Format);
                if (textureCreated)
                {
                    if (_LocalImage != null)
                    {
                        _LocalImage.texture = mLocalVideo;
                    }
                    Log("Local Texture created " + frameArgs.Frame.Width + "x" + frameArgs.Frame.Height + " format: " + frameArgs.Format);
                }
            }
            else
            {
                //remote frame. For conference calls we would get multiple remote frames with different id's
                bool textureCreated = UnityMediaHelper.UpdateTexture(ref mRemoteVideo, frameArgs.Frame, frameArgs.Format);
                if (textureCreated)
                {
                    if (_RemoteImage != null)
                    {
                        _RemoteImage.texture = mRemoteVideo;
                    }
                    Log("Remote Texture created " + frameArgs.Frame.Width + "x" + frameArgs.Frame.Height + " format: " + frameArgs.Format);
                }
            }
            break;
    }
}