Example No. 1
        public void renderFrame(int width, int height, int rotation, int textureName, AndroidJavaObject i420Frame)
        {
            ThreadUtils.RunOnUpdate(() => {
                IntPtr textureId = new IntPtr(textureName);
                if (nativeTexture == null || this.width != width || this.height != height || this.rotation != rotation)
                {
                    CleanUp();
                    this.width         = width;
                    this.height        = height;
                    this.rotation      = rotation;
                    nativeTexture      = Texture2D.CreateExternalTexture(width, height, TextureFormat.RGB565, false, false, textureId);
                    rTexture           = new RenderTexture(Mathf.RoundToInt(width * resolution), Mathf.RoundToInt(height * resolution), 0, RenderTextureFormat.RGB565);
                    rTexture.dimension = UnityEngine.Rendering.TextureDimension.Tex2D;

                    // Optimization: when the rotation is zero, use a simpler pass to avoid computing the cos and sin.
                    usePass = rotation == 0 ? FlipV_OESExternal_To_RGBA_PassIndex : Rotate_FlipV_OESExternal_To_RGBA_PassIndex;
                    VideoDecodeMaterial.SetFloat("rotation", -(float)rotation * Mathf.Deg2Rad);

                    Action<Texture> OnTextureHandler = OnTexture;
                    if (OnTextureHandler != null)
                    {
                        OnTextureHandler(rTexture);
                    }
                }
                else
                {
                    nativeTexture.UpdateExternalTexture(textureId);
                }

                Graphics.Blit(nativeTexture, rTexture, VideoDecodeMaterial, usePass);
                WebRTCAndroid.KillFrame(i420Frame);
            });
        }
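A minimal consumption sketch, not part of the original source: assuming the renderer above exposes OnTexture as a public Action<Texture> event, a caller can pick up the RenderTexture created here and display it on a Unity UI RawImage. The videoRenderer and rawImage names are illustrative assumptions.

        // Subscribe once, e.g. from a MonoBehaviour's OnEnable.
        // OnTexture is raised on the Unity update thread whenever the
        // RenderTexture is (re)created because the frame size or rotation changed.
        videoRenderer.OnTexture += texture =>
        {
            rawImage.texture = texture; // show the latest decoded video frames
        };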
Example No. 2
        public void StartCapture()
        {
            StopCapture();
            switch (this._source)
            {
            case CaptureSource.CameraBack:
                ThreadUtils.RunOnUpdate(() => {
                    WebRTCAndroid.WebRTC_JavaClass.CallStatic("StartCameraCapture", false, _callback);
                });
                break;

            case CaptureSource.CameraFront:
                ThreadUtils.RunOnUpdate(() => {
                    WebRTCAndroid.WebRTC_JavaClass.CallStatic("StartCameraCapture", true, _callback);
                });
                break;

            case CaptureSource.Screen:
                ThreadUtils.RunOnUpdate(() => {
                    WebRTCAndroid.WebRTC_JavaClass.CallStatic("StartScreenCapture", _callback);
                });
                break;

            case CaptureSource.RenderTexture:
                throw new NotImplementedException("CaptureSource.RenderTexture is not yet supported");

            default:
                throw new NotSupportedException("CaptureSource not valid");
            }
        }
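A hedged usage sketch for StartCapture: only StartCapture and the CaptureSource values come from the snippet above; the VideoCapturer type name and the Source property used to set _source are assumptions for illustration.

        // Pick the front camera and start capturing. The actual Java call
        // ("StartCameraCapture", true, _callback) is deferred to the Unity
        // update thread by ThreadUtils.RunOnUpdate.
        var capturer = new VideoCapturer();          // hypothetical type name
        capturer.Source = CaptureSource.CameraFront; // hypothetical setter for _source
        capturer.StartCapture();

        // CaptureSource.Screen is started the same way, while
        // CaptureSource.RenderTexture currently throws NotImplementedException.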
Example No. 3
        public void renderFrame(int width, int height, int textureName, AndroidJavaObject i420Frame)
        {
            ThreadUtils.RunOnUpdate(() => {
                if (!WebRTCmat.shader.isSupported)
                {
                    onVideoCapturerError("Unsupported shader");
                    return;
                }

                IntPtr textureId = new IntPtr(textureName);
                if (nativeTexture == null || this.width != width || this.height != height)
                {
                    CleanUp();
                    this.width    = width;
                    this.height   = height;
                    nativeTexture = Texture2D.CreateExternalTexture(width, height, TextureFormat.YUY2, false, false, textureId);
                    rTexture      = new RenderTexture(width, height, 0, RenderTextureFormat.ARGB32);

                    Action<Texture> OnTextureHandler = OnTexture;
                    if (OnTextureHandler != null)
                    {
                        OnTextureHandler(rTexture);
                    }
                }
                else
                {
                    nativeTexture.UpdateExternalTexture(textureId);
                }

                Graphics.Blit(nativeTexture, rTexture, WebRTCmat);

                WebRTCAndroid.KillFrame(i420Frame);
            });
        }
Example No. 4
 public void onVideoCapturerStarted(AndroidJavaObject videoCapturer, AndroidJavaObject videoTrack)
 {
     ThreadUtils.RunOnUpdate(() => {
         Action<AndroidJavaObject, AndroidJavaObject> OnVideoCapturerStartedHandler = OnVideoCapturerStarted;
         if (OnVideoCapturerStartedHandler != null)
         {
             OnVideoCapturerStartedHandler(videoCapturer, videoTrack);
         }
     });
 }
Example No. 5
 public void onVideoCapturerStopped()
 {
     ThreadUtils.RunOnUpdate(() => {
         CleanUp();
         Action OnVideoCapturerStoppedHandler = OnVideoCapturerStopped;
         if (OnVideoCapturerStoppedHandler != null)
         {
             OnVideoCapturerStoppedHandler();
         }
     });
 }
Example No. 6
 public void onVideoCapturerError(string error)
 {
     ThreadUtils.RunOnUpdate(() => {
         CleanUp();
         Action<string> OnVideoCapturerErrorHandler = OnVideoCapturerError;
         if (OnVideoCapturerErrorHandler != null)
         {
             OnVideoCapturerErrorHandler(error);
         }
     });
 }
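Examples 4 through 6 marshal the native capturer callbacks onto the Unity update thread and re-raise them as C# delegates (OnVideoCapturerStarted, OnVideoCapturerStopped, OnVideoCapturerError). A short subscription sketch, reusing the hypothetical capturer instance from the earlier sketch:

        capturer.OnVideoCapturerStarted += (videoCapturer, videoTrack) =>
            Debug.Log("Video capture started");

        capturer.OnVideoCapturerStopped += () =>
            Debug.Log("Video capture stopped; renderer resources were cleaned up");

        capturer.OnVideoCapturerError += error =>
            Debug.LogError("Video capture error: " + error);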
Example No. 7
 static void setUnityContext()
 {
     ThreadUtils.RunOnUpdate(() => {
         UnityEGLContext_JavaClass.CallStatic("setUnityContext");
     });
 }