Example 1
0
 /// <summary>
 /// Unity lifecycle hook: runs before any Start and right after a prefab is
 /// instantiated. Resets the capture status and builds the internal blit
 /// material.
 /// </summary>
 private new void Awake()
 {
     base.Awake();
     status = VideoCaptureCtrl.StatusType.NOT_START;
     // Copy material backed by the hidden blit shader; kept out of the
     // scene hierarchy and excluded from save operations.
     blitMaterial = new Material(Shader.Find("Hidden/BlitCopy"))
     {
         hideFlags = HideFlags.HideAndDontSave
     };
 }
Example 2
0
 /// <summary>
 /// Frame encoding thread impl.
 /// Drains frameQueue into the native writer (RTMP streaming or local file,
 /// depending on mode) until capture is no longer STARTED and the queue is
 /// empty, then closes the native session, fires OnComplete and marks the
 /// session FINISH.
 /// </summary>
 private void FrameEncodeThreadFunction()
 {
     // Keep running while capture is active, or while frames queued before
     // a stop request are still waiting to be encoded.
     while (status == VideoCaptureCtrl.StatusType.STARTED || frameQueue.Count > 0)
     {
         // While PAUSED, no frames are encoded (they keep accumulating).
         // NOTE(review): frameQueue.Count is read outside the lock while a
         // producer may be enqueueing concurrently -- confirm the producer
         // locks the same object when it touches the queue.
         if (frameQueue.Count > 0 && status != VideoCaptureCtrlBase.StatusType.PAUSED)
         {
             FrameData frame;
             // NOTE(review): lock(this) is discouraged (any external code
             // holding the instance can take the same lock); a private lock
             // object shared with the enqueueing side would be safer.
             lock (this)
             {
                 frame = frameQueue.Dequeue();
             }
             // Hand the raw pixel buffer to the matching native writer.
             if (mode == ModeType.LIVE_STREAMING)
             {
                 VideoStreamingLib_WriteFrames(libAPI, frame.pixels, frame.count);
             }
             else
             {
                 VideoCaptureLib_WriteFrames(libAPI, frame.pixels, frame.count);
             }
             encodedFrameCount++;
             if (VideoCaptureCtrl.instance.debug)
             {
                 Debug.Log(
                     "[VideoCapture::FrameEncodeThreadFunction] Encoded " +
                     encodedFrameCount + " frames. " + frameQueue.Count +
                     " frames remaining."
                     );
             }
         }
         else
         {
             // Wait 1 second for captured frame.
             Thread.Sleep(1000);
         }
     }
     // Notify native encoding process finish.
     if (mode == ModeType.LIVE_STREAMING)
     {
         VideoStreamingLib_Close(libAPI);
     }
     else
     {
         VideoCaptureLib_Close(libAPI);
     }
     // Notify caller video capture complete.
     if (eventDelegate.OnComplete != null)
     {
         eventDelegate.OnComplete();
     }
     if (VideoCaptureCtrl.instance.debug)
     {
         Debug.Log("[VideoCapture::FrameEncodeThreadFunction] Encode " +
                   "process finish!");
     }
     // Update current status.
     status = VideoCaptureCtrl.StatusType.FINISH;
 }
Example 3
0
 /// <summary>
 /// Pause capture video.
 /// Flips the session between STARTED and PAUSED; any other state is left
 /// untouched.
 /// </summary>
 public override void ToggleCapture()
 {
     if (status == VideoCaptureCtrlBase.StatusType.PAUSED)
     {
         // Resume a paused session.
         status = VideoCaptureCtrlBase.StatusType.STARTED;
     }
     else if (status == VideoCaptureCtrlBase.StatusType.STARTED)
     {
         // Pause a running session.
         status = VideoCaptureCtrlBase.StatusType.PAUSED;
     }
 }
 /// <summary>
 /// Stop capture video.
 /// Ignored (with a warning) unless a session is currently started or
 /// paused. Restores the time settings changed for offline rendering and
 /// moves the session to STOPPED.
 /// </summary>
 public override void StopCapture()
 {
     bool sessionActive = status == VideoCaptureCtrl.StatusType.STARTED ||
                          status == VideoCaptureCtrl.StatusType.PAUSED;
     if (!sessionActive)
     {
         Debug.LogWarning("[VideoCapture::StopCapture] capture session not start yet!");
         return;
     }
     if (offlineRender)
     {
         // Restore maximumDeltaTime states.
         Time.maximumDeltaTime = originalMaximumDeltaTime;
     }
     // Update current status.
     status = VideoCaptureCtrl.StatusType.STOPPED;
 }
Example 5
0
        /// <summary>
        /// Start capture video.
        /// Validates the session state and settings, prepares the render
        /// targets for the selected format/stereo configuration, acquires the
        /// native encoder (or RTMP streaming) handle, and spawns the
        /// background encoding thread. On success, status becomes STARTED.
        /// </summary>
        public override void StartCapture()
        {
            // Check if we can start capture session.
            if (status != VideoCaptureCtrl.StatusType.NOT_START &&
                status != VideoCaptureCtrl.StatusType.FINISH)
            {
                Debug.LogWarning("[VideoCapture::StartCapture] Previous " +
                                 "capture not finish yet!");
                return;
            }
            // Live streaming requires a valid rtmp:// endpoint.
            if (mode == ModeType.LIVE_STREAMING)
            {
                if (!StringUtils.IsRtmpAddress(streamingAddress))
                {
                    Debug.LogWarning(
                        "[VideoCapture::StartCapture] Video live streaming " +
                        "require rtmp server address setup!"
                        );
                    return;
                }
            }
            // Panorama capture always needs a dedicated camera; force it on.
            if (format == FormatType.PANORAMA && !isDedicated)
            {
                Debug.LogWarning(
                    "[VideoCapture::StartCapture] Capture equirectangular " +
                    "video always require dedicated camera!"
                    );
                isDedicated = true;
            }
            // Local capture writes a randomly named mp4 into the save folder.
            if (mode == ModeType.LOCAL)
            {
                filePath = PathConfig.SaveFolder + StringUtils.GetMp4FileName(StringUtils.GetRandomString(5));
            }
            // Create a RenderTexture with desired frame size for dedicated
            // camera capture to store pixels in GPU.
            // Use Camera.targetTexture as RenderTexture if already existed.
            if (captureCamera.targetTexture != null)
            {
                // Use binded rendertexture will ignore antiAliasing config.
                frameRenderTexture    = captureCamera.targetTexture;
                isCreateRenderTexture = false;
            }
            else
            {
                // Create a rendertexture for video capture.
                // Size it according to the desired video frame size.
                frameRenderTexture = new RenderTexture(frameWidth, frameHeight, 24);
                frameRenderTexture.antiAliasing = antiAliasing;
                frameRenderTexture.wrapMode     = TextureWrapMode.Clamp;
                frameRenderTexture.filterMode   = FilterMode.Trilinear;
                frameRenderTexture.hideFlags    = HideFlags.HideAndDontSave;
                // Make sure the rendertexture is created.
                frameRenderTexture.Create();
                isCreateRenderTexture = true;
                if (isDedicated)
                {
                    captureCamera.targetTexture = frameRenderTexture;
                }
            }
            // For capturing normal 2D video, use frameTexture(Texture2D) for
            // intermediate cpu saving, frameRenderTexture(RenderTexture) store
            // the pixels read by frameTexture.
            if (format == FormatType.NORMAL)
            {
                if (isDedicated)
                {
                    // Set the aspect ratio of the camera to match the rendertexture.
                    captureCamera.aspect        = frameWidth / ((float)frameHeight);
                    captureCamera.targetTexture = frameRenderTexture;
                }
            }
            // For capture panorama video:
            // EQUIRECTANGULAR: use frameCubemap(Cubemap) for intermediate cpu
            // saving.
            // CUBEMAP: use frameTexture(Texture2D) for intermediate cpu saving.
            else if (format == FormatType.PANORAMA)
            {
                copyReverseMaterial     = Resources.Load("Materials/CopyReverse") as Material;
                cubemap2Equirectangular = Resources.Load("Materials/Cubemap2Equirectangular") as Material;
                if (panoramaProjection == PanoramaProjectionType.CUBEMAP)
                {
                    // Create render cubemap.
                    frameCubemap = new Cubemap(cubemapSize, TextureFormat.RGB24, false);
                    // Setup camera as required for panorama capture.
                    captureCamera.aspect      = 1.0f;
                    captureCamera.fieldOfView = 90;
                }
                else
                {
                    copyReverseMaterial.DisableKeyword("REVERSE_TOP_BOTTOM");
                    copyReverseMaterial.DisableKeyword("REVERSE_LEFT_RIGHT");
                    captureCamera.fieldOfView = 90;
                    // Create cube-dimension render texture for the 6 faces.
                    panoramaTempRenderTexture                  = new RenderTexture(cubemapSize, cubemapSize, 24);
                    panoramaTempRenderTexture.dimension        = UnityEngine.Rendering.TextureDimension.Cube;
                    panoramaTempRenderTexture.useMipMap        = false;
                    panoramaTempRenderTexture.autoGenerateMips = false;
                    panoramaTempRenderTexture.antiAliasing     = antiAliasing;
                    panoramaTempRenderTexture.wrapMode         = TextureWrapMode.Clamp;
                    panoramaTempRenderTexture.filterMode       = FilterMode.Bilinear;
                    if (captureGUI)
                    {
                        // Per-face target used when GUI must be composited in;
                        // the cube texture then drops MSAA (antiAliasing = 1).
                        faceTarget = new RenderTexture(cubemapSize, cubemapSize, 24);
                        faceTarget.antiAliasing                = antiAliasing;
                        faceTarget.isPowerOfTwo                = true;
                        faceTarget.wrapMode                    = TextureWrapMode.Clamp;
                        faceTarget.filterMode                  = FilterMode.Bilinear;
                        faceTarget.autoGenerateMips            = false;
                        panoramaTempRenderTexture.antiAliasing = 1;
                    }
                }
            }
            if (stereo != StereoType.NONE)
            {
                // Init stereo video material.
                if (stereoPackMaterial == null)
                {
                    stereoPackMaterial = new Material(Shader.Find("RockVR/Stereoscopic"));
                }
                stereoPackMaterial.hideFlags = HideFlags.HideAndDontSave;
                stereoPackMaterial.DisableKeyword("STEREOPACK_TOP");
                stereoPackMaterial.DisableKeyword("STEREOPACK_BOTTOM");
                stereoPackMaterial.DisableKeyword("STEREOPACK_LEFT");
                stereoPackMaterial.DisableKeyword("STEREOPACK_RIGHT");
                // Init the temporary stereo target texture.
                stereoTargetTexture = new RenderTexture(frameWidth, frameHeight, 24);
                stereoTargetTexture.isPowerOfTwo     = true;
                stereoTargetTexture.dimension        = UnityEngine.Rendering.TextureDimension.Tex2D;
                stereoTargetTexture.useMipMap        = false;
                stereoTargetTexture.antiAliasing     = antiAliasing;
                stereoTargetTexture.wrapMode         = TextureWrapMode.Clamp;
                stereoTargetTexture.filterMode       = FilterMode.Trilinear;
                stereoTargetTexture.autoGenerateMips = false;
            }
            // Init the final stereo texture.
            finalTargetTexture = new RenderTexture(frameWidth, frameHeight, 24);
            finalTargetTexture.isPowerOfTwo     = true;
            finalTargetTexture.dimension        = UnityEngine.Rendering.TextureDimension.Tex2D;
            finalTargetTexture.useMipMap        = false;
            finalTargetTexture.antiAliasing     = antiAliasing;
            finalTargetTexture.wrapMode         = TextureWrapMode.Clamp;
            finalTargetTexture.filterMode       = FilterMode.Trilinear;
            finalTargetTexture.autoGenerateMips = false;
            // Pixels stored in frameRenderTexture(RenderTexture) always read by frameTexture(Texture2D).
            frameTexture            = new Texture2D(frameWidth, frameHeight, TextureFormat.RGB24, false);
            frameTexture.hideFlags  = HideFlags.HideAndDontSave;
            frameTexture.wrapMode   = TextureWrapMode.Clamp;
            frameTexture.filterMode = FilterMode.Trilinear;
            frameTexture.anisoLevel = 0;
            // Reset temporary variables.
            capturingTime      = 0f;
            capturedFrameCount = 0;
            encodedFrameCount  = 0;
            frameQueue         = new Queue <FrameData>();
            // Projection info for native plugin.
            int proj = 0;

            if (format == FormatType.PANORAMA)
            {
                if (panoramaProjection == PanoramaProjectionType.EQUIRECTANGULAR)
                {
                    proj = 1;
                }
                if (panoramaProjection == PanoramaProjectionType.CUBEMAP)
                {
                    proj = 2;
                }
            }
            // NOTE(review): stereo packing overwrites the panorama projection
            // value -- presumably the native plugin treats them as mutually
            // exclusive; confirm before combining stereo with panorama.
            if (stereo == StereoType.TOP_BOTTOM)
            {
                proj = 3;
            }
            else if (stereo == StereoType.LEFT_RIGHT)
            {
                proj = 4;
            }
            if (mode == ModeType.LIVE_STREAMING)
            {
                libAPI = VideoStreamingLib_Get(
                    frameWidth,
                    frameHeight,
                    targetFramerate,
                    proj,
                    streamingAddress,
                    PathConfig.ffmpegPath);
            }
            else
            {
                libAPI = VideoCaptureLib_Get(
                    frameWidth,
                    frameHeight,
                    targetFramerate,
                    proj,
                    filePath,
                    PathConfig.ffmpegPath);
            }
            if (libAPI == System.IntPtr.Zero)
            {
                Debug.LogWarning("[VideoCapture::StartCapture] Get native " +
                                 "capture api failed!");
                return;
            }
            if (offlineRender)
            {
                // Backup maximumDeltaTime states.
                originalMaximumDeltaTime = Time.maximumDeltaTime;
                Time.maximumDeltaTime    = Time.fixedDeltaTime;
            }
            if (encodeThread != null)
            {
                // NOTE(review): Thread.Abort is unsafe and obsolete on modern
                // runtimes; a cooperative stop flag would be preferable.
                encodeThread.Abort();
            }
            // Start encoding thread at lowest priority so it never starves
            // the render thread.
            encodeThread              = new Thread(FrameEncodeThreadFunction);
            encodeThread.Priority     = System.Threading.ThreadPriority.Lowest;
            encodeThread.IsBackground = true;
            encodeThread.Start();
            // Update current status.
            status = VideoCaptureCtrl.StatusType.STARTED;
        }
 /// <summary>
 /// Called before any Start functions and also just after a prefab is instantiated.
 /// Runs the base initialization, then resets the capture status.
 /// </summary>
 private new void Awake()
 {
     base.Awake();
     // No capture session has been started yet.
     status = VideoCaptureCtrl.StatusType.NOT_START;
 }
        /// <summary>
        /// Start capture video.
        /// Validates the session state and settings, prepares the render
        /// targets for the selected capture format, acquires the native
        /// encoder (or RTMP streaming) handle, and spawns the background
        /// encoding thread. On success, status becomes STARTED.
        /// </summary>
        public override void StartCapture()
        {
            // Check if we can start capture session.
            if (status != VideoCaptureCtrl.StatusType.NOT_START &&
                status != VideoCaptureCtrl.StatusType.FINISH)
            {
                Debug.LogWarning("[VideoCapture::StartCapture] Previous " +
                                 "capture not finish yet!");
                return;
            }
            // Live streaming requires a valid rtmp:// endpoint.
            if (mode == ModeType.LIVE_STREAMING)
            {
                if (!StringUtils.IsRtmpAddress(streamingAddress))
                {
                    Debug.LogWarning(
                        "[VideoCapture::StartCapture] Video live streaming " +
                        "require rtmp server address setup!"
                        );
                    return;
                }
            }
            // Panorama capture always needs a dedicated camera; force it on.
            if (format == FormatType.PANORAMA && !isDedicated)
            {
                Debug.LogWarning(
                    "[VideoCapture::StartCapture] Capture equirectangular " +
                    "video always require dedicated camera!"
                    );
                isDedicated = true;
            }
            // Local capture writes a randomly named mp4 into the save folder.
            if (mode == ModeType.LOCAL)
            {
                path = PathConfig.saveFolder + StringUtils.GetMp4FileName(StringUtils.GetRandomString(5));
            }
            // Create a RenderTexture with desired frame size for dedicated
            // camera capture to store pixels in GPU.
            if (isDedicated)
            {
                // Use Camera.targetTexture as RenderTexture if already existed.
                if (captureCamera.targetTexture != null)
                {
                    // Use binded rendertexture will ignore antiAliasing config.
                    frameRenderTexture = captureCamera.targetTexture;
                }
                else
                {
                    // Create a rendertexture for video capture.
                    // Size it according to the desired video frame size.
                    frameRenderTexture = new RenderTexture(frameWidth, frameHeight, 24);
                    frameRenderTexture.antiAliasing = antiAliasing;
                    frameRenderTexture.wrapMode     = TextureWrapMode.Clamp;
                    frameRenderTexture.filterMode   = FilterMode.Trilinear;
                    frameRenderTexture.anisoLevel   = 0;
                    frameRenderTexture.hideFlags    = HideFlags.HideAndDontSave;
                    // Make sure the rendertexture is created.
                    frameRenderTexture.Create();
                    captureCamera.targetTexture = frameRenderTexture;
                }
            }
            // For capturing normal 2D video, use frameTexture(Texture2D) for
            // intermediate cpu saving, frameRenderTexture(RenderTexture) store
            // the pixels read by frameTexture.
            if (format == FormatType.NORMAL)
            {
                if (isDedicated)
                {
                    // Set the aspect ratio of the camera to match the rendertexture.
                    captureCamera.aspect        = frameWidth / ((float)frameHeight);
                    captureCamera.targetTexture = frameRenderTexture;
                }
            }
            // For capture panorama video:
            // EQUIRECTANGULAR: use frameCubemap(Cubemap) for intermediate cpu
            // saving.
            // CUBEMAP: use frameTexture(Texture2D) for intermediate cpu saving.
            else if (format == FormatType.PANORAMA)
            {
                // Create render cubemap.
                frameCubemap = new Cubemap(cubemapSize, TextureFormat.RGB24, false);
                // Setup camera as required for panorama capture.
                captureCamera.aspect      = 1.0f;
                captureCamera.fieldOfView = 90;
            }
            // Pixels stored in frameRenderTexture(RenderTexture) always read by frameTexture(Texture2D).
            // NORMAL:
            // camera render -> frameRenderTexture -> frameTexture -> frameQueue
            // CUBEMAP:
            // 6 cameras render -> 6 faceRenderTexture -> frameTexture -> frameQueue
            // EQUIRECTANGULAR:
            // 6 camera render -> 6 faceRenderTexture-> frameCubemap -> Cubemap2Equirect ->
            // frameRenderTexture -> frameTexture -> frameQueue
            frameTexture            = new Texture2D(frameWidth, frameHeight, TextureFormat.RGB24, false);
            frameTexture.hideFlags  = HideFlags.HideAndDontSave;
            frameTexture.wrapMode   = TextureWrapMode.Clamp;
            frameTexture.filterMode = FilterMode.Trilinear;
            frameTexture.anisoLevel = 0;
            // Reset temporary variables.
            capturingTime      = 0f;
            capturedFrameCount = 0;
            encodedFrameCount  = 0;
            frameQueue         = new Queue <FrameData>();
            // Projection info for native plugin.
            int proj = 0;

            if (format == FormatType.PANORAMA)
            {
                if (panoramaProjection == PanoramaProjectionType.EQUIRECTANGULAR)
                {
                    proj = 1;
                }
                if (panoramaProjection == PanoramaProjectionType.CUBEMAP)
                {
                    proj = 2;
                }
            }
            if (mode == ModeType.LIVE_STREAMING)
            {
                libAPI = LibVideoStreamingAPI_Get(
                    frameWidth,
                    frameHeight,
                    targetFramerate,
                    proj,
                    streamingAddress,
                    PathConfig.ffmpegPath);
            }
            else
            {
                libAPI = LibVideoCaptureAPI_Get(
                    frameWidth,
                    frameHeight,
                    targetFramerate,
                    proj,
                    path,
                    PathConfig.ffmpegPath);
            }
            if (libAPI == System.IntPtr.Zero)
            {
                Debug.LogWarning("[VideoCapture::StartCapture] Get native " +
                                 "capture api failed!");
                return;
            }
            if (offlineRender)
            {
                // Backup maximumDeltaTime states.
                originalMaximumDeltaTime = Time.maximumDeltaTime;
                Time.maximumDeltaTime    = Time.fixedDeltaTime;
            }
            // Start encoding thread at lowest priority so it never starves
            // the render thread.
            encodeThread              = new Thread(FrameEncodeThreadFunction);
            encodeThread.Priority     = System.Threading.ThreadPriority.Lowest;
            encodeThread.IsBackground = true;
            encodeThread.Start();
            // Update current status.
            status = VideoCaptureCtrl.StatusType.STARTED;
        }