Example #1
        void OpenPipe()
        {
            if (_pipe != null)
            {
                return;
            }

            timeStampList = new List<byte>();

            var camera = GetComponent<Camera>();
            var width  = PupilSettings.Instance.recorder.resolutions[(int)PupilSettings.Instance.recorder.resolution][0];
            var height = PupilSettings.Instance.recorder.resolutions[(int)PupilSettings.Instance.recorder.resolution][1];

            // Apply the screen resolution settings.
            if (_setResolution)
            {
                _tempTarget          = RenderTexture.GetTemporary(width, height);
                camera.targetTexture = _tempTarget;
                _tempBlitter         = Blitter.CreateGameObject(camera);
            }
            else
            {
                width  = camera.pixelWidth;
                height = camera.pixelHeight;
            }

            // Open an output stream.

            var name = "Unity_" + PupilSettings.Instance.currentCamera.name;

            _pipe = new FFmpegPipe(name, width, height, _frameRate, PupilSettings.Instance.recorder.codec);

            // Change the application frame rate.
            if (Time.captureFramerate == 0)
            {
                Time.captureFramerate = _frameRate;
            }
            else if (Time.captureFramerate != _frameRate)
            {
                Debug.LogWarning(
                    "Frame rate mismatch; the application frame rate has already " +
                    "been set to a different value. Make sure to use the same " +
                    "frame rate when capturing multiple cameras."
                    );
            }

            Debug.Log("Capture started (" + _pipe.Filename + ")");
        }
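
This example only shows the setup path. Below is a minimal sketch of a matching ClosePipe() teardown; it assumes FFmpegPipe exposes a Close() method (not shown in the excerpt) and reuses the _pipe, _tempTarget and _tempBlitter fields assigned above.

        // Hypothetical counterpart to OpenPipe(): restores the camera target,
        // releases the temporary render texture and closes the output stream.
        void ClosePipe()
        {
            var camera = GetComponent<Camera>();

            // Destroy the blitter object if one was created.
            if (_tempBlitter != null)
            {
                Destroy(_tempBlitter);
                _tempBlitter = null;
            }

            // Give the temporary render target back to the pool.
            if (_tempTarget != null && _tempTarget == camera.targetTexture)
            {
                camera.targetTexture = null;
                RenderTexture.ReleaseTemporary(_tempTarget);
                _tempTarget = null;
            }

            // Close the output stream.
            if (_pipe != null)
            {
                Debug.Log("Capture ended (" + _pipe.Filename + ")");
                _pipe.Close(); // assumed API; the excerpts only show the constructor
                _pipe = null;
            }
        }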
Example #2
        void OpenPipe()
        {
            if (_pipe != null)
            {
                return;
            }

            var camera = GetComponent<Camera>();
            var width  = _width;
            var height = _height;

            // Apply the screen resolution settings.
            if (_setResolution)
            {
                _tempTarget          = RenderTexture.GetTemporary(width, height, 24);
                camera.targetTexture = _tempTarget;
                _tempBlitter         = Blitter.CreateGameObject(camera);
            }
            else
            {
                width  = camera.pixelWidth;
                height = camera.pixelHeight;
            }

            if (_useCustomTexture)
            {
                width  = _renderTexture.width;
                height = _renderTexture.height;
            }

            // Open an output stream.
            _pipe = new FFmpegPipe(name, width, height, _frameRate, _preset);
            _activePipeCount++;

            // Change the application frame rate on the first pipe.
            if (_activePipeCount == 1)
            {
                if (_allowSlowDown)
                {
                    Time.captureFramerate = _frameRate;
                }
                else
                {
                    Application.targetFrameRate = _frameRate;
                }
            }

            Debug.Log("Capture started (" + _pipe.Filename + ")");
        }
Example #3
        void OpenPipe()
        {
            if (_pipe != null)
            {
                return;
            }

            var camera = GetComponent<Camera>();
            var width  = _width;
            var height = _height;

            // Apply the screen resolution settings.
            if (_setResolution)
            {
                _tempTarget          = RenderTexture.GetTemporary(width, height);
                camera.targetTexture = _tempTarget;
                _tempBlitter         = Blitter.CreateGameObject(camera);
            }
            else
            {
                width  = camera.pixelWidth;
                height = camera.pixelHeight;
            }

            // Open an output stream.
            _codec = FFmpegPipe.Codec.ProRes;
            _pipe  = new FFmpegPipe(_anim.clip.name.Replace(".motion3.json", string.Empty), width, height, _frameRate, _codec);
            _activePipeCount++;

            // Change the application frame rate on the first pipe.
            if (_activePipeCount == 1)
            {
                if (_allowSlowDown)
                {
                    Time.captureFramerate = _frameRate;
                }
                else
                {
                    Application.targetFrameRate = _frameRate;
                }
            }

            Debug.Log("Capture started (" + _pipe.Filename + ")");
        }
Example #4
        void OpenPipe()
        {
            if (_pipe != null)
            {
                return;
            }

            var camera = GetComponent<Camera>();
            var width  = _width;
            var height = _height;

            // Apply the screen resolution settings.
            if (_setResolution)
            {
                _tempTarget          = RenderTexture.GetTemporary(width, height);
                camera.targetTexture = _tempTarget;
                _tempBlitter         = Blitter.CreateGameObject(camera);
            }
            else
            {
                width  = camera.pixelWidth;
                height = camera.pixelHeight;
            }

            // Open an output stream.
            _pipe = new FFmpegPipe(Recorder.FilePath, width, height, _frameRate, _codec);

            // Change the application frame rate.
            if (Time.captureFramerate == 0)
            {
                Time.captureFramerate = _frameRate;
            }
            else if (Time.captureFramerate != _frameRate)
            {
                Debug.LogWarning(
                    "Frame rate mismatch; the application frame rate has already " +
                    "been set to a different value. Make sure to use the same " +
                    "frame rate when capturing multiple cameras."
                    );
            }

            Debug.Log("Capture started (" + _pipe.Filename + ")");
        }
Example #5
        void Update()
        {
            var camera = GetComponent<Camera>();

            // Lazy initialization
            if (_session == null)
            {
                // Give a newly created temporary render texture to the camera
                // if it's set to render to a screen. Also create a blitter
                // object to keep frames presented on the screen.
                if (camera.targetTexture == null)
                {
                    _tempRT = new RenderTexture(_width, _height, 24, GetTargetFormat(camera));
                    _tempRT.antiAliasing = GetAntiAliasingLevel(camera);
                    camera.targetTexture = _tempRT;
                    _blitter             = Blitter.CreateInstance(camera);
                }

                // Start an FFmpeg session.
                _session = FFmpegSession.Create(
                    gameObject.name,
                    camera.targetTexture.width,
                    camera.targetTexture.height,
                    _frameRate, preset
                    );

                _startTime      = Time.time;
                _frameCount     = 0;
                _frameDropCount = 0;
            }

            var gap   = Time.time - FrameTime;
            var delta = 1 / _frameRate;

            if (gap < 0)
            {
                // Update without frame data.
                _session.PushFrame(null);
            }
            else if (gap < delta)
            {
                // One frame behind the current time:
                // Push the current frame to FFmpeg.
                _session.PushFrame(camera.targetTexture);
                _frameCount++;
            }
            else if (gap < delta * 2)
            {
                // Two frames behind the current time:
                // Push the current frame twice to FFmpeg. This is not an
                // efficient way to catch up; proper frame duplication should
                // be implemented instead. #fixme
                _session.PushFrame(camera.targetTexture);
                _session.PushFrame(camera.targetTexture);
                _frameCount += 2;
            }
            else
            {
                // Show a warning message about the situation.
                WarnFrameDrop();

                // Push the current frame to FFmpeg.
                _session.PushFrame(camera.targetTexture);

                // Compensate for the time delay.
                _frameCount += Mathf.FloorToInt(gap * _frameRate);
            }
        }
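
FrameTime is not defined in this excerpt. For the gap/delta checks above to work, it has to convert the number of frames pushed so far back into a timestamp; one plausible definition, assuming the _startTime, _frameCount and _frameRate fields used above, is:

        // Hypothetical property: the presentation time of the next frame
        // to be pushed, derived from the frame counter.
        float FrameTime
        {
            get { return _startTime + _frameCount / _frameRate; }
        }

With such a definition, a negative gap means the recorder is ahead of real time, a gap below one frame interval means exactly one frame is due, and larger gaps trigger frame duplication or the frame-drop warning.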
Example #6
        void Update()
        {
            var camera = GetComponent<Camera>();

            // Lazy initialization
            if (_session == null)
            {
                // Give a newly created temporary render texture to the camera
                // if it's set to render to a screen. Also create a blitter
                // object to keep frames presented on the screen.
                if (camera.targetTexture == null)
                {
                    _tempRT = new RenderTexture(_width, _height, 24, GetTargetFormat(camera));
                    _tempRT.antiAliasing = GetAntiAliasingLevel(camera);
                    camera.targetTexture = _tempRT;
                    _blitter             = Blitter.CreateInstance(camera);
                }

                // Start an FFmpeg session.
                var targetTexture = camera.targetTexture;
                if (_isStream)
                {
                    _session = FFmpegSession.CreateWithArguments(
                        "-re"
                        + " -y -f rawvideo -vcodec rawvideo -pixel_format rgba"
                        + " -colorspace bt709"
                        + " -video_size " + targetTexture.width + "x" + targetTexture.height
                        + " -framerate " + _frameRate
                        + " -loglevel warning -i - " + preset.GetOptions()
                        + " -preset " + _compressionSpeed.ToString().ToLower()               // compression preset
                        + $" -b:v {_bitrate}k -maxrate {_bitrate}k -bufsize {_bitrate * 2}k" // Video bitrates
                        + $" -f flv \"{_rtmpUrl}\""
                        );
                }
                else
                {
                    _session = FFmpegSession.Create(
                        gameObject.name,
                        targetTexture.width,
                        targetTexture.height,
                        _frameRate, preset
                        );
                }

                _startTime      = Time.time;
                _frameCount     = 0;
                _frameDropCount = 0;
            }

            var gap   = Time.time - FrameTime;
            var delta = 1 / _frameRate;

            if (gap < 0)
            {
                // Update without frame data.
                _session.PushFrame(null);
            }
            else if (gap < delta)
            {
                // One frame behind the current time:
                // Push the current frame to FFmpeg.
                _session.PushFrame(camera.targetTexture);
                _frameCount++;
            }
            else if (gap < delta * 2)
            {
                // Two frames behind the current time:
                // Push the current frame twice to FFmpeg. This is not an
                // efficient way to catch up; proper frame duplication should
                // be implemented instead. #fixme
                _session.PushFrame(camera.targetTexture);
                _session.PushFrame(camera.targetTexture);
                _frameCount += 2;
            }
            else
            {
                // Show a warning message about the situation.
                WarnFrameDrop();

                // Push the current frame to FFmpeg.
                _session.PushFrame(camera.targetTexture);

                // Compensate for the time delay.
                _frameCount += Mathf.FloorToInt(gap * _frameRate);
            }
        }
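
For reference, the argument string built in the streaming branch above expands to a single FFmpeg command line. The concrete values below are purely illustrative (1920x1080 at 30 fps, a 4500 kbps bitrate, the "fast" compression preset, a placeholder RTMP URL, and whatever preset.GetOptions() contributes):

            -re -y -f rawvideo -vcodec rawvideo -pixel_format rgba
            -colorspace bt709 -video_size 1920x1080 -framerate 30
            -loglevel warning -i - <options from preset.GetOptions()>
            -preset fast -b:v 4500k -maxrate 4500k -bufsize 9000k
            -f flv "rtmp://<server>/<stream-key>"

FFmpeg reads the raw RGBA frames pushed through the pipe on stdin (-i -) at the declared size and frame rate, encodes them with the preset's codec options, and publishes the result as an FLV stream to the RTMP endpoint.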