Example #1
        void ClosePipe()
        {
            var camera = GetComponent <Camera>();

            // Release the temporary render target.
            if (_tempTarget != null && _tempTarget == camera.targetTexture)
            {
                camera.targetTexture = null;
                RenderTexture.ReleaseTemporary(_tempTarget);
                _tempTarget = null;
            }

            // Close the output stream.
            if (_pipe != null)
            {
                Debug.Log("Capture ended (" + _pipe.Filename + ")");

                _pipe.Close();

                if (!string.IsNullOrEmpty(_pipe.Error))
                {
                    Debug.LogWarning(
                        "ffmpeg returned with a warning or an error message. " +
                        "See the following lines for details:\n" + _pipe.Error
                        );
                }

                _pipe = null;
            }
        }
Example #2
        void ClosePipe()
        {
            var camera = GetComponent <Camera>();

            // Destroy the blitter object.
            if (_tempBlitter != null)
            {
                Destroy(_tempBlitter);
                _tempBlitter = null;
            }

            // Release the temporary render target.
            if (_tempTarget != null && _tempTarget == camera.targetTexture)
            {
                camera.targetTexture = null;
                RenderTexture.ReleaseTemporary(_tempTarget);
                _tempTarget = null;
            }

            // Close the output stream.
            if (_pipe != null)
            {
                Debug.Log("Capture ended (" + _pipe.Filename + ")");

                _pipe.Close();
                _activePipeCount--;

                if (!string.IsNullOrEmpty(_pipe.Error))
                {
                    Debug.LogWarning(
                        "ffmpeg returned with a warning or an error message. " +
                        "See the following lines for details:\n" + _pipe.Error
                        );
                }

                _pipe = null;

                // Reset the application frame rate on the last pipe.
                if (_activePipeCount == 0)
                {
                    if (_allowSlowDown)
                    {
                        Time.captureFramerate = 0;
                    }
                    else
                    {
                        Application.targetFrameRate = -1;
                    }
                }
            }
        }
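
Note: Examples #2 and #4 use fields whose declarations are not part of the snippets. The sketch below is a guess at their shape, assuming _activePipeCount is a static counter shared by every capture component; the names, types, and attributes are assumptions, not the original source.

        // Hypothetical field declarations for the capture component above.
        static int _activePipeCount;                  // assumed shared across all capture components

        [SerializeField] bool _allowSlowDown = true;  // assumed inspector toggle
        [SerializeField] int  _frameRate     = 30;    // assumed capture frame rate

        FFmpegPipe    _pipe;                          // output stream to the ffmpeg process
        RenderTexture _tempTarget;                    // temporary render target used when _setResolution is set
        GameObject    _tempBlitter;                   // helper object created by Blitter.CreateGameObject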
Example #3
        void OpenPipe()
        {
            if (_pipe != null)
            {
                return;
            }

            timeStampList = new List <byte> ();

            var camera = GetComponent <Camera>();
            var width  = PupilSettings.Instance.recorder.resolutions [(int)PupilSettings.Instance.recorder.resolution] [0];
            var height = PupilSettings.Instance.recorder.resolutions [(int)PupilSettings.Instance.recorder.resolution] [1];

            // Apply the screen resolution settings.
            if (_setResolution)
            {
                _tempTarget          = RenderTexture.GetTemporary(width, height);
                camera.targetTexture = _tempTarget;
                _tempBlitter         = Blitter.CreateGameObject(camera);
            }
            else
            {
                width  = camera.pixelWidth;
                height = camera.pixelHeight;
            }

            // Open an output stream.

            var name = "Unity_" + PupilSettings.Instance.currentCamera.name;

            _pipe = new FFmpegPipe(name, width, height, _frameRate, PupilSettings.Instance.recorder.codec);

            // Change the application frame rate.
            if (Time.captureFramerate == 0)
            {
                Time.captureFramerate = _frameRate;
            }
            else if (Time.captureFramerate != _frameRate)
            {
                Debug.LogWarning(
                    "Frame rate mismatch; the application frame rate has already " +
                    "been set to a different value. Make sure to use the same " +
                    "frame rate when capturing multiple cameras."
                    );
            }

            Debug.Log("Capture started (" + _pipe.Filename + ")");
        }
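
Note: Example #3 allocates timeStampList, but none of the snippets show it being filled; Examples #5 and #6 later flush it to a .time file next to the video. The sketch below illustrates the per-frame bookkeeping this implies, assuming one double-precision timestamp is appended per captured frame; the method name and the timestamp source are assumptions.

        // Hypothetical per-frame step: append the current timestamp as raw bytes
        // so it can be written out next to the video when the pipe is closed.
        void AppendTimestamp(double timestamp)
        {
            timeStampList.AddRange(System.BitConverter.GetBytes(timestamp));
        }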
Example #4
        void OpenPipe()
        {
            if (_pipe != null)
            {
                return;
            }

            var camera = GetComponent <Camera>();
            var width  = _width;
            var height = _height;

            // Apply the screen resolution settings.
            if (_setResolution)
            {
                _tempTarget          = RenderTexture.GetTemporary(width, height, 24);
                camera.targetTexture = _tempTarget;
                _tempBlitter         = Blitter.CreateGameObject(camera);
            }
            else
            {
                width  = camera.pixelWidth;
                height = camera.pixelHeight;
            }

            if (_useCustomTexture)
            {
                width  = _renderTexture.width;
                height = _renderTexture.height;
            }

            // Open an output stream.
            _pipe = new FFmpegPipe(name, width, height, _frameRate, _preset);
            _activePipeCount++;

            // Change the application frame rate on the first pipe.
            if (_activePipeCount == 1)
            {
                if (_allowSlowDown)
                {
                    Time.captureFramerate = _frameRate;
                }
                else
                {
                    Application.targetFrameRate = _frameRate;
                }
            }

            Debug.Log("Capture started (" + _pipe.Filename + ")");
        }
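
Note: none of the OpenPipe/ClosePipe examples show the step that actually feeds frames to ffmpeg. The sketch below is one way that step could look, assuming the component is attached to the capturing camera and that FFmpegPipe exposes a Write(byte[]) method; the readback path shown here is an illustration, not the original implementation.

        // Hypothetical per-frame readback: copy the rendered image into a Texture2D
        // on the CPU and push the raw RGBA32 pixel data into the pipe.
        void OnRenderImage(RenderTexture source, RenderTexture destination)
        {
            if (_pipe != null)
            {
                var prevActive = RenderTexture.active;
                RenderTexture.active = source;

                var frame = new Texture2D(source.width, source.height, TextureFormat.RGBA32, false);
                frame.ReadPixels(new Rect(0, 0, source.width, source.height), 0, 0, false);
                frame.Apply();

                RenderTexture.active = prevActive;

                _pipe.Write(frame.GetRawTextureData()); // assumes an FFmpegPipe.Write(byte[]) method
                Destroy(frame);
            }

            Graphics.Blit(source, destination);
        }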
Example #5
        void ClosePipe()
        {
            var camera = GetComponent <Camera>();

            // Destroy the blitter object.
            if (_tempBlitter != null)
            {
                Destroy(_tempBlitter);
                _tempBlitter = null;
            }

            // Release the temporary render target.
            if (_tempTarget != null && _tempTarget == camera.targetTexture)
            {
                camera.targetTexture = null;
                RenderTexture.ReleaseTemporary(_tempTarget);
                _tempTarget = null;
            }

            // Close the output stream.
            if (_pipe != null)
            {
                Debug.Log("Capture ended (" + _pipe.Filename + ")" + ". Rendered frame count on MainThread : " + renderedFrameCount + ". Written out frame count on SecondaryThread : " + writtenFrameCount + ". Leftover : " + renderPipeQueue.Count);


                string timeStampFileName  = "Unity_" + Camera.main.name;
                byte[] timeStampByteArray = pupilTracker.floatArrayToByteArray(timeStampList.ToArray());
                File.WriteAllBytes(_pipe.FilePath + "/" + timeStampFileName + ".time", timeStampByteArray);

                //File.WriteAllText (_pipe.FilePath + "/" + csvFileName + ".time", strBuilder.ToString ());


                _pipe.Close();

                if (!string.IsNullOrEmpty(_pipe.Error))
                {
                    Debug.LogWarning(
                        "ffmpeg returned with a warning or an error message. " +
                        "See the following lines for details:\n" + _pipe.Error
                        );
                }

                _pipe = null;
            }
        }
Example #6
        void ClosePipe()
        {
            var camera = GetComponent <Camera>();

            // Destroy the blitter object.
            if (_tempBlitter != null)
            {
                Destroy(_tempBlitter);
                _tempBlitter = null;
            }

            // Release the temporary render target.
            if (_tempTarget != null && _tempTarget == camera.targetTexture)
            {
                camera.targetTexture = null;
                RenderTexture.ReleaseTemporary(_tempTarget);
                _tempTarget = null;
            }

            // Close the output stream.
            if (_pipe != null)
            {
                Debug.Log("Capture ended (" + _pipe.Filename + ").");

                // Write pupil timestamps to a file
                string timeStampFileName  = "Unity_" + PupilSettings.Instance.currentCamera.name;
                byte[] timeStampByteArray = timeStampList.ToArray();
                File.WriteAllBytes(_pipe.FilePath + "/" + timeStampFileName + ".time", timeStampByteArray);

                PupilTools.SaveRecording(_pipe.FilePath);

                _pipe.Close();

                if (!string.IsNullOrEmpty(_pipe.Error))
                {
                    Debug.LogWarning(
                        "ffmpeg returned with a warning or an error message. " +
                        "See the following lines for details:\n" + _pipe.Error
                        );
                }

                _pipe = null;
            }
        }
Example #7
        void OpenPipe()
        {
            if (_pipe != null)
            {
                return;
            }

            var camera = GetComponent <Camera>();
            var width  = _width;
            var height = _height;

            // Apply the screen resolution settings.
            if (_setResolution)
            {
                _tempTarget          = RenderTexture.GetTemporary(width, height);
                camera.targetTexture = _tempTarget;
                _tempBlitter         = Blitter.CreateGameObject(camera);
            }
            else
            {
                width  = camera.pixelWidth;
                height = camera.pixelHeight;
            }

            // Open an output stream.
            _codec = FFmpegPipe.Codec.ProRes;
            _pipe  = new FFmpegPipe(_anim.clip.name.Replace(".motion3.json", string.Empty), width, height, _frameRate, _codec);
            _activePipeCount++;

            // Change the application frame rate on the first pipe.
            if (_activePipeCount == 1)
            {
                if (_allowSlowDown)
                {
                    Time.captureFramerate = _frameRate;
                }
                else
                {
                    Application.targetFrameRate = _frameRate;
                }
            }

            Debug.Log("Capture started (" + _pipe.Filename + ")");
        }
Example #8
        void OpenPipe()
        {
            if (_pipe != null)
            {
                return;
            }

            var camera = GetComponent <Camera>();
            var width  = _width;
            var height = _height;

            // Apply the screen resolution settings.
            if (_setResolution)
            {
                _tempTarget          = RenderTexture.GetTemporary(width, height);
                camera.targetTexture = _tempTarget;
                _tempBlitter         = Blitter.CreateGameObject(camera);
            }
            else
            {
                width  = camera.pixelWidth;
                height = camera.pixelHeight;
            }

            // Open an output stream.
            _pipe = new FFmpegPipe(Recorder.FilePath, width, height, _frameRate, _codec);

            // Change the application frame rate.
            if (Time.captureFramerate == 0)
            {
                Time.captureFramerate = _frameRate;
            }
            else if (Time.captureFramerate != _frameRate)
            {
                Debug.LogWarning(
                    "Frame rate mismatch; the application frame rate has already " +
                    "been set to a different value. Make sure to use the same " +
                    "frame rate when capturing multiple cameras."
                    );
            }

            Debug.Log("Capture started (" + _pipe.Filename + ")");
        }
Example #9
 FFmpegSession(string arguments)
 {
     if (!FFmpegPipe.IsAvailable)
     {
         Debug.LogWarning(
             "Failed to initialize an FFmpeg session due to missing " +
             "executable file. Please check FFmpeg installation."
             );
     }
     else if (!UnityEngine.SystemInfo.supportsAsyncGPUReadback)
     {
         Debug.LogWarning(
             "Failed to initialize an FFmpeg session due to lack of " +
             "async GPU readback support. Please try changing " +
             "graphics API to readback-enabled one."
             );
     }
     else
     {
         _pipe = new FFmpegPipe(arguments);
     }
 }
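
Note: the FFmpegSession constructor above refuses to start without async GPU readback support. As a companion illustration, the sketch below shows how a caller might use Unity's AsyncGPUReadback API to pull a frame off the GPU and hand the bytes to such a session; the SessionFeeder class and the onFrame callback are hypothetical, and only the AsyncGPUReadback calls are real Unity API.

 // Hypothetical caller-side sketch: request an asynchronous GPU readback of a
 // render texture and pass the completed frame bytes to a callback (for example,
 // a method that forwards them to the ffmpeg pipe).
 using UnityEngine;
 using UnityEngine.Rendering;

 public sealed class SessionFeeder
 {
     public void PushFrame(RenderTexture source, System.Action<byte[]> onFrame)
     {
         AsyncGPUReadback.Request(source, 0, TextureFormat.RGBA32, request =>
         {
             if (request.hasError)
             {
                 Debug.LogWarning("GPU readback error; frame dropped.");
                 return;
             }

             // Copy the readback buffer out of the native array and hand it on.
             onFrame(request.GetData<byte>().ToArray());
         });
     }
 }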