Example #1
 private void OnRenderImage(RenderTexture src, RenderTexture dst)
 {
     // Blit to recording frame
     if (!IsPaused)
     {
         var encoderFrame = NatCorder.AcquireFrame();
         encoderFrame.timestamp = (long)(Time.realtimeSinceStartup * 1e+9f);
         Graphics.Blit(src, encoderFrame);
         NatCorder.CommitFrame(encoderFrame);
     }
     // Blit to render pipeline
     Graphics.Blit(src, dst);
 }
Example #2
 /// <summary>
 /// Start recording a replay with no audio
 /// </summary>
 /// <param name="recordingCamera">Source camera for recording replay</param>
 /// <param name="configuration">Configuration for recording</param>
 /// <param name="saveCallback">Callback to be invoked when the video is saved</param>
 public static void StartRecording(Camera recordingCamera, Configuration configuration, SaveCallback saveCallback)
 {
     if (!recordingCamera)
     {
         Util.LogError("Cannot record replay without source camera");
         return;
     }
     if (saveCallback == null)
     {
         Util.LogError("Cannot record replay without callback");
         return;
     }
     NatCorder.StartRecording(configuration, saveCallback);
     camera = recordingCamera.gameObject.AddComponent<CameraRecorder>();
 }
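For context, a controller script might call the StartRecording wrapper above roughly as follows. This is only a sketch: the wrapper class name ReplayCam, the NatCorderU.Core namespace, the Configuration constructor arguments, and the SaveCallback(string path) shape are assumptions for illustration, not taken from the example.
 using UnityEngine;
 using NatCorderU.Core; // assumed namespace for this NatCorder version

 public class RecordingController : MonoBehaviour
 {
     public Camera recordingCamera;

     public void BeginRecording()
     {
         // Constructor arguments are placeholders; consult the plugin docs for the real Configuration API
         var configuration = new Configuration(Screen.width, Screen.height);
         // ReplayCam stands in for whichever class hosts the StartRecording wrapper shown above
         ReplayCam.StartRecording(recordingCamera, configuration, OnReplaySaved);
     }

     // Assumed SaveCallback shape: invoked with the path of the saved video
     private void OnReplaySaved(string path)
     {
         Debug.Log("Replay saved to: " + path);
     }
 }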
Example #3
            private double timestamp, lastTime = -1; // Used to support pausing and resuming
            #endregion


            #region --Operations--

            void OnAudioFilterRead(float[] data, int channels)
            {
                // Calculate time
                if (!pause)
                {
                    timestamp += lastTime > 0 ? AudioSettings.dspTime - lastTime : 0f;
                }
                lastTime = AudioSettings.dspTime;
                // Send to NatCorder for encoding
                NatCorder.CommitSamples(data, (long)(timestamp * 1e+9f));
                if (mute)
                {
                    Array.Clear(data, 0, data.Length);
                }
            }
Example #4
 public static void StartRecording(Camera recordingCamera, Configuration configuration, VideoCallback replayCallback, IAudioSource audioSource = null)
 {
     if (!recordingCamera)
     {
         Debug.LogError("NatCorder Error: Cannot record replay without source camera");
         return;
     }
     if (replayCallback == null)
     {
         Debug.LogError("NatCorder Error: Cannot record replay without callback");
         return;
     }
     NatCorder.StartRecording(configuration, replayCallback, audioSource);
     videoRecorder = recordingCamera.gameObject.AddComponent<VideoRecorder>();
     videoRecorder.configuration = configuration;
 }
Example #5
            private void OnRenderImage(RenderTexture src, RenderTexture dst)
            {
                // Blit to recording frame
                bool canRecordFrame = IsRecording && !IsPaused;

                if (canRecordFrame && (Time.frameCount % frameSkip) == 0)
                {
                    var encoderFrame = NatCorder.AcquireFrame();
                    encoderFrame.timestamp = (long)(timestamp * 1e+9f);
                    Graphics.Blit(src, encoderFrame);
                    NatCorder.CommitFrame(encoderFrame);
                }
                // Increment timestamp
                if (canRecordFrame)
                {
                    timestamp += Time.unscaledDeltaTime;
                }
                // Blit to render pipeline
                Graphics.Blit(src, dst);
            }
Example #6
            private long timestamp, lastTime = -1; // Used to support pausing and resuming
            #endregion


            #region --Operations--

            void OnAudioFilterRead(float[] data, int channels)
            {
                // Calculate time
                var audioTime = Frame.CurrentTimestamp;

                if (!IsPaused)
                {
                    timestamp += lastTime > 0 ? audioTime - lastTime : 0;
                }
                lastTime = audioTime;
                // Send to NatCorder for encoding
                if (!IsPaused)
                {
                    NatCorder.CommitSamples(data, timestamp);
                }
                if (mute)
                {
                    Array.Clear(data, 0, data.Length);
                }
            }
Example #7
            private void OnRenderImage(RenderTexture src, RenderTexture dst)
            {
                // Calculate time
                var frameTime = Frame.CurrentTimestamp;

                if (!IsPaused)
                {
                    timestamp += lastTime > 0 ? frameTime - lastTime : 0;
                }
                lastTime = frameTime;
                // Blit to recording frame
                if (IsRecording && !IsPaused && UnityEngine.Time.frameCount % frameSkip == 0)
                {
                    var encoderFrame = NatCorder.AcquireFrame();
                    encoderFrame.timestamp = timestamp;
                    Graphics.Blit(src, encoderFrame, aspectFitter);
                    NatCorder.CommitFrame(encoderFrame);
                }
                // Blit to render pipeline
                Graphics.Blit(src, dst);
            }
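The timestamp/lastTime bookkeeping in the two methods above amounts to a pause-aware recording clock: wall-clock deltas are accumulated only while recording is not paused, and the running total is handed to the encoder in nanoseconds. A standalone sketch of that idea follows; the class and member names are illustrative and not part of NatCorder.
 // Illustrative pause-aware clock mirroring the timestamp/lastTime pattern above
 public class RecordingClock
 {
     private double timestamp;      // seconds of recorded time, excluding paused intervals
     private double lastTime = -1;  // wall-clock time of the previous tick; -1 means no tick yet

     public bool IsPaused { get; set; }

     // Timestamp in nanoseconds, the unit the encoder frames above expect
     public long Nanoseconds { get { return (long)(timestamp * 1e+9); } }

     // Call once per video frame or audio callback with the current wall-clock time,
     // e.g. Time.realtimeSinceStartup or AudioSettings.dspTime
     public void Tick(double now)
     {
         if (!IsPaused && lastTime > 0)
             timestamp += now - lastTime;
         lastTime = now;
     }
 }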
Example #8
 public static void StopRecording()
 {
     VideoRecorder.Destroy(videoRecorder);
     NatCorder.StopRecording();
 }
Example #9
 /// <summary>
 /// Stop recording a replay
 /// </summary>
 public static void StopRecording()
 {
     CameraRecorder.Destroy(camera);
     NatCorder.StopRecording();
 }
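Putting it together, StopRecording pairs with the StartRecording wrapper: it removes the recorder component and tells NatCorder to finish writing the file, after which the save callback supplied at start time is invoked. A minimal sketch of a stop handler; ReplayCam and the namespace are assumed names, as above.
 using UnityEngine;
 using NatCorderU.Core; // assumed namespace

 public class StopRecordingButton : MonoBehaviour
 {
     // Wire this to a UI button to end the current replay recording
     public void OnStopPressed()
     {
         // Destroys the recorder component and asks NatCorder to finalize the video;
         // the callback passed to StartRecording fires once the file is saved
         ReplayCam.StopRecording();
     }
 }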