/// <summary>
/// Resolves the output-resolution wildcard for the session.
/// </summary>
/// <param name="session">The recording session (not used; resolution comes from the recorder's input settings).</param>
/// <returns>A "WxH" string, or "NA" when the first input is not an <c>ImageInputSettings</c>.</returns>
string ResolutionResolver(RecordingSession session)
{
    // Pattern matching replaces the 'as' cast + explicit null check;
    // interpolation replaces the manual string concatenation.
    if (m_RecorderSettings.InputsSettings.FirstOrDefault() is ImageInputSettings input)
    {
        return $"{input.OutputWidth}x{input.OutputHeight}";
    }
    return "NA";
}
/// <summary>
/// Starts the recording session and ensures the output directory exists.
/// </summary>
/// <param name="session">The session being started.</param>
/// <returns>False when the base class refuses to start; true otherwise.</returns>
protected internal override bool BeginRecording(RecordingSession session)
{
    var started = base.BeginRecording(session);
    if (started)
    {
        // Create the output folder up front so the first frame has somewhere to go.
        Settings.fileNameGenerator.CreateDirectory(session);
    }
    return started;
}
/// <summary>
/// Feeds one frame of audio samples to the encoder.
/// </summary>
/// <param name="session">The active recording session.</param>
public override void RecordFrame(RecordingSession session)
{
    var audioInput = (AudioInput)m_Inputs[0];

    // Only encode when the user chose to keep audio.
    if (audioInput.audioSettings.preserveAudio)
    {
        m_Encoder.AddSamples(audioInput.mainBuffer);
    }
}
/// <summary>
/// Records one frame from the two inputs: input 0 is the video source,
/// input 1 is the audio source.
/// </summary>
/// <param name="session">The active recording session.</param>
/// <exception cref="Exception">Thrown when the input count is not exactly 2.</exception>
public override void RecordFrame(RecordingSession session)
{
    if (m_Inputs.Count != 2)
    {
        throw new Exception("Unsupported number of sources");
    }
    int width;
    int height;
    if (m_Inputs[0] is ScreenCaptureInput)
    {
        // Screen capture already provides a CPU-side image; hand it straight to the encoder.
        var input = (ScreenCaptureInput)m_Inputs[0];
        width = input.outputWidth;
        height = input.outputHeight;
        m_Encoder.AddFrame(input.image);
    }
    else
    {
        // Render-texture inputs live on the GPU; read the pixels back through a reusable Texture2D.
        var input = (BaseRenderTextureInput)m_Inputs[0];
        width = input.outputWidth;
        height = input.outputHeight;
        if (!m_ReadBackTexture)
        {
            // Lazily allocated. NOTE(review): not re-created if the output size changes
            // mid-session — confirm inputs are fixed-size for the whole recording.
            m_ReadBackTexture = new Texture2D(width, height, TextureFormat.RGBA32, false);
        }
        // ReadPixels reads from RenderTexture.active; save and restore whatever was active.
        var backupActive = RenderTexture.active;
        RenderTexture.active = input.outputRT;
        m_ReadBackTexture.ReadPixels(new Rect(0, 0, width, height), 0, 0, false);
        m_Encoder.AddFrame(m_ReadBackTexture);
        RenderTexture.active = backupActive;
    }
    var audioInput = (AudioInput)m_Inputs[1];
    if (!audioInput.audioSettings.m_PreserveAudio)
    {
        return;
    }
#if RECORD_AUDIO_MIXERS
    // One WAV writer per recorded mixer group; null entries are groups that are not captured.
    for (int n = 0; n < m_WavWriters.Length; n++)
    {
        if (m_WavWriters[n] != null)
        {
            m_WavWriters[n].Feed(audioInput.mixerGroupAudioBuffer(n));
        }
    }
#endif
    m_Encoder.AddSamples(audioInput.mainBuffer);
}
/// <summary>
/// Queues the output path for the current frame, then records it.
/// </summary>
/// <param name="session">The active recording session.</param>
/// <exception cref="Exception">Thrown when the input count is not exactly 1.</exception>
protected override void RecordFrame(RecordingSession session)
{
    if (m_Inputs.Count != 1)
    {
        throw new Exception("Unsupported number of sources");
    }

    // WriteFrame may run asynchronously after the session has advanced, so capture
    // the per-frame path now; otherwise the generated name could belong to a later frame.
    var framePath = Settings.FileNameGenerator.BuildAbsolutePath(session);
    m_PathQueue.Enqueue(framePath);

    base.RecordFrame(session);
}
/// <inheritdoc/>
protected internal override bool BeginRecording(RecordingSession session)
{
    var started = base.BeginRecording(session);
    if (started)
    {
        // Prefer asynchronous GPU readback when the platform supports it,
        // and reset per-session readback bookkeeping.
        m_UseAsyncGPUReadback = SystemInfo.supportsAsyncGPUReadback;
        m_OngoingAsyncGPURequestsCount = 0;
        m_DelayedEncoderDispose = false;
    }
    return started;
}
/// <inheritdoc/>
protected internal override bool BeginRecording(RecordingSession session)
{
    var started = base.BeginRecording(session);
    if (started)
    {
        // Prefer asynchronous GPU readback when supported, and reset the
        // per-session tracking of in-flight readback requests.
        UseAsyncGPUReadback = SystemInfo.supportsAsyncGPUReadback;
        m_OngoingRequests = new Dictionary<AsyncGPUReadbackRequest, double>();
        m_DelayedEncoderDispose = false;
    }
    return started;
}
/// <inheritdoc/>
protected internal override void EndRecording(RecordingSession session)
{
    base.EndRecording(session);

    // Async GPU readbacks may still be in flight; in that case defer encoder
    // disposal until the last request completes.
    var hasPendingReadbacks = m_OngoingAsyncGPURequestsCount > 0;
    if (hasPendingReadbacks)
    {
        m_DelayedEncoderDispose = true;
    }
    else
    {
        DisposeEncoder();
    }
}
/// <summary>
/// Ends the recording and restores editor state changed when recording began.
/// </summary>
/// <param name="session">The session being ended.</param>
protected override void EndRecording(RecordingSession session)
{
    // Put back the async shader compilation setting captured at the start of recording.
    EditorSettings.asyncShaderCompilation = m_asyncShaderCompileSetting;

#if OIIO_AVAILABLE
    if (m_imgOutput != null)
    {
        ImageOutput.destroy(m_imgOutput);
    }
#endif

    base.EndRecording(session);
}
/// <summary>
/// Prepares a frame before recording it. Callback is invoked for every frame during the recording session, before RecordFrame.
/// </summary>
/// <param name="ctx">The current recording session.</param>
protected internal override void PrepareNewFrame(RecordingSession ctx)
{
    base.PrepareNewFrame(ctx);
#if HDRP_ACCUM_API
    // When accumulation capture is active and HDRP is the current pipeline,
    // tell HDRP that a new sub-frame is starting.
    if (UnityHelpers.CaptureAccumulation(settings))
    {
        if (RenderPipelineManager.currentPipeline is HDRenderPipeline hdPipeline)
        {
            hdPipeline.PrepareNewSubFrame();
        }
    }
#endif
}
// Used by the Timeline
/// <summary>
/// Sends the analytics event marking the start of a recording session.
/// Does nothing when editor analytics are disabled.
/// </summary>
/// <param name="session">The session that just started.</param>
public static void SendStartEvent(RecordingSession session)
{
    if (!EditorAnalytics.enabled)
    {
        return;
    }

    EditorAnalytics.RegisterEventWithLimit(startEventName, maxEventsPerHour, maxNumberOfElements, vendorKey);

    // Build the payload and send the data to the database.
    var data = CreateSessionStartEvent(session);
    EditorAnalytics.SendEventWithLimit(startEventName, data);
}
/// <summary>
/// Sends the analytics event marking the end of a recording session.
/// Does nothing when editor analytics are disabled.
/// </summary>
/// <param name="session">The session that ended.</param>
/// <param name="error">Whether the session ended in error.</param>
/// <param name="complete">Whether the session ran to completion.</param>
public static void SendStopEvent(RecordingSession session, bool error, bool complete)
{
    if (!EditorAnalytics.enabled)
    {
        return;
    }

    EditorAnalytics.RegisterEventWithLimit(completeEventName, maxEventsPerHour, maxNumberOfElements, vendorKey);

    // Build the payload and send the data to the database.
    var data = CreateStopEvent(session, error, complete);
    EditorAnalytics.SendEventWithLimit(completeEventName, data);
}
/// <summary>
/// Replaces every registered wildcard pattern in the given string with its resolved value.
/// </summary>
/// <param name="str">The template string; may be null or empty.</param>
/// <param name="session">The session each wildcard is resolved against.</param>
/// <returns>The resolved string, or an empty string for null/empty input.</returns>
string ApplyWildcards(string str, RecordingSession session)
{
    if (string.IsNullOrEmpty(str))
    {
        return string.Empty;
    }

    var resolved = str;
    foreach (var wildcard in wildcards)
    {
        resolved = resolved.Replace(wildcard.pattern, wildcard.Resolve(session));
    }
    return resolved;
}
/// <summary>
/// Starts recording. Throws when a recording is already in progress.
/// </summary>
/// <param name="session">The session being started.</param>
/// <returns>Always true: the recorder is now recording.</returns>
/// <exception cref="Exception">Thrown when this recorder is already recording.</exception>
public virtual bool BeginRecording(RecordingSession session)
{
    if (recording)
    {
        throw new Exception("Already recording!");
    }

    if (Options.verboseMode)
    {
        // String interpolation instead of string.Format; output is unchanged.
        Debug.Log($"Recorder {GetType().Name} starting to record");
    }

    return recording = true;
}
/// <summary>
/// Finalizes the recording: disposes the encoder and refreshes the asset database
/// when the output was written under the project's Assets folder.
/// </summary>
/// <param name="session">The session being ended.</param>
public override void EndRecording(RecordingSession session)
{
    base.EndRecording(session);

    m_Encoder?.Dispose();
    m_Encoder = null;

    // When adding a file to Unity's assets directory, trigger a refresh so it is detected.
    var wroteIntoAssets = m_Settings.m_DestinationPath.root == OutputPath.ERoot.AssetsPath;
    if (wroteIntoAssets)
    {
        AssetDatabase.Refresh();
    }
}
/// <summary>
/// Stops recording: finalizes the WAV file and tears down the audio analyzer.
/// </summary>
/// <param name="session">The session being ended.</param>
protected override void EndRecording(RecordingSession session)
{
    base.EndRecording(session);

    waveFileCreator?.StopAndWrite();

    if (analyzer != null)
    {
        // Detach from the DSP bus before releasing the analyzer's resources.
        analyzer.DetachDspBus();
        analyzer.Dispose();
    }
}
/// <summary>
/// Derives the analytics outcome of a session: Error when the session is null,
/// UserStopped when it ended before its configured time/frame interval,
/// Complete otherwise.
/// </summary>
/// <param name="session">The session to inspect; may be null.</param>
/// <returns>The outcome to report in the session-end analytics event.</returns>
static RecorderSessionEndEvent.Outcome GetOutcome(this RecordingSession session)
{
    if (session == null)
    {
        return RecorderSessionEndEvent.Outcome.Error;
    }

    // Did the session stop before reaching the configured end of its time window?
    var stoppedBeforeEndTime =
        session.settings.RecordMode == RecordMode.TimeInterval
        && session.currentFrameStartTS < session.settings.EndTime;

    // Did the session stop before reaching the configured end frame?
    var stoppedBeforeEndFrame =
        session.settings.RecordMode == RecordMode.FrameInterval
        && session.frameIndex < session.settings.EndFrame;

    if (stoppedBeforeEndTime || stoppedBeforeEndFrame)
    {
        return RecorderSessionEndEvent.Outcome.UserStopped;
    }

    return RecorderSessionEndEvent.Outcome.Complete;
}
/// <summary>
/// Records one frame; input 0 is video (handled by the base class), input 1 is audio.
/// </summary>
/// <param name="session">The active recording session.</param>
/// <exception cref="Exception">Thrown when the input count is not exactly 2.</exception>
protected internal override void RecordFrame(RecordingSession session)
{
    if (m_Inputs.Count != 2)
    {
        throw new Exception("Unsupported number of sources");
    }

    base.RecordFrame(session);

    // Only push audio samples when the user asked to keep audio.
    var audioInput = (AudioInputBase)m_Inputs[1];
    if (audioInput.audioSettings.PreserveAudio)
    {
        Settings.m_EncoderManager.AddSamples(m_EncoderHandle, audioInput.mainBuffer);
    }
}
/// <summary>
/// Builds a recording session backed by a recorder component, wiring the
/// component and the session to each other.
/// </summary>
/// <param name="settings">The recorder settings to create the session from.</param>
/// <returns>The newly created session.</returns>
public RecordingSession CreateRecorderSessionWithRecorderComponent(RecorderSettings settings)
{
    var component = GetRecorderComponent(settings);

    var session = new RecordingSession
    {
        recorder = RecordersInventory.CreateDefaultRecorder(settings),
        recorderGameObject = component.gameObject,
        recorderComponent = component
    };

    // The component keeps a back-reference so it can drive the session.
    component.session = session;

    return session;
}
/// <summary>
/// Finalizes the recording: disposes the encoder and refreshes the asset database
/// when the output was written under Assets or StreamingAssets.
/// </summary>
/// <param name="session">The session being ended.</param>
public override void EndRecording(RecordingSession session)
{
    base.EndRecording(session);

    m_Encoder?.Dispose();
    m_Encoder = null;

    // When adding a file to Unity's assets directory, trigger a refresh so it is detected.
    var root = settings.fileNameGenerator.root;
    if (root == OutputPath.Root.AssetsFolder || root == OutputPath.Root.StreamingAssets)
    {
        AssetDatabase.Refresh();
    }
}
/// <summary>
/// Records one frame; input 0 is video (handled by the base class), input 1 is audio.
/// </summary>
/// <param name="session">The active recording session.</param>
/// <exception cref="Exception">Thrown when the input count is not exactly 2.</exception>
public override void RecordFrame(RecordingSession session)
{
    if (m_Inputs.Count != 2)
    {
        throw new Exception("Unsupported number of sources");
    }

    base.RecordFrame(session);

    // Audio is optional; feed samples only when the user kept it enabled.
    var audioInput = (AudioInput)m_Inputs[1];
    if (audioInput.audioSettings.preserveAudio)
    {
        m_Encoder.AddSamples(audioInput.mainBuffer);
    }
}
/// <summary>
/// Starts a new recording session. Callback is invoked once when the recording session starts.
/// </summary>
/// <param name="session">The newly created recording session.</param>
/// <returns>True if recording can start, False otherwise.</returns>
/// <exception cref="Exception">Throws if there is already a recording session running.</exception>
protected internal virtual bool BeginRecording(RecordingSession session)
{
    if (Recording)
    {
        throw new Exception("Already recording!");
    }

    // Local helper: the three validation passes below all log a list of messages
    // at a given severity; factor the loop out instead of repeating it.
    void LogAll(List<string> messages, LogType type)
    {
        foreach (var message in messages)
        {
            ConsoleLogMessage(message, type);
        }
    }

    // Log warnings from the legacy (obsolete) validation API; non-blocking.
    var oldWarnings = new List<string>();
#pragma warning disable 618
    if (!session.settings.ValidityCheck(oldWarnings))
#pragma warning restore 618
    {
        LogAll(oldWarnings, LogType.Warning);
    }

    // Log non-blocking warnings.
    var warnings = new List<string>();
    session.settings.GetWarnings(warnings);
    LogAll(warnings, LogType.Warning);

    // Log blocking errors; any error aborts the recording.
    var errors = new List<string>();
    session.settings.GetErrors(errors);
    LogAll(errors, LogType.Error);
    if (errors.Count > 0)
    {
        Recording = false;
        return false;
    }

    if (RecorderOptions.VerboseMode)
    {
        // Dropped a pointless '$' prefix: the message had no interpolation holes.
        ConsoleLogMessage("Starting to record", LogType.Log);
    }

    return Recording = true;
}
/// <summary>
/// Ends the recording and balances the static concurrent-recorder counter
/// for recordings that actually started.
/// </summary>
/// <param name="session">The session being ended.</param>
protected internal override void EndRecording(RecordingSession session)
{
    base.EndRecording(session);

    // A recording that never started properly never incremented the counter.
    if (!m_RecordingStartedProperly)
    {
        return;
    }

    s_ConcurrentCount--;
    if (s_ConcurrentCount < 0)
    {
        Debug.LogError($"Recording ended with no matching beginning recording.");
    }

    // Reset the one-shot warning so we can warn again at the next occurrence.
    if (s_ConcurrentCount <= 1 && s_WarnedUserOfConcurrentCount)
    {
        s_WarnedUserOfConcurrentCount = false;
    }
}
/// <summary>
/// Ends the recording and balances the static concurrent-recorder counter,
/// guarding against EndRecording being invoked more than once.
/// </summary>
/// <param name="session">The session being ended.</param>
protected internal override void EndRecording(RecordingSession session)
{
    base.EndRecording(session);

    // Decrement exactly once per successfully started recording.
    if (!m_RecordingStartedProperly || m_RecordingAlreadyEnded)
    {
        return;
    }

    s_ConcurrentCount--;
    if (s_ConcurrentCount < 0)
    {
        ConsoleLogMessage($"Recording ended with no matching beginning recording.", LogType.Error);
    }

    // Reset the one-shot warning so we can warn again at the next occurrence.
    if (s_ConcurrentCount <= 1 && s_WarnedUserOfConcurrentCount)
    {
        s_WarnedUserOfConcurrentCount = false;
    }

    m_RecordingAlreadyEnded = true;
}
/// <summary>
/// Builds a recording session hooked to the scene's session GameObject.
/// </summary>
/// <param name="settings">The recorder settings to create the session from.</param>
/// <returns>The new session, or null when no session hook is available.</returns>
public RecordingSession CreateRecorderSession(RecorderSettings settings)
{
    var sceneHook = GetSessionHook();
    if (sceneHook == null)
    {
        return null;
    }

    return new RecordingSession
    {
        recorder = RecordersInventory.CreateDefaultRecorder(settings),
        recorderGameObject = sceneHook
    };
}
/// <summary>
/// Releases the native encoder context and stream (when they exist) before
/// ending the recording.
/// </summary>
/// <param name="session">The session being ended.</param>
protected internal override void EndRecording(RecordingSession session)
{
    // Case REC-98 crash gif animation when start/stop recording in playmode
    // If you start recording while in playmode pause the RecordFrame
    // will never be called and m_ctx and m_stream will de-reference null
    // pointers that will crash unity.
    // NOTE(review): the bare 'if (m_ctx)' / 'if (m_stream)' checks rely on an
    // implicit bool conversion on those types — keep them as-is; rewriting to
    // '!= null' may not be equivalent.
    if (m_ctx)
    {
        m_ctx.Release();
    }
    if (m_stream)
    {
        m_stream.Release();
    }
    base.EndRecording(session);
}
/// <summary>
/// Starts the recording: disables async shader compilation for the duration of
/// the recording (restored in EndRecording) and prepares the output directory.
/// </summary>
/// <param name="session">The session being started.</param>
/// <returns>False when the base class refuses to start; true otherwise.</returns>
protected override bool BeginRecording(RecordingSession session)
{
    var started = base.BeginRecording(session);
    if (!started)
    {
        return false;
    }

    // Capture the async shader compilation setting so EndRecording can restore it,
    // then turn it off while recording.
    m_asyncShaderCompileSetting = EditorSettings.asyncShaderCompilation;
    EditorSettings.asyncShaderCompilation = false;

    Settings.FileNameGenerator.CreateDirectory(session);

#if OIIO_AVAILABLE
    // A dummy file name is enough for OIIO to pick the writer for this extension.
    m_imgOutput = ImageOutput.create("dummy." + m_Settings.extension);
#endif

    return true;
}
/// <summary>
/// Records one frame; input 0 is video (handled by the base class), input 1 is audio.
/// Frames are dropped silently when the recording never started properly.
/// </summary>
/// <param name="session">The active recording session.</param>
/// <exception cref="Exception">Thrown when the input count is not exactly 2.</exception>
protected internal override void RecordFrame(RecordingSession session)
{
    if (m_Inputs.Count != 2)
    {
        throw new Exception("Unsupported number of sources");
    }

    // The error was already reported in BeginRecording(); nothing to record.
    if (!m_RecordingStartedProperly)
    {
        return;
    }

    base.RecordFrame(session);

    // Audio is skipped while accumulation capture is active, or when the user disabled it.
    var audioInput = (AudioInput)m_Inputs[1];
    var keepAudio = audioInput.audioSettings.PreserveAudio
        && !UnityHelpers.CaptureAccumulation(settings);
    if (keepAudio)
    {
        Settings.m_EncoderManager.AddSamples(m_EncoderHandle, audioInput.mainBuffer);
    }
}
/// <inheritdoc/>
protected internal override bool BeginRecording(RecordingSession session)
{
    if (!base.BeginRecording(session))
    {
        return false;
    }
#if HDRP_ACCUM_API
    // When HDRP is the active pipeline and this recorder supports accumulation,
    // configure the pipeline's sub-frame (accumulation) recording.
    var hdPipeline = RenderPipelineManager.currentPipeline as HDRenderPipeline;
    if (hdPipeline != null)
    {
        if (settings.IsAccumulationSupported() && settings is IAccumulation accumulation)
        {
            AccumulationSettings aSettings = accumulation.GetAccumulationSettings();
            if (aSettings != null && aSettings.CaptureAccumulation)
            {
                // NOTE(review): the inner 'aSettings != null' re-check below is
                // redundant — it is already guaranteed by the enclosing condition.
                if (aSettings != null && aSettings.ShutterType == AccumulationSettings.ShutterProfileType.Range)
                {
                    // Range-based shutter: open/close given as explicit points of the interval.
                    hdPipeline.BeginRecording(
                        aSettings.Samples,
                        aSettings.ShutterInterval,
                        aSettings.ShutterFullyOpen,
                        aSettings.ShutterBeginsClosing
                    );
                }
                else
                {
                    // Curve-based shutter profile.
                    hdPipeline.BeginRecording(
                        aSettings.Samples,
                        aSettings.ShutterInterval,
                        aSettings.ShutterProfileCurve
                    );
                }
            }
        }
    }
#endif
    // Prefer asynchronous GPU readback when supported; reset per-session state.
    UseAsyncGPUReadback = SystemInfo.supportsAsyncGPUReadback;
    m_OngoingAsyncGPURequestsCount = 0;
    m_DelayedEncoderDispose = false;
    return true;
}
/// <summary>
/// Starts the recording; refuses to start when any animation input lacks a
/// valid GameObjectRecorder.
/// </summary>
/// <param name="session">The session being started.</param>
/// <returns>True when recording can proceed, false otherwise.</returns>
protected internal override bool BeginRecording(RecordingSession session)
{
    if (!base.BeginRecording(session))
    {
        return false;
    }

    // Every input must have a recorder attached; otherwise abort the session.
    for (var i = 0; i < m_Inputs.Count; i++)
    {
        var animationInput = (AnimationInput)m_Inputs[i];
        if (animationInput.GameObjectRecorder == null)
        {
            Recording = false;
            return false;
        }
    }

    return true;
}