// <summary>
// Prepares a frame before recording it. Callback is invoked for every frame during the
// recording session, before RecordFrame.
// </summary>
// <param name="ctx">The current recording session.</param>
protected internal override void PrepareNewFrame(RecordingSession ctx)
{
    base.PrepareNewFrame(ctx);

#if HDRP_ACCUM_API
    // When accumulation capture is enabled and HDRP is the active pipeline,
    // advance HDRP's sub-frame state before the frame is recorded.
    if (UnityHelpers.CaptureAccumulation(settings) &&
        RenderPipelineManager.currentPipeline is HDRenderPipeline hdPipeline)
    {
        hdPipeline.PrepareNewSubFrame();
    }
#endif
}
// Draws the source-selection UI for each of the recorder's inputs.
// The audio input row is disabled while accumulation capture is active.
protected override void ImageRenderOptionsGUI()
{
    var recorderSettings = (RecorderSettings)target;

    foreach (var inputSettings in recorderSettings.InputsSettings)
    {
        bool disableRow = inputSettings is AudioInputSettings &&
            UnityHelpers.CaptureAccumulation(recorderSettings);

        using (new EditorGUI.DisabledScope(disableRow))
        {
            var property = GetInputSerializedProperty(serializedObject, inputSettings);
            EditorGUILayout.PropertyField(property, Styles.SourceLabel);
        }
    }
}
/// <summary>
/// Records a single frame, forwarding captured audio samples to the encoder when audio
/// is preserved and accumulation capture is off.
/// </summary>
/// <param name="session">The current recording session.</param>
/// <exception cref="InvalidOperationException">
/// Thrown when the recorder does not have exactly two inputs (video at index 0, audio at index 1).
/// </exception>
protected internal override void RecordFrame(RecordingSession session)
{
    // The movie recorder expects exactly two inputs: [0] video, [1] audio.
    // Was `throw new Exception(...)` — a bare Exception is too broad (CA2201);
    // InvalidOperationException is more specific and still caught by existing catch(Exception) callers.
    if (m_Inputs.Count != 2)
    {
        throw new InvalidOperationException("Unsupported number of sources");
    }

    if (!m_RecordingStartedProperly)
    {
        return; // error will have been triggered in BeginRecording()
    }

    base.RecordFrame(session);

    var audioInput = (AudioInput)m_Inputs[1];
    // Audio samples are not encoded while accumulation (sub-frame) capture is active.
    if (audioInput.audioSettings.PreserveAudio && !UnityHelpers.CaptureAccumulation(settings))
    {
        Settings.m_EncoderManager.AddSamples(m_EncoderHandle, audioInput.mainBuffer);
    }
}
/// <inheritdoc/>
protected internal override void EndRecording(RecordingSession session)
{
    base.EndRecording(session);

#if HDRP_ACCUM_API
    // Tell HDRP that accumulation capture is over so it can stop sub-frame rendering.
    if (UnityHelpers.CaptureAccumulation(settings) &&
        RenderPipelineManager.currentPipeline is HDRenderPipeline hdPipeline)
    {
        hdPipeline.EndRecording();
    }
#endif

    if (m_OngoingAsyncGPURequestsCount <= 0)
    {
        DisposeEncoder();
    }
    else
    {
        // Async GPU readbacks are still in flight: keep the session marked as
        // recording and defer the encoder disposal until they complete.
        Recording = true;
        m_DelayedEncoderDispose = true;
    }
}
/// <summary>
/// Checks the enabled recorders for incompatible configurations.
/// </summary>
/// <returns>A message describing the first conflict found, or null when none is detected.</returns>
string CheckRecordersIncompatibility()
{
    var activeRecorders = m_ControllerSettings.RecorderSettings.Where(r => r.Enabled).ToArray();
    if (activeRecorders.Length == 0)
    {
        return null;
    }

    // Detect output-path collisions.
    // Does not detect all conflicts and might have false positives.
    var outputPaths = new Dictionary<string, RecorderSettings>();
    foreach (var recorder in activeRecorders)
    {
        var path = recorder.fileNameGenerator.BuildAbsolutePath(null);
        // TryGetValue avoids the ContainsKey + indexer double lookup.
        if (outputPaths.TryGetValue(path, out var conflicting))
        {
            return $"Recorders '{conflicting.name}' and '{recorder.name}' might try to save into the same output file.";
        }
        outputPaths.Add(path, recorder);
    }

    // Detect Game View inputs recorded at different resolutions.
    var gameViewRecorders = new Dictionary<ImageHeight, RecorderSettings>();
    foreach (var recorder in activeRecorders)
    {
        var gameView = recorder.InputsSettings.FirstOrDefault(i => i is GameViewInputSettings) as GameViewInputSettings;
        if (gameView == null)
        {
            continue;
        }

        if (gameViewRecorders.Any() && !gameViewRecorders.ContainsKey(gameView.outputImageHeight))
        {
            return $"Recorders '{gameViewRecorders.Values.First().name}' and '{recorder.name}' are recording the Game View using different resolutions. This can lead to unexpected behaviour.";
        }
        gameViewRecorders[gameView.outputImageHeight] = recorder;
    }

    // Accumulation capture is exclusive: a recorder using it must be the only active one.
    // (Equivalent to the original counter-based loop: it returned as soon as the first
    // accumulation recorder was found while more than one recorder was active.)
    foreach (var recorderSetting in activeRecorders)
    {
        if (UnityHelpers.CaptureAccumulation(recorderSetting) && activeRecorders.Length > 1)
        {
            return "You can only use one active Recorder at a time when you capture accumulation.";
        }
    }

    return null;
}
/// <summary>
/// Prepares the recording context.
/// To start recording once you've called this method, you must call <see cref="StartRecording"/>.
/// </summary>
/// <remarks>
/// Sets up the internal data for the recording session and pauses the simulation to ensure a proper synchronization between the Recorder and the Unity Editor.
/// </remarks>
/// <exception cref="Exception">Thrown when called outside of Play mode.</exception>
/// <exception cref="NullReferenceException">Thrown when no controller settings have been assigned.</exception>
public void PrepareRecording()
{
    if (!Application.isPlaying)
    {
        throw new Exception("You can only call the PrepareRecording method in Play mode.");
    }

    if (RecorderOptions.VerboseMode)
    {
        Debug.Log("Prepare Recording.");
    }

    if (m_Settings == null)
    {
        // Fixed message: previously read "Can start recording without prefs".
        throw new NullReferenceException("Cannot start recording without prefs");
    }

    SceneHook.PrepareSessionRoot();

    m_RecordingSessions = new List<RecordingSession>();

    int numberOfSubframeRecorder = 0;
    int numberOfRecorderEnabled = 0;
    foreach (var recorderSetting in m_Settings.RecorderSettings)
    {
        if (recorderSetting == null)
        {
            if (RecorderOptions.VerboseMode)
            {
                Debug.Log("Ignoring unknown recorder.");
            }
            continue;
        }

        m_Settings.ApplyGlobalSetting(recorderSetting);

        if (recorderSetting.HasErrors())
        {
            if (RecorderOptions.VerboseMode)
            {
                Debug.Log("Ignoring invalid recorder '" + recorderSetting.name + "'");
            }
            continue;
        }

        if (!recorderSetting.Enabled)
        {
            if (RecorderOptions.VerboseMode)
            {
                Debug.Log("Ignoring disabled recorder '" + recorderSetting.name + "'");
            }
            continue;
        }

        // Past this point the recorder is necessarily enabled
        // (the original re-checked `recorderSetting.Enabled` redundantly here).
        numberOfRecorderEnabled++;

        // Validate that only one recorder may capture accumulation (sub-frames) at a time;
        // extra accumulation recorders are reported and skipped, not added as sessions.
        if (UnityHelpers.CaptureAccumulation(recorderSetting))
        {
            numberOfSubframeRecorder++;
            if (numberOfSubframeRecorder >= 1 && numberOfRecorderEnabled > 1)
            {
                Debug.LogError("You can only use one active Recorder at a time when you capture accumulation.");
                continue;
            }
        }

        var session = m_SceneHook.CreateRecorderSessionWithRecorderComponent(recorderSetting);
        m_RecordingSessions.Add(session);
    }
}
// Initializes the movie encoder for a new recording session: creates the output directory,
// validates the video input and transparency support, builds the video/audio track attributes,
// and constructs the encoder. Returns false (with Recording reset) on any setup failure.
// Expects m_Inputs[0] to be the video input and m_Inputs[1] the audio input.
protected internal override bool BeginRecording(RecordingSession session)
{
    // Cleared up-front; only set true once the encoder is fully constructed,
    // so RecordFrame can bail out if setup failed.
    m_RecordingStartedProperly = false;
    if (!base.BeginRecording(session))
    {
        return(false);
    }

    // Make sure the output directory exists before the encoder tries to write into it.
    try
    {
        Settings.fileNameGenerator.CreateDirectory(session);
    }
    catch (Exception)
    {
        ConsoleLogMessage($"Unable to create the output directory \"{Settings.fileNameGenerator.BuildAbsolutePath(session)}\".", LogType.Error);
        Recording = false;
        return(false);
    }

    // The video source must be a render-texture input so we can query its output dimensions.
    var input = m_Inputs[0] as BaseRenderTextureInput;
    if (input == null)
    {
        ConsoleLogMessage("Movie Recorder could not find its input.", LogType.Error);
        Recording = false;
        return(false);
    }
    int width = input.OutputWidth;
    int height = input.OutputHeight;

    var currentEncoderReg = Settings.GetCurrentEncoder();
    string errorMessage;
    // Alpha is written only when the input both supports and requests transparency,
    // and the selected encoder must also support it.
    var imageInputSettings = m_Inputs[0].settings as ImageInputSettings;
    var alphaWillBeInImage = imageInputSettings != null && imageInputSettings.SupportsTransparent && imageInputSettings.RecordTransparency;
    if (alphaWillBeInImage && !currentEncoderReg.SupportsTransparency(Settings, out errorMessage))
    {
        ConsoleLogMessage(errorMessage, LogType.Error);
        Recording = false;
        return(false);
    }

    var videoAttrs = new VideoTrackAttributes
    {
        frameRate = RationalFromDouble(session.settings.FrameRate),
        width = (uint)width,
        height = (uint)height,
        includeAlpha = alphaWillBeInImage,
        bitRateMode = Settings.VideoBitRateMode
    };

    if (RecorderOptions.VerboseMode)
    {
        ConsoleLogMessage($"MovieRecorder starting to write video {width}x{height}@[{videoAttrs.frameRate.numerator}/{videoAttrs.frameRate.denominator}] fps into {Settings.fileNameGenerator.BuildAbsolutePath(session)}", LogType.Log);
    }

    var audioInput = (AudioInput)m_Inputs[1];
    var audioAttrsList = new List <AudioTrackAttributes>();

    // Audio track is added only when audio is preserved and accumulation capture is off
    // (same condition RecordFrame uses when feeding samples).
    if (audioInput.audioSettings.PreserveAudio && !UnityHelpers.CaptureAccumulation(settings))
    {
#if UNITY_EDITOR_OSX
        // Special case with WebM and audio on older Apple computers: deactivate async GPU readback
        // because there is a risk of not respecting the WebM standard and receiving audio frames
        // out of sync (see "monotonically increasing timestamps"). This happens only with Target Cameras.
        if (m_Inputs[0].settings is CameraInputSettings && Settings.OutputFormat == VideoRecorderOutputFormat.WebM)
        {
            UseAsyncGPUReadback = false;
        }
#endif
        var audioAttrs = new AudioTrackAttributes
        {
            sampleRate = new MediaRational
            {
                numerator = audioInput.sampleRate,
                denominator = 1
            },
            channelCount = audioInput.channelCount,
            language = ""
        };
        audioAttrsList.Add(audioAttrs);

        if (RecorderOptions.VerboseMode)
        {
            ConsoleLogMessage($"Starting to write audio {audioAttrs.channelCount}ch @ {audioAttrs.sampleRate.numerator}Hz", LogType.Log);
        }
    }
    else
    {
        if (RecorderOptions.VerboseMode)
        {
            ConsoleLogMessage("Starting with no audio.", LogType.Log);
        }
    }

    try
    {
        var path = Settings.fileNameGenerator.BuildAbsolutePath(session);

        // If an encoder already exists, destroy it (e.g. left over from a previous session).
        Settings.DestroyIfExists(m_EncoderHandle);

        // Get the currently selected encoder register and create an encoder.
        m_EncoderHandle = currentEncoderReg.Register(Settings.m_EncoderManager);

        // Create the list of attributes for the encoder: Video, Audio and preset.
        // TODO: Query the list of attributes from the encoder attributes
        var attr = new List <IMediaEncoderAttribute>();
        attr.Add(new VideoTrackMediaEncoderAttribute("VideoAttributes", videoAttrs));

        if (audioInput.audioSettings.PreserveAudio && !UnityHelpers.CaptureAccumulation(settings))
        {
            if (audioAttrsList.Count > 0)
            {
                attr.Add(new AudioTrackMediaEncoderAttribute("AudioAttributes", audioAttrsList.ToArray()[0]));
            }
        }

        attr.Add(new IntAttribute(AttributeLabels[MovieRecorderSettingsAttributes.CodecFormat], Settings.encoderPresetSelected));
        attr.Add(new IntAttribute(AttributeLabels[MovieRecorderSettingsAttributes.ColorDefinition], Settings.encoderColorDefinitionSelected));

        if (Settings.encoderPresetSelectedName == "Custom")
        {
            // Custom preset: forward the user-provided encoder options verbatim.
            attr.Add(new StringAttribute(AttributeLabels[MovieRecorderSettingsAttributes.CustomOptions], Settings.encoderCustomOptions));
        }

        // Construct the encoder given the list of attributes.
        Settings.m_EncoderManager.Construct(m_EncoderHandle, path, attr);

        s_ConcurrentCount++;

        m_RecordingStartedProperly = true;
        m_RecordingAlreadyEnded = false;
        return(true);
    }
    catch (Exception ex)
    {
        ConsoleLogMessage($"Unable to create encoder: '{ex.Message}'", LogType.Error);
        Recording = false;
        return(false);
    }
}