/// <summary>
/// Captures the single render-texture input for the current frame and writes it to
/// disk as a PNG, JPEG or EXR image, named from the session and frame counter.
/// </summary>
/// <param name="session">The active recording session (provides naming context).</param>
/// <exception cref="Exception">Thrown when the recorder has other than exactly one input.</exception>
/// <exception cref="ArgumentOutOfRangeException">Thrown for an unknown output format.</exception>
public override void RecordFrame(RecordingSession session)
{
    if (m_Inputs.Count != 1)
    {
        throw new Exception("Unsupported number of sources");
    }

    var input = (BaseRenderTextureInput)m_Inputs[0];
    var width = input.outputRT.width;
    var height = input.outputRT.height;
    // EXR needs a float texture to preserve HDR data; the 8-bit formats use RGBA32.
    var texFormat = m_Settings.m_OutputFormat == PNGRecordeOutputFormat.EXR
        ? TextureFormat.RGBAFloat
        : TextureFormat.RGBA32;
    var tex = new Texture2D(width, height, texFormat, false);

    var backupActive = RenderTexture.active;
    try
    {
        // Read the GPU render target back into the CPU-side texture.
        RenderTexture.active = input.outputRT;
        tex.ReadPixels(new Rect(0, 0, width, height), 0, 0);
        tex.Apply();
    }
    finally
    {
        // Always restore the previously active render target, even if readback throws.
        RenderTexture.active = backupActive;
    }

    byte[] bytes;
    string ext;
    try
    {
        switch (m_Settings.m_OutputFormat)
        {
        case PNGRecordeOutputFormat.PNG:
            bytes = tex.EncodeToPNG();
            ext = "png";
            break;

        case PNGRecordeOutputFormat.JPEG:
            bytes = tex.EncodeToJPG();
            ext = "jpg";
            break;

        case PNGRecordeOutputFormat.EXR:
            bytes = tex.EncodeToEXR();
            ext = "exr";
            break;

        default:
            throw new ArgumentOutOfRangeException();
        }
    }
    finally
    {
        // Destroy the temporary texture even when encoding fails, so it cannot leak.
        UnityHelpers.Destroy(tex);
    }

    var fileName = m_Settings.m_BaseFileName.BuildFileName(session, recordedFramesCount, width, height, ext);
    var path = Path.Combine(m_Settings.m_DestinationPath.GetFullPath(), fileName);
    File.WriteAllBytes(path, bytes);
}
/// <summary>
/// Marks the recorder as recording; fails if it is already running.
/// </summary>
/// <param name="session">The recording session being started (unused by the base class).</param>
/// <returns>Always true on success; the recorder is now in the recording state.</returns>
/// <exception cref="Exception">Thrown when recording is already in progress.</exception>
public virtual bool BeginRecording(RecordingSession session)
{
    if (recording)
    {
        throw new Exception("Already recording!");
    }

    if (Verbose.enabled)
    {
        Debug.Log(string.Format("Recorder {0} starting to record", GetType().Name));
    }

    recording = true;
    return recording;
}
/// <summary>
/// Unity lifecycle hook: tears down the active session (if any) and, in the editor,
/// optionally leaves play mode when auto-exit is requested.
/// </summary>
public void OnDisable()
{
    if (session == null)
    {
        return;
    }

    session.Dispose();
    session = null;
#if UNITY_EDITOR
    if (autoExitPlayMode)
    {
        UnityEditor.EditorApplication.isPlaying = false;
    }
#endif
}
/// <summary>
/// Finalizes the recording: releases the media encoder and, when output went under
/// the project's Assets folder, asks the AssetDatabase to pick up the new file.
/// </summary>
/// <param name="session">The session being ended; forwarded to the base implementation.</param>
public override void EndRecording(RecordingSession session)
{
    base.EndRecording(session);

    var encoder = m_Encoder;
    if (encoder != null)
    {
        encoder.Dispose();
        m_Encoder = null;
    }

    // When adding a file to Unity's assets directory, trigger a refresh so it is detected.
    if (m_Settings.m_DestinationPath.root == OutputPath.ERoot.AssetsPath)
    {
        AssetDatabase.Refresh();
    }
}
/// <summary>
/// Reads the current video frame back from the texture input, pushes it to the
/// encoder, then (when enabled) appends the captured audio samples.
/// Expects exactly two inputs: [0] the render-texture source, [1] the audio source.
/// </summary>
/// <param name="session">The active recording session (unused directly here).</param>
/// <exception cref="Exception">Thrown when the input count is not exactly two.</exception>
public override void RecordFrame(RecordingSession session)
{
    if (m_Inputs.Count != 2)
    {
        throw new Exception("Unsupported number of sources");
    }

    var texSource = (BaseRenderTextureInput)m_Inputs[0];
    var frameWidth = texSource.outputWidth;
    var frameHeight = texSource.outputHeight;

    if (Verbose.enabled)
    {
        Debug.Log(string.Format("MovieRecorder.RecordFrame {0} x {1} (wanted: {2} x {3})", texSource.outputRT.width, texSource.outputRT.height, frameWidth, frameHeight));
    }

    // Lazily allocate the CPU-side readback texture on first use.
    if (!m_ReadBackTexture)
    {
        m_ReadBackTexture = new Texture2D(frameWidth, frameHeight, TextureFormat.RGBA32, false);
    }

    var previousActive = RenderTexture.active;
    RenderTexture.active = texSource.outputRT;
    m_ReadBackTexture.ReadPixels(new Rect(0, 0, frameWidth, frameHeight), 0, 0, false);
    m_Encoder.AddFrame(m_ReadBackTexture);
    RenderTexture.active = previousActive;

    var audioSource = (AudioInput)m_Inputs[1];
    if (!audioSource.audioSettings.m_PreserveAudio)
    {
        return;
    }

#if RECORD_AUDIO_MIXERS
    // Feed each configured mixer-group writer with its captured buffer.
    for (int n = 0; n < m_WavWriters.Length; n++)
    {
        if (m_WavWriters[n] != null)
        {
            m_WavWriters[n].Feed(audioSource.mixerGroupAudioBuffer(n));
        }
    }
#endif

    m_Encoder.AddSamples(audioSource.mainBuffer);
}
/// <summary>
/// Called when a recording session is created: lets the settings self-adjust,
/// installs a fixed capture frame rate when requested, instantiates the recorder
/// inputs from their settings, and signals the SessionCreated stage to them.
/// </summary>
/// <param name="session">The freshly created recording session.</param>
public virtual void SessionCreated(RecordingSession session)
{
    if (Verbose.enabled)
    {
        Debug.Log(string.Format("Recorder {0} session created", GetType().Name));
    }

    settings.SelfAdjustSettings(); // ignore return value.

    var fixedRate = settings.m_FrameRateMode == FrameRateMode.Constant ? (int)settings.m_FrameRate : 0;
    if (fixedRate > 0)
    {
        // Warn if another component already forced a different capture frame rate;
        // ours wins either way.
        if (Time.captureFramerate != 0 && fixedRate != Time.captureFramerate)
        {
            Debug.LogError(string.Format("Recorder {0} is set to record at a fixed rate and another component has already set a conflicting value for [Time.captureFramerate], new value being applied : {1}!", GetType().Name, fixedRate));
        }
        else if (Time.captureFramerate == 0 && Verbose.enabled)
        {
            Debug.Log("Frame recorder set fixed frame rate to " + fixedRate);
        }
        Time.captureFramerate = (int)fixedRate;

        sm_CaptureFrameRateCount++;
        m_ModifiedCaptureFR = true;
    }

    m_Inputs = new List<RecorderInput>();
    foreach (var inputSettings in settings.inputsSettings)
    {
        var input = Activator.CreateInstance(inputSettings.inputType) as RecorderInput;
        if (input == null)
        {
            // Fail loudly instead of with a NullReferenceException on the next line.
            throw new Exception(string.Format("Failed to instantiate recorder input of type {0}", inputSettings.inputType));
        }
        input.settings = inputSettings;
        m_Inputs.Add(input);
    }

    // Signal once, after ALL inputs exist. The original signaled inside the loop,
    // which notified earlier inputs repeatedly (once per remaining iteration).
    SignalInputsOfStage(ERecordingSessionStage.SessionCreated, session);
}
/// <summary>Hook invoked when a new frame begins; base implementation does nothing.</summary>
/// <param name="session">The active recording session.</param>
public virtual void NewFrameStarting(RecordingSession session) { }
/// <summary>Hook invoked when recording begins; base implementation does nothing.</summary>
/// <param name="session">The active recording session.</param>
public virtual void BeginRecording(RecordingSession session) { }
/// <summary>Hook invoked when the session is created; base implementation does nothing.</summary>
/// <param name="session">The freshly created recording session.</param>
public virtual void SessionCreated(RecordingSession session) { }
/// <summary>Hook invoked to prepare for the next frame; base implementation does nothing.</summary>
/// <param name="ctx">The active recording session.</param>
public virtual void PrepareNewFrame(RecordingSession ctx) { }
/// <summary>Captures the current frame. Every concrete recorder must implement this.</summary>
/// <param name="ctx">The active recording session.</param>
public abstract void RecordFrame(RecordingSession ctx);
/// <summary>
/// Starts a movie recording: validates the inputs and resolution, configures the
/// video and audio track attributes, and creates the MediaEncoder for the output file.
/// </summary>
/// <param name="session">The session being started; provides the frame rate and naming context.</param>
/// <returns>True when the encoder was created and recording can proceed; false otherwise.</returns>
public override bool BeginRecording(RecordingSession session)
{
    if (!base.BeginRecording(session))
    {
        return false;
    }

    try
    {
        m_Settings.m_DestinationPath.CreateDirectory();
    }
    catch (Exception)
    {
        Debug.LogError(string.Format("Movie recorder output directory \"{0}\" could not be created.", m_Settings.m_DestinationPath.GetFullPath()));
        return false;
    }

    // Use 'as' so the null guard below is reachable: a direct cast of a wrong-typed
    // element would throw InvalidCastException before the check could run.
    var input = m_Inputs[0] as BaseRenderTextureInput;
    if (input == null)
    {
        if (Verbose.enabled)
        {
            Debug.Log("MediaRecorder could not find input.");
        }
        return false;
    }

    var width = input.outputWidth;
    var height = input.outputHeight;
    if (width <= 0 || height <= 0)
    {
        if (Verbose.enabled)
        {
            Debug.Log(string.Format("MovieRecorder got invalid input resolution {0} x {1}.", width, height));
        }
        return false;
    }

    // Alpha is only carried through when the capture source allows transparency,
    // and MP4 cannot store it at all.
    var cbRenderTextureInput = input as CBRenderTextureInput;
    bool includeAlphaFromTexture = cbRenderTextureInput != null && cbRenderTextureInput.cbSettings.m_AllowTransparency;
    if (includeAlphaFromTexture && m_Settings.m_OutputFormat == MediaRecorderOutputFormat.MP4)
    {
        Debug.LogWarning("Mp4 format does not support alpha.");
        includeAlphaFromTexture = false;
    }

    var videoAttrs = new VideoTrackAttributes()
    {
        frameRate = RationalFromDouble(session.settings.m_FrameRate),
        width = (uint)width,
        height = (uint)height,
        includeAlpha = includeAlphaFromTexture
    };

    if (Verbose.enabled)
    {
        Debug.Log(string.Format("MovieRecorder starting to write video {0}x{1}@[{2}/{3}] fps into {4}", width, height, videoAttrs.frameRate.numerator, videoAttrs.frameRate.denominator, m_Settings.m_DestinationPath.GetFullPath()));
    }

    var audioInput = (AudioInput)m_Inputs[1];
    var audioAttrsList = new List<UnityEditor.Media.AudioTrackAttributes>();
    var audioAttrs = new UnityEditor.Media.AudioTrackAttributes()
    {
        sampleRate = new MediaRational { numerator = audioInput.sampleRate, denominator = 1 },
        channelCount = audioInput.channelCount,
        language = ""
    };
    audioAttrsList.Add(audioAttrs);

    if (Verbose.enabled)
    {
        Debug.Log(string.Format("MovieRecorder starting to write audio {0}ch @ {1}Hz", audioAttrs.channelCount, audioAttrs.sampleRate.numerator));
    }

#if RECORD_AUDIO_MIXERS
    var audioSettings = input.audioSettings;
    m_WavWriters = new WavWriter [audioSettings.m_AudioMixerGroups.Length];
    for (int n = 0; n < m_WavWriters.Length; n++)
    {
        if (audioSettings.m_AudioMixerGroups[n].m_MixerGroup == null)
        {
            continue;
        }
        var path = Path.Combine(
            m_Settings.m_DestinationPath,
            "recording of " + audioSettings.m_AudioMixerGroups[n].m_MixerGroup.name + ".wav");
        if (Verbose.enabled)
        {
            Debug.Log("Starting wav recording into file " + path);
        }
        m_WavWriters[n].Start(path);
    }
#endif

    try
    {
        var fileName = m_Settings.m_BaseFileName.BuildFileName(session, recordedFramesCount, width, height, m_Settings.m_OutputFormat.ToString().ToLower());
        // Path.Combine instead of manual "/" concatenation, consistent with the
        // image recorder's path handling.
        var path = Path.Combine(m_Settings.m_DestinationPath.GetFullPath(), fileName);
        m_Encoder = new UnityEditor.Media.MediaEncoder(path, videoAttrs, audioAttrsList.ToArray());
        return true;
    }
    catch (Exception ex)
    {
        // Include the exception detail instead of silently discarding it.
        if (Verbose.enabled)
        {
            Debug.LogError("MovieRecorder unable to create MovieEncoder. " + ex.Message);
        }
    }
    return false;
}