/// <summary>
/// Encodes one frame of video (from input 0) and, when enabled, one frame of
/// audio samples (from input 1) into the movie file.
/// </summary>
/// <param name="session">Current recording session (unused directly here).</param>
/// <exception cref="Exception">Thrown when the input count is not exactly 2.</exception>
public override void RecordFrame(RecordingSession session)
{
    // Expect exactly: [0] = video source, [1] = audio source.
    if (m_Inputs.Count != 2)
    {
        throw new Exception("Unsupported number of sources");
    }

    if (m_Inputs[0] is ScreenCaptureInput)
    {
        // Screen capture already provides a CPU-side Texture2D ready to encode.
        // FIX: removed the dead width/height stores the original made in this
        // branch — those values were only ever used by the render-texture path.
        var input = (ScreenCaptureInput)m_Inputs[0];
        m_Encoder.AddFrame(input.image);
    }
    else
    {
        // Render-texture sources need a GPU -> CPU read-back before encoding.
        var input = (BaseRenderTextureInput)m_Inputs[0];
        var width = input.outputWidth;
        var height = input.outputHeight;

        // Lazily allocate the read-back texture on first use.
        if (!m_ReadBackTexture)
        {
            m_ReadBackTexture = new Texture2D(width, height, TextureFormat.RGBA32, false);
        }

        // ReadPixels reads from RenderTexture.active; save and restore it so we
        // don't disturb whatever the caller had bound.
        var backupActive = RenderTexture.active;
        RenderTexture.active = input.outputRT;
        m_ReadBackTexture.ReadPixels(new Rect(0, 0, width, height), 0, 0, false);
        m_Encoder.AddFrame(m_ReadBackTexture);
        RenderTexture.active = backupActive;
    }

    var audioInput = (AudioInput)m_Inputs[1];
    if (!audioInput.audioSettings.m_PreserveAudio)
    {
        return;
    }

#if RECORD_AUDIO_MIXERS
    // Feed each per-mixer-group WAV writer its own buffer.
    for (int n = 0; n < m_WavWriters.Length; n++)
    {
        if (m_WavWriters[n] != null)
        {
            m_WavWriters[n].Feed(audioInput.mixerGroupAudioBuffer(n));
        }
    }
#endif

    m_Encoder.AddSamples(audioInput.mainBuffer);
}
/// <summary>
/// Records one frame: delegates video encoding to the base class, then appends
/// the frame's audio samples when audio preservation is enabled.
/// </summary>
/// <param name="session">Current recording session, forwarded to the base.</param>
/// <exception cref="Exception">Thrown when the input count is not exactly 2.</exception>
public override void RecordFrame(RecordingSession session)
{
    // Exactly two inputs are expected: [0] = video, [1] = audio.
    if (m_Inputs.Count != 2)
    {
        throw new Exception("Unsupported number of sources");
    }

    base.RecordFrame(session);

    var audio = (AudioInput)m_Inputs[1];
    if (!audio.audioSettings.preserveAudio)
    {
        return;
    }

    m_Encoder.AddSamples(audio.mainBuffer);
}
/// <summary>
/// Reads back the current render-texture frame to a CPU texture, encodes it,
/// and appends the frame's audio samples when audio preservation is enabled.
/// </summary>
/// <param name="session">Current recording session (unused directly here).</param>
/// <exception cref="Exception">Thrown when the input count is not exactly 2.</exception>
public override void RecordFrame(RecordingSession session)
{
    // Exactly two inputs are expected: [0] = video, [1] = audio.
    if (m_Inputs.Count != 2)
    {
        throw new Exception("Unsupported number of sources");
    }

    var source = (BaseRenderTextureInput)m_Inputs[0];
    var frameWidth = source.outputWidth;
    var frameHeight = source.outputHeight;

    if (Verbose.enabled)
    {
        Debug.Log(string.Format(
            "MovieRecorder.RecordFrame {0} x {1} (wanted: {2} x {3})",
            source.outputRT.width, source.outputRT.height, frameWidth, frameHeight));
    }

    // Lazily allocate the read-back texture on first use.
    if (!m_ReadBackTexture)
    {
        m_ReadBackTexture = new Texture2D(frameWidth, frameHeight, TextureFormat.RGBA32, false);
    }

    // ReadPixels reads from RenderTexture.active; save/restore the binding so
    // the caller's state is untouched.
    var previousActive = RenderTexture.active;
    RenderTexture.active = source.outputRT;
    m_ReadBackTexture.ReadPixels(new Rect(0, 0, frameWidth, frameHeight), 0, 0, false);
    m_Encoder.AddFrame(m_ReadBackTexture);
    RenderTexture.active = previousActive;

    var audio = (AudioInput)m_Inputs[1];
    if (!audio.audioSettings.m_PreserveAudio)
    {
        return;
    }

#if RECORD_AUDIO_MIXERS
    // Feed each per-mixer-group WAV writer its own buffer.
    for (int n = 0; n < m_WavWriters.Length; n++)
    {
        if (m_WavWriters[n] != null)
        {
            m_WavWriters[n].Feed(audio.mixerGroupAudioBuffer(n));
        }
    }
#endif

    m_Encoder.AddSamples(audio.mainBuffer);
}
/// <summary>
/// Encodes the captured image list plus the recorded audio clip into a movie
/// file, then stops the microphone.
/// </summary>
public void MakeVideo()
{
    // Pull the entire recorded clip once; per-frame slices are copied below.
    float[] soundArray;
    sounds.GetData(soundArray = new float[sounds.samples], 0);

    using (var encoder = new MediaEncoder(encodedFilePath, videoAttrs, audioAttrs))
    using (var audioBuffer = new NativeArray<float>(sampleFramesPerVideoFrame, Allocator.Temp))
    {
        for (int i = 0; i < images.Count; ++i)
        {
            encoder.AddFrame(images[i]);

            // BUG FIX: the original called sounds.GetData(audioBuffer.ToArray(), 0),
            // which fills a throwaway managed copy — the NativeArray handed to the
            // encoder stayed silent, and the same (empty) buffer was re-added every
            // frame. Copy this frame's slice of the clip directly into the buffer.
            int start = i * sampleFramesPerVideoFrame;
            int count = Math.Min(sampleFramesPerVideoFrame, Math.Max(0, soundArray.Length - start));
            if (count > 0)
            {
                NativeArray<float>.Copy(soundArray, start, audioBuffer, 0, count);
            }
            // NOTE(review): when the clip runs out (count < frame size) the tail of
            // audioBuffer keeps stale samples — consider zero-filling if audible.
            encoder.AddSamples(audioBuffer);
        }
    }

    Microphone.End("");
}
/// <summary>
/// Appends the next captured video frame (and, on supported Unity versions,
/// its audio samples) to the encoder, then advances the frame counter.
/// </summary>
public void AddFrame()
{
    // Nothing to do unless a capture session is active.
    if (encoder == null || !isRecording)
    {
        return;
    }

    encoder.AddFrame(videoInput.GetFrame());
    videoInput.EndFrame();

#if UNITY_2018_1_OR_NEWER
    if (recorder.recordAudio)
    {
        audioInput.StartFrame();
        encoder.AddSamples(audioInput.GetBuffer());
        audioInput.EndFrame();
    }
#endif

    currentFrame++;
}
/// <summary>
/// Coroutine that waits for end of frame, then encodes one video frame and
/// (when enabled) the matching audio samples.
/// </summary>
IEnumerator RecordFrame()
{
    // Capture only after all rendering for this frame has finished.
    yield return new WaitForEndOfFrame();

    if (encoder == null || !isRecording)
    {
        yield break;
    }

    // Audio frame brackets the video add, mirroring the input's start/end protocol.
    if (recorder.recordAudio)
    {
        audioInput.StartFrame();
    }

    encoder.AddFrame(videoInput.GetFrame());
    videoInput.EndFrame();

    if (recorder.recordAudio)
    {
        encoder.AddSamples(audioInput.GetBuffer());
        audioInput.EndFrame();
    }
}
/// <summary>
/// Sample/demo: encodes 100 frames of a blank 320x200 texture plus silent
/// stereo audio into an MP4 in the system temp directory.
/// </summary>
static public void RecordMovie()
{
    var video = new VideoTrackAttributes
    {
        frameRate = new MediaRational(50),
        width = 320,
        height = 200,
        includeAlpha = false
    };

    var audio = new AudioTrackAttributes
    {
        sampleRate = new MediaRational(48000),
        channelCount = 2,
        language = "fr"
    };

    // Interleaved sample frames needed per video frame: channels * rate / fps.
    int samplesPerFrame = audio.channelCount * audio.sampleRate.numerator / video.frameRate.numerator;

    var outputPath = Path.Combine(Path.GetTempPath(), "my_movie.mp4");
    var frameTexture = new Texture2D((int)video.width, (int)video.height, TextureFormat.RGBA32, false);

    using (var encoder = new MediaEncoder(outputPath, video, audio))
    using (var sampleBuffer = new NativeArray<float>(samplesPerFrame, Allocator.Temp))
    {
        for (int frame = 0; frame < 100; ++frame)
        {
            // Fill 'frameTexture' with the video content to be encoded for this frame.
            // ...
            encoder.AddFrame(frameTexture);

            // Fill 'sampleBuffer' with the audio content to be encoded for this frame.
            // ...
            encoder.AddSamples(sampleBuffer);
        }
    }
}
/// <summary>
/// Encodes one video frame — directly for game-view captures, via a CPU
/// read-back for render-texture sources — then appends audio samples when
/// audio preservation is enabled.
/// </summary>
/// <param name="session">Current recording session (unused directly here).</param>
/// <exception cref="Exception">Thrown when the input count is not exactly 2.</exception>
public override void RecordFrame(RecordingSession session)
{
    // Exactly two inputs are expected: [0] = video, [1] = audio.
    if (m_Inputs.Count != 2)
    {
        throw new Exception("Unsupported number of sources");
    }

    if (m_Inputs[0] is GameViewInput)
    {
        // Game-view capture already supplies a CPU-side texture.
        var gameView = (GameViewInput)m_Inputs[0];
        m_Encoder.AddFrame(gameView.image);
    }
    else
    {
        // Render-texture sources need a GPU -> CPU read-back first.
        var source = (BaseRenderTextureInput)m_Inputs[0];
        var w = source.outputWidth;
        var h = source.outputHeight;

        // Lazily allocate the read-back texture on first use.
        if (!m_ReadBackTexture)
        {
            m_ReadBackTexture = new Texture2D(w, h, TextureFormat.RGBA32, false);
        }

        // ReadPixels reads from RenderTexture.active; save/restore the binding.
        var previousActive = RenderTexture.active;
        RenderTexture.active = source.outputRT;
        m_ReadBackTexture.ReadPixels(new Rect(0, 0, w, h), 0, 0, false);
        m_Encoder.AddFrame(m_ReadBackTexture);
        RenderTexture.active = previousActive;
    }

    var audio = (AudioInput)m_Inputs[1];
    if (!audio.audioSettings.preserveAudio)
    {
        return;
    }

    m_Encoder.AddSamples(audio.mainBuffer);
}
// Coroutine that records RecordTime seconds of the TIC-80 screen texture plus
// audio captured through AudioRenderer into an MP4, then reveals the file.
// NOTE(review): uses AudioRenderer global capture state — assumes nothing else
// is driving AudioRenderer concurrently; confirm against callers.
private IEnumerator recordCoroutine() { var videoAttr = new VideoTrackAttributes { frameRate = new MediaRational(FramesPerSecond), width = Tic80Config.WIDTH, height = Tic80Config.HEIGHT, includeAlpha = false }; var audioAttr = new AudioTrackAttributes { sampleRate = new MediaRational(SoundFreq), channelCount = 2, language = "en" }; int sampleFramesPerVideoFrame = audioAttr.channelCount * audioAttr.sampleRate.numerator / videoAttr.frameRate.numerator; var tex = View.Instance.GetScreenTexture(); var frames = RecordTime * FramesPerSecond; float deltaTime = (float)1 / FramesPerSecond; AudioRenderer.Start(); using (var encoder = new MediaEncoder(encodedFilePath, videoAttr, audioAttr)) using (var audioBuffer = new NativeArray <float> (sampleFramesPerVideoFrame, Allocator.Persistent)) { for (int i = 0; i < frames; ++i) { encoder.AddFrame(tex); AudioRenderer.Render(audioBuffer); encoder.AddSamples(audioBuffer); /* NOTE(review): yielding a raw float is treated by Unity like yield return null (waits one frame, the value is ignored) — if real-time pacing at deltaTime was intended, this does not do that; confirm intent before changing. */ yield return(deltaTime); } } AudioRenderer.Stop(); Debug.Log("Video saved to: " + encodedFilePath); EditorUtility.RevealInFinder(encodedFilePath); }