public void StartRecord() {
    var videoAttr = new VideoTrackAttributes {
        frameRate = new MediaRational(30),
        width = (uint)recordTexture.width,
        height = (uint)recordTexture.height,
        includeAlpha = false
    };
    var audioAttr = new AudioTrackAttributes {
        sampleRate = new MediaRational(48000),
        channelCount = 2,
        language = "jp"
    };

    var time = DateTime.Now;
    var encodedFilePath = Path.Combine(Path.GetTempPath(),
        time.Year.ToString() + time.Month.ToString() + time.Day.ToString() +
        time.Hour.ToString() + time.Minute.ToString() + time.Second.ToString() + ".mp4");
    Debug.Log(encodedFilePath);

    mediaEncoder = new MediaEncoder(encodedFilePath, videoAttr, audioAttr);
    isRecording = true;
    StartCoroutine(Record());
}
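The Record() coroutine started above is not part of this example. A minimal sketch of what it might look like, assuming recordTexture is a RenderTexture and mediaEncoder/isRecording are the fields set in StartRecord(); everything else here is hypothetical:

IEnumerator Record() {
    // Readable buffer we copy each frame into before handing it to the encoder.
    var frame = new Texture2D(recordTexture.width, recordTexture.height, TextureFormat.RGBA32, false);
    while (isRecording) {
        yield return new WaitForEndOfFrame();
        // Copy the current contents of recordTexture into the readable Texture2D.
        RenderTexture.active = recordTexture;
        frame.ReadPixels(new Rect(0, 0, recordTexture.width, recordTexture.height), 0, 0);
        frame.Apply();
        RenderTexture.active = null;
        mediaEncoder.AddFrame(frame);
    }
    // Finalize the file once recording stops.
    mediaEncoder.Dispose();
}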
public void Construct(string path, List<IMediaEncoderAttribute> attributes) {
    VideoTrackAttributes vAttr = new VideoTrackAttributes();
    List<AudioTrackAttributes> aAttrs = new List<AudioTrackAttributes>();
    int nVideoTracks = 0;
    foreach (var a in attributes) {
        Type t = a.GetType();
        if (t == typeof(VideoTrackMediaEncoderAttribute)) {
            nVideoTracks++;
            var vmAttr = (VideoTrackMediaEncoderAttribute)a;
            vAttr = vmAttr.Value;
        } else if (t == typeof(AudioTrackMediaEncoderAttribute)) {
            var amAttr = (AudioTrackMediaEncoderAttribute)a;
            aAttrs.Add(amAttr.Value);
        }
    }
    Debug.Assert(nVideoTracks > 0, "No video track");
    if (aAttrs.Count == 0) {
        Construct(path, vAttr);
    } else {
        Construct(path, vAttr, aAttrs[0]);
    }
}
private void BeginMediaEncoderRecording() {
    InitInputs();

    if (recorder.realTime) {
        Application.targetFrameRate = recorder.frameRate;
    } else {
        Time.captureFramerate = recorder.frameRate;
    }

    // Configure encoder
    AudioSpeakerMode speakerMode = AudioSettings.speakerMode;
    audioAttrs = new AudioTrackAttributes {
        sampleRate = new MediaRational {
            numerator = AudioSettings.outputSampleRate,
            denominator = 1
        },
        channelCount = (ushort)speakerMode,
        language = ""
    };

    videoInput.BeginRecording();

    videoAttrs = new VideoTrackAttributes {
        frameRate = new MediaRational(recorder.frameRate),
        width = (uint)videoInput.outputWidth,
        height = (uint)videoInput.outputHeight,
        includeAlpha = false
    };

    encodedFilePath = Path.Combine(outputPath, GetFileName());
    Debug.Log("[VimeoRecorder] Recording to " + GetFileName());

    if (!recorder.realTime) {
        recorder.recordAudio = false;
    }

    if (recorder.recordAudio) {
#if UNITY_2018_1_OR_NEWER
        audioInput.BeginRecording();
        encoder = new UnityEditor.Media.MediaEncoder(encodedFilePath, videoAttrs, audioAttrs);
#else
        encoder = new UnityEditor.Media.MediaEncoder(encodedFilePath, videoAttrs);
#endif
    } else {
        encoder = new UnityEditor.Media.MediaEncoder(encodedFilePath, videoAttrs);
    }
}
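Note that casting AudioSettings.speakerMode to ushort uses the enum's numeric value, which matches the real channel count only for Mono and Stereo. A hedged sketch of an explicit mapping (the helper name is not from the original and is only illustrative):

static ushort ChannelCountForSpeakerMode(AudioSpeakerMode mode) {
    switch (mode) {
        case AudioSpeakerMode.Mono:        return 1;
        case AudioSpeakerMode.Stereo:      return 2;
        case AudioSpeakerMode.Quad:        return 4;
        case AudioSpeakerMode.Surround:    return 5;
        case AudioSpeakerMode.Mode5point1: return 6;
        case AudioSpeakerMode.Mode7point1: return 8;
        case AudioSpeakerMode.Prologic:    return 2;  // Pro Logic is encoded as stereo
        default:                           return 2;
    }
}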
void Construct(string path, VideoTrackAttributes vAttr, AudioTrackAttributes[] aAttr) {
    CoreMediaEncoderLog("Construct()");
    if (Encoder.IsCreated) {
        throw new InvalidOperationException("CoreMediaEncoder already instantiated");
    }
    Encoder.Target = new MediaEncoder(path, vAttr, aAttr);
}
private void Start() {
    _defaultBackground = background.texture;
    WebCamDevice[] devices = WebCamTexture.devices;

    //Loop Over Cameras and use the last webcam, if it is front facing.
    if (devices.Length == 0) {
        Debug.Log("No camera could be found");
        _camAvailable = false;
        return;
    }
    for (int i = 0; i < devices.Length; i++) {
        if (devices[i].isFrontFacing) {
            _frontCam = new WebCamTexture(devices[i].name, Screen.width, Screen.height);
        }
    }
    if (_frontCam == null) {
        Debug.Log("No front camera found");
        return;
    }

    //Startup Webcam and save texture onto the RawImage GameObject
    _frontCam.Play();
    background.texture = _frontCam;

    //Path for the finished mp4 file
    _filename = string.Format("TestVideo_{0}.mp4", System.DateTime.Now.ToFileTime());
    _filepath = System.IO.Path.Combine(System.IO.Path.GetTempPath(), _filename);
    _filepath = _filepath.Replace("/", @"\");

    //Setup the Video Attributes for the Media Encoder
    Debug.Log("setting up");
    _videoTrackAttributes = new VideoTrackAttributes();
    _videoTrackAttributes.width = (uint)_frontCam.width;
    _videoTrackAttributes.height = (uint)_frontCam.height;
    _currentTexture = new Texture2D(_frontCam.width, _frontCam.height);
    _videoTrackAttributes.frameRate = new MediaRational(30);
    _videoTrackAttributes.includeAlpha = false;
    _mediaEncoder = new MediaEncoder(_filepath, _videoTrackAttributes);
    Debug.Log("Camera is setup");
    _camAvailable = true;
}
static public void EncodeVideo() {
    Debug.Log("Encode Start");
    VideoInfo info = VideoInfo.videoInfo;
    VideoTrackAttributes videoAttr = new VideoTrackAttributes {
        frameRate = new MediaRational(info.FrameRate),
        width = info.width,
        height = info.height,
        includeAlpha = info.includeAlpha
    };
    AudioTrackAttributes audioAttr = new AudioTrackAttributes {
        sampleRate = new MediaRational(48000),
        channelCount = 2,
        language = "fr"
    };
    int sampleFramesPerVideoFrame = audioAttr.channelCount * audioAttr.sampleRate.numerator / videoAttr.frameRate.numerator;
    string encodedFilePath = Path.Combine(Application.dataPath + "/Resources/ConvertVideo", "my_movie.mp4");
    Texture2D tex = new Texture2D((int)videoAttr.width, (int)videoAttr.height, TextureFormat.RGBA32, false);

    using (MediaEncoder encoder = new MediaEncoder(encodedFilePath, videoAttr, audioAttr))
    using (NativeArray<float> audioBuffer = new NativeArray<float>(sampleFramesPerVideoFrame, Allocator.Temp)) {
        foreach (Texture2D _tex in info.TexList) {
            encoder.AddFrame(_tex);
        }
        //for (int i = 0; i < info.TotalFrameCount; ++i)
        //{
        //    // Fill 'tex' with the video content to be encoded into the file for this frame.
        //    // ...
        //    // Fill 'audioBuffer' with the audio content to be encoded into the file for this frame.
        //    // ...
        //    encoder.AddSamples(audioBuffer);
        //}
    }
}
IEnumerator CreateVideo(string path, List<byte[]> textureList, string chara, Text progressTxt) {
    progressTxt.gameObject.SetActive(true);
    VideoTrackAttributes videoAttributes = new VideoTrackAttributes {
        frameRate = new MediaRational(90),
        width = 320,
        height = 240,
        includeAlpha = false
    };
    string filePath = Path.Combine(path, "eye_vid" + chara + ".mp4");
    using (MediaEncoder encoder = new MediaEncoder(filePath, videoAttributes)) {
        for (int i = 0; i < textureList.Count; i++) {
            Texture2D tex = new Texture2D(320, 240, TextureFormat.RGBA32, false);
            tex.LoadImage(textureList[i]);
            Texture2D newTex = new Texture2D(320, 240, TextureFormat.RGBA32, false);
            newTex.SetPixels(tex.GetPixels());
            newTex.Apply();
            encoder.AddFrame(newTex);
            if (i % 100 == 0) {
                Resources.UnloadUnusedAssets();
                System.GC.Collect();
            }
            if (i % 10 == 0) {
                progressTxt.text = "Writing " + chara + " : " + System.Math.Round(((float)i / (float)textureList.Count) * 100, 2) + "%";
            }
            yield return new WaitForEndOfFrame();
        }
        // The using block disposes the encoder and finalizes the file; no explicit Dispose() call is needed here.
    }
    Debug.Log("Finished!");
    textureList.Clear();
    Resources.UnloadUnusedAssets();
    System.GC.Collect();
    progressTxt.gameObject.SetActive(false);
}
static public void RecordMovie() {
    var videoAttr = new VideoTrackAttributes {
        frameRate = new MediaRational(50),
        width = 320,
        height = 200,
        includeAlpha = false
    };
    var audioAttr = new AudioTrackAttributes {
        sampleRate = new MediaRational(48000),
        channelCount = 2,
        language = "fr"
    };
    int sampleFramesPerVideoFrame = audioAttr.channelCount * audioAttr.sampleRate.numerator / videoAttr.frameRate.numerator;
    var encodedFilePath = Path.Combine(Path.GetTempPath(), "my_movie.mp4");
    Texture2D tex = new Texture2D((int)videoAttr.width, (int)videoAttr.height, TextureFormat.RGBA32, false);

    using (var encoder = new MediaEncoder(encodedFilePath, videoAttr, audioAttr))
    using (var audioBuffer = new NativeArray<float>(sampleFramesPerVideoFrame, Allocator.Temp)) {
        for (int i = 0; i < 100; ++i) {
            // Fill 'tex' with the video content to be encoded into the file for this frame.
            // ...
            encoder.AddFrame(tex);

            // Fill 'audioBuffer' with the audio content to be encoded into the file for this frame.
            // ...
            encoder.AddSamples(audioBuffer);
        }
    }
}
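These examples omit their using directives. A minimal sketch of the namespaces the types above come from (MediaEncoder and the track attribute structs are editor-only, so this code belongs in an Editor assembly):

using System.IO;           // Path
using Unity.Collections;   // NativeArray<float>, Allocator
using UnityEditor.Media;   // MediaEncoder, VideoTrackAttributes, AudioTrackAttributes, MediaRational
using UnityEngine;         // Texture2D, TextureFormat, Debug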
private IEnumerator recordCoroutine() {
    var videoAttr = new VideoTrackAttributes {
        frameRate = new MediaRational(FramesPerSecond),
        width = Tic80Config.WIDTH,
        height = Tic80Config.HEIGHT,
        includeAlpha = false
    };
    var audioAttr = new AudioTrackAttributes {
        sampleRate = new MediaRational(SoundFreq),
        channelCount = 2,
        language = "en"
    };
    int sampleFramesPerVideoFrame = audioAttr.channelCount * audioAttr.sampleRate.numerator / videoAttr.frameRate.numerator;

    var tex = View.Instance.GetScreenTexture();
    var frames = RecordTime * FramesPerSecond;
    float deltaTime = (float)1 / FramesPerSecond;

    AudioRenderer.Start();
    using (var encoder = new MediaEncoder(encodedFilePath, videoAttr, audioAttr))
    using (var audioBuffer = new NativeArray<float>(sampleFramesPerVideoFrame, Allocator.Persistent)) {
        for (int i = 0; i < frames; ++i) {
            encoder.AddFrame(tex);
            AudioRenderer.Render(audioBuffer);
            encoder.AddSamples(audioBuffer);
            // Yielding a raw float only waits a single frame; wait for the frame interval instead.
            yield return new WaitForSeconds(deltaTime);
        }
    }
    AudioRenderer.Stop();

    Debug.Log("Video saved to: " + encodedFilePath);
    EditorUtility.RevealInFinder(encodedFilePath);
}
public void Construct(string path, VideoTrackAttributes vAttr, NativeArray<AudioTrackAttributes> aAttr) {
    Construct(path, vAttr, aAttr.ToArray());
}
public void Construct(MediaEncoderHandle handle, string path, VideoTrackAttributes vAttr, NativeArray<AudioTrackAttributes> aAttr) {
    DisposeCheck(handle);
    m_Encoders[handle.m_VersionHandle.Index].m_encoderInterface.Construct(path, vAttr, aAttr);
}
public VideoTrackMediaEncoderAttribute(string pname, VideoTrackAttributes vAttr) { name = pname; Value = vAttr; }
/// <summary>
/// Sets up
/// recording object(s),
/// recording framerate,
/// folders
/// </summary>
public void BeginRecording() {
    // only allow recording in edit mode
#if !UNITY_EDITOR
    return;
#endif
#pragma warning disable CS0162 // Suppress unreachable code warning

    // in all cases: regulate the passage of time
    // Time.captureFramerate fixes Update() calls such that Update() effectively only gets called
    // after enough simulated playback time has passed to warrant a new frame
    // forcing captureFramerate = 60 ensures that the correct video framerate AND duration are made
    // without Unity time at 60, 24 fps recordings are 2.5x longer than they should be
    Time.captureFramerate = frameRate;
    isActive = true;

    // in all cases, we will need RenderTextures from LeiaCamera
    if (leia_cam == null) {
        leia_cam = transform.GetComponent<LeiaCamera>();

        // if LeiaCamera has clear flag "Solid color" in PNG format with a weak alpha, background pixels will be dimmed by alpha
        if ((leia_cam.Camera.clearFlags == CameraClearFlags.Color || leia_cam.Camera.clearFlags == CameraClearFlags.SolidColor) &&
            recordingFormat.ToString().Equals("png") &&
            leia_cam.Camera.backgroundColor.a < 1.0f) {
            LogUtil.Log(LogLevel.Warning, "When recording in format {0} from {1} with clear flag {2} and background {3}:\n\tBackground pixels will be dimmed by alpha channel of color {3}",
                recordingFormat, leia_cam, leia_cam.Camera.clearFlags, leia_cam.Camera.backgroundColor);
        }
    }

    if (leia_cam != null && leia_cam.GetView(0) != null && leia_cam.GetView(0).TargetTexture != null) {
        RenderTexture view_prime = leia_cam.GetView(0).TargetTexture;
        cols = Mathf.FloorToInt(Mathf.Sqrt(leia_cam.GetViewCount()));
        rows = (cols == 0 ? 0 : leia_cam.GetViewCount() / cols);
        record_w = view_prime.width * cols;
        record_h = view_prime.height * rows;

        views = new RenderTexture[leia_cam.GetViewCount()];
        for (int i = 0; i < leia_cam.GetViewCount(); i++) {
            views[i] = leia_cam.GetView(i).TargetTexture;
        }
    }

    System.DateTime currTime = System.DateTime.Now;
    folderPath = Path.Combine(Application.streamingAssetsPath,
        string.Format("{0:D3}_{1:D2}_{2:D2}_{3:D2}", currTime.DayOfYear, currTime.Hour, currTime.Minute, currTime.Second));
    Directory.CreateDirectory(folderPath);

    // if png/jpg
    // no additional behavior

    // if mp4
#if UNITY_EDITOR && UNITY_2017_3_OR_NEWER
    if (recordingFormat.ToString().Equals("mp4")) {
        VideoTrackAttributes videoAttr = new VideoTrackAttributes() {
            frameRate = new MediaRational(frameRate),
            width = (uint)record_w,
            height = (uint)record_h,
            includeAlpha = false
        };

        string vid_name = string.Format("recording_{0}x{1}.{2}", cols, rows, recordingFormat.ToString());
        encoder = new MediaEncoder(Path.Combine(folderPath, vid_name), videoAttr);
    }
#endif
#pragma warning restore CS0162 // Suppress unreachable code warning
}
public override bool BeginRecording(RecordingSession session) {
    if (!base.BeginRecording(session)) {
        return false;
    }

    try {
        m_Settings.fileNameGenerator.CreateDirectory(session);
    } catch (Exception) {
        Debug.LogError(string.Format("Movie recorder output directory \"{0}\" could not be created.", m_Settings.fileNameGenerator.BuildAbsolutePath(session)));
        return false;
    }

    var input = m_Inputs[0] as BaseRenderTextureInput;
    if (input == null) {
        Debug.LogError("MediaRecorder could not find input.");
        return false;
    }

    int width = input.outputWidth;
    int height = input.outputHeight;
    if (width <= 0 || height <= 0) {
        Debug.LogError(string.Format("MovieRecorder got invalid input resolution {0} x {1}.", width, height));
        return false;
    }

    if (m_Settings.outputFormat == VideoRecorderOutputFormat.MP4) {
        if (width > 4096 || height > 4096) {
            Debug.LogWarning(string.Format("Mp4 format might not support resolutions bigger than 4096. Current resolution: {0} x {1}.", width, height));
        }
        if (width % 2 != 0 || height % 2 != 0) {
            Debug.LogError(string.Format("Mp4 format does not support odd values in resolution. Current resolution: {0} x {1}.", width, height));
            return false;
        }
    }

    var imageInputSettings = m_Inputs[0].settings as ImageInputSettings;
    var includeAlphaFromTexture = imageInputSettings != null && imageInputSettings.supportsTransparent && imageInputSettings.allowTransparency;
    if (includeAlphaFromTexture && m_Settings.outputFormat == VideoRecorderOutputFormat.MP4) {
        Debug.LogWarning("Mp4 format does not support alpha.");
        includeAlphaFromTexture = false;
    }

    var videoAttrs = new VideoTrackAttributes {
        frameRate = RationalFromDouble(session.settings.frameRate),
        width = (uint)width,
        height = (uint)height,
        includeAlpha = includeAlphaFromTexture,
        bitRateMode = m_Settings.videoBitRateMode
    };

    if (Options.verboseMode) {
        Debug.Log(string.Format("MovieRecorder starting to write video {0}x{1}@[{2}/{3}] fps into {4}",
            width, height, videoAttrs.frameRate.numerator, videoAttrs.frameRate.denominator,
            m_Settings.fileNameGenerator.BuildAbsolutePath(session)));
    }

    var audioInput = (AudioInput)m_Inputs[1];
    var audioAttrsList = new List<AudioTrackAttributes>();

    if (audioInput.audioSettings.preserveAudio) {
        var audioAttrs = new AudioTrackAttributes {
            sampleRate = new MediaRational { numerator = audioInput.sampleRate, denominator = 1 },
            channelCount = audioInput.channelCount,
            language = ""
        };
        audioAttrsList.Add(audioAttrs);

        if (Options.verboseMode) {
            Debug.Log(string.Format("MovieRecorder starting to write audio {0}ch @ {1}Hz", audioAttrs.channelCount, audioAttrs.sampleRate.numerator));
        }
    } else {
        if (Options.verboseMode) {
            Debug.Log("MovieRecorder starting with no audio.");
        }
    }

    try {
        var path = m_Settings.fileNameGenerator.BuildAbsolutePath(session);
        m_Encoder = new MediaEncoder(path, videoAttrs, audioAttrsList.ToArray());
        return true;
    } catch {
        if (Options.verboseMode) {
            Debug.LogError("MovieRecorder unable to create MovieEncoder.");
        }
    }
    return false;
}
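The RationalFromDouble helper used by these recorder examples is not shown here. A hedged sketch of one possible implementation (a simple fixed-denominator approximation, not the Unity Recorder's actual algorithm):

static MediaRational RationalFromDouble(double value) {
    const int denominator = 1000;
    int numerator = (int)System.Math.Round(value * denominator);
    // Reduce by the greatest common divisor so e.g. 60.0 becomes 60/1 and 29.97 becomes 2997/100.
    int a = numerator, b = denominator;
    while (b != 0) { int t = a % b; a = b; b = t; }
    int gcd = System.Math.Max(a, 1);
    return new MediaRational { numerator = numerator / gcd, denominator = denominator / gcd };
}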
void OnApplicationQuit() {
    if (!Record) {
        return;
    }
    if (!Directory.Exists(MoviePath)) {
        Debug.LogError(MoviePath + " does not exist. Cannot save output");
        return;
    }

    Debug.Log("Saving recorded frames to disk...");

    string filePathMP4 = "";
    string filePathDir = "";
    for (int i = 0; i < 9999; ++i) {
        filePathMP4 = Path.Combine(MoviePath, "capture_" + i.ToString("0000") + ".mp4");
        filePathDir = Path.Combine(MoviePath, "capture_" + i.ToString("0000"));
        if (!File.Exists(filePathMP4) && !Directory.Exists(filePathDir)) {
            break;
        }
    }

    var textures = new List<Texture2D>();
    foreach (var frame in m_Frames) {
        RenderTexture.active = frame;
        var tex = new Texture2D(Width, Height, TextureFormat.RGBA32, false);
        tex.ReadPixels(new Rect(0, 0, Width, Height), 0, 0);
        tex.Apply();
        textures.Add(tex);
    }
    RenderTexture.active = null;

#if UNITY_EDITOR
    if (MakeMP4InEditor) {
        VideoTrackAttributes videoAttr = new VideoTrackAttributes {
            frameRate = new MediaRational((int)(GenlockRate + 0.5)),
            width = (uint)Width,
            height = (uint)Height,
            includeAlpha = false,
            bitRateMode = UnityEditor.VideoBitrateMode.High
        };

        using (var encoder = new MediaEncoder(filePathMP4, videoAttr)) {
            foreach (var tex in textures) {
                encoder.AddFrame(tex);
            }
        }

        Debug.Log("Recorded " + m_Frames.Count + " frames to " + filePathMP4);
    } else
#else
    if (MakeMP4InEditor) {
        Debug.Log("Cannot encode MP4 outside of Editor");
    }
#endif
    {
        int f = 0;
        Directory.CreateDirectory(filePathDir);
        foreach (var tex in textures) {
            byte[] bytes = tex.EncodeToJPG();
            File.WriteAllBytes(Path.Combine(filePathDir, "frame_" + (f++).ToString("0000") + ".jpg"), bytes);
        }
        Debug.Log("Recorded " + m_Frames.Count + " frames to " + Path.Combine(filePathDir, "frame_XXXX.jpg"));
    }
}
protected internal override bool BeginRecording(RecordingSession session) {
    if (!base.BeginRecording(session)) {
        return false;
    }

    try {
        Settings.fileNameGenerator.CreateDirectory(session);
    } catch (Exception) {
        Debug.LogError(string.Format("Movie recorder output directory \"{0}\" could not be created.", Settings.fileNameGenerator.BuildAbsolutePath(session)));
        return false;
    }

    var input = m_Inputs[0] as BaseRenderTextureInput;
    if (input == null) {
        Debug.LogError("MediaRecorder could not find input.");
        return false;
    }

    int width = input.OutputWidth;
    int height = input.OutputHeight;
    if (width <= 0 || height <= 0) {
        Debug.LogError(string.Format("MovieRecorder got invalid input resolution {0} x {1}.", width, height));
        return false;
    }

    if (Settings.OutputFormat == MovieRecorderSettings.VideoRecorderOutputFormat.MP4) {
        if (width > 4096 || height > 4096) {
            Debug.LogWarning(string.Format("Mp4 format might not support resolutions bigger than 4096. Current resolution: {0} x {1}.", width, height));
        }
        if (width % 2 != 0 || height % 2 != 0) {
            Debug.LogError(string.Format("Mp4 format does not support odd values in resolution. Current resolution: {0} x {1}.", width, height));
            return false;
        }
    }

    var imageInputSettings = m_Inputs[0].settings as ImageInputSettings;
    var includeAlphaFromTexture = imageInputSettings != null && imageInputSettings.SupportsTransparent && imageInputSettings.AllowTransparency;
    if (includeAlphaFromTexture && Settings.OutputFormat == MovieRecorderSettings.VideoRecorderOutputFormat.MP4) {
        Debug.LogWarning("Mp4 format does not support alpha.");
        includeAlphaFromTexture = false;
    }

    var videoAttrs = new VideoTrackAttributes {
        frameRate = RationalFromDouble(session.settings.FrameRate),
        width = (uint)width,
        height = (uint)height,
        includeAlpha = includeAlphaFromTexture,
        bitRateMode = Settings.VideoBitRateMode
    };

    if (RecorderOptions.VerboseMode) {
        Debug.Log(string.Format("MovieRecorder starting to write video {0}x{1}@[{2}/{3}] fps into {4}",
            width, height, videoAttrs.frameRate.numerator, videoAttrs.frameRate.denominator,
            Settings.fileNameGenerator.BuildAbsolutePath(session)));
    }

    var audioInput = (AudioInput)m_Inputs[1];
    var audioAttrsList = new List<AudioTrackAttributes>();

    if (audioInput.audioSettings.PreserveAudio) {
#if UNITY_EDITOR_OSX
        // Special case with WebM and audio on older Apple computers: deactivate async GPU readback because there
        // is a risk of not respecting the WebM standard and receiving audio frames out of sync (see "monotonically
        // increasing timestamps"). This happens only with Target Cameras.
        if (m_Inputs[0].settings is CameraInputSettings && Settings.OutputFormat == MovieRecorderSettings.VideoRecorderOutputFormat.WebM) {
            UseAsyncGPUReadback = false;
        }
#endif
        var audioAttrs = new AudioTrackAttributes {
            sampleRate = new MediaRational { numerator = audioInput.sampleRate, denominator = 1 },
            channelCount = audioInput.channelCount,
            language = ""
        };
        audioAttrsList.Add(audioAttrs);

        if (RecorderOptions.VerboseMode) {
            Debug.Log(string.Format("MovieRecorder starting to write audio {0}ch @ {1}Hz", audioAttrs.channelCount, audioAttrs.sampleRate.numerator));
        }
    } else {
        if (RecorderOptions.VerboseMode) {
            Debug.Log("MovieRecorder starting with no audio.");
        }
    }

    try {
        var path = Settings.fileNameGenerator.BuildAbsolutePath(session);
        m_Encoder = new MediaEncoder(path, videoAttrs, audioAttrsList.ToArray());
        return true;
    } catch {
        if (RecorderOptions.VerboseMode) {
            Debug.LogError("MovieRecorder unable to create MovieEncoder.");
        }
    }
    return false;
}
public override bool BeginRecording(RecordingSession session) {
    if (!base.BeginRecording(session)) {
        return false;
    }

    try {
        m_Settings.m_DestinationPath.CreateDirectory();
    } catch (Exception) {
        Debug.LogError(string.Format("Movie recorder output directory \"{0}\" could not be created.", m_Settings.m_DestinationPath.GetFullPath()));
        return false;
    }

    var input = (BaseRenderTextureInput)m_Inputs[0];
    if (input == null) {
        if (Verbose.enabled) {
            Debug.Log("MediaRecorder could not find input.");
        }
        return false;
    }

    var width = input.outputWidth;
    var height = input.outputHeight;
    if (width <= 0 || height <= 0) {
        if (Verbose.enabled) {
            Debug.Log(string.Format("MovieRecorder got invalid input resolution {0} x {1}.", width, height));
        }
        return false;
    }

    var cbRenderTextureInput = input as CBRenderTextureInput;
    bool includeAlphaFromTexture = cbRenderTextureInput != null && cbRenderTextureInput.cbSettings.m_AllowTransparency;
    if (includeAlphaFromTexture && m_Settings.m_OutputFormat == MediaRecorderOutputFormat.MP4) {
        Debug.LogWarning("Mp4 format does not support alpha.");
        includeAlphaFromTexture = false;
    }

    var videoAttrs = new VideoTrackAttributes() {
        frameRate = RationalFromDouble(session.settings.m_FrameRate),
        width = (uint)width,
        height = (uint)height,
        includeAlpha = includeAlphaFromTexture
    };

    if (Verbose.enabled) {
        Debug.Log(string.Format("MovieRecorder starting to write video {0}x{1}@[{2}/{3}] fps into {4}",
            width, height, videoAttrs.frameRate.numerator, videoAttrs.frameRate.denominator,
            m_Settings.m_DestinationPath.GetFullPath()));
    }

    var audioInput = (AudioInput)m_Inputs[1];
    var audioAttrsList = new List<UnityEditor.Media.AudioTrackAttributes>();
    var audioAttrs = new UnityEditor.Media.AudioTrackAttributes() {
        sampleRate = new MediaRational { numerator = audioInput.sampleRate, denominator = 1 },
        channelCount = audioInput.channelCount,
        language = ""
    };
    audioAttrsList.Add(audioAttrs);

    if (Verbose.enabled) {
        Debug.Log(string.Format("MovieRecorder starting to write audio {0}ch @ {1}Hz", audioAttrs.channelCount, audioAttrs.sampleRate.numerator));
    }

#if RECORD_AUDIO_MIXERS
    var audioSettings = input.audioSettings;
    m_WavWriters = new WavWriter[audioSettings.m_AudioMixerGroups.Length];
    for (int n = 0; n < m_WavWriters.Length; n++) {
        if (audioSettings.m_AudioMixerGroups[n].m_MixerGroup == null) {
            continue;
        }
        var path = Path.Combine(m_Settings.m_DestinationPath, "recording of " + audioSettings.m_AudioMixerGroups[n].m_MixerGroup.name + ".wav");
        if (Verbose.enabled) {
            Debug.Log("Starting wav recording into file " + path);
        }
        m_WavWriters[n].Start(path);
    }
#endif

    try {
        var fileName = m_Settings.m_BaseFileName.BuildFileName(session, recordedFramesCount, width, height, m_Settings.m_OutputFormat.ToString().ToLower());
        var path = m_Settings.m_DestinationPath.GetFullPath() + "/" + fileName;
        m_Encoder = new UnityEditor.Media.MediaEncoder(path, videoAttrs, audioAttrsList.ToArray());
        return true;
    } catch {
        if (Verbose.enabled) {
            Debug.LogError("MovieRecorder unable to create MovieEncoder.");
        }
    }
    return false;
}
protected internal override bool BeginRecording(RecordingSession session) {
    m_RecordingStartedProperly = false;
    if (!base.BeginRecording(session)) {
        return false;
    }

    try {
        Settings.fileNameGenerator.CreateDirectory(session);
    } catch (Exception) {
        Debug.LogError(string.Format("Movie recorder output directory \"{0}\" could not be created.", Settings.fileNameGenerator.BuildAbsolutePath(session)));
        return false;
    }

    var input = m_Inputs[0] as BaseRenderTextureInput;
    if (input == null) {
        Debug.LogError("MediaRecorder could not find input.");
        return false;
    }

    int width = input.OutputWidth;
    int height = input.OutputHeight;
    if (width <= 0 || height <= 0) {
        Debug.LogError(string.Format("MovieRecorder got invalid input resolution {0} x {1}.", width, height));
        return false;
    }

    var currentEncoderReg = Settings.GetCurrentEncoder();
    string errorMessage;
    if (!currentEncoderReg.SupportsResolution(Settings, width, height, out errorMessage)) {
        Debug.LogError(errorMessage);
        return false;
    }

    var imageInputSettings = m_Inputs[0].settings as ImageInputSettings;
    var alphaWillBeInImage = imageInputSettings != null && imageInputSettings.SupportsTransparent && imageInputSettings.RecordTransparency;
    if (alphaWillBeInImage && !currentEncoderReg.SupportsTransparency(Settings, out errorMessage)) {
        Debug.LogError(errorMessage);
        return false;
    }

    // In variable frame rate mode, we set the encoder to the frame rate of the current display.
    m_FrameRate = RationalFromDouble(
        session.settings.FrameRatePlayback == FrameRatePlayback.Variable
            ? GameHarness.DisplayFPSTarget
            : session.settings.FrameRate);

    var videoAttrs = new VideoTrackAttributes {
        width = (uint)width,
        height = (uint)height,
        frameRate = m_FrameRate,
        includeAlpha = alphaWillBeInImage,
        bitRateMode = Settings.VideoBitRateMode
    };

    Debug.Log($"(UnityRecorder/MovieRecorder) Encoding video " +
        $"{width}x{height}@[{videoAttrs.frameRate.numerator}/{videoAttrs.frameRate.denominator}] fps into " +
        $"{Settings.fileNameGenerator.BuildAbsolutePath(session)}");

    var audioInput = (AudioInputBase)m_Inputs[1];
    var audioAttrsList = new List<AudioTrackAttributes>();

    if (audioInput.audioSettings.PreserveAudio) {
#if UNITY_EDITOR_OSX
        // Special case with WebM and audio on older Apple computers: deactivate async GPU readback because there
        // is a risk of not respecting the WebM standard and receiving audio frames out of sync (see "monotonically
        // increasing timestamps"). This happens only with Target Cameras.
        if (m_Inputs[0].settings is CameraInputSettings && Settings.OutputFormat == VideoRecorderOutputFormat.WebM) {
            UseAsyncGPUReadback = false;
        }
#endif
        var audioAttrs = new AudioTrackAttributes {
            sampleRate = new MediaRational { numerator = audioInput.sampleRate, denominator = 1 },
            channelCount = audioInput.channelCount,
            language = ""
        };
        audioAttrsList.Add(audioAttrs);

        if (RecorderOptions.VerboseMode) {
            Debug.Log(string.Format("MovieRecorder starting to write audio {0}ch @ {1}Hz", audioAttrs.channelCount, audioAttrs.sampleRate.numerator));
        }
    } else {
        if (RecorderOptions.VerboseMode) {
            Debug.Log("MovieRecorder starting with no audio.");
        }
    }

    try {
        var path = Settings.fileNameGenerator.BuildAbsolutePath(session);

        // If an encoder already exists, destroy it
        Settings.DestroyIfExists(m_EncoderHandle);

        // Get the currently selected encoder register and create an encoder
        m_EncoderHandle = currentEncoderReg.Register(Settings.m_EncoderManager);

        // Create the list of attributes for the encoder: Video, Audio and preset
        // TODO: Query the list of attributes from the encoder attributes
        var attr = new List<IMediaEncoderAttribute>();
        attr.Add(new VideoTrackMediaEncoderAttribute("VideoAttributes", videoAttrs));

        if (audioInput.audioSettings.PreserveAudio) {
            if (audioAttrsList.Count > 0) {
                attr.Add(new AudioTrackMediaEncoderAttribute("AudioAttributes", audioAttrsList.ToArray()[0]));
            }
        }

        attr.Add(new IntAttribute(AttributeLabels[MovieRecorderSettingsAttributes.CodecFormat], Settings.encoderPresetSelected));
        attr.Add(new IntAttribute(AttributeLabels[MovieRecorderSettingsAttributes.ColorDefinition], Settings.encoderColorDefinitionSelected));

        if (Settings.encoderPresetSelectedName == "Custom") {
            // For custom
            attr.Add(new StringAttribute(AttributeLabels[MovieRecorderSettingsAttributes.CustomOptions], Settings.encoderCustomOptions));
        }

        // Construct the encoder given the list of attributes
        Settings.m_EncoderManager.Construct(m_EncoderHandle, path, attr);

        s_ConcurrentCount++;
        m_RecordingStartedProperly = true;
        return true;
    } catch (Exception ex) {
        Debug.LogError("MovieRecorder unable to create MovieEncoder. " + ex.Message);
        return false;
    }
}
IEnumerator playVideo() {
    rend = GetComponent<MeshRenderer>();
    videoPlayer = gameObject.AddComponent<VideoPlayer>();
    audioSource = gameObject.AddComponent<AudioSource>();

    //Disable Play on Awake for both Video and Audio
    videoPlayer.playOnAwake = false;
    audioSource.playOnAwake = false;

    videoPlayer.source = VideoSource.VideoClip;
    videoPlayer.audioOutputMode = VideoAudioOutputMode.AudioSource;
    videoPlayer.EnableAudioTrack(0, true);
    videoPlayer.SetTargetAudioSource(0, audioSource);

    //Set video To Play then prepare Audio to prevent Buffering
    videoPlayer.clip = videoToPlay;
    videoPlayer.Prepare();

    //Wait until video is prepared
    while (!videoPlayer.isPrepared) {
        yield return null;
    }

    //Assign the Texture from Video to Material texture
    tex = videoPlayer.texture;
    rend.material.mainTexture = tex;

    videoPlayer.sendFrameReadyEvents = true;
    videoPlayer.frameReady += OnNewFrame;

    videoPlayer.Play();
    audioSource.Play();

    while (videoPlayer.isPlaying) {
        Debug.LogWarning("Video Time: " + Mathf.FloorToInt((float)videoPlayer.time));
        Debug.Log("Playing Video");
        yield return null;
    }

    //while (texList.Count != (int)videoPlayer.frameCount)
    //{
    //    yield return null;
    //}

    //int i = 0;
    //foreach(Texture2D tex in texList)
    //{
    //    i++;
    //    if(i > 447 && i < 765)
    //    {
    //        byte[] bytes;
    //        bytes = tex.EncodeToPNG();
    //        string filePath = Application.dataPath + "/Resources/ConvertImages/";
    //        string fileName = filePath + i.ToString() + ".png";
    //        System.IO.File.WriteAllBytes(fileName, bytes);
    //        AssetDatabase.ImportAsset(fileName);
    //        yield return null;
    //    }
    //}

    Debug.Log("Done Playing Video");

    ///////////////////////////////////////////
    // Process the distorted images
    ///////////////////////////////////////////
    VideoTrackAttributes videoAttr = new VideoTrackAttributes {
        frameRate = new MediaRational((int)videoPlayer.frameRate),
        width = videoPlayer.width,
        height = videoPlayer.height,
    };
    AudioTrackAttributes audioAttr = new AudioTrackAttributes {
        sampleRate = new MediaRational(48000),
        channelCount = 2,
        language = "fr"
    };
    int sampleFramesPerVideoFrame = audioAttr.channelCount * audioAttr.sampleRate.numerator / videoAttr.frameRate.numerator;

    // Output path for the generated video
    string encodedFilePath = Path.Combine(Application.dataPath + "/Resources/ConvertVideo", "my_movie.mp4");
    MediaEncoder encoder = new MediaEncoder(encodedFilePath, videoAttr, audioAttr);
    for (int i = 0; i < texList.Count; ++i) {
        Debug.Log("Encoding tex num " + (i + 1) + " / " + texList.Count);
        encoder.AddFrame(texList[i]);
        yield return null;
    }
    encoder.Dispose();
    Debug.Log("Convert To Video Complete");
}
public void BeginRecording() { Debug.Log("VimeoRecorder: BeginRecording()"); isRecording = true; _camera = GetComponent <Camera>(); encodedFilePath = Path.Combine(outputPath, "test-recording.mp4"); Debug.Log(encodedFilePath); // Setup shader/material/quad if (shaderCopy == null) { shaderCopy = Shader.Find("Hidden/FrameRecorder/CopyFrameBuffer"); } if (matCopy == null) { matCopy = new Material(shaderCopy); } if (fullscreenQuad == null) { fullscreenQuad = VimeoRecorder.CreateFullscreenQuad(); } // Get Camera data and prepare to send to buffer int captureWidth = (_camera.pixelWidth + 1) & ~1; int captureHeight = (_camera.pixelHeight + 1) & ~1; renderBuffer = new RenderTexture(captureWidth, captureHeight, 0); renderBuffer.wrapMode = TextureWrapMode.Repeat; renderBuffer.Create(); Debug.Log("WxH: " + captureWidth + "x" + captureHeight); // Configure encoder videoAttrs = new VideoTrackAttributes { frameRate = new MediaRational(40), width = (uint)captureWidth, height = (uint)captureHeight, includeAlpha = false }; audioAttrs = new AudioTrackAttributes { sampleRate = new MediaRational(48000), channelCount = 2, language = "en" }; encoder = new MediaEncoder(encodedFilePath, videoAttrs, audioAttrs); //sampleFramesPerVideoFrame = audioAttrs.channelCount * audioAttrs.sampleRate.numerator / videoAttrs.frameRate.numerator; //audioBuffer = new NativeArray<float>(sampleFramesPerVideoFrame, Allocator.Temp); // Setup the command buffer // TODO: Support RenderTexture int tid = Shader.PropertyToID("_TmpFrameBuffer"); commandBuffer = new CommandBuffer(); commandBuffer.name = "VimeoRecorder: copy frame buffer"; commandBuffer.GetTemporaryRT(tid, -1, -1, 0, FilterMode.Bilinear); commandBuffer.Blit(BuiltinRenderTextureType.CurrentActive, tid); commandBuffer.SetRenderTarget(renderBuffer); commandBuffer.DrawMesh(fullscreenQuad, Matrix4x4.identity, matCopy, 0, 0); commandBuffer.ReleaseTemporaryRT(tid); _camera.AddCommandBuffer(CameraEvent.AfterEverything, commandBuffer); }
IEnumerator playVideo() {
    Debug.Log(Application.dataPath);
    rend = GetComponent<Renderer>();
    videoPlayer = gameObject.AddComponent<VideoPlayer>();
    audioSource = gameObject.AddComponent<AudioSource>();

    //Disable Play on Awake for both Video and Audio
    videoPlayer.playOnAwake = false;
    audioSource.playOnAwake = false;

    videoPlayer.source = VideoSource.VideoClip;
    videoPlayer.audioOutputMode = VideoAudioOutputMode.AudioSource;
    videoPlayer.EnableAudioTrack(0, true);
    videoPlayer.SetTargetAudioSource(0, audioSource);

    //Set video To Play then prepare Audio to prevent Buffering
    videoPlayer.clip = videoToPlay;
    videoPlayer.Prepare();

    //Wait until video is prepared
    while (!videoPlayer.isPrepared) {
        yield return null;
    }

    //Assign the Texture from Video to Material texture
    tex = videoPlayer.texture;
    rend.material.mainTexture = tex;

    videoPlayer.sendFrameReadyEvents = true;
    videoPlayer.frameReady += OnNewFrame;

    videoPlayer.Play();
    audioSource.Play();
    Debug.Log("Playing Video");

    while (texList.Count != (int)videoPlayer.frameCount) {
        yield return null;
    }
    Debug.Log("Done Playing Video");

    ///////////////////////////////////////////
    // Process the distorted images
    ///////////////////////////////////////////
    VideoTrackAttributes videoAttr = new VideoTrackAttributes {
        frameRate = new MediaRational((int)videoPlayer.frameRate),
        width = videoPlayer.width,
        height = videoPlayer.height,
    };
    AudioTrackAttributes audioAttr = new AudioTrackAttributes {
        sampleRate = new MediaRational(48000),
        channelCount = 2,
        language = "fr"
    };
    int sampleFramesPerVideoFrame = audioAttr.channelCount * audioAttr.sampleRate.numerator / videoAttr.frameRate.numerator;

    // Output path for the generated video
    string encodedFilePath = Path.Combine(Application.dataPath + "/Resources/ConvertVideo", "my_movie.mp4");
    MediaEncoder encoder = new MediaEncoder(encodedFilePath, videoAttr, audioAttr);
    for (int i = 0; i < texList.Count; ++i) {
        Debug.Log("Encoding tex num " + (i + 1) + " / " + texList.Count);
        encoder.AddFrame(texList[i]);
        yield return null;
    }
    encoder.Dispose();
    Debug.Log("Convert To Video Complete");
}
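The OnNewFrame handler and the texList it fills are referenced above but not shown. A hedged sketch of one plausible handler: it blits the player's current frame into a temporary RenderTexture, reads it back into a Texture2D, and stores it for the encoding pass (the body is an assumption; only the names OnNewFrame and texList come from the example):

List<Texture2D> texList = new List<Texture2D>();

void OnNewFrame(VideoPlayer source, long frameIdx) {
    Texture frameTex = source.texture;

    // Blit into a temporary RenderTexture so the frame can be read back on the CPU.
    RenderTexture tmp = RenderTexture.GetTemporary(frameTex.width, frameTex.height, 0, RenderTextureFormat.ARGB32);
    Graphics.Blit(frameTex, tmp);

    var frame = new Texture2D(frameTex.width, frameTex.height, TextureFormat.RGBA32, false);
    RenderTexture.active = tmp;
    frame.ReadPixels(new Rect(0, 0, frameTex.width, frameTex.height), 0, 0);
    frame.Apply();
    RenderTexture.active = null;
    RenderTexture.ReleaseTemporary(tmp);

    texList.Add(frame);
}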
public void Construct(string path, VideoTrackAttributes vAttr, AudioTrackAttributes aAttr) { Construct(path, vAttr, new[] { aAttr }); }
public void Construct(string path, VideoTrackAttributes vAttr) { Construct(path, vAttr, new AudioTrackAttributes[0]); }
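A hedged sketch of how the attribute-based Construct overload above might be called from user code, mirroring the recorder example earlier. The attribute names "VideoAttributes" and "AudioAttributes" follow that example; the coreMediaEncoder instance, output path, and resolution values are hypothetical:

var videoAttrs = new VideoTrackAttributes {
    frameRate = new MediaRational(30),
    width = 1920,
    height = 1080,
    includeAlpha = false
};
var audioAttrs = new AudioTrackAttributes {
    sampleRate = new MediaRational(48000),
    channelCount = 2,
    language = ""
};

// Build the list consumed by Construct(path, List<IMediaEncoderAttribute>).
var attributes = new List<IMediaEncoderAttribute> {
    new VideoTrackMediaEncoderAttribute("VideoAttributes", videoAttrs),
    new AudioTrackMediaEncoderAttribute("AudioAttributes", audioAttrs)
};
coreMediaEncoder.Construct("Recordings/movie.mp4", attributes);  // 'coreMediaEncoder' is illustrative only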