// Finalizes a capture: waits for the encoders to exit, then muxes the audio and
// video streams into the final output file at filePath.
// NOTE(review): when there is no audio track, no mux happens here and
// videoPipe.OutputFile is left in place — presumably a caller handles that case;
// confirm before relying on filePath existing for audio-less captures.
private void JoinFiles(string filePath, FfmpegPipe videoPipe, FfmpegPipe audioPipe) {
  const int kEncoderExitTimeoutMs = 20 * 1000;
  m_isSaving = true;
  try {
    // The encoder process must fully exit before its output file is safe to touch.
    videoPipe.WaitForEncoderExit(kEncoderExitTimeoutMs);
    bool hasAudioTrack = audioPipe.OutputFile.Length > 0;
    if (hasAudioTrack) {
      audioPipe.WaitForEncoderExit(kEncoderExitTimeoutMs);
      bool muxOk = FfmpegPipe.Mux(audioPipe.OutputFile, videoPipe.OutputFile, filePath);
      if (muxOk) {
        // Mux succeeded; the intermediate elementary streams are no longer needed.
        System.IO.File.Delete(videoPipe.OutputFile);
        System.IO.File.Delete(audioPipe.OutputFile);
      }
    }
    m_playbackLoops = 0;
  } catch (System.Exception ex) {
    UnityEngine.Debug.LogException(ex);
  } finally {
    // Always clear the saving flag, even if waiting/muxing threw.
    m_isSaving = false;
  }
}
// Start background threads.
private void Start() {
  // Audio sample buffer; its rate is configured later via SetFps.
  m_audioBuffer = new StereoBuffer();
  // One pipe reads video during playback; two pipes encode (video + audio) during capture.
  m_ffmpegVideoReader = new FfmpegPipe();
  m_ffmpegVideo = new FfmpegPipe();
  m_ffmpegAudio = new FfmpegPipe();
  // Guarantee StopCapture() runs at application exit, even if this behavior is disabled.
  App.Instance.AppExit += OnGuaranteedAppQuit;
}
// Discards a capture: waits for the encoders to exit, then deletes the
// intermediate stream files and the primary output file at filePath.
// Intended to run on a background thread (see StopCapture).
private void RemoveFile(string filePath, FfmpegPipe videoPipe, FfmpegPipe audioPipe) {
  try {
    // The encoder process must fully exit before its output file is safe to delete.
    videoPipe.WaitForEncoderExit(/*ms*/ 20 * 1000);
    System.IO.File.Delete(videoPipe.OutputFile);
    if (audioPipe.OutputFile.Length > 0) {
      audioPipe.WaitForEncoderExit(/*ms*/ 20 * 1000);
      System.IO.File.Delete(audioPipe.OutputFile);
    }
    // Remove the primary file last, to avoid file collisions with the next recording.
    // BUG FIX: this delete used to be inside the audio-track branch above, so
    // audio-less captures left the primary file behind. File.Delete is a no-op
    // when the file does not exist, so deleting unconditionally is safe.
    System.IO.File.Delete(filePath);
  } catch (System.Exception e) {
    UnityEngine.Debug.LogException(e);
    // Clear the flag so the recorder does not appear stuck in a saving state.
    m_isSaving = false;
  }
}
// Flushes all buffered video frames to the encoder, stops both pipes, and joins
// the resulting streams into filePath. Runs on a background thread (see
// StopCapture); frames still buffered after the timeout are dropped.
private void DrainStop(string filePath, Queue<Color32[]> buffer,
                       FfmpegPipe videoPipe, FfmpegPipe audioPipe) {
  // Give up on draining (dropping any remaining frames) after this long.
  const int kTimeoutMs = 60 * 1000;
  try {
    Stopwatch timer = new Stopwatch();
    System.Console.WriteLine("VideoRecorder: DrainStop, frames buffered: {0}", buffer.Count);
    timer.Start();
    while (buffer.Count > 0) {
      if (timer.ElapsedMilliseconds > kTimeoutMs) {
        break;
      }
      if (videoPipe.IsReadyForInput) {
        Color32Bytable c = new Color32Bytable(buffer.Dequeue());
        videoPipe.QueueFrame(c);
      } else {
        // FIX: the original loop busy-spun at full speed whenever the pipe was
        // not ready, pegging a core for up to kTimeoutMs. Yield briefly while
        // the encoder catches up.
        System.Threading.Thread.Sleep(1);
      }
    }
    m_playbackFrameCount = FrameCount;
    System.Console.WriteLine("VideoRecorder: DrainStop exit");
    videoPipe.Stop();
    audioPipe.Stop();
    // Mux the finished streams into the final output file.
    JoinFiles(filePath, videoPipe, audioPipe);
  } catch (System.Exception e) {
    UnityEngine.Debug.LogException(e);
  }
}
// Per-frame driver for offline ODS capture. Alternates between a "tick" frame
// (real time advances, to let audio/animation run) and a render frame (time is
// frozen and one ODS frame is rendered). When all frames are captured, it shells
// out to ffmpeg to assemble the PNG sequence into a video, then quits.
void Update() {
  if (m_odsCamera.FrameCount >= m_framesToCapture) {
    if (m_framesToCapture > 0) {
      // We rendered everything.
      // On Windows, open the output folder for the user.
      if ((Application.platform == RuntimePlatform.WindowsPlayer) ||
          (Application.platform == RuntimePlatform.WindowsEditor)) {
        System.Diagnostics.Process.Start("explorer.exe", "/open," + m_outputFolder);
      }
      // Run ffmpeg synchronously to encode the numbered PNG frames into m_videoPath.
      System.Diagnostics.Process proc = new System.Diagnostics.Process();
      proc.StartInfo.FileName = Path.GetFullPath(TiltBrush.FfmpegPipe.GetFfmpegExe());
      proc.StartInfo.Arguments = System.String.Format(
          @"-y -framerate {0} -f image2 -i ""{1}_%06d.png"" " +
          @"-c:v " + FfmpegPipe.GetVideoEncoder() +
          @" -r {0} -pix_fmt yuv420p ""{2}""",
          m_fps, m_imagesPath, m_videoPath);
      Debug.LogFormat("{0} {1}", proc.StartInfo.FileName, proc.StartInfo.Arguments);
      proc.StartInfo.CreateNoWindow = false;
      proc.StartInfo.ErrorDialog = true;
      // Redirection requires UseShellExecute = false.
      proc.StartInfo.UseShellExecute = false;
      proc.StartInfo.RedirectStandardError = true;
      proc.Start();
      // Reading stderr to end also blocks until ffmpeg finishes.
      UnityEngine.Debug.Log(proc.StandardError.ReadToEnd());
#if USD_SUPPORTED
      if (m_PathSerializer != null) {
        m_PathSerializer.Stop();
      }
#endif
      proc.Close();
      Application.Quit();
      // Debug.Break() stops the editor, since Application.Quit is a no-op there.
      Debug.Break();
    }
    return;
  }
  // Wait for the in-flight ODS render to complete before doing anything else.
  if (m_odsCamera.IsRendering) {
    return;
  }
  if (m_frameTick) {
    // During a tick frame, let real time advance for at least one frame period.
    if (m_frameTimer.ElapsedMilliseconds < 1000.0f / m_fps) {
      return;
    }
    m_frameTimer.Stop();
  }
  if (m_renderTimer.IsRunning) {
    m_renderTimer.Stop();
    Debug.LogFormat("ODS Frame Time: {0}", m_renderTimer.ElapsedMilliseconds / 1000.0f);
  }
  // Alternate between tick frames (timeScale = 1) and render frames (timeScale = 0).
  m_frameTick = !m_frameTick;
  if (m_frameTick) {
    Time.timeScale = 1.0f;
    m_frameTimer.Reset();
    m_frameTimer.Start();
    return;
  }
  // Freeze time while rendering this frame.
  Time.timeScale = 0.0f;
  if (!HaveCameraPath) {
    // No authored camera path: interpolate the scene pose (and optionally a
    // turntable rotation) from overall capture progress.
    float progress = m_odsCamera.FrameCount / (float)m_framesToCapture;
    App.Scene.Pose = GetScenePose(progress);
    if (m_turnTableRotation > 0) {
      TiltBrush.TrTransform sc = App.Scene.Pose;
      sc.rotation = Quaternion.AngleAxis(progress * m_turnTableRotation, Vector3.up);
      App.Scene.Pose = sc;
    }
  } else {
#if USD_SUPPORTED
    // Drive the camera from the recorded USD path at the current capture time.
    m_PathSerializer.Time = m_odsCamera.FrameCount / m_fps;
    m_PathSerializer.Deserialize();
#endif
  }
  Camera cam = OdsCamera.GetComponent<Camera>();
  Camera parentCam = TiltBrush.App.VrSdk.GetVrCamera();
  cam.clearFlags = parentCam.clearFlags;
  cam.backgroundColor = parentCam.backgroundColor;
  // Copy back the culling mask so the preview window looks like the final render.
  parentCam.cullingMask = cam.cullingMask;
  if (m_odsCamera.FrameCount == 0 && m_framesToCapture > 0) {
    // Force maximum quality for the duration of the capture.
    // NOTE(review): quality level 3 is assumed to be the highest tier here —
    // confirm against the project's QualitySettings.
    if (QualitySettings.GetQualityLevel() != 3) {
      QualitySettings.SetQualityLevel(3);
    }
  }
  App.Instance.FrameCountDisplay.SetCurrentFrame(m_odsCamera.FrameCount);
  // Move the viewer camera, so the user can see what's going on.
  Transform viewerXform = App.VrSdk.GetVrCamera().transform;
  viewerXform.position = transform.position;
  viewerXform.rotation = transform.rotation;
  viewerXform.localScale = transform.localScale;
  m_renderTimer.Reset();
  m_renderTimer.Start();
  // Kick off the (asynchronous) ODS render for this frame.
  StartCoroutine(m_odsCamera.Render(transform));
}
// Stop the encoder and optionally save the captured stream.
// When save is false, the stream is discarded.
//
// File finalization (mux/delete) happens on background threads; the member
// pipes are immediately replaced with fresh FfmpegPipe instances so a new
// capture can start while the old one finishes.
public void StopCapture(bool save) {
  if (m_isPlayingBack) {
    m_isPlayingBack = false;
    m_ffmpegVideoReader.Stop();
    m_ffmpegVideoReader = null;
    UnityEngine.Debug.LogWarning("Stop Video reader");
  }
  SetCaptureFramerate(0);
  // Nothing else to do unless a capture is in progress.
  if (!m_isCapturing) {
    return;
  }
#if ENABLE_AUDIO_DEBUG
  m_audioBuffer.WriteLog();
#endif
  m_videoFramePool.Clear();
  m_ffmpegVideo.ReleaseFrame -= ReturnFrameToPool;
  // Grab local references of the FFMPEG pipes, which is required to get them to
  // ref-capture correctly in the lambdas below (we want to capture a reference
  // to the object, not a reference to this class's members, which are reassigned
  // below).
  FfmpegPipe videoPipe = m_ffmpegVideo;
  FfmpegPipe audioPipe = m_ffmpegAudio;
  bool draining = false;
  if (save && m_bufferedVideoFrames.Count > 0) {
    // Transfer the buffer to the encoder thread to finish processing.
    // Note the local captures, which allow release of the class's references.
    Queue<Color32[]> buffer = m_bufferedVideoFrames;
    System.Threading.Thread t = new System.Threading.Thread(
        () => DrainStop(m_filePath, buffer, videoPipe, audioPipe));
    t.IsBackground = true;
    // Fresh buffer/pipes for the next capture; the old ones now belong to the
    // drain thread.
    m_bufferedVideoFrames = new Queue<Color32[]>();
    m_ffmpegVideo = new FfmpegPipe();
    m_ffmpegAudio = new FfmpegPipe();
    try {
      t.Start();
      m_isSaving = true;
    } catch {
      // Thread failed to start; don't leave the recorder stuck in "saving".
      m_isSaving = false;
      throw;
    }
    draining = true;
  } else {
    // Request background threads to stop.
    m_ffmpegVideo.Stop();
    m_ffmpegAudio.Stop();
    draining = false;
    m_playbackFrameCount = FrameCount;
  }
  // Clear the Stopwatch
  m_frameTimer.Reset();
  m_captureBuffer.Dispose();
  m_captureBuffer = null;
  Texture2D.Destroy(m_playbackTexture);
  m_playbackTexture = null;
  m_frameBuffered = false;
  m_texBuffered = false;
  if (!save) {
    // We need to trash the file on a background thread because we need to wait
    // for the ffmpeg process to fully exit before touching the file.
    //
    // TODO: This should be a task executed in a thread pool, using a thread is overkill.
    string filePath = m_filePath;
    System.Threading.Thread t = new System.Threading.Thread(
        () => RemoveFile(filePath, videoPipe, audioPipe));
    m_ffmpegVideo = new FfmpegPipe();
    m_ffmpegAudio = new FfmpegPipe();
    t.IsBackground = true;
    t.Start();
  } else if (!draining) {
    // Only do this if drainstop is not already running, in that case,
    // DrainStop() is responsible for joining the files after they are complete.
    string filePath = m_filePath;
    System.Threading.Thread t = new System.Threading.Thread(
        () => JoinFiles(filePath, videoPipe, audioPipe));
    m_ffmpegVideo = new FfmpegPipe();
    m_ffmpegAudio = new FfmpegPipe();
    t.IsBackground = true;
    try {
      t.Start();
      m_isSaving = true;
    } catch {
      // Thread failed to start; don't leave the recorder stuck in "saving".
      m_isSaving = false;
      throw;
    }
  }
  m_isCapturing = false;
  m_isCapturingAudio = false;
}