示例#1
0
 // Invoked at guaranteed-quit time so no background encoder or reader
 // thread is left running when the application exits.
 private void OnGuaranteedAppQuit()
 {
     // An in-progress capture is stopped and saved to disk.
     if (m_isCapturing)
     {
         StopCapture(save: true);
         return;
     }
     // Otherwise, if we are mid-playback, shut the reader thread down.
     if (m_isPlayingBack)
     {
         m_isPlayingBack = false;
         m_ffmpegVideoReader.Stop();
     }
 }
示例#2
0
        // Drains frames still buffered on the CPU into the video pipe, then stops
        // both pipes and joins the temporary audio/video files into filePath.
        //
        // Intended to run on a background thread after StopCapture() has handed
        // off ownership of the buffer and pipes. Gives up after one minute so a
        // wedged encoder process cannot hang shutdown forever; any frames left
        // at that point are dropped. All exceptions are logged rather than
        // propagated, since there is no caller on this thread to observe them.
        private void DrainStop(string filePath,
                               Queue <Color32[]> buffer,
                               FfmpegPipe videoPipe,
                               FfmpegPipe audioPipe)
        {
            const int kTimeoutMs = 60 * 1000;

            try
            {
                Stopwatch timer = new Stopwatch();
                System.Console.WriteLine("VideoRecorder: DrainStop, frames buffered: {0}", buffer.Count);

                timer.Start();
                while (buffer.Count > 0)
                {
                    if (timer.ElapsedMilliseconds > kTimeoutMs)
                    {
                        // Timed out; drop the remaining frames rather than hang.
                        break;
                    }
                    if (videoPipe.IsReadyForInput)
                    {
                        Color32Bytable c = new Color32Bytable(buffer.Dequeue());
                        videoPipe.QueueFrame(c);
                    }
                    else
                    {
                        // Yield briefly instead of busy-spinning a full core
                        // while the encoder catches up.
                        System.Threading.Thread.Sleep(1);
                    }
                }

                m_playbackFrameCount = FrameCount;

                System.Console.WriteLine("VideoRecorder: DrainStop exit");
                videoPipe.Stop();
                audioPipe.Stop();

                JoinFiles(filePath, videoPipe, audioPipe);
            }
            catch (System.Exception e)
            {
                UnityEngine.Debug.LogException(e);
            }
        }
示例#3
0
        // Stop the encoder and optionally save the captured stream.
        // When save is false, the stream is discarded.
        //
        // Regardless of the save flag, this tears down the capture state
        // (compute buffer, playback texture, frame timer) and replaces the
        // ffmpeg pipe members with fresh instances, handing the old pipes to a
        // background thread for draining / joining / file removal. The order of
        // operations below is significant: pipe references must be captured
        // locally before the members are swapped, and worker threads must be
        // started only after the handoff is complete.
        public void StopCapture(bool save)
        {
            // If playback is active, shut the reader down first so it cannot
            // touch the file we are about to finalize or delete.
            if (m_isPlayingBack)
            {
                m_isPlayingBack = false;
                m_ffmpegVideoReader.Stop();
                m_ffmpegVideoReader = null;
                UnityEngine.Debug.LogWarning("Stop Video reader");
            }
            // Restore normal (real-time) frame pacing.
            SetCaptureFramerate(0);
            if (!m_isCapturing)
            {
                return;
            }

#if ENABLE_AUDIO_DEBUG
            m_audioBuffer.WriteLog();
#endif

            // Stop recycling frames through the pool; the old pipe is going away.
            m_videoFramePool.Clear();
            m_ffmpegVideo.ReleaseFrame -= ReturnFrameToPool;

            // Grab local references of the FFMPEG pipes, which is required to get them to ref-capture
            // correctly in the lambdas below (we want to capture a reference to the object, not a
            // reference to this class's members).
            FfmpegPipe videoPipe = m_ffmpegVideo;
            FfmpegPipe audioPipe = m_ffmpegAudio;
            bool       draining  = false;

            if (save && m_bufferedVideoFrames.Count > 0)
            {
                // Transfer the buffer to the encoder thread to finish processing.
                // Note the local captures, which allow release of the classes references.
                Queue <Color32[]>       buffer = m_bufferedVideoFrames;
                System.Threading.Thread t      = new System.Threading.Thread(
                    () => DrainStop(m_filePath, buffer, videoPipe, audioPipe));
                t.IsBackground        = true;
                // Swap in fresh members before starting the worker so a new
                // capture cannot race with the drain of the old one.
                m_bufferedVideoFrames = new Queue <Color32[]>();
                m_ffmpegVideo         = new FfmpegPipe();
                m_ffmpegAudio         = new FfmpegPipe();
                try
                {
                    t.Start();
                    m_isSaving = true;
                }
                catch
                {
                    // Thread failed to start: we are not saving; let the caller see the error.
                    m_isSaving = false;
                    throw;
                }
                draining = true;
            }
            else
            {
                // Nothing buffered (or not saving): request background threads to stop.
                m_ffmpegVideo.Stop();
                m_ffmpegAudio.Stop();
                draining             = false;
                m_playbackFrameCount = FrameCount;
            }

            // Clear the Stopwatch
            m_frameTimer.Reset();

            // Release GPU-side capture resources.
            m_captureBuffer.Dispose();
            m_captureBuffer = null;

            Texture2D.Destroy(m_playbackTexture);
            m_playbackTexture = null;

            m_frameBuffered = false;
            m_texBuffered   = false;

            if (!save)
            {
                // We need to trash the file on a background thread because we need to wait for the ffmpeg
                // process to fully exit before touching the file.
                //
                // TODO: This should be a task executed in a thread pool, using a thread is overkill.
                string filePath           = m_filePath;
                System.Threading.Thread t = new System.Threading.Thread(
                    () => RemoveFile(filePath, videoPipe, audioPipe));
                m_ffmpegVideo  = new FfmpegPipe();
                m_ffmpegAudio  = new FfmpegPipe();
                t.IsBackground = true;
                t.Start();
            }
            else if (!draining)
            {
                // Only do this if drainstop is not already running, in that case, DrainStop() is responsible
                // for joining the files after they are complete.
                string filePath           = m_filePath;
                System.Threading.Thread t = new System.Threading.Thread(
                    () => JoinFiles(filePath, videoPipe, audioPipe));
                m_ffmpegVideo  = new FfmpegPipe();
                m_ffmpegAudio  = new FfmpegPipe();
                t.IsBackground = true;
                try
                {
                    t.Start();
                    m_isSaving = true;
                }
                catch
                {
                    // Thread failed to start: clear the saving flag and rethrow.
                    m_isSaving = false;
                    throw;
                }
            }

            // Mark capture stopped last; audio capture is asynchronous and keys off these flags.
            m_isCapturing      = false;
            m_isCapturingAudio = false;
        }
示例#4
0
        // -------------------------------------------------------------------------------------------- //
        // Public API
        // -------------------------------------------------------------------------------------------- //

        // Launch the encoder targeting the given file path.
        // Return true on success, false if capture could not start.
        //
        // filePath        - destination video file; "touched" immediately to block
        //                   overlapping encoder instances.
        // audioSampleRate - sample rate for the audio pipe; <= 0 writes video
        //                   directly to filePath with no temp container.
        // captureAudio    - whether to start the audio pipe at all.
        // blocking        - passed through to FfmpegPipe.Start.
        // fps             - capture frame rate.
        //
        // Returns true immediately if a capture is already running. Any playback
        // in progress is stopped first.
        public bool StartCapture(string filePath, int audioSampleRate, bool captureAudio, bool blocking,
                                 float fps)
        {
            if (m_isCapturing)
            {
                return(true);
            }
            if (m_isPlayingBack)
            {
                m_isPlayingBack = false;
                m_ffmpegVideoReader.Stop();
            }

            m_FPS = (decimal)fps;

            m_audioBuffer.SetSampleRates(m_FPS, audioSampleRate);

            // Reset per-capture frame bookkeeping.
            m_nextAudioFrame      = 1;
            m_lastVideoFrame      = -1;
            m_audioFrameCount     = 0;
            m_audioFramesRequired = 0;
            m_audioBuffer.Clear();
            m_isCapturingAudio = captureAudio;

            Camera cam = GetComponent <Camera>();

            m_filePath = filePath;

            m_frameBuffered   = false;
            m_texBuffered     = false;
            m_videoFrameCount = 0;
            m_bufferedVideoFrames.Clear();

            int width  = cam.pixelWidth;
            int height = cam.pixelHeight;

            // Camera dimensions can be unavailable (e.g. camera not yet rendered);
            // fall back to the screen size.
            if (cam.pixelHeight == 0)
            {
                width  = Screen.width;
                height = Screen.height;
            }

            const string kPipeStdIn = @"pipe:0";

            // We need to "touch" the destination file immediately, to avoid overlapping encoder instances
            // from stomping each other. The using block guarantees the handle is
            // released even if an exception is thrown.
            using (FileStream touchStream = File.Open(m_filePath, FileMode.OpenOrCreate,
                                                      FileAccess.Write, FileShare.ReadWrite))
            {
                // Nothing to write; creating the file is the point.
            }
            File.SetLastWriteTimeUtc(m_filePath, System.DateTime.UtcNow);

            // With audio, video goes to a temp container and is joined with the
            // audio track later; without audio, write the final file directly.
            string videoFileName = audioSampleRate > 0
                ? m_filePath + ".tmp." + App.UserConfig.Video.ContainerType
                : m_filePath;

            if (!m_ffmpegVideo.Start(kPipeStdIn, videoFileName, width, height, (float)m_FPS, blocking))
            {
                return(false);
            }

            m_ffmpegAudio.OutputFile = "";
            if (m_isCapturingAudio &&
                !m_ffmpegAudio.Start(kPipeStdIn, m_filePath + ".tmp.m4a",
                                     width, height, audioSampleRate, blocking))
            {
                // Audio failed to start; tear down the video pipe so we leave no
                // orphaned encoder process behind.
                m_ffmpegVideo.Stop();
                return(false);
            }

            // Give the encoder a means to return used frames
            m_ffmpegVideo.ReleaseFrame += ReturnFrameToPool;

            //
            // Init capture and playback buffers.
            //
            m_playbackTexture = new Texture2D(width, height, TextureFormat.ARGB32, false);
            long kPixelSizeBytes = System.Runtime.InteropServices.Marshal.SizeOf(typeof(Color32));

            m_captureBuffer = new ComputeBuffer(width * height, (int)kPixelSizeBytes);

            var tempInitBuffer = new Color32[width * height];

            m_captureBuffer.SetData(tempInitBuffer);
            m_currentFrameBuffer = null;

            // Save the temp buffer for reuse later.
            m_videoFramePool.Enqueue(tempInitBuffer);

            m_blitToCompute.SetBuffer("_CaptureBuffer", m_captureBuffer);

            // Note, UAV register must match shader register (e.g. register(u1)).
            const int uavRegister = 1;

            Graphics.SetRandomWriteTarget(uavRegister, m_captureBuffer, true);

            //
            // Finalize local state setup.
            //
            m_width  = width;
            m_height = height;

            m_frameTimer = new Stopwatch();
            m_frameTimer.Start();

            // Since audio capture is asynchronous, these *must* be set as the last step.
            m_isCapturing = true;

            return(true);
        }