Example #1: PostCapture()
        void PostCapture()
        {
            long kPixelSizeBytes = System.Runtime.InteropServices.Marshal.SizeOf(typeof(Color32));

            bool isReady = m_ffmpegVideo.IsReadyForInput;

            if (!m_isCapturing || !m_frameBuffered)
            {
                // Even though there is no frame to capture or capture is disabled, there may still be frames
                // to push to the encoder. This also allows the buffer to drain between captured frames,
                // assuming the encoder can keep up.
                if (isReady && m_bufferedVideoFrames.Count > 0)
                {
                    Color32Bytable c = new Color32Bytable(m_bufferedVideoFrames.Dequeue());
                    m_ffmpegVideo.QueueFrame(c);
                }

                return;
            }

            //
            // Step 3: Read the actual pixel buffer from the texture, one frame after it was copied.
            //

            // It may be more efficient to skip enqueuing the frame into m_bufferedVideoFrames when the queue
            // is empty; however, the logic below is considerably more readable if we ignore that
            // optimization. Since the garbage collector runs in the background right after this, the extra
            // queuing operation is almost certainly lost in the noise and will be cleaned up during that
            // collection pass.
            //
            // Similarly, the m_bufferedVideoFrames queue is intentionally managed on this thread, which
            // implies a single background worker. Until we need multiple workers, this design keeps the code
            // readable and avoids the need for synchronization primitives.
            Color32[] frame = m_currentFrameBuffer;
            m_currentFrameBuffer = null;

            long usedBufferBytes = frame.Length * kPixelSizeBytes * m_bufferedVideoFrames.Count;

            if (usedBufferBytes < kMaxQueueSizeBytes)
            {
                m_bufferedVideoFrames.Enqueue(frame);
            }
            else
            {
                System.Console.WriteLine("Dropped frame [{0}], buffer overflow", m_videoFrameCount);
            }

            m_frameBuffered = false;

            // If the encoder is ready to accept another frame, pass it along and increment the expected
            // frame count.
            if (isReady)
            {
                Color32Bytable c = new Color32Bytable(m_bufferedVideoFrames.Dequeue());
                m_ffmpegVideo.QueueFrame(c);
            }
        }
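
For a sense of scale: each buffered frame costs width * height * sizeof(Color32) bytes, so the kMaxQueueSizeBytes cap translates directly into a frame budget. A minimal sketch of that arithmetic, assuming an illustrative 1080p capture and a 512 MB cap (neither value is taken from this class):

        static void PrintFrameBudget()
        {
            const long kMaxQueueSizeBytes = 512L * 1024 * 1024;  // assumed 512 MB cap, for illustration only
            long pixelSizeBytes =
                System.Runtime.InteropServices.Marshal.SizeOf(typeof(UnityEngine.Color32));  // 4 bytes
            long frameBytes = 1920L * 1080 * pixelSizeBytes;           // ~8.3 MB per 1080p frame
            long maxBufferedFrames = kMaxQueueSizeBytes / frameBytes;  // ~64 frames before drops begin
            System.Console.WriteLine("Buffer holds at most {0} frames", maxBufferedFrames);
        }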
Example #2: CaptureAudio()
        private void CaptureAudio()
        {
            try
            {
                // Each captured video frame needs a matching frame of audio; track the deficit whenever
                // audio capture has fallen behind video capture.
                if (m_audioFrameCount < m_videoFrameCount)
                {
                    m_audioFramesRequired++;
                    m_audioFrameCount++;
                }

                if (m_audioFramesRequired == 0)
                {
                    return;
                }

                // We grab the previous frame of audio, since the video takes a frame to capture.
                int curFrame = m_nextAudioFrame;

                // If audio capture has drifted more than a few frames behind real time, jump ahead to resync.
                if (RealTimeFrameCount - m_nextAudioFrame > 4)
                {
                    m_nextAudioFrame = RealTimeFrameCount - 1;
                }

                if (m_audioBuffer.IsFrameReady(curFrame))
                {
                    m_ffmpegAudio.QueueFrame(m_audioBuffer.PopFrame(curFrame, RealTimeFrameCount, FrameCount));
                    m_audioFramesRequired--;
                    m_nextAudioFrame++;
                }
            }
            catch (System.Exception e)
            {
                UnityEngine.Debug.LogException(e);
            }
        }
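
CaptureAudio relies on only a small surface of m_audioBuffer: the frame at a given index can be polled for readiness and, once ready, popped and handed to the encoder. A sketch of that assumed contract follows; the interface name is hypothetical, and the float[] return type is a placeholder for whatever payload FfmpegPipe.QueueFrame actually accepts:

        // Hypothetical contract inferred from the calls above; not the project's actual buffer type.
        public interface IAudioFrameBuffer
        {
            // True once all samples belonging to the given frame index have been captured.
            bool IsFrameReady(int frameIndex);

            // Removes and returns the samples for frameIndex; the extra frame counts let the buffer
            // trim or pad audio so it stays aligned with the video timeline.
            float[] PopFrame(int frameIndex, int realTimeFrameCount, int videoFrameCount);
        }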
Example #3: DrainStop()
        private void DrainStop(string filePath,
                               Queue<Color32[]> buffer,
                               FfmpegPipe videoPipe,
                               FfmpegPipe audioPipe)
        {
            const int kTimeoutMs = 60 * 1000;

            try
            {
                System.Diagnostics.Stopwatch timer = new System.Diagnostics.Stopwatch();
                System.Console.WriteLine("VideoRecorder: DrainStop, frames buffered: {0}", buffer.Count);

                timer.Start();
                // Push any remaining buffered frames to the encoder, but give up after the timeout rather
                // than hanging shutdown behind a stalled encoder.
                while (buffer.Count > 0)
                {
                    if (timer.ElapsedMilliseconds > kTimeoutMs)
                    {
                        break;
                    }
                    if (videoPipe.IsReadyForInput)
                    {
                        Color32Bytable c = new Color32Bytable(buffer.Dequeue());
                        videoPipe.QueueFrame(c);
                    }
                }

                m_playbackFrameCount = FrameCount;

                System.Console.WriteLine("VideoRecorder: DrainStop exit");
                videoPipe.Stop();
                audioPipe.Stop();

                JoinFiles(filePath, videoPipe, audioPipe);
            }
            catch (System.Exception e)
            {
                UnityEngine.Debug.LogException(e);
            }
        }
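
Because DrainStop can block for up to a minute while the encoder drains, it is presumably run off the main thread. A minimal sketch of one way to kick it off, reusing field names from the earlier examples (the Stop(filePath) wrapper is an assumption, not the project's actual shutdown path):

        // Hypothetical wrapper; the Stop(filePath) signature is an assumption for illustration.
        public void Stop(string filePath)
        {
            m_isCapturing = false;
            var drainThread = new System.Threading.Thread(
                () => DrainStop(filePath, m_bufferedVideoFrames, m_ffmpegVideo, m_ffmpegAudio));
            drainThread.IsBackground = true;  // don't keep the process alive just for the drain
            drainThread.Start();
        }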