Code Example #1
File: VideoRecorder.cs Project: mikeage/open-brush
        void PostCapture()
        {
            long kPixelSizeBytes = System.Runtime.InteropServices.Marshal.SizeOf(typeof(Color32));

            bool isReady = m_ffmpegVideo.IsReadyForInput;

            if (!m_isCapturing || !m_frameBuffered)
            {
                // Even though there is no frame to capture or capture is disabled, there may be frames to push
                // to the encoder. This also allows the buffer to drain between captured frames, assuming the
                // encoder can keep up.
                if (isReady && m_bufferedVideoFrames.Count > 0)
                {
                    Color32Bytable c = new Color32Bytable(m_bufferedVideoFrames.Dequeue());
                    m_ffmpegVideo.QueueFrame(c);
                }

                return;
            }

            //
            // Step 3: Read the actual pixel buffer from the texture, one frame after it was copied.
            //

            // It may be more efficient to skip enqueuing the frame to m_bufferedVideoFrames if the queue is
            // empty, however the logic below is considerably more readable if we ignore that optimization.
            // Considering we are running the garbage collector in the background right after this, the
            // extra queuing operation is highly likely in the noise and will be cleaned up during that
            // collection pass.
            //
            // Similarly, the m_bufferedVideoFrames queue is intentionally managed in this thread, which
            // implies a single background worker. Until we need multiple workers, this design improves the
            // readability of this code and avoids the need for synchronization primitives.
            Color32[] frame = m_currentFrameBuffer;
            m_currentFrameBuffer = null;

            long usedBufferBytes = frame.Length * kPixelSizeBytes * m_bufferedVideoFrames.Count;

            if (usedBufferBytes < kMaxQueueSizeBytes)
            {
                m_bufferedVideoFrames.Enqueue(frame);
            }
            else
            {
                System.Console.WriteLine("Dropped frame [{0}], buffer overflow", m_videoFrameCount);
            }

            m_frameBuffered = false;

            // If the encoder is ready to accept another frame, pass it along and increment the expected
            // frame count.
            if (isReady)
            {
                Color32Bytable c = new Color32Bytable(m_bufferedVideoFrames.Dequeue());
                m_ffmpegVideo.QueueFrame(c);
            }
        }
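A note on the buffering policy above: the queue is bounded by total byte size rather than by frame count, and new frames are dropped once the budget is exceeded so memory use stays flat. Below is a minimal standalone sketch of that idea; the class name, the byte budget, and the helper methods are illustrative and not part of open-brush.

using System.Collections.Generic;
using UnityEngine;

// Illustrative byte-budgeted frame queue that drops frames instead of
// growing without bound (names and the 512 MB budget are assumptions).
class BoundedFrameQueue
{
    const long kMaxQueueSizeBytes = 512L * 1024 * 1024;
    static readonly long kPixelSizeBytes =
        System.Runtime.InteropServices.Marshal.SizeOf(typeof(Color32));

    readonly Queue<Color32[]> m_frames = new Queue<Color32[]>();

    // Returns false when the frame was dropped because the budget is used up.
    public bool TryEnqueue(Color32[] frame)
    {
        long usedBytes = frame.Length * kPixelSizeBytes * m_frames.Count;
        if (usedBytes >= kMaxQueueSizeBytes)
        {
            return false; // caller can log the dropped frame index
        }
        m_frames.Enqueue(frame);
        return true;
    }

    // Returns null when nothing is buffered.
    public Color32[] DequeueOrNull()
    {
        return m_frames.Count > 0 ? m_frames.Dequeue() : null;
    }
}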
Code Example #2
File: VideoRecorder.cs Project: mikeage/open-brush
        // -------------------------------------------------------------------------------------------- //
        // PostEffect Render Hook
        // -------------------------------------------------------------------------------------------- //

        void OnRenderImage(RenderTexture source, RenderTexture destination)
        {
            // Loop playback. We intentionally don't buffer the entire video, which means we have to run
            // FFMPEG in a loop until we're done previewing.
            if (m_isPlayingBack && m_ffmpegVideoReader.DidExit)
            {
                StartPlaybackReader();
            }

            if (!m_isPlayingBack)
            {
                // If capturing, grab the current frame from the source buffer.
                if (!m_isCapturing || !Capture(source, destination))
                {
                    // For whatever reason, Capture decided not to capture, so blit.
                    Graphics.Blit(source, destination);
                }
            }
            else
            {
                Color32Bytable b = null;

                if (m_playbackCurFrame >= RealTimeFrameCount)
                {
                    return;
                }

                m_ffmpegVideoReader.GetFrame(ref b);

                if (b == null)
                {
                    return;
                }

                Color32[] c = b.GetArray() as Color32[];
                if (c == null || c.Length == 0)
                {
                    // Should never happen.
                    UnityEngine.Debug.LogWarning("No data.");
                    return;
                }

                RenderTexture trg = destination;
                if (!m_playbackTexture ||
                    m_playbackTexture.width != trg.width ||
                    m_playbackTexture.height != trg.height)
                {
                    m_playbackTexture = new Texture2D(trg.width, trg.height, TextureFormat.ARGB32, false);
                }

                m_playbackCurFrame++;
                m_playbackTexture.SetPixels32(c);
                m_playbackTexture.Apply();

                Graphics.Blit(m_playbackTexture, destination);
            }
        }
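For context, OnRenderImage is Unity's post-effect hook in the built-in render pipeline: it is called on a Camera's MonoBehaviours after the camera finishes rendering, and the override must fill in destination (usually via Graphics.Blit). A minimal pass-through version, with the component name chosen here just for illustration:

using UnityEngine;

// Smallest possible post-effect: copy the rendered image straight through.
// Attach to a Camera; Unity supplies source and destination each frame.
[RequireComponent(typeof(Camera))]
public class PassThroughEffect : MonoBehaviour
{
    void OnRenderImage(RenderTexture source, RenderTexture destination)
    {
        Graphics.Blit(source, destination);
    }
}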
Code Example #3
File: FfmpegPipe.cs Project: mikeage/open-brush-lfs
  public void GetFrame(ref Color32Bytable buffer) {
    if (m_framesOut.IsEmpty) {
      buffer = null;
    } else {
      // The writer thread will never touch m_framesOut[0], therefore this should be thread safe.
      // This is true as long as the writer never calls Enqueue with overwriteIfFull=true.
      buffer = m_framesOut.Dequeue() as Color32Bytable;
    }

    m_frameReady.Set();
  }
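GetFrame relies on a single-producer/single-consumer arrangement: the ffmpeg reader thread enqueues frames, the render thread dequeues them, and m_frameReady wakes the reader whenever it is blocked on a full queue. A rough sketch of that handshake using standard .NET types follows; FrameQueue, its capacity, and the method names are assumptions for the demo, not the project's FfmpegPipe API.

using System.Collections.Concurrent;
using System.Threading;

// Single-producer/single-consumer queue: the producer blocks when the queue
// is full, and the consumer signals it after every dequeue attempt.
class FrameQueue<T> where T : class
{
    const int kCapacity = 8; // assumed capacity
    readonly ConcurrentQueue<T> m_queue = new ConcurrentQueue<T>();
    readonly AutoResetEvent m_spaceAvailable = new AutoResetEvent(false);

    // Producer (reader thread): wait while the queue is at capacity.
    public void Enqueue(T frame)
    {
        while (m_queue.Count >= kCapacity)
        {
            m_spaceAvailable.WaitOne();
        }
        m_queue.Enqueue(frame);
    }

    // Consumer (render thread): never blocks; returns null when empty and
    // always wakes the producer, mirroring GetFrame above.
    public T DequeueOrNull()
    {
        T frame;
        bool got = m_queue.TryDequeue(out frame);
        m_spaceAvailable.Set();
        return got ? frame : null;
    }
}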
Code Example #4
File: VideoRecorder.cs Project: mikeage/open-brush
        private void DrainStop(string filePath,
                               Queue<Color32[]> buffer,
                               FfmpegPipe videoPipe,
                               FfmpegPipe audioPipe)
        {
            const int kTimeoutMs = 60 * 1000;

            try
            {
                Stopwatch timer = new Stopwatch();
                System.Console.WriteLine("VideoRecorder: DrainStop, frames buffered: {0}", buffer.Count);

                timer.Start();
                while (buffer.Count > 0)
                {
                    if (timer.ElapsedMilliseconds > kTimeoutMs)
                    {
                        break;
                    }
                    if (videoPipe.IsReadyForInput)
                    {
                        Color32Bytable c = new Color32Bytable(buffer.Dequeue());
                        videoPipe.QueueFrame(c);
                    }
                }

                m_playbackFrameCount = FrameCount;

                System.Console.WriteLine("VideoRecorder: DrainStop exit");
                videoPipe.Stop();
                audioPipe.Stop();

                JoinFiles(filePath, videoPipe, audioPipe);
            }
            catch (System.Exception e)
            {
                UnityEngine.Debug.LogException(e);
            }
        }
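The loop in DrainStop is a drain-with-deadline pattern: keep handing buffered frames to the encoder while it reports it is ready, but give up after a timeout so shutdown can never hang on a stalled pipe. Below is a generic sketch of the same idea; DrainUtil and its parameters are hypothetical, not part of VideoRecorder.

using System.Collections.Generic;
using System.Diagnostics;

static class DrainUtil
{
    // Pushes queued items into a sink until the queue empties or the
    // deadline passes; returns how many items were pushed.
    public static int Drain<T>(Queue<T> buffer,
                               System.Func<bool> sinkIsReady,
                               System.Action<T> push,
                               int timeoutMs)
    {
        int pushed = 0;
        Stopwatch timer = Stopwatch.StartNew();
        while (buffer.Count > 0 && timer.ElapsedMilliseconds <= timeoutMs)
        {
            if (sinkIsReady())
            {
                push(buffer.Dequeue());
                pushed++;
            }
        }
        return pushed;
    }
}

Used against the pipes above, the call would look roughly like Drain(buffer, () => videoPipe.IsReadyForInput, f => videoPipe.QueueFrame(new Color32Bytable(f)), kTimeoutMs).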
Code Example #5
File: FfmpegPipe.cs Project: mikeage/open-brush
        private void ReadFramesFromFfmpeg()
        {
            long PIXEL_SIZE_BYTES = System.Runtime.InteropServices.Marshal.SizeOf(typeof(Color32));

            // TODO: Use ffprobe instead of width/height, then w/h properties could be removed.
            byte[] buf = new byte[Height * Width * PIXEL_SIZE_BYTES];
            System.IO.BinaryReader dataPipe = m_dataReader;
            // Keep a local set of buffers to avoid garbage collection.
            Bytable[] localRefs = new Bytable[m_framesOut.Capacity];

            // Init local refs.
            int lastLocalRef = 0;

            for (int i = 0; i < localRefs.Length; i++)
            {
                localRefs[i] = new Color32Bytable(null);
            }

            try
            {
                using (dataPipe)
                {
                    while (!m_shouldExit)
                    {
                        while (!m_shouldPause)
                        {
                            if (m_framesOut.IsFull)
                            {
                                // Wait for the consumer.
                                m_frameReady.WaitOne();
                            }

                            int bytesRead = dataPipe.Read(buf, 0, buf.Length);
                            while (bytesRead < buf.Length)
                            {
                                bytesRead += dataPipe.Read(buf, bytesRead, buf.Length - bytesRead);
                                if (bytesRead == 0)
                                {
                                    // The pipe closed before any data arrived for this frame; stop reading.
                                    return;
                                }
                            }
                            if (bytesRead != buf.Length)
                            {
                                // Somehow we read the wrong amount of data; skip this frame and read again.
                                UnityEngine.Debug.LogWarningFormat("BAD READ RESULT: got {0} bytes, expected {1}",
                                                                   bytesRead, buf.Length);
                                continue;
                            }

                            // If the last buffer we had was the same size, no allocation will happen here. We
                            // will also be holding a reference to that array, so even after it's removed from the
                            // m_framesOut buffer, it should not generate garbage.
                            Bytable curFrame = localRefs[lastLocalRef];
                            lastLocalRef = (lastLocalRef + 1) % localRefs.Length;

                            curFrame.FromBytes(buf);

                            // If called with overwriteIfFull=true, this code will require a lock.
                            m_framesOut.Enqueue(curFrame);
                            m_frameCount++;
                        }

                        // Wait for the next frame
                        m_ready.WaitOne();
                    }
                }
            }
            catch (System.Threading.ThreadInterruptedException)
            {
                // This is fine, the render thread sent an interrupt.
            }
            catch (System.Exception e)
            {
                UnityEngine.Debug.LogException(e);
            }
            finally
            {
                Stop();
            }
        }
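Two details in ReadFramesFromFfmpeg are worth isolating: the fixed pool of reusable Color32Bytable buffers that keeps per-frame allocations (and thus garbage collection) off the hot path, and the loop that keeps reading until a whole frame has arrived, since a pipe read may return fewer bytes than requested. Below is a standalone sketch of the read-until-full loop, treating a zero-byte read as end of stream; ReadExactly is a hypothetical helper, not part of FfmpegPipe.

using System.IO;

static class StreamUtil
{
    // Fills buf completely from the reader. Returns false if the stream
    // ends before a full frame arrives (e.g. ffmpeg closed the pipe).
    public static bool ReadExactly(BinaryReader reader, byte[] buf)
    {
        int total = 0;
        while (total < buf.Length)
        {
            int n = reader.Read(buf, total, buf.Length - total);
            if (n == 0)
            {
                return false;
            }
            total += n;
        }
        return true;
    }
}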