Example #1
        // Transition to State.Looking
        // Valid source states are Disabled, Capturing, Looking.
        // If already in Looking, state will be reset (i.e., the search will restart from scratch).
        void TransitionToLooking(float delaySeconds = 0)
        {
            // Clean up whatever state we were in before
            // (except the Looking bits, which will be re-initialized)
            StopCapture();
            StopFuture();

            // Initialize Looking
            m_SelectNextCaptureTimer = delaySeconds;
            m_HotValues.Clear();
            VisualizerManager.m_Instance.AudioCaptureStatusChange(false);
            m_State = State.Looking;
        }
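
As a usage sketch (the caller name OnCaptureSourceLost and the two-second delay below are illustrative assumptions, not part of the original class), a lost or failed capture could drop back into the Looking state like this:

        // Hypothetical caller on the same class: when the current capture source is
        // lost, re-enter Looking and wait briefly before scanning for a new source.
        void OnCaptureSourceLost()
        {
            // Valid from Capturing, and also from Looking (which restarts the search).
            TransitionToLooking(delaySeconds: 2.0f);
        }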
Example #2
        // -------------------------------------------------------------------------------------------- //
        // Public API
        // -------------------------------------------------------------------------------------------- //

        // Launch the encoder targeting the given file path.
        // Return true on success, false if capture could not start.
        public bool StartCapture(string filePath, int audioSampleRate, bool captureAudio, bool blocking,
                                 float fps)
        {
            if (m_isCapturing)
            {
                return true;
            }
            if (m_isPlayingBack)
            {
                m_isPlayingBack = false;
                m_ffmpegVideoReader.Stop();
            }

            m_FPS = (decimal)fps;

            m_audioBuffer.SetSampleRates(m_FPS, audioSampleRate);
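            // (Assumption about SetSampleRates, for illustration: it presumably derives
            //  how many audio samples correspond to one video frame from these two rates,
            //  e.g. 44100 Hz at 30 fps gives 44100 / 30 = 1470 samples per frame.)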

            m_nextAudioFrame      = 1;
            m_lastVideoFrame      = -1;
            m_audioFrameCount     = 0;
            m_audioFramesRequired = 0;
            m_audioBuffer.Clear();
            m_isCapturingAudio = captureAudio;

            Camera cam = GetComponent<Camera>();

            m_filePath = filePath;

            m_frameBuffered   = false;
            m_texBuffered     = false;
            m_videoFrameCount = 0;
            m_bufferedVideoFrames.Clear();

            int width  = cam.pixelWidth;
            int height = cam.pixelHeight;

            // If the camera reports no valid pixel size, fall back to the full screen resolution.
            if (cam.pixelHeight == 0)
            {
                width  = Screen.width;
                height = Screen.height;
            }

            const string kPipeStdIn = @"pipe:0";

            // We need to "touch" the destination file immediately, to keep overlapping encoder
            // instances from stomping on each other.
            using (File.Open(m_filePath, FileMode.OpenOrCreate,
                             FileAccess.Write, FileShare.ReadWrite))
            {
                // Opening and immediately closing the stream is all that is needed here.
            }
            File.SetLastWriteTimeUtc(m_filePath, System.DateTime.UtcNow);
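            // (Sketch of the other side of this contract, not part of this method: a
            //  second encoder instance could treat the path as "busy" with a check like
            //      File.Exists(path) &&
            //          (System.DateTime.UtcNow - File.GetLastWriteTimeUtc(path)) < gracePeriod
            //  where gracePeriod is a hypothetical tolerance window.)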

            string videoFileName = audioSampleRate > 0
                ? m_filePath + ".tmp." + App.UserConfig.Video.ContainerType
                : m_filePath;
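            // (When audio is also being captured, video goes to the temporary container above,
            //  and audio to a temporary .m4a below; presumably the two streams are muxed into
            //  the final m_filePath once capture stops.)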

            if (!m_ffmpegVideo.Start(kPipeStdIn, videoFileName, width, height, (float)m_FPS, blocking))
            {
                return false;
            }

            m_ffmpegAudio.OutputFile = "";
            if (m_isCapturingAudio &&
                !m_ffmpegAudio.Start(kPipeStdIn, m_filePath + ".tmp.m4a",
                                     width, height, audioSampleRate, blocking))
            {
                m_ffmpegVideo.Stop();
                return false;
            }

            // Give the encoder a means to return used frames
            m_ffmpegVideo.ReleaseFrame += ReturnFrameToPool;
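            // (Assumed shape of the handler, for illustration only; the actual delegate
            //  signature of ReleaseFrame is not shown here:
            //      void ReturnFrameToPool(Color32[] frame) { m_videoFramePool.Enqueue(frame); }
            //  i.e. consumed frames go back into m_videoFramePool for reuse.)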

            //
            // Init capture and playback buffers.
            //
            m_playbackTexture = new Texture2D(width, height, TextureFormat.ARGB32, false);
            long kPixelSizeBytes = System.Runtime.InteropServices.Marshal.SizeOf(typeof(Color32));

            m_captureBuffer = new ComputeBuffer(width * height, (int)kPixelSizeBytes);

            var tempInitBuffer = new Color32[width * height];

            m_captureBuffer.SetData(tempInitBuffer);
            m_currentFrameBuffer = null;

            // Save the temp buffer for reuse later.
            m_videoFramePool.Enqueue(tempInitBuffer);

            m_blitToCompute.SetBuffer("_CaptureBuffer", m_captureBuffer);

            // Note: the UAV register must match the shader register (e.g. register(u1)).
            const int uavRegister = 1;

            Graphics.SetRandomWriteTarget(uavRegister, m_captureBuffer, true);
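            // (Shader-side assumption, for illustration: _CaptureBuffer would be declared
            //  on the matching register in HLSL, e.g. something like
            //      RWStructuredBuffer<uint> _CaptureBuffer : register(u1);
            //  with each element holding one packed 32-bit pixel.)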

            //
            // Finalize local state setup.
            //
            m_width  = width;
            m_height = height;

            m_frameTimer = new Stopwatch();
            m_frameTimer.Start();

            // Since audio capture is asynchronous, this flag *must* be set as the last step.
            m_isCapturing = true;

            return true;
        }
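
A minimal calling sketch for this API (the component type name VideoCaptureTool, the output path, and the parameter values below are illustrative assumptions, not taken from the original code):

        // Hypothetical caller: start a 30 fps capture with 44.1 kHz audio and report
        // failure if the encoder could not be launched. VideoCaptureTool stands in for
        // whatever the actual component type is called.
        void BeginRecording(VideoCaptureTool capture)
        {
            bool started = capture.StartCapture("/tmp/recording.mp4",
                                                audioSampleRate: 44100,
                                                captureAudio: true,
                                                blocking: false,
                                                fps: 30f);
            if (!started)
            {
                UnityEngine.Debug.LogError("Video capture failed to start.");
            }
        }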