示例#1
0
        /// <summary>
        /// Per-frame recording callback: copies the camera texture into a
        /// recorder frame and commits it with the current recording timestamp.
        /// </summary>
        void OnFrame()
        {
            // Grab a writable destination frame from the recorder.
            var destination = videoRecorder.AcquireFrame();

            // Copy the camera image into it, then hand it back stamped
            // with the recording clock's current time.
            Graphics.Blit(cameraTexture, destination);
            videoRecorder.CommitFrame(destination, recordingClock.Timestamp);
        }
示例#2
0
 /// <summary>
 /// Commits a webcam frame to the recorder whenever recording is active
 /// and the webcam texture was updated this frame.
 /// </summary>
 void Update()
 {
     // Nothing to do unless we are recording and a fresh webcam frame arrived.
     if (!recording || !webCamTexture.didUpdateThisFrame)
         return;

     // Read the latest webcam pixels into the reusable buffer and commit them.
     webCamTexture.GetPixels32(pixelBuffer);
     recorder.CommitFrame(pixelBuffer, clock.timestamp);
 }
示例#3
0
 /// <summary>
 /// Pushes a new webcam frame into the video recorder when one is available.
 /// </summary>
 void Update()
 {
     // Skip unless a recorder exists and the webcam delivered a fresh frame.
     if (videoRecorder == null || !webcamTexture.didUpdateThisFrame)
         return;

     // Copy the webcam pixels into the buffer and commit with the clock's timestamp.
     webcamTexture.GetPixels32(pixelBuffer);
     videoRecorder.CommitFrame(pixelBuffer, clock.Timestamp);
 }
示例#4
0
    /// <summary>
    /// Coroutine that captures the screen at the end of the current frame
    /// and commits the pixels to the video recorder.
    /// </summary>
    private IEnumerator recscreen()
    {
        // Wait until rendering is finished so the back buffer is complete.
        yield return new WaitForEndOfFrame();

        // Read the full screen into a temporary RGB24 texture.
        var screenshot = new Texture2D(Screen.width, Screen.height, TextureFormat.RGB24, false);
        screenshot.ReadPixels(new Rect(0, 0, Screen.width, Screen.height), 0, 0);
        screenshot.Apply();

        // Commit the frame, then release the temporary texture.
        videoRecorder.CommitFrame(screenshot.GetPixels32(), clock.Timestamp);
        Destroy(screenshot);
    }
示例#5
0
 /// <summary>
 /// Animates the preview material's greyness toward the target value and,
 /// when a recorder exists, commits the (grey-shaded) camera frame to it.
 /// </summary>
 void Update()
 {
     // Hoist the repeated property access; both branches use it.
     var cameraTexture = cameraPreview.cameraTexture;

     // Animate the greyness
     if (cameraTexture && rawImage.texture == cameraTexture)
     {
         var currentGreyness = rawImage.material.GetFloat("_Greyness");
         var targetGreyness  = Mathf.Lerp(currentGreyness, greyness, GreySpeed * Time.deltaTime);
         rawImage.material.SetFloat("_Greyness", targetGreyness);
     }
     // Record frames
     // FIX: also require a live camera texture before dereferencing it.
     // The original read cameraTexture.didUpdateThisFrame unguarded and threw a
     // NullReferenceException when the recorder existed but the preview had not
     // started yet (the greyness branch above already guards the same way).
     if (videoRecorder != null && cameraTexture && cameraTexture.didUpdateThisFrame)
     {
         var frame = videoRecorder.AcquireFrame();
         // Blit through the greyness material so the recording matches the preview.
         Graphics.Blit(cameraTexture, frame, rawImage.material);
         videoRecorder.CommitFrame(frame, clock.Timestamp);
     }
 }
示例#6
0
    /// <summary>
    /// Coroutine that renders <paramref name="length"/> seconds of output at
    /// <c>fps</c> frames per second: each iteration waits for end-of-frame,
    /// commits one video frame read back from the rendering camera's target
    /// texture plus the matching slice of audio samples, then disposing the
    /// recorder finalizes the encoded file.
    /// </summary>
    /// <param name="length">Duration to render, in seconds.</param>
    private IEnumerator RenderRoutine(float length)
    {
        // Pull the whole audio clip into memory up front.
        // NOTE(review): audioSamples is the clip's sample *rate* (Hz), not a
        // sample count — it is passed to MP4Recorder as the audio rate below.
        int audioSamples = clip.frequency;
        int channels     = clip.channels;

        float[] samples = new float[clip.samples * channels];
        clip.GetData(samples, 0);
        // Integer division: any per-frame remainder samples are dropped.
        int samplesPerFrame = audioSamples / fps;

        // Create output rendering camera; recorder dimensions follow its target texture.
        Camera        renderCam = CreateRenderingCamera();
        RenderTexture tex       = renderCam.targetTexture;

        // Create native recorder; the callback logs the finished-file message.
        MP4Recorder recorder =
            new MP4Recorder(tex.width, tex.height, fps, audioSamples, channels, s => { Debug.Log(s); });
        FixedIntervalClock clock = new FixedIntervalClock(fps);

        // Loop each rendering frame to grab and commit one frame and its samples.
        for (int frame = 0; frame < length * fps; frame++)
        {
            // Wait until rendering completes so the target texture holds this frame.
            yield return(new WaitForEndOfFrame());

            long      timestamp     = clock.Timestamp;
            Texture2D fTex          = RenderTextureToTexture2D(tex);
            // NOTE(review): samplesPerFrame * frame can run past the end of
            // `samples` when length exceeds the clip's duration — confirm
            // GetPartialSampleArray guards its bounds.
            float[]   commitSamples = GetPartialSampleArray(samples, samplesPerFrame * frame, samplesPerFrame);
            recorder.CommitFrame(fTex.GetPixels32(), timestamp);
            recorder.CommitSamples(commitSamples, timestamp);
            // Destroy the temporary readback texture immediately to limit memory churn.
            DestroyImmediate(fTex);
            Debug.Log($"Generated Frame {frame}/{(int) (length * fps) - 1}");
        }

        // Complete render and dispose the native recorder.
        // Disposing also finishes the file encoding.
        // NOTE(review): renderCam is never destroyed here, and recorder leaks if
        // the coroutine is stopped mid-loop — verify callers handle cleanup.
        recorder.Dispose();
    }