Example #1
    private IEnumerator RenderRoutine(float length)
    {
        // Pull the interleaved audio samples out of the clip
        int sampleRate = clip.frequency;
        int channels   = clip.channels;

        float[] samples = new float[clip.samples * channels];
        clip.GetData(samples, 0);
        // Samples are interleaved, so each video frame consumes (sampleRate / fps) * channels of them
        int samplesPerFrame = sampleRate / fps * channels;

        // Create output rendering camera
        Camera        renderCam = CreateRenderingCamera();
        RenderTexture tex       = renderCam.targetTexture;

        // Create the native recorder; the callback receives the path of the finished recording
        MP4Recorder recorder =
            new MP4Recorder(tex.width, tex.height, fps, sampleRate, channels, path => Debug.Log(path));
        FixedIntervalClock clock = new FixedIntervalClock(fps);

        // Render each frame, committing its pixels and the matching slice of audio samples
        int totalFrames = Mathf.RoundToInt(length * fps);
        for (int frame = 0; frame < totalFrames; frame++)
        {
            yield return new WaitForEndOfFrame();

            long      timestamp     = clock.Timestamp;
            Texture2D frameTexture  = RenderTextureToTexture2D(tex);
            float[]   commitSamples = GetPartialSampleArray(samples, samplesPerFrame * frame, samplesPerFrame);
            recorder.CommitFrame(frameTexture.GetPixels32(), timestamp);
            recorder.CommitSamples(commitSamples, timestamp);
            DestroyImmediate(frameTexture);
            Debug.Log($"Generated frame {frame}/{totalFrames - 1}");
        }

        // Finish the render and dispose the native recorder
        // Disposing also finalizes the file encoding
        recorder.Dispose();
    }
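
Example #1 calls three helpers that are not shown: CreateRenderingCamera, RenderTextureToTexture2D, and GetPartialSampleArray. A minimal sketch of what they could look like follows; the bodies are assumptions rather than the original implementations, and they assume the camera renders into an offscreen RenderTexture and that `samples` is an interleaved buffer.

    // Hypothetical helper implementations (assumed, not from the original source)
    private Camera CreateRenderingCamera()
    {
        // Clone the main camera and point it at an offscreen RenderTexture
        Camera renderCam = Instantiate(Camera.main);
        renderCam.targetTexture = new RenderTexture(Screen.width, Screen.height, 24);
        return renderCam;
    }

    private Texture2D RenderTextureToTexture2D(RenderTexture source)
    {
        // Read the RenderTexture back into a CPU-side Texture2D so GetPixels32 can be used
        RenderTexture previous = RenderTexture.active;
        RenderTexture.active   = source;
        Texture2D result = new Texture2D(source.width, source.height, TextureFormat.RGBA32, false);
        result.ReadPixels(new Rect(0, 0, source.width, source.height), 0, 0);
        result.Apply();
        RenderTexture.active = previous;
        return result;
    }

    private float[] GetPartialSampleArray(float[] samples, int offset, int length)
    {
        // Copy a slice of the interleaved sample buffer, clamping at the end of the clip
        length = Mathf.Max(0, Mathf.Min(length, samples.Length - offset));
        float[] slice = new float[length];
        System.Array.Copy(samples, offset, slice, 0, length);
        return slice;
    }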
Example #2
    // Invoked by the NatMic audio device with each new audio sample buffer
    public void OnSampleBuffer(float[] sampleBuffer, int sampleRate, int channelCount, long timestamp)
    {
        // Forward the sample buffer to the video recorder, stamped with the shared recording clock
        videoRecorder.CommitSamples(sampleBuffer, recordingClock.Timestamp);
    }
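
For context, the fields used in Example #2 (videoRecorder and recordingClock) would typically be set up before the microphone starts streaming. Below is a rough sketch under the older NatCorder API that Example #1 appears to use; the namespaces, resolution, frame rate, and audio format are assumptions and should be matched to your project and to the NatMic device's actual format.

using NatCorder;
using NatCorder.Clocks;
using NatCorder.Inputs;
using UnityEngine;

public class AudioVideoRecorder : MonoBehaviour
{
    private MP4Recorder videoRecorder;
    private IClock recordingClock;
    private CameraInput cameraInput;

    public void StartRecording()
    {
        // One shared clock so committed video frames and audio samples land on the same timeline
        recordingClock = new RealtimeClock();
        // Assumed recording parameters; the sample rate and channel count should match the microphone
        videoRecorder = new MP4Recorder(1280, 720, 30, 44100, 1, path => Debug.Log("Saved recording to " + path));
        // Stream camera frames to the recorder on the shared clock
        cameraInput = new CameraInput(videoRecorder, recordingClock, Camera.main);
        // Start the microphone here so it begins invoking OnSampleBuffer (exact call depends on the NatMic version)
    }

    public void StopRecording()
    {
        cameraInput.Dispose();
        // Disposing the recorder finalizes the MP4 file and invokes the path callback
        videoRecorder.Dispose();
    }
}

Because both the camera input and the microphone callback stamp their data with the same clock, audio and video stay in sync in the finished file.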