예제 #1
0
 public CustomYieldInstruction Stop()
 {
     // Halt the live camera preview, then finalize the recording;
     // disposing the recorder finishes writing the output.
     deviceCamera.StopPreview();
     videoRecorder.Dispose();
     // Returned as a yield instruction so a coroutine can wait on completion.
     return this;
 }
예제 #2
0
 public void StopRecording()
 {
     // Finalize the recording, then drop both references so the
     // instances become eligible for collection.
     videoRecorder.Dispose();
     pixelBuffer   = null;
     videoRecorder = null;
 }
예제 #3
0
 public void StopRecording()
 {
     // Restore the un-tinted color before shutting the recorder down.
     greyness = 0.0f;

     // Disposing the recorder finalizes the output; clear the
     // reference afterwards so it cannot be reused.
     videoRecorder.Dispose();
     videoRecorder = null;
 }
예제 #4
0
 public void StopRecording()
 {
     // Shut down the audio source first.
     audioDevice.StopRecording();

     // Tear down the video pipeline; disposing the recorder
     // finalizes the output file.
     cameraInput.Dispose();
     videoRecorder.Dispose();

     // Drop references so the instances can be collected.
     cameraInput   = null;
     videoRecorder = null;
 }
예제 #5
0
 public void StopRecording()
 {
     // Release the microphone input only if audio was being captured.
     if (recordMicrophone)
     {
         StopMicrophone();
         audioInput.Dispose();
     }

     // Stop feeding frames, then finalize the recording file.
     cameraInput.Dispose();
     videoRecorder.Dispose();
 }
        public CustomYieldInstruction Stop()
        {
            // Release the microphone input when audio was being captured.
            if (recordMicrophone)
            {
                StopMicrophone();
                audioInput.Dispose();
            }

            // Stop the camera feed, then finalize the encoded file.
            cameraInput.Dispose();
            videoRecorder.Dispose();

            // Yield instruction lets a coroutine wait until recording is done.
            return this;
        }
예제 #7
0
    public void StopRecording()
    {
        Debug.Log("StopRecording------");

        // Flip the state flag before tearing anything down.
        isRecording = false;

        // Release the microphone input if audio was being captured.
        if (recordMicrophone)
        {
            StopMicrophone();
            audioInput.Dispose();
        }

        // Stop the camera feed, then finalize the recording file.
        cameraInput.Dispose();
        videoRecorder.Dispose();

        //Mvc.MvcTool.sendNotice(MessageKeys.ChangeRecState,isRecording);
    }
예제 #8
0
    /// <summary>
    /// Coroutine that offline-renders <paramref name="length"/> seconds of video
    /// (one camera frame per display frame) plus the audio of <c>clip</c> into an
    /// MP4 via a native recorder. Disposing the recorder at the end finishes the file.
    /// </summary>
    /// <param name="length">Duration to render, in seconds.</param>
    private IEnumerator RenderRoutine(float length)
    {
        // Calculate audioData
        // Sample rate and channel count are taken from the source AudioClip.
        int audioSamples = clip.frequency;
        int channels     = clip.channels;

        // Pull the entire clip's PCM data up front (interleaved by channel).
        float[] samples = new float[clip.samples * channels];
        clip.GetData(samples, 0);
        // Integer division: fractional samples per frame are truncated.
        int samplesPerFrame = audioSamples / fps;

        // Create output rendering camera
        // NOTE(review): renderCam and its RenderTexture are never destroyed or
        // released after the loop — possible leak; confirm CreateRenderingCamera's
        // ownership contract.
        Camera        renderCam = CreateRenderingCamera();
        RenderTexture tex       = renderCam.targetTexture;

        // Create native recorder
        // The lambda is the recorder's completion/path callback; here it only logs.
        MP4Recorder recorder =
            new MP4Recorder(tex.width, tex.height, fps, audioSamples, channels, s => { Debug.Log(s); });
        FixedIntervalClock clock = new FixedIntervalClock(fps);

        // Loop each rendering frame to grab and commit frame and samples
        for (int frame = 0; frame < length * fps; frame++)
        {
            // Wait until Unity has finished rendering this frame before reading pixels.
            yield return(new WaitForEndOfFrame());

            // Fixed-interval clock yields evenly spaced timestamps, decoupled
            // from real elapsed time.
            long      timestamp     = clock.Timestamp;
            Texture2D fTex          = RenderTextureToTexture2D(tex);
            // NOTE(review): if length*fps exceeds the clip's frame count,
            // samplesPerFrame * frame can run past the samples array — verify
            // GetPartialSampleArray guards against this.
            float[]   commitSamples = GetPartialSampleArray(samples, samplesPerFrame * frame, samplesPerFrame);
            recorder.CommitFrame(fTex.GetPixels32(), timestamp);
            recorder.CommitSamples(commitSamples, timestamp);
            // Destroy the per-frame Texture2D immediately to avoid accumulating
            // textures across the (potentially long) render loop.
            DestroyImmediate(fTex);
            Debug.Log($"Generated Frame {frame}/{(int) (length * fps) - 1}");
        }

        // Complete render and dispose the native recorder
        // Disposing also finishes the file encoding
        recorder.Dispose();
    }