/// <summary>
/// Create a camera input that forwards rendered frames from the given cameras to the recorder.
/// </summary>
/// <param name="mediaRecorder">Media recorder to receive frames.</param>
/// <param name="clock">Clock for generating timestamps.</param>
/// <param name="cameras">Game cameras to record.</param>
public CameraInput(IMediaRecorder mediaRecorder, IClock clock, params Camera[] cameras)
{
    // Save dependencies.
    this.clock = clock;
    this.cameras = cameras;
    this.mediaRecorder = mediaRecorder;
    // Subscribe so OnFrame is invoked on every dispatched frame.
    DispatchUtility.onFrame += OnFrame;
}
/// <summary>
/// Begin recording the given cameras to an MP4, optionally mixing in microphone audio,
/// and automatically stop after <paramref name="recordTime"/> seconds.
/// </summary>
/// <param name="captureCamera">Cameras to capture.</param>
/// <param name="videoWidth">Output video width in pixels.</param>
/// <param name="videoHeight">Output video height in pixels.</param>
/// <param name="recordMicrophone">Whether to record microphone audio.</param>
/// <param name="microphoneSource">Audio source playing the microphone feed.</param>
/// <param name="recordTime">Seconds after which recording stops automatically.</param>
/// <param name="recordVedioCallBack">Invoked with the recording result when finished.</param>
public void StartRecordVedio(Camera[] captureCamera, int videoWidth, int videoHeight, bool recordMicrophone = false, AudioSource microphoneSource = null, int recordTime = 10, Action<string> recordVedioCallBack = null)
{
    this.recordMicrophone = recordMicrophone;
    this.microphoneSource = microphoneSource;
    RecordVedioCallBack = recordVedioCallBack;
    // Audio parameters stay zero when no microphone audio is recorded.
    int sampleRate = 0;
    int channelCount = 0;
    if (recordMicrophone)
    {
        sampleRate = AudioSettings.outputSampleRate;
        channelCount = (int)AudioSettings.speakerMode;
    }
    recordingClock = new RealtimeClock();
    videoRecorder = new MP4Recorder(videoWidth, videoHeight, 30, sampleRate, channelCount, OnReplay);
    // Record video from the capture cameras.
    cameraInput = new CameraInput(videoRecorder, recordingClock, captureCamera);
    StartMicrophone();
    if (recordMicrophone)
    {
        audioInput = new AudioInput(videoRecorder, recordingClock, microphoneSource, true);
        // Unmute the source now that its audio is being captured.
        microphoneSource.mute = false;
    }
    else
    {
        audioInput = null;
    }
    isStartRecording = true;
    // Schedule the automatic stop; method name string must match exactly.
    Invoke("StopRecordVedio", recordTime);
}
/// <summary>
/// Begin recording the scene camera and scene audio using the settings in <paramref name="exportVideo"/>.
/// </summary>
/// <param name="exportVideo">Export settings: container type, dimensions, and framerate.</param>
/// <exception cref="ArgumentOutOfRangeException">Thrown for an unsupported video type.</exception>
public void StartRecording(ExportVideoDTO exportVideo)
{
    _record = true;
    _settings = exportVideo;
    // Select the recorder backend for the requested container.
    if (exportVideo.VideoType == VideoType.MP4)
    {
        _mediaRecorder = new MP4Recorder(exportVideo.Width, exportVideo.Height, exportVideo.Framerate, AudioSettings.outputSampleRate, (int)AudioSettings.speakerMode, OnStopRecording);
    }
    else if (exportVideo.VideoType == VideoType.GIF)
    {
        _mediaRecorder = new GIFRecorder(exportVideo.Width, exportVideo.Height, exportVideo.Framerate, OnStopRecording);
    }
    else
    {
        throw new ArgumentOutOfRangeException();
    }
    // Wire video and audio inputs to a realtime clock.
    _clock = new RealtimeClock();
    _cameraInput = new CameraInput(_mediaRecorder, _clock, _camera);
    _audioInput = new AudioInput(_mediaRecorder, _clock, AudioPeerService.GetAudioListener());
    // Swap the UI over to the recording canvas.
    _mainCanvas.gameObject.SetActive(false);
    _recordCanvas.gameObject.SetActive(true);
    // Restart audio playback (stop then play) before launching the playback coroutine.
    AudioPeerService.Stop();
    AudioPeerService.Play();
    _coroutine = StartCoroutine(_Play());
}
/// <summary>
/// Create an audio input that records the output of a scene AudioListener.
/// </summary>
/// <param name="mediaRecorder">Media recorder to receive audio frames.</param>
/// <param name="clock">Clock for generating timestamps.</param>
/// <param name="audioListener">Audio listener for the current scene.</param>
public AudioInput(IMediaRecorder mediaRecorder, IClock clock, AudioListener audioListener)
{
    this.clock = clock;
    this.mediaRecorder = mediaRecorder;
    // Attach a helper component to the listener's object so Unity delivers audio sample buffers to us.
    var host = audioListener.gameObject;
    this.attachment = host.AddComponent<AudioInputAttachment>();
    this.attachment.sampleBufferDelegate = OnSampleBuffer;
}
/// <summary>
/// Create an audio input that records the output of an AudioSource.
/// </summary>
/// <param name="mediaRecorder">Media recorder to receive audio frames.</param>
/// <param name="clock">Clock for generating timestamps.</param>
/// <param name="audioSource">Audio source to record.</param>
/// <param name="mute">Optional. Mute the source while recording so it is not heard in the scene.</param>
public AudioInput(IMediaRecorder mediaRecorder, IClock clock, AudioSource audioSource, bool mute = false)
{
    this.clock = clock;
    this.mediaRecorder = mediaRecorder;
    this.mute = mute;
    // Attach a helper component to the source's object so Unity delivers audio sample buffers to us.
    var host = audioSource.gameObject;
    this.attachment = host.AddComponent<AudioInputAttachment>();
    this.attachment.sampleBufferDelegate = OnSampleBuffer;
}
/// <summary>
/// Begin recording the webcam feed to a 30 FPS MP4.
/// </summary>
public void StartRecording()
{
    // Reusable pixel buffer sized to the webcam feed.
    pixelBuffer = webCamTexture.GetPixels32();
    // Realtime clock drives frame timestamps.
    clock = new RealtimeClock();
    recorder = new MP4Recorder(webCamTexture.width, webCamTexture.height, 30);
    recording = true;
}
/// <summary>
/// Create a crop texture input.
/// </summary>
/// <param name="recorder">Media recorder to receive video frames.</param>
/// <param name="textureInput">Backing texture input for committing frames to recorder.</param>
public CropTextureInput(IMediaRecorder recorder, ITextureInput textureInput = null)
{
    this.recorder = recorder;
    // Fall back to a plain TextureInput when no backing input is supplied.
    if (textureInput == null)
    {
        textureInput = new TextureInput(recorder);
    }
    this.input = textureInput;
    this.material = new Material(Shader.Find(@"Hidden/NCPX/CropTextureInput"));
    // Default crop covers the full recorder frame.
    this.cropRect = new RectInt(0, 0, recorder.frameSize.width, recorder.frameSize.height);
    this.aspectMode = 0;
}
/// <summary>
/// Create a RenderTexture input for the given recorder.
/// </summary>
/// <param name="mediaRecorder">Media recorder to receive video frames.</param>
public RenderTextureInput(IMediaRecorder mediaRecorder)
{
    this.mediaRecorder = mediaRecorder;
    int width = mediaRecorder.pixelWidth;
    int height = mediaRecorder.pixelHeight;
    // CPU-side staging buffer: RGBA, 4 bytes per pixel.
    this.pixelBuffer = new byte[width * height * 4];
    // Texture used for readbacks (RGBA32, no mipmaps).
    this.readbackBuffer = new Texture2D(width, height, TextureFormat.RGBA32, false, false);
    // Native readback context created via MTLReadbackCreate (used on the iOS/Metal path).
    this.readbackiOS = MTLReadbackCreate(width, height);
}
/// <summary>
/// Shared constructor: attach an audio sample tap to the given game object.
/// </summary>
/// <param name="recorder">Media recorder to receive audio frames.</param>
/// <param name="clock">Clock for generating timestamps.</param>
/// <param name="gameObject">Object hosting the audio component being recorded.</param>
/// <param name="mute">Optional. Mute the recorded audio in the scene.</param>
private AudioInput(IMediaRecorder recorder, IClock clock, GameObject gameObject, bool mute = false)
{
    this.clock = clock;
    this.recorder = recorder;
    this.mute = mute;
    // The attachment forwards Unity's audio sample buffers into OnSampleBuffer.
    this.attachment = gameObject.AddComponent<AudioInputAttachment>();
    this.attachment.sampleBufferDelegate = OnSampleBuffer;
}
/// <summary>
/// Create a recording input that commits frames from a WebCamTexture.
/// </summary>
/// <param name="recorder">Media recorder to receive video frames.</param>
/// <param name="clock">Clock for generating timestamps.</param>
/// <param name="webCamTexture">Webcam texture to record.</param>
public WebCamTextureInput(IMediaRecorder recorder, IClock clock, WebCamTexture webCamTexture)
{
    this.clock = clock;
    this.recorder = recorder;
    this.webCamTexture = webCamTexture;
    // Pre-sized pixel buffer matching the webcam feed.
    this.pixelBuffer = webCamTexture.GetPixels32();
    // Hidden host object whose coroutine polls the webcam each frame.
    var host = new GameObject("WebCamTextureInputAttachment");
    this.attachment = host.AddComponent<WebCamTextureInputAttachment>();
    attachment.StartCoroutine(OnFrame());
}
/// <summary>
/// Create a RenderTexture input which performs synchronous readbacks.
/// </summary>
/// <param name="recorder">Media recorder to receive video frames.</param>
public TextureInput(IMediaRecorder recorder)
{
    this.recorder = recorder;
    var size = recorder.frameSize;
    // Staging texture for synchronous readbacks (RGBA32, no mipmaps).
    this.readbackBuffer = new Texture2D(size.width, size.height, TextureFormat.RGBA32, false, false);
}
/// <summary>
/// Begin recording the main camera to an MP4, optionally capturing microphone audio.
/// </summary>
public void StartRecording()
{
    const int frameRate = 30;
    // Audio parameters stay zero when the microphone is not recorded.
    int sampleRate = 0;
    int channelCount = 0;
    if (recordMicrophone)
    {
        sampleRate = AudioSettings.outputSampleRate;
        channelCount = (int)AudioSettings.speakerMode;
    }
    var clock = new RealtimeClock();
    recorder = new MP4Recorder(videoWidth, videoHeight, frameRate, sampleRate, channelCount);
    // Create recording inputs.
    cameraInput = new CameraInput(recorder, clock, Camera.main);
    if (recordMicrophone)
    {
        // Mute in scene while recording so the mic feed is not heard.
        audioInput = new AudioInput(recorder, clock, microphoneSource, true);
    }
    else
    {
        audioInput = null;
    }
    // Keep the source muted only when no audio input is capturing it.
    microphoneSource.mute = audioInput == null;
}
/// <summary>
/// Create a GLES texture input.
/// </summary>
/// <param name="recorder">Media recorder to receive video frames.</param>
/// <param name="multithreading">Enable multithreading. This improves recording performance.</param>
/// <exception cref="InvalidOperationException">Thrown off-Android or off-OpenGL ES3.</exception>
public GLESTextureInput(IMediaRecorder recorder, bool multithreading = false)
{
    // Guard: this input only works on Android with the OpenGL ES3 renderer.
    if (Application.platform != RuntimePlatform.Android)
    {
        throw new InvalidOperationException(@"GLESTextureInput can only be used on Android");
    }
    if (SystemInfo.graphicsDeviceType != GraphicsDeviceType.OpenGLES3)
    {
        throw new InvalidOperationException(@"GLESTextureInput can only be used with OpenGL ES3");
    }
    // Save state and create the native readback sized to the recorder's frame.
    this.recorder = recorder;
    var size = recorder.frameSize;
    this.readback = new GLESReadback(size.width, size.height, multithreading);
}
/// <summary>
/// Create a Metal texture input.
/// </summary>
/// <param name="recorder">Media recorder to receive video frames.</param>
/// <param name="multithreading">Enable multithreading. This improves recording performance.</param>
/// <exception cref="InvalidOperationException">Thrown off-iOS or off-Metal.</exception>
public MTLTextureInput(IMediaRecorder recorder, bool multithreading = false)
{
    // Guard: this input only works on iOS with the Metal renderer.
    if (Application.platform != RuntimePlatform.IPhonePlayer)
    {
        throw new InvalidOperationException(@"MTLTextureInput can only be used on iOS");
    }
    if (SystemInfo.graphicsDeviceType != GraphicsDeviceType.Metal)
    {
        throw new InvalidOperationException(@"MTLTextureInput can only be used with Metal");
    }
    this.recorder = recorder;
    // NOTE(review): the native MTLReadback path is disabled here — `readback` stays null
    // and `multithreading` is unused. Confirm whether this stub is intentional.
    this.readback = null;
}
/// <summary>
/// Finish writing and rename the recorded media file.
/// </summary>
/// <param name="recorder">Recorder to finish.</param>
/// <param name="recordingName">Desired recording name. This MUST include the file extension.</param>
/// <param name="overwrite">Should any existing file be overwritten? If false and the destination exists, the move throws.</param>
/// <returns>Path to recorded media file.</returns>
public static async Task<string> FinishWriting(this IMediaRecorder recorder, string recordingName, bool overwrite = false)
{
    // Get source and destination paths; the destination keeps the source's directory.
    var src = await recorder.FinishWriting();
    var dst = Path.Combine(new FileInfo(src).Directory.FullName, recordingName);
    // Some recorders produce a directory (e.g. image sequences) rather than a file;
    // src and dst are assumed to be the same kind.
    var directory = File.GetAttributes(src).HasFlag(FileAttributes.Directory);
    var exists = File.Exists(dst) || Directory.Exists(dst);
    // Delete any existing destination when the caller allows overwriting.
    if (exists && overwrite)
    {
        if (directory)
        {
            Directory.Delete(dst, true);
        }
        else
        {
            File.Delete(dst);
        }
    }
    // Move the recording into place; on failure, remove the source so no orphaned
    // recording is left behind, then propagate the original error.
    try
    {
        if (directory)
        {
            Directory.Move(src, dst);
        }
        else
        {
            File.Move(src, dst);
        }
    }
    catch (IOException)
    {
        if (directory)
        {
            Directory.Delete(src, true);
        }
        else
        {
            File.Delete(src);
        }
        // BUGFIX: was `throw ex;`, which resets the stack trace (CA2200);
        // a bare `throw` rethrows with the original trace intact.
        throw;
    }
    return dst;
}
/// <summary>
/// Create a camera input that renders all cameras into a shared framebuffer for recording.
/// </summary>
/// <param name="mediaRecorder">Media recorder to receive video frames.</param>
/// <param name="clock">Clock for generating timestamps.</param>
/// <param name="cameras">Game cameras to record.</param>
public CameraInput(IMediaRecorder mediaRecorder, IClock clock, params Camera[] cameras)
{
    // Order cameras by depth (×10 so sub-integer depth gaps still compare non-zero).
    Array.Sort(cameras, (a, b) => (int)(10 * (a.depth - b.depth)));
    // Save state and create the readback input.
    this.clock = clock;
    this.cameras = cameras;
    this.frameInput = new RenderTextureInput(mediaRecorder);
    // Framebuffer the cameras render into: ARGB32 with 24-bit depth, sRGB.
    var descriptor = new RenderTextureDescriptor(mediaRecorder.pixelWidth, mediaRecorder.pixelHeight, RenderTextureFormat.ARGB32, 24);
    descriptor.sRGB = true;
    this.framebuffer = RenderTexture.GetTemporary(descriptor);
    // Host the recording coroutine on the first camera's object.
    this.frameHelper = cameras[0].gameObject.AddComponent<CameraInputAttachment>();
    frameHelper.StartCoroutine(OnFrame());
}
/// <summary>
/// Create a video recording input from a game camera.
/// </summary>
/// <param name="recorder">Media recorder to receive committed frames.</param>
/// <param name="clock">Clock for generating timestamps.</param>
/// <param name="cameras">Game cameras to record.</param>
public CameraInput(IMediaRecorder recorder, IClock clock, params Camera[] cameras)
{
    // Order cameras by depth (×10 so sub-integer depth gaps still compare non-zero).
    Array.Sort(cameras, (a, b) => (int)(10 * (a.depth - b.depth)));
    var frameSize = recorder.frameSize;
    int width = frameSize.width;
    int height = frameSize.height;
    // Save state.
    this.recorder = recorder;
    this.clock = clock;
    this.cameras = cameras;
    this.frameDescriptor = new RenderTextureDescriptor(width, height, RenderTextureFormat.ARGB32, 24);
    // Synchronous readback texture is only needed when async GPU readback is unsupported.
    if (SystemInfo.supportsAsyncGPUReadback)
    {
        this.readbackBuffer = null;
    }
    else
    {
        this.readbackBuffer = new Texture2D(width, height, TextureFormat.RGBA32, false, false);
    }
    this.attachment = cameras[0].gameObject.AddComponent<CameraInputAttachment>();
    // RGBA: 4 bytes per pixel.
    this.pixelBuffer = new byte[width * height * 4];
    // Mark the descriptor sRGB, then start the per-frame recording coroutine.
    frameDescriptor.sRGB = true;
    attachment.StartCoroutine(OnFrame());
}
// Wait the given delay, then configure and start MP4 recording with microphone audio.
IEnumerator first(float seconds)
{
    yield return new WaitForSeconds(seconds);
    // Recording configuration.
    const int frameRate = 30;
    int sampleRate = AudioSettings.outputSampleRate;
    int channelCount = (int)AudioSettings.speakerMode;
    var clock = new RealtimeClock();
    recorder = new MP4Recorder(videoWidth, videoHeight, frameRate, sampleRate, channelCount);
    // Create recording inputs.
    cameraInput = new CameraInput(recorder, clock, Camera.main);
    audioInput = new AudioInput(recorder, clock, microphoneSource, true);
    // audioInput is always created here, so this unmutes the microphone source.
    microphoneSource.mute = audioInput == null;
}
/// <summary>
/// Begin recording the given camera to an MP4, optionally capturing microphone audio.
/// </summary>
/// <param name="camera">Camera to record.</param>
/// <param name="recordMicrophone">Whether to capture microphone audio.</param>
public void StartRecording(Camera camera, bool recordMicrophone = true)
{
    // Audio parameters are zeroed when the microphone is not recorded.
    var sampleRate = recordMicrophone ? AudioSettings.outputSampleRate : 0;
    var channelCount = recordMicrophone ? (int)AudioSettings.speakerMode : 0;
    mediaRecorder = new MP4Recorder(targetWidth, targetHeight, targetFrameRate, sampleRate, channelCount);
    recordingClock = new RealtimeClock();
    cameraInput = new CameraInput(mediaRecorder, recordingClock, camera);
    if (recordMicrophone)
    {
        StartCoroutine(StartMicrophone());
        // Mute in-scene playback while the mic feed is being captured.
        audioInput = new AudioInput(mediaRecorder, recordingClock, microphoneSource, mute: true);
    }
}
// Reset recording state, probe the microphone devices, and start a muted,
// looping 1-second microphone clip through this object's AudioSource.
private IEnumerator Start()
{
    // Reset recording state.
    recorder = null;
    cameraInput = null;
    audioInput = null;
    microphoneSource = null;
    timerGoing = false;
    // Log the available capture devices and this device's frequency caps.
    foreach (var device in Microphone.devices)
    {
        Debug.Log("Name: " + device);
    }
    Microphone.GetDeviceCaps(null, out minFreq, out maxFreq);
    Debug.Log("minFreq:" + minFreq);
    Debug.Log("maxFreq:" + maxFreq);
    // (0, 0) means the device supports any frequency, so default to 44.1 kHz.
    if (minFreq == 0 && maxFreq == 0)
    {
        maxFreq = 44100;
    }
    startRecordBtn.SetActive(true);
    stoprecordBtn.SetActive(false);
    // Start the microphone: muted, looping a 1-second rolling clip.
    microphoneSource = this.GetComponent<AudioSource>();
    microphoneSource.mute = true;
    microphoneSource.loop = true;
    microphoneSource.bypassEffects = false;
    microphoneSource.bypassListenerEffects = false;
    microphoneSource.clip = Microphone.Start(null, true, 1, maxFreq);
    // Wait until the microphone produces samples before playing the clip.
    yield return new WaitUntil(() => Microphone.GetPosition(null) > 0);
    microphoneSource.Play();
}
/// <summary>
/// Create an audio recording input from an AudioSource.
/// </summary>
/// <param name="recorder">Media recorder to receive audio frames.</param>
/// <param name="clock">Clock for generating timestamps.</param>
/// <param name="audioSource">Audio source to record.</param>
/// <param name="mute">Optional. Mute audio source while recording so that it is not heard in scene.</param>
// Delegates to the private constructor, attaching to the source's game object.
public AudioInput(IMediaRecorder recorder, IClock clock, AudioSource audioSource, bool mute = false) : this(recorder, clock, audioSource.gameObject, mute) { }
/// <summary>
/// Create an audio recording input from a scene's AudioListener.
/// </summary>
/// <param name="recorder">Media recorder to receive audio frames.</param>
/// <param name="clock">Clock for generating timestamps.</param>
/// <param name="audioListener">Audio listener for the current scene.</param>
// Delegates to the private constructor, attaching to the listener's game object.
public AudioInput(IMediaRecorder recorder, IClock clock, AudioListener audioListener) : this(recorder, clock, audioListener.gameObject) { }
/// <summary>
/// Start video recording: build a recorder for the selected container, optionally
/// capture microphone audio, count down for up to MAX_RECORDING_TIME seconds
/// (cancellable), then finish the recording.
/// </summary>
private async Task StartRecording()
{
    // Ignore re-entrant calls while playing, recording, or finalizing a file.
    if (isVideoPlaying || isVideoRecording || isFinishWriting)
    {
        return;
    }
    Debug.Log("StartRecording ()");
    // First make sure recording microphone is only on MP4 or HEVC.
    recordMicrophoneAudio = recordMicrophoneAudioToggle.isOn;
    recordMicrophoneAudio &= (container == ContainerPreset.MP4 || container == ContainerPreset.HEVC);
    // Create recording configurations from the current webcam feed.
    recordingWidth = webCamTextureToMatHelper.GetWidth();
    recordingHeight = webCamTextureToMatHelper.GetHeight();
    videoFramerate = 30;
    // Audio parameters stay zero when microphone audio is not recorded.
    audioSampleRate = recordMicrophoneAudio ? AudioSettings.outputSampleRate : 0;
    audioChannelCount = recordMicrophoneAudio ? (int)AudioSettings.speakerMode : 0;
    frameDuration = 0.1f;
    // Create the video recorder for the selected container.
    recordingClock = new RealtimeClock();
    if (container == ContainerPreset.MP4)
    {
        videoRecorder = new MP4Recorder(
            recordingWidth, recordingHeight, videoFramerate, audioSampleRate, audioChannelCount,
            (int)videoBitRate, audioBitRate: (int)audioBitRate);
        recordEveryNthFrame = 1;
    }
    else if (container == ContainerPreset.HEVC)
    {
        videoRecorder = new HEVCRecorder(
            recordingWidth, recordingHeight, videoFramerate, audioSampleRate, audioChannelCount,
            (int)videoBitRate, audioBitRate: (int)audioBitRate);
        recordEveryNthFrame = 1;
    }
    else if (container == ContainerPreset.GIF)
    {
        videoRecorder = new GIFRecorder(recordingWidth, recordingHeight, frameDuration);
        // GIF/JPG commit only every 5th frame.
        recordEveryNthFrame = 5;
    }
    else if (container == ContainerPreset.JPG) // macOS and Windows platform only.
    {
        videoRecorder = new JPGRecorder(recordingWidth, recordingHeight);
        recordEveryNthFrame = 5;
    }
    frameCount = 0;
    // Start recording and switch the UI into recording mode.
    isVideoRecording = true;
    HideAllVideoUI();
    recordVideoButton.interactable = true;
    recordVideoButton.GetComponentInChildren<UnityEngine.UI.Text>().color = Color.red;
    CreateSettingInfo();
    // Start microphone and create the audio input (muted in-scene while captured).
    if (recordMicrophoneAudio)
    {
        await StartMicrophone();
        audioInput = new AudioInput(videoRecorder, recordingClock, microphoneSource, true);
    }
    // Unmute the microphone source only when no audio input is capturing it.
    microphoneSource.mute = audioInput == null;
    // Start the recording countdown; the UI shows "Recording..." with a growing ellipsis.
    cancellationTokenSource = new CancellationTokenSource();
    try
    {
        Debug.Log("Countdown start.");
        await CountdownAsync(
            sec =>
            {
                string str = "Recording";
                for (int i = 0; i < sec; i++)
                {
                    str += ".";
                }
                if (fpsMonitor != null)
                {
                    fpsMonitor.consoleText = str;
                }
            },
            MAX_RECORDING_TIME, cancellationTokenSource.Token);
        Debug.Log("Countdown end.");
    }
    catch (OperationCanceledException e)
    {
        // Only log cancellations raised by our own token.
        if (e.CancellationToken == cancellationTokenSource.Token)
        {
            Debug.Log("Countdown canceled.");
        }
    }
    cancellationTokenSource.Dispose();
    cancellationTokenSource = null;
    // Unity-overloaded null check: skip finishing if this component was
    // destroyed or disabled while awaiting the countdown.
    if (this != null && isActiveAndEnabled)
    {
        await FinishRecording();
    }
}
/// <summary>
/// Create a RenderTexture input which performs asynchronous readbacks.
/// </summary>
/// <param name="recorder">Media recorder to receive video frames.</param>
// DOCFIX: the summary previously said "synchronous readbacks" — copy-pasted from
// TextureInput; this type is the asynchronous variant.
public AsyncTextureInput(IMediaRecorder recorder) => this.recorder = recorder;
/// <summary>
/// Create an audio recording input from an AudioSource.
/// </summary>
/// <param name="recorder">Media recorder to receive audio frames.</param>
/// <param name="audioSource">Audio source to record.</param>
/// <param name="mute">Optional. Mute audio source while recording so that it is not heard in scene.</param>
// Delegates with a `default` (null) clock — presumably the target constructor
// then derives timestamps without a caller-supplied clock; TODO confirm.
public AudioInput(IMediaRecorder recorder, AudioSource audioSource, bool mute = false) : this(recorder, default, audioSource, mute) { }
/// <summary>
/// Create a Metal texture input.
/// </summary>
/// <param name="recorder">Media recorder to receive video frames.</param>
/// <param name="multithreading">Enable multithreading. This improves recording performance.</param>
public MTLTextureInput(IMediaRecorder recorder, bool multithreading = false)
{
    this.recorder = recorder;
    // Native Metal readback sized to the recorder's frame.
    var size = recorder.frameSize;
    this.readback = new MTLReadback(size.width, size.height, multithreading);
}
/// <summary>
/// Create an audio recording input from a scene's AudioListener.
/// </summary>
/// <param name="recorder">Media recorder to receive audio frames.</param>
/// <param name="audioListener">Audio listener for the current scene.</param>
// Delegates with a `default` (null) clock — presumably the target constructor
// then derives timestamps without a caller-supplied clock; TODO confirm.
public AudioInput(IMediaRecorder recorder, AudioListener audioListener) : this(recorder, default, audioListener) { }