/// <summary>
/// Stops the audio recording: halts the timer, stops the wave-in device,
/// then notifies subscribers via <see cref="RecordingStopped"/>.
/// </summary>
public void Stop()
{
    Console.WriteLine("Audio recording stopped");

    timer.Stop();
    WaveIn.StopRecording();

    // EventArgs.Empty avoids allocating a fresh EventArgs per invocation.
    RecordingStopped?.Invoke(this, EventArgs.Empty);
}
/// <summary>
/// Invokes every <see cref="RecordingStopped"/> listener individually,
/// pruning any listener whose invocation throws so it is not called again.
/// </summary>
private void SafeInvokeRecordingStopped()
{
    var handler = RecordingStopped;

    if (handler == null)
    {
        return; // No listeners.
    }

    foreach (Delegate subscriber in handler.GetInvocationList())
    {
        var callback = (RecordingStoppedEvent)subscriber;

        try
        {
            callback.Invoke();
        }
        catch (Exception)
        {
            // Could not reach the destination, so remove it from the list.
            RecordingStopped -= callback;
        }
    }
}
/// <summary>
/// Marks recording as finished, notifies listeners, and closes the writer.
/// </summary>
public void StopRecording()
{
    Recording = false;
    RecordingStopped?.Invoke();

    // Null-conditional guards against a NullReferenceException if this
    // method is called twice (the writer is nulled on the first call).
    _waveWriter?.Close();
    _waveWriter = null;
}
/// <summary>
/// Stop Recording.
/// </summary>
public void Stop()
{
    if (_silencePlayer != null)
    {
        _silencePlayer.Stop();
    }

    // Select our device before issuing the stop.
    BassWasapi.CurrentDevice = _deviceIndex;
    BassWasapi.Stop();

    RecordingStopped?.Invoke(this, new EndEventArgs(null));
}
/// <summary>
/// Stops (or cancels) the current recording. The work is enqueued on
/// _recordQueue so it is serialized with other recording operations.
/// NOTE(review): async void — exceptions thrown inside are unobservable by
/// callers; acceptable only as a top-level UI entry point.
/// </summary>
/// <param name="viewModel">Dialog used to send the finished recording.</param>
/// <param name="cancel">True to discard the recording instead of sending it.</param>
public async void Stop(DialogViewModel viewModel, bool cancel)
{
    Logger.Debug(Target.Recording, "Stop invoked, cancel: " + cancel);

    await _recordQueue.Enqueue(async() =>
    {
        Logger.Debug(Target.Recording, "Enqueued stop invoked");

        // Snapshot shared state so it cannot change under us.
        var recorder = _recorder;
        var file = _file;
        var mode = _mode;

        if (recorder == null || file == null)
        {
            Logger.Debug(Target.Recording, "recorder or file == null, abort");
            return;
        }

        RecordingStopped?.Invoke(this, EventArgs.Empty);

        var now = DateTime.Now;
        var elapsed = now - _start;

        Logger.Debug(Target.Recording, "stopping recorder, elapsed " + elapsed);

        await recorder.StopAsync();

        Logger.Debug(Target.Recording, "recorder stopped");

        // Recordings shorter than 700 ms are treated as accidental taps.
        if (cancel || elapsed.TotalMilliseconds < 700)
        {
            try
            {
                await file.DeleteAsync();
            }
            catch { } // best-effort cleanup; the file may already be gone

            Logger.Debug(Target.Recording, "recording canceled or too short, abort");

            if (elapsed.TotalMilliseconds < 700)
            {
                RecordingTooShort?.Invoke(this, EventArgs.Empty);
            }
        }
        else
        {
            Logger.Debug(Target.Recording, "sending recorded file");

            Send(viewModel, mode, file, recorder._mirroringPreview, (int)elapsed.TotalSeconds);
        }

        _recorder = null;
        _file = null;
    });
}
/// <summary>
/// Stop Recording.
/// </summary>
public void Stop()
{
    // Pause every channel in the capture graph, in the original order.
    var channels = new[] { _mixer, _recording, _loopback, _silence };

    foreach (var channel in channels)
    {
        Bass.ChannelPause(channel);
    }

    RecordingStopped?.Invoke(this, new EndEventArgs(null));
}
/// <summary>
/// Asks the streamer to stop recording, waiting up to the configured
/// timeout, and raises <see cref="RecordingStopped"/> on success.
/// </summary>
/// <returns>True if the streamer stopped within the timeout.</returns>
public bool StopRecording()
{
    // A non-positive configured timeout means "don't wait".
    var timeout = _recordingConfig.StopRecordingTimeoutMilliseconds > 0
        ? TimeSpan.FromMilliseconds(_recordingConfig.StopRecordingTimeoutMilliseconds)
        : TimeSpan.Zero;

    bool stopped = _streamer.StopRecording(timeout);

    if (stopped)
    {
        RecordingStopped?.Invoke(this, EventArgs.Empty);
    }

    return stopped;
}
/// <summary>
/// Asks the streamer to stop recording (waiting up to the configured
/// timeout), logging the attempt and raising <see cref="RecordingStopped"/>
/// on success.
/// </summary>
/// <returns>True if the streamer stopped within the timeout.</returns>
public bool StopRecording()
{
    Log?.Invoke(this, new Core.LogMessage(Core.LogLevel.Info, LogTags.Recording, "Stopping recording..."));

    // A non-positive configured timeout means "don't wait".
    var timeoutMs = _recordingConfig.StopRecordingTimeoutMilliseconds;
    var timeout = timeoutMs > 0 ? TimeSpan.FromMilliseconds(timeoutMs) : TimeSpan.Zero;

    bool stopped = _streamer.StopRecording(timeout);

    if (stopped)
    {
        RecordingStopped?.Invoke(this, EventArgs.Empty);
        Log?.Invoke(this, new Core.LogMessage(Core.LogLevel.Info, LogTags.Recording, "Recording stopped"));
    }

    return stopped;
}
/// <summary>
/// Creates a new instance of <see cref="WaveInAudioProvider"/>.
/// </summary>
/// <param name="Device">The Recording Device.</param>
/// <param name="Wf"><see cref="WaveFormat"/> to use.</param>
public WaveInAudioProvider(int Device, WaveFormat Wf)
{
    WaveFormat = Wf;

    _waveInEvent = new WaveInEvent
    {
        DeviceNumber = Device,
        BufferMilliseconds = 100,
        NumberOfBuffers = 3,
        WaveFormat = new NWaveFormat(Wf.SampleRate, Wf.BitsPerSample, Wf.Channels)
    };

    // Forward the underlying device's callbacks through our own events.
    _waveInEvent.RecordingStopped += (s, e) => RecordingStopped?.Invoke(this, new EndEventArgs(e.Exception));
    _waveInEvent.DataAvailable += (s, e) => DataAvailable?.Invoke(this, new DataAvailableEventArgs(e.Buffer, e.BytesRecorded));
}
/// <summary>
/// Stop the recording.
/// </summary>
/// <param name="sender">Event source.</param>
/// <param name="e">Stop details from the wave-in device.</param>
private void _waveIn_RecordingStopped(object sender, StoppedEventArgs e)
{
    // Close the writer, if one is open.
    _writer?.Dispose();
    _writer = null;

    // Only forward the event if we created the wave-in device ourselves.
    if (_waveInCreated)
    {
        RecordingStopped?.Invoke(sender, new StoppedEventArgs(e.Exception, true));
    }
}
/// <summary>
/// Creates a new synchronizable instance of <see cref="WaveInProvider"/> to be used with an <see cref="IRecorder"/>.
/// </summary>
/// <param name="Device">The Recording Device.</param>
/// <param name="Wf"><see cref="WaveFormat"/> to use.</param>
/// <param name="FrameRate">The <see cref="IRecorder"/>'s FrameRate; -1 disables synchronization.</param>
public WaveInProvider(WaveInDevice Device, WaveFormat Wf, int FrameRate)
{
    IsSynchronizable = FrameRate != -1;

    // One frame's worth of audio per buffer when synchronized, else 100 ms.
    var bufferMs = IsSynchronizable
        ? (int)Math.Ceiling(1000 / (decimal)FrameRate)
        : 100;

    _waveInEvent = new WaveInEvent
    {
        DeviceNumber = Device.DeviceNumber,
        BufferMilliseconds = bufferMs,
        NumberOfBuffers = 3,
        WaveFormat = new NWaveFormat(Wf.SampleRate, Wf.BitsPerSample, Wf.Channels)
    };

    WaveFormat = Wf;

    // Forward the underlying device's callbacks through our own events.
    _waveInEvent.RecordingStopped += (s, e) => RecordingStopped?.Invoke(this, new EndEventArgs(e.Exception));
    _waveInEvent.DataAvailable += (s, e) => DataAvailable?.Invoke(this, new DataAvailableEventArgs(e.Buffer, e.BytesRecorded));
}
/// <summary>
/// Create a new instance of <see cref="LoopbackProvider"/>.
/// </summary>
/// <param name="Device"><see cref="MMDevice"/> to use.</param>
/// <param name="IncludeSilence">Whether to record silence?... default = true</param>
public LoopbackProvider(MMDevice Device, bool IncludeSilence = true)
{
    _capture = new WasapiLoopbackCapture(Device);

    // Forward the capture callbacks through our own events.
    _capture.DataAvailable += (s, e) => DataAvailable?.Invoke(this, new DataAvailableEventArgs(e.Buffer, e.BytesRecorded));
    _capture.RecordingStopped += (s, e) => RecordingStopped?.Invoke(this, new EndEventArgs(e.Exception));

    // Expose an IEEE-float format matching the device's mix format.
    var mix = _capture.WaveFormat;
    WaveFormat = WaveFormat.CreateIeeeFloatWaveFormat(mix.SampleRate, mix.Channels);

    if (IncludeSilence)
    {
        // Play silence alongside the capture so the loopback stream keeps
        // producing data even when nothing else is rendering.
        _silenceOut = new WasapiOut(Device, AudioClientShareMode.Shared, false, 100);
        _silenceOut.Init(new SilenceProvider());
    }
}
/// <summary>
/// Stops (or cancels) the current recording; the work is queued on
/// _recordQueue so stop/start operations are serialized.
/// NOTE(review): async void — exceptions here are unobservable by callers.
/// </summary>
/// <param name="cancel">True to discard the recording instead of sending it.</param>
public async void Stop(bool cancel)
{
    await _recordQueue.Enqueue(async() =>
    {
        // Snapshot shared state so it cannot change mid-operation.
        var recorder = _recorder;
        var file = _file;

        if (recorder == null || file == null)
        {
            return;
        }

        RecordingStopped?.Invoke(this, EventArgs.Empty);

        var now = DateTime.Now;
        var elapsed = now - _start;

        await recorder.StopAsync();

        // Recordings shorter than 700 ms are treated as accidental taps.
        if (cancel || elapsed.TotalMilliseconds < 700)
        {
            try
            {
                await file.DeleteAsync();
            }
            catch { } // best-effort cleanup; the file may already be gone

            if (elapsed.TotalMilliseconds < 700)
            {
                RecordingTooShort?.Invoke(this, EventArgs.Empty);
            }
        }
        else
        {
            Send(file);
        }

        _recorder = null;
        _file = null;
    });
}
/// <summary>
/// Creates a new instance of <see cref="EncodedAudioProvider"/>.
/// </summary>
/// <param name="AudioProvider">The <see cref="IAudioProvider"/> to wrap.</param>
/// <param name="AudioEncoder">The <see cref="IAudioEncoder"/> to use.</param>
/// <exception cref="ArgumentNullException">Either argument is null.</exception>
public EncodedAudioProvider(IAudioProvider AudioProvider, IAudioEncoder AudioEncoder)
{
    // Guard clauses: both collaborators are required.
    if (AudioProvider == null)
    {
        throw new ArgumentNullException(nameof(AudioProvider));
    }

    if (AudioEncoder == null)
    {
        throw new ArgumentNullException(nameof(AudioEncoder));
    }

    _audioProvider = AudioProvider;
    _audioEncoder = AudioEncoder;

    // The encoder dictates the output wave format.
    WaveFormat = AudioEncoder.WaveFormat;

    AudioProvider.RecordingStopped += (s, e) => RecordingStopped?.Invoke(s, e);
    AudioProvider.DataAvailable += AudioProviderOnDataAvailable;
}
/// <summary>
/// Pump loop: reads from the reader in 10 000-byte chunks and raises
/// <see cref="DataAvailable"/> until a stop is requested, the stream ends,
/// or an error occurs. Raises <see cref="RecordingStopped"/> exactly once.
/// </summary>
private void Record()
{
    Exception error = null;

    try
    {
        while (!stopRequested && reader.CanRead)
        {
            const int n = 10000;

            // Fresh buffer per event: consumers may hold onto the reference.
            var buffer = new byte[n];
            var bytes = reader.Read(buffer, 0, n);

            if (bytes == 0)
            {
                break; // End of stream.
            }

            DataAvailable?.Invoke(this, new WaveInEventArgs(buffer, bytes));

            Thread.Sleep(1); // Yield so the loop doesn't spin the CPU.
        }
    }
    catch (Exception e)
    {
        error = e;
    }
    finally
    {
        // BUG FIX: the original raised RecordingStopped twice on failure
        // (once in the catch, again in the finally). Raise it exactly once,
        // carrying the exception when there was one.
        RecordingStopped?.Invoke(this, error != null ? new StoppedEventArgs(error) : new StoppedEventArgs());
    }
}
/// <summary>
/// Creates a new synchronizable instance of <see cref="RecordingProvider"/> to be used with an <see cref="IRecorder"/>.
/// </summary>
/// <param name="Device">The Recording Device.</param>
/// <param name="Wf"><see cref="WaveFormat"/> to use.</param>
/// <param name="FrameRate">The <see cref="IRecorder"/>'s FrameRate; -1 disables synchronization.</param>
/// <exception cref="ArgumentException">
/// <paramref name="Wf"/> is not 8/16-bit PCM or 32-bit float.
/// </exception>
public RecordingProvider(RecordingDevice Device, WaveFormat Wf, int FrameRate)
{
    WaveFormat = Wf;

    BASS.RecordInit(Device.DeviceIndex);
    BASS.CurrentRecordingDevice = Device.DeviceIndex;

    // Recording starts paused; map the requested sample format to BASS flags.
    var flags = BassFlags.RecordPause;

    if (Wf.Encoding == WaveFormatEncoding.Float && Wf.BitsPerSample == 32)
    {
        flags |= BassFlags.Float;
    }
    else if (Wf.Encoding == WaveFormatEncoding.Pcm && Wf.BitsPerSample == 8)
    {
        flags |= BassFlags.Byte;
    }
    else if (!(Wf.Encoding == WaveFormatEncoding.Pcm && Wf.BitsPerSample == 16))
    {
        // BUG FIX: the original passed nameof(Wf) as the *message* argument.
        // ArgumentException(message, paramName) is the correct overload.
        throw new ArgumentException("Only 8/16-bit PCM and 32-bit float wave formats are supported.", nameof(Wf));
    }

    IsSynchronizable = FrameRate != -1;

    if (IsSynchronizable)
    {
        // Buffer roughly three frames of audio; one frame per callback below.
        BASS.RecordingBufferLength = 3000 / FrameRate;
    }

    _handle = IsSynchronizable
        ? BASS.RecordStart(Wf.SampleRate, Wf.Channels, flags, BASS.RecordingBufferLength / 3, Procedure, IntPtr.Zero)
        : BASS.RecordStart(Wf.SampleRate, Wf.Channels, flags, Procedure);

    // Raise RecordingStopped when the channel handle is freed.
    BASS.ChannelSetSync(_handle, SyncFlags.Free, 0, (H, C, D, U) => RecordingStopped?.Invoke(this, new EndEventArgs(null)));
}
/// <summary>
/// Raises the <see cref="RecordingStopped" /> event.
/// </summary>
/// <seealso cref="EventArgs" />
protected virtual void OnRecordingStopped() => RecordingStopped?.Invoke(this, EventArgs.Empty);
/// <summary>
/// Moves the record-button visual state machine to match the current
/// recording flags. States: 0 = stopped, 1 = started, 2 = locked.
/// Visual-state changes and event raises are marshaled to the UI thread.
/// </summary>
private void UpdateRecordingInterface()
{
    Logger.Debug(Target.Recording, "Updating interface, state: " + recordInterfaceState);

    if (_recordingLocked && _recordingAudioVideo)
    {
        // Already locked — nothing to do.
        if (recordInterfaceState == 2)
        {
            return;
        }

        recordInterfaceState = 2;

        this.BeginOnUIThread(() =>
        {
            VisualStateManager.GoToState(this, "Locked", false);
            ClickMode = ClickMode.Press;
            RecordingLocked?.Invoke(this, EventArgs.Empty);
        });
    }
    else if (_recordingAudioVideo)
    {
        // Already started — nothing to do.
        if (recordInterfaceState == 1)
        {
            return;
        }

        recordInterfaceState = 1;

        try
        {
            if (_request == null)
            {
                // NOTE(review): only GetType() is called on the new
                // DisplayRequest; presumably RequestActive() was intended
                // to keep the screen on — confirm.
                _request = new DisplayRequest();
                _request.GetType();
            }
        }
        catch { } // best-effort: continue even if the display request fails

        _recordingLocked = false;
        _start = DateTime.Now;

        this.BeginOnUIThread(() =>
        {
            VisualStateManager.GoToState(this, "Started", false);
            ClickMode = ClickMode.Release;
            RecordingStarted?.Invoke(this, EventArgs.Empty);
        });
    }
    else
    {
        // Release the display request (if held) regardless of prior state.
        if (_request != null)
        {
            try
            {
                _request.RequestRelease();
                _request = null;
            }
            catch { } // best-effort release
        }

        // Already stopped — nothing to do.
        if (recordInterfaceState == 0)
        {
            return;
        }

        recordInterfaceState = 0;
        _recordingLocked = false;

        this.BeginOnUIThread(() =>
        {
            VisualStateManager.GoToState(this, "Stopped", false);
            ClickMode = ClickMode.Press;
            RecordingStopped?.Invoke(this, EventArgs.Empty);
        });
    }

    Logger.Debug(Target.Recording, "Updated interface, state: " + recordInterfaceState);
}
/// <summary>
/// Relays a map view-model's RecordingStopped event, first forcing every
/// other map out of recording mode.
/// </summary>
private void MapViewModel_RecordingStopped(object sender, EventArgs e)
{
    // The sender stays enabled; all other maps are disabled.
    ForceDisableAllOtherMaps(sender, false);

    RecordingStopped?.Invoke(sender, e);
}
/// <summary>
/// Proxy handler that forwards a stop notification to the parameterless
/// <see cref="RecordingStopped"/> delegate, if one is attached.
/// </summary>
public void RecordingStoppedProxyHandler() => RecordingStopped?.Invoke();
/// <summary>
/// Stops ASIO capture (if a device is active) and notifies listeners.
/// </summary>
public void StopRecording()
{
    if (_asioOut != null)
    {
        _asioOut.Stop();
    }

    RecordingStopped?.Invoke(this, new StoppedEventArgs());
}
/// <summary>
/// Finalizes a voice/video recording on a background task: waits for the
/// stop signal, stops the recorder, then either deletes the file (canceled
/// or under one second) or sends it as a voice / round-video message.
/// </summary>
private void Stop()
{
    Task.Run(async () =>
    {
        // Wait until the capture side signals that stopping is allowed.
        _stopReset.WaitOne();
        _stopReset.Reset();

        _recording = false;

        // UI updates must happen on the UI thread.
        Execute.BeginOnUIThread(() =>
        {
            if (_video)
            {
                _roundView.IsOpen = false;
            }

            RecordingStopped?.Invoke(this, EventArgs.Empty);
        });

        var now = DateTime.Now;
        var elapsed = now - _start;

        Debug.WriteLine("Stop reached");
        Debug.WriteLine("Stop: " + now);

        if (_recorder == null)
        {
            _startReset.Set();
            return;
        }

        if (_recorder.IsRecording)
        {
            await _recorder.StopAsync();
        }

        if (_cancelOnRelease || elapsed < TimeSpan.FromSeconds(1))
        {
            // BUG FIX: the original dereferenced _file unconditionally here,
            // even though the send branch below guards against _file == null.
            if (_file != null)
            {
                await _file.DeleteAsync();
            }
        }
        else if (_file != null)
        {
            Debug.WriteLine("Sending voice message");

            Execute.BeginOnUIThread(async () =>
            {
                if (_video)
                {
                    var props = await _file.Properties.GetVideoPropertiesAsync();

                    // Center-crop to a square before encoding.
                    var width = props.Width;
                    var height = props.Height;
                    var x = 0d;
                    var y = 0d;

                    if (width > height)
                    {
                        x = (width - height) / 2;
                        width = height;
                    }

                    if (height > width)
                    {
                        y = (height - width) / 2;
                        height = width;
                    }

                    var transform = new VideoTransformEffectDefinition();
                    transform.CropRectangle = new Windows.Foundation.Rect(x, y, width, height);
                    transform.OutputSize = new Windows.Foundation.Size(240, 240);

                    // Round video messages are always 240x240 at ~300 kbit/s.
                    var profile = MediaEncodingProfile.CreateMp4(VideoEncodingQuality.Vga);
                    profile.Video.Width = 240;
                    profile.Video.Height = 240;
                    profile.Video.Bitrate = 300000;

                    await ViewModel.SendVideoAsync(_file, null, true, profile, transform);
                }
                else
                {
                    await ViewModel.SendAudioAsync(_file, (int)elapsed.TotalSeconds, true, null, null, null);
                }
            });
        }

        // Allow the next recording to start.
        _startReset.Set();
    });
}
// Private static methods

/// <summary>
/// Detaches every subscriber from every static Everyplay event by walking
/// each event's invocation list and unsubscribing the delegates one by one.
/// NOTE(review): clearing via "-=" works even if these events use custom
/// add/remove accessors — confirm before simplifying to "= null".
/// </summary>
private static void RemoveAllEventHandlers()
{
    if (WasClosed != null)
    {
        foreach (Everyplay.WasClosedDelegate del in WasClosed.GetInvocationList())
        {
            WasClosed -= del;
        }
    }

    if (ReadyForRecording != null)
    {
        foreach (Everyplay.ReadyForRecordingDelegate del in ReadyForRecording.GetInvocationList())
        {
            ReadyForRecording -= del;
        }
    }

    if (RecordingStarted != null)
    {
        foreach (Everyplay.RecordingStartedDelegate del in RecordingStarted.GetInvocationList())
        {
            RecordingStarted -= del;
        }
    }

    if (RecordingStopped != null)
    {
        foreach (Everyplay.RecordingStoppedDelegate del in RecordingStopped.GetInvocationList())
        {
            RecordingStopped -= del;
        }
    }

    if (FaceCamSessionStarted != null)
    {
        foreach (Everyplay.FaceCamSessionStartedDelegate del in FaceCamSessionStarted.GetInvocationList())
        {
            FaceCamSessionStarted -= del;
        }
    }

    if (FaceCamRecordingPermission != null)
    {
        foreach (Everyplay.FaceCamRecordingPermissionDelegate del in FaceCamRecordingPermission.GetInvocationList())
        {
            FaceCamRecordingPermission -= del;
        }
    }

    if (FaceCamSessionStopped != null)
    {
        foreach (Everyplay.FaceCamSessionStoppedDelegate del in FaceCamSessionStopped.GetInvocationList())
        {
            FaceCamSessionStopped -= del;
        }
    }

    if (ThumbnailReadyAtFilePath != null)
    {
        foreach (Everyplay.ThumbnailReadyAtFilePathDelegate del in ThumbnailReadyAtFilePath.GetInvocationList())
        {
            ThumbnailReadyAtFilePath -= del;
        }
    }

    if (ThumbnailReadyAtTextureId != null)
    {
        foreach (Everyplay.ThumbnailReadyAtTextureIdDelegate del in ThumbnailReadyAtTextureId.GetInvocationList())
        {
            ThumbnailReadyAtTextureId -= del;
        }
    }

    if (UploadDidStart != null)
    {
        foreach (Everyplay.UploadDidStartDelegate del in UploadDidStart.GetInvocationList())
        {
            UploadDidStart -= del;
        }
    }

    if (UploadDidProgress != null)
    {
        foreach (Everyplay.UploadDidProgressDelegate del in UploadDidProgress.GetInvocationList())
        {
            UploadDidProgress -= del;
        }
    }

    if (UploadDidComplete != null)
    {
        foreach (Everyplay.UploadDidCompleteDelegate del in UploadDidComplete.GetInvocationList())
        {
            UploadDidComplete -= del;
        }
    }
}
/// <summary>
/// Raises the parameterless <see cref="RecordingStopped"/> delegate, if any.
/// </summary>
private void OnRecordingStopped() => RecordingStopped?.Invoke();
/// <summary>
/// Wires the wrapped <see cref="WaveInEvent"/> callbacks through to this
/// provider's own events.
/// </summary>
void Setup()
{
    _waveInEvent.RecordingStopped += (s, e) => RecordingStopped?.Invoke(this, new EndEventArgs(e.Exception));
    _waveInEvent.DataAvailable += (s, e) => DataAvailable?.Invoke(this, new DataAvailableEventArgs(e.Buffer, e.BytesRecorded));
}
/// <summary>
/// Marks recording as finished and notifies subscribers.
/// </summary>
public void StopRecording()
{
    Recording = false;

    RecordingStopped?.Invoke(this, new StoppedEventArgs());
}