private void Recorder_Stopped(IAudioRecorder arg1, ErrorEventArgs arg2)
{
    Recorder.Stopped -= Recorder_Stopped;
    Directory.CreateDirectory(System.IO.Path.GetDirectoryName(PathToCurrentAudioSegment)); // make sure audio directory exists
    int millisecondsToTrimFromEndForMouseClick = 100;
    try
    {
        var minimum = TimeSpan.FromMilliseconds(300); // this is arbitrary
        AudioRecorder.TrimWavFile(PathToTemporaryWav, PathToCurrentAudioSegment, new TimeSpan(),
            TimeSpan.FromMilliseconds(millisecondsToTrimFromEndForMouseClick), minimum);
        RobustFile.Delete(PathToTemporaryWav); // Otherwise, these continue to clutter up the temp directory.
    }
    catch (Exception error)
    {
        Logger.WriteEvent(error.Message);
        RobustFile.Copy(PathToTemporaryWav, PathToCurrentAudioSegment, true);
    }
    // We don't actually need the mp3 now, so let people play with recording even without LAME (previously it could crash BL-3159).
    // We could put this off entirely until we make the ePUB.
    // I'm just gating this for now because maybe the thought was that it's better to do it a little at a time?
    // That's fine so long as it doesn't make the UI unresponsive on slow machines.
    if (LameEncoder.IsAvailable())
    {
        _mp3Encoder.Encode(PathToCurrentAudioSegment,
            PathToCurrentAudioSegment.Substring(0, PathToCurrentAudioSegment.Length - 4), new NullProgress());
        // Note: we need to keep the .wav file as well as the mp3 one. The mp3 format (or alternative mp4)
        // is required for ePUB. The wav file is a better permanent record of the recording; also,
        // it is used for playback.
    }
}
//public RecorderViewModel(IAudioRecorder recorder)
public RecorderViewModel(IAudioRecorder recorder, IAudioRecorder recorder2)
{
    this.recorder = recorder;
    this.recorder2 = recorder2;
    this.recorder.Stopped += OnRecorderStopped;
    this.recorder2.Stopped += OnRecorderStopped;
    beginRecordingCommand = new RelayCommand(BeginRecording,
        () => recorder.RecordingState == RecordingState.Stopped ||
              recorder.RecordingState == RecordingState.Monitoring);
    stopCommand = new RelayCommand(Stop,
        () => recorder.RecordingState == RecordingState.Recording);
    recorder.SampleAggregator.MaximumCalculated += OnRecorderMaximumCalculated;
    recorder2.SampleAggregator.MaximumCalculated += OnRecorderMaximumCalculated2;
    Messenger.Default.Register<ShuttingDownMessage>(this, OnShuttingDown);

    // TODO: do these commands also need counterparts for the second recorder?
    /*
     * beginRecordingCommand = new RelayCommand(BeginRecording,
     *     () => recorder2.RecordingState == RecordingState.Stopped ||
     *           recorder2.RecordingState == RecordingState.Monitoring);
     * stopCommand = new RelayCommand(Stop,
     *     () => recorder2.RecordingState == RecordingState.Recording);
     */
    //Messenger.Default.Register<ShuttingDownMessage>(this, OnShuttingDown);
    // TODO: still need to check these.
}
public AudioIrrKlangSession(string filePath) { _engine = new ISoundEngine(); _recorder = new IAudioRecorder(_engine); _path = filePath; _irrklangEventProxy = new ProxyForIrrklangEvents(this); }
public RecorderViewModel(IAudioRecorder recorder) { this.recorder = recorder; this.recorder.Stopped += OnRecorderStopped; beginRecordingCommand = new RelayCommand(BeginRecording, () => recorder.RecordingState == RecordingState.Stopped || recorder.RecordingState == RecordingState.Monitoring); stopCommand = new RelayCommand(Stop, () => recorder.RecordingState == RecordingState.Recording); recorder.SampleAggregator.MaximumCalculated += OnRecorderMaximumCalculated; Messenger.Default.Register <ShuttingDownMessage>(this, OnShuttingDown); this.startWcommand = new RelayCommand(() => { if (startcount == 1) { bw.RunWorkerAsync(); startcount++; } }); this.stopWcommand = new RelayCommand(() => { Stop(); startcount = 1; }); bw.DoWork += new DoWorkEventHandler(bw_DoWork); }
private AudioManager() { _recorder = new AudioRecorder(this, TimeSpan.FromMilliseconds(Constants.FragmentLenght)); _localAudio = new DataFragmentCollection(); _recorder.DataAvailable += recorder_DataAvailable; }
//private int[] mSampleRates = new int[] { 8000, 11025, 22050, 44100 };
//private int[] audioFormats = new int[] { (int)Encoding.Pcm8bit, (int)Encoding.Pcm16bit };
//private int[] channelConfigs = new int[] { (int)ChannelIn.Mono, (int)ChannelIn.Stereo };
//private int rate = 44100;
//private Encoding audioEncoding = Encoding.Pcm16bit;
//private ChannelIn channelConfig = ChannelIn.Stereo;

//public Action<bool> RecordingStateChanged;

//public string[] PermissionsAudio { get; } = {
//    Manifest.Permission.RecordAudio
//};

protected override void OnCreate(Bundle bundle)
{
    base.OnCreate(bundle);

    _container = new TinyIoCContainer();
    _container.Register<IAudioRecorder, AudioRecorder>().AsSingleton();
    _container.Register<IAudioPlayback, AudioPlayback>().AsSingleton();
    _container.Register<ITrackMixer, TrackMixer>().AsSingleton();
    _audioPlayback = _container.Resolve<IAudioPlayback>();
    _audioRecorder = _container.Resolve<IAudioRecorder>();
    _trackMixer = _container.Resolve<ITrackMixer>();

    // Set our view from the "main" layout resource
    SetContentView(Resource.Layout.Main);
    audioTracks = new List<byte[]>();
    //layout = FindViewById<LinearLayout>(Resource.Layout.Main);
    //mainTextView = FindViewById<TextView>(Resource.Id.textView1);

    Button startRecordingButton = FindViewById<Button>(Resource.Id.btnStartRecording);
    Button stopRecordingButton = FindViewById<Button>(Resource.Id.btnStopRecording);
    Button playbackButton = FindViewById<Button>(Resource.Id.btnPlayback);
    Button mixtrackButton = FindViewById<Button>(Resource.Id.btnMixTracks);

    startRecordingButton.Click += async (sender, e) => await StartRecordingButton_Click();
    stopRecordingButton.Click += async (sender, e) => await StopRecordingButton_Click();
    playbackButton.Click += async (sender, e) => await PlaybackButton_Click();
    mixtrackButton.Click += async (sender, e) => await MixtrackButton_Click();
}
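// --- Added sketch (not from the original project) ---
// The commented-out PermissionsAudio array above hints at the runtime permission this
// recorder activity needs. A minimal, hypothetical way for the activity to check and
// request RecordAudio on Android 6.0+ using the support-library compat helpers
// (Android.Support.V4.App / Android.Support.V4.Content); the request code and the
// method name are assumptions, not part of the original code.
private const int RequestRecordAudioId = 1; // hypothetical request code

private void EnsureRecordAudioPermission()
{
    if (ContextCompat.CheckSelfPermission(this, Manifest.Permission.RecordAudio) != Permission.Granted)
    {
        ActivityCompat.RequestPermissions(
            this,
            new[] { Manifest.Permission.RecordAudio },
            RequestRecordAudioId);
    }
}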
public void Init( IBrain brain, IVideoProvider videoProvider, IVideoRecorder videoRecorder, IAudioRecorder audioRecorder, IAudioPlayer audioPlayer, IAudioRepository audioRepository) { this.brain = brain; this.videoProvider = videoProvider; this.videoRecorder = videoRecorder; this.audioRecorder = audioRecorder; var frameObserver = new FrameObserver(brain); var frameRecognizedObserver = new FrameRecognizedObserver(audioPlayer, audioRepository); recognizeAvailableFrameSubscription = videoProvider.FrameAvailable.Subscribe(frameObserver); this.lifetimeStreams = new CompositeDisposable { brain.FrameRecognized.Subscribe(frameRecognizedObserver), videoRecorder.RecordingAvailable.Subscribe(new VideoPublisher(brain)), audioRecorder.RecordingAvailable.Subscribe(new AudioPublisher(audioRepository)) }; }
/// <summary>
/// Initializes a new instance of the MainViewModel class.
/// </summary>
public MainWindowViewModel(IAudioRecorder audioRecorder)
{
    _audioRecorder = audioRecorder;

    //_dataService.GetData(
    //    (item, error) =>
    //    {
    //        if (error != null)
    //        {
    //            // Report error here
    //            return;
    //        }
    //        WelcomeTitle = item.Title;
    //    });

    SeriesCollection = new SeriesCollection
    {
        new LineSeries
        {
            Title = "Frequency",
            LineSmoothness = 1,
            StrokeThickness = 1,
            DataLabels = false,
            PointGeometrySize = 0,
            Fill = System.Windows.Media.Brushes.Transparent,
            Values = new ChartValues<double> { 0 }
        }
    };

    SeriesCollection2 = new SeriesCollection
    {
        new LineSeries
        {
            Title = "Frequency",
            LineSmoothness = 1,
            StrokeThickness = 1,
            DataLabels = false,
            PointGeometrySize = 0,
            Fill = System.Windows.Media.Brushes.Transparent,
            Values = new ChartValues<double> { 0 }
        }
    };

    toggleCommand = new RelayCommand(ToggleRecording,
        () => this._audioRecorder.RecordingState == RecordingState.Stopped ||
              this._audioRecorder.RecordingState == RecordingState.Monitoring ||
              this._audioRecorder.RecordingState == RecordingState.Recording);

    this._audioRecorder.SampleAggregator.MaximumCalculated += OnRecorderMaximumCalculated;
    this._audioRecorder.SampleAggregator.WaveformCalculated += SampleAggregator_WaveformCalculated;
    Messenger.Default.Register<ShuttingDownMessage>(this, OnShuttingDown);
    this.ToggleButtonText = "Start Monitoring";
}
public AudioIrrKlangSession(string filePath)
{
    _soundFile = new SoundFile(filePath);
    _engine.AddFileFactory(_soundFile);
    _recorder = new IAudioRecorder(_engine);
    _path = filePath;
    //_irrklangEventProxy = new ProxyForIrrklangEvents(this);
}
public void Dispose() { if (_recorder != null) { _recorder.Dispose(); _recorder = null; } }
public VoiceEnrollmentPageViewModel(INavigationService navigationService, IAudioRecorder audioRecorder, IAudioPlayer audioPlayer) : base(navigationService) { _audioRecorder = audioRecorder; _audioPlayer = audioPlayer; _myVoiceIt = new VoiceIt2("key_140b7c835a984de2a4921ea57d128245", "tok_c3d96357247149479e5b13375c13e124"); RecordVoiceText = "Start Recording"; }
public RecordingManager(IAppConfiguration appConfiguration) { this.appConfiguration = appConfiguration; recorder = DependencyService.Get <IAudioRecorder>(); if (recorder == null) { Debug.WriteLine("Problem getting IAudioRecorder"); } }
private static void RecorderViewModel(IAudioRecorder rec) { recorder = rec; recorder.Stopped += (sender, e) => { new VoiceRecorderState(waveFileName, null); }; SampleAggregator.MaximumCalculated += (sender, e) => { lastPeak = Math.Max(e.MaxSample, Math.Abs(e.MinSample)); Console.WriteLine($"Voice level -> {CurrentInputLevel} | Time -> {RecordedTime}"); }; }
public MainViewModel(IAudioRecorder audioRecorder, Settings settings, ICallNumber callNumber, IReactOnCall reactOnCall, ICreateTimer createTimer, IUnMutePhone unmutePhone, IMutePhone mutePhone, ISpeaker speaker) { this.Settings = settings; this.m_RecorderViewModel = audioRecorder; this.m_RecorderViewModel.Start(); this.m_PhoneViewModel = new PhoneViewModel(m_RecorderViewModel, settings, callNumber, reactOnCall, createTimer, mutePhone, unmutePhone, speaker); this.m_InfoTimer = createTimer.Create(new TimeSpan(0, 0, 0, 0, 250)); this.m_InfoTimer.AutoReset = true; this.m_InfoTimer.MyElapsed += m_Timer_Elapsed; this.m_InfoTimer.Start(); }
public RecorderViewModel(IAudioRecorder recorder) { this.recorder = recorder; this.recorder.Stopped += new EventHandler(recorder_Stopped); this.beginRecordingCommand = new RelayCommand(() => BeginRecording(), () => recorder.RecordingState == RecordingState.Stopped || recorder.RecordingState == RecordingState.Monitoring); this.stopCommand = new RelayCommand(() => Stop(), () => recorder.RecordingState == RecordingState.Recording); recorder.SampleAggregator.MaximumCalculated += new EventHandler<MaxSampleEventArgs>(recorder_MaximumCalculated); Messenger.Default.Register<ShuttingDownMessage>(this, (message) => OnShuttingDown(message)); }
public RecorderViewModel(IAudioRecorder recorder) { this.recorder = recorder; this.recorder.Stopped += OnRecorderStopped; beginRecordingCommand = new RelayCommand(BeginRecording, () => recorder.RecordingState == RecordingState.Stopped || recorder.RecordingState == RecordingState.Monitoring); stopCommand = new RelayCommand(Stop, () => recorder.RecordingState == RecordingState.Recording); recorder.SampleAggregator.MaximumCalculated += OnRecorderMaximumCalculated; Messenger.Default.Register <ShuttingDownMessage>(this, OnShuttingDown); }
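// --- Added sketch (not from the original projects) ---
// Several of the constructors above wire RelayCommand can-execute predicates to
// recorder.RecordingState and subscribe OnRecorderStopped / OnRecorderMaximumCalculated.
// A minimal, hypothetical pair of handlers showing how such a view model typically
// refreshes those commands and surfaces the input level; the CurrentInputLevel property
// and the use of MVVM Light's RaiseCanExecuteChanged are assumptions, not code taken
// from the originals.
private void OnRecorderStopped(object sender, EventArgs e)
{
    // recording finished: re-evaluate which commands are enabled
    beginRecordingCommand.RaiseCanExecuteChanged();
    stopCommand.RaiseCanExecuteChanged();
}

private void OnRecorderMaximumCalculated(object sender, MaxSampleEventArgs e)
{
    // convert the peak of the last block of samples into a 0..100 level for the UI
    float peak = Math.Max(e.MaxSample, Math.Abs(e.MinSample));
    CurrentInputLevel = (int)(peak * 100); // hypothetical bound property
}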
private static void startVoIPSession() { DependencyService.Get <IPowerManager>().AquireLock("partial"); DependencyService.Get <IPowerManager>().AquireLock("wifi"); audioPlayer = DependencyService.Get <IAudioPlayer>(DependencyFetchTarget.NewInstance); audioPlayer.start(); audioRecorder = DependencyService.Get <IAudioRecorder>(DependencyFetchTarget.NewInstance); audioRecorder.start(); audioRecorder.setOnSoundDataReceived((data) => { StreamProcessor.sendAppData(currentCallContact, currentCallSessionId, data); }); }
public AudioProcessor(IAudioRecorder audioRecorder, IAudioEvaluator audioEvaluator, IWatcherAudioStorage watcherAudioStorage) { _audioRecorder = audioRecorder; _audioEvaluator = audioEvaluator; _watcherAudioStorage = watcherAudioStorage; _buffer = new CircularBuffer <byte>(BUFFER_SIZE); _sampleAggregator = new SampleAggregator(); _startedWatchers = new Dictionary <Guid, Watcher>(); _audioRecorder.SetSampleAggregator(_sampleAggregator); _timer = new Timer(1000); _timer.Elapsed += _timer_Elapsed; }
private void InitRecorder()
{
    RecordingDevices = new ObservableCollection<string>();
    Recorder = new AudioRecorder();
    for (int n = 0; n < WaveIn.DeviceCount; n++)
    {
        RecordingDevices.Add(WaveIn.GetCapabilities(n).ProductName);
        OnMyPropertyChanged(() => RecordingDevices);
    }
    //cbDevices.ItemsSource = RecordingDevices;
    if (RecordingDevices.Count > 0)
    {
        cbDevices.SelectedIndex = 0;
    }
    Recorder.SampleAggregator.MaximumCalculated += OnRecorderMaximumCalculated;
}
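// --- Added sketch (not from the original project) ---
// Once a device index has been chosen from the WaveIn enumeration above, capturing from
// it with NAudio usually looks roughly like this; the 44.1 kHz mono format, the output
// path, and the field/method names are assumptions, not values from the original code.
private WaveInEvent _waveIn;      // hypothetical fields
private WaveFileWriter _writer;

private void StartCapture(int deviceNumber)
{
    _waveIn = new WaveInEvent
    {
        DeviceNumber = deviceNumber,
        WaveFormat = new WaveFormat(44100, 1)
    };
    _writer = new WaveFileWriter("capture.wav", _waveIn.WaveFormat);
    _waveIn.DataAvailable += (s, e) => _writer.Write(e.Buffer, 0, e.BytesRecorded);
    _waveIn.RecordingStopped += (s, e) =>
    {
        _writer?.Dispose();
        _waveIn?.Dispose();
    };
    _waveIn.StartRecording();
}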
void RecordBtn_Clicked(object sender, EventArgs e)
{
    if (audioRecorder == null)
    {
        audioRecorder = DependencyService.Get<IAudioRecorder>();
    }
    bool isAudioRecording = audioRecorder.RecordAudio();
    if (isAudioRecording == false)
    {
        DisplayAlert("Audio recording", "Sorry, the audio recording service is not available right now. Please try again later.", "OK");
    }
    else
    {
        DisplayAlert("Audio recording", "Audio recording started", "OK");
    }
}
public PhoneViewModel(IAudioRecorder recorder, Settings settings, ICallNumber callNumber, IReactOnCall reactOnCall, ICreateTimer createTimer, IMutePhone mutePhone, IUnMutePhone unmutePhone, ISpeaker speaker) { this.IsStarted = false; this.m_Speaker = speaker; this.m_UnmutePhone = unmutePhone; this.m_MutePhone = mutePhone; this.m_Settings = settings; this.m_CallNumber = callNumber; this.m_AudioRecorderViewModel = recorder; this.m_PhoneTimer = createTimer.Create(new TimeSpan(0, 0, 0, 1, 0)); this.m_PhoneTimer.AutoReset = false; this.m_PhoneTimer.MyElapsed += m_PhoneTimer_Elapsed; this.m_ReactOnCall = reactOnCall; this.m_ReactOnCall.Register(this.OnHangUp); }
private void Start_OnClick(object sender, RoutedEventArgs e) { Log("Starting recording..."); var firstDevice = _nAudioApiWrapper.InputDevices.FirstOrDefault(); if (firstDevice == null) { Log("Error, no recording devices found!"); } else { this.recordingNumber++; _audioRecorder = firstDevice.CreateRecording("temp00" + this.recordingNumber + ".wav"); _audioRecorder.BytesAvailable += _dynamicAudioRenderer.StreamBytes; _audioRecorder.Start(); StopButton.IsEnabled = true; StartButton.IsEnabled = false; } }
private static void StopRecording(IAudioRecorder recorder, ISongWriter writer) { Task.Run(() => { var recorded = recorder.StopRecording(); if (recorded == null) return; ID3TagService service = new ID3TagService(); var tags = service.GetTags(recorded); tags.Artists = new[] { recorded.Song.Artist }; tags.Title = recorded.Song.Title; service.UpdateTags(tags, recorded); writer.WriteSong(recorded); }); }
private static void endVoIPSession() { DependencyService.Get <IPowerManager>().ReleaseLock("partial"); DependencyService.Get <IPowerManager>().ReleaseLock("wifi"); if (audioPlayer != null) { audioPlayer.Dispose(); audioPlayer = null; } if (audioRecorder != null) { audioRecorder.Dispose(); audioRecorder = null; } currentCallSessionId = null; currentCallContact = null; currentCallCalleeAccepted = false; currentCallAccepted = false; }
/// <summary>
/// Release and clean up the audio resources.
/// </summary>
public static void CleanUp()
{
    if (AudioFileReader != null)
    {
        AudioFileReader.Dispose();
        AudioFileReader = null;
    }
    if (WavePlayer != null)
    {
        WavePlayer.Stop();
        WavePlayer.Dispose();
        WavePlayer = null;
    }
    if (Recorder != null)
    {
        Recorder.Stop();
        Recorder.Dispose();
        Recorder = null;
    }
}
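// --- Added sketch (not from the original project) ---
// The field names above suggest NAudio's AudioFileReader and an IWavePlayer implementation.
// A minimal, hypothetical initialization that CleanUp() would later tear down; the field
// types, the method name, and the choice of WaveOutEvent are assumptions.
public static void StartPlayback(string audioFilePath)
{
    AudioFileReader = new AudioFileReader(audioFilePath);
    WavePlayer = new WaveOutEvent();
    WavePlayer.Init(AudioFileReader);
    WavePlayer.Play();
}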
private void CleanUp()
{
    if (null != this.m_PhoneTimer)
    {
        this.m_PhoneTimer.Stop();
        this.m_PhoneTimer.MyElapsed -= m_PhoneTimer_Elapsed;
        this.m_PhoneTimer.Dispose();
        this.m_PhoneTimer = null;
    }
    if (null != m_MutePhone)
    {
        this.m_MutePhone.Dispose();
        this.m_MutePhone = null;
    }
    if (null != m_UnmutePhone)
    {
        this.m_UnmutePhone.Dispose();
        this.m_UnmutePhone = null;
    }
    if (null != m_ReactOnCall)
    {
        m_ReactOnCall.Dispose();
        m_ReactOnCall = null;
    }
    if (null != m_CallNumber)
    {
        m_CallNumber.Dispose();
        m_CallNumber = null;
    }
    if (null != m_AudioRecorderViewModel)
    {
        m_AudioRecorderViewModel.Dispose();
        m_AudioRecorderViewModel = null;
    }
}
private static void startVoIPSession()
{
    try
    {
        audioPlayer = DependencyService.Get<IAudioPlayer>(DependencyFetchTarget.NewInstance);
        audioPlayer.start(currentCallCodec);
        audioRecorder = DependencyService.Get<IAudioRecorder>(DependencyFetchTarget.NewInstance);
        audioRecorder.start(currentCallCodec);
        audioRecorder.setOnSoundDataReceived((data) =>
        {
            StreamProcessor.sendAppData(currentCallContact, currentCallSessionId, data);
        });
        currentCallStartedTime = Clock.getTimestamp();
        startLastPacketReceivedCheck();
    }
    catch (Exception e)
    {
        Logging.error("Exception occurred while starting VoIP session: " + e);
        endVoIPSession();
    }
}
public RecorderViewModel(IAudioRecorder recorder) { messageHub = new MessageHub(); messageHub.OnCallBack = (string name, string message, string conversationId) => OnHubCallBack(name, message, conversationId); messageHub.ConnectionClosed = () => OnHubClosed(); Tts = new GoogleTts(); this.recorder = recorder; this.recorder.Stopped += OnRecorderStopped; beginRecordingCommand = new RelayCommand(BeginRecording, () => recorder.RecordingState == RecordingState.Stopped || recorder.RecordingState == RecordingState.Monitoring); stopCommand = new RelayCommand(Stop, () => recorder.RecordingState == RecordingState.Recording); signInCommand = new RelayCommand(SignedIn); sendMessageCommand = new RelayCommand(Sent); recorder.SampleAggregator.MaximumCalculated += OnRecorderMaximumCalculated; Messenger.Default.Register <ShuttingDownMessage>(this, OnShuttingDown); PrepareUI(); }
public void Run()
{
    this._spotifyService.GetSong().Subscribe(song =>
    {
        if (this._currentRecorder != null)
        {
            this.StopRecording(this._currentRecorder, this._songWriter, this._id3TagService);
        }
        this._currentRecorder = song != null ? this._recordingService.StartRecording(song) : null;
        this.ReRenderScreen();
    });

    bool closeApplication = false;
    while (closeApplication == false)
    {
        string command = System.Console.ReadLine();
        switch (command)
        {
            case "render":
                this.ReRenderScreen();
                break;
            case "clear":
                this._recordedSongs.Clear();
                this.ReRenderScreen();
                break;
            case "close":
                closeApplication = true;
                this._currentRecorder?.StopRecording();
                break;
        }
    }
}
private void Recorder_Stopped(IAudioRecorder arg1, ErrorEventArgs arg2)
{
    Recorder.Stopped -= Recorder_Stopped;
    Directory.CreateDirectory(System.IO.Path.GetDirectoryName(PathToRecordableAudioForCurrentSegment)); // make sure audio directory exists
    try
    {
        var minimum = TimeSpan.FromMilliseconds(300); // this is arbitrary
        AudioRecorder.TrimWavFile(PathToTemporaryWav, PathToRecordableAudioForCurrentSegment, new TimeSpan(),
            TimeSpan.FromMilliseconds(_collectionAudioTrimEndMilliseconds), minimum);
        RobustFile.Delete(PathToTemporaryWav); // Otherwise, these continue to clutter up the temp directory.
    }
    catch (Exception error)
    {
        Logger.WriteEvent(error.Message);
        RobustFile.Copy(PathToTemporaryWav, PathToRecordableAudioForCurrentSegment, true);
    }

    // We could put this off entirely until we make the ePUB.
    // I'm just gating this for now because maybe the thought was that it's better to do it a little at a time?
    // That's fine so long as it doesn't make the UI unresponsive on slow machines.
    var mp3Path = _mp3Encoder.Encode(PathToRecordableAudioForCurrentSegment);

    // Got a good new recording, can safely clean up all backups related to old one.
    foreach (var path in Directory.EnumerateFiles(
        Path.GetDirectoryName(PathToRecordableAudioForCurrentSegment),
        Path.GetFileNameWithoutExtension(PathToRecordableAudioForCurrentSegment) + "*" + ".bak"))
    {
        RobustFile.Delete(path);
    }

    // BL-7617 Don't keep .wav file after .mp3 is created successfully.
    if (!string.IsNullOrEmpty(mp3Path) && File.Exists(mp3Path))
    {
        RobustFile.Delete(PathToRecordableAudioForCurrentSegment);
    }

    _completingRecording.Set(); // will release HandleAudioFileRequest if it is waiting.
}
private void Dispose(bool disposing) { if (null != m_InfoTimer) { m_InfoTimer.Stop(); m_InfoTimer.MyElapsed -= m_Timer_Elapsed; m_InfoTimer.Dispose(); m_InfoTimer = null; } if (null != m_PhoneViewModel) { m_PhoneViewModel.Stop(); m_PhoneViewModel.Dispose(); m_PhoneViewModel = null; } if (null != m_RecorderViewModel) { this.m_RecorderViewModel.Stop(); m_RecorderViewModel.Dispose(); m_RecorderViewModel = null; } }
private void StopRecording(IAudioRecorder recorder, ISongWriter writer, IID3TagService id3TagService) { Task.Run(() => { var recorded = recorder.StopRecording(); if (recorded == null) return; var tags = id3TagService.GetTags(recorded); tags.Artists = new[] { recorded.Song.Artist }; tags.Title = recorded.Song.Title; tags.Album = recorded.Song.Album; id3TagService.UpdateTags(tags, recorded); if (writer.WriteSong(recorded)) { this._recordedSongs.Add(recorded.Song); this.ReRenderScreen(); } }); }
public IAudioRecorder GetRecorder() => _recorder ?? (_recorder = DependencyService.Get <IAudioRecorder>());
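// --- Added sketch (not from the original project) ---
// DependencyService.Get<IAudioRecorder>() above only resolves if a platform project has
// registered an implementation. A minimal, hypothetical Android-side registration; the
// namespace, class name, and the single RecordAudio() member (borrowed from the
// RecordBtn_Clicked snippet earlier) are assumptions about the interface shape.
using Xamarin.Forms;

[assembly: Dependency(typeof(MyApp.Droid.DroidAudioRecorder))]
namespace MyApp.Droid
{
    public class DroidAudioRecorder : IAudioRecorder
    {
        public bool RecordAudio()
        {
            // start platform-specific capture here; return false if recording cannot start
            return true;
        }
    }
}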
private void Recorder_Stopped(IAudioRecorder arg1, ErrorEventArgs arg2)
{
    Recorder.Stopped -= Recorder_Stopped;
    Directory.CreateDirectory(System.IO.Path.GetDirectoryName(PathToCurrentAudioSegment)); // make sure audio directory exists
    int millisecondsToTrimFromEndForMouseClick = 100;
    try
    {
        var minimum = TimeSpan.FromMilliseconds(300); // this is arbitrary
        AudioRecorder.TrimWavFile(PathToTemporaryWav, PathToCurrentAudioSegment, new TimeSpan(),
            TimeSpan.FromMilliseconds(millisecondsToTrimFromEndForMouseClick), minimum);
    }
    catch (Exception error)
    {
        Logger.WriteEvent(error.Message);
        RobustFile.Copy(PathToTemporaryWav, PathToCurrentAudioSegment, true);
    }
    // We don't actually need the mp3 now, so let people play with recording even without LAME (previously it could crash BL-3159).
    // We could put this off entirely until we make the ePUB.
    // I'm just gating this for now because maybe the thought was that it's better to do it a little at a time?
    // That's fine so long as it doesn't make the UI unresponsive on slow machines.
    if (LameEncoder.IsAvailable())
    {
        _mp3Encoder.Encode(PathToCurrentAudioSegment,
            PathToCurrentAudioSegment.Substring(0, PathToCurrentAudioSegment.Length - 4), new NullProgress());
        // Note: we need to keep the .wav file as well as the mp3 one. The mp3 format (or alternative mp4)
        // is required for ePUB. The wav file is a better permanent record of the recording; also,
        // it is used for playback.
    }
}
private void RenderStatus(IAudioRecorder currentRecorder) { WriteLine(); Write(" Status: "); WriteLine(currentRecorder != null ? "Recording".DarkGreen().OnGreen() : "Waiting for song".White().OnRed()); WriteLine(); Write(" Artist: "); WriteLine((currentRecorder?.Song.Artist ?? string.Empty).White()); Write(" Song: "); WriteLine((currentRecorder?.Song.Title ?? string.Empty).White()); Write(" Album: "); WriteLine((currentRecorder?.Song.Album ?? string.Empty).White()); WriteLine(); }
public void Test()
{
    var engine = new ISoundEngine();
    var recorder = new IAudioRecorder(engine);
    var data = recorder.RecordedAudioData; // throws exception. Should set data to null.
}
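// --- Added sketch (not from the original project) ---
// As the comment above notes, irrKlang's RecordedAudioData throws when nothing has been
// recorded yet. A minimal, hypothetical defensive variant of the same test; only the
// members already used in the snippet above are assumed, and the data is held as object
// since its exact element type is not shown there.
public void Test_Defensive()
{
    var engine = new ISoundEngine();
    var recorder = new IAudioRecorder(engine);
    object data = null;
    try
    {
        data = recorder.RecordedAudioData;
    }
    catch (Exception)
    {
        // no audio has been recorded yet; treat as "no data"
    }
}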