/// <summary>
/// Writes the captured audio data to the output file/stream while a
/// file-backed recording session is active. (summary translated from Chinese)
/// </summary>
/// <param name="sender">Event source (unused).</param>
/// <param name="e">Capture event carrying the recorded bytes.</param>
private void WriteFrame(object sender, WaveInEventArgs e)
{
    // Guard clause: only persist data while recording to a file.
    if (!ViewModule.IsRecording || !ViewModule.IsSaveFile)
    {
        return;
    }

    writer?.Write(e.Buffer, 0, e.BytesRecorded);
}
/// <summary>
/// Wires up the capture pipeline: every captured buffer is appended to the
/// current writer, and the writer is finalized when recording stops.
/// </summary>
public WaveSoundRecorder()
{
    waveIn.DataAvailable += (s, e) =>
    {
        // Null-conditional: the writer may already have been disposed by the
        // RecordingStopped handler below when a late callback arrives.
        writer?.Write(e.Buffer, 0, e.BytesRecorded);
        // TODO: if the recording time gets too long, automatically stop
        // recording or switch to a new output file. (translated from Japanese)
    };
    waveIn.RecordingStopped += (s, e) =>
    {
        // Dispose finalizes the WAV header; clearing the field turns any
        // late DataAvailable callbacks into no-ops.
        writer?.Dispose();
        writer = null;
    };
}
/// <summary>
/// Pulls <paramref name="count"/> bytes from the mixer into
/// <paramref name="buffer"/>, replaces them with silence when disabled, and
/// tees the (possibly silenced) data into the writer if one is attached.
/// </summary>
/// <returns>Always <paramref name="count"/> (see note below).</returns>
public int Read(byte[] buffer, int offset, int count)
{
    // NOTE(review): the mixer's actual bytes-read result is discarded and
    // `count` is returned unconditionally — presumably the mixer always
    // fills the requested range; confirm before relying on short reads.
    _mixer.Read(buffer, offset, count);
    if (!Enabled)
    {
        // When disabled, emit silence but keep the stream flowing; the
        // silence is also what gets recorded below.
        Array.Clear(buffer, offset, count);
    }
    // NOTE(review): lock(this) is an anti-pattern (external code can lock
    // the same instance); a private lock object would be safer — confirm no
    // caller relies on locking this object.
    lock (this)
    {
        _writer?.Write(buffer, offset, count);
    }
    return(count);
}
/// <summary>
/// Starts a new timestamped WAV recording in <c>OutFolder</c> using the
/// current input device. On writer-creation failure a message box is shown
/// and capture proceeds as a no-op (writes are null-conditional).
/// </summary>
void New()
{
    var filePath = Path.Combine(OutFolder, DateTime.Now.ToString("yyyy-MM-dd-HH-mm-ss") + ".wav");
    _r = new AudioRecorder(_dev);
    try
    {
        // FileMode.Create (was OpenOrCreate): OpenOrCreate keeps the old
        // contents of an existing file, so a shorter new recording would
        // leave stale trailing bytes and produce a corrupt WAV.
        _writer = new WaveFileWriter(
            new FileStream(filePath, FileMode.Create, FileAccess.ReadWrite, FileShare.Read),
            new WaveFormat());
    }
    catch
    {
        // Best-effort error report; _writer stays null and the
        // null-conditional write below makes capture a no-op.
        MessageBox.Show(Bass.LastError.ToString());
    }
    _r.DataAvailable += (Buffer, Length) => _writer?.Write(Buffer, Length);
    _r.Start();
}
/// <summary>
/// Appends all audio from <paramref name="sourceProvider"/> to the WAV file
/// at <paramref name="filename"/>, rejecting output past the 2 GB WAV limit.
/// </summary>
/// <exception cref="InvalidOperationException">The total output would exceed 2 GB.</exception>
public static void AppendWaveFile(string filename, IWaveProvider sourceProvider)
{
    using (var writer = new WaveFileWriter(filename, sourceProvider.WaveFormat, FileMode.Append))
    {
        // Roughly four seconds of audio per read keeps the copy loop efficient.
        var chunk = new byte[sourceProvider.WaveFormat.AverageBytesPerSecond * 4];
        long totalWritten = 0;
        int read;
        // A zero-byte read marks the end of the source provider.
        while ((read = sourceProvider.Read(chunk, 0, chunk.Length)) != 0)
        {
            totalWritten += read;
            if (totalWritten > Int32.MaxValue)
            {
                throw new InvalidOperationException("WAV File cannot be greater than 2GB. Check that sourceProvider is not an endless stream.");
            }
            writer.Write(chunk, 0, read);
        }
    }
}
/// <summary>
/// Sound recording: appends captured audio to the output file.
/// (summary translated from Russian)
/// </summary>
private void WaveSource_DataAvailable(object sender, WaveInEventArgs e)
{
    // Null-conditional write: DataAvailable can fire after recording has
    // stopped and the writer has been disposed/cleared, which previously
    // risked a NullReferenceException.
    waveFile?.Write(e.Buffer, 0, e.BytesRecorded);
}
/// <summary>
/// Renders one audio "microframe": mixes every active PCM8/PSG channel into
/// per-track buffers, applies the master fade envelope, and pushes the mix
/// to the playback buffer and/or the recording writer.
/// </summary>
/// <param name="output">When true, feed the mix to the playback buffer.</param>
/// <param name="recording">When true, also write the mix to the wave file.</param>
public void Process(bool output, bool recording)
{
    // Clear all per-track scratch buffers from the previous frame.
    for (int i = 0; i < _trackBuffers.Length; i++)
    {
        float[] buf = _trackBuffers[i];
        Array.Clear(buf, 0, buf.Length);
    }
    _audio.Clear();
    // Let every owned channel accumulate into its owner track's buffer.
    for (int i = 0; i < _pcm8Channels.Length; i++)
    {
        PCM8Channel c = _pcm8Channels[i];
        if (c.Owner != null)
        {
            c.Process(_trackBuffers[c.Owner.Index]);
        }
    }
    for (int i = 0; i < _psgChannels.Length; i++)
    {
        PSGChannel c = _psgChannels[i];
        if (c.Owner != null)
        {
            c.Process(_trackBuffers[c.Owner.Index]);
        }
    }
    float masterStep;
    float masterLevel;
    if (_isFading && _fadeMicroFramesLeft == 0)
    {
        // Fade has completed: output full silence.
        masterStep = 0;
        masterLevel = 0;
    }
    else
    {
        float fromMaster = 1f;
        float toMaster = 1f;
        if (_fadeMicroFramesLeft > 0)
        {
            // pos^(10/6) fade curve; negative positions clamp to silence.
            const float scale = 10f / 6f;
            fromMaster *= (_fadePos < 0f) ? 0f : (float)Math.Pow(_fadePos, scale);
            _fadePos += _fadeStepPerMicroframe;
            toMaster *= (_fadePos < 0f) ? 0f : (float)Math.Pow(_fadePos, scale);
            _fadeMicroFramesLeft--;
        }
        // Linearly interpolate the fade level across this buffer's samples.
        masterStep = (toMaster - fromMaster) * _samplesReciprocal;
        masterLevel = fromMaster;
    }
    // Mix every unmuted track into the interleaved stereo float buffer.
    for (int i = 0; i < _trackBuffers.Length; i++)
    {
        if (!Mutes[i])
        {
            float level = masterLevel;
            float[] buf = _trackBuffers[i];
            for (int j = 0; j < SamplesPerBuffer; j++)
            {
                // Interleaved stereo: even index = left, odd index = right.
                _audio.FloatBuffer[j * 2] += buf[j * 2] * level;
                _audio.FloatBuffer[(j * 2) + 1] += buf[(j * 2) + 1] * level;
                level += masterStep;
            }
        }
    }
    if (output)
    {
        _buffer.AddSamples(_audio.ByteBuffer, 0, _audio.ByteBufferCount);
    }
    if (recording)
    {
        _waveWriter.Write(_audio.ByteBuffer, 0, _audio.ByteBufferCount);
    }
}
/// <summary>
/// Capture callback: streams each recorded buffer straight into the wave file.
/// </summary>
private void waveIn_DataAvailable(object sender, WaveInEventArgs e)
{
    // ?. guards the race where DataAvailable fires after recording stops and
    // the writer has been disposed/cleared (previously an NRE risk).
    writer?.Write(e.Buffer, 0, e.BytesRecorded);
}
/// <summary>
/// Handles captured audio: writes it to the WAV file (silence when muted),
/// converts 32-bit float capture to 16-bit PCM for the video's audio stream,
/// computes the block's peak volume, and signals the video muxer.
/// </summary>
private void audioSource_DataAvailable(object sender, WaveInEventArgs e)
{
    int vol = 0;
    if (waveFile != null)
    {
        if (Mute)
        {
            // Keep the timeline intact while muted by writing zeroed samples.
            waveFile.Write(new byte[e.BytesRecorded], 0, e.BytesRecorded);
        }
        else
        {
            waveFile.Write(e.Buffer, 0, e.BytesRecorded);
        }
        waveFile.Flush();
    }
    // Wait for either a video frame to be written (index 0) or a stop request.
    var signalled = WaitHandle.WaitAny(new WaitHandle[] { videoFrameWritten, stopThread });
    if (signalled == 0)
    {
        if (audioSource.WaveFormat.BitsPerSample == 32)
        {
            if (Mute)
            {
                // Half the byte count: 32-bit float samples become 16-bit PCM.
                audioStream.WriteBlock(new byte[e.BytesRecorded / 2], 0, e.BytesRecorded / 2);
            }
            else
            {
                // Convert IEEE-float samples to little-endian 16-bit PCM.
                byte[] newArray16Bit = new byte[e.BytesRecorded / 2];
                short two;
                float value;
                for (int i = 0, j = 0; i < e.BytesRecorded; i += 4, j += 2)
                {
                    value = (BitConverter.ToSingle(e.Buffer, i));
                    two = (short)(value * short.MaxValue);
                    newArray16Bit[j] = (byte)(two & 0xFF);
                    newArray16Bit[j + 1] = (byte)((two >> 8) & 0xFF);
                }
                audioStream.WriteBlock(newArray16Bit, 0, e.BytesRecorded / 2);
                // Peak-detect over the converted block for the volume meter.
                float max = 0;
                for (int index = 0; index < e.BytesRecorded / 2; index += 2)
                {
                    short sample = (short)((newArray16Bit[index + 1] << 8) | newArray16Bit[index + 0]);
                    var sample32 = sample / 32768f;
                    if (sample32 < 0)
                    {
                        sample32 = -sample32;
                    }
                    if (sample32 > max)
                    {
                        max = sample32;
                    }
                }
                vol = (int)(100 * max);
            }
        }
        else
        {
            if (Mute)
            {
                audioStream.WriteBlock(new byte[e.BytesRecorded], 0, e.BytesRecorded);
            }
            else
            {
                // Already 16-bit PCM: pass through and peak-detect in place.
                audioStream.WriteBlock(e.Buffer, 0, e.BytesRecorded);
                float max = 0;
                for (int index = 0; index < e.BytesRecorded; index += 2)
                {
                    short sample = (short)((e.Buffer[index + 1] << 8) | e.Buffer[index + 0]);
                    var sample32 = sample / 32768f;
                    if (sample32 < 0)
                    {
                        sample32 = -sample32;
                    }
                    if (sample32 > max)
                    {
                        max = sample32;
                    }
                }
                vol = (int)(100 * max);
            }
        }
        // Tell the muxer an audio block is ready for this video frame.
        audioBlockWritten.Set();
    }
    // Report the peak volume (stays 0 when muted or stop was signalled).
    OnPeakVolumeChangedArgs opvcArgs = new OnPeakVolumeChangedArgs()
    {
        Volume = vol
    };
    PeakVolumeChanged(opvcArgs);
}
/// <summary>
/// Write handler: appends the newly recorded bytes to the writer, if any.
/// </summary>
/// <param name="sender">Event source (unused).</param>
/// <param name="e">Recorded data and byte count.</param>
private void _OnDataAvailable(object sender, WaveInEventArgs e)
    => _waveWriter?.Write(e.Buffer, 0, e.BytesRecorded);
/// <summary>
/// Mouse-up handler: stops the recording, drains the buffered audio into a
/// raw file, re-arms the recorder, and runs Google Cloud speech-to-text on
/// the captured audio, placing the transcript into <c>userText</c>.
/// </summary>
private void mouseUpEvent(object sender, MouseEventArgs e)
{
    //****************** Saving the speech into a raw file ******************
    // When the mouse is released, stop recording and save the file
    // (overwrite if it exists).
    waveIn.StopRecording();
    if (File.Exists("audio.raw"))
    {
        File.Delete("audio.raw");
    }
    // Create the raw audio file with the configured sample rate.
    writer = new WaveFileWriter(output, waveIn.WaveFormat);
    byte[] buffer = new byte[bwp.BufferLength];
    int offset = 0;
    int count = bwp.BufferLength;
    var read = bwp.Read(buffer, offset, count);
    // BUGFIX: was `if (count > 0)` — count is the (always positive) buffer
    // capacity, so the guard never did anything. Test the bytes actually
    // read from the buffered provider instead.
    if (read > 0)
    {
        writer.Write(buffer, offset, read);
    }
    waveIn.Dispose();
    waveIn = null;
    writer.Close();
    writer = null;
    // ***************** RETRY: re-create the capture pipeline ***************
    waveOut = new WaveOut();
    waveIn = new WaveIn();
    waveIn.DataAvailable += new EventHandler<WaveInEventArgs>(waveIn_DataAvailable);
    waveIn.WaveFormat = new NAudio.Wave.WaveFormat(16000, 1); // 16000 Hz mono
    bwp = new BufferedWaveProvider(waveIn.WaveFormat);
    bwp.DiscardOnBufferOverflow = true; // discard data on overflow
    // ******************** speech-to-text conversion ************************
    if (File.Exists("audio.raw"))
    {
        // Google Cloud Speech API client.
        var speech = SpeechClient.Create();
        var response = speech.Recognize(new RecognitionConfig()
        {
            Encoding = RecognitionConfig.Types.AudioEncoding.Linear16,
            SampleRateHertz = 16000,
            LanguageCode = "en",
        }, RecognitionAudio.FromFile("audio.raw"));
        userText.Text = "";
        // Prepend every alternative transcript to the text box.
        foreach (var result in response.Results)
        {
            foreach (var alternative in result.Alternatives)
            {
                userText.Text = alternative.Transcript + userText.Text;
            }
        }
        // Nothing was said in the audio.
        if (userText.Text.Length == 0)
        {
            userText.Text = "No Data ";
        }
    }
    else
    {
        // The audio file was not found.
        userText.Text = "Audio File Missing ";
    }
    this.Expert();
}
/// <summary>
/// Removes the [startPos, endPos) byte range from the given WAV stream by
/// copying the surrounding audio into a temp file, then reloads the result
/// into whichever waveform control held the source stream.
/// (comments translated from Vietnamese)
/// </summary>
private void DeleteWaveFile(WaveFileReader sou, long startPos, long endPos)
{
    // Stage the result in a temporary file with the same format.
    WaveFileWriter temp = new WaveFileWriter("temp.wav", sou.WaveFormat);
    sou.Position = 0;
    var buffer = new byte[1024];
    // Copy everything before the cut.
    while (sou.Position < startPos)
    {
        var bytesRequired = (int)(startPos - sou.Position);
        if (bytesRequired <= 0)
        {
            continue;
        }
        var bytesToRead = Math.Min(bytesRequired, buffer.Length);
        var bytesRead = sou.Read(buffer, 0, bytesToRead);
        if (bytesRead <= 0)
        {
            // BUGFIX: a zero-byte read used to spin forever because the
            // position never advanced; bail out at EOF instead.
            break;
        }
        temp.Write(buffer, 0, bytesRead);
    }
    // Copy everything after the cut.
    sou.Position = endPos;
    while (sou.Position < sou.Length)
    {
        var bytesRequired = (int)(sou.Length - sou.Position);
        if (bytesRequired <= 0)
        {
            continue;
        }
        var bytesToRead = Math.Min(bytesRequired, buffer.Length);
        var bytesRead = sou.Read(buffer, 0, bytesToRead);
        if (bytesRead <= 0)
        {
            break; // BUGFIX: avoid infinite loop on zero-byte reads.
        }
        temp.Write(buffer, 0, bytesRead);
    }
    // Replace the source file with the trimmed temp file and refresh the UI.
    temp.Dispose();
    sou.Dispose();
    if (sou.Equals(cwvNumber1.WaveStream))
    {
        CopyWaveFile(fileName1, temp.Filename);
        Wave = new WaveFileReader(fileName1);
        cwvNumber1.WaveStream = wave;
        cwvNumber1.Painting();
        cwvNumber1.FitToScreen();
        cwvNumber1.WaveStream.Position = 0;
        lbMax.Text = wave.TotalTime.Minutes.ToString() + ":" + wave.TotalTime.Seconds.ToString();
        lbCur.Text = "0 : 0";
    }
    else if (sou.Equals(cwvNumber2.WaveStream))
    {
        CopyWaveFile(fileName2, temp.Filename);
        Wave = new WaveFileReader(fileName2);
        cwvNumber2.WaveStream = wave;
        cwvNumber2.Painting();
        cwvNumber2.FitToScreen();
        cwvNumber2.WaveStream.Position = 0;
        lbMax.Text = wave.TotalTime.Minutes.ToString() + ":" + wave.TotalTime.Seconds.ToString();
        lbCur.Text = "0 : 0";
    }
}
// This sample shows you how you can use SetAudioFormatCallback and SetAudioCallbacks. It does two things:
// 1) Play the sound from the specified video using NAudio
// 2) Extract the sound into a file using NAudio
static void Main(string[] args)
{
    Core.Initialize();
    using var libVLC = new LibVLC("--verbose=2");
    using var media = new Media(libVLC,
        new Uri("http://commondatastorage.googleapis.com/gtv-videos-bucket/sample/ElephantsDream.mp4"),
        ":no-video");
    using var mediaPlayer = new MediaPlayer(media);
    using var outputDevice = new WaveOutEvent();
    var waveFormat = new WaveFormat(8000, 16, 1);
    // BUGFIX: the writer was never disposed, so WaveFileWriter never wrote
    // the final RIFF/data chunk sizes and "sound.wav" was left with a corrupt
    // header. `using var` finalizes the file when Main exits.
    using var writer = new WaveFileWriter("sound.wav", waveFormat);
    var waveProvider = new BufferedWaveProvider(waveFormat);
    outputDevice.Init(waveProvider);
    mediaPlayer.SetAudioFormatCallback(AudioSetup, AudioCleanup);
    mediaPlayer.SetAudioCallbacks(PlayAudio, PauseAudio, ResumeAudio, FlushAudio, DrainAudio);
    mediaPlayer.Play();
    mediaPlayer.Time = 20_000; // Seek the video 20 seconds
    outputDevice.Play();
    Console.WriteLine("Press 'q' to quit. Press any other key to pause/play.");
    while (true)
    {
        if (Console.ReadKey().KeyChar == 'q')
        {
            break;
        }
        if (mediaPlayer.IsPlaying)
        {
            mediaPlayer.Pause();
        }
        else
        {
            mediaPlayer.Play();
        }
    }

    // Copies each delivered sample block to both the playback buffer and file.
    void PlayAudio(IntPtr data, IntPtr samples, uint count, long pts)
    {
        int bytes = (int)count * 2; // (16 bit, 1 channel)
        var buffer = new byte[bytes];
        Marshal.Copy(samples, buffer, 0, bytes);
        waveProvider.AddSamples(buffer, 0, bytes);
        writer.Write(buffer, 0, bytes);
    }

    // Tells libVLC the sample rate / channel count we want to receive.
    int AudioSetup(ref IntPtr opaque, ref IntPtr format, ref uint rate, ref uint channels)
    {
        channels = (uint)waveFormat.Channels;
        rate = (uint)waveFormat.SampleRate;
        return(0);
    }

    void DrainAudio(IntPtr data)
    {
        writer.Flush();
    }

    void FlushAudio(IntPtr data, long pts)
    {
        writer.Flush();
        waveProvider.ClearBuffer();
    }

    void ResumeAudio(IntPtr data, long pts)
    {
        outputDevice.Play();
    }

    void PauseAudio(IntPtr data, long pts)
    {
        outputDevice.Pause();
    }

    void AudioCleanup(IntPtr opaque)
    {
    }
}
/// <summary>
/// Capture callback: appends the recorded bytes to the wave writer.
/// </summary>
private void MWavIn_DataAvailable(object sender, WaveInEventArgs e)
{
    // ?. guards the race where DataAvailable fires after the writer was
    // disposed on stop. The unused `secondsRecorded` local
    // (writer length / AverageBytesPerSecond) was removed; recompute it
    // from mWavWriter if elapsed time is ever needed here.
    mWavWriter?.Write(e.Buffer, 0, e.BytesRecorded);
}
/// <summary>
/// Records a short voice clip from the input device into memory, using the
/// device's peak meter to detect speech start/stop. When capture stops the
/// clip is trimmed, format-converted, and raised via OnShortVoiceRecorded.
/// Blocks the caller until capture stops. (comments translated from Korean)
/// </summary>
private void CaptureShortSound()
{
    WasapiCapture capture = new WasapiCapture(InputDevice);
    MemoryStream memoryStream = new MemoryStream();
    WaveFileWriter writer = new WaveFileWriter(memoryStream, capture.WaveFormat);
    long voiceTime = 0;
    long stopPos = 0;
    int stopcount = 0;
    capture.DataAvailable += (s, a) =>
    {
        writer.Write(a.Buffer, 0, a.BytesRecorded);
        int soundValue = ((int)Math.Round(InputDevice.AudioMeterInformation.MasterPeakValue * 100));
        //Console.WriteLine(soundValue);
        if (soundValue > 20) // device level above 20: speech detected
        {
            if (voiceTime == 0) // no voice-start mark yet
            {
                // Mark the cut point 1.5 s before the current stream position.
                voiceTime = Convert.ToInt64(writer.Position - writer.WaveFormat.AverageBytesPerSecond * 1.5f);
            }
        }
        else if (soundValue < 15) // silence
        {
            if (voiceTime != 0) // only while speech capture is active
            {
                if (stopcount >= 5)
                {
                    if (stopPos == 0)
                    {
                        // Schedule a stop 0.3 s from now.
                        stopPos = (long)(writer.Position + writer.WaveFormat.AverageBytesPerSecond * 0.3f);
                    }
                }
                stopcount++;
            }
        }
        if (voiceTime != 0) // while speech capture is active
        {
            // Hard cap: stop ~1.5 s (4 - 2.5) after speech started.
            if (writer.Position > voiceTime + capture.WaveFormat.AverageBytesPerSecond * 4f)
            {
                capture.StopRecording();
            }
            if (stopPos != 0)
            {
                if (writer.Position > stopPos) // 0.3 s of confirmed silence
                {
                    capture.StopRecording();
                }
            }
        }
        if (ShortRecordStop == true)
        {
            capture.StopRecording();
        }
    };
    capture.RecordingStopped += (s, a) =>
    {
        if (ShortRecordStop == true)
        {
            // Externally cancelled: discard everything.
            writer.Dispose();
            capture.Dispose();
            memoryStream.Dispose();
        }
        else
        {
            writer.Flush();
            memoryStream.Seek(0, SeekOrigin.Begin);
            WaveFileReader WaveFileWriterTrimRead = new WaveFileReader(memoryStream); // file to trim
            MemoryStream memoryStreamTrim = new MemoryStream();
            WaveFileWriter WaveFileWriterTrim = new WaveFileWriter(memoryStreamTrim, capture.WaveFormat); // trimmed output
            double BytesPerMillisecond = WaveFileWriterTrimRead.WaveFormat.AverageBytesPerSecond / 1000.0;
            //---------------------------------- start and end offsets
            // Cut roughly one second before speech started, block-aligned.
            int start = (int)(WaveFileWriterTrimRead.Position * BytesPerMillisecond) + (int)voiceTime;
            start -= start % WaveFileWriterTrimRead.WaveFormat.BlockAlign;
            int end = (int)((WaveFileWriterTrimRead.Position + WaveFileWriterTrimRead.TotalTime.Seconds * 1000f) * BytesPerMillisecond);
            end -= end % WaveFileWriterTrimRead.WaveFormat.BlockAlign;
            //----------------------------------
            TrimWavFile(WaveFileWriterTrimRead, WaveFileWriterTrim, start, end); // the trimming method
            WaveFileWriterTrimRead.Dispose(); // release
            WaveFileWriterTrim.Flush();
            memoryStreamTrim.Seek(0, SeekOrigin.Begin);
            MemoryStream ConvertedMemory = WaveFormatConversion(memoryStreamTrim, capture.WaveFormat);
            byte[] fileData = new byte[ConvertedMemory.Length];
            ConvertedMemory.Read(fileData, 0, fileData.Length);
            //ConvertedMemory.Seek(0, SeekOrigin.Begin);
            //PlaySound(ConvertedMemory); // play back from the memory stream
            OnShortVoiceRecorded?.Invoke(fileData, VoiceCaptureType.ShortTriggerCapture);
            ConvertedMemory.Dispose();
            writer.Dispose();
            capture.Dispose();
            memoryStream.Dispose();
            memoryStream = null;
        }
    };
    try
    {
        capture.StartRecording();
    }
    catch
    {
        // Start failed: release everything immediately.
        writer.Dispose();
        capture.Dispose();
        memoryStream.Dispose();
    }
    // Block until capture stops (polled every 5 s).
    while (capture.CaptureState != NAudio.CoreAudioApi.CaptureState.Stopped)
    {
        Thread.Sleep(5000);
    }
}
/// <summary>
/// Records up to ~6 seconds of audio from the input device into memory,
/// stopping early after sustained silence, then converts the capture and
/// returns it as an AudIOData. Blocks the caller until recording stops.
/// (comments translated from Korean)
/// </summary>
private AudIOData CaptureSound()
{
    WasapiCapture capture = new WasapiCapture(InputDevice);
    MemoryStream memoryStream = new MemoryStream();
    WaveFileWriter writer = new WaveFileWriter(memoryStream, capture.WaveFormat);
    long stopPos = 0;
    int stopcount = 0;
    capture.DataAvailable += (s, a) =>
    {
        writer.Write(a.Buffer, 0, a.BytesRecorded);
        int soundValue = ((int)Math.Round(InputDevice.AudioMeterInformation.MasterPeakValue * 100));
        Console.WriteLine("================" + soundValue);
        if (soundValue > 20) // device level above 20: speech detected
        {
            stopcount = 0;
        }
        else if (soundValue < 15 && writer.Position > capture.WaveFormat.AverageBytesPerSecond * 2f) // silence (after a 2 s minimum)
        {
            if (stopcount >= 2) // silence persisted for 2 callbacks
            {
                if (stopPos == 0)
                {
                    stopPos = (long)(writer.Position + writer.WaveFormat.AverageBytesPerSecond * 0f); // schedule an immediate stop
                }
            }
            stopcount++;
        }
        if (writer.Position > capture.WaveFormat.AverageBytesPerSecond * 6f) // hard stop after 6 s
        {
            capture.StopRecording();
        }
        if (stopPos != 0)
        {
            if (writer.Position > stopPos) // scheduled silence stop reached
            {
                capture.StopRecording();
            }
        }
    };
    byte[] fileData = null; // audio bytes
    capture.RecordingStopped += (s, a) =>
    {
        writer.Flush();
        memoryStream.Seek(0, SeekOrigin.Begin);
        MemoryStream ConvertedMemory = WaveFormatConversion(memoryStream, capture.WaveFormat);
        fileData = new byte[ConvertedMemory.Length]; // allocate the audio bytes
        ConvertedMemory.Read(fileData, 0, fileData.Length);
        //ConvertedMemory.Seek(0, SeekOrigin.Begin);
        //PlaySound(ConvertedMemory);
        ConvertedMemory.Dispose();
        capture.Dispose();
        memoryStream.Dispose();
        memoryStream = null;
    };
    try
    {
        capture.StartRecording();
    }
    catch
    {
        // Start failed: release everything and report failure.
        writer.Dispose();
        capture.Dispose();
        memoryStream.Dispose();
        return(new AudIOData(true));
    }
    // Block until capture stops (polled every 500 ms).
    while (capture.CaptureState != NAudio.CoreAudioApi.CaptureState.Stopped)
    {
        Thread.Sleep(500);
    }
    return(new AudIOData(fileData)); // return the audio produced by the event
}
/// <summary>
/// Processes the byte data and writes to the audio file
/// </summary>
/// <param name="data"> Raw audio data to write </param>
public override void Process(byte[] data)
    => _WaveFileWriter.Write(data, 0, data.Length);
/// <summary>
/// Writes captured loopback (system) audio to the wave file.
/// </summary>
private void WasapiLoopbackCapture_DataAvailable(object sender, WaveInEventArgs e)
{
    // ?. guards against the callback firing after the writer was disposed
    // when capture stops (previously a NullReferenceException risk).
    waveFileWriter?.Write(e.Buffer, 0, e.BytesRecorded);
}
/// <summary>
/// Records audio from the configured microphone for Config.Instance.MicLength
/// milliseconds and posts the resulting WAV file as a reply to the command
/// message, managing the 🎙 reaction as a recording indicator.
/// </summary>
/// <param name="ctx">Discord command context used for reactions and the reply.</param>
/// <param name="sampleRate">Capture sample rate in Hz (mono); defaults to 44100.</param>
public async Task MicTask(CommandContext ctx, int sampleRate = 44100)
{
    await Task.Delay(Config.Instance.MicTimer);
    var mic = WaveIn.GetCapabilities(Config.Instance.MicIndex);
    var source = new WaveInEvent()
    {
        DeviceNumber = Config.Instance.MicIndex,
        WaveFormat = new WaveFormat(sampleRate, 1)
    };
    var writer = new WaveFileWriter("recording.wav", source.WaveFormat);
    var timer = new Timer
    {
        AutoReset = false,
        Interval = Config.Instance.MicLength
    };
    timer.Elapsed += async(sender, args) =>
    {
        source.StopRecording();
        // Give the RecordingStopped handler time to dispose the writer
        // (finalizing the WAV header) before the file is uploaded.
        await Task.Delay(500);
        await ctx.Message.DeleteOwnReactionAsync(DiscordEmoji.FromUnicode("🎙"));
        // NOTE(review): this FileStream is never disposed, so the following
        // File.Delete can fail because the file is still open — confirm and
        // dispose it after the upload completes.
        var msg = new DiscordMessageBuilder()
                  .WithReply(ctx.Message.Id)
                  .WithFile(new FileStream("recording.wav", FileMode.Open));
        StalkbotClient.UpdateLastMessage(await ctx.RespondAsync(msg));
        File.Delete("recording.wav");
    };
    Logger.Log($"Started recording with mic {mic.ProductName}", LogLevel.Info);
    source.DataAvailable += (sender, args) =>
    {
        // Writer may already be cleared by RecordingStopped; bail out then.
        if (writer == null)
        {
            return;
        }
        writer.Write(args.Buffer, 0, args.BytesRecorded);
        writer.Flush();
    };
    source.RecordingStopped += (sender, args) =>
    {
        // NOTE(review): calling StopRecording again from inside
        // RecordingStopped looks redundant — verify it is harmless here.
        if (source != null)
        {
            source.StopRecording();
            source.Dispose();
            source = null;
        }
        if (writer == null)
        {
            return;
        }
        // Dispose finalizes the WAV header; clear so late callbacks no-op.
        writer.Dispose();
        writer = null;
    };
    timer.Start();
    source.StartRecording();
    await ctx.Message.CreateReactionAsync(DiscordEmoji.FromUnicode("🎙"));
}
// Cut out a fragment of the signal. (comments translated from Russian)
/// <summary>
/// Saves the selected sample range of <paramref name="OpenFileName"/> into
/// <paramref name="TrimFileName"/>, scaling sample indices to byte offsets
/// according to the source's bit depth and channel count.
/// </summary>
private void TrimWav(String OpenFileName, String TrimFileName, int StartSample, int EndSample) // fragment-saving method
{
    string fileName = System.IO.Path.GetTempPath() + Guid.NewGuid().ToString() + ".wav"; // temp file path
    WaveStream wave = new WaveFileReader(OpenFileName); // open the file as a byte stream
    WaveFormat Format = wave.WaveFormat; // format descriptor for the new file
    write = new WaveFileWriter(fileName, Format); // create the output file with path and format
    var buffer = new byte[wave.Length]; // byte array as long as the whole file
    int _bufferSize = Convert.ToInt32(wave.Length); // file length
    var read = 0;
    read = wave.Read(buffer, 0, _bufferSize); // read the whole file
    // NOTE(review): `wavePlayer.WaveFormat == "IeeeFloat"` compares a format
    // value against a string — presumably via an implicit conversion or a
    // string-typed property; confirm this branch is ever taken.
    if (wavePlayer.WaveFormat == "IeeeFloat") // IEEE Float encoded file
    {
        write.Write(buffer, hScrollBar1.Value * 32, hScrollBar2.Value * 32); // write the selected bytes
    }
    else // PCM file: byte offset = sample * bytes-per-sample * channels
    {
        switch (wavePlayer.BitsPerSample)
        {
            case 8:
                if (wavePlayer.Channels == 1)
                {
                    write.Write(buffer, StartSample, EndSample); // write the selected bytes
                }
                else
                {
                    write.Write(buffer, StartSample * 2, EndSample * 2); // write the selected bytes
                }
                break;
            case 16:
                if (wavePlayer.Channels == 1)
                {
                    write.Write(buffer, StartSample * 2, EndSample * 2); // write the selected bytes
                }
                else
                {
                    write.Write(buffer, StartSample * 4, EndSample * 4); // write the selected bytes
                }
                break;
            case 24:
                if (wavePlayer.Channels == 1)
                {
                    write.Write(buffer, StartSample * 3, EndSample * 3); // write the selected bytes
                }
                else
                {
                    write.Write(buffer, StartSample * 6, EndSample * 6); // write the selected bytes
                }
                break;
            case 32:
                if (wavePlayer.Channels == 1)
                {
                    write.Write(buffer, StartSample * 4, EndSample * 4); // write the selected bytes
                }
                else
                {
                    write.Write(buffer, StartSample * 8, EndSample * 8); // write the selected bytes
                }
                break;
        }
    }
    write.Close(); // close the stream (finalizes the WAV header)
    File.Copy(fileName, TrimFileName, true); // copy the temp file to the user's chosen destination
    File.Delete(fileName); // delete the temp file
}
/// <summary>
/// Writes each referenced audio file into <paramref name="baseDir"/> by
/// concatenating its clip segments (MP3 via LAME re-encode preserving ID3
/// tags, WAV via WaveFileWriter). Returns false if the progress delegate
/// requests cancellation.
/// </summary>
/// <param name="baseDir">Destination directory.</param>
/// <param name="progressDelegate">Optional (percent, message) callback; return true to cancel.</param>
private bool SaveAudioFiles(string baseDir, Func <int, string, bool> progressDelegate = null)
{
    // Default progress callback: never cancel.
    if (progressDelegate == null)
    {
        progressDelegate = (p, m) => false;
    }
    var count = AudioSegmentsByAudioFileDictionary.Keys.Count(key => AudioSegmentsByAudioFileDictionary[key].Any());
    var index = 0;
    foreach (var audioFileName in AudioSegmentsByAudioFileDictionary.Keys.Where(key => AudioSegmentsByAudioFileDictionary[key].Any()))
    {
        if (progressDelegate((100 * index) / count, $"Saving audio file {audioFileName}"))
        {
            return(false);
        }
        index++;
        // Fast path: a single segment that covers (nearly) the whole source
        // file is simply copied instead of re-encoded.
        if (AudioSegmentsByAudioFileDictionary[audioFileName].Count == 1)
        {
            var audSeg = AudioSegmentsByAudioFileDictionary[audioFileName].First();
            if (audSeg.AudioFileDuration < audSeg.ClipEnd)
            {
                throw new InvalidOperationException(
                          $"Audio segment clip-end {audSeg.ClipEnd} is beyond the end of audio file {audSeg.AudioFile}");
            }
            if (audSeg.AudioFileDuration < audSeg.ClipEnd.Add(AllowedFileEndAudio))
            {
                File.Copy(Uri.UnescapeDataString(AudioSegmentsByAudioFileDictionary[audioFileName][0].AudioFile.LocalPath), Path.Combine(baseDir, audioFileName));
                continue;
            }
        }
        Stream underlyingStream = new FileStream(Path.Combine(baseDir, audioFileName), FileMode.Create, FileAccess.ReadWrite);
        byte[] byteSuffix = null;
        try
        {
            Stream audioStream;
            WaveFormat waveFormat;
            var firstAudioPath = Uri.UnescapeDataString(
                AudioSegmentsByAudioFileDictionary[audioFileName].First().AudioFile.LocalPath);
            switch (AudioFileExtension)
            {
            case ".mp3":
                using (var mp3Fr = new Mp3FileReader(firstAudioPath))
                {
                    waveFormat = mp3Fr.WaveFormat;
                    audioStream = new LameMP3FileWriter(
                        underlyingStream, waveFormat, 8 * mp3Fr.Mp3WaveFormat.AverageBytesPerSecond / 1000);
                    // Preserve the ID3v2 tag at the head; remember ID3v1 for the tail.
                    if (mp3Fr.Id3v2Tag != null)
                    {
                        underlyingStream.Write(mp3Fr.Id3v2Tag.RawData, 0, mp3Fr.Id3v2Tag.RawData.Length);
                    }
                    byteSuffix = mp3Fr.Id3v1Tag?.ToArray();
                }
                break;
            case ".wav":
                // NOTE(review): both WaveFileReader instances opened here are
                // never disposed — confirm and consider using-blocks.
                waveFormat = new WaveFileReader(firstAudioPath).WaveFormat;
                audioStream = new WaveFileWriter(underlyingStream, new WaveFileReader(firstAudioPath).WaveFormat);
                break;
            default:
                throw new NotSupportedException($"Audio file extension {AudioFileExtension} is not supported");
            }
            try
            {
                foreach (var segment in AudioSegmentsByAudioFileDictionary[audioFileName])
                {
                    using (var audioReader = GetAudioPcmStream(Uri.UnescapeDataString(segment.AudioFile.LocalPath)))
                    {
                        if (!waveFormat.Equals(audioReader.WaveFormat))
                        {
                            throw new NotSupportedException(
                                      $"Audio file {segment.AudioFile} has different wave format from first audio file in segment");
                        }
                        // Seek to the clip start (whole blocks), then copy the
                        // clip's duration worth of PCM into the encoder.
                        audioReader.Seek(
                            (long)(segment.ClipBegin.TotalSeconds * audioReader.WaveFormat.SampleRate) * audioReader.WaveFormat.BlockAlign,
                            SeekOrigin.Current);
                        var bytesToRead = (long)(segment.Duration.TotalSeconds * audioReader.WaveFormat.SampleRate) * audioReader.WaveFormat.BlockAlign;
                        var totalBytesRead = 0;
                        var buf = new byte[10 * 1024];
                        while (totalBytesRead < bytesToRead)
                        {
                            var byteCount = (int)Math.Min(bytesToRead - totalBytesRead, buf.Length);
                            // NOTE(review): a zero-byte read here would loop
                            // forever — presumably the PCM stream always
                            // yields the full clip; confirm.
                            var bytesRead = audioReader.Read(buf, 0, byteCount);
                            totalBytesRead += bytesRead;
                            audioStream.Write(buf, 0, bytesRead);
                        }
                    }
                }
            }
            finally
            {
                // NOTE(review): the encoder stream is flushed but never
                // disposed, so LAME/WAV trailers may not be written — confirm.
                audioStream?.Flush();
            }
        }
        finally
        {
            // Append the preserved ID3v1 tag (MP3 only), then close the file.
            if (byteSuffix != null)
            {
                underlyingStream.Write(byteSuffix, 0, byteSuffix.Length);
            }
            underlyingStream.Close();
        }
    }
    return(true);
}
/// <summary>
/// Builds an output WAV by interleaving the keyword source audio with task
/// audio clips at user-specified cut locations (sample indices), inserting
/// five seconds of silence after each task clip. (The large commented-out
/// concat experiment that used to live here was removed.)
/// </summary>
private void Start_AddTask_Btn_Click(object sender, EventArgs e)
{
    SaveFileDialog dialog = new SaveFileDialog();
    dialog.Title = "Save";
    dialog.InitialDirectory = ".\\";
    dialog.SupportMultiDottedExtensions = true;
    dialog.Filter = "wav files (*.wav)|*.wav|All files (*.*)|*.*";
    dialog.FileName = "Untitled.wav";
    dialog.OverwritePrompt = true;
    if (dialog.ShowDialog() != DialogResult.OK)
    {
        return;
    }
    try
    {
        // Read the cut locations (one sample index per line).
        var location = new List<int>();
        using (StreamReader sr = new StreamReader(Keyword_Sample_TextBox.Text.Trim()))
        {
            String line;
            while ((line = sr.ReadLine()) != null)
            {
                location.Add(Int32.Parse(line));
            }
        }
        // Read the task file names (blank lines skipped).
        var taskFilenames = new List<String>();
        using (StreamReader sr = new StreamReader(Task_Source_TextBox.Text.Trim()))
        {
            String line;
            while ((line = sr.ReadLine()) != null)
            {
                if (line != "")
                {
                    taskFilenames.Add(line.Trim());
                }
            }
        }
        if (location.Count > taskFilenames.Count)
        {
            throw new ArgumentException("the number cut location must equal or less than the number of input task files");
        }
        using (var keywordWaveReader = new WaveFileReader(Keyword_Source_TextBox.Text.Trim()))
        {
            var keywordSampleProvider = WaveExtensionMethods.ToSampleProvider(keywordWaveReader);
            // Output format: same rate/bit depth as the keyword file, forced mono.
            var sampleRate = keywordWaveReader.WaveFormat.SampleRate;
            var bit = keywordWaveReader.WaveFormat.BitsPerSample;
            var channel = 1;
            var outFormat = new WaveFormat(sampleRate, bit, channel);
            // `using` (was a bare Dispose): guarantees the WAV header is
            // finalized even when an exception aborts the loop below.
            using (var writer = new WaveFileWriter(dialog.FileName, outFormat))
            {
                float[] buffer = new float[2 * keywordWaveReader.WaveFormat.Channels]; // 2 samples per read
                int samplesRead;
                int currentLocation = 0; // number of samples written so far
                int taskCount = 0;
                // BUGFIX: once every cut location had been consumed, the old
                // code executed `cutLocation = location[taskCount]` one step
                // past the end and threw IndexOutOfRangeException. The
                // int.MaxValue sentinel means "no more cuts".
                int cutLocation = location.Count > 0 ? location[0] : int.MaxValue;
                while ((samplesRead = keywordSampleProvider.Read(buffer, 0, buffer.Length)) > 0)
                {
                    writer.WriteSamples(buffer, 0, samplesRead);
                    currentLocation += samplesRead;
                    // On crossing a cut location, splice in the next task clip.
                    if (currentLocation > cutLocation)
                    {
                        // Convert the task stream into the keyword stream's format.
                        var taskFilename = $@"task/{taskFilenames[taskCount]}";
                        using (WaveFormatConversionStream taskStream = new WaveFormatConversionStream(outFormat, new WaveFileReader(taskFilename)))
                        {
                            byte[] taskBuffer = new byte[taskStream.WaveFormat.BitsPerSample * taskStream.WaveFormat.Channels / 4];
                            int taskBytesRead;
                            while ((taskBytesRead = taskStream.Read(taskBuffer, 0, taskBuffer.Length)) > 0)
                            {
                                writer.Write(taskBuffer, 0, taskBytesRead);
                            }
                            // Insert 5 seconds of silence after the task clip.
                            for (int sec = 0; sec < 5; sec++)
                            {
                                float[] silenceBuffer = Enumerable.Repeat((float)0, writer.WaveFormat.SampleRate).ToArray();
                                writer.WriteSamples(silenceBuffer, 0, silenceBuffer.Length);
                            }
                        }
                        taskCount++;
                        cutLocation = taskCount < location.Count ? location[taskCount] : int.MaxValue;
                    }
                }
            }
        }
    }
    catch (Exception ex)
    {
        //MessageBox.Show(ex.Message, "Error", MessageBoxButtons.OK, MessageBoxIcon.Error);
        MessageBox.Show(ex.ToString());
    }
}
} // end void OnRecordingStopped

/// <summary>
/// Event handled when data becomes available. The data will be written out to disk at this point.
/// </summary>
void OnDataAvailable(object sender, WaveInEventArgs e)
{
    // ?. guards the race where DataAvailable fires after OnRecordingStopped
    // has disposed/cleared the writer (previously an NRE risk).
    _writer?.Write(e.Buffer, 0, e.BytesRecorded);
    //int secondsRecorded = (int)(_writer.Length / _writer.WaveFormat.AverageBytesPerSecond);
}
/// <summary>
/// Appends the [startPos, endPos) byte range of <paramref name="sou"/> to the
/// end of <paramref name="des"/> via a temp file, then reloads the result
/// into whichever waveform control held the destination stream.
/// (comments translated from Vietnamese)
/// </summary>
private void ConCatWaveFile(WaveFileReader sou, WaveFileReader des, long startPos, long endPos)
{
    // Copy the whole destination file into the temp file first.
    WaveFileWriter temp = new WaveFileWriter("temp.wav", des.WaveFormat);
    // des.Filename = "";
    des.Position = 0;
    var end = (int)des.Length;
    var buffer = new byte[1024];
    while (des.Position < end)
    {
        var bytesRequired = (int)(end - des.Position);
        if (bytesRequired <= 0)
        {
            continue;
        }
        var bytesToRead = Math.Min(bytesRequired, buffer.Length);
        var bytesRead = des.Read(buffer, 0, bytesToRead);
        if (bytesRead <= 0)
        {
            // BUGFIX: a zero-byte read used to spin forever because the
            // position never advanced; stop copying at EOF instead.
            break;
        }
        temp.Write(buffer, 0, bytesRead);
    }
    // Then append the selected part of the source file.
    sou.Position = startPos;
    buffer = new byte[1024];
    while (sou.Position < endPos)
    {
        int bytesRequired = (int)(endPos - sou.Position);
        if (bytesRequired > 0)
        {
            int bytesToRead = Math.Min(bytesRequired, buffer.Length);
            int bytesRead = sou.Read(buffer, 0, bytesToRead);
            if (bytesRead <= 0)
            {
                break; // BUGFIX: avoid infinite loop when endPos is past EOF.
            }
            temp.Write(buffer, 0, bytesRead);
        }
    }
    temp.Dispose();
    des.Dispose();
    // Overwrite the destination file and refresh whichever viewer held it.
    if (des.Equals(cwvNumber1.WaveStream))
    {
        CopyWaveFile(fileName1, temp.Filename);
        Wave = new WaveFileReader(fileName1);
        cwvNumber1.WaveStream = wave;
        cwvNumber1.Painting();
        cwvNumber1.FitToScreen();
        cwvNumber1.WaveStream.Position = 0;
        lbMax.Text = wave.TotalTime.Minutes.ToString() + ":" + wave.TotalTime.Seconds.ToString();
        lbCur.Text = "0 : 0";
    }
    else if (des.Equals(cwvNumber2.WaveStream))
    {
        CopyWaveFile(fileName2, temp.Filename);
        Wave = new WaveFileReader(fileName2);
        cwvNumber2.WaveStream = wave;
        cwvNumber2.Painting();
        cwvNumber2.FitToScreen();
        cwvNumber2.WaveStream.Position = 0;
        lbMax.Text = wave.TotalTime.Minutes.ToString() + ":" + wave.TotalTime.Seconds.ToString();
        lbCur.Text = "0 : 0";
    }
}
/// <summary>
/// Renders one audio microframe sample-by-sample: sums all 16 channels,
/// applies the master fade, clamps to 16-bit, and emits interleaved
/// little-endian stereo to the playback buffer and/or the recording writer.
/// </summary>
/// <param name="output">When true, feed the mix to the playback buffer.</param>
/// <param name="recording">When true, also write the mix to the wave file.</param>
public void Process(bool output, bool recording)
{
    float masterStep;
    float masterLevel;
    if (_isFading && _fadeMicroFramesLeft == 0)
    {
        // Fade has completed: output full silence.
        masterStep = 0;
        masterLevel = 0;
    }
    else
    {
        float fromMaster = 1f;
        float toMaster = 1f;
        if (_fadeMicroFramesLeft > 0)
        {
            // pos^(10/6) fade curve; negative positions clamp to silence.
            const float scale = 10f / 6f;
            fromMaster *= (_fadePos < 0f) ? 0f : (float)Math.Pow(_fadePos, scale);
            _fadePos += _fadeStepPerMicroframe;
            toMaster *= (_fadePos < 0f) ? 0f : (float)Math.Pow(_fadePos, scale);
            _fadeMicroFramesLeft--;
        }
        // Linearly interpolate the fade level across this buffer's samples.
        masterStep = (toMaster - fromMaster) * _samplesReciprocal;
        masterLevel = fromMaster;
    }
    // Scratch for one interleaved 16-bit stereo frame (L lo, L hi, R lo, R hi).
    byte[] b = new byte[4];
    for (int i = 0; i < _samplesPerBuffer; i++)
    {
        int left = 0,
            right = 0;
        for (int j = 0; j < 0x10; j++)
        {
            Channel chan = Channels[j];
            if (chan.Owner != null)
            {
                bool muted = Mutes[chan.Owner.Index]; // Get mute first because chan.Process() can call chan.Stop() which sets chan.Owner to null
                chan.Process(out short channelLeft, out short channelRight);
                if (!muted)
                {
                    left += channelLeft;
                    right += channelRight;
                }
            }
        }
        // Apply fade level and clamp the summed channels to 16-bit range.
        float f = left * masterLevel;
        if (f < short.MinValue)
        {
            f = short.MinValue;
        }
        else if (f > short.MaxValue)
        {
            f = short.MaxValue;
        }
        left = (int)f;
        b[0] = (byte)left;
        b[1] = (byte)(left >> 8);
        f = right * masterLevel;
        if (f < short.MinValue)
        {
            f = short.MinValue;
        }
        else if (f > short.MaxValue)
        {
            f = short.MaxValue;
        }
        right = (int)f;
        b[2] = (byte)right;
        b[3] = (byte)(right >> 8);
        masterLevel += masterStep;
        if (output)
        {
            _buffer.AddSamples(b, 0, 4);
        }
        if (recording)
        {
            _waveWriter.Write(b, 0, 4);
        }
    }
}
/// <summary>
/// Capture callback: appends recorded bytes and flushes so the on-disk file
/// stays current while recording.
/// </summary>
private static void OnData(object sender, WaveInEventArgs e)
{
    // ?. on each statement: the writer may be disposed/cleared concurrently
    // when recording stops, so every access re-checks the field for null.
    writer?.Write(e.Buffer, 0, e.BytesRecorded);
    writer?.Flush();
}
/// <summary>
/// Records the currently playing track from the system loopback device into a WAV
/// file, then pauses playback, hands the finished file off for copying/MP3
/// conversion, and flips <c>whichFile</c> so the next call uses the other
/// writer/capture pair (double-buffering across tracks).
/// </summary>
private void RecordTrack()
{
    if (whichFile == 1)
    {
        elapsedTime += thisTrack1.DurationMs;
        captureInstance1 = new WasapiLoopbackCapture();
        // whichFile == 1 in this branch, so the target file is always thisTrack1.wav.
        recordedAudioWriter1 = new WaveFileWriter(WAVPATH + "thisTrack1.wav", captureInstance1.WaveFormat);
        captureInstance1.DataAvailable += (s, a) =>
        {
            recordedAudioWriter1.Write(a.Buffer, 0, a.BytesRecorded);
        };

        // Record for the duration of the track.
        Stopwatch timer1 = new Stopwatch();
        timer1.Start();
        captureInstance1.StartRecording();
        while (timer1.ElapsedMilliseconds < thisTrack1.DurationMs)
        {
            // Sleep instead of busy-spinning so we don't peg a CPU core while waiting.
            Thread.Sleep(50);
        }
        error = spotify.PausePlayback();
        timer1.Stop();

        // Finish recording and release the writer/capture for this slot.
        captureInstance1.StopRecording();
        try
        {
            recordedAudioWriter1.Dispose();
            recordedAudioWriter1 = null;
            captureInstance1.Dispose();
        }
        catch (Exception)
        {
            // Best-effort cleanup: dispose failures here must not abort the recording cycle.
        }
        fileToCopy = 1;
        whichFile = 2;
        CopyLastTrack();
        Thread tMP3 = new Thread(CopyToMP3);
        tMP3.Start();
    }
    else
    {
        elapsedTime += thisTrack2.DurationMs;
        captureInstance2 = new WasapiLoopbackCapture();
        // whichFile != 1 in this branch, so the target file is always thisTrack2.wav.
        recordedAudioWriter2 = new WaveFileWriter(WAVPATH + "thisTrack2.wav", captureInstance2.WaveFormat);
        captureInstance2.DataAvailable += (s, a) =>
        {
            recordedAudioWriter2.Write(a.Buffer, 0, a.BytesRecorded);
        };

        // Record for the duration of the track.
        Stopwatch timer2 = new Stopwatch();
        timer2.Start();
        captureInstance2.StartRecording();
        while (timer2.ElapsedMilliseconds < thisTrack2.DurationMs)
        {
            // Sleep instead of busy-spinning so we don't peg a CPU core while waiting.
            Thread.Sleep(50);
        }
        error = spotify.PausePlayback();
        timer2.Stop();

        // Finish recording and release the writer/capture for this slot.
        captureInstance2.StopRecording();
        try
        {
            recordedAudioWriter2.Dispose();
            recordedAudioWriter2 = null;
            captureInstance2.Dispose();
        }
        catch (Exception)
        {
            // Best-effort cleanup: dispose failures here must not abort the recording cycle.
        }
        fileToCopy = 2;
        whichFile = 1;
        CopyLastTrack();
        Thread tMP3 = new Thread(CopyToMP3);
        tMP3.Start();
    }
}
/// <summary>
/// Creates a Wave format proxy file in the same directory and with the same name as the specified file,
/// if no storage directory is specified (i.e. if it is null). If a storage directory is specified, the proxy
/// file will be stored in the specified directory with a hashed file name to avoid name collisions and
/// file overwrites. The storage directory option is convenient for the usage of temporary or working directories.
/// </summary>
/// <param name="fileInfo">the file for which a proxy file should be created</param>
/// <param name="storageDirectory">optional directory where the proxy file will be stored, can be null</param>
/// <returns>the FileInfo of the proxy file</returns>
public static FileInfo CreateWaveProxy(FileInfo fileInfo, DirectoryInfo storageDirectory)
{
    FileInfo outputFileInfo;

    if (storageDirectory == null)
    {
        // Without a storage directory, store the proxy file beside the original file
        outputFileInfo = new FileInfo(fileInfo.FullName + ".ffproxy.wav");
    }
    else
    {
        // With a storage directory specified, store the proxy file with a hashed name
        // (to avoid name collision / overwrites) in the target directory
        // (e.g. a temp or working directory)
        using (var sha256 = SHA256.Create())
        {
            byte[] hash = sha256.ComputeHash(Encoding.Unicode.GetBytes(fileInfo.FullName));
            string hashString = BitConverter.ToString(hash).Replace("-", "").ToLowerInvariant();
            outputFileInfo = new FileInfo(Path.Combine(storageDirectory.FullName, hashString + ".ffproxy.wav"));
        }
    }

    if (outputFileInfo.Exists)
    {
        Console.WriteLine("Proxy already existing, using " + outputFileInfo.Name);
        return (outputFileInfo);
    }

    // try/finally guards below ensure the decoder and the writer are released even
    // when decoding or writing throws (the previous version leaked both on error).
    var reader = new FFmpegReader(fileInfo, FFmpeg.Type.Audio);
    try
    {
        // workaround to get NAudio WaveFormat (instead of creating it manually here)
        var mss = new MemorySourceStream(null, new AudioProperties(
            reader.AudioOutputConfig.format.channels,
            reader.AudioOutputConfig.format.sample_rate,
            reader.AudioOutputConfig.format.sample_size * 8,
            reader.AudioOutputConfig.format.sample_size == 4 ? AudioFormat.IEEE : AudioFormat.LPCM));
        var nass = new NAudioSinkStream(mss);
        var waveFormat = nass.WaveFormat;

        var writer = new WaveFileWriter(outputFileInfo.FullName, waveFormat);
        try
        {
            int output_buffer_size = reader.AudioOutputConfig.frame_size * mss.SampleBlockSize;
            byte[] output_buffer = new byte[output_buffer_size];

            int samplesRead;
            long timestamp;
            FFmpeg.Type type;

            // sequentially read samples from decoder and write it to wav file
            while ((samplesRead = reader.ReadFrame(out timestamp, output_buffer, output_buffer_size, out type)) > 0)
            {
                int bytesRead = samplesRead * mss.SampleBlockSize;
                writer.Write(output_buffer, 0, bytesRead);
            }
        }
        finally
        {
            writer.Close(); // Close flushes the WAV header and disposes the file
        }
    }
    finally
    {
        reader.Dispose();
    }

    return (outputFileInfo);
}
/// <summary>
/// Capture callback: appends the newly recorded audio to the output writer and
/// logs how many bytes were captured.
/// </summary>
/// <param name="sender">event source (unused)</param>
/// <param name="e">capture event carrying the buffer and the valid byte count</param>
private void SendCaptureSamples(object sender, WaveInEventArgs e)
{
    // Only e.BytesRecorded bytes of e.Buffer are valid; the buffer itself is usually
    // larger, so writing e.Buffer.Length would append stale/garbage bytes to the file.
    writer.Write(e.Buffer, 0, e.BytesRecorded);
    Console.WriteLine("Bytes recorded: {0}", e.BytesRecorded);
}
/// <summary>
/// Draws the waveform of the currently loaded audio file into the picture box.
/// WAV input is read directly from fileStreamInput; AIFF input is first converted
/// to a temporary WAV file via NAudio and then read the same way.
/// </summary>
private void printAudioWave()
{
    BinaryReader binRead = null;
    uint dataSize = 0;
    int numSamples = 0;
    // One plot point per horizontal pixel of the picture box.
    Point[] points = new Point[pictureBox.Width];
    int coefForWidth = 0;   // samples per pixel column
    int coeffForHeight = 0; // amplitude units per vertical pixel
    Pen penCountur = new Pen(Color.Black, 2);
    SolidBrush drawBrush = new SolidBrush(Color.Black);
    StringFormat drawFormat = new StringFormat();
    Graphics graphicMain = pictureBox.CreateGraphics();
    switch (currentAudioFormat)
    {
        case "wav":
            Cursor.Current = Cursors.WaitCursor;
            // Skips 4 bytes (presumably the RIFF magic); the read result is discarded.
            // NOTE(review): return value of Read is ignored — a short read goes unnoticed.
            fileStreamInput.Read(new byte[4], 0, 4);
            binRead = new BinaryReader(fileStreamInput);
            // NOTE(review): length/4 is half the Int16 count of a 16-bit mono stream —
            // looks like only part of the file is plotted; confirm this is intended.
            dataSize = (uint)(fileStreamInput.Length / 4);
            audioData = new Int16[dataSize];
            // Offset 40 assumes a canonical 44-byte WAV header layout (the last 4 bytes
            // before sample data) — NOTE(review): breaks on files with extra chunks.
            fileStreamInput.Seek(40, System.IO.SeekOrigin.Begin);
            numSamples = (int)(dataSize / 1);
            for (int i = 0; i < numSamples; i++)
            {
                audioData[i] = binRead.ReadInt16();
            }
            // Map sample index -> x pixel and amplitude -> y pixel (40 px vertical margin).
            coefForWidth = (int)(audioData.Length / (pictureBox.Width));
            coeffForHeight = (int)(65536 / (pictureBox.Height - 40));
            for (int i = 0; i < pictureBox.Width; i++)
            {
                points[i] = new Point(i, (int)((audioData[i * coefForWidth] / coeffForHeight) + (pictureBox.Height / 2)));
            }
            graphicMain.Clear(Color.White);
            graphicMain.DrawLines(penCountur, points);
            Cursor.Current = Cursors.Default;
            break;
        case "aiff":
            Cursor.Current = Cursors.WaitCursor;
            // Convert the AIFF input to a temporary WAV file so the same header-offset
            // reading logic as the "wav" case can be applied.
            string tempFileName = Path.Combine(Environment.CurrentDirectory, "temp.wav");
            using (AiffFileReader reader = new AiffFileReader(inputAudioFile))
            {
                using (WaveFileWriter writer = new WaveFileWriter(tempFileName, reader.WaveFormat))
                {
                    byte[] buffer = new byte[4096];
                    int bytesRead = 0;
                    do
                    {
                        bytesRead = reader.Read(buffer, 0, buffer.Length);
                        writer.Write(buffer, 0, bytesRead);
                    } while (bytesRead > 0);
                }
            }
            FileStream tempFileStreamInput = new FileStream(tempFileName, FileMode.Open, FileAccess.Read);
            // Same header handling as the "wav" case — see the review notes there.
            tempFileStreamInput.Read(new byte[4], 0, 4);
            binRead = new BinaryReader(tempFileStreamInput);
            dataSize = (uint)(tempFileStreamInput.Length / 4);
            audioData = new Int16[dataSize];
            tempFileStreamInput.Seek(40, System.IO.SeekOrigin.Begin);
            numSamples = (int)(dataSize / 1);
            for (int i = 0; i < numSamples; i++)
            {
                audioData[i] = binRead.ReadInt16();
            }
            tempFileStreamInput.Close();
            File.Delete(tempFileName);
            coefForWidth = (int)(audioData.Length / (pictureBox.Width));
            coeffForHeight = (int)(65536 / (pictureBox.Height - 40));
            for (int i = 0; i < pictureBox.Width; i++)
            {
                points[i] = new Point(i, (int)((audioData[i * coefForWidth] / coeffForHeight) + (pictureBox.Height / 2)));
            }
            graphicMain.Clear(Color.White);
            graphicMain.DrawLines(penCountur, points);
            Cursor.Current = Cursors.Default;
            break;
        default:
            break;
    }
}
/// <summary>
/// Reads a mono or stereo WAV file, applies the given tempo and pitch factors via
/// SoundTouch, and writes the processed audio to a 16-bit WAV output file.
/// </summary>
/// <param name="fileName">path of the input WAV file (8- or 16-bit PCM)</param>
/// <param name="fileOut">path of the 16-bit WAV file to create</param>
/// <param name="newTempo">tempo factor passed to the stretcher</param>
/// <param name="newPitch">pitch factor passed to the stretcher</param>
void changePitchNTempo(String fileName, String fileOut, float newTempo, float newPitch)
{
    using (WaveFileReader reader = new WaveFileReader(fileName))
    {
        int numChannels = reader.WaveFormat.Channels;
        if (numChannels > 2)
        {
            throw new Exception("SoundTouch supports only mono or stereo.");
        }
        int sampleRate = reader.WaveFormat.SampleRate;
        int bitPerSample = reader.WaveFormat.BitsPerSample;
        // Validate the input format BEFORE creating the output file, so an
        // unsupported input no longer leaves an empty fileOut behind.
        if (bitPerSample != 16 && bitPerSample != 8)
        {
            throw new Exception("Not implemented yet.");
        }

        const int BUFFER_SIZE = 1024 * 16;
        SoundStretcher stretcher = new SoundStretcher(sampleRate, numChannels);
        stretcher.Tempo = newTempo;
        stretcher.Pitch = newPitch;

        byte[] buffer = new byte[BUFFER_SIZE];
        // Conversion scratch buffer, only needed for the 8-bit input path.
        short[] buffer2 = (bitPerSample == 8) ? new short[BUFFER_SIZE] : null;

        // The using blocks replace the old manual Close() calls and guarantee both
        // files are released even when the stretcher or a write throws.
        using (WaveFileWriter writer = new WaveFileWriter(fileOut, new WaveFormat(sampleRate, 16, numChannels)))
        {
            bool finished = false;
            while (true)
            {
                int bytesRead = 0;
                if (!finished)
                {
                    bytesRead = reader.Read(buffer, 0, BUFFER_SIZE);
                    if (bytesRead == 0)
                    {
                        // Input exhausted: flush the stretcher's internal pipeline.
                        finished = true;
                        stretcher.Flush();
                    }
                    else if (bitPerSample == 16)
                    {
                        stretcher.PutSamplesFromBuffer(buffer, 0, bytesRead);
                    }
                    else
                    {
                        // Convert 8-bit unsigned PCM to 16-bit signed.
                        for (int i = 0; i < BUFFER_SIZE; i++)
                        {
                            buffer2[i] = (short)((buffer[i] - 128) * 256);
                        }
                        // NOTE(review): PutSamples pushes the full buffer even when the
                        // last read was short (bytesRead < BUFFER_SIZE), so stale trailing
                        // samples can be appended — check for a count-taking overload.
                        stretcher.PutSamples(buffer2);
                    }
                }
                // Drain whatever processed audio is available and write it out.
                bytesRead = stretcher.ReceiveSamplesToBuffer(buffer, 0, BUFFER_SIZE);
                writer.Write(buffer, 0, bytesRead);
                if (finished && bytesRead == 0)
                {
                    break;
                }
            }
        }
    }
}