/// <summary>
/// Raises the <c>OnPlaySoundEffect</c> event with the given audio format and data.
/// </summary>
/// <param name="sender">The event source.</param>
/// <param name="format">The wave format of the sound effect.</param>
/// <param name="data">The raw audio bytes to play.</param>
internal static void InvokePlaySoundEffect(object sender, NAudio.Wave.WaveFormat format, byte[] data)
{
    // Snapshot the delegate so a concurrent unsubscribe between the null
    // check and the invocation cannot cause a NullReferenceException.
    var handler = OnPlaySoundEffect;
    if (handler != null)
    {
        handler(sender, new PlaySoundEffectEventArgs(format, data));
    }
}
/// <summary>
/// Sets the audio format for capture.
/// </summary>
/// <param name="format">The format.</param>
public static void SetAudioFormat(NAudio.Wave.WaveFormat format)
{
    // NOTE(review): '==' here is reference comparison (NAudio.Wave.WaveFormat
    // does not overload operator==), so this relies on AudioFormats exposing
    // shared singleton instances — confirm.
    if (format == AudioFormats.Pcm32kHz16bitMono)
    {
        Settings.Default.CaptureFormat = "Pcm32kHz16bitMono";
    }
    else if (format == AudioFormats.Pcm32kHz16bitStereo)
    {
        Settings.Default.CaptureFormat = "Pcm32kHz16bitStereo";
    }
    else if (format == AudioFormats.Pcm44kHz16bitMono)
    {
        Settings.Default.CaptureFormat = "Pcm44kHz16bitMono";
    }
    else if (format == AudioFormats.Pcm44kHz16bitStereo)
    {
        Settings.Default.CaptureFormat = "Pcm44kHz16bitStereo";
    }
    else if (format == AudioFormats.Pcm48kHz16bitMono)
    {
        Settings.Default.CaptureFormat = "Pcm48kHz16bitMono";
    }
    else
    {
        // Any format not matched above falls through to the 48 kHz stereo default.
        Settings.Default.CaptureFormat = "Pcm48kHz16bitStereo";
    }
}
/// <summary>
/// Reads simulator output ("simOut\&lt;song&gt;\output.txt", comma-separated sample
/// values in [-1, 1]), converts it to a 44.1 kHz 16-bit mono WAV, and copies the
/// result up to "simOut\&lt;song&gt;.wav".
/// </summary>
/// <param name="args">Optional single argument: the song path; otherwise read from stdin.</param>
static void Main(string[] args)
{
    string songPath = (args.Length == 1) ? args[0] : Console.ReadLine();
    string songName = Path.GetFileNameWithoutExtension(songPath);
    string simPath = "simOut\\" + songName + "\\";
    string[] filenames = new string[] { simPath + "output" };
    // TODO doesn't this mean the following loop should be removed?
    foreach (string e in filenames)
    {
        List<Int16> values = new List<Int16>();
        // 'using' guarantees the reader is closed even if parsing throws.
        using (StreamReader file = new StreamReader(e + ".txt"))
        {
            while (!file.EndOfStream)
            {
                string line = file.ReadLine();
                string[] stringValues = line.Split(',');
                for (int i = 0; i < stringValues.Length; i++)
                {
                    double h;
                    if (double.TryParse(stringValues[i], out h))
                    {
                        // Reuse the value TryParse already produced instead of
                        // parsing the same string a second time.
                        // NOTE(review): a sample of exactly 1.0 yields 32768,
                        // which overflows Int16 and wraps — pre-existing behavior.
                        values.Add((Int16)Math.Round(32768 * h));
                    }
                }
            }
        }
        Int16[] buffer = values.ToArray();
        NAudio.Wave.WaveFormat format = new NAudio.Wave.WaveFormat(44100, 16, 1);
        // Dispose the stream and writer deterministically; WaveFileWriter
        // finalizes the WAV header on dispose.
        using (FileStream outStream = new FileStream(e + ".wav", FileMode.Create))
        using (NAudio.Wave.WaveFileWriter writer = new NAudio.Wave.WaveFileWriter(outStream, format))
        {
            writer.WriteSamples(buffer, 0, buffer.Length);
        }
    }
    System.IO.File.Copy(filenames[0] + ".wav", "simOut\\" + songName + ".wav", true);
}
/// <summary>
/// Writes the wave file.
/// </summary>
/// <param name="inputArray">The 16-bit samples to write (44.1 kHz mono).</param>
/// <param name="filePath">Destination path of the WAV file.</param>
void WriteFile(short[] inputArray, string filePath)
{
    NAudio.Wave.WaveFormat waveFormat = new NAudio.Wave.WaveFormat(44100, 16, 1);
    // 'using' disposes (and therefore flushes) the writer even if
    // WriteSamples throws; the manual Flush/Dispose pair leaked on error.
    using (NAudio.Wave.WaveFileWriter writer = new NAudio.Wave.WaveFileWriter(filePath, waveFormat))
    {
        writer.WriteSamples(inputArray, 0, inputArray.Length);
    }
}
/// <summary>
/// Creates a decompressor for MP3 frames matching the given source format.
/// </summary>
/// <param name="waveFormat">Format derived from the first MP3 frame.</param>
public Mp3FrameDecompressor(NAudio.Wave.WaveFormat waveFormat)
{
    // we assume waveFormat was calculated from the first frame already
    // Decoded output is IEEE float at the source's sample rate / channel count.
    OutputFormat = NAudio.Wave.WaveFormat.CreateIeeeFloatWaveFormat(waveFormat.SampleRate, waveFormat.Channels);
    _decoder = new MpegFrameDecoder();
    _frame = new Mp3FrameWrapper();
}
/// <summary>
/// Init the audio playback
/// </summary>
public void Init()
{
    // 44.1 kHz, 16-bit mono PCM; callers push samples into 'provider'.
    NAudio.Wave.WaveFormat format = new NAudio.Wave.WaveFormat(44100, 16, 1);
    provider = new NAudio.Wave.BufferedWaveProvider(format);
    waveOut = new NAudio.Wave.WaveOut();
    waveOut.Init(provider);
    // Start immediately; the buffered provider outputs silence until data arrives.
    waveOut.Play();
}
/// <summary>
/// Initializes an LSDevice with no backing endpoint: identifiers are cleared,
/// both direction flags are off, and all captions carry the "undef" placeholder.
/// </summary>
public LSDevice()
{
    id = null;
    name = null;
    mm = null;
    wf = null;
    isRec = false;
    isPlay = false;
    serializationData = "undef";
    capt1 = "undef";
    capt2 = "undef";
}
/// <summary>
/// Captures one slice of a chart: playback rate, raw audio bytes, note and BPM
/// data, the wave format, and the time window it covers.
/// </summary>
/// <param name="rate">Playback rate for this slice.</param>
/// <param name="rawData">Raw audio bytes.</param>
/// <param name="notes">Note data lines.</param>
/// <param name="bpms">BPM change entries.</param>
/// <param name="wf">Wave format of the raw audio.</param>
/// <param name="start">Slice start time.</param>
/// <param name="duration">Slice length.</param>
public ChartSlice(double rate, byte[] rawData, string[] notes, BPM[] bpms, NAudio.Wave.WaveFormat wf, double start, double duration)
{
    // Plain field capture; no validation or copying is performed here.
    this.start = start;
    this.duration = duration;
    this.rate = rate;
    this.wf = wf;
    this.rawData = rawData;
    this.notes = notes;
    this.bpms = bpms;
}
/// <summary>
/// Converts the given file to MP3, resamples it to 48 kHz / 16-bit at the voice
/// channel's channel count, and streams it block-by-block to the connected
/// audio client, attempting one reconnect-and-retry if a send is cancelled.
/// Sets <c>_nextSong</c> when the file has been fully sent.
/// </summary>
/// <param name="filePath">Path of the audio file to send.</param>
private static async void SendMP3AudioFile(string filePath)
{
    // NOTE(review): async void — exceptions escaping this method are
    // unobservable by callers; consider returning Task if call sites allow it.
    Channel channel = _audioClient.Channel;
    filePath = ConvertToMp3(filePath);
    int channelCount = _client.GetService <AudioService>().Config.Channels;
    NAudio.Wave.WaveFormat OutFormat = new NAudio.Wave.WaveFormat(48000, 16, channelCount);
    using (NAudio.Wave.Mp3FileReader MP3Reader = new NAudio.Wave.Mp3FileReader(filePath))
    {
        using (NAudio.Wave.MediaFoundationResampler resampler = new NAudio.Wave.MediaFoundationResampler(MP3Reader, OutFormat))
        {
            resampler.ResamplerQuality = 60;
            // Send 20 ms slices: AverageBytesPerSecond / 50.
            int blockSize = OutFormat.AverageBytesPerSecond / 50;
            byte[] buffer = new byte[blockSize];
            int byteCount;
            while ((byteCount = resampler.Read(buffer, 0, blockSize)) > 0)
            {
                // Zero-pad a short final read so a full block is always sent.
                if (byteCount < blockSize)
                {
                    for (int i = byteCount; i < blockSize; ++i)
                    {
                        buffer[i] = 0;
                    }
                }
                // Give a disconnecting client a moment before attempting to send.
                // NOTE(review): blocking Thread.Sleep inside an async method —
                // await Task.Delay would be preferable; confirm threading model.
                if (_audioClient.State == ConnectionState.Disconnecting || _audioClient.State == ConnectionState.Disconnected)
                {
                    System.Threading.Thread.Sleep(1000);
                }
                try
                {
                    _audioClient.Send(buffer, 0, blockSize);
                }
#pragma warning disable CS0168 // Variable is declared but never used, supressed error because it must be declared to be caught
                catch (OperationCanceledException e)
#pragma warning restore CS0168
                {
                    // Send was cancelled (connection dropped): rejoin the
                    // channel, wait a second, then retry this block once.
                    //if (!(_audioClient.State == ConnectionState.Disconnecting || _audioClient.State == ConnectionState.Disconnected))
                    //{
                    _audioClient = await JoinAudioChannel(channel);
                    System.Threading.Thread.Sleep(1000);
                    _audioClient.Send(buffer, 0, blockSize);
                    //}
                }
            }
            //await _audioClient.Disconnect();
        }
    }
    _nextSong = true;
}
/// <summary>
/// Renders the key fields of a wave format as a single diagnostic string.
/// </summary>
/// <param name="wf">The format to describe; may be null.</param>
/// <returns>A human-readable field dump, or a placeholder when wf is null.</returns>
public static string stringer(NAudio.Wave.WaveFormat wf)
{
    if (wf == null)
    {
        return "F****D";
    }
    return $"ABPS:{wf.AverageBytesPerSecond} BPS:{wf.BitsPerSample} BA:{wf.BlockAlign} CH:{wf.Channels} ENC:{wf.Encoding} ES:{wf.ExtraSize} SR:{wf.SampleRate}";
}
/// <summary>
/// Re-samples the recorded WAV file to the configured sample rate. If the rate
/// already matches, the file is left untouched; otherwise the converted copy
/// replaces the original unless CreateCopy is set. Windows standalone only.
/// </summary>
/// <param name="outputFile">Path of the WAV file to convert in place.</param>
private void convert(string outputFile)
{
#if UNITY_STANDALONE_WIN
    // Temp name: "<original-without-extension>_<rate><ext>" next to the source.
    string tmpFile = outputFile.Substring(0, outputFile.Length - 4) + "_" + SampleRate + Speaker.AudioFileExtension;
    bool converted = false;
    try
    {
        using (var reader = new NAudio.Wave.WaveFileReader(outputFile))
        {
            if (reader.WaveFormat.SampleRate != SampleRate)
            {
                var newFormat = new NAudio.Wave.WaveFormat(SampleRate, BitsPerSample, Channels);
                using (var conversionStream = new NAudio.Wave.WaveFormatConversionStream(newFormat, reader))
                {
                    NAudio.Wave.WaveFileWriter.CreateWaveFile(tmpFile, conversionStream);
                }
                converted = true;
            }
            //else
            //{
            //    Debug.Log("File ignored: " + outputFile);
            //}
        }
    }
    catch (System.Exception ex)
    {
        Debug.LogError("Could not convert audio file: " + ex);
    }
    if (converted)
    {
        try
        {
            // Swap the converted file in for the original; with CreateCopy the
            // temp file is kept alongside the original instead.
            if (!CreateCopy)
            {
                System.IO.File.Delete(outputFile);
                System.IO.File.Move(tmpFile, outputFile);
            }
        }
        catch (System.Exception ex)
        {
            Debug.LogError("Could not delete and move audio files: " + ex);
        }
    }
#endif
}
/// <summary>
/// Sets up decoding state for a VGM stream: the output wave format, the fade
/// length in samples, and the total sample count including one loop.
/// </summary>
/// <param name="vgmStream">Stream whose sample rate / channel count drive the format.</param>
/// <param name="fileReader">Reader used to pull encoded data.</param>
public VGM_Decoding(VGM_Stream vgmStream, StreamReader.IReader fileReader)
{
    m_vgmStream = vgmStream;
    m_fileReader = fileReader;
    // (The previous version nulled m_WavFormat just before reassigning it —
    // that dead store has been removed.)
    m_WavFormat = new NAudio.Wave.WaveFormat(vgmStream.vgmSampleRate, vgmStream.vgmChannelCount);
    m_FadeSamples = (int)(m_FadeSeconds * vgmStream.vgmSampleRate);
    m_vgmStream.vgmDecodedSamples = 0;
    m_vgmStream.vgmTotalSamplesWithLoop = VGM_Utils.get_vgmstream_play_samples(2, m_FadeSeconds, 0, vgmStream);
}
/// <summary>
/// Re-samples the recorded WAV file to the configured sample rate, replacing
/// the original unless CreateCopy is set. Windows standalone/editor only;
/// other platforms log an error.
/// </summary>
/// <param name="outputFile">Path of the WAV file to convert in place.</param>
private void convert(string outputFile)
{
#if UNITY_STANDALONE_WIN || UNITY_EDITOR_WIN
    // Temp name: "<original-without-extension>_<rate><ext>" next to the source.
    string tmpFile = outputFile.Substring(0, outputFile.Length - 4) + "_" + SampleRate + Speaker.AudioFileExtension;
    bool converted = false;
    try
    {
        using (NAudio.Wave.WaveFileReader reader = new NAudio.Wave.WaveFileReader(outputFile))
        {
            // Only convert when the on-disk rate differs from the target.
            if (reader.WaveFormat.SampleRate != (int)SampleRate)
            {
                NAudio.Wave.WaveFormat newFormat = new NAudio.Wave.WaveFormat((int)SampleRate, BitsPerSample, Channels);
                using (NAudio.Wave.WaveFormatConversionStream conversionStream = new NAudio.Wave.WaveFormatConversionStream(newFormat, reader))
                {
                    NAudio.Wave.WaveFileWriter.CreateWaveFile(tmpFile, conversionStream);
                }
                converted = true;
            }
        }
    }
    catch (System.Exception ex)
    {
        Debug.LogError("Could not convert audio file: " + ex);
    }
    if (converted)
    {
        try
        {
            // Swap the converted file in for the original; with CreateCopy the
            // temp file is kept alongside the original instead.
            if (!CreateCopy)
            {
                System.IO.File.Delete(outputFile);
                System.IO.File.Move(tmpFile, outputFile);
            }
        }
        catch (System.Exception ex)
        {
            Debug.LogError("Could not delete and move audio files: " + ex);
        }
    }
#else
    Debug.LogError("Can only convert WAV audio files under Windows standalone!");
#endif
}
/// <summary>
/// Creates a new instance of the AudioFormat class from an NAudio wave format.
/// </summary>
/// <param name="waveFormat">The WaveFormat representing the WAV header.</param>
internal AudioFormat(NAudio.Wave.WaveFormat waveFormat)
{
    sampleRate = waveFormat.SampleRate;
    channelCount = waveFormat.Channels;
    bitsPerSample = waveFormat.BitsPerSample;
    blockAlign = waveFormat.BlockAlign;
    averageBytesPerSecond = waveFormat.AverageBytesPerSecond;
    format = (int)waveFormat.Encoding;
    // Serialize the header once and keep the raw bytes around.
    var headerStream = new MemoryStream();
    using (var headerWriter = new BinaryWriter(headerStream))
    {
        waveFormat.Serialize(headerWriter);
        nativeWaveFormat = new List<byte>(headerStream.ToArray());
    }
}
/// <summary>
/// Converts the given MP3 or WAV file to a 16 kHz / 16-bit mono temp WAV
/// ("mdc1200tmp.wav") and feeds it to ProcessProvider. Errors are reported to
/// the console rather than thrown.
/// </summary>
/// <param name="fileName">Path of the source audio file.</param>
public static void ProcessFile(string fileName)
{
    try
    {
        string fileExt = System.IO.Path.GetExtension(fileName.ToLower());
        // The MP3 and WAV branches previously duplicated the conversion code;
        // it now lives in ResampleToTempWav.
        if (fileExt.Contains("mp3"))
        {
            using (NAudio.Wave.Mp3FileReader rdr = new NAudio.Wave.Mp3FileReader(fileName))
            {
                ResampleToTempWav(rdr);
            }
        }
        else
        {
            using (NAudio.Wave.WaveFileReader rdr = new NAudio.Wave.WaveFileReader(fileName))
            {
                ResampleToTempWav(rdr);
            }
        }
        using (NAudio.Wave.AudioFileReader rdr = new NAudio.Wave.AudioFileReader("mdc1200tmp.wav"))
        {
            ProcessProvider(rdr, fileName);
        }
    }
    catch (Exception ex)
    {
        Console.WriteLine("Process File Exception: {0}", ex.Message);
    }
}

/// <summary>
/// Resamples the given source to 16 kHz / 16-bit mono and writes it to the
/// shared temp file "mdc1200tmp.wav", replacing any previous copy.
/// </summary>
/// <param name="source">An open WAV/MP3 wave stream to convert.</param>
private static void ResampleToTempWav(NAudio.Wave.WaveStream source)
{
    var newFormat = new NAudio.Wave.WaveFormat(16000, 16, 1);
    using (var conversionStream = new NAudio.Wave.WaveFormatConversionStream(newFormat, source))
    {
        if (System.IO.File.Exists("mdc1200tmp.wav"))
        {
            System.IO.File.Delete("mdc1200tmp.wav");
        }
        NAudio.Wave.WaveFileWriter.CreateWaveFile("mdc1200tmp.wav", conversionStream);
    }
}
/// <summary>
/// Converts the given MP3 or WAV file to a 16 kHz / 16-bit mono temp WAV
/// ("mdc1200tmp.wav") and hands it to ProcessProvider. Failures are printed to
/// the console instead of propagating.
/// </summary>
/// <param name="fileName">Path of the source audio file.</param>
public static void ProcessFile(string fileName)
{
    try
    {
        string extension = System.IO.Path.GetExtension(fileName.ToLower());
        var targetFormat = new NAudio.Wave.WaveFormat(16000, 16, 1);
        if (extension.Contains("mp3"))
        {
            using (var mp3Reader = new NAudio.Wave.Mp3FileReader(fileName))
            using (var converter = new NAudio.Wave.WaveFormatConversionStream(targetFormat, mp3Reader))
            {
                if (System.IO.File.Exists("mdc1200tmp.wav"))
                {
                    System.IO.File.Delete("mdc1200tmp.wav");
                }
                NAudio.Wave.WaveFileWriter.CreateWaveFile("mdc1200tmp.wav", converter);
            }
        }
        else
        {
            using (var wavReader = new NAudio.Wave.WaveFileReader(fileName))
            using (var converter = new NAudio.Wave.WaveFormatConversionStream(targetFormat, wavReader))
            {
                if (System.IO.File.Exists("mdc1200tmp.wav"))
                {
                    System.IO.File.Delete("mdc1200tmp.wav");
                }
                NAudio.Wave.WaveFileWriter.CreateWaveFile("mdc1200tmp.wav", converter);
            }
        }
        using (var tmpReader = new NAudio.Wave.AudioFileReader("mdc1200tmp.wav"))
        {
            ProcessProvider(tmpReader, fileName);
        }
    }
    catch (Exception ex)
    {
        Console.WriteLine("Process File Exception: {0}", ex.Message);
    }
}
/// <summary>
/// Formats a wave format as "&lt;rate&gt; Hz, &lt;bits&gt; bit, &lt;channels&gt;",
/// naming one and two channels "Mono" and "Stereo" respectively.
/// </summary>
/// <param name="format">The wave format to describe.</param>
/// <returns>A human-readable description of the format.</returns>
public static string AsString(NAudio.Wave.WaveFormat format)
{
    string channelsText;
    if (format.Channels == 1)
    {
        channelsText = "Mono";
    }
    else if (format.Channels == 2)
    {
        channelsText = "Stereo";
    }
    else
    {
        channelsText = format.Channels + " channels";
    }
    return $"{format.SampleRate} Hz, {format.BitsPerSample} bit, {channelsText}";
}
/// <summary>
/// Probes the wrapped MMDevice: validates its state, derives captions and
/// direction flags, and captures the device's wave format.
/// </summary>
/// <returns>True when the device is present and could be opened.</returns>
public bool test()
{
    tested = true;
    wf = null;
    try
    {
        // Absent or unplugged endpoints cannot be opened.
        if (mm == null || mm.State == NAudio.CoreAudioApi.DeviceState.NotPresent || mm.State == NAudio.CoreAudioApi.DeviceState.Unplugged)
        {
            return (false);
        }
        capt1 = mm.FriendlyName;       // windows name
        capt2 = mm.DeviceFriendlyName; // just device
        // FriendlyName is typically "<name> (<device>)": strip the trailing " (<device>)".
        if (capt1.EndsWith(capt2 + ")"))
        {
            capt1 = capt1.Substring(0, capt1.Length - (capt2.Length + 3));
        }
        isRec = mm.DataFlow == NAudio.CoreAudioApi.DataFlow.All || mm.DataFlow == NAudio.CoreAudioApi.DataFlow.Capture;
        isPlay = mm.DataFlow == NAudio.CoreAudioApi.DataFlow.All || mm.DataFlow == NAudio.CoreAudioApi.DataFlow.Render;
        // Render endpoints are opened in loopback mode so they can be recorded.
        NAudio.Wave.IWaveIn dev = isPlay ? new NAudio.Wave.WasapiLoopbackCapture(mm) : new NAudio.CoreAudioApi.WasapiCapture(mm);
        if (dev != null)
        {
            wf = dev.WaveFormat;
            makeSerializationData();
            dev.Dispose();
            return (true);
        }
    }
    catch (System.Runtime.InteropServices.COMException)
    {
        // Device vanished or is inaccessible; forget it.
        mm = null;
    }
    return (false);
}
/// <summary>
/// Probes the wrapped MMDevice: derives captions and direction flags and
/// captures the device's wave format.
/// </summary>
/// <returns>True when the device is present and could be opened.</returns>
public bool test()
{
    tested = true;
    wf = null;
    try
    {
        if (mm == null)
        {
            return (false);
        }
        capt1 = mm.FriendlyName;       // windows name
        capt2 = mm.DeviceFriendlyName; // just device
        // FriendlyName is typically "<name> (<device>)": strip the trailing " (<device>)".
        if (capt1.EndsWith(capt2 + ")"))
        {
            capt1 = capt1.Substring(0, capt1.Length - (capt2.Length + 3));
        }
        isRec = mm.DataFlow == NAudio.CoreAudioApi.DataFlow.All || mm.DataFlow == NAudio.CoreAudioApi.DataFlow.Capture;
        isPlay = mm.DataFlow == NAudio.CoreAudioApi.DataFlow.All || mm.DataFlow == NAudio.CoreAudioApi.DataFlow.Render;
        // Render endpoints are opened in loopback mode so they can be recorded.
        // (The original wrote "dev = isPlay ? dev = new ... : dev = new ..." —
        // the nested assignments were redundant and have been removed.)
        NAudio.Wave.IWaveIn dev = isPlay
            ? (NAudio.Wave.IWaveIn)new NAudio.Wave.WasapiLoopbackCapture(mm)
            : new NAudio.CoreAudioApi.WasapiCapture(mm);
        if (dev != null)
        {
            wf = dev.WaveFormat;
            makeSerializationData();
            dev.Dispose();
            return (true);
        }
    }
    catch
    {
        // Any failure (COM or otherwise) marks the device as unusable.
        mm = null;
    }
    return (false);
}
/// <summary>
/// Opens the given Ogg Vorbis file and exposes it as an IEEE-float wave stream.
/// </summary>
/// <param name="fileName">Path of the Vorbis file to read.</param>
public VorbisWaveReader(string fileName)
{
    _reader = new NVorbis.VorbisReader(fileName);
    // NVorbis decodes to 32-bit float samples, so advertise an IEEE float format.
    _waveFormat = NAudio.Wave.WaveFormat.CreateIeeeFloatWaveFormat(_reader.SampleRate, _reader.Channels);
}
/// <summary>
/// Ensures valid AIFF header and then finds data offset.
/// </summary>
/// <param name="stream">The stream, positioned at the start of audio data</param>
/// <param name="format">The format found</param>
/// <param name="dataChunkPosition">The position of the data chunk</param>
/// <param name="dataChunkLength">The length of the data chunk</param>
/// <param name="chunks">Additional chunks found</param>
public static void ReadAiffHeader(Stream stream, out NAudio.Wave.WaveFormat format, out long dataChunkPosition, out int dataChunkLength, List<AiffChunk> chunks)
{
    dataChunkPosition = -1;
    format = null;
    BinaryReader br = new BinaryReader(stream);
    if (ReadChunkName(br) != "FORM")
    {
        throw new FormatException("Not an AIFF file - no FORM header.");
    }
    // AIFF stores multi-byte values big-endian; ConvertLong/ConvertShort swap them.
    uint fileSize = ConvertLong(br.ReadBytes(4));
    if (ReadChunkName(br) != "AIFF")
    {
        throw new FormatException("Not an AIFF file - no AIFF header.");
    }
    dataChunkLength = 0;
    // NOTE(review): odd-length chunks are not padded to even boundaries here,
    // though the AIFF spec pads them — confirm against expected inputs.
    while (br.BaseStream.Position < br.BaseStream.Length)
    {
        AiffChunk nextChunk = ReadChunkHeader(br);
        if (nextChunk.chunkName == "COMM")
        {
            // COMM: channels, frame count, sample size, 80-bit extended sample rate.
            short numChannels = ConvertShort(br.ReadBytes(2));
            uint numSampleFrames = ConvertLong(br.ReadBytes(4));
            short sampleSize = ConvertShort(br.ReadBytes(2));
            double sampleRate = ConvertExtended(br.ReadBytes(10));
            format = new NAudio.Wave.WaveFormat((int)sampleRate, (int)sampleSize, (int)numChannels);
            // Skip whatever remains of the chunk beyond the 18 bytes just read.
            br.ReadBytes((int)nextChunk.chunkLength - 18);
        }
        else if (nextChunk.chunkName == "SSND")
        {
            uint offset = ConvertLong(br.ReadBytes(4));
            uint blockSize = ConvertLong(br.ReadBytes(4));
            // Audio starts after the 8-byte chunk header plus the 8-byte
            // offset/blockSize fields, shifted by the declared offset.
            dataChunkPosition = nextChunk.chunkStart + 16 + offset;
            dataChunkLength = (int)nextChunk.chunkLength - 8;
            br.ReadBytes((int)nextChunk.chunkLength - 8);
        }
        else
        {
            // Unknown chunk: optionally record it, then skip its payload.
            if (chunks != null)
            {
                chunks.Add(nextChunk);
            }
            br.ReadBytes((int)nextChunk.chunkLength);
        }
        // A zeroed chunk name means we ran off the end of meaningful data.
        if (nextChunk.chunkName == "\0\0\0\0")
            break;
        //Console.WriteLine("Read chunk {0} with length {1}", nextChunk.chunkName, nextChunk.chunkLength);
    }
    if (format == null)
    {
        throw new FormatException("Invalid AIFF file - No COMM chunk found.");
    }
    if (dataChunkPosition == -1)
    {
        throw new FormatException("Invalid AIFF file - No SSND chunk found.");
    }
}
/// <summary>
/// Converts an NAudio wave format into the equivalent Captura wave format,
/// preserving IEEE-float encoding when present.
/// </summary>
/// <param name="Wf">The source NAudio format.</param>
/// <returns>An equivalent Captura WaveFormat.</returns>
public static WaveFormat ToCaptura(this Wf Wf)
{
    if (Wf.Encoding == WfEnc.IeeeFloat)
    {
        return WaveFormat.CreateIeeeFloatWaveFormat(Wf.SampleRate, Wf.Channels);
    }
    return new WaveFormat(Wf.SampleRate, Wf.BitsPerSample, Wf.Channels);
}
/// <summary>
/// Ensures valid AIFF header and then finds data offset.
/// </summary>
/// <param name="stream">The stream, positioned at the start of audio data</param>
/// <param name="format">The format found</param>
/// <param name="dataChunkPosition">The position of the data chunk</param>
/// <param name="dataChunkLength">The length of the data chunk</param>
/// <param name="chunks">Additional chunks found</param>
public static void ReadAiffHeader(Stream stream, out NAudio.Wave.WaveFormat format, out long dataChunkPosition, out int dataChunkLength, List <AiffChunk> chunks)
{
    dataChunkPosition = -1;
    format = null;
    BinaryReader br = new BinaryReader(stream);
    if (ReadChunkName(br) != "FORM")
    {
        throw new FormatException("Not an AIFF file - no FORM header.");
    }
    // AIFF stores multi-byte values big-endian; ConvertLong/ConvertShort swap them.
    uint fileSize = ConvertLong(br.ReadBytes(4));
    if (ReadChunkName(br) != "AIFF")
    {
        throw new FormatException("Not an AIFF file - no AIFF header.");
    }
    dataChunkLength = 0;
    // NOTE(review): odd-length chunks are not padded to even boundaries here,
    // though the AIFF spec pads them — confirm against expected inputs.
    while (br.BaseStream.Position < br.BaseStream.Length)
    {
        AiffChunk nextChunk = ReadChunkHeader(br);
        if (nextChunk.chunkName == "COMM")
        {
            // COMM: channels, frame count, sample size, 80-bit extended sample rate.
            short numChannels = ConvertShort(br.ReadBytes(2));
            uint numSampleFrames = ConvertLong(br.ReadBytes(4));
            short sampleSize = ConvertShort(br.ReadBytes(2));
            double sampleRate = ConvertExtended(br.ReadBytes(10));
            format = new NAudio.Wave.WaveFormat((int)sampleRate, (int)sampleSize, (int)numChannels);
            // Skip whatever remains of the chunk beyond the 18 bytes just read.
            br.ReadBytes((int)nextChunk.chunkLength - 18);
        }
        else if (nextChunk.chunkName == "SSND")
        {
            uint offset = ConvertLong(br.ReadBytes(4));
            uint blockSize = ConvertLong(br.ReadBytes(4));
            // Audio starts after the 8-byte chunk header plus the 8-byte
            // offset/blockSize fields, shifted by the declared offset.
            dataChunkPosition = nextChunk.chunkStart + 16 + offset;
            dataChunkLength = (int)nextChunk.chunkLength - 8;
            br.ReadBytes((int)nextChunk.chunkLength - 8);
        }
        else
        {
            // Unknown chunk: optionally record it, then skip its payload.
            if (chunks != null)
            {
                chunks.Add(nextChunk);
            }
            br.ReadBytes((int)nextChunk.chunkLength);
        }
        // A zeroed chunk name means we ran off the end of meaningful data.
        if (nextChunk.chunkName == "\0\0\0\0")
        {
            break;
        }
        //Console.WriteLine("Read chunk {0} with length {1}", nextChunk.chunkName, nextChunk.chunkLength);
    }
    if (format == null)
    {
        throw new FormatException("Invalid AIFF file - No COMM chunk found.");
    }
    if (dataChunkPosition == -1)
    {
        throw new FormatException("Invalid AIFF file - No SSND chunk found.");
    }
}
/// <summary>
/// Builds the four "Filter Voice" chart panes from "input_prep.wav": the raw
/// input signal, the band-pass-filtered signal, and the Fourier spectra of
/// both; finally writes the filtered samples to "outputBPF_.wav".
/// </summary>
public void chartBuildVoiceFilter()
{
    int rate = 0;
    NAudio.Wave.WaveFormat format = null;
    var data = AudioFilter.readWav("input_prep.wav", out rate, out format);
    dt = 1.0 / rate; // sample period derived from the WAV's sample rate
    fcut = 1800;     // NOTE(review): fcut appears unused within this method — confirm
    m = 128;         // filter half-length (the BPF kernel spans 2*m+1 taps)
    pointsClear();
    mainTitle.Text = "Filter Voice";
    foreach (var area in chart.ChartAreas)
    {
        area.AxisX.Maximum = data.Length / cutter;
        area.AxisX.Minimum = 0;
    }
    // Suspend chart redraws while thousands of points are added.
    foreach (var series in chart.Series)
    {
        series.Points.SuspendUpdates();
    }
    //fill top left series: the raw input samples
    titleTopLeft.Text = "Input";
    seriesTopLeft.ChartType = SeriesChartType.FastLine;
    seriesTopLeft.BorderDashStyle = ChartDashStyle.Solid;
    Plots.minX = 0;
    Plots.maxX = data.Length;
    for (int i = 0; i < data.Length; i++)
    {
        seriesTopLeft.Points.AddXY(i, data[i]);
    }
    messageTopLeft = "";
    //fill top right series: input convolved with a 400-900 Hz band-pass kernel
    titleTopRight.Text = "BPF";
    seriesTopRight.ChartType = SeriesChartType.FastLine;
    seriesTopRight.BorderDashStyle = ChartDashStyle.Solid;
    List <double> filter = Plots.BPF_Filter(400, 900, m, dt);
    float[] data_out = new float[data.Length + 2 * m + 1];
    for (int k = Plots.minX; k < Plots.maxX; k++)
    {
        double y = 0;
        // Direct convolution; the k >= l guard keeps data[l] in range.
        for (int l = Plots.minX; l < data.Length + 2 * m + 1; l++)
        {
            if (k >= l && (k - l) < filter.Count)
            {
                y += filter[k - l] * data[l];
            }
        }
        seriesTopRight.Points.AddXY(k, y);
        data_out[k] = (float)y;
    }
    messageTopRight = "";
    //cut first m and last m+1 — trims the filter's transient edges
    float[] result = new float[data.Length];
    for (int i = m; i < data.Length + m; i++)
    {
        result[i - m] = data_out[i];
    }
    //fill bottom left series: Fourier spectrum of the raw input
    titleBottomLeft.Text = "DPF Input";
    seriesBottomLeft.ChartType = SeriesChartType.FastLine;
    seriesBottomLeft.BorderDashStyle = ChartDashStyle.Solid;
    Plots.minX = 0;
    Plots.maxX = (int)Plots.CalculateBorder(dt);
    Plots.PrepareDPF_Filter(seriesTopLeft.Points);
    chartAreaBottomLeft.AxisX.Maximum = Plots.CalculateBorder(dt);
    for (int i = 0; i < Plots.FourierArr.Length; i++)
    {
        seriesBottomLeft.Points.AddXY(2 * i * Plots.CalculateBorder(dt) / Plots.FourierArr.Length, Plots.FourierArr[i].C * (2 * m + 1));
    }
    messageBottomLeft = "";
    //fill bottom right series: Fourier spectrum of the filtered output
    titleBottomRight.Text = "DPF Output";
    seriesBottomRight.ChartType = SeriesChartType.FastLine;
    seriesBottomRight.BorderDashStyle = ChartDashStyle.Solid;
    Plots.minX = 0;
    Plots.maxX = (int)Plots.CalculateBorder(dt);
    Plots.PrepareDPF_Filter(seriesTopRight.Points);
    chartAreaBottomRight.AxisX.Maximum = Plots.CalculateBorder(dt);
    for (int i = 0; i < Plots.FourierArr.Length; i++)
    {
        seriesBottomRight.Points.AddXY(2 * i * Plots.CalculateBorder(dt) / Plots.FourierArr.Length, Plots.FourierArr[i].C * (2 * m + 1));
    }
    messageBottomRight = "";
    // Re-enable chart redraws now that all points are in place.
    foreach (var series in chart.Series)
    {
        series.Points.ResumeUpdates();
    }
    AudioFilter.writeWav("outputBPF_.wav", format, result);
}
/// <summary>
/// Creates a new Wave format.
/// </summary>
/// <param name="sampleRate">Sample rate.</param>
/// <param name="channels">Channel numbers.</param>
public WaveFormat(int sampleRate, int channels)
{
    // Wraps an NAudio IEEE-float format; all other fields derive from it.
    this.NAudioWaveFormat = NAudio.Wave.WaveFormat.CreateIeeeFloatWaveFormat(sampleRate, channels);
}
/// <summary>
/// Event payload describing a sound effect to play.
/// </summary>
/// <param name="format">Wave format of the audio data.</param>
/// <param name="data">Raw audio bytes in the given format.</param>
public PlaySoundEffectEventArgs(NAudio.Wave.WaveFormat format, byte[] data)
{
    this.Format = format;
    this.Data = data;
}
/// <summary>
/// Wraps a stream of Ogg Vorbis data as an IEEE-float wave source.
/// </summary>
/// <param name="sourceStream">Stream containing the Vorbis data; it is not
/// closed when this reader is disposed.</param>
public VorbisWaveReader(System.IO.Stream sourceStream)
{
    // 'false' => the reader does not take ownership of sourceStream.
    _reader = new NVorbis.VorbisReader(sourceStream, false);
    // NVorbis decodes to 32-bit float samples, so advertise an IEEE float format.
    _waveFormat = NAudio.Wave.WaveFormat.CreateIeeeFloatWaveFormat(_reader.SampleRate, _reader.Channels);
}
/// <summary>
/// Ensures valid AIFF header and then finds data offset.
/// </summary>
/// <param name="stream">The stream, positioned at the start of audio data</param>
/// <param name="format">The format found</param>
/// <param name="dataChunkPosition">The position of the data chunk</param>
/// <param name="dataChunkLength">The length of the data chunk</param>
/// <param name="chunks">Additional chunks found</param>
public static void ReadAiffHeader(Stream stream, out NAudio.Wave.WaveFormat format, out long dataChunkPosition, out int dataChunkLength, List <AiffChunk> chunks)
{
    dataChunkPosition = -1;
    format = null;
    BinaryReader br = new BinaryReader(stream);
    if (ReadChunkName(br) != "FORM")
    {
        throw new FormatException("Not an AIFF file - no FORM header.");
    }
    // AIFF stores multi-byte values big-endian; ConvertInt/ConvertShort swap them.
    uint fileSize = ConvertInt(br.ReadBytes(4));
    // Both plain AIFF and AIFC (possibly compressed) containers are accepted.
    string formType = ReadChunkName(br);
    if (formType != "AIFC" && formType != "AIFF")
    {
        throw new FormatException("Not an AIFF file - no AIFF/AIFC header.");
    }
    dataChunkLength = 0;
    while (br.BaseStream.Position < br.BaseStream.Length)
    {
        AiffChunk nextChunk = ReadChunkHeader(br);
        if (nextChunk.ChunkName == "COMM")
        {
            // COMM: channels, frame count, sample size, 80-bit extended sample rate.
            short numChannels = ConvertShort(br.ReadBytes(2));
            uint numSampleFrames = ConvertInt(br.ReadBytes(4));
            short sampleSize = ConvertShort(br.ReadBytes(2));
            double sampleRate = IEEE.ConvertFromIeeeExtended(br.ReadBytes(10));
            format = new WaveFormat((int)sampleRate, (int)sampleSize, (int)numChannels);
            if (nextChunk.ChunkLength > 18 && formType == "AIFC")
            {
                // In an AIFC file, the compression format is tacked on to the COMM chunk
                string compress = new string(br.ReadChars(4)).ToLower();
                if (compress != "none")
                {
                    throw new FormatException("Compressed AIFC is not supported.");
                }
                br.ReadBytes((int)nextChunk.ChunkLength - 22);
            }
            else
            {
                // Skip whatever remains of the chunk beyond the 18 bytes just read.
                br.ReadBytes((int)nextChunk.ChunkLength - 18);
            }
        }
        else if (nextChunk.ChunkName == "SSND")
        {
            uint offset = ConvertInt(br.ReadBytes(4));
            uint blockSize = ConvertInt(br.ReadBytes(4));
            // Audio starts after the 8-byte chunk header plus the 8-byte
            // offset/blockSize fields, shifted by the declared offset.
            dataChunkPosition = nextChunk.ChunkStart + 16 + offset;
            dataChunkLength = (int)nextChunk.ChunkLength - 8;
            br.ReadBytes((int)nextChunk.ChunkLength - 8);
        }
        else
        {
            // Unknown chunk: optionally record it, then skip its payload.
            if (chunks != null)
            {
                chunks.Add(nextChunk);
            }
            br.ReadBytes((int)nextChunk.ChunkLength);
        }
        // A zeroed chunk name means we ran off the end of meaningful data.
        if (nextChunk.ChunkName == "\0\0\0\0")
        {
            break;
        }
    }
    if (format == null)
    {
        throw new FormatException("Invalid AIFF file - No COMM chunk found.");
    }
    if (dataChunkPosition == -1)
    {
        throw new FormatException("Invalid AIFF file - No SSND chunk found.");
    }
}
/// <summary>
/// Rebuilds the cached wave format from the reader's current sample rate and
/// channel count, then notifies listeners that parameters changed.
/// </summary>
private void UpdateWaveFormat()
{
    _waveFormat = NAudio.Wave.WaveFormat.CreateIeeeFloatWaveFormat(_reader.SampleRate, _reader.Channels);
    ParameterChange?.Invoke(this, EventArgs.Empty);
}
/// <summary>
/// Creates a new Wave format.
/// </summary>
/// <param name="naudioWaveFormat">NAudio <see cref="NAudio.Wave.WaveFormat"/>.</param>
internal WaveFormat(NAudio.Wave.WaveFormat naudioWaveFormat)
    : this(naudioWaveFormat.SampleRate, naudioWaveFormat.Channels)
{
    // Delegates to the (sampleRate, channels) constructor; note that chained
    // constructor builds an IEEE-float format regardless of the source encoding.
}
/// <summary>
/// Splits the pre-processed sample buffer (PPsamples) into words using
/// voice-activity detection, logging each word's length to "Length.txt" and —
/// when a path is supplied — writing each word out as a WAV file.
/// </summary>
/// <param name="audio">Wave format used when writing per-word WAV files.</param>
/// <param name="path">Source file path; empty to skip writing WAV files.</param>
/// <returns>The extracted words, each as an array of samples.</returns>
public List<double[]> cropWords(NAudio.Wave.WaveFormat audio, String path = "")
{
    int step = L;
    int cnt = 0;
    List<int> list = new List<int>();
    List<double[]> words = new List<double[]>();
    // 'using' replaces the manual Close() so the log file is released even if
    // VAD or WAV writing throws.
    using (StreamWriter w = new StreamWriter("Length.txt", true))
    {
        double Tw = findTw();
        Console.WriteLine(Tw + "=Tw");
        // Clamp an implausibly large threshold back to a usable value.
        if (Tw > 0.1)
        {
            Tw = 0.03;
        }
        string[] massiv = path.Split('\\');
        string fileName = massiv[massiv.Length - 1].Split('.')[0];
        int zeros = 0;
        for (int i = step; i < PPsamples.Length; i += step)
        {
            Boolean vad = VAD(i, L, Sc, Tw);
            if (vad)
            {
                // Voiced frame: record its [start, end) sample range.
                zeros = 0;
                list.Add(i - step);
                list.Add(i);
            }
            else
            {
                zeros++;
            }
            // More than 35 consecutive silent frames ends the current word,
            // provided enough voiced ranges were collected.
            if (zeros > 35 && list.Count > 30)
            {
                zeros = 0;
                cnt++;
                flushWord(list, step, words, w, path, fileName, audio, cnt);
                list.Clear();
            }
        }
        // Flush a trailing word still open when the samples ran out.
        // (This logic was previously duplicated inline.)
        if (list.Count > 30)
        {
            cnt++;
            flushWord(list, step, words, w, path, fileName, audio, cnt);
        }
    }
    // Concatenate all words for the graph view.
    List<Double> aw = new List<Double>();
    foreach (var cw in words)
    {
        aw.AddRange(cw);
    }
    forGraph = aw.ToArray();
    return words;
}

/// <summary>
/// Copies the sample ranges in <paramref name="list"/> into one contiguous
/// word, records it, logs its length, and optionally writes it as a WAV file
/// named "&lt;path-minus-extension&gt;/&lt;fileName&gt;-&lt;cnt&gt;".
/// </summary>
private void flushWord(List<int> list, int step, List<double[]> words, StreamWriter w, string path, string fileName, NAudio.Wave.WaveFormat audio, int cnt)
{
    double[] file = new double[list.Count / 2 * step];
    int pos = 0;
    // Each pair (list[j], list[j+1]) is a [start, end) range into PPsamples.
    for (int j = 0; j < list.Count; j += 2)
    {
        for (int k = list[j]; k < list[j + 1]; k++)
        {
            file[pos++] = PPsamples[k];
        }
    }
    words.Add(file);
    w.WriteLine(file.Length);
    if (path.Length > 0)
    {
        float[] file2 = new float[file.Length];
        var s = path.Substring(0, path.Length - 4);
        Directory.CreateDirectory(s);
        Console.WriteLine("Created directory " + s);
        MainWindow.doubleToFloat(file2, file);
        MainWindow.createWav(file2, s + "/" + fileName + "-" + cnt, audio);
        Console.WriteLine(cnt);
    }
}