// Creates a SoX-based sample-rate converter between two PCM configurations.
// Only the sample rate may differ: a channel-count mismatch is rejected.
// For an exact 4x upsample at Medium quality or better, the conversion is done
// in two cascaded 2x stages (rateUp2 then rate); otherwise a single rate_t per
// channel converts directly from input to output rate.
public SOXResampler(AudioPCMConfig inputPCM, AudioPCMConfig outputPCM, SOXResamplerConfig config)
{
    this.inputPCM = inputPCM;
    this.outputPCM = outputPCM;
    if (inputPCM.ChannelCount != outputPCM.ChannelCount)
        throw new NotSupportedException();
    if (outputPCM.SampleRate == inputPCM.SampleRate * 4 && config.Quality >= SOXResamplerQuality.Medium)
    {
        // Two-stage 4x path: each stage doubles the rate. Per-channel converters
        // share one rate_shared_t per stage.
        this.rate = new rate_t[inputPCM.ChannelCount];
        this.shared = new rate_shared_t();
        this.rateUp2 = new rate_t[inputPCM.ChannelCount];
        this.sharedUp2 = new rate_shared_t();
        for (int i = 0; i < inputPCM.ChannelCount; i++)
        {
            // First stage uses the caller's phase/bandwidth/aliasing settings...
            rateUp2[i] = new rate_t(inputPCM.SampleRate, inputPCM.SampleRate * 2, sharedUp2,
                0.5, config.Quality, -1, config.Phase, config.Bandwidth, config.AllowAliasing);
            // ...second stage uses fixed phase 50 / bandwidth 90 / aliasing allowed.
            rate[i] = new rate_t(inputPCM.SampleRate * 2, inputPCM.SampleRate * 4, shared,
                0.5, config.Quality, -1, 50, 90, true);
        }
    }
    else
    {
        // Single-stage path for arbitrary ratios; factor is input/output rate.
        this.rate = new rate_t[inputPCM.ChannelCount];
        this.shared = new rate_shared_t();
        for (int i = 0; i < inputPCM.ChannelCount; i++)
        {
            rate[i] = new rate_t(inputPCM.SampleRate, outputPCM.SampleRate, shared,
                (double)inputPCM.SampleRate / outputPCM.SampleRate, config.Quality, -1,
                config.Phase, config.Bandwidth, config.AllowAliasing);
        }
    }
}
// Resamples a 44.1 kHz sine wave to 48 kHz in 100 chunks and checks that the
// accumulated deviation from the analytically expected 48 kHz sine stays tiny.
public void SOXResamplerConstructorTest()
{
    AudioPCMConfig inputPCM = new AudioPCMConfig(32, 1, 44100);
    AudioPCMConfig outputPCM = new AudioPCMConfig(32, 1, 48000);
    SOXResamplerConfig cfg;
    cfg.Quality = SOXResamplerQuality.Very;
    cfg.Phase = 50;
    cfg.AllowAliasing = false;
    cfg.Bandwidth = 0;
    SOXResampler resampler = new SOXResampler(inputPCM, outputPCM, cfg);
    // 400 ms of source audio per iteration; destination sized with headroom.
    AudioBuffer src = new AudioBuffer(inputPCM, 400 * inputPCM.SampleRate / 1000);
    AudioBuffer dst = new AudioBuffer(outputPCM, src.Size * 3);
    int offs = 0; // running output sample offset across iterations
    double delta = 0; // signed error accumulator (not per-sample abs)
    for (int i = 0; i < 100; i++)
    {
        src.Prepare(-1);
        // Continuous sine across chunk boundaries: phase depends on i * src.Size + j.
        for (int j = 0; j < src.Size; j++)
            src.Float[j, 0] = (float)Math.Sin((i * src.Size + j) * Math.PI / 44100);
        src.Length = src.Size;
        resampler.Flow(src, dst);
        for (int j = 0; j < dst.Length; j++)
            delta += dst.Float[j, 0] - Math.Sin((offs + j) * Math.PI / 48000);
        offs += dst.Length;
    }
    Assert.IsTrue(Math.Abs(delta) < 0.00001, "Error too large");
}
// Creates an audio destination for the given path and encoder type.
// Resolution order: dummy sink for NoAudio/".dummy", then the configured
// encoder for the file extension — external process (encoder.path) before
// in-process type (encoder.type). Throws for unknown extensions or when no
// encoder is configured for the requested lossless/lossy mode.
public static IAudioDest GetAudioDest(AudioEncoderType audioEncoderType, string path, AudioPCMConfig pcm, long finalSampleCount, int padding, string extension, CUEConfig config)
{
    IAudioDest dest;
    if (audioEncoderType == AudioEncoderType.NoAudio || extension == ".dummy")
    {
        dest = new DummyWriter(path, pcm);
        dest.FinalSampleCount = finalSampleCount;
        return dest;
    }
    CUEToolsFormat fmt;
    // Format lookup is keyed without the leading dot.
    if (!extension.StartsWith(".") || !config.formats.TryGetValue(extension.Substring(1), out fmt))
        throw new Exception("Unsupported audio type: " + path);
    CUEToolsUDC encoder =
        audioEncoderType == AudioEncoderType.Lossless ? fmt.encoderLossless :
        audioEncoderType == AudioEncoderType.Lossy ? fmt.encoderLossy :
        null;
    if (encoder == null)
        throw new Exception("Unsupported audio type: " + path);
    if (encoder.path != null)
        // External command-line encoder.
        dest = new UserDefinedWriter(path, null, pcm, encoder.path, encoder.parameters, encoder.default_mode, padding);
    else if (encoder.type != null)
    {
        // In-process encoder: must implement IAudioDest and take (path, pcm).
        object o = Activator.CreateInstance(encoder.type, path, pcm);
        if (o == null || !(o is IAudioDest))
            throw new Exception("Unsupported audio type: " + path + ": " + encoder.type.FullName);
        dest = o as IAudioDest;
    }
    else
        throw new Exception("Unsupported audio type: " + path);
    dest.CompressionLevel = encoder.DefaultModeIndex;
    dest.FinalSampleCount = finalSampleCount;
    dest.Padding = padding;
    dest.Settings = encoder.settings;
    return dest;
}
// Validates the input bit depth for this LAME writer.
// 16-bit is encoded directly; 24-bit goes through conversion; all else is rejected.
private void CheckPCMConfig(AudioPCMConfig pcm)
{
    bool supported = pcm.BitsPerSample == 16 || pcm.BitsPerSample == 24;
    if (!supported)
    {
        throw new UnsupportedBitsPerSampleException("LAME only supports 16 bits/sample. 24 bits are supported through conversion.");
    }
}
// Constructs a FLAC writer that encodes into a caller-supplied stream.
public NativeFlacWriter(Stream outputStream, AudioPCMConfig pcm)
{
    this.pcm = pcm;
    this.outputStream = outputStream;
    this.compressionLevel = 5; // default compression level
}
// Deterministic noise source that injects a fixed number of single-sample
// errors at pseudo-random positions, optionally capped per 10*588-sample stride.
// offset: samples of noise to skip before generation starts (burned through the
// RNG so the stream content is position-stable). errors: number of error
// positions to pick over sampleCount samples.
public NoiseAndErrorsGenerator(AudioPCMConfig pcm, long sampleCount, int seed, int offset, int errors, int maxStrideErrors = 0)
{
    if (offset < 0)
        throw new ArgumentOutOfRangeException("offset", "offset cannot be negative");
    if (errors < 0)
        // FIX: paramName was incorrectly "offset" for the errors check.
        throw new ArgumentOutOfRangeException("errors", "errors cannot be negative");
    this._sampleOffset = 0;
    this._sampleCount = sampleCount;
    this.pcm = pcm;
    this.rnd = new Random(seed);
    this.temp = new byte[8192 * pcm.BlockAlign];
    this.tempOffs = temp.Length;
    // Advance the noise RNG past `offset` samples so reads start at the right
    // point of the deterministic stream.
    int byteOff = offset * pcm.BlockAlign;
    for (int k = 0; k < byteOff / temp.Length; k++)
        rnd.NextBytes(temp);
    if (byteOff % temp.Length > 0)
        rnd.NextBytes(new byte[byteOff % temp.Length]);
    this.errors = new int[errors];
    this.rnd2 = new Random(seed);
    // Re-draw positions that would exceed the per-stride error cap.
    var strideErrors = new int[10 * 588];
    for (int i = 0; i < errors; i++)
    {
        do
        {
            this.errors[i] = this.rnd2.Next(0, (int)sampleCount);
        } while (maxStrideErrors > 0 && strideErrors[this.errors[i] % (10 * 588)] >= maxStrideErrors);
        strideErrors[this.errors[i] % (10 * 588)]++;
    }
    this.rnd2 = new Random(seed); // reset for use during playback
    Array.Sort(this.errors);
    this.nextError = 0;
}
// Creates a WAV writer over the supplied stream, or over a newly created file
// at `path` when no stream is given.
public WAVWriter(string path, Stream IO, AudioPCMConfig pcm)
{
    _path = path;
    _pcm = pcm;
    _IO = IO ?? new FileStream(path, FileMode.Create, FileAccess.Write, FileShare.Read);
    _bw = new BinaryWriter(_IO);
}
// Creates an MP3 writer that encodes into a caller-supplied stream,
// using default LAME settings.
public LameWriter(Stream output, AudioPCMConfig pcm)
{
    // Reject unsupported bit depths before touching any state.
    CheckPCMConfig(pcm);
    this.pcm = pcm;
    this.outputStream = output;
    this.settings = new LameWriterSettings();
}
// Base for writers that delegate encoding to a remote EncodingServer;
// the encoded result is written to a local file at `path`.
public RemoteWriterBase(IPAddress remoteAddress, string path, AudioPCMConfig pcm)
{
    this.pcm = pcm;
    this.outputPath = path;
    this.outputStream = File.Create(path);
    this.remoteEndpoint = new IPEndPoint(remoteAddress, EncodingServer.Port);
}
// Constructs a FLAC writer that creates and encodes into the file at `path`.
public NativeFlacWriter(string path, AudioPCMConfig pcm)
{
    this.pcm = pcm;
    this.outputPath = path;
    this.outputStream = File.Create(path);
    this.compressionLevel = 5; // default compression level
}
// A fixed-capacity pipe between an audio producer and consumer,
// double-buffered with one read-side and one write-side buffer.
public AudioPipe(AudioPCMConfig pcm, int size)
{
    this.pcm = pcm;
    _maxLength = size;
    _writeBuffer = new AudioBuffer(pcm, size);
    _readBuffer = new AudioBuffer(pcm, size);
    _samplePos = 0;
    _sampleLen = -1; // total length unknown until the producer finishes
}
// Creates an MP3 writer that encodes into a newly created file at `path`,
// using default LAME settings.
public LameWriter(string path, AudioPCMConfig pcm)
{
    // Reject unsupported bit depths before creating the output file.
    CheckPCMConfig(pcm);
    this.pcm = pcm;
    this.outputPath = path;
    this.outputStream = File.Create(path);
    this.settings = new LameWriterSettings();
}
// Holds one audio buffer per source plus per-source volume and fill flags,
// for use by the mixer.
public MixingBuffer(AudioPCMConfig pcm, int size, int sources)
{
    volume = new float[sources];
    filled = new bool[sources];
    source = new AudioBuffer[sources];
    for (int src = 0; src < sources; src++)
        source[src] = new AudioBuffer(pcm, size);
}
// Mixer input stage: double-buffers `delay` milliseconds of float audio from
// up to `sources` contributors. Requires 32-bit (float) samples.
public MixingSource(AudioPCMConfig pcm, int delay, int sources)
{
    if (pcm.BitsPerSample != 32)
        throw new NotSupportedException("please use 32 bits per sample (float)");
    this.pcm = pcm;
    // Buffer size in samples; integer math truncates sub-millisecond remainders.
    this.size = delay * pcm.SampleRate / 1000;
    this.buf = new MixingBuffer[2];
    this.buf[0] = new MixingBuffer(pcm, size, sources);
    this.buf[1] = new MixingBuffer(pcm, size, sources);
    this.playing = new bool[sources];
    this.volume = new float[sources];
    this.samplePos = 0;
}
// Configures an ALAC decoder with the stream's Rice-coding parameters and
// maximum frame size, and allocates its working buffers.
public ALACReader(AudioPCMConfig _pcm, int rice_historymult, int rice_initialhistory, int rice_kmodifier, int blocksize)
{
    pcm = _pcm;
    setinfo_max_samples_per_frame = blocksize;
    setinfo_rice_historymult = (byte)rice_historymult;
    setinfo_rice_initialhistory = (byte)rice_initialhistory;
    setinfo_rice_kmodifier = (byte)rice_kmodifier;
    // One prediction-error and one output buffer per stereo channel,
    // each holding a full frame.
    int frameLen = setinfo_max_samples_per_frame;
    _predicterror_buffer_a = new int[frameLen];
    _predicterror_buffer_b = new int[frameLen];
    _outputsamples_buffer_a = new int[frameLen];
    _outputsamples_buffer_b = new int[frameLen];
    _framesBuffer = new byte[65536];
}
// Wraps an external command-line encoder as an IAudioDest. Audio is fed either
// via a temporary WAV file (when the parameter template contains %I) or piped
// into the process's stdin as WAV. Template placeholders: %O output path,
// %M mode, %P padding, %I temp input file.
public UserDefinedWriter(string path, Stream IO, AudioPCMConfig pcm, string encoder, string encoderParams, string encoderMode, int padding)
{
    _path = path;
    _encoder = encoder;
    _encoderParams = encoderParams;
    _encoderMode = encoderMode;
    useTempFile = _encoderParams.Contains("%I");
    tempFile = path + ".tmp.wav";
    _encoderProcess = new Process();
    _encoderProcess.StartInfo.FileName = _encoder;
    _encoderProcess.StartInfo.Arguments = _encoderParams.Replace("%O", "\"" + path + "\"").Replace("%M", encoderMode).Replace("%P", padding.ToString()).Replace("%I", "\"" + tempFile + "\"");
    _encoderProcess.StartInfo.CreateNoWindow = true;
    // Stdin is only redirected when piping; temp-file mode starts the process later.
    if (!useTempFile)
        _encoderProcess.StartInfo.RedirectStandardInput = true;
    _encoderProcess.StartInfo.UseShellExecute = false;
    // If the template has no %O, the encoder writes to stdout and we capture it.
    if (!_encoderParams.Contains("%O"))
        _encoderProcess.StartInfo.RedirectStandardOutput = true;
    if (useTempFile)
    {
        // Temp-file mode: just write the WAV now; the process presumably runs at close time.
        wrt = new WAVWriter(tempFile, null, pcm);
        return;
    }
    bool started = false;
    Exception ex = null;
    try
    {
        started = _encoderProcess.Start();
        if (started)
            _encoderProcess.PriorityClass = Process.GetCurrentProcess().PriorityClass;
    }
    catch (Exception _ex)
    {
        ex = _ex;
    }
    if (!started)
        throw new Exception(_encoder + ": " + (ex == null ? "please check the path" : ex.Message));
    if (_encoderProcess.StartInfo.RedirectStandardOutput)
    {
        // Pump the encoder's stdout into the output file through a cyclic buffer.
        Stream outputStream = new FileStream(path, FileMode.Create, FileAccess.Write, FileShare.Read);
        outputBuffer = new CyclicBuffer(2 * 1024 * 1024, _encoderProcess.StandardOutput.BaseStream, outputStream);
    }
    // Buffer writes to the encoder's stdin so the caller isn't blocked on the process.
    Stream inputStream = new CyclicBufferOutputStream(_encoderProcess.StandardInput.BaseStream, 128 * 1024);
    wrt = new WAVWriter(path, inputStream, pcm);
}
/// <summary>
/// Creates a new WASAPI output bound to the given device.
/// </summary>
/// <param name="device">Device to use.</param>
/// <param name="shareMode">Shared or exclusive audio client mode.</param>
/// <param name="useEventSync">true if sync is done with event. false use sleep.</param>
/// <param name="latency">Requested latency in milliseconds.</param>
/// <param name="pcm">PCM format to play; must be supported by the device in the chosen mode.</param>
public WasapiOut(MMDevice device, AudioClientShareMode shareMode, bool useEventSync, int latency, AudioPCMConfig pcm)
{
    this.audioClient = device.AudioClient;
    this.shareMode = shareMode;
    this.isUsingEventSync = useEventSync;
    this.latencyMilliseconds = latency;
    this.pcm = pcm;
    this.outputFormat = new NAudio.Wave.WaveFormatExtensible(pcm.SampleRate, pcm.BitsPerSample, pcm.ChannelCount);
    // No resampling here: if the exact format isn't supported we bail out,
    // even when the device offers a closest match.
    NAudio.Wave.WaveFormatExtensible closestSampleRateFormat;
    if (!audioClient.IsFormatSupported(shareMode, outputFormat, out closestSampleRateFormat))
        throw new NotSupportedException("PCM format mismatch");
    Init();
    bufferFrameCount = audioClient.BufferSize;
    // Double-buffered reads sized to the device buffer.
    readBuffers = new AudioBuffer[2];
    readBuffers[0] = new AudioBuffer(pcm, bufferFrameCount);
    readBuffers[1] = new AudioBuffer(pcm, bufferFrameCount);
    //if (this.shareMode == AudioClientShareMode.Exclusive)
    //    this.latencyMilliseconds = (int)(this.audioClient.DefaultDevicePeriod / 10000);
}
// Creates a raw WAV reader with a known PCM format (no header parsing):
// the entire stream/file content is treated as sample data.
public WAVReader(string path, Stream IO, AudioPCMConfig _pcm)
{
    _path = path;
    // Open the file ourselves unless the caller supplied a stream.
    _IO = IO ?? new FileStream(path, FileMode.Open, FileAccess.Read, FileShare.Read, 0x10000, FileOptions.SequentialScan);
    _br = new BinaryReader(_IO);
    _largeFile = false;
    _dataOffset = 0;
    _samplePos = 0;
    pcm = _pcm;
    _dataLen = _IO.CanSeek ? _IO.Length : -1;
    if (_dataLen < 0)
    {
        _sampleLen = -1; // unseekable stream: length unknown
    }
    else
    {
        _sampleLen = _dataLen / pcm.BlockAlign;
        if ((_dataLen % pcm.BlockAlign) != 0)
            throw new Exception("odd file size");
    }
}
// Converts a raw 16-bit mono 16 kHz PCM byte buffer to FLAC, in memory.
// Returns null if the round-tripped WAV does not report a 16 kHz sample rate.
private byte[] Wav2FlacBuffConverter(byte[] Buffer)
{
    Stream OutWavStream = new MemoryStream();
    Stream OutFlacStream = new MemoryStream();
    AudioPCMConfig pcmconf = new AudioPCMConfig(16, 1, 16000);
    WAVWriter wr = new WAVWriter(null, OutWavStream, pcmconf);
    // 2 bytes per sample (16-bit mono).
    wr.Write(new AudioBuffer(pcmconf, Buffer, Buffer.Length / 2));
    OutWavStream.Seek(0, SeekOrigin.Begin);
    WAVReader audioSource = new WAVReader(null, OutWavStream);
    if (audioSource.PCM.SampleRate != 16000)
        return null;
    AudioBuffer buff = new AudioBuffer(audioSource, 0x10000);
    FlakeWriter flakeWriter = new FlakeWriter(null, OutFlacStream, audioSource.PCM);
    flakeWriter.CompressionLevel = 8;
    while (audioSource.Read(buff, -1) != 0)
    {
        flakeWriter.Write(buff);
    }
    // NOTE(review): flakeWriter is never closed here — confirm FlakeWriter flushes
    // all frames on Write, otherwise the tail of the FLAC stream may be missing.
    OutFlacStream.Seek(0, SeekOrigin.Begin);
    byte[] barr = new byte[OutFlacStream.Length];
    // FIX: Stream.Read is not guaranteed to fill the buffer in one call;
    // loop until all bytes are read (the original ignored the return value).
    int total = 0;
    while (total < barr.Length)
    {
        int read = OutFlacStream.Read(barr, total, barr.Length - total);
        if (read <= 0)
            break;
        total += read;
    }
    return barr;
}
// Creates a raw WAV reader over an existing stream with a known PCM format
// (no header parsing): the whole stream is treated as sample data.
public WAVReader(Stream IO, AudioPCMConfig _pcm)
{
    if (IO == null)
    {
        throw new ArgumentNullException("IO");
    }
    _IO = IO;
    _br = new BinaryReader(_IO);
    _largeFile = false;
    _dataOffset = 0;
    _samplePos = 0;
    pcm = _pcm;
    // Unseekable streams have unknown length.
    _dataLen = _IO.CanSeek ? _IO.Length : -1;
    if (_dataLen >= 0)
    {
        _sampleLen = _dataLen / pcm.BlockAlign;
        if ((_dataLen % pcm.BlockAlign) != 0)
            throw new Exception("odd file size");
    }
    else
    {
        _sampleLen = -1;
    }
}
// Validates the input bit depth; this writer accepts 16-bit PCM only.
private void CheckPCMConfig(AudioPCMConfig pcm)
{
    if (pcm.BitsPerSample == 16)
        return;
    throw new ArgumentException("LAME only supports 16 bits/sample.");
}
// Creates encoder settings for the given PCM format, with empty
// mode/default-mode strings (delegates to the two-string constructor).
public AudioEncoderSettings(AudioPCMConfig pcm)
    : this("", "")
{
    this.PCM = pcm;
}
// Allocates an empty audio buffer of the given capacity (in samples).
public AudioBuffer(AudioPCMConfig _pcm, int _size)
{
    length = 0; // no samples yet
    pcm = _pcm;
    size = _size;
}
// Creates an MP3 writer that encodes into a caller-supplied stream.
public LameWriter(Stream output, AudioPCMConfig pcm)
{
    // Reject unsupported bit depths before touching any state.
    CheckPCMConfig(pcm);
    this.pcm = pcm;
    this.outputStream = output;
}
// WAV writer settings for the given PCM format; all other options
// come from the base-class defaults.
public WAVWriterSettings(AudioPCMConfig pcm)
    : base(pcm)
{
}
// Wraps an existing interleaved sample array (samples x channels) as a buffer
// containing _length samples.
public AudioBuffer(AudioPCMConfig _pcm, int[,] _samples, int _length)
{
    PCM = _pcm;
    // assert _samples.GetLength(1) == pcm.ChannelCount
    Prepare(_samples, _length);
}
// Remote VBR MP3 writer targeting a caller-supplied output stream;
// validates the PCM format after base-class setup.
public RemoteMp3VbrWriter(IPAddress remoteAddress, Stream output, AudioPCMConfig pcm)
    : base(remoteAddress, output, pcm)
{
    this.CheckPCMConfig(pcm);
}
// FLAC writer that creates its own output file at `path`
// (delegates to the stream-based constructor with a null stream).
public FlakeWriter(string path, AudioPCMConfig pcm)
    : this(path, null, pcm)
{
}
// Parses the RIFF/WAVE headers, setting PCM, _dataOffset, _dataLen and
// _largeFile, and leaves the reader positioned at the first audio sample.
// Throws on malformed files and on non-PCM format tags.
private void ParseHeaders()
{
    const long maxFileSize = 0x7FFFFFFEL; // 2 GB RIFF size-field limit
    // FourCC values, little-endian: "RIFF", "WAVE", "fmt ", "data".
    const uint fccRIFF = 0x46464952;
    const uint fccWAVE = 0x45564157;
    const uint fccFormat = 0x20746D66;
    const uint fccData = 0x61746164;
    uint lenRIFF;
    bool foundFormat, foundData;
    if (_br.ReadUInt32() != fccRIFF)
    {
        throw new Exception("Not a valid RIFF file.");
    }
    lenRIFF = _br.ReadUInt32();
    if (_br.ReadUInt32() != fccWAVE)
    {
        throw new Exception("Not a valid WAVE file.");
    }
    _largeFile = false;
    foundFormat = false;
    foundData = false;
    long pos = 12;
    do
    {
        uint ckID, ckSize, ckSizePadded;
        long ckEnd;
        ckID = _br.ReadUInt32();
        ckSize = _br.ReadUInt32();
        ckSizePadded = (ckSize + 1U) & ~1U; // chunks are word-aligned
        pos += 8;
        ckEnd = pos + (long)ckSizePadded;
        if (ckID == fccFormat)
        {
            foundFormat = true;
            uint fmtTag = _br.ReadUInt16();
            int _channelCount = _br.ReadInt16();
            int _sampleRate = _br.ReadInt32();
            _br.ReadInt32(); // bytes per second
            int _blockAlign = _br.ReadInt16();
            int _bitsPerSample = _br.ReadInt16();
            pos += 16;
            if (fmtTag == 0xFFFEU && ckSize >= 34) // WAVE_FORMAT_EXTENSIBLE
            {
                _br.ReadInt16(); // CbSize
                _br.ReadInt16(); // ValidBitsPerSample
                // NOTE(review): channel mask is read but not passed to AudioPCMConfig here.
                int channelMask = _br.ReadInt32();
                fmtTag = _br.ReadUInt16(); // real format tag from the SubFormat GUID
                pos += 10;
            }
            if (fmtTag != 1) // WAVE_FORMAT_PCM
            {
                throw new Exception("WAVE format tag not WAVE_FORMAT_PCM.");
            }
            PCM = new AudioPCMConfig(_bitsPerSample, _channelCount, _sampleRate);
            if (PCM.BlockAlign != _blockAlign)
            {
                throw new Exception("WAVE has strange BlockAlign");
            }
        }
        else if (ckID == fccData)
        {
            foundData = true;
            _dataOffset = pos;
            if (!_IO.CanSeek || _IO.Length <= maxFileSize)
            {
                // FIX: also treat a zero-size data chunk as "length unknown"
                // (streaming writers emit 0 or 0xffffffff placeholders), matching
                // the newer ParseHeaders variant elsewhere in this codebase.
                if (ckSize == 0 || ckSize >= 0x7fffffff)
                {
                    _dataLen = -1;
                }
                else
                {
                    _dataLen = (long)ckSize;
                }
            }
            else
            {
                // File exceeds the RIFF size-field limit: ignore the stored size.
                _largeFile = true;
                _dataLen = _IO.Length - pos;
            }
        }
        if ((foundFormat & foundData) || _largeFile)
        {
            break;
        }
        // Skip unrecognized chunks (seek when possible, otherwise read-and-discard).
        if (_IO.CanSeek)
        {
            _IO.Seek(ckEnd, SeekOrigin.Begin);
        }
        else
        {
            _br.ReadBytes((int)(ckEnd - pos));
        }
        pos = ckEnd;
    } while (true);
    if ((foundFormat & foundData) == false || PCM == null)
    {
        throw new Exception("Format or data chunk not found.");
    }
    if (PCM.ChannelCount <= 0)
    {
        throw new Exception("Channel count is invalid.");
    }
    if (PCM.SampleRate <= 0)
    {
        throw new Exception("Sample rate is invalid.");
    }
    if ((PCM.BitsPerSample <= 0) || (PCM.BitsPerSample > 32))
    {
        throw new Exception("Bits per sample is invalid.");
    }
    if (pos != _dataOffset)
    {
        Position = 0; // rewind to the first sample of the data chunk
    }
}
// Remote VBR MP3 writer that creates its own output file at `outputPath`;
// validates the PCM format after base-class setup.
public RemoteMp3VbrWriter(IPAddress remoteAddress, string outputPath, AudioPCMConfig pcm)
    : base(remoteAddress, outputPath, pcm)
{
    this.CheckPCMConfig(pcm);
}
// Parses the RIFF/WAVE headers, setting pcm, _dataOffset, _dataLen and
// _largeFile, and leaves the reader positioned at the first audio sample.
// Supports WAVE_FORMAT_EXTENSIBLE (channel mask is propagated to the PCM
// config) and treats zero/placeholder data-chunk sizes as "length unknown".
private void ParseHeaders()
{
    const long maxFileSize = 0x7FFFFFFEL; // 2 GB RIFF size-field limit
    // FourCC values, little-endian: "RIFF", "WAVE", "fmt ", "data".
    const uint fccRIFF = 0x46464952;
    const uint fccWAVE = 0x45564157;
    const uint fccFormat = 0x20746D66;
    const uint fccData = 0x61746164;
    uint lenRIFF;
    bool foundFormat, foundData;
    if (_br.ReadUInt32() != fccRIFF)
    {
        throw new Exception("Not a valid RIFF file.");
    }
    lenRIFF = _br.ReadUInt32();
    if (_br.ReadUInt32() != fccWAVE)
    {
        throw new Exception("Not a valid WAVE file.");
    }
    _largeFile = false;
    foundFormat = false;
    foundData = false;
    long pos = 12;
    do
    {
        uint ckID, ckSize, ckSizePadded;
        long ckEnd;
        ckID = _br.ReadUInt32();
        ckSize = _br.ReadUInt32();
        ckSizePadded = (ckSize + 1U) & ~1U; // chunks are word-aligned
        pos += 8;
        ckEnd = pos + (long)ckSizePadded;
        if (ckID == fccFormat)
        {
            foundFormat = true;
            uint fmtTag = _br.ReadUInt16();
            int _channelCount = _br.ReadInt16();
            int _sampleRate = _br.ReadInt32();
            _br.ReadInt32(); // bytes per second
            int _blockAlign = _br.ReadInt16();
            int _bitsPerSample = _br.ReadInt16();
            int _channelMask = 0;
            pos += 16;
            if (fmtTag == 0xFFFEU && ckSize >= 34) // WAVE_FORMAT_EXTENSIBLE
            {
                _br.ReadInt16(); // CbSize
                _br.ReadInt16(); // ValidBitsPerSample
                _channelMask = _br.ReadInt32();
                fmtTag = _br.ReadUInt16(); // real format tag from the SubFormat GUID
                pos += 10;
            }
            if (fmtTag != 1) // WAVE_FORMAT_PCM
                throw new Exception("WAVE format tag not WAVE_FORMAT_PCM.");
            pcm = new AudioPCMConfig(_bitsPerSample, _channelCount, _sampleRate, (AudioPCMConfig.SpeakerConfig)_channelMask);
            if (pcm.BlockAlign != _blockAlign)
                throw new Exception("WAVE has strange BlockAlign");
        }
        else if (ckID == fccData)
        {
            foundData = true;
            _dataOffset = pos;
            if (!_IO.CanSeek || _IO.Length <= maxFileSize)
            {
                // 0 and 0x7fffffff+ are placeholder sizes (streamed WAVs): length unknown.
                if (ckSize == 0 || ckSize >= 0x7fffffff)
                    _dataLen = -1;
                else
                    _dataLen = (long)ckSize;
            }
            else
            {
                // File exceeds the RIFF size-field limit: ignore the stored size.
                _largeFile = true;
                _dataLen = _IO.Length - pos;
            }
        }
        if ((foundFormat & foundData) || _largeFile)
            break;
        // Skip unrecognized chunks (seek when possible, otherwise read-and-discard).
        if (_IO.CanSeek)
            _IO.Seek(ckEnd, SeekOrigin.Begin);
        else
            _br.ReadBytes((int)(ckEnd - pos));
        pos = ckEnd;
    } while (true);
    if ((foundFormat & foundData) == false || pcm == null)
        throw new Exception("Format or data chunk not found.");
    if (pcm.ChannelCount <= 0)
        throw new Exception("Channel count is invalid.");
    if (pcm.SampleRate <= 0)
        throw new Exception("Sample rate is invalid.");
    if ((pcm.BitsPerSample <= 0) || (pcm.BitsPerSample > 32))
        throw new Exception("Bits per sample is invalid.");
    if (pos != _dataOffset)
        Position = 0; // rewind to the first sample of the data chunk
}
// Wraps raw PCM bytes as a buffer containing _length samples.
public AudioBuffer(AudioPCMConfig _pcm, byte[] _bytes, int _length)
{
    PCM = _pcm;
    Prepare(_bytes, _length);
}
// WAV writer that creates its own output file at `path`
// (delegates to the stream-based constructor with a null stream).
public WAVWriter(string path, AudioPCMConfig pcm)
    : this(path, null, pcm)
{
}
// Discarding audio sink: records the PCM format only; `path` is accepted for
// interface compatibility but ignored — nothing is written anywhere.
public DummyWriter(string path, AudioPCMConfig pcm)
{
    _pcm = pcm;
}
// Main FLAC encoder constructor: validates the format (stereo only), allocates
// the encoder's working buffers and initializes CRC tables and frame state.
// Buffer multipliers for the 2-channel case cover the mid/side decorrelation
// channels in addition to left/right.
public FlakeWriter(string path, Stream IO, AudioPCMConfig pcm)
{
    _pcm = pcm;
    //if (_pcm.BitsPerSample != 16)
    //    throw new Exception("Bits per sample must be 16.");
    if (_pcm.ChannelCount != 2)
        throw new Exception("ChannelCount must be 2.");
    channels = pcm.ChannelCount;
    // flake_validate_params
    _path = path;
    _IO = IO;
    samplesBuffer = new int[Flake.MAX_BLOCKSIZE * (channels == 2 ? 4 : channels)];
    residualBuffer = new int[Flake.MAX_BLOCKSIZE * (channels == 2 ? 10 : channels + 1)];
    windowBuffer = new float[Flake.MAX_BLOCKSIZE * 2 * lpc.MAX_LPC_WINDOWS];
    windowScale = new double[lpc.MAX_LPC_WINDOWS];
    eparams.flake_set_defaults(_compressionLevel);
    eparams.padding_size = 8192;
    crc8 = new Crc8();
    crc16 = new Crc16();
    frame = new FlacFrame(channels * 2);
}
// Emits the RIFF/WAVE header (plus any queued extra chunks) for the configured
// PCM format. Uses WAVE_FORMAT_EXTENSIBLE when the bit depth is not 16/24 or
// the channel mask is non-default. When the final sample count is unknown,
// the RIFF and data sizes are written as 0xffffffff placeholders.
private void WriteHeaders()
{
    // FourCC values, little-endian: "RIFF", "WAVE", "fmt ", "data".
    const uint fccRIFF = 0x46464952;
    const uint fccWAVE = 0x45564157;
    const uint fccFormat = 0x20746D66;
    const uint fccData = 0x61746164;
    bool wavex = (Settings.PCM.BitsPerSample != 16 && Settings.PCM.BitsPerSample != 24)
        || Settings.PCM.ChannelMask != AudioPCMConfig.GetDefaultChannelMask(Settings.PCM.ChannelCount);
    hdrLen += 36 + (wavex ? 24 : 0) + 8;
    uint dataLen = (uint)(_finalSampleCount * Settings.PCM.BlockAlign);
    uint dataLenPadded = dataLen + (dataLen & 1); // data chunk is word-aligned
    _bw.Write(fccRIFF);
    if (_finalSampleCount <= 0)
    {
        _bw.Write((uint)0xffffffff); // size unknown: placeholder
    }
    else
    {
        _bw.Write((uint)(dataLenPadded + hdrLen - 8));
    }
    _bw.Write(fccWAVE);
    _bw.Write(fccFormat);
    if (wavex)
    {
        _bw.Write((uint)40);
        _bw.Write((ushort)0xfffe); // WAVEX follows
    }
    else
    {
        _bw.Write((uint)16);
        _bw.Write((ushort)1); // PCM
    }
    _bw.Write((ushort)Settings.PCM.ChannelCount);
    _bw.Write((uint)Settings.PCM.SampleRate);
    _bw.Write((uint)(Settings.PCM.SampleRate * Settings.PCM.BlockAlign)); // avg bytes/sec
    _bw.Write((ushort)Settings.PCM.BlockAlign);
    // Container bit depth, rounded up to a whole number of bytes.
    _bw.Write((ushort)((Settings.PCM.BitsPerSample + 7) / 8 * 8));
    if (wavex)
    {
        _bw.Write((ushort)22); // length of WAVEX structure
        _bw.Write((ushort)Settings.PCM.BitsPerSample); // valid bits
        _bw.Write((uint)Settings.PCM.ChannelMask);
        // KSDATAFORMAT_SUBTYPE_PCM GUID, written field by field.
        _bw.Write((ushort)1); // PCM Guid
        _bw.Write((ushort)0);
        _bw.Write((ushort)0);
        _bw.Write((ushort)0x10);
        _bw.Write((byte)0x80);
        _bw.Write((byte)0x00);
        _bw.Write((byte)0x00);
        _bw.Write((byte)0xaa);
        _bw.Write((byte)0x00);
        _bw.Write((byte)0x38);
        _bw.Write((byte)0x9b);
        _bw.Write((byte)0x71);
    }
    if (_chunks != null)
    {
        // Extra chunks queued before the data chunk, each padded to even length.
        for (int i = 0; i < _chunks.Count; i++)
        {
            _bw.Write(_chunkFCCs[i]);
            _bw.Write((uint)_chunks[i].Length);
            _bw.Write(_chunks[i]);
            if ((_chunks[i].Length & 1) != 0)
            {
                _bw.Write((byte)0);
            }
        }
    }
    _bw.Write(fccData);
    if (_finalSampleCount <= 0)
    {
        _bw.Write((uint)0xffffffff); // size unknown: placeholder
    }
    else
    {
        _bw.Write(dataLen);
    }
    _headersWritten = true;
}
// Convenience overload that builds the PCM config (stereo) from bit depth and
// sample rate. Plain paths route to the extension-based factory; a ".lossy"
// inner extension triggers lossyWAV output: a lossy stream plus, in Hybrid
// mode, a correction (.lwcdf) stream, both wrapped in a LossyWAVWriter.
public static IAudioDest GetAudioDest(AudioEncoderType audioEncoderType, string path, long finalSampleCount, int bitsPerSample, int sampleRate, int padding, CUEConfig config)
{
    string extension = Path.GetExtension(path).ToLower();
    string filename = Path.GetFileNameWithoutExtension(path);
    AudioPCMConfig pcm = new AudioPCMConfig(bitsPerSample, 2, sampleRate);
    // Not a lossyWAV target: delegate directly.
    if (audioEncoderType == AudioEncoderType.NoAudio
        || audioEncoderType == AudioEncoderType.Lossless
        || Path.GetExtension(filename).ToLower() != ".lossy")
        return GetAudioDest(audioEncoderType, path, pcm, finalSampleCount, padding, extension, config);
    // Correction file sits next to the output with a ".lwcdf" inner extension.
    string lwcdfPath = Path.Combine(Path.GetDirectoryName(path), Path.GetFileNameWithoutExtension(filename) + ".lwcdf" + extension);
    // 24-bit lossy PCM only when decoding HDCD to full resolution.
    AudioPCMConfig lossypcm = new AudioPCMConfig((config.detectHDCD && config.decodeHDCD && !config.decodeHDCDtoLW16) ? 24 : 16, 2, sampleRate);
    IAudioDest lossyDest = GetAudioDest(AudioEncoderType.Lossless, path, lossypcm, finalSampleCount, padding, extension, config);
    IAudioDest lwcdfDest = audioEncoderType == AudioEncoderType.Hybrid
        ? GetAudioDest(AudioEncoderType.Lossless, lwcdfPath, lossypcm, finalSampleCount, padding, extension, config)
        : null;
    return new LossyWAVWriter(lossyDest, lwcdfDest, config.lossyWAVQuality, pcm);
}
// Allocates a buffer sized for reads from the given source, adopting the
// source's PCM format.
public AudioBuffer(IAudioSource source, int _size)
{
    pcm = source.PCM;
    size = _size;
}
// Allocates an empty audio buffer of the given capacity (in samples).
public AudioBuffer(AudioPCMConfig _pcm, int _size)
{
    Length = 0; // no samples yet
    PCM = _pcm;
    Size = _size;
}