public void Encode()
{
    // At least one analysis target (ReplayGain or DR) must be configured.
    if (this.trackGain == null && this.drMeter == null)
    {
        throw new SkipEncodingItemException("Neither ReplayGain nor DynamicRange to calculate.");
    }

    var sampleBuffer = new AudioBuffer(audioSource.PCM, FileEncoderBase.BufferSize);
    while (audioSource.Read(sampleBuffer, FileEncoderBase.BufferSize) > 0)
    {
        if (this.trackGain != null)
            DspHelper.AnalyzeSamples(this.trackGain, sampleBuffer);
        if (this.drMeter != null)
            this.drMeter.Feed(sampleBuffer.Samples, sampleBuffer.Length);

        // Report progress as the fraction of the source consumed; the
        // handler may request cancellation through the event args.
        var progressArgs = new ProgressChangedEventArgs((double)this.audioSource.Position / this.audioSource.Length);
        this.OnProgressChanged(progressArgs);
        if (progressArgs.Cancel)
        {
            // Drop both analyzers so no partial results get reported.
            this.trackGain = null;
            this.drMeter = null;
            return;
        }
    }

    if (this.drMeter != null)
        this.drMeter.Finish();
}
public void SOXResamplerConstructorTest()
{
    // 44.1 kHz -> 48 kHz, mono, 32-bit pipeline.
    AudioPCMConfig srcFormat = new AudioPCMConfig(32, 1, 44100);
    AudioPCMConfig dstFormat = new AudioPCMConfig(32, 1, 48000);
    SOXResamplerConfig cfg;
    cfg.Quality = SOXResamplerQuality.Very;
    cfg.Phase = 50;
    cfg.AllowAliasing = false;
    cfg.Bandwidth = 0;
    SOXResampler resampler = new SOXResampler(srcFormat, dstFormat, cfg);

    AudioBuffer input = new AudioBuffer(srcFormat, 400 * srcFormat.SampleRate / 1000);
    AudioBuffer output = new AudioBuffer(dstFormat, input.Size * 3);
    int outPos = 0;
    double error = 0;
    for (int block = 0; block < 100; block++)
    {
        // Fill the source block with a continuous sine wave.
        input.Prepare(-1);
        for (int s = 0; s < input.Size; s++)
            input.Float[s, 0] = (float)Math.Sin((block * input.Size + s) * Math.PI / 44100);
        input.Length = input.Size;

        resampler.Flow(input, output);

        // Accumulate the deviation from the ideal resampled sine.
        for (int s = 0; s < output.Length; s++)
            error += output.Float[s, 0] - Math.Sin((outPos + s) * Math.PI / 48000);
        outPos += output.Length;
    }
    Assert.IsTrue(Math.Abs(error) < 0.00001, "Error too large");
}
public void BytesTest()
{
    AudioBuffer target = new AudioBuffer(AudioPCMConfig.RedBook, 1);

    // First sample set must produce the expected byte layout.
    target.Prepare(testSamples, testSamples.GetLength(0));
    CollectionAssert.AreEqual(testBytes, target.Bytes, "CUETools.Codecs.AudioBuffer.Bytes was not set correctly.");

    // Re-preparing with a second set must refresh the byte view.
    target.Prepare(testSamples2, testSamples2.GetLength(0));
    CollectionAssert.AreEqual(testBytes2, target.Bytes, "CUETools.Codecs.AudioBuffer.Bytes was not set correctly.");
}
public AudioPipe(AudioPCMConfig pcm, int size)
{
    // Allocate one buffer per side of the pipe, both sized to 'size' samples.
    this.pcm = pcm;
    _readBuffer = new AudioBuffer(pcm, size);
    _writeBuffer = new AudioBuffer(pcm, size);
    _maxLength = size;
    _sampleLen = -1;   // total length not known yet
    _samplePos = 0;    // start at the beginning of the stream
}
public FlakeFileReader(string path)
{
    // Open the FLAC file and mirror its PCM parameters into a WaveFormat.
    _flakeFileReader = new FlakeReader(path, null);
    _streamInfo = _flakeFileReader.PCM;
    _waveFormat = new WaveFormat(_streamInfo.SampleRate, _streamInfo.BitsPerSample, _streamInfo.ChannelCount);

    // Scratch buffers sized from the stream's byte rate.
    // NOTE(review): 65546 looks like a typo for 65536 (0x10000); preserved
    // here since it is only a buffer size — confirm before changing.
    var len = 65546 * _streamInfo.BitsPerSample / 8 * _streamInfo.ChannelCount;
    _audioBuffer = new AudioBuffer(_streamInfo, len);
    _decompressBuffer = new byte[len];
}
public MixingBuffer(AudioPCMConfig pcm, int size, int sources)
{
    // One slot per contributing source: its own audio buffer plus a
    // per-source gain and a "has data" flag.
    source = new AudioBuffer[sources];
    volume = new float[sources];
    filled = new bool[sources];
    for (int slot = 0; slot < sources; slot++)
        source[slot] = new AudioBuffer(pcm, size);
}
public static void AnalyzeSamples(TrackGain trackGain, AudioBuffer buffer)
{
    // De-interleave the buffer into separate per-channel arrays, which is
    // the shape the ReplayGain analyzer expects.
    // NOTE(review): indexes channels 0 and 1 unconditionally — assumes
    // stereo input; a mono buffer would fail here. Confirm callers.
    int count = buffer.Length;
    int[] left = new int[count];
    int[] right = new int[count];
    for (int i = 0; i < count; i++)
    {
        left[i] = buffer.Samples[i, 0];
        right[i] = buffer.Samples[i, 1];
    }
    trackGain.AnalyzeSamples(left, right);
}
public void Write(IAudioDest dest)
{
    // The requested window must lie inside the disc's audio length
    // (588 samples per CD frame).
    if (start < 0 || start > end || end > toc.AudioLength * 588)
        throw new ArgumentOutOfRangeException();

    var generator = new NoiseAndErrorsGenerator(AudioPCMConfig.RedBook, end - start, seed, offset + start, errors, maxStrideErrors);
    var chunk = new AudioBuffer(generator, 588 * 10);
    var rnd = new Random(seed);
    //dest.Position = start;

    // Pull randomly sized chunks so buffer-boundary handling is exercised.
    while (generator.Remaining > 0)
    {
        generator.Read(chunk, rnd.Next(1, chunk.Size));
        dest.Write(chunk);
    }
}
private static Tuple<int, string> WavToFlacHelper(WAVReader audioSource, string targetFlacPath)
{
    // Transcodes the already-open WAV source into a FLAC file at
    // targetFlacPath; returns the sample rate and the destination path.
    AudioBuffer transferBuffer = new AudioBuffer(audioSource, 0x10000);
    FlakeWriterSettings settings = new FlakeWriterSettings();
    settings.PCM = audioSource.PCM;
    FlakeWriter audioDestination = new FlakeWriter(targetFlacPath, settings);

    while (audioSource.Read(transferBuffer, -1) != 0)
        audioDestination.Write(transferBuffer);

    int sampleRate = settings.PCM.SampleRate;
    audioDestination.Close();
    audioSource.Close();
    return new Tuple<int, string>(sampleRate, targetFlacPath);
}
public int Step(AudioBuffer sampleBuffer)
{
    // Nothing left to pull from the source.
    if (Remaining == 0)
        return 0;

    int samplesRead = source.Read(sampleBuffer, Remaining);
    if (samplesRead == 0)
        return 0;

    // Feed the AccurateRip verifier, if one is attached.
    if (ar != null)
        ar.Write(sampleBuffer);

    // Feed the HDCD detector; drop it once the configured detection window
    // has passed without a signature ("{0:s}" formats the detector status).
    if (hdcd != null)
    {
        hdcd.Write(sampleBuffer);
        bool windowExpired = cueSheet.Config.wait750FramesForHDCD
            && source.Position > start + 750 * 588
            && string.Format("{0:s}", hdcd) == "";
        if (windowExpired)
            hdcd = null;
    }

    return samplesRead;
}
/// <summary> Converts a wav stream to flac. </summary>
/// <param name="wavStream">Readable stream containing the source WAV data</param>
/// <param name="flacStream">Writable stream that receives the FLAC data</param>
/// <returns>Sample rate of the source audio</returns>
public static int Wav2Flac(Stream wavStream, Stream flacStream)
{
    int sampleRate = 0;
    IAudioSource audioSource = new WAVReader(null, wavStream);
    AudioBuffer buff = new AudioBuffer(audioSource, 0x10000);
    FlakeWriter flakewriter = new FlakeWriter(null, flacStream, audioSource.PCM);
    sampleRate = audioSource.PCM.SampleRate;
    FlakeWriter audioDest = flakewriter;
    while (audioSource.Read(buff, -1) != 0)
    {
        audioDest.Write(buff);
    }
    // BUG FIX: the writer was never closed, leaving the FLAC output
    // unflushed/unfinalized (the file-based overload does call Close()).
    // NOTE(review): Close() may also close flacStream — callers reading the
    // stream afterwards should use a MemoryStream's ToArray().
    audioDest.Close();
    return sampleRate;
}
public static CDRepairEncode VerifyNoise(CDImageLayout toc, int seed, int offset, int start, int end, int errors, bool do_verify, bool do_encode)
{
    // The requested window must lie inside the disc's audio length
    // (588 samples per CD frame).
    if (start < 0 || start > end || end > toc.AudioLength * 588)
        throw new ArgumentOutOfRangeException();

    var generator = new NoiseAndErrorsGenerator(AudioPCMConfig.RedBook, end - start, seed, offset + start, errors);
    var chunk = new AudioBuffer(generator, 588 * 100);
    var verifier = new AccurateRipVerify(toc, null);
    var encode = new CDRepairEncode(verifier, stride, npar, do_verify, do_encode);
    var rnd = new Random(seed);
    verifier.Position = start;

    // Feed randomly sized chunks to exercise buffer-boundary handling.
    while (generator.Remaining > 0)
    {
        generator.Read(chunk, rnd.Next(1, chunk.Size));
        verifier.Write(chunk);
    }
    verifier.Close();
    return encode;
}
public int Read(AudioBuffer result, int maxLength)
{
    // Mixes all filled sources of the current mixing buffer into 'result',
    // weighting each source by its volume (normalized when total gain > 1).
    if (maxLength > (BufferSize - mixoffs) || maxLength < 0)
        maxLength = (BufferSize - mixoffs);  // clamp to what's left in the current mix window
    result.Prepare(maxLength);
    if (mixbuff == null)
        mixbuff = LockFilledBuffer();
    // Total gain of all active sources; used to normalize so the sum never clips.
    float sumVolume = 0.0f;
    for (int iSource = 0; iSource < mixbuff.source.Length; iSource++)
        if (mixbuff.filled[iSource])
            sumVolume += mixbuff.volume[iSource];
    // Effective per-source weight: zero for inactive slots, normalized otherwise.
    for (int iSource = 0; iSource < mixbuff.source.Length; iSource++)
        volume[iSource] = mixbuff.filled[iSource] ? mixbuff.volume[iSource] / Math.Max(1.0f, sumVolume) : 0.0f;
    // Weighted per-sample, per-channel mixdown.
    for (int iSmp = 0; iSmp < result.Length; iSmp++)
    {
        for (int iChan = 0; iChan < result.PCM.ChannelCount; iChan++)
        {
            float sample = 0.0f;
            for (int iSource = 0; iSource < mixbuff.source.Length; iSource++)
                sample += mixbuff.source[iSource].Float[mixoffs + iSmp, iChan] * volume[iSource];
            result.Float[iSmp, iChan] = sample;
        }
    }
    mixoffs += result.Length;
    if (mixoffs == BufferSize)
    {
        // Mixing buffer fully consumed: hand it back and start a new window.
        UnlockFilledBuffer(mixbuff);
        mixbuff = null;
        mixoffs = 0;
    }
    samplePos += result.Length;
    // Notify listeners that mixed audio was produced.
    if (AudioRead != null)
    {
        audioReadArgs.source = this;
        audioReadArgs.buffer = result;
        AudioRead(this, audioReadArgs);
    }
    return result.Length;
}
/// <summary>
/// Creates a new WASAPI output.
/// </summary>
/// <param name="device">Endpoint device to play on</param>
/// <param name="shareMode">Shared or exclusive audio client mode</param>
/// <param name="useEventSync">true to synchronize via events; false to poll with sleeps</param>
/// <param name="latency">Requested latency in milliseconds</param>
/// <param name="pcm">PCM format of the audio to be played</param>
public WasapiOut(MMDevice device, AudioClientShareMode shareMode, bool useEventSync, int latency, AudioPCMConfig pcm)
{
    this.audioClient = device.AudioClient;
    this.shareMode = shareMode;
    this.isUsingEventSync = useEventSync;
    this.latencyMilliseconds = latency;
    this.pcm = pcm;
    this.outputFormat = new NAudio.Wave.WaveFormatExtensible(pcm.SampleRate, pcm.BitsPerSample, pcm.ChannelCount);

    // Fail fast when the device cannot take this format at all.
    NAudio.Wave.WaveFormatExtensible closestSampleRateFormat;
    if (!audioClient.IsFormatSupported(shareMode, outputFormat, out closestSampleRateFormat))
        throw new NotSupportedException("PCM format mismatch");

    Init();
    bufferFrameCount = audioClient.BufferSize;

    // Two read buffers, each sized to the audio client's buffer.
    readBuffers = new AudioBuffer[2];
    readBuffers[0] = new AudioBuffer(pcm, bufferFrameCount);
    readBuffers[1] = new AudioBuffer(pcm, bufferFrameCount);
    //if (this.shareMode == AudioClientShareMode.Exclusive)
    //    this.latencyMilliseconds = (int)(this.audioClient.DefaultDevicePeriod / 10000);
}
public static void MyClassInitialize(TestContext testContext)
{
    // Builds the shared test fixtures: a one-track TOC, a clean rip (wav),
    // a corrupted copy (wav2), reference parity/CRC from the clean rip, and
    // two repair scenarios — exact alignment, and shifted by 'offset' samples.
    toc = new CDImageLayout(1, 1, 1, string.Format("0 {0}", (finalSampleCount / 588).ToString()));
    ar = new AccurateRipVerify(toc, null);
    ar2 = new AccurateRipVerify(toc, null);
    ar3 = new AccurateRipVerify(toc, null);
    // Same seed -> wav and wav2 start out identical.
    new Random(2423).NextBytes(wav);
    new Random(2423).NextBytes(wav2);
    // Corrupt wav2 with stride/4 random single-byte errors.
    Random rnd = new Random(987);
    for (int i = 0; i < stride / 4; i++ )
        wav2[(int)(rnd.NextDouble() * (wav2.Length - 1))] = (byte)(rnd.NextDouble() * 255);
    AudioBuffer buff = new AudioBuffer(AudioPCMConfig.RedBook, 0);
    // Encode pass over the clean data produces the reference parity + CRC.
    CDRepairEncode encode = new CDRepairEncode(ar, stride, npar, false, true);
    buff.Prepare(wav, finalSampleCount);
    ar.Init(toc);
    ar.Write(buff);
    ar.Close();
    parity = encode.Parity;
    crc = encode.CRC;
    // Decode/verify pass over the corrupted data, no offset.
    decode = new CDRepairEncode(ar2, stride, npar, true, false);
    buff.Prepare(wav2, finalSampleCount);
    ar2.Init(toc);
    ar2.Write(buff);
    ar2.Close();
    int actualOffset;
    bool hasErrors;
    decode.FindOffset(npar, parity, 0, crc, out actualOffset, out hasErrors);
    fix = decode.VerifyParity(parity, actualOffset);
    // Decode/verify pass over the corrupted data shifted by 'offset' samples:
    // zero padding in front, tail truncated accordingly.
    decode2 = new CDRepairEncode(ar3, stride, npar, true, false);
    ar3.Init(toc);
    buff.Prepare(new byte[offset * 4], offset);
    ar3.Write(buff);
    buff.Prepare(wav2, finalSampleCount - offset);
    ar3.Write(buff);
    ar3.Close();
    decode2.FindOffset(npar, parity, 0, crc, out actualOffset, out hasErrors);
    fix2 = decode2.VerifyParity(parity, actualOffset);
}
/// <summary> Converts a wav file to a flac file. </summary>
/// <param name="wavName">Path of the source wav file</param>
/// <param name="flacName">Path of the flac file to create</param>
/// <returns>Sample rate of the source audio</returns>
public static int Wav2Flac(String wavName, string flacName)
{
    int sampleRate = 0;
    IAudioSource audioSource = new WAVReader(wavName, null);
    AudioBuffer buff = new AudioBuffer(audioSource, 0x10000);
    FlakeWriter flakewriter = new FlakeWriter(flacName, audioSource.PCM);
    sampleRate = audioSource.PCM.SampleRate;
    FlakeWriter audioDest = flakewriter;
    while (audioSource.Read(buff, -1) != 0)
    {
        audioDest.Write(buff);
    }
    // BUG FIX: Close() was called twice on the destination writer; the
    // duplicate call could throw or mask errors on an already-closed writer.
    audioDest.Close();
    audioSource.Close();
    return sampleRate;
}
/// <summary>
/// Forwards a buffer to the wrapped external-encoder writer, turning a
/// pipe failure caused by the encoder process dying into a descriptive
/// IOException.
/// </summary>
/// <param name="buff">Audio samples to pass to the encoder</param>
public void Write(AudioBuffer buff)
{
    try
    {
        wrt.Write(buff);
    }
    catch (IOException ex)
    {
        if (_encoderProcess.HasExited)
        {
            // The external encoder died: surface its exit code to the caller.
            throw new IOException(string.Format("{0} has exited prematurely with code {1}", m_settings.Path, _encoderProcess.ExitCode), ex);
        }
        // BUG FIX: was 'throw ex;', which resets the stack trace;
        // a bare rethrow preserves the original trace.
        throw;
    }
    //_sampleLen += sampleCount;
}
/// <summary>
/// Streams the whole audio source once, computing its SHA-1 and CRC32 in a
/// single pass while reporting progress; the source is closed afterwards.
/// Sets this.SHA1 and this.CRC32. Returns early (hashes incomplete) if a
/// progress handler requests cancellation.
/// </summary>
public void ComputeHashes()
{
    try
    {
        // BUG FIX: SHA1CryptoServiceProvider is IDisposable and was never
        // disposed; wrap it in a using block.
        using (SHA1CryptoServiceProvider sha1 = new SHA1CryptoServiceProvider())
        {
            this.CRC32 = 0;
            long totalSamples = this.audioSource.Length;
            long processedSamples = 0;
            AudioBuffer buffer = new AudioBuffer(this.audioSource.PCM, 44100);
            while (this.audioSource.Read(buffer, 44100) > 0)
            {
                byte[] bufferBytes = buffer.Bytes;
                // The final block must go through TransformFinalBlock so the
                // hash can be finalized.
                if (this.audioSource.Position == this.audioSource.Length)
                    sha1.TransformFinalBlock(bufferBytes, 0, buffer.ByteLength);
                else
                    sha1.TransformBlock(bufferBytes, 0, buffer.ByteLength, null, 0);
                this.CRC32 = Crc32.ComputeChecksum(this.CRC32, bufferBytes, 0, buffer.ByteLength);
                processedSamples += buffer.Length;
                ProgressChangedEventArgs eventArgs = new ProgressChangedEventArgs((double)processedSamples / totalSamples);
                this.OnProgressChanged(eventArgs);
                if (eventArgs.Cancel)
                    return;
            }
            this.SHA1 = sha1.Hash;
        }
    }
    finally
    {
        this.audioSource.Close();
    }
}
public void Flow(AudioBuffer input, AudioBuffer output)
{
    // Both buffers must match the formats this resampler was built for.
    bool formatMismatch =
        input.PCM.SampleRate != inputPCM.SampleRate ||
        output.PCM.SampleRate != outputPCM.SampleRate ||
        input.PCM.ChannelCount != inputPCM.ChannelCount ||
        output.PCM.ChannelCount != outputPCM.ChannelCount;
    if (formatMismatch)
        throw new NotSupportedException();

    // Only the direct (non-rateUp2) path is implemented.
    if (rateUp2 != null)
        throw new NotSupportedException();

    output.Prepare(-1);
    int produced = output.Size;
    for (int ch = 0; ch < inputPCM.ChannelCount; ch++)
    {
        // Push the input samples through this channel's rate converter;
        // 'produced' comes back as the number of output samples.
        rate[ch].input(input.Float, ch, input.Length);
        rate[ch].process();
        rate[ch].output(output.Float, ch, ref produced);
    }
    output.Length = produced;
}
public void Init(frmCUEPlayer parent)
{
    // Hooks this MDI child up to the default audio endpoint and the parent's
    // mixer. Tries 44.1 kHz output first; if the device rejects it, falls
    // back to 48 kHz with a SOX resampler between mixer and player.
    MdiParent = parent;
    _device = WasapiOut.GetDefaultAudioEndpoint();
    _device.AudioEndpointVolume.OnVolumeNotification += new AudioEndpointVolumeNotificationDelegate(AudioEndpointVolume_OnVolumeNotification);
    mediaSliderVolume.Value = (int)(_device.AudioEndpointVolume.MasterVolumeLevelScalar * 100);
    //mediaSliderVolume.Maximum = (int)(_device.AudioEndpointVolume.VolumeRange);
    Show();
    int delay = 100;
    // First choice: 32-bit stereo 44.1 kHz shared-mode output, no resampling.
    try
    {
        _player = new WasapiOut(_device, NAudio.CoreAudioApi.AudioClientShareMode.Shared, true, delay, new AudioPCMConfig(32, 2, 44100));
    }
    catch
    {
        // Device refused the format; try the 48 kHz fallback below.
        _player = null;
    }
    if (_player == null)
    {
        // Fallback: 48 kHz output fed through a SOX resampler.
        try
        {
            _player = new WasapiOut(_device, NAudio.CoreAudioApi.AudioClientShareMode.Shared, true, delay, new AudioPCMConfig(32, 2, 48000));
            SOXResamplerConfig cfg;
            cfg.Quality = SOXResamplerQuality.Very;
            cfg.Phase = 50;
            cfg.AllowAliasing = false;
            cfg.Bandwidth = 0;
            _resampler = new SOXResampler(parent.Mixer.PCM, _player.PCM, cfg);
            resampled = new AudioBuffer(_player.PCM, parent.Mixer.BufferSize * 2 * parent.Mixer.PCM.SampleRate / _player.PCM.SampleRate);
        }
        catch (Exception ex)
        {
            // No usable output at all; playback stays disabled.
            _player = null;
            Trace.WriteLine(ex.Message);
        }
    }
    parent.Mixer.AudioRead += new EventHandler<AudioReadEventArgs>(Mixer_AudioRead);
    if (_player != null)
        _player.Play();
}
// Worker-thread body: keeps reading from the source into the write buffer,
// then swaps it with the read buffer under the lock for the consumer.
// Exceptions are stashed in _ex (release builds) so Read() can rethrow them.
private void Decompress(object o)
{
#if !DEBUG
    try
#endif
    {
        bool done = false;
        do
        {
            // A zero-length read marks end of stream.
            done = _source.Read(_writeBuffer, -1) == 0;
            lock (this)
            {
                // Wait until the consumer has taken the previous buffer,
                // unless shutdown was requested.
                while (_haveData && !_close)
                {
                    Monitor.Wait(this);
                }
                if (_close)
                {
                    break;
                }
                // Publish the freshly filled buffer to the consumer.
                AudioBuffer temp = _writeBuffer;
                _writeBuffer = _readBuffer;
                _readBuffer = temp;
                _haveData = true;
                Monitor.Pulse(this);
            }
        } while (!done);
    }
#if !DEBUG
    catch (Exception ex)
    {
        // Hand the failure to the consumer thread.
        lock (this)
        {
            _ex = ex;
            Monitor.Pulse(this);
        }
    }
#endif
}
public void SeekTest()
{
    var reader = new FLACReader("test.flac", null);
    var first = new AudioBuffer(reader, 16536);
    var second = new AudioBuffer(reader, 16536);

    // Reading from the start, seeking back, and re-reading must yield
    // identical samples and positions.
    Assert.AreEqual(0, reader.Position);
    reader.Read(first, 7777);
    Assert.AreEqual(7777, reader.Position);
    reader.Position = 0;
    Assert.AreEqual(0, reader.Position);
    reader.Read(second, 7777);
    Assert.AreEqual(7777, reader.Position);
    AudioBufferTest.AreEqual(first, second);

    // Same check for a seek back into the middle of the stream.
    reader.Read(first, 7777);
    Assert.AreEqual(7777+7777, reader.Position);
    reader.Position = 7777;
    Assert.AreEqual(7777, reader.Position);
    reader.Read(second, 7777);
    Assert.AreEqual(7777+7777, reader.Position);
    AudioBufferTest.AreEqual(first, second);

    reader.Close();
}
/// <summary>
/// Sets up a native HDCD decoder instance (via P/Invoke) for the given
/// channel count, sample rate, and output bit depth. When 'decode' is true
/// an output buffer and write callback are attached; otherwise the decoder
/// is used for detection only.
/// </summary>
public HDCDDotNet (int channels, int sample_rate, int output_bps, bool decode)
{
    _decoder = IntPtr.Zero;
#if !MONO
    // Output buffer is only needed when actually decoding (256 samples;
    // the 44100 is the AudioPCMConfig's sample-rate field).
    if (decode)
        _audioBuffer = new AudioBuffer(new AudioPCMConfig(output_bps, channels, 44100), 256);
    _decoder = hdcd_decoder_new();
    _channelCount = channels;
    _bitsPerSample = output_bps;
    if (_decoder == IntPtr.Zero)
        throw new Exception("Failed to initialize HDCD decoder.");
    // Configure the native decoder; input depth is fixed at 16 bits.
    bool b = true;
    b &= hdcd_decoder_set_num_channels(_decoder, (short) _channelCount);
    b &= hdcd_decoder_set_sample_rate(_decoder, sample_rate);
    b &= hdcd_decoder_set_input_bps(_decoder, 16);
    b &= hdcd_decoder_set_output_bps(_decoder, (short)_bitsPerSample);
    if (!b)
        throw new Exception("Failed to set up HDCD _decoder parameters.");
    // GCHandle pins this object so the native write callback can reach it.
    _decoderCallback = decode ? new hdcd_decoder_write_callback(DecoderCallback) : null;
    _gch = GCHandle.Alloc(this);
    hdcd_decoder_init_status status = hdcd_decoder_init(_decoder, IntPtr.Zero, _decoderCallback, (IntPtr) _gch);
    // Map native init status codes onto managed exceptions.
    switch (status)
    {
        case hdcd_decoder_init_status.HDCD_DECODER_INIT_STATUS_OK:
            break;
        case hdcd_decoder_init_status.HDCD_DECODER_INIT_STATUS_MEMORY_ALOCATION_ERROR:
            throw new Exception("Memory allocation error.");
        case hdcd_decoder_init_status.HDCD_DECODER_INIT_STATUS_INVALID_NUM_CHANNELS:
            throw new Exception("Invalid number of channels.");
        case hdcd_decoder_init_status.HDCD_DECODER_INIT_STATUS_INVALID_SAMPLE_RATE:
            throw new Exception("Invalid sample rate.");
        default:
            throw new Exception("Unknown error(" + status.ToString() + ").");
    }
#else
    throw new Exception("HDCD unsupported.");
#endif
}
/// <summary>
/// Wraps raw 16-bit mono 16 kHz PCM bytes in a WAV container in memory,
/// then transcodes that WAV to FLAC and returns the FLAC bytes.
/// Returns null if the produced WAV does not report 16 kHz.
/// </summary>
/// <param name="Buffer">Raw PCM bytes (two bytes per sample)</param>
private byte[] Wav2FlacBuffConverter(byte[] Buffer)
{
    Stream OutWavStream = new MemoryStream();
    Stream OutFlacStream = new MemoryStream();
    AudioPCMConfig pcmconf = new AudioPCMConfig(16, 1, 16000);
    WAVWriter wr = new WAVWriter(null, OutWavStream, pcmconf);
    wr.Write(new AudioBuffer(pcmconf, Buffer, Buffer.Length / 2));
    OutWavStream.Seek(0, SeekOrigin.Begin);
    WAVReader audioSource = new WAVReader(null, OutWavStream);
    if (audioSource.PCM.SampleRate != 16000)
        return null;
    AudioBuffer buff = new AudioBuffer(audioSource, 0x10000);
    FlakeWriter flakeWriter = new FlakeWriter(null, OutFlacStream, audioSource.PCM);
    flakeWriter.CompressionLevel = 8;
    while (audioSource.Read(buff, -1) != 0)
    {
        flakeWriter.Write(buff);
    }
    // NOTE(review): flakeWriter is never closed here — presumably because
    // Close() would also close OutFlacStream; confirm the FLAC image is
    // complete for this writer version.
    OutFlacStream.Seek(0, SeekOrigin.Begin);
    byte[] barr = new byte[OutFlacStream.Length];
    // BUG FIX: Stream.Read may return fewer bytes than requested; loop
    // until the whole stream has been copied instead of ignoring the
    // return value.
    int read = 0;
    while (read < barr.Length)
    {
        int n = OutFlacStream.Read(barr, read, barr.Length - read);
        if (n == 0)
            break;
        read += n;
    }
    return barr;
}
private static byte[] WaveSamplesToFlake(byte[] combinedChunks)
{
    // Wrap the raw 16-bit mono 22.05 kHz PCM bytes in an AudioBuffer
    // (two bytes per sample) and compress them to an in-memory FLAC image.
    var audioBuffer = new AudioBuffer(new AudioPCMConfig(16, 1, 22050), combinedChunks, combinedChunks.Length / 2);
    byte[] flakeBuffer;
    using (var flakeStream = new MemoryStream())
    {
        var flakeWriter = new FlakeWriter(null, flakeStream, new FlakeWriterSettings { PCM = audioBuffer.PCM, EncoderMode = "7" });
        flakeWriter.Settings.Padding = 1;
        flakeWriter.DoSeekTable = false;
        // Announcing the final sample count up front lets the writer
        // finalize once the last sample has been written.
        flakeWriter.FinalSampleCount = audioBuffer.Length;
        flakeWriter.Write(audioBuffer);

        // Copy the encoded image out of the stream.
        flakeStream.Position = 0;
        using (var br = new BinaryReader(flakeStream))
        {
            flakeBuffer = new byte[flakeStream.Length];
            br.Read(flakeBuffer, 0, flakeBuffer.Length);
        }
    }
    return flakeBuffer;
}
/// <summary>
/// Background pump: drains mixed audio from the ring buffer and pushes it
/// to the Icecast writer. Any write failure triggers teardown of the
/// writer; the loop itself runs for the lifetime of the thread, continuing
/// to drain the buffer even after the writer is gone.
/// </summary>
private void FlushThread()
{
    AudioBuffer result = new AudioBuffer(_mixer.PCM, _mixer.BufferSize);
    while (true)
    {
        buffer.Read(result, -1);
        if (_icecastWriter != null && !close)
        {
            try
            {
                _icecastWriter.Write(result);
            }
            // FIX: 'catch (Exception ex)' declared an unused variable
            // (compiler warning CS0168); any write failure simply flags
            // shutdown.
            catch (Exception)
            {
                close = true;
            }
        }
        if (_icecastWriter != null && close)
        {
            _icecastWriter.Delete();
            _icecastWriter = null;
        }
    }
}
public int Read(AudioBuffer buff, int maxLength)
{
    // Consumer side of the decode pipe: starts the worker (Go), then either
    // swaps the filled read buffer straight into 'buff' (zero-copy) or
    // copies a partial range when the caller asked for fewer samples.
    Go();
    bool needToCopy = false;
    if (_bufferPos != 0)
        needToCopy = true;  // a previous partial read left data mid-buffer
    else
        lock (this)
        {
            // Wait until the worker has produced data or failed.
            while (!_haveData && _ex == null)
                Monitor.Wait(this);
            if (_ex != null)
                throw _ex;
            if (_bufferPos == 0 && (maxLength < 0 || _readBuffer.Length <= maxLength))
            {
                // Whole buffer fits the request: hand it over wholesale.
                buff.Swap(_readBuffer);
                _haveData = false;
                Monitor.Pulse(this);
            }
            else
                needToCopy = true;
        }
    if (needToCopy)
    {
        // Copy out a slice starting at the current intra-buffer offset.
        buff.Prepare(_readBuffer, _bufferPos, maxLength);
        _bufferPos += buff.Length;
        if (_bufferPos == _readBuffer.Length)
        {
            // Read buffer exhausted: release it back to the producer.
            _bufferPos = 0;
            lock (this)
            {
                _haveData = false;
                Monitor.Pulse(this);
            }
        }
    }
    _samplePos += buff.Length;
    return buff.Length;
}
//public new void Dispose()
//{
//    _buffer.Clear();
//}
// Shuts down the decode worker thread, then releases the source and the
// pipe buffers.
public void Close()
{
    // Flag shutdown and wake the worker so it can observe _close and exit.
    lock (this)
    {
        _close = true;
        Monitor.Pulse(this);
    }
    if (_workThread != null)
    {
        _workThread.Join();
        _workThread = null;
    }
    if (_source != null)
    {
        // Only close the source if this object owns it.
        if (own)
            _source.Close();
        _source = null;
    }
    if (_readBuffer != null)
    {
        //_readBuffer.Clear();
        _readBuffer = null;
    }
    if (_writeBuffer != null)
    {
        //_writeBuffer.Clear();
        _writeBuffer = null;
    }
}
// Intentional no-op: this destination discards all audio written to it.
public void Write(AudioBuffer buff) { }
// Sets this buffer's Length to cover the window of _src starting at
// _offset, clamped to this buffer's Size and (when _length >= 0) to the
// requested _length.
// NOTE(review): no sample data is copied here — only Length is set.
// Presumably the data transfer happens elsewhere (or this is a partial
// implementation); confirm against callers before relying on contents.
public unsafe void Prepare(AudioBuffer _src, int _offset, int _length)
{
    Length = Math.Min(Size, _src.Length - _offset);
    if (_length >= 0)
        Length = Math.Min(Length, _length);
}
public int Write(AudioBuffer buff)
{
    // Producer side of the pipe: append 'buff' to the pending write buffer,
    // growing it when needed, and publish it to the reader as soon as the
    // reader has consumed the previous one. Returns the pending length.
    if (_writeBuffer.Size < _writeBuffer.Length + buff.Length)
    {
        // Grow the write buffer by the incoming buffer's capacity.
        AudioBuffer realloced = new AudioBuffer(pcm, _writeBuffer.Size + buff.Size);
        realloced.Prepare(_writeBuffer, 0, _writeBuffer.Length);
        _writeBuffer = realloced;
    }
    if (_writeBuffer.Length == 0)
        _writeBuffer.Prepare(buff, 0, buff.Length);
    else
    {
        // Append after the existing pending samples.
        _writeBuffer.Load(_writeBuffer.Length, buff, 0, buff.Length);
        _writeBuffer.Length += buff.Length;
    }
    lock (this)
    {
        // If the reader is idle, swap buffers and signal data available.
        if (!_haveData)
        {
            AudioBuffer temp = _writeBuffer;
            _writeBuffer = _readBuffer;
            _writeBuffer.Length = 0;
            _readBuffer = temp;
            _haveData = true;
            Monitor.Pulse(this);
        }
    }
    return _writeBuffer.Length;
}
// Worker-thread body: keeps reading from the source into the write buffer,
// then swaps it with the read buffer under the lock for the consumer.
// Exceptions are stashed in _ex (release builds) so the reader can rethrow.
private void Decompress(object o)
{
#if !DEBUG
    try
#endif
    {
        bool done = false;
        do
        {
            // A zero-length read marks end of stream.
            done = _source.Read(_writeBuffer, -1) == 0;
            lock (this)
            {
                // Wait until the consumer has taken the previous buffer,
                // unless shutdown was requested.
                while (_haveData && !_close)
                    Monitor.Wait(this);
                if (_close)
                    break;
                // Publish the freshly filled buffer to the consumer.
                AudioBuffer temp = _writeBuffer;
                _writeBuffer = _readBuffer;
                _readBuffer = temp;
                _haveData = true;
                Monitor.Pulse(this);
            }
        } while (!done);
    }
#if !DEBUG
    catch (Exception ex)
    {
        // Hand the failure to the consumer thread.
        lock (this)
        {
            _ex = ex;
            Monitor.Pulse(this);
        }
    }
#endif
}
// Second pass of the CUE conversion: iterates every track/index of the TOC,
// pulls samples from the source files, optionally routes them through the
// HDCD decoder and the AccurateRip / CUETools DB verifiers, and writes the
// destination audio files according to the selected gap style (single file,
// gaps prepended/appended/left out, optional HTOA file). 'noOutput' turns
// this into a verify-only pass; 'destLengths' pre-computed per-file lengths.
// NOTE(review): control flow is too intertwined (shared iDest/iSource
// cursors, HDCD state, progress reporting) to restructure safely here.
// Also note the trailing 'throw ex;' in the release-build catch resets the
// stack trace — prefer a bare 'throw;' when this method is next touched.
public void WriteAudioFilesPass(string dir, CUEStyle style, int[] destLengths, bool htoaToFile, bool noOutput) { int iTrack, iIndex; AudioBuffer sampleBuffer = new AudioBuffer(AudioPCMConfig.RedBook, 0x10000); TrackInfo track; IAudioSource audioSource = null; IAudioDest audioDest = null; bool discardOutput; int iSource = -1; int iDest = -1; int samplesRemSource = 0; ApplyWriteOffset(); int destBPS = 16; hdcdDecoder = null; if (_config.detectHDCD && CUEProcessorPlugins.hdcd != null) { // currently broken verifyThenConvert on HDCD detection!!!! need to check for HDCD results higher try { destBPS = ((_outputLossyWAV && _config.decodeHDCDtoLW16) || !_config.decodeHDCDto24bit) ? 20 : 24; hdcdDecoder = Activator.CreateInstance(CUEProcessorPlugins.hdcd, 2, 44100, destBPS, _config.decodeHDCD) as IAudioDest; } catch { } if (hdcdDecoder == null || !_config.decodeHDCD) destBPS = 16; } if (style == CUEStyle.SingleFile || style == CUEStyle.SingleFileWithCUE) { iDest++; if (_isCD && style == CUEStyle.SingleFileWithCUE) _padding += Encoding.UTF8.GetByteCount(GetCUESheetContents(style)); audioDest = GetAudioDest(_destPaths[iDest], destLengths[iDest], destBPS, _padding, noOutput); } int currentOffset = 0, previousOffset = 0; int trackLength = (int)_toc.Pregap * 588; int diskLength = 588 * (int)_toc.AudioLength; int diskOffset = 0; // we init AR before CTDB so that CTDB gets inited with correct TOC if (isUsingAccurateRip || isUsingCUEToolsDB) _arVerify.Init(_toc); if (isUsingCUEToolsDB && !isUsingCUEToolsDBFix) { _CUEToolsDB.TOC = _toc; // This might be unnecessary, because they point to the same structure - if we modify _toc, _CUEToolsDB.TOC gets updated. Unless we set cueSheet.TOC... _CUEToolsDB.Init(_arVerify); } ShowProgress(String.Format("{2} track {0:00} ({1:00}%)...", 0, 0, noOutput ? 
"Verifying" : "Writing"), 0.0, null, null); #if !DEBUG try #endif { for (iTrack = 0; iTrack < TrackCount; iTrack++) { track = _tracks[iTrack]; if ((style == CUEStyle.GapsPrepended) || (style == CUEStyle.GapsLeftOut)) { iDest++; if (hdcdDecoder != null) (hdcdDecoder as IAudioFilter).AudioDest = null; if (audioDest != null) audioDest.Close(); audioDest = GetAudioDest(_destPaths[iDest], destLengths[iDest], destBPS, _padding, noOutput); } for (iIndex = 0; iIndex <= _toc[_toc.FirstAudio + iTrack].LastIndex; iIndex++) { int samplesRemIndex = (int)_toc.IndexLength(_toc.FirstAudio + iTrack, iIndex) * 588; if (iIndex == 1) { previousOffset = currentOffset; currentOffset = 0; trackLength = (int)_toc[_toc.FirstAudio + iTrack].Length * 588; } if ((style == CUEStyle.GapsAppended) && (iIndex == 1)) { if (hdcdDecoder != null) (hdcdDecoder as IAudioFilter).AudioDest = null; if (audioDest != null) audioDest.Close(); iDest++; audioDest = GetAudioDest(_destPaths[iDest], destLengths[iDest], destBPS, _padding, noOutput); } if ((style == CUEStyle.GapsAppended) && (iIndex == 0) && (iTrack == 0)) { discardOutput = !htoaToFile; if (htoaToFile) { iDest++; audioDest = GetAudioDest(_destPaths[iDest], destLengths[iDest], destBPS, _padding, noOutput); } } else if ((style == CUEStyle.GapsLeftOut) && (iIndex == 0)) { discardOutput = true; } else { discardOutput = false; } while (samplesRemIndex != 0) { if (samplesRemSource == 0) { // if (_isCD && audioSource != null && audioSource is CDDriveReader) // updatedTOC = ((CDDriveReader)audioSource).TOC; if (audioSource != null) audioSource.Close(); audioSource = GetAudioSource(++iSource, _config.separateDecodingThread || _isCD); samplesRemSource = (int)_sources[iSource].Length; } int copyCount = Math.Min(samplesRemIndex, samplesRemSource); if (trackLength > 0 && !_isCD) { double trackPercent = (double)currentOffset / trackLength; ShowProgress(String.Format("{2} track {0:00} ({1:00}%)...", iIndex > 0 ? 
iTrack + 1 : iTrack, (int)(100 * trackPercent), noOutput ? "Verifying" : "Writing"), (int)diskOffset, (int)diskLength, _isCD ? string.Format("{0}: {1:00} - {2}", audioSource.Path, iTrack + 1, Metadata.Tracks[iTrack].Title) : audioSource.Path, discardOutput ? null : audioDest.Path); } copyCount = audioSource.Read(sampleBuffer, copyCount); if (copyCount == 0) throw new Exception("Unexpected end of file"); if (isUsingCUEToolsDB && isUsingCUEToolsDBFix) _CUEToolsDB.SelectedEntry.repair.Write(sampleBuffer); // we use AR after CTDB fix, so that we can verify what we fixed if (isUsingAccurateRip || isUsingCUEToolsDB) _arVerify.Write(sampleBuffer); if (!discardOutput) { if (!_config.detectHDCD || !_config.decodeHDCD) audioDest.Write(sampleBuffer); if (_config.detectHDCD && hdcdDecoder != null) { if (_config.wait750FramesForHDCD && diskOffset > 750 * 588 && string.Format("{0:s}", hdcdDecoder) == "") { (hdcdDecoder as IAudioFilter).AudioDest = null; hdcdDecoder = null; if (_config.decodeHDCD) { audioSource.Close(); audioDest.Delete(); throw new Exception("HDCD not detected."); } } else { if (_config.decodeHDCD) (hdcdDecoder as IAudioFilter).AudioDest = (discardOutput || noOutput) ? null : audioDest; hdcdDecoder.Write(sampleBuffer); } } } currentOffset += copyCount; diskOffset += copyCount; samplesRemIndex -= copyCount; samplesRemSource -= copyCount; CheckStop(); } } } if (hdcdDecoder != null) (hdcdDecoder as IAudioFilter).AudioDest = null; hdcdDecoder = null; if (audioSource != null) audioSource.Close(); audioSource = null; if (audioDest != null) audioDest.Close(); audioDest = null; } #if !DEBUG catch (Exception ex) { if (hdcdDecoder != null) (hdcdDecoder as IAudioFilter).AudioDest = null; hdcdDecoder = null; if (audioSource != null) try { audioSource.Close(); } catch { } audioSource = null; if (audioDest != null) try { audioDest.Delete(); } catch { } audioDest = null; throw ex; } #endif }
// Verify-only pass: streams the whole disc through one or more
// CUEToolsVerifyTask workers (currently forced to a single thread), feeding
// the AccurateRip / CUETools DB verifiers and reporting per-track progress.
// On failure every task is closed and the exception propagates; on success
// the tasks' results are combined into tasks[0] and hdcdDecoder is taken
// from it.
// NOTE(review): too order-sensitive to restructure safely here; also note
// the 'throw ex;' in the release-build catch resets the stack trace —
// prefer 'throw;' when this method is next touched.
public void VerifyAudio() { ApplyWriteOffset(); hdcdDecoder = null; // we init AR before CTDB so that CTDB gets inited with correct TOC if (isUsingAccurateRip || isUsingCUEToolsDB) _arVerify.Init(_toc); if (isUsingCUEToolsDB && !isUsingCUEToolsDBFix) { _CUEToolsDB.TOC = _toc; _CUEToolsDB.Init(_arVerify); } ShowProgress(String.Format("Verifying ({0:00}%)...", 0), 0.0, null, null); AudioBuffer sampleBuffer = new AudioBuffer(AudioPCMConfig.RedBook, 0x10000); List<CUEToolsVerifyTask> tasks = new List<CUEToolsVerifyTask>(); // also make sure all sources are seekable!!! // use overlapped io with large buffers? // ar.verify in each thread? int nThreads = 1;// _isCD || !_config.separateDecodingThread || isUsingCUEToolsDB || _config.detectHDCD ? 1 : Environment.ProcessorCount; int diskLength = 588 * (int)_toc.AudioLength; tasks.Add(new CUEToolsVerifyTask(this, 0, diskLength / nThreads, _arVerify)); for (int iThread = 1; iThread < nThreads; iThread++) tasks.Add(new CUEToolsVerifyTask(this, iThread * diskLength / nThreads, (iThread + 1) * diskLength / nThreads)); #if !DEBUG try #endif { int lastProgress = -588 * 75; int diskOffset = 0; int sourcesActive; do { sourcesActive = 0; for (int iSource = 0; iSource < tasks.Count; iSource++) { CUEToolsVerifyTask task = tasks[iSource]; if (task.Remaining == 0) continue; sourcesActive++; if (tasks.Count == 1 && task.source.Position - lastProgress >= 588 * 75) { lastProgress = (int)task.source.Position; int pos = 0; int trackStart = 0; int trackLength = (int)_toc.Pregap * 588; for (int iTrack = 0; iTrack < TrackCount; iTrack++) for (int iIndex = 0; iIndex <= _toc[_toc.FirstAudio + iTrack].LastIndex; iIndex++) { int indexLen = (int)_toc.IndexLength(_toc.FirstAudio + iTrack, iIndex) * 588; if (iIndex == 1) { trackStart = pos; trackLength = (int)_toc[_toc.FirstAudio + iTrack].Length * 588; } if (task.source.Position < pos + indexLen) { if (trackLength > 0 && !_isCD) { double trackPercent = (double)(task.source.Position - trackStart) / 
trackLength; ShowProgress(String.Format("{2} track {0:00} ({1:00}%)...", iIndex > 0 ? iTrack + 1 : iTrack, (int)(100 * trackPercent), "Verifying"), diskOffset, diskLength, task.source.Path, null); } iTrack = TrackCount; break; } pos += indexLen; } } else if (tasks.Count > 1) { ShowProgress(String.Format("Verifying ({0:00}%)...", (uint)(100.0 * diskOffset / diskLength)), diskOffset, diskLength, InputPath, null); } int copyCount = task.Step(sampleBuffer); if (copyCount == 0) throw new Exception("Unexpected end of file"); diskOffset += copyCount; CheckStop(); } } while (sourcesActive > 0); } #if !DEBUG catch (Exception ex) { tasks.ForEach(t => t.TryClose()); tasks.Clear(); throw ex; } #endif hdcdDecoder = tasks[0].hdcd; for (int iThread = 1; iThread < nThreads; iThread++) tasks[0].Combine(tasks[iThread]); tasks.ForEach(t => t.Close()); tasks.Clear(); }
public int Read(AudioBuffer buff, int maxLength)
{
    // Make sure the underlying reader exists, then delegate the read.
    Initialize();
    return rdr.Read(buff, maxLength);
}