public void Encode()
{
    if (this.trackGain == null && this.drMeter == null)
    {
        throw new SkipEncodingItemException("Neither ReplayGain nor DynamicRange to calculate.");
    }

    AudioBuffer buffer = new AudioBuffer(audioSource.PCM, FileEncoderBase.BufferSize);
    while (audioSource.Read(buffer, FileEncoderBase.BufferSize) > 0)
    {
        if (this.trackGain != null)
        {
            DspHelper.AnalyzeSamples(this.trackGain, buffer);
        }

        if (this.drMeter != null)
        {
            this.drMeter.Feed(buffer.Samples, buffer.Length);
        }

        ProgressChangedEventArgs eventArgs = new ProgressChangedEventArgs((double)this.audioSource.Position / this.audioSource.Length);
        this.OnProgressChanged(eventArgs);
        if (eventArgs.Cancel)
        {
            // Cancelled: drop the partially analyzed state and return without finishing.
            this.trackGain = null;
            this.drMeter = null;
            return;
        }
    }

    if (this.drMeter != null)
    {
        this.drMeter.Finish();
    }
}
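// A minimal sketch (not taken from the source) of driving the analysis above with cancellation.
// The ProgressChanged event name, the lambda signature, and the isCancelRequested callback are
// assumptions for illustration; only ProgressChangedEventArgs.Cancel appears in the code above.
private static void AnalyzeWithCancellation(DspCalculatorEncoder encoder, System.Func<bool> isCancelRequested)
{
    encoder.ProgressChanged += (sender, e) =>
    {
        if (isCancelRequested())
        {
            e.Cancel = true; // Encode() drops its DSP state and returns without calling Finish().
        }
    };

    try
    {
        encoder.Encode();
    }
    catch (SkipEncodingItemException)
    {
        // Neither ReplayGain nor DR was requested for this item; nothing to do.
    }
}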
public IEncoder CreateEncoder(int threadNumber, IParallelTask _task)
{
    if (_task is ReplayGainTask)
    {
        ReplayGainTask task = (ReplayGainTask)_task;
        return new ReplayGainTagEncoder(task);
    }
    else if (_task is FileEncodeTask)
    {
        FileEncodeTask task = (FileEncodeTask)_task;
        IAudioSource audioSource = task.AudioSourceLazy();
        if (audioSource == null)
        {
            throw new SkipEncodingItemException("Unsupported audio format.");
        }

        if (this.CalculateReplayGain)
        {
            task.TrackGain = DspHelper.CreateTrackGain(audioSource);
        }

        if (this.CalculateDynamicRange)
        {
            task.DrMeter = DspHelper.CreateDrMeter(audioSource);
        }

        return new DspCalculatorEncoder(audioSource, task.TrackGain, task.DrMeter);
    }

    throw new NotSupportedException();
}
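// Sketch only (not from the source): a sequential dispatch loop over the factory above.
// The IEncoderFactory interface name, the "tasks" collection, and the assumption that
// IEncoder exposes the Encode() method shown in this file are all illustrative guesses.
private static void RunTasks(IEncoderFactory factory, System.Collections.Generic.IEnumerable<IParallelTask> tasks)
{
    foreach (IParallelTask task in tasks)
    {
        try
        {
            IEncoder encoder = factory.CreateEncoder(0, task);
            encoder.Encode();
        }
        catch (SkipEncodingItemException ex)
        {
            // Unsupported or empty items are skipped rather than treated as failures.
            System.Console.WriteLine("Skipped item: " + ex.Message);
        }
    }
}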
protected IAudioSource SetupTask(FileEncodeTask task)
{
    IAudioSource audioSource = task.AudioSourceLazy();
    if (audioSource == null)
    {
        throw new SkipEncodingItemException("Audio source is not supported.");
    }

    if (this.CalculateRg)
    {
        task.TrackGain = DspHelper.CreateTrackGain(audioSource);
    }

    if (this.CalculateDr)
    {
        task.DrMeter = DspHelper.CreateDrMeter(audioSource);
    }

    return audioSource;
}
public void Encode()
{
    AudioBuffer buffer = new AudioBuffer(audioSource.PCM, BufferSize);
    this.AudioDest.FinalSampleCount = this.audioSource.Length;
    while (audioSource.Read(buffer, BufferSize) > 0)
    {
        if (this.trackGain != null)
        {
            DspHelper.AnalyzeSamples(this.trackGain, buffer);
        }

        if (this.drMeter != null)
        {
            this.drMeter.Feed(buffer.Samples, buffer.Length);
        }

        this.AudioDest.Write(buffer);

        ProgressChangedEventArgs eventArgs = new ProgressChangedEventArgs((double)this.audioSource.Position / this.audioSource.Length);
        this.OnProgressChanged(eventArgs);
        if (eventArgs.Cancel)
        {
            // Cancelled: close the destination and remove the partially written file.
            this.AudioDest.Close();
            this.AudioDest = null;
            Utility.TryDeleteFile(this.targetFilename);
            return;
        }
    }

    if (this.drMeter != null)
    {
        this.drMeter.Finish();
    }

    this.AudioDest.Close();
    this.AudioDest = null;

    if (this.tags != null)
    {
        this.tags.WriteToFile(this.targetFilename);
    }
}
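// Utility.TryDeleteFile is referenced above but defined elsewhere; this is a hypothetical,
// self-contained stand-in (class name, return type, and exception handling are assumptions).
internal static class UtilitySketch
{
    public static bool TryDeleteFile(string path)
    {
        try
        {
            if (System.IO.File.Exists(path))
            {
                System.IO.File.Delete(path);
            }
            return true;
        }
        catch (System.Exception)
        {
            // Best effort only: a locked or read-only partial file is simply left behind.
            return false;
        }
    }
}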
private void UpdateUI()
{
    this.listTracks.Items.Clear();
    if (this.releases == null)
    {
        // Nothing to display yet.
    }
    else if (this.releases.Length == 1)
    {
        this.columnArtists.Width = this.releases[0].HasTrackArtists ? 240 : 0;
        this.columnDisc.Width = this.releases[0].DiscCount != 1 ? 28 : 0;
        this.AddItemsForRelease(this.releases[0]);
    }
    else
    {
        this.columnArtists.Width = this.releases.Any(r => r.HasTrackArtists) ? 240 : 0;
        this.columnDisc.Width = this.releases.Any(r => r.DiscCount != 1) ? 28 : 0;
        foreach (Release release in this.releases)
        {
            // One bold header row per release, followed by its tracks.
            this.listTracks.Items.Add(new CustomListViewItem()
            {
                JoinedArtists = release.JoinedAlbumArtists,
                Title = release.Title,
                FontWeight = FontWeights.Bold,
                DynamicRange = DspHelper.GetRoundedDr(release.DynamicRange),
                DynamicRangeBrush = UIHelper.GetDrBrush(DspHelper.GetRoundedDr(release.DynamicRange), drAlpha),
                AlbumGain = FormatGain(release.ReplayGainAlbumGain)
            });
            this.AddItemsForRelease(release);
        }
    }
}
private void AddItemsForRelease(Release release)
{
    int currentDisc = 0;
    foreach (Track track in release.Tracklist)
    {
        // Insert an italic "Disc n" header row whenever a multi-disc release moves to the next disc.
        if (release.DiscCount != 1 && track.Disc != currentDisc)
        {
            currentDisc = track.Disc;
            this.listTracks.Items.Add(new CustomListViewItem()
            {
                Title = "Disc " + currentDisc,
                FontStyle = FontStyles.Italic,
                DynamicRange = DspHelper.GetRoundedDr(release.DynamicRange),
                DynamicRangeBrush = UIHelper.GetDrBrush(DspHelper.GetRoundedDr(release.DynamicRange), drAlpha),
                AlbumGain = FormatGain(release.ReplayGainAlbumGain)
            });
        }

        this.listTracks.Items.Add(new CustomListViewItem()
        {
            Disc = track.Disc.ToString(),
            Position = track.Position.ToString(),
            JoinedArtists = track.JoinedArtists,
            Title = track.Title,
            DynamicRange = DspHelper.GetRoundedDr(track.DynamicRange),
            // Colour the DR cell by the track's own DR value, matching the number shown in the row.
            DynamicRangeBrush = UIHelper.GetDrBrush(DspHelper.GetRoundedDr(track.DynamicRange), drAlpha),
            AlbumGain = FormatGain(release.ReplayGainAlbumGain),
            TrackGain = FormatGain(track.ReplayGainTrackGain)
        });
    }
}
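// FormatGain is used by both methods above but not shown in this file; a hypothetical sketch,
// assuming the ReplayGain values are nullable floats in dB (signature and format string are guesses).
private static string FormatGain(float? gain)
{
    // e.g. "-3.21 dB"; empty when no ReplayGain value has been calculated yet.
    return gain.HasValue
        ? gain.Value.ToString("0.00", System.Globalization.CultureInfo.InvariantCulture) + " dB"
        : string.Empty;
}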
// private double _doubleReadCounter;
// private int _doubleReadFloor;

#endregion

#region Methods

/// <returns>Length *in shorts* of the decoded data</returns>
public int ReadSamples(short[] outputBuffer)
{
    int length = 0;
    bool isSilent = false;
    lock (_queue)
    {
        // If there are too many frames in the queue, pull them out one by one and decode them
        // (so the speex buffer stays OK), but don't bother playing them.
        // This is a case where downsampling would be helpful, but we'll ignore it for now.
        while (_queue.Count > queuedFramesTargetMax)
        {
            _logger.LogQueueFull();
            var entry = _queue.Dequeue();
            AudioDecoder = _codecFactory.GetAudioDecoder(entry.AudioCodecType);
            AudioDecoder.Decode(entry.Frame, 0, entry.DataLength, outputBuffer, length, entry.IsSilent);
            _entryPool.Recycle(entry);
            _videoQualityController.LogGlitch(1);
        }

        // If we haven't lost any frames since the last check, pull one frame out of the queue to reduce latency.
        // This is a case where downsampling would be helpful, but we'll ignore it for now.
        if (++_framesSinceLastCheck > _framesBetweenChecks && _firstPacketReceived)
        {
            // Keep a record of the queue size, so that we can know how "bad" it is when we miss a packet.
            // It's not a big deal to miss a read when the queue size is stable at < 4 frames,
            // but it's a pretty big deal when the queue size is jumping around between 0 and 50.
            while (_queueSizes.Count > maxQueueSizeEntries)
            {
                _queueSizes.RemoveAt(0);
            }
            _queueSizes.Add(_queue.Count);

            if (_framesLostSinceLastCheck == 0 && _queue.Count > queuedFramesTargetMin)
            {
                var entry = _queue.Dequeue();
                AudioDecoder = _codecFactory.GetAudioDecoder(entry.AudioCodecType);
                AudioDecoder.Decode(entry.Frame, 0, entry.DataLength, outputBuffer, length, entry.IsSilent);
                _entryPool.Recycle(entry);
                if (_framesBetweenChecks > framesBetweenChecksMin)
                {
                    _framesBetweenChecks -= goodTrafficAdjustment; // Speed up (slightly) the rate at which we can decrease the queue size.
                }
                _logger.LogQueueReduced();
            }

            _framesLostSinceLastCheck = 0;
            _framesSinceLastCheck = 0;
        }

        // Calculate the number of packets we should retrieve.
        // Here's the logic. Let's say that we're only reading packets every 23.3 milliseconds instead of every 20 milliseconds.
        // This means that for about 3.3/20 = 16.5% of the reads, we actually need to request *two* packets.
        // So each time we read, we add .165 to a counter, and then take its floor. As soon as the counter floor
        // rolls over to a new integer, we know that we need to read a second packet.
        // Unfortunately, it doesn't look like this works. We'll need to create a better approach,
        // presumably using a resampler.
        //_doubleReadCounter += _logger.OverageRatio;
        //int packetsToRead = 1;
        //var newDoubleReadFloor = (int)Math.Floor(_doubleReadCounter);
        //if (newDoubleReadFloor > _doubleReadFloor)
        //{
        //    packetsToRead += newDoubleReadFloor - _doubleReadFloor;
        //    _doubleReadFloor = newDoubleReadFloor;
        //    _logger.LogMultipleRead();
        //}
        //for (int i = 0; i < packetsToRead; i++)
        {
            if (_queue.Count > 0)
            {
                // If we have anything in the queue, fulfill the request.
                var entry = _queue.Dequeue();
                isSilent = entry.IsSilent;
                _lastSequenceNumberRead = entry.SequenceNumber;
                AudioDecoder = _codecFactory.GetAudioDecoder(entry.AudioCodecType);
                length += AudioDecoder.Decode(entry.Frame, 0, entry.DataLength, outputBuffer, length, entry.IsSilent);
                _entryPool.Recycle(entry);
                _firstPacketReceived = true;
            }
            else
            {
                // Record the fact that we missed a read, so the rest of the system can adjust.
                _logger.LogQueueEmpty();
                if (_firstPacketReceived)
                {
                    double stdDev = DspHelper.GetStandardDeviation(_queueSizes);
                    _videoQualityController.LogGlitch((int)Math.Floor(stdDev) + 1);
                }

                // If the frame hasn't arrived yet, let the last audio codec interpolate the missing packet.
                // Most likely, the additional frames will arrive in a bunch by the time the next read happens.
                // We may want to investigate our own upsampling algorithm at some point.
                length += AudioDecoder.Decode(null, 0, 0, outputBuffer, length, true);
                if (_framesBetweenChecks < framesBetweenChecksMax && _firstPacketReceived)
                {
                    _framesBetweenChecks += badTrafficAdjustment; // Slow down (substantially) the rate at which we decrease the queue size.
                }
            }
        }
    }

    _logger.LogRead(_queue, _framesBetweenChecks, isSilent);
    return length;
}
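// DspHelper.GetStandardDeviation is called above to judge how unstable the queue size has been.
// Below is a self-contained sketch of a population standard deviation over the recent queue-size
// samples; the class name and parameter type are placeholders, not the project's actual signature.
using System;
using System.Collections.Generic;
using System.Linq;

public static class DspHelperSketch
{
    public static double GetStandardDeviation(IList<int> values)
    {
        if (values == null || values.Count == 0)
        {
            return 0.0;
        }

        double mean = values.Average();
        double sumOfSquaredDeviations = values.Sum(v => (v - mean) * (v - mean));
        return Math.Sqrt(sumOfSquaredDeviations / values.Count);
    }
}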