public void AlignToSamplesTest() {
    Interval intervalToAlign = new Interval(1000, 10000);
    AudioProperties audioProperties = new AudioProperties(2, 44100, 32, AudioFormat.IEEE);
    double sampleTicks = AudioUtil.CalculateSampleTicks(audioProperties);

    // expected result: From is rounded down and To is rounded up to the nearest sample boundary
    Interval expected = new Interval(
        (long)(intervalToAlign.From - ((double)intervalToAlign.From % sampleTicks)),
        (long)(intervalToAlign.To + sampleTicks - (intervalToAlign.To % sampleTicks)));

    Interval actual = AudioUtil.AlignToSamples(intervalToAlign, audioProperties);

    Assert.AreEqual(expected, actual);
}
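// The "expected" value computed in the test above documents the alignment contract:
// AlignToSamples is expected to round the interval's From down and its To up to the
// nearest sample boundary, where one sample spans TimeSpan.TicksPerSecond / sampleRate
// ticks (roughly 226.76 ticks at 44.1 kHz). The helpers below are a minimal sketch that
// satisfies exactly that expectation; the names are hypothetical and this is not the
// actual AudioUtil implementation.
private static double SampleTicksSketch(int sampleRate) {
    // 10,000,000 ticks per second divided by the sample rate
    return (double)TimeSpan.TicksPerSecond / sampleRate;
}

private static Interval AlignToSamplesSketch(Interval interval, int sampleRate) {
    double sampleTicks = SampleTicksSketch(sampleRate);
    long from = (long)(interval.From - ((double)interval.From % sampleTicks)); // round down
    long to = (long)(interval.To + sampleTicks - (interval.To % sampleTicks)); // round up
    return new Interval(from, to);
}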
protected override void OnRender(DrawingContext drawingContext) {
    base.OnRender(drawingContext);
    bool debug = DebugOutput;

    // draw background
    drawingContext.DrawRectangle(Background, null, new Rect(0, 0, ActualWidth, ActualHeight));

    if (audioStream != null) {
        Interval audioInterval = new Interval(TrackOffset, TrackOffset + audioStream.TimeLength.Ticks);
        Interval viewportInterval = VirtualViewportInterval;

        if (!audioInterval.Intersects(viewportInterval)) {
            // audio track is outside the viewport
            return;
        }

        Interval visibleAudioInterval = audioInterval.Intersect(viewportInterval);
        Interval audioToLoadInterval = visibleAudioInterval - TrackOffset;

        // align interval to samples
        Interval audioToLoadIntervalAligned = AudioUtil.AlignToSamples(audioToLoadInterval, audioStream.Properties);
        int samplesToLoad = AudioUtil.CalculateSamples(audioStream.Properties, new TimeSpan(audioToLoadIntervalAligned.Length)) + 1;
        double sampleLength = AudioUtil.CalculateSampleTicks(audioStream.Properties);

        // calculate drawing measures
        double viewportToDrawingScaleFactor = ActualWidth / VirtualViewportWidth;
        int drawingOffsetAligned = (int)((-(audioToLoadInterval.From - audioToLoadIntervalAligned.From)
            + (visibleAudioInterval.From - viewportInterval.From)) * viewportToDrawingScaleFactor);
        int drawingWidthAligned = (int)((samplesToLoad - 1) * sampleLength * viewportToDrawingScaleFactor);
        int drawingOffset = (int)((visibleAudioInterval.From - viewportInterval.From) * viewportToDrawingScaleFactor);

        if (visibleAudioInterval.Length < sampleLength) {
            drawingContext.DrawText(DebugText("VISIBLE INTERVAL WARNING: " + visibleAudioInterval.Length
                + " < SAMPLE LENGTH " + sampleLength), new Point(0, 0));
            return;
        }

        if (drawingWidthAligned <= 1) {
            // the visible width of the track is too narrow to be drawn/visible,
            // so draw a line to visually indicate the presence of a waveform
            drawingContext.DrawRectangle(_lineBrush, null, new Rect(drawingOffsetAligned, 0, 1, ActualHeight));
            return;
        }

        // load audio samples
        DateTime beforeLoading = DateTime.Now;
        float[][] samples = null;
        int sampleCount = 0;

        if (RenderMode != WaveViewRenderMode.None) {
            bool peaks = samplesToLoad > drawingWidthAligned;
            // TODO don't recreate the array every time -> resize on demand
            samples = AudioUtil.CreateArray<float>(audioStream.Properties.Channels, drawingWidthAligned * 2);
            audioStream.TimePosition = new TimeSpan(audioToLoadIntervalAligned.From);

            if (peaks) {
                sampleCount = audioStream.ReadPeaks(samples, samplesToLoad, drawingWidthAligned);
            }
            else {
                sampleCount = audioStream.ReadSamples(samples, samplesToLoad);
            }

            if (sampleCount <= 1) {
                drawingContext.DrawText(DebugText("SAMPLE WARNING: " + sampleCount), new Point(0, 0));
                return;
            }
        }

        DateTime afterLoading = DateTime.Now;
        DateTime beforeDrawing = DateTime.Now;

        // draw background
        drawingContext.DrawRectangle(_backgroundBrush, null, new Rect(drawingOffsetAligned, 0, drawingWidthAligned, ActualHeight));
        if (debug) {
            drawingContext.DrawRectangle(null, new Pen(Brushes.Brown, 4), new Rect(drawingOffsetAligned, 0, drawingWidthAligned, ActualHeight));
        }

        // draw waveform guides & create drawing guidelines
        GuidelineSet guidelineSet = new GuidelineSet();
        drawingContext.PushGuidelineSet(guidelineSet);
        int channels = audioStream.Properties.Channels;
        double channelHeight = ActualHeight / channels;
        double channelHalfHeight = channelHeight / 2;

        for (int channel = 0; channel < channels; channel++) {
            // waveform zero-line
            guidelineSet.GuidelinesY.Add((channelHeight * channel + channelHalfHeight) + 0.5);
            drawingContext.DrawLine(new Pen(Brushes.LightGray, 1),
                new Point(drawingOffsetAligned, channelHeight * channel + channelHalfHeight),
                new Point(drawingOffsetAligned + drawingWidthAligned, channelHeight * channel + channelHalfHeight));

            // waveform spacers
            if (channel > 0) {
                guidelineSet.GuidelinesY.Add((channelHeight * channel) + 0.5);
                drawingContext.DrawLine(new Pen(Brushes.DarkGray, 1),
                    new Point(drawingOffsetAligned, channelHeight * channel),
                    new Point(drawingOffsetAligned + drawingWidthAligned, channelHeight * channel));
            }
        }
        drawingContext.Pop();

        // draw waveforms
        if (channelHeight >= 1) {
            IWaveformRenderer[] renderers = null;
            switch (RenderMode) {
                case WaveViewRenderMode.None:
                    renderers = null;
                    break;
                case WaveViewRenderMode.Bitmap:
                    renderers = waveformBitmapRenderers;
                    break;
                case WaveViewRenderMode.Geometry:
                    renderers = waveformGeometryRenderers;
                    break;
                case WaveViewRenderMode.Auto:
                    // Automatically select the waveform renderer:
                    // - when zoomed in to sample level, draw a nice geometry
                    // - when zoomed out and single samples are not visible, draw a bitmap because geometry is too slow
                    if (sampleCount >= drawingWidthAligned) {
                        renderers = waveformBitmapRenderers;
                    }
                    else {
                        renderers = waveformGeometryRenderers;
                    }
                    break;
            }

            if (renderers != null) {
                for (int channel = 0; channel < channels; channel++) {
                    // calculate the balance factor for the first two channels only (balance only applies to stereo)
                    // TODO extend for multichannel (needs implementation of a multichannel balance adjustment control)
                    float balanceFactor = 1;
                    if (channels == 2) {
                        if (channel == 0) {
                            balanceFactor = AudioTrack.Balance < 0 ? 1 : 1 - AudioTrack.Balance;
                        }
                        else if (channel == 1) {
                            balanceFactor = AudioTrack.Balance > 0 ? 1 : 1 + AudioTrack.Balance;
                        }
                    }

                    Drawing waveform = renderers[channel].Render(samples[channel], sampleCount, drawingWidthAligned,
                        (int)channelHeight, AudioTrack.Volume * balanceFactor);
                    DrawingGroup drawing = new DrawingGroup();
                    drawing.Children.Add(waveform);
                    drawing.Transform = new TranslateTransform((int)drawingOffsetAligned, (int)(channelHeight * channel));
                    drawingContext.DrawDrawing(drawing);
                }
            }
        }

        DateTime afterDrawing = DateTime.Now;

        // draw track name
        if (DrawTrackName) {
            FormattedText formattedTrackName = new FormattedText(AudioTrack.FileInfo.Name, CultureInfo.CurrentCulture,
                FlowDirection.LeftToRight, new Typeface("Segoe UI"), 10f, Brushes.White);
            drawingContext.DrawRectangle(Brushes.Black, null,
                new Rect(4 + drawingOffset, 5, formattedTrackName.Width + 4, formattedTrackName.Height + 2));
            drawingContext.DrawText(formattedTrackName, new Point(6 + drawingOffset, 6));
        }

        if (debug) {
            // DEBUG OUTPUT
            drawingContext.DrawText(DebugText(String.Format("source:" + "n/a" + " load:{0}ms render:{1}ms",
                (afterLoading - beforeLoading).TotalMilliseconds,
                (afterDrawing - beforeDrawing).TotalMilliseconds)),
                new Point(0, 20));
            drawingContext.DrawText(DebugText("visibleAudioInterval: " + visibleAudioInterval
                + ", audioToLoadInterval: " + audioToLoadInterval
                + ", audioToLoadIntervalAligned: " + audioToLoadIntervalAligned),
                new Point(0, ActualHeight) + new Vector(0, -50));
            drawingContext.DrawText(DebugText("Drawing Offset: " + drawingOffsetAligned
                + ", Width: " + drawingWidthAligned
                + ", ScalingFactor: " + viewportToDrawingScaleFactor
                + ", Samples: " + sampleCount
                + ", Peakratio 1:" + Math.Round(VirtualViewportWidth / sampleLength / ActualWidth, 2)),
                new Point(0, ActualHeight) + new Vector(0, -40));
        }
    }

    if (debug) {
        // DEBUG OUTPUT
        drawingContext.DrawText(DebugText("ActualWidth: " + ActualWidth + ", ActualHeight: " + ActualHeight),
            new Point(0, ActualHeight) + new Vector(0, -30));
        drawingContext.DrawText(DebugText("TrackLength: " + TrackLength + ", TrackOffset: " + TrackOffset),
            new Point(0, ActualHeight) + new Vector(0, -20));
        drawingContext.DrawText(DebugText("ViewportOffset: " + VirtualViewportOffset + ", ViewportWidth: " + VirtualViewportWidth),
            new Point(0, ActualHeight) + new Vector(0, -10));
    }
}
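// For reference, the stereo balance mapping used in the render loop above can be isolated
// into a small helper: the channel the balance leans towards keeps full gain, while the
// opposite channel is attenuated linearly. This is a hedged sketch only (assuming Balance
// ranges from -1 for full left to +1 for full right); it is not part of the original class.
private static float BalanceFactorSketch(float balance, int channel) {
    if (channel == 0) {
        // left channel: attenuated when the balance leans right
        return balance < 0 ? 1 : 1 - balance;
    }
    if (channel == 1) {
        // right channel: attenuated when the balance leans left
        return balance > 0 ? 1 : 1 + balance;
    }
    // balance adjustment is only defined for stereo; other channels keep full gain
    return 1;
}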