/// <inheritdoc />
        public int Read(byte[] targetBuffer, int targetBufferOffset, int requestedBytes)
        {
            // We sync-lock the reads to avoid null reference exceptions, since the renderer may be destroyed concurrently.
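            // Monitor.TryEnter with a timeout keeps the audio device callback from blocking indefinitely;
            // if the lock cannot be acquired in time, silence is written and the call returns immediately.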
            var lockTaken = false;

            Monitor.TryEnter(SyncLock, SyncLockTimeout, ref lockTaken);

            if (lockTaken == false || HasFiredAudioDeviceStopped)
            {
                Array.Clear(targetBuffer, targetBufferOffset, requestedBytes);
                return requestedBytes;
            }

            try
            {
                WaitForReadyEvent.Complete();
                var speedRatio = MediaCore.State.SpeedRatio;

                // Render silence if we don't need to output samples
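                // Clearing the buffer while still returning the full requested count feeds the device
                // silence without starving the callback.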
                if (MediaCore.State.IsPlaying == false || speedRatio <= 0d || MediaCore.State.HasAudio == false || AudioBuffer.ReadableCount <= 0)
                {
                    Array.Clear(targetBuffer, targetBufferOffset, requestedBytes);
                    return requestedBytes;
                }

                // Ensure a pre-allocated ReadBuffer
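                // The read buffer is sized for the maximum supported speed ratio, since higher playback speeds
                // consume more source bytes per device request than the requested output size.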
                if (ReadBuffer == null || ReadBuffer.Length < Convert.ToInt32(requestedBytes * Constants.Controller.MaxSpeedRatio))
                {
                    ReadBuffer = new byte[Convert.ToInt32(requestedBytes * Constants.Controller.MaxSpeedRatio)];
                }

                // First part of DSP: Perform AV Synchronization if needed
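                // When Synchronize returns false it has already cleared the target buffer (rendering silence),
                // so the requested byte count is returned as-is.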
                if (MediaCore.State.HasVideo && Synchronize(targetBuffer, targetBufferOffset, requestedBytes, speedRatio) == false)
                {
                    return requestedBytes;
                }

                var startPosition = Position;

                // Perform DSP
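                // Ratios below 1.0 stretch the samples (slow down) and ratios above 1.0 compress them (speed up).
                // An available AudioProcessor handles both cases; otherwise the simpler ReadAndSlowDown and
                // ReadAndSpeedUp paths are used. At exactly 1.0 the samples are read straight from the audio buffer.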
                if (speedRatio < 1.0)
                {
                    if (AudioProcessor != null)
                    {
                        ReadAndUseAudioProcessor(requestedBytes, speedRatio);
                    }
                    else
                    {
                        ReadAndSlowDown(requestedBytes, speedRatio);
                    }
                }
                else if (speedRatio > 1.0)
                {
                    if (AudioProcessor != null)
                    {
                        ReadAndUseAudioProcessor(requestedBytes, speedRatio);
                    }
                    else
                    {
                        ReadAndSpeedUp(requestedBytes, true, speedRatio);
                    }
                }
                else
                {
                    if (requestedBytes > AudioBuffer.ReadableCount)
                    {
                        Array.Clear(targetBuffer, targetBufferOffset, requestedBytes);
                        return requestedBytes;
                    }

                    AudioBuffer.Read(requestedBytes, ReadBuffer, 0);
                }

                ApplyVolumeAndBalance(targetBuffer, targetBufferOffset, requestedBytes);
                MediaElement.RaiseRenderingAudioEvent(
                    targetBuffer, requestedBytes, startPosition, WaveFormat.ConvertByteSizeToDuration(requestedBytes));
            }
            catch (Exception ex)
            {
                this.LogError(Aspects.AudioRenderer, $"{nameof(AudioRenderer)}.{nameof(Read)} has faulted.", ex);
                Array.Clear(targetBuffer, targetBufferOffset, requestedBytes);
            }
            finally
            {
                Monitor.Exit(SyncLock);
            }

            return requestedBytes;
        }
        private bool Synchronize(byte[] targetBuffer, int targetBufferOffset, int requestedBytes, double speedRatio)
        {
            #region Documentation

            /*
             * Wikipedia says:
             * For television applications, audio should lead video by no more than 15 milliseconds and audio should
             * lag video by no more than 45 milliseconds. For film, acceptable lip sync is considered to be no more
             * than 22 milliseconds in either direction.
             *
             * The Media and Acoustics Perception Lab says:
             * The results of the experiment determined that the average audio leading threshold for a/v sync
             * detection was 185.19 ms, with a standard deviation of 42.32 ms
             *
             * The ATSC says:
             * At first glance it seems loose: +90 ms to -185 ms as a Window of Acceptability
             * - Undetectable from -100 ms to +25 ms
             * - Detectable at -125 ms & +45 ms
             * - Becomes unacceptable at -185 ms & +90 ms
             *
             * NOTE: (- Sound delayed, + Sound advanced)
             */

            #endregion

            var hardwareLatencyMs   = WaveFormat.ConvertByteSizeToDuration(requestedBytes).TotalMilliseconds;
            var bufferLatencyMs     = BufferLatency.TotalMilliseconds; // we want the buffer latency to be the negative of the device latency
            var minAcceptableLeadMs = -1.5 * hardwareLatencyMs;        // less than this and we need to rewind samples
            var maxAcceptableLagMs  = -0.5 * hardwareLatencyMs;        // more than this and we need to skip samples
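            // Worked example with assumed values (not from the source): for 16-bit stereo at 48 kHz,
            // a 7680-byte request spans 7680 / (48000 * 2 * 2) = 40 ms of audio, so the acceptable
            // buffer latency window here would be [-60 ms, -20 ms].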
            var isLoggingEnabled    = Math.Abs(speedRatio - 1.0) <= double.Epsilon;
            var operationName       = string.Empty;

            try
            {
                RealTimeLatency = default;

                // Skip AV synchronization entirely when it has been disabled via the renderer options.
                if (MediaElement.RendererOptions.AudioDisableSync)
                {
                    return true;
                }

                // The ideal target latency is the negative of the audio device's desired latency.
                // This is approximately -40 ms, i.e. the buffer position sits 40 ms ahead (negative lag) of the
                // playback clock so that samples are rendered right on time.
                if (bufferLatencyMs >= minAcceptableLeadMs && bufferLatencyMs <= maxAcceptableLagMs)
                {
                    return true;
                }

                if (bufferLatencyMs > maxAcceptableLagMs)
                {
                    // This is the case where the buffer latency is too positive (i.e. the buffer is lagging by too much).
                    // The goal is to skip enough samples so that the buffer latency becomes approximately the negative of
                    // the hardware latency; the buffer then leads by the hardware lag and we get sync-perfect results.
                    var audioLatencyBytes = WaveFormat.ConvertMillisToByteSize(bufferLatencyMs + hardwareLatencyMs);

                    if (AudioBuffer.ReadableCount > audioLatencyBytes)
                    {
                        operationName = "SKIP OK ";
                        AudioBuffer.Skip(audioLatencyBytes);
                        return true;
                    }

                    // render silence and return
                    operationName = "SKIP ERR";
                    Array.Clear(targetBuffer, targetBufferOffset, requestedBytes);
                    return false;
                }
                else if (bufferLatencyMs < minAcceptableLeadMs)
                {
                    // This is the case where the buffer latency is too negative (i.e. the buffer is leading by too much).
                    // The goal is to rewind enough samples to bring the buffer latency back in line with the hardware
                    // latency, keeping playback in sync and giving sync-perfect results.
                    var audioLatencyBytes = WaveFormat.ConvertMillisToByteSize(Math.Abs(bufferLatencyMs) + hardwareLatencyMs);

                    if (AudioBuffer.RewindableCount > audioLatencyBytes)
                    {
                        operationName = "RWND OK ";
                        AudioBuffer.Rewind(audioLatencyBytes);
                        return true;
                    }

                    // render silence and return
                    operationName = "RWND ERR";
                    Array.Clear(targetBuffer, targetBufferOffset, requestedBytes);
                    return false;
                }
            }
            finally
            {
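                // Report the combined latency: the buffer's current lag plus the duration of this device request.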
                RealTimeLatency = BufferLatency + TimeSpan.FromMilliseconds(hardwareLatencyMs);
                if (isLoggingEnabled && !string.IsNullOrWhiteSpace(operationName))
                {
                    this.LogWarning(Aspects.AudioRenderer,
                                    $"SYNC AUDIO: {operationName} | Initial: {bufferLatencyMs:0} ms. Current: {BufferLatency.TotalMilliseconds:0} ms. Device: {hardwareLatencyMs:0} ms.");
                }
            }

            return true;
        }
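
For context, the WaveFormat byte/time conversions used above (ConvertByteSizeToDuration, ConvertMillisToByteSize) reduce to block-align arithmetic. A minimal sketch, assuming a PCM format that exposes sample rate, channel count, and bits per sample; the type and member layout below are illustrative, not the library's actual WaveFormat implementation:

using System;

public sealed class PcmFormatSketch
{
    public PcmFormatSketch(int sampleRate, int channels, int bitsPerSample)
    {
        SampleRate = sampleRate;
        Channels = channels;
        BitsPerSample = bitsPerSample;
    }

    public int SampleRate { get; }

    public int Channels { get; }

    public int BitsPerSample { get; }

    // Bytes per sample frame across all channels (the block alignment).
    public int BlockAlign => Channels * (BitsPerSample / 8);

    // Duration covered by a given number of buffer bytes.
    public TimeSpan ConvertByteSizeToDuration(int byteCount) =>
        TimeSpan.FromSeconds((double)byteCount / (SampleRate * BlockAlign));

    // Byte count covering the given milliseconds, aligned down to a whole sample frame.
    public int ConvertMillisToByteSize(double millis)
    {
        var byteCount = (int)(millis / 1000d * SampleRate * BlockAlign);
        return byteCount - (byteCount % BlockAlign);
    }
}

With 48 kHz, 16-bit stereo audio, ConvertByteSizeToDuration(7680) yields 40 ms, matching the worked example in the synchronization code above.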