private void LogEventDone(RoutedEvent e)
{
    if (WindowsPlatform.Instance.IsInDebugMode)
    {
        MediaCore?.Log(MediaLogMessageType.Trace, $"EVENT DONE : {e.Name}");
    }
}
/// <summary>
/// Performs actions when the command has been executed.
/// This is useful to notify exceptions or update the state of the media.
/// </summary>
public override void PostProcess()
{
    MediaCore.SendOnMediaClosed();
    MediaCore.State.UpdateFixedContainerProperties();
    LogReferenceCounter(MediaCore);
    MediaCore.Log(MediaLogMessageType.Debug, $"Command {CommandType}: Completed");
}
internal void RaiseMediaFailedEvent(Exception ex)
{
    LogEventStart(MediaFailedEvent);
    MediaCore?.Log(MediaLogMessageType.Error, $"Media Failure - {ex?.GetType()}: {ex?.Message}");
    WindowsPlatform.Instance.Gui?.Invoke(DispatcherPriority.DataBind, () =>
    {
        RaiseEvent(CreateExceptionRoutedEventArgs(MediaFailedEvent, this, ex));
    });
    LogEventDone(MediaFailedEvent);
}
internal void RaiseMediaFailedEvent(Exception ex)
{
    LogEventStart(MediaFailedEvent);
    MediaCore?.Log(MediaLogMessageType.Error, $"Media Failure - {ex?.GetType()}: {ex?.Message}");
    GuiContext.Current.EnqueueInvoke(() =>
    {
        RaiseEvent(CreateExceptionRoutedEventArgs(MediaFailedEvent, this, ex));
        LogEventDone(MediaFailedEvent);
    });
}
/// <summary>
/// Opens the specified URI.
/// This command gets processed in a threadpool thread asynchronously.
/// </summary>
/// <param name="uri">The URI.</param>
/// <returns>The asynchronous task</returns>
public async Task OpenAsync(Uri uri)
{
    // Check the Uri argument
    if (uri == null)
    {
        MediaCore?.Log(
            MediaLogMessageType.Warning,
            $"{nameof(MediaCommandManager)}.{nameof(OpenAsync)}: '{nameof(uri)}' cannot be null");
        return;
    }

    if (CanExecuteCommands == false)
    {
        return;
    }
    else
    {
        IsOpening.Value = true;
    }

    var command = new OpenCommand(this, uri);
    ExecutingCommand = command;
    ClearCommandQueue();

    var action = new Action(() =>
    {
        try
        {
            if (command.HasCompleted)
            {
                return;
            }

            command.RunSynchronously();
        }
        catch (Exception ex)
        {
            MediaCore?.Log(
                MediaLogMessageType.Error,
                $"{nameof(MediaCommandManager)}.{nameof(OpenAsync)}: {ex.GetType()} - {ex.Message}");
        }
        finally
        {
            ExecutingCommand?.Complete();
            ExecutingCommand = null;
            IsOpening.Value = false;
        }
    });

    await TaskEx.Run(action);
}
private void LogEventDone(RoutedEvent e)
{
    if (e.Name.Equals(nameof(BufferingEnded)))
    {
        MediaCore?.Log(MediaLogMessageType.Debug, $"EVENT DONE: {e.Name}");
        return;
    }

    if (WindowsPlatform.Instance.IsInDebugMode)
    {
        MediaCore?.Log(MediaLogMessageType.Trace, $"EVENT DONE : {e.Name}");
    }
}
/// <summary>
/// Performs actions when the command has been executed.
/// This is useful to notify exceptions or update the state of the media.
/// </summary>
public override void PostProcess()
{
    MediaCore.State.UpdateFixedContainerProperties();

    if (ErrorException == null)
    {
        MediaCore.SendOnMediaChanged();
    }
    else
    {
        MediaCore.SendOnMediaFailed(ErrorException);
    }

    MediaCore.Log(MediaLogMessageType.Debug, $"Command {CommandType}: Completed");
}
/// <summary>
/// Opens the specified custom input stream.
/// This command gets processed in a threadpool thread asynchronously.
/// </summary>
/// <param name="stream">The custom input stream.</param>
/// <returns>
/// The asynchronous task
/// </returns>
public async Task OpenAsync(IMediaInputStream stream)
{
    // Check the stream argument
    if (stream == null)
    {
        MediaCore?.Log(
            MediaLogMessageType.Warning,
            $"{nameof(MediaCommandManager)}.{nameof(OpenAsync)}: '{nameof(stream)}' cannot be null");
        return;
    }

    if (CanExecuteCommands == false)
    {
        return;
    }
    else
    {
        IsOpening.Value = true;
    }

    var command = new OpenCommand(this, stream);
    ExecutingCommand = command;
    ClearCommandQueue();

    try
    {
        if (command.HasCompleted)
        {
            return;
        }

        await command.StartAsync();
    }
    catch (Exception ex)
    {
        MediaCore?.Log(
            MediaLogMessageType.Error,
            $"{nameof(MediaCommandManager)}.{nameof(OpenAsync)}: {ex.GetType()} - {ex.Message}");
    }
    finally
    {
        ExecutingCommand?.Complete();
        ExecutingCommand = null;
        IsOpening.Value = false;
    }
}
/// <summary>
/// Performs actions when the command has been executed.
/// This is useful to notify exceptions or update the state of the media.
/// </summary>
public override void PostProcess()
{
    MediaCore.State.UpdateFixedContainerProperties();

    if (ExceptionResult == null)
    {
        MediaCore.State.UpdateMediaState(PlaybackStatus.Stop);
        MediaCore.SendOnMediaOpened();
    }
    else
    {
        MediaCore.State.UpdateMediaState(PlaybackStatus.Close);
        MediaCore.SendOnMediaFailed(ExceptionResult);
    }

    MediaCore.Log(MediaLogMessageType.Debug, $"Command {CommandType}: Completed");
}
/// <summary>
/// Closes the specified media.
/// This command gets processed in a threadpool thread asynchronously.
/// </summary>
/// <returns>Returns the background task.</returns>
public async Task CloseAsync()
{
    if (CanExecuteCommands == false)
    {
        return;
    }
    else
    {
        IsClosing.Value = true;
    }

    var command = new CloseCommand(this);
    ExecutingCommand = command;
    ClearCommandQueue();

    var action = new Action(() =>
    {
        try
        {
            if (command.HasCompleted)
            {
                return;
            }

            command.RunSynchronously();
        }
        catch (Exception ex)
        {
            MediaCore?.Log(
                MediaLogMessageType.Error,
                $"{nameof(MediaCommandManager)}.{nameof(CloseAsync)}: {ex.GetType()} - {ex.Message}");
        }
        finally
        {
            ExecutingCommand?.Complete();
            ExecutingCommand = null;
            IsClosing.Value = false;
        }
    });

    await TaskEx.Run(action);
}
/// <summary>
/// Outputs the state of the command queue.
/// </summary>
/// <param name="operation">The operation.</param>
/// <param name="outputEmpty">if set to <c>true</c>, outputs the queue state even when the queue is empty.</param>
private void DumpQueue(string operation, bool outputEmpty)
{
    if (MediaEngine.Platform.IsInDebugMode == false)
    {
        return;
    }

    lock (SyncLock)
    {
        if (outputEmpty == false && Commands.Count <= 0)
        {
            return; // Prevent output for an empty command queue
        }

        MediaCore.Log(MediaLogMessageType.Trace, $"Command Queue ({Commands.Count} commands): {operation}");
        foreach (var c in Commands)
        {
            MediaCore.Log(MediaLogMessageType.Trace, $"   {c}");
        }
    }
}
/// <summary>
/// Performs actions when the command has been executed.
/// This is useful to notify exceptions or update the state of the media.
/// </summary>
public override void PostProcess()
{
    var m = MediaCore;
    if (m == null)
    {
        return;
    }

    // Update notification properties
    m.State.ResetAll();
    m.ResetPosition();
    m.State.UpdateMediaState(PlaybackStatus.Close);
    m.State.UpdateSource(null);

    // Notify that the media has closed
    m.SendOnMediaClosed();
    LogReferenceCounter(m);
    m.Log(MediaLogMessageType.Debug, $"Command {CommandType}: Completed");
}
/// <summary>
/// Initializes the audio renderer.
/// Call the Play method to start reading samples.
/// </summary>
private void Initialize()
{
    Destroy();

    // Enumerate devices. The default device is the first one, so we check
    // that we have more than 1 device (other than the default stub).
    var hasAudioDevices = MediaElement.RendererOptions.UseLegacyAudioOut
        ? LegacyAudioPlayer.EnumerateDevices().Count > 1
        : DirectSoundPlayer.EnumerateDevices().Count > 1;

    // Check if we have an audio output device.
    if (hasAudioDevices == false)
    {
        WaitForReadyEvent = null;
        MediaCore.Log(MediaLogMessageType.Warning, "AUDIO OUT: No audio device found for output.");
        return;
    }

    // Initialize the SoundTouch audio processor (if available)
    AudioProcessor = (SoundTouch.IsAvailable == false)
        ? null
        : new SoundTouch
        {
            Channels = Convert.ToUInt32(WaveFormat.Channels),
            SampleRate = Convert.ToUInt32(WaveFormat.SampleRate)
        };

    // Initialize the audio device
    AudioDevice = MediaElement.RendererOptions.UseLegacyAudioOut
        ? new LegacyAudioPlayer(this, MediaElement.RendererOptions.LegacyAudioDevice?.DeviceId ?? -1) as IWavePlayer
        : new DirectSoundPlayer(this, MediaElement.RendererOptions.DirectSoundDevice?.DeviceId ?? DirectSoundPlayer.DefaultPlaybackDeviceId);

    // Create the audio buffer
    SampleBlockSize = Constants.Audio.BytesPerSample * Constants.Audio.ChannelCount;
    var bufferLength = WaveFormat.ConvertMillisToByteSize(2000); // 2-second buffer
    AudioBuffer = new CircularBuffer(bufferLength);
    AudioDevice.Start();
}
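// Illustrative sketch (not part of the renderer): the 2-second buffer length above comes from a
// milliseconds-to-bytes conversion. WaveFormat.ConvertMillisToByteSize is not shown in this excerpt;
// assuming interleaved 16-bit PCM, it is expected to compute something along these lines.
private static int ConvertMillisToByteSizeSketch(int millis, int sampleRate, int channels, int bitsPerSample = 16)
{
    var bytesPerSecond = sampleRate * channels * (bitsPerSample / 8);
    var byteCount = (long)millis * bytesPerSecond / 1000;

    // Align down to the sample block size so reads never split a sample frame.
    var blockAlign = channels * (bitsPerSample / 8);
    return (int)(byteCount - (byteCount % blockAlign));
}

// Example: 2000 ms at 48 kHz, 2 channels, 16 bits per sample = 48000 * 2 * 2 * 2 = 384,000 bytes.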
/// <summary>
/// Processes the next command in the command queue.
/// This method is called in every block rendering cycle.
/// </summary>
public void ProcessNext()
{
    DumpQueue($"Before {nameof(ProcessNext)}", false);
    if (MediaCore.IsTaskCancellationPending)
    {
        return;
    }

    MediaCommand command = null;

    lock (SyncLock)
    {
        if (Commands.Count == 0)
        {
            return;
        }

        command = Commands[0];
        Commands.RemoveAt(0);
    }

    try
    {
        ExecutingCommand = command;
        command.RunSynchronously();
        DumpQueue($"After {nameof(ProcessNext)}", false);
    }
    catch (Exception ex)
    {
        MediaCore?.Log(MediaLogMessageType.Error, $"{ex.GetType()}: {ex.Message}");
        throw;
    }
    finally
    {
        ExecutingCommand = null;
    }
}
/// <summary>
/// Performs actions when the command has been executed.
/// This is useful to notify exceptions or update the state of the media.
/// </summary>
public override void PostProcess()
{
    MediaCore.State.UpdateFixedContainerProperties();

    if (ErrorException == null)
    {
        MediaCore.SendOnMediaChanged();

        if (PlayWhenCompleted)
        {
            MediaCore.Clock.Play();
        }

        MediaCore.State.UpdateMediaState(
            MediaCore.Clock.IsRunning ? PlaybackStatus.Play : PlaybackStatus.Pause);
    }
    else
    {
        MediaCore.SendOnMediaFailed(ErrorException);
        MediaCore.State.UpdateMediaState(PlaybackStatus.Pause);
    }

    MediaCore.Log(MediaLogMessageType.Debug, $"Command {CommandType}: Completed");
}
/// <summary>
/// Renders the specified media block.
/// This needs to return immediately so the calling thread is not disturbed.
/// </summary>
/// <param name="mediaBlock">The media block.</param>
/// <param name="clockPosition">The clock position.</param>
public void Render(MediaBlock mediaBlock, TimeSpan clockPosition)
{
    var block = mediaBlock as VideoBlock;
    if (block == null)
    {
        return;
    }

    if (IsRenderingInProgress.Value == true)
    {
        if (MediaCore?.State.IsPlaying ?? false)
        {
            MediaCore?.Log(MediaLogMessageType.Debug, $"{nameof(VideoRenderer)}: Frame skipped at {mediaBlock.StartTime}");
        }

        return;
    }

    // Flag the start of a rendering cycle
    IsRenderingInProgress.Value = true;

    // Send the packets to the CC renderer
    MediaElement?.CaptionsView?.SendPackets(block, MediaCore);

    // Create an action that holds GUI thread actions
    var foregroundAction = new Action(() =>
    {
        MediaElement?.CaptionsView?.Render(MediaElement.ClosedCaptionsChannel, clockPosition);
        ApplyLayoutTransforms(block);
    });

    var canStartForegroundTask = MediaElement.VideoView.ElementDispatcher != MediaElement.Dispatcher;
    var foregroundTask = canStartForegroundTask
        ? MediaElement.Dispatcher.InvokeAsync(foregroundAction)
        : null;

    // Ensure the target bitmap can be loaded
    MediaElement?.VideoView?.InvokeAsync(DispatcherPriority.Render, () =>
    {
        if (block.IsDisposed)
        {
            IsRenderingInProgress.Value = false;
            return;
        }

        // Run the foreground action if we could not start it in parallel.
        if (foregroundTask == null)
        {
            try
            {
                foregroundAction();
            }
            catch (Exception ex)
            {
                MediaElement?.MediaCore?.Log(
                    MediaLogMessageType.Error,
                    $"{nameof(VideoRenderer)} {ex.GetType()}: {nameof(Render)} layout/CC failed. {ex.Message}.");
            }
        }

        try
        {
            // Render the bitmap data
            var bitmapData = LockTargetBitmap(block);
            if (bitmapData != null)
            {
                LoadTargetBitmapBuffer(bitmapData, block);
                MediaElement.RaiseRenderingVideoEvent(block, bitmapData, clockPosition);
                RenderTargetBitmap(bitmapData, clockPosition);
            }
        }
        catch (Exception ex)
        {
            MediaElement?.MediaCore?.Log(
                MediaLogMessageType.Error,
                $"{nameof(VideoRenderer)} {ex.GetType()}: {nameof(Render)} bitmap failed. {ex.Message}.");
        }
        finally
        {
            if (foregroundTask != null)
            {
                try
                {
                    foregroundTask.Wait();
                }
                catch (Exception ex)
                {
                    MediaElement?.MediaCore?.Log(
                        MediaLogMessageType.Error,
                        $"{nameof(VideoRenderer)} {ex.GetType()}: {nameof(Render)} layout/CC failed. {ex.Message}.");
                }
            }

            // Always reset the rendering state
            IsRenderingInProgress.Value = false;
        }
    });
}
/// <summary>
/// Renders the specified media block.
/// </summary>
/// <param name="mediaBlock">The media block.</param>
/// <param name="clockPosition">The clock position.</param>
public void Render(MediaBlock mediaBlock, TimeSpan clockPosition)
{
    // We don't need to render anything while we are seeking. Simply drop the blocks.
    if (MediaCore.State.IsSeeking || HasFiredAudioDeviceStopped)
    {
        return;
    }

    var lockTaken = false;
    Monitor.TryEnter(SyncLock, SyncLockTimeout, ref lockTaken);
    if (lockTaken == false)
    {
        return;
    }

    try
    {
        if ((AudioDevice?.IsRunning ?? false) == false)
        {
            if (HasFiredAudioDeviceStopped == false)
            {
                MediaElement.RaiseAudioDeviceStoppedEvent();
                HasFiredAudioDeviceStopped = true;
            }

            return;
        }

        if (AudioBuffer == null)
        {
            return;
        }

        // Capture the media block reference
        if (mediaBlock is AudioBlock == false)
        {
            return;
        }

        var audioBlock = mediaBlock as AudioBlock;
        if (audioBlock == null)
        {
            return;
        }

        var audioBlocks = MediaCore.Blocks[MediaType.Audio];
        while (audioBlock != null)
        {
            if (audioBlock.TryAcquireReaderLock(out var readLock) == false)
            {
                return;
            }

            using (readLock)
            {
                // Write the block if we have to, avoiding repeated blocks.
                if (AudioBuffer.WriteTag < audioBlock.StartTime)
                {
                    AudioBuffer.Write(audioBlock.Buffer, audioBlock.SamplesBufferLength, audioBlock.StartTime, true);
                }

                // Stop adding if we have too much in there.
                if (AudioBuffer.CapacityPercent >= 0.5)
                {
                    break;
                }

                // Retrieve the following block
                audioBlock = audioBlocks.ContinuousNext(audioBlock) as AudioBlock;
            }
        }
    }
    catch (Exception ex)
    {
        MediaCore?.Log(MediaLogMessageType.Error,
            $"{ex.GetType()} in {nameof(AudioRenderer)}.{nameof(Render)}: {ex.Message}. Stack Trace:\r\n{ex.StackTrace}");
    }
    finally
    {
        Monitor.Exit(SyncLock);
    }
}
private bool Synchronize(byte[] targetBuffer, int targetBufferOffset, int requestedBytes, double speedRatio)
{
    /*
     * Wikipedia says:
     * For television applications, audio should lead video by no more than 15 milliseconds and audio should
     * lag video by no more than 45 milliseconds. For film, acceptable lip sync is considered to be no more
     * than 22 milliseconds in either direction.
     *
     * The Media and Acoustics Perception Lab says:
     * The results of the experiment determined that the average audio leading threshold for a/v sync
     * detection was 185.19 ms, with a standard deviation of 42.32 ms
     *
     * The ATSC says:
     * At first glance it seems loose: +90 ms to -185 ms as a Window of Acceptability
     * - Undetectable from -100 ms to +25 ms
     * - Detectable at -125 ms & +45 ms
     * - Becomes unacceptable at -185 ms & +90 ms
     *
     * NOTE: (- Sound delayed, + Sound advanced)
     */

    var audioLatencyMs = Latency.TotalMilliseconds;
    var isBeyondThreshold = false;
    var readableCount = AudioBuffer.ReadableCount;
    var rewindableCount = AudioBuffer.RewindableCount;

    if (audioLatencyMs > SyncThresholdLagging)
    {
        isBeyondThreshold = true;

        // A positive audio latency means we are rendering audio behind (after) the clock,
        // and therefore we need to advance the buffer before we read from it (skip some samples).
        if (speedRatio == 1.0)
        {
            MediaCore.Log(MediaLogMessageType.Warning,
                $"SYNC AUDIO: LATENCY: {Latency.Format()} | SKIP (samples being rendered too late)");
        }

        // Skip some samples from the buffer.
        var audioLatencyBytes = WaveFormat.ConvertLatencyToByteSize(Convert.ToInt32(Math.Ceiling(audioLatencyMs)));
        AudioBuffer.Skip(Math.Min(audioLatencyBytes, readableCount));
    }
    else if (audioLatencyMs < SyncThresholdLeading)
    {
        isBeyondThreshold = true;

        // Compute the latency in bytes
        var audioLatencyBytes = WaveFormat.ConvertLatencyToByteSize(Convert.ToInt32(Math.Ceiling(Math.Abs(audioLatencyMs))));

        // audioLatencyBytes = requestedBytes; // uncomment this line to enable rewinding.
        if (audioLatencyBytes > requestedBytes && audioLatencyBytes < rewindableCount)
        {
            // This means we have the audio pointer a little too ahead of time and we need to
            // rewind it the requested amount of bytes.
            AudioBuffer.Rewind(Math.Min(audioLatencyBytes, rewindableCount));
        }
        else
        {
            // A negative audio latency means we are rendering audio ahead (before) the clock,
            // and therefore we need to render some silence until the clock catches up.
            if (speedRatio == 1.0)
            {
                MediaCore.Log(MediaLogMessageType.Warning,
                    $"SYNC AUDIO: LATENCY: {Latency.Format()} | WAIT (samples being rendered too early)");
            }

            // Render silence for the wait time and return
            Array.Clear(targetBuffer, targetBufferOffset, requestedBytes);
            return false;
        }
    }

    // Perform minor adjustments until the delay is less than 10 ms in either direction
    if (MediaCore.State.HasVideo && speedRatio == 1.0 &&
        isBeyondThreshold == false && Math.Abs(audioLatencyMs) > SyncThresholdPerfect)
    {
        var stepDurationMillis = Convert.ToInt32(Math.Min(SyncThresholdMaxStep, Math.Abs(audioLatencyMs)));
        var stepDurationBytes = WaveFormat.ConvertLatencyToByteSize(stepDurationMillis);

        if (audioLatencyMs > SyncThresholdPerfect)
        {
            AudioBuffer.Skip(Math.Min(stepDurationBytes, readableCount));
        }
        else if (audioLatencyMs < -SyncThresholdPerfect)
        {
            AudioBuffer.Rewind(Math.Min(stepDurationBytes, rewindableCount));
        }
    }

    return true;
}
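// Illustrative sketch (not part of the renderer): the branching above reduces to a small decision,
// shown here as a standalone function. The SyncAction enum and parameter names are assumptions for
// illustration only; the real method also performs the skip/rewind/silence side effects inline.
internal enum SyncAction { None, Skip, Rewind, RenderSilence }

internal static SyncAction DecideSyncAction(
    double latencyMs, int latencyBytes, int requestedBytes, int rewindableCount,
    double laggingThresholdMs, double leadingThresholdMs)
{
    // Audio is rendering too late: drop (skip) buffered samples to catch up with the clock.
    if (latencyMs > laggingThresholdMs)
        return SyncAction.Skip;

    // Audio is rendering too early: rewind when enough history is buffered;
    // otherwise output silence until the clock catches up.
    if (latencyMs < leadingThresholdMs)
    {
        return (latencyBytes > requestedBytes && latencyBytes < rewindableCount)
            ? SyncAction.Rewind
            : SyncAction.RenderSilence;
    }

    // Within thresholds: at most a minor skip/rewind adjustment is applied.
    return SyncAction.None;
}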
/// <summary>
/// Called whenever the audio driver requests samples.
/// Do not call this method directly.
/// </summary>
/// <param name="targetBuffer">The render buffer.</param>
/// <param name="targetBufferOffset">The render buffer offset.</param>
/// <param name="requestedBytes">The requested bytes.</param>
/// <returns>The number of bytes that were read.</returns>
public int Read(byte[] targetBuffer, int targetBufferOffset, int requestedBytes)
{
    // We sync-lock the reads to avoid null reference exceptions, as Destroy might have been called
    lock (SyncLock)
    {
        try
        {
            WaitForReadyEvent.Complete();
            var speedRatio = MediaCore?.State.SpeedRatio ?? 0;

            // Render silence if we don't need to output samples
            if (MediaCore.State.IsPlaying == false || speedRatio <= 0d ||
                MediaCore.State.HasAudio == false || AudioBuffer.ReadableCount <= 0)
            {
                Array.Clear(targetBuffer, targetBufferOffset, requestedBytes);
                return requestedBytes;
            }

            // Ensure a preallocated ReadBuffer
            if (ReadBuffer == null || ReadBuffer.Length < Convert.ToInt32(requestedBytes * Constants.Controller.MaxSpeedRatio))
            {
                ReadBuffer = new byte[Convert.ToInt32(requestedBytes * Constants.Controller.MaxSpeedRatio)];
            }

            // First part of DSP: perform AV synchronization if needed
            if (MediaCore.State.HasVideo &&
                Synchronize(targetBuffer, targetBufferOffset, requestedBytes, speedRatio) == false)
            {
                return requestedBytes;
            }

            // Perform DSP
            if (speedRatio < 1.0)
            {
                if (AudioProcessor != null)
                {
                    ReadAndUseAudioProcessor(requestedBytes, speedRatio);
                }
                else
                {
                    ReadAndSlowDown(requestedBytes, speedRatio);
                }
            }
            else if (speedRatio > 1.0)
            {
                if (AudioProcessor != null)
                {
                    ReadAndUseAudioProcessor(requestedBytes, speedRatio);
                }
                else
                {
                    ReadAndSpeedUp(requestedBytes, true, speedRatio);
                }
            }
            else
            {
                if (requestedBytes > AudioBuffer.ReadableCount)
                {
                    Array.Clear(targetBuffer, targetBufferOffset, requestedBytes);
                    return requestedBytes;
                }

                AudioBuffer.Read(requestedBytes, ReadBuffer, 0);
            }

            ApplyVolumeAndBalance(targetBuffer, targetBufferOffset, requestedBytes);
        }
        catch (Exception ex)
        {
            MediaCore?.Log(MediaLogMessageType.Error,
                $"{ex.GetType()} in {nameof(AudioRenderer)}.{nameof(Read)}: {ex.Message}. Stack Trace:\r\n{ex.StackTrace}");
            Array.Clear(targetBuffer, targetBufferOffset, requestedBytes);
        }

        return requestedBytes;
    }
}
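// Illustrative sketch (not part of the renderer): ApplyVolumeAndBalance is not shown in this
// excerpt. Assuming interleaved 16-bit little-endian PCM, the volume half of that step could look
// like this minimal version, which scales and clamps each sample in place.
private static void ApplyVolumeSketch(byte[] buffer, int offset, int count, double volume)
{
    for (var i = offset; i + 1 < offset + count; i += 2)
    {
        // Reassemble the 16-bit sample, scale it, and clamp to the valid range.
        var sample = (short)(buffer[i] | (buffer[i + 1] << 8));
        var scaled = (short)Math.Max(short.MinValue, Math.Min(short.MaxValue, sample * volume));

        // Write the scaled sample back in little-endian order.
        buffer[i] = (byte)(scaled & 0xFF);
        buffer[i + 1] = (byte)((scaled >> 8) & 0xFF);
    }
}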
private bool Synchronize(byte[] targetBuffer, int targetBufferOffset, int requestedBytes, double speedRatio)
{
    #region Documentation

    /*
     * Wikipedia says:
     * For television applications, audio should lead video by no more than 15 milliseconds and audio should
     * lag video by no more than 45 milliseconds. For film, acceptable lip sync is considered to be no more
     * than 22 milliseconds in either direction.
     *
     * The Media and Acoustics Perception Lab says:
     * The results of the experiment determined that the average audio leading threshold for a/v sync
     * detection was 185.19 ms, with a standard deviation of 42.32 ms
     *
     * The ATSC says:
     * At first glance it seems loose: +90 ms to -185 ms as a Window of Acceptability
     * - Undetectable from -100 ms to +25 ms
     * - Detectable at -125 ms & +45 ms
     * - Becomes unacceptable at -185 ms & +90 ms
     *
     * NOTE: (- Sound delayed, + Sound advanced)
     */

    #endregion

    #region Private State

    var audioLatencyMs = Latency.TotalMilliseconds;
    var isBeyondThreshold = false;
    var readableCount = AudioBuffer.ReadableCount;
    var rewindableCount = AudioBuffer.RewindableCount;

    #endregion

    #region Sync Give-up Conditions

    if (MediaElement.RendererOptions.AudioDisableSync)
    {
        return true;
    }

    // Determine if we should continue to perform syncs.
    // Some live, non-seekable streams will send out-of-sync audio packets,
    // and after trying too many times we simply give up.
    // The sync conditions are reset in the Update method.
    if ((MediaCore?.State?.IsSeekable ?? true) == false && PlaySyncGaveUp.Value == false)
    {
        // 1. Determine if a sync is required
        if (audioLatencyMs > SyncThresholdLagging ||
            audioLatencyMs < SyncThresholdLeading ||
            Math.Abs(audioLatencyMs) > SyncThresholdPerfect)
        {
            PlaySyncCount++;
        }

        // 2. Compute the variables to determine give-up conditions
        var playbackElapsedSeconds = PlaySyncStartTime.HasValue == false
            ? 0
            : DateTime.UtcNow.Subtract(PlaySyncStartTime.Value).TotalSeconds;
        var syncsPerSecond = PlaySyncCount / playbackElapsedSeconds;

        // 3. Determine if we need to give up
        if (playbackElapsedSeconds >= 3 && syncsPerSecond >= 3)
        {
            MediaCore.Log(MediaLogMessageType.Warning,
                $"SYNC AUDIO: GIVE UP | SECS: {playbackElapsedSeconds:0.00}; SYN: {PlaySyncCount}; RATE: {syncsPerSecond:0.00} SYN/s; LAT: {audioLatencyMs} ms.");
            PlaySyncGaveUp.Value = true;
        }
    }

    // Detect if we have given up
    if (PlaySyncGaveUp.Value == true)
    {
        return true;
    }

    #endregion

    #region Large Latency Handling

    if (audioLatencyMs > SyncThresholdLagging)
    {
        isBeyondThreshold = true;

        // A positive audio latency means we are rendering audio behind (after) the clock,
        // and therefore we need to advance the buffer before we read from it (skip some samples).
        if (speedRatio == 1.0)
        {
            MediaCore.Log(MediaLogMessageType.Warning,
                $"SYNC AUDIO: LATENCY: {audioLatencyMs} ms. | SKIP (samples being rendered too late)");
        }

        // Skip some samples from the buffer.
        var audioLatencyBytes = WaveFormat.ConvertMillisToByteSize(Convert.ToInt32(Math.Ceiling(audioLatencyMs)));
        AudioBuffer.Skip(Math.Min(audioLatencyBytes, readableCount));
    }
    else if (audioLatencyMs < SyncThresholdLeading)
    {
        isBeyondThreshold = true;

        // Compute the latency in bytes
        var audioLatencyBytes = WaveFormat.ConvertMillisToByteSize(Convert.ToInt32(Math.Ceiling(Math.Abs(audioLatencyMs))));

        // audioLatencyBytes = requestedBytes; // uncomment this line to enable rewinding.
        if (audioLatencyBytes > requestedBytes && audioLatencyBytes < rewindableCount)
        {
            // This means we have the audio pointer a little too ahead of time and we need to
            // rewind it the requested amount of bytes.
            AudioBuffer.Rewind(Math.Min(audioLatencyBytes, rewindableCount));
        }
        else
        {
            // A negative audio latency means we are rendering audio ahead (before) the clock,
            // and therefore we need to render some silence until the clock catches up.
            if (speedRatio == 1.0)
            {
                MediaCore.Log(MediaLogMessageType.Warning,
                    $"SYNC AUDIO: LATENCY: {audioLatencyMs} ms. | WAIT (samples being rendered too early)");
            }

            // Render silence for the wait time and return
            Array.Clear(targetBuffer, targetBufferOffset, requestedBytes);
            return false;
        }
    }

    #endregion

    #region Small Latency Handling

    // Perform minor adjustments until the delay is less than 10 ms in either direction
    if (MediaCore.State.HasVideo && speedRatio == 1.0 &&
        isBeyondThreshold == false && Math.Abs(audioLatencyMs) > SyncThresholdPerfect)
    {
        var stepDurationMillis = Convert.ToInt32(Math.Min(SyncThresholdMaxStep, Math.Abs(audioLatencyMs)));
        var stepDurationBytes = WaveFormat.ConvertMillisToByteSize(stepDurationMillis);

        if (audioLatencyMs > SyncThresholdPerfect)
        {
            AudioBuffer.Skip(Math.Min(stepDurationBytes, readableCount));
        }
        else if (audioLatencyMs < -SyncThresholdPerfect)
        {
            AudioBuffer.Rewind(Math.Min(stepDurationBytes, rewindableCount));
        }
    }

    #endregion

    return true;
}
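// Illustrative sketch (not part of the renderer): the give-up heuristic above, distilled.
// After at least 3 seconds of playback on a non-seekable stream, if corrections keep being
// required at a rate of 3 or more per second, syncing is abandoned for the session.
private static bool ShouldGiveUpSyncSketch(DateTime? playSyncStartUtc, int playSyncCount)
{
    if (playSyncStartUtc.HasValue == false)
        return false;

    var elapsedSeconds = DateTime.UtcNow.Subtract(playSyncStartUtc.Value).TotalSeconds;
    return elapsedSeconds >= 3 && playSyncCount / elapsedSeconds >= 3;
}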