/// <summary>
/// Renders the specified media block.
/// </summary>
/// <param name="mediaBlock">The media block.</param>
/// <param name="clockPosition">The clock position.</param>
public void Render(MediaBlock mediaBlock, TimeSpan clockPosition)
{
    // We don't need to render anything while we are seeking. Simply drop the blocks.
    lock (SyncLock)
    {
        if (MediaCore.State.IsSeeking)
        {
            return;
        }

        if (AudioBuffer == null)
        {
            return;
        }

        // Capture Media Block Reference
        if (mediaBlock is AudioBlock == false)
        {
            return;
        }

        var audioBlock = mediaBlock as AudioBlock;
        if (audioBlock == null)
        {
            return;
        }

        var audioBlocks = MediaCore.Blocks[MediaType.Audio];
        while (audioBlock != null)
        {
            if (audioBlock.TryAcquireReaderLock(out var readLock) == false)
            {
                return;
            }

            using (readLock)
            {
                // Write the block if we have to, avoiding repeated blocks.
                if (AudioBuffer.WriteTag < audioBlock.StartTime)
                {
                    MediaElement.RaiseRenderingAudioEvent(audioBlock, clockPosition);
                    AudioBuffer.Write(audioBlock.Buffer, audioBlock.SamplesBufferLength, audioBlock.StartTime, true);
                }

                // Stop adding if we have too much in there.
                if (AudioBuffer.CapacityPercent >= 0.8)
                {
                    break;
                }

                // Retrieve the following block
                audioBlock = audioBlocks.Next(audioBlock) as AudioBlock;
            }
        }
    }
}
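// The Render method above depends on a circular AudioBuffer with a WriteTag
// (the StartTime of the last block written, used to skip duplicate blocks) and
// a CapacityPercent (how full the buffer is, used to stop pre-filling). The
// sketch below is an illustrative assumption of that contract, not the
// library's actual implementation; the names and the overwrite handling are
// simplified for clarity.
using System;

internal sealed class CircularAudioBufferSketch
{
    private readonly byte[] _buffer;
    private int _writeIndex;
    private int _readableCount;

    public CircularAudioBufferSketch(int capacity)
    {
        _buffer = new byte[capacity];
    }

    // Start time of the most recently written block; Render compares this
    // against a block's StartTime to avoid writing the same block twice.
    public TimeSpan WriteTag { get; private set; } = TimeSpan.MinValue;

    // Fraction of the buffer currently holding unread data (0.0 to 1.0).
    public double CapacityPercent => (double)_readableCount / _buffer.Length;

    public int ReadableCount => _readableCount;

    public void Write(byte[] source, int length, TimeSpan writeTag, bool overwrite)
    {
        // Overwrite semantics are omitted; this simply wraps around the buffer.
        for (var i = 0; i < length; i++)
        {
            _buffer[_writeIndex] = source[i];
            _writeIndex = (_writeIndex + 1) % _buffer.Length;
        }

        _readableCount = Math.Min(_readableCount + length, _buffer.Length);
        WriteTag = writeTag;
    }
}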
/// <summary>
/// Renders the specified media block.
/// </summary>
/// <param name="mediaBlock">The media block.</param>
/// <param name="clockPosition">The clock position.</param>
public void Render(MediaBlock mediaBlock, TimeSpan clockPosition)
{
    // We don't need to render anything while we are seeking. Simply drop the blocks.
    if (MediaElement.IsSeeking)
    {
        return;
    }

    // Update the speed ratio
    SpeedRatio = MediaElement?.Clock?.SpeedRatio ?? 0d;

    if (AudioBuffer == null)
    {
        return;
    }

    // Capture Media Block Reference
    var audioBlock = mediaBlock as AudioBlock;
    if (audioBlock == null)
    {
        return;
    }

    var audioBlocks = MediaElement.Blocks[MediaType.Audio];
    while (audioBlock != null)
    {
        // Write the block if we have to, avoiding repeated blocks.
        if (AudioBuffer.WriteTag < audioBlock.StartTime)
        {
            MediaElement.RaiseRenderingAudioEvent(audioBlock, clockPosition);
            AudioBuffer.Write(audioBlock.Buffer, audioBlock.BufferLength, audioBlock.StartTime, true);
        }

        // Stop adding if we have too much in there.
        if (AudioBuffer.CapacityPercent >= 0.8)
        {
            break;
        }

        // Retrieve the following block
        audioBlock = audioBlocks.Next(audioBlock) as AudioBlock;
    }
}
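// In the variant above, the speed ratio is captured from the clock and later
// used when reading audio out of the buffer. The helper below is a hedged,
// standalone sketch (not taken from the code above) of how a speed ratio is
// commonly turned into a source byte count, keeping the result aligned to
// whole sample frames; "blockAlign" (bytes per frame) is an assumed parameter.
internal static class SpeedRatioMathSketch
{
    public static int SourceBytesFor(int requestedBytes, double speedRatio, int blockAlign)
    {
        // Example: requestedBytes = 4096, speedRatio = 2.0, blockAlign = 4 -> 8192 source bytes.
        var bytes = (int)(requestedBytes * speedRatio);
        return bytes - (bytes % blockAlign);
    }
}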
/// <inheritdoc />
public int Read(byte[] targetBuffer, int targetBufferOffset, int requestedBytes)
{
    // We sync-lock the reads to avoid null reference exceptions as destroy might have been called
    var lockTaken = false;
    Monitor.TryEnter(SyncLock, SyncLockTimeout, ref lockTaken);
    if (lockTaken == false || HasFiredAudioDeviceStopped)
    {
        Array.Clear(targetBuffer, targetBufferOffset, requestedBytes);
        return requestedBytes;
    }

    try
    {
        WaitForReadyEvent.Complete();
        var speedRatio = MediaCore.State.SpeedRatio;

        // Render silence if we don't need to output samples
        if (MediaCore.State.IsPlaying == false || speedRatio <= 0d ||
            MediaCore.State.HasAudio == false || AudioBuffer.ReadableCount <= 0)
        {
            Array.Clear(targetBuffer, targetBufferOffset, requestedBytes);
            return requestedBytes;
        }

        // Ensure a pre-allocated ReadBuffer
        if (ReadBuffer == null || ReadBuffer.Length < Convert.ToInt32(requestedBytes * Constants.Controller.MaxSpeedRatio))
        {
            ReadBuffer = new byte[Convert.ToInt32(requestedBytes * Constants.Controller.MaxSpeedRatio)];
        }

        // First part of DSP: Perform AV Synchronization if needed
        if (MediaCore.State.HasVideo &&
            Synchronize(targetBuffer, targetBufferOffset, requestedBytes, speedRatio) == false)
        {
            return requestedBytes;
        }

        var startPosition = Position;

        // Perform DSP
        if (speedRatio < 1.0)
        {
            if (AudioProcessor != null)
            {
                ReadAndUseAudioProcessor(requestedBytes, speedRatio);
            }
            else
            {
                ReadAndSlowDown(requestedBytes, speedRatio);
            }
        }
        else if (speedRatio > 1.0)
        {
            if (AudioProcessor != null)
            {
                ReadAndUseAudioProcessor(requestedBytes, speedRatio);
            }
            else
            {
                ReadAndSpeedUp(requestedBytes, true, speedRatio);
            }
        }
        else
        {
            if (requestedBytes > AudioBuffer.ReadableCount)
            {
                Array.Clear(targetBuffer, targetBufferOffset, requestedBytes);
                return requestedBytes;
            }

            AudioBuffer.Read(requestedBytes, ReadBuffer, 0);
        }

        ApplyVolumeAndBalance(targetBuffer, targetBufferOffset, requestedBytes);

        MediaElement.RaiseRenderingAudioEvent(
            targetBuffer, requestedBytes, startPosition, WaveFormat.ConvertByteSizeToDuration(requestedBytes));
    }
    catch (Exception ex)
    {
        this.LogError(Aspects.AudioRenderer, $"{nameof(AudioRenderer)}.{nameof(Read)} has faulted.", ex);
        Array.Clear(targetBuffer, targetBufferOffset, requestedBytes);
    }
    finally
    {
        Monitor.Exit(SyncLock);
    }

    return requestedBytes;
}
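// ApplyVolumeAndBalance is called above but not shown. The sketch below is an
// illustrative assumption of how volume and balance could be applied to
// 16-bit interleaved stereo PCM; it is not the library's implementation.
// "volume" is assumed to range 0..1 and "balance" -1 (full left) to +1 (full right).
using System;

internal static class VolumeBalanceSketch
{
    public static void Apply(byte[] buffer, int offset, int count, double volume, double balance)
    {
        var leftGain = volume * (balance > 0 ? 1.0 - balance : 1.0);
        var rightGain = volume * (balance < 0 ? 1.0 + balance : 1.0);

        // Each stereo frame is 4 bytes: left sample (2 bytes, little-endian) then right sample.
        for (var i = offset; i + 3 < offset + count; i += 4)
        {
            var left = (short)(buffer[i] | (buffer[i + 1] << 8));
            var right = (short)(buffer[i + 2] | (buffer[i + 3] << 8));

            // Scale and clamp each channel to the 16-bit range.
            var scaledLeft = (short)Math.Max(short.MinValue, Math.Min(short.MaxValue, left * leftGain));
            var scaledRight = (short)Math.Max(short.MinValue, Math.Min(short.MaxValue, right * rightGain));

            buffer[i] = (byte)(scaledLeft & 0xFF);
            buffer[i + 1] = (byte)((scaledLeft >> 8) & 0xFF);
            buffer[i + 2] = (byte)(scaledRight & 0xFF);
            buffer[i + 3] = (byte)((scaledRight >> 8) & 0xFF);
        }
    }
}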
/// <summary>
/// Renders the specified media block.
/// </summary>
/// <param name="mediaBlock">The media block.</param>
/// <param name="clockPosition">The clock position.</param>
public void Render(MediaBlock mediaBlock, TimeSpan clockPosition)
{
    // We don't need to render anything while we are seeking. Simply drop the blocks.
    if (MediaCore.State.IsSeeking)
    {
        return;
    }

    var lockTaken = false;
    Monitor.TryEnter(SyncLock, SyncLockTimeout, ref lockTaken);
    if (lockTaken == false)
    {
        return;
    }

    try
    {
        if ((AudioDevice?.IsRunning ?? false) == false)
        {
            // TODO: Handle this? -- see issue #93
            return;
        }

        if (AudioBuffer == null)
        {
            return;
        }

        // Capture Media Block Reference
        if (mediaBlock is AudioBlock == false)
        {
            return;
        }

        var audioBlock = mediaBlock as AudioBlock;
        if (audioBlock == null)
        {
            return;
        }

        var audioBlocks = MediaCore.Blocks[MediaType.Audio];
        while (audioBlock != null)
        {
            if (audioBlock.TryAcquireReaderLock(out var readLock) == false)
            {
                return;
            }

            using (readLock)
            {
                // Write the block if we have to, avoiding repeated blocks.
                if (AudioBuffer.WriteTag < audioBlock.StartTime)
                {
                    MediaElement.RaiseRenderingAudioEvent(audioBlock, clockPosition);
                    AudioBuffer.Write(audioBlock.Buffer, audioBlock.SamplesBufferLength, audioBlock.StartTime, true);
                }

                // Stop adding if we have too much in there.
                if (AudioBuffer.CapacityPercent >= 0.8)
                {
                    break;
                }

                // Retrieve the following block
                audioBlock = audioBlocks.Next(audioBlock) as AudioBlock;
            }
        }
    }
    catch (Exception ex)
    {
        MediaCore?.Log(
            MediaLogMessageType.Error,
            $"{ex.GetType()} in {nameof(AudioRenderer)}.{nameof(Render)}: {ex.Message}. Stack Trace:\r\n{ex.StackTrace}");
    }
    finally
    {
        Monitor.Exit(SyncLock);
    }
}
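// Unlike the lock-based variant further up, the Render variant above uses
// Monitor.TryEnter with a timeout so the render thread drops a pass instead of
// blocking behind a slow reader. The standalone sketch below isolates that
// pattern; the names (SyncLock, SyncLockTimeout, renderAction) are
// illustrative assumptions, not the library's members.
using System;
using System.Threading;

internal static class TryEnterPatternSketch
{
    private static readonly object SyncLock = new object();
    private static readonly TimeSpan SyncLockTimeout = TimeSpan.FromMilliseconds(100);

    public static void RenderOrSkip(Action renderAction)
    {
        var lockTaken = false;
        Monitor.TryEnter(SyncLock, SyncLockTimeout, ref lockTaken);
        if (lockTaken == false)
        {
            // Skip this render pass rather than stalling the calling thread.
            return;
        }

        try
        {
            renderAction();
        }
        finally
        {
            Monitor.Exit(SyncLock);
        }
    }
}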