/// <summary>
/// Resets the given wait event by completing it and immediately re-arming it.
/// Any exception raised during the reset is deliberately ignored (best-effort).
/// </summary>
/// <param name="waitEvent">The wait event to complete and re-arm.</param>
static void Callback(IWaitEvent waitEvent)
{
    try
    {
        waitEvent.Complete();
        waitEvent.Begin();
    }
    catch
    {
        // Best-effort reset: failures here are intentionally suppressed.
    }
}
/// <summary>
/// Delay implementation that blocks on a wait event which a
/// thread-pool work item signals once <see cref="DelaySleep"/> returns.
/// </summary>
private void DelayThreadPool()
{
    // Lazily create the slim wait event in the completed state.
    if (DelayEvent == null)
        DelayEvent = WaitEventFactory.Create(isCompleted: true, useSlim: true);

    // Arm the event, hand the sleep off to the thread pool, and block
    // until the work item signals completion.
    DelayEvent.Begin();

    ThreadPool.QueueUserWorkItem(state =>
    {
        DelaySleep();
        DelayEvent.Complete();
    });

    DelayEvent.Wait();
}
/// <summary>
/// Delay implementation that waits on an event which is signaled
/// from a queued thread-pool work item after the sleep completes.
/// </summary>
private void DelayThreadPool()
{
    // Create the wait event on first use.
    _delayEvent = _delayEvent ?? WaitEventFactory.Create(true, true);

    _delayEvent.Begin();

    // Perform the actual sleep on the thread pool and signal when done.
    ThreadPool.QueueUserWorkItem((state) =>
    {
        DelaySleep();
        _delayEvent.Complete();
    });

    // Block the caller until the work item signals.
    _delayEvent.Wait();
}
/// <summary>
/// Implementation of a delay using a recurring <see cref="Timer"/> that
/// pulses a wait event on a fixed 15 millisecond period.
/// </summary>
private void DelayTimerEvent()
{
    const int timeoutMilliseconds = 15;

    lock (SyncRoot)
    {
        // Lazily create the (non-slim, initially armed) wait event.
        if (DelayEvent == null)
            DelayEvent = WaitEventFactory.Create(isCompleted: false, useSlim: false);

        // Lazily create the timer that completes the event every period.
        if (DelayTimer == null)
            DelayTimer = new Timer(s => DelayEvent.Complete(), DelayEvent, timeoutMilliseconds, timeoutMilliseconds);

        // Wait for at most one period, then re-arm for the next caller.
        DelayEvent.Wait(TimeSpan.FromMilliseconds(timeoutMilliseconds));
        DelayEvent.Begin();
    }
}
/// <summary>
/// Initializes the audio renderer.
/// Call the Play Method to start reading samples
/// </summary>
private void Initialize()
{
    Destroy();

    // Enumerate devices. The default device is the first one so we check
    // that we have more than 1 device (other than the default stub)
    var hasAudioDevices = MediaElement.RendererOptions.UseLegacyAudioOut
        ? LegacyAudioPlayer.EnumerateDevices().Count > 1
        : DirectSoundPlayer.EnumerateDevices().Count > 1;

    // Bail out when no audio output device is present.
    if (!hasAudioDevices)
    {
        WaitForReadyEvent = null;
        MediaCore.Log(MediaLogMessageType.Warning, $"AUDIO OUT: No audio device found for output.");
        return;
    }

    // Initialize the SoundTouch Audio Processor (if available)
    if (SoundTouch.IsAvailable)
    {
        AudioProcessor = new SoundTouch
        {
            Channels = Convert.ToUInt32(WaveFormat.Channels),
            SampleRate = Convert.ToUInt32(WaveFormat.SampleRate)
        };
    }
    else
    {
        AudioProcessor = null;
    }

    // Initialize the Audio Device, preferring the legacy device when configured.
    if (MediaElement.RendererOptions.UseLegacyAudioOut)
    {
        AudioDevice = new LegacyAudioPlayer(
            this, MediaElement.RendererOptions.LegacyAudioDevice?.DeviceId ?? -1);
    }
    else
    {
        AudioDevice = new DirectSoundPlayer(
            this, MediaElement.RendererOptions.DirectSoundDevice?.DeviceId ?? DirectSoundPlayer.DefaultPlaybackDeviceId);
    }

    // Create the Audio Buffer
    SampleBlockSize = Constants.Audio.BytesPerSample * Constants.Audio.ChannelCount;
    var bufferLength = WaveFormat.ConvertMillisToByteSize(2000); // 2-second buffer
    AudioBuffer = new CircularBuffer(bufferLength);
    AudioDevice.Start();
}
/// <summary>
/// Initializes a new instance of the <see cref="CommandManager" /> class.
/// </summary>
/// <param name="mediaCore">The media engine that owns this command manager.</param>
public CommandManager(MediaEngine mediaCore)
{
    MediaCore = mediaCore;

    // Both events start in the completed state so the first command
    // and the first seek do not block.
    DirectCommandEvent = WaitEventFactory.Create(isCompleted: true, useSlim: true);
    SeekingDone = WaitEventFactory.Create(isCompleted: true, useSlim: true);
}
/// <summary>
/// Performs the actions represented by this deferred task:
/// pauses the clock, waits for the worker cycles, lets the user change
/// stream selections, recreates media components, and then either seeks
/// (seekable media) or quick-buffers (non-seekable media).
/// Any exception is captured into <c>ErrorException</c> rather than thrown.
/// </summary>
protected override void PerformActions()
{
    var m = MediaCore;
    m.Log(MediaLogMessageType.Debug, $"Command {CommandType}: Entered");

    try
    {
        m.State.UpdateMediaState(PlaybackStatus.Manual);

        // Signal the start of a sync-buffering scenario
        m.Clock.Pause();

        // Wait for the rendering and reading cycles to complete before
        // components are swapped out from under them.
        var workerEvents = new IWaitEvent[] { m.BlockRenderingCycle, m.PacketReadingCycle };
        foreach (var workerEvent in workerEvents)
        {
            workerEvent.Wait();
        }

        // Signal a change so the user get the chance to update
        // selected streams and options
        m.SendOnMediaChanging();

        // Side load subtitles
        m.PreloadSubtitles();

        // Capture the current media types before components change
        var oldMediaTypes = m.Container.Components.MediaTypes.ToArray();

        // Recreate selected streams as media components
        var mediaTypes = m.Container.UpdateComponents();
        m.State.UpdateFixedContainerProperties();

        // Compute the media types whose blocks/renderers must be torn down.
        // We always remove the audio component in case there is a change in audio device.
        var removableMediaTypes = oldMediaTypes
            .Where(t => mediaTypes.Contains(t) == false)
            .Union(new[] { MediaType.Audio })
            .Distinct()
            .ToArray();

        // Tear down component blocks and renderers that no longer exist
        foreach (var t in removableMediaTypes)
        {
            // Remove the renderer for the component
            if (m.Renderers.ContainsKey(t))
            {
                m.Renderers[t].Close();
                m.Renderers.Remove(t);
            }

            // Remove the block buffer for the component
            if (m.Blocks.ContainsKey(t))
            {
                m.Blocks[t]?.Dispose();
                m.Blocks.Remove(t);
            }
        }

        // Create the block buffers and renderers as necessary, and reset
        // the surviving ones so they start clean.
        foreach (var t in mediaTypes)
        {
            if (m.Blocks.ContainsKey(t) == false)
            {
                m.Blocks[t] = new MediaBlockBuffer(Constants.MaxBlocks[t], t);
            }

            if (m.Renderers.ContainsKey(t) == false)
            {
                m.Renderers[t] = MediaEngine.Platform.CreateRenderer(t, m);
            }

            m.Blocks[t].Clear();
            m.Renderers[t].WaitForReadyState();
        }

        // Depending on whether or not the media is seekable
        // perform either a seek operation or a quick buffering operation.
        if (m.State.IsSeekable)
        {
            // Let's simply do an automated seek
            var seekCommand = new SeekCommand(m, m.WallClock);
            seekCommand.Execute();
            return;
        }
        else
        {
            // Let's perform quick-buffering
            m.Container.Components.RunQuickBuffering(m);

            // Mark the renderers as invalidated
            foreach (var t in mediaTypes)
            {
                m.InvalidateRenderer(t);
            }
        }
    }
    catch (Exception ex)
    {
        // Record the failure so the issuer of this command can inspect it.
        ErrorException = ex;
    }
}
/// <summary>
/// Provides the implementation of the command: pauses playback, waits for
/// the worker cycles, notifies the connector of the change, recreates media
/// components, and restores playback state. Errors are currently swallowed
/// (see the TODO in the catch block).
/// </summary>
/// <returns>The awaitable task.</returns>
internal override async Task ExecuteInternal()
{
    var m = Manager.MediaCore;

    // Avoid running the command if run conditions are not met
    if (m == null || m.IsDisposed || m.State.IsOpen == false || m.State.IsOpening)
    {
        return;
    }

    var resumeClock = false;
    var isSeeking = m.State.IsSeeking;

    try
    {
        // Signal the start of a changing event
        m.MediaChangingDone.Begin();
        m.State.IsSeeking = true;

        // Signal the start of a sync-buffering scenario
        m.HasDecoderSeeked = true;
        resumeClock = m.Clock.IsRunning;
        m.Clock.Pause();

        // Wait for the cycles to complete
        var workerEvents = new IWaitEvent[] { m.BlockRenderingCycle, m.PacketReadingCycle };
        foreach (var workerEvent in workerEvents)
        {
            workerEvent.Wait();
        }

        // Send the changing event to the connector, then rebuild the
        // components and capture the media types before and after so we
        // can tell which ones were dropped.
        var beforeComponentTypes = m.Container.Components.MediaTypes;
        await m.SendOnMediaChanging();
        m.Container.UpdateComponents();
        var afterComponentTypes = m.Container.Components.MediaTypes;
        var disposableComponentTypes = beforeComponentTypes
            .Where(c => afterComponentTypes.Contains(c) == false)
            .ToArray();

        // Remove components that are no longer needed
        foreach (var t in disposableComponentTypes)
        {
            if (m.Renderers.ContainsKey(t))
            {
                m.Renderers[t].Close();
                m.Renderers.Remove(t);
            }

            if (m.Blocks.ContainsKey(t))
            {
                m.Blocks[t]?.Dispose();
                m.Blocks.Remove(t);
            }
        }

        // Create the block buffers and renderers as necessary
        // TODO: For smoother transition, only invalidate/change the components
        // that actually changed.
        foreach (var t in afterComponentTypes)
        {
            if (m.Blocks.ContainsKey(t) == false)
            {
                m.Blocks[t] = new MediaBlockBuffer(Constants.MaxBlocks[t], t);
            }

            if (m.Renderers.ContainsKey(t) == false)
            {
                m.Renderers[t] = MediaEngine.Platform.CreateRenderer(t, m);
            }

            m.Blocks[t].Clear();
            m.Renderers[t].WaitForReadyState();
        }

        // Seekable media gets an automated seek; otherwise just invalidate
        // the renderers so they pick up the new components.
        if (m.State.IsSeekable)
        {
            var seekCommand = new SeekCommand(Manager, m.WallClock);
            seekCommand.RunSynchronously();
        }
        else
        {
            foreach (var t in afterComponentTypes)
            {
                m.InvalidateRenderer(t);
            }
        }
    }
    catch
    {
        // TODO: Handle errors here
    }
    finally
    {
        // Restore the clock and seeking state and signal completion,
        // regardless of whether the change succeeded.
        if (resumeClock)
        {
            m?.Clock?.Play();
        }

        m.State.IsSeeking = isSeeking;
        m.MediaChangingDone.Complete();
    }
}
/// <summary>
/// Provides the implementation of the command: guards against re-entry via
/// the IsChanging flag, recreates media components after notifying the user,
/// and then either performs an automated seek (seekable media) or manually
/// reads and decodes packets until the main component buffer is half full.
/// Errors are currently swallowed (see the TODO in the catch block).
/// </summary>
/// <returns>
/// The awaitable task.
/// </returns>
internal override async Task ExecuteInternal()
{
    var m = Manager.MediaCore;

    // Avoid running the command if run conditions are not met
    if (m == null || m.IsDisposed || m.State.IsOpen == false || m.State.IsOpening || m.State.IsChanging)
    {
        return;
    }

    // Mark the change as in-progress so a concurrent invocation bails out above.
    m.State.IsChanging = true;
    var resumeClock = false;
    var isSeeking = m.State.IsSeeking;

    try
    {
        // Signal the start of a changing event
        m.MediaChangingDone.Begin();
        m.State.IsSeeking = true;

        // Signal the start of a sync-buffering scenario
        m.HasDecoderSeeked = true;
        resumeClock = m.Clock.IsRunning;
        m.Clock.Pause();

        // Wait for the cycles to complete
        var workerEvents = new IWaitEvent[] { m.BlockRenderingCycle, m.PacketReadingCycle };
        foreach (var workerEvent in workerEvents)
        {
            workerEvent.Wait();
        }

        // Signal a change so the user get the chance to update
        // selected streams and options
        await m.SendOnMediaChanging();

        // Capture the current media types before components change
        var oldMediaTypes = m.Container.Components.MediaTypes.ToArray();

        // Recreate selected streams as media components
        var mediaTypes = m.Container.UpdateComponents();

        // Remove all existing component blocks and renderers that no longer exist
        var removableMediaTypes = oldMediaTypes
            .Where(t => mediaTypes.Contains(t) == false).ToArray();

        foreach (var t in removableMediaTypes)
        {
            if (m.Renderers.ContainsKey(t))
            {
                m.Renderers[t].Close();
                m.Renderers.Remove(t);
            }

            if (m.Blocks.ContainsKey(t))
            {
                m.Blocks[t]?.Dispose();
                m.Blocks.Remove(t);
            }
        }

        // Create the block buffers and renderers as necessary, and reset
        // the surviving ones so they start clean.
        foreach (var t in mediaTypes)
        {
            if (m.Blocks.ContainsKey(t) == false)
            {
                m.Blocks[t] = new MediaBlockBuffer(Constants.MaxBlocks[t], t);
            }

            if (m.Renderers.ContainsKey(t) == false)
            {
                m.Renderers[t] = MediaEngine.Platform.CreateRenderer(t, m);
            }

            m.Blocks[t].Clear();
            m.Renderers[t].WaitForReadyState();
        }

        // Mark a seek operation in order to invalidate renderers
        if (m.State.IsSeekable)
        {
            // Let's simply do an automated seek
            var seekCommand = new SeekCommand(Manager, m.WallClock);
            seekCommand.RunSynchronously();
            return;
        }

        // We need to perform some packet reading and decoding
        var main = m.Container.Components.Main.MediaType;
        var auxs = m.Container.Components.MediaTypes.ExcludeMediaType(main);

        // Read and decode blocks until the main component is half full
        while (m.ShouldReadMorePackets && m.CanReadMorePackets)
        {
            // Read some packets
            m.Container.Read();

            // Decode frames and add the blocks
            foreach (var t in mediaTypes)
            {
                var frames = m.Container.Components[t].ReceiveFrames();
                foreach (var frame in frames)
                {
                    if (frame != null)
                    {
                        m.Blocks[t].Add(frame, m.Container);
                    }
                }
            }

            // Check if we have at least a half a buffer on main
            if (m.Blocks[main].CapacityPercent >= 0.5)
            {
                break;
            }
        }

        // Check if we have a valid range. If not, just set it what the main component is dictating
        if (m.Blocks[main].Count > 0 && m.Blocks[main].IsInRange(m.WallClock) == false)
        {
            m.Clock.Update(m.Blocks[main].RangeStartTime);
        }

        // Have the other components catch up to the main component's range
        foreach (var t in auxs)
        {
            if (m.Blocks[main].Count <= 0)
            {
                break;
            }

            // Only audio and video components participate in catch-up.
            if (t != MediaType.Audio && t != MediaType.Video)
            {
                continue;
            }

            while (m.Blocks[t].RangeEndTime < m.Blocks[main].RangeEndTime)
            {
                if (m.ShouldReadMorePackets == false || m.CanReadMorePackets == false)
                {
                    break;
                }

                // Read some packets
                m.Container.Read();

                // Decode frames and add the blocks
                var frames = m.Container.Components[t].ReceiveFrames();
                foreach (var frame in frames)
                {
                    if (frame != null)
                    {
                        m.Blocks[t].Add(frame, m.Container);
                    }
                }
            }
        }

        // Force the renderers to pick up the freshly buffered blocks.
        foreach (var t in mediaTypes)
        {
            m.InvalidateRenderer(t);
        }

        m.HasDecoderSeeked = true;
    }
    catch
    {
        // TODO: Handle errors here
    }
    finally
    {
        // Restore playback state, signal completion of the change, and
        // raise the MediaChanged notification. This runs even on the
        // early-return seek path above.
        if (resumeClock)
        {
            m?.Clock?.Play();
        }

        m.State.IsSeeking = isSeeking;
        m.MediaChangingDone.Complete();
        m.State.IsChanging = false;
        await m.SendOnMediaChanged();
    }
}
/// <summary>
/// Releases the resources held by this queue: detaches the message source
/// from the event loop, dispatches any pending messages, and disposes the
/// wait event. Safe to call more than once.
/// </summary>
/// <param name="disposing">
/// True when invoked from Dispose(); false when invoked from a finalizer.
/// </param>
private void Dispose(bool disposing)
{
    // NOTE: the original wrapped this body in `catch (Exception ex) { throw ex; }`,
    // which destroyed the stack trace on rethrow. Letting the exception
    // propagate naturally (try/finally only) preserves it.
    try
    {
        if (m_disposed)
            return;

        if (disposing)
            GC.SuppressFinalize(this);

        // Detach from the event loop, then drain any messages still queued.
        loop.RemoveSource(msg_src);
        while (msgs.Count > 0)
            Dispatch();

        evt.Dispose();
        evt = null;
        loop = null;
        m_disposed = true;
    }
    finally
    {
        Application.WriteDebug("ThreadQueue.Dispose({0}) [Thread {1}]", disposing, Thread.CurrentThread.GetHashCode());
    }
}
/// <summary>
/// Initializes a new instance of the <see cref="ThreadQueue"/> class by
/// creating a wait event and registering its handle with the event loop.
/// </summary>
/// <param name="the_loop">The event loop that watches this queue's event handle.</param>
public ThreadQueue(IEventLoop the_loop)
{
    loop = the_loop;
    evt = Native.Factory.CreateWaitEvent();

    // OnPingReceived fires whenever input activity is seen on the event handle.
    msg_src = the_loop.AddWatch(evt.Handle, WatchEventKind.In, OnPingReceived);
}