/// <summary>
/// Manifest-fetch callback for HLS playback: once the playlist has loaded,
/// builds the video/audio/closed-caption renderer set and hands it to the player.
/// Bails out silently when the build has been canceled in the meantime.
/// </summary>
/// <param name="obj">The fetched manifest as a Java object; cast to <c>HlsPlaylist</c>.</param>
public void OnSingleManifest(Object obj)
{
    var playlist = obj.JavaCast<HlsPlaylist>();
    if (_canceled)
    {
        return;
    }

    var handler = _player.MainHandler;
    var control = new DefaultLoadControl(new DefaultAllocator(BufferSegmentSize));
    var meter = new DefaultBandwidthMeter();
    var adjusterProvider = new PtsTimestampAdjusterProvider();
    var uriSource = new DefaultUriDataSource(_context, meter, _userAgent);

    var chunkSource = new HlsChunkSource(
        true,
        uriSource,
        playlist,
        DefaultHlsTrackSelector.NewDefaultInstance(_context),
        meter,
        adjusterProvider,
        HlsChunkSource.AdaptiveModeSplice);

    var sampleSource = new HlsSampleSource(
        chunkSource,
        control,
        BufferSegments * BufferSegmentSize,
        handler,
        _player,
        MvxVideoPlayer.TypeVideo);

    var videoRenderer = new MediaCodecVideoTrackRenderer(
        _context,
        sampleSource,
        MediaCodecSelector.Default,
        (int)VideoScalingMode.ScaleToFit,
        5000,
        handler,
        _player,
        50);

    var audioRenderer = new MediaCodecAudioTrackRenderer(
        sampleSource,
        MediaCodecSelector.Default,
        null,
        true,
        _player.MainHandler,
        _player,
        AudioCapabilities.GetCapabilities(_context),
        (int)Stream.Music);

    // TODO: The Id3Parser is currently not part of the binding
    //MetadataTrackRenderer id3Renderer = new MetadataTrackRenderer(sampleSource, new Id3Parser(), player, mainHandler.getLooper());

    var captionRenderer = new Eia608TrackRenderer(sampleSource, _player, handler.Looper);

    var renderers = new TrackRenderer[MvxVideoPlayer.RendererCount];
    renderers[MvxVideoPlayer.TypeVideo] = videoRenderer;
    renderers[MvxVideoPlayer.TypeAudio] = audioRenderer;
    //renderers[DemoPlayer.TYPE_METADATA] = id3Renderer;
    renderers[MvxVideoPlayer.TypeText] = captionRenderer;
    _player.OnRenderers(renderers, meter);
}
/// <summary>
/// Reacts to a change in the device's audio capabilities by tearing the player
/// down and rebuilding it, then restoring the backgrounded/play state it had.
/// </summary>
/// <param name="audioCapabilities">The new capabilities (not inspected here; the rebuild re-queries them).</param>
public void OnAudioCapabilitiesChanged(AudioCapabilities audioCapabilities)
{
    if (_player == null)
    {
        return;
    }

    // Snapshot the current state so the rebuilt player resumes seamlessly.
    var wasBackgrounded = _player.Backgrounded;
    var wasPlaying = _player.PlayWhenReady;

    ReleasePlayer();
    PreparePlayer(wasPlaying);
    _player.Backgrounded = wasBackgrounded;
}
/// <summary>
/// Builds the renderer set for progressive (extractor-based) playback of the
/// configured URI and delivers it to the player via <c>OnRenderers</c>.
/// </summary>
/// <param name="player">The player that receives callbacks and the finished renderers.</param>
public void BuildRenderers(VideoPlayer player)
{
    var segmentAllocator = new DefaultAllocator(BufferSegmentSize);

    // Build the video and audio renderers.
    var meter = new DefaultBandwidthMeter(player.MainHandler, null);
    var uriSource = new DefaultUriDataSource(_context, meter, _userAgent);

    var extractorSource = new ExtractorSampleSource(
        _uri,
        uriSource,
        segmentAllocator,
        BufferSegmentCount * BufferSegmentSize);

    var videoRenderer = new MediaCodecVideoTrackRenderer(
        _context,
        extractorSource,
        MediaCodecSelector.Default,
        (int)VideoScalingMode.ScaleToFit,
        5000,
        player.MainHandler,
        player,
        50);

    var audioRenderer = new MediaCodecAudioTrackRenderer(
        extractorSource,
        MediaCodecSelector.Default,
        null,
        true,
        player.MainHandler,
        player,
        AudioCapabilities.GetCapabilities(_context),
        (int)Stream.Music);

    var textRenderer = new TextTrackRenderer(extractorSource, player, player.MainHandler.Looper);

    // Invoke the callback.
    var trackRenderers = new TrackRenderer[VideoPlayer.RendererCount];
    trackRenderers[VideoPlayer.TypeVideo] = videoRenderer;
    trackRenderers[VideoPlayer.TypeAudio] = audioRenderer;
    trackRenderers[VideoPlayer.TypeText] = textRenderer;
    player.OnRenderers(trackRenderers, meter);
}
/// <summary>
/// Builds the DASH renderer set (video, audio, text), wiring up a Widevine DRM
/// session when the manifest advertises content protection. Reports any DRM
/// setup failure through <c>OnRenderersError</c> instead of throwing.
/// </summary>
private void BuildRenderers()
{
    var period = _manifest.GetPeriod(0);
    var handler = _player.MainHandler;
    var control = new DefaultLoadControl(new DefaultAllocator(BufferSegmentSize));
    var meter = new DefaultBandwidthMeter(handler, _player);

    // Scan the period's adaptation sets for content protection flags,
    // ignoring sets of unknown type.
    var adaptationSets = period.AdaptationSets
        .OfType<Object>()
        .Select(item => item.JavaCast<AdaptationSet>())
        .ToList();
    var protectedContent = false;
    foreach (var adaptationSet in adaptationSets)
    {
        if (adaptationSet.Type != AdaptationSet.TypeUnknown)
        {
            protectedContent |= adaptationSet.HasContentProtection;
        }
    }

    // Check drm support if necessary.
    var filterHdContent = false;
    StreamingDrmSessionManager drmSessionManager = null;
    if (protectedContent)
    {
        if (ExoPlayerUtil.SdkInt < 18)
        {
            _player.OnRenderersError(new UnsupportedDrmException(UnsupportedDrmException.ReasonUnsupportedScheme));
            return;
        }
        try
        {
            drmSessionManager = StreamingDrmSessionManager.NewWidevineInstance(
                _player.PlaybackLooper, _drmCallback, null, _player.MainHandler, _player);
            // Only L1-secure Widevine devices may decode HD streams; filter HD otherwise.
            filterHdContent = GetWidevineSecurityLevel(drmSessionManager) != SecurityLevel1;
        }
        catch (UnsupportedDrmException e)
        {
            _player.OnRenderersError(e);
            return;
        }
    }

    // Build the video renderer.
    var videoSource = new DefaultUriDataSource(_context, meter, _userAgent);
    var videoChunks = new DashChunkSource(
        _manifestFetcher,
        DefaultDashTrackSelector.NewVideoInstance(_context, true, filterHdContent),
        videoSource,
        new FormatEvaluatorAdaptiveEvaluator(meter),
        LiveEdgeLatencyMs,
        _elapsedRealtimeOffset,
        handler,
        _player,
        EventSourceId);
    var videoSamples = new ChunkSampleSource(
        videoChunks,
        control,
        VideoBufferSegments * BufferSegmentSize,
        handler,
        _player,
        VideoPlayer.TypeVideo);
    var videoRenderer = new MediaCodecVideoTrackRenderer(
        _context,
        videoSamples,
        MediaCodecSelector.Default,
        (int)VideoScalingMode.ScaleToFit,
        5000,
        drmSessionManager,
        true,
        handler,
        _player,
        50);

    // Build the audio renderer.
    var audioSource = new DefaultUriDataSource(_context, meter, _userAgent);
    var audioChunks = new DashChunkSource(
        _manifestFetcher,
        DefaultDashTrackSelector.NewAudioInstance(),
        audioSource,
        null,
        LiveEdgeLatencyMs,
        _elapsedRealtimeOffset,
        handler,
        _player,
        VideoPlayer.TypeAudio);
    var audioSamples = new ChunkSampleSource(
        audioChunks,
        control,
        AudioBufferSegments * BufferSegmentSize,
        handler,
        _player,
        VideoPlayer.TypeAudio);
    var audioRenderer = new MediaCodecAudioTrackRenderer(
        audioSamples,
        MediaCodecSelector.Default,
        drmSessionManager,
        true,
        handler,
        _player,
        AudioCapabilities.GetCapabilities(_context),
        (int)Stream.Music);

    // Build the text renderer.
    var textSource = new DefaultUriDataSource(_context, meter, _userAgent);
    var textChunks = new DashChunkSource(
        _manifestFetcher,
        DefaultDashTrackSelector.NewTextInstance(),
        textSource,
        null,
        LiveEdgeLatencyMs,
        _elapsedRealtimeOffset,
        handler,
        _player,
        VideoPlayer.TypeText);
    var textSamples = new ChunkSampleSource(
        textChunks,
        control,
        TextBufferSegments * BufferSegmentSize,
        handler,
        _player,
        VideoPlayer.TypeText);
    var textRenderer = new TextTrackRenderer(textSamples, _player, handler.Looper);

    // Invoke the callback.
    var renderers = new TrackRenderer[VideoPlayer.RendererCount];
    renderers[VideoPlayer.TypeVideo] = videoRenderer;
    renderers[VideoPlayer.TypeAudio] = audioRenderer;
    renderers[VideoPlayer.TypeText] = textRenderer;
    _player.OnRenderers(renderers, meter);
}
/// <summary>
/// Manifest-fetch callback for SmoothStreaming playback: builds the
/// video/audio/text renderer set, creating a PlayReady DRM session when the
/// manifest carries a protection element. DRM failures are reported through
/// <c>OnRenderersError</c> rather than thrown.
/// </summary>
/// <param name="obj">The fetched manifest as a Java object; cast to <c>SmoothStreamingManifest</c>.</param>
public void OnSingleManifest(Object obj)
{
    var manifest = obj.JavaCast<SmoothStreamingManifest>();
    if (_canceled)
    {
        return;
    }

    var handler = _player.MainHandler;
    var control = new DefaultLoadControl(new DefaultAllocator(BufferSegmentSize));
    var meter = new DefaultBandwidthMeter(handler, _player);

    // Check drm support if necessary.
    IDrmSessionManager drmSessionManager = null;
    if (manifest.ProtectionElement != null)
    {
        if (ExoPlayerUtil.SdkInt < 18)
        {
            _player.OnRenderersError(
                new UnsupportedDrmException(UnsupportedDrmException.ReasonUnsupportedScheme));
            return;
        }
        try
        {
            drmSessionManager = new StreamingDrmSessionManager(
                manifest.ProtectionElement.Uuid,
                _player.PlaybackLooper,
                _drmCallback,
                null,
                _player.MainHandler,
                _player);
        }
        catch (Exception e)
        {
            _player.OnRenderersError(e);
            return;
        }
    }

    // Build the video renderer.
    var videoSource = new DefaultUriDataSource(_context, meter, _userAgent);
    var videoChunks = new SmoothStreamingChunkSource(
        _manifestFetcher,
        DefaultSmoothStreamingTrackSelector.NewVideoInstance(_context, true, false),
        videoSource,
        new FormatEvaluatorAdaptiveEvaluator(meter),
        LiveEdgeLatencyMs);
    var videoSamples = new ChunkSampleSource(
        videoChunks,
        control,
        VideoBufferSegments * BufferSegmentSize,
        handler,
        _player,
        VideoPlayer.TypeVideo);
    var videoRenderer = new MediaCodecVideoTrackRenderer(
        _context,
        videoSamples,
        MediaCodecSelector.Default,
        (int)VideoScalingMode.ScaleToFit,
        5000,
        drmSessionManager,
        true,
        handler,
        _player,
        50);

    // Build the audio renderer.
    var audioSource = new DefaultUriDataSource(_context, meter, _userAgent);
    var audioChunks = new SmoothStreamingChunkSource(
        _manifestFetcher,
        DefaultSmoothStreamingTrackSelector.NewAudioInstance(),
        audioSource,
        null,
        LiveEdgeLatencyMs);
    var audioSamples = new ChunkSampleSource(
        audioChunks,
        control,
        AudioBufferSegments * BufferSegmentSize,
        handler,
        _player,
        VideoPlayer.TypeAudio);
    var audioRenderer = new MediaCodecAudioTrackRenderer(
        audioSamples,
        MediaCodecSelector.Default,
        drmSessionManager,
        true,
        handler,
        _player,
        AudioCapabilities.GetCapabilities(_context),
        (int)Stream.Music);

    // Build the text renderer.
    var textSource = new DefaultUriDataSource(_context, meter, _userAgent);
    var textChunks = new SmoothStreamingChunkSource(
        _manifestFetcher,
        DefaultSmoothStreamingTrackSelector.NewTextInstance(),
        textSource,
        null,
        LiveEdgeLatencyMs);
    var textSamples = new ChunkSampleSource(
        textChunks,
        control,
        TextBufferSegments * BufferSegmentSize,
        handler,
        _player,
        VideoPlayer.TypeText);
    var textRenderer = new TextTrackRenderer(textSamples, _player, handler.Looper);

    // Invoke the callback.
    var renderers = new TrackRenderer[VideoPlayer.RendererCount];
    renderers[VideoPlayer.TypeVideo] = videoRenderer;
    renderers[VideoPlayer.TypeAudio] = audioRenderer;
    renderers[VideoPlayer.TypeText] = textRenderer;
    _player.OnRenderers(renderers, meter);
}
// Probes EFX support for the current audio context and stores the result in
// the capabilities field. The logLevel parameter is accepted for signature
// compatibility but is not used here — assumed reserved for diagnostic
// logging; TODO(review): confirm against callers.
private void CheckAudioCapabilities(LogLevel logLevel)
{
    capabilities = new AudioCapabilities
    {
        SupportsEfx = Efx.IsInitialized
    };
}