/// <summary>
/// Reacts to bindable-property changes on the <see cref="Media"/> element and
/// pushes the new values into the backing fields / native view.
/// </summary>
protected override void OnElementPropertyChanged(object sender, PropertyChangedEventArgs args)
{
    base.OnElementPropertyChanged(sender, args);

    if (args.PropertyName == Media.StreamProperty.PropertyName)
    {
        _stream = Element.Stream;
        // Use the cached _stream consistently (the original mixed Element.Stream
        // and _stream) and tolerate the stream being reset to null.
        _videoTrack = _stream?.GetVideoTracks().FirstOrDefault();
        _audioTrack = _stream?.GetAudioTracks().FirstOrDefault();
        _mediaView.SetTrack(_videoTrack);
    }
    else if (args.PropertyName == Media.LabelProperty.PropertyName)
    {
        _label = Element.Label;
    }
    else if (args.PropertyName == Media.HangupProperty.PropertyName)
    {
        _hangup = Element.Hangup;
    }
    else if (args.PropertyName == Media.VideoMutedProperty.PropertyName)
    {
        // TODO(review): VideoMuted changes are currently ignored — confirm whether
        // the native view should be updated here.
    }
    else if (args.PropertyName == Media.AudioMutedProperty.PropertyName)
    {
        // TODO(review): AudioMuted changes are currently ignored — confirm intent.
    }
    else if (args.PropertyName == Media.CameraTypeProperty.PropertyName)
    {
        _cameraType = Element.CameraType;
    }
    else if (args.PropertyName == Media.ShowControlsProperty.PropertyName)
    {
        _showControls = Element.ShowControls;
    }
}
/// <summary>
/// Adds a media track to the peer connection via the JS interop layer and
/// wraps the returned native object in an <see cref="IRTCRtpSender"/>.
/// </summary>
public IRTCRtpSender AddTrack(IMediaStreamTrack track, IMediaStream stream)
{
    // Unwrap the managed wrappers down to their JS object references.
    var nativeArgs = new object[]
    {
        ((MediaStreamTrack)track).NativeObject,
        ((MediaStream)stream).NativeObject
    };
    var senderRef = JsRuntime.CallJsMethod<JsObjectRef>(NativeObject, "addTrack", nativeArgs);
    return RTCRtpSender.Create(JsRuntime, senderRef);
}
/// <summary>
/// Selects the video tracks of the current segment whose custom attributes
/// match the configured camera name (<c>VideoStreamName</c>); there may be
/// one matching track per bitrate.
/// </summary>
private void SelectPlaylistVideoStream()
{
    if (string.IsNullOrEmpty(this.VideoStreamName))
    {
        return;
    }

    ISegment segment = this.CurrentSegment;
    if (segment == null || segment.AvailableStreams == null)
    {
        return;
    }

    IMediaStream videoStream = segment.AvailableStreams.FirstOrDefault(s => s.Type == StreamType.Video);
    if (videoStream == null)
    {
        return;
    }

    // Get all media tracks that match the camera name (might be one per bitrate).
    IEnumerable<IMediaTrack> tracks = videoStream.AvailableTracks.Where(
        t => t.CustomAttributes.Any(kvp => IsCameraAttribute(kvp, this.VideoStreamName)));

    // Any() instead of Count() > 0 — avoids enumerating the whole sequence.
    if (tracks.Any())
    {
        videoStream.SetSelectedTracks(tracks);
    }
}
/// <summary>
/// Builds the writable multi-media stream used by the tests: initializes the
/// stream for writing, adds a media-type stream, creates a sample from it,
/// and starts the stream running.
/// </summary>
private void Config()
{
    int hr;
    IMediaStream ms;
    IAMMediaTypeStream mts;
    IMediaStream ms2 = null;
    IAMMultiMediaStream mms = (IAMMultiMediaStream)new AMMultiMediaStream();
    IAMMediaTypeStream amts = (IAMMediaTypeStream)new AMMediaTypeStream();
    IAMMediaTypeSample mtp;

    hr = mms.Initialize(StreamType.Write, AMMMultiStream.None, null);
    // Bug fix: the HRESULT of Initialize was previously ignored; every other
    // COM call in this method is checked.
    MsError.ThrowExceptionForHR(hr);

    hr = mms.AddMediaStream(amts, null, AMMStream.None, ms2);
    MsError.ThrowExceptionForHR(hr);

    hr = mms.EnumMediaStreams(0, out ms);
    MsError.ThrowExceptionForHR(hr);

    mts = ms as IAMMediaTypeStream;

    // 100-byte sample, no caller-supplied buffer.
    hr = mts.CreateSample(100, IntPtr.Zero, 0, null, out mtp);
    MsError.ThrowExceptionForHR(hr);

    m_ss = mtp as IStreamSample;

    hr = mms.SetState(StreamState.Run);
    MsError.ThrowExceptionForHR(hr);
}
// Builds a readable-media-type test graph: adds a media-type stream to a new
// multi-media stream, enumerates it back out, and caches the IAMMediaTypeStream
// interface in m_mts for the tests to use.
private void Config()
{
    int hr;
    IMediaStream ms;
    IMediaStream ms2 = null;
    IAMMultiMediaStream mms = (IAMMultiMediaStream) new AMMultiMediaStream();
    IAMMediaTypeStream amts = (IAMMediaTypeStream) new AMMediaTypeStream();

    // A video media type is prepared but (currently) never applied — the
    // SetFormat/GetFormat experiments below are commented out.
    AMMediaType pmt = new AMMediaType();
    pmt.majorType = MediaType.Video;

    //hr = mms.AddMediaStream(null, MSPID.PrimaryAudio, AMMStream.None, null);
    //MsError.ThrowExceptionForHR(hr);
    //pmt = null;
    //hr = amts.GetFormat(pmt, 0);
    //hr = amts.SetFormat(pmt, 0);

    hr = mms.AddMediaStream(amts, null, AMMStream.None, ms2);
    MsError.ThrowExceptionForHR(hr);

    hr = mms.EnumMediaStreams(0, out ms);
    MsError.ThrowExceptionForHR(hr);

    // Cache the media-type interface of the enumerated stream for later tests.
    m_mts = ms as IAMMediaTypeStream;
}
/// <summary>
/// Prepares local media when the page appears: captures the camera stream,
/// registers it with the stream manager, resolves the connection for the
/// requested type, builds the user context, and connects.
/// </summary>
/// <param name="connectionParameters">Name, room, and connection-type settings.</param>
/// <param name="reRender">Optional UI refresh callback.</param>
public async Task OnPageAppearingAsync(ConnectionParameters connectionParameters, Action reRender = null)
{
    _connectionParameters = connectionParameters;
    _reRender = reRender;

    _cameraStream = await _localMediaStream.GetCameraMediaStreamAsync();

    var streamParameters = new MediaStreamParameters
    {
        Stream = _cameraStream,
        Label = connectionParameters.Name,
        Hangup = false,
        VideoMuted = false,
        AudioMuted = true, // prevents local echo
        CameraType = CameraType.Default,
        ShowControls = false
    };
    _mediaStreamManager.Add(streamParameters);
    reRender?.Invoke();

    _connection = _connectionFactory.SelectConnection(connectionParameters.ConnectionType);
    _userContext = new()
    {
        ConnectionType = connectionParameters.ConnectionType,
        Id = _guid,
        Name = connectionParameters.Name,
        Room = connectionParameters.Room,
        LocalStream = _cameraStream
    };
    Connect();
}
/// <summary>
/// Initializes a new instance of the <see cref="BotMediaStream" /> class.
/// </summary>
/// <param name="mediaSession">The media session.</param>
/// <param name="callId">The call identity.</param>
/// <param name="logger">The logger.</param>
/// <param name="eventPublisher">Event publisher.</param>
/// <param name="settings">Azure settings.</param>
/// <exception cref="InvalidOperationException">A mediaSession needs to have at least an audioSocket</exception>
public BotMediaStream(
    ILocalMediaSession mediaSession,
    string callId,
    IGraphLogger logger,
    IEventPublisher eventPublisher,
    IAzureSettings settings
)
    : base(logger)
{
    ArgumentVerifier.ThrowOnNullArgument(mediaSession, nameof(mediaSession));
    ArgumentVerifier.ThrowOnNullArgument(logger, nameof(logger));
    // Consistency fix: eventPublisher was stored without validation while the
    // other injected dependencies were checked.
    ArgumentVerifier.ThrowOnNullArgument(eventPublisher, nameof(eventPublisher));
    ArgumentVerifier.ThrowOnNullArgument(settings, nameof(settings));

    this.participants = new List<IParticipant>();

    _eventPublisher = eventPublisher;
    _callId = callId;
    _mediaStream = new MediaStream(
        settings,
        logger,
        mediaSession.MediaSessionId.ToString()
    );

    // Subscribe to the audio media.
    this._audioSocket = mediaSession.AudioSocket;
    if (this._audioSocket == null)
    {
        throw new InvalidOperationException("A mediaSession needs to have at least an audioSocket");
    }

    this._audioSocket.AudioMediaReceived += this.OnAudioMediaReceived;
}
/// <summary>
/// Prepares local media when the page appears: acquires camera and display
/// streams, registers the camera stream with the media manager, and opens
/// the connection with the assembled request parameters.
/// </summary>
/// <param name="connectionParameters">User name and connection settings.</param>
/// <param name="reRender">Optional UI refresh callback.</param>
public async Task OnPageAppearingAsync(ConnectionParameters connectionParameters, Action reRender = null)
{
    _connectionParameters = connectionParameters;
    _reRender = reRender;
    _userName = connectionParameters.UserName;

    _cameraStream = await _mediaStreamService.GetCameraMediaStreamAsync();
    _displayStream = await _mediaStreamService.GetDisplayMediaStreamAync();

    var mediaParameters = new MediaParameters
    {
        Label = connectionParameters.UserName,
        Stream = _cameraStream,
        VideoMuted = false,
        AudioMuted = false,
        ShowControls = false
    };
    _mediaManagerService.Add(mediaParameters);
    reRender?.Invoke();

    var connectionRequestParameters = new ConnectionRequestParameters
    {
        ConnectionParameters = connectionParameters,
        LocalStream = _cameraStream,
    };
    Connect(connectionRequestParameters);
}
/// <summary>
/// Exercises AddMediaStream with a default-renderer request for the primary
/// audio purpose ID and fails on any error HRESULT.
/// </summary>
private void TestAdd()
{
    IMediaStream pStream = null;
    int hr = m_mms.AddMediaStream(null, MSPID.PrimaryAudio, AMMStream.AddDefaultRenderer, pStream);
    MsError.ThrowExceptionForHR(hr);
}
/// <summary>
/// Exercises GetMediaStream for the primary audio purpose ID and fails on
/// any error HRESULT.
/// </summary>
private void TestGetMediaStream()
{
    IMediaStream pStream = null;
    int hr = m_msf.GetMediaStream(MSPID.PrimaryAudio, out pStream);
    MsError.ThrowExceptionForHR(hr);
}
/// <summary>
/// Binds a media stream to an HTML video element by assigning its native
/// object to the element's <c>srcObject</c> property, and applies the
/// requested <c>muted</c> flag.
/// </summary>
public static void SetVideoSource(IJSRuntime jsRuntime, ElementReference videoElementReference, IMediaStream mediaStream, bool muted = false)
{
    jsRuntime.SetJsProperty(videoElementReference, "srcObject", mediaStream.NativeObject);
    jsRuntime.SetJsProperty(videoElementReference, "muted", muted);
}
/// <summary>
/// Forwards AddMediaStream to the wrapped native multi-media stream.
/// Returns MS_E_HANDLE when this wrapper is not attached to a native object.
/// </summary>
public int AddMediaStream(Object pStreamObject, Guid PurposeID, int dwFlags, out IMediaStream ppNewStream)
{
    ppNewStream = null;

    if (!IsValid)
    {
        // No native object to delegate to.
        return MSStatus.MS_E_HANDLE;
    }

    return _pMMS.AddMediaStream(pStreamObject, PurposeID, dwFlags, out ppNewStream);
}
/// <summary>
/// Wraps each managed <see cref="MediaStream"/> in a
/// <see cref="MediaStreamNative"/> and returns them as an
/// <see cref="IMediaStream"/> array (same order, same length).
/// </summary>
private static IMediaStream[] ConvertToNative(MediaStream[] source)
{
    var result = new IMediaStream[source.Length];
    var index = 0;
    foreach (var stream in source)
    {
        result[index++] = new MediaStreamNative(stream);
    }
    return result;
}
/// <summary>
/// Finds the highest bitrate among the current segment's video tracks and
/// selects tracks at exactly that bitrate (min == max), optionally filtered
/// by the given custom-attribute key/value pair.
/// </summary>
public void SelectMaxAvailableBitrateTracks(string key, string value)
{
    IMediaStream videoStream = this.CurrentSegment.AvailableStreams.FirstOrDefault(x => x.Type == StreamType.Video);
    if (videoStream == null)
    {
        return;
    }

    long maxBitrate = videoStream.AvailableTracks.Max(x => x.Bitrate);
    this.SelectTracks(key, value, maxBitrate, maxBitrate);
}
/// <summary>
/// Wires the native MediaRecorder events (dataavailable, error, pause,
/// resume, start, stop) through to the corresponding .NET events.
/// </summary>
private MediaRecorder(IJSRuntime jsRuntime, JsObjectRef jsObjectRef, IMediaStream stream, MediaRecorderOptions options)
    : base(jsRuntime, jsObjectRef)
{
    // Events carrying a payload need a factory to materialize the JS object.
    AddNativeEventListenerForObjectRef("dataavailable", (sender, evt) => OnDataAvailable?.Invoke(sender, evt), BlobEvent.Create);
    AddNativeEventListenerForObjectRef("error", (sender, evt) => OnError?.Invoke(sender, evt), DOMException.Create);

    // Plain lifecycle notifications.
    AddNativeEventListener("pause", (sender, evt) => OnPause?.Invoke(sender, evt));
    AddNativeEventListener("resume", (sender, evt) => OnResume?.Invoke(sender, evt));
    AddNativeEventListener("start", (sender, evt) => OnStart?.Invoke(sender, evt));
    AddNativeEventListener("stop", (sender, evt) => OnStop?.Invoke(sender, evt));
}
/// <summary>
/// Restricts the selected video tracks of the current segment to those whose
/// bitrate lies in [minBitrate, maxBitrate], optionally filtered by a custom
/// attribute key/value pair (case-insensitive). Falls back to a pure bitrate
/// filter when the attribute is not present on any track. In single-bitrate
/// mode, only the highest-bitrate matching track is kept.
/// </summary>
public void SelectTracks(string key, string value, long minBitrate, long maxBitrate)
{
    if (this.CurrentSegment == null)
    {
        return;
    }

    IMediaStream videoStream = this.CurrentSegment.AvailableStreams.FirstOrDefault(x => x.Type == StreamType.Video);
    if (videoStream == null)
    {
        return;
    }

    bool attributeAvailable = false;
    IList<IMediaTrack> tracks = new List<IMediaTrack>();

    if (key != null && value != null)
    {
        foreach (IMediaTrack trackInfo in videoStream.AvailableTracks)
        {
            string keyValue;
            trackInfo.CustomAttributes.TryGetValue(key, out keyValue);

            // Idiom fix: culture-aware case-insensitive comparison instead of
            // the former ToUpper(InvariantCulture) == ToUpper(InvariantCulture).
            if (!string.IsNullOrEmpty(keyValue) &&
                string.Equals(keyValue, value, StringComparison.InvariantCultureIgnoreCase))
            {
                attributeAvailable = true;
                if (trackInfo.Bitrate >= minBitrate && trackInfo.Bitrate <= maxBitrate)
                {
                    tracks.Add(trackInfo);
                }
            }
        }
    }

    if (!attributeAvailable)
    {
        // No track carries the attribute: filter by bitrate alone.
        tracks = videoStream.AvailableTracks.Where(x => x.Bitrate >= minBitrate && x.Bitrate <= maxBitrate).ToList();
    }

    if (tracks.Count == 0)
    {
        return;
    }

    if (this.singleBitrate && tracks.Count > 1)
    {
        // Keep only the single highest-bitrate candidate.
        long bitrate = tracks.Max(x => x.Bitrate);
        IMediaTrack track = tracks.FirstOrDefault(x => x.Bitrate == bitrate);
        tracks.Clear();
        tracks.Add(track);
    }

    videoStream.SetSelectedTracks(tracks);
}
/// <summary>
/// Marks the camera as connected, builds the camera -> forwarder -> receiver
/// media pipeline on a freshly named gateway stream, and notifies the client
/// which stream name to play.
/// </summary>
internal void ConnectCamera()
{
    NotifyCameraStateChanged(IPCameraState.Connected);

    // Create a uniquely named stream and attach a video receiver to it.
    var playStreamName = Guid.NewGuid().ToString();
    _mediaStream = streamService.CreateStream(playStreamName);
    _videoReceiver = new MediaGatewayVideoReceiver(_mediaStream);

    // Pipeline: camera video sender -> forwarder -> gateway receiver.
    Connector.Connect(_camera.VideoSender, _forwarder);
    Connector.Connect(_forwarder, _videoReceiver);

    // notify to client the stream name
    OnPlayRemoteStream(playStreamName);
}
/// <summary>
/// Builds video track info from the specified video track of the stream,
/// copying optional stream-level metadata (object descriptor, user data)
/// and the movie duration/time scale.
/// </summary>
public RawVideoTrackInfo(IMediaStream inStream, int trackID)
    : this((IVideoTrack)inStream[CodecTypes.Video, trackID])
{
    // Only overwrite metadata when the source actually provides it.
    var descriptor = inStream.ObjectDescriptor;
    if (descriptor != null)
    {
        this.ObjectDescriptor = descriptor;
    }

    var userData = inStream.UserData;
    if (userData != null)
    {
        this.UserData = userData;
    }

    this.MovieDurationIn100NanoSecs = inStream.DurationIn100NanoSecs;
    this.MovieTimeScale = Hints.StreamTimeScale;
}
/// <summary>
/// Collects isochronous track info for the selected tracks of a source
/// stream. Audio tracks are included unless <paramref name="audioVideo"/>
/// is Video; the video track whose ID matches <paramref name="videoTrackID"/>
/// (or all video tracks when it is 0) is included unless audioVideo is Audio.
/// Destination video track IDs are renumbered starting at 1.
/// </summary>
/// <exception cref="NotSupportedException">A track has an unknown codec type.</exception>
/// <exception cref="ArgumentException">The requested video track ID was not found.</exception>
public static List<IsochronousTrackInfo> GetTrackCharacteristics(IMediaStream source, TracksIncluded audioVideo, int videoTrackID)
{
    List<IsochronousTrackInfo> tracksInfo = new List<IsochronousTrackInfo>(source.MediaTracks.Count);
    int videoID = 1; // first destination video track ID should be 1

    foreach (IMediaTrack track in source.MediaTracks)
    {
        bool wantAudio = (track.Codec.CodecType == CodecTypes.Audio) && (audioVideo != TracksIncluded.Video);
        bool wantVideo = (track.Codec.CodecType == CodecTypes.Video)
            && ((videoTrackID == 0) || (track.TrackID == videoTrackID))
            && (audioVideo != TracksIncluded.Audio);
        if (!wantAudio && !wantVideo)
        {
            continue;
        }

        IsochronousTrackInfo baseTrkInfo;
        switch (track.Codec.CodecType)
        {
            case CodecTypes.Audio:
                baseTrkInfo = new RawAudioTrackInfo(source);
                break;
            case CodecTypes.Augment:
                baseTrkInfo = null; // FIXME: need IsochronousTrackInfo class for Augment
                break;
            case CodecTypes.Meta:
                baseTrkInfo = null; // FIXME: need IsochronousTrackInfo class for Meta
                break;
            case CodecTypes.Video:
                int sourceVideoID = (videoTrackID == 0) ? track.TrackID : videoTrackID;
                baseTrkInfo = new RawVideoTrackInfo(source, sourceVideoID);
                baseTrkInfo.TrackID = videoID++;
                break;
            default:
                // Was a bare Exception; a specific type is still caught by
                // callers that catch Exception.
                throw new NotSupportedException("Unknown track type in input");
        }

        if (baseTrkInfo != null)
        {
            tracksInfo.Add(baseTrkInfo);
        }
    }

    if ((videoID == 1) && (videoTrackID > 0))
    {
        throw new ArgumentException("Track ID specified not found in source stream", nameof(videoTrackID));
    }

    return tracksInfo;
}
/// <summary>
/// When a binary or text stream is selected, starts downloading its first
/// available track. Failures are swallowed so stream selection can never
/// crash playback.
/// </summary>
private void OnStreamSelected(IAdaptiveMediaPlugin mediaPlugin, IMediaStream stream)
{
    try
    {
        // Any() instead of Count() > 0 — avoids enumerating the sequence.
        if ((stream.Type == StreamType.Binary || stream.Type == StreamType.Text)
            && stream.AvailableTracks.Any())
        {
            IMediaTrack track = stream.AvailableTracks.First();
            this.rceMediaPlugin.DownloadStreamData(track);
        }
    }
    catch (Exception)
    {
        // Best-effort: intentionally swallowed (the unused `ex` local is gone).
        // TODO(review): consider logging the failure instead of silently ignoring it.
    }
}
/// <summary>
/// Builds the test fixture: adds a primary-audio stream to a new multi-media
/// stream, grabs the media-stream filter into m_msf, and opens the test AVI
/// file with all streams rendered.
/// </summary>
private void Config()
{
    var mms = (IAMMultiMediaStream)new AMMultiMediaStream();
    IMediaStream pStream = null;

    int hr = mms.AddMediaStream(null, MSPID.PrimaryAudio, AMMStream.None, pStream);
    MsError.ThrowExceptionForHR(hr);

    hr = mms.GetFilter(out m_msf);
    MsError.ThrowExceptionForHR(hr);

    hr = mms.OpenFile("foo.avi", AMOpenModes.RenderAllStreams);
    MsError.ThrowExceptionForHR(hr);
}
/// <summary>
/// Validates that both streams are already open, stores them, and subscribes
/// to the source stream's track-added event. The destination stream is given
/// one short grace period (100 ms) to finish being created before failing.
/// </summary>
/// <exception cref="ArgumentException">Either stream is not open.</exception>
public BaseRecode(IMediaStream srcStream, IMediaStream destStream)
    : this()
{
    // streams must be open already
    if (srcStream.Stream == null)
    {
        throw new ArgumentException("Source Stream must be open already");
    }

    if (destStream.Stream == null)
    {
        // Give the destination stream a brief moment to appear, then re-check.
        Thread.Sleep(100);
        if (destStream.Stream == null)
        {
            throw new ArgumentException("Destination Stream must have been created already");
        }
    }

    this.SourceStream = srcStream;
    this.DestStream = destStream;
    this.SourceStream.MediaTrackAdded += new MediaTrackAddedHandler(SourceStream_MediaTrackAdded);
}
/// <summary>
/// Builds a readable multi-media stream, adds a primary-audio stream,
/// enumerates it back out, and caches its IAudioMediaStream interface
/// in m_ams.
/// </summary>
private void Config2()
{
    var mms = (IAMMultiMediaStream)new AMMultiMediaStream();
    IMediaStream pStream = null;

    int hr = mms.Initialize(StreamType.Read, AMMMultiStream.None, null);
    MsError.ThrowExceptionForHR(hr);

    hr = mms.AddMediaStream(null, MSPID.PrimaryAudio, AMMStream.None, pStream);
    MsError.ThrowExceptionForHR(hr);

    hr = mms.EnumMediaStreams(0, out pStream);
    MsError.ThrowExceptionForHR(hr);

    m_ams = pStream as IAudioMediaStream;
}
/// <summary>
/// Fetches the primary audio stream from the filter and feeds it back into
/// AddMediaStream, expecting the E_PurposeId failure (which proves the
/// stream object itself was readable).
/// </summary>
private void TestAdd()
{
    IMediaStream pStream = null;
    int hr = m_msf.GetMediaStream(MSPID.PrimaryAudio, out pStream);
    MsError.ThrowExceptionForHR(hr);

    var aStream = pStream as IAMMediaStream;
    hr = m_msf.AddMediaStream(aStream);

    // If it can read the purpose id, it must have been able to read the aStream
    // and that's close enough.
    Debug.Assert(hr == MsResults.E_PurposeId, "AddMediaStream");
}
// Validates that both streams are open, stores them, and subscribes to the
// source's track-added event. The destination stream gets one 100 ms grace
// period to finish being created before the constructor gives up.
// Throws ArgumentException when either stream is not open.
public BaseRecode(IMediaStream srcStream, IMediaStream destStream)
    : this()
{
    // streams must be open already
    if (srcStream.Stream == null)
    {
        throw new ArgumentException("Source Stream must be open already");
    }

    if (destStream.Stream == null)
    {
        // Brief retry: the destination may still be in the middle of creation.
        Thread.Sleep(100);
        if (destStream.Stream == null)
        {
            throw new ArgumentException("Destination Stream must have been created already");
        }
    }

    this.SourceStream = srcStream;
    this.DestStream = destStream;
    // React to tracks discovered in the source as they appear.
    this.SourceStream.MediaTrackAdded += new MediaTrackAddedHandler(SourceStream_MediaTrackAdded);
}
/// <summary>
/// Builds the test fixture: adds a primary-audio stream, opens the test AVI
/// with all streams rendered, enumerates the stream back out, and caches its
/// IAudioMediaStream interface in m_ams.
/// </summary>
private void Config()
{
    var amms = (IAMMultiMediaStream)new AMMultiMediaStream();
    var mms = (IMultiMediaStream)amms;
    IMediaStream pStream = null;

    int hr = amms.AddMediaStream(null, MSPID.PrimaryAudio, AMMStream.None, pStream);
    MsError.ThrowExceptionForHR(hr);

    hr = amms.OpenFile("foo.avi", AMOpenModes.RenderAllStreams);
    MsError.ThrowExceptionForHR(hr);

    hr = mms.EnumMediaStreams(0, out pStream);
    MsError.ThrowExceptionForHR(hr);

    m_ams = pStream as IAudioMediaStream;
}
/// <summary>
/// Sets up a recode pipeline: reads the track characteristics of the source
/// stream (optionally restricted to audio or video, and to one video track),
/// adjusts them for the destination, and initializes the destination's
/// output headers.
/// </summary>
/// <exception cref="ArgumentOutOfRangeException">
/// No video track matched <paramref name="videoTrackID"/> and audio-only was not requested.
/// </exception>
public GenericRecodeWRC(IMediaStream srcStream, IMediaStream destStream, int videoTrackID, TracksIncluded audioOrVideoOnly = TracksIncluded.Both, bool cttsOut = false)
    : base(srcStream, destStream)
{
    audioOrVideoOrBoth = audioOrVideoOnly;
    CTTSOut = cttsOut;

    // get characteristics of input stream, and set FetchNextBlock callback on each track.
    TrackInfo = IsochronousTrackInfo.GetTrackCharacteristics(SourceStream, audioOrVideoOrBoth, videoTrackID);
    if ((!TrackInfo.Any(t => t is RawVideoTrackInfo)) && (audioOrVideoOnly != TracksIncluded.Audio))
    {
        // Bug fix: the single-string ctor treats its argument as the PARAMETER
        // NAME, not the message — pass both explicitly.
        throw new ArgumentOutOfRangeException(nameof(videoTrackID), "Video track specified does not exist");
    }

    AdjustTrackSpecsToDestination(); // adjust recode params according to output

    // setup destination stream here (initialize headers in output tracks)
    DestStream.InitializeForWriting(TrackInfo);
}
/// <summary>
/// Builds the test fixture: adds a primary-audio stream, opens the test AVI,
/// registers the underlying filter graph in the Running Object Table for
/// debugging (GraphEdit), and caches the IMultiMediaStream interface.
/// </summary>
private void Config()
{
    IMediaStream pStream = null;
    var amms = (IAMMultiMediaStream)new AMMultiMediaStream();

    int hr = amms.AddMediaStream(null, MSPID.PrimaryAudio, AMMStream.None, pStream);
    MsError.ThrowExceptionForHR(hr);

    hr = amms.OpenFile("foo.avi", AMOpenModes.RenderAllStreams);
    MsError.ThrowExceptionForHR(hr);

    IGraphBuilder pGraphBuilder;
    hr = amms.GetFilterGraph(out pGraphBuilder);
    MsError.ThrowExceptionForHR(hr);

    // Expose the graph in the ROT so external tools can attach to it.
    DsROTEntry rot = new DsROTEntry(pGraphBuilder);

    m_mms = (IMultiMediaStream)amms;
}
/// <summary>
/// Pushes property changes from the bound <see cref="Media"/> element into
/// the backing fields and the native media view.
/// </summary>
protected override void OnElementPropertyChanged(object sender, PropertyChangedEventArgs args)
{
    base.OnElementPropertyChanged(sender, args);

    if (args.PropertyName == Media.StreamProperty.PropertyName)
    {
        _stream = Element.Stream;
        // Use the cached _stream consistently (the original mixed Element.Stream
        // and _stream) and tolerate the stream being reset to null.
        _videoTrack = _stream?.GetVideoTracks().FirstOrDefault();
        _audioTrack = _stream?.GetAudioTracks().FirstOrDefault();
        _mediaView.SetTrack(_videoTrack);
    }
    else if (args.PropertyName == Media.LabelProperty.PropertyName)
    {
        // TODO(review): Label changes are currently ignored — confirm intent.
    }
    else if (args.PropertyName == Media.VideoMutedProperty.PropertyName)
    {
        // TODO(review): VideoMuted changes are currently ignored — confirm intent.
    }
    else if (args.PropertyName == Media.AudioMutedProperty.PropertyName)
    {
        // TODO(review): AudioMuted changes are currently ignored — confirm intent.
    }
}
/// <summary>
/// Caches the track, its format, and its parent stream, then wires up the
/// callbacks appropriate to the stream direction: slice/batch reading
/// callbacks (priming the cache when both caches are empty) for read
/// streams, or the header-preparation callback for write streams.
/// </summary>
public virtual void Initialize(GenericMediaTrack cachedTrack)
{
    track = cachedTrack;
    format = track.TrackFormat;
    stream = track.ParentStream;

    if (!stream.IsForReading)
    {
        // Writing path: only the header-preparation hook is needed.
        track.PrepareMediaHeaders += new SlicePutRequest(track_PrepareMediaHeaders);
        return;
    }

    // Reading path: hook slice delivery and lazy batch fetching.
    track.BlockWithSlice += new NextBlock(track_BlockWithSlice);
    track.TrackFormat.FetchNextBatch += new LazyRead(GetMoreBoxes);

    if ((writeCache == 0) && (readCache == 0))
    {
        PrepareSampleInfo(0UL); // fill up the cache with first four blocks
    }
}
/// <summary>
/// Ensures the segment's selected streams reflect the user's chosen audio
/// stream: removes all other audio streams and adds the chosen one, or
/// falls back to UpdateSelectedAudioStream when it has no tracks.
/// Failures are swallowed so playback is never interrupted.
/// </summary>
private void SelectAudioStream()
{
    if (this.SelectedAudioStream == null
        || this.CurrentSegment == null
        || this.CurrentSegment.AvailableStreams == null
        || this.CurrentSegment.SelectedStreams == null
        || this.CurrentSegment.SelectedStreams.Any(i => i.Id == this.SelectedAudioStream.Id))
    {
        return;
    }

    try
    {
        // Get currently selected streams
        List<IMediaStream> audioStreams = this.CurrentSegment.AvailableStreams
            .Where(i => i.Type == StreamType.Audio)
            .ToList();

        IMediaStream audioStream = audioStreams.FirstOrDefault(i => i.Id == this.SelectedAudioStream.Id);

        if (audioStream != null && audioStream.AvailableTracks.Any())
        {
            // Bug fix: the original computed streamsToRemove (dereferencing
            // audioStream.Id) BEFORE the null check, throwing NRE when the
            // selected stream is absent from the segment.
            List<IMediaStream> streamsToRemove = audioStreams
                .Where(i => i.Id != audioStream.Id)
                .ToList();

            // If one exists w/ the currently specified language add it to the list and set the streams
            List<IMediaStream> streamsToAdd = new List<IMediaStream> { audioStream };
            this.ModifySegmentSelectedStreams(this.CurrentSegment, streamsToAdd, streamsToRemove);
        }
        else
        {
            this.UpdateSelectedAudioStream();
        }
    }
    catch (Exception)
    {
        // Best-effort: selection failures must not crash playback.
        // TODO(review): log the exception instead of silently swallowing it.
    }
}
// Standard Xamarin.Forms custom-renderer lifecycle hook: on first attach it
// snapshots the element's media properties into backing fields, creates the
// native MediaView, pushes the current video track into it (if any), and
// installs it as the native control. Ordering matters: the track is set
// before SetNativeControl so the view is ready when it becomes visible.
protected override void OnElementChanged(ElementChangedEventArgs<Media> e)
{
    base.OnElementChanged(e);

    if (e.OldElement != null)
    {
        // Unsubscribe from event handlers and cleanup any resources.
    }

    if (e.NewElement != null)
    {
        if (Control == null)
        {
            // Snapshot all bindable properties into local state.
            _stream = e.NewElement.Stream;
            _hangup = e.NewElement.Hangup;
            _label = e.NewElement.Label;
            _videoMuted = e.NewElement.VideoMuted;
            _audioMuted = e.NewElement.AudioMuted;
            _cameraType = e.NewElement.CameraType;
            _showControls = e.NewElement.ShowControls;

            // Extract the first video/audio track only when a stream is bound.
            if (_stream is not null)
            {
                _videoTrack = _stream.GetVideoTracks().FirstOrDefault();
                _audioTrack = _stream.GetAudioTracks().FirstOrDefault();
            }

            // Instantiate the native control and assign it to the Control property with
            // the SetNativeControl method.
            var context = global::Xamarin.Essentials.Platform.CurrentActivity.ApplicationContext;
            _mediaView = new MediaView(context);
            if (_videoTrack is not null)
            {
                _mediaView.SetTrack(_videoTrack);
            }
            SetNativeControl(_mediaView);
        }
        // Configure the control and subscribe to event handlers.
    }
}
/// <summary>
/// Recode pipeline setup: collects the source stream's track characteristics
/// (optionally restricted to audio/video and to a single video track),
/// adjusts them for the destination, and initializes the destination's
/// output headers.
/// </summary>
/// <exception cref="ArgumentOutOfRangeException">
/// No video track matched <paramref name="videoTrackID"/> and audio-only was not requested.
/// </exception>
public GenericRecodeWRC(IMediaStream srcStream, IMediaStream destStream, int videoTrackID, TracksIncluded audioOrVideoOnly = TracksIncluded.Both, bool cttsOut = false)
    : base(srcStream, destStream)
{
    audioOrVideoOrBoth = audioOrVideoOnly;
    CTTSOut = cttsOut;

    // get characteristics of input stream, and set FetchNextBlock callback on each track.
    TrackInfo = IsochronousTrackInfo.GetTrackCharacteristics(SourceStream, audioOrVideoOrBoth, videoTrackID);
    if ((!TrackInfo.Any(t => t is RawVideoTrackInfo)) && (audioOrVideoOnly != TracksIncluded.Audio))
    {
        // Bug fix: ArgumentOutOfRangeException(string) treats its argument as the
        // parameter NAME, not a message — pass both explicitly.
        throw new ArgumentOutOfRangeException(nameof(videoTrackID), "Video track specified does not exist");
    }

    AdjustTrackSpecsToDestination(); // adjust recode params according to output

    // setup destination stream here (initialize headers in output tracks)
    DestStream.InitializeForWriting(TrackInfo);
}
/// <summary>
/// Attaches an audio media stream: queries its wave format and prepares a
/// matching IAudioData buffer object.
/// </summary>
/// <param name="pMediaStream">Stream to attach; must implement IAudioMediaStream.</param>
/// <returns>An MSStatus HRESULT; MS_E_HANDLE when the stream is not an audio stream.</returns>
/// <exception cref="ArgumentNullException">pMediaStream is null.</exception>
/// <exception cref="InvalidOperationException">The instance is already active.</exception>
public int SetMediaStream(IMediaStream pMediaStream)
{
    if (pMediaStream == null)
    {
        // nameof keeps the parameter name refactor-safe (was a magic string).
        throw new ArgumentNullException(nameof(pMediaStream));
    }
    if (IsValid)
    {
        throw new InvalidOperationException("Instance is already active.");
    }

    int hr = MSStatus.MS_E_HANDLE;
    _pAudioStream = pMediaStream as IAudioMediaStream;
    if (_pAudioStream != null)
    {
        hr = _pAudioStream.GetFormat(out _wfmt);
        if (MSStatus.Succeed(hr))
        {
            // Build an audio-data buffer object with the stream's format.
            AMAudioData amAudio = new AMAudioData();
            _pAudioData = (IAudioData)amAudio;
            hr = _pAudioData.SetFormat(ref _wfmt);
        }
        else
        {
            // Format query failed; detach again so the instance stays inactive.
            _pAudioStream = null;
        }
    }
    return hr;
}
/// <summary>
/// Convenience overload: recodes all tracks (video track ID 0 selects every
/// video track) with the default audio/video inclusion and CTTS settings.
/// </summary>
public GenericRecodeWRC(IMediaStream srcStream, IMediaStream destStream)
    : this(srcStream, destStream, 0)
{
}
/// <summary>
/// Event payload carrying the media stream an event refers to.
/// </summary>
/// <param name="stream">The stream; must not be null.</param>
public MediaStreamEventArgs(IMediaStream stream)
{
    // Code Contracts precondition: throws ArgumentNullException when the
    // contracts rewriter is enabled (a no-op otherwise).
    Contract.Requires<ArgumentNullException>(stream != null, nameof(stream));
    Stream = stream;
}
/// <summary>
/// Marshals the plugin's StreamSelected notification onto the dispatcher
/// thread before handing it to the virtual handler.
/// </summary>
private void MediaPlugin_StreamSelected(IAdaptiveMediaPlugin mediaPlugin, IMediaStream stream)
{
    Dispatcher.BeginInvoke(() =>
    {
        OnStreamSelected(mediaPlugin, stream);
    });
}
/// <summary>
/// Forwards GetMediaStream to the wrapped native multi-media stream.
/// Returns MS_E_HANDLE when this wrapper is not attached to a native object.
/// </summary>
public int GetMediaStream(Guid idPurpose, out IMediaStream ppMediaStream)
{
    ppMediaStream = null;

    if (!IsValid)
    {
        // No native object to delegate to.
        return MSStatus.MS_E_HANDLE;
    }

    return _pMMS.GetMediaStream(idPurpose, out ppMediaStream);
}
/// <summary>
/// Logs that data for the given stream was removed, tagging the entry with
/// the stream's name.
/// </summary>
private void MediaPlugin_StreamDataRemoved(IAdaptiveMediaPlugin mediaPlugin, IMediaStream mediaStream, TimeSpan timeStamp)
{
    var logProperties = new Dictionary<string, object>
    {
        { "StreamName", mediaStream.Name }
    };
    SendLogEntry(KnownLogEntryTypes.StreamDataRemoved, extendedProperties: logProperties);
}
/// <summary>
/// Collects isochronous track info for the selected tracks of a source
/// stream. Audio tracks are included unless <paramref name="audioVideo"/>
/// is Video; the video track matching <paramref name="videoTrackID"/>
/// (or all video tracks when it is 0) is included unless audioVideo is
/// Audio. Destination video track IDs are renumbered starting at 1.
/// </summary>
/// <exception cref="NotSupportedException">A track has an unknown codec type.</exception>
/// <exception cref="ArgumentException">The requested video track ID was not found.</exception>
public static List<IsochronousTrackInfo> GetTrackCharacteristics(IMediaStream source, TracksIncluded audioVideo, int videoTrackID)
{
    List<IsochronousTrackInfo> tracksInfo = new List<IsochronousTrackInfo>(source.MediaTracks.Count);
    int videoID = 1; // first destination video track ID should be 1

    foreach (IMediaTrack track in source.MediaTracks)
    {
        bool includeAudio = (track.Codec.CodecType == CodecTypes.Audio) && (audioVideo != TracksIncluded.Video);
        bool includeVideo = (track.Codec.CodecType == CodecTypes.Video)
            && ((videoTrackID == 0) || (track.TrackID == videoTrackID))
            && (audioVideo != TracksIncluded.Audio);
        if (!includeAudio && !includeVideo)
        {
            continue;
        }

        IsochronousTrackInfo baseTrkInfo;
        switch (track.Codec.CodecType)
        {
            case CodecTypes.Audio:
                baseTrkInfo = new RawAudioTrackInfo(source);
                break;
            case CodecTypes.Augment:
                baseTrkInfo = null; // FIXME: need IsochronousTrackInfo class for Augment
                break;
            case CodecTypes.Meta:
                baseTrkInfo = null; // FIXME: need IsochronousTrackInfo class for Meta
                break;
            case CodecTypes.Video:
                int sourceVideoID = (videoTrackID == 0) ? track.TrackID : videoTrackID;
                baseTrkInfo = new RawVideoTrackInfo(source, sourceVideoID);
                baseTrkInfo.TrackID = videoID++;
                break;
            default:
                // Was a bare Exception; callers catching Exception still catch this.
                throw new NotSupportedException("Unknown track type in input");
        }

        if (baseTrkInfo != null)
        {
            tracksInfo.Add(baseTrkInfo);
        }
    }

    if ((videoID == 1) && (videoTrackID > 0))
    {
        throw new ArgumentException("Track ID specified not found in source stream", nameof(videoTrackID));
    }

    return tracksInfo;
}
// Caches the track, its format, and its parent stream, then wires the
// direction-appropriate callbacks: slice/batch reading hooks (priming the
// sample cache when both caches are empty) for read streams, or the
// header-preparation hook for write streams.
public virtual void Initialize(GenericMediaTrack cachedTrack)
{
    track = cachedTrack;
    format = track.TrackFormat;
    stream = track.ParentStream;

    if (stream.IsForReading)
    {
        // Reading path: hook slice delivery and lazy batch fetching.
        track.BlockWithSlice += new NextBlock(track_BlockWithSlice);
        track.TrackFormat.FetchNextBatch += new LazyRead(GetMoreBoxes);

        if ((writeCache == 0) && (readCache == 0))
            PrepareSampleInfo(0UL); // fill up the cache with first four blocks
    }
    else
    {
        // Writing path: only header preparation is needed.
        track.PrepareMediaHeaders += new SlicePutRequest(track_PrepareMediaHeaders);
    }
}
/// <summary>
/// Cancels any pending data download for an unselected binary/text stream
/// and logs the unselection; failures are logged rather than rethrown.
/// </summary>
protected virtual void OnStreamUnselected(IAdaptiveMediaPlugin mediaPlugin, IMediaStream stream)
{
    try
    {
        bool isAuxiliaryStream = stream.Type == StreamType.Binary || stream.Type == StreamType.Text;
        if (!isAuxiliaryStream || !stream.AvailableTracks.Any())
        {
            return;
        }

        IMediaTrack firstTrack = stream.AvailableTracks.First();
        ActiveAdaptiveMediaPlugin.CancelDownloadStreamData(firstTrack);

        var logProperties = new Dictionary<string, object> { { "StreamName", stream.Name } };
        SendLogEntry(KnownLogEntryTypes.StreamUnselected, LogLevel.Debug, "OnStreamUnselected", extendedProperties: logProperties);
    }
    catch (Exception err)
    {
        string message = string.Format(SilverlightMediaFrameworkResources.GenericErrorOccurredLogMessage, "OnStreamUnselected", err.Message);
        SendLogEntry(KnownLogEntryTypes.GeneralErrorOccurred, LogLevel.Error, message);
    }
}
/// <summary>
/// Handles a newly available data chunk: queues it for download on the
/// stream's first track and logs the event. Errors are logged, never thrown.
/// </summary>
protected virtual void OnStreamDataAdded(IAdaptiveMediaPlugin mediaPlugin, IMediaStream stream, IDataChunk dataChunk)
{
    try
    {
        // Any() instead of Count() > 0 — consistent with OnStreamUnselected and
        // avoids enumerating the whole track sequence.
        if (stream.AvailableTracks.Any())
        {
            IMediaTrack track = stream.AvailableTracks.First();
            ActiveAdaptiveMediaPlugin.DownloadStreamData(track, dataChunk);
            SendLogEntry(KnownLogEntryTypes.StreamDataAdded, extendedProperties: new Dictionary<string, object> { { "StreamName", stream.Name } });
        }
    }
    catch (Exception err)
    {
        string message = string.Format(SilverlightMediaFrameworkResources.GenericErrorOccurredLogMessage, "OnStreamDataAdded", err.Message);
        SendLogEntry(KnownLogEntryTypes.GeneralErrorOccurred, LogLevel.Error, message);
    }
}
// Forwards the plugin's StreamUnselected notification straight to the
// virtual handler.
// NOTE(review): unlike the StreamSelected and StreamDataAdded handlers, this
// call is NOT marshalled through Dispatcher.BeginInvoke — confirm whether the
// synchronous, same-thread dispatch here is intentional.
private void MediaPlugin_StreamUnselected(IAdaptiveMediaPlugin mediaPlugin, IMediaStream stream)
{
    OnStreamUnselected(mediaPlugin, stream);
}
/// <summary>
/// Marshals the plugin's StreamDataAdded notification onto the dispatcher
/// thread before handing it to the virtual handler.
/// </summary>
private void MediaPlugin_StreamDataAdded(IAdaptiveMediaPlugin mediaPlugin, IMediaStream stream, IDataChunk dataChunk)
{
    Dispatcher.BeginInvoke(() =>
    {
        OnStreamDataAdded(mediaPlugin, stream, dataChunk);
    });
}
/// <summary>
/// A stream carries captions when it is a text stream whose subtype appears
/// in the allowed caption subtypes (case-insensitive, current culture).
/// </summary>
private static bool IsCaptionStream(IMediaStream mediaStream)
{
    if (mediaStream.Type != StreamType.Text)
    {
        return false;
    }

    return AllowedCaptionStreamSubTypes.Any(
        subType => string.Equals(subType, mediaStream.SubType, StringComparison.CurrentCultureIgnoreCase));
}
/// <summary>
/// Builds audio track info from the stream's first audio track (index 0),
/// copying the movie-level duration and time scale.
/// </summary>
public RawAudioTrackInfo(IMediaStream stream)
    : this((IAudioTrack)stream[CodecTypes.Audio, 0])
{
    this.MovieDurationIn100NanoSecs = stream.DurationIn100NanoSecs;
    // Time scale comes from the recode hints, not from the source stream.
    this.MovieTimeScale = Hints.StreamTimeScale;
}
/// <summary>
/// Called when a client starts publishing its IMediaStream; traces the event
/// and defers to the base implementation.
/// </summary>
/// <param name="client">The client that started publishing.</param>
/// <param name="mediaStream">The published stream.</param>
public override void OnStreamPublishStart(IClient client, IMediaStream mediaStream)
{
    // Trace the event before the base class processes it.
    Console.WriteLine("client OnStreamPublishStart");

    base.OnStreamPublishStart(client, mediaStream);
}