/// <summary>
/// Creates the local media session.
/// </summary>
/// <param name="mediaSessionId">
/// The media session identifier.
/// This should be a unique value for each call.
/// </param>
/// <returns>The <see cref="ILocalMediaSession"/>.</returns>
private ILocalMediaSession CreateLocalMediaSession(Guid mediaSessionId = default)
{
    try
    {
        var videoSocketSettings = new List<VideoSocketSettings>
        {
            // add the main video socket sendrecv capable
            new VideoSocketSettings
            {
                StreamDirections = StreamDirection.Sendrecv,

                // We loop back the video in this sample. The MediaPlatform always sends only NV12 frames.
                // So include only NV12 video in supportedSendVideoFormats
                ReceiveColorFormat = VideoColorFormat.NV12,
                SupportedSendVideoFormats = BotMediaStream.VideoFormatMap.Values.OfType<VideoFormat>().ToList(),
                MaxConcurrentSendStreams = 1,
            },
        };

        // create the receive only sockets settings for the multiview support
        for (int i = 0; i < BotConstants.NumberOfMultiviewSockets; i++)
        {
            videoSocketSettings.Add(new VideoSocketSettings
            {
                StreamDirections = StreamDirection.Recvonly,
                ReceiveColorFormat = VideoColorFormat.NV12,
            });
        }

        // Create the VBSS socket settings
        var vbssSocketSettings = new VideoSocketSettings
        {
            StreamDirections = StreamDirection.Recvonly,
            ReceiveColorFormat = VideoColorFormat.NV12,
            MediaType = MediaType.Vbss,
            SupportedSendVideoFormats = new List<VideoFormat>
            {
                VideoFormat.NV12_1920x1080_15Fps,
            },
        };

        // create media session object, this is needed to establish call connections
        return this.Client.CreateMediaSession(
            new AudioSocketSettings
            {
                StreamDirections = StreamDirection.Sendrecv,

                // Note! Currently, the only audio format supported when receiving unmixed audio is Pcm16K
                SupportedAudioFormat = AudioFormat.Pcm16K,
                ReceiveUnmixedMeetingAudio = true, // get the extra buffers for the speakers
            },
            videoSocketSettings,
            vbssSocketSettings,
            mediaSessionId: mediaSessionId);
    }
    catch (Exception e)
    {
        // Log the full exception (type, stack trace, inner exceptions) rather than
        // only e.Message, which discards the diagnostic context.
        _logger.Log(System.Diagnostics.TraceLevel.Error, e.ToString());
        throw; // rethrow preserving the original stack trace
    }
}
/// <summary>
/// Creates the local media session.
/// </summary>
/// <param name="correlationId">The correlation identifier.</param>
/// <returns>The <see cref="ILocalMediaSession"/>.</returns>
private ILocalMediaSession CreateLocalMediaSession(Guid correlationId)
{
    // Main video socket: send/receive capable, H264 on the receive side.
    var mainVideoSocket = new VideoSocketSettings
    {
        StreamDirections = StreamDirection.Sendrecv,
        ReceiveColorFormat = VideoColorFormat.H264,

        // We loop back the video in this sample. The MediaPlatform always sends only NV12 frames.
        // So include only NV12 video in supportedSendVideoFormats
        SupportedSendVideoFormats = Constants.SupportedSendVideoFormats,
        MaxConcurrentSendStreams = 1,
    };

    var videoSockets = new List<VideoSocketSettings> { mainVideoSocket };

    // Receive-only sockets for the multiview support.
    for (var socketIndex = 0; socketIndex < Constants.NumberOfMultivewSockets; socketIndex++)
    {
        videoSockets.Add(new VideoSocketSettings
        {
            StreamDirections = StreamDirection.Recvonly,
            ReceiveColorFormat = VideoColorFormat.H264,
        });
    }

    // Receive-only VBSS (screen-sharing) socket.
    // fps 1.875 is required for h264 in vbss scenario:
    // refer to Raw/Encoded Frame Format Recommendation - VbSS section in
    // http://msrtc/documentation/cloud_video_interop/#platform-capabilities-for-encodedecode
    var vbssSocket = new VideoSocketSettings
    {
        StreamDirections = StreamDirection.Recvonly,
        ReceiveColorFormat = VideoColorFormat.NV12,
        MediaType = MediaType.Vbss,
        SupportedSendVideoFormats = new List<VideoFormat>
        {
            VideoFormat.H264_320x180_1_875Fps,
        },
    };

    // Send/receive audio socket in Pcm16K format.
    var audioSocket = new AudioSocketSettings
    {
        StreamDirections = StreamDirection.Sendrecv,
        SupportedAudioFormat = AudioFormat.Pcm16K,
    };

    // The media session object is needed to establish call connections.
    return this.Client.CreateMediaSession(audioSocket, videoSockets, vbssSocket, correlationId);
}
/// <summary>
/// Creates the local media session.
/// </summary>
/// <param name="mediaSessionId">
/// The media session identifier.
/// This should be a unique value for each call.
/// </param>
/// <returns>The <see cref="ILocalMediaSession"/>.</returns>
private ILocalMediaSession CreateLocalMediaSession(Guid mediaSessionId = default(Guid))
{
    // Main video socket: send/receive capable, H264 on the receive side.
    var sockets = new List<VideoSocketSettings>
    {
        new VideoSocketSettings
        {
            StreamDirections = StreamDirection.Sendrecv,
            ReceiveColorFormat = VideoColorFormat.H264,

            // We loop back the video in this sample. The MediaPlatform always sends only NV12 frames.
            // So include only NV12 video in supportedSendVideoFormats
            SupportedSendVideoFormats = SampleConstants.SupportedSendVideoFormats,
            MaxConcurrentSendStreams = 1,
        },
    };

    // One receive-only socket per multiview slot.
    for (var slot = 0; slot < SampleConstants.NumberOfMultiviewSockets; slot++)
    {
        sockets.Add(new VideoSocketSettings
        {
            StreamDirections = StreamDirection.Recvonly,
            ReceiveColorFormat = VideoColorFormat.H264,
        });
    }

    // Receive-only VBSS (screen-sharing) socket.
    // fps 1.875 is required for h264 in vbss scenario.
    var vbssSocket = new VideoSocketSettings
    {
        StreamDirections = StreamDirection.Recvonly,
        ReceiveColorFormat = VideoColorFormat.H264,
        MediaType = MediaType.Vbss,
        SupportedSendVideoFormats = new List<VideoFormat>
        {
            VideoFormat.H264_1920x1080_1_875Fps,
        },
    };

    // Send/receive audio socket in Pcm16K format.
    var audioSettings = new AudioSocketSettings
    {
        StreamDirections = StreamDirection.Sendrecv,
        SupportedAudioFormat = AudioFormat.Pcm16K,
    };

    // The media session object is needed to establish call connections.
    return this.Client.CreateMediaSession(
        audioSettings,
        sockets,
        vbssSocket,
        mediaSessionId: mediaSessionId);
}
/// <summary>
/// Creates the local media session.
/// </summary>
/// <param name="mediaSessionId">
/// The media session identifier.
/// This should be a unique value for each call.
/// </param>
/// <returns>The <see cref="ILocalMediaSession"/>.</returns>
private ILocalMediaSession CreateLocalMediaSession(Guid mediaSessionId = default(Guid))
{
    // Receive-only video sockets for the multiview support;
    // this variant creates no main send/receive video socket.
    var videoSockets = new List<VideoSocketSettings>();
    for (var index = 0; index < SampleConstants.NumberOfMultiviewSockets; index++)
    {
        videoSockets.Add(new VideoSocketSettings
        {
            StreamDirections = StreamDirection.Recvonly,
            ReceiveColorFormat = VideoColorFormat.H264,
        });
    }

    // Receive-only VBSS (screen-sharing) socket.
    // fps 1.875 is required for h264 in vbss scenario.
    var vbssSettings = new VideoSocketSettings
    {
        StreamDirections = StreamDirection.Recvonly,
        ReceiveColorFormat = VideoColorFormat.H264,
        MediaType = MediaType.Vbss,
        SupportedSendVideoFormats = new List<VideoFormat>
        {
            VideoFormat.H264_1920x1080_1_875Fps,
        },
    };

    // Receive-only audio socket in Pcm16K format.
    var audioSettings = new AudioSocketSettings
    {
        StreamDirections = StreamDirection.Recvonly,
        SupportedAudioFormat = AudioFormat.Pcm16K,
    };

    // The media session object is needed to establish call connections.
    return this.Client.CreateMediaSession(
        audioSettings,
        videoSockets,
        vbssSettings,
        mediaSessionId: mediaSessionId);
}
/// <summary>
/// Creates the local media session.
/// </summary>
/// <param name="mediaSessionId">
/// The media session identifier.
/// This should be a unique value for each call.
/// </param>
/// <returns>The <see cref="ILocalMediaSession"/>.</returns>
private ILocalMediaSession CreateLocalMediaSession(Guid mediaSessionId = default(Guid))
{
    var videoSocketSettings = new List<VideoSocketSettings>
    {
        // add the main video socket sendrecv capable
        new VideoSocketSettings
        {
            StreamDirections = StreamDirection.Sendrecv,

            // Receive color format is selected at compile time:
            // raw NV12 frames when USE_NV12 is defined, otherwise encoded H264.
#if USE_NV12
            ReceiveColorFormat = VideoColorFormat.NV12,
#else
            ReceiveColorFormat = VideoColorFormat.H264,
#endif
#if PLAY_MEDIA_FILE
            // We play a pre-recorded video from disk in this sample. The MediaPlatform always sends only H264 frames.
            // So include only H264 video in supportedSendVideoFormats
            SupportedSendVideoFormats = SampleConstants.SupportedSendVideoFormats,
#else
            // Otherwise we can support a wide range of formats when manipulating raw video frames.
            SupportedSendVideoFormats = BotMediaStream.VideoFormatMap.Values.OfType<VideoFormat>().ToList(),
#endif
            MaxConcurrentSendStreams = 1,
        },
    };

    // create the receive only sockets settings for the multiview support
    for (int i = 0; i < SampleConstants.NumberOfMultiviewSockets; i++)
    {
        videoSocketSettings.Add(new VideoSocketSettings
        {
            StreamDirections = StreamDirection.Recvonly,
#if USE_NV12
            ReceiveColorFormat = VideoColorFormat.NV12,
#else
            ReceiveColorFormat = VideoColorFormat.H264,
#endif
        });
    }

    // Create the VBSS socket settings (receive only, screen sharing)
    var vbssSocketSettings = new VideoSocketSettings
    {
        StreamDirections = StreamDirection.Recvonly,
#if USE_NV12
        ReceiveColorFormat = VideoColorFormat.NV12,
#else
        ReceiveColorFormat = VideoColorFormat.H264,
#endif
        MediaType = MediaType.Vbss,
        SupportedSendVideoFormats = new List<VideoFormat>
        {
            // fps 1.875 is required for h264 in vbss scenario.
#if USE_NV12
            VideoFormat.NV12_1920x1080_1_875Fps,
#else
            VideoFormat.H264_1920x1080_1_875Fps,
#endif
        },
    };

    // create media session object, this is needed to establish call connections
    var mediaSession = this.Client.CreateMediaSession(
        new AudioSocketSettings
        {
            StreamDirections = StreamDirection.Sendrecv,
            SupportedAudioFormat = AudioFormat.Pcm16K,
        },
        videoSocketSettings,
        vbssSocketSettings,
        mediaSessionId: mediaSessionId);
    return (mediaSession);
}