/// <summary>
/// Creates the local media session.
/// </summary>
/// <param name="mediaSessionId">The media session identifier.
/// This should be a unique value for each call.</param>
/// <returns>The <see cref="ILocalMediaSession"/>.</returns>
private ILocalMediaSession CreateLocalMediaSession(Guid mediaSessionId = default)
{
    try
    {
        var videoSocketSettings = new List<VideoSocketSettings>
        {
            // Add the main video socket, send/receive capable.
            new VideoSocketSettings
            {
                StreamDirections = StreamDirection.Sendrecv,

                // We loop back the video in this sample. The MediaPlatform always sends only NV12 frames,
                // so include only NV12 video in SupportedSendVideoFormats.
                ReceiveColorFormat = VideoColorFormat.NV12,
                SupportedSendVideoFormats = BotMediaStream.VideoFormatMap.Values.OfType<VideoFormat>().ToList(),
                MaxConcurrentSendStreams = 1,
            },
        };

        // Create the receive-only socket settings for multiview support.
        for (int i = 0; i < BotConstants.NumberOfMultiviewSockets; i++)
        {
            videoSocketSettings.Add(new VideoSocketSettings
            {
                StreamDirections = StreamDirection.Recvonly,
                ReceiveColorFormat = VideoColorFormat.NV12,
            });
        }

        // Create the VBSS (screen-sharing) socket settings.
        // NOTE(review): this socket is receive-only, so SupportedSendVideoFormats is
        // presumably ignored for sending; kept from the original sample — confirm
        // against the Media SDK before removing.
        var vbssSocketSettings = new VideoSocketSettings
        {
            StreamDirections = StreamDirection.Recvonly,
            ReceiveColorFormat = VideoColorFormat.NV12,
            MediaType = MediaType.Vbss,
            SupportedSendVideoFormats = new List<VideoFormat>
            {
                VideoFormat.NV12_1920x1080_15Fps,
            },
        };

        // Create the media session object; this is needed to establish call connections.
        return this.Client.CreateMediaSession(
            new AudioSocketSettings
            {
                StreamDirections = StreamDirection.Sendrecv,

                // Note! Currently, the only audio format supported when receiving unmixed audio is Pcm16K.
                SupportedAudioFormat = AudioFormat.Pcm16K,
                ReceiveUnmixedMeetingAudio = true, // get the extra buffers for the speakers
            },
            videoSocketSettings,
            vbssSocketSettings,
            mediaSessionId: mediaSessionId);
    }
    catch (Exception e)
    {
        // Log the full exception (type + stack trace), not just e.Message,
        // so media-session creation failures can actually be diagnosed;
        // rethrow with `throw;` to preserve the original stack trace.
        _logger.Log(System.Diagnostics.TraceLevel.Error, e.ToString());
        throw;
    }
}
/// <summary>
/// Creates the local media session.
/// This variant receives audio only; the video socket is created inactive.
/// </summary>
/// <param name="mediaSessionId">The media session identifier.
/// This should be a unique value for each call.</param>
/// <returns>The <see cref="ILocalMediaSession"/>.</returns>
private ILocalMediaSession CreateLocalMediaSession(Guid mediaSessionId = default)
{
    try
    {
        // Create the media session object; this is needed to establish call connections.
        return this.Client.CreateMediaSession(
            new AudioSocketSettings
            {
                StreamDirections = StreamDirection.Recvonly,

                // Note! Currently, the only audio format supported when receiving unmixed audio is Pcm16K.
                SupportedAudioFormat = AudioFormat.Pcm16K,
                ReceiveUnmixedMeetingAudio = true, // get the extra buffers for the speakers
            },
            new VideoSocketSettings
            {
                StreamDirections = StreamDirection.Inactive,
            },
            mediaSessionId: mediaSessionId);
    }
    catch (Exception e)
    {
        // Log the full exception (type + stack trace), not just e.Message,
        // so media-session creation failures can actually be diagnosed;
        // rethrow with `throw;` to preserve the original stack trace.
        _logger.Log(System.Diagnostics.TraceLevel.Error, e.ToString());
        throw;
    }
}
/// <summary>
/// Creates a log entry with a set of common information describing the unhandled exception.
/// </summary>
/// <param name="logger">The logger to write the log entry to.</param>
/// <param name="exception">The exception that was unhandled.</param>
/// <param name="logLevel">The log level to apply to the entry when it is created.</param>
/// <exception cref="ArgumentNullException">
/// Thrown when <paramref name="logger"/> or <paramref name="exception"/> is null.
/// </exception>
public static void UnhandledExceptionEvent(this IGraphLogger logger, Exception exception, LogLevel logLevel = LogLevel.Critical)
{
    // Validate eagerly so callers get a clear ArgumentNullException instead of a
    // NullReferenceException surfacing from inside the logging pipeline.
    if (logger is null)
    {
        throw new ArgumentNullException(nameof(logger));
    }

    if (exception is null)
    {
        throw new ArgumentNullException(nameof(exception));
    }

    var entry = new UnhandledExceptionLogEntry(exception);
    logger.Log(logLevel, entry);
}