/// <summary>
/// Create a new instance of the MediaSession that sends and receives both
/// audio and video and kicks off the text-to-speech (TTVS) task.
/// </summary>
/// <param name="id">Identifier used to tag log entries for this session.</param>
/// <param name="correlationId">Correlation id passed through to the media sockets.</param>
public MediaSession(string id, string correlationId)
{
    this.Id = id;

    // RunContinuationsAsynchronously keeps continuations off the thread that
    // completes the TCS (avoids inline-continuation deadlocks/stack dives).
    _audioSendStatusActive = new TaskCompletionSource<bool>(TaskCreationOptions.RunContinuationsAsynchronously);
    _videoSendStatusActive = new TaskCompletionSource<bool>(TaskCreationOptions.RunContinuationsAsynchronously);

    Log.Info(new CallerInfo(), LogContext.FrontEnd, $"[{this.Id}]: Call created");

    try
    {
        // Create the audio socket (bidirectional; audio format is fixed at PCM 16 kHz).
        _audioSocket = new AudioSocket(new AudioSocketSettings
        {
            StreamDirections = StreamDirection.Sendrecv,
            SupportedAudioFormat = AudioFormat.Pcm16K,
            CallId = correlationId
        });

        Log.Info(new CallerInfo(), LogContext.FrontEnd, $"[{this.Id}]:Created AudioSocket");

        // Create the video socket; only the single default send format is offered.
        _videoSocket = new VideoSocket(new VideoSocketSettings
        {
            StreamDirections = StreamDirection.Sendrecv,
            ReceiveColorFormat = VideoColorFormat.NV12,
            SupportedSendVideoFormats = new List<VideoFormat>() { _defaultVideoFormat },
            CallId = correlationId
        });

        // Subscribe to send-status events so we know when media may flow.
        _audioSocket.AudioSendStatusChanged += OnAudioSendStatusChanged;
        _videoSocket.VideoSendStatusChanged += OnVideoSendStatusChanged;

        // Create the media configuration from the two sockets.
        MediaConfiguration = MediaPlatform.CreateMediaConfiguration(_audioSocket, _videoSocket);

        Log.Info(new CallerInfo(), LogContext.FrontEnd, $"[{this.Id}]: MediaConfiguration={MediaConfiguration.ToString(Formatting.Indented)}");

        // Fire-and-forget start of the TTS task. The previous code called
        // ConfigureAwait(false) on the unawaited task, which does nothing and
        // leaves any fault unobserved; ForgetAndLogException (used by the
        // player constructor in this file) logs failures instead.
        StartTTVS().ForgetAndLogException("Failed to start TTVS");
    }
    catch (Exception ex)
    {
        Log.Error(new CallerInfo(), LogContext.FrontEnd, "Error in MediaSession creation" + ex.ToString());
        Dispose();
        throw;
    }
}
/// <summary>
/// Create a new instance of the MediaSession.
/// </summary>
public MediaSession(string id, string correlationId, RealTimeMediaCall call)
{
    _correlationId = correlationId;
    this.Id = id;
    RealTimeMediaCall = call;

    Log.Info(new CallerInfo(), LogContext.FrontEnd, $"[{this.Id}]: Call created");

    try
    {
        // Receive-only audio socket; the audio format is currently fixed at PCM 16 kHz.
        var audioSettings = new AudioSocketSettings
        {
            StreamDirections = StreamDirection.Recvonly,
            SupportedAudioFormat = AudioFormat.Pcm16K,
            CallId = correlationId
        };
        _audioSocket = new AudioSocket(audioSettings);
        Log.Info(new CallerInfo(), LogContext.FrontEnd, $"[{this.Id}]:Created AudioSocket");

        // Receive-only video socket delivering NV12 frames.
        var videoSettings = new VideoSocketSettings
        {
            StreamDirections = StreamDirection.Recvonly,
            ReceiveColorFormat = VideoColorFormat.NV12,
            CallId = correlationId
        };
        _videoSocket = new VideoSocket(videoSettings);
        Log.Info(new CallerInfo(), LogContext.FrontEnd, $"[{this.Id}]: Created VideoSocket");

        // Wire up the media callbacks.
        _audioSocket.DominantSpeakerChanged += OnDominantSpeakerChanged;
        _videoSocket.VideoMediaReceived += OnVideoMediaReceived;

        MediaConfiguration = MediaPlatform.CreateMediaConfiguration(_audioSocket, _videoSocket);
        Log.Info(new CallerInfo(), LogContext.FrontEnd, $"[{this.Id}]: MediaConfiguration={MediaConfiguration.ToString(Formatting.Indented)}");
    }
    catch (Exception ex)
    {
        Log.Error(new CallerInfo(), LogContext.FrontEnd, "Error in MediaSession creation" + ex.ToString());
        Dispose();
        throw;
    }
}
/// <summary>
/// Create a new instance of the MediaSession that loops back received media
/// and runs speech recognition on the incoming audio.
/// </summary>
/// <param name="id">Identifier used to tag log entries for this session.</param>
/// <param name="correlationId">Correlation id passed through to the media sockets.</param>
/// <param name="call">The call that owns this media session.</param>
public MediaSession(string id, string correlationId, RealTimeMediaCall call)
{
    // NOTE(review): the 'correlationId' parameter is only used for the socket
    // settings; the field is taken from CorrelationId.GetCurrentId() instead —
    // confirm this divergence is intentional.
    _correlationId = CorrelationId.GetCurrentId();
    this.Id = id;
    RealTimeMediaCall = call;
    _speechRecoginitionFinished = new ManualResetEvent(false);

    Log.Info(new CallerInfo(), LogContext.FrontEnd, $"[{this.Id}]: Call created");

    try
    {
        // Bidirectional audio socket; audio format is currently fixed at PCM 16 kHz.
        _audioSocket = new AudioSocket(new AudioSocketSettings
        {
            StreamDirections = StreamDirection.Sendrecv,
            SupportedAudioFormat = AudioFormat.Pcm16K,
            CallId = correlationId
        });

        Log.Info(new CallerInfo(), LogContext.FrontEnd, $"[{this.Id}]:Created AudioSocket");

        // We loop back the video in this sample. The MediaPlatform always sends
        // only NV12 frames, so include only NV12 video in SupportedSendVideoFormats.
        // (The original list contained NV12_424x240_15Fps twice; the duplicate
        // entry has been removed.)
        _videoSocket = new VideoSocket(new VideoSocketSettings
        {
            StreamDirections = StreamDirection.Sendrecv,
            ReceiveColorFormat = VideoColorFormat.NV12,
            SupportedSendVideoFormats = new List<VideoFormat>()
            {
                VideoFormat.NV12_270x480_15Fps,
                VideoFormat.NV12_320x180_15Fps,
                VideoFormat.NV12_360x640_15Fps,
                VideoFormat.NV12_424x240_15Fps,
                VideoFormat.NV12_480x270_15Fps,
                VideoFormat.NV12_480x848_30Fps,
                VideoFormat.NV12_640x360_15Fps,
                VideoFormat.NV12_720x1280_30Fps,
                VideoFormat.NV12_848x480_30Fps,
                VideoFormat.NV12_960x540_30Fps
            },
            CallId = correlationId
        });

        Log.Info(new CallerInfo(), LogContext.FrontEnd, $"[{this.Id}]: Created VideoSocket");

        // Audio socket events.
        _audioSocket.AudioMediaReceived += OnAudioMediaReceived;
        _audioSocket.AudioSendStatusChanged += OnAudioSendStatusChanged;

        // Video socket events.
        _videoSocket.VideoMediaReceived += OnVideoMediaReceived;
        _videoSocket.VideoSendStatusChanged += OnVideoSendStatusChanged;

        MediaConfiguration = MediaPlatform.CreateMediaConfiguration(_audioSocket, _videoSocket);

        Log.Info(new CallerInfo(), LogContext.FrontEnd, $"[{this.Id}]: MediaConfiguration={MediaConfiguration.ToString(Formatting.Indented)}");

        StartSpeechRecognition();
    }
    catch (Exception ex)
    {
        Log.Error(new CallerInfo(), LogContext.FrontEnd, "Error in MediaSession creation" + ex.ToString());
        Dispose();
        throw;
    }
}
/// <summary>
/// Constructor: builds a send-only media session that plays downloaded
/// audio/video buffers into the call.
/// </summary>
/// <param name="id">Identifier used to tag log entries for this session.</param>
/// <param name="correlationId">Correlation id passed through to the media sockets.</param>
/// <param name="call">The call that owns this media session.</param>
public MediaSession(string id, string correlationId, RealTimeMediaCall call)
{
    _correlationId = CorrelationId.GetCurrentId();
    this.Id = id;
    RealTimeMediaCall = call;

    // Completion sources flipped by the send-status callbacks, plus the
    // buffers and gate used by the frame player.
    _audioSendStatusActive = new TaskCompletionSource<bool>();
    _videoSendStatusActive = new TaskCompletionSource<bool>();
    _videoMediaBuffers = new List<VideoMediaBuffer>();
    _audioMediaBuffers = new List<AudioMediaBuffer>();
    _startVideoPlayerCompleted = new ManualResetEvent(false);

    Log.Info(new CallerInfo(), LogContext.FrontEnd, $"[{this.Id}]: Call created");

    _downloadManager = new BlobDownloader(_mediaBufferToLoadInSeconds);

    try
    {
        // Send-only audio socket; audio format is currently fixed at PCM 16 kHz.
        var audioSettings = new AudioSocketSettings
        {
            StreamDirections = StreamDirection.Sendonly,
            SupportedAudioFormat = AudioFormat.Pcm16K,
            CallId = correlationId
        };
        _audioSocket = new AudioSocket(audioSettings);
        Log.Info(new CallerInfo(), LogContext.FrontEnd, $"[{this.Id}]:Created AudioSocket");

        // Send-only video socket. The MediaPlatform handles only NV12 frames,
        // so the supported send list contains NV12 formats exclusively.
        var videoSettings = new VideoSocketSettings
        {
            StreamDirections = StreamDirection.Sendonly,
            ReceiveColorFormat = VideoColorFormat.NV12,
            SupportedSendVideoFormats = new List<VideoFormat>()
            {
                VideoFormat.NV12_1280x720_30Fps,
                VideoFormat.NV12_270x480_15Fps,
                VideoFormat.NV12_320x180_15Fps,
                VideoFormat.NV12_360x640_15Fps,
                VideoFormat.NV12_424x240_15Fps,
                VideoFormat.NV12_480x270_15Fps,
                VideoFormat.NV12_480x848_30Fps,
                VideoFormat.NV12_640x360_15Fps,
                VideoFormat.NV12_720x1280_30Fps,
                VideoFormat.NV12_848x480_30Fps,
                VideoFormat.NV12_960x540_30Fps,
                VideoFormat.NV12_640x360_30Fps
            },
            CallId = correlationId
        };
        _videoSocket = new VideoSocket(videoSettings);
        Log.Info(new CallerInfo(), LogContext.FrontEnd, $"[{this.Id}]: Created VideoSocket");

        // Subscribe to the send-status callbacks on both sockets.
        _audioSocket.AudioSendStatusChanged += OnAudioSendStatusChanged;
        _videoSocket.VideoSendStatusChanged += OnVideoSendStatusChanged;

        MediaConfiguration = MediaPlatform.CreateMediaConfiguration(_audioSocket, _videoSocket);
        Log.Info(new CallerInfo(), LogContext.FrontEnd, $"[{this.Id}]: MediaConfiguration={MediaConfiguration.ToString(Formatting.Indented)}");

        // Fire-and-forget: failures are logged rather than awaited.
        StartAudioVideoFramePlayer().ForgetAndLogException("Failed to start the player");
    }
    catch (Exception ex)
    {
        Log.Error(new CallerInfo(), LogContext.FrontEnd, "Error in MediaSession creation" + ex.ToString());
        Dispose();
        throw;
    }
}