/// <summary>
        /// Callback from the media platform when raw audio is received. Copies the
        /// unmanaged buffer into managed memory and feeds it to the speech
        /// recognition stream, then releases the platform buffer.
        /// </summary>
        /// <param name="sender">The audio socket raising the event.</param>
        /// <param name="e">Event args carrying the unmanaged audio buffer.</param>
        private void OnAudioMediaReceived(object sender, AudioMediaReceivedEventArgs e)
        {
            CorrelationId.SetCurrentId(_correlationId);
            Log.Verbose(
                new CallerInfo(),
                LogContext.Media,
                "[{0}] [AudioMediaReceivedEventArgs(Data=<{1}>, Length={2}, Timestamp={3}, AudioFormat={4})]",
                this.Id,
                e.Buffer.Data.ToString(),
                e.Buffer.Length,
                e.Buffer.Timestamp,
                e.Buffer.AudioFormat);

            var managedAudio = new byte[e.Buffer.Length];
            Marshal.Copy(e.Buffer.Data, managedAudio, 0, (int)e.Buffer.Length);

            try
            {
                // If recognition already completed with an error/timeout, the underlying
                // stream may have been swapped out and disposed under us — tolerate that.
                _recognitionStream.Write(managedAudio, 0, managedAudio.Length);
            }
            catch (ObjectDisposedException)
            {
                Log.Info(new CallerInfo(), LogContext.Media, $"[{this.Id}]: Write on recognitionStream threw ObjectDisposed");
            }
            catch (Exception ex)
            {
                Log.Error(new CallerInfo(), LogContext.Media, $"[{this.Id}]: Caught an exception while processing the audio buffer {ex.ToString()}");
            }
            finally
            {
                // Always hand the unmanaged media buffer back to the platform.
                e.Buffer.Dispose();
            }
        }
// Example #2
        /// <summary>
        /// Callback invoked when the speech recognizer produces a result. On success,
        /// scans each recognized phrase for the last-mentioned color keyword
        /// ("red", "blue" or "green") and updates <c>DefaultHueColor</c> accordingly.
        /// </summary>
        /// <param name="result">The recognition result from the speech service.</param>
        /// <returns>A completed task; all work is done synchronously.</returns>
        public Task OnRecognitionResult(RecognitionResult result)
        {
            CorrelationId.SetCurrentId(_correlationId);
            if (result.RecognitionStatus != RecognitionStatus.Success)
            {
                Log.Info(new CallerInfo(), LogContext.Media, $"[{this.Id}]: Speech recognize result {result.RecognitionStatus}");
                return(Task.CompletedTask);
            }
            else
            {
                Log.Info(new CallerInfo(), LogContext.Media, $"[{this.Id}]: Speech recognize success");
            }

            //since we had a success recognition
            try
            {
                foreach (RecognitionPhrase phrase in result.Phrases)
                {
                    // Keyword matching is not linguistic: use invariant lowering and
                    // ordinal search so behavior does not vary with the current culture
                    // (CA1304/CA1307; e.g. the Turkish 'I' problem).
                    string message = phrase.DisplayText.ToLowerInvariant();
                    Log.Info(new CallerInfo(), LogContext.Media, $"[{this.Id}]: Received from speech api {message}");

                    int redIndex   = message.LastIndexOf("red", StringComparison.Ordinal);
                    int blueIndex  = message.LastIndexOf("blue", StringComparison.Ordinal);
                    int greenIndex = message.LastIndexOf("green", StringComparison.Ordinal);

                    // The color mentioned last in the phrase wins.
                    int colorIndex = Math.Max(greenIndex, Math.Max(redIndex, blueIndex));
                    if (colorIndex == -1)
                    {
                        // BUGFIX: previously returned here, silently dropping any
                        // remaining phrases; keep scanning the rest instead.
                        continue;
                    }

                    if (colorIndex == redIndex)
                    {
                        DefaultHueColor = Color.Red;
                    }
                    else if (colorIndex == blueIndex)
                    {
                        DefaultHueColor = Color.Blue;
                    }
                    else
                    {
                        DefaultHueColor = Color.Green;
                    }

                    Log.Info(new CallerInfo(), LogContext.Media, $"[{this.Id}]: Changing hue to {DefaultHueColor.ToString()}");
                }
            }
            catch (Exception ex)
            {
                Log.Info(new CallerInfo(), LogContext.Media, $"[{this.Id}]: Exception in OnRecognitionResult {ex.ToString()}");
            }
            return(Task.CompletedTask);
        }
// Example #3
        /// <summary>
        /// Informational callback from the media platform about audio send status
        /// changes. The first time the status becomes active, audio sending is enabled.
        /// </summary>
        /// <param name="sender">The audio socket raising the event.</param>
        /// <param name="e">Event args describing the new send status.</param>
        private void OnAudioSendStatusChanged(object sender, AudioSendStatusChangedEventArgs e)
        {
            CorrelationId.SetCurrentId(_correlationId);
            Log.Info(
                new CallerInfo(),
                LogContext.Media,
                $"[{this.Id}]: AudioSendStatusChangedEventArgs(MediaSendStatus={e.MediaSendStatus})"
                );

            // Flip the send flag exactly once, on the first transition to Active.
            bool isActive = e.MediaSendStatus == MediaSendStatus.Active;
            if (isActive && !_sendAudio)
            {
                _sendAudio = true;
            }
        }
        /// <summary>
        /// Informational callback from the media platform about audio send status
        /// changes. Completes <c>_audioSendStatusActive</c> once the status becomes active.
        /// </summary>
        /// <param name="sender">The audio socket raising the event.</param>
        /// <param name="e">Event args describing the new send status.</param>
        private void OnAudioSendStatusChanged(object sender, AudioSendStatusChangedEventArgs e)
        {
            CorrelationId.SetCurrentId(_correlationId);
            Log.Info(
                new CallerInfo(),
                LogContext.Media,
                "[AudioSendStatusChangedEventArgs(MediaSendStatus={0})]",
                e.MediaSendStatus);

            if (e.MediaSendStatus == MediaSendStatus.Active)
            {
                // BUGFIX: SetResult throws InvalidOperationException if the platform
                // reports Active more than once (e.g. Active -> Inactive -> Active);
                // TrySetResult makes the completion idempotent.
                _audioSendStatusActive.TrySetResult(true);
            }
        }
        /// <summary>
        /// Informational callback from the media platform about video status changes.
        /// Completes <c>_videoSendStatusActive</c> once the status becomes active,
        /// signaling that video can be sent.
        /// </summary>
        /// <param name="sender">The video socket raising the event.</param>
        /// <param name="e">Event args describing the new send status and preferred format.</param>
        private void OnVideoSendStatusChanged(object sender, VideoSendStatusChangedEventArgs e)
        {
            CorrelationId.SetCurrentId(_correlationId);

            Log.Info(new CallerInfo(), LogContext.Media, "OnVideoSendStatusChanged start");

            Log.Info(
                new CallerInfo(),
                LogContext.Media,
                "[VideoSendStatusChangedEventArgs(MediaSendStatus=<{0}>;PreferredVideoSourceFormat=<{1}>]",
                e.MediaSendStatus,
                e.PreferredVideoSourceFormat.VideoColorFormat);

            if (e.MediaSendStatus == MediaSendStatus.Active)
            {
                // BUGFIX: SetResult throws InvalidOperationException if the platform
                // reports Active more than once; TrySetResult is idempotent.
                _videoSendStatusActive.TrySetResult(true);
            }
        }
// Example #6
        /// <summary>
        /// Informational callback from the media platform about video status changes.
        /// The first time the status becomes active, video sending is enabled.
        /// </summary>
        /// <param name="sender">The video socket raising the event.</param>
        /// <param name="e">Event args describing the new send status and preferred format.</param>
        private void OnVideoSendStatusChanged(object sender, VideoSendStatusChangedEventArgs e)
        {
            CorrelationId.SetCurrentId(_correlationId);

            Log.Info(
                new CallerInfo(),
                LogContext.Media,
                "[{0}]: [VideoSendStatusChangedEventArgs(MediaSendStatus=<{1}>;PreferredVideoSourceFormat=<{2}>]",
                this.Id,
                e.MediaSendStatus,
                e.PreferredVideoSourceFormat.VideoColorFormat);

            // Nothing to do unless this is the first transition to Active.
            if (e.MediaSendStatus != MediaSendStatus.Active || _sendVideo)
            {
                return;
            }

            //Start sending video once the Video Status changes to Active
            Log.Info(new CallerInfo(), LogContext.Media, $"[{this.Id}] Start sending video");

            _sendVideo = true;
        }
        /// <summary>
        /// Listen for dominant speaker changes in the conference and subscribe to the
        /// new dominant speaker's stream on a background task.
        /// </summary>
        /// <param name="sender">The socket raising the event.</param>
        /// <param name="e">Event args identifying the current dominant speaker.</param>
        private void OnDominantSpeakerChanged(object sender, DominantSpeakerChangedEventArgs e)
        {
            CorrelationId.SetCurrentId(_correlationId);
            Log.Info(
                new CallerInfo(),
                LogContext.Media,
                $"[{this.Id}:OnDominantSpeakerChanged(DominantSpeaker={e.CurrentDominantSpeaker})]"
                );

            // Fire-and-forget: subscription failures are logged and swallowed so this
            // event handler never throws back into the media platform.
            Task.Run(SubscribeToDominantSpeakerAsync);

            async Task SubscribeToDominantSpeakerAsync()
            {
                try
                {
                    await RealTimeMediaCall.Subscribe(e.CurrentDominantSpeaker, true).ConfigureAwait(false);
                }
                catch (Exception ex)
                {
                    Log.Warning(new CallerInfo(), LogContext.FrontEnd, $"[{this.Id}]: Ignoring exception in subscribe {ex}");
                }
            }
        }
// Example #8
        /// <summary>
        /// Callback from the media platform when raw video is received. Copies the frame
        /// into managed memory, applies the currently selected hue, and sends the
        /// modified frame back out on the video socket (loopback to the user).
        /// </summary>
        /// <param name="sender">The video socket raising the event.</param>
        /// <param name="e">Event args carrying the unmanaged video buffer.</param>
        private void OnVideoMediaReceived(object sender, VideoMediaReceivedEventArgs e)
        {
            try
            {
                CorrelationId.SetCurrentId(_correlationId);

                Log.Verbose(
                    new CallerInfo(),
                    LogContext.Media,
                    "[{0}] [VideoMediaReceivedEventArgs(Data=<{1}>, Length={2}, Timestamp={3}, Width={4}, Height={5}, ColorFormat={6}, FrameRate={7})]",
                    this.Id,
                    e.Buffer.Data.ToString(),
                    e.Buffer.Length,
                    e.Buffer.Timestamp,
                    e.Buffer.VideoFormat.Width,
                    e.Buffer.VideoFormat.Height,
                    e.Buffer.VideoFormat.VideoColorFormat,
                    e.Buffer.VideoFormat.FrameRate);

                // Copy the unmanaged frame into managed memory before mutating it.
                byte[] buffer = new byte[e.Buffer.Length];
                Marshal.Copy(e.Buffer.Data, buffer, 0, (int)e.Buffer.Length);

                // CLEANUP: removed an unused `e.Buffer as VideoMediaBuffer` local.
                AddHue(DefaultHueColor, buffer, e.Buffer.VideoFormat.Width, e.Buffer.VideoFormat.Height);

                VideoFormat sendVideoFormat = GetSendVideoFormat(e.Buffer.VideoFormat);
                var         videoSendBuffer = new VideoSendBuffer(buffer, (uint)buffer.Length, sendVideoFormat);
                _videoSocket.Send(videoSendBuffer);
            }
            catch (Exception ex)
            {
                Log.Error(new CallerInfo(), LogContext.Media, $"[{this.Id}]: Exception in VideoMediaReceived {ex.ToString()}");
            }
            finally
            {
                // Always hand the unmanaged media buffer back to the platform.
                e.Buffer.Dispose();
            }
        }
// Example #9
        /// <summary>
        /// Create a new instance of the MediaSession. Creates send/receive audio and
        /// video sockets, wires up their media events, builds the media configuration,
        /// and starts speech recognition.
        /// </summary>
        /// <param name="id">Identifier for this session, used in log messages.</param>
        /// <param name="correlationId">Correlation id assigned to the media sockets.</param>
        /// <param name="call">The call that owns this media session.</param>
        public MediaSession(string id, string correlationId, RealTimeMediaCall call)
        {
            // NOTE(review): _correlationId is taken from CorrelationId.GetCurrentId()
            // rather than the correlationId parameter — confirm this is intentional.
            _correlationId              = CorrelationId.GetCurrentId();
            this.Id                     = id;
            RealTimeMediaCall           = call;
            _speechRecoginitionFinished = new ManualResetEvent(false);

            Log.Info(new CallerInfo(), LogContext.FrontEnd, $"[{this.Id}]: Call created");

            try
            {
                _audioSocket = new AudioSocket(new AudioSocketSettings
                {
                    StreamDirections     = StreamDirection.Sendrecv,
                    SupportedAudioFormat = AudioFormat.Pcm16K, // audio format is currently fixed at PCM 16 KHz.
                    CallId = correlationId
                });

                Log.Info(new CallerInfo(), LogContext.FrontEnd, $"[{this.Id}]:Created AudioSocket");

                _videoSocket = new VideoSocket(new VideoSocketSettings
                {
                    StreamDirections   = StreamDirection.Sendrecv,
                    ReceiveColorFormat = VideoColorFormat.NV12,

                    //We loop back the video in this sample. The MediaPlatform always sends only NV12 frames. So include only NV12 video in supportedSendVideoFormats
                    //BUGFIX: removed a duplicate NV12_424x240_15Fps entry from this list.
                    SupportedSendVideoFormats = new List <VideoFormat>()
                    {
                        VideoFormat.NV12_270x480_15Fps,
                        VideoFormat.NV12_320x180_15Fps,
                        VideoFormat.NV12_360x640_15Fps,
                        VideoFormat.NV12_424x240_15Fps,
                        VideoFormat.NV12_480x270_15Fps,
                        VideoFormat.NV12_480x848_30Fps,
                        VideoFormat.NV12_640x360_15Fps,
                        VideoFormat.NV12_720x1280_30Fps,
                        VideoFormat.NV12_848x480_30Fps,
                        VideoFormat.NV12_960x540_30Fps
                    },
                    CallId = correlationId
                });

                Log.Info(new CallerInfo(), LogContext.FrontEnd, $"[{this.Id}]: Created VideoSocket");


                //audio socket events
                _audioSocket.AudioMediaReceived     += OnAudioMediaReceived;
                _audioSocket.AudioSendStatusChanged += OnAudioSendStatusChanged;

                //Video socket events
                _videoSocket.VideoMediaReceived     += OnVideoMediaReceived;
                _videoSocket.VideoSendStatusChanged += OnVideoSendStatusChanged;

                MediaConfiguration = MediaPlatform.CreateMediaConfiguration(_audioSocket, _videoSocket);

                Log.Info(new CallerInfo(), LogContext.FrontEnd, $"[{this.Id}]: MediaConfiguration={MediaConfiguration.ToString(Formatting.Indented)}");

                StartSpeechRecognition();
            }
            catch (Exception ex)
            {
                // Constructor failed part-way: release whatever was created, then rethrow.
                Log.Error(new CallerInfo(), LogContext.FrontEnd, "Error in MediaSession creation" + ex.ToString());
                Dispose();
                throw;
            }
        }
        /// <summary>
        /// Callback from the media platform when raw video is received. At most once per
        /// <c>VideoCaptureFrequency</c> interval, copies the frame into managed memory,
        /// converts it to a bitmap and caches it in <c>CurrentVideoImage</c>; the first
        /// captured frame also triggers a chat message for the call.
        /// </summary>
        /// <param name="sender">The video socket raising the event.</param>
        /// <param name="e">Event args carrying the unmanaged video buffer.</param>
        private void OnVideoMediaReceived(object sender, VideoMediaReceivedEventArgs e)
        {
            try
            {
                CorrelationId.SetCurrentId(_correlationId);

                // BUGFIX: use UtcNow to match the Utc-named timestamp field; DateTime.Now
                // is local time and misbehaves across DST transitions.
                if (DateTime.UtcNow > this._lastVideoCapturedTimeUtc + this.VideoCaptureFrequency)
                {
                    // Update the last capture timestamp
                    this._lastVideoCapturedTimeUtc = DateTime.UtcNow;

                    Log.Info(
                        new CallerInfo(),
                        LogContext.Media,
                        "[{0}]: Capturing image: [VideoMediaReceivedEventArgs(Data=<{1}>, Length={2}, Timestamp={3}, Width={4}, Height={5}, ColorFormat={6}, FrameRate={7})]",
                        this.Id,
                        e.Buffer.Data.ToString(),
                        e.Buffer.Length,
                        e.Buffer.Timestamp,
                        e.Buffer.VideoFormat.Width,
                        e.Buffer.VideoFormat.Height,
                        e.Buffer.VideoFormat.VideoColorFormat,
                        e.Buffer.VideoFormat.FrameRate);

                    // Make a copy of the media buffer
                    Stopwatch watch = new Stopwatch();
                    watch.Start();

                    byte[] buffer = new byte[e.Buffer.Length];
                    Marshal.Copy(e.Buffer.Data, buffer, 0, (int)e.Buffer.Length);

                    // BUGFIX: removed a Marshal.AllocHGlobal copy that was never used and
                    // never freed (leaked one frame-sized native allocation per capture),
                    // plus an unused `e.Buffer as VideoMediaBuffer` local.

                    watch.Stop();
                    Log.Info(new CallerInfo(), LogContext.Media, $"{this.Id} Took {watch.ElapsedMilliseconds} ms to copy buffer");

                    // Transform to bitmap object
                    Bitmap bmpObject = MediaUtils.TransformNV12ToBmpFaster(buffer, e.Buffer.VideoFormat.Width, e.Buffer.VideoFormat.Height);

                    // Only the very first captured frame triggers a chat message.
                    bool sendChatMessage = (CurrentVideoImage == null);
                    Log.Info(new CallerInfo(), LogContext.Media, $"{this.Id} send chat message {sendChatMessage}");

                    // Update the bitmap cache
                    CurrentVideoImage = bmpObject;

                    if (sendChatMessage)
                    {
                        // Fire-and-forget: failures are logged and ignored.
                        Task.Run(async () =>
                        {
                            try
                            {
                                await RealTimeMediaCall.SendMessageForCall(RealTimeMediaCall);
                            }
                            catch (Exception ex)
                            {
                                Log.Info(new CallerInfo(), LogContext.FrontEnd, $"Exception in SendingChatMessage {ex}");
                            }
                        });
                    }
                }
            }
            catch (Exception ex)
            {
                Log.Error(new CallerInfo(), LogContext.Media, $"{this.Id} Exception in VideoMediaReceived {ex.ToString()}");
            }
            finally
            {
                // BUGFIX: dispose in a finally block so the unmanaged platform buffer is
                // released even if logging in the catch block throws.
                e.Buffer.Dispose();
            }
        }
        /// <summary>
        /// Constructor. Creates send-only audio and video sockets, wires up their
        /// send-status events, builds the media configuration, and starts the
        /// audio/video frame player.
        /// </summary>
        /// <param name="id">Identifier for this session, used in log messages.</param>
        /// <param name="correlationId">Correlation id assigned to the media sockets.</param>
        /// <param name="call">The call that owns this media session.</param>
        public MediaSession(string id, string correlationId, RealTimeMediaCall call)
        {
            _correlationId             = CorrelationId.GetCurrentId();
            this.Id                    = id;
            RealTimeMediaCall          = call;
            _audioSendStatusActive     = new TaskCompletionSource <bool>();
            _videoSendStatusActive     = new TaskCompletionSource <bool>();
            _videoMediaBuffers         = new List <VideoMediaBuffer>();
            _audioMediaBuffers         = new List <AudioMediaBuffer>();
            _startVideoPlayerCompleted = new ManualResetEvent(false);

            Log.Info(new CallerInfo(), LogContext.FrontEnd, $"[{this.Id}]: Call created");

            _downloadManager = new BlobDownloader(_mediaBufferToLoadInSeconds);
            try
            {
                // Audio is send-only; the platform currently supports PCM 16 KHz only.
                var audioSettings = new AudioSocketSettings
                {
                    StreamDirections     = StreamDirection.Sendonly,
                    SupportedAudioFormat = AudioFormat.Pcm16K,
                    CallId = correlationId
                };
                _audioSocket = new AudioSocket(audioSettings);

                Log.Info(new CallerInfo(), LogContext.FrontEnd, $"[{this.Id}]:Created AudioSocket");

                // The MediaPlatform always sends NV12 frames, so advertise only NV12
                // formats in the supported send list.
                var supportedSendFormats = new List <VideoFormat>()
                {
                    VideoFormat.NV12_1280x720_30Fps,
                    VideoFormat.NV12_270x480_15Fps,
                    VideoFormat.NV12_320x180_15Fps,
                    VideoFormat.NV12_360x640_15Fps,
                    VideoFormat.NV12_424x240_15Fps,
                    VideoFormat.NV12_480x270_15Fps,
                    VideoFormat.NV12_480x848_30Fps,
                    VideoFormat.NV12_640x360_15Fps,
                    VideoFormat.NV12_720x1280_30Fps,
                    VideoFormat.NV12_848x480_30Fps,
                    VideoFormat.NV12_960x540_30Fps,
                    VideoFormat.NV12_640x360_30Fps
                };

                var videoSettings = new VideoSocketSettings
                {
                    StreamDirections          = StreamDirection.Sendonly,
                    ReceiveColorFormat        = VideoColorFormat.NV12,
                    SupportedSendVideoFormats = supportedSendFormats,
                    CallId = correlationId
                };
                _videoSocket = new VideoSocket(videoSettings);

                Log.Info(new CallerInfo(), LogContext.FrontEnd, $"[{this.Id}]: Created VideoSocket");

                // Wire up send-status events for both sockets.
                _audioSocket.AudioSendStatusChanged += OnAudioSendStatusChanged;
                _videoSocket.VideoSendStatusChanged += OnVideoSendStatusChanged;

                MediaConfiguration = MediaPlatform.CreateMediaConfiguration(_audioSocket, _videoSocket);

                Log.Info(new CallerInfo(), LogContext.FrontEnd, $"[{this.Id}]: MediaConfiguration={MediaConfiguration.ToString(Formatting.Indented)}");
                StartAudioVideoFramePlayer().ForgetAndLogException("Failed to start the player");
            }
            catch (Exception ex)
            {
                // Constructor failed part-way: release whatever was created, then rethrow.
                Log.Error(new CallerInfo(), LogContext.FrontEnd, "Error in MediaSession creation" + ex.ToString());
                Dispose();
                throw;
            }
        }