Example #1
        protected override async Task ExecuteAsync(CancellationToken stoppingToken)
        {
            // Set up media sources.
            _maxSource = new FFmpegFileSource(MP4_PATH, true, new AudioEncoder());
            _maxSource.Initialise();

            _testPatternSource  = new VideoTestPatternSource();
            _testPatternEncoder = new FFmpegVideoEndPoint();
            _testPatternSource.OnVideoSourceRawSample += _testPatternEncoder.ExternalVideoSourceRawSample;

            _musicSource = new AudioExtrasSource(new AudioEncoder(),
                new AudioSourceOptions { AudioSource = AudioSourcesEnum.Music });

            // The same sources are used for all connected peers (broadcast) so the codecs need
            // to be restricted to a single well-supported option.
            _musicSource.RestrictCodecs(new List<AudioCodecsEnum> { AudioCodecsEnum.PCMU });
            _maxSource.RestrictCodecs(new List<VideoCodecsEnum> { VIDEO_CODEC });
            _testPatternEncoder.RestrictCodecs(new List<VideoCodecsEnum> { VIDEO_CODEC });

            // Start web socket.
            _logger.LogInformation("Starting web socket server...");

            WebSocketServer webSocketServer = null;

            if (!string.IsNullOrWhiteSpace(_certificatePath))
            {
                if (!File.Exists(_certificatePath))
                {
                    throw new ApplicationException($"Certificate file could not be found at {_certificatePath}.");
                }

                webSocketServer = new WebSocketServer(IPAddress.Any, WEBSOCKET_PORT, true);
                webSocketServer.SslConfiguration.ServerCertificate          = new X509Certificate2(_certificatePath);
                webSocketServer.SslConfiguration.CheckCertificateRevocation = false;
                webSocketServer.SslConfiguration.EnabledSslProtocols        = System.Security.Authentication.SslProtocols.Tls12;
            }
            else
            {
                webSocketServer = new WebSocketServer(IPAddress.Any, WEBSOCKET_PORT);
            }

            webSocketServer.AddWebSocketService<WebRTCWebSocketPeer>("/", (peer) => peer.CreatePeerConnection = () => CreatePeerConnection(null));
            webSocketServer.AddWebSocketService<WebRTCWebSocketPeer>($"/{MAX_URL}", (peer) => peer.CreatePeerConnection = () => CreatePeerConnection(MAX_URL));
            webSocketServer.Start();

            _logger.LogInformation($"Waiting for web socket connections on {webSocketServer.Address}:{webSocketServer.Port}...");

            await Task.Delay(Timeout.Infinite, stoppingToken);
        }
Example #2
        /// <summary>
        /// An asynchronous task that attempts to initiate a new call to a listening UAS.
        /// </summary>
        /// <param name="sipTransport">The transport object to use for the send.</param>
        /// <param name="dst">The destination end point to send the request to.</param>
        /// <returns>True if the expected response was received, false otherwise.</returns>
        private static async Task<bool> InitiateCallTaskAsync(SIPTransport sipTransport, SIPURI dst)
        {
            //UdpClient hepClient = new UdpClient(0, AddressFamily.InterNetwork);

            try
            {
                //sipTransport.SIPRequestOutTraceEvent += (localEP, remoteEP, req) =>
                //{
                //    logger.LogDebug($"Request sent: {localEP}->{remoteEP}");
                //    logger.LogDebug(req.ToString());

                //    //var hepBuffer = HepPacket.GetBytes(localEP, remoteEP, DateTimeOffset.Now, 333, "myHep", req.ToString());
                //    //hepClient.SendAsync(hepBuffer, hepBuffer.Length, "192.168.11.49", 9060);
                //};

                //sipTransport.SIPResponseInTraceEvent += (localEP, remoteEP, resp) =>
                //{
                //    logger.LogDebug($"Response received: {localEP}<-{remoteEP}");
                //    logger.LogDebug(resp.ToString());

                //    //var hepBuffer = HepPacket.GetBytes(remoteEP, localEP, DateTimeOffset.Now, 333, "myHep", resp.ToString());
                //    //hepClient.SendAsync(hepBuffer, hepBuffer.Length, "192.168.11.49", 9060);
                //};

                var ua = new SIPUserAgent(sipTransport, null);
                ua.ClientCallTrying   += (uac, resp) => logger.LogInformation($"{uac.CallDescriptor.To} Trying: {resp.StatusCode} {resp.ReasonPhrase}.");
                ua.ClientCallRinging  += (uac, resp) => logger.LogInformation($"{uac.CallDescriptor.To} Ringing: {resp.StatusCode} {resp.ReasonPhrase}.");
                ua.ClientCallFailed   += (uac, err, resp) => logger.LogWarning($"{uac.CallDescriptor.To} Failed: {err}");
                ua.ClientCallAnswered += (uac, resp) => logger.LogInformation($"{uac.CallDescriptor.To} Answered: {resp.StatusCode} {resp.ReasonPhrase}.");

                var audioOptions = new AudioSourceOptions { AudioSource = AudioSourcesEnum.Silence };
                var audioExtrasSource = new AudioExtrasSource(new AudioEncoder(), audioOptions);
                audioExtrasSource.RestrictCodecs(new List<AudioCodecsEnum> { AudioCodecsEnum.PCMU });
                var voipMediaSession = new VoIPMediaSession(new MediaEndPoints { AudioSource = audioExtrasSource });

                var result = await ua.Call(dst.ToString(), null, null, voipMediaSession);

                ua.Hangup();

                // Brief pause to allow the hangup request to be sent before returning.
                await Task.Delay(200);

                return result;
            }
            catch (Exception excp)
            {
                logger.LogError($"Exception InitiateCallTaskAsync. {excp.Message}");
                return false;
            }
        }
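
A hedged usage sketch of the method above, run from an async context; the transport setup and the destination URI are illustrative assumptions, not part of the original example:

        // Illustrative only: create a UDP transport and fire off a single test call.
        var sipTransport = new SIPTransport();
        sipTransport.AddSIPChannel(new SIPUDPChannel(new IPEndPoint(IPAddress.Any, 0)));

        var dstUri = SIPURI.ParseSIPURIRelaxed("100@127.0.0.1"); // hypothetical destination
        bool callSucceeded = await InitiateCallTaskAsync(sipTransport, dstUri);
        Console.WriteLine($"Call attempt result: {(callSucceeded ? "answered" : "failed")}.");

        sipTransport.Shutdown();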
Example #3
        static async Task Main()
        {
            Console.WriteLine("SIPSorcery Convert Audio");

            Log = AddConsoleLogger();

            //WaveFormatConversionStream converter = new WaveFormatConversionStream(_format_s16le48k, )
            _waveFile = new WaveFileWriter("output_s16le48k.mp3", _format_s16le48k);

            var sipTransport = new SIPTransport();
            var userAgent    = new SIPUserAgent(sipTransport, null);

            userAgent.OnCallHungup += (dialog) =>
            {
                Console.WriteLine("Call hungup.");
                _waveFile?.Close();
            };

            //EnableTraceLogs(sipTransport);

            var audioOptions = new AudioSourceOptions { AudioSource = AudioSourcesEnum.Silence };
            var audioCodecs = new List<AudioCodecsEnum> { AudioCodecsEnum.PCMU };
            var audioExtrasSource = new AudioExtrasSource(new AudioEncoder(), audioOptions);

            audioExtrasSource.RestrictCodecs(audioCodecs);
            var rtpSession = new VoIPMediaSession(new MediaEndPoints { AudioSource = audioExtrasSource });

            rtpSession.OnRtpPacketReceived += RtpSession_OnRtpPacketReceived;

            // Place the call and wait for the result.
            bool callResult = await userAgent.Call(DESTINATION, null, null, rtpSession);

            Console.WriteLine($"Call result {((callResult) ? "success" : "failure")}.");

            Console.WriteLine("press any key to exit...");
            Console.Read();

            if (userAgent.IsCallActive)
            {
                Console.WriteLine("Hanging up.");
                userAgent.Hangup();
            }

            // Clean up.
            sipTransport.Shutdown();
        }
Example #4
        private VoIPMediaSession CreateMockVoIPMediaEndPoint(List<AudioCodecsEnum> supportedCodecs = null)
        {
            var audioSource = new AudioExtrasSource();

            audioSource.RestrictCodecs(supportedCodecs);

            MediaEndPoints mockEndPoints = new MediaEndPoints
            {
                AudioSource = audioSource
            };

            return new VoIPMediaSession(mockEndPoints);
        }
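
A possible test usage of the factory above; the PCMU-only codec list is an assumption for illustration:

        // Illustrative only: build a mock media session that will negotiate PCMU and nothing else.
        var mockSession = CreateMockVoIPMediaEndPoint(new List<AudioCodecsEnum> { AudioCodecsEnum.PCMU });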
Example #5
        /// <summary>
        /// Example of how to create a basic RTP session object and hook up the event handlers.
        /// </summary>
        /// <param name="ua">The user agent the RTP session is being created for.</param>
        /// <param name="dst">THe destination specified on an incoming call. Can be used to
        /// set the audio source.</param>
        /// <returns>A new RTP session object.</returns>
        private static VoIPMediaSession CreateRtpSession(SIPUserAgent ua, string dst)
        {
            var codecs = new List<AudioCodecsEnum> { AudioCodecsEnum.PCMU, AudioCodecsEnum.PCMA, AudioCodecsEnum.G722 };

            var audioSource = AudioSourcesEnum.SineWave;

            if (string.IsNullOrEmpty(dst) || !Enum.TryParse<AudioSourcesEnum>(dst, out audioSource))
            {
                audioSource = AudioSourcesEnum.Silence;
            }

            Log.LogInformation($"RTP audio session source set to {audioSource}.");

            var audioExtrasSource = new AudioExtrasSource(new AudioEncoder(),
                new AudioSourceOptions { AudioSource = audioSource });

            audioExtrasSource.RestrictCodecs(codecs);
            var rtpAudioSession = new VoIPMediaSession(new MediaEndPoints { AudioSource = audioExtrasSource });

            rtpAudioSession.AcceptRtpFromAny = true;

            // Wire up the event handler for RTP packets received from the remote party.
            rtpAudioSession.OnRtpPacketReceived += (ep, type, rtp) => OnRtpPacketReceived(ua, type, rtp);
            rtpAudioSession.OnTimeout           += (mediaType) =>
            {
                if (ua?.Dialogue != null)
                {
                    Log.LogWarning($"RTP timeout on call with {ua.Dialogue.RemoteTarget}, hanging up.");
                }
                else
                {
                    Log.LogWarning("RTP timeout on incomplete call, closing RTP session.");
                }

                ua?.Hangup();
            };

            return rtpAudioSession;
        }
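
A hedged sketch of how CreateRtpSession might be wired into an incoming call handler; the user agent and transport setup follow the common SIPSorcery pattern and are assumed here rather than taken from the original example:

        // Illustrative only: answer inbound calls with a session built by CreateRtpSession.
        var userAgent = new SIPUserAgent(sipTransport, null);
        userAgent.OnIncomingCall += async (ua, sipRequest) =>
        {
            var uas = ua.AcceptCall(sipRequest);
            // The request URI user part can select the audio source (e.g. "Music", "SineWave").
            var rtpSession = CreateRtpSession(ua, sipRequest.URI.User);
            await ua.Answer(uas, rtpSession);
        };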