Code Example #1
        public void TestCustom()
        {
            byte[] random = new byte[5];

            Media.Utility.Random.NextBytes(random);

            // Create SDP offer (Step 1).
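            // Originator (o=) field order per RFC 4566: <username> <sess-id> <sess-version> <nettype> <addrtype> <unicast-address>.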
            string originatorAndSession = String.Format("{0} {1} {2} {3} {4} {5}", "-", BitConverter.ToString(random).Replace("-", string.Empty), "0", "IN", "IP4", "10.1.1.2");

            using (var sdp = new Media.Sdp.SessionDescription(0, originatorAndSession, "sipsorcery"))
            {
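                // Add the connection (c=) line and an audio media description to the offer.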
                sdp.Add(new Media.Sdp.SessionDescriptionLine("c=IN IP4 10.1.1.2"), false);
                var audioAnnouncement = new Media.Sdp.MediaDescription(Media.Sdp.MediaType.audio, 0, "SDP_TRANSPORT", 0);
                sdp.Add(audioAnnouncement, false);

                // Set up the RTP channel (Step 2).
                using (var _rtpAudioClient = Media.Rtp.RtpClient.FromSessionDescription(sdp))
                {
                    var _audioRTPTransportContext = _rtpAudioClient.GetTransportContexts().FirstOrDefault(); // The transport context is null at this point.

                    System.Diagnostics.Debug.Assert(_audioRTPTransportContext != null, "Cannot find the context");

                    //System.Diagnostics.Debug.Assert(_audioRTPTransportContext.IsActive == false, "Found a active context");

                    //_rtpAudioClient.Activate();

                    System.Diagnostics.Debug.Assert(_audioRTPTransportContext.IsActive == true, "Did not find an active context");
                }
            }
        }
Code Example #2
                public TestFramework()
                {
                    //  Create a receiving socket.
                    _receiving = new System.Net.Sockets.Socket(_rtspServer.AddressFamily, System.Net.Sockets.SocketType.Stream, System.Net.Sockets.ProtocolType.Tcp);

                    //  Connect to the server.
                    System.IAsyncResult connectResult = _receiving.BeginConnect(_rtspServer, new System.AsyncCallback((iar) =>
                    {
                        try { _receiving.EndConnect(iar); }
                        catch { }
                    }), null);

                    //  Get the sender socket to be used by the "server".
                    _sender = _listenSocket.Accept();

                    //  RtspClient default size
                    byte[] buffer = new byte[8192];

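                    //  The RtpClient's shared memory begins at offset RtspMessage.MaximumLength within the buffer.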
                    _client = new Media.Rtp.RtpClient(new Media.Common.MemorySegment(buffer, Media.Rtsp.RtspMessage.MaximumLength, buffer.Length - Media.Rtsp.RtspMessage.MaximumLength));
                    _client.OutOfBandData      += ProcessInterleaveData;
                    _client.RtpPacketReceieved += ProcessRtpPacket;

                    Media.Sdp.MediaDescription md = new Media.Sdp.MediaDescription(Media.Sdp.MediaType.video, 999, "H.264", 0);

                    Media.Rtp.RtpClient.TransportContext tc = new Media.Rtp.RtpClient.TransportContext(0, 1,
                                                                                                       Media.RFC3550.Random32(9876), md, false, _senderSSRC);
                    //  Create a Duplexed receiver using the RtspClient socket.
                    tc.Initialize(_receiving);

                    _client.TryAddContext(tc);
                }
Code Example #3
        public void TestCustom()
        {
            byte[] random = new byte[5];

            Media.Utility.Random.NextBytes(random);

            // Create SDP offer (Step 1).
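            // Originator (o=) field order per RFC 4566: <username> <sess-id> <sess-version> <nettype> <addrtype> <unicast-address>.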
            string originatorAndSession = String.Format("{0} {1} {2} {3} {4} {5}", "-", BitConverter.ToString(random).Replace("-", string.Empty), "0", "IN", "IP4", "10.1.1.2");
            using (var sdp = new Media.Sdp.SessionDescription(0, originatorAndSession, "sipsorcery"))
            {
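                // Add the connection (c=) line and an audio media description to the offer.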
                sdp.Add(new Media.Sdp.SessionDescriptionLine("c=IN IP4 10.1.1.2"), false);
                var audioAnnouncement = new Media.Sdp.MediaDescription(Media.Sdp.MediaType.audio, 0, "SDP_TRANSPORT", 0);
                sdp.Add(audioAnnouncement, false);

                // Set up the RTP channel (Step 2).
                using (var _rtpAudioClient = Media.Rtp.RtpClient.FromSessionDescription(sdp))
                {
                    var _audioRTPTransportContext = _rtpAudioClient.GetTransportContexts().FirstOrDefault();

                    System.Diagnostics.Debug.Assert(_audioRTPTransportContext != null, "Cannot find the context");

                    System.Diagnostics.Debug.Assert(_audioRTPTransportContext.IsActive == false, "Found an Active context");

                    //Activate the RtpClient
                    _rtpAudioClient.Activate();

                    System.Diagnostics.Debug.Assert(_audioRTPTransportContext.IsActive == false, "Found an Active context");

                    System.Diagnostics.Debug.Assert(_rtpAudioClient.IsActive == true, "Did not find an Active RtpClient");

                    System.Diagnostics.Debug.Assert(_rtpAudioClient.SendReports() == false, "SendReports cannot be true, context is not active.");
                }
            }
        }
Code Example #4
    public void CreateMediaDescriptionTest()
    {
        //RtpClient has the following property
        //Media.Rtp.RtpClient.AvpProfileIdentifier
        //I don't think it should be specified in the SDP Classes but I can figure out something else if desired.

        string profile = "RTP/AVP";

        Media.Sdp.MediaType mediaType = Media.Sdp.MediaType.audio;

        int mediaPort = 15000;

        //Iterate the possible media format values (a separate test should cover lists of values?)
        for (int mediaFormat = 0; mediaFormat <= 999; ++mediaFormat)
        {
            //Create a MediaDescription
            using (var mediaDescription = new Media.Sdp.MediaDescription(mediaType, mediaPort, profile, mediaFormat))
            {
                System.Diagnostics.Debug.Assert(mediaDescription.MediaProtocol == profile, "Did not find MediaProtocol '" + profile + "'");

                System.Diagnostics.Debug.Assert(mediaDescription.PayloadTypes.Count() == 1, "Found more than one payload type in the PayloadTypes List");

                System.Diagnostics.Debug.Assert(mediaDescription.PayloadTypes.First() == mediaFormat, "Did not find correct MediaFormat");

                System.Diagnostics.Debug.Assert(mediaDescription.ToString() == string.Format("m={0} {1} RTP/AVP {2}\r\n", mediaType, mediaPort, mediaFormat), "Did not output correct result");
            }
        }
    }
Code Example #5
    public void ParseMediaDescriptionUnitTest()
    {
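        //Test vector taken from the rtpmap example in RFC 4566.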
        string testVector = @" m=audio 49230 RTP/AVP 96 97 98
            a=rtpmap:96 L8/8000
            a=rtpmap:97 L16/8000
            a=rtpmap:98 L16/11025/2";

        using (var md = new Media.Sdp.MediaDescription(testVector))
        {
            System.Diagnostics.Debug.Assert(md.Lines.Count() == 4, "MediaDescription must have 4 lines");

            //The CLR does not assert correctly when comparing with ==:
            //md.MediaDescriptionLine.ToString() == "m=audio 49230 RTP/AVP 96 97 98"

            System.Diagnostics.Debug.Assert(md.PayloadTypes.Count() == 3, "Could not read the Payload List");

            System.Diagnostics.Debug.Assert(md.PayloadTypes.First() == 96, "Could not read the Payload List");

            System.Diagnostics.Debug.Assert(md.PayloadTypes.ToArray()[1] == 97, "Could not read the Payload List");

            System.Diagnostics.Debug.Assert(md.PayloadTypes.Last() == 98, "Could not read the Payload List");

            System.Diagnostics.Debug.Assert(string.Compare(md.MediaDescriptionLine.ToString(), "m=audio 49230 RTP/AVP 96 97 98\r\n") == 0, "Did not handle the Payload List correctly");

        }
    }
Code Example #6
                public TestFramework()
                {
                    //  Create a receiving socket.
                    _receiving = new System.Net.Sockets.Socket(_rtspServer.AddressFamily, System.Net.Sockets.SocketType.Stream, System.Net.Sockets.ProtocolType.Tcp);

                    //  Connect to the server.
                    System.IAsyncResult connectResult = _receiving.BeginConnect(_rtspServer, new System.AsyncCallback((iar) =>
                    {
                        try { _receiving.EndConnect(iar); }
                        catch { }
                    }), null);

                    //  Get the sender socket to be used by the "server".
                    _sender = _listenSocket.Accept();

                    //  RtspClient default size
                    byte[] buffer = new byte[8192];

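                    //  The RtpClient's shared memory begins at offset RtspMessage.MaximumLength within the buffer.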
                    _client = new Media.Rtp.RtpClient(new Media.Common.MemorySegment(buffer, Media.Rtsp.RtspMessage.MaximumLength, buffer.Length - Media.Rtsp.RtspMessage.MaximumLength));
                    _client.InterleavedData += ProcessInterleaveData;
                    _client.RtpPacketReceieved += ProcessRtpPacket;

                    Media.Sdp.MediaDescription md = new Media.Sdp.MediaDescription(Media.Sdp.MediaType.video, 999, "H.264", 0);

                    Media.Rtp.RtpClient.TransportContext tc = new Media.Rtp.RtpClient.TransportContext(0, 1,
                        Media.RFC3550.Random32(9876), md, false, _senderSSRC);
                    //  Create a Duplexed receiver using the RtspClient socket.
                    tc.Initialize(_receiving, _receiving);

                    _client.TryAddContext(tc);
                }