Beispiel #1
0
        /// <summary>
        /// Dispatches a STREAM frame to its stream, creating the stream on first use.
        /// </summary>
        /// <param name="frame">A frame that must be a <see cref="StreamFrame"/>.</param>
        /// <returns>The stream the frame belongs to.</returns>
        private QuicStream OnStreamFrame(Frame frame)
        {
            StreamFrame sf       = (StreamFrame)frame;
            StreamId    streamId = sf.StreamId;

            // Single lookup via TryGetValue instead of ContainsKey + indexer.
            if (!_streams.TryGetValue(streamId.Id, out QuicStream stream))
            {
                stream = new QuicStream(this, streamId);

                if ((UInt64)_streams.Count < MaxStreams)
                {
                    _streams.Add(streamId.Id, stream);
                }
                else
                {
                    // Stream limit exceeded: report the error to the peer.
                    // NOTE(review): the frame is still processed below even though
                    // the stream is not tracked — confirm this best-effort
                    // behavior is intended.
                    SendMaximumStreamReachedError();
                }

                OnStreamOpened?.Invoke(stream);
            }

            stream.ProcessData(sf);

            return(stream);
        }
Beispiel #2
0
        /// <summary>
        /// Connects to a local device, logs in, snapshots a keyframe and then
        /// streams channel-0 video into a raw H.264 file.
        /// </summary>
        /// <param name="args">Unused command-line arguments.</param>
        static async Task Main(string[] args)
        {
            // Run this together with SnoopApp for diagnostics
            connection = new DeviceConnection();
            connection.UnknownCommandReceived += Connection_UnknownCommandReceived;
            await connection.Connect("127.0.0.1", 5000);

            Console.WriteLine("Connected");
            // NOTE(review): the next line was mangled when credentials were
            // redacted at the source; it presumably read the password and did
            // 'LoginSuccess loginSuccess = await connection.Login("admin", password);'
            // — it will not compile as-is and must be restored by hand.
            Console.Write("Password: "******"admin", password);

            Console.WriteLine("Logged in");
            Console.WriteLine("Device name: {0}", loginSuccess.ProductInfo.DeviceName);
            Console.WriteLine("Firmware version: {0}", loginSuccess.ProductInfo.FirmwareVersion);
            StreamFrame keyframe = await connection.SnapKeyframe(0);

            Console.WriteLine("Keyframe received");
            Console.WriteLine("Width: {0}", keyframe.Width);
            Console.WriteLine("Height: {0}", keyframe.Height);
            // File.WriteAllBytes("C:\\temp\\keyframe.h264", keyframe.Data);
            // Subscribe before starting the stream so no early frame is missed.
            connection.StreamFrameReceived += Connection_StreamFrameReceived;
            fs       = new FileStream("C:\\temp\\channel0a.h264", FileMode.Create, FileAccess.Write, FileShare.Read);
            streamId = await connection.StreamStart(0);

            // Console.ReadKey();
            // Await a task that never completes: keep the process alive so the
            // StreamFrameReceived handler keeps writing frames.
            await(new TaskCompletionSource <bool>().Task);
        }
Beispiel #3
0
        /// <summary>
        /// Serializes a client handshake message into stream 1 and compares the
        /// resulting bytes against a captured reference frame, byte by byte.
        /// </summary>
        public void Stream1ClientInchoateGoogleCachedServerParametersClientMessage()
        {
            var tags          = Messages.ClientHandshakeMessageTest.ClientInchoateGoogleCachedServerParametersClientMessageFactory.Value;
            var message2      = new ClientHandshakeMessage(tags);
            var messageBytes2 = message2.ToByteArray();

            var stream = new StreamFrame(1, 0);

            stream.SetData(messageBytes2, false);

            var streamBytes = stream.ToByteArray();

            Assert.IsNotNull(streamBytes);
            Debug.WriteLine(streamBytes.GenerateHexDumpWithASCII());

            //Assert.AreEqual(streamBytes.Length, FrameLibrary.ClientInchoateGoogleCachedServerParametersStream1Subset.Length);

            var reference = FrameLibrary.ClientInchoateGoogleCachedServerParametersStream1Subset;
            // Compare only the overlapping prefix: the original loops indexed the
            // reference by streamBytes.Length and threw IndexOutOfRangeException
            // whenever the generated output was longer than the reference.
            var comparableLength = System.Math.Min(streamBytes.Length, reference.Length);

            // Soft warn
            for (var i = 0; i < comparableLength; i++)
            {
                if (streamBytes[i] != reference[i])
                {
                    Debug.WriteLine($"Byte difference at position {i}: generated byte is {streamBytes[i]:x2} but reference byte was {reference[i]:x2}");
                }
            }

            // Hard test fail (MSTest argument order is expected, then actual)
            for (var i = 0; i < comparableLength; i++)
            {
                Assert.AreEqual(reference[i], streamBytes[i], $"Byte difference at position {i}: generated byte is {streamBytes[i]:x2} but reference byte was {reference[i]:x2}");
            }
        }
Beispiel #4
0
        /// <summary>
        /// Buffers an incoming STREAM frame's payload and advances the stream's
        /// receive state; delivers the reassembled data once complete.
        /// </summary>
        /// <param name="frame">The received STREAM frame.</param>
        public void ProcessData(StreamFrame frame)
        {
            // Do not accept data if the stream is reset.
            if (State == StreamState.ResetRecvd)
            {
                return;
            }

            // (Removed an unused local copy of frame.StreamData.)
            if (frame.Offset != null)
            {
                _data.Add(frame.Offset.Value, frame.StreamData);
            }
            else
            {
                // TODO: Careful with duplicate 0 offset packets on the same stream. Probably PROTOCOL_VIOLATION?
                _data.Add(0, frame.StreamData);
            }

            // Either this frame marks the end of the stream,
            // or fin frame came before the data frames
            if (frame.EndOfStream)
            {
                State = StreamState.SizeKnown;
            }

            // Final size known and every gap filled: hand the concatenated,
            // offset-ordered payload up to the connection context.
            if (State == StreamState.SizeKnown && IsStreamFull())
            {
                _connection.Context.DataReceived(_data.SelectMany(v => v.Value).ToArray(), StreamId);
            }
        }
Beispiel #5
0
 // Appends a received frame to the shared queue; 'locker' serializes access
 // against the consumer side.
 private void addFrame(StreamFrame f)
 {
     lock (locker)
     {
         // Possible backpressure cap, currently disabled:
         // if (frameList.Count > 2000)
         //     frameList.TryDequeue(out _);
         frameList.Enqueue(f);
         //System.Console.WriteLine(System.DateTime.Now + " ==> New frame");
     }
 }
Beispiel #6
0
        /// <summary>
        /// Read a received packet and process its frames.
        /// </summary>
        /// <param name="packet">The received packet</param>
        public void ReadPacket(Packet packet)
        {
            // Process every new packet
            if (!_packetManager.IsPacketOld(packet))
            {
                packet.DecodeFrames();

                foreach (Frame frame in packet.Frames)
                {
                    // Pattern matching replaces the previous "is" check + "as" cast pair.
                    if (frame is StreamFrame sf)
                    {
                        Logger.Write($"Received StreamFrame in packet number {packet.PacketNumber} with message: {System.Text.Encoding.UTF8.GetString(sf.Data)}");
                        QuicStream stream;
                        try
                        {
                            stream = GetStream(sf.StreamID.Value);
                        }
                        catch (ArgumentException)
                        {
                            // Unknown stream: fall back to creating one.
                            // NOTE(review): the stream is created with id 0x00, not
                            // sf.StreamID — confirm this is intended.
                            stream = CreateStream(0x00);
                        }
                        stream.AddFrameToRead(sf);
                    }
                    else if (frame is AckFrame af)
                    {
                        Logger.Write($"Received AckFrame in packet #{packet.PacketNumber}");
                        _packetManager.ProcessAckFrame(af);
                    }
                }

                // Store received PacketNumber for further implementation of acknowledgement procedure
                _received.Add(packet.PacketNumber);
            }
            else
            {
                // The packet was sent again so we send another ack for it
                packet.IsAckEliciting = true;
            }

            // Generate a new Ack Frame and send it directly
            // Even if the packet is old, we send a new ack for this ; the ack packet may not have been received
            if (packet.IsAckEliciting)
            {
                AckFrame ack = new AckFrame(new List <UInt32>()
                {
                    packet.PacketNumber
                }, 100);
                AddFrame(ack);
                SendCurrentPacket();
                Logger.Write($"Acked packet #{packet.PacketNumber}");
            }
        }
Beispiel #7
0
        /// <summary>
        /// Return the byte array corresponding to the data from the next frame to be read
        /// </summary>
        /// <returns>The payload of the next pending frame.</returns>
        public byte[] Read()
        {
            // Block until at least one frame has been queued.
            _mre.WaitOne();

            StreamFrame next = _toRead.Dequeue();

            // Last pending frame consumed: make the next reader block again.
            if (_toRead.Count == 0)
            {
                _mre.Reset();
            }

            return(next.Data);
        }
Beispiel #8
0
        /// <summary>
        /// Parses a raw packet: optional message authentication hash, the private
        /// header flags, then the frame payload.
        /// </summary>
        /// <param name="bytes">The raw packet bytes.</param>
        /// <exception cref="NotImplementedException">
        /// FLAG_FEC is set, or an unhandled frame type is encountered.
        /// </exception>
        protected internal void FromByteArray([NotNull] byte[] bytes)
        {
            var index = 0;

            // Message authentication hash
            if (this.PacketNumber == 1)
            {
                // First 12 bytes are the message authentication hash
                this.MessageAuthenticationHash = new byte[12];
                Array.Copy(bytes, this.MessageAuthenticationHash, 12);
                index += 12;
            }

            // Private header: bit 0 = entropy, bit 1 = FEC group present, bit 2 = FEC flag.
            this.Entropy = (bytes[index] & (1 << 0)) != 0;
            if ((bytes[index] & (1 << 2)) != 0)
            {
                // https://tools.ietf.org/html/draft-tsvwg-quic-protocol-02#section-6.1
                throw new NotImplementedException("This implementation does not yet handle FLAG_FEC");
            }

            if ((bytes[index] & (1 << 1)) != 0)
            {
                index++;
                this.FecGroup = bytes[index];
            }
            index++;

            // Decode frames
            while (index < bytes.Length)
            {
                // PADDING frame
                if (bytes[index] == 0)
                {
                    this.AddFrame(new PaddingFrame(bytes.Length - index));
                    break;
                }

                // STREAM frame
                if ((bytes[index] & (1 << 7)) != 0)
                {
                    var sf = StreamFrame.FromByteArray(bytes, index);
                    this.AddFrame(sf.Item1);
                    index = sf.Item2;
                }
                else
                {
                    // Bug fix: an unrecognized frame type previously left 'index'
                    // untouched, spinning this loop forever. Fail fast instead.
                    throw new NotImplementedException($"Unhandled frame type 0x{bytes[index]:x2} at offset {index}");
                }
            }
        }
Beispiel #9
0
        /// <summary>
        /// Write a byte array in a stream. Then send this byte array
        /// </summary>
        /// <param name="buffer">The byte array to send</param>
        /// <param name="offset">The offset to consider in the byte array</param>
        /// <param name="size">Number of byte to send from the offset</param>
        /// <exception cref="ArgumentException">
        /// The buffer is null, or offset/size describe a range outside the buffer.
        /// </exception>
        public void Write(byte[] buffer, int offset, int size)
        {
            // Validate the slice up front. Negative offset/size previously slipped
            // past the single length check and surfaced later as an opaque
            // failure inside Array.Copy.
            if (buffer == null)
            {
                throw new ArgumentException("buffer must not be null", nameof(buffer));
            }
            if (offset < 0 || size < 0 || buffer.Length < offset + size)
            {
                throw new ArgumentException("offset/size describe a range outside the buffer");
            }

            // TODO: check if the user is authorized to write (thanks to Type)
            byte[] data = new byte[size];
            Array.Copy(buffer, offset, data, 0, size);
            // TODO: may split the message on multiple frames
            StreamFrame frame = new StreamFrame(StreamId, 0, data, true, false);

            _connection.AddFrame(frame);
            _connection.SendCurrentPacket();
        }
Beispiel #10
0
        /// <summary>
        /// Routes a STREAM frame to its stream, creating the stream on first sight.
        /// </summary>
        /// <param name="frame">A frame that must be a <see cref="StreamFrame"/>.</param>
        private void OnStreamFrame(Frame frame)
        {
            StreamFrame sf = (StreamFrame)frame;

            // Bug fix: the else-branch previously indexed _streams[sf.StreamId]
            // (the wrapper object) while entries are keyed by sf.StreamId.Value.
            // TryGetValue also avoids the ContainsKey + indexer double lookup.
            if (!_streams.TryGetValue(sf.StreamId.Value, out QuicStream stream))
            {
                stream = new QuicStream(this, sf.ConvertedStreamId);
                stream.ProcessData(sf);

                _streams.Add(sf.StreamId.Value, stream);
            }
            else
            {
                stream.ProcessData(sf);
            }
        }
Beispiel #11
0
        /// <summary>
        /// Buffers an incoming STREAM frame's payload, enforces the per-stream
        /// flow-control limit, and delivers the data once the stream is complete.
        /// </summary>
        /// <param name="frame">The received STREAM frame.</param>
        public void ProcessData(StreamFrame frame)
        {
            // Reset streams accept no further data.
            if (State == StreamState.ResetRecvd)
            {
                return;
            }

            var payload = frame.StreamData;

            // Buffer the payload under its stream offset (0 when none is given).
            // TODO: Careful with duplicate 0 offset packets on the same stream. Probably PROTOCOL_VIOLATION?
            if (frame.Offset != null)
            {
                _data.Add(frame.Offset.Value, payload);
            }
            else
            {
                _data.Add(0, payload);
            }

            // A FIN — possibly arriving before the data frames — fixes the
            // stream's final size.
            if (frame.EndOfStream)
            {
                State = StreamState.SizeKnown;
            }

            _currentTransferRate += (ulong)payload.Length;

            // Flow control: tear the connection down once the stream limit is hit.
            if (_currentTransferRate >= _maximumStreamData)
            {
                var errorPacket = _connection.PacketCreator.CreateConnectionClosePacket(ErrorCode.FLOW_CONTROL_ERROR,
                                                                                        "Maximum stream data transfer reached.");
                _connection.SendData(errorPacket);
                _connection.TerminateConnection();

                return;
            }

            // Every byte up to the final size has arrived: hand the data up.
            if (State == StreamState.SizeKnown && IsStreamFull())
            {
                _connection.DataReceived(this);
                State = StreamState.DataRecvd;
            }
        }
Beispiel #12
0
        /// <summary>
        /// Buffers an incoming STREAM frame's payload, enforces the per-stream
        /// flow-control limit, and raises OnStreamDataReceived once complete.
        /// </summary>
        /// <param name="frame">The received STREAM frame.</param>
        public void ProcessData(StreamFrame frame)
        {
            // Do not accept data if the stream is reset.
            if (State == StreamState.ResetRecvd)
            {
                return;
            }

            byte[] data = frame.StreamData;
            // Buffer the payload keyed by its stream offset (0 when absent).
            if (frame.Offset != null)
            {
                _data.Add(frame.Offset.Value, frame.StreamData);
            }
            else
            {
                // TODO: Careful with duplicate 0 offset packets on the same stream. Probably PROTOCOL_VIOLATION?
                _data.Add(0, frame.StreamData);
            }

            // Either this frame marks the end of the stream,
            // or fin frame came before the data frames
            if (frame.EndOfStream)
            {
                State = StreamState.SizeKnown;
            }

            _currentTransferRate += (UInt64)data.Length;

            // Terminate connection if maximum stream data is reached
            // NOTE(review): the payload above was already buffered before this
            // check, so the over-limit frame's data is retained — confirm whether
            // that is intended.
            if (_currentTransferRate >= _maximumStreamData)
            {
                ShortHeaderPacket errorPacket = _connection.PacketCreator.CreateConnectionClosePacket(Infrastructure.ErrorCode.FLOW_CONTROL_ERROR, frame.ActualType, ErrorConstants.MaxDataTransfer);
                _connection.SendData(errorPacket);
                _connection.TerminateConnection();

                return;
            }

            // Final size known and no gaps left: notify subscribers with the data.
            if (State == StreamState.SizeKnown && IsStreamFull())
            {
                State = StreamState.DataRecvd;

                OnStreamDataReceived?.Invoke(this, Data);
            }
        }
Beispiel #13
0
 /// <summary>
 /// Handles an incoming frame: appends raw H.264 video for our stream to the
 /// capture file and nudges the device whenever a keyframe arrives.
 /// </summary>
 /// <param name="frame">The received frame.</param>
 private static void Connection_StreamFrameReceived(StreamFrame frame)
 {
     // Guard clause: only video frames for the stream we started are written.
     if (frame.StreamId != streamId || frame.FrameType != FrameType.Video)
     {
         Console.WriteLine("Non-frame received");
         return;
     }

     // Console.WriteLine("Frame received");
     // Console.WriteLine("Attrib: 0x{0}", Convert.ToString((uint)frame.FrameAttrib, 16));
     fs.Write(frame.Data, 0, frame.Data.Length);

     if (frame.KeyFrame)
     {
         Console.WriteLine("Keyframe received");
         connection.StreamChange(streamId); // Should send this to KeepAlive every few seconds, so anytime a keyframe is received works!
     }
 }
Beispiel #14
0
        /// <summary>
        /// dotnet SnapshotForZR04RN.dll ipaddr 5000 admin passwd 0 | ffmpeg -i - -vframes 1 -f image2pipe -vcodec mjpeg -q 3 - > snapshot.jpg
        /// </summary>
        /// <param name="args">host, port, user, password, then optional channel and sub-stream.</param>
        /// <returns>Completes when the snapshot has been written to stdout.</returns>
        static async Task Main(string[] args)
        {
            // Fail fast with a usage hint instead of an IndexOutOfRangeException.
            if (args.Length < 4)
            {
                Console.Error.WriteLine("Usage: SnapshotForZR04RN <host> <port> <user> <password> [channel] [sub]");
                return;
            }

            int channel = args.Length > 4 ? int.Parse(args[4]) : 0;
            int sub     = args.Length > 5 ? int.Parse(args[5]) : 0;

            connection = new DeviceConnection();
            await connection.Connect(args[0], int.Parse(args[1]));

            // Login is required before any stream operation; the result is unused here.
            LoginSuccess loginSuccess = await connection.Login(args[2], args[3]);

            StreamFrame keyframe = await connection.SnapKeyframe(channel, sub);

            // "using" guarantees stdout is flushed and closed even if writing throws.
            using (Stream stdout = Console.OpenStandardOutput())
            {
                await stdout.WriteAsync(keyframe.Data, 0, keyframe.Data.Length);
                await stdout.FlushAsync();
            }

            connection.Disconnect();
        }
Beispiel #15
0
        /// <summary>
        /// Maps the next frame-type byte onto a concrete <see cref="Frame"/>
        /// instance and decodes it from the buffer.
        /// </summary>
        /// <returns>The decoded frame, or null for an unknown frame type.</returns>
        public Frame GetFrame()
        {
            Frame result;
            byte  frameType = _array.PeekByte();

            switch (frameType)
            {
            case 0x00: result = new PaddingFrame(); break;

            case 0x01: result = new RSTStreamFrame(); break;

            case 0x02: result = new ConnectionCloseFrame(); break;

            case 0x06: result = new MaxStreamIdFrame(); break;

            // 0x10-0x17 are all STREAM frames; the low bits only encode which
            // optional fields (offset/length/fin) are present. Grouped case
            // labels replace eight identical arms.
            case 0x10:
            case 0x11:
            case 0x12:
            case 0x13:
            case 0x14:
            case 0x15:
            case 0x16:
            case 0x17:
                result = new StreamFrame(); break;

            default: result = null; break;
            }

            if (result != null)
            {
                result.Decode(_array);
            }

            return(result);
        }
Beispiel #16
0
        /// <summary>
        /// Routes a STREAM frame to its stream, creating the stream when first
        /// seen and enforcing the maximum concurrent stream count.
        /// </summary>
        /// <param name="frame">A frame that must be a <see cref="StreamFrame"/>.</param>
        private void OnStreamFrame(Frame frame)
        {
            StreamFrame sf = (StreamFrame)frame;

            // Bug fix: the else-branch previously indexed _streams[sf.StreamId]
            // (the wrapper object) while entries are keyed by sf.StreamId.Value.
            // TryGetValue also avoids the ContainsKey + indexer double lookup.
            if (_streams.TryGetValue(sf.StreamId.Value, out QuicStream existing))
            {
                existing.ProcessData(sf);
                return;
            }

            QuicStream stream = new QuicStream(this, sf.ConvertedStreamId);
            stream.ProcessData(sf);

            if ((UInt64)_streams.Count < MaxStreams)
            {
                _streams.Add(sf.StreamId.Value, stream);
            }
            else
            {
                // Too many concurrent streams: inform the peer. The frame has
                // already been processed above, matching the original behavior.
                SendMaximumStreamReachedError();
            }
        }
        /// <summary>
        /// Cuts the next outgoing packet from the multiplexed transfer queue,
        /// sizing a STREAM frame to fit the remaining MTU budget.
        /// </summary>
        /// <returns>The next packet to send, or null when the queue is empty.</returns>
        /// <exception cref="InvalidOperationException">No connection ID is established.</exception>
        public async Task <Packets.RegularPacket> CutNextPacketAsync()
        {
            if (!this._connectionid.HasValue)
            {
                throw new InvalidOperationException("Connection ID is not established!");
            }

            if (this.Count == 0)
            {
                return(null);
            }

            uint remaining = Packets.AbstractPacketBase.MTU;

            ulong packetNumber;

            // Packet numbers must stay unique and monotonic across concurrent callers.
            lock (this._packetNumberLock)
            {
                this._packetNumber++;
                packetNumber = this._packetNumber;
            }

            var regular = new Packets.RegularPacket(this._connectionid.Value, packetNumber, null); // TODO: FEC GROUP's.

            remaining -= regular.GetHeaderLength();

            MultiplexedTransfer nextTransfer;
            long        assignedDataSize;
            bool        fin;
            StreamFrame streamFrame;

            lock (this._dequeueLock)
            {
                MultiplexedTransfer peekedTransfer;
                do
                {
                    if (!this.TryPeek(out peekedTransfer))
                    {
                        return(null); // My queue is _now_ empty, just say nothing to do.
                    }
                    // Prototype our stream frame
                    var streamRemainingByteCount = peekedTransfer.Stream.Length - peekedTransfer.Stream.Position;

                    streamFrame = new StreamFrame(peekedTransfer.StreamId, Convert.ToUInt64(peekedTransfer.Stream.Position));
                    var prototypeLength = streamFrame.GetMetadataLength();

                    // NOTE(review): this sizes the payload from the frame's
                    // *metadata* length, not the remaining stream bytes — it looks
                    // like it should be Math.Min(remaining - prototypeLength,
                    // streamRemainingByteCount). Confirm before changing.
                    assignedDataSize = Math.Min(remaining, prototypeLength);
                    var transferDone = assignedDataSize == streamRemainingByteCount;
                    fin = transferDone && peekedTransfer.TerminateStream;

                    if (!this.TryDequeue(out nextTransfer))
                    {
                        return(null); // My queue is _now_ empty, just say nothing to do.
                    }
                    if (nextTransfer.TransferId != peekedTransfer.TransferId)
                    {
                        this.Enqueue(nextTransfer); // Whoops, something changed outside of our lock... so, redo our calculations.
                    }
                }while (nextTransfer.TransferId != peekedTransfer.TransferId);
            }

            try
            {
                // Hydrate our stream frame prototype
                var streamData = new byte[assignedDataSize];
                // Bug fix: ReadAsync's second argument is the offset into the
                // *destination buffer*, not the stream position. Passing the
                // stream position overran the buffer for any non-zero position.
                await nextTransfer.Stream.ReadAsync(streamData, 0, streamData.Length);

                streamFrame.SetData(streamData, fin);
            }
            catch (Exception)
            {
                // Something went wrong.  Requeue the transfer.
                this.Enqueue(nextTransfer);
                throw;
            }

            // NOTE(review): streamFrame is never attached to 'regular' before it
            // is returned — verify the caller adds it, or the frame is lost.
            return(regular);
        }
Beispiel #18
0
    private void Nvr_StreamFrameReceived(StreamFrame obj)
    {
        if (obj.StreamId != streamId)
        {
            return;
        }

        if (obj.FrameType != FrameType.Video)
        {
            return;
        }

        Console.WriteLine(obj.Time);
        Console.WriteLine((obj.Time & 0xFFFFFFFF));
        if (obj.KeyFrame)
        {
            nvr.StreamChange(streamId);
            // nvr.StreamStop(streamId);
            // nvr.StreamStart(0);
            // ++streamId;
        }

        //
        // }

        // The 'Camera' (YUV TestCard) has generated a YUV image.
        // If there are RTSP clients connected then Compress the Video Frame (with H264) and send it to the client
        // void video_source_ReceivedYUVFrame(uint timestamp_ms, int width, int height, byte[] yuv_data)
        // {
        DateTime now = DateTime.UtcNow;
        int      current_rtp_play_count = 0;
        int      current_rtp_count      = 0;
        int      timeout_in_seconds     = 70; // must have a RTSP message every 70 seconds or we will close the connection

        lock (rtsp_list)
        {
            current_rtp_count = rtsp_list.Count;
            foreach (RTSPConnection connection in rtsp_list.ToArray())
            { // Convert to Array to allow us to delete from rtsp_list
                // RTSP Timeout (clients receiving RTP video over the RTSP session
                // do not need to send a keepalive (so we check for Socket write errors)
                bool sending_rtp_via_tcp = false;
                if ((connection.video_client_transport != null) &&
                    (connection.video_client_transport.LowerTransport == Rtsp.Messages.RtspTransport.LowerTransportType.TCP))
                {
                    sending_rtp_via_tcp = true;
                }

                if (sending_rtp_via_tcp == false && ((now - connection.time_since_last_rtsp_keepalive).TotalSeconds > timeout_in_seconds))
                {
                    Console.WriteLine("Removing session " + connection.video_session_id + " due to TIMEOUT");
                    connection.play = false; // stop sending data
                    if (connection.video_udp_pair != null)
                    {
                        connection.video_udp_pair.Stop();
                        connection.video_udp_pair = null;
                    }
                    connection.listener.Dispose();

                    rtsp_list.Remove(connection);
                    continue;
                }
                else if (connection.play)
                {
                    current_rtp_play_count++;
                }
            }
        }
        UpdateClients();

        // Take the YUV image and encode it into a H264 NAL
        // This returns a NAL with no headers (no 00 00 00 01 header and no 32 bit sizes)
        Console.WriteLine(current_rtp_count + " RTSP clients connected. " + current_rtp_play_count + " RTSP clients in PLAY mode");

        if (current_rtp_play_count == 0)
        {
            return;
        }

        byte[] raw_video_nal = obj.Data;
        bool   isKeyframe    = obj.KeyFrame;

        List <byte[]> nal_array = new List <byte[]>();

        // We may want to add the SPS and PPS to the H264 stream as in-band data.
        // This may be of use if the client did not parse the SPS/PPS in the SDP
        // or if the H264 encoder changes properties (eg a new resolution or framerate which
        // gives a new SPS or PPS).
        // Also looking towards H265, the VPS/SPS/PPS do not need to be in the SDP so would be added here.

#if false
        Boolean add_sps_pps_to_keyframe = true;

        if (add_sps_pps_to_keyframe && isKeyframe)
        {
            nal_array.Add(raw_sps);
            nal_array.Add(raw_pps);
        }
#endif

        // add the rest of the NALs
        nal_array.Add(raw_video_nal);

        /*
         * uint timestamp_ms = (uint)(obj.Time / 1000); // CHECK THIS
         * Console.WriteLine("timestamp_ms: {0}", timestamp_ms);
         * UInt32 rtp_timestamp = timestamp_ms * 90; // 90kHz clock
         */
        uint rtp_timestamp = (uint)((obj.Time & 0xFFFFFFFF) * 90 / 1000);
        if (firstStamp == 0)
        {
            firstStamp = rtp_timestamp;
        }
        rtp_timestamp -= firstStamp;
        if (rtp_timestamp - lastStamp > 90000)
        {
            uint override_timestamp = lastStamp + 6000;
            firstStamp    = (uint)((obj.Time & 0xFFFFFFFF) * 90 / 1000) - override_timestamp;
            rtp_timestamp = override_timestamp;
        }
        lastStamp = rtp_timestamp;

        uint timestamp_ms = rtp_timestamp / 90;

        // Build a list of 1 or more RTP packets
        // The last packet will have the M bit set to '1'
        List <byte[]> rtp_packets = new List <byte[]>();

        for (int x = 0; x < nal_array.Count; x++)
        {
            byte[] raw_nal  = nal_array[x];
            bool   last_nal = false;
            if (x == nal_array.Count - 1)
            {
                last_nal = true; // last NAL in our nal_array
            }

            // The H264 Payload could be sent as one large RTP packet (assuming the receiver can handle it)
            // or as a Fragmented Data, split over several RTP packets with the same Timestamp.
            bool fragmenting = false;
            int  packetMTU   = 1400; //65500; //
            if (raw_nal.Length > packetMTU)
            {
                fragmenting = true;
            }


            if (fragmenting == false)
            {
                // Put the whole NAL into one RTP packet.
                // Note some receivers will have maximum buffers and be unable to handle large RTP packets.
                // Also with RTP over RTSP there is a limit of 65535 bytes for the RTP packet.

                byte[] rtp_packet = new byte[12 + raw_nal.Length]; // 12 is header size when there are no CSRCs or extensions
                // Create an single RTP fragment

                // RTP Packet Header
                // 0 - Version, P, X, CC, M, PT and Sequence Number
                //32 - Timestamp. H264 uses a 90kHz clock
                //64 - SSRC
                //96 - CSRCs (optional)
                //nn - Extension ID and Length
                //nn - Extension header

                int rtp_version      = 2;
                int rtp_padding      = 0;
                int rtp_extension    = 0;
                int rtp_csrc_count   = 0;
                int rtp_marker       = (last_nal == true ? 1 : 0); // set to 1 if the last NAL in the array
                int rtp_payload_type = 96;

                RTPPacketUtil.WriteHeader(rtp_packet, rtp_version, rtp_padding, rtp_extension, rtp_csrc_count, rtp_marker, rtp_payload_type);

                uint empty_sequence_id = 0;
                RTPPacketUtil.WriteSequenceNumber(rtp_packet, empty_sequence_id);

                RTPPacketUtil.WriteTS(rtp_packet, rtp_timestamp);

                uint empty_ssrc = 0;
                RTPPacketUtil.WriteSSRC(rtp_packet, empty_ssrc);

                // Now append the raw NAL
                System.Array.Copy(raw_nal, 0, rtp_packet, 12, raw_nal.Length);

                rtp_packets.Add(rtp_packet);
            }
            else
            {
                int data_remaining = raw_nal.Length;
                int nal_pointer    = 0;
                int start_bit      = 1;
                int end_bit        = 0;

                // consume first byte of the raw_nal. It is used in the FU header
                byte first_byte = raw_nal[0];
                nal_pointer++;
                data_remaining--;

                while (data_remaining > 0)
                {
                    int payload_size = Math.Min(packetMTU, data_remaining);
                    if (data_remaining - payload_size == 0)
                    {
                        end_bit = 1;
                    }

                    byte[] rtp_packet = new byte[12 + 2 + payload_size]; // 12 is header size. 2 bytes for FU-A header. Then payload

                    // RTP Packet Header
                    // 0 - Version, P, X, CC, M, PT and Sequence Number
                    //32 - Timestamp. H264 uses a 90kHz clock
                    //64 - SSRC
                    //96 - CSRCs (optional)
                    //nn - Extension ID and Length
                    //nn - Extension header

                    int rtp_version      = 2;
                    int rtp_padding      = 0;
                    int rtp_extension    = 0;
                    int rtp_csrc_count   = 0;
                    int rtp_marker       = (last_nal == true ? 1 : 0); // Marker set to 1 on last packet
                    int rtp_payload_type = 96;

                    RTPPacketUtil.WriteHeader(rtp_packet, rtp_version, rtp_padding, rtp_extension, rtp_csrc_count, rtp_marker, rtp_payload_type);

                    uint empty_sequence_id = 0;
                    RTPPacketUtil.WriteSequenceNumber(rtp_packet, empty_sequence_id);

                    RTPPacketUtil.WriteTS(rtp_packet, rtp_timestamp);

                    uint empty_ssrc = 0;
                    RTPPacketUtil.WriteSSRC(rtp_packet, empty_ssrc);

                    // Now append the Fragmentation Header (with Start and End marker) and part of the raw_nal
                    byte f_bit = 0;
                    byte nri   = (byte)((first_byte >> 5) & 0x03); // Part of the 1st byte of the Raw NAL (NAL Reference ID)
                    byte type  = 28;                               // FU-A Fragmentation

                    rtp_packet[12] = (byte)((f_bit << 7) + (nri << 5) + type);
                    rtp_packet[13] = (byte)((start_bit << 7) + (end_bit << 6) + (0 << 5) + (first_byte & 0x1F));

                    System.Array.Copy(raw_nal, nal_pointer, rtp_packet, 14, payload_size);
                    nal_pointer    = nal_pointer + payload_size;
                    data_remaining = data_remaining - payload_size;

                    rtp_packets.Add(rtp_packet);

                    start_bit = 0;
                }
            }
        }

        lock (rtsp_list)
        {
            // Go through each RTSP connection and output the NAL on the Video Session
            foreach (RTSPConnection connection in rtsp_list.ToArray()) // ToArray makes a temp copy of the list.
                                                                       // This lets us delete items in the foreach
                                                                       // eg when there is Write Error
            {
                // Only process Sessions in Play Mode
                if (connection.play == false)
                {
                    continue;
                }

                string connection_type = "";
                if (connection.video_client_transport.LowerTransport == Rtsp.Messages.RtspTransport.LowerTransportType.TCP)
                {
                    connection_type = "TCP";
                }
                if (connection.video_client_transport.LowerTransport == Rtsp.Messages.RtspTransport.LowerTransportType.UDP &&
                    connection.video_client_transport.IsMulticast == false)
                {
                    connection_type = "UDP";
                }
                if (connection.video_client_transport.LowerTransport == Rtsp.Messages.RtspTransport.LowerTransportType.UDP &&
                    connection.video_client_transport.IsMulticast == true)
                {
                    connection_type = "Multicast";
                }
                Console.WriteLine("Sending video session " + connection.video_session_id + " " + connection_type + " Timestamp(ms)=" + timestamp_ms + ". RTP timestamp=" + rtp_timestamp + ". Sequence=" + connection.video_sequence_number);

                // There could be more than 1 RTP packet (if the data is fragmented)
                bool write_error = false;
                foreach (byte[] rtp_packet in rtp_packets)
                {
                    // Add the specific data for each transmission
                    RTPPacketUtil.WriteSequenceNumber(rtp_packet, connection.video_sequence_number);
                    connection.video_sequence_number++;

                    // Add the specific SSRC for each transmission
                    RTPPacketUtil.WriteSSRC(rtp_packet, connection.ssrc);


                    // Send as RTP over RTSP (Interleaved)
                    if (connection.video_transport_reply.LowerTransport == Rtsp.Messages.RtspTransport.LowerTransportType.TCP)
                    {
                        int    video_channel = connection.video_transport_reply.Interleaved.First; // second is for RTCP status messages)
                        object state         = new object();
                        try
                        {
                            // send the whole NAL. With RTP over RTSP we do not need to Fragment the NAL (as we do with UDP packets or Multicast)
                            //session.listener.BeginSendData(video_channel, rtp_packet, new AsyncCallback(session.listener.EndSendData), state);
                            connection.listener.SendData(video_channel, rtp_packet);
                        }
                        catch
                        {
                            Console.WriteLine("Error writing to listener " + connection.listener.RemoteAdress);
                            write_error = true;
                            break; // exit out of foreach loop
                        }
                    }

                    // Send as RTP over UDP
                    if (connection.video_transport_reply.LowerTransport == Rtsp.Messages.RtspTransport.LowerTransportType.UDP && connection.video_transport_reply.IsMulticast == false)
                    {
                        try
                        {
                            // send the whole NAL. ** We could fragment the RTP packet into smaller chuncks that fit within the MTU
                            // Send to the IP address of the Client
                            // Send to the UDP Port the Client gave us in the SETUP command
                            connection.video_udp_pair.Write_To_Data_Port(rtp_packet, connection.client_hostname, connection.video_client_transport.ClientPort.First);
                        }
                        catch (Exception e)
                        {
                            Console.WriteLine("UDP Write Exception " + e.ToString());
                            Console.WriteLine("Error writing to listener " + connection.listener.RemoteAdress);
                            write_error = true;
                            break; // exit out of foreach loop
                        }
                    }

                    // TODO. Add Multicast
                }
                if (write_error)
                {
                    Console.WriteLine("Removing session " + connection.video_session_id + " due to write error");
                    connection.play = false; // stop sending data
                    if (connection.video_udp_pair != null)
                    {
                        connection.video_udp_pair.Stop();
                        connection.video_udp_pair = null;
                    }
                    connection.listener.Dispose();
                    rtsp_list.Remove(connection); // remove the session. It is dead
                }
            }
        }
        UpdateClients();
    }
Beispiel #19
0
        /// <summary>
        /// Reads the next QUIC frame from the underlying byte array.
        /// </summary>
        /// <returns>
        /// A decoded frame instance, or null when the frame type byte is not
        /// recognised (in which case nothing is consumed from the array).
        /// </returns>
        public Frame GetFrame()
        {
            // Peek at (do not consume) the first byte: the frame type.
            // The frame's own Decode() reads the complete encoding,
            // including this byte, from the array.
            var frameType = _array.PeekByte();

            Frame result;

            // Map the type byte onto a concrete frame instance. Several
            // adjacent type codes share one class (e.g. 0x02-0x03 -> ACK,
            // 0x08-0x0f -> STREAM, 0x1c-0x1d -> CONNECTION_CLOSE), so those
            // cases are grouped with stacked labels.
            switch (frameType)
            {
            case 0x00:
                result = new PaddingFrame();
                break;

            case 0x01:
                result = new PingFrame();
                break;

            case 0x02:
            case 0x03:
                result = new AckFrame();
                break;

            case 0x04:
                result = new ResetStreamFrame();
                break;

            case 0x05:
                result = new StopSendingFrame();
                break;

            case 0x06:
                result = new CryptoFrame();
                break;

            case 0x07:
                result = new NewTokenFrame();
                break;

            case 0x08:
            case 0x09:
            case 0x0a:
            case 0x0b:
            case 0x0c:
            case 0x0d:
            case 0x0e:
            case 0x0f:
                result = new StreamFrame();
                break;

            case 0x10:
                result = new MaxDataFrame();
                break;

            case 0x11:
                result = new MaxStreamDataFrame();
                break;

            case 0x12:
            case 0x13:
                result = new MaxStreamsFrame();
                break;

            case 0x14:
                result = new DataBlockedFrame();
                break;

            case 0x15:
                result = new StreamDataBlockedFrame();
                break;

            case 0x16:
            case 0x17:
                result = new StreamsBlockedFrame();
                break;

            case 0x18:
                result = new NewConnectionIdFrame();
                break;

            case 0x19:
                result = new RetireConnectionIdFrame();
                break;

            case 0x1a:
                result = new PathChallengeFrame();
                break;

            case 0x1b:
                result = new PathResponseFrame();
                break;

            default:
                // Unknown frame type: report failure with null.
                result = null;
                break;

            case 0x1c:
            case 0x1d:
                result = new ConnectionCloseFrame();
                break;
            }

            // Recognised frames decode themselves from the byte array.
            result?.Decode(_array);

            return(result);
        }
Beispiel #20
0
 /// <summary>
 /// Add a stream frame to the queue of frames to read
 /// </summary>
 /// <param name="sf">StreamFrame to be added to the queue</param>
 public void AddFrameToRead(StreamFrame sf)
 {
     // Enqueue before signalling so that a reader woken by the event
     // is guaranteed to find the frame already present in the queue.
     _toRead.Enqueue(sf);
     _mre.Set();
 }
Beispiel #21
0
        /// <summary>
        /// Connects to the RTSP camera and starts a background receive loop.
        /// Each received frame is written to disk as an .mp4 clip, a PNG is
        /// extracted from it with FFmpeg, and the frame is broadcast
        /// (base64-encoded) to every subscribed client channel.
        /// </summary>
        private async Task ConnectAsync()
        {
            // NOTE(review): camera address and credentials are hard-coded;
            // move them to configuration / a secret store before shipping.
            var serverUri            = new Uri("rtsp://192.168.0.103:554/videoMain");
            var credentials          = new NetworkCredential("admin", "admin");
            var connectionParameters = new ConnectionParameters(serverUri, credentials);

            connectionParameters.RtpTransport = RtpTransportProtocol.TCP;

            this.rtspClient = new RtspClient(connectionParameters);

            // Async lambda event handler: exceptions thrown inside it are
            // unobservable by the caller (same caveat as async void).
            this.rtspClient.FrameReceived +=
                async(sender, frame) =>
            {
                FrameCounter++;
                var fileName = $"{hostingEnvironment.ContentRootPath}/frames/frame-{FrameCounter}.mp4";
                await File.WriteAllBytesAsync(fileName, frame.FrameSegment.Array);

                // NOTE(review): the builder ignores FFmpeg's frame number and
                // always names the image after FrameCounter — confirm intended.
                Func <string, string> outputFileNameBuilder = (number) => { return($"{hostingEnvironment.ContentRootPath}/images/frame-{FrameCounter}.png"); };

                IMediaInfo info = await FFmpeg.GetMediaInfo(fileName);

                IVideoStream videoStream = info.VideoStreams.First().SetCodec(VideoCodec.h264);

                // Extract a single frame of the clip as a PNG thumbnail.
                IConversionResult conversionResult = await FFmpeg.Conversions.New()
                                                     .AddStream(videoStream)
                                                     .ExtractNthFrame(1, outputFileNameBuilder)
                                                     .Start();

                // Fan the frame out to all connected clients.
                var img = Convert.ToBase64String(frame.FrameSegment.Array);
                for (int i = 0; i < frameChannles.Count; i++)
                {
                    var data = new StreamFrame()
                    {
                        Base64ImageString = img,
                        TimeStamp         = frame.Timestamp
                    };

                    await frameChannles[i].Writer.WriteAsync(data, cancellationTokens[i]);
                    LastFrame = DateTime.UtcNow;
                }

                // Keep only the ~50 most recent clips on disk.
                // NOTE(review): decrementing FrameCounter here causes the next
                // frame to reuse the current file name — confirm intended.
                if (FrameCounter > 50)
                {
                    File.Delete($"{hostingEnvironment.ContentRootPath}/frames/frame-{FrameCounter-50}.mp4");
                    FrameCounter--;
                }
            };

            Console.WriteLine("Connecting...");
            await rtspClient.ConnectAsync(tokenSoruce.Token);

            Console.WriteLine("Connected.");

            // FIX: the original used Task.Factory.StartNew(async () => ...,
            // TaskCreationOptions.LongRunning), which returns a Task<Task>
            // whose inner task — and any exception it throws — was never
            // observed; LongRunning is also meaningless for an async delegate
            // because the dedicated thread is released at the first await.
            // Task.Run unwraps the inner task; log faults so receive-loop
            // failures are not silently lost.
            var receiveTask = Task.Run(() => rtspClient.ReceiveAsync(tokenSoruce.Token));
            _ = receiveTask.ContinueWith(
                t => Console.WriteLine("RTSP receive loop failed: " + t.Exception),
                TaskContinuationOptions.OnlyOnFaulted);
        }