Example #1
    internal static bool Deserialize(Stream raw, StreamCapabilities _streamCapabilities)
    {
        var reader = new N2HBinaryReader(raw);
        //var length = raw.GetAvaliableByteCounts();
        //if (length < 28)
        //{
        //    Logger.FATAL("Not enough data");
        //    return false;
        //}
        var ver = reader.ReadUInt64();

        if (ver != Utils.__STREAM_CAPABILITIES_VERSION)
        {
            Logger.FATAL("Invalid stream capabilities version. Wanted: {0}; Got: {1}",
                         Utils.__STREAM_CAPABILITIES_VERSION, ver);
            return(false);
        }
        _streamCapabilities.Clear();
        _streamCapabilities.VideoCodecId  = (VideoCodec)reader.ReadByte();
        _streamCapabilities.AudioCodecId  = (AudioCodec)reader.ReadByte();
        _streamCapabilities.BandwidthHint = reader.ReadUInt32();

        if (_streamCapabilities.VideoCodecId == VideoCodec.H264 && !VideoAvc.Deserialize(raw, out _streamCapabilities.Avc))
        {
            Logger.FATAL("Unable to deserialize avc");
            return(false);
        }
        if (_streamCapabilities.AudioCodecId == AudioCodec.Aac &&
            !AudioAac.Deserialize(raw, out _streamCapabilities.Aac))
        {
            Logger.FATAL("Unable to deserialize aac");
            return(false);
        }
        return(true);
    }
Example #2
		/// <summary>
		/// Validates that a stream supports a given set of operations, throwing a NotSupportedException when any required capability is missing
		/// </summary>
		/// <param name="stream">The stream to validate</param>
		/// <param name="requiredCapabilities">Flags that specify the capabilities to validate</param>
		public static void ValidateCapabilities(this Stream stream, StreamCapabilities requiredCapabilities)
		{
			StreamCapabilities missingCapabilities = 0;
			if (!stream.CanRead && ((requiredCapabilities & StreamCapabilities.Read) == StreamCapabilities.Read))
				missingCapabilities |= StreamCapabilities.Read;
			if (!stream.CanWrite && ((requiredCapabilities & StreamCapabilities.Write) == StreamCapabilities.Write))
				missingCapabilities |= StreamCapabilities.Write;
			if (!stream.CanSeek && ((requiredCapabilities & StreamCapabilities.Seek) == StreamCapabilities.Seek))
				missingCapabilities |= StreamCapabilities.Seek;
			if (!stream.CanTimeout && ((requiredCapabilities & StreamCapabilities.Timeout) == StreamCapabilities.Timeout))
				missingCapabilities |= StreamCapabilities.Timeout;

			if (missingCapabilities != 0)
			{
				var missingCapabilitiesList = missingCapabilities.GetSetFlagNames().ToList();
				var errorString = new StringBuilder();
				errorString.Append("This stream does not support the following requirements: ");
				for (var i = 0; i < missingCapabilitiesList.Count; i++)
				{
					errorString.Append(missingCapabilitiesList[i]);
					if (i < missingCapabilitiesList.Count - 1)
						errorString.Append(", ");
				}
				throw new NotSupportedException(errorString.ToString());
			}
		}
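
For context, a minimal usage sketch of this extension (the CopyValidated helper is hypothetical; it assumes the flags-style StreamCapabilities enum and the System.IO types used above):

		// Hypothetical helper: validate both streams up front, then copy.
		// ValidateCapabilities throws NotSupportedException naming any missing capability.
		public static void CopyValidated(Stream source, Stream destination)
		{
			source.ValidateCapabilities(StreamCapabilities.Read);       // must be readable
			destination.ValidateCapabilities(StreamCapabilities.Write); // must be writable
			source.CopyTo(destination);
		}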
Example #3
    public static bool Deserialize(string seekFilePath, StreamCapabilities capabilities)
    {
        using (var file = MediaFile.Initialize(seekFilePath))
        {
            if (file == null)
            {
                Logger.FATAL("Unable to open seek file {0}", seekFilePath);
                return(false);
            }

            var length = file.Br.ReadUInt32();
            if (length > 1024 * 1024)
            {
                Logger.FATAL("Invalid stream capabilities length in file {0}: {1}", seekFilePath, length);
                return(false);
            }

            //var buffer = new MemoryStream();
            //buffer.ReadFromRepeat(0, (int)length);
            //if (!file.ReadBuffer(buffer, (int)length))
            //{
            //    Logger.FATAL("Unable to read stream capabilities payload from file {0}",seekFilePath);
            //    return false;
            //}

            //file.Close();

            if (!Deserialize(file.DataStream, capabilities))
            {
                Logger.FATAL("Unable to deserialize stream capabilities from file {0}", seekFilePath);
                return(false);
            }
        }
        return(true);
    }
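
A minimal calling sketch for this overload (the seek-file path is hypothetical; on success the codec ids and bandwidth hint shown in Example #1 are populated):

    // Hypothetical path; Deserialize logs via Logger.FATAL and returns false on failure.
    var capabilities = new StreamCapabilities();
    if (StreamCapabilities.Deserialize("sample.flv.seek", capabilities))
    {
        Console.WriteLine("video={0} audio={1} bandwidthHint={2}",
                          capabilities.VideoCodecId,
                          capabilities.AudioCodecId,
                          capabilities.BandwidthHint);
    }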
Example #4
 public InNetRTPStream(RtspProtocol _rtsp, StreamsManager streamsManager, string _streamName, StreamCapabilities _streamCapabilities, TimeSpan _rtcpDetectionInterval) : base(_rtsp, streamsManager, _streamName)
 {
     bandwidth = _streamCapabilities.BandwidthHint;
     this._rtcpDetectionInterval = _rtcpDetectionInterval;
     Capabilities  = _streamCapabilities;
     _currentNalu  = Utils.Rms.GetStream();
     _hasAudio     = _streamCapabilities.AudioCodecId != AudioCodec.Unknown;
     _hasVideo     = _streamCapabilities.VideoCodecId != VideoCodec.Unknown;
     _rtcpPresence = RtcpPresence.Unknown;
 }
Example #5
        /// <summary>
        /// Validates that a stream supports a given set of operations, throwing a NotSupportedException when any required capability is missing
        /// </summary>
        /// <param name="stream">The stream to validate</param>
        /// <param name="requiredCapabilities">Flags that specify the capabilities to validate</param>
        public static void ValidateCapabilities(this Stream stream, StreamCapabilities requiredCapabilities)
        {
            StreamCapabilities missingCapabilities = 0;

            if (!stream.CanRead && ((requiredCapabilities & StreamCapabilities.Read) == StreamCapabilities.Read))
            {
                missingCapabilities |= StreamCapabilities.Read;
            }
            if (!stream.CanWrite && ((requiredCapabilities & StreamCapabilities.Write) == StreamCapabilities.Write))
            {
                missingCapabilities |= StreamCapabilities.Write;
            }
            if (!stream.CanSeek && ((requiredCapabilities & StreamCapabilities.Seek) == StreamCapabilities.Seek))
            {
                missingCapabilities |= StreamCapabilities.Seek;
            }
            if (!stream.CanTimeout && ((requiredCapabilities & StreamCapabilities.Timeout) == StreamCapabilities.Timeout))
            {
                missingCapabilities |= StreamCapabilities.Timeout;
            }

            if (missingCapabilities != 0)
            {
                var missingCapabilitiesList = missingCapabilities.GetSetFlagNames().ToList();
                var errorString             = new StringBuilder();
                errorString.Append("This stream does not support the following requirements: ");
                for (var i = 0; i < missingCapabilitiesList.Count; i++)
                {
                    errorString.Append(missingCapabilitiesList[i]);
                    if (i < missingCapabilitiesList.Count - 1)
                    {
                        errorString.Append(", ");
                    }
                }

                throw new NotSupportedException(errorString.ToString());
            }
        }
Example #6
        public override bool Initialize(int clientSideBufferLength)
        {
            if (!base.Initialize(clientSideBufferLength))
            {
                FATAL("Unable to initialize stream");
                return(false);
            }
            //2. Get stream capabilities
            StreamCapabilities pCapabilities = Capabilities;

            if (pCapabilities == null)
            {
                FATAL("Invalid stream capabilities");
                return(false);
            }
            //3. Create the video builder

            switch (pCapabilities.VideoCodecId)
            {
            case VideoCodec.H264:
                _pVideoBuilder = new AVCBuilder();
                break;

            case VideoCodec.PassThrough:
                _pVideoBuilder = new BaseBuilder();
                break;

            case VideoCodec.Unknown:
                WARN("Invalid video stream capabilities:{0}", pCapabilities.VideoCodecId);
                break;

            default:
                FATAL("Invalid video stream capabilities:{0}", pCapabilities.VideoCodecId);
                return(false);
            }

            //4. Create the audio builder

            switch (pCapabilities.AudioCodecId)
            {
            case AudioCodec.Aac:
                _pAudioBuilder = new AACBuilder();
                break;

            case AudioCodec.Mp3:
                _pAudioBuilder = new MP3Builder();
                break;

            case AudioCodec.PassThrough:
                _pAudioBuilder = new BaseBuilder();
                break;

            case AudioCodec.Unknown:
                WARN("Invalid audio stream capabilities: {0}", pCapabilities.AudioCodecId);
                break;

            default:
                FATAL("Invalid audio stream capabilities: {0}", pCapabilities.AudioCodecId);
                return(false);
            }
            _amf0Reader = new AMF0Reader(_pFile.DataStream);
            _pFile.Br   = _amf0Reader;
            return(true);
        }
        public bool Initialize()
        {
            if (_rtsp.Application == null)
            {
                FATAL("RTSP protocol not yet assigned to an application");
                return(false);
            }
            //2. Compute the bandwidthHint
            uint bandwidth = 0;

            if (_videoTrack != null)
            {
                bandwidth += _videoTrack["bandwidth"];
            }
            if (_audioTrack != null)
            {
                bandwidth += _audioTrack["bandwidth"];
            }
            if (bandwidth == 0)
            {
                bandwidth = _bandwidthHint;
            }
            if (_streamName == "")
            {
                _streamName = $"rtsp_{_rtsp.Id}";
            }
            if (!_rtsp.Application.StreamNameAvailable(_streamName, _rtsp))
            {
                FATAL("Stream name {0} already taken", _streamName);
                return(false);
            }
            var streamCapabilities = new StreamCapabilities {
                BandwidthHint = bandwidth
            };

            if (_videoTrack != null)
            {
                streamCapabilities.VideoCodecId = VideoCodec.H264;
                streamCapabilities.InitVideoH264(Utils.DecodeFromBase64(_videoTrack["h264SPS"]), Utils.DecodeFromBase64(_videoTrack["h264PPS"]));
            }
            if (_audioTrack != null)
            {
                streamCapabilities.AudioCodecId = (AudioCodec)(byte)_audioTrack["codec"];
                switch (streamCapabilities.AudioCodecId)
                {
                case AudioCodec.Aac:
                    var aac = Utils.DecodeFromHex(_audioTrack["codecSetup"]);
                    streamCapabilities.InitAudioAAC(new MemoryStream(aac), aac.Length);
                    streamCapabilities.Samplerate = streamCapabilities.Aac._sampleRate;
                    break;

                default:
                    streamCapabilities.Samplerate = _audioTrack["rate"];
                    break;
                }
            }
            _inStream = new InNetRTPStream(_rtsp, _rtsp.Application.StreamsManager, _streamName, streamCapabilities, _rtcpDetectionInterval);
            var session = _rtsp.CustomParameters;

            if (session["customParameters", "externalStreamConfig", "width"] != null &&
                session["customParameters", "externalStreamConfig", "height"] != null)
            {
                StreamCapabilities cap = _inStream.Capabilities;
                if (cap.VideoCodecId == VideoCodec.H264)
                {
                    cap.Avc._widthOverride  = session["customParameters", "externalStreamConfig", "width"];
                    cap.Avc._heightOverride = session["customParameters", "externalStreamConfig", "height"];
                }
            }
            if (_rtpVideo != null)
            {
                _rtpVideo.SetStream(_inStream, false);
                _rtpVideo.InboundConnectivity = this;
                _rtcpVideo.SetInbboundConnectivity(this, false);
            }
            if (_rtpAudio != null)
            {
                _rtpAudio.SetStream(_inStream, true);
                _rtpAudio.InboundConnectivity = this;
                _rtcpAudio.SetInbboundConnectivity(this, true);
            }
            //7. Pickup all outbound waiting streams
            var subscribedOutStreams =
                _rtsp.Application.StreamsManager.GetWaitingSubscribers(
                    _streamName, _inStream.Type);

            //FINEST("subscribedOutStreams count: %"PRIz"u", subscribedOutStreams.size());

            //8. Bind the waiting subscribers
            foreach (var subscribedOutStream in subscribedOutStreams)
            {
                subscribedOutStream.Link(_inStream);
            }

            return(true);
        }
Example #8
        public Variant GetMetaData(string streamName, bool extractInnerMetadata, Variant configuration)
        {
            bool keyframeSeek = configuration[Defines.CONF_APPLICATION_KEYFRAMESEEK];
            int clientSideBuffer = configuration[Defines.CONF_APPLICATION_CLIENTSIDEBUFFER];
            var seekGranularity = (uint)((double)configuration[Defines.CONF_APPLICATION_SEEKGRANULARITY] * 1000);
            bool renameBadFiles = configuration[Defines.CONF_APPLICATION_RENAMEBADFILES];
            bool externSeekGenerator = configuration[Defines.CONF_APPLICATION_EXTERNSEEKGENERATOR];
            var result = Variant.Get();
            result[Defines.META_REQUESTED_STREAM_NAME] = streamName;
            result[Defines.CONF_APPLICATION_KEYFRAMESEEK] = keyframeSeek;
            result[Defines.CONF_APPLICATION_CLIENTSIDEBUFFER] = clientSideBuffer;
            result[Defines.CONF_APPLICATION_SEEKGRANULARITY] = seekGranularity;
            result[Defines.CONF_APPLICATION_RENAMEBADFILES] = renameBadFiles;
            result[Defines.CONF_APPLICATION_EXTERNSEEKGENERATOR] = externSeekGenerator;
            var parts = streamName.Split(':');
            if (parts.Length != 1 && parts.Length != 2 && parts.Length != 5)
            {
                Logger.FATAL("Invalid stream name format:{0}", streamName);
                return result;
            }
            result[Defines.META_MEDIA_TYPE] = parts.Length == 1 ? Defines.MEDIA_TYPE_LIVE_OR_FLV : parts[0].ToLower();
            var searchFor = "";
            switch ((string)result[Defines.META_MEDIA_TYPE])
            {
                case Defines.MEDIA_TYPE_LIVE_OR_FLV:
                    searchFor = parts[0] + ".flv";
                    break;
                case Defines.MEDIA_TYPE_MP3:
                    searchFor = parts[1] + ".mp3";
                    break;
                default:
                    searchFor = parts[1];
                    break;
            }
            result[Defines.META_SERVER_FILE_NAME] = searchFor;
            var _mediaFolder = Application.MediaPath;
            result[Defines.META_SERVER_MEDIA_DIR] = _mediaFolder;

            result[Defines.META_SERVER_FULL_PATH] = searchFor[0] == Path.DirectorySeparatorChar
                ? (searchFor.StartsWith(_mediaFolder.NormalizePath())
                    ? searchFor
                    : "")
                : _mediaFolder.NormalizePath(searchFor);
            if (string.IsNullOrEmpty(result[Defines.META_SERVER_FULL_PATH])) return result;
            var metaPath = result[Defines.META_SERVER_FULL_PATH] + "." + Defines.MEDIA_TYPE_META;
            var seekPath = result[Defines.META_SERVER_FULL_PATH] + "." + Defines.MEDIA_TYPE_SEEK;
            var regenerateFiles = true;
            if (File.Exists(metaPath) && File.Exists(seekPath))
            {
                var capabilities = new StreamCapabilities();
                var originalServerFullPath = (string)result[Defines.META_SERVER_FULL_PATH];
                regenerateFiles =
                (new FileInfo(metaPath).LastWriteTime < new FileInfo(result[Defines.META_SERVER_FULL_PATH]).LastWriteTime)
                || (new FileInfo(seekPath).LastWriteTime < new FileInfo(result[Defines.META_SERVER_FULL_PATH]).LastWriteTime)
                || !Variant.DeserializeFromFile(metaPath, out result)
                || (!StreamCapabilities.Deserialize(seekPath, capabilities));
                regenerateFiles |=
                        (result[Defines.META_SERVER_FULL_PATH] == null)
                        || ((string)result[Defines.META_SERVER_FULL_PATH] != originalServerFullPath)
                        || (result[Defines.CONF_APPLICATION_KEYFRAMESEEK] == null)
                        || ((bool)result[Defines.CONF_APPLICATION_KEYFRAMESEEK] != keyframeSeek)
                        || (result[Defines.CONF_APPLICATION_CLIENTSIDEBUFFER] == null)
                        || ((int)result[Defines.CONF_APPLICATION_CLIENTSIDEBUFFER] != clientSideBuffer)
                        || (result[Defines.CONF_APPLICATION_SEEKGRANULARITY] == null)
                        || ((uint)result[Defines.CONF_APPLICATION_SEEKGRANULARITY] != seekGranularity);
                if (regenerateFiles)
                {
                    result[Defines.META_SERVER_FULL_PATH] = originalServerFullPath;
                    result[Defines.CONF_APPLICATION_KEYFRAMESEEK] = keyframeSeek;
                    result[Defines.CONF_APPLICATION_CLIENTSIDEBUFFER] = clientSideBuffer;
                    result[Defines.CONF_APPLICATION_SEEKGRANULARITY] = seekGranularity;
                }
            }
            if (!regenerateFiles)
            {
                result[Defines.META_REQUESTED_STREAM_NAME] = streamName;
                return result;
            }
            this.Log().Info("Generate seek/meta for file {0}", result[Defines.META_SERVER_FULL_PATH]);
            //8. We either have a bad meta file or we don't have it at all. Build it
            if (extractInnerMetadata)
            {
                if (!BaseInFileStream.ResolveCompleteMetadata(ref result))
                {
                    Logger.FATAL("Unable to get metadata. Partial result:\n{0}",
                            result);
                    return Variant.Get();
                }
            }
            result.SerializeToFile(metaPath);
            return result;
        }
Example #9
    internal static bool Deserialize(Stream raw, StreamCapabilities _streamCapabilities)
    {
        var reader = new N2HBinaryReader(raw);
        //var length = raw.GetAvaliableByteCounts();
        //if (length < 28)
        //{
        //    Logger.FATAL("Not enough data");
        //    return false;
        //}
        var ver = reader.ReadUInt64();
        if (ver != Utils.__STREAM_CAPABILITIES_VERSION)
        {
            Logger.FATAL("Invalid stream capabilities version. Wanted: {0}; Got: {1}",
            Utils.__STREAM_CAPABILITIES_VERSION, ver);
            return false;
        }
        _streamCapabilities.Clear();
        _streamCapabilities.VideoCodecId = (VideoCodec)reader.ReadByte();
        _streamCapabilities.AudioCodecId = (AudioCodec)reader.ReadByte();
        _streamCapabilities.BandwidthHint = reader.ReadUInt32();

        if (_streamCapabilities.VideoCodecId == VideoCodec.H264 && !VideoAvc.Deserialize(raw, out _streamCapabilities.Avc))
        {
            Logger.FATAL("Unable to deserialize avc");
            return false;
        }
        if (_streamCapabilities.AudioCodecId == AudioCodec.Aac &&
            !AudioAac.Deserialize(raw, out _streamCapabilities.Aac))
        {
            Logger.FATAL("Unable to deserialize aac");
            return false;
        }
        return true;
    }
Example #10
    public static bool Deserialize(string seekFilePath, StreamCapabilities capabilities)
    {
        using (var file = MediaFile.Initialize(seekFilePath))
        {
            if (file == null)
            {
                Logger.FATAL("Unable to open seek file {0}", seekFilePath);
                return false;
            }

            var length = file.Br.ReadUInt32();
            if (length > 1024 * 1024)
            {
                Logger.FATAL("Invalid stream capabilities length in file {0}: {1}", seekFilePath, length);
                return false;
            }

            //var buffer = new MemoryStream();
            //buffer.ReadFromRepeat(0, (int)length);
            //if (!file.ReadBuffer(buffer, (int)length))
            //{
            //    Logger.FATAL("Unable to read stream capabilities payload from file {0}",seekFilePath);
            //    return false;
            //}

            //file.Close();

            if (!Deserialize(file.DataStream, capabilities))
            {
                Logger.FATAL("Unable to deserialize stream capabilities from file {0}", seekFilePath);
                return false;
            }
        }
        return true;
    }
Example #11
        public virtual bool Initialize(int clientSideBufferLength)
        {
            //1. Check to see if we have a universal seeking file
            var seekFilePath = Name + "." + MEDIA_TYPE_SEEK;

            if (!File.Exists(seekFilePath))
            {
                var meta = Variant.GetMap(new VariantMapHelper {
                    { META_SERVER_FULL_PATH, Name }
                });
                if (!ResolveCompleteMetadata(ref meta))
                {
                    Logger.FATAL("Unable to generate metadata");
                    return(false);
                }
            }
            //2. Open the seek file
            _pSeekFile = MediaFile.Initialize(seekFilePath);
            if (_pSeekFile == null)
            {
                Logger.FATAL("Unable to open seeking file {0}", seekFilePath);
                return(false);
            }

            //3. read stream capabilities
            var streamCapabilitiesSize = _pSeekFile.Br.ReadUInt32();

            //var raw = new MemoryStream();
            //_pSeekFile.CopyPartTo(raw, (int)streamCapabilitiesSize);

            if (streamCapabilitiesSize < 14 || !StreamCapabilities.Deserialize(_pSeekFile.DataStream, Capabilities))
            {
                Logger.FATAL("Unable to deserialize stream Capabilities. Please delete {0} and {1} files so they can be regenerated",
                             Name + "." + MEDIA_TYPE_SEEK,
                             Name + "." + MEDIA_TYPE_META);
                return(false);
            }
            //4. compute offsets
            _seekBaseOffset   = _pSeekFile.Position;
            _framesBaseOffset = _seekBaseOffset + 4;
            //5. Compute the optimal window size by reading the biggest frame size
            //from the seek file.
            if (!_pSeekFile.SeekTo(_pSeekFile.Length - 8))
            {
                Logger.FATAL("Unable to seek to {0} position", _pSeekFile.Position - 8);
                return(false);
            }
            ulong maxFrameSize = _pSeekFile.Br.ReadUInt64();

            if (!_pSeekFile.SeekBegin())
            {
                Logger.FATAL("Unable to seek to beginning of the file");
                return(false);
            }
            //3. Open the media file
            var windowSize = (uint)maxFrameSize * 16;

            //windowSize = windowSize < 65536 ? 65536 : windowSize;
            //windowSize = (windowSize > (1024 * 1024)) ? (windowSize / 2) : windowSize;
            _pFile = MediaFile.Initialize(Name);
            //4. Read the frames count from the file
            if (!_pSeekFile.SeekTo(_seekBaseOffset))
            {
                Logger.FATAL("Unable to seek to _seekBaseOffset: {0}", _seekBaseOffset);
                return(false);
            }
            _totalFrames       = _pSeekFile.Br.ReadUInt32();
            _timeToIndexOffset = _framesBaseOffset + _totalFrames * MediaFrame.MediaFrameSize;

            //5. Set the client side buffer length
            _clientSideBufferLength = clientSideBufferLength;

            //6. Create the timer
            _pTimer = new InFileStreamTimer(this);
            _pTimer.EnqueueForTimeEvent((uint)(_clientSideBufferLength - _clientSideBufferLength / 3));

            //7. Done
            return(true);
        }
Example #12
        public bool Initialize()
        {
            if (_rtsp.Application == null)
            {
                FATAL("RTSP protocol not yet assigned to an application");
                return false;
            }
            //2. Compute the bandwidthHint
            uint bandwidth = 0;
            if (_videoTrack != null)
            {
                bandwidth += _videoTrack["bandwidth"];
            }
            if (_audioTrack != null)
            {
                bandwidth += _audioTrack["bandwidth"];
            }
            if (bandwidth == 0)
            {
                bandwidth = _bandwidthHint;
            }
            if (_streamName == "") _streamName = $"rtsp_{_rtsp.Id}";
            if (!_rtsp.Application.StreamNameAvailable(_streamName, _rtsp))
            {
                FATAL("Stream name {0} already taken", _streamName);
                return false;
            }
            var streamCapabilities = new StreamCapabilities {BandwidthHint = bandwidth };
            if (_videoTrack != null)
            {
                streamCapabilities.VideoCodecId = VideoCodec.H264;
                streamCapabilities.InitVideoH264(Utils.DecodeFromBase64(_videoTrack["h264SPS"]), Utils.DecodeFromBase64(_videoTrack["h264PPS"]));
            }
            if (_audioTrack != null)
            {
                streamCapabilities.AudioCodecId = (AudioCodec) (byte) _audioTrack["codec"];
                switch (streamCapabilities.AudioCodecId)
                {
                    case AudioCodec.Aac:
                        var aac = Utils.DecodeFromHex(_audioTrack["codecSetup"]);
                        streamCapabilities.InitAudioAAC(new MemoryStream(aac), aac.Length);
                        streamCapabilities.Samplerate = streamCapabilities.Aac._sampleRate;
                        break;
                    default:
                        streamCapabilities.Samplerate = _audioTrack["rate"];
                        break;
                }
            }
            _inStream = new InNetRTPStream(_rtsp, _rtsp.Application.StreamsManager,_streamName, streamCapabilities, _rtcpDetectionInterval);
            var session = _rtsp.CustomParameters;
            if (session["customParameters", "externalStreamConfig", "width"] != null &&
                session["customParameters", "externalStreamConfig", "height"] != null)
            {
                StreamCapabilities cap = _inStream.Capabilities;
                if(cap.VideoCodecId == VideoCodec.H264)
                {
                    cap.Avc._widthOverride = session["customParameters", "externalStreamConfig", "width"];
                    cap.Avc._heightOverride = session["customParameters", "externalStreamConfig", "height"];
                }
            }
            if (_rtpVideo != null)
            {
                _rtpVideo.SetStream(_inStream, false);
                _rtpVideo.InboundConnectivity = this;
                _rtcpVideo.SetInbboundConnectivity(this,false);
            }
            if (_rtpAudio != null)
            {
                _rtpAudio.SetStream(_inStream,true);
                _rtpAudio.InboundConnectivity = this;
                _rtcpAudio.SetInbboundConnectivity(this,true);
            }
            //7. Pickup all outbound waiting streams
           var subscribedOutStreams =
                    _rtsp.Application.StreamsManager.GetWaitingSubscribers(
                    _streamName, _inStream.Type);
            //FINEST("subscribedOutStreams count: %"PRIz"u", subscribedOutStreams.size());

            //8. Bind the waiting subscribers
            foreach (var subscribedOutStream in subscribedOutStreams)
            {
                subscribedOutStream.Link(_inStream);
            }

            return true;
        }
Example #13
        public Variant GetMetaData(string streamName, bool extractInnerMetadata, Variant configuration)
        {
            bool keyframeSeek        = configuration[Defines.CONF_APPLICATION_KEYFRAMESEEK];
            int  clientSideBuffer    = configuration[Defines.CONF_APPLICATION_CLIENTSIDEBUFFER];
            var  seekGranularity     = (uint)((double)configuration[Defines.CONF_APPLICATION_SEEKGRANULARITY] * 1000);
            bool renameBadFiles      = configuration[Defines.CONF_APPLICATION_RENAMEBADFILES];
            bool externSeekGenerator = configuration[Defines.CONF_APPLICATION_EXTERNSEEKGENERATOR];
            var  result = Variant.Get();

            result[Defines.META_REQUESTED_STREAM_NAME]           = streamName;
            result[Defines.CONF_APPLICATION_KEYFRAMESEEK]        = keyframeSeek;
            result[Defines.CONF_APPLICATION_CLIENTSIDEBUFFER]    = clientSideBuffer;
            result[Defines.CONF_APPLICATION_SEEKGRANULARITY]     = seekGranularity;
            result[Defines.CONF_APPLICATION_RENAMEBADFILES]      = renameBadFiles;
            result[Defines.CONF_APPLICATION_EXTERNSEEKGENERATOR] = externSeekGenerator;
            var parts = streamName.Split(':');

            if (parts.Length != 1 && parts.Length != 2 && parts.Length != 5)
            {
                Logger.FATAL("Invalid stream name format:{0}", streamName);
                return(result);
            }
            result[Defines.META_MEDIA_TYPE] = parts.Length == 1 ? Defines.MEDIA_TYPE_LIVE_OR_FLV : parts[0].ToLower();
            var searchFor = "";

            switch ((string)result[Defines.META_MEDIA_TYPE])
            {
            case Defines.MEDIA_TYPE_LIVE_OR_FLV:
                searchFor = parts[0] + ".flv";
                break;

            case Defines.MEDIA_TYPE_MP3:
                searchFor = parts[1] + ".mp3";
                break;

            default:
                searchFor = parts[1];
                break;
            }
            result[Defines.META_SERVER_FILE_NAME] = searchFor;
            var _mediaFolder = Application.MediaPath;

            result[Defines.META_SERVER_MEDIA_DIR] = _mediaFolder;

            result[Defines.META_SERVER_FULL_PATH] = searchFor[0] == Path.DirectorySeparatorChar
                ? (searchFor.StartsWith(_mediaFolder.NormalizePath())
                    ? searchFor
                    : "")
                : _mediaFolder.NormalizePath(searchFor);
            if (string.IsNullOrEmpty(result[Defines.META_SERVER_FULL_PATH]))
            {
                return(result);
            }
            var metaPath        = result[Defines.META_SERVER_FULL_PATH] + "." + Defines.MEDIA_TYPE_META;
            var seekPath        = result[Defines.META_SERVER_FULL_PATH] + "." + Defines.MEDIA_TYPE_SEEK;
            var regenerateFiles = true;

            if (File.Exists(metaPath) && File.Exists(seekPath))
            {
                var capabilities           = new StreamCapabilities();
                var originalServerFullPath = (string)result[Defines.META_SERVER_FULL_PATH];
                regenerateFiles =
                    (new FileInfo(metaPath).LastWriteTime < new FileInfo(result[Defines.META_SERVER_FULL_PATH]).LastWriteTime) ||
                    (new FileInfo(seekPath).LastWriteTime < new FileInfo(result[Defines.META_SERVER_FULL_PATH]).LastWriteTime) ||
                    !Variant.DeserializeFromFile(metaPath, out result) ||
                    (!StreamCapabilities.Deserialize(seekPath, capabilities));
                regenerateFiles |=
                    (result[Defines.META_SERVER_FULL_PATH] == null) ||
                    ((string)result[Defines.META_SERVER_FULL_PATH] != originalServerFullPath) ||
                    (result[Defines.CONF_APPLICATION_KEYFRAMESEEK] == null) ||
                    ((bool)result[Defines.CONF_APPLICATION_KEYFRAMESEEK] != keyframeSeek) ||
                    (result[Defines.CONF_APPLICATION_CLIENTSIDEBUFFER] == null) ||
                    ((int)result[Defines.CONF_APPLICATION_CLIENTSIDEBUFFER] != clientSideBuffer) ||
                    (result[Defines.CONF_APPLICATION_SEEKGRANULARITY] == null) ||
                    ((uint)result[Defines.CONF_APPLICATION_SEEKGRANULARITY] != seekGranularity);
                if (regenerateFiles)
                {
                    result[Defines.META_SERVER_FULL_PATH]             = originalServerFullPath;
                    result[Defines.CONF_APPLICATION_KEYFRAMESEEK]     = keyframeSeek;
                    result[Defines.CONF_APPLICATION_CLIENTSIDEBUFFER] = clientSideBuffer;
                    result[Defines.CONF_APPLICATION_SEEKGRANULARITY]  = seekGranularity;
                }
            }
            if (!regenerateFiles)
            {
                result[Defines.META_REQUESTED_STREAM_NAME] = streamName;
                return(result);
            }
            this.Log().Info("Generate seek/meta for file {0}", result[Defines.META_SERVER_FULL_PATH]);
            //8. We either have a bad meta file or we don't have it at all. Build it
            if (extractInnerMetadata)
            {
                if (!BaseInFileStream.ResolveCompleteMetadata(ref result))
                {
                    Logger.FATAL("Unable to get metadata. Partial result:\n{0}",
                                 result);
                    return(Variant.Get());
                }
            }
            result.SerializeToFile(metaPath);
            return(result);
        }
        private string GetAudioTrack(RtspProtocol pFrom, StreamCapabilities pCapabilities)
        {
            pFrom.CustomParameters["audioTrackId"] = "1"; //md5(format("A%u%s",pFrom->GetId(), STR(generateRandomString(4))), true);
            string result = "";
            
            switch (pCapabilities.AudioCodecId)
            {
                case AudioCodec.Aac:
                    result += "m=audio 0 RTP/AVP 96\r\n";
                    result += "a=recvonly\r\n";
                    result += $"a=rtpmap:96 mpeg4-generic/{pCapabilities.Aac._sampleRate}/2\r\n";
                    //FINEST("result: %s", STR(result));
                    result += "a=control:trackID="
                              + pFrom.CustomParameters["audioTrackId"] + "\r\n";
                    //rfc3640-fmtp-explained.txt Chapter 4.1
                    result +=
                        $"a=fmtp:96 streamtype=5; profile-level-id=15; mode=AAC-hbr; {pCapabilities.Aac.GetRTSPFmtpConfig()}; SizeLength=13; IndexLength=3; IndexDeltaLength=3;\r\n";
                    break;
                case AudioCodec.Speex:
                    result += "m=audio 0 RTP/AVP 98\r\n";
                    result += "a=rtpmap:98 speex/16000\r\n";
                    //FINEST("result: %s", STR(result));
                    result += "a=control:trackID="+ pFrom.CustomParameters["audioTrackId"] + "\r\n";

                    //http://www.rfc-editor.org/rfc/rfc5574.txt
                    result +="a=fmtp:98 mode=\"7,any\"\r\n";
                    break;
                default:
                    WARN("Unsupported audio codec: {0}", pCapabilities.AudioCodecId);
                    break;
            }
            return result;
        }
        private string GetVideoTrack(RtspProtocol pFrom, StreamCapabilities pCapabilities)
        {
            pFrom.CustomParameters["videoTrackId"] = "2"; //md5(format("V%u%s",pFrom->GetId(), STR(generateRandomString(4))), true);
            var sw = new StringBuilder();
            
            if (pCapabilities.VideoCodecId == VideoCodec.H264)
            {
                sw.AppendLine("m=video 0 RTP/AVP 97");
                sw.AppendLine("a=recvonly");
                sw.Append("a=control:trackID=");
                sw.AppendLine(pFrom.CustomParameters["videoTrackId"]);
                sw.AppendLine("a=rtpmap:97 H264/90000");
                sw.Append("a=fmtp:97 profile-level-id=");
                sw.Append($"{pCapabilities.Avc.SPS[1]:X2}{pCapabilities.Avc.SPS[2]:X2}{pCapabilities.Avc.SPS[3]:X2}");
                sw.Append("; packetization-mode=1; sprop-parameter-sets=");
                sw.Append(Convert.ToBase64String(pCapabilities.Avc.SPS) + ",");
                sw.AppendLine(Convert.ToBase64String(pCapabilities.Avc.PPS));
            }
            else
            {

                WARN("Unsupported video codec: %s", pCapabilities.VideoCodecId);
            }
            return sw.ToString();
        }