/// <summary>
/// Forwards a message to both the live output and the record pipe, then
/// notifies registered stream listeners when the message carries a stream packet.
/// </summary>
/// <param name="message">Message to push.</param>
private void PushMessage(IMessage message)
{
    _msgOut.PushMessage(message);
    _recordPipe.PushMessage(message);
    RtmpMessage rtmpMessage = message as RtmpMessage;
    if (rtmpMessage == null)
        return;
    IStreamPacket packet = rtmpMessage.body as IStreamPacket;
    if (packet == null)
        return;
    // Notify listeners about the received packet; a failing listener must not
    // prevent the remaining listeners from being notified.
    foreach (IStreamListener listener in GetStreamListeners())
    {
        try
        {
            listener.PacketReceived(this, packet);
        }
        catch (Exception ex)
        {
            log.Error("Error while notifying listener " + listener, ex);
        }
    }
}
/// <summary>
/// Encodes an RTMP packet (header plus chunked message body) into a single buffer.
/// </summary>
/// <param name="context">RTMP protocol state for this connection.</param>
/// <param name="packet">Packet to encode.</param>
/// <returns>Flipped buffer ready to be written to the wire.</returns>
public static ByteBuffer EncodePacket(RtmpContext context, RtmpPacket packet)
{
    RtmpHeader header = packet.Header;
    int channelId = header.ChannelId;
    IRtmpEvent message = packet.Message;
    if (message is ChunkSize)
    {
        // A ChunkSize message changes the chunk size used for every
        // subsequently written packet on this connection.
        ChunkSize size = (ChunkSize)message;
        context.SetWriteChunkSize(size.Size);
    }
    ByteBuffer input = EncodeMessage(context, header, message);
    if (input.Position != 0L)
    {
        input.Flip();
    }
    else
    {
        input.Rewind();
    }
    header.Size = input.Limit;
    RtmpHeader lastWriteHeader = context.GetLastWriteHeader(channelId);
    int headerSize = CalculateHeaderSize(header, lastWriteHeader);
    context.SetLastWriteHeader(channelId, header);
    context.SetLastWritePacket(channelId, packet);
    int writeChunkSize = context.GetWriteChunkSize();
    // Size of each continuation (type-3) basic header depends on the channel id:
    // ids 2-63 fit in 1 byte, 64-319 in 2 bytes, 320 and above need 3 bytes.
    int continuationHeaderSize = 1;
    if (header.ChannelId >= 320)
    {
        // BUGFIX: was "> 320", which sized channel id 320 itself as a 2-byte
        // header although the 2-byte form only encodes ids 64-319.
        continuationHeaderSize = 3;
    }
    else if (header.ChannelId > 0x3f)
    {
        continuationHeaderSize = 2;
    }
    int chunkCount = (int)Math.Ceiling((double)(((float)header.Size) / ((float)writeChunkSize)));
    // Total capacity: payload + first header + one continuation header per extra chunk.
    int capacity = (header.Size + headerSize) + ((chunkCount > 0) ? ((chunkCount - 1) * continuationHeaderSize) : 0);
    ByteBuffer buffer = ByteBuffer.Allocate(capacity);
    EncodeHeader(header, lastWriteHeader, buffer);
    if (chunkCount == 1)
    {
        ByteBuffer.Put(buffer, input, buffer.Remaining);
    }
    else
    {
        // Write full-size chunks, each followed by a type-3 continuation header,
        // then whatever remains as the final (possibly partial) chunk.
        for (int i = 0; i < (chunkCount - 1); i++)
        {
            ByteBuffer.Put(buffer, input, writeChunkSize);
            EncodeHeaderByte(buffer, 3, header.ChannelId);
        }
        ByteBuffer.Put(buffer, input, buffer.Remaining);
    }
    buffer.Flip();
    return buffer;
}
/// <summary>
/// Push message through pipe.
/// Synchronize this method to avoid FLV corruption from abrupt disconnection.
/// </summary>
/// <param name="pipe">Pipe.</param>
/// <param name="message">Message to push.</param>
public void PushMessage(IPipe pipe, IMessage message)
{
    lock (this.SyncRoot)
    {
        if (message is ResetMessage)
        {
            // A reset restarts the timestamp base; remember where the previous
            // segment ended so later tags continue from that offset.
            _startTimestamp = -1;
            _offset += _lastTimestamp;
            return;
        }
        else if (message is StatusMessage)
        {
            // Status messages carry no stream data to record.
            return;
        }
        if (!(message is RtmpMessage))
        {
            return;
        }
        if (_writer == null)
        {
            // Lazily open the tag writer on the first recordable message.
            Init();
        }
        FluorineFx.Messaging.Rtmp.Stream.Messages.RtmpMessage rtmpMsg = message as FluorineFx.Messaging.Rtmp.Stream.Messages.RtmpMessage;
        IRtmpEvent msg = rtmpMsg.body;
        if (_startTimestamp == -1)
        {
            _startTimestamp = msg.Timestamp;
        }
        // Tags are written relative to the first message of the current segment.
        int timestamp = msg.Timestamp - _startTimestamp;
        if (timestamp < 0)
        {
            log.Warn("Skipping message with negative timestamp.");
            return;
        }
        _lastTimestamp = timestamp;
        ITag tag = new Tag();
        tag.DataType = (byte)msg.DataType;
        tag.Timestamp = timestamp + _offset;
        if (msg is IStreamData)
        {
            ByteBuffer data = (msg as IStreamData).Data;
            tag.Body = data.ToArray();
        }
        try
        {
            _writer.WriteTag(tag);
        }
        catch (IOException ex)
        {
            // Keep recording alive: a single failed tag is logged and skipped.
            log.Error("Error writing tag", ex);
        }
    }
}
/// <summary>
/// RTMP event handler.
/// </summary>
/// <param name="evt">RTMP event</param>
/// <returns>Timeframe since last notification (or audio or video packet sending).</returns>
public int Add(IRtmpEvent evt)
{
    // Assume a relative timestamp until the first packet of a kind proves otherwise.
    _relative = true;
    int timestamp = evt.Timestamp;
    int result = 0;
    switch (evt.DataType)
    {
        case Constants.TypeAudioData:
            if (!_firstAudio)
            {
                // Delta against the previous audio packet.
                result = timestamp - _lastAudio;
                _lastAudio = timestamp;
            }
            else
            {
                // First audio packet reports an absolute timestamp.
                result = evt.Timestamp;
                _relative = false;
                _firstAudio = false;
            }
            break;
        case Constants.TypeVideoData:
            if (!_firstVideo)
            {
                result = timestamp - _lastVideo;
                _lastVideo = timestamp;
            }
            else
            {
                result = evt.Timestamp;
                _relative = false;
                _firstVideo = false;
            }
            break;
        case Constants.TypeNotify:
        case Constants.TypeInvoke:
            if (!_firstNotify)
            {
                result = timestamp - _lastNotify;
                _lastNotify = timestamp;
            }
            else
            {
                result = evt.Timestamp;
                _relative = false;
                _firstNotify = false;
            }
            break;
        default:
            // Other event types do not affect the timestamper.
            break;
    }
    return(result);
}
/// <summary>
/// Pulls the next FLV tag from the reader and converts it into an RTMP message.
/// Returns null when called for a foreign pipe or when no more tags exist.
/// </summary>
/// <param name="pipe">Pipe requesting the message.</param>
public IMessage PullMessage(IPipe pipe)
{
    lock (_syncLock)
    {
        if (_pipe != pipe)
        {
            // Only serve the pipe this provider is attached to.
            return(null);
        }
        if (_reader == null)
        {
            // Lazily open the tag reader on first pull.
            Init();
        }
        if (!_reader.HasMoreTags())
        {
            // TODO send OOBCM to notify EOF
            // Do not unsubscribe as this kills VOD seek while in buffer
            // this.pipe.unsubscribe(this);
            return(null);
        }
        ITag tag = _reader.ReadTag();
        IRtmpEvent msg = null;
        int timestamp = tag.Timestamp;
        // Map the FLV tag type onto the corresponding RTMP event.
        switch (tag.DataType)
        {
            case Constants.TypeAudioData:
                msg = new AudioData(tag.Body);
                break;
            case Constants.TypeVideoData:
                msg = new VideoData(tag.Body);
                break;
            case Constants.TypeInvoke:
                msg = new Invoke(tag.Body);
                break;
            case Constants.TypeNotify:
                msg = new Notify(tag.Body);
                break;
            case Constants.TypeFlexStreamEnd:
                msg = new FlexStreamSend(tag.Body);
                break;
            default:
                // Unknown tag types are wrapped so the stream keeps flowing.
                log.Warn("Unexpected type " + tag.DataType);
                msg = new Unknown(tag.DataType, tag.Body);
                break;
        }
        msg.Timestamp = timestamp;
        RtmpMessage rtmpMsg = new RtmpMessage();
        rtmpMsg.body = msg;
        return(rtmpMsg);
    }
}
/// <summary>
/// Writes packet from event data to the RTMP connection.
/// </summary>
/// <param name="message">Event data.</param>
public void Write(IRtmpEvent message)
{
    IStreamCapableConnection streamConnection = _connection as IStreamCapableConnection;
    IClientStream stream = (streamConnection != null) ? streamConnection.GetStreamByChannelId(_channelId) : null;
    if (stream == null && _channelId > 3)
    {
        // The stream no longer exists, so the message is discarded.
        return;
    }
    int streamId = (stream != null) ? stream.StreamId : 0;
    Write(message, streamId);
}
/// <summary>
/// Writes packet from event data to the RTMP connection using the specified stream id.
/// </summary>
/// <param name="message">Event data.</param>
/// <param name="streamId">Stream id.</param>
private void Write(IRtmpEvent message, int streamId)
{
    RtmpHeader header = new RtmpHeader();
    header.ChannelId = _channelId;
    header.StreamId = streamId;
    header.DataType = message.DataType;
    header.Timer = message.Timestamp;
    if (message.Header != null)
    {
        // Preserve whether the source event's timer was relative or absolute.
        header.IsTimerRelative = message.Header.IsTimerRelative;
    }
    RtmpPacket packet = new RtmpPacket(header, message);
    _connection.Write(packet);
}
/// <summary>
/// Pull the next message from IMessageInput and schedule it for push according to the timestamp.
/// </summary>
protected void ScheduleNextMessage()
{
    // "first" is true only when no message has been scheduled yet for this item.
    bool first = _nextRTMPMessage == null;
    long delta;
    while (true)
    {
        _nextRTMPMessage = GetNextRTMPMessage();
        if (_nextRTMPMessage == null)
        {
            // Source exhausted: signal end of the current playlist item.
            OnItemEnd();
            return;
        }
        IRtmpEvent rtmpEvent = _nextRTMPMessage.body;
        // filter all non-AV messages
        if (!(rtmpEvent is VideoData) && !(rtmpEvent is AudioData))
        {
            continue;
        }
        rtmpEvent = _nextRTMPMessage.body;
        _nextTS = rtmpEvent.Timestamp;
        if (first)
        {
            // Remember the VOD start timestamp so later deltas are relative to it.
            _vodStartTS = _nextTS;
            first = false;
        }
        // How far ahead of real time this message is (media time elapsed minus
        // wall-clock time elapsed since the item started playing).
        delta = _nextTS - _vodStartTS - (System.Environment.TickCount - _serverStartTS);
        if (delta < WAIT_THRESHOLD)
        {
            // Message is (nearly) due: push immediately and keep looping.
            if (!DoPushMessage())
            {
                return;
            }
            if (_state != State.PLAYING)
            {
                // Stream is paused, don't load more messages
                _nextRTMPMessage = null;
                return;
            }
        }
        else
        {
            // Message lies in the future: schedule it instead of pushing now.
            break;
        }
    }
    VODScheduledJob job = new VODScheduledJob(this);
    _vodJobName = _schedulingService.AddScheduledOnceJob(delta, job);
}
/// <summary>
/// RTMP event handler. Returns the timeframe since the previous packet of the
/// same kind (audio, video, or notify/invoke); the first packet of a kind is
/// reported with its absolute timestamp.
/// </summary>
/// <param name="evt">RTMP event.</param>
public int Add(IRtmpEvent evt)
{
    this._relative = true;
    int timestamp = evt.Timestamp;
    int result = 0;
    switch (evt.DataType)
    {
        case 8: // audio data
            if (this._firstAudio)
            {
                result = evt.Timestamp;
                this._relative = false;
                this._firstAudio = false;
            }
            else
            {
                result = timestamp - this._lastAudio;
                this._lastAudio = timestamp;
            }
            break;
        case 9: // video data
            if (this._firstVideo)
            {
                result = evt.Timestamp;
                this._relative = false;
                this._firstVideo = false;
            }
            else
            {
                result = timestamp - this._lastVideo;
                this._lastVideo = timestamp;
            }
            break;
        case 0x12: // notify
        case 20:   // invoke
            if (this._firstNotify)
            {
                result = evt.Timestamp;
                this._relative = false;
                this._firstNotify = false;
            }
            else
            {
                result = timestamp - this._lastNotify;
                this._lastNotify = timestamp;
            }
            break;
        case 0x13:
            // Explicitly ignored; yields the default result of 0.
            break;
    }
    return(result);
}
/// <summary>
/// Writes an event to the RTMP connection, resolving the stream id from the
/// channel. Messages for channels above 3 whose stream no longer exists are dropped.
/// </summary>
/// <param name="message">Event data.</param>
public void Write(IRtmpEvent message)
{
    IClientStream stream = null;
    IStreamCapableConnection capable = this._connection as IStreamCapableConnection;
    if (capable != null)
    {
        stream = capable.GetStreamByChannelId(this._channelId);
    }
    if ((this._channelId > 3) && (stream == null))
    {
        // Stream is gone; discard the message.
        return;
    }
    this.Write(message, (stream == null) ? 0 : stream.StreamId);
}
/// <summary>
/// Decides whether a packet may be sent in the current dropper state.
/// Non-video packets are always sent; video frames are filtered by the
/// state machine and may move it between states as the client catches up.
/// </summary>
/// <param name="message">Message to check.</param>
/// <param name="pending">Number of pending bytes/messages for the client.</param>
public bool CanSendPacket(RtmpMessage message, long pending)
{
    IRtmpEvent body = message.body;
    VideoData video = body as VideoData;
    if (video == null)
    {
        // Only video packets are ever dropped.
        return(true);
    }
    FrameType frameType = video.FrameType;
    bool send = false;
    switch (this._state)
    {
        case FrameDropperState.SEND_ALL:
            send = true;
            break;
        case FrameDropperState.SEND_INTERFRAMES:
            if (frameType == FrameType.KEYFRAME)
            {
                if (pending == 0L)
                {
                    // Client keeps up again: resume sending everything.
                    this._state = FrameDropperState.SEND_ALL;
                }
                send = true;
            }
            else if (frameType == FrameType.INTERFRAME)
            {
                send = true;
            }
            break;
        case FrameDropperState.SEND_KEYFRAMES:
            send = frameType == FrameType.KEYFRAME;
            if (send && (pending == 0L))
            {
                this._state = FrameDropperState.SEND_KEYFRAMES_CHECK;
            }
            break;
        case FrameDropperState.SEND_KEYFRAMES_CHECK:
            send = frameType == FrameType.KEYFRAME;
            if (send && (pending == 0L))
            {
                this._state = FrameDropperState.SEND_INTERFRAMES;
            }
            break;
    }
    return(send);
}
/// <summary>
/// Pushes a message through the pipe, writing RTMP stream data as tags.
/// Locked to avoid corrupting the output on abrupt disconnection.
/// </summary>
/// <param name="pipe">Pipe.</param>
/// <param name="message">Message to push.</param>
public void PushMessage(IPipe pipe, IMessage message)
{
    lock (this.SyncRoot)
    {
        if (message is ResetMessage)
        {
            // A reset restarts the timestamp base; remember where the previous
            // segment ended so later tags continue from that offset.
            this._startTimestamp = -1;
            this._offset += this._lastTimestamp;
        }
        else if (!(message is StatusMessage) && (message is RtmpMessage))
        {
            if (this._writer == null)
            {
                // Lazily open the tag writer on the first recordable message.
                this.Init();
            }
            RtmpMessage message2 = message as RtmpMessage;
            IRtmpEvent body = message2.body;
            if (this._startTimestamp == -1)
            {
                this._startTimestamp = body.Timestamp;
            }
            // Tags are written relative to the first message of the segment.
            int num = body.Timestamp - this._startTimestamp;
            if (num < 0)
            {
                log.Warn("Skipping message with negative timestamp.");
            }
            else
            {
                this._lastTimestamp = num;
                ITag tag = new Tag { DataType = body.DataType, Timestamp = num + this._offset };
                if (body is IStreamData)
                {
                    tag.Body = (body as IStreamData).Data.ToArray();
                }
                try
                {
                    this._writer.WriteTag(tag);
                }
                catch (IOException exception)
                {
                    // Keep going: a single failed tag is logged and skipped.
                    log.Error("Error writing tag", exception);
                }
            }
        }
    }
}
/// <summary>
/// Writes packet from event data to the RTMP connection using the specified stream id.
/// </summary>
/// <param name="message">Event data.</param>
/// <param name="streamId">Stream id.</param>
private void Write(IRtmpEvent message, int streamId)
{
    RtmpHeader header = new RtmpHeader();
    RtmpPacket packet = new RtmpPacket(header, message);
    header.ChannelId = _channelId;
    header.Timer = message.Timestamp;
    header.StreamId = streamId;
    header.DataType = message.DataType;
    if (message.Header != null)
    {
        // Preserve whether the source event's timer was relative or absolute.
        header.IsTimerRelative = message.Header.IsTimerRelative;
    }
    _connection.Write(packet);
}
/// <summary>
/// Pulls the next tag from the reader and converts it into an RTMP message.
/// Returns null when called for a foreign pipe or when no more tags exist.
/// </summary>
/// <param name="pipe">Pipe requesting the message.</param>
public IMessage PullMessage(IPipe pipe)
{
    lock (this._syncLock)
    {
        if (this._pipe != pipe)
        {
            // Only serve the pipe this provider is attached to.
            return(null);
        }
        if (this._reader == null)
        {
            // Lazily open the tag reader on first pull.
            this.Init();
        }
        if (!this._reader.HasMoreTags())
        {
            return(null);
        }
        ITag tag = this._reader.ReadTag();
        IRtmpEvent event2 = null;
        int timestamp = tag.Timestamp;
        // Map the tag type onto the corresponding RTMP event.
        // NOTE(review): the companion (non-decompiled) PullMessage also handles
        // Constants.TypeFlexStreamEnd by creating a FlexStreamSend; here such
        // tags fall into the default branch and become Unknown — confirm
        // whether a case for the Flex stream-send type id is missing.
        switch (tag.DataType)
        {
            case 8: // audio data
                event2 = new AudioData(tag.Body);
                break;
            case 9: // video data
                event2 = new VideoData(tag.Body);
                break;
            case 0x12: // notify
                event2 = new Notify(tag.Body);
                break;
            case 20: // invoke
                event2 = new Invoke(tag.Body);
                break;
            default:
                log.Warn("Unexpected type " + tag.DataType);
                event2 = new Unknown(tag.DataType, tag.Body);
                break;
        }
        event2.Timestamp = timestamp;
        return(new RtmpMessage { body = event2 });
    }
}
/// <summary>
/// RTMP event handler.
/// </summary>
/// <param name="evt">RTMP event</param>
/// <returns>Timeframe since last notification (or audio or video packet sending).</returns>
public int Add(IRtmpEvent evt)
{
    // Assume a relative timestamp until the first packet of a kind proves otherwise.
    _relative = true;
    int timestamp = evt.Timestamp;
    int tsOut = 0;
    switch (evt.DataType)
    {
        case Constants.TypeAudioData:
            if (_firstAudio)
            {
                // First audio packet reports an absolute timestamp.
                tsOut = evt.Timestamp;
                _relative = false;
                _firstAudio = false;
            }
            else
            {
                // Delta against the previous audio packet.
                tsOut = timestamp - _lastAudio;
                _lastAudio = timestamp;
            }
            break;
        case Constants.TypeVideoData:
            if (_firstVideo)
            {
                tsOut = evt.Timestamp;
                _relative = false;
                _firstVideo = false;
            }
            else
            {
                tsOut = timestamp - _lastVideo;
                _lastVideo = timestamp;
            }
            break;
        case Constants.TypeNotify:
        case Constants.TypeInvoke:
            if (_firstNotify)
            {
                tsOut = evt.Timestamp;
                _relative = false;
                _firstNotify = false;
            }
            else
            {
                tsOut = timestamp - _lastNotify;
                _lastNotify = timestamp;
            }
            break;
        default:
            // ignore other types
            break;
    }
    return tsOut;
}
/// <summary>
/// Writes packet from event data to the RTMP connection.
/// </summary>
/// <param name="message">Event data.</param>
public void Write(IRtmpEvent message)
{
    IClientStream stream = null;
    if (_connection is IStreamCapableConnection)
    {
        stream = ((IStreamCapableConnection)_connection).GetStreamByChannelId(_channelId);
    }
    bool streamMissing = (stream == null) && (_channelId > 3);
    if (streamMissing)
    {
        // Stream doesn't exist any longer, discarding message
        return;
    }
    Write(message, (stream == null) ? 0 : stream.StreamId);
}
/// <summary>
/// Notifies the dropper that a packet was dropped and updates the state
/// machine so that only decodable video frames are sent afterwards.
/// </summary>
/// <param name="message">Dropped message.</param>
public void DropPacket(RtmpMessage message)
{
    VideoData video = message.body as VideoData;
    if (video == null)
    {
        // Only video frames influence the dropper state.
        return;
    }
    FrameType frameType = video.FrameType;
    switch (this._state)
    {
        case FrameDropperState.SEND_ALL:
            // Dropping a disposable interframe is harmless; dropping an
            // interframe or keyframe forces a wait for the next keyframe.
            if ((frameType == FrameType.INTERFRAME) || (frameType == FrameType.KEYFRAME))
            {
                this._state = FrameDropperState.SEND_KEYFRAMES;
            }
            break;
        case FrameDropperState.SEND_INTERFRAMES:
            if (frameType == FrameType.INTERFRAME)
            {
                this._state = FrameDropperState.SEND_KEYFRAMES_CHECK;
            }
            else if (frameType == FrameType.KEYFRAME)
            {
                this._state = FrameDropperState.SEND_KEYFRAMES;
            }
            break;
        case FrameDropperState.SEND_KEYFRAMES_CHECK:
            if (frameType == FrameType.KEYFRAME)
            {
                this._state = FrameDropperState.SEND_KEYFRAMES;
            }
            break;
    }
}
/// <summary>
/// Dispatches an RTMP event, raising the matching NetStream audio or video notification.
/// Non-RTMP events are ignored.
/// </summary>
/// <param name="event">Event to dispatch.</param>
public void DispatchEvent(IEvent @event)
{
    IRtmpEvent rtmpEvent = @event as IRtmpEvent;
    if (rtmpEvent == null)
        return;
    VideoData video = rtmpEvent as VideoData;
    if (video != null)
    {
        RaiseNetStreamVideo(video);
    }
    AudioData audio = rtmpEvent as AudioData;
    if (audio != null)
    {
        RaiseNetStreamAudio(audio);
    }
}
/// <summary>
/// Decodes RTMP message event.
/// </summary>
/// <param name="context">RTMP protocol state.</param>
/// <param name="header">RTMP header.</param>
/// <param name="stream">Buffer to be decoded.</param>
/// <returns>Decoded RTMP event.</returns>
public static IRtmpEvent DecodeMessage(RtmpContext context, RtmpHeader header, ByteBuffer stream)
{
    IRtmpEvent message = null;
    /*
     * if(header.Timer == 0xffffff)
     * {
     *     // Skip first four bytes
     *     byte[] extendedTimestamp = new byte[4];
     *     stream.Read(extendedTimestamp, 0, 4);
     *     log.Warn("Discarding extended timestamp");
     *     //int unknown = stream.ReadInt32();
     * }
     */
    // Dispatch on the RTMP message type id taken from the chunk header.
    switch (header.DataType)
    {
        case Constants.TypeChunkSize:
            message = DecodeChunkSize(stream);
            break;
        case Constants.TypeInvoke:
            message = DecodeInvoke(stream);
            break;
        case Constants.TypeFlexInvoke:
            message = DecodeFlexInvoke(stream);
            break;
        case Constants.TypeNotify:
            // A notify on stream 0 is a plain notification; on a real stream
            // it carries stream metadata.
            if (header.StreamId == 0)
            {
                message = DecodeNotify(stream, header);
            }
            else
            {
                message = DecodeStreamMetadata(stream);
            }
            break;
        case Constants.TypePing:
            message = DecodePing(stream);
            break;
        case Constants.TypeBytesRead:
            message = DecodeBytesRead(stream);
            break;
        case Constants.TypeAudioData:
            message = DecodeAudioData(stream);
            break;
        case Constants.TypeVideoData:
            message = DecodeVideoData(stream);
            break;
        case Constants.TypeSharedObject:
            message = DecodeSharedObject(stream);
            break;
        case Constants.TypeFlexSharedObject:
            message = DecodeFlexSharedObject(stream);
            break;
        case Constants.TypeServerBandwidth:
            message = DecodeServerBW(stream);
            break;
        case Constants.TypeClientBandwidth:
            message = DecodeClientBW(stream);
            break;
        default:
#if !SILVERLIGHT
            log.Warn("Unknown object type: " + header.DataType);
#endif
            // Unknown types are wrapped so the caller can keep decoding.
            message = DecodeUnknown(stream);
            break;
    }
    // Every decoded event carries its header and the header's timer value.
    message.Header = header;
    message.Timestamp = header.Timer;
    return(message);
}
/// <summary>
/// Decodes a RTMP packet.
/// </summary>
/// <param name="context">RTMP protocol state.</param>
/// <param name="stream">Buffer to be decoded.</param>
/// <returns>The decoded RTMP packet.</returns>
public static RtmpPacket DecodePacket(RtmpContext context, ByteBuffer stream)
{
    int remaining = stream.Remaining;
    // We need at least one byte
    if (remaining < 1)
    {
#if !SILVERLIGHT
        if (log.IsDebugEnabled)
        {
            log.Debug(__Res.GetString(__Res.Rtmp_DataBuffering, remaining, 1));
        }
#endif
        // Ask the caller to buffer more data before retrying.
        context.SetBufferDecoding(1);
        return(null);
    }
    int position = (int)stream.Position;
    byte headerByte = stream.Get();
    int headerValue;
    int byteCount;
    // RTMP basic header: low 6 bits == 0 means a 2-byte form,
    // == 1 means a 3-byte form, anything else is the 1-byte form.
    if ((headerByte & 0x3f) == 0)
    {
        // Two byte header
        if (remaining < 2)
        {
            stream.Position = position;
#if !SILVERLIGHT
            if (log.IsDebugEnabled)
            {
                log.Debug(__Res.GetString(__Res.Rtmp_DataBuffering, remaining, 2));
            }
#endif
            context.SetBufferDecoding(2);
            return(null);
        }
        headerValue = ((int)headerByte & 0xff) << 8 | ((int)stream.Get() & 0xff);
        byteCount = 2;
    }
    else if ((headerByte & 0x3f) == 1)
    {
        // Three byte header
        if (remaining < 3)
        {
            stream.Position = position;
#if !SILVERLIGHT
            if (log.IsDebugEnabled)
            {
                log.Debug(__Res.GetString(__Res.Rtmp_DataBuffering, remaining, 3));
            }
#endif
            context.SetBufferDecoding(3);
            return(null);
        }
        headerValue = ((int)headerByte & 0xff) << 16 | ((int)stream.Get() & 0xff) << 8 | ((int)stream.Get() & 0xff);
        byteCount = 3;
    }
    else
    {
        // Single byte header
        headerValue = (int)headerByte & 0xff;
        byteCount = 1;
    }
    byte channelId = DecodeChannelId(headerValue, byteCount);
    // NOTE(review): channelId is a byte, so this check can never be true;
    // invalid ids from DecodeChannelId would pass through unnoticed.
    if (channelId < 0)
    {
        throw new ProtocolException("Bad channel id: " + channelId);
    }
    byte headerSize = DecodeHeaderSize(headerValue, byteCount);
    int headerLength = GetHeaderLength(headerSize);
    headerLength += byteCount - 1;
    //if(headerLength > remaining)
    if (headerLength + byteCount - 1 > remaining)
    {
#if !SILVERLIGHT
        if (log.IsDebugEnabled)
        {
            log.Debug(__Res.GetString(__Res.Rtmp_HeaderBuffering, remaining));
        }
#endif
        // Not enough bytes for the full header yet: rewind and wait for more.
        stream.Position = position;
        //context.SetBufferDecoding(headerLength);
        context.SetBufferDecoding(headerLength + byteCount - 1);
        return(null);
    }
    // Move the position back to the start
    stream.Position = position;
    RtmpHeader header = DecodeHeader(context, context.GetLastReadHeader(channelId), stream);
#if !SILVERLIGHT
    log.Debug("Decoded " + header);
#endif
    if (header == null)
    {
        throw new ProtocolException("Header is null, check for error");
    }
    // Save the header
    context.SetLastReadHeader(channelId, header);
    // Check to see if this is a new packet or continue decoding an existing one.
    RtmpPacket packet = context.GetLastReadPacket(channelId);
    if (packet == null)
    {
        packet = new RtmpPacket(header);
        context.SetLastReadPacket(channelId, packet);
    }
    ByteBuffer buf = packet.Data;
    // An extended timestamp (timer == 0xffffff) adds 4 extra payload bytes.
    int addSize = (header.Timer == 0xffffff ? 4 : 0);
    //int addSize = 0;
    int readRemaining = header.Size + addSize - (int)buf.Position;
    int chunkSize = context.GetReadChunkSize();
    // Read at most one chunk per call.
    int readAmount = (readRemaining > chunkSize) ? chunkSize : readRemaining;
    if (stream.Remaining < readAmount)
    {
#if !SILVERLIGHT
        if (log.IsDebugEnabled)
        {
            log.Debug(__Res.GetString(__Res.Rtmp_ChunkSmall, stream.Remaining, readAmount));
        }
#endif
        //Skip the position back to the start
        stream.Position = position;
        context.SetBufferDecoding(headerLength + readAmount);
        //string path = FluorineFx.Context.FluorineContext.Current.GetFullPath(@"log\chunk.bin");
        //stream.Dump(path);
        return(null);
    }
    //http://osflash.org/pipermail/free_osflash.org/2005-September/000261.html
    //http://www.acmewebworks.com/Downloads/openCS/091305-initialMeeting.txt
    ByteBuffer.Put(buf, stream, readAmount);
    if (buf.Position < header.Size + addSize)
    {
        // Packet body is still incomplete: more chunks are needed.
        context.ContinueDecoding();
        return(null);
    }
    if (buf.Position > header.Size + addSize)
    {
#if !SILVERLIGHT
        log.Warn(string.Format("Packet size expanded from {0} to {1} ({2})", header.Size + addSize, buf.Position, header));
#endif
    }
    buf.Flip();
    try
    {
        IRtmpEvent message = DecodeMessage(context, packet.Header, buf);
        packet.Message = message;
        if (message is ChunkSize)
        {
            // A ChunkSize message changes the chunk size used for reading
            // every subsequent packet on this connection.
            ChunkSize chunkSizeMsg = message as ChunkSize;
            context.SetReadChunkSize(chunkSizeMsg.Size);
        }
    }
    finally
    {
        // The packet is complete; clear the per-channel partial-packet slot.
        context.SetLastReadPacket(channelId, null);
    }
#if !SILVERLIGHT
    if (log.IsDebugEnabled)
    {
        log.Debug("Decoded " + packet.ToString());
    }
#endif
    return(packet);
}
/// <summary>
/// Encodes an RTMP event into a buffer, dispatching on the message type id
/// from the header. Returns null for unknown types.
/// </summary>
/// <param name="context">RTMP protocol state.</param>
/// <param name="header">Header describing the message type.</param>
/// <param name="message">Event to encode.</param>
public static ByteBuffer EncodeMessage(RtmpContext context, RtmpHeader header, IRtmpEvent message)
{
    switch(header.DataType)
    {
        case Constants.TypeChunkSize:
            return EncodeChunkSize(context, message as ChunkSize);
        case Constants.TypeInvoke:
            return EncodeInvoke(context, message as Invoke);
        case Constants.TypeFlexInvoke:
            return EncodeFlexInvoke(context, message as FlexInvoke);
        case Constants.TypeSharedObject:
            return EncodeSharedObject(context, message as ISharedObjectMessage);
        case Constants.TypeFlexSharedObject:
            return EncodeFlexSharedObject(context, message as ISharedObjectMessage);
        case Constants.TypeNotify:
            // A notify without a service call carries stream metadata.
            if ((message as Notify).ServiceCall == null)
            {
                return EncodeStreamMetadata(context, message as Notify);
            }
            else
            {
                return EncodeNotify(context, message as Notify);
            }
        case Constants.TypePing:
            return EncodePing(context, message as Ping);
        case Constants.TypeBytesRead:
            return EncodeBytesRead(context, message as BytesRead);
        case Constants.TypeAudioData:
            return EncodeAudioData(context, message as AudioData);
        case Constants.TypeVideoData:
            return EncodeVideoData(context, message as VideoData);
        case Constants.TypeServerBandwidth:
            return EncodeServerBW(context, message as ServerBW);
        case Constants.TypeClientBandwidth:
            return EncodeClientBW(context, message as ClientBW);
        case Constants.TypeFlexStreamEnd:
            return EncodeFlexStreamSend(context, message as FlexStreamSend);
        default:
#if !SILVERLIGHT
            if( _log.IsErrorEnabled )
                _log.Error("Unknown object type: " + header.DataType);
#endif
            return null;
    }
}
/// <summary>
/// Initializes a new RTMP packet from a header and a message event.
/// </summary>
/// <param name="header">Packet header.</param>
/// <param name="message">Packet message event.</param>
public RtmpPacket(RtmpHeader header, IRtmpEvent message)
{
    _header = header;
    _message = message;
}
/// <summary>
/// Notifies the dropper that a packet was dropped and updates the state
/// machine so only decodable video frames are sent afterwards.
/// </summary>
/// <param name="message">Dropped message.</param>
public void DropPacket(RtmpMessage message)
{
    VideoData video = message.body as VideoData;
    if (video == null)
    {
        // Only check video packets.
        return;
    }
    FrameType type = video.FrameType;
    switch (_state)
    {
        case FrameDropperState.SEND_ALL:
            // A disposable interframe is safe to drop without a state change;
            // dropping an interframe or keyframe means we must wait for the
            // next keyframe before sending again.
            if (type == FrameType.Interframe || type == FrameType.Keyframe)
            {
                _state = FrameDropperState.SEND_KEYFRAMES;
            }
            break;
        case FrameDropperState.SEND_INTERFRAMES:
            if (type == FrameType.Interframe)
            {
                // Drop all frames until the next keyframe.
                _state = FrameDropperState.SEND_KEYFRAMES_CHECK;
            }
            else if (type == FrameType.Keyframe)
            {
                // Drop all frames until the next keyframe.
                _state = FrameDropperState.SEND_KEYFRAMES;
            }
            break;
        case FrameDropperState.SEND_KEYFRAMES:
            // Remain in state.
            break;
        case FrameDropperState.SEND_KEYFRAMES_CHECK:
            if (type == FrameType.Keyframe)
            {
                // Switch back to sending keyframes, but don't move to
                // SEND_INTERFRAMES afterwards.
                _state = FrameDropperState.SEND_KEYFRAMES;
            }
            break;
        default:
            break;
    }
}
/// <summary>
/// Close stream
/// </summary>
public void Close()
{
    lock (this.SyncRoot)
    {
        if (_msgIn != null)
        {
            // Detach from the message source before tearing down state.
            _msgIn.Unsubscribe(this);
            _msgIn = null;
        }
        _playlistSubscriberStream.State = State.CLOSED;
        // Cancel any pending scheduled pushes and release buffered messages.
        ClearWaitJobs();
        ReleasePendingMessage();
        _lastMessage = null;
        SendClearPing();
    }
}
/// <summary>
/// Initializes a new RTMP packet from a header and a message event.
/// </summary>
/// <param name="header">Packet header.</param>
/// <param name="message">Packet message event.</param>
internal RtmpPacket(RtmpHeader header, IRtmpEvent message)
{
    _header = header;
    _message = message;
}
/// <summary>
/// Send RTMP message
/// </summary>
/// <param name="message"></param>
private void SendMessage(RtmpMessage message)
{
    //TDJ / live relative timestamp
    // For play decision 0 with a known stream start, rebase the timestamp
    // relative to the stream start.
    if (_playDecision == 0 && _streamStartTS > 0)
    {
        message.body.Timestamp = message.body.Timestamp - _streamStartTS;
    }
    int ts = message.body.Timestamp;
    if( log.IsDebugEnabled )
        log.Debug(string.Format("SendMessage: streamStartTS={0}, length={1}, streamOffset={2}, timestamp={3}", _streamStartTS, _currentItem.Length, _streamOffset, ts));
    if (_streamStartTS == -1)
    {
        // First message of the stream: record its timestamp as the base and
        // send it out with timestamp 0.
        if (log.IsDebugEnabled)
            log.Debug("SendMessage: resetting streamStartTS");
        _streamStartTS = ts;
        message.body.Timestamp = 0;
    }
    else
    {
        if (_currentItem.Length >= 0)
        {
            // Stop once the configured playback length has been delivered.
            int duration = ts - _streamStartTS;
            if (duration - _streamOffset >= _currentItem.Length)
            {
                // Sent enough data to client
                Stop();
                return;
            }
        }
    }
    _lastMessage = message.body;
    DoPushMessage(message);
}
/// <summary>
/// Writes the event to the connection on the fixed channel 3.
/// </summary>
/// <param name="message">Event data.</param>
public void Write(IRtmpEvent message)
{
    _connection.GetChannel(3).Write(message);
}
/// <summary>
/// Writing events is not supported by this implementation.
/// </summary>
/// <param name="message">Event data (ignored).</param>
/// <exception cref="NotImplementedException">Always thrown.</exception>
public void Write(IRtmpEvent message)
{
    throw new NotImplementedException();
}
/// <summary>
/// Pushes a message to the connected client: resets the timestamper on reset
/// messages, forwards statuses, and re-stamps and routes RTMP stream events to
/// the appropriate data/audio/video channel.
/// </summary>
/// <param name="pipe">Pipe.</param>
/// <param name="message">Message to push.</param>
public void PushMessage(IPipe pipe, IMessage message)
{
    if (message is ResetMessage)
    {
        _timeStamper.Reset();
    }
    else if (message is StatusMessage)
    {
        StatusMessage statusMsg = message as StatusMessage;
        _data.SendStatus(statusMsg.body as StatusASO);
    }
    else if (message is RtmpMessage)
    {
        // Make sure chunk size has been sent
        if (!_chunkSizeSent)
        {
            SendChunkSize();
        }
        RtmpMessage rtmpMsg = message as RtmpMessage;
        IRtmpEvent msg = rtmpMsg.body;
        int eventTime = msg.Timestamp;
#if !SILVERLIGHT
        if (log.IsDebugEnabled)
        {
            log.Debug(string.Format("Message timestamp: {0}", eventTime));
        }
#endif
        if (eventTime < 0)
        {
#if !SILVERLIGHT
            if (log.IsDebugEnabled)
            {
                log.Debug(string.Format("Message has negative timestamp: {0}", eventTime));
            }
#endif
            // Negative timestamps are dropped rather than confusing the client.
            return;
        }
        byte dataType = msg.DataType;
        // Create a new header for the consumer
        RtmpHeader header = _timeStamper.GetTimeStamp(dataType, eventTime);
        // Wrap the event in a fresh instance carrying the consumer's header and
        // timestamp, then route it by type.
        switch (msg.DataType)
        {
            case Constants.TypeStreamMetadata:
                Notify notify = new Notify((msg as Notify).Data);
                notify.Header = header;
                notify.Timestamp = header.Timer;
                _data.Write(notify);
                break;
            case Constants.TypeFlexStreamEnd:
                // TODO: okay to send this also to AMF0 clients?
                FlexStreamSend send = new FlexStreamSend((msg as Notify).Data);
                send.Header = header;
                send.Timestamp = header.Timer;
                _data.Write(send);
                break;
            case Constants.TypeVideoData:
                VideoData videoData = new VideoData((msg as VideoData).Data);
                videoData.Header = header;
                videoData.Timestamp = header.Timer;
                _video.Write(videoData);
                break;
            case Constants.TypeAudioData:
                AudioData audioData = new AudioData((msg as AudioData).Data);
                audioData.Header = header;
                audioData.Timestamp = header.Timer;
                _audio.Write(audioData);
                break;
            case Constants.TypePing:
                Ping ping = new Ping((msg as Ping).PingType, (msg as Ping).Value2, (msg as Ping).Value3, (msg as Ping).Value4);
                ping.Header = header;
                _connection.Ping(ping);
                break;
            case Constants.TypeBytesRead:
                BytesRead bytesRead = new BytesRead((msg as BytesRead).Bytes);
                bytesRead.Header = header;
                bytesRead.Timestamp = header.Timer;
                _connection.GetChannel((byte)2).Write(bytesRead);
                break;
            default:
                // Anything else goes out unchanged on the data channel.
                _data.Write(msg);
                break;
        }
    }
}
/// <summary>
/// Encodes an RTMP event into a buffer, dispatching on the message type id
/// from the header. Returns null for unknown types.
/// </summary>
/// <param name="context">RTMP protocol state.</param>
/// <param name="header">Header describing the message type.</param>
/// <param name="message">Event to encode.</param>
public static ByteBuffer EncodeMessage(RtmpContext context, RtmpHeader header, IRtmpEvent message)
{
    switch (header.DataType)
    {
        case Constants.TypeChunkSize:
            return EncodeChunkSize(context, message as ChunkSize);
        case Constants.TypeInvoke:
            return EncodeInvoke(context, message as Invoke);
        case Constants.TypeFlexInvoke:
            return EncodeFlexInvoke(context, message as FlexInvoke);
        case Constants.TypeSharedObject:
            return EncodeSharedObject(context, message as ISharedObjectMessage);
        case Constants.TypeFlexSharedObject:
            return EncodeFlexSharedObject(context, message as ISharedObjectMessage);
        case Constants.TypeNotify:
            // A notify without a service call carries stream metadata.
            return ((message as Notify).ServiceCall == null)
                ? EncodeStreamMetadata(context, message as Notify)
                : EncodeNotify(context, message as Notify);
        case Constants.TypePing:
            return EncodePing(context, message as Ping);
        case Constants.TypeBytesRead:
            return EncodeBytesRead(context, message as BytesRead);
        case Constants.TypeAudioData:
            return EncodeAudioData(context, message as AudioData);
        case Constants.TypeVideoData:
            return EncodeVideoData(context, message as VideoData);
        case Constants.TypeServerBandwidth:
            return EncodeServerBW(context, message as ServerBW);
        case Constants.TypeClientBandwidth:
            return EncodeClientBW(context, message as ClientBW);
        case Constants.TypeFlexStreamEnd:
            return EncodeFlexStreamSend(context, message as FlexStreamSend);
        default:
#if !SILVERLIGHT
            if (_log.IsErrorEnabled)
            {
                _log.Error("Unknown object type: " + header.DataType);
            }
#endif
            return null;
    }
}
/// <summary>
/// Message recieved.
/// </summary>
/// <param name="connection">Connection object.</param>
/// <param name="obj">Message object.</param>
public void MessageReceived(RtmpConnection connection, object obj)
{
    IRtmpEvent message = null;
    RtmpPacket packet = null;
    RtmpHeader header = null;
    RtmpChannel channel = null;
    IClientStream stream = null;
    try
    {
        packet = obj as RtmpPacket;
        message = packet.Message;
        header = packet.Header;
        channel = connection.GetChannel(header.ChannelId);
        if (connection is IStreamCapableConnection)
        {
            stream = (connection as IStreamCapableConnection).GetStreamById(header.StreamId);
        }
        // Support stream ids
#if !SILVERLIGHT
        FluorineContext.ValidateContext();
        FluorineContext.Current.Connection.SetAttribute(FluorineContext.FluorineStreamIdKey, header.StreamId);
#endif
        // Increase number of received messages
        connection.MessageReceived();
#if !SILVERLIGHT
        if (log != null && log.IsDebugEnabled)
        {
            log.Debug("RtmpConnection message received, type = " + header.DataType);
        }
#endif
        if (message != null)
        {
            message.Source = connection;
        }
        // Route the event to the matching handler by message type.
        switch (header.DataType)
        {
            case Constants.TypeInvoke:
                OnInvoke(connection, channel, header, message as Invoke);
                // A "publish" invoke on a stream is also dispatched to the stream.
                if (message.Header.StreamId != 0 && (message as Invoke).ServiceCall.ServiceName == null && (message as Invoke).ServiceCall.ServiceMethodName == BaseRtmpHandler.ACTION_PUBLISH)
                {
                    if (stream != null) //Dispatch if stream was created
                    {
                        (stream as IEventDispatcher).DispatchEvent(message);
                    }
                }
                break;
            case Constants.TypeFlexInvoke:
                OnFlexInvoke(connection, channel, header, message as FlexInvoke);
                // NOTE(review): this branch casts the message to Invoke; it only
                // works if FlexInvoke derives from Invoke — confirm in the event
                // type hierarchy.
                if (message.Header.StreamId != 0 && (message as Invoke).ServiceCall.ServiceName == null && (message as Invoke).ServiceCall.ServiceMethodName == BaseRtmpHandler.ACTION_PUBLISH)
                {
                    if (stream != null) //Dispatch if stream was created
                    {
                        (stream as IEventDispatcher).DispatchEvent(message);
                    }
                }
                break;
            case Constants.TypeNotify:
                // just like invoke, but does not return
                if ((message as Notify).Data != null && stream != null)
                {
                    // Stream metadata
                    (stream as IEventDispatcher).DispatchEvent(message);
                }
                else
                {
                    OnInvoke(connection, channel, header, message as Notify);
                }
                break;
            case Constants.TypePing:
                OnPing(connection, channel, header, message as Ping);
                break;
            case Constants.TypeBytesRead:
                OnStreamBytesRead(connection, channel, header, message as BytesRead);
                break;
            case Constants.TypeSharedObject:
            case Constants.TypeFlexSharedObject:
                OnSharedObject(connection, channel, header, message as SharedObjectMessage);
                break;
            case Constants.TypeFlexStreamEnd:
                if (stream != null)
                {
                    (stream as IEventDispatcher).DispatchEvent(message);
                }
                break;
            case Constants.TypeChunkSize:
                OnChunkSize(connection, channel, header, message as ChunkSize);
                break;
            case Constants.TypeAudioData:
            case Constants.TypeVideoData:
                // NOTE: If we respond to "publish" with "NetStream.Publish.BadName",
                // the client sends a few stream packets before stopping. We need to
                // ignore them.
                if (stream != null)
                {
                    ((IEventDispatcher)stream).DispatchEvent(message);
                }
                break;
            case Constants.TypeServerBandwidth:
                OnServerBW(connection, channel, header, message as ServerBW);
                break;
            case Constants.TypeClientBandwidth:
                OnClientBW(connection, channel, header, message as ClientBW);
                break;
            default:
#if !SILVERLIGHT
                if (log != null && log.IsDebugEnabled)
                {
                    log.Debug("RtmpService event not handled: " + header.DataType);
                }
#endif
                break;
        }
    }
    catch (Exception ex)
    {
        // A handler failure is logged with the offending packet; the
        // connection itself is kept alive.
#if !SILVERLIGHT
        if (log.IsErrorEnabled)
        {
            log.Error(__Res.GetString(__Res.Rtmp_HandlerError), ex);
            log.Error(__Res.GetString(__Res.Error_ContextDump));
            //log.Error(Environment.NewLine);
            log.Error(packet);
        }
#endif
    }
}
/// <summary>
/// Decides whether a packet may be sent in the current dropper state.
/// Non-video packets are always sent; video frames are filtered by the state
/// machine, which advances as the client's pending backlog drains.
/// </summary>
/// <param name="message">Message to check.</param>
/// <param name="pending">Pending amount for the client.</param>
public bool CanSendPacket(RtmpMessage message, long pending)
{
    VideoData video = message.body as VideoData;
    if (video == null)
    {
        // We currently only drop video packets.
        return true;
    }
    FrameType type = video.FrameType;
    switch (_state)
    {
        case FrameDropperState.SEND_ALL:
            // All packets will be sent.
            return true;
        case FrameDropperState.SEND_INTERFRAMES:
            // Only keyframes and interframes will be sent.
            if (type == FrameType.Keyframe)
            {
                if (pending == 0)
                {
                    // Send all frames from now on.
                    _state = FrameDropperState.SEND_ALL;
                }
                return true;
            }
            return type == FrameType.Interframe;
        case FrameDropperState.SEND_KEYFRAMES:
            // Only keyframes will be sent.
            if (type == FrameType.Keyframe && pending == 0)
            {
                // Maybe switch back to SEND_INTERFRAMES after the next keyframe.
                _state = FrameDropperState.SEND_KEYFRAMES_CHECK;
            }
            return type == FrameType.Keyframe;
        case FrameDropperState.SEND_KEYFRAMES_CHECK:
            // Only keyframes will be sent.
            if (type == FrameType.Keyframe && pending == 0)
            {
                // Continue with sending interframes as well.
                _state = FrameDropperState.SEND_INTERFRAMES;
            }
            return type == FrameType.Keyframe;
        default:
            return false;
    }
}
/// <summary>
/// Dispatches an RTMP event to the live pipe and notifies stream listeners.
/// Non-RTMP events are ignored, as is everything while no live pipe exists.
/// </summary>
/// <param name="event">The event to dispatch.</param>
public void DispatchEvent(IEvent @event) {
    if (!(@event is IRtmpEvent)) {
        return;
    }
    IRtmpEvent rtmpEvent = @event as IRtmpEvent;
    if (_livePipe == null) {
        return;
    }
    RtmpMessage msg = new RtmpMessage();
    msg.body = rtmpEvent;
    // Remember the timestamp of the first event as the creation time.
    if (_creationTime == -1) {
        _creationTime = rtmpEvent.Timestamp;
    }
    try {
        // FIX: the original dereferenced _codecInfo unguarded in some branches
        // (e.g. _codecInfo.VideoCodec and the HasAudio update) while checking
        // it for null in others; guard consistently to avoid a
        // NullReferenceException when no codec info is attached.
        StreamCodecInfo codecInfo = _codecInfo as StreamCodecInfo;
        if (@event is AudioData) {
            if (codecInfo != null) {
                codecInfo.HasAudio = true;
            }
        } else if (@event is VideoData) {
            IVideoStreamCodec videoStreamCodec = null;
            if (_codecInfo != null) {
                if (_codecInfo.VideoCodec == null) {
                    // First video packet: detect the codec from the payload.
                    if (_videoCodecFactory != null) {
                        videoStreamCodec = _videoCodecFactory.GetVideoCodec((@event as VideoData).Data);
                        if (codecInfo != null) {
                            codecInfo.VideoCodec = videoStreamCodec;
                        }
                    }
                } else {
                    videoStreamCodec = _codecInfo.VideoCodec;
                }
            }
            if (videoStreamCodec != null) {
                // Let the codec inspect the frame data.
                videoStreamCodec.AddData((rtmpEvent as VideoData).Data);
            }
            if (codecInfo != null) {
                codecInfo.HasVideo = true;
            }
        }
        _livePipe.PushMessage(msg);
        // Notify listeners about received packet
        if (rtmpEvent is IStreamPacket) {
            foreach (IStreamListener listener in GetStreamListeners()) {
                try {
                    listener.PacketReceived(this, rtmpEvent as IStreamPacket);
                } catch (Exception ex) {
                    // A failing listener must not prevent notifying the others.
                    log.Error("Error while notifying listener " + listener, ex);
                }
            }
        }
    } catch (Exception ex) {
        // ignore
        log.Error("DispatchEvent exception", ex);
    }
}
/// <summary>
/// Seek position in file.
/// Resets bandwidth buckets and pending messages, notifies the client of the
/// new position, and (when paused/stopped) pushes a single keyframe snapshot.
/// </summary>
/// <param name="position">Desired position in milliseconds.</param>
public void Seek(int position) {
    lock (this.SyncRoot) {
        // Seeking is only permitted while playing, paused or stopped.
        if (_playlistSubscriberStream.State != State.PLAYING && _playlistSubscriberStream.State != State.PAUSED && _playlistSubscriberStream.State != State.STOPPED) {
            throw new IllegalStateException();
        }
        // Only pull-mode (VOD) streams can be seeked.
        if (!_isPullMode) {
            throw new NotSupportedException();
        }
        // Drop anything queued and reset the bandwidth accounting.
        ReleasePendingMessage();
        ClearWaitJobs();
        _bwController.ResetBuckets(_bwContext);
        _waitingForToken = false;
        SendClearPing();
        SendReset();
        SendSeekStatus(_currentItem, position);
        SendStartStatus(_currentItem);
        int seekPos = SendVODSeekCM(_msgIn, position);
        // We seeked to the nearest keyframe so use real timestamp now
        if (seekPos == -1) {
            seekPos = position;
        }
        // Shift the playback-start reference so timestamp math stays consistent.
        _playbackStart = System.Environment.TickCount - seekPos;
        _playlistSubscriberStream.NotifyItemSeek(_currentItem, seekPos);
        bool messageSent = false;
        bool startPullPushThread = false;
        if ((_playlistSubscriberStream.State == State.PAUSED || _playlistSubscriberStream.State == State.STOPPED) && SendCheckVideoCM(_msgIn)) {
            // we send a single snapshot on pause.
            // XXX we need to take BWC into account, for
            // now send forcefully.
            IMessage msg;
            try {
                msg = _msgIn.PullMessage();
            } catch (Exception ex) {
                log.Error("Error while pulling message.", ex);
                msg = null;
            }
            // Scan forward for the next video keyframe and push only that frame.
            while (msg != null) {
                if (msg is RtmpMessage) {
                    RtmpMessage rtmpMessage = (RtmpMessage)msg;
                    IRtmpEvent body = rtmpMessage.body;
                    if (body is VideoData && ((VideoData)body).FrameType == FrameType.Keyframe) {
                        // Stamp the keyframe with the seek position before sending.
                        body.Timestamp = seekPos;
                        DoPushMessage(rtmpMessage);
                        //rtmpMessage.body.Release();
                        messageSent = true;
                        _lastMessage = body;
                        break;
                    }
                }
                try {
                    msg = _msgIn.PullMessage();
                } catch (Exception ex) {
                    log.Error("Error while pulling message.", ex);
                    msg = null;
                }
            }
        } else {
            startPullPushThread = true;
        }
        if (!messageSent) {
            // Send blank audio packet to notify client about new position
            AudioData audio = new AudioData();
            audio.Timestamp = seekPos;
            audio.Header = new RtmpHeader();
            audio.Header.Timer = seekPos;
            audio.Header.IsTimerRelative = false;
            RtmpMessage audioMessage = new RtmpMessage();
            audioMessage.body = audio;
            _lastMessage = audio;
            DoPushMessage(audioMessage);
        }
        if (startPullPushThread) {
            EnsurePullAndPushRunning();
        }
        // A seek beyond the item length ends playback.
        if (_playlistSubscriberStream.State != State.STOPPED && _currentItem.Length >= 0 && (position - _streamOffset) >= _currentItem.Length) {
            // Seeked after end of stream
            Stop();
            return;
        }
    }
}
/// <summary>
/// Encodes the body of an RTMP event into a buffer, dispatching on the
/// header's data type to the matching type-specific encoder.
/// </summary>
/// <param name="context">RTMP protocol context.</param>
/// <param name="header">Header carrying the data type to dispatch on.</param>
/// <param name="message">The event payload to encode.</param>
/// <returns>The encoded message, or <c>null</c> for an unknown data type.</returns>
public static ByteBuffer EncodeMessage(RtmpContext context, RtmpHeader header, IRtmpEvent message) {
    switch (header.DataType) {
        case 1: // chunk size
            return(EncodeChunkSize(context, message as ChunkSize));
        case 3: // bytes read
            return(EncodeBytesRead(context, message as BytesRead));
        case 4: // ping / user control
            return(EncodePing(context, message as Ping));
        case 5: // server bandwidth
            return(EncodeServerBW(context, message as ServerBW));
        case 6: // client bandwidth
            return(EncodeClientBW(context, message as ClientBW));
        case 8: // audio data
            return(EncodeAudioData(context, message as AudioData));
        case 9: // video data
            return(EncodeVideoData(context, message as VideoData));
        case 15: // Flex stream send
            return(EncodeFlexStreamSend(context, message as FlexStreamSend));
        case 0x10: // Flex shared object
            return(EncodeFlexSharedObject(context, message as ISharedObjectMessage));
        case 0x11: // Flex invoke
            return(EncodeFlexInvoke(context, message as FlexInvoke));
        case 0x12: // notify: with a service call it's a Notify, otherwise stream metadata
            if ((message as Notify).ServiceCall != null) {
                return(EncodeNotify(context, message as Notify));
            }
            return(EncodeStreamMetadata(context, message as Notify));
        case 0x13: // shared object
            return(EncodeSharedObject(context, message as ISharedObjectMessage));
        case 20: // invoke
            return(EncodeInvoke(context, message as Invoke));
    }
    // FIX: the raw accessor call _log.get_IsErrorEnabled() (decompiler
    // artifact) does not compile against the IsErrorEnabled property.
    if (_log.IsErrorEnabled) {
        _log.Error("Unknown object type: " + header.DataType);
    }
    return(null);
}
/// <summary>
/// Check if it's okay to send the client more data. This takes the configured
/// bandwidth as well as the requested client buffer into account.
/// </summary>
/// <param name="message">The stream-data message that is about to be sent.</param>
/// <returns><c>true</c> if the message may be sent now, otherwise <c>false</c>.</returns>
/// <exception cref="ApplicationException">If the message is not IStreamData.</exception>
private bool OkayToSendMessage(IRtmpEvent message) {
    if (!(message is IStreamData)) {
        string itemName = "Undefined";
        //If current item exists get the name to help debug this issue
        if (_currentItem != null) {
            itemName = _currentItem.Name;
        }
        // FIX: the original concatenated the type name onto the format string
        // and supplied only two arguments for three placeholders, which made
        // string.Format throw a FormatException instead of producing the
        // intended diagnostic message.
        throw new ApplicationException(string.Format("Expected IStreamData but got {0} (type {1}) for {2}", message.GetType(), message.EventType, itemName));
    }
    long now = System.Environment.TickCount;
    // Check client buffer length when we've already sent some messages
    if (_lastMessage != null) {
        // Duration the stream is playing / playback duration
        long delta = now - _playbackStart;
        // Buffer size as requested by the client
        long buffer = _playlistSubscriberStream.ClientBufferDuration;
        // Expected amount of data present in client buffer
        long buffered = _lastMessage.Timestamp - delta;
        if (log.IsDebugEnabled) {
            log.Debug(string.Format("OkayToSendMessage timestamp {0} delta {1} buffered {2} buffer {3}", _lastMessage.Timestamp, delta, buffered, buffer));
        }
        // This sends double the size of the client buffer
        if (buffer > 0 && buffered > (buffer * 2)) {
            // Client is likely to have enough data in the buffer
            return false;
        }
    }
    long pending = GetPendingMessagesCount();
    if (_bufferCheckInterval > 0 && now >= _nextCheckBufferUnderrun) {
        if (pending > _underrunTrigger) {
            // Client is playing behind speed, notify him
            SendInsufficientBandwidthStatus(_currentItem);
        }
        _nextCheckBufferUnderrun = now + _bufferCheckInterval;
    }
    if (pending > _underrunTrigger) {
        // Too many messages already queued on the connection
        return false;
    }
    ByteBuffer ioBuffer = ((IStreamData)message).Data;
    if (ioBuffer != null) {
        int size = ioBuffer.Limit;
        if (message is VideoData) {
            // Acquire bandwidth tokens for the payload; when none are
            // available, remember that we are waiting and back off.
            if (_checkBandwidth && !_videoBucket.AcquireTokenNonblocking(size, this)) {
                _waitingForToken = true;
                return false;
            }
        } else if (message is AudioData) {
            if (_checkBandwidth && !_audioBucket.AcquireTokenNonblocking(size, this)) {
                _waitingForToken = true;
                return false;
            }
        }
    }
    return true;
}
/// <summary>
/// Encodes a complete RTMP packet: the channel header followed by the message
/// body split into chunks, with a continuation header before each chunk after
/// the first.
/// </summary>
/// <param name="context">RTMP protocol context holding per-channel state.</param>
/// <param name="packet">The packet to encode.</param>
/// <returns>A flipped buffer ready to be written to the wire.</returns>
public static ByteBuffer EncodePacket(RtmpContext context, RtmpPacket packet) {
    RtmpHeader header = packet.Header;
    int channelId = header.ChannelId;
    IRtmpEvent message = packet.Message;
    // A chunk-size message changes how all subsequent packets are chunked.
    if (message is ChunkSize) {
        context.SetWriteChunkSize(((ChunkSize)message).Size);
    }
    ByteBuffer payload = EncodeMessage(context, header, message);
    if (payload.Position != 0) {
        payload.Flip();
    } else {
        payload.Rewind();
    }
    header.Size = (int)payload.Limit;
    RtmpHeader previousHeader = context.GetLastWriteHeader(channelId);
    int headerLength = CalculateHeaderSize(header, previousHeader);
    context.SetLastWriteHeader(channelId, header);
    context.SetLastWritePacket(channelId, packet);
    int chunkSize = context.GetWriteChunkSize();
    // Continuation headers are 1..3 bytes depending on the channel id.
    int continueHeaderLength;
    if (header.ChannelId > 320) {
        continueHeaderLength = 3;
    } else if (header.ChannelId > 63) {
        continueHeaderLength = 2;
    } else {
        continueHeaderLength = 1;
    }
    int chunkCount = (int)Math.Ceiling(header.Size / (float)chunkSize);
    int totalLength = (int)header.Size + headerLength + (chunkCount > 0 ? (chunkCount - 1) * continueHeaderLength : 0);
    ByteBuffer result = ByteBuffer.Allocate(totalLength);
    EncodeHeader(header, previousHeader, result);
    if (chunkCount == 1) {
        // Payload fits into a single chunk: one straight copy suffices.
        ByteBuffer.Put(result, payload, result.Remaining);
    } else {
        // Emit full-size chunks, each followed by a continuation header,
        // then the final (possibly short) chunk fills the remainder.
        for (int chunk = 0; chunk < chunkCount - 1; chunk++) {
            ByteBuffer.Put(result, payload, chunkSize);
            EncodeHeaderByte(result, (byte)HeaderType.HeaderContinue, header.ChannelId);
        }
        ByteBuffer.Put(result, payload, result.Remaining);
    }
    result.Flip();
    return(result);
}
/// <summary>
/// Dispatches an incoming broadcast event: maintains per-channel timestamp
/// accumulators (audio/video/data, relative vs. absolute), updates codec
/// info, then pushes the event through the live and record pipes and
/// notifies stream listeners.
/// </summary>
/// <param name="evt">The event to dispatch; non-RTMP events are ignored.</param>
public void DispatchEvent(IEvent evt) {
    // Ignore non-RTMP, non-stream events, and everything once closed.
    if (!(evt is IRtmpEvent) && (evt.EventType != EventType.STREAM_CONTROL) && (evt.EventType != EventType.STREAM_DATA) || _closed) {
        // ignored event
        if (log.IsDebugEnabled) {
            log.Debug("DispatchEvent: " + evt.EventType);
        }
        return;
    }
    // Get stream codec
    IStreamCodecInfo codecInfo = this.CodecInfo;
    StreamCodecInfo info = null;
    if (codecInfo is StreamCodecInfo) {
        info = codecInfo as StreamCodecInfo;
    }
    IRtmpEvent rtmpEvent = evt as IRtmpEvent;
    if (rtmpEvent == null) {
        if (log.IsDebugEnabled) {
            log.Debug("IRtmpEvent expected in event dispatch");
        }
        return;
    }
    int eventTime = -1;
    // If this is first packet save it's timestamp
    if (_firstPacketTime == -1) {
        _firstPacketTime = rtmpEvent.Timestamp;
        if (log.IsDebugEnabled) {
            log.Debug(string.Format("CBS: {0} firstPacketTime={1} {2}", this.Name, _firstPacketTime, rtmpEvent.Header.IsTimerRelative ? "(rel)" : "(abs)"));
        }
    }
    // Track total bytes received for stream statistics.
    if (rtmpEvent is IStreamData && (rtmpEvent as IStreamData).Data != null) {
        _bytesReceived += (rtmpEvent as IStreamData).Data.Limit;
    }
    if (rtmpEvent is AudioData) {
        if (info != null) {
            info.HasAudio = true;
        }
        // Relative timestamps accumulate; absolute ones replace the running time.
        if (rtmpEvent.Header.IsTimerRelative) {
            if (_audioTime == 0) {
                log.Warn(string.Format("First Audio timestamp is relative! {0}", rtmpEvent.Timestamp));
            }
            _audioTime += rtmpEvent.Timestamp;
        } else {
            _audioTime = rtmpEvent.Timestamp;
        }
        eventTime = _audioTime;
    } else if (rtmpEvent is VideoData) {
        IVideoStreamCodec videoStreamCodec = null;
        // Detect the video codec from the first video packet only.
        if (_videoCodecFactory != null && _checkVideoCodec) {
            videoStreamCodec = _videoCodecFactory.GetVideoCodec((rtmpEvent as VideoData).Data);
            if (codecInfo is StreamCodecInfo) {
                (codecInfo as StreamCodecInfo).VideoCodec = videoStreamCodec;
            }
            _checkVideoCodec = false;
        } else if (codecInfo != null) {
            videoStreamCodec = codecInfo.VideoCodec;
        }
        if (videoStreamCodec != null) {
            videoStreamCodec.AddData((rtmpEvent as VideoData).Data);
        }
        if (info != null) {
            info.HasVideo = true;
        }
        if (rtmpEvent.Header.IsTimerRelative) {
            if (_videoTime == 0) {
                log.Warn(string.Format("First Video timestamp is relative! {0}", rtmpEvent.Timestamp));
            }
            _videoTime += rtmpEvent.Timestamp;
        } else {
            _videoTime = rtmpEvent.Timestamp;
            // Flash player may send first VideoData with old-absolute timestamp.
            // This ruins the stream's timebase in FileConsumer.
            // We don't want to discard the packet, as it may be a video keyframe.
            // Generally a Data or Audio packet has set the timebase to a reasonable value,
            // Eventually a new/correct absolute time will come on the video channel.
            // We could put this logic between livePipe and filePipe;
            // This would work for Audio Data as well, but have not seen the need.
            int cts = Math.Max(_audioTime, _dataTime);
            cts = Math.Max(cts, _minStreamTime);
            int fudge = 20;
            // Accept some slightly (20ms) retro timestamps [this may not be needed,
            // the publish Data should strictly precede the video data]
            if (_videoTime + fudge < cts) {
                if (log.IsDebugEnabled) {
                    log.Debug(string.Format("DispatchEvent: adjust archaic videoTime, from: {0} to {1}", _videoTime, cts));
                }
                _videoTime = cts;
            }
        }
        eventTime = _videoTime;
    } else if (rtmpEvent is Invoke) {
        if (rtmpEvent.Header.IsTimerRelative) {
            if (_dataTime == 0) {
                log.Warn(string.Format("First data [Invoke] timestamp is relative! {0}", rtmpEvent.Timestamp));
            }
            _dataTime += rtmpEvent.Timestamp;
        } else {
            _dataTime = rtmpEvent.Timestamp;
        }
        // Invoke events are not forwarded to the pipes.
        return;
    } else if (rtmpEvent is Notify) {
        if (rtmpEvent.Header.IsTimerRelative) {
            if (_dataTime == 0) {
                log.Warn(string.Format("First data [Notify] timestamp is relative! {0}", rtmpEvent.Timestamp));
            }
            _dataTime += rtmpEvent.Timestamp;
        } else {
            _dataTime = rtmpEvent.Timestamp;
        }
        eventTime = _dataTime;
    }
    // Notify event listeners
    CheckSendNotifications(evt);
    // Create new RTMP message, initialize it and push through pipe
    FluorineFx.Messaging.Rtmp.Stream.Messages.RtmpMessage msg = new FluorineFx.Messaging.Rtmp.Stream.Messages.RtmpMessage();
    msg.body = rtmpEvent;
    msg.body.Timestamp = eventTime;
    try {
        if (_livePipe != null) {
            _livePipe.PushMessage(msg);
        }
        if (_recordPipe != null) {
            _recordPipe.PushMessage(msg);
        }
    } catch (System.IO.IOException ex) {
        // A write failure while recording aborts the whole stream.
        SendRecordFailedNotify(ex.Message);
        Stop();
    }
    // Notify listeners about received packet
    if (rtmpEvent is IStreamPacket) {
        foreach (IStreamListener listener in GetStreamListeners()) {
            try {
                listener.PacketReceived(this, rtmpEvent as IStreamPacket);
            } catch (Exception ex) {
                log.Error(string.Format("Error while notifying listener {0}", listener), ex);
            }
        }
    }
}
/// <summary>
/// Pushes a message towards the subscriber, applying frame dropping and
/// bandwidth throttling for video, pause/mute handling for audio, and
/// byte accounting before handing the message to the output.
/// </summary>
/// <param name="pipe">The pipe the message arrived on.</param>
/// <param name="message">The message to push.</param>
public void PushMessage(IPipe pipe, IMessage message) {
    lock (this.SyncRoot) {
        if (message is ResetMessage) {
            SendReset();
            return;
        }
        if (message is RtmpMessage) {
            RtmpMessage rtmpMessage = (RtmpMessage)message;
            IRtmpEvent body = rtmpMessage.body;
            if (!(body is IStreamData)) {
                throw new ApplicationException("expected IStreamData but got " + body.GetType().FullName);
            }
            int size = ((IStreamData)body).Data.Limit;
            if (body is VideoData) {
                // Look up the broadcast stream's video codec to find out
                // whether frames may be dropped for this codec.
                IVideoStreamCodec videoCodec = null;
                if (_msgIn is IBroadcastScope) {
                    IClientBroadcastStream stream = ((IBroadcastScope)_msgIn).GetAttribute(Constants.BroadcastScopeStreamAttribute) as IClientBroadcastStream;
                    if (stream != null && stream.CodecInfo != null) {
                        videoCodec = stream.CodecInfo.VideoCodec;
                    }
                }
                if (videoCodec == null || videoCodec.CanDropFrames) {
                    if (_playlistSubscriberStream.State == State.PAUSED) {
                        // The subscriber paused the video
                        _videoFrameDropper.DropPacket(rtmpMessage);
                        return;
                    }
                    // Only check for frame dropping if the codec supports it
                    long pendingVideos = GetPendingVideoMessageCount();
                    if (!_videoFrameDropper.CanSendPacket(rtmpMessage, pendingVideos)) {
                        // Drop frame as it depends on other frames that were dropped before.
                        return;
                    }
                    bool drop = !_videoBucket.AcquireToken(size, 0);
                    if (!_receiveVideo || drop) {
                        // The client disabled video or the app doesn't have enough bandwidth
                        // allowed for this stream.
                        _videoFrameDropper.DropPacket(rtmpMessage);
                        return;
                    }
                    long[] writeDelta = GetWriteDelta();
                    if (pendingVideos > 1 /*|| writeDelta[0] > writeDelta[1]*/) {
                        // We drop because the client has insufficient bandwidth.
                        long now = System.Environment.TickCount;
                        if (_bufferCheckInterval > 0 && now >= _nextCheckBufferUnderrun) {
                            // Notify client about frame dropping (keyframe)
                            SendInsufficientBandwidthStatus(_currentItem);
                            _nextCheckBufferUnderrun = now + _bufferCheckInterval;
                        }
                        _videoFrameDropper.DropPacket(rtmpMessage);
                        return;
                    }
                    _videoFrameDropper.SendPacket(rtmpMessage);
                }
            } else if (body is AudioData) {
                if (!_receiveAudio && _sendBlankAudio) {
                    // Send blank audio packet to reset player
                    _sendBlankAudio = false;
                    body = new AudioData();
                    // Reuse the last message's timestamp so the timeline stays monotonic.
                    if (_lastMessage != null) {
                        body.Timestamp = _lastMessage.Timestamp;
                    } else {
                        body.Timestamp = 0;
                    }
                    rtmpMessage.body = body;
                } else if (_playlistSubscriberStream.State == State.PAUSED || !_receiveAudio || !_audioBucket.AcquireToken(size, 0)) {
                    // Paused, muted, or out of audio bandwidth: silently drop.
                    return;
                }
            }
            // Byte accounting for everything that actually gets sent.
            if (body is IStreamData && ((IStreamData)body).Data != null) {
                _bytesSent += ((IStreamData)body).Data.Limit;
            }
            _lastMessage = body;
        }
        _msgOut.PushMessage(message);
    }
}