Example #1
 protected override MessageBase CopyImpl(MessageBase clone)
 {
     // Instantiate the clone, if a derived type hasn't already.
     if (clone == null)
     {
         clone = new RtmpMessage();
     }
     return base.CopyImpl(clone);
 }
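
The override above follows the MessageBase cloning chain: the most-derived class instantiates the clone and every base class then copies its own fields into it. A minimal, self-contained sketch of that pattern, using stand-in types rather than the library's actual MessageBase:

using System;

// Stand-in for the library's MessageBase; only the cloning chain is modeled here.
abstract class MessageBaseSketch {
    public string MessageId = Guid.NewGuid().ToString();

    public MessageBaseSketch Copy() {
        // The most-derived CopyImpl creates the instance; every level copies its own state.
        return CopyImpl(null);
    }

    protected virtual MessageBaseSketch CopyImpl(MessageBaseSketch clone) {
        clone.MessageId = MessageId;
        return clone;
    }
}

class RtmpMessageSketch : MessageBaseSketch {
    protected override MessageBaseSketch CopyImpl(MessageBaseSketch clone) {
        // Instantiate the clone only if a more-derived type hasn't done so already.
        if (clone == null)
            clone = new RtmpMessageSketch();
        return base.CopyImpl(clone);
    }
}

static class CopyDemo {
    static void Main() {
        var original = new RtmpMessageSketch();
        var copy = (RtmpMessageSketch)original.Copy();
        Console.WriteLine(copy.MessageId == original.MessageId); // True
    }
}
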
Example #2
        /// <summary>
        /// Push message through pipe.
        /// Synchronize this method to avoid FLV corruption from abrupt disconnection.
        /// </summary>
        /// <param name="pipe">Pipe.</param>
        /// <param name="message">Message to push.</param>
        public void PushMessage(IPipe pipe, IMessage message)
        {
            lock (this.SyncRoot) {
                if (message is ResetMessage)
                {
                    _startTimestamp = -1;
                    _offset        += _lastTimestamp;
                    return;
                }
                else if (message is StatusMessage)
                {
                    return;
                }
                if (!(message is RtmpMessage))
                {
                    return;
                }

                if (_writer == null)
                {
                    Init();
                }
                GodLesZ.Library.Amf.Messaging.Rtmp.Stream.Messages.RtmpMessage rtmpMsg = message as GodLesZ.Library.Amf.Messaging.Rtmp.Stream.Messages.RtmpMessage;
                IRtmpEvent msg = rtmpMsg.body;
                if (_startTimestamp == -1)
                {
                    _startTimestamp = msg.Timestamp;
                }
                int timestamp = msg.Timestamp - _startTimestamp;
                if (timestamp < 0)
                {
                    log.Warn("Skipping message with negative timestamp.");
                    return;
                }
                _lastTimestamp = timestamp;

                ITag tag = new Tag();

                tag.DataType  = (byte)msg.DataType;
                tag.Timestamp = timestamp + _offset;
                if (msg is IStreamData)
                {
                    ByteBuffer data = (msg as IStreamData).Data;
                    tag.Body = data.ToArray();
                }

                try {
                    _writer.WriteTag(tag);
                } catch (IOException ex) {
                    log.Error("Error writing tag", ex);
                }
            }
        }
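
PushMessage rebases every timestamp against the first timestamp of the current item and adds the accumulated duration of the items written before it, so the FLV timestamps stay monotonic across ResetMessages. The arithmetic in isolation, as an illustrative stand-in rather than the library's consumer:

using System;

// Illustrative model of the timestamp handling in PushMessage above (not the library's consumer).
class TimestampRebaser {
    private int _startTimestamp = -1; // first timestamp seen for the current item
    private int _offset = 0;          // accumulated duration of previously written items
    private int _lastTimestamp = 0;   // last rebased timestamp

    // Corresponds to receiving a ResetMessage: the next item starts where this one ended.
    public void Reset() {
        _startTimestamp = -1;
        _offset += _lastTimestamp;
    }

    // Maps an incoming RTMP timestamp to the value written into the FLV tag,
    // or null when the message would be skipped (negative after rebasing).
    public int? Rebase(int messageTimestamp) {
        if (_startTimestamp == -1)
            _startTimestamp = messageTimestamp;
        int timestamp = messageTimestamp - _startTimestamp;
        if (timestamp < 0)
            return null;
        _lastTimestamp = timestamp;
        return timestamp + _offset;
    }
}

static class RebaseDemo {
    static void Main() {
        var r = new TimestampRebaser();
        Console.WriteLine(r.Rebase(5000)); // 0    - the item starts at FLV time 0
        Console.WriteLine(r.Rebase(7000)); // 2000
        r.Reset();                         // ResetMessage: a new item begins
        Console.WriteLine(r.Rebase(100));  // 2000 - continues after the previous item
    }
}
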
Example #3
		public bool CanSendPacket(RtmpMessage message, long pending) {
			IRtmpEvent packet = message.body;
			if (!(packet is VideoData)) {
				// We currently only drop video packets.
				return true;
			}

			VideoData video = packet as VideoData;
			FrameType type = video.FrameType;
			bool result = false;
			switch (_state) {
				case FrameDropperState.SEND_ALL:
					// All packets will be sent.
					result = true;
					break;
				case FrameDropperState.SEND_INTERFRAMES:
					// Only keyframes and interframes will be sent.
					if (type == FrameType.Keyframe) {
						if (pending == 0) {
							// Send all frames from now on.
							_state = FrameDropperState.SEND_ALL;
						}
						result = true;
					} else if (type == FrameType.Interframe) {
						result = true;
					}
					break;
				case FrameDropperState.SEND_KEYFRAMES:
					// Only keyframes will be sent.
					result = (type == FrameType.Keyframe);
					if (result && pending == 0) {
						// Maybe switch back to SEND_INTERFRAMES after the next keyframe
						_state = FrameDropperState.SEND_KEYFRAMES_CHECK;
					}
					break;
				case FrameDropperState.SEND_KEYFRAMES_CHECK:
					// Only keyframes will be sent.
					result = (type == FrameType.Keyframe);
					if (result && pending == 0) {
						// Continue with sending interframes as well
						_state = FrameDropperState.SEND_INTERFRAMES;
					}
					break;
				default:
					break;
			}
			return result;
		}
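
A producer typically consults CanSendPacket before writing a video message and reports anything it skips back to the dropper, so the state machine can wait for the next keyframe before resuming. A hedged sketch of such a loop with stand-in types (the real interfaces live in the GodLesZ.Library.Amf messaging namespaces):

using System;
using System.Collections.Generic;

// Stand-ins for illustration only; not the library's exact pipe or dropper types.
interface IFrameDropperSketch {
    bool CanSendPacket(DroppableMessage message, long pending);
    void DropPacket(DroppableMessage message);
    void SendPacket(DroppableMessage message);
}

class DroppableMessage { /* wraps an IRtmpEvent body in the real library */ }

static class FrameDropperUsage {
    // Drains queued messages, dropping whatever the frame dropper rejects under backlog.
    public static void Drain(IFrameDropperSketch dropper, Queue<DroppableMessage> backlog,
                             Action<DroppableMessage> write) {
        while (backlog.Count > 0) {
            DroppableMessage msg = backlog.Dequeue();
            long pending = backlog.Count; // the real code passes pending bytes; queue length stands in here
            if (dropper.CanSendPacket(msg, pending)) {
                write(msg);
                dropper.SendPacket(msg);  // let the dropper observe what was actually sent
            } else {
                dropper.DropPacket(msg);  // so it can wait for the next keyframe before resuming
            }
        }
    }
}
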
Example #4
		/// <summary>
		/// Play stream
		/// </summary>
		/// <param name="item">Playlist item.</param>
		/// <param name="withReset">Send reset status before playing.</param>
		public void Play(IPlayItem item, bool withReset) {
			lock (this.SyncRoot) {
				// Can't play if state is not stopped
				if (_playlistSubscriberStream.State != State.STOPPED)
					throw new IllegalStateException();
				if (_msgIn != null) {
					_msgIn.Unsubscribe(this);
					_msgIn = null;
				}
				// Play type determination
				// http://livedocs.adobe.com/flex/3/langref/flash/net/NetStream.html#play%28%29
				// The start time, in seconds. Allowed values are -2, -1, 0, or a positive number. 
				// The default value is -2, which looks for a live stream, then a recorded stream, 
				// and if it finds neither, opens a live stream. 
				// If -1, plays only a live stream. 
				// If 0 or a positive number, plays a recorded stream, beginning start seconds in.
				//
				// -2: live then recorded, -1: live, >=0: recorded
				int type = (int)(item.Start / 1000);
				// see if it's a published stream
				IScope thisScope = _playlistSubscriberStream.Scope;
				string itemName = item.Name;
				//check for input and type
				InputType sourceType = _providerService.LookupProviderInputType(thisScope, itemName);

				bool isPublishedStream = sourceType == InputType.Live;
				bool isFileStream = sourceType == InputType.Vod;
				bool sendNotifications = true;

				// decision: 0 for Live, 1 for File, 2 for Wait, 3 for N/A
				switch (type) {
					case -2:
						if (isPublishedStream)
							_playDecision = 0;
						else if (isFileStream)
							_playDecision = 1;
						else
							_playDecision = 2;
						break;
					case -1:
						if (isPublishedStream)
							_playDecision = 0;
						else
							_playDecision = 2;
						break;
					default:
						if (isFileStream)
							_playDecision = 1;
						break;
				}
				if (log.IsDebugEnabled)
					log.Debug(string.Format("Play decision is {0} (0=Live, 1=File, 2=Wait, 3=N/A)", _playDecision));
				_currentItem = item;
				long itemLength = item.Length;
				switch (_playDecision) {
					case 0:
						//get source input without create
						_msgIn = _providerService.GetLiveProviderInput(thisScope, itemName, false);
						// Drop all frames up to the next keyframe
						_videoFrameDropper.Reset(FrameDropperState.SEND_KEYFRAMES_CHECK);
						if (_msgIn is IBroadcastScope) {
							// Send initial keyframe
							IClientBroadcastStream stream = (_msgIn as IBroadcastScope).GetAttribute(Constants.BroadcastScopeStreamAttribute) as IClientBroadcastStream;
							if (stream != null && stream.CodecInfo != null) {
								IVideoStreamCodec videoCodec = stream.CodecInfo.VideoCodec;
								if (videoCodec != null) {
									if (withReset) {
										SendReset();
										SendResetStatus(item);
										SendStartStatus(item);
									}
									sendNotifications = false;
									//send decoder configuration if it exists
									ByteBuffer config = videoCodec.GetDecoderConfiguration();
									if (config != null) {
										VideoData conf = new VideoData(config);
										try {
											conf.Timestamp = 0;
											RtmpMessage confMsg = new RtmpMessage();
											confMsg.body = conf;
											_msgOut.PushMessage(confMsg);
										} finally {
											//conf.release();
										}
									}
									//Check for a keyframe to send
									ByteBuffer keyFrame = videoCodec.GetKeyframe();
									if (keyFrame != null) {
										VideoData video = new VideoData(keyFrame);
										try {
											video.Timestamp = 0;
											RtmpMessage videoMsg = new RtmpMessage();
											videoMsg.body = video;
											_msgOut.PushMessage(videoMsg);
											// Don't wait for keyframe
											_videoFrameDropper.Reset();
										} finally {
											//video.release();
										}
									}
								}
							}
						}
						_msgIn.Subscribe(this, null);
						break;
					case 2:
						//get source input with create
						_msgIn = _providerService.GetLiveProviderInput(thisScope, itemName, true);
						_msgIn.Subscribe(this, null);
						_waiting = true;
						if (type == -1 && itemLength >= 0) {
							//log.debug("Creating wait job");
							// Wait given timeout for stream to be published
							PlaylistSubscriberStreamJob1 job = new PlaylistSubscriberStreamJob1(this, itemName);
							_waitLiveJob = _schedulingService.AddScheduledOnceJob(item.Length, job);
						} else if (type == -2) {
							//log.debug("Creating wait job");
							// Wait x seconds for the stream to be published
							PlaylistSubscriberStreamJob2 job = new PlaylistSubscriberStreamJob2(this, itemName);
							_waitLiveJob = _schedulingService.AddScheduledOnceJob(15000, job);
						} else {
							ConnectToProvider(itemName);
						}
						break;
					case 1:
						_msgIn = _providerService.GetVODProviderInput(thisScope, itemName);
						if (_msgIn == null) {
							SendStreamNotFoundStatus(_currentItem);
							throw new StreamNotFoundException(itemName);
						}
						if (!_msgIn.Subscribe(this, null)) {
							log.Error("Input source subscribe failed");
						}
						break;
					default:
						SendStreamNotFoundStatus(_currentItem);
						throw new StreamNotFoundException(itemName);
				}
				_playlistSubscriberStream.State = State.PLAYING;
				IMessage msg = null;
				_streamOffset = 0;
				_streamStartTS = -1;
				if (_playDecision == 1) {
					if (withReset) {
						ReleasePendingMessage();
					}
					SendVODInitCM(_msgIn, item);
					// Don't use pullAndPush to detect IOExceptions prior to sending NetStream.Play.Start
					if (item.Start > 0) {
						_streamOffset = SendVODSeekCM(_msgIn, (int)item.Start);
						// We seeked to the nearest keyframe so use real timestamp now
						if (_streamOffset == -1) {
							_streamOffset = (int)item.Start;
						}
					}
					msg = _msgIn.PullMessage();
					if (msg is RtmpMessage) {
						IRtmpEvent body = ((RtmpMessage)msg).body;
						if (itemLength == 0) {
							// Only send first video frame
							body = ((RtmpMessage)msg).body;
							while (body != null && !(body is VideoData)) {
								msg = _msgIn.PullMessage();
								if (msg == null)
									break;
								if (msg is RtmpMessage)
									body = ((RtmpMessage)msg).body;
							}
						}
						if (body != null) {
							// Adjust timestamp when playing lists
							body.Timestamp = body.Timestamp + _timestampOffset;
						}
					}
				}
				if (sendNotifications) {
					if (withReset) {
						SendReset();
						SendResetStatus(item);
					}
					SendStartStatus(item);
					if (!withReset) {
						SendSwitchStatus();
					}
				}
				if (msg != null) {
					SendMessage((RtmpMessage)msg);
				}
				_playlistSubscriberStream.NotifyItemPlay(_currentItem, !_isPullMode);
				if (withReset) {
					long currentTime = System.Environment.TickCount;
					_playbackStart = currentTime - _streamOffset;
					_nextCheckBufferUnderrun = currentTime + _bufferCheckInterval;
					if (_currentItem.Length != 0) {
						EnsurePullAndPushRunning();
					}
				}
			}
		}
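
The decision block near the top of Play follows the NetStream.play start-argument semantics quoted in the comments: -2 prefers a live stream, then a recorded one, otherwise waits; -1 is live only; 0 or a positive value is recorded only. The same table as a standalone sketch (the enum is illustrative; the real code stores plain integers 0-3):

// Illustrative decision table; the real code keeps _playDecision as an int (0=Live, 1=File, 2=Wait, 3=N/A).
enum PlayDecisionSketch { Live = 0, File = 1, Wait = 2, NotAvailable = 3 }

static class PlayDecisionTable {
    // 'type' is item.Start / 1000 in the code above: -2, -1, or >= 0.
    public static PlayDecisionSketch Decide(int type, bool isPublishedStream, bool isFileStream) {
        switch (type) {
            case -2: // live first, then recorded, otherwise wait for a live stream to appear
                if (isPublishedStream) return PlayDecisionSketch.Live;
                if (isFileStream) return PlayDecisionSketch.File;
                return PlayDecisionSketch.Wait;
            case -1: // live only
                return isPublishedStream ? PlayDecisionSketch.Live : PlayDecisionSketch.Wait;
            default: // 0 or positive: recorded only
                return isFileStream ? PlayDecisionSketch.File : PlayDecisionSketch.NotAvailable;
        }
    }
}
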
Example #5
		/// <summary>
		/// Receive then send if message is data (not audio or video)
		/// </summary>
		internal void PullAndPush() {
			lock (this.SyncRoot) {
				if (_playlistSubscriberStream.State == State.PLAYING && _isPullMode && !_waitingForToken) {
					if (_pendingMessage != null) {
						IRtmpEvent body = _pendingMessage.body;
						if (!OkayToSendMessage(body))
							return;

						SendMessage(_pendingMessage);
						ReleasePendingMessage();
					} else {
						while (true) {
							IMessage msg = _msgIn.PullMessage();
							if (msg == null) {
								// No more packets to send
								Stop();
								break;
							} else {
								if (msg is RtmpMessage) {
									RtmpMessage rtmpMessage = (RtmpMessage)msg;
									IRtmpEvent body = rtmpMessage.body;
									if (!_receiveAudio && body is AudioData) {
										// The user doesn't want to get audio packets
										//((IStreamData) body).Data.Release();
										if (_sendBlankAudio) {
											// Send reset audio packet
											_sendBlankAudio = false;
											body = new AudioData();
											// We need a zero timestamp
											if (_lastMessage != null) {
												body.Timestamp = _lastMessage.Timestamp - _timestampOffset;
											} else {
												body.Timestamp = -_timestampOffset;
											}
											rtmpMessage.body = body;
										} else {
											continue;
										}
									} else if (!_receiveVideo && body is VideoData) {
										// The user doesn't want to get video packets
										//((IStreamData) body).Data.Release();
										continue;
									}

									// Adjust timestamp when playing lists
									body.Timestamp = body.Timestamp + _timestampOffset;
									if (OkayToSendMessage(body)) {
										if (log.IsDebugEnabled)
											log.Debug(string.Format("ts: {0}", rtmpMessage.body.Timestamp));
										SendMessage(rtmpMessage);
										//((IStreamData) body).Data.Release();
									} else {
										_pendingMessage = rtmpMessage;
									}
									EnsurePullAndPushRunning();
									break;
								}
							}
						}
					}
				}
			}
		}
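
The blank-audio branch picks a raw timestamp that, after the later timestampOffset adjustment applied to every outgoing message, lands exactly on the last sent timestamp, or on zero when nothing has been sent yet. A small check of that arithmetic outside the library:

using System;

static class BlankAudioTimestampSketch {
    // Mirrors the arithmetic in PullAndPush: the chosen raw timestamp plus the playlist
    // offset equals the last message's timestamp, or 0 if there is none.
    public static int AdjustedBlankAudioTimestamp(int? lastMessageTimestamp, int timestampOffset) {
        int raw = lastMessageTimestamp.HasValue
            ? lastMessageTimestamp.Value - timestampOffset
            : -timestampOffset;
        return raw + timestampOffset; // the adjustment applied to every message before sending
    }

    static void Main() {
        Console.WriteLine(AdjustedBlankAudioTimestamp(12000, 3000)); // 12000
        Console.WriteLine(AdjustedBlankAudioTimestamp(null, 3000));  // 0
    }
}
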
Example #6
		/// <summary>
		/// Releases pending message body, nullifies pending message object
		/// </summary>
		private void ReleasePendingMessage() {
			lock (this.SyncRoot) {
				if (_pendingMessage != null) {
					IRtmpEvent body = _pendingMessage.body;
					if (body is IStreamData && ((IStreamData)body).Data != null) {
						//((IStreamData)body).Data.Release(); 
					}
					_pendingMessage.body = null;
					_pendingMessage = null;
				}
			}
		}
Example #7
		/// <summary>
		/// Seek position in file
		/// </summary>
		/// <param name="position"></param>
		public void Seek(int position) {
			lock (this.SyncRoot) {
				if (_playlistSubscriberStream.State != State.PLAYING && _playlistSubscriberStream.State != State.PAUSED && _playlistSubscriberStream.State != State.STOPPED) {
					throw new IllegalStateException();
				}
				if (!_isPullMode) {
					throw new NotSupportedException();
				}

				ReleasePendingMessage();
				ClearWaitJobs();
				_bwController.ResetBuckets(_bwContext);
				_waitingForToken = false;
				SendClearPing();
				SendReset();
				SendSeekStatus(_currentItem, position);
				SendStartStatus(_currentItem);
				int seekPos = SendVODSeekCM(_msgIn, position);
				// We seeked to the nearest keyframe so use real timestamp now
				if (seekPos == -1) {
					seekPos = position;
				}
				_playbackStart = System.Environment.TickCount - seekPos;
				_playlistSubscriberStream.NotifyItemSeek(_currentItem, seekPos);
				bool messageSent = false;
				bool startPullPushThread = false;
				if ((_playlistSubscriberStream.State == State.PAUSED || _playlistSubscriberStream.State == State.STOPPED) && SendCheckVideoCM(_msgIn)) {
					// we send a single snapshot on pause.
					// XXX we need to take BWC into account, for
					// now send forcefully.
					IMessage msg;
					try {
						msg = _msgIn.PullMessage();
					} catch (Exception ex) {
						log.Error("Error while pulling message.", ex);
						msg = null;
					}
					while (msg != null) {
						if (msg is RtmpMessage) {
							RtmpMessage rtmpMessage = (RtmpMessage)msg;
							IRtmpEvent body = rtmpMessage.body;
							if (body is VideoData && ((VideoData)body).FrameType == FrameType.Keyframe) {
								body.Timestamp = seekPos;
								DoPushMessage(rtmpMessage);
								//rtmpMessage.body.Release();
								messageSent = true;
								_lastMessage = body;
								break;
							}
						}

						try {
							msg = _msgIn.PullMessage();
						} catch (Exception ex) {
							log.Error("Error while pulling message.", ex);
							msg = null;
						}
					}
				} else {
					startPullPushThread = true;
				}

				if (!messageSent) {
					// Send blank audio packet to notify client about new position
					AudioData audio = new AudioData();
					audio.Timestamp = seekPos;
					audio.Header = new RtmpHeader();
					audio.Header.Timer = seekPos;
					audio.Header.IsTimerRelative = false;
					RtmpMessage audioMessage = new RtmpMessage();
					audioMessage.body = audio;
					_lastMessage = audio;
					DoPushMessage(audioMessage);
				}

				if (startPullPushThread) {
					EnsurePullAndPushRunning();
				}

				if (_playlistSubscriberStream.State != State.STOPPED && _currentItem.Length >= 0 && (position - _streamOffset) >= _currentItem.Length) {
					// Seeked after end of stream
					Stop();
					return;
				}
			}
		}
Example #8
		private void SendOnPlayStatus(String code, int duration, long bytes) {
			MemoryStream ms = new MemoryStream();
			AMFWriter writer = new AMFWriter(ms);
			writer.WriteString("onPlayStatus");
			Hashtable props = new Hashtable();
			props.Add("code", code);
			props.Add("level", "status");
			props.Add("duration", duration);
			props.Add("bytes", bytes);
			writer.WriteAssociativeArray(ObjectEncoding.AMF0, props);
			ByteBuffer buffer = new ByteBuffer(ms);
			IRtmpEvent evt = new Notify(buffer);
			if (_lastMessage != null) {
				int timestamp = _lastMessage.Timestamp;
				evt.Timestamp = timestamp;
			} else {
				evt.Timestamp = 0;
			}
			RtmpMessage msg = new RtmpMessage();
			msg.body = evt;
			DoPushMessage(msg);
		}
Example #9
		/// <summary>
		/// Send reset message
		/// </summary>
		private void SendReset() {
			if (_isPullMode) {
				Ping ping1 = new Ping();
				ping1.PingType = (short)Ping.RecordedStream;
				ping1.Value2 = this.StreamId;

				RtmpMessage ping1Msg = new RtmpMessage();
				ping1Msg.body = ping1;
				DoPushMessage(ping1Msg);
			}

			Ping ping2 = new Ping();
			ping2.PingType = (short)Ping.StreamBegin;
			ping2.Value2 = this.StreamId;

			RtmpMessage ping2Msg = new RtmpMessage();
			ping2Msg.body = ping2;
			DoPushMessage(ping2Msg);

			ResetMessage reset = new ResetMessage();
			DoPushMessage(reset);
		}
Example #10
		public IMessage PullMessage(IPipe pipe) {
			lock (_syncLock) {
				if (_pipe != pipe)
					return null;
				if (_reader == null)
					Init();
				if (!_reader.HasMoreTags()) {
					// TODO send OOBCM to notify EOF
					// Do not unsubscribe as this kills VOD seek while in buffer
					// this.pipe.unsubscribe(this);
					return null;
				}
				ITag tag = _reader.ReadTag();
				IRtmpEvent msg = null;
				int timestamp = tag.Timestamp;
				switch (tag.DataType) {
					case Constants.TypeAudioData:
						msg = new AudioData(tag.Body);
						break;
					case Constants.TypeVideoData:
						msg = new VideoData(tag.Body);
						break;
					case Constants.TypeInvoke:
						msg = new Invoke(tag.Body);
						break;
					case Constants.TypeNotify:
						msg = new Notify(tag.Body);
						break;
					case Constants.TypeFlexStreamEnd:
						msg = new FlexStreamSend(tag.Body);
						break;
					default:
						log.Warn("Unexpected type " + tag.DataType);
						msg = new Unknown(tag.DataType, tag.Body);
						break;
				}
				msg.Timestamp = timestamp;
				RtmpMessage rtmpMsg = new RtmpMessage();
				rtmpMsg.body = msg;
				return rtmpMsg;
			}
		}
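
This provider is pull-based: callers keep invoking PullMessage until it returns null, which here means the FLV reader has no more tags; the pipe stays subscribed so a later seek can still be served from the buffer. A hedged consumer sketch with stand-in interfaces:

using System;

// Stand-ins for the pipe interfaces used above; illustrative only.
interface IMessageSketch { }
interface IMessageInputSketch { IMessageSketch PullMessage(); }

static class VodDrainSketch {
    // Forwards everything the provider can read; a null return signals end of file.
    public static int Drain(IMessageInputSketch input, Action<IMessageSketch> forward) {
        int count = 0;
        IMessageSketch msg;
        while ((msg = input.PullMessage()) != null) {
            forward(msg);
            count++;
        }
        return count;
    }
}
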
Example #11
		/// <summary>
		/// Send RTMP message
		/// </summary>
		/// <param name="message"></param>
		private void SendMessage(RtmpMessage message) {
			//TDJ / live relative timestamp
			if (_playDecision == 0 && _streamStartTS > 0) {
				message.body.Timestamp = message.body.Timestamp - _streamStartTS;
			}
			int ts = message.body.Timestamp;
			if (log.IsDebugEnabled)
				log.Debug(string.Format("SendMessage: streamStartTS={0}, length={1}, streamOffset={2}, timestamp={3}", _streamStartTS, _currentItem.Length, _streamOffset, ts));
			if (_streamStartTS == -1) {
				if (log.IsDebugEnabled)
					log.Debug("SendMessage: resetting streamStartTS");
				_streamStartTS = ts;
				message.body.Timestamp = 0;
			} else {
				if (_currentItem.Length >= 0) {
					int duration = ts - _streamStartTS;
					if (duration - _streamOffset >= _currentItem.Length) {
						// Sent enough data to client
						Stop();
						return;
					}
				}
			}
			_lastMessage = message.body;
			DoPushMessage(message);
		}
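
SendMessage stops the stream once the data sent covers the requested item length: the elapsed duration relative to the first timestamp, minus any seek offset, is compared against the item length, and a negative length disables the check. The condition restated on its own (names mirror the fields; illustrative only):

static class PlayLengthCheckSketch {
    // True once enough of the recorded item has been sent to the client.
    public static bool SentEnough(int streamStartTS, int currentTimestamp, int streamOffset, long itemLength) {
        if (itemLength < 0)
            return false;                           // negative length means "play to the end"
        int duration = currentTimestamp - streamStartTS;
        return duration - streamOffset >= itemLength;
    }
}
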
Example #12
		public void DispatchEvent(IEvent evt) {
			if (!(evt is IRtmpEvent)
					&& (evt.EventType != EventType.STREAM_CONTROL)
					&& (evt.EventType != EventType.STREAM_DATA) || _closed) {
				// ignored event
				if (log.IsDebugEnabled) {
					log.Debug("DispatchEvent: " + evt.EventType);
				}
				return;
			}

			// Get stream codec
			IStreamCodecInfo codecInfo = this.CodecInfo;
			StreamCodecInfo info = null;
			if (codecInfo is StreamCodecInfo) {
				info = codecInfo as StreamCodecInfo;
			}

			IRtmpEvent rtmpEvent = evt as IRtmpEvent;
			if (rtmpEvent == null) {
				if (log.IsDebugEnabled)
					log.Debug("IRtmpEvent expected in event dispatch");
				return;
			}
			int eventTime = -1;
			// If this is the first packet, save its timestamp
			if (_firstPacketTime == -1) {
				_firstPacketTime = rtmpEvent.Timestamp;
				if (log.IsDebugEnabled)
					log.Debug(string.Format("CBS: {0} firstPacketTime={1} {2}", this.Name, _firstPacketTime, rtmpEvent.Header.IsTimerRelative ? "(rel)" : "(abs)"));
			}
			if (rtmpEvent is IStreamData && (rtmpEvent as IStreamData).Data != null) {
				_bytesReceived += (rtmpEvent as IStreamData).Data.Limit;
			}

			if (rtmpEvent is AudioData) {
				if (info != null) {
					info.HasAudio = true;
				}
				if (rtmpEvent.Header.IsTimerRelative) {
					if (_audioTime == 0)
						log.Warn(string.Format("First Audio timestamp is relative! {0}", rtmpEvent.Timestamp));
					_audioTime += rtmpEvent.Timestamp;
				} else {
					_audioTime = rtmpEvent.Timestamp;
				}
				eventTime = _audioTime;
			} else if (rtmpEvent is VideoData) {
				IVideoStreamCodec videoStreamCodec = null;
				if (_videoCodecFactory != null && _checkVideoCodec) {
					videoStreamCodec = _videoCodecFactory.GetVideoCodec((rtmpEvent as VideoData).Data);
					if (codecInfo is StreamCodecInfo) {
						(codecInfo as StreamCodecInfo).VideoCodec = videoStreamCodec;
					}
					_checkVideoCodec = false;
				} else if (codecInfo != null) {
					videoStreamCodec = codecInfo.VideoCodec;
				}

				if (videoStreamCodec != null) {
					videoStreamCodec.AddData((rtmpEvent as VideoData).Data);
				}

				if (info != null) {
					info.HasVideo = true;
				}
				if (rtmpEvent.Header.IsTimerRelative) {
					if (_videoTime == 0)
						log.Warn(string.Format("First Video timestamp is relative! {0}", rtmpEvent.Timestamp));
					_videoTime += rtmpEvent.Timestamp;
				} else {
					_videoTime = rtmpEvent.Timestamp;
					// The Flash player may send the first VideoData with an old absolute timestamp,
					// which ruins the stream's timebase in FileConsumer.
					// We don't want to discard the packet, as it may be a video keyframe.
					// Generally a Data or Audio packet has already set the timebase to a reasonable value,
					// and eventually a new/correct absolute time will come on the video channel.
					// We could put this logic between livePipe and filePipe;
					// that would work for Audio Data as well, but we haven't seen the need.
					int cts = Math.Max(_audioTime, _dataTime);
					cts = Math.Max(cts, _minStreamTime);
					int fudge = 20;
					// Accept some slightly (20ms) retro timestamps [this may not be needed,
					// the publish Data should strictly precede the video data]
					if (_videoTime + fudge < cts) {
						if (log.IsDebugEnabled)
							log.Debug(string.Format("DispatchEvent: adjust archaic videoTime, from: {0} to {1}", _videoTime, cts));
						_videoTime = cts;
					}
				}
				eventTime = _videoTime;
			} else if (rtmpEvent is Invoke) {
				if (rtmpEvent.Header.IsTimerRelative) {
					if (_dataTime == 0)
						log.Warn(string.Format("First data [Invoke] timestamp is relative! {0}", rtmpEvent.Timestamp));
					_dataTime += rtmpEvent.Timestamp;
				} else {
					_dataTime = rtmpEvent.Timestamp;
				}
				return;
			} else if (rtmpEvent is Notify) {
				if (rtmpEvent.Header.IsTimerRelative) {
					if (_dataTime == 0)
						log.Warn(string.Format("First data [Notify] timestamp is relative! {0}", rtmpEvent.Timestamp));
					_dataTime += rtmpEvent.Timestamp;
				} else {
					_dataTime = rtmpEvent.Timestamp;
				}
				eventTime = _dataTime;
			}

			// Notify event listeners
			CheckSendNotifications(evt);

			// Create new RTMP message, initialize it and push through pipe
			GodLesZ.Library.Amf.Messaging.Rtmp.Stream.Messages.RtmpMessage msg = new GodLesZ.Library.Amf.Messaging.Rtmp.Stream.Messages.RtmpMessage();
			msg.body = rtmpEvent;
			msg.body.Timestamp = eventTime;
			try {
				if (_livePipe != null)
					_livePipe.PushMessage(msg);
				if (_recordPipe != null)
					_recordPipe.PushMessage(msg);
			} catch (System.IO.IOException ex) {
				SendRecordFailedNotify(ex.Message);
				Stop();
			}

			// Notify listeners about received packet
			if (rtmpEvent is IStreamPacket) {
				foreach (IStreamListener listener in GetStreamListeners()) {
					try {
						listener.PacketReceived(this, rtmpEvent as IStreamPacket);
					} catch (Exception ex) {
						log.Error(string.Format("Error while notifying listener {0}", listener), ex);
					}
				}
			}
		}
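
The audio, video and data clocks above all follow the same rule: a relative header adds the event's timestamp to the running channel clock, while an absolute header replaces it. A tiny stand-alone illustration of that accumulation, not the library's code:

using System;

class ChannelClockSketch {
    private int _time;

    // Mirrors the per-channel handling in DispatchEvent: relative timestamps accumulate,
    // absolute timestamps overwrite the clock.
    public int Advance(int timestamp, bool isTimerRelative) {
        if (isTimerRelative)
            _time += timestamp;
        else
            _time = timestamp;
        return _time;
    }
}

static class ChannelClockDemo {
    static void Main() {
        var audio = new ChannelClockSketch();
        Console.WriteLine(audio.Advance(0, false));    // 0    - absolute start
        Console.WriteLine(audio.Advance(23, true));    // 23   - relative delta
        Console.WriteLine(audio.Advance(23, true));    // 46
        Console.WriteLine(audio.Advance(1000, false)); // 1000 - absolute resync
    }
}
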
Example #13
		public void DropPacket(RtmpMessage message) {
			IRtmpEvent packet = message.body;
			if (!(packet is VideoData)) {
				// Only check video packets.
				return;
			}

			VideoData video = packet as VideoData;
			FrameType type = video.FrameType;

			switch (_state) {
				case FrameDropperState.SEND_ALL:
					if (type == FrameType.DisposableInterframe) {
						// Remain in state, packet is safe to drop.
						return;
					} else if (type == FrameType.Interframe) {
						// Drop all frames until the next keyframe.
						_state = FrameDropperState.SEND_KEYFRAMES;
						return;
					} else if (type == FrameType.Keyframe) {
						// Drop all frames until the next keyframe.
						_state = FrameDropperState.SEND_KEYFRAMES;
						return;
					}
					break;
				case FrameDropperState.SEND_INTERFRAMES:
					if (type == FrameType.Interframe) {
						// Drop all frames until the next keyframe.
						_state = FrameDropperState.SEND_KEYFRAMES_CHECK;
						return;
					} else if (type == FrameType.Keyframe) {
						// Drop all frames until the next keyframe.
						_state = FrameDropperState.SEND_KEYFRAMES;
						return;
					}
					break;
				case FrameDropperState.SEND_KEYFRAMES:
					// Remain in state.
					break;
				case FrameDropperState.SEND_KEYFRAMES_CHECK:
					if (type == FrameType.Keyframe) {
						// Switch back to sending keyframes, but don't move to SEND_INTERFRAMES afterwards.
						_state = FrameDropperState.SEND_KEYFRAMES;
						return;
					}
					break;
				default:
					break;
			}
		}
Example #14
		public void SendPacket(RtmpMessage message) {
		}
Example #15
		protected override MessageBase CopyImpl(MessageBase clone) {
			// Instantiate the clone, if a derived type hasn't already.
			if (clone == null)
				clone = new RtmpMessage();
			return base.CopyImpl(clone);
		}
Example #16
        public void DispatchEvent(IEvent evt)
        {
            if (!(evt is IRtmpEvent) &&
                (evt.EventType != EventType.STREAM_CONTROL) &&
                (evt.EventType != EventType.STREAM_DATA) || _closed)
            {
                // ignored event
                if (log.IsDebugEnabled)
                {
                    log.Debug("DispatchEvent: " + evt.EventType);
                }
                return;
            }

            // Get stream codec
            IStreamCodecInfo codecInfo = this.CodecInfo;
            StreamCodecInfo  info      = null;

            if (codecInfo is StreamCodecInfo)
            {
                info = codecInfo as StreamCodecInfo;
            }

            IRtmpEvent rtmpEvent = evt as IRtmpEvent;

            if (rtmpEvent == null)
            {
                if (log.IsDebugEnabled)
                {
                    log.Debug("IRtmpEvent expected in event dispatch");
                }
                return;
            }
            int eventTime = -1;

            // If this is the first packet, save its timestamp
            if (_firstPacketTime == -1)
            {
                _firstPacketTime = rtmpEvent.Timestamp;
                if (log.IsDebugEnabled)
                {
                    log.Debug(string.Format("CBS: {0} firstPacketTime={1} {2}", this.Name, _firstPacketTime, rtmpEvent.Header.IsTimerRelative ? "(rel)" : "(abs)"));
                }
            }
            if (rtmpEvent is IStreamData && (rtmpEvent as IStreamData).Data != null)
            {
                _bytesReceived += (rtmpEvent as IStreamData).Data.Limit;
            }

            if (rtmpEvent is AudioData)
            {
                if (info != null)
                {
                    info.HasAudio = true;
                }
                if (rtmpEvent.Header.IsTimerRelative)
                {
                    if (_audioTime == 0)
                    {
                        log.Warn(string.Format("First Audio timestamp is relative! {0}", rtmpEvent.Timestamp));
                    }
                    _audioTime += rtmpEvent.Timestamp;
                }
                else
                {
                    _audioTime = rtmpEvent.Timestamp;
                }
                eventTime = _audioTime;
            }
            else if (rtmpEvent is VideoData)
            {
                IVideoStreamCodec videoStreamCodec = null;
                if (_videoCodecFactory != null && _checkVideoCodec)
                {
                    videoStreamCodec = _videoCodecFactory.GetVideoCodec((rtmpEvent as VideoData).Data);
                    if (codecInfo is StreamCodecInfo)
                    {
                        (codecInfo as StreamCodecInfo).VideoCodec = videoStreamCodec;
                    }
                    _checkVideoCodec = false;
                }
                else if (codecInfo != null)
                {
                    videoStreamCodec = codecInfo.VideoCodec;
                }

                if (videoStreamCodec != null)
                {
                    videoStreamCodec.AddData((rtmpEvent as VideoData).Data);
                }

                if (info != null)
                {
                    info.HasVideo = true;
                }
                if (rtmpEvent.Header.IsTimerRelative)
                {
                    if (_videoTime == 0)
                    {
                        log.Warn(string.Format("First Video timestamp is relative! {0}", rtmpEvent.Timestamp));
                    }
                    _videoTime += rtmpEvent.Timestamp;
                }
                else
                {
                    _videoTime = rtmpEvent.Timestamp;
                    // The Flash player may send the first VideoData with an old absolute timestamp,
                    // which ruins the stream's timebase in FileConsumer.
                    // We don't want to discard the packet, as it may be a video keyframe.
                    // Generally a Data or Audio packet has already set the timebase to a reasonable value,
                    // and eventually a new/correct absolute time will come on the video channel.
                    // We could put this logic between livePipe and filePipe;
                    // that would work for Audio Data as well, but we haven't seen the need.
                    int cts = Math.Max(_audioTime, _dataTime);
                    cts = Math.Max(cts, _minStreamTime);
                    int fudge = 20;
                    // Accept some slightly (20ms) retro timestamps [this may not be needed,
                    // the publish Data should strictly precede the video data]
                    if (_videoTime + fudge < cts)
                    {
                        if (log.IsDebugEnabled)
                        {
                            log.Debug(string.Format("DispatchEvent: adjust archaic videoTime, from: {0} to {1}", _videoTime, cts));
                        }
                        _videoTime = cts;
                    }
                }
                eventTime = _videoTime;
            }
            else if (rtmpEvent is Invoke)
            {
                if (rtmpEvent.Header.IsTimerRelative)
                {
                    if (_dataTime == 0)
                    {
                        log.Warn(string.Format("First data [Invoke] timestamp is relative! {0}", rtmpEvent.Timestamp));
                    }
                    _dataTime += rtmpEvent.Timestamp;
                }
                else
                {
                    _dataTime = rtmpEvent.Timestamp;
                }
                return;
            }
            else if (rtmpEvent is Notify)
            {
                if (rtmpEvent.Header.IsTimerRelative)
                {
                    if (_dataTime == 0)
                    {
                        log.Warn(string.Format("First data [Notify] timestamp is relative! {0}", rtmpEvent.Timestamp));
                    }
                    _dataTime += rtmpEvent.Timestamp;
                }
                else
                {
                    _dataTime = rtmpEvent.Timestamp;
                }
                eventTime = _dataTime;
            }

            // Notify event listeners
            CheckSendNotifications(evt);

            // Create new RTMP message, initialize it and push through pipe
            GodLesZ.Library.Amf.Messaging.Rtmp.Stream.Messages.RtmpMessage msg = new GodLesZ.Library.Amf.Messaging.Rtmp.Stream.Messages.RtmpMessage();
            msg.body           = rtmpEvent;
            msg.body.Timestamp = eventTime;
            try {
                if (_livePipe != null)
                {
                    _livePipe.PushMessage(msg);
                }
                if (_recordPipe != null)
                {
                    _recordPipe.PushMessage(msg);
                }
            } catch (System.IO.IOException ex) {
                SendRecordFailedNotify(ex.Message);
                Stop();
            }

            // Notify listeners about received packet
            if (rtmpEvent is IStreamPacket)
            {
                foreach (IStreamListener listener in GetStreamListeners())
                {
                    try {
                        listener.PacketReceived(this, rtmpEvent as IStreamPacket);
                    } catch (Exception ex) {
                        log.Error(string.Format("Error while notifying listener {0}", listener), ex);
                    }
                }
            }
        }
Example #17
		/// <summary>
		/// Send a clear-buffer ping, telling the client to flush its stream play buffer
		/// </summary>
		private void SendClearPing() {
			Ping ping1 = new Ping();
			ping1.PingType = (short)Ping.StreamPlayBufferClear;
			ping1.Value2 = this.StreamId;
			RtmpMessage ping1Msg = new RtmpMessage();
			ping1Msg.body = ping1;
			DoPushMessage(ping1Msg);
		}
Example #18
		public void DispatchEvent(IEvent @event) {
			try {
				if (@event is IRtmpEvent) {
					IRtmpEvent rtmpEvent = @event as IRtmpEvent;
					if (_livePipe != null) {
						RtmpMessage msg = new RtmpMessage();
						msg.body = rtmpEvent;
						if (_creationTime == -1)
							_creationTime = rtmpEvent.Timestamp;
						try {
							if (@event is AudioData) {
								(_codecInfo as StreamCodecInfo).HasAudio = true;
							} else if (@event is VideoData) {
								IVideoStreamCodec videoStreamCodec = null;
								if (_codecInfo != null && _codecInfo.VideoCodec == null) {
									videoStreamCodec = _videoCodecFactory.GetVideoCodec((@event as VideoData).Data);
									(_codecInfo as StreamCodecInfo).VideoCodec = videoStreamCodec;
								} else if (_codecInfo != null) {
									videoStreamCodec = _codecInfo.VideoCodec;
								}

								if (videoStreamCodec != null) {
									videoStreamCodec.AddData((rtmpEvent as VideoData).Data);
								}

								if (_codecInfo != null)
									(_codecInfo as StreamCodecInfo).HasVideo = true;
							}
							_livePipe.PushMessage(msg);

							// Notify listeners about received packet
							if (rtmpEvent is IStreamPacket) {
								foreach (IStreamListener listener in GetStreamListeners()) {
									try {
										listener.PacketReceived(this, rtmpEvent as IStreamPacket);
									} catch (Exception ex) {
										log.Error("Error while notifying listener " + listener, ex);
									}
								}
							}
						} catch (Exception ex) {
							// ignore
							log.Error("DispatchEvent exception", ex);
						}
					}
				}
			} finally {
			}
		}