Example #1
0
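		/// <summary>
		/// Encodes an audio data event by returning its payload unchanged; the buffer is already in the form the writer expects.
		/// </summary>
		/// <param name="context">The RTMP context.</param>
		/// <param name="audioData">The audio data event to encode.</param>
		/// <returns>The byte buffer carried by the audio event.</returns>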
		static ByteBuffer EncodeAudioData(RtmpContext context, AudioData audioData) {
			ByteBuffer output = audioData.Data;
			return output;
		}
Example #2
0
		/// <summary>
		/// Receive then send if the message is data (not audio or video)
		/// </summary>
		internal void PullAndPush() {
			lock (this.SyncRoot) {
				if (_playlistSubscriberStream.State == State.PLAYING && _isPullMode && !_waitingForToken) {
					if (_pendingMessage != null) {
						IRtmpEvent body = _pendingMessage.body;
						if (!OkayToSendMessage(body))
							return;

						SendMessage(_pendingMessage);
						ReleasePendingMessage();
					} else {
						while (true) {
							IMessage msg = _msgIn.PullMessage();
							if (msg == null) {
								// No more packets to send
								Stop();
								break;
							} else {
								if (msg is RtmpMessage) {
									RtmpMessage rtmpMessage = (RtmpMessage)msg;
									IRtmpEvent body = rtmpMessage.body;
									if (!_receiveAudio && body is AudioData) {
										// The user doesn't want to get audio packets
										//((IStreamData) body).Data.Release();
										if (_sendBlankAudio) {
											// Send reset audio packet
											_sendBlankAudio = false;
											body = new AudioData();
											// We need a zero timestamp
											if (_lastMessage != null) {
												body.Timestamp = _lastMessage.Timestamp - _timestampOffset;
											} else {
												body.Timestamp = -_timestampOffset;
											}
											rtmpMessage.body = body;
										} else {
											continue;
										}
									} else if (!_receiveVideo && body is VideoData) {
										// The user doesn't want to get video packets
										//((IStreamData) body).Data.Release();
										continue;
									}

									// Adjust timestamp when playing lists
									body.Timestamp = body.Timestamp + _timestampOffset;
									if (OkayToSendMessage(body)) {
										if (log.IsDebugEnabled)
											log.Debug(string.Format("ts: {0}", rtmpMessage.body.Timestamp));
										SendMessage(rtmpMessage);
										//((IStreamData) body).Data.Release();
									} else {
										_pendingMessage = rtmpMessage;
									}
									EnsurePullAndPushRunning();
									break;
								}
							}
						}
					}
				}
			}
		}
Example #3
0
		/// <summary>
		/// Initializes a new instance of the <see cref="NetStreamAudioEventArgs"/> class.
		/// </summary>
		/// <param name="audioData">The audio data.</param>
		internal NetStreamAudioEventArgs(AudioData audioData) {
			_audioData = audioData;
		}
Example #4
0
		/// <summary>
		/// Seek position in file
		/// </summary>
		/// <param name="position"></param>
		public void Seek(int position) {
			lock (this.SyncRoot) {
				if (_playlistSubscriberStream.State != State.PLAYING && _playlistSubscriberStream.State != State.PAUSED && _playlistSubscriberStream.State != State.STOPPED) {
					throw new IllegalStateException();
				}
				if (!_isPullMode) {
					throw new NotSupportedException();
				}

				ReleasePendingMessage();
				ClearWaitJobs();
				_bwController.ResetBuckets(_bwContext);
				_waitingForToken = false;
				SendClearPing();
				SendReset();
				SendSeekStatus(_currentItem, position);
				SendStartStatus(_currentItem);
				int seekPos = SendVODSeekCM(_msgIn, position);
				// We seeked to the nearest keyframe, so use the real timestamp from now on
				if (seekPos == -1) {
					seekPos = position;
				}
				_playbackStart = System.Environment.TickCount - seekPos;
				_playlistSubscriberStream.NotifyItemSeek(_currentItem, seekPos);
				bool messageSent = false;
				bool startPullPushThread = false;
				if ((_playlistSubscriberStream.State == State.PAUSED || _playlistSubscriberStream.State == State.STOPPED) && SendCheckVideoCM(_msgIn)) {
					// We send a single snapshot on pause.
					// XXX: we need to take BWC into account;
					// for now send forcefully.
					IMessage msg;
					try {
						msg = _msgIn.PullMessage();
					} catch (Exception ex) {
						log.Error("Error while pulling message.", ex);
						msg = null;
					}
					while (msg != null) {
						if (msg is RtmpMessage) {
							RtmpMessage rtmpMessage = (RtmpMessage)msg;
							IRtmpEvent body = rtmpMessage.body;
							if (body is VideoData && ((VideoData)body).FrameType == FrameType.Keyframe) {
								body.Timestamp = seekPos;
								DoPushMessage(rtmpMessage);
								//rtmpMessage.body.Release();
								messageSent = true;
								_lastMessage = body;
								break;
							}
						}

						try {
							msg = _msgIn.PullMessage();
						} catch (Exception ex) {
							log.Error("Error while pulling message.", ex);
							msg = null;
						}
					}
				} else {
					startPullPushThread = true;
				}

				if (!messageSent) {
					// Send blank audio packet to notify client about new position
					AudioData audio = new AudioData();
					audio.Timestamp = seekPos;
					audio.Header = new RtmpHeader();
					audio.Header.Timer = seekPos;
					audio.Header.IsTimerRelative = false;
					RtmpMessage audioMessage = new RtmpMessage();
					audioMessage.body = audio;
					_lastMessage = audio;
					DoPushMessage(audioMessage);
				}

				if (startPullPushThread) {
					EnsurePullAndPushRunning();
				}

				if (_playlistSubscriberStream.State != State.STOPPED && _currentItem.Length >= 0 && (position - _streamOffset) >= _currentItem.Length) {
					// Seeked beyond the end of the stream
					Stop();
					return;
				}
			}
		}
Example #5
0
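		/// <summary>
		/// Pushes a message to the consumer: handles reset messages, applies video frame dropping
		/// and bandwidth checks, substitutes or drops audio when the subscriber disabled it,
		/// tracks the number of bytes sent and forwards the message downstream.
		/// </summary>
		/// <param name="pipe">The pipe the message arrived on.</param>
		/// <param name="message">The message to push.</param>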
		public void PushMessage(IPipe pipe, IMessage message) {
			lock (this.SyncRoot) {
				if (message is ResetMessage) {
					SendReset();
					return;
				}
				if (message is RtmpMessage) {
					RtmpMessage rtmpMessage = (RtmpMessage)message;
					IRtmpEvent body = rtmpMessage.body;
					if (!(body is IStreamData)) {
						throw new ApplicationException("expected IStreamData but got " + body.GetType().FullName);
					}

					int size = ((IStreamData)body).Data.Limit;
					if (body is VideoData) {
						IVideoStreamCodec videoCodec = null;
						if (_msgIn is IBroadcastScope) {
							IClientBroadcastStream stream = ((IBroadcastScope)_msgIn).GetAttribute(Constants.BroadcastScopeStreamAttribute) as IClientBroadcastStream;
							if (stream != null && stream.CodecInfo != null) {
								videoCodec = stream.CodecInfo.VideoCodec;
							}
						}

						if (videoCodec == null || videoCodec.CanDropFrames) {
							if (_playlistSubscriberStream.State == State.PAUSED) {
								// The subscriber paused the video
								_videoFrameDropper.DropPacket(rtmpMessage);
								return;
							}

							// Only check for frame dropping if the codec supports it
							long pendingVideos = GetPendingVideoMessageCount();
							if (!_videoFrameDropper.CanSendPacket(rtmpMessage, pendingVideos)) {
								// Drop frame as it depends on other frames that were dropped before.
								return;
							}

							bool drop = !_videoBucket.AcquireToken(size, 0);
							if (!_receiveVideo || drop) {
								// The client disabled video or the app doesn't have enough bandwidth
								// allowed for this stream.
								_videoFrameDropper.DropPacket(rtmpMessage);
								return;
							}

							long[] writeDelta = GetWriteDelta();
							if (pendingVideos > 1 /*|| writeDelta[0] > writeDelta[1]*/) {
								// We drop because the client has insufficient bandwidth.
								long now = System.Environment.TickCount;
								if (_bufferCheckInterval > 0 && now >= _nextCheckBufferUnderrun) {
									// Notify client about frame dropping (keyframe)
									SendInsufficientBandwidthStatus(_currentItem);
									_nextCheckBufferUnderrun = now + _bufferCheckInterval;
								}
								_videoFrameDropper.DropPacket(rtmpMessage);
								return;
							}

							_videoFrameDropper.SendPacket(rtmpMessage);
						}
					} else if (body is AudioData) {
						if (!_receiveAudio && _sendBlankAudio) {
							// Send blank audio packet to reset player
							_sendBlankAudio = false;
							body = new AudioData();
							if (_lastMessage != null) {
								body.Timestamp = _lastMessage.Timestamp;
							} else {
								body.Timestamp = 0;
							}
							rtmpMessage.body = body;
						} else if (_playlistSubscriberStream.State == State.PAUSED || !_receiveAudio || !_audioBucket.AcquireToken(size, 0)) {
							return;
						}
					}
					if (body is IStreamData && ((IStreamData)body).Data != null) {
						_bytesSent += ((IStreamData)body).Data.Limit;
					}
					_lastMessage = body;
				}
				_msgOut.PushMessage(message);
			}
		}
Example #6
0
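		/// <summary>
		/// Raises the NetStream audio event with the specified audio data, if a handler is attached.
		/// </summary>
		/// <param name="audioData">The audio data.</param>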
		internal void RaiseNetStreamAudio(AudioData audioData) {
			if (_netStreamAudioHandler != null) {
				_netStreamAudioHandler(this, new NetStreamAudioEventArgs(audioData));
			}
		}
Example #7
0
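		/// <summary>
		/// Pushes a message to the connection: resets the timestamper on reset messages,
		/// forwards status messages, and writes RTMP events to the data, video, audio or
		/// connection channel using a header produced by the timestamper.
		/// </summary>
		/// <param name="pipe">The pipe the message arrived on.</param>
		/// <param name="message">The message to push.</param>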
		public void PushMessage(IPipe pipe, IMessage message) {
			if (message is ResetMessage) {
				_timeStamper.Reset();
			} else if (message is StatusMessage) {
				StatusMessage statusMsg = message as StatusMessage;
				_data.SendStatus(statusMsg.body as StatusASO);
			} else if (message is RtmpMessage) {
				// Make sure chunk size has been sent
				if (!_chunkSizeSent)
					SendChunkSize();

				RtmpMessage rtmpMsg = message as RtmpMessage;
				IRtmpEvent msg = rtmpMsg.body;

				int eventTime = msg.Timestamp;
#if !SILVERLIGHT
				if (log.IsDebugEnabled)
					log.Debug(string.Format("Message timestamp: {0}", eventTime));
#endif
				if (eventTime < 0) {
#if !SILVERLIGHT
					if (log.IsDebugEnabled)
						log.Debug(string.Format("Message has negative timestamp: {0}", eventTime));
#endif
					return;
				}
				byte dataType = msg.DataType;
				// Create a new header for the consumer
				RtmpHeader header = _timeStamper.GetTimeStamp(dataType, eventTime);

				switch (dataType) {
					case Constants.TypeStreamMetadata:
						Notify notify = new Notify((msg as Notify).Data);
						notify.Header = header;
						notify.Timestamp = header.Timer;
						_data.Write(notify);
						break;
					case Constants.TypeFlexStreamEnd:
						// TODO: okay to send this also to AMF0 clients?
						FlexStreamSend send = new FlexStreamSend((msg as Notify).Data);
						send.Header = header;
						send.Timestamp = header.Timer;
						_data.Write(send);
						break;
					case Constants.TypeVideoData:
						VideoData videoData = new VideoData((msg as VideoData).Data);
						videoData.Header = header;
						videoData.Timestamp = header.Timer;
						_video.Write(videoData);
						break;
					case Constants.TypeAudioData:
						AudioData audioData = new AudioData((msg as AudioData).Data);
						audioData.Header = header;
						audioData.Timestamp = header.Timer;
						_audio.Write(audioData);
						break;
					case Constants.TypePing:
						Ping ping = new Ping((msg as Ping).PingType, (msg as Ping).Value2, (msg as Ping).Value3, (msg as Ping).Value4);
						ping.Header = header;
						_connection.Ping(ping);
						break;
					case Constants.TypeBytesRead:
						BytesRead bytesRead = new BytesRead((msg as BytesRead).Bytes);
						bytesRead.Header = header;
						bytesRead.Timestamp = header.Timer;
						_connection.GetChannel((byte)2).Write(bytesRead);
						break;
					default:
						_data.Write(msg);
						break;
				}
			}
		}
Example #8
0
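		/// <summary>
		/// Pulls the next tag from the file reader, wraps it in the matching RTMP event and
		/// returns it as an RTMP message; returns null when the pipe does not match or the
		/// reader has no more tags.
		/// </summary>
		/// <param name="pipe">The pipe the message is pulled from.</param>
		/// <returns>The next RTMP message, or null.</returns>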
		public IMessage PullMessage(IPipe pipe) {
			lock (_syncLock) {
				if (_pipe != pipe)
					return null;
				if (_reader == null)
					Init();
				if (!_reader.HasMoreTags()) {
					// TODO send OOBCM to notify EOF
					// Do not unsubscribe as this kills VOD seek while in buffer
					// this.pipe.unsubscribe(this);
					return null;
				}
				ITag tag = _reader.ReadTag();
				IRtmpEvent msg = null;
				int timestamp = tag.Timestamp;
				switch (tag.DataType) {
					case Constants.TypeAudioData:
						msg = new AudioData(tag.Body);
						break;
					case Constants.TypeVideoData:
						msg = new VideoData(tag.Body);
						break;
					case Constants.TypeInvoke:
						msg = new Invoke(tag.Body);
						break;
					case Constants.TypeNotify:
						msg = new Notify(tag.Body);
						break;
					case Constants.TypeFlexStreamEnd:
						msg = new FlexStreamSend(tag.Body);
						break;
					default:
						log.Warn("Unexpected type " + tag.DataType);
						msg = new Unknown(tag.DataType, tag.Body);
						break;
				}
				msg.Timestamp = timestamp;
				RtmpMessage rtmpMsg = new RtmpMessage();
				rtmpMsg.body = msg;
				return rtmpMsg;
			}
		}