/// <summary>
/// Creates and returns a new video codec applicable for the byte buffer data.
/// </summary>
/// <param name="data">Byte buffer data.</param>
/// <returns>Video codec, or null if no codec can handle the data.</returns>
public IVideoStreamCodec GetVideoCodec(ByteBuffer data)
{
    IVideoStreamCodec result = null;
    // Get the codec identifying byte
    int codecId = data.Get() & 0x0f;
    switch (codecId)
    {
        case 2: // Sorenson
            result = new SorensonVideo();
            break;
        case 3: // Screen video
            result = new ScreenVideo();
            break;
        case 7: // AVC/H.264 video
            result = new AVCVideo();
            break;
    }
    data.Rewind();
    if (result == null)
    {
        IVideoStreamCodec codec;
        foreach (IVideoStreamCodec storedCodec in _codecs)
        {
            // XXX: this is a bit of a hack to create new instances of the
            // configured video codec for each stream
            try
            {
                codec = Activator.CreateInstance(storedCodec.GetType()) as IVideoStreamCodec;
            }
            catch (Exception ex)
            {
                log.Error("Could not create video codec instance.", ex);
                continue;
            }
            log.Info("Trying codec " + codec);
            if (codec.CanHandleData(data))
            {
                result = codec;
                break;
            }
        }
    }
    return result;
}
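// Context for the codec-ID switch above: in an FLV/RTMP VideoData payload the first byte
// packs the frame type in the upper four bits and the codec ID in the lower four bits
// (2 = Sorenson H.263, 3 = Screen Video, 4/5 = On2 VP6, 6 = Screen Video v2, 7 = AVC/H.264).
// The sketch below only illustrates that byte layout; FlvVideoTag and Describe are
// hypothetical names, not FluorineFx members.
static class FlvVideoTag
{
    public static string Describe(byte firstByte)
    {
        int frameType = (firstByte >> 4) & 0x0f; // 1 = keyframe, 2 = inter frame, 5 = video info/command
        int codecId = firstByte & 0x0f;          // the same value GetVideoCodec switches on
        string codec;
        switch (codecId)
        {
            case 2: codec = "Sorenson H.263"; break;
            case 3: codec = "Screen Video"; break;
            case 7: codec = "AVC/H.264"; break;
            default: codec = "unknown (" + codecId + ")"; break;
        }
        return string.Format("frameType={0}, codec={1}", frameType, codec);
    }
}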
public void DispatchEvent(IEvent evt)
{
    if ((!(evt is IRtmpEvent) && (evt.EventType != EventType.STREAM_CONTROL) && (evt.EventType != EventType.STREAM_DATA)) || _closed)
    {
        // Ignored event
        if (log.IsDebugEnabled)
        {
            log.Debug("DispatchEvent: " + evt.EventType);
        }
        return;
    }
    // Get stream codec
    IStreamCodecInfo codecInfo = this.CodecInfo;
    StreamCodecInfo info = codecInfo as StreamCodecInfo;
    IRtmpEvent rtmpEvent = evt as IRtmpEvent;
    if (rtmpEvent == null)
    {
        if (log.IsDebugEnabled)
        {
            log.Debug("IRtmpEvent expected in event dispatch");
        }
        return;
    }
    int eventTime = -1;
    // If this is the first packet, save its timestamp
    if (_firstPacketTime == -1)
    {
        _firstPacketTime = rtmpEvent.Timestamp;
        if (log.IsDebugEnabled)
        {
            log.Debug(string.Format("CBS: {0} firstPacketTime={1} {2}", this.Name, _firstPacketTime,
                rtmpEvent.Header.IsTimerRelative ? "(rel)" : "(abs)"));
        }
    }
    if (rtmpEvent is IStreamData && (rtmpEvent as IStreamData).Data != null)
    {
        _bytesReceived += (rtmpEvent as IStreamData).Data.Limit;
    }
    if (rtmpEvent is AudioData)
    {
        if (info != null)
        {
            info.HasAudio = true;
        }
        if (rtmpEvent.Header.IsTimerRelative)
        {
            if (_audioTime == 0)
            {
                log.Warn(string.Format("First Audio timestamp is relative! {0}", rtmpEvent.Timestamp));
            }
            _audioTime += rtmpEvent.Timestamp;
        }
        else
        {
            _audioTime = rtmpEvent.Timestamp;
        }
        eventTime = _audioTime;
    }
    else if (rtmpEvent is VideoData)
    {
        IVideoStreamCodec videoStreamCodec = null;
        if (_videoCodecFactory != null && _checkVideoCodec)
        {
            videoStreamCodec = _videoCodecFactory.GetVideoCodec((rtmpEvent as VideoData).Data);
            if (codecInfo is StreamCodecInfo)
            {
                (codecInfo as StreamCodecInfo).VideoCodec = videoStreamCodec;
            }
            _checkVideoCodec = false;
        }
        else if (codecInfo != null)
        {
            videoStreamCodec = codecInfo.VideoCodec;
        }
        if (videoStreamCodec != null)
        {
            videoStreamCodec.AddData((rtmpEvent as VideoData).Data);
        }
        if (info != null)
        {
            info.HasVideo = true;
        }
        if (rtmpEvent.Header.IsTimerRelative)
        {
            if (_videoTime == 0)
            {
                log.Warn(string.Format("First Video timestamp is relative! {0}", rtmpEvent.Timestamp));
            }
            _videoTime += rtmpEvent.Timestamp;
        }
        else
        {
            _videoTime = rtmpEvent.Timestamp;
            // Flash Player may send the first VideoData with an old absolute timestamp.
            // This ruins the stream's timebase in FileConsumer.
            // We don't want to discard the packet, as it may be a video keyframe.
            // Generally a Data or Audio packet has set the timebase to a reasonable value;
            // eventually a new/correct absolute time will come on the video channel.
            // We could put this logic between livePipe and filePipe; this would work for
            // Audio/Data as well, but we have not seen the need.
            int cts = Math.Max(_audioTime, _dataTime);
            cts = Math.Max(cts, _minStreamTime);
            // Accept some slightly (20ms) retro timestamps [this may not be needed,
            // the publish Data should strictly precede the video data]
            int fudge = 20;
            if (_videoTime + fudge < cts)
            {
                if (log.IsDebugEnabled)
                {
                    log.Debug(string.Format("DispatchEvent: adjust archaic videoTime, from: {0} to {1}", _videoTime, cts));
                }
                _videoTime = cts;
            }
        }
        eventTime = _videoTime;
    }
    else if (rtmpEvent is Invoke)
    {
        if (rtmpEvent.Header.IsTimerRelative)
        {
            if (_dataTime == 0)
            {
                log.Warn(string.Format("First data [Invoke] timestamp is relative! {0}", rtmpEvent.Timestamp));
            }
            _dataTime += rtmpEvent.Timestamp;
        }
        else
        {
            _dataTime = rtmpEvent.Timestamp;
        }
        return;
    }
    else if (rtmpEvent is Notify)
    {
        if (rtmpEvent.Header.IsTimerRelative)
        {
            if (_dataTime == 0)
            {
                log.Warn(string.Format("First data [Notify] timestamp is relative! {0}", rtmpEvent.Timestamp));
            }
            _dataTime += rtmpEvent.Timestamp;
        }
        else
        {
            _dataTime = rtmpEvent.Timestamp;
        }
        eventTime = _dataTime;
    }
    // Notify event listeners
    CheckSendNotifications(evt);
    // Create a new RTMP message, initialize it and push it through the pipes
    FluorineFx.Messaging.Rtmp.Stream.Messages.RtmpMessage msg = new FluorineFx.Messaging.Rtmp.Stream.Messages.RtmpMessage();
    msg.body = rtmpEvent;
    msg.body.Timestamp = eventTime;
    try
    {
        if (_livePipe != null)
        {
            _livePipe.PushMessage(msg);
        }
        if (_recordPipe != null)
        {
            _recordPipe.PushMessage(msg);
        }
    }
    catch (System.IO.IOException ex)
    {
        SendRecordFailedNotify(ex.Message);
        Stop();
    }
    // Notify listeners about the received packet
    if (rtmpEvent is IStreamPacket)
    {
        foreach (IStreamListener listener in GetStreamListeners())
        {
            try
            {
                listener.PacketReceived(this, rtmpEvent as IStreamPacket);
            }
            catch (Exception ex)
            {
                log.Error(string.Format("Error while notifying listener {0}", listener), ex);
            }
        }
    }
}
public void DispatchEvent(IEvent @event)
{
    if (!(@event is IRtmpEvent))
    {
        return;
    }
    IRtmpEvent rtmpEvent = @event as IRtmpEvent;
    if (_livePipe == null)
    {
        return;
    }
    RtmpMessage msg = new RtmpMessage();
    msg.body = rtmpEvent;
    if (_creationTime == -1)
    {
        _creationTime = rtmpEvent.Timestamp;
    }
    try
    {
        if (@event is AudioData)
        {
            if (_codecInfo is StreamCodecInfo)
            {
                (_codecInfo as StreamCodecInfo).HasAudio = true;
            }
        }
        else if (@event is VideoData)
        {
            IVideoStreamCodec videoStreamCodec = null;
            if (_codecInfo != null && _codecInfo.VideoCodec == null)
            {
                // Detect the codec from the first video packet and cache it
                videoStreamCodec = _videoCodecFactory.GetVideoCodec((@event as VideoData).Data);
                (_codecInfo as StreamCodecInfo).VideoCodec = videoStreamCodec;
            }
            else if (_codecInfo != null)
            {
                videoStreamCodec = _codecInfo.VideoCodec;
            }
            if (videoStreamCodec != null)
            {
                videoStreamCodec.AddData((rtmpEvent as VideoData).Data);
            }
            if (_codecInfo is StreamCodecInfo)
            {
                (_codecInfo as StreamCodecInfo).HasVideo = true;
            }
        }
        _livePipe.PushMessage(msg);
        // Notify listeners about the received packet
        if (rtmpEvent is IStreamPacket)
        {
            foreach (IStreamListener listener in GetStreamListeners())
            {
                try
                {
                    listener.PacketReceived(this, rtmpEvent as IStreamPacket);
                }
                catch (Exception ex)
                {
                    log.Error("Error while notifying listener " + listener, ex);
                }
            }
        }
    }
    catch (Exception ex)
    {
        // Ignore, but log the failure
        log.Error("DispatchEvent exception", ex);
    }
}
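// The loop above fans each packet out to registered IStreamListeners and swallows their
// exceptions. Hypothetical listener for illustration only; it assumes IStreamListener
// declares a single PacketReceived(IBroadcastStream stream, IStreamPacket packet) member,
// which is what the call sites (listener.PacketReceived(this, ...)) suggest.
public class PacketCountingListener : IStreamListener
{
    private long _packets;

    public long Packets { get { return _packets; } }

    public void PacketReceived(IBroadcastStream stream, IStreamPacket packet)
    {
        // Count every packet the stream dispatches. Any exception thrown here is caught
        // and logged by DispatchEvent, so a misbehaving listener cannot break the stream.
        _packets++;
    }
}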
public void DispatchEvent(IEvent evt)
{
    if ((!(evt is IRtmpEvent) && (evt.EventType != EventType.STREAM_CONTROL) && (evt.EventType != EventType.STREAM_DATA)) || _closed)
    {
        if (log.IsDebugEnabled)
        {
            log.Debug("DispatchEvent: " + evt.EventType);
        }
        return;
    }
    IStreamCodecInfo codecInfo = base.CodecInfo;
    StreamCodecInfo info = codecInfo as StreamCodecInfo;
    IRtmpEvent rtmpEvent = evt as IRtmpEvent;
    int eventTime = -1;
    if (_firstPacketTime == -1)
    {
        _firstPacketTime = rtmpEvent.Timestamp;
    }
    if (rtmpEvent is AudioData)
    {
        if (info != null)
        {
            info.HasAudio = true;
        }
        if (rtmpEvent.Header.IsTimerRelative)
        {
            _audioTime += rtmpEvent.Timestamp;
        }
        else
        {
            _audioTime = rtmpEvent.Timestamp;
        }
        eventTime = _audioTime;
    }
    else if (rtmpEvent is VideoData)
    {
        IVideoStreamCodec videoCodec = null;
        if (_videoCodecFactory != null && _checkVideoCodec)
        {
            videoCodec = _videoCodecFactory.GetVideoCodec((rtmpEvent as VideoData).Data);
            if (codecInfo is StreamCodecInfo)
            {
                (codecInfo as StreamCodecInfo).VideoCodec = videoCodec;
            }
            _checkVideoCodec = false;
        }
        else if (codecInfo != null)
        {
            videoCodec = codecInfo.VideoCodec;
        }
        if (videoCodec != null)
        {
            videoCodec.AddData((rtmpEvent as VideoData).Data);
        }
        if (info != null)
        {
            info.HasVideo = true;
        }
        if (rtmpEvent.Header.IsTimerRelative)
        {
            _videoTime += rtmpEvent.Timestamp;
        }
        else
        {
            _videoTime = rtmpEvent.Timestamp;
        }
        eventTime = _videoTime;
    }
    else if (rtmpEvent is Invoke)
    {
        if (rtmpEvent.Header.IsTimerRelative)
        {
            _dataTime += rtmpEvent.Timestamp;
        }
        else
        {
            _dataTime = rtmpEvent.Timestamp;
        }
        return;
    }
    else if (rtmpEvent is Notify)
    {
        if (rtmpEvent.Header.IsTimerRelative)
        {
            _dataTime += rtmpEvent.Timestamp;
        }
        else
        {
            _dataTime = rtmpEvent.Timestamp;
        }
        eventTime = _dataTime;
    }
    if (rtmpEvent is IStreamData && (rtmpEvent as IStreamData).Data != null)
    {
        _bytesReceived += (rtmpEvent as IStreamData).Data.Limit;
    }
    CheckSendNotifications(evt);
    RtmpMessage message = new RtmpMessage();
    message.body = rtmpEvent;
    message.body.Timestamp = eventTime;
    try
    {
        if (_livePipe != null)
        {
            _livePipe.PushMessage(message);
        }
        if (_recordPipe != null)
        {
            _recordPipe.PushMessage(message);
        }
    }
    catch (IOException ex)
    {
        SendRecordFailedNotify(ex.Message);
        Stop();
    }
}