/// <summary>
/// Reads the next tag from the FLV stream. On the very first read, if metadata
/// generation is enabled and the file does not start with a metadata tag, a
/// synthetic metadata tag is generated and returned instead of the real tag.
/// Thread-safe via <c>SyncRoot</c>.
/// </summary>
/// <returns>The next tag (body included when it fits in the file), or a generated metadata tag on the first call.</returns>
public ITag ReadTag() {
    lock (this.SyncRoot) {
        long oldPos = GetCurrentPosition();
        ITag tag = ReadTagHeader();
        if (_tagPosition == 0 && tag.DataType != IOConstants.TYPE_METADATA && _generateMetadata) {
            // Generate initial metadata automatically
            // Rewind to the tag start before scanning; AnalyzeKeyFrames restores
            // the position it finds, so the peeked tag is re-read on the next call.
            SetCurrentPosition(oldPos);
            KeyFrameMeta meta = AnalyzeKeyFrames();
            _tagPosition++;
            if (meta != null) {
                return(CreateFileMeta());
            }
        }
        // This assists in 'properly' handling damaged FLV files
        // Only read the body when it fits inside the file; a truncated last tag
        // is returned header-only.
        long newPosition = GetCurrentPosition() + tag.BodySize;
        if (newPosition <= GetTotalBytes()) {
            byte[] buffer = _reader.ReadBytes(tag.BodySize);
            tag.Body = buffer;
            _tagPosition++;
        }
        return(tag);
    }
}
/// <summary>
/// Seeks to the keyframe at or immediately before the requested timestamp.
/// </summary>
/// <param name="ts">Target timestamp in milliseconds.</param>
/// <returns>The timestamp actually seeked to; <paramref name="ts"/> unchanged when seeking is unsupported.</returns>
public int Seek(int ts) {
    lock (this._syncLock) {
        if (this._keyFrameMeta == null) {
            // Lazily compute keyframe metadata if the reader supports it
            IKeyFrameDataAnalyzer analyzer = this._reader as IKeyFrameDataAnalyzer;
            if (analyzer == null) {
                return ts;
            }
            this._keyFrameMeta = analyzer.AnalyzeKeyFrames();
        }
        if (this._keyFrameMeta.Positions.Length == 0) {
            return ts;
        }
        if (ts >= this._keyFrameMeta.Duration) {
            // Seek at/past end of stream
            this._reader.Position = 0x7fffffffffffffffL;
            return (int)this._keyFrameMeta.Duration;
        }
        // Advance to the last keyframe whose timestamp does not exceed ts
        int frame = 0;
        while (frame + 1 < this._keyFrameMeta.Positions.Length && this._keyFrameMeta.Timestamps[frame + 1] <= ts) {
            frame++;
        }
        this._reader.Position = this._keyFrameMeta.Positions[frame];
        return this._keyFrameMeta.Timestamps[frame];
    }
}
/// <summary>
/// Reads the next tag from the FLV stream. On the very first read, if metadata
/// generation is enabled and the file does not start with a metadata tag, a
/// synthetic metadata tag is generated and returned instead of the real tag.
/// Thread-safe via <c>SyncRoot</c>.
/// </summary>
/// <returns>The next tag (body included when it fits in the file), or a generated metadata tag on the first call.</returns>
public ITag ReadTag() {
    lock (this.SyncRoot) {
        long currentPosition = this.GetCurrentPosition();
        ITag tag = this.ReadTagHeader();
        if (((this._tagPosition == 0) && (tag.DataType != IOConstants.TYPE_METADATA)) && this._generateMetadata) {
            // Rewind to the tag start before scanning; AnalyzeKeyFrames restores
            // the position it finds, so the peeked tag is re-read on the next call.
            this.SetCurrentPosition(currentPosition);
            KeyFrameMeta meta = this.AnalyzeKeyFrames();
            this._tagPosition++;
            if (meta != null) {
                return(this.CreateFileMeta());
            }
        }
        // Only read the body when it fits inside the file; a truncated last tag
        // of a damaged FLV is returned header-only.
        long num2 = this.GetCurrentPosition() + tag.BodySize;
        if (num2 <= this.GetTotalBytes()) {
            byte[] buffer = this._reader.ReadBytes(tag.BodySize);
            tag.Body = buffer;
            this._tagPosition++;
        }
        return(tag);
    }
}
/// <summary>
/// Determines whether the file contains video, based on keyframe analysis.
/// </summary>
/// <returns><c>true</c> when keyframe metadata exists, is not audio-only, and has at least one position.</returns>
public bool HasVideo() {
    KeyFrameMeta keyFrameMeta = AnalyzeKeyFrames();
    return keyFrameMeta != null && !keyFrameMeta.AudioOnly && keyFrameMeta.Positions.Length > 0;
}
/// <summary>
/// Scans the MP3 file frame by frame and builds keyframe metadata (every MP3
/// frame is treated as a keyframe). The result is cached; the stream position
/// is restored afterwards. Thread-safe via <c>_syncLock</c>.
/// </summary>
/// <returns>The cached or freshly computed <c>KeyFrameMeta</c>.</returns>
public KeyFrameMeta AnalyzeKeyFrames() {
    lock (this._syncLock) {
        if (this._frameMeta == null) {
            List<long> positions = new List<long>();
            List<double> timestamps = new List<double>();
            this._dataRate = 0;
            long bitRateSum = 0L;
            int frameCount = 0;
            long origPos = this._fileStream.Position;
            double time = 0.0;
            this._fileStream.Position = 0L;
            this.ProcessID3v2Header();
            this.SearchNextFrame();
            while (this.HasMoreTags()) {
                Mp3Header header = this.ReadHeader();
                if ((header == null) || (header.FrameSize == 0)) {
                    // No more headers, or broken file (zero-sized frame) - stop scanning
                    break;
                }
                // ReadHeader consumed 4 header bytes; the frame starts 4 bytes back
                long framePos = this._fileStream.Position - 4L;
                if ((framePos + header.FrameSize) > this._fileStream.Length) {
                    // Last frame is incomplete
                    break;
                }
                positions.Add(framePos);
                timestamps.Add(time);
                bitRateSum += header.BitRate / 0x3e8;
                time += header.FrameDuration;
                this._fileStream.Position = framePos + header.FrameSize;
                frameCount++;
            }
            this._fileStream.Position = origPos;
            this._duration = (long)time;
            // FIX: the average data rate divided by frameCount unconditionally,
            // throwing DivideByZeroException for files with no decodable frames.
            this._dataRate = frameCount > 0 ? (int)(bitRateSum / ((long)frameCount)) : 0;
            this._posTimeMap = new Dictionary<long, double>();
            this._frameMeta = new KeyFrameMeta();
            this._frameMeta.Duration = this._duration;
            this._frameMeta.Positions = new long[positions.Count];
            this._frameMeta.Timestamps = new int[timestamps.Count];
            this._frameMeta.AudioOnly = true;
            for (int i = 0; i < this._frameMeta.Positions.Length; i++) {
                // FIX: positions were cast through (int), truncating byte offsets
                // in files larger than 2 GB; Positions is long[] so no cast is needed.
                this._frameMeta.Positions[i] = positions[i];
                this._frameMeta.Timestamps[i] = (int)timestamps[i];
                this._posTimeMap.Add(positions[i], timestamps[i]);
            }
        }
        return(this._frameMeta);
    }
}
/// <summary>
/// Creates an FLV reader over the given file, decodes the FLV header when
/// present, and performs an initial keyframe analysis.
/// </summary>
/// <param name="file">The FLV file to read.</param>
/// <param name="generateMetadata">Whether to auto-generate a metadata tag on the first read.</param>
public FlvReader(FileInfo file, bool generateMetadata) {
    _file = file;
    // Shared-read stream with a 64 KB buffer
    FileStream fileStream = new FileStream(file.FullName, FileMode.Open, FileAccess.Read, FileShare.Read, 65536);
    _reader = new AMFReader(fileStream);
    _generateMetadata = generateMetadata;
    // An FLV file header is 9 bytes; skip decoding for shorter (truncated) files
    if (GetRemainingBytes() >= 9) {
        DecodeHeader();
    }
    _keyframeMeta = AnalyzeKeyFrames();
}
/// <summary>
/// Creates an FLV reader over the given file, decodes the FLV header when
/// present, and performs an initial keyframe analysis.
/// </summary>
/// <param name="file">The FLV file to read.</param>
/// <param name="generateMetadata">Whether to auto-generate a metadata tag on the first read.</param>
public FlvReader(FileInfo file, bool generateMetadata) {
    this._syncLock = new object();
    // -1 marks "tag not yet located"
    this._firstVideoTag = -1L;
    this._firstAudioTag = -1L;
    this._file = file;
    // Shared-read stream with a 64 KB (0x10000) buffer
    FileStream fileStream = new FileStream(file.FullName, FileMode.Open, FileAccess.Read, FileShare.Read, 0x10000);
    this._reader = new AMFReader(fileStream);
    this._generateMetadata = generateMetadata;
    // An FLV file header is 9 bytes; skip decoding for shorter (truncated) files
    if (this.GetRemainingBytes() >= 9L) {
        this.DecodeHeader();
    }
    this._keyframeMeta = this.AnalyzeKeyFrames();
}
/// <summary>
/// Seeks to the keyframe at or immediately before the requested timestamp.
/// </summary>
/// <param name="ts">Target timestamp in milliseconds.</param>
/// <returns>The timestamp actually seeked to; <paramref name="ts"/> unchanged when seeking is unsupported.</returns>
public int Seek(int ts) {
    lock (_syncLock) {
        if (_keyFrameMeta == null) {
            if (!(_reader is IKeyFrameDataAnalyzer)) {
                // Seeking not supported
                return ts;
            }
            _keyFrameMeta = (_reader as IKeyFrameDataAnalyzer).AnalyzeKeyFrames();
        }
        if (_keyFrameMeta.Positions.Length == 0) {
            // no video keyframe metainfo, it's an audio-only FLV
            // we skip the seek for now.
            // TODO add audio-seek capability
            return ts;
        }
        if (ts >= _keyFrameMeta.Duration) {
            // Seek at or beyond EOF
            _reader.Position = long.MaxValue;
            return (int)_keyFrameMeta.Duration;
        }
        // Walk forward while the keyframe timestamp stays at or below ts
        int frame = 0;
        int i = 0;
        while (i < _keyFrameMeta.Positions.Length && _keyFrameMeta.Timestamps[i] <= ts) {
            frame = i;
            i++;
        }
        _reader.Position = _keyFrameMeta.Positions[frame];
        return _keyFrameMeta.Timestamps[frame];
    }
}
/// <summary>
/// Scans the MP3 file frame by frame and builds keyframe metadata (every MP3
/// frame is treated as a keyframe). The result is cached; the stream position
/// is restored afterwards. Thread-safe via <c>_syncLock</c>.
/// </summary>
/// <returns>The cached or freshly computed <see cref="KeyFrameMeta"/>.</returns>
public KeyFrameMeta AnalyzeKeyFrames() {
    lock (_syncLock) {
        if (_frameMeta != null) {
            return(_frameMeta);
        }
#if !NET_1_1
        List<long> positionList = new List<long>();
        List<double> timestampList = new List<double>();
#else
        ArrayList positionList = new ArrayList();
        ArrayList timestampList = new ArrayList();
#endif
        _dataRate = 0;
        long rate = 0;
        int count = 0;
        long origPos = _fileStream.Position;
        double time = 0;
        _fileStream.Position = 0;
        ProcessID3v2Header();
        SearchNextFrame();
        while (this.HasMoreTags()) {
            Mp3Header header = ReadHeader();
            if (header == null) {
                // No more tags
                break;
            }
            if (header.FrameSize == 0) {
                // TODO find better solution how to deal with broken files...
                // See APPSERVER-62 for details
                break;
            }
            // ReadHeader consumed the 4 header bytes; the frame starts 4 bytes back
            long pos = _fileStream.Position - 4;
            if (pos + header.FrameSize > _fileStream.Length) {
                // Last frame is incomplete
                break;
            }
            positionList.Add(pos);
            timestampList.Add(time);
            rate += header.BitRate / 1000;
            time += header.FrameDuration;
            _fileStream.Position = pos + header.FrameSize;
            count++;
        }
        // restore the pos
        _fileStream.Position = origPos;
        _duration = (long)time;
        // FIX: dividing by count unconditionally threw DivideByZeroException
        // for files in which no frame could be decoded.
        _dataRate = count > 0 ? (int)(rate / count) : 0;
#if !NET_1_1
        _posTimeMap = new Dictionary<long, double>();
#else
        _posTimeMap = new Hashtable();
#endif
        _frameMeta = new KeyFrameMeta();
        _frameMeta.Duration = _duration;
        _frameMeta.Positions = new long[positionList.Count];
        _frameMeta.Timestamps = new int[timestampList.Count];
        _frameMeta.AudioOnly = true;
        for (int i = 0; i < _frameMeta.Positions.Length; i++) {
            // FIX: '(int)positionList[i]' truncated byte offsets in files > 2 GB
            // (and under NET_1_1 an (int) unbox of a boxed long throws
            // InvalidCastException). Unbox/convert via the element's real type.
            long framePos = (long)positionList[i];
            double frameTime = (double)timestampList[i];
            _frameMeta.Positions[i] = framePos;
            _frameMeta.Timestamps[i] = (int)frameTime;
            _posTimeMap.Add(framePos, frameTime);
        }
        return(_frameMeta);
    }
}
/// <summary>
/// Key frames analysis may be used as a utility method so synchronize it.
/// Performs a single pass over all tags, recording the position/timestamp of
/// every video keyframe (or of every audio tag for audio-only files), the
/// first audio/video tag positions, the total duration, and a position-to-tag
/// index map. The result is cached and the stream position is restored.
/// </summary>
/// <returns>The cached or freshly computed keyframe metadata.</returns>
public KeyFrameMeta AnalyzeKeyFrames() {
    lock (this.SyncRoot) {
        if (_keyframeMeta != null) {
            return(_keyframeMeta);
        }
        // Lists of video positions and timestamps
        List <long> positionList = new List <long>();
        List <int> timestampList = new List <int>();
        // Lists of audio positions and timestamps
        List <long> audioPositionList = new List <long>();
        List <int> audioTimestampList = new List <int>();
        long origPos = GetCurrentPosition();
        // point to the first tag (the FLV file header is 9 bytes)
        SetCurrentPosition(9);
        // Maps positions to tags
        _posTagMap = new Dictionary <long, int>();
        int idx = 0;
        bool audioOnly = true;
        while (this.HasMoreTags()) {
            long pos = GetCurrentPosition();
            _posTagMap.Add(pos, idx++);
            // Read tag header and duration
            ITag tmpTag = this.ReadTagHeader();
            // Each tag's timestamp overwrites the duration; the last one wins
            _duration = tmpTag.Timestamp;
            if (tmpTag.DataType == IOConstants.TYPE_VIDEO) {
                if (audioOnly) {
                    // First video tag seen: audio-only bookkeeping is obsolete
                    audioOnly = false;
                    audioPositionList.Clear();
                    audioTimestampList.Clear();
                }
                if (_firstVideoTag == -1) {
                    _firstVideoTag = pos;
                }
                // Grab Frame type (high nibble of the first body byte)
                byte frametype = _reader.ReadByte();
                if (((frametype & IOConstants.MASK_VIDEO_FRAMETYPE) >> 4) == IOConstants.FLAG_FRAMETYPE_KEYFRAME) {
                    positionList.Add(pos);
                    timestampList.Add(tmpTag.Timestamp);
                }
            } else if (tmpTag.DataType == IOConstants.TYPE_AUDIO) {
                if (_firstAudioTag == -1) {
                    _firstAudioTag = pos;
                }
                if (audioOnly) {
                    audioPositionList.Add(pos);
                    audioTimestampList.Add(tmpTag.Timestamp);
                }
            }
            // This properly handles damaged FLV files - as far as duration/size is concerned
            // +15 skips tag header plus back-pointer (presumably 11-byte header
            // + 4-byte previous-tag size - TODO confirm against ReadTagHeader)
            long newPosition = pos + tmpTag.BodySize + 15;
            if (newPosition >= GetTotalBytes()) {
#if !SILVERLIGHT
                log.Info("New position exceeds limit");
                if (log.IsDebugEnabled) {
                    log.Debug("Keyframe analysis");
                    log.Debug(" data type=" + tmpTag.DataType + " bodysize=" + tmpTag.BodySize);
                    log.Debug(" remaining=" + GetRemainingBytes() + " limit=" + GetTotalBytes() + " new pos=" + newPosition + " pos=" + pos);
                }
#endif
                break;
            } else {
                SetCurrentPosition(newPosition);
            }
        }
        // restore the pos
        SetCurrentPosition(origPos);
        _keyframeMeta = new KeyFrameMeta();
        _keyframeMeta.Duration = _duration;
        _posTimeMap = new Dictionary <long, long>();
        if (audioOnly) {
            // The flv only contains audio tags, use their lists to support pause and seeking
            positionList = audioPositionList;
            timestampList = audioTimestampList;
        }
        _keyframeMeta.AudioOnly = audioOnly;
        _keyframeMeta.Positions = new long[positionList.Count];
        _keyframeMeta.Timestamps = new int[timestampList.Count];
        for (int i = 0; i < _keyframeMeta.Positions.Length; i++) {
            _keyframeMeta.Positions[i] = (long)positionList[i];
            _keyframeMeta.Timestamps[i] = (int)timestampList[i];
            _posTimeMap.Add((long)positionList[i], (long)((int)timestampList[i]));
        }
        return(_keyframeMeta);
    }
}
/// <summary>
/// Performs a single pass over all FLV tags, recording the position/timestamp
/// of every video keyframe (or of every audio tag for audio-only files), the
/// first audio/video tag positions, the total duration, and a position-to-tag
/// index map. The result is cached; the stream position is restored. May be
/// used as a utility method, so it is synchronized.
/// </summary>
/// <returns>The cached or freshly computed keyframe metadata.</returns>
public KeyFrameMeta AnalyzeKeyFrames() {
    lock (this.SyncRoot) {
        if (this._keyframeMeta == null) {
            // Video keyframe positions/timestamps and audio tag positions/timestamps
            ArrayList positionList = new ArrayList();
            ArrayList timestampList = new ArrayList();
            ArrayList audioPositionList = new ArrayList();
            ArrayList audioTimestampList = new ArrayList();
            long origPos = this.GetCurrentPosition();
            // Point to the first tag (the FLV file header is 9 bytes)
            this.SetCurrentPosition(9L);
            this._posTagMap = new Hashtable();
            int tagIndex = 0;
            bool audioOnly = true;
            while (this.HasMoreTags()) {
                long pos = this.GetCurrentPosition();
                this._posTagMap.Add(pos, tagIndex++);
                ITag tag = this.ReadTagHeader();
                // Each tag's timestamp overwrites the duration; the last one wins
                this._duration = tag.Timestamp;
                if (tag.DataType == IOConstants.TYPE_VIDEO) {
                    if (audioOnly) {
                        // First video tag seen: audio-only bookkeeping is obsolete
                        audioOnly = false;
                        audioPositionList.Clear();
                        audioTimestampList.Clear();
                    }
                    if (this._firstVideoTag == -1L) {
                        this._firstVideoTag = pos;
                    }
                    // High nibble of the first body byte is the frame type
                    if (((this._reader.ReadByte() & IOConstants.MASK_VIDEO_FRAMETYPE) >> 4) == IOConstants.FLAG_FRAMETYPE_KEYFRAME) {
                        positionList.Add(pos);
                        timestampList.Add(tag.Timestamp);
                    }
                } else if (tag.DataType == IOConstants.TYPE_AUDIO) {
                    if (this._firstAudioTag == -1L) {
                        this._firstAudioTag = pos;
                    }
                    if (audioOnly) {
                        audioPositionList.Add(pos);
                        audioTimestampList.Add(tag.Timestamp);
                    }
                }
                // Handles damaged FLV files - +15 skips tag header plus back-pointer
                long newPosition = (pos + tag.BodySize) + 15L;
                if (newPosition >= this.GetTotalBytes()) {
                    log.Info("New position exceeds limit");
                    // FIX: 'log.get_IsDebugEnabled()' was a decompiler artifact and
                    // does not compile in C#; use the IsDebugEnabled property.
                    if (log.IsDebugEnabled) {
                        log.Debug("Keyframe analysis");
                        log.Debug(string.Concat(new object[] { " data type=", tag.DataType, " bodysize=", tag.BodySize }));
                        log.Debug(string.Concat(new object[] { " remaining=", this.GetRemainingBytes(), " limit=", this.GetTotalBytes(), " new pos=", newPosition, " pos=", pos }));
                    }
                    break;
                }
                this.SetCurrentPosition(newPosition);
            }
            // Restore the original position
            this.SetCurrentPosition(origPos);
            this._keyframeMeta = new KeyFrameMeta();
            this._keyframeMeta.Duration = this._duration;
            this._posTimeMap = new Hashtable();
            if (audioOnly) {
                // Audio-only file: expose audio tag positions so pause/seek still work
                positionList = audioPositionList;
                timestampList = audioTimestampList;
            }
            this._keyframeMeta.AudioOnly = audioOnly;
            this._keyframeMeta.Positions = new long[positionList.Count];
            this._keyframeMeta.Timestamps = new int[timestampList.Count];
            for (int i = 0; i < this._keyframeMeta.Positions.Length; i++) {
                this._keyframeMeta.Positions[i] = (long)positionList[i];
                this._keyframeMeta.Timestamps[i] = (int)timestampList[i];
                this._posTimeMap.Add((long)positionList[i], (long)((int)timestampList[i]));
            }
        }
        return(this._keyframeMeta);
    }
}