/// <summary>
/// Seeks playback to the frame closest to <paramref name="absoluteTimestamp"/> (milliseconds)
/// using the millisecond-to-frame-index table stored in the seek file, then resets the
/// feeding counters so <c>Feed()</c> resumes from that frame.
/// </summary>
/// <param name="absoluteTimestamp">
/// In: the requested timestamp in ms. Out: the exact timestamp of the frame actually landed on.
/// </param>
/// <returns><c>true</c> on success; <c>false</c> if the seek file could not be positioned or read.</returns>
private bool InternalSeek(ref double absoluteTimestamp) {
    //0. We have to send codecs again after any seek
    _audioVideoCodecsSent = false;

    //1. Switch to the millisecond -> frame-index table
    if (!_pSeekFile.SeekTo(_timeToIndexOffset)) {
        Logger.FATAL("Failed to seek to ms.FrameIndex table");
        return false;
    }

    //2. Read the sampling rate (milliseconds covered by each table entry)
    var samplingRate = _pSeekFile.Br.ReadUInt32();

    //3. Compute the index in the time-to-frame-index table
    var tableIndex = (uint)(absoluteTimestamp / samplingRate);

    //4. Seek to that corresponding entry (each entry is a 4-byte frame index)
    _pSeekFile.SeekAhead(tableIndex * 4);

    //5. Read the frame index
    var frameIndex = _pSeekFile.Br.ReadUInt32();

    //6. Position the seek file on that particular frame
    if (!_pSeekFile.SeekTo(_framesBaseOffset + frameIndex * MediaFrame.MediaFrameSize)) {
        Logger.FATAL("Unable to seek inside seek file");
        return false;
    }

    //7. Read the frame
    if (!MediaFrame.ReadFromMediaFile(_pSeekFile, out _currentFrame)) {
        Logger.FATAL("Unable to read frame from seeking file");
        return false;
    }

    //8. Update the stream counters; _totalSentTimeBase is in seconds (AbsoluteTime is ms)
    _startFeedingTime = DateTime.Now;
    _totalSentTime = 0;
    _currentFrameIndex = frameIndex;
    _totalSentTimeBase = (uint)(_currentFrame.AbsoluteTime / 1000);
    absoluteTimestamp = _currentFrame.AbsoluteTime;

    //9. Re-position on the frame of interest so the next read starts there
    if (!_pSeekFile.SeekTo(_framesBaseOffset + frameIndex * MediaFrame.MediaFrameSize)) {
        Logger.FATAL("Unable to seek inside seek file");
        return false;
    }

    //10. Done
    return true;
}
/// <summary>
/// Pumps frames from the media file to the connected out-streams. Sends the codec setup
/// frames first if needed, then loops until the client-side buffer is sufficiently full,
/// playback is paused, all out-streams are gone, or the end of file / play limit is reached.
/// </summary>
/// <returns><c>false</c> on a fatal seek/read/feed error; <c>true</c> otherwise.</returns>
protected virtual bool Feed() {
    //1. First, send audio and video codec setup frames if not already sent
    if (!_audioVideoCodecsSent && !SendCodecs()) {
        Logger.FATAL("Unable to send audio codec");
        return false;
    }
    while (!Paused && OutStreams.Count != 0) {
        //2. If the client already has at least _clientSideBufferLength seconds
        //buffered ahead of real time, stay put until it drains
        var elapsedTime = (int)(DateTime.Now - _startFeedingTime).TotalSeconds;
        if ((int)_totalSentTime - elapsedTime >= _clientSideBufferLength) {
            return true;
        }
        //3. Test to see if we have sent the last frame or hit the play limit
        if (_currentFrameIndex >= _totalFrames || _playLimit >= 0 && _playLimit < _totalSentTime) {
            this.Log().Info("Done streaming file");
            OutStreams.Last().SignalStreamCompleted();
            Paused = true;
            return true;
        }
        //4. Read the current frame from the seeking file
        if (!_pSeekFile.SeekTo(_framesBaseOffset + _currentFrameIndex * MediaFrame.MediaFrameSize)) {
            Logger.FATAL("Unable to seek inside seek file");
            return false;
        }
        if (!MediaFrame.ReadFromMediaFile(_pSeekFile, out _currentFrame)) {
            Logger.FATAL("Unable to read frame from seeking file");
            return false;
        }
        //5. Dispatch on frame type. `buffer` doubles as the audio/video sentinel:
        //the Audio case selects _audioBuffer then falls through (goto case) to the
        //shared Video handling, which only assigns _videoBuffer if unset.
        Stream buffer = null;
        switch (_currentFrame.Type) {
            case MediaFrameType.Data:
                _currentFrameIndex++;
                if (!FeedMetaData(_pFile, _currentFrame)) {
                    Logger.FATAL("Unable to feed metadata");
                    return false;
                }
                break;
            case MediaFrameType.Audio:
                buffer = _audioBuffer;
                goto case MediaFrameType.Video;
            case MediaFrameType.Video:
                if (buffer == null)
                    buffer = _videoBuffer;
                //6. Build the frame
                if (!BuildFrame(_pFile, _currentFrame, buffer)) {
                    Logger.FATAL("Unable to build the frame");
                    return false;
                }
                //7. Compute sent time (seconds) relative to the seek base
                _totalSentTime = _currentFrame.AbsoluteTime / 1000 - _totalSentTimeBase;
                //8. Do the feeding
                FeedData(buffer, (uint)buffer.Length, 0, (uint)buffer.Length,
                        _currentFrame.AbsoluteTime,
                        _currentFrame.Type == MediaFrameType.Audio);
                //9. Discard the fed data and advance to the next frame
                buffer.IgnoreAll();
                _currentFrameIndex++;
                break;
            default:
                if (!FeedOtherType())
                    return false;
                break;
        }
    }
    return true;
}
/// <summary>
/// Reads one <see cref="MediaFrame"/> record from the seek file at the given frame index.
/// </summary>
/// <param name="index">Zero-based frame index inside the seek file's frame table.</param>
/// <param name="frame">The frame read; default-valued on failure.</param>
/// <returns><c>true</c> on success; <c>false</c> if the seek or read failed (already logged).</returns>
private bool TryReadSeekFrame(uint index, out MediaFrame frame) {
    frame = default(MediaFrame);
    if (!_pSeekFile.SeekTo(_framesBaseOffset + index * MediaFrame.MediaFrameSize)) {
        Logger.FATAL("Unable to seek inside seek file");
        return false;
    }
    if (!MediaFrame.ReadFromMediaFile(_pSeekFile, out frame)) {
        Logger.FATAL("Unable to read frame from seeking file");
        return false;
    }
    return true;
}

/// <summary>
/// Sends the audio/video codec setup frames (the first one or two frames of the file,
/// when flagged as binary headers) to the client, stamped with the current frame's
/// timestamp so they sort correctly into the stream. Sets
/// <c>_audioVideoCodecsSent</c> once done so they are not sent again.
/// </summary>
/// <returns><c>false</c> on a fatal seek/read/build error; <c>true</c> otherwise.</returns>
private bool SendCodecs() {
    //1. Read the first two frames — the potential codec setup frames
    MediaFrame frame1;
    if (!TryReadSeekFrame(0, out frame1))
        return false;
    MediaFrame frame2;
    if (!TryReadSeekFrame(1, out frame2))
        return false;

    //2. Read the current frame, only to pick up its timestamp
    MediaFrame currentFrame;
    if (!TryReadSeekFrame(_currentFrameIndex, out currentFrame))
        return false;

    //3. If the first frame is not a codec setup, the second cannot be either
    if (!frame1.IsBinaryHeader) {
        _audioVideoCodecsSent = true;
        return true;
    }

    //4. Build and feed the first codec frame
    //NOTE(review): the recyclable stream is never disposed/returned here — presumably
    //FeedData takes ownership or the pool tolerates it; confirm against Utils.Rms usage.
    var buffer = Utils.Rms.GetStream();
    if (!BuildFrame(_pFile, frame1, buffer)) {
        Logger.FATAL("Unable to build the frame");
        return false;
    }
    FeedData(buffer, (uint)buffer.Length, 0, (uint)buffer.Length,
            currentFrame.AbsoluteTime, frame1.Type == MediaFrameType.Audio);

    //5. If the second frame is not a codec setup, we are done
    if (!frame2.IsBinaryHeader) {
        _audioVideoCodecsSent = true;
        return true;
    }

    //6. Build and feed the second codec frame, reusing the buffer
    buffer.IgnoreAll();
    if (!BuildFrame(_pFile, frame2, buffer)) {
        Logger.FATAL("Unable to build the frame");
        return false;
    }
    FeedData(buffer, (uint)buffer.Length, 0, (uint)buffer.Length,
            currentFrame.AbsoluteTime, frame2.Type == MediaFrameType.Audio);

    //7. Done
    _audioVideoCodecsSent = true;
    return true;
}