/// <summary>
/// Disable video payloads that precede the last key frame in this packet so a
/// trimmed stream starts cleanly on a key frame
/// </summary>
/// <param name="configuration">The ASF configuration</param>
/// <param name="asfStream">The stream info</param>
/// <returns>false if the packet carries video but no key frame was found; otherwise true</returns>
public bool SetStart(AsfFileConfiguration configuration, AsfStreamInfo asfStream)
{
    if (asfStream.StreamType != AsfStreamType.asfAudio) //nothing to do for audio
    {
        //determine the last key frame in this packet
        PayloadInfo keyframeInfo = (from payload in Payload
                                    where payload.StreamId == configuration.AsfVideoStreamId && payload.IsKeyframeStart
                                    select payload).LastOrDefault();
        if (keyframeInfo == null)
            return false;

        //now cut out everything before that presentation time
        for (int idx = 0; idx < Payload.Count; idx++)
        {
            if (Payload[idx].StreamId == configuration.AsfVideoStreamId &&
                !Payload[idx].IsKeyframeStart &&
                Payload[idx].PresentationTime < keyframeInfo.PresentationTime)
            {
                byte streamId = Payload[idx].StreamId;
                if (Payload[idx].MediaObjectNumber < keyframeInfo.MediaObjectNumber &&
                    asfStream.StreamType != AsfStreamType.asfUnaltered)
                {
                    //this is a B- or P-frame that comes before the key frame, so it must be disabled
                    streamId += AsfConstants.ASF_PRIVATE_STREAM_OFFSET;
                }
                Payload[idx].StreamId = streamId;
                Buffer.SetByte(_packet, Payload[idx].StreamIDOffset, streamId);

                //fix the media offset as well
                UInt32 mediaOffset = 0;
                Payload[idx].OffsetIntoMedia = mediaOffset;
                Buffer.BlockCopy(BitConverter.GetBytes(mediaOffset), 0, _packet, Payload[idx].MediaOffset, sizeof(UInt32));
            }
        }
    }
    return true;
}
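//--- Usage sketch (illustrative, not library code) ----------------------------------
//A minimal sketch of how SetStart might be applied, assuming it lives on a packet
//class; 'config' and 'firstPacket' are hypothetical locals, not part of the code above.
//Payloads whose stream id is shifted by ASF_PRIVATE_STREAM_OFFSET stay in the packet
//byte-for-byte but are ignored by the renderer, so the packet layout never changes.
AsfStreamInfo info = new AsfStreamInfo(AsfStreamType.asfStream);
if (!firstPacket.SetStart(config, info))
{
    //no key frame in this packet - a caller would typically move on to the next packet
}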
/// <summary>
/// Deep-copy the per-stream media object bookkeeping from another stream info
/// </summary>
/// <param name="info">The stream info to copy from</param>
public void UpdateFromStream(AsfStreamInfo info)
{
    _mediaObjectId = new byte[info.MediaObjectId.Length];
    _prevMediaObjectId = new byte[info.PrevMediaObjectId.Length];
    Array.Copy(info.MediaObjectId, _mediaObjectId, info.MediaObjectId.Length);
    Array.Copy(info.PrevMediaObjectId, _prevMediaObjectId, info.PrevMediaObjectId.Length);
    _maxPresentationTime = new Dictionary<byte, uint>(info.MaxPresentationTime);
}
public AsfStreamInfo(AsfStreamInfo info)
{
    //copy scalar properties
    StreamType = info.StreamType;
    StartSendTime = info.StartSendTime;
    MinPacketSendTime = info.MinPacketSendTime;
    StartTimeOffset = info.StartTimeOffset;
    EndTimeOffset = info.EndTimeOffset;
    //UpdateFromStream deep-copies the media object ids and max presentation times
    UpdateFromStream(info);
}
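//--- Usage sketch (illustrative, not library code) ----------------------------------
//The copy constructor plus UpdateFromStream allow checkpointing the bookkeeping state
//before speculatively processing a packet; 'streamInfo' is a hypothetical local.
AsfStreamInfo checkpoint = new AsfStreamInfo(streamInfo); //deep snapshot of the current state
//... process a packet against streamInfo ...
streamInfo.UpdateFromStream(checkpoint); //roll back the media object ids if the packet is discarded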
private void Init(AsfStreamType streamType, double startOffset, double endOffset)
{
    StreamType = streamType;
    if (StreamType == AsfStreamType.asfImage)
    {
        endOffset = 0; //use all available data
    }

    if (startOffset < 0 || endOffset < 0 ||
        ((streamType != AsfStreamType.asfStream &&
          streamType != AsfStreamType.asfImage &&
          streamType != AsfStreamType.asfAudio) && endOffset < startOffset))
    {
        throw new ArgumentOutOfRangeException();
    }

    if (streamType == AsfStreamType.asfImage && _asfFile.PacketConfiguration.ImageWidth == 0)
    {
        throw new AsfStreamException("Cannot create image stream for audio file");
    }

    _streamInfo = new AsfStreamInfo(StreamType);

    FilePosition fileStartPos;
    FilePosition fileEndPos;
    bool status = _asfFile.SetOffsetRange(startOffset, endOffset, out fileStartPos, out fileEndPos, streamType);
    if (!status)
    {
        //could not find file data covering the requested start and end offsets
        throw new AsfStreamException("Asf stream data within required offsets not found");
    }

    _asfConfig = _asfFile.PacketConfiguration;
    _streamInfo.StartTimeOffset = fileStartPos.TimeOffset;
    _streamInfo.EndTimeOffset = fileEndPos.TimeOffset;

    if (StreamType == AsfStreamType.asfStream)
    {
        _length = Math.Min(int.MaxValue, _asfFile.Length + _asfConfig.AsfHeaderSize);
    }
    else if (StreamType == AsfStreamType.asfUnaltered)
    {
        _length = _asfFile.Length;
    }
    else
    {
        _length = _asfFile.Length + _asfConfig.AsfHeaderSize;
    }
}
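//--- Usage sketch (illustrative, not library code) ----------------------------------
//Init is private, so a public constructor presumably forwards to it; the signature
//below is an assumption inferred from Init's parameters and may differ in the real API.
AsfStream segment = new AsfStream(AsfStreamType.asfStream, 10.0, 20.0); //hypothetical ctor: seconds 10-20
//For asfImage the end offset is irrelevant: Init forces endOffset = 0 and uses all data.
//Invalid offsets throw ArgumentOutOfRangeException; an unsatisfiable range throws AsfStreamException.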
/// <summary>
/// Correct presentation and send time stamps based on the stream info
/// </summary>
/// <param name="configuration">The ASF configuration</param>
/// <param name="asfStreamInfo">The stream info</param>
public void SetFollowup(AsfFileConfiguration configuration, AsfStreamInfo asfStreamInfo)
{
    //rebase the packet send time to the start of the segment, keeping a safety threshold
    Int64 packetSendTime = (Int64)SendTime - asfStreamInfo.StartSendTime;
    if (packetSendTime - AsfConstants.ASF_SEND_SAFTEY_THRESHOLD > 0)
        packetSendTime -= AsfConstants.ASF_SEND_SAFTEY_THRESHOLD;
    else
        packetSendTime = 0;
    SendTime = (uint)packetSendTime;

    //base payload times on the packet send time, adding the delta to the zero-based send time
    for (int i = 0; i < Payload.Count; i++)
    {
        Int64 payloadPresentationTime = (Int64)Payload[i].PresentationTime - asfStreamInfo.StartTimeOffset;
        if (payloadPresentationTime < _asfConfig.AsfPreroll)
        {
            //an audio payload before the preroll must be eliminated; assign it a private stream id
            if (Payload[i].StreamId == configuration.AsfAudioStreamId && asfStreamInfo.StreamType != AsfStreamType.asfUnaltered)
            {
                payloadPresentationTime = SendTime + _asfConfig.AsfPreroll;
                MovePayloadPrivate(Payload[i], (uint)payloadPresentationTime);
            }
            else
            {
                //set the time slightly before the preroll time - not used by the renderer, but
                //decoded so the first frame (seek point) can be a delta frame
                payloadPresentationTime = asfStreamInfo.StreamType == AsfStreamType.asfImage ?
                    (_asfConfig.AsfPreroll - 100) : _asfConfig.AsfPreroll;
            }
        }

        //remove unnecessary audio data for an image stream, so only one stream remains to set the timeline
        if (asfStreamInfo.StreamType == AsfStreamType.asfImage && Payload[i].StreamId == configuration.AsfAudioStreamId)
        {
            MovePayloadPrivate(Payload[i], SendTime + _asfConfig.AsfPreroll);
        }

        //the packet send time must be earlier than the payload presentation times
        if (payloadPresentationTime < SendTime)
            SendTime = Math.Max((uint)payloadPresentationTime, asfStreamInfo.MinPacketSendTime);

        SetPayloadPresentationTime(Payload[i], (uint)payloadPresentationTime);

        if (asfStreamInfo.StreamType != AsfStreamType.asfUnaltered &&
            asfStreamInfo.StreamType != AsfStreamType.asfImage &&
            Payload[i].PresentationTime > _asfConfig.AsfPreroll &&
            Payload[i].PresentationTime - _asfConfig.AsfPreroll > (asfStreamInfo.EndTimeOffset - asfStreamInfo.StartTimeOffset))
        {
            //crop both audio and video at the end of the segment
            payloadPresentationTime = (asfStreamInfo.EndTimeOffset - asfStreamInfo.StartTimeOffset) + _asfConfig.AsfPreroll;
            MovePayloadPrivate(Payload[i], (uint)payloadPresentationTime);
        }

        //track the maximum presentation time seen per stream
        uint maxPresentationTime = 0;
        asfStreamInfo.MaxPresentationTime.TryGetValue(Payload[i].StreamId, out maxPresentationTime);
        if (maxPresentationTime < Payload[i].PresentationTime)
            asfStreamInfo.MaxPresentationTime[Payload[i].StreamId] = Payload[i].PresentationTime;

        //handle media object ids: they must be consecutive, start at zero, and roll over at 255
        if ((asfStreamInfo.MediaObjectId[Payload[i].StreamId] == 0 && asfStreamInfo.PrevMediaObjectId[Payload[i].StreamId] == 0) ||
            asfStreamInfo.PrevMediaObjectId[Payload[i].StreamId] != Payload[i].MediaObjectNumber)
        {
            asfStreamInfo.MediaObjectId[Payload[i].StreamId]++;
        }
        asfStreamInfo.PrevMediaObjectId[Payload[i].StreamId] = Payload[i].MediaObjectNumber;
        SetMediaObjectNumber(i, asfStreamInfo.MediaObjectId[Payload[i].StreamId]);
    }

    //the send time of the next packet must be larger than or equal to that of the current packet
    asfStreamInfo.MinPacketSendTime = SendTime;
}
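//--- Pipeline sketch (illustrative, not library code) --------------------------------
//How SetStart and SetFollowup might cooperate when emitting a trimmed segment.
//'config' and GetSegmentPackets() are assumptions; only SetStart, SetFollowup and
//AsfStreamInfo come from the code in this file. StartSendTime and StartTimeOffset are
//assumed to have been seeded from the segment's first packet.
AsfStreamInfo state = new AsfStreamInfo(AsfStreamType.asfStream);
bool keyframeFound = false;
foreach (var packet in GetSegmentPackets()) //hypothetical packet source
{
    if (!keyframeFound)
        keyframeFound = packet.SetStart(config, state); //disable pre-keyframe B/P-frames
    packet.SetFollowup(config, state); //rebase timestamps and renumber media objects
    //state.MinPacketSendTime now carries the monotonic send-time constraint forward
}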