/// <summary>
/// Callback which handles setting up an MSS once the first MpegFrame after Id3v2 data has been read.
/// </summary>
/// <param name="mpegLayer3Frame">First MpegFrame</param>
/// <param name="mediaStreamAttributes">Empty dictionary for MediaStreamAttributes</param>
/// <param name="mediaStreamDescriptions">Empty dictionary for MediaStreamDescriptions</param>
/// <param name="mediaSourceAttributes">Empty dictionary for MediaSourceAttributes</param>
/// <exception cref="InvalidOperationException">Thrown when the frame reports a non-positive size.</exception>
private void ReadPastId3v2TagsCallback(
    MpegFrame mpegLayer3Frame,
    Dictionary<MediaStreamAttributeKeys, string> mediaStreamAttributes,
    List<MediaStreamDescription> mediaStreamDescriptions,
    Dictionary<MediaSourceAttributesKeys, string> mediaSourceAttributes)
{
    // Guard against a corrupt header. Note the check rejects zero as well as
    // negative sizes, so the message says "positive" (the old text claimed
    // only negative values were invalid).
    if (mpegLayer3Frame.FrameSize <= 0)
    {
        throw new InvalidOperationException("MpegFrame's FrameSize must be positive");
    }

    // Initialize the Mp3 data structures used by the Media pipeline with state from the first frame.
    WaveFormatExtensible wfx = new WaveFormatExtensible();
    this.MpegLayer3WaveFormat = new MpegLayer3WaveFormat();
    this.MpegLayer3WaveFormat.WaveFormatExtensible = wfx;

    this.MpegLayer3WaveFormat.WaveFormatExtensible.FormatTag = 85; // WAVE_FORMAT_MPEGLAYER3
    this.MpegLayer3WaveFormat.WaveFormatExtensible.Channels =
        (short)((mpegLayer3Frame.Channels == Channel.SingleChannel) ? 1 : 2);
    this.MpegLayer3WaveFormat.WaveFormatExtensible.SamplesPerSec = mpegLayer3Frame.SamplingRate;
    // Bitrate is in bits per second; the pipeline wants bytes per second.
    this.MpegLayer3WaveFormat.WaveFormatExtensible.AverageBytesPerSecond = mpegLayer3Frame.Bitrate / 8;
    this.MpegLayer3WaveFormat.WaveFormatExtensible.BlockAlign = 1;
    this.MpegLayer3WaveFormat.WaveFormatExtensible.BitsPerSample = 0;
    this.MpegLayer3WaveFormat.WaveFormatExtensible.ExtraDataSize = 12;

    this.MpegLayer3WaveFormat.Id = 1;
    this.MpegLayer3WaveFormat.BitratePaddingMode = 0;
    this.MpegLayer3WaveFormat.FramesPerBlock = 1;
    this.MpegLayer3WaveFormat.BlockSize = (short)mpegLayer3Frame.FrameSize;
    this.MpegLayer3WaveFormat.CodecDelay = 0;

    mediaStreamAttributes[MediaStreamAttributeKeys.CodecPrivateData] = this.MpegLayer3WaveFormat.ToHexString();
    this.audioStreamDescription = new MediaStreamDescription(MediaStreamType.Audio, mediaStreamAttributes);
    mediaStreamDescriptions.Add(this.audioStreamDescription);

    // Approximate track duration from the stream length and the (constant) byte rate.
    // NOTE(review): this assumes a CBR stream; a VBR file will report a wrong duration — confirm upstream guarantees.
    this.trackDuration = new TimeSpan(
        0,
        0,
        (int)(this.audioStreamLength / MpegLayer3WaveFormat.WaveFormatExtensible.AverageBytesPerSecond));
    mediaSourceAttributes[MediaSourceAttributesKeys.Duration] =
        this.trackDuration.Ticks.ToString(CultureInfo.InvariantCulture);

    if (this.audioStream.CanSeek)
    {
        mediaSourceAttributes[MediaSourceAttributesKeys.CanSeek] = "1";
    }
    else
    {
        mediaSourceAttributes[MediaSourceAttributesKeys.CanSeek] = "0";
    }

    // Report that the Mp3MediaStreamSource has finished initializing its internal state and can now
    // pass in Mp3 Samples.
    this.ReportOpenMediaCompleted(mediaSourceAttributes, mediaStreamDescriptions);

    this.currentFrame = mpegLayer3Frame;
    this.currentFrameStartPosition = MpegFrame.FrameHeaderSize;
}
/// <summary>
/// Creates the shared test fixtures: a WaveFormatExtensible populated with
/// MPEG Layer 3 values (format tag 85) and an empty MpegLayer3WaveFormat.
/// </summary>
public void SetupTests()
{
    this.wfx = new WaveFormatExtensible
    {
        FormatTag = 85,
        Channels = 2,
        SamplesPerSec = 8000,
        AverageBytesPerSecond = 500,
        BlockAlign = 1,
        BitsPerSample = 16,
        ExtraDataSize = 12
    };

    this.mp3wfx = new MpegLayer3WaveFormat();
}
/// <summary>
/// Configures this stream's audio parameters from the first decoded MP3 frame header:
/// builds an MpegLayer3WaveFormat, derives the codec private data from it, and copies
/// channel count, sampling frequency, bitrate, and name before marking the stream configured.
/// </summary>
/// <param name="frameHeader">Frame header; must be an Mp3FrameHeader.</param>
public void Configure(IAudioFrameHeader frameHeader)
{
    Mp3FrameHeader mp3Header = (Mp3FrameHeader)frameHeader;

    MpegLayer3WaveFormat waveFormat = new MpegLayer3WaveFormat();
    waveFormat.nChannels = (ushort)mp3Header.Channels;
    waveFormat.nSamplesPerSec = (uint)frameHeader.SamplingFrequency;
    // Bitrate is bits/second; the wave format wants bytes/second.
    waveFormat.nAvgBytesPerSec = (uint)mp3Header.Bitrate / 8U;
    waveFormat.nBlockSize = (ushort)frameHeader.FrameLength;

    this.CodecPrivateData = WaveFormatExExtensions.ToCodecPrivateData((WaveFormatEx)waveFormat);
    this.Channels = (int)waveFormat.nChannels;
    this.SamplingFrequency = frameHeader.SamplingFrequency;
    this.Bitrate = new int?(mp3Header.Bitrate);
    this.Name = frameHeader.Name;
    this.SetConfigured();
}
/// <summary>
/// Configures this stream's audio parameters from the first decoded MP3 frame header:
/// builds an MpegLayer3WaveFormat, derives the codec private data from it, and copies
/// channel count, sampling frequency, bitrate, and name before marking the stream configured.
/// </summary>
/// <param name="frameHeader">Frame header; must be an Mp3FrameHeader.</param>
public void Configure(IAudioFrameHeader frameHeader)
{
    var header = (Mp3FrameHeader)frameHeader;

    var waveFormat = new MpegLayer3WaveFormat();
    waveFormat.nChannels = (ushort)header.Channels;
    waveFormat.nSamplesPerSec = (uint)frameHeader.SamplingFrequency;
    // Bitrate is bits/second; the wave format wants bytes/second.
    waveFormat.nAvgBytesPerSec = (uint)header.Bitrate / 8U;
    waveFormat.nBlockSize = (ushort)frameHeader.FrameLength;

    CodecPrivateData = waveFormat.ToCodecPrivateData();
    Channels = waveFormat.nChannels;
    SamplingFrequency = frameHeader.SamplingFrequency;
    Bitrate = header.Bitrate;
    Name = frameHeader.Name;
    SetConfigured();
}
/// <summary>
/// Callback which sets up the MediaStreamSource state once the first MpegFrame
/// after the Id3v2 data has been read: builds the MPEG Layer 3 wave format,
/// publishes codec private data, duration, and seekability, then reports open-media completion.
/// </summary>
/// <param name="mpegLayer3Frame">First MpegFrame after the Id3v2 tags.</param>
/// <param name="mediaStreamAttributes">Empty dictionary for MediaStreamAttributes.</param>
/// <param name="mediaStreamDescriptions">Empty list for MediaStreamDescriptions.</param>
/// <param name="mediaSourceAttributes">Empty dictionary for MediaSourceAttributes.</param>
/// <exception cref="InvalidOperationException">Thrown when the frame reports a non-positive size.</exception>
private void ReadPastId3v2TagsCallback(
    MpegFrame mpegLayer3Frame,
    Dictionary<MediaStreamAttributeKeys, string> mediaStreamAttributes,
    List<MediaStreamDescription> mediaStreamDescriptions,
    Dictionary<MediaSourceAttributesKeys, string> mediaSourceAttributes)
{
    // Guard against a corrupt header. The check rejects zero as well as negative
    // sizes, so the message says "positive" (the old text claimed only negative
    // values were invalid).
    if (mpegLayer3Frame.FrameSize <= 0)
    {
        throw new InvalidOperationException("MpegFrame's FrameSize must be positive");
    }

    // Initialize the Mp3 structures used by the Media pipeline from the first frame.
    WaveFormatExtensible formatExtensible = new WaveFormatExtensible();
    this.MpegLayer3WaveFormat = new MpegLayer3WaveFormat();
    this.MpegLayer3WaveFormat.WaveFormatExtensible = formatExtensible;

    this.MpegLayer3WaveFormat.WaveFormatExtensible.FormatTag = 85; // WAVE_FORMAT_MPEGLAYER3
    this.MpegLayer3WaveFormat.WaveFormatExtensible.Channels =
        mpegLayer3Frame.Channels == Channel.SingleChannel ? (short)1 : (short)2;
    this.MpegLayer3WaveFormat.WaveFormatExtensible.SamplesPerSec = mpegLayer3Frame.SamplingRate;
    // Bitrate is in bits per second; the pipeline wants bytes per second.
    this.MpegLayer3WaveFormat.WaveFormatExtensible.AverageBytesPerSecond = mpegLayer3Frame.Bitrate / 8;
    this.MpegLayer3WaveFormat.WaveFormatExtensible.BlockAlign = 1;
    this.MpegLayer3WaveFormat.WaveFormatExtensible.BitsPerSample = 0;
    this.MpegLayer3WaveFormat.WaveFormatExtensible.ExtraDataSize = 12;

    this.MpegLayer3WaveFormat.Id = 1;
    this.MpegLayer3WaveFormat.BitratePaddingMode = 0;
    this.MpegLayer3WaveFormat.FramesPerBlock = 1;
    this.MpegLayer3WaveFormat.BlockSize = (short)mpegLayer3Frame.FrameSize;
    this.MpegLayer3WaveFormat.CodecDelay = 0;

    // Named enum members instead of the decompiled numeric casts
    // ((MediaStreamAttributeKeys)0, (MediaStreamType)0, (MediaSourceAttributesKeys)0/1),
    // matching the hand-written sibling implementation of this callback.
    mediaStreamAttributes[MediaStreamAttributeKeys.CodecPrivateData] = this.MpegLayer3WaveFormat.ToHexString();
    this.audioStreamDescription = new MediaStreamDescription(MediaStreamType.Audio, mediaStreamAttributes);
    mediaStreamDescriptions.Add(this.audioStreamDescription);

    // Approximate track duration from the stream length and the (constant) byte rate.
    // NOTE(review): this assumes a CBR stream; a VBR file will report a wrong duration — confirm upstream guarantees.
    this.trackDuration = new TimeSpan(
        0,
        0,
        (int)(this.audioStreamLength / (long)this.MpegLayer3WaveFormat.WaveFormatExtensible.AverageBytesPerSecond));
    mediaSourceAttributes[MediaSourceAttributesKeys.Duration] =
        this.trackDuration.Ticks.ToString(CultureInfo.InvariantCulture);

    if (this.audioStream.CanSeek)
    {
        mediaSourceAttributes[MediaSourceAttributesKeys.CanSeek] = "True";
    }
    else
    {
        mediaSourceAttributes[MediaSourceAttributesKeys.CanSeek] = "False";
    }

    // Report that the source has finished initializing and can now deliver Mp3 samples.
    this.ReportOpenMediaCompleted(mediaSourceAttributes, mediaStreamDescriptions);

    this.currentFrame = mpegLayer3Frame;
    // Use the named constant rather than the raw literal 4L, consistent with the
    // sibling implementation of this callback.
    this.currentFrameStartPosition = MpegFrame.FrameHeaderSize;
}