Beispiel #1
0
 public void ReadPastId3V2Tags(Action <MpegFrame> callback)
 {
     // Reads (and discards) any leading ID3v2 tag from the audio stream, then
     // constructs the first MpegFrame and hands it to the callback. The ID3
     // payload is skipped on a thread-pool thread because it may require a
     // large sequential read.
     byte[] numArray = new byte[10];
     if (this.audioStream.Read(numArray, 0, 3) == 3)
     {
         // 0x49 0x44 0x33 == "ID3": an ID3v2 tag is present.
         if ((int)numArray[0] == 73 && (int)numArray[1] == 68 && (int)numArray[2] == 51)
         {
             // Remaining 7 bytes of the 10-byte ID3v2 header hold flags and the
             // sync-safe tag size (at offset 6).
             if (this.audioStream.Read(numArray, 3, 7) == 7)
             {
                 int id3Size = BitTools.ConvertSyncSafeToInt32(numArray, 6);
                 ThreadPool.QueueUserWorkItem((WaitCallback)(state =>
                 {
                     // Skip the ID3 payload in buffer-sized chunks.
                     while (id3Size > 0)
                     {
                         int bytesRead = id3Size > Mp3MediaStreamSourceX.buffer.Length
                             ? this.audioStream.Read(Mp3MediaStreamSourceX.buffer, 0, Mp3MediaStreamSourceX.buffer.Length)
                             : this.audioStream.Read(Mp3MediaStreamSourceX.buffer, 0, id3Size);

                         // BUGFIX: Stream.Read returns 0 at end of stream; the old
                         // code looped forever on a truncated ID3 tag.
                         if (bytesRead <= 0)
                         {
                             break;
                         }
                         id3Size -= bytesRead;
                     }
                     this._offsetFirstFrame = this.audioStream.Position;
                     callback(new MpegFrame(this.audioStream));
                 }));
                 return;
             }
         }
         else if (this.audioStream.Read(numArray, 3, 1) == 1)
         {
             // No ID3 tag: the four bytes already read are the first frame header.
             callback(new MpegFrame(this.audioStream, numArray));
             return;
         }
     }
     throw new Exception("Could not read intial audio stream data");
 }
        /// <summary>
        /// Parses the next sample from the requested stream and then calls ReportGetSampleCompleted
        /// to inform its parent MediaElement of the next sample.
        /// </summary>
        /// <param name="mediaStreamType">
        /// Should always be Audio for this MediaStreamSource.
        /// </param>
        protected override void GetSampleAsync(MediaStreamType mediaStreamType)
        {
            Dictionary<MediaSampleAttributeKeys, string> attributes = new Dictionary<MediaSampleAttributeKeys, string>();

            if (this.currentFrame == null)
            {
                // Near the end of the file, or an irrecoverable error occurred.
                // A null stream tells the MediaStreamSource & MediaElement to shut down.
                this.ReportGetSampleCompleted(new MediaStreamSample(this.audioStreamDescription, null, 0, 0, 0, attributes));
                return;
            }

            // Derive the timestamp from the byte offset and the stream's bitrate
            // (arguably more accurate than scaling track duration by stream position).
            double seconds = (double)this.currentFrameStartPosition / (double)this.currentFrame.Bitrate * 8 /* bits per Byte */;
            TimeSpan timestamp = TimeSpan.FromSeconds(seconds);

            // FrameSize includes the frame header consumed on the previous iteration,
            // so re-emit the cached header bytes and read only the remaining payload.
            this.currentFrame.CopyHeader(buffer);
            int payloadSize = this.currentFrame.FrameSize - MpegFrame.FrameHeaderSize;
            int read = this.audioStream.Read(buffer, MpegFrame.FrameHeaderSize, payloadSize);
            if (read != payloadSize)
            {
                // Ran out of bytes while reading the MP3 frame; signal shutdown.
                this.currentFrame = null;
                this.ReportGetSampleCompleted(new MediaStreamSample(this.audioStreamDescription, null, 0, 0, 0, attributes));
                return;
            }

            this.currentFrameStartPosition += read;
            using (MemoryStream frameStream = new MemoryStream(buffer))
            {
                // Hand the completed sample to the media pipeline.
                this.ReportGetSampleCompleted(new MediaStreamSample(this.audioStreamDescription, frameStream, 0, this.currentFrame.FrameSize, timestamp.Ticks, attributes));

                // Advance to the next frame, accepting it only if its header parses
                // as an MPEG-1/MPEG-2 Layer III frame.
                MpegFrame nextFrame = new MpegFrame(this.audioStream);
                if ((nextFrame.Version == 1 || nextFrame.Version == 2) && nextFrame.Layer == 3)
                {
                    this.currentFrameStartPosition += MpegFrame.FrameHeaderSize;
                    this.currentFrame = nextFrame;
                }
                else
                {
                    this.currentFrame = null;
                }
            }
        }
        public void Setup()
        {
            // First fixture frame: parsed directly from the rewound stream.
            this.s.Seek(0, SeekOrigin.Begin);
            this.mf = new MpegFrame(this.s);

            // Second fixture frame: the 4 header bytes are pre-read into headerData
            // and supplied to the constructor alongside the stream.
            this.s.Position = 0;
            this.s.Read(headerData, 0, 4);
            this.mf2 = new MpegFrame(this.s, MpegFrameTests.headerData);
        }
        /// <summary>
        /// Releases resources held by this instance.
        /// </summary>
        /// <param name="disposing">
        /// True when called from Dispose(); false when called from a finalizer.
        /// </param>
        protected virtual void Dispose(bool disposing)
        {
            if (disposing)
            {
                this.mf = null;

                // BUGFIX: the stream was previously closed outside this guard, i.e.
                // also on the finalizer path. Managed objects (like the stream) must
                // only be touched during an explicit dispose.
                if (this.s != null)
                {
                    this.s.Close();
                }
            }
        }
Beispiel #5
0
 // Produces the next audio sample for the media pipeline. If not enough data is
 // buffered (or a pending seek cannot complete yet), reports progress and retries
 // itself on a thread-pool thread after a delay.
 protected override void GetSampleAsync(MediaStreamType mediaStreamType)
 {
     // Once closed, silently ignore further sample requests.
     if (this._isClosed)
     {
         return;
     }
     // Serialize sample production against seek/close happening on other threads.
     lock (this._streamLock)
     {
         Dictionary <MediaSampleAttributeKeys, string> dictionary = new Dictionary <MediaSampleAttributeKeys, string>();
         if (this.audioStream.Position < this.audioStream.Length && (!this.HaveEnoughDataInBuffer() || !this.SeekToTimeIfNeeded()))
         {
             // Buffering: tell the pipeline we're half done, then retry this same
             // request after one second on a thread-pool thread.
             this.ReportGetSampleProgress(0.5);
             ThreadPool.QueueUserWorkItem((WaitCallback)(obj =>
             {
                 Thread.Sleep(1000);
                 this.GetSampleAsync(mediaStreamType);
             }));
         }
         else if (this.currentFrame != null)
         {
             // Timestamp derived from byte offset and bitrate: bytes / (bits-per-sec) * 8 = seconds.
             TimeSpan timeSpan = TimeSpan.FromSeconds((double)this.currentFrameStartPosition / (double)this.currentFrame.Bitrate * 8.0);
             // The 4-byte frame header was already consumed when currentFrame was
             // parsed; re-emit it from the cached frame, then read the payload.
             this.currentFrame.CopyHeader(Mp3MediaStreamSourceX.buffer);
             int count = this.currentFrame.FrameSize - 4;
             int num   = this.audioStream.Read(Mp3MediaStreamSourceX.buffer, 4, count);
             if (num != count)
             {
                 // Short read: out of data. A null sample tells the pipeline to shut down.
                 this.currentFrame = null;
                 this.ReportGetSampleCompleted(new MediaStreamSample(this.audioStreamDescription, null, 0, 0, 0, (IDictionary <MediaSampleAttributeKeys, string>)dictionary));
             }
             else
             {
                 this.currentFrameStartPosition = this.currentFrameStartPosition + (long)num;
                 using (MemoryStream memoryStream = new MemoryStream(Mp3MediaStreamSourceX.buffer))
                 {
                     // Deliver the completed sample, then pre-parse the next frame's
                     // header so the following request can reuse it.
                     this.ReportGetSampleCompleted(new MediaStreamSample(this.audioStreamDescription, (Stream)memoryStream, 0, (long)this.currentFrame.FrameSize, timeSpan.Ticks, (IDictionary <MediaSampleAttributeKeys, string>)dictionary));
                     MpegFrame mpegFrame = new MpegFrame(this.audioStream);
                     if ((mpegFrame.Version == 1 || mpegFrame.Version == 2) && mpegFrame.Layer == 3)
                     {
                         // Valid MPEG-1/2 Layer III header: account for its 4 bytes.
                         this.currentFrameStartPosition = this.currentFrameStartPosition + 4L;
                         this.currentFrame = mpegFrame;
                     }
                     else
                     {
                         // Invalid header: stop producing samples after this one.
                         this.currentFrame = null;
                     }
                 }
             }
         }
         else
         {
             // No current frame: report a null sample so the pipeline shuts down.
             this.ReportGetSampleCompleted(new MediaStreamSample(this.audioStreamDescription, null, 0, 0, 0, (IDictionary <MediaSampleAttributeKeys, string>)dictionary));
         }
     }
 }
        /// <summary>
        /// Callback which handles setting up an MSS once the first MpegFrame after Id3v2 data has been read.
        /// </summary>
        /// <param name="mpegLayer3Frame"> First MpegFrame</param>
        /// <param name="mediaStreamAttributes">Empty dictionary for MediaStreamAttributes</param>
        /// <param name="mediaStreamDescriptions">Empty dictionary for MediaStreamDescriptions</param>
        /// <param name="mediaSourceAttributes">Empty dictionary for MediaSourceAttributes</param>
        private void ReadPastId3v2TagsCallback(
            MpegFrame mpegLayer3Frame,
            Dictionary <MediaStreamAttributeKeys, string> mediaStreamAttributes,
            List <MediaStreamDescription> mediaStreamDescriptions,
            Dictionary <MediaSourceAttributesKeys, string> mediaSourceAttributes)
        {
            // BUGFIX(message): the guard rejects zero as well, so the text says
            // "must be positive" (the old message claimed only "cannot be negative").
            if (mpegLayer3Frame.FrameSize <= 0)
            {
                throw new InvalidOperationException("MpegFrame's FrameSize must be positive");
            }

            // Initialize the Mp3 data structures used by the Media pipeline with state from the first frame.
            WaveFormatExtensible wfx = new WaveFormatExtensible();

            this.MpegLayer3WaveFormat = new MpegLayer3WaveFormat();
            this.MpegLayer3WaveFormat.WaveFormatExtensible = wfx;

            this.MpegLayer3WaveFormat.WaveFormatExtensible.FormatTag             = 85; // WAVE_FORMAT_MPEGLAYER3 (0x0055)
            this.MpegLayer3WaveFormat.WaveFormatExtensible.Channels              = (short)((mpegLayer3Frame.Channels == Channel.SingleChannel) ? 1 : 2);
            this.MpegLayer3WaveFormat.WaveFormatExtensible.SamplesPerSec         = mpegLayer3Frame.SamplingRate;
            this.MpegLayer3WaveFormat.WaveFormatExtensible.AverageBytesPerSecond = mpegLayer3Frame.Bitrate / 8;
            this.MpegLayer3WaveFormat.WaveFormatExtensible.BlockAlign            = 1;
            this.MpegLayer3WaveFormat.WaveFormatExtensible.BitsPerSample         = 0;
            this.MpegLayer3WaveFormat.WaveFormatExtensible.ExtraDataSize         = 12; // size of the MpegLayer3-specific fields below

            this.MpegLayer3WaveFormat.Id = 1;
            this.MpegLayer3WaveFormat.BitratePaddingMode = 0;
            this.MpegLayer3WaveFormat.FramesPerBlock     = 1;
            this.MpegLayer3WaveFormat.BlockSize          = (short)mpegLayer3Frame.FrameSize;
            this.MpegLayer3WaveFormat.CodecDelay         = 0;

            // The pipeline consumes the codec configuration as a hex string.
            mediaStreamAttributes[MediaStreamAttributeKeys.CodecPrivateData] = this.MpegLayer3WaveFormat.ToHexString();
            this.audioStreamDescription = new MediaStreamDescription(MediaStreamType.Audio, mediaStreamAttributes);

            mediaStreamDescriptions.Add(this.audioStreamDescription);

            // Duration is estimated from the stream length and the byte rate of the
            // first frame (assumes a constant bitrate).
            this.trackDuration = new TimeSpan(0, 0, (int)(this.audioStreamLength / MpegLayer3WaveFormat.WaveFormatExtensible.AverageBytesPerSecond));
            mediaSourceAttributes[MediaSourceAttributesKeys.Duration] = this.trackDuration.Ticks.ToString(CultureInfo.InvariantCulture);
            mediaSourceAttributes[MediaSourceAttributesKeys.CanSeek] = this.audioStream.CanSeek ? "1" : "0";

            // Report that the Mp3MediaStreamSource has finished initializing its internal state and can now
            // pass in Mp3 Samples.
            this.ReportOpenMediaCompleted(mediaSourceAttributes, mediaStreamDescriptions);

            this.currentFrame = mpegLayer3Frame;
            this.currentFrameStartPosition = MpegFrame.FrameHeaderSize;
        }
Beispiel #7
0
        /// <summary>
        /// Configures this MediaStreamSource from the first MpegFrame found after any Id3v2 data.
        /// </summary>
        /// <param name="mpegLayer3Frame">First MpegFrame of the audio stream.</param>
        /// <param name="mediaStreamAttributes">Empty dictionary for MediaStreamAttributes.</param>
        /// <param name="mediaStreamDescriptions">Empty list for MediaStreamDescriptions.</param>
        /// <param name="mediaSourceAttributes">Empty dictionary for MediaSourceAttributes.</param>
        private void ReadPastId3v2TagsCallback(MpegFrame mpegLayer3Frame, Dictionary <MediaStreamAttributeKeys, string> mediaStreamAttributes, List <MediaStreamDescription> mediaStreamDescriptions, Dictionary <MediaSourceAttributesKeys, string> mediaSourceAttributes)
        {
            // BUGFIX(message): the guard rejects zero as well, so the text says
            // "must be positive" (the old message claimed only "cannot be negative").
            if (mpegLayer3Frame.FrameSize <= 0)
            {
                throw new InvalidOperationException("MpegFrame's FrameSize must be positive");
            }

            WaveFormatExtensible formatExtensible = new WaveFormatExtensible();

            this.MpegLayer3WaveFormat = new MpegLayer3WaveFormat();
            this.MpegLayer3WaveFormat.WaveFormatExtensible                       = formatExtensible;
            this.MpegLayer3WaveFormat.WaveFormatExtensible.FormatTag             = 85; // WAVE_FORMAT_MPEGLAYER3 (0x0055)
            this.MpegLayer3WaveFormat.WaveFormatExtensible.Channels              = mpegLayer3Frame.Channels == Channel.SingleChannel ? (short)1 : (short)2;
            this.MpegLayer3WaveFormat.WaveFormatExtensible.SamplesPerSec         = mpegLayer3Frame.SamplingRate;
            this.MpegLayer3WaveFormat.WaveFormatExtensible.AverageBytesPerSecond = mpegLayer3Frame.Bitrate / 8;
            this.MpegLayer3WaveFormat.WaveFormatExtensible.BlockAlign            = 1;
            this.MpegLayer3WaveFormat.WaveFormatExtensible.BitsPerSample         = 0;
            this.MpegLayer3WaveFormat.WaveFormatExtensible.ExtraDataSize         = 12; // size of the MpegLayer3-specific fields below
            this.MpegLayer3WaveFormat.Id = 1;
            this.MpegLayer3WaveFormat.BitratePaddingMode = 0;
            this.MpegLayer3WaveFormat.FramesPerBlock     = 1;
            this.MpegLayer3WaveFormat.BlockSize          = (short)mpegLayer3Frame.FrameSize;
            this.MpegLayer3WaveFormat.CodecDelay         = 0;

            // Named enum members instead of the decompiler's raw casts
            // ((MediaStreamAttributeKeys)0, (MediaStreamType)0, (MediaSourceAttributesKeys)0/1),
            // matching how the same logic is written elsewhere in this codebase.
            mediaStreamAttributes[MediaStreamAttributeKeys.CodecPrivateData] = this.MpegLayer3WaveFormat.ToHexString();
            this.audioStreamDescription = new MediaStreamDescription(MediaStreamType.Audio, mediaStreamAttributes);
            mediaStreamDescriptions.Add(this.audioStreamDescription);

            // Duration estimated from stream length over constant byte rate.
            this.trackDuration = new TimeSpan(0, 0, (int)(this.audioStreamLength / (long)this.MpegLayer3WaveFormat.WaveFormatExtensible.AverageBytesPerSecond));
            mediaSourceAttributes[MediaSourceAttributesKeys.Duration] = this.trackDuration.Ticks.ToString(CultureInfo.InvariantCulture);
            mediaSourceAttributes[MediaSourceAttributesKeys.CanSeek]  = this.audioStream.CanSeek ? "True" : "False";

            this.ReportOpenMediaCompleted(mediaSourceAttributes, mediaStreamDescriptions);
            this.currentFrame = mpegLayer3Frame;
            this.currentFrameStartPosition = 4L; // MPEG frame header (already consumed)
        }
Beispiel #8
0
        // Honors a pending seek request by estimating the target byte offset from the
        // stream bitrate and then re-synchronizing on a valid MPEG frame header.
        // Returns false when not enough data is buffered to complete the seek yet;
        // returns true once the seek is done (or when no seek was pending).
        private bool SeekToTimeIfNeeded()
        {
            long pendingSeekToTime = this._pendingSeekToTime;

            if (pendingSeekToTime >= 0L && this._currentFrame != null)
            {
                // Target offset = seconds (time / 10,000,000 — apparently 100-ns ticks)
                // * bytes-per-second (bitrate / 8), plus the first frame's offset.
                long num1      = pendingSeekToTime / 10000000L * (long)this._currentFrame.Bitrate / 8L + this._offsetFirstFrame;
                long position1 = this.audioStream.Position;
                this.audioStream.Position      = num1;
                this.currentFrameStartPosition = this.currentFrameStartPosition + (this.audioStream.Position - position1);
                if (!this.HaveEnoughDataInBuffer())
                {
                    return(false);
                }
                int  num2;
                long position2;
                while (true)
                {
                    do
                    {
                        do
                        {
                            // Scan byte-by-byte for the 0xFF sync byte.
                            num2 = this.audioStream.ReadByte();
                            this.currentFrameStartPosition = this.currentFrameStartPosition + 1L;
                            if (num2 == -1)
                            {
                                // End of stream while scanning.
                                goto label_11;
                            }
                        }while (num2 != (int)byte.MaxValue);
                        // Candidate found; next byte's top nibble must be 0xF too
                        // (the 11-bit frame-sync continuation).
                        num2 = this.audioStream.ReadByte();
                        this.currentFrameStartPosition = this.currentFrameStartPosition + 1L;
                    }while ((num2 & 240) != 240);
                    position2 = this.audioStream.Position;
                    // Rewind over the two sync bytes and try to parse a full header.
                    this.audioStream.Position -= 2L;
                    MpegFrame mpegFrame = new MpegFrame(this.audioStream);
                    if (mpegFrame.Bitrate > 0 && mpegFrame.SamplingRate > 0 && mpegFrame.FrameSize > 0)
                    {
                        // Plausible header: confirm by checking that another sync
                        // pattern starts exactly one frame later (guards against
                        // false positives inside audio data).
                        this.audioStream.Position += (long)(mpegFrame.FrameSize - 4);
                        if (this.audioStream.ReadByte() == (int)byte.MaxValue && (this.audioStream.ReadByte() & 240) == 240)
                        {
                            break;
                        }
                    }
                    // Not a real frame; resume scanning just after the candidate.
                    this.audioStream.Position = position2;
                }
                this.audioStream.Position = position2;
label_11:
                if (num2 != -1)
                {
                    // Step back onto the confirmed header and adopt it as the
                    // current frame if it is MPEG-1/2 Layer III.
                    this.audioStream.Position      = this.audioStream.Position - 2L;
                    this.currentFrameStartPosition = this.currentFrameStartPosition - 2L;
                    MpegFrame mpegFrame = new MpegFrame(this.audioStream);
                    if ((mpegFrame.Version == 1 || mpegFrame.Version == 2) && mpegFrame.Layer == 3)
                    {
                        this.currentFrameStartPosition = this.currentFrameStartPosition + 4L;
                        this.currentFrame = mpegFrame;
                    }
                    else
                    {
                        this.currentFrame = null;
                    }
                }
                else
                {
                    // Hit end of stream before finding a frame.
                    this.currentFrame = null;
                }
            }
            // Clear the pending request whether or not a seek actually occurred.
            this._pendingSeekToTime = -1L;
            return(true);
        }
        /// <summary>
        /// Read off the Id3Data from the stream and return the first MpegFrame of the audio stream.
        /// This assumes that the first bit of data is either an ID3 segment or an MPEG segment. Should
        /// probably do something a bit more robust at some point.
        /// </summary>
        /// <param name="callback">Invoked with the first MpegFrame once it has been read.</param>
        /// <returns>
        /// Always null; the first MpegFrame is delivered through <paramref name="callback"/>.
        /// </returns>
        public MpegFrame ReadPastId3V2Tags(Action <MpegFrame> callback)
        {
            /*
             * Since this code assumes that the first bit of data is either an ID3 segment or an MPEG segment it could
             * get into trouble. Should probably do something a bit more robust at some point.
             */

            // Read and (throw out) any Id3 data if present.
            byte[] data = new byte[10];
            if (this.audioStream.Read(data, 0, 3) != 3)
            {
                // Couldn't even read the initial data for some reason.
                throw new Exception("Could not read intial audio stream data");
            }

            if (data[0] == 73 /* I */ &&
                data[1] == 68 /* D */ &&
                data[2] == 51 /* 3 */)
            {
                // Need to update to read the is footer present flag and account for its 10 bytes if needed.
                if (this.audioStream.Read(data, 3, 7) != 7)
                {
                    throw new Exception("Could not read intial audio stream data");
                }

                int id3Size = BitTools.ConvertSyncSafeToInt32(data, 6);

                ThreadPool.QueueUserWorkItem(o =>
                {
                    // Read through the ID3 data, tossing it out.
                    while (id3Size > 0)
                    {
                        int bytesRead = (id3Size > buffer.Length)
                                            ? this.audioStream.Read(buffer, 0, buffer.Length)
                                            : this.audioStream.Read(buffer, 0, id3Size);

                        // BUGFIX: Stream.Read returns 0 at end of stream; without this
                        // guard a truncated ID3 tag caused an infinite loop.
                        if (bytesRead <= 0)
                        {
                            break;
                        }

                        id3Size -= bytesRead;
                    }

                    MpegFrame mpegFrame = new MpegFrame(this.audioStream);
                    callback(mpegFrame);
                });
            }
            else
            {
                // No ID3 tag present, presumably this is streaming and we are starting right at the Mp3 data.
                // Assume the stream isn't seekable.
                if (this.audioStream.Read(data, 3, 1) != 1)
                {
                    throw new Exception("Could not read intial audio stream data");
                }

                MpegFrame mpegFrame = new MpegFrame(this.audioStream, data);
                callback(mpegFrame);
            }

            return null;
        }
 /// <summary>
 /// Intentionally a no-op: the base logic already uses "2" as the "has energy"
 /// marker, so this decoder has no scale-factor selection info to read.
 /// </summary>
 /// <param name="frame">Frame being decoded (ignored).</param>
 /// <param name="scfsi">Scale-factor selection info table (left untouched).</param>
 /// <param name="channels">Channel count (ignored).</param>
 protected override void ReadScaleFactorSelection(MpegFrame frame, int[][] scfsi, int channels)
 {
     // this is a no-op since the base logic uses "2" as the "has energy" marker
 }
 /// <summary>
 /// Supplies the bitrate lookup table for this decoder.
 /// </summary>
 /// <param name="frame">Current frame (unused; the table is fixed for this decoder).</param>
 /// <returns>The decoder's static rate table.</returns>
 protected override int[] GetRateTable(MpegFrame frame) => _rateTable;
 /// <summary>
 /// Delegates CRC calculation for <paramref name="frame"/> to the table-aware
 /// overload, passing this decoder's rate and allocation lookup tables.
 /// </summary>
 public static bool GetCRC(MpegFrame frame, ref uint crc)
     => GetCRC(frame, _rateTable, _allocLookupTable, false, ref crc);
        /// <summary>
        /// Decode the Mpeg frame into provided buffer.
        /// Result varies with different <see cref="StereoMode"/>:
        /// <list type="bullet">
        /// <item>
        /// <description>For <see cref="StereoMode.Both"/>, sample data on both two channels will occur in turn (left first).</description>
        /// </item>
        /// <item>
        /// <description>For <see cref="StereoMode.LeftOnly"/> and <see cref="StereoMode.RightOnly"/>, only data on
        /// specified channel will occur.</description>
        /// </item>
        /// <item>
        /// <description>For <see cref="StereoMode.DownmixToMono"/>, two channels will be down-mixed into single channel.</description>
        /// </item>
        /// </list>
        /// </summary>
        /// <param name="frame">The Mpeg frame to be decoded.</param>
        /// <param name="destination">The buffer to fill with PCM samples.</param>
        /// <returns>The actual amount of samples read.</returns>
        public int DecodeFrame(MpegFrame frame, Span <float> destination)
        {
            if (frame == null)
            {
                throw new ArgumentNullException(nameof(frame));
            }

            // Lazily create (and cache) the layer decoder matching this frame.
            LayerDecoderBase decoder = null;
            switch (frame.Layer)
            {
            case MpegLayer.LayerI:
                decoder = _layer1Decoder ?? (_layer1Decoder = new Layer1Decoder());
                break;

            case MpegLayer.LayerII:
                decoder = _layer2Decoder ?? (_layer2Decoder = new Layer2Decoder());
                break;

            case MpegLayer.LayerIII:
                decoder = _layer3Decoder ?? (_layer3Decoder = new Layer3Decoder());
                break;
            }

            if (decoder == null)
            {
                // Unknown/unsupported layer: nothing to decode.
                return 0;
            }

            frame.Reset();

            decoder.SetEQ(_eqFactors);
            decoder.StereoMode = StereoMode;

            int sampleCount = decoder.DecodeFrame(frame, _ch0, _ch1);

            if (frame.ChannelMode == MpegChannelMode.Mono ||
                decoder.StereoMode != StereoMode.Both)
            {
                // Single-channel result (mono source, a single-channel mode, or a
                // downmix): copy channel 0 straight through.
                _ch0.AsSpan(0, sampleCount).CopyTo(destination);
                return sampleCount;
            }

            // Stereo result: interleave left/right samples (left first).
            // TODO: optimize
            for (int i = 0; i < sampleCount; i++)
            {
                destination[2 * i]     = _ch0[i];
                destination[2 * i + 1] = _ch1[i];
            }

            return sampleCount * 2;
        }