Example #1
        public void PlayBytes(StreamProviderDelegate currentAudioStreamProvider,
                              long dataLength, AudioLibPCMFormat pcmInfo,
                              long bytesFrom, long bytesTo)
        {
            if (pcmInfo == null)
            {
                throw new ArgumentNullException("PCM format cannot be null !");
            }

            if (currentAudioStreamProvider == null)
            {
                throw new ArgumentNullException("Stream cannot be null !");
            }
            if (dataLength <= 0)
            {
                throw new ArgumentOutOfRangeException("Duration cannot be <= 0 !");
            }

            if (CurrentState == State.NotReady)
            {
                return;
            }

            if (CurrentState != State.Stopped)
            {
                Debug.Fail("Attempting to play when not stopped ? " + CurrentState);
                return;
            }


#if USE_SOUNDTOUCH
            if (false && pcmInfo.NumberOfChannels > 1)
            {
                m_UseSoundTouch = false; //TODO: stereo all scrambled with SoundTouch !!
            }
            else
            {
                m_UseSoundTouch = m_UseSoundTouchBackup;
            }
#endif // USE_SOUNDTOUCH

            m_CurrentAudioStreamProvider = currentAudioStreamProvider;
            m_CurrentAudioStream         = m_CurrentAudioStreamProvider();
            m_CurrentAudioPCMFormat      = pcmInfo;
            m_CurrentAudioDataLength     = dataLength;



            long startPosition = 0;
            if (bytesFrom > 0)
            {
                startPosition = m_CurrentAudioPCMFormat.AdjustByteToBlockAlignFrameSize(bytesFrom);
            }

            long endPosition = 0;
            if (bytesTo > 0)
            {
                endPosition = m_CurrentAudioPCMFormat.AdjustByteToBlockAlignFrameSize(bytesTo);
            }

            if (m_CurrentAudioPCMFormat.BytesAreEqualWithMillisecondsTolerance(startPosition, 0))
            {
                startPosition = 0;
            }

            if (m_CurrentAudioPCMFormat.BytesAreEqualWithMillisecondsTolerance(endPosition, dataLength))
            {
                endPosition = dataLength;
            }

            if (m_CurrentAudioPCMFormat.BytesAreEqualWithMillisecondsTolerance(endPosition, 0))
            {
                endPosition = 0;
            }

            if (endPosition != 0 &&
                m_CurrentAudioPCMFormat.BytesAreEqualWithMillisecondsTolerance(endPosition, startPosition))
            {
                return;
            }

            if (startPosition >= 0 &&
                (endPosition == 0 || startPosition < endPosition) &&
                endPosition <= dataLength)
            {
                if (m_FwdRwdRate == 0)
                {
                    startPlayback(startPosition, endPosition);
                    Console.WriteLine("starting playback ");
                }
                else if (m_FwdRwdRate > 0)
                {
                    FastForward(startPosition);
                    Console.WriteLine("fast forward ");
                }
                else if (m_FwdRwdRate < 0)
                {
                    if (startPosition == 0)
                    {
                        startPosition = m_CurrentAudioStream.Length;
                    }
                    Rewind(startPosition);
                    Console.WriteLine("Rewind ");
                }
            }
            else
            {
                //throw new Exception("Start/end positions out of bounds of audio asset.");
                DebugFix.Assert(false);
            }
        }
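
A hedged usage sketch for PlayBytes (not part of the original sources): the player instance name, the file path and the in-scope RiffHeaderParse call (the parser shown in Example #3) are assumptions for illustration; only the PlayBytes signature and the convention that bytesTo == 0 means "play up to dataLength" come from the code above.

            // Usage sketch only -- assumes: using System.IO; and a player instance ("player") exposing PlayBytes.
            Stream wavStream = File.OpenRead("sample.wav"); // hypothetical file

            uint pcmDataLength;
            AudioLibPCMFormat pcmFormat = RiffHeaderParse(wavStream, out pcmDataLength); // parser from Example #3, assumed in scope

            // The provider delegate lets the player (re-)acquire the audio stream on demand.
            StreamProviderDelegate provider = delegate() { return wavStream; };

            // bytesFrom = 0 and bytesTo = 0 request playback of the whole PCM payload.
            player.PlayBytes(provider, pcmDataLength, pcmFormat, 0, 0);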
Example #2
        private void startPlayback(long startPosition, long endPosition)
        {
            initializeBuffers();

            m_PlaybackStartPositionInCurrentAudioStream = startPosition;
            m_PlaybackEndPositionInCurrentAudioStream   = endPosition == 0
                ? m_CurrentAudioDataLength
                : endPosition;

            m_CircularBufferWritePosition = 0;

            //m_CircularBufferFlushTolerance = -1;
            //m_PredictedByteIncrement = -1;

            //m_PreviousCircularBufferPlayPosition = -1;
            //m_CircularBufferTotalBytesPlayed = -1;

            m_CurrentAudioStream          = m_CurrentAudioStreamProvider();
            m_CurrentAudioStream.Position = m_PlaybackStartPositionInCurrentAudioStream;

            int circularBufferLength = m_CircularBuffer.
#if USE_SHARPDX
                                       Capabilities
#else
                                       Caps
#endif
                                       .BufferBytes
            ;

            int bytesWrittenToCircularBuffer =
                transferBytesFromWavStreamToCircularBuffer(circularBufferLength);

            m_CurrentBytePosition = m_PlaybackStartPositionInCurrentAudioStream;

            CurrentState = State.Playing;
            //if (AllowBackToBackPlayback && m_MonitoringTimer != null) m_MonitoringTimer.Start();
            try
            {
                m_CircularBuffer.Play(0,
#if USE_SHARPDX
                                      PlayFlags.Looping
#else
                                      BufferPlayFlags.Looping // this makes it a circular buffer (which we manage manually, by tracking playback versus writing positions)
#endif
                                      );
                if (m_PlaybackStopWatch == null)
                {
                    m_PlaybackStopWatch = new Stopwatch();
                }

#if NET4
                m_PlaybackStopWatch.Restart();
#else
                m_PlaybackStopWatch.Stop();
                m_PlaybackStopWatch.Reset();
                m_PlaybackStopWatch.Start();
#endif //NET4
            }
            catch (Exception)
            {
                Debug.Fail("EmergencyStopForSoundBufferProblem !");

                CurrentState = State.Stopped;

                StopForwardRewind();
                stopPlayback();

                return;
            }

            ThreadStart threadDelegate = delegate()
            {
                bool endOfAudioStream = false;
                try
                {
                    endOfAudioStream = circularBufferRefreshThreadMethod();
                }
                catch (ThreadAbortException)
                {
                    // expected when the refresh thread is aborted (e.g. on stop); nothing to do
                }
                catch (Exception ex)
                {
                    Console.WriteLine(ex.Message);
                    Console.WriteLine(ex.StackTrace);
                }
                finally
                {
                    if (m_PlaybackStopWatch != null)
                    {
                        m_PlaybackStopWatch.Stop();
                    }

                    if (mPreviewTimer.Enabled)
                    {
                        if (endOfAudioStream || CurrentState == State.Playing)
                        {
                            m_ResumeStartPosition = CurrentBytePosition;

                            CurrentState = State.Paused; // set before stopPlayback() so it does not kill the stream provider
                        }
                        lock (LOCK)
                        {
                            m_CircularBufferRefreshThread = null;
                        }

                        stopPlayback();
                    }
                    else
                    {
                        if (endOfAudioStream || CurrentState == State.Playing)
                        {
                            CurrentState = State.Stopped;
                        }
                        //if (CurrentState != State.Paused) CurrentState = State.Stopped;

                        lock (LOCK)
                        {
                            m_CircularBufferRefreshThread = null;
                        }

                        StopForwardRewind();
                        stopPlayback();

                        if (endOfAudioStream)
                        {
                            AudioPlaybackFinishHandler delFinished = AudioPlaybackFinished;
                            if (delFinished != null && !mPreviewTimer.Enabled)
                            {
                                delFinished(this, new AudioPlaybackFinishEventArgs());
                            }
                        }
                    }
                }

                //Console.WriteLine("Player refresh thread exiting....");

                //CurrentState = State.Stopped;

                //lock (LOCK)
                //{
                //    //m_CircularBufferRefreshThreadIsAlive = false;
                //    m_CircularBufferRefreshThread = null;
                //}

                //Console.WriteLine("Player refresh thread exit.");
            };


            int count = 0;
            while (m_CircularBufferRefreshThread != null)
            {
                Console.WriteLine(@"------------ m_CircularBufferRefreshThread NOT null!!: " + count++);
                Thread.Sleep(20);

                if (count > 10)
                {
                    Console.WriteLine(@"------------ m_CircularBufferRefreshThread NOT null!! ()BREAK(): " + count++);
                    break;
                }
            }

            if (m_CircularBufferRefreshThread != null)
            {
                stopPlayback();
            }


            DebugFix.Assert(m_CircularBufferRefreshThread == null);

            lock (LOCK)
            {
                m_CircularBufferRefreshThread              = new Thread(threadDelegate);
                m_CircularBufferRefreshThread.Name         = "Player Refresh Thread";
                m_CircularBufferRefreshThread.Priority     = ThreadPriority.Normal;
                m_CircularBufferRefreshThread.IsBackground = true;
                m_CircularBufferRefreshThread.Start();
            }


            //Console.WriteLine("Player refresh thread start.");
        }
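
The refresh thread launched above keeps topping up the looping DirectSound buffer; circularBufferRefreshThreadMethod itself is not shown in these examples. As a minimal sketch only (assumed names, not the library's actual loop), the core bookkeeping such a refresh loop performs is: measure how many bytes lie between the write position and the hardware play cursor, wrapping at the end of the buffer, then refill at most that many block-aligned bytes from the WAV stream.

            // Minimal sketch of the circular-buffer write-ahead arithmetic (assumed names, illustration only).
            private static int BytesAvailableForWriting(int playCursor, int writePosition, int bufferLength)
            {
                // Distance from the write position forward to the play cursor.
                int free = playCursor - writePosition;
                if (free <= 0)
                {
                    // The write position is at or ahead of the play cursor: wrap around the buffer end.
                    free += bufferLength;
                }
                return free;
            }

When both cursors are equal this naive version cannot distinguish "buffer empty" from "buffer full"; the commented-out fields m_PreviousCircularBufferPlayPosition and m_CircularBufferTotalBytesPlayed above hint that the real implementation resolves this by tracking cumulative totals rather than raw cursor positions.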
Example #3
        /// <summary>
        /// Parses a RIFF WAVE PCM header of a given input <see cref="Stream"/>
        /// </summary>
        /// <remarks>
        /// Upon succesful parsing the <paramref name="input"/> <see cref="Stream"/> is positioned at the beginning of the actual PCM data,
        /// that is at the beginning of the data field of the data sub-chunk
        /// </remarks>
        /// <param name="input">The input <see cref="Stream"/> - must be positioned at the start of the RIFF chunk</param>
        /// <returns>A <see cref="AudioLibPCMFormat"/> containing the parsed data</returns>
        /// <exception cref="ArgumentOutOfRangeException">
        /// Thrown when RIFF WAVE header is invalid or is not PCM data
        /// </exception>
        public static AudioLibPCMFormat RiffHeaderParse(Stream input, out uint dataLength)
        {
            DebugFix.Assert(input.Position == 0);

            dataLength = 0;

            BinaryReader rd = new BinaryReader(input);

            //http://www.sonicspot.com/guide/wavefiles.html

            // Ensures 3x4=12 bytes available to read (RIFF Type Chunk)
            {
                long availableBytes = input.Length - input.Position;
                if (availableBytes < 12)
                {
                    throw new ArgumentOutOfRangeException(
                              "The RIFF chunk descriptor does not fit in the input stream");
                }
            }

            //Chunk ID (4 bytes)
            {
                string chunkId = Encoding.ASCII.GetString(rd.ReadBytes(4));
                if (chunkId != "RIFF")
                {
                    throw new ArgumentOutOfRangeException("ChunkId is not RIFF: " + chunkId);
                }
            }
            //Chunk Data Size (the wavEndPos variable is used further below as the upper limit position in the stream)
            long wavEndPos = 0;
            {
                // 4 bytes
                uint chunkSize = rd.ReadUInt32();

                // Ensures the given size fits within the actual stream
                wavEndPos = input.Position + chunkSize;
                DebugFix.Assert(!(wavEndPos > input.Length));
                //if (wavEndPos > input.Length)
                //{
                //    throw new ArgumentOutOfRangeException(String.Format(
                //                                             "The WAVE PCM chunk does not fit in the input Stream (expected chunk end position is {0:0}, Stream count is {1:0})",
                //                                             wavEndPos, input.Length));
                //}
            }
            //RIFF Type (4 bytes)
            {
                string format = Encoding.ASCII.GetString(rd.ReadBytes(4));
                if (format != "WAVE")
                {
                    throw new ArgumentOutOfRangeException(String.Format(
                                                              "RIFF format {0} is not supported. The only supported RIFF format is WAVE",
                                                              format));
                }
            }


            // We need at least the 'data' and the 'fmt ' chunks
            bool foundWavDataChunk   = false;
            bool foundWavFormatChunk = false;

            // We memorize the position of the actual PCM data in the stream,
            // so we can seek back, if needed (normally, this never happens as the 'data' chunk
            // is always the last one. However the WAV format does not mandate the order of chunks so...)
            long wavDataChunkPosition = -1;

            AudioLibPCMFormat pcmInfo = new AudioLibPCMFormat();

            //loop when there's at least 2x4=8 bytes to read (Chunk ID & Chunk Data Size)
            while (input.Position + 8 <= wavEndPos)
            {
                // 4 bytes
                string chunkId = Encoding.ASCII.GetString(rd.ReadBytes(4));

                // 4 bytes
                uint chunkSize = rd.ReadUInt32();

                // Ensures the given size fits within the actual stream
                if (input.Position + chunkSize > wavEndPos)
                {
                    throw new ArgumentOutOfRangeException(String.Format(
                                                              "ChunkId {0} does not fit in RIFF chunk",
                                                              chunkId));
                }

                switch (chunkId)
                {
                case "fmt ":
                {
                    // The default information fields fit within 16 bytes
                    int extraFormatBytes = (int)chunkSize - 16;

                    // Compression code (2 bytes)
                    ushort compressionCode = rd.ReadUInt16();

                    // Number of channels (2 bytes)
                    ushort numChannels = rd.ReadUInt16();
                    if (numChannels == 0)
                    {
                        throw new ArgumentOutOfRangeException("0 channels of audio is not supported");
                    }

                    // Sample rate (4 bytes)
                    uint sampleRate = rd.ReadUInt32();

                    // Average bytes per second, aka byte-rate (4 bytes)
                    uint byteRate = rd.ReadUInt32();

                    // Block align (2 bytes)
                    ushort blockAlign = rd.ReadUInt16();

                    // Significant bits per sample, aka bit-depth (2 bytes)
                    ushort bitDepth = rd.ReadUInt16();

                    if (compressionCode != 0 && extraFormatBytes > 0)
                    {
                        //  Extra format bytes (2 bytes)
                        uint extraBytes = rd.ReadUInt16();
                        if (extraBytes > 0)
                        {
                            DebugFix.Assert(extraBytes <= (extraFormatBytes - 2));
                        }

                        if (extraFormatBytes > 2)
                        {
                            extraBytes = (uint)(extraFormatBytes - 2);

                            // Skip (we ignore the extra information in this chunk field)
                            rd.ReadBytes((int)extraBytes);

                            // check word-alignment
                            if ((extraBytes % 2) != 0)
                            {
                                rd.ReadByte();
                            }
                        }
                    }

                    if ((bitDepth % 8) != 0)
                    {
                        throw new ArgumentOutOfRangeException(String.Format(
                                                                  "Invalid number of bits per sample {0:0} - must be a mulitple of 8",
                                                                  bitDepth));
                    }
                    if (blockAlign != (numChannels * bitDepth / 8))
                    {
                        throw new ArgumentOutOfRangeException(String.Format(
                                                                  "Invalid block align {0:0} - expected {1:0}",
                                                                  blockAlign, numChannels * bitDepth / 8));
                    }
                    if (byteRate != sampleRate * blockAlign)
                    {
                        throw new ArgumentOutOfRangeException(String.Format(
                                                                  "Invalid byte rate {0:0} - expected {1:0}",
                                                                  byteRate, sampleRate * blockAlign));
                    }
                    pcmInfo.BitDepth         = bitDepth;
                    pcmInfo.NumberOfChannels = numChannels;
                    pcmInfo.SampleRate       = sampleRate;
                    pcmInfo.IsCompressed     = compressionCode != 1;

                    foundWavFormatChunk = true;
                    break;
                }

                case "data":
                {
                    if (input.Position + chunkSize > wavEndPos)
                    {
                        throw new ArgumentOutOfRangeException(String.Format(
                                                                  "ChunkId {0} does not fit in RIFF chunk",
                                                                  "data"));
                    }

                    dataLength = chunkSize;

                    foundWavDataChunk    = true;
                    wavDataChunkPosition = input.Position;

                    // ensure we go past the PCM data, in case there are following chunks.
                    // (it's an unlikely scenario, but it's allowed by the WAV spec.)
                    input.Seek(chunkSize, SeekOrigin.Current);
                    break;
                }

                case "fact":
                {
                    if (chunkSize == 4)
                    {
                        // 4 bytes
                        uint totalNumberOfSamples = rd.ReadUInt32();
                        // This value is unused, we are just reading it for debugging as we noticed that
                        // the WAV files generated by the Microsoft Audio Recorder
                        // contain the 'fact' chunk with this information. Most other recordings
                        // only contain the 'data' and 'fmt ' chunks.
                    }
                    else
                    {
                        rd.ReadBytes((int)chunkSize);
                    }
                    break;
                }

                case "JUNK":
                case "bext":
                case "minf":
                case "regn":
                case "umid":
                case "DGDA":
                case "wavl":
                case "slnt":
                case "cue ":
                case "plst":
                case "list":
                case "labl":
                case "note":
                case "ltxt":
                case "smpl":
                case "inst":
                default:
                {
                    // Unsupported FOURCC codes, we skip.
                    rd.ReadBytes((int)chunkSize);
                    break;
                }
                }
            }

            if (!foundWavDataChunk)
            {
                throw new ArgumentOutOfRangeException("WAV 'data' chunk was not found !");
            }
            if (!foundWavFormatChunk)
            {
                throw new ArgumentOutOfRangeException("WAV 'fmt ' chunk was not found !");
            }
            if (input.Position != wavDataChunkPosition)
            {
                input.Seek(wavDataChunkPosition, SeekOrigin.Begin);
            }

            return pcmInfo;
        }
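
A hedged usage example for RiffHeaderParse (the file name and the fact that the containing class is in scope are assumptions): after the call the stream sits at the first PCM byte, so the payload can be read directly, and a duration estimate follows from the same byte-rate arithmetic the parser validates in the 'fmt ' branch.

            // Usage sketch only -- assumes: using System; using System.IO; RiffHeaderParse in scope.
            uint pcmByteCount;
            using (FileStream fs = File.OpenRead("recording.wav")) // hypothetical file
            {
                AudioLibPCMFormat format = RiffHeaderParse(fs, out pcmByteCount);

                // byte rate = sample rate * channels * bit depth / 8, as enforced by the header checks above
                long byteRate = (long)format.SampleRate * format.NumberOfChannels * format.BitDepth / 8;
                double durationSeconds = (double)pcmByteCount / byteRate;
                Console.WriteLine("PCM payload: {0} bytes, ~{1:0.00} s", pcmByteCount, durationSeconds);

                // The stream is now positioned at the start of the 'data' payload, so it can be read directly.
                byte[] pcm = new byte[pcmByteCount];
                int bytesRead = fs.Read(pcm, 0, (int)pcmByteCount);
            }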