Example #1
0
        /// <summary>
        /// Invoked when the audio player starts playing <paramref name="audioChunk"/>.
        /// Pops queued chunk/frame pairs up to and including the matching one,
        /// disposing the frames of skipped (older) chunks, then publishes the frame
        /// that belongs to the chunk now being played via <c>onNextFrameReceived</c>.
        /// </summary>
        private void OnAudioChunkPlaybackStarted(MvxAudioChunk audioChunk)
        {
            MVCommon.SharedRef<MVGraphAPI.Frame> matchingFrame = null;

            lock (m_audioChunkFramesQueue)
            {
                while (matchingFrame == null && m_audioChunkFramesQueue.Count > 0)
                {
                    KeyValuePair<MvxAudioChunk, MVCommon.SharedRef<MVGraphAPI.Frame>> entry = m_audioChunkFramesQueue.Dequeue();
                    m_allocatedAudioChunks.Remove(entry.Key);

                    if (entry.Key == audioChunk)
                    {
                        matchingFrame = entry.Value;
                    }
                    else
                    {
                        // an older frame that will never be shown - release it
                        entry.Value.Dispose();
                    }
                }
            }

            if (matchingFrame == null)
            {
                return;
            }

            lastReceivedFrame = matchingFrame;
            onNextFrameReceived.Invoke(lastReceivedFrame);
        }
Example #2
0
        /// <summary>
        /// Invoked when the audio player throws away <paramref name="discardedAudioChunk"/>.
        /// Returns the chunk to the pool; if it still has a queued frame (it was never
        /// played back), drains and releases that frame together with all older queued
        /// chunk/frame pairs.
        /// </summary>
        private void OnAudioChunkDiscarded(MvxAudioChunk discardedAudioChunk)
        {
            lock (m_audioChunkFramesQueue)
            {
                if (!m_allocatedAudioChunks.Contains(discardedAudioChunk))
                {
                    // no frame is waiting for this chunk - just recycle it
                    MvxAudioChunksPool.instance.ReturnAudioChunk(discardedAudioChunk);
                    return;
                }

                bool reachedDiscardedChunk = false;
                while (!reachedDiscardedChunk && m_audioChunkFramesQueue.Count > 0)
                {
                    KeyValuePair<MvxAudioChunk, MVCommon.SharedRef<MVGraphAPI.Frame>> entry = m_audioChunkFramesQueue.Dequeue();
                    m_allocatedAudioChunks.Remove(entry.Key);
                    MvxAudioChunksPool.instance.ReturnAudioChunk(entry.Key);
                    entry.Value.Dispose();

                    reachedDiscardedChunk = entry.Key == discardedAudioChunk;
                }
            }
        }
Example #3
0
 /// <summary>
 /// Returns a previously allocated audio chunk to the pool so it can be reused by
 /// <c>AllocateAudioChunk</c>. Chunks that are not currently checked out are ignored.
 /// </summary>
 /// <param name="audioChunk">The chunk to recycle; must have been handed out by this pool.</param>
 public void ReturnAudioChunk(MvxAudioChunk audioChunk)
 {
     if (audioChunk == null)
     {
         return;
     }

     lock (m_threadLock)
     {
         // Only recycle chunks that are actually checked out. Without this guard a
         // double return (or a chunk this pool never allocated) would be enqueued
         // into m_unusedAudioChunks more than once, letting AllocateAudioChunk hand
         // the same chunk to two callers at the same time.
         if (m_usedAudioChunks.Remove(audioChunk))
         {
             m_unusedAudioChunks.Enqueue(audioChunk);
         }
     }
 }
Example #4
0
        /// <summary>
        /// Worker loop that keeps the audio player's buffer filled: while the queued
        /// audio duration is below <c>minimalBufferedAudioDuration</c>, it processes
        /// the next frame from the graph runner, extracts its audio chunk, records
        /// the chunk/frame pair, and enqueues the chunk for playback.
        /// Runs until <c>m_stopReadingFrames</c> is set.
        /// </summary>
        private void ReadFrames()
        {
            while (true)
            {
                if (m_stopReadingFrames)
                {
                    return;
                }

                // snapshot how much audio the player still has buffered
                float queuedAudioDuration;
                lock (m_audioPlayer)
                    queuedAudioDuration = m_audioPlayer.GetQueuedAudioDuration(m_outputSampleRate);

                // top up the buffer until it reaches the minimal duration
                while (queuedAudioDuration < minimalBufferedAudioDuration)
                {
                    if (m_stopReadingFrames)
                    {
                        return;
                    }

                    MVCommon.SharedRef <MVGraphAPI.Frame> newFrame = null;
                    lock (m_mvxRunner)
                    {
                        // advance the graph; stop topping up when no further frame is available
                        if (!m_mvxRunner.ProcessNextFrame())
                        {
                            break;
                        }

                        newFrame = new MVCommon.SharedRef <MVGraphAPI.Frame>(m_frameAccess.GetRecentProcessedFrame());
                    }
                    // NOTE(review): a SharedRef wrapping null is abandoned here without
                    // Dispose - presumably an empty SharedRef owns nothing; confirm.
                    if (newFrame.sharedObj == null)
                    {
                        break;
                    }

                    MvxAudioChunk newAudioChunk = ExtractAudioFromFrame(newFrame);
                    if (newAudioChunk == null)
                    {
                        // frame carried no usable audio - release it and back off
                        newFrame.Dispose();
                        break;
                    }

                    // remember which frame belongs to this chunk so the frame can be
                    // published once the chunk's playback starts
                    lock (m_audioChunkFramesQueue)
                    {
                        m_audioChunkFramesQueue.Enqueue(new KeyValuePair <MvxAudioChunk, MVCommon.SharedRef <MVGraphAPI.Frame> >(newAudioChunk, newFrame));
                        m_allocatedAudioChunks.Add(newAudioChunk);
                    }

                    lock (m_audioPlayer)
                    {
                        m_audioPlayer.EnqueueAudioChunk(newAudioChunk);
                        queuedAudioDuration = m_audioPlayer.GetQueuedAudioDuration(m_outputSampleRate);
                    }
                }

                // buffer is sufficiently full - back off briefly before re-checking
                Thread.Sleep(10);
            }
        }
Example #5
0
 /// <summary>
 /// Discards every buffered audio chunk (each one reported through
 /// <c>onAudioChunkDiscarded</c>) and resets the chunk currently being played.
 /// </summary>
 public void Reset()
 {
     lock (m_audioChunksQueue)
     {
         while (m_audioChunksQueue.Count != 0)
         {
             onAudioChunkDiscarded.Invoke(m_audioChunksQueue.Dequeue());
         }
         ResetCurrentlyPlayedAudioChunk();
     }
 }
Example #6
0
        /// <summary>
        /// Worker loop: drains frames from <c>m_framesQueue</c>, extracts their PCM
        /// audio into pooled chunks, and enqueues those chunks into the audio player.
        /// Exits when <c>m_stopProcessingAudioData</c> is set or the frame queue runs dry.
        /// </summary>
        private void ProcessAudioData()
        {
            while (!m_stopProcessingAudioData)
            {
                MVGraphAPI.Frame frame;

                lock (m_framesQueue)
                {
                    if (m_framesQueue.Count == 0)
                    {
                        return;
                    }

                    frame = m_framesQueue.Dequeue();
                }

                UInt32 pcmDataSize = MVGraphAPI.FrameAudioExtractor.GetPCMDataSize(frame);
                if (pcmDataSize == 0)
                {
                    // frame carries no audio - skip it
                    continue;
                }

                UInt32 channelsCount;
                UInt32 bitsPerSample;
                UInt32 sampleRate;
                MVGraphAPI.FrameAudioExtractor.GetAudioSamplingInfo(frame, out channelsCount, out bitsPerSample, out sampleRate);

                bool supportedSampleSize = bitsPerSample == 8 || bitsPerSample == 16 || bitsPerSample == 32;
                if (!supportedSampleSize)
                {
                    Debug.LogErrorFormat("Unsupported 'bits per sample' value {0}", bitsPerSample);
                    continue;
                }
                UInt32 bytesPerSample = bitsPerSample / 8;

                byte[] pcmBytes = new byte[pcmDataSize];
                MVGraphAPI.FrameAudioExtractor.CopyPCMData(frame, pcmBytes);

                MvxAudioChunk audioChunk = MvxAudioChunksPool.instance.AllocateAudioChunk(pcmBytes, bytesPerSample, channelsCount, sampleRate);
                lock (m_audioPlayer)
                {
                    m_audioPlayer.EnqueueAudioChunk(audioChunk);
                }
            }
        }
Example #7
0
        /// <summary>
        /// Appends <paramref name="chunk"/> to the playback queue. When the queue
        /// grows beyond its limit, the currently played chunk is reset immediately
        /// and the oldest chunks are dropped (each one reported through
        /// <c>onAudioChunkDiscarded</c>) until the queue fits again.
        /// </summary>
        public void EnqueueAudioChunk(MvxAudioChunk chunk)
        {
            lock (m_audioChunksQueue)
            {
                m_audioChunksQueue.Enqueue(chunk);

                if (!AudioChunksQueueIsFull())
                {
                    return;
                }

                // the buffer is oversized - abandon the current chunk and trim the queue
                ResetCurrentlyPlayedAudioChunk();

                while (AudioChunksQueueIsFull())
                {
                    onAudioChunkDiscarded.Invoke(m_audioChunksQueue.Dequeue());
                }
            }
        }
Example #8
0
        /// <summary>
        /// Hands out an audio chunk initialized with the given PCM data, reusing a
        /// pooled chunk when one is available and allocating a new one otherwise.
        /// The chunk is tracked as "used" until returned via <c>ReturnAudioChunk</c>.
        /// </summary>
        public MvxAudioChunk AllocateAudioChunk(byte[] data, UInt32 bytesPerSample, UInt32 channelsCount, UInt32 sampleRate)
        {
            lock (m_threadLock)
            {
                MvxAudioChunk audioChunk;

                if (m_unusedAudioChunks.Count == 0)
                {
                    audioChunk = new MvxAudioChunk(data, bytesPerSample, channelsCount, sampleRate);
                }
                else
                {
                    // recycle a pooled chunk instead of allocating a fresh one
                    audioChunk = m_unusedAudioChunks.Dequeue();
                    audioChunk.Reset(data, bytesPerSample, channelsCount, sampleRate);
                }

                m_usedAudioChunks.Add(audioChunk);
                return audioChunk;
            }
        }
Example #9
0
        /// <summary>
        /// Fills the shared auxiliary buffer with one sample taken from
        /// <paramref name="precedingAudioChunk"/> at <paramref name="precedingAudioChunkIndex"/>
        /// followed by the first sample of <paramref name="subsequentAudioChunk"/>,
        /// so the two chunks can be interpolated across their boundary.
        /// </summary>
        /// <param name="channelsCount">Receives the smaller of the two chunks' channel counts.</param>
        /// <returns>The reused auxiliary buffer holding the two concatenated samples.</returns>
        private float[] PrepareAuxiliaryDataForAudioChunksConcat(
            MvxAudioChunk precedingAudioChunk, int precedingAudioChunkIndex,
            MvxAudioChunk subsequentAudioChunk,
            out int channelsCount)
        {
            int precedingChannels = (int)precedingAudioChunk.channelsCount;
            int subsequentChannels = (int)subsequentAudioChunk.channelsCount;
            channelsCount = precedingChannels < subsequentChannels ? precedingChannels : subsequentChannels;

            // grow the scratch buffer lazily; it holds exactly two samples
            int requiredLength = channelsCount * 2;
            if (m_auxiliaryAudioData == null || m_auxiliaryAudioData.Length < requiredLength)
            {
                m_auxiliaryAudioData = new float[requiredLength];
            }

            for (int channel = 0; channel < channelsCount; channel++)
            {
                m_auxiliaryAudioData[channel] = precedingAudioChunk.data[precedingAudioChunkIndex + channel];
                m_auxiliaryAudioData[channelsCount + channel] = subsequentAudioChunk.data[channel];
            }

            return m_auxiliaryAudioData;
        }
Example #10
0
        /// <summary>
        /// Feeds one resampled source sample into <paramref name="targetData"/> at
        /// <paramref name="targetDataStartIndex"/>, advancing the index by
        /// <paramref name="targetChannelsCount"/> on success. When the current chunk
        /// is exhausted, interpolates across the boundary with the next buffered
        /// chunk instead.
        /// </summary>
        /// <returns>False when no more source audio is available to feed.</returns>
        private bool FeedNextSourceSampleToTargetData(float[] targetData, ref int targetDataStartIndex, int targetChannelsCount)
        {
            // enough data left in the current chunk for one full multi-channel sample?
            if (m_currentAudioChunkIndex + currentAudioChunkChannelsCount < currentAudioChunkDataSize)
            {
                FeedSampleFromSourceToTargetData(
                    currentAudioChunkData, m_currentAudioChunkIndex, currentAudioChunkChannelsCount,
                    targetData, targetDataStartIndex, targetChannelsCount,
                    m_sourceRateByTargetRateFeed);

                targetDataStartIndex += targetChannelsCount;
                return(true);
            }

            // current audio chunk does not have enough audio data -> try next one
            // NOTE(review): m_audioChunksQueue is accessed here without a lock,
            // unlike EnqueueAudioChunk/Reset - presumably this runs on the audio
            // thread under an outer lock; confirm against the caller.
            if (m_audioChunksQueue.Count == 0)
            {
                return(false);
            }

            // lerp with next buffered audio chunk (peeked, not dequeued - the chunk
            // itself is consumed elsewhere)
            MvxAudioChunk nextAudioChunk = m_audioChunksQueue.Peek();

            int concatenatedAudioChannelsCount;

            // builds a 2-sample scratch buffer: last sample of the current chunk
            // followed by the first sample of the next one
            float[] concatenatedAudioData = PrepareAuxiliaryDataForAudioChunksConcat(
                currentAudioChunk, m_currentAudioChunkIndex,
                nextAudioChunk, out concatenatedAudioChannelsCount);

            FeedSampleFromSourceToTargetData(
                concatenatedAudioData, 0, concatenatedAudioChannelsCount,
                targetData, targetDataStartIndex, targetChannelsCount,
                m_sourceRateByTargetRateFeed);

            targetDataStartIndex += targetChannelsCount;
            return(true);
        }
Example #11
0
 /// <summary>
 /// Playback is done with the discarded chunk - hand it back to the pool for reuse.
 /// </summary>
 private void OnAudioChunkDiscarded(MvxAudioChunk discardedAudioChunk)
 {
     MvxAudioChunksPool chunksPool = MvxAudioChunksPool.instance;
     chunksPool.ReturnAudioChunk(discardedAudioChunk);
 }