コード例 #1
0
ファイル: MusicPlayer.cs プロジェクト: BclEx/object-assets
 public void Play()
 {
     // Advance to the next track; a negative index means there is nothing to play.
     currentSongIndex = GetNextSongIndex();
     if (currentSongIndex < 0)
     {
         return;
     }

     // Open the selected song and start it as a temporary 2D audio stream object.
     var stream = new MP3StreamReader(songFilePaths[currentSongIndex]);
     currentAudioSourceObj = AudioUtils.Play2DAudioStream(stream);
 }
コード例 #2
0
ファイル: AudioUtils.cs プロジェクト: BclEx/object-assets
        /// <summary>
        /// Spawns a temporary GameObject that plays the given MP3 stream as 2D audio.
        /// </summary>
        /// <param name="audioStream">An open MP3 stream to play.</param>
        /// <returns>The GameObject hosting the AudioSource and stream component.</returns>
        public static GameObject Play2DAudioStream(MP3StreamReader audioStream)
        {
            var streamObject = new GameObject("tmp2DAudioStream");

            var source = streamObject.AddComponent<AudioSource>();
            //source.clip = CreateStreamingAudioClip("tmpAudioClip", audioStream);
            source.loop = true;

            // The stream component drives playback from the MP3 stream.
            var streamComponent = streamObject.AddComponent<OneShotAudioStreamComponent>();
            streamComponent.audioStream = audioStream;

            return streamObject;
        }
コード例 #3
0
ファイル: AudioUtils.cs プロジェクト: BclEx/object-assets
        /// <summary>
        /// Creates a streaming Unity audio clip that pulls PCM data from the given MP3 stream.
        /// The clip's read callback fills Unity's sample buffer and closes the stream once
        /// streaming has finished.
        /// </summary>
        /// <param name="name">Name assigned to the created AudioClip.</param>
        /// <param name="audioStream">Open MP3 stream supplying PCM samples.</param>
        /// <returns>A streaming AudioClip backed by <paramref name="audioStream"/>.</returns>
        private static AudioClip CreateStreamingAudioClip(string name, MP3StreamReader audioStream)
        {
            // Intermediate PCM buffer between the raw MP3 stream and Unity's float samples.
            var streamBuffer = new PCMAudioBuffer(audioStream.channelCount, audioStream.bitDepth, audioStream.samplingRate, 8192);

            // One sampling-rate's worth of sample frames per internal clip buffer.
            var bufferAudioClipSampleFrameCount = audioStream.samplingRate;

            return AudioClip.Create(name, bufferAudioClipSampleFrameCount, audioStream.channelCount, audioStream.samplingRate, true, delegate(float[] samples)
            {
                // Fill Unity's buffer; the sample count it returns is not needed here,
                // so the previously unused local has been removed.
                FillUnityStreamBuffer(samples, streamBuffer, audioStream);

                // Release the underlying stream as soon as it has been fully consumed.
                if (audioStream.isOpen && audioStream.isDoneStreaming)
                {
                    audioStream.Close();
                }
            });
        }
コード例 #4
0
ファイル: AudioUtils.cs プロジェクト: BclEx/object-assets
 // TODO: Handle exceptions
 /// <summary>
 /// Decodes an entire MP3 file into an in-memory PCM buffer.
 /// </summary>
 /// <param name="filePath">Path of the MP3 file to read.</param>
 /// <returns>A PCMAudioBuffer containing all decoded sample data.</returns>
 public static PCMAudioBuffer ReadMP3(string filePath)
 {
     using (var audioStream = new MP3StreamReader(filePath))
     {
         const int streamBufferSizeInSampleFrames = 16384;
         var streamBuffer = new byte[SampleFramesToBytes(streamBufferSizeInSampleFrames, audioStream.channelCount, audioStream.bitDepth)];

         // Allocate enough space for a 50% compression ratio.
         var audioData = new List<byte>(2 * (int)audioStream.compressedStreamLengthInBytes);

         // Keep decoding chunks until the stream reports it is exhausted.
         do
         {
             var framesRead = audioStream.ReadSampleFrames(streamBuffer, 0, streamBufferSizeInSampleFrames);
             if (framesRead > 0)
             {
                 var bytesRead = SampleFramesToBytes(framesRead, audioStream.channelCount, audioStream.bitDepth);
                 audioData.AddRange(new ArrayRange<byte>(streamBuffer, 0, bytesRead));
             }
         } while (!audioStream.isDoneStreaming);

         return new PCMAudioBuffer(audioStream.channelCount, audioStream.bitDepth, audioStream.samplingRate, audioData.ToArray());
     }
 }
コード例 #5
0
ファイル: AudioUtils.cs プロジェクト: BclEx/object-assets
        /// <summary>
        /// Streams audio into a floating point sample buffer.
        /// </summary>
        /// <param name="unityBuffer"></param>
        /// <param name="intermediateBuffer">A PCM sample buffer to act as an intermediary between the raw audio stream and Unity.</param>
        /// <param name="audioStream"></param>
        /// <returns>Returns the number of samples that were read from the stream.</returns>
        /// <summary>
        /// Streams audio into a floating point sample buffer.
        /// </summary>
        /// <param name="unityBuffer">Unity's output buffer of interleaved float samples.</param>
        /// <param name="intermediateBuffer">A PCM sample buffer to act as an intermediary between the raw audio stream and Unity.</param>
        /// <param name="audioStream">The MP3 stream being decoded.</param>
        /// <returns>Returns the number of samples that were read from the stream.</returns>
        public static int FillUnityStreamBuffer(float[] unityBuffer, PCMAudioBuffer intermediateBuffer, MP3StreamReader audioStream)
        {
            // Nothing left to decode: hand Unity silence.
            if (audioStream.isDoneStreaming)
            {
                Array.Clear(unityBuffer, 0, unityBuffer.Length);
                return 0;
            }

            var framesWanted = unityBuffer.Length / audioStream.channelCount;
            var framesCopied = 0;

            while (framesCopied < framesWanted)
            {
                // Read up to one intermediate-buffer's worth of the remaining frames.
                var framesRemaining = framesWanted - framesCopied;
                var framesReturned  = audioStream.ReadSampleFrames(intermediateBuffer.data, 0, Math.Min(framesRemaining, intermediateBuffer.sampleFrameCount));

                if (framesReturned <= 0)
                {
                    // Stream ran dry mid-buffer: zero out whatever Unity still expects.
                    var samplesCopied = framesCopied * audioStream.channelCount;
                    Array.Clear(unityBuffer, samplesCopied, unityBuffer.Length - samplesCopied);
                    break;
                }

                // Convert the freshly read PCM samples to floats and append them to the output buffer.
                intermediateBuffer.ToFloatArray(unityBuffer, framesCopied, framesReturned);
                framesCopied += framesReturned;
            }

            return framesCopied * audioStream.channelCount;
        }