Example #1
        ///<Summary>
        /// Begin playback or resume if paused
        ///</Summary>
        ///<Summary>
        /// Begin playback or resume if paused.
        /// On the first successful call, the whole audio file is read into a PCM
        /// buffer and scheduled to loop indefinitely; later calls just restart
        /// playback of the already-scheduled buffer.
        ///</Summary>
        public void Play()
        {
            if (player == null)
            {
                return;
            }

            if (!_hasPlayedFirst)
            {
                NSError er;
                var     audioFileBuffer = new AVAudioPcmBuffer(audioFile.ProcessingFormat, (uint)audioFile.Length);
                audioFile.ReadIntoBuffer(audioFileBuffer, out er);

                // The original ignored the read error and scheduled the buffer
                // anyway, permanently marking the file as loaded. Bail out so a
                // later Play() can retry the load.
                if (er != null)
                {
                    return;
                }

                player.ScheduleBuffer(audioFileBuffer, null, AVAudioPlayerNodeBufferOptions.Loops, null);
                _hasPlayedFirst = true;
            }

            // AVAudioTime(0) starts immediately at the node's current render time.
            player.PlayAtTime(new AVAudioTime(0));
        }
Example #2
    /// <summary>
    /// Receives a block of decoded PCM samples and schedules it on the player node.
    /// No-op when the pipeline is stopped or no player is attached.
    /// </summary>
    /// <param name="data">Decoded samples; assumed to match
    /// <c>inputAudioFormat</c> (one float per frame) — TODO confirm against the decoder.</param>
    public void onDecodedData(float[] data)
    {
        if (!running)
        {
            return;
        }

        if (audioPlayer != null)
        {
            AVAudioPcmBuffer buffer = new AVAudioPcmBuffer(inputAudioFormat, (uint)data.Length);

            // Copy the managed samples into unmanaged memory that the native
            // audio buffer list can reference for the lifetime of the buffer.
            IntPtr nativeData = Marshal.AllocHGlobal(data.Length * sizeof(float));
            Marshal.Copy(data, 0, nativeData, data.Length);

            buffer.AudioBufferList.SetData(0, nativeData, data.Length);
            buffer.FrameLength = (uint)data.Length;

            audioPlayer.ScheduleBuffer(buffer, () =>
            {
                // This buffer has finished playing: release the unmanaged copy
                // and the buffer itself. The original additionally called
                // GC.Collect() + GC.WaitForPendingFinalizers() here, which
                // blocks the audio engine's callback thread for a full GC and
                // is unnecessary once the memory is freed deterministically.
                Marshal.FreeHGlobal(nativeData);
                buffer.Dispose();
            });
        }
    }
Example #3
        /// <summary>
        /// Synthesizes one FM (frequency-modulation) tone into the next buffer of a
        /// rotating buffer pool and schedules it on the player node, panned per color.
        /// Work is queued onto <c>AudioQueue</c>; <c>Semaphore</c> gates how many
        /// buffers may be in flight at once (released in the schedule-completion
        /// callback), and <c>BufferIndex</c> cycles through <c>AudioBuffers</c>.
        /// </summary>
        /// <param name="carrierFrequency">Carrier frequency of the FM tone, in Hz.</param>
        /// <param name="modulatorFrequency">Modulator frequency of the FM tone, in Hz.</param>
        /// <param name="sampleLength">Number of frames to synthesize into the buffer.</param>
        /// <param name="pan">Stereo pan applied to the player node before scheduling.</param>
        /// <param name="color">Forwarded to the <c>NotePlayedForColor</c> event.</param>
        unsafe void PlayToneForColor(float carrierFrequency, float modulatorFrequency, uint sampleLength, float pan, NSColor color)
        {
            // Fixed modulation depth for the FM synthesis below.
            const float modulatorAmplitude = .8f;

            // Radians advanced per sample at 1 Hz; scale by frequency to get
            // per-sample phase increments for carrier and modulator.
            var unitVelocity      = 2 * Math.PI / AudioFormat.SampleRate;
            var carrierVelocity   = carrierFrequency * unitVelocity;
            var modulatorVelocity = modulatorFrequency * unitVelocity;

            AudioQueue.DispatchAsync(() =>
            {
                var sampleTime = 0f;

                // Block until a buffer slot is free — unless a stop was requested,
                // in which case skip both the wait and the synthesis entirely.
                if (!ForceStop)
                {
                    Semaphore.Wait();
                }
                else
                {
                    return;
                }

                var outChannels     = AudioFormat.ChannelCount;
                var outDataPointers = new float *[outChannels];

                var buffer = AudioBuffers[BufferIndex];

                for (int i = 0; i < outChannels; i++)
                {
                    // buffer.FloatChannelData is a native array of pointers to audio data.
                    // convert that into a managed array of pointers to audio data.
                    outDataPointers[i] = (float *)Marshal.ReadIntPtr(buffer.FloatChannelData, i * IntPtr.Size);
                }

                // NOTE(review): indexes channels 0 and 1 unconditionally — assumes
                // AudioFormat is stereo (ChannelCount >= 2); confirm, as a mono
                // format would make outDataPointers[1] out of range.
                var leftChannel  = outDataPointers[0];
                var rightChannel = outDataPointers[1];

                // FM synthesis: the modulator's sine perturbs the carrier's phase.
                // The same sample is written to both channels; panning is done by
                // the player node, not here.
                for (int sampleIndex = 0; sampleIndex < sampleLength; sampleIndex++)
                {
                    var sample = (float)Math.Sin(carrierVelocity * sampleTime + modulatorAmplitude *
                                                 Math.Sin(modulatorVelocity * sampleTime));

                    leftChannel[sampleIndex]  = sample;
                    rightChannel[sampleIndex] = sample;

                    sampleTime++;
                }

                // FrameLength must be set so the engine knows how much of the
                // buffer's capacity contains valid audio.
                buffer.FrameLength = sampleLength;

                PlayerNode.Pan = pan;

                NotePlayedForColor?.Invoke(color);

                // Release the semaphore only after the engine has finished
                // rendering this buffer, so at most N buffers are in flight.
                PlayerNode.ScheduleBuffer(buffer, () =>
                {
                    Semaphore.Release();
                });

                // Rotate to the next pooled buffer for the following tone.
                BufferIndex = (BufferIndex + 1) % AudioBuffers.Count;
            });
        }