예제 #1
0
        /// <summary>
        /// Creates the audio engine, attaches a shared pool mixer and a fixed pool
        /// of player nodes, then prepares and starts the engine.
        /// </summary>
        /// <exception cref="Exception">Thrown when the audio engine fails to start.</exception>
        public IOSAudioDriver( )
        {
            _engine = new AVAudioEngine();
            _mixer  = new AVAudioMixerNode();

            _engine.AttachNode(_mixer);
            // Route the pool mixer into the engine's main mixer, adopting the
            // main mixer's current output format on bus 0.
            _engine.Connect(_mixer, _engine.MainMixerNode, _engine.MainMixerNode.GetBusOutputFormat(0));

            _players = new Player[MaxPlayers];
            for (int i = 0; i < MaxPlayers; i++)
            {
                var player = new Player();
                player.Callback = player.OnPlayedBack;
                player.Done     = true;  // pool entries start idle/available
                player.Node     = new AVAudioPlayerNode();
                player.PlayId   = 1;
                player.Id       = (uint)i;
                _players[i]     = player;

                _engine.AttachNode(player.Node);
                // Player nodes are connected later, once a concrete format is known.
                //_engine.Connect(player.Node, _engine.MainMixerNode, _format);
            }

            _engine.Prepare();
            // BUG FIX: the start error was silently discarded; surface a failed
            // start so callers do not operate on a dead engine.
            if (!_engine.StartAndReturnError(out var error))
            {
                throw new Exception("Error starting audio engine: " + error);
            }
        }
예제 #2
0
    /// <summary>
    /// Configures the shared AVAudioSession (preferred sample rate, category,
    /// activation), builds the playback graph (player node -> main mixer) and
    /// starts both the engine and the player node.
    /// </summary>
    /// <exception cref="Exception">
    /// Thrown when the preferred sample rate cannot be set or the engine fails to start.
    /// </exception>
    private void initPlayer()
    {
        audioEngine = new AVAudioEngine();

        // BUG FIX: no need to pre-allocate an NSError — the out parameter
        // overwrites it; also fixed the "preffered" typo in the message.
        if (!AVAudioSession.SharedInstance().SetPreferredSampleRate(sampleRate, out NSError error))
        {
            throw new Exception("Error setting preferred sample rate for player: " + error);
        }
        // NOTE(review): the results of SetCategory/SetActive are not checked here,
        // matching the original behavior — consider verifying them as well.
        AVAudioSession.SharedInstance().SetCategory(AVAudioSessionCategory.PlayAndRecord, AVAudioSessionCategoryOptions.InterruptSpokenAudioAndMixWithOthers);
        AVAudioSession.SharedInstance().SetActive(true);

        audioPlayer = new AVAudioPlayerNode();
        // Mirror the hardware output volume on our player node.
        setVolume(AVAudioSession.SharedInstance().OutputVolume);
        // Non-interleaved 32-bit float PCM at the session's preferred rate.
        inputAudioFormat = new AVAudioFormat(AVAudioCommonFormat.PCMFloat32, sampleRate, (uint)channels, false);

        audioEngine.AttachNode(audioPlayer);
        audioEngine.Connect(audioPlayer, audioEngine.MainMixerNode, inputAudioFormat);
        audioEngine.Prepare();
        if (!audioEngine.StartAndReturnError(out error))
        {
            throw new Exception("Error starting playback audio engine: " + error);
        }
        audioPlayer.Play();
    }
예제 #3
0
        ///<Summary>
        /// Load wave or mp3 audio file from the Android assets folder
        ///</Summary>
        public bool Load(string fileName)
        {
            DeletePlayer();

            NSError error = new NSError();

            if (!String.IsNullOrWhiteSpace(fileName))
            {
                string directory = Path.GetDirectoryName(fileName);
                string filename  = Path.GetFileNameWithoutExtension(fileName);
                string extension = Path.GetExtension(fileName).Substring(1);
                NSUrl  url       = NSBundle.MainBundle.GetUrlForResource(filename, extension, directory);
                audioFile = new AVAudioFile(url, out error);
            }

            if (audioFile != null)
            {
                componentDescription = new AudioComponentDescription();
                componentDescription.ComponentType    = AudioComponentType.FormatConverter;
                componentDescription.ComponentSubType = (int)AudioUnitSubType.Varispeed;

                engine = new AVAudioEngine();
                player = new AVAudioPlayerNode();
                pitch  = new AVAudioUnitTimePitch(componentDescription);


                engine.AttachNode(player);
                engine.AttachNode(pitch);

                engine.Connect(player, pitch, audioFile.ProcessingFormat);
                engine.Connect(pitch, engine.MainMixerNode, audioFile.ProcessingFormat);

                engine.Prepare();
                NSError startError = new NSError();
                engine.StartAndReturnError(out startError);
            }

            return(true);
        }
예제 #4
0
        /// <summary>
        /// Builds the audio graph (sampler -> main mixer), loads the default
        /// sound font, registers observers, starts the engine and configures
        /// the session for playback.
        /// </summary>
        public CoreMidiSynthesizer()
        {
            details = this;

            // Wire a mono 44.1 kHz sampler unit into the engine's main mixer.
            _engine  = new AVAudioEngine();
            _sampler = new AVAudioUnitSampler();
            _engine.AttachNode(_sampler);
            var monoFormat = new AVAudioFormat(44100, 1);
            _engine.Connect(_sampler, _engine.MainMixerNode, format: monoFormat);

            LoadSoundFontIntoSampler(0);
            AddObservers();
            StartEngine();
            SetSessionPlayback();
        }
예제 #5
0
        /// <summary>
        /// Sets up the playback engine: attaches the player node, loads the
        /// "drumLoop.caf" bundle resource, optionally wires up effect-component
        /// discovery, configures the iOS audio session, and installs a recovery
        /// handler for crashed audio units.
        /// </summary>
        /// <param name="componentsFoundCallback">
        /// Optional callback; when non-null, the effects list is populated and
        /// refreshed whenever the registered audio components change.
        /// </param>
        /// <exception cref="NullReferenceException">Thrown when drumLoop.caf is missing from the bundle.</exception>
        /// <exception cref="NSErrorException">Thrown (iOS only) when the session category cannot be set.</exception>
        public SimplePlayEngine(Action componentsFoundCallback = null)
        {
            PresetList = new AUAudioUnitPreset [0];
            this.componentsFoundCallback = componentsFoundCallback;
            // NOTE(review): `engine` and `player` appear to be field initializers
            // declared elsewhere in this class — confirm they are non-null here.
            engine.AttachNode(player);

            var fileUrl = NSBundle.MainBundle.GetUrlForResource("drumLoop", "caf");

            if (fileUrl == null)
            {
                throw new NullReferenceException("drumploop.caf file not found");
            }

            SetPlayerFile(fileUrl);

            if (componentsFoundCallback != null)
            {
                UpdateEffectsList();
                // Keep the effects list current as audio components register/unregister.
                AUAudioUnit.Notifications.ObserveAudioComponentRegistrationsChanged((sender, e) => UpdateEffectsList());
            }

#if __IOS__
            var error = AVAudioSession.SharedInstance().SetCategory(AVAudioSessionCategory.Playback);
            if (error != null)
            {
                throw new NSErrorException(error);
            }
#endif

            // If the currently selected effect's audio unit is invalidated
            // (e.g. the extension process crashed), drop back to "no effect".
            AUAudioUnit.Notifications.ObserveAudioComponentInstanceInvalidation((sender, e) => {
                var crashedAU = e.Notification.Object as AUAudioUnit;
                if (AudioUnit == crashedAU)
                {
                    SelectEffectWithComponentDescription(null, null);
                }
            });
        }
예제 #6
0
        /// <summary>
        /// Allocates the in-flight PCM buffer pool, creates the player node,
        /// connects it to the main mixer and starts the audio engine.
        /// </summary>
        /// <exception cref="Exception">Thrown when the audio engine fails to start.</exception>
        void Init()
        {
            FrequenciesByOctave = new FrequencyDictionary();

            // Pre-allocate the fixed pool of PCM buffers reused during playback.
            for (int i = 0; i < InFlightAudioBuffers; i++)
            {
                var buffer = new AVAudioPcmBuffer(AudioFormat, SamplesPerBuffer);
                AudioBuffers.Add(buffer);
            }

            PlayerNode = new AVAudioPlayerNode {
                Volume = .8f
            };
            AudioEngine.AttachNode(PlayerNode);
            AudioEngine.Connect(PlayerNode, AudioEngine.MainMixerNode, AudioFormat);

            // BUG FIX: the start error was previously ignored; fail loudly so the
            // caller knows the audio graph is not running.
            if (!AudioEngine.StartAndReturnError(out var error))
            {
                throw new Exception("Error starting audio engine: " + error);
            }
        }
예제 #7
0
        /// <summary>
        /// Builds an analysis graph: input node -> main mixer (with a low-pass EQ
        /// node attached), installs a tap on the main mixer that hands each
        /// rendered buffer to a worker thread for processing, then starts the engine.
        /// </summary>
        public void AudioSetupStart()
        {
            FloatQueue        = new Queue <float>();
            engine            = new AVAudioEngine();
            nodeEQ            = new AVAudioUnitEQ(1);
            nodeEQ.GlobalGain = 1;
            engine.AttachNode(nodeEQ);

            AVAudioUnitEQFilterParameters filter = nodeEQ.Bands[0];

            filter.FilterType = AVAudioUnitEQFilterType.LowPass;
            filter.Frequency  = 1000; //In hertz
            filter.Bandwidth  = 1;
            filter.Bypass     = false;
            // in db -96 db through 24 d
            filter.Gain = 50;

            //not sure if this is necessary
            nodeEQ.Bands[0] = filter;

            //1
            // Tap format must match the main mixer's output format on bus 0.
            AVAudioFormat format2 = engine.MainMixerNode.GetBusOutputFormat(0);

            //2
            //AVAudioPcmBuffer buffMix = new AVAudioPcmBuffer(engine.MainMixerNode.GetBusInputFormat(0),2);
            //AVAudioTime timeMix = engine.MainMixerNode.LastRenderTime;
            //AVAudioNodeTapBlock MixerBlock = new AVAudioNodeTapBlock((buffMix, timeMix) =>

            //2
            // The tap fires for every 1024-frame buffer rendered by the main mixer.
            engine.MainMixerNode.InstallTapOnBus(0, 1024, format2, (AVAudioPcmBuffer buffMix, AVAudioTime when) =>
            {
                //Console.WriteLine("Called");

                //3     **Dont have an 'Updater' also not checking for null**
                // NOTE(review): FloatChannelData is a pointer to an array of
                // per-channel sample pointers (float**), not the raw samples
                // themselves — confirm T_Proccess dereferences it accordingly.
                IntPtr channelData = buffMix.FloatChannelData;

                int lengthOfBuffer = (int)buffMix.FrameLength;

                int frame_length = (int)buffMix.FrameLength;

                /*
                 * byte[] bytesArray = new byte[lengthOfBuffer];
                 *
                 * Marshal.Copy(channelData, bytesArray, 0, lengthOfBuffer);
                 */
                /*
                 * double total = 0;
                 * int nonZero = 0;
                 * for (int a = 0; a < buffMix.FrameLength - 4; a+=1)
                 * {
                 *  //float tempx = BitConverter.ToSingle(bytesArray, a);
                 *  float tempx = bytesArray[a];
                 *  Console.WriteLine(tempx);
                 *  double temp = Math.Pow(tempx, 2);
                 *  total += temp;
                 *  if (temp.Equals(0))
                 *      nonZero++;
                 * }
                 * int tester;
                 * //Need to figure out how the buffer works, if at all
                 * total = Math.Sqrt(total / nonZero);
                 * double avgPower = 20 * Math.Log10(total);
                 * avgPower /= 160;
                 *
                 * if (avgPower > .9)
                 *  High_Level_Detected++;
                 * FloatQueue.Enqueue((float)avgPower);
                 * //Console.WriteLine(avgPower);
                 *
                 * Marshal.FreeHGlobal(channelData);
                 */
                //var ns = buffMix.MutableCopy(); //test later
                // NOTE(review): a new Thread is spawned per tap callback (many per
                // second) — consider a queue/worker pool; also the buffer pointer
                // may not remain valid after the callback returns — verify.
                T_Proccess tws   = new T_Proccess(channelData, lengthOfBuffer, frame_length);
                Thread processer = new Thread(new ThreadStart(tws.ThreadProc));
                processer.Start();
            });

            AVAudioFormat format = engine.InputNode.GetBusInputFormat(0);

            // Route microphone input into the main mixer; the EQ node is attached
            // and also connected to the mixer using the input format.
            engine.Connect(engine.InputNode, engine.MainMixerNode, format);
            engine.Connect(nodeEQ, engine.MainMixerNode, format);

            StartEngine();
            started = true;
        }