Inheritance: IDisposable
        /// <summary>
        /// Detaches from the wrapped audio session and disposes it.
        /// Safe to call more than once.
        /// </summary>
        public void Dispose()
        {
            // FIX: guard makes Dispose idempotent — a second call used to throw
            // NullReferenceException on the already-nulled field.
            if (r_Session == null)
            {
                return;
            }

            // Unsubscribe first so no VolumeChanged callback fires on a disposed session.
            r_Session.VolumeChanged -= Session_VolumeChanged;

            r_Session.Dispose();
            r_Session = null;
        }
        /// <summary>
        /// Wraps the given audio session, caching its process id and current
        /// mute/volume state, and subscribes to its volume-change notifications.
        /// </summary>
        public BrowserAudioSession(AudioSession rpSession)
        {
            // Keep the session and snapshot its identity and current state.
            r_Session        = rpSession;
            r_BrowserProcess = rpSession.ProcessID;
            r_IsMute         = rpSession.IsMute;
            r_Volume         = rpSession.Volume;

            // Track subsequent volume changes raised by the session.
            r_Session.VolumeChanged += Session_VolumeChanged;
        }
Example #3
0
 // Ensures the global AudioSession is initialized before the renderer is used.
 // NOTE(review): type name looks like a typo for "PlayButtonRenderer", but
 // renaming would break callers, so it is left as-is.
 public PlayButtonRederer()
 {
     AudioSession.Initialize();
 }
 /// <summary>
 /// Transmits the given session over the connection, then drops it from the
 /// local session registry so it is not sent again.
 /// </summary>
 public void send(AudioSession toSend)
 {
     var sessionSender = new AudioSessionSender(connection);
     sessionSender.send(tag, toSend);
     AudioSession.removeSessionID(toSend.id);
 }
Example #5
0
 // A session state change may affect whether the watched processes should be
 // dimmed, so the dim state is recomputed from both watchers.
 private static void Session_StateChanged(AudioSession sender, AudioSessionState state)
 {
     RecalculateProcessDimState(processToDimWatcher, priorityProcessWatcher);
 }
Example #6
0
 // A disconnected session may change which processes should be dimmed, so the
 // dim state is recomputed from both watchers (same handling as state changes).
 private static void Session_Disconnected(AudioSession sender, AudioSessionDisconnectReason reason)
 {
     RecalculateProcessDimState(processToDimWatcher, priorityProcessWatcher);
 }
Example #7
0
        /// <summary>
        /// Configures the shared AudioSession for simultaneous playback and
        /// recording, builds a RemoteIO audio unit with a matching LinearPCM
        /// stream format, registers the render callback, and initializes the unit.
        /// </summary>
        void prepareAudioUnit()
        {
            // AudioSession: activate and request a short (10 ms) IO buffer for low latency.
            AudioSession.Initialize();
            AudioSession.SetActive(true);
            AudioSession.Category = AudioSessionCategory.PlayAndRecord;
            AudioSession.PreferredHardwareIOBufferDuration = 0.01f;

            // Creating AudioComponentDescription instance of RemoteIO Audio Unit
            var cd = new AudioComponentDescription()
            {
                componentType         = AudioComponentDescription.AudioComponentType.kAudioUnitType_Output,
                componentSubType      = AudioComponentDescription.AudioComponentSubType.kAudioUnitSubType_RemoteIO,
                componentManufacturer = AudioComponentDescription.AudioComponentManufacturerType.kAudioUnitManufacturer_Apple,
                componentFlags        = 0,
                componentFlagsMask    = 0
            };

            // Getting AudioComponent from the description
            _component = AudioComponent.FindComponent(cd);

            // Getting Audiounit
            _audioUnit = AudioUnit.CreateInstance(_component);

            // turning on microphone: enable IO on the input scope of element 1
            _audioUnit.SetEnableIO(true,
                                   AudioUnit.AudioUnitScopeType.kAudioUnitScope_Input,
                                   1 // Remote Input
                                   );

            // setting AudioStreamBasicDescription
            // Sample width differs by target: the simulator uses float samples,
            // real devices use fixed-point (uint-sized) samples.
            int AudioUnitSampleTypeSize             = (MonoTouch.ObjCRuntime.Runtime.Arch == MonoTouch.ObjCRuntime.Arch.SIMULATOR) ? sizeof(float) : sizeof(uint);
            AudioStreamBasicDescription audioFormat = new AudioStreamBasicDescription()
            {
                SampleRate = _sampleRate,
                Format     = AudioFormatType.LinearPCM,
                //kAudioFormatFlagsAudioUnitCanonical = kAudioFormatFlagIsSignedInteger | kAudioFormatFlagsNativeEndian | kAudioFormatFlagIsPacked | kAudioFormatFlagIsNonInterleaved | (kAudioUnitSampleFractionBits << kLinearPCMFormatFlagsSampleFractionShift),
                FormatFlags      = (AudioFormatFlags)((int)AudioFormatFlags.IsSignedInteger | (int)AudioFormatFlags.IsPacked | (int)AudioFormatFlags.IsNonInterleaved | (int)(kAudioUnitSampleFractionBits << (int)AudioFormatFlags.LinearPCMSampleFractionShift)),
                ChannelsPerFrame = 2,
                BytesPerPacket   = AudioUnitSampleTypeSize,
                BytesPerFrame    = AudioUnitSampleTypeSize,
                FramesPerPacket  = 1,
                BitsPerChannel   = 8 * AudioUnitSampleTypeSize,
                Reserved         = 0
            };

            // Apply the same format to both ends of the render chain.
            // NOTE(review): the element comments look swapped relative to the usual
            // RemoteIO convention (element 0 = output, element 1 = input) — confirm.
            _audioUnit.SetAudioFormat(audioFormat,
                                      AudioUnit.AudioUnitScopeType.kAudioUnitScope_Input,
                                      0 // Remote output
                                      );
            _audioUnit.SetAudioFormat(audioFormat,
                                      AudioUnit.AudioUnitScopeType.kAudioUnitScope_Output,
                                      1 // Remote input
                                      );

            // setting callback

            /*
             * if (MonoTouch.ObjCRuntime.Runtime.Arch == MonoTouch.ObjCRuntime.Arch.SIMULATOR)
             *  _audioUnit.RenderCallback += new EventHandler<AudioUnitEventArgs>(simulator_callback);
             * else
             *  _audioUnit.RenderCallback += new EventHandler<AudioUnitEventArgs>(device_callback);
             */
            _audioUnit.RenderCallback += new EventHandler <AudioUnitEventArgs>(_callback);
            // initialize
            _audioUnit.Initialize();
        }
 // Microphone permission must be granted before AudioSession APIs are exercised
 // by the tests, so it is requested up front.
 public AudioSessionTest()
 {
     TestRuntime.RequestMicrophonePermission();
     AudioSession.Initialize();
 }
Example #9
0
        /// <summary>
        /// Sets up the audio session for playback, creates an output audio queue
        /// with a mono 16-bit LinearPCM format, primes it with tone-filled buffers,
        /// and keeps it running by recycling buffers as they complete.
        /// </summary>
        public override bool FinishedLaunching(UIApplication app, NSDictionary options)
        {
            // Bring the audio session up in media-playback mode.
            AudioSession.Initialize();
            AudioSession.Category = AudioSessionCategory.MediaPlayback;

            // Describe the generated samples: LinearPCM, mono, 16-bit signed integers.
            sampleRate = AudioSession.CurrentHardwareSampleRate;
            var streamFormat = new AudioStreamBasicDescription()
            {
                SampleRate       = sampleRate,
                Format           = AudioFormatType.LinearPCM,
                FormatFlags      = AudioFormatFlags.LinearPCMIsSignedInteger | AudioFormatFlags.LinearPCMIsPacked,
                BitsPerChannel   = 16,
                ChannelsPerFrame = 1,
                BytesPerFrame    = 2,
                BytesPerPacket   = 2,
                FramesPerPacket  = 1,
            };

            // Output queue; the buffer size fixes the tone granularity (40.5 Hz : 31.25 Hz).
            var outputQueue         = new OutputAudioQueue(streamFormat);
            var bytesPerQueueBuffer = (sampleRate > 16000)? 2176 : 512;

            // Allocate the buffers, fill each with a tone, and queue them for playback.
            var queueBuffers = new AudioQueueBuffer * [numBuffers];

            for (int index = 0; index < numBuffers; index++)
            {
                outputQueue.AllocateBuffer(bytesPerQueueBuffer, out queueBuffers [index]);
                GenerateTone(queueBuffers [index]);
                outputQueue.EnqueueBuffer(queueBuffers [index], null);
            }

            // When the audio system is done with a buffer, optionally regenerate the
            // tone (cycling Sine..Square when alternating) and re-enqueue the buffer.
            outputQueue.OutputCompleted += (object sender, OutputCompletedEventArgs e) => {
                if (alternate)
                {
                    outputWaveForm += 1;
                    if (outputWaveForm > WaveForm.Square)
                    {
                        outputWaveForm = WaveForm.Sine;
                    }
                    GenerateTone(e.UnsafeBuffer);
                }
                outputQueue.EnqueueBuffer(e.UnsafeBuffer, null);
            };

            outputQueue.Start();
            return(true);
        }
 /// <summary>
 /// Strips the "tag + separator" prefix from the incoming message and
 /// blacklists the named session so its volume is no longer forwarded.
 /// </summary>
 public void handleMessage(string message)
 {
     var payload = message.Substring(Tag.Length + 1);
     logic.sendVolumeBlacklist.Add(AudioSession.getSessionId(payload));
 }
        /// <summary>
        /// Open the sound device, sets up an audio context, and makes the new context
        /// the current context. Note that this method will stop the playback of
        /// music that was running prior to the game start. If any error occurs, then
        /// the state of the controller is reset.
        /// </summary>
        /// <returns>True if the sound controller was setup, and false if not.</returns>
        private bool OpenSoundController()
        {
#if MONOMAC
            // Mac-only: pin the mixer output rate before opening the device.
            alcMacOSXMixerOutputRate(PREFERRED_MIX_RATE);
#endif
            try
            {
                // Empty string selects the default OpenAL device.
                _device = Alc.OpenDevice(string.Empty);
            }
            catch (Exception ex)
            {
                // Record the failure for later diagnostics instead of crashing startup.
                _SoundInitException = ex;
                return(false);
            }
            if (CheckALError("Could not open AL device"))
            {
                return(false);
            }
            if (_device != IntPtr.Zero)
            {
#if ANDROID
                // Attach activity event handlers so we can pause and resume all playing sounds
                AndroidGameActivity.Paused  += Activity_Paused;
                AndroidGameActivity.Resumed += Activity_Resumed;

                // Query the device for the ideal frequency and update buffer size so
                // we can get the low latency sound path.

                /*
                 * The recommended sequence is:
                 *
                 * Check for feature "android.hardware.audio.low_latency" using code such as this:
                 * import android.content.pm.PackageManager;
                 * ...
                 * PackageManager pm = getContext().getPackageManager();
                 * boolean claimsFeature = pm.hasSystemFeature(PackageManager.FEATURE_AUDIO_LOW_LATENCY);
                 * Check for API level 17 or higher, to confirm use of android.media.AudioManager.getProperty().
                 * Get the native or optimal output sample rate and buffer size for this device's primary output stream, using code such as this:
                 * import android.media.AudioManager;
                 * ...
                 * AudioManager am = (AudioManager) getSystemService(Context.AUDIO_SERVICE);
                 * String sampleRate = am.getProperty(AudioManager.PROPERTY_OUTPUT_SAMPLE_RATE));
                 * String framesPerBuffer = am.getProperty(AudioManager.PROPERTY_OUTPUT_FRAMES_PER_BUFFER));
                 * Note that sampleRate and framesPerBuffer are Strings. First check for null and then convert to int using Integer.parseInt().
                 * Now use OpenSL ES to create an AudioPlayer with PCM buffer queue data locator.
                 *
                 * See http://stackoverflow.com/questions/14842803/low-latency-audio-playback-on-android
                 */

                // Fall back to safe defaults when the device does not report values.
                int frequency     = DEFAULT_FREQUENCY;
                int updateSize    = DEFAULT_UPDATE_SIZE;
                int updateBuffers = DEFAULT_UPDATE_BUFFER_COUNT;
                if (Android.OS.Build.VERSION.SdkInt >= Android.OS.BuildVersionCodes.JellyBeanMr1)
                {
                    Android.Util.Log.Debug("OAL", Game.Activity.PackageManager.HasSystemFeature(PackageManager.FeatureAudioLowLatency) ? "Supports low latency audio playback." : "Does not support low latency audio playback.");

                    var audioManager = Game.Activity.GetSystemService(Context.AudioService) as AudioManager;
                    if (audioManager != null)
                    {
                        var result = audioManager.GetProperty(AudioManager.PropertyOutputSampleRate);
                        if (!string.IsNullOrEmpty(result))
                        {
                            frequency = int.Parse(result, CultureInfo.InvariantCulture);
                        }
                        result = audioManager.GetProperty(AudioManager.PropertyOutputFramesPerBuffer);
                        if (!string.IsNullOrEmpty(result))
                        {
                            updateSize = int.Parse(result, CultureInfo.InvariantCulture);
                        }
                    }

                    // If 4.4 or higher, then we don't need to double buffer on the application side.
                    // See http://stackoverflow.com/a/15006327
                    // Use the explicit value rather than a constant as the 4.2 SDK (the build SDK) does not define a constant for 4.4.
                    if ((int)Android.OS.Build.VERSION.SdkInt >= 19)
                    {
                        updateBuffers = 1;
                    }
                }
                else
                {
                    Android.Util.Log.Debug("OAL", "Android 4.2 or higher required for low latency audio playback.");
                }
                Android.Util.Log.Debug("OAL", "Using sample rate " + frequency + "Hz and " + updateBuffers + " buffers of " + updateSize + " frames.");

                // These are missing and non-standard ALC constants
                const int AlcFrequency     = 0x1007;
                const int AlcUpdateSize    = 0x1014;
                const int AlcUpdateBuffers = 0x1015;

                // Attribute list is a flat (key, value, ..., 0) array as ALC expects.
                int[] attribute = new[]
                {
                    AlcFrequency, frequency,
                    AlcUpdateSize, updateSize,
                    AlcUpdateBuffers, updateBuffers,
                    0
                };
#elif IOS
                // iOS: suspend/resume the AL context around audio-session interruptions
                // (e.g. incoming phone calls).
                AudioSession.Initialize();

                AudioSession.Interrupted += (sender, e) => {
                    AudioSession.SetActive(false);
                    Alc.MakeContextCurrent(ContextHandle.Zero);
                    Alc.SuspendContext(_context);
                };
                AudioSession.Resumed += (sender, e) => {
                    AudioSession.SetActive(true);
                    Alc.MakeContextCurrent(_context);
                    Alc.ProcessContext(_context);
                };

                int[] attribute = new int[0];
#else
                // Other platforms: no special context attributes.
                int[] attribute = new int[0];
#endif
                _context = Alc.CreateContext(_device, attribute);
                if (CheckALError("Could not create AL context"))
                {
                    CleanUpOpenAL();
                    return(false);
                }

                if (_context != ContextHandle.Zero)
                {
                    Alc.MakeContextCurrent(_context);
                    if (CheckALError("Could not make AL context current"))
                    {
                        CleanUpOpenAL();
                        return(false);
                    }
                    return(true);
                }
            }
            return(false);
        }
        /// <summary>
        /// One-time view setup: initializes the recording view model and audio
        /// session, wires the call-event handler, builds the navigation menu
        /// button, styles the start/stop buttons and labels, and attaches the
        /// start/stop/vehicle/support touch handlers.
        /// </summary>
        public override void ViewDidLoad()
        {
            base.ViewDidLoad();

            // Initialize Recorder
            RVM = new RecordingViewModel();

            // Perform any additional setup after loading the view, typically from a nib.
            // Init audio session
            AudioSession.Initialize(null, NSRunLoop.NSDefaultRunLoopMode);
            AudioSession.AudioRouteChanged += AudioSession_AudioRouteChanged;

            // Init call event handler
            callcenter.CallEventHandler += callState.CallEvent;

            // Set menu button
            UIImageView menuImageView = new UIImageView();

            menuImageView.Bounds = new CGRect(0, 0, 20, 20);
            menuImageView.Image  = UIImage.FromBundle("threelines");

            // Tapping the menu while a recording is running first warns the user
            // and only stops the recording after explicit confirmation.
            menuButton = new UIBarButtonItem(
                menuImageView.Image,
                UIBarButtonItemStyle.Plain,
                (s, e) => {
                System.Diagnostics.Debug.WriteLine("menu button tapped");
                if (RVM.IsRecording)
                {
                    SidebarController.ToggleMenu();
                    var okAlertController = UIAlertController.Create("", UiStrings.MainNotificationSuspendUserAction, UIAlertControllerStyle.Alert);
                    //Add Action
                    okAlertController.AddAction(UIAlertAction.Create(NSBundle.MainBundle.GetLocalizedString("Vernacular_P0_dialog_ok"), UIAlertActionStyle.Default, alert => {
                        RVM.StopRecordingCommand.Execute(null);
                    }));

                    PresentViewController(okAlertController, true, null);
                }
                else
                {
                    SidebarController.ToggleMenu();
                }
            }
                );

            // Add button to item array
            barButtonItems [0] = menuButton;
            NavigationItem.LeftBarButtonItem = menuButton;

            // Set bottom view labels
            lblTitle.Text = "";
            lblBody.Text  = "";

            // Set start button style
            btnStart.SetBackgroundImage(UIImage.FromBundle("CarButton"), UIControlState.Normal);
            View.BackgroundColor = StyleSettings.ThemePrimaryDarkLightenedColor();
            btnStart.SetTitle("", UIControlState.Normal);

            // Add square to stop || TODO: change to image
            // NOTE(review): stopSquare is created but never added as a subview here —
            // verify whether it is attached elsewhere or is dead setup code.
            UIView stopSquare = new UIView(new RectangleF(
                                               (float)(btnStop.Bounds.X + 15),
                                               (float)(btnStop.Bounds.Y + 15),
                                               (float)(btnStop.Bounds.Width - 30),
                                               (float)(btnStop.Bounds.Height - 30))
                                           );

            // Set stop button attributes
            stopButtonTranslation      = btnStopBottomConstraint.Constant;
            stopSquare.BackgroundColor = UIColor.White;
            btnStop.SetBackgroundImage(UIImage.FromBundle("srs-stop-btn"), UIControlState.Normal);
            btnStop.Layer.CornerRadius = btnStop.Bounds.Width / 2;

            lblLeft.Hidden   = true;
            lblCenter.Hidden = true;
            lblRight.Hidden  = true;

            // Set initial status of vehicle type & support
            btnSupport.Hidden        = true;
            btnVehicle.Hidden        = true;
            lblCalibration.Hidden    = true;
            lblCalibration.Text      = NSBundle.MainBundle.LocalizedString("Vernacular_P0_label_setup", null);
            lblCalibration.TextColor = StyleSettings.SubduedTextOnDarkColor();

            // Logic
            // Start only when calibration has been completed and no recording is
            // already in progress; the passenger-number picker kicks off the flow.
            btnStart.TouchUpInside += (object sender, EventArgs e) => {
                if (Settings.CalibrationDone)
                {
                    if (RVM.IsRecording)
                    {
                        return;
                    }

                    var passengerNumberPickerVC = Storyboard.InstantiateViewController("PassengerNumberPickerViewController") as PassengerNumberPickerViewController;
                    passengerNumberPickerVC.ModalPresentationStyle = UIModalPresentationStyle.OverCurrentContext;
                    passengerNumberPickerVC.ModalTransitionStyle   = UIModalTransitionStyle.CoverVertical;
                    passengerNumberPickerVC.parentVC = this;
                    NavigationController.PresentViewController(passengerNumberPickerVC, true, null);
                }
                else
                {
                    // TODO: warn user that calibration has to be done
                }
            };

            // Stop: tear down the service binding, animate, stop recording and upload.
            btnStop.TouchUpInside += (object sender, EventArgs e) => {
                UnbindFromService();
                AnimateStopButton();
                RVM.OnDestroy();
                StopRecording();
                UpdateRecordButtonUi();
                UploadData();
                lblCenter.Hidden = true;
            };

            btnVehicle.TouchUpInside += (object sender, EventArgs e) => {
                OpenSettingsVC();
            };

            btnSupport.TouchUpInside += (object sender, EventArgs e) => {
                OpenSettingsVC();
            };
        }
Example #13
0
        /// <summary>
        /// Wires the native button: tapping toggles between starting an audio
        /// recording ("Gravar") and stopping it ("Parar"), after which the
        /// recorded file's URL is broadcast via MessagingCenter.
        /// </summary>
        /// <remarks>
        /// FIX: the original wrapped everything in
        /// <c>catch (Exception exe) { throw new Exception(exe.Message); }</c>,
        /// which destroyed both the exception type and the stack trace while
        /// adding no handling. Exceptions now propagate unchanged.
        /// </remarks>
        protected override void OnElementChanged(ElementChangedEventArgs <Button> e)
        {
            base.OnElementChanged(e);

            if (e.NewElement != null)
            {
                // override events and other setup for the new element
            }
            else if (e.OldElement != null)
            {
                // unsubscribe event handlers from the old element
            }

            if (Control != null)
            {
                Control.TouchUpInside += (send, ebla) =>
                {
                    if (Control.TitleLabel.Text.Contains("Gravar"))
                    {
                        Console.WriteLine("Begin Recording");

                        // Switch the session into record mode before touching the recorder.
                        AudioSession.Category = AudioSessionCategory.RecordAudio;
                        AudioSession.SetActive(true);

                        if (!PrepareAudioRecording())
                        {
                            return;
                        }
                        if (!_recorder.Record())
                        {
                            return;
                        }
                        _stopwatch = new Stopwatch();
                        _stopwatch.Start();

                        Control.SetTitle("Parar", UIControlState.Normal);
                    }
                    else
                    {
                        // FIX: this branch previously called _recorder.Record() again
                        // (restarting the recording it was supposed to stop); it now
                        // just bails out when there is nothing to stop.
                        if (_recorder == null)
                        {
                            return;
                        }
                        _recorder.Stop();
                        Control.SetTitle($" Gravar {_stopwatch.Elapsed:hh\\:mm\\:ss} ", UIControlState.Normal);
                        _stopwatch.Stop();

                        MessagingCenter.Send <StartRecordingButtonRenderer, NSUrl>(this, "audioFile", _audioFilePath);
                    }
                };
                // Release the player once playback of the recorded file completes.
                _observer = NSNotificationCenter.DefaultCenter.AddObserver(AVPlayerItem.DidPlayToEndTimeNotification, delegate
                {
                    _player?.Dispose();
                    _player = null;
                });
            }
        }
Example #14
0
 // Ensures the global AudioSession is initialized before the renderer's
 // record/stop handlers change the session category.
 public StartRecordingButtonRenderer()
 {
     AudioSession.Initialize();
 }
Example #15
0
 // Loads the idiom-specific nib (iPhone vs iPad) and initializes the global
 // AudioSession before any audio work happens in this controller.
 public SoundViewController()
     : base(UserInterfaceIdiomIsPhone ? "SoundViewController_iPhone" : "SoundViewController_iPad", null)
 {
     AudioSession.Initialize();
 }
Example #16
0
        /// <summary>
        /// Wires the record / stop / playback buttons: recording switches the
        /// audio session to RecordAudio and times the take with a stopwatch;
        /// playback switches to MediaPlayback and plays the captured file.
        /// </summary>
        public override void ViewDidLoad()
        {
            base.ViewDidLoad();

            this.RecordingStatusLabel.Text   = "";
            this.LengthOfRecordingLabel.Text = "";

            // start recording wireup
            this.StartRecordingButton.TouchUpInside += (sender, e) => {
                Console.WriteLine("Begin Recording");

                // Recording requires the RecordAudio category and an active session.
                AudioSession.Category = AudioSessionCategory.RecordAudio;
                AudioSession.SetActive(true);

                if (!PrepareAudioRecording())
                {
                    RecordingStatusLabel.Text = "Error preparing";
                    return;
                }

                if (!recorder.Record())
                {
                    // NOTE(review): same message as the prepare failure above —
                    // possibly intended to read "Error recording".
                    RecordingStatusLabel.Text = "Error preparing";
                    return;
                }

                // Time the take and flip button enablement to "recording" state.
                this.stopwatch = new Stopwatch();
                this.stopwatch.Start();
                this.LengthOfRecordingLabel.Text     = "";
                this.RecordingStatusLabel.Text       = "Recording";
                this.StartRecordingButton.Enabled    = false;
                this.StopRecordingButton.Enabled     = true;
                this.PlayRecordedSoundButton.Enabled = false;
            };

            // stop recording wireup
            this.StopRecordingButton.TouchUpInside += (sender, e) => {
                this.recorder.Stop();

                // Show the elapsed time and flip enablement back to "idle" state.
                this.LengthOfRecordingLabel.Text = string.Format("{0:hh\\:mm\\:ss}", this.stopwatch.Elapsed);
                this.stopwatch.Stop();
                this.RecordingStatusLabel.Text       = "";
                this.StartRecordingButton.Enabled    = true;
                this.StopRecordingButton.Enabled     = false;
                this.PlayRecordedSoundButton.Enabled = true;
            };

            // Release the player once playback of the recorded file completes.
            observer = NSNotificationCenter.DefaultCenter.AddObserver(AVPlayerItem.DidPlayToEndTimeNotification, delegate(NSNotification n) {
                player.Dispose();
                player = null;
            });

            // play recorded sound wireup
            this.PlayRecordedSoundButton.TouchUpInside += (sender, e) => {
                try {
                    Console.WriteLine("Playing Back Recording " + this.audioFilePath.ToString());

                    // The following line prevents the audio from stopping
                    // when the device autolocks. will also make sure that it plays, even
                    // if the device is in mute
                    AudioSession.Category = AudioSessionCategory.MediaPlayback;

                    this.player = new AVPlayer(this.audioFilePath);
                    this.player.Play();
                } catch (Exception ex) {
                    Console.WriteLine("There was a problem playing back audio: ");
                    Console.WriteLine(ex.Message);
                }
            };
        }
 /// <summary>
 /// Forwards a session-removed notification to the remote side.
 /// </summary>
 private void Form1_OnAudioSessionRemoved(AudioSession session)
 {
     var messageSender = new AudioSessionRemoveMessageSender(connection);
     messageSender.send(session);
 }
 // Reactivates the audio session and restarts the audio unit after an
 // interruption (e.g. a phone call) ends.
 void OnAudioSessionResumed(object sender, EventArgs e)
 {
     AudioSession.SetActive(true);
     audioUnit.Initialize();
     audioUnit.Start();
 }
 /// <summary>
 /// Looks up the session's current icon by id and pushes it to the remote side.
 /// </summary>
 private void Form1_OnAudioSessionIconChanged(AudioSession session)
 {
     var sessionIcon = Main.Instance.audioManager.getSessionIcon(session.id);
     new AudioSessionImageSender(connection).send(sessionIcon);
 }
 /// <summary>
 /// Notifies the remote end that this session's properties were edited.
 /// </summary>
 private void Form1_OnAudioSessionEdited(AudioSession session)
 {
     var messageSender = new AudioSessionEditedMessageSender(connection, logic);
     messageSender.send(session);
 }
        /// <summary>
        /// Opens the default OpenAL device and context, pre-generates the fixed
        /// pool of AL sources, and (on iPhone) configures the audio session
        /// category plus interruption/resume handling for the AL context.
        /// </summary>
        private OpenALSoundController()
        {
#if IPHONE
            AudioSession.Initialize();

            // NOTE: iOS 5.1 simulator throws an exception when setting the category
            // to SoloAmbientSound.  This could be removed if that bug gets fixed.
            try
            {
                // Mix with other apps' audio if something is already playing,
                // otherwise take the solo-ambient path.
                if (AudioSession.OtherAudioIsPlaying)
                {
                    AudioSession.Category = AudioSessionCategory.AmbientSound;
                }
                else
                {
                    AudioSession.Category = AudioSessionCategory.SoloAmbientSound;
                }
            }
            catch (AudioSessionException) { }
#endif
            // NOTE(review): unlike OpenSoundController elsewhere in this file, this
            // call is not wrapped in #if MONOMAC — confirm it resolves on all targets.
            alcMacOSXMixerOutputRate(PREFERRED_MIX_RATE);
            _device = Alc.OpenDevice(string.Empty);
            CheckALError("Could not open AL device");
            if (_device != IntPtr.Zero)
            {
                int[] attribute = new int[0];
                _context = Alc.CreateContext(_device, attribute);
                CheckALError("Could not open AL context");

                if (_context != ContextHandle.Zero)
                {
                    Alc.MakeContextCurrent(_context);
                    CheckALError("Could not make AL context current");
                }
            }
            else
            {
                // No device — leave the controller unconfigured.
                return;
            }

            // Pre-generate the whole source pool; sources migrate between the
            // available / in-use / playing collections at runtime.
            allSourcesArray = new int[MAX_NUMBER_OF_SOURCES];
            AL.GenSources(allSourcesArray);

            availableSourcesCollection = new HashSet <int> ();
            inUseSourcesCollection     = new HashSet <OALSoundBuffer> ();
            playingSourcesCollection   = new HashSet <OALSoundBuffer> ();


            for (int x = 0; x < MAX_NUMBER_OF_SOURCES; x++)
            {
                availableSourcesCollection.Add(allSourcesArray [x]);
            }
#if IPHONE
            // Suspend the AL context while the audio session is interrupted…
            AudioSession.Interrupted += (sender, e) =>
            {
                AudioSession.SetActive(false);

                Alc.MakeContextCurrent(ContextHandle.Zero);
                Alc.SuspendContext(_context);
            };

            // …and bring it back when the interruption ends.
            AudioSession.Resumed += (sender, e) =>
            {
                // That is, without this, the code wont work :(
                // It will fail on the next line of code
                // Maybe you could ask for an explanation
                // to someone at xamarin
                System.Threading.Thread.Sleep(100);

                AudioSession.SetActive(true);
                AudioSession.Category = AudioSessionCategory.SoloAmbientSound;

                Alc.MakeContextCurrent(_context);
                Alc.ProcessContext(_context);
            };
#endif
        }