private void getSettings ()
{
    var documents = Environment.GetFolderPath (Environment.SpecialFolder.MyDocuments);
    var library = System.IO.Path.Combine (documents, "..", "Library");
    var urlpath = System.IO.Path.Combine (library, "audioRecording.wav");

    session = AVAudioSession.SharedInstance ();
    session.SetCategory (AVAudioSessionCategory.PlayAndRecord);

    url = new NSUrl (urlpath, false);
    NSFileManager manager = new NSFileManager ();
    NSError error;

    // If there is a file at the save location, delete it so we can save there
    if (manager.FileExists (urlpath)) {
        Console.WriteLine ("Deleting File");
        manager.Remove (urlpath, out error);
        Console.WriteLine ("Deleted File");
    }

    NSObject[] values = new NSObject[] {
        NSNumber.FromFloat (44100.0f),                                              // Sample Rate
        NSNumber.FromInt32 ((int)MonoTouch.AudioToolbox.AudioFormatType.LinearPCM), // AVFormat
        NSNumber.FromInt32 (2),                                                     // Channels
        NSNumber.FromInt32 (16),                                                    // PCMBitDepth
        NSNumber.FromBoolean (false),                                               // IsBigEndianKey
        NSNumber.FromBoolean (false)                                                // IsFloatKey
    };
    NSObject[] keys = new NSObject[] {
        AVAudioSettings.AVSampleRateKey,
        AVAudioSettings.AVFormatIDKey,
        AVAudioSettings.AVNumberOfChannelsKey,
        AVAudioSettings.AVLinearPCMBitDepthKey,
        AVAudioSettings.AVLinearPCMIsBigEndianKey,
        AVAudioSettings.AVLinearPCMIsFloatKey
    };
    settings = NSDictionary.FromObjectsAndKeys (values, keys);
    recorder = AVAudioRecorder.ToUrl (url, settings, out error);
}
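A minimal sketch of how the recorder built by getSettings() might be driven. StartRecording and StopRecording are hypothetical names; session and recorder are the fields assigned above:

void StartRecording ()
{
    // Activate the session before recording; PrepareToRecord creates the output file
    session.SetActive (true);
    if (recorder.PrepareToRecord ())
        recorder.Record ();
}

void StopRecording ()
{
    recorder.Stop ();           // flushes and closes audioRecording.wav
    session.SetActive (false);
}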
//
// This method is invoked when the application has loaded and is ready to run. In this
// method you should instantiate the window, load the UI into it and then make the window
// visible.
//
// You have 17 seconds to return from this method, or iOS will terminate your application.
//
public override bool FinishedLaunching(UIApplication application, NSDictionary launchOptions)
{
    session = AVAudioSession.SharedInstance();
    session.BeginInterruption += delegate {
        Debug.WriteLine("Session interrupted");
        ThreadStateBeginInterruption();
    };
    session.EndInterruption += (object sender, EventArgs e) => {
        Debug.WriteLine("Session resumed");
        session.SetActive(true);
        ThreadStateEndInterruption();
    };

    int routes = session.InputNumberOfChannels;

    // Our default category -- we change this for conversion and playback appropriately
    try {
        session.SetCategory(AVAudioSessionCategory.SoloAmbient);
    } catch {
        Debug.Print("ERROR: Cannot change audio session category");
    }

    session.InputAvailabilityChanged += (object sender, AVStatusEventArgs e) => {
        var sess = sender as AVAudioSession;
        Debug.Print("Old route: {0}", routes);
        Debug.Print("New route: {0}", sess.InputNumberOfChannels);
        routes = sess.InputNumberOfChannels;
    };

    session.SetActive(true);
    return true;
}
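ThreadStateBeginInterruption and ThreadStateEndInterruption are not shown in this sample. A plausible sketch, assuming they simply gate an audio worker thread while the session is interrupted (the gate field is illustrative):

readonly System.Threading.ManualResetEvent audioThreadGate = new System.Threading.ManualResetEvent(true);

void ThreadStateBeginInterruption()
{
    // Block the audio worker until the interruption ends
    audioThreadGate.Reset();
}

void ThreadStateEndInterruption()
{
    // Let the audio worker resume once the session is active again
    audioThreadGate.Set();
}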
private void UpdateSource()
{
    System.Diagnostics.Debug.WriteLine("UpdateSource");

    if (Element.Source != null) {
        AVAsset asset = null;

        if (Element.Source.Scheme == null) {
            // File path
            asset = AVAsset.FromUrl(NSUrl.FromFilename(Element.Source.OriginalString));
        } else if (Element.Source.Scheme == "ms-appx") {
            // Used for a file embedded in the application package
            asset = AVAsset.FromUrl(NSUrl.FromFilename(Element.Source.LocalPath.Substring(1)));
        } else if (Element.Source.Scheme == "ms-appdata") {
            asset = AVAsset.FromUrl(NSUrl.FromFilename(Path.Combine(Environment.GetFolderPath(Environment.SpecialFolder.MyDocuments), Element.Source.LocalPath.Substring(1))));
        } else {
            asset = AVUrlAsset.Create(NSUrl.FromString(Element.Source.ToString()), GetOptionsWithHeaders(Element.HttpHeaders));
        }

        AVPlayerItem item = new AVPlayerItem(asset);
        RemoveStatusObserver();
        _observer = (NSObject)item.AddObserver("status", NSKeyValueObservingOptions.New, ObserveStatus);

        if (_avPlayerViewController.Player != null) {
            _avPlayerViewController.Player.ReplaceCurrentItemWithPlayerItem(item);
        } else {
            _avPlayerViewController.Player = new AVPlayer(item);
        }

        if (Element.AutoPlay) {
            var audioSession = AVAudioSession.SharedInstance();
            NSError err = audioSession.SetCategory(AVAudioSession.CategoryPlayback);
            audioSession.SetMode(AVAudioSession.ModeMoviePlayback, out err);
            err = audioSession.SetActive(true);

            _avPlayerViewController.Player.Play();
            Element.CurrentState = MediaElementState.Playing;
        }
    } else {
        if (Element.CurrentState == MediaElementState.Playing || Element.CurrentState == MediaElementState.Buffering) {
            Element.Stop();
        }
    }
}
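GetOptionsWithHeaders is not shown here. A sketch of what it might look like, packing the element's HTTP headers under AVURLAssetHTTPHeaderFieldsKey (note this key is not part of the public AVFoundation API, though it is the one commonly used for this purpose):

AVUrlAssetOptions GetOptionsWithHeaders(IDictionary<string, string> headers)
{
    var nativeHeaders = new NSMutableDictionary();
    foreach (var header in headers)
        nativeHeaders.Add((NSString)header.Key, (NSString)header.Value);

    // Undocumented key recognized by AVUrlAsset for per-request HTTP headers
    var nativeHeadersKey = (NSString)"AVURLAssetHTTPHeaderFieldsKey";
    return new AVUrlAssetOptions(NSDictionary.FromObjectAndKey(nativeHeaders, nativeHeadersKey));
}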
/// <summary>
/// Opens the sound device, sets up an audio context, and makes the new context
/// the current context. Note that this method will stop the playback of
/// music that was running prior to the game start. If any error occurs, then
/// the state of the controller is reset.
/// </summary>
/// <returns>True if the sound controller was set up, and false if not.</returns>
private bool OpenSoundController()
{
    try
    {
        _device = Alc.OpenDevice(string.Empty);
        EffectsExtension.device = _device;
    }
    catch (DllNotFoundException)
    {
        // Rethrow without resetting the stack trace
        throw;
    }
    catch (Exception ex)
    {
        throw new NoAudioHardwareException("OpenAL device could not be initialized.", ex);
    }

    AlcHelper.CheckError("Could not open OpenAL device");

    if (_device != IntPtr.Zero)
    {
#if ANDROID
        // Attach activity event handlers so we can pause and resume all playing sounds
        MonoGameAndroidGameView.OnPauseGameThread += Activity_Paused;
        MonoGameAndroidGameView.OnResumeGameThread += Activity_Resumed;

        // Query the device for the ideal frequency and update buffer size so
        // we can get the low latency sound path.

        /*
         * The recommended sequence is:
         *
         * Check for feature "android.hardware.audio.low_latency" using code such as this:
         *     import android.content.pm.PackageManager;
         *     ...
         *     PackageManager pm = getContext().getPackageManager();
         *     boolean claimsFeature = pm.hasSystemFeature(PackageManager.FEATURE_AUDIO_LOW_LATENCY);
         * Check for API level 17 or higher, to confirm use of android.media.AudioManager.getProperty().
         * Get the native or optimal output sample rate and buffer size for this device's primary
         * output stream, using code such as this:
         *     import android.media.AudioManager;
         *     ...
         *     AudioManager am = (AudioManager) getSystemService(Context.AUDIO_SERVICE);
         *     String sampleRate = am.getProperty(AudioManager.PROPERTY_OUTPUT_SAMPLE_RATE);
         *     String framesPerBuffer = am.getProperty(AudioManager.PROPERTY_OUTPUT_FRAMES_PER_BUFFER);
         * Note that sampleRate and framesPerBuffer are Strings. First check for null and then
         * convert to int using Integer.parseInt().
         * Now use OpenSL ES to create an AudioPlayer with PCM buffer queue data locator.
         *
         * See http://stackoverflow.com/questions/14842803/low-latency-audio-playback-on-android
         */
        int frequency = DEFAULT_FREQUENCY;
        int updateSize = DEFAULT_UPDATE_SIZE;
        int updateBuffers = DEFAULT_UPDATE_BUFFER_COUNT;
        if (Android.OS.Build.VERSION.SdkInt >= Android.OS.BuildVersionCodes.JellyBeanMr1)
        {
            Android.Util.Log.Debug("OAL", Game.Activity.PackageManager.HasSystemFeature(PackageManager.FeatureAudioLowLatency)
                ? "Supports low latency audio playback."
                : "Does not support low latency audio playback.");

            var audioManager = Game.Activity.GetSystemService(Context.AudioService) as AudioManager;
            if (audioManager != null)
            {
                var result = audioManager.GetProperty(AudioManager.PropertyOutputSampleRate);
                if (!string.IsNullOrEmpty(result))
                    frequency = int.Parse(result, CultureInfo.InvariantCulture);
                result = audioManager.GetProperty(AudioManager.PropertyOutputFramesPerBuffer);
                if (!string.IsNullOrEmpty(result))
                    updateSize = int.Parse(result, CultureInfo.InvariantCulture);
            }

            // If 4.4 or higher, then we don't need to double buffer on the application side.
            // See http://stackoverflow.com/a/15006327
            if (Android.OS.Build.VERSION.SdkInt >= Android.OS.BuildVersionCodes.Kitkat)
            {
                updateBuffers = 1;
            }
        }
        else
        {
            Android.Util.Log.Debug("OAL", "Android 4.2 or higher required for low latency audio playback.");
        }
        Android.Util.Log.Debug("OAL", "Using sample rate " + frequency + "Hz and " + updateBuffers + " buffers of " + updateSize + " frames.");

        // These are missing and non-standard ALC constants
        const int AlcFrequency = 0x1007;
        const int AlcUpdateSize = 0x1014;
        const int AlcUpdateBuffers = 0x1015;

        int[] attribute = new[]
        {
            AlcFrequency, frequency,
            AlcUpdateSize, updateSize,
            AlcUpdateBuffers, updateBuffers,
            0
        };
#elif IOS
        EventHandler<AVAudioSessionInterruptionEventArgs> handler = delegate(object sender, AVAudioSessionInterruptionEventArgs e)
        {
            switch (e.InterruptionType)
            {
                case AVAudioSessionInterruptionType.Began:
                    AVAudioSession.SharedInstance().SetActive(false);
                    Alc.MakeContextCurrent(IntPtr.Zero);
                    Alc.SuspendContext(_context);
                    break;
                case AVAudioSessionInterruptionType.Ended:
                    AVAudioSession.SharedInstance().SetActive(true);
                    Alc.MakeContextCurrent(_context);
                    Alc.ProcessContext(_context);
                    break;
            }
        };
        AVAudioSession.Notifications.ObserveInterruption(handler);

        int[] attribute = new int[0];
#else
        int[] attribute = new int[0];
#endif

        _context = Alc.CreateContext(_device, attribute);
#if DESKTOPGL
        _oggstreamer = new OggStreamer();
#endif

        AlcHelper.CheckError("Could not create OpenAL context");

        if (_context != NullContext)
        {
            Alc.MakeContextCurrent(_context);
            AlcHelper.CheckError("Could not make OpenAL context current");
            SupportsIma4 = AL.IsExtensionPresent("AL_EXT_IMA4");
            SupportsAdpcm = AL.IsExtensionPresent("AL_SOFT_MSADPCM");
            SupportsEfx = AL.IsExtensionPresent("AL_EXT_EFX");
            SupportsIeee = AL.IsExtensionPresent("AL_EXT_float32");
            return true;
        }
    }
    return false;
}
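A cleanup counterpart is not shown above. A minimal sketch of what closing the controller might look like (the method name is illustrative):

private void CleanUpOpenAL()
{
    // Detach the context before destroying it
    Alc.MakeContextCurrent(IntPtr.Zero);

    if (_context != NullContext)
    {
        Alc.DestroyContext(_context);
        _context = NullContext;
    }
    if (_device != IntPtr.Zero)
    {
        Alc.CloseDevice(_device);
        _device = IntPtr.Zero;
    }
}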
void onVolumeChanged (NSNotification notification)
{
    VoIPManager.setVolume(AVAudioSession.SharedInstance().OutputVolume);
}
public bool HaveMicrophonePermissions()
{
    var audioSession = AVAudioSession.SharedInstance();
    return audioSession.RecordPermission != AVAudioSessionRecordPermission.Denied;
}
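Note that RecordPermission can also be Undetermined, which the check above treats as having permission. One way a caller might combine the check with the system prompt (EnsureMicrophoneAccess is a hypothetical name):

void EnsureMicrophoneAccess(Action<bool> completion)
{
    var audioSession = AVAudioSession.SharedInstance();
    if (audioSession.RecordPermission == AVAudioSessionRecordPermission.Granted) {
        completion(true);
        return;
    }
    // Shows the system microphone prompt the first time it is called
    audioSession.RequestRecordPermission(granted => completion(granted));
}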
public CCMusicPlayer()
{
#if IOS
    AVAudioSession.SharedInstance().Init();
#endif
}
public void PreparePlayBack()
{
    AVAudioSession.SharedInstance().SetCategory(AVAudioSessionCategory.Playback);
}
public void PrepareRecord()
{
    AVAudioSession.SharedInstance().SetCategory(AVAudioSessionCategory.Record);
}
/// <summary>
/// Initializes a new instance of the <see cref="LocalMedia"/> class.
/// </summary>
/// <param name="disableAudio">Whether to disable audio.</param>
/// <param name="disableVideo">Whether to disable video.</param>
/// <param name="aecContext">The AEC context, if using software echo cancellation.</param>
public LocalMedia(bool disableAudio, bool disableVideo, AecContext aecContext)
    : base(disableAudio, disableVideo, aecContext)
{
    AVAudioSession.SharedInstance().SetCategory(AVAudioSessionCategory.PlayAndRecord,
        AVAudioSessionCategoryOptions.AllowBluetooth | AVAudioSessionCategoryOptions.DefaultToSpeaker);
}
public void PlayAndRecord()
{
    var audioSession = AVAudioSession.SharedInstance();
    var err = audioSession.SetCategory(AVAudioSessionCategory.PlayAndRecord);
}
public override bool FinishedLaunching(UIApplication app, NSDictionary options)
{
    //
    // Set up the audio system
    //
    var session = AVAudioSession.SharedInstance();
    NSError error;
    session.SetCategory(AVAudioSession.CategoryPlayback, AVAudioSessionCategoryOptions.DefaultToSpeaker, out error);

    //
    // Format description: we generate LinearPCM as signed 16-bit integers
    //
    sampleRate = session.SampleRate;
    var format = new AudioStreamBasicDescription {
        SampleRate = sampleRate,
        Format = AudioFormatType.LinearPCM,
        FormatFlags = AudioFormatFlags.LinearPCMIsSignedInteger | AudioFormatFlags.LinearPCMIsPacked,
        BitsPerChannel = 16,
        ChannelsPerFrame = 1,
        BytesPerFrame = 2,
        BytesPerPacket = 2,
        FramesPerPacket = 1,
    };

    //
    // Create an output queue
    //
    var queue = new OutputAudioQueue(format);
    var bufferByteSize = (sampleRate > 16000) ? 2176 : 512; // 40.5 Hz : 31.25 Hz

    //
    // Create three buffers, generate a tone, and output the tones
    //
    var buffers = new AudioQueueBuffer*[numBuffers];
    for (int i = 0; i < numBuffers; i++) {
        queue.AllocateBuffer(bufferByteSize, out buffers[i]);
        GenerateTone(buffers[i]);
        queue.EnqueueBuffer(buffers[i], null);
    }

    //
    // Output callback: invoked when the audio system is done with the
    // buffer. This implementation merely recycles it.
    //
    queue.OutputCompleted += (object sender, OutputCompletedEventArgs e) => {
        if (alternate) {
            outputWaveForm += 1;
            if (outputWaveForm > WaveForm.Square)
                outputWaveForm = WaveForm.Sine;
            GenerateTone(e.UnsafeBuffer);
        }
        queue.EnqueueBuffer(e.UnsafeBuffer, null);
    };

    queue.Start();
    return true;
}
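GenerateTone, numBuffers, alternate and outputWaveForm are class members not shown here. A hedged sketch of what GenerateTone might look like for the sine case, assuming hypothetical phase and toneFrequency fields:

unsafe void GenerateTone(AudioQueueBuffer *buffer)
{
    // Fill the whole buffer with 16-bit mono samples of a sine tone
    buffer->AudioDataByteSize = buffer->AudioDataBytesCapacity;
    int numSamples = (int)(buffer->AudioDataByteSize / 2);
    short *data = (short *)buffer->AudioData.ToPointer();

    double phaseStep = 2 * Math.PI * toneFrequency / sampleRate;
    for (int i = 0; i < numSamples; i++) {
        data[i] = (short)(Math.Sin(phase) * short.MaxValue * 0.5);
        phase += phaseStep;
    }
}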
public void DeactivateAudioSession()
{
    AVAudioSession.SharedInstance().SetActive(false);
}
bool PrepareAudioRecording()
{
    // You must initialize an audio session before trying to record
    var audioSession = AVAudioSession.SharedInstance();
    var err = audioSession.SetCategory(AVAudioSessionCategory.PlayAndRecord);
    if (err != null) {
        Console.WriteLine("audioSession: {0}", err);
        return false;
    }
    err = audioSession.SetActive(true);
    if (err != null) {
        Console.WriteLine("audioSession: {0}", err);
        return false;
    }

    // Declare string for application temp path and tack on the file extension
    string fileName = $"Myfile{DateTime.Now.ToString("yyyyMMddHHmmss")}.aac";
    string tempRecording = Path.Combine(Path.GetTempPath(), fileName);

    Console.WriteLine(tempRecording);
    _audioFilePath = NSUrl.FromFilename(tempRecording);

    // Set up the NSObject array of values that will be combined with the keys to make the NSDictionary
    NSObject[] values = new NSObject[] {
        NSNumber.FromFloat(44100.0f),
        NSNumber.FromInt32((int)AudioFormatType.MPEG4AAC),
        NSNumber.FromInt32(1),
        NSNumber.FromInt32((int)AVAudioQuality.High)
    };

    // Set up the NSObject array of keys that will be combined with the values to make the NSDictionary
    NSObject[] keys = new NSObject[] {
        AVAudioSettings.AVSampleRateKey,
        AVAudioSettings.AVFormatIDKey,
        AVAudioSettings.AVNumberOfChannelsKey,
        AVAudioSettings.AVEncoderAudioQualityKey
    };

    // Combine the values and keys to create the NSDictionary of settings
    _settings = NSDictionary.FromObjectsAndKeys(values, keys);

    // Set recorder parameters
    NSError error;
    _recorder = AVAudioRecorder.Create(_audioFilePath, new AudioSettings(_settings), out error);
    if ((_recorder == null) || (error != null)) {
        Console.WriteLine(error);
        return false;
    }

    // Set recorder to prepare to record
    if (!_recorder.PrepareToRecord()) {
        _recorder.Dispose();
        _recorder = null;
        return false;
    }

    _recorder.FinishedRecording += delegate(object sender, AVStatusEventArgs e) {
        _recorder.Dispose();
        _recorder = null;
        Console.WriteLine("Done Recording (status: {0})", e.Status);
    };

    return true;
}
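A hypothetical caller, toggling recording from a button handler (the handler name is illustrative):

void RecordButton_TouchUpInside(object sender, EventArgs e)
{
    if (_recorder == null && !PrepareAudioRecording())
        return;

    if (_recorder.Recording)
        _recorder.Stop();       // fires FinishedRecording, which disposes the recorder
    else
        _recorder.Record();     // writes to _audioFilePath until stopped
}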
public void RequestMicrophoneAccess (bool usePermissionAPI)
{
    // AVAudioSession is a singleton; use SharedInstance rather than constructing a new one
    AVAudioSession audioSession = AVAudioSession.SharedInstance ();
    if (!usePermissionAPI) {
        // Setting the category to Record implicitly triggers the microphone permission prompt
        NSError error;
        audioSession.SetCategory (AVAudioSession.CategoryRecord, out error);
    } else {
        audioSession.RequestRecordPermission (delegate(bool granted) {
            ShowAlert (DataClass.Microphone, granted ? "granted" : "denied");
        });
    }
}
public void ReactivateAudioSession()
{
    var session = AVAudioSession.SharedInstance();
    session.SetActive(true);
}
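One plausible way to drive the two helpers above is from the session's interruption notification; a minimal sketch (WireUpInterruptionHandling is a hypothetical name):

NSObject _interruptionToken;

void WireUpInterruptionHandling()
{
    _interruptionToken = AVAudioSession.Notifications.ObserveInterruption((sender, e) => {
        if (e.InterruptionType == AVAudioSessionInterruptionType.Began)
            DeactivateAudioSession();
        else if (e.InterruptionType == AVAudioSessionInterruptionType.Ended)
            ReactivateAudioSession();
    });
}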
/// <summary>
/// Initializes the AVAudioSession.
/// </summary>
private void initAudioSession()
{
    _session = AVAudioSession.SharedInstance();

    var errorOrNull = _session.SetCategory (AVAudioSessionCategory.Ambient);
    if (errorOrNull != null) {
        throw new Exception (errorOrNull.Description);
    }

    errorOrNull = _session.SetActive (true);
    if (errorOrNull != null) {
        throw new Exception (errorOrNull.Description);
    }

    _session.AddObserver (this, VOLUMECHANGE_KEYPATH, NSKeyValueObservingOptions.New, IntPtr.Zero);

    // The audio session is interrupted when you send the app to the background,
    // and needs to be set to active again when the app goes back to the foreground
    NSNotificationCenter.DefaultCenter.AddObserver(AVAudioSession.InterruptionNotification, onAudioSessionInterrupted);
}
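The onAudioSessionInterrupted handler is not included in this sample. A minimal sketch, assuming it only needs to re-activate the session when the interruption ends:

private void onAudioSessionInterrupted (NSNotification notification)
{
    var args = new AVAudioSessionInterruptionEventArgs (notification);
    if (args.InterruptionType == AVAudioSessionInterruptionType.Ended) {
        // Re-activate the session once the interrupting audio (e.g. a phone call) is done
        _session.SetActive (true);
    }
}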
private void SpeechSynthesizer_DidFinishSpeechUtterance(object sender, AVSpeechSynthesizerUteranceEventArgs e)
{
    AVAudioSession.SharedInstance().SetActive(false);
}