static Notify()
{
    // Setup your session
    AudioSession.Initialize();
    AudioSession.Category = AudioSessionCategory.MediaPlayback;
    AudioSession.SetActive(true);
}
public void StartRecording()
{
    try {
        AudioSession.Initialize();
        Console.WriteLine("Begin Recording");

        AudioSession.Category = AudioSessionCategory.RecordAudio;
        AudioSession.SetActive(true);

        if (!PrepareAudioRecording()) {
            //RecordingStatusLabel.Text = "Error preparing";
            return;
        }

        if (!recorder.Record()) {
            //RecordingStatusLabel.Text = "Error recording";
            return;
        }

        this.stopwatch = new Stopwatch();
        this.stopwatch.Start();
        RecorderPopup.timerText = stopwatch.ElapsedMilliseconds.ToString();
    } catch (Exception ex) {
        // Don't swallow failures silently; at least surface them in the output
        Console.WriteLine(ex);
    }
}
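Several of the recording snippets rely on a PrepareAudioRecording helper that is not shown. A minimal sketch of what such a helper might look like follows, modeled on the classic Xamarin sound-recording recipe; the field names recorder and audioFilePath are assumptions chosen to match the snippet above, and the exact AVAudioRecorder/AudioSettings binding names may differ in older MonoTouch versions.

// Hypothetical helper, not part of the original source.
bool PrepareAudioRecording()
{
    // Record to a temporary WAV file (path is an assumption)
    audioFilePath = NSUrl.FromFilename(Path.Combine(Path.GetTempPath(), "recording.wav"));

    // 16-bit linear PCM, mono, 44.1 kHz
    var settings = new AudioSettings {
        SampleRate = 44100.0f,
        Format = AudioToolbox.AudioFormatType.LinearPCM,
        NumberChannels = 1,
        LinearPcmBitDepth = 16,
        AudioQuality = AVAudioQuality.High,
    };

    NSError error;
    recorder = AVAudioRecorder.Create(audioFilePath, settings, out error);
    if (recorder == null || error != null) {
        Console.WriteLine(error);
        return false;
    }

    // Creates the file and allocates buffers so Record() can start immediately
    recorder.PrepareToRecord();
    return true;
}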
void initAudioSession()
{
    // AudioSession
    AudioSession.Initialize();
    AudioSession.SetActive(true);
    AudioSession.Category = AudioSessionCategory.PlayAndRecord;
    AudioSession.PreferredHardwareIOBufferDuration = 0.01f;
}
public BackgroundAudioPlayerIphone()
{
#if !SIMULATOR
    AudioSession.Initialize();
    AudioSession.Category = AudioSessionCategory.SoloAmbientSound;
    AudioSession.SetActive(true);
#endif
}
/// <summary>
/// The main entry point for the application.
/// </summary>
static void Main(string[] args)
{
    AudioSession.Initialize();
    AudioSession.Category = AudioSessionCategory.AmbientSound;
    AudioSession.SetActive(true);

    UIApplication.Main(args, null, "AppDelegate");
}
static void Main(string[] args)
{
    // The AudioSession setup must run here, before UIApplication.Main, to take effect
    AudioSession.Initialize();
    AudioSession.Category = AudioSessionCategory.AmbientSound;
    AudioSession.SetActive(true);

    UIApplication.Main(args, null, "AppDelegate");
}
internal static void Setup()
{
    Debug.WriteLine("AudioSessionManager.Setup()");

    AudioSession.Initialize();

    AudioSession.Interrupted += (o, e) => {
        Debug.WriteLine("AudioSession.Interrupted");
        audioSystemAvailable = false;
        AudioSession.SetActive(false);
    };

    // Want to reactivate on resume from interruption
    AudioSession.Resumed += (o, e) => {
        Debug.WriteLine("AudioSession.Resumed");
        AudioSession.SetActive(true);
        audioSystemAvailable = true;
        SoundEffectThread.RestartAllRestarable();
    };

    // Checking if Other Audio is Playing During App Launch
    bool otherAudioIsPlaying = AudioSession.OtherAudioIsPlaying;
    MediaPlayer.otherAudioIsPlaying = otherAudioIsPlaying;
    Debug.WriteLine("AudioSession.OtherAudioIsPlaying == " + otherAudioIsPlaying);

    // For some unknown reason, setting category on the simulator fails with an unknown error code (-50)
    try {
        if (otherAudioIsPlaying) {
            AudioSession.Category = AudioSessionCategory.AmbientSound;
        } else {
            AudioSession.Category = AudioSessionCategory.SoloAmbientSound;
        }
    } catch {
        Debug.WriteLine("Exception when setting AudioSession.Category");
    }

    AudioSession.SetActive(true);
    audioSystemAvailable = true;
}
// This method is invoked when the application has loaded its UI and is ready to run
public override bool FinishedLaunching(UIApplication app, NSDictionary options)
{
    AudioSession.Initialize();
    AudioSession.Category = AudioSessionCategory.MediaPlayback;
    AudioSession.SetActive(true);

    app.ApplicationSupportsShakeToEdit = false;

    // If you have defined a view, add it here:
    window.AddSubview(rootController.View);
    window.MakeKeyAndVisible();

    return true;
}
void prepareAudioUnit()
{
    // AudioSession
    AudioSession.Initialize();
    AudioSession.SetActive(true);
    AudioSession.Category = AudioSessionCategory.PlayAndRecord;
    AudioSession.PreferredHardwareIOBufferDuration = 0.005f;

    // creating an AudioComponentDescription of the RemoteIO AudioUnit
    AudioComponentDescription cd = new AudioComponentDescription() {
        componentType = AudioComponentDescription.AudioComponentType.kAudioUnitType_Output,
        componentSubType = AudioComponentDescription.AudioComponentSubType.kAudioUnitSubType_RemoteIO,
        componentManufacturer = AudioComponentDescription.AudioComponentManufacturerType.kAudioUnitManufacturer_Apple,
        componentFlags = 0,
        componentFlagsMask = 0
    };

    // Getting AudioComponent using the audio component description
    _audioComponent = AudioComponent.FindComponent(cd);

    // creating an audio unit instance
    _audioUnit = AudioUnit.CreateInstance(_audioComponent);

    // turning on microphone
    _audioUnit.SetEnableIO(true,
        AudioUnit.AudioUnitScopeType.kAudioUnitScope_Input,
        1 // Remote Input
    );

    // setting audio format
    _audioUnit.SetAudioFormat(_dstFormat,
        AudioUnit.AudioUnitScopeType.kAudioUnitScope_Input,
        0 // Remote Output
    );
    _audioUnit.SetAudioFormat(AudioUnitUtils.AUCanonicalASBD(_sampleRate, 2),
        AudioUnit.AudioUnitScopeType.kAudioUnitScope_Output,
        1 // Remote Input
    );

    // setting callback method
    _audioUnit.RenderCallback += new EventHandler<AudioUnitEventArgs>(_audioUnit_RenderCallback);

    _audioUnit.Initialize();
    _audioUnit.Start();
}
public void Open(int inputChannelsWanted, int outputChannelsWanted, double targetSampleRate, int bufferSize = 0)
{
    Console.WriteLine("Request ios audio device with: inputs: {0}, outputs: {1}, sampleRate: {2}, buffer: {3}",
        inputChannelsWanted, outputChannelsWanted, targetSampleRate, bufferSize);

    Close();

    var file = Environment.GetFolderPath(Environment.SpecialFolder.MyDocuments) + "/out.pcm";
    Console.WriteLine("Saving to {0}", file);
    // _out = new FileStream(file, FileMode.Create, FileAccess.Write);

    _preferredBufferSize = (bufferSize <= 0) ? DefaultBufferSize : bufferSize;
    _numInputChannels = inputChannelsWanted;
    _numOutputChannels = outputChannelsWanted;

    AudioSession.SetActive(false);

    if (_numInputChannels > 0 && _audioInputIsAvailable) {
        AudioSession.Category = AudioSessionCategory.PlayAndRecord;
        AudioSession.OverrideCategoryEnableBluetoothInput = true;
    } else {
        AudioSession.Category = AudioSessionCategory.PlayAndRecord;
    }

    FixRouteIfSetToReceiver();

    AudioSession.PreferredHardwareSampleRate = targetSampleRate;

    UpdateDeviceInfo();
    UpdateCurrentBufferSize();
    PrepareFloatBuffers(_actualBufferSize);

    CreateAudioUnit();
    _format = _audioUnit.GetAudioFormat(AudioUnitScopeType.Output, 1);
    _audioUnit.Stop();
    _audioUnit.Start();

    AudioSession.SetActive(true);
    _isRunning = true;

    Console.WriteLine("Opened ios audio device with: inputs: {0}, outputs: {1}, sampleRate: {2}, buffer: {3}",
        _numInputChannels, _numOutputChannels, _sampleRate, _actualBufferSize);
}
public override bool StartNotifyBeep()
{
    if (!AUDIOSESSION_INITIALIZED) {
        AudioSession.Initialize();
        AudioSession.Category = AudioSessionCategory.MediaPlayback;
        AUDIOSESSION_INITIALIZED = true;
    }

    if (!AUDIOSESSION_ACTIVE) {
        AudioSession.SetActive(true);
        AUDIOSESSION_ACTIVE = true;
    }

    var sound = SystemSound.FromFile("res/SystemAlert.wav");
    sound.PlayAlertSound();

    return true;
}
public void Close()
{
    if (_isRunning) {
        _isRunning = false;

        AudioSession.Category = AudioSessionCategory.MediaPlayback;
        AudioSession.SetActive(false);

        if (_audioUnit != null) {
            _audioUnit.Dispose();
            _audioUnit = null;
        }

        if (_out != null) {
            _out.Close();
        }
    }
}
// Implement viewDidLoad to do additional setup after loading the view, typically from a nib.
public override void ViewDidLoad()
{
    base.ViewDidLoad();

    // setting button state
    _recordingButton.Enabled = true;
    _playBackButton.Enabled = false;

    // binding event handlers
    _recordingButton.TouchUpInside += new EventHandler(_recordingButton_TouchCancel);
    _playBackButton.TouchUpInside += new EventHandler(_playBackButton_TouchDown);

    // getting local sound file path
    var path = Environment.GetFolderPath(Environment.SpecialFolder.Personal);
    path = System.IO.Path.Combine(path, "recording.aiff");
    _url = CFUrl.FromFile(path);

    // setting audio session
    AudioSession.Initialize();
    AudioSession.SetActive(true);
}
void prepareAudioUnit()
{
    // AudioSession
    AudioSession.Initialize();
    AudioSession.SetActive(true);
    AudioSession.Category = AudioSessionCategory.PlayAndRecord;
    AudioSession.PreferredHardwareIOBufferDuration = 0.005f;

    // Getting the Remote I/O output AudioComponent
    _audioComponent = AudioComponent.FindComponent(AudioTypeOutput.Remote);

    // creating an audio unit instance
    _audioUnit = new AudioUnit(_audioComponent);

    // turning on microphone
    _audioUnit.SetEnableIO(true,
        AudioUnitScopeType.Input,
        1 // Remote Input
    );

    // setting audio format
    _audioUnit.SetAudioFormat(_dstFormat,
        AudioUnitScopeType.Input,
        0 // Remote Output
    );

    var format = AudioStreamBasicDescription.CreateLinearPCM(_sampleRate, bitsPerChannel: 32);
    format.FormatFlags = AudioStreamBasicDescription.AudioFormatFlagsAudioUnitCanonical;
    _audioUnit.SetAudioFormat(format, AudioUnitScopeType.Output, 1);

    // setting callback method
    _audioUnit.SetRenderCallback(_audioUnit_RenderCallback, AudioUnitScopeType.Global);

    _audioUnit.Initialize();
    _audioUnit.Start();
}
//
// This method is invoked when the application has loaded and is ready to run. In this
// method you should instantiate the window, load the UI into it and then make the window
// visible.
//
// You have 17 seconds to return from this method, or iOS will terminate your application.
//
public override bool FinishedLaunching(UIApplication application, NSDictionary launchOptions)
{
    AudioSession.Initialize();

    AudioSession.Interrupted += delegate {
        Debug.WriteLine("Session interrupted");
        ThreadStateBeginInterruption();
    };

    AudioSession.Resumed += delegate {
        Debug.WriteLine("Session resumed");
        Debug.Print(AudioSession.InterruptionType.ToString());

        AudioSession.SetActive(true);
        ThreadStateEndInterruption();
    };

    // our default category -- we change this for conversion and playback appropriately
    try {
        AudioSession.Category = AudioSessionCategory.SoloAmbientSound;
    } catch {
        Debug.Print("ERROR: Cannot change audio session category");
    }

    AudioSession.AudioRouteChanged += delegate(object sender, AudioSessionRouteChangeEventArgs e) {
        var gg = e.PreviousInputRoute;
        Debug.Print("Audio route change: {0}", e.Reason);
        Debug.Print("Old route: {0}", e.PreviousOutputRoutes[0]);
        Debug.Print("New route: {0}", e.CurrentOutputRoutes[0]);
    };

    AudioSession.SetActive(true);

    return true;
}
private OpenALSoundController()
{
#if IPHONE
    AudioSession.Initialize();

    // NOTE: the iOS 5.1 simulator throws an exception when setting the category
    // to SoloAmbientSound. This could be removed if that bug gets fixed.
    try {
        if (AudioSession.OtherAudioIsPlaying) {
            AudioSession.Category = AudioSessionCategory.AmbientSound;
        } else {
            AudioSession.Category = AudioSessionCategory.SoloAmbientSound;
        }
    } catch (AudioSessionException) {
    }
#endif

    alcMacOSXMixerOutputRate(PREFERRED_MIX_RATE);

    _device = Alc.OpenDevice(string.Empty);
    CheckALError("Could not open AL device");

    if (_device != IntPtr.Zero) {
        int[] attribute = new int[0];
        _context = Alc.CreateContext(_device, attribute);
        CheckALError("Could not open AL context");

        if (_context != ContextHandle.Zero) {
            Alc.MakeContextCurrent(_context);
            CheckALError("Could not make AL context current");
        }
    } else {
        return;
    }

    allSourcesArray = new int[MAX_NUMBER_OF_SOURCES];
    AL.GenSources(allSourcesArray);

    availableSourcesCollection = new HashSet<int>();
    inUseSourcesCollection = new HashSet<OALSoundBuffer>();
    playingSourcesCollection = new HashSet<OALSoundBuffer>();

    for (int x = 0; x < MAX_NUMBER_OF_SOURCES; x++) {
        availableSourcesCollection.Add(allSourcesArray[x]);
    }

#if IPHONE
    AudioSession.Interrupted += (sender, e) => {
        AudioSession.SetActive(false);
        Alc.MakeContextCurrent(ContextHandle.Zero);
        Alc.SuspendContext(_context);
    };

    AudioSession.Resumed += (sender, e) => {
        // Without this short delay, reactivating the session fails on the next line;
        // the exact cause is unclear (possibly a timing issue in the binding).
        System.Threading.Thread.Sleep(100);

        AudioSession.SetActive(true);
        AudioSession.Category = AudioSessionCategory.SoloAmbientSound;

        Alc.MakeContextCurrent(_context);
        Alc.ProcessContext(_context);
    };
#endif
}
protected override void OnElementChanged(ElementChangedEventArgs<Button> e)
{
    try {
        base.OnElementChanged(e);

        if (e.NewElement != null) {
            // override events and other setup here
        } else if (e.OldElement != null) {
            // unsubscribe from events here
        }

        if (Control != null) {
            Control.TouchUpInside += (send, ebla) => {
                if (Control.TitleLabel.Text.Contains("Gravar")) {
                    Console.WriteLine("Begin Recording");

                    AudioSession.Category = AudioSessionCategory.RecordAudio;
                    AudioSession.SetActive(true);

                    if (!PrepareAudioRecording()) {
                        return;
                    }

                    if (!_recorder.Record()) {
                        return;
                    }

                    _stopwatch = new Stopwatch();
                    _stopwatch.Start();

                    Control.SetTitle("Parar", UIControlState.Normal);
                } else {
                    if (_recorder != null && !_recorder.Record()) {
                        return;
                    }

                    _recorder?.Stop();
                    Control.SetTitle($" Gravar {_stopwatch.Elapsed:hh\\:mm\\:ss} ", UIControlState.Normal);
                    _stopwatch.Stop();

                    MessagingCenter.Send<StartRecordingButtonRenderer, NSUrl>(this, "audioFile", _audioFilePath);
                }
            };

            _observer = NSNotificationCenter.DefaultCenter.AddObserver(AVPlayerItem.DidPlayToEndTimeNotification, delegate {
                _player?.Dispose();
                _player = null;
            });
        }
    } catch (Exception exe) {
        throw new Exception(exe.Message);
    }
}
public override bool StopNotifyBeep()
{
    AudioSession.SetActive(false);
    AUDIOSESSION_ACTIVE = false;

    return true;
}
/// <summary>
/// Open the sound device, sets up an audio context, and makes the new context
/// the current context. Note that this method will stop the playback of
/// music that was running prior to the game start. If any error occurs, then
/// the state of the controller is reset.
/// </summary>
/// <returns>True if the sound controller was setup, and false if not.</returns>
private bool OpenSoundController()
{
#if MONOMAC || IOS
    alcMacOSXMixerOutputRate(PREFERRED_MIX_RATE);
#endif
    try {
        _device = Alc.OpenDevice(string.Empty);
    } catch (Exception ex) {
        _SoundInitException = ex;
        return false;
    }

    if (CheckALError("Could not open AL device")) {
        return false;
    }

    if (_device != IntPtr.Zero) {
#if ANDROID
        // Attach activity event handlers so we can pause and resume all playing sounds
        AndroidGameActivity.Paused += Activity_Paused;
        AndroidGameActivity.Resumed += Activity_Resumed;

        // Query the device for the ideal frequency and update buffer size so
        // we can get the low latency sound path.
        /*
         * The recommended sequence is:
         *
         * Check for feature "android.hardware.audio.low_latency" using code such as this:
         *     import android.content.pm.PackageManager;
         *     ...
         *     PackageManager pm = getContext().getPackageManager();
         *     boolean claimsFeature = pm.hasSystemFeature(PackageManager.FEATURE_AUDIO_LOW_LATENCY);
         * Check for API level 17 or higher, to confirm use of android.media.AudioManager.getProperty().
         * Get the native or optimal output sample rate and buffer size for this device's primary output stream, using code such as this:
         *     import android.media.AudioManager;
         *     ...
         *     AudioManager am = (AudioManager) getSystemService(Context.AUDIO_SERVICE);
         *     String sampleRate = am.getProperty(AudioManager.PROPERTY_OUTPUT_SAMPLE_RATE);
         *     String framesPerBuffer = am.getProperty(AudioManager.PROPERTY_OUTPUT_FRAMES_PER_BUFFER);
         * Note that sampleRate and framesPerBuffer are Strings. First check for null and then convert to int using Integer.parseInt().
         * Now use OpenSL ES to create an AudioPlayer with PCM buffer queue data locator.
         *
         * See http://stackoverflow.com/questions/14842803/low-latency-audio-playback-on-android
         */
        int frequency = DEFAULT_FREQUENCY;
        int updateSize = DEFAULT_UPDATE_SIZE;
        int updateBuffers = DEFAULT_UPDATE_BUFFER_COUNT;

        if (Android.OS.Build.VERSION.SdkInt >= Android.OS.BuildVersionCodes.JellyBeanMr1) {
            Android.Util.Log.Debug("OAL", Game.Activity.PackageManager.HasSystemFeature(PackageManager.FeatureAudioLowLatency)
                ? "Supports low latency audio playback."
                : "Does not support low latency audio playback.");

            var audioManager = Game.Activity.GetSystemService(Context.AudioService) as AudioManager;
            if (audioManager != null) {
                var result = audioManager.GetProperty(AudioManager.PropertyOutputSampleRate);
                if (!string.IsNullOrEmpty(result)) {
                    frequency = int.Parse(result, CultureInfo.InvariantCulture);
                }

                result = audioManager.GetProperty(AudioManager.PropertyOutputFramesPerBuffer);
                if (!string.IsNullOrEmpty(result)) {
                    updateSize = int.Parse(result, CultureInfo.InvariantCulture);
                }
            }

            // If 4.4 or higher, then we don't need to double buffer on the application side.
            // See http://stackoverflow.com/a/15006327
            // Use the explicit value rather than a constant as the 4.2 SDK (the build SDK) does not define a constant for 4.4.
            if ((int)Android.OS.Build.VERSION.SdkInt >= 19) {
                updateBuffers = 1;
            }
        } else {
            Android.Util.Log.Debug("OAL", "Android 4.2 or higher required for low latency audio playback.");
        }

        Android.Util.Log.Debug("OAL", "Using sample rate " + frequency + "Hz and " + updateBuffers + " buffers of " + updateSize + " frames.");

        // These are missing and non-standard ALC constants
        const int AlcFrequency = 0x1007;
        const int AlcUpdateSize = 0x1014;
        const int AlcUpdateBuffers = 0x1015;

        int[] attribute = new[] {
            AlcFrequency, frequency,
            AlcUpdateSize, updateSize,
            AlcUpdateBuffers, updateBuffers,
            0
        };
#elif IOS
        AudioSession.Initialize();

        AudioSession.Interrupted += (sender, e) => {
            AudioSession.SetActive(false);
            Alc.MakeContextCurrent(ContextHandle.Zero);
            Alc.SuspendContext(_context);
        };

        AudioSession.Resumed += (sender, e) => {
            AudioSession.SetActive(true);
            Alc.MakeContextCurrent(_context);
            Alc.ProcessContext(_context);
        };

        int[] attribute = new int[0];
#elif !DESKTOPGL
        int[] attribute = new int[0];
#endif

#if DESKTOPGL
        _acontext = new AudioContext();
        _context = Alc.GetCurrentContext();
        _oggstreamer = new OggStreamer();
#else
        _context = Alc.CreateContext(_device, attribute);
#endif

        if (CheckALError("Could not create AL context")) {
            CleanUpOpenAL();
            return false;
        }

        if (_context != ContextHandle.Zero) {
            Alc.MakeContextCurrent(_context);
            if (CheckALError("Could not make AL context current")) {
                CleanUpOpenAL();
                return false;
            }
            return true;
        }
    }

    return false;
}
public override void ViewDidLoad()
{
    base.ViewDidLoad();

    this.RecordingStatusLabel.Text = "";
    this.LengthOfRecordingLabel.Text = "";

    // start recording wireup
    this.StartRecordingButton.TouchUpInside += (sender, e) => {
        Console.WriteLine("Begin Recording");

        AudioSession.Category = AudioSessionCategory.RecordAudio;
        AudioSession.SetActive(true);

        if (!PrepareAudioRecording()) {
            RecordingStatusLabel.Text = "Error preparing";
            return;
        }

        if (!recorder.Record()) {
            RecordingStatusLabel.Text = "Error recording";
            return;
        }

        this.stopwatch = new Stopwatch();
        this.stopwatch.Start();

        this.LengthOfRecordingLabel.Text = "";
        this.RecordingStatusLabel.Text = "Recording";
        this.StartRecordingButton.Enabled = false;
        this.StopRecordingButton.Enabled = true;
        this.PlayRecordedSoundButton.Enabled = false;
    };

    // stop recording wireup
    this.StopRecordingButton.TouchUpInside += (sender, e) => {
        this.recorder.Stop();

        this.LengthOfRecordingLabel.Text = string.Format("{0:hh\\:mm\\:ss}", this.stopwatch.Elapsed);
        this.stopwatch.Stop();
        this.RecordingStatusLabel.Text = "";
        this.StartRecordingButton.Enabled = true;
        this.StopRecordingButton.Enabled = false;
        this.PlayRecordedSoundButton.Enabled = true;
    };

    observer = NSNotificationCenter.DefaultCenter.AddObserver(AVPlayerItem.DidPlayToEndTimeNotification, delegate(NSNotification n) {
        player.Dispose();
        player = null;
    });

    // play recorded sound wireup
    this.PlayRecordedSoundButton.TouchUpInside += (sender, e) => {
        try {
            Console.WriteLine("Playing Back Recording " + this.audioFilePath.ToString());

            // The following line prevents the audio from stopping when the device
            // auto-locks, and also makes sure it plays even if the device is muted.
            AudioSession.Category = AudioSessionCategory.MediaPlayback;

            this.player = new AVPlayer(this.audioFilePath);
            this.player.Play();
        } catch (Exception ex) {
            Console.WriteLine("There was a problem playing back audio: ");
            Console.WriteLine(ex.Message);
        }
    };
}
// This method is invoked when the application is about to move from active to inactive state.
// OpenGL applications should use this method to pause.
public override void OnResignActivation(UIApplication application)
{
    ThreadStateBeginInterruption();
    AudioSession.SetActive(false);
    Debug.Write("Audio Session Deactivated");
}
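A possible counterpart to the handler above, reactivating the session when the app becomes active again. This is a sketch only, not part of the original source; it assumes the same ThreadStateEndInterruption helper that the FinishedLaunching snippet earlier in this listing wires into AudioSession.Resumed.

// Hypothetical counterpart, not in the original source.
public override void OnActivated(UIApplication application)
{
    AudioSession.SetActive(true);
    Debug.Write("Audio Session Activated");
    ThreadStateEndInterruption();
}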
void prepareAudioUnit()
{
    // AudioSession
    AudioSession.Initialize();
    AudioSession.SetActive(true);
    AudioSession.Category = AudioSessionCategory.PlayAndRecord;
    AudioSession.PreferredHardwareIOBufferDuration = 0.01f;

    // Creating AudioComponentDescription instance of RemoteIO Audio Unit
    var cd = new AudioComponentDescription() {
        componentType = AudioComponentDescription.AudioComponentType.kAudioUnitType_Output,
        componentSubType = AudioComponentDescription.AudioComponentSubType.kAudioUnitSubType_RemoteIO,
        componentManufacturer = AudioComponentDescription.AudioComponentManufacturerType.kAudioUnitManufacturer_Apple,
        componentFlags = 0,
        componentFlagsMask = 0
    };

    // Getting AudioComponent from the description
    _component = AudioComponent.FindComponent(cd);

    // Getting the AudioUnit
    _audioUnit = AudioUnit.CreateInstance(_component);

    // turning on microphone
    _audioUnit.SetEnableIO(true,
        AudioUnit.AudioUnitScopeType.kAudioUnitScope_Input,
        1 // Remote Input
    );

    // setting AudioStreamBasicDescription
    int AudioUnitSampleTypeSize = (MonoTouch.ObjCRuntime.Runtime.Arch == MonoTouch.ObjCRuntime.Arch.SIMULATOR) ? sizeof(float) : sizeof(uint);
    AudioStreamBasicDescription audioFormat = new AudioStreamBasicDescription() {
        SampleRate = _sampleRate,
        Format = AudioFormatType.LinearPCM,
        // kAudioFormatFlagsAudioUnitCanonical = kAudioFormatFlagIsSignedInteger | kAudioFormatFlagsNativeEndian |
        //     kAudioFormatFlagIsPacked | kAudioFormatFlagIsNonInterleaved |
        //     (kAudioUnitSampleFractionBits << kLinearPCMFormatFlagsSampleFractionShift)
        FormatFlags = (AudioFormatFlags)((int)AudioFormatFlags.IsSignedInteger |
                                         (int)AudioFormatFlags.IsPacked |
                                         (int)AudioFormatFlags.IsNonInterleaved |
                                         (int)(kAudioUnitSampleFractionBits << (int)AudioFormatFlags.LinearPCMSampleFractionShift)),
        ChannelsPerFrame = 2,
        BytesPerPacket = AudioUnitSampleTypeSize,
        BytesPerFrame = AudioUnitSampleTypeSize,
        FramesPerPacket = 1,
        BitsPerChannel = 8 * AudioUnitSampleTypeSize,
        Reserved = 0
    };

    _audioUnit.SetAudioFormat(audioFormat,
        AudioUnit.AudioUnitScopeType.kAudioUnitScope_Input,
        0 // Remote Output
    );
    _audioUnit.SetAudioFormat(audioFormat,
        AudioUnit.AudioUnitScopeType.kAudioUnitScope_Output,
        1 // Remote Input
    );

    // setting callback
    /*
    if (MonoTouch.ObjCRuntime.Runtime.Arch == MonoTouch.ObjCRuntime.Arch.SIMULATOR)
        _audioUnit.RenderCallback += new EventHandler<AudioUnitEventArgs>(simulator_callback);
    else
        _audioUnit.RenderCallback += new EventHandler<AudioUnitEventArgs>(device_callback);
    */
    _audioUnit.RenderCallback += new EventHandler<AudioUnitEventArgs>(_callback);

    // initialize
    _audioUnit.Initialize();
}
public override void ViewDidLoad()
{
    base.ViewDidLoad();
    View.BackgroundColor = UIColor.White;

    var image = new UIImageView();
    View.AddSubview(image);
    image.Image = UIImage.FromBundle("microphone.png");

    var recordingStatusLabel = new UILabel {
        Text = "",
        TextColor = Constants.MainColor,
    };
    View.AddSubview(recordingStatusLabel);

    var lengthOfRecordingLabel = new UILabel {
        Text = "",
        TextColor = Constants.MainColor,
    };
    View.AddSubview(lengthOfRecordingLabel);

    var startRecordingButton = new UIButton { };
    View.AddSubview(startRecordingButton);
    startRecordingButton.SetTitle("Start", UIControlState.Normal);
    startRecordingButton.SetTitleColor(Constants.MainColor, UIControlState.Normal);

    var stopRecordingButton = new UIButton { };
    View.AddSubview(stopRecordingButton);
    stopRecordingButton.SetTitle("Stop", UIControlState.Normal);
    stopRecordingButton.SetTitleColor(Constants.MainColor, UIControlState.Normal);

    var playRecordedSoundButton = new UIButton { };
    View.AddSubview(playRecordedSoundButton);
    playRecordedSoundButton.SetTitle("Play", UIControlState.Normal);
    playRecordedSoundButton.SetTitleColor(Constants.MainColor, UIControlState.Normal);

    // start recording wireup
    startRecordingButton.TouchUpInside += (sender, e) => {
        AudioSession.Category = AudioSessionCategory.RecordAudio;
        AudioSession.SetActive(true);

        if (!PrepareAudioRecording()) {
            recordingStatusLabel.Text = "Error preparing";
            return;
        }

        if (!Recorder.Record()) {
            recordingStatusLabel.Text = "Error recording";
            return;
        }

        Stopwatch = new Stopwatch();
        Stopwatch.Start();

        lengthOfRecordingLabel.Text = "";
        recordingStatusLabel.Text = "Recording";
        startRecordingButton.Enabled = false;
        stopRecordingButton.Enabled = true;
    };

    // stop recording wireup
    stopRecordingButton.TouchUpInside += (sender, e) => {
        Recorder.Stop();

        lengthOfRecordingLabel.Text = string.Format("{0:hh\\:mm\\:ss}", Stopwatch.Elapsed);
        Stopwatch.Stop();
        recordingStatusLabel.Text = "";
        startRecordingButton.Enabled = true;
        stopRecordingButton.Enabled = false;
    };

    Observer = NSNotificationCenter.DefaultCenter.AddObserver(AVPlayerItem.DidPlayToEndTimeNotification, delegate(NSNotification n) {
        Player.Dispose();
        Player = null;
    });

    // play recorded sound wireup
    playRecordedSoundButton.TouchUpInside += (sender, e) => {
        try {
            Console.WriteLine("Playing Back Recording " + AudioFilePath.ToString());

            // The following line prevents the audio from stopping when the device
            // auto-locks, and also makes sure it plays even if the device is muted.
            AudioSession.Category = AudioSessionCategory.MediaPlayback;

            Player = new AVPlayer(AudioFilePath);
            Player.Play();
        } catch (Exception ex) {
            Console.WriteLine("There was a problem playing back audio: ");
            Console.WriteLine(ex.Message);
        }
    };

    #region Layout
    var topPad = (float)NavigationController.NavigationBar.Frame.Size.Height + 40f;

    View.ConstrainLayout(() =>
        image.Frame.Top == View.Frame.Top + topPad &&
        image.Frame.GetCenterX() == View.Frame.GetCenterX() &&
        image.Frame.Height == 100f &&
        image.Frame.Width == 100f &&

        startRecordingButton.Frame.Top == image.Frame.Bottom + 30f &&
        startRecordingButton.Frame.Left == View.Frame.Left + 30f &&
        startRecordingButton.Frame.Height == Constants.ControlsHeight &&
        startRecordingButton.Frame.Width == 100f &&

        recordingStatusLabel.Frame.Top == image.Frame.Bottom + 30f &&
        recordingStatusLabel.Frame.Left == startRecordingButton.Frame.Right + 30f &&
        recordingStatusLabel.Frame.Height == Constants.ControlsHeight &&
        recordingStatusLabel.Frame.Width == 100f &&

        stopRecordingButton.Frame.Top == startRecordingButton.Frame.Bottom + 30f &&
        stopRecordingButton.Frame.Left == View.Frame.Left + 30f &&
        stopRecordingButton.Frame.Height == Constants.ControlsHeight &&
        stopRecordingButton.Frame.Width == 100f &&

        lengthOfRecordingLabel.Frame.Top == startRecordingButton.Frame.Bottom + 30f &&
        lengthOfRecordingLabel.Frame.Left == stopRecordingButton.Frame.Right + 30f &&
        lengthOfRecordingLabel.Frame.Height == Constants.ControlsHeight &&
        lengthOfRecordingLabel.Frame.Width == 100f &&

        playRecordedSoundButton.Frame.Top == stopRecordingButton.Frame.Bottom + 30f &&
        playRecordedSoundButton.Frame.Left == View.Frame.Left + 30f &&
        playRecordedSoundButton.Frame.Height == Constants.ControlsHeight &&
        playRecordedSoundButton.Frame.Width == 100f
    );
    #endregion
}
void OnAudioSessionResumed(object sender, EventArgs e)
{
    AudioSession.SetActive(true);

    audioUnit.Initialize();
    audioUnit.Start();
}
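Since the resume handler above re-initializes and restarts the audio unit, the matching interruption handler presumably tears it down first. The following is a sketch only, not taken from the original source; it assumes the same audioUnit field, and the exact teardown calls (Stop/Uninitialize) may differ between MonoTouch and Xamarin.iOS AudioUnit bindings.

// Hypothetical counterpart handler, not in the original source.
void OnAudioSessionInterrupted(object sender, EventArgs e)
{
    // Stop and uninitialize the audio unit so it can be re-initialized on resume
    audioUnit.Stop();
    audioUnit.Uninitialize();

    // Deactivate the session so the interrupting audio (e.g. a phone call) can take over
    AudioSession.SetActive(false);
}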