// App launch for the Whispers app: shows the window immediately, then loads
// the whisper database on a background thread and populates the list UI.
//
// Fix: the original created and mutated UIKit views (AddChildViewController,
// View.Frame, Root.Add, element adds) directly on the Task thread; UIKit may
// only be touched on the main thread. Only the database open/query now runs
// in the background — every view operation is marshaled via
// BeginInvokeOnMainThread.
//
// Returns true to report a successful launch to iOS.
public override bool FinishedLaunching(UIApplication app, NSDictionary options)
{
    whisperList = new DialogViewController(UITableViewStyle.Plain, new RootElement("Whispers"));
    window.RootViewController = viewController;
    window.MakeKeyAndVisible();

    Task.Factory.StartNew(() => {
        // Database work stays off the UI thread.
        var dbPath = Path.Combine(Environment.GetFolderPath(Environment.SpecialFolder.MyDocuments), "whispers.db");
        _db = new Database(dbPath);
        // Materialize the results here so the main-thread lambda does no DB work.
        var whispers = new System.Collections.Generic.List<Whisper>(_db.Query<Whisper>("select * from Whisper where Length > 0"));

        // All view construction/mutation happens back on the main thread.
        BeginInvokeOnMainThread(() => {
            viewController.AddChildViewController(whisperList);
            whisperList.View.Frame = new RectangleF(0, 60, viewController.View.Frame.Width, viewController.View.Frame.Height - 130);
            whisperList.Root.Add(new Section("") { });
            foreach (Whisper w in whispers) {
                whisperList.Root[0].Add(new WhisperElement(w));
            }
            viewController.View.AddSubview(whisperList.View);
        });
    });

    AudioSession.Initialize();
    return(true);
}
// Static constructor: configures the shared iOS audio session exactly once,
// the first time the Notify class is used.
static Notify()
{
    // Setup your session
    AudioSession.Initialize();
    // MediaPlayback category so notification audio plays even when the
    // ringer switch is muted.
    AudioSession.Category = AudioSessionCategory.MediaPlayback;
    AudioSession.SetActive(true);
}
// Activates the audio session for recording, starts the recorder, and starts
// a stopwatch whose elapsed time seeds the recorder popup's timer text.
// Returns early (without throwing) if preparation or recording fails.
//
// Fix: the original caught every exception and silently discarded it
// ('catch (Exception ex) { }', with 'ex' unused); failures are now written
// to the console so recording problems can be diagnosed, while the UI is
// still protected from the exception.
public void StartRecording()
{
    try {
        AudioSession.Initialize();
        Console.WriteLine("Begin Recording");
        AudioSession.Category = AudioSessionCategory.RecordAudio;
        AudioSession.SetActive(true);
        if (!PrepareAudioRecording()) {
            //RecordingStatusLabel.Text = "Error preparing";
            return;
        }
        if (!recorder.Record()) {
            //RecordingStatusLabel.Text = "Error recording";
            return;
        }
        this.stopwatch = new Stopwatch();
        this.stopwatch.Start();
        RecorderPopup.timerText = stopwatch.ElapsedMilliseconds.ToString();
    } catch (Exception ex) {
        // Best-effort: keep the caller alive, but record what went wrong
        // instead of swallowing the failure silently.
        Console.WriteLine("StartRecording failed: " + ex);
    }
}
// Configures the app-wide audio session for simultaneous playback and
// recording with a small (10 ms) preferred hardware I/O buffer — lower
// latency at the cost of more frequent I/O callbacks.
void initAudioSession()
{
    // AudioSession
    AudioSession.Initialize();
    AudioSession.SetActive(true);
    // NOTE(review): category is assigned after SetActive here, while other
    // call sites in this codebase set the category first — confirm the
    // ordering is intentional.
    AudioSession.Category = AudioSessionCategory.PlayAndRecord;
    AudioSession.PreferredHardwareIOBufferDuration = 0.01f;
}
// Background audio player for iPhone. The audio session is only configured
// on device builds: SIMULATOR builds skip it entirely.
public BackgroundAudioPlayerIphone()
{
#if !SIMULATOR
    AudioSession.Initialize();
    // SoloAmbientSound: this app's audio silences other apps' playback.
    AudioSession.Category = AudioSessionCategory.SoloAmbientSound;
    AudioSession.SetActive(true);
#endif
}
/// <summary>
/// The main entry point for the application.
/// </summary>
static void Main(string[] args)
{
    // Configure the audio session before UIApplication.Main so it is ready
    // as soon as the app delegate starts running.
    AudioSession.Initialize();
    // AmbientSound mixes with other apps' audio and respects the mute switch.
    AudioSession.Category = AudioSessionCategory.AmbientSound;
    AudioSession.SetActive(true);
    UIApplication.Main(args, null, "AppDelegate");
}
// Recorder variant of the lesson screen controller. Starts with no active
// recording (Stopwatch and AudioFilePath null) and initializes the audio
// session so recording can be activated later.
public LessonScreenRecorderViewController(IList <Screen> screens, int index) : base(screens, index)
{
    Stopwatch = null;
    AudioFilePath = null;
    AudioSession.Initialize(); // Required to activate recording
}
// iOS audio I/O device: defaults to stereo input and output, leaves the
// buffer size at 0, initializes the audio session, then queries the
// hardware for its actual device information.
public IOSAudioIODevice()
{
    // Default channel layout / buffer size before the hardware is queried.
    _preferredBufferSize = 0;
    _numOutputChannels = 2;
    _numInputChannels = 2;

    AudioSession.Initialize();
    UpdateDeviceInfo();
}
// Application entry point; sets up the audio session, then hands control to
// the UIKit run loop.
static void Main(string[] args)
{
    // the AudioSession code must be placed here to work properly
    AudioSession.Initialize();
    AudioSession.Category = AudioSessionCategory.AmbientSound;
    AudioSession.SetActive(true);
    UIApplication.Main(args, null, "AppDelegate");
}
// prepares the audio: initializes the session and loads the tap sound from
// the app bundle so it can be played instantly later.
public override void ViewDidLoad()
{
    base.ViewDidLoad();
    // enable audio
    AudioSession.Initialize();
    // load the sound
    Sound = SystemSound.FromFile("Sounds/tap.aif");
}
// Prepares the player UI every time the view is about to appear and starts
// playback immediately. The navigation title reflects whether we are only
// streaming or also saving the stream.
public override void ViewWillAppear(bool animated)
{
    base.ViewWillAppear(animated);

    if (PlayerOption == PlayerOption.Stream) {
        Title = "Stream ";
    } else {
        Title = "Stream & Save";
    }

    // Playback starts right away, so the transport button must offer
    // "Pause" and the elapsed-time label starts out blank.
    playPauseButton.TitleLabel.Text = "Pause";
    timeLabel.Text = string.Empty;

    AudioSession.Initialize();
    StartPlayback();
    IsPlaying = true;
}
// One-time audio bootstrap: wires interruption/resume handling, picks a
// session category based on whether another app is already playing audio,
// and activates the session.
internal static void Setup()
{
    Debug.WriteLine("AudioSessionManager.Setup()");
    AudioSession.Initialize();

    // While a call/alarm/other app owns the hardware, mark the audio system
    // unavailable and deactivate our session.
    AudioSession.Interrupted += (o, e) => {
        Debug.WriteLine("AudioSession.Interrupted");
        audioSystemAvailable = false;
        AudioSession.SetActive(false);
    };

    // Want to reactivate on resume from interruption
    AudioSession.Resumed += (o, e) => {
        Debug.WriteLine("AudioSession.Resumed");
        AudioSession.SetActive(true);
        audioSystemAvailable = true;
        SoundEffectThread.RestartAllRestarable();
    };

    // Checking if Other Audio is Playing During App Launch
    bool otherAudioIsPlaying = AudioSession.OtherAudioIsPlaying;
    MediaPlayer.otherAudioIsPlaying = otherAudioIsPlaying;
    Debug.WriteLine("AudioSession.OtherAudioIsPlaying == " + otherAudioIsPlaying);

    // For some unknown reason, setting category on the simulator fails with an unknown error code (-50)
    try {
        if (otherAudioIsPlaying) {
            // Mix with the other app's audio rather than silencing it.
            AudioSession.Category = AudioSessionCategory.AmbientSound;
        } else {
            AudioSession.Category = AudioSessionCategory.SoloAmbientSound;
        }
    } catch {
        Debug.WriteLine("Exception when setting AudioSession.Category");
    }
    AudioSession.SetActive(true);
    audioSystemAvailable = true;
}
// Plays the audio file at 'url' through a RemoteIO audio unit fed from an
// ExtAudioFile. Playback starts immediately on construction.
public ExtAudioBufferPlayer(CFUrl url)
{
    this.url = url;
    AudioSession.Initialize();
    AudioSession.Category = AudioSessionCategory.PlayAndRecord; // TODO: we need to play only here
    // Re-arm the unit after an interruption ends.
    AudioSession.Resumed += OnAudioSessionResumed;
    PrepareAudioUnit();
    PrepareExtAudioFile();
    audioUnit.Initialize();
    audioUnit.Start();
}
// App launch: initializes audio, attaches the root view, seeds the URL
// entry with a sample stream, and shows the window. Returns true to report
// a successful launch.
public override bool FinishedLaunching(UIApplication app, NSDictionary options)
{
    AudioSession.Initialize();
    window.AddSubview(viewController.View);

    // Nice creative commons source.
    entry.Text = "http://ccmixter.org/content/bradstanfield/bradstanfield_-_People_Let_s_Stop_The_War.mp3";
    // Dismiss the keyboard once the user finishes editing the URL.
    entry.EditingDidEnd += (sender, e) => entry.ResignFirstResponder();

    window.MakeKeyAndVisible();
    return true;
}
// This method is invoked when the application has loaded its UI and its ready to run
public override bool FinishedLaunching(UIApplication app, NSDictionary options)
{
    // Media-playback session: audio continues with the ringer switch muted.
    AudioSession.Initialize();
    AudioSession.Category = AudioSessionCategory.MediaPlayback;
    AudioSession.SetActive(true);

    // Disable shake-to-undo for this app.
    app.ApplicationSupportsShakeToEdit = false;

    // If you have defined a view, add it here:
    window.AddSubview(rootController.View);
    window.MakeKeyAndVisible();
    return(true);
}
// Idempotent audio-session bootstrap: safe to call from multiple entry
// points — only the first call does any work.
public static void InitAudio()
{
    if (inited) {
        return;
    }
    inited = true;
    AudioSession.Initialize();
    AudioSession.Category = AudioSessionCategory.AmbientSound;
    try {
        // Allow our audio to mix with other apps' playback; deliberately
        // best-effort, see catch below.
        AudioSession.OverrideCategoryMixWithOthers = true;
    } catch {
        // Not available in older OSes
    }
}
// Builds and starts a RemoteIO AudioUnit for low-latency (5 ms buffer)
// play-and-record: enables the microphone bus, sets the stream formats on
// the input/output scopes, and hooks the render callback that supplies
// audio data.
void prepareAudioUnit()
{
    // AudioSession
    AudioSession.Initialize();
    AudioSession.SetActive(true);
    AudioSession.Category = AudioSessionCategory.PlayAndRecord;
    AudioSession.PreferredHardwareIOBufferDuration = 0.005f;

    // creating an AudioComponentDescription of the RemoteIO AudioUnit
    AudioComponentDescription cd = new AudioComponentDescription() {
        componentType = AudioComponentDescription.AudioComponentType.kAudioUnitType_Output,
        componentSubType = AudioComponentDescription.AudioComponentSubType.kAudioUnitSubType_RemoteIO,
        componentManufacturer = AudioComponentDescription.AudioComponentManufacturerType.kAudioUnitManufacturer_Apple,
        componentFlags = 0,
        componentFlagsMask = 0
    };

    // Getting AudioComponent using the audio component description
    _audioComponent = AudioComponent.FindComponent(cd);

    // creating an audio unit instance
    _audioUnit = AudioUnit.CreateInstance(_audioComponent);

    // turning on microphone
    _audioUnit.SetEnableIO(true,
        AudioUnit.AudioUnitScopeType.kAudioUnitScope_Input,
        1 // Remote Input
        );

    // setting audio format
    _audioUnit.SetAudioFormat(_dstFormat,
        AudioUnit.AudioUnitScopeType.kAudioUnitScope_Input,
        0 // Remote Output
        );
    _audioUnit.SetAudioFormat(AudioUnitUtils.AUCanonicalASBD(_sampleRate, 2),
        AudioUnit.AudioUnitScopeType.kAudioUnitScope_Output,
        1 // Remote input
        );

    // setting callback method
    _audioUnit.RenderCallback += new EventHandler <AudioUnitEventArgs>(_audioUnit_RenderCallback);

    _audioUnit.Initialize();
    _audioUnit.Start();
}
// Plays a short alert sound, lazily initializing and activating the audio
// session on first use (guarded by the two static flags). Always reports
// success.
public override bool StartNotifyBeep()
{
    if (!AUDIOSESSION_INITIALIZED) {
        AudioSession.Initialize();
        AudioSession.Category = AudioSessionCategory.MediaPlayback;
        AUDIOSESSION_INITIALIZED = true;
    }
    if (!AUDIOSESSION_ACTIVE) {
        AudioSession.SetActive(true);
        AUDIOSESSION_ACTIVE = true;
    }
    // NOTE(review): a new SystemSound is created on every call and never
    // disposed — consider caching one instance; verify disposal wouldn't
    // cut the alert short before changing this.
    var sound = SystemSound.FromFile("res/SystemAlert.wav");
    sound.PlayAlertSound();
    return(true);
}
// Implement viewDidLoad to do additional setup after loading the view, typically from a nib.
public override void ViewDidLoad()
{
    base.ViewDidLoad();
    // setting button stat
    _recordingButton.Enabled = true;
    _playBackButton.Enabled = false;
    // binding event handlers
    // NOTE(review): the handler names (TouchCancel / TouchDown) do not match
    // the TouchUpInside events they are wired to — confirm this naming is
    // intentional before renaming anything.
    _recordingButton.TouchUpInside += new EventHandler(_recordingButton_TouchCancel);
    _playBackButton.TouchUpInside += new EventHandler(_playBackButton_TouchDown);
    // getting local sound file path (app's Documents/recording.aiff)
    var path = Environment.GetFolderPath(Environment.SpecialFolder.Personal);
    path = System.IO.Path.Combine(path, "recording.aiff");
    _url = CFUrl.FromFile(path);
    // setting audio session
    AudioSession.Initialize();
    AudioSession.SetActive(true);
}
// Builds and starts a RemoteIO AudioUnit for low-latency (5 ms buffer)
// play-and-record using the newer AudioUnit API: enables the microphone
// bus, sets a 32-bit canonical linear-PCM format on the output scope, and
// installs the render callback that supplies audio data.
void prepareAudioUnit()
{
    // AudioSession
    AudioSession.Initialize();
    AudioSession.SetActive(true);
    AudioSession.Category = AudioSessionCategory.PlayAndRecord;
    AudioSession.PreferredHardwareIOBufferDuration = 0.005f;

    // Getting AudioComponent Remote output
    _audioComponent = AudioComponent.FindComponent(AudioTypeOutput.Remote);

    // creating an audio unit instance
    _audioUnit = new AudioUnit(_audioComponent);

    // turning on microphone
    _audioUnit.SetEnableIO(true,
        AudioUnitScopeType.Input,
        1 // Remote Input
        );

    // setting audio format
    _audioUnit.SetAudioFormat(_dstFormat,
        AudioUnitScopeType.Input,
        0 // Remote Output
        );

    var format = AudioStreamBasicDescription.CreateLinearPCM(_sampleRate, bitsPerChannel: 32);
    format.FormatFlags = AudioStreamBasicDescription.AudioFormatFlagsAudioUnitCanonical;
    _audioUnit.SetAudioFormat(format, AudioUnitScopeType.Output, 1);

    // setting callback method
    _audioUnit.SetRenderCallback(_audioUnit_RenderCallback, AudioUnitScopeType.Global);

    _audioUnit.Initialize();
    _audioUnit.Start();
}
//
// This method is invoked when the application has loaded and is ready to run. In this
// method you should instantiate the window, load the UI into it and then make the window
// visible.
//
// You have 17 seconds to return from this method, or iOS will terminate your application.
//
public override bool FinishedLaunching(UIApplication application, NSDictionary launchOptions)
{
    AudioSession.Initialize();

    // Pause audio processing while the session is interrupted (call, alarm, ...).
    AudioSession.Interrupted += delegate {
        Debug.WriteLine("Session interrupted");
        ThreadStateBeginInterruption();
    };
    // Reactivate the session and resume processing when the interruption ends.
    AudioSession.Resumed += delegate {
        Debug.WriteLine("Session resumed");
        Debug.Print(AudioSession.InterruptionType.ToString());
        AudioSession.SetActive(true);
        ThreadStateEndInterruption();
    };

    // our default category -- we change this for conversion and playback appropriately
    try {
        AudioSession.Category = AudioSessionCategory.SoloAmbientSound;
    } catch {
        Debug.Print("ERROR: Cannot change audio session category");
    }

    // Log route changes (e.g. headphones plugged/unplugged) for diagnostics.
    AudioSession.AudioRouteChanged += delegate(object sender, AudioSessionRouteChangeEventArgs e) {
        var gg = e.PreviousInputRoute;
        Debug.Print("Audio route change: {0}", e.Reason);
        Debug.Print("Old route: {0}", e.PreviousOutputRoutes[0]);
        Debug.Print("New route: {0}", e.CurrentOutputRoutes[0]);
    };

    AudioSession.SetActive(true);
    return(true);
}
/// <summary>
/// Open the sound device, sets up an audio context, and makes the new context
/// the current context. Note that this method will stop the playback of
/// music that was running prior to the game start. If any error occurs, then
/// the state of the controller is reset.
/// </summary>
/// <returns>True if the sound controller was setup, and false if not.</returns>
private bool OpenSoundController()
{
#if MONOMAC || IOS
    alcMacOSXMixerOutputRate(PREFERRED_MIX_RATE);
#endif
    try {
        _device = Alc.OpenDevice(string.Empty);
    } catch (Exception ex) {
        // Remember why init failed so callers can surface it later.
        _SoundInitException = ex;
        return(false);
    }
    if (CheckALError("Could not open AL device")) {
        return(false);
    }
    if (_device != IntPtr.Zero) {
#if ANDROID
        // Attach activity event handlers so we can pause and resume all playing sounds
        AndroidGameActivity.Paused += Activity_Paused;
        AndroidGameActivity.Resumed += Activity_Resumed;

        // Query the device for the ideal frequency and update buffer size so
        // we can get the low latency sound path.
        /*
         * The recommended sequence is:
         *
         * Check for feature "android.hardware.audio.low_latency" using code such as this:
         * import android.content.pm.PackageManager;
         * ...
         * PackageManager pm = getContext().getPackageManager();
         * boolean claimsFeature = pm.hasSystemFeature(PackageManager.FEATURE_AUDIO_LOW_LATENCY);
         * Check for API level 17 or higher, to confirm use of android.media.AudioManager.getProperty().
         * Get the native or optimal output sample rate and buffer size for this device's primary output stream, using code such as this:
         * import android.media.AudioManager;
         * ...
         * AudioManager am = (AudioManager) getSystemService(Context.AUDIO_SERVICE);
         * String sampleRate = am.getProperty(AudioManager.PROPERTY_OUTPUT_SAMPLE_RATE));
         * String framesPerBuffer = am.getProperty(AudioManager.PROPERTY_OUTPUT_FRAMES_PER_BUFFER));
         * Note that sampleRate and framesPerBuffer are Strings. First check for null and then convert to int using Integer.parseInt().
         * Now use OpenSL ES to create an AudioPlayer with PCM buffer queue data locator.
         *
         * See http://stackoverflow.com/questions/14842803/low-latency-audio-playback-on-android
         */
        int frequency = DEFAULT_FREQUENCY;
        int updateSize = DEFAULT_UPDATE_SIZE;
        int updateBuffers = DEFAULT_UPDATE_BUFFER_COUNT;
        if (Android.OS.Build.VERSION.SdkInt >= Android.OS.BuildVersionCodes.JellyBeanMr1) {
            Android.Util.Log.Debug("OAL", Game.Activity.PackageManager.HasSystemFeature(PackageManager.FeatureAudioLowLatency) ? "Supports low latency audio playback." : "Does not support low latency audio playback.");

            var audioManager = Game.Activity.GetSystemService(Context.AudioService) as AudioManager;
            if (audioManager != null) {
                var result = audioManager.GetProperty(AudioManager.PropertyOutputSampleRate);
                if (!string.IsNullOrEmpty(result)) {
                    frequency = int.Parse(result, CultureInfo.InvariantCulture);
                }
                result = audioManager.GetProperty(AudioManager.PropertyOutputFramesPerBuffer);
                if (!string.IsNullOrEmpty(result)) {
                    updateSize = int.Parse(result, CultureInfo.InvariantCulture);
                }
            }

            // If 4.4 or higher, then we don't need to double buffer on the application side.
            // See http://stackoverflow.com/a/15006327
            // Use the explicit value rather than a constant as the 4.2 SDK (the build SDK) does not define a constant for 4.4.
            if ((int)Android.OS.Build.VERSION.SdkInt >= 19) {
                updateBuffers = 1;
            }
        } else {
            Android.Util.Log.Debug("OAL", "Android 4.2 or higher required for low latency audio playback.");
        }
        Android.Util.Log.Debug("OAL", "Using sample rate " + frequency + "Hz and " + updateBuffers + " buffers of " + updateSize + " frames.");

        // These are missing and non-standard ALC constants
        const int AlcFrequency = 0x1007;
        const int AlcUpdateSize = 0x1014;
        const int AlcUpdateBuffers = 0x1015;

        int[] attribute = new[] { AlcFrequency, frequency, AlcUpdateSize, updateSize, AlcUpdateBuffers, updateBuffers, 0 };
#elif IOS
        AudioSession.Initialize();

        // Suspend the OpenAL context while the audio session is interrupted,
        // and bring it back when the session resumes.
        AudioSession.Interrupted += (sender, e) => {
            AudioSession.SetActive(false);
            Alc.MakeContextCurrent(ContextHandle.Zero);
            Alc.SuspendContext(_context);
        };
        AudioSession.Resumed += (sender, e) => {
            AudioSession.SetActive(true);
            Alc.MakeContextCurrent(_context);
            Alc.ProcessContext(_context);
        };

        int[] attribute = new int[0];
#elif !DESKTOPGL
        int[] attribute = new int[0];
#endif

#if DESKTOPGL
        _acontext = new AudioContext();
        _context = Alc.GetCurrentContext();
        _oggstreamer = new OggStreamer();
#else
        _context = Alc.CreateContext(_device, attribute);
#endif
        if (CheckALError("Could not create AL context")) {
            CleanUpOpenAL();
            return(false);
        }
        if (_context != ContextHandle.Zero) {
            Alc.MakeContextCurrent(_context);
            if (CheckALError("Could not make AL context current")) {
                CleanUpOpenAL();
                return(false);
            }
            return(true);
        }
    }
    return(false);
}
// Constructor invoked when the controller is loaded from a storyboard/nib;
// initializes the shared audio session so audio APIs are usable later.
public ViewController(IntPtr handle) : base(handle)
{
    AudioSession.Initialize();
}
// Test fixture setup: the audio session must be initialized before any
// AudioSession members are exercised by the tests.
public AudioSessionTest()
{
    AudioSession.Initialize();
}
// Custom renderer for the play button; ensures the audio session is
// initialized before the button triggers any playback.
// NOTE(review): class name looks like a typo for "Renderer" — renaming
// would break callers, so it is left as-is.
public PlayButtonRederer()
{
    AudioSession.Initialize();
}
// Loads the idiom-specific nib (iPhone vs iPad) and prepares the audio
// session for the sound features hosted by this controller.
public SoundViewController() : base(UserInterfaceIdiomIsPhone ? "SoundViewController_iPhone" : "SoundViewController_iPad", null)
{
    AudioSession.Initialize();
}
// Builds the recording screen: creates the recording view model, sets up
// the audio session and call-event handling, configures the menu / start /
// stop / vehicle / support buttons and labels, and wires the touch handlers
// that drive recording start/stop and navigation.
public override void ViewDidLoad()
{
    base.ViewDidLoad();
    // Initialize Recorder
    RVM = new RecordingViewModel();
    // Perform any additional setup after loading the view, typically from a nib.
    // Init audio session
    AudioSession.Initialize(null, NSRunLoop.NSDefaultRunLoopMode);
    AudioSession.AudioRouteChanged += AudioSession_AudioRouteChanged;
    // Init call event handler
    callcenter.CallEventHandler += callState.CallEvent;
    // Set menu button
    UIImageView menuImageView = new UIImageView();
    menuImageView.Bounds = new CGRect(0, 0, 20, 20);
    menuImageView.Image = UIImage.FromBundle("threelines");
    menuButton = new UIBarButtonItem(
        menuImageView.Image,
        UIBarButtonItemStyle.Plain,
        (s, e) => {
            System.Diagnostics.Debug.WriteLine("menu button tapped");
            if (RVM.IsRecording) {
                // While recording, opening the menu first asks the user to
                // confirm, then stops the recording.
                SidebarController.ToggleMenu();
                var okAlertController = UIAlertController.Create("", UiStrings.MainNotificationSuspendUserAction, UIAlertControllerStyle.Alert);
                //Add Action
                okAlertController.AddAction(UIAlertAction.Create(NSBundle.MainBundle.GetLocalizedString("Vernacular_P0_dialog_ok"), UIAlertActionStyle.Default, alert => {
                    RVM.StopRecordingCommand.Execute(null);
                }));
                PresentViewController(okAlertController, true, null);
            } else {
                SidebarController.ToggleMenu();
            }
        }
        );
    // Add button to item array
    barButtonItems [0] = menuButton;
    NavigationItem.LeftBarButtonItem = menuButton;
    // Set bottom view labels
    lblTitle.Text = "";
    lblBody.Text = "";
    // Set start button style
    btnStart.SetBackgroundImage(UIImage.FromBundle("CarButton"), UIControlState.Normal);
    View.BackgroundColor = StyleSettings.ThemePrimaryDarkLightenedColor();
    btnStart.SetTitle("", UIControlState.Normal);
    // Add square to stop || TODO: change to image
    UIView stopSquare = new UIView(new RectangleF(
        (float)(btnStop.Bounds.X + 15),
        (float)(btnStop.Bounds.Y + 15),
        (float)(btnStop.Bounds.Width - 30),
        (float)(btnStop.Bounds.Height - 30))
        );
    // Set stop button attributes
    stopButtonTranslation = btnStopBottomConstraint.Constant;
    stopSquare.BackgroundColor = UIColor.White;
    btnStop.SetBackgroundImage(UIImage.FromBundle("srs-stop-btn"), UIControlState.Normal);
    btnStop.Layer.CornerRadius = btnStop.Bounds.Width / 2;
    lblLeft.Hidden = true;
    lblCenter.Hidden = true;
    lblRight.Hidden = true;
    // Set initial status of vehicle type & support
    btnSupport.Hidden = true;
    btnVehicle.Hidden = true;
    lblCalibration.Hidden = true;
    lblCalibration.Text = NSBundle.MainBundle.LocalizedString("Vernacular_P0_label_setup", null);
    lblCalibration.TextColor = StyleSettings.SubduedTextOnDarkColor();
    // Logic
    btnStart.TouchUpInside += (object sender, EventArgs e) => {
        if (Settings.CalibrationDone) {
            // Ignore taps while a recording is already running.
            if (RVM.IsRecording) {
                return;
            }
            var passengerNumberPickerVC = Storyboard.InstantiateViewController("PassengerNumberPickerViewController") as PassengerNumberPickerViewController;
            passengerNumberPickerVC.ModalPresentationStyle = UIModalPresentationStyle.OverCurrentContext;
            passengerNumberPickerVC.ModalTransitionStyle = UIModalTransitionStyle.CoverVertical;
            passengerNumberPickerVC.parentVC = this;
            NavigationController.PresentViewController(passengerNumberPickerVC, true, null);
        } else {
            // TODO: warn user that calibration has to be done
        }
    };
    btnStop.TouchUpInside += (object sender, EventArgs e) => {
        UnbindFromService();
        AnimateStopButton();
        RVM.OnDestroy();
        StopRecording();
        UpdateRecordButtonUi();
        UploadData();
        lblCenter.Hidden = true;
    };
    btnVehicle.TouchUpInside += (object sender, EventArgs e) => {
        OpenSettingsVC();
    };
    btnSupport.TouchUpInside += (object sender, EventArgs e) => {
        OpenSettingsVC();
    };
}
// Test fixture setup: request microphone access first, then initialize the
// audio session under test.
public AudioSessionTest()
{
    TestRuntime.RequestMicrophonePermission();
    AudioSession.Initialize();
}
// Custom renderer for the start-recording button; ensures the audio session
// is initialized before recording can be triggered.
public StartRecordingButtonRenderer()
{
    AudioSession.Initialize();
}
// Tone-generator app launch: configures the audio session, describes a
// 16-bit mono linear-PCM stream at the hardware sample rate, primes an
// output queue with pre-generated tone buffers, and starts playback.
// The OutputCompleted handler refills and recycles each buffer forever.
public override bool FinishedLaunching(UIApplication app, NSDictionary options)
{
    //
    // Setup audio system
    //
    AudioSession.Initialize();
    AudioSession.Category = AudioSessionCategory.MediaPlayback;

    //
    // Format description, we generate LinearPCM as short integers
    //
    sampleRate = AudioSession.CurrentHardwareSampleRate;
    var format = new AudioStreamBasicDescription() {
        SampleRate = sampleRate,
        Format = AudioFormatType.LinearPCM,
        FormatFlags = AudioFormatFlags.LinearPCMIsSignedInteger | AudioFormatFlags.LinearPCMIsPacked,
        BitsPerChannel = 16,
        ChannelsPerFrame = 1,
        BytesPerFrame = 2,
        BytesPerPacket = 2,
        FramesPerPacket = 1,
    };

    //
    // Create an output queue
    //
    var queue = new OutputAudioQueue(format);
    var bufferByteSize = (sampleRate > 16000)? 2176 : 512; // 40.5 Hz : 31.25 Hz

    //
    // Create three buffers, generate a tone, and output the tones
    //
    var buffers = new AudioQueueBuffer * [numBuffers];
    for (int i = 0; i < numBuffers; i++) {
        queue.AllocateBuffer(bufferByteSize, out buffers [i]);
        GenerateTone(buffers [i]);
        queue.EnqueueBuffer(buffers [i], null);
    }

    //
    // Output callback: invoked when the audio system is done with the
    // buffer, this implementation merely recycles it.
    //
    queue.OutputCompleted += (object sender, OutputCompletedEventArgs e) => {
        if (alternate) {
            // Cycle through the waveforms, wrapping back to Sine after Square.
            outputWaveForm += 1;
            if (outputWaveForm > WaveForm.Square) {
                outputWaveForm = WaveForm.Sine;
            }
            GenerateTone(e.UnsafeBuffer);
        }
        queue.EnqueueBuffer(e.UnsafeBuffer, null);
    };
    queue.Start();
    return(true);
}