/// <summary>
/// Re-scans portaudio and rebuilds the internal device list, matching re-discovered
/// devices back to existing entries by name so their guids remain stable.
/// </summary>
/// <param name="force">When true, refresh even if the broker is not fully loaded.</param>
public static void refresh_devices(bool force=false)
{
    if (!force && !Broker.get_instance().fully_loaded)
        return;

    // Mark everything dead; devices still present after the rescan are revived below.
    foreach (InternalAudioDevice device in _devices)
        device.is_alive = false;

    Utils.api_exec("pa", "rescan");

    XmlDocument doc = null;
    try
    {
        doc = XmlUtils.GetDocument(Utils.api_exec("pa", "devlist xml"));
    }
    catch (KeyNotFoundException)
    {
        // Consistent with the hardened refresh_devices variant elsewhere in this file:
        // portaudio returned no usable device list, so the app cannot continue.
        MessageBox.Show("Portaudio did not return a device list most likely because it cannot find an active microphone or speaker it can use, FSClient will now exit.", "Missing Device List", MessageBoxButton.OK, MessageBoxImage.Error);
        Environment.Exit(-1);
        return;
    }

    XmlNode node = XmlUtils.GetNode(doc, "devices", 0);
    foreach (XmlNode child in node.ChildNodes)
    {
        AudioDevice dev = new AudioDevice(
            cur_guid,
            XmlUtils.GetNodeAttrib(child, "name"),
            int.Parse(XmlUtils.GetNodeAttrib(child, "inputs")),
            int.Parse(XmlUtils.GetNodeAttrib(child, "outputs"))
        );
        int dev_id = int.Parse(XmlUtils.GetNodeAttrib(child, "id"));

        bool found_device = false;
        foreach (InternalAudioDevice device in _devices) //TODO: Probably should sort here
        {
            // Match by name against not-yet-revived entries only, so duplicate
            // device names each claim a distinct slot.
            if (device.device.name == dev.name && device.is_alive == false)
            {
                device.is_alive = true;
                device.id = dev_id;
                found_device = true;
                break;
            }
        }
        if (!found_device)
        {
            InternalAudioDevice new_device = new InternalAudioDevice { device = dev, is_alive = true, id = dev_id };
            cur_guid++;
            _devices.Add(new_device);
        }
    }

    // Publish only the devices that survived (or appeared in) this rescan.
    _pub_devices = (from c in _devices where c.is_alive select c.device).ToArray();
}
/// <summary>
/// Based on compilation setting, returns the proper instance of sounds.
/// </summary>
/// <param name="device">Optional output device; note it is NOT forwarded on iOS.</param>
/// <returns>A platform specific, already-initialized instance of <see cref="AudioEngine"/></returns>
public static AudioEngine NewAudioEngine(AudioDevice device = null)
{
    AudioEngine engine = null;
#if SILICONSTUDIO_PLATFORM_IOS
    // iOS has its own engine type; the 'device' parameter is ignored on this path.
    engine = new AudioEngineIos();
#else
    engine = new AudioEngine(device);
#endif
    engine.InitializeAudioEngine();
    return engine;
}
/// <summary>
/// Sets up the game: content pipeline, graphics manager, audio chain, and input state.
/// </summary>
public DiverGame()
{
    DefaultContent = Content;
    graphicsDeviceManager = new GraphicsDeviceManager(this);
    Content.RootDirectory = "Content";
    IsMouseVisible = true;

    // Audio chain: device feeds the mixer.
    audioDevice = new AudioDevice();
    audioMixer = new AudioMixer(audioDevice);

    state = new State { Input = new Input() };
}
/// <summary>
/// Creates one muted, already-playing tone per row on a fresh audio device.
/// </summary>
/// <param name="_rows">Number of tone rows to allocate.</param>
public Output(int _rows)
{
    device = new AudioDevice();
    this.rows = _rows;
    this.waves = new OutputStream[this.rows];

    for (int row = 0; row < this.rows; row++)
    {
        // Start each tone silent so it can be faded up later without latency.
        OutputStream tone = device.CreateTone(musical_constrained(row));
        tone.Volume = 0f;
        tone.Play();
        waves[row] = tone;
    }
}
/// <summary>
/// Populates the combo box with a "Default" entry followed by every wave-in
/// capture device reported by the native API, then selects the first item.
/// </summary>
public AudioDevicePicker()
{
    InitializeComponent();
    comboBox.Items.Add(new AudioDevice(-1, "Default"));

    int deviceCount = WaveNative.waveInGetNumDevs();
    for (int deviceId = 0; deviceId < deviceCount; deviceId++)
    {
        WaveNative.WaveInCaps caps = new WaveNative.WaveInCaps();
        WaveNative.waveInGetDevCapsA(deviceId, ref caps, Marshal.SizeOf(typeof(WaveNative.WaveInCaps)));

        // szPname is a fixed-size char buffer: trim at the first NUL terminator.
        string rawName = new string(caps.szPname);
        string friendlyName = rawName.Remove(rawName.IndexOf('\0')).Trim();

        comboBox.Items.Add(new AudioDevice(deviceId, friendlyName));
    }

    comboBox.SelectedIndex = 0;
}
/// <summary>
/// Builds a player over the given MPEG stream, using the supplied audio device
/// or, when none is given, one created by the system factory registry.
/// </summary>
/// <param name="stream">Source stream of encoded audio.</param>
/// <param name="device">Output device, or null to use the system default factory.</param>
public Player(System.IO.Stream stream, AudioDevice device)
{
    bitstream = new Bitstream(stream);
    decoder = new Decoder();
    audio = device ?? FactoryRegistry.systemRegistry().createAudioDevice();
    audio.open(decoder);
}
/// <summary>
/// Read block from device. Attempts a single stream recovery on a negative
/// return code before giving up.
/// </summary>
/// <param name="device">Device handle</param>
/// <param name="buffer">Buffer into which to read</param>
/// <param name="blockSize">Block size (units as expected by the native Read call — frames vs bytes not verifiable here)</param>
internal static unsafe void Read(AudioDevice device, byte[] buffer, int blockSize)
{
    fixed (void *bufferPtr = buffer)
    {
        long err = Read(device.Handle, bufferPtr, (ulong)blockSize);
        if (err < 0)
        {
            // Negative result is a native error code; try recovering once.
            err = Recover(device.Handle, (int)err, 1);
            if (err < 0)
            {
                throw new ArgumentException("Read recovery failed.");
            }
        }
        else if (err != blockSize)
        {
            // A short read is treated as a hard failure.
            throw new ArgumentException("Read failed.");
        }
    }
}
/// <summary>
/// Loads audio data from the configured stream or URI (once), resets any
/// previously prepared playback state, and fires the load-completed events.
/// </summary>
public void Load()
{
    // can this be reused to load the same file again without re-setting the location?
    if (load_completed)
    {
        return;
    }
    if (audiostream != null)
    {
        LoadFromStream(audiostream);
    }
    else
    {
        LoadFromUri(sound_location);
    }
    // force recreate for new stream: drop cached decoded data and device so the
    // next playback rebuilds them from the freshly loaded stream.
    adata = null;
    adev = null;
    load_completed = true;
    // Raise completion via both the virtual hook and the public event.
    AsyncCompletedEventArgs e = new AsyncCompletedEventArgs(null, false, this);
    OnLoadCompleted(e);
    if (LoadCompleted != null)
    {
        LoadCompleted(this, e);
    }
    if (use_win32_player)
    {
        // Reuse the win32 player if it exists, just swapping its stream.
        if (win32_player == null)
        {
            win32_player = new Win32SoundPlayer(mstream);
        }
        else
        {
            win32_player.Stream = mstream;
        }
    }
}
/// <summary>
/// Minimal demo loop: loads a guitar clip and a MIDI sequence, then plays the
/// clip whenever Space is pressed until the window closes.
/// </summary>
public static void Main(string[] args)
{
    // Creating the device initializes the audio backend even though the
    // reference itself is not used afterwards.
    AudioDevice audioDevice = new AudioDevice();
    AudioSource guitarChannel = new AudioSource();
    AudioClip guitarClip = new AudioClip("Assets/korg_m3r_rock.wav");
    AudioSequence crazyTrain = new AudioSequence("Assets/ozzy_osbourne_crazy_train.mid");
    Window window = new Window(1024, 576, "Aiv.Audio.Example.Midi");

    while (window.opened)
    {
        if (window.GetKey(KeyCode.Space))
        {
            guitarChannel.Play(guitarClip);
        }
        window.Update();
    }
}
//UPGRADE_ISSUE: Class 'java.lang.ClassLoader' was not converted. 'ms-help://MS.VSCC.2003/commoner/redir/redirect.htm?keyword="jlca1000_javalangClassLoader"'
/// <summary>
/// Creates an instance of an AudioDevice implementation by class name.
/// </summary>
/// <param name="loader">The ClassLoader to use to load the named class, or
/// null to resolve the type via <c>System.Type.GetType</c> instead.</param>
/// <param name="name">The name of the class to load.</param>
/// <returns>A newly-created instance of the audio device class.</returns>
protected internal virtual AudioDevice instantiate(ClassLoader loader, System.String name)
{
    AudioDevice dev = null;
    System.Type cls = null;
    if (loader == null)
    {
        //UPGRADE_TODO: Format of parameters of method 'java.lang.Class.forName' are different in the equivalent in .NET. 'ms-help://MS.VSCC.2003/commoner/redir/redirect.htm?keyword="jlca1092"'
        // NOTE(review): GetType here expects an assembly-qualified name for types
        // outside the current assembly — behavior differs from Java's Class.forName.
        cls = System.Type.GetType(name);
    }
    else
    {
        //UPGRADE_ISSUE: Method 'java.lang.ClassLoader.loadClass' was not converted. 'ms-help://MS.VSCC.2003/commoner/redir/redirect.htm?keyword="jlca1000_javalangClassLoader"'
        cls = loader.loadClass(name);
    }
    // Will throw InvalidCastException if the named class is not an AudioDevice.
    System.Object o = SupportClass.CreateNewInstance(cls);
    dev = (AudioDevice)o;
    return(dev);
}
/// <summary>
/// Starts WASAPI loopback capture on the given device. Throws if a capture is
/// already running, or wraps and rethrows any startup failure.
/// </summary>
/// <param name="audioDevice">Device whose Id identifies the MMDevice to capture.</param>
public void Start(AudioDevice audioDevice)
{
    if (loopbackCapture == null || loopbackCapture.CaptureState == NAudio.CoreAudioApi.CaptureState.Stopped)
    {
        //Start a capturing using a specific device
        try
        {
            loopbackCapture = new WasapiLoopbackCapture(new MMDeviceEnumerator().GetDevice(audioDevice.Id));
            loopbackCapture.DataAvailable += loopbackCapture_DataAvailable;
            loopbackCapture.StartRecording();
        }
        catch (Exception e)
        {
            // BUG FIX: the exception was previously constructed but never thrown,
            // silently swallowing startup failures.
            throw new Exception($"An error ocurred when we try to start the loopback capture. Error message: {e.Message}", e);
        }
    }
    else
    {
        // BUG FIX: previously constructed but never thrown.
        throw new Exception("Loopback could't start the capture because it's current capturing. You need to stop the old audio capturing before start a new one.");
    }
}
/* FIXME: We should work toward eliminating internal methods. They
 * could eliminate the possibility that additional platforms could
 * be added by third parties without changing FNA itself.
 */
/// <summary>
/// Runs one update tick: ticks the audio device and game logic when the
/// platform allows it, and records profiler timings when enabled.
/// </summary>
private void DoUpdate(GameTime gameTime)
{
    AssertNotDisposed();
#if BASIC_PROFILER
    updateStart = _gameTimer.ElapsedTicks;
#endif
    // The platform can veto the update (e.g. while the window is inactive).
    if (Platform.BeforeUpdate(gameTime))
    {
        AudioDevice.Update();
        Update(gameTime);
        /* The TouchPanel needs to know the time for when
         * touches arrive.
         */
        TouchPanelState.CurrentTimestamp = gameTime.TotalGameTime;
    }
#if BASIC_PROFILER
    updateTime = _gameTimer.ElapsedTicks - updateStart;
#endif
}
/// <summary>
/// Game bootstrap: records the default resolution, initializes tweening and the
/// UI/stage managers (order-sensitive), then opens the menu panel.
/// </summary>
public static void Main()
{
    // Capture the startup screen size before any config changes it.
    Global.Ins.defaultResolution.x = Screen.width;
    Global.Ins.defaultResolution.y = Screen.height;
    Global.Ins.RefreshConfig();
    DOTween.defaultEaseType = Ease.Linear;
    DOTween.Init();
    // Managers are bound to fixed scene objects found by name.
    UIPanelMgr.Ins.Init(GameObject.Find("UIPanel").transform);
    StageMgr.Ins.Init(GameObject.Find("Stage").transform);
    UIWinMgr.Ins.Init(GameObject.Find("UIWin").transform);
    RegistViews();
    if (Debug.isDebugBuild)
    {
        GUIDeviceInfo.Show();
    }
    Global.Ins.audioDevice = AudioDevice.Get("music");
    UIPanelMgr.Ins.SwitchASync <MenuPanel>();
}
/// <summary>
/// Sets up WebRTC audio routing for Android: reads the speakerphone preference
/// to pick the default output, and wires proximity-sensor and wired-headset
/// listeners that re-evaluate the active device.
/// </summary>
/// <param name="context">Android context used for system services and preferences.</param>
public RtcAudioManager(Context context)
{
    _context = context;
    _audioManager = (AudioManager)context.GetSystemService(Context.AudioService);
    //_wiredHeadsetReceiver = new WiredHeadsetReceiver();
    _amState = AudioManagerState.Uninitialized;
    ISharedPreferences preferences = PreferenceManager.GetDefaultSharedPreferences(context);
    _useSpeakerPhone = preferences.GetString(context.GetString(Resource.String.pref_speakerphone_key),
                                             context.GetString(Resource.String.pref_speakerphone_default));
    // Preference value decides the fallback route: earpiece only when
    // speakerphone is explicitly disabled.
    if (_useSpeakerPhone.Equals(SpeakerPhoneFalse))
    {
        _defaultAudioDevice = AudioDevice.Earpiece;
    }
    else
    {
        _defaultAudioDevice = AudioDevice.SpeakerPhone;
    }
    _proximitySensor = RTCProximitySensor.Create(_context, new Runnable(OnProximitySensorChangedState));
    // Headset plug/unplug triggers a full device-state re-evaluation.
    _wiredHeadsetReceiver = new WiredHeadsetReceiver(new Runnable(() => { UpdateAudioDeviceState(); }));
}
/// <summary>
/// Sets the preferred default audio device and re-evaluates routing.
/// Earpiece is only accepted when the hardware actually has one; otherwise
/// the default falls back to speakerphone. Other values leave the default unchanged.
/// </summary>
/// <param name="defaultDevice">Requested default device.</param>
public void SetDefaultAudioDevice(AudioDevice defaultDevice)
{
    ThreadUtils.CheckIsOnMainThread();
    if (defaultDevice == AudioDevice.SpeakerPhone)
    {
        _defaultAudioDevice = defaultDevice;
    }
    else if (defaultDevice == AudioDevice.Earpiece)
    {
        _defaultAudioDevice = HasEarpiece() ? defaultDevice : AudioDevice.SpeakerPhone;
    }
    UpdateAudioDeviceState();
}
/// <summary>
/// Gets all audio devices (input and output) exposed by the OBS source's
/// device-id property, prepending a "none" entry and substituting the
/// resolved name for the "default device" placeholder.
/// </summary>
/// <param name="audioSource">Source whose properties are inspected.</param>
/// <param name="defaultDeviceName">Display name to use for the default-device entry.</param>
/// <returns>List of discovered devices, "none" entry first.</returns>
private static List <AudioDevice> GetAudioDevices(Source audioSource, string defaultDeviceName)
{
    List <AudioDevice> audioDevices = new List <AudioDevice>
    {
        new AudioDevice { name = Constants.Audio.Settings.AudioDeviceNoneName, id = Constants.Audio.NO_DEVICE_ID }
    };

    foreach (ObsProperty property in audioSource.GetProperties().GetPropertyList())
    {
        // Only the device-id list property carries the device entries.
        if (!property.Name.Equals(Constants.Audio.SettingKeys.DeviceId))
        {
            continue;
        }

        string[] propertyNames = property.GetListItemNames();
        object[] propertyValues = property.GetListItemValues();
        for (int j = 0; j < propertyNames.Length; j++)
        {
            string deviceName = propertyNames[j];
            if (deviceName == Constants.Audio.Settings.DefaultDeviceName)
            {
                deviceName = defaultDeviceName;
            }
            audioDevices.Add(new AudioDevice { name = deviceName, id = (string)propertyValues[j] });
        }
    }
    return(audioDevices);
}
/// <summary>
/// Integration test: the dummy default-playback-device endpoint returns 200
/// with a JSON body that deserializes to the expected device.
/// </summary>
public void TestGetDummyDefaultPlaybackDevice()
{
    Browser browser = new Browser(with => with.Module(new HttpModule()));

    BrowserResponse response = browser.Get("/DummyDefaultPlaybackDevice", with =>
    {
        with.AjaxRequest();
        with.Header("Accept", "application/json");
    });

    Assert.AreEqual(HttpStatusCode.OK, response.StatusCode);
    string payload = response.Body.AsString();
    Assert.IsFalse(string.IsNullOrWhiteSpace(payload));

    AudioDevice dummyDevice = JsonConvert.DeserializeObject <AudioDevice>(payload);
    Assert.IsNotNull(dummyDevice);
    Assert.IsTrue(dummyDevice.Id == "1");
    Assert.IsTrue(dummyDevice.IsCurrentDevice);
    Assert.IsTrue(dummyDevice.Name == "Speaker");
}
/// <summary>
/// Game bootstrap: initializes tweening, the UI/stage managers (order-sensitive),
/// creates the BGM and effect audio devices, and starts background music.
/// </summary>
public static void Main()
{
    new ScreenChecker();
    DOTween.defaultEaseType = Ease.Linear;
    DOTween.Init();
    Application.targetFrameRate = 60;
    // Managers are bound to fixed scene objects found by name.
    UIPanelMgr.Ins.Init(GameObject.Find("UIPanel").transform);
    StageMgr.Ins.Init(GameObject.Find("Stage").transform);
    UIWinMgr.Ins.Init(GameObject.Find("UIWin").transform);
    RegistViews();
    Global.Ins.menu.ShowMenu();
    // Separate devices so BGM and effects can be controlled independently.
    Global.Ins.bgmDevice = AudioDevice.Create("Bgm");
    Global.Ins.effectDevice = AudioDevice.Create("Effect");
    Global.Ins.bgmDevice.Play(ResMgr.Ins.Load <AudioClip>("hot_res/audios/bgm"));
    if (Debug.isDebugBuild)
    {
        GUIDeviceInfo.Show();
    }
}
/// <summary>
/// Releases all owned subsystems; each is disposed only if it was created.
/// Note: the <paramref name="isDisposing"/> flag is currently not consulted,
/// matching the existing contract.
/// </summary>
/// <param name="isDisposing">Standard dispose-pattern flag (unused here).</param>
protected virtual void Dispose(bool isDisposing)
{
    ShaderManager?.Dispose();
    RenderStatePool?.Dispose();
    GraphicsDevice?.Dispose();
    AudioDevice?.Dispose();
    onStart?.Dispose();
}
//UPGRADE_NOTE: Synchronized keyword was removed from method 'close'. Lock expression was added. 'ms-help://MS.VSCC.2003/commoner/redir/redirect.htm?keyword="jlca1027"'
/// <summary> Closes this player. Any audio currently playing is stopped
/// immediately.
/// </summary>
public virtual void close()
{
    lock (this)
    {
        AudioDevice out_Renamed = audio;
        if (out_Renamed != null)
        {
            closed = true;
            audio = null;
            // this may fail, so ensure object state is set up before
            // calling this method.
            out_Renamed.close();
            // Capture the final playback position after the device has closed.
            lastPosition = out_Renamed.Position;
            try
            {
                bitstream.close();
            }
            catch (BitstreamException ex)
            {
                // Best-effort shutdown: bitstream errors during close are ignored.
            }
        }
    }
}
/// <summary>
/// Demo entry point: initializes the game window, queues two audio files on
/// independent sources, starts both, runs the main loop, then tears down audio.
/// </summary>
static void Main(string[] args)
{
    GameSystems.Initialize(new GameStartInfo
    {
        Window = new WindowInfo() { Name = "TestUnit", Size = new Size(1920, 1080) }
    });

    var musicStream = new AudioStream(@"C:\Users\LinkC\source\love.mp3");
    var effectStream = new AudioStream(@"C:\Users\LinkC\source\free.wav");

    // Each stream gets its own queue covering its full length.
    AudioQueue musicQueue = new AudioQueue();
    AudioQueue effectQueue = new AudioQueue();
    musicQueue.Add(musicStream, 0, musicStream.Length);
    effectQueue.Add(effectStream, 0, effectStream.Length);

    AudioSource musicSource = new AudioSource(musicStream.WaveFormat);
    AudioSource effectSource = new AudioSource(effectStream.WaveFormat);
    musicSource.SubmitAudioQueue(musicQueue);
    effectSource.SubmitAudioQueue(effectQueue);

    musicSource.Start();
    effectSource.Start();

    GameSystems.RunLoop();
    AudioDevice.Terminate();
}
/// <summary>
/// Plays the loaded sound synchronously, beeping as a fallback when no stream
/// is loaded. Uses the managed path (lazily creating decoded data and device)
/// or delegates to the win32 player depending on configuration.
/// </summary>
public void PlaySync()
{
    Start();
    if (mstream == null)
    {
        // No audio loaded: fall back to the system beep.
        SystemSounds.Beep.Play();
        return;
    }
    if (!use_win32_player)
    {
        try
        {
            // Lazily build the decoded data and output device on first play.
            if (adata == null)
            {
                adata = new WavData(mstream);
            }
            if (adev == null)
            {
                adev = AudioDevice.CreateDevice(null);
            }
            if (adata != null)
            {
                adata.Setup(adev);
                adata.Play(adev);
            }
        }
        catch
        {
            // Deliberate best-effort: playback failures are silently ignored
            // so a bad audio setup never crashes the caller.
        }
    }
    else
    {
        win32_player.PlaySync();
    }
}
/// <summary>
/// Runs after settings load: enumerates capture devices, selects the last-used
/// (or first) device, reopens the last audio file if it still exists, and
/// refreshes the window title.
/// </summary>
private void OnSettingsLoaded()
{
    this.devices = new List <AudioDevice>();
    try
    {
        AudioConfiguration config = new AudioConfiguration();
        this.devices = config.ListDevices();
    }
    catch (Exception ex)
    {
        ShowStatus(ex.Message);
    }
    SetButtonState();

    if (this.devices.Count > 0)
    {
        // Prefer the device remembered in settings; otherwise take the first.
        AudioDevice device = null;
        if (!string.IsNullOrEmpty(this.settings.DeviceName))
        {
            device = this.devices.FirstOrDefault(i => i.Name == this.settings.DeviceName);
        }
        if (device == null)
        {
            device = this.devices[0];
        }
        SelectDevice(device);
    }
    else
    {
        ShowStatus("No audio capture devices found");
    }

    if (this.settings.LastFile != null && System.IO.File.Exists(this.settings.LastFile))
    {
        OpenAudioFile(this.settings.LastFile);
    }
    this.Title = string.Format(this.titlePattern, this.settings.LastFile);
}
/// <summary>
/// Builds a 4x5 grid of muted, playing tones. Quadrant column selects pan
/// direction, quadrant row selects whether pitch steps go up or down from the
/// centre pitch.
/// </summary>
public Audio()
{
    int centrePitch = ((MaxPitch-MinPitch)/2)+MinPitch;
    int pitchStep = (MaxPitch - MinPitch) / 10;
    this.device = new AudioDevice();
    this.waves = new OutputStream[4, 5];

    for (int quadrant = 0; quadrant < 4; quadrant++)
    {
        // If this quadrant is in the left 50%, pan left (-1), else pan right (1)
        int panDirection = ((quadrant % 2) == 0) ? -1 : 1;
        // If this quadrant is in the top 50%, add pitch steps to centre pitch, else subtract
        int pitchDirection = (quadrant < 2) ? 1 : -1;

        for (int step = 0; step < 5; step++)
        {
            int pitch = centrePitch + (step * pitchStep * pitchDirection);
            float pan = ((float)(5 + (step * panDirection)) / 5f) - 1f;

            OutputStream tone = device.CreateTone((double)pitch);
            tone.Volume = 0f;
            tone.Pan = pan;
            tone.Play();
            waves[quadrant, step] = tone;
        }
    }
}
/// <summary>
/// Write block to device. Attempts a single stream recovery on a negative
/// return code before giving up.
/// </summary>
/// <param name="device">Device handle.</param>
/// <param name="buffer">Buffer to be written.</param>
/// <param name="blockSize">Block size.</param>
/// <param name="offset">Offset into buffer from which to start writing data.</param>
/// <returns>Number of frames (1 frame=1 sample from each channel) written.</returns>
internal static unsafe int Write(AudioDevice device, byte[] buffer, int blockSize, int offset = 0)
{
    long err = 0;
    fixed (void *bufferPtr = buffer)
    {
        byte *pb = (byte *)bufferPtr + offset;
        err = Write(device.Handle, pb, (ulong)blockSize);
        if (err < 0)
        {
            // Negative result is a native error code; try recovering once.
            err = Recover(device.Handle, (int)err, 1);
            if (err < 0)
            {
                throw new ArgumentException("Write recovery failed.");
            }
        }
        else if (err != blockSize)
        {
            // NOTE(review): short writes are silently accepted here (the branch
            // is intentionally empty and the partial count is returned) —
            // callers must check the return value.
        }
    }
    return((int)err);
}
/// <summary>
/// Configures an explicit audio output device. If the module paramater is NULL,
/// audio output will be moved to the device specified by the device identifier string immediately.
/// This is the recommended usage. A list of adequate potential device strings can be obtained with
/// <see cref="EnumAudioDeviceList" />.
/// However passing NULL is supported in LibVLC version 2.2.0 and later only; in earlier versions, this function would
/// have no effects when the module parameter was NULL.
/// If the module parameter is not NULL, the device parameter of the corresponding audio output, if it exists, will be
/// set to the specified string.
/// Note that some audio output modules do not have such a parameter (notably MMDevice and PulseAudio).
/// A list of adequate potential device strings can be obtained with <see cref="GetAudioDeviceList" />.
/// </summary>
public void SetAudioDevice(AudioOutput audioOutput, AudioDevice audioDevice) =>
    VlcMediaPlayer.SetAudioDevice(audioOutput, audioDevice);
/// <summary>
/// Renders the hotkey as a pipe-separated description appended to the base
/// representation (same format as before, built via interpolation).
/// </summary>
public override string ToString()
{
    return $"{base.ToString()} | Type : Audio Hotkey | Name : {Name} | Audio Device : {AudioDevice.ToString()} | Files : {Files} | Time Start : {StartingTime} | Volume : {Volume}";
}
/// <summary>
/// Constructor: captures the device and the kind of change being reported.
/// </summary>
/// <param name="dev">The audio device the event refers to.</param>
/// <param name="type">The kind of device event that occurred.</param>
public AudioDeviceChangedEventArgs(AudioDevice dev, AudioDeviceEventType type)
{
    Device = dev;
    EventType = type;
}
/// <summary>
/// Tracks the user's audio-device selection from the combo box.
/// NOTE(review): SelectedItem is cast unconditionally — presumably the combo
/// only ever contains AudioDevice items; a cleared selection would throw here.
/// </summary>
private void comboBox_screenAudioDevice_SelectedIndexChanged(object sender, EventArgs e)
{
    mAudioDevice = (AudioDevice)comboBox_screenAudioDevice.SelectedItem;
}
/// <summary>
/// Convenience overload: sets the default output device by its Id.
/// </summary>
/// <param name="device">Device whose Id is forwarded to the Id-based overload.</param>
/// <returns>Result of the Id-based overload.</returns>
public bool SetDefaultOutputDevice(AudioDevice device) => SetDefaultOutputDevice(device.Id);
/// <summary>
/// Create a new game
/// </summary>
/// <param name="intendedWidth">The intended width of the game</param>
/// <param name="intendedHeight">The intended height of the game</param>
/// <param name="title">The window title</param>
/// <param name="fps">The number of updates to be called every second</param>
/// <param name="fullscreen">Whether or not the game will start in fullscreen</param>
/// <param name="vsync">Whether or not VSync is enabled</param>
public Game(int intendedWidth, int intendedHeight, string title = "Created with Inferno", int fps = 30, bool fullscreen = false, bool vsync = true)
{
    //Set my "Me" reference
    Instance = this;

    //Scaling: virtual resolution the game renders at regardless of window size
    VirtualWidth = intendedWidth;
    VirtualHeight = intendedHeight;

    //Create Keys Array
    Keys = new List <Key>();

    //Configure states
    _currentState = null;

    //Create Graphics Device
    GraphicsDevice = new GraphicsDevice();

    //Create Audio Device
    AudioDevice = new AudioDevice();

    //Register events for focus pause: losing focus pauses game + audio,
    //regaining focus resumes both (only when FocusPause is enabled).
    OnDeactivated += (sender, e) =>
    {
        if (!FocusPause)
        {
            return;
        }
        Paused = true;
        AudioDevice.PauseAll();
    };
    OnActivated += (sender, e) =>
    {
        if (!FocusPause)
        {
            return;
        }
        Paused = false;
        AudioDevice.ResumeAll();
    };

    //Platform game
    PlatformGame = new PlatformGame(this);

    //Create GameWindow (requires the graphics device created above)
    Window = new GameWindow(this, GraphicsDevice, title, intendedWidth, intendedHeight);

    //Fps and fullscreen
    FramesPerSecond = fps;
    Window.Fullscreen = fullscreen;

    //Attach window to device
    GraphicsDevice.AttachWindow(Window);

    //Create Renderer
    Renderer = new Renderer(GraphicsDevice);

    //Create render target at the virtual resolution
    _baseRenderTarget = new RenderTarget(VirtualWidth, VirtualHeight);
}
/// <summary>
/// Decompresses MPEG audio from a stream and plays it back through the given
/// AudioDevice on a newly created thread. Any previous playback is stopped
/// first; if either argument is null nothing further happens.
/// </summary>
/// <param name="in">The InputStream that provides the MPEG audio data.</param>
/// <param name="dev">The AudioDevice to use to sound the decompressed data.</param>
protected internal virtual void play(System.IO.Stream @in, AudioDevice dev)
{
    stopPlayer();
    if (@in == null || dev == null)
    {
        return;
    }
    player = new Player(@in, dev);
    playerThread = createPlayerThread();
    playerThread.Start();
}
/// <summary>
/// Re-scans portaudio and rebuilds the internal device list, matching
/// re-discovered devices back to existing entries by name so their guids stay
/// stable. Exits the application if portaudio returns no device list at all.
/// </summary>
/// <param name="force">When true, refresh even if the broker is not fully loaded.</param>
public static void refresh_devices(bool force=false)
{
    if (! force && !Broker.get_instance().fully_loaded)
        return;
    // Mark everything dead; devices still present after the rescan are revived below.
    foreach (InternalAudioDevice device in _devices)
        device.is_alive = false;
    Utils.api_exec("pa", "rescan");
    XmlDocument doc=null;
    try{
        doc = XmlUtils.GetDocument(Utils.api_exec("pa", "devlist xml"));
    }catch(KeyNotFoundException){
        // No usable microphone/speaker: the app cannot run, so inform and exit.
        MessageBox.Show("Portaudio did not return a device list most likely because it cannot find an active microphone or speaker it can use, FSClient will now exit.", "Missing Device List", MessageBoxButton.OK, MessageBoxImage.Error);
        Environment.Exit(-1);
        return;
    }
    XmlNode node = XmlUtils.GetNode(doc, "devices", 0);
    foreach (XmlNode child in node.ChildNodes)
    {
        AudioDevice dev = new AudioDevice(
            cur_guid,
            XmlUtils.GetNodeAttrib(child, "name"),
            int.Parse(XmlUtils.GetNodeAttrib(child, "inputs")),
            int.Parse(XmlUtils.GetNodeAttrib(child, "outputs"))
            );
        int dev_id = int.Parse(XmlUtils.GetNodeAttrib(child, "id"));
        bool found_device = false;
        foreach (InternalAudioDevice device in _devices)//TODO: Probably should sort here
        {
            // Match by name against not-yet-revived entries only, so duplicate
            // device names each claim a distinct slot.
            if (device.device.name == dev.name && device.is_alive == false)
            {
                device.is_alive = true;
                device.id = dev_id;
                found_device = true;
                break;
            }
        }
        if (!found_device)
        {
            InternalAudioDevice new_device = new InternalAudioDevice { device = dev, is_alive = true, id = dev_id };
            cur_guid++;
            _devices.Add(new_device);
        }
    }
    // Publish only the devices that survived (or appeared in) this rescan.
    _pub_devices = (from c in _devices where c.is_alive select c.device).ToArray();
}
/// <summary>
/// Scene setup: subscribes to key-press events; graphics and audio devices
/// are accepted but not used by this scene.
/// </summary>
public override void Setup(GraphicsDevice graphics, AudioDevice audio, Window window) =>
    window.OnKeyPressed += Window_OnKeyPressed;
/// <summary>
/// Asks portaudio (via the background API) to prepare a stream between the
/// given input and output devices, addressed by their internal ids.
/// </summary>
/// <param name="indev">Capture device.</param>
/// <param name="outdev">Playback device.</param>
public static void PrepareStream(AudioDevice indev, AudioDevice outdev)
{
    string command = "preparestream #" + guid_to_id(indev.guid) + " #" + guid_to_id(outdev.guid);
    Utils.bgapi_exec("pa", command);
}
/// <summary>
/// Applies input/output device selection: both together when both are given,
/// otherwise whichever single side is non-null; no-op when both are null.
/// </summary>
/// <param name="indev">Input device, or null to leave input unchanged.</param>
/// <param name="outdev">Output device, or null to leave output unchanged.</param>
public static void SetInAndOutDev(AudioDevice indev, AudioDevice outdev)
{
    if (indev == null && outdev == null)
    {
        return;
    }
    if (indev != null && outdev != null)
    {
        SetInAndOutDev(indev.guid, outdev.guid);
    }
    else if (indev != null)
    {
        indev.SetInDev();
    }
    else
    {
        outdev.SetOutDev();
    }
}
/// <summary>
/// Native interop: sets the volume on the given device.
/// NOTE(review): semantics of the return code and the valid volume range are
/// defined by the native library — confirm against its documentation.
/// </summary>
public static extern int AudioSetVolume(AudioDevice device, int volume);
/// <summary>
/// Forwards the device selection to the native Voximplant audio manager,
/// passing the enum's underlying integer value.
/// </summary>
/// <param name="audioDevice">Device to route audio through.</param>
public override void SelectAudioDevice(AudioDevice audioDevice) =>
    voximplant_audio_manager_select_audio_device((int)audioDevice);
/// <summary>
/// Restores the persisted audio-device choice into the current field.
/// </summary>
private async Task SetAudioDeviceAsync()
{
    this.audioDevice = await this.GetSavedAudioDeviceAsync();
}
/// <summary>
/// Dialog for renaming a device. The selected device is stored BEFORE
/// InitializeComponent so any initialization logic can already read it.
/// </summary>
/// <param name="selectedDevice">Device whose name is being changed.</param>
public ChangeDeviceName(AudioDevice selectedDevice)
{
    selectedAudioDevice = selectedDevice;
    InitializeComponent();
}
/// <summary>
/// Command that makes the given device the default multimedia (Console role)
/// device; text and icon come from localized resources.
/// </summary>
/// <param name="manager">Device manager the base command acts through.</param>
/// <param name="device">Device to set as default.</param>
public SetAsDefaultMultimediaDeviceCommand(AudioDeviceManager manager, AudioDevice device)
    : base(manager, device, AudioDeviceRole.Console)
{
    Text = Resources.SetAsDefaultMultimediaDevice;
    Image = Resources.DefaultMultimediaDevice.ToBitmap();
}
/// <summary>
/// Enumerates libvlc audio outputs and their devices into _audioOutputs.
/// Outputs that report zero devices are skipped (existing behavior).
/// Any failure is logged rather than propagated.
/// </summary>
private void loadAudioOutputs()
{
    try
    {
        _audioOutputs = new ArrayList();
        // Keep the list head separate from the walk cursor: the native list
        // must be released via its HEAD pointer.
        IntPtr pList = VLCLibrary.Instance.audio_output_list_get(_instance.Handle);
        if (pList != IntPtr.Zero)
        {
            try
            {
                IntPtr pCurrent = pList;
                VLCLibrary.libvlc_audio_output_t output;
                do
                {
                    output = (VLCLibrary.libvlc_audio_output_t)Marshal.PtrToStructure(pCurrent, typeof(VLCLibrary.libvlc_audio_output_t));
                    AudioOutput aOutput = new AudioOutput(output);
                    int mdeviceCount = VLCLibrary.Instance.audio_output_device_count(_instance.Handle, output.psz_name);
                    if (mdeviceCount > 0)
                    {
                        for (int k = 0; k < mdeviceCount; k++)
                        {
                            AudioDevice device = new AudioDevice();
                            IntPtr pId = VLCLibrary.Instance.audio_output_device_id(_instance.Handle, output.psz_name, k);
                            device.deviceId = Marshal.PtrToStringAnsi(pId);
                            IntPtr pLongName = VLCLibrary.Instance.audio_output_device_longname(_instance.Handle, output.psz_name, k);
                            device.deviceName = Marshal.PtrToStringAnsi(pLongName);
                            aOutput.AddDevice(device);
                        }
                        _audioOutputs.Add(aOutput);
                    }
                    pCurrent = output.p_next;
                } while (output.p_next != IntPtr.Zero);
            }
            finally
            {
                // BUG FIX: the original released the walk cursor, which is
                // always IntPtr.Zero after the loop, leaking the list; release
                // the head instead, and do so even if enumeration throws.
                VLCLibrary.Instance.audio_output_list_release(pList);
            }
        }
    }
    catch (Exception e)
    {
        Logger.WriteToLog("Error during loading Audio Outputs: " + e.Message);
    }
}
/// <summary>
/// View-model wrapping a single audio device.
/// </summary>
/// <param name="device">Underlying device this view-model exposes.</param>
public AudioDeviceViewModel(AudioDevice device)
{
    _device = device;
}
/// <summary>
/// Native interop: reads the device's volume into <paramref name="volume"/>.
/// NOTE(review): return-code semantics and volume scale are defined by the
/// native library — confirm against its documentation.
/// </summary>
public static extern int AudioGetVolume(AudioDevice device, ref int volume);
/// <summary>
/// Interactive audio demo: lists devices, streams background music with 3D
/// attenuation, plays clips / live microphone input on key presses, and moves
/// the listener with a sprite via arrow keys.
/// </summary>
static void Main(string[] args)
{
    // Enumerate playback and capture devices to the console.
    foreach (string device in AudioDevice.Devices)
    {
        Console.WriteLine(device);
    }
    foreach (string device in AudioDevice.CaptureDevices)
    {
        Console.WriteLine(device);
    }
    // This device acts as the listener ("ear") that follows the sprite below.
    AudioDevice playerEar = new AudioDevice();
    Console.WriteLine(AudioDevice.CurrentDevice.Name);
    AudioClip clip = new AudioClip("Assets/jumping.ogg");
    AudioClip laser = new AudioClip("Assets/laser.wav");
    AudioClip backgroundMusic = new AudioClip("Assets/test_wikipedia_mono.ogg");
    Console.WriteLine(clip.Channels);
    Console.WriteLine(clip.Frequency);
    Console.WriteLine(clip.Samples);
    Console.WriteLine(clip.Duration);
    AudioSource source = new AudioSource();
    source.Play(clip);
    // 5-second capture buffer at 22050 Hz mono for the push-to-talk feature.
    AudioCapture microphone = new AudioCapture(22050, 1, 5f);
    AudioBuffer micBuffer = new AudioBuffer();
    microphone.Start();
    AudioSource background = new AudioSource();
    Window window = new Window(1024, 576, "Aiv.Audio Example");
    // Position the music at screen centre with distance-based attenuation.
    background.Position = new OpenTK.Vector3(window.Width / 2, window.Height / 2, 0);
    background.ReferenceDistance = 50;
    background.MaxDistance = 100;
    background.RolloffFactor = 1f;
    Sprite sprite = new Sprite(100, 100);
    while (window.opened)
    {
        // Stream the long music clip incrementally each frame.
        background.Stream(backgroundMusic, window.deltaTime);
        if (window.GetKey(KeyCode.Space))
        {
            source.Play(clip);
        }
        if (window.GetKey(KeyCode.Return))
        {
            source.Play(laser);
        }
        if (window.GetKey(KeyCode.ShiftRight))
        {
            // Play back whatever the microphone captured so far.
            microphone.Read(micBuffer);
            source.Play(micBuffer);
        }
        // Arrow keys move the sprite (and therefore the listener) at 100 px/s.
        if (window.GetKey(KeyCode.Right))
        {
            sprite.position.X += 100 * window.deltaTime;
        }
        if (window.GetKey(KeyCode.Left))
        {
            sprite.position.X -= 100 * window.deltaTime;
        }
        if (window.GetKey(KeyCode.Up))
        {
            sprite.position.Y -= 100 * window.deltaTime;
        }
        if (window.GetKey(KeyCode.Down))
        {
            sprite.position.Y += 100 * window.deltaTime;
        }
        // Keep the listener and the one-shot source glued to the sprite.
        playerEar.Position = new OpenTK.Vector3(sprite.position.X, sprite.position.Y, 0);
        source.Position = playerEar.Position;
        sprite.DrawSolidColor(1f, 0, 0);
        window.Update();
    }
}
/// <summary>
/// Initializes the audio subsystem: builds the JMOD sound system with the
/// given distance range, channel count, and custom file reader, stores the
/// device, and wraps the sound system in the glue layer.
/// </summary>
/// <param name="audioDevice">Output device to associate with this manager.</param>
/// <param name="minMaxDistance">Min/max attenuation distances for 3D sound.</param>
/// <param name="nSoundChannels">Number of mixing channels to allocate.</param>
/// <param name="customReadFileMethod">Callback used to read sound files.</param>
public void Init(AudioDevice audioDevice, Vector2 minMaxDistance, int nSoundChannels, JMOD.CustomReadFileMethodDelegate customReadFileMethod)
{
    SoundSystem = new SoundSystem(minMaxDistance, nSoundChannels, customReadFileMethod);
    AudioDevice = audioDevice;
    SystemGlue = new JMODSystem(SoundSystem);
}
/// <summary>
/// Event args for a default-device change: records which kind of device and
/// which role the new default applies to, on top of the base device args.
/// </summary>
/// <param name="device">The device that became the default.</param>
/// <param name="kind">Category of device (e.g. playback vs capture).</param>
/// <param name="role">Role the default applies to.</param>
public DefaultAudioDeviceEventArgs(AudioDevice device, AudioDeviceKind kind, AudioDeviceRole role) : base(device)
{
    _kind = kind;
    _role = role;
}
/// <summary>
/// Registers a device with this output's device collection.
/// </summary>
/// <param name="device">Device to add.</param>
public void AddDevice(AudioDevice device) => _outputDevices.Add(device);