Пример #1
0
        /// <summary>
        /// Verifies that a mastering voice can be created for an explicitly selected device
        /// on both XAudio2.7 and XAudio2.8.
        /// </summary>
        public void CanSelectXAudioDevice()
        {
            // The device identifier passed to CreateMasteringVoice; its type depends on the XAudio2 version.
            object deviceId = null;

            if (_xaudio2 is XAudio2_7 xaudio27) // pattern match avoids the redundant second cast
            {
                int deviceCount   = xaudio27.GetDeviceCount();
                var deviceDetails = xaudio27.GetDeviceDetails(deviceCount - 1);

                Debug.WriteLine(deviceDetails.DisplayName);

                // For XAudio2.7 the device id is a device index in the range 0 .. (deviceCount - 1).
                deviceId = 0;
            }
            else if (_xaudio2 is XAudio2_8)
            {
                // For XAudio2.8 the device id is the endpoint device path of the default render device.
                deviceId = MMDeviceEnumerator.DefaultAudioEndpoint(DataFlow.Render, Role.Console).DevicePath;
            }
            else
            {
                Assert.Fail("Invalid XAudio Version");
            }

            // CreateMasteringVoice returns a disposable voice; dispose it to release the device.
            using (
                var masteringVoice = _xaudio2.CreateMasteringVoice(CSCore.XAudio2.XAudio2.DefaultChannels,
                                                                   CSCore.XAudio2.XAudio2.DefaultSampleRate, deviceId))
            {
                Debug.WriteLine("MasteringVoice created.");
            }
        }
Пример #2
0
        private void button_Click(object sender, RoutedEventArgs e)
        {
            // Tear down any capture that is already running before starting a new one.
            Stop();

            // WasapiLoopbackCapture records whatever the render device is playing.
            _soundIn = new WasapiLoopbackCapture();
            //Our loopback capture opens the default render device by default so the following is not needed
            _soundIn.Device = MMDeviceEnumerator.DefaultAudioEndpoint(DataFlow.Render, Role.Console);
            _soundIn.Initialize();

            var soundInSource = new SoundInSource(_soundIn);
            ISampleSource source = soundInSource.ToSampleSource().AppendSource(x => new PitchShifter(x), out _pitchShifter);

            SetupSampleSource(source);

            // Drain the source whenever data arrives; without these reads SingleBlockRead
            // never fires and the spectrum provider stays empty.
            byte[] readBuffer = new byte[_source.WaveFormat.BytesPerSecond / 2];
            soundInSource.DataAvailable += (s, aEvent) =>
            {
                int bytesRead;
                do
                {
                    bytesRead = _source.Read(readBuffer, 0, readBuffer.Length);
                } while (bytesRead > 0);
            };

            // Begin capturing and start the UI refresh timer.
            _soundIn.Start();

            timer.Start();
        }
Пример #3
0
        /// <summary>
        /// Type initializer: binds the singleton output device to the system default
        /// multimedia render endpoint and starts it.
        /// </summary>
        static OutputDevice()
        {
            MMDevice defaultEndpoint = MMDeviceEnumerator.DefaultAudioEndpoint(DataFlow.Render, Role.Multimedia);

            // Assign the singleton first, then start it (preserves original ordering).
            ActiveDevice = new OutputDevice();
            ActiveDevice.Start(defaultEndpoint);
        }
        /// <summary>
        /// Enumerates all active render devices together with the current default endpoint.
        /// </summary>
        public static (IEnumerable <MMDevice> devices, MMDevice defaultDevice) GetDevices()
        {
            // Query the default endpoint first, then the full list (preserves original call order).
            MMDevice defaultEndpoint = MMDeviceEnumerator.DefaultAudioEndpoint(DataFlow.Render, Role.Multimedia);
            var activeDevices = MMDeviceEnumerator.EnumerateDevices(DataFlow.Render, DeviceState.Active);

            return (activeDevices, defaultEndpoint);
        }
        /// <summary>
        /// Stores the main form reference and pushes the active render devices
        /// (plus the default endpoint) to it.
        /// </summary>
        public void GetDevices(IMainForm mainFormIn)
        {
            mainForm = mainFormIn;

            MMDevice defaultEndpoint = MMDeviceEnumerator.DefaultAudioEndpoint(DataFlow.Render, Role.Multimedia);
            var activeDevices = MMDeviceEnumerator.EnumerateDevices(DataFlow.Render, DeviceState.Active);

            mainForm.AddRecordingDevices(activeDevices, defaultEndpoint);
        }
        // Captures the default render device via WASAPI loopback, feeds the audio into an
        // AC-3 frame encoder, and plays the encoded stream through a render device whose
        // friendly name contains "Digital" (AC-3 over S/PDIF pass-through).
        static void Main(string[] args)

        {
            // Loopback source: the default multimedia render endpoint.
            MMDevice dev = MMDeviceEnumerator.DefaultAudioEndpoint(DataFlow.Render, Role.Multimedia);

            capture        = new WasapiLoopbackCapture();
            capture.Device = dev;
            capture.Initialize();

            SoundInSource soundInSource = new SoundInSource(capture);

            // SingleBlockNotificationStream raises SingleBlockRead for every sample block
            // pulled through it; NStream_SingleBlockRead consumes those blocks.
            nStream = new SingleBlockNotificationStream(soundInSource.ToSampleSource());
            final   = nStream.ToWaveSource();
            nStream.SingleBlockRead     += NStream_SingleBlockRead;
            soundInSource.DataAvailable += encode;
            // Half a second worth of bytes; used to drain the source so events keep firing.
            trashBuf = new byte[final.WaveFormat.BytesPerSecond / 2];

            Console.WriteLine($"sample rate:{capture.WaveFormat.SampleRate}");
            Console.WriteLine($"bits per sample:{capture.WaveFormat.BitsPerSample }");
            Console.WriteLine($"channels:{capture.WaveFormat.Channels }");
            Console.WriteLine($"bytes per sample:{capture.WaveFormat.BytesPerSample }");
            Console.WriteLine($"bytes per second:{capture.WaveFormat.BytesPerSecond }");
            Console.WriteLine($"AudioEncoding:{capture.WaveFormat.WaveFormatTag  }");


            // Configure the AC-3 encoder: 6 channels (3 front / 2 rear + LFE), float samples,
            // at the capture device's sample rate.
            EncodingContext context = FrameEncoder.GetDefaultsContext();

            context.Channels        = 6;
            context.SampleRate      = capture.WaveFormat.SampleRate;
            context.AudioCodingMode = AudioCodingMode.Front3Rear2;
            context.HasLfe          = true;
            context.SampleFormat    = A52SampleFormat.Float;
            enc = new FrameEncoderFloat(ref context);

            //_writer = new WaveWriter("test.ac3", final.WaveFormat);


            capture.Start();

            // Output buffer advertises AC-3-over-S/PDIF so the receiver bitstreams it; 20 ms deep.
            wBuffSrc = new WriteableBufferingSource(new WaveFormat(capture.WaveFormat.SampleRate, capture.WaveFormat.BitsPerSample, capture.WaveFormat.Channels, AudioEncoding.WAVE_FORMAT_DOLBY_AC3_SPDIF), (int)capture.WaveFormat.MillisecondsToBytes(20));

            w = new WasapiOut2(false, AudioClientShareMode.Shared, 20);

            // NOTE(review): Single() throws if zero or more than one active render device
            // has "Digital" in its friendly name — intentional hard requirement, it seems.
            w.Device = MMDeviceEnumerator.EnumerateDevices(DataFlow.Render, DeviceState.Active).Where(x => x.FriendlyName.Contains("Digital")).Single();
            AudioClient a = AudioClient.FromMMDevice(w.Device); // NOTE(review): 'a' is never used afterwards

            w.Initialize(wBuffSrc);
            w.Play();


            // Run the encoder loop in the background until the user presses Enter.
            Task.Run(async() => await encoderThread());
            //encodeSinus();

            Console.ReadLine();

            System.Environment.Exit(0);
        }
Пример #7
0
        /// <summary>
        /// Enumerates the audio sessions of the default render endpoint using CSCore.
        /// </summary>
        /// <param name="callback">Invoked once for each audio session.</param>
        static void GetAudioSessions(EnumAudioSessionDelegation callback)
        {
            // Resolve the default multimedia render endpoint and its session enumerator.
            MMDevice device = MMDeviceEnumerator.DefaultAudioEndpoint(CSCore.CoreAudioAPI.DataFlow.Render, CSCore.CoreAudioAPI.Role.Multimedia);
            AudioSessionManager2 manager = AudioSessionManager2.FromMMDevice(device);
            AudioSessionEnumerator sessions = manager.GetSessionEnumerator();

            // Hand every session to the caller-supplied delegate.
            foreach (AudioSessionControl session in sessions)
            {
                callback(session);
            }
        }
        /// <summary>
        /// Pushes the active render devices (and the default endpoint) to the main form,
        /// if one is attached.
        /// </summary>
        public void GetDevices()
        {
            // Nothing to update when the UI has not been attached yet.
            if (mainForm == null)
            {
                return;
            }

            var activeDevices = MMDeviceEnumerator.EnumerateDevices(DataFlow.Render, DeviceState.Active);

            if (activeDevices.Count > 0)
            {
                var defaultEndpoint = MMDeviceEnumerator.DefaultAudioEndpoint(DataFlow.Render, Role.Multimedia);
                mainForm.AddRecordingDevices(activeDevices, defaultEndpoint);
            }
        }
Пример #9
0
        public MainWindow()
        {
            // Expose this window as an app-wide singleton.
            Instance = this;

            InitializeComponent();

            // The default console render endpoint feeds both the analyzer and the mixer.
            MMDevice renderDevice = MMDeviceEnumerator.DefaultAudioEndpoint(DataFlow.Render, Role.Console);

            // Analyzer setup.
            Analyzer.CreateAnalyserBars();
            Analyzer.InitAudioSource(renderDevice);

            // Mixer setup.
            Mixer.CreateMixerChannels(renderDevice);
        }
Пример #10
0
        /// <summary>
        /// Opens a WASAPI loopback capture on the default render endpoint, normalizes the
        /// stream to 44.1 kHz stereo and wires up the block-read notification pipeline.
        /// </summary>
        /// <exception cref="NullReferenceException">No channel configuration could be opened.</exception>
        public void Initialize()
        {
            MMDevice   captureDevice = MMDeviceEnumerator.DefaultAudioEndpoint(DataFlow.Render, Role.Console);
            WaveFormat deviceFormat  = captureDevice.DeviceFormat;

            _audioEndpointVolume = AudioEndpointVolume.FromDevice(captureDevice);

            //DarthAffe 07.02.2018: This is a really stupid workaround to (hopefully) finally fix the surround driver issues
            // Probe channel counts 1..12; the highest count the driver accepts wins.
            for (int i = 1; i < 13; i++)
            {
                try
                {
                    var candidate = new WasapiLoopbackCapture(100, new WaveFormat(deviceFormat.SampleRate, deviceFormat.BitsPerSample, i));
                    _capture?.Dispose(); // release the previously accepted capture instead of leaking it
                    _capture = candidate;
                }
                catch { /* We're just trying ... */ }
            }

            if (_capture == null)
            {
                throw new NullReferenceException("Failed to initialize WasapiLoopbackCapture");
            }

            _capture.Initialize();

            _soundInSource = new SoundInSource(_capture)
            {
                FillWithZeros = false
            };
            // Normalize to 44.1 kHz stereo; skip the resampler when the device already runs at 44.1 kHz.
            _source = _soundInSource.WaveFormat.SampleRate == 44100
                          ? _soundInSource.ToStereo()
                          : _soundInSource.ChangeSampleRate(44100).ToStereo();

            // Notification stream raises SingleBlockRead for every sample block pulled through it.
            _stream = new SingleBlockNotificationStream(_source.ToSampleSource());
            _stream.SingleBlockRead += StreamOnSingleBlockRead;

            _source = _stream.ToWaveSource();

            // Drain the source on every DataAvailable so the notifications keep firing.
            byte[] buffer = new byte[_source.WaveFormat.BytesPerSecond / 2];
            _soundInSource.DataAvailable += (s, aEvent) =>
            {
                while ((_source.Read(buffer, 0, buffer.Length)) > 0)
                {
                    ;
                }
            };

            _capture.Start();
        }
Пример #11
0
        /// <summary>
        /// (Re)starts loopback capture from the default render device and wires the
        /// pitch-shifted sample pipeline.
        /// </summary>
        public void FromDefaultDevice()
        {
            // Tear down any capture that is already running.
            Stop();

            // Loopback capture records whatever the default render device is playing.
            _soundIn = new WasapiLoopbackCapture();
            //Our loopback capture opens the default render device by default so the following is not needed
            _soundIn.Device = MMDeviceEnumerator.DefaultAudioEndpoint(DataFlow.Render, Role.Console);
            _soundIn.Initialize();

            soundInSource = new SoundInSource(_soundIn);
            ISampleSource pitchShifted = soundInSource.ToSampleSource().AppendSource(x => new PitchShifter(x), out _pitchShifter);

            SetupSampleSource(pitchShifted);

            // Reading from the source drives SingleBlockRead, which feeds the spectrum provider.
            buffer = new byte[_source.WaveFormat.BytesPerSecond / 2];
            soundInSource.DataAvailable += SoundInSource_DataAvailable;
        }
Пример #12
0
        // Selects the audio output endpoint by DirectSound device description.
        // Returns false only when 'dev' names a device that is not present and
        // 'usedefault' is false; otherwise (re)creates the output and returns true.
        public bool SetAudioEndpoint(string dev, bool usedefault = false)
        {
#if true
            System.Collections.ObjectModel.ReadOnlyCollection <DirectSoundDevice> list = DirectSoundDeviceEnumerator.EnumerateDevices();

            DirectSoundDevice dsd = null;
            if (dev != null)                                               // active selection
            {
                dsd = list.FirstOrDefault(x => x.Description.Equals(dev)); // find
                if (dsd == null && !usedefault)                            // if not found, and don't use the default (used by constructor)
                {
                    return(false);
                }
            }

            // NOTE(review): comment says 200 ms but the latency argument is 100 — confirm which is intended.
            DirectSoundOut dso = new DirectSoundOut(100, System.Threading.ThreadPriority.Highest);    // seems good quality at 200 ms latency

            // dsd == null here means "use the DirectSound default device".
            if (dsd != null)
            {
                dso.Device = dsd.Guid;
            }
#else
            // Disabled WASAPI alternative kept for reference; the author flagged it as breaking up.
            MMDevice  def = MMDeviceEnumerator.DefaultAudioEndpoint(DataFlow.Render, Role.Console);
            ISoundOut dso = new WasapiOut()
            {
                Latency = 100, Device = def
            };                                                                //BAD breakup
#endif

            // Tear down the previous output before swapping in the new one.
            if (aout != null)                 // clean up last
            {
                aout.Stopped -= Output_Stopped;
                aout.Stop();
                aout.Dispose();
            }

            aout          = dso;
            aout.Stopped += Output_Stopped;

            return(true);
        }
Пример #13
0
        /// <summary>
        /// Opens a WASAPI loopback capture on the default render endpoint and builds a
        /// 44.1 kHz stereo notification stream over it.
        /// </summary>
        /// <exception cref="NullReferenceException">No channel configuration could be opened.</exception>
        public void Initialize()
        {
            MMDevice   captureDevice = MMDeviceEnumerator.DefaultAudioEndpoint(DataFlow.Render, Role.Console);
            WaveFormat deviceFormat  = captureDevice.DeviceFormat;

            _audioEndpointVolume = AudioEndpointVolume.FromDevice(captureDevice);

            //DarthAffe 07.02.2018: This is a really stupid workaround to (hopefully) finally fix the surround driver issues
            // Probe channel counts 1..12; the highest count the driver accepts wins.
            for (int i = 1; i < 13; i++)
            {
                try
                {
                    var candidate = new WasapiLoopbackCapture(100, new WaveFormat(deviceFormat.SampleRate, deviceFormat.BitsPerSample, i));
                    _capture?.Dispose(); // release the previously accepted capture instead of leaking it
                    _capture = candidate;
                }
                catch
                { /* probing only - the driver rejects unsupported channel counts */ }
            }

            if (_capture == null)
            {
                throw new NullReferenceException("Failed to initialize WasapiLoopbackCapture");
            }

            _capture.Initialize();
            _soundInSource = new SoundInSource(_capture)
            {
                FillWithZeros = false
            };

            // Normalize to 44.1 kHz stereo; skip the resampler when the device already runs at 44.1 kHz.
            _stream = _soundInSource.WaveFormat.SampleRate == 44100
                ? new SingleBlockNotificationStream(_soundInSource.ToStereo().ToSampleSource())
                : new SingleBlockNotificationStream(_soundInSource.ChangeSampleRate(44100).ToStereo().ToSampleSource());

            _soundInSource.DataAvailable += OnSoundDataAvailable;

            _capture.Start();
        }
 /// <summary>
 /// Returns the default render endpoint for the multimedia role.
 /// </summary>
 public static MMDevice GetDefaultDevice() =>
     MMDeviceEnumerator.DefaultAudioEndpoint(DataFlow.Render, Role.Multimedia);
 /// <summary>
 /// Returns the default rendering device.
 /// </summary>
 /// <returns>Default rendering device (console role).</returns>
 protected override MMDevice GetDefaultDevice() =>
     MMDeviceEnumerator.DefaultAudioEndpoint(DataFlow.Render, Role.Console);
Пример #16
0
 /// <summary>
 /// Returns the default device.
 /// </summary>
 /// <returns>The default capture device (console role).</returns>
 protected virtual MMDevice GetDefaultDevice() =>
     MMDeviceEnumerator.DefaultAudioEndpoint(DataFlow.Capture, Role.Console);
Пример #17
0
        /// <summary>
        /// Caches an audio-session enumerator for the default multimedia render endpoint.
        /// </summary>
        public VolumeMixer()
        {
            MMDevice defaultEndpoint = MMDeviceEnumerator.DefaultAudioEndpoint(DataFlow.Render, Role.Multimedia);
            AudioSessionManager2 sessionManager = AudioSessionManager2.FromMMDevice(defaultEndpoint);

            audioSessionEnumerator = sessionManager.GetSessionEnumerator();
        }