Example #1
 public WAVEFORMATEXTENSIBLE(WAVEFORMATEXTENSIBLE o)
 {
     wFormatTag          = o.wFormatTag;
     nChannels           = o.nChannels;
     nSamplesPerSec      = o.nSamplesPerSec;
     nAvgBytesPerSec     = o.nAvgBytesPerSec;
     nBlockAlign         = o.nBlockAlign;
     wBitsPerSample      = o.wBitsPerSample;
     cbSize              = o.cbSize;
     wValidBitsPerSample = o.wValidBitsPerSample;
     dwChannelMask       = o.dwChannelMask;
     SubFormat           = o.SubFormat;
 }
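The copy constructor above is typically used to clone a device's mix format before overriding individual fields. Below is a minimal sketch (not from the source) of forcing such a clone to 16-bit integer PCM; the field names follow the constructor above, while the capFormat variable and the WORD/DWORD (ushort/uint) field types are assumptions. When the sample size or channel count changes, nBlockAlign and nAvgBytesPerSec should be recomputed so the format stays consistent.

    // Hypothetical sketch: clone a captured mix format and force 16-bit PCM.
    // 'capFormat' and the ushort/uint field types are assumptions, not from the source.
    WAVEFORMATEXTENSIBLE fmt = new WAVEFORMATEXTENSIBLE(capFormat);
    fmt.wFormatTag = WaveFormatTag.WAVE_FORMAT_PCM;
    fmt.wBitsPerSample = 16;
    fmt.wValidBitsPerSample = 16;
    // Keep the derived fields consistent with the new sample size.
    fmt.nBlockAlign = (ushort)(fmt.nChannels * fmt.wBitsPerSample / 8);
    fmt.nAvgBytesPerSec = fmt.nSamplesPerSec * fmt.nBlockAlign;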
Example #2
        public bool IsFormatSupported(AudioClientShareMode shareMode, WAVEFORMATEXTENSIBLE format, ref WAVEFORMATEXTENSIBLE closestMatch)
        {
            int  hr  = _RealClient.IsFormatSupported(shareMode, format, out closestMatch);
            bool ret = false;

            if (hr == 0)        // S_OK: the requested format is supported as-is
            {
                ret = true;
            }
            else if (hr == 1)   // S_FALSE: not supported; closestMatch holds the engine's suggestion
            {
                ret = false;
            }
            else
            {
                Marshal.ThrowExceptionForHR(hr);
            }

            return ret;
        }
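A minimal usage sketch for the wrapper above, assuming the _audioClient field, the MixFormat property, and the Initialize signature that appear in Examples #3 and #7; the fallback to closestMatch is illustrative, not taken from the source.

    // Hypothetical usage: probe the shared-mode mix format and fall back to the
    // engine's suggested closest match (the S_FALSE case) before initializing.
    WAVEFORMATEXTENSIBLE desired = new WAVEFORMATEXTENSIBLE(_audioClient.MixFormat);
    WAVEFORMATEXTENSIBLE closest = desired;
    if (!_audioClient.IsFormatSupported(AudioClientShareMode.Shared, desired, ref closest))
    {
        desired = closest;   // not supported as-is; use the suggested format
    }
    _audioClient.Initialize(AudioClientShareMode.Shared, AudioClientStreamFlags.NoPersist,
        300 /*ms*/ * 10000, 0, desired, Guid.Empty);   // durations are in 100-ns units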
Example #3
        public void Initialize(
            AudioClientShareMode shareMode,
            AudioClientStreamFlags streamFlags,
            long bufferDuration, long periodicity,
            WAVEFORMATEXTENSIBLE format, Guid audioSessionGuid)
        {
            int hr = _RealClient.Initialize(shareMode, streamFlags, bufferDuration, periodicity, format, ref audioSessionGuid);

            Marshal.ThrowExceptionForHR(hr);   // no-op unless hr is a failure (negative) HRESULT

            if ((streamFlags & AudioClientStreamFlags.EventCallback) != 0)
            {
                _audioSampleReady = new AutoResetEvent(false);
                IntPtr eventHandle = CreateEventEx(IntPtr.Zero, "audioSampleReady", CreateEventFlags.None, AccessRight.Synchronize | AccessRight.EventModifyState);
                _audioSampleReady.SafeWaitHandle = new Microsoft.Win32.SafeHandles.SafeWaitHandle(eventHandle, true);

                hr = _RealClient.SetEventHandle(eventHandle);
                Marshal.ThrowExceptionForHR(hr);

                _audioSampleReadyRegistered = ThreadPool.RegisterWaitForSingleObject(
                    _audioSampleReady, new WaitOrTimerCallback(sampleReady), null, -1, false);   // -1 timeout: wait indefinitely
            }
            _isInitialized = true;
        }
Example #4
 public bool IsFormatSupported(AudioClientShareMode shareMode, WAVEFORMATEXTENSIBLE format, ref WAVEFORMATEXTENSIBLE closestMatch)
 {
     int hr = _RealClient.IsFormatSupported(shareMode, format, out closestMatch);
     bool ret = false;
     if (hr == 0) ret = true;
     else if (hr == 1) ret = false;
     else Marshal.ThrowExceptionForHR(hr);
     
     return ret;
 }
Example #5
        public void Initialize(
            AudioClientShareMode shareMode,
            AudioClientStreamFlags streamFlags,
            long bufferDuration, long periodicity,
            WAVEFORMATEXTENSIBLE format, Guid audioSessionGuid)
        {
            int hr = _RealClient.Initialize(shareMode, streamFlags, bufferDuration, periodicity, format, ref audioSessionGuid);
            Marshal.ThrowExceptionForHR(hr);

            if ((streamFlags & AudioClientStreamFlags.EventCallback) != 0)
            {
                _audioSampleReady = new AutoResetEvent(false);
                IntPtr eventHandle = CreateEventEx(IntPtr.Zero, "audioSampleReady", CreateEventFlags.None, AccessRight.Synchronize | AccessRight.EventModifyState);
                _audioSampleReady.SafeWaitHandle = new Microsoft.Win32.SafeHandles.SafeWaitHandle(eventHandle, true);

                hr = _RealClient.SetEventHandle(eventHandle);
                Marshal.ThrowExceptionForHR(hr);

                _audioSampleReadyRegistered = ThreadPool.RegisterWaitForSingleObject(
                    _audioSampleReady, new WaitOrTimerCallback(sampleReady), null, -1, false);
            }
            _isInitialized = true;
        }
Example #6
        public AudioCaptureViewModel()
        {
            _audioInput = new SharedAudioInput();

            _audioInput.DeviceInfoUpdated += (s, e) =>
            {
                Devices = new ObservableCollection<DeviceInfoViewModel>(
                    e.DeviceInfo.FindAll(di => true)//di.DataFlow == EDataFlow.eCapture)
                    .Select(di => new DeviceInfoViewModel(di)));
                addMessage("Devices updated");
            };

            _audioInput.DeviceSelected += (s, e) =>
            {
                addMessage("Device selected: " + e.Device.FriendlyName);
            };
            _audioInput.CaptureStarted += (s, e) =>
            {
                addMessage("Capture started");
            };
            _audioInput.CaptureStopped += (s, e) =>
            {
                addMessage("Capture stopped");

                if (_wavWriters != null)
                {
                    foreach (var wr in _wavWriters) wr.Close();
                }
                _wavWriters = null;
                if (_wavRawWriter != null)
                {
                    _wavRawWriter.Close(); _wavRawWriter = null;
                }
                if (_writer != null) { _writer.Close(); _writer = null; }
            };

            _audioInput.DataUpdated += (s, e) =>
            {
                if (_wavWriters != null)
                {
                    for (int i = 0; i < _wavWriters.Length; i++)
                        _wavWriters[i].Write(e.Data[i]);
                }
                if (_wavRawWriter != null)
                {
                    _wavRawWriter.WriteRawData(e.RawData);
                }
                if (_writer != null) _writer.Write(e.RawData);
            };
            _audioInput.VolumeChanged += (s, e) =>
            {
                Volume = e.Master;
            };

            _audioInput.ErrorOccured += (s, e) =>
            {
                addMessage(e.Exception.Message);
            };

            #region initialize commands
            StartCaptureCommand = new DelegateCommand(
                () =>
                {
                    if (_wavWriters != null)
                    {
                        foreach (var wr in _wavWriters) wr.Close();
                    }

                    _wavWriters = new WaveFileWriter[_audioInput.CapFormat.nChannels];
                    WAVEFORMATEXTENSIBLE fmt = new WAVEFORMATEXTENSIBLE(_audioInput.CapFormat);
                    fmt.wFormatTag = WaveFormatTag.WAVE_FORMAT_PCM;
                    fmt.nChannels = 1;
                    fmt.wBitsPerSample = 16;
                    fmt.wValidBitsPerSample = 16;
                    fmt.nAvgBytesPerSec = (uint)(fmt.nChannels * fmt.nSamplesPerSec * fmt.wBitsPerSample / 8.0);

                    for (int i = 0; i < _wavWriters.Length; i++)
                    {
                        _wavWriters[i] = new WaveFileWriter(fmt.nChannels, (int)fmt.nSamplesPerSec, fmt.wBitsPerSample,
                            DateTime.Now.ToString("yyyyMMdd-HHmmss") + string.Format("_{0}.wav", i));
                    }

                    WAVEFORMATEXTENSIBLE rawFmt = new WAVEFORMATEXTENSIBLE(_audioInput.CapFormat);
                    _wavRawWriter = new WaveFileWriter(rawFmt.nChannels, (int)rawFmt.nSamplesPerSec, rawFmt.wBitsPerSample,
                        DateTime.Now.ToString("yyyyMMdd-HHmmss") + "_raw.wav");

                    _writer = new System.IO.BinaryWriter(new System.IO.FileStream("test.wav", System.IO.FileMode.Create));

                    _audioInput.StartCapture();
                },
                () => { return _selectedDev != null && !_audioInput.Capturing
                    && _selectedDev.State == DeviceState.DEVICE_STATE_ACTIVE; });

            StopCaptureCommand = new DelegateCommand(
                () =>
                {
                    _audioInput.StopCapture();
                },
                () => { return SelectedDev != null && _audioInput.Capturing; });
            #endregion

            Devices = new ObservableCollection<DeviceInfoViewModel>();
            _audioInput.UpdateDeviceInfo();
        }
Example #7
        private void selectDeviceImpl(string devId)
        {
            releaseDevice();

            _capDevice = _devices.GetDevice(devId.Trim());
            int idx = _deviceInfos.FindIndex((di) => { return di.DeviceId == devId; });
            if (_capDevice == null)
            {
#warning Exception handling needed (device not found)
            }
            _capDeviceId = _capDevice.Id;

            // Determine the initialization method suited to this device
            AudioClientStreamFlags streamFlags = AudioClientStreamFlags.NoPersist;
            if (_capDevice.DataFlow == EDataFlow.eRender)
                streamFlags = AudioClientStreamFlags.Loopback |
                    AudioClientStreamFlags.EventCallback; // in practice the event never actually fires
          
            // Format
            if(_audioClient != null) _capDevice.ReleaseAudioClient();

            try
            {
                _audioClient = _capDevice.AudioClient;
                _capFormat = _audioClient.MixFormat;
                _pitchAnalyzer.SampleFrequency = (double)(_capFormat.nSamplesPerSec);

                // Initialize
                _audioClient.Initialize(AudioClientShareMode.Shared,
                    streamFlags, 300 /*ms*/ * 10000, 0, _capFormat, Guid.Empty);
                _capClient = _audioClient.AudioCaptureClient;

                // Raise the event
                DeviceSelectedEventHandler del = DeviceSelected;
                if (del != null)
                {
                    del.Invoke(this, new DeviceSelectedEventArgs(_capDevice, idx));
                }
            }
            catch (System.Runtime.InteropServices.COMException ex)
            {
#warning Error handling needed here
                _audioClient = null;
                _capClient = null;
            }
        }
Example #8
 public WAVEFORMATEXTENSIBLE(WAVEFORMATEXTENSIBLE o)
 {
     wFormatTag = o.wFormatTag;
     nChannels = o.nChannels;
     nSamplesPerSec = o.nSamplesPerSec;
     nAvgBytesPerSec = o.nAvgBytesPerSec;
     nBlockAlign = o.nBlockAlign;
     wBitsPerSample = o.wBitsPerSample;
     cbSize = o.cbSize;
     wValidBitsPerSample = o.wValidBitsPerSample;
     dwChannelMask = o.dwChannelMask;
     SubFormat = o.SubFormat;
 }
        public MultiCaptureViewModel()
        {
            //ExclusiveAudioInput ai = new ExclusiveAudioInput();

            _audioInput = new SharedMultiAudioInput();

            _audioInput.DeviceInfoUpdated += (s, e) =>
            {
                Devices = new ObservableCollection<DeviceInfoViewModel>(
                    e.DeviceInfo.FindAll(di => true)//di.DataFlow == EDataFlow.eCapture)
                    .Select(di => new DeviceInfoViewModel(di)));
                addMessage("Devices updated");

                Volumes = new ObservableCollection<VolumeViewModel>(Devices.Select(di => new VolumeViewModel(di.DeviceId)));
                foreach (var di in e.DeviceInfo)
                {
                    if (!_selecting.ContainsKey(di.DeviceId))
                        _selecting.Add(di.DeviceId, false);
                }
            };

            _audioInput.DeviceSelected += (s, e) =>
            {
                addMessage("Device selected: " + e.Device.Id + "/" + e.Device.FriendlyName);
                _selecting[e.Device.Id] = true;
            };
            _audioInput.CaptureStarted += (s, e) =>
            {
                addMessage("Capture started: " + e.DeviceId);
            };
            _audioInput.CaptureStopped += (s, e) =>
            {
                addMessage("Capture stopped: " + e.DeviceId);
                lock (_wavWriters)
                {
                    if (_wavWriters.ContainsKey(e.DeviceId))
                    {
                        _wavWriters[e.DeviceId].Close();
                    }
                }
            };

            _audioInput.DataUpdated += (s, e) =>
            {
                lock (_wavWriters)
                {
                    if (_wavWriters.ContainsKey(e.DeviceId))
                    {
                        _wavWriters[e.DeviceId].Write(e.Data);
                    }
                }
            };
            _audioInput.VolumeChanged += (s, e) =>
            {
                foreach (var v in Volumes)
                {
                    if (v.DeviceId == e.DeviceId)
                    {
                        v.Master = e.Master;
                    }
                }
            };

            _audioInput.ErrorOccured += (s, e) =>
            {
                addMessage(e.Exception.Message);
            };

            #region initialize commands
            SelectCommand = new DelegateCommand<string>(
                devId => _audioInput.SelectDevice(devId),
                devId => devId != null && _selecting.ContainsKey(devId) && !_selecting[devId] && !_audioInput.Capturing(devId));
            ReleaseCommand = new DelegateCommand<string>(
                devId =>
                {
                    _audioInput.ReleaseDevice(devId);
                    _selecting[devId] = false;
                },
                devId => devId != null && _selecting.ContainsKey(devId) && _selecting[devId] && !_audioInput.Capturing(devId));

            StartCaptureCommand = new DelegateCommand<string>(
                devId =>
                {
                    lock (_wavWriters)
                    {
                        if (_wavWriters.ContainsKey(devId))
                        {
                            _wavWriters[devId].Close();
                            _wavWriters.Remove(devId);
                        }
                    }

                    WAVEFORMATEXTENSIBLE fmt = new WAVEFORMATEXTENSIBLE(_audioInput.GetCapFormat(devId));
                    fmt.wFormatTag = WaveFormatTag.WAVE_FORMAT_PCM;
                    fmt.wBitsPerSample = 16;
                    fmt.wValidBitsPerSample = 16;
                    fmt.nAvgBytesPerSec = (uint)(fmt.nChannels * fmt.nSamplesPerSec * fmt.wBitsPerSample / 8.0);

                    WaveFileWriter writer = new WaveFileWriter(fmt.nChannels, (int)fmt.nSamplesPerSec, fmt.wBitsPerSample,
                        string.Format("{0}.wav", devId));
                    lock (_wavWriters)
                    {
                        _wavWriters.Add(devId, writer);
                    }

                    _audioInput.StartCapture(devId);
                },
                devId =>
                {
                    if (devId != null && !_audioInput.Capturing(devId))
                    {
                        DeviceInfo di = _audioInput.getDeviceInfo(devId);
                        return _selecting.ContainsKey(devId) && _selecting[devId] &&
                            di != null && di.State == DeviceState.DEVICE_STATE_ACTIVE;
                    }
                    return false;
                });

            StopCaptureCommand = new DelegateCommand<string>(
                devId => _audioInput.StopCapture(devId),
                devId =>
                {
                    return devId != null && _audioInput.Capturing(devId);
                });
            #endregion

            Devices = new ObservableCollection<DeviceInfoViewModel>();
            _audioInput.UpdateDeviceInfo();
        }