Example #1
        /// <summary>
        /// Initialises a new instance of the WASAPI capture class
        /// </summary>
        /// <param name="captureDevice">Capture device to use</param>
        public WasapiCapture(MMDevice captureDevice)
        {
            syncContext = SynchronizationContext.Current;
            audioClient = captureDevice.AudioClient;
            ShareMode   = AudioClientShareMode.Shared;

            waveFormat = audioClient.MixFormat;
            var wfe = waveFormat as WaveFormatExtensible;

            if (wfe != null)
            {
                try
                {
                    waveFormat = wfe.ToStandardWaveFormat();
                }
                catch (InvalidOperationException)
                {
                    // couldn't convert to a standard format
                }
            }
        }
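A minimal usage sketch, assuming the NAudio-style MMDeviceEnumerator API these examples are built on (variable names are illustrative):

        // Sketch: construct the capture class from the default capture endpoint.
        var enumerator = new MMDeviceEnumerator();
        MMDevice captureDevice = enumerator.GetDefaultAudioEndpoint(DataFlow.Capture, Role.Console);
        var capture = new WasapiCapture(captureDevice);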
Example #2

        public void StartListeningForPeakLevel(ref AudioClient _audioClient, ref MMDevice Device)
        {
            if (_audioClient != null)
            {
                return;
            }

            // Peak Level is available for recording devices only when they are active
            //if (IsLoopback)
            //    return;

            _audioClient = Device.AudioClient;
            _audioClient.Initialize(AudioClientShareMode.Shared,
                                    AudioClientStreamFlags.None,
                                    100,
                                    100,
                                    _audioClient.MixFormat,
                                    Guid.Empty);

            _audioClient.Start();
        }
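Once Start() has run, the device stays active and its meter can be polled; a sketch assuming the MMDevice.AudioMeterInformation API:

        // Sketch: poll the peak level (0.0 to 1.0) while the client is running.
        float peak = Device.AudioMeterInformation.MasterPeakValue;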
Example #3
        private void CaptureThread(AudioClient client)
        {
            Exception exception = null;

            try
            {
                DoRecording(client);
            }
            catch (Exception e)
            {
                exception = e;
            }
            finally
            {
                client.Stop();
                // don't dispose - the AudioClient only gets disposed when WasapiCapture is disposed
            }
            captureThread = null;
            RaiseRecordingStopped(exception);
            Debug.WriteLine("Stop wasapi");
        }
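The creation of captureThread is not shown here; a hypothetical sketch of how StartRecording might spin it up:

        // Hypothetical sketch: start the thread that runs CaptureThread above.
        captureThread = new Thread(() => CaptureThread(audioClient)) { IsBackground = true };
        captureThread.Start();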
Example #4
        private async Task IdleStreamStop()
        {
            while (this.connectedChannels.Count > 0)
            {
                await Task.Delay(30000);

                FC.Log.Write("Checking For Inactive Audio Clients from total of " + this.connectedChannels.Count, "Bot - Audio");

                DateTime runTime = DateTime.Now;
                foreach (KeyValuePair<ulong, AudioClient> connectedChannel in this.connectedChannels)
                {
                    AudioClient client = connectedChannel.Value;
                    if (client.CurrentStream == null && (!client.StreamInactiveAt.HasValue || (runTime - client.StreamInactiveAt.Value).TotalSeconds > 30))
                    {
                        await this.LeaveAudio(connectedChannel.Key);
                    }
                }
            }

            FC.Log.Write("All Audio Streams Closed", "Bot - Audio");
        }
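The watchdog is presumably launched fire-and-forget once the first channel connects; a hypothetical sketch:

        // Hypothetical sketch: start the idle watchdog without awaiting it.
        _ = Task.Run(() => this.IdleStreamStop());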
Example #5

 private void releaseDeviceImpl()
 {
     if (_capDevice != null)
     {
         if (_capturing)
         {
             stopCaptureImpl();
         }
         _capDevice.Dispose();
     }
     _capDevice = null;
     if (_capClient != null)
     {
         _capClient.Dispose();
     }
     _capClient = null;
     if (_audioClient != null)
     {
         _audioClient.Dispose();
     }
     _audioClient = null;
 }
Example #6
        internal BufferedWriteStream(AudioStream next, AudioClient client, int bufferMillis, CancellationToken cancelToken, Logger logger, int maxFrameSize = 1500)
        {
            //maxFrameSize = 1275 was too limiting at 128kbps,2ch,60ms
            _next          = next;
            _client        = client;
            _ticksPerFrame = OpusEncoder.FrameMillis;
            _logger        = logger;
            _queueLength   = (bufferMillis + (_ticksPerFrame - 1)) / _ticksPerFrame; //Round up

            _cancelTokenSource = new CancellationTokenSource();
            _cancelToken       = CancellationTokenSource.CreateLinkedTokenSource(_cancelTokenSource.Token, cancelToken).Token;
            _queuedFrames      = new ConcurrentQueue<Frame>();
            _bufferPool        = new ConcurrentQueue<byte[]>();
            for (int i = 0; i < _queueLength; i++)
            {
                _bufferPool.Enqueue(new byte[maxFrameSize]);
            }
            _queueLock     = new SemaphoreSlim(_queueLength, _queueLength);
            _silenceFrames = MaxSilenceFrames;

            _task = Run();
        }
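The rounded-up division sizes the pool so it always covers the requested buffer. For example, with a 1000 ms buffer and 20 ms Opus frames:

            // Worked example of the queue-length rounding above:
            int bufferMillis = 1000, frameMillis = 20;                           // 20 ms Opus frames
            int queueLength  = (bufferMillis + (frameMillis - 1)) / frameMillis; // = 50 pooled frames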
Example #7
        private void CleanUp()
        {
            logger.Debug("AudioSource::CleanUp()");

            if (captureDevice != null)
            {
                captureDevice.Dispose();
                captureDevice = null;
            }

            if (audioClient != null)
            {
                audioClient.Dispose();
                audioClient = null;
            }

            if (frameEventWaitHandle != null)
            {
                frameEventWaitHandle.Dispose();
                frameEventWaitHandle = null;
            }
        }
Example #8
        private void CleanupResources()
        {
            if (_createdResampler && _source is DmoResampler)
            {
                ((DmoResampler)_source).DisposeResamplerOnly();
                _source = null;
            }

            if (_renderClient != null)
            {
                _renderClient.Dispose();
                _renderClient = null;
            }
            if (_audioClient != null && _audioClient.BasePtr != IntPtr.Zero)
            {
                try
                {
                    _audioClient.StopNative();
                    _audioClient.Reset();
                }
                catch (CoreAudioAPIException ex)
                {
                    if (ex.ErrorCode != unchecked((int)0x88890001)) // AUDCLNT_E_NOT_INITIALIZED
                    {
                        throw;
                    }
                }
                _audioClient.Dispose();
                _audioClient = null;
            }
            if (_eventWaitHandle != null)
            {
                _eventWaitHandle.Close();
                _eventWaitHandle = null;
            }

            _isInitialized = false;
        }
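The magic number in the catch is the WASAPI HRESULT returned when Stop/Reset is called on a client that was never initialized; naming it makes the intent clearer (a sketch):

        // Sketch: the HRESULT compared against ex.ErrorCode above.
        private const int AUDCLNT_E_NOT_INITIALIZED = unchecked((int)0x88890001);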
Example #9
        /// <summary>
        /// Initializes the capture device. Must be called on the UI (STA) thread.
        /// If not called manually then StartRecording() will call it internally.
        /// </summary>
        public async Task InitAsync()
        {
            if (captureState == WasapiCaptureState.Disposed)
            {
                throw new ObjectDisposedException(nameof(WasapiCaptureRT));
            }
            if (captureState != WasapiCaptureState.Uninitialized)
            {
                throw new InvalidOperationException("Already initialized");
            }

            var icbh = new ActivateAudioInterfaceCompletionHandler(ac2 => InitializeCaptureDevice((IAudioClient)ac2));
            IActivateAudioInterfaceAsyncOperation activationOperation;

            // must be called on UI thread
            NativeMethods.ActivateAudioInterfaceAsync(device, IID_IAudioClient2, IntPtr.Zero, icbh, out activationOperation);
            audioClient = new AudioClient((IAudioClient)(await icbh));

            hEvent = NativeMethods.CreateEventExW(IntPtr.Zero, IntPtr.Zero, 0, EventAccess.EVENT_ALL_ACCESS);
            audioClient.SetEventHandle(hEvent);

            captureState = WasapiCaptureState.Stopped;
        }
Example #10
        private void InitializeCaptureDevice(IAudioClient audioClientInterface)
        {
            var audioClient = new AudioClient((IAudioClient)audioClientInterface);

            if (waveFormat == null)
            {
                this.waveFormat = audioClient.MixFormat;
            }

            long requestedDuration = REFTIMES_PER_MILLISEC * 100;


            if (!audioClient.IsFormatSupported(AudioClientShareMode.Shared, WaveFormat))
            {
                throw new ArgumentException("Unsupported Wave Format");
            }

            var streamFlags = GetAudioClientStreamFlags();

            audioClient.Initialize(AudioClientShareMode.Shared,
                                   streamFlags,
                                   requestedDuration,
                                   0,
                                   this.waveFormat,
                                   Guid.Empty);


            int bufferFrameCount = audioClient.BufferSize;

            this.bytesPerFrame = this.waveFormat.Channels * this.waveFormat.BitsPerSample / 8;
            this.recordBuffer  = new byte[bufferFrameCount * bytesPerFrame];
            Debug.WriteLine(string.Format("record buffer size = {0}", this.recordBuffer.Length));

            // Get back the effective latency from AudioClient
            latencyMilliseconds = (int)(audioClient.StreamLatency / 10000);
        }
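REFTIMES_PER_MILLISEC converts milliseconds into WASAPI's 100-nanosecond REFERENCE_TIME units, which is also why StreamLatency is divided by 10000 above; a sketch of the assumed constant:

        // 1 ms = 10,000 units of 100 ns, so the 100 ms request above
        // amounts to 1,000,000 REFERENCE_TIME units.
        private const long REFTIMES_PER_MILLISEC = 10000;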
Example #11
        /// <summary>
        /// Dispose
        /// </summary>
        public void Dispose()
        {
            if (captureState == WasapiCaptureState.Disposed)
            {
                return;
            }

            try
            {
                StopRecording();

                NativeMethods.CloseHandle(hEvent);
                audioClient?.Dispose();
            }
            catch (Exception ex)
            {
                Debug.WriteLine("Exception disposing WasapiCaptureRT: " + ex.ToString());
            }

            hEvent      = IntPtr.Zero;
            audioClient = null;

            captureState = WasapiCaptureState.Disposed;
        }
Example #12
        private WaveFormat SetupWaveFormat(IWaveSource source, AudioClient audioClient)
        {
            WaveFormat waveFormat = source.WaveFormat;
            WaveFormat closestMatch;
            WaveFormat finalFormat = waveFormat;

            //check whether initial format is supported
            if (!audioClient.IsFormatSupported(_shareMode, waveFormat, out closestMatch))
            {
                //initial format is not supported -> maybe there was some kind of close match ...
                if (closestMatch == null)
                {
                    //no match ... check whether the format of the windows audio mixer is supported
                    //yes ... this gets executed for shared and exclusive mode streams
                    WaveFormat mixformat = audioClient.GetMixFormat();
                    if (mixformat == null || !audioClient.IsFormatSupported(_shareMode, mixformat))
                    {
                        //mixformat is not supported
                        //start generating possible formats

                        mixformat = null;
                        WaveFormatExtensible[] possibleFormats;
                        if (_shareMode == AudioClientShareMode.Exclusive)
                        {
                            //for exclusive mode streams use the DeviceFormat of the initialized MMDevice
                            //as base for further possible formats
                            var deviceFormat = Device.DeviceFormat;

                            //generate some possible formats based on the samplerate of the DeviceFormat
                            possibleFormats = GetPossibleFormats(deviceFormat.SampleRate, deviceFormat.Channels);
                            if (!CheckForSupportedFormat(audioClient, possibleFormats, out mixformat))
                            {
                                //none of the tested formats were supported
                                //try some different samplerates
                                List<WaveFormatExtensible> waveFormats = new List<WaveFormatExtensible>();
                                foreach (var sampleRate in new[] { 44100, 48000, 96000, 192000 })
                                {
                                    waveFormats.AddRange(GetPossibleFormats(sampleRate, deviceFormat.Channels));
                                }

                                //assign the generated formats with samplerates 44.1kHz, 48kHz, 96kHz and 192kHz to
                                //the possibleFormats array which will be used below
                                possibleFormats = waveFormats.ToArray();
                            }
                        }
                        else
                        {
                            //for shared mode streams, generate some formats based on the initial waveFormat
                            possibleFormats = GetPossibleFormats(waveFormat.SampleRate, waveFormat.Channels);
                        }

                        if (mixformat == null)
                        {
                            if (!CheckForSupportedFormat(audioClient, possibleFormats, out mixformat))
                            {
                                throw new NotSupportedException("Could not find a supported format.");
                            }
                        }
                    }

                    finalFormat = mixformat;
                }
                else
                {
                    finalFormat = closestMatch;
                }

                //todo: test channel matrix conversion
                ChannelMatrix channelMatrix = null;
                if (UseChannelMixingMatrices)
                {
                    try
                    {
                        channelMatrix = ChannelMatrix.GetMatrix(_source.WaveFormat, finalFormat);
                    }
                    catch (Exception)
                    {
                        Debug.WriteLine("No channelmatrix was found.");
                    }
                }
                DmoResampler resampler = channelMatrix != null
                    ? new DmoChannelResampler(_source, channelMatrix, finalFormat)
                    : new DmoResampler(_source, finalFormat);
                resampler.Quality = 60;

                _source           = resampler;
                _createdResampler = true;

                return finalFormat;
            }

            return finalFormat;
        }
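CheckForSupportedFormat itself is not shown; a plausible implementation just probes each candidate until the device accepts one (a hypothetical sketch, not the library's actual code):

        // Hypothetical sketch of the helper used above: return the first
        // candidate format that IsFormatSupported accepts for the share mode.
        private bool CheckForSupportedFormat(AudioClient audioClient,
                                             IEnumerable<WaveFormatExtensible> formats,
                                             out WaveFormat foundMatch)
        {
            foundMatch = null;
            foreach (WaveFormatExtensible format in formats)
            {
                if (audioClient.IsFormatSupported(_shareMode, format))
                {
                    foundMatch = format;
                    return true;
                }
            }
            return false;
        }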
Example #13
        internal async Task FinishConnectAudio(int id, string url, string token)
        {
            var voiceState = GetVoiceState(Discord.CurrentUser.Id).Value;

            await _audioLock.WaitAsync().ConfigureAwait(false);

            try
            {
                if (_audioClient == null)
                {
                    var audioClient = new AudioClient(this, id);
                    var promise     = _audioConnectPromise;
                    audioClient.Disconnected += async ex =>
                    {
                        //If the initial connection hasn't been made yet, reconnecting will lead to deadlocks
                        if (!promise.Task.IsCompleted)
                        {
                            try { audioClient.Dispose(); } catch { }
                            _audioClient = null;
                            if (ex != null)
                            {
                                await promise.TrySetExceptionAsync(ex);
                            }
                            else
                            {
                                await promise.TrySetCanceledAsync();
                            }
                            return;
                        }

                        //TODO: Implement reconnect

                        /*await _audioLock.WaitAsync().ConfigureAwait(false);
                         * try
                         * {
                         *  if (AudioClient == audioClient) //Only reconnect if we're still assigned as this guild's audio client
                         *  {
                         *      if (ex != null)
                         *      {
                         *          //Reconnect if we still have channel info.
                         *          //TODO: Is this threadsafe? Could channel data be deleted before we access it?
                         *          var voiceState2 = GetVoiceState(Discord.CurrentUser.Id);
                         *          if (voiceState2.HasValue)
                         *          {
                         *              var voiceChannelId = voiceState2.Value.VoiceChannel?.Id;
                         *              if (voiceChannelId != null)
                         *              {
                         *                  await Discord.ApiClient.SendVoiceStateUpdateAsync(Id, voiceChannelId, voiceState2.Value.IsSelfDeafened, voiceState2.Value.IsSelfMuted);
                         *                  return;
                         *              }
                         *          }
                         *      }
                         *      try { audioClient.Dispose(); } catch { }
                         *      AudioClient = null;
                         *  }
                         * }
                         * finally
                         * {
                         *  _audioLock.Release();
                         * }*/
                    };
                    _audioClient = audioClient;
                }
                await _audioClient.ConnectAsync(url, Discord.CurrentUser.Id, voiceState.VoiceSessionId, token).ConfigureAwait(false);

                await _audioConnectPromise.TrySetResultAsync(_audioClient).ConfigureAwait(false);
            }
            catch (OperationCanceledException)
            {
                await DisconnectAudioInternalAsync().ConfigureAwait(false);
            }
            catch (Exception e)
            {
                await _audioConnectPromise.SetExceptionAsync(e).ConfigureAwait(false);
                await DisconnectAudioInternalAsync().ConfigureAwait(false);
            }
            finally
            {
                _audioLock.Release();
            }
        }
Example #14

        private void DataFromServer(object? sender, MessageEventArgs e)
        {
            if (e.IsPing)
            {
                return;
            }
            IncomingWSS? data = JsonSerializer.Deserialize(e.Data, IncomingWSSContext.Default.IncomingWSS);

            switch (data?.type)
            {
            case DataType.Login:
                Token = data.token;
                break;

            case DataType.Error:
                if (Token is null)
                {
                    Error = data.error;
                }
                else
                {
                    if (OnError is not null)
                    {
                        _ = OnError.Invoke(new Exception(data.error));
                    }
                }
                break;

            case DataType.Message_Create:
                if (MessageReceived is not null)
                {
                    string? obj = data?.data.ToString();
                    if (obj is not null)
                    {
                        SocketMessage? m = JsonSerializer.Deserialize<SocketMessage>(obj);
                        if (m is not null)
                        {
                            m.decrypt(Encryption.File.Channels.GetKey(m.channel_id));
                            _ = MessageReceived.Invoke(m);
                        }
                    }
                }
                break;

            case DataType.Status_Update:
                if (UserStatusUpdate is not null)
                {
                    string? obj = data?.data.ToString();
                    if (obj is not null)
                    {
                        StatusUpdate? SU = JsonSerializer.Deserialize<StatusUpdate>(obj);
                        if (SU is not null)
                        {
                            SocketRemoteUser after = SocketRemoteUser.GetUser(SU.id);
                            after.status = SU.after;
                            SocketRemoteUser before = (SocketRemoteUser)after.Clone();
                            before.status = SU.before;
                            _             = UserStatusUpdate.Invoke(before, after);
                        }
                    }
                }
                break;

            case DataType.Friend_Request:
                if (ReceivedFriendRequest is not null)
                {
                    string? obj = data?.data.ToString();
                    if (obj is not null)
                    {
                        FriendRequest? request = JsonSerializer.Deserialize<FriendRequest>(obj);
                        if (request is not null)
                        {
                            _ = ReceivedFriendRequest.Invoke(SocketRemoteUser.GetUser(request.id));
                        }
                    }
                }
                break;

            case DataType.Friend_Request_Result:
                if (FriendRequestResult is not null)
                {
                    string? obj = data?.data.ToString();
                    if (obj is not null)
                    {
                        FriendRequestResult? FRR = JsonSerializer.Deserialize<FriendRequestResult>(obj);
                        if (FRR is not null && FRR.channel is not null && FRR.id is not null && FRR.result is not null)
                        {
                            SocketChannel chan = SocketChannel.GetChannel((long)FRR.channel);
                            chans.Add(chan);
                            SocketRemoteUser from1 = SocketRemoteUser.GetUser((long)FRR.id);
                            from1.Channel = chan;
                            _             = FriendRequestResult.Invoke(from1, (bool)FRR.result);
                        }
                    }
                }
                break;

            case DataType.Call_Info:
                if (IncommingCall is not null)
                {
                    string? obj = data?.data.ToString();
                    if (obj is not null)
                    {
                        callinfoinc? ci = JsonSerializer.Deserialize<callinfoinc>(obj);
                        if (ci is not null)
                        {
                            _ = IncommingCall.Invoke(SocketChannel.GetChannel(ci.channel), SocketRemoteUser.GetUser(ci.from));
                        }
                    }
                }
                break;

            case DataType.Call_Data:
                if (AudioClient is not null)
                {
                    AudioClient.Givedata(data.data);
                }
                break;

            case DataType.Key_Exchange:
                try
                {
                    string? obj = data?.data.ToString();
                    if (obj is not null)
                    {
                        KeyExchange? KE = JsonSerializer.Deserialize<KeyExchange>(obj);
                        if (KE is not null)
                        {
                            Encryption.File.Channels.AddKey(KE.channel, Encryption.Encoder.GetString(Encryption.Decrypt(Convert.FromBase64String(KE.key))));
                        }
                    }
                }
                catch (Exception ex)
                {
                    if (OnError is not null)
                    {
                        OnError.Invoke(ex);
                    }
                }
                break;

            default:
                break;
            }
        }
Example #15

        private void selectDeviceImpl(string devId)
        {
            if (_capDevice != null && _capDevice.Id == devId)
            {
                return;
            }

            releaseDeviceImpl();

            _capDevice = _devices.GetDevice(devId.Trim());
            int idx = _deviceInfos.FindIndex((di) => { return di.DeviceId == devId; });
            if (_capDevice == null)
            {
            #warning Exception
                _audioClient = null;
                _capClient = null;
                return;
            }
            _capDeviceId = _capDevice.Id;

            // Mode
            AudioClientShareMode shareMode = AudioClientShareMode.Shared;

            // Determine the initialization method suited to the device
            AudioClientStreamFlags streamFlags = AudioClientStreamFlags.NoPersist;
            switch (shareMode)
            {
                case AudioClientShareMode.Shared:
                    switch (_capDevice.DataFlow)
                    {
                        case EDataFlow.eCapture:
                            streamFlags = 0;
                            break;
                        case EDataFlow.eRender:
                            streamFlags = AudioClientStreamFlags.Loopback;
                            break;
                    }
                    break;
                case AudioClientShareMode.Exclusive:
                    streamFlags = AudioClientStreamFlags.NoPersist;
                    break;
            }

            // Format
            if (_audioClient != null) _capDevice.ReleaseAudioClient();

            // Volume
            _masterVolume = 0;
            _channelVolumes = new double[_capDevice.AudioMeterInformation.PeakValues.Count];
            var h = VolumeChanged;
            if (h != null)
            {
                h(this, new VolumeChangedEventArgs(_capDeviceId, _masterVolume, _channelVolumes));
            }

            try
            {
                _audioClient = _capDevice.AudioClient;
                _capFormat = _audioClient.MixFormat;

                _capFormat.wFormatTag = WaveFormatTag.WAVE_FORMAT_EXTENSIBLE;
                _capFormat.nChannels = 1;
                _capFormat.nSamplesPerSec = 44100;
                _capFormat.wBitsPerSample = 16;
                _capFormat.SubFormat = CoreAudioApi.AudioMediaSubtypes.MEDIASUBTYPE_PCM;

                _capFormat.wValidBitsPerSample = _capFormat.wBitsPerSample;
                _capFormat.nBlockAlign = (ushort)(_capFormat.wBitsPerSample / 8.0 * _capFormat.nChannels);
                _capFormat.nAvgBytesPerSec = _capFormat.nSamplesPerSec * _capFormat.nBlockAlign;

                long tmp1; long tmp2;
                _audioClient.GetDevicePeriod(out tmp1, out tmp2);

                // Initialize

                try
                {
                    WAVEFORMATEXTENSIBLE tmpFmt = new WAVEFORMATEXTENSIBLE();
                    if (!_audioClient.IsFormatSupported(shareMode, _capFormat, ref tmpFmt)) _capFormat = tmpFmt;
                    _audioClient.Initialize(shareMode,
                            streamFlags, tmp2, tmp2, _capFormat, Guid.Empty);
                }
                catch (System.Runtime.InteropServices.COMException ex)
                {
                    if ((uint)ex.ErrorCode == 0x88890019) // AUDCLNT_E_BUFFER_SIZE_NOT_ALIGNED
                    {
                        uint bufSize = _audioClient.BufferSize;
                        tmp2 = (long)((10000.0 * 1000 / _capFormat.nSamplesPerSec * bufSize) + 0.5);
                        _audioClient.Initialize(shareMode,
                            streamFlags, tmp2, tmp2, _capFormat, Guid.Empty);
                    }
                    else
                    {
                        throw; // don't swallow unrelated COM errors
                    }
                }
                clearBuffer();

                _capClient = _audioClient.AudioCaptureClient;

                // Fire the event
                var del = DeviceSelected;
                if (del != null)
                {
                    del.Invoke(this, new DeviceSelectedEventArgs(_capDevice, idx));
                }
            }
            catch (System.Runtime.InteropServices.COMException)
            {
                _audioClient = null;
                _capClient = null;
                throw;
            }
        }
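For the requested 16-bit mono 44.1 kHz format, the PCM relationships above work out to nBlockAlign = 16 / 8 * 1 = 2 bytes per frame and nAvgBytesPerSec = 44100 * 2 = 88,200 bytes per second.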
Example #16
        public async Task FinishConnectAudio(int id, string url, string token)
        {
            var voiceState = GetVoiceState(CurrentUser.Id).Value;

            await _audioLock.WaitAsync().ConfigureAwait(false);

            try
            {
                if (AudioClient == null)
                {
                    var audioClient = new AudioClient(this, id);
                    audioClient.Disconnected += async ex =>
                    {
                        await _audioLock.WaitAsync().ConfigureAwait(false);

                        try
                        {
                            if (AudioClient == audioClient) //Only reconnect if we're still assigned as this guild's audio client
                            {
                                if (ex != null)
                                {
                                    //Reconnect if we still have channel info.
                                    //TODO: Is this threadsafe? Could channel data be deleted before we access it?
                                    var voiceState2 = GetVoiceState(CurrentUser.Id);
                                    if (voiceState2.HasValue)
                                    {
                                        var voiceChannelId = voiceState2.Value.VoiceChannel?.Id;
                                        if (voiceChannelId != null)
                                        {
                                            await Discord.ApiClient.SendVoiceStateUpdateAsync(Id, voiceChannelId, voiceState2.Value.IsSelfDeafened, voiceState2.Value.IsSelfMuted);
                                        }
                                    }
                                }
                                else
                                {
                                    try { AudioClient.Dispose(); } catch { }
                                    AudioClient = null;
                                }
                            }
                        }
                        finally
                        {
                            _audioLock.Release();
                        }
                    };
                    AudioClient = audioClient;
                }
                await AudioClient.ConnectAsync(url, CurrentUser.Id, voiceState.VoiceSessionId, token).ConfigureAwait(false);

                await _audioConnectPromise.TrySetResultAsync(AudioClient).ConfigureAwait(false);
            }
            catch (OperationCanceledException)
            {
                await DisconnectAudioAsync();
            }
            catch (Exception e)
            {
                await _audioConnectPromise.SetExceptionAsync(e).ConfigureAwait(false);
                await DisconnectAudioAsync();
            }
            finally
            {
                _audioLock.Release();
            }
        }
Example #17

        public void SelectDevice(string devId)
        {
            _capDevice = _devices.GetDevice(devId.Trim());

            if (_capDevice == null)
            {
                _audioClient = null;
                _capClient   = null;
                return;
            }

            _capDeviceId = _capDevice.Id;

            // Mode
            AudioClientShareMode   shareMode   = AudioClientShareMode.Exclusive;
            AudioClientStreamFlags streamFlags = AudioClientStreamFlags.NoPersist;

            if (_audioClient != null)
            {
                _capDevice.ReleaseAudioClient();
            }

            try
            {
                _audioClient = _capDevice.AudioClient;
                _capFormat   = _audioClient.MixFormat;

                _capFormat.wFormatTag     = WaveFormatTag.WAVE_FORMAT_EXTENSIBLE;
                _capFormat.nChannels      = 2;
                _capFormat.nSamplesPerSec = 16000;
                _capFormat.wBitsPerSample = 16;
                _capFormat.SubFormat      = CoreAudioApi.AudioMediaSubtypes.MEDIASUBTYPE_PCM;

                _capFormat.wValidBitsPerSample = _capFormat.wBitsPerSample;
                _capFormat.nBlockAlign         = (ushort)(_capFormat.wBitsPerSample / 8.0 * _capFormat.nChannels);
                _capFormat.nAvgBytesPerSec     = _capFormat.nSamplesPerSec * _capFormat.nBlockAlign;

                long tmp1; long tmp2;
                _audioClient.GetDevicePeriod(out tmp1, out tmp2);

                // Initialize
                try
                {
                    WAVEFORMATEXTENSIBLE tmpFmt = new WAVEFORMATEXTENSIBLE();
                    if (!_audioClient.IsFormatSupported(shareMode, _capFormat, ref tmpFmt))
                    {
                        _capFormat = tmpFmt;
                    }
                    _audioClient.Initialize(shareMode, streamFlags,
                                            tmp2, tmp2,
                                            _capFormat, Guid.Empty);
                }
                catch (System.Runtime.InteropServices.COMException ex)
                {
                    try
                    {
                        AudioClientError error = (AudioClientError)ex.ErrorCode;
                        switch (error)
                        {
                        case AudioClientError.BufferSizeNotAligned:
                            uint bufSize = _audioClient.BufferSize;
                            tmp2 = (long)((10000.0 * 1000 / _capFormat.nSamplesPerSec * bufSize) + 0.5);
                            _audioClient.Initialize(shareMode,
                                                    streamFlags, tmp2, tmp2, _capFormat, Guid.Empty);
                            break;

                        case AudioClientError.UnsupportedFormat:

                            break;
                        }
                    }
                    catch (InvalidCastException)
                    {
                    }
                }

                _capClient = _audioClient.AudioCaptureClient;
            }
            catch (System.Runtime.InteropServices.COMException)
            {
                _audioClient = null;
                _capClient   = null;
                throw;
            }
        }
Example #18
        private void InitializeInternal()
        {
            var defaultFormat = _waveFormat;

            _audioClient = AudioClient.FromMMDevice(Device);

            /*if (_shareMode == AudioClientShareMode.Exclusive)
             * {
             *  _waveFormat = _waveFormat ?? _audioClient.MixFormat;
             * }
             * else
             * {
             *  _waveFormat = _waveFormat ?? _audioClient.MixFormat;
             * }*/
            _waveFormat = _waveFormat ?? _audioClient.MixFormat;

            _waveFormat = SetupWaveFormat(_waveFormat, _audioClient);

            if (!_eventSync)
            {
                _audioClient.Initialize(_shareMode, AudioClientStreamFlags.None | GetStreamFlags(), _latency * ReftimesPerMillisecond, 0, _waveFormat, Guid.Empty);
            }
            else
            {
                if (_shareMode == AudioClientShareMode.Exclusive)
                {
                    try
                    {
                        _audioClient.Initialize(_shareMode, AudioClientStreamFlags.StreamFlagsEventCallback | GetStreamFlags(), _latency * ReftimesPerMillisecond, _latency * ReftimesPerMillisecond, _waveFormat, Guid.Empty);
                    }
                    catch (CoreAudioAPIException e)
                    {
                        if (e.ErrorCode == unchecked((int)0x88890019)) // AUDCLNT_E_BUFFER_SIZE_NOT_ALIGNED
                        {
                            int bufferSize = _audioClient.BufferSize;
                            _audioClient.Dispose();
                            long hnsRequestedDuration = (long)(((double)ReftimesPerMillisecond * 1000 / _waveFormat.SampleRate * bufferSize) + 0.5);
                            _audioClient = AudioClient.FromMMDevice(Device);
                            if (defaultFormat == null)
                            {
                                _waveFormat = _audioClient.MixFormat;
                            }
                            _audioClient.Initialize(_shareMode, AudioClientStreamFlags.StreamFlagsEventCallback | GetStreamFlags(), hnsRequestedDuration, hnsRequestedDuration, _waveFormat, Guid.Empty);
                        }
                        else
                        {
                            throw; // don't swallow unrelated initialization failures
                        }
                    }
                }
                else
                {
                    _audioClient.Initialize(_shareMode, AudioClientStreamFlags.StreamFlagsEventCallback | GetStreamFlags(), 0, 0, _waveFormat, Guid.Empty);
                }

                _eventWaitHandle = new EventWaitHandle(false, EventResetMode.AutoReset);
                _audioClient.SetEventHandle(_eventWaitHandle.SafeWaitHandle.DangerousGetHandle());
            }

            if (_audioClient.StreamLatency > 0)
            {
                _streamLatency = (int)(_audioClient.StreamLatency / ReftimesPerMillisecond);
            }
            _audioCaptureClient = AudioCaptureClient.FromAudioClient(_audioClient);
        }
Example #19
        public async Task SendAudioAsync(IGuild Guild, string UserInput)
        {
            var YTC = new YoutubeClient();

            if (UserInput.ToLower().Contains("youtube.com"))
            {
                UserInput = YoutubeClient.ParseVideoId(UserInput);
            }
            else
            {
                //var SearchList = await YTC.SearchAsync( UserInput );

                HttpClient _httpClient = new HttpClient();

                string EncodedSearchQuery = WebUtility.UrlEncode(UserInput);

                string Request = $"https://www.youtube.com/search_ajax?style=xml&search_query={EncodedSearchQuery}";

                var Response = await _httpClient.GetStringAsync(Request).ConfigureAwait(false);

                var SearchResultsXml = XElement.Parse(Response).StripNamespaces();

                var VideoIds = SearchResultsXml.Descendants("encrypted_id").Select(e => (string)e);

                UserInput = VideoIds.First();
            }

            var MediaInfo = await YTC.GetVideoMediaStreamInfosAsync(UserInput);

            var ASI = MediaInfo.Audio.OrderBy(x => x.Bitrate).Last();

            var VideoInfo = await YTC.GetVideoAsync(UserInput);

            var Title = VideoInfo.Title;

            var RGX = new Regex("[^a-zA-Z0-9 -]");

            Title = RGX.Replace(Title, "");

            var Name = $"{Title}.{ASI.AudioEncoding.ToString()}";

#if DEBUG
            var Path = "bin/Debug/netcoreapp1.1/Songs/";
#else
            var Path = "Songs/";
#endif
            if (!File.Exists(Path + Name))
            {
                using (var Input = await YTC.GetMediaStreamAsync(ASI)) {
                    Directory.CreateDirectory(Path);
                    using (var Out = File.Create(Path + Name)) {
                        await Input.CopyToAsync(Out);
                    }
                }
            }

            IAudioClient AudioClient;

            await JukeBot.DiscordClient.SetGameAsync(Title);

            if (this.ConnectedChannels.TryGetValue(Guild.Id, out AudioClient))
            {
                var Output = this.CreateStream(Path + Name).StandardOutput.BaseStream;
                this.AudioData = new MemoryStream();
                await Output.CopyToAsync(this.AudioData);

                await Output.FlushAsync();

                Output.Dispose();
                int  read_length = 0;
                bool flipflop    = false;
                int  buffer_size = 2048;
                var  buffer      = new[] { new byte[buffer_size], new byte[buffer_size] };
                this.AudioData.Seek(0x0, SeekOrigin.Begin);
                var        DiscordStream = AudioClient.CreatePCMStream(AudioApplication.Music, 2880);
                Task       writer;
                Task<int>  reader;
                while (this.AudioData.Position < this.AudioData.Length)
                {
                    if (!this.Pause)
                    {
                        writer      = DiscordStream.WriteAsync(buffer[flipflop ? 0 : 1], 0, read_length);
                        flipflop    = !flipflop;
                        reader      = this.AudioData.ReadAsync(buffer[flipflop ? 0 : 1], 0, buffer_size);
                        read_length = await reader;
                        await writer;
                    }
                    else
                    {
                        await DiscordStream.WriteAsync(new byte[512], 0, 512);

                        read_length = 0;
                    }
                }
                await this.AudioData.FlushAsync();

                //await Output.CopyToAsync(DiscordStream);
                await DiscordStream.FlushAsync();

                await JukeBot.DiscordClient.SetGameAsync("");
            }
        }
Example #20

 public SodiumEncryptStream(AudioStream next, IAudioClient client)
 {
     _next   = next;
     _client = (AudioClient)client;
     _nonce  = new byte[24];
 }
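The 24-byte nonce matches libsodium's crypto_secretbox (XSalsa20-Poly1305), which Discord's voice transport uses; a named constant would make that explicit (a sketch):

 // Sketch: libsodium crypto_secretbox (XSalsa20-Poly1305) nonce size.
 private const int NonceBytes = 24;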
Example #21
        private void InitializeInternal()
        {
            const int reftimesPerMillisecond = 10000;

            _audioClient  = AudioClient.FromMMDevice(Device);
            _outputFormat = SetupWaveFormat(_source, _audioClient);

            long latency = _latency * reftimesPerMillisecond;

AUDCLNT_E_BUFFER_SIZE_NOT_ALIGNED_TRY_AGAIN:
            try
            {
                if (!_eventSync)
                {
                    _audioClient.Initialize(_shareMode, AudioClientStreamFlags.None, latency, 0, _outputFormat,
                                            Guid.Empty);
                }
                else //event sync
                {
                    if (_shareMode == AudioClientShareMode.Exclusive) //exclusive
                    {
                        _audioClient.Initialize(_shareMode, AudioClientStreamFlags.StreamFlagsEventCallback, latency,
                                                latency, _outputFormat, Guid.Empty);
                    }
                    else //shared
                    {
                        _audioClient.Initialize(_shareMode, AudioClientStreamFlags.StreamFlagsEventCallback, 0, 0,
                                                _outputFormat, Guid.Empty);
                        //latency = (int)(_audioClient.StreamLatency / reftimesPerMillisecond);
                    }
                }
            }
            catch (CoreAudioAPIException exception)
            {
                if (exception.ErrorCode == unchecked((int)0x88890019)) // AUDCLNT_E_BUFFER_SIZE_NOT_ALIGNED
                {
                    const long reftimesPerSec = 10000000;
                    int        framesInBuffer = _audioClient.GetBufferSize();
                    // ReSharper disable once PossibleLossOfFraction
                    latency = (int)(reftimesPerSec * framesInBuffer / _outputFormat.SampleRate + 0.5);
                    goto AUDCLNT_E_BUFFER_SIZE_NOT_ALIGNED_TRY_AGAIN;
                }
                throw;
            }

            if (_audioClient.StreamLatency != 0) //windows 10 returns zero, got no idea why => https://github.com/filoe/cscore/issues/11
            {
                Latency = (int)(_audioClient.StreamLatency / reftimesPerMillisecond);
            }

            if (_eventSync)
            {
                _eventWaitHandle = new EventWaitHandle(false, EventResetMode.AutoReset);
                _audioClient.SetEventHandle(_eventWaitHandle.SafeWaitHandle.DangerousGetHandle());
            }

            _renderClient = AudioRenderClient.FromAudioClient(_audioClient);

            if (_streamSwitchEvent == null)
            {
                _streamSwitchEvent = new AutoResetEvent(false);
            }

            InitializeStreamRouting();
        }
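In the aligned-buffer retry above, the device's buffer size is converted back into a REFERENCE_TIME latency: for a hypothetical 1056-frame buffer at 48 kHz, 10,000,000 * 1056 / 48000 = 220,000 hundred-nanosecond units, i.e. 22 ms.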
Example #22

        /// <summary>
        /// Streams YouTube audio data to a Discord voice channel. youtube-dl pipes the video data to ffmpeg,
        /// which extracts the audio and writes it to stdout, where it is read back and forwarded to the voice stream.
        /// </summary>
        /// <param name="url">Url of the video</param>
        /// <returns></returns>
        private async Task StreamAudio(string url, CancellationToken cancelToken)
        {
            Console.WriteLine("Youtube requested");
            using (var stream = AudioClient.CreatePCMStream(application: AudioApplication.Mixed))
            {
                try
                {
                    if (System.Runtime.InteropServices.RuntimeInformation.IsOSPlatform(System.Runtime.InteropServices.OSPlatform.Windows))
                    {
                        #if DEBUG
                        Console.WriteLine("Windows Detected");
                        #endif
                        _process = Process.Start(new ProcessStartInfo
                        {
                            // 'Direct' method using only ffmpeg and a music link

                            FileName  = "Binaries\\ffmpeg",
                            Arguments =
                                $"-i \"{url}\" " +
                                " -ac 2 -f s16le -ar 48000 pipe:1",
                            UseShellExecute        = false,
                            RedirectStandardOutput = true,
                            RedirectStandardError  = false

                                                     // 'indirect' method using both youtube-dl and ffmpeg

                                                     /*
                                                      * FileName = "cmd",
                                                      * Arguments = $"/C youtube-dl.exe --hls-prefer-native -q -o - {url} | ffmpeg.exe -i - -f s16le -ar 48000 -ac 2 -reconnect 1 -reconnect_streamed 1 -reconnect_delay_max 10 pipe:1 -b:a 96K ",
                                                      * UseShellExecute = false,
                                                      * RedirectStandardOutput = true,
                                                      * RedirectStandardError = false,
                                                      */
                        });
                    }
                    else
                    {
                        #if DEBUG
                        Console.WriteLine("Linux Detected");
                        #endif
                        _process = Process.Start(new ProcessStartInfo
                        {
                            /*
                             * FileName = "/bin/bash",
                             * Arguments =
                             * $"-c \"ffmpeg -i \'{url}\' " +
                             * " -ac 2 -f s16le -ar 48000 -loglevel panic pipe:1 \" ",
                             * UseShellExecute = false,
                             * RedirectStandardOutput = true,
                             * RedirectStandardError = false
                             */
                            FileName               = "/bin/bash",
                            Arguments              = $"-c \"youtube-dl --hls-prefer-native -q -o - {url} | ffmpeg -i - -f s16le -ar 48000 -ac 2 -reconnect 1 -reconnect_streamed 1 -reconnect_delay_max 10 pipe:1 -b:a 96K\"",
                            UseShellExecute        = false,
                            RedirectStandardOutput = true,
                            RedirectStandardError  = false,
                        });
                    }
                    Console.WriteLine("Starting process...");
                    int blockSize = 512;
                    var buffer    = new byte[blockSize];
                    int byteCount = 1;
                    do
                    {
                        // Don't send any data or read from the stream while playback is paused
                        if (Paused)
                        {
                            await Task.Delay(100); // avoid busy-spinning while paused
                            continue;
                        }

                        if (cancelToken.IsCancellationRequested || WillSkip)
                        {
                            break;
                        }

                        byteCount = await _process.StandardOutput.BaseStream.ReadAsync(buffer, 0, blockSize);

                        //buffer = AdjustVolume(buffer, Volume);
                        await stream.WriteAsync(buffer, 0, byteCount);
                    } while (byteCount > 0);
                    if (!WillSkip)
                    {
                        _process.WaitForExit();
                    }
                    _process.Close();
                    await stream.FlushAsync();

                    WillSkip = false;
                    Paused   = false;


                    #if DEBUG
                    Console.WriteLine("Process finished.");
                    #endif
                }
                catch (OperationCanceledException)
                {
                    Console.WriteLine("Cancelled by user.");
                    _process.Close();
                    await stream.FlushAsync();

                    WillSkip = false;
                }
                catch (FileNotFoundException)
                {
                    await _context.Channel.SendMessageAsync("Error, Youtube-dl and/or ffmpeg can not be found");
                }
                catch (Exception e)
                {
                    Console.WriteLine(e.InnerException);
                }
            }
        }
Example #23
        /// <summary>
        /// Initialize for playing the specified wave stream
        /// </summary>
        /// <param name="waveProvider">IWaveProvider to play</param>
        public void Init(IWaveProvider waveProvider)
        {
            long latencyRefTimes = latencyMilliseconds * 10000;

            outputFormat = waveProvider.WaveFormat;
            // first attempt uses the WaveFormat from the WaveStream
            WaveFormatExtensible closestSampleRateFormat;

            if (!audioClient.IsFormatSupported(shareMode, outputFormat, out closestSampleRateFormat))
            {
                // Use closestSampleRateFormat (in shared mode it usually equals audioClient.MixFormat)
                // See documentation: http://msdn.microsoft.com/en-us/library/ms678737(VS.85).aspx
                // It says: "In shared mode, the audio engine always supports the mix format"
                // The MixFormat is more likely to be a WaveFormatExtensible.
                if (closestSampleRateFormat == null)
                {
                    outputFormat = GetFallbackFormat();
                }
                else
                {
                    outputFormat = closestSampleRateFormat;
                }

                try
                {
                    // just check that we can make it.
                    using (new ResamplerDmoStream(waveProvider, outputFormat))
                    {
                    }
                }
                catch (Exception)
                {
                    // On Windows 10 some poorly coded drivers return a bad format in closestSampleRateFormat.
                    // In that case, try to fall back as if no closest match was provided (i.e. force the mix format).
                    outputFormat = GetFallbackFormat();
                    using (new ResamplerDmoStream(waveProvider, outputFormat))
                    {
                    }
                }
                dmoResamplerNeeded = true;
            }
            else
            {
                dmoResamplerNeeded = false;
            }
            sourceProvider = waveProvider;

            // If using EventSync, setup depends on the share mode
            if (isUsingEventSync)
            {
                // Init Shared or Exclusive
                if (shareMode == AudioClientShareMode.Shared)
                {
                    // With EventCallBack and Shared, both latencies must be set to 0 (update - not sure this is true anymore)
                    //
                    audioClient.Initialize(shareMode, AudioClientStreamFlags.EventCallback, latencyRefTimes, 0,
                                           outputFormat, Guid.Empty);

                    // Windows 10 returns 0 from stream latency, resulting in maxing out CPU usage later
                    var streamLatency = audioClient.StreamLatency;
                    if (streamLatency != 0)
                    {
                        // Get back the effective latency from AudioClient
                        latencyMilliseconds = (int)(streamLatency / 10000);
                    }
                }
                else
                {
                    try
                    {
                        // With EventCallBack and Exclusive, both latencies must be equal
                        audioClient.Initialize(shareMode, AudioClientStreamFlags.EventCallback, latencyRefTimes, latencyRefTimes,
                                               outputFormat, Guid.Empty);
                    }
                    catch (COMException ex)
                    {
                        // Starting with Windows 7, Initialize can return AUDCLNT_E_BUFFER_SIZE_NOT_ALIGNED for a render device.
                        // We should initialize again.
                        if (ex.ErrorCode != ErrorCodes.AUDCLNT_E_BUFFER_SIZE_NOT_ALIGNED)
                        {
                            throw;
                        }

                        // Calculate the new latency.
                        long newLatencyRefTimes = (long)(10000000.0 /
                                                         (double)this.outputFormat.SampleRate *
                                                         (double)this.audioClient.BufferSize + 0.5);

                        this.audioClient.Dispose();
                        this.audioClient = this.mmDevice.AudioClient;
                        this.audioClient.Initialize(this.shareMode, AudioClientStreamFlags.EventCallback,
                                                    newLatencyRefTimes, newLatencyRefTimes, this.outputFormat, Guid.Empty);
                    }
                }

                // Create the Wait Event Handle
                frameEventWaitHandle = new EventWaitHandle(false, EventResetMode.AutoReset);
                audioClient.SetEventHandle(frameEventWaitHandle.SafeWaitHandle.DangerousGetHandle());
            }
            else
            {
                // Normal setup for both share modes
                audioClient.Initialize(shareMode, AudioClientStreamFlags.None, latencyRefTimes, 0,
                                       outputFormat, Guid.Empty);
            }

            // Get the RenderClient
            renderClient = audioClient.AudioRenderClient;
        }
Example #24
        /// <summary>
        /// Initialize for playing the specified wave stream
        /// </summary>
        /// <param name="waveProvider">IWaveProvider to play</param>
        public void Init(IWaveProvider waveProvider)
        {
            long latencyRefTimes = latencyMilliseconds * 10000;

            outputFormat = waveProvider.WaveFormat;
            // first attempt uses the WaveFormat from the WaveStream
            WaveFormatExtensible closestSampleRateFormat;

            if (!audioClient.IsFormatSupported(shareMode, outputFormat, out closestSampleRateFormat))
            {
                // Use closestSampleRateFormat (in shared mode it usually equals audioClient.MixFormat)
                // See documentation: http://msdn.microsoft.com/en-us/library/ms678737(VS.85).aspx
                // It says: "In shared mode, the audio engine always supports the mix format"
                // The MixFormat is more likely to be a WaveFormatExtensible.
                if (closestSampleRateFormat == null)
                {
                    WaveFormat correctSampleRateFormat = audioClient.MixFormat;

                    /*WaveFormat.CreateIeeeFloatWaveFormat(
                     * audioClient.MixFormat.SampleRate,
                     * audioClient.MixFormat.Channels);*/

                    if (!audioClient.IsFormatSupported(shareMode, correctSampleRateFormat))
                    {
                        // Probe the candidate formats from best to worst
                        WaveFormatExtensible[] bestToWorstFormats =
                        {
                            new WaveFormatExtensible(
                                outputFormat.SampleRate, 32,
                                outputFormat.Channels),
                            new WaveFormatExtensible(
                                outputFormat.SampleRate, 24,
                                outputFormat.Channels),
                            new WaveFormatExtensible(
                                outputFormat.SampleRate, 16,
                                outputFormat.Channels),
                        };

                        // Check from best format to worst (Float32, Int24, Int16)
                        for (int i = 0; i < bestToWorstFormats.Length; i++)
                        {
                            correctSampleRateFormat = bestToWorstFormats[i];
                            if (audioClient.IsFormatSupported(shareMode, correctSampleRateFormat))
                            {
                                break;
                            }
                            correctSampleRateFormat = null;
                        }

                        // If still null, fall back to 16-bit PCM, 2 channels
                        if (correctSampleRateFormat == null)
                        {
                            // Last Last Last Chance (Thanks WASAPI)
                            correctSampleRateFormat = new WaveFormatExtensible(outputFormat.SampleRate, 16, 2);
                            if (!audioClient.IsFormatSupported(shareMode, correctSampleRateFormat))
                            {
                                throw new NotSupportedException("Can't find a supported format to use");
                            }
                        }
                    }
                    outputFormat = correctSampleRateFormat;
                }
                else
                {
                    outputFormat = closestSampleRateFormat;
                }

                // just check up front that the resampler can be created for this format
                using (new ResamplerDmoStream(waveProvider, outputFormat))
                {
                }
                dmoResamplerNeeded = true;
            }
            else
            {
                dmoResamplerNeeded = false;
            }
            sourceProvider = waveProvider;

            // If using EventSync, setup depends on the share mode
            if (isUsingEventSync)
            {
                // Init Shared or Exclusive
                if (shareMode == AudioClientShareMode.Shared)
                {
                    // With EventCallback and Shared, both latencies must be set to 0 (update: not sure this is true anymore)
                    audioClient.Initialize(shareMode, AudioClientStreamFlags.EventCallback, latencyRefTimes, 0,
                                           outputFormat, Guid.Empty);

                    // Windows 10 returns 0 from stream latency, resulting in maxing out CPU usage later
                    var streamLatency = audioClient.StreamLatency;
                    if (streamLatency != 0)
                    {
                        // Get back the effective latency from AudioClient
                        latencyMilliseconds = (int)(streamLatency / 10000);
                    }
                }
                else
                {
                    try
                    {
                        // With EventCallback and Exclusive, both latencies must be equal
                        audioClient.Initialize(shareMode, AudioClientStreamFlags.EventCallback, latencyRefTimes, latencyRefTimes,
                                               outputFormat, Guid.Empty);
                    }
                    catch (COMException ex)
                    {
                        // Starting with Windows 7, Initialize can return AUDCLNT_E_BUFFER_SIZE_NOT_ALIGNED for a render device.
                        // In that case we must initialize again with an aligned buffer size.
                        if (ex.ErrorCode != ErrorCodes.AUDCLNT_E_BUFFER_SIZE_NOT_ALIGNED)
                        {
                            throw;
                        }

                        // Recalculate the latency (in 100-ns REFERENCE_TIME units) from the aligned buffer size.
                        long newLatencyRefTimes = (long)(10000000.0 /
                                                         (double)this.outputFormat.SampleRate *
                                                         (double)this.audioClient.BufferSize + 0.5);

                        this.audioClient.Dispose();
                        this.audioClient = this.mmDevice.AudioClient;
                        this.audioClient.Initialize(this.shareMode, AudioClientStreamFlags.EventCallback,
                                                    newLatencyRefTimes, newLatencyRefTimes, this.outputFormat, Guid.Empty);
                    }
                }

                // Create the Wait Event Handle
                frameEventWaitHandle = new EventWaitHandle(false, EventResetMode.AutoReset);
                audioClient.SetEventHandle(frameEventWaitHandle.SafeWaitHandle.DangerousGetHandle());
            }
            else
            {
                // Normal setup for both share modes
                audioClient.Initialize(shareMode, AudioClientStreamFlags.None, latencyRefTimes, 0,
                                       outputFormat, Guid.Empty);
            }

            // Get the RenderClient
            renderClient = audioClient.AudioRenderClient;
        }
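A minimal usage sketch for this Init path, assuming NAudio's AudioFileReader and a WasapiOut-style host class (the class that owns the Init above is not shown in full, so the player type here is an assumption):

        // Assumed types: AudioFileReader and WasapiOut follow NAudio's public API.
        using (var reader = new AudioFileReader("input.wav"))
        using (var player = new WasapiOut())
        {
            player.Init(reader); // negotiates a supported format, may require the DMO resampler
            player.Play();
        }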
Beispiel #25
0
 private bool IsFormatSupported(WaveFormat waveFormat, AudioClientShareMode sharemode, AudioClient audioClient)
 {
     return(audioClient.IsFormatSupported(sharemode, waveFormat));
 }
Beispiel #26
0
        private void DoRecording(AudioClient client)
        {
            Debug.WriteLine(client.BufferSize);

            int bufLength     = 0;
            int minPacketSize = waveFormat.AverageBytesPerSecond / 100; // flush roughly every 10 ms of audio

            // Size with headroom: up to minPacketSize bytes can already be pending
            // when a full device buffer arrives, so the sum must fit.
            var buf = new Byte[client.BufferSize * bytesPerFrame + minPacketSize];

            IntPtr hEvent = NativeMethods.CreateEventEx(IntPtr.Zero, IntPtr.Zero, 0, EventAccess.EVENT_ALL_ACCESS);

            client.SetEventHandle(hEvent);

            Exception exception = null;

            try
            {
                AudioCaptureClient capture = client.AudioCaptureClient;
                client.Start();

                int packetSize = capture.GetNextPacketSize();

                while (!this.stop)
                {
                    IntPtr pData                   = IntPtr.Zero;
                    int    numFramesToRead         = 0;
                    AudioClientBufferFlags dwFlags = 0;

                    if (packetSize == 0)
                    {
                        if (NativeMethods.WaitForSingleObjectEx(hEvent, 100, true) != 0)
                        {
                            throw new Exception("Capture event timeout");
                        }
                    }

                    pData = capture.GetBuffer(out numFramesToRead, out dwFlags);

                    if (numFramesToRead == 0)
                    {
                        continue;
                    }

                    int capturedBytes = numFramesToRead * bytesPerFrame;

                    if ((dwFlags & AudioClientBufferFlags.Silent) != 0)
                    {
                        // Silent packet: the data pointer must not be read; emit zeroes instead
                        Array.Clear(buf, bufLength, capturedBytes);
                    }
                    else
                    {
                        System.Runtime.InteropServices.Marshal.Copy(pData, buf, bufLength, capturedBytes);
                    }
                    bufLength += capturedBytes;

                    capture.ReleaseBuffer(numFramesToRead);

                    if (bufLength >= minPacketSize)
                    {
                        if (DataAvailable != null)
                        {
                            DataAvailable(this, new WaveInEventArgs(buf, bufLength));
                        }
                        bufLength = 0;
                    }

                    packetSize = capture.GetNextPacketSize();
                }
            }
            catch (Exception ex)
            {
                exception = ex;
                Debug.WriteLine("stop wasapi");
            }
            finally
            {
                // Report the stop, and any error, exactly once
                RaiseRecordingStopped(exception);

                NativeMethods.CloseHandle(hEvent);
                client.Stop();
                client.Dispose();
            }
        }
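        // Sketch (an assumption, not part of the original source): a typical
        // DataAvailable consumer writes the captured frames to disk. NAudio's
        // WaveFileWriter is assumed here; waveFormat is the negotiated capture
        // format used by DoRecording above.
        private void SaveCaptureToFile(string path)
        {
            var writer = new WaveFileWriter(path, waveFormat);
            DataAvailable += (s, e) =>
            {
                // e.Buffer may contain more space than valid data; honor BytesRecorded
                writer.Write(e.Buffer, 0, e.BytesRecorded);
            };
            // writer.Dispose() when recording stops (omitted in this sketch)
        }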
        public void SelectDevice(string devId)
        {
            _capDevice = _devices.GetDevice(devId.Trim());

            if (_capDevice == null)
            {
                _audioClient = null;
                _capClient = null;
                return;
            }

            _capDeviceId = _capDevice.Id;

            // Mode
            AudioClientShareMode shareMode = AudioClientShareMode.Exclusive;
            AudioClientStreamFlags streamFlags = AudioClientStreamFlags.NoPersist;

            if (_audioClient != null)
                _capDevice.ReleaseAudioClient();

            try
            {
                _audioClient = _capDevice.AudioClient;
                _capFormat = _audioClient.MixFormat;

                _capFormat.wFormatTag = WaveFormatTag.WAVE_FORMAT_EXTENSIBLE;
                _capFormat.nChannels = 2;
                _capFormat.nSamplesPerSec = 16000;
                _capFormat.wBitsPerSample = 16;
                _capFormat.SubFormat = CoreAudioApi.AudioMediaSubtypes.MEDIASUBTYPE_PCM;

                _capFormat.wValidBitsPerSample = _capFormat.wBitsPerSample;
                _capFormat.nBlockAlign = (ushort)(_capFormat.wBitsPerSample / 8.0 * _capFormat.nChannels);
                _capFormat.nAvgBytesPerSec = _capFormat.nSamplesPerSec * _capFormat.nBlockAlign;

                long tmp1; long tmp2;
                _audioClient.GetDevicePeriod(out tmp1, out tmp2);

                // Initialization
                try
                {
                    WAVEFORMATEXTENSIBLE tmpFmt = new WAVEFORMATEXTENSIBLE();
                    if (!_audioClient.IsFormatSupported(shareMode, _capFormat, ref tmpFmt))
                        _capFormat = tmpFmt;
                    _audioClient.Initialize(shareMode, streamFlags,
                        tmp2, tmp2,
                        _capFormat, Guid.Empty);
                }
                catch (System.Runtime.InteropServices.COMException ex)
                {
                    try
                    {
                        AudioClientError error = (AudioClientError)ex.ErrorCode;
                        switch (error)
                        {
                            case AudioClientError.BufferSizeNotAligned:
                                uint bufSize = _audioClient.BufferSize;
                                tmp2 = (long)((10000.0 * 1000 / _capFormat.nSamplesPerSec * bufSize) + 0.5);
                                _audioClient.Initialize(shareMode,
                                    streamFlags, tmp2, tmp2, _capFormat, Guid.Empty);
                                break;

                            case AudioClientError.UnsupportedFormat:
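                                // No recovery is attempted here for an unsupported format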

                                break;
                        }
                    }
                    catch (InvalidCastException)
                    {

                    }
                }

                _capClient = _audioClient.AudioCaptureClient;

            }
            catch (System.Runtime.InteropServices.COMException ex)
            {
                _audioClient = null;
                _capClient = null;
                throw;
            }
        }
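        // Sketch (an assumption, not from the original source): with NAudio's
        // MMDeviceEnumerator the device ID passed to SelectDevice would be
        // obtained like this; the custom _devices wrapper above exposes a
        // comparable GetDevice(id) lookup.
        private static void ListCaptureDevices()
        {
            var enumerator = new MMDeviceEnumerator();
            foreach (var device in enumerator.EnumerateAudioEndPoints(DataFlow.Capture, DeviceState.Active))
            {
                Console.WriteLine(device.ID + ": " + device.FriendlyName);
            }
        }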
        private async void PlayThread()
        {
            await Activate();

            var  playbackProvider = Init();
            bool isClientRunning  = false;

            try
            {
                if (this.resamplerNeeded)
                {
                    var resampler = new WdlResamplingSampleProvider(playbackProvider.ToSampleProvider(), outputFormat.SampleRate);
                    playbackProvider = new SampleToWaveProvider(resampler);
                }

                // fill a whole buffer
                bufferFrameCount = audioClient.BufferSize;
                bytesPerFrame    = outputFormat.Channels * outputFormat.BitsPerSample / 8;
                readBuffer       = new byte[bufferFrameCount * bytesPerFrame];
                FillBuffer(playbackProvider, bufferFrameCount);
                int timeout = 3 * latencyMilliseconds;

                while (playbackState != WasapiOutState.Disposed)
                {
                    if (playbackState != WasapiOutState.Playing)
                    {
                        playThreadEvent.WaitOne(500);
                    }

                    // If still playing and notification is ok
                    if (playbackState == WasapiOutState.Playing)
                    {
                        if (!isClientRunning)
                        {
                            audioClient.Start();
                            isClientRunning = true;
                        }
                        // If using Event Sync, Wait for notification from AudioClient or Sleep half latency
                        var r = NativeMethods.WaitForSingleObjectEx(frameEventWaitHandle, timeout, true);
                        if (r != 0)
                        {
                            throw new InvalidOperationException("Timed out waiting for event");
                        }
                        // See how much buffer space is available.
                        int numFramesPadding = 0;
                        // In exclusive mode, always ask the max = bufferFrameCount = audioClient.BufferSize
                        numFramesPadding = (shareMode == AudioClientShareMode.Shared) ? audioClient.CurrentPadding : 0;

                        int numFramesAvailable = bufferFrameCount - numFramesPadding;
                        if (numFramesAvailable > 0)
                        {
                            FillBuffer(playbackProvider, numFramesAvailable);
                        }
                    }

                    if (playbackState == WasapiOutState.Stopping)
                    {
                        // play the buffer out
                        while (audioClient.CurrentPadding > 0)
                        {
                            await Task.Delay(latencyMilliseconds / 2);
                        }
                        audioClient.Stop();
                        isClientRunning = false;
                        audioClient.Reset();
                        playbackState = WasapiOutState.Stopped;
                        RaisePlaybackStopped(null);
                    }
                    if (playbackState == WasapiOutState.Disposing)
                    {
                        audioClient.Stop();
                        isClientRunning = false;
                        audioClient.Reset();
                        playbackState = WasapiOutState.Disposed;
                        var disposablePlaybackProvider = playbackProvider as IDisposable;
                        if (disposablePlaybackProvider != null)
                        {
                            disposablePlaybackProvider.Dispose(); // do everything on this thread, even dispose in case it is Media Foundation
                        }
                        RaisePlaybackStopped(null);
                    }
                }
            }
            catch (Exception e)
            {
                RaisePlaybackStopped(e);
            }
            finally
            {
                audioClient.Dispose();
                audioClient  = null;
                renderClient = null;
                NativeMethods.CloseHandle(frameEventWaitHandle);
            }
        }
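The buffer sizing in PlayThread is frames multiplied by bytes per frame; as a concrete check of the arithmetic (the values are illustrative):

        // 48 kHz stereo IEEE float: 2 channels * 32 bits / 8 = 8 bytes per frame.
        // A 480-frame buffer (10 ms at 48 kHz) therefore needs 3,840 bytes.
        int channels = 2, bitsPerSample = 32, bufferFrameCount = 480;
        int bytesPerFrame = channels * bitsPerSample / 8;     // 8
        int bufferBytes   = bufferFrameCount * bytesPerFrame; // 3840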
Beispiel #29
0
 static void ClientInit2()
 {
     _audioClient2 = new AudioClient("127.0.0.1", 38083);
     _audioClient2.Start();
     _audioClient2.Join("audiochat");
 }
Beispiel #30
0
        internal async Task <IAudioClient> ConnectAudioAsync(ulong channelId, bool selfDeaf, bool selfMute, bool external)
        {
            TaskCompletionSource <AudioClient> promise;

            await _audioLock.WaitAsync().ConfigureAwait(false);

            try
            {
                await DisconnectAudioInternalAsync().ConfigureAwait(false);

                promise = new TaskCompletionSource <AudioClient>();
                _audioConnectPromise = promise;

                if (external)
                {
                    var _ = promise.TrySetResultAsync(null);
                    await Discord.ApiClient.SendVoiceStateUpdateAsync(Id, channelId, selfDeaf, selfMute).ConfigureAwait(false);

                    return(null);
                }

                if (_audioClient == null)
                {
                    var audioClient = new AudioClient(this, Discord.GetAudioId(), channelId);
                    audioClient.Disconnected += async ex =>
                    {
                        if (!promise.Task.IsCompleted)
                        {
                            try
                            { audioClient.Dispose(); }
                            catch { }
                            _audioClient = null;
                            if (ex != null)
                            {
                                await promise.TrySetExceptionAsync(ex);
                            }
                            else
                            {
                                await promise.TrySetCanceledAsync();
                            }
                            return;
                        }
                    };
                    audioClient.Connected += () =>
                    {
                        var _ = promise.TrySetResultAsync(_audioClient);
                        return(Task.Delay(0));
                    };
                    _audioClient = audioClient;
                }

                await Discord.ApiClient.SendVoiceStateUpdateAsync(Id, channelId, selfDeaf, selfMute).ConfigureAwait(false);
            }
            catch (Exception)
            {
                await DisconnectAudioInternalAsync().ConfigureAwait(false);

                throw;
            }
            finally
            {
                _audioLock.Release();
            }

            try
            {
                var timeoutTask = Task.Delay(15000);
                if (await Task.WhenAny(promise.Task, timeoutTask) == timeoutTask)
                {
                    throw new TimeoutException();
                }
                return(await promise.Task.ConfigureAwait(false));
            }
            catch (Exception)
            {
                await DisconnectAudioAsync().ConfigureAwait(false);

                throw;
            }
        }
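        // The Task.WhenAny timeout used above generalizes to a reusable helper;
        // a minimal sketch (the helper name is illustrative, not Discord.Net API):
        private static async Task<T> WithTimeout<T>(Task<T> task, TimeSpan timeout)
        {
            if (await Task.WhenAny(task, Task.Delay(timeout)).ConfigureAwait(false) != task)
            {
                throw new TimeoutException();
            }
            return await task.ConfigureAwait(false); // propagates the original result or exception
        }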
        /// <summary>
        /// Begin Playback
        /// </summary>
        public void Play()
        {
            if (PlaybackState == EPlaybackState.Playing)
            {
                return;
            }

            if (PlaybackState == EPlaybackState.Paused)
            {
                PlaybackState = EPlaybackState.Playing;
                return;
            }

            Debug.WriteLine("[render]Task starting...");
            playTask = Task.Run(() => {
                IWaveProvider playbackProvider = this.sourceProvider;
                AudioClient client             = this.audioClient;
                Exception exception            = null;

                PlaybackState = EPlaybackState.Playing;

                try
                {
                    // fill a whole buffer
                    var bufferFrameCount = client.BufferSize;
                    var bytesPerFrame    = outputFormat.Channels * outputFormat.BitsPerSample / 8;

                    readBuffer = new byte[bufferFrameCount * bytesPerFrame];
                    //FillBuffer(playbackProvider, bufferFrameCount);

                    client.Start();

                    while (PlaybackState != EPlaybackState.Stopped)
                    {
                        // If using Event Sync, Wait for notification from AudioClient or Sleep half latency
                        if (isUsingEventSync)
                        {
                            //indexHandle = WaitHandle.WaitAny(waitHandles, 3 * latencyMilliseconds, false);
                            frameEventWaitHandle.WaitOne(3 * latencyMilliseconds);
                        }
                        else
                        {
                            Task.Delay(latencyMilliseconds / 2).Wait();
                        }

                        // If still playing and notification is ok
                        if (PlaybackState != EPlaybackState.Playing)
                        {
                            continue;
                        }

                        // See how much buffer space is available.
                        int numFramesPadding = 0;
                        if (isUsingEventSync)
                        {
                            // In exclusive mode, always ask the max = bufferFrameCount = audioClient.BufferSize
                            numFramesPadding = (shareMode == EAudioClientShareMode.Shared) ? client.CurrentPadding : 0;
                        }
                        else
                        {
                            numFramesPadding = client.CurrentPadding;
                        }
                        int numFramesAvailable = bufferFrameCount - numFramesPadding;
                        if (numFramesAvailable > 0)
                        {
                            FillBuffer(playbackProvider, numFramesAvailable);
                        }
                    }
                }
                catch (Exception e)
                {
                    Debug.WriteLine("[render]Task catch Exception.");
                    Debug.WriteLine(e.Message);
                    Debug.WriteLine(e.Source);
                    Debug.WriteLine(e.StackTrace);
                    exception = e;
                }
                finally
                {
                    client.Stop();
                    client.Reset();
                    Debug.WriteLine("[render]Task stop detected.");
                    RaisePlaybackStopped(exception);
                }
            });
            Debug.WriteLine("[render]Task started");
        }
Beispiel #32
0
        private WaveFormat SetupWaveFormat(IWaveSource source, AudioClient audioClient)
        {
            WaveFormat waveFormat = source.WaveFormat;
            WaveFormat closestMatch;
            WaveFormat finalFormat = waveFormat;

            if (!audioClient.IsFormatSupported(_shareMode, waveFormat, out closestMatch))
            {
                if (closestMatch == null)
                {
                    WaveFormat mixformat = audioClient.GetMixFormat();
                    if (mixformat == null || !audioClient.IsFormatSupported(_shareMode, mixformat))
                    {
                        WaveFormatExtensible[] possibleFormats =
                        {
                            new WaveFormatExtensible(waveFormat.SampleRate,    32, waveFormat.Channels,
                                                     AudioSubTypes.IeeeFloat),
                            new WaveFormatExtensible(waveFormat.SampleRate,    24, waveFormat.Channels,
                                                     AudioSubTypes.Pcm),
                            new WaveFormatExtensible(waveFormat.SampleRate,    16, waveFormat.Channels,
                                                     AudioSubTypes.Pcm),
                            new WaveFormatExtensible(waveFormat.SampleRate,     8, waveFormat.Channels,
                                                     AudioSubTypes.Pcm),

                            new WaveFormatExtensible(waveFormat.SampleRate,    32,                   2,
                                                     AudioSubTypes.IeeeFloat),
                            new WaveFormatExtensible(waveFormat.SampleRate,    24,                   2,
                                                     AudioSubTypes.Pcm),
                            new WaveFormatExtensible(waveFormat.SampleRate,    16,                   2,
                                                     AudioSubTypes.Pcm),
                            new WaveFormatExtensible(waveFormat.SampleRate,     8,                   2,
                                                     AudioSubTypes.Pcm),

                            new WaveFormatExtensible(waveFormat.SampleRate,    32,                   1,
                                                     AudioSubTypes.IeeeFloat),
                            new WaveFormatExtensible(waveFormat.SampleRate,    24,                   1,
                                                     AudioSubTypes.Pcm),
                            new WaveFormatExtensible(waveFormat.SampleRate,    16,                   1,
                                                     AudioSubTypes.Pcm),
                            new WaveFormatExtensible(waveFormat.SampleRate,     8,                   1,
                                                     AudioSubTypes.Pcm)
                        };

                        if (!CheckForSupportedFormat(audioClient, possibleFormats, out mixformat))
                        {
                            throw new NotSupportedException("Could not find a supported format.");
                        }
                    }

                    finalFormat = mixformat;
                }
                else
                {
                    finalFormat = closestMatch;
                }

                //todo: test channel matrix conversion
                ChannelMatrix channelMatrix = null;
                if (UseChannelMixingMatrices)
                {
                    try
                    {
                        channelMatrix = ChannelMatrix.GetMatrix(_source.WaveFormat, finalFormat);
                    }
                    catch (Exception)
                    {
                        Debug.WriteLine("No channelmatrix was found.");
                    }
                }
                DmoResampler resampler = channelMatrix != null
                                        ? new DmoChannelResampler(_source, channelMatrix, finalFormat)
                                        : new DmoResampler(_source, finalFormat);
                resampler.Quality = 60;

                _source           = resampler;
                _createdResampler = true;

                return(finalFormat);
            }

            return(finalFormat);
        }
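CheckForSupportedFormat is called above but not shown; a plausible reconstruction from its call site (an assumption, not the library's verbatim code):

        // Assumed shape, inferred from the call in SetupWaveFormat: return the first
        // candidate the client accepts in the current share mode, or false if none fits.
        private bool CheckForSupportedFormat(AudioClient audioClient,
            WaveFormatExtensible[] possibleFormats, out WaveFormat foundMatch)
        {
            foundMatch = null;
            foreach (var format in possibleFormats)
            {
                if (audioClient.IsFormatSupported(_shareMode, format))
                {
                    foundMatch = format;
                    return true;
                }
            }
            return false;
        }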