Example #1
0
        public void RespectsOffsetAndCount()
        {
            // SilenceProvider.Read must zero exactly the [offset, offset + count)
            // slice of the destination buffer, leaving surrounding bytes untouched.
            var provider = new SilenceProvider(new WaveFormat(44100, 2));
            var buffer   = Enumerable.Repeat((byte)1, 10).ToArray();

            var bytesRead = provider.Read(buffer, 2, 4);

            Assert.AreEqual(4, bytesRead);
            Assert.AreEqual(new byte[] { 1, 1, 0, 0, 0, 0, 1, 1, 1, 1 }, buffer);
        }
Example #2
0
        public void CanReadSilence()
        {
            // Reading the full buffer from a SilenceProvider should report the
            // requested length and overwrite every byte with zero.
            const int length = 1000;

            var provider = new SilenceProvider(new WaveFormat(44100, 2));
            var buffer   = Enumerable.Repeat((byte)1, length).ToArray();

            var bytesRead = provider.Read(buffer, 0, length);

            Assert.AreEqual(length, bytesRead);
            Assert.AreEqual(new byte[length], buffer);
        }
Example #3
0
        /// <summary>
        /// Toggles voice recording for the given connection. When recording is
        /// already active it simply flips the flag so the session that started
        /// the recording (blocked in the wait loop below) finalises the file.
        /// Otherwise it wires up the mixer/recorder graph, waits until recording
        /// is toggled off or the connection drops, then encodes the WAV to MP3
        /// and uploads it to the channel.
        /// </summary>
        public override async Task <CommandResult> RunVoiceCommand(string[] args, CommandContext context, ConnectionModel connection)
        {
            if (connection.Recording)
            {
                connection.Recording = false;
            }
            else
            {
                var started = false;
                try
                {
                    // Mix constant silence with the (attenuated) received voice so
                    // the recorder always has data to pull, even when nobody speaks.
                    SilenceProvider silence = new SilenceProvider(format);
                    mixer.AddInputStream(new Wave16ToFloatProvider(silence));
                    mixer.AddInputStream(new Wave16ToFloatProvider(new VolumeWaveProvider16(connection.RecordBuffer)
                    {
                        Volume = 0.1f
                    }));

                    path = Path.ChangeExtension(Path.GetTempFileName(), ".wav");

                    recorder = new WaveRecorder(mixer, path);
                    model    = connection;

                    connection.Connection.VoiceReceived += Connection_VoiceReceived;
                    connection.Disconnected             += Connection_Disconnected;
                    connection.Recording = true;

                    await context.ReplyAsync("Now recording...");

                    // Muted playback is what drives the recorder graph.
                    output = new WaveOut();
                    output.Init(new VolumeWaveProvider16(new WaveFloatTo16Provider(recorder))
                    {
                        Volume = 0.0f
                    });

                    // Pre-fill the buffer with silence to compensate for latency.
                    // BUG FIX: the original computed 48000 * (Ping / 1000) with
                    // integer division, which truncates to 0 samples for any ping
                    // below 1000 ms. Multiply before dividing instead.
                    int pingInSamples = (int)(48000L * connection.Connection.Ping / 1000) * 2;
                    connection.RecordBuffer.ClearBuffer();
                    connection.RecordBuffer.AddSamples(Enumerable.Repeat((byte)0, pingInSamples).ToArray(), 0, pingInSamples);
                    output.Play();
                    started = true;
                }
                catch (Exception ex)
                {
                    // BUG FIX: the original `catch { }` swallowed the failure and
                    // then fell through to encode a WAV that was never recorded.
                    // Report the failure instead; the early return below skips the
                    // wait/encode phase.
                    await context.ReplyAsync($"Failed to start recording: {ex.Message}");
                }

                if (!started)
                {
                    return(CommandResult.Empty);
                }

                // Block until the user toggles recording off or the connection drops.
                while (connection.Connected && connection.Recording)
                {
                    await Task.Delay(100);
                }

                await CleanupAsync();

                string newPath = new Uri(Path.Combine(Path.GetTempPath(), $"{context.Guild.Name.ToLower()} - {DateTime.Now.ToString("dd-MM-yyyy HH-mm", CultureInfo.InvariantCulture)}.mp3")).LocalPath;
                MediaFoundationEncoder.EncodeToMp3(new WaveFileReader(path), newPath);
                await context.Channel.SendFileAsync(newPath, $"Here's your ~~clusterfuck~~ recording!");
            }

            return(CommandResult.Empty);
        }
Example #4
0
        private void ConfigureSilenceOut()
        {
            // WasapiLoopbackCapture doesn't record any audio when nothing is
            // playing, so keep a muted/silent stream running at all times.
            _silenceWaveOut = new WaveOutEvent();
            _silenceWaveOut.Init(new SilenceProvider(new WaveFormat(44100, 2)));
            _silenceWaveOut.Play();
        }
Example #5
0
        /// <summary>
        /// Builds the form, configures the file-open dialog, and seeds the mixer
        /// with a single silent input so it always has a source in the desired
        /// wave format.
        /// </summary>
        public PlayersForm(WaveFormat format)
        {
            InitializeComponent();

            dialog.Title    = "Choose an audio clip to load";
            dialog.Filter   = "Audio files (*.wav;*.mp3)|*.wav;*.mp3|All files (*.*)|*.*";
            this.WaveFormat = format;

            // Silent placeholder input keeps the mixer alive with no real clips.
            var silentInput = new SilenceProvider(format).ToSampleProvider();
            Mixer = new MixingSampleProvider(new List <ISampleProvider> { silentInput });
        }
 /// <summary>
 /// Starts a silent IEEE-float stream on the device mapped to
 /// <paramref name="audioInterface"/>, unless one is already playing or the
 /// interface is inactive. Completes synchronously.
 /// </summary>
 private Task StartPlaySilence(AudioInterface audioInterface)
 {
     // null silentAudioEvent also counts as "not playing" here.
     bool alreadyPlaying = silentAudioEvent?.PlaybackState == PlaybackState.Playing;

     if (audioInterface.IsActive && !alreadyPlaying)
     {
         var silence = new SilenceProvider(WaveFormat.CreateIeeeFloatWaveFormat(44100, 2));

         silentAudioEvent = new WaveOutEvent
         {
             DeviceNumber = GetWaveOutDeviceNumber(audioInterface)
         };
         silentAudioEvent.Init(silence);
         silentAudioEvent.Play();
     }

     return Task.CompletedTask;
 }
Example #7
0
        /// <summary>
        /// Plays frequencies[0] and frequencies[1] mixed together for one second,
        /// then — after a 200 ms gap — frequencies[2] and frequencies[3] mixed
        /// for one second. Requires at least four entries in
        /// <paramref name="frequencies"/>.
        /// </summary>
        private void PlayTonePairs(float[] frequencies)
        {
            float gain = 0.25f;

            try
            {
                var duration = TimeSpan.FromMilliseconds(1000);

                // One generator per frequency, all at the same gain, each limited
                // to the tone duration. (The original also built an unused 200 ms
                // SilenceProvider "pause" — the gap is produced by FollowedBy
                // below, so that dead local has been removed.)
                var tones = new ISampleProvider[4];
                for (int i = 0; i < tones.Length; i++)
                {
                    tones[i] = new SignalGenerator
                    {
                        Frequency = frequencies[i],
                        Gain      = gain
                    }.Take(duration);
                }

                var mixer1 = new MixingSampleProvider(new[] { tones[0], tones[1] });
                var mixer2 = new MixingSampleProvider(new[] { tones[2], tones[3] });

                mWaveOut = new WaveOut();
                mWaveOut.Init(mixer1.FollowedBy(TimeSpan.FromMilliseconds(200), mixer2));
                mWaveOut.Play();
            }
            catch (Exception ex)
            {
                // BUG FIX: the original threw new ArgumentException(ex.Message),
                // discarding the original exception and its stack trace. Keep the
                // ArgumentException type (callers may catch it) but preserve the
                // cause as InnerException.
                throw new ArgumentException(ex.Message, ex);
            }
        }
Example #8
0
        /// <summary>
        /// Window constructor: sets the DataContext, ensures an output folder
        /// exists, allocates the replay ring buffers, enumerates WaveIn capture
        /// devices, and starts a silent playback stream before initialising the
        /// UI components.
        /// </summary>
        public MainWindow()
        {
            // Set DataContext
            DataContext = this;

            // Init local variables and settings
            // "BLANK" is presumably the persisted-settings sentinel for "unset" —
            // TODO(review): confirm against wherever OutputLocation is loaded.
            if (OutputLocation == "BLANK")
            {
                OutputLocation = Path.Combine(Environment.GetFolderPath(Environment.SpecialFolder.CommonApplicationData), "AudioInstantReplay");
                Directory.CreateDirectory(OutputLocation);
            }

            // Init buffers: one ring buffer each for speaker and mic audio,
            // sized to hold ReplayDuration worth of bytes.
            speakerBytes = new CircularBuffer <byte>(DurationToBytes(ReplayDuration));
            micBytes     = new CircularBuffer <byte>(DurationToBytes(ReplayDuration));

            // Get input devices
            IList <InputDevice> deviceList = new List <InputDevice>();

            // n starts at -1: WaveIn device -1 is presumably the system wave
            // mapper ("default") device — confirm against WaveIn docs.
            for (int n = -1; n < WaveIn.DeviceCount; n++)
            {
                var caps = WaveIn.GetCapabilities(n);
                deviceList.Add(new InputDevice()
                {
                    Name = caps.ProductName, DeviceId = n
                });
            }
            // Sentinel "None" entry (DeviceId -9999) so the user can disable mic capture.
            deviceList.Insert(0, new InputDevice()
            {
                Name = "None", DeviceId = -9999
            });
            InputDevices        = new CollectionView(deviceList);
            SelectedInputDevice = InputDeviceId;

            // Play silence so mic output matches speaker output
            // (WASAPI loopback capture produces no data while nothing plays).
            // NOTE(review): 'wo' is a local that is never stored or disposed —
            // if the GC collects it, the silence stream may stop; consider
            // keeping a field reference. tempCapture is likewise created only
            // to read its WaveFormat and never disposed.
            WasapiLoopbackCapture tempCapture = new WasapiLoopbackCapture();
            var          silence = new SilenceProvider(tempCapture.WaveFormat).ToSampleProvider();
            WaveOutEvent wo      = new WaveOutEvent();

            wo.Init(silence);
            wo.Play();

            // Initialize Component
            InitializeComponent();
        }
		//ISampleProvider FOut;

		/// <summary>
		/// Initialises the node's silence source using its configured wave format.
		/// </summary>
		public NAudioFileStreamNode()
		{
			FSilence = new SilenceProvider(WaveFormat);
		}
Example #10
0
        /// <summary>
        /// Wires a WASAPI loopback capture on handler.Device into an Azure Speech
        /// push audio stream: captured audio is resampled to 16 kHz mono 16-bit
        /// and written to handler.AudioInputStream; gaps in capture are padded
        /// with generated silence. Hooks handler.StopRequested to stop capture,
        /// then starts recording and returns true.
        /// </summary>
        private static async Task <bool> MakeAudioConfigAsync(SpeechHandler handler)
        {
            // var audioConfig = AudioConfig.FromWavFileInput(@"D:\Users\ManabuTonosaki\OneDrive - tomarika\tono.wav");
            // var audioConfig = AudioConfig.FromDefaultMicrophoneInput();

            Debug.Assert(handler.Device != null);

            var wavein        = new WasapiLoopbackCapture(handler.Device);
            // Target format for the speech service: 16 kHz, 16-bit, mono.
            var waveoutFormat = new WaveFormat(16000, 16, 1);
            var lastSpeakDT   = DateTime.Now;
            var willStop      = DateTime.MaxValue;

            wavein.DataAvailable += (s, e) =>
            {
                if (e.BytesRecorded > 0)
                {
                    // Resample the captured chunk to the target rate, fold to
                    // mono 16-bit, and push it to the speech input stream.
                    using var ms   = new MemoryStream(e.Buffer, 0, e.BytesRecorded);
                    using var rs   = new RawSourceWaveStream(ms, wavein.WaveFormat);
                    using var freq = new MediaFoundationResampler(rs, waveoutFormat.SampleRate);
                    var w16 = freq.ToSampleProvider().ToMono().ToWaveProvider16();
                    var len = w16.Read(handler.buf, 0, handler.buf.Length);
                    handler.AudioInputStream.Write(handler.buf, len);

                    // Real audio arrived: reset the silence bookkeeping.
                    lastSpeakDT = DateTime.Now;
                    willStop    = DateTime.MaxValue;
                }
                else
                {
                    // No data captured. For up to 10 seconds after the last real
                    // audio, pad the stream with silence so the recogniser keeps
                    // receiving a continuous signal.
                    // NOTE(review): willStop is set but nothing in this block
                    // ever acts on it expiring — padding appears to simply stop
                    // once DateTime.Now passes willStop. Confirm that is the
                    // intended 10-second cut-off behaviour.
                    if (DateTime.Now < willStop)
                    {
                        if (willStop == DateTime.MaxValue)
                        {
                            willStop = DateTime.Now + TimeSpan.FromSeconds(10);
                        }
                        // Read one 10 ms slice of silence (bytes/sec ÷ 100 for
                        // this mono format), then write it once per 10 ms elapsed
                        // since the last real audio.
                        var silence = new SilenceProvider(waveoutFormat);
                        var len     = silence.Read(handler.buf, 0, waveoutFormat.BitsPerSample * waveoutFormat.SampleRate / 8 / 100); // 10ms
                        var cnt     = (int)((DateTime.Now - lastSpeakDT).TotalMilliseconds / 10);
                        for (var i = 0; i < cnt; i++)
                        {
                            handler.AudioInputStream.Write(handler.buf, len);
                        }
                        lastSpeakDT = DateTime.Now;
                    }
                }
            };

            // Push-stream format advertised to the speech SDK; must match the
            // 16 kHz / 16-bit / mono data produced above.
            var audioformat = AudioStreamFormat.GetWaveFormatPCM(samplesPerSecond: 16000, bitsPerSample: 16, channels: 1);

            handler.AudioInputStream = AudioInputStream.CreatePushStream(audioformat);
            handler.AudioConfig      = AudioConfig.FromStreamInput(handler.AudioInputStream);

            await Task.Delay(100);

            handler.StopRequested += (s, e) =>
            {
                wavein.StopRecording();
            };
            wavein.StartRecording();

            return(true);
        }