/// <summary>
/// Initializes a new instance of the <see cref="AudioVisualizationBase"/> class.
/// </summary>
public AudioVisualizationBase()
{
    InitializeComponent();

    // Grab the default render endpoint so the visualizer can listen to the
    // system's loopback ("what you hear") stream.
    listenMmDevice = WasapiLoopbackCapture.GetDefaultLoopbackCaptureDevice();

    // Wire lifetime and change notifications before any notifying property is
    // assigned, so the handler observes the IsBarVisualization write below.
    Disposed        += AudioVisualizationBase_Disposed;
    PropertyChanged += AudioVisualizationBase_PropertyChanged;

    // Only the bars subclass renders discrete bars; everything else is line-style.
    IsBarVisualization = GetType() == typeof(AudioVisualizationBars);
}
Beispiel #2
0
        /// <summary>
        /// Starts a dual recording: the default render endpoint (loopback) is written
        /// to "&lt;name&gt;_i&lt;ext&gt;" and the default microphone to "&lt;name&gt;_m&lt;ext&gt;",
        /// both placed next to the requested output file.
        /// </summary>
        /// <param name="outputFilePath">Target file whose folder, name and extension seed the two capture files.</param>
        /// <exception cref="ArgumentException">Thrown when <paramref name="outputFilePath"/> is null or blank.</exception>
        public void StartRecording(string outputFilePath)
        {
            if (string.IsNullOrWhiteSpace(outputFilePath))
            {
                throw new ArgumentException("An output file path is required.", nameof(outputFilePath));
            }

            this.outputFilePath     = outputFilePath;
            this.LoopbackFilePath   = BuildSiblingPath(outputFilePath, "_i");
            this.MicrophoneFilePath = BuildSiblingPath(outputFilePath, "_m");

            // Fresh loopback capturer; keep a silent stream playing on the same
            // endpoint so the loopback keeps producing callbacks even when the
            // system output is otherwise silent.
            this.LoopbackCapture = new WasapiLoopbackCapture();
            this.SilencePlayer   = new WasapiOut(WasapiLoopbackCapture.GetDefaultLoopbackCaptureDevice(), AudioClientShareMode.Shared, false, 100);
            this.SilencePlayer.Init(new SilenceProvider(LoopbackCapture.WaveFormat));
            this.SilencePlayer.Play();

            this.LoopbackWriter = new WaveFileWriter(this.LoopbackFilePath, LoopbackCapture.WaveFormat);

            this.LoopbackCapture.DataAvailable += (s, a) =>
            {
                // Null-guard: a late buffer arriving after RecordingStopped nulled
                // the writer must not throw.
                this.LoopbackWriter?.Write(a.Buffer, 0, a.BytesRecorded);
            };

            this.LoopbackCapture.RecordingStopped += (s, a) =>
            {
                this.LoopbackWriter.Close();
                this.LoopbackWriter.Dispose();
                this.LoopbackWriter = null;
                this.SilencePlayer.Stop();
                // Fixed leak: Stop() alone does not release the WASAPI client.
                this.SilencePlayer.Dispose();
                LoopbackCapture.Dispose();
            };

            // Start recording !
            this.LoopbackCapture.StartRecording();

            // NOTE(review): the mic is fed the loopback capture's format (usually
            // IEEE-float mix format) so both files match for later mixing — confirm
            // the input device actually accepts this format.
            this.MicrophoneCapture = new WaveInEvent();
            this.MicrophoneCapture.DeviceNumber       = 0;
            this.MicrophoneCapture.WaveFormat         = LoopbackCapture.WaveFormat;
            this.MicrophoneCapture.BufferMilliseconds = 50;
            this.MicrophoneWriter = new WaveFileWriter(this.MicrophoneFilePath, LoopbackCapture.WaveFormat);
            this.MicrophoneCapture.DataAvailable += (s, a) =>
            {
                this.MicrophoneWriter?.Write(a.Buffer, 0, a.BytesRecorded);
            };
            this.MicrophoneCapture.RecordingStopped += (s, a) =>
            {
                this.MicrophoneWriter.Close();
                this.MicrophoneWriter.Dispose();
                this.MicrophoneWriter = null;
                MicrophoneCapture.Dispose();
            };
            MicrophoneCapture.StartRecording();
        }

        // Builds "<dir>/<name><suffix><ext>" alongside the given file.
        private static string BuildSiblingPath(string path, string suffix)
        {
            return Path.Combine(
                Path.GetDirectoryName(path) ?? string.Empty,
                Path.GetFileNameWithoutExtension(path) + suffix + Path.GetExtension(path));
        }
Beispiel #3
0
        /// <summary>
        /// Builds the main form: validates the injected services, enumerates the
        /// active capture endpoints plus the default loopback endpoint, and fills
        /// the sound-card combo box before loading the saved configuration.
        /// </summary>
        /// <exception cref="ArgumentNullException">Thrown when any injected service is null.</exception>
        public ApplicationForm(IRecorderService recorderService,
                               IConverterService converterService,
                               IEncryptionService encryptionService)
        {
            // Fixed: null arguments previously raised ArgumentException with the
            // parameter name passed as the *message*; ArgumentNullException is the
            // correct type and records the name in ParamName.
            if (recorderService == null)
            {
                throw new ArgumentNullException(nameof(recorderService));
            }
            if (converterService == null)
            {
                throw new ArgumentNullException(nameof(converterService));
            }
            if (encryptionService == null)
            {
                throw new ArgumentNullException(nameof(encryptionService));
            }

            _recorderService   = recorderService;
            _converterService  = converterService;
            _encryptionService = encryptionService;

            AppParameters = new Parameters {
                TemporaryFolder = ApplicationConfiguration.TemporaryFolder.GetProgramDataSubFolder()
            };

            InitializeComponent();

            _devices = new Dictionary<string, Device>();

            MMDeviceEnumerator enumerator = new MMDeviceEnumerator();

            // All active physical capture endpoints...
            foreach (MMDevice device in enumerator.EnumerateAudioEndPoints(DataFlow.Capture, DeviceState.Active))
            {
                _devices.Add(device.ID, new Device(device.ID, device.FriendlyName, false));
            }

            // ...plus the default render endpoint exposed as a loopback source.
            MMDevice loopbackDevice = WasapiLoopbackCapture.GetDefaultLoopbackCaptureDevice();

            _devices.Add(loopbackDevice.ID, new Device(loopbackDevice.ID, string.Format("Windows mixed output - {0}", loopbackDevice.FriendlyName), true));

            _devices.ForEach(device => cb_soundcard.Items.Add(new ComboboxItem {
                Text = device.Value.ProductName, Value = device.Key
            }));

            LoadConfiguration();
        }
Beispiel #4
0
        /// <summary>
        /// Streams the default loopback capture to a Discord PCM stream for ~5
        /// seconds, resampling each buffer through an ACM converter.
        /// </summary>
        /// <param name="client">Connected Discord audio client to write PCM data to.</param>
        private async Task PlayAudioAsync(IAudioClient client)
        {
            var discord = client.CreatePCMStream(AudioApplication.Music);
            WasapiLoopbackCapture captureInstance = new WasapiLoopbackCapture(WasapiLoopbackCapture.GetDefaultLoopbackCaptureDevice());

            // Create the ACM converter once and reuse it: the original allocated a
            // new AcmStream per DataAvailable callback and never disposed any of
            // them, leaking unmanaged ACM handles.
            // NOTE(review): 41000/4800 look like typos for 44100/48000, and the
            // source format should match captureInstance.WaveFormat (loopback is
            // normally IEEE float) — the original comment already flagged
            // "demonic screeching"; confirm the intended formats.
            var resampleStream = new AcmStream(new WaveFormat(41000, 16, 2), new WaveFormat(4800, 16, 2));

            captureInstance.DataAvailable += (s, a) =>
            {
                // Copy only the bytes actually recorded, clamped to the converter's
                // source buffer (a.Buffer.Length may exceed a.BytesRecorded).
                int bytesIn = Math.Min(a.BytesRecorded, resampleStream.SourceBuffer.Length);
                Buffer.BlockCopy(a.Buffer, 0, resampleStream.SourceBuffer, 0, bytesIn);

                int sourceBytesConverted = 0;
                var convertedBytes       = resampleStream.Convert(bytesIn, out sourceBytesConverted);
                if (sourceBytesConverted != bytesIn)
                {
                    // Fixed: the format string previously had no arguments.
                    Console.WriteLine("We didn't convert everything {0} bytes in, {1} bytes converted", bytesIn, sourceBytesConverted);
                }

                var converted = new byte[convertedBytes];
                Buffer.BlockCopy(resampleStream.DestBuffer, 0, converted, 0, convertedBytes);

                // Fixed: write exactly the converted byte count — the original wrote
                // a.BytesRecorded, overrunning the smaller converted buffer.
                discord.Write(converted, 0, convertedBytes);
            };

            captureInstance.RecordingStopped += (s, a) =>
            {
                Console.WriteLine("Stopped Recording!");
                resampleStream.Dispose();
                captureInstance.Dispose();
                discord.Dispose();
            };

            captureInstance.StartRecording();

            await Task.Delay(5000);

            captureInstance.StopRecording();
            await Task.Delay(5000);
        }
Beispiel #5
0
        /// <summary>
        /// Timer tick: refreshes the peak meter of the first selected capture
        /// device (read from the default loopback endpoint) and of every selected
        /// output device, as integer percentages.
        /// </summary>
        private void OnAudioMeterTimerElapsed(object sender, EventArgs e)
        {
            try
            {
                if (SelectedCaptureDevicesCollection.Any())
                {
                    SelectedCaptureDevicesCollection[0].AudioPeak = (int)(WasapiLoopbackCapture.GetDefaultLoopbackCaptureDevice().AudioMeterInformation.MasterPeakValue * 100);
                }

                SelectedOutputDevicesCollection.ToList().ForEach(outputDevice =>
                {
                    // Fixed precedence bug: "x ?? 0 * 100" parsed as "x ?? (0 * 100)",
                    // i.e. "x ?? 0" — the peak was never scaled to a percentage.
                    outputDevice.AudioPeak = (int)((outputDevice.AudioMeterInformation?.MasterPeakValue ?? 0) * 100);
                });
            }
            catch (Exception ex)
            {
                // Meter refresh is best-effort; log and carry on until the next tick.
                ApplicationLogger.Log(ex.Message, ex.StackTrace);
            }
        }
        // Records the system output ("what you hear") from the selected render device.
        private void RecordIn(int inputDeviceIndex)
        {
            lock (o1)
            {
                // Removed dead local: the original fetched the default loopback
                // device into an unused variable.
                MMDevice device = _devices.ElementAt(inputDeviceIndex);

                // Loopback capture of the chosen device's render stream.
                waveIn = new WasapiLoopbackCapture(device)
                {
                    //ShareMode = AudioClientShareMode.
                };

                waveIn.DataAvailable    += WaveIn_DataAvailable;
                waveIn.RecordingStopped += this.WaveIn_RecordingStopped;

                // 32 kbps MP3 sink for the loopback stream.
                waveWriter_in = new LameMP3FileWriter(FileName + "_in" + extention, wf, 32);

                // WASAPI loopback produces no callbacks while the endpoint is
                // silent, so keep a silent stream playing to guarantee data flow.
                waveOneOut = new WaveOut();
                waveOneOut.Init(new SilentWaveProvider());
                waveOneOut.Play();

                waveIn.StartRecording();
            }
        }
Beispiel #7
0
        /// <summary>
        /// Creates a new IWaveProvider using a Wasapi Capture device
        /// </summary>
        /// <param name="id">The ID of the Wasapi Device; the sentinels "&lt;default&gt;" and "&lt;defaultLoopback&gt;" select the default capture / loopback endpoint</param>
        /// <param name="inputLatency">Length of Wasapi buffer in ms, or -1 for automatic value</param>
        /// <param name="bufferLatency">Length of Wavebuffer in ms, or -1 for automatic value</param>
        /// <exception cref="KeyNotFoundException">Thrown when no active endpoint matches <paramref name="id"/>.</exception>
        public WasapiProvider(string id, int inputLatency = -1, int bufferLatency = -1)
        {
            MMDevice device = null;

            // Two sentinel IDs select default endpoints; anything else is matched
            // against every active endpoint regardless of data flow.
            if (id == "<default>")
            {
                device = WasapiCapture.GetDefaultCaptureDevice();
            }
            else if (id == "<defaultLoopback>")
            {
                device = WasapiLoopbackCapture.GetDefaultLoopbackCaptureDevice();
            }
            else
            {
                foreach (MMDevice dev in deviceEnumerator.EnumerateAudioEndPoints(DataFlow.All, DeviceState.Active))
                {
                    if (dev.ID == id)
                    {
                        device = dev;
                        break;
                    }
                }
            }

            if (device == null)
            {
                throw new KeyNotFoundException($"Device with ID '{id}' not found or inactive");
            }

            Name = (device.DataFlow == DataFlow.Capture ? "In " : "Out ") + device.FriendlyName;

            // Capture endpoints honour the requested input latency when given;
            // render endpoints are recorded via loopback (no latency override).
            if (device.DataFlow == DataFlow.Capture)
            {
                if (inputLatency == -1)
                {
                    capture = new WasapiCapture(device);
                }
                else
                {
                    capture = new WasapiCapture(device, false, inputLatency);
                }
            }
            else
            {
                capture = new WasapiLoopbackCapture(device);
            }

            // Fixed compile errors: the original was missing the ';' after the
            // first object initializer and had a stray ';' between the if-block
            // and 'else' (plus one after the else-block).
            if (bufferLatency == -1)
            {
                buffer = new BufferedWaveProvider(capture.WaveFormat)
                {
                    DiscardOnBufferOverflow = true
                };
            }
            else
            {
                buffer = new BufferedWaveProvider(capture.WaveFormat)
                {
                    DiscardOnBufferOverflow = true,
                    BufferDuration          = TimeSpan.FromMilliseconds(bufferLatency)
                };
            }

            capture.DataAvailable += Capture_DataAvailable;
        }
Beispiel #8
0
 /// <summary>
 /// Initializes a loopback capture of the default render endpoint with the
 /// given buffer length; the <c>false</c> argument is forwarded to the base
 /// constructor unchanged.
 /// </summary>
 /// <param name="audioBufferMillisecondsLength">Capture buffer size in milliseconds.</param>
 public LoopbackCapture(int audioBufferMillisecondsLength)
     : base(WasapiLoopbackCapture.GetDefaultLoopbackCaptureDevice(), false, audioBufferMillisecondsLength)
 {
 }