Example 1
 internal WasapiLoopbackRecorder(INetworkChatCodec c)
 {
     deviceNumber = -1;
     codec = c;
     waveIn = new WasapiLoopbackCapture();
     convertionStream = new AcmStream(new WaveFormat(waveIn.WaveFormat.SampleRate, 16, waveIn.WaveFormat.Channels), codec.RecordFormat);
 }
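The constructor above only prepares the AcmStream; a DataAvailable handler is still needed to push captured audio through it. A minimal sketch of such a handler follows (not part of the original recorder; the codec.Encode call is a hypothetical sink and buffer-size checks are omitted). The loopback capture delivers 32-bit IEEE float samples, so they are narrowed to 16-bit PCM before entering the conversion stream.

 private void OnDataAvailable(object sender, WaveInEventArgs e)
 {
     var floatBuffer = new WaveBuffer(e.Buffer);
     int samples = e.BytesRecorded / 4;   // 4 bytes per 32-bit float sample

     // float -> 16-bit PCM, written straight into the AcmStream source buffer
     for (int i = 0; i < samples; i++)
     {
         float clamped = Math.Max(-1f, Math.Min(1f, floatBuffer.FloatBuffer[i]));
         short pcm = (short)(clamped * short.MaxValue);
         convertionStream.SourceBuffer[i * 2] = (byte)(pcm & 0xFF);
         convertionStream.SourceBuffer[i * 2 + 1] = (byte)(pcm >> 8);
     }

     // run the ACM conversion; DestBuffer now holds audio in codec.RecordFormat
     int converted = convertionStream.Convert(samples * 2, out int sourceBytesConverted);
     // codec.Encode(convertionStream.DestBuffer, 0, converted);   // hypothetical consumer
 }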
Example 2
        public static void Connect(IPEndPoint endpoint, MMDevice device, ICodec codec)
        {
            var config = new NetPeerConfiguration("airgap");

            _client = new NetClient(config);
            _client.RegisterReceivedCallback(MessageReceived);

            _client.Start();

            _waveIn = new WasapiLoopbackCapture(device);
            _codec = codec;

            _sourceFormat = _waveIn.WaveFormat;
            _targetFormat = new WaveFormat(_codec.SampleRate, _codec.Channels); // format to convert to

            _waveIn.DataAvailable += SendData;
            _waveIn.RecordingStopped += (sender, args) => Console.WriteLine("Stopped");
            // TODO: RecordingStopped is called when you change the audio device settings, should recover from that

            NetOutgoingMessage formatMsg = _client.CreateMessage();
            formatMsg.Write(_targetFormat.Channels);
            formatMsg.Write(_targetFormat.SampleRate);
            formatMsg.Write(codec.Name);

            _client.Connect(endpoint, formatMsg);
        }
Example 3
		public void Initalize(MMDevice audioDevice, int fftLength, int bufferLenght)
		{
            this.audioDevice = audioDevice;
			this.bufferLenght = bufferLenght;
			Capture = new WasapiLoopbackCapture(audioDevice);
			SampleAggregator = new SampleAggregator(fftLength);
			SampleAggregator.FftCalculated += new EventHandler<FftEventArgs>(FftCalculated);
			SampleAggregator.PerformFFT = true;
			//capture.ShareMode = AudioClientShareMode.Shared;
		}
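Initalize() above prepares the SampleAggregator but never connects it to the capture or starts recording. A possible wiring is sketched below, assuming the SampleAggregator variant from the NAudio demo projects that exposes an Add(float) method (this Start method is not part of the original class).

		public void Start()
		{
			Capture.DataAvailable += (s, e) =>
			{
				// the loopback capture delivers 32-bit float samples
				var buffer = new WaveBuffer(e.Buffer);
				for (int i = 0; i < e.BytesRecorded / 4; i++)
				{
					SampleAggregator.Add(buffer.FloatBuffer[i]);
				}
			};
			Capture.StartRecording();
		}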
Example 4
        public RealTimePlayback()
        {
            this._lock = new object();

            this._capture = new WasapiLoopbackCapture();
            this._capture.DataAvailable += this.DataAvailable;
            initAudioDev();
            this._fftLength = 1024;                          // FFT window size (power of two)
            this._m = (int)Math.Log(this._fftLength, 2.0);   // FFT order = log2(_fftLength)
            this._fftBuffer = new Complex[this._fftLength];
            this._lastFftBuffer = new float[this._fftLength];
        }
Example 5
        public AudioType()
        {
            _device = new MMDeviceEnumerator()
                .EnumerateAudioEndPoints(DataFlow.All, DeviceState.Active).FirstOrDefault();

            _sampleAggregator.FftCalculated += FftCalculated;
            _sampleAggregator.PerformFFT = true;

            // Start listening for sound data
            _waveIn = new WasapiLoopbackCapture();
            _waveIn.DataAvailable += OnDataAvailable;
            _waveIn.StartRecording();
        }
Example 6
        private NAudioEngine()
        {
            sampleAggregator = new SampleAggregator(fftDataSize);

            var deviceEnumerator = new MMDeviceEnumerator();
            var defaultDevice = deviceEnumerator.GetDefaultAudioEndpoint(DataFlow.Render, Role.Multimedia);
            this.capture = new WasapiLoopbackCapture(defaultDevice);
            capture.ShareMode = AudioClientShareMode.Shared;

            capture.DataAvailable += CaptureOnDataAvailable;

            capture.StartRecording();
        }
Example 7
        public void Start()
        {
            LogAudioDevices();

              Console.Out.WriteLine("Starting from default device...");
              myCapture = new WasapiLoopbackCapture();

              Console.Out.WriteLine("Capture format: {0}", myCapture.WaveFormat);
              Console.Out.WriteLine("");

              myCapture.DataAvailable += capture_DataAvailable;

              Console.Out.WriteLine("Loopback wave format is: {0}", myCapture.WaveFormat);
              myCapture.StartRecording();
        }
Example 8
        static async Task MainAsync()
        {
            Console.Title = "Audio Streamer - PC to Android";

            IPAddress IPAddr;
            bool      UseAdb = false;

            try
            {
                var AdbDevices = Process.Start(new ProcessStartInfo()
                {
                    FileName               = "adb",
                    Arguments              = "devices",
                    UseShellExecute        = false,
                    RedirectStandardOutput = true
                });

                await AdbDevices.StandardOutput.ReadLineAsync();

                UseAdb = !string.IsNullOrWhiteSpace(await AdbDevices.StandardOutput.ReadLineAsync());
            }
            catch (System.ComponentModel.Win32Exception)
            {
            }

            if (UseAdb)
            {
                IPAddr = IPAddress.Loopback;
            }
            else
            {
                Console.Write("IP: ");
                IPAddr = IPAddress.Parse(Console.ReadLine());
            }

            Process.GetCurrentProcess().PriorityClass = ProcessPriorityClass.High;
            using (Capture = new WasapiLoopbackCapture(0, new CSCore.WaveFormat(), ThreadPriority.Highest))
            {
                while (true)
                {
                    var NoSpamDelay = Task.Delay(1000);
                    if (UseAdb)
                    {
                        Process.Start(new ProcessStartInfo()
                        {
                            FileName        = "adb",
                            Arguments       = "forward tcp:1420 tcp:1420",
                            UseShellExecute = false
                        });
                    }

                    using (var Conn = new TcpClient()
                    {
                        NoDelay = true,
                        ReceiveBufferSize = 64,
                        SendBufferSize = 1 << 12    //2^12 = ~4000 so 1000 floats
                    })
                    {
                        try
                        {
                            await Conn.ConnectAsync(IPAddr, ServerPort);

                            Stream = Conn.GetStream();
                            if (Stream.ReadByte() == 1)
                            {
                                Console.WriteLine("Connected to " + IPAddr.ToString());
                                Capture.Initialize();
                                using (Source = new SoundInSource(Capture))
                                {
                                    int SampleRateServer = Source.WaveFormat.SampleRate;
                                    int SampleRateClient = Stream.ReadByte() | Stream.ReadByte() << 8 | Stream.ReadByte() << 16;
                                    if (SampleRateClient != SampleRateServer)
                                    {
                                        Console.WriteLine($"Sample rate mismatch, PC was {SampleRateServer} Hz but client was {SampleRateClient} Hz");
                                        Console.WriteLine("Adjust your PC's sample rate then press any key to try again");
                                        Console.ReadKey();
                                        Console.Clear();
                                    }
                                    else
                                    {
                                        // Start Capturing
                                        Source.DataAvailable += DataAvailable;
                                        Capture.Start();

                                        Console.WriteLine($"Started recording audio at {SampleRateServer} Hz");
                                        Window.SetWindowShown(false);

                                        // Stop Capturing
                                        await(DisconnectWaiter = new TaskCompletionSource <bool>()).Task;
                                        await Task.Run(() => Capture.Stop());

                                        Window.SetWindowShown(true);
                                        Console.WriteLine("Disconnected, stopped recording audio");
                                    }
                                }
                            }
                        }
                        catch { }
                        await NoSpamDelay;
                    }
                }
            }
        }
Example 9
        static void Main(string[] args)
        {
            //running setup program
            Console.WriteLine("Welcome to the STM32F0 Discovery DAC/PWM Audio setup program!");
            Console.WriteLine("Please connect your audio device to either pin PA4 for DAC mode, or PC7 for PWM!");
            Console.WriteLine("");
            Console.WriteLine("DAC plays at 48khz/12bit.");
            Console.WriteLine("PWM plays at 48khz/10bit.");
            Console.WriteLine("");
            Console.WriteLine("Before we begin, make sure that the baudrate is set to 960000 on the STM32F0!");
            Console.WriteLine("Compile and upload the included STM32F0 code to your device for hassle free audio playback ;)!");
            Console.WriteLine("");
            Console.WriteLine("");

            buffer = new ConcurrentBag <UInt16[]>();

            //lists all the available serial ports
            string[] ports = SerialPort.GetPortNames();
            int      b     = 0;

            Console.WriteLine("Select STM32F0 Discovery port by typing the corrosponding number:");
            foreach (string port in ports)
            {
                Console.WriteLine(b + " - " + port);
                b++;
            }

            bool succesconversion = false;
            int  choiceint        = 0;

            while (!succesconversion)
            {
                string choice = Console.ReadLine();
                succesconversion = int.TryParse(choice, out choiceint);
            }
            string port_chosen = "";

            if (choiceint < ports.Length)
            {
                port_chosen = ports[choiceint];
            }

            //start connection with chosen serial port
            //baud rate of 960000: 2 bytes per sample over UART (10 bits per byte on the wire) = 48 kHz
            ComPort          = new SerialPort();
            ComPort.PortName = port_chosen;
            ComPort.BaudRate = 960000;
            ComPort.Open();

            //gets all available devices for playback
            Console.WriteLine("Select where your audio should be played from by typing the corrosponding number:");
            MMDeviceEnumerator enumerator = new MMDeviceEnumerator();
            var devices = enumerator.EnumerateAudioEndPoints(DataFlow.All, DeviceState.Active);

            for (int i = 0; i < devices.Count; i++)
            {
                Console.WriteLine(i + " : " + devices[i].ToString());
            }

            succesconversion = false;
            choiceint        = 0;

            while (!succesconversion)
            {
                string choice = Console.ReadLine();
                succesconversion = int.TryParse(choice, out choiceint);
            }

            //start recording audio
            if (choiceint < devices.Count)
            {
                sourceStream = new WasapiLoopbackCapture(devices[choiceint]);
                sourceStream.DataAvailable += sourceStream_DataAvailable;
                sourceStream.StartRecording();
            }

            //play audio on device
            Thread play = new Thread(new ThreadStart(playbufferback));

            play.Start();

            //select which mode you want on the fly (you need to manually change it on the STM32F0)
            Console.WriteLine("10 BIT PWM MODE DISABLED");
            Console.WriteLine("Now using 12 bit DAC mode on pin PA4!");
            Console.WriteLine("If no sound or weird distorted sound is being produced, press the USER button on your DISCOVERY board!");
            Console.WriteLine("Sometimes your DISOVERY board needs a reset!");
            Console.WriteLine("You can switch to PWM by typing: pwm/PWM");
            Console.WriteLine("");
            Console.WriteLine("");

            while (true)
            {
                string modeselect = Console.ReadLine();
                if (modeselect.ToLower().Contains("dac"))
                {
                    Console.WriteLine("10 BIT PWM MODE DISABLED");
                    Console.WriteLine("Now using 12 bit DAC mode on pin PA4!");
                    Console.WriteLine("If the green LED on your Discovery is turned off, you are still in PWM mode, switch to DAC mode by pressing the USER button!");
                    Console.WriteLine("If no sound or weird distorted sound is being produced, press the USER button multiple times on your DISCOVERY board!");
                    Console.WriteLine("Sometimes your DISOVERY board needs a reset!");
                    Console.WriteLine("You can switch to PWM by typing: pwm/PWM");
                    Console.WriteLine("");
                    Console.WriteLine("");
                    mode = true;
                }
                else if (modeselect.ToLower().Contains("pwm"))
                {
                    Console.WriteLine("12 BIT DAC MODE DISABLED");
                    Console.WriteLine("Now using 10 bit PWM mode on PIN PC7!");
                    Console.WriteLine("If the green LED on your Discovery is turned on, you are still in DAC mode, switch to PWM mode by pressing the USER button!");
                    Console.WriteLine("If no sound or weird distorted sound is being produced, press the USER button multiple times on your DISCOVERY board!");
                    Console.WriteLine("Sometimes your DISOVERY board needs a reset!");
                    Console.WriteLine("You can switch to DAC by typing: dac/DAC");
                    Console.WriteLine("");
                    Console.WriteLine("");
                    mode = false;
                }
            }
        }
Example 10
 static ProofOfConcept()
 {
     capture = new WasapiLoopbackCapture();
     writer  = new WaveFileWriter(ConfigurationManager.AppSettings.Get("OutputPath").ToString(), capture.WaveFormat);
 }
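The static constructor above only creates the capture and the WaveFileWriter; a sketch of how the two could be wired together follows (a hypothetical Start method, not taken from the original project).

 public static void Start()
 {
     // stream every captured block straight into the WAV file
     capture.DataAvailable += (s, e) => writer.Write(e.Buffer, 0, e.BytesRecorded);

     capture.RecordingStopped += (s, e) =>
     {
         writer.Dispose();   // finalizes the WAV header
         capture.Dispose();
     };

     capture.StartRecording();
 }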
Example 11
        public void StartListen()
        {
            _isRunning       = true;
            _loopbackCapture = new WasapiLoopbackCapture();
            _loopbackCapture.Initialize();

            _soundInSource = new SoundInSource(_loopbackCapture);

            _basicSpectrumProvider = new BasicSpectrumProvider(_soundInSource.WaveFormat.Channels, _soundInSource.WaveFormat.SampleRate, CFftSize);

            LineSpectrum lineSpectrum50 = new LineSpectrum(CFftSize)
            {
                SpectrumProvider = _basicSpectrumProvider,
                BarCount         = 50,
                UseAverage       = true,
                IsXLogScale      = true,
                ScalingStrategy  = ScalingStrategy.Linear
            };

            LineSpectrum lineSpectrum200 = new LineSpectrum(CFftSize)
            {
                SpectrumProvider = _basicSpectrumProvider,
                BarCount         = 200,
                UseAverage       = true,
                IsXLogScale      = true,
                ScalingStrategy  = ScalingStrategy.Linear
            };

            LineSpectrum lineSpectrum1000 = new LineSpectrum(CFftSize)
            {
                SpectrumProvider = _basicSpectrumProvider,
                BarCount         = 1000,
                UseAverage       = true,
                IsXLogScale      = true,
                ScalingStrategy  = ScalingStrategy.Linear
            };

            _loopbackCapture.Start();

            _singleBlockNotificationStream = new SingleBlockNotificationStream(_soundInSource.ToSampleSource());
            _realtimeSource = _singleBlockNotificationStream.ToWaveSource();

            byte[] buffer = new byte[_realtimeSource.WaveFormat.BytesPerSecond / 2];

            _soundInSource.DataAvailable += (s, ea) =>
            {
                int read;
                while (_isRunning && (read = _realtimeSource.Read(buffer, 0, buffer.Length)) > 0)
                {
                    float[] audioData50   = lineSpectrum50.GetSpectrumData(MaxAudioValue);
                    float[] audioData200  = lineSpectrum200.GetSpectrumData(MaxAudioValue);
                    float[] audioData1000 = lineSpectrum1000.GetSpectrumData(MaxAudioValue);

                    if (audioData50 != null && audioData200 != null && audioData1000 != null && _receiveAudio != null)
                    {
                        _receiveAudio(audioData50, audioData200, audioData1000);
                    }
                }
            };

            _singleBlockNotificationStream.SingleBlockRead += singleBlockNotificationStream_SingleBlockRead;
        }
Example 12
 public LoopbackRecord()
 {
     _loopbackCapture = new WasapiLoopbackCapture();
 }
Example 13
        private static async Task <bool> MakeAudioConfigAsync(SpeechHandler handler)
        {
            // var audioConfig = AudioConfig.FromWavFileInput(@"D:\Users\ManabuTonosaki\OneDrive - tomarika\tono.wav");
            // var audioConfig = AudioConfig.FromDefaultMicrophoneInput();

            Debug.Assert(handler.Device != null);

            var wavein        = new WasapiLoopbackCapture(handler.Device);
            var waveoutFormat = new WaveFormat(16000, 16, 1);
            var lastSpeakDT   = DateTime.Now;
            var willStop      = DateTime.MaxValue;

            wavein.DataAvailable += (s, e) =>
            {
                if (e.BytesRecorded > 0)
                {
                    using var ms   = new MemoryStream(e.Buffer, 0, e.BytesRecorded);
                    using var rs   = new RawSourceWaveStream(ms, wavein.WaveFormat);
                    using var freq = new MediaFoundationResampler(rs, waveoutFormat.SampleRate);
                    var w16 = freq.ToSampleProvider().ToMono().ToWaveProvider16();
                    var len = w16.Read(handler.buf, 0, handler.buf.Length);
                    handler.AudioInputStream.Write(handler.buf, len);

                    lastSpeakDT = DateTime.Now;
                    willStop    = DateTime.MaxValue;
                }
                else
                {
                    if (DateTime.Now < willStop)
                    {
                        if (willStop == DateTime.MaxValue)
                        {
                            willStop = DateTime.Now + TimeSpan.FromSeconds(10);
                        }
                        var silence = new SilenceProvider(waveoutFormat);
                        var len     = silence.Read(handler.buf, 0, waveoutFormat.BitsPerSample * waveoutFormat.SampleRate / 8 / 100); // 10ms
                        var cnt     = (int)((DateTime.Now - lastSpeakDT).TotalMilliseconds / 10);
                        for (var i = 0; i < cnt; i++)
                        {
                            handler.AudioInputStream.Write(handler.buf, len);
                        }
                        lastSpeakDT = DateTime.Now;
                    }
                }
            };

            var audioformat = AudioStreamFormat.GetWaveFormatPCM(samplesPerSecond: 16000, bitsPerSample: 16, channels: 1);

            handler.AudioInputStream = AudioInputStream.CreatePushStream(audioformat);
            handler.AudioConfig      = AudioConfig.FromStreamInput(handler.AudioInputStream);

            await Task.Delay(100);

            handler.StopRequested += (s, e) =>
            {
                wavein.StopRecording();
            };
            wavein.StartRecording();

            return true;
        }
Example 14
        public static void Main()
        {
            //Decrypt resources and load.
            Console.WriteLine("---------------------------------------------------------------------");
            Console.WriteLine("Dual Audio - (C) 2014 Thr. Using NAudio (http://naudio.codeplex.com/)");
            Console.WriteLine("---------------------------------------------------------------------");

            int waveInDevices  = WaveIn.DeviceCount;
            int waveOutDevices = WaveOut.DeviceCount;
            int inputdevice    = 0;
            int output         = 0;

            List <int> Outputs = new List <int>();

            Console.WriteLine("Located {0} Input Devices.\n", waveInDevices);
            Console.Write("How many outputs to bind to? (max {0}): ", waveOutDevices);
            //grab inputs.

            while (!int.TryParse(Console.ReadLine(), out totaloutputs) || totaloutputs > waveOutDevices)
            {
                Console.Write("How many outputs to bind to? (max {0}): ", waveOutDevices);
            }

            for (int waveInDevice = 0; waveInDevice < waveInDevices; waveInDevice++)
            {
                WaveInCapabilities deviceInfo = WaveIn.GetCapabilities(waveInDevice);
                Console.WriteLine("{0}: {1}, {2} channels.", waveInDevice, deviceInfo.ProductName, deviceInfo.Channels);
            }

            Console.Write("Select Input Line: ");
            while (!int.TryParse(Console.ReadLine(), out inputdevice))
            {
                Console.Write("Select Input Line: ");
            }

            Console.WriteLine("Successfully set input as device {0}.", inputdevice);
            Console.WriteLine("");
            output = totaloutputs;
            while (output > 0)
            {
                for (int waveOutDevice = 0; waveOutDevice < waveOutDevices; waveOutDevice++)
                {
                    if (!Outputs.Contains(waveOutDevice))
                    {
                        WaveOutCapabilities deviceInfo = WaveOut.GetCapabilities(waveOutDevice);
                        Console.WriteLine("{0}: {1}, {2}", waveOutDevice, deviceInfo.ProductName, deviceInfo.Channels);
                    }
                }
                Console.Write("Select the output device for playback{0}: ", (totaloutputs - output).ToString());
                int device = 0;
                while (!int.TryParse(Console.ReadLine(), out device) || device > waveOutDevices - 1)
                {
                    Console.WriteLine("Invalid Device!");
                    Console.Write("Select the output device for playback{0}: ", (totaloutputs - output).ToString());
                }
                Outputs.Add(device);
                Console.WriteLine("Successfully set the output device for playback{0}.", (totaloutputs - output).ToString());
                output--;
            }
            Console.WriteLine("");
            string p = "";

            Console.Write("Dump to file? (Y\\N) ");
            while ((p = Console.ReadLine().ToLower()) != "y" && p != "n")
            {
                Console.WriteLine("");
                Console.Write("Dump to file? (Y\\N) ");
            }
            dumptofile = p == "y";

            Console.Write("Amplify Output? (Y\\N) ");
            while ((p = Console.ReadLine().ToLower()) != "y" && p != "n")
            {
                Console.WriteLine("");
                Console.Write("Amplify Output? (Y\\N) ");
            }
            amplify = p == "y";

            waveIn = new WasapiLoopbackCapture();

            Console.WriteLine("Initialized Loopback Capture...");
            if (dumptofile)
            {
                string filename = "";
                Console.Write("Filename (without extension): ");
                while ((filename = Console.ReadLine()) == "")
                {
                    Console.WriteLine("");
                    Console.Write("Filename (without extension): ");
                }
                f = new WaveFileWriter(File.OpenWrite(Environment.CurrentDirectory + "\\" + filename + ".wav"), waveIn.WaveFormat);
            }
            waveIn.DataAvailable += InputBufferToFileCallback;
            waveIn.StartRecording(); //Start our loopback capture.

            WaveOut[] devices = new WaveOut[totaloutputs];

            m1 = new BufferedWaveProvider[totaloutputs];
            for (int i = 0; i < totaloutputs; i++)
            {
                m1[i] = new BufferedWaveProvider(waveIn.WaveFormat);
                m1[i].BufferLength            = 1024 * 1024 * 10;
                m1[i].DiscardOnBufferOverflow = true;
                devices[i]                 = new WaveOut();
                devices[i].Volume          = 1.0f; // WaveOut.Volume accepts 0.0-1.0; larger values throw
                devices[i].NumberOfBuffers = 3;
                devices[i].DeviceNumber    = Outputs[i];
                devices[i].DesiredLatency  = 61;
                devices[i].Init(m1[i]);
                Console.WriteLine("Initializing Device{0}...", i);
                devices[i].Play();
                Console.WriteLine("Started Playing on Device{0}...", i);
            }

            while (true)
            {
                if (Console.ReadLine().ToLower() == "s")
                {
                    stop = true;
                    for (int i = 0; i < devices.Length; i++)
                    {
                        devices[i].Stop();
                    }
                    waveIn.StopRecording();
                    f?.Close();   // writer only exists when dumping to file
                    Environment.Exit(0);
                }
            }
        }
Example 15
 public LoopbackCaptureProvider()
 {
     _Capture = new WasapiLoopbackCapture();
     Format   = _Capture.WaveFormat;
     _Capture.StartRecording();
 }
Example 16
        static void Main(string[] args)
        {
            int           iSampleRate = 41000;
            int           iCh         = 2;
            int           iBits       = 16;
            int           iVol        = 100;
            WaveInType    waveInType  = WaveInType.WaveIn;
            bool          isHead      = true;
            bool          isHelp      = false;
            bool          isTest      = false;
            List <string> errorList   = new List <string>();

            //read args
            {
                if (args != null)
                {
                    string sw = null;
                    for (int i = 0; i < args.Length; i++)
                    {
                        string arg = args[i];
                        if (string.IsNullOrWhiteSpace(sw))
                        {
                            switch (arg)
                            {
                            case "-d":
                            case "-r":
                            case "-c":
                            case "-b":
                            case "-v":
                                sw = arg;
                                break;

                            case "-N":
                                isHead = false;
                                break;

                            case "-test":
                                isTest = true;
                                break;

                            case "--h":
                            case "--v":
                                isHelp = true;
                                break;

                            default:
                                errorList.Add(string.Format("arg[{0}] : illegal option \"{1}\"", new object[] { i, arg }));
                                break;
                            }
                        }
                        else
                        {
                            Action <Action> Exec = (action) =>
                            {
                                try
                                {
                                    action();
                                }
                                catch (Exception e)
                                {
                                    errorList.Add(string.Format("arg[{0}] : illegal param \"{2}\" at \"{1}\"", new object[] { i, sw, arg }));
                                    errorList.Add(e.ToString());
                                }
                            };
                            switch (sw)
                            {
                            case "-d":
                                switch (arg)
                                {
                                case "wasapiloopback":
                                    waveInType = WaveInType.WasapiLoppback;
                                    break;

                                case "wavein":
                                    waveInType = WaveInType.WaveIn;
                                    break;

                                default:
                                    errorList.Add(string.Format("arg[{0}] : illegal param \"{2}\" at \"{1}\"", new object[] { i, sw, arg }));
                                    break;
                                }
                                break;

                            case "-r":
                                Exec(() => iSampleRate = int.Parse(arg));
                                break;

                            case "-c":
                                Exec(() => iCh = int.Parse(arg));
                                break;

                            case "-b":
                                Exec(() => iBits = int.Parse(arg));
                                break;

                            case "-v":
                                Exec(() => iVol = int.Parse(arg));
                                break;
                            }
                            sw = null;
                        }
                    }
                }
            }

            if (isHead)
            {
                message("naucon v0.0.0.0.0.1");
                message("auther takumi.");
                message("copyright libraplanet.");
                message("license n/a");
                message("");
                if (!isHelp)
                {
                    message("parameter:");
                    message(string.Format("  sampling rale  {0} Hz", new object[] { iSampleRate }));
                    message(string.Format("  ch             {0} ch", new object[] { iCh }));
                    message(string.Format("  bits           {0} bit", new object[] { iBits }));
                    message(string.Format("  capture device {0}", new object[] { waveInType }));
                    message(string.Format("  vol            {0}", new object[] { iVol }));
                    message("");
                }
            }
            //start
            if (errorList.Count > 0)
            {
                foreach (string s in errorList)
                {
                    message(s);
                }
                message("");
            }
            else if (isHelp)
            {
                //help
                message("usage: naucon [[option] [param]]...");
                message("");
                message("options and pamrams");
                message("-d [wavein | wasapiloopback]  mode of capture device.");
                message("                              WaveIn or WASAPI Loopback.");
                message("-r [n]                        sampling rate.");
                message("                                e.g.) 441000");
                message("-c [n]                        channels.");
                message("                                e.g.) 2");
                message("-b [n]                        bits per sample.");
                message("                                e.g.) 16");
                message("-v [n]                        volume. 100 = 100%");
                message("                                e.g.) 16");
                message("-N                            no output head message.");
                message("-test                         argument test (no recording).");
                message("--h                           view help.");
                message("--v                           view version.");
                message("");
            }
            else
            {
                object     mutex    = new object();
                bool       isActive = true;
                IWaveIn    waveIn;
                WaveFormat outWaveFormat = new WaveFormat(iSampleRate, iBits, iCh);

                //init
                {
                    switch (waveInType)
                    {
                    case WaveInType.WasapiLoppback:
                        waveIn = new WasapiLoopbackCapture();
                        break;

                    case WaveInType.WaveIn:
                    default:
                        WaveCallbackInfo callback = WaveCallbackInfo.FunctionCallback();
                        waveIn            = new WaveIn(callback);
                        waveIn.WaveFormat = outWaveFormat;
                        break;
                    }
                }

                if (isHead)
                {
                    message("output format:");
                    message(string.Format("  sampling rale  {0} Hz", new object[] { outWaveFormat.SampleRate }));
                    message(string.Format("  ch             {0} ch", new object[] { outWaveFormat.Channels }));
                    message(string.Format("  bits           {0} bit", new object[] { outWaveFormat.BitsPerSample }));
                    message(string.Format("  encoding       {0}", new object[] { outWaveFormat.Encoding }));
                    message("");
                }

                //event
                {
                    waveIn.DataAvailable += (sender, e) =>
                    {
                        lock (mutex)
                        {
                            if (WaveFormat.Equals(waveIn.WaveFormat, outWaveFormat) && (iVol == 100))
                            {
                                using (Stream consoleStream = Console.OpenStandardOutput())
                                {
                                    consoleStream.Write(e.Buffer, 0, e.BytesRecorded);
                                }
                            }
                            else
                            {
                                byte[]    data;
                                AudioData audio = new AudioData(waveIn.WaveFormat, e.Buffer, e.BytesRecorded);

                                if (iVol != 100)
                                {
                                    audio.ChangeVolume(iVol / 100.0);
                                }

                                audio.Conver(outWaveFormat);
                                data = audio.ToBytes();
                                if ((data != null) && (data.Length > 0))
                                {
                                    using (Stream consoleStream = Console.OpenStandardOutput())
                                    {
                                        consoleStream.Write(data, 0, data.Length);
                                    }
                                }
                            }
                        }
                    };

                    waveIn.RecordingStopped += (sender, e) =>
                    {
                        lock (mutex)
                        {
                            isActive = false;
                        }
                    };
                }


                if (!isTest)
                {
                    waveIn.StartRecording();
                    while (true)
                    {
                        lock (mutex)
                        {
                            if (isActive)
                            {
                                Thread.Sleep(1);
                            }
                            else
                            {
                                return;
                            }
                        }
                    }
                }
            }
        }
Example 17
 private IWaveIn CreateWaveInDevice()
 {
     IWaveIn newWaveIn;
     if (radioButtonWaveIn.Checked)
     {
         newWaveIn = new WaveIn();
         newWaveIn.WaveFormat = new WaveFormat(8000, 1);
     }
     else if (radioButtonWaveInEvent.Checked)
     {
         newWaveIn = new WaveInEvent();
         newWaveIn.WaveFormat = new WaveFormat(8000, 1);
     }
     else if (radioButtonWasapi.Checked)
     {
         // can't set WaveFormat as WASAPI doesn't support SRC
         var device = (MMDevice) comboWasapiDevices.SelectedItem;
         newWaveIn = new WasapiCapture(device);
     }
     else
     {
         // can't set WaveFormat as WASAPI doesn't support SRC
         newWaveIn = new WasapiLoopbackCapture();
     }
     newWaveIn.DataAvailable += OnDataAvailable;
     newWaveIn.RecordingStopped += OnRecordingStopped;
     return newWaveIn;
 }
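As the comments above note, WASAPI capture always runs at the device's mix format (typically 32-bit IEEE float at the shared-mode sample rate), so any target format has to be produced after capture. The sketch below shows one way OnDataAvailable could convert each block, reusing the MediaFoundationResampler pattern from Example 13; it assumes the returned device is kept in a waveIn field, and the 8 kHz mono 16-bit target simply mirrors the WaveIn branches above.

 private void OnDataAvailable(object sender, WaveInEventArgs e)
 {
     using (var ms = new MemoryStream(e.Buffer, 0, e.BytesRecorded))
     using (var raw = new RawSourceWaveStream(ms, waveIn.WaveFormat))      // device mix format
     using (var resampler = new MediaFoundationResampler(raw, 8000))       // target sample rate
     {
         // reduce to 16-bit mono, mirroring the WaveIn branches above
         var pcm16 = resampler.ToSampleProvider().ToMono().ToWaveProvider16();
         var block = new byte[8000 * 2];   // up to one second of 8 kHz mono PCM
         int read = pcm16.Read(block, 0, block.Length);
         // hand the first 'read' bytes to whatever consumes them (writer, network, ...)
     }
 }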
Example 18
        private static unsafe void Main(string[] args)
        {
            var api = new WasapiLoopbackCapture();

            var (width, height) = (1920, 1080);
            Raylib.InitWindow(width, height, "SharpVisualizer");
            Raylib.SetWindowMinSize(1280, 720);

            const int size     = 8192;
            var       data     = stackalloc Complex[size + 9600];
            var       rects    = stackalloc Rectangle[size + 9600];
            var       dataLock = new Mutex();

            api.DataAvailable += (_, eventArgs) =>
            {
                // BytesRecorded is the valid portion of the buffer (32-bit float samples)
                var len = eventArgs.BytesRecorded / sizeof(float);

                if (len > 9600)
                {
                    return;
                }

                lock (dataLock)
                {
                    fixed (byte* buffer = eventArgs.Buffer)
                    {
                        var buf = (float *)buffer;

                        for (var i = 0; i < len; i++)
                        {
                            data[i].X = buf[i];
                            data[i].Y = 0.0f;
                        }

                        FastFourierTransformUnsafe.FFT(false, (int)Math.Log2(size), data);
                    }
                }
            };

            api.RecordingStopped += (_, eventArgs) => { Console.WriteLine(eventArgs.Exception?.ToString()); };

            api.StartRecording();

            while (!Raylib.WindowShouldClose())
            {
                if (Raylib.IsKeyPressed(KeyboardKey.KEY_F11))
                {
                    Raylib.SetWindowSize(Raylib.GetMonitorWidth(0), Raylib.GetMonitorHeight(0));
                    Raylib.ToggleFullscreen();

                    if (!Raylib.IsWindowFullscreen())
                    {
                        Raylib.SetWindowSize(width, height);
                    }
                }

                Raylib.BeginDrawing();

                lock (dataLock)
                {
                    Draw(data, rects, size);
                }

                Raylib.EndDrawing();
            }

            Raylib.CloseWindow();
            api.StopRecording();
        }
Example 19
        /// <summary>
        /// Initializes the capture:
        /// - listens to the current default output device
        /// - creates the DataAvailable listener that runs the FFT
        /// - starts recording
        /// </summary>
        private void InitCapture()
        {
            // Takes the current default output device
            capture = new WasapiLoopbackCapture();

            // Used to get the audio spectrum using FFT
            int    fftPos = 0;
            int    m      = (int)Math.Log(fftLength, 2.0);
            object _lock  = new object();

            Complex[] fftBuffer = new Complex[fftLength]; // the data
            lastFftBuffer = new float[fftLength];         // the last data saved

            capture.DataAvailable += (object sender, WaveInEventArgs args) =>
            {
                // Interprets the sample as 32 bit floating point audio (-> FloatBuffer)
                soundLevel = 0;
                var buffer = new WaveBuffer(args.Buffer);

                for (int index = 0; index < args.BytesRecorded / 4; index++)
                {
                    var sample = buffer.FloatBuffer[index];

                    // Sound level
                    if (sample < 0)
                    {
                        sample = -sample;             // abs
                    }
                    if (sample > soundLevel)
                    {
                        soundLevel = sample;
                    }

                    // Bass level

                    if (listenForBass)
                    {
                        // apply a Hann window to the sample before it goes into the FFT buffer
                        fftBuffer[fftPos].X = (float)(sample * FastFourierTransform.HannWindow(fftPos, fftLength));
                        fftBuffer[fftPos].Y = 0;
                        fftPos++;

                        if (fftPos >= fftLength)
                        {
                            fftPos = 0;

                            FastFourierTransform.FFT(true, m, fftBuffer);

                            lock (_lock)
                            {
                                bufferAvailible = false;

                                for (int c = 0; c < fftLength; c++)
                                {
                                    float amplitude = (float)Math.Sqrt(fftBuffer[c].X * fftBuffer[c].X + fftBuffer[c].Y * fftBuffer[c].Y);
                                    lastFftBuffer[c] = amplitude;
                                }

                                bufferAvailible = true;
                            }
                        }
                    }
                }
            };

            capture.StartRecording();
        }
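The handler above only fills lastFftBuffer; reading a bass level out of it is left to the caller. A hypothetical helper (not in the original class) is sketched below, assuming lastFftBuffer, bufferAvailible and fftLength are fields as the snippet implies; each FFT bin i covers roughly i * sampleRate / fftLength Hz, so the bins below the cutoff carry the bass content.

        private float GetBassLevel(int sampleRate, float cutoffHz = 250f)
        {
            if (!bufferAvailible)
            {
                return 0f;
            }

            // highest bin index that still falls below the bass cutoff
            int maxBin = Math.Min(lastFftBuffer.Length - 1, (int)(cutoffHz * fftLength / sampleRate));

            float level = 0f;
            for (int i = 1; i <= maxBin; i++)
            {
                level = Math.Max(level, lastFftBuffer[i]);
            }
            return level;
        }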
Example 20
        private void Button_StartStop_Click(object sender, RoutedEventArgs e)
        {
            // Invert the isRecording bool
            isRecording = !isRecording;

            // Stop the recording
            if (!isRecording)
            {
                // Hide/Show buttons
                StartRecVis = true;
                StopRecVis  = false;

                // Set settings colors
                SettingControlLightColor = "#4889c7";
                SettingControlDarkColor  = "#4889c7";

                // Set settings tooltip
                DisabledSettingTooltip = null;

                // Stop the recording
                audioCapture.StopRecording();
                if (InputDeviceId != -9999)
                {
                    micCapture.StopRecording();
                }

                // Re-init buffers
                speakerBytes = new CircularBuffer <byte>(DurationToBytes(ReplayDuration));
                micBytes     = new CircularBuffer <byte>(DurationToBytes(ReplayDuration));
            }

            // Start recording
            if (isRecording)
            {
                // Hide/Show buttons
                StartRecVis = false;
                StopRecVis  = true;

                // Set settings colors
                SettingControlLightColor = "#eeeeee";
                SettingControlDarkColor  = "#878787";

                // Set settings tooltip
                DisabledSettingTooltip = "You must stop recording in order to change this setting!";

                // Re-init the WasapiLoopbackCapture and WaveIn
                audioCapture = new WasapiLoopbackCapture();
                if (InputDeviceId != -9999)
                {
                    micCapture = new WaveIn()
                    {
                        DeviceNumber = InputDeviceId
                    };
                    micCapture.WaveFormat = audioCapture.WaveFormat;
                }

                // Event Handles
                audioCapture.DataAvailable += (s, a) =>
                {
                    for (int i = 0; i < a.BytesRecorded; i++)
                    {
                        speakerBytes.Enqueue(a.Buffer[i]);
                    }
                };

                // Event Handles
                audioCapture.RecordingStopped += (s, a) =>
                {
                    audioCapture?.Dispose();
                    audioCapture = null;
                };

                if (InputDeviceId != -9999)
                {
                    // Event Handles
                    micCapture.DataAvailable += (s, a) =>
                    {
                        for (int i = 0; i < a.BytesRecorded; i++)
                        {
                            micBytes.Enqueue(a.Buffer[i]);
                        }
                    };

                    // Event Handles
                    micCapture.RecordingStopped += (s, a) =>
                    {
                        micCapture?.Dispose();
                        micCapture = null;
                    };
                }

                // Start the recording
                audioCapture.StartRecording();
                if (InputDeviceId != -9999)
                {
                    micCapture.StartRecording();
                }
            }
        }
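The DurationToBytes helper used above is not shown; a plausible implementation (hypothetical, assuming ReplayDuration is a length in seconds and that the loopback mix format is used for sizing) could derive the circular-buffer size from the capture format's byte rate.

        private int DurationToBytes(int seconds)
        {
            // bytes per second of the loopback mix format; assumes a capture
            // instance (or the known device format) is available at this point
            int bytesPerSecond = audioCapture.WaveFormat.AverageBytesPerSecond;
            return bytesPerSecond * seconds;
        }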
Example 21
        private void startWatch_EventArrived(object sender, EventArrivedEventArgs e)
        {
            try
            {
                Process[] proc = Process.GetProcessesByName("Slack");
                if (proc.Length >= 8)
                {
                    if (counter == 3)
                    {
                        try
                        {
                            if (MicFileReader != null && SpeakerFileReader != null)
                            {
                                MicFileReader.Close();
                                MicFileReader.Dispose();
                                MicFileReader = null;

                                SpeakerFileReader.Close();
                                SpeakerFileReader.Dispose();
                                SpeakerFileReader = null;
                            }

                            micRecordFIleName      = RandomString(15) + ".wav";
                            playBackRecordFileName = RandomString(15) + ".wav";

                            if (waveIn != null)
                            {
                                waveIn.StopRecording();
                            }

                            waveIn = new WaveInEvent();

                            waveIn.DeviceNumber = 0;
                            waveIn.WaveFormat   = new NAudio.Wave.WaveFormat(44100, 32, 2);

                            waveIn.DataAvailable    += waveIn_DataAvailable;
                            waveIn.RecordingStopped += waveIn_RecordingStopped;

                            try
                            {
                                writer = new WaveFileWriter(saveDirectory.SelectedPath + "\\" + micRecordFIleName, waveIn.WaveFormat);
                            }
                            #pragma warning disable CS0168 // Variable is declared but never used
                            catch (Exception ex)
                            #pragma warning restore CS0168 // Variable is declared but never used
                            {
                                writer.Close();
                                writer.Dispose();
                                writer = null;

                                writer = new WaveFileWriter(saveDirectory.SelectedPath + "\\" + micRecordFIleName, waveIn.WaveFormat);
                            }

                            waveIn.StartRecording();

                            if (CaptureInstance != null)
                            {
                                CaptureInstance.StopRecording();
                            }

                            CaptureInstance = new WasapiLoopbackCapture();

                            try
                            {
                                RecordedAudioWriter = new WaveFileWriter(saveDirectory.SelectedPath + "\\" + playBackRecordFileName, CaptureInstance.WaveFormat);
                            }
                            #pragma warning disable CS0168 // Variable is declared but never used
                            catch (Exception ex)
                            #pragma warning restore CS0168 // Variable is declared but never used
                            {
                                RecordedAudioWriter.Close();
                                RecordedAudioWriter.Dispose();
                                RecordedAudioWriter = null;

                                RecordedAudioWriter = new WaveFileWriter(saveDirectory.SelectedPath + "\\" + playBackRecordFileName, CaptureInstance.WaveFormat);
                            }

                            CaptureInstance.DataAvailable += (s, a) =>
                            {
                                RecordedAudioWriter.Write(a.Buffer, 0, a.BytesRecorded);
                            };

                            CaptureInstance.RecordingStopped += (s, a) =>
                            {
                                if (RecordedAudioWriter != null)
                                {
                                    try
                                    {
                                        RecordedAudioWriter.Close();
                                        RecordedAudioWriter.Dispose();  //there may be a bug here
                                        RecordedAudioWriter = null;
                                    }
                                    #pragma warning disable CS0168 // Variable is declared but never used
                                    catch (Exception ex)
                                    #pragma warning restore CS0168 // Variable is declared but never used
                                    {
                                        File.Delete(saveDirectory.SelectedPath + "\\" + micRecordFIleName);
                                        File.Delete(saveDirectory.SelectedPath + "\\" + playBackRecordFileName);
                                    }
                                }

                                if (CaptureInstance != null)
                                {
                                    CaptureInstance.StopRecording();
                                    CaptureInstance.Dispose();
                                    CaptureInstance = null;
                                }
                            };

                            CaptureInstance.StartRecording();
                        }

                        catch (Exception ex)
                        {
                            MessageBox.Show(ex.Message);
                        }

                        notifyIcon.Visible = true;
                        notifyIcon.ShowBalloonTip(1000, "Slack Recorder", "Record started", ToolTipIcon.Info);
                        notifyIcon.Visible = false;

                        counter = 0;
                    }
                    counter++;
                }
            }
            catch (NullReferenceException)
            {
                //MessageBox.Show("Slack call servers are down, please try later");
            }
        }
Example 22
 private void InitLoopbackCapture()
 {
     loopbackCapture = new WasapiLoopbackCapture();
     loopbackCapture.DataAvailable += loopbackCapture_DataAvailable;
     loopbackCapture.ShareMode      = NAudio.CoreAudioApi.AudioClientShareMode.Shared;
 }
Example 23
 public OutputSoundRecorder(MMDevice device)
 {
     capture   = new WasapiLoopbackCapture(device);
     listeners = new List <ISoundListener>();
 }
Example 24
        public Testfeed(LSDevice src, int[] chans, VolumeSlider[] vus, int outDev)
        {
            this.src   = src;
            this.chans = chans;
            this.vus   = vus;
            this.ok    = false;

            dummies = new List <DummySink>();

            try
            {
                cap = null;
                if (src.isPlay)
                {
                    cap = new WasapiLoopbackCapture(src.mm);
                }
                else
                {
                    cap = new WasapiCapture(src.mm);
                }
            }
            catch (System.Runtime.InteropServices.COMException ce)
            {
                string errmsg = WinapiShit.comEx((uint)ce.ErrorCode);
                if (errmsg == "")
                {
                    errmsg = "(i don't know what this means but please report it)";
                }

                MessageBox.Show("could not access audio device; error code " + ce.ErrorCode.ToString("x") + "\r\n\r\n" + errmsg);
                return;
            }

            cap.DataAvailable += input_DataAvailable;
            wi = new BufferedWaveProvider(cap.WaveFormat);
            var cap_samples   = wi.ToSampleProvider();
            var chan_splitter = new ChannelSplitter(cap_samples, chans);

            for (var a = 0; a < vus.Length; a++)
            {
                vus[a].SetSource(chan_splitter.output[a]);
                dummies.Add(new DummySink(vus[a]));
            }

            if (outDev >= -1)
            {
                wo = new WaveOut();
                wo.DeviceNumber = outDev;
                wo.Init(chan_splitter);
                wo.Play();
            }
            else
            {
                dummies.Add(new DummySink(chan_splitter));
                wo = null;
            }

            try
            {
                cap.StartRecording();
            }
            catch (System.Runtime.InteropServices.COMException ce)
            {
                MessageBox.Show(WinapiShit.comExMsg((uint)ce.ErrorCode));
                return;
            }
            this.ok = true;
        }
Example 25
        public AudioProcessor(Publisher publisher, string outlet = null)
        {
            using WasapiCapture capture = new WasapiLoopbackCapture(CAPTURE_LATENCY);
            capture.Initialize();
            channelNum       = capture.WaveFormat.Channels;
            systemSampleRate = capture.WaveFormat.SampleRate;

            using SoundInSource captureSource =
                      new SoundInSource(capture)
                  {
                      FillWithZeros = false
                  };
            using SimpleNotificationSource notificationSource =
                      new SimpleNotificationSource(FluentExtensions.ToSampleSource(captureSource))
                  {
                      Interval = PROCESS_WINDOW_LENGTH
                  };

            InitializeMonoBuffers(monoBuffers, channelNum, notificationSource.BlockCount);
            blockBuffer       = new float[notificationSource.BlockCount * channelNum];
            lpf               = new LowpassFilter(systemSampleRate, LFE_CUTOFF);
            MonoPulseDetector =
                new SimplePulseDetector(monoBuffers, lfeProvided: false, biQuadFilter: lpf);
            localisationer = new Localisationer(monoBuffers);
            if (channelNum > 2)
            {
                LFEPulseDetector =
                    new SimplePulseDetector(monoBuffers, lfeProvided: true);
            }

            capture.DataAvailable += (s, e) =>
            {
                while (notificationSource.Read(blockBuffer, 0, notificationSource.BlockCount * channelNum) > 0)
                {
                    monoBuffers = Deinterlacing(monoBuffers,
                                                blockBuffer,
                                                channelNum);
                    if (LFEPulseDetector != null)
                    {
                        bool m = MonoPulseDetector.Predict();
                        bool l = LFEPulseDetector.Predict();
                        if (m || l)
                        {
                            double angle = localisationer.GetLoudestAngle();
#if DEBUG
                            Console.Clear();
                            Console.WriteLine($"LFE Level: {LFEPulseDetector.CurrentReading:F3}, LFE Threshold: {LFEPulseDetector.CurrentThreshold:F3}");
                            Console.WriteLine($"Mixed Level: {MonoPulseDetector.CurrentReading:F3}, Mixed Threshold: {MonoPulseDetector.CurrentThreshold:F3}");
                            Console.WriteLine($"Impulse Detected - Mono:{m}, LFE:{l}, Angle: {angle:F3}, Hit Count:{hitCount}");
#endif
                            if (publisher != null && outlet != null)
                            {
                                publisher.Publish(outlet, $"{m}|{l}|{angle:F3}");
                            }

                            hitCount++;
                        }
                    }
                    else
                    {
                        if (MonoPulseDetector.Predict())
                        {
                            double angle = localisationer.GetLoudestAngle();
#if DEBUG
                            Console.Clear();
                            Console.WriteLine($"Level: {MonoPulseDetector.CurrentReading:F3}, Threshold: {MonoPulseDetector.CurrentThreshold:F3}");
                            Console.WriteLine($"Impulse Detected - Mono, Angle:{angle:F3}, Hit Count:{hitCount}");
#endif
                            if (publisher != null && outlet != null)
                            {
                                publisher.Publish(outlet, $"True|False|{angle:F3}");
                            }

                            hitCount++;
                        }
                    }
                }
            };

            StartCapturingAndHold(capture);
        }
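The Deinterlacing helper called above is not part of the snippet; the following is a minimal sketch (an assumption, mirroring the call's signature) of what such a de-interleaver might do, assuming monoBuffers holds one float[] per channel of notificationSource.BlockCount samples each.

// Hedged sketch, not the original helper: split the interleaved block
// (e.g. L R L R ... for stereo) into one mono buffer per channel.
static float[][] Deinterlacing(float[][] monoBuffers, float[] blockBuffer, int channelNum)
{
    int samplesPerChannel = blockBuffer.Length / channelNum;
    for (int ch = 0; ch < channelNum; ch++)
    {
        for (int i = 0; i < samplesPerChannel; i++)
        {
            monoBuffers[ch][i] = blockBuffer[i * channelNum + ch];
        }
    }
    return monoBuffers;
}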
Esempio n. 26
0
        public void Run_Btn_Click(object sender, EventArgs e)
        {
            isNeedAudio = isNeedAudio_Checkbox.Checked;
            switch (run_btn.Text)
            {
            case "開始錄影 (F2)":
                write_finish = false;
                renew_path();
                isRecording  = true;
                run_btn.Text = "停止錄影 (F2)";
                f2.can_drag  = false;

                videoOption.V_l = f2.Left;
                videoOption.V_t = f2.Top;
                videoOption.V_w = f2.Width;
                videoOption.V_h = f2.Height;


                int width  = videoOption.V_w;
                int height = videoOption.V_h;
                //from https://en.code-bude.net/2013/04/17/how-to-create-video-files-in-c-from-single-images/
                /*my.file_put_contents(video_path, "");*/

                //writer.Open("C:\\temp\\a.avi", V_w, V_h);
                show_hide_f2(false);
                writer = new VideoFileWriter();
                int w            = Convert.ToInt32(Math.Ceiling(videoOption.V_w / 10.0)) * 10;
                int h            = Convert.ToInt32(Math.Ceiling(videoOption.V_h / 10.0)) * 10;
                int videoBitRate = w * h * 3 * 30;
                //int videoBitRate = 1200 * 1000;
                //int audioBitRate = 320 * 1000;
                if (isNeedAudio)
                {
                    //audio = new AudioCaptureDevice();
                    //audio.Format = Accord.Audio.SampleFormat.Format16Bit;
                    //audio.SampleRate = 22050;// Accord.DirectSound.Accord.Audio.Tools.Default.SampleRate;
                    //audio.DesiredFrameSize = 4096;
                    //audio.NewFrame += audioDevice_NewFrame;
                    //audio.DesiredFrameSize = 4096;



                    //mciSendString("open new Type waveaudio alias recsound", null, 0, IntPtr.Zero);
                    //mciSendString("set capture time format ms bitspersample 16 channels 2 samplespersec 48000 bytespersec 192000 alignment 4", null, 0, IntPtr.Zero);
                    //mciSendString("record recsound", null, 0, IntPtr.Zero);
                    //alert("gg");
                    //writer.Open(video_path, w, h, 30, VideoCodec.H264, videoBitRate, AudioCodec.MP3, audioBitRate, 44100,1);
                    //writer.Open(video_path, w, h, 30, VideoCodec.H264, videoBitRate);
                    //audio = new WasapiLoopbackCapture();

                    //audio.Initialize();
                    //audio_w = new WaveWriter(audio_path, audio.WaveFormat);

                    //setup an eventhandler to receive the recorded data
                    //audio.DataAvailable += (s, ee) =>
                    // {
                    //save the recorded audio
                    //    audio_w.Write(ee.Data, ee.Offset, ee.ByteCount);
                    // };

                    //start recording
                    //audio.Start();
                    //Console.ReadKey();
                    system_audio = new WasapiLoopbackCapture();
                    //system_audio = new WasapiLoopbackCapture();

                    //wtf.DataAvailable
                    //waveSource.WaveFormat = new WaveFormat(44100, 2);

                    system_audio.DataAvailable += new EventHandler <WaveInEventArgs>(waveSource_DataAvailable);

                    //system_audio.DataAvailable += (s, ee) =>
                    // {
                    //save the recorded audio
                    //    waveSource_DataAvailable(s, ee);
                    //audio_w.Write(ee.Data, ee.Offset, ee.ByteCount);
                    // };

                    system_audio.RecordingStopped += new EventHandler <StoppedEventArgs>(waveSource_RecordingStopped);

                    waveFile = new WaveFileWriter(audio_path, system_audio.WaveFormat);

                    system_audio.StartRecording();
                    // system_audio.StartRecording();
                }

                writer.Open(video_path, w, h, 30, VideoCodec.H264, videoBitRate);
                //

                //msc = new Thread(thread_msc);
                // msc.Start();
                timer1.Enabled = true;
                sc             = new Thread(thread_sc);
                sc.Start();



                break;

            default:
                show_hide_f2(true);
                run_btn.Text = "開始錄影 (F2)";
                f2.can_drag  = true;
                isRecording  = false;
                if (isNeedAudio)
                {
                    //mciSendString("save recsound " + audio_path, null, 0, IntPtr.Zero);
                    //mciSendString("close recsound", null, 0, IntPtr.Zero);
                    //https://github.com/accord-net/framework/issues/418
                    //byte[] bffr = File.ReadAllBytes(audio_path);
                    //writer.WriteAudioFrame()
                    //writer.WriteAudioFrame()
                    //
                    // audio.Stop();
                    // audio.Dispose();
                    //audio = null;
                    //audio_w.Dispose();
                    //audio_w = null;
                    //GC.Collect();
                    //audio.Dispose();
                    if (system_audio != null)
                    {
                        system_audio.StopRecording();
                        system_audio.Dispose();
                        system_audio = null;
                    }

                    if (waveFile != null)
                    {
                        waveFile.Dispose();
                        waveFile = null;
                    }
                    // system_audio.StopRecording();
                    //  system_audio.Dispose();
                    //my.copy(audio_path, _nt);

                    //sc.Abort();
                    //Thread.Sleep(1000);
                    //GC.Collect();

                    while (write_finish == false)
                    {
                        Thread.Sleep(10);
                    }
                    writer.Close();
                    writer = null;

                    AviManager aviManager = new AviManager(video_path, true);
                    aviManager.AddAudioStream(audio_path, 0);
                    aviManager.Close();
                    timer1.Enabled = false;

                    my.unlink(audio_path);
                }


                break;
            }
        }
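The waveSource_DataAvailable and waveSource_RecordingStopped handlers wired above are not included in this snippet; a plausible minimal version (an assumption, not the author's code) simply appends each captured block to the WaveFileWriter opened above.

// Hedged sketch of the two NAudio handlers referenced in Run_Btn_Click.
void waveSource_DataAvailable(object sender, WaveInEventArgs e)
{
    // Append the raw loopback bytes to the temporary WAV file.
    waveFile?.Write(e.Buffer, 0, e.BytesRecorded);
}

void waveSource_RecordingStopped(object sender, StoppedEventArgs e)
{
    // Disposal happens in the stop branch of Run_Btn_Click; nothing else is required here.
}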
Esempio n. 27
0
        private Stream GetFileWriter(WasapiLoopbackCapture waveIn)
        {
            Stream writer;
            string insertArtistDir = _fileManager.CreateDirectory();

            if (_userSettings.MediaFormat.Equals(MediaFormat.Mp3))
            {
                try
                {
                    _currentFile = _fileManager.BuildFileName(_userSettings.OutputPath + insertArtistDir);
                    writer       = new LameMP3FileWriter(
                        _currentFile,
                        waveIn.WaveFormat,
                        _userSettings.Bitrate);

                    return(writer);
                }
                catch (ArgumentException ex)
                {
                    var message = $"{FrmEspionSpotify.Rm.GetString($"logUnknownException")}: ${ex.Message}";

                    if (!Directory.Exists(_userSettings.OutputPath))
                    {
                        message = FrmEspionSpotify.Rm.GetString($"logInvalidOutput");
                    }
                    else if (ex.Message.StartsWith("Unsupported Sample Rate"))
                    {
                        message = FrmEspionSpotify.Rm.GetString($"logUnsupportedRate");
                    }
                    else if (ex.Message.StartsWith("Access to the path"))
                    {
                        message = FrmEspionSpotify.Rm.GetString("logNoAccessOutput");
                    }
                    else if (ex.Message.StartsWith("Unsupported number of channels"))
                    {
                        var numberOfChannels = ex.Message.Length > 32 ? ex.Message.Remove(0, 31) : "?";
                        var indexOfBreakLine = numberOfChannels.IndexOf("\r\n");
                        if (indexOfBreakLine != -1)
                        {
                            numberOfChannels = numberOfChannels.Substring(0, indexOfBreakLine);
                        }
                        message          = String.Format(FrmEspionSpotify.Rm.GetString($"logUnsupportedNumberChannels"), numberOfChannels);
                    }

                    _form.WriteIntoConsole(message);
                    return(null);
                }
                catch (Exception ex)
                {
                    Console.WriteLine(ex.Message);
                    return(null);
                }
            }

            try
            {
                _currentFile = _fileManager.BuildFileName($"{_userSettings.OutputPath}{insertArtistDir}");
                writer       = new WaveFileWriter(
                    _currentFile,
                    waveIn.WaveFormat
                    );
                return(writer);
            }
            catch (Exception ex)
            {
                Console.WriteLine(ex.Message);
                return(null);
            }
        }
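A hedged usage sketch (not part of the original class) of how the writer returned by GetFileWriter would typically be fed: both LameMP3FileWriter and WaveFileWriter derive from Stream, so the loopback bytes can be written straight into it.

var waveIn = new WasapiLoopbackCapture();
Stream writer = GetFileWriter(waveIn);
if (writer != null)
{
    waveIn.DataAvailable += (s, e) => writer.Write(e.Buffer, 0, e.BytesRecorded);
    waveIn.RecordingStopped += (s, e) => { writer.Flush(); writer.Dispose(); waveIn.Dispose(); };
    waveIn.StartRecording();
}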
Esempio n. 28
0
        void StartRecording()
        {
            var SelectedAudioSourceId   = AudioSettings.SelectedAudioSourceId;
            var SelectedVideoSourceKind = VideoSettings.SelectedVideoSourceKind;
            var SelectedVideoSource     = VideoSettings.SelectedVideoSource;
            var Encoder = VideoSettings.Encoder;

            Duration = OtherSettings.CaptureDuration;
            Delay    = OtherSettings.StartDelay;

            if (Duration != 0 && (Delay * 1000 > Duration))
            {
                Status.Content = "Delay cannot be greater than Duration";
                SystemSounds.Asterisk.Play();
                return;
            }

            if (OtherSettings.MinimizeOnStart)
            {
                WindowState = WindowState.Minimized;
            }

            VideoSettings.Instance.VideoSourceKindBox.IsEnabled = false;
            VideoSettings.Instance.VideoSourceBox.IsEnabled     = SelectedVideoSourceKind == VideoSourceKind.Window;

            // UI Buttons
            RecordButton.ToolTip  = "Stop";
            RecordButton.IconData = (RectangleGeometry)FindResource("StopIcon");

            ReadyToRecord = false;

            int temp;

            string Extension = SelectedVideoSourceKind == VideoSourceKind.NoVideo
                ? (AudioSettings.EncodeAudio && int.TryParse(SelectedAudioSourceId, out temp) ? ".mp3" : ".wav")
                : (Encoder.Name == "Gif" ? ".gif" : ".avi");

            lastFileName = Path.Combine(OutPath.Text, DateTime.Now.ToString("yyyy-MM-dd-HH-mm-ss") + Extension);

            Status.Content = Delay > 0 ? string.Format("Recording from t={0}ms...", Delay) : "Recording...";

            DTimer.Stop();
            Seconds             = Minutes = 0;
            TimeManager.Content = "00:00";

            DTimer.Start();

            int AudioBitRate = App.IsLamePresent ? Mp3EncoderLame.SupportedBitRates[AudioSettings.AudioQuality] : 0;

            IAudioProvider AudioSource = null;
            WaveFormat     wf          = new WaveFormat(44100, 16, AudioSettings.Stereo ? 2 : 1);

            if (SelectedAudioSourceId != "-1")
            {
                int i;
                if (int.TryParse(SelectedAudioSourceId, out i))
                {
                    AudioSource = new WaveIn(i, VideoSettings.FrameRate, wf);
                }
                else
                {
                    AudioSource = new WasapiLoopbackCapture(WasapiAudioDevice.Get(SelectedAudioSourceId), true);
                    wf          = AudioSource.WaveFormat;
                }
            }

            #region ImageProvider
            IImageProvider ImgProvider = null;

            Func <System.Windows.Media.Color, System.Drawing.Color> ConvertColor = (C) => System.Drawing.Color.FromArgb(C.A, C.R, C.G, C.B);

            var mouseKeyHook = new MouseKeyHook(OtherSettings.CaptureClicks,
                                                OtherSettings.CaptureKeystrokes);

            if (SelectedVideoSourceKind == VideoSourceKind.Window)
            {
                var Src = SelectedVideoSource as WindowVSLI;

                if (Src.Handle == RegionSelector.Instance.Handle &&
                    OtherSettings.StaticRegionCapture)
                {
                    ImgProvider = new StaticRegionProvider(RegionSelector.Instance,
                                                           cursor,
                                                           mouseKeyHook);
                    VideoSettings.Instance.VideoSourceBox.IsEnabled = false;
                }
                else
                {
                    ImgProvider = new WindowProvider(() => (VideoSettings.SelectedVideoSource as WindowVSLI).Handle,
                                                     ConvertColor(VideoSettings.BackgroundColor),
                                                     cursor,
                                                     mouseKeyHook);
                }
            }
            else if (SelectedVideoSourceKind == VideoSourceKind.Screen)
            {
                ImgProvider = new ScreenProvider((SelectedVideoSource as ScreenVSLI).Screen,
                                                 cursor,
                                                 mouseKeyHook);
            }
            #endregion

            #region VideoEncoder
            IVideoFileWriter VideoEncoder = null;

            if (Encoder.Name == "Gif")
            {
                if (GifSettings.UnconstrainedGif)
                {
                    Recorder = new UnconstrainedFrameRateGifRecorder(
                        new GifWriter(lastFileName,
                                      Repeat: GifSettings.GifRepeat ? GifSettings.GifRepeatCount : -1),
                        ImgProvider);
                }

                else
                {
                    VideoEncoder = new GifWriter(lastFileName, 1000 / VideoSettings.FrameRate,
                                                 GifSettings.GifRepeat ? GifSettings.GifRepeatCount : -1);
                }
            }

            else if (SelectedVideoSourceKind != VideoSourceKind.NoVideo)
            {
                VideoEncoder = new AviWriter(lastFileName,
                                             ImgProvider,
                                             Encoder,
                                             VideoSettings.VideoQuality,
                                             VideoSettings.FrameRate,
                                             AudioSource,
                                             AudioBitRate == 0 ? null
                                                                : new Mp3EncoderLame(wf.Channels, wf.SampleRate, AudioBitRate));
            }
            #endregion

            if (Recorder == null)
            {
                if (SelectedVideoSourceKind == VideoSourceKind.NoVideo)
                {
                    if (AudioSettings.EncodeAudio)
                    {
                        Recorder = new AudioRecorder(AudioSource, new EncodedAudioFileWriter(lastFileName, new Mp3EncoderLame(wf.Channels, wf.SampleRate, AudioBitRate)));
                    }
                    else
                    {
                        Recorder = new AudioRecorder(AudioSource, new WaveFileWriter(lastFileName, wf));
                    }
                }
                else
                {
                    Recorder = new Recorder(VideoEncoder, ImgProvider, AudioSource);
                }
            }

            Recorder.RecordingStopped += (E) => Dispatcher.Invoke(() =>
            {
                OnStopped();

                if (E != null)
                {
                    Status.Content = "Error";
                    MessageBox.Show(E.ToString());
                }
            });

            Recorder.Start(Delay);

            Recent.Add(lastFileName,
                       VideoEncoder == null ? RecentItemType.Audio : RecentItemType.Video);
        }
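The audio-source branch above relies on a small id convention: "-1" means no audio, a parsable integer is a WaveIn device index, and anything else is treated as a WASAPI device id used for loopback. A hedged helper (an assumption, not part of the original) that makes the convention explicit:

// Hedged sketch: true when the selected id should be opened as a WASAPI loopback device.
static bool IsLoopbackSource(string selectedAudioSourceId)
{
    return selectedAudioSourceId != "-1" && !int.TryParse(selectedAudioSourceId, out _);
}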
Esempio n. 29
0
        void loopRecord()
        {
            // Get current window title of active window
            string title = GetWindowTitle();

            // Wait for the title to change, check 10 times per second
            while (title == GetWindowTitle() || GetWindowTitle() == "Advertisement" || GetWindowTitle() == "Spotify")
            {
                Thread.Sleep(100);
            }
            updateWindowNameDisplay();
            btn_toggleRecord.Invoke((MethodInvoker) delegate
            {
                btn_toggleRecord.Text      = "Recording...";
                btn_toggleRecord.BackColor = Color.LightGreen;
                btn_toggleRecord.ForeColor = Color.White;
            });
            while (!stopRecording)
            {
                using (WasapiCapture capture = new WasapiLoopbackCapture())
                {
                    currentlyplaying cp = null;
                    while (cp == null)
                    {
                        cp = get_Currently_Playing();
                    }
                    if (cp.item != null)
                    {
                        string filename = cp.item.id; // GetWindowTitle();

                        //rtxt_songlist.Invoke((MethodInvoker)delegate {
                        //    // Running on the UI thread
                        //    rtxt_songlist.Text += filename + "\n";
                        //});

                        // rtxt_songlist.Text += filename + "\n";
                        //foreach (char c in System.IO.Path.GetInvalidFileNameChars())
                        //{
                        //    filename = filename.Replace(c, '_');
                        //}


                        //initialize the selected device for recording
                        capture.Initialize();

                        if (!Directory.Exists(path))
                        {
                            Directory.CreateDirectory(path);
                        }

                        //create a wavewriter to write the data to
                        using (WaveWriter w = new WaveWriter(path + "\\" + filename + ".wav", capture.WaveFormat))
                        {
                            //setup an eventhandler to receive the recorded data
                            capture.DataAvailable += (s, E) =>
                            {
                                //save the recorded audio
                                w.Write(E.Data, E.Offset, E.ByteCount);
                            };

                            //start recording
                            capture.Start();

                            //for (int i = 0; i < 100; i++)
                            //{
                            //    Thread.Sleep(time / 100);
                            //    prog_recording.Value = 1 * i;
                            //}

                            // Get current window title of active window
                            string newTitle = GetWindowTitle();
                            // Wait for the title to change, check 10 times per second
                            while (newTitle == GetWindowTitle())
                            {
                                Thread.Sleep(100);
                                updateWindowNameDisplay();
                            }
                            //stop recording
                            capture.Stop();
                            updateWindowNameDisplay();
                            while (GetWindowTitle() == "Advertisement" || GetWindowTitle() == "Spotify")
                            {
                                Thread.Sleep(100);
                                updateWindowNameDisplay();
                            }
                            convertTagAsynch(path, filename, cp);


                            // Thread.Sleep(time);
                        }
                    }
                }

                if (title == GetWindowTitle())
                {
                    stopRecording = true;
                }
            }
            btn_toggleRecord.Invoke((MethodInvoker) delegate
            {
                btn_toggleRecord.Text      = "Record";
                btn_toggleRecord.BackColor = Color.FromArgb(30, 30, 30);
                btn_toggleRecord.ForeColor = Color.White;
            });
        }
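The snippet uses cp.item.id directly as the file name and leaves the invalid-character handling commented out; a small helper along those lines (a sketch of the commented-out step, not the author's code) would be:

// Hedged sketch: replace characters Windows does not allow in file names
// before building the ".wav" path.
static string SanitizeFileName(string name)
{
    foreach (char c in System.IO.Path.GetInvalidFileNameChars())
    {
        name = name.Replace(c, '_');
    }
    return name;
}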
Esempio n. 30
0
        static int Main(string[] args)
        {
            int    time;
            string output_file;

            switch (args.Length)
            {
            case 1:
                if (args[0] == "-h")
                {
                    System.Console.WriteLine("Usage:");
                    System.Console.WriteLine("    LoopbackCapture.exe <output/wav> <time/milliseconds>");
                    return(1);
                }
                output_file = args[0];
                time        = 0;
                break;

            case 2:
                output_file = args[0];
                try
                {
                    time = Int32.Parse(args[1]);
                }
                catch
                {
                    time = 0;
                }
                break;

            default:
                time        = 0;
                output_file = "record.wav";
                break;
            }

            int sampleRate    = 48000;
            int bitsPerSample = 24;

            //create a new soundIn instance
            using (WasapiCapture soundIn = new WasapiLoopbackCapture())
            {
                //initialize the soundIn instance
                soundIn.Initialize();

                //create a SoundSource around the soundIn instance
                SoundInSource soundInSource = new SoundInSource(soundIn)
                {
                    FillWithZeros = false
                };

                //create a source, that converts the data provided by the soundInSource to any other format

                IWaveSource convertedSource = soundInSource
                                              .ChangeSampleRate(sampleRate) // sample rate
                                              .ToSampleSource()
                                              .ToWaveSource(bitsPerSample); //bits per sample

                //channels...
                using (convertedSource = convertedSource.ToStereo())
                {
                    //create a new wavefile
                    using (WaveWriter waveWriter = new WaveWriter(output_file, convertedSource.WaveFormat))
                    {
                        //register an event handler for the DataAvailable event of the soundInSource
                        soundInSource.DataAvailable += (s, e) =>
                        {
                            //read data from the convertedSource
                            byte[] buffer = new byte[convertedSource.WaveFormat.BytesPerSecond / 2];
                            int    read;

                            //keep reading as long as we still get some data
                            while ((read = convertedSource.Read(buffer, 0, buffer.Length)) > 0)
                            {
                                //write the read data to a file
                                waveWriter.Write(buffer, 0, read);
                            }
                        };

                        //start recording
                        soundIn.Start();

                        //delay and keep recording
                        if (time != 0)
                        {
                            Thread.Sleep(time);
                        }
                        else
                        {
                            Console.ReadKey();
                        }

                        //stop recording
                        soundIn.Stop();
                    }
                }
            }
            return(0);
        }
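For reference, given the argument handling above: running LoopbackCapture.exe capture.wav 10000 records roughly ten seconds of system audio to capture.wav, passing only a file name records until a key is pressed, and running it with no arguments writes record.wav until a key is pressed.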
Esempio n. 31
0
        static void Main(string[] args)
        {
            myConfig = System.Reflection.Assembly.GetExecutingAssembly().GetName().Name + ".cfg";
            if (!ReadConfig() & args.Length == 0)
            {
                WriteLog("ERROR! (Main): Can't read a configuration file. Please, create a new one.");
                Array.Resize(ref args, 1);
                args[0] = "/config";
            }

            string myFullName = System.Reflection.Assembly.GetExecutingAssembly().GetName().FullName;

            WriteLog("(Main): Program started. (" + myFullName + ")");
            string sArgs = string.Join(" ", args);

            WriteLog("(Main): Arguments: " + sArgs);

            if (sArgs.Contains(@"/?"))
            {
                Console.WriteLine("");

                Console.WriteLine("/?       -   Print this message and exit.");
                Console.WriteLine("/config  -   Configure program and exit.");

                Console.WriteLine("\n Press any key to exit...");
                Console.ReadKey();
                Exit(0);
            }

            if (sArgs.Contains(@"/config"))
            {
                Console.WriteLine("");

                Console.WriteLine($"Please, enter a full path of media player application (default is: {appPath}): ");
                string a = Console.ReadLine();
                if (a.Length == 0)
                {
                    a = appPath;
                }
                if (!IsValidFullPath(a))
                {
                    Console.WriteLine("ERROR! Wrong application path.");
                    a = appPath;
                }
                appPath = a;
                appName = string.Join(".", Pop(System.IO.Path.GetFileName(appPath).Split('.')));

                Console.WriteLine($"Please, enter arguments for application if needed (default is: {appARGV}): ");
                a = Console.ReadLine();
                if (a != null & a.Length > 0)
                {
                    appARGV = a;
                }

                Console.WriteLine($"Please, enter a value of delay (in seconds >=5 ) before application will be restarted (default is: {delayBeforeRestartProgram}): ");
                a = Console.ReadLine();
                bool e = false;
                if (a.Length == 0)
                {
                    a = delayBeforeRestartProgram.ToString();
                }
                int b = StrToInt(a, ref e);
                if (b >= 5 & !e)
                {
                    delayBeforeRestartProgram = b;
                }
                else
                {
                    Console.WriteLine("ERROR! Wrong delay value! Should be >=5.");
                }

                while (true)
                {
                    Console.WriteLine("Is configuration below correct?");
                    Console.WriteLine($"\nappPath: {appPath}\nappName: {appName}\nappARGV: {appARGV}\ndelayBeforeRestartProgram: {delayBeforeRestartProgram}");
                    Console.Write("(Y/N): ");
                    a = Console.ReadLine();
                    if (a == "Y" || a == "y")
                    {
                        WriteConfig();
                        break;
                    }
                    else if (a == "N" || a == "n")
                    {
                        Console.WriteLine("Please, rerun this application with /config argument to try again.");
                        break;
                    }
                }


                Console.WriteLine("\nPress any key to exit...");
                Console.ReadKey();
                Exit(0);
            }

            var soundIn = new WasapiLoopbackCapture();

            WriteLog("(Main): Working with: " + soundIn.Device.FriendlyName);
            try
            {
                soundIn.Initialize();
            }
            catch
            {
                WriteLog("ERROR! (Main): Error while initializing device(39). Exiting.");
                Exit(1);
            }
            var soundInSource = new SoundInSource(soundIn);

            try
            {
                ISampleSource source = soundInSource.ToSampleSource();
                soundInSource.DataAvailable += (s, aEvent) => NewData(source);
            }
            catch
            {
                WriteLog("ERROR! (Main): Error while initializing device(50). Exiting.");
                Exit(1);
            }

            WriteLog("(Main): Trying to start sound capturing...");
            try
            {
                soundIn.Start();
                Thread.Sleep(2000);
                if (!newDataIsRunning & !noSoundIsRunning)
                {
                    Thread noSound = new Thread(NoSound);
                    noSound.IsBackground = true;
                    noSound.Start();
                }
            }
            catch
            {
                WriteLog("ERROR! (Main): Error while sound capturing. Exiting.");
                Exit(1);
            }

            WriteLog("(Main): Started.");
        }
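NewData and NoSound are not shown in this snippet. The following is only an assumption of what a NewData handler might look like: it drains the sample source and records when the signal was last above a small threshold, which the NoSound watchdog could compare against the configured restart delay. The lastSoundAt field is introduced purely for illustration.

// Hedged sketch of a NewData(source) handler (not the author's code).
static DateTime lastSoundAt = DateTime.Now; // hypothetical field, for illustration only

static void NewData(ISampleSource source)
{
    float[] samples = new float[4096];
    int read;
    // Drain whatever the loopback source has buffered and note when it was last non-silent.
    while ((read = source.Read(samples, 0, samples.Length)) > 0)
    {
        for (int i = 0; i < read; i++)
        {
            if (Math.Abs(samples[i]) > 0.001f)
            {
                lastSoundAt = DateTime.Now;
                break;
            }
        }
    }
}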
Esempio n. 32
0
 private void loadNAudio()
 {
     audio = new WasapiLoopbackCapture();
     audio.DataAvailable += Audio_DataAvailable;
     audio.StartRecording();
 }
Esempio n. 33
0
        /// <summary>
        /// Creates a new IWaveProvider using a Wasapi Capture device
        /// </summary>
        /// <param name="id">The ID of the Wasapi Device</param>
        /// <param name="inputLatency">Length of Wasapi buffer in ms, or -1 for automatic value</param>
        /// <param name="bufferLatency">Length of Wavebuffer in ms, or -1 for automatic value</param>
        public WasapiProvider(string id, int inputLatency = -1, int bufferLatency = -1)
        {
            MMDevice device = null;

            if (id == "<default>")
            {
                device = WasapiCapture.GetDefaultCaptureDevice();
            }
            else if (id == "<defaultLoopback>")
            {
                device = WasapiLoopbackCapture.GetDefaultLoopbackCaptureDevice();
            }
            else
            {
                foreach (MMDevice dev in deviceEnumerator.EnumerateAudioEndPoints(DataFlow.All, DeviceState.Active))
                {
                    if (dev.ID == id)
                    {
                        device = dev;
                        break;
                    }
                }
            }

            if (device == null)
            {
                throw new KeyNotFoundException($"Device with ID '{id}' not found or inactive");
            }

            Name = (device.DataFlow == DataFlow.Capture ? "In " : "Out ") + device.FriendlyName;

            if (device.DataFlow == DataFlow.Capture)
            {
                if (inputLatency == -1)
                {
                    capture = new WasapiCapture(device);
                }
                else
                {
                    capture = new WasapiCapture(device, false, inputLatency);
                }
            }
            else
            {
                capture = new WasapiLoopbackCapture(device);
            }


            if (bufferLatency == -1)
            {
                buffer = new BufferedWaveProvider(capture.WaveFormat)
                {
                    DiscardOnBufferOverflow = true
                };
            }
            else
            {
                buffer = new BufferedWaveProvider(capture.WaveFormat)
                {
                    DiscardOnBufferOverflow = true,
                    BufferDuration = TimeSpan.FromMilliseconds(bufferLatency)
                };
            }

            capture.DataAvailable += Capture_DataAvailable;
        }
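The Capture_DataAvailable handler subscribed above is not shown; a minimal sketch (an assumption) simply pushes each captured block into the BufferedWaveProvider so consumers can read it back as an IWaveProvider.

// Hedged sketch of the DataAvailable handler used by WasapiProvider.
private void Capture_DataAvailable(object sender, WaveInEventArgs e)
{
    buffer.AddSamples(e.Buffer, 0, e.BytesRecorded);
}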
Esempio n. 34
0
        public Form1()
        {
            InitializeComponent();
            // Setup settings
            settings           = Settings.GetSettings(); // get settings
            textBox1.Font      = new Font("Arial", settings.TextSize);
            textBox1.ForeColor = settings.TextColor;
            textBox1.ReadOnly  = true;

            HideCaret(textBox1.Handle);  // hide it

            if (!settings.ShowWatermark) // Watermark setting
            {
                label1.Text = "";
            }

            CheckForIllegalCrossThreadCalls = false; // Allow the background thread to update the UI directly (quick hack; not thread-safe)

            th = new Thread(() =>
            {
                // wav stuff
                WasapiLoopbackCapture CaptureInstance = new WasapiLoopbackCapture();
                CaptureInstance.DataAvailable        += (s, a) =>
                {
                    float max  = 0;
                    var buffer = new WaveBuffer(a.Buffer);
                    // interpret as 32 bit floating point audio
                    for (int index = 0; index < a.BytesRecorded / 4; index++)
                    {
                        var sample = buffer.FloatBuffer[index];

                        // absolute value
                        if (sample < 0)
                        {
                            sample = -sample;
                        }
                        // is this the max value?
                        if (sample > max)
                        {
                            max = sample;
                        }
                    }

                    if (!max.Equals(0))
                    {
                        textBox1.Text += CalcTheAnimation(max * 100) + "\r\n";
                    }
                    else
                    {
                        textBox1.Text = "";
                    }
                };
                // Record
                CaptureInstance.StartRecording();
                // sleeeep
                Thread.Sleep(-1);
            });
            // start
            th.Start();
            // No scroll bars and align correctly.
            textBox1.ScrollBars = ScrollBars.None;
            textBox1.TextAlign  = HorizontalAlignment.Center;
            // Close the thread and then save settings and gtfo
            FormClosing += OnFormClosing;
        }
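CalcTheAnimation is not part of the snippet; a hedged sketch (an assumption) of such a helper maps the 0-100 level computed above to a simple character bar for the text box.

// Hedged sketch: turn a 0-100 level into a bar of up to 50 characters.
private static string CalcTheAnimation(float level)
{
    int bars = (int)Math.Min(50, Math.Max(0, level / 2));
    return new string('|', bars);
}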
Esempio n. 35
0
        static void Main(string[] args)
        {
            // 1. Select input device

            int waveInDevices = WaveIn.DeviceCount;
            int waveInDevice;

            for (waveInDevice = 0; waveInDevice < waveInDevices; waveInDevice++)
            {
                WaveInCapabilities deviceInfo = WaveIn.GetCapabilities(waveInDevice);
                Console.WriteLine("Device {0}: {1}, {2} channels", waveInDevice, deviceInfo.ProductName, deviceInfo.Channels);
            }

            Console.WriteLine($"Device {waveInDevice}: WASAPI Loopback");

            int chosenDevice = args.Length > 0 ? int.Parse(args[0]) : int.Parse(Console.ReadLine());

            // 2. Start streaming

            var socket = new WebSocketSharp.Server.WebSocketServer(5001);

            socket.AddWebSocketService <WebSocketStreamService>("/stream");
            socket.Start();

            IWaveIn waveIn;

            if (chosenDevice == waveInDevice)
            {
                waveIn = new WasapiLoopbackCapture();
            }
            else
            {
                waveIn = new WaveInEvent
                {
                    DeviceNumber = chosenDevice,
                    WaveFormat   = new WaveFormat(44100, 2)
                };
            }

            var wav = new BufferedWaveProvider(waveIn.WaveFormat);

            wav.BufferDuration = TimeSpan.FromMinutes(1);

            waveIn.DataAvailable += (sender, e) =>
            {
                wav.AddSamples(e.Buffer, 0, e.BytesRecorded);
            };
            waveIn.StartRecording();

            while (true)
            {
                Console.WriteLine("Waiting for audio...");

                while (wav.BufferedDuration < TimeSpan.FromSeconds(SegmentDurationSeconds))
                {
                    // Wait for more audio to become available
                }

                // Write audio to wave file

                var fn = "sample.wav";
                using (var fs = File.OpenWrite(fn))
                    using (var wtr = new WaveFileWriter(fs, wav.WaveFormat))
                    {
                        int total = 0;
                        while (total < wav.WaveFormat.AverageBytesPerSecond * SegmentDurationSeconds)
                        {
                            byte[] buffer = new byte[wav.WaveFormat.AverageBytesPerSecond];
                            int    read   = wav.Read(buffer, 0, buffer.Length);

                            wtr.Write(buffer, 0, read);

                            total += read;
                        }
                    }

                // Transcode wave file to vorbis webm file

                string output = "sample.webm";

                if (File.Exists(output))
                {
                    File.Delete(output);
                }

                var process = new Process
                {
                    StartInfo = new ProcessStartInfo("ffmpeg.exe",
                                                     $"-i \"{fn}\" -c:a libvorbis -qscale:a 7 \"{output}\"")
                    {
                        UseShellExecute = false
                    }
                };

                process.Start();
                process.WaitForExit();

                socket.WebSocketServices.Broadcast(File.ReadAllBytes(output));

                File.Delete(output);
            }
        }
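The WebSocketStreamService registered above is not shown; since the server only broadcasts encoded segments to all connected clients, a minimal behaviour class (a sketch under that assumption, using websocket-sharp's WebSocketBehavior base) needs no per-message handling.

// Hedged sketch of a minimal behaviour class satisfying AddWebSocketService above.
public class WebSocketStreamService : WebSocketSharp.Server.WebSocketBehavior
{
}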
Esempio n. 36
0
        static async Task MainAsync()
        {
            WaveOut sp = new WaveOut();
            WasapiLoopbackCapture input = new WasapiLoopbackCapture();

            input.DataAvailable += Input_DataAvailable;
            BufferedWaveProvider streamOut = new BufferedWaveProvider(input.WaveFormat);

            streamOut.BufferLength            = maxBufferSize;
            streamOut.DiscardOnBufferOverflow = true;
            sp.Init(streamOut);
            sp.Play();

            Console.WriteLine("Какова роль приложения? (0=приёмник, 1=передатчик):");
            int role = Convert.ToInt32(Console.ReadLine());

            Console.WriteLine("Размер окна:");
            bufSize = Convert.ToInt32(Console.ReadLine());
            if (role == 0)
            {
                Console.WriteLine("Порт:");
                int port = Convert.ToInt32(Console.ReadLine());
                server = new TcpListener(IPAddress.Any, port);
                server.Start(1);
                client = server.AcceptTcpClient();
                client.SendBufferSize    = bufSize;
                client.ReceiveBufferSize = bufSize;
                stream = client.GetStream();

                ulong    packetCount    = 0;
                DateTime lastPacketTime = DateTime.Now;

                while (true)
                {
                    try
                    {
                        while (stream.DataAvailable)
                        {
                            byte[] buf = new byte[8192];
                            int    len = stream.Read(buf, 0, buf.Length);
                            streamOut.AddSamples(buf, 0, len);
                        }
                    }
                    catch (Exception) { }
                }
            }
            else
            {
                Console.WriteLine("Адрес клиента (IP:Port):");
                string addr = Console.ReadLine();
                endPoint = new IPEndPoint(IPAddress.Parse(addr.Split(':')[0]), Convert.ToInt32(addr.Split(':')[1]));
                client   = new TcpClient();
                client.Connect(endPoint);
                client.SendBufferSize    = bufSize;
                client.ReceiveBufferSize = bufSize;
                stream = client.GetStream();
                input.StartRecording();
                startTime = DateTime.Now;
                Thread.Sleep(-1);
            }
        }
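The Input_DataAvailable handler wired above is not shown; a hedged sketch (an assumption) of the transmitter side forwards each captured block straight into the TCP stream.

// Hedged sketch of the Input_DataAvailable handler used above.
private static void Input_DataAvailable(object sender, WaveInEventArgs e)
{
    try
    {
        stream?.Write(e.Buffer, 0, e.BytesRecorded);
    }
    catch (Exception)
    {
        // connection dropped; a real implementation would stop capturing or reconnect
    }
}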
Esempio n. 37
0
 /// <summary>
 /// Handles captured audio from a Wasapi device by converting it to PCM16 and writing it into a voice transmit sink.
 /// </summary>
 /// <param name="sink">The Discord VoiceTransmitSink instance.</param>
 /// <param name="device">The WasapiLoopbackCapture device.</param>
 private static async void AudioDataAvilableEventHander(object s, WaveInEventArgs e, VoiceTransmitSink sink, WasapiLoopbackCapture device)
 {
     // If audio data is available, convert it into PCM16 format and write it into the stream.
     if (e.Buffer.Length > 0)
     {
         await sink.WriteAsync(Utils.AudioToPCM16(e.Buffer, e.BytesRecorded, device.WaveFormat));
     }
 }
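A hedged wiring sketch (an assumption, not shown in the snippet): subscribing the handler above to a loopback device, where sink is assumed to be a VoiceTransmitSink already obtained from the Discord voice connection.

var device = new WasapiLoopbackCapture();
// 'sink' is assumed to come from the voice connection (e.g. its transmit sink).
device.DataAvailable += (s, e) => AudioDataAvilableEventHander(s, e, sink, device);
device.StartRecording();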
Esempio n. 38
0
        private void simpleButton1_Click_1(object sender, EventArgs e)
        {
            //recording
            //stop record and delete 2 file
            if (sbtStop.Enabled == true)
            {
                if (sourceStream != null)
                {
                    sourceStream.StopRecording();
                    sourceStream.Dispose();
                    sourceStream = null;
                }

                if (sourceStream1 != null)
                {
                    sourceStream1.StopRecording();
                    sourceStream1.Dispose();
                    sourceStream1 = null;
                }
                if (this.waveWriter == null)
                {
                    return;
                }
                if (this.waveWriter2 == null)
                {
                    return;
                }
                waveWriter.Dispose();
                waveWriter2.Dispose();

                this.waveWriter = null;
                waveWriter2     = null;

                if (cdDelete.Checked)
                {
                    File.Delete(currentrecord);
                    File.Delete(currentrecord.Replace(".wav", "mic.wav"));
                }
            }
            //recorded 2 file wav separate
            //want not to play and delete 2 file
            else if (sbtPlay.Enabled == true)
            {
                if (cdDelete.Checked)
                {
                    File.Delete(currentrecord);
                    File.Delete(currentrecord.Replace(".wav", "mic.wav"));
                }
            }
            //recording mix file
            else if (sbtSave.Enabled == true)
            {
                if (sourceStream1 != null)
                {
                    sourceStream1.StopRecording();
                    sourceStream1.Dispose();
                    sourceStream1   = null;
                    sbtSave.Enabled = false;
                    setdefaul();
                    sbtOpen.Focus();
                    txtTenFile.Text = "";
                }
                if (waveWriter != null)
                {
                    waveWriter.Dispose();
                    waveWriter = null;
                }
                steam.Dispose();
                steam2.Dispose();
                output.Dispose();
                output2.Dispose();
                if (cdDelete.Checked)
                {
                    File.Delete(currentrecord);
                    File.Delete(currentrecord.Replace(".wav", "mic.wav"));
                }
                File.Delete(currentrecord.Replace(".wav", "mix.wav"));
            }
            setdefaul();
        }
Esempio n. 39
0
        public static void Main()
        {
            //Decrypt resources and load.
            Console.WriteLine("---------------------------------------------------------------------");
            Console.WriteLine("Dual Audio - (C) 2014 Thr. Using NAudio (http://naudio.codeplex.com/)");
            Console.WriteLine("---------------------------------------------------------------------");

            int waveInDevices = WaveIn.DeviceCount;
            int waveOutDevices = WaveOut.DeviceCount;
            int inputdevice = 0;
            int output = 0;

            List<int> Outputs = new List<int>();
            Console.WriteLine("Located {0} Input Devices.\n", waveInDevices);
            Console.Write("How many outputs to bind to? (max {0}): ", waveOutDevices);
            //grab inputs.

            while (!int.TryParse(Console.ReadLine(), out totaloutputs) || totaloutputs > waveOutDevices)
                Console.Write("How many outputs to bind to? (max {0}): ", waveOutDevices);

            for (int waveInDevice = 0; waveInDevice < waveInDevices; waveInDevice++)
            {
                WaveInCapabilities deviceInfo = WaveIn.GetCapabilities(waveInDevice);
                Console.WriteLine("{0}: {1}, {2} channels.", waveInDevice, deviceInfo.ProductName, deviceInfo.Channels);
            }

            Console.Write("Select Input Line: ");
            while (!int.TryParse(Console.ReadLine(), out inputdevice))
                Console.Write("Select Input Line: ");

            Console.WriteLine("Successfully set input as device {0}.", inputdevice);
            Console.WriteLine("");
            output = totaloutputs;
            while (output > 0)
            {
                for (int waveOutDevice = 0; waveOutDevice < waveOutDevices; waveOutDevice++)
                {
                    if (!Outputs.Contains(waveOutDevice))
                    {
                        WaveOutCapabilities deviceInfo = WaveOut.GetCapabilities(waveOutDevice);
                        Console.WriteLine("{0}: {1}, {2}", waveOutDevice, deviceInfo.ProductName, deviceInfo.Channels);
                    }
                }
                Console.Write("Select the output device for playback{0}: ", (totaloutputs - output).ToString());
                int device = 0;
                while(!int.TryParse(Console.ReadLine(), out device) || device > waveOutDevices - 1)
                {
                    Console.WriteLine("Invalid Device!");
                    Console.Write("Select the output device for playback{0}: ", (totaloutputs - output).ToString());
                }
                Outputs.Add(device);
                Console.WriteLine("Successfully set the output device for playback{0}.", (totaloutputs - output).ToString());
                output--;
            }
            Console.WriteLine("");
            string p = "";
            Console.Write("Dump to file? (Y\\N) ");
            while((p = Console.ReadLine().ToLower()) != "y" && p != "n")
            {
                Console.WriteLine("");
                Console.Write("Dump to file? (Y\\N) ");
            }
            dumptofile = p == "y";

            Console.Write("Amplify Output? (Y\\N) ");
            while ((p = Console.ReadLine().ToLower()) != "y" && p != "n")
            {
                Console.WriteLine("");
                Console.Write("Amplify Output? (Y\\N) ");
            }
            amplify = p == "y";

            waveIn = new WasapiLoopbackCapture();

            Console.WriteLine("Initialized Loopback Capture...");
            if (dumptofile)
            {
                string filename = "";
                Console.Write("Filename (without extension): ");
                while((filename = Console.ReadLine()) == "")
                {
                    Console.WriteLine("");
                    Console.Write("Filename (without extension): ");
                }
                f = new WaveFileWriter(File.OpenWrite(Environment.CurrentDirectory + "\\" + filename + ".wav"), waveIn.WaveFormat);
            }
            waveIn.DataAvailable += InputBufferToFileCallback;
            waveIn.StartRecording(); //Start our loopback capture.

            WaveOut[] devices = new WaveOut[totaloutputs];

            m1 = new BufferedWaveProvider[totaloutputs];
            for (int i = 0; i < totaloutputs; i++)
            {
                m1[i] = new BufferedWaveProvider(waveIn.WaveFormat);
                m1[i].BufferLength = 1024 * 1024 * 10;
                m1[i].DiscardOnBufferOverflow = true;
                devices[i] = new WaveOut();
                devices[i].Volume = 1.0f; // NAudio's WaveOut.Volume must be between 0.0 and 1.0
                devices[i].NumberOfBuffers = 3;
                devices[i].DeviceNumber = Outputs[i];
                devices[i].DesiredLatency = 61;
                devices[i].Init(m1[i]);
                Console.WriteLine("Initializing Device{0}...", i);
                devices[i].Play();
                Console.WriteLine("Started Playing on Device{0}...", i);
            }

            while (true)
                if(Console.ReadLine().ToLower() == "s")
                {
                    stop = true;
                    for (int i = 0; i < devices.Length; i++)
                        devices[i].Stop();
                    waveIn.StopRecording();
                    f?.Close(); // 'f' is only created when dumping to file
                    Environment.Exit(0);
                }
        }
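The InputBufferToFileCallback handler subscribed above is not shown; a hedged sketch (an assumption) copies each captured block into every output's BufferedWaveProvider and, when enabled, into the WAV dump file. The amplify option is ignored here for brevity.

// Hedged sketch of the loopback DataAvailable callback used above.
static void InputBufferToFileCallback(object sender, WaveInEventArgs e)
{
    if (stop) return;
    for (int i = 0; i < totaloutputs; i++)
    {
        m1[i].AddSamples(e.Buffer, 0, e.BytesRecorded);
    }
    if (dumptofile)
    {
        f.Write(e.Buffer, 0, e.BytesRecorded);
    }
}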