コード例 #1
0
 public WaveFormat GetWaveFormat(int sampleRate, int numberOfChannels)
 {
     // Build a 32-bit IEEE-float format descriptor for the requested layout.
     var format = WaveFormat.CreateIeeeFloatWaveFormat(sampleRate, numberOfChannels);
     return format;
 }
コード例 #2
0
 public void SetWaveFormat(int sampleRate, int channels)
 {
     // Store a 32-bit IEEE-float format with the given rate and channel count.
     var format = WaveFormat.CreateIeeeFloatWaveFormat(sampleRate, channels);
     this.waveFormat = format;
 }
コード例 #3
0
        /// <summary>
        ///   Constructs a new Audio Output Device.
        /// </summary>
        ///
        /// <param name="device">Global identifier of the audio output device.</param>
        /// <param name="owner">The owner window handle.</param>
        /// <param name="samplingRate">The sampling rate of the device.</param>
        /// <param name="channels">The number of channels of the device.</param>
        ///
        public AudioOutputDevice(Guid device, IntPtr owner, int samplingRate, int channels)
        {
            this.owner        = owner;
            this.samplingRate = samplingRate;
            this.channels     = channels;
            this.device       = device;

            DirectSound ds = new DirectSound(device);

            // Priority cooperative level is required before buffers can be created
            // with a custom format.
            ds.SetCooperativeLevel(owner, CooperativeLevel.Priority);


            // Set the output format: 32-bit IEEE float at the requested rate/channels.
            WaveFormat waveFormat = WaveFormat.CreateIeeeFloatWaveFormat(samplingRate, channels);

            // 8 seconds' worth of audio data.
            bufferSize = 8 * waveFormat.AverageBytesPerSecond;


            // Setup the secondary buffer
            SoundBufferDescription desc2 = new SoundBufferDescription();

            desc2.Flags =
                BufferFlags.GlobalFocus |
                BufferFlags.ControlPositionNotify |
                BufferFlags.GetCurrentPosition2;
            desc2.BufferBytes = bufferSize;
            desc2.Format      = waveFormat;

            buffer = new SecondarySoundBuffer(ds, desc2);


            var list = new List <NotificationPosition>();
            int numberOfPositions = 32;

            // Set notification for buffer percentiles: one event per 1/32 of the
            // buffer, offset by +1 byte so the first notification is not at byte 0.
            for (int i = 0; i < numberOfPositions; i++)
            {
                list.Add(new NotificationPosition()
                {
                    WaitHandle = new AutoResetEvent(false),
                    Offset     = i * bufferSize / numberOfPositions + 1,
                });
            }

            // Set notification for end of buffer (last byte; appended as index 32).
            list.Add(new NotificationPosition()
            {
                Offset     = bufferSize - 1,
                WaitHandle = new AutoResetEvent(false)
            });

            // Index 16 is the half-buffer notification; index 32 is the
            // end-of-buffer entry added above (the list holds 33 positions).
            firstHalfBufferIndex  = numberOfPositions / 2;
            secondHalfBufferIndex = numberOfPositions;

            notifications = list.ToArray();

            // Sanity checks — assumes bufferSize is even so 16*size/32 == size/2.
            Accord.Diagnostics.Debug.Assert(notifications[firstHalfBufferIndex].Offset == bufferSize / 2 + 1);
            Accord.Diagnostics.Debug.Assert(notifications[secondHalfBufferIndex].Offset == bufferSize - 1);

            // Make a copy of the wait handles
            waitHandles = new WaitHandle[notifications.Length];
            for (int i = 0; i < notifications.Length; i++)
            {
                waitHandles[i] = notifications[i].WaitHandle;
            }

            // Store all notification positions
            buffer.SetNotificationPositions(notifications);
        }
コード例 #4
0
 private WaveFormat GetWaveFormatMP3Supported(WaveFormat waveFormat)
 {
     // Clamp the source format to the limits the MP3 encoder accepts.
     int clampedRate     = Math.Min(MP3_MAX_SAMPLE_RATE, waveFormat.SampleRate);
     int clampedChannels = Math.Min(MP3_MAX_NUMBER_CHANNELS, waveFormat.Channels);
     return WaveFormat.CreateIeeeFloatWaveFormat(clampedRate, clampedChannels);
 }
コード例 #5
0
 public void CanRequestIfFormatIsSupportedIeee()
 {
     // Querying shared-mode support for 44.1 kHz stereo IEEE float must not throw.
     var ieeeFormat = WaveFormat.CreateIeeeFloatWaveFormat(44100, 2);
     GetAudioClient().IsFormatSupported(AudioClientShareMode.Shared, ieeeFormat);
 }
コード例 #6
0
 /// <summary>
 /// Initializes a new instance of the SineSumSampleProvider class with an
 /// IEEE-float wave format and an empty list of tones.
 /// </summary>
 /// <param name="sampleRate">Desired sample rate.</param>
 /// <param name="channel">Number of channels.</param>
 public SineSumSampleProvider(int sampleRate, int channel)
 {
     waveFormat = WaveFormat.CreateIeeeFloatWaveFormat(sampleRate, channel);
     Tones      = new List <double[]>();
 }
コード例 #7
0
 public TrackSampleProvider()
 {
     // Mix all inputs at 44.1 kHz stereo IEEE float, with a volume stage on top.
     mix = new MixingSampleProvider(WaveFormat.CreateIeeeFloatWaveFormat(44100, 2));
     //pan = new PanningSampleProvider(mix);
     volume = new VolumeSampleProvider(mix);
 }
コード例 #8
0
 /// <summary>
 /// Initialises a new instance of SampleProviderConverterBase, mirroring the
 /// source's sample rate and channel count but exposing IEEE-float samples.
 /// </summary>
 /// <param name="source">Source Wave provider</param>
 public SampleProviderConverterBase(IWaveProvider source)
 {
     this.source = source;
     var sourceFormat = source.WaveFormat;
     waveFormat = WaveFormat.CreateIeeeFloatWaveFormat(sourceFormat.SampleRate, sourceFormat.Channels);
 }
コード例 #9
0
        public VorbisWaveReader(string fileName)
        {
            // Open the Ogg Vorbis file and advertise its native rate/channel
            // layout as a 32-bit IEEE-float wave format.
            _reader = new VorbisReader(fileName);
            _waveFormat = WaveFormat.CreateIeeeFloatWaveFormat(_reader.SampleRate, _reader.Channels);
        }
コード例 #10
0
        /// <summary>
        /// Sets up microphone capture and the speech recognizer, then starts
        /// recording. Recognition itself is left off by default (see bottom).
        /// </summary>
        public static void Init()
        {
            //"boom shakalaka", "Wubba lubba dub dub", "Slow the frak down", "speed the hell up", "Play Normal Speed"
            //"Hit the sack jack",

            // Configure the capture format *before* caching its sample rate.
            // (Previously `samplerate` was read from the device-default format
            // and WaveFormat was overwritten afterwards, leaving the cached
            // value stale.)
            recorder                = new WaveInEvent();
            recorder.WaveFormat     = WaveFormat.CreateIeeeFloatWaveFormat(44100, 1);
            samplerate              = recorder.WaveFormat.SampleRate;
            recorder.DataAvailable += RecorderOnDataAvailable;

            // Run UpdateThread immediately, then every 500 ms.
            _timer = new Timer(x => UpdateThread(), null, 0, 500);
            recorder.StartRecording();


            // Create an in-process speech recognizer for the en-US locale.
            recognizer = new SpeechRecognitionEngine(new System.Globalization.CultureInfo("en-US"));

            // Fixed voice commands.
            Choices normalCommands = new Choices();

            normalCommands.Add(new string[] { "Clear Queue" });
            normalCommands.Add(new string[] { "Speed Up" });
            normalCommands.Add(new string[] { "Slow Down" });
            normalCommands.Add(new string[] { "Normal speed" });

            GrammarBuilder gb = new GrammarBuilder();

            gb.Append(normalCommands);


            //Stop TTS For a user.
            //BAN HAMMER and/or TIMEOUT?
            //Number of TTS's (default 2)


            //DynamicCommands.Add(new string[] { "Repeat dfoxlive last 5 messages" });
            // Free-form command shape: "repeat <dictation> message".
            GrammarBuilder gb2 = new GrammarBuilder();

            gb2.Append("repeat");
            gb2.AppendDictation();
            gb2.Append("message");



            GrammarBuilder finalgb = new GrammarBuilder(new Choices(gb, gb2));

            // Create the Grammar instance.
            Grammar g = new Grammar(finalgb);

            recognizer.LoadGrammar(g);

            // Create and load a dictation grammar.
            //recognizer.LoadGrammar(new DictationGrammar());

            // Add a handler for the speech recognized event.
            recognizer.SpeechRecognized += Sr_SpeechRecognized;

            // Configure input to the speech recognizer.
            recognizer.SetInputToDefaultAudioDevice();

            //DEFAULT OFF.
            // Start asynchronous, continuous speech recognition.
            //recognizer.RecognizeAsync(RecognizeMode.Multiple);
        }
コード例 #11
0
 public void SetWaveFormat(int sampleRate, int channels)
 {
     // Store a 32-bit IEEE-float format for the given rate and channel count.
     this.waveFormat = WaveFormat.CreateIeeeFloatWaveFormat(sampleRate, channels);
     //Console.WriteLine("buffer " + waveFormat.BitsPerSample);
     // The format above ties in with the "samplesRequired" value below, which
     // is "count / 4" (4 bytes per 32-bit float sample).
 }
コード例 #12
0
ファイル: Audio.cs プロジェクト: hnjm/CChat2
 public static WaveFormat GetFormat()
 {
     // Shared IEEE-float format built from the app-wide rate/channel constants.
     var format = WaveFormat.CreateIeeeFloatWaveFormat(SAMPLE_RATE, CHANNELS);
     return format;
 }
コード例 #13
0
        // Construction event: enumerates MIDI input devices, builds the audio
        // chain (mixer -> volume -> tremolo -> chorus -> phaser -> delay -> LPF
        // -> FFT tap) and seeds every synth parameter with its default value.
        partial void Constructed()
        {
            // Collect the product names of all attached MIDI input devices.
            for (var device = 0; device < NAudio.Midi.MidiIn.NumberOfDevices; device++)
            {
                _MidiDevices.Add(NAudio.Midi.MidiIn.DeviceInfo(device).ProductName);
            }

            // Pre-select the first MIDI device; hide the picker when none exist.
            if (_MidiDevices.Count > 0)
            {
                MidiDevice     = MidiDevices[0];
                MidiVisibility = Visibility.Visible;
            }
            else
            {
                MidiVisibility = Visibility.Collapsed;
            }

            // 44.1 kHz mono IEEE float is the working format for the whole chain.
            var waveFormat = WaveFormat.CreateIeeeFloatWaveFormat(44100, 1);

            _mixer = new MixingSampleProvider(waveFormat)
            {
                ReadFully = true
            };                                                                  // Always produce samples
            _volControl = new VolumeSampleProvider(_mixer)
            {
                Volume = 0.25f,
            };

            // Effects are chained in series; the FFT provider is the final tap
            // and forwards spectrum data through Dispatch to UpdateRealTimeData.
            _tremolo     = new TremoloSampleProvider(_volControl, TremoloFreq, TremoloGain);
            _chorus      = new ChorusSampleProvider(_tremolo);
            _phaser      = new PhaserSampleProvider(_chorus);
            _delay       = new DelaySampleProvider(_phaser);
            _lpf         = new LowPassFilterSampleProvider(_delay, 20000);
            _fftProvider = new FFTSampleProvider(8, (ss, cs) => Dispatch(() => UpdateRealTimeData(ss, cs)), _lpf);

            // Default synth parameters (ADSR per voice, filter, effect settings).
            WaveType1       = SignalGeneratorType.Sin;
            Volume          = -15.0; // dB
            Attack          = Attack2 = Attack3 = 0.01f;
            Decay           = Decay2 = Decay3 = 0.01f;
            Sustain         = Sustain2 = Sustain3 = 1.0f;
            Release         = Release2 = Release3 = 0.3f;
            CutOff          = 4000;
            Q               = 0.7f;
            TremoloFreq     = 5;
            TremoloFreqMult = 1;
            ChorusDelay     = 0.0f;
            ChorusSweep     = 0.0f;
            ChorusWidth     = 0.0f;
            PhaserDry       = 0.0f;
            PhaserWet       = 0.0f;
            PhaserFeedback  = 0.0f;
            PhaserFreq      = 0.0f;
            PhaserWidth     = 0.0f;
            PhaserSweep     = 0.0f;

            DelayMs       = 0;
            DelayFeedback = 0.6f;
            DelayMix      = 1.0f;
            DelayWet      = 0.5f;
            DelayDry      = 1.0f;

            // Voice Levels in dB
            Level1 = 0.0f;
            Level2 = -48.0f;
            Level3 = -48.0f;
        }
コード例 #14
0
        // Mixes the microphone recording with the loopback recording into one
        // WAV file, applying the per-source volume levels from the text boxes.
        public void mixing()
        {
            /*
             * var mixer = new WaveMixerStream32 { AutoStop = true };
             * var wav1 = new WaveFileReader(_micPath);
             * var wav2 = new WaveFileReader(_loopBackPath);
             *
             * // float levelWav1_mic = 1.0f;
             * // float levelWav2_loopBack = 1.0f;
             * var waveChan1 = new WaveChannel32(wav1);
             * // waveChan1.Volume = 2.0f;
             * if (TextBox_LevelMic.Text.Length > 0)
             * {
             *  string strLevelMic = TextBox_LevelMic.Text;
             *  if(strLevelMic != "1.0")
             *  {
             *      float fLevelMic = float.Parse(strLevelMic);
             *      waveChan1.Volume = fLevelMic;
             *  }
             *
             * }
             * mixer.AddInputStream(waveChan1);
             * var waveChan2 = new WaveChannel32(wav2);
             * // waveChan2.Volume = 0.5f;
             * if (TextBox_levelLoopBack.Text.Length > 0)
             * {
             *  string strlevelLoopBack = TextBox_levelLoopBack.Text;
             *  if (strlevelLoopBack != "1.0")
             *  {
             *      float fLevelLoopBack = float.Parse(strlevelLoopBack);
             *      waveChan2.Volume = fLevelLoopBack;
             *  }
             * }
             * mixer.AddInputStream(waveChan2);
             * _outputPath = TextBox_area2.Text;
             *
             * _fileOutputWavFullExt = System.IO.Path.Combine(_outputPath, Path.GetFileName(_micPath).Replace("_mic", ""));
             * WaveFileWriter.CreateWaveFile(_fileOutputWavFullExt, mixer);
             *
             */
            ////////////////////////////////////////////////////////////////////////////////
            // Target format: 48 kHz stereo IEEE float.
            const int rate = 48000;
            //const int bits = 32;
            const int channels = 2;
            //WaveFormat wave_format = new WaveFormat(rate, bits, channels);
            WaveFormat wave_format = WaveFormat.CreateIeeeFloatWaveFormat(rate, channels);
            var        wav1        = new AudioFileReader(_micPath);
            var        wav2        = new AudioFileReader(_loopBackPath);

            // Apply the mic level unless the box is empty or left at unity ("1.0").
            // NOTE(review): float.Parse uses the current culture; locales that
            // write "1,0" will parse differently — confirm whether
            // InvariantCulture is intended here.
            if (TextBox_LevelMic.Text.Length > 0)
            {
                string strLevelMic = TextBox_LevelMic.Text;
                if (strLevelMic != "1.0")
                {
                    float fLevelMic = float.Parse(strLevelMic);
                    wav1.Volume = fLevelMic;
                }
            }
            // Same for the loopback level.
            if (TextBox_levelLoopBack.Text.Length > 0)
            {
                string strlevelLoopBack = TextBox_levelLoopBack.Text;
                if (strlevelLoopBack != "1.0")
                {
                    float fLevelLoopBack = float.Parse(strlevelLoopBack);
                    wav2.Volume = fLevelLoopBack;
                }
            }
            // Only the loopback source is resampled to the target format;
            // the mic source is fed in as-is.
            var resampler2 = new MediaFoundationResampler(wav2, wave_format);
            var mixer      = new MixingSampleProvider(wave_format);

            //      var mixer = new MixingSampleProvider(new[] { wav1, wav2 });
            mixer.AddMixerInput(resampler2);
            mixer.AddMixerInput(wav1.ToWaveProvider());

            _outputPath = TextBox_area2.Text;


            // Resolve the output path on the UI thread (falls back to the main
            // window's path box) and write the mixed file; the output name is the
            // mic file's name with the "_mic" suffix stripped.
            this.Dispatcher.BeginInvoke((Action)(() =>
            {
                if (_outputPath.Length == 0)
                {
                    _outputPath = _mainWindow.TextBox_path.Text;
                    TextBox_area2.Text = _outputPath;
                }
                _fileOutputWavFullExt = System.IO.Path.Combine(_outputPath, Path.GetFileName(_micPath).Replace("_mic", ""));
                WaveFileWriter.CreateWaveFile(_fileOutputWavFullExt, mixer.ToWaveProvider());
            }));
        }
コード例 #15
0
        // (Re)starts audio capture from the selected input source, negotiating a
        // device format the selected codec can consume. Wasapi devices may run in
        // exclusive mode when supported and requested; WaveIn/WaveInEvent paths
        // always use the requested IEEE-float format.
        private void StartRecording(bool shouldTryUseExclusive = true)
        {
            // Restart cleanly if we were already capturing.
            if (waveIn != null)
            {
                StopRecording();
            }
            if (SelectedInputSource != null)
            {
                this.codec = SelectedCodec.Codec;

                // Ideal capture format: IEEE float at the codec's record rate/channels.
                var  deviceFormat    = WaveFormat.CreateIeeeFloatWaveFormat(codec.RecordFormat.SampleRate, codec.RecordFormat.Channels);
                bool canUseExclusive = false;

                if (SelectedInputSource.Provider == DeviceProvider.Wasapi)
                {
                    var mmdevice = SelectedInputSource.MMDevice;

                    // Ask the device for the closest supported match: exclusive
                    // mode first, falling back to shared mode.
                    WaveFormatExtensible bestMatch;
                    canUseExclusive = mmdevice.AudioClient.IsFormatSupported(AudioClientShareMode.Exclusive, deviceFormat, out bestMatch);
                    if (canUseExclusive && shouldTryUseExclusive)
                    {
                        if (bestMatch != null)
                        {
                            deviceFormat = bestMatch;
                        }
                    }
                    else
                    {
                        mmdevice.AudioClient.IsFormatSupported(AudioClientShareMode.Shared, deviceFormat, out bestMatch);
                        if (bestMatch != null)
                        {
                            deviceFormat = bestMatch;
                        }
                    }


                    // Only IEEE float or 16-bit formats are usable downstream;
                    // anything else forces renegotiation from the device mix format.
                    if (deviceFormat.Encoding != WaveFormatEncoding.IeeeFloat && deviceFormat.BitsPerSample != 16)
                    {
                        deviceFormat = mmdevice.AudioClient.MixFormat;

                        if (mmdevice.AudioClient.IsFormatSupported(AudioClientShareMode.Exclusive, codec.RecordFormat))
                        {
                            canUseExclusive = true;
                            deviceFormat    = codec.RecordFormat;
                        }
                        else if (mmdevice.AudioClient.IsFormatSupported(AudioClientShareMode.Shared, codec.RecordFormat))
                        {
                            canUseExclusive = false;
                            deviceFormat    = codec.RecordFormat;
                        }
                        else
                        {
                            // Last resort: try float and 16-bit PCM variants at the
                            // mix-format rate, exclusive first, then shared.
                            WaveFormat newFormat;
                            WaveFormat altWaveFormat  = new WaveFormat(deviceFormat.SampleRate, 16, deviceFormat.Channels);
                            WaveFormat altFloatFormat = WaveFormat.CreateIeeeFloatWaveFormat(mmdevice.AudioClient.MixFormat.SampleRate, mmdevice.AudioClient.MixFormat.Channels);

                            if (mmdevice.AudioClient.IsFormatSupported(AudioClientShareMode.Exclusive, altFloatFormat))
                            {
                                canUseExclusive = true;
                                newFormat       = altFloatFormat;
                            }
                            else if (mmdevice.AudioClient.IsFormatSupported(AudioClientShareMode.Exclusive, altWaveFormat))
                            {
                                canUseExclusive = true;
                                newFormat       = altWaveFormat;
                            }
                            else if (mmdevice.AudioClient.IsFormatSupported(AudioClientShareMode.Shared, altFloatFormat))
                            {
                                canUseExclusive = false;
                                newFormat       = altFloatFormat;
                            }
                            else if (mmdevice.AudioClient.IsFormatSupported(AudioClientShareMode.Shared, altWaveFormat))
                            {
                                canUseExclusive = false;
                                newFormat       = altWaveFormat;
                            }
                            else
                            {
                                throw new Exception("Device does not support 16bit PCM, or device is in use");
                            }

                            deviceFormat = newFormat;

                            Console.WriteLine("Initializing Wasapi\n  Device: {0}\n  Format: {1}\n  Mode: {2}\n  Resampling: {3}",
                                              mmdevice.FriendlyName,
                                              deviceFormat,
                                              canUseExclusive ? "Exclusive" : "Shared",
                                              deviceFormat.Equals(codec.RecordFormat) ? "NO" : "YES");
                        }
                    }

                    AudioClientShareMode shareMode;
                    if (canUseExclusive && shouldTryUseExclusive)
                    {
                        shareMode = AudioClientShareMode.Exclusive;
                    }
                    else
                    {
                        shareMode = AudioClientShareMode.Shared;
                    }

                    Guid audioSessionGuid = Guid.NewGuid();
                    // NOTE(review): reset failures are deliberately swallowed here
                    // (best effort); consider at least logging the exception.
                    try
                    {
                        mmdevice.AudioClient.Reset();
                    }
                    catch { }

                    // Respect the device's minimum buffer duration.
                    BufferTargetMs = Math.Max(BufferTargetMs, mmdevice.MinBufferDurationMs);
                    var w = new WasapiCapture(mmdevice, BufferTargetMs);
                    w.RecordingStopped += wasapi_RecordingStopped;
                    waveIn              = w;
                    waveIn.WaveFormat   = deviceFormat;
                    w.ShareMode         = shareMode;

                    LevelManager = new AudioInLevelManager(w, mmdevice);
                }
                else
                {
                    // Classic WaveIn path: event-driven or window-callback driven.
                    Console.WriteLine("Initializing WaveIn{0}. Buffer:{1}ms Device:{2} Format:{3}", UseWaveEvent ? "Event" : "", BufferTargetMs, SelectedInputSource.WavDeviceNumber, deviceFormat);
                    if (UseWaveEvent)
                    {
                        var w = new WaveInEvent();
                        w.BufferMilliseconds = BufferTargetMs;
                        w.DeviceNumber       = SelectedInputSource.WavDeviceNumber;
                        LevelManager         = new AudioInLevelManager(w);
                        waveIn = w;
                    }
                    else
                    {
                        var w = new WaveIn();
                        w.BufferMilliseconds = BufferTargetMs;
                        w.DeviceNumber       = SelectedInputSource.WavDeviceNumber;
                        LevelManager         = new AudioInLevelManager(w);
                        waveIn = w;
                    }
                    waveIn.WaveFormat = deviceFormat;
                    canUseExclusive   = false;
                }

                waveIn.DataAvailable    += waveIn_DataAvailable;
                waveIn.RecordingStopped += waveIn_RecordingStopped;

                try
                {
                    waveIn.StartRecording();
                    ControlsEnabled = false;
                }
                catch (NAudio.MmException ex)
                {
                    // Device could not be opened (e.g. in use by another app).
                    Console.WriteLine("Audio Error: Couldn't open recording device\n{0}", ex.Message);
                    waveIn      = null;
                    IsRecording = false;
                }
                catch (ArgumentException ex)
                {
                    Console.WriteLine("Couldn't start recording: {0}", ex.Message);
                    IsRecording = false;
                    return;
                }
                catch (Exception ex)
                {
                    Console.WriteLine("Couldn't start recording: {0}", ex);
                    IsRecording = false;
                    return;
                }
            }
            else
            {
                // Nothing selected to record from.
                IsRecording = false;
            }
        }
コード例 #16
0
        public VorbisWaveReader(Stream sourceStream)
        {
            // Wrap the stream and expose the decoder's native rate/channel layout
            // as IEEE float. The `false` flag presumably leaves stream ownership
            // with the caller — confirm against the VorbisReader API.
            _reader = new VorbisReader(sourceStream, false);
            _waveFormat = WaveFormat.CreateIeeeFloatWaveFormat(_reader.SampleRate, _reader.Channels);
        }
コード例 #17
0
ファイル: Program.cs プロジェクト: littlecxm/2dxrender
        // Renders the final chart audio: schedules every keysound at its offset,
        // mixes them all, and writes the result as WAV or MP3 (320 kbps + ID3).
        private static void mixFinalAudio(string outputFilename, List <KeyPosition> sounds, List <string> samples)
        {
            var mixedSamples = new List <OffsetSampleProvider>();

            // Find P1 and P2 ends: a sentinel entry with keysoundId == -1 and
            // key == -1 marks the final offset for that player.
            int[] playerEnd = { -1, -1 };
            foreach (var sound in sounds)
            {
                if (sound.keysoundId == -1 && sound.key == -1)
                {
                    playerEnd[sound.player] = sound.offset;
                }
            }

            foreach (var sound in sounds)
            {
                // Skip sentinel entries; they carry no audio.
                if (sound.keysoundId == -1)
                {
                    continue;
                }

                var audioFile = new AudioFileReader(samples[sound.keysoundId]);
                var volSample = new VolumeSampleProvider(audioFile);

                // Normalize every sample to stereo 44.1 kHz before mixing.
                if (volSample.WaveFormat.Channels == 1)
                {
                    volSample = new VolumeSampleProvider(volSample.ToStereo());
                }

                if (volSample.WaveFormat.SampleRate != 44100)
                {
                    // Causes pop sound at end of audio
                    volSample = new VolumeSampleProvider(
                        new WaveToSampleProvider(
                            new MediaFoundationResampler(
                                new SampleToWaveProvider(volSample),
                                WaveFormat.CreateIeeeFloatWaveFormat(44100, 2)
                                )
                    {
                        ResamplerQuality = 60
                    }
                            )
                        );
                }

                // Assist-clap notes get their own volume; everything else uses
                // the global render volume.
                if (options.AssistClap && sound.keysoundId == assistClapIdx)
                {
                    volSample.Volume = options.AssistClapVolume;
                }
                else
                {
                    volSample.Volume = options.RenderVolume;
                }

                // Delay the sample so it starts at its chart offset (ms).
                var sample = new OffsetSampleProvider(volSample);
                sample.DelayBy = TimeSpan.FromMilliseconds(sound.offset);

                // Truncate any sound that would ring past its player's end marker.
                if (sound.player >= 0 && sound.player <= 1 && playerEnd[sound.player] != -1 && sound.offset + audioFile.TotalTime.TotalMilliseconds > playerEnd[sound.player])
                {
                    sample.Take = TimeSpan.FromMilliseconds(playerEnd[sound.player] - sound.offset);
                }

                mixedSamples.Add(sample);
            }

            // Mix in batches of 128 inputs, then mix the batch mixers together
            // (presumably to cap the input count per mixer — confirm rationale).
            var mixers = new List <MixingSampleProvider>();

            for (int i = 0; i < mixedSamples.Count; i += 128)
            {
                var arr = mixedSamples.Skip(i).Take(128).ToArray();
                mixers.Add(new MixingSampleProvider(arr));
            }

            var mixer = new MixingSampleProvider(mixers);

            if (options.OutputFormat.ToLower() == "wav")
            {
                WaveFileWriter.CreateWaveFile16(outputFilename, mixer);
            }
            else if (options.OutputFormat.ToLower() == "mp3")
            {
                // Render to a temporary WAV first, then encode it to MP3 with tags.
                var tempFilename = GetTempFileName();

                WaveFileWriter.CreateWaveFile16(tempFilename, mixer);

                ID3TagData id3 = new ID3TagData();
                id3.Album       = options.Id3Album;
                id3.AlbumArtist = options.Id3AlbumArtist;
                id3.Title       = options.Id3Title;
                id3.Artist      = options.Id3Artist;
                id3.Genre       = options.Id3Genre;
                id3.Track       = options.Id3Track;
                id3.Year        = options.Id3Year;

                using (var reader = new AudioFileReader(tempFilename))
                    using (var writer = new LameMP3FileWriter(outputFilename, reader.WaveFormat, 320, id3))
                    {
                        reader.CopyTo(writer);
                    }

                File.Delete(tempFilename);
            }
        }
コード例 #18
0
 public AudioSignal(int sampleRate)
 {
     // Mono IEEE-float output at the requested rate; subscribe to the engine's
     // finished-reading notification.
     var format = WaveFormat.CreateIeeeFloatWaveFormat(sampleRate, 1);
     this.WaveFormat = format;
     AudioService.Engine.FinishedReading += EngineFinishedReading;
 }
コード例 #19
0
ファイル: Program.cs プロジェクト: SlimeNull/AudioTest
        /// <summary>
        /// Entry point: "Host" captures system loopback audio and streams it to
        /// connected clients; "Sync" connects to a host and plays what it receives.
        /// </summary>
        static void Main(string[] sysargs)
        {
            // The original also built a WaveFormat local (1 MHz stereo IEEE float)
            // here that was never used anywhere — removed.
            StartupArgs args = Initialize(sysargs);

            if (args.Host)
            {
                if (!int.TryParse(args.Port, out int port) || port < 0)
                {
                    ErrorExit(-1, "Invalid port specified.");
                }

                using WasapiLoopbackCapture capture = new WasapiLoopbackCapture();
                EventedListener      listener = new EventedListener(IPAddress.Any, port);
                List <EventedClient> clients  = new List <EventedClient>();
                try
                {
                    listener.Start();
                }
                catch (Exception e)
                {
                    ErrorExit(-1, $"{e.GetType().Name}: {e.Message}");
                }
                listener.StartAcceptClient();
                // Event-handler parameters are named `e` rather than `args` so
                // they do not shadow the outer `args` local (CS0136).
                listener.ClientConnected += (s, e) =>
                {
                    lock (clients)
                    {
                        EventedClient client = e.Client;
                        clients.Add(client);
                        Console.WriteLine($"Client connected: {client.BaseSocket.RemoteEndPoint}");
                    }
                };
                capture.DataAvailable += (sender, e) =>
                {
                    // Fan the captured block out to every client; a failed send
                    // marks the client as disconnected and drops it.
                    lock (clients)
                    {
                        List <EventedClient> clientsToRemove = new List <EventedClient>();
                        foreach (var client in clients)
                        {
                            try
                            {
                                client.SendData(e.Buffer, 0, e.BytesRecorded);
                            }
                            catch
                            {
                                clientsToRemove.Add(client);
                                Console.WriteLine($"Client disconnected: {client.BaseSocket.RemoteEndPoint}");
                            }
                        }
                        foreach (var client in clientsToRemove)
                        {
                            clients.Remove(client);
                        }
                    }
                };
                capture.StartRecording();

                Console.WriteLine("Syncing audio as host...");
                // NOTE(review): busy-wait keeps the process alive while capturing;
                // consider an event or sleep to avoid pegging a CPU core.
                while (capture.CaptureState != NAudio.CoreAudioApi.CaptureState.Stopped)
                {
                    ;
                }
            }
            else if (args.Sync)
            {
                if (!TryGetAddress(args.Address, out IPAddress address))
                {
                    ErrorExit(-1, "Invalid address specified.");
                }
                if (!int.TryParse(args.Port, out int port) || port < 0)
                {
                    ErrorExit(-1, "Invalid port specified.");
                }
                EventedClient client = new EventedClient();
                try
                {
                    client.Connect(address, port);
                }
                catch
                {
                    ErrorExit(-2, "Cannot connect to host");
                }
                // Play samples as they arrive from the network.
                NetSampleProvider src = new NetSampleProvider(client);
                client.StartReceiveData();
                WaveOut wout = new WaveOut();
                wout.Init(src);
                wout.Play();

                Console.WriteLine("Syncing audio as client...");
                // NOTE(review): same busy-wait pattern as the host branch.
                while (wout.PlaybackState != PlaybackState.Stopped)
                {
                    ;
                }
            }
            else if (args.Help)
            {
                Console.WriteLine(
                    $@"Null.AudioSync : Sync audio with another computer
  Null.AudioSync Command Arguments
    Commands:
      Host : Build a AudioSync server.
      Sync : Connect a AudioSync server.
    Arguments:
      Address : Should be specified when SyncAudio from a server.
      Port    : Port will be listened or connected. default is 10001.
");
            }
            else
            {
                Console.WriteLine("Unknown command, use 'Null.AudioSync Help' for help");
            }
        }
コード例 #20
0
        public AudioEngine()
        {
            // 44.1 kHz mono IEEE float feeds the multi-input provider; the
            // callback fires when reading has finished.
            WaveFormat format = WaveFormat.CreateIeeeFloatWaveFormat(44100, 1);
            MultiInputProvider = new MultipleSampleToWaveProvider(format, () => OnFinishedReading());
        }
コード例 #21
0
 public AudioPlaybackEngine(int sampleRate = 44100, int channelCount = 2)
 {
     // The mixer emits IEEE float and keeps producing output even when it has
     // no inputs (ReadFully), so playback never stalls between sounds.
     var mixFormat = WaveFormat.CreateIeeeFloatWaveFormat(sampleRate, channelCount);
     mixer = new MixingSampleProvider(mixFormat)
     {
         ReadFully = true
     };
     mixer.MixerInputEnded += OnMixerInputEnded;
 }
コード例 #22
0
        /// <summary>
        /// Combine two stereo files into one quad (4-channel) file.
        /// </summary>
        /// <param name="filePathLeft">file path to the left stereo file</param>
        /// <param name="filePathRight">file path to the right stereo file</param>
        /// <param name="combinedFileNamePath">file path to the combined quad file</param>
        /// <returns>true if the combined file was written; false if the inputs have
        /// mismatched formats or are not stereo</returns>
        public static bool CombineStereoToQuad(string filePathLeft, string filePathRight, string combinedFileNamePath)
        {
            WaveFormat waveFormatLeft  = GetWaveFormat(filePathLeft);
            WaveFormat waveFormatRight = GetWaveFormat(filePathRight);

            if (!waveFormatLeft.Equals(waveFormatRight))
            {
                Console.Out.WriteLine("The two files to combine must have the same format");
                return(false);
            }
            if (waveFormatLeft.Channels != 2 || waveFormatRight.Channels != 2)
            {
                Console.Out.WriteLine("The two files to combine must be stereo");
                return(false);
            }

            int sampleRate = waveFormatLeft.SampleRate;

            float[] channel1;
            float[] channel2;
            float[] channel3;
            float[] channel4;
            SplitStereoWaveFileToMono(filePathLeft, out channel1, out channel2);
            SplitStereoWaveFileToMono(filePathRight, out channel3, out channel4);

            // Output channel order: left-L, left-R, right-L, right-R.
            float[][] channels = { channel1, channel2, channel3, channel4 };

            // BUGFIX: pad to the longest of ALL four channels. The original only
            // compared channel1 and channel3, which could truncate the output if
            // channel2 or channel4 happened to be the longest — the padding logic
            // below clearly anticipates per-channel length differences.
            int maxLength = Math.Max(
                Math.Max(channel1.Length, channel2.Length),
                Math.Max(channel3.Length, channel4.Length));

            using (WaveFileWriter wavWriter = new WaveFileWriter(combinedFileNamePath, WaveFormat.CreateIeeeFloatWaveFormat(sampleRate, 4)))
            {
                // Write one float per channel per frame (interlaced), padding
                // shorter channels with silence where necessary.
                for (int i = 0; i < maxLength; i++)
                {
                    foreach (float[] channel in channels)
                    {
                        wavWriter.WriteSample(i < channel.Length ? channel[i] : 0.0f);
                    }
                }
            }
            return(true);
        }
コード例 #23
0
 /// <summary>
 /// Builds the 32-bit IEEE floating point mixer.
 /// ReadFully makes it return silence even with no inputs, so playback never stalls.
 /// </summary>
 /// <param name="sampleRate">Mixer sample rate in Hz.</param>
 /// <param name="channels">Number of mixer channels.</param>
 private void InitMixer(int sampleRate, int channels)
 {
     var mixFormat = WaveFormat.CreateIeeeFloatWaveFormat(sampleRate, channels);
     mixer = new MixingSampleProvider(mixFormat)
     {
         ReadFully = true
     };
 }
コード例 #24
0
        /// <summary>
        /// One-time initialisation of the voice-chat pipeline: registers the
        /// push-to-talk hotkey, builds the DirectSound output + mixer chain,
        /// and starts the recording/playback worker threads.
        /// </summary>
        /// <param name="mySteamID">Raw 64-bit Steam ID of the local user.</param>
        public void Init(ulong mySteamID)
        {
            // Run-once guard: subsequent calls are no-ops.
            if (isInitialized)
            {
                return;
            }
            isInitialized = true;

            // The local user must not be registered as a listener of their own voice.
            RemoveListenerSteamID(mySteamID);
            this.mySteamID        = new CSteamID(mySteamID);
            newConnectionCallback = Callback <P2PSessionRequest_t> .Create(t => SteamNetworking.AcceptP2PSessionWithUser(t.m_steamIDRemote));

            // default accept all: every incoming P2P session request is accepted unconditionally.

            // Push-to-talk: on key down start Steam voice recording and announce
            // talking; on key up stop and announce silence. Registered through the
            // main window so the hook lives on the UI thread.
            Program.MainWindow.InvokeFunc(() => GlobalHook.RegisterHandler(PushToTalkHotkey, (key, pressed) =>
            {
                if (pressed)
                {
                    if (!isRecording)
                    {
                        SteamUser.StartVoiceRecording();
                        SteamFriends.SetInGameVoiceSpeaking(new CSteamID(mySteamID), true);
                        isRecording = true;
                        // Wake the recording thread so it starts pulling voice data.
                        talkWaiter.Set();
                        lastPacket[mySteamID] = DateTime.Now;
                        UserStartsTalking(mySteamID);
                    }
                }
                else
                {
                    if (isRecording)
                    {
                        SteamUser.StopVoiceRecording();
                        SteamFriends.SetInGameVoiceSpeaking(new CSteamID(mySteamID), false);
                        isRecording           = false;
                        lastPacket[mySteamID] = null;
                        UserStopsTalking(mySteamID);
                    }
                }
                // Returning false lets the key event propagate to other handlers.
                return(false);
            }));

            soundOut = new DirectSoundOut();

            // 32-bit IEEE float mono mixer; ReadFully keeps the output device fed
            // with silence when no voice input is available.
            mixer           = new MixingSampleProvider(WaveFormat.CreateIeeeFloatWaveFormat(sampleRate, 1));
            mixer.ReadFully = true;

            mixerWrapper = new MixerWrapper <ulong>(mixer);


            // Pre-allocated silence buffer sized for silencerBufferMs of audio.
            // NOTE(review): the trailing "/ 2 * 2" looks intended to force an even
            // byte count, but the arithmetic is done in double before the int cast,
            // so it is currently a no-op — confirm the intended sample alignment.
            silencer = new byte[(int)(new WaveFormat(sampleRate, 1).AverageBytesPerSecond *silencerBufferMs / 1000.0 / 2 * 2)];

            soundOut.Init(mixer);
            soundOut.Play();

            // Worker threads: recording always runs; playback either loops the
            // local mic back (test mode) or plays remote peers' voice.
            new Thread(RecordingFunc).Start();
            if (testLoopback)
            {
                new Thread(LoopbackFunc).Start();
            }
            else
            {
                new Thread(PlayingFunc).Start();
            }

            // Nudge each known peer so the P2P sessions get established up front.
            foreach (var t in targetSteamIDs)
            {
                SendDummyP2PPacket(t.Key);
            }
            lastPacket[mySteamID] = null;
            UserVoiceEnabled(mySteamID);
        }
コード例 #25
0
        /// <summary>
        /// Derives a WaveFormat from the source reader's current media type,
        /// optionally overriding rate/channels/bit depth with the caller's values.
        /// </summary>
        /// <param name="reader">Source reader whose first audio stream is queried.</param>
        /// <param name="useOverrides">When true, use <paramref name="rate"/>/<paramref name="chan"/>/<paramref name="bps"/> instead of the stream's values.</param>
        /// <param name="rate">Override sample rate in Hz.</param>
        /// <param name="chan">Override channel count.</param>
        /// <param name="bps">Override bits per sample.</param>
        /// <returns>PCM WaveFormat for PCM subtypes; IEEE-float WaveFormat otherwise.</returns>
        private WaveFormat GetCurrentWaveFormat(IMFSourceReader reader, bool useOverrides, int rate, int chan, int bps)
        {
            IMFMediaType uncompressedMediaType;
            reader.GetCurrentMediaType(MediaFoundationInterop.MF_SOURCE_READER_FIRST_AUDIO_STREAM, out uncompressedMediaType);

            // Query the properties directly (the alternative is converting to a
            // WaveFormatEx via MFCreateWaveFormatExFromMFMediaType).
            var mediaType = new MediaType(uncompressedMediaType);
            Guid majorType = mediaType.MajorType;

            Debug.Assert(majorType == MediaTypes.MFMediaType_Audio);
            Guid subType = mediaType.SubType;

            int channels;
            int bits;
            int sampleRate;
            if (useOverrides)
            {
                channels   = chan;
                bits       = bps;
                sampleRate = rate;
            }
            else
            {
                channels   = mediaType.ChannelCount;
                bits       = mediaType.BitsPerSample;
                sampleRate = mediaType.SampleRate;
            }

            // PCM keeps its native bit depth; every other subtype is treated as IEEE float.
            if (subType == AudioSubtypes.MFAudioFormat_PCM)
            {
                return(new WaveFormat(sampleRate, bits, channels));
            }
            return(WaveFormat.CreateIeeeFloatWaveFormat(sampleRate, channels));
        }
コード例 #26
0
        internal void WaveFormatRestriction_ReturnsChannelCodeOverStereo()
        {
            // Six channels exceed MP3's limit, so the channel restriction code must be reported.
            var sixChannelFormat = WaveFormat.CreateIeeeFloatWaveFormat(Recorder.MP3_MAX_SAMPLE_RATE, 6);

            var restrictions = sixChannelFormat.GetMP3RestrictionCode();

            Assert.Contains(WaveFormatMP3Restriction.Channel, restrictions);
        }
コード例 #27
0
 /// <summary>
 /// Builds the stereo IEEE-float client audio mixer at the fixed output rate.
 /// ReadFully keeps the mixer producing silence when no inputs are attached.
 /// </summary>
 private void InitMixers()
 {
     var outputFormat = WaveFormat.CreateIeeeFloatWaveFormat(OUTPUT_SAMPLE_RATE, 2);
     _clientAudioMixer = new MixingSampleProvider(outputFormat)
     {
         ReadFully = true
     };
 }
コード例 #28
0
        internal void WaveFormatRestriction_ReturnsSampleRateCodeOver48k()
        {
            // 96 kHz exceeds MP3's maximum sample rate, so the rate restriction code must be reported.
            var highRateFormat = WaveFormat.CreateIeeeFloatWaveFormat(96000, Recorder.MP3_MAX_NUMBER_CHANNELS);

            var restrictions = highRateFormat.GetMP3RestrictionCode();

            Assert.Contains(WaveFormatMP3Restriction.SampleRate, restrictions);
        }
コード例 #29
0
 /// <summary>
 /// Creates the generator with a 44.1 kHz mono float format,
 /// defaulting to a 440 Hz tone at unity gain.
 /// </summary>
 public SineGen()
 {
     waveFormat = WaveFormat.CreateIeeeFloatWaveFormat(44100, 1);
     // Assignment order preserved: Freq is a property and may interact with gain.
     Freq = 440;
     gain = 1;
 }
コード例 #30
0
 /// <summary>
 /// Creates a WAV writer producing 32-bit IEEE-float samples at the given path.
 /// </summary>
 /// <param name="pathToFile">Destination WAV file path.</param>
 /// <param name="sampleRate">Sample rate in Hz.</param>
 /// <param name="numberOfChannels">Number of audio channels.</param>
 /// <returns>A writer the caller is responsible for disposing.</returns>
 public WaveFileWriter GetWriter(string pathToFile, int sampleRate, int numberOfChannels)
 {
     var floatFormat = WaveFormat.CreateIeeeFloatWaveFormat(sampleRate, numberOfChannels);
     return(new WaveFileWriter(pathToFile, floatFormat));
 }