예제 #1
0
        /// <summary>
        ///     Creates the DirectSound device, negotiates a wave format the device
        ///     supports (falling back to 16-bit, then 8-bit PCM) and allocates the
        ///     primary and secondary playback buffers.
        /// </summary>
        /// <exception cref="NotSupportedException">
        ///     Thrown when neither the source format nor its 16/8-bit variants are
        ///     supported by the device.
        /// </exception>
        private void InitializeInternal()
        {
            //Use Desktophandle as default handle
            IntPtr handle = DSUtils.GetDesktopWindow();

            IntPtr pdsound;

            // Create the DirectSound8 device; Try(...) throws on a failed HRESULT.
            DirectSoundException.Try(NativeMethods.DirectSoundCreate8(ref _device, out pdsound, IntPtr.Zero),
                                     "DSInterop", "DirectSoundCreate8");

            _directSound = new DirectSound8(pdsound);
            _directSound.SetCooperativeLevel(handle, DSCooperativeLevelType.Normal); //use normal as default
            if (!_directSound.SupportsFormat(_source.WaveFormat))
            {
                if (_source.WaveFormat.WaveFormatTag == AudioEncoding.IeeeFloat) //directsound does not support ieeefloat
                {
                    // Convert float samples to 16-bit PCM before probing further.
                    _source = _source.ToSampleSource().ToWaveSource(16);
                }

                // Probe 16-bit and 8-bit variants of the (possibly converted) format.
                WaveFormat format16Bit = (WaveFormat)_source.WaveFormat.Clone();
                format16Bit.BitsPerSample = 16;
                WaveFormat format8Bit = (WaveFormat)_source.WaveFormat.Clone();
                format8Bit.BitsPerSample = 8;

                if (_directSound.SupportsFormat(format16Bit))
                {
                    _source = _source.ToSampleSource().ToWaveSource(16);
                }
                else if (_directSound.SupportsFormat(format8Bit))
                {
                    _source = _source.ToSampleSource().ToWaveSource(8);
                }
                else
                {
                    throw new NotSupportedException(
                              "WaveFormat of the source is not supported.");
                }

                // Defensive re-check: the converted source must now be playable.
                if (!_directSound.SupportsFormat(_source.WaveFormat))
                {
                    throw new NotSupportedException(
                              "WaveFormat of the source is not supported.");
                }
            }

            WaveFormat waveFormat = _source.WaveFormat;
            var        bufferSize = (int)waveFormat.MillisecondsToBytes(_latency);

            // Secondary buffer is sized at twice the latency window — presumably for
            // double buffering; TODO confirm against the playback loop.
            _primaryBuffer   = new DirectSoundPrimaryBuffer(_directSound);
            _secondaryBuffer = new DirectSoundSecondaryBuffer(_directSound, waveFormat, bufferSize * 2);
        }
예제 #2
0
        /// <summary>
        ///     Converts the specified wave source with n channels to a wave source with one channel.
        ///     A stereo <paramref name="input" /> is routed through the sample-source pipeline;
        ///     multichannel input requires a <see cref="WaveFormatExtensible" /> channel mask.
        /// </summary>
        /// <param name="input">Already existing wave source.</param>
        /// <returns><see cref="IWaveSource" /> instance with one channel.</returns>
        public static IWaveSource ToMono(this IWaveSource input)
        {
            if (input is null)
            {
                throw new ArgumentNullException("input");
            }

            int channelCount = input.WaveFormat.Channels;

            if (channelCount == 1)
            {
                // Already mono — nothing to do.
                return input;
            }

            if (channelCount == 2)
            {
                // Stereo: downmix via the sample-source pipeline.
                ISampleSource samples = input.ToSampleSource();
                return new StereoToMonoSource(samples).ToWaveSource();
            }

            if (input.WaveFormat is WaveFormatExtensible extensible)
            {
                // Multichannel with a channel mask: matrix-mix down to mono.
                ChannelMatrix matrix = ChannelMatrix.GetMatrix(extensible.ChannelMask, ChannelMasks.MonoMask);
                return new DmoChannelResampler(input, matrix);
            }

            throw new ArgumentException(
                      "The specified input can't be converted to a mono source. The input does not provide a WaveFormatExtensible.",
                      "input");
        }
예제 #3
0
        /// <summary>
        ///     Loads the media at the given URI: builds a mono playback chain with
        ///     spectrum interception, plus an independent sample source, then starts
        ///     the output via <c>LoadSoundOut</c>.
        /// </summary>
        /// <param name="uri">Location of the media to load.</param>
        public void Load(Uri uri)
        {
            // Release any previously loaded source/output first.
            Dispose();
            _uri        = uri;
            // Decode the media, fold it to a single channel, back to raw waves.
            _waveSource = CSCore.Codecs.CodecFactory.Instance.GetCodec(_uri)
                          .ToSampleSource()
                          .ToMono()
                          .ToWaveSource();

            spectrumProvider = new BasicSpectrumProvider(_waveSource.WaveFormat.Channels,
                                                         _waveSource.WaveFormat.SampleRate,
                                                         CSCore.DSP.FftSize.Fft4096);

            //the SingleBlockNotificationStream is used to intercept the played samples
            var notificationSource = new SingleBlockNotificationStream(_waveSource.ToSampleSource());

            //pass the intercepted samples as input data to the spectrumprovider (which will calculate a fft based on them)
            // NOTE(review): the handler reads the SpectrumProvider property, not the
            // spectrumProvider field assigned above — confirm the property wraps that field.
            notificationSource.SingleBlockRead += (s, a) => SpectrumProvider.Add(a.Left, a.Right);
            _waveSource = notificationSource.ToWaveSource(16);
            // Load the sample source
            // NOTE(review): the codec is decoded a second time here, so _sampleSource
            // advances independently of the playback chain built above — confirm intended.
            var ws = CSCore.Codecs.CodecFactory.Instance.GetCodec(_uri);

            _sampleSource = ws.ToSampleSource();
            RaiseSourceEvent(SourceEventType.Loaded);
            hasMedia = true;
            LoadSoundOut();
        }
예제 #4
0
        /// <summary>
        ///     Creates the WASAPI output, configures stream routing for the chosen
        ///     device and initializes playback through a sample-notification stream.
        /// </summary>
        /// <param name="soundSource">Source to play.</param>
        private void InitializeSoundOut(IWaveSource soundSource)
        {
            // Build the WASAPI output that will render the audio.
            var wasapiOut = new WasapiOut(this.eventSync, this.audioClientShareMode, this.latency, ThreadPriority.Highest);
            this.soundOut = wasapiOut;

            if (this.outputDevice == null)
            {
                // Playing on the default device: follow the system default when it changes.
                wasapiOut.StreamRoutingOptions = StreamRoutingOptions.All;
            }
            else
            {
                // Explicit device: only reroute if that device disconnects.
                wasapiOut.StreamRoutingOptions = StreamRoutingOptions.OnDeviceDisconnect;
                wasapiOut.Device = this.outputDevice;
            }

            // Wrap the source so played sample blocks can be observed, and feed the
            // output a 16-bit view of that stream.
            this.notificationSource = new SingleBlockNotificationStream(soundSource.ToSampleSource());
            this.soundOut.Initialize(this.notificationSource.ToWaveSource(16));

            // Attach every registered listener to the sample notifications.
            foreach (var inputStream in inputStreamList)
            {
                this.notificationSource.SingleBlockRead += inputStream;
            }

            this.soundOut.Stopped += this.SoundOutStoppedHandler;
            this.soundOut.Volume   = this.volume;
        }
예제 #5
0
        /// <summary>
        ///     Converts the specified wave source with n channels to a wave source with one channel,
        ///     preserving the input's bit depth for stereo sources. Multichannel input is
        ///     matrix-mixed when a channel mask is available, otherwise resampled to one channel.
        /// </summary>
        /// <param name="input">Already existing wave source.</param>
        /// <returns><see cref="IWaveSource" /> instance with one channel.</returns>
        public static IWaveSource ToMono(this IWaveSource input)
        {
            if (input == null)
            {
                throw new ArgumentNullException("input");
            }

            switch (input.WaveFormat.Channels)
            {
            case 1:
                // Already mono.
                return input;

            case 2:
                // Stereo: downmix and keep the original bit depth.
                return new StereoToMonoSource(input.ToSampleSource())
                           .ToWaveSource(input.WaveFormat.BitsPerSample);
            }

            var extensible = input.WaveFormat as WaveFormatExtensible;
            if (extensible != null)
            {
                // Channel mask present: matrix-mix down to mono.
                return new DmoChannelResampler(
                           input,
                           ChannelMatrix.GetMatrix(extensible.ChannelMask, ChannelMasks.MonoMask));
            }

            // No channel mask available: resample down to a single channel.
            Debug.WriteLine("MultiChannel stream with no ChannelMask.");

            var monoFormat = (WaveFormat)input.WaveFormat.Clone();
            monoFormat.Channels = 1;
            return new DmoResampler(input, monoFormat);
        }
예제 #6
0
        /// <summary>
        ///     Loads the given file for playback on the specified device, inserting a
        ///     pitch shifter into the sample chain.
        /// </summary>
        /// <param name="filename">Path of the media file to open.</param>
        /// <param name="device">Output device used for playback.</param>
        public void Open(string filename, MMDevice device)
        {
            CleanupPlayback();

            // Decode the file and fold it down to mono.
            _waveSource =
                CodecFactory.Instance.GetCodec(filename)
                .ToSampleSource()
                .ToMono()
                .ToWaveSource();

            _sampleSource = _waveSource.ToSampleSource()
                            .AppendSource(x => new PitchShifter(x), out _pitchShifter);

            // NOTE(review): the output below is initialized with _waveSource, not the
            // pitch-shifted chain — presumably SetupSampleSource rewires _waveSource;
            // confirm, otherwise the pitch shifter is bypassed.
            SetupSampleSource(_sampleSource);

            _soundOut = new WasapiOut()
            {
                Latency = 100, Device = device
            };
            _soundOut.Initialize(_waveSource);
            if (PlaybackStopped != null)
            {
                _soundOut.Stopped += PlaybackStopped;
            }
        }
예제 #7
0
 /// <summary>
 ///     Inserts a 10-band equalizer into the chain and returns the result as a
 ///     wave source; the created equalizer is stored in <c>_equalizer</c>.
 /// </summary>
 /// <param name="source">Source to append the equalizer to.</param>
 /// <returns>The equalized stream as an <see cref="IWaveSource"/>.</returns>
 public override IWaveSource AppendSource(IWaveSource source)
 {
     ISampleSource samples = source.ToSampleSource();
     var equalized = samples.AppendSource(CSCore.Streams.Effects.Equalizer.Create10BandEqualizer, out _equalizer);
     return equalized.ToWaveSource();
 }
예제 #8
0
        /// <summary>
        ///     Loads the given song file, preparing the decode chain and the WASAPI
        ///     output. Rethrows any load failure after logging and showing the error.
        /// </summary>
        /// <param name="cual">Path of the file to load.</param>
        public void CargarCancion(string cual)
        {
            // Record the container format from the file extension; unknown
            // extensions intentionally fall through without changing it.
            switch (Path.GetExtension(cual))
            {
            case ".mp3":
                FormatoSonido = FormatoSonido.MP3;
                break;

            case ".flac":
                FormatoSonido = FormatoSonido.FLAC;
                break;

            case ".ogg":
                FormatoSonido = FormatoSonido.OGG;
                break;

            default:
                break;
            }
            try
            {
                Log.Instance.PrintMessage("Intentando cargar " + cual, MessageType.Info);
                if (Path.GetExtension(cual) == ".ogg")
                {
                    // Ogg goes through NVorbis from a raw read-only file stream.
                    FileStream stream = new FileStream(cual, FileMode.Open, FileAccess.Read);
                    NVorbis = new NVorbisSource(stream);
                    _sound  = NVorbis.ToWaveSource(16);
                }
                else
                {
                    // Other formats: decode, force stereo, 16-bit output.
                    _sound             = CSCore.Codecs.CodecFactory.Instance.GetCodec(cual).ToSampleSource().ToStereo().ToWaveSource(16);
                    // NOTE(review): notificationStream wraps _sound but is never
                    // passed to _output below — confirm whether it should be.
                    notificationStream = new SingleBlockNotificationStream(_sound.ToSampleSource());
                    FileInfo info = new FileInfo(cual);
                    tamFich = info.Length;
                }

                _output = new WasapiOut(false, AudioClientShareMode.Shared, 100);
                //_sonido.Position = 0;
                _output.Initialize(_sound);
                Log.Instance.PrintMessage("Cargado correctamente" + cual, MessageType.Correct);
            }
            catch (IOException ex)
            {
                // I/O failure (file locked/missing): log, surface a localized error,
                // clear the half-initialized state and rethrow for the caller.
                Log.Instance.PrintMessage("Error de IO", MessageType.Error);
                Log.Instance.PrintMessage(ex.Message, MessageType.Error);
                Kernel.ShowError(Kernel.LocalTexts.GetString("errorReproduccion"));
                _output = null;
                _sound  = null;
                throw;
            }
            catch (Exception ex)
            {
                // Any other failure: same cleanup, show the raw message instead.
                Log.Instance.PrintMessage("Hubo un problema...", MessageType.Error);
                Log.Instance.PrintMessage(ex.Message, MessageType.Error);
                Kernel.ShowError(ex.Message);
                _output = null;
                _sound  = null;
                throw;
            }
        }
예제 #9
0
        /// <summary>
        ///     Creates the WASAPI output bound to the given endpoint and initializes
        ///     playback through a sample-notification stream.
        /// </summary>
        /// <param name="soundSource">Source to play.</param>
        /// <param name="outputDevice">Endpoint to render on.</param>
        private void InitializeSoundOut(IWaveSource soundSource, MMDevice outputDevice)
        {
            // WASAPI output on the requested endpoint; always follow routing changes.
            var wasapiOut = new WasapiOut(this.eventSync, this.audioClientShareMode, this.latency, ThreadPriority.Highest);
            wasapiOut.Device = outputDevice;
            wasapiOut.StreamRoutingOptions = StreamRoutingOptions.All;
            this.soundOut = wasapiOut;

            // Wrap the source so played sample blocks can be observed; render 16-bit.
            this.notificationSource = new SingleBlockNotificationStream(soundSource.ToSampleSource());
            this.soundOut.Initialize(this.notificationSource.ToWaveSource(16));

            // Attach every registered listener to the sample notifications.
            foreach (var inputStream in inputStreamList)
            {
                this.notificationSource.SingleBlockRead += inputStream;
            }

            this.soundOut.Stopped += this.SoundOutStoppedHandler;

            this.soundOut.Volume = this.volume;
        }
예제 #10
0
        /// <summary>
        ///     Loads the given song file: reads format-specific tags, builds the
        ///     decode chain and initializes the WASAPI output. Rethrows on failure.
        /// </summary>
        /// <param name="cual">Path of the file to load.</param>
        public void CargarCancion(string cual)
        {
            switch (Path.GetExtension(cual))
            {
            case ".mp3":
                // Read ID3v2 tag info for MP3 files.
                CSCore.Tags.ID3.ID3v2 mp3tag = CSCore.Tags.ID3.ID3v2.FromFile(cual);
                tags          = new CSCore.Tags.ID3.ID3v2QuickInfo(mp3tag);
                FormatoSonido = FormatoSonido.MP3;
                break;

            case ".flac":
                _ficheroFLAC = new FLACFile(cual, true);
                // NOTE(review): this FlacFile is created but never used or disposed —
                // confirm whether it can be removed.
                CSCore.Codecs.FLAC.FlacFile ff = new CSCore.Codecs.FLAC.FlacFile(cual);
                FormatoSonido = FormatoSonido.FLAC;
                break;

            case ".ogg":
                FormatoSonido = FormatoSonido.OGG;
                break;

            default:
                break;
            }
            try
            {
                Log.Instance.ImprimirMensaje("Intentando cargar " + cual, TipoMensaje.Info);
                if (Path.GetExtension(cual) == ".ogg")
                {
                    // Ogg is decoded through NVorbis from a raw file stream.
                    FileStream stream = new FileStream(cual, FileMode.Open);
                    NVorbis = new NVorbisSource(stream);
                    _sonido = NVorbis.ToWaveSource(16);
                }
                else
                {
                    // Other formats: decode, force stereo, 16-bit output.
                    _sonido            = CSCore.Codecs.CodecFactory.Instance.GetCodec(cual).ToSampleSource().ToStereo().ToWaveSource(16);
                    // NOTE(review): notificationStream is created but not wired to the
                    // output (see the commented-out Initialize below) — confirm intent.
                    notificationStream = new SingleBlockNotificationStream(_sonido.ToSampleSource());
                    //_salida.Initialize(notificationStream.ToWaveSource(16));
                    FileInfo info = new FileInfo(cual);
                    tamFich = info.Length;
                }
                _salida          = new WasapiOut(false, AudioClientShareMode.Shared, 100);
                _sonido.Position = 0;
                _salida.Initialize(_sonido);
                Log.Instance.ImprimirMensaje("Cargado correctamente" + cual, TipoMensaje.Correcto);
            }
            catch (IOException)
            {
                // File locked by another process: log and let the caller handle it.
                Log.Instance.ImprimirMensaje("No se puede reproducir el fichero porque está siendo utilizado por otro proceso", TipoMensaje.Error);
                throw;
            }
            catch (Exception)
            {
                // Any other failure is logged as "file not found" and rethrown.
                Log.Instance.ImprimirMensaje("No se encuentra el fichero", TipoMensaje.Advertencia);
                throw;
            }
        }
예제 #11
0
 /// <summary>
 ///     Decodes <paramref name="pathToSourceFile"/> and writes it back out to
 ///     <paramref name="pathToDestinationFile"/> as 16-bit PCM.
 /// </summary>
 /// <param name="pathToSourceFile">File to decode.</param>
 /// <param name="pathToDestinationFile">Destination for the 16-bit output.</param>
 public void ConvertToPCM16Bit(string pathToSourceFile, string pathToDestinationFile)
 {
     // Both sources are disposed once the file has been written.
     using (IWaveSource source = CodecFactory.Instance.GetCodec(pathToSourceFile))
     using (IWaveSource destination = source.ToSampleSource().ToWaveSource(16))
     {
         destination.WriteToFile(pathToDestinationFile);
     }
 }
예제 #12
0
        /// <summary>
        ///     Creates the DirectSound device, picks a supported wave format (falling
        ///     back to 16-bit, then 8-bit PCM) and allocates the playback buffers.
        /// </summary>
        /// <exception cref="InvalidOperationException">
        ///     Thrown when no supported format can be negotiated for the device.
        /// </exception>
        private void InitializeInternal()
        {
            //Use Desktophandle as default handle
            IntPtr handle = DSUtils.GetDesktopWindow();

            IntPtr pdsound;

            // Create the DirectSound8 device; Try(...) throws on a failed HRESULT.
            DirectSoundException.Try(NativeMethods.DirectSoundCreate8(ref _device, out pdsound, IntPtr.Zero), "DSInterop", "DirectSoundCreate8");

            _directSound = new DirectSound8(pdsound);
            _directSound.SetCooperativeLevel(handle, DSCooperativeLevelType.DSSCL_NORMAL); //use normal as default
            if (!_directSound.SupportsFormat(_source.WaveFormat))
            {
                // DirectSound cannot play IEEE float; convert to 16-bit PCM first.
                if (_source.WaveFormat.WaveFormatTag == AudioEncoding.IeeeFloat)
                {
                    _source = _source.ToSampleSource().ToWaveSource(16);
                }
                // Probe a 16-bit variant of the source format, then an 8-bit one.
                if (_directSound.SupportsFormat(new WaveFormat(_source.WaveFormat.SampleRate, 16, _source.WaveFormat.Channels, _source.WaveFormat.WaveFormatTag)))
                {
                    _source = _source.ToSampleSource().ToWaveSource(16);
                }
                else if (_directSound.SupportsFormat(new WaveFormat(_source.WaveFormat.SampleRate, 8, _source.WaveFormat.Channels, _source.WaveFormat.WaveFormatTag)))
                {
                    _source = _source.ToSampleSource().ToWaveSource(8);
                }
                else
                {
                    throw new InvalidOperationException("Invalid WaveFormat. WaveFormat specified by parameter {_source} is not supported by this DirectSound-Device.");
                }

                // Defensive re-check: the converted source must now be playable.
                if (!_directSound.SupportsFormat(_source.WaveFormat))
                {
                    throw new InvalidOperationException("Invalid WaveFormat. WaveFormat specified by parameter {_source} is not supported by this DirectSound-Device.");
                }
            }

            WaveFormat waveFormat = _source.WaveFormat;
            int        bufferSize = (int)waveFormat.MillisecondsToBytes(_latency);

            _primaryBuffer   = new DirectSoundPrimaryBuffer(_directSound);
            _secondaryBuffer = new DirectSoundSecondaryBuffer(_directSound, waveFormat, bufferSize);
        }
예제 #13
0
        /// <summary>
        ///     Inserts a bi-quad filter stage into the chain, configured as a 1 kHz
        ///     low-pass at the stream's sample rate.
        /// </summary>
        /// <param name="source">Source to append the filter to.</param>
        /// <returns>The filtered stream as an <see cref="IWaveSource"/>.</returns>
        public override IWaveSource AppendSource(IWaveSource source)
        {
            // Chain: wave -> samples -> bi-quad filter -> wave.
            var filtered = source
                           .ToSampleSource()
                           .AppendSource(x => new BiQuadFilterSource(x), out var filter)
                           .ToWaveSource();

            // Default to a 1 kHz low-pass at the stream's sample rate.
            filter.Filter = new CSCore.DSP.LowpassFilter(filtered.WaveFormat.SampleRate, 1000);

            return filtered;
        }
예제 #14
0
 /// <summary>
 ///     Builds the playback chain for the given track: decode, resample to
 ///     32 kHz, add a 10-band equalizer, then hand the result to WASAPI.
 /// </summary>
 /// <param name="music">Track whose file should be prepared for playback.</param>
 private void initializeSoundSource(MusicEntity music)
 {
     // Drop the previous source before building a new one.
     iws?.Dispose();
     iws = GetCodec(music.Extension, music.Path);
     // NOTE(review): sample rate is hard-coded to 32000 Hz — confirm this is
     // intentional rather than an accidental downsample.
     iws = lfw.Initialize(iws.ToSampleSource()
                          .ChangeSampleRate(32000)
                          .AppendSource(Equalizer.Create10BandEqualizer, out mEqualizer));
     wasapiOut.Initialize(iws);
     // Default playback volume.
     Volume           = 0.7f;
     wasapiOut.Volume = Volume;
 }
예제 #15
0
        /// <summary>
        ///     Inserts a bi-quad filter stage into the chain, records the stream's
        ///     sample rate and (re)builds the filter for it.
        /// </summary>
        /// <param name="source">Source to append the filter to.</param>
        /// <returns>The filtered stream as an <see cref="IWaveSource"/>.</returns>
        public override IWaveSource AppendSource(IWaveSource source)
        {
            // Chain: wave -> samples -> bi-quad filter -> wave.
            IWaveSource result = source
                                 .ToSampleSource()
                                 .AppendSource(x => new BiQuadFilterSource(x), out _biQuadFilterSource)
                                 .ToWaveSource();

            // Remember the rate and configure the filter for it.
            _sampleRate = result.WaveFormat.SampleRate;
            InitializeFilter();

            return result;
        }
예제 #16
0
 /// <summary>
 ///     Picks the platform sound output: WASAPI when supported, otherwise
 ///     DirectSound (downconverting the source to 16 bit if needed).
 /// </summary>
 /// <param name="source">Source to play; may be replaced by a 16-bit conversion.</param>
 /// <returns>The created <see cref="ISoundOut"/>.</returns>
 ISoundOut CreateSoundOut(ref IWaveSource source)
 {
     if (WasapiOut.IsSupportedOnCurrentPlatform)
     {
         return new WasapiOut(true, AudioClientShareMode.Shared, 50);
     }

     // DirectSound path: clamp the bit depth to 16.
     ISoundOut soundOut = new DirectSoundOut() { Latency = 100 };
     if (source.WaveFormat.BitsPerSample > 16)
     {
         source = source.ToSampleSource().ToWaveSource(16);
     }
     return soundOut;
 }
예제 #17
0
        /// <summary>
        ///     Creates a sample source for the given sound, decoding WAV or MP3 data
        ///     from the sound's stream.
        /// </summary>
        /// <param name="sound">Sound whose stream should be decoded.</param>
        /// <returns>An <see cref="ISampleSource"/> producing the decoded samples.</returns>
        /// <exception cref="ArgumentOutOfRangeException">
        ///     Thrown when <paramref name="sound"/> has an unsupported format.
        /// </exception>
        private static ISampleSource GetSampleSourceForSound(Sound sound)
        {
            var soundImpl = sound;

            IWaveSource waveSource = soundImpl.Format switch
            {
                SoundFormat.Wav => new WaveFileReader(soundImpl.SoundStream.MakeShared()),
                SoundFormat.Mp3 => new DmoMp3Decoder(soundImpl.SoundStream.MakeShared()),
                // BUG FIX: the single-string ctor treats its argument as the parameter
                // name, not the message; use the (paramName, message) overload so the
                // message actually reaches the exception text.
                _ => throw new ArgumentOutOfRangeException(nameof(sound),
                         $"Unsupported sound format: {soundImpl.Format}.")
            };

            return(waveSource.ToSampleSource());
        }
예제 #18
0
        /// <summary>
        ///     Re-initializes playback with a pitch-shifted version of the current
        ///     wave source. Does nothing when no wave source is loaded.
        /// </summary>
        /// <param name="pitch">Pitch-shift factor applied to the source.</param>
        public void ApplyPitch(float pitch)
        {
            if (_waveSource == null)
            {
                return;
            }

            // Wrap the current source in a pitch shifter and restart playback with it.
            var pitchPass = new PitchShifter(_waveSource.ToSampleSource());

            pitchPass.PitchShiftFactor = pitch;

            InitPlayback(pitchPass.ToWaveSource());
        }
예제 #19
0
        /// <summary>
        ///     Handles the Open menu item: lets the user pick a media file, builds the
        ///     spectrum-analysis chain and starts playback.
        /// </summary>
        private void openToolStripMenuItem_Click(object sender, EventArgs e)
        {
            var openFileDialog = new OpenFileDialog()
            {
                Filter = CodecFactory.SupportedFilesFilterEn,
                Title  = "Select a file..."
            };

            if (openFileDialog.ShowDialog() == DialogResult.OK)
            {
                // Stop any running playback before swapping sources.
                Stop();

                const FftSize fftSize = FftSize.Fft4096;

                IWaveSource source = CodecFactory.Instance.GetCodec(openFileDialog.FileName);

                // Both visualizations below share one FFT provider.
                var spectrumProvider = new BasicSpectrumProvider(source.WaveFormat.Channels,
                                                                 source.WaveFormat.SampleRate, fftSize);
                _lineSpectrum = new LineSpectrum(fftSize)
                {
                    SpectrumProvider = spectrumProvider,
                    UseAverage       = true,
                    BarCount         = 50,
                    BarSpacing       = 2,
                    IsXLogScale      = true,
                    ScalingStrategy  = ScalingStrategy.Sqrt
                };
                _voicePrint3DSpectrum = new VoicePrint3DSpectrum(fftSize)
                {
                    SpectrumProvider = spectrumProvider,
                    UseAverage       = true,
                    PointCount       = 200,
                    IsXLogScale      = true,
                    ScalingStrategy  = ScalingStrategy.Sqrt
                };

                // Intercept each played sample block and feed it to the FFT provider.
                var notificationSource = new SingleBlockNotificationStream(source.ToSampleSource());
                notificationSource.SingleBlockRead += (s, a) => spectrumProvider.Add(a.Left, a.Right);

                _source = notificationSource.ToWaveSource(16);

                _soundOut = new WasapiOut();
                _soundOut.Initialize(_source.ToMono());
                _soundOut.Play();

                timer1.Start();

                // Expose the visualization settings in the property grids.
                propertyGridTop.SelectedObject    = _lineSpectrum;
                propertyGridBottom.SelectedObject = _voicePrint3DSpectrum;
            }
        }
예제 #20
0
        /// <summary>
        ///     Creates and initializes the sound output: WASAPI when Windows Media
        ///     Foundation is available, DirectSound otherwise.
        /// </summary>
        /// <param name="soundSource">Source to play.</param>
        private void InitializeSoundOut(IWaveSource soundSource)
        {
            // Create SoundOut
            if (this.supportsWindowsMediaFoundation)
            {
                this.soundOut = new WasapiOut(this.eventSync, this.audioClientShareMode, this.latency, ThreadPriority.Highest);

                // Map stereo or mono file to all channels
                ((WasapiOut)this.soundOut).UseChannelMixingMatrices = this.useAllAvailableChannels;

                if (this.outputDevice == null)
                {
                    // If no output device was provided, we're playing on the default device.
                    // In such case, we want to detect when the default device changes.
                    // This is done by setting stream routing options
                    ((WasapiOut)this.soundOut).StreamRoutingOptions = StreamRoutingOptions.All;
                }
                else
                {
                    // If an output device was provided, assign it to soundOut.Device.
                    // Only allow stream routing when the device was disconnected.
                    ((WasapiOut)this.soundOut).StreamRoutingOptions = StreamRoutingOptions.OnDeviceDisconnect;
                    ((WasapiOut)this.soundOut).Device = this.outputDevice;
                }

                // Initialize SoundOut with a 16-bit view of the notification stream so
                // the registered input streams can observe the played samples.
                this.notificationSource = new SingleBlockNotificationStream(soundSource.ToSampleSource());
                this.soundOut.Initialize(this.notificationSource.ToWaveSource(16));

                if (inputStreamList.Count != 0)
                {
                    foreach (var inputStream in inputStreamList)
                    {
                        this.notificationSource.SingleBlockRead += inputStream;
                    }
                }
            }
            else
            {
                this.soundOut = new DirectSoundOut(this.latency, ThreadPriority.Highest);

                // Initialize SoundOut
                // Spectrum analyzer performance is only acceptable with WasapiOut,
                // so we're not setting a notificationSource for DirectSoundOut
                this.soundOut.Initialize(soundSource);
            }

            this.soundOut.Stopped += this.SoundOutStoppedHandler;
            this.soundOut.Volume   = this.volume;
        }
예제 #21
0
        /// <summary>
        ///     Creates the WASAPI output for the already-loaded wave source and applies
        ///     the stored volume. Does nothing when no source is loaded.
        /// </summary>
        private void LoadSoundOut()
        {
            if (_waveSource == null)
            {
                return;
            }

            // Event-synced shared-mode WASAPI output on the configured device.
            _soundOut = new CSCore.SoundOut.WasapiOut(true, CSCore.CoreAudioAPI.AudioClientShareMode.Shared,
                                                      100, System.Threading.ThreadPriority.Highest)
            {
                Device = OutputDevice.ActualDevice
            };

            // Feed the output a 16-bit view of the loaded source.
            _soundOut.Initialize(_waveSource.ToSampleSource().ToWaveSource(16));
            // set defaults
            _soundOut.Volume = volume;
        }
        /// <summary>
        ///     Starts a loopback capture on the given render device and wires the
        ///     captured samples into the processing stream.
        /// </summary>
        /// <param name="captureDevice">Render endpoint to capture from.</param>
        /// <exception cref="NullReferenceException">
        ///     Thrown when no channel count yields a working loopback capture.
        /// </exception>
        public void Initialize(MMDevice captureDevice)
        {
            //BLARG 01.14.2020: Don't need the default when we're given an Audio Enpoint
            //MMDevice captureDevice = MMDeviceEnumerator.DefaultAudioEndpoint(DataFlow.Render, Role.Console);
            WaveFormat deviceFormat = captureDevice.DeviceFormat;

            _audioEndpointVolume = AudioEndpointVolume.FromDevice(captureDevice);

            //DarthAffe 07.02.2018: This is a really stupid workaround to (hopefully) finally fix the surround driver issues
            // Try every channel count from 1 to 12 and keep the last one that works.
            for (int i = 1; i < 13; i++)
            {
                try { _capture = new WasapiLoopbackCapture(100, new WaveFormat(deviceFormat.SampleRate, deviceFormat.BitsPerSample, i)); } catch { /* We're just trying ... */ }
            }

            if (_capture == null)
            {
                throw new NullReferenceException("Failed to initialize WasapiLoopbackCapture");
            }

            //BLARG: Actually setting the Device
            _capture.Device = captureDevice;
            _capture.Initialize();

            // Only produce data that was actually captured (no zero padding).
            _soundInSource = new SoundInSource(_capture)
            {
                FillWithZeros = false
            };
            // Normalize everything to 44.1 kHz stereo for downstream processing.
            _source = _soundInSource.WaveFormat.SampleRate == 44100
                          ? _soundInSource.ToStereo()
                          : _soundInSource.ChangeSampleRate(44100).ToStereo();

            // Observe every sample block flowing through the chain.
            _stream = new SingleBlockNotificationStream(_source.ToSampleSource());
            _stream.SingleBlockRead += StreamOnSingleBlockRead;

            _source = _stream.ToWaveSource();

            // Drain the chain whenever the capture reports new data; reading pulls
            // the samples through the notification stream above.
            byte[] buffer = new byte[_source.WaveFormat.BytesPerSecond / 2];
            _soundInSource.DataAvailable += (s, aEvent) =>
            {
                while ((_source.Read(buffer, 0, buffer.Length)) > 0)
                {
                    ;
                }
            };

            _capture.Start();
        }
예제 #23
0
        /// <summary>
        ///     Loads an audio file, downmixes it to mono and returns its samples.
        /// </summary>
        /// <param name="chemin">Path of the audio file to decode.</param>
        /// <param name="Fe">Sample rate of the decoded file, in Hz.</param>
        /// <param name="res">Decoded mono samples as doubles.</param>
        public static void chargerChanson(string chemin, out double Fe, out double[] res)
        {
            // Dispose the codec once decoding is finished (fixes a resource leak).
            using (IWaveSource source = CodecFactory.Instance.GetCodec(chemin))
            {
                ISampleSource signal = source.ToSampleSource();

                Fe     = source.WaveFormat.SampleRate;
                signal = signal.ToMono();
                int size = (int)signal.Length;

                float[] valeurs = new float[size];
                // Read may return fewer samples than requested per call, so loop until
                // the buffer is filled or the stream ends (the original single call
                // could leave the tail of the buffer at zero).
                int total = 0;
                while (total < size)
                {
                    int read = signal.Read(valeurs, total, size - total);
                    if (read <= 0)
                    {
                        break;
                    }
                    total += read;
                }

                res = new double[size];
                for (int i = 0; i < size; i++)
                {
                    res[i] = (double)valeurs[i];
                }
            }
        }
예제 #24
0
        /// <summary>
        ///     Creates the WASAPI output, wires the played samples into an FFT provider
        ///     and applies the stored volume.
        /// </summary>
        /// <param name="soundSource">Source to play.</param>
        private void InitializeSoundOut(IWaveSource soundSource)
        {
            // WASAPI output; always follow stream routing changes.
            var wasapiOut = new WasapiOut(this.eventSync, this.audioClientShareMode, this.latency, ThreadPriority.Highest);
            wasapiOut.StreamRoutingOptions = StreamRoutingOptions.All;
            this.soundOut = wasapiOut;

            // Observe played samples through a notification stream; render 16-bit.
            this.notificationSource = new SingleBlockNotificationStream(soundSource.ToSampleSource());
            this.soundOut.Initialize(this.notificationSource.ToWaveSource(16));

            // FFT provider fed by the samples flowing to the output.
            this.fftProvider = new FftProvider(this.soundOut.WaveSource.WaveFormat.Channels, FftSize.Fft2048);

            this.notificationSource.SingleBlockRead += this.InputStream_Sample;
            this.soundOut.Stopped += this.SoundOutStoppedHandler;

            this.soundOut.Volume = this.volume;
        }
예제 #25
0
        /// <summary>
        ///     Opens an already-created wave source for playback on the given device,
        ///     inserting a pitch shifter into the sample chain.
        /// </summary>
        /// <param name="source">Wave source to play.</param>
        /// <param name="device">Output device used for playback.</param>
        public void Open(IWaveSource source, MMDevice device)
        {
            CleanupPlayback();

            _waveSource = source;

            _sampleSource = _waveSource.ToSampleSource()
                            .AppendSource(x => new PitchShifter(x), out _pitchShifter);

            // NOTE(review): the output below is initialized with _waveSource, not the
            // pitch-shifted chain — presumably SetupSampleSource rewires _waveSource;
            // confirm, otherwise the pitch shifter is bypassed.
            SetupSampleSource(_sampleSource);

            _soundOut = new WasapiOut()
            {
                Latency = 100, Device = device
            };
            _soundOut.Initialize(_waveSource);
            if (PlaybackStopped != null)
            {
                _soundOut.Stopped += PlaybackStopped;
            }
        }
예제 #26
0
        /// <summary>
        ///     Chooses the platform sound output: WASAPI when supported, DirectSound
        ///     otherwise (downconverting the source to 16 bit if needed).
        /// </summary>
        /// <param name="source">Source to play; may be replaced by a 16-bit conversion.</param>
        /// <returns>The created <see cref="ISoundOut"/>.</returns>
        ISoundOut CreateSoundOut(ref IWaveSource source)
        {
            if (WasapiOut.IsSupportedOnCurrentPlatform)
            {
                return new WasapiOut(true, AudioClientShareMode.Shared, 50);
            }

            var directSoundOut = new DirectSoundOut()
            {
                Latency = 100
            };

            // DirectSound path: clamp the bit depth to 16.
            if (source.WaveFormat.BitsPerSample > 16)
            {
                source = source.ToSampleSource().ToWaveSource(16);
            }

            return directSoundOut;
        }
예제 #27
0
        /// <summary>
        ///     Converts the specified wave source with n channels to a wave source with one channel.
        ///     Stereo input is downmixed through the sample-source pipeline; multichannel input is
        ///     matrix-mixed when a channel mask is available, otherwise resampled to one channel.
        /// </summary>
        /// <param name="input">Already existing wave source.</param>
        /// <returns><see cref="IWaveSource" /> instance with one channel.</returns>
        public static IWaveSource ToMono(this IWaveSource input)
        {
            if (input == null)
            {
                throw new ArgumentNullException("input");
            }

            int channelCount = input.WaveFormat.Channels;

            if (channelCount == 1)
            {
                // Already mono.
                return input;
            }

            if (channelCount == 2)
            {
                // Stereo: downmix via the sample-source pipeline.
                return new StereoToMonoSource(input.ToSampleSource()).ToWaveSource();
            }

            if (input.WaveFormat is WaveFormatExtensible extensible)
            {
                // Multichannel with a channel mask: matrix-mix down to mono.
                ChannelMatrix matrix = ChannelMatrix.GetMatrix(extensible.ChannelMask, ChannelMasks.MonoMask);
                return new DmoChannelResampler(input, matrix);
            }

            // Multichannel without a mask: fall back to a plain resample to one channel.
            Debug.WriteLine("MultiChannel stream with no ChannelMask.");

            var monoFormat = (WaveFormat)input.WaveFormat.Clone();
            monoFormat.Channels = 1;
            return new DmoResampler(input, monoFormat);
        }
예제 #28
0
파일: SpeechFX.cs 프로젝트: Swiftey63/EDDI
        /// <summary>
        ///     Applies the configured chorus/reverb/echo/distortion chain (or a radio
        ///     high-pass filter) to the supplied wave source.
        /// </summary>
        /// <param name="source">The speech audio to decorate.</param>
        /// <param name="chorusLevel">Chorus depth; 0 disables chorus.</param>
        /// <param name="reverbLevel">Reverb amount; 0 disables reverb.</param>
        /// <param name="echoDelay">Echo delay in milliseconds; 0 disables echo.</param>
        /// <param name="distortionLevel">Distortion level, fed to fxLevel to derive the overall effects level.</param>
        /// <param name="radio">True to simulate a radio transmission: suppresses reverb/echo, adds a high-pass filter and extra compressor gain.</param>
        /// <returns>The source wrapped with the requested effects.</returns>
        private IWaveSource addEffectsToSource(IWaveSource source, int chorusLevel, int reverbLevel, int echoDelay, int distortionLevel, bool radio)
        {
            // Effects level is increased by damage if distortion is enabled
            int effectsLevel = fxLevel(distortionLevel);

            // Add various effects...
            Logging.Debug("Effects level is " + effectsLevel + ", chorus level is " + chorusLevel + ", reverb level is " + reverbLevel + ", echo delay is " + echoDelay);

            // We need to extend the duration of the wave source if we have any effects going on
            if (chorusLevel != 0 || reverbLevel != 0 || echoDelay != 0)
            {
                // Add a base of 500ms plus 10ms per effect level over 50.
                // Fix: the original log concatenated "500" and the extra milliseconds as
                // separate strings (e.g. "Extending duration by 500450ms"); compute the
                // total once and use it for both the log and the effect.
                int extensionMs = 500 + Math.Max(0, (effectsLevel - 50) * 10);
                Logging.Debug("Extending duration by " + extensionMs + "ms");
                source = source.AppendSource(x => new ExtendedDurationWaveSource(x, extensionMs));
            }

            // We always have chorus
            if (chorusLevel != 0)
            {
                source = source.AppendSource(x => new DmoChorusEffect(x)
                {
                    Depth = chorusLevel, WetDryMix = Math.Min(100, (int)(180 * (effectsLevel) / ((decimal)100))), Delay = 16, Frequency = (effectsLevel / 10), Feedback = 25
                });
            }

            // We only have reverb and echo if we're not transmitting or receiving
            if (!radio)
            {
                if (reverbLevel != 0)
                {
                    source = source.AppendSource(x => new DmoWavesReverbEffect(x)
                    {
                        ReverbTime = (int)(1 + 999 * (effectsLevel) / ((decimal)100)), ReverbMix = Math.Max(-96, -96 + (96 * reverbLevel / 100))
                    });
                }

                if (echoDelay != 0)
                {
                    source = source.AppendSource(x => new DmoEchoEffect(x)
                    {
                        LeftDelay = echoDelay, RightDelay = echoDelay, WetDryMix = Math.Max(5, (int)(10 * (effectsLevel) / ((decimal)100))), Feedback = 0
                    });
                }
            }
            // Apply a high pass filter for a radio effect
            else
            {
                var sampleSource = source.ToSampleSource().AppendSource(x => new BiQuadFilterSource(x));
                sampleSource.Filter = new HighpassFilter(source.WaveFormat.SampleRate, 1015);
                source = sampleSource.ToWaveSource();
            }

            // Adjust gain
            // NOTE(review): gain compression only applies when chorus is also active — confirm intended.
            if (effectsLevel != 0 && chorusLevel != 0)
            {
                int radioGain = radio ? 7 : 0;
                source = source.AppendSource(x => new DmoCompressorEffect(x)
                {
                    Gain = effectsLevel / 15 + radioGain
                });
            }

            return(source);
        }
예제 #29
0
        /// <summary>
        ///     Decorates <paramref name="src" /> in place with the effects enabled in
        ///     <paramref name="ap" />. A no-op when no settings are supplied or none are enabled.
        /// </summary>
        /// <param name="src">Wave source to wrap; replaced with the decorated chain.</param>
        /// <param name="ap">Effect settings; may be null.</param>
        static private void ApplyEffects(ref IWaveSource src, SoundEffectSettings ap)   // ap may be null
        {
            if (ap == null || !ap.Any)
            {
                return;
            }

            // Work out how much extra tail the effects need so they are not cut off.
            int extendMs = ap.echoenabled ? ap.echodelay * 2 : 0;
            if (ap.chorusenabled || ap.reverbenabled)
            {
                extendMs = Math.Max(extendMs, 50);
            }

            if (extendMs > 0)
            {
                //System.Diagnostics.Debug.WriteLine("Extend by " + extend + " ms due to effects");
                src = src.AppendSource(x => new ExtendWaveSource(x, extendMs));
            }

            // Chain the enabled DMO effects in a fixed order:
            // chorus -> reverb -> distortion -> gargle -> echo -> pitch shift.
            if (ap.chorusenabled)
            {
                src = src.AppendSource(x => new DmoChorusEffect(x)
                {
                    WetDryMix = ap.chorusmix,
                    Feedback  = ap.chorusfeedback,
                    Delay     = ap.chorusdelay,
                    Depth     = ap.chorusdepth
                });
            }

            if (ap.reverbenabled)
            {
                src = src.AppendSource(x => new DmoWavesReverbEffect(x)
                {
                    InGain               = 0,
                    ReverbMix            = ap.reverbmix,
                    ReverbTime           = ((float)ap.reverbtime) / 1000.0F,
                    HighFrequencyRTRatio = ((float)ap.reverbhfratio) / 1000.0F
                });
            }

            if (ap.distortionenabled)
            {
                src = src.AppendSource(x => new DmoDistortionEffect(x)
                {
                    Gain                  = ap.distortiongain,
                    Edge                  = ap.distortionedge,
                    PostEQCenterFrequency = ap.distortioncentrefreq,
                    PostEQBandwidth       = ap.distortionfreqwidth
                });
            }

            if (ap.gargleenabled)
            {
                src = src.AppendSource(x => new DmoGargleEffect(x)
                {
                    RateHz = ap.garglefreq
                });
            }

            if (ap.echoenabled)
            {
                src = src.AppendSource(x => new DmoEchoEffect(x)
                {
                    WetDryMix  = ap.echomix,
                    Feedback   = ap.echofeedback,
                    LeftDelay  = ap.echodelay,
                    RightDelay = ap.echodelay
                });
            }

            // Pitch shifting works on samples, so round-trip through a sample source.
            if (ap.pitchshiftenabled)
            {
                ISampleSource sampleSrc = src.ToSampleSource();
                sampleSrc = sampleSrc.AppendSource(x => new PitchShifter(x)
                {
                    PitchShiftFactor = ((float)ap.pitchshift) / 100.0F
                });
                src = sampleSrc.ToWaveSource();
            }
        }
예제 #30
0
        /// <summary>
        ///     Starts recording from the currently selected input device into the given
        ///     WAV file, wiring up a spectrum provider along the way. No-ops when the
        ///     filename is blank, no input device is selected, or a recording is already
        ///     in progress.
        /// </summary>
        /// <param name="filename">Target WAV file path.</param>
        public void Record(string filename)
        {
            if (string.IsNullOrWhiteSpace(filename))
            {
                return;
            }

            // Reset bookkeeping from any previous session before checking preconditions.
            cachedPosition = TimeSpan.Zero;
            position       = TimeSpan.Zero;
            sampleLength   = 0;
            recordedData   = new List <float>();

            if (InputDevice == null)
            {
                return;
            }

            if (recordingState == RecordingState.Recording)
            {
                return;
            }

            recordingState = RecordingState.Recording;

            // Capture devices record the microphone; anything else is treated as a
            // render device and recorded via loopback.
            if (inputDevice.Type == DeviceType.Capture)
            {
                _capture = new WasapiCapture();
            }
            else
            {
                _capture = new WasapiLoopbackCapture();
            }

            _capture.Device = inputDevice.ActualDevice;
            _capture.Initialize();

            // FillWithZeros = false: the source reports real data only, so downstream
            // reads block/stop instead of receiving silence when no data is available.
            _soundInSource = new SoundInSource(_capture)
            {
                FillWithZeros = false
            };
            _soundInSource.DataAvailable += _soundInSource_DataAvailable;

            // Normalise the capture to the configured sample rate / bit depth, mono.
            _waveSource = _soundInSource
                          .ChangeSampleRate(SampleRate)
                          .ToSampleSource()
                          .ToWaveSource(BitResolution)
                          .ToMono();

            spectrumProvider = new BasicSpectrumProvider(_waveSource.WaveFormat.Channels,
                                                         _waveSource.WaveFormat.SampleRate,
                                                         CSCore.DSP.FftSize.Fft4096);

            _waveWriter = new WaveWriter(filename, _waveSource.WaveFormat);

            //the SingleBlockNotificationStream is used to intercept the played samples
            _notificationSource = new SingleBlockNotificationStream(_waveSource.ToSampleSource());
            //pass the intercepted samples as input data to the spectrumprovider (which will calculate a fft based on them)
            _notificationSource.SingleBlockRead += _notificationSource_SingleBlockRead;
            // NOTE(review): hard-coded 16-bit here rather than BitResolution — confirm intended.
            _waveSource = _notificationSource.ToWaveSource(16);

            RaiseSourceEvent(SourceEventType.Loaded);
            _capture.Start();
            RaiseSourcePropertyChangedEvent(SourceProperty.RecordingState, _capture.RecordingState);
        }
예제 #31
0
        /// <summary>
        ///     Scans an imageboard catalog page for r/a/dio threads and returns a map of
        ///     thread id to reply count, read from the "data-reply" / "data-id" /
        ///     "data-subject" attributes in the catalog HTML.
        /// </summary>
        /// <param name="soykaf">Raw catalog HTML.</param>
        /// <returns>Dictionary keyed by thread id with reply count as value.</returns>
        private Dictionary<int, int> ParseRadioThreads(string soykaf)
        {
            Dictionary<int, int> threads = new Dictionary<int, int>();
            string pattern = "data-reply=\"";
            for (int i = 0; i < soykaf.Length - pattern.Length; ++i)
            {
                if (soykaf.Substring(i, pattern.Length) == pattern)
                {
                    int    replyCountEnd   = FindNext(soykaf.Substring(i + pattern.Length), "\"");
                    string replyCount      = soykaf.Substring(i + pattern.Length, replyCountEnd);
                    int    threadIdBegin   = i + pattern.Length + FindNext(soykaf.Substring(i + pattern.Length), "data-id=\"");
                    string threadId        = soykaf.Substring(threadIdBegin + 9, FindNext(soykaf.Substring(threadIdBegin + 9), "\""));
                    int    threadNameBegin = threadIdBegin + 9 + FindNext(soykaf.Substring(threadIdBegin + 9), "data-subject=\"");
                    string threadName      = soykaf.Substring(threadNameBegin + 14, FindNext(soykaf.Substring(threadNameBegin + 14), "\""));

                    if (FindNext(threadName.ToLower(), "r/a/dio") >= 0 || FindNext(threadName.ToLower(), "radio") >= 0)
                    {
                        // Guarded parse + indexer assignment: the original first copy of
                        // this loop added (0, 0) on parse failure and Dictionary.Add could
                        // throw on a duplicate key.
                        if (int.TryParse(threadId, out int id) && int.TryParse(replyCount, out int replies))
                        {
                            threads[id] = replies;
                        }
                    }
                }
            }
            return threads;
        }

        /// <summary>
        ///     Dispatches a single console command: vol / pos / playlist / thread /
        ///     play / stop / pause / resume / help. Unknown commands print an error.
        /// </summary>
        /// <param name="cmd">Raw command line as typed by the user.</param>
        private async Task Cmd(string cmd)
        {
            Console.ForegroundColor = ConsoleColor.Gray;
            Console.BackgroundColor = ConsoleColor.Black;
            if (cmd.StartsWith("vol "))
            {
                float.TryParse(cmd.Substring(4), out targetVolume);
                Console.Write("set volume to " + targetVolume.ToString());
                return;
            }
            if (cmd.StartsWith("pos"))
            {
                Console.Write("current pos is " + wasapiOut.WaveSource.GetPosition().ToString());
                return;
            }
            if (cmd.StartsWith("playlist "))
            {
                // Tear down any active playback before switching sources.
                if (initialized)
                {
                    wasapiOut.Stop();
                    initialized = false;
                }
                string path       = cmd.Substring(9);
                bool   onComputer = true;
                if (!File.Exists(@path))
                {
                    onComputer = false;
                }
                Stream stream = Stream.Null;
                if (onComputer)
                {
                    stream = File.OpenRead(@path);
                }
                else
                {
                    // Best effort: a failed web fetch leaves stream == Stream.Null,
                    // which is reported below.
                    try
                    {
                        HttpWebRequest  req      = (HttpWebRequest)WebRequest.Create(path);
                        HttpWebResponse response = (HttpWebResponse)req.GetResponse();
                        stream = response.GetResponseStream();
                    } catch { }
                }
                if (stream == Stream.Null)
                {
                    Console.Write("couldn't read " + path);
                    return;
                }
                string extension = Path.GetExtension(path);
                IPlaylistParser <IBasePlaylist> parser = PlaylistParserFactory.GetPlaylistParser(extension);
                IBasePlaylist playlist = parser.GetFromStream(stream);
                // NOTE(review): each iteration re-initialises wasapiOut, so only the
                // last track in the playlist remains loaded — confirm intended.
                foreach (string str in playlist.GetTracksPaths())
                {
                    currentStream = new Mp3WebStream(str, false);
                    ISampleSource source             = currentStream.ToSampleSource().AppendSource(x => new PitchShifter(x), out pitchShifter);
                    var           notificationSource = new SingleBlockNotificationStream(source);
                    notificationSource.SingleBlockRead += (s, a) =>
                    {
                        leftPitch  = Math.Abs(a.Left) * 10;
                        rightPitch = Math.Abs(a.Right) * 10;
                    };
                    currentStream = notificationSource.ToWaveSource();
                    currentPath   = path;
                    wasapiOut.Initialize(currentStream);
                    wasapiOut.Volume = 0.0f;
                    initialized      = true;
                }
                Console.Write("set playlist to " + path);
                return;
            }
            if (cmd.StartsWith("thread"))
            {
                string board = "a";
                if (cmd.Length > 6)
                {
                    board = cmd.Substring(7);
                }

                Dictionary <int, int> a_threads    = new Dictionary <int, int>();
                Dictionary <int, int> smug_threads = new Dictionary <int, int>();
                using (HttpClient a_client = new HttpClient())
                    using (HttpResponseMessage a_response = await a_client.GetAsync("https://8ch.net/" + board + "/catalog.html"))
                        using (HttpContent a_content = a_response.Content)
                        {
                            string soykaf = await a_content.ReadAsStringAsync();

                            // The scraping loop previously appeared inline twice with
                            // inconsistent parsing; both call sites now share one helper.
                            a_threads = ParseRadioThreads(soykaf);
                        }
                Console.Write("got " + a_threads.Count + " r/a/dio thread" + (a_threads.Count > 1 ? "s" : "") + " from 8/" + board + "/");
                if (board == "a")
                {
                    using (HttpClient smug_client = new HttpClient())
                        using (HttpResponseMessage smug_response = await smug_client.GetAsync("https://smuglo.li/a/catalog.html"))
                            using (HttpContent smug_content = smug_response.Content)
                            {
                                string soykaf = await smug_content.ReadAsStringAsync();

                                smug_threads = ParseRadioThreads(soykaf);
                            }
                    Console.Write("\ngot " + smug_threads.Count + " r/a/dio thread" + (smug_threads.Count > 1 ? "s" : "") + " from the bunker");
                }
                // Fix: Thread.Sleep blocked the thread inside an async method; use
                // Task.Delay so the pauses don't stall the caller's context.
                await Task.Delay(500);
                Console.Write("\nopening the most active thread(s)");
                await Task.Delay(1000);
                foreach (var x in a_threads)
                {
                    Process.Start("https://8ch.net/a/res/" + x.Key + ".html");
                    break;
                }
                foreach (var x in smug_threads)
                {
                    Process.Start("https://smuglo.li/a/res/" + x.Key + ".html");
                    break;
                }
                return;
            }
            if (cmd.StartsWith("play"))
            {
                if (M3uCheck())
                {
                    return;
                }

                wasapiOut.Play();
                Console.Write("started playing");
                return;
            }
            if (cmd.StartsWith("stop"))
            {
                if (M3uCheck())
                {
                    return;
                }

                wasapiOut.Stop();
                Console.Write("stopped playing");
                return;
            }
            if (cmd.StartsWith("pause"))
            {
                if (M3uCheck())
                {
                    return;
                }

                wasapiOut.Pause();
                Console.Write("paused playing");
                return;
            }
            if (cmd.StartsWith("resume"))
            {
                if (M3uCheck())
                {
                    return;
                }

                wasapiOut.Resume();
                Console.Write("resumed playing");
                return;
            }
            if (cmd.StartsWith("help"))
            {
                Console.Write(HELP_MESSAGE);
                return;
            }

            // Unknown command: flash an error in red.
            Console.ForegroundColor = ConsoleColor.Black;
            Console.BackgroundColor = ConsoleColor.Red;
            Console.Write("nANI!?");
            Console.ForegroundColor = ConsoleColor.Red;
            Console.BackgroundColor = ConsoleColor.Black;
            Console.Write("?");
            return;
        }