Example #1
        public virtual void Initialize()
        {
            _wasapiCapture = new WasapiCapture
            {
                Device = _captureDevice
            };
            _wasapiCapture.Initialize();

            var soundInSource = new SoundInSource(_wasapiCapture);

            if (_triggerSingleBlockRead)
            {
                var notificationStream =
                    new SingleBlockNotificationStream(soundInSource.ChangeSampleRate(48000).ToMono().ToSampleSource());
                notificationStream.SingleBlockRead += NotificationStreamOnSingleBlockRead;
                _captureSource = notificationStream.ToWaveSource(16);
            }
            else
            {
                _captureSource = soundInSource
                                 .ChangeSampleRate(48000)
                                 .ToMono()
                                 .ToSampleSource()
                                 .ToWaveSource(16);
            }

            soundInSource.DataAvailable += SoundInSourceOnDataAvailable;
            _wasapiCapture.Start();
        }
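
Neither of the two handlers wired above is shown in this example. A minimal sketch of what they might look like follows; the _readBuffer field and both method bodies are assumptions, not part of the original source:

        private byte[] _readBuffer;

        // Assumed implementation: drain _captureSource so samples are pulled
        // through the notification stream and SingleBlockRead actually fires.
        private void SoundInSourceOnDataAvailable(object sender, DataAvailableEventArgs e)
        {
            if (_readBuffer == null)
                _readBuffer = new byte[_captureSource.WaveFormat.BytesPerSecond / 2];

            while (_captureSource.Read(_readBuffer, 0, _readBuffer.Length) > 0)
            {
                // Discard; reading exists only to drive the event.
            }
        }

        // Assumed implementation: called once per mono sample passing through.
        private void NotificationStreamOnSingleBlockRead(object sender, SingleBlockReadEventArgs e)
        {
            float sample = e.Left; // the stream was converted to mono, so Left carries the sample
        }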
Example #2
        private void CreateLoopback()
        {
            try {
                loopback.Initialize();
            } catch (Exception e) {
                Debug.LogException(e);

                return;
            }

            soundIn          = new SoundInSource(loopback);
            spectrumProvider = new BasicSpectrumProvider(soundIn.WaveFormat.Channels, soundIn.WaveFormat.SampleRate, FftSize.Fft4096);
            spectrum         = new LineSpectrum(FftSize.Fft4096)
            {
                SpectrumProvider = spectrumProvider,
                BarCount         = 512,
                UseAverage       = true,
                IsXLogScale      = true,
            };

            loopback.Start();

            blockNotifyStream = new SingleBlockNotificationStream(soundIn.ToSampleSource());
            realtime          = blockNotifyStream.ToWaveSource();

            buffer = new float[realtime.WaveFormat.BytesPerSecond / sizeof(float) / 2];

            soundIn.DataAvailable += AudioDataAvailable;

            blockNotifyStream.SingleBlockRead += SingleBlockRead;
        }
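
The AudioDataAvailable and SingleBlockRead handlers are referenced above but not shown. A plausible sketch, assuming the float buffer field is meant for blockNotifyStream's ISampleSource.Read:

        // Assumed handler: pull samples through the chain so SingleBlockRead fires.
        private void AudioDataAvailable(object sender, DataAvailableEventArgs e)
        {
            while (blockNotifyStream.Read(buffer, 0, buffer.Length) > 0)
            {
                // Discard; reading only drives the notification event.
            }
        }

        // Assumed handler: feed each intercepted sample pair to the spectrum provider.
        private void SingleBlockRead(object sender, SingleBlockReadEventArgs e)
        {
            spectrumProvider.Add(e.Left, e.Right);
        }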
Example #3
        private void SetupSampleSource(ISampleSource aSampleSource)
        {
            const FftSize fftSize = FftSize.Fft4096;
            //create a spectrum provider which provides fft data based on some input
            var spectrumProvider = new BasicSpectrumProvider(aSampleSource.WaveFormat.Channels,
                                                             aSampleSource.WaveFormat.SampleRate, fftSize);

            //LineSpectrum and VoicePrint3DSpectrum are used for rendering FFT data.
            //In order to get FFT data, set the previously created spectrum provider.
            _lineSpectrum = new LineSpectrum(fftSize)
            {
                SpectrumProvider = spectrumProvider,
                UseAverage       = true,
                BarCount         = 50,
                BarSpacing       = 2,
                IsXLogScale      = true,
                ScalingStrategy  = ScalingStrategy.Sqrt
            };

            //the SingleBlockNotificationStream is used to intercept the played samples
            var notificationSource = new SingleBlockNotificationStream(aSampleSource);

            //pass the intercepted samples as input data to the spectrumprovider (which will calculate a fft based on them)
            notificationSource.SingleBlockRead += (s, a) => spectrumProvider.Add(a.Left, a.Right);

            _waveSource = notificationSource.ToWaveSource(16);
        }
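
SetupSampleSource only builds the chain; SingleBlockRead fires only while _waveSource is actually being read. One typical way to drive it, sketched here as an assumption, is to hand _waveSource to an ISoundOut:

        // Assumed usage: playing _waveSource pulls samples through the
        // notification stream, which in turn populates the spectrum provider.
        var soundOut = new WasapiOut();
        soundOut.Initialize(_waveSource);
        soundOut.Play();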
Example #4
    void Start()
    {
        fftData = new float[fftSize];

        persistentSamples = new FixedQueue<float>[PersSampleUpperIndex - PersSampleLowerIndex];
        smoothedSamples   = new float[persistentSamples.Length];
        for (int i = 0; i < persistentSamples.Length; i++)
        {
            persistentSamples[i] = new FixedQueue<float>(PersistenSampleLength);
        }

        line        = GetComponent<LineRenderer>();
        leftChannel = new float[TotalSamples];

        capture = new WasapiLoopbackCapture();
        capture.Initialize();
        var soundInSource = new SoundInSource(capture);
        var source        = soundInSource.ToSampleSource().AppendSource(x => new PitchShifter(x), out pitchShifter);

        fft1 = new FftTransform(source.WaveFormat.Channels, fftSize);
        fft2 = new FftProvider(source.WaveFormat.Channels, FftSize.Fft2048);

        stream = new SingleBlockNotificationStream(pitchShifter);
        stream.SingleBlockRead += SingleBlockRead;

        waveSource = stream.ToWaveSource(16);
        buffer     = new byte[waveSource.WaveFormat.BytesPerSecond / 2];

        // SoundInSource already forwards capture.DataAvailable, so subscribing
        // to both would invoke the handler twice per buffer; subscribe once here.
        soundInSource.DataAvailable += DataAvailable;
        capture.Start();
    }
Example #5
        private void StartCapture()
        {
            if (SelectedDevice == null)
            {
                return;
            }

            if (CaptureMode == "Capture")
            {
                _soundIn = new WasapiCapture();
            }
            else
            {
                // Without this branch _soundIn would be null below; fall back to
                // capturing the render device (loopback).
                _soundIn = new WasapiLoopbackCapture();
            }

            _soundIn.Device = SelectedDevice;
            _soundIn.Initialize();

            var soundInSource = new SoundInSource(_soundIn);
            var singleBlockNotificationStream = new SingleBlockNotificationStream(soundInSource.ToSampleSource());

            _finalSource = singleBlockNotificationStream.ToWaveSource();
            _writer      = new WaveWriter("tmp.wav", _finalSource.WaveFormat);

            byte[] buffer = new byte[_finalSource.WaveFormat.BytesPerSecond / 2];
            soundInSource.DataAvailable += (s, e) =>
            {
                int read;
                while ((read = _finalSource.Read(buffer, 0, buffer.Length)) > 0)
                {
                    _writer.Write(buffer, 0, read);
                }
            };

            //singleBlockNotificationStream.SingleBlockRead += SingleBlockNotificationStreamOnSingleBlockRead; // visualization

            _soundIn.Start();
        }
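
No matching StopCapture appears in this example. A minimal sketch, reusing the fields above (the method itself is an assumption):

        private void StopCapture()
        {
            // Stop the device first so no further DataAvailable events fire,
            // then dispose the writer to finalize the WAV header.
            _soundIn?.Stop();
            _writer?.Dispose();
            _writer = null;
            _soundIn?.Dispose();
            _soundIn = null;
        }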
Example #6
        private void InitializeSoundOut(IWaveSource soundSource, MMDevice outputDevice)
        {
            // SoundOut implementation which plays the sound
            this.soundOut = new WasapiOut(this.eventSync, this.audioClientShareMode, this.latency, ThreadPriority.Highest)
            {
                Device = outputDevice
            };
            ((WasapiOut)this.soundOut).StreamRoutingOptions = StreamRoutingOptions.All;

            // Initialize the soundOut
            this.notificationSource = new SingleBlockNotificationStream(soundSource.ToSampleSource());
            this.soundOut.Initialize(this.notificationSource.ToWaveSource(16));

            if (inputStreamList.Count != 0)
            {
                foreach (var inputStream in inputStreamList)
                {
                    this.notificationSource.SingleBlockRead += inputStream;
                }
            }

            this.soundOut.Stopped += this.SoundOutStoppedHandler;

            this.soundOut.Volume = this.volume;
        }
Example #7
        public void Initialize()
        {
            _wasapiOut   = new WasapiOut();
            _opusDecoder = OpusDecoder.Create(48000, 1);

            //var waveForm = new WaveFormatExtensible(48000, 16, 1, Guid.Parse("00000003-0000-0010-8000-00aa00389b71"));
            var waveForm = new WaveFormat(48000, 16, 1);

            _writeableBufferingSource = new WriteableBufferingSource(waveForm)
            {
                FillWithZeros = true
            };

            IWaveSource waveSource;

            if (_triggerSingleBlockRead)
            {
                var singleBlockNotificationStream =
                    new SingleBlockNotificationStream(_writeableBufferingSource.ToSampleSource());
                singleBlockNotificationStream.SingleBlockRead += SingleBlockNotificationStreamOnSingleBlockRead;
                waveSource = singleBlockNotificationStream.ToWaveSource();
            }
            else
            {
                waveSource = _writeableBufferingSource;
            }

            _wasapiOut.Initialize(waveSource);
            _wasapiOut.Play();
        }
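
The decoder and the buffering source are created but never connected in this excerpt. A sketch of the assumed receive path; the Opus decode call itself is omitted because its exact API depends on the Opus wrapper in use:

        // Assumed: called with 16-bit mono 48 kHz PCM produced by _opusDecoder.
        // WriteableBufferingSource queues the bytes; WasapiOut plays them,
        // padded with zeros whenever the buffer runs dry (FillWithZeros).
        private void OnPcmDecoded(byte[] pcm, int byteCount)
        {
            _writeableBufferingSource.Write(pcm, 0, byteCount);
        }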
Example #8
        public FFTDataProvider(FftSize fftSize)
        {
            this.fftSize = fftSize;
            _soundIn     = new WasapiLoopbackCapture();
            _soundIn.Initialize();
            var           soundInSource = new SoundInSource(_soundIn);
            ISampleSource source        = soundInSource.ToSampleSource();

            fftProvider  = new FftProvider(source.WaveFormat.Channels, fftSize);
            fftProvider2 = new FftProvider(source.WaveFormat.Channels, fftSize);
            var notificationSource = new SingleBlockNotificationStream(source);

            SamplesRate = source.WaveFormat.SampleRate;
            //pass the intercepted samples as input data to the spectrumprovider (which will calculate a fft based on them)
            notificationSource.SingleBlockRead += addToFFTs;
            var _source = notificationSource.ToWaveSource(16);

            byte[] buffer = new byte[_source.WaveFormat.BytesPerSecond / 8];// 1/8 seconds
            soundInSource.DataAvailable += (s, aEvent) =>
            {
                int read;
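                // Reading drains the chain so SingleBlockRead fires; the data itself is discarded.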
                while ((read = _source.Read(buffer, 0, buffer.Length)) > 0)
                {
                    ;
                }
            };
            _readStarted = false;
        }
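
The addToFFTs handler and any way to read results back out are not shown. A sketch under the assumption that addToFFTs feeds both providers and that the class exposes FFT frames to callers:

        // Assumed handler: feed each intercepted sample pair to both FFT providers.
        private void addToFFTs(object sender, SingleBlockReadEventArgs e)
        {
            fftProvider.Add(e.Left, e.Right);
            fftProvider2.Add(e.Left, e.Right);
        }

        // Assumed accessor: copies the latest FFT frame into the caller's buffer.
        public void GetFftData(float[] fftBuffer)
        {
            fftProvider.GetFftData(fftBuffer);
        }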
Example #9
        public void InitialiseAudioProgram()
        {
            _soundIn = new WasapiLoopbackCapture();
            _soundIn.Initialize();

            var           soundInSource = new SoundInSource(_soundIn);
            ISampleSource source        = soundInSource.ToSampleSource();

            var spectrumProvider = new SpectrumProvider(2, 48000, FftSize.Fft4096);

            _spectrum = new LineSpectrum(spectrumProvider, _barCount);
            var notificationSource = new SingleBlockNotificationStream(source);

            notificationSource.SingleBlockRead += (s, a) => spectrumProvider.Add(a.Left, a.Right);

            _source = notificationSource.ToWaveSource(16);

            // Read from the source otherwise SingleBlockRead is never called
            byte[] buffer = new byte[_source.WaveFormat.BytesPerSecond / 2];
            soundInSource.DataAvailable += (src, evt) =>
            {
                int read;
                while ((read = _source.Read(buffer, 0, buffer.Length)) > 0)
                {
                    ;
                }
            };

            _soundIn.Start();

            for (int i = 0; i < MatrixCount; i++)
            {
                _Programs[i] = i == 0 ? AudioSequence().GetEnumerator() : null;
            }
        }
Example #10
        public void Dispose()
        {
            if (_waveWriter != null)
            {
                _waveWriter.Dispose();
                _waveWriter = null;
            }

            if (_notificationSource != null)
            {
                _notificationSource.Dispose();
                _notificationSource = null;
            }

            if (_waveSource != null)
            {
                _waveSource.Dispose();
                _waveSource = null;
            }

            if (_soundInSource != null)
            {
                _soundInSource.Dispose();
                _soundInSource = null;
            }

            if (_capture != null)
            {
                _capture.Dispose();
                _capture = null;
            }
        }
Example #11
            public SoundCapture()
            {
                // This uses the WASAPI API to get any sound data played by the computer
                capture = new WasapiLoopbackCapture();

                capture.Initialize();

                // Get our capture as a source
                IWaveSource source = new SoundInSource(capture);


                // From https://github.com/filoe/cscore/blob/master/Samples/WinformsVisualization/Form1.cs

                // This is the typical size, you can change this for higher detail as needed
                fftSize = FftSize.Fft4096;

                // Actual fft data
                fftBuffer = new float[(int)fftSize];


                // Tells us when data is available to send to our spectrum
                var notificationSource = new SingleBlockNotificationStream(source.ToSampleSource());

                notificationSource.SingleBlockRead += NotificationSource_SingleBlockRead;

                // We use this to request data so it actually flows through (figuring this out took forever...)
                finalSource = notificationSource.ToWaveSource();

                capture.DataAvailable += Capture_DataAvailable;
                capture.Start();
            }
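
Capture_DataAvailable and NotificationSource_SingleBlockRead are referenced but not shown; a sketch of both, assuming the usual drain-and-discard pattern:

            // Assumed handler: drain finalSource so data flows through the chain
            // (this is the "request data" step the comment above refers to).
            private void Capture_DataAvailable(object sender, DataAvailableEventArgs e)
            {
                byte[] buffer = new byte[e.ByteCount];
                while (finalSource.Read(buffer, 0, buffer.Length) > 0)
                {
                    // Discard; SingleBlockRead does the real work.
                }
            }

            // Assumed handler: accumulate samples, e.g. into an FFT provider.
            private void NotificationSource_SingleBlockRead(object sender, SingleBlockReadEventArgs e)
            {
                // e.g. fftProvider.Add(e.Left, e.Right);
            }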
Example #12
        public void Load(Uri uri)
        {
            Dispose();
            _uri        = uri;
            _waveSource = CSCore.Codecs.CodecFactory.Instance.GetCodec(_uri)
                          .ToSampleSource()
                          .ToMono()
                          .ToWaveSource();

            spectrumProvider = new BasicSpectrumProvider(_waveSource.WaveFormat.Channels,
                                                         _waveSource.WaveFormat.SampleRate,
                                                         CSCore.DSP.FftSize.Fft4096);

            //the SingleBlockNotificationStream is used to intercept the played samples
            var notificationSource = new SingleBlockNotificationStream(_waveSource.ToSampleSource());

            //pass the intercepted samples as input data to the spectrumprovider (which will calculate a fft based on them)
            notificationSource.SingleBlockRead += (s, a) => spectrumProvider.Add(a.Left, a.Right);
            _waveSource = notificationSource.ToWaveSource(16);
            // Load the sample source
            var ws = CSCore.Codecs.CodecFactory.Instance.GetCodec(_uri);

            _sampleSource = ws.ToSampleSource();
            RaiseSourceEvent(SourceEventType.Loaded);
            hasMedia = true;
            LoadSoundOut();
        }
Example #13
        private void InitializeSoundOut(IWaveSource soundSource)
        {
            // Create SoundOut
            this.soundOut = new WasapiOut(this.eventSync, this.audioClientShareMode, this.latency, ThreadPriority.Highest);

            if (this.outputDevice == null)
            {
                // If no output device was provided, we're playing on the default device.
                // In that case, we want to detect when the default device changes.
                // This is done by setting stream routing options
                ((WasapiOut)this.soundOut).StreamRoutingOptions = StreamRoutingOptions.All;
            }
            else
            {
                // If an output device was provided, assign it to soundOut.Device.
                // Only allow stream routing when the device was disconnected.
                ((WasapiOut)this.soundOut).StreamRoutingOptions = StreamRoutingOptions.OnDeviceDisconnect;
                ((WasapiOut)this.soundOut).Device = this.outputDevice;
            }

            // Initialize SoundOut
            this.notificationSource = new SingleBlockNotificationStream(soundSource.ToSampleSource());
            this.soundOut.Initialize(this.notificationSource.ToWaveSource(16));

            if (inputStreamList.Count != 0)
            {
                foreach (var inputStream in inputStreamList)
                {
                    this.notificationSource.SingleBlockRead += inputStream;
                }
            }

            this.soundOut.Stopped += this.SoundOutStoppedHandler;
            this.soundOut.Volume   = this.volume;
        }
Example #14
        // Helper for State.Looking
        void StartCapture()
        {
            Debug.Assert(m_State == State.Looking);
            Debug.Assert(m_AudioCapture != null);

            // TODO: This starts as a WaveSource (raw bytes), converts to floats
            // so we can notify once for each sample.
            // The SingleBlockNotificationStream is very garbagey; we should use our own
            // wrapper that grabs all the samples read and pushes them into m_HotValues
            // en masse instead of one-at-a-time.
            var soundInSource = new SoundInSource(m_AudioCapture);
            var sampleSource  = soundInSource.ToSampleSource();
            var singleBlockNotificationStream = new SingleBlockNotificationStream(sampleSource);

            m_FinalSource = singleBlockNotificationStream;

            // Consume and discard samples as they come in. We do this for
            // its side effects (firing the SingleBlockNotificationStream event).
            // buffer is closed-over by the lambda; BytesPerSecond / 4 is one
            // second's worth of 4-byte float samples.
            float[] buffer = new float[m_FinalSource.WaveFormat.BytesPerSecond / 4];
            soundInSource.DataAvailable += (s, e) =>
            {
                int read;
                do
                {
                    read = m_FinalSource.Read(buffer, 0, buffer.Length);
                } while (read > 0);
            };

            singleBlockNotificationStream.SingleBlockRead += SingleBlockNotificationStreamOnSingleBlockRead;
            m_AudioCapture.Start();
        }
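
The batch wrapper the TODO above asks for could look roughly like this; a sketch, not part of the original project:

        // Sketch: an ISampleSource wrapper that raises one callback per block
        // read, avoiding SingleBlockNotificationStream's per-sample event cost.
        class BatchNotificationSource : ISampleSource
        {
            private readonly ISampleSource m_Source;

            public BatchNotificationSource(ISampleSource source) { m_Source = source; }

            // Receives the buffer and the number of valid samples in it
            // (offset assumed 0 for brevity).
            public event Action<float[], int> BlockRead;

            public int Read(float[] buffer, int offset, int count)
            {
                int read = m_Source.Read(buffer, offset, count);
                if (read > 0)
                {
                    BlockRead?.Invoke(buffer, read); // push samples en masse
                }
                return read;
            }

            public WaveFormat WaveFormat => m_Source.WaveFormat;
            public bool CanSeek => m_Source.CanSeek;
            public long Position { get => m_Source.Position; set => m_Source.Position = value; }
            public long Length => m_Source.Length;
            public void Dispose() => m_Source.Dispose();
        }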
Example #15
        private IWaveSource CreateWaveSource(ISampleSource sampleSource, CustomFftProvider fftProvider)
        {
            var sampleStream = new SingleBlockNotificationStream(sampleSource);

            sampleStream.SingleBlockRead += (s, a) => fftProvider.Add(a.Left, a.Right);
            return(sampleStream.ToWaveSource(16));
        }
Example #16
        // Most of this code is adapted from the CSCore GitHub sample.
        public void Initialize(FFTSize _size = FFTSize._4096)
        {
            size     = _size;
            _soundIn = new WasapiLoopbackCapture();

            _soundIn.Initialize();
            var soundInSource = new SoundInSource(_soundIn);

            var source = soundInSource.ToSampleSource();

            _fft = new FftProvider(source.WaveFormat.Channels, (FftSize)size);

            var n = new SingleBlockNotificationStream(source);

            n.SingleBlockRead += (s, a) => _fft.Add(a.Left, a.Right);

            _source = n.ToWaveSource(16);
            byte[] buffer = new byte[_source.WaveFormat.BytesPerSecond];
            soundInSource.DataAvailable += (s, aEvent) =>
            {
                int read;
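                // Reading drains the chain so the FFT provider gets fed; the data is discarded.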
                while ((read = _source.Read(buffer, 0, buffer.Length)) > 0)
                {
                    ;
                }
            };
            _soundIn.Start();
        }
Example #17
        public void CargarCancion(string cual)
        {
            switch (Path.GetExtension(cual))
            {
            case ".mp3":
                FormatoSonido = FormatoSonido.MP3;
                break;

            case ".flac":
                FormatoSonido = FormatoSonido.FLAC;
                break;

            case ".ogg":
                FormatoSonido = FormatoSonido.OGG;
                break;

            default:
                break;
            }
            try
            {
                Log.Instance.PrintMessage("Intentando cargar " + cual, MessageType.Info);
                if (Path.GetExtension(cual) == ".ogg")
                {
                    FileStream stream = new FileStream(cual, FileMode.Open, FileAccess.Read);
                    NVorbis = new NVorbisSource(stream);
                    _sound  = NVorbis.ToWaveSource(16);
                }
                else
                {
                    _sound             = CSCore.Codecs.CodecFactory.Instance.GetCodec(cual).ToSampleSource().ToStereo().ToWaveSource(16);
                    notificationStream = new SingleBlockNotificationStream(_sound.ToSampleSource());
                    FileInfo info = new FileInfo(cual);
                    tamFich = info.Length;
                }

                _output = new WasapiOut(false, AudioClientShareMode.Shared, 100);
                //_sonido.Position = 0;
                _output.Initialize(_sound);
                Log.Instance.PrintMessage("Cargado correctamente" + cual, MessageType.Correct);
            }
            catch (IOException ex)
            {
                Log.Instance.PrintMessage("Error de IO", MessageType.Error);
                Log.Instance.PrintMessage(ex.Message, MessageType.Error);
                Kernel.ShowError(Kernel.LocalTexts.GetString("errorReproduccion"));
                _output = null;
                _sound  = null;
                throw;
            }
            catch (Exception ex)
            {
                Log.Instance.PrintMessage("Hubo un problema...", MessageType.Error);
                Log.Instance.PrintMessage(ex.Message, MessageType.Error);
                Kernel.ShowError(ex.Message);
                _output = null;
                _sound  = null;
                throw;
            }
        }
Example #18
        public Spectrograph()
        {
            InitializeComponent();

            _soundIn = new WasapiLoopbackCapture();
            _soundIn.Initialize();

            var soundInSource = new SoundInSource(_soundIn);
            // SingleBlockNotificationStream wraps an ISampleSource, so convert first.
            var singleBlockNotificationStream = new SingleBlockNotificationStream(soundInSource.ToSampleSource());

            _source = singleBlockNotificationStream.ToWaveSource();

            if (!Directory.Exists(_loopbackDir))
            {
                Directory.CreateDirectory(_loopbackDir);
            }

            _writer = new WaveWriter(_loopbackDir + "/loopback.wav", _source.WaveFormat);

            byte[] buffer = new byte[_source.WaveFormat.BytesPerSecond / 2];
            soundInSource.DataAvailable += (s, e) =>
            {
                int read;
                while ((read = _source.Read(buffer, 0, buffer.Length)) > 0)
                {
                    _writer.Write(buffer, 0, read);
                }
            };

            _lineSpectrumProvider = new BasicSpectrumProvider(_source.WaveFormat.Channels, _source.WaveFormat.SampleRate, fftSize);
            _spectrogramProvider  = new BasicSpectrumProvider(_source.WaveFormat.Channels, _source.WaveFormat.SampleRate, fftSize);

            singleBlockNotificationStream.SingleBlockRead += SingleBlockNotificationStream_SingleBlockRead;
            _soundIn.Start();

            _lineSpectrum = new LineSpectrum(fftSize)
            {
                SpectrumProvider = _lineSpectrumProvider,
                UseAverage       = true,
                BarCount         = 22,
                BarSpacing       = 1,
                IsXLogScale      = true,
                ScalingStrategy  = ScalingStrategy.Sqrt
            };
            _oscilloscope = new Oscilloscope();
            _spectrogram  = new Spectrogram(fftSize)
            {
                SpectrumProvider = _spectrogramProvider,
                UseAverage       = true,
                BarCount         = (int)fftSize,
                BarSpacing       = 0,
                IsXLogScale      = true,
                ScalingStrategy  = ScalingStrategy.Sqrt
            };
            _keyboardVisualizer = new KeyboardVisualizer();

            UpdateTimer.Start();
        }
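
The SingleBlockNotificationStream_SingleBlockRead handler is not shown; presumably it feeds both spectrum providers, along the lines of:

        // Assumed handler: feed both providers from the intercepted samples.
        private void SingleBlockNotificationStream_SingleBlockRead(object sender, SingleBlockReadEventArgs e)
        {
            _lineSpectrumProvider.Add(e.Left, e.Right);
            _spectrogramProvider.Add(e.Left, e.Right);
        }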
Example #19
 private void DisposeSampleSource()
 {
     if (this.sampleSource != null)
     {
         this.sampleSource.SingleBlockRead -= this.SampleSource_SingleBlockRead;
         this.sampleSource.Dispose();
         this.sampleSource = null;
     }
 }
Example #20
        static void Main(string[] args)

        {
            MMDevice dev = MMDeviceEnumerator.DefaultAudioEndpoint(DataFlow.Render, Role.Multimedia);

            capture        = new WasapiLoopbackCapture();
            capture.Device = dev;
            capture.Initialize();

            SoundInSource soundInSource = new SoundInSource(capture);

            nStream = new SingleBlockNotificationStream(soundInSource.ToSampleSource());
            final   = nStream.ToWaveSource();
            nStream.SingleBlockRead     += NStream_SingleBlockRead;
            soundInSource.DataAvailable += encode;
            trashBuf = new byte[final.WaveFormat.BytesPerSecond / 2];

            Console.WriteLine($"sample rate:{capture.WaveFormat.SampleRate}");
            Console.WriteLine($"bits per sample:{capture.WaveFormat.BitsPerSample }");
            Console.WriteLine($"channels:{capture.WaveFormat.Channels }");
            Console.WriteLine($"bytes per sample:{capture.WaveFormat.BytesPerSample }");
            Console.WriteLine($"bytes per second:{capture.WaveFormat.BytesPerSecond }");
            Console.WriteLine($"AudioEncoding:{capture.WaveFormat.WaveFormatTag  }");


            EncodingContext context = FrameEncoder.GetDefaultsContext();

            context.Channels        = 6;
            context.SampleRate      = capture.WaveFormat.SampleRate;
            context.AudioCodingMode = AudioCodingMode.Front3Rear2;
            context.HasLfe          = true;
            context.SampleFormat    = A52SampleFormat.Float;
            enc = new FrameEncoderFloat(ref context);

            //_writer = new WaveWriter("test.ac3", final.WaveFormat);


            capture.Start();

            wBuffSrc = new WriteableBufferingSource(new WaveFormat(capture.WaveFormat.SampleRate, capture.WaveFormat.BitsPerSample, capture.WaveFormat.Channels, AudioEncoding.WAVE_FORMAT_DOLBY_AC3_SPDIF), (int)capture.WaveFormat.MillisecondsToBytes(20));

            w = new WasapiOut2(false, AudioClientShareMode.Shared, 20);

            w.Device = MMDeviceEnumerator.EnumerateDevices(DataFlow.Render, DeviceState.Active).Where(x => x.FriendlyName.Contains("Digital")).Single();
            AudioClient a = AudioClient.FromMMDevice(w.Device);

            w.Initialize(wBuffSrc);
            w.Play();


            Task.Run(async () => await encoderThread());
            //encodeSinus();

            Console.ReadLine();

            System.Environment.Exit(0);
        }
Example #21
        public void CargarCancion(string cual)
        {
            switch (Path.GetExtension(cual))
            {
            case ".mp3":
                CSCore.Tags.ID3.ID3v2 mp3tag = CSCore.Tags.ID3.ID3v2.FromFile(cual);
                tags          = new CSCore.Tags.ID3.ID3v2QuickInfo(mp3tag);
                FormatoSonido = FormatoSonido.MP3;
                break;

            case ".flac":
                _ficheroFLAC = new FLACFile(cual, true);
                CSCore.Codecs.FLAC.FlacFile ff = new CSCore.Codecs.FLAC.FlacFile(cual);
                FormatoSonido = FormatoSonido.FLAC;
                break;

            case ".ogg":
                FormatoSonido = FormatoSonido.OGG;
                break;

            default:
                break;
            }
            try
            {
                Log.Instance.ImprimirMensaje("Intentando cargar " + cual, TipoMensaje.Info);
                if (Path.GetExtension(cual) == ".ogg")
                {
                    FileStream stream = new FileStream(cual, FileMode.Open);
                    NVorbis = new NVorbisSource(stream);
                    _sonido = NVorbis.ToWaveSource(16);
                }
                else
                {
                    _sonido            = CSCore.Codecs.CodecFactory.Instance.GetCodec(cual).ToSampleSource().ToStereo().ToWaveSource(16);
                    notificationStream = new SingleBlockNotificationStream(_sonido.ToSampleSource());
                    //_salida.Initialize(notificationStream.ToWaveSource(16));
                    FileInfo info = new FileInfo(cual);
                    tamFich = info.Length;
                }
                _salida          = new WasapiOut(false, AudioClientShareMode.Shared, 100);
                _sonido.Position = 0;
                _salida.Initialize(_sonido);
                Log.Instance.ImprimirMensaje("Cargado correctamente" + cual, TipoMensaje.Correcto);
            }
            catch (IOException)
            {
                Log.Instance.ImprimirMensaje("No se puede reproducir el fichero porque está siendo utilizado por otro proceso", TipoMensaje.Error);
                throw;
            }
            catch (Exception)
            {
                Log.Instance.ImprimirMensaje("No se encuentra el fichero", TipoMensaje.Advertencia);
                throw;
            }
        }
Example #22
        private void SetupSampleSource()
        {
            this.DisposeSampleSource();
            ISampleSource sampleSource = this.soundInSource.ToSampleSource();

            this.sampleSource = new SingleBlockNotificationStream(sampleSource);
            this.sampleSource.SingleBlockRead += this.SampleSource_SingleBlockRead;

            this.DisposeSampledWaveSource();
            this.sampledWaveSource = this.sampleSource.ToWaveSource(16);
        }
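
DisposeSampledWaveSource is called above but not shown; presumably it mirrors DisposeSampleSource from Example #19:

        // Assumed counterpart to DisposeSampleSource, for the 16-bit wave source.
        private void DisposeSampledWaveSource()
        {
            if (this.sampledWaveSource != null)
            {
                this.sampledWaveSource.Dispose();
                this.sampledWaveSource = null;
            }
        }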
Example #23
        /// <summary>
        /// Initializes a new instance of the recorder class.
        /// </summary>
        /// <param name="captureDevice"></param>
        /// <param name="captureMode"></param>
        public Recorder(MMDevice captureDevice, DataFlow captureMode)
        {
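            // DataFlow.Render == 0 and DataFlow.Capture == 1, so Convert.ToBoolean
            // yields false for Render (loopback capture of the render device) and
            // true for Capture (microphone capture).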
            var wasapiCapture = Convert.ToBoolean(captureMode) ? new WasapiCapture() : new WasapiLoopbackCapture();

            wasapiCapture.Device = captureDevice;
            wasapiCapture.Initialize();

            _soundInSource     = new SoundInSource(wasapiCapture);
            NotificationStream = new SingleBlockNotificationStream(_soundInSource.ToSampleSource());
            _waveStream        = NotificationStream.ToWaveSource();
        }
Example #24
    void Awake()
    {
        barData = new float[numBars];
        // This uses the WASAPI API to get any sound data played by the computer
        switch (audioType)
        {
        case AudioSourceType.Microphone: capture = new WasapiCapture();
            break;

        case AudioSourceType.Speakers: capture = new WasapiLoopbackCapture();
            break;
        }

        capture.Initialize();

        // Get our capture as a source
        IWaveSource source = new SoundInSource(capture);


        // From https://github.com/filoe/cscore/blob/master/Samples/WinformsVisualization/Form1.cs

        // This is the typical size, you can change this for higher detail as needed
        fftSize = FftSize.Fft4096;

        // Actual fft data
        fftBuffer = new float[(int)fftSize];

        // These are the actual classes that give you spectrum data
        // The specific vars of lineSpectrum are changed below in the editor so most of these aren't that important here
        spectrumProvider = new BasicSpectrumProvider(capture.WaveFormat.Channels,
                                                     capture.WaveFormat.SampleRate, fftSize);

        lineSpectrum = new LineSpectrum(fftSize)
        {
            SpectrumProvider = spectrumProvider,
            UseAverage       = isAverage,
            BarCount         = numBars,
            BarSpacing       = 2,
            IsXLogScale      = false,
            ScalingStrategy  = ScalingStrategy.Linear
        };

        // Tells us when data is available to send to our spectrum
        var notificationSource = new SingleBlockNotificationStream(source.ToSampleSource());

        notificationSource.SingleBlockRead += NotificationSource_SingleBlockRead;

        // We use this to request data so it actually flows through (figuring this out took forever...)
        finalSource = notificationSource.ToWaveSource();

        capture.DataAvailable += Capture_DataAvailable;
        capture.Start();
    }
Example #25
    // Start is called before the first frame update
    void Start()
    {
        loopbackCapture = new WasapiLoopbackCapture();
        loopbackCapture.Initialize();

        soundInSource = new SoundInSource(loopbackCapture);

        fftBuffer = new float[(int)CFftSize];

        basicSpectrumProvider = new BasicSpectrumProvider(soundInSource.WaveFormat.Channels,
                                                          soundInSource.WaveFormat.SampleRate, CFftSize);

        lineSpectrum = new LineSpectrum(CFftSize)
        {
            SpectrumProvider = basicSpectrumProvider,
            BarCount         = numBars,
            UseAverage       = true,
            IsXLogScale      = false,
            ScalingStrategy  = ScalingStrategy.Linear
        };

        var notificationSource = new SingleBlockNotificationStream(soundInSource.ToSampleSource());

        notificationSource.SingleBlockRead += NotificationSource_SingleBlockRead;

        finalSource = notificationSource.ToWaveSource();

        loopbackCapture.DataAvailable += Capture_DataAvailable;
        loopbackCapture.Start();

        //singleBlockNotificationStream = new SingleBlockNotificationStream(soundInSource.ToSampleSource());
        //realTimeSource = singleBlockNotificationStream.ToWaveSource();

        //byte[] buffer = new byte[realTimeSource.WaveFormat.BytesPerSecond / 2];

        //soundInSource.DataAvailable += (s, ea) =>
        //{
        //    while (realTimeSource.Read(buffer, 0, buffer.Length) > 0)
        //    {
        //        float[] spectrumData = lineSpectrum.GetSpectrumData(10);
        //        receiveAudio(spectrumData);
        //        Debug.Log(receiveAudio);

        //        if (spectrumData != null && receiveAudio != null)
        //        {
        //            receiveAudio(spectrumData);
        //            Debug.Log(receiveAudio);
        //        }
        //    }
        //};

        //singleBlockNotificationStream.SingleBlockRead += SingleBlockNotificationStream_SingleBlockRead;
    }
Example #26
        static void Main(string[] args)
        {
            WasapiGeneral wasapi = new WasapiGeneral();

            wasapi.StartCaptureMicrophone();
            wasapi.ApplyEffect(new DmoEchoEffect(wasapi.wave));
            wasapi.StartOutput();

            FftSize fftsize = CSCore.DSP.FftSize.Fft1024;

            FFTHandler fftHandler = new FFTHandler(wasapi.sis.WaveFormat.Channels, wasapi.sis.WaveFormat.SampleRate, CSCore.DSP.FftSize.Fft1024);

            float[] fftValues = new float[(int)fftsize];

            var notificationSource = new SingleBlockNotificationStream(wasapi.output.WaveSource.ToSampleSource());

            string gainGraph = "";
            int    i;

            // Subscribe once, outside the loop; subscribing inside the loop
            // would attach an extra handler on every iteration.
            notificationSource.SingleBlockRead += (s, a) => fftHandler.Add(a.Left, a.Right);

            // Loop until a key is pressed so the shutdown code below is reachable.
            while (!Console.KeyAvailable)
            {
                gainGraph = "";

                i = 0;

                fftHandler.GetFftData(fftValues);

                foreach (int x in fftValues)
                {
                    foreach (int y in Enumerable.Range(0, x))
                    {
                        /*
                         * i += 1;
                         * if (i == 1)
                         * {
                         *  gainGraph += "#";
                         *  i = 0;
                         * }
                         */
                        Console.WriteLine(y);
                    }
                    Console.WriteLine(x);
                }

                //Console.WriteLine(gainGraph);
            }

            Console.ReadKey();

            wasapi.StopCapture();
            wasapi.StopPlayback();
        }
Example #27
File: AudioManager.cs Project: Slion/CIC
        /// <summary>
        ///
        /// </summary>
        private void StartAudioVisualization()
        {
            //Open the default device
            iSoundIn = new WasapiLoopbackCapture();
            //Our loopback capture opens the default render device by default so the following is not needed
            //iSoundIn.Device = MMDeviceEnumerator.DefaultAudioEndpoint(DataFlow.Render, Role.Console);
            iSoundIn.Initialize();

            SoundInSource soundInSource = new SoundInSource(iSoundIn);
            ISampleSource source        = soundInSource.ToSampleSource();

            const FftSize fftSize = FftSize.Fft2048;
            //create a spectrum provider which provides fft data based on some input
            BasicSpectrumProvider spectrumProvider = new BasicSpectrumProvider(source.WaveFormat.Channels, source.WaveFormat.SampleRate, fftSize);

            //LineSpectrum and VoicePrint3DSpectrum are used for rendering FFT data.
            //In order to get FFT data, set the previously created spectrum provider.
            iLineSpectrum = new LineSpectrum(fftSize)
            {
                SpectrumProvider = spectrumProvider,
                UseAverage       = false, // Does not matter since we hacked it
                BarCount         = 16,
                BarSpacing       = 1,
                IsXLogScale      = true,                   // Does not matter since we hacked it
                ScalingStrategy  = ScalingStrategy.Decibel // Does not matter since we hacked it
            };


            //the SingleBlockNotificationStream is used to intercept the played samples
            var notificationSource = new SingleBlockNotificationStream(source);

            //pass the intercepted samples as input data to the spectrumprovider (which will calculate a fft based on them)
            notificationSource.SingleBlockRead += (s, a) => spectrumProvider.Add(a.Left, a.Right);

            iWaveSource = notificationSource.ToWaveSource(16);


            // We need to read from our source otherwise SingleBlockRead is never called and our spectrum provider is not populated
            byte[] buffer = new byte[iWaveSource.WaveFormat.BytesPerSecond / 2];
            soundInSource.DataAvailable += (s, aEvent) =>
            {
                int read;
                while ((read = iWaveSource.Read(buffer, 0, buffer.Length)) > 0)
                {
                    ;
                }
            };


            //Start recording
            iSoundIn.Start();
        }
Example #28
        private void openToolStripMenuItem_Click(object sender, EventArgs e)
        {
            var openFileDialog = new OpenFileDialog()
            {
                Filter = CodecFactory.SupportedFilesFilterEn,
                Title  = "Select a file..."
            };

            if (openFileDialog.ShowDialog() == DialogResult.OK)
            {
                Stop();

                const FftSize fftSize = FftSize.Fft4096;

                IWaveSource source = CodecFactory.Instance.GetCodec(openFileDialog.FileName);

                var spectrumProvider = new BasicSpectrumProvider(source.WaveFormat.Channels,
                                                                 source.WaveFormat.SampleRate, fftSize);
                _lineSpectrum = new LineSpectrum(fftSize)
                {
                    SpectrumProvider = spectrumProvider,
                    UseAverage       = true,
                    BarCount         = 50,
                    BarSpacing       = 2,
                    IsXLogScale      = true,
                    ScalingStrategy  = ScalingStrategy.Sqrt
                };
                _voicePrint3DSpectrum = new VoicePrint3DSpectrum(fftSize)
                {
                    SpectrumProvider = spectrumProvider,
                    UseAverage       = true,
                    PointCount       = 200,
                    IsXLogScale      = true,
                    ScalingStrategy  = ScalingStrategy.Sqrt
                };

                var notificationSource = new SingleBlockNotificationStream(source.ToSampleSource());
                notificationSource.SingleBlockRead += (s, a) => spectrumProvider.Add(a.Left, a.Right);

                _source = notificationSource.ToWaveSource(16);

                _soundOut = new WasapiOut();
                _soundOut.Initialize(_source.ToMono());
                _soundOut.Play();

                timer1.Start();

                propertyGridTop.SelectedObject    = _lineSpectrum;
                propertyGridBottom.SelectedObject = _voicePrint3DSpectrum;
            }
        }
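
timer1's Tick handler is not shown. In the CSCore WinformsVisualization sample this code is based on, it renders the spectra into picture boxes; a sketch along those lines, where the control name and the CreateSpectrumLine helper are assumptions taken from that sample:

        // Assumed tick handler, modeled on the WinformsVisualization sample.
        private void timer1_Tick(object sender, EventArgs e)
        {
            var image = _lineSpectrum.CreateSpectrumLine(pictureBoxTop.Size,
                Color.Green, Color.Red, Color.Black, true);
            if (image != null)
            {
                pictureBoxTop.Image?.Dispose();
                pictureBoxTop.Image = image;
            }
        }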
Example #29
        private void InitializeSoundOut(IWaveSource soundSource)
        {
            // Create SoundOut
            if (this.supportsWindowsMediaFoundation)
            {
                this.soundOut = new WasapiOut(this.eventSync, this.audioClientShareMode, this.latency, ThreadPriority.Highest);

                // Map stereo or mono file to all channels
                ((WasapiOut)this.soundOut).UseChannelMixingMatrices = this.useAllAvailableChannels;

                if (this.outputDevice == null)
                {
                    // If no output device was provided, we're playing on the default device.
                    // In that case, we want to detect when the default device changes.
                    // This is done by setting stream routing options
                    ((WasapiOut)this.soundOut).StreamRoutingOptions = StreamRoutingOptions.All;
                }
                else
                {
                    // If an output device was provided, assign it to soundOut.Device.
                    // Only allow stream routing when the device was disconnected.
                    ((WasapiOut)this.soundOut).StreamRoutingOptions = StreamRoutingOptions.OnDeviceDisconnect;
                    ((WasapiOut)this.soundOut).Device = this.outputDevice;
                }

                // Initialize SoundOut
                this.notificationSource = new SingleBlockNotificationStream(soundSource.ToSampleSource());
                this.soundOut.Initialize(this.notificationSource.ToWaveSource(16));

                if (inputStreamList.Count != 0)
                {
                    foreach (var inputStream in inputStreamList)
                    {
                        this.notificationSource.SingleBlockRead += inputStream;
                    }
                }
            }
            else
            {
                this.soundOut = new DirectSoundOut(this.latency, ThreadPriority.Highest);

                // Initialize SoundOut
                // Spectrum analyzer performance is only acceptable with WasapiOut,
                // so we're not setting a notificationSource for DirectSoundOut
                this.soundOut.Initialize(soundSource);
            }

            this.soundOut.Stopped += this.SoundOutStoppedHandler;
            this.soundOut.Volume   = this.volume;
        }
Example #30
        public Visualizer()
        {
            InitializeComponent();

            _graphics = DrawPanel.CreateGraphics();
            _graphics.SmoothingMode      = SmoothingMode.AntiAlias;
            _graphics.CompositingQuality = CompositingQuality.AssumeLinear;
            _graphics.PixelOffsetMode    = PixelOffsetMode.Default;
            _graphics.TextRenderingHint  = TextRenderingHint.ClearTypeGridFit;
            _graphics.Clear(Color.Black);

            _oscilloscope = new Oscilloscope();

            for (int i = 0; i < _pens.Length; i++)
            {
                _pens[i] = new Pen(Color.FromArgb(i, i, i));
            }

            _fftProvider = new FftProvider(1, FftSize.Fft4096);

            _soundIn = new WasapiLoopbackCapture();
            _soundIn.Initialize();

            var soundInSource = new SoundInSource(_soundIn);
            // SingleBlockNotificationStream wraps an ISampleSource, so convert first.
            var singleBlockNotificationStream = new SingleBlockNotificationStream(soundInSource.ToSampleSource());

            _source = singleBlockNotificationStream.ToWaveSource();

            if (!Directory.Exists("%AppData%/Spectrograph"))
            {
                Directory.CreateDirectory("%AppData%/Spectrograph");
            }

            _writer = new WaveWriter("%AppData%/Spectrograph/loopback.wav", _source.WaveFormat);

            byte[] buffer = new byte[_source.WaveFormat.BytesPerSecond / 2];
            soundInSource.DataAvailable += (s, e) =>
            {
                int read;
                while ((read = _source.Read(buffer, 0, buffer.Length)) > 0)
                {
                    _writer.Write(buffer, 0, read);
                }
            };

            singleBlockNotificationStream.SingleBlockRead += SingleBlockNotificationStreamOnSingleBlockRead;

            _soundIn.Start();
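        }

Nothing in this constructor is ever torn down. A cleanup sketch in the spirit of Example #10; the method is an assumption, the fields are from the code above:

        // Assumed cleanup: stop the device before disposing the writer so no
        // DataAvailable callback writes to a disposed WaveWriter, then release
        // the rest of the chain.
        private void Cleanup()
        {
            _soundIn.Stop();
            _writer.Dispose();
            _source.Dispose();
            _soundIn.Dispose();
        }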
        }