Example #1
        /// <summary>
        /// Event handler that captures WASAPI device audio and converts it to PCM16.
        /// </summary>
        /// <remarks>
        /// See also: https://qiita.com/zufall/items/2e027a2bc996864fe4af
        /// </remarks>
        /// <param name="sender">The event source.</param>
        /// <param name="eventArgs">The captured buffer and the number of bytes recorded.</param>
        private void WaspiDataAvailable(object sender, WaveInEventArgs eventArgs)
        {
            if (eventArgs.BytesRecorded == 0)
            {
                ResampledDataAvailable?.Invoke(this, new byte[0]);
                ResampledMaxValueAvailable?.Invoke(this, 0);
                return;
            }

            using (var memStream = new MemoryStream(eventArgs.Buffer, 0, eventArgs.BytesRecorded))
            {
                using (var inputStream = new RawSourceWaveStream(memStream, capture.WaveFormat))
                {
                    var           sampleStream       = new WaveToSampleProvider(inputStream);
                    var           resamplingProvider = new WdlResamplingSampleProvider(sampleStream, TargetWaveFormat.SampleRate);
                    var           pcmProvider        = new SampleToWaveProvider16(resamplingProvider);
                    IWaveProvider targetProvider     = pcmProvider;
                    if (capture.WaveFormat.Channels == 2)
                    {
                        var stereoToMonoProvider = new StereoToMonoProvider16(pcmProvider);
                        stereoToMonoProvider.RightVolume = 0.5f;
                        stereoToMonoProvider.LeftVolume  = 0.5f;
                        targetProvider = stereoToMonoProvider;
                    }

                    byte[] buffer = new byte[eventArgs.BytesRecorded];

                    var outputStream = new MemoryStream();
                    int readBytes;
                    int writeBytes = 0;
                    while ((readBytes = targetProvider.Read(buffer, 0, eventArgs.BytesRecorded)) > 0)
                    {
                        outputStream.Write(buffer, 0, readBytes);
                        writeBytes += readBytes;
                    }
                    var aryOutputStream = outputStream.ToArray();
                    ResampledDataAvailable?.Invoke(this, aryOutputStream);

                    float max        = 0;
                    var   tempBuffer = new WaveBuffer(aryOutputStream);
                    for (int index = 0; index < aryOutputStream.Length / 2; index++)
                    {
                        var sample = (double)tempBuffer.ShortBuffer[index];
                        // absolute value
                        if (sample < 0.0)
                        {
                            sample = -sample;
                        }
                        // is this the max value?
                        if (sample > max)
                        {
                            max = (float)sample;
                        }
                    }
                    ResampledMaxValueAvailable?.Invoke(this, max);
                }
            }
        }
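A minimal wiring sketch for the handler above, assuming the surrounding class declares the capture field, the TargetWaveFormat, and the two events the handler raises (names are taken from the code above; everything else is hypothetical):

        private WasapiCapture capture;
        private WaveFormat TargetWaveFormat = new WaveFormat(16000, 16, 1);

        public event EventHandler<byte[]> ResampledDataAvailable;
        public event EventHandler<float>  ResampledMaxValueAvailable;

        public void StartCapture()
        {
            capture = new WasapiCapture();               // default capture endpoint
            capture.DataAvailable += WaspiDataAvailable; // handler shown above
            capture.StartRecording();
        }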
Example #2
        string micAdd()
        {
            string          ret = "";
            ISampleProvider micProv;

            if (micVol != null && micVol.OK())
            {
                return("");
            }

            if (settings.devMic != null && settings.devMic.mm != null)
            {
                Logger.mix.a("create mic");
                micCap = new WasapiCapture(settings.devMic.mm);
                micCap.DataAvailable += micDev_DataAvailable_03;
                micIn = new BufferedWaveProvider(micCap.WaveFormat);
                if (micCap.WaveFormat.SampleRate != settings.samplerate)
                {
                    Logger.mix.a("create mic resampler");
                    micRe = new MediaFoundationResampler(micIn, settings.samplerate);
                    micRe.ResamplerQuality = 60;
                    ret += "Incorrect samplerate on microphone device, resampling\n" +
                           settings.devMic.mm.DeviceFriendlyName + "\n" +
                           settings.devMic.mm.FriendlyName + "\n" +
                           settings.devMic.id + "\n" +
                           LSDevice.stringer(settings.devMic.wf) + "\n" +
                           LSDevice.stringer(micCap.WaveFormat) + "\n\n";
                }
                micProv = new WaveToSampleProvider((IWaveProvider)micRe ?? (IWaveProvider)micIn);
                if (micCap.WaveFormat.Channels == 1)
                {
                    Logger.mix.a("mic mono2stereo");
                    micProv = new MonoToStereoSampleProvider(micProv);
                }
                else if (settings.micLeft != settings.micRight)
                {
                    Logger.mix.a("mic chanselector");
                    micProv = new NPatch.ChannelSelector(micProv, settings.micLeft ? 0 : 1);
                }
                if (settings.reverbP > 0)
                {
                    micProv = new NPatch.Reverb(micProv);
                }

                micVol.SetSource(micProv);
                mixa.AddMixerInput(micVol);
                Logger.mix.a("mic done");
            }
            else
            {
                Logger.mix.a("mic skipped");
            }
            return(ret);
        }
Example #3
 public float[] ReadMonoFromSource(string source, int sampleRate, double secondsToRead, double startAtSecond)
 {
     using (var stream = naudioFactory.GetStream(source))
     {
         SeekToSecondInCaseIfRequired(startAtSecond, stream);
         using (var resampler = naudioFactory.GetResampler(stream, sampleRate, Mono))
         {
             var waveToSampleProvider = new WaveToSampleProvider(resampler);
             return samplesAggregator.ReadSamplesFromSource(new NAudioSamplesProviderAdapter(waveToSampleProvider), secondsToRead, sampleRate);
         }
     }
 }
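The SeekToSecondInCaseIfRequired helper is elided in this example; a minimal sketch of what it might do, assuming naudioFactory.GetStream returns a seekable NAudio WaveStream:

 // Hypothetical sketch; the original helper is not shown.
 private static void SeekToSecondInCaseIfRequired(double startAtSecond, WaveStream stream)
 {
     if (startAtSecond > 0)
     {
         // CurrentTime is settable on seekable WaveStreams.
         stream.CurrentTime = TimeSpan.FromSeconds(startAtSecond);
     }
 }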
Example #4
        public static float[] BuildPeaks(UWavePart part, System.ComponentModel.BackgroundWorker worker)
        {
            const double peaksRate = 4000;

            System.Diagnostics.Stopwatch sw = new System.Diagnostics.Stopwatch();
            sw.Start();
            float[] peaks;
            using (var stream = new AudioFileReader(part.FilePath))
            {
                int    channels     = part.Channels;
                double peaksSamples = (int)((double)stream.Length / stream.WaveFormat.BlockAlign / stream.WaveFormat.SampleRate * peaksRate);
                peaks = new float[(int)(peaksSamples + 1) * channels];
                double blocksPerPixel = stream.Length / stream.WaveFormat.BlockAlign / peaksSamples;

                var converted = new WaveToSampleProvider(stream);

                float[] buffer = new float[4096];

                int    readed;
                int    readPos = 0;
                int    peaksPos = 0;
                double bufferPos = 0;
                float  lmax = 0, lmin = 0, rmax = 0, rmin = 0;
                while ((readed = converted.Read(buffer, 0, 4096)) != 0)
                {
                    readPos += readed;
                    for (int i = 0; i < readed; i += channels)
                    {
                        lmax = Math.Max(lmax, buffer[i]);
                        lmin = Math.Min(lmin, buffer[i]);
                        if (channels > 1)
                        {
                            rmax = Math.Max(rmax, buffer[i + 1]);
                            rmin = Math.Min(rmin, buffer[i + 1]);
                        }
                        if (i > bufferPos)
                        {
                            lmax = -lmax; lmin = -lmin; rmax = -rmax; rmin = -rmin; // negate peaks to flip the waveform
                            peaks[peaksPos * channels] = lmax == 0 ? lmin : lmin == 0 ? lmax : (lmin + lmax) / 2;
                            if (channels > 1)
                            {
                                peaks[peaksPos * channels + 1] = rmax == 0 ? rmin : rmin == 0 ? rmax : (rmin + rmax) / 2;
                            }
                            peaksPos++;
                            lmax       = lmin = rmax = rmin = 0;
                            bufferPos += blocksPerPixel * stream.WaveFormat.Channels;
                        }
                    }
                    bufferPos -= readed;
                    worker.ReportProgress((int)((double)readPos * sizeof(float) * 100 / stream.Length));
                }
            }
            sw.Stop();
            System.Diagnostics.Debug.WriteLine("Build peaks {0} ms", sw.Elapsed.TotalMilliseconds);
            return(peaks);
        }
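A minimal usage sketch for BuildPeaks; UWavePart comes from the surrounding project and is only assumed to carry the FilePath and Channels members used above, and somePart is a hypothetical instance:

            var worker = new System.ComponentModel.BackgroundWorker { WorkerReportsProgress = true };
            worker.DoWork += (s, e) => e.Result = BuildPeaks((UWavePart)e.Argument, worker);
            worker.ProgressChanged += (s, e) => Console.WriteLine("peaks: {0}%", e.ProgressPercentage);
            worker.RunWorkerAsync(somePart);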
Example #6
 private float[] ReadMonoFromSource(string pathToSource, int sampleRate, int secondsToRead, int startAtSecond, Func <SampleProviderConverterBase, ISamplesProvider> getSamplesProvider)
 {
     using (var reader = new MediaFoundationReader(pathToSource))
     {
         SeekToSecondInCaseIfRequired(startAtSecond, reader);
         var ieeeFloatWaveFormat = WaveFormat.CreateIeeeFloatWaveFormat(sampleRate, 1);
         using (var resampler = new MediaFoundationResampler(reader, ieeeFloatWaveFormat))
         {
             var waveToSampleProvider = new WaveToSampleProvider(resampler);
             return(samplesAggregator.ReadSamplesFromSource(getSamplesProvider(waveToSampleProvider), secondsToRead, sampleRate));
         }
     }
 }
Example #7
        public float[] ReadMonoFromFile(string filename, int samplerate, int milliseconds, int startmillisecond)
        {
            SamplesAggregator samplesAggregator = new SamplesAggregator();

            using (var stream = GetStream(filename))
            {
                SeekToSecondInCaseIfRequired(startmillisecond, stream);
                using (var resampler = GetResampler(stream, samplerate, Mono, downSamplingQuality))
                {
                    var waveToSampleProvider = new WaveToSampleProvider(resampler);
                    return(samplesAggregator.ReadSamplesFromSource(new NAudioSamplesProviderAdapter(waveToSampleProvider), milliseconds, samplerate));
                }
            }
        }
Example #8
        private byte[] convert32bitFloat48000HzStereoPCMTo16bitMonoPCM_Alpha(WaveInEventArgs e, int sampleRate)
        {
            byte[] recorded_buf    = e.Buffer;
            int    recorded_length = e.BytesRecorded;

            byte[] result_buf = null;
            int    result_len = -1;

            try
            {
                // Convert the raw data into a playable format
                var waveBufferResample = new BufferedWaveProvider(this._WaveIn.WaveFormat);
                waveBufferResample.DiscardOnBufferOverflow = true;
                waveBufferResample.ReadFully    = false; // leave a buffer?
                waveBufferResample.BufferLength = recorded_length;
                var sampleStream = new WaveToSampleProvider(waveBufferResample);

                // Downsample
                var resamplingProvider = new WdlResamplingSampleProvider(sampleStream, sampleRate);

                // Stereo to mono
                var monoProvider = new StereoToMonoSampleProvider(resamplingProvider)
                {
                    LeftVolume  = 1f,
                    RightVolume = 1f
                };

                // Convert 32-bit float to 16-bit PCM
                var ieeeToPcm               = new SampleToWaveProvider16(monoProvider);
                var depthConvertProvider    = new WaveFormatConversionProvider(new WaveFormat(sampleRate, 8, 1), ieeeToPcm);
                var depthConvertProviderRev = new WaveFormatConversionProvider(new WaveFormat(sampleRate, 16, 1), depthConvertProvider);

                waveBufferResample.AddSamples(recorded_buf, 0, recorded_length);

                result_len = recorded_length / (2 * (48000 / sampleRate) * 2); // depth conv and sampling and ch conv
                result_buf = new byte[result_len];
                depthConvertProviderRev.Read(result_buf, 0, result_len);
            }
            catch (Exception ex)
            {
                Console.WriteLine(ex);
                Console.WriteLine("exit...");
                System.Windows.Forms.Application.Exit();
            }

            return(result_buf);
        }
Example #9
        public SampleJoiner(IEnumerable <WaveStream> samples)
        {
            var s = samples.ToArray();

            WaveFormat = s[0].WaveFormat;
            _samples   = new ISampleProvider[s.Length];
            for (int i = 0; i < _samples.Length; i++)
            {
                var s2 = s[i];
                if (s2.WaveFormat.SampleRate != WaveFormat.SampleRate)
                {
                    s2 = new WaveFormatConversionStream(WaveFormat, s2);
                }

                _samples[i] = new WaveToSampleProvider(s2);
            }
        }
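A minimal usage sketch; only the constructor is shown above, so the rest of SampleJoiner (its ISampleProvider plumbing over _samples) is assumed, and the file names are placeholders:

            var joined = new SampleJoiner(new WaveStream[]
            {
                new WaveFileReader("intro.wav"),
                new WaveFileReader("body.wav")
            });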
Example #10
 public static SampleSource CreateFromWaveFile(string fileName)
 {
     using (var reader = new WaveFileReader(fileName))
     {
         ISampleProvider sp;
         int             sourceSamples;
         if (reader.WaveFormat.Encoding == WaveFormatEncoding.Pcm)
         {
             if (reader.WaveFormat.BitsPerSample == 16)
             {
                 sp            = new Pcm16BitToSampleProvider(reader);
                 sourceSamples = (int)(reader.Length / 2);
             }
             else if (reader.WaveFormat.BitsPerSample == 24)
             {
                 sp            = new Pcm24BitToSampleProvider(reader);
                 sourceSamples = (int)(reader.Length / 3);
             }
             else
             {
                 throw new ArgumentException("Currently only 16 or 24 bit PCM samples are supported");
             }
         }
         else if (reader.WaveFormat.Encoding == WaveFormatEncoding.IeeeFloat)
         {
             sp            = new WaveToSampleProvider(reader);
             sourceSamples = (int)(reader.Length / 4);
         }
         else
         {
             throw new ArgumentException("Must be PCM or IEEE float");
         }
         float[] sampleData = new float[sourceSamples];
         int     n          = sp.Read(sampleData, 0, sourceSamples);
         if (n != sourceSamples)
         {
              throw new InvalidOperationException(String.Format("Couldn't read the whole sample, expected {0} samples, got {1}", sourceSamples, n));
         }
         SampleSource ss = new SampleSource(sampleData, sp.WaveFormat);
         return(ss);
     }
 }
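A minimal usage sketch; the path is a placeholder, and SampleSource is assumed to expose the sample data and WaveFormat passed to its constructor above:

      var sampleSource = SampleSource.CreateFromWaveFile("ding.wav");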
Example #11
 private void Recorder_DataAvailable(object sender, WaveInEventArgs e)
 {
     try
     {
         byte[] result;
         using (var ms = new MemoryStream())
         {
             ms.Write(e.Buffer, 0, e.BytesRecorded);
             ms.Seek(0, SeekOrigin.Begin);
             using (var inputStream = new RawSourceWaveStream(ms, LoopInput.WaveFormat))
             {
                 var sampleStream = new WaveToSampleProvider(inputStream);
                 var resample     = new WdlResamplingSampleProvider(sampleStream, 16000);
                 result = readStream(resample.ToWaveProvider16(), e.BytesRecorded);
             }
         }
         Connection.SendMessage(new MessageClass(Connection.ID, UserID, Commands.LoopData, ID, result));
     }
     catch { }
 }
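Both this handler and Example #13 below call an elided readStream helper. A hypothetical sketch of the two-argument variant used here, which drains at most maxBytes from the provider into a right-sized array (the single-argument variant in Example #13 would instead loop until Read returns 0):

 private static byte[] readStream(IWaveProvider provider, int maxBytes)
 {
     var buffer = new byte[maxBytes];
     int total = 0, read;
     while (total < maxBytes &&
            (read = provider.Read(buffer, total, maxBytes - total)) > 0)
     {
         total += read;
     }
     Array.Resize(ref buffer, total); // trim to the bytes actually produced
     return buffer;
 }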
Example #12
    /// <summary>
    /// Converts from 32-bit IEEE floating-point format to MuLaw 8 kHz, 8-bit, 1-channel.
    /// Used for WasapiCapture and WasapiLoopbackCapture.
    /// </summary>
    /// <param name="stream">The raw audio stream.</param>
    /// <param name="inputFormat">The input format.</param>
    public MuLawResamplerProvider(byte[] stream, WaveFormat inputFormat)
    {
        // Root buffer provider.
        waveBuffer = new BufferedWaveProvider(inputFormat);
        waveBuffer.DiscardOnBufferOverflow = false;
        waveBuffer.ReadFully = false;
        waveBuffer.AddSamples(stream, 0, stream.Length);
        var sampleStream = new WaveToSampleProvider(waveBuffer);
        // Stereo to mono filter.
        var monoStream = new StereoToMonoSampleProvider(sampleStream)
        {
            LeftVolume  = 2.0f,
            RightVolume = 2.0f
        };
        // Downsample to 8000 filter.
        var resamplingProvider = new WdlResamplingSampleProvider(monoStream, 8000);

        // Convert to 16-bit in order to use ACM or MuLaw tools.
        ieeeToPcm    = new SampleToWaveProvider16(resamplingProvider);
        sourceBuffer = new byte[ieeeToPcm.WaveFormat.AverageBytesPerSecond];
    }
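The constructor stops at 16-bit PCM and leaves the actual MuLaw step elsewhere in the class. A hypothetical sketch of that step using NAudio's MuLawEncoder, assuming a Read-style method on this class:

    // Hypothetical: encode the 16-bit PCM produced by ieeeToPcm as 8-bit MuLaw.
    public byte[] ReadMuLaw()
    {
        int bytesRead = ieeeToPcm.Read(sourceBuffer, 0, sourceBuffer.Length);
        var encoded = new byte[bytesRead / 2]; // one MuLaw byte per 16-bit sample
        for (int i = 0; i < encoded.Length; i++)
        {
            short sample = BitConverter.ToInt16(sourceBuffer, i * 2);
            encoded[i] = NAudio.Codecs.MuLawEncoder.LinearToMuLawSample(sample);
        }
        return encoded;
    }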
Example #13
        /// <summary>
        /// Converts audio from the given wave format to PCM16.
        /// </summary>
        /// <param name="input">The raw input buffer.</param>
        /// <param name="length">The number of valid bytes in the input buffer.</param>
        /// <param name="format">The wave format of the input buffer.</param>
        /// <returns>The converted PCM16 bytes.</returns>
        private byte[] ConvertToPCM16Bit(byte[] input, int length, WaveFormat format)
        {
            if (length == 0)
            {
                return(new byte[0]);
            }

            using (var memStream = new MemoryStream(input, 0, length))
            {
                using (var inputStream = new RawSourceWaveStream(memStream, format))
                {
                    //convert bytes to floats for operations.
                    WaveToSampleProvider sampleStream = new WaveToSampleProvider(inputStream);

                    // resample to 48 kHz
                    var resamplingProvider = new WdlResamplingSampleProvider(sampleStream, 48000);

                    //convert float stream to PCM 16 bit.
                    var ieeeToPCM = new SampleToWaveProvider16(resamplingProvider);
                    return(readStream(ieeeToPCM));
                }
            }
        }
Example #14
        private void _play()
        {
            /* Audio chain */

            // Sampling
            _wavesampler = new WaveToSampleProvider(new Wave16ToFloatProvider(_wavebuffer));

            // Fading component
            _fade = new FadeInOutSampleProvider(_wavesampler);
            _fade.BeginFadeIn(1500);

            // Notifying component
            var _notify = new NotifyingSampleProvider(_fade);

            _notify.Sample += new EventHandler <SampleEventArgs>(_notify_Sample);

            // Gain adjustment component
            _volume        = new VolumeSampleProvider(_notify);
            _volume.Volume = this.Volume;

            // Output
            Output.Init(new SampleToWaveProvider16(_volume));

            /* Playback loop */
            do
            {
                if (_cancel_play.IsCancellationRequested)
                {
                    Console.WriteLine("[Playback thread] Cancellation requested.");

                    // Fade out and stop
                    Console.WriteLine("[Playback thread] Fading out and stopping...");
                    _fade.BeginFadeOut(500);
                    Thread.Sleep(500);
                    Output.Stop();
                    Console.WriteLine("[Playback thread] Output stopped.");
                    this.Status = StreamStatus.Stopped;
                    Console.WriteLine("[Playback thread] Acknowledged as status.");

                    //_cancel_play_token.ThrowIfCancellationRequested();
                    //Console.WriteLine("[Playback thread] WARNING: Cancellation token is not cleanly set!");
                    return;
                }

                if (Output.PlaybackState != PlaybackState.Playing && _wavebuffer.BufferedDuration.TotalMilliseconds > 2750)
                {
                    // Buffer is filled enough
                    Console.WriteLine("[Playback thread] Buffer is okay now, start playback!");
                    this.Status = StreamStatus.Playing;
                    Output.Play();
                }
                else if (Output.PlaybackState == PlaybackState.Playing && _wavebuffer.BufferedDuration.TotalMilliseconds < 2250)
                {
                    // Buffer is underrunning
                    Console.WriteLine("[Playback thread] Buffer is underrunning, pausing playback...");
                    this.Status = StreamStatus.Buffering;
                    Output.Pause();
                }

                if (_bufferThread.Exception != null)
                {
                    Console.WriteLine("[Playback thread] Buffering thread is faulted, aborting playback");
                    throw new Exception("Buffering thread faulted, aborting playback");
                }

                Thread.Sleep(100);
            }while (true);
        }
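The _notify_Sample handler is elided above; NAudio's NotifyingSampleProvider raises it once per sample with the Left and Right values, so a minimal peak-tracking sketch might look like this:

        // Hypothetical sketch of the elided handler.
        private float _peak;
        private void _notify_Sample(object sender, SampleEventArgs e)
        {
            _peak = Math.Max(_peak, Math.Max(Math.Abs(e.Left), Math.Abs(e.Right)));
        }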
Example #15
        void doMagic()
        {
            Logger.mix.a("doMagic");
            cage = new List <Object>();
            string lq = "";

            recCap = null;
            micCap = null;
            recRe  = micRe = null;
            ISampleProvider recProv;

            format = WaveFormat.CreateIeeeFloatWaveFormat(settings.samplerate, 2);
            mixa   = new NPatch.Mixa(format);

            Logger.mix.a("create rec");
            if (settings.devRec is LSDevice)
            {
                recCap = new WasapiLoopbackCapture(((LSDevice)settings.devRec).mm);
            }
            else
            {
                recCap = new LSWavetailDev((LSWavetail)settings.devRec);
            }

            recCap.DataAvailable += recDev_DataAvailable_03;
            recIn = new BufferedWaveProvider(recCap.WaveFormat);

            //recIn.ReadFully = false;
            if (recCap.WaveFormat.SampleRate != settings.samplerate)
            {
                Logger.mix.a("create rec resampler");
                recRe = new MediaFoundationResampler(recIn, settings.samplerate);
                recRe.ResamplerQuality = 60;
                lq += "Incorrect samplerate on music device, resampling\n";

                if (settings.devRec is LSDevice)
                {
                    lq +=
                        ((LSDevice)settings.devRec).mm.DeviceFriendlyName + "\n" +
                        ((LSDevice)settings.devRec).mm.FriendlyName + "\n";
                }

                lq += settings.devRec.id + "\n" +
                      LSDevice.stringer(settings.devRec.wf) + "\n" +
                      LSDevice.stringer(recCap.WaveFormat) + "\n\n";
            }

            recProv = new WaveToSampleProvider((IWaveProvider)recRe ?? (IWaveProvider)recIn);
            if (recCap.WaveFormat.Channels != settings.chRec.Length)
            {
                cage.Add(recProv);
                Logger.mix.a("rec chanselector");
                recProv = new NPatch.ChannelSelectorIn(recProv, settings.chRec, 2);
            }
            cage.Add(recProv);
            recVol = new NPatch.VolumeSlider();
            recVol.SetSource(recProv);
            mixa.AddMixerInput(recVol);
            Logger.mix.a("rec done");

            killmic          = new System.Windows.Forms.Timer();
            killmic.Interval = 1000;
            killmic.Tick    += killmic_Tick;
            micVol           = new NPatch.VolumeSlider();
            lq += micAdd();

            NPatch.Fork fork = new NPatch.Fork(mixa, 2);
            cage.Add(fork);
            lameOutlet = fork.providers[1];
            outVol     = new NPatch.VolumeSlider();
            outVol.SetSource(fork.providers[0]);

            ISampleProvider outProv = outVol;

            if (settings.devOut.wf.Channels != settings.chOut.Length)
            {
                Logger.mix.a("create ChannelMapperOut " + settings.devOut.wf.Channels);
                outProv = new NPatch.ChannelMapperOut(outVol, settings.chOut, settings.devOut.wf.Channels);
                cage.Add(outProv);
            }
            SampleToWaveProvider muxer = new SampleToWaveProvider(outProv);

            cage.Add(muxer);

            Logger.mix.a("init mixer vol");
            recVol.SetVolume((float)settings.mixer.vRec);
            micVol.SetVolume((float)settings.mixer.vMic);
            outVol.SetVolume((float)settings.mixer.vOut);
            recVol.boostLock = (float)settings.mixer.yRec;
            micVol.boostLock = (float)settings.mixer.yMic;
            recVol.boost     = (float)settings.mixer.xRec;
            micVol.boost     = (float)settings.mixer.xMic;
            recVol.muted     = !settings.mixer.bRec;
            micVol.muted     = !settings.mixer.bMic;
            outVol.muted     = !settings.mixer.bOut;

            Logger.mix.a("create mixOut");
            mixOut = new WasapiOut(((LSDevice)settings.devOut).mm,
                                   AudioClientShareMode.Shared, false, 100);

            Logger.mix.a("init mixOut");
            mixOut.Init(muxer);

            try
            {
                Logger.mix.a("rec.startRec");
                recCap.StartRecording();

                if (micCap != null)
                {
                    Logger.mix.a("mic.startRec");
                    micCap.StartRecording();
                }
                //throw new System.Runtime.InteropServices.COMException("fgsfds", 1234);
            }
            catch (System.Runtime.InteropServices.COMException ce)
            {
                string msg = WinapiShit.comExMsg((uint)ce.ErrorCode);
                System.Windows.Forms.MessageBox.Show(msg + "\r\n\r\ngonna crash now, bye");
                throw;
            }

            // give wasapicapture some time to fill the buffer
            startReading = new System.Windows.Forms.Timer();
            //startReading_Tick(null, null);
            startReading.Tick    += startReading_Tick;
            startReading.Interval = 300;
            startReading.Start();

            if (settings.vu)
            {
                recVol.enVU = true;
                micVol.enVU = true;
                outVol.enVU = true;
                bars[0].src = recVol;
                bars[1].src = micVol;
                bars[2].src = outVol;
            }

            if (!string.IsNullOrEmpty(lq))
            {
                isLQ = lq;
            }
        }
Example #16
        void doMagic()
        {
            Logger.mix.a("doMagic");
            string lq = "";

            recCap = null;
            micCap = null;
            recRe  = micRe = null;
            ISampleProvider recProv;

            format = WaveFormat.CreateIeeeFloatWaveFormat(settings.samplerate, 2);
            //mixer = new MixingSampleProvider(format);
            mixa = new NPatch.Mixa(format);

            Logger.mix.a("create rec");
            recCap = new WasapiLoopbackCapture(settings.devRec.mm);
            recCap.DataAvailable += recDev_DataAvailable_03;
            recIn = new BufferedWaveProvider(recCap.WaveFormat);
            if (recCap.WaveFormat.SampleRate != settings.samplerate)
            {
                Logger.mix.a("create rec resampler");
                recRe = new MediaFoundationResampler(recIn, settings.samplerate);
                recRe.ResamplerQuality = 60;
                lq += "Incorrect samplerate on music device, resampling\n" +
                      settings.devRec.mm.DeviceFriendlyName + "\n" +
                      settings.devRec.mm.FriendlyName + "\n" +
                      settings.devRec.id + "\n" +
                      LSDevice.stringer(settings.devRec.wf) + "\n" +
                      LSDevice.stringer(recCap.WaveFormat) + "\n\n";
            }
            recProv = new WaveToSampleProvider((IWaveProvider)recRe ?? (IWaveProvider)recIn);
            recVol  = new NPatch.VolumeSlider();
            recVol.SetSource(recProv);
            mixa.AddMixerInput(recVol);
            Logger.mix.a("rec done");

            killmic          = new System.Windows.Forms.Timer();
            killmic.Interval = 1000;
            killmic.Tick    += killmic_Tick;
            micVol           = new NPatch.VolumeSlider();
            lq += micAdd();

            //mixer.ReadFully = true;
            fork       = new NPatch.Fork(mixa, 2);
            lameOutlet = fork.providers[1];
            outVol     = new NPatch.VolumeSlider();
            outVol.SetSource(fork.providers[0]);
            muxer = new SampleToWaveProvider(outVol);

            Logger.mix.a("init mixer vol");
            recVol.SetVolume((float)settings.mixer.vRec);
            micVol.SetVolume((float)settings.mixer.vMic);
            outVol.SetVolume((float)settings.mixer.vOut);
            recVol.boostLock = (float)settings.mixer.yRec;
            micVol.boostLock = (float)settings.mixer.yMic;
            recVol.boost     = (float)settings.mixer.xRec;
            micVol.boost     = (float)settings.mixer.xMic;
            recVol.muted     = !settings.mixer.bRec;
            micVol.muted     = !settings.mixer.bMic;
            outVol.muted     = !settings.mixer.bOut;

            Logger.mix.a("create mixOut");
            mixOut = new WasapiOut(settings.devOut.mm,
                                   AudioClientShareMode.Shared, false, 100);



            Logger.mix.a("init mixOut");
            mixOut.Init(outVol);

            Logger.mix.a("rec.startRec");
            recCap.StartRecording();

            //System.Threading.Thread.Sleep(100);
            if (micCap != null)
            {
                Logger.mix.a("mic.startRec");
                micCap.StartRecording();
            }
            Logger.mix.a("mixOut.play (ready)");
            mixOut.Play();

            if (settings.vu)
            {
                recVol.enVU = true;
                micVol.enVU = true;
                outVol.enVU = true;
                bars[0].src = recVol;
                bars[1].src = micVol;
                bars[2].src = outVol;
            }

            if (!string.IsNullOrEmpty(lq))
            {
                isLQ = lq;
            }

            /*byte[] buffer = new byte[outVol.WaveFormat.AverageBytesPerSecond * 10];
             * while (true)
             * {
             *  int i = wp16.Read(buffer, 0, fork.providers[1].avail());
             *  waver.Write(buffer, 0, i);
             *  System.Threading.Thread.Sleep(10);
             *  System.Windows.Forms.Application.DoEvents();
             * }*/
        }
Example #17
        public AudioSampleBuffer Update(WaveInputDevice device, out string status,
                                        out WaveFormat waveFormat, out int latency, out float cpuUsage, out int bufferUnderRuns,
                                        int driverLatency = 150, int internalLatency = 8, int bufferSize = 512, bool reset = false)
        {
            bool hasDeviceChanged = Device?.Value != device?.Value ||
                                    DriverLatency != driverLatency ||
                                    BufferSize != bufferSize ||
                                    reset;

            Device          = device;
            DriverLatency   = driverLatency;
            InternalLatency = internalLatency;
            BufferSize      = bufferSize;

            if (hasDeviceChanged)
            {
                processor?.Dispose();
                processor = null;

                if (waveIn != null)
                {
                    AudioEngine.Log("Stopping WaveIn...");
                    waveIn.StopRecording();
                    waveIn.Dispose();
                }
            }


            if (processor == null)
            {
                processor = new AudioThread.AudioThreadProcessor(bufferSize);
            }

            processor.EnsureThreadIsRunning();
            processor.RequestedLatency = internalLatency;

            if (hasDeviceChanged)
            {
                if (device != null)
                {
                    AudioEngine.Log(
                        $"WaveInput: Configuration changed, device={device.Value}, requested latency={DriverLatency}");
                    try
                    {
                        waveIn       = ((IWaveInputFactory)device.Tag).Create(DriverLatency);
                        bufferedWave = new BufferedWaveProvider(waveIn.WaveFormat);
                        bufferedWave.DiscardOnBufferOverflow = true;
                        sampleProvider       = new WaveToSampleProvider(bufferedWave);
                        OutputFormat         = sampleProvider.WaveFormat;
                        processor.WaveFormat = OutputFormat;
                        processor.Input      = sampleProvider;

                        waveIn.DataAvailable += (s, a) => { bufferedWave.AddSamples(a.Buffer, 0, a.BytesRecorded); };
                        waveIn.StartRecording();
                        AudioEngine.Log("WaveInput: Started");

                        output = new AudioSampleBuffer(OutputFormat)
                        {
                            Processor = processor
                        };
                    }
                    catch (Exception e)
                    {
                        AudioEngine.Log(e);
                        waveIn = null;
                    }
                }
            }


            status          = waveIn != null ? "Recording" : "Uninitialized";
            waveFormat      = OutputFormat;
            latency         = processor.Latency;
            cpuUsage        = processor.CpuUsage;
            bufferUnderRuns = processor.BufferUnderRuns;
            return(output);
        }