Example #1
 /// <summary>
 /// Adds a new mixer input
 /// </summary>
 /// <param name="mixerInput">Mixer input</param>
 public void AddMixerInput(ISampleProvider mixerInput)
 {
     // we take the lock around Add since we are protecting against an AddMixerInput at
     // the same time as a Read, rather than two AddMixerInput calls at the same time
     lock (sources)
     {
         if (this.sources.Count >= maxInputs)
         {
             throw new InvalidOperationException("Too many mixer inputs");
         }
         this.sources.Add(mixerInput);
     }
     if (this.waveFormat == null)
     {
         this.waveFormat = mixerInput.WaveFormat;
     }
     else
     {
         if (this.WaveFormat.SampleRate != mixerInput.WaveFormat.SampleRate ||
             this.WaveFormat.Channels != mixerInput.WaveFormat.Channels)
         {
             throw new ArgumentException("All mixer inputs must have the same WaveFormat");
         }
     }
 }
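A minimal usage sketch for the method above, assuming the enclosing class (called LimitedMixer here, a hypothetical name) implements ISampleProvider: the first input added fixes the mixer's WaveFormat, and every later input must match it.
 var inputA = new AudioFileReader("a.wav");   // NAudio reader yielding IEEE float samples
 var inputB = new AudioFileReader("b.wav");
 var mixer = new LimitedMixer();              // hypothetical wrapper exposing the AddMixerInput above
 mixer.AddMixerInput(inputA);                 // sets waveFormat from the first input
 mixer.AddMixerInput(inputB);                 // must share sample rate and channel count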
 public AutoDisposeSampleProvider(ISampleProvider provider,
      IEnumerable<IDisposable> disposables)
 {
     this._provider = provider;
     this._disposables = new CompositeDisposable(disposables);
     this.WaveFormat = provider.WaveFormat;
 }
Example #3
 /// <summary>
 /// Removes a mixer input
 /// </summary>
 /// <param name="mixerInput">Mixer input to remove</param>
 public void RemoveMixerInput(ISampleProvider mixerInput)
 {
     lock (sources)
     {
         this.sources.Remove(mixerInput);
     }
 }
        public AudioStreamModifier(ISampleProvider sample, double rateMult, int pitchDelta)
        {
            _sample = sample;
            WaveFormat = _sample.WaveFormat;

            _soundTouch = new SoundTouch<float, double>();

            channelCount = sample.WaveFormat.Channels;
            _soundTouch.SetSampleRate(sample.WaveFormat.SampleRate);
            _soundTouch.SetChannels(channelCount);

            rateMult = (rateMult - 1) * 100;
            _soundTouch.SetTempoChange(rateMult);
            _soundTouch.SetPitchSemiTones(pitchDelta*0.25f);
            _soundTouch.SetRateChange(1.0f);

            _soundTouch.SetSetting(SettingId.UseQuickseek, 1);
            _soundTouch.SetSetting(SettingId.UseAntiAliasFilter, 1);

            _soundTouch.SetSetting(SettingId.SequenceDurationMs, 40);
            _soundTouch.SetSetting(SettingId.SeekwindowDurationMs, 15);
            _soundTouch.SetSetting(SettingId.OverlapDurationMs, 8);

            sourceReadBuffer = new float[(WaveFormat.SampleRate * channelCount * readDurationMilliseconds) / 1000];
            soundTouchReadBuffer = new float[sourceReadBuffer.Length * 10]; // support down to 0.1 speed
        }
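The only non-obvious arithmetic above is the tempo argument: SoundTouch's SetTempoChange expects a change in percent, while the caller passes a playback-rate multiplier. A small sketch of that conversion (the helper name is hypothetical):
 // Hypothetical helper: maps a playback-rate multiplier (1.0 = unchanged)
 // to the percent change that SetTempoChange expects.
 static double RateMultiplierToTempoChangePercent(double rateMult)
 {
     return (rateMult - 1.0) * 100.0;   // e.g. 1.25x speed -> +25 percent
 }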
Example #5
		// Constructor, sets the {@link Decoder}, the sample window size and the
		// hop size for the spectra returned. Say the sample window size is 1024
		// samples. To get an overlap of 50% you specify a hop size of 512 samples,
		// for an overlap of 75% a hop size of 256, and so on. Hop sizes are of
		// course not limited to powers of 2.
		// 
		// @param decoder The decoder to get the samples from.
		// @param sampleWindowSize The sample window size.
		// @param hopSize The hop size.
		// @param useHamming Whether to use Hamming smoothing or not.
		public SpectrumProvider(ISampleProvider decoder, int sampleWindowSize, int hopSize, bool useHamming)
		{
			if(decoder == null)
				throw new ArgumentException("Decoder must be != null");

			if(sampleWindowSize <= 0)
				throw new ArgumentException("Sample window size must be > 0");
			if(hopSize <= 0)
				throw new ArgumentException("Hop size must be > 0");

			if(sampleWindowSize < hopSize)
				throw new ArgumentException("Hop size must be <= sample window size");


			this.decoder = decoder;
			this.samples = new float[sampleWindowSize];
			this.nextSamples = new float[sampleWindowSize];
			this.tempSamples = new float[sampleWindowSize];
			this.hopSize = hopSize;
			fft = new FFT(sampleWindowSize, 44100);

			// calculate averages based on a minimum octave width of 22 Hz
			// split each octave into three bands
			// this should result in 30 averages
			//fft.LogAverages(22, 3);
			
			if(useHamming)
				fft.Window(FFT.HAMMING);

			decoder.Read(samples, 0, samples.Length);
			decoder.Read(nextSamples, 0, nextSamples.Length);
		}
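A small sketch of the hop/overlap relationship described in the constructor comment, under the usual definition that consecutive windows share (window size - hop size) samples (the helper name is hypothetical):
 // Hypothetical helper: fraction of each analysis window shared with the next one.
 static double OverlapFraction(int sampleWindowSize, int hopSize)
 {
     return 1.0 - (double)hopSize / sampleWindowSize;   // 1024/512 -> 0.5, 1024/256 -> 0.75
 }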
 public EnvelopeSampleProvider(ISampleProvider source, List<ExpPoint> envelope, double skipOver)
 {
     this.source = source;
     foreach (var pt in envelope) this.envelope.Add(pt.Clone());
     int skipOverSamples = (int)(skipOver * WaveFormat.SampleRate / 1000);
     ConvertEnvelope(skipOverSamples);
 }
 /// <summary>
 /// Initializes a new instance of BalanceSampleProvider
 /// </summary>
 /// <param name="source">Source Sample Provider</param>
 public BalanceSampleProvider(ISampleProvider source)
 {
     if (source.WaveFormat.Channels != 2)
         throw new InvalidOperationException("Input wave format must be stereo!");
     _source = source;
     LeftVolume = 1.0f;
     RightVolume = 1.0f;
 }
 /// <summary>
 /// Initialises a new instance of MeteringSampleProvider 
 /// </summary>
 /// <param name="source">source sampler provider</param>
 /// <param name="samplesPerNotification">Number of samples between notifications</param>
 public MeteringSampleProvider(ISampleProvider source, int samplesPerNotification)
 {
     this.source = source;
     this.channels = source.WaveFormat.Channels;
     this.maxSamples = new float[channels];
     this.SamplesPerNotification = samplesPerNotification;
     this.args = new StreamVolumeEventArgs() { MaxSampleValues = this.maxSamples }; // create objects up front giving GC little to do
 }
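A hedged usage sketch for NAudio's MeteringSampleProvider: subscribing to its StreamVolume event yields the per-channel peak since the last notification (the file name and handler body are illustrative only):
 var reader = new AudioFileReader("music.mp3");   // IEEE float source
 var meter = new MeteringSampleProvider(reader, reader.WaveFormat.SampleRate / 10);
 meter.StreamVolume += (sender, e) =>
 {
     // e.MaxSampleValues holds one peak value per channel
     Console.WriteLine($"Left peak: {e.MaxSampleValues[0]:F3}");
 };
 // meter is then passed further down the signal chain, e.g. to waveOut.Init(...)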
Example #9
 public void AddSource(ISampleProvider source, TimeSpan delayBy)
 {
     ISampleProvider _source;
     if (source.WaveFormat.Channels == 1) _source = new MonoToStereoSampleProvider(source);
     else if (source.WaveFormat.Channels == 2) _source = source;
     else return;
     mix.AddMixerInput(new OffsetSampleProvider(_source) { DelayBy = delayBy });
 }
 /// <summary>
 /// Initializes a new instance of the SampleToWaveProvider class
 /// </summary>
 /// <param name="source">Source wave provider</param>
 public SampleToWaveProvider(ISampleProvider source)
 {
     if (source.WaveFormat.Encoding != WaveFormatEncoding.IeeeFloat)
     {
         throw new ArgumentException("Must be already floating point");
     }
     this.source = source;
 }
		public const float StandardBufferSizeSeconds = 0.05f;	// 1/20 second buffer

		/// <summary>
		/// Constructor
		/// </summary>
		/// <param name="centreFrequency">For BiQuad filters</param>
		/// <param name="q">For BiQuad filters</param>
		public FilteredSampleProvider(ISampleProvider sourceProvider, float centreFrequency, float q) {
			this.sourceProvider = sourceProvider;
			channels = WaveFormat.Channels;
			StandardBufferSize = (int)WaveFormat.SecondsToSamples(StandardBufferSizeSeconds);
			filters = new BiQuadFilter[channels];
			for (int n = 0; n < channels; n++) {
				filters[n] = BiQuadFilter.BandPassFilterConstantPeakGain(WaveFormat.SampleRate, centreFrequency, q);
			}
		}
Example #12
 public Equalizer(ISampleProvider sourceProvider, EqualizerBand[] bands)
 {
     this.sourceProvider = sourceProvider;
     this.bands = bands;
     channels = sourceProvider.WaveFormat.Channels;
     bandCount = bands.Length;
     filters = new BiQuadFilter[channels, bands.Length];
     CreateFilters();
 }
 /// <summary>
 /// Initializes a new instance of MonoToStereoSampleProvider
 /// </summary>
 /// <param name="source">Source sample provider</param>
 public MonoToStereoSampleProvider(ISampleProvider source)
 {
     if (source.WaveFormat.Channels != 1)
     {
         throw new ArgumentException("Source must be mono");
     }
     this.source = source;
     this.waveFormat = WaveFormat.CreateIeeeFloatWaveFormat(source.WaveFormat.SampleRate, 2);
 }
 /// <summary>
 /// Initialises a new instance of the PanningSampleProvider
 /// </summary>
 /// <param name="source">Source sample provider, must be mono</param>
 public PanningSampleProvider(ISampleProvider source)
 {
     if (source.WaveFormat.Channels != 1)
     {
         throw new ArgumentException("Source sample provider must be mono");
     }
     this.source = source;
     this.waveFormat = WaveFormat.CreateIeeeFloatWaveFormat(source.WaveFormat.SampleRate, 2);
     this.panStrategy = new SinPanStrategy();
 }
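A short usage sketch: PanningSampleProvider takes a mono source and produces stereo output whose balance is set through the Pan property (-1 = full left, +1 = full right); the file name is illustrative:
 var mono = new AudioFileReader("voice.wav").ToMono();         // ensure a single-channel source
 var panner = new PanningSampleProvider(mono) { Pan = -0.5f }; // lean the signal to the left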
Example #15
        // Constructor, plays back the audio from the decoder and
        // sets the marker of the plot accordingly. This will return
        // when the playback is done.
        // @param plot The plot.
        // @param samplesPerPixel The number of samples per pixel.
        // @param FILE The audio file.
        public PlaybackVisualizer(Plot plot, int samplesPerPixel, String FILE)
        {
            this.plot = plot;
            this.samplesPerPixel = samplesPerPixel;
            this.FILE = FILE;
            this.decoder = new AudioFileReader(FILE);

            plot.Shown += new EventHandler(PlotShown);
            plot.FormClosing += new FormClosingEventHandler(PlotFormClosing);
            Application.Run(plot);
        }
Example #16
			public EffectStream(ISampleProvider stream, int length, float factor)
			{
				this.SourceStream = stream;
				this.EchoLength = length;
				this.EchoFactor = factor;
				this.samples = new Queue<float>();

				for (int i = 0; i < length; i++) {
					this.samples.Enqueue(0f);
				}
			}
 /// <summary>
 /// Creates a new mono ISampleProvider based on a stereo input
 /// </summary>
 /// <param name="sourceProvider">Stereo 16 bit PCM input</param>
 public StereoToMonoSampleProvider(ISampleProvider sourceProvider)
 {
     LeftVolume = 0.5f;
     RightVolume = 0.5f;
     if (sourceProvider.WaveFormat.Channels != 2)
     {
         throw new ArgumentException("Source must be stereo");
     }
     this.sourceProvider = sourceProvider;
     WaveFormat = new WaveFormat(sourceProvider.WaveFormat.SampleRate, 1);
 }
Example #18
 /// <summary>
 /// Creates a new AdsrSampleProvider with default values
 /// </summary>
 public AdsrSampleProvider(ISampleProvider source)
 {
     if (source.WaveFormat.Channels > 1) throw new ArgumentException("Currently only supports mono inputs");
     this.source = source;
     adsr = new EnvelopeGenerator();
     AttackSeconds = 0.01f;
     adsr.SustainLevel = 1.0f;
     adsr.DecayRate = 0.0f * WaveFormat.SampleRate;
     ReleaseSeconds = 0.3f;
     adsr.Gate(true);
 }
Example #19
 public Equalizer(ISampleProvider sourceProvider, ObservableCollection<IEqualizerBand> bands)
 {
     _sourceProvider = sourceProvider;
     _bands = bands;
     _channels = sourceProvider.WaveFormat.Channels;
     _lockObj = new object();
     foreach (IEqualizerBand band in _bands) band.PropertyChanged += EqualizerBandPropertyChanged;
     _bands.CollectionChanged += BandsOnCollectionChanged;
     _filters = new List<BiQuadFilter[]>();
     CreateFilters();
 }
Example #20
        /// <summary>
        /// Creates a new SampleToWaveProvider16
        /// </summary>
        /// <param name="sourceProvider">the source provider</param>
        public SampleToWaveProvider16(ISampleProvider sourceProvider)
        {
            if (sourceProvider.WaveFormat.Encoding != WaveFormatEncoding.IeeeFloat)
                throw new ApplicationException("Only PCM supported");
            if (sourceProvider.WaveFormat.BitsPerSample != 32)
                throw new ApplicationException("Only 32 bit audio supported");

            waveFormat = new WaveFormat(sourceProvider.WaveFormat.SampleRate, 16, sourceProvider.WaveFormat.Channels);

            this.sourceProvider = sourceProvider;
            this.volume = 1.0f;
        }
Example #21
        /// <summary>
        /// Converts from an ISampleProvider (IEEE float) to a 16 bit PCM IWaveProvider.
        /// Number of channels and sample rate remain unchanged.
        /// </summary>
        /// <param name="sourceProvider">The input source provider</param>
        public SampleToWaveProvider16(ISampleProvider sourceProvider)
        {
            if (sourceProvider.WaveFormat.Encoding != WaveFormatEncoding.IeeeFloat)
                throw new ArgumentException("Input source provider must be IEEE float", "sourceProvider");
            if (sourceProvider.WaveFormat.BitsPerSample != 32)
                throw new ArgumentException("Input source provider must be 32 bit", "sourceProvider");

            waveFormat = new WaveFormat(sourceProvider.WaveFormat.SampleRate, 16, sourceProvider.WaveFormat.Channels);

            this.sourceProvider = sourceProvider;
            this.volume = 1.0f;
        }
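A minimal usage sketch: an IEEE float sample chain is converted back to 16-bit PCM before being handed to an output device (file name and volume value are illustrative):
 var reader = new AudioFileReader("input.mp3");               // 32-bit IEEE float samples
 var volume = new VolumeSampleProvider(reader) { Volume = 0.8f };
 var pcm16 = new SampleToWaveProvider16(volume);              // 16-bit PCM, same rate and channels
 var output = new WaveOutEvent();
 output.Init(pcm16);
 output.Play();   // dispose the WaveOutEvent once playback is finished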
        public FadeOutSampleProvider(ISampleProvider source, float fadeAfter, float fadeDuration)
        {
            if(source.WaveFormat.Channels != 1)
            {
                throw new NotSupportedException("Supported only 1 channel Sample providers");
            }

            Source = source;
            FadeAfterPosition = (long)(fadeAfter * WaveFormat.AverageBytesPerSecond);
            FadeSamples = (int)(fadeDuration * WaveFormat.AverageBytesPerSecond);
            FadeSamplesRemaining = FadeSamples;
        }
Example #23
 private ISampleProvider ConvertToRightChannelCount(ISampleProvider input)
 {
     if (input.WaveFormat.Channels == mixer.WaveFormat.Channels)
     {
         return input;
     }
     if (input.WaveFormat.Channels == 1 && mixer.WaveFormat.Channels == 2)
     {
         return new MonoToStereoSampleProvider(input);
     }
     throw new NotImplementedException("Not yet implemented this channel count conversion");
 }
        /// <summary>
        /// Constructs a new resampler
        /// </summary>
        /// <param name="source">Source to resample</param>
        /// <param name="newSampleRate">Desired output sample rate</param>
        public WdlResamplingSampleProvider(ISampleProvider source, int newSampleRate)
        {
            channels = source.WaveFormat.Channels;
            outFormat = WaveFormat.CreateIeeeFloatWaveFormat(newSampleRate, channels);
            this.source = source;

            resampler = new WdlResampler();
            resampler.SetMode(true, 2, false);
            resampler.SetFilterParms();
            resampler.SetFeedMode(false); // output driven
            resampler.SetRates(source.WaveFormat.SampleRate, newSampleRate);
        }
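A brief usage sketch: resampling a file to 16 kHz and writing the result as 16-bit PCM (file names and target rate are illustrative):
 var reader = new AudioFileReader("speech.wav");                   // e.g. a 44.1 kHz source
 var resampled = new WdlResamplingSampleProvider(reader, 16000);   // output-driven resample to 16 kHz
 WaveFileWriter.CreateWaveFile16("speech16k.wav", resampled);      // finite source, so this terminates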
Example #25
 public SampleAggregator(ISampleProvider source, int fftLength = 1024)
 {
     _channels = source.WaveFormat.Channels;
     if (!IsPowerOfTwo(fftLength))
     {
         throw new ArgumentException("FFT Length must be a power of two");
     }
     _m = (int)Math.Log(fftLength, 2.0);
     _fftLength = fftLength;
     _fftBuffer = new Complex[fftLength];
     _fftArgs = new FftEventArgs(_fftBuffer);
     _source = source;
 }
 public RenderItemSampleProvider(RenderItem renderItem)
 {
     this.RenderItem = renderItem;
     var cachedSampleProvider = new CachedSoundSampleProvider(RenderItem.Sound);
     var offsetSampleProvider = new OffsetSampleProvider(new EnvelopeSampleProvider(cachedSampleProvider, RenderItem.Envelope, RenderItem.SkipOver))
     {
         DelayBySamples = (int)(RenderItem.PosMs * cachedSampleProvider.WaveFormat.SampleRate / 1000),
         TakeSamples = (int)(RenderItem.DurMs * cachedSampleProvider.WaveFormat.SampleRate / 1000),
         SkipOverSamples = (int)(RenderItem.SkipOver * cachedSampleProvider.WaveFormat.SampleRate / 1000)
     };
     this.signalChain = offsetSampleProvider;
     this.firstSample = offsetSampleProvider.DelayBySamples + offsetSampleProvider.SkipOverSamples;
     this.lastSample = this.firstSample + offsetSampleProvider.TakeSamples;
 }
Example #27
 private static ISampleProvider PrepareSound(ISampleProvider input)
 {
     if (input.WaveFormat.Channels == _mixer.WaveFormat.Channels &&
         input.WaveFormat.SampleRate == _mixer.WaveFormat.SampleRate)
     {
         // correctly formatted wave audio file.
         return input;
     }
     if (input.WaveFormat.Channels == 1 && _mixer.WaveFormat.Channels == 2)
     {
         // mono -> stereo
         return new MonoToStereoSampleProvider(input);
     }
     throw new ArgumentException("This audio source is not supported yet.");
 }
Example #28
        public void Read(ISampleProvider waveSampleProvider, FFTCalculated FFTCalculated)
        {
            float[] dummyFftArray = new float[fftBufferLength];
            var dummyFftArrayLength = waveSampleProvider.Read(dummyFftArray, 0, fftBufferLength);

            for (int i = 0; i < dummyFftArrayLength; i += channels) {
                fftBuffer[fftBufferCurPossition].X = (float)(dummyFftArray[i] * FastFourierTransform.HammingWindow(fftBufferCurPossition, fftBufferLength));
                fftBuffer[fftBufferCurPossition].Y = 0;
                fftBufferCurPossition++;
                if (fftBufferCurPossition >= fftBufferLength) {
                    fftBufferCurPossition = 0;
                    // m = log2(fftBufferLength), e.g. 1024 = 2^10
                    FastFourierTransform.FFT(true, m, fftBuffer);
                    FFTCalculated(fftBuffer);
                }
            }
        }
        public FastAttackCompressor1175(ISampleProvider sourceProvider)
        {
            this.sourceProvider = sourceProvider;
            SampleRate = 44100;

            Threshold = new Setting<float>(0, -60, 0, 0.1f, "Threshold (dB)");
            Ratio = new Setting<int>(1, 0, 3, 1, "Ratio");
            Gain = new Setting<float>(0, -20, 20, 0.1f, "Gain");
            Attack = new Setting<int>(20, 20, 2000, 10, "Attack time (usec.)");
            Release = new Setting<int>(250, 20, 1000, 1, "Release time (msec.)");
            Mix = new Setting<float>(100, 0, 100, 0.1f, "Mix%");
            //ratioSlider.DiscreteValueText.Add("4");
            //ratioSlider.DiscreteValueText.Add("8");
            //ratioSlider.DiscreteValueText.Add("12");
            //ratioSlider.DiscreteValueText.Add("20");
            //ratioSlider.DiscreteValueText.Add("All");

            Init();
        }
Example #30
 //int frac;
 //bool mul;
 public ChannelSelector(ISampleProvider source, int keepChannel)
 {
     if (source.WaveFormat.Channels < 2)
     {
         throw new ArgumentException("Source must be stereo or more");
     }
     this.ch = keepChannel;
     this.source = source;
     this.channels = source.WaveFormat.Channels;
     this.waveFormat = WaveFormat.CreateIeeeFloatWaveFormat(source.WaveFormat.SampleRate, channels);
     /*if (waveFormat.Channels > source.WaveFormat.Channels)
     {
         frac = waveFormat.Channels / source.WaveFormat.Channels;
         mul = false;
     }
     else
     {
         frac = source.WaveFormat.Channels / waveFormat.Channels;
         mul = true;
     }*/
 }
Example #31
 public EfectoVolumen(ISampleProvider fuente)
 {
     this.fuente = fuente;
     volume      = 1;
 }
Example #32
 /// <summary>Initializes the cubic interpolator with a sample provider</summary>
 /// <param name="sampleProvider">Object through which to query for samples</param>
 public CubicInterpolator(ISampleProvider<SampleType> sampleProvider) :
     this(sampleProvider, HermiteMatrix)
 {
 }
Example #33
 /// <summary>
 /// Creates a 16 bit Wave File from an ISampleProvider
 /// BEWARE: the source provider must not return data indefinitely
 /// </summary>
 /// <param name="filename">The filename to write to</param>
 /// <param name="sourceProvider">The source sample provider</param>
 public static void CreateWaveFile16(string filename, ISampleProvider sourceProvider)
 {
     CreateWaveFile(filename, new SampleToWaveProvider16(sourceProvider));
 }
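Because the helper reads until the source stops returning data, an endless source has to be limited first. A hedged sketch using NAudio's SignalGenerator together with OffsetSampleProvider.Take (values are illustrative):
 var tone = new SignalGenerator(44100, 1) { Frequency = 440, Gain = 0.2 };            // never-ending sine
 var fiveSeconds = new OffsetSampleProvider(tone) { Take = TimeSpan.FromSeconds(5) };
 WaveFileWriter.CreateWaveFile16("tone.wav", fiveSeconds);                             // stops after 5 s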
Example #34
 /// <summary>
 /// Creates a new FadeInOutSampleProvider
 /// </summary>
 /// <param name="source">The source stream with the audio to be faded in or out</param>
 /// <param name="initiallySilent">If true, we start faded out</param>
 public FadeInOutSampleProvider(ISampleProvider source, bool initiallySilent = false)
 {
     this.source    = source;
     this.fadeState = initiallySilent ? FadeState.Silence : FadeState.FullVolume;
 }
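A short usage sketch: starting silent, fading in immediately, and fading out later (durations and file name are illustrative):
 var reader = new AudioFileReader("song.mp3");
 var fade = new FadeInOutSampleProvider(reader, initiallySilent: true);
 fade.BeginFadeIn(2000);    // fade in over 2 seconds
 // ... later, e.g. shortly before stopping playback:
 fade.BeginFadeOut(3000);   // fade out over 3 seconds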
Example #35
 public SampleAggregator(ISampleProvider source)
 {
     _channels    = source.WaveFormat.Channels;
     this._source = source;
 }
Example #36
 private ISampleProvider StereoToMono(ISampleProvider input)
 {
     return(input.ToMono());
 }
Example #37
 /// <summary>
 /// Initializes a new instance of NotifyingSampleProvider
 /// </summary>
 /// <param name="source">Source Sample Provider</param>
 public NotifyingSampleProvider(ISampleProvider source)
 {
     this.source   = source;
     this.channels = this.WaveFormat.Channels;
 }

        /// <summary>
        /// Creates a new SMB Pitch Shifting Sample Provider with default settings
        /// </summary>
        /// <param name="sourceProvider">Source provider</param>
        public SmbPitchShiftingSampleProvider(ISampleProvider sourceProvider)
            : this(sourceProvider, 4096, 4L, 1f)
        {
        }
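A minimal usage sketch: after construction the shift is controlled through the PitchFactor property, where 1.0 leaves the pitch unchanged (file name and interval are illustrative):
 var reader = new AudioFileReader("vocal.wav");
 var shifter = new SmbPitchShiftingSampleProvider(reader)
 {
     PitchFactor = (float)Math.Pow(2, 3.0 / 12.0)   // raise the pitch by three semitones
 };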
Example #39
 /// <summary>
 /// Initialises a new instance of AfvMeteringSampleProvider that raises 10 stream volume
 /// events per second
 /// </summary>
 /// <param name="source">Source sample provider</param>
 public AfvMeteringSampleProvider(ISampleProvider source) :
     this(source, source.WaveFormat.SampleRate / 10)
 {
 }
 /// <summary>
 /// Creates a new instance of OffsetSampleProvider
 /// </summary>
 /// <param name="sourceProvider">The Source Sample Provider to read from</param>
 public OffsetSampleProvider(ISampleProvider sourceProvider)
 {
     this.sourceProvider = sourceProvider;
 }
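A usage sketch showing the TimeSpan-based properties of OffsetSampleProvider (all values are illustrative):
 var reader = new AudioFileReader("clip.wav");
 var offset = new OffsetSampleProvider(reader)
 {
     DelayBy  = TimeSpan.FromSeconds(1),   // prepend 1 s of silence
     SkipOver = TimeSpan.FromSeconds(2),   // discard the first 2 s of the source
     Take     = TimeSpan.FromSeconds(5),   // then pass 5 s through
     LeadOut  = TimeSpan.FromSeconds(1)    // append 1 s of silence
 };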
Example #41
 public static IWaveProvider ToWaveProvider16(this ISampleProvider sampleProvider)
 {
     return(new SampleToWaveProvider16(sampleProvider));
 }
Example #42
 public static ISampleProvider FollowedBy(this ISampleProvider sampleProvider, ISampleProvider next)
 {
     return(new ConcatenatingSampleProvider(new ISampleProvider[]
     {
         sampleProvider,
         next
     }));
 }
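A brief usage sketch: both providers must share the same WaveFormat, since ConcatenatingSampleProvider does not convert formats (file names are illustrative):
 var intro = new AudioFileReader("intro.wav");
 var main = new AudioFileReader("main.wav");   // same sample rate and channel count as intro
 var sequence = intro.FollowedBy(main);        // plays intro.wav to the end, then main.wav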
Example #43
 public FadeInOutSampleProvider(ISampleProvider source)
 {
     this.source = source;
     this.fadeState = FadeState.FullVolume;
 }
Example #44
        private void Connect()
        {
            if (this.currentState != UiState.ReadyToConnect)
            {
                return;
            }

            Stopwatch watch = Stopwatch.StartNew();

            UpdateUiState(UiState.Connecting);
            if (ShowMiniWindow.IsChecked.Value)
            {
                miniwindow.Show();
            }
            // Minimal validation: bail out if any required UI selection is missing
            if (this.IsMissingInput(this.FromLanguage.SelectedItem, "source language"))
            {
                return;
            }
            if (this.IsMissingInput(this.ToLanguage.SelectedItem, "target language"))
            {
                return;
            }
            //if (this.IsMissingInput(this.Voice.SelectedItem, "voice")) return;
            if (this.IsMissingInput(this.Profanity.SelectedItem, "profanity filter"))
            {
                return;
            }
            if (this.IsMissingInput(this.Mic.SelectedItem, "microphone"))
            {
                return;
            }
            if (this.IsMissingInput(this.Speaker.SelectedItem, "speaker"))
            {
                return;
            }

            if (this.LogAutoSave.IsChecked.Value)
            {
                this.autoSaveFrom = this.Logs.Items.Count;
            }

            string tag = ((ComboBoxItem)Mic.SelectedItem).Tag as string;
            string audioFileInputPath = null;

            if (tag == "File")
            {
                audioFileInputPath = this.AudioFileInput.Text;
                if (!File.Exists(audioFileInputPath))
                {
                    SetMessage("Invalid audio source: selected file does not exist.", "", MessageKind.Error);
                    UpdateUiState(UiState.ReadyToConnect);
                    return;
                }
            }
            bool shouldSuspendInputAudioDuringTTS = this.CutInputAudioCheckBox.IsChecked ?? false;

            this.correlationId = Guid.NewGuid().ToString("D").Split('-')[0].ToUpperInvariant();

            // Setup speech translation client options
            SpeechClientOptions options;

            string voicename = "";

            if (this.Voice.SelectedItem != null)
            {
                voicename = ((ComboBoxItem)this.Voice.SelectedItem).Tag.ToString();
            }
            options = new SpeechTranslateClientOptions()
            {
                TranslateFrom = ((ComboBoxItem)this.FromLanguage.SelectedItem).Tag.ToString(),
                TranslateTo   = ((ComboBoxItem)this.ToLanguage.SelectedItem).Tag.ToString(),
                Voice         = voicename,
            };

            options.Hostname        = baseUrl;
            options.AuthHeaderKey   = "Authorization";
            options.AuthHeaderValue = ""; // set later in ConnectAsync.
            options.ClientAppId     = new Guid("EA66703D-90A8-436B-9BD6-7A2707A2AD99");
            options.CorrelationId   = this.correlationId;
            options.Features        = GetFeatures().ToString().Replace(" ", "");
            options.Profanity       = ((SpeechClient.ProfanityFilter)Enum.Parse(typeof(SpeechClient.ProfanityFilter), ((ComboBoxItem)this.Profanity.SelectedItem).Tag.ToString(), true)).ToString();

            // Setup player and recorder but don't start them yet.
            WaveFormat waveFormat = new WaveFormat(16000, 16, 1);

            // WaveProvider for incoming TTS
            // We use a rather large BufferDuration because we need to be able to hold an entire utterance.
            // TTS audio is received in bursts (faster than real-time).
            textToSpeechBytes = 0;
            playerTextToSpeechWaveProvider = new BufferedWaveProvider(waveFormat);
            playerTextToSpeechWaveProvider.BufferDuration = TimeSpan.FromMinutes(5);

            ISampleProvider sampleProvider = null;

            if (audioFileInputPath != null)
            {
                // Setup mixing of audio from input file and from TTS
                playerAudioInputWaveProvider = new BufferedWaveProvider(waveFormat);
                var srce1 = new Pcm16BitToSampleProvider(playerTextToSpeechWaveProvider);
                var srce2 = new Pcm16BitToSampleProvider(playerAudioInputWaveProvider);
                var mixer = new MixingSampleProvider(srce1.WaveFormat);
                mixer.AddMixerInput(srce1);
                mixer.AddMixerInput(srce2);
                sampleProvider = mixer;
            }
            else
            {
                recorder = new WaveIn();
                recorder.DeviceNumber   = (int)((ComboBoxItem)Mic.SelectedItem).Tag;
                recorder.WaveFormat     = waveFormat;
                recorder.DataAvailable += OnRecorderDataAvailable;
                sampleProvider          = playerTextToSpeechWaveProvider.ToSampleProvider();
            }

            player = new WaveOut();
            player.DeviceNumber = (int)((ComboBoxItem)Speaker.SelectedItem).Tag;
            player.Init(sampleProvider);

            this.audioBytesSent = 0;

            string logAudioFileName = null;

            if (LogSentAudio.IsChecked.Value || LogReceivedAudio.IsChecked.Value)
            {
                string logAudioPath = System.IO.Path.Combine(Environment.GetFolderPath(Environment.SpecialFolder.ApplicationData), Properties.Settings.Default.OutputDirectory);
                try
                {
                    Directory.CreateDirectory(logAudioPath);
                }
                catch
                {
                    this.AddItemToLog(string.Format("Could not create folder {0}", logAudioPath));
                }

                if (LogSentAudio.IsChecked.Value)
                {
                    logAudioFileName = System.IO.Path.Combine(logAudioPath, string.Format("audiosent_{0}.wav", this.correlationId));
                }

                if (LogReceivedAudio.IsChecked.Value)
                {
                    string fmt = System.IO.Path.Combine(logAudioPath, string.Format("audiotts_{0}_{{0}}.wav", this.correlationId));
                    this.audioReceived = new BinaryMessageDecoder(fmt);
                }
            }


            ConnectAsync(options, shouldSuspendInputAudioDuringTTS).ContinueWith((t) =>
            {
                if (t.IsFaulted || t.IsCanceled || !s2smtClient.IsConnected()) // connection failed, was cancelled, or did not end up connected
                {
                    this.Log(t.Exception, "E: Unable to connect: cid='{0}', elapsedMs='{1}'.",
                             this.correlationId, watch.ElapsedMilliseconds);
                    this.SafeInvoke(() => {
                        this.AutoSaveLogs();
                        this.UpdateUiState(UiState.ReadyToConnect);
                    });
                }
                else
                {
                    // Start playing incoming audio
                    player.Play();
                    // Start recording and sending
                    if (logAudioFileName != null)
                    {
                        audioSent = new WaveFileWriter(logAudioFileName, waveFormat);
                        this.Log("I: Recording outgoing audio in {0}", logAudioFileName);
                    }
                    // Send the WAVE header
                    s2smtClient.SendBinaryMessage(new ArraySegment <byte>(GetWaveHeader(waveFormat)));
                    if (audioFileInputPath != null)
                    {
                        streamAudioFromFileInterrupt = new CancellationTokenSource();
                        Task.Run(() => this.StreamFile(audioFileInputPath, streamAudioFromFileInterrupt.Token))
                        .ContinueWith((x) =>
                        {
                            if (x.IsFaulted)
                            {
                                this.Log(x.Exception, "E: Error while playing audio from input file.");
                            }
                            else
                            {
                                this.Log("I: Done playing audio from input file.");
                            }
                        });
                    }
                    else
                    {
                        // Start sending audio from the recorder.
                        recorder.StartRecording();
                    }
                    this.Log("I: Connected: cid='{0}', elapsedMs='{1}'.",
                             this.correlationId, watch.ElapsedMilliseconds);
                    this.SafeInvoke(() => this.UpdateUiState(UiState.Connected));
                }
            }).ContinueWith((t) => {
                if (t.IsFaulted)
                {
                    Log(t.Exception, "E: Failed to start sending audio.");
                    this.SafeInvoke(() => {
                        this.AutoSaveLogs();
                        this.UpdateUiState(UiState.ReadyToConnect);
                    });
                }
            });
        }
Example #45
 public MonoSampleProvider(ISampleProvider sourceProvider)
 {
     this.sourceProvider = sourceProvider;
     sourceChannels      = sourceProvider.WaveFormat.Channels;
     WaveFormat          = new WaveFormat(sourceProvider.WaveFormat.SampleRate, sourceProvider.WaveFormat.BitsPerSample, 1);
 }
Example #46
 // Add the disposable readers to the mixer inputs after conversion to stereo and resampling to 44.1 kHz.
 private void AddMixerInput(ISampleProvider input1, ISampleProvider input2)
 {
     mixer1.AddMixerInput(ResampleTo44100(ConvertToRightChannelCount(input1)));
     mixer2.AddMixerInput(ResampleTo44100(ConvertToRightChannelCount(input2)));
 }
Example #47
 public Filter(ISampleProvider source, List <BiQuadFilter[]> filters)
 {
     _sourceProvider = source;
     _filters        = filters;
 }
Example #48
 public Delay(ISampleProvider fuente)
 {
     this.fuente = fuente;
     // this.offsetTiempoMS = offsetTiempoMS;
     //50ms - 5000ms
 }
 /// <summary>
 /// Since we will use ClippingSampleProvider as an "effect", or in the middle of a signal chain,
 /// we will set the source we want to change in ClippingSampleProvider's constructor.
 /// </summary>
 /// <param name="source">
 /// Sample to be clipped.
 /// </param>
 public ClippingSampleProvider(ISampleProvider source)
 {
     this.source = source;
 }
Example #50
 public override void AddSource(ISampleProvider source)
 {
     _Queue.Enqueue(source);
 }
Example #51
        private void btnPlay_Click(object sender, RoutedEventArgs e)
        {
            if (_waveOut != null)
            {
                if (_waveOut.PlaybackState == PlaybackState.Playing)
                {
                    return;
                }
                else if (_waveOut.PlaybackState == PlaybackState.Paused)
                {
                    _waveOut.Play();

                    btnPlay.IsEnabled  = false;
                    btnPause.IsEnabled = true;
                    btnStop.IsEnabled  = true;
                    return;
                }
            }

            if (string.IsNullOrEmpty(FileName))
            {
                return;
            }

            try
            {
                CreateWaveOut();
            }
            catch (Exception driverCreateException)
            {
                Log.Debug("btnPlay_Click__MessageBox.Show driverCreateException BEFORE");
                MessageBox.Show(String.Format("{0}", driverCreateException.Message));
                Log.Debug("btnPlay_Click__MessageBox.Show driverCreateException AFTER");
                return;
            }

            ISampleProvider sampleProvider = null;

            try
            {
                sampleProvider = CreateInputStream();
            }
            catch (Exception createException)
            {
                Log.Debug("btnPlay_Click__MessageBox.Show createException BEFORE");
                MessageBox.Show(String.Format("{0}", createException.Message), "Error Loading File");
                Log.Debug("btnPlay_Click__MessageBox.Show createException AFTER");
                return;
            }

            sliderPosition.Maximum       = (int)_fileWaveStream.TotalTime.TotalSeconds;
            sliderPosition.TickFrequency = sliderPosition.Maximum / 60;
            txtTime.Text = string.Format("00:00:00 / {0:00}:{1:00}:{2:00}",
                                         (int)_fileWaveStream.TotalTime.Hours,
                                         (int)_fileWaveStream.TotalTime.Minutes,
                                         (int)_fileWaveStream.TotalTime.Seconds);

            try
            {
                _waveOut.Init(new SampleToWaveProvider(sampleProvider));
            }
            catch (Exception initException)
            {
                Log.Debug("btnPlay_Click__MessageBox.Show initException BEFORE");
                MessageBox.Show(String.Format("{0}", initException.Message), "Error Initializing Output");
                Log.Debug("btnPlay_Click__MessageBox.Show initException AFTER");
                return;
            }

            _waveOut.Play();
            _tickTimer.Start();

            btnPlay.IsEnabled  = false;
            btnPause.IsEnabled = true;
            btnStop.IsEnabled  = true;
        }
 private void AddMixerInput(ISampleProvider input)
 {
     mixer.AddMixerInput(ConvertToRightChannelCount(input));
 }
Example #53
 public Delay(ISampleProvider fuente)
 {
     this.fuente    = fuente;
     offsetTiempoMS = 1000;
 }
 /// <summary>
 /// Initializes a new instance of VolumeSampleProvider
 /// </summary>
 /// <param name="source">Source Sample Provider</param>
 public VolumeSampleProvider(ISampleProvider source)
 {
     this.source = source;
     this.volume = 1.0f;
 }
 public EfectoFadeIn(ISampleProvider fuente, float duracion)
 {
     this.fuente   = fuente;
     this.duracion = duracion;
 }
Example #56
    /// <summary>
    /// Sets the audio source
    /// </summary>
    /// <param name="waveProvider">Sample provider to play</param>
    public override void SetAudioSource(ISampleProvider waveProvider) => Invoke(() =>
    {
        var render = this.UpdateAudioRender();

        render.Init(waveProvider);
    });
 public void Play(ISampleProvider samples)
 {
     AddMixerInput(samples);
 }
 /// <summary>
 /// Constructs a new SampleProviderEventArgs
 /// </summary>
 public SampleProviderEventArgs(ISampleProvider sampleProvider)
 {
     SampleProvider = sampleProvider;
 }
Example #59
        public static ISampleProvider FollowedBy(this ISampleProvider sampleProvider, TimeSpan silenceDuration, ISampleProvider next)
        {
            OffsetSampleProvider offsetSampleProvider = new OffsetSampleProvider(sampleProvider)
            {
                LeadOut = silenceDuration
            };

            return(new ConcatenatingSampleProvider(new ISampleProvider[]
            {
                offsetSampleProvider,
                next
            }));
        }
Example #60
 public void Init(ISampleProvider provider, int samplesPerPeak)
 {
     Provider       = provider;
     SamplesPerPeak = samplesPerPeak;
     ReadBuffer     = new float[samplesPerPeak];
 }